index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
992,500 | aeab09bbf0041fa2a1d8b0ccc6f563f50c3f8d09 | # Generated by Django 2.2.7 on 2020-04-27 04:46
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial schema for the artwork app: Oner, Product, User and Order.

    Review fixes applied:
    - `panting_rate` had `default='free'`, which is not a valid default for an
      IntegerField (saving a row would fail converting 'free' to int). Changed
      to `default=0`; the "free" semantics should be modeled explicitly if needed.
    - `pin_code` had `max_length=100`, which IntegerField does not accept.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            # NOTE(review): 'Oner' looks like a typo for 'Owner'; kept as-is
            # because renaming a migrated model requires a follow-up migration.
            name='Oner',
            fields=[
                ('oner_id', models.AutoField(primary_key=True, serialize=False)),
                ('first_name', models.CharField(max_length=200)),
                ('last_name', models.CharField(max_length=200)),
                ('email', models.EmailField(max_length=254)),
                ('contact', models.IntegerField()),
                ('city', models.CharField(max_length=200)),
                ('address', models.CharField(max_length=200)),
                ('img', models.ImageField(upload_to='media/')),
                ('password', models.CharField(max_length=200)),
            ],
        ),
        migrations.CreateModel(
            name='Product',
            fields=[
                ('pro_id', models.AutoField(primary_key=True, serialize=False)),
                ('panting_title', models.CharField(max_length=200)),
                ('panting_by', models.CharField(max_length=200)),
                # Fixed: default was the string 'free', invalid for an integer column.
                ('panting_rate', models.IntegerField(default=0)),
            ],
        ),
        migrations.CreateModel(
            name='User',
            fields=[
                ('user_id', models.AutoField(primary_key=True, serialize=False)),
                ('first_name', models.CharField(max_length=200)),
                ('last_name', models.CharField(max_length=200)),
                ('email', models.EmailField(max_length=254)),
                ('contact', models.IntegerField()),
                ('city', models.CharField(max_length=200)),
                # Fixed: IntegerField takes no max_length option.
                ('pin_code', models.IntegerField()),
                ('address', models.CharField(max_length=200)),
                ('state', models.CharField(max_length=200)),
                ('password', models.CharField(max_length=200)),
            ],
        ),
        migrations.CreateModel(
            name='Order',
            fields=[
                ('order_id', models.AutoField(primary_key=True, serialize=False)),
                ('order_status', models.CharField(choices=[('1', 'active'), ('0', 'dactive')], max_length=20)),
                ('order_time', models.DateTimeField(default=django.utils.timezone.now)),
                ('pro_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='artwork.Product')),
                ('user_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='artwork.User')),
            ],
        ),
    ]
|
992,501 | 4fd4c6a05aa9cbee81abdb459010f27079787994 | """
Programming assignment #1
Created by: Raphael Miller
PA1 - Due Feb 1st.
Purpose:
To demonstrate the common filters used in Computer Vision applications.
This program is designed to implement various filters for pictures
including but not limited to, box, gaussian, and sobel filters.
"""
"""
run.py - run handles the running of the program, and handles the arguments coming from the command line.
"""
import sys
import filters
import cv2
# Validate CLI arguments before doing any work.
# Fixed: the original printed the error but kept running; exit with a
# non-zero status instead of continuing with a bad argument list.
if len(sys.argv) > 5:
    print("ERROR: too many arguments correct use is run.py [filter_name][kernel_size][image_path]")
    sys.exit(1)
def main(filter, kernel_size, image, sigma):
    """
    Apply the filter named by *filter* to the image at path *image*.

    Reads the image as grayscale, then dispatches to the matching function
    in the filters module. Each filter takes a different subset of the
    (kernel_size, sigma, image) arguments, which is why the dispatch table
    below wraps each call in a lambda.

    :param filter: name of the filter to apply (e.g. 'box', 'sobel')
    :param kernel_size: kernel size for the filters that use one
    :param image: path to the input image
    :param sigma: standard deviation for the gaussian-based filters
    :returns: the chosen filter's return value, or 0 if the name is unknown
    """
    image = cv2.imread(image, cv2.IMREAD_GRAYSCALE)
    # Lazy dispatch table: lambdas keep every filter from running eagerly,
    # which was the flaw in the earlier dict-based attempt (removed here).
    dispatch = {
        'box': lambda: filters.box(kernel_size, image),
        'median': lambda: filters.median(image),
        'gaussian': lambda: filters.gaussian(kernel_size, sigma, image),
        'gradient': lambda: filters.gradient(image),
        'sobel': lambda: filters.sobel(image),
        'fast_gaussian': lambda: filters.fast_gaussian(kernel_size, image, sigma),
        'histogram': lambda: filters.histogram(kernel_size, image),
        'thresholding': lambda: filters.thresholding(kernel_size, image),
    }
    try:
        return dispatch[filter]()
    except KeyError:
        print("function not recognized")
        return 0
# Run the requested filter. kernel_size and sigma arrive as strings from the
# command line, so convert them to numbers before dispatching — the filters
# do arithmetic with them.
# NOTE(review): assumes exactly 4 CLI arguments are supplied; fewer raises
# IndexError. Confirm intended arity against the usage message above.
res = main(sys.argv[1], int(sys.argv[2]), sys.argv[3], float(sys.argv[4]))
sys.exit(res)
992,502 | 74a9ec06a13c7ac36f955ef1b74572c1abc24337 | # data strucure
thisset = {"she","is","samrt"}
print(len(thisset))
thisset.remove("is")
say = {"apple","banana","shosho","welcome"}
x = say.pop()
print(x)
print(say)
say.clear()
print(say)
i = (("hello","hello","hello"))
print(i) |
992,503 | 2e028ee425849396a0731b677b39922eec331871 | ### Author: Matiur Rahman Minar ###
### EMCOM Lab, SeoulTech, 2021 ###
### Task: Generating binary mask/silhouette/segmentation ###
### especially for clothing image ###
### Focused method: Binary thresholding ###
import os
import cv2
import numpy as np
from PIL import Image
from matplotlib import pyplot as plt
def cloth_masking(im_path, save_path, viz=False):
    """Generate a binary cloth mask/silhouette from an image.

    Pipeline: inverse binary threshold -> flood fill to close interior
    holes -> morphological opening to drop salt noise -> erosion to thin
    the boundary. The resulting mask is written to save_path.

    Args:
        im_path (str): path of the input (clothing) image.
        save_path (str): path the mask image is written to.
        viz (bool): if True, show the intermediate stages with matplotlib.
    """
    img = cv2.imread(im_path, 0)  # grayscale copy for thresholding
    img1 = Image.open(im_path).convert('RGB')  # original, display only
    lo = 250
    hi = 255
    # Near-white background -> 0, everything darker (the cloth) -> 255.
    ret, th_bin = cv2.threshold(img, lo, hi, cv2.THRESH_BINARY_INV)

    # Filling operation: flood-fill the background from (0, 0), invert it,
    # and OR with the thresholded image so enclosed holes become solid.
    im_floodfill = th_bin.copy()
    # The flood-fill mask must be 2 pixels larger than the image.
    h, w = th_bin.shape[:2]
    mask = np.zeros((h + 2, w + 2), np.uint8)
    cv2.floodFill(im_floodfill, mask, (0, 0), 255)
    im_floodfill_inv = cv2.bitwise_not(im_floodfill)
    th_filled = th_bin | im_floodfill_inv

    # Morphological opening removes isolated salt noise.
    kernel = np.ones((2, 2), np.uint8)
    th_opened = cv2.morphologyEx(th_filled, cv2.MORPH_OPEN, kernel)
    # Erosion thins out the mask boundary slightly.
    kernel = np.ones((3, 3), np.uint8)
    th_eroded = cv2.erode(th_opened, kernel, iterations=1)

    if viz:
        # Plot the intermediate stages side by side.
        titles = ['Original Image', 'Binary thresholding', 'Filling',
                  'Image', 'Opening', 'Erosion']
        images = [img1, th_bin, th_filled, img1, th_opened, th_eroded]
        for i in range(6):
            plt.subplot(2, 3, i + 1), plt.imshow(images[i], 'gray')
            plt.title(titles[i])
            plt.xticks([]), plt.yticks([])
        plt.show()

    # save result
    print("Saving ", save_path)
    cv2.imwrite(save_path, th_eroded)
def main():
    """Mask every image in data/cloth/ and save results to results/masks/."""
    cloth_dir = "data/cloth/"
    res_dir = "results/masks/"
    # Fixed: ensure the output directory exists — cv2.imwrite fails silently
    # (returns False, writes nothing) when the directory is missing.
    os.makedirs(res_dir, exist_ok=True)
    image_list = os.listdir(cloth_dir)
    # iterate images in the path
    for each in image_list:
        image_path = os.path.join(cloth_dir, each)
        res_path = os.path.join(res_dir, each.replace(".jpg", ".png"))
        cloth_masking(image_path, res_path, viz=True)


if __name__ == "__main__":
    main()
|
992,504 | 9a439f227533274d40766fa9123dfbe92fbf3fdc | import numpy as np
import math
from simulation_parameters import *
from sensors import Enc, GPS, UWB, Camera, Range
import matplotlib.pyplot as plt
import sympy as sym
class kal_hist():
    """Time-history buffer for one Kalman filter run.

    Keeps flattened Python-list copies of the state estimate, covariance,
    time stamps, estimation error and measurements so later steps can
    extend() them and plotting code can consume them directly.
    """

    def __init__(self, xk, Pk, real_state):
        # Store flattened plain lists (not numpy arrays) so history_save()
        # can cheaply extend them each filter step.
        self.xk = xk.flatten().tolist()
        self.Pk = Pk.flatten().tolist()
        self.Time = [0]
        estimation_error = xk - real_state
        self.Dxk = estimation_error.flatten().tolist()
        self.z = []
class kal():
    """Extended Kalman filter estimating a differential-drive agent's pose.

    State vector is (x, y, yaw). Prediction uses encoder odometry; the
    update step fuses whichever sensors are scheduled to measure at the
    current step. Relies on module-level globals imported from
    simulation_parameters (sigmaX0, sigmaY0, sigmaPsi0, L, r_w, dt) —
    NOTE(review): their meanings are inferred from usage here; confirm
    against simulation_parameters.
    """

    def __init__(self, agent):
        self.agent = agent
        # Initial estimate: true pose corrupted by the initial-uncertainty sigmas.
        self.xk = np.array([self.agent.state.x,
                            self.agent.state.y,
                            self.agent.state.yaw]) + (
            np.diag([sigmaX0,sigmaY0,sigmaPsi0]) @ np.random.randn(3))
        self.xk1 = self.xk  # a-priori (predicted) state
        self.Pk = np.diag([sigmaX0**2,sigmaY0**2,sigmaPsi0**2])
        self.Pk1 = self.Pk  # a-priori (predicted) covariance
        self.Enc = Enc()
        self.sensors = [Camera()] #[GPS(), UWB(), Camera()]
        self.sensors_measuring_at_T = self.sensors  # subset due to measure this step
        self.Range = Range()
        self.Camera = Camera()
        self.R = [] # measurement noise covariance; redefined later
        self.H = [] # observation matrix; redefined later
        self.S = np.array([[0,0,0],[0,0,0],[0,0,0]])  # innovation covariance
        self.W = np.array([[0,0,0],[0,0,0],[0,0,0]])  # Kalman gain
        self.z = self.xk  # latest measurement vector
        self.u_est_dt = np.array([0,0])  # encoder-estimated wheel increments
        self.kalmanTime = 0
        real_state = np.array([self.agent.state.x, self.agent.state.y, self.agent.state.yaw])
        self.hist = kal_hist(self.xk,self.Pk,real_state)

    def control_estimation(self):
        """Read the wheel increments for this step from the encoder model."""
        self.u_est_dt = self.Enc.measure(self.agent)

    def state_prediction(self):
        """Propagate the state with the differential-drive kinematic model."""
        theta = self.xk[2]
        # B maps (right, left) wheel increments into (dx, dy, dyaw).
        B = np.array([[math.cos(theta),0],[math.sin(theta),0],[0,1]]) @ np.array([[L,L],[1,-1]]) * r_w/(2*L)
        self.xk1 = self.xk + B @ self.u_est_dt

    def covariance_prediction(self):
        """Propagate the covariance: Pk1 = A Pk A' + B Q B'."""
        theta = self.xk[2]
        # A: Jacobian of the motion model w.r.t. the state.
        A = np.array([[1, 0, -math.sin(theta) * r_w/2 * (self.u_est_dt[0] + self.u_est_dt[1])],
                      [0, 1, math.cos(theta) * r_w/2 * (self.u_est_dt[0] + self.u_est_dt[1])],
                      [0, 0, 1]]
                     )
        # B: Jacobian of the motion model w.r.t. the control input.
        B = np.array([[ math.cos(theta) * r_w/2 , math.cos(theta) * r_w/2 ],
                      [ math.sin(theta) * r_w/2 , math.sin(theta) * r_w/2 ],
                      [ r_w/(2*L) , - r_w/(2*L) ]])
        Pk = self.Pk
        Q = self.Enc.Q  # encoder noise covariance
        Pk1 = A @ Pk @ A.T + B @ Q @ B.T
        self.Pk1 = Pk1

    def is_measuring(self):
        """Collect the sensors whose rate makes them fire at the current time.

        Returns True if at least one sensor measures this step; also stores
        the firing subset in self.sensors_measuring_at_T.
        """
        meas_list = []
        meas_flag = False
        for sensor in self.sensors:
            # A sensor fires when the step index is a multiple of its period
            # (1/rate expressed in simulation steps); never at t == 0.
            if (int(self.kalmanTime/dt) % int((1/sensor.rate)/dt)) == 0 and self.kalmanTime != 0:
                meas_list.append(sensor)
                meas_flag = True
        self.sensors_measuring_at_T = meas_list
        return meas_flag

    def observation_and_measure(self):
        """Stack H, R and the measurement z from all sensors firing this step."""
        H = np.array([])
        R = np.array([])
        hh = 0  # total number of measurement rows
        state = np.array([self.agent.state.x, self.agent.state.y, self.agent.state.yaw])
        z = np.array([])
        for sensor in self.sensors_measuring_at_T:
            hh = hh + sensor.H.shape[0]
            H = np.append(H , sensor.H.flatten())
            R = np.append(R , np.diag(sensor.R))
            z = np.append(z , sensor.measure(state))
        self.H = H.reshape((hh, 3))
        self.R = np.diag(R)
        self.z = z

    def covariance_innovation(self):
        """Innovation covariance S = H Pk1 H' + R."""
        self.S = self.H @ self.Pk1 @ self.H.T + self.R

    def gain(self):
        """Kalman gain W = Pk1 H' S^-1."""
        self.W = self.Pk1 @ self.H.T @ np.linalg.inv(self.S)

    def state_update(self):
        """A-posteriori state: prediction corrected by the weighted innovation."""
        self.xk = self.xk1 + self.W @ (self.z - self.H @ self.xk1)

    def covariance_update(self):
        """A-posteriori covariance: Pk = (I - W H) Pk1."""
        self.Pk = (np.eye(3) - self.W @ self.H) @ self.Pk1

    def update_without_measure(self):
        """No measurement this step: the prediction becomes the estimate."""
        self.xk = self.xk1
        self.Pk = self.Pk1

    def history_save(self):
        """Append the current estimate, covariance, error and measurement to history."""
        self.hist.xk.extend((self.xk.flatten()).tolist())
        self.hist.Pk.extend((self.Pk.flatten()).tolist())
        self.hist.Time.extend([self.kalmanTime])
        state = np.array([self.agent.state.x, self.agent.state.y, self.agent.state.yaw])
        self.hist.Dxk.extend(((self.xk - state).flatten()).tolist())
        self.hist.z.append((self.z).tolist())

    def filter(self):
        """Run one full predict/update cycle and log it to history."""
        self.control_estimation()
        self.state_prediction()
        self.covariance_prediction()
        if self.is_measuring():
            self.observation_and_measure()
            self.covariance_innovation()
            self.gain()
            self.state_update()
            self.covariance_update()
        else:
            self.update_without_measure()
        self.kalmanTime += dt
        self.history_save()

    def ellipse_plot(self):
        """Plot the 3-sigma position-uncertainty ellipse around the estimate."""
        Pxy = self.Pk[0:2,0:2]  # position sub-covariance
        eigval, eigvec = np.linalg.eig(Pxy)
        # Order the axes so 'a' is the semi-major axis.
        if eigval[0] >= eigval[1]:
            bigind = 0
            smallind = 1
        else:
            bigind = 1
            smallind = 0
        t = np.arange(0, 2 * math.pi + 0.1, 0.1)
        a = math.sqrt(eigval[bigind]) * 3   # 3-sigma semi-major axis
        b = math.sqrt(eigval[smallind]) * 3 # 3-sigma semi-minor axis
        x = [a * math.cos(it) for it in t]
        y = [b * math.sin(it) for it in t]
        # Rotate the ellipse to align with the dominant eigenvector.
        angle = math.atan2(eigvec[bigind, 1], eigvec[bigind, 0])
        rot = np.array([[math.cos(angle), math.sin(angle)],
                        [-math.sin(angle), math.cos(angle)]])
        fx = rot @ (np.array([x, y]))
        # Translate the ellipse onto the estimated position.
        px = np.array(fx[0, :] + self.xk[0]).flatten()
        py = np.array(fx[1, :] + self.xk[1]).flatten()
        plt.plot(px, py, "-", color = self.agent.color)
992,505 | 4aa9138d7b682d18ded02fd1f6d00ffa95d9d395 | sqlalchemy_imperative_template_str = """
from dataclasses import dataclass
from dataclasses import field
from typing import List
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import Text
from sqlalchemy import Integer
from sqlalchemy.orm import registry
from sqlalchemy.orm import relationship
mapper_registry = registry()
metadata = MetaData()
{% if not no_model_import %}
from {{model_path}} import *
{% endif %}
{% for c in classes %}
tbl_{{classname(c.name)}} = Table('{{c.name}}', metadata,
{%- for s in c.attributes.values() %}
Column('{{s.name}}',
Text,
{% if 'foreign_key' in s.annotations -%}
ForeignKey('{{ s.annotations['foreign_key'].value }}'),
{% endif -%}
{% if 'primary_key' in s.annotations -%}
primary_key=True
{%- endif -%}
),
{%- endfor %}
)
{% endfor -%}
# -- Mappings --
{% for c in classes if not is_join_table(c) %}
mapper_registry.map_imperatively({{classname(c.name)}}, tbl_{{classname(c.name)}}, properties = {
## NOTE: mapping omitted for now, see https://stackoverflow.com/questions/11746922/sqlalchemy-object-has-no-attribute-sa-adapter
{% for mapping in backrefs[c.name] %}
{% if mapping.uses_join_table %}
{% else %}
#'{{mapping.source_slot}}': relationship( {{ mapping.target_class }}, backref='{{c.name}}' ),
{% endif %}
#'{mapping.source_slot}': relationship()
## {{ mapping }}
{% endfor %}
})
{% endfor %}
"""
|
992,506 | e65a2dd56dd2cbe53e5a33bc52cfda66399e888e | # coding:utf-8
from dao.db import session
from model.models import TrackingDetail
class TrackingDetailDao(object):
    """Data-access helpers for TrackingDetail rows."""

    @staticmethod
    def get_by_task(task_id, status):
        """Return every TrackingDetail of *task_id* that has *status*."""
        query = session.query(TrackingDetail).filter(
            TrackingDetail.task_id == task_id,
            TrackingDetail.status == status,
        )
        return query.all()

    @staticmethod
    def update(tracking_detail):
        """Write the mutable fields of *tracking_detail* back to its row.

        The retry counter is incremented from the in-memory object's value.
        """
        values = {
            TrackingDetail.status: tracking_detail.status,
            TrackingDetail.start_time: tracking_detail.start_time,
            TrackingDetail.end_time: tracking_detail.end_time,
            TrackingDetail.url: tracking_detail.url,
            TrackingDetail.des: tracking_detail.des,
            TrackingDetail.snapshot: tracking_detail.snapshot,
            TrackingDetail.result: tracking_detail.result,
            TrackingDetail.retry: tracking_detail.retry + 1,
        }
        session.query(TrackingDetail).filter(
            TrackingDetail.id == tracking_detail.id).update(values)
992,507 | 6bbf56b7ccd4627904d42525ebe646ad1a0507a0 | #coding:utf-8
from db.connDB import get_conn,get_cur
from random import choice
from config import set_uuid,get_uuid
class data_manage():
    """Thin helper around a shared DB connection/cursor pair.

    Note: each query helper closes the connection after a single query, so a
    fresh data_manage instance is required per call (existing behavior kept).

    Review fixes:
    - All queries that interpolate caller values now use parameterized SQL
      instead of %-string formatting (SQL injection risk).
    - update_invoic reuses conn_close() instead of duplicating it.
    """

    def __init__(self):
        self.conn = get_conn()
        self.cur = get_cur()
        print("cur::L", self.cur)
        print("conn::L", self.conn)

    def conn_close(self):
        """Close the cursor, then the connection."""
        self.cur.close()
        self.conn.close()

    def select_query_all(self, sql, parm=None):
        """Execute *sql* (optionally with parameters) and return all rows."""
        if parm is None:
            self.cur.execute(sql)
        else:
            self.cur.execute(sql, parm)
        data = self.cur.fetchall()
        self.conn_close()
        return data

    def select_query_one(self, sql, parm):
        """Execute a parameterized query and return the first row."""
        self.cur.execute(sql, parm)
        data = self.cur.fetchone()
        self.conn_close()
        return data

    def select_test(self, uid):
        pass

    def select_tourddid(self, uid):
        """Pick a random tour id belonging to *uid* and store it via set_uuid."""
        # Parameterized to avoid SQL injection through uid.
        sql = "select id from 630_tour where customer_id = %s ORDER BY id DESC"
        data = self.select_query_all(sql, (uid,))
        res = choice(data)[0]
        set_uuid(res)

    # update invoice status
    def update_invoic(self, uid, status):
        """Update the newest invoice row for *uid*.

        When status == '1' the courier name and tracking number are also set.
        """
        if status == '1':
            sql = ("update 630_invoice SET STATUS = %s, post_company = %s, "
                   "post_num = %s where uid = %s ORDER BY id desc limit 1")
            parm = (status, '圆通快递', 'xxx0001', uid)
        else:
            sql = "update 630_invoice SET STATUS = %s where uid = %s ORDER BY id desc limit 1"
            parm = (status, uid)
        print("sql::::", sql)
        self.cur.execute(sql, parm)
        self.conn.commit()
        self.conn_close()
|
992,508 | 9afb9bb20ce6e064165b396d098aa49669a50e70 | # -*- coding: utf-8 -*-
"""
Discription: Preferences Window
Author(s): M. Fränzl
Data: 19/06/11
"""
import os
import numpy as np
from PyQt5.QtWidgets import QDialog
from PyQt5.uic import loadUi
from pyqtgraph import QtCore, QtGui
class HelpWidget(QDialog):
    """Help dialog whose layout is loaded from the .ui file named after this module."""

    def __init__(self):
        # Title bar shows only a close button (no minimize/maximize).
        super().__init__(None, QtCore.Qt.WindowCloseButtonHint)
        # Load "<this module's path without extension>.ui" into this dialog.
        loadUi(os.path.splitext(os.path.relpath(__file__))[0] + '.ui', self)
|
992,509 | ad4236e7e373c693d9d09ddcb61d51a4c9df638d | import logging
_LOGGER = logging.getLogger(__name__)
class Database:
    """Base class for key/value persistence backends.

    Concrete subclasses wrap a native driver for a particular database and
    implement connect/put/get (and optionally disconnect/get_keys).
    """

    def __init__(self, config, asm=None):
        """Initialise common properties and connection placeholders.

        Args:
            config (dict): This database's section of `configuration.yaml`.
            asm: Optional owning application object.
        """
        self.name = ""
        self.config = config
        self.asm = asm
        self.client = None
        self.database = None

    async def connect(self):
        """Open the backend connection and store it on self.

        Subclasses must override; the base class cannot connect to anything.
        """
        raise NotImplementedError

    async def disconnect(self):
        """Close the backend connection; a no-op unless overridden."""

    async def put(self, collection, key, data):
        """Persist *data* under *key* in *collection*.

        Args:
            collection (str): database/collection name.
            key (str): key to store under.
            data (object): value to serialise and store.

        Returns:
            bool: True when stored successfully, False otherwise.
        """
        raise NotImplementedError

    async def get(self, collection, key):
        """Return the object stored under *key* in *collection*, or None.

        Args:
            collection (str): database/collection name.
            key (str): key to look up.
        """
        raise NotImplementedError

    async def get_keys(self, collection):
        """Return the list of keys stored in *collection*, or None.

        Args:
            collection (str): database/collection name.
        """
        raise NotImplementedError
class Memory:
    """Facade that reads/writes data through one or more Database backends.

    Attributes:
        databases (list of Database): Backends used in order; reads return
            the first backend's result, writes go to every backend.
    """

    def __init__(self):
        """Create the object with an empty backend list."""
        self.databases = []

    async def get_keys(self, collection):
        """Return the list of keys in *collection*.

        Args:
            collection (str): database/collection name.

        Returns:
            The first backend's key list, or None when no backend is set up.
        """
        _LOGGER.debug("Getting keys of %s from memory.", collection)
        results = [await database.get_keys(collection)
                   for database in self.databases]
        # Fixed: results[0] raised IndexError when no databases were configured.
        return results[0] if results else None

    async def get(self, collection, key):
        """Return the object stored under *key* in *collection*.

        Args:
            collection (str): database/collection name.
            key (str): key to retrieve.

        Returns:
            The first backend's stored value, or None.
        """
        # Fixed: the format string previously had one %s for two arguments,
        # which made the logging module report a formatting error.
        _LOGGER.debug("Getting %s from memory collection %s.", key, collection)
        results = [await database.get(collection, key)
                   for database in self.databases]
        return results[0] if results else None

    async def put(self, collection, key, data):
        """Store *data* under *key* in every configured backend.

        Args:
            collection (str): database/collection name.
            key (str): key to store under.
            data (obj): value to store.
        """
        # Fixed: format-string/argument mismatch, as in get() above.
        _LOGGER.debug("Putting %s to memory collection %s.", key, collection)
        for database in self.databases:
            await database.put(collection, key, data)
|
992,510 | beaa27d0d59d8ce10781480b004d6a031a94673e | from boto.swf.layer1_decisions import Layer1Decisions
from flow.api import make_request
from flow.core import SWF
from flow.core import check_and_add_kwargs
def poll_for_decision_task(domain, task_list, identity=None,
                           next_page_token=None, maximum_page_size=1000,
                           reverse_order=False):
    """Fetch a DecisionTask for a decider from the given SWF task list.

    A decision task may be returned for any open workflow execution using the
    specified task list; it includes a paginated view of the execution's
    history, which the decider uses (with the workflow type) to decide how to
    handle the task. This initiates a long poll: the service holds the HTTP
    connection open and responds as soon as a task is available. If none
    appears within 60 seconds an empty result is returned (a DecisionTask
    whose task_token is an empty string).

    http://boto3.readthedocs.org/en/latest/reference/services/swf.html#SWF.Client.poll_for_decision_task

    domain (string) -- [REQUIRED] name of the domain containing the task lists.
    task_list (dict) -- [REQUIRED] {'name': <task list name>}.
    identity (string) -- identity of the decider, recorded in the
        DecisionTaskStarted event for diagnostic tracing; form is user defined.
    next_page_token (string) -- token from a previous call to fetch the next
        page of results; keep all other arguments unchanged.
    maximum_page_size (integer) -- max results per call (default and ceiling
        is 1000; smaller values are allowed).
    reverse_order (boolean) -- when True, events are returned in reverse
        order instead of ascending eventTimestamp order.

    returns dict, or None when the request fails
    """
    optional_params = (
        ('identity', identity, None),
        ('maximumPageSize', maximum_page_size, None),
        ('reverseOrder', reverse_order, None),
        ('nextPageToken', next_page_token, None),
    )
    kwargs = {}
    for aws_prop, value, conversion in optional_params:
        kwargs = check_and_add_kwargs(aws_prop, value, conversion, kwargs)

    result = make_request(
        SWF.poll_for_decision_task,
        domain=domain,
        taskList=task_list,
        **kwargs)
    if not result.success:
        return None
    return result.result
def respond_decision_task_completed(token, decisions, execution_context=None):
    """Report a successfully completed DecisionTask back to SWF.

    Adds a DecisionTaskCompleted event to the workflow history; any
    execution_context supplied is attached to that event.

    token (string) -- [REQUIRED] the task_token from the DecisionTask.
    decisions (list) -- decisions (possibly empty) made while processing the
        task; see the docs for the decision structure for details.
    execution_context (string) -- user-defined context to add to the
        workflow execution.

    returns the request result, or None on failure
    """
    extra = {'executionContext': execution_context} if execution_context else {}
    result = make_request(
        SWF.respond_decision_task_completed,
        taskToken=token,
        decisions=decisions,
        **extra)
    if not result.success:
        return None
    return result.result
def get_decision_manager():
    # Fresh boto Layer1Decisions helper for accumulating SWF decisions.
    return Layer1Decisions()
|
992,511 | 4a56ecd30b746f60c086df6ee89502d44caf8df3 | y=eval(char("please enter the variable")
if(y=a)&&(y==e)&&(y==i)&&(y==o)&&(y==u)
print("Vowel")
else
print("Consonant")
|
992,512 | 9214a329e5735923a01f1a2ecb819f41f1e53f85 | """
load HBT data, both processed and unprocessed
NOTES
-----
The convention for units is to maintain everything in SI until they are
plotted.
A few of these functions are merely wrappers for other people's code
In most of the functions below, I've specified default shotno's. This is
largely to make debugging easier as there is nothing special about the
provided shotnos.
"""
###############################################################################
### import libraries
# common libraries
import numpy as _np
import MDSplus as _mds
from copy import copy as _copy
import sys as _sys
import _socket
import matplotlib.pyplot as _plt
import time as _time
# hbtepLib libraries
import _processData as _process
import _plotTools as _plot
try:
import _hbtPreferences as _pref
except ImportError:
_sys.exit("Code hault: _hbtPreferences.py file not found. See readme.md" +
" concerning the creation of _hbtPreferences.py")
###############################################################################
### constants
#_REMOTE_DATA_PATH='/opt/hbt/data/control'
if _socket.gethostname()==_pref._HBT_SERVER_NAME:
_ON_HBTEP_SERVER=True;
else:
_ON_HBTEP_SERVER=False;
###############################################################################
### global variables
# default time limits for data. units in seconds
_TSTART = 0*1e-3;
_TSTOP = 10*1e-3;
# list of known bad sensors. likely VERY outdated. also note that the code does not YET do anything with this info...
_SENSORBLACKLIST = ['PA1_S29R', 'PA1_S16P', 'PA2_S13R', 'PA2_S14P', 'PA2_S27P', 'FB03_S4R', 'FB04_S4R', 'FB08_S3P', 'FB10_S1P', 'FB04_S3P', 'FB06_S2P', 'FB08_S4P', 'TA07_S1P', 'TA02_S1P', 'TA02_S2P'];
# directory where unprocessed or minimally processed data is written locally.
#_FILEDIR='/home/john/shotData/'
###############################################################################
### decorators
def _prepShotno(func, debug=False):
    """
    Decorator that normalizes the shotno argument before calling *func*.

    Handles four input forms for shotno (positional or keyword):
    - positive int: waits until that shot has finished recording;
    - negative int (-1, -2, ...): python-style reverse indexing from the
      latest shot number;
    - list/array of shot numbers: calls *func* once per entry, returning a
      list of results;
    - None (or missing): substitutes the latest shot number.

    References
    ----------
    https://stackoverflow.com/questions/2536307/decorators-in-the-python-standard-lib-deprecated-specifically/30253848#30253848
    """
    from functools import wraps

    @wraps(func)  # keep func's doc-string visible through the decorator
    def inner1(*args, **kwargs):
        # shotno may arrive positionally or as a keyword; normalize it out.
        if len(args) > 0:
            shotno = args[0]
        else:
            # Fixed: kwargs.get('shotno') followed by del kwargs['shotno']
            # raised KeyError when the keyword was absent; pop() handles both.
            shotno = kwargs.pop('shotno', None)

        if debug == True:
            print('args = ')
            print(shotno)
            print(type(shotno))

        # single integer shot number (covers python ints and numpy ints)
        if _np.issubdtype(type(shotno), _np.integer):
            if shotno < 0:
                # reverse indexing: -1 is the latest shot, -2 the one before it
                args = (latestShotNumber() + shotno + 1,) + args[1:]
                waitUntilLatestShotNumber(args[0])
                return func(*args, **kwargs)
            else:
                # standard shot number; force to a plain int
                args = (int(shotno),) + args[1:]
                waitUntilLatestShotNumber(int(shotno))
                return func(*args, **kwargs)
        else:
            # shotno is a list/array of shot numbers, or None
            try:
                n = len(shotno)
            except TypeError:
                # Fixed: shotno is None — previously len(None) raised an
                # unhandled TypeError (the handler was commented out).
                # Fall back to the latest shot number.
                args = (latestShotNumber(),) + args[1:]
                waitUntilLatestShotNumber(int(args[0]))
                return func(*args, **kwargs)
            out = []
            for i in range(n):
                if shotno[i] < 0:
                    # reverse indexing, as in the scalar case
                    arg = (latestShotNumber() + shotno[i] + 1,) + args[1:]
                else:
                    arg = (shotno[i],) + args[1:]
                waitUntilLatestShotNumber(int(arg[0]))
                out.append(func(*arg, **kwargs))
            return out
    return inner1
###############################################################################
### MDSplus tree data collection and misc. related functions
def _trimTime(time,data,tStart,tStop):
"""
Trims list of data arrays down to desired time
Parameters
----------
time : numpy.ndarray
time array
data : list (of 1D numpy.ndarrays)
list of data arrays to be trimmed
tStart : float
trims data before start time
tStop : float
trims data after stop time
Returns
-------
time : numpy.ndarray
trimmed time array
data : list (of numpy.ndarrays)
trimmed data
Notes
-----
This function does not concern itself with units (e.g. s or ms). Instead,
it is assumed that tStart and tStop have the same units as the variable, time.
"""
if tStart is None:
iStart=0;
iStop=len(time);
else:
# determine indices of cutoff regions
iStart=_process.findNearest(time,tStart); # index of lower cutoff
iStop=_process.findNearest(time,tStop); # index of higher cutoff
# trim time
time=time[iStart:iStop];
# trim data
if type(data) is not list:
data=[data];
for i in range(0,len(data)):
data[i]=data[i][iStart:iStop];
return time, data
def _initRemoteMDSConnection(shotno):
    """
    Open a remote MDSplus connection to the HBT-EP tree for *shotno*.

    Parameters
    ----------
    shotno : int

    Returns
    -------
    connection : MDSplus.connection
        connection object with the 'hbtep2' tree already opened
    """
    connection = _mds.Connection(_pref._HBT_SERVER_ADDRESS + ':8003')
    connection.openTree('hbtep2', shotno)
    return connection
def latestShotNumber():
    """
    Return the most recent shot number recorded in the HBT-EP tree.

    Returns
    -------
    int
        latest shot number
    """
    connection = _mds.Connection(_pref._HBT_SERVER_ADDRESS + ':8003')
    latest = connection.get('current_shot("hbtep2")')
    return int(latest)
def waitUntilLatestShotNumber(shotno, debug=False):
    """
    Block until the given shot has finished recording.

    If *shotno* is older than the latest shot, returns immediately. If it
    has not been created yet, polls every 2 s until it appears. Once the
    shot exists, polls until its data has finished writing (detected by a
    probe read of a known CPCI node succeeding).

    Useful to call before fetching data, to avoid reading a shot that is
    still being recorded.

    Parameters
    ----------
    shotno : int
        shot number
    debug : bool
        default False.
        prints text to screen to help with debugging.
    """
    latestShotno = latestShotNumber()
    if debug == True:
        print("latest shot number : %d" % latestShotno)
        print("shot number in question : %d" % shotno)
    # an older shot: recording is certainly done
    if shotno < latestShotno:
        return
    # a shot number that hasn't even been created yet: wait for it
    elif shotno > latestShotno:
        print("This shot number hasn't even been created yet. Waiting...")
        stop = True
        while (stop == True):
            if shotno == latestShotNumber():
                stop = False
            else:
                pass
            # NOTE(review): this sleeps once more even after stop flips to
            # False, adding ~2 s before the probe below — harmless but slow.
            _time.sleep(2)
    # shot exists (shotno == latest): probe a late-writing node to see
    # whether the shot's data has finished recording.
    stop = False
    try:
        mdsData(shotno, dataAddress=['\HBTEP2::TOP.DEVICES.WEST_RACK:CPCI:INPUT_96'],
                tStart=[], tStop=[])
    except _mds.TreeNODATA:
        # node not written yet -> shot still recording
        stop = True
        print("Shot number has not finished. Waiting...")
    while (stop == True):
        _time.sleep(2)
        try:
            mdsData(shotno,
                    dataAddress=['\HBTEP2::TOP.DEVICES.WEST_RACK:CPCI:INPUT_96'],
                    tStart=[], tStop=[])
            stop = False
            _time.sleep(1)
        except _mds.TreeNODATA:
            pass
    return
def mdsData(shotno=None,
            dataAddress=['\HBTEP2::TOP.DEVICES.SOUTH_RACK:CPCI_10:INPUT_94',
                         '\HBTEP2::TOP.DEVICES.SOUTH_RACK:CPCI_10:INPUT_95'],
            tStart=[], tStop=[]):
    """
    Get data and optionally associated time from MDSplus tree

    Parameters
    ----------
    shotno : int
        shotno of data. this function will establish its own mdsConn of this
        shotno
    dataAddress : list (of strings)
        address of desired data on MDSplus tree
    tStart : float
        trims data before this time
    tStop : float
        trims data after this time

    Returns
    -------
    data : list (of numpy.ndarray)
        requested data
    time : numpy.ndarray
        time associated with data array (only returned when the first node
        holds array data)
    """
    # convert dataAddress to a list if it not one originally
    if type(dataAddress) is not list:
        dataAddress = [dataAddress]
    # init arrays
    time = []
    data = []
    # Access path depends on where this code runs: local to the tree
    # (direct Tree access) or remote (thin connection to the server).
    if _ON_HBTEP_SERVER == True:  # if operating local to the tree
        tree = _mds.Tree('hbtep2', shotno)
        for i in range(0, len(dataAddress)):
            node = tree.getNode(dataAddress[i])  # Get the proper node
            data.append(node.data())  # Get the data from this node
        # if node is an array, also grab its time base
        if type(data[0]) is _np.ndarray:
            time = node.dim_of().data()
    else:  # operating remotely
        # if shotno is specified, this function gets its own mdsConn
        if type(shotno) is float or type(shotno) is int or type(shotno) is _np.int64:
            mdsConn = _initRemoteMDSConnection(shotno)
        for i in range(0, len(dataAddress)):
            data.append(mdsConn.get(dataAddress[i]).data())
        # if data is an array, also get time
        if type(data[0]) is _np.ndarray:
            time = mdsConn.get('dim_of(' + dataAddress[0] + ')').data()  # time associated with data
    # NOTE(review): `time != []` compares an ndarray against a list once time
    # is populated; numpy's fallback makes this truthy here, but an explicit
    # `len(time) > 0` check would be safer — confirm before changing.
    if time != [] and type(tStop) != list:
        # trim time and data
        time, data = _trimTime(time, data, tStart, tStop)
    if time != []:
        return data, time
    else:
        return data
###############################################################################
### get device specific data
@_prepShotno
class ipData:
    """
    Gets plasma current (I_p) data.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before.  default is 0 ms
    tStop : float
        time (in seconds) to trim data after.  default is 10 ms
    plot : bool
        plots all relevant plots if true.  default is False
    findDisruption : bool
        if True, attempt to locate the disruption onset time from d(I_p)/dt
    verbose : bool
        if True, generate debugging plots of the disruption search

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title to go on all plots
    ip : numpy.ndarray
        plasma current data
    time : numpy.ndarray
        time data
    timeDisruption : float or None
        time of the disruption onset; None if it could not be found.
        only set when findDisruption is True.

    Subfunctions
    ------------
    plotOfIP :
        returns the plot of IP vs time
    plot :
        Plots all relevant plots
    """

    def __init__(self, shotno=96530, tStart=_TSTART, tStop=_TSTOP, plot=False,
                 findDisruption=True, verbose=False):
        self.shotno = shotno
        self.title = "%d, Ip Data" % shotno

        # get data
        data, time = mdsData(shotno=shotno,
                             dataAddress=['\HBTEP2::TOP.SENSORS.ROGOWSKIS:IP'],
                             tStart=tStart, tStop=tStop)
        self.ip = data[0]
        self.time = time

        if findDisruption == True:
            try:
                # only look at data after breakdown
                iStart = _process.findNearest(time, 1.5e-3)
                ipTime = time[iStart:]
                ip = data[0][iStart:]

                # filter data to remove low-frequency offset
                ip2, temp = _process.gaussianHighPassFilter(
                    ip, ipTime, timeWidth=1. / 20e3, plot=verbose)

                # find time derivative of ip2
                dip2dt = _np.gradient(ip2)

                # find the first large rise in d(ip2)/dt
                threshold = 40.0
                index = _np.where(dip2dt > threshold)[0][0]

                # debugging feature
                if verbose == True:
                    _plt.figure()
                    _plt.plot(ipTime, dip2dt, label=r'$\frac{d(ip)}{dt}$')
                    _plt.plot([ipTime[0], ipTime[-1]], [threshold, threshold],
                              label='threshold')
                    _plt.legend()

                # find the max value of ip immediately after the disrup. onset
                while (ip[index] < ip[index + 1]):
                    index += 1
                self.timeDisruption = ipTime[index]
            # BUG FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit; narrowed to Exception
            except Exception:
                print("time of disruption could not be found")
                self.timeDisruption = None

        if plot == True or plot == 'all':
            self.plot()

    def plotOfIP(self):
        """
        returns the plot of IP vs time
        """
        fig, p1 = _plt.subplots()
        p1.plot(self.time * 1e3, self.ip * 1e-3)
        try:
            p1.plot(self.timeDisruption * 1e3,
                    self.ip[self.time == self.timeDisruption] * 1e-3,
                    label='Disruption', marker='x', linestyle='')
        # timeDisruption may be None (not found) or missing entirely
        # (findDisruption=False).  BUG FIX: the original handler body was a
        # bare `except:` followed by a no-op string literal.
        except (AttributeError, TypeError):
            pass
        _plot.finalizeSubplot(p1, xlabel='Time (ms)', ylabel='Plasma Current (kA)')
        _plot.finalizeFigure(fig, title=self.title)
        return p1

    def plot(self):
        """
        Plot all relevant plots
        """
        self.plotOfIP().plot()
@_prepShotno
class egunData:
    """
    Gets egun data.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before.  default is 0 ms
    tStop : float
        time (in seconds) to trim data after.  default is 10 ms
    plot : bool
        plots all relevant plots if true.  default is False

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title to go on all plots
    I_EMIS : numpy.ndarray
        egun emission current
    heatingCurrent : numpy.ndarray
        egun heating current
    biasVoltage : numpy.ndarray
        egun bias voltage
    heatingCurrentRMS : float
        RMS of the AC component of the heating current
    time : numpy.ndarray
        time data

    Subfunctions
    ------------
    plotOfHeatingCurrent :
        returns the plot of heating current vs time
    plot :
        Plots all relevant plots
    """

    def __init__(self, shotno=96530, tStart=_TSTART, tStop=_TSTOP, plot=False):
        self.shotno = shotno
        self.title = "%d, egun Data" % shotno

        # get data
        # BUG FIX: previously fetched hard-coded shot 101169 and ignored
        # tStart/tStop; now honors the requested shot number and time window
        data, time = mdsData(shotno=shotno,
                             dataAddress=['\HBTEP2::TOP.OPER_DIAGS.E_GUN:I_EMIS',
                                          '\HBTEP2::TOP.OPER_DIAGS.E_GUN:I_HEAT',
                                          '\HBTEP2::TOP.OPER_DIAGS.E_GUN:V_BIAS',],
                             tStart=tStart, tStop=tStop)
        self.I_EMIS = data[0]
        self.heatingCurrent = data[1]
        self.biasVoltage = data[2]
        # RMS of the AC (mean-subtracted) component of the heating current
        self.heatingCurrentRMS = _np.sqrt(_np.average(
            (self.heatingCurrent - _np.average(self.heatingCurrent)) ** 2))
        self.time = time

        if plot == True or plot == 'all':
            self.plot()

    def plotOfHeatingCurrent(self):
        """
        returns the plot of heating current vs time
        """
        fig, p1 = _plt.subplots()
        p1.plot(self.time * 1e3, self.heatingCurrent)
        _plot.finalizeSubplot(p1, xlabel='Time (ms)', ylabel='Current (A)')
        _plot.finalizeFigure(fig, title=self.title)
        return p1

    def plot(self):
        """
        Plot all relevant plots
        """
        self.plotOfHeatingCurrent().plot()
@_prepShotno
class cos1RogowskiData:
    """
    Gets cos 1 rogowski data.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before.  default is 0 ms
    tStop : float
        time (in seconds) to trim data after.  default is 10 ms
    plot : bool
        plots all relevant plots if true.  default is False

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title to go on all plots
    cos1 : numpy.ndarray
        cos1 data
    cos1Raw : numpy.ndarray
        raw cos1 data
    cos1RawOffset : float
        mean of the raw signal before t = 0, usable for offset subtraction
    time : numpy.ndarray
        time data

    Subfunctions
    ------------
    plotOfCos1 :
        returns the plot of cos1 rog vs time
    plot :
        Plots all relevant plots

    Notes
    -----
    Data is initially grabbed starting at -1 ms: the samples before 0 ms are
    needed to compute cos1RawOffset.  Once the offset is computed, everything
    before tStart is trimmed away.
    """

    def __init__(self, shotno=96530, tStart=_TSTART, tStop=_TSTOP, plot=False):
        self.shotno = shotno
        self.title = "%d, Cos1 Rog. Data" % shotno

        # download signals; start at -1 ms so pre-shot samples are available
        # for the offset calculation below
        signals, t = mdsData(shotno=shotno,
                             dataAddress=['\HBTEP2::TOP.SENSORS.ROGOWSKIS:COS_1',
                                          '\HBTEP2::TOP.SENSORS.ROGOWSKIS:COS_1:RAW'],
                             tStart=-1 * 1e-3, tStop=tStop)

        # offset = average of the raw signal before t = 0
        rawSignal = signals[1]
        preShot = t < 0.0 * 1e-3
        self.cos1RawOffset = _np.mean(rawSignal[preShot])

        # discard everything before tStart
        i0 = _process.findNearest(t, tStart)
        self.cos1 = signals[0][i0:]
        self.time = t[i0:]
        self.cos1Raw = rawSignal[i0:]

        if plot == True or plot == 'all':
            self.plot()

    def plotOfCos1(self):
        """
        returns the plot of cos1 rog vs time
        """
        fig, ax = _plt.subplots()
        ax.plot(self.time * 1e3, self.cos1)
        _plot.finalizeSubplot(ax, xlabel='Time (ms)', ylabel='')
        _plot.finalizeFigure(fig, title=self.title)
        return ax

    def plot(self):
        """
        Plot all relevant plots
        """
        self.plotOfCos1().plot()
@_prepShotno
class bpData:
    """
    Downloads bias probe data from both probes.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before.  default is 0 ms
    tStop : float
        time (in seconds) to trim data after.  default is 10 ms
    plot : bool
        plots all relevant plots if true.  default is False

    Attributes
    ----------
    shotno : int
        shot number of desired data
    time : numpy.ndarray
        time data
    title : str
        title of all included figures
    bps9Voltage : numpy.ndarray
        bps9 voltage
    bps9Current : numpy.ndarray
        bps9 current
    bps5Voltage : numpy.ndarray
        bps5 voltage (older shots only)
    bps5Current : numpy.ndarray
        bps5 current (older shots only)
    bps2Voltage : numpy.ndarray
        bps2 voltage (shots after 99035, when present)
    bps2Current : numpy.ndarray
        bps2 current (shots after 99035, when present)
    primaryVoltage : numpy.ndarray
        transformer primary voltage (first set up for shot 100505 and on)
    primaryCurrent : numpy.ndarray
        transformer primary current (first set up for shot 100505 and on)
    bps9GPURequestVoltage : numpy.ndarray
        CPCI measurement of pre-amp voltage, out from the GPU, and going to
        control bps9

    Subfunctions
    ------------
    plotOfGPUVoltageRequest :
        Plot of gpu request voltage (as measured by the CPCI)
    plotOfVoltage :
        Plot of both bias probe voltages
    plotOfCurrent :
        Plot of both bias probe currents
    plotOfBPS9Voltage :
        Plot of bps9 voltage only
    plotOfBPS9Current :
        Plot of bps9 current only
    plot :
        Plots all relevant plots

    Notes
    -----
    BPS5 was moved to section 2 (now BPS2) summer of 2017.  Instrumented on
    May 22, 2018.
    """
    # TODO(John) Time should likely be split into s2 and s9 because different
    # racks often have slightly different time bases
    # TODO(John) Determine when the Tree node for the BP was added, and have
    # this code automatically determine whether to use the old or new loading
    # method
    # TODO(John) Also, one of the BPs was moved recently. Need to figure out
    # how to handle this
    # TODO(John) these probes have been periodically moved to different nodes.
    # implement if lowerbound < shotno < upperbound conditions to handle these cases

    def __init__(self, shotno=98147, tStart=_TSTART, tStop=_TSTOP, plot=False):
        self.shotno = shotno
        self.title = "%s, BP Data." % shotno

        if shotno > 99035 or shotno == -1:
            # BPS5 was moved to section 2
            # get bps9 voltage and current data
            data, time = mdsData(shotno=shotno,
                                 dataAddress=['\HBTEP2::TOP.SENSORS.BIAS_PROBE_9:VOLTAGE',
                                              '\HBTEP2::TOP.SENSORS.BIAS_PROBE_9:CURRENT'],
                                 tStart=tStart, tStop=tStop)
            self.bps9Voltage = data[0]
            self.bps9Current = data[1]  # r*-1; # signs are flipped somewhere
            self.time = time
            # get bps2 data (node may not exist on every shot)
            try:
                data, time = mdsData(shotno=shotno,
                                     dataAddress=['\HBTEP2::TOP.SENSORS.BIAS_PROBE_2:VOLTAGE',
                                                  '\HBTEP2::TOP.SENSORS.BIAS_PROBE_2:CURRENT'],
                                     tStart=tStart, tStop=tStop)
                self.bps2Voltage = data[0]  # *100; #TODO get actual voltage divider info
                self.bps2Current = data[1] * -1  # /0.05;
            # BUG FIX: was a bare `except:` whose body was a no-op string
            # literal ("no bp2"); now narrowed and reported like the bps5 case
            except Exception:
                print("skipping bps2")
        elif shotno > 96000 and shotno < 99035:
            # TODO(determine when this probe was rewired or moved)
            # get bps9 voltage and current data
            data, time = mdsData(shotno=shotno,
                                 dataAddress=['\HBTEP2::TOP.SENSORS.BIAS_PROBE_9:VOLTAGE',
                                              '\HBTEP2::TOP.SENSORS.BIAS_PROBE_9:CURRENT'],
                                 tStart=tStart, tStop=tStop)
            self.bps9Voltage = data[0]
            self.bps9Current = data[1] * -1  # signs are flipped somewhere
            self.time = time
            # get bps5 voltage and current data
            data, time = mdsData(shotno=shotno,
                                 dataAddress=['\HBTEP2::TOP.SENSORS.BIAS_PROBE_5:VOLTAGE',
                                              '\HBTEP2::TOP.SENSORS.BIAS_PROBE_5:CURRENT'],
                                 tStart=tStart, tStop=tStop)
            self.bps5Voltage = data[0]
            self.bps5Current = data[1]
            ## previous BP addresses. do not delete this until implemented
            # if probe == 'BPS5' or probe == 'both':
            #     self.currentBPS5=conn.get('\HBTEP2::TOP.DEVICES.SOUTH_RACK:CPCI_10:INPUT_83').data()/.01/5;
            #     self.voltageBPS5 = conn.get('\HBTEP2::TOP.DEVICES.NORTH_RACK:CPCI:INPUT_82').data()*80;
            # if probe == 'BPS9' or probe == 'both':
            #     self.timeBPS9 = conn.get('dim_of(\TOP.DEVICES.SOUTH_RACK:A14:INPUT_3)').data();
            #     self.voltageBPS9 = (-1.)*conn.get('\TOP.DEVICES.SOUTH_RACK:A14:INPUT_4').data()/.00971534052268532 / 1.5
        else:
            # legacy shots: single bias probe node
            data, time = mdsData(shotno=shotno,
                                 dataAddress=['\HBTEP2::TOP.SENSORS.BIAS_PROBE:VOLTAGE',
                                              '\HBTEP2::TOP.SENSORS.BIAS_PROBE:CURRENT'],
                                 tStart=tStart, tStop=tStop)
            self.bps9Voltage = data[0]
            self.bps9Current = data[1] * -1  # signs are flipped somewhere
            self.time = time
            # get bps5 data (node may not exist on every shot)
            try:
                data, time = mdsData(shotno=shotno,
                                     dataAddress=['\HBTEP2::TOP.SENSORS.BIAS_PROBE_2:VOLTAGE',
                                                  '\HBTEP2::TOP.SENSORS.BIAS_PROBE_2:CURRENT'],
                                     tStart=tStart, tStop=tStop)
                self.bps5Voltage = data[0]
                self.bps5Current = data[1]
            # BUG FIX: was a bare `except:`; narrowed to Exception
            except Exception:
                print("skipping bps5")

        # transformer primary voltage. first setup for shot 100505 and on.
        [primaryVoltage, primaryCurrent], time = mdsData(shotno=shotno,
                                                         dataAddress=['\HBTEP2::TOP.DEVICES.SOUTH_RACK:CPCI_10:INPUT_86',
                                                                      '\HBTEP2::TOP.DEVICES.SOUTH_RACK:CPCI_10:INPUT_87'],
                                                         tStart=tStart, tStop=tStop)
        self.primaryVoltage = primaryVoltage * (0.745 / (110 + .745)) ** (-1)  # correct for voltage divider
        self.primaryCurrent = primaryCurrent * 0.01 ** (-1)  # correct for Pearson correction factor
        # self.primaryCurrent*=1; #the sign is wrong.
        # self.primaryVoltage*=-1; #the sign is wrong.

        # get gpu request voltage (for when the BP is under feedforward or feedback control)
        data, time = mdsData(shotno=shotno,
                             dataAddress=['\HBTEP2::TOP.DEVICES.SOUTH_RACK:CPCI_10:INPUT_93'],
                             tStart=tStart, tStop=tStop)
        self.bps9GPURequestVoltage = data[0]

        if plot == True:
            self.plot()
        if plot == 'all':
            self.plot(True)

    def plotOfGPUVoltageRequest(self):
        """
        returns plot of gpu voltage request
        (Preamp signal out from caliban)
        """
        p1 = _plot.plot(title=self.title, xLabel='ms', yLabel='V',
                        subtitle='Voltage Request from GPU (pre-amplifier)',
                        shotno=self.shotno)
        p1.addTrace(xData=self.time * 1000, yData=self.bps9GPURequestVoltage,
                    yLegendLabel='BPS9')
        return p1

    def plotOfVoltage(self, primary=False):
        """
        returns plot of BP voltage
        """
        p1 = _plot.plot(title=self.title, yLabel='V',  # yLim=[-200,200]
                        xLabel='Time [ms]', subtitle='BP Voltage',
                        shotno=[self.shotno])
        p1.addTrace(xData=self.time * 1000, yData=self.bps9Voltage,
                    yLegendLabel='BPS9')
        # BPS2 exists on recent shots; fall back to BPS5 for older shots.
        # BUG FIX: was a bare `except:`; narrowed to the expected failure.
        try:
            p1.addTrace(xData=self.time * 1000, yData=self.bps2Voltage,
                        yLegendLabel='BPS2')
        except AttributeError:
            p1.addTrace(xData=self.time * 1000, yData=self.bps5Voltage,
                        yLegendLabel='BPS5')
        if primary == True:
            p1.addTrace(xData=self.time * 1000, yData=self.primaryVoltage,
                        yLegendLabel='Primary')
        return p1

    def plotOfCurrent(self, primary=False):
        """
        returns plot of BP current
        """
        p1 = _plot.plot(title=self.title, yLabel='A',
                        xLabel='Time [ms]', subtitle='BP Current',
                        shotno=[self.shotno])
        p1.addTrace(xData=self.time * 1000, yData=self.bps9Current,
                    yLegendLabel='BPS9')
        # BPS2 exists on recent shots; fall back to BPS5 for older shots.
        # BUG FIX: was a bare `except:`; narrowed to the expected failure.
        try:
            p1.addTrace(xData=self.time * 1000, yData=self.bps2Current,
                        yLegendLabel='BPS2')
        except AttributeError:
            p1.addTrace(xData=self.time * 1000, yData=self.bps5Current,
                        yLegendLabel='BPS5')
        if primary == True:
            p1.addTrace(xData=self.time * 1000, yData=self.primaryCurrent,
                        yLegendLabel='Primary')
        return p1

    def plotOfBPS9Voltage(self):
        """
        returns plot of BPS9 voltage
        """
        p1 = _plot.plot(title=self.title, yLabel='V', yLim=[-200, 200],
                        xLabel='Time [ms]', subtitle='BP Voltage',
                        shotno=[self.shotno])
        p1.addTrace(xData=self.time * 1000, yData=self.bps9Voltage,
                    yLegendLabel='BPS9')
        return p1

    def plotOfBPS9Current(self):
        """
        returns plot of BPS9 current
        """
        p1 = _plot.plot(title=self.title, yLabel='A',
                        xLabel='Time [ms]', subtitle='BP Current',
                        shotno=[self.shotno])
        p1.addTrace(xData=self.time * 1000, yData=self.bps9Current,
                    yLegendLabel='BPS9')
        return p1

    # TODO(john) also make plots for BPS5 only
    def plot(self, plotAll=False):
        """ Plot relevant plots """
        if plotAll == False:
            sp1 = _plot.subPlot([self.plotOfVoltage(), self.plotOfCurrent()])
        else:
            sp1 = _plot.subPlot([self.plotOfVoltage(True), self.plotOfCurrent(True),
                                 self.plotOfGPUVoltageRequest()])
        return sp1
@_prepShotno
class dpData:
    """
    Downloads directional (double) probe data.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before.  default is 0 ms
    tStop : float
        time (in seconds) to trim data after.  default is 10 ms
    plot : bool
        plots all relevant plots if true.  default is False

    Attributes
    ----------
    shotno : int
        shot number of desired data
    time : numpy.ndarray
        time data
    title : str
        title of all included figures
    dp1VoltageLeft : numpy.ndarray
        double probe 1 left-tip voltage
    dp1VoltageRight : numpy.ndarray
        double probe 1 right-tip voltage
    dp1VoltageDiff : numpy.ndarray
        left minus right tip voltage
    dp1Current : numpy.ndarray
        double probe 1 current
    primaryVoltage : numpy.ndarray
        transformer primary voltage (first set up for shot 100505 and on)
    primaryCurrent : numpy.ndarray
        transformer primary current
    secondaryVoltage : numpy.ndarray
        transformer secondary voltage
    gpuRequestVoltage : numpy.ndarray
        CPCI measurement of pre-amp voltage, out from the GPU, and going to
        the probe
    """

    def __init__(self, shotno, tStart=_TSTART, tStop=_TSTOP, plot=False):
        self.shotno = shotno
        self.title = "%s, DP Data." % shotno

        # probe tip signals
        probeSignals, probeTime = mdsData(shotno=shotno,
                                          dataAddress=['\HBTEP2::TOP.DEVICES.SOUTH_RACK:CPCI_10:INPUT_85',
                                                       '\HBTEP2::TOP.DEVICES.SOUTH_RACK:CPCI_10:INPUT_84',
                                                       '\HBTEP2::TOP.DEVICES.SOUTH_RACK:CPCI_10:INPUT_83'],
                                          tStart=tStart, tStop=tStop)
        # undo voltage dividers / current-monitor gain (calibration factors
        # kept exactly as measured)
        self.dp1VoltageLeft = probeSignals[0] * (469.7 / (469.7 + 100000)) ** (-1)
        self.dp1Current = probeSignals[1] * 0.1 ** (-1)
        self.dp1VoltageRight = probeSignals[2] * (470. / (470 + 99800)) ** (-1)
        self.dp1VoltageDiff = self.dp1VoltageLeft - self.dp1VoltageRight
        self.time = probeTime

        # transformer signals. first setup for shot 100505 and on.
        xfmrSignals, xfmrTime = mdsData(shotno=shotno,
                                        dataAddress=['\HBTEP2::TOP.DEVICES.SOUTH_RACK:CPCI_10:INPUT_86',
                                                     '\HBTEP2::TOP.DEVICES.SOUTH_RACK:CPCI_10:INPUT_87',
                                                     '\HBTEP2::TOP.DEVICES.SOUTH_RACK:CPCI_10:INPUT_88'],
                                        tStart=tStart, tStop=tStop)
        self.primaryVoltage = xfmrSignals[0] * (507.2 / 102900.) ** (-1)  # correct for voltage divider
        self.primaryCurrent = xfmrSignals[1] * 0.01 ** (-1)  # correct for Pearson correction factor
        self.secondaryVoltage = xfmrSignals[2] * (514.6 / 102500) ** (-1)

        # gpu request voltage (for when the probe is under feedforward or
        # feedback control)
        gpuSignals, gpuTime = mdsData(shotno=shotno,
                                      dataAddress=['\HBTEP2::TOP.DEVICES.SOUTH_RACK:CPCI_10:INPUT_93'],
                                      tStart=tStart, tStop=tStop)
        self.gpuRequestVoltage = gpuSignals[0]

        if plot == True:
            self.plot()
        if plot == 'all':
            self.plot(True)

    def plotOfGPUVoltageRequest(self):
        """
        returns plot of gpu voltage request
        (Preamp signal out from caliban)
        """
        fig = _plot.plot(title=self.title, xLabel='ms', yLabel='V',
                         subtitle='Voltage Request from GPU (pre-amplifier)',
                         shotno=self.shotno)
        fig.addTrace(xData=self.time * 1000, yData=self.gpuRequestVoltage)
        return fig

    def plotOfVoltage(self, primary=False):
        """
        returns plot of DP voltage
        """
        fig = _plot.plot(title=self.title, yLabel='V',  # yLim=[-200,200]
                         xLabel='Time [ms]', subtitle='DP Voltage',
                         shotno=[self.shotno])
        ms = self.time * 1000
        fig.addTrace(xData=ms, yData=self.dp1VoltageRight,
                     yLegendLabel='DP1 Right')
        fig.addTrace(xData=ms, yData=self.dp1VoltageLeft,
                     yLegendLabel='DP1 Left')
        fig.addTrace(xData=ms, yData=self.dp1VoltageDiff,
                     yLegendLabel='DP1 Difference')
        if primary == True:
            fig.addTrace(xData=ms, yData=self.primaryVoltage,
                         yLegendLabel='Primary')
            fig.addTrace(xData=ms, yData=self.secondaryVoltage,
                         yLegendLabel='Secondary')
        return fig

    def plotOfCurrent(self, primary=False):
        """
        returns plot of DP current
        """
        fig = _plot.plot(title=self.title, yLabel='A',
                         xLabel='Time [ms]', subtitle='DP Current',
                         shotno=[self.shotno])
        fig.addTrace(xData=self.time * 1000, yData=self.dp1Current,
                     yLegendLabel='DP1')
        if primary == True:
            fig.addTrace(xData=self.time * 1000, yData=self.primaryCurrent,
                         yLegendLabel='Primary')
        return fig

    def plot(self, plotAll=False):
        """ Plot relevant plots """
        if plotAll == False:
            panels = [self.plotOfVoltage(), self.plotOfCurrent()]
        else:
            panels = [self.plotOfVoltage(True), self.plotOfCurrent(True),
                      self.plotOfGPUVoltageRequest()]
        return _plot.subPlot(panels)
@_prepShotno
class tpData:
    """
    Triple probe data.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before.  default is 0 ms
    tStop : float
        time (in seconds) to trim data after.  default is 10 ms
    plot : bool
        plots all relevant plots if true.  default is False
    probes : str
        This parameter allows the user to specify which probe from which to
        load data.  There are two triple probes: tps5 (triple probe section 5)
        and tps8.  This str can be 'tps5', 'tps8', or 'both'.

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title of all included figures
    tps5TipA : numpy.ndarray
        tps5 tip A voltage data. (note that this channel is typically
        disconnected)
    tps5TipB : numpy.ndarray
        tps5 tip B voltage data.
    tps5TipC : numpy.ndarray
        tps5 tip C voltage data.
    tps5Time : numpy.ndarray
        tps5 time data
    tps5Current : numpy.ndarray
        tps5 current data
    tps5Temp : numpy.ndarray
        tps5 temperature data.
    tps5VFloat : numpy.ndarray
        tps5 floating voltage data
    tps5Density : numpy.ndarray
        tps5 density data
    tps5PlasmaPotential : numpy.ndarray
        tps5 plasma potential data
    tps8TipA : numpy.ndarray
        tps8 tip A voltage data. (note that this channel is typically
        disconnected)
    tps8TipB : numpy.ndarray
        tps8 tip B voltage data.
    tps8TipC : numpy.ndarray
        tps8 tip C voltage data.
    tps8Time : numpy.ndarray
        tps8 time data
    tps8Current : numpy.ndarray
        tps8 current data
    tps8Temp : numpy.ndarray
        tps8 temperature data.
    tps8VFloat : numpy.ndarray
        tps8 floating voltage data
    tps8Density : numpy.ndarray
        tps8 density data
    tps8PlasmaPotential : numpy.ndarray
        tps8 plasma potential data

    Subfunctions
    ------------
    plotOfISat : _plotTools.plot
        ion saturation current
    plotOfTipC : _plotTools.plot
        tip C voltages
    plotOfTipB : _plotTools.plot
        tip B voltages
    plotOfTipA : _plotTools.plot
        tip A voltages
    plotOfVf : _plotTools.plot
        floating voltages
    plotOfNe : _plotTools.plot
        density
    plotOfKTe : _plotTools.plot
        temperature
    plot :
        plots all relevant plots

    Notes
    -----
    I am not using the same time array for the section 5 or the section 8
    triple probes.  I do this because different data acq. systems (e.g. north
    rack CPCI vs south rack CPCI) doesn't always return the EXACT same array.
    I've run into issues where the length of the time arrays weren't the same
    length which causes problems during plotting.
    TPS2 was moved to section 5 (now called TPS5) during the summer of 2017.
    This may cause variable naming issues. Be warned.
    TODO The other cases for shotnumbers need to finalized so that legacy data
    can still be loaded.
    """

    @staticmethod
    def _deriveQuantities(tipB, tipC, iSat, A, e, eV, M, me):
        """
        Derive (vFloat, temp, density, plasmaPotential) from raw triple-probe
        tip voltages and ion saturation current.  Shared by the TPS5, TPS8,
        and legacy branches (previously three duplicated code blocks).
        """
        vFloat = tipC
        temp = (tipB - tipC) / .693
        # trim data over 200eV. I trim this data because there are a few VERY
        # high temperature points that throw off the autoscaling
        temp[temp >= 200] = 0
        tempClipped = _copy(temp)
        # i trim here to avoid imaginary numbers when I take the square root below
        tempClipped[tempClipped <= 0] = 1e6
        density = iSat / (0.605 * e * _np.sqrt(tempClipped * eV / (M)) * A)
        plasmaPotential = vFloat - temp / e * _np.log(0.6 * _np.sqrt(2 * _np.pi * me / M))
        return vFloat, temp, density, plasmaPotential

    def __init__(self, shotno=95996, tStart=_TSTART, tStop=_TSTOP, plot=False,
                 probes='both'):  # sectionNum=2,
        self.shotno = shotno
        self.title = '%s, triple probe data' % shotno
        self.probes = probes

        # enforce probes naming convention
        if probes == '5':
            probes = 'tps5'
        if probes == '8':
            probes = 'tps8'

        # constants
        # A=1.5904e-5 #(1.5mm)^2/4*pi + pi*(3.0mm)*(1.5mm), probe area
        A = 2.0 * (1.5 / 1000.0 * 3.0 / 1000.0)  # probe area
        e = 1.602e-19  # fundamental charge
        eV = 1.60218e-19  # 1 eV = 1.60218e-19 joules
        M = 2.014102 * 1.66054e-27  # mass of ion, approx 2 amu converted to kg
        me = 9.109e-31  # mass of an electron

        ## Grab data
        if shotno > 95000:  # Shotno after 2017 summer upgrade = 97239. TPS2 was moved to section 5. Now, it's TPS5.
            if probes == 'both' or probes == 'tps5' or probes == 'tps2':
                # get data; prefer the section-5 node names and fall back to
                # the legacy section-2 names if they are absent
                try:
                    data, time = mdsData(shotno=shotno,
                                         # TODO these addresses need to be updated to section 5 in the tree before they can be updated here
                                         dataAddress=['\HBTEP2::TOP.SENSORS.TRI_PROBE_S5.V_ION',
                                                      '\HBTEP2::TOP.SENSORS.TRI_PROBE_S5.V_ELEC',
                                                      '\HBTEP2::TOP.SENSORS.TRI_PROBE_S5.V_FLOAT',
                                                      '\HBTEP2::TOP.SENSORS.TRI_PROBE_S5.I_SAT'],
                                         tStart=tStart, tStop=tStop)
                # BUG FIX: was a bare `except:`; narrowed to Exception
                except Exception:
                    data, time = mdsData(shotno=shotno,
                                         dataAddress=['\HBTEP2::TOP.SENSORS.TRI_PROBE_S2.V_ION',
                                                      '\HBTEP2::TOP.SENSORS.TRI_PROBE_S2.V_ELEC',
                                                      '\HBTEP2::TOP.SENSORS.TRI_PROBE_S2.V_FLOAT',
                                                      '\HBTEP2::TOP.SENSORS.TRI_PROBE_S2.I_SAT'],
                                         tStart=tStart, tStop=tStop)
                # raw TPS5 Data
                self.tps5TipA = data[0]  # the 180 is a ballparked number. needs "actual" calibration
                self.tps5TipB = data[1]
                self.tps5TipC = data[2]
                self.tps5Current = data[3]
                self.tps5Time = time
                # processed TPS5 Data
                (self.tps5VFloat, self.tps5Temp, self.tps5Density,
                 self.tps5PlasmaPotential) = self._deriveQuantities(
                    self.tps5TipB, self.tps5TipC, self.tps5Current,
                    A, e, eV, M, me)
            if probes == 'both' or probes == 'tps8':
                # get data
                data, time = mdsData(shotno=shotno,
                                     dataAddress=['\HBTEP2::TOP.SENSORS.TRI_PROBE_S8.V_ION',
                                                  '\HBTEP2::TOP.SENSORS.TRI_PROBE_S8.V_ELEC',
                                                  '\HBTEP2::TOP.SENSORS.TRI_PROBE_S8.V_FLOAT',
                                                  '\HBTEP2::TOP.SENSORS.TRI_PROBE_S8.I_SAT'],
                                     tStart=tStart, tStop=tStop)
                # raw TPS8 Data
                self.tps8TipA = data[0]  # the 180 is a ballparked number. needs "actual" calibration
                self.tps8TipB = data[1]
                self.tps8TipC = data[2]
                self.tps8Current = data[3]
                self.tps8Time = time
                # processed TPS8 Data
                (self.tps8VFloat, self.tps8Temp, self.tps8Density,
                 self.tps8PlasmaPotential) = self._deriveQuantities(
                    self.tps8TipB, self.tps8TipC, self.tps8Current,
                    A, e, eV, M, me)
        else:  # legacy shots before the 2017 summer upgrade: single probe at TRI_PROBE_1
            if probes == 'both' or probes == 'tps5' or probes == 'tps2':
                # get data
                data, time = mdsData(shotno=shotno,
                                     dataAddress=['\HBTEP2::TOP.SENSORS.TRI_PROBE_1.V_ION',
                                                  '\HBTEP2::TOP.SENSORS.TRI_PROBE_1.V_ELEC',
                                                  '\HBTEP2::TOP.SENSORS.TRI_PROBE_1.V_FLOAT',
                                                  '\HBTEP2::TOP.SENSORS.TRI_PROBE_1.I_SAT'],
                                     tStart=tStart, tStop=tStop)
                # raw TPS5 Data
                self.tps5TipA = data[0]  # the 180 is a ballparked number. needs "actual" calibration
                self.tps5TipB = data[1]
                self.tps5TipC = data[2]
                self.tps5Current = data[3]
                self.tps5Time = time
                # processed TPS5 Data
                (self.tps5VFloat, self.tps5Temp, self.tps5Density,
                 self.tps5PlasmaPotential) = self._deriveQuantities(
                    self.tps5TipB, self.tps5TipC, self.tps5Current,
                    A, e, eV, M, me)

        if plot == True:
            self.plot()
        elif plot == 'all':
            self.plot(True)

    def plotOfKTe(self):
        """ returns plot of electron temperature """
        p1 = _plot.plot(yLabel='eV', subtitle='Electron Temperature',
                        title=self.title, yLim=[-50, 100],
                        shotno=self.shotno, xLabel='time [ms]')
        if self.probes == 'both' or self.probes == 'tps5':
            p1.addTrace(xData=self.tps5Time * 1000, yData=self.tps5Temp,
                        yLegendLabel='TPS5')
        if self.probes == 'both' or self.probes == 'tps8':
            p1.addTrace(yData=self.tps8Temp, xData=self.tps8Time * 1000,
                        yLegendLabel='TPS8')
        return p1

    def plotOfNe(self):
        """ returns plot of density """
        p1 = _plot.plot(yLabel=r'$m^{-3}$ $10^{18}$', subtitle='Density',
                        yLim=[-1, 4.5], shotno=self.shotno, xLabel='time [ms]')
        if self.probes == 'both' or self.probes == 'tps5':
            p1.addTrace(yData=self.tps5Density / 1e18, xData=self.tps5Time * 1000,
                        yLegendLabel='TPS5')
        if self.probes == 'both' or self.probes == 'tps8':
            p1.addTrace(yData=self.tps8Density / 1e18, xData=self.tps8Time * 1000,
                        yLegendLabel='TPS8')
        return p1

    def plotOfVf(self):
        """ returns plot of floating potential """
        p1 = _plot.plot(yLabel='V', subtitle='Floating Potential',
                        xLabel='time [ms]', yLim=[-150, 75], shotno=[self.shotno])
        if self.probes == 'both' or self.probes == 'tps5':
            p1.addTrace(yData=self.tps5VFloat, xData=self.tps5Time * 1000,
                        yLegendLabel='TPS5')
        if self.probes == 'both' or self.probes == 'tps8':
            p1.addTrace(yData=self.tps8VFloat, xData=self.tps8Time * 1000,
                        yLegendLabel='TPS8')
        return p1

    def plotOfTipA(self):
        """ returns plot of tip A potential """
        p1 = _plot.plot(yLabel='V', subtitle=r'Tip A, V$_{-}$', xLabel='time [ms]',
                        shotno=[self.shotno], title=self.title)
        if self.probes == 'both' or self.probes == 'tps5':
            p1.addTrace(yData=self.tps5TipA, xData=self.tps5Time * 1000,
                        yLegendLabel='TPS5')
        if self.probes == 'both' or self.probes == 'tps8':
            p1.addTrace(yData=self.tps8TipA, xData=self.tps8Time * 1000,
                        yLegendLabel='TPS8')
        return p1

    def plotOfTipB(self):
        """ returns plot of tip B potential """
        p1 = _plot.plot(yLabel='V', subtitle=r'Tip B, V$_{+}$', xLabel='time [ms]',
                        shotno=[self.shotno])
        if self.probes == 'both' or self.probes == 'tps5':
            p1.addTrace(yData=self.tps5TipB, xData=self.tps5Time * 1000,
                        yLegendLabel='TPS5')
        if self.probes == 'both' or self.probes == 'tps8':
            p1.addTrace(yData=self.tps8TipB, xData=self.tps8Time * 1000,
                        yLegendLabel='TPS8')
        return p1

    def plotOfTipC(self):
        """ returns plot of tip C potential """
        p1 = _plot.plot(yLabel='V', subtitle=r'Tip C, V$_{f}$', xLabel='time [ms]',
                        shotno=[self.shotno])
        if self.probes == 'both' or self.probes == 'tps5':
            p1.addTrace(yData=self.tps5TipC, xData=self.tps5Time * 1000,
                        yLegendLabel='TPS5')
        if self.probes == 'both' or self.probes == 'tps8':
            p1.addTrace(yData=self.tps8TipC, xData=self.tps8Time * 1000,
                        yLegendLabel='TPS8')
        return p1

    def plotOfISat(self):
        """ returns plot of ion saturation current """
        p1 = _plot.plot(yLabel='A', xLabel='time [ms]',
                        subtitle='Ion Sat. Current', shotno=[self.shotno])
        if self.probes == 'both' or self.probes == 'tps5':
            p1.addTrace(yData=self.tps5Current, xData=self.tps5Time * 1000,
                        yLegendLabel='TPS5')
        if self.probes == 'both' or self.probes == 'tps8':
            p1.addTrace(yData=self.tps8Current, xData=self.tps8Time * 1000,
                        yLegendLabel='TPS8')
        return p1

    def plot(self, plotAll=False):
        """ Plots all relevant plots """
        if plotAll == False:
            _plot.subPlot([self.plotOfKTe(), self.plotOfNe(), self.plotOfVf()])
        else:
            _plot.subPlot([self.plotOfKTe(), self.plotOfNe(), self.plotOfVf()])
            _plot.subPlot([self.plotOfTipB(), self.plotOfTipC(),
                           self.plotOfISat()])
@_prepShotno
class paData:
    """
    Downloads poloidal array (PA1 and PA2) sensor data.  Presently, only
    poloidal measurements as the radial sensors are not yet implemeted.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool or str
        plots all relevant plots if true
        default is False
        True - plots all 64 sensors
        'sample' - plots one of each (PA1 and PA2)
        'all' - same as True
    removeBadSensors : bool
        NOTE(review): accepted but currently unused -- the removal code in
        __init__ is commented out; the known bad PA2 sensors are instead
        skipped inside plotOfPA2Stripey.

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title1 : str
        title to put at the top of PA1 figures
    title2 : str
        title to put at the top of PA2 figures
    badSensors : list (of str)
        names of known-bad PA2 sensors
    thetaPA1 : numpy.ndarray
        poloidal location of each PA1 sensor, in radians, ordered -pi..pi
    thetaPA2 : numpy.ndarray
        poloidal location of each PA2 sensor, in radians, ordered -pi..pi
    namesPA1 : numpy.ndarray
        1D array of all PA1 sensor names
    namesPA2 : numpy.ndarray
        1D array of all PA2 sensor names
    pa1Raw : list (of numpy.ndarray)
        raw PA1 sensor data
    pa2Raw : list (of numpy.ndarray)
        raw PA2 sensor data
    pa1Data : list (of numpy.ndarray)
        PA1 sensor data, high-pass filtered
    pa2Data : list (of numpy.ndarray)
        PA2 sensor data, high-pass filtered
    pa1RawFit : list (of numpy.ndarray)
        low-frequency fit that was subtracted from the raw data
    pa2RawFit : list (of numpy.ndarray)
        low-frequency fit that was subtracted from the raw data

    Subfunctions
    ------------
    plotOfPA1 :
        returns plot of PA1 sensor based on the provided index
    plotOfPA2 :
        returns plot of PA2 sensor based on the provided index
    plotOfPA1Stripey :
        returns contour ("stripey") plot of the PA1 array
    plotOfPA2Stripey :
        returns contour ("stripey") plot of the PA2 array
    plot :
        plots all relevant plots

    Notes
    -----
    'PA2_S14P' is a known bad sensor
    pa1_s16 ???
    """
    def __init__(self,shotno=98170,tStart=_TSTART,tStop=_TSTOP,plot=False,
                 removeBadSensors=True):
        self.shotno = shotno
        self.title1 = '%d, PA1 sensors' % shotno
        self.title2 = '%d, PA2 sensors' % shotno
        self.badSensors=['PA2_S14P','PA2_S27P']

        # poloidal sensor locations, degrees -> radians.
        # (an older, evenly-spaced 5.625..354.375 deg table was previously
        # used here; the values below are the surveyed positions)
        self.thetaPA1 =_np.array([-174.74778518, -164.23392461, -153.66901098, -143.01895411, -132.24974382, -121.3277924 , -110.22067715, -98.93591492, -87.23999699, -75.60839722, -63.97679673, -52.34519359, -40.71359604, -29.08199717, -17.45039318, -5.81879416, 5.81280487, 17.44440438, 29.07600466, 40.70760263, 52.33920936, 63.97080017, 75.60240749, 87.23400093, 98.93591492, 110.22067715, 121.3277924 , 132.24974382, 143.01895411, 153.66901098, 164.23392461, 174.74778518])*_np.pi/180.
        self.thetaPA2 =_np.array([-174.74778518, -164.23392461, -153.66901098, -143.01895411, -132.24974382, -121.3277924 , -110.22067715, -98.93591492, -87.23999699, -75.60839722, -63.97679673, -52.34519359, -40.71359604, -29.08199717, -17.45039318, -5.81879416, 5.81280487, 17.44440438, 29.07600466, 40.70760263, 52.33920936, 63.97080017, 75.60240749, 87.23400093, 98.93591492, 110.22067715, 121.3277924 , 132.24974382, 143.01895411, 153.66901098, 164.23392461, 174.74778518])*_np.pi/180.

        # sensor names
        self.namesPA1=_np.array([ 'PA1_S01P', 'PA1_S02P', 'PA1_S03P', 'PA1_S04P', 'PA1_S05P', 'PA1_S06P', 'PA1_S07P', 'PA1_S08P', 'PA1_S09P', 'PA1_S10P', 'PA1_S11P', 'PA1_S12P', 'PA1_S13P', 'PA1_S14P', 'PA1_S15P', 'PA1_S16P', 'PA1_S17P', 'PA1_S18P', 'PA1_S19P', 'PA1_S20P', 'PA1_S21P', 'PA1_S22P', 'PA1_S23P', 'PA1_S24P', 'PA1_S25P', 'PA1_S26P', 'PA1_S27P', 'PA1_S28P', 'PA1_S29P', 'PA1_S30P', 'PA1_S31P', 'PA1_S32P'])
        self.namesPA2=_np.array([ 'PA2_S01P', 'PA2_S02P', 'PA2_S03P', 'PA2_S04P', 'PA2_S05P', 'PA2_S06P', 'PA2_S07P', 'PA2_S08P', 'PA2_S09P', 'PA2_S10P', 'PA2_S11P', 'PA2_S12P', 'PA2_S13P', 'PA2_S14P', 'PA2_S15P', 'PA2_S16P', 'PA2_S17P', 'PA2_S18P', 'PA2_S19P', 'PA2_S20P', 'PA2_S21P', 'PA2_S22P', 'PA2_S23P', 'PA2_S24P', 'PA2_S25P', 'PA2_S26P', 'PA2_S27P', 'PA2_S28P', 'PA2_S29P', 'PA2_S30P', 'PA2_S31P', 'PA2_S32P'])
        # NOTE(review): bad-sensor removal is disabled; plotOfPA2Stripey
        # skips the known bad sensors instead.
        # if removeBadSensors==True:
        #     iBad=_np.where(self.namesPA2=='PA2_S14P')
        #     self.namesPA2=_np.delete(self.namesPA2,iBad)
        #     self.thetaPA2=_np.delete(self.thetaPA2,iBad)

        # compile full sensor addresses names
        pa1SensorAddresses=[]
        pa2SensorAddresses=[]
        rootAddress='\HBTEP2::TOP.SENSORS.MAGNETIC:';
        for i in range(0,len(self.namesPA1)):
            pa1SensorAddresses.append(rootAddress+self.namesPA1[i])
        for i in range(0,len(self.namesPA2)):
            pa2SensorAddresses.append(rootAddress+self.namesPA2[i])

        # get raw data
        self.pa1Raw,self.pa1Time=mdsData(shotno,pa1SensorAddresses, tStart, tStop)
        self.pa2Raw,self.pa2Time=mdsData(shotno,pa2SensorAddresses, tStart, tStop)

        # data smoothing algorithm
        self.pa1Data=[]
        self.pa1RawFit=[]
        self.pa2Data=[]
        self.pa2RawFit=[]

        # gaussian high-pass filter: removes the slow (equilibrium)
        # component, keeping fluctuations above roughly 20 kHz
        for i in range(0,len(self.namesPA1)):
            temp,temp2=_process.gaussianHighPassFilter(self.pa1Raw[i][:],self.pa1Time,timeWidth=1./20000)
            self.pa1RawFit.append(temp2)
            self.pa1Data.append(temp)
        for i in range(0,len(self.namesPA2)):
            temp,temp2=_process.gaussianHighPassFilter(self.pa2Raw[i][:],self.pa2Time,timeWidth=1./20000)
            self.pa2RawFit.append(temp2)
            self.pa2Data.append(temp)

        # plot
        if plot==True or plot=='all':
            self.plot(True)
        if plot=='sample':
            self.plotOfPA1().plot();
            self.plotOfPA2().plot();

    def plotOfPA1Stripey(self,tStart=2e-3,tStop=4e-3):
        """ Contour ("stripey") plot of all PA1 sensors vs time and theta. """
        iStart=_process.findNearest(self.pa1Time,tStart)
        iStop=_process.findNearest(self.pa1Time,tStop)
        p1=_plot.plot(title=self.title1,subtitle='PA1 Sensors',
                      xLabel='Time [ms]', yLabel='theta [rad]',zLabel='Gauss',
                      plotType='contour',colorMap=_plot._red_green_colormap(),
                      centerColorMapAroundZero=True)
        # slice makes a new list, so self.pa1Data is not modified below;
        # each channel is trimmed to the time window and converted T -> Gauss
        data=self.pa1Data[0:len(self.namesPA1)]
        for i in range(0,len(data)):
            data[i]=data[i][iStart:iStop]*1e4
        p1.addTrace(self.pa1Time[iStart:iStop]*1e3,self.thetaPA1,
                    _np.array(data))
        return p1

    def plotOfPA2Stripey(self,tStart=2e-3,tStop=4e-3):
        """ Contour ("stripey") plot of PA2 sensors vs time and theta.

        Known bad sensors are skipped (the list here duplicates
        self.badSensors). """
        iStart=_process.findNearest(self.pa2Time,tStart)
        iStop=_process.findNearest(self.pa2Time,tStop)
        p1=_plot.plot(title=self.title2,subtitle='PA2 Sensors',
                      xLabel='Time [ms]', yLabel='theta [rad]',zLabel='Gauss',
                      plotType='contour',colorMap=_plot._red_green_colormap(),
                      centerColorMapAroundZero=True)
        data=self.pa2Data[0:len(self.namesPA2)]
        data2=[]
        theta=[]
        for i in range(0,len(data)):
            # skip the known bad sensors (same names as self.badSensors)
            if self.namesPA2[i] in ['PA2_S14P', 'PA2_S27P']:
                pass
            else:
                data2.append(data[i][iStart:iStop]*1e4)
                theta.append(self.thetaPA2[i])
        p1.addTrace(self.pa2Time[iStart:iStop]*1e3,theta,
                    data2)
        return p1

    def plotOfPA1(self, i=0, alsoPlotRawAndFit=True):
        """ Plot one of the PA1 plots. based on index, i. """
        p1=_plot.plot(xLabel='time [ms]',yLabel=r'Gauss',title=self.title1,
                      shotno=[self.shotno],subtitle=self.namesPA1[i]);
        # smoothed data
        p1.addTrace(yData=self.pa1Data[i],xData=self.pa1Time*1000,
                    yLegendLabel='smoothed')
        if alsoPlotRawAndFit==True:
            # raw data
            p1.addTrace(yData=self.pa1Raw[i],xData=self.pa1Time*1000,
                        yLegendLabel='raw')
            # fit data (which is subtracted from raw)
            p1.addTrace(yData=self.pa1RawFit[i],xData=self.pa1Time*1000,
                        yLegendLabel='fit')
        return p1

    def plotOfPA2(self, i=0, alsoPlotRawAndFit=True):
        """ Plot one of the PA2 plots. based on index, i. """
        p1=_plot.plot(xLabel='time [ms]',yLabel='Gauss',
                      title=self.title2,subtitle=self.namesPA2[i],
                      shotno=self.shotno);
        # smoothed data
        p1.addTrace(yData=self.pa2Data[i],xData=self.pa2Time*1000,
                    yLegendLabel='smoothed')
        if alsoPlotRawAndFit==True:
            # raw data
            p1.addTrace(yData=self.pa2Raw[i],xData=self.pa2Time*1000,
                        yLegendLabel='raw')
            # fit data (which is subtracted from raw)
            p1.addTrace(yData=self.pa2RawFit[i],xData=self.pa2Time*1000,
                        yLegendLabel='fit')
        return p1

    def plot(self,plotAll=False):
        """ Plots all 64 sensors in two 4x8 subplot grids (PA1 then PA2). """
        sp1=[[],[],[],[]]
        sp2=[[],[],[],[]]
        count=0
        # PA1: 4 rows x 8 columns, indexed by a running counter
        for i in range(0,4):
            for j in range(0,8):
                if plotAll==True:
                    newPlot=self.plotOfPA1(count,alsoPlotRawAndFit=True)
                else:
                    newPlot=self.plotOfPA1(count,alsoPlotRawAndFit=False)
                newPlot.subtitle=self.namesPA1[count]
                newPlot.yLegendLabel=[]
                sp1[i].append(newPlot)
                count+=1;
        k=0
        count=0
        # PA2: same grid; if sensors were removed, pad the grid with empty
        # plots.  NOTE(review): count still increments for padded slots.
        for i in range(0,4):
            for j in range(0,8):
                k=i*8+j*1
                print("i %d, j %d, k %d"%(i,j,k))  # debug output
                # check to see if all 32 sensors are present
                if k>=len(self.namesPA2):
                    # and create an empty plot if not
                    newPlot=_plot.plot()
                else:
                    print("%d" %k)  # debug output
                    if plotAll==True:
                        newPlot=self.plotOfPA2(count,alsoPlotRawAndFit=True)
                    else:
                        newPlot=self.plotOfPA2(count,alsoPlotRawAndFit=False)
                    newPlot.subtitle=self.namesPA2[count]
                    newPlot.yLegendLabel=[]
                sp2[i].append(newPlot)
                count+=1;
        sp1[0][0].title=self.title1
        sp2[0][0].title=self.title2
        sp1=_plot.subPlot(sp1,plot=False)
        sp2=_plot.subPlot(sp2,plot=False)
        # sp1.shareY=True;
        sp1.plot()
        sp2.plot()
@_prepShotno
class sxrData:
    """
    Downloads (and optionally plots) soft x-ray (SXR) fan-array sensor data.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool or str
        default is False
        True - plots stripey plot of the fan array
        'all' - additionally plots each individual channel

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title to put at the top of figures
    sensor_num : numpy.ndarray
        sensor numbers of the 11 (of 16) working channels
    sensorNames : list (of str)
        'CHANNEL_XX' style name for each working channel
    data : list (of numpy.ndarray)
        list of 11 (of 16) data arrays, one for each channel
    time : numpy.ndarray
        time base shared by all channels

    Subfunctions
    ------------
    plotAll :
        returns subplot of all channels
    plotOfSXRStripey :
        returns a stripey (contour) plot of the sensors
    plotOfOneChannel :
        returns a plot of a single channel based on the provided index, i

    Notes
    -----
    Only 11 (of 16) of the SXR sensors are included in the data below. Some of the
    missing sensors are broken and others include anamolous or attenuated
    results.
    """
    def __init__(self,shotno=98170,tStart=_TSTART,tStop=_TSTOP,plot=False):
        self.shotno = shotno
        self.title = '%d, SXR sensors' % shotno

        # note that sensors 5, 8, 10, 13 and 15 are not included.
        self.sensor_num=_np.array([ 0, 1, 2, 3, 4, 6, 7, 9, 11, 12, 14])
        # self.sensor_num=_np.array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15])
        # digitizer inputs for the fan array start at west-rack CPCI input 75
        channels=self.sensor_num+75;

        # compile full sensor addresses names
        sensorAddresses=[]
        self.sensorNames=[]
        for i in range(0,len(channels)):
            # zero-pad single-digit sensor numbers, e.g. 'CHANNEL_03'
            self.sensorNames.append('CHANNEL_%02d' % self.sensor_num[i])
            sensorAddresses.append('\HBTEP2::TOP.DEVICES.WEST_RACK:CPCI:INPUT_%02d' %channels[i])

        # get data
        self.data,self.time=mdsData(shotno,sensorAddresses, tStart, tStop)

        # plot
        if plot==True:
            self.plotOfSXRStripey(tStart,tStop).plot()
        elif plot=='all':
            self.plotAll()
            self.plotOfSXRStripey(tStart,tStop).plot()

    def plotOfSXRStripey(self,tStart=1e-3,tStop=10e-3):
        """ Contour ("stripey") plot of all channels vs time and sensor number.

        BUGFIX: the previous implementation aliased `data = self.data` and
        truncated the channel arrays in place, silently corrupting self.data
        for any later use.  The channels are now copied when trimmed. """
        iStart=_process.findNearest(self.time,tStart)
        iStop=_process.findNearest(self.time,tStop)
        p1=_plot.plot(title=self.title,subtitle='SXR Fan Array',
                      xLabel='Time [ms]', yLabel='Sensor Number',zLabel='a.u.',
                      plotType='contour',#colorMap=_plot._red_green_colormap(),
                      centerColorMapAroundZero=True)
        # trim each channel to the requested window WITHOUT mutating self.data
        data=[d[iStart:iStop] for d in self.data]
        p1.addTrace(self.time[iStart:iStop]*1e3,self.sensor_num,
                    _np.array(data))
        return p1

    def plotOfOneChannel(self, i=0):
        """ Plot one of the SXR channels, selected by index, i. """
        p1=_plot.plot(xLabel='time [ms]',yLabel=r'a.u.',title=self.title,
                      shotno=[self.shotno],subtitle=self.sensorNames[i]);
        p1.addTrace(yData=self.data[i],xData=self.time*1000,
                    yLegendLabel=self.sensorNames[i])
        return p1

    def plotAll(self):
        """ Plot every channel individually and return them as one subplot
        object (the subplot itself is built but not drawn). """
        sp1=[]
        for i in range(0,len(self.data)):
            newPlot=self.plotOfOneChannel(i)
            newPlot.subtitle=self.sensorNames[i]
            newPlot.yLegendLabel=[]
            newPlot.plot()
            sp1.append(newPlot)
        sp1[0].title=self.title
        sp1=_plot.subPlot(sp1,plot=False)
        # sp1.shareY=True;
        # sp1.plot()
        return sp1
@_prepShotno
class fbData:
    """
    Downloads feedback (FB) array sensor data.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool or str
        plots all relevant plots if true
        default is False
        True - Plots all poloidal sensor data
        'sample'- Plots a sample of each FB poloidal and radial sensor
        'all' - same as True
    removeBadSensors : bool
        if True, broken poloidal sensors are detected with a sigma=1
        outlier rejection on the average |signal| and removed from all
        poloidal attributes
    invertNegSignals : bool
        if True, any poloidal signal with a negative average is multiplied
        by -1 (corrects sensors wired with inverted polarity)

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title to be added to each plot
    badSensors : numpy.ndarray (of str)
        names of poloidal sensors rejected as outliers
        (only set when removeBadSensors is True)
    fbPolNames : 2D list (of str)
        name of every (surviving) poloidal FB sensor
    fbRadNames : 2D list (of str)
        name of every radial FB sensor
    phi : 2D list/array
        toroidal locations for all sensors. units in radians.
    theta : 2D list/array
        poloidal locations for all sensors. units in radians.
    fbPolRaw : 2D list (of numpy.ndarray)
        raw FB-poloidal data
    fbRadRaw : 2D list (of numpy.ndarray)
        raw FB-radial data
    fbPolData : 2D list (of numpy.ndarray)
        FB-poloidal data, high-pass filtered
    fbRadData : 2D list (of numpy.ndarray)
        NOTE(review): initialized but never populated -- only the poloidal
        signals are filtered below, so plotOfSingleRad's use of
        fbRadData/fbRadRawFit will fail with an IndexError.
    fbPolRawFit : 2D list (of numpy.ndarray)
        smoothed fit of raw poloidal data. subtracted from data to get
        fbPolData
    fbRadRawFit : 2D list (of numpy.ndarray)
        see fbRadData note above; never populated.

    Subfunctions
    ------------
    plotOfSinglePol :
        returns plot of a specified poloidal sensor
    plotOfSingleRad :
        returns plot of a specified radial sensor (see fbRadData note)
    plotOfFBPolStripey :
        returns contour ("stripey") plot of one poloidal sensor row
    plot :
        plots all relevant data

    Notes
    -----
    Known bad sensors: 'FB03_S1P','FB06_S2P','FB08_S3P'
    The S4P array has no broken sensors at present.
    """
    def __init__(self,shotno=98170,tStart=_TSTART,tStop=_TSTOP,plot=False,removeBadSensors=True,invertNegSignals=True):
        self.shotno = shotno
        self.title = "%d, FB sensors" % shotno
        # self.badSensors=['FB03_S1P','FB06_S2P','FB08_S3P'] # some sensors appear to be broken

        # sensor names: 4 rows (S1..S4) of 10 toroidal positions (FB01..FB10)
        fbPolNames=[['FB01_S1P', 'FB02_S1P', 'FB03_S1P', 'FB04_S1P', 'FB05_S1P', 'FB06_S1P', 'FB07_S1P', 'FB08_S1P', 'FB09_S1P', 'FB10_S1P'], ['FB01_S2P', 'FB02_S2P', 'FB03_S2P', 'FB04_S2P', 'FB05_S2P', 'FB06_S2P', 'FB07_S2P', 'FB08_S2P', 'FB09_S2P', 'FB10_S2P'], ['FB01_S3P', 'FB02_S3P', 'FB03_S3P', 'FB04_S3P', 'FB05_S3P', 'FB06_S3P', 'FB07_S3P', 'FB08_S3P', 'FB09_S3P', 'FB10_S3P'], ['FB01_S4P', 'FB02_S4P', 'FB03_S4P', 'FB04_S4P', 'FB05_S4P', 'FB06_S4P', 'FB07_S4P', 'FB08_S4P', 'FB09_S4P', 'FB10_S4P']]
        self.fbRadNames=[['FB01_S1R', 'FB02_S1R', 'FB03_S1R', 'FB04_S1R', 'FB05_S1R', 'FB06_S1R', 'FB07_S1R', 'FB08_S1R', 'FB09_S1R', 'FB10_S1R'], ['FB01_S2R', 'FB02_S2R', 'FB03_S2R', 'FB04_S2R', 'FB05_S2R', 'FB06_S2R', 'FB07_S2R', 'FB08_S2R', 'FB09_S2R', 'FB10_S2R'], ['FB01_S3R', 'FB02_S3R', 'FB03_S3R', 'FB04_S3R', 'FB05_S3R', 'FB06_S3R', 'FB07_S3R', 'FB08_S3R', 'FB09_S3R', 'FB10_S3R'], ['FB01_S4R', 'FB02_S4R', 'FB03_S4R', 'FB04_S4R', 'FB05_S4R', 'FB06_S4R', 'FB07_S4R', 'FB08_S4R', 'FB09_S4R', 'FB10_S4R']]

        # sensor, toroidal location (degrees -> radians), same for each row
        # self.phi=_np.pi/180.*_np.array([242.5-360, 278.5-360, 314.5-360, 350.5-360, 26.5, 62.5, 98.5, 134.5, 170.5, 206.5]);#*_np.pi/180.
        phi=_np.pi/180.*_np.array([241,277,313,349,25,61, 97,133,169,205])
        phi=[phi,phi,phi,phi]
        # poloidal location of each of the 4 rows
        theta=_np.pi/180.*_np.array([_np.ones(10)*(-83.4),_np.ones(10)*(-29.3),_np.ones(10)*29.3,_np.ones(10)*83.4])
        theta=[theta[0,:],theta[1,:],theta[2,:],theta[3,:]]

        ## construct full sensor addresses
        fbPolSensorAddresses=[[],[],[],[]]
        fbRadSensorAddresses=[[],[],[],[]]
        rootAddress='\HBTEP2::TOP.SENSORS.MAGNETIC:';
        for j in range(0,4):
            for i in range(0,len(fbPolNames[j])):
                fbPolSensorAddresses[j].append(rootAddress+fbPolNames[j][i])
                fbRadSensorAddresses[j].append(rootAddress+self.fbRadNames[j][i])

        # get raw data, one mdsData call per sensor row
        fbPolRaw=[[],[],[],[]];
        fbPolRaw[0], self.fbPolTime =mdsData(shotno,fbPolSensorAddresses[0], tStart, tStop)
        fbPolRaw[1], self.fbPolTime =mdsData(shotno,fbPolSensorAddresses[1], tStart, tStop)
        fbPolRaw[2], self.fbPolTime =mdsData(shotno,fbPolSensorAddresses[2], tStart, tStop)
        fbPolRaw[3], self.fbPolTime =mdsData(shotno,fbPolSensorAddresses[3], tStart, tStop)

        self.fbRadRaw=[[],[],[],[]];
        self.fbRadRaw[0], self.fbRadTime =mdsData(shotno,fbRadSensorAddresses[0], tStart, tStop)
        self.fbRadRaw[1], self.fbRadTime =mdsData(shotno,fbRadSensorAddresses[1], tStart, tStop)
        self.fbRadRaw[2], self.fbRadTime =mdsData(shotno,fbRadSensorAddresses[2], tStart, tStop)
        self.fbRadRaw[3], self.fbRadTime =mdsData(shotno,fbRadSensorAddresses[3], tStart, tStop)

        # remove bad/broken sensors using a sigma=1 outlier rejection method
        if removeBadSensors==True:
            # flatten average |signal| for every poloidal sensor
            linArrayOfSensorData=[]
            linListOfSensorNames=[]
            for i in range(0,4):
                for j in range(0,len(fbPolNames[i])):
                    linArrayOfSensorData.append(_np.average(_np.abs(fbPolRaw[i][j])))
                    linListOfSensorNames.append(fbPolNames[i][j])
            temp,indicesOfGoodSensors=_process.rejectOutliers(_np.array(linArrayOfSensorData),sigma=1.0)
            indicesOfBadSensors = indicesOfGoodSensors==False
            self.badSensors=_np.array(linListOfSensorNames)[indicesOfBadSensors]
            # rebuild the poloidal attributes without the rejected sensors
            self.fbPolNames=[[],[],[],[]]
            self.fbPolRaw=[[],[],[],[]];
            self.phi=[[],[],[],[]];
            self.theta=[[],[],[],[]];
            for j in range(0,4):
                for i in range(0,len(fbPolNames[j])):
                    if fbPolNames[j][i] not in self.badSensors:
                        self.fbPolRaw[j].append(fbPolRaw[j][i])
                        self.fbPolNames[j].append(fbPolNames[j][i])
                        self.theta[j].append(theta[j][i])
                        self.phi[j].append(phi[j][i])
                    else:
                        print("Removing broken signal: %s" % fbPolNames[j][i])
        else:
            self.fbPolNames=fbPolNames
            self.fbPolRaw=fbPolRaw
            self.theta=theta
            self.phi=phi

        # make sure the signals are not inverted
        if invertNegSignals==True:
            for i in range(0,4):
                for j in range(0,len(self.fbPolRaw[i])):
                    if _np.average(self.fbPolRaw[i][j])<0:
                        self.fbPolRaw[i][j]*=-1
                        print("inverting signal %s"%self.fbPolNames[i][j])

        # remove low-frequency offset (we are only interested in high-freq data)
        # NOTE(review): only the poloidal signals are filtered; the radial
        # lists below stay empty.
        self.fbPolData=[[],[],[],[]]
        self.fbPolRawFit=[[],[],[],[]]
        self.fbRadData=[[],[],[],[]]
        self.fbRadRawFit=[[],[],[],[]]
        for j in range(0,4):
            for i in range(0,len(self.fbPolNames[j])):
                temp,temp2=_process.gaussianHighPassFilter(self.fbPolRaw[j][i][:],self.fbPolTime,timeWidth=1./20000)
                self.fbPolRawFit[j].append(temp2)
                self.fbPolData[j].append(temp)

        # plot
        if plot=='sample':
            self.plotOfSinglePol().plot();
            self.plotOfSingleRad().plot();
        elif plot == True or plot=='all':
            self.plot(True)

    def plotOfFBPolStripey(self,tStart=2e-3,tStop=4e-3,sensorArray='S4P'):
        """ Contour ("stripey") plot of one poloidal sensor row vs time and
        phi.  sensorArray selects the row, e.g. 'S4P' -> row index 3. """
        # grab and trim data to desired time rane
        iStart=_process.findNearest(self.fbPolTime,tStart)
        iStop=_process.findNearest(self.fbPolTime,tStop)
        # `*1` makes a shallow copy so the per-channel trimming below does
        # not rebind entries of self.fbPolData's row
        data=self.fbPolData[int(sensorArray[1])-1]*1
        for i in range(0,len(data)):
            data[i]=data[i][iStart:iStop]*1e4
        # create and return plot
        p1=_plot.plot(title=self.title,subtitle="FB "+sensorArray+' Sensors',
                      xLabel='Time [ms]', yLabel='phi [rad]',zLabel='Gauss',
                      plotType='contour',colorMap=_plot._red_green_colormap(),
                      centerColorMapAroundZero=True)
        # unwrap phi so it increases monotonically from the minimum value
        phi=_np.array(self.phi[int(sensorArray[1])-1])
        index=_np.where(phi==_np.min(phi))[0][0]
        phi[0:index]-=2*_np.pi
        p1.addTrace(self.fbPolTime[iStart:iStop]*1e3,phi,
                    zData=_np.array(data))
        return p1

    def plotOfSinglePol(self, row=0, col=0,plot=True,alsoPlotRawAndFit=True):
        """
        Plots poloidal data from FB sensors
        """
        i=col; j=row;
        # initialize plot
        p1=_plot.plot(xLabel='time [ms]',yLabel=r'Gauss',
                      subtitle=self.fbPolNames[j][i],shotno=[self.shotno],
                      title=self.title);
        # smoothed data
        p1.addTrace(yData=self.fbPolData[j][i],xData=self.fbPolTime*1000,
                    yLegendLabel='smoothed')
        if alsoPlotRawAndFit==True:
            # raw data
            p1.addTrace(yData=self.fbPolRaw[j][i],xData=self.fbPolTime*1000,
                        yLegendLabel='Raw')
            # fit data (which is subtracted from raw)
            p1.addTrace(yData=self.fbPolRawFit[j][i],xData=self.fbPolTime*1000,
                        yLegendLabel='Fit')
        return p1

    def plotOfSingleRad(self, row=0, col=0,plot=True,alsoPlotRawAndFit=True):
        """
        Plots radial data from FB sensors

        NOTE(review): fbRadData/fbRadRawFit are never populated in
        __init__, so this method currently raises IndexError -- confirm
        before relying on it.
        """
        i=col; j=row;
        # initialize plot
        p1=_plot.plot(xLabel='time [ms]',yLabel=r'Gauss',
                      subtitle=self.fbRadNames[j][i],shotno=[self.shotno],
                      title=self.title,yLim=[-0.01,0.05]);
        # smoothed data
        p1.addTrace(yData=self.fbRadData[j][i],xData=self.fbRadTime*1000,
                    yLegendLabel='smoothed')
        if alsoPlotRawAndFit==True:
            # raw data
            p1.addTrace(yData=self.fbRadRaw[j][i],xData=self.fbRadTime*1000,
                        yLegendLabel='Raw')
            # fit data (which is subtracted from raw)
            p1.addTrace(yData=self.fbRadRawFit[j][i],xData=self.fbRadTime*1000,
                        yLegendLabel='Fit')
        return p1

    def plot(self,plotAll=True):
        """
        Plots all 40 poloidal FB sensors
        """
        for i in range(0,4):
            for j in range(0,len(self.fbPolNames[i])):
                if plotAll==True:
                    newPlot=self.plotOfSinglePol(i,j,alsoPlotRawAndFit=True)
                else:
                    newPlot=self.plotOfSinglePol(i,j,alsoPlotRawAndFit=False)
                newPlot.plot()
@_prepShotno
class taData:
    """
    Downloads toroidal array (TA) sensor data. Presently, only poloidal
    measurements as the radial sensors are not yet implemeted.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool or str
        plots all relevant plots if true
        default is False
        True - Plots all 30 poloidal sensors
        'sample'- Plots a single sample poloidal sensor
        'all' - same as True

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title to go on all plots
    namesTAPol : list (of str)
        names of poloidal-TA sensors
    namesTARad : list (of str)
        names of radial-TA sensors
    phi : numpy.ndarray
        toroidal location of each poloidal-TA sensor. units in radians.
    theta : numpy.ndarray
        poloidal location of each poloidal-TA sensor. units in radians.
    taPolRaw : list (of numpy.ndarray)
        raw poloidal-TA sensor data
    taRadRaw : list (of numpy.ndarray)
        raw radial-TA sensor data
    taPolTime : numpy.ndarray
        time data associated with poloidal-TA sensor data
    taRadTime : numpy.ndarray
        time data associated with radial-TA sensor data
    taPolData : list (of numpy.ndarray)
        poloidal-TA sensor data, high-pass filtered
    taRadData : list (of numpy.ndarray)
        NOTE(review): never populated -- only the poloidal signals are
        filtered in __init__.
    taPolRawFit : list (of numpy.ndarray)
        fit of raw poloidal-TA sensor data. subtract this from taPolRaw to get
        taPolData
    taRadRawFit : list (of numpy.ndarray)
        see taRadData note above; never populated.

    Subfunctions
    ------------
    plotOfSinglePol :
        returns plot function of a single poloidal sensor, based on provided
        index
    plotOfTAStripey :
        returns contour ("stripey") plot of all poloidal sensors
    plot :
        plots all relevant datas
    """
    def __init__(self,shotno=98173,tStart=_TSTART,tStop=_TSTOP,plot=False):
        self.shotno = shotno
        self.title = "%d, TA sensor data." % shotno

        # names of poloidal and radial sensors
        self.namesTAPol=['TA01_S1P', 'TA01_S2P', 'TA01_S3P', 'TA02_S1P', 'TA02_S2P', 'TA02_S3P', 'TA03_S1P', 'TA03_S2P', 'TA03_S3P', 'TA04_S1P', 'TA04_S2P', 'TA04_S3P', 'TA05_S1P', 'TA05_S2P', 'TA05_S3P', 'TA06_S1P', 'TA06_S2P', 'TA06_S3P', 'TA07_S1P', 'TA07_S2P', 'TA07_S3P', 'TA08_S1P', 'TA08_S2P', 'TA08_S3P', 'TA09_S1P', 'TA09_S2P', 'TA09_S3P', 'TA10_S1P', 'TA10_S2P', 'TA10_S3P'];
        self.namesTARad=['TA01_S2R', 'TA02_S2R', 'TA03_S2R', 'TA04_S2R', 'TA05_S2R', 'TA06_S2R', 'TA07_S2R', 'TA08_S2R', 'TA09_S2R', 'TA10_S2R']

        # toroidal locations for the poloidal measurements (degrees->radians)
        self.phi=_np.pi/180.*_np.array([241.5,250.5,259.5,277.5,286.5,295.5,313.5,322.5,331.5,349.5,358.5,7.5,25.5,34.5,43.5,61.5,70.5,79.5,97.5,106.5,115.5,133.5,142.5,151.5,169.5,178.5,187.5,205.5,214.5,223.5])
        # poloidal locations of sensors (all at 189 degrees)
        self.theta=_np.ones(len(self.phi))*189*_np.pi/180
        # # toroidal locations for the radial measurements
        # self.phiR=_np.pi/180.*_np.array([-108., -72., -36., 0., 36., 72., 108., 144., 180., 216.])

        # compile full sensor addresses names
        taPolSensorAddresses=[]
        taRadSensorAddresses=[]
        rootAddress='\HBTEP2::TOP.SENSORS.MAGNETIC:';
        for i in range(0,30):
            taPolSensorAddresses.append(rootAddress+self.namesTAPol[i])
            # only 10 radial sensors exist (one per section)
            if i < 10:
                taRadSensorAddresses.append(rootAddress+self.namesTARad[i])

        # get raw data
        self.taPolRaw,self.taPolTime=mdsData(shotno,taPolSensorAddresses, tStart, tStop)
        self.taRadRaw,self.taRadTime=mdsData(shotno,taRadSensorAddresses, tStart, tStop)

        # data smoothing algorithm
        self.taPolData=[]
        self.taPolRawFit=[]
        # NOTE(review): the radial lists below are never filled
        self.taRadData=[]
        self.taRadRawFit=[]

        # high pass filter the measurements (removes the slow equilibrium
        # component, keeping fluctuations above roughly 20 kHz)
        for i in range(0,30):
            temp,temp2=_process.gaussianHighPassFilter(self.taPolRaw[i][:],self.taPolTime,timeWidth=1./20000)
            self.taPolData.append(temp)
            self.taPolRawFit.append(temp2)

        # plot
        if plot=='sample':
            self.plotOfSinglePol().plot();
        elif plot==True or plot=='all':
            self.plot(True);

    # TODO Add plotOfSingleRad function

    def plotOfTAStripey(self,tStart=2e-3,tStop=4e-3):
        """ Contour ("stripey") plot of all poloidal TA sensors vs time and
        phi. """
        iStart=_process.findNearest(self.taPolTime,tStart)
        iStop=_process.findNearest(self.taPolTime,tStop)
        p1=_plot.plot(title=self.title,subtitle='TA Sensors',
                      xLabel='Time [ms]', yLabel='phi [rad]',zLabel='Gauss',
                      plotType='contour',colorMap=_plot._red_green_colormap(),
                      centerColorMapAroundZero=True)
        # phi is not in order. this corrects for that by rotating both
        # arrays so that they start at the minimum phi
        maxValInd=_np.where(self.phi==self.phi.min())[0][0]
        # print(type(self.taPolData[0:30]))
        data=list(_np.concatenate((self.taPolData[maxValInd:30],self.taPolData[0:maxValInd]),axis=0))
        phi=_np.concatenate((self.phi[maxValInd:30],self.phi[0:maxValInd]))
        # truncate data with time and convert T -> Gauss
        for i in range(0,len(data)):
            data[i]=data[i][iStart:iStop]*1e4
        p1.addTrace(self.taPolTime[iStart:iStop]*1e3,phi,
                    _np.array(data))
        return p1

    def plotOfSinglePol(self, i=0, alsoPlotRawAndFit=True):
        """ Plot a single poloidal TA sensor, selected by index, i. """
        p1=_plot.plot(xLabel='time [ms]',yLabel=r'Gauss',shotno=[self.shotno],
                      title=self.title,subtitle=self.namesTAPol[i]);
        # smoothed data
        p1.addTrace(yData=self.taPolData[i],xData=self.taPolTime*1000,
                    yLegendLabel='smoothed')
        if alsoPlotRawAndFit==True:
            # raw data
            p1.addTrace(yData=self.taPolRaw[i],xData=self.taPolTime*1000,
                        yLegendLabel='raw')
            # fit data (which is subtracted from raw)
            p1.addTrace(yData=self.taPolRawFit[i],xData=self.taPolTime*1000,
                        yLegendLabel='fit')
        return p1

    def plot(self,plotAll=True):
        """
        Plots poloidal sensor data for all 30 sensors in a 5x6 grid.
        Warning, 30 plots is tough on memory.
        """
        # TODO(john) update this so that all poloidal data is on a single
        # window. Same with radial
        sp1=[[],[],[],[],[]]
        count=0
        for i in range(0,5):
            for j in range(0,6):
                if plotAll==True:
                    newPlot=self.plotOfSinglePol(count,alsoPlotRawAndFit=True);
                else:
                    newPlot=self.plotOfSinglePol(count,alsoPlotRawAndFit=False);
                newPlot.subtitle=self.namesTAPol[count]
                newPlot.yLegendLabel=[]
                sp1[i].append(newPlot)
                count+=1;
        sp1[0][0].title=self.title
        sp1=_plot.subPlot(sp1,plot=False)
        sp1.shareY=True;
        sp1.plot()
@_prepShotno
class groundCurrentData:
    """
    Ground current flowing through the west and north racks to the grounding bus.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool
        plots all relevant plots if true
        default is False

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title to go on all plots
    wRackCurrent : numpy.ndarray
        west rack current to grounding bus
    nRackCurrent : numpy.ndarray
        north rack current to grounding bus
    wRackTime : numpy.ndarray
        time data
    nRackTime : numpy.ndarray
        time data

    Subfunctions
    ------------
    plot :
        plots data
    """
    def __init__(self,shotno=96530,tStart=_TSTART,tStop=_TSTOP,plot=False):
        self.shotno = shotno
        # BUGFIX: title previously read "Ext. Rogowski Data" -- a copy-paste
        # from quartzJumperData; corrected to describe this class's data.
        self.title = "%d, Ground Current Data" % shotno

        # get north rack data (CPCI input 96 on the north rack)
        data, time=mdsData(shotno=shotno,
                           dataAddress=['\HBTEP2::TOP.DEVICES.NORTH_RACK:CPCI:INPUT_96'],
                           tStart=tStart, tStop=tStop)
        self.nRackCurrent=data[0];
        self.nRackTime=time;

        # get west rack data (CPCI input 96 on the west rack)
        data, time=mdsData(shotno=shotno,
                           dataAddress=['\HBTEP2::TOP.DEVICES.WEST_RACK:CPCI:INPUT_96'],
                           tStart=tStart, tStop=tStop)
        self.wRackCurrent=data[0];
        self.wRackTime=time;

        if plot == True:
            self.plot().plot()

    def plot(self):
        """ Plot both rack ground currents on a shared axis; returns the axes. """
        fig,p1=_plt.subplots()
        p1.plot(self.nRackTime*1e3,self.nRackCurrent,label='North Rack Ground Current')
        p1.plot(self.wRackTime*1e3,self.wRackCurrent,label='West Rack Ground Current')
        _plot.finalizeSubplot(p1,xlabel='Time (ms)',ylabel='Current (A)')
        _plot.finalizeFigure(fig,title=self.title)
        return p1
@_prepShotno
class quartzJumperData:
    """
    External rogowski data

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool
        plots all relevant plots if true
        default is False

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title to go on all plots
    eRogA, eRogB, eRogC, eRogD : numpy.ndarray
        external rogowski A/B/C/D signals
    time : numpy.ndarray
        time data
    sensorNames : list (of str)
        chamber-section label for each rogowski
    phi : numpy.ndarray
        toroidal location of each rogowski, radians
    theta : numpy.ndarray
        poloidal location of each rogowski, radians

    Subfunctions
    ------------
    plot :
        plots all four rogowski signals in a shared-x subplot grid

    Notes
    -----
    Rog. D is permanently off for the time being
    Rog. B is typically off in favor of Rog. A (not always)
    """
    def __init__(self,shotno=96530,tStart=_TSTART,tStop=_TSTOP,plot=False):
        self.shotno = shotno
        self.title = "%d, Ext. Rogowski Data" % shotno

        # fetch all four rogowski signals with a single MDSplus request
        addresses=['\HBTEP2::TOP.SENSORS.EXT_ROGS:EX_ROG_A',
                   '\HBTEP2::TOP.SENSORS.EXT_ROGS:EX_ROG_B',
                   '\HBTEP2::TOP.SENSORS.EXT_ROGS:EX_ROG_C',
                   '\HBTEP2::TOP.SENSORS.EXT_ROGS:EX_ROG_D',]
        signals, timeBase=mdsData(shotno=shotno,
                                  dataAddress=addresses,
                                  tStart=tStart, tStop=tStop)
        self.eRogA=signals[0]
        self.eRogB=signals[1]
        self.eRogC=signals[2]
        self.eRogD=signals[3]
        self.time=timeBase

        # sensor labels and locations (degrees -> radians for phi)
        self.sensorNames=['A. Section 9-10','B. Section 3-4','C. Section 10-1','D. Section 5-6']
        self.phi=_np.array([198,342,234,54])*_np.pi/180.
        self.theta=_np.array([0,0,0,0])

        if plot == True:
            self.plot()

    def plot(self):
        """ Plot all four rogowskis on stacked, shared-x axes; returns the axes. """
        fig,axes=_plt.subplots(4,sharex=True)
        traces=[self.eRogA,self.eRogB,self.eRogC,self.eRogD]
        labels=['Rogowski A','Rogowski B','Rogowski C','Rogowski D']
        for ax,trace,label in zip(axes,traces,labels):
            ax.plot(self.time*1e3,trace,label=label)
        _plot.finalizeSubplot(axes,xlabel='Time (ms)',ylabel='Current (A)')
        _plot.finalizeFigure(fig,title=self.title)
        return axes
@_prepShotno
class spectrometerData:
    """
    Spectrometer data

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool
        plots all relevant plots if true
        default is False

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title to go on all plots
    spect : numpy.ndarray
        spectrometer current data
    time : numpy.ndarray
        time data

    Subfunctions
    ------------
    plotOfSpect :
        plots spectrometer intensity vs time; returns the axes
    plot :
        plots all relevant data
    """
    def __init__(self,shotno=98030,tStart=_TSTART,tStop=_TSTOP,plot=False):
        self.shotno = shotno
        self.title = "%d, Spectrometer Data" % shotno

        # get data
        data, self.time=mdsData(shotno=shotno,
                                dataAddress=['\HBTEP2::TOP.SENSORS.SPECTROMETER'],
                                tStart=tStart, tStop=tStop)
        self.spect=data[0];

        if plot == True or plot=='all':
            self.plot()

    def plotOfSpect(self):
        """ Plot spectrometer intensity vs time; returns the axes. """
        fig,p1=_plt.subplots()
        p1.plot(self.time*1e3,self.spect,label='Spectrometer Intensity')
        _plot.finalizeSubplot(p1,xlabel='Time (ms)',ylabel='Voltage (V)')
        _plot.finalizeFigure(fig,title=self.title)
        return p1

    def plot(self):
        """ Plot all relevant plots """
        # BUGFIX: previously called `self.plotOfSpect().plot()`; plotOfSpect
        # returns a matplotlib Axes, so the trailing no-arg .plot() was a
        # vestigial no-op from the old _plot.plot API and has been removed.
        self.plotOfSpect()
@_prepShotno
class usbSpectrometerData:
"""
USB spectrometer data
Parameters
----------
shotno : int
shot number of desired data
tStart : float
time (in seconds) to trim data before
default is 0 ms
tStop : float
time (in seconds) to trim data after
default is 10 ms
plot : bool
plots all relevant plots if true
default is False
Attributes
----------
shotno : int
shot number of desired data
title : str
title to go on all plots
spect : numpy.ndarray
spectrometer current data
time : numpy.ndarray
time data
Subfunctions
------------
plotOfSpect : _plotTools.plot
plot of usb sprectrometer data
plotOfStripey : _plotTools.plot
stripey plot of usb spectrometer data
plot :
plots all relevant data
"""
def __init__(self,shotno=98415,plot=False):
self.shotno = shotno
self.title = "%d, USB Spectrometer Data" % shotno
# get data
dataAddressRoot = '\HBTEP2::TOP.SENSORS.USB_SPECTROM:SPECTRUM_'
self.spectrometerArrayNumber=[]
self.spectrometerData=[]
for i in range(1,11):
if i < 10:
dataAddress='%s0%d' % (dataAddressRoot, i)
else:
dataAddress='%s%d' % (dataAddressRoot, i)
try:
data, xData=mdsData(shotno=shotno,
dataAddress=dataAddress)
self.spectrometerArrayNumber.append(i)
self.spectrometerData.append(data[0])
except:# _mds.MdsIpException:
print("usb spectrometer channel %d data does not exist for shot number %d" % (i, shotno))
self.spectrometerArrayNumber=_np.array(self.spectrometerArrayNumber)
# get wavelength
yData, xData=mdsData(shotno=shotno,
dataAddress='\HBTEP2::TOP.SENSORS.USB_SPECTROM:WAVELENGTH')
self.wavelength=yData[0]
# plot if requested
if plot == True:
self.plotOfSpect()
if plot == 'all':
self.plotOfSpect()
self.plotOfStripey().plot()
def plotOfSpect(self):
# generate subplot of data
fig,p1=_plt.subplots(len(self.spectrometerArrayNumber),sharex=True)
for i in range(0,len(self.spectrometerArrayNumber)):
p1[i].plot(self.wavelength,self.spectrometerData[i],label='Time slice %d'%(self.spectrometerArrayNumber[i]))
_plot.finalizeSubplot(p1,xlabel='Wavelength [nm]',ylabel='Intensity')
_plot.finalizeFigure(fig,title=self.title)
return fig
def plotOfStripey(self):
    """Return a contour ("stripey") plot of every channel vs wavelength."""
    stripey = _plot.plot(yLabel='Channel', xLabel='Wavelength [nm]',
                         zLabel='Intensity', plotType='contour',
                         shotno=self.shotno, title=self.title)
    stripey.addTrace(zData=_np.array(self.spectrometerData),
                     xData=self.wavelength,
                     yData=_np.array(self.spectrometerArrayNumber))
    return stripey
def plot(self):
    """ Plot all relevant plots """
    # currently only the stacked per-channel spectrum figure; the stripey
    # plot is available separately via plotOfStripey()
    self.plotOfSpect()
@_prepShotno
class solData:
    """
    SOL (scrape-off layer) tile sensor data

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool or str
        plots all relevant plots if true
        default is False
    numPointsForSmothing : int
        unused.  Retained only for backward compatibility; the offset is
        now removed with a gaussian high-pass filter (see __init__).

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title to go on all plots
    sensorNames : list (of str)
        names of each SOL sensor
    phis : numpy.ndarray
        toroidal location (degrees) of each sensor
    thetas : numpy.ndarray
        poloidal location (degrees) of each sensor
    solData : list (of numpy.ndarray)
        SOL sensor data with offset subtracted
    solDataFit : list (of numpy.ndarray)
        fits that were used to remove the offset
    solDataRaw : list (of numpy.ndarray)
        raw SOL data (prior to offset subtraction)
    time : numpy.ndarray
        time data

    Subfunctions
    ------------
    plotOfSingleSensor : _plotTools.plot
        returns plot of single sensor
    plotOfContour : _plotTools.plot
        returns contour plot of one sensor section
    plot :
        plots all SOL data
    """
    def __init__(self, shotno=98030, tStart=_TSTART, tStop=_TSTOP, plot=False,
                 numPointsForSmothing=201):
        # initialize
        self.shotno = shotno
        self.title = "%d, SOL Data" % shotno
        self.sensorNames = ['LFS01_S1', 'LFS01_S2', 'LFS01_S3', 'LFS01_S4', 'LFS01_S5', 'LFS01_S6', 'LFS01_S7', 'LFS01_S8', 'LFS04_S1', 'LFS04_S2', 'LFS04_S3', 'LFS04_S4', 'LFS08_S1', 'LFS08_S2', 'LFS08_S3', 'LFS08_S4', 'LFS08_S5', 'LFS08_S6', 'LFS08_S7', 'LFS08_S8']
        self.phis = _np.array([234.8, 234.8, 234.8, 234.8, 234.8, 234.8, 234.8, 234.8, 342.8, 342.8, 342.8, 342.8, 126.8, 126.8, 126.8, 126.8, 126.8, 126.8, 126.8, 126.8])
        self.thetas = _np.array([-70. , -50. , -30. , -10. , 10. , 30. , 50. , 70. , -83. , -28.2, 28.2, 83. , -70. , -50. , -30. , -10. , 10. , 30. , 50. , 70. ])
        sensorPathRoot = '\HBTEP2::TOP.SENSORS.SOL:'
        # compile list of sensor addresses for all 20 SOL tiles
        sensorAddress = [sensorPathRoot + '%s' % name
                         for name in self.sensorNames]
        # get raw data from the tree
        self.solDataRaw, self.time = mdsData(shotno=shotno,
                                             dataAddress=sensorAddress,
                                             tStart=tStart, tStop=tStop)
        # remove the slowly-varying offset with a gaussian high-pass
        # filter; timeWidth acts as the cutoff scale
        self.solDataFit = []
        self.solData = []
        for raw in self.solDataRaw:
            filtered, fit = _process.gaussianHighPassFilter(
                raw, self.time, timeWidth=1. / 20000)
            self.solData.append(filtered)
            self.solDataFit.append(fit)
        # optional plotting
        if plot == True:
            self.plot()
        if plot == 'all':
            self.plot('all')

    def plotOfSingleSensor(self, index, plot='all'):
        """
        Returns plot of a single sol sensor.

        Parameters
        ----------
        index : int
            index of the sensor within sensorNames
        plot : str
            'all', 'raw', 'fit', 'smoothed', or 'smoothedOnly' - selects
            which traces are included
        """
        p1 = _plot.plot(yLabel='V', xLabel='time [ms]',
                        subtitle=self.sensorNames[index], title=self.title,
                        shotno=self.shotno)
        if plot == 'all' or plot == 'raw':
            p1.addTrace(yData=self.solDataRaw[index], xData=self.time * 1000,
                        yLegendLabel=self.sensorNames[index] + ' Raw')
        if plot == 'all' or plot == 'fit':
            p1.addTrace(yData=self.solDataFit[index], xData=self.time * 1000,
                        yLegendLabel=self.sensorNames[index] + ' Fit')
        if plot == 'all' or plot == 'smoothed' or plot == 'smoothedOnly':
            p1.addTrace(yData=self.solData[index], xData=self.time * 1000,
                        yLegendLabel=self.sensorNames[index] + ' Without Offset')
        return p1

    def plotOfContour(self, tStart=2e-3, tStop=4e-3, section='LFS01'):
        """
        Contour plot of one SOL sensor section.

        Parameters
        ----------
        tStart : float
            start of the plotted time window (seconds)
        tStop : float
            end of the plotted time window (seconds)
        section : str
            'LFS01', 'LFS04', 'LFS08', or 'all'

        Raises
        ------
        ValueError
            if section is not one of the recognized names (previously this
            fell through to a NameError on an unbound local)
        """
        # map section name to its slice of the 20 sensors
        sectionSlices = {'LFS01': slice(0, 8),
                         'LFS04': slice(8, 12),
                         'LFS08': slice(12, 20),
                         'all': slice(0, 20)}
        if section not in sectionSlices:
            raise ValueError("unrecognized section: %s" % section)
        iStart = _process.findNearest(self.time, tStart)
        iStop = _process.findNearest(self.time, tStop)
        p1 = _plot.plot(title=self.title, subtitle=section + ' SOL sensors',
                        xLabel='Time [ms]', yLabel='phi [rad]', zLabel='A',
                        plotType='contour')
        data = [trace[iStart:iStop]
                for trace in self.solData[sectionSlices[section]]]
        # BUG FIX: the y axis previously was always _np.arange(0,8), which
        # mismatched the 4-sensor LFS04 and 20-sensor 'all' sections; index
        # whatever was actually selected.
        p1.addTrace(self.time[iStart:iStop] * 1e3, _np.arange(len(data)),
                    _np.array(data))
        return p1

    def plot(self, plot='smoothedOnly', includeBP=True):
        """ plots all 20 sol sensor currents on three plots """
        if plot == 'all':
            # one full (raw/fit/smoothed) figure per sensor
            for j in range(0, 20):
                self.plotOfSingleSensor(j, 'all').plot()
        else:
            # merge the sensors of each section onto one plot apiece
            for j in range(0, 8):
                if j == 0:
                    p1 = self.plotOfSingleSensor(j, plot)
                    p3 = self.plotOfSingleSensor(12 + j, plot)
                    p2 = self.plotOfSingleSensor(8 + j, plot)
                else:
                    p1.mergePlots(self.plotOfSingleSensor(j, plot))
                    p3.mergePlots(self.plotOfSingleSensor(12 + j, plot))
                    if j < 4:  # section 4 only has 4 sensors
                        p2.mergePlots(self.plotOfSingleSensor(8 + j, plot))
            p1.subtitle = 'Section 1 SOL Sensors'
            p2.subtitle = 'Section 4 SOL Sensors'
            p3.subtitle = 'Section 8 SOL Sensors'
            return _plot.subPlot([p1, p2, p3], plot=True)
@_prepShotno
class loopVoltageData:
    """
    Loop voltage data

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool
        plots all relevant plots if true
        default is False

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title to go on all plots
    loopVoltage : numpy.ndarray
        loop voltage data
    time : numpy.ndarray
        time data

    Subfunctions
    ------------
    plotOfLoopVoltage : matplotlib figure
        returns plot of loop voltage data
    plot :
        plots loop voltage data
    """
    def __init__(self, shotno=96530, tStart=_TSTART, tStop=_TSTOP, plot=False):
        self.shotno = shotno
        self.title = "%d, Loop voltage data." % shotno
        # get data.  NOTE(review): the node name contains a lowercase 'l'
        # ('LOOP_VOlTAGE'); presumably the tree lookup tolerates this - it
        # is kept byte-for-byte to avoid changing behavior.  TODO confirm.
        data, time = mdsData(shotno=shotno,
                             dataAddress=['\HBTEP2::TOP.SENSORS.LOOP_VOlTAGE'],
                             tStart=tStart, tStop=tStop)
        self.loopVoltage = data[0]
        self.time = time
        if plot == True or plot == 'all':
            self.plot()

    def plotOfLoopVoltage(self):
        """ Return a figure of loop voltage vs time. """
        fig, p1 = _plt.subplots()
        p1.plot(self.time * 1e3, self.loopVoltage)
        _plot.finalizeSubplot(p1, xlabel='Time (ms)', ylabel='Voltage (V)',
                              ylim=[0, 15])
        _plot.finalizeFigure(fig, title=self.title)
        return fig

    def plot(self):
        """ Plot all relevant plots """
        self.plotOfLoopVoltage()
@_prepShotno
class tfData:
    """
    Toroidal field data

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is None (no trimming)
    tStop : float
        time (in seconds) to trim data after
        default is None (no trimming)
    plot : bool
        plots all relevant plots if true
        default is False
    upSample : bool
        up-samples the A14 time base to the CPCI time base if True
        default is True

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title to go on all plots
    tfBankField : numpy.ndarray
        toroidal magnetic field data
    time : numpy.ndarray
        toroidal field time data

    Subfunctions
    ------------
    plotOfTF : _plotTools.plot
        returns plot of TF data
    plot :
        plots all relevant data
    upSample :
        upsamples TF's normal A14 time base (1e-5 s period) to the CPCI time
        base (2e-6 s) using a linear interpolation method

    Notes
    -----
    The TF field data is recorded on an A14 whereas most HBTEP data is
    stored with the CPCI.  Because the A14 has a slower sampling rate, the
    TF data has fewer points than the rest of the HBTEP data, which makes
    comparing data difficult.  Therefore, by default, the data is
    up-sampled to match the CPCI sampling rate.
    """
    def __init__(self, shotno=96530, tStart=None, tStop=None, plot=False,
                 upSample=True):
        self.shotno = shotno
        self.title = "%d, TF Field Data" % shotno
        # get tf data
        data, self.time = mdsData(shotno=shotno,
                                  dataAddress=['\HBTEP2::TOP.SENSORS.TF_PROBE'],
                                  tStart=tStart, tStop=tStop)
        self.tfBankField = data[0]
        if upSample == True:
            self.upSample()
        if plot == True:
            self.plot()

    def upSample(self):
        """ Up-sample the A14 time base onto the CPCI 2e-6 s time base. """
        # time step sizes
        dtUp = 2 * 1e-6  # CPCI sampling period
        dtDown = self.time[-1] - self.time[-2]  # A14 sampling period
        # reconstruct the CPCI time base; the +dtDown-dtUp endpoint keeps
        # the new base from extending past the last A14 sample
        upTime = _np.arange(self.time[0], self.time[-1] + dtDown - dtUp, dtUp)
        # linearly interpolate the field onto the new base
        self.tfBankField = _process.upSampleData(upTime, self.time,
                                                 self.tfBankField)
        self.time = upTime

    def plotOfTF(self, tStart=None, tStop=None):
        """
        Return a plot of the TF bank field.

        Parameters
        ----------
        tStart : float
            optional lower time bound (seconds); None keeps everything
        tStop : float
            optional upper time bound (seconds); None keeps everything
        """
        # BUG FIX: the original accepted tStart/tStop but silently ignored
        # them; honor them now (the None defaults preserve old behavior).
        time = self.time
        field = self.tfBankField
        if tStart is not None or tStop is not None:
            time, trimmed = _trimTime(time, [field],
                                      tStart if tStart is not None else time[0],
                                      tStop if tStop is not None else time[-1])
            field = trimmed[0]
        p1 = _plot.plot(yLabel='T', xLabel='time [ms]',
                        subtitle='TF Bank Field',
                        title=self.title, shotno=self.shotno)
        p1.addTrace(yData=field, xData=time * 1000)
        return p1

    def plot(self):
        """ Plot all relevant plots """
        self.plotOfTF().plot()
@_prepShotno
class capBankData:
    """
    Capacitor bank data.  Currents.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is None (no trimming)
    tStop : float
        time (in seconds) to trim data after
        default is None (no trimming)
    plot : bool
        plots all relevant plots if true
        default is False

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title to go on all plots
    vfBankCurrent : numpy.ndarray
        Vertical Field (VF) bank current data
    vfTime : numpy.ndarray
        VF time data
    ohBankCurrent : numpy.ndarray
        Ohmic Heating (OH) bank current data
    ohTime : numpy.ndarray
        OH time data
    shBankCurrent : numpy.ndarray
        SHaping (SH) bank current data
    shTime : numpy.ndarray
        SH time data

    Subfunctions
    ------------
    plot :
        plots all relevant data

    Notes
    -----
    All 3 banks have their own time array because the data doesn't always
    have the same length.
    tStart and tStop are intentionally left as None because the TF data is
    so incredibly long next to the other data.
    """
    def __init__(self, shotno=96530, tStart=None, tStop=None, plot=False):
        self.shotno = shotno
        self.title = "%d, Capacitor Bank Data" % shotno
        # get vf data
        data, time = mdsData(shotno=shotno,
                             dataAddress=['\HBTEP2::TOP.SENSORS.VF_CURRENT'],
                             tStart=tStart, tStop=tStop)
        self.vfBankCurrent = data[0]
        self.vfTime = time
        # get oh data
        data, time = mdsData(shotno=shotno,
                             dataAddress=['\HBTEP2::TOP.SENSORS.OH_CURRENT'],
                             tStart=tStart, tStop=tStop)
        self.ohBankCurrent = data[0]
        self.ohTime = time
        # get sh data
        data, time = mdsData(shotno=shotno,
                             dataAddress=['\HBTEP2::TOP.SENSORS.SH_CURRENT'],
                             tStart=tStart, tStop=tStop)
        self.shBankCurrent = data[0]
        self.shTime = time
        if plot == True:
            self.plot()

    def plot(self):
        """ Plot the TF field and all three bank currents on one figure. """
        tf = tfData(self.shotno, tStart=None, tStop=None)
        _plt.figure()
        ax1 = _plt.subplot2grid((3, 2), (0, 1), rowspan=3)  # tf
        ax2 = _plt.subplot2grid((3, 2), (0, 0))  # vf
        ax3 = _plt.subplot2grid((3, 2), (1, 0), sharex=ax2)  # oh
        ax4 = _plt.subplot2grid((3, 2), (2, 0), sharex=ax2)  # sh
        fig = _plt.gcf()
        fig.set_size_inches(10, 5)
        # highlighted window (in ms) marking the discharge on the long TF trace
        tStart = -2
        tStop = 20
        ax1.plot(tf.time * 1e3, tf.tfBankField)
        ax1.axvspan(tStart, tStop, color='r', alpha=0.3)
        # BUG FIX: times are plotted in milliseconds (time*1e3) but the
        # axes were labeled 'Time (s)'; label them in ms.
        _plot.finalizeSubplot(ax1, xlabel='Time (ms)', xlim=[-150, 450],
                              ylabel='TF Field (T)')
        ax2.plot(self.vfTime * 1e3, self.vfBankCurrent * 1e-3)
        _plot.finalizeSubplot(ax2, ylabel='VF Current\n(kA)')
        ax3.plot(self.ohTime * 1e3, self.ohBankCurrent * 1e-3)
        _plot.finalizeSubplot(ax3, ylim=[-20, 30], ylabel='OH Current\n(kA)')
        ax4.plot(self.shTime * 1e3, self.shBankCurrent * 1e-3)
        # NOTE(review): ylim=[tStart,tStop] reuses the highlight-window
        # values ([-2,20]) as kA limits for the SH current; this looks
        # accidental but is kept to preserve the existing figure.
        _plot.finalizeSubplot(ax4, ylim=[tStart, tStop], xlabel='Time (ms)',
                              ylabel='SH Current\n(kA)')
        _plot.finalizeFigure(fig, title=self.title)
        return fig
#####################################################
@_prepShotno
class plasmaRadiusData:
    """
    Calculate the major and minor radius.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool
        plots all relevant plots if true
        default is False
    probeRadius : list
        unused; retained for backward compatibility

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title to go on all plots
    majorRadius : numpy.ndarray
        plasma major radius in meters
    minorRadius : numpy.ndarray
        plasma minor radius in meters
    time : numpy.ndarray
        time (in seconds) associated with data

    Subfunctions
    ------------
    plot :
        Plots major and minor radius vs time

    Notes
    -----
    The radius calculations below are pulled from Paul Hughes's
    pauls_MDSplus_toolbox.py code. In that code, he attributes Niko Rath for
    its implementation
    """
    def __init__(self,shotno=95782,tStart=_TSTART,tStop=_TSTOP, plot=False, probeRadius=[]):
        # NOTE(review): probeRadius is never used; it also has a mutable
        # default, which is harmless only because it is never modified.
        self.shotno=shotno;
        self.title = "%d, plasma radius" % shotno
        # quadratic calibration coefficients.
        # Determined by Daisuke during copper plasma calibration
        a=.00643005
        b=-1.10423
        c=48.2567
        # pickup coefficients relating bank currents to the cos-1 signal.
        # Calculated by Jeff, but still has errors
        vf_pickup = 0.0046315133 * -1e-3
        oh_pickup = 7.0723416e-08
        # get vf and oh data
        capBank=capBankData(shotno,tStart=tStart,tStop=tStop)
        vf=capBank.vfBankCurrent
        oh=capBank.ohBankCurrent
        self.time=capBank.vfTime
        # get plasma current
        ip=ipData(shotno,tStart=tStart,tStop=tStop)
        ip=ip.ip*1212.3*1e-9 # ip gain
        # get cos-1 raw data
        cos1=cos1RogowskiData(shotno,tStart=tStart,tStop=tStop+2e-06) # note that the cumtrapz function below loses a data point. by adding 2e-06 to the time, i start with an additional point that it's ok to lose
        # subtract offset
        cos1Raw=cos1.cos1Raw-cos1.cos1RawOffset
        # integrate cos-1 raw
        from scipy.integrate import cumtrapz
        # NOTE(review): the trailing *.004571 term presumably accounts for
        # the integrator droop/self-term - TODO confirm
        cos1 = cumtrapz(cos1Raw,cos1.time) + cos1Raw[:-1]*.004571
        # r-major calculations: remove bank pickup, then invert the
        # quadratic calibration with the quadratic formula
        pickup = vf * vf_pickup + oh * oh_pickup
        ratio = ip / (cos1 - pickup)
        arg = b**2 - 4 * a * (c-ratio)
        arg[arg < 0] = 0  # clip so the sqrt below stays real
        r_major = (-b + _np.sqrt(arg)) / (2*a)
        self.majorRadius = r_major / 100 # Convert to meters
        # self.majorRadius -= 0.45/100
        # r-minor calculations: 0.15 m unless the plasma is limited on the
        # outboard or inboard side
        self.minorRadius=_np.ones(len(self.majorRadius))*0.15
        outwardLimitedIndices=self.majorRadius > (0.92)
        self.minorRadius[outwardLimitedIndices] = 1.07 - self.majorRadius[outwardLimitedIndices] # Outboard limited
        inwardLimitedIndices=self.majorRadius < (0.92 - 0.01704)
        self.minorRadius[inwardLimitedIndices] = self.majorRadius[inwardLimitedIndices] - 0.75296 # inward limited
        if plot==True:
            self.plot();

    def plot(self,plotAll=False):
        """ Two stacked subplots: major radius (cm) and minor radius (cm). """
        fig,p1=_plt.subplots(2,sharex=True)
        p1[0].plot(self.time*1e3,self.majorRadius*1e2,label='Major Radius')
        p1[1].plot(self.time*1e3,self.minorRadius*1e2,label='Minor Radius')
        _plot.finalizeSubplot(p1[0],xlabel='Time (ms)',ylabel='Major Radius (cm)',ylim=[89,95])
        _plot.finalizeSubplot(p1[1],ylabel='Minor Radius (cm)',ylim=[10,16])
        _plot.finalizeFigure(fig,title=self.title)
        return p1
@_prepShotno
class qStarData:
    """
    Gets q-star (edge safety factor) data

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool
        plots all relevant plots if true
        default is False

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title to go on all plots
    qStar : numpy.ndarray
        q* data
    qStarCorrected : numpy.ndarray
        q* scaled by an empirical 15% correction factor
    time : numpy.ndarray
        time data

    Subfunctions
    ------------
    plot :
        Plots all relevant plots
    """
    def __init__(self, shotno=96496, tStart=_TSTART, tStop=_TSTOP, plot=False):
        self.shotno = shotno
        self.title = r"%d, q$^*$ Data" % shotno
        # plasma current and radius are both needed for the q* formula
        ip = ipData(shotno, tStart=tStart, tStop=tStop)
        plasmaRadius = plasmaRadiusData(shotno, tStart=tStart, tStop=tStop)
        tfProbeData, tfProbeTime = mdsData(shotno,
                                           dataAddress=['\HBTEP2::TOP.SENSORS.TF_PROBE'],
                                           tStart=tStart, tStop=tStop)
        # upsample tf probe data (it's recorded on the slower A14)
        data = _process.upSampleData(ip.time, tfProbeTime, tfProbeData[0])
        # scale the probe field to the plasma major radius (the 1.23 factor
        # is presumably the probe's radial location in meters - TODO confirm)
        tfProbeData = data * 1.23 / plasmaRadius.majorRadius
        # calc q star
        self.qStar = plasmaRadius.minorRadius ** 2 * tfProbeData / (2e-7 * ip.ip * plasmaRadius.majorRadius)
        # 15% correction factor; Jeff believes our q* measurement might be
        # about 15% to 20% too low.
        self.qStarCorrected = self.qStar * (1.15)
        self.time = ip.time
        if plot == True:
            self.plot()

    def plot(self):
        """
        Plot all relevant plots

        Returns
        -------
        matplotlib figure
            the created figure (now returned, consistent with the other
            plot helpers in this module)
        """
        fig, p1 = _plt.subplots()
        p1.plot(self.time * 1e3, self.qStar, label=r'q$^*$')
        p1.plot(self.time * 1e3, self.qStarCorrected, label=r'q$^* * 1.15$')
        _plot.finalizeSubplot(p1, xlabel='Time (ms)', ylabel=r'q$^*$',
                              ylim=[1, 5])
        _plot.finalizeFigure(fig, title=self.title)
        return fig
###############################################################################
### sensor black list data. presently not used anywhere
def checkBlackList_depricated(inData, inName):
    # TODO(John) this needs an overhaul
    """
    Takes in data and sensor name.  Checks sensor name against blacklist.
    If bad sensor, return all zeros.
    Otherwise, returns original data.

    Parameters
    ----------
    inData : numpy.ndarray
        sensor data
    inName : str
        sensor name to test against _SENSORBLACKLIST

    Returns
    -------
    numpy.ndarray
        zeros (same size as inData) if the sensor is blacklisted,
        otherwise inData unchanged
    """
    # BUG FIX: the original condition `inName in _SENSORBLACKLIST==True`
    # is a chained comparison, evaluating as
    # `(inName in _SENSORBLACKLIST) and (_SENSORBLACKLIST == True)`,
    # which is always False - blacklisted sensors were never zeroed.
    if inName in _SENSORBLACKLIST:
        outData = _np.zeros(inData.size)
    else:
        outData = inData
    return outData
###############################################################################
### Processed data from HBTEP
@_prepShotno
class nModeData:
    """
    This function performs n-mode (toroidal) mode analysis on the plasma.
    Provides mode amplitude, phase, and frequency

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool or str
        plots all relevant plots if true
        default is False
        True - plots relevant data
        'all' - plots all data
    nModeSensor : str
        sensors to be used to calculate the modes
        'FB' - feedback sensors
        'TA' - toroidal array sensors
    method : str
        method to calculate mode analysis
        'leastSquares' - performs a matrix least squares analysis
    phaseFilter : str
        filter applied to the phase before computing frequency
        'gaussian' - gaussian low-pass filter (only supported option)
    phaseFilterTimeConstant : float
        time constant (in seconds) of the phase filter

    Attributes
    ----------
    shotno : int
        data shot number
    title : str
        title to be placed on each plot
    nModeSensor : str
        sensors to be used to calculate the modes
    n1Amp : numpy.ndarray
        n=1 mode amplitude data
    n2Amp : numpy.ndarray
        n=2 mode amplitude data
    n1Phase : numpy.ndarray
        filtered n=1 mode phase data
    n1PhaseRaw : numpy.ndarray
        raw n=1 mode phase data
    n1Freq : numpy.ndarray
        filtered n=1 mode frequency data

    Subfunctions
    ------------
    plot :
        plots relevant plots
    Bn1 :
        Generates a pretend B_{n=1} signal at the toroidal location, phi0
        Not presently in use
    plotOfSlice :
        plots the least-squares fit against the raw sensor data at a
        single time slice

    Notes
    -----
    The convolution filters used with the phase and frequency "mess up" the
    last tenth of a millisecond of data
    """
    def Bn1(self, phi0=0):
        """
        Generates a pretend B_{n=1} signal at the toroidal location, phi0
        Not presently in use
        """
        # BUG FIX: the original referenced self.x and self.phi0, neither of
        # which exists (the fit coefficients are stored as self._x and the
        # phi0 argument was ignored), so any call raised AttributeError.
        return self._x[1, :] * _np.sin(phi0) + self._x[2, :] * _np.cos(phi0)

    def __init__(self, shotno=96530, tStart=_TSTART, tStop=_TSTOP, plot=False,
                 nModeSensor='FB', method='leastSquares', phaseFilter='gaussian',
                 phaseFilterTimeConstant=1.0/100e3):
        self.shotno = shotno
        self.title = '%d. %s sensor. n mode analysis' % (shotno, nModeSensor)
        self.nModeSensor = nModeSensor

        # load data from requested sensor array
        if nModeSensor == 'TA':
            ## load TA data
            temp = taData(self.shotno, tStart, tStop + 0.5e-3)  # asking for an extra half millisecond (see Notes above)
            data = temp.taPolData
            self.time = temp.taPolTime
            phi = temp.phi
            [n, m] = _np.shape(data)
        elif nModeSensor == 'FB' or nModeSensor == 'FB_S4':
            ## load FB data
            array = 3  # the 4th array (4-1=3) is the top most FB array and has no broken sensors
            temp = fbData(self.shotno, tStart=tStart, tStop=tStop + 0.5e-3)  # asking for an extra half millisecond (see Notes above)
            data = temp.fbPolData[array]  ## top toroidal array = 0, bottom = 3
            self.time = temp.fbPolTime
            phi = _np.array(temp.phi[array])
            self._theta = _np.array(temp.theta[array])
            [n, m] = _np.shape(data)
        else:
            # previously an unrecognized sensor fell through and raised a
            # NameError below; exit explicitly instead, consistent with the
            # invalid-method handling
            _sys.exit("Invalid nModeSensor requested.")
        self._data = data
        self._phi = phi

        if method == 'leastSquares':
            ## Construct A matrix and its inversion
            # columns: [offset, sin(phi), cos(phi), sin(2*phi), cos(2*phi)]
            A = _np.zeros((n, 5))
            A[:, 0] = _np.ones(n)
            A[:, 1] = _np.sin(phi)
            A[:, 2] = _np.cos(phi)
            A[:, 3] = _np.sin(2 * phi)
            A[:, 4] = _np.cos(2 * phi)
            Ainv = _np.linalg.pinv(A)
            ## Solve for coefficients, x, for every time step and assign values to appropriate arrays
            x = _np.zeros([5, m])
            self.n1Amp = _np.zeros(m)
            self.n1PhaseRaw = _np.zeros(m)
            self.n2Amp = _np.zeros(m)
            # TODO(John): remove for loop and convert into all matrix math
            # Should simplify code and make it run faster
            for j in range(0, m):
                y = _np.zeros(n)
                for i in range(0, n):
                    y[i] = data[i][j] * 1e4  # x1e4: presumably T -> G; TODO confirm
                x[:, j] = Ainv.dot(y)
                self.n1Amp[j] = _np.sqrt(x[1, j] ** 2 + x[2, j] ** 2)
                self.n2Amp[j] = _np.sqrt(x[3, j] ** 2 + x[4, j] ** 2)
                self.n1PhaseRaw[j] = _np.arctan2(x[1, j], x[2, j])
            self._x = x
            self.n1PhaseRaw *= -1  # for some reason, the slope of phase had the wrong sign. this corrects that.
        else:
            _sys.exit("Invalid mode analysis method requested.")

        # filter phase. (this is necessary to get a clean frequency)
        self.n1Phase = _np.zeros(len(self.n1PhaseRaw))
        if phaseFilter == 'gaussian':
            self.n1Phase = _process.wrapPhase(
                _process.gaussianLowPassFilter(
                    _process.unwrapPhase(self.n1PhaseRaw),
                    self.time,
                    timeWidth=phaseFilterTimeConstant))
        else:
            _sys.exit("Invalid phase filter requested.")

        ## Calculate frequency (in Hz) using second order deriv
        self.n1Freq = _np.gradient(_process.unwrapPhase(self.n1Phase)) / _np.gradient(self.time) / (2 * _np.pi)

        # trim off extra half millisecond (see Notes)
        self.time, temp = _trimTime(self.time,
                                    [self.n1Amp, self.n2Amp, self.n1Phase,
                                     self.n1PhaseRaw, self.n1Freq],
                                    tStart, tStop)
        self.n1Amp = temp[0]
        self.n2Amp = temp[1]
        self.n1Phase = temp[2]
        self.n1PhaseRaw = temp[3]
        self.n1Freq = temp[4]

        ## plot data
        if plot == True:
            self.plot()
        elif plot == 'all':
            self.plotOfSlice(index=int(m / 4)).plot()
            self.plotOfSlice(index=int(m / 2)).plot()
            self.plot(True)

    def plot(self, plotAll=False):
        """
        plots and returns a subplot of n=1 mode amplitude, phase, and frequency

        Parameters
        ----------
        plotAll : bool
            if True, also plots the n=2 amplitude and the raw (unfiltered)
            n=1 phase.  (The docstring previously documented a nonexistent
            `includeRaw` parameter.)
        """
        fig, p1 = _plt.subplots(3, sharex=True)
        p1[0].plot(self.time * 1e3, self.n1Amp, label=r'$\left| \delta B\right|_{n=1}$')
        p1[1].plot(self.time * 1e3, self.n1Phase, label=r'$\angle \delta B_{n=1}$', marker='.', markersize=2, linestyle='')
        p1[2].plot(self.time * 1e3, self.n1Freq * 1e-3, label='Frequency (kHz)')
        if plotAll == True:
            p1[0].plot(self.time * 1e3, self.n2Amp, label=r'$\left| \delta B\right|_{n=2}$')
            p1[1].plot(self.time * 1e3, self.n1PhaseRaw, label=r'$\angle \delta B_{n=1}$ unfiltered', marker='.', markersize=2, linestyle='')
        _plot.finalizeSubplot(p1[0], ylabel='Mode amplitude (G)', ylim=[-0.5, 12])
        _plot.finalizeSubplot(p1[1], ylabel='Mode phase (rad)', ylim=[-_np.pi, _np.pi])
        _plot.finalizeSubplot(p1[2], ylabel='Frequency (kHz)', xlabel='Time (ms)')
        _plot.finalizeFigure(fig, self.title)

    def plotOfSlice(self, index=0):
        """
        Plots fit data for a single time value

        Parameters
        ----------
        index : int
            time index at which to show the raw data and the fits
        """
        j = index
        [n, m] = _np.shape(self._data)
        y = _np.zeros(n)
        for i in range(0, n):
            y[i] = self._data[i][j] * 1e4  # x1e4: presumably T -> G; TODO confirm
        p1 = _plot.plot(shotno=[self.shotno],
                        title=self.title + ', t=' + str(self.time[j] * 1000) + 'ms.')
        phi = _np.linspace(0, _np.pi * 2, 100)
        n1Fit = self._x[0, j] + self._x[1, j] * _np.sin(phi) + self._x[2, j] * _np.cos(phi)
        n2Fit = self._x[0, j] + self._x[3, j] * _np.sin(2 * phi) + self._x[4, j] * _np.cos(2 * phi)
        fitTotal = self._x[0, j] + self._x[1, j] * _np.sin(phi) + self._x[2, j] * _np.cos(phi) + self._x[3, j] * _np.sin(2 * phi) + self._x[4, j] * _np.cos(2 * phi)
        # plot
        p1.addTrace(yData=y, xData=self._phi,
                    marker='x', linestyle='', yLegendLabel='raw')
        p1.addTrace(yData=n1Fit, xData=phi,
                    yLegendLabel='n=1')
        p1.addTrace(yData=n2Fit, xData=phi,
                    yLegendLabel='n=2')
        p1.addTrace(yData=fitTotal, xData=phi,
                    yLegendLabel='Superposition')
        return p1
@_prepShotno
class mModeData:
    """
    This function performs a least squares fit to a poloidal array of sensors and analyzes m=2,3 and 4 modes.  Mode amplitude, phase, and phase velocity.
    In addtion, this code generates a perturbed B_pol(t) measurement as observed by a sensor at location, theta0
    Function uses either 32 poloidal PA1 or PA2 sensors

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool or str
        plots all relevant plots if true
        default is False
    theta0 : float
        poloidal angle of the pretend sensor; currently unused in the
        visible implementation
    sensor : str
        'PA1' or 'PA2' - which poloidal array to analyze
    phaseFilter : str
        'gaussian' - low-pass filters the phases before computing
        frequency; any other value leaves the phases unfiltered

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title to go on all plots
    time : numpy.ndarray
        time data
    m0Amp, m1Amp, ..., m5Amp : numpy.ndarray
        mode amplitude for m=0..5 (see NOTE on m0Amp in __init__)
    m1PhaseRaw, ..., m5PhaseRaw : numpy.ndarray
        raw (unfiltered) mode phase for m=1..5
    m1Phase, ..., m5Phase : numpy.ndarray
        filtered mode phase for m=1..5
    m1Freq, ..., m5Freq : numpy.ndarray
        mode frequency (Hz) for m=1..5
    """
    def __init__(self,shotno=96530,tStart=_TSTART,tStop=_TSTOP,plot=False,theta0=0,sensor='PA1',phaseFilter = 'gaussian'):
        self.shotno=shotno
        self.title= '%d. sensor = %s. m mode analysis' % (shotno, sensor)
        # load the requested poloidal sensor array
        if sensor=='PA1':
            data=paData(self.shotno,tStart=tStart,tStop=tStop);
            self._data=data.pa1Data
            self.time=data.pa1Time
            self._theta=data.thetaPA1
            [n,m]=_np.shape(self._data)
        if sensor=='PA2':
            data=paData(self.shotno,tStart=tStart,tStop=tStop);
            self._data=data.pa2Data
            self.time=data.pa2Time
            self._theta=data.thetaPA2
            [n,m]=_np.shape(self._data)
        ## Construct A matrix and its inversion
        # columns: [offset, then sin(k*theta), cos(k*theta) for k = 1..5]
        A=_np.zeros((n,11))
        A[:,0]=_np.ones(n);
        A[:,1]=_np.sin(self._theta)
        A[:,2]=_np.cos(self._theta)
        A[:,3]=_np.sin(2*self._theta)
        A[:,4]=_np.cos(2*self._theta)
        A[:,5]=_np.sin(3*self._theta)
        A[:,6]=_np.cos(3*self._theta)
        A[:,7]=_np.sin(4*self._theta)
        A[:,8]=_np.cos(4*self._theta)
        A[:,9]=_np.sin(5*self._theta)
        A[:,10]=_np.cos(5*self._theta)
        Ainv=_np.linalg.pinv(A)
        ## Solve for coefficients, x, for every time step and assign values to appropriate arrays
        self._x=_np.zeros([11,m]);
        # self.m0Offset=_np.zeros(m)
        self.m0Amp=_np.zeros(m)
        self.m1Amp=_np.zeros(m)
        self.m1PhaseRaw=_np.zeros(m)
        self.m2Amp=_np.zeros(m)
        self.m2PhaseRaw=_np.zeros(m)
        self.m3Amp=_np.zeros(m)
        self.m3PhaseRaw=_np.zeros(m)
        self.m4Amp=_np.zeros(m)
        self.m4PhaseRaw=_np.zeros(m)
        self.m5Amp=_np.zeros(m)
        self.m5PhaseRaw=_np.zeros(m)
        for j in range(0,m):
            y=_np.zeros(n);
            for i in range(0,n):
                y[i]=self._data[i][j]*1e4  # x1e4: presumably T -> G conversion - TODO confirm
            self._x[:,j]=Ainv.dot(y)
            # self.m0Offset=self.x[0,j]
            # NOTE(review): m0Amp stores the SQUARE of the offset
            # coefficient, unlike the other amplitudes which are
            # magnitudes - confirm whether abs() was intended
            self.m0Amp[j]=self._x[0,j]**2
            self.m1Amp[j]=_np.sqrt(self._x[1,j]**2+self._x[2,j]**2)
            self.m2Amp[j]=_np.sqrt(self._x[3,j]**2+self._x[4,j]**2)
            self.m3Amp[j]=_np.sqrt(self._x[5,j]**2+self._x[6,j]**2)
            self.m4Amp[j]=_np.sqrt(self._x[7,j]**2+self._x[8,j]**2)
            self.m5Amp[j]=_np.sqrt(self._x[9,j]**2+self._x[10,j]**2)
            self.m1PhaseRaw[j]=_np.arctan2(self._x[1,j],self._x[2,j])
            self.m2PhaseRaw[j]=_np.arctan2(self._x[3,j],self._x[4,j])
            self.m3PhaseRaw[j]=_np.arctan2(self._x[5,j],self._x[6,j])
            self.m4PhaseRaw[j]=_np.arctan2(self._x[7,j],self._x[8,j])
            self.m5PhaseRaw[j]=_np.arctan2(self._x[9,j],self._x[10,j])
        # low-pass filter the phases (gives a cleaner frequency below)
        if phaseFilter == 'gaussian':
            self.m1Phase=_process.wrapPhase(
                _process.gaussianLowPassFilter(
                    _process.unwrapPhase(self.m1PhaseRaw),
                    self.time,timeWidth=1./20e3))
            self.m2Phase=_process.wrapPhase(
                _process.gaussianLowPassFilter(
                    _process.unwrapPhase(self.m2PhaseRaw),
                    self.time,timeWidth=1./20e3))
            self.m3Phase=_process.wrapPhase(
                _process.gaussianLowPassFilter(
                    _process.unwrapPhase(self.m3PhaseRaw),
                    self.time,timeWidth=1./20e3))
            self.m4Phase=_process.wrapPhase(
                _process.gaussianLowPassFilter(
                    _process.unwrapPhase(self.m4PhaseRaw),
                    self.time,timeWidth=1./20e3))
            self.m5Phase=_process.wrapPhase(
                _process.gaussianLowPassFilter(
                    _process.unwrapPhase(self.m5PhaseRaw),
                    self.time,timeWidth=1./20e3))
        else:
            # no filtering requested: copy the raw phases
            self.m1Phase=_np.zeros(len(self.m1PhaseRaw))
            self.m1Phase[:]=self.m1PhaseRaw[:]
            self.m2Phase=_np.zeros(len(self.m2PhaseRaw))
            self.m2Phase[:]=self.m2PhaseRaw[:]
            self.m3Phase=_np.zeros(len(self.m3PhaseRaw))
            self.m3Phase[:]=self.m3PhaseRaw[:]
            self.m4Phase=_np.zeros(len(self.m4PhaseRaw))
            self.m4Phase[:]=self.m4PhaseRaw[:]
            self.m5Phase=_np.zeros(len(self.m5PhaseRaw))
            self.m5Phase[:]=self.m5PhaseRaw[:]
        # frequency (Hz) from the gradient of each unwrapped phase
        self.m1Freq=_np.gradient(_process.unwrapPhase(self.m1Phase))/_np.gradient(self.time)/(2*_np.pi)
        self.m2Freq=_np.gradient(_process.unwrapPhase(self.m2Phase))/_np.gradient(self.time)/(2*_np.pi)
        self.m3Freq=_np.gradient(_process.unwrapPhase(self.m3Phase))/_np.gradient(self.time)/(2*_np.pi)
        self.m4Freq=_np.gradient(_process.unwrapPhase(self.m4Phase))/_np.gradient(self.time)/(2*_np.pi)
        self.m5Freq=_np.gradient(_process.unwrapPhase(self.m5Phase))/_np.gradient(self.time)/(2*_np.pi)
        if plot == True:
            self.plot()
        elif plot == 'all':
            self.plotOfSlice(index=int(m/4)).plot();
            self.plotOfSlice(index=int(m/2)).plot();
            self.plotOfAmplitudes().plot()

    def plotOfAmplitudes(self):
        """ Return a plot of the m=1..5 mode amplitudes vs time. """
        p1=_plot.plot(title=self.title,shotno=self.shotno,
                      xLabel='ms',yLabel='G',yLim=[0,20])
        p1.addTrace(yData=self.m1Amp,xData=self.time*1000,
                    yLegendLabel=r'$|B_{pol, m=1}|$')
        p1.addTrace(yData=self.m2Amp,xData=self.time*1000,
                    yLegendLabel=r'$|B_{pol, m=2}|$')
        p1.addTrace(yData=self.m3Amp,xData=self.time*1000,
                    yLegendLabel=r'$|B_{pol, m=3}|$')
        p1.addTrace(yData=self.m4Amp,xData=self.time*1000,
                    yLegendLabel=r'$|B_{pol, m=4}|$')
        p1.addTrace(yData=self.m5Amp,xData=self.time*1000,
                    yLegendLabel=r'$|B_{pol, m=5}|$')
        return p1

    def plotOfPhases(self):
        """ Return a plot of the m=1..5 (filtered) mode phases vs time. """
        p1=_plot.plot(title=self.title,shotno=self.shotno,
                      xLabel='ms',yLabel='rad')
        p1.addTrace(yData=self.m1Phase,xData=self.time*1000,
                    yLegendLabel=r'$|B_{pol, m=1}|$',linestyle='',marker='.')
        p1.addTrace(yData=self.m2Phase,xData=self.time*1000,
                    yLegendLabel=r'$|B_{pol, m=2}|$',linestyle='',marker='.')
        p1.addTrace(yData=self.m3Phase,xData=self.time*1000,
                    yLegendLabel=r'$|B_{pol, m=3}|$',linestyle='',marker='.')
        p1.addTrace(yData=self.m4Phase,xData=self.time*1000,
                    yLegendLabel=r'$|B_{pol, m=4}|$',linestyle='',marker='.')
        p1.addTrace(yData=self.m5Phase,xData=self.time*1000,
                    yLegendLabel=r'$|B_{pol, m=5}|$',linestyle='',marker='.')
        return p1

    def plotOfFreqs(self):
        """ Return a plot of the m=1..5 mode frequencies (kHz) vs time. """
        p1=_plot.plot(title=self.title,shotno=self.shotno,
                      xLabel='ms',yLabel='kHz',yLim=[-20,20])
        p1.addTrace(yData=self.m1Freq*1e-3,xData=self.time*1000,
                    yLegendLabel=r'$|B_{pol, m=1}|$')
        p1.addTrace(yData=self.m2Freq*1e-3,xData=self.time*1000,
                    yLegendLabel=r'$|B_{pol, m=2}|$')
        p1.addTrace(yData=self.m3Freq*1e-3,xData=self.time*1000,
                    yLegendLabel=r'$|B_{pol, m=3}|$')
        p1.addTrace(yData=self.m4Freq*1e-3,xData=self.time*1000,
                    yLegendLabel=r'$|B_{pol, m=4}|$')
        p1.addTrace(yData=self.m5Freq*1e-3,xData=self.time*1000,
                    yLegendLabel=r'$|B_{pol, m=5}|$')
        return p1

    def plot(self):
        """ Plot amplitudes, phases, and frequencies as stacked subplots. """
        sp1=_plot.subPlot([self.plotOfAmplitudes(),self.plotOfPhases(),
                           self.plotOfFreqs()],plot=False)
        sp1.plot()
        return sp1

    # TODO: add other plots

    def plotOfSlice(self,index=0):
        """
        Plot fits for a single instant in time

        Parameters
        ----------
        index : int
            time index at which to show the raw data and the fits
        """
        j=index;
        [n,m]=_np.shape(self._data)
        y=_np.zeros(n);
        for i in range(0,n):
            y[i]=self._data[i][j]*1e4  # x1e4: presumably T -> G conversion - TODO confirm
        p1=_plot.plot(title='t=%.3f ms. %s ' % (self.time[j]*1000, self.title),
                      shotno=self.shotno)
        theta=_np.linspace(self._theta[0],self._theta[-1],100)
        # m0Fit=self._x[0,j]
        m1Fit=self._x[0,j]+self._x[1,j]*_np.sin(theta)+self._x[2,j]*_np.cos(theta)
        m2Fit=self._x[0,j]+self._x[3,j]*_np.sin(2*theta)+self._x[4,j]*_np.cos(2*theta)
        m3Fit=self._x[0,j]+self._x[5,j]*_np.sin(3*theta)+self._x[6,j]*_np.cos(3*theta)
        m4Fit=self._x[0,j]+self._x[7,j]*_np.sin(4*theta)+self._x[8,j]*_np.cos(4*theta)
        m5Fit=self._x[0,j]+self._x[9,j]*_np.sin(5*theta)+self._x[10,j]*_np.cos(5*theta)
        fitTotal=(-4.)*self._x[0,j]+m1Fit+m2Fit+m3Fit+m4Fit+m5Fit # the -4 corrects for the 4 extra offsets added from the previous 5 fits
        p1.addTrace(yData=y,xData=self._theta,
                    linestyle='',marker='.',yLegendLabel='raw')
        p1.addTrace(yData=m1Fit,xData=theta,
                    yLegendLabel='m=1')
        p1.addTrace(yData=m2Fit,xData=theta,
                    yLegendLabel='m=2')
        p1.addTrace(yData=m3Fit,xData=theta,
                    yLegendLabel='m=3')
        p1.addTrace(yData=m4Fit,xData=theta,
                    yLegendLabel='m=4')
        p1.addTrace(yData=m5Fit,xData=theta,
                    yLegendLabel='m=5')
        p1.addTrace(yData=fitTotal,xData=theta,
                    yLegendLabel='m=1-5')
        return p1
@_prepShotno
class euvData:
    """
    Pull EUV array data, similar to SXR format

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool
        default is False
        True - plots the stripey (contour) plot of all channels
        'all' - additionally plots each channel on its own figure

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title to put at the top of figures
    data : list (of numpy.ndarray)
        one raw-signal array per channel (4 detector banks x 16 channels)
    time : numpy.ndarray
        time base shared by all channels

    Subfunctions
    ------------
    plotAll :
        plots every channel, one figure each
    plotOfEUVStripey :
        returns a stripey plot of the sensors
    plotOfOneChannel :
        returns a plot of a single channel based on the provided index, i
    """
    def __init__(self,shotno=101393,tStart=_TSTART,tStop=_TSTOP,plot=False):
        self.shotno=shotno
        self.title = '%d, EUV Array' % shotno
        # Poloidal detector banks, named by angle in degrees.
        detectors = [0,25,90,270]
        # Tree paths look like: .sensors.euv.pol.det000:channel_01:raw
        mdsAddressRaw = []
        mdsAddressR = []
        mdsAddressZ = []
        mdsAddressGain = []
        for det in detectors:
            for i in range(16):
                address = '\HBTEP2::TOP.SENSORS.EUV.POL.DET%03d' %det + '.CHANNEL_%02d' %(i+1)
                mdsAddressRaw.append(address + ':RAW')
                mdsAddressR.append(address+':R')
                mdsAddressZ.append(address+':Z')
                mdsAddressGain.append(address+':GAIN')
        # Pull data.  Only the raw signals are trimmed to [tStart, tStop];
        # R/Z/Gain are static values.
        # NOTE(review): self.R/Z/Gain keep whatever mdsData returns when no
        # time window is given — confirm it is data-only (no time axis).
        self.data, self.time = mdsData(shotno,mdsAddressRaw,tStart,tStop)
        self.R = mdsData(shotno,mdsAddressR)
        self.Z = mdsData(shotno,mdsAddressZ)
        self.Gain = mdsData(shotno,mdsAddressGain)
        # Aperture locations (presumably meters) and slit extents for each
        # detector bank — TODO confirm units.
        self.det_ap_R = _np.array([1.160508, 1.090508, 0.907057, 0.929746])
        self.det_ap_Z = _np.array([0.000000, 0.099975, 0.166773,-0.173440])
        self.det_ap_Pol = _np.array([0.000635,.000635,.000635,.000635])
        self.det_ap_Tor = _np.array([0.000635,.0254,.0254,.0244])
        # plot
        if plot==True:
            self.plotOfEUVStripey(tStart,tStop).plot()
        elif plot=='all':
            self.plotAll()
            self.plotOfEUVStripey(tStart,tStop).plot()

    def plotOfEUVStripey(self,tStart=1e-3,tStop=10e-3):
        """Return a contour ('stripey') plot of all channels vs. time."""
        iStart=_process.findNearest(self.time,tStart)
        iStop=_process.findNearest(self.time,tStop)
        p1=_plot.plot(title=self.title,subtitle='EUV Fan Array',
                      xLabel='Time [ms]', yLabel='Sensor Number',zLabel='a.u.',
                      plotType='contour',colorMap=_plot._red_green_colormap(),
                      centerColorMapAroundZero=True)
        # BUG FIX: the original did `data=self.data` (an alias) and trimmed
        # each channel in place, so every call permanently shortened the
        # stored signals.  Build trimmed copies instead.
        data=[channel[iStart:iStop] for channel in self.data]
        # 64 = 4 detector banks x 16 channels, matching __init__.
        p1.addTrace(self.time[iStart:iStop]*1e3,_np.arange(64),
                    _np.array(data))
        return p1

    def plotOfOneChannel(self, i=0):
        """Return a plot of one EUV channel, selected by index ``i``."""
        p1=_plot.plot(xLabel='time [ms]',yLabel=r'a.u.',title=self.title,
                      shotno=[self.shotno],subtitle='%d' %i);
        p1.addTrace(yData=self.data[i],xData=self.time*1000,
                    yLegendLabel=str(i))
        return p1

    def plotAll(self):
        """Plot every channel on its own figure; return them as a subPlot."""
        sp1=[]
        # (Removed the redundant `count` variable — it always equaled i.)
        for i in range(0,len(self.data)):
            newPlot=self.plotOfOneChannel(i)
            newPlot.subtitle=str(i)
            newPlot.yLegendLabel=[]
            newPlot.plot()
            sp1.append(newPlot)
        sp1[0].title=self.title
        sp1=_plot.subPlot(sp1,plot=False)
        return sp1
#@_prepShotno
#def _hbtPlot(shotnos=_np.array([98147, 98148]),plot=True,bp=False,tZoom=[2e-3,4e-3],saveFig=True):
# """
# This function acts similarly to hbtplot.py
# Still under development
# """
# try:
# len(shotnos)
# except:
# shotnos=_np.array([shotnos])
#
# for i in range(0,len(shotnos)):
#
# shotno=shotnos[i]
# print(str(shotno))
#
# subplots=[]
#
#
# ip=ipData(shotno)
# subplots.append(ip.plotOfIP())
#
# q=qStarData(shotno)
# subplots.append(q.plotOfQStar())
#
# rad=plasmaRadiusData(shotno)
# subplots.append(rad.plotOfMajorRadius())
#
# pol=paData(shotno)
# subplots.append(pol.plotOfPA1Stripey(tZoom[0],tZoom[1]))
#
# mode=nModeData(shotno)
# subplots.append(mode.plotOfN1Amp())
# n1freq=mode.plotOfN1Freq()
# n1freq.yLim=[-10,20]
# subplots.append(n1freq)
#
# if bp==True:
# bpIn=bpData(shotno)
# bpIn.bps9Current*=-1
# bpIn.bps9Voltage*=-1
# subplots.append(bpIn.plotOfBPS9Voltage())
# subplots.append(bpIn.plotOfBPS9Current())
#
# _plot.subPlot(subplots)
#
# if saveFig==True:
# p=_plt.gcf()
# p.savefig(str(shotno)+'.png')
# _plt.close('all')
#
class xrayData:
    """
    X-ray sensor data access.

    The sensor is currently connected at devices.north_rack:cpci:input_96.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before; default is 0 ms
    tStop : float
        time (in seconds) to trim data after; default is 10 ms
    plot : bool
        plots all relevant plots if true; default is False
    verbose : bool
        unused; kept for interface compatibility

    Attributes
    ----------
    shotno : int
        shot number of desired data
    title : str
        title to go on all plots
    xray : numpy.ndarray
        x-ray intensity signal
    time : numpy.ndarray
        time data

    Subfunctions
    ------------
    plotOfXray :
        returns the plot of x-ray intensity vs time
    plot :
        Plots all relevant plots
    """
    def __init__(self,shotno=96530,tStart=_TSTART,tStop=_TSTOP,plot=False,verbose=False):
        self.shotno = shotno
        self.title = "%d, X-ray Data" % shotno

        # Single channel pulled from the north-rack digitizer.
        signals, timebase = mdsData(
            shotno=shotno,
            dataAddress=['\HBTEP2::TOP.DEVICES.NORTH_RACK:CPCI:INPUT_96 '],
            tStart=tStart, tStop=tStop)
        self.xray = signals[0]
        self.time = timebase

        if plot in (True, 'all'):
            self.plot()

    def plotOfXray(self):
        """Return an axes showing x-ray intensity vs. time (ms)."""
        fig, ax = _plt.subplots()
        ax.plot(self.time * 1e3, self.xray)
        _plot.finalizeSubplot(ax, xlabel='Time (ms)', ylabel='Intensity')
        _plot.finalizeFigure(fig, title=self.title)
        return ax

    def plot(self):
        """Plot all relevant plots."""
        self.plotOfXray().plot()
###############################################################################
### debugging code
def _debugPlotExamplesOfAll():
    """
    Execute each data class above with plot=True so that an example of
    most every plot is drawn — a one-shot smoke test of the whole module.

    TODO(John): This needs to be updated in light of _prepShotno().
    TODO(John): Also, provide default shot numbers here.
    """
    # Order preserved from the original call list.
    data_classes = (
        bpData, capBankData, cos1RogowskiData, quartzJumperData, fbData,
        ipData, loopVoltageData, mModeData, nModeData, paData,
        plasmaRadiusData, qStarData, solData, spectrometerData, taData,
        tfData, tpData, sxrData,
    )
    for data_class in data_classes:
        data_class(plot=True)
|
992,513 | 2f3238c5717695499c283dc8d12e9774f1cce1a6 | from bs4 import BeautifulSoup
from selenium import webdriver
import time
import pandas as pd
def scrape_nbacom_tmclutch_by_url(start, end, url):
    '''Scrape team clutch statistics from NBA.com over a range of seasons given:
        start, the end year of the first season in the range
        end, the end year of the last season in the range
        url, the url of the page with the team clutch stats to be scraped
    Output:
        df, a dataframe combining all team clutch stats for the entire range of seasons
    '''
    browser = get_driver("/Applications/chromedriver")
    browser.get(url)
    combined = scrape_table(browser, start, end)
    close_driver(browser)
    return combined
def scrape_table(driver, start, end):
    '''Scrape table of given page of statistics given
        driver, the (Chrome) WebDriver being used to scrape
        start, the end year of the first season in the range
        end, the end year of the last season in the range
    Output:
        df_main, a dataframe combining all team clutch stats for the entire range of seasons
    '''
    df_main = pd.DataFrame()
    soup = None
    for year in range(start, end + 1):
        # Season dropdown labels look like "2014-15".
        # (Renamed locals: the original rebound the parameter names
        # `start`/`end` inside the loop, shadowing the arguments.)
        season_label = str(year - 1) + '-' + str(year)[2:]
        time.sleep(2)  # let the page settle before touching the dropdown
        xpath = './/option[@label="' + season_label + '"]'
        season = driver.find_element_by_xpath(xpath)
        season.click()
        time.sleep(2)  # wait for the table to re-render after selection
        soup = BeautifulSoup(driver.page_source, 'html.parser')
        tbl = soup.find('tbody').find_all('tr')
        d = {}
        for row in tbl:
            items = row.find_all('td')
            # Strip the playoff asterisk and tag each team with the season year.
            team = items[1].text.strip().replace('*', '') + ' ' + str(year)
            d[team] = [j.text for j in items[2:]]
        df = pd.DataFrame.from_dict(d, orient='index')
        df_main = pd.concat([df_main, df])
    # Column headers come from the table of the *last* scraped season.
    # BUG FIX: NBA.com headers contain non-breaking spaces (U+00A0); the
    # original replaced the literal three characters 'xa0' instead of the
    # escape '\xa0', so the character was never removed.
    tbl_cols = soup.find_all('th')
    df_main.columns = ['Clutch' + c.text.strip().replace('\xa0', '') for c in tbl_cols][2:(2 + df_main.shape[1])]
    # Normalize the Clippers name so it joins cleanly with other datasets.
    df_main.index = [teamyr.replace('LA Clippers', 'Los Angeles Clippers') for teamyr in df_main.index]
    return df_main
def get_driver(chromedriver):
    '''
    Given
        chromedriver, the path to chromedriver
    Return
        driver, the (Chrome) WebDriver to be used to scrape
    '''
    # (Removed the no-op `chromedriver = chromedriver` self-assignment.)
    # NOTE(review): passing the driver path positionally is deprecated in
    # Selenium 4 (use Service(executable_path=...)) — confirm the pinned
    # selenium version before migrating.
    return webdriver.Chrome(chromedriver)
def close_driver(driver):
    '''
    Given
        driver, the (Chrome) WebDriver being used to scrape
    close and quit the driver
    '''
    driver.close()  # close the current browser window
    driver.quit()   # end the session and shut down the chromedriver process
992,514 | 37f651b9b93332de4d1bf7fc9b1de64b948bfda3 | from django.shortcuts import render, get_object_or_404
from .models import Doctor, MedicalPractice
from django.db.models import Q
# Create your views here.
# def do_search(request):
# query=request.GET['search_box']
# doctors = Doctor.objects.filter(Q(name__icontains=query) | Q(practice__icontains=query) | Q(location__icontains=query))
# return render(request, "medical_practice.html", {"doctors": doctors})
# def all_doctors(request):
# medicalPractice = Doctor.objects.all()
# return render(request, "medical_practice.html", {"medicalPractice": medicalPractice})
def practice_details(request, id):
    """Render the detail page for one Doctor; 404 when the pk is unknown."""
    doctor = get_object_or_404(Doctor, pk=id)
    context = {"doctor": doctor}
    return render(request, "practice_detail.html", context)
def all_docs(request):
    """Render the listing page with every Doctor in the database."""
    context = {'doctors': Doctor.objects.all()}
    return render(request, 'all_docs.html', context)
|
992,515 | 91441bd0acdb5097fe407ceb1735750ff0aa6421 | """
Desafio 097
- Faça um programa que tenha uma função chamada escreva(), que receba um texto qualquer como parametro e mostre uma
mensagem com tamanho adaptável.
Ex. escreva('Ola, Mundo!')
Saída.
'~~~~~~~~~~~'
'Ola, Mundo!'
'~~~~~~~~~~~'
"""
def escreva(txt):
    """Print *txt* framed above and below by a '~' border sized to fit
    the text plus one space of padding on each side."""
    border = '~' * (len(txt) + 2)
    print(border)
    print(' ' + txt + ' ')
    print(border)
# Demo calls: borders adapt to each message length.
escreva('Ola, mundo!')
escreva('IHHAAAAAAAAA QUE TOP VAMOS LÁ')
escreva('oi')
|
992,516 | 49eb45f445ad8ab6bdbbc51f94be1b262de1b7fb | # LSTM (Long Short Term Memory)
from keras.layers.core import Dense, Dropout, Activation
from keras.layers import LSTM
import os
from keras.models import Sequential, load_model
from sklearn.preprocessing import MinMaxScaler
import pandas as pd
import numpy as np
# Hyper-parameters found by an earlier search (hyperopt-style trial output,
# presumably — values are floats and get cast with int() where needed).
dic={'batch_size': 57.0, 'dropout1': 0.7113823489864011, 'dropout3': 0.4933270399135308, 'dropout4': 0.33318009255640524, 'units1': 162.0, 'units3': 364.0, 'units4': 506.0}
forword=30      # forecast horizon, in rows (sic: "forward")
look_back=120   # input window length, in rows
tr_size=0.86    # train/test split fraction
ep=50           # training epochs

# Load the raw spreadsheet and build the monthly fundamentals frame
# (output/demand/supply), upsampled to daily frequency by back-fill.
df=pd.read_excel('test_data.xlsx')
df1=df.copy()
df1=df1[['日期(月度)','USA_output','OPEC_output','demand_current','supply_current']]
#df1=df1.bfill()
df1=df1.dropna()
df1=df1.rename(columns={"日期(月度)":'日期(日期)'})
df1=df1.set_index('日期(日期)')
df1=df1.asfreq(freq='d')
df1=df1.bfill()
#print(df1.head(40))

# Daily market series, forward/back-filled, then joined with the upsampled
# fundamentals; Brent (column 0) is the prediction target downstream.
df=df.set_index('日期(日期)')
df=df[['Brent','USD','CRB','Premium','trade','BDI']]
df=df.ffill()
df=df.bfill()
df=df.join(df1)
df=df.dropna()
df=df.astype('float64')
df_raw=df.copy()
#df['flag']=[ (lambda y: 1 if (y%7==6)|(y%7==0) else 0)(x) for x in range(df.shape[0])]
print(df.shape[0])
print(df_raw.shape[0])
#print(df)
#df=df.iloc[0:50]
dataset=df.values
#dataset = dataset.astype('float32')
#print(dataset)

# Scale all features to [0, 1] and split chronologically into train/test.
# NOTE(review): the scaler is fit on the full dataset, so test information
# leaks into the scaling — confirm whether that is acceptable here.
scaler = MinMaxScaler(feature_range=(0, 1))
dataset = scaler.fit_transform(dataset)
train_size = int(len(dataset) * tr_size)
trainlist = dataset[:train_size]
testlist = dataset[train_size:]
def create_dataset(dataset, look_back, forword):
    """Slice *dataset* into supervised windows.

    Each sample pairs `look_back` consecutive rows (X) with the following
    `forword` rows (y).  Returns (X, y) as numpy arrays.
    """
    windows_x, windows_y = [], []
    n_samples = len(dataset) - look_back - forword
    for offset in range(n_samples):
        boundary = offset + look_back
        windows_x.append(dataset[offset:boundary])
        windows_y.append(dataset[boundary:boundary + forword])
    return np.array(windows_x), np.array(windows_y)
# Build (X, y) windows for each split.
trainX,trainY = create_dataset(trainlist,look_back,forword)
testX,testY = create_dataset(testlist,look_back,forword)
# Flatten each (forword, n_features) target window into one row so it can
# feed the final Dense layer.
trainY=np.reshape(trainY,(trainY.shape[0],-1))
#print(trainX)
#print(trainY)
def trainModel():
    """Build (but do not fit) the 3-layer stacked LSTM regressor.

    Layer sizes and dropout rates come from the module-level hyper-parameter
    dict ``dic``; input shape comes from the module-level ``trainX``.
    Returns the compiled Keras model (MSE loss, Adam optimizer).
    """
    model = Sequential()
    model.add(LSTM(
        int(dic['units1']),
        input_shape=(trainX.shape[1], trainX.shape[2]),
        return_sequences=True))
    model.add(Dropout(dic['dropout1']))
    model.add(LSTM(
        int(dic['units3']),
        return_sequences=True))
    model.add(Dropout(dic['dropout3']))
    # Last LSTM collapses the sequence to a single vector.
    model.add(LSTM(
        int(dic['units4']),
        return_sequences=False))
    model.add(Dropout(dic['dropout4']))
    # One output per flattened forecast element (forword * n_features).
    model.add(Dense(
        trainY.shape[1]))
    # NOTE(review): ReLU on the output clips negative predictions to 0 —
    # acceptable here only because inputs are MinMax-scaled to [0, 1].
    model.add(Activation("relu"))
    model.compile(loss='mse', optimizer='adam')
    return model
# Train, persist, and run the model on the held-out windows.
model=trainModel()
model.fit(trainX, trainY, epochs=ep, batch_size=int(dic['batch_size']), verbose=2)
model.save('model2.h5')
#y_hat = model.predict(np.reshape(trainX[0:2],(-1,trainX.shape[1],trainX.shape[2])))
y_hat = model.predict(testX)
def retrive(hat):
    """Undo the MinMax scaling and keep only the target feature.

    (sic: "retrieve".)  *hat* is a batch of flattened forecast windows;
    it is reshaped to (samples, forword, n_features), inverse-transformed
    with the module-level ``scaler``, and reduced to the first feature
    (the Brent price column) of each forecast step.
    """
    hat=np.reshape(hat,(-1,df.shape[1]))
    hat=scaler.inverse_transform(hat)
    hat=np.reshape(hat,(-1,forword,df.shape[1]))
    # Column 0 of df is Brent — the quantity being forecast.
    result=[[y[0] for y in x] for x in hat]
    #print(np.array(result))
    return np.array(result)
#print(df_raw.iloc[train_size:])
# Map predictions and ground truth back to price space and report the
# mean absolute error over the whole test horizon.
yhat=retrive(y_hat)
ytest=retrive(testY)
diff=yhat-ytest
print(yhat[-1])
print(ytest[-1])
mae=(np.abs(diff)).mean()
#rmse=np.sqrt((diff**2).mean())
print(mae)
|
992,517 | a64a62564bd5e7f416d642d3de159e8f5b39afe2 | # -*- coding: utf-8 -*-
SECRET = ''  # log into the TransferEasy admin console, Settings page, to obtain
ACCOUNT_NO = ''  # log into the TransferEasy admin console, Settings page, to obtain
# API endpoint template; the concrete path is substituted for {url}.
TE_HOST = 'https://open.transfereasy.com/{url}'
# TransferEasy RSA public key (PEM), used for the open-API crypto handshake.
PUBLIC_KEY = '''
-----BEGIN PUBLIC KEY-----
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1FuC6Pj3d5qFcK7G0vOD
Nbi3H6bTm/1i19KfA/qpKvxHimD2OYU18fqq+1r3FOqByg5bJF4q/lSOkZvqLATx
RFCXVq/r8/h+BMPWuZBv+N3rsvw1yvvuyDJl9IF868OzLk4DBs0BM5FIMNtIUQU0
grgk9LxZ8CJfWjEDSs7qo0Wky9apmT3VwfMY4FDZLxv6fm33NvmHg8BortIDRDP4
UetzZoLqYUSJWAh1OiGbDveqOHFkWQOR5HXfVwMiX5SH/oRX5OtQTr2mYiRDEsmY
MmvKN0Pk7xuVR7G3MeqpRScsYMFuDVWVXa+HLLqmXgmHBk+TXm85Gd7T4pJzwdV1
RwIDAQAB
-----END PUBLIC KEY-----
'''
992,518 | 6b7c907d5f0c3033ba0b510c0a1abe42c9d33b9d | # DATA WRANGLING WITH PANDAS
import pandas as pd

# Reading in csv files:
csv_data = pd.read_csv('training-set.csv')
# Pandas loads the csv data into a DataFrame (table)
# Displaying the DataFrame:
csv_data.head()
# In Pandas, columns are called 'series'
# The describe function shows a table of statistics about the data in your DataFrame
csv_data.describe(include='all')
# Shuffling your DataFrame
csv_data = csv_data.sample(frac=1).reset_index(drop=True)
# You can print out all columns (series) in the DataFrame using:
csv_data.columns
# Then print out contents of one of the listed columns by passing in its name
csv_data['employee_name']
## Viewing multiple columns from DataFrame
# By passing in array of column names to index:
csv_data[['employee_name','email']]
# Or by indexing a range of columns:
cols_2_4 = csv_data.columns[2:4]
csv_data[cols_2_4]
# Accessing rows by index:
csv_data.iloc[5]
# Accessing multiple rows
csv_data.iloc[5:10]
# EXAMPLES
# Accessing row 5 of column named 'employee_salary'
csv_data['employee_salary'].iloc[4]
# Accessing subset of rows and subset of columns:
# BUG FIX: this read `csvdata.columns` (undefined name) — the DataFrame
# variable is `csv_data`.
cols_2_4 = csv_data.columns[2:4] # First choose the column names
cols_2_4 = csv_data[cols_2_4] # Then we get the columns
cols_2_4.iloc[5:10] # Now select the rows from that DataFrame (subset)
csv_data[csv_data.columns[2:4]].iloc[5:10] # Same expression in one line
|
992,519 | 7e783cae694b17565659a1de073a18bb4e16fb18 | # coding: utf-8
"""
Python Users API
This is a simple Users API. It provides basic logic, CRUD operations of users. # noqa: E501
OpenAPI spec version: 1.0.0
Contact: valentin.sheboldaev@gmail.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.api.user_api_api import UserAPIApi # noqa: E501
from swagger_client.rest import ApiException
class TestUserAPIApi(unittest.TestCase):
    """UserAPIApi unit test stubs

    Generated placeholders: each test method maps to one endpoint of the
    Users API and currently passes without exercising anything.
    """

    def setUp(self):
        # Fresh API client per test.
        self.api = swagger_client.api.user_api_api.UserAPIApi()  # noqa: E501

    def tearDown(self):
        pass

    def test_delete_useruser_uuid_post(self):
        """Test case for delete_useruser_uuid_post

        Delete a single user record  # noqa: E501
        """
        pass

    def test_get_total_users_get(self):
        """Test case for get_total_users_get

        Get all system users  # noqa: E501
        """
        pass

    def test_get_user_by_emailuser_email_get(self):
        """Test case for get_user_by_emailuser_email_get

        Get's a single user record  # noqa: E501
        """
        pass

    def test_get_user_by_uuiduser_uuid_get(self):
        """Test case for get_user_by_uuiduser_uuid_get

        Get's a single user record  # noqa: E501
        """
        pass

    def test_post_user_post(self):
        """Test case for post_user_post

        Adds a single user record  # noqa: E501
        """
        pass

    def test_post_users_post(self):
        """Test case for post_users_post

        Adds users to User db. Upload as many as you want.  # noqa: E501
        """
        pass

    def test_update_useruser_uuid_put(self):
        """Test case for update_useruser_uuid_put

        Updates an existing user  # noqa: E501
        """
        pass
# Allow running the stubs directly with the stdlib test runner.
if __name__ == '__main__':
    unittest.main()
|
class Student():
    """A pupil: a roster position plus a name."""

    def __init__(self, number_in_list, name):
        # Position of the student in the class roster.
        self.number_in_list = number_in_list
        self.name = name
class School():
    """Container pairing a list of students with their marks.

    Supports len(), indexing (formatted report string), item assignment
    (replaces a mark), and iteration over (name, mark) pairs.
    """

    def __init__(self, students, marks):
        self.students = students
        self.marks = marks

    def __len__(self):
        # Size is defined by the marks list.
        return len(self.marks)

    def __getitem__(self, i):
        pupil = self.students[i]
        return f'Имя студента под номером {pupil.number_in_list}: {pupil.name}. Его оценка - {self.marks[i]}'

    def __setitem__(self, key, value):
        self.marks[key] = value

    def __iter__(self):
        for idx in range(len(self.marks)):
            yield self.students[idx].name, self.marks[idx]
class Activity():
    """Extracurricular record: school/student info plus a certificate count.

    NOTE(review): __gt__ compares against another Activity while __ge__
    compares against a raw number — kept as-is because the demo code below
    relies on both shapes.  Defining __eq__ without __hash__ makes
    instances unhashable.
    """

    def __init__(self, st_information, certificate):
        self.st_information = st_information
        self.certificate = certificate

    def __add__(self, other):
        # Combined certificate count for a team of two.
        return self.certificate + other.certificate

    def __str__(self):
        return (f'Имя студента: {self.st_information.students.name}. '
                f'Количество его сертификатов - {self.certificate}')

    def __gt__(self, other):
        return other.certificate < self.certificate

    def __ge__(self, other):
        # *other* is a plain number here, not an Activity.
        return self.certificate >= other

    def __int__(self):
        return int(self.st_information.marks)

    def __eq__(self, other):
        return self.certificate == other.certificate
if __name__ == '__main__':
    # Demo: exercise School's container protocol with three students.
    students = [Student(1, 'Василий'), Student(2, 'Иван'), Student(3, 'Олег')]
    mark = ['5', '4', '3']
    s = School(students, mark)
    print('Количество студентов:', len(s))
    print()
    print(s[0])
    print('А теперь ...')
    s[0] = '2'
    print(s[0])
    print()
    for st, m in iter(s):
        print(st, m)
    print()
    # NOTE(review): here School wraps a single Student (not a list) and a
    # single mark string — Activity.__str__/__int__ depend on that shape.
    st_1 = Activity(School(Student(1, 'Мария'), '5'), 2)
    st_2 = Activity(School(Student(2, 'Наталья'), '4'), 8)
    st_3 = Activity(School(Student(3, 'Марина'), '4'), 8)
    print(st_1, st_2, st_3, sep='\n')
    print('Количество сертификатов на команду:', st_1 + st_2)
    # __gt__ compares Activity to Activity ...
    if st_1 > st_2:
        print('У первого студента сертификатов больше, чем у второго')
    else:
        print('У второго студента сертификатов больше, чем у первого')
    print()
    # ... while __ge__ compares against a plain number.
    print('Первый студент имеет сертификатов не менее, чем среднее значение по команде?', st_1 >= 5)
    print('Второй студент имеет сертификатов не менее, чем среднее значение по команде?', st_2 >= 5)
    print()
    print('Средняя оценка по команде:', (int(st_1)+int(st_2))/2)
    print()
    print('У второго и третьего студента равное количество сертификатов?', st_2 == st_3)
|
992,521 | 6dd740a962fe62deb6a16a2db4919e3ff0622dd7 | import turtle
from turtle import Turtle
from random import random
timmy = Turtle()          # the drawing pen
screen = turtle.Screen()  # window to draw into
timmy.speed(0)            # 0 = fastest animation speed
timmy.width(2)            # pen thickness in pixels
def change_color():
    """Switch the global turtle's pen to a random RGB colour."""
    # Draw the three channels in the same order as the original
    # (R, then B, then G) so a seeded RNG produces identical colours.
    red = random()
    blue = random()
    green = random()
    timmy.color(red, green, blue)
# Draw 72 circles, rotating 5 degrees between each (72 * 5 = 360) so the
# circles fan out into a full spirograph, recolouring after every circle.
for i in range(72):
    timmy.circle(100)
    timmy.right(5)
    change_color()

# Keep the window open until the user clicks it.
screen.exitonclick()
|
992,522 | ea3b937d3a3966b195fb20517098e5ec6d31d249 | import os
import re
import pytest
from unit.applications.lang.python import ApplicationPython
from unit.option import option
prerequisites = {'modules': {'python': 'any'}}
client = ApplicationPython()
@pytest.fixture(autouse=True)
def setup_method_fixture():
    """Reset the daemon to the baseline layout before every test.

    Two upstreams ("one" on :7080, "two" on :7090) balance over the same
    pair of servers; routes one/two/three return 200/201/202 so the status
    code identifies which server answered.  Also records the host CPU
    count, used by the tests as the allowed round-robin skew (presumably
    one in-flight request per router worker — see the assertions below).
    """
    assert 'success' in client.conf(
        {
            "listeners": {
                "*:7080": {"pass": "upstreams/one"},
                "*:7090": {"pass": "upstreams/two"},
                "*:7081": {"pass": "routes/one"},
                "*:7082": {"pass": "routes/two"},
                "*:7083": {"pass": "routes/three"},
            },
            "upstreams": {
                "one": {
                    "servers": {
                        "127.0.0.1:7081": {},
                        "127.0.0.1:7082": {},
                    },
                },
                "two": {
                    "servers": {
                        "127.0.0.1:7081": {},
                        "127.0.0.1:7082": {},
                    },
                },
            },
            "routes": {
                "one": [{"action": {"return": 200}}],
                "two": [{"action": {"return": 201}}],
                "three": [{"action": {"return": 202}}],
            },
            "applications": {},
        },
    ), 'upstreams initial configuration'

    client.cpu_count = os.cpu_count()
def get_resps(req=100, port=7080):
    """Issue *req* sequential GETs to *port* and histogram the 20x
    responses by the last digit of the status code (which identifies the
    answering upstream server)."""
    counts = [0]
    for _ in range(req):
        status = client.get(port=port)['status']
        # Ignore anything outside the 200..209 marker range.
        if not (200 <= status <= 209):
            continue
        bucket = status % 10
        while bucket >= len(counts):
            counts.append(0)
        counts[bucket] += 1
    return counts
def get_resps_sc(req=100, port=7080):
    """Send *req* pipelined requests over a single connection and histogram
    the 20x responses by the status code's last digit (the upstream id).

    A single connection makes the round-robin split deterministic, unlike
    get_resps() which may hit different router workers.
    """
    # First req-1 requests are keep-alive; the last one closes the
    # connection so recvall sees EOF.
    to_send = b"""GET / HTTP/1.1
Host: localhost

""" * (
        req - 1
    )

    to_send += b"""GET / HTTP/1.1
Host: localhost
Connection: close

"""

    resp = client.http(to_send, raw_resp=True, raw=True, port=port)
    status = re.findall(r'HTTP\/\d\.\d\s(\d\d\d)', resp)
    status = list(filter(lambda x: x[:2] == '20', status))
    ups = list(map(lambda x: int(x[-1]), status))

    resps = [0] * (max(ups) + 1)
    for _, up in enumerate(ups):
        resps[up] += 1

    return resps
def test_upstreams_rr_no_weight():
    """Unweighted round-robin balances evenly and survives removing,
    restoring, and adding servers at runtime."""
    resps = get_resps()
    assert sum(resps) == 100, 'no weight sum'
    # Allow a skew of up to one request per CPU (separate router workers
    # balance independently, presumably).
    assert abs(resps[0] - resps[1]) <= client.cpu_count, 'no weight'

    assert 'success' in client.conf_delete(
        'upstreams/one/servers/127.0.0.1:7081'
    ), 'no weight server remove'

    # With one server left, it takes all traffic.
    resps = get_resps(req=50)
    assert resps[1] == 50, 'no weight 2'

    assert 'success' in client.conf(
        {}, 'upstreams/one/servers/127.0.0.1:7081'
    ), 'no weight server revert'

    resps = get_resps()
    assert sum(resps) == 100, 'no weight 3 sum'
    assert abs(resps[0] - resps[1]) <= client.cpu_count, 'no weight 3'

    assert 'success' in client.conf(
        {}, 'upstreams/one/servers/127.0.0.1:7083'
    ), 'no weight server new'

    resps = get_resps()
    assert sum(resps) == 100, 'no weight 4 sum'
    assert max(resps) - min(resps) <= client.cpu_count, 'no weight 4'

    # Over a single connection the 3-way split is exact.
    resps = get_resps_sc(req=30)
    assert resps[0] == 10, 'no weight 4 0'
    assert resps[1] == 10, 'no weight 4 1'
    assert resps[2] == 10, 'no weight 4 2'
def test_upstreams_rr_weight():
    """Integer weights skew the single-connection split proportionally;
    deleting a weight restores the default of 1."""
    assert 'success' in client.conf(
        {"weight": 3}, 'upstreams/one/servers/127.0.0.1:7081'
    ), 'configure weight'

    # 3:1 ratio over 100 requests.
    resps = get_resps_sc()
    assert resps[0] == 75, 'weight 3 0'
    assert resps[1] == 25, 'weight 3 1'

    assert 'success' in client.conf_delete(
        'upstreams/one/servers/127.0.0.1:7081/weight'
    ), 'configure weight remove'

    resps = get_resps_sc(req=10)
    assert resps[0] == 5, 'weight 0 0'
    assert resps[1] == 5, 'weight 0 1'

    # A weight may also be set as a bare JSON number.
    assert 'success' in client.conf(
        '1', 'upstreams/one/servers/127.0.0.1:7081/weight'
    ), 'configure weight 1'

    resps = get_resps_sc()
    assert resps[0] == 50, 'weight 1 0'
    assert resps[1] == 50, 'weight 1 1'

    # Replacing the whole servers object swaps :7082 for :7083.
    assert 'success' in client.conf(
        {
            "127.0.0.1:7081": {"weight": 3},
            "127.0.0.1:7083": {"weight": 2},
        },
        'upstreams/one/servers',
    ), 'configure weight 2'

    resps = get_resps_sc()
    assert resps[0] == 60, 'weight 2 0'
    assert resps[2] == 40, 'weight 2 1'
def test_upstreams_rr_weight_rational():
    """Fractional (and extreme) weights split traffic in exact proportion
    w1 : w2 over a single connection."""

    def set_weights(w1, w2):
        # Apply the weight pair to the two baseline servers.
        assert 'success' in client.conf(
            {
                "127.0.0.1:7081": {"weight": w1},
                "127.0.0.1:7082": {"weight": w2},
            },
            'upstreams/one/servers',
        ), 'configure weights'

    def check_reqs(w1, w2, reqs=10):
        # Expect an exact proportional split (reqs must divide evenly).
        resps = get_resps_sc(req=reqs)
        assert resps[0] == reqs * w1 / (w1 + w2), 'weight 1'
        assert resps[1] == reqs * w2 / (w1 + w2), 'weight 2'

    def check_weights(w1, w2):
        set_weights(w1, w2)
        check_reqs(w1, w2)

    # Zero, huge, tiny, and equal weights must all honour the ratio.
    check_weights(0, 1)
    check_weights(0, 999999.0123456)
    check_weights(1, 9)
    check_weights(100000, 900000)
    check_weights(1, 0.25)
    check_weights(1, 0.25)
    check_weights(0.2, 0.8)
    check_weights(1, 1.5)
    check_weights(1e-3, 1e-3)
    check_weights(1e-20, 1e-20)
    check_weights(1e4, 1e4)
    check_weights(1000000, 1000000)

    # Deleting one weight resets it to the default 1; expect a 1 : 0.25 split.
    set_weights(0.25, 0.25)
    assert 'success' in client.conf_delete(
        'upstreams/one/servers/127.0.0.1:7081/weight'
    ), 'delete weight'
    check_reqs(1, 0.25)

    # Three servers with mixed fractional weights: 0.1 : 1 : 0.9 over 20.
    assert 'success' in client.conf(
        {
            "127.0.0.1:7081": {"weight": 0.1},
            "127.0.0.1:7082": {"weight": 1},
            "127.0.0.1:7083": {"weight": 0.9},
        },
        'upstreams/one/servers',
    ), 'configure weights'

    resps = get_resps_sc(req=20)
    assert resps[0] == 1, 'weight 3 1'
    assert resps[1] == 10, 'weight 3 2'
    assert resps[2] == 9, 'weight 3 3'
def test_upstreams_rr_independent():
    """Changing one upstream's weights must not affect the other upstream,
    even though both balance over the same pair of servers."""

    def sum_resps(*args):
        # Element-wise sum of several equal-length histograms.
        sum_r = [0] * len(args[0])
        for arg in args:
            sum_r = [x + y for x, y in zip(sum_r, arg)]
        return sum_r

    # Both upstreams start with an even split.
    resps = get_resps_sc(req=30, port=7090)
    assert resps[0] == 15, 'dep two before 0'
    assert resps[1] == 15, 'dep two before 1'

    resps = get_resps_sc(req=30)
    assert resps[0] == 15, 'dep one before 0'
    assert resps[1] == 15, 'dep one before 1'

    # Reweight upstream "two" only: it skews 2:1, "one" stays 1:1.
    assert 'success' in client.conf(
        '2', 'upstreams/two/servers/127.0.0.1:7081/weight'
    ), 'configure dep weight'

    resps = get_resps_sc(req=30, port=7090)
    assert resps[0] == 20, 'dep two 0'
    assert resps[1] == 10, 'dep two 1'

    resps = get_resps_sc(req=30)
    assert resps[0] == 15, 'dep one 0'
    assert resps[1] == 15, 'dep one 1'

    assert 'success' in client.conf(
        '1', 'upstreams/two/servers/127.0.0.1:7081/weight'
    ), 'configure dep weight 1'

    # Interleave traffic to both upstreams; each stays balanced on its own.
    r_one, r_two = [0, 0], [0, 0]
    for _ in range(10):
        r_one = sum_resps(r_one, get_resps(req=10))
        r_two = sum_resps(r_two, get_resps(req=10, port=7090))

    assert sum(r_one) == 100, 'dep one mix sum'
    assert abs(r_one[0] - r_one[1]) <= client.cpu_count, 'dep one mix'
    assert sum(r_two) == 100, 'dep two mix sum'
    assert abs(r_two[0] - r_two[1]) <= client.cpu_count, 'dep two mix'
def test_upstreams_rr_delay():
    """Round-robin stays balanced when one server (a Python app honouring
    the X-Delay header) answers slowly on every fifth request."""
    delayed_dir = f'{option.test_dir}/python/delayed'
    # Route :7081 to the delayed app and :7082 to an instant 201 return.
    assert 'success' in client.conf(
        {
            "listeners": {
                "*:7080": {"pass": "upstreams/one"},
                "*:7081": {"pass": "routes"},
                "*:7082": {"pass": "routes"},
            },
            "upstreams": {
                "one": {
                    "servers": {
                        "127.0.0.1:7081": {},
                        "127.0.0.1:7082": {},
                    },
                },
            },
            "routes": [
                {
                    "match": {"destination": "*:7081"},
                    "action": {"pass": "applications/delayed"},
                },
                {
                    "match": {"destination": "*:7082"},
                    "action": {"return": 201},
                },
            ],
            "applications": {
                "delayed": {
                    "type": client.get_application_type(),
                    "processes": {"spare": 0},
                    "path": delayed_dir,
                    "working_directory": delayed_dir,
                    "module": "wsgi",
                }
            },
        },
    ), 'upstreams initial configuration'

    req = 50

    # Fire all requests without reading, so slow and fast answers overlap.
    socks = []
    for i in range(req):
        delay = 1 if i % 5 == 0 else 0
        sock = client.get(
            headers={
                'Host': 'localhost',
                'Content-Length': '0',
                'X-Delay': str(delay),
                'Connection': 'close',
            },
            no_recv=True,
        )
        socks.append(sock)

    # Now collect every response and bucket by the status last digit.
    resps = [0, 0]
    for i in range(req):
        resp = client.recvall(socks[i]).decode()
        socks[i].close()

        m = re.search(r'HTTP/1.1 20(\d)', resp)
        assert m is not None, 'status'
        resps[int(m.group(1))] += 1

    assert sum(resps) == req, 'delay sum'
    assert abs(resps[0] - resps[1]) <= client.cpu_count, 'delay'
def test_upstreams_rr_active_req():
    """In-flight requests survive reconfiguration: servers, the listener,
    and even the whole upstream can be changed/removed while connections
    are open, and the pending requests still complete."""
    conns = 5
    socks = []
    socks2 = []

    # Open idle GETs and half-sent POSTs (headers only, body pending).
    for _ in range(conns):
        sock = client.get(no_recv=True)
        socks.append(sock)

        sock2 = client.http(
            b"""POST / HTTP/1.1
Host: localhost
Content-Length: 10
Connection: close

""",
            no_recv=True,
            raw=True,
        )
        socks2.append(sock2)

    # Send one more request and read response to make sure that previous
    # requests had enough time to reach server.
    assert client.get()['body'] == ''

    # Reconfigure while the above requests are still active.
    assert 'success' in client.conf(
        {"127.0.0.1:7083": {"weight": 2}},
        'upstreams/one/servers',
    ), 'active req new server'
    assert 'success' in client.conf_delete(
        'upstreams/one/servers/127.0.0.1:7083'
    ), 'active req server remove'
    assert 'success' in client.conf_delete(
        'listeners/*:7080'
    ), 'delete listener'
    assert 'success' in client.conf_delete(
        'upstreams/one'
    ), 'active req upstream remove'

    # Finish every pending request; all must still be answered.
    for i in range(conns):
        assert (
            client.http(b'', sock=socks[i], raw=True)['body'] == ''
        ), 'active req GET'
        assert (
            client.http(b"""0123456789""", sock=socks2[i], raw=True)['body']
            == ''
        ), 'active req POST'
def test_upstreams_rr_bad_server():
    """A server with nothing listening (:7084) soaks up its share of the
    rotation (10 failed of 30) while the healthy pair still answers."""
    assert 'success' in client.conf(
        {"weight": 1}, 'upstreams/one/servers/127.0.0.1:7084'
    ), 'configure bad server'

    resps = get_resps_sc(req=30)
    assert resps[0] == 10, 'bad server 0'
    assert resps[1] == 10, 'bad server 1'
    # Only 20 of 30 requests produce 20x responses.
    assert sum(resps) == 20, 'bad server sum'
def test_upstreams_rr_pipeline():
    """Pipelined requests on one connection split exactly 50/50."""
    resps = get_resps_sc()
    assert resps[0] == 50, 'pipeline 0'
    assert resps[1] == 50, 'pipeline 1'
def test_upstreams_rr_post():
    """Mixed GET/POST traffic is balanced the same as GET-only traffic."""
    resps = [0, 0]
    for _ in range(50):
        resps[client.get()['status'] % 10] += 1
        resps[client.post(body='0123456789')['status'] % 10] += 1

    assert sum(resps) == 100, 'post sum'
    assert abs(resps[0] - resps[1]) <= client.cpu_count, 'post'
def test_upstreams_rr_unix(temp_dir):
    """Round-robin works over unix-domain socket servers."""
    addr_0 = f'{temp_dir}/sock_0'
    addr_1 = f'{temp_dir}/sock_1'

    # Listen on the two unix sockets and keep *:7080 on the upstream.
    assert 'success' in client.conf(
        {
            "*:7080": {"pass": "upstreams/one"},
            f"unix:{addr_0}": {"pass": "routes/one"},
            f"unix:{addr_1}": {"pass": "routes/two"},
        },
        'listeners',
    ), 'configure listeners unix'

    assert 'success' in client.conf(
        {f"unix:{addr_0}": {}, f"unix:{addr_1}": {}},
        'upstreams/one/servers',
    ), 'configure servers unix'

    resps = get_resps_sc()
    assert resps[0] == 50, 'unix 0'
    assert resps[1] == 50, 'unix 1'
def test_upstreams_rr_ipv6():
    """Round-robin works over IPv6 loopback servers."""
    assert 'success' in client.conf(
        {
            "*:7080": {"pass": "upstreams/one"},
            "[::1]:7081": {"pass": "routes/one"},
            "[::1]:7082": {"pass": "routes/two"},
        },
        'listeners',
    ), 'configure listeners ipv6'

    assert 'success' in client.conf(
        {"[::1]:7081": {}, "[::1]:7082": {}}, 'upstreams/one/servers'
    ), 'configure servers ipv6'

    resps = get_resps_sc()
    assert resps[0] == 50, 'ipv6 0'
    assert resps[1] == 50, 'ipv6 1'
def test_upstreams_rr_servers_empty():
    """An upstream with no usable servers (none configured, or all at
    weight 0) answers 502 Bad Gateway."""
    assert 'success' in client.conf(
        {}, 'upstreams/one/servers'
    ), 'configure servers empty'
    assert client.get()['status'] == 502, 'servers empty'

    # Weight 0 counts as "not usable".
    assert 'success' in client.conf(
        {"127.0.0.1:7081": {"weight": 0}}, 'upstreams/one/servers'
    ), 'configure servers empty one'
    assert client.get()['status'] == 502, 'servers empty one'

    assert 'success' in client.conf(
        {
            "127.0.0.1:7081": {"weight": 0},
            "127.0.0.1:7082": {"weight": 0},
        },
        'upstreams/one/servers',
    ), 'configure servers empty two'
    assert client.get()['status'] == 502, 'servers empty two'
def test_upstreams_rr_invalid():
    """The controller rejects malformed upstream configuration: empty
    objects, bad addresses, unknown options, and invalid weight values."""
    assert 'error' in client.conf({}, 'upstreams'), 'upstreams empty'
    assert 'error' in client.conf({}, 'upstreams/one'), 'named upstreams empty'
    assert 'error' in client.conf(
        {}, 'upstreams/one/servers/127.0.0.1'
    ), 'invalid address'
    assert 'error' in client.conf(
        {}, 'upstreams/one/servers/127.0.0.1:7081/blah'
    ), 'invalid server option'

    def check_weight(w):
        # Every candidate weight value must be rejected.
        assert 'error' in client.conf(
            w, 'upstreams/one/servers/127.0.0.1:7081/weight'
        ), 'invalid weight option'

    check_weight({})                 # not a number
    check_weight('-1')               # negative
    check_weight('1.')               # malformed decimals
    check_weight('1.1.')
    check_weight('.')
    check_weight('.01234567890123')  # too much precision
    check_weight('1000001')          # above the allowed maximum
    check_weight('2e6')              # exponent form not accepted
|
992,523 | d0ad2d06c16496b402471f873eef217a8b5c94f4 | # 决策树代码实现过程
"""
熵的计算,给出数据集进行计算并返回
对数据集进行切分,返回切分后的数据集
选择最优的切分特征
叶子结点标签不唯一处理--票多为胜
创建决策树
预测函数
进行测试
"""
from math import log
import numpy as np
class ID3:
def __int__(self, dataset):
self.dataset = dataset[1:]
self.labels = dataset[0]
def calc_entropy(self, dataset):
    """Shannon entropy (base 2) of the class labels (last column) in *dataset*.

    BUG FIXES vs. the original:
    - the accumulator started at 1 instead of 0, biasing every result;
    - the first occurrence of each label was recorded as 0 (the ``else``
      branch only incremented later hits), undercounting every class by one.
    """
    label_counts = {}
    for row in dataset:
        current_label = row[-1]
        label_counts[current_label] = label_counts.get(current_label, 0) + 1
    total_rows = len(dataset)
    entropy = 0.0
    for count in label_counts.values():
        p = count / total_rows
        entropy -= p * log(p, 2)
    return entropy
def split_category_dataset(self, dataset, axis, value):
    """Return the rows whose column *axis* equals *value*, with that
    column removed from each returned row."""
    subset = []
    for row in dataset:
        if row[axis] != value:
            continue
        trimmed = row[:axis]
        trimmed.extend(row[axis + 1:])
        subset.append(trimmed)
    return subset
def split_continuous_dataset(self, dataset, axis, value, direction: int = 0):
    """Binary split on a continuous column.

    direction == 0 keeps rows where column *axis* > *value*;
    any other direction keeps rows where column *axis* <= *value*.
    The split column is removed from the returned rows.
    """
    if direction == 0:
        keep = lambda v: v > value
    else:
        keep = lambda v: v <= value
    subset = []
    for row in dataset:
        if keep(row[axis]):
            trimmed = row[:axis]
            trimmed.extend(row[axis + 1:])
            subset.append(trimmed)
    return subset
def choose_best_feature(self, dataset):
best_feature = -1
feature_num = dataset.shape[1]-1
root_entropy = self.calc_entropy(dataset=dataset)
best_info_gain = 0
best_split_dict = {}
for axis in range(feature_num):
feature_list = sorted(dataset[:, axis])
# 判断数组某一列数据类型是否是数值型,数值型number:包括int float bool complex
if type(feature_list[0]).__name__.startswith('float') or type(feature_list[0]).__name__.startswith('int'):
split_list = []
for i in range(len(feature_list)-1):
split_point = (feature_list[i]+feature_list[i+1])/2
split_list.append(split_point)
for point in split_list:
sub_entropy = 0
split_dataset0 = self.split_continuous_dataset(dataset=dataset,axis=axis,value=point,direction=0)
w_right = len(split_dataset0)/dataset.shape[0]
sub_entropy += w_right*self.calc_entropy(dataset=split_dataset0)
split_dataset1 = self.split_continuous_dataset(dataset=dataset,axis=axis,value=point,direction=1)
w_left = len(split_dataset1)/len(dataset)
sub_entropy += w_left*self.calc_entropy(dataset=split_dataset1)
if root_entropy - sub_entropy > best_info_gain:
best_info_gain = root_entropy - sub_entropy
best_split_point = point
best_feature = axis
best_split_dict[axis] = best_split_point
else:
feature_elements = set(feature_list)
for value in feature_elements:
sub_entropy = 0
split_dataset = self.split_category_dataset(dataset=dataset,axis=axis,value=value)
w = len(split_dataset)/len(dataset)
sub_entropy += w*self.calc_entropy(dataset=split_dataset)
if root_entropy - sub_entropy > best_info_gain:
best_feature = axis
# 若当前节点的最佳划分特征为连续特征,则将其对应的取值进行二值化处理,即与best_split_point进行对比,小于等于的赋值1,大于的为0
if type(dataset[0][best_feature]).__name__ == 'float' or type(dataset[0][best_feature]).__name__ == 'int':
best_split_value = best_split_dict[best_feature]
for i in range(len(dataset.shape[0])):
if dataset[i][best_feature] <= best_split_value:
dataset[i][best_feature] = 1
else:
dataset[i][best_feature] = 0
return best_feature
def majority_count(self, dataset):
label_count = {}
for row in dataset:
label = row[-1]
if label not in label_count.keys():
label_count[label] = 0
label_count[label] += 1
return max(label_count)
# todo:成员变量最为成员函数的默认参数,不可以实现
def fit(self,dataset=self.dataset,labels=self.labels):
class_label = [example[-1] for example in dataset]
# np.ndarray has no attribute 'count' but list object has this attribute
if class_label.count(class_label[0]) == len(class_label):
return class_label[0]
if len(dataset[0]) == 1:
return self.majority_count(dataset=dataset)
best_feature_index = self.choose_best_feature(dataset=dataset)
best_feature = labels[best_feature_index]
my_tree = {best_feature: {}}
# 字典对象可以接收基本数据类型
del labels[best_feature_index]
feature_values = [example[best_feature_index] for example in dataset]
unique_value = set(feature_values)
for value in unique_value:
sub_dataset = self.split_category_dataset(dataset=dataset,axis=best_feature_index,value=value)
my_tree[best_feature][value] = self.fit(sub_dataset,labels)
return my_tree |
992,524 | fde4e18b2a6455efe569e42949455c56b7599458 | # encoding: utf-8
'''
@author: shiwei hou
@contact: murdockhou@gmail.com
@software: PyCharm
@file: test_head_count.py
@time: 19-1-4 09:44
'''
import tensorflow as tf
import os
import time
import cv2
import numpy as np
import matplotlib.pyplot as plt
from src.lightweight_openpose import light_openpose
from src.pose_decode import decode_pose
# from src.pose_decode_old import decode_pose
params = {}
# Checkpoint and input locations; edit per machine.  TODO(review): confirm paths.
params['test_model'] = '/home/ulsee/work/github/lightweight_openpose/model/model.ckpt-1008540'
# params['video_path'] = '/media/ulsee/E/video/bank/jiachaojian30.mp4'
params['img_path'] = '/media/ulsee/E/ai_format_dataset/trainData_9544'
# params['img_path'] = '/media/ulsee/E/yuncong/yuncong_data/our/test/0/'
# Decode thresholds consumed by decode_pose (heatmap / PAF cutoffs — confirm).
params['thre1'] = 0.1
params['thre2'] = 0.0
def main():
    """Restore the lightweight-openpose checkpoint and run pose inference,
    either frame-by-frame on params['video_path'] (when set) or on every
    .jpg under params['img_path'], displaying the decoded skeleton with cv2.
    """
    use_gpu = False
    if use_gpu:
        os.environ['CUDA_VISIBLE_DEVICES'] = '0'
    else:
        # -1 hides all GPUs from TensorFlow -> CPU inference.
        os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
    input_img = tf.placeholder(tf.float32, shape=[1, None, None, 3])
    # light_openpose returns intermediate stages plus final heatmaps (cpm) and PAFs.
    _1, _2, cpm, paf = light_openpose(input_img, is_training=False)
    saver = tf.train.Saver()
    total_img = 0
    total_time = 0
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        saver.restore(sess, params['test_model'])
        print('#---------Successfully loaded trained model.---------#')
        if 'video_path'in params.keys() and params['video_path'] is not None:
            # video_capture = cv2.VideoCapture('rtsp://admin:youwillsee!@10.24.1.238')
            video_capture = cv2.VideoCapture(params['video_path'])
            fps = video_capture.get(cv2.CAP_PROP_FPS)
            start_second = 0
            start_frame = fps * start_second
            video_capture.set(cv2.CAP_PROP_POS_FRAMES, start_frame)
            while True:
                retval, img_data = video_capture.read()
                if not retval:
                    break
                img_data = cv2.cvtColor(img_data, code=cv2.COLOR_BGR2RGB)
                orih, oriw, c = img_data.shape
                # Network expects 368x368 inputs scaled to [0, 1].
                img = cv2.resize(img_data, (368, 368)) / 255.
                start_time = time.time()
                heatmap, _paf = sess.run([cpm, paf], feed_dict={input_img: [img]})
                end_time = time.time()
                canvas, joint_list, person_to_joint_assoc = decode_pose(img, params, heatmap[0], _paf[0])
                decode_time = time.time()
                print ('inference + decode time == {}'.format(decode_time - start_time))
                total_img += 1
                total_time += (end_time-start_time)
                canvas = canvas.astype(np.float32)
                canvas = cv2.cvtColor(canvas, cv2.COLOR_BGR2RGB)
                cv2.imshow('result', canvas)
                cv2.waitKey(1)
        elif params['img_path'] is not None:
            for img_name in os.listdir(params['img_path']):
                if img_name.split('.')[-1] != 'jpg':
                    continue
                img_data = cv2.imread(os.path.join(params['img_path'], img_name))
                img_data = cv2.cvtColor(img_data, code=cv2.COLOR_BGR2RGB)
                img = cv2.resize(img_data, (368, 368)) / 255.
                start_time = time.time()
                heatmap, _paf = sess.run([cpm, paf], feed_dict={input_img: [img]})
                end_time = time.time()
                canvas, joint_list, person_to_joint_assoc = decode_pose(img, params, heatmap[0], _paf[0])
                canvas = canvas.astype(np.float32)
                canvas = cv2.cvtColor(canvas, cv2.COLOR_BGR2RGB)
                decode_time = time.time()
                print('inference + decode time == {}'.format(decode_time - start_time))
                total_img += 1
                total_time += (end_time - start_time)
                cv2.imshow('result', canvas)
                # waitKey(0) blocks until a key is pressed for each image.
                cv2.waitKey(0)
        else:
            print('Nothing to process.')
main() |
992,525 | a2e1dc30738d7c3a7e8a670a85f53482989d3399 | from fastapi import FastAPI
from fastapi.params import Query
import pickle
from pydantic import BaseModel
from tensorflow import keras
from tensorflow.keras.preprocessing.sequence import pad_sequences
import numpy as np
description = """
Teste Prático Ciências de Dados. 🚀
## Sentiment Analysis
Neste endpoint é realizada a classificação de um texto de tweet sobre Covid-19.
A classificação é realizada em 5 categorias de sentimentos:
* **Extremely Positive** (_Extremamente Positivo_).
* **Positive** (_Positivo_).
* **Neutral** (_Neutro_).
* **Negative** (_Negativo_).
* **Extremely Negative** (_Extremamente Negativo_).
"""
app = FastAPI(
title="PredictAPI",
description=description,
version="0.0.1",
terms_of_service="http://example.com/terms/",
contact={
"name": "Jessica Cardoso",
"url": "https://github.com/jessicacardoso/",
"email": "jessicasousa.pc@gmail.com",
},
license_info={
"name": "Apache 2.0",
"url": "https://www.apache.org/licenses/LICENSE-2.0.html",
},
)
class Tweet(BaseModel):
    # Raw tweet text to classify (request body field).
    text: str
# Tokenizer fitted at training time; must match the model's vocabulary.
with open('tokenizer.pkl', 'rb') as f:
    tokenizer = pickle.load(f)
# Trained Keras classifier and the label order of its softmax output
# (alphabetical — presumably from the training label encoder; verify).
model = keras.models.load_model('tweets.keras')
sentiment_list = ['Extremely Negative', 'Extremely Positive', 'Negative', 'Neutral', 'Positive']
@app.post("/tweet_sentiment")
def predict_sentiment(tweet: Tweet):
"""Informe o tweet o qual deseja obter uma predição."""
X = tokenizer.texts_to_sequences([tweet.text])
X = pad_sequences(X, maxlen=60)
y_pred = model.predict(X)
return {"sentiment": sentiment_list[np.argmax(y_pred)]}
|
992,526 | 86ca107933c58dbad8c1528ab1e420e006cb9598 | # def f(a,b):M=max(a,b);return(a-b)*(-1)**M+M*M-M+1
def f(x, y):
    """Return the 1-based index of cell (x, y) in the boustrophedon diagonal
    enumeration of the grid (direction alternates with the parity of
    max(x, y))."""
    m = max(x, y)
    base = m * m - m + 1          # index of the diagonal's corner cell
    if x == y:
        return base
    offset = abs(x - y)
    if x > y:
        return base - offset if x & 1 else base + offset
    return base + offset if y & 1 else base - offset
def solve():
    """Read one "x y" query from stdin and print f(x, y)."""
    x, y = (int(tok) for tok in input().split())
    print(f(x, y))
if __name__ == "__main__":
tt = int(input())
while tt > 0:
solve()
tt -= 1
|
992,527 | 78b6472770d95fa469f7accbc2282d21fbaf1f2e | import os
import copy
import inspect
import numpy as np
##---WP2017---##
from WPandCut2018 import *
# Register a derived quantity: count of clean generator-level jets, implemented
# in the compiled helper ngenjet.cc (loaded via ROOT ACLiC — the trailing '+'
# triggers compilation).
aliases['nCleanGenJet'] = {
    'linesToAdd': ['.L '+os.getcwd()+'/ngenjet.cc+'
    ],
    'class': 'CountGenJet',
}
|
992,528 | 3a256c57f439bf04e553f126c6b4e3f39476e346 | # Generated by Django 3.1.3 on 2020-11-27 11:09
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an integer ``stock`` field (default 5) to the ``books`` model."""
    dependencies = [
        ('store', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='books',
            name='stock',
            field=models.IntegerField(default=5),
        ),
    ]
|
992,529 | b730a0aebe0cd62ec4b763453c6114b69b1cda2c | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.preprocessing import scale
from matplotlib.colors import ListedColormap
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.datasets import make_moons, make_circles, make_classification
from sklearn.neural_network import MLPClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.gaussian_process import GaussianProcessClassifier
from sklearn.gaussian_process.kernels import RBF
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis
from sklearn.metrics import accuracy_score
# Rows indexed by (Ticker, Fiscal Year, Fiscal Period); target is 'pos_neg',
# features are the remaining numeric columns.
data = pd.read_csv('finance_data.csv', index_col=['Ticker', 'Fiscal Year', 'Fiscal Period'])
print(data.columns)
Y = data.loc[:,'pos_neg']
X = data.drop(columns=['pos_neg', 'shifted_chg', 'report_date'])
X = scale(X.values)
# shuffle=False preserves the chronological row order (time-series style split).
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=.2, shuffle=False)
h = .02  # step size in the mesh (unused in this script)
kernal = 1.0 * RBF(1.0)
gpc = GaussianProcessClassifier(kernel=kernal)
gpc.fit(X_train, y_train)
Z = gpc.predict(X_test)
acc = accuracy_score(y_test, Z)
print(acc)
print(y_test[0:10])
print(Z[0:10])
|
992,530 | 03d31f2dd960d21d9299bf93204b763487cd4165 | # Zbiór przedziałów [(a[1], b[1]), ..., (a[n], b[n])], każdy przedział należy do [0, 1]. Opisać algorytm
# który sprawdzi czy jest możliwy taki wybór przedziałów, aby cały przedział [0, 1] zawierał się
# w wybranych odcinkach. Przedział ma składać się z jak najmniejszej ilości odcinków.
def minimum_intervals(T):
    """Greedily pick a minimal-count subset of intervals covering [0, 1].

    Sorts *T* in place by start point; at each step extends coverage with the
    interval that starts within the covered prefix and reaches furthest.

    NOTE(review): assumes a full cover exists — gaps are not detected, the
    greedy walk simply continues past them.
    """
    T.sort(key=lambda iv: iv[0])
    chosen = []
    covered = 0
    idx = 0
    n = len(T)
    while idx < n and covered != 1:
        best_start, best_end = T[idx][0], T[idx][1]
        advanced = False
        # Scan every interval starting inside the covered prefix and keep
        # the one reaching furthest to the right.
        while idx != n and T[idx][0] <= covered:
            if best_end < T[idx][1]:
                best_start, best_end = T[idx][0], T[idx][1]
            idx += 1
            advanced = True
        if not advanced:
            idx += 1
        chosen.append((best_start, best_end))
        covered = best_end
    return chosen
# Demo input: overlapping sub-intervals of [0, 1].
T = [[0, 0.4], [0, 0.35], [0.2, 0.6], [0.4, 0.6], [0.5, 0.6], [0.1, 0.9], [0.85, 1], [0.9, 1],
     [0.3, 0.4], [0.35, 0.4], [0.2, 0.75], [0.4, 1], [0.55, 1], [0.6, 1], [0.9, 1]]
print(minimum_intervals(T))
|
992,531 | 82ab32bf882b9d33182e9238c5f0d129526ba913 | # -*- encoding: utf-8 -*-
# Module iaihwt
from numpy import *
def iaihwt(f):
    """Inverse Haar wavelet transform of a 1-D or 2-D array.

    Applies A' * f (column transform) and, for 2-D input, A' * f * B where
    A and B are Haar matrices sized to the row and column counts.
    """
    from iahaarmatrix import iahaarmatrix
    g = asarray(f).astype(float64)
    if len(g.shape) == 1:
        g = g[:, newaxis]          # promote a vector to a single column
    rows, cols = g.shape
    A = iahaarmatrix(rows)
    if cols == 1:
        return dot(transpose(A), g)
    B = iahaarmatrix(cols)
    return dot(dot(transpose(A), g), B)
|
992,532 | 15545a63e72273faa321f9004e26e809fd9560ab | #####################################
### Woodall Numbers ###
#####################################
#Formula: W(n) = n * 2**n − 1 (i.e. n·2ⁿ − 1), with n ≥ 1.
#Example : {1, 7, 23, 63, 159, 383, 895, 2047, 4607, ...}
# Read n and print the nth Woodall number W(n) = n * 2**n - 1.
find_val = int(input("Enter the nth value : "))
print(find_val,"th value of Woodall Number is ",find_val*(2**find_val)-1)
|
992,533 | d8d1da0fc372debd1e6ed9f4137743dcc3b798d5 | import cv2
img= cv2.imread("E:/img/spiderman.jpg",cv2.IMREAD_UNCHANGED)
print('Original Dimensions:',img.shape)
'''
The folder structure may vary from individual machines , please
replace the write and read path accordingly
'''
#Downscaling
scale_percent = 60 # percent of original size
width = int(img.shape[1] * scale_percent / 100)
height = int(img.shape[0] * scale_percent / 100)
dim = (width, height)
# resize image in down scale
resized = cv2.resize(img, dim, interpolation = cv2.INTER_AREA)
print('Resized Dimensions : ',resized.shape)
cv2.imwrite('E:/img/resize.jpg', resized)
#upscaling
scale_percent = 220 # percent of original size
width = int(img.shape[1] * scale_percent / 100)
height = int(img.shape[0] * scale_percent / 100)
dim = (width, height)
# resize image in up scale
resized_up = cv2.resize(img, dim, interpolation = cv2.INTER_AREA)
print('Resized Dimensions : ',resized_up.shape)
cv2.imwrite('E:/img/resize_upscale.jpg', resized_up)
#resize only width
width = 440
height = img.shape[0] # keep original height
dim = (width, height)
# resize image
resized_w = cv2.resize(img, dim, interpolation = cv2.INTER_AREA)
print('Resized Dimensions : ',resized_w.shape)
cv2.imwrite('E:/img/resize_width.jpg', resized_w)
#resize only height
width = img.shape[1] # keep original width
height = 440
dim = (width, height)
# resize image
resized_h = cv2.resize(img, dim, interpolation = cv2.INTER_AREA)
print('Resized Dimensions : ',resized_h.shape)
cv2.imwrite('E:/img/resize_height.jpg', resized_h)
#resize width and height
width = 350
height = 450
dim = (width, height)
# resize image
resized_hw = cv2.resize(img, dim, interpolation = cv2.INTER_AREA)
print('Resized Dimensions : ',resized_hw.shape)
cv2.imwrite('E:/img/resize_hw.jpg', resized_hw)
cv2.waitKey(0)
cv2.destroyAllWindows()
|
992,534 | 8ff11c2ac72bd4a40d0f8cebd3c8141fc988a264 | #!/usr/bin/env python3
from abc import *
from virtualmachine.machine import Machine
class Instruction:
    """Base class for virtual-machine instructions.

    Subclasses implement execute() and declare the opcode pattern they decode
    from via opcode_format().
    """

    def __init__(self):
        # Register operands decoded from the opcode, in operand order.
        self.arg_registers = []
        # Immediate constant, when the opcode format carries one.
        self.arg_constant = None

    @abstractmethod
    def execute(self, machine: "Machine"):
        """Apply this instruction to *machine*.

        BUG FIX: `raise NotImplemented` raised a TypeError (NotImplemented is
        a comparison sentinel, not an exception); NotImplementedError is the
        correct exception.  The annotation is quoted so the class can be
        defined without importing Machine.
        """
        raise NotImplementedError

    @staticmethod
    @abstractmethod
    def opcode_format() -> str:
        """Return the textual opcode pattern this instruction matches."""
        raise NotImplementedError

    def vx(self):
        """First register operand, or None when absent."""
        return self.arg_registers[0] if len(self.arg_registers) >= 1 else None

    def vy(self):
        """Second register operand, or None when absent."""
        return self.arg_registers[1] if len(self.arg_registers) >= 2 else None
# Marker base class: instructions that change the program counter derive from
# JumpInstruction so the dispatcher can detect them by isinstance().
class JumpInstruction(Instruction):
    pass
|
992,535 | 953b03f33578ecbc31efef5f2419ef0a787f2d8c | # Low Frequency PWM Driver to control High Inertia Devices
#
# See GitHub: https://github.com/mchobby/esp8266-upy/tree/master/lfpwm
#
#
# Compatible with:
# * Raspberry-Pico : using the only Timer() available.
#
from lfpwm import LowFreqPWM
from machine import Pin
from os import uname
# User LED on Pico
led = None
if uname().sysname == 'rp2': # RaspberryPi Pico
    led = Pin( 25 )
else:
    raise Exception( 'Oups! Not test for plateform %s' % uname().sysname )
# Setup pwm: software PWM with a 2.5 s period for high-inertia loads.
pwm = LowFreqPWM( pin=led, period=2.5 ) # 2.5s
# NOTE(review): 65535 / 2 is a float under MicroPython's true division;
# confirm duty_u16 accepts floats or use 65535 // 2.
pwm.duty_u16( 65535 / 2 ) # 50% duty cycle
# pwm.duty_u16( 0 ) # always Off
# pwm.duty_u16( 65535 ) # always On
# pwm.deinit() # stop everything
|
992,536 | d8f751ccd669d1ac77194b5507ecbaf7e6f0ecc9 | P=int(input())
if P==1:
print("One")
elif P==2:
print("Two")
elif P==3:
print("Three")
elif P==4:
print("Four")
elif P==5:
print("Five")
elif P==6:
print("Six")
elif P==7:
print("Seven")
elif P==8:
print("Eight")
elif P==9:
print("Nine")
else:
print("Ten")
#j
|
992,537 | eaddb721b602d6a17b430eae29572c00a14c1485 | print("a little something to post to git") |
992,538 | 8b64be9482b546cf00c052b94766d3ecd921ca2b |
#from system_parameter import lang_tensor
import system_parameter
# Select the iTensor backend (pure Python vs compiled Cython) from the
# project configuration; NOTE: this module is Python 2 (print statements).
which = system_parameter.SystemParam.lang_tensor
#which = "cython"
if which == "py":
    from tensor_py import *
elif which == "cython":
    print "using cython implementation of iTensor"
    # pyximport compiles .pyx modules on import, enabling the next import line.
    import pyximport
    pyximport.install() # above two lines are necessary, but why?
    from tensor_pyx import *
    #from tensor_pyx import test_iTensor
else:
    print "wrong module to import, see tensor.py"
    exit()
|
992,539 | 9e5f65e62f18a179a98f76467395aed2818ead75 |
import sys
sys.path.append('e:\\art\\code\\deepArt-generation\\source')
import config, utils
from scipy.misc import imsave,imresize
import numpy as np
from skimage import io
from skimage.transform import resize
import os
import matplotlib.pyplot as plt
out_path = 'e:\\art\\wikiportraits'
path = config.datafile('portrait-sel')
# using directory with hand-selected images
print("Reading images...")
# NOTE: `|` here is a bitwise OR on bools — works, but `or` would be idiomatic.
all_images_fn = [x for x in os.listdir(path) if x.endswith(".jpg") | x.endswith(".png") | x.endswith(".jpeg")]
all_images = []
ratios = []
for fn in all_images_fn:
    # Filenames are "<number>-...": skip ids above 8000 — TODO confirm why.
    n = int(fn[:fn.find('-')])
    if n>8000: continue
    try:
        im = io.imread(os.path.join(path, fn))
        # Keep only images at least 256x256.
        if im.shape[0] >= 256 and im.shape[1] >=256:
            all_images.append(im)
            ratios.append(im.shape[0]/im.shape[1])
    except:
        print("Error reading {}".format(fn))
plt.hist(ratios,bins=30)
# Keep roughly portrait-shaped images (height/width near 1.29).
sel_images = [x for x in all_images if 1.23 <= x.shape[0]/x.shape[1] <= 1.35]
print(len(sel_images))
for i,im in enumerate(sel_images):
    # im = resize(image=im, output_shape=(128,128), mode="reflect")
    im = imresize(im, (128,128))
    imsave(os.path.join(out_path, str(i) + '.png'), im)
# Down-sample to 32x32 RGB and drop anything with an unexpected shape.
sel_images = list(filter(lambda x: x.shape==(32,32,3),map(lambda x: imresize(x,(32,32)),sel_images)))
np.savez_compressed(os.path.join(out_path,'wikiportraits.npz'),imgs=sel_images)
|
992,540 | 8ef5d224ec6c429c30ab346dc2bb8b3751d9a280 | #!/usr/bin/env python
import sys,time
import json,glob,os,re,itertools
from itertools import izip_longest
from optparse import OptionParser
from collections import OrderedDict
def grouper(iterable, n, fillvalue=None):
    "Collect data into fixed-length chunks or blocks"
    # The same iterator repeated n times makes izip_longest pull n items per
    # output tuple; the last tuple is padded with fillvalue.
    args = [iter(iterable)] * n
    return izip_longest(fillvalue=fillvalue, *args)
def splitjson(inputjson,basefilename,workdir,nitems):
'''
split basefilename into pieces with nitems per piece
'''
[basebase,basesuffice]=basefilename.split('.')
outputfilenamebase=basebase+'_{0}.'+basesuffice
totitems=len(inputjson.keys())
nblocks=int(totitems/nitems)
l=totitems%nblocks
if l: nblocks=nblocks+1
print ' populating %s, will write into %d files '%(workdir,nblocks)
for i, group in enumerate(grouper(inputjson, nitems)):
outputfile = open(os.path.join(workdir,outputfilenamebase.format(i)), 'w')
if not group: continue
submap = dict([(k,inputjson[k]) for k in group if k is not None])
json.dump(submap,outputfile)
def findfiles(workdir, basefilename):
    '''
    List files in workdir matching <base>_<n>.<suffix>, where basefilename
    is "<base>.<suffix>" and <n> is one or more digits.
    '''
    base, suffix = basefilename.split('.')
    pattern = re.compile('^' + base + '_\d+.' + suffix + '$')
    return [name for name in os.listdir(workdir) if re.search(pattern, name)]
class DictDiffer(object):
    """
    Compare two dictionaries and report:
    (1) keys added, (2) keys removed,
    (3) shared keys with changed values, (4) shared keys with equal values.
    """
    def __init__(self, current_dict, past_dict):
        self.current_dict = current_dict
        self.past_dict = past_dict
        self.set_current = set(current_dict.keys())
        self.set_past = set(past_dict.keys())
        self.intersect = self.set_current.intersection(self.set_past)
    def added(self):
        # keys present now but absent from the past dict
        return self.set_current - self.intersect
    def removed(self):
        # keys present in the past dict but gone now
        return self.set_past - self.intersect
    def changed(self):
        # shared keys whose values differ
        return set(k for k in self.intersect
                   if self.past_dict[k] != self.current_dict[k])
    def unchanged(self):
        # shared keys whose values are equal
        return set(k for k in self.intersect
                   if self.past_dict[k] == self.current_dict[k])
class FileSetJson(object):
    """
    manipulate json files with the same basename pattern in specified directory
    """
    # NOTE: Python 2 code (has_key, dict .items() concatenation, print stmts).
    def __init__(self,filebasename,workdir):
        [self.basefilepattern,self.basefilesuffix]=filebasename.split('.')
        self.workdir=workdir
        filenames=findfiles(workdir,filebasename)
        self.filenames=[os.path.join(workdir,x) for x in filenames]
        # key -> file containing it; merged view; per-file view; pending ops.
        self.keyfilemap={}
        self.totaljson={}
        self.filejson={}
        self.changedfiles={'+':{},'-':{},'o':{}}
        self.updatecache()
    def files(self):
        # Absolute paths of all files currently in the set.
        return self.filenames
    def updatecache(self):
        '''
        build {key:filename_in_fileset}
        '''
        if self.changedfiles.has_key('+'):
            self.filenames=self.filenames+self.changedfiles['+'].keys()
        self.changedfiles={'+':{},'-':{},'o':{}}#reset status
        for myf in self.filenames:
            myfile=open(myf,'r')
            # Strip whitespace/newlines before parsing.
            myfilestr=myfile.read().replace('\n','').replace(' ','')
            #myjson=json.loads(myfilestr)
            #
            #load with sorted keys, use object_pairs_hook=collections.OrderedDict requiring python2.7+
            #
            myjson=json.loads(myfilestr,object_pairs_hook=OrderedDict)
            myfile.close()
            self.totaljson=dict(self.totaljson.items()+myjson.items())
            self.filejson[myf]=myjson
            for key in myjson.keys():
                self.keyfilemap[key]=myf
    def asJSON(self,filename=None):
        '''
        files in set as one json
        if filename == None, all set as one json
        '''
        if filename is None:
            return self.totaljson
        else:
            return self.filejson[filename]
    def removeItems(self,pieces):
        '''
        remove json item specified by key from file it belongs
        '''
        for key,value in pieces.items():
            filewithkey=self.keyfilemap[key]
            del self.filejson[filewithkey][key]
            del self.totaljson[key]
            # Record the removal for reporting/writing later.
            self.changedfiles['-'].setdefault(filewithkey,[]).append((key,value))
    def addItem(self,pieces):
        '''
        add new json item to file
        '''
        filenames=sorted(self.filenames)
        maxlastfile=filenames[-1]
        p=re.compile(self.basefilepattern+'_'+'(\d+)\.'+self.basefilesuffix)
        lastmax=p.search(maxlastfile)
        if lastmax:
            lastmax=int(lastmax.groups()[0])
        # NOTE(review): if the pattern does not match, mynum below is
        # undefined (NameError) — confirm set files always match the pattern.
        mynum=lastmax+1
        newfilename=(os.path.join(self.workdir,self.basefilepattern)+'_{0}.'+self.basefilesuffix).format(str(mynum))
        for key,value in pieces.items():
            self.keyfilemap[key]=newfilename
            self.changedfiles['+'].setdefault(newfilename,[]).append((key,value))
        self.filejson[newfilename]=pieces
        self.totaljson=dict(self.totaljson.items()+pieces.items())
    def updateItem(self,item):
        '''
        update item on the spot
        '''
        for key,value in item.items():
            filename=self.keyfilemap[key]
            self.filejson[filename][key]=item[key]
            self.totaljson[key]=item[key]
            self.changedfiles['o'].setdefault(filename,[]).append((key,value))
    def writeChangedFiles(self):
        '''
        materialize all the changes to disk
        '''
        print 'updated files in %s'%self.workdir
        print 'changes reported in %s,%s'%(os.path.join(self.workdir,'jsonchange.summary'),os.path.join(self.workdir,'jsonchange.detail'))
        for filedelta in self.changedfiles.values():
            filenames=filedelta.keys()
            for filename in filenames:
                # NOTE(review): outfile handles are never closed — confirm.
                outfile=open(filename,'w')
                json.dump(self.filejson[filename],outfile)
    def fileChanged(self):
        '''
        format: {'+':{filename:[(key,value)],'-':{filename:[(key,value)],'o':{filename:[(key,value)]}}
        '''
        return self.changedfiles
    def reportChanges(self):
        '''
        summary of total changes
        will writeout 2 report files
        jsonchange.summary
        jsonchange.detail
        '''
        summaryfilename=os.path.join(self.workdir,'jsonchange.summary')
        detailfilename=os.path.join(self.workdir,'jsonchange.detail')
        fchanged=self.changedfiles.values()
        # Flatten the per-op dicts into the list of touched filenames.
        flatflist=list(itertools.chain.from_iterable(fchanged))
        if len(flatflist)==0:
            print 'no change in json file'
            return 0
        # Rotate previous reports to *.bak before writing fresh ones.
        if os.path.exists(summaryfilename):
            os.rename(summaryfilename,summaryfilename+'.bak')
        summaryfile=open(summaryfilename,'w')
        for f in flatflist:
            summaryfile.write(f+'\n')
        summaryfile.close()
        if os.path.exists(detailfilename):
            os.rename(detailfilename,detailfilename+'.bak')
        detailfile=open(detailfilename,'w')
        for optype,opfiles in self.changedfiles.items():
            detailfile.write(' %s :\n'%optype)
            for opfilename,opfilechanged in opfiles.items():
                opcontent=dict(opfilechanged)
                detailfile.write('   %s: %s\n'%(opfilename,str(opcontent)))
        detailfile.close()
        return len(flatflist)
if __name__ == '__main__':
    '''
    watch a reference json file in a workdir
    initialize workdir: if workdir empty, populate workdir with splited reference json into n pieces:
    maintain workdir: run command regularly in workdir, the command reports the changes in the reference json and adjust the working dir accordingly
    report format:
    summary report: list of files changed
    detail report: details of each exact chunck changed
    old report files are changed to *.bak
    jsonwatcher -d workdir -n nentries-per-file -i referencejsonfile
    '''
    usage = 'usage: %prog [options]'
    parser = OptionParser(usage)
    parser.add_option('-i','--input',type='string',dest='inputfilename',help='input reference file name.')
    parser.add_option('-d','--dir',type='string',dest='workdir',default=os.getcwd(),help='workdir')
    parser.add_option('-n','--nitems',type='int',dest='nitems',default=100,help='split reference json with nitems per piece')
    parser.add_option('--dryrun',dest='isdryrun',action='store_true',default=False,help='dry run mode, not to implement the changes in the mirror json files')
    (options, args) = parser.parse_args()
    isdryrun=False
    isdryrun=options.isdryrun
    inputfilename=None
    if options.inputfilename is None:
        print 'mandatory option -i is missing\n'
        parser.print_help()
        sys.exit(-1)
    inputfilename=options.inputfilename
    referencefilename=os.path.basename(inputfilename)
    outputjsonfiles=findfiles(options.workdir,referencefilename)
    inputstr=open(inputfilename,'r').read().replace('\n','').replace(' ','')
    #inputjson=json.loads(inputstr)
    #
    #load with sorted keys, use object_pairs_hook=collections.OrderedDict requiring python2.7+
    #
    inputjson=json.loads(inputstr,object_pairs_hook=OrderedDict)
    # First run: the workdir is empty, so just split the reference and exit.
    if len(outputjsonfiles)==0:
        splitjson(inputjson,referencefilename,options.workdir,options.nitems)
        sys.exit(0)
    jsonwatcher=FileSetJson(referencefilename,options.workdir)
    myfilenames=jsonwatcher.files()[:]#deep copy!
    mytotaljson=jsonwatcher.asJSON()
    # Diff the merged on-disk state against the reference file.
    d=DictDiffer(mytotaljson,inputjson)
    removedkeys=list(d.added())#note: it's reverse! added by me means *removed by reference file*
    removeditems={}
    for k in removedkeys:
        removeditems[k]=mytotaljson[k]
    addedkeys=list(d.removed())#note: it's reverse! removed by me means added by reference file
    addeditems={}
    for k in addedkeys:
        addeditems[k]=inputjson[k]
    changedkeys=[]
    #print mytotaljson
    for myfilename in myfilenames:#loop over on disk files
        myjson=jsonwatcher.asJSON(myfilename)
        #print myjson
        d=DictDiffer(myjson,inputjson)
        changeditems=d.changed()
        if changeditems:
            changedkeys=changedkeys+list(changeditems)#changed item means change in the content of a key
    modifieditems={}
    for k in changedkeys:
        modifieditems[k]=inputjson[k]
    # Apply the three kinds of delta to the file set, then report.
    if addeditems:
        jsonwatcher.addItem(addeditems)
    if removeditems:
        jsonwatcher.removeItems(removeditems)
    if modifieditems:
        jsonwatcher.updateItem(modifieditems)
    totchanged=jsonwatcher.reportChanges()
    if not isdryrun:
        if totchanged !=0:
            jsonwatcher.writeChangedFiles()
|
992,541 | f272eaa4d7e9c1a6420b0b8d1cbcb463250bda5a | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '使用资源文件设置背景.ui'
#
# Created by: PyQt5 UI code generator 5.15.2
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Test(object):
    """Auto-generated UI layout (pyuic5); edits are lost on regeneration."""
    def setupUi(self, Test):
        # Builds the login form: background image, account/password fields,
        # login/quit buttons, menu bar and status bar.
        Test.setObjectName("Test")
        Test.resize(551, 384)
        self.centralwidget = QtWidgets.QWidget(Test)
        self.centralwidget.setStyleSheet("border-image: url(:/png/Kasumigaoka1.jpg);")
        self.centralwidget.setObjectName("centralwidget")
        self.pushButton_3 = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton_3.setGeometry(QtCore.QRect(200, 230, 93, 28))
        self.pushButton_3.setObjectName("pushButton_3")
        self.label = QtWidgets.QLabel(self.centralwidget)
        self.label.setGeometry(QtCore.QRect(80, 80, 72, 15))
        self.label.setObjectName("label")
        self.label_2 = QtWidgets.QLabel(self.centralwidget)
        self.label_2.setGeometry(QtCore.QRect(80, 150, 72, 15))
        self.label_2.setObjectName("label_2")
        self.pushButton = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton.setGeometry(QtCore.QRect(330, 230, 93, 28))
        self.pushButton.setObjectName("pushButton")
        self.lineEdit = QtWidgets.QLineEdit(self.centralwidget)
        self.lineEdit.setGeometry(QtCore.QRect(170, 80, 271, 21))
        self.lineEdit.setObjectName("lineEdit")
        self.lineEdit_2 = QtWidgets.QLineEdit(self.centralwidget)
        self.lineEdit_2.setGeometry(QtCore.QRect(170, 140, 271, 21))
        self.lineEdit_2.setObjectName("lineEdit_2")
        Test.setCentralWidget(self.centralwidget)
        self.menubar = QtWidgets.QMenuBar(Test)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 551, 26))
        self.menubar.setObjectName("menubar")
        Test.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(Test)
        self.statusbar.setObjectName("statusbar")
        Test.setStatusBar(self.statusbar)
        # Semi-transparent window.
        Test.setWindowOpacity(0.5)
        self.retranslateUi(Test)
        QtCore.QMetaObject.connectSlotsByName(Test)
    def retranslateUi(self, Test):
        # Installs the (translatable) display strings.
        _translate = QtCore.QCoreApplication.translate
        Test.setWindowTitle(_translate("Test", "MainWindow"))
        self.pushButton_3.setText(_translate("Test", "登录"))
        self.label.setText(_translate("Test", "账号:"))
        self.label_2.setText(_translate("Test", "密码:"))
        self.pushButton.setText(_translate("Test", "退出"))
import img_rc
import sys
def show_MainWindow():
    """Create the QApplication, build the main window from Ui_Test and hand
    control to the Qt event loop."""
    app = QtWidgets.QApplication(sys.argv)   # GUI program entry point
    main_window = QtWidgets.QMainWindow()
    ui = Ui_Test()
    ui.setupUi(main_window)                  # install the generated layout
    main_window.setWindowFlags(QtCore.Qt.Dialog)
    main_window.show()
    sys.exit(app.exec_())                    # exit when the event loop ends
if __name__ == "__main__":
show_MainWindow()
|
992,542 | 91ffa406851185e2cb98aec0e0f0c229490768a1 | # -*- coding: utf-8 -*-
"""
Created on Tue Nov 27 14:05:18 2018
@author: fangyucheng
"""
import hashlib
import uuid
import copy
import urllib
import time
import datetime
import requests
from crawler.crawler_sys.framework.video_fields_std import Std_fields_video
from crawler.crawler_sys.utils.trans_duration_str_to_second import trans_duration
from crawler.crawler_sys.site_crawler import crawler_v_qq
from crawler.crawler_sys.utils.output_results import output_result,retry_get_url
import re,json
try:
from crawler_sys.framework.func_get_releaser_id import *
except:
from func_get_releaser_id import *
from crawler.crawler_sys.proxy_pool.func_get_proxy_form_kuaidaili import get_proxy
class Crawler_Tencent_News():
    def __init__(self, platform='腾讯新闻'):
        """Crawler for Tencent News; sets up the fixed device fingerprint,
        request headers and the standard video-data template."""
        self.platform = platform
        # Fixed device id / app version used by the mobile API endpoints.
        self.devid = '008796749793280'
        # self.appver = '23_android_5.4.10'
        # self.devid = "7313ae71df9e5367",
        self.appver = "23_android_5.8.00"
        self.qnrid = str(uuid.uuid4())
        self.headers = {"Host": "r.inews.qq.com",
                        "Accept-Encoding": "gzip,deflate",
                        "Referer": "http://inews.qq.com/inews/android/",
                        "User-Agent": "%E8%85%BE%E8%AE%AF%E6%96%B0%E9%97%BB5410(android)",
                        "Cookie": "lskey=;luin=;skey=;uin=; logintype=0; main_login=qq;",
                        "Connection": "Keep-Alive"}
        # Standard video record with fields this crawler never fills removed.
        self.video_data = Std_fields_video().video_data
        self.video_data['platform'] = self.platform
        untouch_key_lst = ['channel', 'describe', 'isOriginal', 'repost_count']
        for key in untouch_key_lst:
            self.video_data.pop(key)
        self.crawler_video_page = crawler_v_qq.Crawler_v_qq().video_page
        # Channel name -> channel id for list pages.
        self.list_page_dict = {"体育": "8"}
    def get_releaser_id(self, releaserUrl):
        # Delegates to the module-level helper of the same name.
        return get_releaser_id(platform=self.platform, releaserUrl=releaserUrl)
def forllower_num_to_int(self, num):
if str(num)[-1:] == "万":
return int(float(num[:-1]) * 1e4)
elif isinstance(num, float):
return num
elif isinstance(num, int):
return num
    def get_releaser_follower_num(self, releaserUrl):
        """Fetch the subscriber count and avatar for a releaser via the
        getSubItem endpoint; returns (follower_num, releaser_img) on success.

        NOTE(review): the bare except below swallows every failure and makes
        the method implicitly return None (not a 2-tuple) — callers that
        unpack the result will raise; confirm and narrow the except.
        """
        # Device/session fingerprint sent as query parameters — values are
        # fixed captures from an Android client.
        head = {
            "chlid": self.get_releaser_id(releaserUrl),
            "media_openid": "",
            "is_special_device": "0",
            "mid": "0",
            "dpi": "270.0",
            "is_chinamobile_oem": "0",
            "qqnetwork": "wifi",
            "rom_type": "V417IR release-keys",
            "real_device_width": "3.0",
            "net_proxy": "DIRECT@",
            "net_bssid": "01:80:c2:00:00:03",
            "currentChannelId": "news_video_child_newRecommend",
            "isElderMode": "0",
            "apptype": "android",
            "islite": "0",
            "hw": "Xiaomi_MINOTE3",
            "global_session_id": "1558345020297",
            "screen_width": "810",
            "omgbizid": "",
            "sceneid": "",
            "videoAutoPlay": "1",
            "imsi": "460063005313888",
            "fix_store": "",
            "isoem": "0",
            "currentTabId": "news_live",
            "lite_version": "",
            "net_slot": "0",
            "startTimestamp": "1558345020",
            "pagestartfrom": "icon",
            "mac": "mac unknown",
            "activefrom": "icon",
            "net_ssid": "NemuWiFi",
            "store": "10611",
            "screen_height": "1440",
            "extinfo": "",
            "real_device_height": "5.33",
            "origin_imei": "261721032526201",
            "network_type": "wifi",
            "origCurrentTab": "live",
            "global_info": "1|1|1|1|1|14|4|1|0|6|1|1|1||0|J309P000000000:J601P000000000:A401P000050901:J401P100000000:J304P000000000:J701P000000000:B703P000062002:B704P000064803:J702P000000000:B064P000065702:J267P000000000:B060P000066504:A403P000070903:J055P000000000:A402P000060701:B402P200065202:B054P100068903:A054P200070501:A054P600071201:A054P300068801:J054P000000000:J054P040000000|1414|0|1|0|0|0|0|0||3|3|1|1|1|1|1|1|-1|0|0|2|2|0|0|0|4|0|0|1|0|5|2|0|0|3|0|0|1|0|1|1|0|0|1|0|4|0|1|2|11|20|1|0|1|0|0|30|1|4|0|0|3|40|0|51|60|0|0|0|0|0",
            "imsi_history": "460063005313888",
            "net_apn": "0",
            "uid": "7313ae71df9e5367",
            "omgid": "",
            "trueVersion": "5.8.00",
            "qimei": "261721032526201",
            "devid": "7313ae71df9e5367",
            "appver": "23_android_5.8.00",
            "Cookie": "lskey=;skey=;uin=; luin=;logintype=0; main_login=",
        }
        try:
            url = "https://r.inews.qq.com/getSubItem?%s" % urllib.parse.urlencode(head)
            res = requests.get(url)
            res_json = res.json()
            # print(res_json)
            follower_num = self.forllower_num_to_int(res_json.get("channelInfo").get("subCount"))
            releaser_img = self.get_releaser_image(data=res_json)
            print('%s follower number is %s' % (releaserUrl, follower_num))
            return follower_num, releaser_img
        except:
            print("can't find followers")
def get_releaser_image(self, releaserUrl=None, data=None):
if releaserUrl:
head = {
"chlid": self.get_releaser_id(releaserUrl),
"media_openid": "",
"is_special_device": "0",
"mid": "0",
"dpi": "270.0",
"is_chinamobile_oem": "0",
"qqnetwork": "wifi",
"rom_type": "V417IR release-keys",
"real_device_width": "3.0",
"net_proxy": "DIRECT@",
"net_bssid": "01:80:c2:00:00:03",
"currentChannelId": "news_video_child_newRecommend",
"isElderMode": "0",
"apptype": "android",
"islite": "0",
"hw": "Xiaomi_MINOTE3",
"global_session_id": "1558345020297",
"screen_width": "810",
"omgbizid": "",
"sceneid": "",
"videoAutoPlay": "1",
"imsi": "460063005313888",
"fix_store": "",
"isoem": "0",
"currentTabId": "news_live",
"lite_version": "",
"net_slot": "0",
"startTimestamp": "1558345020",
"pagestartfrom": "icon",
"mac": "mac unknown",
"activefrom": "icon",
"net_ssid": "NemuWiFi",
"store": "10611",
"screen_height": "1440",
"extinfo": "",
"real_device_height": "5.33",
"origin_imei": "261721032526201",
"network_type": "wifi",
"origCurrentTab": "live",
"global_info": "1|1|1|1|1|14|4|1|0|6|1|1|1||0|J309P000000000:J601P000000000:A401P000050901:J401P100000000:J304P000000000:J701P000000000:B703P000062002:B704P000064803:J702P000000000:B064P000065702:J267P000000000:B060P000066504:A403P000070903:J055P000000000:A402P000060701:B402P200065202:B054P100068903:A054P200070501:A054P600071201:A054P300068801:J054P000000000:J054P040000000|1414|0|1|0|0|0|0|0||3|3|1|1|1|1|1|1|-1|0|0|2|2|0|0|0|4|0|0|1|0|5|2|0|0|3|0|0|1|0|1|1|0|0|1|0|4|0|1|2|11|20|1|0|1|0|0|30|1|4|0|0|3|40|0|51|60|0|0|0|0|0",
"imsi_history": "460063005313888",
"net_apn": "0",
"uid": "7313ae71df9e5367",
"omgid": "",
"trueVersion": "5.8.00",
"qimei": "261721032526201",
"devid": "7313ae71df9e5367",
"appver": "23_android_5.8.00",
"Cookie": "lskey=;skey=;uin=; luin=;logintype=0; main_login=",
}
try:
url = "https://r.inews.qq.com/getSubItem?%s" % urllib.parse.urlencode(head)
res = requests.get(url)
res_json = res.json()
# print(res_json)
releaser_img_url = res_json.get("channelInfo").get("icon")
return releaser_img_url
except:
print("can't get releaser_img_url")
else:
releaser_img_url = data.get("channelInfo").get("icon")
return releaser_img_url
# def search_video_page(self, keyword, releaser_url,releaser=True,
# search_pages_max=30,
# output_to_es_raw=False,
# output_to_es_register=False,
# es_index=None,
# doc_type=None,
# **kwargs):
# """
# This is improved search page crawler, involved search_type.
# When search_type == 'searchMore', it's the same as previous one,
# which is the '综合' column in app search page.
# When search_type == 'verticalSearch', it's is the '视频' column
# in app search page.
# """
#
# def get_list(search_type, page_dict):
# # print(search_type,page_dict)
# if search_type == 'searchMore':
# return page_dict['data']['secData']
# elif search_type == 'verticalSearch':
# try:
# return page_dict['secList'][0].get('videoList')
# except:
# return []
# else:
# print('unknow search_type:', search_type)
# return None
#
# search_request_dict = {
# 'verticalSearch': {
# 'url_prefix': 'http://r.inews.qq.com/verticalSearch?',
# 'para_dict': {
# "chlid": "_qqnews_custom_search_video",
# "uid": "7313ae71df9e5367",
# "omgid": "",
# "trueVersion": "5.8.00",
# "qimei": "379317519303213",
# "devid": "008796749793280",
# "appver": "23_android_5.8.00",
# "Cookie": "lskey=;skey=;uin=; luin=;logintype=0; main_login=;",
# },
# }
# }
#
# headers = {"Host": "r.inews.qq.com",
# "Accept-Encoding": "gzip",
# "Referer": "http://inews.qq.com/inews/android/",
# "Content-Type": "application/x-www-form-urlencoded",
# "User-Agent": "%E8%85%BE%E8%AE%AF%E6%96%B0%E9%97%BB5800(android)",
# "RecentUserOperation": "2_GuidePage,1_news_background,1_news_news_top",
# "Connection": "Keep-Alive"}
#
# body = {
# "search_type": "video",
# "query": keyword,
# "page": "1",
# "type": "0",
# "transparam": '{"sessionid":"2015601560736100"}',
# "search_from": "click",
# "cp_type": "0",
# "is_special_device": "0",
# "mid": "0",
# "dpi": "270.0",
# "is_chinamobile_oem": "0",
# "qqnetwork": "wifi",
# "rom_type": "V417IR release-keys",
# "real_device_width": "3.0",
# "net_proxy": "DIRECT@",
# "net_bssid": "01:80:c2:00:00:03",
# "currentChannelId": "news_news_top",
# "isElderMode": "0",
# "apptype": "android",
# "islite": "0",
# "hw": "HUAWEI_BLA-AL00",
# "global_session_id": "1560735392163",
# "screen_width": "810",
# "videoAutoPlay": "1",
# "imsi": "460062614015394",
# "isoem": "0",
# "currentTabId": "news_news",
# "net_slot": "0",
# "startTimestamp": "0",
# "pagestartfrom": "icon",
# "mac": "mac unknown",
# "activefrom": "icon",
# "net_ssid": "WiFi",
# "store": "10611",
# "screen_height": "1440",
# "real_device_height": "5.33",
# "origin_imei": "379317519303213",
# "network_type": "wifi",
# "origCurrentTab": "top",
# "global_info": "1|1|1|1|1|14|4|1|0|6|1|1|1||0|J309P000000000:A601P000081702:A401P000050901:J401P100000000:J602P000000000:J304P000000000:B701P000075404:J703P000000000:B704P000085403:J702P000000000:B064P000065702:A267P000074401:B267P100078102:B060P000085902:J403P000000000:J403P100000000:J055P200000000:A402P100080401:J402P000000000:J402P200000000:B054P000061502:A054P600071201:J054P200000000:J054P100000000|1414|0|1|0|0|0|0|0||3|3|1|1|1|1|1|1|-1|0|0|2|2|0|0|0|4|0|0|1|2|5|2|0|0|3|0|0|1|0|1|1|0|0|1|0|4|0|1|2|11|20|1|0|1|0|0|30|1|4|0|1|4|40|0|51|60|0|0|0|0|0|4|0|0|0|0",
# "imsi_history": "460062614015394",
# "net_apn": "0"
# }
# final_lst = []
# for search_type in search_request_dict:
# print(datetime.datetime.now(), '****** search_type', search_type)
# for page in range(1, search_pages_max):
# para_dict = search_request_dict[search_type]['para_dict']
# body['page'] = page
# url_prefix = search_request_dict[search_type]['url_prefix']
# url = url_prefix + urllib.parse.urlencode(para_dict)
#
# get_page = requests.post(url, headers=headers, data=body)
# try:
# page_dict = get_page.json()
# except:
# pass
# else:
# video_lst = get_list(search_type, page_dict)
# if video_lst is not None and video_lst != []:
# for video_dict in video_lst:
# if 'hasVideo' in video_dict:
# try:
# if 'source' not in video_dict:
# # ignore those without 'source' field
# continue
# video_info = copy.deepcopy(self.video_data)
# info_id = video_dict['id']
# video_info['title'] = video_dict['title']
# video_info['url'] = video_dict['url']
# video_info['video_id'] = video_dict['vid']
# video_info['play_count'] = video_dict.get('video_channel').get('video').get(
# 'playcount')
# video_info['releaser'] = video_dict['source']
# try:
# video_info['releaserUrl'] = releaser_url
# video_info['releaser_id_str'] = ""
# except:
# video_info['releaserUrl'] = ""
# video_info['releaser_id_str'] = ""
# video_info['release_time'] = int(video_dict['timestamp'] * 1e3)
# video_info['favorite_count'] = video_dict['likeInfo']
# video_info['comment_count'] = video_dict['comments']
# try:
# dura_str = trans_duration(
# video_dict.get('video_channel').get('video').get('duration'))
# except:
# dura_str = ''
# video_info['duration'] = dura_str
# fetch_time = int(datetime.datetime.now().timestamp() * 1e3)
# video_info['fetch_time'] = fetch_time
# if releaser:
# if keyword == video_info['releaser']:
# final_lst.append(video_info)
# else:
# final_lst.append(video_info)
# # print(video_info)
# print("get video data %s" % video_info['title'])
# except:
# continue
# else:
# print("hasVideo flag is False, no data in this video_dict")
# break
# if len(final_lst) >= 100:
# output_result(result_Lst=final_lst,
# platform=self.platform,
# output_to_es_raw=output_to_es_raw,
# output_to_es_register=output_to_es_register,
# es_index=es_index,
# doc_type=doc_type)
# final_lst.clear()
#
# if final_lst != []:
# output_result(result_Lst=final_lst,
# platform=self.platform,
# output_to_es_raw=output_to_es_raw,
# output_to_es_register=output_to_es_register,
# es_index=es_index,
# doc_type=doc_type)
# return final_lst
def releaser_page(self, releaserUrl,
output_to_file=False, filepath=None,
output_to_es_raw=False,
output_to_es_register=False,
push_to_redis=False,
releaser_page_num_max=30,
es_index=None,
doc_type=None,proxies_num=None):
releaser_id = self.get_releaser_id(releaserUrl)
result_list = []
has_more = True
count = 1
page_time = ""
while has_more and count <= releaser_page_num_max and releaser_id:
url_dic = {
"chlid": releaser_id,
"page_time": page_time,
"coral_uin": "ec8bb1459b9d84100312bf035bb43cd4d0",
"coral_uid": "",
"type": "om",
"uid": "7313ae71df9e5367",
"omgid": "",
"trueVersion": "5.8.00",
"qimei": "287801615436009",
# "devid" :"7313ae71df9e5367",
# "appver" :"23_android_5.8.00",
'devid': self.devid,
'appver': self.appver,
}
post_body = {
"activefrom": "icon",
"apptype": "android",
"article_pos": "0",
"articleID": "ec8bb1459b9d84100312bf035bb43cd4d0_%s" % releaser_id,
"articlepage": "-1",
"articletype": "509",
"articleType": "509",
"articleUUID": "7a7f71aff201a175cbb8b946b1a0ab3b",
"cell_id": "normal_article_cell",
"cityList": "news_news_bj",
"coverType": "0",
"currentChannelId": "news_news_top",
"currentTabId": "news_news",
"dpi": "270.0",
"global_info": "1|1|1|1|1|14|4|1|0|6|1|1|1||0|J309P000000000:J902P000000000:J601P000000000:A601P400109701:A601P200096101:J601P500000000:J601P100000000:J601P600000000:J601P300000000:J603P000000000:J604P000000000:A401P000050901:J401P100000000:J602P000000000:J602P900000000:J304P000000000:J701P000000000:B703P000107302:J704P000000000:B702P000098602:A064P000117303:B085P000087702:B267P000118602:J267P100000000:B073P000120202:A060P000116701:J060P400000000:J060P100000000:J060P016000000:A403P100114101:J403P000000000:J055P000000000:J055P200000000:B402P100095203:J402P000000000:J402P013000000:A054P000101101:A054P600071201:J054P200000000:B901P000117402|1414|0|1|24|24|0|0|0||3|3|1|1|1|1|1|1|-1|0|0|2|2|0|0|0|4|0|0|1|2|5|2|0|0|3|0|0|1|0|1|1|0|0|1|0|4|0|1|1|11|20|1|0|1|1|0|0|1|4|0|1|1|40|0|51|60|0|0|0|0|0|4|0|0|0|0|0|0",
"global_session_id": "1564032931171",
"hasVideo": "0",
"hw": "vivo_VIVOX20Plus",
"id": "ec8bb1459b9d84100312bf035bb43cd4d0_%s" % releaser_id,
"idStr": "ec8bb1459b9d84100312bf035bb43cd4d0_%s" % releaser_id,
"imsi": "460073046925329",
"imsi_history": "460073046925329",
"is_chinamobile_oem": "0",
"is_special_device": "0",
"isAd": "0",
"isCpFocus": "0",
"isElderMode": "0",
"isGifPlayed": "0",
"isHotCommentLink": "0",
"isHotNews": "0",
"isIPSpecialVideo": "0",
"islite": "0",
"isoem": "0",
"mac": "mac unknown",
"mid": "0",
"moduleArticlePos": "0",
"net_apn": "0",
"net_bssid": "49:4a:55:76:75:58",
"net_proxy": "DIRECT@",
"net_slot": "0",
"net_ssid": "IJUvuXkoA8H",
"network_type": "wifi",
"newsID": "ec8bb1459b9d84100312bf035bb43cd4d0_%s" % releaser_id,
"origCurrentTab": "top",
"origin_imei": "287801615436009",
"originPageType": "second_timeline",
"page_type": "second_timeline",
"pageIsIPSpecialVideo": "0",
"pagestartfrom": "icon",
"qqnetwork": "wifi",
"real_device_height": "5.33",
"real_device_width": "3.0",
"realArticlePos": "0",
"rom_type": "V417IR release-keys",
"screen_height": "1440",
"screen_width": "810",
"startTimestamp": "0",
"store": "10611",
#"title": "看看新闻Knews",
"userId": "ec8bb1459b9d84100312bf035bb43cd4d0",
"userMediaId": releaser_id,
"userVipType": "0",
"videoAutoPlay": "1",
"videoBlackBorder": "0",
"videoShowType": "0",
}
post_url = "https://r.inews.qq.com/getUserVideoList?"
url = post_url + urllib.parse.urlencode(url_dic)
if not proxies_num:
get_page = requests.post(url, headers=self.headers, data=post_body)
else:
proxies = get_proxy(proxies_num)
get_page = requests.post(url, headers=self.headers, data=post_body,proxies=proxies)
page_dic = {}
try:
page_dic = get_page.json()
# print(page_dic)
data_list = page_dic.get('newslist')
has_more = page_dic.get('next')
page_time = str(page_dic.get("last_time"))
except:
if data_list:
data_list = page_dic.get('newslist')
has_more = page_dic.get('next')
else:
data_list = []
has_more = False
# offset = page_dic.get('offset')
if has_more is None:
has_more = False
if data_list == []:
print("no data in releaser %s page %s" % (releaser_id, count))
# print(page_dic)
print(url)
count += 1
has_more = False
continue
else:
count += 1
print("craw data in releaser %s page %s" % (releaser_id, count))
for one_video in data_list:
# info_str = one_video.get('content')
try:
video_dic = copy.deepcopy(self.video_data)
video_dic['title'] = one_video.get('title')
video_dic['url'] = one_video.get('url')
video_dic['releaser'] = one_video.get('chlname')
video_dic['releaserUrl'] = releaserUrl
release_time = one_video.get('timestamp')
video_dic['release_time'] = int(release_time * 1e3)
video_dic['duration'] = int(self.t2s(one_video.get('videoTotalTime')))
if not video_dic['duration']:
try:
video_dic['duration'] = int(self.t2s(one_video.get('video_channel').get("video").get("duration")))
except:
video_dic['duration'] = 0
video_dic['play_count'] = one_video.get('video_channel').get("video").get("playcount")
video_dic['repost_count'] = one_video.get('shareCount')
video_dic['comment_count'] = one_video.get('comments')
video_dic['favorite_count'] = one_video.get('likeInfo')
video_dic['video_id'] = one_video.get('vid')
video_dic['fetch_time'] = int(datetime.datetime.now().timestamp() * 1e3)
video_dic['releaser_id_str'] = "腾讯新闻_%s" % releaser_id
video_dic['video_img'] = one_video.get('pic_minivideo')
result_list.append(video_dic)
except Exception as e:
print("error",e)
if len(result_list) >= 100:
# data_count += len(result_list)
# print(result_list)
output_result(result_Lst=result_list,
platform=self.platform,
output_to_file=output_to_file,
filepath=filepath,
output_to_es_raw=output_to_es_raw,
es_index=es_index,
doc_type=doc_type,
output_to_es_register=output_to_es_register)
result_list.clear()
if result_list != []:
# data_count += len(result_list)
# print(result_list)
# print(data_count)
output_result(result_Lst=result_list,
platform=self.platform,
output_to_file=output_to_file,
filepath=filepath,
output_to_es_raw=output_to_es_raw,
es_index=es_index,
doc_type=doc_type,
output_to_es_register=output_to_es_register)
@staticmethod
def t2s(t):
if t:
if len(t) == 5:
t = str(t)
m, s = t.strip().split(":")
return float(m) * 60 + float(s)
elif len(t) >= 7:
t = str(t)
h, m, s = t.strip().split(":")
return float(h) * 3600 + float(m) * 60 + float(s)
else:
return 0
# def search_page(self, keyword,
# search_pages_max=30,
# output_to_es_raw=False,
# output_to_es_register=False,
# es_index=None,
# doc_type=None):
# """
# This is improved search page crawler, involved search_type.
# When search_type == 'searchMore', it's the same as previous one,
# which is the '综合' column in app search page.
# When search_type == 'verticalSearch', it's is the '视频' column
# in app search page.
# """
#
# def get_list(search_type, page_dict):
# # print(search_type,page_dict)
# if search_type == 'searchMore':
# return page_dict['data']['secData']
# elif search_type == 'verticalSearch':
# try:
# return page_dict['secList'][0].get('videoList')
# except:
# return []
# else:
# print('unknow search_type:', search_type)
# return None
#
# search_request_dict = {
# 'verticalSearch': {
# 'url_prefix': 'http://r.inews.qq.com/verticalSearch?',
# 'para_dict': {
# 'devid': self.devid,
# 'appver': self.appver,
# 'query': keyword,
# 'page': None,
# 'search_type': 'video'
# },
# },
# 'searchMore': {
# 'url_prefix': 'http://r.inews.qq.com/searchMore?',
# 'para_dict': {
# 'devid': self.devid,
# 'appver': self.appver,
# 'query': keyword,
# 'page': None,
# }
# },
# }
#
# final_lst = []
# for search_type in search_request_dict:
# print(datetime.datetime.now(), '****** search_type', search_type)
# for page in range(1, search_pages_max):
# para_dict = search_request_dict[search_type]['para_dict']
# para_dict['page'] = page
# url_prefix = search_request_dict[search_type]['url_prefix']
# url = url_prefix + urllib.parse.urlencode(para_dict)
#
# get_page = requests.get(url, headers=self.headers)
# try:
# page_dict = get_page.json()
# except:
# pass
# else:
# video_lst = get_list(search_type, page_dict)
# if video_lst is not None and video_lst != []:
# for video_dict in video_lst:
# if 'hasVideo' in video_dict:
# try:
# if 'source' not in video_dict:
# # ignore those without 'source' field
# continue
# video_info = copy.deepcopy(self.video_data)
# info_id = video_dict['id']
# playcnt_url = ('http://r.inews.qq.com/getSimpleNews'
# '/23_android_5.4.10/news_news_search/%s'
# % info_id)
# play_count, vid, info_source, data_source = self.get_playcnt(url=playcnt_url)
# video_info['title'] = video_dict['title']
# video_info['url'] = video_dict['url']
# video_info['video_id'] = vid
# video_info['play_count'] = play_count
# video_info['playcnt_url'] = playcnt_url
# video_info['releaser'] = video_dict['source']
# video_info['release_time'] = int(video_dict['timestamp'] * 1e3)
# video_info['info_source'] = info_source
# video_info['data_source'] = data_source
#
# try:
# dura_str = video_dict['videoTotalTime']
# except:
# dura_str = ''
# video_info['duration'] = trans_duration(dura_str)
# fetch_time = int(time.time() * 1e3)
# video_info['fetch_time'] = fetch_time
# final_lst.append(video_info)
# print("get video data %s" % video_info['title'])
# except:
# continue
# else:
# print("hasVideo flag is False, no data in this video_dict")
# if len(final_lst) >= 100:
# output_result(result_Lst=final_lst,
# platform=self.platform,
# output_to_es_raw=output_to_es_raw,
# output_to_es_register=output_to_es_register,
# es_index=es_index,
# doc_type=doc_type)
# final_lst.clear()
#
# if final_lst != []:
# output_result(result_Lst=final_lst,
# platform=self.platform,
# output_to_es_raw=output_to_es_raw,
# output_to_es_register=output_to_es_register,
# es_index=es_index,
# doc_type=doc_type)
# return final_lst
def get_playcnt(self, url):
get_page = requests.get(url)
page_dic = get_page.json()
try:
play_count = page_dic['attribute']['VIDEO_0']['playcount']
except:
play_count = None
try:
vid = page_dic['attribute']['VIDEO_0']['vid']
except:
vid = None
if play_count is not None:
info_source = 'tencent_news'
data_source = None
else:
info_source = 'tencent_video'
data_source = None
if vid is not None:
video_url = 'https://v.qq.com/x/page/%s.html' % vid
added_dic = self.crawler_video_page(url=video_url)
try:
play_count = added_dic['play_count']
data_source = added_dic['data_source']
except:
play_count = None
data_source = None
return play_count, vid, info_source, data_source
    def list_page_for_main_page(self,
                                channel_id,
                                output_to_file=False,
                                filepath=None,
                                list_page_max=30,
                                output_to_es_raw=False,
                                es_index=None,
                                doc_type=None,
                                output_to_es_register=False):
        """Crawl up to ``list_page_max`` pages of a channel's main feed.

        POSTs to getQQNewsUnreadList with a hard-coded iOS client
        fingerprint, keeps only entries that look like videos
        (``hasVideo == 1`` or a ``video_channel`` payload), flushes parsed
        items to ``output_result`` in batches of 100, and returns whatever
        is left in ``result_list`` (i.e. the last partial batch).
        """
        # Hard-coded iOS 12 / QQNews 5.7.22 request body; the ad-request
        # blob and device ids are captured values the endpoint expects.
        # NOTE(review): presumably any consistent fingerprint works —
        # confirm before changing individual fields.
        post_dic = {'chlid': channel_id,
                    'forward': '2',
                    'uid': '6F0D5898-2C3A-46FE-9181-589BC52ED743',
                    'adReqData': '{"chid":2,"adtype":0,"pf":"iphone","launch":"0","ext":{"mob":{"mobstr":"Aejy45+NeSZw4VxymYnnIhMV+MEM+6sW9Rkw16FvkWGCz1rsPQflpTnsN+KnArzMwheqHiLErlbOlNWL0SoBI0lJtRh13iyR+LxSv3Y+hJrixm\\/Sxn\\/YhInAhlYioOjQ9cHGSSRmdgaDyqx2dDLZosKp+QSMqr649GGxQ36xbSdjbvZ3MGywBOsVNcf+EZkV+U9Q8LyDPc6PZ56b\\/GLGncf4XcrVFnKlUi+kebsg8DCD\\/nlvTDGSkWOtu33GJ4Ct\\/hfZ1c3UNHw5bRwHRM0L0+6QYANTrPzl2X6hZK3kijlJsub+RvcPNPNQGrhK3e4yYHJmspW19qE5mPgxd5lbwzJ8VQifTrjGeB+cdCcGmEPYBcZwHmxRhEAo7A0bJcSLK5KACWNsKw8I085yoKLCIE40\\/1J+umH8QsTU6K+wLdpjpaI6D3XMa\\/GZiguAcNB7HMSMpBFY6dq1saxz0u+6Ex2n2CwJlY4JYzf2S4r69t8J1WCQInAjIf\\/Io+ZVhXNnNUx3GVir\\/TaffnYpd\\/5ZvqdKtBIWXZFtXOoWC66tNBG\\/D+YAoY8\\/yVAQL7slsS1qbjdDqByVI2DMq299y6yAh0hejMouwaCGK2Q2OCMes5xrghJ1sotO5mSqioK23WbdF9GiQSVqmbE94wzpCwaPCwrEzkgKWHuPxh0UlqUs9QeGe30SHv4OOpqF9QOUeXYJ\\/Xkana90uC32g3LuM6jdPTv07qbyk1tX87pGdnyvjR9BBEhb0dyLUFi\\/Gx8t4T+yHLxt0X9yKsGKCJX1U8AdkTwLlJslIX9Rzqy+Yb1n9sg85KAS5yUsQZqSv9kKRuZpYsfj6LLaI\\/Bet9BUNtGu4hYuZBqKFWp34XegvS4d3M9U"}},"ver":"5.7.22","slot":[{"islocal":0,"orders_info":["67950414,6870997,0,1000,0,110,2","88685632,1266139,1761176905,19,101,110,3","48980066,1934913,3602870493,19,101,110,1"],"recent_rot":["1,2,3"],"refresh_type":0,"loid":"1,13,16,23","channel":"news_video_top"}],"appversion":"181210"}',
                    'kankaninfo': '{"gender":1,"lastExp":416,"refresh":0,"scene":2}',
                    'channelPosition': '1',
                    'rendType': 'kankan',
                    'page': '0'}
        headers = {"content-type": "application/x-www-form-urlencoded",
                   "store": "1",
                   "accept": "*/*",
                   "idft": "CE1E8744-7BF9-4FDD-87A5-463C6B9A66E1",
                   "idfa": "05571C2D-1C86-4B5B-87EF-E4B4DAF07DDB",
                   "appver": "12.0.1_qqnews_5.7.22",
                   "devid": "d605a70a-d084-487e-aaf1-8a057d40ef39",
                   "devicetoken": "<f4b49138 3ca95e38 1519836e daefaab6 799b04da c164f7a7 4cb7d999 6e343393>",
                   "accept-language": "zh-Hans-CN;q=1, en-CN;q=0.9",
                   "referer": "http://inews.qq.com/inews/iphone/",
                   "user-agent": "QQNews/5.7.22 (iPhone; iOS 12.0.1; Scale/2.00)",
                   "content-length": "2169",
                   "accept-encoding": "br, gzip, deflate",
                   "cookie": "logintype=2",
                   "qqnetwork": "wifi"}
        domain_url = "https://r.inews.qq.com/getQQNewsUnreadList?"
        query_dict = {'appver': '12.0.1_qqnews_5.7.22',
                      'pagestartfrom': 'icon',
                      'page_type': 'timeline',
                      'apptype': 'ios',
                      'rtAd': '1',
                      'imsi': '460-01',
                      'screen_height': '667',
                      'network_type': 'wifi',
                      'startTimestamp': '1545835451',
                      'store': '1',
                      'deviceToken': '<f4b49138 3ca95e38 1519836e daefaab6 799b04da c164f7a7 4cb7d999 6e343393>',
                      'global_info': '1|1|1|1|1|14|4|1|0|6|1|1|2|2|0|J267P000000000:J060P000000000:B054P000015802:J054P600000000|1421|0|1|0|0|0|0|0|1001|0|6|1|1|1|1|1|1|-1|0|0|0|2|1|1|0|0|2|0|1|0|4|0|0|0|3|0|0|0|0',
                      'globalInfo': '1|1|1|1|1|14|4|1|0|6|1|1|2|2|0|J267P000000000:J060P000000000:B054P000015802:J054P600000000|1421|0|1|0|0|0|0|0|1001|0|6|1|1|1|1|1|1|-1|0|0|0|2|1|1|0|0|2|0|1|0|4|0|0|0|3|0|0|0|0',
                      'screen_scale': '2',
                      'activefrom': 'icon',
                      'screen_width': '375',
                      'isJailbreak': '0',
                      'qqnews_refpage': 'QNCommonListController',
                      'omgid': 'a305486b92cc9e48f90929497de4cb30dfde0010112206',
                      'device_model': 'iPhone9,1',
                      'pagestartFrom': 'icon',
                      'device_appin': '6F0D5898-2C3A-46FE-9181-589BC52ED743',
                      'devid': 'D605A70A-D084-487E-AAF1-8A057D40EF39',
                      'omgbizid': '138dc6ef3ae8a24f7c897a9bbde8b9098f210060113210',
                      'idfa': '05571C2D-1C86-4B5B-87EF-E4B4DAF07DDB'}
        count = 0
        result_list = []
        while count < list_page_max:
            # Page number goes in the POST body; fresh timestamp per request.
            post_dic['page'] = str(count)
            timestamp = int(time.time())
            query_dict['startTimestamp'] = timestamp
            url = domain_url + urllib.parse.urlencode(query_dict)
            get_page = requests.post(url, data=post_dic, headers=headers)
            page_dict = get_page.json()
            # video_list1 = page_dict["kankaninfo"]["videos"]
            video_list2 = page_dict["newslist"]
            count += 1
            # return video_list2
            if video_list2 != []:
                print("get data at page %s" % str(count - 1))
                for video_info in video_list2:
                    # Keep only entries that carry video content.
                    has_video = video_info.get('hasVideo')
                    video_channel = video_info.get('video_channel')
                    if has_video == 1 or video_channel is not None:
                        video_dict = copy.deepcopy(self.video_data)
                        video_dict['channel'] = channel_id
                        video_dict['title'] = video_info['longtitle']
                        print(video_dict['title'])
                        video_dict['url'] = video_info['url']
                        try:
                            dura_str = video_info['video_channel']['video']['duration']
                            video_dict['duration'] = trans_duration(dura_str)
                        except:
                            video_dict['duration'] = 0
                        try:
                            video_dict['releaser'] = video_info['chlname']
                        except:
                            video_dict['releaser'] = None
                        try:
                            video_dict['releaser_id'] = video_info['card']['uin']
                        except:
                            video_dict['releaser_id'] = None
                        # API timestamp is in seconds; stored as milliseconds.
                        video_dict['release_time'] = int(video_info['timestamp'] * 1e3)
                        try:
                            video_dict['read_count'] = video_info['read_count']
                        except:
                            video_dict['read_count'] = 0
                        video_dict['comment_count'] = video_info['comments']
                        video_dict['favorite_count'] = video_info['likeInfo']
                        try:
                            video_dict['play_count'] = video_info['video_channel']['video']['playcount']
                        except:
                            video_dict['play_count'] = 0
                        video_dict['article_id'] = video_info['id']
                        try:
                            video_dict['video_id_str'] = video_info['vid']
                        except:
                            video_dict['video_id_str'] = None
                        video_dict['fetch_time'] = int(time.time() * 1e3)
                        result_list.append(video_dict)
                        # Flush in batches of 100 to bound memory usage.
                        if len(result_list) >= 100:
                            output_result(result_Lst=result_list,
                                          platform=self.platform,
                                          output_to_file=output_to_file,
                                          filepath=filepath,
                                          output_to_es_raw=output_to_es_raw,
                                          es_index=es_index,
                                          doc_type=doc_type,
                                          output_to_es_register=output_to_es_register)
                            result_list.clear()
        # Emit the final partial batch, then return it.
        if result_list != []:
            output_result(result_Lst=result_list,
                          platform=self.platform,
                          output_to_file=output_to_file,
                          filepath=filepath,
                          output_to_es_raw=output_to_es_raw,
                          es_index=es_index,
                          doc_type=doc_type,
                          output_to_es_register=output_to_es_register)
        return result_list
def list_page_for_special_area(self,
channel_id,
output_to_file=False,
filepath=None,
list_page_max=30,
output_to_es_raw=False,
es_index=None,
doc_type=None,
output_to_es_register=False):
page_list = []
post_dic = {
'adReqData': '{"chid":2,"adtype":0,"pf":"iphone","launch":"0","ext":{"mob":{"mobstr":"Aejy45+NeSZw4VxymYnnIhMV+MEM+6sW9Rkw16FvkWGCz1rsPQflpTnsN+KnArzMwheqHiLErlbOlNWL0SoBI0lJtRh13iyR+LxSv3Y+hJrixm\\/Sxn\\/YhInAhlYioOjQ9cHGSSRmdgaDyqx2dDLZosKp+QSMqr649GGxQ36xbSdjbvZ3MGywBOsVNcf+EZkV+U9Q8LyDPc6PZ56b\\/GLGncf4XcrVFnKlUi+kebsg8DCD\\/nlvTDGSkWOtu33GJ4Ct\\/hfZ1c3UNHw5bRwHRM0L0+6QYANTrPzl2X6hZK3kijlJsub+RvcPNPNQGrhK3e4yYHJmspW19qE5mPgxd5lbwzLfC4rOa2XJGXs8Am8hxBVUQBrYaSX5y1D\\/H2H+\\/KuPjUhMtylfH4pqvrYmedw8h56zQLScQQ1xOMsiYtb72YRegl4pByfwExmmQ3L8EtBRDGoJznbwnCe863BRgZTCS9jQT0Wry6f1UGhpmH98UCfP\\/fzWCLOCPaXJCH5gxYdSIOc7u4nw7mBbPk\\/xhFWz7PDTCw9wxEwVpLBshqbxVfPQ9eTND\\/BEd9hrtE\\/ZVlJz+wIIaabOUgyMMEqGqUNPvI5Dt6JLD\\/s2yPA2zd8saGSjrLcBHzKfKEt4prtCjasz+\\/IK8eWT5QCrrJC9swLAAdUFKjX6mAcpR0ZF97ubI6I4rheTkfhfQ+5gX9Dm7ahfs6b4Fzk0ewwY9uim4BEVkzQqHeIejtVShVG8LoXuqqPsen4YS2QGhDvzfop6Usr4J8Eb\\/lFZREasEN1MRNC8FqcQoWQPc\\/BGyxU0viDeZKH3wtZ2jXhs7l8xqX9jbaON1nqgdayLVLQz+POAZnQz7iwTjrFX9A9mYM\\/NgUA32jQq"}},"ver":"5.7.22","slot":[{"loid":"1,13,16,23","channel":"news_news_sh","recent_rot":["1,2,3","4,5,6","4,7,6","8","9,10"],"refresh_type":2,"islocal":1,"seq_loid":"1,1,1,1,1","cur":58,"orders_info":["91987701,8122038,0,19,4201,110,1","91234216,9163219,537783065,19,4301,110,1","89033789,1252416,860268798,19,2804,110,1","91741757,7748442,3134180117,19,4301,110,1","82330504,3792964,3640746586,1000,101,110,2","89123229,8862275,3031448928,19,507,110,1","91890078,8651597,3360875772,19,4001,110,3","91961141,7311564,2009512134,19,4301,110,1","91639391,9117817,826327870,1000,4201,110,2","76056706,6378696,2577335544,19,4107,110,1"],"current_rot":"4,7,6,8,9","seq":"4,9,15,40,50"}],"appversion":"181210"}',
'lc_ids': 'CELLSHC201504210000',
'uid': '6F0D5898-2C3A-46FE-9181-589BC52ED743',
'is_new_user': '0',
# 'feedbackNewsId': '20181228A0XN2100|2|0,20181215V0I04Q00|4|0,20181228A15XAT00|2|0,20181224V0C58S00|4|0,20181228A1EQLH00|2|0',
'chlid': channel_id,
'channelType': channel_id,
'newsTopPage': '0',
'feedbackCur': '53',
'channelPosition': '67',
'page': '1',
'forward': '1',
'picType': '1,2,0,2,1,2,1,2,1,0,1,2,0,2,1,2,1,2,1,2',
'cachedCount': '50'}
headers = {"content-type": "application/x-www-form-urlencoded",
"store": "1",
"accept": "*/*",
"idft": "CE1E8744-7BF9-4FDD-87A5-463C6B9A66E1",
"idfa": "05571C2D-1C86-4B5B-87EF-E4B4DAF07DDB",
"appver": "12.0.1_qqnews_5.7.22",
"devid": "d605a70a-d084-487e-aaf1-8a057d40ef39",
"devicetoken": "<f4b49138 3ca95e38 1519836e daefaab6 799b04da c164f7a7 4cb7d999 6e343393>",
"accept-language": "zh-Hans-CN;q=1, en-CN;q=0.9",
"referer": "http://inews.qq.com/inews/iphone/",
"user-agent": "QQNews/5.7.22 (iPhone; iOS 12.0.1; Scale/2.00)",
"content-length": "2169",
"accept-encoding": "br, gzip, deflate",
"cookie": "logintype=2",
"qqnetwork": "wifi"}
domain_url = "https://r.inews.qq.com/getQQNewsUnreadList?"
query_dict = {'appver': '12.0.1_qqnews_5.7.22',
'pagestartfrom': 'icon',
'page_type': 'timeline',
'apptype': 'ios',
'rtAd': '1',
'imsi': '460-01',
'screen_height': '667',
'network_type': 'wifi',
'startTimestamp': '1545835451',
'store': '1',
'deviceToken': '<f4b49138 3ca95e38 1519836e daefaab6 799b04da c164f7a7 4cb7d999 6e343393>',
'global_info': '1|1|1|1|1|14|4|1|0|6|1|1|2|2|0|J267P000000000:J060P000000000:B054P000015802:J054P600000000|1421|0|1|0|0|0|0|0|1001|0|6|1|1|1|1|1|1|-1|0|0|0|2|1|1|0|0|2|0|1|0|4|0|0|0|3|0|0|0|0',
'globalInfo': '1|1|1|1|1|14|4|1|0|6|1|1|2|2|0|J267P000000000:J060P000000000:B054P000015802:J054P600000000|1421|0|1|0|0|0|0|0|1001|0|6|1|1|1|1|1|1|-1|0|0|0|2|1|1|0|0|2|0|1|0|4|0|0|0|3|0|0|0|0',
'screen_scale': '2',
'activefrom': 'icon',
'screen_width': '375',
'isJailbreak': '0',
'qqnews_refpage': 'QNCommonListController',
'omgid': 'a305486b92cc9e48f90929497de4cb30dfde0010112206',
'device_model': 'iPhone9,1',
'pagestartFrom': 'icon',
'device_appin': '6F0D5898-2C3A-46FE-9181-589BC52ED743',
'devid': 'D605A70A-D084-487E-AAF1-8A057D40EF39',
'omgbizid': '138dc6ef3ae8a24f7c897a9bbde8b9098f210060113210',
'idfa': '05571C2D-1C86-4B5B-87EF-E4B4DAF07DDB'}
count = 0
result_list = []
while count < list_page_max:
post_dic['newsTopPage'] = str(count)
post_dic['page'] = str(count + 1)
timestamp = int(time.time())
query_dict['startTimestamp'] = timestamp
url = domain_url + urllib.parse.urlencode(query_dict)
get_page = requests.post(url, data=post_dic, headers=headers)
page_dict = get_page.json()
page_list.append(page_dict)
count += 1
# continue
# video_list1 = page_dict["kankaninfo"]["videos"]
video_list2 = page_dict["newslist"]
count += 1
if video_list2 != []:
print("get data at page %s" % str(count - 1))
for video_info in video_list2:
has_video = video_info.get('hasVideo')
video_channel = video_info.get('video_channel')
if has_video == 1 or video_channel is not None:
video_dict = copy.deepcopy(self.video_data)
video_dict['channel'] = channel_id
video_dict['title'] = video_info['title']
print(video_dict['title'])
video_dict['url'] = video_info['url']
try:
dura_str = video_info['video_channel']['video']['duration']
video_dict['duration'] = trans_duration(dura_str)
except:
video_dict['duration'] = 0
try:
video_dict['releaser'] = video_info['chlname']
except:
video_dict['releaser'] = None
try:
video_dict['releaser_id'] = video_info['card']['uin']
except:
video_dict['releaser_id'] = None
video_dict['release_time'] = int(video_info['timestamp'] * 1e3)
try:
video_dict['read_count'] = video_info['readCount']
except:
video_dict['read_count'] = 0
video_dict['comment_count'] = video_info['comments']
video_dict['favorite_count'] = video_info['likeInfo']
try:
video_dict['play_count'] = video_info['video_channel']['video']['playcount']
except:
video_dict['play_count'] = 0
video_dict['article_id'] = video_info['id']
try:
video_dict['video_id_str'] = video_info['vid']
except:
video_dict['video_id_str'] = None
video_dict['fetch_time'] = int(time.time() * 1e3)
result_list.append(video_dict)
if len(result_list) >= 100:
output_result(result_Lst=result_list,
platform=self.platform,
output_to_file=output_to_file,
filepath=filepath,
output_to_es_raw=output_to_es_raw,
es_index=es_index,
doc_type=doc_type,
output_to_es_register=output_to_es_register)
result_list.clear()
if result_list != []:
output_result(result_Lst=result_list,
platform=self.platform,
output_to_file=output_to_file,
filepath=filepath,
output_to_es_raw=output_to_es_raw,
es_index=es_index,
doc_type=doc_type,
output_to_es_register=output_to_es_register)
return result_list
if __name__ == "__main__":
    # Ad-hoc manual entry point: crawl one releaser's video list straight
    # into Elasticsearch.  Commented lines are earlier one-off runs kept
    # for reference.
    t = Crawler_Tencent_News()
    # result_list2 = t.list_page_for_special_area(channel_id='news_news_sports',
    #                                             output_to_es_raw=True,
    #                                             es_index='test2',
    #                                             doc_type='tencent_news1231')
    # search_res_list = t.search_page(keyword='任正非 美国压不跨华为', search_pages_max=30)
    # t.get_releaser_follower_num("https://view.inews.qq.com/media/5498518?tbkt=I&uid=")
    # t.search_video_page("看看新闻Knews", None, output_to_es_raw=True,
    #                     es_index='crawler-data-raw', doc_type='doc')
    t.releaser_page("https://view.inews.qq.com/media/16647661",output_to_es_raw=True, es_index='crawler-data-raw', doc_type='doc',releaser_page_num_max=200,proxies_num=1)
    # t.releaser_page("https://view.inews.qq.com/media/8851848")
from django.db import models
from cards.models import CardTemplate, Deck
from npcs.models import NPC, NPCInstance
# class EffectEventLink (models.Model):
# template = models.ForeignKey (CardTemplate)
# effect = models.ForeignKey (Effect)
class Event (models.Model):
    """
    This class is designed to contain an event and handle its resolution by choosing the appropriate contained result object
    """
    #Basic information about the event
    title = models.CharField(max_length=255)
    # URL-safe unique identifier for the event
    slug = models.SlugField(unique=True, max_length=255)
    description = models.CharField(max_length=255)
    # Full body text of the event
    content = models.TextField()
    # Optional NPC involved in the event; None for NPC-less events
    npc = models.ForeignKey (NPC, null = True, blank = True)
    # Useful meta data about the class
    published = models.BooleanField(default=True)
    created = models.DateTimeField(auto_now_add=True)
    # The generic result is what happens when the event is forced to resolve, but no triggers have been matched
    generic_result = models.ForeignKey ("events.EventTrigger", default = None, null = True, related_name = "_unused_event_result", blank = True)
    # When True the event resolves automatically (no player card play needed)
    auto = models.BooleanField (default = False)
    # Optional deck of cards associated with this event
    deck = models.ForeignKey (Deck, null = True, blank = True)
    class Meta:
        # Newest events first by default
        ordering = ['-created']
def __str__(self):
return u'%s' % self.title
def getLife (self):
if self.npc is not None:
return self.npc.life
life = property (getLife)
def trigger_event (self, player, cardStatus, played = True):
# Filter the triggers by type and strength such that the first trigger satisfies the criteria
# TODO cardStatus could keep track of its play values if it was just played
# If there is a card, play it
template, strength = player.playCard (cardStatus)
npc = self.generateNPCInstance (player)
# Filter out triggers based on whether a user played it
if played:
trigger = self.eventtrigger_set.filter (template = cardStatus.card.template).order_by ('threshold')
if npc is not None:
player.attack (npc, [(template, strength)])
value = self.npc.life + npc.life
else:
value = -strength
trigger = trigger.filter (onlyWhenNotPlayed = False)
else:
print ("Not played")
if npc is not None:
pass
# npc.attack (player, [(template, strength)])
trigger = self.eventtrigger_set.filter (template = cardStatus.card.template).order_by ('threshold')
value = -strength
print ("TRIGGERS: ", trigger)
# If there is a remaining trigger, add the event to the stack
last = None
success = False
for tr in trigger.all ():
last = tr
if value <= tr.threshold:
success = True
break
return (last, success)
def generateNPCInstance (self, player):
if self.npc is not None:
npc = self.npc.npcinstance_set.filter (player = player).first ()
if npc is None:
npc = NPCInstance (player = player, npc = self.npc)
npc.save ()
return npc
def resolve (self, player, cardStatus = None, played = True):
"""Resolve an event with or without a card to play. If the event can't resolve with current conditions, return None
Note: this method calls the card.draw () method, which effectively moves the card to the discard pile and puts any special abilities of that card into effect."""
if cardStatus is None and not self.auto:
return (None, False)
if cardStatus is not None:
# Try to trigger an event with the card
eventtrigger, success = self.trigger_event (player, cardStatus, played)
print ("Any triggers? ", eventtrigger, success)
if eventtrigger is not None:
# cardStatus.resolve ()
return (eventtrigger, not success)
print ("Resolving...")
cardStatus.resolve ()
# If nothing else works, use the generic result
if self.generic_result is not None:
return (self.generic_result, True)
return (None, True)
class EventTrigger (models.Model):
    """
    The EventTrigger links an event to possible sub-events
    """
    # NOTE(review): ForeignKey calls lack on_delete, which implies Django < 2.0.

    # The original event from which this EventTrigger can be triggered
    originalEvent = models.ForeignKey (Event, null = True)
    # The CardTemplate that this EventTrigger can be triggered by
    template = models.ForeignKey (CardTemplate)
    # The threshold that this card must beat in order to activate successfully. This is either the quantity that the card score must beat or the maximum remaining life of the associated NPC to be successful
    threshold = models.IntegerField (default = 0)
    # The event triggered by this EventTrigger, if this is None, the EventTrigger happens, but returns to the previous event
    event = models.ForeignKey (Event, null = True, related_name = "_unused_2")
    # Particular cards, e.g. item cards, have different effects when found than when played. This boolean is true for an event triggered ONLY when the card is put into play directly from a non-player deck
    onlyWhenNotPlayed = models.BooleanField (default = False)
    # The content of an EventTrigger is the text displayed as the 'result' text in the log
    content = models.TextField (default = "", blank = True)
    # Log text shown when the trigger is attempted but fails.
    failed_content = models.TextField (default = "", blank = True)
    # If this trigger resolves the parent event, this boolean is True
    resolved = models.BooleanField (default = True)

    def __str__ (self):
        # "<origin event> (<template> <threshold>) -> <sub-event>"
        return "%s (%s %d) -> %s" % (str (self.originalEvent), str (self.template), self.threshold, str (self.event))
|
992,544 | 22de6e27203eb41f9c17dea8fe738a07ad2d61a2 | """
Author: Võ Viết Thanh
Date: 04/09/2021
Program: The tax calculator program of the case study outputs a floating-point number
that might show more than two digits of precision. Use the round function to
modify the program to display at most two digits of precision in the output
number.
Solution:
1. Analysis
- The program computes a person's income tax; we use the round function so that the program displays at most 2 digits of precision in the output
2. Inputs
- grossIncome
- numDependents
3. Outputs
- taxableIncome
- incomeTax rounded to 2 figures
4. Design
- Initialize the constants
+ TAX_RATE
+ STANDARD_DEDUCTION
+ DEPENDENT_DEDUCTION
- Compute the income tax
+ taxableIncome = grossIncome - STANDARD_DEDUCTION - \
+ DEPENDENT_DEDUCTION * numDependents
+ incomeTax = taxableIncome * TAX_RATE
....
"""
# Initialize the constants
TAX_RATE = 0.20
STANDARD_DEDUCTION = 10000.0
DEPENDENT_DEDUCTION = 3000.0


def compute_income_tax(gross_income, num_dependents):
    """Return the income tax for the given gross income and number of
    dependents, rounded to at most two digits of precision."""
    taxable_income = gross_income - STANDARD_DEDUCTION - \
        DEPENDENT_DEDUCTION * num_dependents
    return round(taxable_income * TAX_RATE, 2)


if __name__ == "__main__":
    # Request the inputs
    grossIncome = float(input("Enter the gross income: "))
    numDependents = int(input("Enter the number of dependents: "))
    # Compute and display the income tax.
    # Bug fix: the original printed the unrounded value and then called
    # round(incomeTax, 2) afterwards, discarding the result — the rounding
    # required by the exercise never affected the output.
    incomeTax = compute_income_tax(grossIncome, numDependents)
    print("The income tax is $" + str(incomeTax))
|
992,545 | 8b845a523dd54654edb88f07c4093137ebef0ac4 | from directio import read, write
import six.moves.cPickle as pickle
import os
import shutil
from hashlib import md5
from swift.common.utils import config_true_value
import sqlite3
from swift.dedupe.time import time, time_diff
class DatabaseTable(object):
    """Fingerprint -> container-id index backed by SQLite.

    Writes are buffered in a dict and flushed to the ``fp_index`` table in
    one batch once the buffer reaches ``db_max_buf_fp`` entries; reads check
    the buffer first, then the table.
    """

    def __init__(self, conf):
        name = conf.get('data_base', ':memory:')
        # Append a .db suffix for on-disk databases that lack one.
        if name != ':memory:' and not name.endswith('.db'):
            name += '.db'
        self.db_name = name
        self.conn = sqlite3.connect(self.db_name)
        self.c = self.conn.cursor()
        self.c.execute('''CREATE TABLE IF NOT EXISTS fp_index (fp text PRIMARY KEY NOT NULL, container_id text)''')
        self.fp_buf = dict()
        self.db_max_buf = int(conf.get('db_max_buf_fp', 1024))

    def __del__(self):
        # Best-effort close of the SQLite connection on garbage collection.
        self.conn.close()

    def put(self, fp, container_id):
        """Record fp -> container_id, batch-committing once the buffer fills."""
        self.fp_buf[fp] = container_id
        if len(self.fp_buf) < self.db_max_buf:
            return
        for key, value in self.fp_buf.items():
            self.c.execute('INSERT INTO fp_index VALUES (?, ?)', (key, value))
        self.conn.commit()
        self.fp_buf = dict()

    def get(self, fp):
        """Return the container id for fp; falls through to SQLite on a
        buffer miss (an absent fp yields the empty fetchall() result)."""
        cached = self.fp_buf.get(fp, None)
        if cached:
            return cached
        self.c.execute('SELECT container_id FROM fp_index WHERE fp=?', (fp,))
        rows = self.c.fetchall()
        if rows:
            return rows[0][0]
        return rows
class DiskHashTable(object):
    """Hash table of md5-addressed buckets with per-bucket spill files on disk.

    NOTE(review): this is Python 2 code (``print`` statements, str-based
    pickle payloads); it will not run unmodified under Python 3.
    """

    def __init__(self, conf):
        # Number of buckets; each has an in-memory dict and one spill file.
        self.index_size = int(conf.get('disk_hash_table_index_size', 1024))
        # Whether spill files are accessed with O_DIRECT.
        self.direct_io = config_true_value(conf.get('disk_hash_table_directio', 'false'))
        self.disk_hash_dir = conf.get('disk_hash_table_dir', '/tmp/swift/disk-hash/')
        # Spill a bucket to disk once it holds this many entries.
        self.flush_size = int(conf.get('disk_hash_table_flush_size', 1024))
        self.memory_bucket = []
        # bucket_lens[i]: byte length of each pickled chunk appended to
        # bucket i's file, used by get_disk_buckets() to re-slice the file.
        self.bucket_lens = []
        for _ in range(self.index_size):
            self.memory_bucket.append(dict())
            self.bucket_lens.append([])
        if config_true_value(conf.get('clean_disk_hash', 'false')):
            if os.path.exists(self.disk_hash_dir):
                shutil.rmtree(self.disk_hash_dir)
        if not os.path.exists(self.disk_hash_dir):
            os.makedirs(self.disk_hash_dir)
        # Instrumentation counters.
        self.read_disk_num = 0
        self.read_disk_time = 0
        self.write_disk_num = 0
        self.write_disk_time = 0
        self.hit_num = 0

    def _map_bucket(self, key):
        # Map a key to a bucket index: md5 hex digest taken modulo index_size.
        h = md5(key)
        h = h.hexdigest()
        index = int(h.upper(), 16)
        index %= self.index_size
        return index

    def put(self, key, value):
        """Insert key -> value; spill the bucket when it reaches flush_size."""
        index = self._map_bucket(key)
        self.memory_bucket[index][key] = value
        if len(self.memory_bucket[index]) >= self.flush_size:
            self.flush(index)

    def flush(self, bucket_index):
        """Append the pickled in-memory bucket to its spill file and clear it."""
        dedupe_start = time()
        if not os.path.exists(self.disk_hash_dir):
            os.makedirs(self.disk_hash_dir)
        path = self.disk_hash_dir + '/' + str(bucket_index)
        data = pickle.dumps(self.memory_bucket[bucket_index])
        if self.direct_io:
            f = os.open(path, os.O_CREAT | os.O_APPEND | os.O_RDWR | os.O_DIRECT)
            # O_DIRECT needs 512-byte-aligned transfers; pad with NUL bytes.
            # NOTE(review): when len(data) is already a multiple of 512 this
            # still appends a full extra 512-byte block.
            ll = 512 - len(data)%512 # aligned by 512
            data += '\0'*ll
            try:
                write(f, data)
            except Exception as e:
                # NOTE(review): write failures are silently swallowed here.
                pass
            finally:
                os.close(f)
        else:
            with open(path, 'ab') as f:
                f.write(data)
        # Record the (possibly padded) chunk length for later re-slicing.
        self.bucket_lens[bucket_index].append(len(data))
        self.memory_bucket[bucket_index] = dict()
        dedupe_end = time()
        self.write_disk_num += 1
        self.write_disk_time += time_diff(dedupe_start, dedupe_end)

    def get_disk_buckets(self, index):
        """Read back and unpickle every spilled chunk for one bucket index."""
        dedupe_start = time()
        buckets = []
        path = self.disk_hash_dir + '/' + str(index)
        if not os.path.exists(path):
            return buckets
        file_size = os.path.getsize(path)
        data = ''
        if self.direct_io:
            f = os.open(path, os.O_RDONLY | os.O_DIRECT)
            try:
                data = read(f, file_size)
            except Exception as e:
                print e
            finally:
                os.close(f)
        else:
            with open(path, 'rb') as f:
                data = f.read()
        if not data:
            print 'read data failed'
        # Re-slice the file with the recorded chunk lengths; trailing NUL
        # padding from the O_DIRECT path is ignored by pickle.loads.
        offset = 0
        for l in self.bucket_lens[index]:
            bucket_data = data[offset:offset+l]
            bucket = pickle.loads(bucket_data)
            buckets.append(bucket)
            offset += l
        dedupe_end = time()
        self.read_disk_num += 1
        self.read_disk_time += time_diff(dedupe_start, dedupe_end)
        return buckets

    def get(self, key):
        """Return the value for key from memory or disk; None when absent."""
        index = self._map_bucket(key)
        r = self.memory_bucket[index].get(key, None)
        if r:
            self.hit_num += 1
            return r
        path = self.disk_hash_dir + '/' + str(index)
        if not os.path.exists(path):
            return None
        # Scan every spilled chunk of this bucket, oldest first.
        buckets = self.get_disk_buckets(index)
        for bucket in buckets:
            r = bucket.get(key)
            if r:
                self.hit_num += 1
                return r
        return None
class LazyHashTable(DiskHashTable):
    """DiskHashTable variant that batches fingerprint lookups per bucket.

    Fingerprints are parked in ``lazy_bucket`` until a bucket accumulates
    ``lazy_bucket_size`` entries; the whole bucket is then resolved in one
    pass (memory bucket first, then every spilled chunk) and the results are
    handed to ``callback``.
    """

    def __init__(self, conf, callback= None):
        DiskHashTable.__init__(self, conf)
        # Number of parked fingerprints that triggers a batched lookup.
        self.lazy_bucket_size = int(conf.get('lazy_bucket_size', 32))
        self.lazy_bucket = []
        # Called with [(fp, found_value_or_None, parked_values)] per batch.
        self.callback = callback
        self.buffer = set()
        for _ in range(self.index_size):
            self.lazy_bucket.append(dict())

    def _lookup_in_bucket(self, index, bucket):
        """Resolve parked fingerprints of lazy_bucket[index] against one
        bucket dict; resolved entries are removed from the lazy bucket.

        Returns a list of (fp, found_value, parked_values) tuples.
        """
        result = []
        # Fix: iterate over a snapshot so deleting while looping is safe.
        # The original iterated .items() directly, which only worked on
        # Python 2 because items() happened to return a list copy; on
        # Python 3 it raises "dictionary changed size during iteration".
        for fp, v in list(self.lazy_bucket[index].items()):
            r = bucket.get(fp, None)
            if r:
                self.hit_num += 1
                result.append((fp, r, v))
                del self.lazy_bucket[index][fp]
        return result

    def lazy_lookup(self, index):
        """Resolve all parked fingerprints of one bucket; unfound ones are
        reported with a None value (i.e. they are unique)."""
        result = []
        if self.lazy_bucket[index]:
            result += self._lookup_in_bucket(index, self.memory_bucket[index])
        path = self.disk_hash_dir + '/' + str(index)
        if os.path.exists(path):
            buckets = self.get_disk_buckets(index)
            for bucket in buckets:
                result.extend(self._lookup_in_bucket(index, bucket))
        # the unfound fingerprints are unique
        for fp, v in list(self.lazy_bucket[index].items()):
            result.append((fp, None, v))
            del self.lazy_bucket[index][fp]
        return result

    def buf(self, fp, value):
        """Park fp with an associated value; run the batched lookup and
        invoke the callback once this fp's bucket is full."""
        index = self._map_bucket(fp)
        if fp not in self.lazy_bucket[index]:
            self.lazy_bucket[index][fp] = [value]
        else:
            if value not in self.lazy_bucket[index][fp]:
                self.lazy_bucket[index][fp].append(value)
        if len(self.lazy_bucket[index]) >= self.lazy_bucket_size:
            result = self.lazy_lookup(index)
            self.callback(result)

    def buf_remove(self, fp):
        """Drop a parked fingerprint, if present."""
        index = self._map_bucket(fp)
        if fp in self.lazy_bucket[index]:
            del self.lazy_bucket[index][fp]

    def buf_get(self, fp):
        """Return the parked value list for fp, or None when not parked."""
        index = self._map_bucket(fp)
        return self.lazy_bucket[index].get(fp)
992,546 | d2cdfd1b9389c2f57794997ea9847ebd02b2c4fd | from django.conf.urls import url
from . import views
urlpatterns = [
    # Show a single post addressed by its numeric id.
    # NOTE(review): the pattern is unanchored (no leading '^' / trailing '$'),
    # so it matches any URL containing a digit run — confirm this is intended.
    url(r'(?P<id>[0-9]+)', views.show_post, name='event_show_post')
]
|
992,547 | 6701c0a783f201a1dd5968f4f9bbe089a3d0c6be | # name: Jake Graham and Chris Schulz
# Plot # 1
# Line plot of epochs vs time to build
# -------------------------------------------------
import numpy as np
import matplotlib.pyplot as plt
# Load the build-time column (index 0) recorded for each model run.
models = [
    ('Model 1', np.load('../data/m1_data.npy')[:, 0]),
    ('Model 2', np.load('../data/m2_data.npy')[:, 0]),
    ('Model 3', np.load('../data/m3_data.npy')[:, 0]),
]

# One line per model: epoch index on x, time to build on y.
for label, series in models:
    plt.plot(np.arange(series.shape[0]), series, label=label)

# Title, axis labels and presentation details.
plt.xlabel('Epochs')
plt.ylabel('Time to build the Model (seconds)')
plt.title('Comparing # of Epochs to the Time to Build')
plt.grid(True)
plt.legend(loc='upper right')
plt.show()
|
992,548 | 77997fb717db3a744b064c4d502cadaea6d283dc | # Copyright (c) 2010-2013 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2012-2014 Mark D. Hill and David A. Wood
# Copyright (c) 2009-2011 Advanced Micro Devices, Inc.
# Copyright (c) 2006-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Ali Saidi
# Brad Beckmann
#import numpy as numpy
import optparse
import sys
import m5
from m5.defines import buildEnv
from m5.objects import *
from m5.util import addToPath, fatal
addToPath('gem5-stable_2015_09_03/configs/common')
addToPath('gem5-stable_2015_09_03/configs/ruby')
import Ruby
from FSConfig import *
from SysPaths import *
from Benchmarks import *
import Simulation
import CacheConfig
import MemConfig
from Caches import *
import Options
import os
# Check if KVM support has been enabled, we might need to do VM
# configuration if that's the case.
have_kvm_support = 'BaseKvmCPU' in globals()
def is_kvm_cpu(cpu_class):
    """Return True when KVM is built in and cpu_class is a KVM-based CPU model."""
    # Idiom fix: test None by identity ('is not None') rather than '!= None'.
    return have_kvm_support and cpu_class is not None and \
        issubclass(cpu_class, BaseKvmCPU)
def cmd_line_template():
    """Return the kernel command line, or None when neither option is given.

    Precedence: --command-line value, then contents of --command-line-file.
    The two options are mutually exclusive; supplying both aborts the run.
    Relies on the module-level ``options`` parsed at the bottom of this file.
    """
    if options.command_line and options.command_line_file:
        print "Error: --command-line and --command-line-file are " \
            "mutually exclusive"
        sys.exit(1)
    if options.command_line:
        return options.command_line
    if options.command_line_file:
        return open(options.command_line_file).read().strip()
    return None
def build_test_system(np):
    """Build and return the full-system gem5 System under test for np CPUs.

    Reads the module-level globals ``options``, ``bm``, ``test_mem_mode``,
    ``TestCPUClass`` and ``FutureClass``; wires up the ISA-specific platform,
    clock/voltage domains, CPUs, Ruby or classic caches, and memory.
    """
    cmdline = cmd_line_template()
    # Instantiate the ISA-specific base system.
    if buildEnv['TARGET_ISA'] == "alpha":
        test_sys = makeLinuxAlphaSystem(test_mem_mode, bm[0], options.ruby,
                                        cmdline=cmdline)
    elif buildEnv['TARGET_ISA'] == "mips":
        test_sys = makeLinuxMipsSystem(test_mem_mode, bm[0], cmdline=cmdline)
    elif buildEnv['TARGET_ISA'] == "sparc":
        test_sys = makeSparcSystem(test_mem_mode, bm[0], cmdline=cmdline)
    elif buildEnv['TARGET_ISA'] == "x86":
        test_sys = makeLinuxX86System(test_mem_mode, options.num_cpus, bm[0],
                                      options.ruby, cmdline=cmdline)
    elif buildEnv['TARGET_ISA'] == "arm":
        test_sys = makeArmSystem(test_mem_mode, options.machine_type,
                                 options.num_cpus, bm[0], options.dtb_filename,
                                 bare_metal=options.bare_metal,
                                 cmdline=cmdline,
                                 external_memory=options.external_memory_system)
        if options.enable_context_switch_stats_dump:
            test_sys.enable_context_switch_stats_dump = True
    else:
        fatal("Incapable of building %s full system!", buildEnv['TARGET_ISA'])

    # Set the cache line size for the entire system
    test_sys.cache_line_size = options.cacheline_size

    # Create a top-level voltage domain
    test_sys.voltage_domain = VoltageDomain(voltage = options.sys_voltage)

    # Create a source clock for the system and set the clock period
    test_sys.clk_domain = SrcClockDomain(clock = options.sys_clock,
                                         voltage_domain = test_sys.voltage_domain)

    # Create a CPU voltage domain
    test_sys.cpu_voltage_domain = VoltageDomain()

    # Create a source clock for the CPUs and set the clock period
    test_sys.cpu_clk_domain = SrcClockDomain(clock = options.cpu_clock,
                                             voltage_domain =
                                             test_sys.cpu_voltage_domain)

    if options.kernel is not None:
        test_sys.kernel = binary(options.kernel)

    if options.script is not None:
        test_sys.readfile = options.script

    if options.lpae:
        test_sys.have_lpae = True

    if options.virtualisation:
        test_sys.have_virtualization = True

    # change the bootloader here: the iSSD bootloader replaces the default.
    #print "change boot loader"
    #print test_sys.boot_loader
    test_sys.boot_loader = options.issd_bootloader
    #print test_sys.boot_loader

    test_sys.init_param = options.init_param

    # For now, assign all the CPUs to the same clock domain
    test_sys.cpu = [TestCPUClass(clk_domain=test_sys.cpu_clk_domain, cpu_id=i)
                    for i in xrange(np)]

    if is_kvm_cpu(TestCPUClass) or is_kvm_cpu(FutureClass):
        test_sys.vm = KvmVM()

    if options.ruby:
        # Check for timing mode because ruby does not support atomic accesses
        if not (options.cpu_type == "detailed" or options.cpu_type == "timing"):
            print >> sys.stderr, "Ruby requires TimingSimpleCPU or O3CPU!!"
            sys.exit(1)

        Ruby.create_system(options, True, test_sys, test_sys.iobus,
                           test_sys._dma_ports)

        # Create a seperate clock domain for Ruby
        test_sys.ruby.clk_domain = SrcClockDomain(clock = options.ruby_clock,
                                                  voltage_domain = test_sys.voltage_domain)

        # Connect the ruby io port to the PIO bus,
        # assuming that there is just one such port.
        test_sys.iobus.master = test_sys.ruby._io_port.slave

        for (i, cpu) in enumerate(test_sys.cpu):
            #
            # Tie the cpu ports to the correct ruby system ports
            #
            cpu.clk_domain = test_sys.cpu_clk_domain
            cpu.createThreads()
            cpu.createInterruptController()

            cpu.icache_port = test_sys.ruby._cpu_ports[i].slave
            cpu.dcache_port = test_sys.ruby._cpu_ports[i].slave

            if buildEnv['TARGET_ISA'] == "x86":
                cpu.itb.walker.port = test_sys.ruby._cpu_ports[i].slave
                cpu.dtb.walker.port = test_sys.ruby._cpu_ports[i].slave

                cpu.interrupts.pio = test_sys.ruby._cpu_ports[i].master
                cpu.interrupts.int_master = test_sys.ruby._cpu_ports[i].slave
                cpu.interrupts.int_slave = test_sys.ruby._cpu_ports[i].master
    else:
        # Classic memory system: hook up I/O through a cache or a bridge.
        if options.caches or options.l2cache:
            # By default the IOCache runs at the system clock
            test_sys.iocache = IOCache(addr_ranges = test_sys.mem_ranges)
            test_sys.iocache.cpu_side = test_sys.iobus.master
            test_sys.iocache.mem_side = test_sys.membus.slave
        elif not options.external_memory_system:
            test_sys.iobridge = Bridge(delay='50ns', ranges = test_sys.mem_ranges)
            test_sys.iobridge.slave = test_sys.iobus.master
            test_sys.iobridge.master = test_sys.membus.slave

        # Sanity check
        if options.fastmem:
            if TestCPUClass != AtomicSimpleCPU:
                fatal("Fastmem can only be used with atomic CPU!")
            if (options.caches or options.l2cache):
                fatal("You cannot use fastmem in combination with caches!")

        if options.simpoint_profile:
            if not options.fastmem:
                # Atomic CPU checked with fastmem option already
                fatal("SimPoint generation should be done with atomic cpu and fastmem")
            if np > 1:
                fatal("SimPoint generation not supported with more than one CPUs")

        for i in xrange(np):
            if options.fastmem:
                test_sys.cpu[i].fastmem = True
            if options.simpoint_profile:
                test_sys.cpu[i].addSimPointProbe(options.simpoint_interval)
            if options.checker:
                test_sys.cpu[i].addCheckerCpu()
            test_sys.cpu[i].createThreads()

        CacheConfig.config_cache(options, test_sys)
        MemConfig.config_mem(options, test_sys)

    return test_sys
def build_drive_system(np):
    """Build and return the driver System used in dual-system (etherlink)
    simulations. Always an AtomicSimpleCPU with SimpleMemory; reads the
    module-level ``options`` and ``bm`` globals.
    """
    # driver system CPU is always simple, so is the memory
    # Note this is an assignment of a class, not an instance.
    DriveCPUClass = AtomicSimpleCPU
    drive_mem_mode = 'atomic'
    DriveMemClass = SimpleMemory

    cmdline = cmd_line_template()
    # Instantiate the ISA-specific base system from the second benchmark entry.
    if buildEnv['TARGET_ISA'] == 'alpha':
        drive_sys = makeLinuxAlphaSystem(drive_mem_mode, bm[1], cmdline=cmdline)
    elif buildEnv['TARGET_ISA'] == 'mips':
        drive_sys = makeLinuxMipsSystem(drive_mem_mode, bm[1], cmdline=cmdline)
    elif buildEnv['TARGET_ISA'] == 'sparc':
        drive_sys = makeSparcSystem(drive_mem_mode, bm[1], cmdline=cmdline)
    elif buildEnv['TARGET_ISA'] == 'x86':
        drive_sys = makeLinuxX86System(drive_mem_mode, np, bm[1],
                                       cmdline=cmdline)
    elif buildEnv['TARGET_ISA'] == 'arm':
        drive_sys = makeArmSystem(drive_mem_mode, options.machine_type, np,
                                  bm[1], options.dtb_filename, cmdline=cmdline)

    # Create a top-level voltage domain
    drive_sys.voltage_domain = VoltageDomain(voltage = options.sys_voltage)

    # Create a source clock for the system and set the clock period
    drive_sys.clk_domain = SrcClockDomain(clock = options.sys_clock,
                                          voltage_domain = drive_sys.voltage_domain)

    # Create a CPU voltage domain
    drive_sys.cpu_voltage_domain = VoltageDomain()

    # Create a source clock for the CPUs and set the clock period
    drive_sys.cpu_clk_domain = SrcClockDomain(clock = options.cpu_clock,
                                              voltage_domain =
                                              drive_sys.cpu_voltage_domain)

    drive_sys.cpu = DriveCPUClass(clk_domain=drive_sys.cpu_clk_domain,
                                  cpu_id=0)
    drive_sys.cpu.createThreads()
    drive_sys.cpu.createInterruptController()
    drive_sys.cpu.connectAllPorts(drive_sys.membus)
    if options.fastmem:
        drive_sys.cpu.fastmem = True

    if options.kernel is not None:
        drive_sys.kernel = binary(options.kernel)

    if is_kvm_cpu(DriveCPUClass):
        drive_sys.vm = KvmVM()

    # Route I/O through a simple bridge.
    drive_sys.iobridge = Bridge(delay='50ns',
                                ranges = drive_sys.mem_ranges)
    drive_sys.iobridge.slave = drive_sys.iobus.master
    drive_sys.iobridge.master = drive_sys.membus.slave

    # Create the appropriate memory controllers and connect them to the
    # memory bus
    drive_sys.mem_ctrls = [DriveMemClass(range = r)
                           for r in drive_sys.mem_ranges]
    for i in xrange(len(drive_sys.mem_ctrls)):
        drive_sys.mem_ctrls[i].port = drive_sys.membus.master

    drive_sys.init_param = options.init_param

    return drive_sys
# Add options
parser = optparse.OptionParser()

# Add iSSD command line options
parser.add_option('--issd_bootloader')
# NAND geometry options (unset options keep the iSSDNandCtrl defaults).
parser.add_option('--nand_type', type="string", default="slc" , help="Flash memory Type ")
parser.add_option('--nand_storage_image', help="Nand image file")
parser.add_option('--nand_num_chn', type="int" , default=2, help="Number of NAND Controller Channels ")
parser.add_option('--nand_pkgs_per_chn', help="Number of packeages on each channel ")
parser.add_option('--nand_dies_per_pkg', help="Number of dies is a package")
parser.add_option('--nand_planes_per_die', help="Number of planes in each sie/chip ")
parser.add_option('--nand_blocks_per_plane', help="Number of blocks on each plane ")
parser.add_option('--nand_pages_per_block', help="Number of pages in a block ")
parser.add_option('--nand_page_size', help="Page size : expressed in terms of DS(data shift) , nand_page_size=9 is 512 bytes in a page (1<<9) ")
# latencies
parser.add_option('--nand_ltcy_rcmd_issue', help="Issue read command to NAND die latency")
parser.add_option('--nand_ltcy_pcmd_issue', help="Issue program command to NAND die latency")
parser.add_option('--nand_ltcy_ecmd_issue', help="Issue erase command to NAND die latency")
parser.add_option('--nand_ltcy_read_page', help="Read page latency")
parser.add_option('--nand_ltcy_program_page', help="Program page latency")
parser.add_option('--nand_ltcy_erase_block', help="Erase block latency")
parser.add_option('--nand_ltcy_read_msb_page', help="-")
parser.add_option('--nand_ltcy_read_lsb_page', help="-")
parser.add_option('--nand_ltcy_program_msb_page', help="-")
parser.add_option('--nand_ltcy_program_lsb_page', help="-")
parser.add_option('--nand_ltcy_read_csb_page', help="-")
parser.add_option('--nand_ltcy_program_csb_page', help="-")
parser.add_option('--nand_ltcy_read_chsb_page', help="-")
parser.add_option('--nand_ltcy_read_clsb_page', help="-")
parser.add_option('--nand_ltcy_program_chsb_page', help="-")
parser.add_option('--nand_ltcy_program_clsb_page', help="-")

# Standard gem5 common and full-system options.
Options.addCommonOptions(parser)
Options.addFSOptions(parser)

# Add the ruby specific and protocol specific options
if '--ruby' in sys.argv:
    Ruby.define_options(parser)
(options, args) = parser.parse_args()

# This script is option-driven; positional arguments are an error.
if args:
    print "Error: script doesn't take any positional arguments"
    sys.exit(1)

# system under test can be any CPU
(TestCPUClass, test_mem_mode, FutureClass) = Simulation.setCPUClass(options)

# Match the memories with the CPUs, based on the options for the test system
TestMemClass = Simulation.setMemClass(options)

# Resolve the benchmark list: a named benchmark, or one/two SysConfigs
# built from the disk/memory options (--dual gets two identical systems).
if options.benchmark:
    try:
        bm = Benchmarks[options.benchmark]
    except KeyError:
        print "Error benchmark %s has not been defined." % options.benchmark
        print "Valid benchmarks are: %s" % DefinedBenchmarks
        sys.exit(1)
else:
    if options.dual:
        bm = [SysConfig(disk=options.disk_image, rootdev=options.root_device,
                        mem=options.mem_size, os_type=options.os_type),
              SysConfig(disk=options.disk_image, rootdev=options.root_device,
                        mem=options.mem_size, os_type=options.os_type)]
    else:
        bm = [SysConfig(disk=options.disk_image, rootdev=options.root_device,
                        mem=options.mem_size, os_type=options.os_type)]

np = options.num_cpus
test_sys = build_test_system(np)

# One entry -> single system root; two -> test + drive linked by ethernet.
if len(bm) == 2:
    drive_sys = build_drive_system(np)
    root = makeDualRoot(True, test_sys, drive_sys, options.etherdump)
elif len(bm) == 1:
    root = Root(full_system=True, system=test_sys)
else:
    print "Error I don't know how to create more than 2 systems."
    sys.exit(1)

if options.timesync:
    root.time_sync_enable = True

if options.frame_capture:
    VncServer.frame_capture = True
# Add iSSD specific components
# NVMe host interface device, memory-mapped at 0x2d100000.
test_sys.NVMeInterface = iSSDHostInterface(gic = test_sys.realview.gic , pio_addr=0x2d100000, pio_size=0x8000 ,int_num=36)
test_sys.NVMeInterface.pio = test_sys.membus.master
test_sys.NVMeInterface.dma = test_sys.membus.slave
test_sys.NVMeInterface.checkDoorBell_latency = '10ns'
test_sys.realview.timer0.int_num0 = 57
test_sys.realview.timer0.int_num1 = 57

# One NAND controller handles two channels; controllers sit at consecutive
# 0x1000-sized PIO windows starting at 0x2d108000.
num_chn = options.nand_num_chn
nand_ctrl_count = int(options.nand_num_chn / 2)
nand_ctrl_base = 0x2d108000
NandCtrl = []
index_num = 0
chn_index = 0
print "media type : %s" % options.nand_type
for nc in xrange(nand_ctrl_count):
    nand_ctrl = iSSDNandCtrl(gic = test_sys.realview.gic , pio_addr=nand_ctrl_base, pio_size=0x1000 ,int_num=58 )
    print "Nand Ctrl base address %x" % nand_ctrl_base
    nand_ctrl_base = nand_ctrl_base + 0x1000
    nand_ctrl.pio = test_sys.membus.master
    nand_ctrl.dma = test_sys.membus.slave
    nand_ctrl.imgFile = options.nand_storage_image
    # Sequential controller index and the two channel indices it owns.
    nand_ctrl.index_num = index_num
    index_num = index_num + 1
    nand_ctrl.chn_0_index = chn_index
    chn_index = chn_index + 1
    nand_ctrl.chn_1_index = chn_index
    chn_index = chn_index + 1
    # Forward each supplied NAND geometry/latency option; unset options
    # leave the iSSDNandCtrl defaults in place.
    if options.nand_type :
        nand_ctrl.media_type = options.nand_type
    if options.nand_num_chn :
        nand_ctrl.numChannel = options.nand_num_chn
    if options.nand_pkgs_per_chn :
        nand_ctrl.numPackages = options.nand_pkgs_per_chn
    if options.nand_dies_per_pkg:
        nand_ctrl.numDies = options.nand_dies_per_pkg
    if options.nand_planes_per_die:
        nand_ctrl.numPlanes = options.nand_planes_per_die
    if options.nand_blocks_per_plane :
        nand_ctrl.numBlocks = options.nand_blocks_per_plane
    if options.nand_pages_per_block :
        nand_ctrl.numPages = options.nand_pages_per_block
    if options.nand_page_size :
        nand_ctrl.DS = options.nand_page_size
    if options.nand_ltcy_rcmd_issue :
        nand_ctrl.nand_ltcy_rcmd_issue = options.nand_ltcy_rcmd_issue
    if options.nand_ltcy_pcmd_issue :
        nand_ctrl.nand_ltcy_pcmd_issue = options.nand_ltcy_pcmd_issue
    if options.nand_ltcy_ecmd_issue :
        nand_ctrl.nand_ltcy_ecmd_issue = options.nand_ltcy_ecmd_issue
    if options.nand_ltcy_read_page :
        nand_ctrl.nand_ltcy_read_page = options.nand_ltcy_read_page
    if options.nand_ltcy_program_page :
        nand_ctrl.nand_ltcy_program_page = options.nand_ltcy_program_page
    if options.nand_ltcy_erase_block :
        nand_ctrl.nand_ltcy_erase_block = options.nand_ltcy_erase_block
    if options.nand_ltcy_read_msb_page :
        nand_ctrl.nand_ltcy_read_msb_page = options.nand_ltcy_read_msb_page
    if options.nand_ltcy_read_lsb_page :
        nand_ctrl.nand_ltcy_read_lsb_page = options.nand_ltcy_read_lsb_page
    if options.nand_ltcy_program_msb_page :
        nand_ctrl.nand_ltcy_program_msb_page = options.nand_ltcy_program_msb_page
    if options.nand_ltcy_program_lsb_page :
        nand_ctrl.nand_ltcy_program_lsb_page = options.nand_ltcy_program_lsb_page
    if options.nand_ltcy_read_csb_page :
        nand_ctrl.nand_ltcy_read_csb_page = options.nand_ltcy_read_csb_page
    if options.nand_ltcy_program_csb_page :
        nand_ctrl.nand_ltcy_program_csb_page = options.nand_ltcy_program_csb_page
    if options.nand_ltcy_read_chsb_page :
        nand_ctrl.nand_ltcy_read_chsb_page = options.nand_ltcy_read_chsb_page
    if options.nand_ltcy_read_clsb_page :
        nand_ctrl.nand_ltcy_read_clsb_page = options.nand_ltcy_read_clsb_page
    if options.nand_ltcy_program_chsb_page :
        nand_ctrl.nand_ltcy_program_chsb_page = options.nand_ltcy_program_chsb_page
    if options.nand_ltcy_program_clsb_page :
        nand_ctrl.nand_ltcy_program_clsb_page = options.nand_ltcy_program_clsb_page
    NandCtrl.append(nand_ctrl)
test_sys.NandCtrl = NandCtrl

#0x2d109000
# Statistics device for the iSSD.
test_sys.stats = iSSDStats(gic = test_sys.realview.gic , pio_addr=0x2d419000 , pio_size=0x4000 ,int_num=0)
test_sys.stats.pio = test_sys.membus.master
test_sys.stats.dma = test_sys.membus.slave

# EOT character on UART will end the simulation
test_sys.realview.uart.end_on_eot = True

print "num of NC = %d" % nand_ctrl_count
#print "numChannel = %d" % test_sys.NandCtrl0.numChannel
#print "numPackages = %d" % test_sys.NandCtrl0.numPackages
#print "numDies = %d" % test_sys.NandCtrl0.numDies
#print "numPlanes = %d" % test_sys.NandCtrl0.numPlanes
#print "numBlocks = %d" % test_sys.NandCtrl0.numBlocks
#print "numPages = %d" % test_sys.NandCtrl0.numPages
#print "Page size = " + str(test_sys.NandCtrl0.DS) + "(" + str((1 << int(test_sys.NandCtrl0.DS) ) ) + ")"
print

#os.system("cp -f dummy.rcS Uploadfile")
#test_sys.readfile = os.getcwd() + '/Uploadfile'
# Hand control to the standard gem5 simulation driver.
Simulation.setWorkCountOptions(test_sys, options)
Simulation.run(options, root, test_sys, FutureClass)
|
992,549 | 8c7d9bc7c8d098d27b222477630dbb3be91bb4ea | '''
10. В списке все элементы различны. Поменяйте местами минимальный и
максимальный элемент этого списка. Создавать новый список недопустимо.
'''
# Exercise: swap the minimal and maximal elements of the list in place
# (creating a new list is not allowed).
a = [1, 33, 5, 7, 2, 5, 99, 36, 3]
x = min(a)
y = max(a)
print(a)
print(id(a))
# Locate the first occurrence of each extreme.
xi = a.index(x)
yi = a.index(y)
# Swap in place with simultaneous assignment; the list object is reused,
# as the identical id() printed below confirms.
a[xi], a[yi] = y, x
print(a)
print(id(a))
|
992,550 | 82bd71bdfd0f522f4960cb7377792d158d88a1d9 | x = 'jigneshjigneshjigneshjigneshjignesh'
# De-duplicate the characters of x while preserving first-seen order.
# dict.fromkeys keeps insertion order (guaranteed since Python 3.7) and
# uses O(1) hashing, replacing the original O(n^2) "char not in lst" scan.
lst = list(dict.fromkeys(x))
print("".join(lst))
992,551 | 93055acef8ec9608a619841b6d7b620918db83fd | from compas.geometry import Frame
from compas_fab.robots.ur5 import Robot
from compas_fab.backends import AnalyticalInverseKinematics
# Solve analytic inverse kinematics for a UR5 robot at one target pose.
ik = AnalyticalInverseKinematics()
robot = Robot()
# Target tool pose in world coordinates: Frame(point, x-axis, y-axis).
frame_WCF = Frame((0.381, 0.093, 0.382), (0.371, -0.292, -0.882), (0.113, 0.956, -0.269))
# Each solution is a (joint_positions, joint_names) pair; print the positions.
for jp, jn in ik.inverse_kinematics(robot, frame_WCF, options={'solver': 'ur5'}): # knows that we need the IK for the UR5 robot
    print(jp)
|
992,552 | 5f992d8b2a8e5c06910877863f575399a5e27a05 | # 4poznanski
import unittest
from kol1 import Bank, Client
class MyTest(unittest.TestCase):
    """Tests for the Bank and Client classes from kol1.

    Uses ``assertEqual``/``assertNotEqual``: the camel-case aliases
    ``assertEquals``/``assertNotEquals`` are deprecated and were removed
    in Python 3.12.
    """

    def setUp(self):
        # Fresh bank and two clients, each funded with 100, before every test.
        self.bank = Bank()
        self.client1 = Client("Brown")
        self.client2 = Client("Castley")
        self.client1.input(100)
        self.client2.input(100)

    def test_bank_create(self):
        self.assertIsNotNone(self.bank)

    def test_client_create(self):
        self.assertIsNotNone(self.client1)

    def test_client_name(self):
        self.assertEqual(self.client2.name, "Castley")

    def test_client_cash(self):
        self.assertEqual(self.client1.cash, 100)
        self.assertEqual(self.client2.cash, 100)

    def test_add_client(self):
        self.bank.addClient(self.client1)
        self.assertTrue(self.client1 in self.bank.clients)

    def test_input(self):
        self.client1.input(50)
        self.assertEqual(self.client1.cash, 150)

    def test_withdraw(self):
        self.client1.withdraw(20)
        self.assertEqual(self.client1.cash, 80)

    def test_transfer(self):
        self.bank.transfer(self.client1, self.client2, 30)
        self.assertEqual(self.client1.cash, 70)
        self.assertEqual(self.client2.cash, 130)

    def test_input_negative_value(self):
        # Depositing a negative amount must not be applied.
        self.client1.input(-20)
        self.assertNotEqual(self.client1.cash, -80)

    def test_withdraw_negative_value(self):
        self.client2.withdraw(-20)
        self.assertNotEqual(self.client2.cash, 120)

    def test_transfer_negative_value(self):
        self.bank.transfer(self.client1, self.client2, -20)
        self.assertNotEqual(self.client1.cash, 120)
        self.assertNotEqual(self.client2.cash, 80)

    def test_withdraw_more_than_account_balance(self):
        # Overdraft must not leave a negative balance.
        self.client1.withdraw(120)
        self.assertNotEqual(self.client1.cash, -20)
# Allow running the test module directly (python <file>.py).
if __name__ == '__main__':
    unittest.main()
|
992,553 | 2855cb9a5171b930c13680480fe3ab60e91fe1cb | import bpy
import os
import json
from bpy.props import (
BoolProperty,
IntProperty,
FloatProperty,
FloatVectorProperty,
StringProperty,
EnumProperty,
CollectionProperty,
PointerProperty
)
from bpy_extras.io_utils import ExportHelper, ImportHelper
from .addon import (
ADDON_ID, ADDON_PATH, SCRIPT_PATH, SAFE_MODE, prefs, temp_prefs)
from .constants import (
ICON_OFF, ICON_ON, PM_ITEMS, PM_ITEMS_M, PM_ITEMS_M_DEFAULT, ED_DATA,
OP_CTX_ITEMS,
NUM_LIST_ROWS, LIST_PADDING, DEFAULT_POLL,
W_FILE, W_JSON, W_KEY,
SPACE_ITEMS, REGION_ITEMS, OPEN_MODE_ITEMS)
from .bl_utils import (
bp, re_operator, re_prop, re_prop_set, re_name_idx,
BaseCollectionItem
)
from .layout_helper import lh
from .debug_utils import *
from .panel_utils import (
hide_panel, unhide_panel, add_panel,
hidden_panel, rename_panel_group, remove_panel_group,
panel_context_items, bl_panel_types)
from .macro_utils import (
add_macro, remove_macro, rename_macro)
from . import keymap_helper
from . import pme
from . import operator_utils
from .keymap_helper import (
KeymapHelper, MOUSE_BUTTONS,
add_mouse_button, remove_mouse_button, to_key_name)
from .operators import (
WM_OT_pm_select, WM_OT_pme_user_pie_menu_call,
WM_OT_pmidata_specials_call
)
from .extra_operators import PME_OT_none
from .previews_helper import ph
from .ui import (
tag_redraw, draw_addons_maximized, is_userpref_maximized,
gen_op_name, gen_prop_name
)
from .ui_utils import get_pme_menu_class, pme_menu_classes
from . import ed_pie_menu
from . import ed_menu
from . import ed_popup
from . import ed_stack_key
from . import ed_sticky_key
from . import ed_macro
from . import ed_panel_group
from . import ed_hpanel_group
from .ed_base import (
WM_OT_pmi_icon_select, WM_OT_pmi_data_edit, WM_OT_pm_edit,
PME_OT_pmi_cmd_generate
)
from .ed_panel_group import (
PME_OT_interactive_panels_toggle, draw_pme_panel, poll_pme_panel)
from .ed_sticky_key import PME_OT_sticky_key_edit
# One Editor instance per menu type, keyed by the PM mode identifier.
EDITORS = dict(
    PMENU=ed_pie_menu.Editor(),
    RMENU=ed_menu.Editor(),
    DIALOG=ed_popup.Editor(),
    SCRIPT=ed_stack_key.Editor(),
    STICKY=ed_sticky_key.Editor(),
    MACRO=ed_macro.Editor(),
    PANEL=ed_panel_group.Editor(),
    HPANEL=ed_hpanel_group.Editor(),
)
# Separator used when composing tree paths for the menu-list UI.
TREE_SPLITTER = '$PME$'
MAP_TYPES = ['KEYBOARD', 'MOUSE', 'TWEAK', 'NDOF', 'TEXTINPUT', 'TIMER']
# Item modes offered while editing; MODE_ITEMS adds the EMPTY placeholder.
EMODE_ITEMS = [
    ('COMMAND', "Command", "Python code"),
    ('PROP', "Property", "Property"),
    ('MENU', "Menu", "Sub-menu"),
    ('HOTKEY', "Hotkey", "Hotkey"),
    ('CUSTOM', "Custom", "Custom layout"),
    # ('OPERATOR', "Operator", "Operator"),
]
MODE_ITEMS = [
    ('EMPTY', "Empty", "Don't use the item")
]
MODE_ITEMS.extend(EMODE_ITEMS)
PD_MODE_ITEMS = (
    ('PIE', 'Pie Mode', ""),
    ('PANEL', 'Panel Mode', ""),
)
# Icon identifiers accepted by UILayout.prop() ('NONE' excluded); used to
# validate user-supplied icon names before drawing them.
AVAILABLE_ICONS = {}
for k, i in bpy.types.UILayout.bl_rna.functions[
        "prop"].parameters["icon"].enum_items.items():
    if k != 'NONE':
        AVAILABLE_ICONS[i.identifier] = True
# Shortcut to the shared pme property registry.
pp = pme.props
kmis_map = {}
# Default paths for the import/export file dialogs (updated per session).
import_filepath = os.path.join(ADDON_PATH, "examples", "examples.json")
export_filepath = os.path.join(ADDON_PATH, "examples", "my_pie_menus.json")
class WM_OT_pm_import(bpy.types.Operator, ImportHelper):
    """Import menus from one or more JSON files.

    Invoked without 'mode', shows a popup asking how to resolve name
    collisions (RENAME/SKIP/REPLACE), then re-invokes itself with the
    chosen mode through the file selector.
    """
    bl_idname = "wm.pm_import"
    bl_label = "Import Menus"
    bl_description = "Import menus"
    bl_options = {'INTERNAL'}
    filename_ext = ".json"
    filepath = StringProperty(subtype='FILE_PATH', default="*.json")
    files = CollectionProperty(type=bpy.types.OperatorFileListElement)
    filter_glob = StringProperty(default="*.json", options={'HIDDEN'})
    directory = StringProperty(subtype='DIR_PATH')
    # Collision handling: 'RENAME', 'SKIP' or 'REPLACE' (empty = ask).
    mode = StringProperty()
    def _draw(self, menu, context):
        # Popup body: one entry per collision-resolution mode.
        lh.lt(menu.layout, operator_context='INVOKE_DEFAULT')
        lh.operator(
            WM_OT_pm_import.bl_idname, "Rename if exists",
            filepath=import_filepath,
            mode='RENAME')
        lh.operator(
            WM_OT_pm_import.bl_idname, "Skip if exists",
            filepath=import_filepath,
            mode='SKIP')
        lh.operator(
            WM_OT_pm_import.bl_idname, "Replace if exists",
            filepath=import_filepath,
            mode='REPLACE')
    def draw(self, context):
        # Intentionally empty: hides the default file-browser options panel.
        pass
    def import_file(self, filepath):
        """Parse one JSON file and add its menus to the preferences.

        Each serialized menu is a list:
        [name, km_name, hotkey, items, (mode), (data), (open_mode), (poll_cmd)]
        where trailing entries are optional (older files are shorter).
        """
        try:
            with open(filepath, 'r') as f:
                s = f.read()
        except:
            self.report({'WARNING'}, W_FILE)
            return
        menus = None
        try:
            menus = json.loads(s)
        except:
            self.report({'WARNING'}, W_JSON)
            return
        if menus:
            pr = prefs()
            if self.mode == 'RENAME':
                # Precompute unique replacement names for colliding menus so
                # sub-menu references can be remapped below.
                pm_names = [menu[0] for menu in menus]
                new_names = {}
                for name in pm_names:
                    if name in pr.pie_menus:
                        new_names[name] = pr.unique_pm_name(name)
            for menu in menus:
                if self.mode == 'REPLACE':
                    if menu[0] in pr.pie_menus:
                        pr.remove_pm(pr.pie_menus[menu[0]])
                elif self.mode == 'RENAME':
                    if menu[0] in new_names:
                        menu[0] = new_names[menu[0]]
                elif self.mode == 'SKIP':
                    if menu[0] in pr.pie_menus:
                        continue
                # Older files had no mode field; default to pie menu.
                mode = menu[4] if len(menu) > 4 else 'PMENU'
                pm = pr.add_pm(mode, menu[0], True)
                pm.km_name = menu[1]
                n = len(menu)
                if n > 5:
                    pm.data = menu[5]
                if n > 6:
                    pm.open_mode = menu[6]
                if n > 7:
                    pm.poll_cmd = menu[7]
                if menu[2]:
                    try:
                        (pm.key, pm.ctrl, pm.shift, pm.alt, pm.oskey,
                            pm.key_mod) = keymap_helper.parse_hotkey(menu[2])
                    except:
                        self.report({'WARNING'}, W_KEY % menu[2])
                items = menu[3]
                for i in range(0, len(items)):
                    item = items[i]
                    # Pie menus have a fixed slot count; other types grow.
                    pmi = pm.pmis[i] if mode == 'PMENU' else pm.pmis.add()
                    # Item tuple length encodes its format:
                    # 4 = (name, mode, icon, text), 3 = empty item with
                    # name/icon/text, 1 = empty item with text only.
                    n = len(item)
                    if n == 4:
                        if self.mode == 'RENAME' and \
                                item[1] == 'MENU' and item[3] in new_names:
                            item[3] = new_names[item[3]]
                        try:
                            pmi.mode = item[1]
                        except:
                            pmi.mode = 'EMPTY'
                        pmi.name = item[0]
                        pmi.icon = item[2]
                        pmi.text = item[3]
                    elif n == 3:
                        pmi.mode = 'EMPTY'
                        pmi.name = item[0]
                        pmi.icon = item[1]
                        pmi.text = item[2]
                    elif n == 1:
                        pmi.mode = 'EMPTY'
                        pmi.text = item[0]
                if pm.mode == 'SCRIPT' and not pm.data.startswith("s?"):
                    # Legacy stack-key format: move the command stored in
                    # pm.data into a proper item.
                    pmi = pm.pmis.add()
                    pmi.text = pm.data
                    pmi.mode = 'COMMAND'
                    pmi.name = "Command 1"
                    pm.data = pm.ed.default_pmi_data
            # Second pass: register panels/macros once all menus exist.
            for menu in menus:
                pm = pr.pie_menus[menu[0]]
                if pm.mode == 'PANEL':
                    for i, pmi in enumerate(pm.pmis):
                        add_panel(
                            pm.name, i, pmi.text, pmi.name,
                            pm.panel_space, pm.panel_region,
                            pm.panel_context, pm.panel_category,
                            draw_pme_panel, poll_pme_panel)
                elif pm.mode == 'HPANEL':
                    for pmi in pm.pmis:
                        hide_panel(pmi.text)
                elif pm.mode == 'MACRO':
                    add_macro(pm)
    def execute(self, context):
        # Import every selected file, then remember the chosen path.
        global import_filepath
        for f in self.files:
            filepath = os.path.join(self.directory, f.name)
            if os.path.isfile(filepath):
                self.import_file(filepath)
        import_filepath = self.filepath
        PME_UL_pm_tree.update_tree()
        return {'FINISHED'}
    def invoke(self, context, event):
        # Without a mode, ask the user how to handle collisions first.
        if not self.mode:
            context.window_manager.popup_menu(
                self._draw, title=self.bl_description)
            return {'FINISHED'}
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}
class WM_OT_pm_export(bpy.types.Operator, ExportHelper):
    """Export menus to a JSON file.

    Invoked without 'mode', shows a popup with the export scope
    (ALL / ENABLED / ACTIVE), then re-invokes itself through the
    file selector with the chosen scope.
    """
    bl_idname = "wm.pm_export"
    bl_label = "Export Menus"
    bl_description = "Export menus"
    bl_options = {'INTERNAL', 'REGISTER', 'UNDO'}
    filename_ext = ".json"
    filepath = StringProperty(subtype='FILE_PATH', default="*.json")
    filter_glob = StringProperty(default="*.json", options={'HIDDEN'})
    # Export scope: 'ALL', 'ENABLED' or 'ACTIVE' (empty = ask).
    mode = StringProperty()
    def _draw(self, menu, context):
        # Popup body: one entry per export scope.
        lh.lt(menu.layout, operator_context='INVOKE_DEFAULT')
        lh.operator(
            WM_OT_pm_export.bl_idname, "All Menus",
            filepath=export_filepath,
            mode='ALL')
        lh.operator(
            WM_OT_pm_export.bl_idname, "All Enabled Menus",
            filepath=export_filepath,
            mode='ENABLED')
        lh.operator(
            WM_OT_pm_export.bl_idname, "Selected Menu",
            filepath=export_filepath,
            mode='ACTIVE')
    def check(self, context):
        return True
    def draw(self, context):
        # Intentionally empty: hides the default file-browser options panel.
        return
    def execute(self, context):
        """Serialize the selected scope of menus to self.filepath."""
        global export_filepath
        pr = prefs()
        if not self.filepath:
            return {'CANCELLED'}
        if not self.filepath.endswith(".json"):
            self.filepath += ".json"
        menus = []
        apm = pr.selected_pm
        apm_name = apm and apm.name
        pms_to_export = set()
        parsed_pms = set()
        def parse_children(pmis):
            # Recursively include sub-menus referenced by MENU items so a
            # partial export stays self-contained.
            for pmi in pmis:
                if pmi.mode == 'MENU':
                    _, menu_name = pmi.parse_menu_data()
                    if menu_name in pr.pie_menus:
                        pms_to_export.add(menu_name)
                        if menu_name not in parsed_pms:
                            parsed_pms.add(menu_name)
                            parse_children(pr.pie_menus[menu_name].pmis)
        for pm in pr.pie_menus:
            if self.mode == 'ENABLED' and not pm.enabled:
                continue
            if self.mode == 'ACTIVE' and pm.name != apm_name:
                continue
            pms_to_export.add(pm.name)
            parsed_pms.add(pm.name)
            if self.mode != 'ALL':
                parse_children(pm.pmis)
        for pm_name in pms_to_export:
            pm = pr.pie_menus[pm_name]
            items = []
            for pmi in pm.pmis:
                # EMPTY items are stored compactly (see import_file for the
                # matching tuple-length decoding).
                if pmi.mode == 'EMPTY':
                    if pmi.name:
                        item = (pmi.name, pmi.icon, pmi.text)
                    else:
                        item = (pmi.text,)
                else:
                    item = (
                        pmi.name,
                        pmi.mode,
                        pmi.icon,
                        pmi.text
                    )
                items.append(item)
            menu = (
                pm.name,
                pm.km_name,
                pm.to_hotkey(use_key_names=False),
                items,
                pm.mode,
                pm.data,
                pm.open_mode,
                pm.poll_cmd
            )
            menus.append(menu)
        try:
            with open(self.filepath, 'w') as f:
                f.write(json.dumps(menus, indent=2, separators=(", ", ": ")))
        except:
            return {'CANCELLED'}
        export_filepath = self.filepath
        return {'FINISHED'}
    def invoke(self, context, event):
        # Without a scope, ask the user what to export first.
        if not self.mode:
            context.window_manager.popup_menu(
                self._draw, title=self.bl_description)
            return {'FINISHED'}
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}
class WM_OT_pm_add(bpy.types.Operator):
    """Add a new menu of the given mode (popup with types when mode is empty)."""
    bl_idname = "wm.pm_add"
    bl_label = ""
    bl_description = "Add an item"
    bl_options = {'INTERNAL'}
    # Menu type to create (one of ED_DATA ids); empty = show the type menu.
    mode = StringProperty()
    def _draw(self, menu, context):
        PME_MT_pm_new.draw_items(self, menu.layout)
    def execute(self, context):
        if not self.mode:
            context.window_manager.popup_menu(
                self._draw, WM_OT_pm_add.bl_description)
        else:
            prefs().add_pm(self.mode)
            PME_UL_pm_tree.update_tree()
            tag_redraw()
        # CANCELLED so the add itself is not pushed as an undo step.
        return {'CANCELLED'}
class WM_OT_pm_duplicate(bpy.types.Operator):
    """Duplicate the selected menu, including its items and settings."""
    bl_idname = "wm.pm_duplicate"
    bl_label = ""
    bl_description = "Duplicate the selected menu"
    bl_options = {'INTERNAL'}
    def execute(self, context):
        pr = prefs()
        if len(pr.pie_menus) == 0:
            return {'FINISHED'}
        apm = pr.selected_pm
        apm_name = apm.name
        # add_pm changes the selection; re-fetch the source menu by name.
        pm = pr.add_pm(apm.mode, apm_name, True)
        apm = pr.pie_menus[apm_name]
        pm.km_name = apm.km_name
        if pm.km_name in PME_UL_pm_tree.collapsed_km_names:
            PME_UL_pm_tree.collapsed_km_names.remove(pm.km_name)
        pm.mode = apm.mode
        pm.data = apm.data
        pm.open_mode = apm.open_mode
        pm.poll_cmd = apm.poll_cmd
        if pm.mode != 'HPANEL':
            for i in range(0, len(apm.pmis)):
                # Pie menus are pre-populated with fixed slots; other
                # modes need an item added per source item.
                if apm.mode != 'PMENU':
                    pm.pmis.add()
                pm.pmis[i].name = apm.pmis[i].name
                pm.pmis[i].icon = apm.pmis[i].icon
                pm.pmis[i].mode = apm.pmis[i].mode
                pm.pmis[i].text = apm.pmis[i].text
        if pm.mode == 'MACRO':
            add_macro(pm)
        elif pm.mode == 'PANEL':
            for i, pmi in enumerate(pm.pmis):
                add_panel(
                    pm.name, i, pmi.text, pmi.name,
                    pm.panel_space, pm.panel_region,
                    pm.panel_context, pm.panel_category,
                    draw_pme_panel, poll_pme_panel)
        elif pm.mode == 'HPANEL':
            pass
        PME_UL_pm_tree.update_tree()
        return {'FINISHED'}
    @classmethod
    def poll(cls, context):
        return len(prefs().pie_menus) > 0
class WM_OT_pm_remove(bpy.types.Operator):
    """Remove the selected menu from the preferences."""
    bl_idname = "wm.pm_remove"
    bl_label = ""
    bl_description = "Remove the selected menu"
    bl_options = {'INTERNAL'}
    def execute(self, context):
        prefs().remove_pm()
        PME_UL_pm_tree.update_tree()
        return {'FINISHED'}
    @classmethod
    def poll(cls, context):
        return len(prefs().pie_menus) > 0
class WM_OT_pm_remove_all(bpy.types.Operator):
    """Remove all menus, asking for confirmation first when 'ask' is set."""
    bl_idname = "wm.pm_remove_all"
    bl_label = ""
    bl_description = "Remove all menus"
    bl_options = {'INTERNAL'}
    # When True, show a confirmation popup instead of removing directly.
    ask = BoolProperty()
    def _draw(self, menu, context):
        lh.lt(menu.layout)
        lh.operator(
            WM_OT_pm_remove_all.bl_idname, "Remove All", 'X',
            ask=False)
    def execute(self, context):
        pr = prefs()
        if self.ask:
            context.window_manager.popup_menu(
                self._draw, WM_OT_pm_remove_all.bl_description, 'QUESTION')
        else:
            # remove_pm() drops the currently selected menu each call.
            n = len(pr.pie_menus)
            for i in range(0, n):
                pr.remove_pm()
            PME_UL_pm_tree.update_tree()
            tag_redraw()
        return {'FINISHED'}
    @classmethod
    def poll(cls, context):
        return len(prefs().pie_menus) > 0
class WM_OT_pm_enable_all(bpy.types.Operator):
    """Set the 'enabled' flag of every menu to the given value."""
    bl_idname = "wm.pm_enable_all"
    bl_label = ""
    bl_description = "Enable or disable all menus"
    bl_options = {'INTERNAL'}
    # Target state applied to all menus.
    enabled = BoolProperty()
    def execute(self, context):
        for pm in prefs().pie_menus:
            pm.enabled = self.enabled
        return {'FINISHED'}
    @classmethod
    def poll(cls, context):
        return len(prefs().pie_menus) > 0
class WM_OT_pm_move(bpy.types.Operator):
    """Move the selected menu up or down in the list (or tree) view."""
    bl_idname = "wm.pm_move"
    bl_label = ""
    bl_description = "Move the selected menu"
    bl_options = {'INTERNAL'}
    # +1 moves down, -1 moves up (sign convention inferred from usage;
    # callers are outside this chunk - TODO confirm).
    direction = IntProperty()
    def execute(self, context):
        pr = prefs()
        tpr = temp_prefs()
        if pr.tree_mode:
            link = tpr.links[tpr.links_idx]
            # Keymap-name header rows cannot be moved.
            if link.label:
                return {'CANCELLED'}
            new_idx = tpr.links_idx + self.direction
            num_links = len(tpr.links)
            if 0 <= new_idx <= num_links - 1:
                new_link = tpr.links[new_idx]
                if link.is_folder or not link.path:
                    # Top-level/folder rows: skip over nested child rows
                    # until the next top-level row in that direction.
                    while 0 <= new_idx < num_links:
                        new_link = tpr.links[new_idx]
                        if new_link.label:
                            return {'CANCELLED'}
                        elif not new_link.path:
                            break
                        new_idx += self.direction
                    if new_idx < 0 or new_idx >= num_links:
                        return {'CANCELLED'}
                else:
                    if new_link.label or new_link.is_folder or \
                            not new_link.path:
                        return {'CANCELLED'}
                pm_idx = pr.pie_menus.find(new_link.pm_name)
                pr.pie_menus.move(pr.active_pie_menu_idx, pm_idx)
                pr.active_pie_menu_idx = pm_idx
                PME_UL_pm_tree.update_tree()
                # PME.links_idx = new_idx
            else:
                return {'CANCELLED'}
        else:
            new_idx = pr.active_pie_menu_idx + self.direction
            if 0 <= new_idx <= len(pr.pie_menus) - 1:
                pr.pie_menus.move(pr.active_pie_menu_idx, new_idx)
                pr.active_pie_menu_idx = new_idx
                PME_UL_pm_tree.update_tree()
        return {'FINISHED'}
    @classmethod
    def poll(cls, context):
        return len(prefs().pie_menus) > 1
class WM_OT_pm_sort(bpy.types.Operator):
    """Sort menus by name, hotkey, keymap name or type."""
    bl_idname = "wm.pm_sort"
    bl_label = ""
    bl_description = "Sort menus"
    bl_options = {'INTERNAL'}
    # Sort key: 'NAME', 'HOTKEY', 'KEYMAP' or 'TYPE'; empty = show menu.
    mode = StringProperty()
    def _draw(self, menu, context):
        lh.lt(menu.layout)
        lh.operator(
            WM_OT_pm_sort.bl_idname, "By Name", 'SORTALPHA',
            mode='NAME')
        lh.operator(
            WM_OT_pm_sort.bl_idname, "By Hotkey", 'FONTPREVIEW',
            mode='HOTKEY')
        lh.operator(
            WM_OT_pm_sort.bl_idname, "By Keymap Name", 'SPLITSCREEN',
            mode='KEYMAP')
        lh.operator(
            WM_OT_pm_sort.bl_idname, "By Type", 'PROP_CON',
            mode='TYPE')
    def execute(self, context):
        if not self.mode:
            context.window_manager.popup_menu(
                self._draw, WM_OT_pm_sort.bl_description)
            return {'FINISHED'}
        pr = prefs()
        if len(pr.pie_menus) == 0:
            return {'FINISHED'}
        items = [pm for pm in pr.pie_menus]
        if self.mode == 'NAME':
            items.sort(key=lambda pm: pm.name)
        if self.mode == 'KEYMAP':
            items.sort(key=lambda pm: (pm.km_name, pm.name))
        if self.mode == 'HOTKEY':
            # '_' sorts unbound menus after bound ones.
            items.sort(key=lambda pm: (
                to_key_name(pm.key) if pm.key != 'NONE' else '_',
                pm.ctrl, pm.shift, pm.alt, pm.oskey,
                pm.key_mod if pm.key_mod != 'NONE' else '_'))
        if self.mode == 'TYPE':
            items.sort(key=lambda pm: (pm.mode, pm.name))
        items = [pm.name for pm in items]
        apm = pr.selected_pm
        apm_name = apm.name
        # Rearrange the collection in place to match the sorted name order,
        # tracking where the previously selected menu ends up.
        idx = len(items) - 1
        aidx = 0
        while idx > 0:
            k = items[idx]
            if pr.pie_menus[idx] != pr.pie_menus[k]:
                k_idx = pr.pie_menus.find(k)
                pr.pie_menus.move(k_idx, idx)
            if apm_name == k:
                aidx = idx
            idx -= 1
        pr.active_pie_menu_idx = aidx
        PME_UL_pm_tree.update_tree()
        tag_redraw()
        return {'FINISHED'}
    @classmethod
    def poll(cls, context):
        return len(prefs().pie_menus) > 1
class PME_MT_pm_new(bpy.types.Menu):
    """'New menu' menu: one entry per editor type from ED_DATA."""
    bl_label = "New"
    def draw_items(self, layout):
        lh.lt(layout)
        for id, name, icon in ED_DATA:
            lh.operator(WM_OT_pm_add.bl_idname, name, icon, mode=id)
    def draw(self, context):
        self.draw_items(self.layout)
class WM_OT_pme_preview(bpy.types.Operator):
    """Preview a menu by invoking it in RELEASE mode."""
    bl_idname = "wm.pme_preview"
    bl_label = ""
    bl_description = "Preview"
    bl_options = {'INTERNAL'}
    # Name of the menu to preview.
    pie_menu_name = StringProperty()
    def execute(self, context):
        bpy.ops.wm.pme_user_pie_menu_call(
            'INVOKE_DEFAULT', pie_menu_name=self.pie_menu_name,
            invoke_mode='RELEASE')
        return {'FINISHED'}
class PME_OT_pmi_name_apply(bpy.types.Operator):
    """Copy the suggested item name (sname) into the editable name field."""
    bl_idname = "pme.pmi_name_apply"
    bl_label = ""
    bl_description = "Apply the suggested name"
    bl_options = {'INTERNAL'}
    idx = bpy.props.IntProperty()
    def execute(self, context):
        data = prefs().pmi_data
        data.name = data.sname
        return {'FINISHED'}
class WM_OT_icon_filter_clear(bpy.types.Operator):
    """Clear the icon-browser filter string."""
    bl_idname = "wm.icon_filter_clear"
    bl_label = ""
    bl_description = "Clear Filter"
    bl_options = {'INTERNAL'}
    def execute(self, context):
        prefs().icon_filter = ""
        return {'FINISHED'}
class PME_OT_icons_refresh(bpy.types.Operator):
    """Reload the custom icon previews via the previews helper."""
    bl_idname = "pme.icons_refresh"
    bl_label = ""
    bl_description = "Refresh icons"
    bl_options = {'INTERNAL'}
    def execute(self, context):
        ph.refresh()
        return {'FINISHED'}
class PME_OT_docs(bpy.types.Operator):
    """Open the online documentation, optionally at an anchor ('id')."""
    bl_idname = "pme.docs"
    bl_label = "Pie Menu Editor Documentation"
    bl_description = "Documentation"
    bl_options = {'INTERNAL'}
    # Page suffix/anchor appended to the base documentation URL.
    id = bpy.props.StringProperty(options={'SKIP_SAVE'})
    def execute(self, context):
        bpy.ops.wm.url_open(
            url=(
                "https://wiki.blender.org/index.php/User:Raa/"
                "Addons/Pie_Menu_Editor") + self.id)
        return {'FINISHED'}
class PMLink(bpy.types.PropertyGroup):
    """A row of the tree view: either a menu entry, a folder, or a
    keymap-name header (label set, pm_name empty).

    'path' lists the ancestor folder names and is kept outside the RNA
    property system in the class-level 'paths' dict, keyed by the link's
    unique 'name' (a running integer assigned by add()).
    """
    pm_name = bpy.props.StringProperty()
    is_folder = bpy.props.BoolProperty()
    label = bpy.props.StringProperty()
    folder = bpy.props.StringProperty()
    group = bpy.props.StringProperty()
    # Class-level counter for unique link names and the shared path store.
    idx = 0
    paths = {}
    @staticmethod
    def add():
        # Create a new link with a unique sequential name.
        link = temp_prefs().links.add()
        link.name = str(PMLink.idx)
        PMLink.idx += 1
        return link
    @staticmethod
    def clear():
        PMLink.idx = 0
        PMLink.paths.clear()
    def __getattr__(self, attr):
        # Lazily create the per-link path list.
        # NOTE(review): silently returns None for any other missing
        # attribute instead of raising AttributeError - verify intended.
        if attr == "path":
            if self.name not in PMLink.paths:
                PMLink.paths[self.name] = []
            return PMLink.paths[self.name]
    def __str__(self):
        return "%s [%s] (%r) (%s)" % (
            self.pm_name, "/".join(self.path), self.is_folder, self.label)
    def curpath(self):
        # Path of the containing folder (group + ancestors).
        ret = self.group + TREE_SPLITTER
        ret += TREE_SPLITTER.join(self.path)
        return ret
    def fullpath(self):
        # Like curpath(), but folders also append their own name.
        ret = self.group + TREE_SPLITTER
        ret += TREE_SPLITTER.join(self.path)
        if self.is_folder:
            if self.path:
                ret += TREE_SPLITTER
            ret += self.pm_name
        return ret
class PMEData(bpy.types.PropertyGroup):
    """Temporary (non-saved) state: tree links, selection and sub-menu list."""
    def get_links_idx(self):
        return self["links_idx"] if "links_idx" in self else 0
    def set_links_idx(self, value):
        # Selecting a tree row also selects the corresponding menu.
        pr = prefs()
        tpr = temp_prefs()
        if value < 0 or value >= len(tpr.links):
            return
        link = tpr.links[value]
        self["links_idx"] = value
        if link.pm_name:
            pr.active_pie_menu_idx = pr.pie_menus.find(link.pm_name)
    links = bpy.props.CollectionProperty(type=PMLink)
    links_idx = bpy.props.IntProperty(get=get_links_idx, set=set_links_idx)
    hidden_panels_idx = bpy.props.IntProperty()
    pie_menus = bpy.props.CollectionProperty(type=BaseCollectionItem)
    def update_pie_menus(self):
        """Rebuild the list of menus usable as sub-menus of the selection."""
        pr = prefs()
        spm = pr.selected_pm
        supported_sub_menus = spm.ed.supported_sub_menus
        pms = set()
        for pm in pr.pie_menus:
            # A menu cannot be its own sub-menu.
            if pm.name == spm.name:
                continue
            if pm.mode in supported_sub_menus:
                pms.add(pm.name)
        self.pie_menus.clear()
        for pm in sorted(pms):
            item = self.pie_menus.add()
            item.name = pm
class WM_UL_panel_list(bpy.types.UIList):
    """List of hidden panels; columns controlled by panel_info_visibility."""
    def draw_item(
            self, context, layout, data, item,
            icon, active_data, active_propname, index):
        # item.text holds the panel class name; look up the hidden type.
        tp = hidden_panel(item.text)
        v = prefs().panel_info_visibility
        if 'NAME' in v:
            layout.label(item.name or item.text, icon='SYNTAX_OFF')
        if 'CLASS' in v:
            layout.label(item.text, icon='SYNTAX_ON')
        if 'CTX' in v:
            layout.label(
                tp.bl_context if tp and hasattr(tp, "bl_context") else "-",
                icon='NODE')
        if 'CAT' in v:
            layout.label(
                tp.bl_category if tp and hasattr(tp, "bl_category") else "-",
                icon='LINENUMBERS_ON')
class WM_UL_pm_list(bpy.types.UIList):
    """Flat list of menus: enabled toggle, type icon, name/hotkey/keymap."""
    def draw_item(
            self, context, layout, data, item,
            icon, active_data, active_propname, index):
        pr = prefs()
        layout = layout.row(True)
        layout.prop(
            item, "enabled", text="", emboss=False,
            icon=ICON_ON if item.enabled else ICON_OFF)
        layout.label(icon=item.ed.icon)
        # With exactly two visible columns, right-align the second one.
        col = 0
        num_cols = pr.show_names + pr.show_hotkeys + pr.show_keymap_names
        if pr.show_names:
            layout.prop(
                item, "label", text="", emboss=False)
            col += 1
        if pr.show_hotkeys:
            if num_cols == 2 and col == 1:
                layout = layout.row(True)
                layout.alignment = 'RIGHT'
            layout.label(item.to_hotkey())
            col += 1
        if pr.show_keymap_names:
            if num_cols == 2 and col == 1:
                layout = layout.row(True)
                layout.alignment = 'RIGHT'
            layout.label(item.km_name)
    # def draw_filter(self, context, layout):
    #     layout = layout.row(True)
    #     layout.prop(
    #         prefs(), "show_hotkeys", icon='FONTPREVIEW', toggle=True)
    #     layout.prop(
    #         prefs(), "show_keymap_names", icon='SPLITSCREEN', toggle=True)
    def filter_items(self, context, data, propname):
        # Combine the name filter with the add-on's own menu filter.
        pr = prefs()
        pie_menus = getattr(data, propname)
        helper_funcs = bpy.types.UI_UL_list
        filtered = []
        ordered = []
        if self.filter_name:
            filtered = helper_funcs.filter_items_by_name(
                self.filter_name, self.bitflag_filter_item,
                pie_menus, "name")
        if not filtered:
            filtered = [self.bitflag_filter_item] * len(pie_menus)
        if pr.use_filter:
            for idx, pm in enumerate(pie_menus):
                if not pm.filter(pr):
                    filtered[idx] = 0
        if self.use_filter_sort_alpha:
            ordered = helper_funcs.sort_items_by_name(pie_menus, "name")
        return filtered, ordered
class PME_UL_pm_tree(bpy.types.UIList):
    """Tree view of menus grouped by keymap with sub-menu folders.

    The tree rows live in temp_prefs().links (PMLink items); update_tree()
    rebuilds them from the pie_menus collection.
    """
    # Guard to suppress rebuilds while the tree is being manipulated.
    locked = False
    collapsed_km_names = set()
    expanded_folders = set()
    keymap_names = None
    has_folders = False
    @staticmethod
    def link_is_collapsed(link):
        # A link is hidden if any ancestor folder is not expanded.
        path = link.path
        p = link.group
        for i in range(0, len(path)):
            if p:
                p += TREE_SPLITTER
            p += path[i]
            if p not in PME_UL_pm_tree.expanded_folders:
                return True
        return False
    @staticmethod
    def update_tree():
        """Rebuild the link rows, preserving the current selection."""
        if PME_UL_pm_tree.locked:
            return
        pr = prefs()
        if not pr.tree_mode:
            return
        tpr = temp_prefs()
        DBG_TREE and logh("Update Tree")
        # Remember the selected link/folder to restore the selection later.
        num_links = len(tpr.links)
        sel_link, sel_folder = None, None
        sel_link = 0 <= tpr.links_idx < num_links and tpr.links[tpr.links_idx]
        if not sel_link or not sel_link.pm_name or \
                sel_link.pm_name not in pr.pie_menus:
            sel_link = None
        sel_folder = sel_link and sel_link.path and sel_link.path[-1]
        tpr.links.clear()
        PMLink.clear()
        # folders: menu name -> names of sub-menus it references.
        folders = {}
        keymaps = {}
        files = set()
        pms = [
            pm for pm in pr.pie_menus
            if not pr.use_filter or pm.filter(pr)]
        if pr.show_keymap_names:
            pms.sort(key=lambda pm: pm.km_name)
        else:
            # NOTE(review): with keymap names hidden, keymaps["dummy"] is
            # the boolean True, yet 'pms = keymaps[km]' below rebinds pms
            # to it before iterating - looks wrong; verify against the
            # upstream add-on source.
            keymaps["dummy"] = True
            pms.sort(key=lambda pm: pm.name)
        for pm in pms:
            if pr.show_keymap_names:
                # A menu may belong to several comma-separated keymaps.
                kms = pm.km_name.split(", ")
                for km in kms:
                    if km not in keymaps:
                        keymaps[km] = []
                    keymaps[km].append(pm)
            for pmi in pm.pmis:
                if pmi.mode == 'MENU':
                    _, name = pmi.parse_menu_data()
                    if name not in pr.pie_menus or \
                            pr.use_filter and \
                            not pr.pie_menus[name].filter(pr):
                        continue
                    if pm.name not in folders:
                        folders[pm.name] = []
                    folders[pm.name].append(name)
                    files.add(name)
        PME_UL_pm_tree.has_folders = len(folders) > 0
        if pr.show_keymap_names:
            for kpms in keymaps.values():
                kpms.sort(key=lambda pm: pm.name)
        def add_children(files, group, path, idx, aidx):
            # Recursively emit link rows for a folder's sub-menus.
            DBG_TREE and logi(" " * len(path) + "/".join(path))
            for file in files:
                # Avoid infinite recursion on cyclic menu references.
                if file in path:
                    continue
                link = PMLink.add()
                link.group = group
                link.pm_name = file
                link.folder = pm.name
                link.path.extend(path)
                if file == apm_name and (
                        not sel_link or sel_folder == pm.name):
                    aidx = idx
                idx += 1
                if file in folders:
                    link.is_folder = True
                    path.append(file)
                    new_idx, aidx = add_children(
                        folders[file], group, path, idx, aidx)
                    if new_idx == idx:
                        link.is_folder = False
                    idx = new_idx
                    path.pop()
            return idx, aidx
        idx = 0
        aidx = -1
        apm_name = len(pr.pie_menus) and pr.selected_pm.name
        PME_UL_pm_tree.keymap_names = \
            km_names = sorted(keymaps.keys())
        for km in km_names:
            if pr.show_keymap_names:
                # Header row with the keymap name.
                link = PMLink.add()
                link.label = km
                idx += 1
            pms = keymaps[km]
            path = []
            for pm in pms:
                # if pr.show_keymap_names and km_name != pm.km_name:
                #     km_name = pm.km_name
                #     link = PMLink.add()
                #     link.label = km_name
                #     idx += 1
                if pm.name in folders:
                    link = PMLink.add()
                    link.group = km
                    link.is_folder = True
                    link.pm_name = pm.name
                    if pm.name == apm_name and (
                            not sel_link or not sel_folder):
                        aidx = idx
                    idx += 1
                    path.append(pm.name)
                    idx, aidx = add_children(
                        folders[pm.name], km, path, idx, aidx)
                    path.pop()
                # elif pm.name not in files:
                else:
                    link = PMLink.add()
                    link.group = km
                    link.pm_name = pm.name
                    if pm.name == apm_name and (
                            not sel_link or not sel_folder):
                        aidx = idx
                    idx += 1
        # Deduplicate: a menu shown inside a folder should not also appear
        # as a top-level row (unless it anchors other rows).
        pm_links = {}
        for link in tpr.links:
            if link.label:
                continue
            if link.pm_name not in pm_links:
                pm_links[link.pm_name] = []
            pm_links[link.pm_name].append(link)
        links_to_remove = set()
        fixed_links = set()
        for pm_name, links in pm_links.items():
            if len(links) == 1:
                continue
            links.sort(key=lambda link: len(link.path), reverse=True)
            can_be_removed = False
            for link in links:
                if len(link.path) == 0:
                    if can_be_removed and link.pm_name not in fixed_links:
                        links_to_remove.add(link.name)
                        DBG_TREE and logi("REMOVE", link.pm_name)
                else:
                    if not can_be_removed and \
                            link.name not in links_to_remove and \
                            link.path[0] != pm_name:
                        fixed_links.add(link.path[0])
                        DBG_TREE and logi("FIXED", link.path[0])
                        can_be_removed = True
        # Children of a removed top-level row must be removed with it.
        prev_link_will_be_removed = False
        for link in tpr.links:
            if link.label:
                prev_link_will_be_removed = False
                continue
            if link.path:
                if prev_link_will_be_removed:
                    links_to_remove.add(link.name)
            else:
                prev_link_will_be_removed = link.name in links_to_remove
        for link in links_to_remove:
            PME_UL_pm_tree.expanded_folders.discard(
                tpr.links[link].fullpath())
            tpr.links.remove(tpr.links.find(link))
        if pr.show_keymap_names:
            # Drop keymap headers that ended up with no rows under them.
            links_to_remove.clear()
            prev_link = None
            for link in tpr.links:
                if link.label and prev_link and prev_link.label:
                    links_to_remove.add(prev_link.name)
                prev_link = link
            if prev_link and prev_link.label:
                links_to_remove.add(prev_link.name)
            for link in links_to_remove:
                tpr.links.remove(tpr.links.find(link))
        # Restore the selection to the row of the active menu.
        aidx = -1
        for i, link in enumerate(tpr.links):
            if link.pm_name == apm_name:
                aidx = i
                break
        tpr["links_idx"] = aidx
        if len(tpr.links):
            sel_link = tpr.links[tpr.links_idx]
            if sel_link.pm_name:
                pm = pr.selected_pm
                # Make sure the selected menu's keymap group is expanded.
                if pm.km_name in PME_UL_pm_tree.collapsed_km_names:
                    PME_UL_pm_tree.collapsed_km_names.remove(pm.km_name)
    def draw_item(
            self, context, layout, data, item,
            icon, active_data, active_propname, index):
        pr = prefs()
        # if pr.show_hotkeys and item.pm_name:
        #     layout = layout.split(0.6, True)
        layout = layout.row(True)
        lh.lt(layout)
        if item.pm_name:
            # Menu (or folder) row.
            # if item.folder:
            #     lh.label("", icon='BLANK1')
            pm = pr.pie_menus[item.pm_name]
            # WM_UL_pm_list.draw_item(
            #     self, context, lh.layout, data, pm, icon,
            #     active_data, active_propname, index)
            # lh.row(layout, alignment='LEFT')
            lh.prop(
                pm, "enabled", "", ICON_ON if pm.enabled else ICON_OFF,
                emboss=False)
            # Indent one blank icon per nesting level.
            for i in range(0, len(item.path)):
                lh.label("", icon='BLANK1')
            lh.label("", pm.ed.icon)
            if item.is_folder:
                icon = 'TRIA_DOWN' \
                    if item.fullpath() in PME_UL_pm_tree.expanded_folders \
                    else 'TRIA_RIGHT'
                lh.operator(
                    PME_OT_tree_folder_toggle.bl_idname, "",
                    icon, emboss=False,
                    folder=item.fullpath(),
                    idx=index)
            hk = pm.to_hotkey()
            if pr.show_names or not pr.show_hotkeys or not hk:
                lh.prop(pm, "label", "", emboss=False)
            if pr.show_hotkeys and hk:
                if pr.show_names:
                    lh.row(layout, alignment='RIGHT')
                lh.label(hk)
        else:
            # Keymap-name header row with expand/collapse toggles.
            lh.row()
            # lh.layout.active = False
            lh.layout.scale_y = 0.95
            icon = 'TRIA_RIGHT_BAR' \
                if item.label in PME_UL_pm_tree.collapsed_km_names else \
                'TRIA_DOWN_BAR'
            lh.operator(
                PME_OT_tree_kmname_toggle.bl_idname, item.label,
                icon, km_name=item.label, idx=index, all=False)
            # lh.label()
            icon = 'TRIA_LEFT_BAR' \
                if item.label in PME_UL_pm_tree.collapsed_km_names else \
                'TRIA_DOWN_BAR'
            lh.operator(
                PME_OT_tree_kmname_toggle.bl_idname, "",
                icon, km_name=item.label, idx=index,
                all=True)
    def draw_filter(self, context, layout):
        pr = prefs()
        row = layout.row(True)
        row.prop(
            pr, "show_names", icon='SYNTAX_OFF', toggle=True)
        row.prop(
            pr, "show_hotkeys", icon='FONTPREVIEW', toggle=True)
        row.prop(
            pr, "show_keymap_names", icon='SPLITSCREEN', toggle=True)
    def filter_items(self, context, data, propname):
        # Hide rows whose keymap group or parent folder is collapsed.
        pr = prefs()
        links = getattr(data, propname)
        filtered = [self.bitflag_filter_item] * len(links)
        cur_kmname = None
        for idx, link in enumerate(links):
            pm = None
            if link.path:
                pm = pr.pie_menus[link.path[0]]
            elif link.pm_name:
                pm = pr.pie_menus[link.pm_name]
            if link.label and pr.show_keymap_names:
                cur_kmname = link.label
            if pr.show_keymap_names and pm and \
                    cur_kmname in pm.km_name and \
                    cur_kmname in PME_UL_pm_tree.collapsed_km_names or \
                    link.path and \
                    PME_UL_pm_tree.link_is_collapsed(link):
                filtered[idx] = 0
        return filtered, []
class PME_OT_tree_folder_toggle(bpy.types.Operator):
    """Toggle one tree folder open/closed and select its row."""
    bl_idname = "pme.tree_folder_toggle"
    bl_label = ""
    bl_description = "Expand or collapse"
    bl_options = {'INTERNAL'}
    # Full path of the folder to toggle and its row index.
    folder = bpy.props.StringProperty()
    idx = bpy.props.IntProperty()
    def execute(self, context):
        temp_prefs().links_idx = self.idx
        if self.folder:
            if self.folder in PME_UL_pm_tree.expanded_folders:
                PME_UL_pm_tree.expanded_folders.remove(self.folder)
            else:
                PME_UL_pm_tree.expanded_folders.add(self.folder)
        return {'FINISHED'}
class PME_OT_tree_folder_toggle_all(bpy.types.Operator):
    """Expand all folders if everything is collapsed; otherwise collapse all."""
    bl_idname = "pme.tree_folder_toggle_all"
    bl_label = ""
    bl_description = "Expand or collapse all menus"
    bl_options = {'INTERNAL'}
    def execute(self, context):
        if PME_UL_pm_tree.expanded_folders:
            PME_UL_pm_tree.expanded_folders.clear()
        else:
            for link in temp_prefs().links:
                if link.is_folder:
                    PME_UL_pm_tree.expanded_folders.add(link.fullpath())
        return {'FINISHED'}
class PME_OT_tree_kmname_toggle(bpy.types.Operator):
    """Collapse/expand keymap groups in the tree view.

    With all=False only km_name is toggled; with all=True every other
    group is collapsed (or all toggled when km_name is empty).
    """
    bl_idname = "pme.tree_kmname_toggle"
    bl_label = ""
    bl_description = "Expand or collapse keymap names"
    bl_options = {'INTERNAL'}
    km_name = bpy.props.StringProperty()
    idx = bpy.props.IntProperty()
    all = bpy.props.BoolProperty()
    def execute(self, context):
        tpr = temp_prefs()
        if self.idx != -1:
            tpr.links_idx = self.idx
        if self.all:
            # When nothing is collapsed yet, the bulk action collapses.
            add = len(PME_UL_pm_tree.collapsed_km_names) == 0
            for link in tpr.links:
                if not link.label:
                    continue
                if link.label == self.km_name:
                    continue
                if self.km_name:
                    PME_UL_pm_tree.collapsed_km_names.add(link.label)
                elif add:
                    PME_UL_pm_tree.collapsed_km_names.add(link.label)
                else:
                    PME_UL_pm_tree.collapsed_km_names.discard(link.label)
            # Keep the named group itself expanded.
            if self.km_name and \
                    self.km_name in PME_UL_pm_tree.collapsed_km_names:
                PME_UL_pm_tree.collapsed_km_names.remove(self.km_name)
        else:
            if self.km_name in PME_UL_pm_tree.collapsed_km_names:
                PME_UL_pm_tree.collapsed_km_names.remove(self.km_name)
            else:
                PME_UL_pm_tree.collapsed_km_names.add(self.km_name)
        return {'FINISHED'}
class PMIItem(bpy.types.PropertyGroup):
    """A single menu item (slot) of a menu.

    The 'icon' string may be prefixed with flag characters:
    '!' = hidden, '#' = icon only (see extract_flags()).
    """
    # Class-level cache: prop path -> whether it expands (non-bool value).
    expandable_props = {}
    mode = EnumProperty(items=MODE_ITEMS, description="Type of the item")
    text = StringProperty(maxlen=1024)
    icon = StringProperty(description="Icon")
    def get_pmi_label(self):
        return self.name
    def set_pmi_label(self, value):
        # Renaming a PANEL item must also refresh the panel group.
        if self.name == value:
            return
        pm = prefs().selected_pm
        if pm.mode != 'PANEL':
            return
        for pmi in pm.pmis:
            if pmi == self:
                self.name = value
                pm.update_panel_group()
                break
    label = StringProperty(
        description="Label", get=get_pmi_label, set=set_pmi_label)
    @property
    def rm_class(self):
        # Menu class generated for this item's target menu name.
        value = self.text.replace("@", "")
        return get_pme_menu_class(value)
    def from_dict(self, value):
        pass
    def to_dict(self):
        return {k: self[k] for k in self.keys()}
    def parse(self, default_icon='NONE'):
        """Return (text, icon, original icon, icon_only, hidden) for drawing."""
        icon, icon_only, hidden = self.extract_flags()
        oicon = icon
        text = self.name
        if icon_only:
            text = ""
        if hidden:
            icon = 'NONE' if not icon or not icon_only else 'BLANK1'
            if text:
                text = ""
        elif not icon:
            icon = default_icon
        if not hidden:
            if self.mode == 'PROP':
                bl_prop = bp.get(
                    self.prop if hasattr(self, "prop") else self.text)
                if bl_prop:
                    # Widgets that draw their own label get no extra text.
                    if bl_prop.type in {'STRING', 'ENUM', 'POINTER'}:
                        text = ""
                    if bl_prop.type in {'FLOAT', 'INT', 'BOOLEAN'} and len(
                            bl_prop.default_array) > 1:
                        text = ""
            # '@'-prefixed icons are custom previews; others must be valid
            # built-in icon identifiers.
            if icon[0] != "@" and icon not in AVAILABLE_ICONS:
                icon = default_icon
        return text, icon, oicon, icon_only, hidden
    def extract_flags(self):
        """Strip leading '!'/'#' flags; return (icon, icon_only, hidden)."""
        icon = self.icon
        hidden = False
        icon_only = False
        while icon:
            if icon[0] == "!":
                hidden = True
            elif icon[0] == "#":
                icon_only = True
            else:
                break
            icon = icon[1:]
        return icon, icon_only, hidden
    def parse_icon(self, default_icon='NONE'):
        # Icon name with flags stripped, validated against built-in icons.
        icon = self.extract_flags()[0]
        if not icon:
            return default_icon
        if icon[0] != "@" and icon not in AVAILABLE_ICONS:
            return default_icon
        return icon
    def parse_menu_data(self):
        """Return (mouse_over, menu_name); '@' prefix = open on mouse-over."""
        data = self.text
        if not data:
            return False, ""
        mouse_over = data[0] == "@"
        if mouse_over:
            data = data[1:]
        return mouse_over, data
    def copy_item(self):
        PMEPreferences.pmi_clipboard = (
            self.name, self.icon, self.mode, self.text)
    def paste_item(self):
        pr = prefs()
        pm = pr.selected_pm
        self.name, self.icon, self.mode, self.text = pr.pmi_clipboard
        # Only DIALOG items keep the '!'/'#' icon flags.
        if pm.mode != 'DIALOG':
            self.icon, _, _ = self.extract_flags()
    def is_expandable_prop(self):
        """True if this PROP item's value is non-boolean (cached by path)."""
        if self.mode != 'PROP':
            return False
        prop = self.text
        if prop in self.expandable_props:
            return self.expandable_props[prop]
        value = None
        try:
            # NOTE(review): eval of a user-supplied property path; the
            # add-on executes user code by design, but keep in mind.
            value = eval(prop)
        except:
            return False
        self.expandable_props[prop] = not isinstance(value, bool)
        return self.expandable_props[prop]
class PMItem(bpy.types.PropertyGroup):
    """A menu definition (pie menu, popup dialog, panel group, macro, ...).

    Heavy use of get/set/update callbacks keeps global state (keymap items in
    `kmis_map`, registered panels, macros, the tree view) in sync with edits.
    """

    # name -> compiled poll function (or None when compilation failed).
    poll_methods = {}

    @staticmethod
    def _parse_keymap(km_name, exists=True):
        """Split a comma-separated keymap-name string into a list.

        exists=True keeps only names present in the user keyconfig (falling
        back to ["Window"]); exists=False keeps only the missing ones.
        """
        names = []
        keymaps = bpy.context.window_manager.keyconfigs.user.keymaps
        for name in km_name.split(","):
            name = name.strip()
            if not name:
                continue
            name_in_keymaps = name in keymaps
            if exists and not name_in_keymaps or \
                    not exists and name_in_keymaps:
                continue
            names.append(name)
        if exists and not names:
            names.append("Window")
        return names

    def parse_keymap(self, exists=True):
        return PMItem._parse_keymap(self.km_name, exists)

    def get_pm_km_name(self):
        # Stored as an ID property; default lazily.
        if "km_name" not in self:
            self["km_name"] = "Window"
        return self["km_name"]

    def set_pm_km_name(self, value):
        """Normalize the keymap list and re-register the hotkey under it."""
        if not value:
            value = "Window"
        else:
            value = ", ".join(PMItem._parse_keymap(value))
        if "km_name" not in self or self["km_name"] != value:
            if "km_name" in self:
                self.unregister_hotkey()
            self["km_name"] = value
            self.register_hotkey()
            PME_UL_pm_tree.update_tree()

    km_name = StringProperty(
        default="Window", description="Keymap names",
        get=get_pm_km_name, set=set_pm_km_name)

    def get_pm_name(self):
        return self.name

    def set_pm_name(self, value):
        """Rename the menu and update every structure referring to the name.

        Touches: panel groups, macros, pme links, generated menu classes,
        sub-menu slots of other menus, hotkey map, tree folders, old_pms.
        """
        pr = prefs()
        pme = bpy.context.window_manager.pme
        value = value.replace("@", "")
        if value == self.name:
            return
        if value:
            if value in pr.pie_menus:
                value = pr.unique_pm_name(value)
            if self.mode == 'PANEL':
                rename_panel_group(self.name, value)
            elif self.mode == 'MACRO':
                # rename_macro(self.name, value)
                remove_macro(self)
            for link in pme.links:
                if link.pm_name == self.name:
                    link.pm_name = value
            if self.mode == 'RMENU' and self.name in pme_menu_classes:
                del pme_menu_classes[self.name]
                get_pme_menu_class(value)
            # Re-point MENU slots of every other menu at the new name.
            for pm in pr.pie_menus:
                if pm == self:
                    continue
                for pmi in pm.pmis:
                    if pmi.mode == 'MENU':
                        mouse_over, menu_name = pmi.parse_menu_data()
                        if menu_name == self.name:
                            pmi.text = "@" + value if mouse_over else value
            if self.name in kmis_map:
                if kmis_map[self.name]:
                    # Active hotkey: drop it; re-registered under the new name below.
                    self.unregister_hotkey()
                else:
                    kmis_map[value] = kmis_map[self.name]
                    del kmis_map[self.name]
            if self.name in PME_UL_pm_tree.expanded_folders:
                PME_UL_pm_tree.expanded_folders.remove(self.name)
                PME_UL_pm_tree.expanded_folders.add(value)
            if self.name in pr.old_pms:
                pr.old_pms.remove(self.name)
                pr.old_pms.add(value)
            for link in temp_prefs().links:
                if link.pm_name == self.name:
                    link.pm_name = value
                for i in range(0, len(link.path)):
                    if link.path[i] == self.name:
                        link.path[i] = value
            self.name = value
            if self.mode == 'MACRO':
                add_macro(self)
            if self.name not in kmis_map:
                self.register_hotkey()
            PME_UL_pm_tree.update_tree()

    label = StringProperty(
        get=get_pm_name, set=set_pm_name, description="Menu name")
    pmis = CollectionProperty(type=PMIItem)
    mode = EnumProperty(items=PM_ITEMS)

    def update_keymap_item(self, context):
        """Push current key settings into every registered keymap item."""
        pr = prefs()
        kmis = kmis_map[self.name]
        if kmis:
            for k in kmis.keys():
                kmi = kmis[k]
                # Cycle map types until the key is accepted by the kmi.
                for map_type in MAP_TYPES:
                    try:
                        kmi.type = self.key
                        break
                    except TypeError:
                        kmi.map_type = map_type
                kmi.ctrl = self.ctrl
                kmi.shift = self.shift
                kmi.alt = self.alt
                kmi.oskey = self.oskey
                kmi.key_modifier = self.key_mod
                kmi.value = \
                    'DOUBLE_CLICK' if self.open_mode == 'DOUBLE_CLICK' \
                    else 'PRESS'
                if self.key == 'NONE' or not self.enabled:
                    if pr.kh.available():
                        pr.kh.keymap(k)
                        pr.kh.remove(kmi)
            if self.key == 'NONE' or not self.enabled:
                kmis_map[self.name] = None
        else:
            self.register_hotkey()

    open_mode = EnumProperty(
        name="Open Mode",
        items=OPEN_MODE_ITEMS,
        update=update_keymap_item)
    key = EnumProperty(
        items=keymap_helper.key_items,
        description="Key pressed", update=update_keymap_item)
    ctrl = BoolProperty(
        description="Ctrl key pressed", update=update_keymap_item)
    shift = BoolProperty(
        description="Shift key pressed", update=update_keymap_item)
    alt = BoolProperty(
        description="Alt key pressed", update=update_keymap_item)
    oskey = BoolProperty(
        description="Operating system key pressed", update=update_keymap_item)

    def get_pm_key_mod(self):
        return self["key_mod"] if "key_mod" in self else 0

    def set_pm_key_mod(self, value):
        """Store the modifier; migrate mouse-button hooks when it changes."""
        pr = prefs()
        prev_value = self.key_mod
        self["key_mod"] = value
        value = self.key_mod
        if prev_value == value or not self.enabled:
            return
        kms = self.parse_keymap()
        if prev_value != 'NONE' and prev_value in MOUSE_BUTTONS:
            for km in kms:
                remove_mouse_button(prev_value, pr.kh, km)
        if value != 'NONE' and value in MOUSE_BUTTONS:
            for km in kms:
                add_mouse_button(value, pr.kh, km)

    key_mod = EnumProperty(
        items=keymap_helper.key_items,
        description="Regular key pressed as a modifier",
        get=get_pm_key_mod, set=set_pm_key_mod)

    def get_pm_enabled(self):
        if "enabled" not in self:
            self["enabled"] = True
        return self["enabled"]

    def set_pm_enabled(self, value):
        """Enable/disable the menu and (un)register its side effects."""
        if "enabled" in self and self["enabled"] == value:
            return
        self["enabled"] = value
        if self.mode == 'PANEL':
            if self.enabled:
                for i, pmi in enumerate(self.pmis):
                    add_panel(
                        self.name, i, pmi.text, pmi.name,
                        self.panel_space, self.panel_region,
                        self.panel_context, self.panel_category,
                        draw_pme_panel, poll_pme_panel)
            else:
                remove_panel_group(self.name)
        elif self.mode == 'HPANEL':
            for pmi in self.pmis:
                if self.enabled:
                    hide_panel(pmi.text)
                else:
                    unhide_panel(pmi.text)
        elif self.mode == 'MACRO':
            if self.enabled:
                add_macro(self)
            else:
                remove_macro(self)
        if self.ed.has_hotkey:
            self.update_keymap_item(bpy.context)
        if self.key_mod in MOUSE_BUTTONS:
            kms = self.parse_keymap()
            for km in kms:
                if self.enabled:
                    pass
                    # add_mouse_button(pm.key_mod, kh, km)
                else:
                    remove_mouse_button(self.key_mod, prefs().kh, km)

    enabled = BoolProperty(
        description="Enable or disable the menu",
        default=True,
        get=get_pm_enabled, set=set_pm_enabled)

    def update_poll_cmd(self, context):
        """(Re)compile the user poll expression into poll_methods[name]."""
        if self.poll_cmd == DEFAULT_POLL:
            self.poll_methods.pop(self.name, None)
        else:
            exec_locals = pme.context.gen_locals()
            try:
                # SECURITY: executes user-authored code from the poll field.
                exec(
                    "def poll(cls, context):" + self.poll_cmd,
                    pme.context.globals, exec_locals)
                self.poll_methods[self.name] = exec_locals["poll"]
            except:
                # None marks a broken poll; poll() then returns True.
                self.poll_methods[self.name] = None

    poll_cmd = StringProperty(
        description=(
            "Poll method\nTest if the item can be called/displayed or not"),
        default=DEFAULT_POLL, maxlen=1024, update=update_poll_cmd)
    data = StringProperty(maxlen=1024)

    def update_panel_group(self):
        """Re-register all panels of this PANEL-mode menu."""
        remove_panel_group(self.name)
        for i, pmi in enumerate(self.pmis):
            add_panel(
                self.name, i, pmi.text, pmi.name,
                self.panel_space, self.panel_region,
                self.panel_context, self.panel_category,
                draw_pme_panel, poll_pme_panel)

    def get_panel_context(self):
        prop = pp.parse(self.data)
        for item in panel_context_items(self, bpy.context):
            if item[0] == prop.pg_context:
                return item[4]
        return 0

    def set_panel_context(self, value):
        value = panel_context_items(self, bpy.context)[value][0]
        prop = pp.parse(self.data)
        if prop.pg_context == value:
            return
        self.data = pp.encode(self.data, "pg_context", value)
        self.update_panel_group()

    panel_context = EnumProperty(
        items=panel_context_items,
        name="Context",
        description="Panel context",
        get=get_panel_context, set=set_panel_context)

    def get_panel_category(self):
        prop = pp.parse(self.data)
        return prop.pg_category

    def set_panel_category(self, value):
        prop = pp.parse(self.data)
        if prop.pg_category == value:
            return
        self.data = pp.encode(self.data, "pg_category", value)
        self.update_panel_group()

    panel_category = StringProperty(
        default="My Category", description="Panel category",
        get=get_panel_category, set=set_panel_category)

    def get_panel_region(self):
        prop = pp.parse(self.data)
        for item in REGION_ITEMS:
            if item[0] == prop.pg_region:
                return item[4]
        return 0

    def set_panel_region(self, value):
        value = REGION_ITEMS[value][0]
        prop = pp.parse(self.data)
        if prop.pg_region == value:
            return
        self.data = pp.encode(self.data, "pg_region", value)
        self.update_panel_group()

    panel_region = EnumProperty(
        items=REGION_ITEMS,
        name="Region",
        description="Panel region",
        get=get_panel_region, set=set_panel_region)

    def get_panel_space(self):
        prop = pp.parse(self.data)
        for item in SPACE_ITEMS:
            if item[0] == prop.pg_space:
                return item[4]
        return 0

    def set_panel_space(self, value):
        value = SPACE_ITEMS[value][0]
        prop = pp.parse(self.data)
        if prop.pg_space == value:
            return
        self.data = pp.encode(self.data, "pg_space", value)
        self.update_panel_group()

    panel_space = EnumProperty(
        items=SPACE_ITEMS,
        name="Space",
        description="Panel space",
        get=get_panel_space, set=set_panel_space)

    # The following properties are all persisted inside the `data` string
    # via get_data/set_data.
    pm_radius = IntProperty(
        subtype='PIXEL',
        description="Radius of the pie menu (-1 - use default value)",
        get=lambda s: s.get_data("pm_radius"),
        set=lambda s, v: s.set_data("pm_radius", v),
        default=-1, step=10, min=-1, max=1000)
    pm_threshold = IntProperty(
        subtype='PIXEL',
        description=(
            "Distance from center needed "
            "before a selection can be made(-1 - use default value)"),
        get=lambda s: s.get_data("pm_threshold"),
        set=lambda s, v: s.set_data("pm_threshold", v),
        default=-1, step=10, min=-1, max=1000)
    pm_confirm = IntProperty(
        subtype='PIXEL',
        description=(
            "Distance threshold after which selection is made "
            "(-1 - use default value)"),
        get=lambda s: s.get_data("pm_confirm"),
        set=lambda s, v: s.set_data("pm_confirm", v),
        default=-1, step=10, min=-1, max=1000)
    pm_flick = BoolProperty(
        name="Confirm on Release",
        description="Confirm selection when releasing the hotkey",
        get=lambda s: s.get_data("pm_flick"),
        set=lambda s, v: s.set_data("pm_flick", v))
    pd_box = BoolProperty(
        name="Use Frame", description="Use a frame",
        get=lambda s: s.get_data("pd_box"),
        set=lambda s, v: s.set_data("pd_box", v))
    pd_auto_close = BoolProperty(
        name="Auto Close on Mouse Out", description="Auto close on mouse out",
        get=lambda s: s.get_data("pd_auto_close"),
        set=lambda s, v: s.set_data("pd_auto_close", v))
    pd_expand = BoolProperty(
        name="Expand Sub Popup Dialogs",
        description=(
            "Expand all sub popup dialogs "
            "instead of using them as a button"),
        get=lambda s: s.get_data("pd_expand"),
        set=lambda s, v: s.set_data("pd_expand", v))
    pd_panel = EnumProperty(
        name="Dialog Mode", description="Popup dialog mode",
        items=PD_MODE_ITEMS,
        get=lambda s: s.get_data("pd_panel"),
        set=lambda s, v: s.set_data("pd_panel", v))
    pd_width = IntProperty(
        name="Width", description="Width of the popup",
        get=lambda s: s.get_data("pd_width"),
        set=lambda s, v: s.set_data("pd_width", v),
        step=50, min=150, max=2000)
    rm_title = BoolProperty(
        name="Show Title", description="Show title",
        get=lambda s: s.get_data("rm_title"),
        set=lambda s, v: s.set_data("rm_title", v))
    s_undo = BoolProperty(
        name="Undo Previous Command", description="Undo previous command",
        get=lambda s: s.get_data("s_undo"),
        set=lambda s, v: s.set_data("s_undo", v))

    def poll(self, cls=None, context=None):
        """Evaluate the user poll; a broken poll (None) counts as True."""
        if self.poll_cmd == DEFAULT_POLL:
            return True
        if self.name not in self.poll_methods:
            self.update_poll_cmd(bpy.context)
        poll_method = self.poll_methods[self.name]
        return poll_method is None or poll_method(cls, context)

    @property
    def is_new(self):
        return self.name not in prefs().old_pms

    def register_hotkey(self, km_names=None):
        """Create keymap items for this menu in each of its keymaps."""
        pr = prefs()
        kmis_map[self.name] = None
        if self.key == 'NONE' or not self.enabled:
            return
        if pr.kh.available():
            if km_names is None:
                km_names = self.parse_keymap()
            for km_name in km_names:
                pr.kh.keymap(km_name)
                kmi = pr.kh.operator(
                    WM_OT_pme_user_pie_menu_call,
                    None,  # hotkey
                    self.key, self.ctrl, self.shift, self.alt, self.oskey,
                    'NONE' if self.key_mod in MOUSE_BUTTONS else self.key_mod
                )
                kmi.properties.pie_menu_name = self.name
                kmi.properties.invoke_mode = 'HOTKEY'
                kmi.properties.keymap = km_name
                kmi.value = \
                    'DOUBLE_CLICK' if self.open_mode == 'DOUBLE_CLICK' \
                    else 'PRESS'
                if kmis_map[self.name]:
                    kmis_map[self.name][km_name] = kmi
                else:
                    kmis_map[self.name] = {km_name: kmi}
                if self.key_mod in MOUSE_BUTTONS:
                    add_mouse_button(self.key_mod, pr.kh, km_name)

    def unregister_hotkey(self):
        """Remove this menu's keymap items and forget its kmis_map entry."""
        pr = prefs()
        if pr.kh.available() and self.name in kmis_map and kmis_map[self.name]:
            for k in kmis_map[self.name].keys():
                pr.kh.keymap(k)
                pr.kh.remove(kmis_map[self.name][k])
                if self.key_mod in MOUSE_BUTTONS:
                    remove_mouse_button(self.key_mod, pr.kh, k)
        if self.name in kmis_map:
            del kmis_map[self.name]

    def filter_by_mode(self, pr):
        return self.mode in pr.mode_filter
        # if self.mode == 'PMENU':
        #     return pr.show_pm
        # if self.mode == 'RMENU':
        #     return pr.show_rm
        # if self.mode == 'DIALOG':
        #     return pr.show_pd
        # if self.mode == 'SCRIPT':
        #     return pr.show_scripts
        # if self.mode == 'STICKY':
        #     return pr.show_sticky
        # if self.mode == 'PANEL':
        #     return pr.show_pg
        # if self.mode == 'HPANEL':
        #     return pr.show_hpg
        # if self.mode == 'MACRO':
        #     return pr.show_macro

    def filter(self, pr):
        """Visibility filter for list/tree views."""
        return self.filter_by_mode(pr) and (
            not pr.show_only_new_pms or self.is_new)

    def from_dict(self, value):
        pass

    def to_dict(self):
        d = {}
        return d

    def to_hotkey(self, use_key_names=True):
        """Human-readable hotkey string for this menu."""
        return keymap_helper.to_hotkey(
            self.key, self.ctrl, self.shift, self.alt, self.oskey,
            self.key_mod, use_key_names=use_key_names)

    def get_data(self, key):
        # Read one field out of the encoded `data` string.
        value = getattr(pp.parse(self.data), key)
        # prop = pp.get(key)
        # if prop.ptype == 'BOOL':
        #     value = value != ""
        # elif prop.ptype == 'INT':
        #     value = int(value) if value else 0
        return value

    def set_data(self, key, value):
        # Write one field back into the encoded `data` string.
        # prop = pp.get(key)
        # if prop.ptype == 'BOOL':
        #     value = "1" if value else ""
        # elif prop.ptype == 'INT':
        #     value = str(value)
        self.data = pp.encode(self.data, key, value)

    @property
    def ed(self):
        """Editor object handling this menu's mode."""
        return EDITORS[self.mode]

    def __str__(self):
        return "[%s][%s][%s] %s" % (
            "V" if self.enabled else " ",
            self.mode, self.to_hotkey(), self.label
        )
class PMIData(bpy.types.PropertyGroup):
    """Scratch data for the slot editor (the item currently being edited)."""

    # Shared hidden keymap item used to edit operator properties.
    _kmi = None

    @property
    def kmi(self):
        pr = prefs()
        if not PMIData._kmi:
            pr.kh.keymap()
            PMIData._kmi = pr.kh.operator(PME_OT_none)
            PMIData._kmi.active = False
        return PMIData._kmi

    def check_pmi_errors(self, context):
        """Validate the current slot data; set `msg` and suggested name."""
        self.info("")
        pr = prefs()
        pm = pr.selected_pm
        if self.mode == 'COMMAND':
            self.sname = ""
            mo = re_operator.search(self.cmd)
            if mo:
                self.sname = gen_op_name(mo, True)
            else:
                mo = re_prop.search(self.cmd)
                if mo:
                    self.sname, icon = gen_prop_name(mo, False, True)
            if self.cmd:
                try:
                    compile(self.cmd, '<string>', 'exec')
                except:
                    self.info("Invalid syntax")
            if pm.mode == 'STICKY':
                PME_OT_sticky_key_edit.parse_prop_value(self.cmd)
        elif self.mode == 'PROP':
            self.sname = ""
            mo = re_prop_set.search(self.prop)
            if mo:
                self.sname, icon = gen_prop_name(mo, True, True)
                if icon and icon != 'NONE':
                    # Apply the suggested icon, preserving existing flags.
                    pmi = pm.pmis[pme.context.edit_item_idx]
                    _, icon_only, hidden = pmi.extract_flags()
                    if icon_only:
                        icon = "#" + icon
                    if hidden:
                        icon = "!" + icon
                    pmi.icon = icon
        elif self.mode == 'MENU':
            self.sname = self.menu
            pr = prefs()
            if not self.menu:
                self.info("Select the item")
            elif self.menu not in pr.pie_menus:
                self.info("'%s' was not found" % self.menu)
            # elif not pr.pie_menus[self.menu].ed.sub_item:
            #     self.info("'%s' is not supported here" % self.menu)
        elif self.mode == 'HOTKEY':
            self.sname = keymap_helper.to_hotkey(
                self.key, self.ctrl, self.shift, self.alt,
                self.oskey, self.key_mod)
            if self.key == 'NONE':
                self.info("Hotkey is not specified")
        elif self.mode == 'CUSTOM':
            self.sname = ""
            pm = pr.selected_pm
            if self.custom:
                try:
                    compile(self.custom, '<string>', 'exec')
                except:
                    self.info("Invalid syntax")

    def update_data(self, context):
        """Revalidate, then sync the command editor's hidden keymap item."""
        pr = prefs()
        self.check_pmi_errors(context)
        if self.mode == 'COMMAND' and pr.use_cmd_editor:
            op_idname, args, pos_args = operator_utils.find_operator(self.cmd)
            self.kmi.idname = ""
            self.cmd_ctx = 'INVOKE_DEFAULT'
            self.cmd_undo = False
            if not op_idname:
                return
            else:
                mod, _, op = op_idname.partition(".")
                if not hasattr(
                        bpy.types,
                        getattr(getattr(bpy.ops, mod), op).idname()):
                    return
            self.kmi.idname = op_idname
            has_exec_ctx = False
            has_undo = False
            # First (up to 3) positional args may be exec context and undo flag.
            for i, arg in enumerate(pos_args):
                if i > 2:
                    break
                try:
                    value = eval(arg)
                except:
                    continue
                try:
                    if isinstance(value, str):
                        self.cmd_ctx = value
                        has_exec_ctx = True
                        continue
                except:
                    self.cmd_ctx = 'INVOKE_DEFAULT'
                    continue
                if isinstance(value, bool):
                    has_undo = True
                    self.cmd_undo = value
            if has_undo and not has_exec_ctx:
                self.cmd_ctx = 'EXEC_DEFAULT'
            for k in self.kmi.properties.keys():
                del self.kmi.properties[k]
            operator_utils.apply_properties(self.kmi.properties, args)

    mode = EnumProperty(
        items=EMODE_ITEMS, description="Type of the item",
        update=check_pmi_errors)
    cmd = StringProperty(
        description="Python code", maxlen=1024, update=update_data)
    cmd_ctx = EnumProperty(
        items=OP_CTX_ITEMS,
        name="Execution Context",
        description="Execution context")
    cmd_undo = BoolProperty(
        name="Undo Flag",
        description="'Undo' positional argument")
    custom = StringProperty(
        description="Python code", maxlen=1024, update=update_data)
    prop = StringProperty(
        description="Property", update=update_data)
    menu = StringProperty(
        description="Menu's name", update=update_data)
    xmenu = BoolProperty(
        description="Open menu on mouse over")
    icon = StringProperty(description="Name")
    name = StringProperty(description="Name")
    sname = StringProperty(description="Suggested name")
    key = EnumProperty(
        items=keymap_helper.key_items, description="Key pressed",
        update=update_data)
    ctrl = BoolProperty(
        description="Ctrl key pressed")
    shift = BoolProperty(
        description="Shift key pressed")
    alt = BoolProperty(
        description="Alt key pressed")
    oskey = BoolProperty(
        description="Operating system key pressed")
    key_mod = EnumProperty(
        items=keymap_helper.key_items,
        description="Regular key pressed as a modifier")
    msg = StringProperty(description="Name")

    def info(self, text):
        # Store a validation message for the slot editor UI.
        self.msg = text

    def has_messages(self):
        return self.msg != ""

    def extract_flags(self):
        # Reuse PMIItem's icon flag parsing on this scratch data.
        return PMIItem.extract_flags(self)

    def parse_icon(self, default_icon='NONE'):
        return PMIItem.parse_icon(self, default_icon)
class Overlay(bpy.types.PropertyGroup):
    """Settings for the on-screen text overlay shown by stack keys."""

    overlay = BoolProperty(
        name="Display Stack Key Command",
        description=(
            "Display the name of the last command on screen "
            "for stack keys with 2+ commands"),
        default=True)
    size = IntProperty(
        name="Font Size", description="Font size",
        default=24, min=10, max=50, options={'SKIP_SAVE'})
    color = FloatVectorProperty(
        name="Color", description="Color",
        default=(1, 1, 1, 1), subtype='COLOR', size=4, min=0, max=1)
    alignment = EnumProperty(
        name="Alignment",
        description="Alignment",
        items=(
            ('TOP', "Top", ""),
            ('TOP_LEFT', "Top Left", ""),
            ('TOP_RIGHT', "Top Right", ""),
            ('BOTTOM', "Bottom", ""),
            ('BOTTOM_LEFT', "Bottom Left", ""),
            ('BOTTOM_RIGHT', "Bottom Right", ""),
        ),
        default='TOP')
    duration = FloatProperty(
        name="Duration", subtype='TIME', min=1, max=10, default=2, step=10)
    offset_x = IntProperty(
        name="Offset X", description="Offset from area edges",
        subtype='PIXEL', default=10, min=0)
    offset_y = IntProperty(
        name="Offset Y", description="Offset from area edges",
        subtype='PIXEL', default=10, min=0)
    shadow = BoolProperty(
        name="Use Shadow", description="Use shadow", default=True)

    def draw(self, layout):
        """Draw the overlay settings UI (full set only when enabled)."""
        if not self.overlay:
            layout.prop(self, "overlay", toggle=True)
        else:
            layout = layout.column(True)
            layout.prop(self, "overlay", toggle=True)
            row = layout.split(0.5, True)
            row1 = row.row(True)
            row1.prop(self, "color", "")
            row1.prop(self, "shadow", "", icon='META_BALL')
            row.prop(self, "size")
            row.prop(self, "duration")
            row = layout.split(0.5, True)
            row.prop(self, "alignment", "")
            row.prop(self, "offset_x")
            row.prop(self, "offset_y")
class PieMenuPrefs:
    """Save/restore holder for Blender's native pie-menu view settings.

    `save`/`restore` calls may nest; the settings are captured on the first
    save and written back only when the last restore unwinds (num_saves == 0).
    """

    def __init__(self):
        self.num_saves = 0
        self.lock = False
        self.radius = 80
        self.confirm = 0
        self.threshold = 12

    def save(self):
        """Capture the user's pie-menu settings once per nesting group."""
        self.num_saves += 1
        # logi("SAVE", self.num_saves, self.lock)
        if self.lock:
            return
        view_prefs = bpy.context.user_preferences.view
        # A zero/negative radius means nothing sensible to capture.
        if view_prefs.pie_menu_radius > 0:
            self.radius = view_prefs.pie_menu_radius
            self.confirm = view_prefs.pie_menu_confirm
            self.threshold = view_prefs.pie_menu_threshold
        self.lock = True

    def restore(self):
        """Write the captured settings back when the outermost save unwinds."""
        self.num_saves -= 1
        # logi("RESTORE", self.num_saves)
        if not (self.lock and self.num_saves == 0):
            return
        view_prefs = bpy.context.user_preferences.view
        view_prefs.pie_menu_radius = self.radius
        view_prefs.pie_menu_confirm = self.confirm
        view_prefs.pie_menu_threshold = self.threshold
        self.lock = False
class TreeView:
    """Small facade over PME_UL_pm_tree class-level state."""

    def expand_km(self, name):
        """Make sure the keymap row `name` is not collapsed."""
        # discard() is a no-op when the name is absent — same effect as
        # the check-then-remove idiom.
        PME_UL_pm_tree.collapsed_km_names.discard(name)

    def lock(self):
        """Set the tree's locked flag."""
        PME_UL_pm_tree.locked = True

    def unlock(self):
        """Clear the tree's locked flag."""
        PME_UL_pm_tree.locked = False

    def update(self):
        """Rebuild the tree view."""
        PME_UL_pm_tree.update_tree()
class PMEPreferences(bpy.types.AddonPreferences):
bl_idname = ADDON_ID
_mode = 'ADDON'
mode_history = []
unregistered_pms = []
old_pms = set()
missing_kms = {}
pie_menu_prefs = PieMenuPrefs()
tree = TreeView()
pmi_clipboard = None
pdr_clipboard = None
rmc_clipboard = None
pie_menus = CollectionProperty(type=PMItem)
    def update_pie_menu_idx(self, context):
        """Selection changed: clear the editor message, reset the panel list."""
        self.pmi_data.info("")
        temp_prefs().hidden_panels_idx = 0

    active_pie_menu_idx = IntProperty(update=update_pie_menu_idx)

    overlay = PointerProperty(type=Overlay)

    list_size = IntProperty(
        name="List Width", description="Width of the list",
        default=40, min=20, max=80, subtype='PERCENTAGE'
    )
    def update_interactive_panels(self, context=None):
        """Append/remove the interactive-panels draw callback on every panel
        type (User Preferences panels excluded)."""
        if PME_OT_interactive_panels_toggle.active == self.interactive_panels:
            return
        PME_OT_interactive_panels_toggle.active = self.interactive_panels
        for tp in bl_panel_types():
            if tp.bl_space_type == 'USER_PREFERENCES':
                continue
            if self.interactive_panels:
                tp.append(PME_OT_interactive_panels_toggle._draw)
            else:
                tp.remove(PME_OT_interactive_panels_toggle._draw)
        tag_redraw(True)

    interactive_panels = BoolProperty(
        name="Interactive Panels",
        description="Interactive panels",
        update=update_interactive_panels)
icon_filter = StringProperty(
description="Filter", options={'TEXTEDIT_UPDATE'})
hotkey = PointerProperty(type=keymap_helper.Hotkey)
hold_time = IntProperty(
name="Hold Timeout", description="Hold timeout (ms)",
default=200, min=50, max=1000, step=10)
tab = EnumProperty(
items=(
('EDITOR', "Editor", ""),
('SETTINGS', "Settings", ""),
),
options={'HIDDEN'})
show_names = BoolProperty(
default=True, description="Show names")
show_hotkeys = BoolProperty(
default=True, description="Show hotkeys")
    def update_tree(self, context=None):
        """Rebuild the tree view after a filter/display option change."""
        self.tree.update()

    show_keymap_names = BoolProperty(
        name="Keymap Names",
        default=False, description="Show keymap names",
        update=update_tree)
    show_custom_icons = BoolProperty(
        default=False, description="Show custom icons")
    show_advanced_settings = BoolProperty(
        default=False, description="Advanced settings")
    show_list = BoolProperty(
        default=True, description="Show the list of pie menus")
    use_filter = BoolProperty(
        description="Use filter", update=update_tree)
    mode_filter = EnumProperty(
        items=PM_ITEMS_M, default=PM_ITEMS_M_DEFAULT,
        description="Show icons",
        options={'ENUM_FLAG'},
        update=update_tree
    )
    show_only_new_pms = BoolProperty(
        description="Show only new menus", update=update_tree
    )
# show_pm = BoolProperty(
# default=True, description="Show pie menus",
# update=update_tree)
# show_rm = BoolProperty(
# default=True, description="Show regular menus",
# update=update_tree)
# show_pd = BoolProperty(
# default=True, description="Show popup dialogs",
# update=update_tree)
# show_pg = BoolProperty(
# default=True, description="Show panel groups",
# update=update_tree)
# show_hpg = BoolProperty(
# default=True, description="Show hidden panel groups",
# update=update_tree)
# show_scripts = BoolProperty(
# default=True, description="Show stack keys",
# update=update_tree)
# show_sticky = BoolProperty(
# default=True, description="Show sticky keys",
# update=update_tree)
# show_macro = BoolProperty(
# default=True, description="Show macro operators",
# update=update_tree)
cache_scripts = BoolProperty(
name="Cache External Scripts", description="Cache external scripts",
default=True)
panel_info_visibility = EnumProperty(
name="Panel Info",
description="Show panel info",
items=(
('NAME', "Name", "", 'SYNTAX_OFF', 1),
('CLASS', "Class", "", 'SYNTAX_ON', 2),
('CTX', "Context", "", 'NODE', 4),
('CAT', "Category", "", 'LINENUMBERS_ON', 8),
),
default={'NAME', 'CLASS'},
options={'ENUM_FLAG'}
)
restore_mouse_pos = BoolProperty(
name="Restore Mouse Position (Pie Menu)",
description=(
"Restore mouse position "
"after releasing the pie menu's hotkey"))
use_spacer = BoolProperty(
name="Use 'Spacer' Separator by Default (Popup Dialog)",
description="Use 'Spacer' separator by default",
default=False)
use_cmd_editor = BoolProperty(
name="Use Operator Properties Editor (Command Tab)",
description="Use operator properties editor",
default=True)
    def get_debug_mode(self):
        return bpy.app.debug_wm

    def set_debug_mode(self, value):
        bpy.app.debug_wm = value

    # Proxies Blender's window-manager debug flag instead of storing a value.
    debug_mode = BoolProperty(
        get=get_debug_mode, set=set_debug_mode,
        description="Debug Mode")
    def update_tree_mode(self, context):
        """Switch between flat list and tree display: rebuild the tree."""
        PME_UL_pm_tree.update_tree()

    tree_mode = BoolProperty(
        description="Tree Mode", update=update_tree_mode)
    def get_maximize_prefs(self):
        # Maximized when the add-ons panel draw was replaced by ours.
        return bpy.types.USERPREF_PT_addons.draw == draw_addons_maximized

    def set_maximize_prefs(self, value):
        if value and not is_userpref_maximized():
            bpy.ops.pme.userpref_show(addon="pie_menu_editor")
        elif not value and is_userpref_maximized():
            bpy.ops.pme.userpref_restore()

    maximize_prefs = BoolProperty(
        description="Maximize preferences area",
        get=get_maximize_prefs, set=set_maximize_prefs)
button_scalex = FloatProperty(
default=1, step=10, min=0.5, max=2,
description="Width of the buttons")
pmi_data = PointerProperty(type=PMIData)
scripts_filepath = StringProperty(subtype='FILE_PATH', default=SCRIPT_PATH)
@property
def selected_pm(self):
if 0 <= self.active_pie_menu_idx < len(self.pie_menus):
return self.pie_menus[self.active_pie_menu_idx]
return None
    @property
    def mode(self):
        # UI mode is class-level state shared by all instances.
        return PMEPreferences._mode

    @mode.setter
    def mode(self, value):
        PMEPreferences._mode = value

    def enter_mode(self, mode):
        """Push the current mode and switch to `mode`."""
        self.mode_history.append(PMEPreferences._mode)
        PMEPreferences._mode = mode

    def leave_mode(self):
        """Return to the previously pushed mode."""
        PMEPreferences._mode = self.mode_history.pop()

    def is_edit_mode(self):
        # True when a slot editor ('PMI') is anywhere on the mode stack.
        return 'PMI' in PMEPreferences.mode_history
    def add_pm(self, mode='PMENU', name=None, duplicate=False):
        """Create a new menu of `mode`, insert it after the selection,
        pre-populate default slots, register its hotkey, and return it."""
        link = None
        tpr = temp_prefs()
        if self.tree_mode and len(tpr.links):
            link = tpr.links[tpr.links_idx]
            if link.path:
                # Anchor insertion at the folder's root menu.
                self.active_pie_menu_idx = self.pie_menus.find(link.path[0])
            tpr.links_idx = -1
        self.pie_menus.add()
        if self.active_pie_menu_idx < len(self.pie_menus) - 1:
            # Move the new (last) menu right after the current selection.
            self.active_pie_menu_idx += 1
            self.pie_menus.move(len(self.pie_menus) - 1, self.active_pie_menu_idx)
        pm = self.selected_pm
        pm.mode = mode
        pm.name = self.unique_pm_name(name or pm.ed.default_name)
        if self.tree_mode and self.show_keymap_names and not duplicate and link:
            # Inherit the keymap of the tree row the user had selected.
            if link.label:
                pm.km_name = link.label
            elif link.path and link.path[0] in self.pie_menus:
                pm.km_name = self.pie_menus[link.path[0]].km_name
            elif link.pm_name and link.pm_name in self.pie_menus:
                pm.km_name = self.pie_menus[link.pm_name].km_name
            if pm.km_name in PME_UL_pm_tree.collapsed_km_names:
                PME_UL_pm_tree.collapsed_km_names.remove(pm.km_name)
        pm.data = pm.ed.default_pmi_data
        # Default slots per menu type.
        if mode == 'PMENU':
            for i in range(0, 8):
                pm.pmis.add()
        if mode == 'RMENU' and not duplicate:
            pmi = pm.pmis.add()
            pmi.mode = 'COMMAND'
            pmi.name = "Menu Item"
        elif mode == 'STICKY' and not duplicate:
            pmi = pm.pmis.add()
            pmi.mode = 'COMMAND'
            pmi.name = "On Press"
            pmi = pm.pmis.add()
            pmi.mode = 'COMMAND'
            pmi.name = "On Release"
        elif mode == 'SCRIPT' and not duplicate:
            pmi = pm.pmis.add()
            pmi.mode = 'COMMAND'
            pmi.name = "Command 1"
        elif mode == 'MACRO' and not duplicate:
            pmi = pm.pmis.add()
            pmi.mode = 'COMMAND'
            pmi.name = "Command 1"
            add_macro(pm)
        elif mode == 'DIALOG' and not duplicate:
            pm.ed.add_pd_row(pm)
        pm.register_hotkey()
        return pm
    def remove_pm(self, pm=None):
        """Delete `pm` (or the selected menu), undoing its registrations and
        moving the selection to a sensible neighbour."""
        tpr = temp_prefs()
        idx = 0
        if pm:
            idx = self.pie_menus.find(pm.name)
        else:
            idx = self.active_pie_menu_idx
        if idx < 0 or idx >= len(self.pie_menus):
            return
        apm = self.pie_menus[idx]
        new_idx = -1
        num_links = len(tpr.links)
        if self.tree_mode and num_links:
            # Scan forward, then backward, for the nearest plain menu link
            # that is not the one being removed.
            d = 1
            i = tpr.links_idx + d
            while True:
                if i >= num_links:
                    d = -1
                    i = tpr.links_idx + d
                    continue
                if i < 0:
                    break
                link = tpr.links[i]
                if not link.label and not link.path and \
                        link.pm_name != apm.name:
                    tpr["links_idx"] = i
                    new_idx = self.pie_menus.find(link.pm_name)
                    break
                i += d
        # Clearing key_mod first releases any mouse-button hooks.
        apm.key_mod = 'NONE'
        if apm.mode == 'PANEL':
            remove_panel_group(apm.name)
        elif apm.mode == 'HPANEL':
            for pmi in apm.pmis:
                unhide_panel(pmi.text)
        elif apm.mode == 'MACRO':
            remove_macro(apm)
        apm.unregister_hotkey()
        if apm.name in self.old_pms:
            self.old_pms.remove(apm.name)
        self.pie_menus.remove(idx)
        if new_idx >= idx:
            new_idx -= 1
        if new_idx >= 0:
            self.active_pie_menu_idx = new_idx
        elif self.active_pie_menu_idx >= len(self.pie_menus) and \
                self.active_pie_menu_idx > 0:
            self.active_pie_menu_idx -= 1
def unique_pm_name(self, name):
if name not in self.pie_menus:
return name
idx = 1
mo = re_name_idx.search(name)
if mo:
name = mo.group(1)
idx = int(mo.group(2))
while True:
uname = "%s.%s" % (name, str(idx).zfill(3))
if uname not in self.pie_menus:
return uname
idx += 1
    def from_dict(self, value):
        # NOTE(review): serialization stub — no-op here; confirm intent.
        pass

    def to_dict(self):
        # NOTE(review): serialization stub — returns an empty dict.
        d = {}
        return d
    def _draw_pmi(self, context):
        """Draw the slot (menu item) editor for the item being edited."""
        pr = prefs()
        tpr = temp_prefs()
        pm = pr.selected_pm
        layout = self.layout
        lh.lt(layout)
        split = lh.split(None, 0.75, False)
        lh.row()
        data = pr.pmi_data
        icon = data.parse_icon('FILE_HIDDEN')
        if pm.ed.use_slot_icon:
            lh.operator(
                WM_OT_pmi_icon_select.bl_idname, "", icon,
                idx=pme.context.edit_item_idx,
                icon="")
        lh.prop(data, "name", "")
        # Offer the auto-suggested name when it differs from the current one.
        if data.name != data.sname and data.sname:
            lh.operator(
                PME_OT_pmi_name_apply.bl_idname, "", 'BACK',
                idx=pme.context.edit_item_idx)
            lh.prop(data, "sname", "", enabled=False)
        lh.lt(split)
        lh.operator(
            WM_OT_pmi_data_edit.bl_idname, "OK",
            idx=pme.context.edit_item_idx, ok=True,
            enabled=not data.has_messages())
        lh.operator(WM_OT_pmi_data_edit.bl_idname, "Cancel", idx=-1)
        box = layout.box()
        column = lh.column(box)
        lh.row()
        pm.ed.draw_slot_modes(lh.layout, data)
        # Mode-specific editors.
        if data.mode == 'COMMAND':
            lh.row(column)
            icon = 'ERROR' if data.has_messages() else 'NONE'
            lh.prop(data, "cmd", "", icon)
            lh.operator(
                WM_OT_pmidata_specials_call.bl_idname, "", 'COLLAPSEMENU')
            if pm.mode == 'STICKY' and PME_OT_sticky_key_edit.pmi_prop and \
                    pme.context.edit_item_idx == 0 and not data.has_messages():
                lh.lt(column)
                lh.operator(PME_OT_sticky_key_edit.bl_idname)
        elif data.mode == 'PROP':
            lh.row(column)
            icon = 'ERROR' if data.has_messages() else 'NONE'
            lh.prop(data, "prop", "", icon)
        elif data.mode == 'MENU':
            icon = 'ERROR' if data.has_messages() else 'NONE'
            if data.menu in pr.pie_menus:
                icon = pr.pie_menus[data.menu].ed.icon
            row = lh.row(column)
            row.prop_search(
                data, "menu", tpr, "pie_menus", text="", icon=icon)
            lh.operator(
                WM_OT_pmidata_specials_call.bl_idname, "", 'COLLAPSEMENU')
        elif data.mode == 'HOTKEY':
            lh.row(column)
            icon = 'ERROR' if data.has_messages() else 'NONE'
            lh.prop(data, "key", "", icon, event=True)
            lh.row(column)
            lh.prop(data, "ctrl", "Ctrl", toggle=True)
            lh.prop(data, "shift", "Shift", toggle=True)
            lh.prop(data, "alt", "Alt", toggle=True)
            lh.prop(data, "oskey", "OSkey", toggle=True)
            lh.prop(data, "key_mod", "", event=True)
        elif data.mode == 'CUSTOM':
            lh.row(column)
            icon = 'ERROR' if data.has_messages() else 'NONE'
            lh.prop(data, "custom", "", icon)
            lh.operator(
                WM_OT_pmidata_specials_call.bl_idname, "", 'COLLAPSEMENU')
        # elif data.mode == 'OPERATOR':
        #     lh.row(column)
        #     icon = 'ERROR' if data.has_messages() else 'NONE'
        #     lh.prop(data, "custom", "", icon)
        #     lh.operator(
        #         WM_OT_pmidata_specials_call.bl_idname, "", 'COLLAPSEMENU')
        if data.has_messages():
            lh.box(layout)
            lh.label(data.msg, icon='INFO')
        # Operator properties editor for recognized commands.
        if pr.use_cmd_editor and data.mode == 'COMMAND' and \
                data.kmi.idname and not data.has_messages():
            lh.lt(layout.box().column(True))
            lh.save()
            lh.row(align=False)
            lh.op(PME_OT_pmi_cmd_generate.bl_idname, icon='FILE_TEXT')
            lh.op(
                PME_OT_pmi_cmd_generate.bl_idname,
                "Clear Properties and Generate", 'FILE_BLANK')(
                clear=True)
            lh.restore()
            lh.sep()
            lh.save()
            lh.row(align=False)
            lh.prop(data, "cmd_ctx", "")
            lh.prop(data, "cmd_undo", toggle=True)
            lh.restore()
            lh.template_keymap_item_properties(data.kmi)
def _draw_icons(self, context):
# Draw the icon-picker UI: name field with icon preview, a filter box,
# the built-in icon grid, and (optionally) the custom icon grid.
# NOTE: original indentation was lost in this dump; comments added only.
pr = prefs()
pm = pr.selected_pm
pmi = pm.pmis[pme.context.edit_item_idx]
layout = self.layout
lh.lt(layout)
split = lh.split(None, 0.75, False)
lh.row()
data = pmi
if pr.is_edit_mode():
data = pr.pmi_data
# Preview the currently parsed icon next to the item name.
icon = data.parse_icon('FILE_HIDDEN')
lh.prop(data, "name", "", icon)
lh.sep()
lh.prop(pr, "icon_filter", text="", icon='VIEWZOOM')
if pr.icon_filter:
lh.operator(WM_OT_icon_filter_clear.bl_idname, "", 'X')
lh.lt(split)
lh.operator(
WM_OT_pmi_icon_select.bl_idname, "None",
idx=pme.context.edit_item_idx,
icon='NONE')
lh.operator(
WM_OT_pmi_icon_select.bl_idname, "Cancel", idx=-1)
# Built-in icons: enumerate the 'icon' enum of UILayout.prop().
icon_filter = pr.icon_filter.upper()
box = layout.box()
column = box.column(align=True)
row = column.row(align=True)
row.alignment = 'CENTER'
idx = 0
for k, i in bpy.types.UILayout.bl_rna.functions[
"prop"].parameters["icon"].enum_items.items():
icon = i.identifier
if k == 'NONE':
continue
if icon_filter != "" and icon_filter not in icon:
continue
p = row.operator(
WM_OT_pmi_icon_select.bl_idname, text="",
icon=icon, emboss=False)
p.idx = pme.context.edit_item_idx
p.icon = icon
idx += 1
# 29 icons per row; start a new centered row when the row is full.
if idx > 28:
idx = 0
row = column.row(align=True)
row.alignment = 'CENTER'
# Pad the last row with blank labels so it stays centered.
if idx != 0:
while idx < 29:
row.label("", icon='BLANK1')
idx += 1
row = layout.row(align=True)
row.prop(
pr, "show_custom_icons", text="Custom Icons", toggle=True)
row.operator(
PME_OT_icons_refresh.bl_idname, "", icon='FILE_REFRESH')
p = row.operator("wm.path_open", "", icon='FILE_FOLDER')
p.filepath = ph.path
if not pr.show_custom_icons:
return
# Custom icons: same grid layout; names are marked with a '@' prefix.
icon_filter = pr.icon_filter
box = layout.box()
column = box.column(align=True)
row = column.row(align=True)
row.alignment = 'CENTER'
idx = 0
for icon in sorted(ph.get_names()):
if icon_filter != "" and icon_filter not in icon:
continue
p = row.operator(
WM_OT_pmi_icon_select.bl_idname, "",
icon_value=ph.get_icon(icon), emboss=False)
p.idx = pme.context.edit_item_idx
p.icon = '@' + icon
idx += 1
if idx > 28:
idx = 0
row = column.row(align=True)
row.alignment = 'CENTER'
if idx != 0:
while idx < 29:
row.label("", icon='BLANK1')
idx += 1
def _draw_tab_editor(self, context, layout):
# Draw the "Editor" tab: optional menu list (flat or tree) on the left,
# side toolbar of list operators, and the selected menu's settings.
pr = prefs()
tpr = temp_prefs()
pm = None
link = None
# Resolve the active pie menu from the tree link or the flat list.
if pr.tree_mode:
if len(tpr.links) > 0:
link = tpr.links[tpr.links_idx]
if link.pm_name:
pm = pr.pie_menus[link.pm_name]
else:
if len(pr.pie_menus):
pm = pr.selected_pm
if pr.show_list:
split = layout.split(pr.list_size / 100)
row = split.row()
column1 = row.column()
row = split.row()
column2 = row.column(align=True)
else:
row = layout
column3 = row.column()
if pr.show_list:
subrow = column1
if pr.use_filter:
subrow = column1.row()
subcol = subrow.column(True)
subcol.prop(
pr, "mode_filter", "",
expand=True, icon_only=True)
subcol.separator()
subcol.prop(
pr, "show_only_new_pms", "", icon='NEW', toggle=True)
column1 = subrow.column()
if pr.tree_mode:
column1.template_list(
"PME_UL_pm_tree", "",
tpr, "links",
tpr, "links_idx", rows=NUM_LIST_ROWS)
else:
column1.template_list(
"WM_UL_pm_list", "",
self, "pie_menus", self, "active_pie_menu_idx",
rows=NUM_LIST_ROWS)
row = column1.row(align=True)
p = row.operator(WM_OT_pm_import.bl_idname, text="Import")
p.mode = ""
if pm or link:
p = row.operator(WM_OT_pm_export.bl_idname, text="Export")
p.mode = ""
# Side toolbar: add/duplicate/remove/move/sort/enable operators.
lh.lt(column2)
lh.operator(
WM_OT_pm_add.bl_idname, "", 'ZOOMIN',
mode="")
if pm:
lh.operator(WM_OT_pm_duplicate.bl_idname, "", 'GHOST')
lh.operator(WM_OT_pm_remove.bl_idname, "", 'ZOOMOUT')
lh.operator(
WM_OT_pm_remove_all.bl_idname, "", 'X',
ask=True)
lh.sep()
# Reordering/sorting only makes sense in the flat list view.
if pm and not pr.tree_mode:
if not link or not link.path:
lh.operator(
WM_OT_pm_move.bl_idname, "", 'TRIA_UP',
direction=-1)
lh.operator(
WM_OT_pm_move.bl_idname, "", 'TRIA_DOWN',
direction=1)
lh.operator(
WM_OT_pm_sort.bl_idname, "", 'SORTALPHA',
mode="")
lh.sep()
lh.operator(
WM_OT_pm_enable_all.bl_idname, "", ICON_ON).enabled = True
lh.operator(
WM_OT_pm_enable_all.bl_idname, "", ICON_OFF).enabled = False
if pr.tree_mode and PME_UL_pm_tree.has_folders:
lh.sep(group='EXP_COL_ALL')
icon = 'TRIA_RIGHT' \
if PME_UL_pm_tree.expanded_folders else \
'TRIA_DOWN'
lh.operator(PME_OT_tree_folder_toggle_all.bl_idname, "", icon)
if pr.tree_mode and pr.show_keymap_names and len(pr.pie_menus):
lh.sep(group='EXP_COL_ALL')
icon = 'TRIA_DOWN_BAR' \
if PME_UL_pm_tree.collapsed_km_names else 'TRIA_RIGHT_BAR'
lh.operator(
PME_OT_tree_kmname_toggle.bl_idname, "", icon,
km_name="",
idx=-1,
all=True)
# No selection: draw a disabled placeholder box and bail out.
if not pm:
if link and link.label:
subcol = column3.box().column(True)
subrow = subcol.row()
subrow.enabled = False
subrow.scale_y = NUM_LIST_ROWS + LIST_PADDING
subrow.alignment = 'CENTER'
subrow.label(link.label)
subcol.row(True)
else:
subcol = column3.box().column(True)
subrow = subcol.row()
subrow.enabled = False
subrow.scale_y = NUM_LIST_ROWS + LIST_PADDING
subrow.alignment = 'CENTER'
subrow.label(" ")
subcol.row(True)
return
# Header row of the selected menu: enable toggle, preview, type icon,
# label, docs link and advanced settings toggle.
row = column3.row(align=True)
row.prop(
pm, "enabled", text="",
icon=ICON_ON if pm.enabled else ICON_OFF)
if pm.ed.use_preview:
p = row.operator(
WM_OT_pme_preview.bl_idname, "", icon='VISIBLE_IPO_ON')
p.pie_menu_name = pm.name
p = row.operator(
WM_OT_pm_select.bl_idname, "", icon=pm.ed.icon)
p.pm_name = ""
p.use_mode_icons = True
row.prop(pm, "label", text="")
if pm.ed.docs:
p = row.operator(PME_OT_docs.bl_idname, "", icon='HELP')
p.id = pm.ed.docs
if pm.ed.has_extra_settings:
row.prop(pr, "show_advanced_settings", text="", icon='SETTINGS')
if pr.show_advanced_settings:
pm.ed.draw_extra_settings(column3.box(), pm)
# Delegate keymap/hotkey/item drawing to the menu's editor object.
column = column3.column(True)
pm.ed.draw_keymap(column, pm)
pm.ed.draw_hotkey(column, pm)
pm.ed.draw_items(column3, pm)
def _draw_tab_settings(self, context, layout):
# Draw the "Settings" tab: hotkey editor, timing/size sliders on the
# left, misc boolean toggles on the right, overlay settings below.
pr = prefs()
box = layout.box()
subrow = box.split(0.5)
col = subrow.column()
pr.hotkey.draw(col)
col.prop(pr, "hold_time")
col.prop(pr, "list_size", slider=True)
col = subrow.column()
subcol = col.column(True)
subcol.prop(pr, "cache_scripts")
subcol.prop(pr, "use_spacer")
subcol.prop(pr, "use_cmd_editor")
subcol.prop(pr, "restore_mouse_pos")
pr.overlay.draw(box)
def _draw_preferences(self, context):
# Top-level add-on preferences UI: a toolbar row of toggles followed
# by the body of the active tab (EDITOR or SETTINGS).
pr = prefs()
layout = self.layout
row = layout.row(True)
row.prop(pr, "show_list", text="", icon='COLLAPSEMENU')
# List-related toggles are only relevant while the list is visible.
if pr.show_list:
row.prop(pr, "tree_mode", text="", icon='OOPS')
row.prop(pr, "use_filter", "", icon='FILTER')
row.prop(pr, "show_names", text="", icon='SYNTAX_OFF')
row.prop(pr, "show_hotkeys", text="", icon='FONTPREVIEW')
row.prop(
pr, "show_keymap_names", text="", icon='SPLITSCREEN')
row.separator()
row.prop(pr, "tab", expand=True)
row.separator()
row.prop(pr, "interactive_panels", text="", icon='MOD_MULTIRES')
row.prop(pr, "debug_mode", text="", icon='SCRIPT')
row.separator()
row.prop(pr, "maximize_prefs", "", icon='FULLSCREEN_ENTER')
if pr.tab == 'EDITOR':
self._draw_tab_editor(context, layout)
elif pr.tab == 'SETTINGS':
self._draw_tab_settings(context, layout)
def draw(self, context):
    """Entry point: route drawing to the editor matching the current mode."""
    mode_handlers = {
        'ADDON': self._draw_preferences,
        'ICONS': self._draw_icons,
        'PMI': self._draw_pmi,
    }
    handler = mode_handlers.get(self.mode)
    # Unknown modes draw nothing, exactly like the original if/elif chain.
    if handler is not None:
        handler(context)
def init_menus(self):
# One-time initialization of all stored pie menus: upgrade legacy data
# formats, (re)create panels/macros, and register hotkeys.
DBG and logh("Init Menus")
if len(self.pie_menus) == 0:
self.add_pm()
return
for pm in self.pie_menus:
self.old_pms.add(pm.name)
if not pm.data and pm.mode in {'PMENU', 'RMENU', 'DIALOG'}:
pm.data = pm.ed.default_pmi_data
# Legacy SCRIPT data: wrap the raw command into a pmi entry.
if pm.mode == 'SCRIPT':
if not pm.data.startswith("s?"):
pmi = pm.pmis.add()
pmi.text = pm.data
pmi.mode = 'COMMAND'
pmi.name = "Command 1"
pm.data = pm.ed.default_pmi_data
if pm.mode not in {'PANEL', 'HPANEL', 'SCRIPT'}:
for pmi in pm.pmis:
# '@'-prefixed menu references need a generated menu class.
if pmi.mode == 'MENU' and pmi.text[0] == "@":
get_pme_menu_class(pmi.text[1:])
if pm.mode == 'HPANEL' and pm.enabled and not SAFE_MODE:
for pmi in pm.pmis:
hide_panel(pmi.text)
if pm.mode == 'PANEL' and pm.enabled:
for i, pmi in enumerate(pm.pmis):
add_panel(
pm.name, i, pmi.text, pmi.name,
pm.panel_space, pm.panel_region,
pm.panel_context, pm.panel_category,
draw_pme_panel, poll_pme_panel)
if pm.mode == 'MACRO' and pm.enabled:
add_macro(pm)
# km_names is presumably the list of keymaps that could not be
# resolved (TODO confirm): when non-empty the menu is remembered
# under each missing keymap; otherwise its hotkey is registered.
km_names = pm.parse_keymap(False)
if km_names:
for km_name in km_names:
if km_name not in self.missing_kms:
self.missing_kms[km_name] = []
self.missing_kms[km_name].append(pm.name)
DBG and logw("..." + pm.name, pm.km_name, km_names)
else:
DBG and logi(" + " + pm.name)
pm.register_hotkey()
def register():
# Blender add-on registration entry point: attach WindowManager.pme,
# build keymaps, expose script globals and install default hotkeys.
if not hasattr(bpy.types.WindowManager, "pme"):
bpy.types.WindowManager.pme = bpy.props.PointerProperty(
type=PMEData)
PMEPreferences.kh = KeymapHelper()
pr = prefs()
pr.tree.lock()
pr.init_menus()
# Names made available to user scripts/commands.
pme.context.add_global("_prefs", prefs)
pme.context.add_global("prefs", prefs)
pme.context.add_global("pme", pme)
pme.context.add_global("os", os)
pme.context.add_global("PMEData", PMEData)
# Reset transient UI state on every registration.
pr.interactive_panels = False
pr.icon_filter = ""
pr.show_custom_icons = False
pr.tab = 'EDITOR'
pr.use_filter = False
pr.show_only_new_pms = False
pr.maximize_prefs = False
pr.show_advanced_settings = False
h = pr.hotkey
# Default editor hotkey (Ctrl+Shift+`) when none is configured yet.
if h.key == 'NONE':
h.key = 'ACCENT_GRAVE'
h.ctrl = True
h.shift = True
if pr.kh.available():
pr.kh.keymap()
h.add_kmi(pr.kh.operator(
WM_OT_pm_edit,
key=h.key, ctrl=h.ctrl, shift=h.shift, alt=h.alt, oskey=h.oskey,
key_mod=h.key_mod)).properties.auto = True
pr.kh.keymap("Info")
h.add_kmi(pr.kh.operator(
WM_OT_pm_edit,
key=h.key, ctrl=h.ctrl, shift=h.shift, alt=h.alt, oskey=h.oskey,
key_mod=h.key_mod)).properties.auto = False
# Confirm/cancel hotkeys for the popup editors.
pr.kh.keymap("View2D Buttons List")
p = pr.kh.operator(
WM_OT_pmi_icon_select,
'ESC').properties
p.idx = -1
p.hotkey = True
p = pr.kh.operator(
WM_OT_pmi_data_edit,
'RET').properties
p.ok = True
p.hotkey = True
p = pr.kh.operator(
WM_OT_pmi_data_edit,
'ESC').properties
p.idx = -1
p.hotkey = True
pr.tree.unlock()
pr.tree.update()
def unregister():
# Add-on unregistration: release registered keymaps and clear the
# cached keymap item reference.
pr = prefs()
pr.kh.unregister()
PMIData._kmi = None
|
def word_count(phrase):
    """Count occurrences of each word in *phrase*.

    Words are separated by whitespace, compared case-insensitively, and
    stripped of all non-alphanumeric characters, so "Hello," and "hello"
    count as the same word.

    Fixes vs. the previous version: removed the leftover debug print;
    punctuation inside a word no longer leaks a space into the dict key
    (old code produced keys like "a b" for input "a-b"); dropped the
    fragile `ch == word[-1]` value-comparison hack for trailing
    punctuation.

    Args:
        phrase: Input text.

    Returns:
        dict mapping each cleaned, lower-cased word to its count.
    """
    counts = {}
    for word in phrase.lower().split():
        # Keep only letters and digits; punctuation-only tokens vanish.
        cleaned = ''.join(ch for ch in word if ch.isalnum())
        if cleaned:
            counts[cleaned] = counts.get(cleaned, 0) + 1
    return counts
992,555 | 490170ecc150c7ecbe117c0148f17e4e7f6628ef | import subprocess
import os
from config import config as Config
from common_utils import utils
import logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
append_command='-only-testing:SecureMailUITests/FileRepositoryTests/testAttachmentsOption_15057 -only-testing:SecureMailUITests/FileRepositoryTests/testSearchAttachmentsByAll_16367 -only-testing:SecureMailUITests/FileRepositoryTests/testEditOfflineAccessToSave_16390 -only-testing:SecureMailUITests/FileRepositoryTests/testSaveOfflineFiles_16388 -only-testing:SecureMailUITests/MailFolderTests/testBasicFolderSearch_14477 -only-testing:SecureMailUITests/MailUITests/testSelectMultiMailWhenSearchMail_14476 -only-testing:SecureMailUITests/MailUITests/testMailEmlAttachment_14451 -only-testing:SecureMailUITests/MailUITests/testMailMsgAttachment_14454 -only-testing:SecureMailUITests/MailUITests/testDownloadAttachment_14462 -only-testing:SecureMailUITests/MailUITests/testForwardEmailWithAttachemt_14436 -only-testing:SecureMailUITests/MailUITests/testForwardEmailWithoutAttachemt_14464 -only-testing:SecureMailUITests/MailUITests/testEWSSendAndWMForwardMail_18629 -only-testing:SecureMailUITests/MailUITests/testEWSSendAndWMReplyMail_17369 -only-testing:SecureMailUITests/MailUITests/testWMSendAndEWSReplyMail_17370 -only-testing:SecureMailUITests/MailUITests/testForwardARepliedMail_14448 -only-testing:SecureMailUITests/MailUITests/testCheckUrlInEmail_14442 -only-testing:SecureMailUITests/AccountSyncTests/testSyncMail_18579 -only-testing:SecureMailUITests/AccountSyncTests/testSyncCalendar_18580 -only-testing:SecureMailUITests/SlideViewTests/testEmailSlideView_OperationsWorkForMultipleMailSelection_14617 -only-testing:SecureMailUITests/ContactsUITests/testCallOperationContacts_OutgoingCallsVerify -only-testing:SecureMailUITests/ContactsUITests/testMultiSelectContact_14467 -only-testing:SecureMailUITests/MailUITests/testReceiveMailContainsAttachmentsFromOutlook_16130 -only-testing:SecureMailUITests/MailUITests/testSendMailWithAttachments_16392 -only-testing:SecureMailUITests/MailUITests/testNewMailWithPhotoAttachment_14459 
-only-testing:SecureMailUITests/TriageViewTests/testMail_Triage_View_Settings_14450 -only-testing:SecureMailUITests/ReplyReplyAllMeetingTests/testEWSCreateAndWMReplyMeeting_16426 -only-testing:SecureMailUITests/PrivateMeetingTests/testPrivateMeeting_OrganizerCreate_NonRecurringMeeting_18599 -only-testing:SecureMailUITests/ContactsUITests/testContactMRU_BasicMRUOperations_14440 -only-testing:SecureMailUITests/ForwardMeetingTests/testMeetingForward_RecurringMeeting_fromInviteeCalendar_16427 -only-testing:SecureMailUITests/ContactsUITests/testExportContactToLocalSystem_14461 -only-testing:SecureMailUITests/CalendarUITest/testEventView_16401 -only-testing:SecureMailUITests/SettingsTests/testSettings_SlideOperationsSettings_16520 -only-testing:SecureMailUITests/SettingsTests/testSignatureForMultipleAccounts_17367 -only-testing:SecureMailUITests/SettingsTests/testExportSettingsForOnlyOneLoggedInAcount_14426 -only-testing:SecureMailUITests/SettingsTests/testExportSettingsForMultipulLoggedInAcount_14427'
class TestSuites(object):
# Discovers XCTest test cases under an Xcode project path (via grep)
# and runs the selected subset on a given device/simulator.
def __init__(self, path, device_id, locale, scheme=None, target=None):
self.append_command=append_command
self.path = path
self.scheme = scheme
self.target = target
self.device_id = device_id
self.locale = locale
'''
config = build_util.get_build_config(self.path)
if(scheme is None):
self.scheme = config.get('project').get('schemes')[0]
if(target is None):
self.target = config.get('project').get('targets')[0]
'''
# By default, all discovered test cases are selected.
self._all_test_cases = self.load_all_cases_from_path(path=path)
self.select_test_cases = self._all_test_cases
def _run_grep_cases_command(self, path):
"""Grep the project tree for Objective-C XCTest method declarations.

Returns the raw grep output, one "<file>:<matched line>" per line.
Raises subprocess.CalledProcessError when grep fails (including the
no-matches case, where grep exits with status 1).
"""
# NOTE(review): \s/\w/\d appear inside a non-raw string; this works
# but should be a raw string (r"...") to avoid invalid-escape
# DeprecationWarnings on modern Python.
full_command = "grep -e '^-\s*(void)\s*test\w*_\d*.*$' -r '%s'" \
% (path,)
# Deliberately don't catch the exception - we want it to bubble up
return subprocess.check_output(full_command,
universal_newlines=True,
shell=True)
def load_all_cases_from_path(self, path='.'):
# Parse grep lines "<file>:<match>" into "ClassName/testMethod" ids.
lines = self._run_grep_cases_command(path).split('\n')
test_case_ids = []
for line in lines:
if(len(line) > 0):
file_path, matched_line = line.split(':')
file_name = os.path.basename(file_path)
# Strip the extension and any "+Category" suffix from the file name.
class_name = os.path.splitext(file_name)[0].split('+')[0]
method_name = matched_line.split(')')[-1]
test_case_ids.append("%s/%s" % (class_name, method_name))
return test_case_ids
def filter_case_id_by_list(self):
# Prefix every discovered case with the target ("Target/Class/test...").
self.select_test_cases = []
for case in self._all_test_cases:
self.select_test_cases.append(self.target + "/" + case)
return self.select_test_cases
def run_test(self, instance_id):
# Run the selected cases; reports are written under the configured
# report directory, one subfolder per instance id.
# NOTE(review): `build_util` is referenced below but never imported
# in this module — this raises NameError at runtime; confirm the
# missing `import build_util` (it also appears in the commented-out
# code in __init__).
report_dir = os.path.join(
Config.BASE_DIR,
Config.REPORT_SAVE_RELATIVE_PATH,
str(instance_id)
)
utils.ensure_dir(report_dir)
build_util.run_test(self.scheme,
self.device_id,
self.select_test_cases,
self.locale,
self.path,
report_dir)
return report_dir
|
992,556 | d1769c50a6a4b62bd37691a5252862741ea02387 | from .base import *
from django.conf import settings
import os
# Production overrides: debug off, host/secret pulled from the environment.
DEBUG = False
# A missing ALLOWED_HOST variable raises KeyError here — fail fast at startup.
ALLOWED_HOSTS = [os.environ['ALLOWED_HOST']]
ROOT_URLCONF = 'config.urls.production'
# The secret key must come from the environment, never be hard-coded.
SECRET_KEY = os.environ['SECRET_KEY']
|
import math

# Print the square roots of 1..4, one per line.
# Fixes: `math` was never imported (NameError), and the original called
# the builtin format(k, math.sqrt(k)) via a comma typo —
# print('...', format(...)) — instead of str.format, which raises
# TypeError because the format spec must be a string.
for k in range(1, 5):
    print('sqrt({0})={1}'.format(k, math.sqrt(k)))
992,558 | feafc5bfc96bad833c48134062ac2feb40c52e40 | #!/usr/bin/env python
import pymongo
from shapely.geometry import Polygon
from shapely.geos import TopologicalError
import matplotlib.pyplot as plt
from shapely.validation import explain_validity
import math
import os
import cPickle as pickle
# Pick base/code/github directories depending on which of the three
# known development machines this script is running on.
if os.path.exists("/home/ggdhines"):
base_directory = "/home/ggdhines"
github_directory = base_directory + "/github"
code_directory = base_directory + "/PycharmProjects"
elif os.path.exists("/Users/greg"):
base_directory = "/Users/greg"
code_directory = base_directory + "/Code"
github_directory = base_directory +"/github"
print github_directory
else:
base_directory = "/home/greg"
code_directory = base_directory + "/github"
github_directory = base_directory + "/github"
# Mongo collections for the Zooniverse "kelp" project dump of 2015-02-22.
project = "kelp"
date = "2015-02-22"
client = pymongo.MongoClient()
db = client[project+"_"+date]
classification_collection = db[project+"_classifications"]
subject_collection = db[project+"_subjects"]
user_collection = db[project+"_users"]
# polydict: zooniverse_id -> user_id -> list of polygons;
# count: how many distinct users annotated each subject (cache misses only).
polydict = {}
count = {}
def fix_poly(plist):
# Turn a possibly self-intersecting point list into a list of valid
# Shapely polygons. Returns [] if the points cannot form a polygon at
# all; if the whole list is already valid, returns it as one polygon.
print " " + str(len(plist))
try:
shape = Polygon(plist)
except ValueError:
return []
validity = explain_validity(shape)
if validity == "Valid Geometry":
return [shape]
# Invalid geometry: greedily carve out the longest valid sub-polygon,
# remove its points from the list, and repeat until nothing is left.
shape_list = []
while plist != []:
#print " " + str(len(plist))
longest_valid = 0
best_choice = None
for i in range(len(plist)):
# it might be that no matter how long the longest valid polygon is from this point, it can't
# beat the current max, if so, just break
if (len(plist)-i) < longest_valid:
break
longest_valid_i = 0
best_choice_i = None
for j in range(i+1,len(plist)):
try:
temp_shape = Polygon(plist[i:j])
validity = explain_validity(temp_shape)
if (validity == "Valid Geometry") and ((j-i) > longest_valid_i):
longest_valid_i = j-i
best_choice_i = i,j
except ValueError:
continue
if longest_valid_i > longest_valid:
longest_valid = longest_valid_i
best_choice = best_choice_i
# No valid sub-polygon found anywhere: stop.
if best_choice is None:
break
i,j = best_choice
shape = Polygon(plist[i:j])
shape_list.append(shape)
test_validity = explain_validity(shape)
assert test_validity == "Valid Geometry"
# Drop the consumed points and keep searching the remainder.
plist_temp = plist[:i]
plist_temp.extend(plist[j:])
plist = plist_temp
return shape_list
counter = -1
# Read up to 10000 classifications; decode each user's polygon
# annotations into absolute point lists and cache them as pickles,
# keyed by (subject, user).
for classification in classification_collection.find():
counter += 1
if counter == 10000:
break
annotations = classification["annotations"]
zooniverse_id = classification["subjects"][0]["zooniverse_id"]
# Logged-in users are keyed by name, anonymous ones by IP.
if "user_name" in classification:
user_id = classification["user_name"]
user = user_collection.find_one({"name":user_id})
else:
user_id = classification["user_ip"]
user = user_collection.find_one({"ip":user_id})
if user is None:
continue
zooniverse_user_id = user["zooniverse_id"]
# The annotation entry that carries the drawn polygons.
index = [("value" in d) for d in annotations].index(True)
polygons = annotations[index]["value"]
if polygons == "":
continue
print counter
# have we read in this user and subject before? If so, read in the pickle file
fname = base_directory+"/Databases/kelp/"+str(zooniverse_id)+"_"+str(zooniverse_user_id)+".pickle"
plines = []
if os.path.isfile(fname):
plines = pickle.load(open(fname,"rb"))
else:
if not(zooniverse_id in count):
count[zooniverse_id] = 1
else:
count[zooniverse_id] += 1
for poly_key in polygons:
# convert from relative coordinates to absolute
poly = polygons[poly_key]
x,y = poly["startingPoint"]
x = float(x)
y = float(y)
segmentIndices = sorted([int(i) for i in poly["relPath"]])
# NOTE(review): plines is re-initialised for every poly_key, so
# only the last polygon appears to survive this loop — verify
# whether that is the intent (indentation was lost in this dump).
plines = [(x,y)]
for i in segmentIndices:
delta = poly["relPath"][str(i)]
dX,dY = delta
dX = float(dX)
dY = float(dY)
if (dX == 0) and (dY == 0):
continue
x += dX
y += dY
plines.append((x,y))
plines = fix_poly(plines)
pickle.dump(plines,open(fname,"wb"))
if not(zooniverse_id in polydict):
polydict[zooniverse_id]= {}
polydict[zooniverse_id][user_id] = plines
else:
if not(user_id in polydict[zooniverse_id]):
polydict[zooniverse_id][user_id] = plines
else:
print "weird"
#polydict[zooniverse_id][user_id].extend(plines)
def intersect(plist1,plist2):
# Pairwise-intersect two lists of Shapely polygons.
# NOTE(review): the intersection shapes are computed but never
# collected or returned — only the debug print runs.
for p1 in plist1:
for p2 in plist2:
shape = p1.intersection(p2)
print "here"
# Compare annotations of subjects that were seen by more than one user.
for zooniverse_id in count:
if count[zooniverse_id] == 1:
continue
print zooniverse_id
# NOTE(review): this unconditional `continue` makes everything below
# dead code — presumably left in while debugging.
continue
#shapes = {}
#for users in polydict[zooniverse_id]:
# shapes[users] = [Polygon(p) for p in polydict[zooniverse_id][users]]
u = polydict[zooniverse_id].keys()
u0 = u[0]
u1 = u[1]
intersect(polydict[zooniverse_id][u0],polydict[zooniverse_id][u1]) |
992,559 | e2057b9e0c54249fa828332069befaaae0d97c3c | # Module imports
import time
import redis
from flask import Flask
# Flask application instance.
app = Flask(__name__)
# Redis client: 'redis' is the service host name (e.g. docker-compose),
# on the default port 6379.
cache = redis.Redis(host='redis', port=6379)
# Function: basic retry loop that lets us attempt the request several
# times if the redis service is not available
def get_hit_count():
    """Increment and return the Redis hit counter.

    Retries while the redis service is unreachable: up to 6 attempts
    total, sleeping 0.5 s between them, and re-raises the last
    ConnectionError once the retries are exhausted.
    """
    for retries_left in range(5, -1, -1):
        try:
            return cache.incr('hits')
        except redis.exceptions.ConnectionError as exc:
            if retries_left == 0:
                raise exc
            time.sleep(0.5)
@app.route('/')
def hello():
    """Index route: report how many times the page has been served."""
    return 'Hello World! I have been seen {} times.\n'.format(get_hit_count())
@app.route('/about')
def about():
"""Static /about page returning a hard-coded greeting."""
return "<h1> Hola Joan </h1>" |
992,560 | 6268410353c356127d3eb6307484992c16de5529 | # Generated by Django 3.0.8 on 2020-07-23 22:30
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
# Auto-generated (Django 3.0.8): creates the DailyStockStats table —
# per-company, per-date mean/standard-deviation statistics.
dependencies = [
('frontend', '0009_price_created_at'),
]
operations = [
migrations.CreateModel(
name='DailyStockStats',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateTimeField()),
('mean', models.FloatField(blank=True, null=True)),
('std_dev', models.FloatField(blank=True, null=True)),
('company', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='frontend.Company')),
],
options={
# At most one stats row per (company, date) pair.
'unique_together': {('company', 'date')},
},
),
]
|
992,561 | ff3d8d50df604d8f96720571bfc4a2ddb84ee8cc | # django imports
from django.shortcuts import render_to_response, redirect
from django.core.urlresolvers import reverse
from django.http import HttpResponseServerError, HttpResponse, Http404
from django.template import RequestContext
from django.conf import settings
# bvclient imports
from bv.libclient.libtalks import LibTalks
from bv.libclient.libtrips import LibTrips
from bv.libclient.libusers import LibUsers
from bv.libclient.ext.dj import inject_lib, need_bvoauth_authentication
from bv.client.talks.forms import ContactUserForm
from bv.client.utils.paginator import compute_nb_pages
# Page size for talk lists: the TALKS_PER_PAGE setting when present,
# otherwise the project-wide default.
DEFAULT_ITEMS_PER_PAGE = settings.DEFAULT_ITEMS_PER_PAGE
items_per_page = getattr(settings, 'TALKS_PER_PAGE', DEFAULT_ITEMS_PER_PAGE)
@need_bvoauth_authentication()
@inject_lib(LibTalks)
def list_talks(request, page=1, lib=None):
    """List all talks of the logged-in user, paginated."""
    nb_talks = lib.count_talks()
    context = {
        'talks': lib.list_talks(page, items_per_page),
        'count': nb_talks,
        'page': int(page),
        'listpages': compute_nb_pages(nb_talks, items_per_page),
        'is_talk': True,
    }
    return render_to_response('talks/list_talks.html', context,
                              context_instance=RequestContext(request))
@need_bvoauth_authentication()
@inject_lib(LibTalks)
def contact_user(request, trip_id=None, lib=None):
"""Create a new negociation about an announce
Create the negotiation, the message, send a mail to the user trip, and
redirect user to the list of negociations
If a negociation already exists for this announce and this user (the logged
one), redirect the user to the add message view
If one of the email field is empty, redirect user to the contact error view
This view is only accessible by connected users.
"""
# A talk about this trip already exists: jump to its message thread.
if lib.talk_exists_for_trip(trip_id):
return redirect('talks:add_message', int(trip_id))
if request.POST :
form = ContactUserForm(data=request.POST)
if form.is_valid():
talk_id = lib.create_talk(trip_id, form.cleaned_data['message'])
return redirect('talks:list_messages', talk_id)
else:
# check if a conversation about this trip already exists
form = ContactUserForm()
# GET or invalid POST: re-render the contact form with trip context
# (an invalid bound form keeps its errors for display).
libtrips = LibTrips(**lib.get_params())
trip = libtrips.get_trip(trip_id)
return render_to_response('talks/contact_user.html', {
'from_user' : request.bvuser,
'to_user' : trip.user,
'form' : form,
'trip': trip,
'is_talk': True,
}, context_instance=RequestContext(request))
@need_bvoauth_authentication()
@inject_lib(LibTalks)
def list_messages(request, page=1, talk_id=None, lib=None):
"""Add a message to an existing talk.
"""
# POST: append the message, then redirect (post/redirect/get pattern).
if request.POST:
form = ContactUserForm(data=request.POST)
if form.is_valid():
lib.add_message_to_talk(talk_id=talk_id,
message=form.cleaned_data['message'])
return redirect("talks:list_messages", talk_id)
else:
form = ContactUserForm()
talk = lib.get_talk(talk_id)
messages = lib.list_talk_messages(talk_id)
# The "other" participant: the trip owner if the logged-in user
# started the talk, otherwise the talk initiator.
if request.bvuser.id == talk.from_user.id:
to_user = talk.trip.user
else:
to_user = talk.from_user
return render_to_response('talks/list_messages.html', {
'to_user' : to_user,
'talk' : talk,
'form' : form,
'messages': messages,
'is_talk': True,
}, context_instance=RequestContext(request))
@need_bvoauth_authentication()
@inject_lib(LibTalks)
def cancel_talk(request, talk_id=None, lib=None):
    """Cancel a negotiation talk.

    Cancelling requires a reason, so the contact form is reused; on a
    valid POST the talk is deleted and the user returns to the list.
    """
    is_post = bool(request.POST)
    form = ContactUserForm(data=request.POST) if is_post else ContactUserForm()
    if is_post and form.is_valid():
        lib.delete_talk(talk_id, form.cleaned_data['message'])
        return redirect("talks:list")
    # GET or invalid POST: show the cancellation form again.
    context = {
        'talk': lib.get_talk(talk_id),
        'form': form,
        'is_talk': True,
    }
    return render_to_response('talks/cancel_talk.html', context,
                              context_instance=RequestContext(request))
@need_bvoauth_authentication()
@inject_lib(LibTalks)
def validate_talk(request, talk_id=None, lib=None):
    """Mark the talk as validated, then go to its confirmation page."""
    lib.validate_talk(talk_id)
    confirm_url = reverse('talks:confirm_validate', args=[talk_id])
    return redirect(confirm_url)
|
992,562 | ec8833d37bf919bab869f1a72311ced3b2a7d3ab | from abc import ABC, abstractmethod
import matplotlib.pyplot as plt
import numpy as np
from scipy import spatial
class FitnessLandscape(ABC):
""" Template for building landscapes. """
def __init__(self, limits, resolution):
"""
Initialize bounds and fitness function.
Args:
limits (list): Boundaries of the landscape: [x_min, x_max, y_min, y_max]
resolution (int): Number of points per dimension.
"""
self.limits = limits
self.resolution = resolution
self.X, self.Y = self._create_meshgrid()
self.coords, self.tree = self._generate_coords()
# resolution x resolution grid of raw fitness values.
self.fitness_function = self._calculate_fitness().reshape(self.resolution, self.resolution)
# Cached extrema, used to normalize fitness into [0, 1].
self.max, self.min = np.max(self.fitness_function), np.min(self.fitness_function)
def _generate_coords(self):
"""
Generates array of coordinates and tree for positions interpolation.
Returns:
tuple: Coordinates and tree for position lookup.
"""
coords = np.dstack([self.X.ravel(), self.Y.ravel()])[0]
return coords, spatial.cKDTree(coords)
def _create_meshgrid(self):
"""
Builds up the grid of the landscape. Each point corresponds to a coordinate in the space.
Returns:
tuple: Arrays containing coordinates of the meshgrid.
"""
x = np.linspace(self.limits[0], self.limits[1], self.resolution)
y = np.linspace(self.limits[2], self.limits[3], self.resolution)
X, Y = np.meshgrid(x, y)
return X, Y
def evaluate_fitness(self, pos):
"""
Computes the fitness of individual at position given the fitness function of the landscape.
Args:
pos (tuple): x and y of the individual
Returns:
float: Normalized fitness of the individual. Individuals in the minima will have a fitness close to 1.
"""
# Nearest grid point; `index` is flat, so split it into row/column.
_, index = self.tree.query(pos)
return 1 - (self.fitness_function[index // self.resolution][index % self.resolution] - self.min) / (self.max - self.min)
def plot(self):
""" Displays the landscape using contour maps. """
cs = plt.contour(self.X, self.Y, self.fitness_function)
plt.clabel(cs, inline=1, fontsize=6)
plt.imshow(self.fitness_function, extent=self.limits, origin="lower", alpha=0.3)
@abstractmethod
def _calculate_fitness(self):
""" Creates the fitness landscape given a function.
Check https://en.wikipedia.org/wiki/Test_functions_for_optimization?wprov=srpw1_0
for more information.
"""
pass
class SphereLandscape(FitnessLandscape):
    """Sphere function: f(x, y) = x^2 + y^2; single global minimum at (0, 0)."""

    def _calculate_fitness(self):
        x_squared = self.X ** 2
        y_squared = self.Y ** 2
        return x_squared + y_squared
class GrickwankLandscape(FitnessLandscape):
# NOTE(review): this differs from the standard Griewank function, which
# uses a *product* of cosines with x/sqrt(1) and y/sqrt(2); here both
# terms are divided by sqrt(2) and the cosines are subtracted
# separately. Confirm whether this variant is intentional.
def _calculate_fitness(self):
return 1 + (self.X ** 2 / 4000) + (self.Y ** 2 / 4000) - np.cos(self.X / np.sqrt(2)) - np.cos(self.Y / np.sqrt(2))
class HimmelblauLandscape(FitnessLandscape):
# Himmelblau's function: four global minima of value 0 (e.g. at (3, 2)).
def _calculate_fitness(self):
return (self.X ** 2 + self.Y - 11) ** 2 + (self.X + self.Y ** 2 - 7) ** 2
class AckleyLandscape(FitnessLandscape):
    """Ackley function: many local minima; global minimum f(0, 0) = 0."""

    def _calculate_fitness(self):
        # Fix: the cosine sum must be grouped before scaling by 0.5.
        # Previously 0.5 multiplied only the X cosine, so f(0, 0) != 0
        # and the landscape's minimum was displaced from the origin.
        return (-20 * np.exp(-0.2 * np.sqrt(0.5 * (self.X ** 2 + self.Y ** 2)))
                - np.exp(0.5 * (np.cos(2 * np.pi * self.X)
                                + np.cos(2 * np.pi * self.Y)))
                + np.exp(1) + 20)
class RastringinLandscape(FitnessLandscape):
    """Rastrigin function (2-D): 20 + x^2 + y^2 - 10cos(2πx) - 10cos(2πy)."""

    def _calculate_fitness(self):
        # Fix: the Y**2 term was missing, which skewed the landscape along
        # the y axis; the standard 2-D Rastrigin includes both squares.
        return (20 + self.X ** 2 + self.Y ** 2
                - 10 * np.cos(2 * np.pi * self.X)
                - 10 * np.cos(2 * np.pi * self.Y))
|
992,563 | bb421174d8799951cab385e50727e471c9e522a1 | __author__ = 'Nancy'
import tornado.ioloop
import tornado.web
# __________________________________________________
# class MainHandler(tornado.web.RequestHandler):
# def __get__(self):
# items=["Item 1","Item 2","Item3"]
# self.render("template.html",title="My title",items=items)
#
# application=tornado.web.Application([
# (r"/",MainHandler)
# ])
#
# # Why can not output ?
# if __name__=="__main__":
# application.listen(8888)
# tornado.ioloop.IOLoop.instance().start()
#______________________________________________________
class MainHandler(tornado.web.RequestHandler):
    """Demo handler: sets a secure cookie on first visit, reports it after."""

    def get(self):
        # Fix: was `self.get._secure_cookie(...)` — an attribute lookup on
        # the bound `get` method — which raised AttributeError and produced
        # the HTTP 500 noted in the original comments.
        if not self.get_secure_cookie("mycookie"):
            self.set_secure_cookie("mycookie", "myvalue")
            self.write("your cookie was not set yet!")
        else:
            self.write("your cookie was set!")
# Application with a signing secret for secure cookies.
application=tornado.web.Application([
(r"/",MainHandler),
],cookie_secret="61oETzKXQAGaYdkL5gEmGeJJFuYh7EQnp2XdTP1o/Vo=")
# NOTE(review): the "500: Internal Server Error" seen here was caused by
# the `self.get._secure_cookie` typo in MainHandler.get.
if __name__=="__main__":
application.listen(8888)
tornado.ioloop.IOLoop.instance().start()
|
992,564 | fc9555524085fbee2b82dd2790db5a8044c18e9d | import paho.mqtt.client as mqtt
from messageHandler import message_handler
import time
# This is the main script for Assignment 2 of IoT
# Its purpose is to subscribe to our local mqtt-sn broker (mosquitto.rsmb in my case)
# Every time it receives a message, it will send it to the messageHandler, which will do the work
# It is important to note that since my device on thingsboard is an entire station, our local virtual
# station will send the data only when the payload is complete (i.e. when it has every value, this may
# lead to data loss while the payload is still incomplete)
# Local MQTT-SN broker connection (mosquitto.rsmb on this host).
local_broker="127.0.0.1"
local_port=1886
#function definitions
def on_connect(client, userdata, flags, rc):  # connection feedback
    """MQTT connect callback: log the client and the connection result code.

    Fix: `client` is a paho Client object, not a str — concatenating it
    directly raised TypeError inside the callback.
    """
    print("Client " + str(client) + " connected with result code " + str(rc))
def on_publish(client, userdata, result):  # publish feedback
    """MQTT publish callback: confirm that data went out to ThingsBoard."""
    print("data published to thingsboard \n")
def on_subscribe(client, userdata, mid, granted_qos):
    """MQTT subscribe callback: log that the subscription succeeded.

    Fix: str() the paho Client object before concatenation — the original
    raised TypeError when the callback fired.
    """
    print("Client " + str(client) + " has subscribed successfully")
#def on_message(client, userdata, message):
# print("Message received: "+ message)
def on_message(client, userdata, message):
# Message callback: log the incoming message, then forward topic and
# decoded payload to the global message_handler instance.
print("message received " ,str(message.payload.decode("utf-8")))
print("message topic=",message.topic)
print("message qos=",message.qos)
print("message retain flag=",message.retain)
payload = str(message.payload.decode("utf-8"))
print('Sending to message handler the following topic: [' + message.topic + '] and payload: [' + payload + ']')
handler.set_message(message.topic,payload)
def on_log(client, userdata, level, buf): #used for logging purposes
# Paho debug-log callback.
# NOTE(review): defined but never attached to the client below; assign
# `bridge.on_log = on_log` to activate it.
print("log: ",buf)
# Instantiate the message handler (argument 1 passed through as-is).
handler = message_handler(1)
# Local MQTT client that bridges sensor data to the handler.
bridge = mqtt.Client("Bridge")
bridge.on_connect = on_connect
bridge.on_publish = on_publish
bridge.on_subscribe = on_subscribe
bridge.on_message = on_message
print("Local bridge created successfully")
# Subscribe to the local broker to receive all sensor data topics.
bridge.connect(local_broker,local_port,60)
bridge.subscribe("/sensor/+/data/#")
# loop_forever() blocks and handles reconnections automatically.
bridge.loop_forever()
992,565 | 4b633328aa000b10f06372ddaa3d627b2fc70f31 | #!/usr/bin/python
#encoding: UTF-8
#author: str2num
import os
import glob
import string
import syntax_tag
import bfunction
class Source(object):
# Represents one source file in the build: tracks its output file,
# make-rule lines, and the compiler flag sets resolved from the context.
TYPE = 'source'
def __init__(self, infile, args, ctx):
self._infile = os.path.normpath(infile)
self._outfile = self._infile
self._args = args
self._ctx = ctx
self._log = ctx.log()
self._target = None
self._depends = []
self._make_lines = []
self._clean_files = []
self._incpaths = []
self._cxxflags = []
self._cppflags = []
self._flags_extra = []
self._cflags = []
self._prefixes = []
# Joiners used when rendering flag lists into make syntax
# (line continuation vs. plain space).
self._line_delim=' \\\n '
self._space_delim=' '
# *_flag booleans mark whether the corresponding list was set
# explicitly; otherwise pre_action() pulls defaults from the context.
self._incpaths_flag = False
self._cxxflags_flag = False
self._cppflags_flag = False
self._flags_extra_flag = False
self._cflags_flag = False
# Rendered string forms of the flag lists, filled by pre_action().
self._incpaths_s = ''
self._cxxflags_s = ''
self._cppflags_s = ''
self._flags_extra_s = ''
self._cflags_s = ''
self._depends_incpaths = []
self._depends_incpaths_s = ''
def in_file(self):
# Normalized path of the input source file.
return self._infile
def out_file(self):
return self._outfile
def clean_files(self):
return self._clean_files
def set_target(self, target):
self._target = target
def set_depends(self, v):
self._depends = v
def make_lines(self):
return self._make_lines
def pre_action(self):
# Resolve any flag lists that were not set explicitly from the build
# context, then render all of them as '-I...'/flag strings.
if (not self._incpaths_flag):
self._incpaths = self._ctx.include_paths().v()
if (not self._cxxflags_flag):
self._cxxflags = self._ctx.cxx_flags().v()
if (not self._cppflags_flag):
self._cppflags = self._ctx.cpp_flags().v()
if (not self._flags_extra_flag):
self._flags_extra = self._ctx.flags_extra().v()
if (not self._cflags_flag):
self._cflags = self._ctx.c_flags().v()
if (not self._depends_incpaths):
self._depends_incpaths = self._ctx.depends_include_paths()
self._incpaths_s = self._line_delim.join(map(lambda x:'-I%s' % x, self._incpaths))
self._depends_incpaths_s = self._line_delim.join(map(lambda x:'-I%s' % x, self._depends_incpaths))
self._cxxflags_s = self._line_delim.join(self._cxxflags)
self._cppflags_s = self._line_delim.join(self._cppflags)
self._flags_extra_s = self._line_delim.join(self._flags_extra)
self._cflags_s = self._line_delim.join(self._cflags)
def action(self):
# No-op in the base class; concrete subclasses implement the build step.
pass
class CSource(Source):
    """C source file: generates a gcc compile rule producing a .o file."""
    EXTS = ('.c')
    TYPE = 'c'
    def __init__(self, infile, args, ctx):
        Source.__init__(self, infile, args, ctx)
    def pre_action(self):
        Source.pre_action(self)
    def action(self):
        """Emit the compile rule and dependency list for this .c file."""
        Source.action(self)
        cfile = self._infile
        # Prefix the object name with the target's basename so the same
        # source compiled for two targets gets distinct object files.
        objfile = bfunction.replace_file_ext_name(
            bfunction.add_prefix_to_basename(
                cfile,
                self._target.basename() + '_'),
            '.o')
        gccflags_s = "%(_incpaths_s)s %(_depends_incpaths_s)s " % (self.__dict__)
        gccflags_s += "%(_cppflags_s)s %(_cflags_s)s " % (self.__dict__)
        real_cc = self._ctx.cc()
        # command1 asks the compiler for the header dependency list;
        # command2 is only used to report preprocessing failures.
        command1 = '%(real_cc)s -MG -MM %(gccflags_s)s %(cfile)s' % (locals())
        command2 = 'cpp -E %(gccflags_s)s %(cfile)s' % (locals())
        depfiles = []
        depfiles.append(cfile)
        depfiles.extend(self._prefixes)
        depfiles.extend(get_cpp_depend_files(command1, command2, self._ctx, self._infile))
        cc = "$(CC)"
        # Use make variables for any flag group not explicitly overridden.
        if(not self._incpaths_flag):
            r_gccflags_s = "$(INCPATH) "
        else:
            r_gccflags_s = "%(_incpaths_s)s " % (self.__dict__)
        r_gccflags_s += "$(DEP_INCPATH) "
        if(not self._cppflags_flag):
            r_gccflags_s += "$(CPPFLAGS) "
        else:
            r_gccflags_s += "%(_cppflags_s)s "%(self.__dict__)
        if(not self._cflags_flag):
            r_gccflags_s += "$(CFLAGS) "
        else:
            r_gccflags_s += "%(_cflags_s)s "%(self.__dict__)
        if cfile in self._ctx.user_sources_extra().v():
            r_gccflags_s += '$(FLAGSEXTRA)'
        cmd='%(cc)s -c %(r_gccflags_s)s -o %(objfile)s %(cfile)s'%(locals())
        commands = []
        commands.append(cmd)
        r=(objfile, self._line_delim.join(depfiles), commands)
        self._make_lines.append(r)
        self._clean_files.append(objfile)
        self._outfile = objfile
class CXXSource(Source):
    """C++ source file: generates a g++ compile rule producing a .o file."""
    EXTS = ('.cpp', '.cc', '.cxx')
    TYPE = 'cxx'
    def __init__(self, infile, args, ctx):
        Source.__init__(self, infile, args, ctx)
    def action(self):
        """Emit the compile rule and dependency list for this C++ file."""
        Source.action(self)
        cxxfile = self._infile
        # Prefix the object name with the target's basename (see CSource).
        objfile = bfunction.replace_file_ext_name(
            bfunction.add_prefix_to_basename(cxxfile, self._target.basename() + '_'), '.o')
        gccflags_s = '%(_incpaths_s)s %(_depends_incpaths_s)s ' % (self.__dict__)
        gccflags_s += '%(_cppflags_s)s %(_cxxflags_s)s ' % (self.__dict__)
        real_cc = self._ctx.cxx()
        # command1 queries header dependencies; command2 reports failures.
        command1 = '%(real_cc)s -MG -MM %(gccflags_s)s %(cxxfile)s' % (locals())
        command2 = 'cpp -E %(gccflags_s)s %(cxxfile)s' % (locals())
        cxx = '$(CXX)'
        depfiles = []
        depfiles.append(cxxfile)
        depfiles.extend(self._prefixes)
        depfiles.extend(get_cpp_depend_files(command1, command2, self._ctx, self._infile))
        # Use make variables for any flag group not explicitly overridden.
        if (not self._incpaths_flag):
            r_gccflags_s = '$(INCPATH) '
        else:
            r_gccflags_s = '%(_incpaths_s)s ' % (self.__dict__)
        r_gccflags_s += '$(DEP_INCPATH) '
        if (not self._cppflags_flag):
            r_gccflags_s += '$(CPPFLAGS) '
        else:
            r_gccflags_s += '%(_cppflags_s)s ' % (self.__dict__)
        if (not self._cxxflags_flag):
            r_gccflags_s += '$(CXXFLAGS) '
        else:
            r_gccflags_s += '%(_cxxflags_s)s ' % (self.__dict__)
        if cxxfile in self._ctx.user_sources_extra().v():
            r_gccflags_s += '$(FLAGSEXTRA)'
        cmd='%(cxx)s -c %(r_gccflags_s)s -o %(objfile)s %(cxxfile)s' % (locals())
        commands = []
        commands.append(cmd)
        r = (objfile, self._line_delim.join(depfiles), commands)
        self._make_lines.append(r)
        self._clean_files.append(objfile)
        self._outfile = objfile
class FileSource(Source):
    """Pass-through source for files that need no compilation step."""
    def __init__(self, infile, args, ctx):
        Source.__init__(self, infile, args, ctx)
def get_cpp_depend_files(command1, command2, ctx, infile):
    """Return the header files `infile` depends on, via ``cc -MG -MM``.

    command1 is the dependency-query command; command2 (plain ``cpp -E``)
    is only rerun to produce a better error report when a listed
    dependency does not exist on disk.  Files outside the working tree
    are filtered out unless QUOT_ALL_DEPS=True; PRE=True skips the
    compiler call entirely (dry-run mode).

    Bug fix: the original had ``assert(status)`` before the second
    log_fatal, which crashed whenever command2 *succeeded*; it is now a
    plain ``if (status):`` guard.  Also replaced the Python-2-only
    ``string.split`` calls and the map/filter-then-slice pattern with
    forms that work on both Python 2 and 3.
    """
    func_name = 'action=get_cpp_depend_files'
    log = ctx.log()
    a = os.getenv('PRE')
    if a == 'True':
        (status, output, err) = (0, ':', '')
    else:
        (status, output, err) = log.log_notice_with_cc('%s cmd=[%s]' % (func_name,
            bfunction.shorten_word(command1)), command1)
    if (status):
        log.log_fatal('%s cmd=[%s] status=%d err=[%s]' % (func_name, command1, status, err))
    # Fold make-style line continuations, then take everything after the
    # "target:" prefix as the whitespace-separated dependency list.
    line = ' '.join(output.split('\\\n'))
    depfiles = line.split(':')[1].split()
    cwd = '%s/' % (os.path.abspath(os.getcwd()))
    a = os.getenv('QUOT_ALL_DEPS')
    if a != 'True':
        # Keep only dependencies inside the working tree, normalized.
        depfiles = [os.path.normpath(x) for x in depfiles
                    if os.path.abspath(x).startswith(cwd)]
    for depfile in depfiles:
        if (not os.path.exists(depfile)):
            log.log_fatal('%s cmd=[%s] err=[%s not found]' % (func_name, command2, os.path.abspath(depfile)))
            # Rerun the plain preprocessor to surface the compiler error.
            (status, _, err) = log.log_notice_with_cc('%s cmd=[%s]' %
                (func_name, bfunction.shorten_word(command2)), command2)
            if (status):
                log.log_fatal('%s cmd=[%s] status=%d err=[%s]' % (func_name,
                    command2, status, err))
    # The first entry is the source file itself; callers add it separately.
    if (depfiles and infile == depfiles[0]):
        depfiles = depfiles[1:]
    return depfiles
|
992,566 | aa15aa6afc45e7f86f38024ddc8b9b8703c3657c | A = [1,1,1,3,12]
# Move every 0 in A to the end of the list.
# Bug fix: iterate over a snapshot (list(A)) -- removing from the list
# being iterated skips the element after each removal, so consecutive
# zeros were missed.
for elem in list(A):
    if elem == 0:
        A.remove(elem)
        A.append(0)
print(A)
992,567 | a6babc75c107a5d02aaeb6ad72b00b864988ed31 | from custodian.custodian import Custodian
from custodian.vasp.handlers import VaspErrorHandler, FrozenJobErrorHandler, \
UnconvergedErrorHandler, MeshSymmetryErrorHandler, MaxForceErrorHandler, \
PotimErrorHandler, NonConvergingErrorHandler, WalltimeHandler
from custodian.vasp.jobs import VaspJob
# Command used to launch VASP under the ibrun MPI launcher.
vasp_cmd = ['ibrun', '/home1/05018/tg843171/vasp.5.4.4_vtst/bin/vasp_std']
# Restart the job only when it appears frozen (no output for 60 s).
handlers = [FrozenJobErrorHandler(timeout=60)]
# Single VASP run writing output in place (empty suffix, no NPAR tuning).
jobs = [VaspJob(vasp_cmd, final=True, suffix="", auto_npar=False)]
# Give up after two correctable errors.
c = Custodian(handlers, jobs, max_errors=2)
c.run()
|
992,568 | 8fd6b4d981321162a56ef7c5f7568c2f23420c14 | # Copyright Pincer 2021-Present
# Full MIT License can be found in `LICENSE` at the project root.
from enum import IntEnum
class InteractionFlags(IntEnum):
    """Bit flags attached to a Discord interaction response.

    Attributes
    ----------
    EPHEMERAL:
        only the user receiving the message can see it
    """

    EPHEMERAL = 1 << 6
|
992,569 | fd121fca0f32621f1666699fa67f7bef268af69f | """
Question class diagram:
Question
- question: String
- option1: String
- option2: String
- option3: String
- option4: String
- correct_ans: String
+ set_question
+ set_option1
+ set_option2
+ set_option3
+ set_option4
+ set_correct_ans
+ get_question
+ get_option1
+ get_option2
+ get_option3
+ get_option4
+ get_correct_ans
"""
class Question:
    """A multiple-choice quiz question: prompt, four options, one answer."""
    def __init__(self, question: str, option1: str, option2: str, option3: str, option4: str, correct_ans: str):
        self.question = question
        self.option1 = option1
        self.option2 = option2
        self.option3 = option3
        self.option4 = option4
        # correct_ans holds the answer text; presumably it matches one of
        # the four options -- TODO confirm with callers.
        self.correct_ans = correct_ans
    def set_question(self, new_question: str) -> None:
        self.question = new_question
    def set_option1(self, new_option: str) -> None:
        self.option1 = new_option
    def set_option2(self, new_option: str) -> None:
        self.option2 = new_option
    def set_option3(self, new_option: str) -> None:
        self.option3 = new_option
    def set_option4(self, new_option: str) -> None:
        self.option4 = new_option
    def set_correct_ans(self, new_ans: str) -> None:
        self.correct_ans = new_ans
    def get_question(self) -> str:
        return self.question
    def get_option1(self) -> str:
        return self.option1
    def get_option2(self) -> str:
        return self.option2
    def get_option3(self) -> str:
        return self.option3
    def get_option4(self) -> str:
        return self.option4
    def get_correct_ans(self) -> str:
        return self.correct_ans
|
992,570 | 29f66b76ff191310e1b653c4ea95cb7f894cc115 | import numpy as np
import matplotlib.pyplot as plt
# load data -- read each results file ONCE (the original called
# np.loadtxt twice per file); column 0 is the array size, columns 1+
# are repeated timing runs
cpu_data = np.loadtxt("cpu_convolution_performance_results.txt")
cpu_sizes = cpu_data[:, 0]
cpu_times = cpu_data[:, 1:]
gpu_data = np.loadtxt("gpu_convolution_performance_results_tpb128.txt")
gpu_sizes = gpu_data[:, 0]
gpu_times = gpu_data[:, 1:]
# calculate average times across the repeated runs
cpu_times = np.mean(cpu_times, axis=1)
gpu_times = np.mean(gpu_times, axis=1)
# print gpu speedup values
print(cpu_times / gpu_times)
# plot execution times
plt.loglog(cpu_sizes, cpu_times, label="i9 9900K")
plt.loglog(gpu_sizes, gpu_times, label="GTX 1070")
plt.xlabel("Array Size")
plt.ylabel("Execution Time (ms)")
plt.title("Convolution Performance")
plt.legend()
plt.show()
# plot GPU speedup
plt.semilogx(cpu_sizes, cpu_times / gpu_times)  # cpu sizes should be same as gpu sizes
plt.xlabel("Array Size")
plt.ylabel("Speed Increase Factor")
plt.title("GPU Speedup Over CPU")
plt.show()
992,571 | 8daf2e9af28360a2ee035f9ead09cd6d54189546 | print('Hello There')
# Emit four carriage returns then six newlines, exactly as before,
# just expressed as loops instead of repeated statements.
for _ in range(4):
    print('\r')
for _ in range(6):
    print('\n')
|
992,572 | b8b8689ef15482d9f183667ff0d9ff67a865e2be | from django.apps import AppConfig
class CompanySearchConfig(AppConfig):
    """Django application configuration for the company_search app."""
    name = 'company_search'
|
992,573 | 6e601f08acad7ef032e7461cb5e7a37991b0dcc9 | from math import floor
# R = balloon radius, L = available gas volume (read from one input line).
R, L = list(map(float, input().split(" ")))
# Judge-specified constant -- do not replace with math.pi, the expected
# output presumably depends on this exact value.
pi = 3.1415
# Volume of one spherical balloon: 4/3 * pi * R^3.
volume_necessario = (4 * pi * pow(R, 3) / 3)
# Only whole balloons can be filled.
qtd_baloes = floor(L / volume_necessario)
print(qtd_baloes)
992,574 | 2b168192476f256e01158b22b41efcd8b17dee01 |
import bisect
'''
General class that handles values of stochastic processes over time
'''
class StochasticProcess:
    """Piecewise-constant value of a stochastic process over time.

    Arrival times are appended in strictly increasing order; values[0]
    is the initial condition and values[j] is the value after the j-th
    arrival, so value_at() can binary-search the arrival times.

    Doc fix: the original placed each method's description as a bare
    string statement *before* the def, where it documents nothing;
    they are now proper docstrings.
    """

    def __init__(self, initial_condition=0.0):
        self.last_arrival = -1.0
        self.arrival_times = []
        self.values = [initial_condition]

    def get_last_arrival_time(self):
        """Return time of last arrival; None if no arrival happened yet."""
        return self.last_arrival if self.last_arrival >= 0.0 else None

    def get_current_value(self):
        """Return the current value of the stochastic process."""
        return self.values[-1]

    def get_arrival_times(self):
        """Return the list of arrival times."""
        return self.arrival_times

    def generate_arrival_at(self, t, dN=1.0, N=None):
        """Record an arrival at time t with increment dN (or set value to N)."""
        # Check that the arrival time happens in the present (strictly
        # after every previous arrival).
        try:
            assert(t > self.last_arrival)
        except AssertionError:
            print("AssertionError: arrival time is before a previous arrival")
            exit(1)
        self.last_arrival = t
        self.arrival_times.append(t)
        if N is None:
            self.values.append(self.values[-1] + dN)
        else:
            self.values.append(N)

    def value_at(self, t):
        """Return the value of the process at time t (right-continuous)."""
        j = bisect.bisect_right(self.arrival_times, t)
        return self.values[j]
'''
General class that handles values of counting processes over time
'''
class CountingProcess(StochasticProcess):
    """Counting process: a stochastic process whose arrivals always add +1.

    Doc fix: the original's descriptions were bare string statements
    before the defs; they are proper docstrings now.
    """

    def __init__(self):
        StochasticProcess.__init__(self, initial_condition=0.0)

    def generate_arrival_at(self, t):
        """Record a unit (+1) arrival at time t."""
        super().generate_arrival_at(t, 1.0)
if __name__ == '__main__':
    """Basic unit testing for both process classes."""
    s = StochasticProcess(initial_condition=0.0)
    s.generate_arrival_at(1.0, 1.0)
    s.generate_arrival_at(5.0, 1.0)
    s.generate_arrival_at(10.0, -1.0)
    assert(s.get_arrival_times() == [1.0, 5.0, 10.0])
    assert(s.get_current_value() == 1.0)
    assert(s.get_last_arrival_time() == 10.0)
    tests = zip([-2.0, -1.0, 0.0, 1.0, 3.0, 5.0, 7.0, 10.0, 11.0],
                [0.0, 0.0, 0.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0])
    for t, sol in tests:
        assert(sol == s.value_at(t))
    c = CountingProcess()
    c.generate_arrival_at(1.0)
    c.generate_arrival_at(5.0)
    assert(c.get_arrival_times() == [1.0, 5.0])
    assert(c.get_current_value() == 2.0)
    assert(c.get_last_arrival_time() == 5.0)
    tests2 = zip([-2.0, -1.0, 0.0, 1.0, 3.0, 5.0, 7.0],
                 [0.0, 0.0, 0.0, 1.0, 1.0, 2.0, 2.0])
    # Bug fix: this loop iterated `tests` (an already-exhausted zip
    # iterator) instead of `tests2`, so the CountingProcess.value_at
    # checks never actually ran.
    for t, sol in tests2:
        assert(sol == c.value_at(t))
    print("Unit tests successful.")
|
992,575 | 83a36e0b1d1955b11f65bf4fa37850cb541bd47c |
#////////////////////////////////////////////////////////////////////////////////////////////////////
#/// \file Half_Life_Calculator.py
#/// \brief A python program built to calculate the transcript half lives for the three replicates
#/// using time series data.
#///
#// Author: Divya Singhal
#////////////////////////////////////////////////////////////////////////////////////////////////////
#import the required packages
import csv
import numpy as np
import pandas as pan
from scipy import stats
from operator import itemgetter
#////////////////////////////////////////////////////////////////////////////////////////////////////
#/// \function ParseTextIntoTables
#/// \brief function read's a text file and put's the values in a list
#/// \returns value i.e. list of read values from the text file
#////////////////////////////////////////////////////////////////////////////////////////////////////
def ParseTextIntoTables(table,x,y):
    """Extract columns x..y of each data row of `table` as float lists.

    Iteration starts at row index 1 (skipping what is presumably a
    header row -- TODO confirm against DecayTimecourse.txt).  Each row
    is passed through HandleNaN so missing values become the row mean.
    Returns a list of per-row float lists.
    """
    values = list()
    rows = len(table)
    for index in range(1,rows):
        # NOTE(review): assumes table.loc[index, x:y] is numeric/convertible.
        numList = [float(i) for i in table.loc[index,x:y]]
        updatedVal = HandleNaN(numList)
        values.append(updatedVal)
    return values
#////////////////////////////////////////////////////////////////////////////////////////////////////
#/// \function HalfLifeCalculator
#/// \brief function evaluate's the transcript half life
#/// \returns value i.e. list of identified halflife values
#////////////////////////////////////////////////////////////////////////////////////////////////////
def HalfLifeCalculator(values):
    """Regress each row of `values` against the time row and derive half lives.

    values[0] holds the time points; every later row is linearly
    regressed against them and converted via ln(2)/slope (0.693 ~ ln 2).
    NOTE(review): a decaying series has a negative slope, giving a
    negative "half life", and a zero slope raises ZeroDivisionError --
    downstream code drops inf/NaN, but confirm the sign convention.
    """
    HalfLife = list()
    rows = len(values)
    for index in range(1,rows):
        slope, intercept, r_value, p_value, std_err = stats.linregress(values[0],values[index])
        tempHL = 0.693/slope
        HalfLife.append(tempHL)
    return HalfLife
#////////////////////////////////////////////////////////////////////////////////////////////////////
#/// \function HandleNaN
#/// \brief function created to replace any NAN value with the mean value of the data.
#/// \returns value i.e. new list with NAN values replaced with mean values
#////////////////////////////////////////////////////////////////////////////////////////////////////
def HandleNaN(listValues):
    """Replace NaN entries of `listValues` with the mean of its non-NaN entries.

    Generalization: the original built the Series by indexing elements
    0..8 one by one, so it required exactly nine elements and raised
    IndexError otherwise.  Passing the list straight to pandas.Series
    keeps the same behavior for nine-element rows and works for any
    length.

    Returns the cleaned values as a plain Python list.
    """
    df = pan.DataFrame({'Cordinates': pan.Series(listValues)})
    # fillna with the column mean; axis=0 fills down the column.
    meanList = df.fillna(df.mean(), axis=0)
    return meanList['Cordinates'].values.tolist()
#////////////////////////////////////////////////////////////////////////////////////////////////////
#/// \function listToPandaDataFrame
#/// \brief function is created to convert the list of values into dataframe.
#/// \returns value i.e. dataframe
#////////////////////////////////////////////////////////////////////////////////////////////////////
def listToPandaDataFrame(List1):
    """Wrap a list of two-element rows in a DataFrame with columns col1/col2."""
    return pan.DataFrame(List1, columns=['col1', 'col2'])
# Using the functions defined above, calculate the half life of each
# transcript across the three replicate time courses.
tableValues = pan.read_table('DecayTimecourse.txt',header=None)
# Columns 1-9, 10-18 and 19-27 are the three replicates.
table_1 = ParseTextIntoTables(tableValues, 1, 9)
table_2 = ParseTextIntoTables(tableValues, 10, 18)
table_3 = ParseTextIntoTables(tableValues, 19, 27)
HalfLife_1 = HalfLifeCalculator(table_1)
HalfLife_2 = HalfLifeCalculator(table_2)
HalfLife_3 = HalfLifeCalculator(table_3)
HalfLife = list()
GenesList = list()
# Gene names start at sheet row 2 (row 0/1 are headers/time points).
startIndex = 2
for i in range(0,len(HalfLife_1)):
    # Average the three replicate half lives for each gene.
    sum = HalfLife_1[i] + HalfLife_2[i] + HalfLife_3[i]
    avg = sum/3
    tempHL = [tableValues.loc[startIndex+i,0],avg]
    HalfLife.append(tempHL)
HalfLife_PandaDF = listToPandaDataFrame(HalfLife)
# Drop genes whose regression produced +/-inf or NaN half lives.
HalfLife_PandaDF = HalfLife_PandaDF.replace([np.inf, -np.inf], np.nan)
HalfLife_PandaDF = HalfLife_PandaDF.dropna(how="any")
HalfLife_PandaDF = HalfLife_PandaDF.sort_values("col2")
SortedHalfLife_PandaDF = HalfLife_PandaDF.sort_values("col2")
numberRows = len(SortedHalfLife_PandaDF)
tenPerNum = int(numberRows*0.10)
# filter out the top and bottom ten percent of half lives.
topTenValues = SortedHalfLife_PandaDF.tail(tenPerNum)
BottomTenValues = SortedHalfLife_PandaDF.head(tenPerNum)
# save the result in tab-separated text files.
topTenValues.to_csv("topTenValues.txt",sep='\t',encoding='utf-8')
BottomTenValues.to_csv("bottonTenValues.txt",sep='\t', encoding='utf-8')
|
992,576 | bb8b92739e6f11f529acb607870aa388ae80872b | test_list_1 = [['https', 'www'], ['python-izm', 'com']]
# First pass prints each sub-list whole; second pass unpacks the pair.
for pair in test_list_1:
    print(pair)
for left, right in test_list_1:
    print(left, right)
|
992,577 | 3526f0c72ae4ca1bf79d207d49efc91c823f3c0a | from json import JSONDecodeError
import requests
from xml.etree.ElementTree import fromstring, ElementTree
from flask import request, render_template, redirect, url_for, session,\
flash
from werkzeug.security import generate_password_hash, check_password_hash
from werkzeug.exceptions import abort, HTTPException
from application import app, db, is_isbn_code, login_only, gr_api_key, api_host
from forms import LoginForm, RegisterForm, SearchForm, ReviewForm
@app.route('/')
def index():
    """Landing page: show 15 random book titles as teaser questions."""
    # get random book titles, and pass it to template
    q = db.execute(
        'SELECT title '
        'FROM public.book '
        'ORDER BY random() '
        'LIMIT 15;').fetchall()
    # Suffix each title with '?' for display.
    rnd_titles = [t[0] + '?' for t in q]
    return render_template('index.html', rnd_titles=rnd_titles)
@app.route('/register', methods=['GET', 'POST'])
def register():
    """Create a new account, rejecting duplicate usernames."""
    form = RegisterForm()
    if request.method == 'POST' and form.validate_on_submit():
        u = db.execute(
            'SELECT * FROM public.user '
            'WHERE name = :name',
            {"name": form.login.data}
        ).fetchone()
        if u:
            flash(f'username {form.login.data} not available', 'alert')
            return render_template('register.html', form=form)
        # Store only the salted password hash, never the raw password.
        db.execute(
            'INSERT INTO public.user (name, hash)'
            'VALUES (:name, :hash)',
            {"name": form.login.data,
             "hash": generate_password_hash(form.passw.data)}
        )
        db.commit()
        flash(
            'Registration completed successfully. '
            'Now you can <a href="./login" class="alert-link">log in</a>.',
            'success')
        return redirect(url_for('index'))
    else:
        # Surface every form-validation error as a flash message.
        for field in form.errors:
            for err in form.errors[field]:
                flash(f'{field}: {err}', 'alert')
    return render_template('register.html', form=form)
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Authenticate a user and store their identity in the session."""
    form = LoginForm()
    session['user'] = None  # logout current user if any
    if request.method == 'POST' and form.validate_on_submit():
        u = db.execute(
            'SELECT * FROM public.user '
            'WHERE name = :name',
            {"name": form.login.data}
        ).fetchone()
        if not u:
            flash(f'There is no user: {form.login.data}', 'alert')
            return render_template('login.html', form=form)
        # u[2] is the stored password hash.
        if not check_password_hash(u[2], request.form['passw']):
            flash(f'Login and password must match', 'alert')
            return render_template('login.html', form=form)
        elif check_password_hash(u[2], request.form['passw']):
            session['user'] = request.form['login']
            session['user_id'] = u.id
            return redirect(url_for('search'))
    else:
        # Surface every form-validation error as a flash message.
        for field in form.errors:
            for err in form.errors[field]:
                flash(f'{field}: {err}', 'alert')
    return render_template('login.html', form=form)
@app.route('/logout')
@login_only
def logout():
    """Clear the logged-in user from the session and return home."""
    session['user'] = None
    return redirect(url_for('index'))
@app.route('/search', methods=['GET', 'POST'])
@login_only
def search():
    """Search for books by ISBN, title words, or author name."""
    form = SearchForm()
    if request.method == 'POST' and form.validate_on_submit():
        # call helper function to check if the user put ISBN into search
        user_provides_isbn = is_isbn_code(form.search.data)
        if user_provides_isbn:
            return redirect(url_for('book', book_isbn=user_provides_isbn))
        # search for authors, and books
        else:
            s_q_authors = db.execute(
                'SELECT *, '
                'public.author_gr_map.author_id_gr, '
                'public.author_gr_map.author_ph_gr '
                'FROM public.author '
                'LEFT JOIN public.author_gr_map '
                'ON public.author.id = public.author_gr_map.author_id '
                'WHERE LOWER(name) LIKE LOWER(:search_like) ',
                {'search_like': '%'+form.search.data+'%'}
            ).fetchall()
            # Full-text title match OR ISBN substring; authors aggregated
            # per book.  to_tsquery terms are AND-ed (' & ').
            s_q_books = db.execute(
                'SELECT public.book.*, '
                'array_agg(public.author.name) '
                'FROM public.book '
                'JOIN public.book_author '
                'ON public.book.id = public.book_author.book_id '
                'JOIN public.author '
                'ON public.book_author.author_id = public.author.id '
                'WHERE isbn LIKE :search_like '
                'OR to_tsvector(title) @@ to_tsquery(:search) '
                'GROUP BY public.book.id',
                {'search': form.search.data.strip().replace(' ', ' & '),
                 'search_like': '%'+form.search.data+'%'}
            ).fetchall()
            results = (s_q_books, s_q_authors)
            return render_template('search.html', form=form, results=results)
    if not form.validate_on_submit():
        for field in form.errors:
            for err in form.errors[field]:
                flash(f'{field}: {err}', 'alert')
    return render_template('search.html', form=form, results=None)
@app.route('/book/<string:book_isbn>', methods=['GET', 'POST'])
@login_only
def book(book_isbn):
    """Book detail page: own-API data, Goodreads stats, and user reviews."""
    form = ReviewForm()
    # call application own API to get book details
    r_api = requests.get(f'{api_host}{book_isbn}')
    if r_api.status_code != 200:
        return abort(404)
    try:
        book_json = r_api.json()
    except JSONDecodeError:
        return abort(500)
    # call goodreads API to get book details (best effort; page renders
    # without it when the call or the JSON parse fails)
    try:
        r_gr_api = requests.get(
            "https://www.goodreads.com/book/review_counts.json",
            params={"key": gr_api_key, "isbns": book_isbn})
    except requests.RequestException:
        r_gr_api = None
    try:
        gr_api_json = r_gr_api.json()
    except JSONDecodeError:
        gr_api_json = None
    # check for book id
    book_id = db.execute(
        'SELECT id FROM public.book '
        'WHERE isbn = :isbn',
        {'isbn': book_json['isbn']}
    ).fetchone()
    # insert or update review if form submitted (one review per user/book,
    # enforced by the ON CONFLICT upsert)
    if request.method == 'POST' and form.validate_on_submit():
        db.execute(
            'INSERT INTO public.user_book (user_id, book_id, score, review) '
            'VALUES (:user_id, :book_id, :score, :review) '
            'ON CONFLICT (user_id, book_id) '
            'DO UPDATE SET score = :score, review = :review',
            {"user_id": session['user_id'],
             "book_id": book_id[0],
             "score": form.rating.data,
             "review": form.review.data
             })
        db.commit()
        return redirect(url_for('book', book_isbn=book_isbn))
    # get reviews (newest first)
    r_q = db.execute(
        'SELECT public.user_book.*, '
        'public.user.name '
        'FROM public.user_book '
        'JOIN public.user '
        'ON public.user.id = public.user_book.user_id '
        'WHERE book_id = :book_id '
        'ORDER BY (id) DESC ',
        {"book_id": book_id[0]}
    ).fetchall()
    # build list of reviews and check if current user already sent a
    # review; if so, pre-fill the form with their existing one
    reviews = []
    already_review = False
    for r_api in r_q:
        if r_api[5] == session['user']:
            already_review = True
            form.review.default = r_api[4]
            form.rating.default = int(r_api[3])
            form.process()
        reviews.append(
            {'user': r_api[5], 'rating': r_api[3], 'review': r_api[4]})
    return render_template(
        'book.html',
        form=form,
        book_json=book_json,
        gr_api_json=gr_api_json,
        reviews=reviews,
        already_review=already_review
    )
@app.route('/author/<string:author_id>', methods=['GET'])
@login_only
def author(author_id):
    """Author page: local data plus a Goodreads bio, and the author's books."""
    # check author id and goodreads author id
    a_q = db.execute(
        'SELECT public.author.id, '
        'public.author.name, '
        'public.author_gr_map.author_id_gr, '
        'public.author_gr_map.author_ph_gr '
        'FROM public.author '
        'LEFT JOIN public.author_gr_map '
        'ON public.author.id = public.author_gr_map.author_id '
        'WHERE public.author.id = :author_id ',
        {'author_id': int(author_id)}
    ).fetchone()
    # check goodreads api and get xml formatted author info
    try:
        r_gr_a = requests.get(
            f'https://www.goodreads.com/author/show/'
            f'{a_q.author_id_gr}?format=xml&key={gr_api_key}')
    except requests.RequestException:
        r_gr_a = None
    # set author description to None if no data received, else parse xml data
    # NOTE(review): if the request above raised, r_gr_a is None and
    # r_gr_a.status_code will itself raise AttributeError -- confirm.
    if r_gr_a.status_code != 200:
        dsc = None
    else:
        tree = ElementTree(fromstring(r_gr_a.text))
        root = tree.getroot()
        # root[1][8] is the <about> field of the Goodreads author payload
        # -- presumably; verify against the API response layout.
        dsc = root[1][8].text
    # prepare author's books (newest first)
    a_books = db.execute(
        'SELECT public.book.* '
        'FROM public.book_author '
        'JOIN public.book '
        'ON public.book.id = public.book_author.book_id '
        'WHERE author_id = :author_id '
        'ORDER BY (year) DESC ',
        {'author_id': int(author_id)}
    ).fetchall()
    return render_template(
        'author.html',
        a_q=a_q,
        dsc=dsc,
        a_books=a_books)
@app.errorhandler(Exception)
def error(err):
    """Render a friendly error page for HTTP and unexpected exceptions."""
    if isinstance(err, HTTPException):
        return render_template('error.html', http_err=err), err.code
    # non-HTTP exceptions
    print(err)
    return render_template('error.html')
|
992,578 | 69294457d0cb1086415ee129248ef4a83aab473d | from datetime import datetime
from flaskmov import db, login_manager
from flask_login import UserMixin
from io import TextIOWrapper
import csv
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login hook: look up a User by its (stringified) primary key."""
    return User.query.get(int(user_id))
class User(db.Model, UserMixin):
    """Registered account; UserMixin provides the Flask-Login interface."""
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(20), unique=True, nullable=False)
    email = db.Column(db.String(120), unique=True, nullable=False)
    # image_file = db.Column(db.String(20), nullable=False, default='default.jpg')
    # password stores the werkzeug-generated hash, not plaintext.
    password = db.Column(db.String(60), nullable=False)

    def __repr__(self):
        # Bug fix: the old repr referenced self.image_file, whose column
        # is commented out above, so every repr() raised AttributeError.
        return f"User('{self.username}', '{self.email}')"
class Moviedata(db.Model):
    """Catalog row for one movie, imported from the TMDB-style CSV dump."""
    id = db.Column(db.Integer, primary_key=True)
    number = db.Column(db.Integer)
    overview = db.Column(db.Text)
    poster_path = db.Column(db.Text)
    release_date = db.Column(db.Text)
    title = db.Column(db.Text)
    # genre1/genre2 are numeric genre ids -- presumably TMDB genre codes;
    # confirm against the import CSV.
    genre1 = db.Column(db.Integer)
    genre2 = db.Column(db.Integer)
    def __repr__(self):
        return f"('{self.title}', '{self.overview}','{self.poster_path}', '{self.release_date}','{self.genre1}', '{self.genre2}')"
class UserWatchlist(db.Model):
    """Movie a user has marked as wanting to watch."""
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.Text, nullable=False)
    movie_id = db.Column(db.Integer)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    def __repr__(self):
        # NOTE(review): the format string has one placeholder but two
        # arguments; user_id is silently ignored.
        return '<UserWatchlist {}>'.format(self.title, self.user_id)
class DontWatchlist(db.Model):
    """Movie a user has marked as not interested."""
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.Text, nullable=False)
    movie_id = db.Column(db.Integer)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    def __repr__(self):
        # NOTE(review): one placeholder, two arguments -- user_id ignored.
        return '<DontWatchlist {}>'.format(self.title, self.user_id)
class RecomendedList(db.Model):
    """Movie title recommended to a user (sic: 'Recomended')."""
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.Text, nullable=False)
    # movie_id = db.Column(db.Integer)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    def __repr__(self):
        return '<RecomendedList {}>'.format(self.title)
###################################################################################### Dodgey code
# file_name = "D:\\AuthWorking\\06-Login-Auth\\flaskblog\\Use.csv"
# def runner():
# with open(file_name, 'r', encoding='utf-8') as f:
# new_reader = csv.reader(f)
# rows=list(new_reader)
# for row in rows:
# movie = Moviedata(number=row[0],overview=row[1]
# ,poster_path=row[2],release_date=row[3],title=row[4],
# genre1=row[5], genre2=row[6])
# db.session.add(movie)
# db.session.commit()
# runner() |
992,579 | c6f7b1d24305dee67c2ae3c8f4145efb1c3ef2fb | # -*- coding:utf-8 -*-
# @Time :2019/12/17 20:07
# @Author :testcode_susu
# @Email :2804555260@qq.com
# @File :__init__.py.py |
992,580 | 71799cce298dda54761d5cd1dfabb2613fbacdf7 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jul 6 16:26:15 2018
@author: "Anirban Das"
"""
from timeit import default_timer as timer
import os
import json
import datetime
import sys, logging
import uuid , tempfile
import azure.functions as func
sys.path.append("/home/site/wwwroot/audio-pipeline/")
sys.path.append("/home/site/wwwroot/audio-pipeline/sphinxbase")
sys.path.append("/home/site/wwwroot/audio-pipeline/pocketsphinx")
#logging.info(f"-------------------------------------- {sys.path} ---------- {os.getcwd()}")
from pocketsphinx import Pocketsphinx, get_model_path, get_data_path, AudioFile
# Initialize global variables -----------------------------------------------------------
container_name = os.getenv('CONTAINER_NAME') #this gives the container name in which the code executes
blob_container_name = "myblobcontainer"  # destination blob container for results
logging.basicConfig(format='%(asctime)s %(name)-20s %(levelname)-5s %(message)s')
logger = logging.getLogger(__name__)
def checkLambdaStatus():
    """Report whether this function instance is warm or cold.

    A marker file in /tmp survives between invocations of the same
    container.  If it exists the instance is 'warm' and its invocation
    counter is incremented in place; otherwise the file is created
    (best effort) and ('cold', 1) is returned.

    Returns:
        (status, invocation_count) where status is 'warm' or 'cold'.
    """
    if os.path.isfile("/tmp/perf_det.txt"):
        with open("/tmp/perf_det.txt", "r+") as out:
            # Record format: uuid^<v>|modified_time^<iso>|invocation_count^<n>
            line = out.readline().strip()
            uuid_val = line.split('|')[0].split('^')[-1]
            invocation_count = int(line.split('|')[2].split('^')[-1].strip()) + 1
            out.seek(0)
            out.write("uuid^{}|modified_time^{}|invocation_count^{}".format(
                uuid_val, datetime.datetime.utcnow().isoformat(), invocation_count))
            out.truncate()
        return 'warm', invocation_count
    else:
        try:
            uuid_val = str(uuid.uuid4())
            with open("/tmp/perf_det.txt", "w") as out:
                out.write("uuid^{}|modified_time^{}|invocation_count^{}".format(
                    uuid_val, datetime.datetime.utcnow().isoformat(), 1))
        except OSError:
            # Best effort: a read-only /tmp must not break the invocation.
            # (Narrowed from a bare `except: pass`, which also swallowed
            # KeyboardInterrupt and programming errors.)
            pass
        return 'cold', 1
# Initiates and returns a pocket sphinx decoder
def getPockerSphinxDecoder():
    """Build a Pocketsphinx decoder configured with the bundled en-US models."""
    model_path = get_model_path()
    data_path = get_data_path()
    config = {
        'verbose': False,
        'hmm': os.path.join(model_path, 'en-us'),  # acoustic model
        'lm': os.path.join(model_path, 'en-us.lm.bin'),  # language model
        'dict': os.path.join(model_path, 'cmudict-en-us.dict'),  # pronunciation dictionary
        # Speed/accuracy tuning knobs, deliberately left at defaults:
        # 'topn': 2,
        # 'ds':2,
        # 'maxwpf': 5,
        # 'maxhmmpf': 3000
    }
    return Pocketsphinx(**config)
# Code for doing the actual speech 2 text conversion
def getSpeech2Text(blobin: func.InputStream, blobout: func.Out[bytes], context: func.Context, func_start):
    """Transcribe an audio blob with Pocketsphinx and write a JSON result blob.

    The input blob name encodes metadata separated by '^' (URL-escaped on
    the way in); the output JSON carries the transcript plus timing and
    warm/cold instance statistics.
    """
    global blob_container_name
    global container_name
    try:
        dictionary = {}
        # Pocketsphinx needs a real file path, so spool the blob to /tmp.
        input_file = tempfile.NamedTemporaryFile(delete=False)
        with open(input_file.name, 'wb') as f:
            f.write(blobin.read())
        filename = blobin.name
        filename = filename.split(os.sep)[-1]
        # Undo the URL escaping applied to the metadata separators.
        filename = filename.replace("%3A", ":")
        filename = filename.replace("%40", "@")
        filename = filename.replace("%5E", "^")
        PS_DECODER = getPockerSphinxDecoder()
        PS_DECODER.decode(audio_file=input_file.name,
                          buffer_size=2048,
                          no_search=False,
                          full_utt=False)
        translation = PS_DECODER.hypothesis()
        status, invocation_count = checkLambdaStatus()
        filename = filename.split(os.sep)[-1]
        dictionary["filename"] = filename.split('^')[0]
        dictionary["container_name"] = str(container_name)
        dictionary["blob_size"] = str(blobin.length)
        dictionary["edgeuploadutctime"] = filename.split('^')[1] if '^' in filename else ""
        dictionary["translation"] = translation
        dictionary["invoke_time"] = ""
        dictionary["func_start"] = func_start
        dictionary["eventTime"] = ""
        dictionary["lambdastatus"] = status
        dictionary["invocation_count"] = invocation_count
        dictionary["container_name"] = str(container_name)
        dictionary["funccompleteutctime"] = datetime.datetime.utcnow().isoformat()
        json_payload = json.dumps(dictionary)
        blobout.set(json_payload)  # write the output to the out blob
        os.remove(input_file.name)
    except Exception as e:
        # Bug fix: the original bare `except :` also caught SystemExit /
        # KeyboardInterrupt, and printed sys.exc_info()[0] -- just the
        # exception *type*, losing the message.
        print("Exception occured during prediction: %s" % e)
        # NOTE(review): exiting with code 0 hides the failure from the
        # platform -- confirm whether a non-zero exit is acceptable.
        sys.exit(0)
992,581 | 7f125bb7cf869ef14b1e0d1127e16ee7c5a48d98 | import requests
from bs4 import BeautifulSoup
import pprint
# Fetch the Hacker News front page and pre-select the story rows.
response = requests.get('http://news.ycombinator.com/news')
soup = BeautifulSoup(response.text, 'html.parser')
links = soup.select('.titlelink')  # story title anchors
subtext = soup.select('.subtext')  # score/author row under each title
def create_custom_hn(links, subtext, rank=100):
    """Build the stories with more than `rank` votes, highest votes first.

    Args:
        links: title-anchor tags (``.titlelink``); must be parallel to
            `subtext`, the score rows (``.subtext``) for the same stories.
        rank: minimum vote count (exclusive) for a story to be kept.

    Returns:
        List of {'title', 'link', 'votes'} dicts sorted by votes descending.
    """
    hn = []
    for idx, item in enumerate(links):
        title = item.getText()
        href = item.get('href', None)
        vote = subtext[idx].select('.score')
        # Rows without a score element (e.g. job postings) are skipped.
        if vote:
            points = int(vote[0].getText().replace(' points', ''))
            hn.append({'title': title, 'link': href, 'votes': points})
    return sorted(
        (story for story in hn if story['votes'] > rank),
        key=lambda story: story['votes'],
        reverse=True,
    )
# Keep stories with more than 200 votes, persist them, then pretty-print.
results = create_custom_hn(links, subtext, 200)
with open('results.txt', 'w') as file:
    file.write(str(results))
pprint.pprint(results)
992,582 | fb6e57692f92fbd5e7310f7df272c55e2ce0aee3 | from django.conf.urls import url
from . import views
# URL routes for this app: the site root and the EasyRider page.
urlpatterns = [
    url(r'^$', views.homepage, name='home'),
    url(r'^easyrider/$', views.riderpage, name='rider'),
]
|
# Identifiers for the supported translation back-ends.
DEEPL_ENGINE = "deepl"
GOOGLE_ENGINE = "google"
BING_ENGINE = "bing"

# Default round-trip translation chain: EN -> DE -> ES -> DE -> EN,
# alternating between the Bing and Google engines.
DEFAULT_SEQUENCE = [
    {
        "source": "EN",
        "target": "DE",
        "engine": BING_ENGINE
    },
    {
        "source": "DE",
        "target": "ES",
        "engine": GOOGLE_ENGINE
    },
    {
        "source": "ES",
        "target": "DE",
        "engine": BING_ENGINE
    },
    {
        "source": "DE",
        "target": "EN",
        "engine": GOOGLE_ENGINE
    },
]
|
992,584 | 4ea5275a94cbd585d046fec209eb19c9ee2da419 | import sys
import os
from PyQt4 import QtGui
from PyQt4 import QtCore,uic
import labrad
import time
REFRESHTIME = .5 #in sec how often PMT is updated
class PMT_CONTROL(QtGui.QWidget):
    """Qt front-end for the PMT server: toggles acquisition, shows the
    latest count on an LCD, and lets the user adjust the collection period
    or start a new dataset."""
    def __init__(self, server, parent=None):
        QtGui.QWidget.__init__(self, parent)
        # Locate and load the .ui layout relative to LABRADPATH.
        basepath = os.environ.get('LABRADPATH', None)
        if not basepath:
            raise Exception('Please set your LABRADPATH environment variable')
        path = os.path.join(basepath, 'lattice/clients/qtui/pmtfrontend.ui')
        uic.loadUi(path, self)
        self.server = server
        # connect functions
        self.pushButton.toggled.connect(self.on_toggled)
        # Poll in a worker thread; when one run finishes, restart the thread
        # as long as the acquisition button is still checked.
        self.thread = dataGetter(self.server, parent=self)
        self.thread.gotNewData.connect(self.onNewData)
        self.thread.finished.connect(lambda: self.thread.start() if self.pushButton.isChecked() else None)
        # Mirror the server's current state in the UI widgets.
        isreceiving = self.server.isreceiving()
        self.setText(self.pushButton)
        self.pushButton.setChecked(isreceiving)
        dataset = self.server.currentdataset()
        self.lineEdit.setText(dataset)
        duration = self.server.getcollectionperiod()
        self.doubleSpinBox.setValue(duration)
        self.doubleSpinBox.valueChanged.connect(self.onNewDuration)
        self.newSet.clicked.connect(self.onNewSet)
    def on_toggled(self, state):
        """Start or stop server-side acquisition when the button toggles."""
        if state:
            self.server.startreceiving()
            self.thread.start()
        else:
            self.server.stopreceiving()
            self.lcdNumber.display(0)
        self.setText(self.pushButton)
    def onNewSet(self):
        """Ask the server for a fresh dataset and display its name."""
        newset = self.server.newdataset()
        self.lineEdit.setText(newset)
    def setText(self, obj):
        """Label a checkable button 'ON'/'OFF' to match its checked state."""
        state = obj.isChecked()
        if state:
            obj.setText('ON')
        else:
            obj.setText('OFF')
    def onNewData(self, count):
        """Show the latest PMT count on the LCD display."""
        self.lcdNumber.display(count)
    def onNewDuration(self, value):
        """Push a new collection period to the server."""
        self.server.setcollectionperiod(value)
class dataGetter(QtCore.QThread):
    """Worker thread that fetches one PMT reading, emits it, then sleeps.

    NOTE(review): run() executes once per start(); the owning widget
    restarts the thread from its `finished` signal while acquisition is on.
    """
    gotNewData = QtCore.pyqtSignal(float)
    def __init__(self, server, parent=None,):
        QtCore.QThread.__init__(self, parent)
        self.server = server
    def run(self):
        # getnextreadings(1) returns a sequence of readings; [0][1] picks the
        # second field of the first one - presumably the count. TODO confirm.
        count = self.server.getnextreadings(1)[0][1]
        self.gotNewData.emit(count)
        time.sleep(REFRESHTIME)
if __name__ == "__main__":
    # Connect to LabRAD, grab the PMT server, and run the Qt GUI.
    cxn = labrad.connect()
    server = cxn.pmt_server
    app = QtGui.QApplication(sys.argv)
    icon = PMT_CONTROL(server)
    icon.show()
    app.exec_()
|
992,585 | 617fec84bcdb6e4a8a657c943ad167fdef57748f | from django.conf.urls.static import static
from django.urls import path, include
urlpatterns = [
path('post/',include('app.dashboard.writer.post.urls')),
] |
992,586 | d284540c6a617066f9cf049f3d0eee91db8a7523 | # -*- coding: utf-8 -*-
##############################################################################
#
# NCTR, Nile Center for Technology Research
# Copyright (C) 2011-2012 NCTR (<http://www.nctr.sd>).
#
##############################################################################
from osv import osv, fields
from tools.translate import _
import time
import netsvc
#----------------------------------------
# Class fleet vehicles
#----------------------------------------
class fleet_vehicles(osv.osv):
    """Extend fleet.vehicle with admin-affaire fields and a
    draft / confirmed / cancel workflow."""
    STATE_SELECTION = [
        ('draft', 'Draft'),
        ('confirmed', 'Confirmed'),
        ('cancel', 'Cancel'),
    ]
    _inherit = "fleet.vehicle"
    # Most columns become read-only once the record is confirmed or cancelled.
    _columns = {
        #'name': fields.function(_complete_name, type='char', size=256, string="Vehicle Name",store=True),
        'year':fields.many2one('manufacturing.year','Year',states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]}),
        'depracc':fields.many2one('account.account',string='Depreciation Account',required=False,states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]}),
        #'schedname':fields.many2one('fleet.service.templ','PM Schedule',help="Preventive maintainance schedule for this vehicle",required=False,states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]}), must be check with car maintenance module
        'type': fields.selection([
            ('truck','Truck'),
            ('bus','Bus'),
            ('car','Car'),('generator','Generator')], 'Class', required=True,states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]},),
        'status': fields.selection([
            ('active','Active'),
            ('inactive','InActive'),
            ('outofservice','Out of Service'),
            ], 'status', required=True,states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]},),
        'ownership': fields.selection([
            ('owned','Owned'),
            ('rented','Rented'),('generator','Generator'),('mile','Instead mile'),
            ], 'Ownership', required=True,states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]},),
        'primarymeter':fields.selection([
            ('odometer','Odometer'),
            ('hourmeter','Hour Meter'),
            ],'Primary Meter',required=True,states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]},),
        'company_id':fields.many2one('res.company','Company',required=True,states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]},),
        'startodometer':fields.integer('Start Odometer',required=True,states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]}),
        'cmil':fields.float('Current Mileage',digits = (16,3),states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]}),
        'bmil':fields.float('Base Mileage',digits=(16,3),help="The last recorded mileage",states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]}),
        'bdate':fields.date('Recorded Date',help="Date on which the mileage is recorded",states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]}),
        #'location':fields.many2one('stock.location','Stk Location',help="Select the stock location or create one for each vehicle(recommended) so that the spares, tyres etc are assossiated with the vehicle when issued",required=False,states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]}),
        'department_id':fields.many2one('hr.department','Department',states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]}),
        'machine_no': fields.char('Machine No', size=64,states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]}),
        'employee_id' :fields.many2one('hr.employee', 'Employee',states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]}),
        'state': fields.selection(STATE_SELECTION,'State', readonly=True, select=True),
        'user_id': fields.many2one('res.users', 'Responsible', readonly=True,states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]} , ),
        'notes': fields.text('Notes', size=256 ,states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]}),
        'serial':fields.char('productSerial #',size=50,states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]}),
        'machine_capacity':fields.char('Machine Capacity',size=50,states={'confirmed':[('readonly',True)],'cancel':[('readonly',True)]}),
    }
    _defaults = {
        'state': 'draft',
        # The record's creator becomes the responsible user by default.
        'user_id': lambda self, cr, uid, context: uid,
    }
    def confirmed(self, cr, uid, ids, context=None):
        """
        Workflow function to change state to confirmed.
        @return: Boolean True
        """
        self.write(cr, uid, ids, {'state':'confirmed'})
        return True
    def cancel(self, cr, uid, ids, notes='', context=None):
        """
        Workflow function changes order state to cancel and writes note
        which contains Date and username who do cancellation.
        @param notes: contains information of who & when cancelling order.
        @return: Boolean True
        """
        # NOTE(review): the incoming `notes` argument is discarded here.
        notes = ""
        u = self.browse(cr, uid, ids)[0].user_id.name
        notes = notes +'\n'+'vehicle Cancelled at : '+time.strftime('%Y-%m-%d') + ' by '+ u
        self.write(cr, uid, ids, {'state':'cancel','notes':notes})
        return True
    def ir_action_cancel_draft(self, cr, uid, ids, context=None):
        """
        Changes state to Draft and reset the workflow.
        @return: Boolean True
        """
        if not len(ids):
            return False
        wf_service = netsvc.LocalService("workflow")
        for s_id in ids:
            self.write(cr, uid, s_id, {'state':'draft'})
            # Delete and recreate the workflow instance to restart it.
            wf_service.trg_delete(uid, 'fleet.vehicle', s_id, cr)
            wf_service.trg_create(uid, 'fleet.vehicle', s_id, cr)
        return True
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
992,587 | 3d3e0adc0f9fd29f33105ca0dee8580641e15032 |
def getWarmingLevels(scenario, warmingLev):
    """Return, per model run ('r1'..'r7'), the year that run reaches the
    requested global warming level under the given emission scenario.

    Only the 'rcp85' scenario and warming levels 1.5, 2.0 and 3.0 are
    supported; anything else raises Exception.
    """
    # Year each run crosses the warming threshold, keyed by level.
    yearsByLevel = {
        1.5: {'r1': 2024, 'r2': 2033, 'r3': 2025, 'r4': 2028,
              'r5': 2029, 'r6': 2025, 'r7': 2027},
        2.: {'r1': 2033, 'r2': 2049, 'r3': 2037, 'r4': 2042,
             'r5': 2046, 'r6': 2035, 'r7': 2039},
        3.: {'r1': 2053, 'r2': 2077, 'r3': 2055, 'r4': 2066,
             'r5': 2074, 'r6': 2052, 'r7': 2061},
    }
    if scenario != 'rcp85':
        raise Exception('scenario not supported: ' + scenario)
    if warmingLev not in yearsByLevel:
        raise Exception('warming level not supported: ' + str(warmingLev))
    return yearsByLevel[warmingLev]
|
992,588 | b3603ea148016fcd7433a18427e8a997fcc6b044 | import ast
import math
import dateutil.parser
import requests
import json
import copy
import mysql
import xlrd
import xlwt
import datetime
from mysql import connector
class Excel_Data:
    """Excel-driven smoke test for the candidate resume-search API.

    Reads request payloads and expected candidate ids from an input .xls
    sheet, replays them against the hirepro API and writes a pass/fail
    report workbook. (Python 2 code - note the print statements.)
    """
    def __init__(self):
        self.xl_json_request = []
        self.xl_excepted_candidate_id = []
        self.rownum = 1
        # Candidate-id pool injected into every request as a boundary filter.
        self.boundary_range = [1119500,1265125,1120101,1217454,1120106,1120037,1223268,1217446,1117681,1222591,1116524,
                               1119509,1228469,1218132,1222838,1116199,1117669,1223237,1219415,1223794,1120109,1217637,
                               1119500,1265125,1217454,1120106,1120037,1223268,1217446,1117681,1222591,1119509,1117669,
                               1116524,1116199,1222838,1223237,1218132,1228469,1217637,1219415,1119500,1120101,1117681,
                               1116524,1120109,1265125,1119500,1120101,1217446,1116524,1217454,1116199,1218132,1217637,
                               1116524,1217454,1120037,1228469,1218132,1222838,1217637,1223237,1116569,1264533,1262975,
                               1217539,1217544,1116502,1219552,1217525,1264523,1116410,1217480,1117683,1219465,1119518,
                               1116435,1120130,1217525,1217544,1217539,1217532,1265118,1116457,1116571,1116400,1219467,
                               1222518,1117686,1217618,1120143,1217566,1217608,1116341,1116474,1217627,1116351,1116313,
                               1217742,1116341,1117813]
        # Cell styles: header (bold), plain, failure (red), success (green).
        self.__style0 = xlwt.easyxf('font: name Times New Roman, color-index black, bold on')
        self.__style1 = xlwt.easyxf('font: name Times New Roman, color-index black, bold off')
        self.__style2 = xlwt.easyxf('font: name Times New Roman, color-index red, bold on')
        self.__style3 = xlwt.easyxf('font: name Times New Roman, color-index green, bold on')
        self.wb_result = xlwt.Workbook()
        self.ws = self.wb_result.add_sheet('Candidate Resume Search Result')
        self.ws.write(0, 0, 'Request', self.__style0)
        self.ws.write(0, 1, 'API Count', self.__style0)
        self.ws.write(0, 2, 'Expected Candidate Id\'s', self.__style0)
        self.ws.write(0, 3, 'Not Matched Id\'s', self.__style0)
        now = datetime.datetime.now()
        self.__current_DateTime = now.strftime("%d-%m-%Y")
        # Log in once and keep the auth token for every later request.
        # NOTE(review): credentials are hard-coded - move them to config.
        header = {"content-type": "application/json"}
        data = {"LoginName": "admin", "Password": "rpo@1234", "TenantAlias": "rpotestone", "UserName": "admin"}
        response = requests.post("https://amsin.hirepro.in/py/common/user/login_user/", headers=header,
                                 data=json.dumps(data), verify=True)
        self.TokenVal = response.json()
        print self.TokenVal.get("Token")
        # Load the request payloads and expected id lists from the input sheet.
        wb = xlrd.open_workbook('C:\PythonAutomation\InputForJobOfferResumeSearch\ResumeSerachInput.xls')
        sheetname = wb.sheet_names()  # Reading XLS Sheet names
        sh1 = wb.sheet_by_index(0)  #
        i = 1
        for i in range(1, sh1.nrows):
            rownum = (i)
            rows = sh1.row_values(rownum)
            self.xl_json_request.append(rows[0])
            self.xl_excepted_candidate_id.append(str(rows[1]))
        local = self.xl_excepted_candidate_id
        length = len(self.xl_excepted_candidate_id)
        self.new_local = []
        # Expected ids arrive as comma-separated float strings; coerce to int.
        for i in range(0, length):
            j = [int(float(b)) for b in local[i].split(',')]
            self.new_local.append(j)
        self.xl_expected = self.new_local
    def json_data(self):
        """Issue the search request once to learn status and total hit count."""
        r = requests.post("https://amsin.hirepro.in/py/rpo/get_all_candidates/", headers=self.headers,
                          data=json.dumps(self.data, default=str), verify=False)
        # print self.data
        resp_dict = json.loads(r.content)
        self.status = resp_dict['status']
        print resp_dict
        if self.status == 'OK':
            self.count = resp_dict['TotalItem']
            # 200 results per page; round up to get the page count.
            self.total_pages1 = float(self.count)/200
            self.total_pages = math.ceil(self.total_pages1)
            self.total_pages = int(self.total_pages)
        else:
            # Sentinel meaning "API failed"; checked later in all().
            self.count = "400000000000000"
    def json_data_iteration(self, data, iter):
        """Walk every result page, collecting the returned candidate ids."""
        iter += 1
        self.actual_ids = []
        for i in range(1, iter):
            self.data["PagingCriteria"]["PageNo"] = i
            r = requests.post("https://amsin.hirepro.in/py/rpo/get_all_candidates/", headers=self.headers,
                              data=json.dumps(data, default=str), verify=False)
            resp_dict = json.loads(r.content)
            for element in resp_dict["Candidates"]:
                self.actual_ids.append(element["Id"])
    def all(self):
        """Run every request from the sheet and write the result workbook."""
        tot_len = len(self.xl_json_request)
        for i in range(0, tot_len):
            print "Iteration Count :- %s " % i
            self.xl_request = json.loads(self.xl_json_request[i])
            self.xl_request1 = copy.deepcopy(self.xl_request)
            # Constrain every request to the boundary-range candidate pool.
            if self.xl_request.get("CandidateId"):
                self.xl_request["CandidateId"] = self.boundary_range
            else:
                val = [("CandidateIds", self.boundary_range)]
                id_filter = dict(val)
                self.xl_request.update(id_filter)
            self.headers = {"content-type": "application/json", "X-AUTH-TOKEN": self.TokenVal.get("Token")}
            self.data = {"PagingCriteria": {"MaxResults": 200, "PageNo": 1}, "CandidateFilters": self.xl_request}
            print self.data
            self.json_data()
            self.total_api_count = self.count
            if self.count != "400000000000000":
                self.data["PagingCriteria"] = {"IsRefresh": False, "MaxResults": 200, "PageNo": 1, "ObjectState": 0}
                print self.data
                # print self.total_pages
                self.json_data_iteration(self.data, self.total_pages)
                self.mismatched_id = set(self.xl_expected[i]) - set(self.actual_ids)
            expected_id = str(self.xl_expected[i])
            expected_id = expected_id.strip('[]')
            # NOTE(review): on the first failed request self.mismatched_id was
            # never set, so this raises AttributeError - verify intent.
            mismatched_id = str(list(self.mismatched_id))
            mismatched_id = mismatched_id.strip('[]')
            self.ws.write(self.rownum, 0, str(self.xl_request1))
            # NOTE(review): this compares the int count against the list of
            # expected-id strings, so it can never be equal - verify intent.
            if self.total_api_count == self.xl_excepted_candidate_id:
                self.ws.write(self.rownum, 1, self.total_api_count, self.__style3)
                self.ws.write(self.rownum, 2, expected_id, self.__style3)
            elif self.total_api_count == '400000000000000':
                print "API Failed"
                self.ws.write(self.rownum, 1, "API Failed", self.__style2)
                self.ws.write(self.rownum, 2, expected_id, self.__style3)
                self.ws.write(self.rownum, 3, "API Failed", self.__style2)
            else:
                print "this is else part \ n"
                self.ws.write(self.rownum, 1, self.total_api_count, self.__style3)
                self.ws.write(self.rownum, 2, expected_id, self.__style3)
                self.ws.write(self.rownum, 3, mismatched_id, self.__style2)
            # Save after every row so a crash still leaves a partial report.
            self.wb_result.save(
                'C:\PythonAutomation\SearchInResumeResults/'
                + self.__current_DateTime + '_Resume_Search.xls')
            # print statusCode, " -- ", b
            self.rownum = self.rownum + 1
# Script entry point: run the whole Excel-driven search suite.
print "Resume Search Script Started"
xlob = Excel_Data()
xlob.all()
print "Completed Successfully "
992,589 | 687a0f204fffa89f47ee5aced772dbaead5500c3 | class LIST:
def __init__(self):
self.l=[]
def create(self):
n=input("Enter length of list")
for i in range(n):
a=input("Enter")
self.l.append(a)
def display(self):
print "List:",
for char in self.l:
print char,
print
def bubblesort(self):
for i in range(len(self.l)-1):
for j in range(len(self.l)-i-1):
if self.l[j]>self.l[j+1]:
self.l[j],self.l[j+1]=self.l[j+1],self.l[j]
print "List after ",i+1, " iteration = ",self.l
def linearsearch(self,n):
if len(self.l)==0:
print "Empty list"
else:
for i in range(len(self.l)):
if i==n:
avail=1
pos=i+1
break
else:
avail=0
if avail==1:
print "Item found at pos",pos
else:
print "Item not found"
def binarysearch(self,n):
if len(self.l)==0:
print "Empty List"
else:
self.l.sort()
first=0
last=len(self.l)-1
avail=0
while first<=last:
midpoint = (first + last)/2
if self.l[midpoint]==n:
pos=midpoint
avail=1
break
elif n<self.l[midpoint]:
last=midpoint-1
else:
first = midpoint+1
if avail==1:
print "Item found at pos",pos
else:
print "Item not found"
def mergesort(self,newl):
mlist=[]
newl.sort()
i=0
j=0
while i<len(self.l) and j<len(newl):
if self.l[i]<newl[j]:
mlist.append(self.l[i])
i+=1
elif self.l[i]>newl[j]:
mlist.append(newl[j])
j+=1
while i<len(self.l): #if a list has more elements than other
mlist.append(self.l[i])
i+=1
while j<len(newl):
mlist.append(newl[j])
j+=1
print "New list:",mlist |
992,590 | e2aa36a9d5f84720cd8b04264086b723449c0e77 | from django.apps import AppConfig
class TaskBoardConfig(AppConfig):
    """Django application configuration for the task_board app."""
    name = 'task_board'
|
992,591 | c28d8c6a46d5ac83ab9834700fda415da3d721b8 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import json
"""
判断obj对象是否继承至cls
"""
def ischildof(obj, cls):
    """Return True when `obj` (a class, or an instance thereof) inherits
    from `cls`, searching the base-class tree recursively."""
    bases = getattr(obj, '__bases__', None)
    if bases is None:
        # `obj` is an instance, not a class: test its class instead.
        return ischildof(obj.__class__, cls)
    # First pass: direct bases.
    for base in bases:
        if base is cls or isinstance(base, cls):
            return True
    # Second pass: recurse up the inheritance tree.
    return any(ischildof(base, cls) for base in bases)
"""
检查字段是否需要过滤
"""
def checkNoField(key, notFields):
    """Return True when `key` should be filtered out per `notFields`.

    `notFields` may be None (filter nothing), a single field name (str),
    or a list/tuple of field names. Any other type filters nothing.
    """
    if notFields is None:
        return False
    kind = type(notFields)
    if kind == list or kind == tuple:
        return any(key == field for field in notFields)
    if kind == str:
        # A single field name filters exactly that key.
        return key == notFields
    return False
"""
对象转成json字符串
obj 对象
notFields 不需要的字段 如果只过滤一个可以传字符串,否则传列表list或者 元组tuple
"""
def obj2Json(obj, notFields=None):
if(obj == None):
return None
if(isinstance(obj, str) or isinstance(obj, int) or isinstance(obj, float) or isinstance(obj, bool)) :
return str(obj)
if(isinstance(obj, dict)) :
return json.dumps(obj)
if(isinstance(obj, type)) :
return str(obj)
result = "{"
for key, value in vars(obj).items():
if value == None:
continue
# 这个字段是否是不组包的字段
isNotField = checkNoField(key, notFields)
# 不需要组装json的就过滤掉
if isNotField:
continue
result+="\""+key+"\":"
fieldType = type(obj.__getattribute__(key))
if fieldType is tuple or fieldType is list:
result += "["
for item in value:
if isinstance(item, int) == int or isinstance(item, float):
result += str(item)+","
elif isinstance(item, str):
result += "\""+str(item)+"\","
else :
result += obj2Json(item, notFields)+","
if result[-1] == ',':
# 最后一个是,就删除这个,
result = result[:-1]
result += "],"
elif fieldType is type(str) :
result += "\""+str(value)+"\","
else :
result += obj2Json(value)+","
if result[-1] == ',':
# 最后一个是,就删除这个,
result = result[:-1]
result +="}"
return result
"""
把json解析成对象
"""
def parseObj(obj, jsonStr):
    """Deserialize `jsonStr` into `obj`, matching JSON keys to class fields.

    `obj` may be a class (it is instantiated) or an instance. For each JSON
    key that names a class attribute, the value is coerced to the type of
    the attribute's current value and set on the instance. Returns the
    populated object.
    """
    if(type(obj) == type(type)):
        # A class was passed in; instantiate it first.
        obj = obj()
    print(str(type(obj)))
    jsonDict = json.loads(jsonStr)
    objDict = obj.__class__.__dict__.items()
    for key, value in jsonDict.items():
        if(key[0:2] == "__"):
            continue  # skip dunder keys
        for key2, value2 in objDict:
            if(key2[0:2] == "__"):
                continue
            if key == key2:
                # `value2` is the class-level default; its type drives coercion.
                fieldType = type(obj.__getattribute__(key))
                if fieldType == int:
                    value3 = int(value)
                elif fieldType == float:
                    value3 = float(value)
                elif fieldType == bool:
                    value3 = bool(value)
                elif fieldType == str:
                    value3 = str(value)
                elif fieldType == dict:
                    value3 = dict(value)
                elif fieldType == tuple:
                    lenth = len(value)
                    temp = []
                    # NOTE(review): this branch appends to `value3` before it
                    # is ever assigned (and never fills `temp`), so it raises
                    # UnboundLocalError at runtime - needs fixing.
                    for i in range(lenth):
                        # When the default's first element is a class, use it
                        # as the element type for nested parsing.
                        if(len(value2) > 0 and type(value2[0]) == type):
                            pass
                            value3.append(parseListChild(value[i], value2[0]))
                        else:
                            value3.append(parseListChild(value[i]))
                    value3 = tuple(temp)
                elif fieldType == list:
                    lenth = len(value)
                    value3 = []
                    for i in range(lenth):
                        # Same element-type trick as the tuple branch above.
                        if(len(value2) > 0 and type(value2[0]) == type):
                            pass
                            value3.append(parseListChild(value[i], value2[0]))
                        else:
                            value3.append(parseListChild(value[i]))
                elif ischildof(value2, fieldType):
                    tempClass = getattr(obj, key)
                    newClass = tempClass()
                    # `value` is a dict here; str() would use single quotes,
                    # so re-dump it to a double-quoted JSON string first.
                    jsonChildStr = json.dumps(value)
                    value3 = parseObj(newClass, jsonChildStr)
                else:
                    tempClass = getattr(obj, key)
                    newClass = tempClass()
                    # Same as above: re-serialize the nested dict and recurse.
                    jsonChildStr = json.dumps(value)
                    value3 = parseObj(newClass, jsonChildStr)
                setattr(obj, key, value3)
    # try:
    #
    # except Exception as err:
    #     print("parse json error field "+key+" "+str(value), err)
    return obj
"""
处理list里面的内容,再返回给list
"""
def parseListChild(obj, newClass=None):
    """Convert one raw list element during deserialization.

    Dict-like elements (whose str() starts with '{') become instances of
    `newClass` (or stay as a JSON string when no usable class is given);
    all other values pass through unchanged.

    NOTE(review): str(obj)[0] raises IndexError for an empty-string element.
    """
    # A leading '{' means this element is a nested object.
    if str(obj)[0] == '{':
        # `obj` is a dict here; str() would use single quotes, so re-dump it
        # to get a double-quoted JSON string.
        jsonChildStr = json.dumps(obj)
        if newClass == None or newClass == type:
            pass
            return jsonChildStr
        else:
            newValue = parseObj(newClass(), jsonChildStr)
            return newValue
    # TODO (original author's note): without knowing the element type, the
    # code falls back to returning the raw value. Declaring the field with a
    # one-item typed default (e.g. list[Student]) makes the branch above
    # work, but that placeholder item is misleading.
    else:
        return obj
|
992,592 | 5c2e08f18f182d70c3ed6cb4681db1dd250c99e1 | """
Django settings for core project.
Generated by 'django-admin startproject' using Django 3.2.5.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
import os
from pathlib import Path
from datetime import timedelta
# Build paths inside the project like this: BASE_DIR / 'subdir'.
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control - rotate it and load
# it from the environment before deploying.
SECRET_KEY = "django-insecure-+4hy94l^jm8qdx0etu#3_zepvbr^5czq*yaiq!g^#mrx8_+439"

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = ['192.168.1.107','127.0.0.1']

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Project apps
    'blog',
    'blogapi',
    'users',
    # REST framework, CORS and filtering
    'rest_framework',
    'corsheaders',
    'django_filters',
    # oauth2
    'oauth2_provider',
    'social_django',
    'drf_social_oauth2',
    # Rich-text editor
    'ckeditor',
    'ckeditor_uploader',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    # Third-party CORS middleware, placed ahead of CommonMiddleware.
    'corsheaders.middleware.CorsMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'core.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                # Required by social_django
                'social_django.context_processors.backends',
                'social_django.context_processors.login_redirect',
            ],
        },
    },
]

WSGI_APPLICATION = 'core.wsgi.application'

# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}

# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

REST_FRAMEWORK = {
    # NOTE(review): AllowAny opens every endpoint - confirm this is intended.
    'DEFAULT_PERMISSION_CLASSES': [
        'rest_framework.permissions.AllowAny',
    ],
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'oauth2_provider.contrib.rest_framework.OAuth2Authentication',  # django-oauth-toolkit >= 1.0.0
        'drf_social_oauth2.authentication.SocialAuthentication',
    )
}

# Front-end origins allowed to make cross-site requests.
CORS_ALLOWED_ORIGINS = [
    'http://localhost:3000',
    'http://192.168.1.107:3000',
]

# CKEditor configuration
CKEDITOR_UPLOAD_PATH = "uploads/"
CKEDITOR_CONFIGS = {
    "default": {
        "removePlugins": "stylesheetparser",
        'allowedContent': True,
        'toolbar_Full': [
            ['Styles', 'Format', 'Bold', 'Italic', 'Underline', 'Strike', 'Subscript', 'Superscript', '-', 'RemoveFormat' ],
            ['Image', 'Flash', 'Table', 'HorizontalRule'],
            ['TextColor', 'BGColor'],
            ['Smiley','sourcearea', 'SpecialChar'],
            [ 'Link', 'Unlink', 'Anchor' ],
            [ 'NumberedList', 'BulletedList', '-', 'Outdent', 'Indent', '-', 'Blockquote', 'CreateDiv', '-', 'JustifyLeft', 'JustifyCenter', 'JustifyRight', 'JustifyBlock', '-', 'BidiLtr', 'BidiRtl', 'Language' ],
            [ 'Source', '-', 'Save', 'NewPage', 'Preview', 'Print', '-', 'Templates' ],
            [ 'Cut', 'Copy', 'Paste', 'PasteText', 'PasteFromWord', '-', 'Undo', 'Redo' ],
            [ 'Find', 'Replace', '-', 'SelectAll', '-', 'Scayt' ],
            [ 'Maximize', 'ShowBlocks' ]
        ],
    }
}

# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/

STATIC_URL = '/static/'

# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field

DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'

# Use the project's custom user model.
AUTH_USER_MODEL='users.User'

AUTHENTICATION_BACKENDS = (
    # Others auth providers (e.g. Google, OpenId, etc)

    # Facebook OAuth2
    'social_core.backends.facebook.FacebookAppOAuth2',
    'social_core.backends.facebook.FacebookOAuth2',

    # drf_social_oauth2
    'drf_social_oauth2.backends.DjangoOAuth2',

    # Django
    'django.contrib.auth.backends.ModelBackend',
)

# Facebook configuration
# NOTE(review): app key and secret are committed to source control - move
# them to environment variables.
SOCIAL_AUTH_FACEBOOK_KEY = '259454982321379'
SOCIAL_AUTH_FACEBOOK_SECRET = 'ac38ed86a54dba59d908f882e474b4e1'

# Define SOCIAL_AUTH_FACEBOOK_SCOPE to get extra permissions from Facebook.
# Email is not sent by default, to get it, you must request the email permission.
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
SOCIAL_AUTH_FACEBOOK_PROFILE_EXTRA_PARAMS = {
    'fields': 'id, name, email'
}

SOCIAL_AUTH_USER_FIELDS = ['email','username','name','password']

MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
|
992,593 | f69db90d89854185a8b9bd68b7e38edf0017b67d | #!/usr/bin/python
# STANDARD_RIFT_IO_COPYRIGHT
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gi
gi.require_version('CF', '1.0')
gi.require_version('RwTaskletPlugin', '1.0')
gi.require_version('RwTasklet', '1.0')
gi.require_version('RwDts', '1.0')
gi.require_version('RwDtsToyTaskletYang', '1.0')
gi.require_version('RwVcsYang', '1.0')
gi.require_version('RwBaseYang', '1.0')
gi.require_version('RwTypes', '1.0')
import time
from gi.repository import GObject, RwTaskletPlugin
from gi.repository import CF, RwTasklet, RwDts, RwDtsToyTaskletYang
from gi.repository import RwVcsYang
from gi.repository import RwBaseYang, RwTypes
"""This is a basic example of python tasklet. The name
of the Python tasklet class doesn't matter, however it
MUST be derived from GObject.Object and RwTaskletPlugin.Component.
Also note that the methods of the component interface
must be prefixed with "do_". The Peas interface appends this
prefix before invoking the interface functions.
NOTE: As a convention DONT use the do_ prefix for functions that
are not part of the interface.
The VCS framework call the interface functions in the following order
do_component_init
do_instance_alloc
do_instance_start
"""
import logging
import rwlogger
# Module-wide root logger; check_list records lifecycle milestones that are
# appended to as callbacks fire.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()

check_list = [ 'Started' ]
class AppConfGroupCreateFailed(Exception):
    """Raised when dts appconf_group_create does not return SUCCESS."""
    pass
class XPathRegistrationFailed(Exception):
    """Raised when registering an xpath with DTS does not return SUCCESS."""
    pass
class SubscribeInsideXactExample(object):
    """Example of registering a DTS subscription from inside a config
    transaction's prepare callback."""
    # Config xpath for the single company named 'intel'.
    intel_company_xpath = (
        "C,/rw-dts-toy-tasklet:toytasklet-config"
        "/rw-dts-toy-tasklet:company[rw-dts-toy-tasklet:name='intel']"
    )
    # Config xpath matching every company entry.
    all_company_xpath = (
        "C,/rw-dts-toy-tasklet:toytasklet-config"
        "/rw-dts-toy-tasklet:company"
    )
    def __init__(self, tasklet):
        """Hold the owning tasklet; DTS handles are filled in by callbacks."""
        self.tasklet = tasklet
        self.dts_api_h = None
        self.all_app_group = None
        self.one_app_group = None
    def _on_all_company_config_xact_init(self, appconf, xact, ctx):
        """Xact-init callback for the all-company group; returns per-xact
        scratch counters that later callbacks update."""
        xact_scratch = {"all_company": 1, "apply": 0, "prepare": 0, "validate": 0}
        logger.info("%s Got appconf callback for xact init company config ALL (%s): %s", self.__class__.__name__, xact, locals());
        return xact_scratch
    def _on_one_company_config_xact_init(self, appconf, xact, ctx):
        """Xact-init callback for the single-company group; returns per-xact
        scratch counters that later callbacks update."""
        xact_scratch = {"one_company": 1, "apply": 0, "prepare": 0, "validate": 0}
        logger.info("%s Got appconf callback for xact init company config ONE (%s): %s", self.__class__.__name__, xact, locals())
        return xact_scratch
    def _on_config_apply(self, apih, appconf, xact, action, ctx, xact_scratch):
        """Apply-phase callback: bump the scratch counter and deliberately
        flag an issue on the transaction (exercises the error path)."""
        logger.info("%s Got appconf callback for applying xact (%s): %s", self.__class__.__name__, xact, locals())
        if xact_scratch is not None:
            xact_scratch["apply"] = 1
        appconf.xact_add_issue(xact, RwTypes.RwStatus.FAILURE, "This is an add_issue error string")
    def _on_config_validate(self, apih, appconf, xact, action, ctx, xact_scratch):
        """Validate-phase callback: just record that validation ran."""
        logger.info("%s Got appconf callback for validating xact (%s): %s", self.__class__.__name__, xact, locals())
        if xact_scratch is not None:
            xact_scratch["validate"] = 1
    def _on_one_company_config_prepare(self, apih, ac, xact, xact_info, keyspec, msg, ctx, scratch, prepud):
        """Prepare callback for the Intel subscription: count the call and
        acknowledge immediately."""
        logger.info("Got one company config prepare callback: %s", locals())
        scratch["prepare"] += 1
        ac.prepare_complete_ok(xact_info)
    def intel_regready(self, regh, rw_status, user_data):
        """Registration-ready callback: publish the Intel company config
        element at the registered xpath."""
        logger.info("DTS Intel reg_ready called ")
        # Milestone consumed by the test harness.
        check_list.append("multiblock_regready")
        logger.debug(regh)
        logger.debug(rw_status)
        logger.debug(user_data)
        riftiomsg = RwDtsToyTaskletYang.CompanyConfig.new()
        riftiomsg.name = 'Intel'
        riftiomsg.description = 'cloud technologies'
        property1 = riftiomsg.property.add()
        property1.name = 'test-property'
        property1.value = 'test-value'
        self.dts_api_h.trace(self.intel_company_xpath)
        # NOTE(review): self.regh2 is never assigned in this class - it is
        # presumably set externally; verify before relying on this path.
        status = self.regh2.create_element_xpath(self.intel_company_xpath, riftiomsg.to_pbcm())
        logger.info("Publishing Intel config at xpath (%s): %s",
                    self.intel_company_xpath, riftiomsg)
    def _on_all_company_config_prepare(self, apih, ac, xact, xact_info, keyspec, msg, ctx, scratch, prepud):
        """Prepare callback for any company entry: from inside the running
        transaction, create the single-company appconf group and register
        the Intel xpath as a cached subscriber, then acknowledge."""
        logger.info("Got all company config prepare callback: %s, self.one_app_group=%s self.all_app_group=%s", locals(), self.one_app_group, self.all_app_group)
        scratch["prepare"] += 1
        logger.info("Point 1 in all company config prepare callback: %s, self.one_app_group=%s self.all_app_group=%s", locals(), self.one_app_group, self.all_app_group)
        # Take a ref on the other tasklet(?)'s one_app_group so it doesn't go away on us FIX FIX
        self.one_app_group_other = self.one_app_group
        status, self.one_app_group = self.dts_api_h.appconf_group_create(
            None,
            self._on_one_company_config_xact_init,
            None,
            None,
            self._on_config_apply,
            self,
        )
        logger.info("Point 2 in all company config prepare callback: %s, self.one_app_group=%s self.all_app_group=%s", locals(), self.one_app_group, self.all_app_group)
        if status != RwTypes.RwStatus.SUCCESS:
            raise AppConfGroupCreateFailed("Failed to create {} appconf group: status={}".format(
                self.__class__.__name__, status))
        status, cfgreg = self.one_app_group.register_xpath(
            self.intel_company_xpath,
            RwDts.Flag.SUBSCRIBER | RwDts.Flag.CACHE,
            self._on_one_company_config_prepare,
            self
        )
        if status != RwTypes.RwStatus.SUCCESS:
            raise XPathRegistrationFailed("{} failed to register as a subscriber for xpath: {}",
                self.__class__.__name__, self.all_company_xpath)
        logger.debug("%s registered for xpath inside config prepare: %s",
            self.__class__.__name__, self.intel_company_xpath)
        # signal config registration completion
        self.one_app_group.phase_complete(RwDts.AppconfPhase.REGISTER)
        self.all_app_group.prepare_complete_ok(xact_info)
    def register_appconf(self):
        """Create the all-company appconf group, register as subscriber for
        every company entry, and mark the registration phase complete."""
        logger.debug("%s registering appconf", self.__class__.__name__)
        # Take extra ref in case we're in the two "tasklet" scenario
        self.all_app_group_other = self.all_app_group
        status, self.all_app_group = self.dts_api_h.appconf_group_create(
            None,
            self._on_all_company_config_xact_init,
            None,
            None,
            self._on_config_apply,
            self,
        )
        if status != RwTypes.RwStatus.SUCCESS:
            raise AppConfGroupCreateFailed("Failed to create {} appconf group: status={}".format(
                self.__class__.__name__, status))
        logger.debug("%s appconf_group created", self.__class__.__name__)
        status, cfgreg = self.all_app_group.register_xpath(
            self.all_company_xpath,
            RwDts.Flag.SUBSCRIBER,
            self._on_all_company_config_prepare,
            self
        )
        if status != RwTypes.RwStatus.SUCCESS:
            raise XPathRegistrationFailed("{} failed to register as a subscriber for xpath: {}",
                self.__class__.__name__, self.all_company_xpath)
        logger.debug("%s registered for xpath: %s", self.__class__.__name__,
            self.all_company_xpath)
        # signal config registration completion
        self.all_app_group.phase_complete(RwDts.AppconfPhase.REGISTER)
        logger.debug("Creating timer to publish one company in 1 second")
        # Disabled one-shot timer that would call publish_one_company:
        #timer = self.tasklet.taskletinfo.rwsched_tasklet.CFRunLoopTimer(
        #    CF.CFAbsoluteTimeGetCurrent() + 1,
        #    0,
        #    self.publish_one_company,
        #    self.tasklet.taskletinfo.rwsched_instance,
        #    )
        #self.tasklet.taskletinfo.rwsched_tasklet.CFRunLoopAddTimer(
        #    self.tasklet.taskletinfo.rwsched_tasklet.CFRunLoopGetCurrent(),
        #    timer,
        #    self.tasklet.taskletinfo.rwsched_instance.CFRunLoopGetMainMode(),
        #    )
def publish_one_company_config_callback(self, xact, xact_status, user_data):
    """Completion callback for the one-company config publish; always ACKs."""
    logger.info("publish_one_company_config_callback: %s", locals())
    return (RwDts.MemberRspCode.ACTION_OK);
def publish_one_company(self, *args, **kwargs):
    """Publish a single 'Intel' CompanyConfig record via an ADVISE query.

    Extra positional/keyword args are accepted and ignored so this can
    also be used directly as a scheduler timer callback.
    """
    logger.info("Publishing a single company")
    riftiomsg = RwDtsToyTaskletYang.CompanyConfig.new()
    riftiomsg.name = 'Intel'
    riftiomsg.description = 'cloud technologies'
    property1 = riftiomsg.property.add()
    property1.name = 'test-property'
    property1.value = 'test-value'
    # enable DTS tracing for this xpath before sending the query
    self.dts_api_h.trace(self.intel_company_xpath)
    # Begin a DTS transaction to send the config
    xact = self.dts_api_h.query(
        self.intel_company_xpath,
        RwDts.QueryAction.UPDATE,
        RwDts.XactFlag.ADVISE,
        self.publish_one_company_config_callback,
        self,
        riftiomsg.to_pbcm()
    )
    xact.trace()
    logger.info("Publishing one company config at xpath (%s): %s",
                self.intel_company_xpath, riftiomsg)
def rwdts_tasklet_state_change_cb(self, apih, state, user_data):
    """DTS state-change callback: register at INIT, advance to RUN at CONFIG."""
    logger.info("DTS Api init callback: %s", locals())
    self.dts_api_h = apih
    if state == RwDts.State.CONFIG:
        #Call some code here
        self.dts_api_h.state = RwDts.State.RUN
        return
    if not state == RwDts.State.INIT:
        # registrations happen exactly once, at INIT
        return
    self.register_appconf()
    if 1 :
        intel_company_xpath = ( "C,/rw-dts-toy-tasklet:toytasklet-config" "/rw-dts-toy-tasklet:company[rw-dts-toy-tasklet:name='intel']")
        flags = ( RwDts.Flag.PUBLISHER | RwDts.Flag.CACHE | RwDts.Flag.NO_PREP_READ)
        status, regh2 = apih.member_register_xpath(intel_company_xpath,
                                                   None,
                                                   flags,
                                                   RwDts.ShardFlavor.NULL,
                                                   0,
                                                   0,
                                                   -1,
                                                   0,
                                                   0,
                                                   self.intel_regready,
                                                   None,
                                                   None,
                                                   None,
                                                   None,
                                                   self)
        logger.debug("member_register_xpath(cfgpath) returned")
        logger.debug(status);
        if regh2 is None :
            logger.debug("Registration handle is none")
        else:
            logger.debug('registration handle is')
            self.regh2 = regh2
    # tell DTS all registrations are done
    self.dts_api_h.state = RwDts.State.REGN_COMPLETE
class Callback(object):
    """Collection of DTS / scheduler callbacks illustrating the
    PyGObject interface.
    """
def rwsched_timer_callback(self, timer, user_data):
    """Scheduler timer callback (log only).

    The number of arguments must match the C function prototype.
    """
    logger.info("**** Voila1 Python TIMER callback ****")
    logger.debug(timer)
    logger.debug(user_data)
    logger.debug("\n\n")
def rwdts_api_xact_blk_callback(self, xact, xact_status, user_data):
    """Completion callback for a block query: log all results, then errors.

    The number of arguments must match the C function prototype.
    """
    logger.info("**** Voila Python DTS Api Query -block- callback ****")
    logger.info("xact = %s", xact)
    logger.info("user_data = %s", user_data)
    logger.info("xact_status = %s", xact_status)
    # block-query correlation ids start at 1 (see blk.add_query)
    qcorrid = 1
    # Get the query result of the transaction, needs to be against block...
    query_result = xact.query_result(qcorrid)
    # Iterate through the result until there are no more results
    while (query_result is not None):
        # Get the result as a protobuf - this returns a generic protobuf message
        pbcm = query_result.protobuf
        # Convert the generic type to a type that we understand
        company = RwDtsToyTaskletYang.Company.from_pbcm(pbcm)
        # Log the results now
        logger.info("**** Query Result -block- ***** :Corrid %d", qcorrid)
        logger.info("XPath : %s", query_result.xpath)
        logger.info("Query Result is")
        logger.info(query_result)
        logger.info(company)
        #Get the next result
        qcorrid = qcorrid + 1
        query_result = xact.query_result(qcorrid)
    # Get any errors
    logger.debug("**** Voila Python DTS Api Block Query - Check for errors ****")
    qcorrid = 0
    query_error = xact.query_error(qcorrid)
    #Iterate through the errors
    # NOTE(review): qcorrid is deliberately NOT incremented in this loop
    # (the increment is commented out below); this assumes query_error(0)
    # advances/consumes errors internally -- confirm against the DTS API,
    # otherwise a non-empty error list would loop forever.
    while (query_error is not None):
        #Log the error now
        logger.info("**** Query Error -xact- ***** :Corrid %d", qcorrid)
        logger.info("**** Query Error ****")
        #logger.info(dir(query_error));
        logger.info("XPath : %s", query_error.xpath)
        logger.info("Keystr : %s", query_error.keystr)
        logger.info("Query error cause is : %d", query_error.cause)
        logger.info("Error description : %s", query_error.errstr)
        #logger.info(query_error);
        #Get the next error
        #qcorrid = qcorrid + 1
        query_error = xact.query_error(qcorrid)
    #End this transaction
    #xact_status = xact.get_status()
    #logger.info("xact_status=%s", xact_status)
    #xact = None;
    check_list.append ("rwdts_api_xact_blk_callback got " + str(qcorrid))
    logger.info("Transaction blk ended ")
def multiblock_regready(self, regh, rw_status, user_data):
    """reg-ready callback for the registration created inside a prepare."""
    logger.info("DTS multiblock reg_ready called ")
    check_list.append ("multiblock_regready")
    logger.debug(regh)
    logger.debug(rw_status)
    logger.debug(user_data)
def rwdts_api_xact_callback(self, xact, xact_status, user_data):
    """Completion callback for a single (non-block) query transaction.

    The number of arguments must match the C function prototype.
    """
    logger.info("**** Voila Python DTS Api Query -xact- callback ****")
    logger.debug(xact)
    logger.debug(user_data)
    logger.info(xact_status)
    # Get the status of the xact. Maybe ditch the status?
    #xact_status = xact.get_status()
    #logger.info("xact_status=%s", xact_status)
    #more = xact.get_more_results()
    #logger.debug("blk more flag for xact = %d",more)
    # single queries use correlation id 0
    qcorrid = 0
    # Get the query result of the transaction
    query_result = xact.query_result(qcorrid)
    logger.info(query_result);
    # Iterate through the result until there are no more results
    while (query_result is not None):
        # Get the result as a protobuf - this returns a generic protobuf message
        pbcm = query_result.protobuf
        # Convert the generic type to a type that we understand
        company = RwDtsToyTaskletYang.Company.from_pbcm(pbcm)
        # Log the results now
        logger.info("**** Query Result -xact- ***** :Corrid %d", qcorrid)
        logger.info("**** Query Result ****")
        logger.info("XPath : %s", query_result.xpath)
        logger.info("Query Result is")
        logger.info(query_result)
        logger.info("Company is :")
        logger.info(company)
        qcorrid = qcorrid +1
        #Get the next result
        query_result = xact.query_result(qcorrid)
    # Get any errors
    logger.debug("**** Voila Python DTS Api Query - Check for errors ****")
    qcorrid = 0
    query_error = xact.query_error(qcorrid)
    #Iterate through the errors
    # NOTE(review): qcorrid is not incremented in this loop (increment is
    # commented out); assumes query_error(0) advances internally -- confirm.
    while (query_error is not None):
        #Log the error now
        logger.info("**** Query Error -xact- ***** :Corrid %d", qcorrid)
        logger.info("**** Query Error ****")
        #logger.info(dir(query_error));
        logger.info("XPath : %s", query_error.xpath)
        logger.info("Keystr : %s", query_error.keystr)
        logger.info("Query error cause is : %d", query_error.cause)
        logger.info("Error description : %s", query_error.errstr)
        #logger.info(query_error);
        #Get the next error
        #qcorrid = qcorrid + 1
        query_error = xact.query_error(qcorrid)
    check_list.append("rwdts_api_xact_callback got" + str(qcorrid))
    # drop our reference; the DTS API owns the transaction lifetime
    xact = None
    logger.debug("Trasaction ended, xact unref in rwdts_api_xact_callback")
def rwdts_api_config_callback(self, xact, xact_status, user_data):
    """Completion callback for the config advise/publish query (log only).

    The number of arguments must match the C function prototype.
    """
    logger.info("!!!! Voila Python DTS Api config advise/publish callback !!!!")
    logger.debug(xact)
    logger.info(xact_status)
    logger.debug(user_data)
    #xact_status = xact.get_status()
    #logger.info("xact_status=%s", xact_status)
    # Delete this xact ??
    xact = None
    logger.debug("Trasaction ended, xact unref in rwdts_api_config_callback")
    logger.info(check_list)
def dts_dummy_cb(self, xact_info, user_data):
    """Placeholder prepare callback; logs its arguments and does nothing."""
    logger.info("Dummy callback called")
    logger.debug(xact_info)
    logger.debug(user_data)
def dts_single_query_reg_ready(self, regh, rw_status, user_data):
    """reg-ready for the 'ub' registration: fire a single READ query."""
    logger.info("DTS single_query_reg_ready is ready *****")
    logger.debug("regh = %s", regh)
    logger.debug("rw_status = %s", rw_status)
    logger.debug("user_data = %s", user_data)
    # Begin a DTS transaction from a single query
    single_query_xpath = 'C,/rw-dts-toy-tasklet:company[rw-dts-toy-tasklet:name=\'ub\']'
    #apih.trace(xpath)
    xact = self.dts_api.query(single_query_xpath,
                              RwDts.QueryAction.READ,
                              0,
                              self.rwdts_api_xact_callback,
                              self)
    logger.debug("xact = apih.query()")
    logger.debug(xact)
def dts_single_query_prepare_cb(self, xact_info, action, keyspec, msg, user_data):
    """Prepare callback that registers another xpath inside the running xact.

    On READ it first reports an error against the 'pasta' xpath; in all
    cases it then registers a publisher for that xpath on the same xact to
    exercise multi-block transactions. Always returns ACTION_OK.
    """
    logger.info("**** DTS single_query_prepare_cb is called *****")
    logger.debug("xact_info is %s", xact_info )
    logger.debug("action is %s", action)
    logger.debug("keyspec is %s", keyspec)
    logger.debug("msg is %s", msg)
    logger.debug("user_data is %s", user_data)
    logger.info("**** DTS sending another xact to create multiblock xact *****")
    multiblock = 'C,/rw-dts-toy-tasklet:toytasklet-config/rw-dts-toy-tasklet:company[rw-dts-toy-tasklet:name=\'pasta\']'
    flags = RwDts.Flag.PUBLISHER
    apih = xact_info.get_api()
    #apih.trace(multiblock)
    xact = xact_info.get_xact()
    xact.trace()
    if action == RwDts.QueryAction.READ:
        xact_info.send_error_xpath(RwTypes.RwStatus.FAILURE,
                                   multiblock,
                                   "PYTHON TASKLET THROWING ERRORERRORERROR dts_single_query_prepare_cb")
        check_list.append("Append Xact")
    status, regh2 = apih.member_register_xpath(multiblock,
                                               xact,
                                               flags,
                                               RwDts.ShardFlavor.NULL,
                                               0,
                                               0,
                                               -1,
                                               0,
                                               0,
                                               self.multiblock_regready,
                                               None,
                                               None,
                                               None,
                                               None,
                                               self)
    return (RwDts.MemberRspCode.ACTION_OK);
def dts_reg_ready(self, regh, rw_status, user_data):
    """reg-ready callback for the 'riftio' publisher registration (log only)."""
    logger.info("**** DTS registration is ready (are regh, self, keyspec, msg, user_data all different or all self?) *****")
    logger.debug("regh is %s", regh)
    logger.debug("rw_status is %s", rw_status)
    logger.debug("user_data is %s", user_data)
def dts_cfg_reg_ready(self, regh, rw_status, user_data):
    """reg-ready for the config publisher: publish a 'Riftio' CompanyConfig."""
    logger.info("**** DTS cfg registration is ready *****")
    logger.debug(regh)
    logger.debug(rw_status)
    logger.debug(user_data)
    riftiopath = 'C,/rw-dts-toy-tasklet:toytasklet-config/rw-dts-toy-tasklet:company[rw-dts-toy-tasklet:name=\'riftio\']'
    riftiomsg = RwDtsToyTaskletYang.CompanyConfig.new()
    riftiomsg.name = 'Riftio'
    riftiomsg.description = 'cloud technologies'
    property1 = RwDtsToyTaskletYang.CompanyConfig_Property.new()
    property1.name = 'test-property'
    property1.value = 'test-value'
    riftiomsg.property = [property1]
    # Begin a DTS transaction to send the config
    configxact = self.dts_api.query(riftiopath,
                                    RwDts.QueryAction.UPDATE,
                                    RwDts.XactFlag.ADVISE,
                                    self.rwdts_api_config_callback,
                                    self,
                                    riftiomsg.to_pbcm())
    logger.info("Sending config; configxact=%s", configxact)
    logger.info("Config path=%s", riftiopath)
    # keep a reference on self (presumably to hold the xact alive until its
    # completion callback runs -- confirm against the DTS API lifetime rules)
    self.configxact = configxact
def dts_cfg_prepare_cb(self, apih, xact, xactinfo, keyspec, msg, user_data):
    """Prepare callback for the config publisher: decode and log, then ACK."""
    logger.info("**** DTS cfg prepare is called *****")
    logger.debug("apih is %s", apih)
    logger.debug("keyspec is %s", keyspec)
    logger.debug("msg is %s", msg)
    logger.debug("xactinfo is %s", xactinfo)
    logger.debug("userdata is %s", user_data)
    #Create a company object and return o/p
    company = RwDtsToyTaskletYang.Company()
    company.name = 'riftio'
    company.profile.industry='technology'
    company.profile.revenue = '$100,000,000,000'
    # Convert the generic type to a type that we understand
    if msg is not None :
        company = RwDtsToyTaskletYang.Company.from_pbcm(msg)
    if keyspec is not None :
        schema = RwDtsToyTaskletYang.Company.schema()
        pathentry = schema.keyspec_to_entry(keyspec)
        logger.debug(pathentry)
        if pathentry is not None:
            logger.debug("Received keyspec with path key %s", pathentry.key00.name)
        else:
            logger.debug("Oh-Oh ---- Could not find path entry")
    logger.debug("member_register_xpath(cfgpath) returned")
    return (RwDts.MemberRspCode.ACTION_OK);
def dts_prepare_cb(self, xact_info, action, keyspec, msg, user_data):
    """Prepare callback for the 'riftio' publisher.

    READ: reports an error on the xpath, then responds with a canned
    Company record (ACK via respond_xpath). Other actions: just decode the
    incoming message. Returns ACTION_ASYNC in all cases.
    """
    logger.info("**** DTS prepare is called (are regh, self, keyspec, msg, user_data all different or all self?) *****")
    logger.debug("xact_info is %s", xact_info)
    logger.debug("action is %s", action)
    logger.debug("keyspec is %s", keyspec)
    logger.debug("msg is %s", msg)
    logger.debug("user_data is %s", user_data)
    if keyspec is not None :
        schema = RwDtsToyTaskletYang.Company.schema()
        pathentry = schema.keyspec_to_entry(keyspec)
        logger.debug("Received path entry")
        logger.debug(pathentry)
        logger.debug(dir(pathentry))
    # Convert the generic type to a type that we understand
    xpath = 'C,/rw-dts-toy-tasklet:company[rw-dts-toy-tasklet:name=\'riftio\']'
    if action == RwDts.QueryAction.READ:
        xact_info.send_error_xpath(RwTypes.RwStatus.FAILURE,
                                   xpath,
                                   "PYTHON TASKLET THROWING ERRORERRORERROR dts_prepare_cb")
        #Create a company object and return o/p
        company = RwDtsToyTaskletYang.Company()
        company.name = 'riftio'
        company.profile.industry='technology'
        company.profile.revenue = '$100,000,000,000'
        check_list.append("dts_prepare_cb read " + company.name)
        xact_info.respond_xpath(RwDts.XactRspCode.ACK,
                                xpath,
                                company.to_pbcm())
    else:
        if msg is not None :
            company = RwDtsToyTaskletYang.Company.from_pbcm(msg)
    # We ret async. How does this end? Oh, it's ack above...
    logger.info("**** DTS dts_prepare_cb done, ret ASYNC *****")
    return (RwDts.MemberRspCode.ACTION_ASYNC)
# called on a new transaction
def config_xact_init(self, ac, xact, ctx):
    """Appconf xact-init: allocate and return the per-transaction scratch."""
    logger.info("**** config_xact_init() called ****")
    logger.info("self=%s", self)
    logger.info("ac=%s", ac)
    logger.info("xact=%s", xact)
    logger.info("ctx=%s", ctx)
    # Note no local reference, the dts api holds the only required reference except when executing appconf-config callbacks
    scratch = { 'validatect':0, 'applyct':0, 'preparect':0, 'val1':'test scratch val1' }
    return scratch
# called on the end of a transaction
def config_xact_deinit(self, ac, xact, ctx, scratch):
    """Appconf xact-deinit: log the final scratch; nothing to free."""
    logger.info("**** config_xact_deinit() called ****")
    logger.info("self=%s", self)
    logger.info("ac=%s", ac)
    logger.info("xact=%s", xact)
    logger.info("ctx=%s", ctx)
    logger.info("scratch=%s", scratch)
    # rebinding the local name has no effect on the caller's scratch
    scratch=None
    return
# called when validating a config
def config_validate(self, apih, ac, xact, ctx, scratch):
    """Appconf validate: bump the per-xact validate counter."""
    logger.info("**** config_validate() called ****")
    logger.info("self=%s", self)
    logger.info("apih=%s", apih)
    logger.info("ac=%s", ac)
    logger.info("xact=%s", xact)
    logger.info("ctx=%s", ctx) # should be self, Callback object, handed as context to appconf_create
    logger.info("scratch=%s", scratch)
    scratch['validatect'] = scratch['validatect'] + 1
    logger.info("scratch=%s", scratch)
    return
# called to apply a config
def config_apply(self, apih, ac, xact, action, ctx, scratch):
    """Appconf apply: bump the apply counter when a scratch is present."""
    logger.info("**** config_apply() called ****")
    logger.info("self=%s", self)
    logger.info("apih=%s", apih)
    logger.info("ac=%s", ac)
    logger.info("xact=%s", xact)
    logger.info("action=%s", action)
    logger.info("ctx=%s", ctx)
    logger.info("scratch=%s", scratch)
    # apply won't always have a xact with its scratch!
    if scratch:
        scratch['applyct'] = scratch['applyct'] + 1
        logger.info("scratch=%s", scratch)
    return
# called on a config prepare
def config_prepare(self, apih, ac, xact, xact_info, keyspec, msg, ctx, scratch, prepud):
    """Appconf prepare: bump the prepare counter and complete OK."""
    logger.info("**** config_prepare() called ****")
    logger.info("self=%s", self)
    logger.info("apih=%s", apih)
    logger.info("ac=%s", ac)
    logger.info("xact=%s", xact)
    logger.info("xact_info=%s", xact_info) # this is nominally a query handle, I think there is a bug here?
    logger.info("keyspec=%s", keyspec)
    logger.info("msg=%s", msg)
    logger.info("ctx=%s", ctx) # should be this Callback / self
    logger.info("prepud=%s", prepud) # should be this Callback / self
    logger.info("scratch=%s", scratch) # should be our scratch
    scratch['preparect'] = scratch['preparect'] + 1
    logger.info("scratch=%s", scratch)
    ac.prepare_complete_ok(xact_info)
    # ac.prepare_complete_fail(xact_info, RW_STATUS_MEH, 'No such nozzle.'
    return
def rwdts_tasklet_state_change_cb(self, apih, state, user_data):
    """DTS state-change callback: performs all registrations at INIT.

    The number of arguments must match the C function prototype.
    """
    logger.info("**** Voila Python DTS Api init callback ****")
    logger.debug("apih=")
    logger.debug(apih)
    logger.debug("state=")
    logger.debug(state)
    logger.debug("user_date=")
    logger.debug(user_data)
    self.dts_api = apih
    self.rwdtstaskletpython = user_data
    # NOTE(review): debug leftover -- blocks the tasklet for 5 seconds on
    # every state change so a breakpoint can be attached; remove for
    # anything other than interactive debugging.
    print("Sleeping 5s, hit ^C to set bp\n")
    time.sleep(5)
    if state == RwDts.State.CONFIG:
        #Call recovery reconcile code here
        self.dts_api.state = RwDts.State.RUN
        return
    if not state == RwDts.State.INIT:
        return
    # register for configuration
    status, appgrp = apih.appconf_group_create(None, self.config_xact_init, self.config_xact_deinit, self.config_validate, self.config_apply, self)
    logger.info("appconf_group_create returned status %d", status)
    self.appgrp = appgrp
    cfgpath = 'C,/rw-dts-toy-tasklet:toytasklet-config/rw-dts-toy-tasklet:company[rw-dts-toy-tasklet:name=\'riftio\']'
    status, cfgreg = appgrp.register_xpath(cfgpath, RwDts.Flag.SUBSCRIBER|RwDts.Flag.CACHE, self.config_prepare, self)
    logger.info("appgrp.register_xpath returned status %d", status)
    #logger.info(cfgreg)
    self.cfgreg = cfgreg
    # signal config registration completion
    appgrp.phase_complete(RwDts.AppconfPhase.REGISTER)
    # define the callbacks that we want to receive
    xpath = 'C,/rw-dts-toy-tasklet:company[rw-dts-toy-tasklet:name=\'riftio\']'
    flags = RwDts.Flag.PUBLISHER
    status, regh = apih.member_register_xpath(xpath,
                                              None,
                                              flags,
                                              RwDts.ShardFlavor.NULL,
                                              0,
                                              0,
                                              -1,
                                              0,
                                              0,
                                              self.dts_reg_ready,
                                              self.dts_prepare_cb,
                                              None,
                                              None,
                                              None,
                                              self)
    logger.debug("member_register_xpath() returned")
    logger.debug(status);
    if regh is None :
        logger.debug("Registration handle is none")
    else:
        logger.debug('registration handle is')
        self.regh = regh
    # Single query . Call on reg_ready
    single_query_xpath = 'C,/rw-dts-toy-tasklet:company[rw-dts-toy-tasklet:name=\'ub\']'
    flags = RwDts.Flag.PUBLISHER | RwDts.Flag.CACHE
    status, regh = apih.member_register_xpath(single_query_xpath,
                                              None,
                                              flags,
                                              RwDts.ShardFlavor.NULL,
                                              0,
                                              0,
                                              -1,
                                              0,
                                              0,
                                              self.dts_single_query_reg_ready,
                                              self.dts_single_query_prepare_cb,
                                              None,
                                              None,
                                              None,
                                              self)
    logger.debug("member_register_xpath() returned")
    logger.debug(status);
    if regh is None :
        logger.debug("Registration handle is none")
    else:
        logger.debug('registration handle is')
        self.sq_regh = regh
    # now waiting for dts_reg_ready()
    # register config publisher
    if 1 :
        cfgpath2 = 'C,/rw-dts-toy-tasklet:toytasklet-config/rw-dts-toy-tasklet:company[rw-dts-toy-tasklet:name=\'noodle\']'
        flags = ( RwDts.Flag.PUBLISHER | RwDts.Flag.CACHE )
        status, regh2 = apih.member_register_xpath(cfgpath2,
                                                   None,
                                                   flags,
                                                   RwDts.ShardFlavor.NULL,
                                                   0,
                                                   0,
                                                   -1,
                                                   0,
                                                   0,
                                                   self.dts_cfg_reg_ready,
                                                   self.dts_cfg_prepare_cb,
                                                   None,
                                                   None,
                                                   None,
                                                   self)
        logger.debug("member_register_xpath(cfgpath) returned")
        logger.debug(status);
        if regh2 is None :
            logger.debug("Registration handle is none")
        else:
            logger.debug('registration handle is')
            self.regh2 = regh2
        # now waiting for dts_cfg_reg_ready()
    # this ought to be later, as well
    if 1 :
        # Construct an (empty) transaction. 0 is flags
        xact2 = apih.xact_create(0, None, None)
        #xact2.trace()
        logger.debug("xact2=apih.xact_create");
        logger.debug("xact2=%s", xact2);
        # Construct a query-block in / from the transaction
        blk = xact2.block_create(0, None, None)
        logger.debug("xact2.block_create")
        logger.debug(blk)
        # Add some query(ies) to the block
        # Corrid is a nonzero int, useful for getting back specific query's results
        qcorrid = 1
        status = blk.add_query(xpath, RwDts.QueryAction.READ, RwDts.XactFlag.STREAM, qcorrid, None)
        logger.debug(status)
        qcorrid = qcorrid + 1
        status = blk.add_query(xpath, RwDts.QueryAction.READ, RwDts.XactFlag.STREAM, qcorrid, None)
        qcorrid = qcorrid + 1
        status = blk.add_query(xpath, RwDts.QueryAction.READ, RwDts.XactFlag.STREAM, qcorrid, None)
        # Pull the trigger. Callback+userdata are here. The final "after" argument,
        # not included here, is a block to be after; otherwise at end
        # with after block: status = blk.execute(0, self.rwdts_api_xact_blk_callback, self, earlier_block)
        status = blk.execute(0, self.rwdts_api_xact_blk_callback, self)
        logger.debug("blk.execute")
        check_list.append("blk.execute queries sent " + str(qcorrid))
        logger.debug(status)
        # gads, imm should just be a flag
        #broken status = blk.execute_immediate(0, self.rwdts_api_xact_blk_callback, self)
        #?? status , blkstate = blk.get_status()
        #more = blk.get_more_results()
        #logger.debug("blk more flag = %d\n",more)
        #more = blk.for_query_get_more_results(qcorrid)
        #logger.debug("blk more flag for query = %d\n",more)
        status = xact2.commit()
        logger.debug("xact2.commit()")
        logger.debug(status)
    # Now let's test the member data APIs
    self.memberd = RwdtsMemberDataAPIs(apih)
    # tell DTS all registrations are done
    self.dts_api.state = RwDts.State.REGN_COMPLETE
class RwdtsMemberDataAPIs(object):
    """Demo of the DTS member-data APIs (create/update/read/iterate)."""

    def __init__(self, apih):
        """Register a publisher for the employee list.

        Args:
            apih: the DTS API handle.
        """
        #commented this to use the system-wide logger
        #self.logger = logging.getLogger(name="dtstasklet_logger")
        logger.debug("RwdtsMemberDataAPIs: __init__ function called")
        self.apih = apih
        # BUG FIX: the xpath had a stray trailing ']' with no matching '['
        # ('...:employee]'), which is not a valid path for keyspec_from_xpath.
        xpath = 'C,/rw-dts-toy-tasklet:employee'
        status, keyspec = apih.keyspec_from_xpath(xpath)
        flags = RwDts.Flag.PUBLISHER
        status, self.mbdreg = apih.member_register_keyspec(keyspec,
                                                           None,
                                                           flags,
                                                           RwDts.ShardFlavor.NULL,
                                                           0,
                                                           0,
                                                           -1,
                                                           0,
                                                           0,
                                                           self.create_member_objects,
                                                           self.create_member_data_prepare,
                                                           None,
                                                           None,
                                                           None,
                                                           self)
        logger.debug("RwdtsMemberDataAPIs - apih.member_register_xpath returned %s", status)

    def create_member_objects(self, regh, keyspec, msg, user_data):
        """reg-ready callback: create, update, read back and iterate employees."""
        logger.info("Creating member data objects !!!")
        # create jdoe1..jdoe9 (loop variable renamed from 'id', which
        # shadowed the builtin; the dead 'id = 1' pre-assignment is dropped)
        for emp_id in range(1, 10):
            emp = RwDtsToyTaskletYang.Employee()
            emp.name = 'jdoe' + str(emp_id)
            emp.age = 30 + emp_id
            emp.phone = '978-863-00' + str(emp_id)
            emp.ssn = '123-45-000' + str(emp_id)
            path = '/rw-dts-toy-tasklet:employee[rw-dts-toy-tasklet:name=\'jdoe' + str(emp_id) + '\']'
            status, ks = self.apih.keyspec_from_xpath(path)
            logger.debug("keyspec_from_xpath returned %s for path %s", status, ks)
            status = self.mbdreg.create_element_keyspec(ks, emp.to_pbcm())
            logger.debug("create_element_keyspec returned %s for path %s", status, ks)
        #Update an element
        path = '/rw-dts-toy-tasklet:employee[rw-dts-toy-tasklet:name=\'jdoe9\']'
        status, ks = self.apih.keyspec_from_xpath(path)
        logger.debug("keyspec_from_xpath returned %s for path %s", status, ks)
        emp = RwDtsToyTaskletYang.Employee()
        # BUG FIX: the name was built as 'jdoe9' + str(id) == 'jdoe99',
        # which does not match the 'jdoe9' keyspec being updated.
        emp.name = 'jdoe9'
        emp.age = 41
        emp.phone = '978-863-099'
        emp.ssn = '123-45-0099'
        status = self.mbdreg.update_element_keyspec(ks, emp.to_pbcm(), RwDts.XactFlag.REPLACE)
        logger.info("Updated the object with key = %s status = %s", path, status)
        # Now read it back
        status, out_ks, pbcm = self.mbdreg.get_element_keyspec(ks)
        logger.info("Get returned status=%s, pbcm=%s out_ks = %s", status, pbcm, out_ks)
        employee = RwDtsToyTaskletYang.Employee.from_pbcm(pbcm)
        logger.info("Read record is %s", employee)
        # Now read with xpath
        status, pbcm, out_ks = self.mbdreg.get_element_xpath('C,/rw-dts-toy-tasklet:employee[rw-dts-toy-tasklet:name=\'jdoe8\']')
        logger.info("Get returned using xpath status=%s pbcm=%s out_ks = %s", status, pbcm, out_ks)
        employee = RwDtsToyTaskletYang.Employee.from_pbcm(pbcm)
        logger.info("Read record using xpath is %s", employee)
        # Get a cursor and walk the list
        cursor = self.mbdreg.get_cursor()
        msg, ks = self.mbdreg.get_next_element(cursor)
        while msg is not None:
            employee = RwDtsToyTaskletYang.Employee.from_pbcm(msg)
            logger.info("Read record using get next api %s", employee)
            msg, ks = self.mbdreg.get_next_element(cursor)
        self.mbdreg.delete_cursors()

    def create_member_data_prepare(self, userdata):
        """Prepare callback for the member-data registration (log only)."""
        logger.info("create_member_data_prepare called!!!")
class RwdtstaskletPython(GObject.Object, RwTaskletPlugin.Component):
    """Implements the 'RwTaskletPlugin.Component' GObject interface."""

    def __init__(self):
        #commented this to use the system-wide logger
        #self.logger = logging.getLogger(name="dtstasklet_logger")
        logger.debug("RwdtstaskletPython: __init__ function called")
        GObject.Object.__init__(self)

    def do_component_init(self):
        """Called once during component initialization; returns the handle."""
        logger.debug("RwdtstaskletPython: do_component_init function called")
        component_handle = RwTaskletPlugin.ComponentHandle()
        return component_handle

    def do_component_deinit(self, component_handle):
        """Called when the component is torn down (log only)."""
        logger.debug("RwdtstaskletPython: do_component_deinit function called")

    def do_instance_alloc(self, component_handle, tasklet_info, instance_url):
        """Called for each new instance of the tasklet.

        The tasklet-specific information (scheduler instance, trace context,
        logging context) arrives in tasklet_info and is stored locally; the
        module logger is also wired into rwlog here.
        """
        logger.debug("RwdtstaskletPython: do_instance_alloc function called")
        self.taskletinfo = tasklet_info
        # Save the scheduler instance and tasklet instance objects
        #self.rwsched = tasklet_info.rwsched_instance
        #self.tasklet = tasklet_info.rwsched_tasklet_info
        #self.rwlog_instance = tasklet_info.rwlog_instance
        tasklet_logger = rwlogger.RwLogger(subcategory="rw-vcs",
                                           log_hdl=self.taskletinfo.rwlog_ctx)
        logger.addHandler(tasklet_logger)
        # After this point, all logger calls will log events to rw_vcs using
        # the tasklets rwlog handle
        logger.debug("Added rwlogger handler to tasklet logger")
        instance_handle = RwTaskletPlugin.InstanceHandle()
        return instance_handle

    def do_instance_free(self, component_handle, instance_handle):
        """Called when a tasklet instance is released (log only)."""
        logger.debug("RwdtstaskletPython: do_instance_free function called")

    def do_instance_start(self, component_handle, instance_handle):
        """Start tasklet operations: create the DTS API instance.

        DTS invokes the state-change callback once the API is ready for use.
        """
        logger.debug("RwdtstaskletPython: do_instance_start function called")
        # Create an instance of DTS API - This object is needed by all DTS
        # member and query APIs directly or indirectly.
        # DTS invokes the callback to notify the tasklet that the DTS API instance is ready
        # for use.
        foo = Callback()
        #sub = SubscribeInsideXactExample(self)
        self.dts_api = RwDts.Api.new(self.taskletinfo, # tasklet object
                                     RwDtsToyTaskletYang.get_schema(), # Schema object
                                     foo.rwdts_tasklet_state_change_cb, # The callback for DTS state change
                                     #sub.rwdts_tasklet_state_change_cb,
                                     self) # user data in the callback - in this case self

    def do_instance_stop(self, component_handle, instance_handle):
        """Called when a tasklet instance is stopped (log only)."""
        logger.debug("RwdtstaskletPython: do_instance_stop function called")
if __name__ == "__main__":
    # Standalone smoke test: drive the plugin lifecycle by hand.
    # NOTE(review): component_init/instance_alloc/instance_start appear to be
    # the GObject-introspection wrappers of the do_* vfuncs above -- confirm.
    #add your test code to execute this as a standalone program
    tasklet = RwdtstaskletPython()
    component_handle = tasklet.component_init()
    logger.debug("main: componente=%s" % (component_handle))
    logger.debug("main: component-type=%s" % type(component_handle))
    tasklet_info = RwTaskletPlugin._RWTaskletInfo()
    instance_url = RwTaskletPlugin._RWExecURL()
    logger.debug("main: tasklet=%s" % (tasklet_info))
    logger.debug("main: tasklet-type=%s" % type(tasklet_info))
    logger.debug("main: url=%s" % (instance_url))
    logger.debug("main: url-type=%s" % type(instance_url))
    instance_handle = tasklet.instance_alloc(component_handle, tasklet_info, instance_url)
    logger.debug("main: instance=%s" % (instance_handle))
    logger.debug("main: instance-type=%s" % type(instance_handle))
    tasklet.instance_start(component_handle, instance_handle)
|
992,594 | 36f1604e4f2dc067e40abd34312752592dbe9951 | from pathlib import Path
from typing import Dict, List, Optional, Tuple, Union
from pydantic import BaseModel, FilePath, DirectoryPath, AnyUrl
from sito_io.dctypes.resource import UriKind, Resource
# Shared type aliases for the models below.
UriT = Union[FilePath, AnyUrl]  # an existing local file or a remote URL
# Options may be a mapping, a list of (key, value) pairs, or raw strings.
OptionsT = Optional[Union[Dict[str, str], List[Tuple[str, str]], List[str]]]
# todo: pending optional directories for output dirs that will be created,
# see: https://github.com/samuelcolvin/pydantic/issues/1254
# https://github.com/samuelcolvin/pydantic/issues/1983
class Manifest(BaseModel):
    """A loose collection of resources rooted at a common base."""
    base: Union[DirectoryPath, AnyUrl, Path]  # base of the manifest tree
    kind: UriKind = UriKind.Naive  # how is the manifest rooted?
    elements: List[Resource]  # manifest contents
class ManifestMap(BaseModel):
    """A Manifest whose contents can be addressed by their relative paths."""
    base: Union[DirectoryPath, AnyUrl, Path]  # base of the manifest tree
    kind: UriKind = UriKind.Naive  # how is the manifest rooted?
    elements: Dict[Path, Resource]  # manifest contents, keyed by relative path
class SitoFileToFile(BaseModel):
    """File-in, file-out: takes a single file (or URI), emits a single file (or URI)."""
    input_uri: UriT
    output_uri: Optional[Union[FilePath, AnyUrl, str]]  # str: path that may not exist yet
    options: OptionsT
class SitoDirToDir(BaseModel):
    """Directory-in, directory-out."""
    input_dir: DirectoryPath
    output_dir: Optional[Union[DirectoryPath, str]]  # str: dir that may not exist yet
    options: OptionsT
class SitoFileToDir(BaseModel):
    """Single file (or URI) in, directory of files out."""
    input_uri: UriT
    output_dir: Optional[Union[DirectoryPath, str]]  # str: dir that may not exist yet
    options: OptionsT
class SitoCoreUtil(BaseModel):
    """Emulates the interface of common coreutils tools (mv, tar, etc.):
    tool [options] [output] <input> [inputs...]"""
    inputs: List[UriT]
    output: Optional[UriT]
    options: OptionsT
|
992,595 | 8cf13d115f19779e50e7cf4b89d4b8f10cc72eb0 | from datetime import datetime
from bs4 import BeautifulSoup
import requests
def telasi_bil(ID, now):
    """Fetch the outstanding Telasi bill and days left until its due date.

    Scrapes http://my.telasi.ge/customers/info/<ID>: the 2nd-4th <code>
    tags hold the balance components, and the last 'pull-right' div holds
    the due date as dd/mm/YYYY.

    Args:
        ID: Telasi customer id.
        now: the current date as a 'dd.mm.YYYY' string.

    Returns:
        list: ['Telasi', first 6 chars of the remaining timedelta string,
               total amount due as a float].
    """
    # BUG FIX: requests.get() without a timeout can hang this call forever
    # if the remote site stalls.
    response = requests.get('http://my.telasi.ge/customers/info/%s' % ID,
                            timeout=30)
    soup = BeautifulSoup(response.text, 'html.parser')
    # collect all <code> texts, then keep the three balance components
    code_texts = [code.text for code in soup.findAll('code')]
    amounts = [float(i) for i in code_texts[1:4]]
    due_candidates = []
    for ultag in soup.find_all('div', {'class': 'pull-right'}):
        due_candidates.append(ultag.text)
    bill_for_water = sum(amounts)
    # the last candidate is the payment due date
    pay_day = datetime.strptime(due_candidates[-1].strip(), "%d/%m/%Y")
    current = datetime.strptime(now, "%d.%m.%Y")
    remaining = pay_day - current
    return ['Telasi', str(remaining)[:6], bill_for_water]
|
992,596 | 609ad52a2ca76c71e306f23bc997c451a45733b6 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'd:\Users\Administrator\Desktop\My_LED\main.ui'
#
# Created by: PyQt5 UI code generator 5.15.0
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """pyuic5-generated layout for the LED clock/weather main window.

    NOTE: this class was generated from main.ui; prefer regenerating with
    pyuic5 over hand-editing (see the file header warning).
    """

    def setupUi(self, MainWindow):
        """Create all widgets and static properties; texts are set in retranslateUi()."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(1024, 600)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # Large clock label (top-left).
        self.label = QtWidgets.QLabel(self.centralwidget)
        self.label.setGeometry(QtCore.QRect(20, 40, 587, 175))
        font = QtGui.QFont()
        font.setFamily("Bahnschrift Condensed")
        font.setPointSize(145)
        self.label.setFont(font)
        self.label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.label.setObjectName("label")
        # Day-progress bar under the clock.
        # NOTE(review): maximum 84600 looks like a typo for 86400 (seconds per day) — confirm.
        self.progressBar = QtWidgets.QProgressBar(self.centralwidget)
        self.progressBar.setGeometry(QtCore.QRect(21, 240, 588, 21))
        self.progressBar.setMinimum(0)
        self.progressBar.setMaximum(84600)
        self.progressBar.setProperty("value", 0)
        self.progressBar.setAlignment(QtCore.Qt.AlignHCenter|QtCore.Qt.AlignTop)
        self.progressBar.setOrientation(QtCore.Qt.Horizontal)
        self.progressBar.setInvertedAppearance(False)
        self.progressBar.setTextDirection(QtWidgets.QProgressBar.TopToBottom)
        self.progressBar.setObjectName("progressBar")
        # Calendar (top-right).
        self.calendarWidget = QtWidgets.QCalendarWidget(self.centralwidget)
        self.calendarWidget.setGeometry(QtCore.QRect(627, 11, 387, 246))
        self.calendarWidget.setObjectName("calendarWidget")
        # Weather background image (bottom-left panel).
        self.label_2 = QtWidgets.QLabel(self.centralwidget)
        self.label_2.setGeometry(QtCore.QRect(0, 280, 627, 300))
        self.label_2.setStyleSheet("border-image: url(:/tianqi/static/未标题-1.png);")
        self.label_2.setText("")
        self.label_2.setObjectName("label_2")
        # Temperature readout.
        self.label_3 = QtWidgets.QLabel(self.centralwidget)
        self.label_3.setGeometry(QtCore.QRect(400, 310, 141, 101))
        font = QtGui.QFont()
        font.setFamily("Bahnschrift")
        font.setPointSize(85)
        self.label_3.setFont(font)
        self.label_3.setStyleSheet("color: rgb(255, 255, 255);\n"
"background-color: rgba(255, 255, 255, 0);")
        self.label_3.setAlignment(QtCore.Qt.AlignCenter)
        self.label_3.setObjectName("label_3")
        # Weather condition icon.
        self.label_4 = QtWidgets.QLabel(self.centralwidget)
        self.label_4.setGeometry(QtCore.QRect(30, 310, 110, 110))
        self.label_4.setStyleSheet("image: url(:/tianqi/static/103.png);")
        self.label_4.setText("")
        self.label_4.setObjectName("label_4")
        # Three small time/percentage readouts along the bottom row.
        self.label_5 = QtWidgets.QLabel(self.centralwidget)
        self.label_5.setGeometry(QtCore.QRect(250, 510, 107, 30))
        font = QtGui.QFont()
        font.setFamily("Bahnschrift")
        font.setPointSize(24)
        self.label_5.setFont(font)
        self.label_5.setAlignment(QtCore.Qt.AlignCenter)
        self.label_5.setObjectName("label_5")
        self.label_6 = QtWidgets.QLabel(self.centralwidget)
        self.label_6.setGeometry(QtCore.QRect(430, 510, 107, 30))
        font = QtGui.QFont()
        font.setFamily("Bahnschrift")
        font.setPointSize(24)
        self.label_6.setFont(font)
        self.label_6.setAlignment(QtCore.Qt.AlignCenter)
        self.label_6.setObjectName("label_6")
        self.label_7 = QtWidgets.QLabel(self.centralwidget)
        self.label_7.setGeometry(QtCore.QRect(70, 510, 107, 30))
        font = QtGui.QFont()
        font.setFamily("Bahnschrift")
        font.setPointSize(24)
        self.label_7.setFont(font)
        self.label_7.setAlignment(QtCore.Qt.AlignCenter)
        self.label_7.setObjectName("label_7")
        # Weather description text (Chinese).
        self.label_8 = QtWidgets.QLabel(self.centralwidget)
        self.label_8.setGeometry(QtCore.QRect(120, 310, 281, 121))
        font = QtGui.QFont()
        font.setFamily("华光行书_CNKI")
        font.setPointSize(26)
        font.setBold(False)
        font.setWeight(50)
        self.label_8.setFont(font)
        self.label_8.setStyleSheet("color: rgb(255, 255, 255);\n"
"background-color: rgba(255, 255, 255, 0);")
        self.label_8.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.label_8.setObjectName("label_8")
        # Spare/overlay label on the right half.
        self.label_9 = QtWidgets.QLabel(self.centralwidget)
        self.label_9.setGeometry(QtCore.QRect(619, 270, 391, 300))
        self.label_9.setStyleSheet("")
        self.label_9.setText("")
        self.label_9.setObjectName("label_9")
        MainWindow.setCentralWidget(self.centralwidget)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        """Assign all translatable (placeholder) texts to the widgets."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        self.label.setText(_translate("MainWindow", "5:22"))
        self.progressBar.setFormat(_translate("MainWindow", "%v %p%"))
        self.label_3.setText(_translate("MainWindow", "20"))
        self.label_5.setText(_translate("MainWindow", "06:55"))
        self.label_6.setText(_translate("MainWindow", "06:55"))
        self.label_7.setText(_translate("MainWindow", "0%"))
        self.label_8.setText(_translate("MainWindow", "大暴雨到特大暴雨"))
import tianqi_rc
|
992,597 | 2e6ed45b4864de2dd3422ddd9c98c806d9a91876 | # 13 octobre 2017
# astro_v3.py
from pylab import *
import os
def B3V_eq(x):
    """Return the u-g ordinate of the B3V line for abscissa `x` (g-r).

    Works element-wise on numpy arrays as well as on scalars.
    """
    slope = 0.9909
    intercept = -0.8901
    return slope * x + intercept
def lignes(filename, n_c1, n_c2):
    """Generator over a '|'-separated catalogue file.

    Skips header lines up to (and including) the first line starting with
    "--", then yields, for every data line, a tuple (c1, c2) with the
    space-stripped contents of columns `n_c1` and `n_c2` (1-based).
    Empty columns are yielded as None.

    Fixes over the original:
    - the file is opened with `with`, so the handle is closed even if the
      consumer abandons the generator early (the old data.close() only ran
      on full consumption);
    - the header scan stops at EOF instead of looping forever when no
      "--" separator line exists.
    """
    with open(filename, 'r') as data:
        # Skip the header: everything up to the "--..." separator line.
        line = data.readline()
        while line != "" and line[0:2] != "--":
            line = data.readline()
        line = data.readline()
        while line != "":
            c1 = ""
            c2 = ""
            n_colonne = 1
            for char in line:
                if char == "|":
                    n_colonne += 1
                # Accumulate non-space, non-separator chars of the wanted columns.
                if n_colonne == n_c1:
                    if char != " " and char != "|":
                        c1 += char
                elif n_colonne == n_c2:
                    if char != " " and char != "|":
                        c2 += char
                if n_colonne > max(n_c1, n_c2):
                    break  # both columns read; skip the rest of the line
            if c1 == "":
                c1 = None
            if c2 == "":
                c2 = None
            yield c1, c2
            line = data.readline()
def recupere_magnitudes(filename, n_g_r, n_u_g):
    """Read the g-r and u-g columns of a catalogue file.

    `n_g_r` / `n_u_g` are the 1-based column indices of g-r and u-g in
    `filename`. Returns two parallel lists (g-r values, u-g values) of
    floats, with None kept where a column was empty.
    """
    colonne_g_r = []
    colonne_u_g = []
    for g_r, u_g in lignes(filename, n_g_r, n_u_g):
        colonne_u_g.append(float(u_g) if u_g is not None else None)
        colonne_g_r.append(float(g_r) if g_r is not None else None)
    return colonne_g_r, colonne_u_g
def find_hot_stars(input_file, output_file, output_folder=None, n_g_r=6, n_u_g=5):
    """Filter a catalogue file, keeping only "hot" stars.

    A star is kept when its u-g value lies on or below the B3V line
    evaluated at its g-r value. The header is copied verbatim to
    `output_file` (prefixed with a "HOT STARS" line); `n_u_g` / `n_g_r`
    are the 1-based column indices of u-g and g-r.

    When `output_folder` is given it is created if missing and both file
    names are resolved inside it.

    Fixes over the original:
    - os.makedirs() replaces the shell-escaped `os.system("mkdir ...")`;
    - both files are managed with `with` so they are always closed;
    - the header scan stops at EOF instead of looping forever when the
      "--" separator line is missing.
    """
    if output_folder is not None:
        if not os.path.exists(output_folder):
            os.makedirs(output_folder)
        input_file = output_folder + "/" + input_file
        output_file = output_folder + "/" + output_file
    with open(input_file, 'r') as data, open(output_file, "w") as nfile:
        nfile.write("HOT STARS\n")
        # Copy the header up to (and including) the "--..." separator line.
        line = data.readline()
        while line != "" and line[0:2] != "--":
            nfile.write(line)
            line = data.readline()
        nfile.write(line)
        line = data.readline()
        i = 0
        while line != "":
            i += 1
            if i % 10000 == 0:
                print("avancement : ", i)  # progress indicator for large catalogues
            u_g = ""
            g_r = ""
            n_colonne = 1
            for char in line:
                if char == "|":
                    n_colonne += 1
                if n_colonne == n_u_g:
                    if char != " " and char != "|":
                        u_g += char
                elif n_colonne == n_g_r:
                    if char != " " and char != "|":
                        g_r += char
                if n_colonne > max(n_u_g, n_g_r):
                    break  # both columns read
            # Keep the star only if both magnitudes exist and it sits below the B3V line.
            if u_g != "" and g_r != "" and float(u_g) <= B3V_eq(float(g_r)):
                nfile.write(line)
            line = data.readline()
def fichier_reg(input_file, output_file, output_folder=None, n_alpha=3, n_delta=4):
    """Write a DS9 region (.reg) file with a 5-arcsec circle per star.

    `n_alpha` / `n_delta` are the 1-based column indices of the RA/Dec
    coordinates in `input_file` (typically the hot-star catalogue). When
    `output_folder` is given it is created if missing and both file names
    are resolved inside it.

    Fixes over the original: os.makedirs() replaces the shell-escaped
    `os.system("mkdir ...")`, and the output file is closed via `with`.
    """
    if output_folder is not None:
        if not os.path.exists(output_folder):
            os.makedirs(output_folder)
        input_file = output_folder + "/" + input_file
        output_file = output_folder + "/" + output_file
    with open(output_file, "w") as nfile:
        nfile.write('# Region file format: DS9 version 4.1\n')
        nfile.write(
            'global color=green dashlist=8 3 width=1 font=\"helvetica 10 normal roman\" select=1 highlite=1 dash=0 fixed=0 edit=1 move=1 delete=1 include=1 source=1\n')
        nfile.write('fk5')
        for alpha, delta in lignes(input_file, n_alpha, n_delta):
            nfile.write("\n")
            # NOTE(review): alpha/delta are None for empty columns, which would
            # raise TypeError here — same behavior as the original code.
            nfile.write('circle(' + alpha + ',' + delta + ',5\")')
def trace_graphique(titre, data_filename, SP_filename="SP.txt", n_g_r_data=6, n_u_g_data=5, n_g_r_SP=4, n_u_g_SP=3,
                    hot_stars_filename=None):
    """Show the u-g vs g-r colour-colour diagram with the main sequence and B3V line.

    :param titre: plot title
    :param data_filename: catalogue file with the stars to plot
    :param SP_filename: file with main-sequence (u-g, g-r) reference points
    :param n_g_r_data / n_u_g_data: 1-based g-r / u-g column indices in data_filename
    :param n_g_r_SP / n_u_g_SP: 1-based g-r / u-g column indices in SP_filename
    :param hot_stars_filename: optional hot-star-only catalogue, plotted in blue
    """
    # Load the data.
    g_r_data, u_g_data = recupere_magnitudes(data_filename, n_g_r_data, n_u_g_data)
    g_r_SP, u_g_SP = recupere_magnitudes(SP_filename, n_g_r_SP, n_u_g_SP)
    # Configure axes (magnitudes: invert the y axis).
    plt.xlabel('g-r')
    plt.ylabel('u-g')
    plt.gca().invert_yaxis()
    # Scatter the catalogue stars.
    plt.plot(g_r_data, u_g_data, '.', c='red', label='Étoiles')
    if hot_stars_filename is not None:
        g_r_hot_stars, u_g_hot_stars = recupere_magnitudes(hot_stars_filename, n_g_r_data, n_u_g_data)
        plt.plot(g_r_hot_stars, u_g_hot_stars, '.', c='blue', label='Étoiles chaudes')
    # Draw the B3V line over the observed g-r span (ignoring missing values).
    m = min([x for x in g_r_data if x is not None])
    M = max([y for y in g_r_data if y is not None])
    x = np.linspace(m, M, 100)
    plt.plot(x, B3V_eq(x), c='orange', label='Ligne B3V')
    # Draw the main sequence.
    plt.plot(g_r_SP, u_g_SP, c='black', label='Séquence principale')
    # Title, legend, display.
    title(titre)
    plt.legend()
    plt.show()
def get_sky_picture(region_name, output_file, x_size, y_size, output_folder=None, coordinate_system="J2000",
                    survey="DSS2-red", ra="", dec=""):
    """Download a FITS sky image of `region_name` from the ESO DSS archive via wget.

    :param region_name: object/region name resolved by the archive (spaces become '+')
    :param output_file: name of the FITS file to write
    :param x_size, y_size: image size in arcminutes
    :param output_folder: optional target folder (created if missing)
    :param coordinate_system, survey, ra, dec: forwarded as archive query parameters

    Fixes over the original: the three duplicated character-escaping loops
    are factored into one helper, os.makedirs() replaces the shell mkdir,
    and the unused shell-escaped region name was removed.
    """
    def _escape_for_shell(text):
        # Backslash-escape spaces and parentheses so the path survives os.system().
        out = ""
        for char in text:
            if char in " ()":
                out += "\\" + char
            else:
                out += char
        return out

    output_file_for_terminal = _escape_for_shell(output_file)
    if output_folder is not None:
        if not os.path.exists(output_folder):
            os.makedirs(output_folder)
        output_file_for_terminal = _escape_for_shell(output_folder) + "/" + output_file_for_terminal
    # The archive URL wants '+' instead of spaces in the object name.
    region_name_for_link = region_name.replace(" ", "+")
    # NOTE(review): shell command built from caller-supplied strings; consider
    # subprocess.run([...], shell=False) or urllib.request for robustness.
    os.system(
        "wget 'archive.eso.org/dss/dss/image?ra=" + ra + "&dec=" + dec + "&equinox=" + coordinate_system + "&name="
        + region_name_for_link + "&x=" + str(x_size) + "&y=" + str(y_size) + "&Sky-Survey=" + survey
        + "&mime-type=download-fits&statsmode=WEBFORM' -O " + output_file_for_terminal)
def recup_catalogue(region_name, output_file, cone_size, output_folder=None, size_unit='arcmin'):
    """Download the VizieR II/341 catalogue around `region_name` via wget.

    :param region_name: object/region name resolved by VizieR (spaces become '+')
    :param output_file: name of the '|'-separated catalogue file to write
    :param cone_size: cone-search radius, in `size_unit` units
    :param output_folder: optional target folder (created if missing)

    Fixes over the original: the duplicated character-escaping loops are
    factored into one helper, os.makedirs() replaces the shell mkdir, and
    the unused shell-escaped region name was removed.
    """
    def _escape_for_shell(text):
        # Backslash-escape spaces and parentheses so the path survives os.system().
        out = ""
        for char in text:
            if char in " ()":
                out += "\\" + char
            else:
                out += char
        return out

    output_file_for_terminal = _escape_for_shell(output_file)
    if output_folder is not None:
        if not os.path.exists(output_folder):
            os.makedirs(output_folder)
        output_file_for_terminal = _escape_for_shell(output_folder) + "/" + output_file_for_terminal
    # VizieR wants '+' instead of spaces in the object name.
    region_name_for_link = region_name.replace(" ", "+")
    # NOTE(review): shell command built from caller-supplied strings; consider
    # subprocess.run([...], shell=False) or urllib.request for robustness.
    os.system(
        "wget '" + 'http://vizier.u-strasbg.fr/viz-bin/asu-tsv/VizieR?-source=II/341/&-oc.form=dec&-out.max=unlimited&-c='
        + region_name_for_link + '&-c.eq=J2000&-c.r=' + str(cone_size) + '&-c.u=' + size_unit
        + '&-c.geom=r&-out=RAJ2000&-out=DEJ2000&-out=u-g&-out=g-r2&-out=umag&-out=e_umag&-out=gmag&-out=e_gmag&-out=r2mag&-out=e_r2mag&-out=Hamag&-out=e_Hamag&-out=rmag&-out=e_rmag&-out=imag&-out=e_imag&-out.add=_Glon,_Glat&-oc.form=dec&-out.form=|+-Separated-Values'
        + "' -O " + output_file_for_terminal)
def save_plot(output_file, input_file, titre, SP_filename="SP.txt", output_folder=None, n_g_r_data=6, n_u_g_data=5, n_g_r_SP=4, n_u_g_SP=3,
              input_file_hot_stars=None):
    """Save the u-g vs g-r colour-colour diagram (main sequence + B3V line) to `output_file`.

    :param output_file: image file to write (e.g. .png)
    :param input_file: catalogue file with the stars to plot
    :param titre: plot title
    :param SP_filename: file with main-sequence (u-g, g-r) reference points
    :param output_folder: optional working folder (created if missing); input,
        hot-star and output files are resolved inside it (SP_filename is not)
    :param n_g_r_data / n_u_g_data: 1-based g-r / u-g column indices in input_file
    :param n_g_r_SP / n_u_g_SP: 1-based g-r / u-g column indices in SP_filename
    :param input_file_hot_stars: optional hot-star-only catalogue, plotted in blue

    Fixes over the original: os.makedirs() replaces the shell-escaped
    `os.system("mkdir ...")`; `is not None` replaces `!= None`.
    """
    if output_folder is not None:
        if not os.path.exists(output_folder):
            os.makedirs(output_folder)
        input_file = output_folder + "/" + input_file
        if input_file_hot_stars is not None:
            input_file_hot_stars = output_folder + "/" + input_file_hot_stars
        output_file = output_folder + "/" + output_file
    # Load the data.
    g_r_data, u_g_data = recupere_magnitudes(input_file, n_g_r_data, n_u_g_data)
    g_r_SP, u_g_SP = recupere_magnitudes(SP_filename, n_g_r_SP, n_u_g_SP)
    # Configure axes (magnitudes: invert the y axis).
    plt.xlabel('g-r')
    plt.ylabel('u-g')
    plt.gca().invert_yaxis()
    # Scatter the catalogue stars.
    plt.plot(g_r_data, u_g_data, '.', c='red', label='Etoiles')
    if input_file_hot_stars is not None:
        g_r_hot_stars, u_g_hot_stars = recupere_magnitudes(input_file_hot_stars, n_g_r_data, n_u_g_data)
        plt.plot(g_r_hot_stars, u_g_hot_stars, '.', c='blue', label='Etoiles chaudes')
    # Draw the B3V line over the observed g-r span (ignoring missing values).
    m = min([x for x in g_r_data if x is not None])
    M = max([y for y in g_r_data if y is not None])
    x = np.linspace(m, M, 100)
    plt.plot(x, B3V_eq(x), c='orange', label='Ligne B3V')
    # Draw the main sequence.
    plt.plot(g_r_SP, u_g_SP, c='black', label='Séquence principale')
    # Title, legend, save to disk.
    title(titre)
    plt.legend()
    plt.savefig(output_file)
def analyser_region(region_name, cone_size):
    """Run the whole pipeline for one sky region.

    Downloads the VizieR catalogue and a DSS FITS image, selects the hot
    stars, writes a DS9 region file, saves the colour-colour plot, then
    drives ds9 to save a sky picture annotated with the hot-star regions.
    All products land in "<region>_ (<cone_size> arcmin)".

    Fixes over the original: the shell-escaped folder name was computed but
    never used (dead code, removed); the working directory is restored in a
    try/finally even if ds9 fails.
    """
    region_name_for_filenames = region_name.replace(" ", "_")
    output_folder = region_name_for_filenames + " (" + str(cone_size) + " arcmin)"
    # Per-product file names, all derived from the region name.
    output_file_data = region_name_for_filenames + ".data.txt"
    output_file_hot_stars_data = region_name_for_filenames + ".hot_stars_data.txt"
    output_file_reg = region_name_for_filenames + ".reg"
    output_file_fits = region_name_for_filenames + ".fits"
    output_file_plot = region_name_for_filenames + ".plot.png"
    output_file_sky_picture = region_name_for_filenames + ".sky_picture.png"
    recup_catalogue(region_name, output_file_data, cone_size, output_folder)
    # FITS image side = cone diameter (2 * radius), in arcmin.
    get_sky_picture(region_name, output_file_fits, 2 * cone_size, 2 * cone_size, output_folder)
    find_hot_stars(output_file_data, output_file_hot_stars_data, output_folder)
    fichier_reg(output_file_hot_stars_data, output_file_reg, output_folder)
    save_plot(output_file_plot, output_file_data, region_name + " (cone search : " + str(cone_size) + " arcmin)", output_folder=output_folder, input_file_hot_stars=output_file_hot_stars_data)
    # Run ds9 from inside the output folder so relative names resolve there.
    oldpwd = os.getcwd()
    os.chdir(output_folder)
    try:
        os.system("ds9 " + output_file_fits + " -regions " + output_file_reg + " -saveimage " + output_file_sky_picture + " -exit")
    finally:
        os.chdir(oldpwd)


analyser_region("RCW 49", 10)
|
992,598 | 5f3d522c5c2f48cffeebaa46a399aebfefef4839 | import xml.dom.minidom
import sys
import re
import os
#<type 'str'>
from struct import *
from types import *
# Module-wide accumulators for the generated DAL property binary.
# NOTE(review): `global` at module scope is a no-op; kept from the original.
global NameSection, StringSection, ByteSection, Section1, UintSection, DevSection
global DeviceID
UintSection = []    # packed uint32 property values
ByteSection = []    # packed byte-sequence property values
StringSection =[]   # packed (NUL-terminated) string property values
NameSection = []    # NUL-terminated property names
DevSection = []     # per-device (type, name-offset, value) triples + terminator
DeviceID = []       # flat [device_id, dev-section byte offset, ...] pairs
Section1 = []       # header section (total length + section offsets)
#[vkaushik] change for struct props
StructPtrs = []     # struct pointer symbol names referenced by 0x12 props
StructIncFiles = [] # header files to #include for struct props
PropNames = []      # property names already emitted to NameSection (dedupe)
def AlignSectionAddr():
    """Zero-pad the name, string and byte sections to 4-byte boundaries.

    Mutates the module-level NameSection, StringSection and ByteSection
    lists in place so the sections that follow them stay 32-bit aligned.
    """
    for section in (NameSection, StringSection, ByteSection):
        remainder = len(section) % 4
        if remainder != 0:
            section.extend([0] * (4 - remainder))
    return
def PrintConfig(ModName):
    """Append the generated property blob (DALPROP_PropBin), struct pointer
    table and DALPROP_PropsInfo to the output .c file for image `ModName`.

    Reads the module-level section lists built by GenerateArray() and
    AlignSectionAddr(); output file path is DirName/sys.argv[3].
    NOTE: Python 2 script (StringType, integer `/` division below).
    """
    # Header: 6 uint32 fields + one (id, offset) uint32 pair per device.
    LengthSection1 = 24 + 4*len(DeviceID)
    global UintSection, DevSection
    #UintSection = PackData(UintSection, 3)
    DevSection = PackData(DevSection, 3)
    # Section1 layout: total length, then the byte offset of each section,
    # then the device count (py2 integer division).
    Section1 = [ LengthSection1 + len(NameSection) + len(StringSection) + len(ByteSection) + len(UintSection) + len(DevSection),
                 LengthSection1,
                 LengthSection1 + len(NameSection),
                 LengthSection1 + len(NameSection) + len(StringSection),
                 LengthSection1 + len(NameSection) + len(StringSection) + len(ByteSection),
                 len(DeviceID) / 2]
    # Odd indices of DeviceID hold per-device offsets relative to DevSection;
    # rebase them to absolute offsets (past ByteSection and UintSection).
    for i in range(len(DeviceID)):
        if i%2 == 1:
            DeviceID[i] += Section1[4] + len(UintSection)
    Section1.extend(DeviceID)
    Section1 = PackData(Section1, 3)
    # Full binary image, as a flat list of bytes (plus embedded enum strings).
    PropBin = Section1 + NameSection + StringSection + ByteSection + UintSection + DevSection
    # NOTE(review): CONFIG is opened in append mode and never closed.
    CONFIG = open(DirName + "/" + sys.argv[3], 'a')
    CONFIG.write ("#include \"DALStdDef.h\" \n")
    CONFIG.write ("#include \"DALSysTypes.h\" \n")
    #need to align prop array on 32 bit boundaries [vkaushik]
    CONFIG.write ("#include \"dalconfig.h\" \n\n")
    # Emit the image-selection #define for this module name.
    if ModName == "modem":
        CONFIG.write("#define DAL_CONFIG_IMAGE_MODEM \n")
    elif ModName == "dsp":
        CONFIG.write("#define DAL_CONFIG_IMAGE_DSP \n")
    elif ModName == "boot":
        CONFIG.write("#define DAL_CONFIG_IMAGE_BOOT \n")
    elif ModName == "boot_wm6":
        CONFIG.write("#define DAL_CONFIG_IMAGE_BOOT_WM_6 \n")
    elif ModName == "boot_wm7":
        CONFIG.write("#define DAL_CONFIG_IMAGE_BOOT_WM_7 \n")
    elif ModName == "tz":
        CONFIG.write("#define DAL_CONFIG_IMAGE_TZ \n")
    elif ModName == "wcn":
        CONFIG.write("#define DAL_CONFIG_IMAGE_WCN \n")
    elif ModName == "sps":
        CONFIG.write("#define DAL_CONFIG_IMAGE_SPS \n")
    elif ModName == "rpm":
        CONFIG.write("#define DAL_CONFIG_IMAGE_RPM \n")
    else:
        CONFIG.write("#define DAL_CONFIG_IMAGE_APPS \n")
    # #include every header referenced by struct props.
    for inc_files in StructIncFiles:
        CONFIG.write ("#include \"%s\"\n" %inc_files)
    # NULL-terminated array of struct pointers referenced by 0x12 props.
    CONFIG.write ("\nconst void * DALPROP_StructPtrs[] = {\n")
    for struct_ptrs in StructPtrs:
        CONFIG.write ("\t\t\t%s, \n" %struct_ptrs)
    CONFIG.write ("\t\t\tNULL};\n")
    # Emit PropBin 4 bytes at a time as little-endian uint32 literals;
    # embedded enum strings are emitted verbatim (they occupy one slot).
    CONFIG.write ("\nconst uint32 DALPROP_PropBin[] = {\n")
    for i in range(0, len(PropBin), 4):
        if i%20 == 0:
            CONFIG.write ("\n\t\t\t")
        if type(PropBin[i]) is StringType:
            CONFIG.write ("%s" %PropBin[i])
        else:
            # Tail of the list may be shorter than 4 bytes: shrink the literal.
            if i+3<len(PropBin):
                CONFIG.write ("0x%.2x" %PropBin[i+3] + "%.2x" %PropBin[i+2] + "%.2x" %PropBin[i+1] + "%.2x" %PropBin[i])
            elif i+2<len(PropBin):
                CONFIG.write ("0x%.4x" %PropBin[i+2] + "%.2x" %PropBin[i+1] + "%.2x" %PropBin[i])
            elif i+1<len(PropBin):
                CONFIG.write ("0x%.6x" %PropBin[i+1] + "%.2x" %PropBin[i])
            else:
                CONFIG.write ("0x%.8x" %PropBin[i])
        if i != len(PropBin) - 4:
            CONFIG.write (", ")
        else:
            CONFIG.write (" };\n")
    CONFIG.write("\n\nDALProps DALPROP_PropsInfo = {(const byte*)DALPROP_PropBin, DALPROP_StructPtrs};\n")
def PackData(value, type1):
    """Serialise `value` into a flat byte list for the property binary.

    type1 == 1: string -> list of character codes plus a NUL terminator.
    type1 == 3: sequence of uint32s -> little-endian byte list; embedded
    enum strings are kept verbatim (padded with 3 zero bytes so they fill a
    4-byte slot) and nested byte lists are copied and zero-padded to a
    4-byte boundary. Any other `type1` returns `value` unchanged.
    NOTE: Python 2 only (`StringType`/`ListType` from the `types` module).
    """
    if type1 == 1: #String
        value = list(value)
        for index in range (len(value)):
            value[index] = ord(value[index])
        value.append(0)  # NUL terminator
    elif type1 == 3: #UINT32
        value_temp = []
        value=list(value)
        for index in range (len(value)):
            if type(value[index]) is StringType:
                value_temp.append(value[index])
                # Enum emitted as a C symbol occupying one 4-byte slot:
                # pad with 3 filler bytes since PropBin is walked 4 at a time.
                for j in range(3):
                    value_temp.append(0)
            elif type(value[index]) is ListType:
                # Nested byte list: copy and zero-pad to a 4-byte boundary.
                for idx in range(len(value[index])):
                    value_temp.append(value[index][idx])
                if len(value[index]) % 4 != 0:
                    for idx2 in range(4 - (len(value[index]) % 4)):
                        value_temp.append(0)
            else:
                # Plain integer: little-endian uint32 -> 4 bytes.
                for j in (unpack('BBBB',pack('I', value[index]))):
                    value_temp.append(j)
        value = value_temp
    return value
def hex_to_integer(h):
    """Convert a hex string with a mandatory "0x"/"0X" prefix to an integer.

    A bare "0x" converts to 0 (matching the original loop-based version).
    Raises ValueError when the prefix is missing or a character is not a
    valid hex digit.

    Fixes over the original: the error message said "Ox" (letter O) instead
    of "0x", and the docstring wrongly claimed the prefix was optional.
    """
    h = h.lower()
    if h[:2] != "0x":
        raise ValueError("Type must start with 0x")
    digits = h[2:]
    # int() raises ValueError on any non-hex character, like the old loop did.
    return int(digits, 16) if digits else 0
def GenerateGlobalDef(pDriver):
    """Collect the <global_def> child definitions of a <driver> XML node.

    Returns a dict mapping definition name -> value packed by PackData().
    Type codes (hex attribute): 1 string, 2 byte sequence, 3 uint32
    sequence (both comma-separated, terminated by an "end" marker),
    4 struct include file (also recorded in StructIncFiles).

    Fixes over the original: a `re.sub(...)` whose result was discarded
    (dead code) was removed, and the local `type` no longer shadows the
    builtin.
    """
    GlobalDef = {}
    for global_defs in pDriver.getElementsByTagName("global_def"):
        global_defs.normalize()
        for node in global_defs.childNodes:
            if node.nodeType == node.TEXT_NODE:
                continue
            name = node.getAttribute("name")
            def_type = hex_to_integer(node.getAttribute("type"))
            value = node.firstChild.data
            if def_type == 1: #DALPROP_DATA_TYPE_STRING
                value = value.strip()
            elif (def_type == 2 or def_type == 3):
                # Comma-separated numbers ending with an "end" marker.
                value = value.split(',')
                for index in range (len(value)-1):
                    # lstrip before eval: leading whitespace breaks eval [vkaushik]
                    value[index] = value[index].lstrip()
                    value[index] = eval(value[index])
                del value[-1]  # drop the trailing "end" marker
                value.insert(0, len(value)-1)  # prepend the element count
            elif (def_type == 4):
                # Struct include file: record it once for the generated #includes.
                value = value.strip()
                if value not in StructIncFiles:
                    StructIncFiles.append(value)
            value = PackData(value, def_type)
            GlobalDef[name] = value
    return GlobalDef
def GenerateArray(node, GlobalDef, GlobalOffset):
    """Emit one <device> node's properties into the module-level sections.

    Appends (device id, DevSection byte offset) to DeviceID, then for each
    <props> child appends a (type, name-offset, value) triple to DevSection
    and a 0xFF00FF00 terminator after the last one. `GlobalOffset` caches
    section offsets of global_def values already emitted, so repeated
    references share storage. NOTE: Python 2 script (str.encode -> bytes
    handling differs on py3).
    """
    DeviceID.append(int(node.getAttribute("id"), 16))
    DeviceID.append(len(DevSection)*4)  # byte offset of this device's triples
    for Props in node.getElementsByTagName("props"):
        name = Props.getAttribute("name")
        name = name.encode('ascii', 'ignore')
        type = int(Props.getAttribute("type"), 16)  # NOTE(review): shadows builtin `type`
        # make sure the prop name is unique in the NameSection
        if name not in PropNames:
            PropNames.append(name)
            nameoffset = len(NameSection)
            name = PackData(name, 1)
            NameSection.extend(name)
        else:
            # Recompute the offset of the already-emitted name:
            # each earlier name occupies len(name) + 1 (NUL) bytes.
            nameoffset = 0;
            loc = PropNames.index(name)
            for idx in range(loc):
                nameoffset += len(PropNames[idx]) + 1
        # Strip whitespace/newlines from the property's text payload.
        char_data = re.sub("(\s*|\n)", '', Props.firstChild.data)
        if type == 0x02:
            # Plain value: eval numeric expressions, else keep as symbol string.
            try:
                value = eval(char_data)
            except NameError:
                value= str(char_data)
        elif type == 0x08: # DALPROP_ATTR_TYPE_BYTE_SEQ
            value = char_data.split(',')
            for index in range(len(value) - 1):
                value[index] = value[index].lstrip()
                value[index] = eval(value[index])
            value.insert(0, len(value) - 1)
            # fix the length and pop the 'end'
            value[0] -= 1
            if "end" == value[len(value) - 1]:
                value.pop()
        elif type == 0x12:
            # Struct pointer: value is the index into the StructPtrs table.
            try:
                StructPtrs.index(char_data)
            except ValueError:
                StructPtrs.append(char_data)
            value = StructPtrs.index(char_data)
        else:
            # Reference to a global_def value: emit into the matching section
            # once, then reuse its cached offset.
            if char_data in GlobalOffset:
                value = GlobalOffset[char_data]
            else:
                if type == 0x18:
                    value = len(ByteSection)
                    ByteSection.extend(GlobalDef[char_data])
                elif type == 0x11:
                    value = len(StringSection)
                    StringSection.extend(GlobalDef[char_data])
                elif type == 0x14:
                    value = len(UintSection)
                    UintSection.extend(GlobalDef[char_data])
                GlobalOffset[char_data] = value
        DevSection.extend([type, nameoffset, value])
    #print DeviceID
    DevSection.append(0xFF00FF00)  # device terminator marker
def PrintModDir(ModName, DriverList):
    """Write DALModDir_<ModName>.c into DirName.

    Emits an extern DALREG_DriverInfo declaration per driver, the static
    DALDriverInfoArr array, and the gDALModDriverInfoList aggregate.

    Fix over the original: the output file handle was never closed; it is
    now managed with `with` so the data is flushed deterministically.
    """
    filename = "DALModDir_" + ModName + ".c"
    with open(DirName + "/" + filename, 'w') as MODDIR:
        MODDIR.write("#include \"DALStdDef.h\" \n")
        MODDIR.write("#include \"DALReg.h\" \n\n")
        for drivers in DriverList:
            MODDIR.write("extern DALREG_DriverInfo DAL%s_DriverInfo;\n" %drivers)
        MODDIR.write("\nstatic DALREG_DriverInfo * DALDriverInfoArr[%d] = {\n" % len(DriverList))
        count = 0
        for drivers in DriverList:
            MODDIR.write("\t& DAL%s_DriverInfo" %drivers)
            # Comma after every entry except the last.
            if count < len(DriverList) - 1:
                count += 1
                MODDIR.write(",")
            MODDIR.write("\n")
        MODDIR.write("};\n\n")
        MODDIR.write("DALREG_DriverInfoList gDALModDriverInfoList = {%d, DALDriverInfoArr};" %len(DriverList))
        MODDIR.write("\n")
def CheckDupDriverID(ModName, DriverList):
    """Raise ValueError if any driver name appears twice for image `ModName`.

    NOTE: Python 2 print statements below.
    """
    # check if 'driver' is duplicated in the DriverList
    idx = 1
    for driver in DriverList:
        # Compare each driver against everything after it in the list.
        if driver in DriverList[idx:]:
            # found a duplicate driver and no need to proceed any further
            print "ERROR *** DAL Driver '%s'" % driver, "has been included more than once for SW image '%s'" % ModName
            print "ERROR *** Please check XML files for '%s'" % ModName
            raise ValueError(driver)
        idx += 1
def CheckDupDeviceID(ModName):
    """Raise ValueError if any device ID occurs twice for image `ModName`.

    Reads the module-level DeviceID list, which stores ids at even indices
    and offsets at odd indices. NOTE: Python 2 print statements below.
    """
    # build the device ID list (every second entry is an id)
    devIDList = []
    for i in range(0, len(DeviceID), 2):
        devIDList.append(DeviceID[i])
    # check if there are any duplicated device IDs
    idx = 1
    for dev in devIDList:
        if dev in devIDList[idx:]:
            # find a duplicate device ID and no need to proceed any further
            print "ERROR *** DAL Device ID 0x%.8x" % dev, "has been included more than once for SW image '%s'" % ModName
            print "ERROR *** Please check XML files for '%s'" % ModName
            raise ValueError(hex(dev))
        idx += 1
def Print_Debug ():
    """Dump every generated section to stdout (debug aid; normally disabled).

    NOTE: Python 2 print statements below.
    """
    print "\nSection1: ", Section1, "Len: ", len(Section1)
    print "\nName Section:", NameSection, "Len: ",len(NameSection)
    print "\nByte Section: ", ByteSection, "Len: ",len(ByteSection)
    print "\nString Section:" , StringSection, "Len: ",len(StringSection)
    print "\nUINT Section:", UintSection, "Len: ",len(UintSection)
    print "\nDevice Section: ", DevSection
# Entry point: argv[1] = module XML, argv[2] = output directory,
# argv[3] = config .c filename (consumed inside PrintConfig).
DirName = sys.argv[2]
if not os.path.isdir(DirName):
    os.mkdir( DirName )
ModList = xml.dom.minidom.parse(sys.argv[1])
for Mod in ModList.getElementsByTagName("module"):
    DriverList = []
    ModName = Mod.getAttribute("name")
    for Driver in Mod.getElementsByTagName("driver"):
        # NOTE(review): `GloablDef` looks like a typo of `GlobalDef`; the
        # value is never read (GlobalDef is assigned two lines below).
        GloablDef = {}
        DriverList.append(Driver.getAttribute("name"))
        GlobalDef = GenerateGlobalDef(Driver)
        GlobalOffset = {}  # per-driver cache of emitted global_def offsets
        for Device in Driver.getElementsByTagName("device"):
            GenerateArray(Device, GlobalDef, GlobalOffset)
    AlignSectionAddr()
    # check for duplicated driver IDs and device IDs
    CheckDupDriverID(ModName, DriverList)
    CheckDupDeviceID(ModName)
    PrintModDir(ModName, DriverList)
    PrintConfig(ModName)
    #Print_Debug()
|
992,599 | 1b1ed24fc11f117ccd75724017d9efc4fc6f6b2f | import os
import streamlit as st
import streamlit.components.v1 as components
from streamlit_hgb import hgb, reference_hash, load_samples, hgb_run
import pandas as pd
import gffutils
import glob
from streamlit_drawable_canvas import st_canvas
import time
# Fallback gffutils gene-annotation database (used when config.yaml is absent).
DB = "hg38.genes.db"
_RELEASE = True  # gates the main app body below
# app: `$ streamlit run main.py`
# Retrieve gene annotations from a gff-derived gffutils database.
#@st.cache(allow_output_mutation=True)
def load_db(db_file):
    """Open the gffutils FeatureDB at `db_file`, keeping GFF feature order."""
    return gffutils.FeatureDB(db_file, keep_order=True)
if _RELEASE:
    st.header("Hybrid Genome Browser")
    # Sidebar inputs: prefer reference/alignments from config.yaml; on any
    # failure fall back to *.bam files found in the working directory.
    # name_input = st.text_input("Enter a file name", value="../../bt142/ont2_ngmlr.bam")
    try:
        yaml = load_samples("config.yaml")
        ref = st.sidebar.selectbox("Which references to use?", list(yaml.keys()), 1)
        name_input = st.sidebar.multiselect("Which files to load?",
                                            yaml[ref]["alignments"],
                                            list(yaml[ref]["default"])
                                            )
        refs = reference_hash(yaml[ref]["alignments"][0])
        default_range = yaml[ref]["range"][0]
        db_file = yaml[ref]["db"][0]
    # NOTE(review): bare except silently swallows config errors — consider narrowing.
    except:
        files = glob.glob("*.bam")
        if len(files) > 0:
            name_input = st.sidebar.multiselect("Which files to load?",
                                                files,
                                                [files[0]])
            refs = reference_hash(name_input[0])
        else:
            name_input = st.sidebar.text_input("Which file to explore?")
            refs = reference_hash(name_input)
            name_input = [name_input]
        db_file = DB
        if len(refs) > 0:
            #default_range = "{}:10001-20001".format(next(iter(refs)))
            #default_range = "{}:8794744-8850896".format(next(iter(refs)))
            default_range = "{}:8874744-8950896".format(next(iter(refs)))
        else:
            default_range = ""
    region = st.sidebar.text_input("Where to explore?", default_range)
    bed = st.sidebar.text_input("Which bed file to use for ?", "")
    # Display defaults; some are overridden by the sidebar widgets below.
    split=False
    coverage=20
    y=16
    callet=True
    no_ins=False  # NOTE(review): never read afterwards — presumably a leftover flag
    db = load_db(db_file)
    if len(refs) > 0:
        # Resolve `region` either as a gene name (via the annotation DB) or
        # as an explicit "chrom:start-end" string; fall back to the default.
        try:
            # Fetch from gene
            gene = db[region] #load_db(db_file, region)
            #print(gene, gene.seqid)
            chr_def = gene.seqid
            car, cdr = gene.start, gene.end
            default_range = "{}:{}-{}".format(chr_def, car, cdr)
        except:
            try:
                chr_def, region_chr = region.split(":")
                car, cdr = region_chr.split("-")
            except:
                region = default_range
                chr_def, region_chr = region.split(":")
                car, cdr = region_chr.split("-")
        # NOTE(review): `chr` and `range` shadow Python builtins here.
        chr = list(refs.keys())
        ref_id = st.sidebar.selectbox(
            'Which chromosome to display?',
            chr, chr.index(chr_def))
        range = st.sidebar.slider(
            'Select a range of values',
            0, refs[ref_id], (int(car), int(cdr)))
        if st.sidebar.checkbox("Detail"):
            # Fine-grained numeric coordinate inputs (values not used below).
            num = st.sidebar.number_input("Enter a start coordinate", 0, refs[ref_id], range[0])
            num2 = st.sidebar.number_input("Enter a stop coordinate", 0, refs[ref_id], range[1])
        coverage = st.sidebar.number_input('The expected coverage', 1, 500, coverage)
        split = st.sidebar.checkbox('Split-alignment only view')
        callet = st.sidebar.checkbox('Show callets only intra-chromosomal split alignment', True)
        y = st.sidebar.number_input("Set a read height", 8, 128, y)
        # Only render windows up to 12 Mbp.
        if range[1] - range[0] <= 1000*1000*12:
            if bed != "":
                flags = " -J {} -F {} -B -s".format(bed, bed)
            else:
                flags = ""
            image = hgb_run(name_input, ref_id, range, coverage, flags, split, y, callet)
            #stroke_color = st.sidebar.beta_color_picker("Stroke color hex: ")
            #drawing_mode = st.sidebar.selectbox(
            #    "Drawing tool:", ("freedraw", "line", "rect", "circle", "transform"), 4
            #)
            drawing_mode = "transform"
            stroke_color = "red"
            # Render the browser image on a drawable canvas for annotation.
            canvas_result = st_canvas(
                fill_color="rgba(255, 165, 0, 0.3)",  # Fixed fill color with some opacity
                stroke_width=3, #stroke_width,
                stroke_color=stroke_color,
                background_color="", #if bg_image else bg_color,
                background_image=image,
                update_streamlit=False, #realtime_update or update_button,
                height=image.height,
                width=image.width,
                drawing_mode=drawing_mode,
                key="canvas",
            )
            # Table of annotations overlapping the displayed window.
            fields = ['seqid', 'start', 'end', 'source', 'featuretype', 'strand', 'attributes']
            allFoo = list(db.region(region=(ref_id, range[0], range[1]), completely_within=False))
            df = pd.DataFrame([{fn: getattr(f, fn) for fn in fields} for f in allFoo], columns=fields)
            st.dataframe(df)
            #df = pd.DataFrame([vars(f) for f in list(db.region(region=(ref_id, range[0], range[1]), completely_within=False))])
            #df = pd.DataFrame(list(db.region(region=(ref_id, range[0], range[1]), completely_within=False)))
            #print(df)
    # Widen the main content column via injected CSS.
    st.markdown(
        f"""
<style>
.reportview-container .main .block-container{{
    max-width: 1280px;
}}
</style>
""",
        unsafe_allow_html=True,
    )
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.