id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
1848239 | <reponame>ButterAndButterfly/H-Breaker
#!/usr/bin/env python
# coding:utf-8
import os
def break_tail(path: str, keys):
    """Obfuscate the last ``len(keys)`` bytes of the file at *path* in place.

    ``keys`` may be a ``str`` (encoded as UTF-8) or ``bytes``; the tail is
    replaced by ``_break(tail, keys)``. The file must be at least as long as
    the key (enforced by the assert).
    """
    if not isinstance(keys, bytes):
        keys = bytes(keys, encoding="utf8")
    key_len = len(keys)
    offset = os.path.getsize(path) - key_len
    assert offset >= 0
    with open(path, "rb+") as fh:
        fh.seek(offset)
        original_tail = fh.read(key_len)
        fh.seek(offset)
        fh.write(_break(original_tail, keys))
def recover_tail(path: str, keys):
    """Restore the last ``len(keys)`` bytes of the file at *path* in place.

    Inverse of ``break_tail``: the tail is replaced by ``_recover(tail, keys)``.
    ``keys`` may be a ``str`` (encoded as UTF-8) or ``bytes``.
    """
    if not isinstance(keys, bytes):
        keys = bytes(keys, encoding="utf8")
    key_len = len(keys)
    offset = os.path.getsize(path) - key_len
    assert offset >= 0
    with open(path, "rb+") as fh:
        fh.seek(offset)
        scrambled_tail = fh.read(key_len)
        fh.seek(offset)
        fh.write(_recover(scrambled_tail, keys))
def break_head(path: str, keys):
    """Obfuscate the first ``len(keys)`` bytes of the file at *path* in place.

    ``keys`` may be a ``str`` (encoded as UTF-8) or ``bytes``; the head is
    replaced by ``_break(head, keys)``.
    """
    if not isinstance(keys, bytes):
        keys = bytes(keys, encoding="utf8")
    with open(path, "rb+") as fh:
        original_head = fh.read(len(keys))
        fh.seek(0)
        fh.write(_break(original_head, keys))
def recover_head(path: str, keys):
    """Restore the first ``len(keys)`` bytes of the file at *path* in place.

    Inverse of ``break_head``: the head is replaced by ``_recover(head, keys)``.
    ``keys`` may be a ``str`` (encoded as UTF-8) or ``bytes``.
    """
    if not isinstance(keys, bytes):
        keys = bytes(keys, encoding="utf8")
    with open(path, "rb+") as fh:
        scrambled_head = fh.read(len(keys))
        fh.seek(0)
        fh.write(_recover(scrambled_head, keys))
def _break(heads: bytes, keys: bytes):
'''
heads 和 keys 对应字节两两相加
'''
assert(len(heads) == len(keys))
data = [ (heads[index] + keys[index])&0xff for index in range(len(keys))]
return bytes(data)
def _recover(heads: bytes, keys: bytes):
'''
heads 和 keys 对应字节两两相减
'''
assert(len(heads) == len(keys))
data = [ (heads[index] + 256 - keys[index])&0xff for index in range(len(keys))]
return bytes(data)
if __name__ == '__main__':
    # main()
    # NOTE(review): hard-coded local path; running this script mutates the
    # file below in place (head and tail bytes are scrambled).
    file = r'D:\Workspace\NiceLeee-FFmpeg.zip'
    #file = r'D:\Workspace\PythonWorkspace\HeadBreaker\test.txt'
    # The key string's length determines how many head/tail bytes are scrambled.
    keys = '3.14151111111111111111111111111111111111111111111111111111111111111111'
    break_head(file, keys)
    break_tail(file, keys)
    # To restore the file, comment out the two calls above and run these instead:
    #recover_head(file, keys)
    #recover_tail(file, keys)
| StarcoderdataPython |
3401103 | <gh_stars>1-10
#!/usr/bin/env python3
# <NAME> 2021 - Phage Annotation Workshop
"""
Read a GenBank file (gbk) and return the upstream sequences of each feature
"""
import sys, os
import argparse
from Bio import SeqIO
if __name__ == "__main__":
    # Command-line interface: input GenBank file, optional output file,
    # upstream window length, and the feature type to extract (default CDS).
    args = argparse.ArgumentParser(description="Read a GenBank file (gbk) and return the upstream sequences of each feature")
    args.add_argument("-i", "--input", help="Input file", required=True)
    args.add_argument("-o", "--output", help="Output file")
    args.add_argument("-l", "--length", help="Length of upstream sequence [default: %(default)s]", default=100)
    args.add_argument("-t", "--type", help="Record type [default: %(default)s]", default="CDS")
    args = args.parse_args()
    if not os.path.isfile(args.input):
        print("ERROR: Input file not found")
        sys.exit(1)
    # Write to the requested file, or to stdout when no output path was given.
    if args.output:
        output = open(args.output, "w")
    else:
        output = sys.stdout
    # get all sequence records for the specified genbank file
    recs = [rec for rec in SeqIO.parse(args.input, "genbank")]
    for rec in recs:
        feats = [feat for feat in rec.features if feat.type == args.type]
        for feat in feats:
            # get the upstream sequence: calculate start and end positions of the
            # slice, clamped to the record boundaries.
            if feat.location.start < int(args.length):
                start = 0
            else:
                start = feat.location.start - int(args.length)
            if feat.location.end + int(args.length) > len(rec.seq):
                end = len(rec.seq)
            else:
                end = feat.location.end + int(args.length)
            # Slice depending on the strand
            if feat.location.strand == 1:
                strand = "+"
                seq = rec.seq[start:feat.location.start]
            else:
                strand = "-"
                # For reverse-strand features "upstream" lies after the feature
                # end, so slice there and reverse-complement.
                seq = rec.seq[feat.location.end:end].reverse_complement()
            # Sequence name: locus tag plus coords/strand/product metadata.
            name=feat.qualifiers["locus_tag"][0]
            name += " coords=" + str(feat.location.start) + "-" + str(feat.location.end) + " strand=" + strand
            name += " product='" + feat.qualifiers["product"][0] + "'"
            name += " upstream=" + str(args.length) + " slice=" + str(start) + "-" + str(end)
            # Emit a FASTA record: ">" header line, then the sequence.
            print(">" , name, "\n", seq, sep="", file=output)
| StarcoderdataPython |
3319973 | from styx_msgs.msg import TrafficLight
import rospy
import tensorflow as tf
import numpy as np
import os
import cv2
class TLClassifier(object):
    """Traffic-light detector/classifier backed by a frozen TF object-detection graph.

    Detection finds traffic-light boxes; classification picks Red/Yellow/Green
    by comparing the brightness of the top/middle/bottom thirds of each box.
    """
    def __init__(self, model_name):
        # Variables
        PATH_TO_CKPT = os.path.join(model_name, 'frozen_inference_graph.pb')
        # Index order matches the 3-section argmax below: 0=Red, 1=Yellow, 2=Green.
        self.tl_colors = ['Red', 'Yellow', 'Green', '-', 'Undefined']
        # Drawing colors parallel to tl_colors (BGR order, per the cv2 usage below).
        self.tl_colorCodes = [(0, 0, 255), (0, 255, 255), (0, 255, 0), (0, 0, 0), (200, 200, 200)]
        # Load frozen TF model to memory
        self.detection_graph = tf.Graph()
        with self.detection_graph.as_default():
            od_graph_def = tf.GraphDef()
            with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid:
                serialized_graph = fid.read()
                od_graph_def.ParseFromString(serialized_graph)
                tf.import_graph_def(od_graph_def, name='')
        # Definite input and output Tensors for self.detection_graph
        self.image_tensor = self.detection_graph.get_tensor_by_name('image_tensor:0')
        # Each box represents a part of the image where a particular object was detected.
        self.detection_boxes = self.detection_graph.get_tensor_by_name('detection_boxes:0')
        # Each score represent how level of confidence for each of the objects.
        # Score is shown on the result image, together with the class label.
        self.detection_scores = self.detection_graph.get_tensor_by_name('detection_scores:0')
        self.detection_classes = self.detection_graph.get_tensor_by_name('detection_classes:0')
        self.num_detections = self.detection_graph.get_tensor_by_name('num_detections:0')
        config = tf.ConfigProto()
        config.gpu_options.allow_growth = True
        self.sess = tf.Session(graph=self.detection_graph, config=config)
        # Variables for frames skipping when running on a CPU
        self.on_gpu = tf.test.is_gpu_available(cuda_only=True)
        self.skip_frame = False
        self.last_state = TrafficLight.UNKNOWN
        self.last_image_np = np.zeros(1)

    def get_classification(self, image, roi):
        """Determines the color of the traffic light in the image
        Args:
            image (cv::Mat): image containing the traffic light
            roi: (ymin, xmin, ymax, xmax) region of interest as fractions
                of the full frame.
        Returns:
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)
            image (cv::Mat): image containing debug detection output
        """
        tl_state = TrafficLight.UNKNOWN
        # Input image preprocessing
        image_np = np.array(image).astype(np.uint8)
        # Convert the fractional ROI into pixel coordinates and crop.
        ymin = int(roi[0] * image_np.shape[0])
        xmin = int(roi[1] * image_np.shape[1])
        ymax = int(roi[2] * image_np.shape[0])
        xmax = int(roi[3] * image_np.shape[1])
        image_cropped = image_np[ymin:ymax, xmin:xmax]
        # Frames skipping when running on a CPU: every other frame returns the
        # cached result to halve the inference load.
        if not self.on_gpu and self.skip_frame:
            self.skip_frame = not self.skip_frame
            return self.last_state, self.last_image_np
        # Expand dimensions since the model expects images
        # to have shape: [1, None, None, 3]
        image_np_expanded = np.expand_dims(image_cropped, axis=0)
        # Actual detection.
        (boxes, scores, classes, num) = self.sess.run(
            [self.detection_boxes, self.detection_scores,
             self.detection_classes, self.num_detections],
            feed_dict={self.image_tensor: image_np_expanded})
        # Filter for robust tl_classification when there are multiple of them
        tl_states = []
        for bbox, score, clas in zip(boxes[0], scores[0], classes[0]):
            # Keep detections of class 10 (presumably 'traffic light' in the
            # model's label map — confirm against the checkpoint) whose height
            # is plausible relative to the ROI.
            if (score > 0.3) and (clas == 10) and \
                    (0.07/(roi[2]-roi[0]) < (bbox[2] - bbox[0]) < 0.5/(roi[2]-roi[0])):
                ytl = int(bbox[0] * image_cropped.shape[0])
                xtl = int(bbox[1] * image_cropped.shape[1])
                ybr = int(bbox[2] * image_cropped.shape[0])
                xbr = int(bbox[3] * image_cropped.shape[1])
                ### Classify the color of the traffic light
                # Crop the tl bbox
                tl_img = image_cropped[ytl:ybr, xtl:xbr]
                # Crop margins
                offset = int(tl_img.shape[1]/4)
                cr_img = tl_img[offset:-offset, offset:-offset]
                # Aspect ratio check
                # NOTE(review): under Python 2 this is integer division —
                # confirm which interpreter this ROS node runs on.
                asp_rat = cr_img.shape[0] / cr_img.shape[1]
                if 1.5 < asp_rat < 5:
                    # Convert to HSV and extract Value part from the image
                    if cv2.__version__ < '3.0.0':
                        cr_v_img = cv2.cvtColor(cr_img, cv2.cv.CV_BGR2HSV)[:,:,2]
                    else:
                        cr_v_img = cv2.cvtColor(cr_img, cv2.COLOR_BGR2HSV)[:,:,2]
                    # Finding mean intensities of each section
                    section_h = int(cr_img.shape[0]/3)
                    sections = np.hstack((np.mean(cr_v_img[:section_h]),
                                          np.mean(cr_v_img[section_h:2*section_h]),
                                          np.mean(cr_v_img[2*section_h:])))
                    # Brightest third decides the state: 0=top(Red), 1=Yellow, 2=Green.
                    tl_st = np.argmax(sections)
                    tl_states.append(tl_st)
                    # Draw debug information on the frame
                    try:
                        cv2.rectangle(image_np, (xmin+xtl, ymin+ytl),
                                      (xmin+xbr, ymin+ybr),
                                      self.tl_colorCodes[tl_st], 3)
                    except:
                        pass
                    txt = '%s: %.2f'%(self.tl_colors[tl_st][0], score)
                    # Put the label above the box unless it would leave the frame.
                    bot_pos = ymin+ytl-10 if ymin+ytl-10 > 30 else ymin+ybr+25
                    left_pos = xmin+xtl if xmin+xtl > 0 else 0
                    try:
                        cv2.putText(image_np, txt, (left_pos, bot_pos),
                                    cv2.FONT_HERSHEY_SIMPLEX, 0.8,
                                    self.tl_colorCodes[tl_st], 2)
                    except:
                        pass
                else:
                    tl_st = TrafficLight.UNKNOWN
                # debug
                rospy.logdebug("%s: %.3f, bbox: %s"%(self.tl_colors[tl_st], score, bbox))
        # Only commit a state when all accepted detections agree.
        if len(set(tl_states)) == 1:
            tl_state = tl_states[0]
            try:
                cv2.rectangle(image_np, (xmin, ymin), (xmax, ymax),
                              self.tl_colorCodes[tl_state], 15)
            except:
                pass
        # Update variables for frames skipping when running on a CPU
        if not self.on_gpu:
            self.last_state = tl_state
            self.skip_frame = not self.skip_frame
            self.last_image_np = image_np
        return tl_state, image_np
| StarcoderdataPython |
9791235 | from .share_record import ShareRecord | StarcoderdataPython |
9680687 | """
********************************************************************************
* Name: files_tab.py
* Author: gagelarsen
* Created On: December 03, 2020
* Copyright: (c) Aquaveo 2020
********************************************************************************
"""
import json
import mimetypes
import os
import re
import time
import uuid
from django.http import HttpResponse, Http404
import tethys_gizmos.gizmo_options.datatable_view as gizmo_datatable_view
from .resource_tab import ResourceTab
class ResourceFilesTab(ResourceTab):
    """
    A tab for the TabbedResourceDetails view that lists collections and files that are contained in those collections.
    Required URL Variables:
        resource_id (str): the ID of the Resource.
        tab_slug (str): Portion of URL that denotes which tab is active.
    Properties:
        file_hide_patterns: A list of regular expression patterns for files that should not be shown in the files tab.
    Methods:
        get_file_collections (required): Override this method to define a list of FileCollections that are shown in this tab.
    """  # noqa: E501
    template_name = 'atcore/resources/tabs/files_tab.html'
    post_load_callback = 'files_tab_loaded'
    js_requirements = ResourceTab.js_requirements + [
        x for x in gizmo_datatable_view.DataTableView.get_vendor_js()
    ] + [
        'atcore/resources/files_tab.js',
    ]
    # BUGFIX: this list previously called get_vendor_js(), which injected the
    # DataTable JavaScript files into the stylesheet list and omitted the
    # vendor CSS entirely.
    css_requirements = ResourceTab.css_requirements + [
        x for x in gizmo_datatable_view.DataTableView.get_vendor_css()
    ] + [
        'atcore/resources/files_tab.css'
    ]
    # Regex patterns for entries that should be pruned from the file tree.
    file_hide_patterns = [r'__meta__.json']

    def get_file_collections(self, request, resource, session, *args, **kwargs):
        """
        Get the file_collections. Override in subclasses.

        Returns:
            A list of FileCollection clients.
        """
        return []

    def get_context(self, request, session, resource, context, *args, **kwargs):
        """
        Build context for the ResourceFilesTab template that is used to generate the tab content.
        """
        collections = self.get_file_collections(request, resource, session)
        # Map each FileCollection instance id to its folder-tree dictionary.
        files_from_collection = {}
        for collection in collections:
            instance_id = collection.instance.id
            files_from_collection[instance_id] = self._path_hierarchy(collection.path)
        context['collections'] = files_from_collection
        return context

    def _path_hierarchy(self, path: str, root_dir: str = None, parent_slug: str = None):
        """
        A function used to create a dictionary representation of a folder structure.

        Args:
            path: The path to recursively map to a dictionary.
            root_dir: The root directory to be trimmed off of the absolute paths.
            parent_slug: The slug for the parent used for hiding and showing files.

        Returns:
            dict: A dictionary defining the folder structure of the provided path,
                or None when the entry matches one of ``file_hide_patterns``.
        """
        if root_dir is None:
            root_dir = os.path.abspath(os.path.join(path, os.pardir))
        # Remove the root directory from the string that will be placed in the structure.
        # These paths will be relative to the path provided.
        hierarchy_path = path.replace(root_dir, '')
        name = os.path.basename(path)
        # Hidden entries (e.g. __meta__.json) are pruned from the tree.
        for pattern in self.file_hide_patterns:
            if re.search(pattern, name) is not None:
                return None
        hierarchy = {
            'type': 'folder',
            'name': name,
            'path': hierarchy_path,
            'parent_path': os.path.abspath(os.path.join(hierarchy_path, os.pardir)).replace(root_dir, ''),
            'parent_slug': parent_slug,
            # Slug usable as an HTML id/class: separators, dots, and dashes
            # replaced with underscores.
            'slug': '_' + hierarchy_path.replace(os.path.sep, '_').replace('.', '_').replace('-', '_'),
        }
        # Try and get a display name from the collection's meta file, if any.
        meta_file = os.path.join(path, '__meta__.json')
        if os.path.isfile(meta_file):
            try:
                with open(meta_file) as mf:
                    meta_json = json.load(mf)
                    if 'display_name' in meta_json:
                        hierarchy['name'] = meta_json['display_name']
            except json.JSONDecodeError:
                # Malformed meta file: keep the directory's basename as the name.
                pass
        # Try and access 'children' here. If we can't, this is a file.
        try:
            # Recurse through each of the children if it is a directory.
            hierarchy['children'] = []
            for contents in os.listdir(path):
                child = self._path_hierarchy(os.path.join(path, contents), root_dir, hierarchy['slug'])
                if child is not None:
                    hierarchy['children'].append(child)
            # If it is a directory we need to calculate the most recent modified date of a contained file
            hierarchy['date_modified'] = time.ctime(max(os.path.getmtime(root) for root, _, _ in os.walk(path)))
        # Catch the errors and assume we are dealing with a file instead of a directory
        except OSError:
            hierarchy['type'] = 'file'
            hierarchy['date_modified'] = time.ctime(os.path.getmtime(path))
            # Calculate the file size and convert to the appropriate measurement.
            power = 2 ** 10
            n = 0
            power_labels = {0: 'Bytes', 1: 'KB', 2: 'MB', 3: 'GB', 4: 'TB'}
            size = os.path.getsize(path)
            while size > power:
                size /= power
                n += 1
            size_str = f'{size:.1f}' if size > 0 else '0'
            hierarchy['size'] = f'{size_str} {power_labels[n]}'
        return hierarchy

    def download_file(self, request, resource, session, *args, **kwargs):
        """
        A function to download a file from a request.

        Raises:
            Http404: when the collection or file cannot be resolved.
        """
        collection_id = request.GET.get('collection-id', None)
        file_path = request.GET.get('file-path', None)
        # Guard against missing query parameters (uuid.UUID(None) would raise
        # an unhandled TypeError instead of a clean 404).
        if collection_id is None or file_path is None:
            raise Http404('Unable to download file.')
        collections = self.get_file_collections(request, resource, session)
        for collection in collections:
            if uuid.UUID('{' + collection_id + '}') == collection.instance.id:
                base_file_path = collection.path.replace(collection_id, '')
                full_file_path = base_file_path + file_path
                file_ext = os.path.splitext(full_file_path)[1]
                # Fall back to text/plain for unknown extensions.
                mimetype = mimetypes.types_map.get(file_ext, 'text/plain')
                if os.path.exists(full_file_path):
                    with open(full_file_path, 'rb') as fh:
                        response = HttpResponse(fh.read(), content_type=mimetype)
                        response['Content-Disposition'] = 'filename=' + os.path.basename(file_path)
                        return response
        raise Http404('Unable to download file.')
| StarcoderdataPython |
11395546 | <filename>09Ajax/10lagou.py
from selenium import webdriver
from lxml import etree
import re
import time
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
class LagouSpider(object):
    """Scrapes Python job postings (Beijing) from lagou.com with Selenium."""
    def __init__(self):
        self.driver = webdriver.Chrome()
        # Search-results URL for Python jobs in Beijing.
        self.url = 'https://www.lagou.com/jobs/list_python?city=%E5%8C%97%E4%BA%AC&cl=false&fromSearch=true&labelWords=&suginput='
        # Accumulates one dict per scraped job posting.
        self.position = []

    def run(self):
        """Walk every results page, scraping each job's detail page."""
        self.driver.get(self.url)
        while True:
            source = self.driver.page_source
            # Wait until the pager is present before parsing the page.
            WebDriverWait(driver=self.driver,timeout=20).until(
                EC.presence_of_element_located((By.XPATH,"//div[@class='pager_container']/span[last()]"))
            )
            self.parse_list_page(source)
            # Click "next page"; stop when the button is disabled (last page).
            next_btn = self.driver.find_element_by_xpath(
                "//div[@class='pager_container']/span[last()]")
            if "pager_next_disabled" in next_btn.get_attribute("class"):
                break
            else:
                next_btn.click()
            time.sleep(1)

    def parse_list_page(self,source):
        """Extract the detail-page URLs of every job on a results page."""
        html = etree.HTML(source)
        links = html.xpath("//a[@class='position_link']/@href")
        # Visit each job's detail page in turn.
        for link in links:
            self.request_detail_page(link)
            time.sleep(1)

    def request_detail_page(self,url):
        """Open a job detail page in a new tab, scrape it, and return to the list tab."""
        # self.driver.get(url)
        self.driver.execute_script("window.open('%s')"%url)
        self.driver.switch_to.window(self.driver.window_handles[1])
        WebDriverWait(driver=self.driver,timeout=20).until(
            EC.presence_of_element_located((By.XPATH,"//div[@class='job-name']/span[@class='name']"))
        )
        # Grab the detail page's HTML source.
        source = self.driver.page_source
        self.parse_detail_page(source)
        # Close the current detail tab and switch back to the list tab.
        self.driver.close()
        self.driver.switch_to.window(self.driver.window_handles[0])

    def parse_detail_page(self,source):
        """Parse one job detail page and append the extracted fields to self.position."""
        html = etree.HTML(source)
        position_name = html.xpath("//span[@class='name']/text()")[0]
        job_request_spans = html.xpath("//dd[@class='job_request']//span")
        salary = job_request_spans[0].xpath('.//text()')[0].strip()
        city = job_request_spans[1].xpath('.//text()')[0].strip()
        # Strip whitespace and "/" separators from the span text.
        city = re.sub(r"[\s/]","",city)
        work_years = job_request_spans[2].xpath('.//text()')[0].strip()
        work_years = re.sub(r"[\s/]","",work_years)
        education = job_request_spans[3].xpath('.//text()')[0].strip()
        education = re.sub(r"[\s/]","",education)
        desc = "".join(html.xpath("//dd[@class='job_bt']//text()")).strip()
        company_name = html.xpath("//h2[@class='fl']/text()")[0].strip()
        position = {
            'name':position_name,
            'company_name':company_name,
            'salary':salary,
            'city': city,
            'work_years': work_years,
            'education': education,
            'desc': desc,
        }
        self.position.append(position)
        print(position)
        print('-'*200)
if __name__ == '__main__':
    # Launch the scraper; results are printed and kept in spider.position.
    spider = LagouSpider()
    spider.run()
6660616 | import ui
class RootView(ui.View):
    """Base view that corrects Pythonista's ui coordinate conversions in fullscreen.

    A hidden 1x1 WebView is used to query ``window.orientation`` from
    JavaScript, because the built-in conversion functions report coordinates
    relative to a portrait screen when the view is presented fullscreen.
    """

    def __init__(self):
        '''Children must call RootView.__init__(self), in order to set up hidden webview!'''
        self.__w = ui.WebView(frame=(1, 1, 1, 1))
        self.add_subview(self.__w)

    @staticmethod
    def convert_point(point=(0, 0), from_view=None, to_view=None):
        '''Fixed convert_point for fullscreen applications; works for any present type.

        The existing ui.convert_point reports relative to portrait when
        fullscreen; this compensates using the apparent axis directions.
        TODO: does not work if from_view or to_view has been Transformed.
        '''
        (w, h) = ui.get_screen_size()
        # Probe how ui.convert_point maps the unit axes to detect what
        # rotation it (incorrectly) thinks is in effect.
        origin = ui.convert_point((0, 0), from_view, to_view)
        xaxis = ui.convert_point((1, 0), from_view, to_view)
        xaxis = [xaxis[j] - origin[j] for j in (0, 1)]
        yaxis = ui.convert_point((0, 1), from_view, to_view)
        yaxis = [yaxis[j] - origin[j] for j in (0, 1)]
        pt_c = ui.convert_point(tuple(point), from_view, to_view)
        pt = [0, 0]
        if from_view is not None:
            # Undo the portrait-relative rotation baked into pt_c.
            pt[0] = ((xaxis[0] == -1) * h
                     + xaxis[0] * pt_c[0]
                     + (yaxis[0] == 1) * w
                     - yaxis[0] * pt_c[1])
            pt[1] = ((xaxis[1] == 1) * h
                     - xaxis[1] * pt_c[0]
                     + (yaxis[1] == -1) * w
                     + yaxis[1] * pt_c[1])
        else:  # just get corrected origin, and subtract out
            origin_offset = RootView.convert_point((0, 0), to_view, from_view)
            pt[0] = point[0] - origin_offset[0]
            pt[1] = point[1] - origin_offset[1]
        return tuple(pt)

    @staticmethod
    def convert_rect(rect=(0, 0, 0, 0), from_view=None, to_view=None):
        '''Convert a rect's origin between views; width/height are unchanged.'''
        pt = RootView.convert_point((rect[0], rect[1]), from_view, to_view)
        return (pt[0], pt[1], rect[2], rect[3])

    def get_keyboard_frame(self, frame=None):
        '''Return the corrected keyboard frame, in screen coordinates.

        The built-in function breaks in fullscreen, as it reports the keyboard
        frame relative to a landscape screen.

        Raises:
            ValueError: if window.orientation reports an unexpected value.
        '''
        # TODO: remove dependence on webview, use xaxis/yaxis to determine rotation instead
        if frame is None:
            frame = ui.get_keyboard_frame()
        origin = ui.convert_point((0, 0), None, self)
        xaxis = ui.convert_point((1, 0), None, self)
        xaxis = [xaxis[j] - origin[j] for j in (0, 1)]
        yaxis = ui.convert_point((0, 1), None, self)
        yaxis = [yaxis[j] - origin[j] for j in (0, 1)]
        o = self.__w.eval_js('window.orientation')
        (w, h) = ui.get_screen_size()
        if xaxis[0] == 1 and yaxis[1] == 1 and frame[0] == 0:
            # we are not in fullscreen, just return kbframe
            fixedframe = frame
        elif o == '0':
            fixedframe = frame
        elif o == '-90':
            fixedframe = [frame[1], frame[0], h, frame[2]]
        elif o == '180':
            fixedframe = [frame[0], h - frame[1] - frame[3], frame[2], frame[3]]
        elif o == '90':
            fixedframe = [frame[1], w - frame[0] - frame[2], h, frame[2]]
        else:
            # BUGFIX: the original raised `Error(...)`, an undefined name that
            # would surface as a NameError; raise a real exception type.
            raise ValueError('UnexpectedOrientation')
        return fixedframe

    def get_orientation(self):
        '''Return window.orientation as reported by the hidden WebView (a string).'''
        return self.__w.eval_js('window.orientation')
if __name__=='__main__':
    class testconvert(RootView):
        """Interactive demo: shows corrected touch coordinates and keyboard frame."""
        def __init__(self):
            RootView.__init__(self)
            self.t1=ui.Label(frame=(0,60,400,20))
            self.t2=ui.Label(frame=(0,90,400,20))
            self.t3=ui.TextView( frame=(0,120,700,200),bg_color=(0.7,0.7,0.7,0.5))
            self.t3.text='textview for kb'
            # the first time the keyboard appears, get kbframe is wrong...
            # so, show then hide keyboard.
            self.t3.begin_editing()
            ui.delay(self.t3.end_editing,0.5)
            # finally, show kbframe again
            ui.delay(self.t3.begin_editing,1.0)
            self.t1.text='touch to begin'
            [self.add_subview(s) for s in [self.t1,self.t2,self.t3]]
        def touch_began(self,touch):
            # Round-trips the point view->screen->view to demonstrate the fix.
            self.t1.text='touch in view:={} == {}'.format(touch.location, self.convert_point(self.convert_point(touch.location,self,None),None ,self))
            self.t2.text='touch in screen:={0:1}'.format(self.convert_point(touch.location,self,None))
        def draw(self):
            '''draw a green box around kb frame, padded by 10 pixels'''
            kb=self.get_keyboard_frame()
            # print kb
            kb_self=self.convert_rect(kb,None,self)
            # print kb_self
            ui.set_color((0,1,0,0.5))
            ui.fill_rect(kb_self[0]-10,kb_self[1]-10, kb_self[2]+20,kb_self[3]+20)
            self.t3.text=('orientation {}\n'
                'kbframe {}\n'
                'kbframe fixed {}\n '
                'kbframe in V {}\n').format(self.get_orientation(),ui.get_keyboard_frame(),kb,kb_self)
        def keyboard_frame_did_change(self,frame):
            '''wait a tiny bit, then update display.
            i forget why i thought i needed the delay, maybe to ensure convert_point was updated.
            does not seem to be needed now'''
            ui.delay(self.set_needs_display,0.2)
        def touch_moved(self,touch):
            self.touch_began(touch)
    #main code
    import console
    # Let the user pick how to present the view; alert returns 1, 2, or 3.
    ptype=console.alert('select present type','select one','fullscreen','panel','sheet')
    ptypes=('fullscreen','panel','sheet')
    V=testconvert()
    def show():
        V.present(ptypes[ptype-1],hide_title_bar=False ) #works if hide is True too
        V.bg_color=(1,1,1)
    ui.delay(show,0.5) # wait until dialog is really gone
9640309 | <reponame>eladc-git/model_optimization<filename>model_compression_toolkit/common/constants.py
# Copyright 2021 Sony Semiconductors Israel, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Minimal threshold to use for quantization ranges:
MIN_THRESHOLD = (2 ** -28)
# Small numerical-stability constant (presumably guards divisions/logs at the
# call sites — confirm where it is consumed):
EPS = 1e-8
# Number of bits used for quantization multipliers:
MULTIPLIER_N_BITS = 8
# Quantization attributes (dictionary keys for per-node quantization config):
OUTPUT_SCALE = 'output_scale'
THRESHOLD = 'threshold'
SIGNED = 'is_signed'
CLUSTER_CENTERS = 'cluster_centers'
SCALE_PER_CHANNEL = 'scale_per_channel'
# Data types:
DATA_TYPE = 'dtype'
FLOAT_32 = 'float32'
# Number of Tensorboard cosine-similarity plots to add:
NUM_SAMPLES_CS_TENSORBOARD = 20
# num bits for shift negative non linear node
SHIFT_NEGATIVE_NON_LINEAR_NUM_BITS = 16
# In Mixed-Precision, a node can have multiple candidates for weights quantization configuration.
# In order to display a single view of a node (for example, for logging in TensorBoard) we need to track the attributes
# that are shared among different candidates:
WEIGHTS_NBITS_ATTRIBUTE = 'weights_n_bits'
CORRECTED_BIAS_ATTRIBUTE = 'corrected_bias'
| StarcoderdataPython |
6584127 | # Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT)
# Bespoke Link to Instruments and Small Satellites (BLISS)
#
# Copyright 2017, by the California Institute of Technology. ALL RIGHTS
# RESERVED. United States Government Sponsorship acknowledged. Any
# commercial use must be negotiated with the Office of Technology Transfer
# at the California Institute of Technology.
#
# This software may be subject to U.S. export control laws. By accepting
# this software, the user agrees to comply with all applicable U.S. export
# laws and regulations. User has the responsibility to obtain export licenses,
# or other export authority as may be required before exporting such
# information to foreign countries or providing access to foreign persons.
"""
AIT Javascript Object Notation (JSON)
The ait.core.json module provides JSON utilities and mixin classes
for encoding and decoding between AIT data structures and JSON.
"""
import collections
import json
def slotsToJSON(obj, slots=None):
    """Converts the given Python object to one suitable for Javascript
    Object Notation (JSON) serialization via :func:`json.dump` or
    :func:`json.dumps`. This function delegates to :func:`toJSON`.
    Specifically only attributes in the list of *slots* are converted.
    If *slots* is not provided, it defaults to the object's
    ``__slots__`` and any inherited ``__slots__``.
    To omit certain slots from serialization, the object may define a
    :meth:`__jsonOmit__(key, val)` method. When the method returns
    True for any particular slot name (i.e. key) and value
    combination, the slot will not be serialized.
    """
    if slots is None:
        # Default: the object's own __slots__ plus those of its direct bases.
        # NOTE(review): only direct bases are inspected, not the full MRO.
        slots = list(obj.__slots__) if hasattr(obj, '__slots__') else [ ]
        for base in obj.__class__.__bases__:
            if hasattr(base, '__slots__'):
                slots.extend(base.__slots__)
    testOmit = hasattr(obj, '__jsonOmit__') and callable(obj.__jsonOmit__)
    result = { }
    for slot in slots:
        # Strip a single leading underscore so "_name" serializes as "name".
        key = slot[1:] if slot.startswith('_') else slot
        val = getattr(obj, slot, None)
        if testOmit is False or obj.__jsonOmit__(key, val) is False:
            result[key] = toJSON(val)
    return result
def toJSON (obj):
    """Converts the given Python object to one suitable for Javascript
    Object Notation (JSON) serialization via :func:`json.dump` or
    :func:`json.dumps`. If the Python object has a :meth:`toJSON`
    method, it is always given preference and will be called to perform
    the conversion.
    Otherwise, plain mapping and sequence types are converted to
    Python dictionaries and lists, respectively, by recursively
    calling this :func:`toJSON` function on mapping keys and values or
    iterable items. Python primitive types handled natively by the
    JSON encoder (``int``, ``long``, ``float``, ``str``, ``unicode``,
    and ``None``) are returned as-is.
    If no other conversion is appropriate, the Python builtin function
    :func:`str` is used to convert the object.
    """
    # NOTE(review): `long`, `unicode`, and `collections.Mapping`/`Sequence`
    # are Python 2 spellings — this module targets Python 2 as written.
    if hasattr(obj, 'toJSON') and callable(obj.toJSON):
        result = obj.toJSON()
    elif isinstance(obj, (int, long, float, str, unicode)) or obj is None:
        result = obj
    elif isinstance(obj, collections.Mapping):
        # Both keys and values are converted recursively.
        result = { toJSON(key): toJSON(obj[key]) for key in obj }
    elif isinstance(obj, collections.Sequence):
        result = [ toJSON(item) for item in obj ]
    else:
        # Last resort: stringify the object.
        result = str(obj)
    return result
class SlotSerializer (object):
    """Mixin providing ``__slots__``-based JSON serialization via slotsToJSON().

    Slots whose value is None or an empty string are omitted from the output.
    """
    __slots__ = [ ]

    def __jsonOmit__(self, key, val):
        # BUGFIX: the original compared with ``val is ''`` — identity of a
        # string literal depends on interning and is unreliable (and a
        # SyntaxWarning on Python 3.8+); use equality instead.
        return val is None or val == ''

    def toJSON(self):
        return slotsToJSON(self)
| StarcoderdataPython |
8183965 | <filename>knox/models.py<gh_stars>100-1000
from django.conf import settings
from django.db import models
from django.utils import timezone
from knox import crypto
from knox.settings import CONSTANTS, knox_settings
User = settings.AUTH_USER_MODEL
class AuthTokenManager(models.Manager):
    """Manager that creates AuthToken rows and returns the plaintext token."""
    def create(self, user, expiry=knox_settings.TOKEN_TTL):
        # Generate a random token; only its hash (digest) is persisted.
        token = crypto.create_token_string()
        digest = crypto.hash_token(token)
        # expiry is a time delta relative to now; None means a non-expiring token.
        if expiry is not None:
            expiry = timezone.now() + expiry
        instance = super(AuthTokenManager, self).create(
            token_key=token[:CONSTANTS.TOKEN_KEY_LENGTH], digest=digest,
            user=user, expiry=expiry)
        # Return both the model instance and the plaintext token — the token
        # cannot be recovered later because only the digest is stored.
        return instance, token
class AuthToken(models.Model):
    """Persisted authentication token, keyed by the hash of the token string."""
    objects = AuthTokenManager()

    # Hash of the full token; used as primary key, so digests are unique.
    digest = models.CharField(
        max_length=CONSTANTS.DIGEST_LENGTH, primary_key=True)
    # Leading characters of the plaintext token, indexed for candidate lookup.
    token_key = models.CharField(
        max_length=CONSTANTS.TOKEN_KEY_LENGTH, db_index=True)
    user = models.ForeignKey(User, null=False, blank=False,
        related_name='auth_token_set', on_delete=models.CASCADE)
    created = models.DateTimeField(auto_now_add=True)
    # Null expiry means the token never expires.
    expiry = models.DateTimeField(null=True, blank=True)

    def __str__(self):
        return '%s : %s' % (self.digest, self.user)
| StarcoderdataPython |
1797981 | from django.views.generic import RedirectView, FormView
from django.contrib.auth import authenticate, login
from django.shortcuts import Http404, redirect
from django.core.urlresolvers import reverse
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.contrib.auth.models import User
from registration.models import RegistrationProfile
from registration.backends.default.views import RegistrationView
from account.forms import RegistrationFormNameAndUniqueEmail
from account.forms import UserProfileForm, SetPasswordForm
from account.models import UserProfile
from sabot.views import JobProcessingView
from sponsor.views import id_generator
class GenerateAuthTokenView(JobProcessingView):
    """(Re)generates the auth token for the user given by the ``pk`` URL kwarg."""
    next_view = "auth_user_list"

    def process_job(self):
        try:
            user = User.objects.get(pk=self.kwargs["pk"])
        except User.DoesNotExist:
            raise Http404
        # Create the profile on the fly if the user does not have one yet.
        try:
            up = UserProfile.objects.get(user=user)
        except UserProfile.DoesNotExist:
            up = UserProfile(user=user)
        # Replace any existing token with a fresh 24-character random one.
        up.authToken = id_generator(24)
        up.save()
        return True
class TokenLoginView(RedirectView):
    """Logs a user in via an auth token embedded in the URL, then redirects."""
    permanent = False

    def get_redirect_url(self, **kwargs):
        # Token validation is delegated to the auth backend via
        # authenticate(token=...).
        user = authenticate(token = kwargs["token"])
        if user is not None:
            if user.is_active:
                login(self.request, user)
                # Redirect to ?next=..., defaulting to the site root.
                return self.request.GET.get("next","/")
        # Unknown token or inactive user: behave as if the page does not exist.
        raise Http404
class UserProfileView(FormView):
    """Lets the logged-in user view and edit their own name and e-mail."""
    template_name = "registration/profile.html"
    form_class = UserProfileForm

    def get_initial(self):
        # Pre-populate the form with the current user's data.
        return {
            "firstName" : self.request.user.first_name,
            "lastName" : self.request.user.last_name,
            "email" : self.request.user.email,
        }

    def form_valid(self, form):
        user = self.request.user
        user.first_name = form.cleaned_data["firstName"]
        user.last_name = form.cleaned_data["lastName"]
        user.email = form.cleaned_data["email"]
        user.save()
        # NOTE(review): re-renders the form (form_invalid) after saving instead
        # of redirecting — presumably to stay on the profile page with the
        # saved values; confirm this is the intended post-save behavior.
        return self.form_invalid(form)
class ActivateAndSetPWView(FormView):
    """Account activation page that also lets the user choose a password."""
    form_class = SetPasswordForm
    template_name = "registration/activate_with_pw.html"
    # Rendered when the activation key is unknown (or already used).
    invalid_template_name = "registration/activate.html"

    def get(self, request, *args, **kwargs):
        # check if activation link is ok, otherwise link to invalid
        try:
            profile = RegistrationProfile.objects.get(activation_key=kwargs["activation_key"])
            return super(ActivateAndSetPWView, self).get(request, *args, **kwargs)
        except RegistrationProfile.DoesNotExist:
            return self.response_class(
                request = self.request,
                template = self.invalid_template_name,
                context = {})

    def form_valid(self, form):
        try:
            profile = RegistrationProfile.objects.get(activation_key=self.kwargs["activation_key"])
            # NOTE(review): "<PASSWORD>" appears to be a redacted placeholder
            # from a dataset scrub — the real cleaned_data key (likely
            # "new_password1" from SetPasswordForm) must be restored; confirm
            # against the form definition.
            profile.user.set_password(form.cleaned_data["<PASSWORD>"])
            profile.user.save()
            # Mark the registration as activated, then send to the login page.
            RegistrationProfile.objects.activate_user(self.kwargs["activation_key"])
            return redirect(reverse("auth_login"))
        except RegistrationProfile.DoesNotExist:
            raise Http404
| StarcoderdataPython |
6672056 | from textual.app import App
from textual import events
from textual.view import View
from textual.widgets import Placeholder
from textual.layouts.grid import GridLayout
import logging
from logging import FileHandler
# Log to a file rather than the terminal so the TUI display is not disturbed.
logging.basicConfig(
    level="NOTSET",
    format="%(message)s",
    datefmt="[%X]",
    handlers=[FileHandler("richtui.log")],
)
log = logging.getLogger("rich")
class GridTest(App):
    """Minimal Textual app demonstrating GridLayout with repeating rows/columns."""
    async def on_load(self, event: events.Load) -> None:
        # q or Ctrl+C quits the app.
        await self.bind("q,ctrl+c", "quit", "Quit")

    async def on_startup(self, event: events.Startup) -> None:
        layout = GridLayout()
        await self.push_view(View(layout=layout))
        # One column/row template; set_repeat tiles them to fill the screen.
        layout.add_column("col", fraction=1, max_size=20)
        layout.add_row("row", fraction=1, max_size=10)
        layout.set_repeat(True, True)
        # Named area spanning columns 2-4 and rows 2-3 of the repeated grid.
        layout.add_areas(center="col-2-start|col-4-end,row-2-start|row-3-end")
        layout.set_align("stretch", "center")
        # 20 placeholders flow into the grid; one more fills the "center" area.
        # *(Placeholder() for _ in range(20)),
        layout.place(*(Placeholder() for _ in range(20)), center=Placeholder())
        # layout.add_column(fraction=1, name="left", min_size=20)
        # layout.add_column(size=30, name="center")
        # layout.add_column(fraction=1, name="right")
        # layout.add_row(fraction=1, name="top", min_size=2)
        # layout.add_row(fraction=2, name="middle")
        # layout.add_row(fraction=1, name="bottom")
        # layout.add_areas(
        #     area1="left,top",
        #     area2="center,middle",
        #     area3="left-start|right-end,bottom",
        #     area4="right,top-start|middle-end",
        # )
        # layout.place(
        #     area1=Placeholder(name="area1"),
        #     area2=Placeholder(name="area2"),
        #     area3=Placeholder(name="area3"),
        #     area4=Placeholder(name="area4"),
        # )
# Launch the Textual app (blocks until the user quits with "q"/Ctrl+C).
GridTest.run(title="Grid Test")
| StarcoderdataPython |
3380375 | <filename>lib/galaxy/managers/quotas.py<gh_stars>1-10
"""
Manager and Serializers for Quotas.
For more information about quotas: https://galaxyproject.org/admin/disk-quotas/
"""
import logging
from typing import (
cast,
Optional,
Tuple,
Union,
)
from sqlalchemy import (
false,
true
)
from galaxy import model, util
from galaxy.app import StructuredApp
from galaxy.exceptions import ActionInputError
from galaxy.managers import base
from galaxy.managers.context import ProvidesUserContext
from galaxy.quota import DatabaseQuotaAgent
from galaxy.quota._schema import (
CreateQuotaParams,
CreateQuotaResult,
DefaultQuotaValues,
DeleteQuotaPayload,
QuotaDetails,
QuotaOperation,
QuotaSummaryList,
UpdateQuotaParams,
)
from galaxy.schema.fields import EncodedDatabaseIdField
from galaxy.web import url_for
log = logging.getLogger(__name__)
class QuotaManager:
    """Interface/service object to interact with Quotas.

    All mutating methods flush the SQLAlchemy session and return a
    human-readable status message; invalid input raises ActionInputError.
    """

    def __init__(self, app: StructuredApp):
        self.app = app

    @property
    def sa_session(self):
        """The application's scoped SQLAlchemy session."""
        return self.app.model.context

    @property
    def quota_agent(self) -> DatabaseQuotaAgent:
        return cast(DatabaseQuotaAgent, self.app.quota_agent)

    def create_quota(self, payload: dict, decode_id=None) -> Tuple[model.Quota, str]:
        """Create a quota (optionally a default one) and its user/group
        associations; returns (quota, message)."""
        params = CreateQuotaParams.parse_obj(payload)
        create_amount = self._parse_amount(params.amount)
        if self.sa_session.query(model.Quota).filter(model.Quota.name == params.name).first():
            raise ActionInputError("Quota names must be unique and a quota with that name already exists, please choose another name.")
        elif create_amount is False:
            raise ActionInputError("Unable to parse the provided amount.")
        elif params.operation not in model.Quota.valid_operations:
            raise ActionInputError("Enter a valid operation.")
        elif params.default != DefaultQuotaValues.NO and params.operation != QuotaOperation.EXACT:
            raise ActionInputError("Operation for a default quota must be '='.")
        elif create_amount is None and params.operation != QuotaOperation.EXACT:
            # None means "unlimited" (see _parse_amount).
            raise ActionInputError("Operation for an unlimited quota must be '='.")
        # Create the quota
        quota = model.Quota(name=params.name, description=params.description, amount=create_amount, operation=params.operation)
        self.sa_session.add(quota)
        # If this is a default quota, create the DefaultQuotaAssociation
        if params.default != DefaultQuotaValues.NO:
            self.quota_agent.set_default_quota(params.default, quota)
            message = f"Default quota '{quota.name}' has been created."
        else:
            # Create the UserQuotaAssociations
            in_users = [self.sa_session.query(model.User).get(decode_id(x) if decode_id else x) for x in util.listify(params.in_users)]
            in_groups = [self.sa_session.query(model.Group).get(decode_id(x) if decode_id else x) for x in util.listify(params.in_groups)]
            if None in in_users:
                raise ActionInputError("One or more invalid user id has been provided.")
            for user in in_users:
                uqa = model.UserQuotaAssociation(user, quota)
                self.sa_session.add(uqa)
            # Create the GroupQuotaAssociations
            if None in in_groups:
                raise ActionInputError("One or more invalid group id has been provided.")
            for group in in_groups:
                gqa = model.GroupQuotaAssociation(group, quota)
                self.sa_session.add(gqa)
            message = f"Quota '{quota.name}' has been created with {len(in_users)} associated users and {len(in_groups)} associated groups."
        self.sa_session.flush()
        return quota, message

    def _parse_amount(self, amount: str) -> Optional[Union[int, bool]]:
        """Parse a human-readable size; None means unlimited, False means
        the string could not be parsed."""
        if amount.lower() in ('unlimited', 'none', 'no limit'):
            return None
        try:
            return util.size_to_bytes(amount)
        except (AssertionError, ValueError):
            # Consistent with edit_quota: treat both parser failure modes
            # as "unparseable".
            return False

    def rename_quota(self, quota, params) -> str:
        """Rename a quota (and optionally update its description)."""
        if not params.name:
            raise ActionInputError('Enter a valid name.')
        elif params.name != quota.name and self.sa_session.query(model.Quota).filter(model.Quota.name == params.name).first():
            raise ActionInputError('A quota with that name already exists.')
        else:
            old_name = quota.name
            quota.name = params.name
            if params.description:
                quota.description = params.description
            self.sa_session.add(quota)
            self.sa_session.flush()
            message = f"Quota '{old_name}' has been renamed to '{params.name}'."
            return message

    def manage_users_and_groups_for_quota(self, quota, params, decode_id=None) -> str:
        """Replace the quota's user/group associations with the given sets."""
        if quota.default:
            raise ActionInputError('Default quotas cannot be associated with specific users and groups.')
        else:
            in_users = [self.sa_session.query(model.User).get(decode_id(x) if decode_id else x) for x in util.listify(params.in_users)]
            if None in in_users:
                raise ActionInputError("One or more invalid user id has been provided.")
            in_groups = [self.sa_session.query(model.Group).get(decode_id(x) if decode_id else x) for x in util.listify(params.in_groups)]
            if None in in_groups:
                raise ActionInputError("One or more invalid group id has been provided.")
            self.quota_agent.set_entity_quota_associations(quotas=[quota], users=in_users, groups=in_groups)
            self.sa_session.refresh(quota)
            message = f"Quota '{quota.name}' has been updated with {len(in_users)} associated users and {len(in_groups)} associated groups."
            return message

    def edit_quota(self, quota, params) -> str:
        """Change a quota's amount and operation."""
        # BUG FIX: validate presence before dereferencing, so a missing
        # amount raises ActionInputError instead of AttributeError on
        # `None.lower()`.
        if not params.amount:
            raise ActionInputError('Enter a valid amount.')
        if params.amount.lower() in ('unlimited', 'none', 'no limit'):
            new_amount = None
        else:
            try:
                new_amount = util.size_to_bytes(params.amount)
            except (AssertionError, ValueError):
                new_amount = False
        if new_amount is False:
            raise ActionInputError('Unable to parse the provided amount.')
        elif params.operation not in model.Quota.valid_operations:
            raise ActionInputError('Enter a valid operation.')
        else:
            quota.amount = new_amount
            quota.operation = params.operation
            self.sa_session.add(quota)
            self.sa_session.flush()
            message = f"Quota '{quota.name}' is now '{quota.operation}{quota.display_amount}'."
            return message

    def set_quota_default(self, quota, params) -> str:
        """Make the quota the default for a user class, or ('no') drop its
        default status."""
        if params.default != 'no' and params.default not in model.DefaultQuotaAssociation.types.__members__.values():
            raise ActionInputError('Enter a valid default type.')
        else:
            if params.default != 'no':
                self.quota_agent.set_default_quota(params.default, quota)
                message = f"Quota '{quota.name}' is now the default for {params.default} users."
            else:
                if quota.default:
                    message = f"Quota '{quota.name}' is no longer the default for {quota.default[0].type} users."
                    for dqa in quota.default:
                        self.sa_session.delete(dqa)
                    self.sa_session.flush()
                else:
                    message = f"Quota '{quota.name}' is not a default."
            return message

    def unset_quota_default(self, quota, params=None) -> str:
        """Remove the quota's default status; error if it is not a default."""
        if not quota.default:
            raise ActionInputError(f"Quota '{quota.name}' is not a default.")
        else:
            message = f"Quota '{quota.name}' is no longer the default for {quota.default[0].type} users."
            for dqa in quota.default:
                self.sa_session.delete(dqa)
            self.sa_session.flush()
            return message

    def delete_quota(self, quota, params=None) -> str:
        """Mark one or more quotas deleted; defaults must be unset first."""
        quotas = util.listify(quota)
        names = []
        for q in quotas:
            if q.default:
                names.append(q.name)
        if len(names) == 1:
            raise ActionInputError(f"Quota '{names[0]}' is a default, please unset it as a default before deleting it.")
        elif len(names) > 1:
            raise ActionInputError(f"Quotas are defaults, please unset them as defaults before deleting them: {', '.join(names)}")
        message = f"Deleted {len(quotas)} quotas: "
        for q in quotas:
            q.deleted = True
            self.sa_session.add(q)
            names.append(q.name)
        self.sa_session.flush()
        message += ', '.join(names)
        return message

    def undelete_quota(self, quota, params=None) -> str:
        """Restore one or more previously deleted quotas."""
        quotas = util.listify(quota)
        names = []
        for q in quotas:
            if not q.deleted:
                names.append(q.name)
        if len(names) == 1:
            raise ActionInputError(f"Quota '{names[0]}' has not been deleted, so it cannot be undeleted.")
        elif len(names) > 1:
            raise ActionInputError(f"Quotas have not been deleted so they cannot be undeleted: {', '.join(names)}")
        message = f"Undeleted {len(quotas)} quotas: "
        for q in quotas:
            q.deleted = False
            self.sa_session.add(q)
            names.append(q.name)
        self.sa_session.flush()
        message += ', '.join(names)
        return message

    def purge_quota(self, quota, params=None):
        """
        This method should only be called for a Quota that has previously been deleted.
        Purging a deleted Quota deletes all of the following from the database:
        - UserQuotaAssociations where quota_id == Quota.id
        - GroupQuotaAssociations where quota_id == Quota.id
        """
        quotas = util.listify(quota)
        names = []
        for q in quotas:
            if not q.deleted:
                names.append(q.name)
        if len(names) == 1:
            raise ActionInputError(f"Quota '{names[0]}' has not been deleted, so it cannot be purged.")
        elif len(names) > 1:
            # BUG FIX: message previously said "cannot be undeleted" (copied
            # from undelete_quota) although this is the purge path.
            raise ActionInputError(f"Quotas have not been deleted so they cannot be purged: {', '.join(names)}")
        message = f"Purged {len(quotas)} quotas: "
        for q in quotas:
            # Delete UserQuotaAssociations
            for uqa in q.users:
                self.sa_session.delete(uqa)
            # Delete GroupQuotaAssociations
            for gqa in q.groups:
                self.sa_session.delete(gqa)
            names.append(q.name)
        self.sa_session.flush()
        message += ', '.join(names)
        return message

    def get_quota(self, trans, id: EncodedDatabaseIdField, deleted: Optional[bool] = None) -> model.Quota:
        """Fetch a quota by encoded id; ownership/access checks are skipped."""
        return base.get_object(trans, id, 'Quota', check_ownership=False, check_accessible=False, deleted=deleted)
class QuotasService:
    """Interface/service object shared by controllers for interacting with quotas."""

    def __init__(self, app: StructuredApp):
        self.quota_manager: QuotaManager = QuotaManager(app)

    def index(self, trans: ProvidesUserContext, deleted: bool = False) -> QuotaSummaryList:
        """Displays a collection (list) of quotas."""
        rval = []
        query = trans.sa_session.query(model.Quota)
        # `deleted` selects between the two API routes and filters the query.
        if deleted:
            route = 'deleted_quota'
            query = query.filter(model.Quota.deleted == true())
        else:
            route = 'quota'
            query = query.filter(model.Quota.deleted == false())
        for quota in query:
            item = quota.to_dict(value_mapper={'id': trans.security.encode_id})
            encoded_id = trans.security.encode_id(quota.id)
            item['url'] = self._url_for(route, id=encoded_id)
            rval.append(item)
        return QuotaSummaryList.parse_obj(rval)

    def show(self, trans: ProvidesUserContext, id: EncodedDatabaseIdField, deleted: bool = False) -> QuotaDetails:
        """Displays information about a quota."""
        quota = self.quota_manager.get_quota(trans, id, deleted=deleted)
        rval = quota.to_dict(view='element', value_mapper={'id': trans.security.encode_id, 'total_disk_usage': float})
        return QuotaDetails.parse_obj(rval)

    def create(self, trans: ProvidesUserContext, params: CreateQuotaParams) -> CreateQuotaResult:
        """Creates a new quota."""
        payload = params.dict()
        # Normalizes in_users/in_groups (emails/names -> decoded ids) in place.
        self.validate_in_users_and_groups(trans, payload)
        quota, message = self.quota_manager.create_quota(payload)
        item = quota.to_dict(value_mapper={'id': trans.security.encode_id})
        item['url'] = self._url_for('quota', id=trans.security.encode_id(quota.id))
        item['message'] = message
        return CreateQuotaResult.parse_obj(item)

    def update(self, trans: ProvidesUserContext, id: EncodedDatabaseIdField, params: UpdateQuotaParams) -> str:
        """Modifies a quota."""
        payload = params.dict()
        self.validate_in_users_and_groups(trans, payload)
        quota = self.quota_manager.get_quota(trans, id, deleted=False)
        # Re-build params from the normalized payload.
        params = UpdateQuotaParams(**payload)
        # FIXME: Doing it this way makes the update non-atomic if a method fails after an earlier one has succeeded.
        methods = []
        if params.name or params.description:
            methods.append(self.quota_manager.rename_quota)
        if params.amount:
            methods.append(self.quota_manager.edit_quota)
        if params.default == DefaultQuotaValues.NO:
            methods.append(self.quota_manager.unset_quota_default)
        elif params.default:
            methods.append(self.quota_manager.set_quota_default)
        if params.in_users or params.in_groups:
            methods.append(self.quota_manager.manage_users_and_groups_for_quota)
        messages = []
        for method in methods:
            message = method(quota, params)
            messages.append(message)
        return '; '.join(messages)

    def delete(self, trans: ProvidesUserContext, id: EncodedDatabaseIdField, payload: Optional[DeleteQuotaPayload] = None) -> str:
        """Marks a quota as deleted."""
        quota = self.quota_manager.get_quota(trans, id, deleted=False)  # deleted quotas are not technically members of this collection
        message = self.quota_manager.delete_quota(quota)
        if payload and payload.purge:
            message += self.quota_manager.purge_quota(quota)
        return message

    def undelete(self, trans: ProvidesUserContext, id: EncodedDatabaseIdField) -> str:
        """Restores a previously deleted quota."""
        quota = self.quota_manager.get_quota(trans, id, deleted=True)
        return self.quota_manager.undelete_quota(quota)

    def validate_in_users_and_groups(self, trans, payload):
        """
        For convenience, in_users and in_groups can be encoded IDs or emails/group names in the API.

        Mutates payload in place, replacing both lists with stringified
        decoded database ids; raises if any entry cannot be resolved.
        """
        def get_id(item, model_class, column):
            try:
                return trans.security.decode_id(item)
            except Exception:
                pass  # maybe an email/group name
            # this will raise if the item is invalid
            return trans.sa_session.query(model_class).filter(column == item).first().id
        new_in_users = []
        new_in_groups = []
        invalid = []
        for item in util.listify(payload.get('in_users', [])):
            try:
                new_in_users.append(get_id(item, model.User, model.User.email))
            except Exception:
                invalid.append(item)
        for item in util.listify(payload.get('in_groups', [])):
            try:
                new_in_groups.append(get_id(item, model.Group, model.Group.name))
            except Exception:
                invalid.append(item)
        if invalid:
            msg = f"The following value(s) for associated users and/or groups could not be parsed: {', '.join(invalid)}."
            msg += "  Valid values are email addresses of users, names of groups, or IDs of both."
            raise Exception(msg)
        payload['in_users'] = list(map(str, new_in_users))
        payload['in_groups'] = list(map(str, new_in_groups))

    def _url_for(self, *args, **kargs):
        # url_for is only available under the legacy web framework.
        try:
            return url_for(*args, **kargs)
        except AttributeError:
            return "*deprecated attribute not filled in by FastAPI server*"
| StarcoderdataPython |
5159097 | import numpy as np
from abc import ABC, abstractmethod
from sklearn.base import BaseEstimator
from regain.utils import namedtuple_with_defaults
# Named record of per-iteration solver diagnostics (fields may be defaulted).
convergence = namedtuple_with_defaults("convergence", "iter obj iter_norm iter_r_norm")
def build_adjacency_matrix(neighbours, how="union"):
    """Assemble a symmetric adjacency matrix from per-node neighbour weights.

    Parameters
    ----------
    neighbours : sequence of sequences
        neighbours[i] holds the weights from node i to every other node
        (length n-1, self excluded); the diagonal is initialised to 1.
    how : str
        "union": symmetrize by averaging out and out.T.
        "intersection": same averaging, but entries where one direction is
        zero are forced to zero.

    Raises
    ------
    ValueError
        For an unknown `how` mode (previously the identity matrix was
        silently returned).
    """
    out = np.eye(len(neighbours))
    mode = how.lower()
    if mode == "union":
        for i, arr in enumerate(neighbours):
            where = [j for j in range(len(neighbours)) if j != i]
            out[i, where] = arr
        out = (out + out.T) / 2
    elif mode == "intersection":
        for i, arr in enumerate(neighbours):
            where = [j for j in range(len(neighbours)) if j != i]
            out[i, where] = arr
        # Keep only edges present (non-zero) in both directions.
        binarized = (out.copy() != 0).astype(int)
        binarized = (binarized + binarized.T) / 2
        binarized[np.where(binarized < 1)] = 0
        out = (out + out.T) / 2
        out[np.where(binarized == 0)] = 0
    else:
        raise ValueError(f"Unknown 'how' mode: {how!r}; expected 'union' or 'intersection'.")
    assert np.all(out == out.T)
    return out
class GLM_GM(ABC, BaseEstimator):
    """Abstract base class for GLM-based graphical-model estimators.

    Only stores common solver hyper-parameters; concrete subclasses must
    implement `fit`.
    """

    def __init__(
        self,
        alpha=0.01,
        tol=1e-4,
        rtol=1e-4,
        max_iter=100,
        verbose=False,
        return_history=True,
        return_n_iter=False,
        compute_objective=True,
    ):
        # alpha: regularization strength; tol / rtol: absolute / relative
        # convergence tolerances; max_iter: iteration cap; the remaining
        # flags control what the solver reports back.
        self.alpha = alpha
        self.tol = tol
        self.rtol = rtol
        self.max_iter = max_iter
        self.verbose = verbose
        self.return_history = return_history
        self.return_n_iter = return_n_iter
        self.compute_objective = compute_objective

    @abstractmethod
    def fit(self, X, y=None, gamma=1e-3):
        """Fit the estimator to data X; implemented by subclasses."""
        pass
| StarcoderdataPython |
9778430 | # Generated by Django 2.1.3 on 2018-12-28 05:29
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: renames Document.document to Document.file."""

    dependencies = [
        ('core', '0004_auto_20181222_1252'),
    ]

    operations = [
        migrations.RenameField(
            model_name='document',
            old_name='document',
            new_name='file',
        ),
    ]
| StarcoderdataPython |
6600485 | <gh_stars>0
# coding=utf-8
from common.constant import *
# Select the runtime environment; compared against constants imported from
# common.constant (e.g. RUN_EVEN_TEST).
run_venv = 1
if run_venv == RUN_EVEN_TEST:
    # NOTE(review): both branches are placeholders — environment-specific
    # setup presumably belongs here.
    pass
else:
    pass

DOC_DIR = "docs/"  # output directory for generated documents
DOC_TEMPLATE_DIR = "doc_templates/"  # directory holding document templates
1689481 | # -*- encoding: utf-8 -*-
from django.conf.urls import patterns, include, url
from .views import RecomendacionView
# URL routes for this app: exposes the recommendation page.
# NOTE(review): the route name 'recmendacion_url' looks misspelled, but it
# may be referenced elsewhere by this exact name — do not rename blindly.
urlpatterns = patterns('',
    url(r'^recomendacion/$', RecomendacionView.as_view(), name='recmendacion_url'),
)
| StarcoderdataPython |
5000798 | <filename>src/huggingmolecules/featurization/featurization_grover.py
from dataclasses import dataclass
from typing import *
import torch
from rdkit import Chem
from .featurization_api import RecursiveToDeviceMixin, PretrainedFeaturizerMixin
from .featurization_common_utils import stack_y, generate_additional_features, stack_generated_features
from .featurization_grover_utils import build_atom_features, build_bond_features_and_mappings
from ..configuration import GroverConfig
@dataclass
class GroverMoleculeEncoding:
    """Per-molecule graph features produced by GroverFeaturizer._encode_smiles."""
    f_atoms: list        # atom feature vectors, one per atom
    f_bonds: list        # bond feature vectors (atom features concatenated in)
    a2b: list            # atom index -> list of incoming bond indices
    b2a: list            # bond index -> source atom index
    b2revb: List         # bond index -> index of the reverse bond
    n_atoms: int         # number of atoms in the molecule
    n_bonds: int         # number of (directed) bonds
    generated_features: Optional[List[float]]  # extra molecule-level descriptors
    y: Optional[float]   # target value, if known
@dataclass
class GroverBatchEncoding(RecursiveToDeviceMixin):
    """A batch of molecules collated into one disjoint-union graph.

    Index 0 of both the atom and bond axes is a zero-padding entry, so
    index tensors can use 0 as a safe "no neighbour" value.
    """
    f_atoms: torch.FloatTensor   # (n_atoms_total+1, atom_fdim) incl. padding row
    f_bonds: torch.FloatTensor   # (n_bonds_total+1, bond_fdim) incl. padding row
    a2b: torch.LongTensor        # atom -> incoming bond indices (zero-padded)
    b2a: torch.LongTensor        # bond -> source atom index
    b2revb: torch.LongTensor     # bond -> reverse bond index
    a2a: torch.LongTensor        # atom -> neighbouring atom indices (b2a[a2b])
    a_scope: torch.LongTensor    # per-molecule (start_atom_index, num_atoms)
    b_scope: torch.LongTensor    # per-molecule (start_bond_index, num_bonds)
    generated_features: Optional[torch.FloatTensor]  # stacked extra descriptors
    y: Optional[torch.FloatTensor]  # stacked targets, if present
    batch_size: int              # number of molecules in the batch

    def __len__(self):
        # Length of the batch is the number of molecules, not atoms.
        return self.batch_size

    def get_components(self):
        """Return the graph tensors in the order the GROVER model expects."""
        return self.f_atoms, self.f_bonds, self.a2b, self.b2a, self.b2revb, self.a_scope, self.b_scope, self.a2a
class GroverFeaturizer(PretrainedFeaturizerMixin[GroverMoleculeEncoding, GroverBatchEncoding, GroverConfig]):
    """Featurizer turning SMILES strings into GROVER batch graph encodings."""

    @classmethod
    def _get_config_cls(cls) -> Type[GroverConfig]:
        return GroverConfig

    def __init__(self, config: GroverConfig):
        super().__init__(config)
        self.atom_fdim = config.d_atom
        # Bond feature vectors have the source atom's features concatenated in.
        self.bond_fdim = config.d_bond + config.d_atom

    def _encode_smiles(self, smiles: str, y: Optional[float]) -> GroverMoleculeEncoding:
        """Encode one SMILES string into per-molecule graph features."""
        mol = Chem.MolFromSmiles(smiles)
        atom_features = build_atom_features(mol)
        bond_features, a2b, b2a, b2revb = build_bond_features_and_mappings(mol, atom_features)
        generated_features = generate_additional_features(mol, self.config.ffn_features_generators)
        return GroverMoleculeEncoding(f_atoms=atom_features,
                                      f_bonds=bond_features,
                                      a2b=a2b,
                                      b2a=b2a,
                                      b2revb=b2revb,
                                      n_atoms=len(atom_features),
                                      n_bonds=len(bond_features),
                                      generated_features=generated_features,
                                      y=y)

    def _collate_encodings(self, encodings: List[GroverMoleculeEncoding]) -> GroverBatchEncoding:
        """Merge per-molecule encodings into one disjoint-union batch graph,
        shifting every atom/bond index by the running batch offsets."""
        # Start n_atoms and n_bonds at 1 b/c zero padding
        n_atoms = 1  # number of atoms (start at 1 b/c need index 0 as padding)
        n_bonds = 1  # number of bonds (start at 1 b/c need index 0 as padding)
        a_scope = []  # list of tuples indicating (start_atom_index, num_atoms) for each molecule
        b_scope = []  # list of tuples indicating (start_bond_index, num_bonds) for each molecule

        # All start with zero padding so that indexing with zero padding returns zeros
        f_atoms = [[0] * self.atom_fdim]
        f_bonds = [[0] * self.bond_fdim]
        a2b = [[]]  # mapping from atom index to incoming bond indices
        b2a = [0]  # mapping from bond index to the index of the atom the bond is coming from
        b2revb = [0]  # mapping from bond index to the index of the reverse bond
        for mol_graph in encodings:
            f_atoms.extend(mol_graph.f_atoms)
            f_bonds.extend(mol_graph.f_bonds)
            # Shift per-molecule indices by the current batch offsets.
            for a in range(mol_graph.n_atoms):
                a2b.append([b + n_bonds for b in mol_graph.a2b[a]])
            for b in range(mol_graph.n_bonds):
                b2a.append(n_atoms + mol_graph.b2a[b])
                b2revb.append(n_bonds + mol_graph.b2revb[b])
            a_scope.append((n_atoms, mol_graph.n_atoms))
            b_scope.append((n_bonds, mol_graph.n_bonds))
            n_atoms += mol_graph.n_atoms
            n_bonds += mol_graph.n_bonds

        # max with 1 to fix a crash in rare case of all single-heavy-atom mols
        max_num_bonds = max(1, max(len(in_bonds) for in_bonds in a2b))
        f_atoms = torch.FloatTensor(f_atoms)
        f_bonds = torch.FloatTensor(f_bonds)
        # Pad each atom's incoming-bond list with 0 (the padding bond index).
        a2b = torch.LongTensor([a2b[a] + [0] * (max_num_bonds - len(a2b[a])) for a in range(n_atoms)])
        b2a = torch.LongTensor(b2a)
        b2revb = torch.LongTensor(b2revb)
        a2a = b2a[a2b]  # only needed if using atom messages
        a_scope = torch.LongTensor(a_scope)
        b_scope = torch.LongTensor(b_scope)

        return GroverBatchEncoding(f_atoms=f_atoms,
                                   f_bonds=f_bonds,
                                   a2a=a2a,
                                   a2b=a2b,
                                   b2a=b2a,
                                   b2revb=b2revb,
                                   a_scope=a_scope,
                                   b_scope=b_scope,
                                   y=stack_y(encodings),
                                   generated_features=stack_generated_features(encodings),
                                   batch_size=len(encodings))
| StarcoderdataPython |
8137840 | #this file is just a tank of utilities for ploting stuff mainly.
#It creates the figures and the plots
import os
import pickle#5 as pickle
#import pickle5
import matplotlib.pyplot as plt
from matplotlib import gridspec
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn import metrics
import pandas as pd
def CountAbovethreshold(Data, threshold):
    """Count the values strictly above `threshold`.

    For hourly data this is the number of hours above the threshold.
    """
    return sum(1 for value in Data if value > threshold)
def Average(Data, WindNbVal):
    """Downsample `Data` by averaging consecutive windows of WindNbVal values.

    The first window is always emitted (0.0 for empty input), and a trailing
    partial window is still divided by the full window length, matching the
    original behaviour.
    """
    averaged = [sum(Data[:WindNbVal]) / WindNbVal]
    averaged.extend(
        sum(Data[start:start + WindNbVal]) / WindNbVal
        for start in range(WindNbVal, len(Data), WindNbVal)
    )
    return averaged
def DailyVal(Data):
    """Reshape one year of hourly values into daily statistics.

    Parameters
    ----------
    Data : sequence of 8760 hourly values (365 days x 24 hours).

    Returns
    -------
    dict with:
        'DailyMax'     : list of 365 daily maxima
        'DailyMin'     : list of 365 daily minima
        'DailyDistrib' : (365, 24) array, one row per day

    PERF FIX: the original built DailyDistrib with np.vstack inside a loop
    (quadratic); a single reshape produces the identical array.
    """
    days = np.array(Data).reshape(365, 24)
    return {
        'DailyMax': list(days.max(axis=1)),
        'DailyMin': list(days.min(axis=1)),
        'DailyDistrib': days,
    }
def getMatchedIndex(Vary1, Vary2, tol):
    """Return the indices where Vary1 and Vary2 agree within `tol` percent.

    The relative error is (Vary2[i] - Vary1[i]) / Vary2[i] * 100; indices
    whose absolute relative error is <= tol are kept.
    """
    matched = []
    for idx in range(len(Vary1)):
        rel_err = (Vary2[idx] - Vary1[idx]) / Vary2[idx] * 100
        if abs(rel_err) <= tol:
            matched.append(idx)
    return matched
#function copy/paste from : https://www.askpython.com/python/examples/principal-component-analysis
def PCA(X, num_var=6, plot2D=False, plotSphere=False, plotInertia=False):
    """Principal component analysis on the standardized columns of X.

    Returns a dict with the projected coordinates ('Coord'), eigenvectors
    ('EigVect') and eigenvalues ('EigVal') sorted in decreasing order, the
    explained-inertia fractions ('Inertia') and the variable/component
    correlation matrix ('CorVar'). Optional flags trigger the companion
    plotting helpers.
    """
    n_obs, n_feat = X.shape
    # Standardize each column (zero mean, unit variance).
    standardized = (X - np.mean(X, axis=0)) / np.std(X, axis=0)
    # Covariance of the standardized variables.
    covariance = np.cov(standardized, rowvar=False)
    eig_vals, eig_vecs = np.linalg.eigh(covariance)
    # eigh returns eigenvalues in ascending order; flip to descending.
    order = np.argsort(eig_vals)[::-1]
    eig_vals = eig_vals[order]
    eig_vecs = eig_vecs[:, order]
    inertia = [value / sum(eig_vals) for value in eig_vals]
    # Project onto the first `num_var` components.
    components = eig_vecs[:, 0:num_var]
    coords = np.dot(components.transpose(), standardized.transpose()).transpose()
    # Correlations between the original variables and each component.
    corvar = np.zeros((n_feat, n_feat))
    for k in range(n_feat):
        corvar[:, k] = eig_vecs.transpose()[k, :] * np.sqrt(eig_vals)[k]
    if plot2D:
        plotCorCircle(X, corvar, num_var)
    if plotSphere:
        plotCorSphere(X, corvar, num_var)
    if plotInertia:
        plotPCAsInertia(inertia)
    return {'Coord': coords, 'EigVect': eig_vecs, 'EigVal': eig_vals,
            'Inertia': inertia, 'CorVar': corvar}
def plotPCAsInertia(Inertia):
    """Plot the explained-inertia fraction of each principal component."""
    fig, axes = plt.subplots(figsize=(6, 6))
    plt.plot(Inertia)
    plt.xlabel('PCs')
    plt.ylabel('Inertia (-)')
def plotCorCircle(X,CorVar,num_var):
    """Draw correlation circles for successive pairs of principal components.

    X must expose `.columns` (e.g. a pandas DataFrame) to label the arrows.
    """
    for i in range(num_var-1):
        # correlation circle for components i and i+1
        fig, axes = plt.subplots(figsize=(6, 6))
        axes.set_xlim(-1, 1)
        axes.set_ylim(-1, 1)
        # one arrow + variable-name label per variable
        for j in range(num_var-1):
            plt.arrow(0, 0, CorVar[j, i], CorVar[j, i + 1])
            # length_includes_head=True,
            # head_width=0.08, head_length=0.00002)
            plt.annotate(X.columns[j], (CorVar[j, i], CorVar[j, i + 1]))
        plt.xlabel('PC' + str(i))
        plt.ylabel('PC' + str(i + 1))
        # add the horizontal/vertical axis lines and the unit circle
        plt.plot([-1, 1], [0, 0], color='silver', linestyle='-', linewidth=1)
        plt.plot([0, 0], [-1, 1], color='silver', linestyle='-', linewidth=1)
        cercle = plt.Circle((0, 0), 1, color='blue', fill=False)
        axes.add_artist(cercle)
def plotCorSphere(X, corvar,p):
    """3D correlation sphere: plot each variable's correlation with the first
    three principal components as an arrow inside the unit sphere."""
    #Make the last 3D sphere plot
    fig = plt.figure()
    ax = fig.gca(projection='3d')

    # draw sphere
    u, v = np.mgrid[0:2*np.pi:50j, 0:np.pi:50j]
    x = np.cos(u)*np.sin(v)
    y = np.sin(u)*np.sin(v)
    z = np.cos(v)
    # alpha controls opacity
    ax.plot_surface(x, y, z, color="g", alpha=0.3)

    # tails of the arrows
    tails= np.zeros(p)
    # heads of the arrows with adjusted arrow head length
    ax.quiver(tails,tails,tails,corvar[:,0], corvar[:,1], corvar[:,2],
              color='r', arrow_length_ratio=0.15)
    # label each variable arrow with its column name
    for i in range(p):
        ax.text(corvar[i,0],corvar[i,1],corvar[i,2],X.columns[i])
    # draw and label the three component axes
    ax.quiver(np.zeros(3),np.zeros(3),np.zeros(3),[1,0,0], [0,1,0], [0,0,1],
              length=1.25, normalize=True,color='k', arrow_length_ratio=0.15)
    ax.text(1.25,0,0,'PC0')
    ax.text(0,1.25,0,'PC1')
    ax.text(0,0,1.25,'PC2')
    ax.grid(False)
    plt.axis('off')
    ax.set_xticks([])
    ax.set_yticks([])
    ax.set_zticks([])
    ax.set_title('3D plots over the three first PCAs')
def getSortedIdx(reference, Data):
    """Match entries of `Data` to entries of `reference` (None values skipped).

    Returns two parallel lists: the indices into `Data` that match each
    reference value, and the corresponding positions in the None-filtered
    reference. Used e.g. to compare simulations of the same district whose
    buildings are ordered differently (matching on FormularId).
    """
    filtered_reference = [value for value in reference if value is not None]
    data_indices = []
    reference_indices = []
    for ref_pos, ref_value in enumerate(filtered_reference):
        for data_pos, data_value in enumerate(Data):
            if data_value is not None and data_value == ref_value:
                data_indices.append(data_pos)
                reference_indices.append(ref_pos)
    return data_indices, reference_indices
#this function enable to create a two subplots figure with ratio definition between the two plots
def createDualFig(title,ratio):
    """Create a figure with two stacked subplots sharing the x axis.

    `ratio` (0-1) is the fraction of vertical space given to the top axes.
    Returns {'fig_name': figure, 'ax0': top axes, 'ax1': bottom axes}.
    """
    fig_name = plt.figure(figsize=(10, 7))
    gs = gridspec.GridSpec(10,1, left=0.1, bottom = 0.1)
    ax0 = plt.subplot(gs[:round(ratio*10), 0])
    ax0.grid()
    ax1 = plt.subplot(gs[round(ratio*10)+1:, 0])
    ax1.grid()
    ax1.sharex(ax0)
    #plt.tight_layout()
    plt.title(title)
    return {'fig_name' : fig_name, 'ax0': ax0, 'ax1' : ax1}
#this function enable to create a two subplots figure with ratio definition between the two plots
def createMultilFig(title,nbFig,linked=True):
    """Create a figure with `nbFig` vertically stacked subplots.

    When `linked` is True every subplot shares the x axis of the first.
    Returns {'fig_name': figure, 'ax': {index: axes}}.
    """
    fig_name = plt.figure(figsize=(10, 7))
    gs = gridspec.GridSpec(nbFig,1, left=0.1, bottom = 0.1)
    ax = {}
    for i in range(nbFig):
        ax[i] = plt.subplot(gs[i, 0])
        ax[i].grid()
        if i>0 and linked:
            ax[i].sharex(ax[0])
    #plt.tight_layout()
    plt.title(title)
    return {'fig_name' : fig_name, 'ax': ax}
def createMultilDblFig(title,nbFigx,nbFigy,linked=True):
    """Create a figure with an nbFigx x nbFigy grid of subplots.

    Returns {'fig_name': figure, 'ax': {flat_index: axes}} where axes are
    numbered row by row.
    """
    fig_name = plt.figure(figsize=(10, 7))
    gs = gridspec.GridSpec(nbFigx,nbFigy, left=0.1, bottom = 0.1)
    ax = {}
    totfig = 0
    for i in range(nbFigx):
        for j in range(nbFigy):
            ax[totfig] = plt.subplot(gs[i, j])
            ax[totfig].grid()
            totfig+=1
            # NOTE(review): sharex uses ax[i] (row index) rather than the
            # current subplot ax[totfig-1] — looks like a copy from
            # createMultilFig; confirm the intended linking behaviour.
            if i>0 and j>0 and linked:
                ax[i].sharex(ax[0])
    #plt.tight_layout()
    plt.title(title)
    return {'fig_name' : fig_name, 'ax': ax}
#this function enable to create a single graph areas
def createSimpleFig():
    """Create a figure with a single full-size axes.

    Returns {'fig_name': figure, 'ax0': axes}.
    """
    fig_name = plt.figure(figsize=(10, 7))
    gs = gridspec.GridSpec(4, 1, left=0.1, bottom = 0.1)
    ax0 = plt.subplot(gs[:, 0])
    ax0.grid()
    #plt.tight_layout()
    return {'fig_name' : fig_name, 'ax0': ax0}
#basic plots
def plotBasicGraph(fig_name,ax0,varx,vary,varxname,varyname,title,sign,legend = True, markersize = 5):
    """Plot each series of `vary` against `varx` on `ax0` with style `sign`.

    `varyname` supplies legend labels; when empty, series are drawn without
    labels and the `legend` flag is ignored. `title` is used as the y-axis
    label (not as a figure title).
    """
    plt.figure(fig_name)
    if len(varyname)>0:
        for nb,var in enumerate(vary):
            ax0.plot(varx,var,sign,label= varyname[nb], mfc='none',markersize=markersize)
        ax0.set_xlabel(varxname)
        ax0.set_ylabel(title)
        if legend:
            ax0.legend()
    else:
        # unlabeled variant — same plotting, no legend entries
        for nb,var in enumerate(vary):
            ax0.plot(varx,var,sign, mfc='none',markersize=markersize)
        ax0.set_xlabel(varxname)
        ax0.set_ylabel(title)
#this plots variables realtively to their maximum value
def plotRelative2Max(fig_name,ax0,varx,vary,varxname,varyname):
    """Plot `vary` scaled by its maximum value (curve peaks at 1)."""
    plt.figure(fig_name)
    relval = [vary[i] / max(vary) for i in range(len(vary))]
    ax0.plot(varx, relval,label= varyname)
    ax0.set_xlabel(varxname)
    ax0.legend()
    # NOTE(review): debug leftover — prints the minimum of the scaled series.
    print(min(relval))
#this plots variables dimensioless values (from 0-1)
def plotDimLess(fig_name,ax0,varx,vary,varxname,varyname,varname):
    """Scatter-plot `vary` vs `varx` after min-max scaling both to [0, 1]."""
    plt.figure(fig_name)
    xval = [(varx[i] -min(varx)) / (max(varx)-min(varx)) for i in range(len(varx))]
    yval = [(vary[i] - min(vary)) / (max(vary) - min(vary)) for i in range(len(vary))]
    ax0.plot(xval, yval,'.',label= varname)
    ax0.set_xlabel(varxname)
    ax0.set_ylabel(varyname)
    ax0.legend()
#this plots in 2 subplots basic values and error, vary is thus a list of list, the first one being the reference
def plotBasicWithError(fig_name,ax0,ax1,varx,vary,varxname,varyname):
    """Top axes (`ax0`): every series in `vary` (vary[0] is the reference);
    bottom axes (`ax1`): each series' error relative to vary[0], in percent."""
    plt.figure(fig_name)
    for id,xvar in enumerate(vary):
        ax0.plot(varx, vary[id], 's',label= varyname[id])
    ax0.legend()
    ax0.set_xlabel(varxname)
    for id,xvar in enumerate(vary):
        # relative error (%) of series `id` against the reference vary[0]
        ax1.plot(varx, [(vary[id][i] - vary[0][i]) / vary[0][i] * 100 for i in range(len(vary[0]))], 'x')
#this one I don't really get it yet...why I have done this....
def plot2Subplots(fig_name, ax0, ax1, varx, vary, varxname, varyname):
    """Plot varx[i] vs vary[i] on the two given axes (one series per axes).

    `varx` and `vary` are sequences of (at most two) series, drawn on ax0
    and ax1 respectively.
    """
    plt.figure(fig_name)
    ax = [ax0, ax1]
    # BUG FIX: original code was `for i in len(varx)` which raises
    # TypeError ('int' object is not iterable) on any call.
    for i in range(len(varx)):
        ax[i].plot(varx[i], vary[i])
        ax[i].set_xlabel(varxname)
        ax[i].set_ylabel(varyname)
        ax[i].grid()
def plotHist(fig_name, ax0, vary, varyname):
    """Add a normalized (probability-density) histogram of `vary` to `ax0`."""
    plt.figure(fig_name)
    # BUG FIX: the `normed` keyword was removed from matplotlib in 3.1;
    # `density=True` is the supported equivalent.
    ax0.hist(vary, density=True, label=varyname)
    ax0.legend()
def GetData(path,extravariables = [], Timeseries = [],BuildNum=[]):
os.chdir(path)
liste = os.listdir()
ResBld = {}
Res = {}
SimNumb = []
Res['ErrFiles'] = []
Res['Warnings'] = []
Res['Errors'] = []
print('reading file...')
#First round just to see what number to get
StillSearching = True
num =[]
idx1 = ['_','v']
idx2 = ['v','.']
if len(BuildNum)==0:
while StillSearching:
for i,file in enumerate(liste):
if '.pickle' in file:
num.append(int(file[file.index(idx1[0]) + 1:file.index(idx1[1])]))
if len(num)==2:
if abs(num[1]-num[0])>0:
idxF = idx1
else:
idxF = idx2
StillSearching = False
break
if i == len(liste):
StillSearching = False
idxF = idx1
else:
idxF = ['_'+str(BuildNum[0])+'v','.']
#now that we found this index, lets go along alll the files
for file in liste:
if '.pickle' in file:
try:
print(file)
SimNumb.append(int(file[file.index(idxF[0]) + len(idxF[0]):file.index(idxF[1])]))
try:
with open(file, 'rb') as handle:
ResBld[SimNumb[-1]] = pickle.load(handle)
except:
pass
# with open(file, 'rb') as handle:
# ResBld[SimNumb[-1]] = pickle5.load(handle)
try:
Res['ErrFiles'].append(os.path.getsize(file[:file.index('.pickle')]+'.err'))
with open(file[:file.index('.pickle')]+'.err') as file:
lines = file.readlines()
Res['Warnings'].append(int(lines[-1][lines[-1].index('--')+2:lines[-1].index('Warning')]))
Res['Errors'].append(int(lines[-1][lines[-1].index('Warning') + 8:lines[-1].index('Severe Errors')]))
except:
Res['ErrFiles'].append(0)
except:
pass
#lets get the mandatory variables
variables=['EP_Elec','EP_Heat','EP_Cool','EP_DHW','SimNum','EPC_Elec','EPC_Heat','EPC_Cool','EPC_Tot',
'ATemp','EP_Area','BuildID']
# lest build the Res dictionnary
for key in variables:
Res[key] = []
# #lets add to the extravariable the time series if present
# TimeSeriesKeys = ['HeatedArea','NonHeatedArea','OutdoorSite']
# for TimeKey in TimeSeriesKeys:
# if TimeKey in ResBld[SimNumb[0]].keys():
# extravariables.append(TimeKey)
#lest add the keysin the Res Dict of the extravariables
for key in extravariables:
Res[key] = []
# lest add the keysin the Res Dict of the extravariables
try:
for key in Timeseries:
varName = Timeseries[key]['Location']+'_'+Timeseries[key]['Data']
Res[varName] = []
except:
pass
#now we aggregate the data into Res dict
print('organizing data...')
for i,key in enumerate(ResBld):
ResDone = True
Res['SimNum'].append(key)
#lets first read the attribut of the building object (simulation inputs)
try:
BuildObj = ResBld[key]['BuildDB']
except:
BuildObj = ResBld[key]['BuildData']
ResDone = False
try:
Res['BuildID'].append(BuildObj.BuildID)
except:
Res['BuildID'].append(None)
Res['EP_Area'].append(BuildObj.EPHeatedArea)
try:
Res['ATemp'].append(BuildObj.ATemp)
except:
Res['ATemp'].append(BuildObj.surface)
eleval = 0
for x in BuildObj.EPCMeters['ElecLoad']:
if BuildObj.EPCMeters['ElecLoad'][x]:
eleval += BuildObj.EPCMeters['ElecLoad'][x]
Res['EPC_Elec'].append(eleval/BuildObj.ATemp if BuildObj.ATemp!=0 else 0)
heatval = 0
for x in BuildObj.EPCMeters['Heating']:
heatval += BuildObj.EPCMeters['Heating'][x]
Res['EPC_Heat'].append(heatval/BuildObj.ATemp if BuildObj.ATemp!=0 else 0)
coolval = 0
for x in BuildObj.EPCMeters['Cooling']:
coolval += BuildObj.EPCMeters['Cooling'][x]
Res['EPC_Cool'].append(coolval/BuildObj.ATemp if BuildObj.ATemp!=0 else 0)
Res['EPC_Tot'].append((eleval+heatval+coolval)/BuildObj.ATemp if BuildObj.ATemp!=0 else 0)
#forthe old way of doing things and the new paradigm for global results
try:
for key1 in Res:
if key1 in ['EP_Elec','EP_Cool','EP_Heat']:
idx = 1 if 'EP_elec' in key1 else 4 if 'EP_cool' in key1 else 5 if 'EP_heat' in key1 else None
Res[key1].append(ResBld[key]['EnergyConsVal'][idx] / 3.6 / BuildObj.EPHeatedArea * 1000)
except:
if ResDone:
for key1 in Res:
if key1 in ['EP_Elec']:
Res[key1].append(ResBld[key]['GlobRes']['Interior Equipment']['Electricity [GJ]'] / 3.6 / BuildObj.EPHeatedArea * 1000)
if key1 in ['EP_Cool']:
Res[key1].append(ResBld[key]['GlobRes']['Cooling']['District Cooling [GJ]'] / 3.6 / BuildObj.EPHeatedArea * 1000)
if key1 in ['EP_Heat']:
Res[key1].append(ResBld[key]['GlobRes']['Heating']['District Heating [GJ]'] / 3.6 / BuildObj.EPHeatedArea * 1000)
if key1 in ['EP_DHW']:
Res[key1].append(ResBld[key]['GlobRes']['Water Systems']['District Heating [GJ]'] / 3.6 / BuildObj.EPHeatedArea * 1000)
else:
pass
#Now lest get the extravariables
for key1 in extravariables:
try:
Res[key1].append(ResBld[key][key1])
except:
try:
Res[key1].append(eval('BuildObj.'+key1))
except:
Res[key1].append(-1)
try:
for key1 in Timeseries:
varName = Timeseries[key1]['Location'] + '_' + Timeseries[key1]['Data']
if len(Res[varName])==0:
Res[varName] = ResBld[key][Timeseries[key1]['Location']][Timeseries[key1]['Data']]
else:
Res[varName] = np.vstack((Res[varName] ,ResBld[key][Timeseries[key1]['Location']][Timeseries[key1]['Data']]))
except:
pass
return Res
def plotDHWdistrib(Distrib, name, DataQual=None):
    """Plot hourly distributions of DHW draw, one histogram row per hour.

    Builds 24 stacked subplots (one per hour of the day) via gener_Plot()
    for every yearly data set in Distrib, then rescales all subplots to a
    common x-axis limit so the rows are visually comparable.

    Parameters:
        Distrib  : iterable of 2-D arrays (one per year); column i holds the
                   samples for hour i (shape: samples x 24).
        name     : figure name; also used as the plot title by gener_Plot().
        DataQual : optional quality data; currently unused, kept for
                   backward compatibility.  (Was a mutable default `[]`,
                   which is shared across calls -- now None.)
    """
    if DataQual is None:
        DataQual = []
    plt.figure(name)
    gs = gridspec.GridSpec(24, 1)
    # Seed with zeros so max(XMAX1) is well defined; per-year maxima are
    # appended below (mirrors the original behaviour).
    XMAX1 = [0] * len(Distrib)
    for yr, Dist in enumerate(Distrib):
        xmax1 = [0] * 24
        for i in range(24):
            # Samples for hour i (avoids the old comprehension that
            # shadowed the builtin `id`).
            distrib = list(Dist[:, i])
            xmax1[i] = gener_Plot(gs, distrib, i, 0, name)
        XMAX1.append(max(xmax1))
    # Harmonize the x-axis across all 24 hourly subplots.
    for i in range(24):
        ax0 = plt.subplot(gs[i, 0])
        ax0.set_xlim([0, max(XMAX1)])
    #plt.title(name)
    #plt.show()
def gener_Plot(gs, data, i, pos, titre):
    """Draw a filled histogram of *data* into subplot row *i*, column *pos*.

    Parameters:
        gs    : matplotlib GridSpec the subplot is taken from.
        data  : 1-D sample values, histogrammed into 50 bins.
        i     : subplot row index, 0..23 (one row per hour of the day).
        pos   : subplot column index; columns > 0 get blank y tick labels.
        titre : figure title, only drawn on the top row (i == 0).

    Returns:
        The largest bin upper edge; the caller uses it to harmonize the
        x-axis limits across all rows.
    """
    ax0 = plt.subplot(gs[i, pos])
    #ax0.hist(data, 50, alpha=0.75)
    #ax0.set_xlim([0, pos*5+10])
    pt = np.histogram(data, 50)
    # Upper edge of each bin: left edge plus bin width.  (The comprehension's
    # `i` is scoped to the comprehension in Python 3, so the row index
    # parameter `i` is not clobbered.)
    volFlow = [pt[1][i] + float(j) for i, j in enumerate(np.diff(pt[1]))]
    #plt.plot(volFlow,pt[0])
    plt.fill_between(volFlow,0,pt[0],alpha = 0.5)
    if i==0:
        plt.title(titre)
    # Single y tick labelled with the row (hour) index.
    plt.yticks([0], [str(i)])
    if pos>0:
        plt.yticks([0], [''])
    plt.grid()
    if i<23:
        # Hide x ticks/labels on all but the bottom row.
        plt.tick_params(
            axis='x',          # changes apply to the x-axis
            which='both',      # both major and minor ticks are affected
            bottom=False,      # ticks along the bottom edge are off
            top=False,         # ticks along the top edge are off
            labelbottom=False) # labels along the bottom edge are off
    else:
        plt.xlabel('L/min')#data = np.array(data)
    return max(volFlow)
def getLRMetaModel(X, y):
    """Fit a linear-regression meta-model on (X, y) and report its quality.

    80% of the rows are used for training and 20% for testing, with a fixed
    random_state for reproducibility.

    Parameters:
        X : pandas DataFrame of input parameters (one column per feature).
        y : target output values, aligned with the rows of X.

    Returns:
        (coeff_df, intercept, r2) where
        coeff_df  : DataFrame with one fitted coefficient per feature column,
        intercept : intercept of the fitted linear model,
        r2        : R^2 score measured on the held-out 20% test split.
    """
    # NOTE: removed the stray "| StarcoderdataPython" token that was fused
    # onto the return line -- it raised NameError at call time.
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)
    regressor = LinearRegression()
    regressor.fit(X_train, y_train)
    coeff_df = pd.DataFrame(regressor.coef_, X.columns, columns=['Coefficient'])
    y_pred = regressor.predict(X_test)
    return coeff_df, regressor.intercept_, metrics.r2_score(y_test, y_pred)
3263537 | <filename>netrd/distance/hamming.py
"""
hamming.py
--------------
Hamming distance, wrapper for scipy function:
https://docs.scipy.org/doc/scipy/reference/generated/scipy.spatial.distance.hamming.html#scipy.spatial.distance.hamming
"""
import scipy
import numpy as np
import networkx as nx
from .base import BaseDistance
class Hamming(BaseDistance):
    """Entry-wise disagreement between adjacency matrices."""

    def dist(self, G1, G2):
        r"""Proportion of disagreeing entries between the flattened
        adjacency matrices of the two graphs.

        For boolean vectors :math:`u` and :math:`v`, the Hamming distance is

        .. math::
            \frac{c_{01} + c_{10}}{n}

        where :math:`c_{ij}` counts the positions :math:`k < n` at which
        :math:`u[k] = i` and :math:`v[k] = j`.

        The two graphs must have the same number of nodes.  (With a small
        modification this code could apply weights, but only a single set
        of weights shared by both graphs.)

        The underlying adjacency matrices are also stored in the results
        dictionary as a 2-tuple under the key `'adjacency_matrices'`.

        Parameters
        ----------
        G1, G2 (nx.Graph)
            two networkx graphs to be compared.

        Returns
        -------
        dist (float)
            the distance between `G1` and `G2`.

        References
        ----------
        .. [1] https://docs.scipy.org/doc/scipy/reference/generated/scipy.spatial.distance.hamming.html#scipy.spatial.distance.hamming
        """
        matrices = tuple(nx.to_numpy_array(G) for G in (G1, G2))
        flat1, flat2 = (m.flatten() for m in matrices)
        distance = scipy.spatial.distance.hamming(flat1, flat2)
        self.results['adjacency_matrices'] = matrices
        self.results['dist'] = distance
        return distance
| StarcoderdataPython |
11341108 | <gh_stars>1-10
import json
import sys
import traceback
import policy
def load_config():
    """Load and parse the JSON configuration file next to the handler.

    Returns:
        dict: the parsed contents of ``config.json``.
    """
    # json.load parses straight from the stream -- no need to slurp the
    # raw text first and call json.loads on it.
    with open('config.json', 'r') as config_file:
        return json.load(config_file)
def lambda_handler(event, context): # pylint: disable=unused-argument
    """API Gateway custom (TOKEN) authorizer entry point.

    Compares the caller's authorization token against the expected token in
    config.json and returns an IAM policy allowing or denying all methods of
    the invoked REST API stage.  Any failure is printed and surfaced as the
    generic "Unauthorized" error that API Gateway expects.
    """
    try:
        # methodArn: arn:aws:execute-api:<region>:<account>:<apiId>/<stage>/<verb>/<resource...>
        arn_parts = event['methodArn'].split(':')
        region = arn_parts[3]
        account_id = arn_parts[4]
        gateway_parts = arn_parts[5].split('/')

        token = event['authorizationToken']
        expected_token = load_config()['expected_token']

        authpolicy = policy.AuthPolicy(token, account_id)
        authpolicy.rest_api_id = gateway_parts[0]
        authpolicy.region = region
        authpolicy.stage = gateway_parts[1]

        if token == expected_token:
            authpolicy.allow_all_methods()
        else:
            authpolicy.deny_all_methods()
        return authpolicy.build()
    except Exception:
        traceback.print_exception(*sys.exc_info())
        # API Gateway maps this exact message to a 401 response.
        raise Exception("Unauthorized")
| StarcoderdataPython |
1782169 | # -*- coding: utf-8 -*-
# $Id: wuiadmintestbox.py 69111 2017-10-17 14:26:02Z vboxsync $
"""
Test Manager WUI - TestBox.
"""
__copyright__ = \
"""
Copyright (C) 2012-2017 Oracle Corporation
This file is part of VirtualBox Open Source Edition (OSE), as
available from http://www.virtualbox.org. This file is free software;
you can redistribute it and/or modify it under the terms of the GNU
General Public License (GPL) as published by the Free Software
Foundation, in version 2 as it comes in the "COPYING" file of the
VirtualBox OSE distribution. VirtualBox OSE is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
The contents of this file may alternatively be used under the terms
of the Common Development and Distribution License Version 1.0
(CDDL) only, as it comes in the "COPYING.CDDL" file of the
VirtualBox OSE distribution, in which case the provisions of the
CDDL are applicable instead of those of the GPL.
You may elect to license modified versions of this file under the
terms and conditions of either the GPL or the CDDL or both.
"""
__version__ = "$Revision: 69111 $"
# Standard python imports.
import socket;
# Validation Kit imports.
from common import utils, webutils;
from testmanager.webui.wuicontentbase import WuiContentBase, WuiListContentWithActionBase, WuiFormContentBase, WuiLinkBase, \
WuiSvnLink, WuiTmLink, WuiSpanText, WuiRawHtml;
from testmanager.core.db import TMDatabaseConnection;
from testmanager.core.schedgroup import SchedGroupLogic, SchedGroupData;
from testmanager.core.testbox import TestBoxData, TestBoxDataEx, TestBoxLogic;
from testmanager.core.testset import TestSetData;
from testmanager.core.db import isDbTimestampInfinity;
class WuiTestBoxDetailsLink(WuiTmLink):
    """ Link to the details page of a test box, keyed by testbox ID. """

    def __init__(self, idTestBox, sName = WuiContentBase.ksShortDetailsLink, fBracketed = False, tsNow = None):
        # Imported locally to avoid a circular import with wuiadmin.
        from testmanager.webui.wuiadmin import WuiAdmin;
        dParams = {
            WuiAdmin.ksParamAction: WuiAdmin.ksActionTestBoxDetails,
        };
        dParams[TestBoxData.ksParam_idTestBox] = idTestBox;
        if tsNow is not None:
            dParams[WuiAdmin.ksParamEffectiveDate] = tsNow; # presumably shows the box as of this date -- confirm. ##??
        WuiTmLink.__init__(self, sName, WuiAdmin.ksScriptName, dParams, fBracketed = fBracketed);
        self.idTestBox = idTestBox;
class WuiTestBox(WuiFormContentBase):
    """
    WUI TestBox Form Content Generator.

    Renders the add/edit/show form for a single TestBox record and, for the
    details view, an extra read-only section with the machine-reported
    (testbox-script settable) properties.
    """

    def __init__(self, oData, sMode, oDisp):
        # Pick the page title according to the form mode.
        if sMode == WuiFormContentBase.ksMode_Add:
            sTitle = 'Create TextBox'; # NOTE(review): 'TextBox' is probably a typo for 'TestBox' (user-visible title) -- confirm before changing.
            if oData.uuidSystem is not None and len(oData.uuidSystem) > 10:
                sTitle += ' - ' + oData.uuidSystem;
        elif sMode == WuiFormContentBase.ksMode_Edit:
            sTitle = 'Edit TestBox - %s (#%s)' % (oData.sName, oData.idTestBox);
        else:
            assert sMode == WuiFormContentBase.ksMode_Show;
            sTitle = 'TestBox - %s (#%s)' % (oData.sName, oData.idTestBox);
        WuiFormContentBase.__init__(self, oData, sMode, 'TestBox', oDisp, sTitle);

        # Try enter sName as hostname (no domain) when creating the testbox.
        if sMode == WuiFormContentBase.ksMode_Add \
           and self._oData.sName in [None, ''] \
           and self._oData.ip not in [None, '']:
            try:
                # Best-effort reverse DNS; any failure simply leaves sName as it was.
                (self._oData.sName, _, _) = socket.gethostbyaddr(self._oData.ip);
            except:
                pass;
            offDot = self._oData.sName.find('.');
            if offDot > 0:
                # Strip the domain part, keeping only the bare host name.
                self._oData.sName = self._oData.sName[:offDot];


    def _populateForm(self, oForm, oData):
        """Adds the admin-settable TestBox properties to the edit/add form."""
        oForm.addIntRO(      TestBoxData.ksParam_idTestBox,       oData.idTestBox,       'TestBox ID');
        oForm.addIntRO(      TestBoxData.ksParam_idGenTestBox,    oData.idGenTestBox,    'TestBox generation ID');
        oForm.addTimestampRO(TestBoxData.ksParam_tsEffective,     oData.tsEffective,     'Last changed');
        oForm.addTimestampRO(TestBoxData.ksParam_tsExpire,        oData.tsExpire,        'Expires (excl)');
        oForm.addIntRO(      TestBoxData.ksParam_uidAuthor,       oData.uidAuthor,       'Changed by UID');

        oForm.addText(       TestBoxData.ksParam_ip,              oData.ip,              'TestBox IP Address'); ## make read only??
        oForm.addUuid(       TestBoxData.ksParam_uuidSystem,      oData.uuidSystem,      'TestBox System/Firmware UUID');
        oForm.addText(       TestBoxData.ksParam_sName,           oData.sName,           'TestBox Name');
        oForm.addText(       TestBoxData.ksParam_sDescription,    oData.sDescription,    'TestBox Description');
        oForm.addCheckBox(   TestBoxData.ksParam_fEnabled,        oData.fEnabled,        'Enabled');
        oForm.addComboBox(   TestBoxData.ksParam_enmLomKind,      oData.enmLomKind,      'Lights-out-management',
                             TestBoxData.kaoLomKindDescs);
        oForm.addText(       TestBoxData.ksParam_ipLom,           oData.ipLom,           'Lights-out-management IP Address');
        oForm.addInt(        TestBoxData.ksParam_pctScaleTimeout, oData.pctScaleTimeout, 'Timeout scale factor (%)');

        # Scheduling group membership (multi-select against all known groups).
        oForm.addListOfSchedGroupsForTestBox(TestBoxDataEx.ksParam_aoInSchedGroups,
                                             oData.aoInSchedGroups,
                                             SchedGroupLogic(TMDatabaseConnection()).fetchOrderedByName(),
                                             'Scheduling Group');
        # Command, comment and submit button.
        # The pending command is only editable in edit mode.
        if self._sMode == WuiFormContentBase.ksMode_Edit:
            oForm.addComboBox(TestBoxData.ksParam_enmPendingCmd, oData.enmPendingCmd, 'Pending command',
                              TestBoxData.kaoTestBoxCmdDescs);
        else:
            oForm.addComboBoxRO(TestBoxData.ksParam_enmPendingCmd, oData.enmPendingCmd, 'Pending command',
                                TestBoxData.kaoTestBoxCmdDescs);
        oForm.addMultilineText(TestBoxData.ksParam_sComment, oData.sComment, 'Comment');
        if self._sMode != WuiFormContentBase.ksMode_Show:
            oForm.addSubmit('Create TestBox' if self._sMode == WuiFormContentBase.ksMode_Add else 'Change TestBox');
        return True;

    def _generatePostFormContent(self, oData):
        """Returns a read-only section with the machine-reported (testbox script settable) properties."""
        from testmanager.webui.wuihlpform import WuiHlpForm;
        oForm = WuiHlpForm('testbox-machine-settable', '', fReadOnly = True);
        oForm.addTextRO(     TestBoxData.ksParam_sOs,              oData.sOs,              'TestBox OS');
        oForm.addTextRO(     TestBoxData.ksParam_sOsVersion,       oData.sOsVersion,       'TestBox OS version');
        oForm.addTextRO(     TestBoxData.ksParam_sCpuArch,         oData.sCpuArch,         'TestBox OS kernel architecture');
        oForm.addTextRO(     TestBoxData.ksParam_sCpuVendor,       oData.sCpuVendor,       'TestBox CPU vendor');
        oForm.addTextRO(     TestBoxData.ksParam_sCpuName,         oData.sCpuName,         'TestBox CPU name');
        if oData.lCpuRevision:
            # Decode the packed family/model/stepping for convenience.
            oForm.addTextRO( TestBoxData.ksParam_lCpuRevision, '%#x' % (oData.lCpuRevision,), 'TestBox CPU revision',
                             sPostHtml = ' (family=%#x model=%#x stepping=%#x)'
                                       % (oData.getCpuFamily(), oData.getCpuModel(), oData.getCpuStepping(),),
                             sSubClass = 'long');
        else:
            oForm.addLongRO( TestBoxData.ksParam_lCpuRevision,     oData.lCpuRevision,     'TestBox CPU revision');
        oForm.addIntRO(      TestBoxData.ksParam_cCpus,            oData.cCpus,            'Number of CPUs, cores and threads');
        oForm.addCheckBoxRO( TestBoxData.ksParam_fCpuHwVirt,       oData.fCpuHwVirt,       'VT-x or AMD-V supported');
        oForm.addCheckBoxRO( TestBoxData.ksParam_fCpuNestedPaging, oData.fCpuNestedPaging, 'Nested paging supported');
        oForm.addCheckBoxRO( TestBoxData.ksParam_fCpu64BitGuest,   oData.fCpu64BitGuest,   '64-bit guest supported');
        oForm.addCheckBoxRO( TestBoxData.ksParam_fChipsetIoMmu,    oData.fChipsetIoMmu,    'I/O MMU supported');
        oForm.addMultilineTextRO(TestBoxData.ksParam_sReport,      oData.sReport,          'Hardware/software report');
        oForm.addLongRO(     TestBoxData.ksParam_cMbMemory,        oData.cMbMemory,        'Installed RAM size (MB)');
        oForm.addLongRO(     TestBoxData.ksParam_cMbScratch,       oData.cMbScratch,       'Available scratch space (MB)');
        oForm.addIntRO(      TestBoxData.ksParam_iTestBoxScriptRev, oData.iTestBoxScriptRev,
                             'TestBox Script SVN revision');
        sHexVer = oData.formatPythonVersion();
        oForm.addIntRO(      TestBoxData.ksParam_iPythonHexVersion, oData.iPythonHexVersion,
                             'Python version (hex)', sPostHtml = webutils.escapeElem(sHexVer));
        return [('Machine Only Settables', oForm.finalize()),];
class WuiTestBoxList(WuiListContentWithActionBase):
    """
    WUI TestBox List Content Generator.

    Renders the testbox overview table: one row per box with status, pending
    command, hardware summary and the applicable actions, plus a combo box of
    bulk actions and a summary line at the bottom of the page.
    """

    ## Descriptors for the combo box.
    kasTestBoxActionDescs = \
    [ \
        [ 'none', 'Select an action...', '' ],
        [ 'enable', 'Enable', '' ],
        [ 'disable', 'Disable', '' ],
        TestBoxData.kaoTestBoxCmdDescs[1],
        TestBoxData.kaoTestBoxCmdDescs[2],
        TestBoxData.kaoTestBoxCmdDescs[3],
        TestBoxData.kaoTestBoxCmdDescs[4],
        TestBoxData.kaoTestBoxCmdDescs[5],
    ];

    ## Boxes which doesn't report in for more than 15 min are considered dead.
    kcSecMaxStatusDeltaAlive = 15*60

    def __init__(self, aoEntries, iPage, cItemsPerPage, tsEffective, fnDPrint, oDisp, aiSelectedSortColumns = None):
        # type: (list[TestBoxDataForListing], int, int, datetime.datetime, ignore, WuiAdmin) -> None
        """Sets up the column headers/attributes, sort-column mappings and bulk actions."""
        WuiListContentWithActionBase.__init__(self, aoEntries, iPage, cItemsPerPage, tsEffective,
                                              sTitle = 'TestBoxes', sId = 'users', fnDPrint = fnDPrint, oDisp = oDisp,
                                              aiSelectedSortColumns = aiSelectedSortColumns);
        self._asColumnHeaders.extend([ 'Name', 'LOM', 'Status', 'Cmd',
                                       'Note', 'Script', 'Python', 'Group',
                                       'OS', 'CPU', 'Features', 'CPUs', 'RAM', 'Scratch',
                                       'Actions' ]);
        # NOTE(review): the first line below is missing a trailing comma, so python
        # concatenates the 4th and 5th literals into one malformed attribute
        # ('align="center"align="center"') and the list comes out one entry shorter
        # than written, shifting the attribute-to-column mapping.  The intended
        # mapping is ambiguous (the third line has 7 entries for 6 headers), so
        # verify the desired per-column alignment before fixing.
        self._asColumnAttribs.extend([ 'align="center"', 'align="center"', 'align="center"', 'align="center"'
                                       'align="center"', 'align="center"', 'align="center"', 'align="center"',
                                       '', '', '', 'align="left"', 'align="right"', 'align="right"', 'align="right"',
                                       'align="center"' ]);
        # Per-column sort specifications (None = column is not sortable).
        self._aaiColumnSorting.extend([
            (TestBoxLogic.kiSortColumn_sName,),
            None, # LOM
            (-TestBoxLogic.kiSortColumn_fEnabled, TestBoxLogic.kiSortColumn_enmState, -TestBoxLogic.kiSortColumn_tsUpdated,),
            (TestBoxLogic.kiSortColumn_enmPendingCmd,),
            None, # Note
            (TestBoxLogic.kiSortColumn_iTestBoxScriptRev,),
            (TestBoxLogic.kiSortColumn_iPythonHexVersion,),
            None, # Group
            (TestBoxLogic.kiSortColumn_sOs, TestBoxLogic.kiSortColumn_sOsVersion, TestBoxLogic.kiSortColumn_sCpuArch,),
            (TestBoxLogic.kiSortColumn_sCpuVendor, TestBoxLogic.kiSortColumn_lCpuRevision,),
            (TestBoxLogic.kiSortColumn_fCpuNestedPaging,),
            (TestBoxLogic.kiSortColumn_cCpus,),
            (TestBoxLogic.kiSortColumn_cMbMemory,),
            (TestBoxLogic.kiSortColumn_cMbScratch,),
            None, # Actions
        ]);
        assert len(self._aaiColumnSorting) == len(self._asColumnHeaders);
        self._aoActions = list(self.kasTestBoxActionDescs);
        self._sAction = oDisp.ksActionTestBoxListPost;
        self._sCheckboxName = TestBoxData.ksParam_idTestBox;

    def show(self, fShowNavigation = True):
        """ Adds some stats at the bottom of the page """
        (sTitle, sBody) = super(WuiTestBoxList, self).show(fShowNavigation);

        # Count boxes in interesting states.
        # A box counts as active when it is enabled and has reported within the
        # last kcSecMaxStatusDeltaAlive seconds; otherwise it counts as dead.
        # NOTE(review): boxes that have never reported (oStatus is None) are
        # counted neither active nor dead, so the two numbers may not add up.
        if self._aoEntries:
            cActive = 0;
            cDead = 0;
            for oTestBox in self._aoEntries:
                if oTestBox.oStatus is not None:
                    oDelta = oTestBox.tsCurrent - oTestBox.oStatus.tsUpdated;
                    if oDelta.days <= 0 and oDelta.seconds <= self.kcSecMaxStatusDeltaAlive:
                        if oTestBox.fEnabled:
                            cActive += 1;
                        else:
                            cDead += 1;
                    else:
                        cDead += 1;
            sBody += '<div id="testboxsummary"><p>\n' \
                     '%s testboxes of which %s are active and %s dead' \
                     '</p></div>\n' \
                   % (len(self._aoEntries), cActive, cDead,)
        return (sTitle, sBody);

    def _formatListEntry(self, iEntry): # pylint: disable=R0914
        """Formats testbox #iEntry of self._aoEntries as one table row (a list with one item per column)."""
        from testmanager.webui.wuiadmin import WuiAdmin;
        oEntry = self._aoEntries[iEntry];

        # Lights-out management cell: link(s) to the LOM web UI plus its IP.
        if oEntry.enmLomKind == TestBoxData.ksLomKind_ILOM:
            aoLom = [ WuiLinkBase('ILOM', 'https://%s/' % (oEntry.ipLom,), fBracketed = False), ];
        elif oEntry.enmLomKind == TestBoxData.ksLomKind_ELOM:
            aoLom = [ WuiLinkBase('ELOM', 'http://%s/' % (oEntry.ipLom,), fBracketed = False), ];
        elif oEntry.enmLomKind == TestBoxData.ksLomKind_AppleXserveLom:
            aoLom = [ 'Apple LOM' ];
        elif oEntry.enmLomKind == TestBoxData.ksLomKind_None:
            aoLom = [ 'none' ];
        else:
            aoLom = [ 'Unexpected enmLomKind value "%s"' % (oEntry.enmLomKind,) ];
        if oEntry.ipLom is not None:
            if oEntry.enmLomKind in [ TestBoxData.ksLomKind_ILOM, TestBoxData.ksLomKind_ELOM ]:
                aoLom += [ WuiLinkBase('(ssh)', 'ssh://%s' % (oEntry.ipLom,), fBracketed = False) ];
            aoLom += [ WuiRawHtml('<br>'), '%s' % (oEntry.ipLom,) ];

        # State and Last seen.
        if oEntry.oStatus is None:
            oSeen = WuiSpanText('tmspan-offline', 'Never');
            oState = '';
        else:
            oDelta = oEntry.tsCurrent - oEntry.oStatus.tsUpdated;
            if oDelta.days <= 0 and oDelta.seconds <= self.kcSecMaxStatusDeltaAlive:
                # Reported recently enough: show seconds-ago in 'online' style.
                oSeen = WuiSpanText('tmspan-online', u'%s\u00a0s\u00a0ago' % (oDelta.days * 24 * 3600 + oDelta.seconds,));
            else:
                oSeen = WuiSpanText('tmspan-offline', u'%s' % (self.formatTsShort(oEntry.oStatus.tsUpdated),));

            if oEntry.oStatus.idTestSet is None:
                oState = str(oEntry.oStatus.enmState);
            else:
                # Link the state to the test set the box is currently running.
                from testmanager.webui.wuimain import WuiMain;
                oState = WuiTmLink(oEntry.oStatus.enmState, WuiMain.ksScriptName, # pylint: disable=R0204
                                   { WuiMain.ksParamAction: WuiMain.ksActionTestResultDetails,
                                     TestSetData.ksParam_idTestSet: oEntry.oStatus.idTestSet, },
                                   sTitle = '#%u' % (oEntry.oStatus.idTestSet,),
                                   fBracketed = False);
        # Comment
        oComment = self._formatCommentCell(oEntry.sComment);

        # Group links.
        aoGroups = [];
        for oInGroup in oEntry.aoInSchedGroups:
            oSchedGroup = oInGroup.oSchedGroup;
            aoGroups.append(WuiTmLink(oSchedGroup.sName, WuiAdmin.ksScriptName,
                                      { WuiAdmin.ksParamAction: WuiAdmin.ksActionSchedGroupEdit,
                                        SchedGroupData.ksParam_idSchedGroup: oSchedGroup.idSchedGroup, },
                                      sTitle = '#%u' % (oSchedGroup.idSchedGroup,),
                                      fBracketed = len(oEntry.aoInSchedGroups) > 1));

        # Reformat the OS version to take less space.
        aoOs = [ 'N/A' ];
        if oEntry.sOs is not None and oEntry.sOsVersion is not None and oEntry.sCpuArch:
            sOsVersion = oEntry.sOsVersion;
            # Prefix a bare numeric version with 'v' on unixy OSes.
            if sOsVersion[0] not in [ 'v', 'V', 'r', 'R'] \
               and sOsVersion[0].isdigit() \
               and sOsVersion.find('.') in range(4) \
               and oEntry.sOs in [ 'linux', 'solaris', 'darwin', ]:
                sOsVersion = 'v' + sOsVersion;
            sVer1 = sOsVersion;
            sVer2 = None;
            # Split the version into two shorter display lines, per OS flavor.
            if oEntry.sOs == 'linux' or oEntry.sOs == 'darwin':
                iSep = sOsVersion.find(' / ');
                if iSep > 0:
                    sVer1 = sOsVersion[:iSep].strip();
                    sVer2 = sOsVersion[iSep + 3:].strip();
                    sVer2 = sVer2.replace('Red Hat Enterprise Linux Server', 'RHEL');
                    sVer2 = sVer2.replace('Oracle Linux Server', 'OL');
            elif oEntry.sOs == 'solaris':
                iSep = sOsVersion.find(' (');
                if iSep > 0 and sOsVersion[-1] == ')':
                    sVer1 = sOsVersion[:iSep].strip();
                    sVer2 = sOsVersion[iSep + 2:-1].strip();
            elif oEntry.sOs == 'win':
                iSep = sOsVersion.find('build');
                if iSep > 0:
                    sVer1 = sOsVersion[:iSep].strip();
                    sVer2 = 'B' + sOsVersion[iSep + 1:].strip();
            # Non-breaking hyphens (u2011) keep version strings on one line.
            aoOs = [
                WuiSpanText('tmspan-osarch', u'%s.%s' % (oEntry.sOs, oEntry.sCpuArch,)),
                WuiSpanText('tmspan-osver1', sVer1.replace('-', u'\u2011'),),
            ];
            if sVer2 is not None:
                aoOs += [ WuiRawHtml('<br>'), WuiSpanText('tmspan-osver2', sVer2.replace('-', u'\u2011')), ];

        # Format the CPU revision.
        oCpu = None;
        if oEntry.lCpuRevision is not None and oEntry.sCpuVendor is not None and oEntry.sCpuName is not None:
            oCpu = [
                u'%s (fam:%xh\u00a0m:%xh\u00a0s:%xh)'
                % (oEntry.sCpuVendor, oEntry.getCpuFamily(), oEntry.getCpuModel(), oEntry.getCpuStepping(),),
                WuiRawHtml('<br>'),
                oEntry.sCpuName,
            ];
        else:
            # Fall back to whatever subset of the CPU info is available.
            oCpu = [];
            if oEntry.sCpuVendor is not None:
                oCpu.append(oEntry.sCpuVendor);
            if oEntry.lCpuRevision is not None:
                oCpu.append('%#x' % (oEntry.lCpuRevision,));
            if oEntry.sCpuName is not None:
                oCpu.append(oEntry.sCpuName);

        # Stuff cpu vendor and cpu/box features into one field.
        asFeatures = []
        if oEntry.fCpuHwVirt is True: asFeatures.append(u'HW\u2011Virt');
        if oEntry.fCpuNestedPaging is True: asFeatures.append(u'Nested\u2011Paging');
        if oEntry.fCpu64BitGuest is True: asFeatures.append(u'64\u2011bit\u2011Guest');
        if oEntry.fChipsetIoMmu is True: asFeatures.append(u'I/O\u2011MMU');
        sFeatures = u' '.join(asFeatures) if asFeatures else u'';

        # Collection applicable actions.
        aoActions = [
            WuiTmLink('Details', WuiAdmin.ksScriptName,
                      { WuiAdmin.ksParamAction: WuiAdmin.ksActionTestBoxDetails,
                        TestBoxData.ksParam_idTestBox: oEntry.idTestBox,
                        WuiAdmin.ksParamEffectiveDate: self._tsEffectiveDate, } ),
        ]
        if self._oDisp is None or not self._oDisp.isReadOnlyUser():
            # Edit/Remove only make sense for the current (non-expired) row version.
            if isDbTimestampInfinity(oEntry.tsExpire):
                aoActions += [
                    WuiTmLink('Edit', WuiAdmin.ksScriptName,
                              { WuiAdmin.ksParamAction: WuiAdmin.ksActionTestBoxEdit,
                                TestBoxData.ksParam_idTestBox: oEntry.idTestBox, } ),
                    WuiTmLink('Remove', WuiAdmin.ksScriptName,
                              { WuiAdmin.ksParamAction: WuiAdmin.ksActionTestBoxRemovePost,
                                TestBoxData.ksParam_idTestBox: oEntry.idTestBox },
                              sConfirm = 'Are you sure that you want to remove %s (%s)?' % (oEntry.sName, oEntry.ip) ),
                ]
            if oEntry.sOs not in [ 'win', 'os2', ] and oEntry.ip is not None:
                aoActions.append(WuiLinkBase('ssh', 'ssh://vbox@%s' % (oEntry.ip,),));

        # One item per column; order must match _asColumnHeaders in __init__.
        return [ self._getCheckBoxColumn(iEntry, oEntry.idTestBox),
                 [ WuiSpanText('tmspan-name', oEntry.sName), WuiRawHtml('<br>'), '%s' % (oEntry.ip,),],
                 aoLom,
                 [
                     '' if oEntry.fEnabled else 'disabled / ',
                     oState,
                     WuiRawHtml('<br>'),
                     oSeen,
                 ],
                 oEntry.enmPendingCmd,
                 oComment,
                 WuiSvnLink(oEntry.iTestBoxScriptRev),
                 oEntry.formatPythonVersion(),
                 aoGroups,
                 aoOs,
                 oCpu,
                 sFeatures,
                 oEntry.cCpus if oEntry.cCpus is not None else 'N/A',
                 utils.formatNumberNbsp(oEntry.cMbMemory) + u'\u00a0MB' if oEntry.cMbMemory is not None else 'N/A',
                 utils.formatNumberNbsp(oEntry.cMbScratch) + u'\u00a0MB' if oEntry.cMbScratch is not None else 'N/A',
                 aoActions,
        ];
| StarcoderdataPython |
9650595 | <gh_stars>0
########################################
#### Licensed under the MIT license ####
########################################
import torch
import torch.nn as nn
import torch.optim as optim
import os
import numpy as np
import cv2
from numpy import prod
from datetime import datetime
from model import CapsuleNetwork
from loss import CapsuleLoss
from time import time
from torchsummary import summary
SAVE_MODEL_PATH = 'checkpoints/'
if not os.path.exists(SAVE_MODEL_PATH):
os.mkdir(SAVE_MODEL_PATH)
class CapsNetTrainer:
    """
    Wrapper object for handling training and evaluation of a CapsuleNetwork.

    Builds the network, loss, optimizer and LR scheduler from the supplied
    data loaders and hyper parameters; run() performs the train/eval epochs,
    saves a checkpoint and finally reports per-class test accuracy plus a
    confusion matrix.
    """
    def __init__(self, loaders, batch_size, learning_rate, num_routing=3, lr_decay=0.99, classes=7, num_filters=128, stride=2, filter_size=5, recons=False, device=torch.device("cuda" if torch.cuda.is_available() else "cpu"), multi_gpu=(torch.cuda.device_count() > 1)):
        """
        loaders       : dict with 'train', 'eval' and 'test' DataLoaders.
        batch_size    : kept for interface compatibility (the loaders already carry it).
        learning_rate : Adam learning rate.
        num_routing   : number of dynamic-routing iterations.
        lr_decay      : per-epoch exponential LR decay factor.
        classes       : number of output classes (an int here; run() takes the
                        list of class names).
        recons        : whether the network also produces reconstructions.

        Note: the `device`/`multi_gpu` defaults are evaluated once at class
        definition time (standard python default-argument semantics).
        """
        self.device = device
        self.multi_gpu = multi_gpu
        self.recons = recons
        self.classes = classes
        self.loaders = loaders
        # Infer the input image shape from the first training sample.
        img_shape = self.loaders['train'].dataset[0][0].numpy().shape
        self.net = CapsuleNetwork(img_shape, num_filters, stride, filter_size, recons, primary_dim=8, num_classes=self.classes, out_dim=16, num_routing=num_routing, device=self.device).to(self.device)
        #summary(self.net, (3, 70, 70))
        if self.multi_gpu:
            self.net = nn.DataParallel(self.net)
        self.criterion = CapsuleLoss(recons, loss_lambda=0.5, recon_loss_scale=5e-4)
        self.optimizer = optim.Adam(self.net.parameters(), lr=learning_rate)
        self.scheduler = optim.lr_scheduler.ExponentialLR(self.optimizer, gamma=lr_decay)
        print(8*'#', 'PyTorch Model built'.upper(), 8*'#')
        print('Num params:', sum([prod(p.size()) for p in self.net.parameters()]))

    def __repr__(self):
        return repr(self.net)

    def run(self, epochs, classes):
        """Train/evaluate for `epochs` epochs, then report test accuracy.

        classes : sequence of class names; its length defines the one-hot
                  width and the confusion matrix size.
        """
        print(8*'#', 'Run started'.upper(), 8*'#')
        # One-hot lookup table: eye[label] is the one-hot row for that label.
        eye = torch.eye(len(classes)).to(self.device)
        for epoch in range(1, epochs+1):
            for phase in ['train', 'eval']:
                print(f'{phase}ing...'.capitalize())
                if phase == 'train':
                    self.net.train()
                else:
                    self.net.eval()
                t0 = time()
                running_loss = 0.0
                correct = 0; total = 0
                for i, (images, labels) in enumerate(self.loaders[phase]):
                    t1 = time()
                    images, labels = images.to(self.device), labels.to(self.device)
                    # One-hot encode labels
                    labels = eye[labels]
                    self.optimizer.zero_grad()
                    outputs = self.net(images)
                    if type(outputs) is tuple:
                        # (class capsules, reconstructions) when recons is enabled.
                        loss = self.criterion(outputs[0], labels, images, outputs[1])
                        # BUGFIX: keep only the class output for the accuracy
                        # computation below; torch.max() on the tuple crashed.
                        outputs = outputs[0]
                    else:
                        loss = self.criterion(outputs, labels, images, None)
                    if phase == 'train':
                        loss.backward()
                        self.optimizer.step()
                    running_loss += loss.item()
                    _, predicted = torch.max(outputs, 1)
                    _, labels = torch.max(labels, 1)
                    total += labels.size(0)
                    correct += (predicted == labels).sum()
                    accuracy = float(correct) / float(total)
                    if phase == 'train':
                        print(f'Epoch {epoch}, Batch {i+1}, Loss {running_loss/(i+1)}',
                              f'Accuracy {accuracy} Time {round(time()-t1, 3)}s')
                print(f'{phase.upper()} Epoch {epoch}, Loss {running_loss/(i+1)}',
                      f'Accuracy {accuracy} Time {round(time()-t0, 3)}s')
            self.scheduler.step()
        # Checkpoint named by final eval error rate and a timestamp.
        now = str(datetime.now()).replace(" ", "-")
        error_rate = round((1-accuracy)*100, 2)
        torch.save(self.net.state_dict(), os.path.join(SAVE_MODEL_PATH, f'{error_rate}_{now}.pth.tar'))

        # Final test pass: per-class accuracy and confusion matrix.
        class_correct = list(0. for _ in classes)
        class_total = list(0. for _ in classes)
        correct = 0
        total = 0
        # BUGFIX: confusion matrix sized from `classes` instead of a hard-coded
        # 7x7, and `np.int` (removed in numpy >= 1.24) replaced by plain `int`.
        matrix = np.zeros((len(classes), len(classes)), dtype=int)
        for images, labels in self.loaders['test']:
            images, labels = images.to(self.device), labels.to(self.device)
            outputs = self.net(images)
            if type(outputs) is tuple:
                outputs = outputs[0]
            _, predicted = torch.max(outputs, 1)
            labels = eye[labels]
            _, labels = torch.max(labels, 1)
            total += labels.size(0)
            correct += (predicted == labels).sum()
            accuracy = float(correct) / float(total)
            for i in range(labels.size(0)):
                matrix[labels[i], predicted[i]] += 1
            c = (predicted == labels).squeeze()
            for i in range(labels.size(0)):
                label = labels[i]
                class_correct[label] += c[i].item()
                class_total[label] += 1
        print("Test accuracy", accuracy)
        print(matrix)
        for i in range(len(classes)):
            print('Accuracy of %5s : %4f %%' % (
                classes[i], 100 * class_correct[i] / class_total[i]))
| StarcoderdataPython |
12843381 | <reponame>vail130/norm
from __future__ import absolute_import, unicode_literals
import unittest
from mason import Param, ANY, SELECT, COUNT, SUM, AND, OR, Table, NUMERIC, DATE, COALESCE, CASE
class TheSelectClass(unittest.TestCase):
def test_returns_string_for_select_query(self):
purchases = Table('purchases')
users = Table('users')
user_id = Param('user_id')
start = Param('start')
end = Param('end')
query = str(
SELECT(purchases.id, purchases.product_name, NUMERIC(purchases.product_price, 10, 2),
DATE(purchases.datetime_purchased))
.FROM(purchases)
.INNER_JOIN(users.ON(purchases.purchaser_id == users.user_id))
.WHERE(AND(purchases.datetime_purchased.BETWEEN(start).AND(end),
OR(purchases.purchaser_id == user_id,
purchases.purchaser_id.IS_NULL)))
.ORDER_BY(purchases.datetime_purchased.ASC)
.LIMIT(10)
.OFFSET(10)
)
expected_query = '\n'.join([
"SELECT purchases.id, purchases.product_name, "
"(purchases.product_price)::NUMERIC(10, 2), (purchases.datetime_purchased)::DATE",
"FROM purchases",
"INNER JOIN users ON purchases.purchaser_id = users.user_id",
"WHERE purchases.datetime_purchased BETWEEN %(start)s AND %(end)s "
"AND (purchases.purchaser_id = %(user_id)s OR purchases.purchaser_id IS NULL)",
"ORDER BY purchases.datetime_purchased ASC",
"LIMIT 10",
"OFFSET 10",
])
self.assertEqual(query, expected_query)
def test_returns_string_for_select_query_grouping(self):
purchases = Table('purchases')
start = Param('start')
end = Param('end')
min_category_sum = Param('min_category_sum')
num_purchases = COUNT(purchases).AS('num_purchases')
category_percent = (SUM(
CASE.WHEN(purchases.is_valid)
.THEN(COALESCE(purchases.product_price, 0))
.ELSE(0).END
) / 100.0).AS('category_percent')
category_sum = SUM(COALESCE(purchases.product_price, 0)).AS('category_sum')
query = str(
SELECT(purchases.category, category_percent, num_purchases)
.FROM(purchases)
.WHERE(purchases.datetime_purchased.BETWEEN(start).AND(end))
.GROUP_BY(purchases.category)
.HAVING(category_sum > min_category_sum)
)
expected_query = '\n'.join([
("SELECT purchases.category, "
"(SUM(CASE WHEN purchases.is_valid "
"THEN COALESCE(purchases.product_price, 0) ELSE 0 END) / 100.0) AS category_percent, "
"COUNT(*) AS num_purchases"),
"FROM purchases",
"WHERE purchases.datetime_purchased BETWEEN %(start)s AND %(end)s",
"GROUP BY purchases.category",
"HAVING category_sum > %(min_category_sum)s",
])
self.assertEqual(query, expected_query)
def test_returns_string_for_select_query_with_subqueries(self):
purchases = Table('purchases')
num_purchases = COUNT(purchases).AS('num_purchases')
grouped_purchases = (
SELECT(purchases.category.AS('category'), num_purchases)
.FROM(purchases)
.GROUP_BY(purchases.category)
.AS('grouped_purchases')
)
products = Table('products')
num_products = COUNT(products).AS('num_products')
grouped_products = (
SELECT(products.category.AS('category'), num_products)
.FROM(products)
.GROUP_BY(products.category)
.AS('grouped_products')
)
categories_param = Param('categories')
categories_table = Table('categories')
query = str(
SELECT(grouped_purchases.category, grouped_purchases.num_purchases, grouped_products.num_products)
.FROM(grouped_purchases)
.INNER_JOIN(grouped_products.ON(grouped_purchases.category == grouped_products.category))
.WHERE(AND(grouped_purchases.category == ANY(categories_param),
grouped_purchases.category.IN(SELECT(categories_table.category).FROM(categories_table))))
)
expected_query = '\n'.join([
"SELECT grouped_purchases.category, grouped_purchases.num_purchases, grouped_products.num_products",
"FROM (",
"\tSELECT purchases.category AS category, COUNT(*) AS num_purchases",
"\tFROM purchases",
"\tGROUP BY purchases.category",
") AS grouped_purchases",
"INNER JOIN (",
"\tSELECT products.category AS category, COUNT(*) AS num_products",
"\tFROM products",
"\tGROUP BY products.category",
") AS grouped_products ON grouped_purchases.category = grouped_products.category",
"WHERE grouped_purchases.category = ANY(%(categories)s) "
"AND grouped_purchases.category IN (",
"\tSELECT categories.category",
"\tFROM categories",
")",
])
self.assertEqual(query, expected_query)
    def test_returns_string_for_select_query_with_joins(self):
        """Each JOIN variant renders as its own line, in call order,
        followed by the LIMIT clause."""
        table = Table('table')
        query = str(
            SELECT('*')
            .FROM(table)
            .LEFT_OUTER_JOIN(table)
            .RIGHT_OUTER_JOIN(table)
            .FULL_OUTER_JOIN(table)
            .OUTER_JOIN(table)
            .LIMIT(10)
        )
        expected_query = '\n'.join([
            "SELECT *",
            "FROM table",
            "LEFT OUTER JOIN table",
            "RIGHT OUTER JOIN table",
            "FULL OUTER JOIN table",
            "OUTER JOIN table",
            "LIMIT 10",
        ])
        self.assertEqual(query, expected_query)
| StarcoderdataPython |
8073719 | <reponame>adelekap/ModelingBehavior_QLearning
"""
Theses are the learning functions that your
agent can utilize.
Returns the value of alpha.
"""
def constant(agent,state):
    # Constant learning rate read from the agent's ``alpha`` attribute.
    # NOTE(review): the ``[8:]`` slice implies ``agent.alpha`` is a sequence
    # (likely a string such as "alpha = 0.5") whose meaningful part starts at
    # index 8 — confirm against how ``agent.alpha`` is populated.
    return agent.alpha[8:]
def decreasingLinear(agent, state):
    """Linearly decaying learning rate: 1 / (episodes so far + current trial)."""
    steps_elapsed = agent.episodesSoFar + state.trial
    return 1 / steps_elapsed
def decreasingExponential(agent, state):
    """Exponentially decaying learning rate: 0.5 ** (episodes so far + current trial)."""
    exponent = agent.episodesSoFar + state.trial
    return 0.5 ** exponent
8108160 | <reponame>pivotal-energy-solutions/docusign-python-client<filename>docusign_esign/models/reminders.py
# coding: utf-8
"""
DocuSign REST API
The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign.
OpenAPI spec version: v2
Contact: <EMAIL>
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class Reminders(object):
    """
    Envelope reminder settings: delay before the first reminder, an
    enabled flag and the interval between reminder emails.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    def __init__(self, reminder_delay=None, reminder_enabled=None, reminder_frequency=None):
        """
        Reminders - a model defined in Swagger

        :param reminder_delay: days after receipt before the first reminder (str)
        :param reminder_enabled: "true"/"false" string enabling reminders
        :param reminder_frequency: interval, in days, between reminder emails (str)
        """
        # Maps attribute name -> swagger type; to_dict() walks these keys
        # to serialize the model generically.
        self.swagger_types = {
            'reminder_delay': 'str',
            'reminder_enabled': 'str',
            'reminder_frequency': 'str'
        }
        # Maps attribute name -> JSON key used in the API definition.
        self.attribute_map = {
            'reminder_delay': 'reminderDelay',
            'reminder_enabled': 'reminderEnabled',
            'reminder_frequency': 'reminderFrequency'
        }
        self._reminder_delay = reminder_delay
        self._reminder_enabled = reminder_enabled
        self._reminder_frequency = reminder_frequency

    @property
    def reminder_delay(self):
        """
        Gets the reminder_delay of this Reminders.
        An interger that sets the number of days after the recipient receives the envelope that reminder emails are sent to the recipient.

        :return: The reminder_delay of this Reminders.
        :rtype: str
        """
        return self._reminder_delay

    @reminder_delay.setter
    def reminder_delay(self, reminder_delay):
        """
        Sets the reminder_delay of this Reminders.
        An interger that sets the number of days after the recipient receives the envelope that reminder emails are sent to the recipient.

        :param reminder_delay: The reminder_delay of this Reminders.
        :type: str
        """
        self._reminder_delay = reminder_delay

    @property
    def reminder_enabled(self):
        """
        Gets the reminder_enabled of this Reminders.
        When set to **true**, the envelope expires (is no longer available for signing) in the set number of days. If false, the account default setting is used. If the account does not have an expiration setting, the DocuSign default value of 120 days is used.

        :return: The reminder_enabled of this Reminders.
        :rtype: str
        """
        return self._reminder_enabled

    @reminder_enabled.setter
    def reminder_enabled(self, reminder_enabled):
        """
        Sets the reminder_enabled of this Reminders.
        When set to **true**, the envelope expires (is no longer available for signing) in the set number of days. If false, the account default setting is used. If the account does not have an expiration setting, the DocuSign default value of 120 days is used.

        :param reminder_enabled: The reminder_enabled of this Reminders.
        :type: str
        """
        self._reminder_enabled = reminder_enabled

    @property
    def reminder_frequency(self):
        """
        Gets the reminder_frequency of this Reminders.
        An interger that sets the interval, in days, between reminder emails.

        :return: The reminder_frequency of this Reminders.
        :rtype: str
        """
        return self._reminder_frequency

    @reminder_frequency.setter
    def reminder_frequency(self, reminder_frequency):
        """
        Sets the reminder_frequency of this Reminders.
        An interger that sets the interval, in days, between reminder emails.

        :param reminder_frequency: The reminder_frequency of this Reminders.
        :type: str
        """
        self._reminder_frequency = reminder_frequency

    def to_dict(self):
        """
        Returns the model properties as a dict, recursing into nested
        swagger models (anything exposing a ``to_dict`` method).
        """
        result = {}
        # Iterate the declared attributes directly (Python 3 idiom; the
        # original used six.iteritems, which is unnecessary here).
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    x.to_dict() if hasattr(x, "to_dict") else x
                    for x in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        # Bug fix: the original compared __dict__ unconditionally, raising
        # AttributeError when compared against objects without a __dict__
        # (e.g. ints, strings). Unrelated types now simply compare unequal.
        if not isinstance(other, Reminders):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
| StarcoderdataPython |
1957274 | from __future__ import print_function, absolute_import, division
# Python 2 compatibility: if the builtins namespace has no ``bytes`` type,
# alias it to ``str`` (on Python 2, str is the bytes type).
# NOTE(review): ``hasattr(__builtins__, ...)`` is fragile — ``__builtins__``
# is a module in __main__ but a dict in imported modules, where hasattr of a
# dict checks dict attributes, not builtins. Also the ``bytes: bytes = str``
# annotated assignment looks accidental (presumably meant ``bytes = str``);
# verify intent before relying on it.
if not hasattr(__builtins__, 'bytes'):
    bytes: bytes = str
| StarcoderdataPython |
3550479 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from warehouse.legacy.api.xmlrpc.cache import interfaces
def cached_return_view(view, info):
    """Pyramid view deriver serving XML-RPC responses from a cache.

    When the view was registered with ``xmlrpc_cache=True``, return a
    wrapper that looks up the cache service and fetches/stores the view's
    result under a key derived from the RPC arguments. Otherwise the view
    is returned untouched.
    """
    # Views not opted into caching pass straight through.
    if not info.options.get("xmlrpc_cache"):
        return view

    tag = info.options.get("xmlrpc_cache_tag")
    expires = info.options.get("xmlrpc_cache_expires", 86400)
    arg_index = info.options.get("xmlrpc_cache_arg_index")
    slice_obj = info.options.get("xmlrpc_cache_slice_obj", slice(None, None))
    tag_processor = info.options.get(
        "xmlrpc_cache_tag_processor", lambda x: x.lower()
    )

    def wrapper_view(context, request):
        # Without a registered cache service, run the view uncached.
        try:
            service = request.find_service(interfaces.IXMLRPCCache)
        except LookupError:
            return view(context, request)
        try:
            cache_key = json.dumps(request.rpc_args[slice_obj])
            if arg_index is not None:
                cache_tag = tag % (tag_processor(str(request.rpc_args[arg_index])))
            else:
                cache_tag = tag
            return service.fetch(
                view, (context, request), {}, cache_key, cache_tag, expires
            )
        except (interfaces.CacheError, IndexError):
            # Cache trouble or missing RPC args: fall back to the live view.
            return view(context, request)

    return wrapper_view


cached_return_view.options = [  # type: ignore
    "xmlrpc_cache",
    "xmlrpc_cache_tag",
    "xmlrpc_cache_expires",
    "xmlrpc_cache_arg_index",
    "xmlrpc_cache_slice_obj",
    "xmlrpc_cache_tag_processor",
]
| StarcoderdataPython |
# Package metadata.
# NOTE(review): reassigning ``__name__`` at module level is unusual and can
# confuse tooling that inspects it — confirm this is deliberate.
__name__ = "scratchconnect"
__version__ = "2.4"
__author__ = "<NAME>"
__documentation__ = "https://sid72020123.github.io/scratchconnect/"
# Package docstring is built as an f-string so it always embeds the current
# documentation URL.
__doc__ = f"""
scratchconnect is a Python Library to connect Scratch API and much more.
This library can show the statistics of Users, Projects, Studios, Forums and also connect and set cloud variables of a project!
Import Statement:
    import scratchconnect
Documentation(Tutorial):
    For documentation, go to {__documentation__}
Required Libraries:
    requests*, re*, json*, time*, threading*, websocket-client
    * -> In-built
This library also uses pyEmmiter to handle cloud events in Python.
History:
    19/06/2021(v0.0.0.1) - First made the library and updated it.
    20/06/2021(v0.1) - Added many features.
    21/06/2021(v0.1.9) - Bug fixes.
    26/06/2021(v0.2.0) - Made Improvements and added new features.
    27/06/2021(v0.2.6) - Bug Fixes and update and made the 'Studio' class.
    03/07/2021(v0.4.5) - Added many functions and made the 'Project' class.
    04/07/2021(v0.5.0) - Update.
    05/07/2021(v0.5.1) - Updated the messages function.
    06/07/2021(v0.6.0) - Updated CloudConnection.
    08/07/2021(v0.7.5) - Updated CloudConnection.
    10/07/2021(v0.7.5) - Updated CloudConnection, made the Forum class and added DocString.
    13/07/2021(v0.9.7) - Added DocString.
    14/07/2021(v0.9.0) - Bug Fixes.
    15/07/2021(v1.0) - First Release!
    18/07/2021(V1.1) - Made the 'studio.get_projects()'.
    19/07/2021(v1.2) - Made the get comments, curators, managers of the studio
    13/08/2021(v1.3) - Added the get comments function
    14/08/2021(v1.4) - Updated the get messages function
    17/08/2021(v1.5) - Made some bug fixes
    18/09/2021(v1.7) - Made the ScratchConnect and User Classes fast and Improved methods
    19/09/2021(v1.8) - Made the Studio Class Faster and Improved methods
    25/09/2021(v1.8.5) - Updated the Project and User classes
    02/10/2021(v2.0) - Updated the Cloud and Forum Class
    10/10/2021(v2.0.1) - Fixed some cloud stuff
    11/10/2021(v2.1) - Added some features to Forum Class
    24/10/2021(v2.1.1) - Started making the scStorage Class
    29/10/2021(v2.1.1.1) - Fixed set_bio() and set_work() and updated the scDataBase
    30/10/2021(v2.2.5) - Updated the scStorage
    31/10/2021(v2.2.7) - Updated the scStorage
    25/11/2021(v2.3) - Updated the scStorage and CloudConnection
    13/12/2021(v2.3.5) - Started making the TurbowarpCloudConnection feature and added some methods to it
    14/12/2021(v2.4) - Updated and fixed mistakes in docs
Credits:
    All code by <NAME>.
Information:
    Module made by:- <NAME>
    Age:- 15 (as of 2021)
    Email:- <EMAIL>
    YouTube Channel:- Siddhesh Chavan (Link: https://www.youtube.com/channel/UCWcSxfT-SbqAktvGAsrtadQ)
    Scratch Account:- @Sid72020123 (Link: https://scratch.mit.edu/users/Sid72020123/)
    My self-made Website: https://Sid72020123.github.io/
"""
# Re-export the main entry points at package level.
from scratchconnect.ScratchConnect import ScratchConnect
from scratchconnect import Exceptions
# Announce the library version and documentation URL on import.
print(f"{__name__} v{__version__} - {__documentation__}")
| StarcoderdataPython |
8152810 | <filename>bioconda_utils/bot/views.py
"""
HTTP Views (accepts and parses webhooks)
"""
import logging
from aiohttp import web
from aiohttp_session import get_session
from aiohttp_security import check_authorized, forget, permits, remember, authorized_userid
from aiohttp_jinja2 import template, render_template
from .events import event_routes
from ..githubhandler import Event
from ..circleci import SlackMessage
from .worker import capp
from .config import APP_SECRET, BOT_BASEURL
from .commands import command_routes
#: Module-level logger for the webhook/view handlers
logger = logging.getLogger(__name__)  # pylint: disable=invalid-name
#: List of routes from url path to handler defined in this module
web_routes = web.RouteTableDef()  # pylint: disable=invalid-name
#: List of navigation bar entries defined by this module;
#: each entry is a (url path, route name, page title) tuple
navigation_bar = []  # pylint: disable=invalid-name
def add_to_navbar(title):
    """Decorator adding a view to the navigation bar

    Must be "above" the ``@web_routes`` decorator.

    Arguments:
      title: Title to register for this page. Will be the HTML title
             and the name in the navbar.
    """
    def decorator(view_func):
        # The most recently registered route belongs to the decorated view.
        latest_route = web_routes[-1]
        entry = (latest_route.path, latest_route.kwargs['name'], title)
        navigation_bar.append(entry)
        return view_func
    return decorator
async def check_permission(request, permission, context=None):
    """Checks permissions

    Custom implementation replacing aiohttp-security one. This one
    adds the requested permissions to the request so they can
    be presented in the error handler.

    Raises:
        HTTPForbidden
    """
    # First require an authenticated user at all.
    await check_authorized(request)
    allowed = await permits(request, permission, context)
    if not allowed:
        # Stash the failed permission on the request so the error handler
        # can tell the user which permission was required.
        request['permission_required'] = permission
        raise web.HTTPForbidden()
@web_routes.post('/_gh')
async def github_webhook_dispatch(request):
    """View for incoming webhooks from Github

    Here, webhooks (events) from Github are accepted and dispatched to
    the event handlers defined in `events` module and registered with
    `event_routes`.
    """
    try:
        body = await request.read()
        secret = APP_SECRET
        if secret == "IGNORE":
            # For debugging locally, we allow not verifying the
            # secret normally used to authenticate incoming webhooks.
            # You do have to set it to "IGNORE" so that it's not
            # accidentally disabled.
            logger.error("IGNORING WEBHOOK SECRET (DEBUG MODE)")
            secret = None
        # Parse (and, unless disabled above, authenticate) the payload.
        event = Event.from_http(request.headers, body, secret=secret)
        # Respond to liveness check
        if event.event == "ping":
            return web.Response(status=200)
        # Log Event
        installation = event.get('installation/id')
        to_user = event.get('repository/owner/login', None)
        to_repo = event.get('repository/name', None)
        action = event.get('action', None)
        action_msg = '/' + action if action else ''
        logger.info("Received GH Event '%s%s' (%s) for %s (%s/%s)",
                    event.event, action_msg,
                    event.delivery_id,
                    installation, to_user, to_repo)
        # Get GithubAPI object for this installation
        ghapi = await request.app['ghappapi'].get_github_api(
            dry_run=False, installation=installation, to_user=to_user, to_repo=to_repo
        )
        # Dispatch the Event
        try:
            await event_routes.dispatch(event, ghapi)
            logger.info("Event '%s%s' (%s) done", event.event, action_msg, event.delivery_id)
        except Exception:  # pylint: disable=broad-except
            # A failed handler must not turn into a 500 — Github would
            # retry the delivery; we log and acknowledge instead.
            logger.exception("Failed to dispatch %s", event.delivery_id)
        # Remember the rate limit
        # FIXME: remove this, we have many tokens in many places, this no longer works sensibly.
        request.app['gh_rate_limit'] = ghapi.rate_limit
        return web.Response(status=200)
    except Exception:  # pylint: disable=broad-except
        # Any unexpected failure still yields an HTTP response.
        logger.exception("Failure in webhook dispatch")
        return web.Response(status=500)
@web_routes.post('/hooks/circleci')
async def generic_circleci_dispatch(request):
    """View for incoming webhooks from CircleCI

    These are actually slack messages. We try to deparse them, but
    nothing is implemented on acting upon them yet.
    """
    try:
        payload = await request.read()
        message = SlackMessage(request.headers, payload)
        logger.info("Got data from Circle: %s", message)
        return web.Response(status=200)
    except Exception:  # pylint: disable=broad-except
        logger.exception("Failure in circle webhook dispatch")
        return web.Response(status=500)
@web_routes.post('/hooks/{source}')
async def generic_webhook_dispatch(request):
    """View for all other incoming webhooks

    This is just for debugging, so we can see what we would be
    receiving
    """
    try:
        hook_source = request.match_info['source']
        payload = await request.read()
        logger.error("Got generic webhook for %s", hook_source)
        logger.error(" Data: %s", payload)
        return web.Response(status=200)
    except Exception:  # pylint: disable=broad-except
        logger.exception("Failure in generic webhook dispatch")
        return web.Response(status=500)
@add_to_navbar(title="Home")
@web_routes.get("/", name="home")
@template('bot_index.html')
async def show_index(_request):
    """View for the Bot's home page.

    Renders nothing special at the moment, just the template.
    """
    # No template context is needed yet.
    return dict()
@add_to_navbar(title="Status")
@web_routes.get("/status", name="status")
@template("bot_status.html")
async def show_status(request):
    """View for checking in on the bots status

    Shows the status of each responding worker. This page may take
    100ms extra to render. If workers are busy, they may not respond
    within that time.
    """
    await check_permission(request, 'bioconda')
    # Broadcast a celery inspect with a short timeout; busy workers may
    # not answer within 0.1s and will simply be absent from the result.
    worker_status = capp.control.inspect(timeout=0.1)
    if not worker_status:
        return {
            'error': 'Could not get worker status'
        }
    alive = worker_status.ping()
    if not alive:
        return {
            'error': 'No workers found'
        }
    # For each responding worker, collect its currently executing
    # ('active') and queued ('reserved') tasks for the template.
    return {
        'workers': {
            worker: {
                'active': worker_status.active(worker),
                'reserved': worker_status.reserved(worker),
            }
            for worker in sorted(alive.keys())
        }
    }
@web_routes.get('/logout', name="logout")
async def logout(request):
    """View for logging out user

    Accepts a **next** parameter in the URL. This is where the user is
    sent back to (via HTTP redirect) after logging out.
    """
    await check_authorized(request)
    redirect_target = request.query.get('next', '/')
    response = web.HTTPFound(redirect_target)
    # Drop the identity cookie on the redirect response before raising it.
    await forget(request, response)
    return response
@web_routes.get('/login')
async def login(request):
    """View for login page

    Redirects to ``/auth/github`` in all cases - no other login
    methods supported.
    """
    github_auth_url = '/auth/github'
    return web.HTTPFound(github_auth_url)
@web_routes.get('/auth/github', name="login")
async def auth_github(request):
    """View for signing in with Github

    Currently the only authentication method (and probably will remain so).
    This will redirect to Github to allow OAUTH authentication if
    necessary.

    Returns a redirect to the ``next`` URL on success, or a 401 response
    when Github reported an error or authentication failed.
    """
    if 'error' in request.query:
        logger.error(request.query)
        # Bug fix: the response was previously constructed but neither
        # returned nor raised, so the handler fell through and continued
        # as if no error had occurred.
        return web.HTTPUnauthorized(body="Encountered an error. ")
    session = await get_session(request)
    nexturl = request.query.get('next') or '/'
    baseurl = BOT_BASEURL + "/auth/github?next=" + nexturl
    try:
        ghappapi = request.app['ghappapi']
        ghapi = await ghappapi.oauth_github_user(baseurl, session, request.query)
        if ghapi.username:
            # Bug fix: ``remember`` must set its identity cookie on the
            # response that is actually returned; previously a throwaway
            # HTTPFound received the cookie while a fresh one was returned.
            response = web.HTTPFound(nexturl)
            await remember(request, response, ghapi.token)
            return response
    except web.HTTPFound:
        # The OAuth helper signals a redirect to Github by raising.
        raise
    except Exception:
        logger.exception("failed to auth")
    return web.HTTPUnauthorized(body="Could not authenticate your Github account")
@add_to_navbar(title="Commands")
@web_routes.get('/commands', name="commands")
@template('bot_commands.html')
async def list_commands(request):
    """Self documents available commands"""
    commands = []
    # Each registered command maps name -> (handler, description); only
    # the name and description are shown on the page.
    for name, (func, desc) in command_routes.mapping.items():
        commands.append({'name': name, 'description': desc})
    return {'commands': commands}
| StarcoderdataPython |
4886188 | <filename>red/inara/turtle/__init__.py
from .lib import CoolTurtle as CoolTurtle | StarcoderdataPython |
8110882 | # -*- coding: utf-8 -*-
"""
oss2.defaults
~~~~~~~~~~~~~
全局缺省变量。
"""
#: 连接超时时间
connect_timeout = 10
#: 缺省重试次数
request_retries = 3
#: 对于某些接口,上传数据长度大于或等于该值时,就采用分片上传。
multipart_threshold = 10 * 1024 * 1024
#: 缺省分片大小
part_size = 10 * 1024 * 1024 | StarcoderdataPython |
class FeeValidator:
    """Validates a fee value for the entity named by *specifier*.

    A fee is valid when it is exactly 0 or lies in the inclusive range
    1..100; anything else — including non-numeric values — is rejected.
    """

    def __init__(self, specifier) -> None:
        super().__init__()
        self.specifier = specifier

    def validate(self, fee):
        """Raise if *fee* is neither 0 nor within [1, 100]."""
        try:
            valid = fee == 0 or 1 <= fee <= 100
        except TypeError:
            # Values that cannot be compared numerically are invalid.
            valid = False
        if not valid:
            raise Exception("Fee for {} cannot be {}. Valid values are 0, [1-100]".format(self.specifier, fee))
4918818 | from setuptools import setup, find_packages
# Package build configuration for the Python 3 cffi bindings to the Ell library.
setup(
    name="ell-pkg-mcon",
    version="0.0.1",
    author="<NAME>",
    author_email="<EMAIL>",
    description="Python3 bindings for Ell lib",
    packages=find_packages(),
    # cffi is needed both at build time (to compile the extension declared
    # in ell_build.py) and at run time (to load it).
    setup_requires=["cffi>=1.0.0"],
    cffi_modules=["ell_build.py:ffibuilder"],  # "filename:global"
    install_requires=["cffi>=1.0.0"],
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: Linux",
    ],
    python_requires='>=3.6',
)
| StarcoderdataPython |
4828789 | <gh_stars>1-10
from argparse import ArgumentTypeError as err
import os
# modified from https://stackoverflow.com/a/33181083/1052418
class PathType(object):
    """argparse ``type=`` factory validating an existing filesystem path.

    The checker verifies the path exists and — depending on *type* — that
    it is a file, directory or symlink (or satisfies a custom predicate),
    then returns the absolute path.
    """

    def __init__(self, type='file'):
        '''
        type: 'file', 'dir', 'symlink', None, or a callable returning
              True for valid paths. None means "don't care".
        '''
        assert type in ('file', 'dir', 'symlink', None) or callable(type)
        self._type = type

    def __call__(self, string):
        """Validate *string* and return its absolute path, raising
        ``ArgumentTypeError`` (imported as ``err``) on failure."""
        if not os.path.exists(string):
            raise err("path does not exist: '%s'" % string)
        kind = self._type
        if kind == 'file':
            if not os.path.isfile(string):
                raise err("path is not a file: '{0}'".format(string))
        elif kind == 'symlink':
            if not os.path.islink(string):
                raise err("path is not a symlink: '{0}'".format(string))
        elif kind == 'dir':
            if not os.path.isdir(string):
                raise err("path is not a directory: '{0}'".format(string))
        elif kind is not None and not kind(string):
            # Custom predicate rejected the path.
            raise err("path not valid: '%s'" % string)
        return os.path.abspath(string)
| StarcoderdataPython |
12819886 | # Copyright (c) 2017 <NAME>, All rights reserved.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import requests
import os
import signal
from time import sleep
from unittest import TestCase, main
from os.path import expanduser
from subprocess import check_output, CalledProcessError, Popen, run, DEVNULL, PIPE
class CliTest(TestCase):
    """End-to-end tests driving the ``tfnz`` CLI as a subprocess.

    Containers are launched in a new session so the whole process group
    can be terminated; ``bin()`` tears down both the CLI process and any
    containers recorded in /tmp/tf-* marker files.
    """
    tf = 'tfnz '

    @staticmethod
    def bin(po=None):
        """Kill the CLI process group (if given) and docker-kill any
        containers left behind in /tmp/tf-* marker files."""
        if po is not None:
            pgid = os.getpgid(po.pid)  # alpine needs you to start a new session AND nuke the whole group
            os.killpg(pgid, signal.SIGTERM)
            po.wait()
        try:
            # Renamed from 'all' to avoid shadowing the builtin.
            markers = check_output('ls /tmp/tf-*', shell=True, start_new_session=True)
        except CalledProcessError:  # no tf-whatever files
            return
        for instance in markers.split():
            with open(instance) as f:
                docker_id = f.read()
            run('rm ' + instance.decode(), shell=True, start_new_session=True)
            try:
                run('docker kill ' + docker_id, stderr=DEVNULL, stdout=DEVNULL, shell=True, start_new_session=True)
            except CalledProcessError:
                pass

    def test_ends(self):
        """The CLI reports container start, exit and disconnect."""
        try:
            out = run(CliTest.tf + 'tfnz/ends_test', shell=True, start_new_session=True, stderr=PIPE)
            self.assertTrue(b"Container is running" in out.stderr)
            self.assertTrue(b"Container has exited and/or been destroyed" in out.stderr)
            self.assertTrue(b"Disconnecting" in out.stderr)
        finally:
            CliTest.bin()

    def test_verbose(self):
        """-v enables debug-level logging."""
        try:
            out = run(CliTest.tf + '-v alpine true', shell=True, start_new_session=True, stderr=PIPE)
            self.assertTrue(b"Message loop started" in out.stderr)
        finally:
            CliTest.bin()

    def test_quiet(self):
        """-q silences all stderr output."""
        try:
            out = run(CliTest.tf + '-q alpine true', shell=True, start_new_session=True, stderr=PIPE)
            self.assertTrue(len(out.stderr) == 0)
        finally:
            CliTest.bin()

    def test_portmap(self):
        """-p maps a local port onto the container."""
        # Bug fix (applied throughout): initialise po before the try so
        # the finally clause cannot hit a NameError if Popen itself fails,
        # masking the real error. bin(None) is a no-op for the process.
        po = None
        try:
            po = Popen(CliTest.tf + '-p 8080:80 nginx', shell=True, start_new_session=True)
            sleep(5)
            reply = requests.get('http://127.0.0.1:8080')
            self.assertTrue("Welcome to nginx!" in reply.text)
        finally:
            CliTest.bin(po)

    def test_environment(self):
        """-e passes environment variables into the container."""
        po = None
        try:
            po = Popen(CliTest.tf + '-e TEST=environment -e VAR=iable -p 8080:80 tfnz/env_test',
                       shell=True, start_new_session=True)
            sleep(5)
            reply = requests.get('http://127.0.0.1:8080')
            self.assertTrue("TEST=environment" in reply.text)
            self.assertTrue("VAR=iable" in reply.text)
        finally:
            CliTest.bin(po)

    def test_preboot(self):
        """-f uploads a local file into the container before boot."""
        po = None
        try:
            po = Popen(CliTest.tf + '-f cli_test.py:/usr/share/nginx/html/index.html -p 8080:80 nginx',
                       shell=True, start_new_session=True)
            sleep(5)
            reply = requests.get('http://127.0.0.1:8080')
            self.assertTrue("test_preboot(self)" in reply.text)
        finally:
            CliTest.bin(po)

    def test_mount_volume(self):
        """Volumes can be mounted by tag, uuid or tag:uuid and persist content."""
        po = None
        try:
            # creating with a cli tag
            try:
                uuid = check_output(CliVolsTest.tfvolumes + 'create with_cli_tag', shell=True).decode().rstrip('\r\n')
            except CalledProcessError:
                # Tag already exists from a previous run: destroy and recreate.
                run(CliVolsTest.tfvolumes + "destroy with_cli_tag", shell=True)
                uuid = check_output(CliVolsTest.tfvolumes + 'create with_cli_tag', shell=True).decode().rstrip('\r\n')
            print("Vol uuid = " + str(uuid))
            # mount using the cli tag
            print('\n' + CliTest.tf + '-s -m with_cli_tag:/usr/share/nginx/html/ -p 8080:80 nginx')
            po = Popen(CliTest.tf + '-s -m with_cli_tag:/usr/share/nginx/html/ -p 8080:80 nginx',
                       shell=True, start_new_session=True)
            sleep(5)
            reply = requests.get('http://127.0.0.1:8080')
            self.assertTrue(reply.status_code == 403)  # initially nothing in the volume
            # upload a file with sftp
            run('echo "put tfnz.1 /usr/share/nginx/html/index.html" | sftp -P 2222 root@localhost',
                shell=True, start_new_session=True)
            sleep(1)
            reply = requests.get('http://127.0.0.1:8080')
            self.assertTrue(".TH TFNZ(1)" in reply.text)
            CliTest.bin(po)
            # mount using tag:uuid (in another container)
            print('\n' + CliTest.tf + '-m %s:/usr/share/nginx/html/ -p 8080:80 nginx' % uuid)
            po = Popen(CliTest.tf + '-m %s:/usr/share/nginx/html/ -p 8080:80 nginx' % uuid,
                       shell=True, start_new_session=True)
            sleep(5)
            reply = requests.get('http://127.0.0.1:8080')
            self.assertTrue(".TH TFNZ(1)" in reply.text)
            CliTest.bin(po)
            # mount with just uuid
            print('\n' + CliTest.tf + '-m %s:/usr/share/nginx/html/ -p 8080:80 nginx' % uuid.split(':')[0])
            po = Popen(CliTest.tf + '-m %s:/usr/share/nginx/html/ -p 8080:80 nginx' % uuid.split(':')[0],
                       shell=True, start_new_session=True)
            sleep(5)
            reply = requests.get('http://127.0.0.1:8080')
            self.assertTrue(".TH TFNZ(1)" in reply.text)
            CliTest.bin(po)
            # mount with just tag
            print('\n' + CliTest.tf + '-m %s:/usr/share/nginx/html/ -p 8080:80 nginx' % uuid.split(':')[1])
            po = Popen(CliTest.tf + '-m %s:/usr/share/nginx/html/ -p 8080:80 nginx' % uuid.split(':')[1],
                       shell=True, start_new_session=True)
            sleep(5)
            reply = requests.get('http://127.0.0.1:8080')
            self.assertTrue(".TH TFNZ(1)" in reply.text)
        finally:
            CliTest.bin(po)
            run(CliVolsTest.tfvolumes + 'destroy with_cli_tag', shell=True)

    def test_start_script(self):  # also tests ssh
        """A custom start script runs inside the container (checked via ssh)."""
        po = None
        try:
            with open("new_script.sh", 'w') as f:
                f.write('echo "I did this!" > /test ; /bin/sleep 1000')
            po = Popen(CliTest.tf + '-s -f new_script.sh:/new_script.sh alpine sh /new_script.sh',
                       shell=True, start_new_session=True)
            sleep(5)
            out = check_output('ssh -p 2222 root@localhost cat /test',
                               shell=True, start_new_session=True)
            self.assertTrue(b"I did this!" in out)
        finally:
            run('rm new_script.sh', shell=True, start_new_session=True)
            CliTest.bin(po)

    def test_web_host(self):
        """-w publishes the container under a public web host name."""
        po = None
        try:
            po = Popen(CliTest.tf + '-w cli.test.sydney.20ft.nz nginx', shell=True, start_new_session=True)
            sleep(5)
            reply = requests.get('http://cli.test.sydney.20ft.nz')
            self.assertTrue("Welcome to nginx!" in reply.text)
        finally:
            CliTest.bin(po)

    def test_sleep(self):
        """-z keeps the container asleep but reachable over ssh (-s)."""
        po = None
        try:
            po = Popen(CliTest.tf + '-z -s alpine', shell=True, start_new_session=True)
            sleep(5)
            out = check_output('ssh -p 2222 root@localhost uname', shell=True, start_new_session=True)
            self.assertTrue(b"Linux" in out)
        finally:
            CliTest.bin(po)
class CliVolsTest(TestCase):
    """Tests for the ``tfvolumes`` volume management CLI."""
    tfvolumes = 'tfvolumes '

    def test_blank(self):
        """Running with no arguments prints the subcommand list."""
        try:
            out = check_output(CliVolsTest.tfvolumes, shell=True, start_new_session=True)
            self.assertTrue(b"{list,create,destroy}" in out)
        finally:
            CliTest.bin()

    def test_destroy_missing(self):
        """'destroy' without a uuid must exit non-zero with a usage error."""
        try:
            # Bug fix: run() without check=True never raises
            # CalledProcessError, so the original except block — and its
            # assertions — never executed. check=True makes the non-zero
            # exit raise, and stderr=PIPE captures argparse's usage
            # message (previously sent to DEVNULL yet checked via
            # e.output, where it could never appear).
            with self.assertRaises(CalledProcessError) as ctx:
                run(CliVolsTest.tfvolumes + "destroy", shell=True, stderr=PIPE, check=True,
                    start_new_session=True)
            self.assertTrue(b"the following arguments are required: uuid" in ctx.exception.stderr)
            self.assertTrue(ctx.exception.returncode != 0)
        finally:
            CliTest.bin()

    def test_crud(self):
        """An untagged volume can be created, listed and destroyed."""
        try:
            uuid = check_output(CliVolsTest.tfvolumes + 'create', shell=True).rstrip(b'\r\n')
            self.assertTrue(len(uuid) != 0)
            all = check_output(CliVolsTest.tfvolumes + 'list', shell=True, start_new_session=True)
            self.assertTrue(uuid in all)
            destroyed = check_output(CliVolsTest.tfvolumes + 'destroy ' + uuid.decode(),
                                     shell=True, start_new_session=True)
            self.assertTrue(len(uuid) != 0)
        finally:
            CliTest.bin()

    def test_crud_tagged(self):
        """A tagged volume round-trips through create/list/destroy."""
        try:
            uuid_tag = check_output(CliVolsTest.tfvolumes + 'create test_crud_tagged',
                                    shell=True, start_new_session=True).rstrip(b'\r\n')
            self.assertTrue(b'error' not in uuid_tag)
            all = check_output(CliVolsTest.tfvolumes + 'list', shell=True, start_new_session=True)
            self.assertTrue(uuid_tag in all)
            destroyed = check_output(CliVolsTest.tfvolumes + 'destroy ' + uuid_tag.decode(),
                                     shell=True, start_new_session=True)
            self.assertTrue(b'error' not in destroyed)
            all = check_output(CliVolsTest.tfvolumes + 'list',
                               shell=True, start_new_session=True)
            self.assertTrue(uuid_tag not in all)
        finally:
            CliTest.bin()
class CliAcctbakTest(TestCase):
    """Tests that ``tfacctbak`` emits a shell script restoring the account."""
    tfacctbak = 'tfacctbak'

    def test_acctbak(self):
        """The backup script must embed the default location and both keys."""
        # Read the locally stored account: default location name plus its
        # private and public key files under ~/.20ft/.
        with open(expanduser("~/.20ft/default_location")) as f:
            def_loc = f.read().rstrip('\r\n')
        with open(expanduser("~/.20ft/") + def_loc) as f:
            priv = f.read().encode().rstrip(b'\r\n')
        with open(expanduser("~/.20ft/%s.pub") % def_loc) as f:
            pub = f.read().encode().rstrip(b'\r\n')
        def_loc = def_loc.encode()
        out = check_output(CliAcctbakTest.tfacctbak, shell=True, start_new_session=True)
        # The emitted script must recreate all three files with the same
        # contents (heredoc-style 'cat >' commands plus the raw data).
        self.assertTrue(b"cat > ~/.20ft/default_location" in out)
        self.assertTrue(b"cat > ~/.20ft/" + def_loc in out)
        self.assertTrue(b"cat > ~/.20ft/" + def_loc + b".pub" in out)
        self.assertTrue(def_loc in out)
        self.assertTrue(pub in out)
        self.assertTrue(priv in out)
# Allow running this test module directly; delegates to unittest's main().
if __name__ == '__main__':
    main()
| StarcoderdataPython |
91403 | <reponame>Alacrate/the-tale<gh_stars>10-100
import smart_imports
smart_imports.all()
class RoadChangeTests(helpers.BaseTestPrototypes):
    def setUp(self):
        """Create a road-change bill between place1 and place2 with a new path."""
        super().setUp()
        self.new_path = 'rddr'
        self.old_road = roads_logic.road_between_places(self.place1, self.place2)
        # The fixture path must differ from the existing road's path.
        self.assertNotEqual(self.new_path, self.old_road.path)
        self.bill_data = bills.road_change.RoadChange(place_1_id=self.place1.id,
                                                      place_2_id=self.place2.id,
                                                      path=self.new_path)
        self.bill = prototypes.BillPrototype.create(self.account1,
                                                    'bill-1-caption', self.bill_data,
                                                    chronicle_on_accepted='chronicle-on-accepted')
    def test_create(self):
        """Creation snapshots the places, their names and the old road path."""
        self.assertEqual(self.bill.data.place_1_id, self.place1.id)
        self.assertEqual(self.bill.data.place_2_id, self.place2.id)
        self.assertEqual(self.bill.data.path, self.new_path)
        self.assertEqual(self.bill.data.old_place_1_name_forms, self.place1.utg_name)
        self.assertEqual(self.bill.data.old_place_2_name_forms, self.place2.utg_name)
        self.assertEqual(self.bill.data.old_path, self.old_road.path)
        self.assertEqual(self.bill.data.place_1.id, self.place1.id)
        self.assertEqual(self.bill.data.place_2.id, self.place2.id)
        self.assertEqual(self.bill.data.old_place_1_name, self.place1.utg_name.normal_form())
        self.assertEqual(self.bill.data.old_place_2_name, self.place2.utg_name.normal_form())
        self.assertFalse(self.bill.data.place_1_name_changed)
        self.assertFalse(self.bill.data.place_2_name_changed)
    def test_user_form_initials(self):
        """The edit form is pre-filled from the bill's stored data."""
        self.assertEqual(self.bill.data.user_form_initials(),
                         {'place_1': self.bill.data.place_1_id,
                          'place_2': self.bill.data.place_2_id,
                          'path': self.bill.data.path})
    def test_actors(self):
        """Both endpoint places are the actors of the bill."""
        self.assertEqual(set(id(a) for a in self.bill_data.actors), set([id(self.place1), id(self.place2)]))
    def test_update(self):
        """Updating via form re-targets the bill and refreshes all snapshots."""
        form = self.bill.data.get_user_form_update(post={'caption': 'new-caption',
                                                         'chronicle_on_accepted': 'chronicle-on-accepted-2',
                                                         'place_1': self.place2.id,
                                                         'place_2': self.place3.id,
                                                         'path': 'luld'})
        self.assertTrue(form.is_valid())
        self.bill.update(form)
        # Reload from storage to verify the update was persisted.
        self.bill = prototypes.BillPrototype.get_by_id(self.bill.id)
        old_road = roads_logic.road_between_places(self.place2, self.place3)
        self.assertEqual(self.bill.data.place_1_id, self.place2.id)
        self.assertEqual(self.bill.data.place_2_id, self.place3.id)
        self.assertEqual(self.bill.data.path, 'luld')
        self.assertEqual(self.bill.data.old_place_1_name_forms, self.place2.utg_name)
        self.assertEqual(self.bill.data.old_place_2_name_forms, self.place3.utg_name)
        self.assertEqual(self.bill.data.old_path, old_road.path)
        self.assertEqual(self.bill.data.place_1.id, self.place2.id)
        self.assertEqual(self.bill.data.place_2.id, self.place3.id)
        self.assertEqual(self.bill.data.old_place_1_name, self.place2.utg_name.normal_form())
        self.assertEqual(self.bill.data.old_place_2_name, self.place3.utg_name.normal_form())
        self.assertFalse(self.bill.data.place_2_name_changed)
        self.assertFalse(self.bill.data.place_1_name_changed)
    def test_form_validation__success(self):
        """A form matching the existing road between the places validates."""
        form = self.bill.data.get_user_form_update(post={'caption': 'long caption',
                                                         'chronicle_on_accepted': 'chronicle-on-accepted',
                                                         'place_1': self.place1.id,
                                                         'place_2': self.place2.id,
                                                         'path': self.new_path})
        self.assertTrue(form.is_valid())
    def test_form_validation__wrong_end_place(self):
        """A path not ending at the declared second place fails validation."""
        form = self.bill.data.get_user_form_update(post={'caption': 'long caption',
                                                         'chronicle_on_accepted': 'chronicle-on-accepted',
                                                         'place_1': self.place1.id,
                                                         'place_2': self.place3.id,
                                                         'path': 'drrd'})
        self.assertFalse(form.is_valid())
def test_user_form_validation__not_exists(self):
    """The form is rejected when there is no road between the chosen places."""
    # precondition: no road exists between place1 and place3
    self.assertEqual(roads_logic.road_between_places(self.place1, self.place3), None)
    post_data = {'caption': 'long caption',
                 'chronicle_on_accepted': 'chronicle-on-accepted',
                 'place_1': self.place1.id,
                 'place_2': self.place3.id,
                 'path': 'rdrd'}
    update_form = self.bill.data.get_user_form_update(post=post_data)
    self.assertFalse(update_form.is_valid())
@mock.patch('the_tale.game.roads.logic.is_path_suitable_for_road',
            lambda **kwargs: roads_relations.ROAD_PATH_ERRORS.random(exclude=[roads_relations.ROAD_PATH_ERRORS.NO_ERRORS]))
def test_user_form_validation__bad_path(self):
    # The path checker is patched to always report a (random) error,
    # so even an otherwise-correct form must fail validation.
    form = self.bill.data.get_user_form_update(post={'caption': 'long caption',
                                                     'chronicle_on_accepted': 'chronicle-on-accepted',
                                                     'place_1': self.place1.id,
                                                     'place_2': self.place2.id,
                                                     'path': self.new_path})
    self.assertFalse(form.is_valid())
@mock.patch('the_tale.game.bills.conf.settings.MIN_VOTES_PERCENT', 0.6)
@mock.patch('the_tale.game.bills.prototypes.BillPrototype.time_before_voting_end', datetime.timedelta(seconds=0))
def apply_bill(self):
    """Helper (not a test itself): vote the bill through moderation and apply it.

    MIN_VOTES_PERCENT and the voting deadline are patched so the 1-for/1-against
    vote below is enough for the bill to pass immediately.
    """
    prototypes.VotePrototype.create(self.account2, self.bill, relations.VOTE_TYPE.AGAINST)
    prototypes.VotePrototype.create(self.account3, self.bill, relations.VOTE_TYPE.FOR)

    # moderator approves the bill with its current form data
    data = self.bill.user_form_initials
    data['approved'] = True
    form = self.bill.data.get_moderator_form_update(data)
    self.assertTrue(form.is_valid())
    self.bill.update_by_moderator(form, self.account1)
    self.assertTrue(self.bill.apply())
def test_apply(self):
    """Applying the bill modifies the existing road in place.

    No road is created or removed, but the roads storage version must be
    bumped and the bill must end up ACCEPTED with the new path applied.
    """
    old_storage_version = roads_storage.roads._version
    with self.check_not_changed(lambda: len(roads_storage.roads.all())):
        self.apply_bill()
    # the storage must have been invalidated/re-versioned by the change
    self.assertNotEqual(old_storage_version, roads_storage.roads._version)
    bill = prototypes.BillPrototype.get_by_id(self.bill.id)
    self.assertTrue(bill.state.is_ACCEPTED)
    road = roads_logic.road_between_places(self.place1, self.place2)
    self.assertEqual(road.path, self.new_path)
def test_has_meaning__not_exists(self):
    """A change-bill for a road that does not exist has no meaning."""
    data = bills.road_change.RoadChange(place_1_id=self.place1.id,
                                        place_2_id=self.place3.id,
                                        path='rdrd')
    new_bill = prototypes.BillPrototype.create(self.account1, 'bill-1-caption', data,
                                               chronicle_on_accepted='chronicle-on-accepted')
    self.assertFalse(new_bill.has_meaning())
@mock.patch('the_tale.game.roads.logic.is_path_suitable_for_road',
            lambda **kwargs: roads_relations.ROAD_PATH_ERRORS.random(exclude=[roads_relations.ROAD_PATH_ERRORS.NO_ERRORS]))
def test_has_meaning__wrong_path(self):
    # The path checker is patched to always fail: a bill for an existing
    # road but an unsuitable path must have no meaning.
    bill_data = bills.road_change.RoadChange(place_1_id=self.place1.id,
                                             place_2_id=self.place2.id,
                                             path=self.new_path)
    bill = prototypes.BillPrototype.create(self.account1, 'bill-1-caption', bill_data,
                                           chronicle_on_accepted='chronicle-on-accepted')
    self.assertFalse(bill.has_meaning())
| StarcoderdataPython |
9649540 | <filename>tests/recursivity/test_p6.py
import unittest
from recursivity.p6 import towers_of_hanoi
class TestTowersOfHanoi(unittest.TestCase):
    """Unit tests for the recursive Towers of Hanoi solver.

    Fix: both docstrings promised that the source and auxiliary stacks end up
    empty, but the original tests only checked the destination stack.
    """

    def test_move(self):
        """
        Input:
          stack1: [3, 2, 1]
          stack2: []
          stack3: []
        Output:
          stack1: []
          stack2: []
          stack3: [3, 2, 1]
        :return: void
        """
        stack1 = [3, 2, 1]
        stack2 = []
        stack3 = []
        towers_of_hanoi(stack1, stack2, stack3, 3)
        self.assertEqual([3, 2, 1], stack3)
        # verify the promised end state of the other two pegs as well
        self.assertEqual([], stack1)
        self.assertEqual([], stack2)

    def test_move_bigger(self):
        """
        Input:
          stack1: [10, 9, 8, 7, 6, 5, 4, 3, 2, 1]
          stack2: []
          stack3: []
        Output:
          stack1: []
          stack2: []
          stack3: [10, 9, 8, 7, 6, 5, 4, 3, 2, 1]
        :return: void
        """
        stack1 = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1]
        stack2 = []
        stack3 = []
        towers_of_hanoi(stack1, stack2, stack3, 10)
        self.assertEqual([10, 9, 8, 7, 6, 5, 4, 3, 2, 1], stack3)
        # verify the promised end state of the other two pegs as well
        self.assertEqual([], stack1)
        self.assertEqual([], stack2)
| StarcoderdataPython |
3400812 | import can
import cantools
import time
import os
# Open the CAN bus: Vector hardware on Windows, virtual socketcan elsewhere.
if os.name == 'nt':
    #bus = can.interface.Bus(channel='PCAN_USBBUS1', bustype='pcan', bitrate=250000, fd=True)
    bus = can.interface.Bus(channel=0, bustype='vector', bitrate=250000, fd=True)
else:
    bus = can.interface.Bus(channel='vcan0', bustype='socketcan', bitrate=500000, fd=True)

# Minimal inline DBC describing the only two frames this script understands.
db = """VERSION ""
BO_ 2566834709 DM1: 8 SEND
 SG_ FlashAmberWarningLamp : 10|2@1+ (1,0) [0|3] "" Vector__XXX
 SG_ FlashRedStopLamp : 12|2@1+ (1,0) [0|3] "" Vector__XXX
BO_ 2365194522 PD_Loader: 8 SEND
 SG_ Capacity : 32|32@1+ (1,0) [0|4294967295] "mm2/s" Loader
 SG_ Quality : 0|32@1+ (1,0) [0|100] "%" Loader
"""
db = cantools.db.load_string(db, 'dbc')

quality = 0
capacity = 0

while True:
    print("------------------------------")
    message = bus.recv()  # blocks until a frame arrives (no timeout configured)
    try:
        message_decoded = db.decode_message(message.arbitration_id, message.data)
    except KeyError:
        # Fix: on a real bus most frames are NOT in our two-message DBC;
        # decode_message raises KeyError for them and used to kill the loop.
        message_decoded = {}
    if 'Quality' in message_decoded:
        quality = int(message_decoded['Quality'])
        print(quality)
    if 'Capacity' in message_decoded:
        # 'capacity' was declared above but never updated in the original loop
        capacity = int(message_decoded['Capacity'])
    time.sleep(0.1)
| StarcoderdataPython |
12858392 | '''
- Leetcode problem: 352
- Difficulty: Hard
- Brief problem description:
Given a data stream input of non-negative integers a1, a2, ..., an, ..., summarize the numbers seen so far as a list of
disjoint intervals.
For example, suppose the integers from the data stream are 1, 3, 7, 2, 6, ..., then the summary will be:
[1, 1]
[1, 1], [3, 3]
[1, 1], [3, 3], [7, 7]
[1, 3], [7, 7]
[1, 3], [6, 7]
Follow up:
What if there are lots of merges and the number of disjoint intervals are small compared to the data stream's size?
- Solution Summary:
- Used Resources:
--- Bo Zhou
'''
class SummaryRanges:
def __init__(self):
"""
Initialize your data structure here.
"""
self.ih = [] # interval heap
def addNum(self, val: int) -> None:
heapq.heappush(self.ih, [val, val])
def getIntervals(self) -> List[List[int]]:
newh = []
while self.ih:
newInter = heapq.heappop(self.ih)
if newh and newh[-1][1] + 1 >= newInter[0]:
newh[-1][1] = max(newh[-1][1], newInter[1])
else:
heapq.heappush(newh, newInter)
self.ih = newh
return self.ih
# Your SummaryRanges object will be instantiated and called as such:
# obj = SummaryRanges()
# obj.addNum(val)
# param_2 = obj.getIntervals() | StarcoderdataPython |
class Node:
    """A single element of a singly linked list."""

    def __init__(self, data):
        self.data = data
        self.nxt = None  # next node in the chain, or None for the tail

    def __str__(self) -> str:
        nxt_notation = 'Node' if self.nxt is not None else 'None'
        return f'Node {{ data: {self.data}, nxt: {nxt_notation} }}'


"""
Singly linked list.
"""
class LinkedList:
    """Singly linked list with head/tail pointers and a cached length.

    Fixes over the original:
      * ``at(0)`` used to return None (``index <= 0`` off-by-one).
      * ``delete`` of the last element used to set ``tail`` to None instead
        of the previous node.
      * deleting the only element left a stale ``tail`` reference.
      * ``append`` on an empty list crashed on ``self.tail.nxt``.
    """

    def __init__(self, *args):
        self.length = len(args)
        self.head = None
        self.tail = None
        current_node = None
        for data in args:
            node = Node(data)
            if self.head is None:
                self.head = node
            else:
                current_node.nxt = node
            current_node = node
        self.tail = current_node

    def delete(self, index):
        """Remove the node at ``index``.

        Takes O(n) because it needs searching; the unlink itself is O(1).
        Raises ValueError when ``index`` is out of range.
        """
        if index >= self.length or index < 0:
            raise ValueError(f'{index} out of range.')
        current_node = self.head
        previous_node = None
        target_index = 0
        while target_index != index:
            previous_node = current_node
            current_node = current_node.nxt
            target_index += 1
        if previous_node is not None:
            previous_node.nxt = current_node.nxt
            if current_node.nxt is None:
                # removed the last node: the previous node becomes the tail
                self.tail = previous_node
        else:
            self.head = self.head.nxt
            if self.head is None:
                # removed the only node: the list is now empty
                self.tail = None
        self.length -= 1

    def last(self):
        """Return the tail node (not its data), or None for an empty list."""
        return self.tail

    def first(self):
        """Return the head node (not its data), or None for an empty list."""
        return self.head

    def len(self):
        """Return the number of elements; O(1) thanks to the cached length."""
        return self.length

    def append(self, data):
        """Add ``data`` at the end of the list in O(1)."""
        new_last_elem = Node(data)
        if self.tail is None:
            # empty list: the new node is both head and tail
            self.head = new_last_elem
        else:
            self.tail.nxt = new_last_elem
        self.tail = new_last_elem
        self.length += 1

    def insert(self, index, data):
        """Insert ``data`` before position ``index``.

        Takes O(n) to traverse; the actual insertion is O(1).
        Historical quirk kept for compatibility: inserting at the LAST valid
        index appends AFTER the tail instead of before it.
        Raises ValueError when ``index`` is out of range.
        """
        if index >= self.length or index < 0:
            raise ValueError(f'{index} out of range.')
        if index == self.length - 1:
            self.append(data)
            return
        new_node = Node(data)
        current_node = self.head
        previous_node = None
        target_index = 0
        while target_index != index:
            previous_node = current_node
            current_node = current_node.nxt
            target_index += 1
        if previous_node is not None:
            previous_node.nxt = new_node
            new_node.nxt = current_node
        else:
            # inserting at the front: new node becomes the head
            new_node.nxt = self.head
            self.head = new_node
        self.length += 1

    def at(self, index):
        """Return the data at ``index`` (O(n) traversal), or None if out of range."""
        if index < 0 or index >= self.length:
            return None
        current_node = self.head
        for _ in range(index):
            current_node = current_node.nxt
        return current_node.data

    def __str__(self):
        parts = []
        current_node = self.head
        while current_node is not None:
            parts.append(str(current_node.data))
            current_node = current_node.nxt
        return '->'.join(parts)
print("l = LinkedList(1,2,3,4)")
l = LinkedList(1,2,3,4)
print(l.len())
print(l)
print("l.append(5)")
l.append(5)
print("print(l)")
print(l)
print("l.append(6)")
l.append(6)
print(l)
print(l.len())
print("l.insert(0, 222)")
l.insert(0, 222)
print(l)
print(l.len())
print("l.insert(3, 555)")
l.insert(3, 555)
print(l)
print(l.len())
print("l.insert(1, 333)")
l.insert(1, 333)
print(l)
print(l.len())
print("l.insert(l.len() - 1, 99999)")
l.insert(l.len() - 1, 99999)
print(l)
print(l.len())
print("print(l.at(1))")
print(l.at(1))
print("print(l.last())")
print(l.last())
print("print(l.first())")
print(l.first())
print("l.delete(0)")
l.delete(0)
print(l)
print(l.len())
print("l.delete(l.len() - 1)")
l.delete(l.len() - 1)
print(l)
print(l.len())
print("l.delete(3)")
l.delete(3)
print(l)
print(l.len()) | StarcoderdataPython |
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.5
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.

# --- SWIG runtime preamble (generated): locates and loads the compiled
# --- _Backend extension module and defines attribute-dispatch helpers used
# --- by every proxy class below.

from sys import version_info
if version_info >= (2, 6, 0):
    def swig_import_helper():
        # Find the compiled _Backend extension next to this file; fall back
        # to a plain import if imp cannot locate it.
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_Backend', [dirname(__file__)])
        except ImportError:
            import _Backend
            return _Backend
        if fp is not None:
            try:
                _mod = imp.load_module('_Backend', fp, pathname, description)
            finally:
                fp.close()
            return _mod
    _Backend = swig_import_helper()
    del swig_import_helper
else:
    import _Backend
del version_info

try:
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.


def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
    # Route attribute writes through the SWIG setter table; with static=1,
    # unknown attributes raise instead of being added dynamically.
    if (name == "thisown"):
        return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name, None)
    if method:
        return method(self, value)
    if (not static):
        if _newclass:
            object.__setattr__(self, name, value)
        else:
            self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)


def _swig_setattr(self, class_type, name, value):
    # Non-static variant: allows adding new attributes to proxy instances.
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)


def _swig_getattr_nondynamic(self, class_type, name, static=1):
    # Route attribute reads through the SWIG getter table.
    if (name == "thisown"):
        return self.this.own()
    method = class_type.__swig_getmethods__.get(name, None)
    if method:
        return method(self)
    if (not static):
        return object.__getattr__(self, name)
    else:
        raise AttributeError(name)


def _swig_getattr(self, class_type, name):
    return _swig_getattr_nondynamic(self, class_type, name, 0)


def _swig_repr(self):
    # Generic repr for proxies; tolerates a missing underlying C++ object.
    try:
        strthis = "proxy of " + self.this.__repr__()
    except:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)

# Old-/new-style class compatibility shim (Python 2.1 era).
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object:
        pass
    _newclass = 0

# weakref may be unavailable on very old interpreters; degrade to identity.
try:
    import weakref
    weakref_proxy = weakref.proxy
except:
    weakref_proxy = lambda x: x
class SwigPyIterator(_object):
    """SWIG-generated abstract proxy for C++ iterators; not constructible from Python."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)
    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _Backend.delete_SwigPyIterator
    __del__ = lambda self: None
    def value(self):
        return _Backend.SwigPyIterator_value(self)
    def incr(self, n=1):
        return _Backend.SwigPyIterator_incr(self, n)
    def decr(self, n=1):
        return _Backend.SwigPyIterator_decr(self, n)
    def distance(self, x):
        return _Backend.SwigPyIterator_distance(self, x)
    def equal(self, x):
        return _Backend.SwigPyIterator_equal(self, x)
    def copy(self):
        return _Backend.SwigPyIterator_copy(self)
    def next(self):
        return _Backend.SwigPyIterator_next(self)
    def __next__(self):
        return _Backend.SwigPyIterator___next__(self)
    def previous(self):
        return _Backend.SwigPyIterator_previous(self)
    def advance(self, n):
        return _Backend.SwigPyIterator_advance(self, n)
    def __eq__(self, x):
        return _Backend.SwigPyIterator___eq__(self, x)
    def __ne__(self, x):
        return _Backend.SwigPyIterator___ne__(self, x)
    def __iadd__(self, n):
        return _Backend.SwigPyIterator___iadd__(self, n)
    def __isub__(self, n):
        return _Backend.SwigPyIterator___isub__(self, n)
    def __add__(self, n):
        return _Backend.SwigPyIterator___add__(self, n)
    def __sub__(self, *args):
        return _Backend.SwigPyIterator___sub__(self, *args)
    def __iter__(self):
        return self
SwigPyIterator_swigregister = _Backend.SwigPyIterator_swigregister
SwigPyIterator_swigregister(SwigPyIterator)
class StdVectorString(_object):
    """SWIG-generated proxy for a C++ std::vector of strings; list-like API plus std::vector methods."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StdVectorString, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StdVectorString, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _Backend.StdVectorString_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _Backend.StdVectorString___nonzero__(self)
    def __bool__(self):
        return _Backend.StdVectorString___bool__(self)
    def __len__(self):
        return _Backend.StdVectorString___len__(self)
    def pop(self):
        return _Backend.StdVectorString_pop(self)
    def __getslice__(self, i, j):
        return _Backend.StdVectorString___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _Backend.StdVectorString___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _Backend.StdVectorString___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _Backend.StdVectorString___delitem__(self, *args)
    def __getitem__(self, *args):
        return _Backend.StdVectorString___getitem__(self, *args)
    def __setitem__(self, *args):
        return _Backend.StdVectorString___setitem__(self, *args)
    def append(self, x):
        return _Backend.StdVectorString_append(self, x)
    def empty(self):
        return _Backend.StdVectorString_empty(self)
    def size(self):
        return _Backend.StdVectorString_size(self)
    def clear(self):
        return _Backend.StdVectorString_clear(self)
    def swap(self, v):
        return _Backend.StdVectorString_swap(self, v)
    def get_allocator(self):
        return _Backend.StdVectorString_get_allocator(self)
    def begin(self):
        return _Backend.StdVectorString_begin(self)
    def end(self):
        return _Backend.StdVectorString_end(self)
    def rbegin(self):
        return _Backend.StdVectorString_rbegin(self)
    def rend(self):
        return _Backend.StdVectorString_rend(self)
    def pop_back(self):
        return _Backend.StdVectorString_pop_back(self)
    def erase(self, *args):
        return _Backend.StdVectorString_erase(self, *args)
    def __init__(self, *args):
        this = _Backend.new_StdVectorString(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, x):
        return _Backend.StdVectorString_push_back(self, x)
    def front(self):
        return _Backend.StdVectorString_front(self)
    def back(self):
        return _Backend.StdVectorString_back(self)
    def assign(self, n, x):
        return _Backend.StdVectorString_assign(self, n, x)
    def resize(self, *args):
        return _Backend.StdVectorString_resize(self, *args)
    def insert(self, *args):
        return _Backend.StdVectorString_insert(self, *args)
    def reserve(self, n):
        return _Backend.StdVectorString_reserve(self, n)
    def capacity(self):
        return _Backend.StdVectorString_capacity(self)
    __swig_destroy__ = _Backend.delete_StdVectorString
    __del__ = lambda self: None
StdVectorString_swigregister = _Backend.StdVectorString_swigregister
StdVectorString_swigregister(StdVectorString)
class StdVectorDouble(_object):
    """SWIG-generated proxy for a C++ std::vector of doubles; list-like API plus std::vector methods."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StdVectorDouble, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StdVectorDouble, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _Backend.StdVectorDouble_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _Backend.StdVectorDouble___nonzero__(self)
    def __bool__(self):
        return _Backend.StdVectorDouble___bool__(self)
    def __len__(self):
        return _Backend.StdVectorDouble___len__(self)
    def pop(self):
        return _Backend.StdVectorDouble_pop(self)
    def __getslice__(self, i, j):
        return _Backend.StdVectorDouble___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _Backend.StdVectorDouble___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _Backend.StdVectorDouble___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _Backend.StdVectorDouble___delitem__(self, *args)
    def __getitem__(self, *args):
        return _Backend.StdVectorDouble___getitem__(self, *args)
    def __setitem__(self, *args):
        return _Backend.StdVectorDouble___setitem__(self, *args)
    def append(self, x):
        return _Backend.StdVectorDouble_append(self, x)
    def empty(self):
        return _Backend.StdVectorDouble_empty(self)
    def size(self):
        return _Backend.StdVectorDouble_size(self)
    def clear(self):
        return _Backend.StdVectorDouble_clear(self)
    def swap(self, v):
        return _Backend.StdVectorDouble_swap(self, v)
    def get_allocator(self):
        return _Backend.StdVectorDouble_get_allocator(self)
    def begin(self):
        return _Backend.StdVectorDouble_begin(self)
    def end(self):
        return _Backend.StdVectorDouble_end(self)
    def rbegin(self):
        return _Backend.StdVectorDouble_rbegin(self)
    def rend(self):
        return _Backend.StdVectorDouble_rend(self)
    def pop_back(self):
        return _Backend.StdVectorDouble_pop_back(self)
    def erase(self, *args):
        return _Backend.StdVectorDouble_erase(self, *args)
    def __init__(self, *args):
        this = _Backend.new_StdVectorDouble(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, x):
        return _Backend.StdVectorDouble_push_back(self, x)
    def front(self):
        return _Backend.StdVectorDouble_front(self)
    def back(self):
        return _Backend.StdVectorDouble_back(self)
    def assign(self, n, x):
        return _Backend.StdVectorDouble_assign(self, n, x)
    def resize(self, *args):
        return _Backend.StdVectorDouble_resize(self, *args)
    def insert(self, *args):
        return _Backend.StdVectorDouble_insert(self, *args)
    def reserve(self, n):
        return _Backend.StdVectorDouble_reserve(self, n)
    def capacity(self):
        return _Backend.StdVectorDouble_capacity(self)
    __swig_destroy__ = _Backend.delete_StdVectorDouble
    __del__ = lambda self: None
StdVectorDouble_swigregister = _Backend.StdVectorDouble_swigregister
StdVectorDouble_swigregister(StdVectorDouble)
class StdVectorInt(_object):
    """SWIG-generated proxy for a C++ std::vector of ints; list-like API plus std::vector methods."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StdVectorInt, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StdVectorInt, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _Backend.StdVectorInt_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _Backend.StdVectorInt___nonzero__(self)
    def __bool__(self):
        return _Backend.StdVectorInt___bool__(self)
    def __len__(self):
        return _Backend.StdVectorInt___len__(self)
    def pop(self):
        return _Backend.StdVectorInt_pop(self)
    def __getslice__(self, i, j):
        return _Backend.StdVectorInt___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _Backend.StdVectorInt___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _Backend.StdVectorInt___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _Backend.StdVectorInt___delitem__(self, *args)
    def __getitem__(self, *args):
        return _Backend.StdVectorInt___getitem__(self, *args)
    def __setitem__(self, *args):
        return _Backend.StdVectorInt___setitem__(self, *args)
    def append(self, x):
        return _Backend.StdVectorInt_append(self, x)
    def empty(self):
        return _Backend.StdVectorInt_empty(self)
    def size(self):
        return _Backend.StdVectorInt_size(self)
    def clear(self):
        return _Backend.StdVectorInt_clear(self)
    def swap(self, v):
        return _Backend.StdVectorInt_swap(self, v)
    def get_allocator(self):
        return _Backend.StdVectorInt_get_allocator(self)
    def begin(self):
        return _Backend.StdVectorInt_begin(self)
    def end(self):
        return _Backend.StdVectorInt_end(self)
    def rbegin(self):
        return _Backend.StdVectorInt_rbegin(self)
    def rend(self):
        return _Backend.StdVectorInt_rend(self)
    def pop_back(self):
        return _Backend.StdVectorInt_pop_back(self)
    def erase(self, *args):
        return _Backend.StdVectorInt_erase(self, *args)
    def __init__(self, *args):
        this = _Backend.new_StdVectorInt(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, x):
        return _Backend.StdVectorInt_push_back(self, x)
    def front(self):
        return _Backend.StdVectorInt_front(self)
    def back(self):
        return _Backend.StdVectorInt_back(self)
    def assign(self, n, x):
        return _Backend.StdVectorInt_assign(self, n, x)
    def resize(self, *args):
        return _Backend.StdVectorInt_resize(self, *args)
    def insert(self, *args):
        return _Backend.StdVectorInt_insert(self, *args)
    def reserve(self, n):
        return _Backend.StdVectorInt_reserve(self, n)
    def capacity(self):
        return _Backend.StdVectorInt_capacity(self)
    __swig_destroy__ = _Backend.delete_StdVectorInt
    __del__ = lambda self: None
StdVectorInt_swigregister = _Backend.StdVectorInt_swigregister
StdVectorInt_swigregister(StdVectorInt)
class StdVectorBool(_object):
    """SWIG-generated proxy for a C++ std::vector of bools; list-like API plus std::vector methods."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StdVectorBool, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StdVectorBool, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _Backend.StdVectorBool_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _Backend.StdVectorBool___nonzero__(self)
    def __bool__(self):
        return _Backend.StdVectorBool___bool__(self)
    def __len__(self):
        return _Backend.StdVectorBool___len__(self)
    def pop(self):
        return _Backend.StdVectorBool_pop(self)
    def __getslice__(self, i, j):
        return _Backend.StdVectorBool___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _Backend.StdVectorBool___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _Backend.StdVectorBool___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _Backend.StdVectorBool___delitem__(self, *args)
    def __getitem__(self, *args):
        return _Backend.StdVectorBool___getitem__(self, *args)
    def __setitem__(self, *args):
        return _Backend.StdVectorBool___setitem__(self, *args)
    def append(self, x):
        return _Backend.StdVectorBool_append(self, x)
    def empty(self):
        return _Backend.StdVectorBool_empty(self)
    def size(self):
        return _Backend.StdVectorBool_size(self)
    def clear(self):
        return _Backend.StdVectorBool_clear(self)
    def swap(self, v):
        return _Backend.StdVectorBool_swap(self, v)
    def get_allocator(self):
        return _Backend.StdVectorBool_get_allocator(self)
    def begin(self):
        return _Backend.StdVectorBool_begin(self)
    def end(self):
        return _Backend.StdVectorBool_end(self)
    def rbegin(self):
        return _Backend.StdVectorBool_rbegin(self)
    def rend(self):
        return _Backend.StdVectorBool_rend(self)
    def pop_back(self):
        return _Backend.StdVectorBool_pop_back(self)
    def erase(self, *args):
        return _Backend.StdVectorBool_erase(self, *args)
    def __init__(self, *args):
        this = _Backend.new_StdVectorBool(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, x):
        return _Backend.StdVectorBool_push_back(self, x)
    def front(self):
        return _Backend.StdVectorBool_front(self)
    def back(self):
        return _Backend.StdVectorBool_back(self)
    def assign(self, n, x):
        return _Backend.StdVectorBool_assign(self, n, x)
    def resize(self, *args):
        return _Backend.StdVectorBool_resize(self, *args)
    def insert(self, *args):
        return _Backend.StdVectorBool_insert(self, *args)
    def reserve(self, n):
        return _Backend.StdVectorBool_reserve(self, n)
    def capacity(self):
        return _Backend.StdVectorBool_capacity(self)
    __swig_destroy__ = _Backend.delete_StdVectorBool
    __del__ = lambda self: None
StdVectorBool_swigregister = _Backend.StdVectorBool_swigregister
StdVectorBool_swigregister(StdVectorBool)
class StdVectorProcess(_object):
    """SWIG-generated proxy for a C++ std::vector of Process objects; list-like API plus std::vector methods."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StdVectorProcess, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StdVectorProcess, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _Backend.StdVectorProcess_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _Backend.StdVectorProcess___nonzero__(self)
    def __bool__(self):
        return _Backend.StdVectorProcess___bool__(self)
    def __len__(self):
        return _Backend.StdVectorProcess___len__(self)
    def pop(self):
        return _Backend.StdVectorProcess_pop(self)
    def __getslice__(self, i, j):
        return _Backend.StdVectorProcess___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _Backend.StdVectorProcess___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _Backend.StdVectorProcess___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _Backend.StdVectorProcess___delitem__(self, *args)
    def __getitem__(self, *args):
        return _Backend.StdVectorProcess___getitem__(self, *args)
    def __setitem__(self, *args):
        return _Backend.StdVectorProcess___setitem__(self, *args)
    def append(self, x):
        return _Backend.StdVectorProcess_append(self, x)
    def empty(self):
        return _Backend.StdVectorProcess_empty(self)
    def size(self):
        return _Backend.StdVectorProcess_size(self)
    def clear(self):
        return _Backend.StdVectorProcess_clear(self)
    def swap(self, v):
        return _Backend.StdVectorProcess_swap(self, v)
    def get_allocator(self):
        return _Backend.StdVectorProcess_get_allocator(self)
    def begin(self):
        return _Backend.StdVectorProcess_begin(self)
    def end(self):
        return _Backend.StdVectorProcess_end(self)
    def rbegin(self):
        return _Backend.StdVectorProcess_rbegin(self)
    def rend(self):
        return _Backend.StdVectorProcess_rend(self)
    def pop_back(self):
        return _Backend.StdVectorProcess_pop_back(self)
    def erase(self, *args):
        return _Backend.StdVectorProcess_erase(self, *args)
    def __init__(self, *args):
        this = _Backend.new_StdVectorProcess(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, x):
        return _Backend.StdVectorProcess_push_back(self, x)
    def front(self):
        return _Backend.StdVectorProcess_front(self)
    def back(self):
        return _Backend.StdVectorProcess_back(self)
    def assign(self, n, x):
        return _Backend.StdVectorProcess_assign(self, n, x)
    def resize(self, *args):
        return _Backend.StdVectorProcess_resize(self, *args)
    def insert(self, *args):
        return _Backend.StdVectorProcess_insert(self, *args)
    def reserve(self, n):
        return _Backend.StdVectorProcess_reserve(self, n)
    def capacity(self):
        return _Backend.StdVectorProcess_capacity(self)
    __swig_destroy__ = _Backend.delete_StdVectorProcess
    __del__ = lambda self: None
StdVectorProcess_swigregister = _Backend.StdVectorProcess_swigregister
StdVectorProcess_swigregister(StdVectorProcess)
class StdVectorProcessPtr(_object):
    """SWIG-generated proxy for a C++ std::vector of Process pointers; list-like API plus std::vector methods."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StdVectorProcessPtr, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StdVectorProcessPtr, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _Backend.StdVectorProcessPtr_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _Backend.StdVectorProcessPtr___nonzero__(self)
    def __bool__(self):
        return _Backend.StdVectorProcessPtr___bool__(self)
    def __len__(self):
        return _Backend.StdVectorProcessPtr___len__(self)
    def pop(self):
        return _Backend.StdVectorProcessPtr_pop(self)
    def __getslice__(self, i, j):
        return _Backend.StdVectorProcessPtr___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _Backend.StdVectorProcessPtr___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _Backend.StdVectorProcessPtr___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _Backend.StdVectorProcessPtr___delitem__(self, *args)
    def __getitem__(self, *args):
        return _Backend.StdVectorProcessPtr___getitem__(self, *args)
    def __setitem__(self, *args):
        return _Backend.StdVectorProcessPtr___setitem__(self, *args)
    def append(self, x):
        return _Backend.StdVectorProcessPtr_append(self, x)
    def empty(self):
        return _Backend.StdVectorProcessPtr_empty(self)
    def size(self):
        return _Backend.StdVectorProcessPtr_size(self)
    def clear(self):
        return _Backend.StdVectorProcessPtr_clear(self)
    def swap(self, v):
        return _Backend.StdVectorProcessPtr_swap(self, v)
    def get_allocator(self):
        return _Backend.StdVectorProcessPtr_get_allocator(self)
    def begin(self):
        return _Backend.StdVectorProcessPtr_begin(self)
    def end(self):
        return _Backend.StdVectorProcessPtr_end(self)
    def rbegin(self):
        return _Backend.StdVectorProcessPtr_rbegin(self)
    def rend(self):
        return _Backend.StdVectorProcessPtr_rend(self)
    def pop_back(self):
        return _Backend.StdVectorProcessPtr_pop_back(self)
    def erase(self, *args):
        return _Backend.StdVectorProcessPtr_erase(self, *args)
    def __init__(self, *args):
        this = _Backend.new_StdVectorProcessPtr(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def push_back(self, x):
        return _Backend.StdVectorProcessPtr_push_back(self, x)
    def front(self):
        return _Backend.StdVectorProcessPtr_front(self)
    def back(self):
        return _Backend.StdVectorProcessPtr_back(self)
    def assign(self, n, x):
        return _Backend.StdVectorProcessPtr_assign(self, n, x)
    def resize(self, *args):
        return _Backend.StdVectorProcessPtr_resize(self, *args)
    def insert(self, *args):
        return _Backend.StdVectorProcessPtr_insert(self, *args)
    def reserve(self, n):
        return _Backend.StdVectorProcessPtr_reserve(self, n)
    def capacity(self):
        return _Backend.StdVectorProcessPtr_capacity(self)
    __swig_destroy__ = _Backend.delete_StdVectorProcessPtr
    __del__ = lambda self: None
StdVectorProcessPtr_swigregister = _Backend.StdVectorProcessPtr_swigregister
StdVectorProcessPtr_swigregister(StdVectorProcessPtr)
class StdVectorCustomRateProcess(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, StdVectorCustomRateProcess, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, StdVectorCustomRateProcess, name)
__repr__ = _swig_repr
def iterator(self):
return _Backend.StdVectorCustomRateProcess_iterator(self)
def __iter__(self):
return self.iterator()
def __nonzero__(self):
return _Backend.StdVectorCustomRateProcess___nonzero__(self)
def __bool__(self):
return _Backend.StdVectorCustomRateProcess___bool__(self)
def __len__(self):
return _Backend.StdVectorCustomRateProcess___len__(self)
def pop(self):
return _Backend.StdVectorCustomRateProcess_pop(self)
def __getslice__(self, i, j):
return _Backend.StdVectorCustomRateProcess___getslice__(self, i, j)
def __setslice__(self, *args):
return _Backend.StdVectorCustomRateProcess___setslice__(self, *args)
def __delslice__(self, i, j):
return _Backend.StdVectorCustomRateProcess___delslice__(self, i, j)
def __delitem__(self, *args):
return _Backend.StdVectorCustomRateProcess___delitem__(self, *args)
def __getitem__(self, *args):
return _Backend.StdVectorCustomRateProcess___getitem__(self, *args)
def __setitem__(self, *args):
return _Backend.StdVectorCustomRateProcess___setitem__(self, *args)
def append(self, x):
return _Backend.StdVectorCustomRateProcess_append(self, x)
def empty(self):
return _Backend.StdVectorCustomRateProcess_empty(self)
def size(self):
return _Backend.StdVectorCustomRateProcess_size(self)
def clear(self):
return _Backend.StdVectorCustomRateProcess_clear(self)
def swap(self, v):
return _Backend.StdVectorCustomRateProcess_swap(self, v)
def get_allocator(self):
return _Backend.StdVectorCustomRateProcess_get_allocator(self)
def begin(self):
return _Backend.StdVectorCustomRateProcess_begin(self)
def end(self):
return _Backend.StdVectorCustomRateProcess_end(self)
def rbegin(self):
return _Backend.StdVectorCustomRateProcess_rbegin(self)
def rend(self):
return _Backend.StdVectorCustomRateProcess_rend(self)
def pop_back(self):
return _Backend.StdVectorCustomRateProcess_pop_back(self)
def erase(self, *args):
return _Backend.StdVectorCustomRateProcess_erase(self, *args)
    def __init__(self, *args):
        # Construct the C++ object and attach it to this proxy.
        this = _Backend.new_StdVectorCustomRateProcess(*args)
        try:
            self.this.append(this)
        except:
            # Bare except is SWIG-generated boilerplate: fall back to direct
            # assignment when self.this is not yet usable.
            self.this = this
    def push_back(self, x):
        # C++-style append, delegated to the backend.
        return _Backend.StdVectorCustomRateProcess_push_back(self, x)
    def front(self):
        # First element, delegated to the backend.
        return _Backend.StdVectorCustomRateProcess_front(self)
    def back(self):
        # Last element, delegated to the backend.
        return _Backend.StdVectorCustomRateProcess_back(self)
    def assign(self, n, x):
        # Replace contents with n copies of x, delegated to the backend.
        return _Backend.StdVectorCustomRateProcess_assign(self, n, x)
    def resize(self, *args):
        # Resize the container, delegated to the backend.
        return _Backend.StdVectorCustomRateProcess_resize(self, *args)
    def insert(self, *args):
        # Insert element(s), delegated to the backend.
        return _Backend.StdVectorCustomRateProcess_insert(self, *args)
    def reserve(self, n):
        # Pre-allocate capacity, delegated to the backend.
        return _Backend.StdVectorCustomRateProcess_reserve(self, n)
    def capacity(self):
        # Current allocated capacity, delegated to the backend.
        return _Backend.StdVectorCustomRateProcess_capacity(self)
__swig_destroy__ = _Backend.delete_StdVectorCustomRateProcess
__del__ = lambda self: None
# Register the proxy class with the SWIG runtime (generated boilerplate).
StdVectorCustomRateProcess_swigregister = _Backend.StdVectorCustomRateProcess_swigregister
StdVectorCustomRateProcess_swigregister(StdVectorCustomRateProcess)
class StdVectorMinimalMatchListEntry(_object):
    """SWIG-generated proxy for a C++ std::vector of MinimalMatchListEntry.

    Every method forwards to the compiled ``_Backend`` extension module;
    this class only supplies the Python-side sequence interface plus the
    usual C++ vector member functions. Auto-generated: do not edit by hand.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StdVectorMinimalMatchListEntry, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StdVectorMinimalMatchListEntry, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _Backend.StdVectorMinimalMatchListEntry_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _Backend.StdVectorMinimalMatchListEntry___nonzero__(self)
    def __bool__(self):
        return _Backend.StdVectorMinimalMatchListEntry___bool__(self)
    def __len__(self):
        return _Backend.StdVectorMinimalMatchListEntry___len__(self)
    def pop(self):
        return _Backend.StdVectorMinimalMatchListEntry_pop(self)
    # __getslice__/__setslice__/__delslice__ are the Python 2 slice protocol;
    # Python 3 routes slices through __getitem__/__setitem__/__delitem__.
    def __getslice__(self, i, j):
        return _Backend.StdVectorMinimalMatchListEntry___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _Backend.StdVectorMinimalMatchListEntry___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _Backend.StdVectorMinimalMatchListEntry___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _Backend.StdVectorMinimalMatchListEntry___delitem__(self, *args)
    def __getitem__(self, *args):
        return _Backend.StdVectorMinimalMatchListEntry___getitem__(self, *args)
    def __setitem__(self, *args):
        return _Backend.StdVectorMinimalMatchListEntry___setitem__(self, *args)
    def append(self, x):
        return _Backend.StdVectorMinimalMatchListEntry_append(self, x)
    def empty(self):
        return _Backend.StdVectorMinimalMatchListEntry_empty(self)
    def size(self):
        return _Backend.StdVectorMinimalMatchListEntry_size(self)
    def clear(self):
        return _Backend.StdVectorMinimalMatchListEntry_clear(self)
    def swap(self, v):
        return _Backend.StdVectorMinimalMatchListEntry_swap(self, v)
    def get_allocator(self):
        return _Backend.StdVectorMinimalMatchListEntry_get_allocator(self)
    def begin(self):
        return _Backend.StdVectorMinimalMatchListEntry_begin(self)
    def end(self):
        return _Backend.StdVectorMinimalMatchListEntry_end(self)
    def rbegin(self):
        return _Backend.StdVectorMinimalMatchListEntry_rbegin(self)
    def rend(self):
        return _Backend.StdVectorMinimalMatchListEntry_rend(self)
    def pop_back(self):
        return _Backend.StdVectorMinimalMatchListEntry_pop_back(self)
    def erase(self, *args):
        return _Backend.StdVectorMinimalMatchListEntry_erase(self, *args)
    def __init__(self, *args):
        # Construct the C++ object and attach it to this proxy.
        this = _Backend.new_StdVectorMinimalMatchListEntry(*args)
        try:
            self.this.append(this)
        except:
            # Bare except is SWIG-generated boilerplate: fall back to direct
            # assignment when self.this is not yet usable.
            self.this = this
    def push_back(self, x):
        return _Backend.StdVectorMinimalMatchListEntry_push_back(self, x)
    def front(self):
        return _Backend.StdVectorMinimalMatchListEntry_front(self)
    def back(self):
        return _Backend.StdVectorMinimalMatchListEntry_back(self)
    def assign(self, n, x):
        return _Backend.StdVectorMinimalMatchListEntry_assign(self, n, x)
    def resize(self, *args):
        return _Backend.StdVectorMinimalMatchListEntry_resize(self, *args)
    def insert(self, *args):
        return _Backend.StdVectorMinimalMatchListEntry_insert(self, *args)
    def reserve(self, n):
        return _Backend.StdVectorMinimalMatchListEntry_reserve(self, n)
    def capacity(self):
        return _Backend.StdVectorMinimalMatchListEntry_capacity(self)
    __swig_destroy__ = _Backend.delete_StdVectorMinimalMatchListEntry
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime (generated boilerplate).
StdVectorMinimalMatchListEntry_swigregister = _Backend.StdVectorMinimalMatchListEntry_swigregister
StdVectorMinimalMatchListEntry_swigregister(StdVectorMinimalMatchListEntry)
class StdVectorStdVectorInt(_object):
    """SWIG-generated proxy for a C++ std::vector of std::vector<int>.

    Every method forwards to the compiled ``_Backend`` extension module;
    this class only supplies the Python-side sequence interface plus the
    usual C++ vector member functions. Auto-generated: do not edit by hand.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StdVectorStdVectorInt, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StdVectorStdVectorInt, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _Backend.StdVectorStdVectorInt_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _Backend.StdVectorStdVectorInt___nonzero__(self)
    def __bool__(self):
        return _Backend.StdVectorStdVectorInt___bool__(self)
    def __len__(self):
        return _Backend.StdVectorStdVectorInt___len__(self)
    def pop(self):
        return _Backend.StdVectorStdVectorInt_pop(self)
    # __getslice__/__setslice__/__delslice__ are the Python 2 slice protocol;
    # Python 3 routes slices through __getitem__/__setitem__/__delitem__.
    def __getslice__(self, i, j):
        return _Backend.StdVectorStdVectorInt___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _Backend.StdVectorStdVectorInt___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _Backend.StdVectorStdVectorInt___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _Backend.StdVectorStdVectorInt___delitem__(self, *args)
    def __getitem__(self, *args):
        return _Backend.StdVectorStdVectorInt___getitem__(self, *args)
    def __setitem__(self, *args):
        return _Backend.StdVectorStdVectorInt___setitem__(self, *args)
    def append(self, x):
        return _Backend.StdVectorStdVectorInt_append(self, x)
    def empty(self):
        return _Backend.StdVectorStdVectorInt_empty(self)
    def size(self):
        return _Backend.StdVectorStdVectorInt_size(self)
    def clear(self):
        return _Backend.StdVectorStdVectorInt_clear(self)
    def swap(self, v):
        return _Backend.StdVectorStdVectorInt_swap(self, v)
    def get_allocator(self):
        return _Backend.StdVectorStdVectorInt_get_allocator(self)
    def begin(self):
        return _Backend.StdVectorStdVectorInt_begin(self)
    def end(self):
        return _Backend.StdVectorStdVectorInt_end(self)
    def rbegin(self):
        return _Backend.StdVectorStdVectorInt_rbegin(self)
    def rend(self):
        return _Backend.StdVectorStdVectorInt_rend(self)
    def pop_back(self):
        return _Backend.StdVectorStdVectorInt_pop_back(self)
    def erase(self, *args):
        return _Backend.StdVectorStdVectorInt_erase(self, *args)
    def __init__(self, *args):
        # Construct the C++ object and attach it to this proxy.
        this = _Backend.new_StdVectorStdVectorInt(*args)
        try:
            self.this.append(this)
        except:
            # Bare except is SWIG-generated boilerplate: fall back to direct
            # assignment when self.this is not yet usable.
            self.this = this
    def push_back(self, x):
        return _Backend.StdVectorStdVectorInt_push_back(self, x)
    def front(self):
        return _Backend.StdVectorStdVectorInt_front(self)
    def back(self):
        return _Backend.StdVectorStdVectorInt_back(self)
    def assign(self, n, x):
        return _Backend.StdVectorStdVectorInt_assign(self, n, x)
    def resize(self, *args):
        return _Backend.StdVectorStdVectorInt_resize(self, *args)
    def insert(self, *args):
        return _Backend.StdVectorStdVectorInt_insert(self, *args)
    def reserve(self, n):
        return _Backend.StdVectorStdVectorInt_reserve(self, n)
    def capacity(self):
        return _Backend.StdVectorStdVectorInt_capacity(self)
    __swig_destroy__ = _Backend.delete_StdVectorStdVectorInt
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime (generated boilerplate).
StdVectorStdVectorInt_swigregister = _Backend.StdVectorStdVectorInt_swigregister
StdVectorStdVectorInt_swigregister(StdVectorStdVectorInt)
class StdVectorStdVectorDouble(_object):
    """SWIG-generated proxy for a C++ std::vector of std::vector<double>.

    Every method forwards to the compiled ``_Backend`` extension module;
    this class only supplies the Python-side sequence interface plus the
    usual C++ vector member functions. Auto-generated: do not edit by hand.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StdVectorStdVectorDouble, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StdVectorStdVectorDouble, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _Backend.StdVectorStdVectorDouble_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _Backend.StdVectorStdVectorDouble___nonzero__(self)
    def __bool__(self):
        return _Backend.StdVectorStdVectorDouble___bool__(self)
    def __len__(self):
        return _Backend.StdVectorStdVectorDouble___len__(self)
    def pop(self):
        return _Backend.StdVectorStdVectorDouble_pop(self)
    # __getslice__/__setslice__/__delslice__ are the Python 2 slice protocol;
    # Python 3 routes slices through __getitem__/__setitem__/__delitem__.
    def __getslice__(self, i, j):
        return _Backend.StdVectorStdVectorDouble___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _Backend.StdVectorStdVectorDouble___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _Backend.StdVectorStdVectorDouble___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _Backend.StdVectorStdVectorDouble___delitem__(self, *args)
    def __getitem__(self, *args):
        return _Backend.StdVectorStdVectorDouble___getitem__(self, *args)
    def __setitem__(self, *args):
        return _Backend.StdVectorStdVectorDouble___setitem__(self, *args)
    def append(self, x):
        return _Backend.StdVectorStdVectorDouble_append(self, x)
    def empty(self):
        return _Backend.StdVectorStdVectorDouble_empty(self)
    def size(self):
        return _Backend.StdVectorStdVectorDouble_size(self)
    def clear(self):
        return _Backend.StdVectorStdVectorDouble_clear(self)
    def swap(self, v):
        return _Backend.StdVectorStdVectorDouble_swap(self, v)
    def get_allocator(self):
        return _Backend.StdVectorStdVectorDouble_get_allocator(self)
    def begin(self):
        return _Backend.StdVectorStdVectorDouble_begin(self)
    def end(self):
        return _Backend.StdVectorStdVectorDouble_end(self)
    def rbegin(self):
        return _Backend.StdVectorStdVectorDouble_rbegin(self)
    def rend(self):
        return _Backend.StdVectorStdVectorDouble_rend(self)
    def pop_back(self):
        return _Backend.StdVectorStdVectorDouble_pop_back(self)
    def erase(self, *args):
        return _Backend.StdVectorStdVectorDouble_erase(self, *args)
    def __init__(self, *args):
        # Construct the C++ object and attach it to this proxy.
        this = _Backend.new_StdVectorStdVectorDouble(*args)
        try:
            self.this.append(this)
        except:
            # Bare except is SWIG-generated boilerplate: fall back to direct
            # assignment when self.this is not yet usable.
            self.this = this
    def push_back(self, x):
        return _Backend.StdVectorStdVectorDouble_push_back(self, x)
    def front(self):
        return _Backend.StdVectorStdVectorDouble_front(self)
    def back(self):
        return _Backend.StdVectorStdVectorDouble_back(self)
    def assign(self, n, x):
        return _Backend.StdVectorStdVectorDouble_assign(self, n, x)
    def resize(self, *args):
        return _Backend.StdVectorStdVectorDouble_resize(self, *args)
    def insert(self, *args):
        return _Backend.StdVectorStdVectorDouble_insert(self, *args)
    def reserve(self, n):
        return _Backend.StdVectorStdVectorDouble_reserve(self, n)
    def capacity(self):
        return _Backend.StdVectorStdVectorDouble_capacity(self)
    __swig_destroy__ = _Backend.delete_StdVectorStdVectorDouble
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime (generated boilerplate).
StdVectorStdVectorDouble_swigregister = _Backend.StdVectorStdVectorDouble_swigregister
StdVectorStdVectorDouble_swigregister(StdVectorStdVectorDouble)
class StdVectorCoordinate(_object):
    """SWIG-generated proxy for a C++ std::vector of Coordinate.

    Every method forwards to the compiled ``_Backend`` extension module;
    this class only supplies the Python-side sequence interface plus the
    usual C++ vector member functions. Auto-generated: do not edit by hand.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StdVectorCoordinate, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StdVectorCoordinate, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _Backend.StdVectorCoordinate_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _Backend.StdVectorCoordinate___nonzero__(self)
    def __bool__(self):
        return _Backend.StdVectorCoordinate___bool__(self)
    def __len__(self):
        return _Backend.StdVectorCoordinate___len__(self)
    def pop(self):
        return _Backend.StdVectorCoordinate_pop(self)
    # __getslice__/__setslice__/__delslice__ are the Python 2 slice protocol;
    # Python 3 routes slices through __getitem__/__setitem__/__delitem__.
    def __getslice__(self, i, j):
        return _Backend.StdVectorCoordinate___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _Backend.StdVectorCoordinate___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _Backend.StdVectorCoordinate___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _Backend.StdVectorCoordinate___delitem__(self, *args)
    def __getitem__(self, *args):
        return _Backend.StdVectorCoordinate___getitem__(self, *args)
    def __setitem__(self, *args):
        return _Backend.StdVectorCoordinate___setitem__(self, *args)
    def append(self, x):
        return _Backend.StdVectorCoordinate_append(self, x)
    def empty(self):
        return _Backend.StdVectorCoordinate_empty(self)
    def size(self):
        return _Backend.StdVectorCoordinate_size(self)
    def clear(self):
        return _Backend.StdVectorCoordinate_clear(self)
    def swap(self, v):
        return _Backend.StdVectorCoordinate_swap(self, v)
    def get_allocator(self):
        return _Backend.StdVectorCoordinate_get_allocator(self)
    def begin(self):
        return _Backend.StdVectorCoordinate_begin(self)
    def end(self):
        return _Backend.StdVectorCoordinate_end(self)
    def rbegin(self):
        return _Backend.StdVectorCoordinate_rbegin(self)
    def rend(self):
        return _Backend.StdVectorCoordinate_rend(self)
    def pop_back(self):
        return _Backend.StdVectorCoordinate_pop_back(self)
    def erase(self, *args):
        return _Backend.StdVectorCoordinate_erase(self, *args)
    def __init__(self, *args):
        # Construct the C++ object and attach it to this proxy.
        this = _Backend.new_StdVectorCoordinate(*args)
        try:
            self.this.append(this)
        except:
            # Bare except is SWIG-generated boilerplate: fall back to direct
            # assignment when self.this is not yet usable.
            self.this = this
    def push_back(self, x):
        return _Backend.StdVectorCoordinate_push_back(self, x)
    def front(self):
        return _Backend.StdVectorCoordinate_front(self)
    def back(self):
        return _Backend.StdVectorCoordinate_back(self)
    def assign(self, n, x):
        return _Backend.StdVectorCoordinate_assign(self, n, x)
    def resize(self, *args):
        return _Backend.StdVectorCoordinate_resize(self, *args)
    def insert(self, *args):
        return _Backend.StdVectorCoordinate_insert(self, *args)
    def reserve(self, n):
        return _Backend.StdVectorCoordinate_reserve(self, n)
    def capacity(self):
        return _Backend.StdVectorCoordinate_capacity(self)
    __swig_destroy__ = _Backend.delete_StdVectorCoordinate
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime (generated boilerplate).
StdVectorCoordinate_swigregister = _Backend.StdVectorCoordinate_swigregister
StdVectorCoordinate_swigregister(StdVectorCoordinate)
class StdVectorStdPairCoordinate(_object):
    """SWIG-generated proxy for a C++ std::vector of Coordinate pairs
    (per the class name; exact pair type is defined on the C++ side).

    Every method forwards to the compiled ``_Backend`` extension module;
    this class only supplies the Python-side sequence interface plus the
    usual C++ vector member functions. Auto-generated: do not edit by hand.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StdVectorStdPairCoordinate, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StdVectorStdPairCoordinate, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _Backend.StdVectorStdPairCoordinate_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _Backend.StdVectorStdPairCoordinate___nonzero__(self)
    def __bool__(self):
        return _Backend.StdVectorStdPairCoordinate___bool__(self)
    def __len__(self):
        return _Backend.StdVectorStdPairCoordinate___len__(self)
    def pop(self):
        return _Backend.StdVectorStdPairCoordinate_pop(self)
    # __getslice__/__setslice__/__delslice__ are the Python 2 slice protocol;
    # Python 3 routes slices through __getitem__/__setitem__/__delitem__.
    def __getslice__(self, i, j):
        return _Backend.StdVectorStdPairCoordinate___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _Backend.StdVectorStdPairCoordinate___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _Backend.StdVectorStdPairCoordinate___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _Backend.StdVectorStdPairCoordinate___delitem__(self, *args)
    def __getitem__(self, *args):
        return _Backend.StdVectorStdPairCoordinate___getitem__(self, *args)
    def __setitem__(self, *args):
        return _Backend.StdVectorStdPairCoordinate___setitem__(self, *args)
    def append(self, x):
        return _Backend.StdVectorStdPairCoordinate_append(self, x)
    def empty(self):
        return _Backend.StdVectorStdPairCoordinate_empty(self)
    def size(self):
        return _Backend.StdVectorStdPairCoordinate_size(self)
    def clear(self):
        return _Backend.StdVectorStdPairCoordinate_clear(self)
    def swap(self, v):
        return _Backend.StdVectorStdPairCoordinate_swap(self, v)
    def get_allocator(self):
        return _Backend.StdVectorStdPairCoordinate_get_allocator(self)
    def begin(self):
        return _Backend.StdVectorStdPairCoordinate_begin(self)
    def end(self):
        return _Backend.StdVectorStdPairCoordinate_end(self)
    def rbegin(self):
        return _Backend.StdVectorStdPairCoordinate_rbegin(self)
    def rend(self):
        return _Backend.StdVectorStdPairCoordinate_rend(self)
    def pop_back(self):
        return _Backend.StdVectorStdPairCoordinate_pop_back(self)
    def erase(self, *args):
        return _Backend.StdVectorStdPairCoordinate_erase(self, *args)
    def __init__(self, *args):
        # Construct the C++ object and attach it to this proxy.
        this = _Backend.new_StdVectorStdPairCoordinate(*args)
        try:
            self.this.append(this)
        except:
            # Bare except is SWIG-generated boilerplate: fall back to direct
            # assignment when self.this is not yet usable.
            self.this = this
    def push_back(self, x):
        return _Backend.StdVectorStdPairCoordinate_push_back(self, x)
    def front(self):
        return _Backend.StdVectorStdPairCoordinate_front(self)
    def back(self):
        return _Backend.StdVectorStdPairCoordinate_back(self)
    def assign(self, n, x):
        return _Backend.StdVectorStdPairCoordinate_assign(self, n, x)
    def resize(self, *args):
        return _Backend.StdVectorStdPairCoordinate_resize(self, *args)
    def insert(self, *args):
        return _Backend.StdVectorStdPairCoordinate_insert(self, *args)
    def reserve(self, n):
        return _Backend.StdVectorStdPairCoordinate_reserve(self, n)
    def capacity(self):
        return _Backend.StdVectorStdPairCoordinate_capacity(self)
    __swig_destroy__ = _Backend.delete_StdVectorStdPairCoordinate
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime (generated boilerplate).
StdVectorStdPairCoordinate_swigregister = _Backend.StdVectorStdPairCoordinate_swigregister
StdVectorStdPairCoordinate_swigregister(StdVectorStdPairCoordinate)
class StdVectorStdVectorCoordinate(_object):
    """SWIG-generated proxy for a C++ std::vector of std::vector<Coordinate>.

    Every method forwards to the compiled ``_Backend`` extension module;
    this class only supplies the Python-side sequence interface plus the
    usual C++ vector member functions. Auto-generated: do not edit by hand.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StdVectorStdVectorCoordinate, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StdVectorStdVectorCoordinate, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _Backend.StdVectorStdVectorCoordinate_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _Backend.StdVectorStdVectorCoordinate___nonzero__(self)
    def __bool__(self):
        return _Backend.StdVectorStdVectorCoordinate___bool__(self)
    def __len__(self):
        return _Backend.StdVectorStdVectorCoordinate___len__(self)
    def pop(self):
        return _Backend.StdVectorStdVectorCoordinate_pop(self)
    # __getslice__/__setslice__/__delslice__ are the Python 2 slice protocol;
    # Python 3 routes slices through __getitem__/__setitem__/__delitem__.
    def __getslice__(self, i, j):
        return _Backend.StdVectorStdVectorCoordinate___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _Backend.StdVectorStdVectorCoordinate___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _Backend.StdVectorStdVectorCoordinate___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _Backend.StdVectorStdVectorCoordinate___delitem__(self, *args)
    def __getitem__(self, *args):
        return _Backend.StdVectorStdVectorCoordinate___getitem__(self, *args)
    def __setitem__(self, *args):
        return _Backend.StdVectorStdVectorCoordinate___setitem__(self, *args)
    def append(self, x):
        return _Backend.StdVectorStdVectorCoordinate_append(self, x)
    def empty(self):
        return _Backend.StdVectorStdVectorCoordinate_empty(self)
    def size(self):
        return _Backend.StdVectorStdVectorCoordinate_size(self)
    def clear(self):
        return _Backend.StdVectorStdVectorCoordinate_clear(self)
    def swap(self, v):
        return _Backend.StdVectorStdVectorCoordinate_swap(self, v)
    def get_allocator(self):
        return _Backend.StdVectorStdVectorCoordinate_get_allocator(self)
    def begin(self):
        return _Backend.StdVectorStdVectorCoordinate_begin(self)
    def end(self):
        return _Backend.StdVectorStdVectorCoordinate_end(self)
    def rbegin(self):
        return _Backend.StdVectorStdVectorCoordinate_rbegin(self)
    def rend(self):
        return _Backend.StdVectorStdVectorCoordinate_rend(self)
    def pop_back(self):
        return _Backend.StdVectorStdVectorCoordinate_pop_back(self)
    def erase(self, *args):
        return _Backend.StdVectorStdVectorCoordinate_erase(self, *args)
    def __init__(self, *args):
        # Construct the C++ object and attach it to this proxy.
        this = _Backend.new_StdVectorStdVectorCoordinate(*args)
        try:
            self.this.append(this)
        except:
            # Bare except is SWIG-generated boilerplate: fall back to direct
            # assignment when self.this is not yet usable.
            self.this = this
    def push_back(self, x):
        return _Backend.StdVectorStdVectorCoordinate_push_back(self, x)
    def front(self):
        return _Backend.StdVectorStdVectorCoordinate_front(self)
    def back(self):
        return _Backend.StdVectorStdVectorCoordinate_back(self)
    def assign(self, n, x):
        return _Backend.StdVectorStdVectorCoordinate_assign(self, n, x)
    def resize(self, *args):
        return _Backend.StdVectorStdVectorCoordinate_resize(self, *args)
    def insert(self, *args):
        return _Backend.StdVectorStdVectorCoordinate_insert(self, *args)
    def reserve(self, n):
        return _Backend.StdVectorStdVectorCoordinate_reserve(self, n)
    def capacity(self):
        return _Backend.StdVectorStdVectorCoordinate_capacity(self)
    __swig_destroy__ = _Backend.delete_StdVectorStdVectorCoordinate
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime (generated boilerplate).
StdVectorStdVectorCoordinate_swigregister = _Backend.StdVectorStdVectorCoordinate_swigregister
StdVectorStdVectorCoordinate_swigregister(StdVectorStdVectorCoordinate)
class StdMapStringInt(_object):
    """SWIG-generated proxy for a C++ std::map<std::string, int>.

    Every method forwards to the compiled ``_Backend`` extension module;
    this class supplies both a dict-like Python interface (keys/values/
    items, in/len/[]) and the usual C++ map member functions.
    Auto-generated: do not edit by hand.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StdMapStringInt, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StdMapStringInt, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _Backend.StdMapStringInt_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _Backend.StdMapStringInt___nonzero__(self)
    def __bool__(self):
        return _Backend.StdMapStringInt___bool__(self)
    def __len__(self):
        return _Backend.StdMapStringInt___len__(self)
    # NOTE(review): __iter__ is defined twice in this generated class; this
    # later definition wins, so iteration yields keys (dict-like) rather than
    # the item iterator defined above.
    def __iter__(self):
        return self.key_iterator()
    def iterkeys(self):
        return self.key_iterator()
    def itervalues(self):
        return self.value_iterator()
    def iteritems(self):
        return self.iterator()
    def __getitem__(self, key):
        return _Backend.StdMapStringInt___getitem__(self, key)
    def __delitem__(self, key):
        return _Backend.StdMapStringInt___delitem__(self, key)
    def has_key(self, key):
        return _Backend.StdMapStringInt_has_key(self, key)
    def keys(self):
        return _Backend.StdMapStringInt_keys(self)
    def values(self):
        return _Backend.StdMapStringInt_values(self)
    def items(self):
        return _Backend.StdMapStringInt_items(self)
    def __contains__(self, key):
        return _Backend.StdMapStringInt___contains__(self, key)
    def key_iterator(self):
        return _Backend.StdMapStringInt_key_iterator(self)
    def value_iterator(self):
        return _Backend.StdMapStringInt_value_iterator(self)
    def __setitem__(self, *args):
        return _Backend.StdMapStringInt___setitem__(self, *args)
    def asdict(self):
        return _Backend.StdMapStringInt_asdict(self)
    def __init__(self, *args):
        # Construct the C++ object and attach it to this proxy.
        this = _Backend.new_StdMapStringInt(*args)
        try:
            self.this.append(this)
        except:
            # Bare except is SWIG-generated boilerplate: fall back to direct
            # assignment when self.this is not yet usable.
            self.this = this
    def empty(self):
        return _Backend.StdMapStringInt_empty(self)
    def size(self):
        return _Backend.StdMapStringInt_size(self)
    def clear(self):
        return _Backend.StdMapStringInt_clear(self)
    def swap(self, v):
        return _Backend.StdMapStringInt_swap(self, v)
    def get_allocator(self):
        return _Backend.StdMapStringInt_get_allocator(self)
    def begin(self):
        return _Backend.StdMapStringInt_begin(self)
    def end(self):
        return _Backend.StdMapStringInt_end(self)
    def rbegin(self):
        return _Backend.StdMapStringInt_rbegin(self)
    def rend(self):
        return _Backend.StdMapStringInt_rend(self)
    def count(self, x):
        return _Backend.StdMapStringInt_count(self, x)
    def erase(self, *args):
        return _Backend.StdMapStringInt_erase(self, *args)
    def find(self, x):
        return _Backend.StdMapStringInt_find(self, x)
    def lower_bound(self, x):
        return _Backend.StdMapStringInt_lower_bound(self, x)
    def upper_bound(self, x):
        return _Backend.StdMapStringInt_upper_bound(self, x)
    __swig_destroy__ = _Backend.delete_StdMapStringInt
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime (generated boilerplate).
StdMapStringInt_swigregister = _Backend.StdMapStringInt_swigregister
StdMapStringInt_swigregister(StdMapStringInt)
class StdVectorStdPairIntInt(_object):
    """SWIG-generated proxy for a C++ std::vector of std::pair<int, int>.

    Every method forwards to the compiled ``_Backend`` extension module;
    this class only supplies the Python-side sequence interface plus the
    usual C++ vector member functions. Auto-generated: do not edit by hand.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StdVectorStdPairIntInt, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StdVectorStdPairIntInt, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _Backend.StdVectorStdPairIntInt_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _Backend.StdVectorStdPairIntInt___nonzero__(self)
    def __bool__(self):
        return _Backend.StdVectorStdPairIntInt___bool__(self)
    def __len__(self):
        return _Backend.StdVectorStdPairIntInt___len__(self)
    def pop(self):
        return _Backend.StdVectorStdPairIntInt_pop(self)
    # __getslice__/__setslice__/__delslice__ are the Python 2 slice protocol;
    # Python 3 routes slices through __getitem__/__setitem__/__delitem__.
    def __getslice__(self, i, j):
        return _Backend.StdVectorStdPairIntInt___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _Backend.StdVectorStdPairIntInt___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _Backend.StdVectorStdPairIntInt___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _Backend.StdVectorStdPairIntInt___delitem__(self, *args)
    def __getitem__(self, *args):
        return _Backend.StdVectorStdPairIntInt___getitem__(self, *args)
    def __setitem__(self, *args):
        return _Backend.StdVectorStdPairIntInt___setitem__(self, *args)
    def append(self, x):
        return _Backend.StdVectorStdPairIntInt_append(self, x)
    def empty(self):
        return _Backend.StdVectorStdPairIntInt_empty(self)
    def size(self):
        return _Backend.StdVectorStdPairIntInt_size(self)
    def clear(self):
        return _Backend.StdVectorStdPairIntInt_clear(self)
    def swap(self, v):
        return _Backend.StdVectorStdPairIntInt_swap(self, v)
    def get_allocator(self):
        return _Backend.StdVectorStdPairIntInt_get_allocator(self)
    def begin(self):
        return _Backend.StdVectorStdPairIntInt_begin(self)
    def end(self):
        return _Backend.StdVectorStdPairIntInt_end(self)
    def rbegin(self):
        return _Backend.StdVectorStdPairIntInt_rbegin(self)
    def rend(self):
        return _Backend.StdVectorStdPairIntInt_rend(self)
    def pop_back(self):
        return _Backend.StdVectorStdPairIntInt_pop_back(self)
    def erase(self, *args):
        return _Backend.StdVectorStdPairIntInt_erase(self, *args)
    def __init__(self, *args):
        # Construct the C++ object and attach it to this proxy.
        this = _Backend.new_StdVectorStdPairIntInt(*args)
        try:
            self.this.append(this)
        except:
            # Bare except is SWIG-generated boilerplate: fall back to direct
            # assignment when self.this is not yet usable.
            self.this = this
    def push_back(self, x):
        return _Backend.StdVectorStdPairIntInt_push_back(self, x)
    def front(self):
        return _Backend.StdVectorStdPairIntInt_front(self)
    def back(self):
        return _Backend.StdVectorStdPairIntInt_back(self)
    def assign(self, n, x):
        return _Backend.StdVectorStdPairIntInt_assign(self, n, x)
    def resize(self, *args):
        return _Backend.StdVectorStdPairIntInt_resize(self, *args)
    def insert(self, *args):
        return _Backend.StdVectorStdPairIntInt_insert(self, *args)
    def reserve(self, n):
        return _Backend.StdVectorStdPairIntInt_reserve(self, n)
    def capacity(self):
        return _Backend.StdVectorStdPairIntInt_capacity(self)
    __swig_destroy__ = _Backend.delete_StdVectorStdPairIntInt
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime (generated boilerplate).
StdVectorStdPairIntInt_swigregister = _Backend.StdVectorStdPairIntInt_swigregister
StdVectorStdPairIntInt_swigregister(StdVectorStdPairIntInt)
class LatticeModel(_object):
    """SWIG-generated proxy for the C++ LatticeModel class.

    Holds the simulation components (configuration, timer, lattice map,
    interactions) on the C++ side; all methods forward to the compiled
    ``_Backend`` extension module. Auto-generated: do not edit by hand.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, LatticeModel, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, LatticeModel, name)
    __repr__ = _swig_repr
    def __init__(self, configuration, simulation_timer, lattice_map, interactions):
        # Construct the C++ object and attach it to this proxy.
        this = _Backend.new_LatticeModel(configuration, simulation_timer, lattice_map, interactions)
        try:
            self.this.append(this)
        except:
            # Bare except is SWIG-generated boilerplate: fall back to direct
            # assignment when self.this is not yet usable.
            self.this = this
    def singleStep(self):
        # Advance the simulation by one step in the C++ backend.
        return _Backend.LatticeModel_singleStep(self)
    def interactions(self):
        return _Backend.LatticeModel_interactions(self)
    def configuration(self):
        return _Backend.LatticeModel_configuration(self)
    def latticeMap(self):
        return _Backend.LatticeModel_latticeMap(self)
    __swig_destroy__ = _Backend.delete_LatticeModel
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime (generated boilerplate).
LatticeModel_swigregister = _Backend.LatticeModel_swigregister
LatticeModel_swigregister(LatticeModel)
class LatticeMap(_object):
    """SWIG-generated proxy for the C++ LatticeMap class.

    Maps between lattice-site indices and (cell i, j, k, basis) positions,
    with optional periodicity per direction; all methods forward to the
    compiled ``_Backend`` extension module. Auto-generated: do not edit
    by hand.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, LatticeMap, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, LatticeMap, name)
    __repr__ = _swig_repr
    def __init__(self, n_basis, repetitions, periodic):
        # Construct the C++ object and attach it to this proxy.
        this = _Backend.new_LatticeMap(n_basis, repetitions, periodic)
        try:
            self.this.append(this)
        except:
            # Bare except is SWIG-generated boilerplate: fall back to direct
            # assignment when self.this is not yet usable.
            self.this = this
    def neighbourIndices(self, index, shells=1):
        # shells defaults to 1 (nearest shell) on the Python side.
        return _Backend.LatticeMap_neighbourIndices(self, index, shells)
    def supersetNeighbourIndices(self, indices, shells):
        return _Backend.LatticeMap_supersetNeighbourIndices(self, indices, shells)
    def indicesFromCell(self, i, j, k):
        return _Backend.LatticeMap_indicesFromCell(self, i, j, k)
    def indexFromMoveInfo(self, index, i, j, k, basis):
        return _Backend.LatticeMap_indexFromMoveInfo(self, index, i, j, k, basis)
    def indexToCell(self, index, cell_i, cell_j, cell_k):
        return _Backend.LatticeMap_indexToCell(self, index, cell_i, cell_j, cell_k)
    def basisSiteFromIndex(self, index):
        return _Backend.LatticeMap_basisSiteFromIndex(self, index)
    def nBasis(self):
        return _Backend.LatticeMap_nBasis(self)
    def periodicA(self):
        return _Backend.LatticeMap_periodicA(self)
    def periodicB(self):
        return _Backend.LatticeMap_periodicB(self)
    def periodicC(self):
        return _Backend.LatticeMap_periodicC(self)
    def repetitionsA(self):
        return _Backend.LatticeMap_repetitionsA(self)
    def repetitionsB(self):
        return _Backend.LatticeMap_repetitionsB(self)
    def repetitionsC(self):
        return _Backend.LatticeMap_repetitionsC(self)
    def wrap(self, *args):
        return _Backend.LatticeMap_wrap(self, *args)
    __swig_destroy__ = _Backend.delete_LatticeMap
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime (generated boilerplate).
LatticeMap_swigregister = _Backend.LatticeMap_swigregister
LatticeMap_swigregister(LatticeMap)
class Configuration(_object):
    """SWIG-generated proxy for the C++ Configuration class.

    Holds the lattice geometry (coordinates), element types, and match
    lists used by the KMC machinery; all methods forward to the compiled
    ``_Backend`` extension module. Auto-generated: do not edit by hand.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, Configuration, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, Configuration, name)
    __repr__ = _swig_repr
    def __init__(self, coordinates, elements, possible_types):
        # Construct the C++ object and attach it to this proxy.
        this = _Backend.new_Configuration(coordinates, elements, possible_types)
        try:
            self.this.append(this)
        except:
            # Bare except is SWIG-generated boilerplate: fall back to direct
            # assignment when self.this is not yet usable.
            self.this = this
    def initMatchLists(self, lattice_map, range):
        # NOTE: 'range' shadows the builtin; generated signature, kept as-is.
        return _Backend.Configuration_initMatchLists(self, lattice_map, range)
    def coordinates(self):
        return _Backend.Configuration_coordinates(self)
    def atomIDCoordinates(self):
        return _Backend.Configuration_atomIDCoordinates(self)
    def elements(self):
        return _Backend.Configuration_elements(self)
    def atomIDElements(self):
        return _Backend.Configuration_atomIDElements(self)
    def types(self):
        return _Backend.Configuration_types(self)
    def movedAtomIDs(self):
        return _Backend.Configuration_movedAtomIDs(self)
    def recentMoveVectors(self):
        return _Backend.Configuration_recentMoveVectors(self)
    def updateMatchList(self, index):
        return _Backend.Configuration_updateMatchList(self, index)
    def minimalMatchList(self, *args):
        return _Backend.Configuration_minimalMatchList(self, *args)
    def performProcess(self, process, site_index, lattice_map):
        return _Backend.Configuration_performProcess(self, process, site_index, lattice_map)
    def typeName(self, type):
        # NOTE: 'type' shadows the builtin; generated signature, kept as-is.
        return _Backend.Configuration_typeName(self, type)
    def atomIdCoordinates(self):
        return _Backend.Configuration_atomIdCoordinates(self)
    def atomID(self):
        return _Backend.Configuration_atomID(self)
    __swig_destroy__ = _Backend.delete_Configuration
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime (generated boilerplate).
Configuration_swigregister = _Backend.Configuration_swigregister
Configuration_swigregister(Configuration)
class Interactions(_object):
    """SWIG-generated proxy for the C++ Interactions class.

    Manages the set of processes, their rates, and the probability table
    used for process picking; all methods forward to the compiled
    ``_Backend`` extension module. Auto-generated: do not edit by hand.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, Interactions, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, Interactions, name)
    __repr__ = _swig_repr
    def __init__(self, *args):
        # Construct the C++ object and attach it to this proxy.
        this = _Backend.new_Interactions(*args)
        try:
            self.this.append(this)
        except:
            # Bare except is SWIG-generated boilerplate: fall back to direct
            # assignment when self.this is not yet usable.
            self.this = this
    def maxRange(self):
        return _Backend.Interactions_maxRange(self)
    def useCustomRates(self):
        return _Backend.Interactions_useCustomRates(self)
    def updateProcessMatchLists(self, configuration, lattice_map):
        return _Backend.Interactions_updateProcessMatchLists(self, configuration, lattice_map)
    def processes(self, *args):
        return _Backend.Interactions_processes(self, *args)
    def rateCalculator(self):
        return _Backend.Interactions_rateCalculator(self)
    def totalAvailableSites(self):
        return _Backend.Interactions_totalAvailableSites(self)
    def probabilityTable(self):
        return _Backend.Interactions_probabilityTable(self)
    def updateProbabilityTable(self):
        return _Backend.Interactions_updateProbabilityTable(self)
    def totalRate(self):
        return _Backend.Interactions_totalRate(self)
    def pickProcessIndex(self):
        return _Backend.Interactions_pickProcessIndex(self)
    def pickProcess(self):
        return _Backend.Interactions_pickProcess(self)
    __swig_destroy__ = _Backend.delete_Interactions
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime (generated boilerplate).
Interactions_swigregister = _Backend.Interactions_swigregister
Interactions_swigregister(Interactions)
class Process(_object):
    """SWIG proxy for the C++ ``Process`` class (an elementary process with its sites and rate).

    Auto-generated binding: every method forwards to the corresponding flat
    C wrapper in ``_Backend``; keep the ``__swig_*`` boilerplate layout intact.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, Process, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, Process, name)
    __repr__ = _swig_repr

    def __init__(self, *args):
        this = _Backend.new_Process(*args)
        # ``self.this`` is a list on new-style SWIG proxies, absent otherwise.
        try:
            self.this.append(this)
        except:
            self.this = this
    __swig_destroy__ = _Backend.delete_Process
    __del__ = lambda self: None

    def totalRate(self):
        return _Backend.Process_totalRate(self)
    def addSite(self, index, rate=0.0):
        return _Backend.Process_addSite(self, index, rate)
    def removeSite(self, index):
        return _Backend.Process_removeSite(self, index)
    def pickSite(self):
        return _Backend.Process_pickSite(self)
    def updateRateTable(self):
        return _Backend.Process_updateRateTable(self)
    def rateConstant(self):
        return _Backend.Process_rateConstant(self)
    def nSites(self):
        return _Backend.Process_nSites(self)
    def isListed(self, index):
        return _Backend.Process_isListed(self, index)
    def sites(self):
        return _Backend.Process_sites(self)
    def minimalMatchList(self, *args):
        return _Backend.Process_minimalMatchList(self, *args)
    def affectedIndices(self, *args):
        return _Backend.Process_affectedIndices(self, *args)
    def basisSites(self):
        return _Backend.Process_basisSites(self)
    def idMoves(self, *args):
        return _Backend.Process_idMoves(self, *args)
    def cutoff(self):
        return _Backend.Process_cutoff(self)
    def range(self):
        # NOTE: shadows the builtin name ``range`` only as a method name (harmless).
        return _Backend.Process_range(self)
    def processNumber(self):
        return _Backend.Process_processNumber(self)
# Register the proxy class with the SWIG runtime type system.
Process_swigregister = _Backend.Process_swigregister
Process_swigregister(Process)
class CustomRateProcess(Process):
    """SWIG proxy for the C++ ``CustomRateProcess`` class (a ``Process`` with per-site rates).

    The ``for _s in [Process]`` loops merge the base class's SWIG accessor
    dictionaries into this class, mirroring C++ inheritance on the Python side.
    """
    __swig_setmethods__ = {}
    for _s in [Process]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, CustomRateProcess, name, value)
    __swig_getmethods__ = {}
    for _s in [Process]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, CustomRateProcess, name)
    __repr__ = _swig_repr

    def __init__(self, *args):
        this = _Backend.new_CustomRateProcess(*args)
        # ``self.this`` is a list on new-style SWIG proxies, absent otherwise.
        try:
            self.this.append(this)
        except:
            self.this = this
    __swig_destroy__ = _Backend.delete_CustomRateProcess
    __del__ = lambda self: None

    # Overrides of the base-class methods, forwarding to the derived C++ type.
    # Note: ``addSite`` makes ``rate`` mandatory here, unlike Process.addSite.
    def totalRate(self):
        return _Backend.CustomRateProcess_totalRate(self)
    def addSite(self, index, rate):
        return _Backend.CustomRateProcess_addSite(self, index, rate)
    def removeSite(self, index):
        return _Backend.CustomRateProcess_removeSite(self, index)
    def pickSite(self):
        return _Backend.CustomRateProcess_pickSite(self)
    def updateRateTable(self):
        return _Backend.CustomRateProcess_updateRateTable(self)
# Register the proxy class with the SWIG runtime type system.
CustomRateProcess_swigregister = _Backend.CustomRateProcess_swigregister
CustomRateProcess_swigregister(CustomRateProcess)
class Coordinate(_object):
    """SWIG proxy for the C++ ``Coordinate`` class (x/y/z with arithmetic and comparisons).

    Auto-generated binding: all operators and accessors forward to the flat
    C wrapper functions in ``_Backend``.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, Coordinate, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, Coordinate, name)
    __repr__ = _swig_repr

    def __init__(self, *args):
        this = _Backend.new_Coordinate(*args)
        # ``self.this`` is a list on new-style SWIG proxies, absent otherwise.
        try:
            self.this.append(this)
        except:
            self.this = this

    def norm(self):
        return _Backend.Coordinate_norm(self)
    def outerProdDiag(self, other):
        return _Backend.Coordinate_outerProdDiag(self, other)
    def dot(self, other):
        return _Backend.Coordinate_dot(self, other)
    # Comparison and arithmetic operators implemented on the C++ side.
    def __lt__(self, other):
        return _Backend.Coordinate___lt__(self, other)
    def __eq__(self, other):
        return _Backend.Coordinate___eq__(self, other)
    def __ne__(self, other):
        return _Backend.Coordinate___ne__(self, other)
    def __sub__(self, other):
        return _Backend.Coordinate___sub__(self, other)
    def __add__(self, other):
        return _Backend.Coordinate___add__(self, other)
    def __iadd__(self, other):
        return _Backend.Coordinate___iadd__(self, other)
    def __mul__(self, scalar):
        return _Backend.Coordinate___mul__(self, scalar)
    # Component accessors.
    def x(self):
        return _Backend.Coordinate_x(self)
    def y(self):
        return _Backend.Coordinate_y(self)
    def z(self):
        return _Backend.Coordinate_z(self)
    def data(self):
        return _Backend.Coordinate_data(self)
    def distance(self, other):
        return _Backend.Coordinate_distance(self, other)
    def distanceToOrigin(self):
        return _Backend.Coordinate_distanceToOrigin(self)
    def _print(self):
        return _Backend.Coordinate__print(self)
    def __getitem__(self, i):
        return _Backend.Coordinate___getitem__(self, i)
    def __setitem__(self, i, value):
        return _Backend.Coordinate___setitem__(self, i, value)
    __swig_destroy__ = _Backend.delete_Coordinate
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime type system.
Coordinate_swigregister = _Backend.Coordinate_swigregister
Coordinate_swigregister(Coordinate)
class MinimalMatchListEntry(_object):
    """SWIG proxy for the C++ ``MinimalMatchListEntry`` struct.

    Each C++ member is exposed twice, per the SWIG 2.x convention: through
    the ``__swig_set/getmethods__`` dictionaries (classic classes) and, when
    ``_newclass`` is true, as a real Python property backed by the C
    getter/setter pair.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, MinimalMatchListEntry, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, MinimalMatchListEntry, name)
    __repr__ = _swig_repr
    __swig_setmethods__["has_move_coordinate"] = _Backend.MinimalMatchListEntry_has_move_coordinate_set
    __swig_getmethods__["has_move_coordinate"] = _Backend.MinimalMatchListEntry_has_move_coordinate_get
    if _newclass:
        has_move_coordinate = _swig_property(_Backend.MinimalMatchListEntry_has_move_coordinate_get, _Backend.MinimalMatchListEntry_has_move_coordinate_set)
    __swig_setmethods__["match_type"] = _Backend.MinimalMatchListEntry_match_type_set
    __swig_getmethods__["match_type"] = _Backend.MinimalMatchListEntry_match_type_get
    if _newclass:
        match_type = _swig_property(_Backend.MinimalMatchListEntry_match_type_get, _Backend.MinimalMatchListEntry_match_type_set)
    __swig_setmethods__["update_type"] = _Backend.MinimalMatchListEntry_update_type_set
    __swig_getmethods__["update_type"] = _Backend.MinimalMatchListEntry_update_type_get
    if _newclass:
        update_type = _swig_property(_Backend.MinimalMatchListEntry_update_type_get, _Backend.MinimalMatchListEntry_update_type_set)
    __swig_setmethods__["index"] = _Backend.MinimalMatchListEntry_index_set
    __swig_getmethods__["index"] = _Backend.MinimalMatchListEntry_index_get
    if _newclass:
        index = _swig_property(_Backend.MinimalMatchListEntry_index_get, _Backend.MinimalMatchListEntry_index_set)
    __swig_setmethods__["move_cell_i"] = _Backend.MinimalMatchListEntry_move_cell_i_set
    __swig_getmethods__["move_cell_i"] = _Backend.MinimalMatchListEntry_move_cell_i_get
    if _newclass:
        move_cell_i = _swig_property(_Backend.MinimalMatchListEntry_move_cell_i_get, _Backend.MinimalMatchListEntry_move_cell_i_set)
    __swig_setmethods__["move_cell_j"] = _Backend.MinimalMatchListEntry_move_cell_j_set
    __swig_getmethods__["move_cell_j"] = _Backend.MinimalMatchListEntry_move_cell_j_get
    if _newclass:
        move_cell_j = _swig_property(_Backend.MinimalMatchListEntry_move_cell_j_get, _Backend.MinimalMatchListEntry_move_cell_j_set)
    __swig_setmethods__["move_cell_k"] = _Backend.MinimalMatchListEntry_move_cell_k_set
    __swig_getmethods__["move_cell_k"] = _Backend.MinimalMatchListEntry_move_cell_k_get
    if _newclass:
        move_cell_k = _swig_property(_Backend.MinimalMatchListEntry_move_cell_k_get, _Backend.MinimalMatchListEntry_move_cell_k_set)
    __swig_setmethods__["move_basis"] = _Backend.MinimalMatchListEntry_move_basis_set
    __swig_getmethods__["move_basis"] = _Backend.MinimalMatchListEntry_move_basis_get
    if _newclass:
        move_basis = _swig_property(_Backend.MinimalMatchListEntry_move_basis_get, _Backend.MinimalMatchListEntry_move_basis_set)
    __swig_setmethods__["distance"] = _Backend.MinimalMatchListEntry_distance_set
    __swig_getmethods__["distance"] = _Backend.MinimalMatchListEntry_distance_get
    if _newclass:
        distance = _swig_property(_Backend.MinimalMatchListEntry_distance_get, _Backend.MinimalMatchListEntry_distance_set)
    __swig_setmethods__["coordinate"] = _Backend.MinimalMatchListEntry_coordinate_set
    __swig_getmethods__["coordinate"] = _Backend.MinimalMatchListEntry_coordinate_get
    if _newclass:
        coordinate = _swig_property(_Backend.MinimalMatchListEntry_coordinate_get, _Backend.MinimalMatchListEntry_coordinate_set)
    __swig_setmethods__["move_coordinate"] = _Backend.MinimalMatchListEntry_move_coordinate_set
    __swig_getmethods__["move_coordinate"] = _Backend.MinimalMatchListEntry_move_coordinate_get
    if _newclass:
        move_coordinate = _swig_property(_Backend.MinimalMatchListEntry_move_coordinate_get, _Backend.MinimalMatchListEntry_move_coordinate_set)

    def __init__(self):
        this = _Backend.new_MinimalMatchListEntry()
        # ``self.this`` is a list on new-style SWIG proxies, absent otherwise.
        try:
            self.this.append(this)
        except:
            self.this = this
    __swig_destroy__ = _Backend.delete_MinimalMatchListEntry
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime type system.
MinimalMatchListEntry_swigregister = _Backend.MinimalMatchListEntry_swigregister
MinimalMatchListEntry_swigregister(MinimalMatchListEntry)
class SimulationTimer(_object):
    """SWIG proxy for the C++ ``SimulationTimer`` (simulation clock driven by the total rate)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimulationTimer, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SimulationTimer, name)
    __repr__ = _swig_repr

    def __init__(self):
        this = _Backend.new_SimulationTimer()
        # ``self.this`` is a list on new-style SWIG proxies, absent otherwise.
        try:
            self.this.append(this)
        except:
            self.this = this

    def propagateTime(self, total_rate):
        return _Backend.SimulationTimer_propagateTime(self, total_rate)
    def simulationTime(self):
        return _Backend.SimulationTimer_simulationTime(self)
    __swig_destroy__ = _Backend.delete_SimulationTimer
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime type system.
SimulationTimer_swigregister = _Backend.SimulationTimer_swigregister
SimulationTimer_swigregister(SimulationTimer)
# Access point for global C variables exported by the backend module.
cvar = _Backend.cvar
class RateCalculator(_object):
    """SWIG director proxy for the C++ ``RateCalculator`` base class.

    Python subclasses may override methods that C++ calls back into (SWIG's
    "director" feature); ``__disown__`` transfers ownership of the wrapped
    object to the C++ side.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, RateCalculator, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, RateCalculator, name)
    __repr__ = _swig_repr

    def __init__(self):
        # Director pattern: pass None when instantiating the base class
        # directly, or self for Python subclasses so that C++ can call
        # back into the overriding Python methods.
        if self.__class__ == RateCalculator:
            _self = None
        else:
            _self = self
        this = _Backend.new_RateCalculator(_self, )
        try:
            self.this.append(this)
        except:
            self.this = this
    __swig_destroy__ = _Backend.delete_RateCalculator
    __del__ = lambda self: None

    def backendRateCallback(self, geometry, len, types_before, types_after, rate_constant, process_number, global_x, global_y, global_z):
        # NOTE: the ``len`` parameter shadows the builtin (SWIG-generated signature).
        return _Backend.RateCalculator_backendRateCallback(self, geometry, len, types_before, types_after, rate_constant, process_number, global_x, global_y, global_z)

    def __disown__(self):
        # Hand ownership of the wrapped C++ object over to C++.
        self.this.disown()
        _Backend.disown_RateCalculator(self)
        return weakref_proxy(self)
# Register the proxy class with the SWIG runtime type system.
RateCalculator_swigregister = _Backend.RateCalculator_swigregister
RateCalculator_swigregister(RateCalculator)
class SimpleDummyBaseClass(_object):
    """SWIG director proxy for the C++ ``SimpleDummyBaseClass`` (cross-language callback demo)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimpleDummyBaseClass, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SimpleDummyBaseClass, name)
    __repr__ = _swig_repr

    def __init__(self):
        # Director pattern: None for direct instantiation, self for Python
        # subclasses so C++ can dispatch back into Python overrides.
        if self.__class__ == SimpleDummyBaseClass:
            _self = None
        else:
            _self = self
        this = _Backend.new_SimpleDummyBaseClass(_self, )
        try:
            self.this.append(this)
        except:
            self.this = this
    __swig_destroy__ = _Backend.delete_SimpleDummyBaseClass
    __del__ = lambda self: None

    def whoAmI(self):
        return _Backend.SimpleDummyBaseClass_whoAmI(self)

    def __disown__(self):
        # Hand ownership of the wrapped C++ object over to C++.
        self.this.disown()
        _Backend.disown_SimpleDummyBaseClass(self)
        return weakref_proxy(self)
# Register the proxy class with the SWIG runtime type system.
SimpleDummyBaseClass_swigregister = _Backend.SimpleDummyBaseClass_swigregister
SimpleDummyBaseClass_swigregister(SimpleDummyBaseClass)
def callWhoAmI(obj):
    """Call ``obj.whoAmI()`` through the C++ layer (director round-trip)."""
    return _Backend.callWhoAmI(obj)
# SWIG immediately rebinds the name to the flat C function.
callWhoAmI = _Backend.callWhoAmI

def getRate(rc, geometry, types_before, types_after, rate_constant, process_number, global_x, global_y, global_z):
    """Invoke the rate calculation on ``rc`` through the C++ layer."""
    return _Backend.getRate(rc, geometry, types_before, types_after, rate_constant, process_number, global_x, global_y, global_z)
# SWIG immediately rebinds the name to the flat C function.
getRate = _Backend.getRate
class MPICommons(_object):
    """SWIG proxy for the C++ ``MPICommons`` utility class (static MPI helpers).

    The ``lambda x: _Backend.MPICommons_*`` entries ignore their argument and
    return the flat C function — the SWIG 2.x classic-class pattern for static
    methods; under new-style classes the real ``staticmethod`` is used instead.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, MPICommons, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, MPICommons, name)
    __repr__ = _swig_repr
    __swig_getmethods__["init"] = lambda x: _Backend.MPICommons_init
    if _newclass:
        init = staticmethod(_Backend.MPICommons_init)
    __swig_getmethods__["finalize"] = lambda x: _Backend.MPICommons_finalize
    if _newclass:
        finalize = staticmethod(_Backend.MPICommons_finalize)
    __swig_getmethods__["myRank"] = lambda x: _Backend.MPICommons_myRank
    if _newclass:
        myRank = staticmethod(_Backend.MPICommons_myRank)
    __swig_getmethods__["size"] = lambda x: _Backend.MPICommons_size
    if _newclass:
        size = staticmethod(_Backend.MPICommons_size)
    __swig_getmethods__["barrier"] = lambda x: _Backend.MPICommons_barrier
    if _newclass:
        barrier = staticmethod(_Backend.MPICommons_barrier)
    __swig_getmethods__["isMaster"] = lambda x: _Backend.MPICommons_isMaster
    if _newclass:
        isMaster = staticmethod(_Backend.MPICommons_isMaster)

    def __init__(self):
        this = _Backend.new_MPICommons()
        # ``self.this`` is a list on new-style SWIG proxies, absent otherwise.
        try:
            self.this.append(this)
        except:
            self.this = this
    __swig_destroy__ = _Backend.delete_MPICommons
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime type system.
MPICommons_swigregister = _Backend.MPICommons_swigregister
MPICommons_swigregister(MPICommons)
# Module-level aliases for the MPICommons static methods.  SWIG emits a thin
# Python ``def`` for documentation purposes and then rebinds each name straight
# to the flat C function.
def MPICommons_init():
    return _Backend.MPICommons_init()
MPICommons_init = _Backend.MPICommons_init

def MPICommons_finalize():
    return _Backend.MPICommons_finalize()
MPICommons_finalize = _Backend.MPICommons_finalize

def MPICommons_myRank(*args):
    return _Backend.MPICommons_myRank(*args)
MPICommons_myRank = _Backend.MPICommons_myRank

def MPICommons_size(*args):
    return _Backend.MPICommons_size(*args)
MPICommons_size = _Backend.MPICommons_size

def MPICommons_barrier(*args):
    return _Backend.MPICommons_barrier(*args)
MPICommons_barrier = _Backend.MPICommons_barrier

def MPICommons_isMaster(*args):
    return _Backend.MPICommons_isMaster(*args)
MPICommons_isMaster = _Backend.MPICommons_isMaster
class OnTheFlyMSD(_object):
    """SWIG proxy for the C++ on-the-fly MSD (mean square displacement) analyser.

    Accumulates per-step displacement histograms for the tracked type; the
    ``histogram*``/``history*``/``blocker*`` accessors expose the C++ buffers.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, OnTheFlyMSD, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, OnTheFlyMSD, name)
    __repr__ = _swig_repr

    def __init__(self, configuration, history_steps, n_bins, t_max, t0, track_type, abc_to_xyz, blocksize=0):
        this = _Backend.new_OnTheFlyMSD(configuration, history_steps, n_bins, t_max, t0, track_type, abc_to_xyz, blocksize)
        # ``self.this`` is a list on new-style SWIG proxies, absent otherwise.
        try:
            self.this.append(this)
        except:
            self.this = this

    def registerStep(self, time, configuration):
        return _Backend.OnTheFlyMSD_registerStep(self, time, configuration)
    def histogramBuffer(self):
        return _Backend.OnTheFlyMSD_histogramBuffer(self)
    def histogramBufferSqr(self):
        return _Backend.OnTheFlyMSD_histogramBufferSqr(self)
    def histogramBinCounts(self):
        return _Backend.OnTheFlyMSD_histogramBinCounts(self)
    def historyStepsHistogramBinCounts(self):
        return _Backend.OnTheFlyMSD_historyStepsHistogramBinCounts(self)
    def historyBuffer(self):
        return _Backend.OnTheFlyMSD_historyBuffer(self)
    def hstepCounts(self):
        return _Backend.OnTheFlyMSD_hstepCounts(self)
    def blockerValues(self):
        return _Backend.OnTheFlyMSD_blockerValues(self)
    __swig_destroy__ = _Backend.delete_OnTheFlyMSD
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime type system.
OnTheFlyMSD_swigregister = _Backend.OnTheFlyMSD_swigregister
OnTheFlyMSD_swigregister(OnTheFlyMSD)
# Flat module-level wrappers; SWIG immediately rebinds each name to the C function.
def calculateAndBinMSD(history, abc_to_xyz, binsize, histogram, histogram_sqr, bin_counters, hsteps_bin_counts, hstep_counts, blocker):
    return _Backend.calculateAndBinMSD(history, abc_to_xyz, binsize, histogram, histogram_sqr, bin_counters, hsteps_bin_counts, hstep_counts, blocker)
calculateAndBinMSD = _Backend.calculateAndBinMSD

def seedRandom(time_seed, seed):
    return _Backend.seedRandom(time_seed, seed)
seedRandom = _Backend.seedRandom

def randomDouble01():
    return _Backend.randomDouble01()
randomDouble01 = _Backend.randomDouble01
# This file is compatible with both classic and new-style classes.
| StarcoderdataPython |
#!/usr/bin/env python3
"""CGI login handler.

Compares the submitted username/password against the values stored in the
``secret`` module.  On success it sets identification cookies (first visit
only) and renders the secret page; on failure it renders the login-incorrect
page from ``templates``.
"""
import cgi, cgitb, os
from secret import username, password
from templates import _wrapper, secret_page, after_login_incorrect

# Create instance of FieldStorage (parses the GET/POST form data).
form = cgi.FieldStorage()

# Get data from fields.
user_name = form.getvalue('username')
pwd = form.getvalue('password')

# Check if the submitted credentials match.
if user_name == username and pwd == password:
    # BUG FIX: HTTP_COOKIE is absent from the environment on the very first
    # visit, so indexing os.environ['HTTP_COOKIE'] raised KeyError in exactly
    # the case this branch is meant to detect.  Use .get() with a default.
    cookies = os.environ.get('HTTP_COOKIE', '')
    if not cookies:
        # SECURITY NOTE: storing the raw password in a cookie is unsafe;
        # kept only for parity with the original exercise.
        print("Set-Cookie:Username = %s;" %(username))
        print("Set-Cookie:Password = %s;" %(password))
    # Blank line after the headers terminates the HTTP header section.
    print("Content-type:text/html\r\n\r\n")
    print("<html>")
    print("<head>")
    print("<title>Cookies Set - Second CGI Program</title>")
    print("</head>")
    print("<body>")
    print(secret_page(username, password))
    # Question 5
    print("<p>Question 5:</p>")
    # Use the already-read value instead of re-indexing the environment
    # (which would raise KeyError when no cookie header was sent).
    print("<p>Cookies: %s</p>" % (cookies))
    print("</body>")
    print("</html>")
else:
    print("Content-type:text/html\r\n\r\n")
    print("<html>")
    print("<head>")
    print("<title>Login Fail - Second CGI Program</title>")
    print("</head>")
    print("<body>")
    print(after_login_incorrect())
    print("</body>")
    print("</html>")
3257884 | <reponame>lcsm29/project-euler<filename>py/py_0597_torpids.py<gh_stars>0
# Solution of;
# Project Euler Problem 597: Torpids
# https://projecteuler.net/problem=597
#
# The Torpids are rowing races held annually in Oxford, following some curious
# rules:A division consists of $n$ boats (typically 13), placed in order based
# on past performance. All boats within a division start at 40 metre intervals
# along the river, in order with the highest-placed boat starting furthest
# upstream. The boats all start rowing simultaneously, upstream, trying to
# catch the boat in front while avoiding being caught by boats behind. Each
# boat continues rowing until either it reaches the finish line or it catches
# up with ("bumps") a boat in front. The finish line is a distance $L$ metres
# (the course length, in reality about 1800 metres) upstream from the starting
# position of the lowest-placed boat. (Because of the staggered starting
# positions, higher-placed boats row a slightly shorter course than
# lower-placed boats. )When a "bump" occurs, the "bumping" boat takes no
# further part in the race. The "bumped" boat must continue, however, and may
# even be "bumped" again by boats that started two or more places behind it.
# After the race, boats are assigned new places within the division, based on
# the bumps that occurred. Specifically, for any boat $A$ that started in a
# lower place than $B$, then $A$ will be placed higher than $B$ in the new
# order if and only if one of the following occurred: $A$ bumped $B$ directly
# $A$ bumped another boat that went on to bump $B$ $A$ bumped another boat,
# that bumped yet another boat, that bumped $B$ etc NOTE: For the purposes of
# this problem you may disregard the boats' lengths, and assume that a bump
# occurs precisely when the two boats draw level. (In reality, a bump is
# awarded as soon as physical contact is made, which usually occurs when there
# is much less than a full boat length's overlap. )Suppose that, in a
# particular race, each boat $B_j$ rows at a steady speed $v_j = -$log$X_j$
# metres per second, where the $X_j$ are chosen randomly (with uniform
# distribution) between 0 and 1, independently from one another. These speeds
# are relative to the riverbank: you may disregard the flow of the river. Let
# $p(n,L)$ be the probability that the new order is an even permutation of the
# starting order, when there are $n$ boats in the division and $L$ is the
# course length. For example, with $n=3$ and $L=160$, labelling the boats as
# $A$,$B$,$C$ in starting order with $C$ highest, the different possible
# outcomes of the race are as follows: Bumps occurring New order Permutation
# Probability none $A$, $B$, $C$ even $4/15$ $B$ bumps $C$ $A$, $C$, $B$ odd
# $8/45$ $A$ bumps $B$ $B$, $A$, $C$ odd $1/3$ $B$ bumps $C$, then $A$ bumps
# $C$ $C$, $A$, $B$ even $4/27$ $A$ bumps $B$, then $B$ bumps $C$ $C$, $B$,
# $A$ odd $2/27$ Therefore, $p(3,160) = 4/15 + 4/27 = 56/135$. You are also
# given that $p(4,400)=0. 5107843137$, rounded to 10 digits after the decimal
# point. Find $p(13,1800)$ rounded to 10 digits after the decimal point.
#
# by lcsm29 http://github.com/lcsm29/project-euler
import timed
def dummy(n):
    """Placeholder solver for Project Euler problem 597; accepts *n* and does nothing."""
    return None
if __name__ == '__main__':
    # Driver for the project-local ``timed`` benchmarking helper.
    # NOTE(review): n and i presumably control repetition/iteration counts of
    # the timing loop — confirm against timed.caller's signature.
    n = 1000
    i = 10000
    prob_id = 597  # Project Euler problem number, used for reporting.
    timed.caller(dummy, n, i, prob_id)
| StarcoderdataPython |
6612989 | <filename>__init__.py<gh_stars>0
from mycroft import MycroftSkill, intent_file_handler
class Fry(MycroftSkill):
    """Mycroft skill that answers the 'fry' intent with a canned dialog."""

    def __init__(self):
        # Idiom fix: use super() instead of the legacy explicit
        # MycroftSkill.__init__(self); equivalent behavior in this
        # single-inheritance case, and cooperative-MRO safe.
        super().__init__()

    @intent_file_handler('fry.intent')
    def handle_fry(self, message):
        """Speak the 'fry' dialog when the intent fires.

        :param message: the Mycroft message bus event that triggered the intent.
        """
        self.speak_dialog('fry')
def create_skill():
    # Factory entry point required by the Mycroft skill loader.
    return Fry()
| StarcoderdataPython |
6525 | # -*- coding: utf-8 -*-
import os
from django.db import models
from django.db.models.signals import post_delete
from django.dispatch import receiver
from .base import Pessoa
from djangosige.apps.login.models import Usuario
from djangosige.configs.settings import MEDIA_ROOT
def logo_directory_path(instance, filename):
    """Build the upload path for a company logo, preserving the file extension.

    :param instance: the Empresa being saved (uses nome_razao_social and id).
    :param filename: original name of the uploaded file.
    :return: relative media path 'imagens/empresas/logo_<name>_<id><ext>'.
    """
    _, ext = os.path.splitext(filename)
    return f'imagens/empresas/logo_{instance.nome_razao_social}_{instance.id}{ext}'
class Empresa(Pessoa):
    """Company record: extends ``Pessoa`` with CNAE/IEST identifiers and a logo.

    ``logo_file`` defaults to the shared placeholder ``imagens/logo.png``,
    which must never be deleted from disk.
    """
    logo_file = models.ImageField(
        upload_to=logo_directory_path, default='imagens/logo.png', blank=True, null=True)
    cnae = models.CharField(max_length=10, blank=True, null=True)
    iest = models.CharField(max_length=32, null=True, blank=True)

    class Meta:
        verbose_name = "Empresa"

    @property
    def caminho_completo_logo(self):
        """Absolute filesystem path of the logo, or '' for the default placeholder."""
        if self.logo_file.name != 'imagens/logo.png':
            return os.path.join(MEDIA_ROOT, self.logo_file.name)
        else:
            return ''

    def save(self, *args, **kwargs):
        # Delete the previously stored logo when it is being replaced
        # (but never the shared default image).
        # BUG FIX: the original used a bare ``except:`` which also swallowed
        # programming errors; only the "row does not exist yet" case
        # (first save, id is None or unknown) is expected here.
        try:
            obj = Empresa.objects.get(id=self.id)
            if obj.logo_file != self.logo_file and obj.logo_file != 'imagens/logo.png':
                obj.logo_file.delete(save=False)
        except Empresa.DoesNotExist:
            pass
        super(Empresa, self).save(*args, **kwargs)

    def __unicode__(self):
        # Legacy Python 2 text hook; kept for backward compatibility.
        return u'%s' % self.nome_razao_social

    def __str__(self):
        return u'%s' % self.nome_razao_social
# Delete the logo file from disk when an Empresa row is deleted.
@receiver(post_delete, sender=Empresa)
def logo_post_delete_handler(sender, instance, **kwargs):
    # Never delete the shared default image 'logo.png'.
    # NOTE(review): comparing a FieldFile against a str relies on Django's
    # FieldFile equality using the stored name — confirm for the installed
    # Django version.
    if instance.logo_file != 'imagens/logo.png':
        instance.logo_file.delete(False)
class MinhaEmpresa(models.Model):
    """Associates a login user with the company he operates as ("my company")."""
    # Optional selected company; rows are cascade-deleted with the company.
    m_empresa = models.ForeignKey(
        Empresa, on_delete=models.CASCADE, related_name='minha_empresa', blank=True, null=True)
    # Owning user; rows are cascade-deleted with the user.
    m_usuario = models.ForeignKey(
        Usuario, on_delete=models.CASCADE, related_name='empresa_usuario')
3242243 | <reponame>NikhilNarayana/pyforms-lite
# !/usr/bin/python
# -*- coding: utf-8 -*-
from pyforms_lite.gui.controls.ControlBase import ControlBase
from AnyQt.QtWidgets import QTreeWidget, QTreeWidgetItem, QTreeView, QAbstractItemView, QAction
from AnyQt.QtGui import QIcon, QKeySequence
from AnyQt import QtCore
class ControlTree(ControlBase, QTreeWidget):
"""This class represents a wrapper to the QTreeWidget"""
def __init__(self, *args, **kwargs):
QTreeWidget.__init__(self)
ControlBase.__init__(self, *args, **kwargs)
def init_form(self):
self.setSelectionBehavior(QAbstractItemView.SelectRows)
self.setUniformRowHeights(True)
self.setDragDropMode(QAbstractItemView.NoDragDrop)
self.setDragEnabled(False)
self.setAcceptDrops(False)
self.model().dataChanged.connect(self.__itemChangedEvent)
self.itemDoubleClicked.connect(self.__itemDoubleClicked)
self.selectionChanged = self.selectionChanged
self._items = {}
def __repr__(self):
return QTreeWidget.__repr__(self)
##########################################################################
############ FUNCTIONS ###################################################
##########################################################################
def __add__(self, other):
if isinstance(other, QTreeWidgetItem):
self.invisibleRootItem().addChild(other)
elif isinstance(other, str):
item = QTreeWidgetItem(other)
self.invisibleRootItem().addChild(item)
elif isinstance(other, list):
for x in other:
if isinstance(x, str):
item = QTreeWidgetItem(x)
self.invisibleRootItem().addChild(item)
else:
self.invisibleRootItem().addChild(x)
else:
item = QTreeWidgetItem(other)
self.invisibleRootItem().addChild(item)
# self.setFirstColumnSpanned( self.model().rowCount() - 1, self.rootIndex(), True)
return self
def __remove_recursively(self, parent, item_2_remove):
if parent is None: return
for i in range(parent.childCount()):
child = parent.child(i)
if child == item_2_remove:
parent.removeChild(child)
else:
self.__remove_recursively(child, item_2_remove)
def __sub__(self, other):
if isinstance(other, int):
if other < 0:
indexToRemove = self.selected_row_index
else:
indexToRemove = other
self.model().removeRow(indexToRemove)
else:
self.__remove_recursively(self.invisibleRootItem(), other)
return self
def save_form(self, data, path=None):
pass
def load_form(self, data, path=None):
pass
def add_popup_menu_option(self, label='', function_action=None, key=None, item=None, icon=None, submenu=None):
"""
Add an option to the Control popup menu
@param label: label of the option.
@param function_action: function called when the option is selected.
@param key: shortcut key
@param key: shortcut key
"""
action = super(ControlTree, self).add_popup_menu_option(label, function_action, key, submenu)
if item is not None:
if label == "-":
self._items[id(item)].append(label)
else:
action = QAction(label, self.form)
if icon is not None:
action.setIconVisibleInMenu(True)
action.setIcon(QIcon(icon))
if key is not None:
action.setShortcut(QKeySequence(key))
if function_action:
action.triggered.connect(function_action)
# Associate action to the item.
if id(item) not in self._items.keys():
self._items.update({id(item): []})
self._items[id(item)].append(action)
##########################
return action
return action
def clear(self):
super(ControlTree, self).clear()
if self._popup_menu:
self._popup_menu.clear()
self._items = {}
def expand_item(self, item, expand=True, parents=True):
item.setExpanded(expand)
if parents:
parent = item.parent()
while (True):
try:
parent.setExpanded(expand)
parent = parent.parent()
except AttributeError:
break
def create_child(self, name, parent=None, icon=None):
"""
Create a new child for to the parent item.
If the parent is None it add to the root.
"""
item = QTreeWidgetItem(self, [name]) if (
parent is None) else QTreeWidgetItem(parent, [name])
if icon is not None:
if isinstance(icon, str):
item.setIcon(0, QIcon(icon))
elif isinstance(icon, QIcon):
item.setIcon(0, icon)
return item
##########################################################################
############ EVENTS ######################################################
##########################################################################
def item_changed_event(self, item):
pass
def item_selection_changed_event(self):
pass
def item_double_clicked_event(self, item):
pass
def key_press_event(self, event):
pass
def rows_inserted_event(self, parent, start, end):
""" This event is called every time a new row is added to the tree"""
pass
##########################################################################
############ PROPERTIES ##################################################
##########################################################################
@property
def show_header(self):
return self.header().isVisible()
@show_header.setter
def show_header(self, value):
self.header().show() if value else self.header().hide()
@property
def selected_rows_indexes(self):
result = []
for index in self.selectedIndexes():
result.append(index.row())
return list(set(result))
@property
def selected_row_index(self):
indexes = self.selected_rows_indexes
if len(indexes) > 0:
return indexes[0]
else:
return None
@selected_row_index.setter
def selected_row_index(self, value):
self.setCurrentCell(value)
@property
def selected_item(self):
return self.selectedItems()[0] if len(self.selectedItems()) > 0 else None
@selected_item.setter
def selected_item(self, value):
self.setCurrentItem(value)
@property
def form(self):
return self
@property
def value(self):
root = self.invisibleRootItem()
return [root.child(i) for i in range(root.childCount())]
@value.setter
def value(self, value):
if isinstance(value, list):
for x in value:
self += x
else:
self += value
@property
def icon_size(self):
size = self.iconSize()
return size.width(), size.height()
@icon_size.setter
def icon_size(self, value):
self.setIconSize(QtCore.QSize(*value))
##########################################################################
############ PRIVATE FUNCTIONS ###########################################
##########################################################################
def __itemChangedEvent(self, item):
self.item_changed_event(item)
def rowsInserted(self, parent, start, end):
super(ControlTree, self).rowsInserted(parent, start, end)
self.rows_inserted_event(parent, start, end)
def selectionChanged(self, selected, deselected):
super(QTreeView, self).selectionChanged(selected, deselected)
self.item_selection_changed_event()
def __itemDoubleClicked(self, item, column):
if hasattr(item, 'double_clicked_event'): item.double_clicked_event()
self.item_double_clicked_event(item)
def keyPressEvent(self, event):
QTreeView.keyPressEvent(self, event)
item = self.selected_item
if hasattr(item, 'key_pressed_event'): item.key_pressed_event(event)
self.key_press_event(event)
def about_to_show_contextmenu_event(self):
"""
Function called before open the Control popup menu
"""
if len(self._items) > 0: # Reset the menu and construct a new one only if there are actions for the items.
self._popup_menu.clear()
itemSelected = self.selectedItems()[0]
if id(itemSelected) in self._items:
for action in self._items[id(itemSelected)]:
if action == '-':
self._popup_menu.addSeparator()
else:
self._popup_menu.addAction(action)
# print("Adding action {action} to {item}".format(
# action=action.text(), item=itemSelected))
def clone_item(self, parent, item, copy_function=None):
new_item = QTreeWidgetItem()
for col_index in range(item.columnCount()):
new_item.setText(col_index, item.text(col_index))
new_item.setIcon(col_index, item.icon(col_index))
if copy_function is not None: copy_function(item, new_item)
parent.addChild(new_item)
for child_index in range(item.childCount()):
child_item = item.child(child_index)
self.clone_item(new_item, child_item, copy_function)
def clone_tree(self, tree, copy_function=None):
for item in tree.value:
self.clone_item(self.invisibleRootItem(), item, copy_function)
| StarcoderdataPython |
8132580 | import tqdm
import argparse
import numpy as np
import datetime
import time
import spacy
import pandas as pd
from sklearn.metrics import f1_score
from torch import optim
import torch.nn as nn
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
from preprocess_utils import *
from train import train_model, test_model
from models import BasicLSTM, BiLSTM
from utils import *
import captum
from captum.attr import LayerIntegratedGradients, TokenReferenceBase, visualization
# Module-level spaCy pipeline (loaded once at import time).
spacy_en = spacy.load("en_core_web_sm")

def batch_model_explainability(model, vocab_stoi, vocab_itos, dataloaders, field, device):
    """
    Attribute word importances for each test sample using Captum's
    Layer Integrated Gradients on the model's embedding layer.
    (The original docstring said "LIME", but the code uses LIG.)

    Returns the list of visualization records accumulated by
    ``interpret_sentence`` for every batch of the test dataloader.
    """
    print("\n\n**MODEL EXPLAINABILITY**\n")
    # NOTE(review): the +1 offset disagrees with the commented-out
    # vocab_stoi lookup — confirm the index base of the embedding table.
    PAD_IND = field.vocab.stoi[field.pad_token] +1 #vocab_stoi[field.pad_token]
    print('PAD_IND', PAD_IND)
    # Baseline: a sequence of padding tokens for the integrated-gradients path.
    token_reference = TokenReferenceBase(reference_token_idx=PAD_IND)
    lig = LayerIntegratedGradients(model, model.emb)
    # accumalate couple samples in this array for visualization purposes
    vis_data_records_ig = []
    phase = "test"
    # NOTE(review): train() (not eval()) on the test set — presumably needed
    # so gradients flow for attribution; confirm dropout/batch-norm impact.
    model.train()
    nb_batches = len(dataloaders[phase])
    length_phase = len(dataloaders[phase].dataset)
    pbar = tqdm.tqdm([i for i in range(nb_batches)])
    # Iterate over data.
    # batch_size is set to 1.
    for batch_idx, (inputs, labels) in enumerate(dataloaders[phase]):
        pbar.update()
        pbar.set_description("Processing batch %s" % str(batch_idx+1))
        labels = int(labels)
        # forward
        # track history if only in train
        with torch.set_grad_enabled(True):
            #output = model.forward(inputs)
            #print(output)
            interpret_sentence(model, field, inputs, vocab_stoi, vocab_itos,
                device, vis_data_records_ig, token_reference, lig, min_len = 7, label = labels)
        # break
    pbar.close()
    return vis_data_records_ig
def convert_token_to_str(input_token, vocab_stoi, vocab_itos):
    """Map a sequence of token indices back to a space-terminated string.

    Args:
        input_token: iterable of int token indices.
        vocab_stoi: unused; kept for signature compatibility with callers.
        vocab_itos: index -> token string lookup (list or dict).

    Returns:
        The looked-up tokens, each followed by a single space (so the
        result carries a trailing space, matching the original behaviour).
    """
    # str.join is linear; the previous repeated ``+=`` on str was quadratic.
    return "".join(vocab_itos[tok] + " " for tok in input_token)
def interpret_sentence(model, field, inputs, vocab_stoi, vocab_itos, device, vis_data_records_ig, token_reference, lig, min_len = 7, label = 0):
    """Attribute one sentence with Layer Integrated Gradients and append the
    result (as a VisualizationDataRecord) to *vis_data_records_ig*.

    *inputs* is a token-index tensor of shape [sequence_length, 1]
    (batch size 1). Sentences shorter than *min_len* are right-padded.
    """
    # PAD_IND = vocab_stoi[field.pad_token]
    indexed = [int(inputs[i,0]) for i in range(inputs.shape[0])]
    if len(indexed) < min_len :
        indexed +=[vocab_stoi[field.pad_token]] * (min_len - len(indexed))
    print("indexed", indexed)
    sentence = convert_token_to_str(indexed, vocab_stoi, vocab_itos)
    # print("sentence", sentence)
    text = [vocab_itos[tok] for tok in indexed]
    if len(text) < min_len:
        text += [vocab_itos[field.pad_token]] * (min_len - len(text))
    print("text", text)
    indexed = [vocab_stoi[t] for t in text]
    input_indices = torch.tensor(indexed, device=device)
    model.zero_grad()
    # input_indices = torch.tensor(inputs, device=device)
    input_indices = input_indices.unsqueeze(0)
    # input_indices dim: [sequence_length]
    seq_length = inputs.shape[0]
    # NOTE(review): this overwrite makes the rebuilt/padded ``input_indices``
    # above dead code -- attribution runs on the raw ``inputs``. Confirm
    # whether the padded tensor was meant to be used instead.
    input_indices = inputs
    # predict
    # print("inputs indices", input_indices.shape)
    out = model.forward(inputs)
    out = torch.sigmoid(out)
    pred = out.item()
    pred_ind = round(pred)
    # generate reference indices for each sample
    reference_indices = token_reference.generate_reference(seq_length, device=device).unsqueeze(0).permute(1, 0)
    print("ref_indices", reference_indices.shape)
    # compute attributions and approximation delta using layer integrated gradients
    attributions_ig, delta = lig.attribute(input_indices, reference_indices, \
                                           n_steps=500, return_convergence_delta=True)
    class_names = ["Neutral","Hate"]
    print('pred: ', class_names[pred_ind], '(', '%.2f'%pred, ')', ', delta: ', abs(delta))
    add_attributions_to_visualizer(attributions_ig, vocab_itos, text, pred, pred_ind, label, delta, vis_data_records_ig)
def add_attributions_to_visualizer(attributions, vocab_itos, text, pred, pred_ind, label, delta, vis_data_records):
    """Reduce per-embedding-dim attributions to one score per token, L2-normalize
    them, and append a captum VisualizationDataRecord to *vis_data_records*.
    """
    # Collapse the embedding dimension, then drop the batch dimension.
    attributions = attributions.sum(dim=2).squeeze(0)
    attributions = attributions / torch.norm(attributions)
    attributions = attributions.cpu().detach().numpy()
    print(attributions.shape)
    class_names = ["Neutral", "Hate"]
    # storing couple samples in an array for visualization purposes
    # (predicted class, true class, and the attribution target class "Hate").
    vis_data_records.append(visualization.VisualizationDataRecord(
                            attributions,
                            pred,
                            class_names[pred_ind],
                            class_names[label],
                            class_names[1],
                            attributions.sum(),
                            text,
                            delta))
if __name__ == '__main__':
    # CLI entry point: load a trained model, run captum attributions over the
    # test split, and render the token-level visualizations.
    parser = argparse.ArgumentParser()
    parser.add_argument("--training_data", help="unprocessed OLID training dataset", default="data/training_data/offenseval-training-v1.tsv")
    parser.add_argument("--testset_data", help="unprocessed OLID testset dataset", default="data/test_data/testset-levela.tsv")
    parser.add_argument("--test_labels_data", help="unprocessed OLID test labels dataset", default="data/test_data/labels-levela.csv")
    parser.add_argument("--model", help="model to use. Choices are: BasicLSTM, ...", default='BiLSTM')
    parser.add_argument("--batch_size", help="batch size", type=int, default=1)
    parser.add_argument("--lr", help="learning rate", type=float, default=1e-3)
    parser.add_argument("--optimizer_type", help="optimizer: adam, sgd", default='adam')
    parser.add_argument("--loss_criterion", help="loss function: bceloss, crossentropy", default='bceloss')
    # NOTE(review): the help text below looks copy-pasted from --device.
    parser.add_argument("--epochs", default=10, help="cpu or cuda for gpu", type=int)
    parser.add_argument("--patience_es", default=2, help="nb epochs before early stopping", type=int)
    parser.add_argument("--do_save", default=1, help="1 for saving stats and figures, else 0", type=int)
    parser.add_argument("--save_condition", help="save model with"+\
        " condition on best val_acc (acc) or lowest val_loss(loss)", default='acc')
    parser.add_argument("--device", default='' , help="cpu or cuda for gpu")
    parser.add_argument("--model_path", default='saved-models/BiLSTM_2021-12-03_23-58-08_trained_testAcc=0.5561.pth' , help="saved model to load")
    args = parser.parse_args()
    # Data processing
    training_data = args.training_data
    testset_data = args.testset_data
    test_labels_data = args.test_labels_data
    # Hyperparameters
    batch_size = args.batch_size
    epochs = args.epochs
    patience_es = args.patience_es
    lr = args.lr
    optimizer_type = args.optimizer_type
    loss_criterion = args.loss_criterion
    model_type = args.model
    do_save = args.do_save
    save_condition = args.save_condition
    saved_model_path = args.model_path
    # Fall back to auto-detected device unless explicitly requested.
    if args.device in ['cuda', 'cpu']:
        device = args.device
    else:
        device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
    print("Device:", device)
    field, tokenizer, train_data, val_data, test_data = get_datasets(training_data, testset_data, test_labels_data)
    vocab_stoi, vocab_itos = get_vocab_stoi_itos(field)
    dataloaders = get_dataloaders(train_data, val_data, test_data, batch_size, device)
    model = load_model(model_type,field,device)
    model = load_trained_model(model, saved_model_path, device)
    # lime_explainability(model, vocab_stoi, vocab_itos, dataloaders)
    #
    vis_data_records_ig = batch_model_explainability(model, vocab_stoi, vocab_itos, dataloaders, field, device)
    print(vis_data_records_ig)
    visualization.visualize_text(vis_data_records_ig)
| StarcoderdataPython |
# backend/sellers/tests/test_models.py
from django.contrib.auth import get_user_model
from django.test import TestCase
from sellers.models import Seller
class SellerModelTest(TestCase):
    """Model-layer checks for sellers.models.Seller."""

    def setUp(self):
        """Create one owner user and one Seller owned by them."""
        self.user = get_user_model().objects.create_user(
            username="testuser",
            email="<EMAIL>",
            password="<PASSWORD>",
        )
        self.seller = Seller.objects.create(
            name="Test Company LLC",
            description="The best company worldwide",
            email="<EMAIL>",
            address1="baker street 555, London, UK",
            zip_code="1086",
            city="London",
            country="uk",
            owner=self.user,
        )

    def test_seller_listing(self):
        """Every persisted field reads back exactly as written."""
        seller = self.seller
        string_fields = (
            (seller.name, 'Test Company LLC'),
            (seller.description, 'The best company worldwide'),
            (seller.email, '<EMAIL>'),
            (seller.address1, 'baker street 555, London, UK'),
            (seller.zip_code, '1086'),
            (seller.city, 'London'),
        )
        # Stringify via f-string, same as the original assertions did.
        for value, expected in string_fields:
            self.assertEqual(f'{value}', expected)
        self.assertEqual(seller.owner, self.user)
| StarcoderdataPython |
# repo: yihenghu/Aristolochia_fimbriata_genome_analysis
import glob
import re
from Bio import SeqIO
def rm_3(seq):
    """Drop every third character (1-based positions 3, 6, 9, ...) of *seq*.

    For a codon alignment this keeps only the first and second codon
    positions. *seq* may be any object whose str() is the sequence.
    """
    return ''.join(
        char for position, char in enumerate(str(seq), start=1)
        if position % 3 != 0
    )
def save_3(seq):
    """Keep only every third character (1-based positions 3, 6, 9, ...) of *seq*.

    For a codon alignment this extracts the third codon positions.
    *seq* may be any object whose str() is the sequence.
    """
    return ''.join(
        char for position, char in enumerate(str(seq), start=1)
        if position % 3 == 0
    )
# Collect the numeric IDs of every filtered orthologous-cluster alignment
# (OCG<id>.mafftHA.pruned.gb.filter.fa) in the working directory.
fileID_list = []
for file in glob.glob('*fa'):
    # Raw string: '\d' inside a plain literal is an invalid escape sequence
    # (DeprecationWarning since Python 3.6, SyntaxError in newer versions).
    fileID = int(
        re.search(
            r'OCG(\d+).mafftHA.pruned.gb.filter.fa',
            file).group(1))
    fileID_list.append(fileID)
# concatenated alignments of first and second codon positions
# Build one concatenated sequence per species across all clusters (in
# ascending cluster-ID order so every species gets the same column order).
sp2seq = {}
for fileID in sorted(fileID_list):
    seqRs = SeqIO.parse('OCG%s.mafftHA.pruned.gb.filter.fa' % fileID, 'fasta')
    for seqR in seqRs:
        # NOTE(review): '|' here is a regex alternation, not a literal pipe;
        # group(1) still captures the leading \w* of the record id, which is
        # presumably the species prefix before '|' -- confirm the intent was
        # r'(\w*)\|'.
        sp = re.search('(\w*)|', seqR.id).group(1)
        if sp in sp2seq:
            sp2seq[sp] += rm_3(seqR.seq)
        else:
            sp2seq[sp] = rm_3(seqR.seq)
with open('all.fa.12', 'w') as fw:
    for sp, seq in sp2seq.items():
        fw.write('>' + sp + '\n' + seq + '\n')
# concatenated alignments of third codon positions
# Same pass as above, but keeping only the third codon positions.
sp2seq = {}
for fileID in sorted(fileID_list):
    seqRs = SeqIO.parse('OCG%s.mafftHA.pruned.gb.filter.fa' % fileID, 'fasta')
    for seqR in seqRs:
        # NOTE(review): same regex oddity as the codon-1/2 loop above.
        sp = re.search('(\w*)|', seqR.id).group(1)
        if sp in sp2seq:
            sp2seq[sp] += save_3(seqR.seq)
        else:
            sp2seq[sp] = save_3(seqR.seq)
with open('all.fa.3', 'w') as fw:
    for sp, seq in sp2seq.items():
        fw.write('>' + sp + '\n' + seq + '\n')
| StarcoderdataPython |
3247424 | <gh_stars>0
from launch import LaunchDescription
from launch_ros.actions import Node
from launch.actions import ExecuteProcess
def generate_launch_description():
    """Launch Gazebo with the demo world and a keyboard teleop node.

    Returns a ROS 2 LaunchDescription that starts Gazebo on sdf/world.sdf
    and a teleop_twist_keyboard node whose velocity topic is remapped to
    the youbot's command topic.
    """
    # The trailing dataset-marker residue that was fused onto the closing
    # bracket of this file has been removed; it made the module unparsable.
    return LaunchDescription([
        ExecuteProcess(
            # cmd=['gazebo', '--verbose', 'sdf/world.sdf', '-s libgazebo_ros_factory.so'],
            cmd=['gazebo', '--verbose', 'sdf/world.sdf'],
            output='screen'
        ),
        Node(
            package='teleop_twist_keyboard',
            namespace='teleop_twist_keyboard',
            executable='teleop_twist_keyboard',
            # Route keyboard twists to the robot's command topic.
            remappings=[
                ('/teleop_twist_keyboard/cmd_vel','/youbot/cmd_vel')
            ],
            output='screen',
            # Run in a dedicated xterm so the node can grab keyboard input.
            prefix = 'xterm -e'
        )
    ])
# repo: HelloAny/nwalgo
import itertools
import copy
def compile(seq1, seq2, score_dic, method):
    """Global/semi-global/local pairwise alignment with affine-ish gap scoring.

    NOTE: the name shadows the ``compile`` builtin; kept for API compatibility.

    Args:
        seq1, seq2: sequences to align (indexed with seq1 as columns,
            seq2 as rows; index 0 of each is treated as the boundary cell).
        score_dic: dict with keys 'match', 'mismatch', 'gap' (gap open)
            and 'extgap' (gap extension).
        method: scoring variant -- 3 clamps negative cells to 0 (local,
            Smith-Waterman style); 2 additionally scans whole rows/columns
            for better long-gap scores; any other value is plain global
            (Needleman-Wunsch style).

    Returns:
        A flat list of aligned sequence pairs [aln1, aln2, aln1', aln2', ...]
        covering every co-optimal traceback path.
    """
    # scoring matrix
    score_matrix = [[0 for column in range(len(seq1))]
                    for row in range(len(seq2))]
    # traceback: each cell holds the list of directions that achieve the max
    trace_back = [[[]for column in range(len(seq1))]
                  for row in range(len(seq2))]
    # Initialise the first row/column boundary scores and arrows
    # (skipped for method 3, where boundaries stay 0).
    if method != 3:
        for i in range(len(score_matrix[0])):
            score_matrix[0][i] = score_dic['gap']+(i-1)*score_dic['extgap']
            if i > 0:
                trace_back[0][i].append('left')
        for i in range(len(score_matrix)):
            score_matrix[i][0] = score_dic['gap']+(i-1)*score_dic['extgap']
            if i > 0:
                trace_back[i][0].append('up')
    else:
        for i in range(2, len(score_matrix[0])):
            trace_back[0][i].append('left')
        for i in range(2, len(score_matrix)):
            trace_back[i][0].append('up')
    # 'done' marks the origin cell where the traceback recursion stops.
    trace_back[0][0].append('done')
    # Main dynamic-programming fill.
    for i in range(1, len(score_matrix)):
        for j in range(1, len(score_matrix[0])):
            if seq1[j] == seq2[i]:
                char_score = score_dic['match']
            else:
                char_score = score_dic['mismatch']
            # A gap continuing an existing gap costs 'extgap', otherwise 'gap'.
            if 'up' in trace_back[i-1][j]:
                top_score = score_matrix[i - 1][j] + score_dic['extgap']
            else:
                top_score = score_matrix[i - 1][j] + score_dic['gap']
            if 'left' in trace_back[i][j-1]:
                left_score = score_matrix[i][j - 1] + score_dic['extgap']
            else:
                left_score = score_matrix[i][j - 1] + score_dic['gap']
            diag_score = score_matrix[i - 1][j - 1] + char_score
            score = max(top_score, left_score, diag_score)
            score_matrix[i][j] = score
            # Record every direction that attains the maximum (co-optimal paths).
            if top_score == score:
                trace_back[i][j].append('up')
            if left_score == score:
                trace_back[i][j].append('left')
            if diag_score == score:
                trace_back[i][j].append('diag')
            # Local alignment: never let a cell go negative.
            if method == 3:
                if score_matrix[i][j] < 0:
                    score_matrix[i][j] = 0
            # Method 2: also consider opening one long gap spanning the whole
            # row/column prefix, and patch score/traceback if that is better.
            if method == 2:
                scup = score
                sclef = score
                up = i
                lef = j
                bl = 0
                for m in range(0, i):
                    sc = (i-m-1)*score_dic['extgap'] + \
                        score_dic['gap']+score_matrix[m][j]
                    if sc >= scup:
                        scup = sc
                        bl = 1
                for m in range(0, j):
                    sc = (j-m-1)*score_dic['extgap'] + \
                        score_dic['gap']+score_matrix[i][m]
                    if sc >= sclef:
                        sclef = sc
                        bl = 1
                if bl == 1:
                    if scup > sclef:
                        score_matrix[i][j] = scup
                        if scup == score:
                            # NOTE(review): appends to the same cell once per n
                            # and, in the elif branch below, replaces a list
                            # with a plain string -- downstream code assumes
                            # lists, so this path looks fragile; verify.
                            for n in range(up, i+1):
                                trace_back[up][j].append('up')
                        elif scup > score:
                            for n in range(up, i+1):
                                trace_back[up][j] = 'up'
                    elif scup < sclef:
                        score_matrix[i][j] = sclef
                        if sclef == score:
                            for n in range(lef, j+1):
                                trace_back[i][lef].append('left')
                        elif sclef > score:
                            for n in range(lef, j+1):
                                trace_back[i][lef] = 'left'
    # Matrix filled; now recover the optimal alignment(s) from the traceback.
    # pointer = [seq2_index, seq1_index]
    pointer = [len(score_matrix) - 1, len(score_matrix[0]) - 1]
    align_seq1 = []
    align_seq2 = []
    arrow = trace_back[pointer[0]][pointer[1]]
    def seq_letter_finder(current_arrow, current_pointer):
        # Resolve one traceback step: emit the aligned character pair for the
        # given direction and return the next cell's arrows and pointer.
        if current_arrow == 'diag':
            letter = [seq1[current_pointer[1]], seq2[current_pointer[0]]]
            next_pointer = [current_pointer[0] - 1, current_pointer[1] - 1]
            next_arrow = trace_back[next_pointer[0]][next_pointer[1]]
            return letter, next_arrow, next_pointer
        elif current_arrow == 'left':
            # Gap in seq2.
            letter = [seq1[current_pointer[1]], '-']
            next_pointer = [current_pointer[0], current_pointer[1] - 1]
            next_arrow = trace_back[next_pointer[0]][next_pointer[1]]
            return letter, next_arrow, next_pointer
        else:
            # 'up': gap in seq1.
            letter = ['-', seq2[current_pointer[0]]]
            next_pointer = [current_pointer[0] - 1, current_pointer[1]]
            next_arrow = trace_back[next_pointer[0]][next_pointer[1]]
            return letter, next_arrow, next_pointer
    def align_seq_finder(rec_arrow, rec_pointer, rec_ls):
        # Recursively walk the traceback, branching on every cell that holds
        # 2 or 3 co-optimal directions; deep copies keep branches independent.
        # Characters are accumulated in reverse and flipped at 'done'.
        if rec_arrow[0] == 'done':
            rec_ls = [rec_ls[0][::-1], rec_ls[1][::-1]]
            return rec_ls
        else:
            if len(rec_arrow) == 1:
                letter, rec_arrow, rec_pointer = seq_letter_finder(
                    rec_arrow[0], rec_pointer)
                rec_ls[0] += letter[0]
                rec_ls[1] += letter[1]
                return align_seq_finder(rec_arrow, rec_pointer, rec_ls)
            elif len(rec_arrow) == 2:
                arrow1 = copy.deepcopy(rec_arrow[0])
                pointer1 = copy.deepcopy(rec_pointer)
                ls1 = copy.deepcopy(rec_ls)
                arrow2 = copy.deepcopy(rec_arrow[1])
                pointer2 = copy.deepcopy(rec_pointer)
                ls2 = copy.deepcopy(rec_ls)
                letter1, arrow1, pointer1 = seq_letter_finder(arrow1, pointer1)
                letter2, arrow2, pointer2 = seq_letter_finder(arrow2, pointer2)
                ls1[0] += letter1[0]
                ls1[1] += letter1[1]
                ls2[0] += letter2[0]
                ls2[1] += letter2[1]
                return list(itertools.chain(align_seq_finder(arrow1, pointer1, ls1),
                                            align_seq_finder(arrow2, pointer2, ls2)))
            else:
                arrow1 = copy.deepcopy(rec_arrow[0])
                pointer1 = copy.deepcopy(rec_pointer)
                pointer2 = copy.deepcopy(rec_pointer)
                pointer3 = copy.deepcopy(rec_pointer)
                ls1 = copy.deepcopy(rec_ls)
                ls2 = copy.deepcopy(rec_ls)
                ls3 = copy.deepcopy(rec_ls)
                letter, arrow1, pointer1 = seq_letter_finder(arrow1, pointer1)
                ls1[0] += letter[0]
                ls1[1] += letter[1]
                arrow2 = rec_arrow[1]
                letter, arrow2, pointer2 = seq_letter_finder(arrow2, pointer2)
                ls2[0] += letter[0]
                ls2[1] += letter[1]
                arrow3 = rec_arrow[2]
                letter, arrow3, pointer3 = seq_letter_finder(arrow3, pointer3)
                ls3[0] += letter[0]
                ls3[1] += letter[1]
                return list(itertools.chain(align_seq_finder(arrow1, pointer1, ls1),
                                            align_seq_finder(
                                                arrow2, pointer2, ls2),
                                            align_seq_finder(arrow3, pointer3, ls3)))
    return align_seq_finder(arrow, pointer, ['', ''])
| StarcoderdataPython |
# torcms/handlers/wiki_ajax_handler.py (repo: bukun/TorCMS)
# -*- coding:utf-8 -*-
'''
Handler for wiki, and page.
'''
from .wiki_handler import WikiHandler
class WikiAjaxHandler(WikiHandler):
    '''
    AJAX variant of WikiHandler: identical routing and behaviour, but tagged
    with its own kind so responses can be distinguished from page requests.
    '''
    def initialize(self):
        super().initialize()
        # Overrides whatever kind the base initialize() set; the meaning of
        # '1' is defined by WikiHandler -- confirm there.
        self.kind = '1'
| StarcoderdataPython |
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from torch import tensor
from torch.utils.data import DataLoader, IterableDataset
from pytorch_lightning.trainer.supporters import CombinedLoader
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.fetching import DataFetcher
@pytest.mark.parametrize("use_combined_loader", [False, True])
def test_prefetch_iterator(use_combined_loader):
    """Test the DataFetcher with PyTorch IterableDataset.

    Checks, for prefetch depths 0..3, that iteration yields the expected
    (data, done) pairs, that ``fetched`` tracks the prefetch window, that
    the fetcher resets cleanly for a second pass, and that an empty
    dataset yields nothing.
    """
    class IterDataset(IterableDataset):
        def __iter__(self):
            yield 1
            yield 2
            yield 3
    for prefetch_batches in range(0, 4):
        if use_combined_loader:
            loader = CombinedLoader([DataLoader(IterDataset()), DataLoader(IterDataset())])
            expected = [
                ([tensor([1]), tensor([1])], False),
                ([tensor([2]), tensor([2])], False),
                ([tensor([3]), tensor([3])], True),
            ]
        else:
            loader = DataLoader(IterDataset())
            expected = [(1, False), (2, False), (3, True)]
        iterator = DataFetcher(prefetch_batches=prefetch_batches)
        # DataFetcher keeps one extra batch beyond the requested prefetch.
        prefetch_batches += 1
        assert iterator.prefetch_batches == prefetch_batches
        iterator.setup(loader)
        def generate():
            generated = []
            for idx, data in enumerate(iterator, 1):
                if iterator.done:
                    assert iterator.fetched == 3
                else:
                    assert iterator.fetched == (idx + prefetch_batches)
                generated.append(data)
            return generated
        assert generate() == expected
        # validate reset works properly.
        assert generate() == expected
        assert iterator.fetched == 3
    class EmptyIterDataset(IterableDataset):
        def __iter__(self):
            return iter([])
    dataloader = DataLoader(EmptyIterDataset())
    iterator = DataFetcher()
    iterator.setup(dataloader)
    assert list(iterator) == []
def test_misconfiguration_error():
    """Accessing loader_iters outside __iter__ raises MisconfigurationException."""
    fetcher = DataFetcher()
    with pytest.raises(
        MisconfigurationException, match="The `dataloader_iter` isn't available outside the __iter__ context."
    ):
        loader = DataLoader(range(10))
        fetcher.setup(loader)
        assert fetcher.loaders[0] == loader
        # This attribute access is what should raise, before iter() is reached.
        fetcher.loader_iters
        iter(fetcher)
        assert fetcher.loader_iters
| StarcoderdataPython |
# test/test_transposition.py
import unittest
from romanyh.transposition import findKeysInRomanTextString
from romanyh.transposition import transposeKeys
transpositionTest = """
Composer: <NAME>
Title: Changing keys
Time signature: 4/4
m1 b1 C: I a: b4 i
m2 G: I e: b3 i
m3 b1 D: I b: b4 I
m4 A: I f#: b3 i
m5 b1 E: I c#: b4 i
Note: Moving to the flat side here
m6 B: I ab: b3 i
m7 b1 Gb: I eb: b4 i
m8 Db: I bb: b3 i
m9 b1 Ab: I f: b4 i
m10 Eb: I c: b3 i
m11 b1 Bb: I g: b4 i
m12 F: I d: b3 i
m13 b1 C: I a: b3 i
"""
class TestTransposition(unittest.TestCase):
    """Round-trip checks for key detection and key transposition."""

    def test_get_keys(self):
        """findKeysInRomanTextString returns every key change, in order."""
        expected = tuple(
            "C a G e D b A f# E c# B ab Gb eb Db bb Ab f Eb c Bb g F d C a"
            .split()
        )
        keys = findKeysInRomanTextString(transpositionTest)
        self.assertEqual(tuple(keys), expected)

    def test_transpose_keys_flat(self):
        """Transposing so the piece starts in Db shifts every key coherently."""
        keys = findKeysInRomanTextString(transpositionTest)
        expected = tuple(
            "Db bb Ab f Eb c Bb g F d C a G e D b A f# E c# B g# Gb eb Db bb"
            .split()
        )
        transposed = transposeKeys(keys, "Db")
        self.assertEqual(tuple(transposed), expected)

    def test_transpose_keys_sharp(self):
        """Transposing toward b minor exercises the sharp-side spellings."""
        keys = findKeysInRomanTextString(transpositionTest)
        expected = tuple(
            "B g# F# d# Db bb Ab f Eb c Bb g F d C a G e D b A f# E c# B g#"
            .split()
        )
        transposed = transposeKeys(keys, "b")
        self.assertEqual(tuple(transposed), expected)
if __name__ == "__main__":
unittest.main()
| StarcoderdataPython |
# EvaluationUtils/descriptive_stats.py (repo: oronnir/CAST)
import os
import shutil
import json
import math
from EvaluationUtils.detection_mapping import DetectionMapping
from Animator.utils import eprint
from EvaluationUtils.image_utils import crop_image, save_image
from Animator.consolidation_api import CharacterDetectionOutput
from tqdm import tqdm
import numpy as np
from PIL import Image, ImageDraw
import time
from sklearn.metrics.pairwise import cosine_similarity
import matplotlib.pyplot as plt
def find_similar_and_dissimilar_pairs(num_examples, ids, features):
    """Rank all unordered id pairs by L2 feature distance.

    Args:
        num_examples: how many extreme pairs to return on each end.
        ids: sequence of identifiers, parallel to *features*.
        features: sequence of 1-D numpy feature vectors.

    Returns:
        (closest_pairs, farthest_pairs) -- each a list of
        [id_a, id_b, distance] triplets.
    """
    total = len(ids)
    pairs = []
    distances = []
    # Exhaustive O(n^2) pass over the upper triangle of the distance matrix.
    for left in tqdm(range(total)):
        for right in range(left + 1, total):
            delta = features[left] - features[right]
            l2_norm = math.sqrt(delta.T.dot(delta))
            pairs.append([ids[left], ids[right], l2_norm])
            distances.append(l2_norm)
    distances = np.asarray(distances)
    # argpartition gives the k extreme indices without a full sort.
    farthest_idx = np.argpartition(distances, -num_examples)[-num_examples:]
    closest_idx = np.argpartition(distances, num_examples)[:num_examples]
    closest_pairs = [pairs[i] for i in closest_idx]
    farthest_pairs = [pairs[i] for i in farthest_idx]
    return closest_pairs, farthest_pairs
def visualize_similarity_features(sim_repo, pairs, role_detections_repo):
    """Materialize each (id_a, id_b, distance) pair as its own numbered folder.

    *sim_repo* is recreated from scratch; pair i gets sub-folder str(i)
    containing copies of '<id>.jpg' for both ids, taken from
    *role_detections_repo*. A pair is copied only when BOTH source crops
    exist (all-or-nothing, matching the original behaviour); its folder is
    still created either way.
    """
    if os.path.isdir(sim_repo):
        shutil.rmtree(sim_repo)
    os.mkdir(sim_repo)
    # enumerate replaces the manual counter variable of the original.
    for counter, sim_pair in enumerate(pairs):
        pair_repo = os.path.join(sim_repo, '', str(counter))
        os.mkdir(pair_repo)
        copies = []
        for thumb_id in sim_pair[:2]:
            name = '{}.jpg'.format(thumb_id)
            copies.append((os.path.join(role_detections_repo, '', name),
                           os.path.join(pair_repo, '', name)))
        if all(os.path.isfile(src) for src, _ in copies):
            for src, dst in copies:
                shutil.copyfile(src, dst)
def count_files_in_repo(repo):
    """Return the number of entries directly inside *repo*, or -1 when *repo*
    is not an existing directory."""
    if os.path.isdir(repo):
        return len(os.listdir(repo))
    return -1
def create_collage(source_images, target_image_path, texts=None):
    """Paste *source_images* into a 1600x900 grid collage with text labels.

    Args:
        source_images: paths of the images to include.
        target_image_path: output path (overwritten if it exists).
        texts: optional per-image labels; defaults to "0", "1", ...
            NOTE(review): duplicate labels collapse entries in the
            dict(zip(...)) below while ``n`` still counts all inputs,
            which would IndexError on ims[i] -- confirm labels are unique.
    """
    if texts is None:
        texts = [str(t) for t in range(len(source_images))]
    type_to_source_paths = dict(zip(texts, source_images))
    width, height = 1600, 900
    n = len(source_images)
    # Smallest square grid that fits n thumbnails.
    edge_count = int(math.sqrt(n)) + 1 if int(math.sqrt(n)) ** 2 < n else int(math.sqrt(n))
    cols = edge_count
    rows = edge_count
    thumbnail_width = width//cols
    thumbnail_height = height//rows
    size = thumbnail_width, thumbnail_height
    new_im = Image.new('RGB', (width, height))
    ims = []
    for triplet_type, p in type_to_source_paths.items():
        im = Image.open(p)
        # thumbnail() resizes in place, preserving aspect ratio.
        im.thumbnail(size)
        # write label
        draw = ImageDraw.Draw(im)
        draw.text((0, 0), triplet_type, (255, 255, 255))
        ims.append(im)
    i = 0
    x = 0
    y = 0
    # Fill column by column, top to bottom; stop once all n are placed.
    for col in range(cols):
        for row in range(rows):
            if n == 0:
                break
            n -= 1
            new_im.paste(ims[i], (x, y))
            i += 1
            y += thumbnail_height
        x += thumbnail_width
        y = 0
    if os.path.isfile(target_image_path):
        os.remove(target_image_path)
    new_im.save(target_image_path)
    return
def deduplication_threshold_setting(series, eval_root):
    """Collect EDH cosine similarities for hand-labeled Similar/Dissimilar pairs
    and plot their histograms to pick a deduplication merge threshold.

    Expects a Deduper folder tree of Similar/<series>/<role>/<pair>/ and
    Dissimilar/... with the two thumbnails of each pair as '<id>.jpg'.
    Returns (similar, dissimilar) cosine-similarity lists.
    """
    dissimilar = []
    similar = []
    deduper_repo = r'\..\Deduper'
    similar_repo = os.path.join(deduper_repo, '', 'Similar')
    dissimilar_repo = os.path.join(deduper_repo, '', 'Dissimilar')
    for ser in series:
        for role in ['Training', 'Test']:
            ser_path = os.path.join(eval_root, '', ser)
            role_path = os.path.join(ser_path, '', role)
            detection_output_path = os.path.join(role_path, '', 'animationdetectionoutput.json')
            # adding EDH features: same detections JSON, but from the EDH run.
            edh_detection_json = detection_output_path.replace(r'E2ETestset\SeResNext\Videos', r'E2ETestset\EDH')
            edh_character_detections = CharacterDetectionOutput.read_from_json(edh_detection_json)
            id_to_edh = {bbox.ThumbnailId: bbox.Features for bbox in edh_character_detections.CharacterBoundingBoxes}
            ser_similar_repo_path = os.path.join(similar_repo, ser, role)
            if os.path.isdir(ser_similar_repo_path):
                pairs_repo_names = os.listdir(ser_similar_repo_path)
                for pair_repo_name in pairs_repo_names:
                    pair_repo_path = os.path.join(ser_similar_repo_path, pair_repo_name)
                    similar_thumbs = os.listdir(pair_repo_path)
                    if len(similar_thumbs) <= 1:
                        continue
                    # Only the first two thumbnails of the pair folder are used.
                    first_thumbnail_id = similar_thumbs[0].replace('.jpg', '')
                    second_thumbnail_id = similar_thumbs[1].replace('.jpg', '')
                    if first_thumbnail_id in id_to_edh and second_thumbnail_id in id_to_edh:
                        cos = cosine_similarity([id_to_edh[first_thumbnail_id], id_to_edh[second_thumbnail_id]])[0, 1]
                        print('Similar: first thumb: {}, second thumb: {}, cosine: {}'.format(first_thumbnail_id,
                                                                                              second_thumbnail_id, cos))
                        similar.append(cos)
            ser_dissimilar_repo_path = os.path.join(dissimilar_repo, ser, role)
            if os.path.isdir(ser_dissimilar_repo_path):
                pairs_repo_names = os.listdir(ser_dissimilar_repo_path)
                for pair_repo_name in pairs_repo_names:
                    pair_repo_path = os.path.join(ser_dissimilar_repo_path, pair_repo_name)
                    dissimilar_thumbs = os.listdir(pair_repo_path)
                    if len(dissimilar_thumbs) <= 1:
                        continue
                    first_thumbnail_id = dissimilar_thumbs[0].replace('.jpg', '')
                    second_thumbnail_id = dissimilar_thumbs[1].replace('.jpg', '')
                    if first_thumbnail_id in id_to_edh and second_thumbnail_id in id_to_edh:
                        cos = cosine_similarity([id_to_edh[first_thumbnail_id], id_to_edh[second_thumbnail_id]])[0, 1]
                        print('disSimilar: first thumb: {}, second thumb: {}, cosine: {}'.format(first_thumbnail_id,
                                                                                                 second_thumbnail_id,
                                                                                                 cos))
                        dissimilar.append(cos)
    print('Similar\n{}'.format(similar))
    print('DisSimilar\n{}'.format(dissimilar))
    plt.hist(similar, bins=50, label='A complete Duplication', alpha=0.5)
    plt.hist(dissimilar, bins=50, label='Very close instances', alpha=0.5)
    plt.axvline(x=0.995, label='Merge threshold', color='r', linestyle='dashed', linewidth=1)
    plt.legend(loc='best')
    plt.show()
    # NOTE(review): savefig after show() typically writes an empty figure
    # (show clears the current figure in non-interactive runs) -- verify.
    plt.savefig(r'\..\Deduper\Deduplication threshold.png')
    return similar, dissimilar
def main():
    """Walk the evaluation tree and, per series/role, dump grouped detections:
    per-cluster crop folders, per-cluster collages, and the largest negative
    (ungrouped) examples cropped from their keyframes.
    """
    # get descriptive statistics
    eval_root = r'\..\SeResNext\Videos'
    series = os.listdir(eval_root)
    for ser in series:
        for role in ['Training', 'Test']:
            # Hard-coded filter: currently only processes FiremanSam / Test.
            if ser not in ['FiremanSam'] or role in ['Training']:
                print('skipping {} {}'.format(ser, role))
                continue
            ser_path = os.path.join(eval_root, '', ser)
            role_path = os.path.join(ser_path, '', role)
            detection_output_path = os.path.join(role_path, '', 'animationdetectionoutput.json')
            role_detections_repo = os.path.join(role_path, '', 'animationdetectionoriginalimages')
            role_detections_count = count_files_in_repo(role_detections_repo)
            print('Series: {}, Role: {}, Count: {}'.format(ser, role, role_detections_count))
            if role_detections_count <= 0:
                print('*** SKIP - Got no detections for {} ***'.format(role_path))
                continue
            features = list()
            ids = list()
            character_detections = CharacterDetectionOutput.read_from_json(detection_output_path)
            grouping_output_path = os.path.join(role_path, '', 'animationgroupingoutput.json')
            mapping = DetectionMapping.parse_index(detection_output_path, grouping_output_path)
            # serialize mapping (only once; skipped when the file already exists)
            mapping_serialization_path = os.path.join(role_path, '', 'CombinedGroupedDetections.json')
            if not os.path.isfile(mapping_serialization_path):
                mapping_dict = dict(boxes=[bmap.__dict__ for bmap in mapping])
                try:
                    with open(mapping_serialization_path, "w") as text_file:
                        json.dump(mapping_dict, text_file)
                except Exception as e:
                    # NOTE(review): '%' on a '{}' template raises TypeError,
                    # so this handler itself would fail -- likely meant
                    # .format(e). Verify before relying on this path.
                    exception_message = ' with exception: \'{}\'' % e
                    eprint(exception_message)
            # Optional sanity check: visualize nearest/farthest noise pairs.
            should_run_similarity_sanity_check = False
            if should_run_similarity_sanity_check:
                for bbox in character_detections.CharacterBoundingBoxes:
                    if_exist = [m for m in mapping if m.ThumbnailId == bbox.ThumbnailId and m.BoxesConsolidation < 0]
                    if len(if_exist) == 0:
                        continue
                    ids.append(bbox.ThumbnailId)
                    features.append(bbox.Features)
                sanity_check_num_examples = 100
                similar_pairs, dissimilar_pairs = find_similar_and_dissimilar_pairs(sanity_check_num_examples, ids,
                                                                                    features)
                sim_repo = os.path.join(role_path, "Similar")
                visualize_similarity_features(sim_repo, similar_pairs, role_detections_repo)
                dis_repo = os.path.join(role_path, "DisSimilar")
                visualize_similarity_features(dis_repo, dissimilar_pairs, role_detections_repo)
            # copy all bboxes grouped by cluster id
            groups_root = os.path.join(role_path, '', 'groups')
            if os.path.isdir(groups_root):
                shutil.rmtree(groups_root)
                # Give the OS a moment to release the deleted tree (Windows).
                time.sleep(2)
            os.mkdir(groups_root)
            noise_repo = os.path.join(groups_root, '', 'All_noisy_clusters')
            for bbox in mapping:
                cluster_repo = os.path.join(groups_root, '', 'Cluster_{}'.format(bbox.BoxesConsolidation))
                # Negative cluster ids are noise; they all share one folder.
                if bbox.BoxesConsolidation < 0:
                    cluster_repo = noise_repo
                if not os.path.isdir(cluster_repo):
                    os.mkdir(cluster_repo)
                bbox_target = os.path.join(cluster_repo, '', '{}.jpg'.format(bbox.ThumbnailId))
                if not os.path.isfile(bbox_target):
                    source_bbox = os.path.join(role_detections_repo, '', '{}.jpg'.format(bbox.ThumbnailId))
                    shutil.copyfile(source_bbox, bbox_target)
            # make collages (one per Cluster_* folder; noise is excluded)
            collage_repo = os.path.join(groups_root, '', 'All_collages')
            if os.path.isdir(collage_repo):
                shutil.rmtree(collage_repo)
            os.mkdir(collage_repo)
            for cluster_repo_name in os.listdir(groups_root):
                if not cluster_repo_name.startswith('Cluster_'):
                    continue
                cluster_repo_path = os.path.join(groups_root, '', cluster_repo_name)
                collage_images = [os.path.join(cluster_repo_path, '', bbox_name) for bbox_name in os.listdir(cluster_repo_path)]
                target_collage_path = os.path.join(collage_repo, '', '{}.jpg'.format(cluster_repo_name))
                create_collage(collage_images, target_collage_path)
            # copy negative examples (largest-area first, capped at 300)
            neg_dir = os.path.join(groups_root, '', 'negatives')
            if not os.path.isdir(neg_dir):
                os.mkdir(neg_dir)
            negative_examples = DetectionMapping.parse_negatives(grouping_output_path)
            ordered_negs = sorted(negative_examples, key=lambda neg: neg['BoundingBox']['Width']*neg['BoundingBox']['Height'], reverse=True)
            num_negs = min(300, len(ordered_negs))
            top_negs = ordered_negs[0:num_negs]
            keyframes_dir = os.path.join(role_path, '', '_KeyFrameThumbnail')
            for top_neg in top_negs:
                keyframe_thumbnail_id = top_neg['KeyframeId']
                keyframe_thumbnail_path = os.path.join(keyframes_dir, '', 'KeyFrameThumbnail_{}.jpg'.format(top_neg['KeyframeId']))
                x = top_neg['BoundingBox']['X']
                y = top_neg['BoundingBox']['Y']
                w = top_neg['BoundingBox']['Width']
                h = top_neg['BoundingBox']['Height']
                neg_image_target_path = os.path.join(neg_dir, '', '{}_{}-{}-{}-{}.jpg'
                                                     .format(keyframe_thumbnail_id, x, y, w, h))
                crop = crop_image(keyframe_thumbnail_path, x, y, w, h)
                save_image(neg_image_target_path, crop)
    return
if __name__ == "__main__":
main()
| StarcoderdataPython |
4974986 | <gh_stars>10-100
# Simple example showing how to use the MidasServer Python class
from midasserver import * # Make sure you have this and midassocket on your
# PYTHONPATH so the import picks this up
class MyServer(MidasServer) :
    """ MyServer demonstrates how to write your own MidasServer for
    communicating with MidasTalkers.

    NOTE: this file is Python 2 (print statements); keep it on a Python 2
    interpreter. The three underscore-suffixed methods are the MidasServer
    callback hooks for connect, data, and disconnect events.
    """
    def __init__ (self, have_server_send_mesg_at_connect,
                  host, port, ser, socket_duplex, dis) :
        MidasServer.__init__(self, host, port, ser, socket_duplex, dis)
        # Counts messages seen; used by readClientData_ to self-shutdown.
        self.count = 0
        # If the server sends a message at connect time, that
        # establishes the serialization of the session, which you may
        # or may not want.  If you wish the client to establish the
        # serialization of the session, then set this to false.
        # Otherwise, the host always does.
        self.haveServerSendMessageAtConnect = have_server_send_mesg_at_connect
    def acceptNewClient_ (self, read_fd, read_addr, write_fd, write_addr):
        # Callback: a new client connected on the given fds/addresses.
        print 'MYSERVER:Connection',read_fd, write_fd
        print ' Made from',read_addr,write_addr
        # Send a message right as it connects
        if (self.haveServerSendMessageAtConnect) :
            print "Sending a test message at connect time: This establishes"
            print " the serialization for the session. If you wish to let the"
            print " the client set the serialization, don't send this message."
            test = { 'TEST': 'at connect', 'a' : [1,2,3] }
            try :
                self.sendBlocking_(write_fd, test)
            except Exception:
                # Don't want to bring down the server if sendBlocking_ fails
                print "Trouble writing back to client? Probably disconnected:",
                print " ... continuing and keeping server up."
        # ... do cleanup code before thread leaves ...
    def readClientData_ (self, read_fd, write_fd, data) :
        # Callback: a client sent *data*; echo it straight back.
        print 'MYSERVER:Client',read_fd,write_fd
        print ' saw some data',data
        # Send the same data back to the client who sent it
        try :
            self.sendBlocking_(write_fd, data)
        except Exception:
            # Don't want to bring down the server if sendBlocking_ fails
            print "Trouble writing back to client? Probably disconnected:",
            print " ... continuing and keeping server up."
        print '... and sent the same data back!'
        # Show how to allow shutdown
        max_count = 100000;
        self.count += 1
        if (self.count>max_count) :
            print '... saw ', max_count,' messages .. .shutting down'
            self.shutdown()
    def disconnectClient_ (self, read_fd, write_fd) :
        # Callback: the client on these fds went away.
        print 'MYSERVER:Client',read_fd,write_fd,'disconnected'
import sys
import string
import getopt
# Command-line driver (Python 2): parse options, start MyServer, idle forever.
try :
    opts,args=getopt.getopt(sys.argv[1:],[],["ser=","sock=","arrdisp=", "server_send_message_at_connect="])
    # NOTE(review): 'error' is an undefined name; the resulting NameError is
    # caught by the bare except below, so this is a working (if obscure) way
    # to force the usage message.
    if len(args)!=2 : raise error
except :
    print "Usage: python midasserver_ex.py [--ser=0|1|2|-2|5] [--sock=1|2|777] [--arrdisp=0|1|2|4] [--server_send_message_at_connect=0|1] host port"
    sys.exit(1)
host = args[0]
port = string.atoi(args[1])
serialization = SERIALIZE_P0 # Not fast, but backwards compatible
socket_duplex = DUAL_SOCKET
array_disposition = ARRAYDISPOSITION_AS_LIST # backwards compat
server_send = 1 # By default
# Map the string options onto the midassocket constants / ints.
for opt, val in opts :
    if opt=="--ser" : serialization = int(val)
    elif opt=="--sock":
        options = { '1':0, '2':1, '777':777 }
        socket_duplex = options[val]
    elif opt=="--arrdisp" : array_disposition = int(val)
    elif opt=="--server_send_message_at_connect" : server_send = int(val)
    else : assert False, "unhandled option"
a = MyServer(server_send, host, port, serialization, socket_duplex, array_disposition)
a.open()
# Sit in some loop
import time
while 1 :
    time.sleep(1)
# When determine its time to go away, shutdown and then wait
# (unreachable after the infinite loop above; kept as usage documentation).
a.shutdown()
a.waitForMainLoopToFinish()
| StarcoderdataPython |
import psycopg2
from datetime import datetime
from typing import Union
from ..data import ReserveDataAdapter
from ...entities import Supboard, User
class PostgressSupboardAdapter(ReserveDataAdapter):
    """Supboard PostgreSQL data adapter class.

    Persists Supboard reservations in a PostgreSQL table and converts
    result rows back into Supboard/User entity instances.

    Attributes:
        connection:
            A PostgreSQL connection instance.
    """
    # Canonical column order for every SELECT/INSERT below;
    # get_supboard_from_row relies on this exact ordering via .index().
    columns = (
        "id", "firstname", "lastname", "middlename", "displayname",
        "telegram_id", "phone_number", "start_time", "end_time",
        "set_type_id", "set_count", "count")
    def __init__(self,
                 connection=None, database_url=None,
                 table_name="sup_reserves"):
        """Store connection settings, ensure a live connection and the table.

        Args:
            connection:
                An existing psycopg2 connection (optional).
            database_url:
                PostgreSQL DSN used to (re)connect when needed.
            table_name:
                Name of the reservations table.
        """
        self.__connection = connection
        self.__database_url = database_url
        self.__table_name = table_name
        self.connect()
        self.create_table()
    @property
    def connection(self):
        # The current psycopg2 connection object.
        return self.__connection
    def connect(self):
        """Ensure the connection is alive; reconnect from database_url if not."""
        try:
            with self.__connection.cursor() as cursor:
                cursor.execute("SELECT 1")
        except Exception:
            # Covers both "no connection supplied" (AttributeError on None)
            # and a dead/closed connection (psycopg2 errors).
            self.__connection = psycopg2.connect(self.__database_url)
    def create_table(self):
        """Create the reservations table if it does not already exist."""
        with self.__connection.cursor() as cursor:
            cursor.execute(
                f"CREATE TABLE IF NOT EXISTS {self.__table_name}"
                """ (
                    id SERIAL PRIMARY KEY,
                    telegram_id integer,
                    firstname varchar(20),
                    lastname varchar(20),
                    middlename varchar(20),
                    displayname varchar(60),
                    phone_number varchar(20),
                    start_time timestamp,
                    end_time timestamp,
                    set_type_id varchar(20),
                    set_count integer,
                    count integer,
                    canceled boolean DEFAULT false,
                    cancel_telegram_id integer)""")
            self.__connection.commit()
    def get_supboard_from_row(self, row):
        """Build a Supboard entity (with its User) from a result row.

        The row must follow the column order declared in ``columns``.
        """
        supboard_id = row[self.columns.index("id")]
        user = User(row[self.columns.index("firstname")])
        user.lastname = row[self.columns.index("lastname")]
        user.middlename = row[self.columns.index("middlename")]
        user.displayname = row[self.columns.index("displayname")]
        user.telegram_id = row[self.columns.index("telegram_id")]
        user.phone_number = row[self.columns.index("phone_number")]
        start = row[self.columns.index("start_time")]
        set_type_id = row[self.columns.index("set_type_id")]
        set_count = row[self.columns.index("set_count")]
        count = row[self.columns.index("count")]
        return Supboard(id=supboard_id, user=user,
                        start_date=start.date(), start_time=start.time(),
                        set_type_id=set_type_id, set_count=set_count,
                        count=count)
    def get_data(self) -> iter:
        """Get a full set of data from storage
        Returns:
            A iterator object of given data
        """
        with self.__connection.cursor() as cursor:
            columns_str = ", ".join(self.columns)
            cursor.execute(f"SELECT {columns_str} FROM {self.__table_name}")
            self.__connection.commit()
            for row in cursor:
                yield self.get_supboard_from_row(row)
    def get_active_reserves(self) -> iter:
        """Get an active supboard reservations from storage

        Active means: not canceled and starting today or later.
        Returns:
            A iterator object of given data
        """
        with self.__connection.cursor() as cursor:
            columns_str = ", ".join(self.columns)
            cursor.execute((f"SELECT {columns_str} FROM {self.__table_name}"
                            " WHERE NOT canceled and start_time >= %s"
                            " ORDER BY start_time"), [datetime.today()])
            self.__connection.commit()
            for row in cursor:
                yield self.get_supboard_from_row(row)
    def get_data_by_keys(self, id: int) -> Union[Supboard, None]:
        """Get a set of data from storage by a keys
        Args:
            id:
                An identifier of Supboard reservation
        Returns:
            A Supboard instance, or None when the id is unknown
        """
        with self.__connection.cursor() as cursor:
            columns_str = ", ".join(self.columns)
            cursor.execute((f"SELECT {columns_str} FROM {self.__table_name}"
                            " WHERE id = %s"), [id])
            self.__connection.commit()
            rows = list(cursor)
            if len(rows) == 0:
                return None
            row = rows[0]
        return self.get_supboard_from_row(row)
    def get_concurrent_reserves(self, reserve: Supboard) -> iter:
        """Get an concurrent reservations from storage

        A reservation is concurrent when its time interval overlaps
        [reserve.start, reserve.end).
        Returns:
            A iterator object of given data
        """
        start_ts = reserve.start
        end_ts = reserve.end
        with self.__connection.cursor() as cursor:
            columns_str = ", ".join(self.columns)
            # Overlap test: same start, starts inside our interval,
            # or we start inside an existing interval.
            cursor.execute(f"SELECT {columns_str} FROM {self.__table_name}"
                           " WHERE NOT canceled"
                           "   and ((%s = start_time)"
                           "        or (%s < start_time and %s > start_time)"
                           "        or (%s > start_time and %s < end_time))"
                           " ORDER BY start_time",
                           (start_ts, start_ts, end_ts, start_ts, start_ts))
            self.__connection.commit()
            for row in cursor:
                yield self.get_supboard_from_row(row)
    def get_concurrent_count(self, reserve: Supboard) -> int:
        """Get an concurrent reservations count from storage
        Returns:
            An integer count of concurrent reservations
        """
        start_ts = reserve.start
        end_ts = reserve.end
        with self.__connection.cursor() as cursor:
            cursor.execute(
                "  SELECT SUM(count) AS concurrent_count"
                f"   FROM {self.__table_name}"
                """ WHERE NOT canceled
                        and ((%s = start_time)
                             or (%s < start_time and %s > start_time)
                             or (%s > start_time and %s < end_time))""",
                (start_ts, start_ts, end_ts, start_ts, start_ts))
            self.__connection.commit()
            # NOTE(review): psycopg2 cursors are presumably always truthy,
            # so the else branch looks unreachable -- confirm.
            if cursor:
                row = list(cursor)
            else:
                return 0
        # SUM() yields NULL (None) when no rows match.
        return row[0][0] if row[0][0] else 0
    def append_data(self, reserve: Supboard) -> Supboard:
        """Append new data to storage
        Args:
            reserve:
                An instance of entity Supboard class.
        Returns:
            A deep copy of *reserve* with the database-assigned id set.
        """
        with self.__connection.cursor() as cursor:
            # Skip the "id" column -- it is assigned by the SERIAL sequence.
            columns_str = ", ".join(self.columns[1:])
            cursor.execute(
                f"  INSERT INTO {self.__table_name} ({columns_str})"
                "   VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
                "   RETURNING id", (
                    reserve.user.firstname,
                    reserve.user.lastname,
                    reserve.user.middlename,
                    reserve.user.displayname,
                    reserve.user.telegram_id,
                    reserve.user.phone_number,
                    reserve.start,
                    reserve.end,
                    reserve.set_type.set_id,
                    reserve.set_count,
                    reserve.count
                ))
            result = reserve.__deepcopy__()
            result.id = cursor.fetchone()[0]
            self.__connection.commit()
        return result
    def update_data(self, reserve: Supboard):
        """Update an existing reservation in storage (matched by id).
        Args:
            reserve:
                An instance of entity Supboard class.
        """
        with self.__connection.cursor() as cursor:
            cursor.execute(
                f"  UPDATE {self.__table_name} SET"
                """ firstname = %s, lastname = %s, middlename = %s,
                    displayname = %s, phone_number = %s, telegram_id = %s,
                    start_time = %s, end_time = %s, set_type_id = %s,
                    set_count = %s, count = %s,
                    canceled = %s, cancel_telegram_id = %s"""
                "   WHERE id = %s", (
                    reserve.user.firstname,
                    reserve.user.lastname,
                    reserve.user.middlename,
                    reserve.user.displayname,
                    reserve.user.phone_number,
                    reserve.user.telegram_id,
                    reserve.start,
                    reserve.end,
                    reserve.set_type.set_id,
                    reserve.set_count,
                    reserve.count,
                    reserve.canceled,
                    reserve.cancel_telegram_id,
                    reserve.id))
            self.__connection.commit()
    def remove_data_by_keys(self, id: int):
        """Remove data from storage by a keys
        Args:
            id:
                An identifier of Supboard reservation
        """
        with self.__connection.cursor() as cursor:
            cursor.execute(
                f"DELETE FROM {self.__table_name} WHERE id = %s", [id])
            self.__connection.commit()
| StarcoderdataPython |
#pylint: disable=invalid-name
""" Sample MARI reduction script """
import os,sys
from numpy import *
from mantid import *
from Direct.ReductionWrapper import *
from mantid.simpleapi import *
from mantid.kernel import funcinspect
from mantid.dataobjects import EventWorkspace
import six
import types
from Direct.PropertyManager import PropertyManager
import Direct
import numpy as np
class MARIReduction(ReductionWrapper):
    # Reduction driver for the MARI spectrometer, built on Mantid's
    # Direct.ReductionWrapper framework.
    @MainProperties
    def def_main_properties(self):
        """Define main properties used in reduction. These are the property
        a user usually wants to change
        MARI Instrument scientist beware!!!!
        -- the properties set up here may be overridden in iliad_mari (below ) if you use it, or
        in section __name__=='__main__' below if you do not use iliad_mari
        """
        prop = {}
        # if energy is specified as a list (even with single value e.g. ei=[81])
        # The numbers are treated as a fraction of ei [from ,step, to ]. If energy is
        # a number, energy binning assumed to be absolute (e_min, e_step,e_max)
        #
        #prop['incident_energy'] = 50
        #prop['energy_bins'] = [-20,0.1,49]
        prop['incident_energy'] = [60]
        prop['energy_bins'] = [-1, 0.005, 0.97]
        #
        # the range of files to reduce. This range ignored when deployed from autoreduction,
        # unless you going to sum these files.
        # The range of numbers or run number is used when you run reduction from PC.
        # If you "save" a run without ending it, you have to give the file name
        #prop['sample_run'] = ['MAR25360.n001']
        # Otherwise just give the run number
        #prop['sample_run'] = [25362, 25363, 25364, 25365]
        prop['sample_run'] = 25780
        prop['wb_run'] = 25779
        prop['sum_runs'] = False # set to true to sum everything provided to sample_run
        #                        # list
        # Absolute units reduction properties. Set prop['monovan_run']=None to do relative units
        prop['monovan_run'] = 25781
        prop['sample_mass'] = 13
        prop['sample_rmm'] = 26.982
        return prop
    @AdvancedProperties
    def def_advanced_properties(self):
        """Set up advanced properties, describing reduction.
        These are the properties, usually provided by an instrument
        scientist
        separation between simple and advanced properties depends
        on scientist, experiment and user. All are necessary for reduction
        to work properly
        MARI Instrument scientist beware!!!!
        -- the properties set up here may be overridden in iliad_mari (below ) if you use it, or
        in section __name__=='__main__' below if you do not use iliad_mari
        """
        prop = {}
        prop['normalise_method'] = 'current'
        prop['map_file'] = "mari_res2013.map"
        prop['monovan_mapfile'] = "mari_res2013.map"
        # Next lines are for removing detector artifacts which should not be needed
        #prop['remove_streaks'] = True
        #prop['fakewb'] = True
        #
        #prop['hardmaskOnly']=maskfile # disable diag, use only hard mask
        #prop['hard_mask_file'] = "mari_mask2019.msk"
        prop['det_cal_file'] = ''
        # Comment out the next line if you want to use the data run for background masking
        #prop['mask_run'] = 25035
        #prop['use_hard_mask_only'] = True
        prop['save_format'] = 'nxspe'
        #
        #prop['wb_integr_range'] = [2,10]
        prop['data_file_ext'] = '.nxs' # if two input files with the same name and
        # different extension found, what to prefer.
        prop['load_monitors_with_workspace'] = False
        # change this to correct value and verify that motor_log_names refers correct and existing
        # log name for crystal rotation to write correct psi value into nxspe files
        prop['motor_offset']=None
        prop['check_background']=False
        prop['bkgd-range-min']=18000
        prop['bkgd-range-max']=19000
        return prop
    @iliad
    def reduce(self,input_file=None,output_directory=None):
        """Method executes reduction over single file
        Overload only if custom reduction is needed or
        special features are requested
        """
        output = ReductionWrapper.reduce(self,input_file,output_directory)
        # Autoreduction returns workspace list, so for compartibility with autoreduction
        # we better process any output as reduction list
        if not isinstance(output,list):
            output = [output]
        for ws in output:
            # For each reduced workspace additionally produce an S(Q,w)
            # workspace named '<ws>_SQW'; Q range scales as sqrt(Ei),
            # split into 285 bins.
            ei = ws.getEFixed(1)
            q_min = 0.04*sqrt(ei)
            q_max = 1.3*sqrt(ei)
            q_bins = str(q_min)+','+str(q_max/285.)+','+str(q_max)
            wsn = ws.name()
            SofQW3(InputWorkspace=ws, OutputWorkspace=wsn+'_SQW', QAxisBinning=q_bins, Emode='Direct')
            Transpose(InputWorkspace=wsn+'_SQW', OutputWorkspace=wsn+'_SQW')
        return output
    def run_reduction(self, out_ws_name=None):
        """" Reduces runs one by one or sum all them together and reduce after this
        if wait_for_file time is > 0, it will until missing files appear on the
        data search path
        """
        # If the caller assigned the result (ws = rd.run_reduction()), use
        # the left-hand-side name as the output workspace name.
        try:
            _,r = funcinspect.lhs_info('both')
            out_ws_name = r[0]
        except:
            # No assignment target detectable; keep the explicit argument.
            pass
        # --- Optionally patch PropertyManager.wb_run.get_workspace to
        # repair a known-bad white-beam run (fakewb property). ---
        if not hasattr(PropertyManager.wb_run, '_old_get_workspace'):
            # Save the pristine getter once so repeated calls do not
            # wrap the wrapper.
            PropertyManager.wb_run._old_get_workspace = PropertyManager.wb_run.get_workspace
        old_wb_get_workspace = PropertyManager.wb_run._old_get_workspace
        if self.reducer.prop_man.fakewb is True:
            def new_wb_get_workspace(self):
                ws = old_wb_get_workspace()
                # Only fake run 25035, and only once (marked by the
                # 'faked' sample log).
                if ((self._run_number is not None and self._run_number != 25035)
                        or ('25035' not in self._ws_name)) or ws.run().hasProperty('faked'):
                    return ws
                print("*** Faking Whitebeam run")
                x = ws.extractX()
                y = ws.extractY()
                e = ws.extractE()
                # Copy data from known-good spectra ranges over bad ones
                # (hard-coded spectrum index ranges for MARI).
                for ifake0, ifake1, ireal0, ireal1 in [[404, 440, 143, 179], [663, 699, 143, 179],
                        [441, 470, 182, 211], [700, 729, 182, 211], [276, 371, 535, 630],
                        [373, 378, 632, 637], [381, 386, 640, 645], [389, 394, 648, 653]]:
                    for ifake, ireal in np.array([range(ifake0-1, ifake1), range(ireal0-1, ireal1)]).T:
                        y[ifake, :] = y[ireal, :]
                        e[ifake, :] = e[ireal, :]
                # Masking is messed up if we use this fake white beam so put masks here directly...
                for ifake in [351, 617, 846]:
                    y[ifake,:] = y[ifake,:] * 0
                    e[ifake,:] = e[ifake,:] * 0
                for isp in range(ws.getNumberHistograms()):
                    ws.setY(isp, y[isp, :])
                    ws.setE(isp, e[isp, :])
                AddSampleLog(ws, 'faked', 'already_faked')
                return ws
        else:
            def new_wb_get_workspace(self):
                # Pass-through: fakewb disabled.
                ws = old_wb_get_workspace()
                return ws
        PropertyManager.wb_run.get_workspace = types.MethodType(new_wb_get_workspace, PropertyManager.wb_run)
        # --- Optionally patch sample_run.get_workspace to mask periodic
        # ToF "streak" artifacts in event data (remove_streaks property). ---
        if not hasattr(PropertyManager.sample_run, '_old_get_workspace'):
            PropertyManager.sample_run._old_get_workspace = PropertyManager.sample_run.get_workspace
        old_get_workspace = PropertyManager.sample_run._old_get_workspace
        if self.reducer.prop_man.remove_streaks is True:
            def new_get_workspace(self):
                ws = old_get_workspace()
                if isinstance(ws, EventWorkspace) and not ws.run().hasProperty('unstreaked'):
                    print('*** Removing Streaks')
                    wsn = ws.name()
                    t0 = 221.75          # ToF of first streak in us
                    stp = 21.33333333333333   # ToF between streaks in us (==256/12)
                    w1 = 150   # Number of ToF bins too look for streak around expected position
                    w2 = 10    # Number of ToF bins around streaks to calculate background level
                    spikes_tof = np.arange(t0, 20000, stp)
                    spikes_tof = np.round(spikes_tof * 4) / 4
                    # Sum all spectra and rebin to 0.25us to find streaks
                    # in the total ToF spectrum.
                    SumSpectra(wsn, IncludeMonitors=False, OutputWorkspace=wsn+'s')
                    wsr = Rebin(wsn+'s', '1,0.25,20000',PreserveEvents=False, OutputWorkspace=wsn+'s')
                    xx = (np.array(wsr.extractX()).T)[:,0]
                    yy = (np.array(wsr.extractY()).T)[:,0]
                    ee = (np.array(wsr.extractE()).T)[:,0]
                    bad = []
                    ymax = np.max(yy)
                    for spk in spikes_tof:
                        ix = np.where(xx == spk)[0][0]
                        yy2 = yy[(ix-w1):(ix+w1)]
                        iy = np.where(yy2 == np.max(yy2))[0][0] + ix - w1
                        yv = yy[iy]
                        yy3 = yy[(iy-w2):(iy+w2)]
                        mv = np.mean(yy3[np.where(yy3 != yv)])
                        # A streak must stand 50% above local background and
                        # be at least 1/500 of the global maximum.
                        if yv > mv * 1.5 and yv > ymax/500:
                            bad.append(iy)
                    badtof = xx[bad]
                    # Mask a small ToF window around each detected streak
                    # in every spectrum's event list.
                    for id in range(ws.getNumberHistograms()):
                        ev = ws.getEventList(id)
                        for tof in badtof:
                            ev.maskTof(tof-0.075, tof+0.225)
                    AddSampleLog(ws, 'unstreaked', 'unstreaked')
                    DeleteWorkspace(wsr)
                return ws
        else:
            def new_get_workspace(self):
                # Pass-through: streak removal disabled.
                ws = old_get_workspace()
                return ws
        PropertyManager.sample_run.get_workspace = types.MethodType(new_get_workspace, PropertyManager.sample_run)
        # if this is not None, we want to run validation not reduction
        if self.validate_run_number:
            self.reducer.prop_man.log\
                ("**************************************************************************************",'warning')
            self.reducer.prop_man.log\
                ("**************************************************************************************",'warning')
            rez,mess=self.build_or_validate_result()
            if rez:
                self.reducer.prop_man.log("*** SUCCESS! {0}".format(mess))
                self.reducer.prop_man.log\
                    ("**************************************************************************************",'warning')
            else:
                self.reducer.prop_man.log("*** VALIDATION FAILED! {0}".format(mess))
                self.reducer.prop_man.log\
                    ("**************************************************************************************",'warning')
                raise RuntimeError("Validation against old data file failed")
            self.validate_run_number=None
            return rez,mess
        sam_run = self.reducer.prop_man.sample_run
        # NOTE(review): mari_normalise_background is not defined in this
        # chunk -- presumably defined elsewhere in the file; confirm.
        setattr(Direct.diagnostics, 'normalise_background', mari_normalise_background)
        if self.reducer.sum_runs:
            ### sum runs provided
            if out_ws_name is None:
                return self.sum_and_reduce()
            else:
                # Rename the summed result(s) (and their _SQW companions
                # produced by reduce()) to the caller-requested name.
                red_ws = self.sum_and_reduce()
                if len(red_ws) > 1:
                    ws_list = []
                    for id, ws_out in enumerate(red_ws):
                        ws_list.append('{0}_{1}_sum_SQW'.format(out_ws_name, id))
                        RenameWorkspace(InputWorkspace=ws_out.name()+'_SQW', OutputWorkspace=ws_list[-1])
                        ws_list.append('{0}_{1}_sum'.format(out_ws_name, id))
                        RenameWorkspace(InputWorkspace=ws_out, OutputWorkspace=ws_list[-1])
                    GroupWorkspaces(InputWorkspaces=ws_list, OutputWorkspace=out_ws_name)
                else:
                    RenameWorkspace(InputWorkspace=red_ws[0].name()+'_SQW', OutputWorkspace=out_ws_name+'_sum_SQW')
                    RenameWorkspace(InputWorkspace=red_ws[0], OutputWorkspace=out_ws_name+'_sum')
                return red_ws
        else:
            ### reduce list of runs one by one
            runfiles = PropertyManager.sample_run.get_run_file_list()
            #if hasattr(runfiles, '__len__') and len(runfiles) > 1:
            #    runfiles = [runfiles[-1]]
            if out_ws_name is None:
                ws_refs = []
                for file_name in runfiles:
                    ws_refs.append(self.reduce(file_name))
                return ws_refs if len(runfiles) > 1 else ws_refs[0]
            else:
                results = []
                nruns = len(runfiles)
                for num, file_name in enumerate(runfiles):
                    red_ws = self.reduce(file_name)
                    if isinstance(red_ws, list):
                        # Multi-energy reduction: one workspace per Ei.
                        for ws in red_ws:
                            results.append(ws)
                        if len(red_ws) > 1:
                            ws_list = []
                            for id, ws_out in enumerate(red_ws):
                                print('--------------------')
                                print(ws_out.name())
                                print('--------------------')
                                ws_list.append('{0}_{1}_SQW'.format(out_ws_name, id))
                                RenameWorkspace(InputWorkspace=ws_out.name()+'_SQW', OutputWorkspace=ws_list[-1])
                                ws_list.append('{0}_{1}'.format(out_ws_name, id))
                                RenameWorkspace(InputWorkspace=ws_out, OutputWorkspace=ws_list[-1])
                            GroupWorkspaces(InputWorkspaces=ws_list, OutputWorkspace=out_ws_name)
                        else:
                            RenameWorkspace(InputWorkspace=red_ws[0].name()+'_SQW', OutputWorkspace=out_ws_name+'_SQW')
                            RenameWorkspace(InputWorkspace=red_ws[0], OutputWorkspace=out_ws_name)
                    else:
                        if nruns == 1:
                            if red_ws.name() != out_ws_name:
                                RenameWorkspace(InputWorkspace=red_ws, OutputWorkspace=out_ws_name)
                                RenameWorkspace(InputWorkspace=red_ws.name()+'_SQW', OutputWorkspace=out_ws_name+'_SQW')
                            results.append(mtd[out_ws_name])
                        else:
                            # Several runs, one requested name: disambiguate
                            # as 'name#KofN'.
                            OutWSName = '{0}#{1}of{2}'.format(out_ws_name,num+1,nruns)
                            if red_ws.name() != out_ws_name:
                                RenameWorkspace(InputWorkspace=red_ws, OutputWorkspace=OutWSName)
                                RenameWorkspace(InputWorkspace=red_ws.name()+'_SQW', OutputWorkspace=OutWSName+'_SQW')
                            results.append(mtd[OutWSName])
                if len(results) == 1:
                    return results[0]
                else:
                    return results
    def set_custom_output_filename(self):
        """define custom name of output files if standard one is not satisfactory
        In addition to that, example of accessing complex reduction properties
        Simple reduction properties can be accessed as e.g.: value= prop_man.sum_runs
        """
        def custom_name(prop_man):
            """Sample function which builds filename from
            incident energy and run number and adds some auxiliary information
            to it.
            """
            # Note -- properties have the same names as the list of advanced and
            # main properties
            # Note: the properties are stored in prop_man class and accessed as
            # below.
            ei = PropertyManager.incident_energy.get_current()
            # sample run is more then just list of runs, so we use
            # the formalization below to access its methods
            if self.reducer.prop_man.filename_prefix:
                return reduced_filename(0, ei, False, self.reducer.prop_man.filename_prefix)
            else:
                runs_list = PropertyManager.sample_run.get_run_list()
                return reduced_filename(runs_list, ei, self.reducer.prop_man.sum_runs)
        # Uncomment this to use custom filename function
        return lambda : custom_name(self.reducer.prop_man)
        # Uncomment this to use standard file name generating function
        #return None
    def validation_file_place(self):
        """Redefine this to the place, where validation file, used in conjunction with
        'validate_run' property, located. Here it defines the place to this script folder.
        but if this function is disabled, by default it looks for/places it
        in a default save directory"""
        return os.path.split(os.path.realpath(__file__))[0]
    def __init__(self,web_var=None):
        """ sets properties defaults for the instrument with Name"""
        ReductionWrapper.__init__(self,'MAR',web_var)
        # Register MARI-specific extension properties directly on the
        # property manager (bypassing its __setattr__ validation).
        object.__setattr__(self.reducer.prop_man, 'remove_streaks', False)
        object.__setattr__(self.reducer.prop_man, 'fakewb', False)
        object.__setattr__(self.reducer.prop_man, 'filename_prefix', '')
#------------------------------------------------------------------------------
# Defines a function to return the data file names
def reduced_filename(runs, ei, is_sum, prefix=None):
    """Build the base file name for a reduced data set.

    Args:
        runs: a run number (or file name) or a list of them.
        ei: incident energy in meV (formatted with two decimals).
        is_sum: whether the runs were summed; ignored for a single run.
        prefix: optional explicit prefix; when falsy it is derived
            from the run number(s).

    Returns:
        A string such as 'MAR25780_Ei60.00meV' or 'MAR100to200sum_Ei81.50meV'.
    """
    if not isinstance(runs, list):
        runs = [runs]
    if len(runs) <= 1:
        # A single run can never be a sum.
        is_sum = False
    if not prefix:
        if is_sum:
            prefix = 'MAR{}to{}sum'.format(runs[0], runs[-1])
        else:
            prefix = 'MAR{}'.format(runs[0])
    return '{}_Ei{:<3.2f}meV'.format(prefix, ei)
def iliad_mari(runno,ei,wbvan,monovan,sam_mass,sam_rmm,sum_runs=False,**kwargs):
    """Helper function, which allow to run MARIReduction in old iliad way
    inputs:
        runno -- one or list of run numbers to process
        ei    -- incident energy or list of incident energies
        wbvan -- white beam vanadium run number or file name of the vanadium
        monovan -- monochromatic vanadium run number or file name
        sam_mass-- mass of the sample under investigation
        sam_rmm -- rmm of sample under investigation
        sum_runs -- if true, all runs provided in runno list should be added together
        **kwargs -- list of any reduction properties, found in MARI_Parameters.xml file
                    written in the form property=value
    NOTE: to avoid duplication, all default parameters are set up within def_advanced properites
          and def_main properties functions. They of course may be overwritten here.
    """
    rd = MARIReduction()
    # set up advanced and main properties, specified in code above
    rd.def_advanced_properties()
    rd.def_main_properties()
    prop_man = rd.reducer.prop_man
    # Normalise runno to a list; a single string file name counts as one run.
    if not hasattr(runno, '__len__') or isinstance(runno, six.string_types):
        runno = [runno]
    if sum_runs and len(runno)==1:
        sum_runs = False
    #assign input arguments:
    prop_man.incident_energy = ei
    prop_man.sum_runs = sum_runs
    prop_man.sample_run = runno
    prop_man.wb_run = wbvan
    # NOTE: 'multirun' is computed but never used below; kept for
    # backwards compatibility of the code path.
    multirun = False
    if hasattr(ei, '__len__') and len(ei) > 1:
        # Multiple incident energies: energy bins are fractions of each Ei.
        prop_man.energy_bins=[-1, 1./400., 0.97]
        multirun = True if sum_runs else False
    elif ei != 'auto':
        # Single numeric Ei: absolute energy binning.
        prop_man.energy_bins=[-1*ei, ei/400., 0.97*ei]
    if ( sam_rmm!=0 and sam_mass!=0 ) :
        # Absolute-units normalisation requested.
        prop_man.sample_mass=sam_mass
        prop_man.sample_rmm=sam_rmm
        prop_man.monovan_run=monovan
    else:
        prop_man.monovan_run=None
    outws = None
    for key,val in kwargs.items():
        if key == 'save_file_name':
            if isinstance(runno, (list, tuple)) or isinstance(ei,(list, tuple)) :
                # BUGFIX: these were Python-2 'print' statements, a
                # SyntaxError in the Python 3 used everywhere else here.
                print("**************************************************************************************")
                print("*** WARNING: you can not set up single file name for list of files or list of energies")
                print("*** change ''set_custom_output_filename'' function, which returns lamda function used ")
                print("*** to calculate file name as function of each incident energy and run number.")
                print("**************************************************************************************")
                continue
        if key == 'wait_for_file':
            rd.wait_for_file = kwargs['wait_for_file']
            continue
        if key == 'OutputWorkspace':
            outws = kwargs['OutputWorkspace']
            continue
        if key == 'dos_background':
            # Consumed by iliad_dos; not a reduction property.
            continue
        # Any other keyword is a generic reduction property.
        setattr(prop_man,key,val)
    rd.reducer.prop_man = prop_man
    #rd.reducer.prop_man.save_file_name='mar'+str(runno)+'_ei'+str(int(round(ei)))
    return rd.run_reduction(outws)
class Runs(object):
    """Helper class for iliad_dos - a list of runs and associated metadata."""
    def __init__(self, run_nums, wbvan, ei, monovan=0, sam_mass=0, sam_rmm=0, sum_runs=True, **kwargs):
        # Normalise run_nums to a list (single run number or file name allowed).
        self.runs = run_nums if hasattr(run_nums, '__iter__') and not isinstance(run_nums, six.string_types) else [run_nums]
        self.ei, self.wbvan, self.monovan, self.sam_mass, self.sam_rmm, self.sum_runs = (ei, wbvan, monovan, sam_mass, sam_rmm, sum_runs)
        self.kwargs = kwargs
        # Optional custom file-name prefix, used when looking up
        # previously reduced files on disk.
        self.prefix = self.kwargs['filename_prefix'] if 'filename_prefix' in self.kwargs else None
        self.outputworkspace = self.kwargs.pop('OutputWorkspace', None)
        # When True, rerun the reduction even if the output workspace exists.
        self.recalc = self.kwargs.pop('recalc', False)
    def run_iliad(self):
        """Run the reduction for this run set via iliad_mari.

        Skips the reduction (returns None) when an output workspace name
        is set, already exists and recalc is False.
        """
        if self.sam_mass == 0 or self.sam_rmm == 0:
            # No sample mass/rmm: absolute-units normalisation impossible.
            self.monovan = None
        if self.outputworkspace:
            if self.recalc or self.outputworkspace not in mtd.getObjectNames():
                return iliad_mari(self.runs, self.ei, self.wbvan, self.monovan, self.sam_mass, self.sam_rmm, self.sum_runs,
                                  OutputWorkspace=self.outputworkspace, **self.kwargs)
        else:
            return iliad_mari(self.runs, self.ei, self.wbvan, self.monovan, self.sam_mass, self.sam_rmm, self.sum_runs, **self.kwargs)
    def load_reduce(self, wd):
        """Load previously reduced files from directory *wd* when possible,
        otherwise fall back to running the reduction.

        Returns:
            A list of loaded workspaces, or the result of run_iliad()
            when nothing could be loaded.
        """
        ws = []
        for ei in self.ei if hasattr(self.ei, '__iter__') else [self.ei]:
            filename = reduced_filename(self.runs, ei, self.sum_runs, self.prefix)
            if filename in mtd.getObjectNames():
                # Already in the Analysis Data Service; no need to load.
                ws.append(mtd[filename])
                continue
            for ext in PropertyManager.save_format.save_formats:
                try:
                    ws1 = Load('{}/{}.{}'.format(wd, filename, ext), OutputWorkspace=filename)
                except ValueError:
                    # File with this extension not found; try the next one.
                    pass
                else:
                    ws.append(ws1)
                    # BUGFIX: was 'continue', which only advanced this inner
                    # extension loop and so re-loaded (and re-appended) the
                    # same file for every remaining extension.  Stop after
                    # the first successful load instead.
                    break
        return ws if len(ws) > 0 else self.run_iliad()
def reduce_runs(runs_dict, wbvan, ei, monovan, **kwargs):
    """Parses a dictionary of runs / samples / temperatures and reduces
    (or loads) every data and background run set.

    Args:
        runs_dict: {sample_name: {temperature: {'data': [...],
                    'background': [...]}, 'ei': ..., 'monovan': ...,
                    'sam_mass': ..., 'sam_rmm': ...}, ...}
        wbvan: white-beam vanadium run used for all reductions.
        ei: default incident energy (overridable per sample).
        monovan: default monochromatic vanadium run (overridable per sample).
        **kwargs: 'load_reduce' to prefer loading previously reduced files,
            'use_sub_directories' to save each sample in its own directory;
            everything else is forwarded to Runs/iliad_mari.

    Returns:
        A new dictionary mirroring runs_dict with run numbers replaced
        by reduced workspaces.
    """
    load_reduce = kwargs.pop('load_reduce', False)
    use_subdirs = kwargs.pop('use_sub_directories', False)
    wd0 = config['defaultsave.directory']
    new_dict = {}
    for sam in list(runs_dict.keys()):
        # BUGFIX: 'wd' was previously assigned only inside the use_subdirs
        # branch, so load_reduce=True without use_sub_directories raised
        # NameError at runobj.load_reduce(wd).  Default to the top-level
        # save directory.
        wd = wd0
        if use_subdirs:
            wd = '{}/{}'.format(wd0, sam)
            if not os.path.isdir(wd):
                os.mkdir(wd)
            config['defaultsave.directory'] = wd
        new_dict[sam] = {}
        ws_list = []
        # Per-sample overrides for ei / monovan / sample mass and rmm;
        # remaining keys are temperatures.
        (ei0, monovan0, sam_mass, sam_rmm) = (runs_dict[sam].pop(ky, df)
            for ky, df in list(zip(['ei', 'monovan', 'sam_mass', 'sam_rmm'], [ei, monovan, 0, 0])))
        for tt in list(runs_dict[sam].keys()):
            new_dict[sam][tt] = {}
            runobj = Runs(runs_dict[sam][tt]['data'], wbvan, ei0, monovan0, sam_mass, sam_rmm, **kwargs)
            new_dict[sam][tt]['data'] = runobj.load_reduce(wd) if load_reduce else runobj.run_iliad()
            #ws_list = ws_list + [ws.name() for ws in new_dict[sam][tt]['data']]
            #ws_list = ws_list + [ws.name()+'_SQW' for ws in new_dict[sam][tt]['data']]
            if 'background' in runs_dict[sam][tt]:
                runobj = Runs(runs_dict[sam][tt]['background'], wbvan, ei0, monovan0, sam_mass, sam_rmm, **kwargs)
                new_dict[sam][tt]['background'] = runobj.load_reduce(wd) if load_reduce else runobj.run_iliad()
                #ws_list = ws_list + [ws.name() for ws in new_dict[sam][tt]['background']]
                #ws_list = ws_list + [ws.name()+'_SQW' for ws in new_dict[sam][tt]['background']]
            #GroupWorkspaces(InputWorkspaces=ws_list, OutputWorkspace='{}_{}K_reduced'.format(sam, tt))
    return new_dict
def _parseqe(qe, ei):
    """Normalise a Q (or E) binning specification into one string per
    incident energy.

    Accepts a single string, a list of per-energy strings, a list of
    per-energy lists of values, or a flat list of values; always returns
    a list with len(ei) entries.
    """
    if not isinstance(qe, list):
        # A single string: reuse it for every incident energy.
        return [qe] * len(ei)
    if len(qe) == len(ei):
        if isinstance(qe[0], six.string_types):
            # Already one string per energy.
            return qe
        if isinstance(qe[0], list):
            # One list of values per energy: join each into a string.
            return [','.join(entry) for entry in qe]
    # A flat list of values: join once and reuse for every energy.
    return [','.join(qe)] * len(ei)
def iliad_dos(runno, wbvan, ei=None, monovan=None, sam_mass=0, sam_rmm=0, sum_runs=False, **kwargs):
    """Reduces a set of data (and optionally background) runs and calculates the phonon density of states
    in the incoherent approximation from the data (or background subtracted data).
    inputs:
        runno - either a list of run numbers (in which case the next 5 parameters must be:
                    ei, wbvan, monovan, sam_mass, sam_rmm just like in the iliad_mari function)
                    in this case you must also specify the temperature keyword with the sample temperature
                    in this case you can also specify the sum_runs parameter like in iliad_mari (default: False)
                or runno can be a python dictionary of dictionaries with the following structure:
                runno = {'sample_name': { temperature: sample_dict, 'ei':ei, 'monovan': n, 'sam_mass': n, 'sam_rmm': y }, ... }
                (e.g. a dictionary with keys which are sample names containing another dictionary with the
                 sample temperature as keys whose values is another dictionary with the following keys:
                'data' - a list of data run numbers. These runs will be summed and reduced
                'background' - an optional list of background run numbers. These will also be summed and reduced
                    and subtracted from the data
                'recalc' - by default this routine does not recalculate the reduction if it sees that the output
                    workspaces are present in the *Analysis Data Service*. If this key is present and
                    set to True, then it will force a recalculation of the reduction.
                'ssf' - a per sample and per temperature self-shielding factor for background subtraction
                'msd' - a per sample and per temperature mean-square displacement factor for DOS calculation
                In addition to the sample temperature in the samples dict, you can also provide the following optional keys:
                'ei' - the incident energ(y)(ies) of the runs. If you don't provide the ei(s) in the keyword arguments to
                    iliad_dos, it must be provided on a per dataset basis.
                'monovan' - the run number of a vanadium calibration run with the same spectrometer
                    setting as the sample and data for absolute units normalisation.
                'sam_mass' - if 'monovan' is set you must provide this key, which is the sample mass in g (ignored if monovan not set)
                'sam_rmm' - if 'monovan' is set you must provide this key, which is the sample molar mass (ignored if monovan not set)
        wbvan - the white beam vanadium run number (mandatory)
        ei - either a number or a list of the incident energies in the measurement. If you provide the ei here it will be assumed
             that all runs have this ei. If you have measured different samples / temperatures with different ei's you have
             to use the runno dictionary input and give the ei in each samples' dictionary.
        monovan - the monochromatic vanadium run number for absolute units calibration (assuming all runs have the same
             ei, otherwise this should also be in the samples' dictionaries). **Note that you must also define the sample
             mass and molar mass in the sample's dictionary otherwise this option will be ignored.**
    In addition this function understands the following keyword arguments (and will pass on other keyword args to iliad_mari):
        ssf - the global self shielding factor for background subtraction (default: 1.) This is overriden by any SSF defined in the runno dict
        msd - the global mean square displacement for DOS calculation (default: 0.) This is overriden by any MSD defined in the runno dict
        qrange - a string or list of string or list of lists of two numbers denoting the |Q| range to sum over for the DOS calculation.
             if it is a list of strings or list of list it must be the same size as the number of incident energies and corresponds to those.
             (default: Qmax/3 to Qmax at the elastic line)
        ebins - a string or list of string or list of lists of three numbers denoting the energy transfer bins for the DOS calculation.
             if it is a list of strings or list of list it must be the same size as the number of incident energies and corresponds to those.
             (default: Emax/10 to Emax*0.95 in steps of Emax/100)
        temperature - if runno is not a dictionary, you *must* specify the sample temperature using this keyword argument
        background - if runno is not a dictionary, you can specify the list of background runs here
        load_reduce - if this is set to True, the function will try to load in the reduced data files rather than recalculate them
             Note that this option will override any 'recalc' keys in the runno dict (if you want to force recalculation
             set this to False or omit this keyword altogether).
        save_text - if True this will save the calculated DOS as 3-column x,y,error text files.
        nsmooth - if set, this will apply an n-point moving average filter to the calculated DOS creating another file/workspace
             nsmooth should be an odd number greater than 2. (Default: None - do not apply smoothing).
        save_folder - if set the function will save the reduce data to this folder instead of the Mantid default folder
        use_sub_directories - if set and is True then for each sample create a new subdirectory and save its file there
    E.g.:
        iliad_dos([25000, 25001], ei=[120, 10], wbvan=25035, background=[25004, 25005], temperature=5)
            will run the reduction for one set of data files with background subtraction and calculate the DOS at 5K.
        iliad_dos({'sam1':
                      {5: {'data': [25000,25001], 'background': [25004,25005]},
                       300: {'data': [25002,25003], 'background': [25006,25007]},
                       'sam_mass':10, 'sam_rmm':177.77},
                   'sam2':
                      {10: {'data': [25010,25011], 'background': [25014,25015]},
                       600: {'data': [25012,25013], 'background': [25016,25017]},
                       'sam_mass':8, 'sam_rmm':187.77},
                  }, ei=[120,10], wbvan=25035, monovan=25008)
            will run the reduction for two sets of samples (one at 5K and 300K, one at 10K and 600K), and calculate the
            density of states for the four sets of measurements, normalising to absolute units. All runs are with Ei=120 and 10meV.
    """
    # Parses the input
    save_text = kwargs.pop('save_text', False)
    nsmooth = kwargs.pop('nsmooth', None)
    save_folder = kwargs.pop('save_folder', None)
    # Deliberately *not* popped: reduce_runs() pops 'use_sub_directories' itself.
    use_subdirs = kwargs['use_sub_directories'] if 'use_sub_directories' in kwargs else False
    global_ssf = kwargs.pop('ssf', 1.0)
    global_msd = kwargs.pop('msd', 0.0)
    global_qrange = kwargs.pop('qrange', 'Qmax/3, Qmax')
    global_ebins = kwargs.pop('ebins', 'Emax/10, Emax/100, Emax*0.95')
    global_ei = ei
    oldwd = config['defaultsave.directory']
    wd0 = save_folder if save_folder is not None else oldwd
    config['defaultsave.directory'] = wd0
    # Runs the reduction
    if isinstance(runno, dict):
        runs_dict = runno
    else:
        # Single run number (or string) -> wrap into a list.
        if not hasattr(runno, '__len__') or isinstance(runno, str):
            runno = [runno]
        if sum_runs and len(runno) == 1:
            sum_runs = False
        if 'temperature' not in kwargs:
            raise ValueError('No sample temperature given')
        temperature = kwargs.pop('temperature')
        if ei is None:
            raise ValueError('Incident energy not defined')
        if sum_runs:
            # All runs belong to a single (anonymous) sample.
            runs_dict = {None: {temperature: {'data': runno}}}
            if 'background' in kwargs:
                runs_dict[None][temperature]['background'] = kwargs.pop('background')
        else:
            runs_dict = {'MAR{}'.format(run): {temperature: {'data': run}} for run in runno}
            if monovan and sam_mass:
                for ky in runs_dict.keys():
                    runs_dict[ky]['sam_mass'] = sam_mass
            if monovan and sam_rmm:
                for ky in runs_dict.keys():
                    runs_dict[ky]['sam_rmm'] = sam_rmm
            if 'background' in kwargs:
                background = kwargs.pop('background')
                for idx, run in enumerate(runno):
                    runs_dict['MAR{}'.format(run)][temperature]['background'] = background[idx]
    ws_dict = reduce_runs(runs_dict, wbvan, ei, monovan, **kwargs)
    # Calculates the DOS (with optional background subtraction)
    for sam in list(ws_dict.keys()):
        if use_subdirs:
            wd = '{}/{}'.format(wd0, sam)
            if not os.path.isdir(wd):
                os.mkdir(wd)
            config['defaultsave.directory'] = wd
        for tt in list(ws_dict[sam].keys()):
            def_ei = runs_dict[sam][tt]['ei'] if 'ei' in runs_dict[sam][tt] else global_ei
            if not hasattr(def_ei, '__len__'):
                def_ei = [def_ei]
            # Match each requested Ei to the reduced workspace closest in energy.
            # NOTE(review): assumes len(def_ei) >= len(ws_ei); confirm for mixed inputs.
            ws_ei = [ws.getEFixed(1) for ws in ws_dict[sam][tt]['data']]
            id_ei = [np.argsort([np.abs(ei1 - ei0) for ei1 in ws_ei])[0] for ei0 in def_ei]
            data_ws = [ws_dict[sam][tt]['data'][id_ei[ii]] for ii in range(len(ws_ei))]
            msd = runs_dict[sam][tt]['msd'] if 'msd' in runs_dict[sam][tt] else global_msd
            # Calculates the sample DOS (without background subtraction)
            qstr = _parseqe(runs_dict[sam][tt]['qrange'] if 'qrange' in runs_dict[sam][tt] else global_qrange, def_ei)
            estr = _parseqe(runs_dict[sam][tt]['ebins'] if 'ebins' in runs_dict[sam][tt] else global_ebins, def_ei)
            for ws, ei, qq, ee in list(zip(data_ws, def_ei, qstr, estr)):
                # Reuse an existing S(Q,w) workspace from the ADS if present.
                if ws.name() + '_SQW' not in mtd.getObjectNames():
                    q_min, q_max = tuple([v * sqrt(ei) for v in [0.04, 1.3]])
                    ws_sqw = SofQW3(ws, '{},{},{}'.format(q_min, q_max / 285., q_max), EMode='Direct', OutputWorkspace=ws.name() + '_SQW')
                else:
                    ws_sqw = mtd[ws.name() + '_SQW']
                ws_dos = ComputeIncoherentDOS(ws_sqw, tt, msd, qq, ee, OutputWorkspace='{}_{}K_Ei{}_data_DOS'.format(sam, tt, ei))
                if save_text:
                    SaveAscii(ws_dos, ws_dos.name() + '.txt', Separator='Space')
                # BUGFIX: guard against the default nsmooth=None, which raised
                # ``TypeError: '>' not supported`` under Python 3.
                if nsmooth is not None and nsmooth > 2:
                    SmoothData(ws_dos, nsmooth, OutputWorkspace=ws_dos.name() + '_smooth')
                    if save_text:
                        SaveAscii(ws_dos.name() + '_smooth', ws_dos.name() + '_smooth.txt', Separator='Space')
            if 'background' in ws_dict[sam][tt].keys():
                bkg_ei = [ws.getEFixed(1) for ws in ws_dict[sam][tt]['background']]
                id_bkg_ei = [np.argsort([np.abs(ei1 - ei0) for ei1 in bkg_ei])[0] for ei0 in def_ei]
                bkg_ws = [ws_dict[sam][tt]['background'][id_bkg_ei[ii]] for ii in range(len(bkg_ei))]
                ssf = runs_dict[sam][tt]['ssf'] if 'ssf' in runs_dict[sam][tt] else global_ssf
                # Self-shielding-scaled background subtraction.
                sub_ws = [data_ws[ii] - ssf * bkg_ws[ii] for ii in range(len(bkg_ws))]
                for ws, ei, qq, ee in list(zip(sub_ws, def_ei, qstr, estr)):
                    SaveNXSPE(ws, '{}_{}K_Ei{:.2f}meV_subtracted.nxspe'.format(sam, tt, ei))
                    q_min, q_max = tuple([v * sqrt(ei) for v in [0.04, 1.3]])
                    ws_sqw = SofQW3(ws, '{},{},{}'.format(q_min, q_max / 285., q_max), EMode='Direct', OutputWorkspace=ws.name() + '_SQW')
                    ws_dos = ComputeIncoherentDOS(ws_sqw, tt, msd, qq, ee, OutputWorkspace='{}_{}K_Ei{}_subtracted_DOS'.format(sam, tt, ei))
                    if save_text:
                        SaveAscii(ws_dos, ws_dos.name() + '.txt', Separator='Space')
                    # BUGFIX: same None guard as above.
                    if nsmooth is not None and nsmooth > 2:
                        ws_dos_smooth = SmoothData(ws_dos, nsmooth, OutputWorkspace=ws_dos.name() + '_smooth')
                        if save_text:
                            SaveAscii(ws_dos_smooth, ws_dos_smooth.name() + '.txt', Separator='Space')
    config['defaultsave.directory'] = oldwd
def mari_normalise_background(background_int, white_int, second_white_int=None):
    """Normalize the background integrals by the white-beam vanadium integrals.

    Parameters:
        background_int   - workspace of integrated background counts.
        white_int        - workspace of integrated white-beam vanadium counts.
        second_white_int - optional second white-beam integral; when given, the
                           harmonic mean of the two white integrals is used.

    NOTE(review): the normalised workspace is assigned to the local name but
    never returned.  In Mantid's simpleapi the assignment also registers an
    output workspace (named after the assigned variable) in the Analysis Data
    Service -- presumably callers rely on that side effect; confirm before
    adding an explicit return.
    """
    if second_white_int is None:
        # MARI-specific workaround: a background workspace with 919 spectra
        # against a 918-spectrum white integral -- drop the first spectrum so
        # the two workspaces match before dividing.
        if background_int.getNumberHistograms() == 919 and white_int.getNumberHistograms() == 918:
            background_int = CropWorkspace(background_int, StartWorkspaceIndex=1)
        background_int = Divide(LHSWorkspace=background_int,RHSWorkspace=white_int,WarnOnZeroDivide='0')
    else:
        # Harmonic mean of the two white-beam integrals.
        hmean = 2.0*white_int*second_white_int/(white_int+second_white_int)
        background_int = Divide(LHSWorkspace=background_int,RHSWorkspace=hmean,WarnOnZeroDivide='0')
        DeleteWorkspace(hmean)
# '__builtin__' covers execution from inside the Mantid script window
# (presumably how Mantid names the executing module -- confirm).
if __name__ == "__main__" or __name__ == "__builtin__":
    #------------------------------------------------------------------------------------#
    # SECTION USED TO RUN REDUCTION FROM MANTID SCRIPT WINDOW                            #
    #------------------------------------------------------------------------------------#
    ##### Here one sets up folders where to find input data and where to save results ####
    # It can be done here or from Mantid GUI:
    #      File->Manage user directory ->Browse to directory
    # Folder where map and mask files are located:
    #map_mask_dir = '/usr/local/mprogs/InstrumentFiles/maps'
    # folder where input data can be found
    #data_dir = r'\\isis\inst$\NDXMARI\Instrument\data\cycle_14_2'
    #config.appendDataSearchDir(map_mask_dir)
    #config.appendDataSearchDir(data_dir)
    # Directory this script lives in; used below for reduce_vars.py.
    root=os.path.dirname(os.path.realpath(__file__))
    #data_dir = os.path.join(root,r'data')
    #config.appendDataSearchDir(root)
    #config.appendDataSearchDir(data_dir)
    #config['defaultsave.directory']=root
    ###### Initialize reduction class above and set up reduction properties. ######
    ###### Note no web_var in constructor.(will be irrelevant if factory is implemented)
    rd = MARIReduction()
    rd.def_advanced_properties()
    rd.def_main_properties()
    #### uncomment rows below to generate web variables and save then to transfer to ###
    ## web services.
    run_dir = os.path.dirname(os.path.realpath(__file__))
    # NOTE(review): 'file' shadows the Python 2 builtin of the same name;
    # harmless here but worth renaming if this block is ever refactored.
    file = os.path.join(run_dir,'reduce_vars.py')
    rd.save_web_variables(file)
    #### Set up time interval (sec) for reducer to check for input data file.  ####
    # If this file is not present and this value is 0, reduction fails.
    # If this value is >0 the reduction waits until the file appears on the data
    # search path, checking again after the time interval specified below.
    rd.wait_for_file = 0  # waiting time interval in seconds
    ### Define a run number to validate reduction against future changes #############
    # After reduction works well and all settings are done and verified,
    # take a run number with good reduced results and build validation
    # for this result.
    # Then place the validation run together with this reduction script.
    # Next time, the script will run reduction and compare the reduction results against
    # the results obtained earlier.
    #rd.validate_run_number = 21968  # Enabling this property disables normal reduction
    # and forces reduction to reduce run specified here and compares results against
    # validation file, processed earlier or calculate this file if run for the first time.
    # This would ensure that the reduction script has not changed,
    # allow to identify the reason for changes if it was changed
    # and would allow to recover the script used to produce the initial reduction
    # if changes are unacceptable.
    ####get reduction parameters from properties above, override what you want locally ###
    # and run reduction. Overriding would have form:
    # rd.reducer.prop_man.property_name (from the dictionary above) = new value e.g.
    # rd.reducer.prop_man.energy_bins = [-40,2,40]
    # or
    ## rd.reducer.prop_man.sum_runs = False
    #
    ###### Run reduction over all run numbers or files assigned to ######
    # sample_run variable
    # return output workspace only if you are going to do
    # something with it here. Running range of runs will return the array
    # of workspace pointers.
    #red_ws = rd.run_reduction()
    # usual way to go is to reduce workspace and save it internally
    rd.run_reduction()
| StarcoderdataPython |
12803453 | """
"""
import numpy as np
from ..nfw_evolution import lgc_vs_lgt, get_bounded_params
from ..fit_nfw_helpers import fit_lgconc, get_loss_data
from ..fit_nfw_helpers_fixed_k import fit_lgconc as fit_lgconc_fixed_k
from ..fit_nfw_helpers_fixed_k import get_loss_data as get_loss_data_fixed_k
from ..fit_nfw_helpers_fixed_k import FIXED_K
SEED = 32


def test_conc_fitter():
    """Pick a random point in parameter space and demonstrate that the fitter
    recovers the correct result.
    """
    cosmic_time = np.linspace(0.1, 14, 100)
    lgtime = np.log10(cosmic_time)

    # Draw a random (unbounded) parameter vector and map it into bounds.
    rng = np.random.RandomState(SEED)
    unbounded_target = rng.normal(loc=0, size=4, scale=1)
    target_params = get_bounded_params(unbounded_target)

    # Synthesize a noiseless concentration history from the target parameters.
    lgconc_target = lgc_vs_lgt(lgtime, *target_params)
    conc_target = 10 ** lgconc_target
    log_mah_sim = np.zeros_like(conc_target) + 100
    lgm_min = 0

    u_p0, expected_loss_data = get_loss_data(cosmic_time, conc_target, log_mah_sim, lgm_min)
    p_best, loss, method, loss_data = fit_lgconc(cosmic_time, conc_target, log_mah_sim, lgm_min)

    # The fitter must recover both the curve and the parameters.
    lgconc_best = lgc_vs_lgt(lgtime, *p_best)
    assert np.allclose(lgconc_target, lgconc_best, atol=0.01)
    assert np.allclose(p_best, target_params, atol=0.01)

    # Enforce that the returned loss_data contains the expected information
    for expected, actual in zip(expected_loss_data, loss_data):
        assert np.allclose(expected, actual)
def test_conc_fitter_fixed_k():
    """Pick a random point in parameter space and demonstrate that the fitter
    recovers the correct result.
    """
    cosmic_time = np.linspace(0.1, 14, 100)
    lgtime = np.log10(cosmic_time)

    # Random target parameters, with the second entry pinned to FIXED_K.
    rng = np.random.RandomState(SEED)
    unbounded_target = rng.normal(loc=0, size=4, scale=1)
    target_params = np.array(get_bounded_params(unbounded_target))
    target_params[1] = FIXED_K

    # Noiseless concentration history generated from the target.
    lgconc_target = lgc_vs_lgt(lgtime, *target_params)
    conc_target = 10 ** lgconc_target
    log_mah_sim = np.zeros_like(conc_target) + 100
    lgm_min = 0

    u_p0, expected_loss_data = get_loss_data_fixed_k(cosmic_time, conc_target, log_mah_sim, lgm_min)
    p_best, loss, method, loss_data = fit_lgconc_fixed_k(cosmic_time, conc_target, log_mah_sim, lgm_min)

    # The fixed-k fitter must recover both the curve and the parameters.
    lgconc_best = lgc_vs_lgt(lgtime, *p_best)
    assert np.allclose(lgconc_target, lgconc_best, atol=0.01)
    assert np.allclose(p_best, target_params, atol=0.01)

    # Enforce that the returned loss_data contains the expected information
    for expected, actual in zip(expected_loss_data, loss_data):
        assert np.allclose(expected, actual)
| StarcoderdataPython |
# Reference genome / database release versions used to build all paths below.
GRCH_VERSION = 'GRCh37'
GRCH_SUBVERSION = '13'
ENSEMBL_VERSION = '88'
COSMIC_VERSION = '81'
DBSNP_VERSION = '150'

SPIDEX_LOCATION = 'spidex_public_noncommercial_v1.0/spidex_public_noncommercial_v1_0.tab.gz'

TRANSCRIPT_DB_PATH = 'ensembl/v{}/Homo_sapiens.{}.cds.all.fa'.format(
    ENSEMBL_VERSION, GRCH_VERSION
)

# Each entry either points at a real VCF file ('is_alias': False) or redirects
# to another entry that contains the same data ('is_alias': True).
vcf_mutation_sources = {
    'COSMIC': {
        'is_alias': False,
        'path': 'cosmic/v{}/CosmicCodingMuts.vcf.gz.bgz'.format(COSMIC_VERSION),
        'given_as_positive_strand_only': True,
    },
    'dbSNP': {
        'is_alias': False,
        'path': 'ncbi/dbsnp_{}-{}p{}/00-All.vcf.gz'.format(
            DBSNP_VERSION, GRCH_VERSION.lower(), GRCH_SUBVERSION
        ),
        'given_as_positive_strand_only': True,
    },
    'ensembl': {
        'is_alias': False,
        'path': 'ensembl/v{}/Homo_sapiens.vcf.gz'.format(ENSEMBL_VERSION),
        'given_as_positive_strand_only': True,
    },
    'ClinVar': {
        'is_alias': True,
        'aliased_vcf': 'dbSNP',
    },
    'ESP': {
        'is_alias': True,
        'aliased_vcf': 'ensembl',
    },
    'HGMD-PUBLIC': {
        'is_alias': True,
        'aliased_vcf': 'ensembl',
    },
    'PhenCode': {
        'is_alias': True,
        'aliased_vcf': 'ensembl',
    },
}

VERBOSITY_LEVEL = 0
3560099 | <gh_stars>1-10
# -*- coding: utf-8 -*-
"""
Created on Mon May 11 14:28:57 2020
@author: fgp35
"""
import os
from collections import OrderedDict
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
import numpy as np
import torchvision
import pytorch_lightning as pl
from scipy.linalg import sqrtm
from turboGAN2d import *
class mirror3d(object):
    """Random mirroring augmentation for 4-D tensors.

    With probability 3/4 the sample is flipped along dims (0,1,2), (0,1,3) or
    (0,1,2,3) -- one quarter each -- otherwise it is returned unchanged.
    """

    def __init__(self):
        super().__init__()

    def __call__(self, field):
        draw = torch.rand(1)
        if draw < 0.25:
            axes = [0, 1, 2]
        elif draw < 0.5:
            axes = [0, 1, 3]
        elif draw < 0.75:
            axes = [0, 1, 2, 3]
        else:
            # Final quarter of the probability mass: identity.
            return field
        return torch.flip(field, axes)
class transform3d(object):
    """Callable augmentation pipeline; currently only the random 3-D mirror."""

    def __init__(self):
        steps = [
            mirror3d(),
        ]
        self.transform = torchvision.transforms.Compose(steps)

    def __call__(self, field):
        return self.transform(field)
def mse1(x, y):
    """Spectral loss: L2 norm between a target spectrum ``y`` and the
    time-averaged spectrum of the generated field ``x``.

    ``x`` is indexed as x[:, :, i] over its third dimension (time slices);
    ``spec`` comes from turboGAN2d and is assumed to return a pair whose
    second element is the spectrum -- TODO confirm against turboGAN2d.
    """
    s = y;
    t = x.shape[2]
    s_hat = 0
    # Average the per-time-slice spectrum over all t slices.
    for i in range(t):
        s_hat += spec(x[:, :, i])[1] / t
    # Move both operands onto x's CUDA device before taking the norm.
    if x.is_cuda:
        s_hat = s_hat.cuda(x.device.index)
        s = s.cuda(x.device.index)
    return torch.norm(s - s_hat)
def t_correlation(x):
    """Normalised temporal autocorrelation for every spatial point.

    ``x`` is a (batch, time, rows, cols) torch tensor; the autocorrelation of
    each pixel's time series is averaged over the batch and normalised by its
    maximum.  Returns a (time, rows, cols) numpy array.
    """
    n_rows, n_cols = x[0, 0].shape
    n_batch = x.shape[0]
    n_t = x.shape[1]
    series = x.cpu().detach().numpy()
    corr = np.zeros((n_t, n_rows, n_cols))
    for row in range(n_rows):
        for col in range(n_cols):
            for b in range(n_batch):
                signal = series[b, :, row, col]
                # Non-negative lags of the full cross-correlation, batch-averaged.
                corr[:, row, col] += np.correlate(signal, signal, mode='full')[n_t - 1:] / n_batch
            corr[:, row, col] /= corr[:, row, col].max()
    return corr
def s2(x):
    """Per-feature temporal autocorrelation statistics of a 5-D field.

    ``x`` has shape (batch, features, time, rows, cols); the result is a
    (features, time, rows, cols) torch tensor on the same device as ``x``.
    """
    n_feat = x.shape[1]
    corr = np.stack([t_correlation(x[:, f]) for f in range(n_feat)])
    out = torch.tensor(corr, requires_grad=x.requires_grad)
    return out.to(x.device)
def mse2(x, y):
    """L2 distance between the target autocorrelation statistics ``y`` and the
    statistics computed from ``x`` via :func:`s2`."""
    target = y
    estimate = s2(x)
    # Move both operands onto x's CUDA device before taking the norm.
    if x.is_cuda:
        estimate = estimate.cuda(x.device.index)
        target = target.cuda(x.device.index)
    return torch.norm(estimate - target)
def s3(latent_vector):
    """Return the four summary moments (mean, rms, skewness, kurtosis) of a
    latent vector as a tensor with the same dtype/device as the input.

    BUGFIX: the original wrote ``k = sk = torch.mean(...**4)`` which silently
    overwrote the skewness computed on the previous line with the kurtosis,
    so the returned vector carried the 4th moment twice.
    """
    mean = torch.mean(latent_vector)
    rms = torch.sqrt(torch.mean(latent_vector ** 2))
    std = torch.std(latent_vector)
    sk = torch.mean(((latent_vector - mean) / std) ** 3)
    k = torch.mean(((latent_vector - mean) / std) ** 4)
    return torch.tensor((mean, rms, sk, k)).type_as(latent_vector)
def mse3(x, y):
    """L2 distance between the four-moment summaries (see :func:`s3`) of the
    two latent vectors ``x`` and ``y``."""
    return torch.norm(s3(x) - s3(y))
class Discriminator_norm(nn.Module):
    """MLP critic over a latent vector augmented with its four summary moments.

    The forward pass appends (mean, rms, skewness, kurtosis) of the whole
    batch's latent values to every row before feeding the MLP.

    BUGFIX: the original ``forward`` wrote ``k = sk = torch.mean(...**4)``,
    overwriting the skewness with the kurtosis so the moment feature vector
    carried the 4th moment twice; the two moments are now computed separately.
    """

    def __init__(self, latent_dim):
        super(Discriminator_norm, self).__init__()
        self.main = nn.Sequential(
            nn.Linear(latent_dim + 4, 255),
            nn.LeakyReLU(0.2, True),
            nn.Linear(255, 255),
            nn.LeakyReLU(0.2, True),
            nn.Linear(255, 255),
            nn.LeakyReLU(0.2, True),
            nn.Linear(255, 255),
            nn.LeakyReLU(0.2, True),
        )

    def forward(self, latent_vector):
        bs = latent_vector.shape[0]
        mean = torch.mean(latent_vector)
        rms = torch.sqrt(torch.mean(latent_vector ** 2))
        std = torch.std(latent_vector)
        sk = torch.mean(((latent_vector - mean) / std) ** 3)
        k = torch.mean(((latent_vector - mean) / std) ** 4)
        # Same four moments broadcast onto every row of the batch.
        moments = torch.tensor((mean, rms, sk, k)).type_as(latent_vector)
        moments = moments.expand(bs, 4)
        latent_vector = torch.cat((latent_vector, moments), dim=1)
        return self.main(latent_vector)
class Discriminator_time(nn.Module):
    """3-D convolutional critic over short space-time volumes.

    Expects input of shape (batch, channels, 4, 128, 128): 3 velocity-like
    channels plus an optional 4th (vorticity) channel, 4 time slices, and a
    128x128 spatial grid (the five pooling stages and the final 4x4x4
    convolution below fix these sizes).
    """
    def __init__(self,use_vorticity=True):
        super(Discriminator_time,self).__init__()
        # Channel count depends on whether a vorticity channel is appended.
        if use_vorticity:
            self.input_features = 4
        else:
            self.input_features = 3
        def block(in_feats,out_feats):
            # Two size-preserving layers (kernel 3, padding 1) followed by a
            # 2x spatial down-sampling; the time dimension is left untouched.
            layers = [nn.ConvTranspose3d(in_feats,in_feats,3,padding=1)]
            layers.append(nn.LeakyReLU(0.2,True))
            layers.append(nn.ConvTranspose3d(in_feats,out_feats,3,padding=1))
            layers.append(nn.LeakyReLU(0.2,True))
            layers.append(nn.AvgPool3d((1,2,2)))
            return layers
        self.main = nn.Sequential(
            nn.ConvTranspose3d(self.input_features,24,1),
            # 128 x 128 x 4 x 24
            *block(24,48),
            # 64 x 64 x 4 x 48
            *block(48,96),
            # 32 x 32 x 4 x 96
            *block(96,96),
            # 16 x 16 x 4 x 96
            *block(96,96),
            # 8 x 8 x 4 x 96
            *block(96,96),
            # 4 x 4 x 4 x 96
            nn.ConvTranspose3d(96,96,3,padding=1),
            nn.LeakyReLU(0.2,True),
            # 4 x 4 x 4 x 96
        )
        self.last_block = nn.Sequential(
            # 96 feature channels plus the std-statistic channel from forward().
            nn.Conv3d(96+1,96,3,padding=1),
            nn.LeakyReLU(0.2,True),
            # Kernel 4 collapses the remaining 4x4x4 volume to a single voxel.
            nn.Conv3d(96,96,4),
            nn.LeakyReLU(0.2,True),
        )
        self.fc = nn.Linear(96,1,bias=False)
    def forward(self,field):
        b_size = field.shape[0]
        field = self.main(field)
        # Standard deviation across the channel dimension, appended as one
        # extra feature channel (akin to a minibatch-std statistic, but taken
        # per sample over channels -- note it is NOT computed across the batch).
        mstd = torch.std(field,dim=1).unsqueeze(1)
        field = torch.cat((field,mstd),dim=1)
        field = self.last_block(field)
        # Flatten the single remaining voxel to (batch, 96) for the scalar head.
        field = field.reshape(b_size,96)
        return self.fc(field)
class RNN(nn.Module):
    """Three-layer LSTM that advances a 192-dimensional latent code one step
    at a time (batch_first layout)."""

    def __init__(self, hidden_size):
        super(RNN, self).__init__()
        self.hs = hidden_size
        self.main = nn.LSTM(192, self.hs, num_layers=3, batch_first=True)
        self.fc = nn.Linear(self.hs, 192)

    def forward(self, z, hidden):
        # Insert a singleton sequence dimension: (batch, 192) -> (batch, 1, 192).
        batched = z.view(z.shape[0], 1, z.shape[1])
        lstm_out, (hn, cn) = self.main(batched, hidden)
        return self.fc(lstm_out), (hn, cn)

    def init_hidden(self, batch_size):
        ''' Initialize hidden state '''
        # New tensors of the SAME TYPE/device as the model weights, filled with
        # small Gaussian noise.
        weight = next(self.parameters()).data
        h0 = weight.new(3, batch_size, self.hs).normal_(mean=0, std=0.1)
        c0 = weight.new(3, batch_size, self.hs).normal_(mean=0, std=0.1)
        return (h0, c0)
class GAN3d(pl.LightningModule):
    """Space-time turbulence GAN built on a pre-trained 2-D GAN.

    A pre-trained 2-D generator/discriminator pair (loaded from
    'pre_trainGan.ckpt') produces individual frames; an LSTM advances the
    latent code in time, and three critics constrain single frames (netD),
    short space-time volumes (D_time) and the latent statistics (D_norm).
    Optimizer branch order in training_step matches the list returned by
    configure_optimizers: [netD, netG, D_time, D_norm, RNN].
    """
    def __init__(self,hparams):
        super(GAN3d,self).__init__()
        torch.cuda.seed_all()
        self.hparams = hparams
        #networks
        # Reuse the frame generator/discriminator from the pre-trained 2-D GAN.
        GAN2d = GAN.load_from_checkpoint(os.getcwd()+'/pre_trainGan.ckpt')
        self.netG = GAN2d.netG
        self.netD = GAN2d.netD
        self.D_time = Discriminator_time()
        self.D_time.apply(weights_init)
        self.D_norm = Discriminator_norm(hparams.latent_dim)
        self.D_norm.apply(weights_init)
        self.RNN = RNN(500)
    def evaluate_lstm(self,z,t):
        """Roll the latent code forward ``t`` steps; returns (batch, t, latent_dim)."""
        hidden = self.RNN.init_hidden(z.shape[0])
        output = z.view(z.shape[0],1,z.shape[1])
        ot = z
        for i in range(1,t):
            ot, hidden = self.RNN(ot.view_as(z),hidden)
            output = torch.cat((output,ot),dim=1)
        ot = None
        return output
    def forward(self,z,t):
        """Generate a (batch, 3, t, 128, 128) field by decoding each latent step."""
        bs = z.shape[0]
        zt = self.evaluate_lstm(z,t)
        field = self.netG(zt[:,0]).reshape(bs,3,1,128,128)
        for i in range(1,t):
            field_i = self.netG(zt[:,i]).reshape(bs,3,1,128,128)
            field = torch.cat((field,field_i),dim=2)
        return field
    def adversarial_loss(self, y, y_hat):
        # Wasserstein-style critic loss: maximise real score, minimise fake score.
        return -torch.mean((y)) + torch.mean((y_hat))
    def training_step(self, batch, batch_nb, optimizer_idx):
        real_field = batch
        # Keep the precomputed target statistics on the same dtype/device as the batch.
        self.s1 = self.s1.type_as(real_field)
        self.s2 = self.s2.type_as(real_field)
        t = real_field.shape[2]
        # Unless disabled (hparams.nv, presumably "no vorticity" -- confirm),
        # append a per-frame vorticity channel to the real field.
        if not self.hparams.nv:
            omega = stream_vorticity(real_field[:,:,0]).type_as(real_field[:,:,0])
            for i in range(1,t):
                omega = torch.cat((omega,stream_vorticity(real_field[:,:,i]).type_as(real_field[:,:,i])),dim=0)
            real_field = torch.cat((real_field,omega.view(real_field.shape[0],1,t,128,128)),dim=1)
        # --- optimizer 0: single-frame critic netD (WGAN with gradient penalty) ---
        if optimizer_idx == 0:
            z = torch.randn(real_field.shape[0],self.hparams.latent_dim).type_as(real_field)
            gen_field = self.netG(z)
            if not self.hparams.nv:
                omega = stream_vorticity(gen_field).type_as(gen_field)
                gen_field = torch.cat((gen_field,omega),1)
            grad_penalty = calc_gradient_penalty(self.netD,real_field[:,:,0],gen_field,l=100)
            d_loss = self.adversarial_loss(self.netD(real_field[:,:,0]),self.netD(gen_field)) + grad_penalty
            tqdm_dict = {'d_loss': d_loss}
            output = OrderedDict({
                'loss': d_loss,
                'progress_bar': tqdm_dict,
                'log': tqdm_dict,
            })
            return output
        # --- optimizer 1: generator, judged by both critics plus spectral (mse1)
        #     and temporal-correlation (mse2) statistics losses ---
        if optimizer_idx == 1:
            z = torch.randn(real_field.shape[0],self.hparams.latent_dim).type_as(real_field)
            gen_field = self.netG(z)
            if not self.hparams.nv:
                omega = stream_vorticity(gen_field).type_as(gen_field)
                gen_field = torch.cat((gen_field,omega),1)
            gen_field_t = self(z,4)
            if not self.hparams.nv:
                omega = stream_vorticity(gen_field_t[:,:,0]).type_as(gen_field)
                for i in range(1,4):
                    omega = torch.cat((omega,stream_vorticity(gen_field_t[:,:,i]).type_as(gen_field)),dim=0)
                gen_field_t = torch.cat((gen_field_t,omega.view(real_field.shape[0],1,t,128,128)),dim=1)
            g_loss = (-torch.mean(self.netD(gen_field)) -torch.mean(self.D_time(gen_field_t))
                      + 10*mse1(gen_field_t,self.s1) +1*mse2(gen_field_t[:,0:3],self.s2))
            # Frame-averaged quality score (metric only; excluded from the graph).
            fid = score(real_field[:,:,0],gen_field_t[:,:,0]).detach()
            for i in range(1,4):
                fid += score(real_field[:,:,i],gen_field_t[:,:,i]).detach()
            fid = fid/4
            tqdm_dict = {'g_loss': g_loss,'score': fid}
            output = OrderedDict({
                'loss': g_loss,
                'progress_bar': tqdm_dict,
                'log': tqdm_dict,
            })
            return output
        # --- optimizer 2: space-time critic D_time (WGAN with gradient penalty) ---
        if optimizer_idx ==2:
            z = torch.randn(real_field.shape[0],self.hparams.latent_dim).type_as(real_field)
            gen_field_t = self(z,4)
            if not self.hparams.nv:
                omega = stream_vorticity(gen_field_t[:,:,0]).type_as(gen_field_t)
                for i in range(1,4):
                    omega = torch.cat((omega,stream_vorticity(gen_field_t[:,:,i]).type_as(gen_field_t)),dim=0)
                gen_field_t = torch.cat((gen_field_t,omega.view(real_field.shape[0],1,t,128,128)),dim=1)
            grad_penalty = calc_gradient_penalty(self.D_time,real_field,gen_field_t,l=400)
            d_time_loss = self.adversarial_loss(self.D_time(real_field),self.D_time(gen_field_t)) + grad_penalty
            fid = score(real_field[:,:,0],gen_field_t[:,:,0]).detach()
            for i in range(1,4):
                fid += score(real_field[:,:,i],gen_field_t[:,:,i]).detach()
            fid = fid/4
            tqdm_dict = {'d_time_loss': d_time_loss, 'score': fid}
            output = OrderedDict({
                'loss': d_time_loss,
                'progress_bar': tqdm_dict,
                'log': tqdm_dict,
            })
            return output
        # --- optimizer 3: latent-statistics critic D_norm; compares the initial
        #     latent draw against a latent evolved 50-500 LSTM steps ---
        if optimizer_idx == 3:
            z = torch.randn(real_field.shape[0],self.hparams.latent_dim).type_as(real_field)
            zt = self.evaluate_lstm(z,500)
            zt = zt[:,np.random.randint(50,500)].view_as(z)
            grad_penalty = calc_gradient_penalty(self.D_norm,z,zt)
            d_norm_loss = self.adversarial_loss(self.D_norm(z),self.D_norm(zt)) + grad_penalty
            tqdm_dict = {'d_norm_loss': d_norm_loss}
            output = OrderedDict({
                'loss': d_norm_loss,
                'progress_bar': tqdm_dict,
                'log': tqdm_dict,
            })
            return output
        # --- optimizer 4: the LSTM itself, judged by D_time, D_norm and the
        #     statistics losses (mse1/mse2 on fields, mse3 on latent moments) ---
        if optimizer_idx == 4:
            z = torch.randn(real_field.shape[0],self.hparams.latent_dim).type_as(real_field)
            zt = self.evaluate_lstm(z,500)
            zt = zt[:,np.random.randint(50,500)].view_as(z)
            gen_field_t = self(z,4)
            if not self.hparams.nv:
                omega = stream_vorticity(gen_field_t[:,:,0]).type_as(gen_field_t)
                for i in range(1,4):
                    omega = torch.cat((omega,stream_vorticity(gen_field_t[:,:,i]).type_as(gen_field_t)),dim=0)
                gen_field_t = torch.cat((gen_field_t,omega.view(real_field.shape[0],1,t,128,128)),dim=1)
            rnn_loss = (-torch.mean(self.D_time(gen_field_t)) -torch.mean(self.D_norm(zt)) + 10*mse1(gen_field_t,self.s1)
                        +1*mse2(gen_field_t[:,0:3],self.s2) +100*mse3(z,zt))
            fid = score(real_field[:,:,0],gen_field_t[:,:,0]).detach()
            for i in range(1,4):
                fid += score(real_field[:,:,i],gen_field_t[:,:,i]).detach()
            fid = fid/4
            tqdm_dict = {'rnn_loss': rnn_loss, 'score': fid}
            output = OrderedDict({
                'loss': rnn_loss,
                'progress_bar': tqdm_dict,
                'log': tqdm_dict,
            })
            return output
    def configure_optimizers(self):
        """One Adam optimizer per sub-network, in the order consumed by
        training_step; optional MultiStepLR schedulers when hparams.sc is set."""
        lr = self.hparams.lr
        b1 = self.hparams.b1
        b2 = self.hparams.b2
        opt_g = torch.optim.Adam(self.netG.parameters(), lr=lr, betas=(b1, b2))
        opt_d = torch.optim.Adam(self.netD.parameters(), lr=lr, betas=(b1, b2))
        opt_d_time = torch.optim.Adam(self.D_time.parameters(), lr=lr, betas=(b1, b2))
        opt_d_norm = torch.optim.Adam(self.D_norm.parameters(), lr=lr, betas=(b1, b2))
        opt_rnn = torch.optim.Adam(self.RNN.parameters(), lr=lr, betas=(b1, b2))
        if self.hparams.sc:
            scheduler_d = torch.optim.lr_scheduler.MultiStepLR(opt_d,milestones=self.hparams.milestones,gamma=self.hparams.gamma)
            scheduler_g = torch.optim.lr_scheduler.MultiStepLR(opt_g,milestones=self.hparams.milestones,gamma=self.hparams.gamma)
            scheduler_dt = torch.optim.lr_scheduler.MultiStepLR(opt_d_time,milestones=self.hparams.milestones,gamma=self.hparams.gamma)
            scheduler_dn = torch.optim.lr_scheduler.MultiStepLR(opt_d_norm,milestones=self.hparams.milestones,gamma=self.hparams.gamma)
            scheduler_rnn = torch.optim.lr_scheduler.MultiStepLR(opt_rnn,milestones=self.hparams.milestones,gamma=self.hparams.gamma)
            return [opt_d, opt_g, opt_d_time, opt_d_norm, opt_rnn], [scheduler_d,scheduler_g,scheduler_dt,scheduler_dn,scheduler_rnn]
        else:
            return opt_d, opt_g, opt_d_time, opt_d_norm, opt_rnn
    def train_dataloader(self):
        return DataLoader(self.dataset, batch_size=self.hparams.batch_size,)
    def prepare_data(self):
        """Load the training field from ./field.pt and precompute the target
        statistics s1 (time-averaged spectrum of the first 100 samples) and
        s2 (temporal autocorrelation of the whole field)."""
        path = os.getcwd()
        field = torch.load(path+'/field.pt')
        dataset = mydataset(field, transform=transform3d())
        self.dataset = dataset
        t = field.shape[2]
        s_hat = 0
        for i in range(t):
            s_hat += spec(field[0:100,:,i])[1]/t
        self.s1 = torch.mean(s_hat,dim=0).unsqueeze(0)
        self.s2 = s2(field)
3299697 | <gh_stars>10-100
from queue import Queue
from collections import defaultdict, namedtuple
Subscriber = namedtuple('Subscriber', 'queue, properties')


class Publisher(object):
    """In-memory publish/subscribe hub emitting Server-Sent-Events payloads.

    Subscribers are grouped by channel name; each one owns a private queue and
    an arbitrary ``properties`` object used to customise callable payloads.
    """

    # Sentinel object (compared by identity) that terminates a subscriber's
    # event generator.
    END_STREAM = {}

    def __init__(self):
        """Start with no subscribers on any channel."""
        self.subscribers_by_channel = defaultdict(list)

    def _get_subscribers_lists(self, channel):
        # A plain string names a single channel; any other value is treated as
        # an iterable of channel names.
        names = [channel] if isinstance(channel, str) else channel
        for name in names:
            yield self.subscribers_by_channel[name]

    def get_subscribers(self, channel='default channel'):
        """
        Yield every subscriber registered on the given channel(s).

        `channel` can either be a channel name (e.g. "secret room") or a list
        of channel names (e.g. "['chat', 'global messages']"). It defaults to
        the channel named "default channel".
        """
        for bucket in self._get_subscribers_lists(channel):
            yield from bucket

    def _publish_single(self, data, queue):
        """
        Push one piece of data onto one subscriber queue, encoded as an SSE
        'data:' event (one 'data:' line per newline-separated line, followed
        by the blank separator line).
        """
        for line in str(data).split('\n'):
            queue.put('data: {}\n'.format(line))
        queue.put('\n')

    def publish(self, data, channel='default channel'):
        """
        Publish ``data`` to every subscriber of the given channel(s).

        `channel` can either be a channel name or a list of channel names and
        defaults to "default channel".

        If ``data`` is callable, each subscriber instead receives the result
        of ``data(subscriber.properties)``; falsy results are skipped, which
        allows per-subscriber customised (or suppressed) events.
        """
        # Note: callable payloads are evaluated once per subscriber, so each
        # subscriber can see a different message.
        if callable(data):
            for subscriber in self.get_subscribers(channel):
                message = data(subscriber.properties)
                if message:
                    self._publish_single(message, subscriber.queue)
        else:
            for subscriber in self.get_subscribers(channel):
                self._publish_single(data, subscriber.queue)

    def subscribe(self, channel='default channel', properties=None, initial_data=[]):
        """
        Register a new subscriber on the given channel(s) and return an
        infinite generator of Server-Sent-Events strings.

        If ``properties`` is passed it is stored for use by callable payloads
        (see :meth:`publish`).  Any items in ``initial_data`` are queued before
        the regular channel stream begins.
        """
        new_subscriber = Subscriber(Queue(), properties or {})
        for item in initial_data:
            self._publish_single(item, new_subscriber.queue)
        for bucket in self._get_subscribers_lists(channel):
            bucket.append(new_subscriber)
        return self._make_generator(new_subscriber.queue)

    def unsubscribe(self, channel='default channel', properties=None):
        """
        Remove subscribers from the given channel(s).

        With ``properties=None`` every subscriber of the selected channel(s)
        is removed; otherwise only subscribers whose properties compare equal
        are removed.

        NOTE(review): removed subscribers are never sent END_STREAM, so their
        generators keep blocking on an abandoned queue -- presumably the HTTP
        layer is expected to drop the connection; confirm before relying on
        unsubscribe to terminate streams.
        """
        for bucket in self._get_subscribers_lists(channel):
            if properties is None:
                bucket[:] = []
            else:
                bucket[:] = [entry for entry in bucket
                             if entry.properties != properties]

    def _make_generator(self, queue):
        """
        Yield items from ``queue`` until the END_STREAM sentinel arrives.
        """
        while True:
            item = queue.get()
            if item is Publisher.END_STREAM:
                return
            yield item

    def close(self):
        """
        Terminate every active subscription and empty all channels.
        """
        for bucket in self.subscribers_by_channel.values():
            for entry in bucket:
                entry.queue.put(Publisher.END_STREAM)
            bucket.clear()
if __name__ == '__main__':
    # Starts an example chat application.
    # Run this module and point your browser to http://localhost:5000
    import html

    import flask

    publisher = Publisher()
    app = flask.Flask(__name__, static_folder='static', static_url_path='')

    @app.route('/publish', methods=['POST'])
    def publish():
        sender_username = flask.request.form['username']
        chat_message = flask.request.form['message']
        template = '<strong>{}</strong>: {}'
        # Fix: `cgi.escape` was removed in Python 3.8.  `html.escape` with
        # quote=False performs the same escaping (&, <, > only), keeping
        # the rendered output identical to the original.
        full_message = template.format(html.escape(sender_username, quote=False),
                                       html.escape(chat_message, quote=False))

        def m(subscriber_username):
            # Deliver the message to everyone except its author.
            if subscriber_username != sender_username:
                return full_message

        publisher.publish(m)
        return ''

    @app.route('/subscribe')
    def subscribe():
        username = flask.request.args.get('username')
        return flask.Response(publisher.subscribe(properties=username),
                              content_type='text/event-stream')

    @app.route('/')
    def root():
        return app.send_static_file('chat.html')

    app.run(debug=True, threaded=True)
| StarcoderdataPython |
11231029 | import os
import subprocess
import sys
import time
import pytest
import torch
from elasticsearch import Elasticsearch
from haystack.document_store.elasticsearch import ElasticsearchDocumentStore
from haystack.reader.transformers import TransformersReader
from haystack.retriever.dense import EmbeddingRetriever, DensePassageRetriever
from haystack.retriever.sparse import ElasticsearchRetriever
from haystack.preprocessor.preprocessor import PreProcessor
sys.path.insert(0, os.path.abspath("./"))
from src.evaluation.utils.elasticsearch_management import delete_indices, prepare_mapping
from src.evaluation.utils.TitleEmbeddingRetriever import TitleEmbeddingRetriever
from src.evaluation.config.elasticsearch_mappings import SQUAD_MAPPING
from src.evaluation.utils.utils_eval import PiafEvalRetriever, PiafEvalReader
@pytest.fixture(scope="session", autouse=True)
def elasticsearch_fixture():
    """Ensure an Elasticsearch cluster is reachable for the whole session.

    If nothing answers on localhost:9200, start a disposable single-node
    instance in Docker and wait for it to bootstrap.
    """
    # test if a ES cluster is already running. If not, download and start an ES instance locally.
    try:
        client = Elasticsearch(hosts=[{"host": "localhost", "port": "9200"}])
        client.info()
    except Exception:  # fix: bare `except:` also swallowed KeyboardInterrupt/SystemExit
        print("Starting Elasticsearch ...")
        # Best-effort removal of a stale container; failure here is fine.
        subprocess.run(["docker rm haystack_test_elastic"], shell=True)
        status = subprocess.run(
            [
                'docker run -d --name haystack_test_elastic -p 9200:9200 -e "discovery.type=single-node" elasticsearch:7.6.2'
            ],
            shell=True,
        )
        if status.returncode:
            raise Exception(
                "Failed to launch Elasticsearch. Please check docker container logs."
            )
        # Give the freshly started node time to finish bootstrapping.
        time.sleep(30)
@pytest.fixture
def gpu_available():
    """Whether a CUDA device is visible to torch."""
    return torch.cuda.is_available()
@pytest.fixture
def gpu_id(gpu_available):
    """Current CUDA device id, or -1 when running on CPU."""
    return torch.cuda.current_device() if gpu_available else -1
@pytest.fixture(scope="session")
def preprocessor():
    """A word-level PreProcessor splitting documents into 50-word chunks."""
    # split_overlap must stay 0 at the date of writing this: 22 01 2021
    return PreProcessor(
        clean_empty_lines=False,
        clean_whitespace=False,
        clean_header_footer=False,
        split_by="word",
        split_length=50,
        split_overlap=0,
        split_respect_sentence_boundary=False,
    )
@pytest.fixture(scope="session")
def document_store(elasticsearch_fixture):
    """An ElasticsearchDocumentStore backed by the session ES instance.

    The "document" and "label" indices are dropped when the session ends.
    """
    prepare_mapping(mapping=SQUAD_MAPPING, embedding_dimension=768)
    index_name = "document"
    store = ElasticsearchDocumentStore(
        host="localhost",
        username="",
        password="",
        index=index_name,
        create_index=False,
        embedding_field="emb",
        embedding_dim=768,
        excluded_meta_data=["emb"],
        similarity='cosine',
        custom_mapping=SQUAD_MAPPING,
    )
    yield store
    # clean up
    delete_indices(index=index_name)
    delete_indices(index="label")
@pytest.fixture
def reader(gpu_id):
    """A French QA TransformersReader keeping 3 candidates per passage."""
    model_name = "etalab-ia/camembert-base-squadFR-fquad-piaf"
    top_k = 3
    return TransformersReader(
        model_name_or_path=model_name,
        tokenizer=model_name,
        use_gpu=gpu_id,
        top_k_per_candidate=top_k,
    )
@pytest.fixture
def retriever_bm25(document_store):
    """A plain BM25 retriever over the Elasticsearch document store."""
    retriever = ElasticsearchRetriever(document_store=document_store)
    return retriever
@pytest.fixture
def retriever_emb(document_store, gpu_available):
    """A sentence-transformers embedding retriever (max-pooled last layer)."""
    return EmbeddingRetriever(
        document_store=document_store,
        embedding_model="sentence-transformers/distiluse-base-multilingual-cased-v2",
        model_version="fcd5c2bb3e3aa74cd765d793fb576705e4ea797e",
        use_gpu=gpu_available,
        model_format="transformers",
        pooling_strategy="reduce_max",
        emb_extraction_layer=-1,
    )
@pytest.fixture
def retriever_dpr(document_store, gpu_available):
    """A French DPR retriever with separate question/passage encoders."""
    return DensePassageRetriever(
        document_store=document_store,
        query_embedding_model="etalab-ia/dpr-question_encoder-fr_qa-camembert",
        passage_embedding_model="etalab-ia/dpr-ctx_encoder-fr_qa-camembert",
        model_version="v1.0",
        infer_tokenizer_classes=True,
        use_gpu=gpu_available,
    )
@pytest.fixture
def retriever_faq(document_store, gpu_available):
    """A title-embedding retriever used for FAQ-style matching."""
    return TitleEmbeddingRetriever(
        document_store=document_store,
        embedding_model="sentence-transformers/distiluse-base-multilingual-cased-v2",
        model_version="fcd5c2bb3e3aa74cd765d793fb576705e4ea797e",
        use_gpu=gpu_available,
        model_format="transformers",
        pooling_strategy="reduce_max",
        emb_extraction_layer=-1,
    )
@pytest.fixture
def retriever_piafeval():
    """A fresh PiafEvalRetriever metrics collector per test."""
    return PiafEvalRetriever()
@pytest.fixture
def reader_piafeval():
    """A fresh PiafEvalReader metrics collector per test."""
    return PiafEvalReader()
| StarcoderdataPython |
6455296 | ##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: <NAME>
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# ----------------------------------------------------------------------------
# This software is in the public domain, furnished "as is", without technical
# support, and with no warranty, express or implied, as to its usefulness for
# any purpose.
#
# ConfigurableIssuance.py
# Methods for setting up configurable issuance.
#
# Author: hansen
# ----------------------------------------------------------------------------
##
# This is a base file that is not intended to be overridden.
##
import TimeRangeUtils
import string, types, time
import TimeRange, AbsTime
class ConfigurableIssuance(TimeRangeUtils.TimeRangeUtils):
    """Methods for setting up configurable product issuance.

    NOTE(review): this is legacy Python 2 code -- `types.StringType`, the
    backquote repr and the `exec` statement below do not exist in
    Python 3; port before running under Python 3.
    """

    def __init__(self):
        TimeRangeUtils.TimeRangeUtils.__init__(self)

    def getIssuanceInfo(self, productIssuance, issuanceList, creationTime=None):
        """Build an Issuance object for the selected product issuance.

        `productIssuance` is matched against the description (first field)
        of each entry in `issuanceList`.  `creationTime` (Unix seconds),
        when given, replaces "now" as the anchor for all product times.
        """
        # Create a NarrativeDef for a "narrative" type of product
        # from an issuanceList and selected item
        (currentLocalTime, self._shift) = self.determineTimeShift()
        if creationTime is None:
            day = currentLocalTime.day
            month = currentLocalTime.month
            year = currentLocalTime.year
            hour = currentLocalTime.hour
            minutes = currentLocalTime.minute
        else:
            # Anchor on the supplied creation time instead of "now".
            localTime = time.localtime(creationTime)
            year = localTime[0]
            month = localTime[1]
            day = localTime[2]
            hour = localTime[3]
            minutes = localTime[4]
        # Determine "issuanceHour"
        startTime = AbsTime.absTimeYMD(year,month,day,hour)
        # find the entry for our selection
        #print productIssuance, issuanceList
        entry = self.getEntry(productIssuance, issuanceList)
        desc, startHour, endHour, expireHour, p1Label, \
            lateNightPhrase, lateDayPhrase, todayFlag, narrativeDef = entry
        period1Label = p1Label
        period1LateDayPhrase = lateDayPhrase
        period1LateNightPhrase = lateNightPhrase
        # Take care of "issuanceHour" variable
        startHour = self.convertIssuanceHour(startHour, hour, minutes)
        endHour = self.convertIssuanceHour(endHour, hour, minutes)
        expireHour = self.convertIssuanceHour(expireHour, hour, minutes)
        # Determine startTime and period1
        startTime = AbsTime.absTimeYMD(year, month, day, 0)
        startTime = startTime + startHour * 3600
        endTime = AbsTime.absTimeYMD(year, month, day, 0)
        endTime = endTime + endHour * 3600
        # Length of the first period, in hours.
        period1 = (endTime.unixTime() - startTime.unixTime())/3600
        # Set "period1" if it appears in narrativeDef
        newNarrativeDef = []
        totalHours = 0
        firstPeriod = 1
        for component, period in narrativeDef:
            # Handle custom components - added in OB8.2.
            # "Custom" components are intended to replace "priorPeriod" which is removed.
            # "Custom" component entries in a narrative definition are of the form:
            #    ("Custom", (componentName, timeRange))
            # where timeRange can be (start_hours, end_hours) or an AFPS.TimeRange.
            # Start_hours and end_hours are relative to midnight local time
            # of the product creation date.
            if component == "Custom":
                newNarrativeDef.append((component, period))
                continue
            if firstPeriod:
                if period == "period1":
                    period = period1
                else:
                    period1 = period
                firstPeriod = 0
            totalHours = totalHours + period
            newNarrativeDef.append((component, period))
        # Convert to GMT time before making time range
        startTime = startTime - self._shift
        tr = TimeRange.TimeRange(startTime, startTime + (totalHours * 3600))
        timeRange = tr
        period1TimeRange = TimeRange.TimeRange(
            tr.startTime(), tr.startTime() + period1*3600)
        narrativeDef = newNarrativeDef
        # Expiration time -- convert to GMT
        expireStartTime = AbsTime.absTimeYMD(year, month, day, 0) - self._shift
        expireStartTime = expireStartTime + expireHour * 3600
        expireTime = expireStartTime
        issueTime = AbsTime.current()
        #issueTime = self.getCurrentTime(
        #   None, "%l%M %p %Z %a %b %e %Y", stripLeading=1)
        #expireTimeRange = AFPS.TimeRange(expireStartTime, expireStartTime + 3600)
        #expireTime = string.upper(self.timeDisplay(expireTimeRange, "", "", "%d%H%M", ""))
        return Issuance(entry, timeRange, expireTime, issueTime, narrativeDef,
                        period1TimeRange, period1LateDayPhrase, period1LateNightPhrase,
                        period1Label, todayFlag)

    def convertIssuanceHour(self, issuanceHour, currentHour, currentMinutes):
        """Resolve an issuance hour spec to a concrete hour.

        `issuanceHour` may be an int (returned unchanged) or a Python
        expression string containing the literal "issuanceHour", which is
        substituted with the (possibly rounded-up) current hour and then
        evaluated.
        """
        if type(issuanceHour) == types.StringType:
            if currentMinutes > self.issuanceHour_minutesPastHour():
                currentHour = currentHour + 1
                # Don't cross to the next day
                if currentHour == 24:
                    currentHour = 23
            issuanceHour = string.replace(issuanceHour, "issuanceHour", `currentHour`)
            # NOTE(review): `exec` on a configuration string -- safe only as
            # long as issuance lists come from trusted local configuration.
            exec "resultHour = " + issuanceHour
            return resultHour
        else:
            return issuanceHour

    def getEntry(self, productIssuance, issuanceList):
        """Return the issuanceList entry whose description (first field)
        equals `productIssuance`, or None when no entry matches."""
        found =0
        for entry in issuanceList:
            issuanceDescription = entry[0]
            if productIssuance == issuanceDescription:
                found = 1
                break
        if found == 0:
            return None
        else:
            return entry

    def issuanceHour_minutesPastHour(self):
        # Minutes past the hour after which "issuanceHour" will jump to the next hour
        # The exception is Hour 23 which will always be truncated i.e. we won't jump
        # to the next day.
        #
        # Default is to truncate the hour so that we always get the hazards
        # reported for that hour.
        return 65
class Issuance:
    """Value object bundling everything computed by getIssuanceInfo.

    All reads go through the accessor methods below, which are kept for
    compatibility with the existing formatter call sites.
    """

    def __init__(self, entry, timeRange, expireTime, issueTime, narrativeDef,
                 period1TimeRange, period1LateDayPhrase, period1LateNightPhrase,
                 period1Label, todayFlag):
        self._entry = entry
        self._timeRange = timeRange
        self._expireTime = expireTime
        self._issueTime = issueTime
        self._narrativeDef = narrativeDef
        self._period1TimeRange = period1TimeRange
        self._period1LateDayPhrase = period1LateDayPhrase
        self._period1LateNightPhrase = period1LateNightPhrase
        self._period1Label = period1Label
        self._todayFlag = todayFlag

    def entry(self):
        return self._entry

    def timeRange(self):
        return self._timeRange

    def expireTime(self):
        return self._expireTime

    def issueTime(self):
        return self._issueTime

    def narrativeDef(self):
        return self._narrativeDef

    def period1TimeRange(self):
        return self._period1TimeRange

    def period1LateDayPhrase(self):
        return self._period1LateDayPhrase

    def period1LateNightPhrase(self):
        return self._period1LateNightPhrase

    def period1Label(self):
        return self._period1Label

    def todayFlag(self):
        return self._todayFlag
| StarcoderdataPython |
8197342 | <gh_stars>0
import os
import re
from flask import jsonify, request
def password_validator(password):
    """Check whether *password* is valid.

    A valid password is at least 6 characters long and contains at least
    one uppercase letter, one lowercase letter, one digit and one
    non-alphanumeric (special) character.

    Fix: always returns a bool (the original returned True or fell
    through to None; both are falsy/truthy-compatible for callers).
    """
    return bool(re.match(
        r'^(?=.{6,}$)(?=.*[A-Z])(?=.*[a-z])(?=.*[0-9])(?=.*[^A-Za-z0-9]).*',
        password))
def mail_validator(email):
    """Check whether *email* looks like a valid e-mail address.

    The address is lower-cased before matching, so mixed-case input is
    accepted.  Fix: always returns a bool (was True or implicit None).
    """
    return bool(re.match(r'[a-z0-9._%+-]+@[a-z0-9.-]+\.[a-z]', email.lower()))
def is_empty(field_list):
    """Return True if any field is blank, whitespace-only or digits-only.

    NOTE(review): a digits-only field counting as "empty" looks like a
    deliberate guard for free-text fields (e.g. names) -- confirm with
    the callers.

    Fixes: the original looped over the fields calling ``field.strip()``
    and discarding the result (strings are immutable, so it was a no-op),
    and returned True or implicit None; it now returns a proper bool.
    """
    return any(field == "" or field.isspace() or field.isdigit()
               for field in field_list)
def is_int(field_list):
    """Return True if every field is a non-empty string of digits.

    Bug fix: the original returned False when some field was not numeric
    but then fell through and returned None when *all* fields were
    numeric, so the function could never signal success.  It now returns
    True in that case (only callers explicitly comparing against None
    would notice; truthiness of the failure path is unchanged).
    """
    return all(field != "" and field.isdigit() for field in field_list)
def strip_spaces(field_list):
    """Return a new list with surrounding whitespace stripped from each field.

    Bug fix: the original called ``field.strip()`` inside the loop but
    discarded the result (strings are immutable), returning the input
    list completely unchanged.
    """
    return [field.strip() for field in field_list]
from django.conf.urls import url
from main import views

# URL routes for the `main` app: one endpoint that parses submitted input
# and one that serves readings back.
urlpatterns = [
    url(r'^parse$', views.parse, name='parse'),
    url(r'^reading$', views.reading, name='reading'),
]
| StarcoderdataPython |
1658292 | <filename>teste.py
from fixed_files_parse import Fixed_files

# Parse every fixed-width line of record.txt using the `record` layout.
ff = Fixed_files('record', )
records = open('record.txt').readlines()
rec_in = [ff.parse(record) for record in records]
# rec_in
# for rec in rec_in:
#     print rec
#
# for n, r in enumerate(rec_in):
#     print ff.unparse(r) == records[n]
| StarcoderdataPython |
296847 | <filename>tools/map_classes.py
#!/usr/bin/env python3
import argparse
import inspect
import json
import os
from pathlib import Path
from typing import Dict, List
import boto3
import botocore
from botocore import xform_name
# CLI: this tool is run once per AWS service.
parser = argparse.ArgumentParser(
    description="Gathers data needed for annotating classes and generating test cases by parsing the boto JSON schema."
)
parser.add_argument(
    "--service",
    type=str,
    required=True,
    help="Service name (lowercase) to annotate classes for",
)
args = parser.parse_args()

# Constants for file access and boto schema key/class naming.
UTF_8 = "utf-8"
DATA_FOLDER = "data"
COLLECTION_SUFFIX = "Collection"
# Key in resources-1.json listing a resource's collections.
COLLECTIONS_KEY = "hasMany"
RESOURCE_BASE_CLASS = "ServiceResource"
COLLECTION_BASE_CLASS = "ResourceCollection"
PAGINATOR_BASE_CLASS = "Paginator"
WAITER_BASE_CLASS = "Waiter"
CLIENT_BASE_CLASS = "BaseClient"
RESOURCES_FILE_NAME = "resources-1.json"
PAGINATORS_FILE_NAME = "paginators-1.json"
WAITERS_FILE_NAME = "waiters-2.json"
READ = "r"
WRITE = "w"
SERVICE_MODEL_FILE_NAME = "service-2.json"
# Key in resources-1.json listing a resource's constructor identifiers.
CONSTRUCTOR_ARGS_KEY = "identifiers"
DATA_FILE_NAME = f"{args.service}_data.json"
def get_latest_version(folder: Path) -> Path:
    """Return the sub-folder of *folder* with the lexicographically
    greatest name (boto keeps one schema folder per API version date,
    so the max name is the newest version)."""
    version_names = os.listdir(folder.resolve())
    return folder.joinpath(max(version_names))
# Locate the boto3/botocore data folders for the requested service and
# load the newest client service model JSON.
here = Path(__file__).parent
boto3_path = Path(inspect.getfile(boto3)).parent
botocore_path = Path(inspect.getfile(botocore)).parent
resource_data_folder = boto3_path.joinpath(DATA_FOLDER).joinpath(args.service)
client_data_folder = botocore_path.joinpath(DATA_FOLDER).joinpath(args.service)
schema_folder = get_latest_version(client_data_folder)
service_model_file = schema_folder.joinpath(SERVICE_MODEL_FILE_NAME)
with service_model_file.open(READ, encoding=UTF_8) as file:
    service_model = json.load(file)
# e.g. "S3"; used to build stub class and fixture names below.
SERVICE_ABBREVIATION = service_model["metadata"]["serviceId"]
SERVICE_ABBREVIATION_LOWER = SERVICE_ABBREVIATION.lower()
def get_waiters(file: Path) -> List[Dict[str, str]]:
    """Build one annotation record per waiter defined in *file*."""
    with file.open(READ, encoding=UTF_8) as fp:
        definitions = json.load(fp)
    records = []
    for waiter in definitions["waiters"]:
        snake = xform_name(waiter)
        records.append(
            {
                "stub_class": f"{waiter}{WAITER_BASE_CLASS}",
                "boto_class": f"{SERVICE_ABBREVIATION}.{WAITER_BASE_CLASS}.{waiter}",
                "base_class": WAITER_BASE_CLASS,
                "fixture_name": f"gen_{snake}_waiter",
                "snake_name": snake,
            }
        )
    return records
def get_paginators(file: Path) -> List[Dict[str, str]]:
    """Build one annotation record per paginator defined in *file*."""
    with file.open(READ, encoding=UTF_8) as fp:
        definitions = json.load(fp)
    records = []
    for paginator in definitions["pagination"]:
        snake = xform_name(paginator)
        records.append(
            {
                "stub_class": f"{paginator}{PAGINATOR_BASE_CLASS}",
                "boto_class": f"{SERVICE_ABBREVIATION}.{PAGINATOR_BASE_CLASS}.{paginator}",
                "base_class": PAGINATOR_BASE_CLASS,
                "fixture_name": f"gen_{snake}_paginator",
                "snake_name": snake,
            }
        )
    return records
def get_collections(
    resource_definition: Dict, parent: Dict[str, str]
) -> List[Dict[str, str]]:
    """Build one annotation record per collection declared on
    *resource_definition*, nested under the *parent* resource record."""
    records = []
    for collection in resource_definition["hasMany"]:
        snake = xform_name(collection)
        records.append(
            {
                "stub_class": f"{parent['stub_class']}{collection}{COLLECTION_SUFFIX}",
                "boto_class": f"{SERVICE_ABBREVIATION_LOWER}.{parent['stub_class']}.{snake}{COLLECTION_SUFFIX}",
                "base_class": COLLECTION_BASE_CLASS,
                "fixture_name": f"gen_{parent['snake_name']}_{snake}_collection",
                "snake_name": snake,
                "parent_fixture_name": parent["fixture_name"],
            }
        )
    return records
def get_resource(key: str, resource_definition: Dict) -> tuple:
    """Return ``(resource_record, collection_records)`` for one resource.

    The ``constructor_args`` field holds one ``random_str()`` placeholder
    per identifier the resource's constructor expects.
    """
    identifiers = resource_definition.get(CONSTRUCTOR_ARGS_KEY, ())
    # Handle the resource object
    resource = {
        "stub_class": key,
        "boto_class": f"{SERVICE_ABBREVIATION_LOWER}.{key}",
        "base_class": RESOURCE_BASE_CLASS,
        "fixture_name": f"gen_{xform_name(key)}",
        "snake_name": xform_name(key),
        "constructor_args": ",".join("random_str()" for _ in identifiers),
    }
    # Handle any collections that are part of this resource
    if COLLECTIONS_KEY in resource_definition:
        collections = get_collections(resource_definition, resource)
    else:
        collections = []
    return resource, collections
def get_resources(folder: Path) -> Dict:
    """Parse resources-1.json in *folder* into annotation records for the
    service resource, every individual resource, and all of their
    collections.

    Returns a dict with keys "service_resource" (one record),
    "resources" (list) and "collections" (list).
    """
    resources_file = folder.joinpath(RESOURCES_FILE_NAME)
    with resources_file.open(READ, encoding=UTF_8) as file:
        resources_json = json.load(file)
    # Handle service resource
    result = {
        "service_resource": {
            "stub_class": f"{SERVICE_ABBREVIATION}{RESOURCE_BASE_CLASS}",
            "boto_class": f"{SERVICE_ABBREVIATION_LOWER}.{RESOURCE_BASE_CLASS}",
            "base_class": RESOURCE_BASE_CLASS,
            "fixture_name": f"gen_{SERVICE_ABBREVIATION_LOWER}_resource",
            "snake_name": f"{SERVICE_ABBREVIATION_LOWER}_resource",
        },
        "collections": [],
        "resources": [],
    }
    # Handle any collections that are part of the service resource
    service_resource_definition = resources_json["service"]
    if COLLECTIONS_KEY in service_resource_definition:
        result["collections"] += [
            {
                "stub_class": f"{RESOURCE_BASE_CLASS}{collection}{COLLECTION_SUFFIX}",
                "boto_class": f"{SERVICE_ABBREVIATION_LOWER}.{xform_name(collection)}{COLLECTION_SUFFIX}",
                "base_class": COLLECTION_BASE_CLASS,
                "fixture_name": f"gen_service_resource_{xform_name(collection)}_collection",
                "snake_name": xform_name(collection),
                "parent_fixture_name": result["service_resource"]["fixture_name"],
            }
            for collection in service_resource_definition["hasMany"]
        ]
    # Handle resources and any collections they may have
    for key, val in resources_json["resources"].items():
        item, collections = get_resource(key, val)
        result["resources"].append(item)
        result["collections"] += collections
    return result
# boto3 only ships a resource model (in addition to the client) for some
# services.
HAS_RESOURCES = resource_data_folder.exists()
data = {
    "client": {
        "stub_class": f"{SERVICE_ABBREVIATION}Client",
        "boto_class": SERVICE_ABBREVIATION,
        "base_class": CLIENT_BASE_CLASS,
        "fixture_name": f"gen_{SERVICE_ABBREVIATION_LOWER}_client",
        "snake_name": f"{SERVICE_ABBREVIATION_LOWER}_client",
    }
}
waiters_file = schema_folder.joinpath(WAITERS_FILE_NAME)
paginators_file = schema_folder.joinpath(PAGINATORS_FILE_NAME)
# Paginators and waiters are optional per service.
if paginators_file.exists():
    data["paginators"] = get_paginators(paginators_file)
if waiters_file.exists():
    data["waiters"] = get_waiters(waiters_file)
if HAS_RESOURCES:
    schema_folder = get_latest_version(resource_data_folder)
    data.update(get_resources(schema_folder))
# Persist the gathered data for the later annotation/test-gen stages.
output_folder = here.parent.joinpath(DATA_FOLDER)
output_folder.mkdir(parents=True, exist_ok=True)
output_file = output_folder.joinpath(DATA_FILE_NAME)
with output_file.open(WRITE, encoding=UTF_8) as file:
    json.dump(data, file)
| StarcoderdataPython |
1780448 | # Copyright 2018 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Python package for the ros2 launch api."""
from .api import get_share_file_path_from_package
from .api import InvalidPythonLaunchFileError
from .api import launch_a_python_launch_file
from .api import LaunchFileNameCompleter
from .api import MultipleLaunchFilesError
from .api import print_a_python_launch_file
from .api import print_arguments_of_python_launch_file
__all__ = [
'get_share_file_path_from_package',
'InvalidPythonLaunchFileError',
'LaunchFileNameCompleter',
'launch_a_python_launch_file',
'MultipleLaunchFilesError',
'print_a_python_launch_file',
'print_arguments_of_python_launch_file',
]
| StarcoderdataPython |
8129481 | """
Installation configuration.
"""
import os
import json
import setuptools

# Fetch the root folder to specify absolute paths to the "include" files
ROOT = os.path.normpath(os.path.dirname(__file__))

# Specify which files should be added to the installation
PACKAGE_DATA = [
    os.path.join(ROOT, "surface", "res", "metadata.json"),
    os.path.join(ROOT, "surface", "log", ".keep")
]

# Project metadata (title, version, author, ...) lives in metadata.json so
# it can also be read by the application at runtime.
with open(os.path.join(ROOT, "surface", "res", "metadata.json")) as f:
    metadata = json.load(f)

setuptools.setup(
    name=metadata["__title__"],
    description=metadata["__description__"],
    version=metadata["__version__"],
    author=metadata["__lead__"],
    author_email=metadata["__email__"],
    maintainer=metadata["__lead__"],
    maintainer_email=metadata["__email__"],
    url=metadata["__url__"],
    packages=setuptools.find_namespace_packages(),
    package_data={"surface": PACKAGE_DATA},
    include_package_data=True,
    classifiers=[
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.6",
    ],
    install_requires=[
        "redis",
        "python-dotenv",
        "msgpack",
        "numpy",
        "sklearn",
        "pandas",
        "opencv-python",
        "inputs"
    ],
    python_requires=">=3.6",
)
| StarcoderdataPython |
1955072 | <gh_stars>1-10
from pyrogram import filters
from pyrogram.types import Message
from wbb import SUDOERS, app, arq
from wbb.utils.filter_groups import autocorrect_group
@app.on_message(filters.command("autocorrect"))
async def autocorrect_bot(_, message: Message):
    """Spell-check the replied-to message via ARQ and answer with the
    corrected text (guard clauses bail out with an explanatory reply)."""
    reply = message.reply_to_message
    if not reply:
        return await message.reply_text("Reply to a text message.")
    text = reply.text or reply.caption
    if not text:
        return await message.reply_text("Reply to a text message.")
    data = await arq.spellcheck(text)
    if not data.ok:
        return await message.reply_text("Something wrong happened.")
    result = data.result
    await message.reply_text(result.corrected or "Empty")
| StarcoderdataPython |
5048710 | from scipy.io import loadmat, savemat
# Keep only rows 2-3 of the loaded colour table, show them, and save the
# shrunken table as a new .mat file.
palette = loadmat('../data/color150.mat')
palette['colors'] = palette['colors'][2:4]
print(palette['colors'])
savemat("../data/color2.mat", palette)
| StarcoderdataPython |
11337618 | # -*- coding: utf-8 -*-
import cv2
import zbar

# Scan webcam frames until a barcode/QR code is decoded, write the decoded
# payload to userID.txt and exit.  Pressing ESC aborts without capturing.
scanner = zbar.ImageScanner()
scanner.parse_config('enable')

cap = cv2.VideoCapture(0)
captured = False
while True:
    ret, camera = cap.read()
    gray = cv2.cvtColor(camera, cv2.COLOR_BGR2GRAY)
    rows, cols = gray.shape[:2]
    image = zbar.Image(cols, rows, 'Y800', gray.tostring())
    scanner.scan(image)
    #cv2.namedWindow('camera', cv2.WINDOW_NORMAL)
    #cv2.setWindowProperty('camera', cv2.WND_PROP_FULLSCREEN, 1)
    cv2.imshow("camera", camera)
    for symbol in image:
        print('%s' % symbol.data)
        # Fix: use a context manager so the handle is flushed and closed
        # even if the write fails (the original leaked it on error).
        with open('userID.txt', 'w') as f:
            f.write('%s' % symbol.data)
        captured = True
    if captured:
        break
    if cv2.waitKey(1) == 27:  # ESC
        break

cap.release()
cv2.destroyAllWindows()
| StarcoderdataPython |
3405186 | <reponame>ramoseh/wizeline-challenge
from fastapi.testclient import TestClient
from app import app
client = TestClient(app)
def test_hello():
    """GET /hello returns the greeting payload."""
    resp = client.get("/hello")
    assert resp.status_code == 200
    assert resp.json() == {"payload": "Hello World!"}
def test_get_table():
    """GET /table succeeds."""
    resp = client.get("/table")
    assert resp.status_code == 200
def test_post_table():
    """POST /table with a complete record succeeds."""
    payload = {"name": "test", "email": "<EMAIL>"}
    resp = client.post("/table", json=payload)
    assert resp.status_code == 200
def test_post_table_no_email():
    """POST /table without an email is rejected as unprocessable."""
    resp = client.post("/table", json={"name": "test"})
    assert resp.status_code == 422
def test_post_table_no_name():
    """POST /table without a name is rejected as unprocessable."""
    resp = client.post("/table", json={"email": "test"})
    assert resp.status_code == 422
def test_get_weather():
    """GET /weather succeeds."""
    resp = client.get("/weather")
    assert resp.status_code == 200
| StarcoderdataPython |
12827487 | <reponame>drylikov/mappad.ru<gh_stars>0
from datetime import datetime
from webapp import db
class Track(db.Model):
    """A GPS track uploaded by a user, stored with its raw GPX payload."""

    __tablename__ = 'tracks'

    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(140))
    timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    description = db.Column(db.String(250))
    raw_gpx = db.Column(db.Text())

    def __repr__(self):
        # Produces exactly the same string as the old .format() version.
        return f'<Track {self.title}>'
| StarcoderdataPython |
9682760 | <filename>tests/common_mocks/mock_policy/mock_policy.py
from continual_rl.policies.policy_base import PolicyBase
from tests.common_mocks.mock_policy.mock_policy_config import MockPolicyConfig
from tests.common_mocks.mock_environment_runner import MockEnvironmentRunner
class MockPolicy(PolicyBase):
    """
    A mock policy for use in unit testing: a plain de-abstraction of the
    base class.  Monkeypatch individual methods for test-specific usage.
    """

    def __init__(self, config: MockPolicyConfig, observation_space, action_spaces):
        super().__init__()
        self._config = config
        self.train_run_count = 0  # number of train() invocations so far
        self.current_env_runner = None

    def get_environment_runner(self, task_spec):
        # Saved off only so tests can spy on runner behaviour; a real
        # policy should not keep this reference.
        runner = MockEnvironmentRunner()
        self.current_env_runner = runner
        return runner

    def compute_action(self, observation, task_id, action_space_id, last_timestep_data, eval_mode):
        pass

    def train(self, storage_buffer):
        self.train_run_count += 1

    def save(self, output_path_dir, task_id, task_total_steps):
        pass

    def load(self, model_path):
        pass
| StarcoderdataPython |
4804555 | <filename>egs/mini_librespeech/s5/test-cross-entropy.py
#!/usr/bin/env python3
# Results with this script
# 2 layer TDNN, no bn, Adam, lr=0.001: valid loss was 2.8642 after 6 epochs
# 3 layer TDNN, no bn, Adam, lr=0.001: valid loss was 2.4447 with <4 epochs. Stopped training as the trend was clear.
# 3 layer TDNN, bn, Adam, lr=0.001: valid loss was 2.15 with <4 epochs
import argparse
import os
import sys
import torch
import torch.optim as optim
import torch.nn.functional as F
import torch.nn as nn
import torch.nn.init as init
import pkwrap
import numpy as np
class Net(nn.Module):
    """Three-layer TDNN acoustic model with a cross-entropy head.

    Input is a (batch, frames, feat_dim) tensor.  The three Conv1d layers
    (kernel sizes 5, 3, 3) consume 8 frames of context in total; the
    output is log-softmax over `output_dim` classes for every remaining
    frame, flattened to (batch * frames_out, output_dim).
    """

    def __init__(self, output_dim, feat_dim):
        super(Net, self).__init__()
        self.input_dim = feat_dim
        self.output_dim = output_dim
        self.tdnn1 = nn.Conv1d(feat_dim, 512, kernel_size=5, stride=1)
        self.tdnn1_bn = nn.BatchNorm1d(512, affine=False)
        self.tdnn2 = nn.Conv1d(512, 512, kernel_size=3, stride=1)
        self.tdnn2_bn = nn.BatchNorm1d(512, affine=False)
        self.tdnn3 = nn.Conv1d(512, 512, kernel_size=3, stride=1)
        self.tdnn3_bn = nn.BatchNorm1d(512, affine=False)
        self.xent_prefinal_layer = nn.Linear(512, 512)
        self.xent_layer = nn.Linear(512, output_dim)
        self.initialize()

    def initialize(self):
        # Xavier init on the convolution weights only; the linear layers
        # keep PyTorch's default initialisation.
        init.xavier_normal_(self.tdnn1.weight)
        init.xavier_normal_(self.tdnn2.weight)
        init.xavier_normal_(self.tdnn3.weight)

    def forward(self, input):
        # (batch, frames, feats) -> (batch, feats, frames) for Conv1d.
        hidden = input.permute(0, 2, 1)
        for conv, bn in ((self.tdnn1, self.tdnn1_bn),
                         (self.tdnn2, self.tdnn2_bn),
                         (self.tdnn3, self.tdnn3_bn)):
            hidden = bn(F.relu(conv(hidden)))
        # Back to time-major and flatten frames across the batch.
        hidden = hidden.permute(0, 2, 1).reshape(-1, 512)
        prefinal = F.relu(self.xent_prefinal_layer(hidden))
        return F.log_softmax(self.xent_layer(prefinal), dim=1)
class Mls(torch.utils.data.Dataset):
    """Dataset of fixed-size training chunks described by an egs file.

    Each line of *egs_file* names one chunk as "<utt>-<start_frame>".
    ``__getitem__`` returns ``(features, targets)`` where the feature
    window covers an 8-frame chunk plus 4 frames of left/right context,
    and targets are the 8 alignment labels for the chunk.
    """

    def __init__(self, feat_dict, target_dict, egs_file):
        self.feat_dict = feat_dict
        self.target_dict = target_dict
        self.chunks = []
        with open(egs_file) as ipf:
            for ln in ipf:
                self.chunks.append(ln.strip().split()[0])

    def __len__(self):
        return len(self.chunks)

    def __getitem__(self, idx):
        chunk = self.chunks[idx]
        s = chunk.split('-')
        start_frame = int(s[-1])
        # The utterance name itself may contain '-', so rejoin the rest.
        n = '-'.join(s[:-1])
        f = self.feat_dict[n]
        chunk_size = 8
        # Bug fix: targets were read from a module-level `target_dict`
        # instead of the dict handed to the constructor.
        t = self.target_dict[n][start_frame:start_frame + chunk_size]
        # Pad 20 edge frames on both sides so the context window never
        # underflows near utterance boundaries.
        fnp = np.pad(f.numpy(), [(20, 20), (0, 0)], mode='edge')
        f = torch.tensor(fnp)
        start_frame += 20
        x = f[start_frame - 4:start_frame + chunk_size + 4]
        t = torch.tensor(t)
        return x, t
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description="")
parser.add_argument("--mode")
args = parser.parse_args()
dirname = './'
if args.mode == "init":
model = Net(2016, 40)
torch.save(model.state_dict(), os.path.join(dirname, "0.pt"))
if args.mode == "train":
num_iters = 20
feat_file = 'scp:data/train_clean_5_sp_hires/feats.scp'
feat_dict = {}
target_dict = {}
r = pkwrap.script_utils.feat_reader(feat_file)
while not r.Done():
k = r.Key()
feat_dict[k] = pkwrap.kaldi.matrix.KaldiMatrixToTensor(r.Value())
m = feat_dict[k].mean(0)
v = feat_dict[k].var(0)
feat_dict[k] = (feat_dict[k]-m)
r.Next()
print("Read all features")
for i in range(1,21):
ali_name = 'exp/tri3b_ali_train_clean_5_sp/ali.{}.txt'.format(i)
with open(ali_name) as ipf:
for ln in ipf:
lns = ln.strip().split()
n = lns[0]
v = list(map(int, lns[1:]))
target_dict[n] = v
lr = 0.001
model = Net(2016, 40)
base_model = '{}.pt'.format(0)
model.load_state_dict(torch.load(base_model))
model = model.cuda()
optimizer = optim.Adam(model.parameters(), lr=lr)
for i in range(0, 6):
#logf = open('log/{}.log'.format(i),'w')
print("Starting iter={}".format(i))
dataset = Mls(feat_dict, target_dict, 'exp/chain/tdnn_sp/egs_ce/egs.scp')
loader = torch.utils.data.DataLoader(dataset, batch_size=128)
lr = lr/2
model.train()
for idx, (feat, target) in enumerate(loader):
# feat_i = feat.permute(0,2,1)
feat_i = feat
feat_i = feat_i.cuda()
#target_t = torch.cat(target)
target_t = target.reshape(-1).cuda()
x = model(feat_i)
loss = F.nll_loss(x, target_t)
if idx%20 == 0:
print(idx, loss.item())
sys.stdout.flush()
if idx%100 == 0:
prediction = x.argmax(1).cpu()
acc = torch.eq(prediction, target_t.cpu()).sum()
#acc = acc/float(target_t.shape[0])
print("Accuracy={}".format(acc))
optimizer.zero_grad()
loss.backward()
optimizer.step()
if idx>0 and idx%100 == 0: # validate
norms = []
for p in model.parameters():
if p.grad.data is not None:
norms.append(p.grad.data.norm(2))
print("Norms", norms)
with torch.no_grad():
model.eval()
# acc = 0
nt = 0
utt_list = [utt.strip() for utt in open('exp/chain/tdnn_sp/egs/valid_uttlist')]
valid_loss = None
for utt in utt_list:
utt = utt.strip()
feat = feat_dict[utt] #.numpy()
# add left and right context before testing
#feat = np.pad(feat, [(2,2), (0,0)], mode='edge')
d = feat.shape[-1]
left_context = feat[0,:].repeat(4).reshape(-1,d)
right_context = feat[-1,:].repeat(4).reshape(-1,d)
feat = torch.cat([left_context, feat, right_context])
#feat = torch.tensor(feat)
feat = feat.unsqueeze(0).cuda()
tgt = target_dict[utt]
nt += len(tgt)
y = model(feat) #.cpu().squeeze(0)
valid_loss_ = F.nll_loss(y, torch.tensor(tgt).cuda())
if valid_loss is None:
valid_loss = valid_loss_
else:
valid_loss += valid_loss_
# predictions = y.argmax(1)
# acc_ = torch.eq(predictions, torch.tensor(tgt)).sum()
# acc += acc_.tolist()
# sys.stdout.flush()
# pred_list = set([x.tolist() for x in predictions[:]])
model.train()
# print("Validation acc=", float(acc)/float(nt))
print("Valid loss=", valid_loss/len(utt_list))
# model = model.cpu()
# torch.save(model.state_dict(), os.path.join(dirname, "{}.pt".format(i+1)))
if args.mode == "test":
feat_file = 'scp:data/train_clean_5_sp_hires/feats.scp'
feat_dict = {}
target_dict = {}
r = pkwrap.script_utils.feat_reader(feat_file)
while not r.Done():
feat_dict[r.Key()] = pkwrap.kaldi.matrix.KaldiMatrixToTensor(r.Value())
r.Next()
print("Read all features")
for i in range(1,21):
ali_name = 'exp/tri3b_ali_train_clean_5_sp/ali.{}.txt'.format(i)
with open(ali_name) as ipf:
for ln in ipf:
lns = ln.strip().split()
n = lns[0]
v = list(map(int, lns[1:]))
target_dict[n] = v
model = Net(2016, 40)
base_model = '{}.pt'.format(0)
model.load_state_dict(torch.load(base_model))
model.eval()
dataset = Mls(feat_dict, target_dict, 'exp/chain/tdnn_sp/egs_ce/egs.scp')
loader = torch.utils.data.DataLoader(dataset, batch_size=128)
for idx, (feat, target) in enumerate(loader):
feat_i = feat
#target_t = torch.cat(target)
#target_t = target
x = model(feat_i)
print(x.shape)
print(F.nll_loss(x, target.reshape(-1)))
quit(0)
| StarcoderdataPython |
9766783 | <gh_stars>10-100
#!/usr/bin/env python3
"""
This implements a simple Evolutionary Programming (EP) system, but it
does not evolve state machines as done with the original EP approach.
TODO convert to a state machines problem
"""
import os
from toolz import pipe
from leap_ec import Individual, context, test_env_var
from leap_ec import ops, util
from leap_ec.decoder import IdentityDecoder
from leap_ec.real_rep.problems import SpheroidProblem
from leap_ec.real_rep.initializers import create_real_vector
from leap_ec.real_rep.ops import mutate_gaussian
def print_population(population, generation):
    """Pretty-print each individual of *population* for *generation*.

    Emits one line per individual containing the generation number, the
    genome, and the fitness, space-separated (default ``print`` formatting).

    :param population: iterable of individuals exposing ``genome`` and
        ``fitness`` attributes
    :param generation: generation number printed at the start of each line
    :return: None
    """
    for member in population:
        print(generation, member.genome, member.fitness)
BROOD_SIZE = 3 # how many offspring each parent will reproduce
if __name__ == '__main__':
    # Define the real value bounds for initializing the population. In this case,
    # we define a genome of four bounds.

    # the (-5.12,5.12) was what was originally used for this problem in
    # <NAME>'s 1975 dissertation, so was used for historical reasons.
    bounds = [(-5.12, 5.12), (-5.12, 5.12), (-5.12, 5.12), (-5.12, 5.12)]

    # Five-individual starting population on the (minimized) spheroid problem.
    parents = Individual.create_population(5,
                                           initialize=create_real_vector(
                                               bounds),
                                           decoder=IdentityDecoder(),
                                           problem=SpheroidProblem(
                                               maximize=False))

    # Evaluate initial population
    parents = Individual.evaluate_population(parents)

    # print initial, random population
    print_population(parents, generation=0)

    # When running the test harness, just run for two generations
    # (we use this to quickly ensure our examples don't get bitrot)
    if os.environ.get(test_env_var, False) == 'True':
        max_generation = 2
    else:
        max_generation = 100

    # Set up a generation counter using the default global context variable
    generation_counter = util.inc_generation()

    while generation_counter.generation() < max_generation:
        # EP reproduction: every parent is cloned/mutated BROOD_SIZE times,
        # then mu+lambda truncation keeps the best len(parents) survivors.
        offspring = pipe(parents,
                         ops.cyclic_selection,
                         ops.clone,
                         mutate_gaussian(std=.1, expected_num_mutations='isotropic'),
                         ops.evaluate,
                         # create the brood
                         ops.pool(size=len(parents) * BROOD_SIZE),
                         # mu + lambda
                         ops.truncation_selection(size=len(parents),
                                                  parents=parents))
        parents = offspring

        generation_counter()  # increment to the next generation

    # Just to demonstrate that we can also get the current generation from
    # the context
    print_population(parents, context['leap']['generation'])
| StarcoderdataPython |
4852024 | """
Contains the TemplateProcessor class for handling template input file data
"""
from ..utils import regex
import re
class Template(object):
    """
    A class for handling template input files for electronic structure theory codes

    Parameters
    ----------
    template_path : str
        A path to a template input file
    """
    def __init__(self, template_path):
        # Read the whole template once; all later queries slice this string.
        with open(template_path, 'r') as f:
            template = f.read()
        self.template = template
        # String positions (start, end) of the last xyz geometry block.
        self.start, self.end = self.parse_xyz()

    def parse_xyz(self):
        """
        Locates the file positions of the xyz geometry.

        Returns
        -------
        bounds : tuple
            A tuple of size two: start and end string positions of the xyz geometry block

        Raises
        ------
        Exception
            If the template contains no xyz geometry block.
        """
        iter_matches = re.finditer(regex.xyz_block_regex, self.template, re.MULTILINE)
        matches = [match for match in iter_matches]
        # BUG FIX: a list comprehension never yields None, so the previous
        # `matches is None` check could not fire and a geometry-less template
        # crashed on `matches[-1]` below.  Test for emptiness instead.
        if not matches:
            raise Exception("No XYZ geometry found in template input file")
        # only find last xyz if there are multiple
        # grab string positions of xyz coordinates
        start = matches[-1].start()
        end = matches[-1].end()
        return start, end

    def header_xyz(self):
        """
        The header of the xyz template input file (all text before the geometry)

        Returns
        -------
        header : str
            All template input file text before xyz geometry specification
        """
        header = self.template[:self.start]
        return header

    def footer_xyz(self):
        """
        The footer of the xyz template input file (all text after the geometry)

        Returns
        -------
        footer : str
            All template input file text after xyz geometry specification
        """
        footer = self.template[self.end:]
        return footer

    def extract_xyz(self):
        """
        Extracts an xyz-style geometry block from a template input file

        Returns
        -------
        xyz : str
            An xyz geometry of the form:
            atom_label x_coord y_coord z_coord
            atom_label x_coord y_coord z_coord
            ...
        """
        xyz = self.template[self.start:self.end]
        return xyz
| StarcoderdataPython |
9765549 | <filename>tools/util.py
# Copyright (C) 2015 The Minorminor Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import sys
import os
import hashlib
import urllib
import contextlib
import subprocess
import __builtin__
from distutils.spawn import find_executable
def make_sure_dir(d):
    """Create directory *d* (including parents) if it does not already exist.

    Tolerates the race where another process creates the directory between
    the existence check and the makedirs call.
    """
    if os.path.isdir(d):
        return
    try:
        os.makedirs(d)
    except __builtin__.OSError as e:
        # Only re-raise if the directory still does not exist: a concurrent
        # creation by another process is not an error.
        if not os.path.isdir(d):
            raise e
def hash_of(file):
    """Return a hashlib SHA-1 object updated with the contents of *file*.

    Reads in 64 KiB chunks so arbitrarily large files can be hashed without
    loading them fully into memory.
    """
    h = hashlib.sha1()
    with __builtin__.open(file, 'rb') as f:
        while True:
            buff = f.read(65536)
            if not buff:
                break
            h.update(buff)
    return h
def sha1_of(file):
    """Return the hex-encoded SHA-1 digest of *file*'s contents."""
    return hash_of(file).hexdigest()
def is_integrated(file, sha1):
    """Check whether *file*'s SHA-1 digest matches the expected *sha1*.

    On mismatch, prints both digests to stderr and returns False.
    """
    h = sha1_of(file)
    if sha1 != h:
        print('\n received SHA-1: %s \n expected SHA-1: %s' %
              (h, sha1), file=sys.stderr)
        return False
    return True
def path_of(userpath):
    """Expand a leading ``~/`` in *userpath* to the user's home directory.

    Any other path (absolute, relative, or a bare ``~`` with no slash) is
    returned unchanged.
    """
    return os.path.expanduser(userpath) if userpath.startswith('~/') else userpath
def sha1_of_file(filepath):
    """Return the hex-encoded SHA-1 digest of *filepath*'s contents.

    Kept for backward compatibility: this is the same operation as
    ``sha1_of``, so it now delegates to ``hash_of`` instead of duplicating
    the identical 64 KiB chunked-read loop (same algorithm, same chunk
    size, same binary open mode).
    """
    return hash_of(filepath).hexdigest()
def hash_of_url(url):
    """Return the hex-encoded SHA-1 digest of the resource at *url*.

    Streams the response in 4 KiB chunks; the connection is closed via
    ``contextlib.closing`` even if reading fails.

    NOTE(review): ``urllib.urlopen`` exists only in Python 2; on Python 3
    this would need ``urllib.request.urlopen`` — confirm the target runtime.
    """
    h = hashlib.sha1()
    with contextlib.closing(urllib.urlopen(url)) as f:  # may be binary_file
        while True:
            data = f.read(4096)
            if not data:
                break
            h.update(data)
    return h.hexdigest()
def download(url, to, verbose):
    """Download *url* to local path *to* by shelling out to ``curl``.

    Exits the whole process with curl's return code if curl fails; if curl
    is not installed, prints a message to stderr and returns without
    downloading.

    NOTE(review): ``--insecure`` disables TLS certificate verification —
    confirm this is intentional for the environments this tool targets.
    """
    try:
        if verbose:
            print("\ndownload %s\n to %s\n" % (url, to), file=sys.stderr)
        if find_executable('curl') is not None:
            subprocess.check_output(['curl',
                                     '--proxy-anyauth',
                                     '--create-dirs',
                                     '-f',
                                     '--silent',
                                     '--insecure',
                                     '-o', to,
                                     '--url', url
                                     ])
        else:
            print("need install curl", file=sys.stderr)
    except subprocess.CalledProcessError as e:
        print('\ncurl is failed to download %s :\n%s,\n%s' %
              (url, e.cmd, e.output), file=sys.stderr)
        sys.exit(e.returncode)
| StarcoderdataPython |
93964 | """
h2o2_mk2012_ad.py
Hydrogen peroxide, H2O2, ground state surface from
Ref [1]_. The coefficients are available from the references
supplementary information as the 'adiabatic PES', which
corresponds to the "V+C+R+H+D" results.
The surface is implemented in internal coordinates.
X1 ... O1 -- H1 bond length (Angstroms)
X2 ... O2 -- H2 bond length ( " " )
X3 ... O1 -- O2 bond length ( " " )
X4 ... O2-O1-H1 bond angle (degrees)
X5 ... O1-O2-H2 bond angle ( " " )
X6 ... dihedral angle ( " " )
References
----------
.. [1] <NAME> and <NAME>. J. Comp. Chem. 34, 337-344 (2013).
https://doi.org/10.1002/jcc.23137
"""
import nitrogen as n2
import nitrogen.autodiff.forward as adf
import numpy as np
def Vfun(X, deriv = 0, out = None, var = None):
    """
    Evaluate the adiabatic H2O2 potential energy surface.

    expected order : r1, r2, R, a1, a2, tau
    (bond lengths in Angstroms, bond/dihedral angles in degrees; see the
    module docstring for the coordinate definitions)

    The polynomial expansion is evaluated by ``calcsurf`` in scaled
    Simons-Parr-Finlan / radian coordinates and the result is scaled by
    ``n2.constants.Eh``.
    """
    x = n2.dfun.X2adf(X, deriv, var)
    r1 = x[0]
    r2 = x[1]
    R = x[2]
    a1 = x[3]
    a2 = x[4]
    tau = x[5]

    # Define reference (equilibrium) values
    Re = 1.45538654 # Angstroms
    re = 0.96257063 # Angstroms
    ae = 101.08307909 # degrees

    q1 = (r1 - re) / r1 # Simons-Parr-Finlan coordinates
    q2 = (r2 - re) / r2
    q3 = (R - Re) / R
    q4 = (a1 - ae) * np.pi/180.0 # radians
    q5 = (a2 - ae) * np.pi/180.0 # radians
    q6 = tau * np.pi/180.0 # radians

    # Calculate surface
    v = calcsurf([q1,q2,q3,q4,q5,q6]) * n2.constants.Eh

    return n2.dfun.adf2array([v], out)
######################################
#
# Define module-scope PES DFun object
#
PES = n2.dfun.DFun(Vfun, nf = 1, nx = 6)
#
#
######################################
def calcsurf(q):
    """Evaluate the polynomial surface expansion at scaled coordinates *q*.

    *q* is the 6-element list [q1..q6]; the first five coordinates enter as
    plain powers, while the dihedral q6 enters only through cos(n*q6).
    Uses the module-level ``powers`` (exponent table, one row per term) and
    ``coeffs`` (matching expansion coefficients).
    """
    max_pow = [5,5,5,6,6,6] # max_pow[5] is really the max freq. of dihedral

    qpow = []
    for i in range(5):
        # Pre-compute qi**0 .. qi**max_pow[i] for the stretch/bend coordinates.
        qi = [adf.const_like(1.0, q[i]), q[i]]
        for p in range(2,max_pow[i]+1):
            qi.append(qi[1] * qi[p-1]) # qi ** p
        qpow.append(qi)
    # Calculate cos(n*q6) for n = 0 .. max frequency
    cosq = [ adf.cos(n * q[5]) for n in range(max_pow[5] + 1)]
    qpow.append(cosq)

    # Sum coefficient * product of the tabulated powers for every term.
    v = 0.0
    nterms = powers.shape[0]
    for i in range(nterms):
        c = coeffs[i]
        v += c * \
            qpow[0][powers[i,0]] * \
            qpow[1][powers[i,1]] * \
            qpow[2][powers[i,2]] * \
            qpow[3][powers[i,3]] * \
            qpow[4][powers[i,4]] * \
            qpow[5][powers[i,5]]

    return v
powers = np.array([
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 0, 2],
[0, 0, 0, 0, 0, 3],
[0, 0, 0, 0, 0, 4],
[0, 0, 0, 0, 0, 5],
[0, 0, 0, 0, 0, 6],
[0, 0, 2, 0, 0, 0],
[2, 0, 0, 0, 0, 0],
[0, 2, 0, 0, 0, 0],
[0, 0, 0, 2, 0, 0],
[0, 0, 0, 0, 2, 0],
[1, 0, 1, 0, 0, 0],
[0, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 0, 0],
[0, 0, 1, 0, 1, 0],
[1, 1, 0, 0, 0, 0],
[1, 0, 0, 1, 0, 0],
[0, 1, 0, 0, 1, 0],
[1, 0, 0, 0, 1, 0],
[0, 1, 0, 1, 0, 0],
[0, 0, 0, 1, 1, 0],
[0, 0, 3, 0, 0, 0],
[3, 0, 0, 0, 0, 0],
[0, 3, 0, 0, 0, 0],
[0, 0, 0, 3, 0, 0],
[0, 0, 0, 0, 3, 0],
[1, 0, 2, 0, 0, 0],
[0, 1, 2, 0, 0, 0],
[0, 0, 2, 1, 0, 0],
[0, 0, 2, 0, 1, 0],
[2, 0, 1, 0, 0, 0],
[0, 2, 1, 0, 0, 0],
[0, 0, 1, 2, 0, 0],
[0, 0, 1, 0, 2, 0],
[1, 2, 0, 0, 0, 0],
[2, 1, 0, 0, 0, 0],
[1, 0, 0, 2, 0, 0],
[0, 1, 0, 0, 2, 0],
[2, 0, 0, 1, 0, 0],
[0, 2, 0, 0, 1, 0],
[1, 0, 0, 0, 2, 0],
[0, 1, 0, 2, 0, 0],
[2, 0, 0, 0, 1, 0],
[0, 2, 0, 1, 0, 0],
[0, 0, 0, 1, 2, 0],
[0, 0, 0, 2, 1, 0],
[1, 1, 1, 0, 0, 0],
[1, 0, 1, 1, 0, 0],
[0, 1, 1, 0, 1, 0],
[1, 0, 1, 0, 1, 0],
[0, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 0],
[1, 1, 0, 1, 0, 0],
[1, 1, 0, 0, 1, 0],
[1, 0, 0, 1, 1, 0],
[0, 1, 0, 1, 1, 0],
[0, 0, 4, 0, 0, 0],
[4, 0, 0, 0, 0, 0],
[0, 4, 0, 0, 0, 0],
[0, 0, 0, 4, 0, 0],
[0, 0, 0, 0, 4, 0],
[2, 0, 2, 0, 0, 0],
[0, 2, 2, 0, 0, 0],
[0, 0, 2, 2, 0, 0],
[0, 0, 2, 0, 2, 0],
[2, 2, 0, 0, 0, 0],
[2, 0, 0, 2, 0, 0],
[0, 2, 0, 0, 2, 0],
[0, 0, 0, 2, 2, 0],
[1, 0, 3, 0, 0, 0],
[0, 1, 3, 0, 0, 0],
[0, 0, 3, 1, 0, 0],
[0, 0, 3, 0, 1, 0],
[3, 0, 0, 1, 0, 0],
[0, 3, 0, 0, 1, 0],
[3, 0, 1, 0, 0, 0],
[0, 3, 1, 0, 0, 0],
[0, 0, 1, 3, 0, 0],
[0, 0, 1, 0, 3, 0],
[1, 3, 0, 0, 0, 0],
[3, 1, 0, 0, 0, 0],
[1, 0, 0, 3, 0, 0],
[0, 1, 0, 0, 3, 0],
[1, 0, 0, 0, 3, 0],
[0, 1, 0, 3, 0, 0],
[0, 0, 0, 1, 3, 0],
[0, 0, 0, 3, 1, 0],
[1, 1, 2, 0, 0, 0],
[1, 0, 2, 1, 0, 0],
[0, 1, 2, 0, 1, 0],
[1, 0, 2, 0, 1, 0],
[0, 1, 2, 1, 0, 0],
[0, 0, 2, 1, 1, 0],
[2, 0, 0, 1, 1, 0],
[0, 2, 0, 1, 1, 0],
[1, 0, 1, 2, 0, 0],
[0, 1, 1, 0, 2, 0],
[1, 0, 0, 1, 2, 0],
[0, 1, 0, 2, 1, 0],
[1, 0, 0, 2, 1, 0],
[0, 1, 0, 1, 2, 0],
[0, 0, 5, 0, 0, 0],
[5, 0, 0, 0, 0, 0],
[0, 5, 0, 0, 0, 0],
[0, 0, 0, 5, 0, 0],
[0, 0, 0, 0, 5, 0],
[0, 0, 0, 6, 0, 0],
[0, 0, 0, 0, 6, 0],
[0, 0, 0, 4, 1, 0],
[0, 0, 0, 1, 4, 0],
[0, 0, 0, 3, 2, 0],
[0, 0, 0, 2, 3, 0],
[0, 0, 1, 4, 0, 0],
[0, 0, 1, 0, 4, 0],
[0, 0, 2, 3, 0, 0],
[0, 0, 2, 0, 3, 0],
[1, 0, 0, 4, 0, 0],
[0, 1, 0, 0, 4, 0],
[2, 0, 0, 3, 0, 0],
[0, 2, 0, 0, 3, 0],
[0, 0, 1, 0, 0, 1],
[1, 0, 0, 0, 0, 1],
[0, 1, 0, 0, 0, 1],
[0, 0, 0, 1, 0, 1],
[0, 0, 0, 0, 1, 1],
[0, 0, 2, 0, 0, 1],
[2, 0, 0, 0, 0, 1],
[0, 2, 0, 0, 0, 1],
[0, 0, 0, 2, 0, 1],
[0, 0, 0, 0, 2, 1],
[1, 0, 1, 0, 0, 1],
[0, 1, 1, 0, 0, 1],
[0, 0, 1, 1, 0, 1],
[0, 0, 1, 0, 1, 1],
[1, 1, 0, 0, 0, 1],
[1, 0, 0, 1, 0, 1],
[0, 1, 0, 0, 1, 1],
[1, 0, 0, 0, 1, 1],
[0, 1, 0, 1, 0, 1],
[0, 0, 0, 1, 1, 1],
[0, 0, 3, 0, 0, 1],
[3, 0, 0, 0, 0, 1],
[0, 3, 0, 0, 0, 1],
[0, 0, 0, 3, 0, 1],
[0, 0, 0, 0, 3, 1],
[1, 0, 2, 0, 0, 1],
[0, 1, 2, 0, 0, 1],
[0, 0, 2, 1, 0, 1],
[0, 0, 2, 0, 1, 1],
[0, 0, 1, 2, 0, 1],
[0, 0, 1, 0, 2, 1],
[1, 2, 0, 0, 0, 1],
[2, 1, 0, 0, 0, 1],
[1, 0, 0, 2, 0, 1],
[0, 1, 0, 0, 2, 1],
[1, 0, 0, 0, 2, 1],
[0, 1, 0, 2, 0, 1],
[0, 0, 0, 1, 2, 1],
[0, 0, 0, 2, 1, 1],
[1, 1, 1, 0, 0, 1],
[1, 0, 0, 1, 1, 1],
[0, 1, 0, 1, 1, 1],
[0, 0, 0, 4, 0, 1],
[0, 0, 0, 0, 4, 1],
[0, 0, 0, 5, 0, 1],
[0, 0, 0, 0, 5, 1],
[0, 0, 1, 3, 0, 1],
[0, 0, 1, 0, 3, 1],
[0, 0, 2, 2, 0, 1],
[0, 0, 2, 0, 2, 1],
[0, 0, 0, 1, 3, 1],
[0, 0, 0, 3, 1, 1],
[0, 0, 0, 2, 2, 1],
[1, 0, 0, 3, 0, 1],
[0, 1, 0, 0, 3, 1],
[1, 0, 0, 0, 3, 1],
[0, 1, 0, 3, 0, 1],
[2, 0, 0, 2, 0, 1],
[0, 2, 0, 0, 2, 1],
[2, 0, 0, 0, 2, 1],
[0, 2, 0, 2, 0, 1],
[1, 0, 2, 1, 0, 1],
[0, 1, 2, 0, 1, 1],
[2, 0, 1, 1, 0, 1],
[0, 2, 1, 0, 1, 1],
[1, 0, 1, 2, 0, 1],
[0, 1, 1, 0, 2, 1],
[0, 0, 1, 0, 0, 2],
[1, 0, 0, 0, 0, 2],
[0, 1, 0, 0, 0, 2],
[0, 0, 0, 1, 0, 2],
[0, 0, 0, 0, 1, 2],
[0, 0, 2, 0, 0, 2],
[2, 0, 0, 0, 0, 2],
[0, 2, 0, 0, 0, 2],
[0, 0, 0, 2, 0, 2],
[0, 0, 0, 0, 2, 2],
[1, 0, 1, 0, 0, 2],
[0, 1, 1, 0, 0, 2],
[0, 0, 1, 1, 0, 2],
[0, 0, 1, 0, 1, 2],
[1, 1, 0, 0, 0, 2],
[1, 0, 0, 1, 0, 2],
[0, 1, 0, 0, 1, 2],
[1, 0, 0, 0, 1, 2],
[0, 1, 0, 1, 0, 2],
[0, 0, 0, 1, 1, 2],
[0, 0, 3, 0, 0, 2],
[3, 0, 0, 0, 0, 2],
[0, 3, 0, 0, 0, 2],
[0, 0, 0, 3, 0, 2],
[0, 0, 0, 0, 3, 2],
[0, 0, 0, 2, 1, 2],
[0, 0, 0, 1, 2, 2],
[0, 0, 1, 2, 0, 2],
[0, 0, 1, 0, 2, 2],
[1, 0, 2, 0, 0, 2],
[0, 1, 2, 0, 0, 2],
[2, 0, 1, 0, 0, 2],
[0, 2, 1, 0, 0, 2],
[0, 0, 0, 4, 0, 2],
[0, 0, 0, 0, 4, 2],
[0, 0, 0, 1, 3, 2],
[0, 0, 0, 3, 1, 2],
[0, 0, 0, 2, 2, 2],
[2, 0, 0, 1, 0, 2],
[0, 2, 0, 0, 1, 2],
[1, 0, 0, 2, 0, 2],
[0, 1, 0, 0, 2, 2],
[1, 0, 0, 0, 2, 2],
[0, 1, 0, 2, 0, 2],
[1, 0, 1, 1, 0, 2],
[0, 1, 1, 0, 1, 2],
[1, 0, 1, 0, 1, 2],
[0, 1, 1, 1, 0, 2],
[0, 0, 1, 3, 0, 2],
[0, 0, 1, 0, 3, 2],
[0, 0, 1, 0, 0, 3],
[1, 0, 0, 0, 0, 3],
[0, 1, 0, 0, 0, 3],
[0, 0, 0, 1, 0, 3],
[0, 0, 0, 0, 1, 3],
[0, 0, 2, 0, 0, 3],
[2, 0, 0, 0, 0, 3],
[0, 2, 0, 0, 0, 3],
[0, 0, 0, 2, 0, 3],
[0, 0, 0, 0, 2, 3],
[0, 0, 0, 1, 1, 3],
[0, 0, 3, 0, 0, 3],
[0, 0, 0, 3, 0, 3],
[0, 0, 0, 0, 3, 3],
[0, 0, 0, 1, 2, 3],
[0, 0, 0, 2, 1, 3],
[0, 0, 1, 1, 0, 3],
[0, 0, 1, 0, 1, 3],
[1, 0, 0, 1, 0, 3],
[0, 1, 0, 0, 1, 3],
[1, 0, 0, 0, 1, 3],
[0, 1, 0, 1, 0, 3],
[0, 0, 2, 1, 0, 3],
[0, 0, 2, 0, 1, 3],
[0, 0, 1, 0, 0, 4],
[1, 0, 0, 0, 0, 4],
[0, 1, 0, 0, 0, 4],
[0, 0, 0, 1, 0, 4],
[0, 0, 0, 0, 1, 4],
[0, 0, 2, 0, 0, 4],
[0, 0, 0, 2, 0, 4],
[0, 0, 0, 0, 2, 4],
[0, 0, 0, 1, 1, 4],
[0, 0, 1, 1, 0, 4],
[0, 0, 1, 0, 1, 4],
[0, 0, 1, 0, 0, 5],
[1, 0, 0, 0, 0, 5],
[0, 1, 0, 0, 0, 5],
[0, 0, 0, 1, 0, 5],
[0, 0, 0, 0, 1, 5]
])
coeffs = np.array([
0.00396159 ,
0.00481490 ,
0.00318934 ,
0.00027018 ,
0.00005307 ,
0.00001047 ,
0.00000198 ,
1.07103383 ,
0.85671785 ,
0.85671785 ,
0.11105339 ,
0.11105339 ,
-0.03876908 ,
-0.03876908 ,
0.18430247 ,
0.18430247 ,
0.00036727 ,
-0.00663756 ,
-0.00663756 ,
-0.00196944 ,
-0.00196944 ,
0.01747081 ,
-1.18343510 ,
-0.23735539 ,
-0.23735539 ,
-0.02611900 ,
-0.02611900 ,
-0.15438002 ,
-0.15438002 ,
-0.35516368 ,
-0.35516368 ,
0.07899067 ,
0.07899067 ,
-0.26776532 ,
-0.26776532 ,
-0.00406083 ,
-0.00406083 ,
-0.01925971 ,
-0.01925971 ,
-0.01107079 ,
-0.01107079 ,
-0.00816282 ,
-0.00816282 ,
0.00337183 ,
0.00337183 ,
-0.01352772 ,
-0.01352772 ,
0.01289325 ,
-0.07449808 ,
-0.07449808 ,
-0.03379136 ,
-0.03379136 ,
-0.01672271 ,
-0.00495469 ,
-0.00495469 ,
-0.00453600 ,
-0.00453600 ,
-0.91033894 ,
-0.38779590 ,
-0.38779590 ,
-0.00503640 ,
-0.00503640 ,
-0.46416302 ,
-0.46416302 ,
0.07527264 ,
0.07527264 ,
-0.00799835 ,
-0.04029912 ,
-0.04029912 ,
0.00364088 ,
0.47561739 ,
0.47561739 ,
-0.41647359 ,
-0.41647359 ,
-0.06425296 ,
-0.06425296 ,
0.26125142 ,
0.26125142 ,
0.10336257 ,
0.10336257 ,
-0.01680055 ,
-0.01680055 ,
0.04984239 ,
0.04984239 ,
0.00354416 ,
0.00354416 ,
0.00452574 ,
0.00452574 ,
-0.05423804 ,
0.06564708 ,
0.06564708 ,
0.03801095 ,
0.03801095 ,
-0.09161667 ,
-0.01589965 ,
-0.01589965 ,
0.01341203 ,
0.01341203 ,
-0.01342635 ,
-0.01342635 ,
-0.00671149 ,
-0.00671149 ,
-0.73562441 ,
-0.30455894 ,
-0.30455894 ,
0.00582616 ,
0.00582616 ,
-0.00547701 ,
-0.00547701 ,
0.00280896 ,
0.00280896 ,
0.00674263 ,
0.00674263 ,
0.06845098 ,
0.06845098 ,
0.04193747 ,
0.04193747 ,
-0.05190213 ,
-0.05190213 ,
0.04168912 ,
0.04168912 ,
-0.01682379 ,
-0.00098759 ,
-0.00098759 ,
-0.01176361 ,
-0.01176361 ,
0.01742527 ,
-0.00533832 ,
-0.00533832 ,
0.00542779 ,
0.00542779 ,
0.00263732 ,
0.00263732 ,
0.01859551 ,
0.01859551 ,
0.00511361 ,
-0.00973834 ,
-0.00973834 ,
-0.00511467 ,
-0.00511467 ,
-0.01356281 ,
0.00352911 ,
-0.00964293 ,
-0.00964293 ,
-0.00113452 ,
-0.00113452 ,
0.01028106 ,
0.01028106 ,
-0.03748145 ,
-0.03748145 ,
-0.00708628 ,
-0.00708628 ,
0.00742831 ,
0.00742831 ,
0.00419281 ,
0.00419281 ,
-0.00555253 ,
-0.00555253 ,
-0.02044897 ,
-0.02044897 ,
-0.02429936 ,
0.00148383 ,
0.00148383 ,
0.00050075 ,
0.00050075 ,
0.00149142 ,
0.00149142 ,
0.02232416 ,
0.02232416 ,
0.07164353 ,
0.07164353 ,
0.01644870 ,
0.01644870 ,
0.01815537 ,
0.01605919 ,
0.01605919 ,
0.00735028 ,
0.00735028 ,
0.02670612 ,
0.02670612 ,
0.01548269 ,
0.01548269 ,
-0.13042235 ,
-0.13042235 ,
0.07364926 ,
0.07364926 ,
-0.08874645 ,
-0.08874645 ,
-0.01177248 ,
0.00172223 ,
0.00172223 ,
-0.00154074 ,
-0.00154074 ,
0.01965194 ,
0.00409752 ,
0.00409752 ,
0.00301573 ,
0.00301573 ,
-0.00734859 ,
-0.00734859 ,
0.00350247 ,
0.00350247 ,
-0.00037121 ,
0.00249543 ,
0.00249543 ,
-0.00168725 ,
-0.00168725 ,
0.00914785 ,
-0.02015559 ,
0.00925238 ,
0.00925238 ,
-0.00593037 ,
-0.00593037 ,
-0.01230679 ,
-0.01230679 ,
0.00829575 ,
0.00829575 ,
0.03735453 ,
0.03735453 ,
-0.04328977 ,
-0.04328977 ,
0.00458548 ,
0.00458548 ,
0.00364501 ,
0.00364501 ,
0.00986809 ,
0.01437361 ,
0.01437361 ,
0.00072674 ,
0.00072674 ,
-0.00158409 ,
-0.00158409 ,
-0.03961996 ,
-0.03961996 ,
-0.01732246 ,
-0.01732246 ,
0.02668498 ,
0.02668498 ,
-0.00188286 ,
0.00052265 ,
0.00052265 ,
-0.00089442 ,
-0.00089442 ,
0.00481644 ,
0.00031496 ,
0.00031496 ,
0.00103249 ,
0.00103249 ,
0.00224998 ,
-0.00366693 ,
-0.00033429 ,
-0.00033429 ,
-0.00319598 ,
-0.00319598 ,
0.00447145 ,
0.00447145 ,
-0.00147544 ,
-0.00147544 ,
-0.00085521 ,
-0.00085521 ,
-0.01099915 ,
-0.01099915 ,
-0.00042972 ,
0.00013538 ,
0.00013538 ,
-0.00019221 ,
-0.00019221 ,
0.00121114 ,
0.00026755 ,
0.00026755 ,
0.00054596 ,
0.00057513 ,
0.00057513 ,
-0.00009041 ,
0.00002274 ,
0.00002274 ,
-0.00004075 ,
-0.00004075
]) | StarcoderdataPython |
5016609 | <gh_stars>0
#!/usr/bin/python3
import plac
MODELS = [
'BernoulliNB',
'KNN',
'LinearSVC_L1',
'LinearSVC_L2',
'MultinomialNB',
'NearestCentroid',
'PassiveAggresive',
'Perceptron',
'Ridge',
'SGDClassifierElastic',
'SGDClassifierL2',
]
@plac.annotations(
    event_sel=("Events to select", "positional", None, int),
)
def main(*event_sel):
    """Run each classifier module listed in MODELS on the selected events
    and print every model's accuracy."""
    for m in MODELS:
        # Import the module by name and delegate to its main(event_sel).
        model = __import__(m)
        acc = model.main(event_sel)
        print('>>> {} acc={:.3f}'.format(m, acc))


if __name__ == "__main__":
    plac.call(main)
| StarcoderdataPython |
8142696 | #!/usr/bin/env python3
# Copyright 2021 Universität Tübingen, DKFZ and EMBL
# for the German Human Genome-Phenome Archive (GHGA)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate openapi yaml from FastAPI server"""
import typer
import yaml
import requests
def main(url: str = "http://localhost:8000/openapi.json", output: str = "openapi.yaml"):
    """Fetch a FastAPI server's OpenAPI schema (JSON) and save it as YAML.

    Parameters
    ----------
    url : str
        Endpoint serving the JSON schema (FastAPI's default ``/openapi.json``).
    output : str
        Path of the YAML file to write.
    """
    response = requests.get(url)
    # Fail loudly on HTTP errors (404, 500, ...) instead of crashing later
    # with an opaque JSON-decode error on the non-JSON error body.
    response.raise_for_status()
    json_data = response.json()
    # Renamed from the misleading `json_file`: this handle receives YAML.
    with open(output, "w") as yaml_file:
        yaml.dump(json_data, yaml_file)


if __name__ == "__main__":
    typer.run(main)
| StarcoderdataPython |
"""Defines the Movie class, a simple record describing one film.

Each instance stores three pieces of display data:
1. the movie's title,
2. a URL for its poster image,
3. a URL for its YouTube trailer video.
"""


class Movie:
    """Plain data holder for a movie's title, poster URL and trailer URL."""

    def __init__(self, title, poster_image_url, trailer_youtube_url):
        # Store the three display attributes verbatim.
        self.title = title
        self.poster_image_url = poster_image_url
        self.trailer_youtube_url = trailer_youtube_url
5091653 | <gh_stars>0
# pull the bindings
from . import isce3 as extisce3
# end of file
| StarcoderdataPython |
6513379 | <filename>src/masonite_permission/models/permission.py
"""Permission Model."""
from masoniteorm.models import Model
from masoniteorm.query import QueryBuilder
from ..exceptions import PermissionException
class Permission(Model):
    """Permission Model.

    Represents one permission row.  Roles are linked through the
    ``model_has_permissions`` pivot table with ``permissionable_type``
    fixed to ``"roles"``.
    """

    __primary_key__ = "id"
    __fillable__ = ["name", "slug"]

    def roles(self):
        """Return all Role records that carry this permission."""
        from ..models.role import Role

        return (
            Role.join("model_has_permissions as mhp", "mhp.permissionable_id", "=", "roles.id")
            .where("mhp.permission_id", self.id)
            .where("mhp.permissionable_type", "roles")
            .select_raw("roles.*")
            .get()
        )

    def sync_roles(self, *args):
        """Replace this permission's role links with exactly the given roles.

        Accepts role ids (int), slugs (str), Role instances, or a single
        list containing any mix of those.  Calling with no arguments
        detaches every role.
        """
        from ..models.role import Role

        role_ids = []
        role_slugs = []
        found_ids = []

        if len(args) == 0:
            # No roles given: remove every existing role link.
            QueryBuilder().table("model_has_permissions").where(
                "permissionable_type", "roles"
            ).where("permission_id", self.id).delete()
            return

        # Allow a single list argument instead of varargs.
        if isinstance(args[0], list):
            args = args[0]

        for role in args:
            if isinstance(role, int):
                role_ids.append(role)
            elif isinstance(role, str):
                role_slugs.append(role)
            elif isinstance(role, Role):
                found_ids.append(role.id)

        role_by_id = list(Role.where_in("id", role_ids).get().pluck("id"))
        role_by_slug = list(Role.where_in("slug", role_slugs).get().pluck("id"))
        # dict.fromkeys preserves order while dropping duplicate ids.
        ids = list(dict.fromkeys(found_ids + role_by_id + role_by_slug))

        data = []
        for role in ids:
            data.append(
                {
                    "permission_id": self.id,
                    "permissionable_id": role,
                    "permissionable_type": "roles",
                }
            )

        query = QueryBuilder().table("model_has_permissions")
        query.where("permissionable_type", "roles").where("permission_id", self.id).delete()
        if len(data) > 0:
            query.bulk_create(data)

    def attach_role(self, role):
        """Assign a role to this permission.

        Arguments:
            role {Role, str or int} -- Role instance, role slug, or role id.

        Raises:
            PermissionException -- if the slug or id matches no role.
        """
        from ..models.role import Role

        if isinstance(role, str):
            slug = role
            role = Role.where("slug", slug).first()
            if not role:
                # BUG FIX: the message previously formatted the reassigned
                # lookup result (None) instead of the slug the caller passed.
                raise PermissionException(f"Role: {slug} does not exist!")
        elif isinstance(role, int):
            role_id = role
            role = Role.find(role_id)
            if not role:
                # BUG FIX: same issue — report the requested id, not None.
                raise PermissionException(f"Role: with id {role_id} does not exist!")

        exists = (
            QueryBuilder()
            .table("model_has_permissions")
            .where("permissionable_id", role.id)
            .where("permission_id", self.id)
            .where("permissionable_type", "roles")
            .count()
        )
        if not exists:
            QueryBuilder().table("model_has_permissions").create(
                {
                    "permission_id": self.id,
                    "permissionable_id": role.id,
                    "permissionable_type": "roles",
                }
            )

    def detach_role(self, role):
        """Detach a role from this permission.

        Arguments:
            role {Role, str or int} -- Role instance, role slug, or role id.

        Raises:
            PermissionException -- if the slug or id matches no role.
        """
        from ..models.role import Role

        if isinstance(role, str):
            slug = role
            role = Role.where("slug", slug).first()
            if not role:
                # BUG FIX: report the slug that was requested, not the None
                # result of the failed lookup.
                raise PermissionException(f"Role: {slug} does not exist!")
        elif isinstance(role, int):
            role_id = role
            role = Role.find(role_id)
            if not role:
                raise PermissionException(f"Role: with id {role_id} does not exist!")

        exists = (
            QueryBuilder()
            .table("model_has_permissions")
            .where("permissionable_id", role.id)
            .where("permission_id", self.id)
            .where("permissionable_type", "roles")
            .count()
        )
        if exists:
            QueryBuilder().table("model_has_permissions").where(
                "permissionable_id", role.id
            ).where("permissionable_type", "roles").where("permission_id", self.id).delete()
3561199 | <filename>app/fsm.py
from enum import Enum
# Enumeration class (state constants for the dialog finite-state machine).
class States(Enum):
    S_START = 0  # Start of a new dialog
    S_SETTINGS = 1
    S_SEND_USERNAME = 2
    S_SEND_PASSWORD = 3
    S_SEND_SECURITY_TYPE = 4
    S_SEND_HIDDEN_NET = 5
# Per-user state storage, keyed by the stringified user id.
states_dict = {}


# Get the user's current state.
def get_current_state(user_id):
    # If the key is somehow missing, fall back to the default: dialog start.
    return states_dict.get(str(user_id), States.S_START.value)
# Set a new state for the user; returns True on success, False on failure.
def set_state(user_id, value):
    try:
        states_dict.update({str(user_id):value})
    except Exception as e:
        # NOTE(review): dict.update on a str key should never raise here; the
        # broad except looks purely defensive.  The printed message is a
        # runtime string ("state dictionary error") and is left untranslated.
        print('Ошибка словаря состояний', e)
        return False
    else:
        return True
5092799 | <reponame>EnerwhereIT/erpnext_operations
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
# Read runtime dependencies from requirements.txt (one package per line).
with open('requirements.txt') as f:
    install_requires = f.read().strip().split('\n')

# get version from __version__ variable in erpnext_operations/__init__.py
from erpnext_operations import __version__ as version

setup(
    name='erpnext_operations',
    version=version,
    description='Create invoices, manage meter readings, manage fuel prices, etc...',
    author='frappe',
    author_email='<EMAIL>',
    packages=find_packages(),
    zip_safe=False,
    include_package_data=True,
    install_requires=install_requires
)
| StarcoderdataPython |
3344997 | <filename>filter01.py
from db.mysql_conn import MysqlDb
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import f1_score
from scipy import stats
import numpy as np
import pickle
from row_index import RowIndex
def read_line(file_name):
    """Read *file_name* and return its lines with trailing newlines stripped.

    The file handle is now closed deterministically via a context manager;
    the original relied on garbage collection to release it.
    """
    with open(file_name) as fh:
        return [line.rstrip('\n') for line in fh]
def process():
    """Run one randomized experiment comparing four training-set mixes.

    Builds four RandomForest classifiers on different compositions of
    labeled rows (150 random; 100 random + "measurement" rows; 100 random +
    "unmeasurement" rows; 100 random only), evaluates each on the same
    50-row test split, and returns the four F1 scores plus a RowIndex
    object recording which rows went into each split.
    """
    db = MysqlDb()
    rowIdx = RowIndex()
    measure_data = read_line('data/measurement.txt')
    un_measure = read_line('data/unmeasurement.txt')

    # 75 positive + 75 negative randomly sampled training rows.
    yes_training = [str(data) for data in db.get_75_row('yes')]
    no_training = [str(data) for data in db.get_75_row('no')]
    training_id = []
    training_id.extend(yes_training)
    training_id.extend(no_training)
    str_trainind_id = '(' + ','.join(training_id) + ')'
    x_train = db.get_feature_by_row(str_trainind_id)
    y_train = [1] * 75 + [0] * 75
    rowIdx.training150_data_x = training_id
    rowIdx.training150_data_y = y_train

    # Smaller 50+50 subset of the same rows, shared by cases 2-4.
    x_train_other_id = []
    x_train_other_id.extend(yes_training[:50])
    x_train_other_id.extend(no_training[:50])
    str_trainind_other_id = '(' + ','.join(x_train_other_id) + ')'
    x_train_other = db.get_feature_by_row(str_trainind_other_id)
    y_train_other = [1] * 50 + [0] * 50
    rowIdx.training100_data_x = x_train_other_id
    rowIdx.training100_data_y = y_train_other

    # 25 positive + 25 negative held-out test rows.
    yes_test = [str(data) for data in db.get_25_row('yes')]
    no_test = [str(data) for data in db.get_25_row('no')]
    test_id = []
    test_id.extend(yes_test)
    test_id.extend(no_test)
    str_test_id = '(' + ','.join(test_id) + ')'
    x_test = db.get_feature_by_row(str_test_id)
    y_test = [1] * 25 + [0] * 25
    rowIdx.test50_data_x = test_id
    rowIdx.test50_data_y = y_test

    # Case 1: train on the full 150-row random sample.
    m1 = RandomForestClassifier()
    m1.fit(x_train, y_train)
    y_pred = m1.predict(x_test)
    fsc1 = f1_score(y_test, y_pred)

    # Case 2: 100 random rows plus the "measurement" rows.
    m2_x_train = []
    m2_y_train = []
    str_measure_id = '(' + ','.join(measure_data) + ')'
    y_training_measure = db.get_label_data(str_measure_id)
    x_training_measure = db.get_feature_by_row(str_measure_id)
    for x in x_train_other:
        m2_x_train.append(x)
    for x in x_training_measure:
        m2_x_train.append(x)
    for y in y_train_other:
        m2_y_train.append(y)
    for y in y_training_measure:
        m2_y_train.append(y)
    m2 = RandomForestClassifier()
    m2.fit(m2_x_train, m2_y_train)
    m2_y_pred = m2.predict(x_test)
    fsc2 = f1_score(y_test, m2_y_pred)

    # Case 3: 100 random rows plus the "unmeasurement" rows.
    m3_x_train = []
    m3_y_train = []
    str_unmeasure_id = '(' + ','.join(un_measure) + ')'
    y_training_measure3 = db.get_label_data(str_unmeasure_id)
    x_training_measure3 = db.get_feature_by_row(str_unmeasure_id)
    for x in x_train_other:
        m3_x_train.append(x)
    for x in x_training_measure3:
        m3_x_train.append(x)
    for y in y_train_other:
        m3_y_train.append(y)
    for y in y_training_measure3:
        m3_y_train.append(y)
    m3 = RandomForestClassifier()
    m3.fit(m3_x_train, m3_y_train)
    m3_y_pred = m3.predict(x_test)
    fsc3 = f1_score(y_test, m3_y_pred)

    # Case 4: baseline trained on the 100 random rows only.
    m4 = RandomForestClassifier()
    m4.fit(x_train_other, y_train_other)
    m4_y_pred = m4.predict(x_test)
    fsc4 = f1_score(y_test, m4_y_pred)

    return fsc1, fsc2, fsc3, fsc4, rowIdx
def run_test():
    """Repeat process() 200 times, pickle the raw results, and print means."""
    res1 = []
    res2 = []
    res3 = []
    res4 = []
    rowIdxLst = []
    for _ in range(0, 200):
        fsc1, fsc2, fsc3, fsc4, rowId = process()
        res1.append(fsc1)
        res2.append(fsc2)
        res3.append(fsc3)
        res4.append(fsc4)
        rowIdxLst.append(rowId)
    # Persist every run so results can be re-analysed without recomputing.
    pickle.dump(res1, open('result/res1.obj', 'wb'))
    pickle.dump(res2, open('result/res2.obj', 'wb'))
    pickle.dump(res3 , open('result/res3.obj', 'wb'))
    pickle.dump(res4, open('result/res4.obj', 'wb'))
    pickle.dump(rowIdxLst, open('result/row_id.obj', 'wb'))
    # Python 2 print statement (this module targets Python 2).
    print 'fsc1 : {}, fsc2 : {}, fsc3 : {}, fsc4 : {}'.format(np.average(res1), np.average(res2), np.average(res3), np.average(res4))
def dump_test():
    """Load previously pickled results and print the mean F1 of each case."""
    res1 = pickle.load(open('result/2000_1/res1.obj', 'rb'))
    res2 = pickle.load(open('result/2000_1/res2.obj', 'rb'))
    res3 = pickle.load(open('result/2000_1/res3.obj', 'rb'))
    res4 = pickle.load(open('result/2000_1/res4.obj', 'rb'))
    print 'fsc1 : {}, fsc2 : {}, fsc3 : {}, fsc4 : {}'.format(np.average(res1), np.average(res2), np.average(res3), np.average(res4))


if __name__ == '__main__':
    dump_test()
| StarcoderdataPython |
237900 | from flask import Blueprint
from flask import current_app
from flask import make_response
from flask import request
from flask import send_from_directory
from fedservice.exception import UnknownEntity
sigserv_views = Blueprint("sig_serv", __name__, url_prefix='')
@sigserv_views.route("/static/<path:path>")
def send_js(path):
    """Serve files from the blueprint's local ``static`` directory."""
    return send_from_directory('static', path)
@sigserv_views.route("/eid/<eid>/.well-known/openid-federation")
def well_known(eid):
    """Return the self-signed entity statement for *eid* as a JOSE response."""
    # self signed entity statement
    response = make_response(current_app.signing_service.issuer.create_entity_statement(eid))
    response.headers['Content-Type'] = 'application/jose; charset=UTF-8'
    return response
@sigserv_views.route("/api/<eid>")
def signer(eid):
    """Return an entity statement issued by *eid*, optionally about ``sub``.

    The optional ``sub`` query parameter selects the subject of the
    statement; without it, the statement is about *eid* itself.  Responds
    with 400 when the entity is unknown or no statement could be produced.
    """
    args = [eid]
    _srv = current_app.signing_service.issuer
    if "sub" in request.args:
        args.append(request.args["sub"])

    try:
        info = _srv.create_entity_statement(*args)
    except UnknownEntity:
        # BUG FIX: the original called make_response(400, "Unknown entity")
        # (arguments reversed — flask expects (body, status)) and never
        # returned the result, so the view fell through and returned None.
        return make_response("Unknown entity", 400)

    if info:
        response = make_response(info)
        response.headers['Content-Type'] = 'application/jose; charset=UTF-8'
        return response
    # BUG FIX: same reversed-and-unreturned make_response as above.
    return make_response(f"No information on {args[:-1]}", 400)
| StarcoderdataPython |
11347963 | """ Class definition for Combustor."""
import numpy as np
import openmdao.api as om
from pycycle.cea.set_total import SetTotal
from pycycle.cea.set_static import SetStatic
from pycycle.cea.species_data import Thermo, janaf
from pycycle.constants import AIR_FUEL_MIX, AIR_MIX
from pycycle.elements.duct import PressureLoss
from pycycle.flow_in import FlowIn
from pycycle.passthrough import PassThrough
class MixFuel(om.ExplicitComponent):
    """
    MixFuel calculates fuel and air mixture.

    Combines the incoming air flow with fuel (per the fuel-to-air ratio) and
    produces the mixed composition, the mass-averaged enthalpy, and the total
    and fuel mass-flow rates.
    """

    def initialize(self):
        """Declare component options."""
        self.options.declare('thermo_data', default=janaf,
                             desc='thermodynamic data set', recordable=False)
        self.options.declare('inflow_elements', default=AIR_MIX,
                             desc='set of elements present in the flow')
        self.options.declare('fuel_type', default="JP-7",
                             desc='Type of fuel.')

    def setup(self):
        """Set up thermo bookkeeping, I/O variables and partial declarations."""
        thermo_data = self.options['thermo_data']
        inflow_elements = self.options['inflow_elements']
        fuel_type = self.options['fuel_type']

        # composition of the mixed (air + fuel) flow
        self.mixed_elements = inflow_elements.copy()
        self.mixed_elements.update(janaf.reactants[fuel_type])

        inflow_thermo = Thermo(thermo_data, init_reacts=inflow_elements)
        self.inflow_prods = inflow_thermo.products
        self.inflow_num_prods = len(self.inflow_prods)
        self.inflow_wt_mole = inflow_thermo.wt_mole

        air_fuel_thermo = Thermo(thermo_data, init_reacts=self.mixed_elements)
        self.air_fuel_prods = air_fuel_thermo.products
        self.air_fuel_wt_mole = air_fuel_thermo.wt_mole

        self.num_prod = n_prods = len(self.air_fuel_prods)

        # work arrays sized to the mixed-flow product set
        self.init_air_amounts = np.zeros(n_prods)
        self.init_fuel_amounts = np.zeros(n_prods)
        self.init_fuel_amounts_base = np.zeros(n_prods)

        # inputs
        self.add_input('Fl_I:stat:W', val=0.0, desc='weight flow', units='lbm/s')
        self.add_input('Fl_I:FAR', val=0.0, desc='Fuel to air ratio')
        self.add_input('Fl_I:tot:h', val=0.0, desc='total enthalpy', units='Btu/lbm')
        self.add_input('Fl_I:tot:n', shape=self.inflow_num_prods, desc='incoming flow composition')
        self.add_input('fuel_Tt', val=518., units='degR', desc="fuel temperature")

        # outputs
        self.add_output('mass_avg_h', shape=1, units='Btu/lbm',
                        desc="mass flow rate averaged specific enthalpy")
        self.add_output('init_prod_amounts', shape=n_prods, desc='initial product amounts')
        self.add_output('Wout', shape=1, units="lbm/s", desc="total massflow out")
        self.add_output('Wfuel', shape=1, units="lbm/s", desc="total fuel massflow out")

        # per-product fuel mass contribution (moles * molecular weight)
        for i, r in enumerate(self.air_fuel_prods):
            self.init_fuel_amounts_base[i] = janaf.reactants[fuel_type].get(r, 0) * janaf.products[r]['wt']

        # create a mapping between the composition indices of the inflow and outflow arrays
        self.in_out_flow_idx_map = [self.air_fuel_prods.index(prod) for prod in self.inflow_prods]

        self.M_air = np.sum(self.init_air_amounts)
        self.M_fuel_base = np.sum(self.init_fuel_amounts_base)

        self.declare_partials('mass_avg_h', ['Fl_I:FAR', 'Fl_I:tot:h'])
        self.declare_partials('init_prod_amounts', ['Fl_I:FAR', 'Fl_I:tot:n'])
        self.declare_partials('Wout', ['Fl_I:stat:W', 'Fl_I:FAR'])
        self.declare_partials('Wfuel', ['Fl_I:stat:W', 'Fl_I:FAR'])

    def compute(self, inputs, outputs):
        """Compute the mixed composition, averaged enthalpy and mass flows."""
        FAR = inputs['Fl_I:FAR']
        W = inputs['Fl_I:stat:W']
        Fl_I_tot_n = inputs['Fl_I:tot:n']

        if inputs._under_complex_step:
            # Bug fix: `np.complex` was a deprecated alias of the builtin
            # `complex` and was removed in NumPy >= 1.24; use the builtin.
            self.init_air_amounts = self.init_air_amounts.astype(complex)
        else:
            self.init_air_amounts = self.init_air_amounts.real

        # copy the incoming flow into a correctly sized array for the outflow composition
        for i, j in enumerate(self.in_out_flow_idx_map):
            self.init_air_amounts[j] = Fl_I_tot_n[i]

        self.init_air_amounts *= self.air_fuel_wt_mole
        self.init_air_amounts /= np.sum(self.init_air_amounts)
        self.init_air_amounts *= W  # convert to kg and scale with mass flow

        # compute the amount of fuel-flow rate in terms of the incoming mass-flow rate
        self.init_fuel_amounts = self.init_fuel_amounts_base/self.M_fuel_base * W * FAR

        self.init_stuff = (self.init_air_amounts + self.init_fuel_amounts)
        self.sum_stuff = np.sum(self.init_stuff)
        # print('sum_stuff',self.sum_stuff)
        self.norm_init_stuff = self.init_stuff/self.sum_stuff

        outputs['init_prod_amounts'] = self.norm_init_stuff/self.air_fuel_wt_mole

        self.fuel_ht = 0  # makes ht happy

        outputs['mass_avg_h'] = (inputs['Fl_I:tot:h']+FAR*self.fuel_ht)/(1+FAR)

        outputs['Wout'] = W * (1+FAR)

        outputs['Wfuel'] = W * FAR

    def compute_partials(self, inputs, J):
        """Analytic partial derivatives of the outputs w.r.t. the inputs."""
        FAR = inputs['Fl_I:FAR']
        W = inputs['Fl_I:stat:W']
        ht = inputs['Fl_I:tot:h']
        n = inputs['Fl_I:tot:n']

        # AssertionError: 4.2991138611171866e-05 not less than or equal to 1e-05 : DESIGN.burner.mix_fuel: init_prod_amounts w.r.t Fl_I:tot:n

        J['mass_avg_h', 'Fl_I:FAR'] = -ht/(1+FAR)**2 + self.fuel_ht/(1+FAR)**2  # - self.fuel_ht*FAR/(1+FAR)**2
        J['mass_avg_h', 'Fl_I:tot:h'] = 1.0/(1.0 + FAR)

        J['Wout', 'Fl_I:stat:W'] = (1.0 + FAR)
        J['Wout', 'Fl_I:FAR'] = W

        J['Wfuel', 'Fl_I:stat:W'] = FAR
        J['Wfuel', 'Fl_I:FAR'] = W

        # rebuild the normalized air composition (same steps as compute)
        init_air_amounts = np.zeros(len(self.air_fuel_prods))
        for i, j in enumerate(self.in_out_flow_idx_map):
            init_air_amounts[j] = n[i]
        init_air_amounts *= self.air_fuel_wt_mole
        init_air_amounts /= np.sum(init_air_amounts)

        init_fuel_amounts = self.init_fuel_amounts_base/self.M_fuel_base

        J['init_prod_amounts', 'Fl_I:FAR'] = (init_fuel_amounts - init_air_amounts)/(1 + FAR)**2/self.air_fuel_wt_mole

        dinit_prod_dn = np.zeros((self.num_prod,self.inflow_num_prods))
        temp = ((np.eye(self.inflow_num_prods) * self.inflow_wt_mole * np.sum(n*self.inflow_wt_mole)) - \
                (np.outer(self.inflow_wt_mole,self.inflow_wt_mole)*n)) / \
               (np.sum(n*self.inflow_wt_mole)**2) / (1+FAR) / self.inflow_wt_mole
        for i, j in enumerate(self.in_out_flow_idx_map):
            dinit_prod_dn[j] = temp[:,i]

        J['init_prod_amounts', 'Fl_I:tot:n'] = dinit_prod_dn
class Combustor(om.Group):
    """
    A combustor that adds a fuel to an incoming flow mixture and burns it

    --------------
    Flow Stations
    --------------
    Fl_I
    Fl_O

    -------------
    Design
    -------------
        inputs
        --------
        Fl_I:FAR
        dPqP
        MN

        outputs
        --------
        Wfuel

    -------------
    Off-Design
    -------------
        inputs
        --------
        Fl_I:FAR
        dPqP
        area

        outputs
        --------
        Wfuel
    """

    def initialize(self):
        """Declare group options."""
        self.options.declare('thermo_data', default=janaf,
                             desc='thermodynamic data set', recordable=False)
        self.options.declare('inflow_elements', default=AIR_MIX,
                             desc='set of elements present in the air flow')
        self.options.declare('air_fuel_elements', default=AIR_FUEL_MIX,
                             desc='set of elements present in the fuel')
        self.options.declare('design', default=True,
                             desc='Switch between on-design and off-design calculation.')
        self.options.declare('statics', default=True,
                             desc='If True, calculate static properties.')
        self.options.declare('fuel_type', default="JP-7",
                             desc='Type of fuel.')

    def setup(self):
        """Assemble the combustor's subsystems and connect them."""
        thermo_data = self.options['thermo_data']
        inflow_elements = self.options['inflow_elements']
        air_fuel_elements = self.options['air_fuel_elements']
        design = self.options['design']
        statics = self.options['statics']
        fuel_type = self.options['fuel_type']

        # thermo sets for the clean-air and vitiated (air+fuel) flows
        air_fuel_thermo = Thermo(thermo_data, init_reacts=air_fuel_elements)
        self.air_fuel_prods = air_fuel_thermo.products

        air_thermo = Thermo(thermo_data, init_reacts=inflow_elements)
        self.air_prods = air_thermo.products

        self.num_air_fuel_prod = len(self.air_fuel_prods)
        self.num_air_prod = len(self.air_prods)

        # Create combustor flow station
        in_flow = FlowIn(fl_name='Fl_I', num_prods=self.num_air_prod)
        self.add_subsystem('in_flow', in_flow, promotes=['Fl_I:tot:*', 'Fl_I:stat:*'])

        # Perform combustor engineering calculations
        self.add_subsystem('mix_fuel',
                           MixFuel(thermo_data=thermo_data, inflow_elements=inflow_elements,
                                   fuel_type=fuel_type),
                           promotes=['Fl_I:stat:W','Fl_I:FAR', 'Fl_I:tot:n', 'Fl_I:tot:h', 'Wfuel', 'Wout'])

        # Pressure loss
        prom_in = [('Pt_in', 'Fl_I:tot:P'),'dPqP']
        self.add_subsystem('p_loss', PressureLoss(), promotes_inputs=prom_in)

        # Calculate vitiated flow station properties
        vit_flow = SetTotal(thermo_data=thermo_data, mode='h', init_reacts=air_fuel_elements,
                            fl_name="Fl_O:tot")
        self.add_subsystem('vitiated_flow', vit_flow, promotes_outputs=['Fl_O:*'])
        self.connect("mix_fuel.mass_avg_h", "vitiated_flow.h")
        self.connect("mix_fuel.init_prod_amounts", "vitiated_flow.init_prod_amounts")
        self.connect("p_loss.Pt_out","vitiated_flow.P")

        if statics:
            if design:
                # Calculate static properties.
                # On-design: static state is solved for a target Mach number.
                out_stat = SetStatic(mode="MN", thermo_data=thermo_data, init_reacts=air_fuel_elements,
                                     fl_name="Fl_O:stat")
                prom_in = ['MN']
                prom_out = ['Fl_O:stat:*']
                self.add_subsystem('out_stat', out_stat, promotes_inputs=prom_in,
                                   promotes_outputs=prom_out)
                self.connect("mix_fuel.init_prod_amounts", "out_stat.init_prod_amounts")
                self.connect('Fl_O:tot:S', 'out_stat.S')
                self.connect('Fl_O:tot:h', 'out_stat.ht')
                self.connect('Fl_O:tot:P', 'out_stat.guess:Pt')
                self.connect('Fl_O:tot:gamma', 'out_stat.guess:gamt')
                self.connect('Wout','out_stat.W')
            else:
                # Calculate static properties.
                # Off-design: static state is solved for a fixed flow area.
                out_stat = SetStatic(mode="area", thermo_data=thermo_data, init_reacts=air_fuel_elements,
                                     fl_name="Fl_O:stat")
                prom_in = ['area']
                prom_out = ['Fl_O:stat:*']
                self.add_subsystem('out_stat', out_stat, promotes_inputs=prom_in,
                                   promotes_outputs=prom_out)
                self.connect("mix_fuel.init_prod_amounts", "out_stat.init_prod_amounts")
                self.connect('Fl_O:tot:S', 'out_stat.S')
                self.connect('Fl_O:tot:h', 'out_stat.ht')
                self.connect('Fl_O:tot:P', 'out_stat.guess:Pt')
                self.connect('Fl_O:tot:gamma', 'out_stat.guess:gamt')
                self.connect('Wout','out_stat.W')
        else:
            # No statics requested: just pass the mass flow through.
            self.add_subsystem('W_passthru', PassThrough('Wout', 'Fl_O:stat:W', 1.0, units= "lbm/s"),
                               promotes=['*'])

        self.add_subsystem('FAR_pass_thru', PassThrough('Fl_I:FAR', 'Fl_O:FAR', 0.0),
                           promotes=['*'])
if __name__ == "__main__":
    # Small smoke test: drive MixFuel with independent inputs and
    # verify its analytic partials.
    p = om.Problem()
    p.model = om.Group()

    # Bug fixes: `Group.add` was removed from OpenMDAO (use `add_subsystem`)
    # and `IndepVarComp` was an undefined name (use `om.IndepVarComp`).
    p.model.add_subsystem('comp', MixFuel(), promotes=['*'])

    p.model.add_subsystem('d1', om.IndepVarComp('Fl_I:stat:W', val=1.0, units='lbm/s', desc='weight flow'),
                          promotes=['*'])
    p.model.add_subsystem('d2', om.IndepVarComp('Fl_I:FAR', val=0.2, desc='Fuel to air ratio'), promotes=['*'])
    p.model.add_subsystem('d3', om.IndepVarComp('Fl_I:tot:h', val=1.0, units='Btu/lbm', desc='total enthalpy'),
                          promotes=['*'])
    p.model.add_subsystem('d4', om.IndepVarComp('fuel_Tt', val=518.0, units='degR', desc='fuel temperature'),
                          promotes=['*'])

    p.setup(check=False)
    p.run_model()

    p.check_partials(compact_print=True)
| StarcoderdataPython |
5139850 | <gh_stars>10-100
import typing
from collections import defaultdict
import graph_tool
import graph_tool.draw
import graph_tool.inference
import graph_tool.search
import graph_tool.topology
import matplotlib.colors as colors
import numpy as np
import pandas as pd
import seaborn as sns
from cytoolz import sliding_window, unique, valfilter
from matplotlib.collections import LineCollection, PatchCollection
from matplotlib.patches import Wedge
from aves.features.geometry import bspline
from aves.models.network import Network
class HierarchicalEdgeBundling(object):
    """Hierarchical edge bundling for a network.

    Builds (or reuses) a nested stochastic blockmodel of the network, lays
    the resulting hierarchy out as a radial tree, and routes each network
    edge as a smoothed B-spline through the hierarchy tree.
    """

    def __init__(
        self,
        network: Network,
        state=None,
        covariate_type=None,
        points_per_edge=50,
        path_smoothing_factor=0.8,
    ):
        """Set up the bundling for *network*.

        If *state* (a nested blockmodel state) is given it is reused,
        otherwise one is estimated from the network.
        """
        self.network = network
        self.state = state

        if state is not None:
            self.block_levels = self.state.get_bs()
        else:
            self.estimate_blockmodel(covariate_type=covariate_type)

        self.build_community_graph()
        self.build_node_memberships()
        self.build_edges(
            n_points=points_per_edge, smoothing_factor=path_smoothing_factor
        )

    def estimate_blockmodel(self, covariate_type="real-exponential"):
        """Fit a nested blockmodel, optionally using edge weights as a covariate."""
        if self.network.edge_weight is not None and covariate_type is not None:
            state_args = dict(
                recs=[self.network.edge_weight], rec_types=[covariate_type]
            )
            self.state = graph_tool.inference.minimize_nested_blockmodel_dl(
                self.network.graph(), state_args=state_args
            )
        else:
            self.state = graph_tool.inference.minimize_nested_blockmodel_dl(
                self.network.graph()
            )

        self.block_levels = self.state.get_bs()

    def get_node_memberships(self, level):
        """Return the community id of every network node at hierarchy *level*."""
        return [
            self.membership_per_level[level][int(node_id)]
            for node_id in self.network.vertices()
        ]

    def build_community_graph(self):
        """Lay the hierarchy tree out radially and prepare the community graph."""
        from aves.visualization.networks import NodeLink

        (
            tree,
            membership,
            order,
        ) = graph_tool.inference.nested_blockmodel.get_hierarchy_tree(
            self.state, empty_branches=False
        )
        self.nested_graph = tree
        self.nested_graph.set_directed(False)

        # radial layout rooted at the hierarchy's top-level vertex (last id)
        self.radial_positions = np.array(
            list(
                graph_tool.draw.radial_tree_layout(
                    self.nested_graph, self.nested_graph.num_vertices() - 1
                )
            )
        )
        self.node_angles = np.degrees(
            np.arctan2(self.radial_positions[:, 1], self.radial_positions[:, 0])
        )
        self.node_angles_dict = dict(
            zip(map(int, self.nested_graph.vertices()), self.node_angles)
        )
        # radius of the leaf circle (distance of the first node from origin)
        self.node_ratio = np.sqrt(
            np.dot(self.radial_positions[0], self.radial_positions[0])
        )

        self.network.layout_nodes(
            method="precomputed",
            positions=self.radial_positions[: self.network.num_vertices()],
            angles=self.node_angles,
            ratios=np.sqrt(
                np.sum(self.radial_positions * self.radial_positions, axis=1)
            ),
        )

        # internal (non-leaf) hierarchy vertices form the community graph
        self.community_graph = Network(
            graph_tool.GraphView(
                self.nested_graph,
                directed=True,
                vfilt=lambda x: x >= self.network.num_vertices(),
            )
        )
        self.community_nodelink = NodeLink(self.community_graph)
        self.community_nodelink.layout_nodes(
            method="precomputed",
            positions=self.radial_positions[self.network.num_vertices() :],
            angles=self.node_angles,
            ratios=self.node_ratio,
        )

        self.community_nodelink.set_node_drawing(method="plain")
        self.community_nodelink.set_edge_drawing(method="plain")

    def build_node_memberships(self):
        """Record every leaf's community id at each level via a DFS of the tree."""
        self.nested_graph.set_directed(True)
        depth_edges = graph_tool.search.dfs_iterator(
            self.nested_graph, source=self.nested_graph.num_vertices() - 1, array=True
        )

        self.membership_per_level = defaultdict(lambda: defaultdict(int))

        stack = []
        for src_idx, dst_idx in depth_edges:
            if not stack:
                stack.append(src_idx)

            if dst_idx < self.network.num_vertices():
                # leaf node
                path = [dst_idx]
                path.extend(reversed(stack))
                for level, community_id in enumerate(path):
                    self.membership_per_level[level][dst_idx] = community_id
            else:
                while src_idx != stack[-1]:
                    # a new community, remove visited branches
                    stack.pop()
                stack.append(dst_idx)

        self.nested_graph.set_directed(False)

    def edge_to_spline(self, src, dst, n_points, smoothing_factor):
        """Return a smoothed spline path for edge (src, dst) routed through
        the hierarchy tree, or ``None`` when spline fitting fails."""
        if src == dst:
            raise Exception("Self-pointing edges are not supported")

        # control points: the tree path between the two leaves
        vertex_path, edge_path = graph_tool.topology.shortest_path(
            self.nested_graph, src, dst
        )
        edge_cp = [self.radial_positions[int(node_id)] for node_id in vertex_path]

        try:
            smooth_edge = bspline(edge_cp, degree=min(len(edge_cp) - 1, 3), n=n_points)
            # straight segment between endpoints, used to relax the bundling
            source_edge = np.vstack(
                (
                    np.linspace(
                        edge_cp[0][0], edge_cp[-1][0], num=n_points, endpoint=True
                    ),
                    np.linspace(
                        edge_cp[0][1], edge_cp[-1][1], num=n_points, endpoint=True
                    ),
                )
            ).T

            if smoothing_factor < 1.0:
                smooth_edge = smooth_edge * smoothing_factor + source_edge * (
                    1.0 - smoothing_factor
                )
            return smooth_edge
        except ValueError:
            print(src, dst, "error")
            return None

    def build_edges(self, n_points=50, smoothing_factor=0.8):
        """Compute bundled spline geometry for every network edge."""
        for e in self.network.edge_data:
            src = e.index_pair[0]
            dst = e.index_pair[1]

            curve = self.edge_to_spline(src, dst, n_points, smoothing_factor)
            if curve is not None:
                e.points = curve

    def plot_community_wedges(
        self,
        ax,
        level=1,
        wedge_width=0.5,
        wedge_ratio=None,
        wedge_offset=0.05,
        alpha=1.0,
        fill_gaps=False,
        palette="plasma",
        label_func=None,
    ):
        """Draw an annular wedge per community at *level*; returns the wedge
        metadata and the matplotlib patches."""
        if wedge_ratio is None:
            wedge_ratio = self.node_ratio + wedge_offset

        community_ids = sorted(set(self.membership_per_level[level].values()))
        community_colors = dict(
            zip(community_ids, sns.color_palette(palette, n_colors=len(community_ids)))
        )

        wedge_meta = []
        wedge_gap = 180 / self.network.num_vertices() if fill_gaps else 0

        # fom https://matplotlib.org/stable/gallery/pie_and_polar_charts/pie_and_donut_labels.html
        bbox_props = dict(boxstyle="square,pad=0.3", fc="none", ec="none")
        kw = dict(
            arrowprops=dict(arrowstyle="-", color="#abacab"),
            bbox=bbox_props,
            zorder=0,
            va="center",
            fontsize=8,
        )

        for c_id in community_ids:
            nodes_in_community = list(
                valfilter(lambda x: x == c_id, self.membership_per_level[level]).keys()
            )
            community_angles = [
                self.node_angles_dict[n_id] for n_id in nodes_in_community
            ]
            # normalize angles to [0, 360)
            community_angles = [a if a >= 0 else a + 360 for a in community_angles]
            community_angle = self.node_angles_dict[int(c_id)]
            if community_angle < 0:
                community_angle += 360

            min_angle = min(community_angles)
            max_angle = max(community_angles)
            extent_angle = max_angle - min_angle

            if extent_angle < 0:
                min_angle, max_angle = max_angle, min_angle

            if fill_gaps:
                min_angle -= wedge_gap
                max_angle += wedge_gap

            wedge_meta.append(
                {
                    "community_id": c_id,
                    "n_nodes": len(nodes_in_community),
                    "center_angle": community_angle,
                    "extent_angle": extent_angle,
                    "min_angle": min_angle,
                    "max_angle": max_angle,
                    "color": community_colors[c_id],
                }
            )

            if label_func is not None:
                community_label = label_func(c_id)

                if community_label:
                    ratio = self.node_ratio
                    mid_angle = 0.5 * (max_angle + min_angle)
                    mid_angle_radians = np.radians(mid_angle)

                    pos_x, pos_y = ratio * np.cos(mid_angle_radians), ratio * np.sin(
                        mid_angle_radians
                    )

                    horizontalalignment = {-1: "right", 1: "left"}[int(np.sign(pos_x))]
                    connectionstyle = "angle,angleA=0,angleB={}".format(mid_angle)
                    kw["arrowprops"].update({"connectionstyle": connectionstyle})
                    ax.annotate(
                        community_label,
                        xy=(pos_x, pos_y),
                        xytext=(1.35 * pos_x, 1.4 * pos_y),
                        horizontalalignment=horizontalalignment,
                        **kw,
                    )

        collection = [
            Wedge(
                0.0,
                wedge_ratio + wedge_width,
                w["min_angle"],
                w["max_angle"],
                width=wedge_width,
            )
            for w in wedge_meta
        ]

        ax.add_collection(
            PatchCollection(
                collection,
                edgecolor="none",
                color=[w["color"] for w in wedge_meta],
                alpha=alpha,
            )
        )

        return wedge_meta, collection

    def plot_community_labels(self, ax, level=None, ratio=None, offset=0.05):
        """Annotate each community id at *level* around the radial layout."""
        if ratio is None:
            ratio = self.node_ratio + offset

        community_ids = set(self.membership_per_level[level].values())

        for c_id in community_ids:
            nodes_in_community = list(
                valfilter(lambda x: x == c_id, self.membership_per_level[level]).keys()
            )
            community_angles = [
                self.node_angles_dict[n_id] for n_id in nodes_in_community
            ]
            community_angles = [a if a >= 0 else a + 360 for a in community_angles]
            # NOTE(review): uses self.node_angles (array) here while
            # plot_community_wedges uses self.node_angles_dict -- both index
            # by vertex id, presumably equivalent; verify if ids are sparse.
            community_angle = self.node_angles[int(c_id)]
            if community_angle < 0:
                community_angle += 360

            min_angle = min(community_angles)
            max_angle = max(community_angles)
            mid_angle = 0.5 * (max_angle + min_angle)
            mid_angle_radians = np.radians(mid_angle)

            pos_x, pos_y = ratio * np.cos(mid_angle_radians), ratio * np.sin(
                mid_angle_radians
            )

            ha = "left" if pos_x >= 0 else "right"
            # keep label text upright
            if mid_angle > 90:
                mid_angle = mid_angle - 180
            elif mid_angle < -90:
                mid_angle = mid_angle + 180

            ax.annotate(
                f"{c_id}",
                (pos_x, pos_y),
                rotation=mid_angle,
                ha=ha,
                va="center",
                rotation_mode="anchor",
                fontsize="small",
            )

    def plot_community_network(self, ax):
        """Draw the community-level graph (nodes and edges) on *ax*."""
        self.community_nodelink.plot_nodes(ax, color="blue", marker="s")
        self.community_nodelink.plot_edges(ax, color="black", linewidth=2, alpha=0.8)
| StarcoderdataPython |
11266227 | <filename>simple_qt/gui/control_button_frame.py
#!/usr/bin/env python3
#version 2.1
from PyQt5 import Qt
from PyQt5 import QtCore
from PyQt5 import Qt
from PyQt5.QtCore import pyqtSignal
class control_button_frame(Qt.QFrame):
    """A horizontal strip of six nudge buttons (+/-0.1, +/-1.0, +/-10.0).

    Clicking a button forwards its signed step value, together with the axis
    tag (``az_el``), to the parent's ``increment_target_angle`` method.
    """

    def __init__(self, parent=None, az_el=None):
        super(control_button_frame, self).__init__()
        self.parent = parent
        self.az_el = az_el
        self.initUI()

    def initUI(self):
        """Configure the frame, build its widgets and hook up signals."""
        self.setFrameShape(Qt.QFrame.StyledPanel)
        self.init_widgets()
        self.connect_signals()

    def init_widgets(self):
        """Create the six step buttons and lay them out left to right."""
        row = Qt.QHBoxLayout()
        for attr_name, label in (
            ("MinusTenButton", "-10.0"),
            ("MinusOneButton", "-1.0"),
            ("MinusPtOneButton", "-0.1"),
            ("PlusPtOneButton", "+0.1"),
            ("PlusOneButton", "+1.0"),
            ("PlusTenButton", "+10.0"),
        ):
            button = Qt.QPushButton(self)
            button.setText(label)
            button.setMinimumWidth(45)
            setattr(self, attr_name, button)
            row.addWidget(button)
        self.setLayout(row)

    def connect_signals(self):
        """Route every button's clicked signal to the shared handler."""
        for button in (
            self.MinusTenButton,
            self.MinusOneButton,
            self.MinusPtOneButton,
            self.PlusPtOneButton,
            self.PlusOneButton,
            self.PlusTenButton,
        ):
            button.clicked.connect(self.button_clicked)

    def button_clicked(self):
        """Forward the clicked button's step value to the parent widget."""
        sender = self.sender()
        self.parent.increment_target_angle(self.az_el, float(sender.text()))
| StarcoderdataPython |
6645830 | <reponame>bodnar-e/bimcloud-api
import requests
from .errors import raise_bimcloud_manager_error, HttpError
from .url import is_url, join_url
class ManagerApi:
    """Thin HTTP client for the BIMcloud Manager ``management/client`` API."""

    def __init__(self, manager_url):
        if not is_url(manager_url):
            raise ValueError('Manager url is invalid.')
        self.manager_url = manager_url
        self._api_root = join_url(manager_url, 'management/client')

    def create_session(self, session_id=None, *args, **kwargs):
        """(signature preserved below)"""

    def create_session(self, username, password, client_id):
        """Authenticate and return ``(user_id, session_id)``."""
        request = {
            'username': username,
            'password': password,
            'client-id': client_id
        }
        url = join_url(self._api_root, 'create-session')
        response = requests.post(url, json=request)
        result = self.process_response(response)
        # We can ignore expire-timeout for now. It will have effect on future versions of the API.
        return result['user-id'], result['session-id']

    def close_session(self, session_id):
        """Terminate the given session on the server."""
        url = join_url(self._api_root, 'close-session')
        response = requests.post(url, params={ 'session-id': session_id })
        self.process_response(response)

    def ping_session(self, session_id):
        """Keep the session alive."""
        url = join_url(self._api_root, 'ping-session')
        response = requests.post(url, params={ 'session-id': session_id })
        self.process_response(response)

    def get_resource(self, session_id, by_path=None, by_id=None, try_get=False):
        """Fetch a single resource, by id (preferred) or by path.

        When *try_get* is true, a failed path lookup returns ``None``
        instead of raising.
        """
        if by_id is not None:
            return self.get_resource_by_id(session_id, by_id)

        criterion = None
        if by_path is not None:
            criterion = { '$eq': { '$path': by_path } }

        try:
            return self.get_resource_by_criterion(session_id, criterion)
        except Exception as err:
            if try_get:
                return None
            raise err

    def get_resource_by_id(self, session_id, resource_id):
        """Fetch one resource by its id."""
        if resource_id is None:
            raise ValueError('"resource_id"" expected.')

        url = join_url(self._api_root, 'get-resource')
        response = requests.get(url, params={ 'session-id': session_id, 'resource-id': resource_id })
        result = self.process_response(response)
        return result

    def get_resources_by_criterion(self, session_id, criterion, options=None):
        """Return the list of resources matching a query *criterion* dict."""
        if criterion is None:
            raise ValueError('"criterion"" expected.')

        url = join_url(self._api_root, 'get-resources-by-criterion')
        params = { 'session-id': session_id }
        if isinstance(options, dict):
            for key in options:
                params[key] = options[key]

        response = requests.post(url, params=params, json=criterion)
        result = self.process_response(response)
        assert isinstance(result, list), 'Result is not a list.'
        return result

    def get_resource_by_criterion(self, session_id, criterion, options=None):
        """Return the first resource matching *criterion*, or ``None``."""
        result = self.get_resources_by_criterion(session_id, criterion, options)
        return result[0] if result else None

    def create_resource_group(self, session_id, name, parent_id=None):
        """Create a resource group under *parent_id*; returns its id."""
        url = join_url(self._api_root, 'insert-resource-group')
        directory = {
            'name': name,
            'type': 'resourceGroup'
        }
        response = requests.post(url, params={ 'session-id': session_id, 'parent-id': parent_id }, json=directory)
        result = self.process_response(response)
        assert isinstance(result, str), 'Result is not a string.'
        return result

    def delete_resource_group(self, session_id, directory_id):
        """Delete a resource group by id."""
        url = join_url(self._api_root, 'delete-resource-group')
        response = requests.delete(url, params={ 'session-id': session_id, 'resource-id': directory_id })
        self.process_response(response)

    def delete_blob(self, session_id, blob_id):
        """Delete a blob by id."""
        url = join_url(self._api_root, 'delete-blob')
        response = requests.delete(url, params={ 'session-id': session_id, 'resource-id': blob_id })
        self.process_response(response)

    def update_blob(self, session_id, blob):
        """Update an existing blob resource from a dict payload."""
        url = join_url(self._api_root, 'update-blob')
        response = requests.put(url, params={ 'session-id': session_id }, json=blob)
        self.process_response(response)

    def get_blob_changes_for_sync(self, session_id, path, resource_group_id, from_revision):
        """List blob changes under *path* since *from_revision* (for syncing)."""
        url = join_url(self._api_root, 'get-blob-changes-for-sync')
        request = {
            'path': path,
            'resourceGroupId': resource_group_id,
            'fromRevision': from_revision
        }
        response = requests.post(url, params={ 'session-id': session_id }, json=request)
        result = self.process_response(response)
        assert isinstance(result, object), 'Result is not an object.'
        return result

    def get_inherited_default_blob_server_id(self, session_id, resource_group_id):
        """Return the default blob-server id inherited by a resource group."""
        url = join_url(self._api_root, 'get-inherited-default-blob-server-id')
        response = requests.get(url, params={ 'session-id': session_id, 'resource-group-id': resource_group_id })
        result = self.process_response(response)
        return result

    def get_ticket(self, session_id, resource_id):
        """Obtain a base64 free-ticket granting access to *resource_id*."""
        url = join_url(self._api_root, 'ticket-generator/get-ticket')
        request = {
            'type': 'freeTicket',
            'resources': [resource_id],
            'format': 'base64'
        }
        response = requests.post(url, params={ 'session-id': session_id }, json=request)
        result = self.process_response(response, json=False)
        assert isinstance(result, bytes), 'Result is not a bytes.'
        result = result.decode('utf-8')
        return result

    @staticmethod
    def process_response(response, json=True):
        """Decode a response: JSON (or raw bytes), raising on HTTP errors.

        Status 430 carries a structured BIMcloud error payload.
        """
        # ok, status_code, reason, 430: error-code, error-message
        has_content = response.content is not None and len(response.content)
        if response.ok:
            if has_content:
                return response.json() if json else response.content
            else:
                return None
        if response.status_code == 430:
            # 430: BIMcloud Error
            assert has_content, 'BIMcloud error should has contet.'
            raise_bimcloud_manager_error(response.json())
        raise HttpError(response)
133622 | <filename>algorithms/leetcode/easy/0504_七进制数.py<gh_stars>1-10
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# author: bigfoolliu
"""
给定一个整数 num,将其转化为 7 进制,并以字符串形式输出。
示例 1:
输入: num = 100
输出: "202"
示例 2:
输入: num = -7
输出: "-10"
来源:力扣(LeetCode)
链接:https://leetcode-cn.com/problems/base-7
著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
"""
import doctest
class Solution:
    """
    >>> s = Solution()
    >>> s.convertToBase7(100)
    '202'
    >>> s.convertToBase7(-7)
    '-10'
    """

    def convertToBase7(self, num: int) -> str:
        """Return *num* written in base 7 as a string.

        Standard base conversion: repeatedly divide the magnitude by 7
        collecting remainders (until the quotient is 0), then emit the
        digits most-significant first, restoring the sign at the end.
        """
        if num == 0:
            return '0'

        sign = '-' if num < 0 else ''
        magnitude = abs(num)

        digits = []
        while magnitude:
            magnitude, remainder = divmod(magnitude, 7)
            digits.append(str(remainder))

        return sign + ''.join(reversed(digits))
if __name__ == '__main__':
    # Run the doctest examples embedded in Solution's docstring.
    doctest.testmod()
| StarcoderdataPython |
12859625 | import os
import pkg_resources
import shutil
from ..equations import equations
from ..definitions import EQN_DEFINITIONS
def generate_equations_doc(docfile):
    """
    Helper function to automatically generate a documentation page containing
    all the available equations within cweqgen.

    Parameters
    ----------
    docfile: str:
        The output file for the documentation.
    """

    # NOTE(review): blank lines / indentation inside the RST template strings
    # below were lost in extraction and have been reconstructed -- verify the
    # rendered RST output against the published docs page.
    doccontents = """
#########
Equations
#########

The currently implemented equations are:

"""

    references = """\
References
----------
"""

    usedreferences = []
    usedrefurls = []
    refcount = 1

    for eqn in EQN_DEFINITIONS:
        eqstr = ""

        # create equation
        eq = equations(eqn)

        # reuse the citation number when the same reference appears again
        if eq.reference_string in usedreferences:
            refnum = usedreferences.index(eq.reference_string) + 1
        else:
            usedreferences.append(eq.reference_string)
            usedrefurls.append(eq.reference_adsurl)
            refnum = refcount
            refcount += 1

        # section title underlined to its own length
        eqstr += """\
{0}
{1}

""".format(
            eq.description, "-" * len(eq.description)
        )

        eqstr += """
This equation can be accessed from the :func:`~cweqgen.equations.equations` function using
the name ``{}``.
""".format(
            eqn
        )

        eqno = "" if eq.reference_eqno is None else f"Eqn. {eq.reference_eqno} in "

        eqstr += """
The generated equation ({0} [{1}]_) is:

.. math::

    {2}

""".format(
            eqno, refnum, eq.equation(nocomment=True)
        )

        eqstr += """
The fiducial values defined for this equation are:

.. math::

    {}

""".format(
            eq.fiducial_equation(nocomment=True)
        )

        eqstr += """
.. note::

    These fiducial values are just those defined within this package and may not be representative
    of fiducial values used elsewhere in the literature.
"""

        eqstr += """
To generate the equation as calculated at particular values, the
:func:`~cweqgen.equations.equations` can be used as

.. py:function:: equations("{0}", {1})
    :noindex:

""".format(
            eq.equation_name,
            ", ".join(
                [
                    "{}={}".format(fid, str(val))
                    for fid, val in eq.default_fiducial_values.items()
                ]
            ),
        )

        # add doc string lines
        for line in eq.__doc__.split("\n"):
            eqstr += f"    {line}\n"

        doccontents += eqstr + "\n"

    # add in list of references
    for i in range(len(usedreferences)):
        references += """
.. [{0}] {1} [`ADS URL <{2}>`__]
""".format(
            (i + 1), usedreferences[i], usedrefurls[i]
        )

    with open(docfile, "w") as fp:
        fp.write(doccontents + references)
def generate_yamlexample_doc(docfile, eqn="h0"):
    """
    Output an example YAML file.

    Parameters
    ----------
    docfile: str
        The output file for the documentation.
    eqn: str
        The name of the equation for which the docstring is required.
    """

    # NOTE(review): pkg_resources is deprecated in favour of
    # importlib.resources; consider migrating when versions allow.
    src = os.path.join(pkg_resources.resource_filename("cweqgen", "eqnfiles"), f"{eqn}.yaml")
    shutil.copyfile(src, docfile)
| StarcoderdataPython |
3591369 | from . import heads | StarcoderdataPython |
11355557 | <reponame>EarthOnline/ICS2000-Python<filename>ics2000/Devices.py
from typing import Optional
class Device:
    """A single ICS-2000 controllable device (switch-style)."""

    def __init__(self, name, entity_id, hb):
        self._hub = hb
        self._name = name
        self._id = entity_id
        # NOTE(review): debug print kept to preserve existing behaviour.
        print(str(self._name) + " : " + str(self._id))

    def name(self):
        """Return the device's display name."""
        return self._name

    def turnoff(self):
        """Send the hub command that switches the device off."""
        cmd = self._hub.getcmdswitch(self._id, False)
        self._hub.sendcommand(cmd.getcommand())

    def turnon(self):
        """Send the hub command that switches the device on."""
        cmd = self._hub.getcmdswitch(self._id, True)
        self._hub.sendcommand(cmd.getcommand())

    def getstatus(self) -> Optional[bool]:
        """Return the device's on/off state as reported by the hub."""
        return self._hub.getlampstatus(self._id)


class Dimmer(Device):
    """A dimmable device; adds brightness control on top of ``Device``."""

    def dim(self, level):
        """Set the dim level (0-15 inclusive); out-of-range values are ignored.

        Bug fix: the original used ``super()._hub``, which raises
        AttributeError at runtime (instance attributes are not reachable
        through ``super()``), and passed the hub instead of the device id
        to ``getcmddim``.  Now mirrors ``Device.turnon``/``turnoff``.
        """
        if level < 0 or level > 15:
            return
        cmd = self._hub.getcmddim(self._id, level)
        self._hub.sendcommand(cmd.getcommand())
11245133 | from app import app, db
class Sessions(db.Model):
    """A single recorded play session linking a researcher and a player."""
    __tablename__ = 'sessions'
    session_id = db.Column(db.Integer, primary_key=True)
    app = db.Column(db.String(255))  # name of the game played in this session
    r_id = db.Column(db.Integer, db.ForeignKey('researchers.r_id'))
    user_id = db.Column(db.Integer, db.ForeignKey('players.user_id'))
    session_date = db.Column(db.String(255))  # NOTE(review): stored as text -- confirm format
class Mahjong_Games(db.Model):
    """Per-game statistics for a Mahjong session (one row per game)."""
    __tablename__ = 'mahjong_games'
    session_id = db.Column(db.Integer, db.ForeignKey('sessions.session_id'), primary_key = True)
    game_num = db.Column(db.Integer, primary_key = True)  # game index within the session
    package = db.Column(db.String(64))
    layout = db.Column(db.String(64))
    selections = db.Column(db.Integer)
    deselections = db.Column(db.Integer)
    correct_matches = db.Column(db.Integer)
    incorrect_matches = db.Column(db.Integer)
    hints_enabled = db.Column(db.Boolean)
    hints = db.Column(db.Integer)
    shuffles = db.Column(db.Integer)
    time_taken = db.Column(db.Integer)
    completion = db.Column(db.String(64))
    sessions = db.relationship(Sessions)
class Bejeweled_Sessions(db.Model):
    """Per-attempt statistics for a Bejeweled session."""
    __tablename__ = 'bejeweled_sessions'
    session_id = db.Column(db.Integer, db.ForeignKey('sessions.session_id'), primary_key = True)
    attempt_number = db.Column(db.Integer, primary_key = True)
    level = db.Column(db.Integer)
    target_score = db.Column(db.Integer)
    tile_types = db.Column(db.Integer)
    score_total = db.Column(db.Integer)
    score_zone = db.Column(db.String(255))
    latency_average = db.Column(db.Float)
    events = db.Column(db.Text)  # raw event log for the attempt
    sessions = db.relationship(Sessions)
class Wordsearch_Sessions(db.Model):
    """Per-attempt statistics for a word-search session."""
    __tablename__ = 'wordsearch_sessions'
    session_id = db.Column(db.Integer, db.ForeignKey('sessions.session_id'), primary_key = True)
    attempt_number = db.Column(db.Integer, primary_key = True)
    level = db.Column(db.Text)
    version = db.Column(db.Integer)
    rows = db.Column(db.Integer)
    words = db.Column(db.Integer)
    latency_average = db.Column(db.Float)
    longest_word = db.Column(db.Integer)
    longest_pause = db.Column(db.Float)
    events = db.Column(db.Text)  # raw event log for the attempt
    sessions = db.relationship(Sessions)
class Mole_Sessions(db.Model):
    """Aggregated statistics for a whack-a-mole session."""
    __tablename__ = 'mole_sessions'
    session_id = db.Column(db.Integer, db.ForeignKey('sessions.session_id'), primary_key = True)
    target_visibility = db.Column(db.Integer)
    target_latency = db.Column(db.Integer)
    attempt_duration = db.Column(db.Integer)
    level_progression = db.Column(db.Float)
    hit_sound = db.Column(db.Integer)
    hit_vibration = db.Column(db.Integer)
    avg_reaction_time = db.Column(db.Float)
    reaction_time_sd = db.Column(db.Float)
    events = db.Column(db.Text)  # raw event log
    # per-attempt series, serialized as text
    avg_reaction_time_by_attempt = db.Column(db.Text)
    reaction_time_sds_by_attempt = db.Column(db.Text)
    moles_hit_by_attempt = db.Column(db.Text)
    moles_missed_by_attempt = db.Column(db.Text)
    bunnies_hit_by_attempt = db.Column(db.Text)
    bunnies_missed_by_attempt = db.Column(db.Text)
    sessions = db.relationship(Sessions)
class Researchers(db.Model):
    """A researcher who runs sessions; referenced by Sessions.r_id."""
    __tablename__ = 'researchers'
    r_id = db.Column(db.Integer, primary_key = True)
    sessions = db.relationship(Sessions)
class Players(db.Model):
    """A player (study participant); referenced by Sessions.user_id."""
    __tablename__ = 'players'
    user_id = db.Column(db.Integer, primary_key = True)
    sessions = db.relationship(Sessions)
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.