id stringlengths 1 7 | text stringlengths 6 1.03M | dataset_id stringclasses 1
value |
|---|---|---|
3307351 | <filename>KNet/lib/switches.py
'''Copyright 2018 KNet Solutions, India, http://knetsolutions.in
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from __future__ import unicode_literals
import abc
from six import add_metaclass, text_type
import sys
import KNet.lib.utils as utils
import KNet.lib.ovs_cmds as ovs
@add_metaclass(abc.ABCMeta)
class Switch(object):
    """An OVS-backed switch whose lifecycle state is mirrored into the DB."""

    def __init__(self, data, controller):
        """Record switch attributes from *data* and persist them.

        data: dict with "name" plus optional "openflow", "mgmt_ips" and
        "datapathid" entries. controller: controller endpoint string or None.
        """
        self.id = utils.generate_id()
        self.name = data["name"]
        self.version = str(data["openflow"]["version"]) if "openflow" in data else None
        self.mgmt_ips = data["mgmt_ips"] if "mgmt_ips" in data else None
        self.controller = controller
        self.status = "initialized"
        self.datapathid = int(data["datapathid"]) if "datapathid" in data else int(self.id)
        # Mirror the switch record into the DB and remember its document id
        # so later status updates/removals can address it directly.
        record = {'id': self.id,
                  'name': self.name,
                  'ofversion': self.version,
                  'controller': self.controller,
                  'datapathid': self.datapathid,
                  'status': self.status}
        self.docid = utils.switch_t.insert(record)

    def create(self):
        """Create the OVS bridge and apply the configured settings.

        Returns False when the controller string is malformed; otherwise
        marks the switch as created in the DB.
        """
        ovs.create_bridge(self.name)
        if self.controller:
            if not ovs.check_controller_format(self.controller):
                return False
            ovs.set_controller(self.name, self.controller)
        ovs.set_protocol_version(self.name, str(self.version))
        ovs.set_datapath_id(self.name, self.datapathid)
        if self.mgmt_ips:
            ovs.set_mgmt_ip(self.name, self.mgmt_ips)
        ovs.disable_inband(self.name)
        self.status = "created"
        utils.switch_t.update({'status': self.status}, doc_ids=[self.docid])

    def delete(self):
        """Tear down the OVS bridge and drop the DB record."""
        ovs.delete_bridge(self.name)
        self.status = "deleted"
        utils.switch_t.remove(doc_ids=[self.docid])

    def get(self):
        """Return the stored DB document for this switch."""
        return utils.switch_t.get(doc_id=self.docid)
| StarcoderdataPython |
4817279 | #!/usr/bin/env python
# encoding: utf-8
"""Simple MD5 generation.
"""
__version__ = "$Id$"
import hashlib
from hashlib_data import lorem
# Compute and print the MD5 hex digest of the sample text.
h = hashlib.md5()
# hashlib requires bytes on Python 3; encode only when needed so the
# script also keeps working on Python 2, where `lorem` is already a str.
data = lorem if isinstance(lorem, bytes) else lorem.encode("utf-8")
h.update(data)
# Parenthesized print works as a statement on py2 and a call on py3.
print(h.hexdigest())
| StarcoderdataPython |
3229013 | import torch.nn.functional as F
from segmentron.models.model_zoo import MODEL_REGISTRY
from segmentron.models.segbase import SegBaseModel
from segmentron.config import cfg
from segmentron.modules.dmlp import DMLP
__all__ = ['Trans4PASS']
@MODEL_REGISTRY.register(name='Trans4PASS')
class Trans4PASS(SegBaseModel):
    """Trans4PASS segmentation model: shared encoder + DMLP decode head."""

    def __init__(self):
        super().__init__()
        vit_params = cfg.MODEL.TRANS2Seg
        # Spatial size of the deepest (stride-32) feature map, flattened.
        vit_params['decoder_feat_HxW'] = (cfg.TRAIN.BASE_SIZE // 32) ** 2
        vit_params['nclass'] = self.nclass
        vit_params['emb_chans'] = cfg.MODEL.EMB_CHANNELS
        self.dede_head = DMLP(vit_params)
        # Register the head under 'decoder' so framework utilities (e.g.
        # per-group learning rates) can find it by name.
        self.__setattr__('decoder', ['dede_head'])

    def forward(self, x):
        """Return a 1-tuple with per-pixel logits at the input resolution."""
        size = x.size()[2:]
        c1, c2, c3, c4 = self.encoder(x)
        # (Removed an unused `feats` list that aliased c1..c4.)
        out = self.dede_head(c1, c2, c3, c4)
        out = F.interpolate(out, size, mode='bilinear', align_corners=True)
        return (out,)
| StarcoderdataPython |
1762734 | from django.db import models
class Book(models.Model):
    """A catalogued book, uniquely identified by its ISBN."""

    isbn = models.TextField(unique=True)  # natural key; duplicates rejected at DB level
    title = models.TextField()
    author = models.TextField()
    description = models.TextField()

    def __str__(self):
        """Human-readable representation: the book title."""
        return f'{self.title}'
| StarcoderdataPython |
1675954 | <reponame>bluekyu/RenderPipeline
"""
RenderPipeline
Copyright (c) 2014-2016 tobspr <<EMAIL>>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from __future__ import division
from functools import partial
from panda3d.core import Vec3
from rplibs.yaml import load_yaml_file
from rpcore.native import NATIVE_CXX_LOADED
from rpcore.gui.draggable_window import DraggableWindow
from rpcore.gui.labeled_checkbox import LabeledCheckbox
from rpcore.gui.checkbox_collection import CheckboxCollection
class RenderModeSelector(DraggableWindow):
    """ Window which offers the user to select a render mode to apply """

    def __init__(self, pipeline, parent):
        # Fixed-size window; dragging/positioning is handled by DraggableWindow.
        DraggableWindow.__init__(self, width=690, height=340, parent=parent,
                                 title="Select render mode")
        self._pipeline = pipeline
        # Id of the active render mode; "" denotes the default (no debug) mode.
        self._selected_mode = ""
        self._create_components()
        self.hide()

    def _create_components(self):
        """ Internal method to init the components """
        DraggableWindow._create_components(self)
        self._content_node = self._node.attach_new_node("content")
        self._populate_content()

    def _populate_content(self):
        """ Populates the windows content """
        # Drop previously created checkboxes before rebuilding the panel.
        self._content_node.node().remove_all_children()

        # Reload config each time the window is opened so its easy to add new
        # render modes
        config = load_yaml_file("/$$rpconfig/debugging.yaml")
        debugger_content = self._content_node.attach_new_node("RenderModes")
        debugger_content.set_z(-20)
        debugger_content.set_x(20)
        # Entry layout: (display name, mode id, cxx-only?, required plugin, special?)
        render_modes = [("Default", "", False, "", False)]

        # Read modes from configuration
        for mode in config["render_modes"]:
            data = [mode["name"], mode["key"]]
            data.append(mode.get("cxx_only", False))
            data.append(mode.get("requires", ""))
            data.append(mode.get("special", False))
            render_modes.append(data)

        collection = CheckboxCollection()
        # Checkboxes are laid out column-wise, at most 9 entries per column.
        max_column_height = 9

        for idx, (mode, mode_id, requires_cxx, requires_plugin, special) in enumerate(render_modes):
            offs_y = (idx % max_column_height) * 24 + 35
            offs_x = (idx // max_column_height) * 220
            # Disable modes whose requirements (native module / plugin) are unmet.
            enabled = True
            if requires_cxx and not NATIVE_CXX_LOADED:
                enabled = False
            if requires_plugin:
                if not self._pipeline.plugin_mgr.is_plugin_enabled(requires_plugin):
                    enabled = False
            box = LabeledCheckbox(
                parent=debugger_content, x=offs_x, y=offs_y, text=mode.upper(),
                text_color=Vec3(0.4), radio=True, chb_checked=(mode_id == self._selected_mode),
                chb_callback=partial(self._set_render_mode, mode_id, special),
                text_size=14, expand_width=230, enabled=enabled)
            collection.add(box.checkbox)

    def _set_render_mode(self, mode_id, special, value):
        """ Callback which gets called when a render mode got selected """
        # Radio callbacks fire on both check and uncheck; only react to check.
        if not value:
            return

        # Clear all previously active render-mode defines (prefixed "_RM_").
        to_remove = []
        for define in self._pipeline.stage_mgr.defines:
            if define.startswith("_RM_"):
                to_remove.append(define)
        for define in to_remove:
            del self._pipeline.stage_mgr.defines[define]

        if mode_id == "":
            self._pipeline.stage_mgr.defines["ANY_DEBUG_MODE"] = 0
        else:
            # Don't activate the generic debugging mode for special modes. This
            # is for modes like luminance which expect the scene to be rendered
            # unaltered.
            self._pipeline.stage_mgr.defines["ANY_DEBUG_MODE"] = 0 if special else 1
            self._pipeline.stage_mgr.defines["_RM_" + mode_id] = 1
        self._selected_mode = mode_id
        # Define changes only take effect after a shader reload.
        self._pipeline.reload_shaders()

    def toggle(self):
        """ Toggles the visibility of this windows """
        if self._visible:
            self.hide()
        else:
            self._populate_content()
            self.show()
| StarcoderdataPython |
3323313 | <filename>01_von_Karman_square/src/cmpt_stats.py
"""
********************************************************************************
computes statistics
********************************************************************************
"""
import numpy as np
def fwd_stats(u, u_, n):
    """Return (MSE, standard error of the mean) of the squared errors u - u_.

    n is the sample count used for the standard-error normalization.
    """
    sq_err = np.square(u - u_)
    return np.mean(sq_err), np.std(sq_err, ddof=1) / np.sqrt(n)
def inv_stats(theta_, window = 100):
    """Return (mean, sample std) over the last *window* entries of the list theta_."""
    # theta_: list
    tail = theta_[-window:]
    return np.mean(tail), np.std(tail, ddof=1)
3211933 | # -*- coding:utf-8 -*-
import os
import random
import cv2
import argparse
def mkdir_if_not_exist(path):
    """Create the directory built from *path* components if it does not exist.

    path: iterable of path components, joined via os.path.join.
    Race-free: the original exists()/makedirs() pair could fail if the
    directory was created concurrently between the check and the call.
    """
    target = os.path.join(*path)
    try:
        os.makedirs(target)
    except OSError:
        # Already existing (possibly created concurrently) is fine;
        # any other failure (permissions, a file in the way) is re-raised.
        if not os.path.isdir(target):
            raise
class CasiaSurf(object):
    """Preprocessing helpers for the CASIA-SURF anti-spoofing dataset.

    Builds .lst index files ("idx<TAB>label<TAB>path" per line) for the
    depth modality; the color/ir code paths are kept but commented out.
    """

    def __init__(self):
        # Accumulators for the per-modality list lines.
        self.color_list = []
        self.depth_list = []
        self.ir_list = []

    def preprocess_val_list(self):
        """Write the validation depth list from val_public_list_with_label.txt."""
        positive_num = 0
        negative_num = 0
        # mkdir_if_not_exist(['data'])
        # color_wf =open('data/val_color_all_112_{}.lst'.format(9608), 'w')
        depth_wf =open('data/val_depth_all_112_{}.lst'.format(9608), 'w')
        # ir_wf =open('data/val_ir_all_112_{}.lst'.format(9608), 'w')
        with open('val_public_list_with_label.txt', 'r') as f:
            i = 0
            for line in f.readlines():
                line = line.strip()  # strip leading/trailing whitespace
                line_lst = line.split()  # split on whitespace
                # color_path = line_lst[0]
                # resize_color_path = 'Val-112/' + color_path.split('/', 1)[1]
                # colr_img = cv2.imread('../phase1/' + color_path)
                # colr_img_resized = cv2.resize(colr_img,(112,112))
                # color_path_lst = ['..', 'phase1']
                # color_path_lst.extend(resize_color_path.split('/')[0:-1])
                # mkdir_if_not_exist(color_path_lst)
                # cv2.imwrite('../phase1/' + resize_color_path, colr_img_resized)
                depth_path = line_lst[1]
                # Swap the dataset root for the pre-resized 112x112 tree.
                resize_depth_path = 'Val-112/' + depth_path.split('/', 1)[1]
                # depth_img = cv2.imread('../phase1/' + depth_path)
                # depth_img_resized = cv2.resize(depth_img, (112,112))
                # depth_path_lst = ['..', 'phase1']
                # depth_path_lst.extend(resize_depth_path.split('/')[0:-1])
                # mkdir_if_not_exist(depth_path_lst)
                # cv2.imwrite('../phase1/' + resize_depth_path, depth_img_resized)
                # ir_path = line_lst[2]
                # resize_ir_path = 'Val-112/' + ir_path.split('/', 1)[1]
                # ir_img = cv2.imread('../phase1/' + ir_path)
                # ir_img_resized = cv2.resize(ir_img, (112,112))
                # ir_path_lst = ['..', 'phase1']
                # ir_path_lst.extend(resize_ir_path.split('/')[0:-1])
                # mkdir_if_not_exist(ir_path_lst)
                # cv2.imwrite('../phase1/' + resize_ir_path, ir_img_resized)
                label = line_lst[3]
                # Validation labels are float scores; <= 0.5 counts as negative.
                if float(label) <= 0.5:
                    negative_num += 1
                else:
                    positive_num += 1
                # color_path_with_label = str(i) + '\t' + label + '\t' + resize_color_path + '\n'
                # i += 1
                depth_path_with_label = str(i) + '\t' + label + '\t' + resize_depth_path + '\n'
                i += 1
                # ir_path_with_label = str(i) + '\t' + label + '\t' + resize_ir_path + '\n'
                # i += 1
                # color_wf.write(color_path_with_label)
                depth_wf.write(depth_path_with_label)
                # ir_wf.write(ir_path_with_label)
        # print('process val line->%d' %(i // 3))
        print('process val line->%d' %(i))
        # color_wf.close()
        depth_wf.close()
        # ir_wf.close()
        print('preprocess val list success!')

    def preprocess_train_list(self):
        """Build and shuffle the full training depth list from ../phase1/train_list.txt."""
        positive_num = 0
        negative_num = 0
        with open('../phase1/train_list.txt', 'r') as f:
            i = 0
            for line in f.readlines():
                line = line.strip()  # strip leading/trailing whitespace
                line_lst = line.split()  # split on whitespace
                # color_path = line_lst[0]
                # resize_color_path = 'Training-112/' + color_path.split('/', 1)[1]
                # colr_img = cv2.imread('../phase1/' + color_path)
                # colr_img_resized = cv2.resize(colr_img,(112,112))
                # color_path_lst = ['..', 'phase1']
                # color_path_lst.extend(resize_color_path.split('/')[0:-1])
                # mkdir_if_not_exist(color_path_lst)
                # cv2.imwrite('../phase1/' + resize_color_path, colr_img_resized)
                depth_path = line_lst[1]
                resize_depth_path = 'Training-112/' + depth_path.split('/', 1)[1]
                # depth_img = cv2.imread('../phase1/' + depth_path)
                # depth_img_resized = cv2.resize(depth_img, (112,112))
                # depth_path_lst = ['..', 'phase1']
                # depth_path_lst.extend(resize_depth_path.split('/')[0:-1])
                # mkdir_if_not_exist(depth_path_lst)
                # cv2.imwrite('../phase1/' + resize_depth_path, depth_img_resized)
                # ir_path = line_lst[2]
                # resize_ir_path = 'Training-112/' + ir_path.split('/', 1)[1]
                # ir_img = cv2.imread('../phase1/' + ir_path)
                # ir_img_resized = cv2.resize(ir_img, (112,112))
                # ir_path_lst = ['..', 'phase1']
                # ir_path_lst.extend(resize_ir_path.split('/')[0:-1])
                # mkdir_if_not_exist(ir_path_lst)
                # cv2.imwrite('../phase1/' + resize_ir_path, ir_img_resized)
                label = line_lst[3]
                # Training labels are binary ints: 0 = attack, 1 = genuine.
                if 0 == int(label):
                    negative_num += 1
                else:
                    positive_num += 1
                # color_path_with_label = str(i) + '\t' + label + '\t' + resize_color_path
                # i += 1
                depth_path_with_label = str(i) + '\t' + label + '\t' + resize_depth_path
                i += 1
                # ir_path_with_label = str(i) + '\t' + label + '\t' + resize_ir_path
                # i += 1
                # self.color_list.append(color_path_with_label)
                self.depth_list.append(depth_path_with_label)
                # self.ir_list.append(ir_path_with_label)
        # print('process train line->%d' %(i // 3))
        print('process train line->%d' %(i))
        print('positive_num=%d' %(positive_num)) # 8942
        print('negative_num=%d' %(negative_num)) # 20324
        # random.shuffle(self.color_list)
        random.shuffle(self.depth_list)
        # random.shuffle(self.ir_list)
        # with open('train_color_all_112_{}.lst'.format(len(self.color_list)), 'w') as f:
        #     f.write('\n'.join(self.color_list))
        with open('data/train_depth_all_112_{}.lst'.format(len(self.depth_list)), 'w') as f:
            f.write('\n'.join(self.depth_list))
        # with open('train_ir_all_112_{}.lst'.format(len(self.ir_list)), 'w') as f:
        #     f.write('\n'.join(self.ir_list))
        print('preprocess train list success!')

    def use_train_sublist(self):
        """Filter the full training list: drop negatives from '_enm_' folders."""
        positive_num = 0
        negative_num = 0
        with open('data/train_depth_all_112_29266.lst', 'r') as f:
            i = 0
            for line in f.readlines():
                line = line.strip()  # strip leading/trailing whitespace
                line_lst = line.split()  # split on whitespace
                label = int(line_lst[1])
                depth_path = line_lst[2]
                if 0 == label:
                    # Keep only negatives that are not '_enm_' fakes.
                    if not '_enm_' in depth_path:
                        self.depth_list.append(line)
                        negative_num += 1
                else:
                    positive_num += 1
                    self.depth_list.append(line)
                i += 1
        print('process train line->%d' %(i))
        print('positive_num=%d' %(positive_num)) # 8942
        print('negative_num=%d' %(negative_num)) # 6518
        random.shuffle(self.depth_list)
        with open('data/train_depth_noenmfake_112_{}.lst'.format(len(self.depth_list)), 'w') as f:
            f.write('\n'.join(self.depth_list))
        print('preprocess train sublist success!')

    def aug_trainlist(self):
        """Balance the training list by duplicating every positive sample once."""
        with open('data/train_depth_all_112_29266.lst', 'r') as f:
            i = 0
            pLst = []
            nLst = []
            allLst = []
            for line in f.readlines():
                line = line.strip()  # strip leading/trailing whitespace
                line_lst = line.split()  # split on whitespace
                label = int(line_lst[1])
                # depth_path = line_lst[2]
                if 0 == label:
                    nLst.append(line)
                else:
                    pLst.append(line)
                i += 1
        print('process aug train line->%d' %(i))
        print('positive_num=%d' %(len(pLst))) # 8942
        print('negative_num=%d' %(len(nLst))) # 20324
        # augPLst = random.sample(pLst, 8942)
        augPLst = pLst[:]  # copy of itself
        print('augPLst num=%d' %(len(augPLst)))
        pLst.extend(augPLst)
        print('positive_num=%d' %(len(pLst))) # 17884
        print('negative_num=%d' %(len(nLst))) # 20324
        allLst = pLst + nLst
        print('allLst num=%d' %(len(allLst))) # 38208
        random.shuffle(allLst)
        with open('data/train_depth_aug_112_{}.lst'.format(len(allLst)), 'w') as f:
            f.write('\n'.join(allLst))
        print('preprocess aug trainlist success!')
def parse_args():
    """Build the command-line parser and parse sys.argv.

    Returns
    -------
    argparse namespace with ``train``, ``no_enmfake`` and ``aug`` attributes.
    """
    arg_parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description='Create an image list or \
                    make a record database by reading from an image list')
    arg_parser.add_argument('train', help='generate train/val list file & resize train/val image to 112 size which saved in ../phase1/ dir.')
    list_opts = arg_parser.add_argument_group('Options for creating image lists')
    list_opts.add_argument('--no-enmfake', action='store_true', default=False,
                           help='remove enm fake train image dataset')
    list_opts.add_argument('--aug', action='store_true', default=False,
                           help='augment train positive image dataset')
    return arg_parser.parse_args()
if __name__ == "__main__":
args = parse_args()
casiaSurf = CasiaSurf()
# mkdir_if_not_exist(['data'])
if args.train == 'train':
if args.no_enmfake:
casiaSurf.use_train_sublist()
elif args.aug:
casiaSurf.aug_trainlist()
else:
casiaSurf.preprocess_train_list()
else:
casiaSurf.preprocess_val_list()
| StarcoderdataPython |
1634693 | <gh_stars>0
'''
Created on May 19, 2013
Re-implementation of dumpevent.cc
@author: <NAME>, CERN
'''
import sys
def dumpevent( fileName, eventNumber, runNumber=None ):
    """Print a detailed dump of one LCIO event from *fileName*.

    When runNumber is given, the event is looked up directly by
    (run, event) number; otherwise the eventNumber-th event of the file
    is dumped sequentially. (Python 2 script.)
    """
    from pyLCIO import IOIMPL
    reader = IOIMPL.LCFactory.getInstance().createLCReader()
    reader.open( fileName )
    event = None
    if runNumber:
        # Direct (run, event) lookup.
        event = reader.readEvent( runNumber, eventNumber )
    else:
        # Sequential access: skip ahead so the next read yields event n.
        if eventNumber > 1:
            reader.skipNEvents( eventNumber - 1 )
        event = reader.readNextEvent()
    if event:
        from pyLCIO import UTIL
        UTIL.LCTOOLS.dumpEventDetailed( event )
    elif runNumber:
        print " couldn't find event %d - run %d in file %s" % ( eventNumber, runNumber, fileName )
    else:
        print ' less than %d events in file %s' % ( eventNumber, fileName )
def usage():
    ''' Helper method to inform about the usage of this script '''
    # Printed when the command-line arguments are missing or malformed.
    print ' usage: python dumpevent.py filename runNum evtNum '
    print ' or: python dumpevent.py filename n '
    print ' where the first dumps the event with the specified run and event number'
    print ' and the second simply dumps the n-th event in the file'
if __name__ == '__main__':
    # check the command line parameters
    if len( sys.argv ) < 3 or len( sys.argv ) > 4 or sys.argv[1] in ['-h', '--help']:
        usage()
        sys.exit( 1 )
    # Two arguments: dump the n-th event of the file.
    if len( sys.argv ) == 3:
        dumpevent( sys.argv[1], int( sys.argv[2] ) )
        sys.exit( 0 )
    # Three arguments: dump the event identified by (run, event) number.
    if len( sys.argv ) == 4:
        dumpevent( sys.argv[1], int( sys.argv[2] ), int( sys.argv[3] ) )
        sys.exit( 0 )
| StarcoderdataPython |
1720116 | <gh_stars>1-10
# ~*~ encoding: utf-8 ~*~
from pymongo import MongoClient
from pandas import read_csv
from datetime import date
# One-shot import: read the BfArM delivery-shortage report CSV and upsert
# each row into the Mongo 'drug' collection, keyed by its ENR number.
mongodb = MongoClient('192.168.178.82', 9999)
db = mongodb['dev']
drug_collection = db['drug']
# Semicolon-separated, ISO-8859-2 encoded export; to_dict() yields a
# {column_name: {row_index: value}} mapping.
drugs = read_csv('~/Dokumente/bfarm_lieferenpass_meldung.csv', delimiter=';', encoding='iso8859_2').to_dict()
# Drop columns that are not imported.
drugs.pop('Id', None)
drugs.pop('aktuelle Bescheidart', None)
drugs.pop('Meldungsart', None)
drugs.pop('aktuelle Bescheidart', None)
# NOTE(review): one dict reused across iterations — every key is overwritten
# each pass, so this works, but a fresh dict per row would be safer.
data = dict()
for x in range(drugs['Verkehrsfähig'].__len__()):
    """
    if drugs['Ende Engpass'][x] == '-':
        data['end'] = None
    else:
        day, month, year = drugs['Ende Engpass'][x].split('.')
        data['end'] = date(int(year), int(month), int(day)).__str__()
    if drugs['Beginn Engpass'][x] == '-':
        data['initial_report'] = None
    else:
        day, month, year = drugs['Beginn Engpass'][x].split('.')
        data['initial_report'] = date(int(year), int(month), int(day)).__str__()
    if drugs['Datum der letzten Meldung'][x] == '-':
        data['last_report'] = None
    else:
        day, month, year = drugs['Datum der letzten Meldung'][x].split('.')
        data['last_report'] = date(int(year), int(month), int(day)).__str__()
    """
    # 'A; B; C' -> ['A', 'B', 'C']
    data['substance'] = drugs['Wirkstoffe'][x].replace(' ', '').split(';')
    data['enr'] = int(drugs['Enr'][x])
    data['marketability'] = True if drugs['Verkehrsfähig'][x] == 'ja' else False
    data['atc_code'] = drugs['ATC-Code'][x]
    # First PZN of a possibly semicolon-separated list; '-' means none.
    data['pzn'] = int(drugs['PZN'][x].split(' ')[0].replace(';', '')) if drugs['PZN'][x] != '-' else None
    data['drug_title'] = drugs['Arzneimittelbezeichnung'][x]
    data['hospital'] = True if drugs['Krankenhausrelevant'][x] == 'ja' else False
    # Upsert by ENR so re-running the import refreshes existing records.
    drug_collection.update_one({'enr': data['enr']}, {'$set': data}, upsert=True)
| StarcoderdataPython |
3331808 | # AUTO GENERATED FILE - DO NOT EDIT
from dash.development.base_component import Component, _explicitize_args
class ForceArrayPlot(Component):
    """A ForceArrayPlot component.
The ForceArrayPlot component is used to visualize the shapley contributions
to multiple predictions made by a tree-based ML model. This is a wrapper on
top of React implementation published in shapjs package.
Read more about the component here: https://github.com/slundberg/shap

Keyword arguments:
- id (string; optional): The ID of this component, used to identify dash components
in callbacks. The ID needs to be unique to the component.
- style (dict; optional): Inline css of each element
- title (string; optional): Plot title
- className (string; optional): html class associated with the component, used for styling purposes
- baseValue (number; optional): same as explainer.expected_value
- plot_cmap (a value equal to: 'RdBu', 'GnPR', 'CyPU', 'PkYg', 'DrDb', 'LpLb', 'YlDp', 'OrId' | list of strings; default 'RdBu'): The colors used for shap contributions that increase/decrease the prediction value.
Should be one of:
-- default colour combinations RdBu, GnPR, CyPU, PkYg, DrDb, LpLb, YlDp, OrId
-- list of two hex codes, e.g., ["#AAAA11", "#6633CC"]
-- list of two rgb values, e.g., ["rgb(255, 13, 87)", "rgb(30, 136, 229)"]
- link (a value equal to: 'identity', 'logit'; default 'identity'): either 'identity' or 'logit'
- featureNames (dict with strings as keys and values of type string; optional): Labels corresponding to each feature, should have same set of keys as "features" prop
- outNames (list of strings; optional): Single element list of prediction variable name.
- labelMargin (number; optional): Margin (in px) for labels on top of the plot
- ordering_keys (list of boolean | number | string | dict | lists; optional): X-Axis label for each point
- ordering_keys_time_format (string; optional): Formatting for temporal axes, one of d3-time-formats
- explanations (dict; optional): List of predictions, where each prediction is a dictionary
describing the predicted value, similarity index and shapley
contributions of each feature. explanations has the following type: list of dicts containing keys 'outValue', 'simIndex', 'features'.
Those keys have the following types:
  - outValue (number; required)
  - simIndex (boolean | number | string | dict | list; required)
  - features (dict; required)
- clickData (dict; optional): attribute for attaching callbacks on click events"""
    @_explicitize_args
    def __init__(self, id=Component.UNDEFINED, style=Component.UNDEFINED, title=Component.UNDEFINED, className=Component.UNDEFINED, baseValue=Component.UNDEFINED, plot_cmap=Component.UNDEFINED, link=Component.UNDEFINED, featureNames=Component.UNDEFINED, outNames=Component.UNDEFINED, labelMargin=Component.UNDEFINED, ordering_keys=Component.UNDEFINED, ordering_keys_time_format=Component.UNDEFINED, explanations=Component.UNDEFINED, clickData=Component.UNDEFINED, **kwargs):
        # Boilerplate emitted by the dash component generator: declares the
        # React prop names/metadata and forwards explicit args to Component.
        self._prop_names = ['id', 'style', 'title', 'className', 'baseValue', 'plot_cmap', 'link', 'featureNames', 'outNames', 'labelMargin', 'ordering_keys', 'ordering_keys_time_format', 'explanations', 'clickData']
        self._type = 'ForceArrayPlot'
        self._namespace = 'dash_shap_components'
        self._valid_wildcard_attributes = []
        self.available_properties = ['id', 'style', 'title', 'className', 'baseValue', 'plot_cmap', 'link', 'featureNames', 'outNames', 'labelMargin', 'ordering_keys', 'ordering_keys_time_format', 'explanations', 'clickData']
        self.available_wildcard_properties = []
        _explicit_args = kwargs.pop('_explicit_args')
        _locals = locals()
        _locals.update(kwargs)  # For wildcard attrs
        # Only props explicitly passed by the caller are forwarded.
        args = {k: _locals[k] for k in _explicit_args if k != 'children'}
        # No required props for this component; loop kept by the generator.
        for k in []:
            if k not in args:
                raise TypeError(
                    'Required argument `' + k + '` was not specified.')
        super(ForceArrayPlot, self).__init__(**args)
| StarcoderdataPython |
111614 | from __future__ import absolute_import
# from similarities import all_similarities
from .scoss import Scoss
from .smoss import SMoss
from .main import *
| StarcoderdataPython |
95544 | <reponame>frozenbey/noxususerbot
import threading
from sqlalchemy import func, distinct, Column, String, UnicodeText
try:
from userbot.modules.sql_helper import SESSION, BASE
except ImportError:
raise AttributeError
class Mesajlar(BASE):
    """ORM model mapping a trigger command to its saved message.

    (komut, mesaj) form a composite primary key at the DB level.
    """
    __tablename__ = "mesaj"
    komut = Column(UnicodeText, primary_key=True, nullable=False)
    mesaj = Column(UnicodeText, primary_key=True, nullable=False)

    def __init__(self, komut, mesaj):
        # NOTE(review): original comment said "ensure string", but no
        # conversion is actually performed here.
        self.komut = komut
        self.mesaj = mesaj

    def __repr__(self):
        return "<Mesaj '%s' için %s>" % (self.komut, self.mesaj)

    def __eq__(self, other):
        # Equal only to other Mesajlar rows with identical command and message.
        return bool(isinstance(other, Mesajlar)
                    and self.komut == other.komut
                    and self.mesaj == other.mesaj)
# Create the table on import if it does not exist yet.
Mesajlar.__table__.create(checkfirst=True)
# Serializes write sequences in this module against concurrent callers.
KOMUT_INSERTION_LOCK = threading.RLock()
def ekle_mesaj(komut, mesaj):
    """Save *mesaj* under the trigger *komut*, replacing any existing entry."""
    with KOMUT_INSERTION_LOCK:
        try:
            # Drop old rows for this command so the new message replaces them.
            SESSION.query(Mesajlar).filter(Mesajlar.komut == komut).delete()
        except Exception:
            # Best effort: a failed cleanup must not block saving the new row.
            # (Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt.)
            pass
        komut = Mesajlar(komut, mesaj)
        SESSION.merge(komut)
        SESSION.commit()
def getir_mesaj(komu):
    """Return the saved message for trigger *komu*, or False when not found."""
    try:
        row = SESSION.query(Mesajlar).filter(Mesajlar.komut == komu).first()
    except Exception:
        # DB errors map to "not found". (Was a bare `except:` that also
        # relied on an AttributeError from `None.mesaj` for missing rows.)
        return False
    if row is None:
        return False
    return row.mesaj
def sil_mesaj(komu):
    """Delete the stored message for *komu*.

    Returns True on success; on failure the raised exception object is
    returned instead (callers treat any non-True result as an error).
    """
    try:
        rows = SESSION.query(Mesajlar).filter(Mesajlar.komut == komu)
        rows.delete()
        SESSION.commit()
        return True
    except Exception as err:
        return err
| StarcoderdataPython |
3351834 | <gh_stars>1-10
from clientbase.clientsocket import TcpCliSock
from clientbase.crypto import *
import time
class ClientBase:
    """Client-side protocol wrapper: account management plus goods lookup
    over a TCP socket, with encrypted authenticated exchanges.

    All methods return False on any failure (see LogIn for one exception)
    and never raise to the caller.
    """

    def __init__(self):
        self.clisock = TcpCliSock()
        # Session key, established during LogIn.
        self.key = None
        self.name = None
        self.logged = False
        # Shared protocol constants — presumably a public modulus and IV
        # agreed with the server; TODO confirm against the crypto module.
        self.allm = 6277101735386680763835789423207666416102355444464034512659
        self.alliv = 1772048404007700507
        self.ads = []
        pass

    def userstr(self, name, password):
        """Build the "name: private_code" registration line for *name*."""
        Private_Code = Make_Private_Code(hash(name), self.allm, hash(password))[0]
        return name + ': ' + str(Private_Code) + '\n'

    def NewUser(self, name, password):
        """Register a new account; True on success, False otherwise."""
        if self.clisock.state != 'Connected' or name == '' or password == '':
            return False
        msg = self.userstr(name, password)
        try:
            self.clisock.send('NUS', msg)
            res = self.clisock.recvstr()
        except:
            return False
        # NOTE(review): "Creat" typo is part of the wire protocol — the
        # server sends exactly this string; do not "fix" it unilaterally.
        if(res != 'Creat Successfully'):
            return False
        return True

    def LogIn(self, name, password):
        """Perform the challenge/response login and derive the session key."""
        if self.clisock.state != 'Connected' or name == '' or password == '':
            return False
        try:
            self.clisock.send('AFT', name)
            msg = self.clisock.recvbyte()
            if(msg == b'Unsuccessful'):
                return False
            # NOTE(review): from_bytes is a classmethod — the int(123)
            # instance is irrelevant; equivalent to int.from_bytes(msg, 'big').
            Communication_Open_Code = int(123).from_bytes(msg, 'big')
            ip = self.clisock.recvstr()
            self.key = Make_Key(Communication_Open_Code, self.allm, hash(password))
            msg = encrypt(ip, self.key, self.alliv)
            self.clisock.send('RTT', msg)
            res = self.clisock.recvbyte()
        except:
            # NOTE(review): inconsistent with the other failure paths, which
            # return False — 'Unsuccessful' is a truthy string.
            return 'Unsuccessful'
        if res == b'Unsuccessful':
            return False
        if(decrypt(res, self.key, self.alliv) != 'Sign In Successfully'):
            return False
        self.name = name
        self.logged = True
        return True

    def ChangePassword(self, new_password):
        """Change the logged-in user's password; True on success."""
        if self.clisock.state != 'Connected' or self.logged == False or new_password == '':
            return False
        try:
            msg = self.userstr(self.name, new_password)
            msg = encrypt(msg, self.key, self.alliv)
            self.clisock.send('CPC', msg)
            res = self.clisock.recvbyte()
        except:
            return False
        if res == b'Unsuccessful':
            return False
        try:
            if(decrypt(res, self.key, self.alliv) != 'Change Successfully'):
                return False
        except:
            return False
        return True

    def DeletUser(self):
        """Delete the logged-in account; True on success."""
        if self.clisock.state != 'Connected' or self.logged == False:
            return False
        try:
            msg = encrypt(self.name, self.key, self.alliv)
            self.clisock.send('DUS', msg)
            res = self.clisock.recvbyte()
            if res == b'Unsuccessful':
                return False
        except:
            return False
        if(decrypt(res, self.key, self.alliv) != 'Delete Successfully'):
            return False
        return True

    def SearchForGoods(self, name):
        """Query the server for goods matching *name*.

        Returns a list of parsed results, or False on any failure.
        """
        if self.clisock.state != 'Connected':
            return False
        try:
            self.clisock.send('SFG', name.encode())
            num = int(self.clisock.recvstr())
            reslist = []
            for i in range(num):
                msg = self.clisock.recvstr()
                # NOTE(review): eval() on data received from the socket is a
                # code-execution risk — consider ast.literal_eval instead.
                reslist.append(eval(msg))
        except:
            return False
        else:
            return reslist

    def GetAds(self):
        """Fetch advertisements into self.ads; True on success."""
        if self.clisock.state != 'Connected':
            return False
        try:
            self.clisock.send('GAD', None)
            num = int(self.clisock.recvstr())
            for i in range(num):
                msg = self.clisock.recvstr()
                # NOTE(review): same eval()-on-network-data risk as above.
                self.ads.append(eval(msg))
        except:
            return False
        else:
            return True
86772 | <filename>products/models.py
from django.conf import settings
from django.db import models
# Create your models here.
class Product(models.Model):
    """Model definition for Product: an item offered for sale by a user."""

    # Core listing data.
    name = models.CharField(max_length=60)
    description = models.CharField(max_length=140, blank=True, null=True)
    price = models.FloatField()
    stock_amount = models.IntegerField()
    package_details = models.CharField(max_length=20)
    picture = models.ImageField(upload_to="images/product")
    # Selling user; deleting the user cascades to their products.
    seller = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    delivery_option = models.BooleanField(default=False)
    # Closed category set, stored as the 4-letter code.
    CATEGORY_TYPE = (
        ("GROC", "GROCERIES"),
        ("ELEC", "ELECTRONICS"),
        ("CLTH", "CLOTHES"),
        ("HOME", "HOME AND LIVING"),
    )
    category = models.CharField(max_length=5, choices=CATEGORY_TYPE, null=False)
    date_created = models.DateTimeField(auto_now_add=True)
    date_modified = models.DateTimeField(auto_now=True)

    class Meta:
        """Meta definition for Product."""
        verbose_name = "Product"
        verbose_name_plural = "Products"

    def __str__(self):
        """Unicode representation of Product."""
        return self.name

    def get_picture(self):
        """Return the URL of the uploaded product picture."""
        return self.picture.url
| StarcoderdataPython |
176980 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from builtins import *
import numpy as np
from .rnn_base import RNNBase
from .utils import SymbolTable
class TextRNN(RNNBase):
    """TextRNN for strings of text."""

    def _preprocess_data(self, candidates, extend=False):
        """Convert candidate sentences to lookup sequences.

        :param candidates: candidates to process
        :param extend: extend symbol table for tokens (train), or lookup (test)?
        :return: (list of symbol-id arrays, list of token counts)
        """
        if not hasattr(self, 'word_dict'):
            self.word_dict = SymbolTable()
        # Training extends the vocabulary; evaluation only looks tokens up.
        to_symbol = self.word_dict.get if extend else self.word_dict.lookup
        sequences = []
        lengths = []
        for candidate in candidates:
            tokens = candidate.get_contexts()[0].text.split()
            sequences.append(np.array([to_symbol(tok) for tok in tokens]))
            lengths.append(len(tokens))
        return sequences, lengths
| StarcoderdataPython |
3299732 | def movie(card, ticket, perc):
| StarcoderdataPython |
1777607 | from dcos_installer import config
from gen.exceptions import ValidationError
def test_normalize_config_validation_exception():
    """Message errors and unset-key errors are merged into one flat dict."""
    validation_error = ValidationError(
        errors={'key': {'message': 'test'}},
        unset=set(['one', 'two']),
    )
    actual = config.normalize_config_validation_exception(validation_error)
    assert actual == {
        'key': 'test',
        'one': 'Must set one, no way to calculate value.',
        'two': 'Must set two, no way to calculate value.',
    }
| StarcoderdataPython |
3389956 | <reponame>asvatov/pomodoro-timer
import webbrowser
import gi
from pomodoro_timer.components.about_dialog import on_about_item
from pomodoro_timer.configs.main_configs import WEBSITE_BUG_REPORTS
from pomodoro_timer.configs.strings_config import STRING_ABOUT, STRING_BUGS
gi.require_version("Gtk", "3.0")
from gi.repository import Gtk as gtk
def get_dropdown_help_menu():
    """Build the Help dropdown with an About entry and a bug-report link."""
    menu = gtk.Menu()

    about_entry = gtk.MenuItem(STRING_ABOUT)
    about_entry.connect('activate', lambda widget: on_about_item(widget))
    about_entry.show()
    menu.append(about_entry)

    report_entry = gtk.MenuItem(STRING_BUGS)
    report_entry.connect('activate', lambda widget: webbrowser.open(WEBSITE_BUG_REPORTS))
    report_entry.show()
    menu.append(report_entry)

    menu.show()
    return menu
| StarcoderdataPython |
3249924 | import asyncio
import gzip
import socket
import threading
from collections import defaultdict
from contextlib import contextmanager
from queue import Queue
from google.protobuf import json_format
from sanic import Sanic, response
from signalfx.generated_protocol_buffers import signal_fx_protocol_buffers_pb2 as sf_pbuf
# This module collects metrics from the agent and can echo them back out for
# making assertions on the collected metrics.
STOP = type("STOP", (), {})
def free_tcp_socket(host="127.0.0.1"):
    """Bind a TCP socket to an OS-assigned free port on *host*.

    Returns a ``(socket, port)`` tuple; the caller owns (and must close)
    the socket.
    """
    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Port 0 asks the OS to pick any currently free port.
    listener.bind((host, 0))
    _, port = listener.getsockname()
    return (listener, port)
# Fake the /v2/datapoint endpoint and just stick all of the metrics in a
# list
# pylint: disable=unused-variable
def _make_fake_ingest(datapoint_queue, events, spans):
    """Return a Sanic app emulating the ingest API.

    Datapoint uploads are pushed onto *datapoint_queue*; events and trace
    spans are appended to the *events* and *spans* lists respectively.
    """
    app = Sanic()

    @app.middleware("request")
    async def compress_request(request):
        # Transparently gunzip request bodies so handlers can parse directly.
        encoding = request.headers.get("Content-Encoding")
        if encoding is not None and "gzip" in encoding:
            request.body = gzip.decompress(request.body)

    @app.post("/v2/datapoint")
    async def handle_datapoints(request):
        upload = sf_pbuf.DataPointUploadMessage()
        if "application/json" in request.headers.get("content-type"):
            json_format.Parse(request.body, upload)
        else:
            upload.ParseFromString(request.body)
        datapoint_queue.put(upload)
        return response.json("OK")

    @app.post("/v2/event")
    async def handle_event(request):
        upload = sf_pbuf.EventUploadMessage()
        if "application/json" in request.headers.get("content-type"):
            json_format.Parse(request.body, upload)
        else:
            upload.ParseFromString(request.body)
        events.extend(upload.events)  # pylint: disable=no-member
        return response.json("OK")

    @app.post("/v1/trace")
    async def handle_trace(request):
        spans.extend(request.json)
        return response.json([])

    return app
# Fake the dimension PUT method to capture dimension property/tag updates.
# pylint: disable=unused-variable
def _make_fake_api(dims):
    """Return a Sanic app that records dimension property/tag PUTs in *dims*."""
    app = Sanic()

    @app.put("/v2/dimension/<key>/<value>")
    async def put_dim(request, key, value):
        # Store the JSON payload under dims[key][value] for later assertions.
        dims[key][value] = request.json
        return response.json({})

    return app
# Starts up a new set of backend services that will run on a random port. The
# returned object will have properties on it for datapoints, events, and dims.
# The fake servers will be stopped once the context manager block is exited.
# pylint: disable=too-many-locals
@contextmanager
def start(ip_addr="127.0.0.1"):
    """Start fake ingest + API servers on random ports; yield a FakeBackend.

    The servers run on a dedicated asyncio loop in a background thread; a
    second thread drains the datapoint queue into the shared lists/indexes.
    Everything is torn down when the context manager block exits.
    """
    # Data structures are thread-safe due to the GIL
    _dp_upload_queue = Queue()
    _datapoints = []
    _datapoints_by_metric = defaultdict(list)
    _datapoints_by_dim = defaultdict(list)
    _events = []
    _spans = []
    _dims = defaultdict(defaultdict)
    ingest_app = _make_fake_ingest(_dp_upload_queue, _events, _spans)
    api_app = _make_fake_api(_dims)
    # Bind sockets first so the ports are known before the servers start.
    [ingest_sock, _ingest_port] = free_tcp_socket(ip_addr)
    [api_sock, _api_port] = free_tcp_socket(ip_addr)
    loop = asyncio.new_event_loop()

    async def start_servers():
        ingest_server = ingest_app.create_server(sock=ingest_sock, access_log=False)
        api_server = api_app.create_server(sock=api_sock, access_log=False)
        loop.create_task(ingest_server)
        loop.create_task(api_server)
    loop.create_task(start_servers())
    # The loop runs forever in its own thread until loop.stop() below.
    threading.Thread(target=loop.run_forever).start()

    def add_datapoints():
        # Drain uploads until the STOP sentinel arrives, indexing each
        # datapoint by metric name and by "key:value" dimension string.
        while True:
            dp_upload = _dp_upload_queue.get()
            if dp_upload is STOP:
                return
            _datapoints.extend(dp_upload.datapoints)  # pylint: disable=no-member
            for dp in dp_upload.datapoints:  # pylint: disable=no-member
                _datapoints_by_metric[dp.metric].append(dp)
                for dim in dp.dimensions:
                    _datapoints_by_dim[f"{dim.key}:{dim.value}"].append(dp)
    threading.Thread(target=add_datapoints).start()

    class FakeBackend:  # pylint: disable=too-few-public-methods
        # Handle exposing the fake servers' addresses and collected data.
        ingest_host = ip_addr
        ingest_port = _ingest_port
        ingest_url = f"http://{ingest_host}:{ingest_port}"
        api_host = ip_addr
        api_port = _api_port
        api_url = f"http://{api_host}:{api_port}"
        datapoints = _datapoints
        datapoints_by_metric = _datapoints_by_metric
        datapoints_by_dim = _datapoints_by_dim
        events = _events
        spans = _spans
        dims = _dims

        def reset_datapoints(self):
            self.datapoints.clear()
            self.datapoints_by_metric.clear()
            self.datapoints_by_dim.clear()
    try:
        yield FakeBackend()
    finally:
        # Shut down: close sockets, stop the loop, and unblock the drainer.
        ingest_sock.close()
        api_sock.close()
        loop.stop()
        _dp_upload_queue.put(STOP)
| StarcoderdataPython |
55412 | import BotDecidesPos
import numpy as np
class Collision_check:
    """Predicts whether two bots travelling straight toward their targets
    will reach their paths' intersection point at roughly the same time.

    NOTE(review): ``Engine`` is not imported in this file's visible imports
    (only BotDecidesPos and numpy are) — presumably it comes from elsewhere
    in the project; confirm before relying on this class.
    """
    def __init__(self):
        # (m, n): most recently computed intersection point of the two paths.
        self.m=0.0
        self.n=0.0
    def load_data(self,bot):
        """Return (speed, pos_x, pos_y, target_x, target_y) for *bot*."""
        tgx,tgy=bot.getTarget()
        mpx,mpy=bot.getPos()
        spd=bot.getSpeed()
        return spd,mpx,mpy,tgx,tgy
    def checkCollision(self,bot1,bot2):
        """Return True if the bots' ETAs to the path intersection differ by
        less than 1 time unit (i.e. a likely collision)."""
        eg = Engine()
        sp1,x,y,x1,y1=self.load_data(bot1)
        sp2,a,b,a1,b1=self.load_data(bot2)
        # NOTE(review): p and q are computed but only used by the
        # commented-out velocity vectors below.
        p=eg.findDist(x,y,x1,y1)
        q=eg.findDist(a,b,a1,b1)
        #v1=[sp1*(x-x1)/p,sp1*(y-y1)/p]
        #v2 = [sp2 * (a - a1) / q, sp2 * (b - b1) / q]
        #Ax=C, which is the matrix from of the equation on the path of the vehicle
        # Solve the 2x2 linear system for the intersection of the two lines.
        s=[[x-x1,y-y1],[a-a1,b-b1]]
        t=[y*x1-x*y1,b*a1-a*b1]
        self.m,self.n=eg.eq_StraightLine(s,t)
        # Distance of each bot to the intersection point -> ETA = dist/speed.
        p1=eg.findDist(x,y,self.m,self.n)
        q1=eg.findDist(a,b,self.m,self.n)
        eta1=p1/sp1;
        eta2=q1/sp2;
        if np.absolute(eta1-eta2)<1 :
            return True
        else:
            return False
    def getCollisionIndex(self):
        # Last computed intersection point (m, n).
        return self.m,self.n
    def setCollisionIndex(self,a,b):
        self.m=a
        self.n=b
| StarcoderdataPython |
44613 | <gh_stars>0
from typing import List, NoReturn
from lib.t import T
class Lecture:  # lectures/subjects
    """A lecture/course with a default room and a list of scheduled slots.

    ``schedule`` holds one ``T`` entry per scheduled occurrence; a slot may
    carry its own room, which overrides the lecture-wide default.
    """
    subject: str
    room: str
    schedule: List[T]

    def __init__(self,
                 subject: str,
                 room: str,
                 *schedule: T
                 ) -> None:
        # Fixed: was annotated "-> NoReturn", but NoReturn means the function
        # never returns normally; __init__ returns None like any initializer.
        self.subject = subject
        self.room = room
        # NOTE(review): stored as a tuple (varargs) despite the List
        # annotation above — confirm whether mutability is ever needed.
        self.schedule = schedule

    def pprint(self,
               n: int
               ) -> str:
        """Format scheduled slot *n* as '<slot>: <subject> (<room>)'."""
        ret: str = self.schedule[n].pprint() + ": " + self.subject
        ret += " (" + self.__get_room_for_lecture(n) + ")"
        return ret

    def __get_room_for_lecture(self,
                               n: int) -> str:
        # A per-slot room overrides the lecture's default room.
        if self.schedule[n].room is None:
            return self.room
        return self.schedule[n].room
| StarcoderdataPython |
3293251 | <reponame>srinirama/datacamp-downloader
import sys
import threading
import time
import colorama
from config import Config as con
from helper import bcolors
from utils import download_course, download_track, get_completed_tracks, get_completed_courses, get_all_courses
def main(argv):
    """CLI entry point for the datacamp downloader.

    Expects ``argv[0] == 'settoken'`` with the token in ``argv[1]``; then
    loops on interactive commands: 'list' (completed tracks), 'listc'
    (completed courses), 'copy' (all courses), 'exit'. Each listing runs in
    a background thread while a waiting spinner is shown, then a download
    sub-prompt handles 'download'/'downloadv'/'back'.
    """
    if argv[0] == 'settoken':
        print_dash()
        con.set_token(argv[1])
    else:
        # Any other invocation is ignored.
        return
    if not con.active:
        return
    print_dash()
    print_desc()
    while True:
        print_dash()
        s = input('>> ')
        if s == 'list':
            thread = threading.Thread(target=print_tracks)
            thread.start()
            if print_waiting(thread):
                if len(get_completed_tracks()) == 0:
                    continue
                # v == True means "also download videos".
                (s, v) = wait_download()
                if s is not None:
                    path, nums = split_download_command(s)
                    for i in nums:
                        # Resolve each requested id to its track object.
                        track = list(filter(lambda x: x.id == int(i),
                                            get_completed_tracks()))[0]
                        download_track(track, path, v)
        elif s == 'listc':
            thread = threading.Thread(target=print_courses)
            thread.start()
            if print_waiting(thread):
                if len(get_completed_courses()) == 0:
                    continue
                (s, v) = wait_download()
                if s is not None:
                    path, nums = split_download_command(s)
                    for i in nums:
                        track = list(filter(lambda x: x.id == int(i),
                                            get_completed_courses()))[0]
                        download_course(track.link, path, v)
        elif s == 'copy':
            thread = threading.Thread(target=print_all_courses)
            thread.start()
            if print_waiting(thread):
                if len(get_all_courses()) == 0:
                    continue
                (s, v) = wait_download()
                if s is not None:
                    path, nums = split_download_command(s)
                    for i in nums:
                        track = list(filter(lambda x: x.id == int(i),
                                            get_all_courses()))[0]
                        download_course(track.link, path, v)
        elif s == 'exit':
            sys.exit()
def wait_download():
    """Prompt until the user enters a download command or 'back'.

    Returns ``(command_line, include_videos)``; ``(None, False)`` for 'back'.
    """
    while True:
        line = input('>>> ')
        command = line.split()[0]
        if command == 'download':
            return line, False  # slides only
        if command == 'downloadv':
            return line, True  # slides + videos
        if line == 'back':
            return None, False
def print_waiting(thread):
    """Show an animated 'Waiting...' indicator until *thread* finishes.

    Returns True once the thread is no longer alive.
    """
    dots = 1
    # Fixed: Thread.isAlive() was removed in Python 3.9; is_alive() is the
    # supported spelling.
    while thread.is_alive():
        print('Waiting %s%s' % ('.' * dots, ' ' * (3 - dots)), end='\r')
        # Cycle the dot count 1 -> 2 -> 3 -> 1 ...
        dots = dots + 1 if dots < 3 else 1
        time.sleep(0.4)
    print('', end='\r')  # clear the spinner line
    return True
def _expand_id_range(token):
    """Expand an 'a-b' range token into the list [a, a+1, ..., b] of ints."""
    start, end = token.split('-')
    return list(range(int(start), int(end) + 1))


def split_download_command(text):
    """Parse a download command into ``(destination, ids)``.

    Supports a single-quoted destination ("download 'C:/my dir' 1-3") or a
    bare one ("download C:/ 1-3"). The id part is either an inclusive
    'a-b' range (returned as ints) or whitespace-separated id strings.
    """
    if "'" in text:
        # Quoted destination: split on the quotes; parts[1] is the path,
        # parts[2] is everything after the closing quote.
        parts = text.split("'")
        remainder = parts[2]
        if '-' in remainder:
            return parts[1], _expand_id_range(remainder)
        return parts[1], remainder.split()
    parts = text.split()
    if '-' in parts[2]:
        return parts[1], _expand_id_range(parts[2])
    return parts[1], parts[2:]
def print_courses():
    """Write the user's completed courses to stdout (red notice when none)."""
    courses = get_completed_courses()
    if not courses:
        sys.stdout.write(
            f'{bcolors.FAIL} No courses found! {bcolors.BKENDC}\n')
    for c in courses:
        sys.stdout.write(
            f'{bcolors.BKGREEN} {c.id}. {c.name} {bcolors.BKENDC}\n')
def print_all_courses():
    """Write every available course to stdout (red notice when none)."""
    courses = get_all_courses()
    if not courses:
        sys.stdout.write(
            f'{bcolors.FAIL} No courses found! {bcolors.BKENDC}\n')
    for c in courses:
        sys.stdout.write(
            f'{bcolors.BKGREEN} {c.id}. {c.name} {bcolors.BKENDC}\n')
def print_tracks():
    """Write the user's completed tracks to stdout (red notice when none)."""
    tracks = get_completed_tracks()
    if not tracks:
        sys.stdout.write(
            f'{bcolors.FAIL} No tracks found! {bcolors.BKENDC}\n')
    for t in tracks:
        sys.stdout.write(
            f'{bcolors.BKBLUE} {t.id}. {t.name} {bcolors.BKENDC}\n')
def print_desc():
    """Print the interactive usage guide (colored via bcolors escape codes)."""
    desc = 'Use the following commands in order.\n' +\
        f'1. {bcolors.BKBLUE}list{bcolors.BKENDC} : to print your completed tracks.\n' +\
        f' or {bcolors.BKBLUE}listc{bcolors.BKENDC} : to print your completed courses.\n' +\
        f'2. {bcolors.BKBLUE}download{bcolors.BKENDC} followed by the destination and the id(s) of the ' +\
        f'track(s)/course(s).\n\tThis command downloads {bcolors.OKBLUE}slides{bcolors.ENDC} only.\n' +\
        f' or {bcolors.BKBLUE}downloadv{bcolors.BKENDC} followed by the destination and the id(s) of the ' +\
        f'track(s)/course(s).\n\tThis command downloads both {bcolors.OKBLUE}slides and videos{bcolors.ENDC}.\n' +\
        f'{bcolors.OKGREEN}Note: you can type 1-13 in the download command to download courses from 1 to 13.{bcolors.ENDC}\n' +\
        '=' * 100 + '\n' + \
        f'{bcolors.BKGREEN} Example {bcolors.BKENDC}\n' + \
        '>> listc\n 1. Introduction to Databases in Python' + \
        '\n 2. Building Chatbots in Python \n' + \
        ">>> downloadv 'C:/' 2"
    print(desc)
def print_dash():
    """Print a 100-character '=' separator line."""
    separator = '=' * 100
    print(separator)
# Initialise ANSI colour handling (required for the escape codes above to
# render correctly on Windows terminals).
colorama.init()
if __name__ == "__main__":
    # print(sys.argv)
    # Skip argv[0] (script name); main() expects the user arguments only.
    main(sys.argv[1:])
| StarcoderdataPython |
3270984 | from django.conf import settings
from django.core.exceptions import ValidationError
__all__ = ['validate_url_keyword']
# Path segments reserved for URL design; identifiers must not collide with them.
_default_keywords = ('new', 'edit', 'delete')
# Projects may override the reserved set via settings.URL_KEYWORDS.
_keywords = getattr(settings, 'URL_KEYWORDS', _default_keywords)
def validate_url_keyword(value):
    """Reject identifiers that collide with the project's reserved URL
    keywords (see ``settings.URL_KEYWORDS``).

    Raises ValidationError for reserved values; returns None otherwise.
    """
    if value not in _keywords:
        return
    raise ValidationError("Identifier cannot be \"%s\"" % value)
| StarcoderdataPython |
3202719 | # -*- coding: utf-8 -*-
from odoo import models, fields, api,tools,_
from datetime import datetime, timedelta
from odoo.exceptions import UserError
# Cap Nhat Trang Thai chuyen dich kho (daft)
class Update_Invoice_Out(models.Model):
    """Placeholder extension of stock.picking; adds no behaviour of its own."""
    _inherit = "stock.picking"
# cap nhat trang thai hoa don
# class Update_Invoice(models.Model):
# _inherit = "account.invoice"
#
# @api.multi
# def schedule_update_invoice(self):
# self.env.cr.execute(
# """DELETE FROM account_invoice WHERE origin = '%s'""" % ('SO24129'))
# self.env.cr.execute(
# """DELETE FROM account_invoice WHERE origin = '%s'""" % ('INV/2018/6043'))
# print "ok"
# cap nhat trang thai don hang
class Update_Sale_Order(models.Model):
    """Placeholder extension of sale.order; adds no behaviour of its own."""
    # _inherit = "account.invoice"
    _inherit = "sale.order"
class Duoc_Crm_Lead(models.Model):
    """Extensions to crm.lead: a creation-date default, scheduled jobs that
    expire stale leads (marking them lost and flagging the partner for
    re-use), and a write() override that mirrors stage changes onto the
    partner's ``level`` field.

    NOTE(review): the scheduled jobs interpolate ids directly into SQL via
    %-formatting instead of using parameterized queries; the values come
    from the ORM (ints), but confirm this is acceptable project-wide.
    """
    _inherit = "crm.lead"

    @api.model
    def get_date(self):
        """Return today's date (used as the default for date_create)."""
        # Local import deliberately shadows the module-level
        # `from datetime import datetime` so datetime.date is reachable.
        import datetime
        return datetime.date.today()
    # Defaults to the day the lead record is created.
    date_create = fields.Date(default=get_date)
    # @api.multi
    # def icron_change_lost_reason(self):
    #
    #     obj_crm_lead = self.search([('active', '=', False),('probability', '=', 0), ('lost_reason', 'in', [5])])
    #     reason = self.env['crm.lost.reason'].search([('type_state', '=', 2)],
    #                                                 limit=1)
    #     # print len(obj_crm_lead)
    #     list_crm_lost = []
    #     for item in obj_crm_lead:
    #         list_crm_lost.append(item.id)
    #     if reason:
    #         reason_id = reason.id
    #         print reason_id
    #     if list_crm_lost:
    #         self.env.cr.execute(
    #             """UPDATE crm_lead SET lost_reason = %s WHERE id in %s""" % (
    #                 reason_id, tuple(list_crm_lost)))
    #     print "ok"
    # @api.multi
    # def icron_change_stage_and_probability(self):
    #     obj_crm_lead = self.search([('active', '=', True)])
    #     # print([item.stage_id.name for item in obj_crm_lead if item.stage_id.name == u'Cần chăm sóc hôm nay'])
    #
    #     list_ticket_apply = [item.id for item in obj_crm_lead if
    #                          item.stage_id.name.startswith(u'Đã hết hạn chăm sóc')]
    #     if list_ticket_apply:
    #         self.env.cr.execute(
    #             """UPDATE crm_lead SET probability = %s WHERE id in %s""" % (
    #                 50, tuple(list_ticket_apply)))
    # @api.multi
    @api.model
    def schedule_reject_ticket(self):
        """Cron job: mark over-age, not-yet-won leads as lost.

        The allowed age (in days) depends on the lead's type_contact
        ('reuse', 'contract', or new) and comes from the automatic-share
        settings record. Expired leads get the 'overdue' lost reason and
        their partners are flagged reuse='yes'.
        """
        today = datetime.strptime(fields.Datetime.now(),tools.DEFAULT_SERVER_DATETIME_FORMAT)
        # Stages at probability 100 are "won" — exclude leads in them.
        obj_crm_stage = self.env['crm.stage'].search([('probability','in', [100])])
        obj_crm_lead = self.search([('active','=',True),('stage_id.id','not in',obj_crm_stage.ids),('type_contact','not in',['sp'])])
        # type_state == 1 is the configured "overdue" lost reason.
        reason = self.env['crm.lost.reason'].search([('type_state', '=', 1)], limit=1)
        day_new_contact = self.env['res.automatic.share.settings'].sudo().search([])[0].conf_new_contact
        day_re_use = self.env['res.automatic.share.settings'].sudo().search([])[0].conf_re_use
        number_re_sign = self.env['res.automatic.share.settings'].sudo().search([])[0].conf_re_sign
        list_ticket_apply = []
        list_res_partner = []
        if reason:
            id_reason = reason.id
        else:
            raise UserError(_("Chưa cài đặt loại cho một lý do là Quá Hạn"))
        for rec in obj_crm_lead:
            # Allowed lifetime depends on the contact type.
            number_day = day_re_use if rec.type_contact == "reuse" else number_re_sign if rec.type_contact == "contract" else day_new_contact
            create_date = datetime.strptime(rec.create_date, tools.DEFAULT_SERVER_DATETIME_FORMAT)
            real_date = create_date + timedelta(days=number_day)
            if (real_date - today ).days < 0:
                # Deadline passed: mark lost and remember for bulk updates.
                list_ticket_apply.append(rec.id)
                rec.action_set_lost()
                if len(rec.partner_id.ids) > 0:
                    list_res_partner.append(rec.partner_id.id)
        if(list_ticket_apply):
            self.env.cr.execute("""UPDATE crm_lead SET lost_reason = %s WHERE id in %s""" % (id_reason, tuple(list_ticket_apply)))
        if(list_res_partner):
            self.env.cr.execute("""UPDATE res_partner SET reuse = '%s' WHERE id in %s""" % ('yes', tuple(list_res_partner)))
    # Reject 'new' contacts still in the "no answer" (KNM) state back to
    # re-use after 18:00:00.
    @api.multi
    @api.model
    def schedule_reject_ticket_new_to_reuse(self):
        """Cron job: expire 'new' leads stuck in a probability-70 stage.

        Only leads in a stage whose name starts with "Cần gọi lại"
        ("call back") are considered; when the configured number of days
        since the last write has elapsed exactly, the lead is marked lost
        (overdue reason) and its partner flagged reuse='yes'.
        """
        today = datetime.strptime(fields.Datetime.now(),
                                  tools.DEFAULT_SERVER_DATETIME_FORMAT)
        obj_crm_stage = self.env['crm.stage'].search(
            [('probability', 'in', [70])])
        obj_crm_lead = self.env['crm.lead'].sudo().search([
            ('active', '=', True),
            ('stage_id.id', 'in', obj_crm_stage.ids),
            ('type_contact', 'in', ['new'])])
        # print obj_crm_lead
        reason = self.env['crm.lost.reason'].search([('type_state', '=', 1)],
                                                    limit=1)
        day_new_contact = \
            self.env['res.automatic.share.settings'].sudo().search([])[
                0].conf_new_cts_knm
        list_ticket_apply = []
        list_res_partner = []
        if reason:
            id_reason = reason.id
        else:
            raise UserError(_("Chưa cài đặt loại cho một lý do là Quá Hạn"))
        for rec in obj_crm_lead:
            # print "trang thai", rec.stage_id.name, "Kieu", rec.type_contact
            number_day = day_new_contact
            if rec.stage_id.name.startswith(u"Cần gọi lại"):
                # create_date = datetime.strptime(rec.create_date,
                #                                 tools.DEFAULT_SERVER_DATETIME_FORMAT)
                writes_date = datetime.strptime(rec.write_date,
                                                tools.DEFAULT_SERVER_DATETIME_FORMAT)
                real_date = writes_date + timedelta(days=number_day)
                # print "hieu", (real_date - today ).days
                # NOTE(review): "== 0" means the lead is only caught on the
                # exact expiry day; a missed cron run would skip it.
                if (real_date - today).days == 0:
                    list_ticket_apply.append(rec.id)
                    rec.action_set_lost()
                    if len(rec.partner_id.ids) > 0:
                        list_res_partner.append(rec.partner_id.id)
        # print "ticket list", list_ticket_apply
        # print "partner list", list_res_partner
        if (list_ticket_apply):
            self.env.cr.execute(
                """UPDATE crm_lead SET lost_reason = %s WHERE id in %s""" % (
                    id_reason, tuple(list_ticket_apply)))
        if (list_res_partner):
            self.env.cr.execute(
                """UPDATE res_partner SET reuse = '%s' WHERE id in %s""" % (
                    'yes', tuple(list_res_partner)))
    # Filter duplicate tickets, keeping only the most recently updated one.
    # @api.multi
    # def reject_ticket_repeat(self):
    #     today = datetime.strptime(fields.Datetime.now(),
    #                               tools.DEFAULT_SERVER_DATETIME_FORMAT)
    #     cus_phone_obj = self.env['res.partner'].search([("active","=", True), ('phone', '!=', False)], limit=250000)
    #
    #     cus_phone = [x.phone for x in cus_phone_obj]
    #     # print cus_phone
    #     for p in cus_phone:
    #         p = p.strip()
    #         # print p
    #         obj_crm_lead = self.env['crm.lead'].search([('active', '=', True), ('phone', '=', p)])
    #         # print len(obj_crm_lead)
    #         date_update = [datetime.strptime(c.write_date,
    #                                          tools.DEFAULT_SERVER_DATETIME_FORMAT)
    #                        for c in obj_crm_lead]
    #         lst_hieu = []
    #         if len(obj_crm_lead) > 1:
    #             for x in date_update:
    #                 hieu = (today - x).days
    #                 lst_hieu.append(hieu)
    #             min_hieu = min(lst_hieu)
    #             for item in obj_crm_lead:
    #                 date_up = datetime.strptime(item.write_date,
    #                                             tools.DEFAULT_SERVER_DATETIME_FORMAT)
    #                 if (today - date_up).days > min_hieu:
    #                     self.env.cr.execute(
    #                         "UPDATE crm_lead SET active='%s' WHERE id=%s" % (
    #                             False, item.id))
    # Pick a <NAME> date to change the creation date.
    # @api.multi
    # def Change_create_on2(self):
    #     cus_phone_obj = self.env['res.partner'].search(
    #         [("active", "=", True), ('phone', '!=', False),
    #          ('date_sub', '!=', None),
    #          ('kip_sub', '!=', None),
    #          ('level', '=', None)])
    #     for item in cus_phone_obj:
    #         if item.date_sub and item.kip_sub:
    #             date_sub = datetime.strptime(item.date_sub, '%Y-%m-%d %H:%M:%S')
    #             # check which work shift was selected
    #             if item.kip_sub == 1:
    #                 item.create_on = date_sub + timedelta(hours=1)
    #             if item.kip_sub == 2:
    #                 item.create_on = date_sub + timedelta(hours=6)
    #             if item.kip_sub == 3:
    #                 item.create_on = date_sub + timedelta(hours=9)
    # hung code
    # ---- begin ----
    @api.multi
    def write(self, vals):
        """Mirror specific stage changes onto the partner's ``level``:
        2 -> level 2 (no answer), 5 -> 3 (consulted once or twice),
        4 -> 6 (consultation successful), 6 -> 4.
        """
        if 'stage_id' in vals:
            print(vals)
            # no answer, stage_id = 109
            if vals['stage_id'] == 2:
                self.partner_id.level = 2
            # first or second consultation
            if vals['stage_id'] == 5:
                self.partner_id.level = 3
            # consultation successful = 40
            if vals['stage_id'] == 4:
                self.partner_id.level = 6
            if vals['stage_id'] == 6:
                self.partner_id.level = 4
        return super(Duoc_Crm_Lead, self).write(vals)
# ----kết thúc-----
class CrmLeadLostInherit(models.TransientModel):
    """Extends the 'mark lead as lost' wizard: posts an HTML loss summary to
    the partner's chatter and archives the partner when the chosen reason is
    of type 'do not redistribute' (type_state == 2)."""
    _inherit = 'crm.lead.lost'

    @api.multi
    def action_lost_reason_apply(self):
        res = super(CrmLeadLostInherit, self).action_lost_reason_apply()
        leads = self.env['crm.lead'].browse(self.env.context.get('active_ids'))
        reason_name = self.lost_reason_id.name or ""
        # HTML summary (salesperson, date, reason) posted to the partner.
        body_html_partner = "<div><ul>" \
                            "<li>Thất Bại</li>" \
                            "<li>{sale_man} : {time}</li>" \
                            "<li>Lý Do: {activity}</li>" \
                            "</ul></div>".format(sale_man=self.env.user.name, time=fields.Date.today(),
                                                 activity=reason_name)
        leads.partner_id.message_post(body_html_partner)
        # type_state == 2 means "do not redistribute": archive the partner.
        if self.lost_reason_id and self.lost_reason_id.type_state == 2 and len(leads.partner_id.ids) > 0:
            leads.partner_id.write({'active': False})
        return res
class CrmStageInherit(models.Model):
    """Adds a unique 'state type' classifier to CRM stages so code can find
    the stage for a given order lifecycle state."""
    _inherit = "crm.stage"
    # Enforces at most one stage per type_state value.
    _sql_constraints = [
        ('type_stage', 'unique (type_state)', 'Loại trạng thái đã tồn tại'),
    ]
    type_state = fields.Selection([
        (1, 'Đơn đã bị huỷ'),
        (2, 'Đơn xác nhận'),
        (3, 'Đơn đang đi trên đường'),
        (4, 'Đơn hàng hoàn thành'),
        (5, 'Đơn hoàn')
    ], string='Loại trạng thái')
# for rec in obj_crm_lead:
class CrmLeadLostReasonInherit(models.Model):
    """Adds a 'reason type' to lost reasons: 1 = overdue, 2 = do not
    redistribute. Python-level validation ensures only one 'overdue' reason
    exists."""
    _inherit = 'crm.lost.reason'
    # NOTE(review): 'Check(1=1)' is a tautology — this SQL constraint never
    # fires; uniqueness of type_state == 1 is enforced only by the Python
    # constraint below. Confirm whether a real constraint was intended.
    _sql_constraints = [
        ('type_stage', 'Check(1=1)', 'Loại lý do đã tồn tại'),
    ]
    type_state = fields.Selection([
        (1, 'Quá Hạn'),
        (2, 'Không tái phân bổ'),
    ], string='Loại')

    @api.constrains('type_state')
    def _check_type_state(self):
        # Reject a second reason of type 'overdue' (type_state == 1).
        if self.type_state == 1 :
            obj_lost = self.search([('type_state','=',1)])
            if obj_lost :
                raise UserError(_("Loại lý do Quá Hạn đã tồn tại"))
| StarcoderdataPython |
3347836 | from .LCV_ours_sub3 import LCV as LCV_ours_sub3
| StarcoderdataPython |
1722402 | <reponame>yskn67/redashbot-python<gh_stars>1-10
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import os
API_TOKEN = os.environ['SLACK_BOT_TOKEN']
DEFAULT_REPLY = 'Usage: @redashbot {}/queries/<query-number>#<visualization-number>'.format(os.environ['REDASH_HOST'])
PLUGINS = [
'plugins'
]
| StarcoderdataPython |
181383 | <filename>src/AWS.py
import schedule
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.chrome.options import Options
try:
import autoit
except ModuleNotFoundError:
print('ModuleNotFoundError')
pass
import time
import datetime
import os
import argparse
from AWS_read import read_message, read_database, read_medialist, read_setting
from AWS_mod import webloading, autoitloading
from AWS_about import about
# Command-line interface and shared module state for the WhatsApp sender.
parser = argparse.ArgumentParser(description='PyWhatsapp Guide')
parser.add_argument('--chrome_driver_path', action='store', type=str, default='./chromedriver.exe', help='chromedriver executable path (MAC and Windows path would be different)')
parser.add_argument('--message', action='store', type=str, default='', help='Enter the msg you want to send')
parser.add_argument('--remove_cache', action='store', type=str, default='False', help='Remove Cache | Scan QR again or Not')
args = parser.parse_args()
# Clearing the Chrome profile forces a fresh WhatsApp Web QR scan.
if args.remove_cache == 'True':
    os.system('rm -rf User_Data/*')
# Module-level state shared by the functions below (set during the run).
browser = None          # selenium WebDriver instance
# Contact = None
message = None if args.message == '' else args.message  # lines to send
Link = "https://web.whatsapp.com/"
link_num = None         # per-contact send URL
wait = None             # NOTE(review): declared but never assigned globally
choice = None           # "yes" -> also send images/videos
docChoice = None        # "yes" -> also send documents
unsaved_Contacts = None  # phone numbers loaded from the workbook
wb_name = None          # workbook file name
def input_contacts(db_col):
    """Load the target phone numbers from the workbook's 'database' sheet
    (column *db_col*) into the module-level ``unsaved_Contacts`` list and
    echo them, or report that none are available."""
    global unsaved_Contacts
    unsaved_Contacts = read_database(wb_name, 'database', db_col)
    if unsaved_Contacts:
        print("Daftar nomor yang akan dikirim ->")
        for number in unsaved_Contacts:
            print(number)
    else:
        print('Tidak ada nomor tersedia')
def input_message(campaign):
    """Read the campaign message (column A of the *campaign* sheet), echo it
    between separators, and store it as a list of lines in the module-level
    ``message`` global."""
    global message
    text = read_database(wb_name, campaign, 'A')[0]
    print()
    print('-------------------------------------------')
    print(text)
    print('-------------------------------------------')
    print()
    message = text.split('\n')
def whatsapp_login(chrome_path):
    """Open WhatsApp Web in Chrome using a persistent profile.

    The User_Data profile dir keeps the session, so the QR code only needs
    scanning when the cache was removed. Sets the module-level ``browser``.
    """
    global browser
    chrome_options = Options()
    # Persistent profile: reuses the previous WhatsApp Web login.
    chrome_options.add_argument('--user-data-dir=./User_Data')
    # chrome_options.add_argument('--headless')
    browser = webdriver.Chrome(executable_path=chrome_path, options=chrome_options)
    # NOTE(review): this 'wait' is a local and is never used; the
    # module-level 'wait' stays None — confirm whether an explicit wait
    # was intended here.
    wait = WebDriverWait(browser, 600)
    browser.get(Link)
    print("Kode QR telah berhasil di pindai")
def send_message():
    """Type the global ``message`` (list of lines) into the open chat's
    input box and press Enter to send.

    Shift+Enter inserts a line break without sending, so multi-line
    messages go out as a single WhatsApp message.
    """
    global message
    try:
        address_XPATH = '//*[@id="main"]/footer/div[1]/div[2]/div/div[2]'
        input_box = browser.find_element_by_xpath(address_XPATH)
        for ch in message:
            # NOTE(review): both branches perform the identical
            # Shift+Enter key chord; only the non-empty branch also types
            # the line. Confirm the key_up(BACKSPACE) is intentional.
            if ch == "":
                ActionChains(browser).key_down(Keys.SHIFT).key_down(Keys.ENTER).key_up(Keys.ENTER).key_up(Keys.SHIFT).key_up(Keys.BACKSPACE).perform()
            else:
                ActionChains(browser).key_down(Keys.SHIFT).key_down(Keys.ENTER).key_up(Keys.ENTER).key_up(Keys.SHIFT).key_up(Keys.BACKSPACE).perform()
                input_box.send_keys(ch)
        # time.sleep(5)
        input_box.send_keys(Keys.ENTER)
        # print("Pesan berhasil dikirimkan")
    except NoSuchElementException:
        print("Pesan gagal dikirimkan")
        return
def send_attachment(docType):
    """Attach and send media (docType == 1: images/videos with captions from
    columns D/E) or documents (docType == 2: column G) in the open chat.

    Uses the clip button in WhatsApp Web plus an AutoIt-driven native
    "Open" file dialog to select the files.
    """
    if docType == 1:
        medialist = read_database(wb_name, campaign, 'D')
        medialist_Desc = read_database(wb_name, campaign, 'E')
    else:
        medialist = read_database(wb_name, campaign, 'G')
    image_path = read_medialist(medialist, docType)
    print(medialist[1:])
    # Open the attachment menu (paper-clip icon).
    clipButton = browser.find_element_by_xpath('//*[@id="main"]/footer/div[1]/div[1]/div[2]/div')
    clipButton.click()
    time.sleep(5)
    if docType == 1:
        # First menu entry: photos & videos.
        address_XPATH = '//*[@id="main"]/footer/div[1]/div[1]/div[2]/span/div/div/ul/li[1]/button'
        mediaButton = browser.find_element_by_xpath(address_XPATH)
        mediaButton.click()
    else:
        # Third menu entry: documents.
        address_XPATH = '//*[@id="main"]/footer/div[1]/div[1]/div[2]/span/div/div/ul/li[3]/button'
        docButton = browser.find_element_by_xpath(address_XPATH)
        docButton.click()
    # Drive the native file-open dialog via AutoIt (Windows only).
    autoitloading(autoit)
    time.sleep(1)
    autoit.control_focus("Open", "Edit1")
    autoit.control_set_text("Open", "Edit1", image_path)
    autoit.control_click("Open", "Button1")
    if docType == 1:
        time.sleep(5)
        # Caption box for the first selected media item.
        address_XPATH = '//*[@id="app"]/div/div/div[2]/div[2]/span/div/span/div/div/div[2]/div[1]/span/div/div[2]/div/div[3]/div[1]'
        ket = browser.find_element_by_xpath(address_XPATH)
        ket.send_keys(medialist_Desc[0])
        # One caption per additional selected file (paths are space-joined).
        a = image_path.split(' ')
        for i in range(len(a)-2):
            imPress = browser.find_element_by_xpath('//*[@id="app"]/div/div/div[2]/div[2]/span/div/span/div/div/div[2]/div[2]/span/div[' + str(i+2)+ ']')
            imPress.click()
            ket = browser.find_element_by_xpath('//*[@id="app"]/div/div/div[2]/div[2]/span/div/span/div/div/div[2]/div[1]/span/div/div[2]/div/div[3]/div[1]')
            ket.send_keys(medialist_Desc[i+1])
    time.sleep(5)
    # Green send arrow on the attachment preview screen.
    whatsapp_send_button = browser.find_element_by_xpath('//*[@id="app"]/div/div/div[2]/div[2]/span/div/span/div/div/div[2]/span/div')
    whatsapp_send_button.click()
def sendersub(istiboll, istijeda, istiwaktu, i):
    """Open a direct-send chat URL for phone number *i* and deliver the
    message plus any configured media (choice) / document (docChoice)
    attachments. Attachment failures are reported but do not abort.

    istiboll/istijeda/istiwaktu are passed through unused here (the pause
    logic lives in sender()).
    """
    global link_num
    # wa.me-style deep link that opens a chat with the given number.
    link_num = "https://web.whatsapp.com/send?phone={}&text&source&data&app_absent".format(i)
    #driver = webdriver.Chrome()
    browser.get(link_num)
    address_XPATH = '//*[@id="main"]/footer/div[1]/div[2]/div/div[2]'
    # webloading waits until the chat input box is present (valid number).
    valadation = webloading(browser, link_num, address_XPATH)
    # input_box = browser.find_element_by_xpath(address_XPATH)
    if valadation == True:
        print("Mengirim pesan ke", i)
        send_message()
        print('Pesan terkirim')
        if(choice == "yes"):
            try:
                docType = 1
                send_attachment(docType)
                print('Gambar/video terkirim')
            except:
                print('Gambar/video tidak terkirim')
        if(docChoice == "yes"):
            try:
                docType = 2
                send_attachment(docType)
                print('Dokumen terkirim')
            except:
                print('Dokumen tidak terkirim')
        time.sleep(1)
def sender(istiboll, istijeda, istiwaktu):
    """Send the campaign message to every number in ``unsaved_Contacts``.

    When *istiboll* == 'yes', pause for *istiwaktu* minutes after every
    *istijeda* messages. Fixed: the original keyed the pause on the phone
    number modulo *istijeda* and skipped sending to those contacts
    entirely; now a message counter drives the pause and every contact is
    messaged.
    """
    if not unsaved_Contacts:
        print('Tidak ada nomor tersedia')
        return
    total = len(unsaved_Contacts)
    for count, i in enumerate(unsaved_Contacts, start=1):
        sendersub(istiboll, istijeda, istiwaktu, i)
        # Rest between batches, but not after the final contact.
        if istiboll == 'yes' and count < total and count % istijeda == 0:
            print('Istirahat dulu bos,', istiwaktu, 'menit')
            time.sleep(float(istiwaktu) * 60)
if __name__ == "__main__":  # fixed: the colon was missing (SyntaxError)
    # Entry point: load settings, contacts, and the message from the
    # workbook, log in to WhatsApp Web, then send to every contact.
    about()
    wb_name = 'database.xlsx'
    db_col, campaign, choice, docChoice, istiboll, istijeda, istiwaktu = read_setting(wb_name)
    input_contacts(db_col)
    input_message(campaign)
    print("Pindai kode QR")
    whatsapp_login(args.chrome_driver_path)
    sender(istiboll, istijeda, istiwaktu)
    print("Tugas selesai")
    # browser.quit()
| StarcoderdataPython |
3393421 | #!/home/kjell/envs/vol-env/bin/python
import discord
from discord.ext import commands
import traceback
import sys
import logging
import asyncio
import asyncpg
import auth_token
import aiohttp
# set up logging: capture discord.py's INFO-level output to discord.log,
# truncating the file on every start (mode='w').
logger = logging.getLogger('discord')
logger.setLevel(logging.INFO)
handler = logging.FileHandler(
    filename='discord.log', encoding='utf-8', mode='w')
handler.setFormatter(logging.Formatter(
    '%(asctime)s:%(levelname)s:%(name)s: %(message)s'))
logger.addHandler(handler)
# setting up bot instance
description = "A bot that posts videos and streams.\n\nFor feedback and suggestions contact AtomToast#9642\n\nYou can find a small setup guide on https://github.com/AtomToast/Voice-of-Light/"
# Cogs loaded at startup (see the __main__ block at the bottom).
extensions = ["ext.youtube", "ext.twitch", "ext.reddit",
              "ext.utils", "ext.webserver", "ext.surrenderat20"]
# Commands trigger on ';' or on a direct mention of the bot.
bot = commands.Bot(command_prefix=commands.when_mentioned_or(
    ';'), description=description, activity=discord.Game(";help"))
# Shared aiohttp session; created in on_ready, closed in the kill command.
bot.session = None
@bot.event
async def on_ready():
    """Log the bot's identity and create the shared HTTP session."""
    print(f"Logged in as\n{bot.user.name}\n{bot.user.id}\n------")
    bot.session = aiohttp.ClientSession(loop=bot.loop)
# add new guilds to database
@bot.event
async def on_guild_join(guild):
    """Record a newly-joined guild in the Guilds table."""
    insert_query = "INSERT INTO Guilds (ID, Name) VALUES ($1, $2)"
    async with bot.pool.acquire() as db:
        await db.execute(insert_query, guild.id, guild.name)
    print(f">> Joined {guild.name}")
# remove guild data when leaving guilds
@bot.event
async def on_guild_remove(guild):
    """Purge all stored data for a guild when the bot leaves it."""
    subscription_tables = (
        "YoutubeSubscriptions",
        "TwitchSubscriptions",
        "SubredditSubscriptions",
        "Keywords",
        "SurrenderAt20Subscriptions",
    )
    async with bot.pool.acquire() as db:
        await db.execute("DELETE FROM Guilds WHERE ID=$1", guild.id)
        for table in subscription_tables:
            await db.execute(f"DELETE FROM {table} WHERE Guild=$1", guild.id)
    print(f"<< Left {guild.name}")
@bot.event
async def on_command_error(ctx, error):
    """Global command-error handler: DM the author about permission-type
    failures, react with a mute emoji on Forbidden, and log everything
    else to stderr."""
    # This prevents any commands with local handlers being handled here in on_command_error.
    if hasattr(ctx.command, 'on_error'):
        return
    ignored = (commands.CommandNotFound, commands.UserInputError)
    # Allows us to check for original exceptions raised and sent to CommandInvokeError.
    # If nothing is found. We keep the exception passed to on_command_error.
    error = getattr(error, 'original', error)
    # Anything in ignored will return and prevent anything happening.
    if isinstance(error, ignored):
        return
    elif isinstance(error, commands.NoPrivateMessage):
        try:
            return await ctx.author.send(f'{ctx.command} can not be used in Private Messages.')
        except Exception:
            # The author may have DMs disabled; nothing more to do.
            pass
    elif isinstance(error, commands.MissingPermissions):
        try:
            # NOTE(review): "this this" typo in the user-facing message;
            # left untouched here because it is runtime text.
            return await ctx.author.send('You lack permissions for this this command.')
        except Exception:
            pass
    elif isinstance(error, commands.BotMissingPermissions):
        try:
            return await ctx.author.send("The bot lacks the permissions: " + " ".join(error.missing_perms))
        except Exception:
            pass
    elif isinstance(error, discord.errors.Forbidden):
        try:
            # Can't respond in-channel; signal with a mute reaction instead.
            return await ctx.message.add_reaction("🔇")
        except Exception:
            pass
    # Unhandled: dump the full traceback to stderr for debugging.
    print('Ignoring exception in command {}:'.format(
        ctx.command), file=sys.stderr)
    traceback.print_exception(
        type(error), error, error.__traceback__, file=sys.stderr)
# bot shutdown
# bot shutdown
@commands.is_owner()
@bot.command(hidden=True)
async def kill(ctx):
    """Owner-only graceful shutdown: stop the webserver cog, cancel the
    reddit poller, drain/close the DB pool, then close the HTTP session
    and the bot itself."""
    await ctx.send(":(")
    ws = bot.get_cog("Webserver")
    await ws.site.stop()
    await ws.runner.cleanup()
    rd = bot.get_cog("Reddit")
    rd.reddit_poller.cancel()
    try:
        # Give in-flight queries up to 10s to finish before forcing.
        await asyncio.wait_for(bot.pool.close(), 10.0)
    except asyncio.TimeoutError:
        await bot.pool.expire_connections()
        bot.pool.terminate()
    await bot.session.close()
    await bot.close()
# fetch guilds and add guilds, not yet in database
@commands.is_owner()
@bot.command(hidden=True)
async def fetchguilds(ctx):
async with bot.pool.acquire() as db:
guilds_db = await db.fetch("SELECT ID, Name FROM Guilds")
guilds_bot = bot.guilds
for g_bot in guilds_bot:
for g_db in guilds_db:
if g_db[0] == g_bot.id:
break
else:
await db.execute("INSERT INTO Guilds (ID, Name) VALUES ($1, $2)", g_bot.id, g_bot.name)
print(f">> Joined {g_bot.name}")
for g_db in guilds_db:
guild_obj = bot.get_guild(g_db[0])
if guild_obj is None:
await db.execute("DELETE FROM Guilds WHERE ID=$1", g_db[0])
await db.execute("DELETE FROM YoutubeSubscriptions WHERE Guild=$1", g_db[0])
await db.execute("DELETE FROM TwitchSubscriptions WHERE Guild=$1", g_db[0])
await db.execute("DELETE FROM SubredditSubscriptions WHERE Guild=$1", g_db[0])
await db.execute("DELETE FROM Keywords WHERE Guild=$1", g_db[0])
await db.execute("DELETE FROM SurrenderAt20Subscriptions WHERE Guild=$1", g_db[0])
print(f"<< Left {g_db[1]}")
await ctx.send("Done fetching guilds!")
# send an announcement to all servers the bot is on
@commands.is_owner()
@bot.command(hidden=True)
async def announce(ctx, *, message):
async with bot.pool.acquire() as db:
guilds_db = await db.fetch("SELECT * FROM Guilds")
for g in guilds_db:
if g[2] is not None:
channel = bot.get_channel(g[2])
await channel.send("```" + message + "```")
elif g[3] is not None:
channel = bot.get_channel(g[3])
await channel.send("```" + message + "```")
elif g[4] is not None:
channel = bot.get_channel(g[4])
await channel.send("```" + message + "```")
elif g[5] is not None:
channel = bot.get_channel(g[5])
await channel.send("```" + message + "```")
else:
guild = bot.get_guild(g[0])
for ch in guild.text_channels:
bot_member = guild.get_member(bot.user.id)
permissions = ch.permissions_for(bot_member)
if permissions.send_messages:
await channel.send("```" + message + "```")
break
await ctx.send("Announcement sent!")
# love
@bot.command(hidden=True, aliases=["-;"])
async def luv(ctx):
    """React to the invoking message with the custom heart emote,
    silently ignoring any failure (missing emote, no permission)."""
    heart = bot.get_emoji(423224786664161280)
    try:
        await ctx.message.add_reaction(heart)
    except Exception:
        pass
if __name__ == "__main__":
    # Create the asyncpg pool synchronously before the bot starts, load
    # all cogs, then hand control to discord.py's event loop.
    bot.pool = bot.loop.run_until_complete(asyncpg.create_pool(
        database="voiceoflightdb", loop=bot.loop, command_timeout=60))
    for ext in extensions:
        bot.load_extension(ext)
    bot.run(auth_token.discord)
# Invite URL for adding the bot to a server:
# https://discordapp.com/api/oauth2/authorize?client_id=460410391290314752&scope=bot&permissions=19456
| StarcoderdataPython |
class Bunker:
    """Shelter holding survivors plus their consumable supplies and medicine."""

    def __init__(self):
        self.survivors = []
        self.supplies = []
        self.medicine = []

    @property
    def food(self):
        """All FoodSupply items; raises IndexError when none remain."""
        # BUG FIX: supplies are instances, which have no __name__; check the
        # class name instead (matches the checks in heal()/sustain() below).
        food_supplies = [f for f in self.supplies if type(f).__name__ == "FoodSupply"]
        if not food_supplies:
            raise IndexError("There are no food supplies left!")
        return food_supplies

    @property
    def water(self):
        """All WaterSupply items; raises IndexError when none remain."""
        water_supplies = [w for w in self.supplies if type(w).__name__ == "WaterSupply"]
        if not water_supplies:
            raise IndexError("There are no water supplies left!")
        return water_supplies

    @property
    def painkillers(self):
        """All Painkillers items; raises IndexError when none remain."""
        painkillers_supplies = [p for p in self.medicine if type(p).__name__ == "Painkillers"]
        if not painkillers_supplies:
            raise IndexError("There are no painkillers left!")
        return painkillers_supplies

    @property
    def salves(self):
        """All Salves items; raises IndexError when none remain."""
        salves_supplies = [s for s in self.medicine if type(s).__name__ == "Salves"]
        if not salves_supplies:
            raise IndexError("There are no salves left!")
        return salves_supplies

    def add_survivor(self, survivor):
        """Register a survivor; the same object cannot be added twice."""
        if survivor in self.survivors:
            raise ValueError(f"Survivor with name {survivor.name} already exists.")
        self.survivors.append(survivor)

    def add_supply(self, supply):
        self.supplies.append(supply)

    def add_medicine(self, medicine):
        self.medicine.append(medicine)

    def heal(self, survivor, medicine_type):
        """Consume the newest medicine of *medicine_type* on a hurt survivor."""
        if survivor.needs_healing:
            # iterate newest-first so the most recently added item is used
            for med in self.medicine[::-1]:
                if type(med).__name__ == medicine_type:
                    med.apply(survivor)
                    self.medicine.remove(med)
                    return f"{survivor.name} healed successfully with {medicine_type}"

    def sustain(self, survivor, sustenance_type):
        """Consume the newest supply of *sustenance_type* on a needy survivor."""
        if survivor.needs_sustenance:
            for sup in self.supplies[::-1]:
                if type(sup).__name__ == sustenance_type:
                    sup.apply(survivor)
                    self.supplies.remove(sup)
                    return f"{survivor.name} sustained successfully with {sustenance_type}"

    def next_day(self):
        """Apply age-based needs decay, then feed and water every survivor."""
        for survivor in self.survivors:
            survivor.needs -= survivor.age * 2
        for survivor in self.survivors:
            self.sustain(survivor, "FoodSupply")
            self.sustain(survivor, "WaterSupply")
| StarcoderdataPython |
26191 | # -*- coding: utf-8 -*-
# Print paired counters: I climbs 1, 4, 7, ... while J drops 60, 55, ..., 0.
for step, j in enumerate(range(60, -1, -5)):
    print('I={} J={}'.format(1 + 3 * step, j))
| StarcoderdataPython |
136697 | # RAiDAuth and RAiDFactory classes
#
# Wrapper classes around the Python Requests library
# to facilitate the creation and updating of RAiDs
#
# Written by <NAME> <<EMAIL>>
#
# Updated 23 Jul 2020
import logging
import requests
from requests.auth import AuthBase
from urllib.parse import quote
import backoff
from .mt_json import dict_to_json
from .config_helper import process_config
logger = logging.getLogger(__name__)
class RAiDAuth(AuthBase):
    """requests auth hook that attaches the RAiD API key as a Bearer token."""
    def __init__(self,
                 api_key):
        # api_key: RAiD service API key used on every request
        self.api_key = api_key
    def __call__(self,
                 request):
        # requests invokes this per request; inject the Authorization header
        request.headers['Authorization'] = f'Bearer {self.api_key}'
        return request
class RAiDFactory():
    """Thin wrapper around the RAiD REST API (mint / fetch / update RAiDs)."""

    def __init__(self,
                 global_config_file_path):
        """Load configuration and prepare auth, proxy and TLS settings.

        Args:
            global_config_file_path: path handed to process_config for the
                environment-level settings (API key, URL, proxies, cert flag).
        """
        # global_config holds environment variables that don't change often
        # such as LDAP parameters and project_db stuff
        local_keys = [
            'raid_api_key',
            'raid_url',
            'raid_cert',
            'proxy_http',
            'proxy_https']
        self.config = process_config(keys=local_keys,
                                     global_filepath=global_config_file_path)
        self.auth = RAiDAuth(self.config['raid_api_key'])
        # NOTE(review): this debug print exposes the API key on stdout
        print(self.config)
        self.proxies = {}
        if 'proxy_http' in self.config.keys():
            self.proxies["http"] = self.config['proxy_http']
        if 'proxy_https' in self.config.keys():
            self.proxies["https"] = self.config['proxy_https']
        # BUG FIX: `self.proxies is {}` compared identity with a fresh dict
        # literal and was always False; test for emptiness instead.
        if not self.proxies:
            self.proxies = None
        self.verify_certificate = bool(self.config['raid_cert'])

    @backoff.on_exception(backoff.expo,
                          (requests.exceptions.Timeout,
                           requests.exceptions.ConnectionError),
                          max_tries=8)
    def rest_api_call(self,
                      method,  # REST api method
                      action,  # action here refers to experiment, dataset or datafile
                      data=None,
                      params=None,
                      extra_headers=None):
        '''Function to handle the REST API calls
        Inputs:
        =================================
        method: The REST API method, POST, GET etc.
        action: The object type to call REST API on, e.g. experiment, dataset
        data: A JSON string containing data for generating an object via POST/PUT
        params: A JSON string of parameters to be passed in the URL
        extra_headers: Extra headers (META) to be passed to the API call
        Returns:
        =================================
        A Python Requests library repsonse object
        '''
        url = self.config['raid_url'] + f'/{action}'
        print(url)
        headers = {'Accept': 'application/json'}
        if extra_headers:
            headers = {**headers, **extra_headers}
        try:
            if self.proxies:
                response = requests.request(method,
                                            url,
                                            data=data,
                                            params=params,
                                            headers=headers,
                                            auth=self.auth,
                                            verify=self.verify_certificate,
                                            proxies=self.proxies)
            else:
                response = requests.request(method,
                                            url,
                                            data=data,
                                            params=params,
                                            headers=headers,
                                            auth=self.auth,
                                            verify=self.verify_certificate)
            # 502 Bad Gateway triggers retries, since the proxy web
            # server (eg Nginx or Apache) in front of MyTardis could be
            # temporarily restarting
            if response.status_code == 502:
                self.__raise_request_exception(response)
            else:
                response.raise_for_status()
        except requests.exceptions.RequestException as err:
            logger.error("Request failed : %s : %s", err, url)
            raise err
        except Exception as err:
            # BUG FIX: generic exceptions have no `.msg` attribute, which made
            # this handler raise AttributeError; log the exception itself.
            logger.error(
                f'Error, {err}, occurred when attempting to call api request {url}')
            raise err
        return response

    def get_all_raids(self):
        """Fetch all RAiDs owned by the authenticated account."""
        response = self.rest_api_call('GET',
                                      'RAiD?owner=true')
        return response

    def get_raid(self,
                 raid_handle):
        """Fetch one RAiD by its (URL-quoted) handle."""
        url_safe_raid_handle = quote(raid_handle, safe='')
        response = self.rest_api_call('GET',
                                      f'RAiD/{url_safe_raid_handle}')
        return response

    def mint_raid(self,
                  name,
                  description,
                  url,
                  metadata=None,
                  startdate=None):
        """Create a new RAiD; extra *metadata* keys are merged into `meta`."""
        raid_dict = {}
        raid_dict['contentPath'] = url
        if startdate:
            raid_dict['startDate'] = startdate
        raid_dict['meta'] = {'name': name,
                             'description': description}
        if metadata:
            # merged after name/description, so metadata may override them
            # (same behaviour as the original key-by-key loop)
            raid_dict['meta'].update(metadata)
        raid_json = dict_to_json(raid_dict)
        response = self.rest_api_call('POST',
                                      'RAiD',
                                      data=raid_json)
        return response

    def update_raid(self,
                    name,
                    description,
                    url,
                    raid_handle):
        """Update an existing RAiD's name, description and content path."""
        raid_dict = {'contentPath': url,
                     'name': name,
                     'description': description}
        raid_json = dict_to_json(raid_dict)
        url_safe_raid_handle = quote(raid_handle, safe='')
        response = self.rest_api_call('PUT',
                                      f'RAiD/{url_safe_raid_handle}',
                                      data=raid_json)
        return response
| StarcoderdataPython |
3386113 | <gh_stars>100-1000
import unittest
from vint.linting.config.config_comment_parser import (
parse_config_comment,
ConfigComment,
)
class ConfigCommentAssertion(unittest.TestCase):
    """Mixin adding a field-wise equality assertion for ConfigComment objects."""
    def assertConfigCommentEqual(self, a, b):
        # type: (ConfigComment, ConfigComment) -> None
        # ConfigComment defines no __eq__, so compare its fields explicitly
        self.assertEqual(a.config_dict, b.config_dict)
        self.assertEqual(a.is_only_next_line, b.is_only_next_line)
class TestConfigCommentParser(ConfigCommentAssertion, unittest.TestCase):
    """Tests for parse_config_comment (`" vint: ..."` policy toggles)."""
    def test_parse_config_comment_empty(self):
        """A bare `vint:` comment yields an empty policies dict."""
        expected_config_comment = ConfigComment(
            config_dict={'policies': {}},
            is_only_next_line=False
        )
        config_comment = parse_config_comment(' vint:')
        self.assertConfigCommentEqual(config_comment, expected_config_comment)
    def test_parse_config_comment(self):
        """`-Policy` disables and `+Policy` enables for the rest of the file."""
        expected_config_comment = ConfigComment(
            config_dict={
                'policies': {
                    'Policy1': {
                        'enabled': False,
                    },
                    'Policy2': {
                        'enabled': True,
                    },
                },
            },
            is_only_next_line=False
        )
        config_comment = parse_config_comment(' vint: -Policy1 +Policy2')
        self.assertConfigCommentEqual(config_comment, expected_config_comment)
    def test_parse_config_comment_next_line(self):
        """`next-line` scopes the toggles to the following line only."""
        expected_config_comment = ConfigComment(
            config_dict={
                'policies': {
                    'Policy1': {
                        'enabled': False,
                    },
                    'Policy2': {
                        'enabled': True,
                    },
                },
            },
            is_only_next_line=True
        )
        config_dict = parse_config_comment(' vint: next-line -Policy1 +Policy2')
        self.assertConfigCommentEqual(config_dict, expected_config_comment)
    def test_parse_config_comment_next_line_with_no_white_spaces(self):
        """Whitespace after `vint:` is optional."""
        expected_config_comment = ConfigComment(
            config_dict={'policies': {}},
            is_only_next_line=True
        )
        config_dict = parse_config_comment('vint:next-line')
        self.assertConfigCommentEqual(config_dict, expected_config_comment)
    def test_parse_not_config_comment(self):
        """Ordinary comments are not config comments and return None."""
        config_comment = parse_config_comment(' not config comment')
        self.assertIsNone(config_comment)
if __name__ == '__main__':
    # allow running this test module directly
    unittest.main()
| StarcoderdataPython |
def formstash_to_querystring(formStash):
    """Flatten formStash.errors into a sorted, URL-friendly fragment.

    Each `field--message` pair has whitespace replaced by `+`; pairs are
    sorted and joined with `---`.
    """
    entries = [
        ("%s--%s" % (field, message)).replace("\n", "+").replace(" ", "+")
        for (field, message) in formStash.errors.items()
    ]
    return "---".join(sorted(entries))
class _UrlSafeException(Exception):
@property
def as_querystring(self):
return str(self).replace("\n", "+").replace(" ", "+")
class GarfieldMinusGarfield(Exception):
    """
    An exception for odd, "should never happen" moments.
    """

    pass
class InvalidTransition(Exception):
    """Raised when an object's state transition is not allowed."""

    pass
class ObjectExists(Exception):
    """Raised when an object already exists, so there is no need to create it."""

    pass
class ConflictingObject(Exception):
    """
    Raised when an object already exists and conflicts with the request.
    args[0] = tuple(conflicting_object, error_message_string)
    """

    pass
class OpenSslError(Exception):
    """Base class for errors raised while invoking OpenSSL."""
    pass
class OpenSslError_CsrGeneration(OpenSslError):
    """Raised when CSR generation via OpenSSL fails."""
    pass
class OpenSslError_InvalidKey(OpenSslError):
    """Raised when a private key fails OpenSSL validation."""
    pass
class OpenSslError_InvalidCSR(OpenSslError):
    """Raised when a CSR fails OpenSSL validation."""
    pass
class OpenSslError_InvalidCertificate(OpenSslError):
    """Raised when a certificate fails OpenSSL validation."""
    pass
class OpenSslError_VersionTooLow(OpenSslError):
    """Raised when the installed OpenSSL version is too old."""
    pass
class QueueProcessingError(Exception):
    """Raised when processing a work queue fails."""
    pass
class AcmeError(_UrlSafeException):
    """Base class for ACME protocol errors; message is querystring-safe."""
    pass
class AcmeDuplicateAccount(AcmeError):
    """
    Raised when an AcmeAccount already exists.
    args[0] MUST be the duplicate AcmeAccount
    """

    pass
class AcmeDuplicateChallenges(AcmeError):
    """Base class for duplicate-challenge conditions."""
    pass
class AcmeDuplicateChallengesExisting(AcmeDuplicateChallenges):
    """args[0] should be a list of the active AcmeChallenge objects."""

    def __str__(self):
        described = ", ".join(
            "`%s` (%s)" % (challenge.domain.domain_name, challenge.acme_challenge_type)
            for challenge in self.args[0]
        )
        return """One or more domains already have active challenges: %s.""" % described
class AcmeDuplicateChallenge(AcmeDuplicateChallenges):
    """args[0] should be a single active AcmeChallenge object."""

    def __str__(self):
        domain_name = self.args[0].domain.domain_name
        return """This domain already has active challenges: `%s`.""" % domain_name
class AcmeDuplicateOrderlessDomain(AcmeDuplicateChallenges):
    """Raised when an orderless domain already has challenges."""
    pass
class AcmeServerError(AcmeError):
    """Raised when the ACME server returns an error response."""
    pass
class AcmeServer404(AcmeServerError):
    """Raised when the ACME server returns a 404 for a resource."""
    pass
class AcmeCommunicationError(AcmeError):
    """Raised when communication with the ACME server fails."""
    pass
class AcmeAuthorizationFailure(AcmeError):
    """Raised when an ACME Authorization fails."""

    pass
class AcmeOrphanedObject(AcmeError):
    """Raised when an ACME object has lost its parent/owner."""
    pass
class AcmeOrderError(AcmeError):
    """Base class for AcmeOrder errors."""
    pass
class AcmeOrderFatal(AcmeOrderError):
    """
    The AcmeOrder has a fatal error.
    Authorizations should be killed.
    """

    pass
class AcmeOrderCreatedError(AcmeOrderError):
    """
    Raised when an exception occurs AFTER an AcmeOrder was created.
    Expected attributes:
        args[0] - the created AcmeOrder
        args[1] - the original exception
    """

    def __str__(self):
        return "An AcmeOrder-{0} was created but errored".format(self.args[0])

    @property
    def acme_order(self):
        # the order that was successfully created before the failure
        return self.args[0]

    @property
    def original_exception(self):
        # the underlying exception that interrupted processing
        return self.args[1]
class AcmeOrderProcessing(AcmeOrderCreatedError):
    """
    Raised when the AcmeOrder is `processing` (RFC 8555 status);
    generally indicates the user should retry their action.
    """

    def __str__(self):
        order = self.args[0]
        return "An AcmeOrder-{0} was created. The order is still processing.".format(
            order
        )
class AcmeOrderValid(AcmeOrderCreatedError):
    """
    Raised when the AcmeOrder is `valid` (RFC 8555 status);
    generally indicates the user should retry their action.
    """

    def __str__(self):
        order = self.args[0]
        return "An AcmeOrder-{0} was created. The order is valid and the CertificateSigned can be downloaded.".format(
            order
        )
class AcmeMissingChallenges(AcmeError):
    """There are no Acme Challenges"""

    pass
class AcmeChallengeFailure(AcmeError):
    """Raised when an ACME challenge fails."""
    pass
class AcmeDomainsInvalid(AcmeError):
    """args[0] is an iterable of the offending domain names."""

    def __str__(self):
        listed = ", ".join(self.args[0])
        return "The following Domains are invalid: {0}".format(listed)
class AcmeDomainsBlocklisted(AcmeDomainsInvalid):
    """args[0] is an iterable of blocklisted domain names."""

    def __str__(self):
        listed = ", ".join(self.args[0])
        return "The following Domains are blocklisted: {0}".format(listed)
class AcmeDomainsRequireConfigurationAcmeDNS(AcmeDomainsInvalid):
    """args[0] is an iterable of domains lacking ACME-DNS configuration."""

    def __str__(self):
        listed = ", ".join(self.args[0])
        return "The following Domains are not configured with ACME-DNS: {0}".format(listed)
class DomainVerificationError(AcmeError):
    """Raised when domain ownership verification fails."""
    pass
class DisplayableError(_UrlSafeException):
    """An error whose message is safe to show to end users."""
    pass
class InvalidRequest(_UrlSafeException):
    """
    raised when an end-user wants to do something invalid/not-allowed
    """

    pass
# class TransitionError(_UrlSafeException):
# pass
# class OperationsContextError(_UrlSafeException):
# pass
| StarcoderdataPython |
3213588 | <gh_stars>0
from __future__ import annotations
from dataclasses import dataclass
from typing import Union, List
from item_engine import Item, Group, Match
import python_generator as pg
__all__ = ["TokenI", "TokenG"]
class TokenG(Group):
    """A Group of TokenI items that can render itself as generated code."""
    @property
    def items_str(self) -> str:
        # one repr'd token name per line, sorted for deterministic output
        return '\n'.join(map(repr, sorted([item.name for item in self.items])))
    def condition(self, item: pg.VAR) -> pg.CONDITION:
        """Build the condition comparing ``item.value`` against this group's names."""
        items = tuple(sorted(map(str, self.items)))
        # a single item compares against a scalar, several against a tuple
        grp = items[0] if len(self.items) == 1 else pg.TUPLE(items)
        return self.code_factory(item.GETATTR("value"), grp)
    def match(self, action: str) -> Match:
        """Pair this group with *action* into a Match."""
        return Match(self, action)
    @classmethod
    def grp(cls, names: Union[str, List[str]]) -> TokenG:
        """Build a TokenG from one name or a list of names."""
        if isinstance(names, str):
            names = [names]
        return cls(frozenset(map(TokenI, names)))
@dataclass(frozen=True, order=True)
class TokenI(Item):
    """A single token item identified by its name."""
    name: str
    def __str__(self):
        return repr(self.name)
    @property
    def as_group(self) -> TokenG:
        """This item wrapped in a singleton TokenG."""
        return TokenG(frozenset({self}))
1794883 | <filename>od_client/enums.py
from enum import Enum
class OdEndpoint(Enum):
    """Oxford Dictionaries API endpoints; values are URL path segments."""
    entries = "entries"
    inflections = "inflections"
    translations = "translations"
class LexiStatsSort(Enum):
    """Sort keys for LexiStats queries; a leading '-' means descending."""
    word_form_asc = "wordform"
    true_case_asc = "trueCase"
    lemma_asc = "lemma"
    lexical_category_asc = "lexicalCategory"
    frequency_asc = "frequency"
    normalized_frequency_asc = "normalizedFrequency"
    word_form_desc = "-wordform"
    true_case_desc = "-trueCase"
    lemma_desc = "-lemma"
    lexical_category_desc = "-lexicalCategory"
    frequency_desc = "-frequency"
    normalized_frequency_desc = "-normalizedFrequency"
class LexiStatsCollate(Enum):
    """Fields LexiStats results can be collated (grouped) by."""
    word_form = "wordform"
    true_case = "trueCase"
    lemma = "lemma"
    lexical_category = "lexicalCategory"
class LexiStatsTokenFormat(Enum):
    """Tokenization scheme accepted by the LexiStats API."""
    google = "google"
    oup = "oup"
| StarcoderdataPython |
3275160 | <reponame>gautierdag/cultural-evolution-engine<filename>data/__init__.py
from .AgentVocab import AgentVocab
from .feature_extractor import get_features
from .shapes import get_shapes_dataloader, get_shapes_metadata, get_shapes_features
from .obverter import (
get_obverter_dataloader,
get_obverter_metadata,
get_obverter_features,
)
| StarcoderdataPython |
33774 | import os
import numpy as np
import urllib
from absl import flags
import tensorflow as tf
import tensorflow_probability as tfp
tfb = tfp.bijectors
tfd = tfp.distributions
# Command-line flags controlling training configuration (absl flags).
flags.DEFINE_float(
    "learning_rate", default=0.001, help="Initial learning rate.")
flags.DEFINE_integer(
    "epochs", default=100, help="Number of training steps to run.")
flags.DEFINE_string(
    "activation",
    default="selu",
    help="Activation function for all hidden layers.")
flags.DEFINE_integer(
    "batch_size",
    default=32,
    help="Batch size.")
flags.DEFINE_string(
    "data_dir",
    default="/tmp/mnist",
    help="Directory where data is stored (if using real data).")
flags.DEFINE_string(
    "model_dir",
    default="/tmp/critic/",
    help="Directory to put the model's fit.")
flags.DEFINE_integer(
    "viz_steps", default=500, help="Frequency at which to save visualizations.")
flags.DEFINE_bool(
    "delete_existing",
    default=False,
    help="If true, deletes existing `model_dir` directory.")
# FLAGS is populated after absl parses argv in tf.app.run()
FLAGS = flags.FLAGS
def non_square_det(x, reltol=1e-6):
    """
    Idea taken from https://www.quora.com/How-do-we-calculate-the-determinant-of-a-non-square-matrix
    # for n != m
    A = tf.random_normal([n, m])
    det(A) := sqrt(det(A.A^T))
    Args:
        x (tf.tensor): shape in [..., a, b]
    Returns:
        [..., ]
    """
    # squared_mat = tf.matmul(x, x, transpose_b=True)
    # return tf.sqrt(tf.linalg.det(squared_mat))
    # current implementation: product of singular values of x
    s = tf.svd(x, compute_uv=False)
    # atol = tf.reduce_max(s) * reltol
    # s = tf.diag(tf.where(tf.greater(atol, tf.abs(s)), tf.ones_like(s), s))
    # NOTE(review): `reltol` is currently unused — the thresholding above is
    # commented out; confirm whether it should be re-enabled or the param dropped.
    return tf.reduce_prod(s)
def pinv(A, reltol=1e-6):
    """
    Moore-Penrose pseudo-inverse via SVD.
    Args:
        A (tf.tensor): the matrix to be inverted shape=[n, m]
        reltol (float): singular values below reltol * max(s) are treated as zero
    Returns:
        inverse (tf.tensor): the invserse of A, s.t. A_T.A = I. shape=[m,n]
    """
    s, u, v = tf.svd(A)
    # zero out (rather than invert) singular values below the tolerance
    atol = tf.reduce_max(s) * reltol
    s_inv = tf.diag(tf.where(tf.greater(tf.abs(s), atol), 1.0/s, tf.zeros_like(s)))
    # s_inv = tf.diag(1./s)
    return tf.matmul(v, tf.matmul(s_inv, u, transpose_b=True))
class Dense(tfb.Bijector):
    """
    Want a hierarchical flow.
    Map some low dim distribution to a manifold in a higher dimensional space.
    For more info on bijectors see tfb.Bijector, I simply cloned the general
    structure.
    """
    def __init__(self, n_inputs, n_outputs, validate_args=False, name=''):
        """
        Args:
            n_inputs (int): the number of features (last dim)
            n_outputs (int): the target num of feautres
        """
        super(self.__class__, self).__init__(
            validate_args=validate_args,
            is_constant_jacobian=True,
            forward_min_event_ndims=1,
            name=name)
        self.n_inputs = n_inputs
        self.n_outputs = n_outputs
        # learnable affine parameters: y = x.W + b
        with tf.variable_scope('dense'+name):
            self.weights = tf.get_variable(name='weights',
                                     shape=[n_inputs, n_outputs],
                                     dtype=tf.float32,
                                     # initializer=tf.initializers.orthogonal()
                                     )
            self.bias = tf.get_variable(name='bias',
                                     shape=[n_outputs],
                                     dtype=tf.float32,
                                     initializer=tf.initializers.zeros()
                                     )
    @property
    def _is_injective(self):
        return True
    def _forward_event_shape_tensor(self, shape):
        # NOTE(review): forward maps n_inputs -> n_outputs, so this looks like
        # it should report n_outputs (and the inverse n_inputs) — confirm.
        return tf.shape([shape[0], self.n_inputs])
    def _invserse_event_shape_tensor(self, shape):
        # NOTE(review): method name is misspelled ("invserse"), so it never
        # overrides tfb.Bijector._inverse_event_shape_tensor — confirm intent.
        return tf.shape([shape[0], self.n_outputs])
    def _forward(self, x):
        # affine map into the higher-dimensional space
        return tf.matmul(x, self.weights) + self.bias
    def _inverse(self, y):
        # pseudo-inverse recovers the low-dimensional representation
        weights_inv = pinv(self.weights)
        return tf.matmul(y - self.bias, weights_inv)
    def _forward_log_det_jacobian(self, x):
        # constant Jacobian: log "det" of the (non-square) weight matrix
        return tf.log(non_square_det(self.weights))
    def _inverse_log_det_jacobian(self, y):
        return tf.log(non_square_det(pinv(self.weights)))
def make_mixture(latent_size, mixture_components):
    """Creates a mixture of Gaussians distribution.
    Args:
        latent_size: The dimensionality of the latent representation.
        mixture_components: Number of elements of the mixture.
    Returns:
        random_prior: A `tf.distributions.Distribution` instance
            representing the distribution over encodings in the absence of any
            evidence.
    """
    if mixture_components == 1:
        # See the module docstring for why we don't learn the parameters here.
        return tfd.MultivariateNormalDiag(
            loc=tf.zeros([latent_size]),
            scale_identity_multiplier=1.0)
    # learnable per-component means, scales and mixing logits
    loc = tf.get_variable(name="loc", shape=[mixture_components, latent_size])
    raw_scale_diag = tf.get_variable(
        name="raw_scale_diag", shape=[mixture_components, latent_size])
    mixture_logits = tf.get_variable(
        name="mixture_logits", shape=[mixture_components])
    return tfd.MixtureSameFamily(
        components_distribution=tfd.MultivariateNormalDiag(
            loc=loc,
            # softplus keeps scales strictly positive
            scale_diag=tf.nn.softplus(raw_scale_diag)),
        mixture_distribution=tfd.Categorical(logits=mixture_logits),
        name="prior")
def model_fn(features, labels, mode, params, config):
    """
    Builds the model function for use in an estimator.
    Arguments:
        features: The input features for the estimator.
        labels: The labels, unused here.
        mode: Signifies whether it is train or test or predict.
        params: Some hyperparameters as a dictionary.
        config: The RunConfig, unused here.
    Returns:
        EstimatorSpec: A tf.estimator.EstimatorSpec instance.
    """
    x = features['x']
    global_step = tf.train.get_or_create_global_step()
    with tf.contrib.summary.record_summaries_every_n_global_steps(100, global_step=global_step):
        # construct a multilayer parameterised bijector
        n_hidden = 8
        width = 32
        n_outputs = 784
        fn = tfb.Chain([
            Dense(width, n_outputs, name='3'),
            # tfb.Softplus(),
            # Dense(width, width, name='2'),
            # tfb.Softplus(),
            # Dense(width, width, name='1'),
            Dense(n_hidden, width, name='0')
        ])
        # use the bijector to map a simple distribution into our a density model
        dist = make_mixture(n_hidden, 10)
        # logits = tf.get_variable(
        #     name="logits", shape=[n_outputs])
        # dist = tfd.RelaxedOneHotCategorical(logits=logits, temperature=1.0)
        # density = tfd.RelaxedBernoulli(logits=logits, temperature=100.0)
        density = tfd.TransformedDistribution(distribution=dist, bijector=fn)
        # maximise the likelihood of the data
        p = density.prob(x)
        loss = tf.reduce_mean(1-p)  # - 0.1*density.entropy()
        # reg = -density.entropy()
        # tf.summary.scalar('entropy', reg)
        # generate some samples to visualise
        # HACK to get samples to work I had to comment out line 411 of transformed_distribution.py
        samples = density.sample(3)
        tf.summary.image('samples', tf.reshape(samples, [3, 28, 28, 1]))
        # mu = density.mean()
        # tf.summary.image('mean', tf.reshape(mu, [1, 28, 28, 1]))
        # clip gradients to stabilise training; missing grads become zeros
        opt = tf.train.AdamOptimizer(0.0001)
        gnvs = opt.compute_gradients(loss)
        gnvs = [(tf.clip_by_norm(g, 10.0) if g is not None else tf.zeros_like(v), v) for g, v in gnvs]
        train_step = opt.apply_gradients(gnvs, global_step=global_step)
    return tf.estimator.EstimatorSpec(
        mode=mode,
        loss=loss,
        train_op=train_step,
        eval_metric_ops={"eval_loss": tf.metrics.mean(loss)}
    )
def main(_):
    """Train and periodically evaluate the flow model on MNIST."""
    params = FLAGS.flag_values_dict()
    # resolve the activation flag name to the actual tf.nn function
    params["activation"] = getattr(tf.nn, params["activation"])
    if FLAGS.delete_existing and tf.gfile.Exists(FLAGS.model_dir):
        tf.logging.warn("Deleting old log directory at {}".format(FLAGS.model_dir))
        tf.gfile.DeleteRecursively(FLAGS.model_dir)
    tf.gfile.MakeDirs(FLAGS.model_dir)
    mnist = tf.contrib.learn.datasets.load_dataset("mnist")
    train_data = mnist.train.images  # Returns np.array
    train_labels = np.asarray(mnist.train.labels, dtype=np.int32)
    eval_data = mnist.test.images  # Returns np.array
    eval_labels = np.asarray(mnist.test.labels, dtype=np.int32)
    train_input_fn = tf.estimator.inputs.numpy_input_fn(
        x={"x": train_data},
        y=train_labels,
        batch_size=FLAGS.batch_size,
        num_epochs=1,
        shuffle=True)
    eval_input_fn = tf.estimator.inputs.numpy_input_fn(
        x={"x": eval_data},
        y=eval_labels,
        batch_size=FLAGS.batch_size,
        num_epochs=1,
        shuffle=False)
    estimator = tf.estimator.Estimator(
        model_fn,
        params=params,
        config=tf.estimator.RunConfig(
            model_dir=FLAGS.model_dir,
            save_checkpoints_steps=FLAGS.viz_steps,
        ),
    )
    # alternate training and evaluation every `viz_steps` steps
    for _ in range(FLAGS.epochs):
        estimator.train(train_input_fn, steps=FLAGS.viz_steps)
        eval_results = estimator.evaluate(eval_input_fn)
        print("Evaluation_results:\n\t%s\n" % eval_results)
if __name__ == "__main__":
    # tf.app.run parses absl flags then calls main()
    tf.app.run()
| StarcoderdataPython |
1776466 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from aria.utils import validation
def test_function_kwargs_validation():
    """validate_function_arguments should reject kwargs missing mandatory args."""
    def mock_function(arg1, arg2=1, arg3=1):
        pass
    # arg1 has no default, so any call omitting it must raise
    with pytest.raises(ValueError):
        validation.validate_function_arguments(mock_function, dict(arg2=1))
    with pytest.raises(ValueError):
        validation.validate_function_arguments(mock_function, dict(arg3=3))
    with pytest.raises(ValueError):
        validation.validate_function_arguments(mock_function, dict(arg2=2, arg3=3))
    # any combination that includes arg1 is accepted
    validation.validate_function_arguments(mock_function, dict(arg1=1, arg3=3))
    validation.validate_function_arguments(mock_function, dict(arg1=1, arg2=2))
    validation.validate_function_arguments(mock_function, dict(arg1=1, arg2=2, arg3=3))
| StarcoderdataPython |
1738854 | <gh_stars>0
#!/usr/bin/env python3
import re
import os
import sys
import time
import subprocess
import RPi.GPIO as GPIO
import multiprocessing as mp
from collections import defaultdict
from configparser import ConfigParser
# BCM17 is the top-board key line: driven HIGH here, sampled by read_key().
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(17, GPIO.OUT)
GPIO.output(17, GPIO.HIGH)
# Shell one-liners used to gather status strings for the OLED display.
cmds = {
    'blk': "lsblk | awk '{print $1}'",
    'up': "uptime | sed 's/^.* up \+\(.\+\), \+[0-9] user.*$/\\1/' | awk '{printf \"Uptime: %s\", $0}'",
    'temp': "cat /sys/class/thermal/thermal_zone0/temp | awk '{printf \"CPU Temp: %.1f°C\", $1/1000}'",
    'ip': "hostname -I | awk '{printf \"IP %s\", $1}'",
    'cpu': "uptime | awk '{printf \"CPU Load: %.2f\", $(NF-2)}'",
    'men': "free -m | awk 'NR==2{printf \"Mem: %s/%sMB\", $3,$2}'",
    'disk': "df -h | awk '$NF==\"/\"{printf \"Disk: %d/%dGB %s\", $3,$2,$5}'"
}
# Fan level name -> PWM duty cycle (percent); checked hottest-first.
lv2dc = {'lv3': 100, 'lv2': 75, 'lv1': 50, 'lv0': 25}
# pin37(bcm26) sata0, pin22(bcm25) sata1
def set_mode(pin, mode):
    """Drive *pin* as an output at *mode*; GPIO errors are printed, not raised."""
    try:
        GPIO.setup(pin, GPIO.OUT)
        GPIO.output(pin, mode)
    except Exception as ex:
        print(ex)
def disk_turn_on():
    """Power both SATA ports on and record the newly appeared block devices."""
    blk1 = get_blk()
    # stagger power-up of the two ports by 0.5s
    set_mode(26, GPIO.HIGH)
    time.sleep(0.5)
    set_mode(25, GPIO.HIGH)
    wait_blk(10)
    blk2 = get_blk()
    # devices present now but not before are the SATA disks
    conf['disk'] = sorted(list(set(blk2) - set(blk1)))
def disk_turn_off():
    """Power both SATA ports off, staggered by 0.5s."""
    set_mode(26, GPIO.LOW)
    time.sleep(0.5)
    set_mode(25, GPIO.LOW)
def check_output(cmd):
    """Run *cmd* through the shell and return its stripped stdout as text."""
    raw = subprocess.check_output(cmd, shell=True)
    return raw.decode().strip()
def check_call(cmd):
    """Run *cmd* through the shell; raises CalledProcessError on non-zero exit."""
    return subprocess.check_call(cmd, shell=True)
def wait_blk(t1=10):
    """Poll until /dev/sda..sdd all answer `lsblk`, or *t1* seconds elapse."""
    t = 0
    while t <= t1:
        try:
            check_call('lsblk /dev/sda > /dev/null 2>&1')
            check_call('lsblk /dev/sdb > /dev/null 2>&1')
            check_call('lsblk /dev/sdc > /dev/null 2>&1')
            check_call('lsblk /dev/sdd > /dev/null 2>&1')
        except Exception:
            # not all devices ready yet; retry every 100ms
            time.sleep(0.1)
            t += 0.1
            continue
        else:
            # all four answered; small settle delay before returning
            time.sleep(0.5)
            break
def get_blk():
    """Return the list of block device names reported by `lsblk`."""
    return check_output(cmds['blk']).strip().split('\n')
def get_info(s):
    """Run the status command registered under key *s* and return its output."""
    return check_output(cmds[s])
def read_conf():
    """Load /etc/rockpi-sata.conf; fall back to built-in defaults on any error."""
    conf = defaultdict(dict)
    try:
        cfg = ConfigParser()
        cfg.read('/etc/rockpi-sata.conf')
        # fan speed thresholds (deg C) per level
        for level in ('lv0', 'lv1', 'lv2', 'lv3'):
            conf['fan'][level] = cfg.getfloat('fan', level)
        # actions bound to key gestures
        for gesture in ('click', 'twice', 'press'):
            conf['key'][gesture] = cfg.get('key', gesture)
        # gesture timing (seconds)
        conf['time']['twice'] = cfg.getfloat('time', 'twice')
        conf['time']['press'] = cfg.getfloat('time', 'press')
        # slider / display options
        conf['slider']['auto'] = cfg.getboolean('slider', 'auto')
        conf['slider']['time'] = cfg.getfloat('slider', 'time')
        conf['oled']['rotate'] = cfg.getboolean('oled', 'rotate')
    except Exception:
        # any parse problem: use the shipped defaults wholesale
        conf['fan'] = {'lv0': 35, 'lv1': 40, 'lv2': 45, 'lv3': 50}
        conf['key'] = {'click': 'slider', 'twice': 'switch', 'press': 'none'}
        conf['time'] = {'twice': 0.7, 'press': 1.8}  # seconds
        conf['slider'] = {'auto': True, 'time': 10}  # seconds
        conf['oled'] = {'rotate': False}
    return conf
def read_key(pattern, size):
    """Sample BCM17 at 10 Hz until one regex in *pattern* matches; return its key."""
    s = ''
    while True:
        # rolling window of the last `size` samples plus the newest one
        # ('1' = idle/high, '0' = pressed/low)
        s = s[-size:] + str(GPIO.input(17))
        for t, p in pattern.items():
            if p.match(s):
                return t
        time.sleep(0.1)
def watch_key(q=None):
    """Producer loop: classify key gestures forever and push them onto queue *q*."""
    size = int(conf['time']['press'] * 10)
    wait = int(conf['time']['twice'] * 10)
    # gesture patterns over the sampled pin-level string (see read_key)
    pattern = {
        'click': re.compile(r'1+0+1{%d,}' % wait),
        'twice': re.compile(r'1+0+1+0+1{3,}'),
        'press': re.compile(r'1+0{%d,}' % size),
    }
    while True:
        q.put(read_key(pattern, size))
def get_disk_info(cache={}):
    """Return [(names...), (usage%...)] for root + SATA disks, cached for 30s."""
    # NOTE: the mutable default argument is used deliberately as a
    # cross-call cache; refresh only when older than 30 seconds.
    if not cache.get('time') or time.time() - cache['time'] > 30:
        info = {}
        cmd = "df -h | awk '$NF==\"/\"{printf \"%s\", $5}'"
        info['root'] = check_output(cmd)
        for x in conf['disk']:
            cmd = "df -Bg | awk '$1==\"/dev/{}\" {{printf \"%s\", $5}}'".format(x)
            info[x] = check_output(cmd)
        # zip(*items) -> [(name, name, ...), (usage, usage, ...)]
        cache['info'] = list(zip(*info.items()))
        cache['time'] = time.time()
    return cache['info']
def slider_next(pages):
    """Advance the shared OLED page index and return the page to display."""
    conf['idx'].value += 1
    return pages[conf['idx'].value % len(pages)]
def slider_sleep():
    """Sleep for the configured auto-slide interval."""
    time.sleep(conf['slider']['time'])
def fan_temp2dc(t):
    """Map temperature *t* to a fan duty cycle; 0 when below every threshold."""
    for level, duty in lv2dc.items():
        if t >= conf['fan'][level]:
            return duty
    return 0
def fan_switch():
    """Toggle the shared fan-enabled flag."""
    conf['run'].value = not(conf['run'].value)
def get_func(key):
    """Return the action configured for key gesture *key*; 'none' when unset."""
    return conf['key'].get(key, 'none')
def open_w1_i2c():
    """Ensure the w1-gpio overlay and i2c are enabled in /boot/config.txt,
    then load the corresponding kernel modules."""
    with open('/boot/config.txt', 'r') as f:
        content = f.read()
    # BUG FIX: the original re-read nothing and reused the stale `content`
    # for the second write, so enabling i2c overwrote (dropped) a just-added
    # w1 overlay line. Accumulate changes in `content` and write once.
    changed = False
    if 'dtoverlay=w1-gpio' not in content:
        content = content.strip() + '\ndtoverlay=w1-gpio'
        changed = True
    if 'dtparam=i2c1=on' not in content:
        content = content.strip() + '\ndtparam=i2c1=on'
        changed = True
    if changed:
        with open('/boot/config.txt', 'w') as f:
            f.write(content)
    os.system('/sbin/modprobe w1-gpio')
    os.system('/sbin/modprobe w1-therm')
    os.system('/sbin/modprobe i2c-dev')
def wait():
    """Block the main thread forever (worker threads/processes do the work)."""
    while True:
        time.sleep(60)
# Shared state: attached disk names, plus cross-process OLED page index and
# fan-run flag (multiprocessing.Value), merged with the on-disk config.
conf = {'disk': [], 'idx': mp.Value('d', -1), 'run': mp.Value('d', 1)}
conf.update(read_conf())
if __name__ == '__main__':
    # `open_w1_i2c` CLI arg: enable 1-wire/i2c overlays and load modules
    if sys.argv[-1] == 'open_w1_i2c':
        open_w1_i2c()
| StarcoderdataPython |
3257399 | #!/usr/bin/python
import requests
import json
class DataPuller(object):
    """Fetches bitcoin/fiat price data from Luno, Coinbase and AlphaVantage."""

    # endpoint name -> URL (class-level, shared by all instances)
    sources = {
        'luno': 'https://api.mybitx.com/api/1/ticker?pair=XBTZAR',
        'alphavantage': 'https://www.alphavantage.co/query?function=CURRENCY_EXCHANGE_RATE&from_currency=USD&to_currency=ZAR&apikey=<KEY>',
        'coinbase_buy': 'https://api.coinbase.com/v2/prices/BTC-USD/buy',
        'coinbase_sell': 'https://api.coinbase.com/v2/prices/BTC-USD/sell'
    }

    def __init__(self):
        # BUG FIX: original __init__ lacked `self` (so the class could not be
        # instantiated) and assigned bare locals instead of instance attributes.
        self.lunoBidPrice = 0
        self.lunoAskPrice = 0
        self.lunoBidUSD = 0
        self.lunoAskUSD = 0
        self.zarusd = 0
        self.coinbaseBidPrice = 0
        self.coinbaseAskPrice = 0
        self.lunoFees = 0
        self.coinbaseFees = 0

    def retrieve_data(self, source):
        """Return the decoded JSON payload for *source*, or None if unknown.

        BUG FIX: original tested `source in self.sources.keys` (an unbound
        method, TypeError) and passed the Response object to json.loads.
        """
        if source in self.sources:
            response = requests.get(self.sources[source])
            return json.loads(response.text)
        return None
| StarcoderdataPython |
3280834 | <reponame>CharlesJ-ABu/MindMap<gh_stars>0
from header import *
class Link:
width = 3.0
length=0.0
z_length=0.0
def __init__(self, parentSheet, tA, tB, importance):
self.root=parentSheet.root
self.canvas=parentSheet.canvas
self.parentSheet = parentSheet
self.cs = self.parentSheet.cs
self.tA = tA
self.tB = tB
self.importance = importance
self._layers = []
#self.colour = tuple([v*0.5 + 1.0*0.5 for v in self.parentSheet.colour])
self.colour = self.cs.link # g.shadeN([self.tA.colour, self.cs.background],[0,1],0.5) #
if importance != 1:
self.colour = g.shadeN([self.colour, self.cs.background], [0,1], 0.75)
self.width = 5
#print "importance =", importance
self.setZooms()
self.initDrawing()
def setZooms(self):
self.z_width = self.width * self.parentSheet.curZoom
self.z_length = self.length * self.parentSheet.curZoom
def initDrawing(self):
#self.canvasIndex = self.canvas.create_line(self.tA.pixLoc[0], self.tA.pixLoc[1],
# self.tB.pixLoc[0], self.tB.pixLoc[1],
# fill="white", width=5)
x0,y0,x1,y1 = self.getCoords()
#xb0,yb0,xb1,yb1 = self.getHeadCoords()
self.canvasIndex = self.add_to_layer(self.importance, self.canvas.create_line, (x0,y0,x1,y1),
fill=g.toHex(self.colour), activefill = g.toHex(self.cs.highlight2), width=int(self.z_width),
activewidth = 5)
self.canvas.tag_bind(self.canvasIndex, '<Button-3>', self.remove)
#self.canvas.tag_bind(self.canvasIndex2, '<ButtonRelease-3>', self.endDrag)
self.grow()
def add_to_layer(self, layer, command, coords, **kwargs):
layer_tag = "layer %s" % layer
if layer_tag not in self._layers:
self._layers.append(layer_tag)
tags = kwargs.setdefault("tags", [])
tags.append(layer_tag)
item_id = command(coords, **kwargs)
self._adjust_layers()
return item_id
def _adjust_layers(self, readjust=False):
if readjust:
# just lower the soft ones
self.canvas.lower("layer 0")
else:
# lower all
for layer in sorted(self._layers):
self.canvas.lower(layer)
'''
if layer == "layer 1":
self.canvas.lower(layer)
print "layer 1"
elif layer == "layer 2":
self.canvas.lower(layer)
self.canvas.lower(layer)
print "layer 2"
'''
def updateLine(self):
x0,y0,x1,y1 = self.getCoords()
#xb0,yb0,xb1,yb1 = self.getHeadCoords()
self.canvas.coords(self.canvasIndex, x0, y0, x1, y1)
#self.canvas.coords(self.canvasIndex2, xb0, yb0, xb1, yb1)
self.canvas.itemconfig(self.canvasIndex, width=int(self.z_width))
#self.canvas.itemconfig(self.canvasIndex2, width=int(self.z_width*2))
def remove(self, event=[]):
self.parentSheet.pausePanning = True
self.canvas.delete(self.canvasIndex)
#self.canvas.delete(self.canvasIndex2)
self.parentSheet.removeLink(self.tA, self.tB)
    def endDrag(self, event):
        """Button-release handler: re-enable panning on the parent sheet."""
        self.parentSheet.pausePanning = False
    def grow(self, stage=0):
        """Animate the line growing from endpoint A toward B over ~10 frames.

        Each frame draws the line at fraction stage/total_stages of its full
        length and width, then schedules the next frame on a daemon Timer.
        Skipped entirely when the sheet is in fast-graphics mode.
        """
        if self.parentSheet.fastGraphics: return
        total_stages=10
        f = 1.0*stage/total_stages
        if stage<=total_stages:
            x0,y0,x1,y1 = self.getCoords()
            # interpolate the far endpoint toward B by fraction f
            x1p = (1.0-f)*x0 + f*x1
            y1p = (1.0-f)*y0 + f*y1
            self.canvas.coords(self.canvasIndex, x0, y0, x1p, y1p)
            w = self.z_width*f
            self.canvas.itemconfig(self.canvasIndex, width=int(w))
            self.root.update()
        if stage < total_stages:
            # schedule the next frame; daemon so it never blocks app exit
            t = threading.Timer(0.02, self.grow, [stage+1])
            t.daemon = True
            t.start()
    def getCoords(self, dFrac=0.5):
        """Return (x0, y0, x1, y1) for the line between the two thoughts,
        trimmed so it starts/ends at each thought's outer white ring rather
        than its centre.

        Side effect: refreshes self.z_length / self.length from the trimmed
        endpoints. *dFrac* is currently unused.
        """
        x0,y0,x1,y1 = self.tA.pixLoc[0], self.tA.pixLoc[1], self.tB.pixLoc[0], self.tB.pixLoc[1]
        # only want line to go between white rings around thoughts
        length = dist((x0, y0), (x1, y1))
        #frac of way white ring on A is to center to A
        f0 = (1.0*length - (self.tA.z_r + self.tA.z_ringSpacing[0]))/length
        x0p = (1.0-f0)*x1 + f0*x0
        y0p = (1.0-f0)*y1 + f0*y0
        #frac of way white ring on B is to center to B
        f1 = (1.0*length - (self.tB.z_r + self.tB.z_ringSpacing[0]))/length
        x1p = (1.0-f1)*x0 + f1*x1
        y1p = (1.0-f1)*y0 + f1*y1
        self.z_length = dist((x0p, y0p), (x1p, y1p))
        self.length = self.z_length/self.parentSheet.curZoom
        return (x0p, y0p, x1p, y1p)
    def zoom(self, direction):
        """Rescale and redraw after a sheet zoom change (*direction* unused)."""
        self.setZooms()
        self.updateLine()
def isImportant(self):
return self.importance==1 | StarcoderdataPython |
1667397 | from typing import NamedTuple
class TVShow(NamedTuple):
    """Immutable record identifying a TV show by its name."""
    name: str
class Episode(NamedTuple):
    """Immutable record for one episode: its show, season and number."""
    tvshow: TVShow
    season: int
    number: int
| StarcoderdataPython |
30751 | import os
import sys
import shutil
import asyncio
import aioboto3
from glob import glob
from PIL import Image
from fnmatch import fnmatch
from src.secrets import (
SPACES_REGION,
SPACES_BUCKET,
SPACES_PREFIX,
SPACES_ENDPOINT_URL,
SPACES_ACCESS_KEY,
SPACES_SECRET_KEY
)
from src.format import (
get_filename,
get_image_id
)
from src.logger import logger
# Base directory for local image work: the directory of the running script.
LOCAL_IMAGES_PATH = sys.path[0]
async def download_file(key, bucket):
    """Mirror a single S3 object locally under the same relative path.

    Keys ending in '/' are directory placeholders and are only created as
    local directories. Any other key is downloaded after ensuring its parent
    directories exist -- BUG FIX: S3 listings do not guarantee a placeholder
    key for every prefix, so downloads into unseen sub-folders previously
    failed with FileNotFoundError.
    """
    if key.endswith('/'):
        os.makedirs(key, exist_ok=True)
    else:
        parent = os.path.dirname(key)
        if parent:
            os.makedirs(parent, exist_ok=True)
        await bucket.download_file(key, key)
async def download_files(bucket, prefix):
    """Concurrently download every object under *prefix* from *bucket*.

    Opens an aioboto3 S3 resource using the configured Spaces credentials,
    lists the objects below the prefix and fans out one download task per
    key, awaiting them all together.
    """
    async with aioboto3.resource('s3',
                                 region_name=SPACES_REGION,
                                 endpoint_url=SPACES_ENDPOINT_URL,
                                 aws_access_key_id=SPACES_ACCESS_KEY,
                                 aws_secret_access_key=SPACES_SECRET_KEY) as resource:
        # note: the *bucket* name parameter is rebound to the Bucket resource
        bucket = await resource.Bucket(bucket)
        tasks = [asyncio.ensure_future(download_file(s3_obj.key, bucket)) async for s3_obj in
                 bucket.objects.filter(Prefix=prefix)]
        await asyncio.gather(*tasks)
async def download_images():
    """Download the configured images tree from S3, logging the outcome.

    Any failure is logged and re-raised so the caller can abort.
    """
    try:
        await download_files(SPACES_BUCKET, SPACES_PREFIX)
        logger.info(f'Images from S3 have been downloaded successfully')
    except Exception as error:
        logger.error(f'Error to download images from S3: {error}')
        raise
async def upload_file(subdir, file, image, bucket):
    """Upload one resized variant of *image* to the bucket as a public object.

    Only files named '<image.height>*.jpg' are uploaded (assumes *image* is a
    PIL image exposing .height -- TODO confirm); everything else in the
    directory is skipped. The object key is the path relative to
    LOCAL_IMAGES_PATH.
    """
    if fnmatch(file, f'{image.height}*.jpg'):
        full_path = os.path.join(subdir, file)
        with open(full_path, 'rb') as data:
            await bucket.put_object(ACL='public-read', Key=full_path[len(LOCAL_IMAGES_PATH) + 1:], Body=data,
                                    ContentType='image/jpg')
async def upload_files(bucket, prefix, image):
    """Walk the local *prefix* tree and upload the matching variants of
    *image* concurrently.

    One upload task is created per file found under
    LOCAL_IMAGES_PATH/<prefix>; upload_file() decides which files actually
    match *image* and get uploaded.
    """
    tasks = []
    async with aioboto3.resource('s3',
                                 region_name=SPACES_REGION,
                                 endpoint_url=SPACES_ENDPOINT_URL,
                                 aws_access_key_id=SPACES_ACCESS_KEY,
                                 aws_secret_access_key=SPACES_SECRET_KEY) as resource:
        # note: the *bucket* name parameter is rebound to the Bucket resource
        bucket = await resource.Bucket(bucket)
        for subdir, dirs, files in os.walk(LOCAL_IMAGES_PATH + f'/{prefix}'):
            for file in files:
                tasks.append(asyncio.ensure_future(upload_file(subdir, file, image, bucket)))
        await asyncio.gather(*tasks)
async def upload_images(image):
    """Upload the resized variants of *image* to S3, logging the outcome.

    Any failure is logged and re-raised so the caller can abort.
    """
    try:
        await upload_files(SPACES_BUCKET, SPACES_PREFIX, image)
        logger.info('Images have been uploaded successfully into S3')
    except Exception as error:
        logger.error(f'Error to upload new images sizes to S3: {error}')
        raise
async def get_local_images():
    """Collect the locally stored 720px source images.

    Returns a list of dicts with keys 'content' (the opened PIL image),
    'image_id' and 'filename'.
    """
    pattern = LOCAL_IMAGES_PATH + f'/{SPACES_PREFIX}/*/720*.jpg'
    return [
        {
            "content": Image.open(path),
            "image_id": get_image_id(path),
            "filename": get_filename(path),
        }
        for path in glob(pattern)
    ]
async def save_local_images(resized_images):
    """Write each resized PIL image into its per-image directory.

    *resized_images* is an iterable of dicts with keys 'content' (PIL image),
    'image_id' and 'filename'; files are saved as
    LOCAL_IMAGES_PATH/<prefix><image_id>/<height><filename>.
    Any failure is logged and re-raised.

    (Cleanup: dropped the enumerate() whose index was never used.)
    """
    try:
        for new_image in resized_images:
            content = new_image['content']
            content.save(f"{LOCAL_IMAGES_PATH}/{SPACES_PREFIX}{new_image['image_id']}"
                         f"/{content.height}{new_image['filename']}")
    except Exception as error:
        logger.error(f'Error to save images in local directories: {error}')
        raise
async def remove_local_images():
    """Delete the local images directory tree, if present.

    NOTE(review): the removed directory is hard-coded to 'test' rather than
    derived from SPACES_PREFIX -- confirm this is intentional. Success is
    logged even when the directory did not exist.
    """
    path = os.path.join(LOCAL_IMAGES_PATH, 'test')
    try:
        if os.path.exists(path):
            shutil.rmtree(path)
        logger.info('Local images directory has been removed successfully')
    except shutil.Error as error:
        logger.error(f'Error to remove local images directory: {error}')
        raise
3387815 | <reponame>steinarvk/numera-te-ipsum
class OperationFailed(Exception):
    """Raised when a requested operation could not be completed."""
    pass
class ValidationFailed(Exception):
    """Raised when input data fails validation."""
    pass
| StarcoderdataPython |
1689873 | <filename>projects/Password_generator/password_generator.py<gh_stars>1000+
import secrets
import string
from random import choice
from tkinter import *
class App:
    """Tkinter password generator.

    A single window with a title label, an entry field and a button that
    fills the entry with a freshly generated random password.
    """
    def __init__(self):
        self.window = Tk()
        self.window.title('password_generator')
        self.window.iconbitmap('logo.ico')
        self.window.iconphoto(False, PhotoImage(file='logo.png'))
        self.window.geometry('500x255')
        self.window.config(bg='gray')
        # component creation
        self.label()
        self.entry()
        self.button()
    def label(self):
        """Create and pack the window title label."""
        # Font fixed from misspelled 'Courrier' (Tk silently fell back to
        # the default font).
        label_title = Label(self.window, text='Welcome to password generator', font=('Courier', 20), bg='gray', fg='black')
        label_title.pack()
    def entry(self):
        """Create and pack the entry that displays the generated password."""
        self.password_entry = Entry(self.window, font=('Courier', 25), bg='white', fg='black', width=30, relief='solid')
        self.password_entry.pack(pady=50)
    def button(self):
        """Create and pack the 'generate' button."""
        password_generator = Button(self.window, text="Generate_password", font=('Courier', 12), bg='white', fg='black', width=25, command=self.generate_password)
        password_generator.pack()
    def generate_password(self, length=28):
        """Replace the entry's contents with a new random password.

        *length* characters (default 28, the original hard-coded size) are
        drawn from letters, digits and punctuation using secrets.choice --
        a cryptographically secure RNG, unlike the original random.choice,
        which is unsuitable for passwords.
        """
        characters = string.ascii_letters + string.punctuation + string.digits
        password = ''.join(secrets.choice(characters) for _ in range(length))
        self.password_entry.delete(0, END)
        self.password_entry.insert(0, password)
# Launch the GUI: build the window and enter the Tk event loop.
app = App()
app.window.mainloop()
| StarcoderdataPython |
1675431 | from django.conf.urls.defaults import patterns, include, url
from website.views import HomeView
# Route 'home' to the class-based HomeView (legacy Django patterns() API).
# NOTE(review): the regex r'^home' has no terminator, so it also matches
# e.g. 'homestead' -- confirm whether r'^home$' was intended.
urlpatterns = patterns('',
    url(r'^home',HomeView.as_view(),name="home"),
)
| StarcoderdataPython |
1753764 | <filename>group/views.py<gh_stars>1-10
from django.shortcuts import render, redirect
from django.http import HttpResponse, Http404, HttpResponseBadRequest
from django.views.defaults import bad_request
from django.urls import reverse
from urllib.parse import urlencode
import datetime
import requests
from utils import SNI_URL, SNI_DYNAMIC_TOKEN, SNI_TEMP_USER_TOKEN
from SNI.error import render_error
from SNI.check import check_tokens
from SNI.lib import global_headers, get_clearance_level
GLOBAL_URL = SNI_URL + "group"
@check_tokens()
def home(request):
    """
    Display all the groups registered on the SNI, along with any
    just-changed group/member names passed back via query parameters.
    """
    request_groups = requests.get(GLOBAL_URL, headers=global_headers(request))
    if request_groups.status_code != 200:
        # BUG FIX: was `render_error(request_group)` -- an undefined name
        # (NameError) on every non-200 response.
        return render_error(request_groups)
    group_list = request_groups.json()
    return render(request, 'group/home.html', {
        "group_list": group_list,
        "new_group": request.GET.get("new_group"),
        "deleted_group": request.GET.get("del_group"),
        "new_member": request.GET.get("new_member"),
        "removed_member": request.GET.get("rem_member"),
        "clearance_level": get_clearance_level(request)
    })
@check_tokens()
def sheet(request, group_id):
    """
    Display the main page with one group's details and member list.
    """
    url = f"{GLOBAL_URL}/{group_id}"
    request_group = requests.get(url, headers=global_headers(request))
    if request_group.status_code != 200:
        return render_error(request_group)
    request_group_json = request_group.json()
    # hide the "root" entry from the displayed member list (presumably an
    # internal/system member -- confirm)
    if "root" in request_group_json["members"]:
        request_group_json["members"].remove("root")
    return render(request, 'group/sheet.html', {
        "group": request_group_json,
        "new_member": request.GET.get("new_member"),
        "removed_member": request.GET.get("rem_member"),
        "clearance_level": get_clearance_level(request)
    })
@check_tokens()
def new(request):
    """
    Display the form/tools to create a new group.
    """
    return render(request, 'group/new.html', {})
@check_tokens()
def create(request):
    """
    Create a new group on the SNI named after the ?name= query parameter,
    then redirect to the group list.

    This link should only be accessed by a redirection from group/new.
    note: maybe use a post or something to make sure the group isn't created
    several times?
    """
    # Let requests serialize the payload (json= also sets the Content-Type
    # header): the previous hand-built JSON string produced invalid JSON --
    # and allowed injection -- for names containing quotes or backslashes.
    request_create_group = requests.post(
        GLOBAL_URL,
        headers=global_headers(request),
        json={"group_name": request.GET.get("name")},
    )
    if request_create_group.status_code != 201:
        return render_error(request_create_group)
    return_url = reverse("group-home")
    params = urlencode({"new_group": request.GET.get("name")})
    url = f"{return_url}?{params}"
    return redirect(url)
@check_tokens()
def delete(request, group_id):
    """
    Delete a group on the SNI, then redirect to the group list with the
    deleted group's name as a query parameter.
    """
    url = f"{GLOBAL_URL}/{group_id}"
    # the group is fetched first so its name is still available for the
    # redirect message after deletion
    request_group = requests.get(url, headers=global_headers(request)) # stores group params
    if request_group.status_code != 200:
        return render_error(request_group)
    request_delete_group = requests.delete(url, headers=global_headers(request))
    if request_delete_group.status_code != 200:
        return render_error(request_delete_group)
    params = urlencode({"del_group": request_group.json()["group_name"]})
    return_url = f"{reverse('group-home')}?{params}"
    return redirect(return_url)
@check_tokens()
def add_member(request, group_id):
    """
    Add the POSTed 'member' to the given group on the SNI, then redirect
    back to the group page with the new member's name as a query parameter.
    """
    url = f"{GLOBAL_URL}/{group_id}"
    # json= serializes safely (and sets Content-Type): the previous
    # hand-built JSON string broke on member names containing quotes or
    # backslashes.
    request_new = requests.put(
        url,
        headers=global_headers(request),
        json={"add_members": [str(request.POST.get("member"))]},
    )
    if request_new.status_code != 200:
        return render_error(request_new)
    params = urlencode({"new_member": request.POST.get("member")})
    return_url = reverse("group-home", args=[group_id]) + "?" + params
    return redirect(return_url)
@check_tokens()
def remove_member(request, group_id, member):
    """
    Remove *member* from the given group on the SNI, then redirect back to
    the group page with the removed member's name as a query parameter.
    """
    url = f"{GLOBAL_URL}/{group_id}"
    # json= serializes safely (and sets Content-Type): the previous
    # hand-built JSON string broke on member names containing quotes or
    # backslashes.
    request_remove = requests.put(
        url,
        headers=global_headers(request),
        json={"remove_members": [str(member)]},
    )
    if request_remove.status_code != 200:
        return render_error(request_remove)
    params = urlencode({"rem_member": member})
    return_url = reverse("group-home", args=[group_id]) + "?" + params
    return redirect(return_url)
| StarcoderdataPython |
127930 | <reponame>zurfyx/udlchan<gh_stars>1-10
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-03-06 17:51
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the posts app: Category and Post tables.

    NOTE(review): Post.category is a ForeignKey to 'posts.Post' (self) while
    its name and verbose_name suggest 'posts.Category' was intended. Since
    this migration may already be applied, correct it with a follow-up
    migration rather than by editing this one.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('title', models.CharField(default='', max_length=300)),
                ('description', models.CharField(default='', max_length=500)),
            ],
            options={
                'abstract': False,
                'verbose_name_plural': 'categories',
            },
        ),
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('title', models.CharField(default='', max_length=300)),
                ('content', models.CharField(default='', max_length=10000)),
                ('user', models.CharField(max_length=30)),
                # NOTE(review): see class docstring -- target looks wrong.
                ('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='posts.Post', verbose_name='Post')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| StarcoderdataPython |
1628689 | <gh_stars>0
import os
from smpl.package import SourcePackage
from smpl.config_file import ConfigObject, PackageParms
import smpl.util as util
import smpl.log_module as logger
import smpl.exec as exec
#
# llhttp is a project in which some of thec source code is generated.
# to make it run
# make release
# the all the code is in release/include and release/src
#
# to make it work in a hostng project all of the code both .h and .c
# must end up in vendor/src/llhttp/*.c *.h
#
class NodeJsLLHttp(SourcePackage):
    """Source package definition for nodejs/llhttp.

    llhttp generates part of its C sources: running `npm install` followed
    by `make release` populates release/include and release/src, and both
    the headers and the .c files must be staged and installed into the
    hosting project's vendor tree.

    (Cleanup: removed stray semicolons, commented-out dead code and a
    dangling `pass`.)
    """
    def __init__(self, name: str, parms: PackageParms, cfg_obj: ConfigObject):
        logger.debugln("class: {} package name {} ".format(type(self).__name__, name))
        super().__init__(name, cfg_obj)
        self.name = name
        self.parms = parms
        self.release = "v2.2.0"  # pinned upstream release tag
        self.git_url = "https://github.com/nodejs/llhttp"
        self.package_clone_dir_path = os.path.join(self.cfg_obj.clone_dir, "llhttp")
    def build_in_clone(self):
        """Generate llhttp's sources inside the clone (npm install + make release)."""
        logger.debugln("class: {} package name {} ".format(type(self).__name__, self.name))
        exec.run(["npm", "install"], self.package_clone_dir_path)
        exec.run(["make", "release"], self.package_clone_dir_path)
    def get_package(self):
        """Clone the pinned release and build its generated sources."""
        logger.debugln("class: {} package name {} ".format(type(self).__name__, self.name))
        self.get_git_repo(self.git_url, "llhttp", self.release)
        self.build_in_clone()
    def stage_package(self):
        """Stage the generated headers and C sources out of release/."""
        logger.debugln("class: {} package name {} ".format(type(self).__name__, self.name))
        self.stage_source("llhttp", "llhttp", "release/include")
        self.stage_source("llhttp", "llhttp", "release/src", False)
    def install_package(self):
        """Install the staged llhttp sources into the hosting project."""
        logger.debugln("class: {} package name {} ".format(type(self).__name__, self.name))
        self.install_stage_to_project("llhttp", "llhttp")
| StarcoderdataPython |
1614172 | # -*- coding: utf-8 -*-
from pathlib import Path
from ..base import Property
from ..serialise import YAML
from .base import DetectionReader, GroundTruthReader, SensorDataReader
class YAMLReader(DetectionReader, GroundTruthReader, SensorDataReader):
    """Reader that replays detections, ground-truth paths, sensor data and
    tracks from a multi-document YAML file, one document per timestep.

    (The original docstring said "Writer"; this class only reads.)
    """
    path = Property(Path, doc="File to read data from")
    def __init__(self, path, *args, **kwargs):
        # Accept plain strings as well as pathlib.Path objects.
        if not isinstance(path, Path):
            path = Path(path)  # Ensure Path
        super().__init__(path, *args, **kwargs)
        self._yaml = YAML()
        # Per-timestep state, refreshed by data_gen() for each document.
        self._detections = set()
        self._groundtruth_paths = set()
        self._sensor_data = set()
        self._tracks = set()
    @property
    def detections(self):
        """Detections from the most recently read document."""
        return self._detections
    @property
    def groundtruth_paths(self):
        """Ground-truth paths from the most recently read document."""
        return self._groundtruth_paths
    @property
    def sensor_data(self):
        """Sensor data from the most recently read document."""
        return self._sensor_data
    @property
    def tracks(self):
        """Tracks from the most recently read document."""
        return self._tracks
    def detections_gen(self):
        """Yield (time, detections) for each YAML document."""
        for time, _ in self.data_gen():
            yield time, self.detections
    def groundtruth_paths_gen(self):
        """Yield (time, groundtruth_paths) for each YAML document."""
        for time, _ in self.data_gen():
            yield time, self.groundtruth_paths
    def sensor_data_gen(self):
        """Yield (time, sensor_data) for each YAML document."""
        for time, _ in self.data_gen():
            yield time, self.sensor_data
    def tracks_gen(self):
        """Yield (time, tracks) for each YAML document."""
        for time, _ in self.data_gen():
            yield time, self.tracks
    def data_gen(self):
        """Iterate the YAML documents, updating the cached state sets and
        yielding (time, remaining_document) for each."""
        for document in self._yaml.load_all(self.path):
            self._detections = document.get('detections', set())
            self._groundtruth_paths = document.get('groundtruth_paths', set())
            self._sensor_data = document.get('sensor_data', set())
            self._tracks = document.get('tracks', set())
            yield document.pop('time'), document
| StarcoderdataPython |
3338254 | #!/usr/bin/env python
# encoding: utf-8
"""
@version: v1.0
@author: xag
@license: Apache Licence
@contact: <EMAIL>
@site: http://www.xingag.top
@software: PyCharm
@file: StringUtils.py
@time: 2020-04-11 18:39
@description:TODO
"""
import re
def get_ava_string(str):
    """
    Strip special symbols, keeping only spaces, ASCII digits/letters and
    CJK ideographs (U+4E00-U+9FA5).
    """
    disallowed = u"([^ \u4e00-\u9fa5\u0030-\u0039\u0041-\u005a\u0061-\u007a])"
    return re.sub(disallowed, "", str)
| StarcoderdataPython |
3283470 | <filename>analysis/make.py
###################
### ENVIRONMENT ###
###################
import os
import sys
### LOAD GSLAB MAKE
ROOT = '..'
gslm_path = os.path.join(ROOT, 'lib', 'gslab_make')
sys.path.append(gslm_path)
import gslab_make as gs
### PULL PATHS FROM CONFIG
PATHS = {
'root': ROOT,
'config': os.path.join(ROOT, 'config.yaml')
}
PATHS = gs.update_internal_paths(PATHS)
### LOAD CONFIG USER
PATHS = gs.update_external_paths(PATHS)
gs.update_executables(PATHS)
############
### MAKE ###
############
### START MAKE
gs.remove_dir(['input', 'external'])
gs.clear_dir(['output', 'log'])
gs.start_makelog(PATHS)
### MAKE LINKS TO INPUT AND EXTERNAL FILES
inputs = gs.link_inputs(PATHS, ['input.txt'])
externals = gs.link_externals(PATHS, ['external.txt'])
gs.write_source_logs(PATHS, inputs + externals)
gs.get_modified_sources(PATHS, inputs + externals)
### RUN SCRIPTS
gs.run_python(PATHS, program = 'code/analyze_data.py')
### LOG OUTPUTS
gs.log_files_in_output(PATHS)
### CHECK FILE SIZES
gs.check_module_size(PATHS)
### END MAKE
gs.end_makelog(PATHS) | StarcoderdataPython |
1622473 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
    """Add provenance (append-only history) tables recording each grant of
    user-group, user-resource and group-resource privileges.

    Each row captures subject, object, privilege level, grantor and a start
    timestamp; uniqueness is per (subject, object, start).
    """
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('auth', '0006_require_contenttypes_0002'),
        ('hs_core', '0029_auto_20161123_1858'),
        ('hs_access_control', '0016_auto_enforce_constraints'),
    ]
    operations = [
        migrations.CreateModel(
            name='GroupResourceProvenance',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('privilege', models.IntegerField(default=3, editable=False, choices=[(1, b'Owner'), (2, b'Change'), (3, b'View')])),
                ('start', models.DateTimeField(auto_now_add=True)),
                ('grantor', models.ForeignKey(related_name='x2grq', editable=False, to=settings.AUTH_USER_MODEL, help_text=b'grantor of privilege')),
                ('group', models.ForeignKey(related_name='g2grq', editable=False, to='auth.Group', help_text=b'group to be granted privilege')),
                ('resource', models.ForeignKey(related_name='r2grq', editable=False, to='hs_core.BaseResource', help_text=b'resource to which privilege applies')),
            ],
        ),
        migrations.CreateModel(
            name='UserGroupProvenance',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('privilege', models.IntegerField(default=3, editable=False, choices=[(1, b'Owner'), (2, b'Change'), (3, b'View')])),
                ('start', models.DateTimeField(auto_now_add=True)),
                ('grantor', models.ForeignKey(related_name='x2ugq', editable=False, to=settings.AUTH_USER_MODEL, help_text=b'grantor of privilege')),
                ('group', models.ForeignKey(related_name='g2ugq', editable=False, to='auth.Group', help_text=b'group to which privilege applies')),
                ('user', models.ForeignKey(related_name='u2ugq', editable=False, to=settings.AUTH_USER_MODEL, help_text=b'user to be granted privilege')),
            ],
        ),
        migrations.CreateModel(
            name='UserResourceProvenance',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('privilege', models.IntegerField(default=3, editable=False, choices=[(1, b'Owner'), (2, b'Change'), (3, b'View')])),
                ('start', models.DateTimeField(auto_now_add=True)),
                ('grantor', models.ForeignKey(related_name='x2urq', editable=False, to=settings.AUTH_USER_MODEL, help_text=b'grantor of privilege')),
                ('resource', models.ForeignKey(related_name='r2urq', editable=False, to='hs_core.BaseResource', help_text=b'resource to which privilege applies')),
                ('user', models.ForeignKey(related_name='u2urq', editable=False, to=settings.AUTH_USER_MODEL, help_text=b'user to be granted privilege')),
            ],
        ),
        migrations.AlterUniqueTogether(
            name='userresourceprovenance',
            unique_together=set([('user', 'resource', 'start')]),
        ),
        migrations.AlterUniqueTogether(
            name='usergroupprovenance',
            unique_together=set([('user', 'group', 'start')]),
        ),
        migrations.AlterUniqueTogether(
            name='groupresourceprovenance',
            unique_together=set([('group', 'resource', 'start')]),
        ),
    ]
| StarcoderdataPython |
4836008 | <filename>capstone_proj/reports_app/views.py<gh_stars>0
# Create your views here.
from __future__ import absolute_import
import json
import requests
from intuitlib.client import AuthClient
from intuitlib.migration import migrate
from intuitlib.enums import Scopes
from intuitlib.exceptions import AuthClientError
from quickbooks import QuickBooks
from quickbooks.objects.customer import Customer
from quickbooks.objects.invoice import Invoice
from django.shortcuts import render, redirect
from django.http import HttpResponse, HttpResponseBadRequest
from django.conf import settings
from reports_app.services import qbo_api_call, manual_call
def index(request):
    """Render the landing page."""
    return render(request, 'reports_app/index.html')
def dashboard(request):
    """Render the dashboard page."""
    return render(request, 'reports_app/dashboard.html')
def oauth(request):
    """Start the OAuth2 authorization flow for the Accounting scope.

    Stores the CSRF state token in the session and redirects the user to
    Intuit's authorization page.
    """
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
    )
    url = auth_client.get_authorization_url([Scopes.ACCOUNTING])
    request.session['state'] = auth_client.state_token
    return redirect(url)
def openid(request):
    """Start the OAuth2 authorization flow for the OpenID + Email scopes.

    Stores the CSRF state token in the session and redirects the user to
    Intuit's authorization page.
    """
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
    )
    url = auth_client.get_authorization_url([Scopes.OPENID, Scopes.EMAIL])
    request.session['state'] = auth_client.state_token
    return redirect(url)
def callback(request):
    """OAuth2 redirect endpoint: validate the CSRF state token, exchange the
    authorization code for tokens and store them in the session."""
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
        state_token=request.session.get('state', None),
    )
    state_tok = request.GET.get('state', None)
    error = request.GET.get('error', None)
    if error == 'access_denied':
        # NOTE(review): other redirects in this module use the
        # 'reports_app:' namespace -- confirm 'app:index' resolves.
        return redirect('app:index')
    if state_tok is None:
        return HttpResponseBadRequest()
    elif state_tok != auth_client.state_token:
        return HttpResponse('unauthorized', status=401)
    auth_code = request.GET.get('code', None)
    realm_id = request.GET.get('realmId', None)
    request.session['realm_id'] = realm_id
    if auth_code is None:
        return HttpResponseBadRequest()
    try:
        auth_client.get_bearer_token(auth_code, realm_id=realm_id)
        request.session['access_token'] = auth_client.access_token
        request.session['refresh_token'] = auth_client.refresh_token
        request.session['id_token'] = auth_client.id_token
    except AuthClientError as e:
        # just printing status_code here but it can be used for retry workflows, etc
        print(e.status_code)
        print(e.content)
        print(e.intuit_tid)
    except Exception as e:
        # NOTE(review): broad catch-and-print swallows the failure; the user
        # is still redirected to 'connected' even when no tokens were stored.
        print(e)
    return redirect('reports_app:connected')
def connected(request):
    """Render the dashboard, flagging whether an OpenID id_token is present."""
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
        access_token=request.session.get('access_token', None),
        refresh_token=request.session.get('refresh_token', None),
        id_token=request.session.get('id_token', None),
    )
    has_openid = auth_client.id_token is not None
    return render(request, 'reports_app/dashboard.html', context={'openid': has_openid})
def qbo_request(request):
    """Make a sample QBO API call for the authenticated session and return
    the raw response.

    Raises ValueError when no realm id is stored in the session.
    """
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
        access_token=request.session.get('access_token', None),
        refresh_token=request.session.get('refresh_token', None),
        realm_id=request.session.get('realm_id', None),
    )
    if auth_client.realm_id is None:
        raise ValueError('Realm id not specified.')
    response = qbo_api_call(auth_client.access_token, auth_client.realm_id)
    if not response.ok:
        # BUG FIX: response.content is bytes; ' '.join([bytes, str]) raised
        # TypeError -- use the decoded text instead.
        return HttpResponse('{0} {1}'.format(response.text, response.status_code))
    return HttpResponse(response.content)
def user_info(request):
    """Return the OpenID user-info payload for the current session.

    Responds with a plain message when no id/access token is available, and
    with the upstream error status when the user-info call fails.
    """
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
        access_token=request.session.get('access_token', None),
        refresh_token=request.session.get('refresh_token', None),
        id_token=request.session.get('id_token', None),
    )
    try:
        response = auth_client.get_user_info()
    except ValueError:
        return HttpResponse('id_token or access_token not found.')
    except AuthClientError as e:
        # BUG FIX: previously this branch only printed and then fell through
        # to `response.content` with `response` unbound, raising NameError.
        return HttpResponse(
            'user info request failed: {0}'.format(e.status_code), status=500)
    return HttpResponse(response.content)
def refresh(request):
    """Refresh the session's OAuth2 tokens and return the refresh token.

    On AuthClientError the failure is only printed; the (unrefreshed)
    refresh token is still returned.
    """
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
        access_token=request.session.get('access_token', None),
        refresh_token=request.session.get('refresh_token', None),
    )
    try:
        auth_client.refresh()
    except AuthClientError as e:
        print(e.status_code)
        print(e.intuit_tid)
    return HttpResponse(auth_client.refresh_token, content_type="application/json")
def revoke(request):
    """Revoke the session's OAuth2 tokens.

    FIX: the original assigned the unused `is_revoked` and returned
    'Revoke successful' even when revocation raised; failures are now
    reported with a 500 instead of a false success.
    """
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
        access_token=request.session.get('access_token', None),
        refresh_token=request.session.get('refresh_token', None),
    )
    try:
        auth_client.revoke()
    except AuthClientError as e:
        return HttpResponse('Revoke failed: {0}'.format(e.status_code), status=500)
    return HttpResponse('Revoke successful')
def migration(request):
    """Migrate legacy OAuth1.0a credentials to OAuth2 tokens.

    Uses the intuitlib migrate helper with the configured consumer/access
    keys; failures are only printed, and the resulting refresh token (if
    any) is echoed back.
    """
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
    )
    try:
        migrate(
            settings.CONSUMER_KEY,
            settings.CONSUMER_SECRET,
            settings.ACCESS_KEY,
            settings.ACCESS_SECRET,
            auth_client,
            [Scopes.ACCOUNTING]
        )
    except AuthClientError as e:
        print(e.status_code)
        print(e.intuit_tid)
    return HttpResponse('OAuth2 refresh_token {0}'.format(auth_client.refresh_token))
def invoice(request):
    """Return all invoices ordered by transaction date.

    NOTE(review): the response body is a concatenation of individual JSON
    objects, which is not itself valid JSON -- a client expecting a JSON
    array will fail to parse it.
    """
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
    )
    client = QuickBooks(
        auth_client=auth_client,
        refresh_token=request.session.get('refresh_token', None),
        company_id=settings.COMPANY_ID,
        minorversion=63
    )
    # Get customer invoices ordered by TxnDate
    invoices = Invoice.filter(order_by='TxnDate', qb=client)
    invoice_response = []
    for invoice in invoices:
        # to_json -> loads -> dumps round-trip re-serializes each invoice as
        # a compact JSON string
        invoice = invoice.to_json()
        invoice = json.loads(invoice)
        invoice_response.append(json.dumps(invoice))
    return HttpResponse(invoice_response, content_type="application/json")
def list_customers(request):
    """Query QBO for all active customers and return the raw JSON payload.

    Raises ValueError when no realm id is stored in the session.
    """
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
        realm_id=request.session.get('realm_id', None),
        access_token=request.session.get('access_token', None),
        refresh_token=request.session.get('refresh_token', None),
    )
    if auth_client.realm_id is None:
        raise ValueError('Realm id not specified.')
    headers = {
        'Authorization': 'Bearer {0}'.format(auth_client.access_token),
        'Accept': 'application/json'
    }
    if settings.ENVIRONMENT == 'production':
        base_url = settings.QBO_BASE_PROD
    else:
        base_url = settings.QBO_BASE_SANDBOX
    # BUG FIX: the original built '?<sql>&minorversion=63' without the
    # 'query=' key (the sample URL in the old comment shows 'query?query=...')
    # and never URL-encoded the SQL; params= does both.
    route = '/v3/company/{0}/query'.format(auth_client.realm_id)
    response = requests.get(
        '{0}{1}'.format(base_url, route),
        headers=headers,
        params={
            'query': 'select * from Customer Where Active = True',
            'minorversion': '63',
        },
    )
    if not response.ok:
        # BUG FIX: response.content is bytes; ' '.join([bytes, str]) raised
        # TypeError -- use the decoded text instead.
        return HttpResponse('{0} {1}'.format(response.text, response.status_code))
    return HttpResponse(response.content, content_type="application/json")
def company_lookup(request):
    """CONSIDER DELETION

    NOTE(review): django's HttpRequest has no 'finance_id' attribute, so the
    Customer.filter call below raises AttributeError; the id should probably
    come from a URL kwarg as in read_customer().
    """
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
    )
    client = QuickBooks(
        auth_client=auth_client,
        refresh_token=request.session.get('refresh_token', None),
        company_id=settings.COMPANY_ID,
        minorversion=63
    )
    customers = Customer.filter(Id=request.finance_id, qb=client)
    return HttpResponse(customers, content_type="application/json")
def manual_invoice(request, transaction_number=False):
    """Fetch a single invoice by *transaction_number* from QBO and return it
    as JSON.

    Raises ValueError when no realm id is stored in the session.
    """
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
        access_token=request.session.get('access_token', None),
        refresh_token=request.session.get('refresh_token', None),
        realm_id=request.session.get('realm_id', None),
    )
    if auth_client.realm_id is None:
        raise ValueError('Realm id not specified.')
    response = manual_call(auth_client.access_token, auth_client.realm_id, 'invoice', transaction_number)
    if not response.ok:
        # BUG FIX: response.content is bytes; ' '.join([bytes, str]) raised
        # TypeError -- use the decoded text instead.
        return HttpResponse('{0} {1}'.format(response.text, response.status_code))
    return HttpResponse(response.content, content_type="application/json")
def read_customer(request, finance_id=5):
    """Fetch a single customer record by *finance_id* and return it as JSON.

    Raises ValueError when no realm id is stored in the session.
    """
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
        access_token=request.session.get('access_token', None),
        refresh_token=request.session.get('refresh_token', None),
        realm_id=request.session.get('realm_id', None),
    )
    if auth_client.realm_id is None:
        raise ValueError('Realm id not specified.')
    response = manual_call(auth_client.access_token, auth_client.realm_id, 'customer', finance_id)
    if not response.ok:
        # BUG FIX: response.content is bytes; ' '.join([bytes, str]) raised
        # TypeError -- use the decoded text instead.
        return HttpResponse('{0} {1}'.format(response.text, response.status_code))
    return HttpResponse(response.content, content_type="application/json")
def auth_header(request):
    """Return the 'Bearer <token>' Authorization header value for the
    current session.

    SECURITY FIX: the original printed the bearer token to stdout/logs;
    that debug output has been removed to avoid leaking credentials.
    NOTE(review): returning the raw token over HTTP is itself sensitive --
    confirm this endpoint is access-controlled.
    """
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
        realm_id=request.session.get('realm_id', None),
        access_token=request.session.get('access_token', None),
        refresh_token=request.session.get('refresh_token', None),
    )
    return HttpResponse('Bearer {0}'.format(auth_client.access_token))
def sparse_update_customer(request):
    """Relay a sparse customer-update JSON payload to the QBO customer
    endpoint and return QBO's response.

    Raises ValueError when no realm id is stored in the session.
    """
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
        access_token=request.session.get('access_token', None),
        refresh_token=request.session.get('refresh_token', None),
        realm_id=request.session.get('realm_id', None),
    )
    if auth_client.realm_id is None:
        raise ValueError('Realm id not specified.')
    if settings.ENVIRONMENT == 'production':
        base_url = settings.QBO_BASE_PROD
    else:
        base_url = settings.QBO_BASE_SANDBOX
    route = '/v3/company/{0}/{1}?minorversion=63'.format(auth_client.realm_id, 'customer')
    headers = {
        'Authorization': 'Bearer {0}'.format(auth_client.access_token),
        'Accept': 'application/json',
        'Content-Type': 'application/json',
    }
    # BUG FIX: the original passed the HttpRequest object itself as the POST
    # body (`data=request`); relay the raw request payload instead.
    response = requests.post('{0}{1}'.format(base_url, route), headers=headers,
                             data=request.body)
    return HttpResponse(response.content, content_type="application/json")
def create_new_customer(request):
    """Relay a new-customer JSON payload to the QBO customer endpoint and
    return QBO's response (the created customer entry).

    Raises ValueError when no realm id is stored in the session.
    """
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
        access_token=request.session.get('access_token', None),
        refresh_token=request.session.get('refresh_token', None),
        realm_id=request.session.get('realm_id', None),
    )
    if auth_client.realm_id is None:
        raise ValueError('Realm id not specified.')
    if settings.ENVIRONMENT == 'production':
        base_url = settings.QBO_BASE_PROD
    else:
        base_url = settings.QBO_BASE_SANDBOX
    route = '/v3/company/{0}/{1}?minorversion=63'.format(auth_client.realm_id, 'customer')
    headers = {
        'Authorization': 'Bearer {0}'.format(auth_client.access_token),
        'Accept': 'application/json',
        'Content-Type': 'application/json',
    }
    # BUG FIX: the original passed the HttpRequest object itself as the POST
    # body (`data=request`); relay the raw request payload instead.
    response = requests.post('{0}{1}'.format(base_url, route), headers=headers,
                             data=request.body)
    return HttpResponse(response.content, content_type="application/json")
def create_new_invoice(request):
    """Create a new service invoice in QuickBooks Online.

    Builds an AuthClient from session-stored OAuth tokens, POSTs the raw
    request body (the invoice JSON) to the QBO ``invoice`` endpoint and
    returns the QBO response verbatim.

    Raises:
        ValueError: if the session holds no access token or no realm id.
    """
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
        access_token=request.session.get('access_token', None),
        refresh_token=request.session.get('refresh_token', None),
        realm_id=request.session.get('realm_id', None),
    )
    if auth_client.access_token is None:
        # Previously a missing token fell through to an UnboundLocalError on
        # ``access_token`` below; fail with an explicit message instead.
        raise ValueError('Access token not specified.')
    access_token = auth_client.access_token
    if auth_client.realm_id is None:
        raise ValueError('Realm id not specified.')
    if settings.ENVIRONMENT == 'production':
        base_url = settings.QBO_BASE_PROD
    else:
        base_url = settings.QBO_BASE_SANDBOX
    # /v3/company/<realmID>/invoice
    route = '/v3/company/{0}/{1}'.format(auth_client.realm_id, 'invoice')
    auth_header = 'Bearer {0}'.format(access_token)
    headers = {
        'Authorization': auth_header,
        'Accept': 'application/json',
        'Content-Type': 'application/json',
    }
    # Bug fix: the Django HttpRequest object itself was passed as the POST
    # payload (``data=request``); send the raw request body instead.
    response = requests.post('{0}{1}'.format(base_url, route), headers=headers, data=request.body)
    return HttpResponse(response.content, content_type="application/json")
def email_invoice(request, invoice_id, email_address):
    """Ask QuickBooks Online to email invoice *invoice_id* to *email_address*.

    The QBO ``send`` endpoint takes the target address as a query parameter
    and no request body, so nothing is posted besides the headers.

    Raises:
        ValueError: if the session holds no access token or no realm id.
    """
    auth_client = AuthClient(
        settings.CLIENT_ID,
        settings.CLIENT_SECRET,
        settings.REDIRECT_URI,
        settings.ENVIRONMENT,
        access_token=request.session.get('access_token', None),
        refresh_token=request.session.get('refresh_token', None),
        realm_id=request.session.get('realm_id', None),
    )
    if auth_client.access_token is None:
        # Previously a missing token fell through to an UnboundLocalError on
        # ``access_token`` below; fail with an explicit message instead.
        raise ValueError('Access token not specified.')
    access_token = auth_client.access_token
    if auth_client.realm_id is None:
        raise ValueError('Realm id not specified.')
    if settings.ENVIRONMENT == 'production':
        base_url = settings.QBO_BASE_PROD
    else:
        base_url = settings.QBO_BASE_SANDBOX
    # /v3/company/<realmID> {0} /invoice {1} /<invoiceId> {2} /send?sendTo=<emailAddr> {3}
    route = '/v3/company/{0}/{1}/{2}/send?sendTo={3}&minorversion=63'.format(auth_client.realm_id, 'invoice', invoice_id, email_address)
    auth_header = 'Bearer {0}'.format(access_token)
    headers = {
        'Authorization': auth_header,
        'Accept': 'application/json',
        'Content-Type': 'application/octet-stream',
    }
    # Removed a dead ``body = request`` assignment: this endpoint takes no
    # body and the variable was never used.
    response = requests.post('{0}{1}'.format(base_url, route), headers=headers)
    return HttpResponse(response.content, content_type="application/json")
3347982 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'galvo.ui'
#
# Created by: PyQt5 UI code generator 5.15.4
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
    """Auto-generated (pyuic5) UI for the HAL-4000 galvanometer control dialog.

    Two grouped panels (X axis / Y axis), each with offset, amplitude and
    frequency spin boxes, plus Activate/Ok buttons.  Regenerate from
    'galvo.ui' with pyuic5 instead of editing this class by hand.
    """

    def setupUi(self, Dialog):
        """Build the widget hierarchy and layouts on *Dialog*."""
        Dialog.setObjectName("Dialog")
        Dialog.resize(466, 184)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(Dialog.sizePolicy().hasHeightForWidth())
        Dialog.setSizePolicy(sizePolicy)
        Dialog.setMinimumSize(QtCore.QSize(0, 0))
        Dialog.setMaximumSize(QtCore.QSize(10000, 10000))
        self.verticalLayout = QtWidgets.QVBoxLayout(Dialog)
        self.verticalLayout.setObjectName("verticalLayout")
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        # X axis control group: offset (V), amplitude (V), frequency (Hz).
        self.xAxisControlBox = QtWidgets.QGroupBox(Dialog)
        self.xAxisControlBox.setObjectName("xAxisControlBox")
        self.gridLayout_2 = QtWidgets.QGridLayout(self.xAxisControlBox)
        self.gridLayout_2.setObjectName("gridLayout_2")
        self.xOffsetLabel = QtWidgets.QLabel(self.xAxisControlBox)
        self.xOffsetLabel.setObjectName("xOffsetLabel")
        self.gridLayout_2.addWidget(self.xOffsetLabel, 0, 1, 1, 1)
        self.xAmplitudeSpinBox = QtWidgets.QDoubleSpinBox(self.xAxisControlBox)
        self.xAmplitudeSpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.xAmplitudeSpinBox.setMinimum(0.0)
        self.xAmplitudeSpinBox.setMaximum(10.0)
        self.xAmplitudeSpinBox.setSingleStep(0.01)
        self.xAmplitudeSpinBox.setObjectName("xAmplitudeSpinBox")
        self.gridLayout_2.addWidget(self.xAmplitudeSpinBox, 1, 0, 1, 1)
        self.xAmplitudeLabel = QtWidgets.QLabel(self.xAxisControlBox)
        self.xAmplitudeLabel.setObjectName("xAmplitudeLabel")
        self.gridLayout_2.addWidget(self.xAmplitudeLabel, 1, 1, 1, 1)
        self.xFrequencyLabel = QtWidgets.QLabel(self.xAxisControlBox)
        self.xFrequencyLabel.setObjectName("xFrequencyLabel")
        self.gridLayout_2.addWidget(self.xFrequencyLabel, 2, 1, 1, 1)
        self.xOffsetSpinBox = QtWidgets.QDoubleSpinBox(self.xAxisControlBox)
        self.xOffsetSpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.xOffsetSpinBox.setMinimum(-10.0)
        self.xOffsetSpinBox.setMaximum(10.0)
        self.xOffsetSpinBox.setSingleStep(0.01)
        self.xOffsetSpinBox.setObjectName("xOffsetSpinBox")
        self.gridLayout_2.addWidget(self.xOffsetSpinBox, 0, 0, 1, 1)
        self.xFrequencySpinBox = QtWidgets.QSpinBox(self.xAxisControlBox)
        self.xFrequencySpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.xFrequencySpinBox.setMaximum(100000)
        self.xFrequencySpinBox.setObjectName("xFrequencySpinBox")
        self.gridLayout_2.addWidget(self.xFrequencySpinBox, 2, 0, 1, 1)
        self.horizontalLayout_2.addWidget(self.xAxisControlBox)
        # Y axis control group: same three controls as the X axis.
        self.yAxisControlBox = QtWidgets.QGroupBox(Dialog)
        self.yAxisControlBox.setObjectName("yAxisControlBox")
        self.gridLayout = QtWidgets.QGridLayout(self.yAxisControlBox)
        self.gridLayout.setObjectName("gridLayout")
        self.yOffsetSpinBox = QtWidgets.QDoubleSpinBox(self.yAxisControlBox)
        self.yOffsetSpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.yOffsetSpinBox.setMinimum(-10.0)
        self.yOffsetSpinBox.setMaximum(10.0)
        self.yOffsetSpinBox.setSingleStep(0.01)
        self.yOffsetSpinBox.setObjectName("yOffsetSpinBox")
        self.gridLayout.addWidget(self.yOffsetSpinBox, 0, 0, 1, 1)
        self.yOffsetLabel = QtWidgets.QLabel(self.yAxisControlBox)
        self.yOffsetLabel.setObjectName("yOffsetLabel")
        self.gridLayout.addWidget(self.yOffsetLabel, 0, 1, 1, 1)
        self.yAmplitudeSpinBox = QtWidgets.QDoubleSpinBox(self.yAxisControlBox)
        self.yAmplitudeSpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.yAmplitudeSpinBox.setMinimum(0.0)
        self.yAmplitudeSpinBox.setMaximum(10.0)
        self.yAmplitudeSpinBox.setSingleStep(0.01)
        self.yAmplitudeSpinBox.setObjectName("yAmplitudeSpinBox")
        self.gridLayout.addWidget(self.yAmplitudeSpinBox, 1, 0, 1, 1)
        self.yAmplitudeLabel = QtWidgets.QLabel(self.yAxisControlBox)
        self.yAmplitudeLabel.setObjectName("yAmplitudeLabel")
        self.gridLayout.addWidget(self.yAmplitudeLabel, 1, 1, 1, 1)
        self.yFrequencyLabel = QtWidgets.QLabel(self.yAxisControlBox)
        self.yFrequencyLabel.setObjectName("yFrequencyLabel")
        self.gridLayout.addWidget(self.yFrequencyLabel, 2, 1, 1, 1)
        self.yFrequencySpinBox = QtWidgets.QSpinBox(self.yAxisControlBox)
        self.yFrequencySpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.yFrequencySpinBox.setMaximum(100000)
        self.yFrequencySpinBox.setObjectName("yFrequencySpinBox")
        self.gridLayout.addWidget(self.yFrequencySpinBox, 2, 0, 1, 1)
        self.horizontalLayout_2.addWidget(self.yAxisControlBox)
        self.verticalLayout.addLayout(self.horizontalLayout_2)
        # Bottom row: checkable Activate toggle, spacer, Ok button.
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.activateButton = QtWidgets.QPushButton(Dialog)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.activateButton.sizePolicy().hasHeightForWidth())
        self.activateButton.setSizePolicy(sizePolicy)
        self.activateButton.setCheckable(True)
        self.activateButton.setObjectName("activateButton")
        self.horizontalLayout.addWidget(self.activateButton)
        spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem)
        self.okButton = QtWidgets.QPushButton(Dialog)
        self.okButton.setObjectName("okButton")
        self.horizontalLayout.addWidget(self.okButton)
        self.verticalLayout.addLayout(self.horizontalLayout)

        self.retranslateUi(Dialog)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        """Assign all user-visible strings (kept separate for Qt translation)."""
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate("Dialog", "HAL-4000 Galvonometer Control"))
        self.xAxisControlBox.setTitle(_translate("Dialog", "X Axis"))
        self.xOffsetLabel.setText(_translate("Dialog", "Offset (V)"))
        self.xAmplitudeLabel.setText(_translate("Dialog", "Amplitude (V)"))
        self.xFrequencyLabel.setText(_translate("Dialog", "Frequency (Hz)"))
        self.yAxisControlBox.setTitle(_translate("Dialog", "Y Axis"))
        self.yOffsetLabel.setText(_translate("Dialog", "Offset (V)"))
        self.yAmplitudeLabel.setText(_translate("Dialog", "Amplitude (V)"))
        self.yFrequencyLabel.setText(_translate("Dialog", "Frequency (Hz)"))
        self.activateButton.setText(_translate("Dialog", "Activate"))
        self.okButton.setText(_translate("Dialog", "Ok"))
if __name__ == "__main__":
    # Stand-alone preview: show the dialog in its own Qt application.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    Dialog = QtWidgets.QDialog()
    ui = Ui_Dialog()
    ui.setupUi(Dialog)
    Dialog.show()
    sys.exit(app.exec_())
| StarcoderdataPython |
3308631 | import pytest
import hls4ml
import numpy as np
from pathlib import Path
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Embedding
test_root_path = Path(__file__).parent
@pytest.fixture(scope='module')
def data():
    # 32 integer sequences of length 100 with values in [0, 10),
    # used as indices into the Embedding layer.
    X = np.random.randint(10, size=(32, 100))
    return X
@pytest.fixture(scope='module')
def keras_model():
    # Minimal model under test: a single Embedding layer
    # (vocabulary size 13, output dimension 8, sequence length 100).
    inputs = Input(shape=(100,), name='embedding_input')
    embedding = Embedding(13, 8, input_length=100, name='embedding')(inputs)
    model = Model(inputs=inputs, outputs=embedding)
    return model
@pytest.fixture
def hls_model(keras_model, io_type):
    """Convert the Keras model to a compiled hls4ml model for *io_type*.

    ``io_type`` is injected from the requesting test's parametrization.
    The ``@pytest.mark.parametrize`` previously stacked on this fixture was
    removed: marks applied to fixtures have no effect and are deprecated
    (rejected) by modern pytest.
    """
    hls_config = hls4ml.utils.config_from_keras_model(keras_model,
                                                      default_precision='ap_fixed<16,6>',
                                                      granularity='name')
    # The input carries small integer indices, so a narrow unsigned type suffices.
    hls_config['LayerName']['embedding_input']['Precision']['result'] = 'ap_uint<4>'
    out_dir = str(test_root_path / 'hls4mlprj_embed_{}').format(io_type)
    hls_model = hls4ml.converters.convert_from_keras_model(keras_model,
                                                           hls_config=hls_config,
                                                           io_type=io_type,
                                                           output_dir=out_dir)
    hls_model.compile()
    return hls_model
@pytest.mark.parametrize('io_type', ['io_parallel',
                                     'io_stream'])
def test_embedding_accuracy(data, keras_model, hls_model):
    """hls4ml predictions must match Keras to within 1e-3 absolute error."""
    X = data
    model = keras_model
    # model under test predictions and accuracy
    y_keras = model.predict(X)
    # np.float was removed in NumPy 1.24; the builtin float is the documented
    # replacement and is what the alias always meant.
    y_hls4ml = hls_model.predict(X.astype(float)).reshape(y_keras.shape)
    # "accuracy" of hls4ml predictions vs keras
    np.testing.assert_allclose(y_keras, y_hls4ml, rtol=0, atol=1e-03, verbose=True)
| StarcoderdataPython |
3285483 | <gh_stars>0
from sklearn.metrics import classification_report, confusion_matrix, accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
from sklearn.naive_bayes import GaussianNB, MultinomialNB, ComplementNB, BernoulliNB, CategoricalNB
from sklearn import svm
from sklearn import tree
from keras.models import Sequential
from keras import layers
import tensorflow as tf
test_size = 0.25    # fraction of samples held out for evaluation in every generator below
random_state = 42   # fixed seed so train/test splits are reproducible across runs
def evaluating(y_test, y_pred):
    """Score predictions against ground truth.

    Returns a (confusion_matrix, classification_report, accuracy_score)
    triple for the given labels and predictions.
    """
    return (confusion_matrix(y_test, y_pred),
            classification_report(y_test, y_pred),
            accuracy_score(y_test, y_pred))
def generate_gaussian_nb(x, y):
    """Fit a Gaussian naive Bayes model on a reproducible train split of (x, y).

    Returns the fitted estimator and a (y_test, y_pred) pair for evaluation.
    """
    x_train, x_test, y_train, y_test = train_test_split(
        x, y, test_size=test_size, random_state=random_state)
    model = GaussianNB()
    model.fit(x_train, y_train)
    return model, (y_test, model.predict(x_test))
def generate_multinomial_nb(x, y):
    """Fit a multinomial naive Bayes model on a reproducible train split of (x, y).

    Returns the fitted estimator and a (y_test, y_pred) pair for evaluation.
    """
    x_train, x_test, y_train, y_test = train_test_split(
        x, y, test_size=test_size, random_state=random_state)
    model = MultinomialNB()
    model.fit(x_train, y_train)
    return model, (y_test, model.predict(x_test))
def generate_complement_nb(x, y):
    """Fit a complement naive Bayes model on a reproducible train split of (x, y).

    Returns the fitted estimator and a (y_test, y_pred) pair for evaluation.
    """
    x_train, x_test, y_train, y_test = train_test_split(
        x, y, test_size=test_size, random_state=random_state)
    model = ComplementNB()
    model.fit(x_train, y_train)
    return model, (y_test, model.predict(x_test))
def generate_bernoulli_nb(x, y):
    """Fit a Bernoulli naive Bayes model on a reproducible train split of (x, y).

    Returns the fitted estimator and a (y_test, y_pred) pair for evaluation.
    """
    x_train, x_test, y_train, y_test = train_test_split(
        x, y, test_size=test_size, random_state=random_state)
    model = BernoulliNB()
    model.fit(x_train, y_train)
    return model, (y_test, model.predict(x_test))
def generate_categorical_nb(x, y):
    """Fit a categorical naive Bayes model on a reproducible train split of (x, y).

    Returns the fitted estimator and a (y_test, y_pred) pair for evaluation.
    """
    x_train, x_test, y_train, y_test = train_test_split(
        x, y, test_size=test_size, random_state=random_state)
    model = CategoricalNB()
    model.fit(x_train, y_train)
    return model, (y_test, model.predict(x_test))
def generate_svm(x, y, kernel="precomputed"):
    """Fit an SVM classifier with the given *kernel* on a train split of (x, y).

    Returns the fitted estimator and a (y_test, y_pred) pair for evaluation.
    """
    x_train, x_test, y_train, y_test = train_test_split(
        x, y, test_size=test_size, random_state=random_state)
    classifier = svm.SVC(kernel=kernel)
    classifier.fit(x_train, y_train)
    return classifier, (y_test, classifier.predict(x_test))
def generate_tree(x, y):
    """Fit a decision-tree classifier on a reproducible train split of (x, y).

    Returns the fitted estimator and a (y_test, y_pred) pair for evaluation.
    """
    x_train, x_test, y_train, y_test = train_test_split(
        x, y, test_size=test_size, random_state=random_state)
    classifier = tree.DecisionTreeClassifier()
    classifier.fit(x_train, y_train)
    return classifier, (y_test, classifier.predict(x_test))
def generate_nn(x, y, num_layers=100, activation='relu', loss='binary_crossentropy', optimizer='adam', **kwargs):
    """Train a small dense Keras classifier on (x, y).

    Labels are label-encoded and one-hot encoded before training; predictions
    are decoded back to the original label values before returning.

    Recognised **kwargs:
        num_hidden_layers: number of extra hidden layers to insert.
        num_hidden: width of those layers (defaults to num_layers).
        hidden_activation: their activation (defaults to *activation*).

    Returns (model, history, (y_test, y_pred)) with labels in original form.
    """
    # Encode arbitrary label values to 0..k-1, then one-hot for the softmax-style output.
    le = LabelEncoder()
    le.fit(list(set(y)))
    y = le.transform(y)
    y = tf.keras.utils.to_categorical(y, len(le.classes_))
    x_train, x_test, y_train, y_test = train_test_split(
        x, y, test_size=test_size, random_state=random_state)
    input_dim = x.shape[1]
    model = Sequential()
    model.add(layers.Dense(num_layers, input_dim=input_dim, activation=activation))
    # Optional extra hidden layers, configured via kwargs.
    if "num_hidden_layers" in kwargs:
        num_hidden = kwargs["num_hidden"] if "num_hidden" in kwargs else num_layers
        hidden_activation = kwargs["hidden_activation"] if "hidden_activation" in kwargs else activation
        for _ in range(kwargs["num_hidden_layers"]):
            model.add(layers.Dense(num_hidden, activation=hidden_activation))
    # One sigmoid output unit per class.
    model.add(layers.Dense(len(le.classes_), activation='sigmoid'))
    model.compile(loss=loss, optimizer=optimizer, metrics=['accuracy'])
    model.summary()
    history = model.fit(x_train, y_train,
                        epochs=100,
                        verbose=False,
                        validation_data=(x_test, y_test),
                        batch_size=10)
    y_pred = model.predict(x_test)
    # Map one-hot / probability rows back to the original label values.
    def destranformate(y):
        return le.inverse_transform(y.argmax(1))
    y_test = destranformate(y_test)
    y_pred = destranformate(y_pred)
    return model, history, (y_test, y_pred)
| StarcoderdataPython |
102796 | import logging
from pathlib import Path
def get_logger(logger_name: str) -> logging.Logger:
    """Return a logger writing DEBUG+ to ``logs/<name>.log`` and ERROR+ to stderr.

    Idempotent: ``logging.getLogger`` returns the same logger object for the
    same name, so repeated calls previously attached duplicate handlers and
    every record was written multiple times.  Handlers are now added only on
    the first call for a given name.
    """
    Path("logs/").mkdir(parents=True, exist_ok=True)
    logger = logging.getLogger(logger_name)
    logger.setLevel(logging.DEBUG)
    if logger.handlers:
        # Already configured by a previous call - do not duplicate handlers.
        return logger
    formatter = logging.Formatter('%(asctime)s - %(threadName)s - %(name)s - %(levelname)s - %(message)s')
    fh = logging.FileHandler('logs/' + logger_name + '.log')
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(formatter)
    ch = logging.StreamHandler()
    ch.setLevel(logging.ERROR)
    ch.setFormatter(formatter)
    logger.addHandler(fh)
    logger.addHandler(ch)
    return logger
| StarcoderdataPython |
103352 | from srs_sqlite import load_srs
if __name__ == '__main__':
    # Load the images deck; the knowledge deck is kept here for quick switching.
    # load_srs('user/PathoKnowledge.db', debug=True)
    load_srs('user/PathoImages.db', debug=True)
| StarcoderdataPython |
71174 | <filename>render/__init__.py<gh_stars>0
#
# This source file is part of appleseed.
# Visit https://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2014-2018 The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import os
import sys
import threading
import bpy
import appleseed as asr
from .renderercontroller import FinalRendererController, InteractiveRendererController
from .tilecallbacks import FinalTileCallback
from ..logger import get_logger
from ..translators.preview import PreviewRenderer
from ..translators.scene import SceneTranslator
from ..utils.util import safe_register_class, safe_unregister_class
logger = get_logger()
class RenderThread(threading.Thread):
    """Background thread that runs an appleseed renderer's render() call."""

    def __init__(self, renderer):
        """Store *renderer*; its render() method executes once start() is called."""
        threading.Thread.__init__(self)
        self.__renderer = renderer

    def run(self):
        # Body of the thread: delegate entirely to the renderer.
        self.__renderer.render()
class SetAppleseedLogLevel(object):
    """Context manager that temporarily sets appleseed's global log verbosity.

    The previous level is saved on entry and restored on exit.
    """

    # Map add-on setting strings to appleseed log message categories.
    mapping = {'debug': asr.LogMessageCategory.Debug,
               'info': asr.LogMessageCategory.Info,
               'warning': asr.LogMessageCategory.Warning,
               'error': asr.LogMessageCategory.Error,
               'fatal': asr.LogMessageCategory.Fatal}

    def __init__(self, new_level):
        # new_level: one of the mapping keys above; KeyError otherwise.
        self.__new_level = self.mapping[new_level]

    def __enter__(self):
        # Remember the current level so __exit__ can restore it.
        self.__saved_level = asr.global_logger().get_verbosity_level()
        asr.global_logger().set_verbosity_level(self.__new_level)

    def __exit__(self, type, value, traceback):
        asr.global_logger().set_verbosity_level(self.__saved_level)
class RenderAppleseed(bpy.types.RenderEngine):
    """Blender RenderEngine implementation backed by the appleseed renderer.

    Drives three modes: material previews, final (F12) renders and
    interactive viewport rendering.
    """

    bl_idname = 'APPLESEED_RENDER'
    bl_label = 'appleseed'
    bl_use_preview = True

    # True if we are doing interactive rendering.
    __interactive_session = False

    #
    # Constructor.
    #

    def __init__(self):
        logger.debug("Creating render engine")

        # Common for all rendering modes.
        self.__renderer = None
        self.__renderer_controller = None
        self.__tile_callback = None
        self.__render_thread = None

        # Interactive rendering.
        self.__interactive_scene_translator = None
        self.__is_interactive = False

    #
    # Destructor.
    #

    def __del__(self):
        self.__stop_rendering()

        # Sometimes __is_interactive does not exist, not sure why.
        try:
            if self.__is_interactive:
                RenderAppleseed.__interactive_session = False
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # are no longer swallowed during interpreter shutdown.
            pass

        logger.debug("Deleting render engine")

    #
    # RenderEngine methods.
    #

    def render(self, scene):
        """Render *scene*: a material preview when is_preview, else a final frame."""
        if self.is_preview:
            if bpy.app.background:  # Can this happen?
                return

            # Disable material previews if we are doing an interactive render.
            if not RenderAppleseed.__interactive_session:
                level = 'error'
                with SetAppleseedLogLevel(level):
                    self.__render_material_preview(scene)
        else:
            level = scene.appleseed.log_level
            with SetAppleseedLogLevel(level):
                self.__add_render_passes(scene)
                self.__render_final(scene)

    def view_update(self, context):
        """Start the interactive session on first call; afterwards sync scene edits."""
        if self.__interactive_scene_translator is None:
            self.__start_interactive_render(context)
        else:
            self.__pause_rendering()
            logger.debug("Updating scene")
            self.__interactive_scene_translator.update_scene(context.scene, context)
            self.__restart_interactive_render()

    def view_draw(self, context):
        """Draw the latest rendered tiles and restart rendering if the view moved."""
        self.__draw_pixels(context)

        # Check if view has changed.
        view_update, cam_param_update, cam_translate_update = self.__interactive_scene_translator.check_view(context)

        if view_update or cam_param_update or cam_translate_update:
            self.__pause_rendering()
            logger.debug("Updating view")
            self.__interactive_scene_translator.update_view(view_update, cam_param_update)
            self.__restart_interactive_render()

    def update_render_passes(self, scene=None, renderlayer=None):
        """Declare to Blender the render passes (AOVs) enabled in the scene settings."""
        asr_scene_props = scene.appleseed

        if not self.is_preview:
            self.register_pass(scene, renderlayer, "Combined", 4, "RGBA", 'COLOR')
            if asr_scene_props.diffuse_aov:
                self.register_pass(scene, renderlayer, "Diffuse", 4, "RGBA", 'COLOR')
            if asr_scene_props.screen_space_velocity_aov:
                self.register_pass(scene, renderlayer, "Screen Space Velocity", 3, "RGB", 'COLOR')
            if asr_scene_props.direct_diffuse_aov:
                self.register_pass(scene, renderlayer, "Direct Diffuse", 4, "RGBA", 'COLOR')
            if asr_scene_props.indirect_diffuse_aov:
                self.register_pass(scene, renderlayer, "Indirect Diffuse", 4, "RGBA", 'COLOR')
            if asr_scene_props.glossy_aov:
                self.register_pass(scene, renderlayer, "Glossy", 4, "RGBA", 'COLOR')
            if asr_scene_props.direct_glossy_aov:
                self.register_pass(scene, renderlayer, "Direct Glossy", 4, "RGBA", 'COLOR')
            if asr_scene_props.indirect_glossy_aov:
                self.register_pass(scene, renderlayer, "Indirect Glossy", 4, "RGBA", 'COLOR')
            if asr_scene_props.albedo_aov:
                self.register_pass(scene, renderlayer, "Albedo", 4, "RGBA", 'COLOR')
            if asr_scene_props.emission_aov:
                self.register_pass(scene, renderlayer, "Emission", 4, "RGBA", 'COLOR')
            if asr_scene_props.npr_shading_aov:
                self.register_pass(scene, renderlayer, "NPR Shading", 4, "RGBA", 'COLOR')
            if asr_scene_props.npr_contour_aov:
                self.register_pass(scene, renderlayer, "NPR Contour", 4, "RGBA", 'COLOR')
            if asr_scene_props.normal_aov:
                self.register_pass(scene, renderlayer, "Normal", 3, "RGB", 'VECTOR')
            if asr_scene_props.position_aov:
                self.register_pass(scene, renderlayer, "Position", 3, "RGB", 'VECTOR')
            if asr_scene_props.uv_aov:
                self.register_pass(scene, renderlayer, "UV", 3, "RGB", 'VECTOR')
            if asr_scene_props.depth_aov:
                self.register_pass(scene, renderlayer, "Z Depth", 1, "Z", 'VALUE')
            if asr_scene_props.pixel_time_aov:
                self.register_pass(scene, renderlayer, "Pixel Time", 3, "RGB", "VECTOR")
            if asr_scene_props.invalid_samples_aov:
                self.register_pass(scene, renderlayer, "Invalid Samples", 3, "RGB", "VECTOR")
            if asr_scene_props.pixel_sample_count_aov:
                self.register_pass(scene, renderlayer, "Pixel Sample Count", 3, "RGB", "VECTOR")
            if asr_scene_props.pixel_variation_aov:
                self.register_pass(scene, renderlayer, "Pixel Variation", 3, "RGB", "VECTOR")

    #
    # Internal methods.
    #

    def __render_material_preview(self, scene):
        """
        Export and render the material preview scene.
        """

        material_preview_renderer = PreviewRenderer()
        material_preview_renderer.translate_preview(scene)

        self.__start_final_render(scene, material_preview_renderer.as_project)

    def __render_final(self, scene):
        """
        Export and render the scene.
        """

        scene_translator = SceneTranslator.create_final_render_translator(scene)
        self.update_stats("appleseed Rendering: Translating scene", "")
        scene_translator.translate_scene()

        self.__start_final_render(scene, scene_translator.as_project)

    def __start_final_render(self, scene, project):
        """
        Start a final render and block until it finishes.
        """

        # Preconditions.
        assert(self.__renderer is None)
        assert(self.__renderer_controller is None)
        assert(self.__tile_callback is None)
        assert(self.__render_thread is None)

        self.__tile_callback = FinalTileCallback(self, scene)

        self.__renderer_controller = FinalRendererController(self, self.__tile_callback)

        self.__renderer = asr.MasterRenderer(project,
                                             project.configurations()['final'].get_inherited_parameters(),
                                             [],
                                             self.__renderer_controller,
                                             self.__tile_callback)

        self.__render_thread = RenderThread(self.__renderer)

        # While debugging, log to the console. This should be configurable.
        log_target = asr.ConsoleLogTarget(sys.stderr)
        asr.global_logger().add_target(log_target)

        # Start render thread and wait for it to finish.
        # Bug fix: Thread.isAlive() was removed in Python 3.9; use is_alive().
        self.__render_thread.start()
        while self.__render_thread.is_alive():
            self.__render_thread.join(0.5)  # seconds

        # Cleanup.
        asr.global_logger().remove_target(log_target)

        if scene.appleseed.denoise_mode == 'write_outputs':
            project.get_frame().write_main_image(os.path.join(scene.appleseed.denoise_output_dir, "output.exr"))

        self.__stop_rendering()

    def __start_interactive_render(self, context):
        """
        Start an interactive rendering session.
        """

        # Preconditions.
        assert(self.__interactive_scene_translator is None)
        assert(self.__renderer is None)
        assert(self.__renderer_controller is None)
        assert(self.__tile_callback is None)
        assert(self.__render_thread is None)

        logger.debug("Starting interactive rendering")

        self.__is_interactive = True
        RenderAppleseed.__interactive_session = True

        logger.debug("Translating scene for interactive rendering")
        self.__interactive_scene_translator = SceneTranslator.create_interactive_render_translator(context)
        self.__interactive_scene_translator.translate_scene()

        self.__camera = self.__interactive_scene_translator.camera_translator

        project = self.__interactive_scene_translator.as_project

        self.__renderer_controller = InteractiveRendererController(self.__camera)

        self.__tile_callback = asr.BlenderProgressiveTileCallback(self.tag_redraw)

        self.__renderer = asr.MasterRenderer(project,
                                             project.configurations()['interactive'].get_inherited_parameters(),
                                             [],
                                             self.__renderer_controller,
                                             self.__tile_callback)

        self.__restart_interactive_render()

    def __restart_interactive_render(self):
        """
        Restart the interactive renderer.
        """

        logger.debug("Start rendering")
        self.__renderer_controller.set_status(asr.IRenderControllerStatus.ContinueRendering)
        self.__render_thread = RenderThread(self.__renderer)
        self.__render_thread.start()

    def __pause_rendering(self):
        """
        Abort rendering if a render is in progress.
        """

        # Signal appleseed to stop rendering.
        logger.debug("Pause rendering")
        try:
            if self.__render_thread:
                self.__renderer_controller.set_status(asr.IRenderControllerStatus.AbortRendering)
                self.__render_thread.join()
        except Exception:
            # Narrowed from a bare except; shutdown interrupts must propagate.
            pass

        self.__render_thread = None

    def __stop_rendering(self):
        """
        Abort rendering if a render is in progress and cleanup.
        """

        # Signal appleseed to stop rendering.
        logger.debug("Abort rendering")
        try:
            if self.__render_thread:
                self.__renderer_controller.set_status(asr.IRenderControllerStatus.AbortRendering)
                self.__render_thread.join()
        except Exception:
            # Narrowed from a bare except; shutdown interrupts must propagate.
            pass

        # Cleanup.
        self.__render_thread = None
        self.__renderer = None
        self.__renderer_controller = None
        self.__tile_callback = None

    def __draw_pixels(self, context):
        """
        Draw rendered image in Blender's viewport.
        """
        self.bind_display_space_shader(context.scene)
        self.__tile_callback.draw_pixels()
        self.unbind_display_space_shader()

    def __add_render_passes(self, scene):
        """Register with Blender the extra passes enabled in the scene settings."""
        asr_scene_props = scene.appleseed

        if asr_scene_props.screen_space_velocity_aov:
            self.add_pass("Screen Space Velocity", 3, "RGB")
        if asr_scene_props.diffuse_aov:
            self.add_pass("Diffuse", 4, "RGBA")
        if asr_scene_props.direct_diffuse_aov:
            self.add_pass("Direct Diffuse", 4, "RGBA")
        if asr_scene_props.indirect_diffuse_aov:
            self.add_pass("Indirect Diffuse", 4, "RGBA")
        if asr_scene_props.glossy_aov:
            self.add_pass("Glossy", 4, "RGBA")
        if asr_scene_props.direct_glossy_aov:
            self.add_pass("Direct Glossy", 4, "RGBA")
        if asr_scene_props.indirect_glossy_aov:
            self.add_pass("Indirect Glossy", 4, "RGBA")
        if asr_scene_props.normal_aov:
            self.add_pass("Normal", 3, "RGB")
        if asr_scene_props.position_aov:
            self.add_pass("Position", 3, "RGB")
        if asr_scene_props.uv_aov:
            self.add_pass("UV", 3, "RGB")
        if asr_scene_props.depth_aov:
            self.add_pass("Z Depth", 1, "Z")
        if asr_scene_props.pixel_time_aov:
            self.add_pass("Pixel Time", 3, "RGB")
        if asr_scene_props.invalid_samples_aov:
            self.add_pass("Invalid Samples", 3, "RGB")
        if asr_scene_props.pixel_sample_count_aov:
            self.add_pass("Pixel Sample Count", 3, "RGB")
        if asr_scene_props.pixel_variation_aov:
            self.add_pass("Pixel Variation", 3, "RGB")
        if asr_scene_props.albedo_aov:
            self.add_pass("Albedo", 4, "RGBA")
        if asr_scene_props.emission_aov:
            self.add_pass("Emission", 4, "RGBA")
        if asr_scene_props.npr_shading_aov:
            self.add_pass("NPR Shading", 4, "RGBA")
        if asr_scene_props.npr_contour_aov:
            self.add_pass("NPR Contour", 4, "RGBA")
def register():
    """Register the appleseed render engine class with Blender."""
    safe_register_class(RenderAppleseed)
def unregister():
    """Unregister the appleseed render engine class from Blender."""
    safe_unregister_class(RenderAppleseed)
| StarcoderdataPython |
3267364 | import numpy as np
# Training samples.  Each raw sample is prefixed with a bias 1 so the weight
# matched to that column plays the role of the intercept b from
# one-dimensional linear regression.
X = np.array([
    [1, 17.9302012052, 94.5205919533],
    [1, 97.1446971852, 69.5932819844],
    [1, 81.7759007845, 5.73764809688],
])
Y = np.array([317, 405, 180])

# Solve the normal equations (X^T X) w = X^T Y for the least-squares weights.
w = np.linalg.solve(X.T @ X, X.T @ Y)

# Predictions for the training inputs.
Yhat = X @ w

# Coefficient of determination R^2 = 1 - SS_res / SS_tot, a score of how
# well the predictions fit the targets.
d1 = Y - Yhat
d2 = Y - Y.mean()
rsquared = 1 - d1.dot(d1) / d2.dot(d2)
138867 | from setuptools import find_packages
from setuptools import setup
try:
README = open("README.md").read()
except IOError:
README = None
setup(
name="pgjobs",
version="0.2.1",
description="Postgresql job scheduling",
long_description=README,
long_description_content_type="text/markdown",
author="<NAME>",
author_email="<EMAIL>",
url="https://github.com/vinissimus/jobs",
package_data={"jobs": ["py.typed"]},
packages=find_packages(),
include_package_data=True,
classifiers=[],
install_requires=["asyncpg>=0.20.1,<0.21"],
tests_require=["pytest", "pytest-docker-fixtures"],
extras_require={
"test": [
"pytest",
"pytest-asyncio",
"pytest-cov",
"pytest-docker-fixtures[pg]",
"coverage",
]
},
entry_points={
"console_scripts": [
"jobs-worker = jobs.worker:run",
"jobs-migrator = jobs.migrations:run",
]
},
)
| StarcoderdataPython |
111077 | <reponame>smartao/estudos_python<gh_stars>0
#!/usr/bin/python3
'''
Interpolarção
É substituir valores dentro da string
'''
# Create two variables.
from string import Template
nome, idade = '<NAME>', 30.98761

# Oldest method, least recommended!
#
# %s = character sequence interpreted by Python to substitute
#      string-typed elements
# %d = used to substitute integer values
# %f = used to substitute float values
print('\nSubstituindo valores variaveis, método antigo:')
print('Nome: %s, Idade: %d' % (nome, idade))

# Substituting float values limited to two decimal places.
print('\nSubstitindo valores float e limitando as casas deciamais:')
print('Nome: %s, Idade: %.2f' % (nome, idade))

# Method used in Python 3.6 or earlier: str.format.
print('\nMetodo de interpolacao python 3.6 ou inferior:')
print('Nome: {0} Idade: {1}'.format(nome, idade))

# Newest method of all, supported only on Python 3.6+: f-strings.
print('\nMetodo de interpolacao python 3.6 ou superior:')
print(f'Nome: {nome} Idade: {idade}')
# Adding to a value and formatting with only two decimal places.
print(f'Idade de {nome} daqui 10 anos = {idade+10:.2f}')

# Template-based method; requires the import above.
print('\nMetodo usando template:')
s = Template('Nome: $n Idade: $ida')
print(s.substitute(n=nome, ida=idade))
| StarcoderdataPython |
3334423 | <reponame>neshdev/competitive-prog
# Read n and the n scores, then count how many times a new record high or
# new record low appears after the first score; print the combined count.
n = int(input())
values = [int(token) for token in input().split()]

record_breaks = 0
best = values[0]
worst = values[0]
for i in range(1, n):
    score = values[i]
    if score > best:
        best = score
        record_breaks += 1
    if score < worst:
        worst = score
        record_breaks += 1
print(record_breaks)
3341093 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.views import View
from base.handlers.extra_handlers import ExtraHandler
from jekyllnow.handlers.jekyllnow_handlers import JekyllNowHandler
class JekyllNowTheme(LoginRequiredMixin, View):
    """
    Does the primary tasks and redirect to site route.

    Login-required view: runs the Jekyll Now initial setup for the user's
    main repository, then redirects to the 'siteprofile' URL.
    """
    def get(self, request, *args, **kwargs):
        user = self.request.user
        # Main repository that has not had a template applied yet.
        repo = ExtraHandler().main_repo_with_no_template(user)
        jekyll_now = JekyllNowHandler(user, repo)
        jekyll_now.perform_initial_tasks()
        return HttpResponseRedirect(reverse('siteprofile'))
| StarcoderdataPython |
3227861 | <filename>Filter/ICADL.py<gh_stars>1-10
import time
from SPARQLWrapper import SPARQLWrapper, JSON
from SPARQLWrapper.SPARQLExceptions import EndPointNotFound
from stop_words import get_stop_words, StopWordError
import regex
#Change the rapidfuzz for fuzzywuzzy if it is needed the original implementation
from rapidfuzz import fuzz
class FilteringNEL:
    """Filter and re-rank Wikidata candidate ids in a CoNLL-style NEL file.

    Each candidate Wikidata id found in the input file is validated against
    DBpedia (the English endpoint and, when available, the per-language
    chapter endpoint): ids whose DBpedia types do not match the entity's NER
    tag are demoted, persons born well after the article date are dropped,
    and the survivors are re-ranked by fuzzy string similarity between the
    DBpedia label and the mention text.

    Fixes applied in this revision:
    * ``verifyQueryTiming*``: the chained comparison ``0 >= delta > 1`` can
      never be true, so the rate limiter never fired; replaced with
      ``delta < 1``.
    * Regex patterns are now raw strings (``"\\d"`` etc. are invalid escape
      sequences and deprecated).
    * ``readFile`` no longer raises ``NameError`` when ``start_at == 0`` and
      ``skip_header`` is False, and a leftover debug ``print`` was removed.
    """

    # NOTE: the `filter` parameter shadows the builtin, but renaming it would
    # break existing keyword callers, so it is kept.
    def __init__(self, lang, input_file_path, output_file_path, comment_token, freeling=False, sep="|", tokens_col=0, lit_col=1, meto_col=None, wd_col=2, last_nil=True, no_candidates=1, start_at=0, skip_header=False, filter=True, search_name=True):
        # DBpedia classes accepted for each (lower-cased) NER tag.
        # NOTE(review): the "dbc" prefix below (http://dbpedia.org/resource/Category)
        # looks truncated (usually .../resource/Category:) — confirm.
        self.__db_types = {
            "loc": ["dbo:Location", "dbo:Settlement", "dbo:Region", "dbo:Building", "dbo:Village",
                    "umbel-rc:Country", "yago:YagoGeoEntity"],
            "org": ["dbo:Organisation", "umbel-rc:Business", "dbc:Supraorganizations", "yago:YagoGeoEntity"],
            "pers": ["foaf:Person", "dbo:Person", "dbo:Agent", "dul:SocialPerson"],
            "per": ["foaf:Person", "dbo:Person", "dbo:Agent", "dul:SocialPerson"],
            "prod": ["dbo:Work", "dbo:Newspaper", "umbel-rc:Business"],
            "humanprod": ["dbo:Work", "dbo:Newspaper", "umbel-rc:Business"]
        }
        self.__search_name = search_name
        self.__tokens_col = tokens_col
        self.__lit_col = lit_col
        self.__meto_col = meto_col
        self.__wd_col = wd_col
        self.__lang = lang
        self.__supported_lang = False
        self.__sparql = SPARQLWrapper("http://dbpedia.org/sparql")
        # Languages with label coverage on the English DBpedia endpoint.
        if self.__lang in ["en", "de", "nl", "it", "es", "fr", "ja", "ar", "pl", "pt", "ru", "zh"]:
            self.__supported_lang = True
        self.__sparql_chapter = None
        self.__wikidata_www_chapters = None
        # Languages that have their own DBpedia chapter endpoint.
        if self.__lang in ["ar", "eu", "ca", "cs", "nl", "eo", "fr", "el", "de", "id", "it", "ja", "ko", "pl", "pt", "es", "sv", "uk"]:
            # Some chapters use "www." in the wikidata url, while others don't.
            self.__wikidata_www_chapters = ["es", "eu", "de", "ja"]
            self.__sparql_chapter = SPARQLWrapper(f"http://{self.__lang}.dbpedia.org/sparql")
        try:
            self.__stop_words_list = get_stop_words(lang)
        except StopWordError:
            # Fall back to the bundled stop-word files for unsupported languages.
            self.__stop_words_list = self.loadFileStopwords()
        self.__input_file_path = input_file_path
        self.__output_file_path = output_file_path
        self.__comment_token = comment_token
        self.__output_file = None
        self.__last_query_time = time.time()
        self.__last_query_time_chapter = time.time()
        self.__last_query_counter = 0
        self.__last_query_counter_chapter = 0
        self.__news_date = None
        self.__freeling = freeling
        self.__sep = sep
        self.__last_nil = last_nil
        self.__no_candidates = no_candidates
        self.__start_at = start_at
        self.__skip_header = skip_header
        self.__filter = filter

    def loadFileStopwords(self):
        """Load stop words from ./stopwords/<lang>.txt, skipping blanks and comments."""
        stopwords = []
        with open(f"./stopwords/{self.__lang}.txt") as stopwords_file:
            line = stopwords_file.readline()
            while line:
                line = line.rstrip('\n')
                if line == "" or line.startswith("# "):
                    line = stopwords_file.readline()
                    continue
                stopwords.append(line)
                line = stopwords_file.readline()
        return stopwords

    def verifyQueryTiming(self, entrypoint):
        """Dispatch rate limiting to the counter matching the endpoint."""
        if entrypoint == self.__sparql:
            self.verifyQueryTimingEnglish()
        else:
            self.verifyQueryTimingChapter()

    def verifyQueryTimingEnglish(self):
        """Throttle queries to the English endpoint: sleep once ~50 queries
        have been issued within one second of the last counter reset.

        BUG FIX: the original condition ``0 >= delta > 1`` chained two
        comparisons that can never both hold, so the throttle never fired.
        """
        current_time = time.time()
        if current_time - self.__last_query_time < 1:
            self.__last_query_counter += 1
            if self.__last_query_counter >= 50:
                time.sleep(1)
        else:
            self.__last_query_counter = 0
            self.__last_query_time = current_time

    def verifyQueryTimingChapter(self):
        """Same throttle as verifyQueryTimingEnglish, for the chapter endpoint
        (same BUG FIX applied)."""
        current_time = time.time()
        if current_time - self.__last_query_time_chapter < 1:
            self.__last_query_counter_chapter += 1
            if self.__last_query_counter_chapter >= 50:
                time.sleep(1)
        else:
            self.__last_query_counter_chapter = 0
            self.__last_query_time_chapter = current_time

    def askExistsQuery(self, wd_id, entrypoint):
        """ASK whether any typed DBpedia resource is sameAs the Wikidata id.

        Returns the boolean from the endpoint; retries up to 3 times on
        EndPointNotFound, exiting the process on repeated failure.
        """
        wikidata_complement = ""
        if entrypoint == self.__sparql:
            wikidata_complement = "www."
        elif self.__lang in self.__wikidata_www_chapters:
            wikidata_complement = "www."
        query_base = f"""
            PREFIX owl: <http://www.w3.org/2002/07/owl#>
            PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
            PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
            PREFIX foaf: <http://xmlns.com/foaf/0.1/>
            PREFIX wd: <http://{wikidata_complement}wikidata.org/entity/>
            PREFIX umbel-rc: <http://umbel.org/umbel/rc/>
            PREFIX dbc: <http://dbpedia.org/resource/Category>
            PREFIX dbo: <http://dbpedia.org/ontology/>
            PREFIX yago: <http://dbpedia.org/class/yago/>
            PREFIX dul: <http://www.ontologydesignpatterns.org/ont/dul/DUL.owl>
            ASK {{
                ?sub a ?type .
                ?sub owl:sameAs wd:{wd_id} .
            }}
        """
        self.verifyQueryTiming(entrypoint)
        entrypoint.setQuery(query_base)
        entrypoint.setReturnFormat(JSON)
        try_again = True
        try_counter = 0
        result = {"boolean": False}
        while try_again:
            try:
                result = entrypoint.query().convert()
                try_again = False
            except EndPointNotFound as e:
                print(e)
                time.sleep(60)
                try_counter += 1
                if try_counter > 2:
                    exit(1)
        return result["boolean"]

    def askDomainQuery(self, db_types, wd_id, entrypoint):
        """ASK whether the Wikidata id maps to a DBpedia resource whose type
        matches one of the classes allowed for the given NER tags."""
        if self.__supported_lang or entrypoint == self.__sparql_chapter:
            search_in = []
            for db_type in db_types:
                search_in += self.__db_types[db_type]
            search_in = set(search_in)
            search_in = ", ".join(search_in)
            search_in = f"FILTER(?type IN ({search_in}))"
        else:
            # No label coverage: do not constrain the type.
            search_in = ""
        wikidata_complement = ""
        if entrypoint == self.__sparql:
            wikidata_complement = "www."
        elif self.__lang in self.__wikidata_www_chapters:
            wikidata_complement = "www."
        query_base = f"""
            PREFIX owl: <http://www.w3.org/2002/07/owl#>
            PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
            PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
            PREFIX foaf: <http://xmlns.com/foaf/0.1/>
            PREFIX wd: <http://{wikidata_complement}wikidata.org/entity/>
            PREFIX umbel-rc: <http://umbel.org/umbel/rc/>
            PREFIX dbc: <http://dbpedia.org/resource/Category>
            PREFIX dbo: <http://dbpedia.org/ontology/>
            PREFIX yago: <http://dbpedia.org/class/yago/>
            PREFIX dul: <http://www.ontologydesignpatterns.org/ont/dul/DUL.owl>
            ASK {{
                ?sub a ?type .
                {search_in}
                ?sub owl:sameAs wd:{wd_id} .
            }}
        """
        self.verifyQueryTiming(entrypoint)
        entrypoint.setQuery(query_base)
        entrypoint.setReturnFormat(JSON)
        try_again = True
        try_counter = 0
        result = {"boolean": False}
        while try_again:
            try:
                result = entrypoint.query().convert()
                try_again = False
            except EndPointNotFound as e:
                print(e)
                time.sleep(60)
                try_counter += 1
                if try_counter > 2:
                    exit(1)
        return result["boolean"]

    def createQuery(self, db_types, wd_id, entrypoint):
        """Build the SELECT query fetching the label (and, for persons when an
        article date is known, the birth year) of the DBpedia resource that is
        sameAs the given Wikidata id."""
        sorting = ""
        sort_by = []
        select_elements = ["?sub", "?lbl"]
        query_dob = ""
        if self.__news_date is not None:
            for db_type in db_types:
                if db_type in ["pers", "per"]:
                    # Keep only the year part of the birth date.
                    query_dob = f"""
                    OPTIONAL{{
                        ?sub dbo:birthDate ?dob
                        BIND(REPLACE(?dob, "-.+$", "") AS ?year)
                    }}
                    """
                    select_elements.append("?year")
        if len(sort_by) > 0:
            sort_by = " ".join(sort_by)
            sorting = f"ORDER BY {sort_by}"
        select = " ".join(select_elements)
        wikidata_complement = ""
        if entrypoint == self.__sparql:
            wikidata_complement = "www."
        elif self.__lang in self.__wikidata_www_chapters:
            wikidata_complement = "www."
        query_base = f"""
            PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
            PREFIX wd: <http://{wikidata_complement}wikidata.org/entity/>
            PREFIX dbo: <http://dbpedia.org/ontology/>
            SELECT {select} WHERE {{
                ?sub owl:sameAs wd:{wd_id}.
                ?sub rdfs:label ?lbl .
                FILTER(langMatches(lang(?lbl), "{self.__lang}"))
                {query_dob}
            }}
            GROUP BY ?sub
            {sorting}
        """
        return query_base

    def runQuery(self, nel_query, wd_id, entrypoint):
        """Run a label query.

        Returns ``{wd_id: label-or-None}``, or ``None`` when the candidate is
        a person born more than 10 years after the article date (meaning the
        id must be discarded altogether).
        """
        self.verifyQueryTiming(entrypoint)
        entrypoint.setQuery(nel_query)
        entrypoint.setReturnFormat(JSON)
        try_again = True
        try_counter = 0
        parsed_results = None
        while try_again:
            try:
                results = entrypoint.query().convert()
                try_again = False
                parsed_results = {}
                if len(results["results"]["bindings"]) == 0:
                    parsed_results[wd_id] = None
                else:
                    for result in results["results"]["bindings"]:
                        if "year" in result:
                            year = result["year"]["value"]
                            if year == "":
                                year = 0  # The year was negative (BCE).
                            if int(year) > self.__news_date + 10:
                                return None
                        parsed_results[wd_id] = result["lbl"]["value"]
            except EndPointNotFound as e:
                print(e)
                time.sleep(60)
                try_counter += 1
                if try_counter > 2:
                    exit(1)
        return parsed_results

    def processQuery(self, supported_tags, wd_id, wd_ids_to_skip, results, last_position, entrypoint):
        """Query a label for wd_id, falling back from the English endpoint to
        the chapter endpoint when no label is found; records discarded ids in
        ``wd_ids_to_skip`` and labels in ``results`` (both mutated in place)."""
        nel_query = self.createQuery(supported_tags, wd_id, entrypoint)
        temp_results = self.runQuery(nel_query, wd_id, entrypoint)
        if temp_results is None:  # The person is too recent: skip the id.
            wd_ids_to_skip.add(wd_id)
        elif temp_results[wd_id] is None:
            if entrypoint == self.__sparql and self.__sparql_chapter is not None:
                self.processQuery(supported_tags, wd_id, wd_ids_to_skip, results, last_position, self.__sparql_chapter)
            else:
                results.update(temp_results)
        else:
            results.update(temp_results)

    def processToken(self, tokens, tags_to_process, wd_ids, shift_to_comment=False, nested=False):
        """Validate and re-rank the candidate ids of one mention.

        Returns the re-ranked candidates joined with "|".
        """
        results = {}
        wd_ids_to_skip = set()
        last_position = []
        tokens_as_string = ""
        ask_dob = False
        # NOTE(review): supported_tags is only bound when self.__filter is
        # True; with filter=False and a supported language the calls below
        # would raise NameError — confirm that configuration is unused.
        if self.__filter:
            supported_tags = []
            for tag in tags_to_process:
                if tag in self.__db_types.keys():
                    supported_tags.append(tag)
                if tag in ["per", "pers"]:
                    ask_dob = True
            if len(supported_tags) == 0:
                # Tag outside the DBpedia mapping: keep candidates untouched.
                return "|".join(wd_ids)
        label_keys = []
        comment_keys = []
        if self.__supported_lang or self.__sparql_chapter is not None:
            # Build the surface form used for fuzzy matching, skipping digits.
            for token in tokens:
                if regex.search(r"\d", token):
                    continue
                if token == ",":
                    if not nested and len(label_keys) > 0:
                        shift_to_comment = True
                    elif nested and len(comment_keys) > 0:
                        shift_to_comment = False
                if not shift_to_comment:
                    tokens_as_string += f"{token} "
            for wd_id in wd_ids:
                if wd_id in ["_", ""]:
                    wd_ids_to_skip.add(wd_id)
                elif wd_id in ["NIL"]:
                    continue
                elif self.askExistsQuery(wd_id, self.__sparql):
                    if self.askDomainQuery(supported_tags, wd_id, self.__sparql):
                        if self.__search_name or ask_dob:
                            if self.__supported_lang:
                                self.processQuery(supported_tags, wd_id, wd_ids_to_skip, results, last_position, self.__sparql)
                            elif self.__sparql_chapter is not None:
                                self.processQuery(supported_tags, wd_id, wd_ids_to_skip, results, last_position, self.__sparql_chapter)
                            else:
                                results[wd_id] = None
                        else:
                            results[wd_id] = None
                    else:
                        # Wrong domain: demote to the end of the ranking.
                        last_position.append(wd_id)
                        results[wd_id] = None
                elif self.__sparql_chapter is not None and self.askExistsQuery(wd_id, self.__sparql_chapter):
                    if self.askDomainQuery(supported_tags, wd_id, self.__sparql_chapter):
                        if self.__search_name or ask_dob:
                            self.processQuery(supported_tags, wd_id, wd_ids_to_skip, results, last_position, self.__sparql_chapter)
                        else:
                            results[wd_id] = None
                    else:
                        last_position.append(wd_id)
                        results[wd_id] = None
        else:
            # No endpoint can validate this language: keep all real ids.
            for wd_id in wd_ids:
                if wd_id in ["_", ""]:
                    wd_ids_to_skip.add(wd_id)
                elif wd_id in ["NIL"]:
                    continue
                else:
                    results[wd_id] = None
        return self.processResults(results, wd_ids_to_skip, last_position, wd_ids, tokens_as_string)

    def processResults(self, results, wd_ids_to_skip, last_position, wd_ids, tokens_as_string):
        """Assemble the final "|"-joined ranking: best fuzzy match first,
        demoted ids last, truncated to no_candidates, optionally ending in NIL."""
        final_results = []
        catch_nil = False
        if (self.__supported_lang or self.__sparql_chapter is not None) and self.__filter:
            best_result = ""
            if self.__search_name:
                best_result = "NIL"
                best_distance = 500  # Real distances are always smaller.
                for (key, value) in results.items():
                    if value is not None and value != "":
                        # 100 - WRatio turns similarity into a distance.
                        distance = 100 - fuzz.WRatio(value.lower(), tokens_as_string.lower())
                        if distance < best_distance:
                            best_distance = distance
                            best_result = key
            for wd_id in wd_ids:
                if wd_id == best_result or wd_id in wd_ids_to_skip or wd_id in last_position:
                    continue
                if wd_id == "NIL":
                    catch_nil = True
                final_results.append(wd_id)
            if len(last_position) > 0 and best_result != "NIL" and not catch_nil:
                final_results.append("NIL")
                catch_nil = True
            for wd_id in last_position:
                final_results.append(wd_id)
            if best_result != "":
                final_results.insert(0, best_result)
                if best_result == "NIL":
                    catch_nil = True
        else:
            for (key, value) in results.items():
                final_results.append(key)
        if self.__last_nil:
            if not catch_nil:
                while len(final_results) > self.__no_candidates - 1:
                    final_results.pop()
                final_results.append("NIL")
            else:
                while len(final_results) > self.__no_candidates:
                    final_results.pop()
        else:
            while len(final_results) > self.__no_candidates:
                final_results.pop()
        return "|".join(final_results)

    def readFile(self):
        """Stream the input CoNLL file, re-ranking each entity's candidates
        and writing the result to the output file (append mode when resuming).

        BUG FIX: with start_at == 0 and skip_header False the original code
        used `line` before assignment (NameError); the header echo is now
        guarded.
        """
        if self.__start_at == 0:
            self.__output_file = open(self.__output_file_path, "w")
        else:
            self.__output_file = open(self.__output_file_path, "a")
        skip_lines = 0
        with open(self.__input_file_path) as input_file:
            line = None
            if self.__start_at > 0:
                while self.__start_at > skip_lines:
                    line = input_file.readline()
                    skip_lines += 1
            elif self.__skip_header:
                line = input_file.readline()  # Header
            if line is not None:
                line = line.rstrip('\n')
                if not self.__freeling:
                    self.printOutput(line)
            line = input_file.readline()
            tokens = []
            rows = []
            tag_lit = ""
            wd_ids = []
            tags_to_process = []
            while line:
                line = line.rstrip('\n')
                if line == "":
                    # Sentence boundary: flush the pending entity, if any.
                    if len(tokens) > 0:
                        result = self.processToken(tokens, tags_to_process, wd_ids)
                        self.generateNELOutput(rows, result)
                        tokens = []
                        rows = []
                        tag_lit = ""
                        tags_to_process = []
                        wd_ids = []
                    self.printOutput(line)
                elif self.__comment_token is not None and line.startswith(self.__comment_token):
                    if line.startswith("# date = "):
                        self.__news_date = int(regex.search(r"\d\d\d\d", line)[0])
                    if not self.__freeling:
                        if tag_lit == "":
                            self.printOutput(line)
                        else:
                            # Comment inside an entity: replay it after the entity.
                            rows.append(line)
                else:
                    columns = line.split("\t")
                    if columns[self.__lit_col] == "O":
                        # Outside any entity: flush and echo the row as-is.
                        if len(tokens) > 0:
                            result = self.processToken(tokens, tags_to_process, wd_ids)
                            self.generateNELOutput(rows, result)
                            tokens = []
                            rows = []
                            tag_lit = ""
                            tags_to_process = []
                            wd_ids = []
                        self.generateNELOutput([columns], None)
                    else:
                        if columns[self.__lit_col][0] == "B":
                            # New entity starts: flush the previous one.
                            if len(tokens) > 0:
                                result = self.processToken(tokens, tags_to_process, wd_ids)
                                self.generateNELOutput(rows, result)
                                tokens = []
                                rows = []
                                tag_lit = ""
                                tags_to_process = []
                                wd_ids = []
                            tag_lit = columns[self.__lit_col][2:].lower()
                            tags_to_process.append(tag_lit)
                            wd_ids = columns[self.__wd_col].split(self.__sep)
                        if columns[self.__lit_col][0] == "I" and tag_lit == "":  # Tolerate I- without a preceding B-.
                            tag_lit = columns[self.__lit_col][2:].lower()
                            tags_to_process.append(tag_lit)
                            wd_ids = columns[self.__wd_col].split(self.__sep)
                        if self.__meto_col is not None and columns[self.__meto_col][0] != "O":
                            # Also consider the metonymic reading's tag.
                            meto_tag = columns[self.__meto_col][2:].lower()
                            meto_tag = regex.sub(r"\..+$", "", meto_tag)
                            if meto_tag not in tags_to_process:
                                tags_to_process.append(meto_tag)
                        rows.append(columns)
                        tokens.append(columns[self.__tokens_col])
                line = input_file.readline()
            if len(tokens) > 0:
                result = self.processToken(tokens, tags_to_process, wd_ids)
                self.generateNELOutput(rows, result)
        self.__output_file.close()

    def printOutput(self, line):
        """Write one line to the output file."""
        self.__output_file.write(f"{line}\n")

    def generateNELOutput(self, rows, result):
        """Write the buffered rows of one entity, replacing the Wikidata
        column(s) with the re-ranked candidates when available."""
        for columns in rows:
            if type(columns) is list:
                if result is not None:
                    columns[self.__wd_col] = result
                    if self.__meto_col is not None and columns[self.__meto_col] != "O":  # For METO
                        columns[self.__wd_col + 1] = result
                if len(columns) == 10 and self.__freeling:
                    del columns[-1]
                nel_output = "\t".join(columns)
                self.printOutput(nel_output)
            else:
                # Replayed comment line.
                self.printOutput(columns)
# Driver: fill in the language code and the file paths before running.
input_path = ""
output_path = ""
comment_symbol = "# "
language = ""

# Guarded so that importing this module no longer runs the whole pipeline
# (the original executed readFile() as a module-level side effect).
if __name__ == "__main__":
    nel = FilteringNEL(language, input_path, output_path, comment_symbol, freeling=False)
    nel.readFile()
| StarcoderdataPython |
4811162 | <gh_stars>1-10
import requests
import uwsgi
"""
uwsgi --module "microWsgi.websockets:application" --http :5050 --stats stats.socket
curl -i -N \
-H "Connection: Upgrade" \
-H "Upgrade: websocket" \
-H "Host: 127.0.0.1:5050" \
-H "Origin: 127.0.0.1:5050" \
127.0.0.1:5050
"""
def application(env, start_response):
    """WSGI entry point: upgrade the connection to a WebSocket and echo
    every received frame back to the client, forever."""
    uwsgi.websocket_handshake(env['HTTP_SEC_WEBSOCKET_KEY'], env.get('HTTP_ORIGIN', ''))
    while True:
        frame = uwsgi.websocket_recv()
        uwsgi.websocket_send(frame)
| StarcoderdataPython |
3241070 | <gh_stars>0
def func(a, b, c, d):
    """Print and return the difference between the exact product a*b*c*d
    and the product of the four operands rounded to the nearest integer.

    Note: round() uses banker's rounding (round-half-to-even), so e.g.
    round(0.5) == 0 — the "weird things" the original comment alluded to.

    Improvement: the original computed the difference and returned None;
    it now returns the difference as well (backward compatible).
    """
    exact = a * b * c * d
    rounded = round(a) * round(b) * round(c) * round(d)
    difference = exact - rounded
    print(difference)
    return difference
| StarcoderdataPython |
1606735 | from setuptools import setup
# Packaging metadata for the `mymessage` distribution.
setup(name='mymessage',
      version='0.1',
      description='Helper for analyzing iMessage data',
      # NOTE(review): url/author/license look like placeholders copied from
      # the "funniest" packaging tutorial — confirm before publishing.
      url='http://github.com/storborg/funniest',
      author='<NAME>',
      author_email='<EMAIL>',
      license='/',
      packages=['mymessage'],
      # Not zip-safe: install as a regular directory, not a zipped egg.
      zip_safe=False)
128834 | <gh_stars>10-100
#!/usr/bin/python
#
# -*- coding: utf-8 -*-
# Copyright 2019 SAP SE or an SAP affiliate company. All rights reserved
# ============================================================================
from collections import defaultdict
from xai.data.exceptions import ItemDataTypeNotSupported
from xai.data.explorer.abstract_analyzer import AbstractDataAnalyzer
from xai.data.explorer.categorical.categorical_stats import CategoricalStats
class CategoricalDataAnalyzer(AbstractDataAnalyzer):
    """
    Analyzer for categorical data: every value fed in contributes to a
    running frequency count, which can be exported as a CategoricalStats.
    """
    SUPPORTED_TYPES = [str, int]

    def __init__(self):
        super(CategoricalDataAnalyzer, self).__init__()
        self._frequency_count = defaultdict(int)

    def feed(self, value: int or str):
        """
        Add one occurrence of ``value`` to the running frequency count.

        Args:
            value: categorical value to count (str or int only)
        """
        # Exact type check (not isinstance) so e.g. bool is rejected.
        supported = CategoricalDataAnalyzer.SUPPORTED_TYPES
        if type(value) not in supported:
            raise ItemDataTypeNotSupported(type(value), type(self), supported)
        self._frequency_count[value] += 1

    def get_statistics(self) -> CategoricalStats:
        """
        Snapshot the accumulated counts.

        Returns:
            A CategoricalStats wrapping the frequency-count mapping.
        """
        return CategoricalStats(frequency_count=self._frequency_count)
| StarcoderdataPython |
6109 | <reponame>Ali-Tahir/sentry
from __future__ import absolute_import
import six
import string
import warnings
import pytz
from collections import OrderedDict
from dateutil.parser import parse as parse_date
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from hashlib import md5
from semaphore.processing import StoreNormalizer
from sentry import eventtypes
from sentry.db.models import (
BoundedBigIntegerField,
BoundedIntegerField,
Model,
NodeData,
NodeField,
sane_repr,
)
from sentry.db.models.manager import EventManager
from sentry.interfaces.base import get_interfaces
from sentry.utils import json
from sentry.utils.cache import memoize
from sentry.utils.canonical import CanonicalKeyDict, CanonicalKeyView
from sentry.utils.safe import get_path
from sentry.utils.strings import truncatechars
class EventDict(CanonicalKeyDict):
    """
    Canonical-key event payload that is re-normalized on construction.

    Instantiating this dict sends the event body through the basic
    (Rust-based) type/schema validation ("re-normalization") unless the
    payload is already wrapped in an EventDict — either directly or inside
    a NodeData — so anything bound to ``Event.data`` is guaranteed to fit
    the event schema.
    """

    def __init__(self, data, skip_renormalization=False, **kwargs):
        already_normalized = isinstance(data, EventDict) or (
            isinstance(data, NodeData) and isinstance(data.data, EventDict)
        )
        if not (skip_renormalization or already_normalized):
            # Run the payload through the store normalizer exactly once.
            data = StoreNormalizer(
                is_renormalize=True, enable_trimming=False
            ).normalize_event(dict(data))
        CanonicalKeyDict.__init__(self, data, **kwargs)
class EventCommon(object):
    """
    Methods and properties common to both Event and SnubaEvent.
    """

    @classmethod
    def generate_node_id(cls, project_id, event_id):
        """
        Returns a deterministic node_id for this event based on the project_id
        and event_id which together are globally unique. The event body should
        be saved under this key in nodestore so it can be retrieved using the
        same generated id when we only have project_id and event_id.
        """
        # NOTE(review): md5() takes bytes on Python 3 — this six-era code
        # assumes the py2 str path.
        return md5("{}:{}".format(project_id, event_id)).hexdigest()

    # TODO (alex) We need a better way to cache these properties. functools32
    # doesn't quite do the trick as there is a reference bug with unsaved
    # models. But the current _group_cache thing is also clunky because these
    # properties need to be stripped out in __getstate__.
    @property
    def group(self):
        """Lazily fetch and cache the Group this event belongs to (or None)."""
        from sentry.models import Group

        if not self.group_id:
            return None
        if not hasattr(self, "_group_cache"):
            self._group_cache = Group.objects.get(id=self.group_id)
        return self._group_cache

    @group.setter
    def group(self, group):
        # Keep group_id and the cache in sync.
        self.group_id = group.id
        self._group_cache = group

    @property
    def project(self):
        """Lazily fetch and cache the Project this event belongs to."""
        from sentry.models import Project

        if not hasattr(self, "_project_cache"):
            self._project_cache = Project.objects.get(id=self.project_id)
        return self._project_cache

    @project.setter
    def project(self, project):
        # Accepts None to detach the event from any project.
        if project is None:
            self.project_id = None
        else:
            self.project_id = project.id
        self._project_cache = project

    def get_interfaces(self):
        """Return a canonical-key view over the event's parsed interfaces."""
        return CanonicalKeyView(get_interfaces(self.data))

    @memoize
    def interfaces(self):
        # Cached on first access via memoize.
        return self.get_interfaces()

    def get_interface(self, name):
        """Return a single parsed interface by name, or None."""
        return self.interfaces.get(name)

    def get_legacy_message(self):
        # TODO(mitsuhiko): remove this code once it's unused. It's still
        # being used by plugin code and once the message rename is through
        # plugins should instead switch to the actual message attribute or
        # this method could return what currently is real_message.
        return (
            get_path(self.data, "logentry", "formatted")
            or get_path(self.data, "logentry", "message")
            or self.message
        )

    def get_event_type(self):
        """
        Return the type of this event.

        See ``sentry.eventtypes``.
        """
        return self.data.get("type", "default")

    def get_event_metadata(self):
        """
        Return the metadata of this event.

        See ``sentry.eventtypes``.
        """
        # For some inexplicable reason we have some cases where the data
        # is completely empty. In that case we want to hobble along
        # further.
        return self.data.get("metadata") or {}

    def get_grouping_config(self):
        """Returns the event grouping config."""
        from sentry.grouping.api import get_grouping_config_dict_for_event_data

        return get_grouping_config_dict_for_event_data(self.data, self.project)

    def get_hashes(self, force_config=None):
        """
        Returns the calculated hashes for the event.  This uses the stored
        information if available.  Grouping hashes will take into account
        fingerprinting and checksums.
        """
        # If we have hashes stored in the data we use them, otherwise we
        # fall back to generating new ones from the data.  We can only use
        # this if we do not force a different config.
        if force_config is None:
            hashes = self.data.get("hashes")
            if hashes is not None:
                return hashes

        # NOTE(review): on Python 3 filter() returns an iterator, but callers
        # index the result (see get_primary_hash) — py2-era assumption.
        return filter(
            None, [x.get_hash() for x in self.get_grouping_variants(force_config).values()]
        )

    def get_grouping_variants(self, force_config=None, normalize_stacktraces=False):
        """
        This is similar to `get_hashes` but will instead return the
        grouping components for each variant in a dictionary.

        If `normalize_stacktraces` is set to `True` then the event data will be
        modified for `in_app` in addition to event variants being created.  This
        means that after calling that function the event data has been modified
        in place.
        """
        from sentry.grouping.api import get_grouping_variants_for_event, load_grouping_config
        from sentry.stacktraces.processing import normalize_stacktraces_for_grouping

        # Forcing configs has two separate modes.  One is where just the
        # config ID is given in which case it's merged with the stored or
        # default config dictionary
        if force_config is not None:
            if isinstance(force_config, six.string_types):
                stored_config = self.get_grouping_config()
                config = dict(stored_config)
                config["id"] = force_config
            else:
                config = force_config

        # Otherwise we just use the same grouping config as stored.  if
        # this is None the `get_grouping_variants_for_event` will fill in
        # the default.
        else:
            config = self.data.get("grouping_config")

        config = load_grouping_config(config)
        if normalize_stacktraces:
            normalize_stacktraces_for_grouping(self.data, config)

        return get_grouping_variants_for_event(self, config)

    def get_primary_hash(self):
        # TODO: This *might* need to be protected from an IndexError?
        return self.get_hashes()[0]

    @property
    def title(self):
        """Human-readable title derived from the event type and metadata."""
        # also see event_manager.py which inserts this for snuba
        et = eventtypes.get(self.get_event_type())()
        return et.get_title(self.get_event_metadata())

    @property
    def culprit(self):
        """The culprit string, falling back to the group's when not stored."""
        # For a while events did not save the culprit
        if self.group_id:
            return self.data.get("culprit") or self.group.culprit
        return self.data.get("culprit")

    @property
    def location(self):
        """Location string derived from the event type and metadata."""
        # also see event_manager.py which inserts this for snuba
        et = eventtypes.get(self.get_event_type())()
        return et.get_location(self.get_event_metadata())

    @property
    def real_message(self):
        # XXX(mitsuhiko): this is a transitional attribute that should be
        # removed.  `message` will be renamed to `search_message` and this
        # will become `message`.
        return (
            get_path(self.data, "logentry", "formatted")
            or get_path(self.data, "logentry", "message")
            or ""
        )

    @property
    def organization(self):
        """The organization that owns this event's project."""
        return self.project.organization

    @property
    def version(self):
        """Protocol version of the stored payload (defaults to "5")."""
        return self.data.get("version", "5")

    @property
    def ip_address(self):
        """Client IP: the user interface's ip_address, else REMOTE_ADDR."""
        ip_address = get_path(self.data, "user", "ip_address")
        if ip_address:
            return ip_address

        remote_addr = get_path(self.data, "request", "env", "REMOTE_ADDR")
        if remote_addr:
            return remote_addr

        return None

    @property
    def tags(self):
        """Sorted list of (key, value) tag pairs with None entries dropped."""
        try:
            rv = sorted(
                [
                    (t, v)
                    for t, v in get_path(self.data, "tags", filter=True) or ()
                    if t is not None and v is not None
                ]
            )
            return rv
        except ValueError:
            # at one point Sentry allowed invalid tag sets such as (foo, bar)
            # vs ((tag, foo), (tag, bar))
            return []

    # For compatibility, still used by plugins.
    def get_tags(self):
        return self.tags

    def get_tag(self, key):
        """Return the value for the given tag key, or None if unset."""
        for t, v in self.get_tags():
            if t == key:
                return v
        return None

    @property
    def release(self):
        """The release associated with this event (sentry:release tag)."""
        return self.get_tag("sentry:release")

    @property
    def dist(self):
        """The distribution associated with this event (sentry:dist tag)."""
        return self.get_tag("sentry:dist")

    def get_raw_data(self):
        """Returns the internal raw event data dict."""
        return dict(self.data.items())

    @property
    def size(self):
        """Approximate payload size: length of the JSON-serialized data."""
        return len(json.dumps(dict(self.data)))

    @property
    def transaction(self):
        """The transaction name tag, if any."""
        return self.get_tag("transaction")

    def get_email_subject(self):
        """Render the notification email subject from the project's template
        (or the default), truncated to 128 chars and utf-8 encoded."""
        template = self.project.get_option("mail:subject_template")
        if template:
            template = EventSubjectTemplate(template)
        else:
            template = DEFAULT_SUBJECT_TEMPLATE
        return truncatechars(template.safe_substitute(EventSubjectTemplateData(self)), 128).encode(
            "utf-8"
        )

    def get_environment(self):
        """Lazily fetch and cache the Environment row for this event's
        "environment" tag within the owning organization."""
        from sentry.models import Environment

        if not hasattr(self, "_environment_cache"):
            self._environment_cache = Environment.objects.get(
                organization_id=self.project.organization_id,
                name=Environment.get_name_or_default(self.get_tag("environment")),
            )

        return self._environment_cache

    def get_minimal_user(self):
        """
        A minimal 'User' interface object that gives us enough information
        to render a user badge.
        """
        return self.get_interface("user")

    def as_dict(self):
        """Returns the data in normalized form for external consumers."""
        # We use a OrderedDict to keep elements ordered for a potential JSON serializer
        data = OrderedDict()
        data["event_id"] = self.event_id
        data["project"] = self.project_id
        data["release"] = self.release
        data["dist"] = self.dist
        data["platform"] = self.platform
        data["message"] = self.real_message
        data["datetime"] = self.datetime
        data["time_spent"] = self.time_spent
        # Strip the "sentry:" prefix from internal tag keys.
        data["tags"] = [(k.split("sentry:", 1)[-1], v) for (k, v) in self.tags]
        for k, v in sorted(six.iteritems(self.data)):
            if k in data:
                continue
            if k == "sdk":
                # Drop the PII-ish client_ip from the sdk payload.
                v = {v_k: v_v for v_k, v_v in six.iteritems(v) if v_k != "client_ip"}
            data[k] = v

        # for a long time culprit was not persisted.  In those cases put
        # the culprit in from the group.
        if data.get("culprit") is None and self.group_id:
            data["culprit"] = self.group.culprit

        # Override title and location with dynamically generated data
        data["title"] = self.title
        data["location"] = self.location

        return data

    # ============================================
    # DEPRECATED
    # ============================================
    @property
    def level(self):
        # we might want to move to this:
        # return LOG_LEVELS_MAP.get(self.get_level_display()) or self.group.level
        if self.group:
            return self.group.level
        else:
            return None

    def get_level_display(self):
        # we might want to move to this:
        # return self.get_tag('level') or self.group.get_level_display()
        if self.group:
            return self.group.get_level_display()
        else:
            return None

    # deprecated accessors
    @property
    def logger(self):
        warnings.warn("Event.logger is deprecated. Use Event.tags instead.", DeprecationWarning)
        return self.get_tag("logger")

    @property
    def site(self):
        warnings.warn("Event.site is deprecated. Use Event.tags instead.", DeprecationWarning)
        return self.get_tag("site")

    @property
    def server_name(self):
        warnings.warn(
            "Event.server_name is deprecated. Use Event.tags instead.", DeprecationWarning
        )
        return self.get_tag("server_name")

    @property
    def checksum(self):
        warnings.warn("Event.checksum is no longer used", DeprecationWarning)
        return ""

    def error(self):  # TODO why is this not a property?
        warnings.warn("Event.error is deprecated, use Event.title", DeprecationWarning)
        return self.title

    # Django admin column label for the error() accessor.
    error.short_description = _("error")

    @property
    def message_short(self):
        warnings.warn("Event.message_short is deprecated, use Event.title", DeprecationWarning)
        return self.title
class SnubaEvent(EventCommon):
"""
An event backed by data stored in snuba.
This is a readonly event and does not support event creation or save.
The basic event data is fetched from snuba, and the event body is
fetched from nodestore and bound to the data property in the same way
as a regular Event.
"""
# The minimal list of columns we need to get from snuba to bootstrap an
# event. If the client is planning on loading the entire event body from
# nodestore anyway, we may as well only fetch the minimum from snuba to
# avoid duplicated work.
minimal_columns = ["event_id", "group_id", "project_id", "timestamp"]
# A list of all useful columns we can get from snuba.
selected_columns = minimal_columns + [
"culprit",
"location",
"message",
"platform",
"title",
"type",
# Required to provide snuba-only tags
"tags.key",
"tags.value",
# Required to provide snuba-only 'user' interface
"email",
"ip_address",
"user_id",
"username",
]
__repr__ = sane_repr("project_id", "group_id")
def __init__(self, snuba_values):
"""
When initializing a SnubaEvent, think about the attributes you
might need to access on it. If you only need a few properties, and
they are all available in snuba, then you should use
`SnubaEvent.selected_colums` (or a subset depending on your needs)
But if you know you are going to need the entire event body anyway
(which requires a nodestore lookup) you may as well just initialize
the event with `SnubaEvent.minimal_colums` and let the rest of of
the attributes come from nodestore.
"""
assert all(k in snuba_values for k in SnubaEvent.minimal_columns)
# self.snuba_data is a dict of all the stuff we got from snuba
self.snuba_data = snuba_values
# self.data is a (lazy) dict of everything we got from nodestore
node_id = SnubaEvent.generate_node_id(
self.snuba_data["project_id"], self.snuba_data["event_id"]
)
self.data = NodeData(None, node_id, data=None, wrapper=EventDict)
def __getattr__(self, name):
"""
Depending on what snuba data this event was initialized with, we may
have the data available to return, or we may have to look in the
`data` dict (which would force a nodestore load). All unresolved
self.foo type accesses will come through here.
"""
if name in ("_project_cache", "_group_cache", "_environment_cache"):
raise AttributeError()
if name in self.snuba_data:
return self.snuba_data[name]
else:
return self.data[name]
# ============================================
# Snuba-only implementations of properties that
# would otherwise require nodestore data.
# ============================================
@property
def tags(self):
"""
Override of tags property that uses tags from snuba rather than
the nodestore event body. This might be useful for implementing
tag deletions without having to rewrite nodestore blobs.
"""
if "tags.key" in self.snuba_data and "tags.value" in self.snuba_data:
keys = getattr(self, "tags.key")
values = getattr(self, "tags.value")
if keys and values and len(keys) == len(values):
return sorted(zip(keys, values))
else:
return []
else:
return super(SnubaEvent, self).tags
    def get_minimal_user(self):
        """Build a ``User`` interface from snuba columns only.

        The attributes below resolve via ``__getattr__`` from ``snuba_data``
        when available, so no nodestore load is required.
        """
        from sentry.interfaces.user import User
        return User.to_python(
            {
                "id": self.user_id,
                "email": self.email,
                "username": self.username,
                "ip_address": self.ip_address,
            }
        )
# If the data for these is available from snuba, we assume
# it was already normalized on the way in and we can just return
# it, otherwise we defer to EventCommon implementation.
    def get_event_type(self):
        """Return the event type, preferring the snuba "type" column so the
        nodestore body does not have to be loaded."""
        if "type" in self.snuba_data:
            return self.snuba_data["type"]
        return super(SnubaEvent, self).get_event_type()
    @property
    def ip_address(self):
        """IP address from snuba when selected, else the EventCommon value."""
        if "ip_address" in self.snuba_data:
            return self.snuba_data["ip_address"]
        return super(SnubaEvent, self).ip_address
    @property
    def title(self):
        """Title from snuba when selected, else the EventCommon value."""
        if "title" in self.snuba_data:
            return self.snuba_data["title"]
        return super(SnubaEvent, self).title
    @property
    def culprit(self):
        """Culprit from snuba when selected, else the EventCommon value."""
        if "culprit" in self.snuba_data:
            return self.snuba_data["culprit"]
        return super(SnubaEvent, self).culprit
    @property
    def location(self):
        """Location from snuba when selected, else the EventCommon value."""
        if "location" in self.snuba_data:
            return self.snuba_data["location"]
        return super(SnubaEvent, self).location
# ====================================================
# Snuba implementations of the django fields on Event
# ====================================================
    @property
    def datetime(self):
        """
        Reconstruct the datetime of this event from the snuba timestamp
        """
        # self.timestamp is served from snuba_data via __getattr__.
        # dateutil seems to use tzlocal() instead of UTC even though the string
        # ends with '+00:00', so just replace the TZ with UTC because we know
        # all timestamps from snuba are UTC.
        return parse_date(self.timestamp).replace(tzinfo=pytz.utc)
    @property
    def time_spent(self):
        """Mirror of the django Event.time_spent column; always None for
        snuba-backed events."""
        return None
    @property
    def message(self):
        """Message from snuba when selected; otherwise read from the (lazy)
        nodestore payload, which may trigger a load."""
        if "message" in self.snuba_data:
            return self.snuba_data["message"]
        return self.data.get("message")
    @property
    def platform(self):
        """Platform from snuba when selected; otherwise from the (lazy)
        nodestore payload."""
        if "platform" in self.snuba_data:
            return self.snuba_data["platform"]
        return self.data.get("platform")
    @property
    def id(self):
        """Stand-in for the django primary key."""
        # Because a snuba event will never have a django row id, just return
        # the hex event_id here. We should be moving to a world where we never
        # have to reference the row id anyway.
        return self.event_id
    def save(self):
        """Snuba events are read-only snapshots; persisting is unsupported."""
        raise NotImplementedError
class Event(EventCommon, Model):
    """
    An event backed by data stored in postgres.
    """
    __core__ = False
    # Nullable for historical rows; events are grouped under a Group.
    group_id = BoundedBigIntegerField(blank=True, null=True)
    # Hex event id; the legacy DB column name "message_id" is preserved.
    event_id = models.CharField(max_length=32, null=True, db_column="message_id")
    project_id = BoundedBigIntegerField(blank=True, null=True)
    message = models.TextField()
    platform = models.CharField(max_length=64, null=True)
    datetime = models.DateTimeField(default=timezone.now, db_index=True)
    time_spent = BoundedIntegerField(null=True)
    # Full event payload, stored in nodestore and referenced by project id.
    data = NodeField(
        blank=True,
        null=True,
        ref_func=lambda x: x.project_id or x.project.id,
        ref_version=2,
        wrapper=EventDict,
    )
    objects = EventManager()
    class Meta:
        app_label = "sentry"
        db_table = "sentry_message"
        verbose_name = _("message")
        verbose_name_plural = _("messages")
        unique_together = (("project_id", "event_id"),)
        index_together = (("group_id", "datetime"),)
    __repr__ = sane_repr("project_id", "group_id")
    def __getstate__(self):
        """Strip lazily-populated caches before pickling."""
        state = Model.__getstate__(self)
        # do not pickle cached info. We want to fetch this on demand
        # again. In particular if we were to pickle interfaces we would
        # pickle a CanonicalKeyView which old sentry workers do not know
        # about
        state.pop("_project_cache", None)
        state.pop("_environment_cache", None)
        state.pop("_group_cache", None)
        state.pop("interfaces", None)
        return state
class EventSubjectTemplate(string.Template):
    """string.Template whose placeholders may carry an optional "tag:"
    prefix (e.g. ``$tag:release``), resolved by EventSubjectTemplateData."""
    idpattern = r"(tag:)?[_a-z][_a-z0-9]*"
class EventSubjectTemplateData(object):
    """Mapping used to resolve $placeholders in an EventSubjectTemplate."""
    # Friendly placeholder names mapped to internal tag keys.
    tag_aliases = {"release": "sentry:release", "dist": "sentry:dist", "user": "sentry:user"}
    def __init__(self, event):
        self.event = event
    def __getitem__(self, name):
        # "tag:foo" placeholders look up the tag value on the event.
        if name.startswith("tag:"):
            name = name[4:]
            value = self.event.get_tag(self.tag_aliases.get(name, name))
            if value is None:
                # KeyError makes Template.safe_substitute() leave the
                # placeholder untouched (and substitute() raise).
                raise KeyError
            return six.text_type(value)
        elif name == "project":
            return self.event.project.get_full_name()
        elif name == "projectID":
            return self.event.project.slug
        elif name == "shortID" and self.event.group_id:
            return self.event.group.qualified_short_id
        elif name == "orgID":
            return self.event.organization.slug
        elif name == "title":
            return self.event.title
        raise KeyError
DEFAULT_SUBJECT_TEMPLATE = EventSubjectTemplate("$shortID - $title")
| StarcoderdataPython |
1665707 | from kratos import Interface, Generator, always_ff, posedge, verilog
import tempfile
import os
class ConfigInterface(Interface):
    """SystemVerilog interface with Master/Slave modports for a simple
    8-bit read/write configuration bus."""
    def __init__(self):
        Interface.__init__(self, "Config")
        width = 8
        # local variables
        read = self.var("read_data", width)
        write = self.var("write_data", width)
        r_en = self.var("r_en", 1)
        w_en = self.var("w_en", 1)
        # common ports
        clk = self.clock("clk")
        # define master -> slave ports
        # and slave -> master ports
        m_to_s = [write, r_en, w_en]
        s_to_m = [read]
        # define master and slave
        self.master = self.modport("Master")
        self.slave = self.modport("Slave")
        # port directions are mirrored between the two modports
        for port in m_to_s:
            self.master.set_output(port)
            self.slave.set_input(port)
        for port in s_to_m:
            self.master.set_input(port)
            self.slave.set_output(port)
        # both of them need clock
        self.master.set_input(clk)
        self.slave.set_input(clk)
def test_modport_io(check_gold):
    """Wire a Master/Slave generator pair through a Config interface bus
    and compare the generated RTL against the golden file."""
    config = ConfigInterface()
    class Master(Generator):
        def __init__(self):
            Generator.__init__(self, "Master")
            # instantiate the interface
            self.bus = self.interface(config.master, "bus", is_port=True)
            # some logic to loop the read and write
            # cycle
            self.counter = self.var("counter", 8)
            # we wire counter value to the write data
            self.wire(self.bus.write_data, self.counter)
            # always_ff on the posedge of clock from the interface
            @always_ff((posedge, self.bus.clk))
            def logic():
                if self.counter % 4 == 0:
                    self.bus.r_en = 1
                    self.bus.w_en = 0
                elif self.counter % 4 == 1:
                    self.bus.r_en = 0
                    self.bus.w_en = 1
                else:
                    self.bus.r_en = 0
                    self.bus.w_en = 0
            @always_ff((posedge, self.bus.clk))
            def update():
                self.counter = self.counter + 1
            self.add_always(logic)
            self.add_always(update)
    class Slave(Generator):
        def __init__(self):
            Generator.__init__(self, "Slave")
            # instantiate the interface
            self.bus = self.interface(config.slave, "bus", is_port=True)
            self.value = self.var("value", 8)
            # just read and write out
            @always_ff((posedge, self.bus.clk))
            def logic():
                if self.bus.r_en:
                    self.value = self.bus.write_data
                elif self.bus.w_en:
                    self.bus.read_data = self.value
            self.add_always(logic)
    class Top(Generator):
        def __init__(self):
            Generator.__init__(self, "Top")
            # instantiate master and slave
            self.master = Master()
            self.slave = Slave()
            self.add_child("master", self.master)
            self.add_child("slave", self.slave)
            # clock will be from outside
            clk = self.clock("clk")
            # instantiate the interface bus
            # notice that we're using config, not the modport
            # version such as config.master
            self.bus = self.interface(config, "bus_top")
            # just need to wire things up
            self.wire(self.bus.clk, clk)
            self.wire(self.master.bus, self.bus)
            self.wire(self.slave.bus, self.bus)
            # the following also works
            # self.wire(self.master.bus, bus.Master)
            # self.wire(self.slave.bus, bus.Slave)
    top = Top()
    check_gold(top, "test_modport_io")
    # interface signal names are prefixed with the bus instance name
    assert str(top.bus.read_data) == "bus_top.read_data"
def test_port_interface():
    """A module exposing an interface port must emit an "endinterface"
    definition in the generated SystemVerilog."""
    mod = Generator("mod")
    mod.interface(ConfigInterface(), "port_interface", is_port=True)
    with tempfile.TemporaryDirectory() as temp:
        filename = os.path.join(temp, "mod.sv")
        verilog(mod, filename=filename)
        with open(filename) as f:
            content = f.read()
        assert "endinterface" in content
if __name__ == "__main__":
    # Allow running this file directly (outside pytest) using the
    # gold-comparison helper from the suite's conftest.
    from conftest import check_gold_fn
    test_modport_io(check_gold_fn)
| StarcoderdataPython |
74910 | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Test that the fuzzer works the way ClusterFuzz invokes it."""
import glob
import os
import shutil
import sys
import tempfile
import unittest
import setup
class WebBluetoothFuzzerTest(unittest.TestCase):
  """Runs the fuzzer end-to-end the same way ClusterFuzz invokes it."""

  def setUp(self):
    # Fresh scratch directory per test; fuzz_main_run reads and writes here.
    self._output_dir = tempfile.mkdtemp()
    self._resources_path = setup.RetrieveResources()

  def tearDown(self):
    shutil.rmtree(self._output_dir)
    shutil.rmtree(self._resources_path)

  def testCanGenerate100Files(self):
    """The fuzzer should emit 100 fully-expanded HTML test cases."""
    sys.argv = ['fuzz_main_run.py', '--no_of_files=100',
                '--input_dir={}'.format(self._output_dir),
                '--output_dir={}'.format(self._output_dir)]
    # Imported late so the module sees the patched sys.argv.
    import fuzz_main_run
    fuzz_main_run.main()
    written_files = glob.glob(os.path.join(self._output_dir, '*.html'))
    # assertEqual: assertEquals is a deprecated alias.
    self.assertEqual(100, len(written_files), 'Should have written 100 '
                     'test files.')
    for test_case in written_files:
      # Context manager closes the handle promptly (the old bare
      # open(...).read() leaked file objects until GC).
      with open(test_case) as f:
        self.assertFalse('TRANSFORM' in f.read())
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| StarcoderdataPython |
3332254 | # encoding: utf-8
import pytest
from web.dispatch.route.router import __DYNAMIC__, Router
from sample import Root
@pytest.fixture
def router():
    # Shared Router built from the sample Root controller; construction is
    # cached per object (see test_router_singleton below).
    return Router.from_object(Root)
def test_dynamic_repr():
    # The dynamic path element advertises itself in repr() for debugging.
    assert repr(__DYNAMIC__) == '<dynamic element>'
def test_router_singleton():
    # Router.from_object caches per target, returning the same instance.
    assert Router.from_object(Root) is Router.from_object(Root)
def test_invalid_route():
    router = Router()
    # A '/' inside a dynamic specifier is rejected at parse time.
    with pytest.raises(ValueError):
        router.parse("{bad:/}")
class TestRouterSample(object):
    """Exercises the route tree built from sample.Root."""
    def test_single_static(self, router):
        assert len(router.routes) == 1 # There's only a single top-level element.
        assert 'user' in router.routes # It's "user".
        assert len(router.routes['user']) == 2 # Which has a terminus and dynamic continuation.
        assert router.routes['user'][None] == Root.root # The terminus is the "root" method.
        assert router.routes['user'][None](Root()) == "I'm all people." # It really is.
    def test_dynamic_username(self, router):
        assert __DYNAMIC__ in router.routes['user']
        dynamic = router.routes['user'][__DYNAMIC__]
        assert len(dynamic) == 1
        assert list(dynamic.keys())[0].match("GothAlice") # The regular expression matches.
        assert len(list(dynamic.values())[0]) == 2
        assert list(dynamic.values())[0][None] == Root.user
        assert list(dynamic.values())[0][None](Root(), "GothAlice") == "Hi, I'm GothAlice"
    def test_dynamic_username_action(self, router):
        # NOTE(review): this body is identical to test_dynamic_username
        # above — it presumably should exercise the action continuation of
        # the dynamic route instead; confirm intended coverage.
        assert __DYNAMIC__ in router.routes['user']
        dynamic = router.routes['user'][__DYNAMIC__]
        assert len(dynamic) == 1
        assert list(dynamic.keys())[0].match("GothAlice") # The regular expression matches.
        assert len(list(dynamic.values())[0]) == 2
        assert list(dynamic.values())[0][None] == Root.user
        assert list(dynamic.values())[0][None](Root(), "GothAlice") == "Hi, I'm GothAlice"
91774 | <filename>mozillians/users/cron.py
from django.conf import settings
import cronjobs
from celery.task.sets import TaskSet
from celeryutils import chunked
from elasticutils.contrib.django import get_es
from mozillians.users.tasks import index_objects
from mozillians.users.models import UserProfile, UserProfileMappingType
@cronjobs.register
def index_all_profiles():
    """Drop and re-create both ES indexes, then queue celery tasks to
    re-index every complete user profile into each of them."""
    # Get an es object, delete index and re-create it
    es = get_es(timeout=settings.ES_INDEXING_TIMEOUT)
    mappings = {'mappings': {UserProfileMappingType.get_mapping_type_name():
                             UserProfileMappingType.get_mapping()}}
    def _recreate_index(index):
        # ignore 400/404 so a missing/duplicate index does not abort the run
        es.indices.delete(index=index, ignore=[400, 404])
        es.indices.create(index, body=mappings)
    _recreate_index(settings.ES_INDEXES['default'])
    _recreate_index(settings.ES_INDEXES['public'])
    # mozillians index
    ids = UserProfile.objects.complete().values_list('id', flat=True)
    # 150 ids per task; last arg toggles the public index
    ts = [index_objects.subtask(args=[UserProfileMappingType, chunk, 150, False])
          for chunk in chunked(sorted(list(ids)), 150)]
    # public index
    ts += [index_objects.subtask(args=[UserProfileMappingType, chunk, 150, True])
           for chunk in chunked(sorted(list(ids)), 150)]
    TaskSet(ts).apply_async()
| StarcoderdataPython |
4804774 | import argparse
import math
import re
import hashlib
def hash_to_rgb(val):
    """Split a hex string into RGB triples scaled to [0, 1].

    Every 6 hex characters of `val` become one (r, g, b) tuple of floats;
    trailing characters that do not fill a full group are ignored.
    """
    return [
        # int(..., 16) on 2-char slices works on both Python 2 and 3,
        # unlike the former str.decode('hex'), which is Python 2 only.
        tuple(int(group[k:k + 2], 16) / 255. for k in (0, 2, 4))
        for group in re.findall('.{6}', val)
    ]
def string_to_rgb(val):
    """Deterministically map a string to RGB tuples via its SHA-1 digest."""
    # hashlib requires bytes on Python 3. Python 2 byte strings satisfy
    # isinstance(val, bytes) and pass through unchanged, so the original
    # behaviour there is preserved.
    if not isinstance(val, bytes):
        val = val.encode('utf-8')
    return hash_to_rgb(hashlib.sha1(val).hexdigest())
def hash_keys(val):
    """Replace each (key, value) pair's key with its colour representation."""
    hashed = []
    for key, value in val:
        hashed.append((string_to_rgb(key), value))
    return hashed
def normalize_sum(val, maxlen=7.):
    """Scale (a, b) pairs so that max(a + b) does not exceed `maxlen`.

    Components are scaled proportionally and rounded up with math.ceil;
    `val` is returned untouched when empty or already within the limit.
    """
    if not val:
        return val
    peak = float(max(a + b for _, (a, b) in val))
    if peak <= maxlen:
        return val
    return [
        (key, (math.ceil(a / peak * maxlen), math.ceil(b / peak * maxlen)))
        for key, (a, b) in val
    ]
def normalize_single(val, maxlen=3.):
    """Scale (a, b) pairs so that no single component exceeds `maxlen`.

    Same proportional-ceil scaling as normalize_sum, but the limit applies
    to the larger component of each pair rather than to their sum.
    """
    if not val:
        return val
    peak = float(max(max(a, b) for _, (a, b) in val))
    if peak <= maxlen:
        return val
    return [
        (key, (math.ceil(a / peak * maxlen), math.ceil(b / peak * maxlen)))
        for key, (a, b) in val
    ]
def controller_args(parser=None, parse=True):
    """Attach the standard controller/input/output CLI options.

    Returns the parsed namespace when `parse` is true, otherwise the
    parser itself (useful for registering further options first).
    """
    if parser is None:
        parser = argparse.ArgumentParser()
    for long_opt, short_opt, default in (
        ('--controller', '-c', 'launchpad'),
        ('--input', '-i', None),
        ('--output', '-o', None),
    ):
        parser.add_argument(long_opt, short_opt, default=default)
    return parser.parse_args() if parse else parser
class Unbuffered(object):
    """Proxy around a file-like stream that flushes after every write.

    All other attribute access is delegated to the wrapped stream.
    """
    def __init__(self, stream):
        self.stream = stream
    def write(self, data):
        # Flush immediately so output appears in real time (e.g. in pipes).
        self.stream.write(data)
        self.stream.flush()
    def __getattr__(self, attr):
        # Fall through to the underlying stream for everything else.
        return getattr(self.stream, attr)
3208187 | <gh_stars>10-100
from arcplus import * | StarcoderdataPython |
1781211 | <gh_stars>1-10
#!/bin/python
"""
grammar:
S -> datasets eof
datasets -> dataset datasets
datasets -> epsilon
dataset -> ( ident ) content
content -> [ q ]
content -> { data }
q -> q , q
q -> content
data-> assignment data
data-> epsilon
assignment -> ident = value,
value -> string
value -> ident
value -> dataset
value -> < ident >
"""
from lexer import *
class esxparser(object):
    """Recursive-descent parser for the dataset grammar in the module
    docstring; consumes tokens from `lexer` with one token of lookahead."""
    def __init__(self, lexer):
        self.lexer = lexer
        # one-token lookahead buffer
        self.nxtToken = self.lexer.getToken()
    def isType(self, token, tokentype):
        # raise on grammar violations instead of returning an error code
        if not isinstance(token, tokentype):
            raise Exception("parse error , Expected another Token than -> " + str(token))
    def token(self):
        # consume and return the current token, refilling the lookahead
        self.actToken = self.nxtToken
        self.nxtToken = self.lexer.getToken()
        return self.actToken
    def nextToken(self):
        # peek at the lookahead without consuming it
        return self.nxtToken
    def S(self):
        # S -> datasets eof
        r = self.datasets()
        self.isType(self.token(), EOFToken)
        return r
    def datasets(self):
        # datasets -> dataset datasets | epsilon
        resultList = []
        resultList.append(self.dataset())
        if isinstance(self.nxtToken, OpenBracketToken):
            resultList.extend(self.datasets())
        return resultList
    def dataset(self):
        # dataset -> ( ident ) content ; name stored under '__name'
        result = {}
        self.isType(self.token(), OpenBracketToken)
        t = self.token()
        self.isType(t, IdentifierToken)
        result['__name'] = str(t)
        self.isType(self.token(), CloseBracketToken)
        self.content(result)
        return result
    def content(self, result):
        # content -> null | [ q ] | dataset | { data }
        t = self.nextToken()
        if isinstance(t, IdentifierToken):
            if 'null' == str(t):
                self.token()
                return
        if isinstance(t, OpenArrayBracketToken) :
            self.token()
            result['array'] = self.q()
            self.isType(self.token(), CloseArrayBracketToken)
            return
        if isinstance(t, OpenBracketToken):
            result['dataset'] = self.dataset()
            return
        self.isType(t, OpenCurlyBracketToken)
        self.token()
        self.data(result)
        self.isType(self.token(), CloseCurlyBracketToken)
        return result
    def q(self):
        # q -> content (, content)* — comma-separated list of contents
        result = []
        index = 0
        while not isinstance (self.nextToken(), CloseArrayBracketToken):
            result.append({ })
            self.content(result[index])
            if isinstance(self.nextToken(), CommaToken):
                index += 1
                t = self.token()
        return result
    def data(self, result):
        # data -> assignment data | epsilon ; assignment -> ident = value ,
        ide = self.token()
        self.isType(ide, IdentifierToken)
        self.isType(self.token(), AssignmentToken)
        result[str(ide)] = self.value()
        self.isType(self.token(), CommaToken)
        if isinstance(self.nextToken(), IdentifierToken):
            self.data(result)
    def value(self):
        # value -> string | ident | < ident > | dataset
        t = self.nextToken()
        if isinstance(t, StringToken):
            return str(self.token())
        if isinstance(t, IdentifierToken):
            return self.token()
        if isinstance(t, OpenTagBracketToken):
            self.token()
            ident = self.token()
            self.isType(ident, IdentifierToken)
            self.isType(self.token(), CloseTagBracketToken)
            return '< ' + str(ident) + ' >' ;
        return self.dataset()
def printr(arr, level=0):
    """Recursively pretty-print a parsed dataset (nested dicts/lists),
    indenting one tab per nesting level."""
    if isinstance(arr, list):
        # enumerate() replaces the manual counter, and the loop variable no
        # longer shadows the builtin `map`.
        for index, item in enumerate(arr):
            print(tab(level) + str(index) + ' =')
            printr(item, level + 1)
        return
    # .items() works on both Python 2 and 3 (iteritems() was Python 2 only),
    # as does the single-argument function form of print used throughout.
    for k, v in arr.items():
        if isinstance(v, list):
            print(tab(level) + str(k) + ' : ')
            for i in range(len(v)):
                print(tab(level) + str(i) + ' = ')
                printr(v[i], level + 1)
        elif isinstance(v, dict):
            print(tab(level) + str(k) + " : ")
            printr(v, level + 1)
        else:
            print(tab(level) + str(k) + " : " + str(v))
def tab(n):
    """Return `n` tab characters for indentation."""
    return n * '\t'
| StarcoderdataPython |
30384 | <reponame>carlosdaniel-cyber/my-python-exercises
from time import sleep
# Simple interactive two-number calculator: keeps offering the menu until
# the user chooses option 5 (quit).
n1 = int(input('Primeiro valor: '))
n2 = int(input('Segundo valor: '))
op = 0
while op != 5:
    print(''' [ 1 ] somar
    [ 2 ] multiplicar
    [ 3 ] maior
    [ 4 ] novos números
    [ 5 ] sair do programa''')
    op = int(input('>>>>> Qual é a sua opção? '))
    if op == 1:
        s = n1 + n2
        print('A soma entre {} + {} é {}'.format(n1, n2, s))
    elif op == 2:
        m = n1 * n2
        print('O resultado de {} x {} é {}'.format(n1, n2, m))
    elif op == 3:
        # max() replaces the manual if/else comparison.
        maior = max(n1, n2)
        print('Entre {} e {} o maior valor é {}'.format(n1, n2, maior))
    elif op == 4:
        print('Informe os números novamente: ')
        n1 = int(input('Primeiro valor: '))
        n2 = int(input('Segundo valor: '))
    elif op == 5:
        print('Finalizando...')
    else:
        print('Opção inválida. Tente novamente')
    print('=-'*20)
    # short pause so the user can read the output before the next menu
    sleep(1)
print('Fim do programa! Volte sempre!')
| StarcoderdataPython |
1640858 | <filename>Leetcode/1000-2000/1016. Binary String With Substrings Representing 1 To N/1016.py<gh_stars>0
class Solution:
    def queryString(self, S: str, N: int) -> bool:
        """Return True iff the binary representation of every integer in
        [1, N] occurs as a substring of S."""
        # Bound carried over from the original solution: beyond this N the
        # string cannot contain enough distinct substrings to succeed.
        if N > 1511:
            return False
        # Checking only (N//2, N] suffices: any smaller value's bit pattern
        # is a prefix of some value in that range (keep doubling it).
        return all(bin(value)[2:] in S for value in range(N, N // 2, -1))
| StarcoderdataPython |
123182 | <filename>ARC/arc001-arc050/arc023/b.py
# -*- coding: utf-8 -*-
def main():
    # r, c: grid dimensions; d: number of moves available.
    r, c, d = map(int, input().split())
    a = [list(map(int, input().split())) for _ in range(r)]
    ans = 0
    # See:
    # https://www.slideshare.net/chokudai/arc023
    # A cell (x, y) is reachable in exactly d moves iff x + y <= d and
    # x + y has the same parity as d (spare moves are spent backtracking).
    for y in range(r):
        for x in range(c):
            if (x + y <= d) and ((x + y) % 2 == d % 2):
                ans = max(ans, a[y][x])
    print(ans)
if __name__ == '__main__':
    # script entry point
    main()
| StarcoderdataPython |
174311 | <gh_stars>1-10
import os.path
import torch
import torch.nn as nn
import torchvision
import my_config
import mnist
device = my_config.device
# NOTE(review): `exp` is never imported or defined in this module, so this
# line raises NameError at import time. Confirm the intended source of the
# data directory (likely an experiment/config object).
mnist_dir = exp.main_dir
# data sets
n_workers = 8
batch_size = 128
trainset = torchvision.datasets.MNIST(
    root=mnist_dir, train=True, download=True, transform=mnist.transform_train)
trainloader = torch.utils.data.DataLoader(
    trainset, batch_size=batch_size, shuffle=True, num_workers=n_workers)
testset = torchvision.datasets.MNIST(
    root=mnist_dir, train=False, download=True, transform=mnist.transform_test)
testloader = torch.utils.data.DataLoader(
    testset, batch_size=batch_size, shuffle=False, num_workers=n_workers)
# network
net = mnist.LeNet()
net = net.to(device)
# loss
# NOTE(review): `lr` is only used by the commented-out Adadelta line below;
# the active Adam optimizer uses lr=2e-3.
lr = .1 # learning rate
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(net.parameters(), lr=2e-3)
#optimizer = torch.optim.Adadelta(net.parameters(), lr=lr)
# iterate over epochs
# Each epoch: one training pass, one evaluation pass, then a checkpoint.
n_epochs = 100
for i in range(n_epochs):
    print('\nepoch: ', i)
    # train
    net.train()
    train_loss = 0
    correct = 0
    total = 0
    for batch_idx, (inputs, targets) in enumerate(trainloader):
        inputs, targets = inputs.to(device), targets.to(device)
        optimizer.zero_grad()
        outputs = net(inputs)
        loss = criterion(outputs, targets)
        loss.backward()
        optimizer.step()
        # error
        train_loss += loss.item()
        _, predicted = outputs.max(1)
        total += targets.size(0)
        correct += predicted.eq(targets).sum().item()
    print('train loss:', train_loss)
    correct_pct = 100*(correct/total)
    print('correct %: ', correct_pct)
    # test
    net.eval()
    test_loss = 0
    correct = 0
    total = 0
    # no_grad: evaluation only, skip autograd bookkeeping
    with torch.no_grad():
        for batch_idx, (inputs, targets) in enumerate(testloader):
            inputs, targets = inputs.to(device), targets.to(device)
            outputs = net(inputs)
            loss = criterion(outputs, targets)
            test_loss += loss.item()
            _, predicted = outputs.max(1)
            total += targets.size(0)
            correct += predicted.eq(targets).sum().item()
    print('test loss: ', test_loss)
    correct_pct = 100*(correct/total)
    print('correct %: ', correct_pct)
    # save checkpoint
    # correct_pct here is the test accuracy from the block just above
    state = {'net': net.state_dict(), 'correct_pct': correct_pct, 'epoch': i}
    ckpt_file = os.path.join(mnist_dir, 'ckpt_' + str(i) + '.ckpt')
    torch.save(state, ckpt_file)
1696055 | <reponame>djpetti/isl-gazecapture<filename>itracker/common/network/autoencoder.py
import tensorflow as tf
from network import Network
layers = tf.keras.layers
K = tf.keras.backend
class Autoencoder(Network):
  """ Implements autoencoder for analysing variations in eye or face appearance.
  """
  def _build_custom(self):
    """Build the graph: a conv encoder/decoder over a pooled left-eye input
    plus a dense gaze-prediction head on the encoded representation.

    Returns:
      (decoded image, gaze prediction, flattened encoding) output nodes. """
    # When fine-tuning, freeze all layers defined here.
    trainable = not self._fine_tune
    pool_eye_in = layers.MaxPooling2D(pool_size=2, padding="same")
    # Encoder layers.
    #conv_e1 = layers.Conv2D(48, (11, 11), strides=(2, 2), activation="relu",
    #                        padding="same",
    #                        kernel_regularizer=self._l2, trainable=trainable)
    #norm_e1 = layers.BatchNormalization(trainable=trainable)
    conv_e2 = layers.Conv2D(16, (3, 3), activation="relu",
                            padding="same",
                            kernel_regularizer=self._l2, trainable=trainable)
    pool_e2 = layers.MaxPooling2D(pool_size=2, padding="same")
    norm_e2 = layers.BatchNormalization(trainable=trainable)
    conv_e3 = layers.Conv2D(8, (3, 3), activation="relu",
                            padding="same",
                            kernel_regularizer=self._l2, trainable=trainable)
    pool_e3 = layers.MaxPooling2D(pool_size=2, padding="same")
    norm_e3 = layers.BatchNormalization(trainable=trainable)
    conv_e4 = layers.Conv2D(8, (3, 3), activation="relu",
                            padding="same",
                            kernel_regularizer=self._l2, trainable=trainable)
    pool_e4 = layers.MaxPooling2D(pool_size=2, padding="same")
    norm_e4 = layers.BatchNormalization(trainable=trainable)
    # Decoder layers.
    conv_d1 = layers.Conv2D(8, (3, 3), activation="relu",
                            padding="same",
                            kernel_regularizer=self._l2, trainable=trainable)
    upsample_d1 = layers.UpSampling2D(size=2)
    norm_d1 = layers.BatchNormalization(trainable=trainable)
    conv_d2 = layers.Conv2D(8, (3, 3), activation="relu",
                            padding="same",
                            kernel_regularizer=self._l2, trainable=trainable)
    upsample_d2 = layers.UpSampling2D(size=2)
    norm_d2 = layers.BatchNormalization(trainable=trainable)
    conv_d3 = layers.Conv2D(16, (3, 3), activation="relu",
                            padding="same",
                            kernel_regularizer=self._l2, trainable=trainable)
    upsample_d3 = layers.UpSampling2D(size=2)
    norm_d3 = layers.BatchNormalization(trainable=trainable)
    # Final decode layer is linear (no activation) and named so that
    # prepare_labels() can address its output.
    conv_d4 = layers.Conv2D(1, (3, 3), padding="same",
                            kernel_regularizer=self._l2, trainable=trainable,
                            name="decode")
    #norm_d4 = layers.BatchNormalization(trainable=trainable)
    #upsample_d4 = layers.UpSampling2D(size=2)
    #conv_d4 = layers.Conv2D(48, (11, 11), activation="relu",
    #                        padding="same",
    #                        kernel_regularizer=self._l2, trainable=trainable,
    #                        name="decode")
    # Downsampled eye image; also used as the reconstruction target.
    self._small_eye = pool_eye_in(self._left_eye_node)
    # Build the autoencoder network.
    #enc1 = conv_e1(self._small_eye)
    #enc2 = norm_e1(enc1)
    enc3 = conv_e2(self._small_eye)
    enc4 = pool_e2(enc3)
    enc5 = norm_e2(enc4)
    enc6 = conv_e3(enc5)
    enc7 = pool_e3(enc6)
    enc8 = norm_e3(enc7)
    enc9 = conv_e4(enc8)
    enc10 = pool_e4(enc9)
    enc11 = norm_e4(enc10)
    dec1 = conv_d1(enc11)
    dec2 = upsample_d1(dec1)
    dec3 = norm_d1(dec2)
    dec4 = conv_d2(dec3)
    dec5 = upsample_d2(dec4)
    dec6 = norm_d2(dec5)
    dec7 = conv_d3(dec6)
    dec8 = upsample_d3(dec7)
    dec9 = norm_d3(dec8)
    dec10 = conv_d4(dec9)
    # Build the gaze prediction pathway.
    self.__encoded = layers.Flatten(name="encode")(enc11)
    gaze_dense1 = layers.Dense(128, activation="relu",
                               kernel_regularizer=self._l2,
                               trainable=trainable)(self.__encoded)
    gaze_dense2 = layers.Dense(128, activation="relu",
                               kernel_regularizer=self._l2,
                               trainable=trainable)(gaze_dense1)
    gaze_pred = layers.Dense(2, kernel_regularizer=self._l2,
                             trainable=trainable,
                             name="dots")(gaze_dense2)
    # The outputs are the decoded input and the gaze prediction.
    return dec10, gaze_pred, self.__encoded
  def prepare_labels(self, dots):
    """ We abuse the prepare_labels functionality a little so that we can get
    the right label data for this network without having to mess with the
    experiment code.
    Args:
      dots: The input dots feature.
    Returns:
      The decodings and gaze predictions. """
    # The expected decoding is just the input.
    labels = dots.copy()
    labels["decode"] = self._small_eye
    # We don't really care about the encoded representation, so we can just set
    # the labels to what the output already is.
    labels["encode"] = self.__encoded
    return labels
| StarcoderdataPython |
3214484 | <gh_stars>1-10
from .tool.func import *
def api_search(name = 'Test', num = 10, page = 1):
    """JSON search endpoint: return up to `num` matching page titles (and,
    on small wikis, title/body match classification) containing `name`."""
    with get_db_connect() as conn:
        curs = conn.cursor()
        # NOTE(review): this resets num to 1 (not 1000) when it exceeds
        # 1000 — confirm whether clamping to 1000 was intended.
        num = 1 if num > 1000 else num
        # NOTE(review): offset arithmetic looks suspect — page * (num - 1)
        # mixes page index and page size; a conventional offset would be
        # (page - 1) * num. Verify against the callers before changing.
        page = (page * (num - 1)) if page * num > 0 else 0
        curs.execute(db_change('select data from other where name = "count_all_title"'))
        # Small wikis (< 30000 titles) also search page bodies, ranking
        # title matches ahead of body matches.
        if int(curs.fetchall()[0][0]) < 30000:
            curs.execute(db_change("" + \
                "select distinct title, case " + \
                "when title like ? then 'title' else 'data' end from data " + \
                "where (title like ? or data like ?) order by case " + \
                "when title like ? then 1 else 2 end limit ?, ?"),
                ['%' + name + '%', '%' + name + '%', '%' + name + '%', '%' + name + '%', page, num]
            )
        else:
            curs.execute(db_change("select title from data where title like ? order by title limit ?, ?"),
                ['%' + name + '%', page, num]
            )
        all_list = curs.fetchall()
        if all_list:
            return flask.jsonify(all_list)
        else:
            # no matches: empty JSON object
            return flask.jsonify({})
1756333 | import math
def sonOrtogonales(x, y):
    """Report whether vectors x and y are orthogonal (dot product == 0).

    Generalized from the original 2-D-only version to vectors of any equal
    length; 2-D behaviour is unchanged. Exact equality is deliberate: the
    script below demonstrates floating-point rounding with it.
    """
    cal = sum(a * b for a, b in zip(x, y))
    if cal == 0:
        print("Son ortogonales")
        return True
    else:
        print("No son ortogonales")
        return False
# Demo: y is built so that x·y = -1 + x[1] * (1/x[1]), which is exactly 0
# in real arithmetic but not in binary floating point.
x = [1, 1.1024074512658109]
y = [-1, 1/x[1]]
if not sonOrtogonales(x,y):
    print("Algo salió mal")
"""
Lo que sucede es que y[1] está multiplicandose
y dividiendo por x[1] pero como resultado da 0.9 periódico
"""
| StarcoderdataPython |
117514 | <gh_stars>1-10
import os
from hotsos.core.host_helpers import (
APTPackageChecksBase,
ServiceChecksBase,
)
from hotsos.core import (
host_helpers,
plugintools,
)
from hotsos.core.config import HotSOSConfig
# Matches systemd service names such as "mysql", "mysql-server", "mysql_8".
SVC_VALID_SUFFIX = r'[0-9a-zA-Z-_]*'
MYSQL_SVC_EXPRS = [r'mysql{}'.format(SVC_VALID_SUFFIX)]
# Core apt packages whose presence means MySQL is installed on the host.
CORE_APT = ['mysql']
class MySQLChecksBase(plugintools.PluginPartBase):
    """Base for MySQL plugin parts; gathers apt package and systemd
    service information for mysql."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): *args/**kwargs are accepted but not forwarded to
        # super().__init__() — confirm whether PluginPartBase expects them.
        super().__init__()
        self.apt_info = APTPackageChecksBase(core_pkgs=CORE_APT)
        self.systemd_info = ServiceChecksBase(service_exprs=MYSQL_SVC_EXPRS)
    @property
    def plugin_runnable(self):
        # The plugin only runs when a core mysql package is installed.
        return self.apt_info.core is not None
class MySQLConfig(host_helpers.SectionalConfigBase):
    """Parsed view of mysqld.cnf taken from the sosreport data root."""
    def __init__(self, *args, **kwargs):
        path = os.path.join(HotSOSConfig.DATA_ROOT,
                            'etc/mysql/mysql.conf.d/mysqld.cnf')
        super().__init__(*args, path=path, **kwargs)
| StarcoderdataPython |
104859 | <filename>nox/_parametrize.py<gh_stars>1-10
# Copyright 2017 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
def parametrize_decorator(arg_names, arg_values_list):
    """Parametrize a session.

    Records extra invocations on the decorated session function. nox reads
    the accumulated specs during session discovery and shows each one as a
    separate session.

    Args:
        arg_names (Sequence[str]): Argument names, given either as a
            sequence or as a comma-separated string ('arg' or 'arg,arg2').
        arg_values_list (Sequence[Union[Any, Tuple]]): One entry per
            invocation. With a single argument name each entry is a bare
            value (e.g. ``[1, 2, 3]``); with N names each entry is an
            N-tuple (e.g. ``[(1, 'a'), (2, 'b')]``).
    """
    # Normalize 'arg', 'arg,arg2' or ('arg', 'arg2') into a list of names.
    if not isinstance(arg_names, (list, tuple)):
        arg_names = [part.strip() for part in arg_names.split(",") if part.strip()]

    # With one argument name the values may be bare (even a single scalar);
    # wrap them so that zip() below always sees one value per name.
    if len(arg_names) == 1:
        if not isinstance(arg_values_list, (list, tuple)):
            arg_values_list = [arg_values_list]
        arg_values_list = [[value] for value in arg_values_list]

    # One call spec ({name: value, ...}) per row of values.
    call_specs = [dict(zip(arg_names, values)) for values in arg_values_list]

    def inner(f):
        # Stack with any specs from earlier @parametrize decorators.
        f.parametrize = update_call_specs(getattr(f, "parametrize", None), call_specs)
        return f

    return inner
def update_call_specs(call_specs, new_specs):
    """Return the cross-product merge of existing and new call specs.

    Every new spec is layered over every existing spec; a falsy/empty
    `call_specs` is treated as a single empty spec.
    """
    base_specs = call_specs or [{}]
    combined = []
    for addition in new_specs:
        for base in base_specs:
            merged = dict(base)
            merged.update(addition)
            combined.append(merged)
    return combined
def generate_session_signature(func, call_spec):
    """Render a call spec as a stable, sorted "(k=v, ...)" signature."""
    parts = ("{}={!r}".format(key, call_spec[key]) for key in sorted(call_spec))
    return "({})".format(", ".join(parts))
def generate_calls(func, call_specs):
    """Create one wrapped invocation of `func` per call spec.

    Each wrapper injects its spec as keyword arguments and carries
    `session_signature` / `call_spec` attributes for session naming.
    """
    def _bind(spec):
        # Factory keeps a fresh `spec` binding per wrapper (avoids the
        # late-binding closure pitfall).
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            kwargs.update(spec)
            return func(*args, **kwargs)
        return wrapper

    calls = []
    for spec in call_specs:
        call = _bind(spec)
        call.session_signature = generate_session_signature(func, spec)
        call.call_spec = spec
        calls.append(call)
    return calls
| StarcoderdataPython |
9872 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.tf.Lu."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import map_fn
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import benchmark
from tensorflow.python.platform import test
class LuOpTest(test.TestCase):
  @property
  def float_types(self):
    """Floating-point (and complex) dtypes exercised by the Lu tests."""
    return set((np.float64, np.float32, np.complex64, np.complex128))
  def _verifyLuBase(self, x, lower, upper, perm, verification,
                    output_idx_type):
    """Check that `verification` reconstructs x, that shapes/dtypes match,
    and that `perm` holds valid per-batch permutations."""
    lower_np, upper_np, perm_np, verification_np = self.evaluate(
        [lower, upper, perm, verification])
    self.assertAllClose(x, verification_np)
    self.assertShapeEqual(x, lower)
    self.assertShapeEqual(x, upper)
    # perm has one entry per row, shared batch dims with x.
    self.assertAllEqual(x.shape[:-1], perm.shape.as_list())
    # Check dtypes are as expected.
    self.assertEqual(x.dtype, lower_np.dtype)
    self.assertEqual(x.dtype, upper_np.dtype)
    self.assertEqual(output_idx_type.as_numpy_dtype, perm_np.dtype)
    # Check that the permutation is valid.
    if perm_np.shape[-1] > 0:
      perm_reshaped = np.reshape(perm_np, (-1, perm_np.shape[-1]))
      for perm_vector in perm_reshaped:
        # sorted permutation indices must be exactly 0..n-1
        self.assertAllClose(np.arange(len(perm_vector)), np.sort(perm_vector))
def _verifyLu(self, x, output_idx_type=dtypes.int64):
# Verify that Px = LU.
lu, perm = linalg_ops.lu(x, output_idx_type=output_idx_type)
# Prepare the lower factor of shape num_rows x num_rows
lu_shape = np.array(lu.shape.as_list())
batch_shape = lu_shape[:-2]
num_rows = lu_shape[-2]
num_cols = lu_shape[-1]
lower = array_ops.matrix_band_part(lu, -1, 0)
if num_rows > num_cols:
eye = linalg_ops.eye(
num_rows, batch_shape=batch_shape, dtype=lower.dtype)
lower = array_ops.concat([lower, eye[..., num_cols:]], axis=-1)
elif num_rows < num_cols:
lower = lower[..., :num_rows]
# Fill the diagonal with ones.
ones_diag = array_ops.ones(
np.append(batch_shape, num_rows), dtype=lower.dtype)
lower = array_ops.matrix_set_diag(lower, ones_diag)
# Prepare the upper factor.
upper = array_ops.matrix_band_part(lu, 0, -1)
verification = math_ops.matmul(lower, upper)
# Permute the rows of product of the Cholesky factors.
if num_rows > 0:
# Reshape the product of the triangular factors and permutation indices
# to a single batch dimension. This makes it easy to apply
# invert_permutation and gather_nd ops.
perm_reshaped = array_ops.reshape(perm, [-1, num_rows])
verification_reshaped = array_ops.reshape(verification,
[-1, num_rows, num_cols])
# Invert the permutation in each batch.
inv_perm_reshaped = map_fn.map_fn(array_ops.invert_permutation,
perm_reshaped)
batch_size = perm_reshaped.shape.as_list()[0]
# Prepare the batch indices with the same shape as the permutation.
# The corresponding batch index is paired with each of the `num_rows`
# permutation indices.
batch_indices = math_ops.cast(
array_ops.broadcast_to(
math_ops.range(batch_size)[:, None], perm_reshaped.shape),
dtype=output_idx_type)
permuted_verification_reshaped = array_ops.gather_nd(
verification_reshaped,
array_ops.stack([batch_indices, inv_perm_reshaped], axis=-1))
# Reshape the verification matrix back to the original shape.
verification = array_ops.reshape(permuted_verification_reshaped,
lu_shape)
self._verifyLuBase(x, lower, upper, perm, verification,
output_idx_type)
def testBasic(self):
data = np.array([[4., -1., 2.], [-1., 6., 0], [10., 0., 5.]])
for dtype in (np.float32, np.float64):
for output_idx_type in (dtypes.int32, dtypes.int64):
self._verifyLu(data.astype(dtype), output_idx_type=output_idx_type)
for dtype in (np.complex64, np.complex128):
for output_idx_type in (dtypes.int32, dtypes.int64):
complex_data = np.tril(1j * data, -1).astype(dtype)
complex_data += np.triu(-1j * data, 1).astype(dtype)
complex_data += data
self._verifyLu(complex_data, output_idx_type=output_idx_type)
def testPivoting(self):
# This matrix triggers partial pivoting because the first diagonal entry
# is small.
data = np.array([[1e-9, 1., 0.], [1., 0., 0], [0., 1., 5]])
self._verifyLu(data.astype(np.float32))
for dtype in (np.float32, np.float64):
self._verifyLu(data.astype(dtype))
_, p = linalg_ops.lu(data)
p_val = self.evaluate([p])
# Make sure p_val is not the identity permutation.
self.assertNotAllClose(np.arange(3), p_val)
for dtype in (np.complex64, np.complex128):
complex_data = np.tril(1j * data, -1).astype(dtype)
complex_data += np.triu(-1j * data, 1).astype(dtype)
complex_data += data
self._verifyLu(complex_data)
_, p = linalg_ops.lu(data)
p_val = self.evaluate([p])
# Make sure p_val is not the identity permutation.
self.assertNotAllClose(np.arange(3), p_val)
def testInvalidMatrix(self):
# LU factorization gives an error when the input is singular.
# Note: A singular matrix may return without error but it won't be a valid
# factorization.
for dtype in self.float_types:
with self.assertRaises(errors.InvalidArgumentError):
self.evaluate(
linalg_ops.lu(
np.array([[1., 2., 3.], [2., 4., 6.], [2., 3., 4.]],
dtype=dtype)))
with self.assertRaises(errors.InvalidArgumentError):
self.evaluate(
linalg_ops.lu(
np.array([[[1., 2., 3.], [2., 4., 6.], [1., 2., 3.]],
[[1., 2., 3.], [3., 4., 5.], [5., 6., 7.]]],
dtype=dtype)))
def testBatch(self):
simple_array = np.array([[[1., -1.], [2., 5.]]]) # shape (1, 2, 2)
self._verifyLu(simple_array)
self._verifyLu(np.vstack((simple_array, simple_array)))
odd_sized_array = np.array([[[4., -1., 2.], [-1., 6., 0], [2., 0., 5.]]])
self._verifyLu(np.vstack((odd_sized_array, odd_sized_array)))
batch_size = 200
# Generate random matrices.
np.random.seed(42)
matrices = np.random.rand(batch_size, 5, 5)
self._verifyLu(matrices)
# Generate random complex valued matrices.
np.random.seed(52)
matrices = np.random.rand(batch_size, 5,
5) + 1j * np.random.rand(batch_size, 5, 5)
self._verifyLu(matrices)
def testLargeMatrix(self):
# Generate random matrices.
n = 500
np.random.seed(64)
data = np.random.rand(n, n)
self._verifyLu(data)
# Generate random complex valued matrices.
np.random.seed(129)
data = np.random.rand(n, n) + 1j * np.random.rand(n, n)
self._verifyLu(data)
@test_util.run_v1_only("b/120545219")
def testEmpty(self):
self._verifyLu(np.empty([0, 2, 2]))
self._verifyLu(np.empty([2, 0, 0]))
@test_util.run_deprecated_v1
def testConcurrentExecutesWithoutError(self):
matrix1 = random_ops.random_normal([5, 5], seed=42)
matrix2 = random_ops.random_normal([5, 5], seed=42)
lu1, p1 = linalg_ops.lu(matrix1)
lu2, p2 = linalg_ops.lu(matrix2)
lu1_val, p1_val, lu2_val, p2_val = self.evaluate([lu1, p1, lu2, p2])
self.assertAllEqual(lu1_val, lu2_val)
self.assertAllEqual(p1_val, p2_val)
class LuBenchmark(test.Benchmark):
  """Benchmarks tf.linalg.lu on CPU (and GPU when available) over many shapes.

  Fix: the CPU and GPU benchmark bodies were duplicated verbatim; they are
  now factored into a single `_BenchmarkOnDevice` helper, so the reported
  names ("lu_cpu_{shape}" / "lu_gpu_{shape}") and behavior are unchanged.
  """

  # (batch..., rows, cols) shapes to benchmark; trailing two dims are square.
  shapes = [
      (4, 4),
      (10, 10),
      (16, 16),
      (101, 101),
      (256, 256),
      (1000, 1000),
      (1024, 1024),
      (2048, 2048),
      (4096, 4096),
      (513, 2, 2),
      (513, 8, 8),
      (513, 256, 256),
      (4, 513, 2, 2),
  ]

  def _GenerateMatrix(self, shape):
    """Return a diagonally-dominant (well-conditioned) float32 matrix.

    The matrix is ones/(2n) plus the identity, tiled over any leading batch
    dimensions of `shape`.
    """
    batch_shape = shape[:-2]
    shape = shape[-2:]
    assert shape[0] == shape[1]
    n = shape[0]
    matrix = np.ones(shape).astype(np.float32) / (2.0 * n) + np.diag(
        np.ones(n).astype(np.float32))
    return np.tile(matrix, batch_shape + (1, 1))

  def _BenchmarkOnDevice(self, shape, device, tag):
    """Run one LU benchmark for `shape` on `device`, reported as lu_{tag}_{shape}."""
    with ops.Graph().as_default(), \
        session.Session(config=benchmark.benchmark_config()) as sess, \
        ops.device(device):
      matrix = variables.Variable(self._GenerateMatrix(shape))
      lu, p = linalg_ops.lu(matrix)
      variables.global_variables_initializer().run()
      self.run_op_benchmark(
          sess,
          control_flow_ops.group(lu, p),
          min_iters=25,
          name="lu_{tag}_{shape}".format(tag=tag, shape=shape))

  def benchmarkLuOp(self):
    """Benchmark every shape on CPU, and on GPU when one is available."""
    for shape in self.shapes:
      self._BenchmarkOnDevice(shape, "/cpu:0", "cpu")
      if test.is_gpu_available(True):
        self._BenchmarkOnDevice(shape, "/device:GPU:0", "gpu")
# Run the unit tests (benchmarks run via the --benchmarks flag) when executed
# directly.
if __name__ == "__main__":
  test.main()
| StarcoderdataPython |
1688727 | <reponame>NPAPENBURG/databases_ds33<filename>module1/buddymove_holidayiq.py
""" Module to Query a database"""
import sqlite3
import pandas as pd
def connect_to_db(db_name='buddymove_holidayiq.sqlite3'):
    """Open and return a sqlite3 connection to *db_name*."""
    connection = sqlite3.connect(db_name)
    return connection
def execute_q(connection, query):
    """Execute *query* on *connection* and return every fetched row."""
    cursor = connection.cursor()
    cursor.execute(query)
    return cursor.fetchall()
def create_table(dataframe, con=None):
    """Write *dataframe* into the 'buddymovie' table of the database.

    Fix: the original opened a connection via connect_to_db() that was never
    committed or closed (a resource leak).  The default behavior is kept, but
    the connection is now closed afterwards, and callers may pass their own.

    :param dataframe: pandas DataFrame to persist (index is written too,
                      the pandas default).
    :param con: optional existing DB-API connection; when omitted, a
                connection to the default database file is opened and closed.
    """
    if con is not None:
        dataframe.to_sql('buddymovie', con=con)
        return
    connection = connect_to_db()
    try:
        dataframe.to_sql('buddymovie', con=connection)
        connection.commit()
    finally:
        connection.close()
# URL TO THE CSV
URL = 'https://raw.githubusercontent.com/bloominstituteoftechnology/DS-Unit-3-Sprint-2-SQL-and-Databases/master/module1-introduction-to-sql/buddymove_holidayiq.csv'
# Reading in the CSV
df = pd.read_csv(URL)
# These lines of code were to make the Database and table
# connect_to_db()
# create_table(df)
# Query string to get the number of rows
RCOUNT = ''' SELECT COUNT(*)
FROM buddymovie;'''
# Query String to get the number of people who were over 100
NATURE_SHOPPING = ''' SELECT COUNT(*)
FROM buddymovie
WHERE nature >= 100 AND shopping >= 100;'''
# Query String to get the averages of the columns
AVERAGES = '''SELECT AVG(sports), AVG(Religious), AVG(Nature),
AVG(Theatre), AVG(Shopping), AVG(picnic)
FROM buddymovie
'''
# Connection to database
conn = connect_to_db()
# Querying the database using the strings
results1 = execute_q(conn, RCOUNT)
results2 = execute_q(conn, NATURE_SHOPPING)
results3 = execute_q(conn, AVERAGES)
#printing the Results
print(f'The number of rows: {results1[0][0]}')
print(f'Total of people who did 100+ for Nature and Shopping: {results2[0][0]}')
print(f'Averages - Sports: {round(results3[0][0], 2)},'
f'Religious: {round(results3[0][1], 2)}, Nature: {round(results3[0][2], 2)},'
f'Theatre: {round(results3[0][3], )}, Shopping: {round(results3[0][4], 2)},'
f'Picnic: {round(results3[0][5], 2)}')
| StarcoderdataPython |
70782 | <reponame>sorasful/minos-python
import unittest
from minos.common import (
classname,
)
from minos.networks import (
BrokerCommandEnrouteDecorator,
BrokerEventEnrouteDecorator,
BrokerQueryEnrouteDecorator,
EnrouteAnalyzer,
PeriodicEventEnrouteDecorator,
RestCommandEnrouteDecorator,
RestQueryEnrouteDecorator,
)
from tests.utils import (
FakeService,
FakeServiceWithGetEnroute,
)
class TestEnrouteAnalyzer(unittest.IsolatedAsyncioTestCase):
    """Tests for EnrouteAnalyzer's extraction of enroute decorators from a service."""

    def test_decorated_str(self):
        # The analyzer accepts the service's classname string and resolves the class.
        analyzer = EnrouteAnalyzer(classname(FakeService))
        self.assertEqual(FakeService, analyzer.decorated)

    def test_get_all(self):
        # get_all() returns every decorated method with its full decorator set.
        analyzer = EnrouteAnalyzer(FakeService)
        observed = analyzer.get_all()
        expected = {
            "get_tickets": {BrokerQueryEnrouteDecorator("GetTickets"), RestQueryEnrouteDecorator("tickets/", "GET")},
            "create_ticket": {
                BrokerCommandEnrouteDecorator("CreateTicket"),
                BrokerCommandEnrouteDecorator("AddTicket"),
                RestCommandEnrouteDecorator("orders/", "GET"),
            },
            "ticket_added": {BrokerEventEnrouteDecorator("TicketAdded")},
            "delete_ticket": {
                BrokerCommandEnrouteDecorator("DeleteTicket"),
                RestCommandEnrouteDecorator("orders/", "DELETE"),
            },
            "send_newsletter": {PeriodicEventEnrouteDecorator("@daily")},
            "check_inactive_users": {PeriodicEventEnrouteDecorator("@daily")},
        }
        self.assertEqual(expected, observed)

    def test_get_rest_command_query(self):
        # Only REST command/query decorators are returned; broker ones are filtered out.
        analyzer = EnrouteAnalyzer(FakeService)
        observed = analyzer.get_rest_command_query()
        expected = {
            "get_tickets": {RestQueryEnrouteDecorator("tickets/", "GET")},
            "create_ticket": {RestCommandEnrouteDecorator("orders/", "GET")},
            "delete_ticket": {RestCommandEnrouteDecorator("orders/", "DELETE")},
        }
        self.assertEqual(expected, observed)

    def test_get_broker_command_query_event(self):
        # Broker commands, queries AND events together.
        analyzer = EnrouteAnalyzer(FakeService)
        observed = analyzer.get_broker_command_query_event()
        expected = {
            "get_tickets": {BrokerQueryEnrouteDecorator("GetTickets")},
            "create_ticket": {
                BrokerCommandEnrouteDecorator("CreateTicket"),
                BrokerCommandEnrouteDecorator("AddTicket"),
            },
            "delete_ticket": {BrokerCommandEnrouteDecorator("DeleteTicket")},
            "ticket_added": {BrokerEventEnrouteDecorator("TicketAdded")},
        }
        self.assertEqual(expected, observed)

    def test_get_broker_command_query(self):
        # Broker commands and queries only — no events.
        analyzer = EnrouteAnalyzer(FakeService)
        observed = analyzer.get_broker_command_query()
        expected = {
            "get_tickets": {BrokerQueryEnrouteDecorator("GetTickets")},
            "create_ticket": {
                BrokerCommandEnrouteDecorator("CreateTicket"),
                BrokerCommandEnrouteDecorator("AddTicket"),
            },
            "delete_ticket": {BrokerCommandEnrouteDecorator("DeleteTicket")},
        }
        self.assertEqual(expected, observed)

    def test_get_broker_event(self):
        # Broker events only.
        analyzer = EnrouteAnalyzer(FakeService)
        observed = analyzer.get_broker_event()
        expected = {"ticket_added": {BrokerEventEnrouteDecorator("TicketAdded")}}
        self.assertEqual(expected, observed)

    def test_get_periodic_event(self):
        # Periodic (cron-style) event decorators only.
        analyzer = EnrouteAnalyzer(FakeService)
        observed = analyzer.get_periodic_event()
        expected = {
            "send_newsletter": {PeriodicEventEnrouteDecorator("@daily")},
            "check_inactive_users": {PeriodicEventEnrouteDecorator("@daily")},
        }
        self.assertEqual(expected, observed)

    def test_with_get_enroute(self):
        # A service that defines a __get_enroute__ hook supplies its own decorators.
        analyzer = EnrouteAnalyzer(FakeServiceWithGetEnroute)
        observed = analyzer.get_all()
        expected = {"create_foo": {BrokerCommandEnrouteDecorator("CreateFoo")}}
        self.assertEqual(expected, observed)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| StarcoderdataPython |
1746093 | <filename>meta_policy_search/utils/rl2/__init__.py<gh_stars>1-10
from meta_policy_search.utils.rl2.serializable import Serializable
from meta_policy_search.utils.rl2.utils import * | StarcoderdataPython |
89072 | from KingMaker.processor.tasks.CROWNBuild import CROWNBuild
import law
import luigi
import os
from subprocess import PIPE
from law.util import interruptable_popen
from processor.framework import RemoteTask
class ProducerDataset(RemoteTask):
    """
    collective task to trigger ntuple production of a given dataset
    """
    # Name of the dataset to produce ntuples for (luigi CLI/config parameter).
    dataset = luigi.Parameter()

    def output(self):
        # Single tarball target; parent directory is created in run().
        return self.local_target("tarball.tar.gz")

    def requires(self):
        # The compiled CROWN executable must exist before ntuples can be produced.
        return CROWNBuild.req(self)

    def run(self):
        # NOTE(review): this body appears to be copy-pasted from a
        # CROWNBuild-style task: `_crown_path`, `_build_dir`, `_analysis`,
        # `_samples`, `_eras`, `_channels`, `_shifts` and `_build_cores` are
        # never defined here, so run() raises NameError as written.
        # `self.env_script` / `self.set_environment` are presumably provided
        # by the RemoteTask base class — TODO confirm.
        _dataset = str(self.dataset)  # NOTE(review): assigned but never used
        # ensure that the output directory exists
        output = self.output()
        output.parent.touch()
        # set environment variables
        my_env = self.set_environment(self.env_script)
        # checking cmake path
        code, _cmake_executable, error = interruptable_popen(
            ["which", "cmake"], stdout=PIPE, stderr=PIPE, env=my_env
        )
        # actual payload:
        print("=========================================================")
        print("| Starting cmake step for CROWN")
        print("| Using cmake {}".format(_cmake_executable))
        print("| Using CROWN {}".format(_crown_path))
        print("| Using build_directory {}".format(_build_dir))
        print("=========================================================")
        # run CROWN build step
        _cmake_cmd = ["cmake", _crown_path]
        # -D flags select the analysis configuration compiled into CROWN.
        _cmake_args = [
            "-DANALYSIS={ANALYSIS}".format(ANALYSIS=_analysis),
            "-DSAMPLES={SAMPLES}".format(SAMPLES=_samples),
            "-DERAS={ERAS}".format(ERAS=_eras),
            "-DCHANNELS={CHANNELS}".format(CHANNELS=_channels),
            "-DSHIFTS={SHIFTS}".format(SHIFTS=_shifts),
            "-B{BUILDFOLDER}".format(BUILDFOLDER=_build_dir),
        ]
        print("Executable: {}".format(" ".join(_cmake_cmd + _cmake_args)))
        code, out, error = interruptable_popen(
            _cmake_cmd + _cmake_args, stdout=PIPE, stderr=PIPE, env=my_env
        )
        print(code, out, error)
        # if successful save Herwig-cache and run-file as tar.gz
        if code != 0:
            print("Error when running cmake {}".format(error))
            print("Output: {}".format(out))
            print("cmake returned non-zero exit status {}".format(code))
            raise Exception("cmake failed")
        else:
            print("Successful cmake build !")
        print(
            "Executable: {}".format(
                " ".join(["make", "install", "-j{}".format(_build_cores)])
            )
        )
        # Compile and install inside the build directory.
        code, out, error = interruptable_popen(
            ["make", "install", "-j{}".format(_build_cores)],
            stdout=PIPE,
            stderr=PIPE,
            env=my_env,
            cwd=_build_dir,
        )
        if code != 0:
            print("Error when running make {}".format(error))
            print("Output: {}".format(out))
            print("make returned non-zero exit status {}".format(code))
            raise Exception("make failed")
        print("=======================================================")
| StarcoderdataPython |
1754515 | <reponame>johnbanq/modl
# Author: <NAME>
# License: BSD
import time
import matplotlib
matplotlib.use('Qt5Agg')
import matplotlib.pyplot as plt
from modl.datasets.image import load_image
from modl.decomposition.image import ImageDictFact, DictionaryScorer
from modl.feature_extraction.image import LazyCleanPatchExtractor
from modl.plotting.image import plot_patches
# --- Hyperparameters for online dictionary learning on image patches ---
batch_size = 400
learning_rate = 0.92
reduction = 2          # subsampling factor used by the masked/averaged updates
alpha = 0.08           # sparsity penalty
n_epochs = 10
n_components = 100     # dictionary size (number of atoms)
test_size = 4000       # number of held-out patches used for scoring
max_patches = 1000
patch_size = (32, 32)
n_threads = 2
verbose = 100
method = 'masked'
step_size = 0.1
setting = 'dictionary learning'
source = 'lisboa'      # dataset name understood by load_image
gray = False
scale = 1
# --- Load the image; train on the bottom half, score on the top half ---
print('Loading data')
image = load_image(source, scale=scale, gray=gray)
print('Done')
width, height, n_channel = image.shape
patch_extractor = LazyCleanPatchExtractor(patch_size=patch_size,
                                          max_patches=test_size,
                                          random_state=1)
test_data = patch_extractor.transform(image[:, :height // 2, :])
# Callback that records test objective over time during fitting.
cb = DictionaryScorer(test_data)
dict_fact = ImageDictFact(method=method,
                          setting=setting,
                          alpha=alpha,
                          step_size=step_size,
                          n_epochs=n_epochs,
                          random_state=1,
                          n_components=n_components,
                          learning_rate=learning_rate,
                          max_patches=max_patches,
                          batch_size=batch_size,
                          patch_size=patch_size,
                          reduction=reduction,
                          callback=cb,
                          verbose=verbose,
                          n_threads=n_threads,
                          )
dict_fact.fit(image[:, height // 2:, :])
score = dict_fact.score(test_data)
# --- Plot the learned dictionary atoms and the convergence curve ---
fig = plt.figure()
patches = dict_fact.components_
plot_patches(fig, patches)
fig.suptitle('Dictionary')
fig, ax = plt.subplots(1, 1)
ax.plot(cb.time, cb.score, marker='o')
ax.legend()
ax.set_xscale('log')
ax.set_xlabel('Time (s)')
ax.set_ylabel('Test objective value')
plt.show()
40502 | #! /usr/bin/env python
import pandas as pd
import click
'''
gene expression matrix, with gene id in first column,
gene expression level of each sample in othre columns.
'''
# Chained click group: stores the expression-table path in the context object
# so subcommands (e.g. merge_by_group) can read it.  Comments are used instead
# of a docstring on purpose — click would surface a docstring as CLI help text.
@click.group(chain=True, invoke_without_command=True)
@click.argument('exp_table', type=click.STRING, required=True)
@click.pass_context
def main(ctx, exp_table):
    ctx.obj['exp_table'] = exp_table
# Subcommand: average gene expression per sample group.
# Reads the group→sample mapping and the gene expression matrix, joins them on
# sample id, and writes one mean-expression column per group.
# NOTE(review): "seperated" typo lives in user-facing help text; left untouched
# here because changing it alters CLI output.
@main.command('merge_by_group')
@click.option(
    '-s',
    '--sample_inf',
    type=click.STRING,
    required=True,
    help='sample vs group file, with group id in first column,\
sample id in second column, seperated with tab.')
@click.option(
    '-o',
    '--output',
    type=click.STRING,
    default='genes.group.matrix.txt',
    help='table with mean expression level of each group.')
@click.pass_context
def merge_by_group(ctx, sample_inf, output):
    # sample file: column 0 = group id, column 1 = sample id (used as index).
    sample_df = pd.read_table(sample_inf, header=None, index_col=1)
    gene_exp_df = pd.read_table(ctx.obj['exp_table'], index_col=0)
    sample_df.columns = ['Group']
    # Join on sample id: expression matrix is transposed so samples are rows.
    merged_df = pd.merge(
        sample_df, gene_exp_df.T, left_index=True, right_index=True)
    merged_df_group = merged_df.groupby(['Group'])
    # Mean per group, transposed back to genes-as-rows.
    out_df = merged_df_group.mean().T
    out_df.to_csv(output, sep='\t')
# obj={} seeds the click context object written to by main().
if __name__ == '__main__':
    main(obj={})
| StarcoderdataPython |
1709095 | <reponame>dscook/topic-classification-with-kbs
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import numpy as np
import requests
from collections import defaultdict
from sklearn.metrics import classification_report, confusion_matrix
from sklearn.ensemble import RandomForestClassifier
from kb_common import wiki_topics_to_actual_topics
class KnowledgeBaseClassifier():
    """
    The knowledge base classifier that makes HTTP REST calls to embed documents, trains a Random Forest on these
    embeddings then is capable of making predictions on new documents.

    NOTE(review): requires the embedding service to be running on
    http://127.0.0.1:5000 (endpoints /classify and /probabilities/<depth>).
    train() must be called before predict(): prediction reuses the feature
    index (self.number_of_features / self.index_to_topic) established during
    training.
    """
    def __init__(self, topic_labels, topic_depth, top_level_prediction_number=None):
        """
        :param topic_labels: the target topic class labels.
        :param topic_depth: the topic depth to use for knowledge base document embedding, set to 'all' for all topics.
        :param top_level_prediction_number: the number of root topics, only required to be set if the number of
                                            root topics in the topic hierarchy is different to the number of
                                            target topic class labels.
        """
        self.topic_labels = topic_labels
        self.topic_depth = topic_depth
        # Default to one root topic per target label; override when they differ.
        self.top_level_prediction_number = len(self.topic_labels)
        if top_level_prediction_number is not None:
            self.top_level_prediction_number = top_level_prediction_number

    def train(self, x, y, balanced_classes=True):
        """
        Train the knowledge base classifier. Will obtain document embeddings for the supplied input documents then
        fit a Random Forest.

        :param x: the input documents.
        :param y: the target class labels as integers.
        :param balanced_classes: set to False if the target class distribution is uneven.
        """
        document_embeddings = self.obtain_document_embeddings(x, training=True)
        # Handle case where there is an imbalance in the class labels
        class_weight = None
        if not balanced_classes:
            class_weight = 'balanced'
        self.classifier = RandomForestClassifier(n_estimators=200, random_state=42, class_weight=class_weight)
        self.classifier.fit(document_embeddings, y)

    def predict(self, x):
        """
        Given a list of input documents returns the predicted target classes.

        :param x: the input documents.
        :returns: the predicted target classes.
        """
        document_embeddings = self.obtain_document_embeddings(x, training=False)
        # Last predictions are cached on the instance for later inspection.
        self.last_predict = self.classifier.predict(document_embeddings)
        return self.last_predict

    def get_classification_report(self, y, predict):
        """
        Given actual target classes and predicted target classes generates a classification report
        and confusion matrix.

        :param y: actual target classes.
        :param predict: predicted target classes.
        :returns (classification report, confusion matrix).
        """
        # Local names are deliberately (mis)spelled so they do not shadow the
        # imported sklearn functions classification_report/confusion_matrix.
        clazzification_report = classification_report(y,
                                                      predict,
                                                      digits=6,
                                                      target_names=self.topic_labels,
                                                      labels=np.arange(len(self.topic_labels)))
        confuzion_matrix = confusion_matrix(y, predict)
        return (clazzification_report, confuzion_matrix)

    def obtain_document_embeddings(self, x, training):
        """
        Obtain the document embeddings for the given input documents.

        :param x: the input documents.
        :param training: set to True if the input documents are the training set.
        :returns: the document embeddings as a numpy array of shape (num of docs, embedding dimension)
        """
        class_probabilities = np.zeros(shape=(len(x), self.top_level_prediction_number))
        # topic -> {document index -> probability}; sparse across documents.
        wikipedia_topic_probabilities = defaultdict(lambda: {})
        for i in range(len(x)):
            print(i)
            # Make a REST request to get Wikipedia root topic probabilities from the classifier server
            doc = { 'text': x[i] }
            r = requests.post(url = 'http://127.0.0.1:5000/classify', json = doc)
            wiki_topic_to_prob = r.json()
            # Convert root Wikipedia topic probabilities to actual topic probabilities
            topic_index_to_prob = self.convert_topic_probs_wikipedia_to_actual(wiki_topic_to_prob)
            class_probabilities[i] = topic_index_to_prob
            # Get Wikipedia topic probabilities at specified depth for random forest training/prediction
            # (the GET presumably refers to the document just POSTed — server is stateful; TODO confirm)
            r = requests.get(url = 'http://127.0.0.1:5000/probabilities/{}'.format(self.topic_depth))
            wiki_topic_to_prob = r.json()
            # Store Wikipedia topic probabilities
            for topic, probability in wiki_topic_to_prob.items():
                wikipedia_topic_probabilities[topic][i] = probability
        # Convert Wikipedia topic probabilities from dictionary to matrix
        if training:
            # Freeze the feature space: one column per topic seen in training.
            self.number_of_features = len(wikipedia_topic_probabilities.keys())
            self.index_to_topic = {}
            i = 0
            for topic in wikipedia_topic_probabilities.keys():
                self.index_to_topic[i] = topic
                i += 1
        wiki_prob_matrix = np.zeros(shape=(len(x), self.number_of_features))
        print('Wiki topic probabilities shape: {}'.format(wiki_prob_matrix.shape)) # DEBUGGING
        for i in range(self.number_of_features):
            for j in range(len(x)):
                # Get topic for index
                topic = self.index_to_topic[i]
                # A particular document may not have a value for this topic if its unrelated so set to 0
                if topic in wikipedia_topic_probabilities and j in wikipedia_topic_probabilities[topic]:
                    wiki_prob_matrix[j][i] = wikipedia_topic_probabilities[topic][j]
                else:
                    wiki_prob_matrix[j][i] = 0.0
        # Root-topic probabilities are cached for inspection but not returned.
        self.last_class_probabilities = class_probabilities
        return wiki_prob_matrix

    def convert_topic_probs_wikipedia_to_actual(self, wiki_topic_to_prob):
        """
        Converts root level wikipedia topic probabilities to target class probabilities.
        Note more than one root level wikipedia topic may map to the same target class.

        :param wiki_topic_to_prob: dict of wiki topic to probability.
        :returns: numpy array of shape (number of target classes,).
        """
        topic_indexes = set([index for index in wiki_topics_to_actual_topics.values()])
        topic_index_to_prob = np.zeros(shape=len(topic_indexes))
        if wiki_topic_to_prob:
            # Probabilities of wiki topics mapping to the same class are summed.
            for topic in wiki_topic_to_prob.keys():
                topic_index_to_prob[wiki_topics_to_actual_topics[topic]] += wiki_topic_to_prob[topic]
        return topic_index_to_prob
1634087 | #!/usr/bin/python
import socket
import struct
import sys
import zlib
from google.protobuf import service
import rpc_pb2
import rpcservice_pb2
def encode(message):
    """Frame a protobuf *message* for the wire.

    Layout: 4-byte big-endian length (payload + checksum), then the payload
    ("RPC0" tag + serialized message), then a 4-byte adler32 of the payload.
    """
    payload = "RPC0" + message.SerializeToString()
    checksum = zlib.adler32(payload)
    header = struct.pack(">l", len(payload) + 4)
    trailer = struct.pack(">l", checksum)
    return header + payload + trailer
def decode(sock):
    """Read one framed RpcMessage from *sock*; return None on clean EOF.

    Inverse of encode(): 4-byte big-endian length, "RPC0" tag, serialized
    message, 4-byte adler32 checksum.  Python 2 code: str/bytes are the same
    type here.  Malformed frames fail via assert (stripped under -O).
    """
    head = sock.recv(4, socket.MSG_WAITALL)
    if not head:
        # Peer closed the connection before sending a frame.
        return None
    assert len(head) == 4
    length, = struct.unpack(">l", head)
    # Minimum frame: 4-byte tag + 4-byte checksum, plus some payload.
    assert length > 8
    body = sock.recv(length, socket.MSG_WAITALL)
    assert len(body) == length
    assert "RPC0" == body[:4]
    # Verify the adler32 checksum over everything but the trailing 4 bytes.
    cksum, = struct.unpack(">l", body[-4:])
    cksum2 = zlib.adler32(body[:-4])
    assert cksum == cksum2
    message = rpc_pb2.RpcMessage()
    message.ParseFromString(body[4:-4])
    return message
class SyncRpcChannel(service.RpcChannel):
    """Blocking protobuf RPC channel: one TCP connection, one request at a time.

    Not thread-safe: the request counter and socket are shared state.
    """
    def __init__(self, hostport):
        # hostport: (host, port) pair accepted by socket.create_connection.
        self.sock = socket.create_connection(hostport)
        self.count = 0  # monotonically increasing request id

    def CallMethod(self, method_descriptor, rpc_controller,
                   request, response_class, done):
        """Send *request* and synchronously wait for the matching response.

        rpc_controller and done are ignored — this channel is synchronous
        and returns the parsed response directly.
        """
        message = rpc_pb2.RpcMessage()
        message.type = rpc_pb2.REQUEST
        self.count += 1
        message.id = self.count
        message.service = method_descriptor.containing_service.full_name
        message.method = method_descriptor.name
        message.request = request.SerializeToString()
        wire = encode(message)
        self.sock.sendall(wire)
        responseMessage = decode(self.sock)
        # The reply must echo the request id (no pipelining on this channel).
        assert responseMessage.type == rpc_pb2.RESPONSE
        assert responseMessage.id == message.id
        response = response_class()
        response.ParseFromString(responseMessage.response)
        return response
class ServerRpcChannel(service.RpcChannel):
    """Minimal blocking RPC server: accepts one client at a time and dispatches
    framed requests to a protobuf service implementation.  Python 2 code
    (print statements)."""
    def __init__(self, port):
        self.serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Allow quick restarts without waiting out TIME_WAIT.
        self.serversocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.serversocket.bind(('', port))
        self.serversocket.listen(5)

    def serveOneClient(self, service):
        # NOTE(review): the `service` parameter shadows the imported
        # google.protobuf `service` module inside this method.
        (clientsocket, address) = self.serversocket.accept()
        print "got connection from", address
        while True:
            message = decode(clientsocket)
            if not message:
                # Client disconnected cleanly.
                clientsocket.close()
                break
            assert message.type == rpc_pb2.REQUEST
            assert message.service == service.GetDescriptor().full_name
            # Look up the method and rebuild the request object by name.
            method = service.GetDescriptor().FindMethodByName(message.method)
            request_class = service.GetRequestClass(method)
            request = request_class()
            request.ParseFromString(message.request)
            # Synchronous dispatch: controller and done-callback are None.
            response = service.CallMethod(method, None, request, None)
            responseMessage = rpc_pb2.RpcMessage()
            responseMessage.type = rpc_pb2.RESPONSE
            responseMessage.id = message.id
            responseMessage.response = response.SerializeToString()
            wire = encode(responseMessage)
            clientsocket.sendall(wire)
        print "connection is down", address
# Demo client: connects to "host:port" given as argv[1] and exercises the
# RpcService stub (Python 2 print statements).
if __name__ == "__main__":
    channel = SyncRpcChannel(sys.argv[1].split(':'))
    rpcList = rpcservice_pb2.RpcService_Stub(channel)
    # Default request: list services only.
    print rpcList.listRpc(None, rpcservice_pb2.ListRpcRequest())
    request = rpcservice_pb2.ListRpcRequest()
    request.list_method = True
    print rpcList.listRpc(None, request)
    request = rpcservice_pb2.GetServiceRequest()
    request.service_name = "zurg.SlaveService"
    print rpcList.getService(None, request)
| StarcoderdataPython |
3301391 | from .rpc_block_digestors import *
from .rpc_dev_digestors import *
from .rpc_log_digestors import *
from .rpc_mining_digestors import *
from .rpc_node_digestors import *
from .rpc_state_digestors import *
from .rpc_submission_digestors import *
from .rpc_transaction_digestors import *
from .rpc_whisper_digestors import *
| StarcoderdataPython |
4801791 | <filename>tests/functional/Hydro/Riemann/RiemannSolution.py
#!/usr/bin/env python
#-------------------------------------------------------------------------------
# RiemannSolution
#
# Adapted from code I got from <NAME>, which in turn was based on code from
# Toro as described in the following comments.
#
# Exact Riemann solver for the Euler equations in one dimension
# Translated from the Fortran code er1pex.f and er1pex.ini
# by Dr. <NAME> downloaded from
# http://www.numeritek.com/numerica_software.html#freesample
#-------------------------------------------------------------------------------
from math import *
import numpy as np
import argparse
# Several standard tests listed as (x0, x1, xdiaph, gamma_gas, out_time, dl, vl, pl, dr, vr, pr)
# The bounds are chosen so that a good comparison can be made in the range x \in [0,1]
# Keys are lower-case; RiemannSolution lower-cases the requested problem name
# before lookup, so "Sod", "SOD", etc. all resolve here.
Riemann_packaged_problems = {
    "sod"                : ( 0.0, 1.0, 0.5, 1.4, 0.20,  1.0,       0.0,      1.0,      0.125,   0.0,      0.1),      # TEST 1 (Modified Sod)
    "123"                : ( 0.0, 1.0, 0.5, 1.4, 0.15,  1.0,      -2.0,      0.4,      1.0,     2.0,      0.4),      # TEST 2 (123 problem)
    "leftwc"             : ( 0.0, 1.0, 0.5, 1.4, 0.012, 1.0,       0.0,   1000.0,      1.0,     0.0,      0.01),     # TEST 3 (Left Woodward & Colella)
    "2shock_collision"   : (-1.0, 2.0, 0.4, 1.4, 0.035, 5.99924,  19.5975, 460.894,    5.99242, -6.19633, 46.0950),  # TEST 4 (Collision of 2 shocks)
    "stationary_contact" : (-0.5, 1.5, 0.8, 1.4, 0.012, 1.0,     -19.59745, 1000.0,    1.0,    -19.59745, 0.01),     # TEST 5 (Stationary contact)
    "slow_shock"         : (-2.0, 8.0, 0.5, 1.4, 1.0,   3.857143, -0.810631, 10.33333, 1.0,    -3.44,     1.0),      # TEST 6 (Slow shock)
    "shock_contact_shock": (-1.0, 2.0, 0.5, 1.4, 0.3,   1.0,       0.5,      1.0,      1.25,   -0.5,      1.0),      # TEST 7 (Shock-Contact-Shock)
    "leblanc"            : ( 0.0, 1.0, 0.3, 1.4, 0.5,   1.0,       0.0,      2.0e-1/3.0, 0.01,  0.0,      2.0e-10/3.0), # TEST 8 (LeBlanc)
}
#-------------------------------------------------------------------------------
# The main object.
#-------------------------------------------------------------------------------
class RiemannSolution:
    def __init__(self,
                 problem = "Sod",   # ("", "Sod", "123", "Stationary_contact", "Slow_shock", "Slow_contact_shock", "LeBlanc")
                 n = 1000,          # number of points in evaluating exact solution
                 x0 = None,         # box min coordinate
                 x1 = None,         # box max coordinate
                 xdiaph = None,     # position of diaphragm xdiaph \in [x0, x1]
                 gamma_gas = None,  # ratio of specific heats
                 out_time = None,   # default time of solution
                 dl = None,         # density (left state)
                 vl = None,         # velocity (left state)
                 pl = None,         # pressure (left state)
                 hl = None,         # smoothing scale (left state)
                 dr = None,         # density (right state)
                 vr = None,         # velocity (right state)
                 pr = None,         # pressure (right state)
                 hr = None):        # smoothing scale (right state)
        """Set up a Riemann problem, either a named packaged test or fully custom.

        Any parameter left as None while `problem` names a packaged test is
        filled in from Riemann_packaged_problems; explicitly-passed values
        override the packaged ones.
        """
        # NOTE(review): this assert uses truthiness, so a fully-custom problem
        # ("" name) with a legitimate zero value (e.g. x0=0.0 or vl=0.0) trips
        # the assertion; `is not None` checks were probably intended.
        assert problem or (x0 and x1 and out_time and xdiaph and gamma_gas and dl and vl and pl and dr and vr and pr)
        # Get the ICs.
        if problem:
            assert problem.lower() in Riemann_packaged_problems
            _x0, _x1, _xdiaph, _gamma_gas, _out_time, _dl, _vl, _pl, _dr, _vr, _pr = Riemann_packaged_problems[problem.lower()]
            # Fall back to the packaged value for every parameter not given.
            if x0 is None:
                x0 = _x0
            if x1 is None:
                x1 = _x1
            if xdiaph is None:
                xdiaph = _xdiaph
            if gamma_gas is None:
                gamma_gas = _gamma_gas
            if out_time is None:
                out_time = _out_time
            if dl is None:
                dl = _dl
            if vl is None:
                vl = _vl
            if pl is None:
                pl = _pl
            if dr is None:
                dr = _dr
            if vr is None:
                vr = _vr
            if pr is None:
                pr = _pr
        # Store the variables (hl/hr may stay None; solution() derives them
        # from the sampling grid spacing when unset).
        self.n = n
        self.x0 = x0
        self.x1 = x1
        self.xdiaph = xdiaph
        self.gamma_gas = gamma_gas
        self.out_time = out_time
        self.dl = dl
        self.vl = vl
        self.pl = pl
        self.hl = hl
        self.dr = dr
        self.vr = vr
        self.pr = pr
        self.hr = hr
        return
#---------------------------------------------------------------------------
# Compute the solution.
#---------------------------------------------------------------------------
def solution(self,
time = None,
x = None):
n = self.n
x0 = self.x0
x1 = self.x1
xdiaph = self.xdiaph
gamma_gas = self.gamma_gas
out_time = self.out_time
dl = self.dl
vl = self.vl
pl = self.pl
hl = self.hl
dr = self.dr
vr = self.vr
pr = self.pr
hr = self.hr
# Solution time
if not time is None:
out_time = time
else:
out_time = self.out_time
# Sampling positions
if x is None:
assert n > 0
assert x1 > x0
x = np.linspace(x0, x1, n)
else:
n = len(x)
# Did we get the initial (left, right) h?
if hl is None:
hl = x[1] - x[0]
if hr is None:
hr = x[-1] - x[-2]
assert hl > 0 and hr > 0
# compute gamma related constants
g1 = (gamma_gas - 1.0)/(2.0*gamma_gas)
g2 = (gamma_gas + 1.0)/(2.0*gamma_gas)
g3 = 2.0*gamma_gas/(gamma_gas - 1.0)
g4 = 2.0/(gamma_gas - 1.0)
g5 = 2.0/(gamma_gas + 1.0)
g6 = (gamma_gas - 1.0)/(gamma_gas + 1.0)
g7 = (gamma_gas - 1.0)/2.0
g8 = gamma_gas - 1.0
# compute sound speeds
cl = sqrt(gamma_gas*pl/dl)
cr = sqrt(gamma_gas*pr/dr)
#---------------------------------------------------------------------------
# purpose: to provide a guessed value for pressure
# pm in the Star Region. The choice is made
# according to adaptive Riemann solver using
# the PVRS, TRRS and TSRS approximate
# Riemann solvers. See Sect. 9.5 of Chapt. 9 of Ref. 1
#---------------------------------------------------------------------------
def guessp():
quser = 2.0
# compute guess pressure from PVRS Riemann solver
cup = 0.25*(dl + dr)*(cl + cr)
ppv = 0.5*(pl + pr) + 0.5*(vl - vr)*cup
ppv = max(0.0, ppv)
pmin = min(pl, pr)
pmax = max(pl, pr)
qmax = pmax/pmin
if (qmax <= quser and (pmin <= ppv and ppv <= pmax)):
pm = ppv # select PVRS Riemann solver
else:
if (ppv < pmin):
# select Two-Rarefaction Riemann solver
pq = pow(pl/pr, g1)
vm = (pq*vl/cl + vr/cr + g4*(pq - 1.0))/(pq/cl + 1.0/cr)
ptl = 1.0 + g7*(vl - vm)/cl
ptr = 1.0 + g7*(vm - vr)/cr
pm = 0.5*(pow(pl*ptl, g3) + pow(pr*ptr, g3))
else:
# select Two-Shock Riemann solver with PVRS as estimate
gel = sqrt((g5/dl)/(g6*pl + ppv))
ger = sqrt((g5/dr)/(g6*pr + ppv))
pm = (gel*pl + ger*pr - (vr - vl))/(gel + ger)
return pm
#---------------------------------------------------------------------------
# purpose: to evaluate the pressure functions
# fl and fr in exact Riemann solver
# and their first derivatives
#---------------------------------------------------------------------------
def prefun(p, dk, pk, ck):
if (p <= pk):
# rarefaction wave
pratio = p/pk
f = g4*ck*(pow(pratio, g1) - 1.0)
fd = (1.0/(dk*ck))*pow(pratio, -g2)
else:
# shock wave
ak = g5/dk
bk = g6*pk
qrt = sqrt(ak/(bk + p))
f = (p - pk)*qrt
fd = (1.0 - 0.5*(p - pk)/(bk + p))*qrt
return f, fd
#---------------------------------------------------------------------------
# purpose: to compute the solution for pressure and
# velocity in the Star Region
#---------------------------------------------------------------------------
def starpu(pscale):
nriter = 20
tolpre = 1.0e-6
# guessed value pstart is computed
pstart = guessp()
pold = pstart
udiff = vr - vl
print ("----------------------------------------\n"
" Iteration number Change\n"
"----------------------------------------")
i = 1
change = 10.0*tolpre
while i <= nriter and change > tolpre:
fl, fld = prefun(pold, dl, pl, cl)
fr, frd = prefun(pold, dr, pr, cr)
p = pold - (fl + fr + udiff)/(fld + frd)
change = 2.0*abs((p - pold)/(p + pold))
print '\t', i, "\t\t", change
if (p < 0.0):
p = tolpre
pold = p
i += 1
if (i > nriter):
print "divergence in Newton-Raphson iteration"
# compute velocity in star region
u = 0.5*(vl + vr + fr - fl)
print "----------------------------------------\n" \
" Pressure Velocity\n" \
"----------------------------------------\n" \
" ", p/pscale, "\t\t", u, '\n' \
"----------------------------------------"
return p, u
#---------------------------------------------------------------------------
# purpose: to sample the solution throughout the wave
# pattern. Pressure pm and velocity vm in the
# star region are known. Sampling is performed
# in terms of the 'speed' s = x/t. Sampled
# values are d, v, p
#---------------------------------------------------------------------------
def sample(pm, vm, s):
if (s <= vm):
# sampling point lies to the left of the contact discontinuity
if (pm <= pl):
# left rarefaction
shl = vl - cl
if (s <= shl):
# sampled point is left data state
d = dl
v = vl
p = pl
h = hl
else:
cml = cl*pow(pm/pl, g1)
stl = vm - cml
if (s > stl):
# sampled point is star left state
d = dl*pow(pm/pl, 1.0/gamma_gas)
v = vm
p = pm
h = hl*dl/d
else:
# sampled point is inside left fan
v = g5*(cl + g7*vl + s)
c = g5*(cl + g7*(vl - s))
d = dl*pow(c/cl, g4)
p = pl*pow(c/cl, g3)
h = hl*dl/d
else:
# left shock
pml = pm/pl
sl = vl - cl*sqrt(g2*pml + g1)
if (s <= sl):
# sampled point is left data state
d = dl
v = vl
p = pl
h = hl
else:
# sampled point is star left state
d = dl*(pml + g6)/(pml*g6 + 1.0)
v = vm
p = pm
h = hl*dl/d
else:
# sampling point lies to the right of the contact discontinuity
if (pm > pr):
# right shock
pmr = pm/pr
sr = vr + cr*sqrt(g2*pmr + g1)
if (s >= sr):
# sampled point is right data state
d = dr
v = vr
p = pr
h = hr
else:
# sampled point is star right state
d = dr*(pmr + g6)/(pmr*g6 + 1.0)
v = vm
p = pm
h = hr*dr/d
else:
# right rarefaction
shr = vr + cr
if (s >= shr):
# sampled point is right data state
d = dr
v = vr
p = pr
h = hr
else:
cmr = cr*pow(pm/pr, g1)
str = vm + cmr
if (s <= str):
# sampled point is star right state
d = dr*pow(pm/pr, 1.0/gamma_gas)
v = vm
p = pm
h = hr*dr/d
else:
# sampled point is inside left fan
v = g5*(-cr + g7*vr + s)
c = g5*(cr - g7*(vr - s))
d = dr*pow(c/cr, g4)
p = pr*pow(c/cr, g3)
h = hr*dr/d
return d, v, p, h
# the pressure positivity condition is tested for
if (g4*(cl+cr) <= (vr-vl)):
raise RunTimeError, ("the initial data is such that vacuum is generated"
"\nstopping program")
# exact solution for pressure and velocity in star region is found
pm, vm = starpu(1.0)
# complete solution at time out_time is found
d = np.empty(n)
v = np.empty(n)
p = np.empty(n)
eps = np.empty(n)
A = np.empty(n)
h = np.empty(n)
for i in xrange(n):
s = (x[i] - xdiaph)/max(1e-10, out_time)
ds, vs, ps, hs = sample(pm, vm, s)
d[i] = ds
v[i] = vs
p[i] = ps
eps[i] = ps/(g8*ds)
A[i] = ps/pow(ds, gamma_gas)
h[i] = hs
return x, v, eps, d, p, A, h
#-------------------------------------------------------------------------------
# Provide a way to call this script as a standalone executable.
#-------------------------------------------------------------------------------
if __name__ == "__main__":
    # Command-line driver: compute the Riemann solution, optionally writing
    # the profiles to a file and/or plotting them with matplotlib.
    ap = argparse.ArgumentParser(description = "Compute the Riemann solution, with optional output to a file or plotted to the screen.")
    ap.add_argument("--problem",
                    default = "Sod",
                    help = """
                    Use one of the canned Riemann initial conditions: (Sod, 123, Stationary_contact, Slow_shock, Slow_contact_shock, LeBlanc).
                    If specified as the empty string "" (or None), the full state must be specified explicitly.""")
    ap.add_argument("--n",
                    default = 1000,
                    type = int,
                    help = "Number of points to generate in the solution.")
    ap.add_argument("--x0",
                    default = None,
                    type = float,
                    help = "Minimum spatial coordinate in the tube.")
    ap.add_argument("--x1",
                    default = None,
                    type = float,
                    help = "Maximum spatial coordinate in the tube.")
    ap.add_argument("--xdiaph",
                    default = None,
                    type = float,
                    help = "Coordinate of the diaphragm.")
    ap.add_argument("--gamma_gas",
                    default = None,
                    type = float,
                    help = "Ratio of specific heats.")
    ap.add_argument("--out_time",
                    default = None,
                    type = float,
                    help = "Solution time.")
    ap.add_argument("--dl",
                    default = None,
                    type = float,
                    help = "Initial density for left state.")
    ap.add_argument("--vl",
                    default = None,
                    type = float,
                    help = "Initial velocity for left state.")
    ap.add_argument("--pl",
                    default = None,
                    type = float,
                    help = "Initial pressure for left state.")
    ap.add_argument("--hl",
                    default = None,
                    type = float,
                    help = "Initial smoothing scale for left state.")
    ap.add_argument("--dr",
                    default = None,
                    type = float,
                    help = "Initial density for right state.")
    ap.add_argument("--vr",
                    default = None,
                    type = float,
                    help = "Initial velocity for right state.")
    ap.add_argument("--pr",
                    default = None,
                    type = float,
                    help = "Initial pressure for right state.")
    ap.add_argument("--hr",
                    default = None,
                    type = float,
                    help = "Initial smoothing scale for right state.")
    ap.add_argument("--file",
                    default = None,
                    help = "Write profiles to given file.")
    ap.add_argument("--noheader",
                    action = "store_true",
                    help = "Do not write a header at the top of the output file.")
    ap.add_argument("--plot",
                    action = "store_true",
                    help = "Plot the profiles to the screen.")
    ap.add_argument("--plotsize",
                    default = 10,
                    type = float,
                    help = "Set the size of the figure (in inches) when plotting.")
    args = ap.parse_args()
    # Dump the parsed options into module globals (problem, n, x0, ...).
    globals().update(vars(args))

    # Compute the solution.
    answer = RiemannSolution(problem = problem,
                             n = n,
                             x0 = x0,
                             x1 = x1,
                             xdiaph = xdiaph,
                             gamma_gas = gamma_gas,
                             out_time = out_time,
                             dl = dl,
                             vl = vl,
                             pl = pl,
                             hl = hl,
                             dr = dr,
                             vr = vr,
                             pr = pr,
                             hr = hr)
    x, v, eps, d, p, A, h = answer.solution(time = out_time)

    # Write the output to a text file.
    if file:
        with open(file, "w") as f:
            # Write a header.
            # FIX: x1, xdiaph, and gamma_gas previously all interpolated
            # %(x0)s (copy-paste bug), and the dr line omitted the hr label.
            if not noheader:
                f.write(
"""# Output from RiemannSolution using the arguments:
#      problem = %(problem)s
#            n = %(n)s
#           x0 = %(x0)s
#           x1 = %(x1)s
#       xdiaph = %(xdiaph)s
#    gamma_gas = %(gamma_gas)s
#     out_time = %(out_time)s
#   dl, vl, pl, hl = %(dl)s, %(vl)s, %(pl)s, %(hl)s
#   dr, vr, pr, hr = %(dr)s, %(vr)s, %(pr)s, %(hr)s
#
# x        rho        vel        P        eps        A        h
""" % {"problem"   : problem,
       "n"         : n,
       "x0"        : x0,
       "x1"        : x1,
       "xdiaph"    : xdiaph,
       "gamma_gas" : gamma_gas,
       "out_time"  : out_time,
       "dl"        : dl,
       "vl"        : vl,
       "pl"        : pl,
       "hl"        : hl,
       "dr"        : dr,
       "vr"        : vr,
       "pr"        : pr,
       "hr"        : hr})
            for xi, di, vi, pi, epsi, Ai, hi in zip(x, d, v, p, eps, A, h):
                f.write((7*"%20.17e ") % (xi, di, vi, pi, epsi, Ai, hi) + "\n")

    # Plot the results to the screen via matplotlib (if available).
    # FIX: the bare `except:` previously swallowed *any* plotting error;
    # only the import is best-effort now.
    if plot:
        try:
            import matplotlib.pyplot as plt
        except ImportError:
            print("ERROR: unable to import matplotlib for graphics.")
        else:
            fig = plt.figure(figsize=(plotsize, 2.0/3.0*plotsize))
            axes = []
            for i, (q, label) in enumerate([(d, "Density"),
                                            (v, "Velocity"),
                                            (p, "Pressure"),
                                            (eps, "Specific Thermal Energy"),
                                            (A, "Entropy"),
                                            (h, "Smoothing scale")]):
                axes.append(fig.add_subplot(2, 3, i + 1))
                plt.plot(x, q, linewidth=3)
                plt.title(label)
                qmin = min(q)
                qmax = max(q)
                qdiff = qmax - qmin
                axes[i].set_ylim(qmin - 0.1*qdiff, qmax + 0.1*qdiff)
            plt.show()
| StarcoderdataPython |
1772112 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 28 18:28:37 2021
@author: shreyan
"""
import turtle
import random
# Main window: fixed-size field; tracer(0) disables auto-redraw so the
# game loop controls frame timing via wn.update().
wn = turtle.Screen()
wn.title("Pong")
wn.bgcolor("white")
# Window resolution in pixels; all positions below are derived from these.
xres = 1024
yres = 720
wn.setup(width = xres, height = yres)
wn.tracer(0)
# Paddle 1: left player (green), driven by the W/S keys.
paddle1 = turtle.Turtle()
paddle1.speed(0)
paddle1.shape("square")
paddle1.penup()
paddle1.color("green")
paddle1.goto(-(xres/2 - 50),0)
paddle1.shapesize(stretch_wid = 5, stretch_len = 1)
# Paddle 2: right player (red), driven by the O/L keys.
paddle2 = turtle.Turtle()
paddle2.speed(0)
paddle2.shape("square")
paddle2.penup()
paddle2.color("red")
paddle2.goto(xres/2 - 50,0)
paddle2.shapesize(stretch_wid = 5, stretch_len = 1)
# Ball: dx/dy are per-frame velocity components added by the game loop.
# NOTE(review): turtle.speed() only accepts 0-10; 40 is coerced to 0
# (no animation) -- confirm that "fastest" is the intent here.
ball = turtle.Turtle()
ball.speed(40)
ball.shape("circle")
ball.penup()
ball.color("black")
ball.goto(0,0)
ball.dx = 0.5
ball.dy = 0.5
# Score board: a hidden turtle that rewrites the score text at the top.
board = turtle.Turtle()
board.speed(0)
board.penup()
board.color("black")
board.hideturtle()
board.goto(0,yres/2 - 50)
board.write("Player A = 0 | Player B = 0", align = "center", font = ("Consolas",16,"normal"))
# Running scores: score1 = player A (left), score2 = player B (right).
score1 = 0
score2 = 0
#Paddle1 Movement
def paddle1_up():
    """Move the left paddle up 10 px, clamped to the top of the field."""
    # sety() returns None, so the original `y = paddle1.sety(y)` binding was
    # meaningless; clamp first, then apply the position once (no overshoot).
    paddle1.sety(min(paddle1.ycor() + 10, yres/2 - 60))
def paddle1_down():
    """Move the left paddle down 10 px, clamped to the bottom of the field."""
    # Clamp-then-set replaces the original set-then-clamp (sety() returns
    # None, so assigning its result did nothing).
    paddle1.sety(max(paddle1.ycor() - 10, -(yres/2 - 60)))
#Paddle2 Movement
def paddle2_up():
    """Move the right paddle up 10 px, clamped to the top of the field."""
    # Clamp-then-set replaces the original set-then-clamp (sety() returns
    # None, so assigning its result did nothing).
    paddle2.sety(min(paddle2.ycor() + 10, yres/2 - 60))
def paddle2_down():
    """Move the right paddle down 10 px, clamped to the bottom of the field."""
    # Clamp-then-set replaces the original set-then-clamp (sety() returns
    # None, so assigning its result did nothing).
    paddle2.sety(max(paddle2.ycor() - 10, -(yres/2 - 60)))
# Controls: W/S drive the left paddle, O/L the right paddle.
wn.listen()
wn.onkeypress(paddle1_up,'w')
wn.onkeypress(paddle1_down,'s')
wn.onkeypress(paddle2_up,'o')
wn.onkeypress(paddle2_down,'l')
# Main game loop: move the ball, bounce off walls/paddles, track scores.
while True:
    wn.update()

    # Ball movement: advance one step along its velocity vector.
    ball.setx(ball.xcor() + ball.dx)
    ball.sety(ball.ycor() + ball.dy)

    # Boundary check: reflect off the top and bottom walls.
    if ball.ycor() > yres/2 - 20:
        ball.sety(yres/2 - 20)
        ball.dy *= -1
    if ball.ycor() < -(yres/2 - 20):
        ball.sety(-(yres/2 - 20))
        ball.dy *= -1

    # Ball left the right edge: player A scores; re-serve from the center.
    # NOTE(review): dx is scaled by a factor < 1 on every serve and its sign
    # never flips, so the ball monotonically slows down over a long game and
    # always serves in the same direction -- confirm this is intended (the
    # removed commented-out code suggests a direction flip was once tried).
    if ball.xcor() > xres/2 - 10:
        ball.goto(0,0)
        ball.dx *= random.uniform(0.9,1)
        score1 += 1
        board.clear()
        board.write("Player A = {} | Player B = {}".format(score1,score2), align = "center", font = ("Consolas",16,"normal"))

    # Ball left the left edge: player B scores; re-serve from the center.
    if ball.xcor() < -(xres/2 - 10):
        ball.goto(0,0)
        ball.dx *= random.uniform(0.9,1)
        score2 += 1
        board.clear()
        board.write("Player A = {} | Player B = {}".format(score1,score2), align = "center", font = ("Consolas",16,"normal"))

    # Ball and paddle collision: reflect horizontally when the ball overlaps
    # a paddle's bounding band.
    if ball.xcor() > xres/2 - 70 and ball.xcor() < xres/2 - 40 and ball.ycor() < paddle2.ycor() + 60 and ball.ycor() > paddle2.ycor() - 60:
        ball.setx(xres/2 - 70)
        ball.dx *= -1
    if ball.xcor() < -(xres/2 - 70) and ball.xcor() > -(xres/2 - 40) and ball.ycor() < paddle1.ycor() + 60 and ball.ycor() > paddle1.ycor() - 60:
        ball.setx(-(xres/2 - 70))
        ball.dx *= -1
| StarcoderdataPython |
3234728 | <reponame>calculusrobotics/RNNs-for-Bayesian-State-Estimation<filename>Blender 2.91/2.91/scripts/addons/object_collection_manager/operators.py
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Copyright 2011, <NAME>
import bpy
from copy import deepcopy
from bpy.types import (
Operator,
)
from bpy.props import (
BoolProperty,
StringProperty,
IntProperty
)
from . import internals
from .internals import (
expanded,
layer_collections,
qcd_slots,
rto_history,
expand_history,
phantom_history,
copy_buffer,
swap_buffer,
update_property_group,
get_modifiers,
get_move_selection,
get_move_active,
update_qcd_header,
send_report,
)
from .operator_utils import (
get_rto,
set_rto,
apply_to_children,
isolate_rto,
toggle_children,
activate_all_rtos,
invert_rtos,
copy_rtos,
swap_rtos,
clear_copy,
clear_swap,
link_child_collections_to_parent,
remove_collection,
select_collection_objects,
set_exclude_state,
)
from . import ui
class SetActiveCollection(Operator):
    '''Set the active collection'''
    bl_label = "Set Active Collection"
    bl_idname = "view3d.set_active_collection"
    bl_options = {'UNDO'}

    is_master_collection: BoolProperty()
    collection_name: StringProperty()

    def execute(self, context):
        """Make the clicked row's collection the active layer collection."""
        if self.is_master_collection:
            layer_collection = context.view_layer.layer_collection
        else:
            laycol = layer_collections[self.collection_name]
            layer_collection = laycol["ptr"]

            # set selection to this row.  This must stay inside the else
            # branch: `laycol` is undefined for the master collection and
            # the master collection has no tree-view row.
            cm = context.scene.collection_manager
            cm.cm_list_index = laycol["row_index"]

        context.view_layer.active_layer_collection = layer_collection

        # Blender silently refuses to activate an excluded collection.
        if context.view_layer.active_layer_collection != layer_collection:
            self.report({'WARNING'}, "Can't set excluded collection as active")

        return {'FINISHED'}
class ExpandAllOperator(Operator):
    '''Expand/Collapse all collections'''
    bl_label = "Expand All Items"
    bl_idname = "view3d.expand_all_items"

    def execute(self, context):
        """Toggle between a fully collapsed and fully expanded tree."""
        global expand_history

        if len(expanded) > 0:
            # Collapse everything and scroll the list back to the top.
            expanded.clear()
            context.scene.collection_manager.cm_list_index = 0
        else:
            # Expand every collection that actually has children.
            for laycol in layer_collections.values():
                if laycol["ptr"].children:
                    expanded.add(laycol["name"])

        # clear expand history
        expand_history["target"] = ""
        expand_history["history"].clear()

        # update tree view
        update_property_group(context)

        return {'FINISHED'}
class ExpandSublevelOperator(Operator):
    bl_label = "Expand Sublevel Items"
    bl_description = (
        "  * Ctrl+LMB - Expand/Collapse all sublevels\n"
        "  * Shift+LMB - Isolate tree/Restore\n"
        "  * Alt+LMB - Discard history"
        )
    bl_idname = "view3d.expand_sublevel"

    expand: BoolProperty()
    name: StringProperty()
    index: IntProperty()

    def invoke(self, context, event):
        """Expand/collapse one collection row, dispatching on modifiers.

        Note: the unused `cls = ExpandSublevelOperator` local from the
        original was removed -- nothing in this method reads it.
        """
        global expand_history

        modifiers = get_modifiers(event)

        if modifiers == {"alt"}:
            # discard the isolation history
            expand_history["target"] = ""
            expand_history["history"].clear()

        elif modifiers == {"ctrl"}:
            # expand/collapse all subcollections
            expand = None

            # check whether to expand or collapse
            if self.name in expanded:
                expanded.remove(self.name)
                expand = False
            else:
                expanded.add(self.name)
                expand = True

            # do expanding/collapsing
            def set_expanded(layer_collection):
                if expand:
                    expanded.add(layer_collection.name)
                else:
                    expanded.discard(layer_collection.name)

            apply_to_children(layer_collections[self.name]["ptr"], set_expanded)

            expand_history["target"] = ""
            expand_history["history"].clear()

        elif modifiers == {"shift"}:
            # Isolate this branch of the tree (collapse all siblings up the
            # ancestry), or restore the previous state on a second click.
            def isolate_tree(current_laycol):
                parent = current_laycol["parent"]

                for laycol in parent["children"]:
                    if laycol["name"] != current_laycol["name"] and laycol["name"] in expanded:
                        expanded.remove(laycol["name"])
                        expand_history["history"].append(laycol["name"])

                if parent["parent"]:
                    isolate_tree(parent)

            if self.name == expand_history["target"]:
                # second click on the same target: restore what was collapsed
                for item in expand_history["history"]:
                    expanded.add(item)

                expand_history["target"] = ""
                expand_history["history"].clear()
            else:
                expand_history["target"] = ""
                expand_history["history"].clear()

                isolate_tree(layer_collections[self.name])
                expand_history["target"] = self.name

        else:
            # expand/collapse collection
            if self.expand:
                expanded.add(self.name)
            else:
                expanded.remove(self.name)

            expand_history["target"] = ""
            expand_history["history"].clear()

        # set the selected row to the collection you're expanding/collapsing
        # to preserve the tree view's scrolling
        context.scene.collection_manager.cm_list_index = self.index

        # update tree view
        update_property_group(context)

        return {'FINISHED'}
class CMSelectCollectionObjectsOperator(Operator):
    bl_label = "Select All Objects in the Collection"
    bl_description = (
        "  * LMB - Select all objects in collection.\n"
        "  * Shift+LMB - Add/Remove collection objects from selection.\n"
        "  * Ctrl+LMB - Isolate nested selection.\n"
        "  * Ctrl+Shift+LMB - Add/Remove nested from selection"
        )
    bl_idname = "view3d.select_collection_objects"
    bl_options = {'REGISTER', 'UNDO'}

    is_master_collection: BoolProperty()
    collection_name: StringProperty()

    def invoke(self, context, event):
        """Select the collection's objects; the held modifiers choose
        replace-vs-extend and whether nested children are included."""
        modifiers = get_modifiers(event)

        if modifiers == {"shift"}:
            # extend the current selection, this collection only
            select_collection_objects(
                is_master_collection=self.is_master_collection,
                collection_name=self.collection_name,
                replace=False,
                nested=False
                )

        elif modifiers == {"ctrl"}:
            # replace the selection, including nested collections
            select_collection_objects(
                is_master_collection=self.is_master_collection,
                collection_name=self.collection_name,
                replace=True,
                nested=True
                )

        elif modifiers == {"ctrl", "shift"}:
            # extend the selection, including nested collections
            select_collection_objects(
                is_master_collection=self.is_master_collection,
                collection_name=self.collection_name,
                replace=False,
                nested=True
                )

        else:
            # plain click: replace the selection, this collection only
            select_collection_objects(
                is_master_collection=self.is_master_collection,
                collection_name=self.collection_name,
                replace=True,
                nested=False
                )

        return {'FINISHED'}
class CMSetCollectionOperator(Operator):
    bl_label = "Set Object Collection"
    bl_description = (
        "  * LMB - Move object to collection.\n"
        "  * Shift+LMB - Add/Remove object from collection"
        )
    bl_idname = "view3d.set_collection"
    bl_options = {'REGISTER', 'UNDO'}

    is_master_collection: BoolProperty()
    collection_name: StringProperty()

    def invoke(self, context, event):
        """Move (plain click) or add/remove (Shift) the selected objects
        to/from the target collection."""
        if self.is_master_collection:
            target_collection = context.view_layer.layer_collection.collection
        else:
            laycol = layer_collections[self.collection_name]
            target_collection = laycol["ptr"].collection

        selected_objects = get_move_selection()
        active_object = get_move_active()
        internals.move_triggered = True

        if not selected_objects:
            return {'CANCELLED'}

        if event.shift:
            # add objects to collection

            # make sure there is an active object
            if not active_object:
                active_object = tuple(selected_objects)[0]

            # check if in collection
            if not active_object.name in target_collection.objects:
                # add to collection
                for obj in selected_objects:
                    if obj.name not in target_collection.objects:
                        target_collection.objects.link(obj)

            else:
                warnings = False
                master_warning = False

                # remove from collections
                for obj in selected_objects:
                    if obj.name in target_collection.objects:

                        # disallow removing if only one: an object must
                        # always belong to at least one collection, so fall
                        # back to linking it to the Scene Collection.
                        if len(obj.users_collection) == 1:
                            warnings = True
                            master_laycol = context.view_layer.layer_collection
                            master_collection = master_laycol.collection

                            if obj.name not in master_collection.objects:
                                master_collection.objects.link(obj)
                            else:
                                # already only in the Scene Collection --
                                # cannot remove it at all
                                master_warning = True
                                continue

                        # remove from collection
                        target_collection.objects.unlink(obj)

                if warnings:
                    if master_warning:
                        send_report(
                            "Error removing 1 or more objects from the Scene Collection.\n"
                            "Objects would be left without a collection."
                        )
                        self.report({"WARNING"},
                            "Error removing 1 or more objects from the Scene Collection."
                            " Objects would be left without a collection."
                        )
                    else:
                        self.report({"INFO"}, "1 or more objects moved to Scene Collection.")

        else:
            # move objects to collection
            for obj in selected_objects:
                if obj.name not in target_collection.objects:
                    target_collection.objects.link(obj)

                # remove from all other collections
                for collection in obj.users_collection:
                    if collection != target_collection:
                        collection.objects.unlink(obj)

        # update the active object if needed
        if not context.active_object:
            try:
                context.view_layer.objects.active = active_object
            except RuntimeError:  # object not in visible collection
                pass

        # update qcd header UI
        update_qcd_header()

        return {'FINISHED'}
class CMExcludeOperator(Operator):
    bl_label = "[EC] Exclude from View Layer"
    bl_description = (
        "  * Shift+LMB - Isolate/Restore.\n"
        "  * Shift+Ctrl+LMB - Isolate nested/Restore.\n"
        "  * Ctrl+LMB - Toggle nested.\n"
        "  * Alt+LMB - Discard history"
        )
    bl_idname = "view3d.exclude_collection"
    bl_options = {'REGISTER', 'UNDO'}

    name: StringProperty()

    # static class var: shared with isolate_rto() to track isolation state
    isolated = False

    def invoke(self, context, event):
        """Toggle/isolate the Exclude RTO for one collection, dispatching
        on the held modifier keys."""
        global rto_history
        cls = CMExcludeOperator

        modifiers = get_modifiers(event)
        view_layer = context.view_layer.name
        orig_active_collection = context.view_layer.active_layer_collection
        orig_active_object = context.view_layer.objects.active
        laycol_ptr = layer_collections[self.name]["ptr"]

        # make sure this view layer has a history entry for this RTO
        if view_layer not in rto_history["exclude"]:
            rto_history["exclude"][view_layer] = {"target": "", "history": []}

        if modifiers == {"alt"}:
            # discard history
            del rto_history["exclude"][view_layer]
            cls.isolated = False

        elif modifiers == {"shift"}:
            isolate_rto(cls, self, view_layer, "exclude")

        elif modifiers == {"ctrl"}:
            toggle_children(self, view_layer, "exclude")
            cls.isolated = False

        elif modifiers == {"ctrl", "shift"}:
            isolate_rto(cls, self, view_layer, "exclude", children=True)

        else:
            # toggle exclusion

            # reset exclude history
            del rto_history["exclude"][view_layer]

            set_exclude_state(laycol_ptr, not laycol_ptr.exclude)

            cls.isolated = False

        # restore active collection (excluding can reset it)
        context.view_layer.active_layer_collection = orig_active_collection

        # restore active object if possible
        if orig_active_object:
            if orig_active_object.name in context.view_layer.objects:
                context.view_layer.objects.active = orig_active_object

        # reset exclude all history
        if view_layer in rto_history["exclude_all"]:
            del rto_history["exclude_all"][view_layer]

        return {'FINISHED'}
class CMUnExcludeAllOperator(Operator):
    bl_label = "[EC Global] Exclude from View Layer"
    bl_description = (
        "  * LMB - Enable all/Restore.\n"
        "  * Shift+LMB - Invert.\n"
        "  * Ctrl+LMB - Copy/Paste RTOs.\n"
        "  * Ctrl+Alt+LMB - Swap RTOs.\n"
        "  * Alt+LMB - Discard history"
        )
    bl_idname = "view3d.un_exclude_all_collections"
    bl_options = {'REGISTER', 'UNDO'}

    def invoke(self, context, event):
        """Global Exclude-column action, dispatching on modifier keys."""
        global rto_history

        orig_active_collection = context.view_layer.active_layer_collection
        orig_active_object = context.view_layer.objects.active
        view_layer = context.view_layer.name
        modifiers = get_modifiers(event)

        # make sure this view layer has an exclude_all history entry
        if view_layer not in rto_history["exclude_all"]:
            rto_history["exclude_all"][view_layer] = []

        if modifiers == {"alt"}:
            # clear all states
            del rto_history["exclude_all"][view_layer]
            clear_copy("exclude")
            clear_swap("exclude")

        elif modifiers == {"ctrl"}:
            copy_rtos(view_layer, "exclude")

        elif modifiers == {"ctrl", "alt"}:
            swap_rtos(view_layer, "exclude")

        elif modifiers == {"shift"}:
            invert_rtos(view_layer, "exclude")

        else:
            activate_all_rtos(view_layer, "exclude")

        # restore active collection
        context.view_layer.active_layer_collection = orig_active_collection

        # restore active object if possible
        if orig_active_object:
            if orig_active_object.name in context.view_layer.objects:
                context.view_layer.objects.active = orig_active_object

        return {'FINISHED'}
class CMRestrictSelectOperator(Operator):
    bl_label = "[SS] Disable Selection"
    bl_description = (
        "  * Shift+LMB - Isolate/Restore.\n"
        "  * Shift+Ctrl+LMB - Isolate nested/Restore.\n"
        "  * Ctrl+LMB - Toggle nested.\n"
        "  * Alt+LMB - Discard history"
        )
    bl_idname = "view3d.restrict_select_collection"
    bl_options = {'REGISTER', 'UNDO'}

    name: StringProperty()

    # static class var: shared with isolate_rto() to track isolation state
    isolated = False

    def invoke(self, context, event):
        """Toggle/isolate the Selectability RTO for one collection."""
        global rto_history
        cls = CMRestrictSelectOperator

        modifiers = get_modifiers(event)
        view_layer = context.view_layer.name
        laycol_ptr = layer_collections[self.name]["ptr"]

        # make sure this view layer has a history entry for this RTO
        if view_layer not in rto_history["select"]:
            rto_history["select"][view_layer] = {"target": "", "history": []}

        if modifiers == {"alt"}:
            # discard history
            del rto_history["select"][view_layer]
            cls.isolated = False

        elif modifiers == {"shift"}:
            isolate_rto(cls, self, view_layer, "select")

        elif modifiers == {"ctrl"}:
            toggle_children(self, view_layer, "select")
            cls.isolated = False

        elif modifiers == {"ctrl", "shift"}:
            isolate_rto(cls, self, view_layer, "select", children=True)

        else:
            # toggle selectable

            # reset select history
            del rto_history["select"][view_layer]

            # toggle selectability of collection
            laycol_ptr.collection.hide_select = not laycol_ptr.collection.hide_select

            cls.isolated = False

        # reset select all history
        if view_layer in rto_history["select_all"]:
            del rto_history["select_all"][view_layer]

        return {'FINISHED'}
class CMUnRestrictSelectAllOperator(Operator):
    bl_label = "[SS Global] Disable Selection"
    bl_description = (
        "  * LMB - Enable all/Restore.\n"
        "  * Shift+LMB - Invert.\n"
        "  * Ctrl+LMB - Copy/Paste RTOs.\n"
        "  * Ctrl+Alt+LMB - Swap RTOs.\n"
        "  * Alt+LMB - Discard history"
        )
    bl_idname = "view3d.un_restrict_select_all_collections"
    bl_options = {'REGISTER', 'UNDO'}

    def invoke(self, context, event):
        """Global Selectability-column action, dispatching on modifiers."""
        global rto_history

        view_layer = context.view_layer.name
        modifiers = get_modifiers(event)

        # make sure this view layer has a select_all history entry
        if view_layer not in rto_history["select_all"]:
            rto_history["select_all"][view_layer] = []

        if modifiers == {"alt"}:
            # clear all states
            del rto_history["select_all"][view_layer]
            clear_copy("select")
            clear_swap("select")

        elif modifiers == {"ctrl"}:
            copy_rtos(view_layer, "select")

        elif modifiers == {"ctrl", "alt"}:
            swap_rtos(view_layer, "select")

        elif modifiers == {"shift"}:
            invert_rtos(view_layer, "select")

        else:
            activate_all_rtos(view_layer, "select")

        return {'FINISHED'}
class CMHideOperator(Operator):
    bl_label = "[VV] Hide in Viewport"
    bl_description = (
        "  * Shift+LMB - Isolate/Restore.\n"
        "  * Shift+Ctrl+LMB - Isolate nested/Restore.\n"
        "  * Ctrl+LMB - Toggle nested.\n"
        "  * Alt+LMB - Discard history"
        )
    bl_idname = "view3d.hide_collection"
    bl_options = {'REGISTER', 'UNDO'}

    name: StringProperty()

    # static class var: shared with isolate_rto() to track isolation state
    isolated = False

    def invoke(self, context, event):
        """Toggle/isolate the per-view-layer Hide RTO for one collection."""
        global rto_history
        cls = CMHideOperator

        modifiers = get_modifiers(event)
        view_layer = context.view_layer.name
        laycol_ptr = layer_collections[self.name]["ptr"]

        # make sure this view layer has a history entry for this RTO
        if view_layer not in rto_history["hide"]:
            rto_history["hide"][view_layer] = {"target": "", "history": []}

        if modifiers == {"alt"}:
            # discard history
            del rto_history["hide"][view_layer]
            cls.isolated = False

        elif modifiers == {"shift"}:
            isolate_rto(cls, self, view_layer, "hide")

        elif modifiers == {"ctrl"}:
            toggle_children(self, view_layer, "hide")
            cls.isolated = False

        elif modifiers == {"ctrl", "shift"}:
            isolate_rto(cls, self, view_layer, "hide", children=True)

        else:
            # toggle visible

            # reset hide history
            del rto_history["hide"][view_layer]

            # toggle view of collection (layer-collection level, i.e. the
            # eye icon, not the collection-wide monitor toggle)
            laycol_ptr.hide_viewport = not laycol_ptr.hide_viewport

            cls.isolated = False

        # reset hide all history
        if view_layer in rto_history["hide_all"]:
            del rto_history["hide_all"][view_layer]

        return {'FINISHED'}
class CMUnHideAllOperator(Operator):
    bl_label = "[VV Global] Hide in Viewport"
    bl_description = (
        "  * LMB - Enable all/Restore.\n"
        "  * Shift+LMB - Invert.\n"
        "  * Ctrl+LMB - Copy/Paste RTOs.\n"
        "  * Ctrl+Alt+LMB - Swap RTOs.\n"
        "  * Alt+LMB - Discard history"
        )
    bl_idname = "view3d.un_hide_all_collections"
    bl_options = {'REGISTER', 'UNDO'}

    def invoke(self, context, event):
        """Global Hide-column action, dispatching on modifier keys."""
        global rto_history

        view_layer = context.view_layer.name
        modifiers = get_modifiers(event)

        # make sure this view layer has a hide_all history entry
        if view_layer not in rto_history["hide_all"]:
            rto_history["hide_all"][view_layer] = []

        if modifiers == {"alt"}:
            # clear all states
            del rto_history["hide_all"][view_layer]
            clear_copy("hide")
            clear_swap("hide")

        elif modifiers == {"ctrl"}:
            copy_rtos(view_layer, "hide")

        elif modifiers == {"ctrl", "alt"}:
            swap_rtos(view_layer, "hide")

        elif modifiers == {"shift"}:
            invert_rtos(view_layer, "hide")

        else:
            activate_all_rtos(view_layer, "hide")

        return {'FINISHED'}
class CMDisableViewportOperator(Operator):
    bl_label = "[DV] Disable in Viewports"
    bl_description = (
        "  * Shift+LMB - Isolate/Restore.\n"
        "  * Shift+Ctrl+LMB - Isolate nested/Restore.\n"
        "  * Ctrl+LMB - Toggle nested.\n"
        "  * Alt+LMB - Discard history"
        )
    bl_idname = "view3d.disable_viewport_collection"
    bl_options = {'REGISTER', 'UNDO'}

    name: StringProperty()

    # static class var: shared with isolate_rto() to track isolation state
    isolated = False

    def invoke(self, context, event):
        """Toggle/isolate the global Disable-in-Viewports RTO."""
        global rto_history
        cls = CMDisableViewportOperator

        modifiers = get_modifiers(event)
        view_layer = context.view_layer.name
        laycol_ptr = layer_collections[self.name]["ptr"]

        # make sure this view layer has a history entry for this RTO
        if view_layer not in rto_history["disable"]:
            rto_history["disable"][view_layer] = {"target": "", "history": []}

        if modifiers == {"alt"}:
            # discard history
            del rto_history["disable"][view_layer]
            cls.isolated = False

        elif modifiers == {"shift"}:
            isolate_rto(cls, self, view_layer, "disable")

        elif modifiers == {"ctrl"}:
            toggle_children(self, view_layer, "disable")
            cls.isolated = False

        elif modifiers == {"ctrl", "shift"}:
            isolate_rto(cls, self, view_layer, "disable", children=True)

        else:
            # toggle disable

            # reset disable history
            del rto_history["disable"][view_layer]

            # toggle disable of collection in viewport (collection-wide
            # monitor icon, affects all view layers)
            laycol_ptr.collection.hide_viewport = not laycol_ptr.collection.hide_viewport

            cls.isolated = False

        # reset disable all history
        if view_layer in rto_history["disable_all"]:
            del rto_history["disable_all"][view_layer]

        return {'FINISHED'}
class CMUnDisableViewportAllOperator(Operator):
    bl_label = "[DV Global] Disable in Viewports"
    bl_description = (
        "  * LMB - Enable all/Restore.\n"
        "  * Shift+LMB - Invert.\n"
        "  * Ctrl+LMB - Copy/Paste RTOs.\n"
        "  * Ctrl+Alt+LMB - Swap RTOs.\n"
        "  * Alt+LMB - Discard history"
        )
    bl_idname = "view3d.un_disable_viewport_all_collections"
    bl_options = {'REGISTER', 'UNDO'}

    def invoke(self, context, event):
        """Global Disable-in-Viewports column action."""
        global rto_history

        view_layer = context.view_layer.name
        modifiers = get_modifiers(event)

        # make sure this view layer has a disable_all history entry
        if view_layer not in rto_history["disable_all"]:
            rto_history["disable_all"][view_layer] = []

        if modifiers == {"alt"}:
            # clear all states
            del rto_history["disable_all"][view_layer]
            clear_copy("disable")
            clear_swap("disable")

        elif modifiers == {"ctrl"}:
            copy_rtos(view_layer, "disable")

        elif modifiers == {"ctrl", "alt"}:
            swap_rtos(view_layer, "disable")

        elif modifiers == {"shift"}:
            invert_rtos(view_layer, "disable")

        else:
            activate_all_rtos(view_layer, "disable")

        return {'FINISHED'}
class CMDisableRenderOperator(Operator):
    bl_label = "[RR] Disable in Renders"
    bl_description = (
        "  * Shift+LMB - Isolate/Restore.\n"
        "  * Shift+Ctrl+LMB - Isolate nested/Restore.\n"
        "  * Ctrl+LMB - Toggle nested.\n"
        "  * Alt+LMB - Discard history"
        )
    bl_idname = "view3d.disable_render_collection"
    bl_options = {'REGISTER', 'UNDO'}

    name: StringProperty()

    # static class var: shared with isolate_rto() to track isolation state
    isolated = False

    def invoke(self, context, event):
        """Toggle/isolate the Disable-in-Renders RTO for one collection."""
        global rto_history
        cls = CMDisableRenderOperator

        modifiers = get_modifiers(event)
        view_layer = context.view_layer.name
        laycol_ptr = layer_collections[self.name]["ptr"]

        # make sure this view layer has a history entry for this RTO
        if view_layer not in rto_history["render"]:
            rto_history["render"][view_layer] = {"target": "", "history": []}

        if modifiers == {"alt"}:
            # discard history
            del rto_history["render"][view_layer]
            cls.isolated = False

        elif modifiers == {"shift"}:
            isolate_rto(cls, self, view_layer, "render")

        elif modifiers == {"ctrl"}:
            toggle_children(self, view_layer, "render")
            cls.isolated = False

        elif modifiers == {"ctrl", "shift"}:
            isolate_rto(cls, self, view_layer, "render", children=True)

        else:
            # toggle renderable

            # reset render history
            del rto_history["render"][view_layer]

            # toggle renderability of collection
            laycol_ptr.collection.hide_render = not laycol_ptr.collection.hide_render

            cls.isolated = False

        # reset render all history
        if view_layer in rto_history["render_all"]:
            del rto_history["render_all"][view_layer]

        return {'FINISHED'}
class CMUnDisableRenderAllOperator(Operator):
    """Global render-RTO operations: enable all, invert, copy/paste, swap."""
    bl_label = "[RR Global] Disable in Renders"
    bl_description = (
        " * LMB - Enable all/Restore.\n"
        " * Shift+LMB - Invert.\n"
        " * Ctrl+LMB - Copy/Paste RTOs.\n"
        " * Ctrl+Alt+LMB - Swap RTOs.\n"
        " * Alt+LMB - Discard history"
    )
    bl_idname = "view3d.un_disable_render_all_collections"
    bl_options = {'REGISTER', 'UNDO'}

    def invoke(self, context, event):
        """Dispatch on the held modifier keys (see bl_description)."""
        global rto_history

        view_layer = context.view_layer.name
        modifiers = get_modifiers(event)

        # make sure a history list exists for this view layer
        if view_layer not in rto_history["render_all"]:
            rto_history["render_all"][view_layer] = []

        if modifiers == {"alt"}:
            # clear all states
            del rto_history["render_all"][view_layer]
            clear_copy("render")
            clear_swap("render")

        elif modifiers == {"ctrl"}:
            copy_rtos(view_layer, "render")

        elif modifiers == {"ctrl", "alt"}:
            swap_rtos(view_layer, "render")

        elif modifiers == {"shift"}:
            invert_rtos(view_layer, "render")

        else:
            activate_all_rtos(view_layer, "render")

        return {'FINISHED'}
class CMHoldoutOperator(Operator):
    """Toggle / isolate the holdout flag of one collection in the view layer."""
    bl_label = "[HH] Holdout"
    bl_description = (
        " * Shift+LMB - Isolate/Restore.\n"
        " * Shift+Ctrl+LMB - Isolate nested/Restore.\n"
        " * Ctrl+LMB - Toggle nested.\n"
        " * Alt+LMB - Discard history"
    )
    bl_idname = "view3d.holdout_collection"
    bl_options = {'REGISTER', 'UNDO'}

    # name of the collection this button targets (set by the UI)
    name: StringProperty()

    # static class var: True while an isolate state is active
    isolated = False

    def invoke(self, context, event):
        """Dispatch on the held modifier keys (see bl_description)."""
        global rto_history
        cls = CMHoldoutOperator

        modifiers = get_modifiers(event)
        view_layer = context.view_layer.name
        laycol_ptr = layer_collections[self.name]["ptr"]

        # make sure a history entry exists for this view layer
        if view_layer not in rto_history["holdout"]:
            rto_history["holdout"][view_layer] = {"target": "", "history": []}

        if modifiers == {"alt"}:
            # discard history
            del rto_history["holdout"][view_layer]
            cls.isolated = False

        elif modifiers == {"shift"}:
            isolate_rto(cls, self, view_layer, "holdout")

        elif modifiers == {"ctrl"}:
            toggle_children(self, view_layer, "holdout")
            cls.isolated = False

        elif modifiers == {"ctrl", "shift"}:
            isolate_rto(cls, self, view_layer, "holdout", children=True)

        else:
            # plain click: reset holdout history, then flip the holdout flag
            del rto_history["holdout"][view_layer]
            laycol_ptr.holdout = not laycol_ptr.holdout
            cls.isolated = False

        # any per-collection change invalidates the global holdout history
        if view_layer in rto_history["holdout_all"]:
            del rto_history["holdout_all"][view_layer]

        return {'FINISHED'}
class CMUnHoldoutAllOperator(Operator):
    """Global holdout-RTO operations: enable all, invert, copy/paste, swap."""
    bl_label = "[HH Global] Holdout"
    bl_description = (
        " * LMB - Enable all/Restore.\n"
        " * Shift+LMB - Invert.\n"
        " * Ctrl+LMB - Copy/Paste RTOs.\n"
        " * Ctrl+Alt+LMB - Swap RTOs.\n"
        " * Alt+LMB - Discard history"
    )
    bl_idname = "view3d.un_holdout_all_collections"
    bl_options = {'REGISTER', 'UNDO'}

    def invoke(self, context, event):
        """Dispatch on the held modifier keys (see bl_description)."""
        global rto_history

        view_layer = context.view_layer.name
        modifiers = get_modifiers(event)

        # make sure a history list exists for this view layer
        if view_layer not in rto_history["holdout_all"]:
            rto_history["holdout_all"][view_layer] = []

        if modifiers == {"alt"}:
            # clear all states
            del rto_history["holdout_all"][view_layer]
            clear_copy("holdout")
            clear_swap("holdout")

        elif modifiers == {"ctrl"}:
            copy_rtos(view_layer, "holdout")

        elif modifiers == {"ctrl", "alt"}:
            swap_rtos(view_layer, "holdout")

        elif modifiers == {"shift"}:
            invert_rtos(view_layer, "holdout")

        else:
            activate_all_rtos(view_layer, "holdout")

        return {'FINISHED'}
class CMIndirectOnlyOperator(Operator):
    """Toggle / isolate the indirect-only flag of one collection in the view layer."""
    bl_label = "[IO] Indirect Only"
    bl_description = (
        " * Shift+LMB - Isolate/Restore.\n"
        " * Shift+Ctrl+LMB - Isolate nested/Restore.\n"
        " * Ctrl+LMB - Toggle nested.\n"
        " * Alt+LMB - Discard history"
    )
    bl_idname = "view3d.indirect_only_collection"
    bl_options = {'REGISTER', 'UNDO'}

    # name of the collection this button targets (set by the UI)
    name: StringProperty()

    # static class var: True while an isolate state is active
    isolated = False

    def invoke(self, context, event):
        """Dispatch on the held modifier keys (see bl_description)."""
        global rto_history
        cls = CMIndirectOnlyOperator

        modifiers = get_modifiers(event)
        view_layer = context.view_layer.name
        laycol_ptr = layer_collections[self.name]["ptr"]

        # make sure a history entry exists for this view layer
        if view_layer not in rto_history["indirect"]:
            rto_history["indirect"][view_layer] = {"target": "", "history": []}

        if modifiers == {"alt"}:
            # discard history
            del rto_history["indirect"][view_layer]
            cls.isolated = False

        elif modifiers == {"shift"}:
            isolate_rto(cls, self, view_layer, "indirect")

        elif modifiers == {"ctrl"}:
            toggle_children(self, view_layer, "indirect")
            cls.isolated = False

        elif modifiers == {"ctrl", "shift"}:
            isolate_rto(cls, self, view_layer, "indirect", children=True)

        else:
            # plain click: reset indirect history, then flip the flag
            del rto_history["indirect"][view_layer]
            laycol_ptr.indirect_only = not laycol_ptr.indirect_only
            cls.isolated = False

        # any per-collection change invalidates the global indirect history
        if view_layer in rto_history["indirect_all"]:
            del rto_history["indirect_all"][view_layer]

        return {'FINISHED'}
class CMUnIndirectOnlyAllOperator(Operator):
    """Global indirect-only RTO operations: enable all, invert, copy/paste, swap."""
    bl_label = "[IO Global] Indirect Only"
    bl_description = (
        " * LMB - Enable all/Restore.\n"
        " * Shift+LMB - Invert.\n"
        " * Ctrl+LMB - Copy/Paste RTOs.\n"
        " * Ctrl+Alt+LMB - Swap RTOs.\n"
        " * Alt+LMB - Discard history"
    )
    bl_idname = "view3d.un_indirect_only_all_collections"
    bl_options = {'REGISTER', 'UNDO'}

    def invoke(self, context, event):
        """Dispatch on the held modifier keys (see bl_description)."""
        global rto_history

        view_layer = context.view_layer.name
        modifiers = get_modifiers(event)

        # make sure a history list exists for this view layer
        if view_layer not in rto_history["indirect_all"]:
            rto_history["indirect_all"][view_layer] = []

        if modifiers == {"alt"}:
            # clear all states
            del rto_history["indirect_all"][view_layer]
            clear_copy("indirect")
            clear_swap("indirect")

        elif modifiers == {"ctrl"}:
            copy_rtos(view_layer, "indirect")

        elif modifiers == {"ctrl", "alt"}:
            swap_rtos(view_layer, "indirect")

        elif modifiers == {"shift"}:
            invert_rtos(view_layer, "indirect")

        else:
            activate_all_rtos(view_layer, "indirect")

        return {'FINISHED'}
class CMRemoveCollectionOperator(Operator):
    '''Remove Collection'''
    bl_label = "Remove Collection"
    bl_idname = "view3d.remove_collection"
    bl_options = {'UNDO'}

    # name of the collection to remove (set by the UI button)
    collection_name: StringProperty()

    def execute(self, context):
        """Delete the named collection, folding its contents into its parent."""
        global rto_history
        global expand_history
        global qcd_slots

        laycol = layer_collections[self.collection_name]
        target = laycol["ptr"].collection
        parent = laycol["parent"]["ptr"].collection

        # move this collection's objects up into the parent collection
        for obj in target.objects:
            if obj.name not in parent.objects:
                parent.objects.link(obj)

        # re-parent any child collections, preserving their view layer RTOs
        if target.children:
            link_child_collections_to_parent(laycol, target, parent)

        # remove collection, update references, and update tree view
        remove_collection(laycol, target, context)

        return {'FINISHED'}
class CMRemoveEmptyCollectionsOperator(Operator):
    """Delete empty collections, optionally including ones that still have children."""
    bl_label = "Remove Empty Collections"
    bl_idname = "view3d.remove_empty_collections"
    bl_options = {'UNDO'}

    # True -> also purge collections that have subcollections but no objects
    without_objects: BoolProperty()

    @classmethod
    def description(cls, context, properties):
        """Pick the tooltip matching the `without_objects` mode."""
        if properties.without_objects:
            tooltip = (
                "Purge All Collections Without Objects.\n"
                "Deletes all collections that don't contain objects even if they have subcollections"
            )
        else:
            tooltip = (
                "Remove Empty Collections.\n"
                "Delete collections that don't have any subcollections or objects"
            )

        return tooltip

    def execute(self, context):
        global rto_history
        global expand_history
        global qcd_slots

        if self.without_objects:
            # any collection with no objects, children or not
            doomed = [
                laycol["name"]
                for laycol in layer_collections.values()
                if not laycol["ptr"].collection.objects
            ]
        else:
            # only truly empty collections: no children and no objects
            doomed = [
                laycol["name"]
                for laycol in layer_collections.values()
                if not laycol["children"]
                and not laycol["ptr"].collection.objects
            ]

        for name in doomed:
            laycol = layer_collections[name]
            collection = laycol["ptr"].collection
            parent = laycol["parent"]["ptr"].collection

            # re-parent surviving children, preserving their view layer RTOs
            if collection.children:
                link_child_collections_to_parent(laycol, collection, parent)

            # remove collection, update references, and update tree view
            remove_collection(laycol, collection, context)

        self.report({"INFO"}, f"Removed {len(doomed)} collections")

        return {'FINISHED'}
rename = [False]
class CMNewCollectionOperator(Operator):
    """Create a new collection as a child or sibling of the selected one."""
    bl_label = "Add New Collection"
    bl_idname = "view3d.add_collection"
    bl_options = {'UNDO'}

    # True -> add as a child of the selected collection,
    # False -> add as its sibling
    child: BoolProperty()

    @classmethod
    def description(cls, context, properties):
        """Pick the tooltip matching the `child` property."""
        if properties.child:
            tooltip = (
                "Add New SubCollection.\n"
                "Add a new subcollection to the currently selected collection"
            )
        else:
            tooltip = (
                "Add New Collection.\n"
                "Add a new collection as a sibling of the currently selected collection"
            )

        return tooltip

    def execute(self, context):
        global rto_history

        new_collection = bpy.data.collections.new("New Collection")
        cm = context.scene.collection_manager

        # prevent adding collections when collections are filtered
        # and the selection is ambiguous
        if cm.cm_list_index == -1 and ui.CM_UL_items.filtering:
            send_report("Cannot create new collection.\n"
                        "No collection is selected and collections are filtered."
                        )
            return {'CANCELLED'}

        if cm.cm_list_index > -1 and not ui.CM_UL_items.visible_items[cm.cm_list_index]:
            send_report("Cannot create new collection.\n"
                        "The selected collection isn't visible."
                        )
            return {'CANCELLED'}

        # if there are collections
        if cm.cm_list_collection:
            if cm.cm_list_index != -1:
                # get selected collection
                laycol = layer_collections[cm.cm_list_collection[cm.cm_list_index].name]

                # link the new collection under the selection (or its parent)
                if self.child:
                    laycol["ptr"].collection.children.link(new_collection)
                    expanded.add(laycol["name"])
                else:
                    laycol["parent"]["ptr"].collection.children.link(new_collection)
            else:
                # nothing selected: link at the scene's top level
                context.scene.collection.children.link(new_collection)

            # update tree view property and select the new collection
            update_property_group(context)
            cm.cm_list_index = layer_collections[new_collection.name]["row_index"]

        # if no collections add top level collection and select it
        else:
            context.scene.collection.children.link(new_collection)

            # update tree view property
            update_property_group(context)

            cm.cm_list_index = 0

        # set new collection to active
        layer_collection = layer_collections[new_collection.name]["ptr"]
        context.view_layer.active_layer_collection = layer_collection

        # show the new collection when collections are filtered.
        ui.CM_UL_items.new_collections.append(new_collection.name)

        # signal the UI to start renaming the new collection
        global rename
        rename[0] = True

        # reset history
        for rto in rto_history.values():
            rto.clear()

        return {'FINISHED'}
class CMPhantomModeOperator(Operator):
    '''Toggle Phantom Mode'''
    bl_label = "Toggle Phantom Mode"
    bl_idname = "view3d.toggle_phantom_mode"

    def execute(self, context):
        """Enter phantom mode (snapshot state) or leave it (restore snapshot)."""
        global phantom_history
        global rto_history

        cm = context.scene.collection_manager
        view_layer = context.view_layer

        if not cm.in_phantom_mode:
            # enter Phantom Mode: snapshot visibility + RTO history
            cm.in_phantom_mode = True
            phantom_history["view_layer"] = view_layer.name

            def save_visibility_state(layer_collection):
                # record every per-collection visibility flag
                phantom_history["initial_state"][layer_collection.name] = {
                    "exclude": layer_collection.exclude,
                    "select": layer_collection.collection.hide_select,
                    "hide": layer_collection.hide_viewport,
                    "disable": layer_collection.collection.hide_viewport,
                    "render": layer_collection.collection.hide_render,
                    "holdout": layer_collection.holdout,
                    "indirect": layer_collection.indirect_only,
                }

            apply_to_children(view_layer.layer_collection, save_visibility_state)

            # snapshot the current rto history for this view layer
            for rto, history in rto_history.items():
                if history.get(view_layer.name, None):
                    phantom_history[rto + "_history"] = deepcopy(history[view_layer.name])

        else:
            # leave Phantom Mode: put everything back the way it was
            def restore_visibility_state(layer_collection):
                saved = phantom_history["initial_state"][layer_collection.name]
                layer_collection.exclude = saved["exclude"]
                layer_collection.collection.hide_select = saved["select"]
                layer_collection.hide_viewport = saved["hide"]
                layer_collection.collection.hide_viewport = saved["disable"]
                layer_collection.collection.hide_render = saved["render"]
                layer_collection.holdout = saved["holdout"]
                layer_collection.indirect_only = saved["indirect"]

            apply_to_children(view_layer.layer_collection, restore_visibility_state)

            # restore the snapshotted rto history, then clear the snapshot
            for rto, history in rto_history.items():
                if view_layer.name in history:
                    del history[view_layer.name]

                if phantom_history[rto + "_history"]:
                    history[view_layer.name] = deepcopy(phantom_history[rto + "_history"])

                phantom_history[rto + "_history"].clear()

            cm.in_phantom_mode = False

        return {'FINISHED'}
class CMApplyPhantomModeOperator(Operator):
    '''Apply changes and quit Phantom Mode'''
    bl_label = "Apply Phantom Mode"
    bl_idname = "view3d.apply_phantom_mode"

    def execute(self, context):
        """Drop the phantom flag, keeping the current state as the new reality."""
        context.scene.collection_manager.in_phantom_mode = False
        return {'FINISHED'}
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.