blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
40a8094797f214895546ecf45393fb3426288015 | 6ea99c5eff3214b6f424f8b31f802693bb675e6e | /discord chat bot.py | ff8a67b292caa5dfb36c2024030306f8a3402db5 | [] | no_license | RajrupDasid/Python-Full | 8853ca00d8d49ae4d7ee8be3c2006e3501b63688 | c6eb6b14d31591e5319dde774aaad8b1d6a30cf3 | refs/heads/master | 2023-04-23T15:09:27.156157 | 2021-05-16T16:02:47 | 2021-05-16T16:02:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,779 | py | Python 3.7.8 (tags/v3.7.8:4b47a5b6ba, Jun 28 2020, 08:53:46) [MSC v.1916 64 bit (AMD64)] on win32
Type "help", "copyright", "credits" or "license()" for more information.
>>> import discord
import os
import requests
import json
import random
from replit import db
from keep_alive import keep_alive
client = discord.Client()  # Discord gateway client; event handlers are registered on it below.
# Trigger phrases: a message containing any of these gets an encouragement reply.
sad_words = ["I am sad","sad","Depressed","not feeling well","angry","depressing"]
# Built-in replies; users can add more at runtime via the $new command.
starter_encouragement = ["Cheer Up!","Hang on there","you are a great person","Don't be sad ","Everything will be fine soon"]
# Default the auto-response toggle to on (persisted in the replit key-value db).
if "responding" not in db.keys():
  db ["responding"] = True
def get_quote():
  """Fetch one random quote from the zenquotes.io API, formatted as '<quote>-<author>'."""
  response = requests.get ("https://zenquotes.io/api/random")
  # API returns a JSON list with one object: 'q' is the quote, 'a' the author.
  json_data = json.loads(response.text)
  quote = json_data[0]['q'] + "-"+ json_data[0]['a']
  return (quote)
def update_encouragements(encouraging_message):
  """Append a user-submitted encouragement to the persisted list in the replit db."""
  if "encouragements" in db.keys():
    # Read the list, modify it, then write it back under the same key.
    encouragements = db ["encouragements"]
    encouragements.append(encouraging_message)
    db["encouragements"] = encouragements
  else:
    # First encouragement: create the key.
    db ["encouragements"] = [encouraging_message]
def delete_encouragement(index):
  """Delete the stored encouragement at *index*; out-of-range indexes are ignored."""
  encouragements = db ["encouragements"]
  # NOTE(review): negative indexes pass this guard and delete from the end — confirm intended.
  if len(encouragements)>index:
    del encouragements[index]
    db ["encouragements"] = encouragements
@client.event
async def on_ready():
  # Fired once the gateway connection is established; logs the bot's identity.
  print ('We have logged in as {0.user}'.format(client))
@client.event
async def on_message(message):
    """Command dispatcher for the bot.

    Handles $inspire, $new, $del, $list and $responding, and auto-replies
    with an encouragement when a message contains a sad word (if enabled).
    """
    if message.author == client.user:
        # Ignore the bot's own messages to avoid reply loops.
        return
    msg = message.content
    if msg.startswith('$inspire'):
      # Send a random quote fetched from zenquotes.io.
      quote = get_quote()
      await message.channel.send(quote)
    if db["responding"]:
      # Auto-respond when a sad word appears; pool is starters + user entries.
      options = starter_encouragement
      if "encouragements" in db.keys():
        options = options + db ["encouragements"]
      if any(word in msg for word in sad_words):
        await message.channel.send(random.choice(options))
    if msg.startswith("$new"):
      # Store a user-defined encouragement (everything after "$new").
      encouraging_message = msg.split("$new",1)[1]
      update_encouragements(encouraging_message)
      await message.channel.send("New encouraging message added.")
    if msg.startswith("$del"):
      # Delete the stored encouragement at the given index, then echo the list.
      encouragements =[]
      if "encouragements" in db.keys():
        index = int(msg.split("$del",1)[1])
        delete_encouragement(index)
        encouragements = db ["encouragements"]
      await message.channel.send(encouragements)
    if msg.startswith("$list"):
      # Show all user-stored encouragements.
      encouragements = []
      if "encouragements" in db.keys():
        encouragements = db ["encouragements"]
      await message.channel.send(encouragements)
    if msg.startswith("$responding"):
      # Toggle auto-responses. Bug fix: the split keeps the leading space
      # (" true"), so the original comparison never matched and
      # "$responding true" always DEACTIVATED responding. Strip first.
      value = msg.split("responding",1)[1]
      if value.strip().lower() == "true":
        db["responding"] = True
        await message.channel.send("Responding Activated.")
      else:
        db["responding"] = False
        await message.channel.send("Responding deactivated")
keep_alive()  # presumably starts a small web server so the repl stays reachable — see keep_alive module
client.run(os.getenv('TOKEN'))  # blocking: connect to Discord with the token from the environment
"rajrupdasid@outlook.com"
] | rajrupdasid@outlook.com |
de953e1a133d796d7c348777274fe9a4eb25f67e | ddb7916c3962713471044f03bd76414581dbf801 | /Myadmin/templatetags/get_table_rela_name.py | 57099dd00e91e49ac1775475fd5f2fe0ad581a24 | [] | no_license | so1so2so/SuperCrm | 92949819ea2200edd818bfafce8fd2c5ca99076a | ba17faa55b13a611fc579006994af6f0f836764b | refs/heads/master | 2020-03-06T18:24:11.238838 | 2018-05-08T13:42:27 | 2018-05-08T13:42:27 | 127,006,380 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,845 | py | #!/usr/bin/env python
# _*_ coding:utf-8 _*_
from django import template
from django.utils.safestring import mark_safe
register = template.Library()
@register.simple_tag
def get_rela_name(table_obj):
    """Return a human-readable name for an admin-registered table.

    Preference order: the model's ``verbose_name_plural``, then the admin
    object's own ``verbose_name``, and finally the raw model name.

    :param table_obj: admin registration object exposing ``model`` and
        ``verbose_name`` attributes.
    :return: display name string.
    """
    table_name = table_obj.model._meta.verbose_name_plural or table_obj.verbose_name
    if not table_name:
        # Bug fix: was ``model_mame`` (typo), which raised AttributeError
        # whenever this fallback path was reached.
        table_name = table_obj.model._meta.model_name
    return table_name
@register.simple_tag
def get_chinese_name(table_obj):
    """Return the display (verbose) name for a model instance's class.

    Falls back from ``verbose_name_plural`` to ``verbose_name`` and finally
    to the raw model name.

    :param table_obj: model instance exposing ``_meta``.
    :return: display name string.
    """
    if hasattr(table_obj._meta, 'verbose_name_plural'):
        return table_obj._meta.verbose_name_plural
    elif hasattr(table_obj._meta, 'verbose_name'):
        return table_obj._meta.verbose_name
    else:
        # Bug fix: was ``model_mame`` (typo) — the final fallback raised
        # AttributeError instead of returning the model name.
        return table_obj._meta.model_name
@register.simple_tag
def build_table_row(request, one_obj_django, obj_all_model_and_display):
    """Render one table row (<td> cells) for the admin change-list.

    :param request: current request; its path builds the edit link.
    :param one_obj_django: the model instance being rendered.
    :param obj_all_model_and_display: admin registration object providing
        ``list_display``, the ordered field names to show.
    :return: safe HTML string of the row's cells.
    """
    row_ele = ""
    for index, filed in enumerate(obj_all_model_and_display.list_display):
        field_obj = one_obj_django._meta.get_field(filed)
        if field_obj.choices: # choices type
            # Use Django's generated get_<field>_display() for the label.
            column_data = getattr(one_obj_django, "get_%s_display" % filed)()
        else:
            column_data = getattr(one_obj_django, filed)
        if type(column_data).__name__ == 'datetime':
            # Normalize datetimes to a fixed string format.
            column_data = column_data.strftime("%Y-%m-%d %H:%M:%S")
        if type(field_obj).__name__ == "ManyToManyField":
            # NOTE(review): this loop has no effect (body is ``pass``) —
            # looks like unfinished m2m rendering; confirm before relying on it.
            all_date = getattr(field_obj, 'get_choices')()[1:]
            for choice_item in all_date:
                if str(choice_item[0]) == one_obj_django:
                    pass
        if index == 0: # add <a></a> tag
            # First column links to the object's change page.
            column_data = "<a href='{request_path}/{obj_id}/change' target='_self'>{date}</a>".format(
                request_path=request.path,
                obj_id=one_obj_django.id,
                date=column_data,
            )
        row_ele += "<td>%s</td>" % column_data
    # print row_ele
    return mark_safe(row_ele)
@register.simple_tag
def render_page_ele(loop_counter, query_sets, filter_condtions, order, search):
    """Render one pagination <li> element, preserving active filters.

    Shows the first two pages, the last two pages, and pages adjacent to
    the current one; other page numbers render as an empty string.

    :param loop_counter: page number this element represents.
    :param query_sets: Django paginator page (has ``number`` and ``paginator``).
    :param filter_condtions: dict of active filter params to keep in the URL.
    :param order: current ordering param (may be falsy).
    :param search: current search query (may be falsy).
    """
    filters = ''
    for k, v in filter_condtions.items():
        filters += "&%s=%s" % (k, v)
    if not order:
        order = ''
    if not search:
        search = ''
    if loop_counter < 3 or loop_counter > query_sets.paginator.num_pages - 2: # show the first 2 or the last 2 pages
        ele_class = ""
        if query_sets.number == loop_counter:
            ele_class = "active"
        ele = '''<li class="%s"><a href="?page=%s%s&o=%s&q=%s">%s</a></li>''' % (
            ele_class, loop_counter, filters, order, search, loop_counter)
        return mark_safe(ele)
    if abs(query_sets.number - loop_counter) <= 1:
        # Pages adjacent to the current page.
        # NOTE(review): unlike the branch above, this link drops the
        # ``o``/``q`` params — confirm whether that is intentional.
        ele_class = ""
        if query_sets.number == loop_counter:
            ele_class = "active"
        ele = '''<li class="%s"><a href="?page=%s%s">%s</a></li>''' % (ele_class, loop_counter, filters, loop_counter)
        return mark_safe(ele)
    return ''
@register.simple_tag
def render_filter_ele(condtion, obj_all_model_and_display, filter_condtions):
    """Render a <select> filter widget for one filterable field.

    Supports ``choices`` fields, ForeignKey and ManyToManyField; the option
    matching the currently active filter value is pre-selected.

    :param condtion: field name being filtered.
    :param obj_all_model_and_display: admin registration object exposing ``model``.
    :param filter_condtions: dict of currently active filter values, e.g.
        {u'source': u'1', u'consultant': u'2'}.
    """
    select_ele = '''<select class="form-control" name='%s' ><option value=''>----</option>''' % condtion
    # Look up the model field object for this filter condition.
    field_obj = obj_all_model_and_display.model._meta.get_field(condtion)
    if field_obj.choices:
        selected = ''
        # Iterate every (value, label) pair declared in the field's choices.
        for choice_item in field_obj.choices:
            # Compare against the active filter value from the request.
            # print("choice", choice_item, filter_condtions.get(condtion), type(filter_condtions.get(condtion)))
            # If the value sent by the front end matches, pre-select it.
            if filter_condtions.get(condtion) == str(choice_item[0]):
                selected = "selected"
            select_ele += '''<option value='%s' %s>%s</option>''' % (choice_item[0], selected, choice_item[1])
            selected = ''
    if type(field_obj).__name__ == "ForeignKey":
        selected = ''
        # get_choices()[1:] skips the leading blank choice.
        for choice_item in field_obj.get_choices()[1:]:
            if filter_condtions.get(condtion) == str(choice_item[0]):
                selected = "selected"
            select_ele += '''<option value='%s' %s>%s</option>''' % (choice_item[0], selected, choice_item[1])
            selected = ''
    if type(field_obj).__name__ == "ManyToManyField":
        selected = ''
        for choice_item in field_obj.get_choices()[1:]:
            # print filter_condtions.get(condtion)
            if filter_condtions.get(condtion) == str(choice_item[0]):
                selected = "selected"
            select_ele += '''<option value='%s' %s>%s</option>''' % (choice_item[0], selected, choice_item[1])
            selected = ''
    select_ele += "</select>"
    return mark_safe(select_ele)
@register.simple_tag
def change_order(column):
    """Toggle the sort direction prefix on an ordering column name.

    A leading ``-`` (descending) is removed; otherwise one is prepended.
    """
    return column.strip("-") if column.startswith("-") else "-" + column
@register.simple_tag
def get_all_m2m_list(obj_all_model_and_display, field, form_obj):
    """
    :param obj_all_model_and_display: admin registration object exposing ``model``.
    :param field: the m2m field being rendered.
    :param form_obj: bound model form (its ``instance`` holds current selections).
    :return: all candidate m2m rows not yet selected on the instance.
    """
    # models.Customer.tags.rel.to.objects.all()
    # obj_all_model_and_display.model=models.Customer
    # print obj_all_model_and_display.model
    if hasattr(obj_all_model_and_display.model, field.name):
        field_all_obj = getattr(obj_all_model_and_display.model, field.name).rel.to.objects.all()
        # print field_all_obj
        # Equivalent to field_obj = models.Customer.tags
        # i.e. getattr(d,'tags').rel.to.objects.all()
        # print field_all_obj.intersection(field_select_obj)
        # "return the full set minus the already-selected rows"
        if hasattr(form_obj.instance, field.name):
            field_select_obj = getattr(form_obj.instance, field.name).all()
            return field_all_obj.difference(field_select_obj)
        else:
            # New (unsaved) instance: everything is a candidate.
            return field_all_obj
    # return (field_select_obj|field_all_obj).distinct()
@register.simple_tag
def print_obj_(obj):
    """Expose a form's bound model instance to the template."""
    bound_instance = getattr(obj, "instance")
    return bound_instance
@register.simple_tag
def get_select_m2m_list(form_obj, field):
    """
    :param form_obj: bound model form; ``form_obj = new_model_form(instance=table_obj)``.
    :param field: the m2m field being rendered.
    :return: the already-selected related rows, like
        ``{{ form_obj.instance.tags.all }}`` in a template; empty string
        when the instance has no such attribute.
    """
    if hasattr(form_obj.instance, field.name):
        field_select_obj = getattr(form_obj.instance, field.name)
        return field_select_obj.all()
    else:
        return ""
def recursive_related_objs_lookup(objs):
    """Build a nested HTML <ul> listing each object and all of its related rows.

    Recurses through reverse relations so that, e.g., deleting a customer
    can show every enrollment/payment that would go with it.
    (Python 2 code: uses print statements.)
    """
    print "objs", objs
    # model_name = objs[0]._meta.model_name
    ul_ele = "<ul>"
    for obj in objs:
        li_ele = '''<li> %s: %s </li>''' % (obj._meta.verbose_name, obj.__unicode__().strip("<>"))
        ul_ele += li_ele
        # for local many to many
        # print("------- obj._meta.local_many_to_many", obj._meta.local_many_to_many)
        for m2m_field in obj._meta.local_many_to_many: # collect all m2m fields declared directly on this object
            sub_ul_ele = "<ul>"
            m2m_field_obj = getattr(obj, m2m_field.name) # getattr(customer, 'tags')
            for o in m2m_field_obj.select_related(): # customer.tags.select_related()
                li_ele = '''<li> %s: %s </li>''' % (m2m_field.verbose_name, o.__unicode__().strip("<>"))
                sub_ul_ele += li_ele
            sub_ul_ele += "</ul>"
            ul_ele += sub_ul_ele # splice back into the outermost ul
        for related_obj in obj._meta.related_objects:
            if 'ManyToManyRel' in related_obj.__repr__():
                if hasattr(obj, related_obj.get_accessor_name()): # hassattr(customer,'enrollment_set')
                    accessor_obj = getattr(obj, related_obj.get_accessor_name())
                    print("-------ManyToManyRel", accessor_obj, related_obj.get_accessor_name())
                    # accessor_obj above is equivalent to customer.enrollment_set
                    if hasattr(accessor_obj, 'select_related'): # slect_related() == all()
                        target_objs = accessor_obj.select_related() # .filter(**filter_coditions)
                        # target_objs is equivalent to customer.enrollment_set.all()
                        sub_ul_ele = "<ul style='color:red'>"
                        for o in target_objs:
                            li_ele = '''<li> %s: %s </li>''' % (o._meta.verbose_name, o.__unicode__().strip("<>"))
                            sub_ul_ele += li_ele
                        sub_ul_ele += "</ul>"
                        ul_ele += sub_ul_ele
            elif hasattr(obj, related_obj.get_accessor_name()): # hassattr(customer,'enrollment_set')
                accessor_obj = getattr(obj, related_obj.get_accessor_name())
                # accessor_obj above is equivalent to customer.enrollment_set
                if hasattr(accessor_obj, 'select_related'): # slect_related() == all()
                    target_objs = accessor_obj.select_related() # .filter(**filter_coditions)
                    # target_objs is equivalent to customer.enrollment_set.all()
                else:
                    print("one to one i guess:", accessor_obj)
                    target_objs = accessor_obj
                if len(target_objs) > 0:
                    # print("\033[31;1mdeeper layer lookup -------\033[0m")
                    # nodes = recursive_related_objs_lookup(target_objs,model_name)
                    # Recurse one level deeper into the related rows.
                    nodes = recursive_related_objs_lookup(target_objs)
                    ul_ele += nodes
    ul_ele += "</ul>"
    return ul_ele
@register.simple_tag
def display_obj_related(objs):
    '''Collect the object(s) and every row related to them as safe HTML.'''
    # objs = [objs] # fake
    # if objs:
    # model_class = objs[0]._meta.model # <class 'crm.models.Customer'>
    # mode_name = objs[0]._meta.model_name # customer
    return mark_safe(recursive_related_objs_lookup(objs))
@register.simple_tag
def display_no_exist(one_obj_django, filed,table_name):
    # Render an enrollment link ("点击报名" = "click to enroll") pointing at
    # <table_name>/<object id>/<field name>.
    return mark_safe('''<a href="%s/%s/%s">点击报名</a>''' % (str(table_name),one_obj_django.id, filed))
@register.simple_tag
def get_filed_chinese_name(column, obj_all_model_and_display):
    """Return the verbose (display) name of a model field.

    Equivalent to ``model._meta.get_field(column).verbose_name``.

    :param column: field name as listed in ``list_display``.
    :param obj_all_model_and_display: admin registration object exposing
        the model class via its ``model`` attribute.
    :return: the field's verbose name.
    """
    model_meta = obj_all_model_and_display.model._meta
    return model_meta.get_field(column).verbose_name
@register.simple_tag
def get_types(stra):
    # Debug helper: echoes the argument's display name and returns its type.
    # NOTE(review): 'dispaly_name' looks like a typo for 'display_name' —
    # confirm; this raises AttributeError for objects without that attribute.
    print stra.dispaly_name
    return type(stra)
@register.simple_tag
def get_action_verbose_name(admin_class, action):
    """Return the display label for an admin action.

    Looks up *action* on *admin_class*; prefers the callable's
    ``display_name`` attribute and falls back to the action name itself.
    Returns ``None`` when the action is not defined on the class.
    """
    if not hasattr(admin_class, action):
        return None
    action_func = getattr(admin_class, action)
    if hasattr(action_func, 'display_name'):
        return action_func.display_name
    return action
"1037930435@qq.com"
] | 1037930435@qq.com |
a9e5eb3ab4a218aa683ef5d03c99c1d078421c56 | 27fbe6c1f5de40022efe05f978c9af2a5d209e91 | /bug_trigger_inputs/mxnet/2/mxnet.ndarray.leakyrelu/VI_3c15c5ae85e16b766d42e7183dc7de1816f01cad.py | 21b1c28700106da26f921cec370d6a276fb8c6a8 | [] | no_license | icse-submit/materials | 830ea178351cd3523704ce63edfe18f83936d57c | 6a61764cea00470af6f74fb2969e77db023f1e84 | refs/heads/master | 2022-12-23T10:05:01.618766 | 2020-09-28T19:28:21 | 2020-09-28T19:31:07 | 290,929,497 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 136 | py | import pickle
import mxnet
data = pickle.load(open('3c15c5ae85e16b766d42e7183dc7de1816f01cad.p', 'rb'))
mxnet.ndarray.LeakyReLU(**data)
| [
"4icse2021@gmail.com"
] | 4icse2021@gmail.com |
988b12ee3d4f3d915082fb26172765182b63a80d | 57006c285929a9689d6b1db0a32cc20dae43f747 | /21数据分析/21源码/11_numpy测试.py | 4492b379dd8e40132e1b2ef57e9507ac25520bfc | [] | no_license | ami66/AI-Lab | 2c8c20e98461668ae3cbb251fd50e258abbb8108 | 3b334f7aa2aaa8237e1da3750159fe080c826b6b | refs/heads/master | 2020-04-11T07:33:13.505162 | 2018-09-27T15:09:47 | 2018-09-27T15:09:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,696 | py | import numpy as np
import random
a = np.array([1,2,3])
b = np.array(range(6))
c = np.array(range(9), dtype=float)
d = np.array(range(9), dtype="i1")
print(a)
print(a.dtype)
print(b)
print(type(b))
print(c)
print(c.dtype)
print(d)
print(d.dtype)
t5 = np.array([1,1,0,1,0,0], dtype=bool)
print(t5)
print(t5.dtype)
# 调整数据类型
t6 = t5.astype("int8")
print(t6)
print(t6.dtype)
t7 = np.array([random.random() for i in range(10)])
print(t7)
print(t7.dtype)
# 取小数点后两位
t8 = np.round(t7, 2)
print(t8)
print(round(random.random(), 3))
print("%.2f"%random.random())
# 矩阵的shape
print(a.shape)
ts = np.array([[1,2,3],[4,5,6]])
print(ts)
print(ts.shape)
# print(ts.reshape((3,4))) # 报错,没有那么多元素
print(ts.reshape((3,2)))
print(ts.reshape((1,6)))
print(ts.reshape((6,1)))
print(ts.reshape((6,)))
print(ts.reshape(ts.shape[0]*ts.shape[1]))
# 矩阵的运算
# 与数字运算
t_calcu = np.array([[0,1,2,3],[4,5,6,7]])
print(t_calcu*2)
print(t_calcu+2)
print(t_calcu/2)
# print(t_calcu/0) # 不报错,只是警告,0/0结果非数字nan,数字/0结果无穷inf
# 与矩阵运算
tc2 = np.array([[1,2,3,4],[5,6,7,8]])
print(tc2-t_calcu)
tc3 = np.array([1,2,3,4])
print(tc3-t_calcu)
# tc4 = np.array([[1],[2]])
tc4 = np.arange(1,3).reshape((2,1))
print("tc4-t_calcu:")
print(tc4-t_calcu)
# 读取本地文本文件
file_path = "11_readFile.csv"
t_read_file = np.loadtxt(file_path,delimiter=",",dtype="int")
t_read_file2 = np.loadtxt(file_path,delimiter=",",dtype="int",unpack=True) # 矩阵转置
print(t_read_file)
print(t_read_file2)
# 矩阵转置
print(t_read_file.transpose())
print(t_read_file.T)
print(t_read_file.swapaxes(1,0)) # 转换1和0轴
| [
"ufofcz@163.com"
] | ufofcz@163.com |
5342ff68bdffe0f8ec7581753bdf1408d1eb68fc | 4e2aeea4c941b15617d3633ee5fca69d65119d99 | /prg96.py | 8dc803ead325935c21f438672fd88dfc0929325d | [] | no_license | kavinandha/kavipriya | d404ff268ce7f552ac7c9bfe374a6fca8fcaa8a9 | 2447162171535d8230f360343fee4a36e9d0e484 | refs/heads/master | 2020-05-05T03:10:17.271061 | 2019-05-18T10:06:19 | 2019-05-18T10:06:19 | 179,663,066 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 143 | py | import math
def sum(a, d, b, r, n):
    """Sum the first *n* terms of an arithmetico-geometric series.

    Term i (1-based) is ``(a + (i - 1) * d) * (b * r**(i - 1))`` — an
    arithmetic progression (first term *a*, difference *d*) multiplied by a
    geometric progression (first term *b*, ratio *r*).

    Bug fix: the original accumulated into the name ``sum`` (the function
    itself), raising UnboundLocalError, and never returned the total.
    Note: the name shadows the built-in ``sum``; kept for compatibility.
    """
    sum1 = 0
    for i in range(1, n + 1):
        sum1 += (a + (i - 1) * d) * (b * math.pow(r, i - 1))
    return sum1
# Demo: first 3 terms of the series with arithmetic part a=1, d=1
# and geometric part b=2, r=2.
a=1
d=1
b=2
r=2
n=3
print(sum(a,d,b,r,n))
| [
"noreply@github.com"
] | kavinandha.noreply@github.com |
01b7735f6bf8f0b822b06b92b55d992ebdaafcae | f3fef992836191869c851daeacd7f83e41e16142 | /venv/lib/python3.6/site-packages/represent/core.py | d1eab4dc165dd8093b035ab08fce9f53c7e158c5 | [] | no_license | patrickcshan/TDAHackathon | ea0bdbabe75f4f791617e202148f7d93edb10c78 | b0110b6aab9c542ba3b9a4cba96d5cfce0b72720 | refs/heads/master | 2020-04-05T18:35:48.242483 | 2018-11-11T17:51:31 | 2018-11-11T17:51:31 | 157,105,119 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,435 | py | # code: utf-8
from __future__ import absolute_import, print_function
import inspect
from copy import copy
from functools import partial
import six
from .compat.contextlib import suppress
from .helper import ReprHelper, PrettyReprHelper
__all__ = ['ReprHelperMixin', 'autorepr']
def autorepr(*args, **kwargs):
    """Class decorator to construct :code:`__repr__` **automatically**
    based on the arguments to ``__init__``.

    :code:`_repr_pretty_` for :py:mod:`IPython.lib.pretty` is also constructed.

    :param positional: Mark arguments as positional by number, or a list of
        argument names.

    Example:

        .. code-block:: python

            >>> @autorepr
            ... class A:
            ...     def __init__(self, a, b):
            ...         self.a = a
            ...         self.b = b

            >>> print(A(1, 2))
            A(a=1, b=2)

        .. code-block:: python

            >>> @autorepr(positional=1)
            ... class B:
            ...     def __init__(self, a, b):
            ...         self.a = a
            ...         self.b = b

            >>> print(A(1, 2))
            A(1, b=2)

    .. versionadded:: 1.5.0
    """
    cls = positional = None
    # We allow using @autorepr or @autorepr(positional=...), so check
    # how we were called.
    if args and not kwargs:
        # Bare @autorepr: the single positional argument is the class itself.
        if len(args) != 1:
            raise TypeError('Class must be only positional argument.')
        cls, = args
        if not isinstance(cls, type):
            raise TypeError(
                "The sole positional argument must be a class. To use the "
                "'positional' argument, use a keyword.")
    elif not args and kwargs:
        # @autorepr(positional=...): only the 'positional' keyword is accepted.
        try:
            positional = kwargs.pop('positional')
        except KeyError:
            raise TypeError(
                "Missing required keyword-only argument: 'positional'")
    elif (args and kwargs) or (not args and not kwargs):
        raise TypeError(
            "Must pass class or keyword-only argument 'positional'")
    # Define the methods we'll add to the decorated class.
    def __repr__(self):
        # Format string is precomputed per class by _autorepr_decorate.
        return self.__class__._repr_formatstr.format(self=self)
    def _repr_pretty_(self, p, cycle):
        """Pretty printer for :class:`IPython.lib.pretty`"""
        cls = self.__class__
        clsname = cls.__name__
        if cycle:
            # Cyclic reference: print a placeholder instead of recursing.
            p.text('{}(...)'.format(clsname))
        else:
            positional_args = cls._repr_pretty_positional_args
            keyword_args = cls._repr_pretty_keyword_args
            with p.group(len(clsname) + 1, clsname + '(', ')'):
                for i, positional in enumerate(positional_args):
                    if i:
                        p.text(',')
                        p.breakable()
                    p.pretty(getattr(self, positional))
                for i, keyword in enumerate(keyword_args,
                                            start=len(positional_args)):
                    if i:
                        p.text(',')
                        p.breakable()
                    with p.group(len(keyword) + 1, keyword + '='):
                        p.pretty(getattr(self, keyword))
    if cls is not None:
        # Called as @autorepr: decorate immediately.
        return _autorepr_decorate(
            cls, positional=positional, repr=__repr__,
            repr_pretty=_repr_pretty_)
    elif positional is not None:
        # Called as @autorepr(positional=...): return the actual decorator.
        return partial(
            _autorepr_decorate, positional=positional, repr=__repr__,
            repr_pretty=_repr_pretty_)
def _autorepr_decorate(cls, positional, repr, repr_pretty):
    """Attach repr machinery to *cls*.

    Inspects ``cls.__init__`` to build a per-class format string for
    ``__repr__`` and the positional/keyword argument name lists used by
    ``_repr_pretty_``, then installs the given methods on the class.
    """
    cls._repr_clsname = cls.__name__
    cls._repr_positional = positional
    # Support Python 3 and Python 2 argspecs,
    # including keyword only arguments
    try:
        argspec = inspect.getfullargspec(cls.__init__)
    except AttributeError:
        argspec = inspect.getargspec(cls.__init__)
    fun_args = argspec.args[1:]
    kwonly = set()
    with suppress(AttributeError):
        fun_args.extend(argspec.kwonlyargs)
        kwonly.update(argspec.kwonlyargs)
    # Args can be opted in as positional
    if positional is None:
        positional = []
    elif isinstance(positional, int):
        # An int means "the first N arguments are positional".
        positional = fun_args[:positional]
    elif isinstance(positional, six.string_types):
        positional = [positional]
    # Ensure positional args can't follow keyword args.
    keyword_started = None
    # _repr_pretty_ uses lists for the pretty printer calls
    cls._repr_pretty_positional_args = list()
    cls._repr_pretty_keyword_args = list()
    # Construct format string for __repr__
    repr_parts = ['{self.__class__.__name__}', '(']
    for i, arg in enumerate(fun_args):
        if i:
            repr_parts.append(', ')
        if arg in positional:
            repr_parts.append('{{self.{0}!r}}'.format(arg))
            cls._repr_pretty_positional_args.append(arg)
            if arg in kwonly:
                raise ValueError("keyword only argument '{}' cannot"
                                 " be positional".format(arg))
            if keyword_started:
                raise ValueError(
                    "positional argument '{}' cannot follow keyword"
                    " argument '{}'".format(arg, keyword_started))
        else:
            keyword_started = arg
            repr_parts.append('{0}={{self.{0}!r}}'.format(arg))
            cls._repr_pretty_keyword_args.append(arg)
    repr_parts.append(')')
    # Store as class variable.
    cls._repr_formatstr = ''.join(repr_parts)
    cls.__repr__ = repr
    cls._repr_pretty_ = repr_pretty
    return cls
class ReprHelperMixin(object):
    """Mixin to provide :code:`__repr__` and :code:`_repr_pretty_` for
    :py:mod:`IPython.lib.pretty` from user defined :code:`_repr_helper_`
    function.

    For full API, see :py:class:`represent.helper.BaseReprHelper`.

    .. code-block:: python

        def _repr_helper_(self, r):
            r.positional_from_attr('attrname')
            r.positional_with_value(value)
            r.keyword_from_attr('attrname')
            r.keyword_from_attr('keyword', 'attrname')
            r.keyword_with_value('keyword', value)

    .. versionadded:: 1.3
    """
    # Empty slots so subclasses may define their own __slots__.
    __slots__ = ()
    def __repr__(self):
        # Plain-text rendering: the subclass's _repr_helper_ drives the output.
        r = ReprHelper(self)
        self._repr_helper_(r)
        return str(r)
    def _repr_pretty_(self, p, cycle):
        # IPython pretty-printing entry point, using the context-managed helper.
        with PrettyReprHelper(self, p, cycle) as r:
            self._repr_helper_(r)
| [
"patrickshan@gmail.com"
] | patrickshan@gmail.com |
433c2c36b3c14566f7590a3fd8c1f0be42e495a7 | c79c943c715656ed0dcb0d261df22f0ab0fa9ce5 | /PythonCharts/TSplotChart.py | 0580f60c0a55a7a97ffe4ec631a54e03095bdd40 | [] | no_license | rimon107/PythonCharts | 6633d8f09615b36a03ea39306318895b64619a3d | 2379425c9aba72488a35705431b89a30bb3caec5 | refs/heads/master | 2021-08-16T08:45:51.538128 | 2017-11-19T11:13:58 | 2017-11-19T11:13:58 | 111,287,841 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 580 | py | # We can use the pandas library in python to read in the csv file.
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd # conventional alias
from pandas.tools.plotting import parallel_coordinates
from sklearn import datasets
def LoadDataset():
    """Return seaborn's bundled example dataset 'gammas' (fetched via sns.load_dataset)."""
    dataset = sns.load_dataset("gammas")
    return dataset
def TSPlotChart():
    """Draw a time-series plot (seaborn.tsplot) of BOLD signal over time, one condition per ROI."""
    dataset = LoadDataset()
    sns.set(color_codes=True)
    # x-axis: timepoint; y-axis: BOLD signal; traces grouped by subject, colored by ROI.
    ax = sns.tsplot(time="timepoint", value="BOLD signal",
                    unit="subject", condition="ROI",
                    data=dataset)
    plt.show()
TSPlotChart()
| [
"nahidulislam107gmail.com"
] | nahidulislam107gmail.com |
2faaa07a120d7dc2a149700224cccc25c82a9f18 | 2967cbb157842713c145c0e3421f328f66e8589a | /reviews/migrations/0002_alter_review_options.py | 4bf7f9e0a7afc59a15d71fde9ea44b489b14c96f | [] | no_license | Code-Institute-Submissions/gentleman_mayer | ecd0a120344527dbf9020612bed9342da894a9d2 | 90ee7a5172bad999b97bc66fad3ad786874df19f | refs/heads/main | 2023-07-16T17:14:09.165499 | 2021-08-27T08:03:36 | 2021-08-27T08:03:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 377 | py | # Generated by Django 3.2.5 on 2021-08-13 01:44
from django.db import migrations
class Migration(migrations.Migration):
    """Set Review meta options: default ordering (newest first) and plural display name."""
    dependencies = [
        ('reviews', '0001_initial'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='review',
            options={'ordering': ('-date_posted',), 'verbose_name_plural': 'Reviews'},
        ),
    ]
| [
"adrianc.chiriac@yahoo.com"
] | adrianc.chiriac@yahoo.com |
f521862b7446dd9e87cf49a022c0d0482adb5426 | 9c3043c41ef69503cd4854756bfcaed200ed112a | /catalog_service/src/catalog_service/event_consumer.py | 7a585923640f13add5485abefa904e76b6e8c267 | [] | no_license | ghedb/monsplit | 63a1baae1e625cb2f0e07923b88e56c1f4b26e92 | 028bdd64b6d48afa2198d9e4303cc829de586a29 | refs/heads/master | 2020-05-05T11:39:46.836934 | 2019-09-04T11:57:52 | 2019-09-04T11:57:52 | 179,998,508 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,289 | py | import json
import logging
import os
import sys
import time
from pprint import pprint
import django
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "catalog_service.settings")
django.setup()
from catalog.const import ProductEventTypes, CONSUMER_GROUP, PRODUCT_TOPIC
from dateutil.parser import parse
from catalog.models import Product
from django.db.transaction import atomic
from pykafka import KafkaClient
from pykafka.exceptions import NoBrokersAvailableError, SocketDisconnectedError
from django.conf import settings
class EventProcessingFailure(Exception):
    """Raised when a consumed message cannot be processed; signals the consume loop to stop."""
    pass
def create_consumer(topic_name):
    """Build a balanced pykafka consumer for *topic_name*.

    Auto-commit is disabled: offsets are committed explicitly in the
    consume loop only after an event has been processed successfully.
    """
    kafka_client = KafkaClient(hosts=settings.KAFKA_HOSTS)
    topic = kafka_client.topics[topic_name]
    consumer = topic.get_balanced_consumer(
        consumer_group=CONSUMER_GROUP,
        auto_commit_enable=False,
        zookeeper_hosts=settings.ZOOKEEPER_HOSTS
    )
    return consumer
def event_handler(message):
    """Decode a raw Kafka message and route it to the matching handler.

    Empty payloads map to a no-op {'name': 'empty'} event. Any failure is
    wrapped in EventProcessingFailure so the consume loop can shut down.
    """
    try:
        if message.value:
            message = json.loads(message.value.decode('utf-8'))
        else:
            message = {'name': 'empty'}
        event_type = message['name']
        # As of now only create and update events are generated so
        # no change in behaviour, based on the type.
        product_updates = [
            ProductEventTypes.CREATED,
            ProductEventTypes.UPDATED,
        ]
        if event_type in product_updates:
            update_product(message)
        elif event_type == ProductEventTypes.DISCONTINUED:
            product_discontinued(message)
    except Exception as err:
        raise EventProcessingFailure(err)
def is_new_event(object_updated_at, date_occurred):
    """Decide whether an incoming event is newer than the stored record.

    A record that was never stamped (falsy ``object_updated_at``) is
    treated as stale, so the event always wins.
    """
    if not object_updated_at:
        # No updated-at recorded yet: assume the event is newer.
        return True
    return object_updated_at < date_occurred
@atomic()
def update_product(event_dict):
    """Create or update the local Product read model from a product event.

    Skips the write when the stored row already reflects a newer event
    (guards against out-of-order delivery).
    """
    pprint(event_dict)
    event_data = event_dict['data']
    date_occurred = parse(event_dict['dateOccurred'])
    product_obj, created = Product.objects.get_or_create(
        product_uuid=event_data['productUUID']
    )
    if not created and not is_new_event(product_obj.updated_at, date_occurred):
        # Event date is older than object update date
        # Since all events are currently "update"
        # we do not update with "older" data
        return
    product_obj.name = event_data['productName']
    product_obj.product_category = event_data['productCategory']
    # Discontinued updated here until monolith service can send "discontinued" events
    product_obj.discontinued = event_data['discontinued']
    product_obj.updated_at = date_occurred
    product_obj.save()
@atomic()
def product_discontinued(event_dict):
    """Apply a product-discontinued event to the local Product read model.

    Only updates the ``discontinued`` flag; stale (older) events are ignored.
    """
    event_data = event_dict['data']
    date_occurred = parse(event_dict['dateOccurred'])
    product_obj, created = Product.objects.get_or_create(
        product_uuid=event_data['productUUID']
    )
    if not created and not is_new_event(product_obj.updated_at, date_occurred):
        # Event date is older than object update date
        # Since all events are currently "update"
        # we do not update with "older" data
        return
    product_obj.discontinued = event_data['discontinued']
    product_obj.updated_at = date_occurred
    product_obj.save()
def start_consume(topics):
    """Poll one consumer per topic in a round-robin loop, forever.

    Broker/socket errors restart just the failing consumer; processing
    failures stop every consumer and propagate so the process can exit.
    Offsets are committed only after successful handling (at-least-once).
    """
    log = logging.getLogger()
    consumers = []
    for topic in topics:
        consumers.append(create_consumer(topic))
    while True:
        for consumer in consumers:
            try:
                # Non-blocking poll so all consumers get serviced in turn.
                message = consumer.consume(block=False)
                if message is not None:
                    event_handler(message)
                    consumer.commit_offsets()
            except (SocketDisconnectedError, NoBrokersAvailableError) as err:
                log.error(
                    'Caught Exception {0}\n restarting consumer'.format(
                        err
                    )
                )
                consumer.stop()
                consumer.start()
            except EventProcessingFailure as err:
                log.error(
                    'Caught Exception {0}\n Stopping all consumers'.format(
                        err
                    )
                )
                for consumer in consumers:
                    consumer.stop()
                raise
            except Exception as err:
                log.error(
                    'Caught unhandled exception {0}\n'.format(
                        err
                    )
                )
                for consumer in consumers:
                    consumer.stop()
                raise Exception(err)
        time.sleep(0.1)
if __name__ == "__main__":
    # Configure verbose stdout logging for the consumer process.
    root = logging.getLogger(__name__)
    root.setLevel(logging.DEBUG)
    handler = logging.StreamHandler(sys.stdout)
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    root.addHandler(handler)
    try:
        topics = [PRODUCT_TOPIC]
        start_consume(topics)
    except Exception as err:
        # delay to prevent container
        # from spinning up/down too fast in case of failure
        root.error(err)
        time.sleep(20)
        sys.exit(1)
| [
"g.h.hedberg@gmail.com"
] | g.h.hedberg@gmail.com |
15cb8a3c9ef14ce398b2219a881c3c6bad010d8f | f334fadf8ca44e88992dc4092a34256a957d10b8 | /sensorDash/teste/bin/django-admin | a8ea7cbbf3f73694eb4f6f53abcd36e4225decaa | [] | no_license | juniorug/inf627_exercicios | f1f1450ede8764792142f494d597eed91eae4a22 | 7ab3102ddfb65e411f717a929fd269237dbe195c | refs/heads/master | 2021-01-10T09:35:29.632418 | 2016-04-26T22:16:12 | 2016-04-26T22:16:17 | 54,422,773 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 327 | #!/home/adewale/Especializacao/inf627_exercicios/sensorDash/teste/bin/python2.7
# -*- coding: utf-8 -*-
import re
import sys
from django.core.management import execute_from_command_line
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(execute_from_command_line())
| [
"adewale.andrade@gmail.com"
] | adewale.andrade@gmail.com | |
d06ab34fea0bac11e8aa864a35184490730e2a5a | 02b495111594a367405b2bfbf220e38da3a5f7b0 | /devel/lib/python2.7/dist-packages/brics_actuator/msg/_JointValue.py | 0723b3357a381bbe1f9fbd1dbb79f58932d32bef | [
"BSD-2-Clause"
] | permissive | Ashuditya/Rebellious-Cowards | 3f7c6afd314e4bf2ffb72b99ecf58be23f309e97 | 56ec395147f2fc59a26669a74a04fe02227bc7b7 | refs/heads/master | 2023-01-24T10:57:47.533839 | 2020-10-01T15:58:07 | 2020-10-01T15:58:07 | 218,202,193 | 0 | 3 | BSD-2-Clause | 2020-10-01T17:07:44 | 2019-10-29T04:09:46 | Makefile | UTF-8 | Python | false | false | 6,583 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from brics_actuator/JointValue.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import genpy
class JointValue(genpy.Message):
  # Autogenerated genpy wrapper for brics_actuator/JointValue: one timestamped
  # scalar value for one joint.  Wire layout (little-endian):
  #   2 x uint32 (timeStamp secs, nsecs) |
  #   uint32 length + bytes (joint_uri)  |
  #   uint32 length + bytes (unit)       |
  #   float64 (value)
  _md5sum = "c8dad5a006889ad7de711a684999f0c6"
  _type = "brics_actuator/JointValue"
  _has_header = False #flag to mark the presence of a Header object
  _full_text = """time timeStamp #time of the data
string joint_uri
string unit #if empy expects si units, you can use boost::unit
float64 value
"""
  __slots__ = ['timeStamp','joint_uri','unit','value']
  _slot_types = ['time','string','string','float64']

  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommend
    use is keyword arguments as this is more robust to future message
    changes. You cannot mix in-order arguments and keyword arguments.

    The available fields are:
       timeStamp,joint_uri,unit,value

    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(JointValue, self).__init__(*args, **kwds)
      #message fields cannot be None, assign default values for those that are
      if self.timeStamp is None:
        self.timeStamp = genpy.Time()
      if self.joint_uri is None:
        self.joint_uri = ''
      if self.unit is None:
        self.unit = ''
      if self.value is None:
        self.value = 0.
    else:
      # No arguments at all: every field gets its default.
      self.timeStamp = genpy.Time()
      self.joint_uri = ''
      self.unit = ''
      self.value = 0.

  def _get_types(self):
    """
    internal API method
    """
    return self._slot_types

  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      _x = self
      # Fixed-size prefix: timestamp secs/nsecs as two little-endian uint32.
      buff.write(_get_struct_2I().pack(_x.timeStamp.secs, _x.timeStamp.nsecs))
      _x = self.joint_uri
      length = len(_x)
      # `unicode` does not exist on Python 3; short-circuiting on `python3`
      # keeps the name from ever being evaluated there.
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      # Length-prefixed UTF-8 string.
      buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self.unit
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      buff.write(_get_struct_d().pack(self.value))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    try:
      if self.timeStamp is None:
        self.timeStamp = genpy.Time()
      end = 0
      _x = self
      start = end
      end += 8
      # Two uint32: timestamp secs, nsecs.
      (_x.timeStamp.secs, _x.timeStamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.joint_uri = str[start:end].decode('utf-8')
      else:
        self.joint_uri = str[start:end]
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.unit = str[start:end].decode('utf-8')
      else:
        self.unit = str[start:end]
      start = end
      end += 8
      (self.value,) = _get_struct_d().unpack(str[start:end])
      # Canonicalize the deserialized timestamp via genpy.Time.canon().
      self.timeStamp.canon()
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill

  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    # Identical to serialize(): this message has only scalar/string fields,
    # so the numpy module is never referenced in the body.
    try:
      _x = self
      buff.write(_get_struct_2I().pack(_x.timeStamp.secs, _x.timeStamp.nsecs))
      _x = self.joint_uri
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self.unit
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      buff.write(_get_struct_d().pack(self.value))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    # Identical to deserialize(); numpy is unused for scalar-only messages.
    try:
      if self.timeStamp is None:
        self.timeStamp = genpy.Time()
      end = 0
      _x = self
      start = end
      end += 8
      (_x.timeStamp.secs, _x.timeStamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.joint_uri = str[start:end].decode('utf-8')
      else:
        self.joint_uri = str[start:end]
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.unit = str[start:end].decode('utf-8')
      else:
        self.unit = str[start:end]
      start = end
      end += 8
      (self.value,) = _get_struct_d().unpack(str[start:end])
      self.timeStamp.canon()
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
# Shared Struct for uint32 length prefixes, provided by genpy.
_struct_I = genpy.struct_I
def _get_struct_I():
    """Return the cached length-prefix Struct ("<I")."""
    # Read-only access: no `global` declaration required.
    return _struct_I
# Lazily-built Struct for two little-endian uint32s (timestamp secs/nsecs).
_struct_2I = None
def _get_struct_2I():
    """Create the "<2I" Struct on first use and cache it in the module."""
    global _struct_2I
    # A Struct instance is always truthy, so `or` only builds it once.
    _struct_2I = _struct_2I or struct.Struct("<2I")
    return _struct_2I
# Lazily-built Struct for one little-endian float64 (the `value` field).
_struct_d = None
def _get_struct_d():
    """Create the "<d" Struct on first use and cache it in the module."""
    global _struct_d
    # A Struct instance is always truthy, so `or` only builds it once.
    _struct_d = _struct_d or struct.Struct("<d")
    return _struct_d
| [
"srujannwankhede786@gmail.com"
] | srujannwankhede786@gmail.com |
c42393571b709d8b7b7b63c733b9226da1ba6504 | c08890939d900901e2b9442969b8260b1083b81d | /go_django/asgi.py | 8395ef72e47291374935963302732743fbd5c2b3 | [] | no_license | ConaGo/django-starter | 71b076d60843c6cf0ab107daef9961d94ff26c77 | 9018746860fb3bf3fedfa68ae38d4b0862824840 | refs/heads/main | 2023-08-22T05:42:17.109140 | 2021-10-22T00:15:11 | 2021-10-22T00:15:11 | 419,427,079 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | """
ASGI config for go_django project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Point Django at this project's settings module; an externally supplied
# DJANGO_SETTINGS_MODULE (e.g. from the process manager) takes precedence
# because setdefault() never overwrites an existing value.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'go_django.settings')
# Module-level ASGI callable discovered by servers as "go_django.asgi:application".
application = get_asgi_application()
| [
"l_jasper@web.de"
] | l_jasper@web.de |
6b2b02a3e562e86899504fd989b8c02b140b8709 | 1779acf92cdf4b603ef28b0746c53e1c2e174db2 | /ThinkPython/py-OOP3.py | f429a0fb97b0bfed10354030feac1a2391bd091d | [] | no_license | antodank/Anto_Python | 6306b021781715beb84e527c67a835fbb900d5d9 | 87823c9c82bebed7fcb2ed2f4a2dfce5c74300a9 | refs/heads/master | 2020-06-06T12:51:01.212190 | 2019-08-20T18:15:40 | 2019-08-20T18:15:40 | 192,743,734 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,129 | py | class Point(object):
"""Represents a point in 2-D space."""
def print_point(p):
"""Print a Point object in human-readable format."""
#print "(%g, %g", % (p.x, p.y)
print("X=%d, Y=%s" % (p.x, p.y))
class Rectangle(object):
    """An axis-aligned rectangle described by width, height and a corner Point.

    Attributes are assigned by callers after construction (no __init__):
    width, height (numbers) and corner (a Point at the lower-left).
    """

    def find_center(self):
        """Return a new Point at the geometric center of this Rectangle."""
        center = Point()
        center.x = self.corner.x + self.width / 2.0
        center.y = self.corner.y + self.height / 2.0
        return center

    def grow_rectangle(self, dwidth, dheight):
        """Enlarge this Rectangle in place by dwidth and dheight."""
        self.width = self.width + dwidth
        self.height = self.height + dheight
def main():
    """Demo driver: build a Point and a Rectangle and print coordinates."""
    pt = Point()
    pt.x = 3
    pt.y = 4
    print('blank')
    pt.print_point()

    rect = Rectangle()
    rect.width = 100.0
    rect.height = 200.0
    rect.corner = Point()
    rect.corner.x = 0.0
    rect.corner.y = 0.0

    mid = rect.find_center()
    print('center')
    mid.print_point()


if __name__ == '__main__':
    main()
"ankitkt.47@outlook.com"
] | ankitkt.47@outlook.com |
f32e61acab543b074d8350bb2c926e937628cbb7 | 97f285b6f8016a8d1d2d675fffb771df3c9e37b9 | /study/algorithms/sorting/selection_sort.py | b1177b6f5b9e1b1dd7feb0d3974b2999b7447124 | [] | no_license | oskomorokhov/python | ef5408499840465d18852954aee9de460d0e7250 | 8909396c4200bd2fca19d3f216ed5f484fb2192a | refs/heads/master | 2021-05-14T09:27:25.413163 | 2019-12-12T21:00:05 | 2019-12-12T21:00:05 | 116,327,306 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,256 | py | # selection sort
def ssort(lst):
    """Sort lst in place with selection sort and return it.

    The list is divided into a sorted prefix and an unsorted suffix; each
    pass finds the smallest element of the suffix and swaps it into place,
    growing the prefix by one.

    Fixes over the previous version:
    * the swap located the minimum with ``lst.index(min_value)``, which
      finds the *first* occurrence of that value and, with duplicates,
      could swap an already-sorted element back out (e.g. [2, 1, 1] was
      "sorted" to [2, 1, 1]);
    * the swap was guarded by ``if new_min:``, so a minimum of 0 (falsy)
      was never moved (e.g. [1, 0] stayed [1, 0]).
    Tracking the *index* of the minimum avoids both problems.

    :param lst: mutable list of mutually comparable items
    :return: the same list object, sorted ascending
    """
    for pivot in range(len(lst)):
        # Index of the smallest element seen so far in lst[pivot:].
        min_idx = pivot
        for i in range(pivot + 1, len(lst)):
            if lst[i] < lst[min_idx]:
                min_idx = i
        if min_idx != pivot:
            lst[pivot], lst[min_idx] = lst[min_idx], lst[pivot]
    return lst
if __name__ == '__main__':
    # Smoke-test the sorter on a fixed sample list.
    sample = [3, 44, 38, 5, 47, 15, 36, 26, 27, 2, 46, 4, 19, 50, 48]
    print("original list", sample)
    print(ssort(list(sample)))
| [
"oskom85@gmail.com"
] | oskom85@gmail.com |
5791e759734afcae1c0217d224986a43aee2394e | 234b317eb1b8c836a6e3599ee16af92844222301 | /ModernArchitecturesFromPyTorch/nb_GRU.py | 39712f3ff5d91d3777a4f857d6bb969924b16091 | [
"Apache-2.0"
] | permissive | BradleyBrown19/ModernArchitecturesFromScratch | 98b9eaf8cea2424d72297044591964cbda061a07 | 9511c94cc7b6782df4603fd3f7751e653b8e7f20 | refs/heads/master | 2023-04-14T21:09:28.003507 | 2021-11-01T14:16:06 | 2021-11-01T14:16:06 | 233,241,743 | 0 | 0 | Apache-2.0 | 2023-04-11T23:43:00 | 2020-01-11T14:08:39 | Jupyter Notebook | UTF-8 | Python | false | false | 317 | py |
#################################################
### THIS FILE WAS AUTOGENERATED! DO NOT EDIT! ###
#################################################
# file to edit: dev_nb/GRU.ipynb
from ModernArchitecturesFromPyTorch.nb_ScratchToPytorch import *
from ModernArchitecturesFromScratch.convolutions_pooling_04 import * | [
"bradley19brown@gmail.com"
] | bradley19brown@gmail.com |
e6728fd49b3a778b40a7a6fc9cd3ccc3f191a11a | d0b5adc8ff50da360c5cf4927b005690edb74b1d | /conjugation_problem_sp/mainsp.py | 102c26ecf0fe3d29cc0614eb77919dc35d623033 | [
"MIT"
] | permissive | nikolskydn/discrete_vortex_method_in_2D | a11bb390f4b7291f9b7824c1533931b158660402 | 68c6672d3744706f4c51a184470bb027ce377f4b | refs/heads/master | 2020-12-25T07:28:12.990185 | 2017-06-18T19:00:53 | 2017-06-18T19:00:53 | 61,661,607 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 2,062 | py | #!/usr/bin/python
import numpy as np
from sys import path
path.append("./../")
from smodule.sfunctp import *
# Problem coefficients consumed by the smodule.sfunctp helpers
# (semantics defined there — presumably medium/conjugation factors; TODO confirm).
Kx=1
Hx=1
Px=1
Ky=1
Hy=1
Py=1
# Fundamental solutions as sympy expressions: each pairs a log source at
# (y0, y1) with an image term at (y0, -y1) (note (x1+y1)**2), i.e. the
# boundary x1 = 0 is handled by reflection.
Phi1=-0.25/sp.pi*sp.log((x0-y0)**2+(x1-y1)**2)-0.25/sp.pi*sp.log((x0-y0)**2+(x1+y1)**2)
Psi2=0.25/sp.pi*sp.log((x0-y0)**2+(x1-y1)**2)-0.25/sp.pi*sp.log((x0-y0)**2+(x1+y1)**2)
# Number of boundary panels on the circle.
n=800
# Source strengths q and source positions z (columns are the two sources).
q=np.array([np.pi,np.pi])
z=np.array([[0.0,3.0],[3.0,2.0]],dtype='d')
lambdaK=0.8
minHDist=0.01
hv=0.2 # for velocity field grid
y_crcl=1.5
# Unit circle centred at (0, y_crcl); theta runs 2*pi -> 0, i.e. clockwise.
thet=np.linspace(2*np.pi,0,n+1,endpoint=True)
x=np.array([np.cos(thet),y_crcl+np.sin(thet)])
# Assemble the Fredholm second-kind system and solve for the layer density g.
A,f=ieFredgolm2dk(x,x,lambdaK,0.0,q,z,varphiSources,Py,Phi1)
g=np.linalg.solve(A,f)
# Evaluation grid in the upper half plane, with nodes near either source
# point removed (squared distance must exceed 0.4).
xs=np.mgrid[-4.:4.0001:hv,0.0001:4.0001:hv]
xs=xs[:,((xs[0]-z[0][0])**2+(xs[1]-z[1][0])**2)>0.4]
xs=xs[:,((xs[0]-z[0][1])**2+(xs[1]-z[1][1])**2)>0.4]
# Total velocity field: direct source contribution + double-layer potential.
Ws=VSources(q,xs,z,Kx,Phi1)+VDL(xs,x,g,Hx,Psi2)
from pylab import quiver,show, gca,Circle,text, axis, grid
# Draw the circular obstacle and the velocity field as red arrows.
gca().add_patch(Circle((0,y_crcl),radius=1,alpha =.5, fc='y'))
quiver(xs[0],xs[1],Ws[0],Ws[1],color='r')
"""
# Testing on the exact solution
qOut=np.array([np.pi,lambdaK*np.pi,-lambdaK*np.pi])
zOut=np.array([[0.0,0.0,0.0],[2.0,0.5,0.0]],dtype='d')
xOut=xs[:,(xs[0]**2+xs[1]**2)>(1+minHDist)**2]
WOut=VSources(qOut,xOut,zOut)
WNOut=VSources(q,xOut,z)+VDL(xOut,x,g)
etaOut=(1.-np.sqrt(WNOut[0]**2+WNOut[1]**2)/np.sqrt(WOut[0]**2+WOut[1]**2))*100
etaOutMax=np.max(etaOut)
print etaOut
text(-2.5,3.1,ur"$\eta_{Out,max}=%6.4f$per." % etaOutMax)
qIn=np.array([(1-lambdaK)*np.pi])
xIn=xs[:,(xs[0]**2+xs[1]**2)<(1-minHDist)**2]
WIn=VSources(qIn,xIn,z)
WNIn=VSources(q,xIn,z)+VDL(xIn,x,g)
etaIn=(1-np.sqrt(WNIn[0]**2+WNIn[1]**2)/np.sqrt(WIn[0]**2+WIn[1]**2))*100
etaInMax=np.max(etaIn)
text(0,3.1,ur"$\eta_{In,max}=%6.4f$per." % etaInMax)
"""
#quiver(xOut[0],xOut[1],WOut[0],WOut[1],color='b',lw=1)
#quiver(xIn[0],xIn[1],WIn[0],WIn[1],color='g',lw=1)
#quiver(np.r_[xOut[0],xIn[0]],np.r_[xOut[1],xIn[1]],np.r_[WOut[0],WIn[0]],np.r_[WOut[1],WIn[1]])
#np.savetxt('W.dat',np.c_[xs[0].ravel(),xs[1].ravel(),Ws[0],Ws[1]],fmt='%6.4f',delimiter=' ')
grid(True)
axis('equal')
show()
| [
"nikoslkydn@mail.ru"
] | nikoslkydn@mail.ru |
5877c60e529afc0f3fee94588971ca9f8c20a53c | d52a9bae59fb20793237904b2f164be752ce43ff | /plot_install.py | ee6c3bd7f302287ffea3a0e996945745dd99aea0 | [] | no_license | torkildr/tools | fda0dbf5654d5152a0c16f3c0b9366e7db4ba66e | cd392989785d4065dee33bcd47eceb70f8825114 | refs/heads/master | 2020-06-04T16:54:49.447624 | 2011-06-22T21:05:36 | 2011-06-22T21:05:43 | 1,021,633 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,507 | py | #!/usr/bin/env python
import datetime
import dateutil.rrule as rrule
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
dates = []
totals = []
actives = []
versions = {}
f = open("/home/markild/stuff/trommelyd_installs.txt")
for line in f:
data = line.split()
if len(data) >= 3:
try:
date = datetime.datetime.strptime(data[0], "%d.%m.%Y")
total = int(data[1])
active = int(data[2])
dates.append(date)
totals.append(total)
actives.append(active)
if len(data) == 4:
versions[dates[-1]] = data[3]
except:
pass
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(dates, actives, 'b')
ax.plot(dates, totals, 'r')
# format the coords message box
def value(x):
return '$%1.2f' % x
ax.format_ydata = value
ax.grid(True)
ax.xaxis.set_major_locator(mdates.MonthLocator())
ax.xaxis.set_major_formatter(mdates.DateFormatter('%b %Y'))
ax.xaxis.set_minor_locator(mdates.DayLocator())
datemin = min(dates) - datetime.timedelta(days=3)
datemax = max(dates)
ax.set_xlim(datemin, datemax)
ax.set_xlabel("Date")
ax.set_ylabel("Installs")
for key, value in versions.items():
ax.axvline(x=key, linestyle='--', color='g', label=value)
# rotates and right aligns the x labels, and moves the bottom of the
# axes up to make room for them
#fig.autofmt_xdate()
plt.savefig("tempfile.png", format='png')
| [
"torkild@retvedt.no"
] | torkild@retvedt.no |
8ca1e09fb7ee173a14faeb5049dd0aa0737a9ba0 | eff2fc11905f6118dcd70050392f168cd7aea086 | /leetcode/40_combination_sum_ii/solution1.py | df0fa9abba6a73cfa6548fd39c14982c906e75fb | [] | no_license | algobot76/leetcode-python | 28f1e1107fa941a3b40006f074eec6231e674ac1 | ec8bff8978d6915bfdf187c760b97ee70f7515af | refs/heads/master | 2021-07-05T17:06:40.581977 | 2020-09-19T22:02:38 | 2020-09-19T22:02:38 | 199,255,699 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 737 | py | class Solution:
def combinationSum2(self, candidates, target):
candidates.sort()
combs = []
self.dfs(candidates, target, 0, [], combs)
return combs
def dfs(self, candidates, target, start, comb, combs):
if target < 0:
return
if target == 0:
return combs.append(list(comb))
prev = 0
while start < len(candidates) and candidates[start] <= target:
if prev != candidates[start]:
comb.append(candidates[start])
self.dfs(candidates, target - candidates[start], start + 1,
comb, combs)
comb.pop()
prev = candidates[start]
start += 1
| [
"xkaitian@gmail.com"
] | xkaitian@gmail.com |
0703e5f22212b00ffaf7e02dd00eeaa7b1966ce3 | cc578cec7c485e2c1060fd075ccc08eb18124345 | /cs15211/TopKFrequentWords.py | 7733837228f8d83367a4b89021aa264f1154d5e3 | [
"Apache-2.0"
] | permissive | JulyKikuAkita/PythonPrac | 18e36bfad934a6112f727b4906a5e4b784182354 | 0ba027d9b8bc7c80bc89ce2da3543ce7a49a403c | refs/heads/master | 2021-01-21T16:49:01.482561 | 2019-02-07T06:15:29 | 2019-02-07T06:15:29 | 91,907,704 | 1 | 1 | Apache-2.0 | 2019-02-07T06:15:30 | 2017-05-20T18:12:53 | Python | UTF-8 | Python | false | false | 5,923 | py | __source__ = 'https://leetcode.com/problems/top-k-frequent-words/'
# Time: O()
# Space: O()
#
# Description: Leetcode # 692. Top K Frequent Words
#
# Given a non-empty list of words, return the k most frequent elements.
#
# Your answer should be sorted by frequency from highest to lowest.
# If two words have the same frequency, then the word with the lower alphabetical order comes first.
#
# Example 1:
# Input: ["i", "love", "leetcode", "i", "love", "coding"], k = 2
# Output: ["i", "love"]
# Explanation: "i" and "love" are the two most frequent words.
# Note that "i" comes before "love" due to a lower alphabetical order.
# Example 2:
# Input: ["the", "day", "is", "sunny", "the", "the", "the", "sunny", "is", "is"], k = 4
# Output: ["the", "is", "sunny", "day"]
# Explanation: "the", "is", "sunny" and "day" are the four most frequent words,
# with the number of occurrence being 4, 3, 2 and 1 respectively.
# Note:
# You may assume k is always valid, 1 <= k <= number of unique elements.
# Input words contain only lowercase letters.
# Follow up:
# Try to solve it in O(n log k) time and O(n) extra space.
#
import heapq
import unittest
import collections
#
# Approach #1: Sorting [Accepted]
# Time Complexity: O(NlogN), where N is the length of words.
# We count the frequency of each word in O(N) time, then we sort the given words in O(NlogN) time.
#
# Space Complexity: O(N), the space used to store our candidates.
class Solution(object):
    def topKFrequent(self, words, k):
        """Return the k most frequent words, most frequent first; ties are
        broken by ascending alphabetical order.

        :type words: List[str]
        :type k: int
        :rtype: List[str]
        """
        count = collections.Counter(words)
        # sorted() instead of `count.keys()` + list.sort(): dict views have
        # no .sort() on Python 3, so the old code raised AttributeError
        # there; sorted() yields the identical ordering on Python 2 as well.
        candidates = sorted(count, key=lambda w: (-count[w], w))
        return candidates[:k]
# In Python, we improve this to O(N+klogN): our heapq.heapify operation and counting operations are O(N),
# and each of kk heapq.heappop operations are O(logN).
# Space Complexity: O(N)O(N), the space used to store our count.
class Solution2(object):
    def topKFrequent(self, words, k):
        """Heap-based variant: O(N) to count and heapify, O(k log N) to pop.

        :type words: List[str]
        :type k: int
        :rtype: List[str]
        """
        count = collections.Counter(words)
        # (-freq, word) tuples make the min-heap pop by highest frequency,
        # with ascending alphabetical order breaking frequency ties.
        heap = [(-freq, word) for word, freq in count.items()]
        heapq.heapify(heap)
        # range() instead of the Python-2-only xrange(): identical iteration
        # and keeps the method working on Python 3.
        return [heapq.heappop(heap)[1] for _ in range(k)]
class TestMethods(unittest.TestCase):
    # Placeholder suite: one trivially true assertion keeps the unittest
    # entry point below runnable even though no real cases exist yet.
    def test_Local(self):
        self.assertEqual(1, 1)

if __name__ == '__main__':
    unittest.main()
Java = '''
# Thought: https://leetcode.com/problems/top-k-frequent-words/solution/
# Approach #1: Sorting [Accepted]
# 68ms 11.37%
class Solution {
public List<String> topKFrequent(String[] words, int k) {
Map<String, Integer> count = new HashMap<>();
for (String word: words) {
count.put(word, count.getOrDefault(word, 0) + 1);
}
List<String> candidates = new ArrayList(count.keySet());
Collections.sort(candidates, (w1, w2) -> count.get(w1).equals(count.get(w2))?
w1.compareTo(w2) : count.get(w2) - count.get(w1)); //if w1 - w2,
// sorting in increasing order, thus return least frequent words
return candidates.subList(0, k);
}
}
# Approach #2: Heap [Accepted] PQ
# 11ms 99.80%
# Time Complexity: O(Nlogk), where N is the length of words.
# We count the frequency of each word in O(N) time, then we add N words to the heap,
# each in O(logk) time. Finally, we pop from the heap up to k times. As k <= N, this is O(Nlogk) in total.
/*
Lambda expression
https://www.mkyong.com/java8/java-8-lambda-comparator-example/
*/
# 13ms 81.92%
class Solution {
public List<String> topKFrequent(String[] words, int k) {
List<String> res = new ArrayList<>();
Map<String, Integer> map = new HashMap<>();
for (String word: words) {
map.put(word, map.getOrDefault(word, 0) + 1);
}
PriorityQueue<Map.Entry<String, Integer>> pq = new PriorityQueue<>(new Checker());
for (Map.Entry<String, Integer> entry : map.entrySet()) {
pq.offer(entry);
if (pq.size() > k) pq.poll();
}
while (pq.size() != 0) {
res.add(0, pq.poll().getKey());
}
return res;
}
}
class Checker implements Comparator<Map.Entry<String, Integer>> {
public int compare(Map.Entry<String, Integer> o1, Map.Entry<String, Integer> o2) {
if (o1.getValue() == o2.getValue()) {
return o2.getKey().compareTo(o1.getKey());
} else {
return o1.getValue() - o2.getValue();
}
}
}
# 10ms 99.34%
class Solution {
private class Point implements Comparable<Point> {
private String str;
private int count;
public Point(String str) {
this.str = str;
this.count = 1;
}
@Override
public int hashCode() {
return str.hashCode();
}
@Override
public int compareTo(Point b) {
if(count != b.count) {
return b.count - count;
}
else {
return str.compareTo(b.str);
}
}
public void addCount() {
count++;
}
public String getStr() {
return str;
}
}
public List<String> topKFrequent(String[] words, int k) {
Map<String, Point> map = new HashMap<>();
for(String word: words) {
if(map.containsKey(word)) {
map.get(word).addCount();
}
else map.put(word, new Point(word));
}
PriorityQueue<Point> pq = new PriorityQueue<>(map.values());
int count = 0;
List<String> res = new ArrayList<>();
while(!pq.isEmpty() && count < k) {
res.add(pq.poll().getStr());
count++;
}
return res;
}
}
''' | [
"b92701105@gmail.com"
] | b92701105@gmail.com |
ae753451fb6e1386b234a56c92481ab547af11b9 | 52ec37b5e930165896bc5322ffd8903c3b8bae27 | /hello.py | 285c3c9717b7cc4e01d02ec35a329fee00294020 | [] | no_license | sicardnicolas/pyqt-hello | d4586aa50fed5a14b20e37f098276237effc5325 | f5578f90d26bfb112842d9ed1b9e1a84b2b9f1bb | refs/heads/master | 2022-12-25T12:23:59.385206 | 2020-09-22T20:52:46 | 2020-09-22T20:52:46 | 297,449,843 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,966 | py | from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
import sys
class MainWindow(QMainWindow):
    """Demo window showing four ways to connect a Qt signal to slots.

    All four connections to ``windowTitleChanged`` are made before
    ``setWindowTitle`` is called, so that single call fires every
    connected slot, in connection order.
    """

    def __init__(self, *args, **kwargs):
        super(MainWindow, self).__init__(*args, **kwargs)

        # SIGNAL: The connected function will be called whenever the window
        # title is changed. The new title will be passed to the function.
        self.windowTitleChanged.connect(self.onWindowTitleChange)

        # SIGNAL: The connected function will be called whenever the window
        # title is changed. The new title is discarded in the lambda and the
        # function is called without parameters.
        self.windowTitleChanged.connect(lambda x: self.my_custom_fn())

        # SIGNAL: The connected function will be called whenever the window
        # title is changed. The new title is passed to the function
        # and replaces the default parameter
        self.windowTitleChanged.connect(lambda x: self.my_custom_fn(x))

        # SIGNAL: The connected function will be called whenever the window
        # title is changed. The new title is passed to the function
        # and replaces the default parameter. Extra data is passed from
        # within the lambda.
        self.windowTitleChanged.connect(lambda x: self.my_custom_fn(x, 25))

        # Triggers all four slots above.
        self.setWindowTitle("Hello, World!")

        label = QLabel("HELLO, WORLD!\n(but yelling)")
        label.setAlignment(Qt.AlignCenter)
        self.setCentralWidget(label)

    # SLOT: This accepts a string, e.g. the window title, and prints it
    def onWindowTitleChange(self, s):
        print(s)

    # SLOT: This has default parameters and can be called without a value
    def my_custom_fn(self, a="HELLLO!", b=5):
        print(a, b)

    def contextMenuEvent(self, event):
        # Log right-click context-menu events, then defer to Qt's default
        # handling.
        print("Context menu event!")
        super(MainWindow, self).contextMenuEvent(event)
# One QApplication per process; sys.argv lets Qt consume its own CLI flags.
app = QApplication(sys.argv)
window = MainWindow()
# Widgets are hidden by default; show() makes the main window visible.
window.show()
# Enter the Qt event loop; blocks until the application quits.
app.exec_()
| [
"nicolas@sicard.me"
] | nicolas@sicard.me |
a3e1226d63fcfcf528781ec2ed98b724fb2a041d | d0cc52ff1ae294dc6883b67c33b98f10f16e1f73 | /foodplaner/foodApp/migrations/0001_initial.py | 50927417a583e967bb4ff144c13ad6d58d9cf75b | [] | no_license | FreshMax9000/Essenswebsite | dc34e7fd7398906625fae689d513fdddf871dcd8 | d23d6fa0ace126637aac0f094a16fe4c16a5cbf6 | refs/heads/development | 2022-12-05T01:20:32.119196 | 2020-06-05T09:06:03 | 2020-06-05T09:06:03 | 245,122,540 | 2 | 0 | null | 2022-11-22T05:53:10 | 2020-03-05T09:39:03 | CSS | UTF-8 | Python | false | false | 4,478 | py | # Generated by Django 3.0.5 on 2020-05-07 19:19
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Autogenerated initial schema for foodApp (Django 3.0.5).  Do not edit
    # by hand once applied: Django's migration state is derived from it.

    initial = True

    dependencies = [
        # Resolves to whatever AUTH_USER_MODEL points at (swappable user model).
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Food plan shell; its relations are added further below.
        migrations.CreateModel(
            name='Foodplan',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
        ),
        migrations.CreateModel(
            name='Grocery',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30)),
                ('unit', models.CharField(default='', max_length=15)),
            ],
        ),
        # Through-model linking Recipe and Grocery with a quantity.
        migrations.CreateModel(
            name='Ingredient',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('quantity', models.FloatField(default=0)),
                ('grocery', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='foodApp.Grocery')),
            ],
        ),
        migrations.CreateModel(
            name='Recipe',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=50)),
                ('description', models.CharField(default='', max_length=100)),
                ('preparation', models.TextField(default='')),
                ('work_time', models.IntegerField(default=0)),
                ('avg_rating', models.FloatField(default=0)),
                ('difficulty', models.IntegerField(default=0)),
                ('reviewed', models.BooleanField(default=False)),
                ('image', models.ImageField(upload_to='recipe_images')),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('ingredients', models.ManyToManyField(through='foodApp.Ingredient', to='foodApp.Grocery')),
            ],
            options={
                'ordering': ['title'],
                'permissions': (('can_review_recipe', 'Can review Recipe'),),
            },
        ),
        # Added after Recipe exists to close the Ingredient <-> Recipe link.
        migrations.AddField(
            model_name='ingredient',
            name='recipe',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='foodApp.Recipe'),
        ),
        # Through-model linking Foodplan and Recipe with a date/daytime slot.
        migrations.CreateModel(
            name='Foodplan_Recipe',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateField(default=datetime.date.today)),
                ('daytime', models.BooleanField(default=True)),
                ('foodplan', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='foodApp.Foodplan')),
                ('recipe', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='foodApp.Recipe')),
            ],
            options={
                'ordering': ['date', '-daytime'],
            },
        ),
        migrations.AddField(
            model_name='foodplan',
            name='recipes',
            field=models.ManyToManyField(through='foodApp.Foodplan_Recipe', to='foodApp.Recipe'),
        ),
        migrations.AddField(
            model_name='foodplan',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.CreateModel(
            name='Commentary',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=50)),
                ('content', models.TextField(default='')),
                ('rating', models.IntegerField(default=0)),
                ('date', models.DateField(default=datetime.date.today)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('recipe', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='foodApp.Recipe')),
            ],
        ),
    ]
| [
"philipp@hmreimund.de"
] | philipp@hmreimund.de |
01651216a026d86c1a68fac21316efefe8e285b4 | 6b05bddf2e294c8e1b39846aecadfa06b4ff805d | /kubevirt/models/v1_secret_volume_source.py | a4149f175fdbc18ed8d07833b30451edf27ea370 | [
"Apache-2.0"
] | permissive | kubevirt/client-python | 5ca82fe55d48c07f62796d2bed3605a7c189922c | 235fe17f58d41165010be7e4122cb67bdc866fe7 | refs/heads/master | 2023-09-03T12:25:27.272479 | 2023-08-17T00:33:31 | 2023-08-17T00:33:31 | 105,017,761 | 29 | 25 | Apache-2.0 | 2022-10-20T13:52:10 | 2017-09-27T12:51:32 | Python | UTF-8 | Python | false | false | 5,318 | py | # coding: utf-8
"""
KubeVirt API
This is KubeVirt API an add-on for Kubernetes.
OpenAPI spec version: 1.0.0
Contact: kubevirt-dev@googlegroups.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1SecretVolumeSource(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'optional': 'bool',
'secret_name': 'str',
'volume_label': 'str'
}
attribute_map = {
'optional': 'optional',
'secret_name': 'secretName',
'volume_label': 'volumeLabel'
}
def __init__(self, optional=None, secret_name=None, volume_label=None):
"""
V1SecretVolumeSource - a model defined in Swagger
"""
self._optional = None
self._secret_name = None
self._volume_label = None
if optional is not None:
self.optional = optional
if secret_name is not None:
self.secret_name = secret_name
if volume_label is not None:
self.volume_label = volume_label
@property
def optional(self):
"""
Gets the optional of this V1SecretVolumeSource.
Specify whether the Secret or it's keys must be defined
:return: The optional of this V1SecretVolumeSource.
:rtype: bool
"""
return self._optional
@optional.setter
def optional(self, optional):
"""
Sets the optional of this V1SecretVolumeSource.
Specify whether the Secret or it's keys must be defined
:param optional: The optional of this V1SecretVolumeSource.
:type: bool
"""
self._optional = optional
@property
def secret_name(self):
"""
Gets the secret_name of this V1SecretVolumeSource.
Name of the secret in the pod's namespace to use. More info: https://kubernetes.io/docs/concepts/storage/volumes#secret
:return: The secret_name of this V1SecretVolumeSource.
:rtype: str
"""
return self._secret_name
@secret_name.setter
def secret_name(self, secret_name):
"""
Sets the secret_name of this V1SecretVolumeSource.
Name of the secret in the pod's namespace to use. More info: https://kubernetes.io/docs/concepts/storage/volumes#secret
:param secret_name: The secret_name of this V1SecretVolumeSource.
:type: str
"""
self._secret_name = secret_name
@property
def volume_label(self):
"""
Gets the volume_label of this V1SecretVolumeSource.
The volume label of the resulting disk inside the VMI. Different bootstrapping mechanisms require different values. Typical values are \"cidata\" (cloud-init), \"config-2\" (cloud-init) or \"OEMDRV\" (kickstart).
:return: The volume_label of this V1SecretVolumeSource.
:rtype: str
"""
return self._volume_label
@volume_label.setter
def volume_label(self, volume_label):
"""
Sets the volume_label of this V1SecretVolumeSource.
The volume label of the resulting disk inside the VMI. Different bootstrapping mechanisms require different values. Typical values are \"cidata\" (cloud-init), \"config-2\" (cloud-init) or \"OEMDRV\" (kickstart).
:param volume_label: The volume_label of this V1SecretVolumeSource.
:type: str
"""
self._volume_label = volume_label
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1SecretVolumeSource):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
    """Return True when the two objects are not equal (inverse of ``==``)."""
    equal = self == other
    return not equal
| [
"travis@travis-ci.org"
] | travis@travis-ci.org |
478818c9037b20e91668146589a7d716069fa610 | 364e81cb0c01136ac179ff42e33b2449c491b7e5 | /spell/tags/2.0.9/src/server/core/ipc/interfaceserver.py | 2e6b16cfaf929891ff700add41005364bc788a31 | [] | no_license | unnch/spell-sat | 2b06d9ed62b002e02d219bd0784f0a6477e365b4 | fb11a6800316b93e22ee8c777fe4733032004a4a | refs/heads/master | 2021-01-23T11:49:25.452995 | 2014-10-14T13:04:18 | 2014-10-14T13:04:18 | 42,499,379 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,221 | py | ###################################################################################
## MODULE : core.ipc.interfaceserver
## DATE : Mar 18, 2011
## PROJECT : SPELL
## DESCRIPTION: IPC interface for servers
## --------------------------------------------------------------------------------
##
## Copyright (C) 2008, 2011 SES ENGINEERING, Luxembourg S.A.R.L.
##
## This file is part of SPELL.
##
## This component is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This software is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with SPELL. If not, see <http://www.gnu.org/licenses/>.
##
###################################################################################
#*******************************************************************************
# SPELL Imports
#*******************************************************************************
from spell.utils.log import *
from server.core.messages.base import *
from server.core.messages.msghelper import *
#*******************************************************************************
# Local Imports
#*******************************************************************************
from ipc import IPCworker,IPCerror,IPC_DEBUG,IPC_KEY_ENCODING
from xmlmsg import MessageClass,MessageException
from mailbox import Mailbox
from input import IPCinput
from output import IPCoutput
from incoming import IncomingMessage,IncomingRequest
#*******************************************************************************
# System Imports
#*******************************************************************************
import os,sys,thread,time
import socket,os,sys,select,struct
#*******************************************************************************
# Exceptions
#*******************************************************************************
#*******************************************************************************
# Module globals
#*******************************************************************************
################################################################################
class IPCinterfaceServer(IPCworker):
    """Socket-based IPC server endpoint (Python 2 code).

    Accepts client connections on a listener socket, hands each client a
    numeric key, and creates an IPCinput/IPCoutput pair per client.
    Incoming messages and requests are dispatched asynchronously to the
    callbacks supplied to connect(); responses to our own outgoing
    requests are parked in a Mailbox until the requesting thread
    retrieves them (see sendRequest/incomingResponse).
    """
    # Callbacks for processing incoming requests, messages and errors
    messageCallback = None
    requestCallback = None
    errorCallback = None
    # Client keys
    lastClientKey = 0
    clientKeys = []
    # Connectivity data
    serverPort = None
    serverKey = None
    # Input readers and output writers for each client
    writers = {}
    readers = {}
    # Connected flag
    connected = False
    # Connection socket
    socket = None
    # Mailbox stores for a while the responses
    mailbox = None
    # Outgoing request sequence count
    reqSeq = None
    reqLock = None

    #===========================================================================
    def __init__(self, name):
        """Create the server worker; connect() must be called before use."""
        IPCworker.__init__(self,name)
        self.lastClientKey = 0
        self.clientKeys = []
        self.serverKey = None
        self.serverPort = None
        self.messageCallback = None
        self.requestCallback = None
        self.errorCallback = None
        self.writers = {}
        self.readers = {}
        self.socket = None
        self.mailbox = Mailbox(name)
        self.reqSeq = 0
        self.reqLock = thread.allocate_lock()
        self.connected = False

    #===========================================================================
    def connect(self, serverKey, serverPort, messageCallback = None, requestCallback = None, errorCallback = None):
        """Bind and listen on serverPort and store the dispatch callbacks.

        On any setup failure the partial state is torn down via
        disconnect() and the original exception is re-raised.
        """
        if self.connected: return
        self.serverKey = serverKey
        self.serverPort = serverPort
        self.messageCallback = messageCallback
        self.requestCallback = requestCallback
        self.errorCallback = errorCallback
        try:
            self.socket = socket.socket( socket.AF_INET, socket.SOCK_STREAM )
            # SO_LINGER (1, 0): close() aborts the connection immediately
            linger = struct.pack('ii', 1, 0)
            self.socket.setsockopt( socket.SOL_SOCKET, socket.SO_KEEPALIVE, 0 )
            self.socket.setsockopt( socket.SOL_SOCKET, socket.SO_LINGER, linger )
            self.socket.bind( ( '', serverPort) )
            self.socket.listen(5)
            self.port = self.socket.getsockname()[1]
            LOG( self.name + ": Socket server ready: " + str(self.socket.getsockname()), level = LOG_COMM)
            self.connected = True
        except BaseException,ex:
            LOG( self.name + ": Setup failed, disconnecting: " + str(ex), level = LOG_COMM)
            self.disconnect()
            raise ex

    #===========================================================================
    def work(self):
        """One worker iteration: accept a pending client, if any.

        Uses a 1-second select() timeout so the worker loop can notice
        shutdown requests.  Each accepted client is assigned the next
        key, which is sent to the client first (packed with
        IPC_KEY_ENCODING) before any other traffic.
        """
        # Accept connections
        readyRead,readyWrite,e = select.select([self.socket], [], [], 1)
        if readyRead:
            try:
                (clientsocket, address) = self.socket.accept()
            except:
                # NOTE(review): if accept() fails while still working,
                # clientsocket is unbound below -- presumably accept only
                # fails here on shutdown; confirm.
                if not self.working(): return
            if clientsocket:
                LOG( self.name + ": Client connected from " + str(address), level = LOG_COMM)
                # Get a key for the incoming client
                clientKey = self.lastClientKey
                self.lastClientKey = clientKey + 1
                self.clientKeys.append(clientKey)
                LOG( self.name + ": Assigned key: " + repr(clientKey), level = LOG_COMM)
                # The first thing to do is to send the key to the client. After
                # that, conversation can be started
                keystr = struct.pack(IPC_KEY_ENCODING,clientKey)
                clientsocket.sendall(keystr)
                if IPC_DEBUG: LOG( self.name + ": Key sent", level = LOG_COMM)
                # Create IO classes for this client
                self.readers[clientKey] = IPCinput( self.name, clientsocket, clientKey, self )
                self.writers[clientKey] = IPCoutput( self.name, clientsocket, self.serverKey, self )
                # Start the input thread
                self.readers[clientKey].start()
                LOG( self.name + ": Channel ready", level = LOG_COMM)
                self.connected = True
            else:
                LOG( self.name + ": Invalid client handle", LOG_ERROR, level = LOG_COMM)

    #===========================================================================
    def disconnect(self, clientKey = None, eoc = True ):
        """Disconnect one client (by key) or, with clientKey=None, all of them.

        Disconnecting all also shuts the mailbox, closes the listener
        socket and stops the worker.  'eoc' controls whether an
        end-of-conversation is sent to the peers.
        """
        if not self.connected: return
        if clientKey: clientKey = int(clientKey)
        # Shutdown input and output for the given client
        if clientKey in self.clientKeys:
            if IPC_DEBUG: LOG( self.name + ": Disconnecting '" + repr(clientKey) + "'", level = LOG_COMM)
            if (clientKey in self.readers):
                reader = self.readers.pop(clientKey)
                reader.disconnect()
            # We dont care if there is a failure here, we are removing it
            if (clientKey in self.writers):
                writer = self.writers.pop(clientKey)
                writer.disconnect( sendEOC = False )
            if (clientKey in self.clientKeys):
                self.clientKeys.remove(clientKey)
            LOG( self.name + ": Disconnected '" + repr(clientKey) + "'", level = LOG_COMM)
        # Shutdown input and output for all clients
        elif clientKey == None:
            self.connected = False
            LOG( self.name + ": Disconnecting all", level = LOG_COMM)
            for clientKey in self.clientKeys:
                self.writers[clientKey].disconnect( sendEOC = eoc )
                self.readers[clientKey].disconnect()
                LOG( self.name + ": Disconnected '" + repr(clientKey) + "'", level = LOG_COMM)
            self.writers.clear()
            self.readers.clear()
            self.clientKeys = []
            self.mailbox.shutdown()
            self.socket.close()
            self.working(False)
            LOG( self.name + ": All disconnected", level = LOG_COMM)
        else:
            LOG("Unknown client key: " + repr(clientKey), LOG_ERROR, level=LOG_COMM)

    #===========================================================================
    def incomingRequest(self, peerKey, msg):
        """Dispatch a peer request to requestCallback on its own thread."""
        writer = self.writers.get(peerKey)
        senderId = msg.getSender()
        receiverId = msg.getReceiver()
        # Request id encodes peer key, both endpoints and the sequence
        reqId = str(peerKey) + "-" + receiverId + "-" + senderId + ":" + msg.getSequence()
        IncomingRequest( reqId, msg, self.requestCallback, writer ).start()

    #===========================================================================
    def incomingMessage(self, peerKey, msg):
        """Dispatch a one-way peer message to messageCallback on its own thread."""
        senderId = msg.getSender()
        receiverId = msg.getReceiver()
        msgId = str(peerKey) + "-" + receiverId + "-" + senderId
        IncomingMessage( msg, self.messageCallback ).start()

    #===========================================================================
    def incomingResponse(self, peerKey, msg):
        """Route a response to the mailbox slot of the waiting request.

        Error messages without a sequence number cannot be matched to a
        pending request, so they are handled as plain messages instead.
        """
        senderId = msg.getSender()
        receiverId = msg.getReceiver()
        seq = msg.getSequence()
        if seq is None and msg.getType() == MSG_TYPE_ERROR:
            self.incomingMessage(peerKey, msg)
        else:
            reqId = str(peerKey) + "-" + senderId + "-" + receiverId + ":" + msg.getSequence()
            self.mailbox.place(reqId, msg)

    #===========================================================================
    def connectionLost(self, peerKey):
        """Notify the owner that the connection to a peer was lost."""
        self.errorCallback(peerKey)

    #===========================================================================
    def sendMessage(self, msg, clientKey ):
        """Send a one-way message to the given client, logging if it is gone."""
        # If there is no client, due to a connection lost issue, show an error
        # and continue
        clientKey = int(clientKey)
        if not self.writers.has_key(clientKey):
            senderId = msg.getSender()
            receiverId = msg.getReceiver()
            LOG( self.name + ": Cannot send message, no such client key: " + repr(clientKey), LOG_WARN, level = LOG_COMM)
            LOG( self.name + ": Keys: " + repr(self.clientKeys) + "/" + repr(self.writers.keys()), LOG_WARN, level = LOG_COMM)
            LOG( self.name + ": Msg ID : " + msg.getId(), LOG_ERROR, level = LOG_COMM)
            LOG( self.name + ": Msg sender : " + senderId, LOG_ERROR, level = LOG_COMM)
            LOG( self.name + ": Msg receiver: " + receiverId, LOG_ERROR, level = LOG_COMM)
            return
        # Actually send the message through the corresponding output channel
        self.writers[clientKey].send(msg)

    #===========================================================================
    def sendRequest(self, msg, clientKey, timeout = 150000 ):
        """Send a request to a client and block until its response or timeout.

        Returns the response message; on a missing client/key or on
        timeout, returns an error message instead of raising.
        """
        # The message shall contain the target clientKey, sender id and receiver id
        clientKey = int(clientKey)
        senderId = msg.getSender()
        receiverId = msg.getReceiver()
        reqId = str(clientKey) + "-" + receiverId + "-" + senderId
        writer = self.getWriter(clientKey)
        # If there is no client key, fail
        if clientKey is None:
            LOG( self.name + ": Cannot send request, no client key given", LOG_ERROR, level = LOG_COMM)
            LOG( self.name + ": Req ID : " + msg.getId(), LOG_ERROR, level = LOG_COMM)
            LOG( self.name + ": Req sender : " + senderId, LOG_ERROR, level = LOG_COMM)
            LOG( self.name + ": Req receiver: " + receiverId, LOG_ERROR, level = LOG_COMM)
            errorMsg = MsgHelper.createError(msg.getId(), msg, "Cannot send request", "No client key set")
            return errorMsg
        # If there is no corresponding client, show a warning and return an error
        if writer is None:
            LOG( self.name + ": Cannot send request, no such client key: " + repr(clientKey), LOG_WARN, level = LOG_COMM)
            LOG( self.name + ": Keys: " + repr(self.clientKeys) + "/" + repr(self.writers.keys()), LOG_WARN, level = LOG_COMM)
            LOG( self.name + ": Req ID : " + msg.getId(), LOG_ERROR, level = LOG_COMM)
            LOG( self.name + ": Req sender : " + senderId, LOG_ERROR, level = LOG_COMM)
            LOG( self.name + ": Req receiver: " + receiverId, LOG_ERROR, level = LOG_COMM)
            errorMsg = MsgHelper.createError(msg.getId(), msg, "Cannot send request", "No such client")
            return errorMsg
        if IPC_DEBUG:
            LOG( self.name + ": Sending request to client key " + repr(clientKey), level = LOG_COMM)
            LOG( self.name + ": Req ID : " + msg.getId(), level = LOG_COMM)
            LOG( self.name + ": Req sender : " + senderId, level = LOG_COMM)
            LOG( self.name + ": Req receiver: " + receiverId, level = LOG_COMM)
        # Set the request sequence number. The sequence number shall be
        # the same for the corresponding response.
        reqId = self.setSequence(reqId, msg)
        response = None
        # Prepare the mailbox slot BEFORE sending so the response cannot
        # arrive with nowhere to go.
        self.mailbox.prepare(reqId)
        writer.send(msg)
        response = self.mailbox.retrieve(reqId,timeout)
        if response is None:
            response = MsgHelper.createError(MSG_ID_TIMEOUT, msg, "Cannot get response", "IPC")
        return response

    #===========================================================================
    def forwardRequest(self, msg, clientKey ):
        """Forward a request, restoring the original sequence on the response."""
        originalSeq = msg.getSequence()
        response = self.sendRequest(msg, clientKey)
        response.setSequence(originalSeq)
        return response

    #===========================================================================
    def getPort(self):
        """Return the port the listener was asked to bind to."""
        return self.serverPort

    #===========================================================================
    def setSequence(self, id, msg):
        """Stamp the next sequence number on msg and append it to the id.

        Protected by reqLock so concurrent senders get unique sequences.
        """
        reqId = id
        self.reqLock.acquire()
        try:
            msg.setSequence(self.reqSeq)
            reqId += ":" + str(self.reqSeq)
            self.reqSeq += 1
        finally:
            self.reqLock.release()
        return reqId

    #===========================================================================
    def getWriter(self, clientKey):
        """Return the output channel for clientKey, or None (lock-protected)."""
        writer = None
        self.reqLock.acquire()
        try:
            writer = self.writers.get(clientKey)
        finally:
            self.reqLock.release()
        return writer
| [
"rafael.chinchilla@b4576358-0e6a-c6b8-6e87-62523fae65e4"
] | rafael.chinchilla@b4576358-0e6a-c6b8-6e87-62523fae65e4 |
08777ef56a0df912e73d6c15c9f138bd8b2e87c3 | f4434c85e3814b6347f8f8099c081ed4af5678a5 | /sdk/textanalytics/azure-ai-textanalytics/samples/async_samples/sample_recognize_pii_entities_async.py | 7c580718d21294e4c46f62a5a71fbf2a0867ba92 | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | yunhaoling/azure-sdk-for-python | 5da12a174a37672ac6ed8e3c1f863cb77010a506 | c4eb0ca1aadb76ad892114230473034830116362 | refs/heads/master | 2022-06-11T01:17:39.636461 | 2020-12-08T17:42:08 | 2020-12-08T17:42:08 | 177,675,796 | 1 | 0 | MIT | 2020-03-31T20:35:17 | 2019-03-25T22:43:40 | Python | UTF-8 | Python | false | false | 4,031 | py | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: sample_recognize_pii_entities_async.py
DESCRIPTION:
This sample demonstrates how to recognize personally identifiable information in a batch of documents.
The endpoint recognize_pii_entities is only available for API version v3.1-preview and up.
In this sample, we will be working for a company that handles loan payments. To follow privacy guidelines,
we need to redact all of our information before we make it public.
USAGE:
python sample_recognize_pii_entities_async.py
Set the environment variables with your own values before running the sample:
1) AZURE_TEXT_ANALYTICS_ENDPOINT - the endpoint to your Cognitive Services resource.
2) AZURE_TEXT_ANALYTICS_KEY - your Text Analytics subscription key
"""
import os
import asyncio
class RecognizePiiEntitiesSampleAsync(object):
    """Sample: redact PII from documents and extract high-confidence SSNs."""

    async def recognize_pii_entities_async(self):
        """Run the PII recognition walk-through against the live service.

        Requires AZURE_TEXT_ANALYTICS_ENDPOINT and AZURE_TEXT_ANALYTICS_KEY
        in the environment.  The [START]/[END] markers delimit the region
        embedded in the published documentation -- do not move them.
        """
        print(
            "In this sample we will be going through our customer's loan payment information and redacting "
            "all PII (personally identifable information) before storing this information on our public website. "
            "I'm also looking to explicitly extract the SSN information, so I can update my database with SSNs for "
            "our customers"
        )
        # [START recognize_pii_entities_async]
        from azure.core.credentials import AzureKeyCredential
        from azure.ai.textanalytics.aio import TextAnalyticsClient

        endpoint = os.environ["AZURE_TEXT_ANALYTICS_ENDPOINT"]
        key = os.environ["AZURE_TEXT_ANALYTICS_KEY"]

        text_analytics_client = TextAnalyticsClient(
            endpoint=endpoint, credential=AzureKeyCredential(key)
        )
        documents = [
            """Parker Doe has repaid all of their loans as of 2020-04-25.
            Their SSN is 859-98-0987. To contact them, use their phone number
            555-555-5555. They are originally from Brazil and have Brazilian CPF number 998.214.865-68"""
        ]

        async with text_analytics_client:
            result = await text_analytics_client.recognize_pii_entities(documents)

        # Keep only the documents the service processed successfully.
        docs = [doc for doc in result if not doc.is_error]

        print(
            "Let's compare the original document with the documents after redaction. "
            "I also want to comb through all of the entities that got redacted"
        )
        for idx, doc in enumerate(docs):
            print("Document text: {}".format(documents[idx]))
            print("Redacted document text: {}".format(doc.redacted_text))
            for entity in doc.entities:
                print("...Entity '{}' with category '{}' got redacted".format(
                    entity.text, entity.category
                ))
        # [END recognize_pii_entities_async]
        print("All of the information that I expect to be redacted is!")

        print(
            "Now I want to explicitly extract SSN information to add to my user SSN database. "
            "I also want to be fairly confident that what I'm storing is an SSN, so let's also "
            "ensure that we're > 60% positive the entity is a SSN"
        )
        ssns = []
        for doc in docs:
            for entity in doc.entities:
                # Keep only confident SSN hits for the database update.
                if entity.category == 'U.S. Social Security Number (SSN)' and entity.confidence_score >= 0.6:
                    ssns.append(entity.text)
        print("We have extracted the following SSNs as well: '{}'".format(
            "', '".join(ssns)
        ))
async def main():
    """Instantiate the sample and run it to completion."""
    await RecognizePiiEntitiesSampleAsync().recognize_pii_entities_async()
if __name__ == '__main__':
    # Drive the async sample on the default event loop.
    asyncio.get_event_loop().run_until_complete(main())
| [
"noreply@github.com"
] | yunhaoling.noreply@github.com |
592954c24d90392accd10d69fa5c5591d6254b29 | ccbe9d81fe6e126ccae3e3ecb36b9c04b1544bea | /config/services/configure_web_sockets.py | 19605f5afd1b06e9f0f14fef5bebdc1abb61abda | [] | no_license | Silver3310/Aiohttp-sample-chat | 9bb3b44f88c39b594afb528cc425bb6741c9e1f1 | 88f4f93b43f2ddced3af1e6ecf6c227f4756744f | refs/heads/master | 2021-06-23T20:39:09.907074 | 2019-10-05T10:21:44 | 2019-10-05T10:21:44 | 212,976,233 | 0 | 0 | null | 2021-03-20T01:51:21 | 2019-10-05T09:44:04 | Python | UTF-8 | Python | false | false | 369 | py | def configure_web_sockets(app):
"""
Configure WebSockets
"""
async def on_shutdown(app):
for ws in app['websockets']:
await ws.close(
code=1001,
message='Server shutdown'
)
app.on_cleanup.append(on_shutdown)
# a list of sockets to close after use
app['websockets'] = list()
| [
"epifanov998@mail.ru"
] | epifanov998@mail.ru |
414558f8f2f2f959546e50c46144100f193f178d | 6d429c1bc185fc8180fc69f1d49fd781e9a90748 | /appuser/codemanager.py | 98381f12400d2cfb23c1cb65a163547d03f84290 | [] | no_license | FirayMa/store | 6bc5d350da4170d0ef87d25748635cd1a32aa717 | 542a955451f78f9f904010383b1c661e2fbef471 | refs/heads/master | 2023-05-28T05:33:13.867339 | 2017-09-07T01:00:30 | 2017-09-07T01:00:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,766 | py | from django.db import models
import pdb
import random
import string
from django.conf import settings
from common.e_mail import EmailEx
class CodeManager(models.Manager):
    """Model manager for e-mail verification codes.

    Generates 4-character codes, mails them through ``EmailEx`` and
    checks codes submitted back by users.  User-facing messages are in
    Chinese by design.
    """
    # Shared mailer instance; also supplies EMAIL_REGEX for validation.
    email = EmailEx()

    def send_code(self, email):
        """Generate a verification code and e-mail it to ``email``.

        Returns a dict with ``status`` (1 = malformed address, 2 = sent,
        3 = send failure) and a human-readable ``msg``.
        """
        result={}
        if not self.email.EMAIL_REGEX.match(email):
            result['status'] = 1
            result['msg'] = '电子邮件格式不正确'
        else:
            # NOTE(review): ``random`` is not cryptographically secure;
            # consider ``secrets.choice`` for verification codes.
            code = ''.join(random.choice(string.ascii_lowercase + string.digits) for i in range(4))
            Subject = settings.PROJECTNAME+'注册邮箱验证'
            content = '您好, 欢迎您注册, 欢迎加入我们, 您的邮箱验证码是: ' + code
            try:
                self.email.send_text_email(Subject, content, email)
                try:
                    # Reuse the existing code row for this address, if any.
                    verify_code = self.model.objects.get(email__exact = email, type ='0')
                    verify_code.code = code
                    verify_code.save()
                except self.model.DoesNotExist:
                    verify_code = self.model(email=email, code=code, type ='0')
                    verify_code.save()
                result['status'] = 2
                result['msg'] = '验证码已发至您的邮箱中, 请到邮箱中查看您的验证码!'
            except Exception as e:
                result['status'] = 3
                result['msg'] = '发送邮件的过程中发生错误: '+ str(e)
        return result

    # NOTE(review): name is a typo of ``verify_code``; kept as-is because
    # external callers may already depend on it.
    def veirfy_code(self, code, email):
        """Return True when ``code`` matches the stored code for ``email``."""
        try:
            verify_code = self.model.objects.get(email__exact = email, code =code)
            return True
        except self.model.DoesNotExist:
            return False
| [
"281475120@163.com"
] | 281475120@163.com |
40ae0947ad0df6b4f4373286bba0699460598253 | 3ebca13c180f6314189f353b197f177f76c87c38 | /Leaderelection.py | 5e71b8de897253c412dfdfc226695ee22f1a411e | [] | no_license | gurpradeep/TF | ca87630176a48eb190c1f05e00bf4c868a48ae02 | 8b0c6364395f3a3d3f18fd929352c0d2fe757355 | refs/heads/master | 2023-01-20T14:59:19.937144 | 2020-12-05T17:57:33 | 2020-12-05T17:57:33 | 301,216,087 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,013 | py | import etcd3
import sys
import time
from threading import *
LEADER_KEY = "/myleader"
def main(server_name):
    """Run the election loop forever: campaign, then lead or follow."""
    etcd_client = etcd3.client(host="localhost", port=2379)
    while True:
        won, lease = leader_election(etcd_client, server_name)
        if won:
            print("I am the leader")
            on_leadership_gained(lease)
        else:
            print("I am a follower")
            wait_for_next_election(etcd_client)
def leader_election(client, server_name):
    """Campaign for leadership with a 5-second lease.

    Returns (is_leader, lease).  The lease must be refreshed by the
    winner and is revoked on loss of leadership.
    """
    print("New leader_election happening.")
    try:
        lease = client.lease(5)
    except Exception:
        # BUG FIX: a bare ``except:`` used to swallow the failure and then
        # fall through to use an unbound ``lease`` (NameError).  Log and
        # re-raise so the caller sees the real error.
        print("Unexpected error:", sys.exc_info()[0])
        raise
    is_leader = try_insert(client, LEADER_KEY, server_name, lease)
    return is_leader, lease
def try_insert(client, key, value, lease):
    """Atomically create ``key=value`` under ``lease``; True only if absent."""
    created, _ = client.transaction(
        compare=[client.transactions.version(key) == 0],
        success=[client.transactions.put(key, value, lease)],
        failure=[],
    )
    return created
def on_leadership_gained(lease):
    """Hold leadership: keep the lease alive and work until it fails.

    Any failure revokes the lease and returns so a new election can run;
    Ctrl-C revokes the lease and exits the process.
    """
    while True:
        try:
            print("Refreshing lease; still the leader.")
            lease.refresh()
            do_work()
        except KeyboardInterrupt:
            print("\n Revoking lease;no longer the leader")
            lease.revoke()
            sys.exit(1)
        except Exception:
            lease.revoke()
            return
def wait_for_next_election(client):
    """Block until the current leader's key is deleted, then return.

    Watches LEADER_KEY and polls an Event once per second; Ctrl-C
    cancels the watch and exits the process.
    """
    election_event = Event()

    def watch_callback(resp):
        # Fires on any change to LEADER_KEY; only deletion matters.
        for event in resp.events:
            # BUG FIX: was misspelled ``isintance`` (NameError at runtime).
            if isinstance(event, etcd3.events.DeleteEvent):
                print("Leadership Change Required")
                election_event.set()

    watch_id = client.add_watch_callback(LEADER_KEY, watch_callback)
    try:
        while not election_event.is_set():
            time.sleep(1)
    except KeyboardInterrupt:
        # BUG FIX: was ``KeyBoardInterrupt`` (undefined name).
        client.cancel_watch(watch_id)
        sys.exit(1)
    client.cancel_watch(watch_id)
def do_work():
    """Placeholder for the leader's real workload (sleeps one second)."""
    time.sleep(1)
if __name__=="__main__":
server_name = sys.argv[1]
main(server_name)
| [
"gupta.pradeeep@gmail.com"
] | gupta.pradeeep@gmail.com |
10c7be24b3c3c0c17d53b00c3a7c8d0bf4c0e5fc | 04bf8241a836af9d56b626d56a477bff41732d7f | /q2/twitter/admin.py | 9905e7d3cefad59b556c5ec71a6b9a1b636d84ac | [] | no_license | hiteshmishra708/jiohaptik-assignment | cf950c978897100d2c50c47e4a22bb5a39a50f9e | 4da4ac37899a32a1324c506b902170f021d1cfc3 | refs/heads/main | 2023-04-14T02:16:04.440975 | 2021-04-04T09:10:48 | 2021-04-04T09:10:48 | 354,222,812 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 170 | py | from django.contrib import admin
from .models import People, FollowRecord, Tweet
admin.site.register(People)
admin.site.register(FollowRecord)
admin.site.register(Tweet) | [
"hiteshmishra708@gmail.com"
] | hiteshmishra708@gmail.com |
7ff143344ce9306dacdfcea254efe272b77b8201 | 79ab7c2b50109ed5518e890c8ff56019ebc233a7 | /Programs/Program_3/Gage-Halverson-Program3-test.py | 8dad5a7b7dbb654a8c8fc6f1c5570fb37971261f | [] | no_license | hi2gage/csci127 | 8c0d977376c00c8e7eb2adaf739b427ef4b64372 | a973f5198e4d6315fa86db585a8e47f953bcf421 | refs/heads/master | 2020-04-21T11:13:09.291635 | 2019-05-03T00:08:29 | 2019-05-03T00:08:29 | 169,515,779 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,313 | py | # -----------------------------------------+
# CSCI 127, Joy and Beauty of Data |
# Program 3: Weather CSV Library |
# Gage Halverson |
# Last Modified: Feb, 26 2019 |
# -----------------------------------------+
# Reads weather.csv, a CSV file of temperature readings,  |
# and reports statistics on it: the coldest reading, the  |
# average temperature at a location, the recording        |
# stations in a state, and the widest temperature spread  |
# per state.  This test variant runs option 4 for every   |
# state and lists the states for which it fails.          |
# -----------------------------------------+
def coldest_temperature(input_file):
    """Print the coldest reading in ``input_file`` and where/when it occurred.

    Single pass over the file (the original read it twice and never
    closed it); on ties the first occurrence wins, matching the original
    two-pass behavior.  Raises ValueError if the file has no data rows.
    """
    coldest = None  # (temperature, "City State", date) of the best row so far
    with open(input_file, "r") as temp_file:
        temp_file.readline()  # skip the header line
        for line in temp_file:
            fields = line.split(",")
            temperature = int(fields[-7])
            if coldest is None or temperature < coldest[0]:
                # Strip the quote characters surrounding "City State".
                location = (fields[5] + " " + fields[6])[1:-1]
                coldest = (temperature, location, fields[4])
    if coldest is None:
        # Mirrors the original's min([]) failure, with a clearer message.
        raise ValueError("no temperature readings in " + input_file)
    temperature, location, date = coldest
    print("Coldest Fahrenheit temperature reading:", str(temperature))
    print("Location:", str(location))
    print("Date:", str(date))
def average_temperature(input_file, location):
    """Print the count and mean of readings taken at ``location``.

    ``location`` is matched case-insensitively against "City,State"
    (quote characters stripped).  Returns the string "None" when no
    readings match; otherwise returns the raw state field of the LAST
    row in the file (original behavior, preserved for callers).
    The file is now closed via ``with`` (the original leaked the handle).
    """
    temps = []
    state = None
    with open(input_file, "r") as location_file:
        location_file.readline()  # skip the header line
        for line in location_file:
            fields = line.split(",")
            city = fields[5]
            state = fields[6]
            # "City,State" with the surrounding quote characters stripped.
            where = (city + "," + state)[1:-1]
            if location.lower() == where.lower():
                temps.append(int(fields[0]))
    print("Number of readings: " + str(len(temps)))
    if not temps:
        print("Average temperature: Not Applicable")
        return "None"
    average = sum(temps) / len(temps)
    print("Average temperature: " + "{0:.2f}".format(average))
    return state
def all_stations_by_state(input_file, state):
    """Print a numbered list of the distinct recording stations in ``state``.

    States are compared case-insensitively; station names keep their
    first-seen order.  The file is now closed via ``with`` (the original
    leaked the handle) and duplicate detection no longer uses the O(n)
    ``list.count`` call per row.
    """
    stations = []
    with open(input_file, "r") as location_file:
        location_file.readline()  # skip the header line
        for line in location_file:
            fields = line.split(",")
            if state.lower() == fields[-3].lower() and fields[1] not in stations:
                stations.append(fields[1])
    if not stations:
        print("There are no recording stations")
        return
    print("Recording Stations \n------------------")
    for number, name in enumerate(stations, start=1):
        print("{:>2d}".format(number) + ".", name)
def largest_spread_of_temp_at_location(input_file, state):
    """Print the widest max-minus-min temperature spread recorded in ``state``.

    Returns ``state`` when a matching reading exists; otherwise prints a
    notice and returns None (as the original did implicitly).

    Bug fixed: the original second pass read the state column from the
    PREVIOUS row's split before re-splitting the line, so its state
    filter lagged one row behind.  A single pass (first occurrence wins
    on ties, as before) avoids that entirely; the file is also closed now.
    """
    best = None  # (spread, city, state_field, date) of the widest row so far
    with open(input_file, "r") as location_file:
        location_file.readline()  # skip the header line
        for line in location_file:
            fields = line.split(",")
            if state.lower() != fields[12].lower():
                continue
            spread = int(fields[-8]) - int(fields[-7])
            if best is None or spread > best[0]:
                best = (spread, fields[5], fields[6], fields[4])
    if best is None:
        print("There are no recording stations")
        return None
    max_spread, city, state_field, date = best
    # Strip the quote characters surrounding "City State".
    location = (city + " " + state_field)[1:-1]
    print("Widest Spread of Fahrenheit temperature reading:", str(max_spread))
    print("Location:", str(location))
    print("Date:", str(date))
    return state
def make_state_list(input_file):
    """Return the unique state fields found in the file, first-seen order.

    Only complete 15-field rows are considered to carry a valid state
    column (third field from the end).  The file is now closed via
    ``with`` (the original leaked the handle).
    """
    states = []
    with open(input_file, "r") as temp_file:
        temp_file.readline()  # skip the header line
        for line in temp_file:
            fields = line.split(",")
            state = fields[-3]
            if state not in states and len(fields) == 15:
                states.append(state)
    return states
# -----------------------------------------+
# Do not change anything below this line |
# with the exception of code related to |
# option 4. |
# -----------------------------------------+
# -----------------------------------------+
# menu |
# -----------------------------------------+
# Prints a menu of options for the user. |
# -----------------------------------------+
def menu():
    """Print the numbered menu of program options."""
    print()
    print("1. Identify coldest temperature.")
    print("2. Identify average temperature for a given location.")
    print("3. Identify all recording station locations by state.")
    print("4. Identify Location of Largest Temperature Spread by State.")
    print("5. Quit.")
    print()
# -----------------------------------------+
# main |
# -----------------------------------------+
# Repeatedly query the user for options. |
# -----------------------------------------+
def main():
    """Test harness: run option 4 for every state and report failures.

    Compares the states returned by largest_spread_of_temp_at_location
    against the full state list and prints the difference.
    """
    input_file = "weather.csv"
    state_list = make_state_list(input_file)
    print(state_list)
    right = []
    for states in state_list:
        print("--------------------------------------------")
        print(states)
        right_temp = largest_spread_of_temp_at_location(input_file, states)
        # NOTE(review): a state with no readings yields the value None
        # (not the string "None"), so it still passes this check and is
        # appended -- the comparison probably means ``is not None``.
        # TODO confirm intended behavior.
        if right_temp != "None":
            right.append(right_temp)
    print("--------------------------------------------")
    print(right)
    print("--------------------------------------------")
    print(state_list)
    print("--------------------------------------------")
    print("These are the states that are not working in option 4")
    print(list(set(state_list) - set(right)))
    print(len(list(set(state_list) - set(right))))
# -----------------------------------------+
main()
| [
"coolgage@Gages-MacBook-Pro-2.local"
] | coolgage@Gages-MacBook-Pro-2.local |
c1c573cf1fe905c86e85038a39dd736d9d842676 | e5e8d2741a2c6f5615a563e938e0c53afb019c9f | /kwiiyatta/converter/dataset.py | 58398effaa50a1a8a9ea5b936da93e266f6f871a | [
"MIT"
] | permissive | Iselix/kwiiyatta | 1d89c8b009f25f7daad4fab3040ee8ff5ab93301 | 5771c88fffbcfdadb1fb5084be8af220164a01ff | refs/heads/experimental/konokoetuki | 2023-05-01T01:20:20.063948 | 2020-08-16T09:46:37 | 2020-08-16T09:46:37 | 184,916,222 | 2 | 0 | MIT | 2023-04-21T20:15:30 | 2019-05-04T16:03:23 | Python | UTF-8 | Python | false | false | 2,194 | py | import copy
from nnmnkwii.preprocessing import remove_zeros_frames, trim_zeros_frames
import numpy as np
import kwiiyatta
from . import abc
class WavFileDataset(abc.Dataset):
    """Dataset of analyzed features for every ``*.wav`` file in a directory."""

    def __init__(self, data_dir, Analyzer=None):
        super().__init__()
        self.Analyzer = kwiiyatta.analyze_wav if Analyzer is None else Analyzer
        self.data_dir = data_dir
        if not data_dir.exists():
            raise FileNotFoundError(f'wav files dir "{data_dir!s}"'
                                    f' is not found')
        if not data_dir.is_dir():
            raise NotADirectoryError(f'wav files dir "{data_dir!s}"'
                                     f' is not directory')
        self.files = frozenset(path.relative_to(data_dir)
                               for path in data_dir.glob('*.wav'))

    def keys(self):
        """Return the set of wav file names (paths relative to data_dir)."""
        return self.files

    def get_data(self, key):
        """Analyze and return the features for the wav file named ``key``."""
        return self.Analyzer(self.data_dir/key)
class ParallelDataset(abc.Dataset):
def __init__(self, dataset1, dataset2):
super().__init__()
self.dataset1 = dataset1
self.dataset2 = dataset2
self.common_keys = self.dataset1.keys() & self.dataset2.keys()
def keys(self):
return self.common_keys
def get_data(self, key):
return self.dataset1[key], self.dataset2[key]
@abc.map_dataset()
def TrimmedDataset(feature):
s = trim_zeros_frames(feature.spectrum_envelope)
return feature[:len(s)] # トリムするフレームが手前にずれてるのでは?
@abc.map_dataset(expand_tuple=False)
def AlignedDataset(features, **kwargs):
a, b = features
return kwiiyatta.align_even(a, b, **kwargs)
def make_dataset_to_array(dataset, keys=None):
if keys is None:
keys = sorted(dataset.keys())
data = None
for key in keys:
d = dataset[key]
if isinstance(d, tuple):
d = np.hstack(d)
d = remove_zeros_frames(d)
if data is None:
data = copy.copy(d)
else:
len_data = len(data)
data.resize(len_data+len(d), d.shape[-1])
data[len_data:, :] = d
return data
| [
"iselix.x17351@gmail.com"
] | iselix.x17351@gmail.com |
7e848136723f6fa4f38797ac7246888240d8898f | f579e331f4d8a083843ae4da1bc7bfb1f90b44fb | /main.py | 0fcd1bcb390c6ac66ec06455b0849429e9c86368 | [] | no_license | yowenter/NotificationFeed | 4a71f781412fd046259f45238ebc3e2bbf365140 | 64d531e03bf2f179d8d343e74bbe9da23d991f44 | refs/heads/master | 2020-04-24T16:01:28.787966 | 2019-03-15T06:29:08 | 2019-03-15T06:29:08 | 172,091,330 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,262 | py | import os
import time
import logging
import signal
import sys
from noti_feed import setup_feed
setup_feed()
from datetime import datetime
from watcher.github_service import RepoIssueWatcher
from noti_feed.manager import new_issues
from noti_feed.feed import ManagerNotificationFeed
from models.notification import IssuesNotification
from models.issue import NotificationIssue
from utils.density import hour_density
from common.config import PROD
from email_service.email_sender import send_notification
import errno
logging.basicConfig(stream=sys.stdout, level=logging.INFO if PROD == 'true' else logging.DEBUG)
logging.getLogger("stream_framework").setLevel(logging.WARN)
logging.getLogger("urllib3").setLevel(logging.WARN)
logging.getLogger("github").setLevel(logging.WARN)
LOG = logging.getLogger(__name__)
MANAGER = "manager"
def notification_watcher():
LOG.info("Starting repo issue watch service...")
repos = ["kubernetes/kubernetes", "prometheus/prometheus", "goharbor/harbor", "coredns/coredns", "envoyproxy/envoy"]
issue_watchers = [RepoIssueWatcher(repo) for repo in repos]
while 1:
issues = []
for watcher in issue_watchers:
issues.extend(watcher.fetch_new_issues())
notification_issues = [NotificationIssue.from_github_issue(i) for i in issues]
LOG.info("New notification issues %s queued.", len(notification_issues))
new_issues(MANAGER, notification_issues)
time.sleep(180)
def notification_consumer():
LOG.info("Starting notification sender service...")
feed = ManagerNotificationFeed(MANAGER)
while True:
current_hour = datetime.now().hour
work_hours = list(range(10, 22))
if current_hour + 8 not in work_hours:
LOG.info("Not in work hours, continue %s", current_hour)
time.sleep(1000)
continue
activities = feed[:]
issues = [NotificationIssue.from_dict(activity.extra_context) for activity in activities]
title = "[CNCF GitHub Issue 提醒] {0} 你有 {1} 条新消息未读 ".format(datetime.now().date(), len(activities))
summary = title
if len(issues) >= 1:
LOG.info("Sending Message %s", title)
notification = IssuesNotification(title, issues, summary)
if send_notification(notification) > 0:
feed.remove_many(activities)
[feed.remove_activity(ac) for ac in activities]
density = hour_density((current_hour + 8) % 24) # Asia/shanghai timeonze offset 8
should_sleep = int(3600 * (2 - density) * (2 - density)) + 1800 # 最短sleep 30mins, 最长sleep 3 H
LOG.info("Current time %s, should sleep %s ", datetime.now(), should_sleep)
time.sleep(should_sleep)
class Manager(object):
def __init__(self):
self.WORKERS = {}
self.exit_now = False
self.healthy = True
def spawn_worker(self, func):
LOG.info("Spawning worker %s", func)
pid = os.fork()
if pid != 0:
self.WORKERS[pid] = func
return pid
func()
def kill(self, signum, frame):
LOG.info("Killing self.")
self.exit_now = True
for k, v in self.WORKERS.items():
LOG.info("Killing process %s %s", k, v)
try:
os.kill(k, signal.SIGKILL)
except Exception as e:
LOG.warning("Kill worker %s failure %s", k, e)
sys.exit(0)
def start(self):
start = datetime.now()
self.spawn_worker(notification_watcher)
self.spawn_worker(notification_consumer)
while 1:
time.sleep(10)
if self.exit_now:
LOG.info("Gracefully stop.")
break
if not self.healthy:
LOG.error("Manager unhealthy, exiting.")
self.kill(signal.SIGKILL, None)
sys.exit(1)
LOG.info("Heartbeat, stay alive since `%s`.for %s", start, datetime.now() - start)
LOG.debug("Workers %s", self.WORKERS)
for pid, name in self.WORKERS.items():
_, err_code = os.waitpid(pid, os.WNOHANG)
LOG.debug("Wait pid %s err_code %s ", pid, err_code)
if err_code != 0:
LOG.warning("Process %s %s unhealthy", pid, name)
self.healthy = False
try:
os.kill(pid, 0)
except OSError as e:
if e.errno == errno.ESRCH:
LOG.warning("Process %s %s not running", pid, name)
else:
LOG.warning("Process %s %s unknown state %s", pid, name, e)
LOG.info("Worker states not healthy. Exiting")
self.healthy = False
else:
LOG.debug("Process %s %s is running", pid, name)
def main():
manager = Manager()
signal.signal(signal.SIGINT, manager.kill)
signal.signal(signal.SIGTERM, manager.kill)
manager.start()
# notification_watcher()
# notification_consumer()
# start 2 processes,
# one for watcher
# one for consumer
if __name__ == '__main__':
main()
| [
"wenter.wu@gmail.com"
] | wenter.wu@gmail.com |
5e7a1db37f5eccb027a0eb393996754bacb5bcaf | 39b7675802d772748ceede6ec18ad902b6346ecc | /PythonClass/notesonfunction/fourth.py | a06c12dc6f1ce9a2b30fd4806697de392c017c1f | [] | no_license | krishnakarki195/PythonNote | 12426fc32aa7ef5a5b2c70ed35bc5db142c55539 | 1b99ce24f8f46f1d8abe639e6e85b792dd5c5790 | refs/heads/master | 2020-09-17T23:12:34.333109 | 2016-09-22T22:53:01 | 2016-09-22T22:53:01 | 67,238,602 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,067 | py | #!/usr/bin/python
# *,**,*args(args),**kwargs(kwargs)
def callguy(**kwargs):
if 'name' in kwargs:
print kwargs['name']
if 'age' in kwargs:
print kwargs['age']
if 'gender' in kwargs:
print kwargs['gender']
if 'email' in kwargs:
print kwargs['email']
callguy(name='kushi',age=1)
#callguy(age=1,gender='f')
#callguy(name='kushi',email='kushi.hlp@gmail.com')
'''
def gmax(*args):
big=0
for value in args:
if value > big:
big = value
return big
print gmax(21,42,55,98,99,1)
In [3]: max?
Type: builtin_function_or_method
String Form:<built-in function max>
Namespace: Python builtin
Docstring:
max(iterable[, key=func]) -> value
max(a, b, c, ...[, key=func]) -> value
With a single iterable argument, return its largest item.
With two or more arguments, return the largest argument.
In [4]: max(21,42)
Out[4]: 42
In [5]: max(21,42,55,98,99,01)
Out[5]: 99
def my_func(a,b):
return a + b
# my_func(10,20)
my_list = [1,2]
print my_func(*my_list)
my_dict = {'b':10,'a':10}
print my_func(**my_dict)
'''
| [
"krishna.karki195@gmail.com"
] | krishna.karki195@gmail.com |
90103b4dfe92fcefbca7e03b61049dfd4b387ab2 | cc0c0f99a5cf563ff52a76f2ac17cdad09d22f01 | /venv/Lib/site-packages/itk/itkBinaryMask3DMeshSourcePython.py | 9b1e3354b60a4ae82b8bc30de79fa59d8b65a3ec | [] | no_license | Marxss/carck_detect_system | 9c0d338bde322b4c7304fd0addb524d8697c8a7b | d2480f2108052af8af0aa5265a5239c309885043 | refs/heads/master | 2022-04-15T23:34:20.988335 | 2020-03-29T16:24:00 | 2020-03-29T16:24:00 | 214,625,168 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96,779 | py | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.8
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (3, 0, 0):
new_instancemethod = lambda func, inst, cls: _itkBinaryMask3DMeshSourcePython.SWIG_PyInstanceMethod_New(func)
else:
from new import instancemethod as new_instancemethod
if version_info >= (2, 6, 0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_itkBinaryMask3DMeshSourcePython', [dirname(__file__)])
except ImportError:
import _itkBinaryMask3DMeshSourcePython
return _itkBinaryMask3DMeshSourcePython
if fp is not None:
try:
_mod = imp.load_module('_itkBinaryMask3DMeshSourcePython', fp, pathname, description)
finally:
fp.close()
return _mod
_itkBinaryMask3DMeshSourcePython = swig_import_helper()
del swig_import_helper
else:
import _itkBinaryMask3DMeshSourcePython
del version_info
# Alias the 'property' builtin for use by generated proxies; silently skip on
# interpreters too old to provide it.
try:
    _swig_property = property
except NameError:
    pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if (name == "thisown"):
return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static):
object.__setattr__(self, name, value)
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    """Non-static attribute setter: unknown names are added to the instance."""
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr_nondynamic(self, class_type, name, static=1):
if (name == "thisown"):
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
if (not static):
return object.__getattr__(self, name)
else:
raise AttributeError(name)
def _swig_getattr(self, class_type, name):
    """Attribute getter used by SWIG proxies; delegates with static=0 so
    unresolved names fall through to plain instance attribute lookup."""
    return _swig_getattr_nondynamic(self, class_type, name, 0)
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Compatibility shim: on very old interpreters 'object' may be missing; a
# stand-in old-style class is used instead and _newclass records which case
# applies (1 = new-style classes available, 0 = not).
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object:
        pass
    _newclass = 0
def _swig_setattr_nondynamic_method(set):
def set_attr(self, name, value):
if (name == "thisown"):
return self.this.own(value)
if hasattr(self, name) or (name == "this"):
set(self, name, value)
else:
raise AttributeError("You cannot add attributes to %s" % self)
return set_attr
import itkImageToMeshFilterPython
import itkMeshBasePython
import itkBoundingBoxPython
import itkMapContainerPython
import ITKCommonBasePython
import pyBasePython
import itkVectorPython
import vnl_vectorPython
import vnl_matrixPython
import stdcomplexPython
import vnl_vector_refPython
import itkFixedArrayPython
import itkPointPython
import itkVectorContainerPython
import itkOffsetPython
import itkSizePython
import itkContinuousIndexPython
import itkIndexPython
import itkMatrixPython
import vnl_matrix_fixedPython
import itkCovariantVectorPython
import itkPointSetPython
import itkArrayPython
import itkImagePython
import itkSymmetricSecondRankTensorPython
import itkImageRegionPython
import itkRGBPixelPython
import itkRGBAPixelPython
import itkMeshSourcePython
def itkBinaryMask3DMeshSourceIUS3MD3_New():
    """Convenience factory: returns itkBinaryMask3DMeshSourceIUS3MD3.New()."""
    return itkBinaryMask3DMeshSourceIUS3MD3.New()
def itkBinaryMask3DMeshSourceIUS3MF3_New():
    """Convenience factory: returns itkBinaryMask3DMeshSourceIUS3MF3.New()."""
    return itkBinaryMask3DMeshSourceIUS3MF3.New()
def itkBinaryMask3DMeshSourceIUS3MUS3_New():
    """Convenience factory: returns itkBinaryMask3DMeshSourceIUS3MUS3.New()."""
    return itkBinaryMask3DMeshSourceIUS3MUS3.New()
def itkBinaryMask3DMeshSourceIUS3MUC3_New():
    """Convenience factory: returns itkBinaryMask3DMeshSourceIUS3MUC3.New()."""
    return itkBinaryMask3DMeshSourceIUS3MUC3.New()
def itkBinaryMask3DMeshSourceIUS3MSS3_New():
    """Convenience factory: returns itkBinaryMask3DMeshSourceIUS3MSS3.New()."""
    return itkBinaryMask3DMeshSourceIUS3MSS3.New()
def itkBinaryMask3DMeshSourceIUC3MD3_New():
    """Convenience factory: returns itkBinaryMask3DMeshSourceIUC3MD3.New()."""
    return itkBinaryMask3DMeshSourceIUC3MD3.New()
def itkBinaryMask3DMeshSourceIUC3MF3_New():
    """Convenience factory: returns itkBinaryMask3DMeshSourceIUC3MF3.New()."""
    return itkBinaryMask3DMeshSourceIUC3MF3.New()
def itkBinaryMask3DMeshSourceIUC3MUS3_New():
    """Convenience factory: returns itkBinaryMask3DMeshSourceIUC3MUS3.New()."""
    return itkBinaryMask3DMeshSourceIUC3MUS3.New()
def itkBinaryMask3DMeshSourceIUC3MUC3_New():
    """Convenience factory: returns itkBinaryMask3DMeshSourceIUC3MUC3.New()."""
    return itkBinaryMask3DMeshSourceIUC3MUC3.New()
def itkBinaryMask3DMeshSourceIUC3MSS3_New():
    """Convenience factory: returns itkBinaryMask3DMeshSourceIUC3MSS3.New()."""
    return itkBinaryMask3DMeshSourceIUC3MSS3.New()
def itkBinaryMask3DMeshSourceISS3MD3_New():
    """Convenience factory: returns itkBinaryMask3DMeshSourceISS3MD3.New()."""
    return itkBinaryMask3DMeshSourceISS3MD3.New()
def itkBinaryMask3DMeshSourceISS3MF3_New():
    """Convenience factory: returns itkBinaryMask3DMeshSourceISS3MF3.New()."""
    return itkBinaryMask3DMeshSourceISS3MF3.New()
def itkBinaryMask3DMeshSourceISS3MUS3_New():
    """Convenience factory: returns itkBinaryMask3DMeshSourceISS3MUS3.New()."""
    return itkBinaryMask3DMeshSourceISS3MUS3.New()
def itkBinaryMask3DMeshSourceISS3MUC3_New():
    """Convenience factory: returns itkBinaryMask3DMeshSourceISS3MUC3.New()."""
    return itkBinaryMask3DMeshSourceISS3MUC3.New()
def itkBinaryMask3DMeshSourceISS3MSS3_New():
    """Convenience factory: returns itkBinaryMask3DMeshSourceISS3MSS3.New()."""
    return itkBinaryMask3DMeshSourceISS3MSS3.New()
class itkBinaryMask3DMeshSourceISS3MD3(itkImageToMeshFilterPython.itkImageToMeshFilterISS3MD3):
    """Proxy of C++ itkBinaryMask3DMeshSourceISS3MD3 class.

    SWIG-generated proxy: every method below forwards to the compiled
    _itkBinaryMask3DMeshSourcePython extension module.  Instances must be
    created through New(); direct construction is disabled.
    """

    # 'thisown' mirrors the ownership flag of the wrapped C++ pointer.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    # Direct construction is disabled for wrapped ITK filter types; use New().
    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    def __New_orig__() -> "itkBinaryMask3DMeshSourceISS3MD3_Pointer":
        """__New_orig__() -> itkBinaryMask3DMeshSourceISS3MD3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3___New_orig__()

    __New_orig__ = staticmethod(__New_orig__)

    def Clone(self) -> "itkBinaryMask3DMeshSourceISS3MD3_Pointer":
        """Clone(itkBinaryMask3DMeshSourceISS3MD3 self) -> itkBinaryMask3DMeshSourceISS3MD3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3_Clone(self)

    def SetObjectValue(self, _arg: 'short const') -> "void":
        """SetObjectValue(itkBinaryMask3DMeshSourceISS3MD3 self, short const _arg)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3_SetObjectValue(self, _arg)

    def GetNumberOfNodes(self) -> "unsigned long long":
        """GetNumberOfNodes(itkBinaryMask3DMeshSourceISS3MD3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3_GetNumberOfNodes(self)

    def GetNumberOfCells(self) -> "unsigned long long":
        """GetNumberOfCells(itkBinaryMask3DMeshSourceISS3MD3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3_GetNumberOfCells(self)

    def SetInput(self, inputImage: 'itkImageSS3') -> "void":
        """SetInput(itkBinaryMask3DMeshSourceISS3MD3 self, itkImageSS3 inputImage)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3_SetInput(self, inputImage)

    def SetRegionOfInterest(self, iRegion: 'itkImageRegion3') -> "void":
        """SetRegionOfInterest(itkBinaryMask3DMeshSourceISS3MD3 self, itkImageRegion3 iRegion)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3_SetRegionOfInterest(self, iRegion)

    def GetRegionOfInterest(self) -> "itkImageRegion3 const &":
        """GetRegionOfInterest(itkBinaryMask3DMeshSourceISS3MD3 self) -> itkImageRegion3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3_GetRegionOfInterest(self)

    # Destructor provided by the compiled extension.
    __swig_destroy__ = _itkBinaryMask3DMeshSourcePython.delete_itkBinaryMask3DMeshSourceISS3MD3

    def cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceISS3MD3 *":
        """cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceISS3MD3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3_cast(obj)

    cast = staticmethod(cast)

    def New(*args, **kargs):
        """New() -> itkBinaryMask3DMeshSourceISS3MD3

        Create a new object of the class itkBinaryMask3DMeshSourceISS3MD3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.

        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.

        The named parameters are used by calling the method with the same name prefixed by 'Set'.

        Ex:

          itkBinaryMask3DMeshSourceISS3MD3.New( reader, Threshold=10 )

        is (most of the time) equivalent to:

          obj = itkBinaryMask3DMeshSourceISS3MD3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBinaryMask3DMeshSourceISS3MD3.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj

    New = staticmethod(New)
# Rebind each proxy method to the corresponding flat function exported by the
# compiled extension, then register the proxy with SWIG's runtime type system.
itkBinaryMask3DMeshSourceISS3MD3.Clone = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3_Clone, None, itkBinaryMask3DMeshSourceISS3MD3)
itkBinaryMask3DMeshSourceISS3MD3.SetObjectValue = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3_SetObjectValue, None, itkBinaryMask3DMeshSourceISS3MD3)
itkBinaryMask3DMeshSourceISS3MD3.GetNumberOfNodes = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3_GetNumberOfNodes, None, itkBinaryMask3DMeshSourceISS3MD3)
itkBinaryMask3DMeshSourceISS3MD3.GetNumberOfCells = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3_GetNumberOfCells, None, itkBinaryMask3DMeshSourceISS3MD3)
itkBinaryMask3DMeshSourceISS3MD3.SetInput = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3_SetInput, None, itkBinaryMask3DMeshSourceISS3MD3)
itkBinaryMask3DMeshSourceISS3MD3.SetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3_SetRegionOfInterest, None, itkBinaryMask3DMeshSourceISS3MD3)
itkBinaryMask3DMeshSourceISS3MD3.GetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3_GetRegionOfInterest, None, itkBinaryMask3DMeshSourceISS3MD3)
itkBinaryMask3DMeshSourceISS3MD3_swigregister = _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3_swigregister
itkBinaryMask3DMeshSourceISS3MD3_swigregister(itkBinaryMask3DMeshSourceISS3MD3)
# Module-level mirror of the class's static __New_orig__ factory.
def itkBinaryMask3DMeshSourceISS3MD3___New_orig__() -> "itkBinaryMask3DMeshSourceISS3MD3_Pointer":
    """itkBinaryMask3DMeshSourceISS3MD3___New_orig__() -> itkBinaryMask3DMeshSourceISS3MD3_Pointer"""
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3___New_orig__()
# Module-level mirror of the class's static cast() helper.
def itkBinaryMask3DMeshSourceISS3MD3_cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceISS3MD3 *":
    """itkBinaryMask3DMeshSourceISS3MD3_cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceISS3MD3"""
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MD3_cast(obj)
class itkBinaryMask3DMeshSourceISS3MF3(itkImageToMeshFilterPython.itkImageToMeshFilterISS3MF3):
    """Proxy of C++ itkBinaryMask3DMeshSourceISS3MF3 class.

    SWIG-generated proxy: every method below forwards to the compiled
    _itkBinaryMask3DMeshSourcePython extension module.  Instances must be
    created through New(); direct construction is disabled.
    """

    # 'thisown' mirrors the ownership flag of the wrapped C++ pointer.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    # Direct construction is disabled for wrapped ITK filter types; use New().
    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    def __New_orig__() -> "itkBinaryMask3DMeshSourceISS3MF3_Pointer":
        """__New_orig__() -> itkBinaryMask3DMeshSourceISS3MF3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3___New_orig__()

    __New_orig__ = staticmethod(__New_orig__)

    def Clone(self) -> "itkBinaryMask3DMeshSourceISS3MF3_Pointer":
        """Clone(itkBinaryMask3DMeshSourceISS3MF3 self) -> itkBinaryMask3DMeshSourceISS3MF3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3_Clone(self)

    def SetObjectValue(self, _arg: 'short const') -> "void":
        """SetObjectValue(itkBinaryMask3DMeshSourceISS3MF3 self, short const _arg)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3_SetObjectValue(self, _arg)

    def GetNumberOfNodes(self) -> "unsigned long long":
        """GetNumberOfNodes(itkBinaryMask3DMeshSourceISS3MF3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3_GetNumberOfNodes(self)

    def GetNumberOfCells(self) -> "unsigned long long":
        """GetNumberOfCells(itkBinaryMask3DMeshSourceISS3MF3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3_GetNumberOfCells(self)

    def SetInput(self, inputImage: 'itkImageSS3') -> "void":
        """SetInput(itkBinaryMask3DMeshSourceISS3MF3 self, itkImageSS3 inputImage)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3_SetInput(self, inputImage)

    def SetRegionOfInterest(self, iRegion: 'itkImageRegion3') -> "void":
        """SetRegionOfInterest(itkBinaryMask3DMeshSourceISS3MF3 self, itkImageRegion3 iRegion)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3_SetRegionOfInterest(self, iRegion)

    def GetRegionOfInterest(self) -> "itkImageRegion3 const &":
        """GetRegionOfInterest(itkBinaryMask3DMeshSourceISS3MF3 self) -> itkImageRegion3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3_GetRegionOfInterest(self)

    # Destructor provided by the compiled extension.
    __swig_destroy__ = _itkBinaryMask3DMeshSourcePython.delete_itkBinaryMask3DMeshSourceISS3MF3

    def cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceISS3MF3 *":
        """cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceISS3MF3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3_cast(obj)

    cast = staticmethod(cast)

    def New(*args, **kargs):
        """New() -> itkBinaryMask3DMeshSourceISS3MF3

        Create a new object of the class itkBinaryMask3DMeshSourceISS3MF3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.

        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.

        The named parameters are used by calling the method with the same name prefixed by 'Set'.

        Ex:

          itkBinaryMask3DMeshSourceISS3MF3.New( reader, Threshold=10 )

        is (most of the time) equivalent to:

          obj = itkBinaryMask3DMeshSourceISS3MF3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBinaryMask3DMeshSourceISS3MF3.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj

    New = staticmethod(New)
# Rebind each proxy method to the corresponding flat function exported by the
# compiled extension, then register the proxy with SWIG's runtime type system.
itkBinaryMask3DMeshSourceISS3MF3.Clone = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3_Clone, None, itkBinaryMask3DMeshSourceISS3MF3)
itkBinaryMask3DMeshSourceISS3MF3.SetObjectValue = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3_SetObjectValue, None, itkBinaryMask3DMeshSourceISS3MF3)
itkBinaryMask3DMeshSourceISS3MF3.GetNumberOfNodes = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3_GetNumberOfNodes, None, itkBinaryMask3DMeshSourceISS3MF3)
itkBinaryMask3DMeshSourceISS3MF3.GetNumberOfCells = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3_GetNumberOfCells, None, itkBinaryMask3DMeshSourceISS3MF3)
itkBinaryMask3DMeshSourceISS3MF3.SetInput = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3_SetInput, None, itkBinaryMask3DMeshSourceISS3MF3)
itkBinaryMask3DMeshSourceISS3MF3.SetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3_SetRegionOfInterest, None, itkBinaryMask3DMeshSourceISS3MF3)
itkBinaryMask3DMeshSourceISS3MF3.GetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3_GetRegionOfInterest, None, itkBinaryMask3DMeshSourceISS3MF3)
itkBinaryMask3DMeshSourceISS3MF3_swigregister = _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3_swigregister
itkBinaryMask3DMeshSourceISS3MF3_swigregister(itkBinaryMask3DMeshSourceISS3MF3)
# Module-level mirror of the class's static __New_orig__ factory.
def itkBinaryMask3DMeshSourceISS3MF3___New_orig__() -> "itkBinaryMask3DMeshSourceISS3MF3_Pointer":
    """itkBinaryMask3DMeshSourceISS3MF3___New_orig__() -> itkBinaryMask3DMeshSourceISS3MF3_Pointer"""
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3___New_orig__()
# Module-level mirror of the class's static cast() helper.
def itkBinaryMask3DMeshSourceISS3MF3_cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceISS3MF3 *":
    """itkBinaryMask3DMeshSourceISS3MF3_cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceISS3MF3"""
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MF3_cast(obj)
class itkBinaryMask3DMeshSourceISS3MSS3(itkImageToMeshFilterPython.itkImageToMeshFilterISS3MSS3):
    """Proxy of C++ itkBinaryMask3DMeshSourceISS3MSS3 class.

    SWIG-generated proxy: every method below forwards to the compiled
    _itkBinaryMask3DMeshSourcePython extension module.  Instances must be
    created through New(); direct construction is disabled.
    """

    # 'thisown' mirrors the ownership flag of the wrapped C++ pointer.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    # Direct construction is disabled for wrapped ITK filter types; use New().
    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    def __New_orig__() -> "itkBinaryMask3DMeshSourceISS3MSS3_Pointer":
        """__New_orig__() -> itkBinaryMask3DMeshSourceISS3MSS3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3___New_orig__()

    __New_orig__ = staticmethod(__New_orig__)

    def Clone(self) -> "itkBinaryMask3DMeshSourceISS3MSS3_Pointer":
        """Clone(itkBinaryMask3DMeshSourceISS3MSS3 self) -> itkBinaryMask3DMeshSourceISS3MSS3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3_Clone(self)

    def SetObjectValue(self, _arg: 'short const') -> "void":
        """SetObjectValue(itkBinaryMask3DMeshSourceISS3MSS3 self, short const _arg)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3_SetObjectValue(self, _arg)

    def GetNumberOfNodes(self) -> "unsigned long long":
        """GetNumberOfNodes(itkBinaryMask3DMeshSourceISS3MSS3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3_GetNumberOfNodes(self)

    def GetNumberOfCells(self) -> "unsigned long long":
        """GetNumberOfCells(itkBinaryMask3DMeshSourceISS3MSS3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3_GetNumberOfCells(self)

    def SetInput(self, inputImage: 'itkImageSS3') -> "void":
        """SetInput(itkBinaryMask3DMeshSourceISS3MSS3 self, itkImageSS3 inputImage)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3_SetInput(self, inputImage)

    def SetRegionOfInterest(self, iRegion: 'itkImageRegion3') -> "void":
        """SetRegionOfInterest(itkBinaryMask3DMeshSourceISS3MSS3 self, itkImageRegion3 iRegion)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3_SetRegionOfInterest(self, iRegion)

    def GetRegionOfInterest(self) -> "itkImageRegion3 const &":
        """GetRegionOfInterest(itkBinaryMask3DMeshSourceISS3MSS3 self) -> itkImageRegion3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3_GetRegionOfInterest(self)

    # Destructor provided by the compiled extension.
    __swig_destroy__ = _itkBinaryMask3DMeshSourcePython.delete_itkBinaryMask3DMeshSourceISS3MSS3

    def cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceISS3MSS3 *":
        """cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceISS3MSS3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3_cast(obj)

    cast = staticmethod(cast)

    def New(*args, **kargs):
        """New() -> itkBinaryMask3DMeshSourceISS3MSS3

        Create a new object of the class itkBinaryMask3DMeshSourceISS3MSS3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.

        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.

        The named parameters are used by calling the method with the same name prefixed by 'Set'.

        Ex:

          itkBinaryMask3DMeshSourceISS3MSS3.New( reader, Threshold=10 )

        is (most of the time) equivalent to:

          obj = itkBinaryMask3DMeshSourceISS3MSS3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBinaryMask3DMeshSourceISS3MSS3.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj

    New = staticmethod(New)
# Rebind each proxy method to the corresponding flat function exported by the
# compiled extension, then register the proxy with SWIG's runtime type system.
itkBinaryMask3DMeshSourceISS3MSS3.Clone = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3_Clone, None, itkBinaryMask3DMeshSourceISS3MSS3)
itkBinaryMask3DMeshSourceISS3MSS3.SetObjectValue = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3_SetObjectValue, None, itkBinaryMask3DMeshSourceISS3MSS3)
itkBinaryMask3DMeshSourceISS3MSS3.GetNumberOfNodes = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3_GetNumberOfNodes, None, itkBinaryMask3DMeshSourceISS3MSS3)
itkBinaryMask3DMeshSourceISS3MSS3.GetNumberOfCells = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3_GetNumberOfCells, None, itkBinaryMask3DMeshSourceISS3MSS3)
itkBinaryMask3DMeshSourceISS3MSS3.SetInput = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3_SetInput, None, itkBinaryMask3DMeshSourceISS3MSS3)
itkBinaryMask3DMeshSourceISS3MSS3.SetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3_SetRegionOfInterest, None, itkBinaryMask3DMeshSourceISS3MSS3)
itkBinaryMask3DMeshSourceISS3MSS3.GetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3_GetRegionOfInterest, None, itkBinaryMask3DMeshSourceISS3MSS3)
itkBinaryMask3DMeshSourceISS3MSS3_swigregister = _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3_swigregister
itkBinaryMask3DMeshSourceISS3MSS3_swigregister(itkBinaryMask3DMeshSourceISS3MSS3)
# Module-level mirror of the class's static __New_orig__ factory.
def itkBinaryMask3DMeshSourceISS3MSS3___New_orig__() -> "itkBinaryMask3DMeshSourceISS3MSS3_Pointer":
    """itkBinaryMask3DMeshSourceISS3MSS3___New_orig__() -> itkBinaryMask3DMeshSourceISS3MSS3_Pointer"""
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3___New_orig__()
# Module-level mirror of the class's static cast() helper.
def itkBinaryMask3DMeshSourceISS3MSS3_cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceISS3MSS3 *":
    """itkBinaryMask3DMeshSourceISS3MSS3_cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceISS3MSS3"""
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MSS3_cast(obj)
class itkBinaryMask3DMeshSourceISS3MUC3(itkImageToMeshFilterPython.itkImageToMeshFilterISS3MUC3):
"""Proxy of C++ itkBinaryMask3DMeshSourceISS3MUC3 class."""
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
def __New_orig__() -> "itkBinaryMask3DMeshSourceISS3MUC3_Pointer":
"""__New_orig__() -> itkBinaryMask3DMeshSourceISS3MUC3_Pointer"""
return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3___New_orig__()
__New_orig__ = staticmethod(__New_orig__)
def Clone(self) -> "itkBinaryMask3DMeshSourceISS3MUC3_Pointer":
"""Clone(itkBinaryMask3DMeshSourceISS3MUC3 self) -> itkBinaryMask3DMeshSourceISS3MUC3_Pointer"""
return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3_Clone(self)
def SetObjectValue(self, _arg: 'short const') -> "void":
"""SetObjectValue(itkBinaryMask3DMeshSourceISS3MUC3 self, short const _arg)"""
return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3_SetObjectValue(self, _arg)
def GetNumberOfNodes(self) -> "unsigned long long":
"""GetNumberOfNodes(itkBinaryMask3DMeshSourceISS3MUC3 self) -> unsigned long long"""
return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3_GetNumberOfNodes(self)
def GetNumberOfCells(self) -> "unsigned long long":
"""GetNumberOfCells(itkBinaryMask3DMeshSourceISS3MUC3 self) -> unsigned long long"""
return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3_GetNumberOfCells(self)
def SetInput(self, inputImage: 'itkImageSS3') -> "void":
"""SetInput(itkBinaryMask3DMeshSourceISS3MUC3 self, itkImageSS3 inputImage)"""
return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3_SetInput(self, inputImage)
def SetRegionOfInterest(self, iRegion: 'itkImageRegion3') -> "void":
"""SetRegionOfInterest(itkBinaryMask3DMeshSourceISS3MUC3 self, itkImageRegion3 iRegion)"""
return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3_SetRegionOfInterest(self, iRegion)
def GetRegionOfInterest(self) -> "itkImageRegion3 const &":
"""GetRegionOfInterest(itkBinaryMask3DMeshSourceISS3MUC3 self) -> itkImageRegion3"""
return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3_GetRegionOfInterest(self)
__swig_destroy__ = _itkBinaryMask3DMeshSourcePython.delete_itkBinaryMask3DMeshSourceISS3MUC3
    # Static down-cast from a generic itkLightObject to this concrete type.
    def cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceISS3MUC3 *":
        """cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceISS3MUC3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3_cast(obj)
    cast = staticmethod(cast)
    def New(*args, **kargs):
        """New() -> itkBinaryMask3DMeshSourceISS3MUC3

        Create a new object of the class itkBinaryMask3DMeshSourceISS3MUC3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.

        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.

        The named parameters are used by calling the method with the same name prefixed by 'Set'.

        Ex:

          itkBinaryMask3DMeshSourceISS3MUC3.New( reader, Threshold=10 )

        is (most of the time) equivalent to:

          obj = itkBinaryMask3DMeshSourceISS3MUC3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBinaryMask3DMeshSourceISS3MUC3.__New_orig__()
        # Imported lazily here (presumably to avoid an import cycle at module
        # load - TODO confirm); itkTemplate.New applies args/kwargs to obj.
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Re-bind the flat C wrapper functions as true instance methods of the proxy
# class, then register the class with SWIG's runtime type system.
itkBinaryMask3DMeshSourceISS3MUC3.Clone = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3_Clone, None, itkBinaryMask3DMeshSourceISS3MUC3)
itkBinaryMask3DMeshSourceISS3MUC3.SetObjectValue = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3_SetObjectValue, None, itkBinaryMask3DMeshSourceISS3MUC3)
itkBinaryMask3DMeshSourceISS3MUC3.GetNumberOfNodes = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3_GetNumberOfNodes, None, itkBinaryMask3DMeshSourceISS3MUC3)
itkBinaryMask3DMeshSourceISS3MUC3.GetNumberOfCells = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3_GetNumberOfCells, None, itkBinaryMask3DMeshSourceISS3MUC3)
itkBinaryMask3DMeshSourceISS3MUC3.SetInput = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3_SetInput, None, itkBinaryMask3DMeshSourceISS3MUC3)
itkBinaryMask3DMeshSourceISS3MUC3.SetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3_SetRegionOfInterest, None, itkBinaryMask3DMeshSourceISS3MUC3)
itkBinaryMask3DMeshSourceISS3MUC3.GetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3_GetRegionOfInterest, None, itkBinaryMask3DMeshSourceISS3MUC3)
itkBinaryMask3DMeshSourceISS3MUC3_swigregister = _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3_swigregister
itkBinaryMask3DMeshSourceISS3MUC3_swigregister(itkBinaryMask3DMeshSourceISS3MUC3)

# Flat, module-level forms of the static class members; they call the same
# extension entry points as the class's __New_orig__ / cast.
def itkBinaryMask3DMeshSourceISS3MUC3___New_orig__() -> "itkBinaryMask3DMeshSourceISS3MUC3_Pointer":
    """itkBinaryMask3DMeshSourceISS3MUC3___New_orig__() -> itkBinaryMask3DMeshSourceISS3MUC3_Pointer"""
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3___New_orig__()

def itkBinaryMask3DMeshSourceISS3MUC3_cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceISS3MUC3 *":
    """itkBinaryMask3DMeshSourceISS3MUC3_cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceISS3MUC3"""
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUC3_cast(obj)
# SWIG-generated proxy (do not edit by hand): Python front-end for one
# template instantiation of the C++ mesh source. Input image type is
# itkImageSS3 (see SetInput); every method forwards directly to the compiled
# extension module _itkBinaryMask3DMeshSourcePython. Instances are created
# via New(), never via the constructor.
class itkBinaryMask3DMeshSourceISS3MUS3(itkImageToMeshFilterPython.itkImageToMeshFilterISS3MUS3):
    """Proxy of C++ itkBinaryMask3DMeshSourceISS3MUS3 class."""

    # Ownership flag of the underlying C++ object, managed through SWIG.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    def __init__(self, *args, **kwargs):
        # Direct construction is forbidden; use the New() factory below.
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    # Static factory implemented in the C extension.
    def __New_orig__() -> "itkBinaryMask3DMeshSourceISS3MUS3_Pointer":
        """__New_orig__() -> itkBinaryMask3DMeshSourceISS3MUS3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)

    def Clone(self) -> "itkBinaryMask3DMeshSourceISS3MUS3_Pointer":
        """Clone(itkBinaryMask3DMeshSourceISS3MUS3 self) -> itkBinaryMask3DMeshSourceISS3MUS3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3_Clone(self)

    # Pixel value that identifies the object in the binary mask.
    def SetObjectValue(self, _arg: 'short const') -> "void":
        """SetObjectValue(itkBinaryMask3DMeshSourceISS3MUS3 self, short const _arg)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3_SetObjectValue(self, _arg)

    def GetNumberOfNodes(self) -> "unsigned long long":
        """GetNumberOfNodes(itkBinaryMask3DMeshSourceISS3MUS3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3_GetNumberOfNodes(self)

    def GetNumberOfCells(self) -> "unsigned long long":
        """GetNumberOfCells(itkBinaryMask3DMeshSourceISS3MUS3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3_GetNumberOfCells(self)

    def SetInput(self, inputImage: 'itkImageSS3') -> "void":
        """SetInput(itkBinaryMask3DMeshSourceISS3MUS3 self, itkImageSS3 inputImage)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3_SetInput(self, inputImage)

    def SetRegionOfInterest(self, iRegion: 'itkImageRegion3') -> "void":
        """SetRegionOfInterest(itkBinaryMask3DMeshSourceISS3MUS3 self, itkImageRegion3 iRegion)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3_SetRegionOfInterest(self, iRegion)

    def GetRegionOfInterest(self) -> "itkImageRegion3 const &":
        """GetRegionOfInterest(itkBinaryMask3DMeshSourceISS3MUS3 self) -> itkImageRegion3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3_GetRegionOfInterest(self)

    # SWIG hook: deletes the wrapped C++ object when the proxy is collected.
    __swig_destroy__ = _itkBinaryMask3DMeshSourcePython.delete_itkBinaryMask3DMeshSourceISS3MUS3

    # Static down-cast from a generic itkLightObject to this concrete type.
    def cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceISS3MUS3 *":
        """cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceISS3MUS3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3_cast(obj)
    cast = staticmethod(cast)

    def New(*args, **kargs):
        """New() -> itkBinaryMask3DMeshSourceISS3MUS3

        Create a new object of the class itkBinaryMask3DMeshSourceISS3MUS3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBinaryMask3DMeshSourceISS3MUS3.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBinaryMask3DMeshSourceISS3MUS3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBinaryMask3DMeshSourceISS3MUS3.__New_orig__()
        # Lazy import; itkTemplate.New applies args/kwargs to obj.
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Re-bind the flat C wrapper functions as true instance methods of the proxy
# class, then register the class with SWIG's runtime type system.
itkBinaryMask3DMeshSourceISS3MUS3.Clone = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3_Clone, None, itkBinaryMask3DMeshSourceISS3MUS3)
itkBinaryMask3DMeshSourceISS3MUS3.SetObjectValue = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3_SetObjectValue, None, itkBinaryMask3DMeshSourceISS3MUS3)
itkBinaryMask3DMeshSourceISS3MUS3.GetNumberOfNodes = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3_GetNumberOfNodes, None, itkBinaryMask3DMeshSourceISS3MUS3)
itkBinaryMask3DMeshSourceISS3MUS3.GetNumberOfCells = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3_GetNumberOfCells, None, itkBinaryMask3DMeshSourceISS3MUS3)
itkBinaryMask3DMeshSourceISS3MUS3.SetInput = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3_SetInput, None, itkBinaryMask3DMeshSourceISS3MUS3)
itkBinaryMask3DMeshSourceISS3MUS3.SetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3_SetRegionOfInterest, None, itkBinaryMask3DMeshSourceISS3MUS3)
itkBinaryMask3DMeshSourceISS3MUS3.GetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3_GetRegionOfInterest, None, itkBinaryMask3DMeshSourceISS3MUS3)
itkBinaryMask3DMeshSourceISS3MUS3_swigregister = _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3_swigregister
itkBinaryMask3DMeshSourceISS3MUS3_swigregister(itkBinaryMask3DMeshSourceISS3MUS3)

# Flat, module-level forms of the static class members; they call the same
# extension entry points as the class's __New_orig__ / cast.
def itkBinaryMask3DMeshSourceISS3MUS3___New_orig__() -> "itkBinaryMask3DMeshSourceISS3MUS3_Pointer":
    """itkBinaryMask3DMeshSourceISS3MUS3___New_orig__() -> itkBinaryMask3DMeshSourceISS3MUS3_Pointer"""
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3___New_orig__()

def itkBinaryMask3DMeshSourceISS3MUS3_cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceISS3MUS3 *":
    """itkBinaryMask3DMeshSourceISS3MUS3_cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceISS3MUS3"""
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceISS3MUS3_cast(obj)
# SWIG-generated proxy (do not edit by hand): Python front-end for one
# template instantiation of the C++ mesh source. Input image type is
# itkImageUC3 (see SetInput); every method forwards directly to the compiled
# extension module _itkBinaryMask3DMeshSourcePython. Instances are created
# via New(), never via the constructor.
class itkBinaryMask3DMeshSourceIUC3MD3(itkImageToMeshFilterPython.itkImageToMeshFilterIUC3MD3):
    """Proxy of C++ itkBinaryMask3DMeshSourceIUC3MD3 class."""

    # Ownership flag of the underlying C++ object, managed through SWIG.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    def __init__(self, *args, **kwargs):
        # Direct construction is forbidden; use the New() factory below.
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    # Static factory implemented in the C extension.
    def __New_orig__() -> "itkBinaryMask3DMeshSourceIUC3MD3_Pointer":
        """__New_orig__() -> itkBinaryMask3DMeshSourceIUC3MD3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)

    def Clone(self) -> "itkBinaryMask3DMeshSourceIUC3MD3_Pointer":
        """Clone(itkBinaryMask3DMeshSourceIUC3MD3 self) -> itkBinaryMask3DMeshSourceIUC3MD3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3_Clone(self)

    # Pixel value that identifies the object in the binary mask.
    def SetObjectValue(self, _arg: 'unsigned char const') -> "void":
        """SetObjectValue(itkBinaryMask3DMeshSourceIUC3MD3 self, unsigned char const _arg)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3_SetObjectValue(self, _arg)

    def GetNumberOfNodes(self) -> "unsigned long long":
        """GetNumberOfNodes(itkBinaryMask3DMeshSourceIUC3MD3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3_GetNumberOfNodes(self)

    def GetNumberOfCells(self) -> "unsigned long long":
        """GetNumberOfCells(itkBinaryMask3DMeshSourceIUC3MD3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3_GetNumberOfCells(self)

    def SetInput(self, inputImage: 'itkImageUC3') -> "void":
        """SetInput(itkBinaryMask3DMeshSourceIUC3MD3 self, itkImageUC3 inputImage)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3_SetInput(self, inputImage)

    def SetRegionOfInterest(self, iRegion: 'itkImageRegion3') -> "void":
        """SetRegionOfInterest(itkBinaryMask3DMeshSourceIUC3MD3 self, itkImageRegion3 iRegion)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3_SetRegionOfInterest(self, iRegion)

    def GetRegionOfInterest(self) -> "itkImageRegion3 const &":
        """GetRegionOfInterest(itkBinaryMask3DMeshSourceIUC3MD3 self) -> itkImageRegion3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3_GetRegionOfInterest(self)

    # SWIG hook: deletes the wrapped C++ object when the proxy is collected.
    __swig_destroy__ = _itkBinaryMask3DMeshSourcePython.delete_itkBinaryMask3DMeshSourceIUC3MD3

    # Static down-cast from a generic itkLightObject to this concrete type.
    def cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUC3MD3 *":
        """cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUC3MD3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3_cast(obj)
    cast = staticmethod(cast)

    def New(*args, **kargs):
        """New() -> itkBinaryMask3DMeshSourceIUC3MD3

        Create a new object of the class itkBinaryMask3DMeshSourceIUC3MD3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBinaryMask3DMeshSourceIUC3MD3.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBinaryMask3DMeshSourceIUC3MD3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBinaryMask3DMeshSourceIUC3MD3.__New_orig__()
        # Lazy import; itkTemplate.New applies args/kwargs to obj.
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Re-bind the flat C wrapper functions as true instance methods of the proxy
# class, then register the class with SWIG's runtime type system.
itkBinaryMask3DMeshSourceIUC3MD3.Clone = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3_Clone, None, itkBinaryMask3DMeshSourceIUC3MD3)
itkBinaryMask3DMeshSourceIUC3MD3.SetObjectValue = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3_SetObjectValue, None, itkBinaryMask3DMeshSourceIUC3MD3)
itkBinaryMask3DMeshSourceIUC3MD3.GetNumberOfNodes = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3_GetNumberOfNodes, None, itkBinaryMask3DMeshSourceIUC3MD3)
itkBinaryMask3DMeshSourceIUC3MD3.GetNumberOfCells = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3_GetNumberOfCells, None, itkBinaryMask3DMeshSourceIUC3MD3)
itkBinaryMask3DMeshSourceIUC3MD3.SetInput = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3_SetInput, None, itkBinaryMask3DMeshSourceIUC3MD3)
itkBinaryMask3DMeshSourceIUC3MD3.SetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3_SetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUC3MD3)
itkBinaryMask3DMeshSourceIUC3MD3.GetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3_GetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUC3MD3)
itkBinaryMask3DMeshSourceIUC3MD3_swigregister = _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3_swigregister
itkBinaryMask3DMeshSourceIUC3MD3_swigregister(itkBinaryMask3DMeshSourceIUC3MD3)

# Flat, module-level forms of the static class members; they call the same
# extension entry points as the class's __New_orig__ / cast.
def itkBinaryMask3DMeshSourceIUC3MD3___New_orig__() -> "itkBinaryMask3DMeshSourceIUC3MD3_Pointer":
    """itkBinaryMask3DMeshSourceIUC3MD3___New_orig__() -> itkBinaryMask3DMeshSourceIUC3MD3_Pointer"""
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3___New_orig__()

def itkBinaryMask3DMeshSourceIUC3MD3_cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUC3MD3 *":
    """itkBinaryMask3DMeshSourceIUC3MD3_cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUC3MD3"""
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MD3_cast(obj)
# SWIG-generated proxy (do not edit by hand): Python front-end for one
# template instantiation of the C++ mesh source. Input image type is
# itkImageUC3 (see SetInput); every method forwards directly to the compiled
# extension module _itkBinaryMask3DMeshSourcePython. Instances are created
# via New(), never via the constructor.
class itkBinaryMask3DMeshSourceIUC3MF3(itkImageToMeshFilterPython.itkImageToMeshFilterIUC3MF3):
    """Proxy of C++ itkBinaryMask3DMeshSourceIUC3MF3 class."""

    # Ownership flag of the underlying C++ object, managed through SWIG.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    def __init__(self, *args, **kwargs):
        # Direct construction is forbidden; use the New() factory below.
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    # Static factory implemented in the C extension.
    def __New_orig__() -> "itkBinaryMask3DMeshSourceIUC3MF3_Pointer":
        """__New_orig__() -> itkBinaryMask3DMeshSourceIUC3MF3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)

    def Clone(self) -> "itkBinaryMask3DMeshSourceIUC3MF3_Pointer":
        """Clone(itkBinaryMask3DMeshSourceIUC3MF3 self) -> itkBinaryMask3DMeshSourceIUC3MF3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3_Clone(self)

    # Pixel value that identifies the object in the binary mask.
    def SetObjectValue(self, _arg: 'unsigned char const') -> "void":
        """SetObjectValue(itkBinaryMask3DMeshSourceIUC3MF3 self, unsigned char const _arg)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3_SetObjectValue(self, _arg)

    def GetNumberOfNodes(self) -> "unsigned long long":
        """GetNumberOfNodes(itkBinaryMask3DMeshSourceIUC3MF3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3_GetNumberOfNodes(self)

    def GetNumberOfCells(self) -> "unsigned long long":
        """GetNumberOfCells(itkBinaryMask3DMeshSourceIUC3MF3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3_GetNumberOfCells(self)

    def SetInput(self, inputImage: 'itkImageUC3') -> "void":
        """SetInput(itkBinaryMask3DMeshSourceIUC3MF3 self, itkImageUC3 inputImage)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3_SetInput(self, inputImage)

    def SetRegionOfInterest(self, iRegion: 'itkImageRegion3') -> "void":
        """SetRegionOfInterest(itkBinaryMask3DMeshSourceIUC3MF3 self, itkImageRegion3 iRegion)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3_SetRegionOfInterest(self, iRegion)

    def GetRegionOfInterest(self) -> "itkImageRegion3 const &":
        """GetRegionOfInterest(itkBinaryMask3DMeshSourceIUC3MF3 self) -> itkImageRegion3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3_GetRegionOfInterest(self)

    # SWIG hook: deletes the wrapped C++ object when the proxy is collected.
    __swig_destroy__ = _itkBinaryMask3DMeshSourcePython.delete_itkBinaryMask3DMeshSourceIUC3MF3

    # Static down-cast from a generic itkLightObject to this concrete type.
    def cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUC3MF3 *":
        """cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUC3MF3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3_cast(obj)
    cast = staticmethod(cast)

    def New(*args, **kargs):
        """New() -> itkBinaryMask3DMeshSourceIUC3MF3

        Create a new object of the class itkBinaryMask3DMeshSourceIUC3MF3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBinaryMask3DMeshSourceIUC3MF3.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBinaryMask3DMeshSourceIUC3MF3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBinaryMask3DMeshSourceIUC3MF3.__New_orig__()
        # Lazy import; itkTemplate.New applies args/kwargs to obj.
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Re-bind the flat C wrapper functions as true instance methods of the proxy
# class, then register the class with SWIG's runtime type system.
itkBinaryMask3DMeshSourceIUC3MF3.Clone = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3_Clone, None, itkBinaryMask3DMeshSourceIUC3MF3)
itkBinaryMask3DMeshSourceIUC3MF3.SetObjectValue = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3_SetObjectValue, None, itkBinaryMask3DMeshSourceIUC3MF3)
itkBinaryMask3DMeshSourceIUC3MF3.GetNumberOfNodes = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3_GetNumberOfNodes, None, itkBinaryMask3DMeshSourceIUC3MF3)
itkBinaryMask3DMeshSourceIUC3MF3.GetNumberOfCells = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3_GetNumberOfCells, None, itkBinaryMask3DMeshSourceIUC3MF3)
itkBinaryMask3DMeshSourceIUC3MF3.SetInput = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3_SetInput, None, itkBinaryMask3DMeshSourceIUC3MF3)
itkBinaryMask3DMeshSourceIUC3MF3.SetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3_SetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUC3MF3)
itkBinaryMask3DMeshSourceIUC3MF3.GetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3_GetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUC3MF3)
itkBinaryMask3DMeshSourceIUC3MF3_swigregister = _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3_swigregister
itkBinaryMask3DMeshSourceIUC3MF3_swigregister(itkBinaryMask3DMeshSourceIUC3MF3)

# Flat, module-level forms of the static class members; they call the same
# extension entry points as the class's __New_orig__ / cast.
def itkBinaryMask3DMeshSourceIUC3MF3___New_orig__() -> "itkBinaryMask3DMeshSourceIUC3MF3_Pointer":
    """itkBinaryMask3DMeshSourceIUC3MF3___New_orig__() -> itkBinaryMask3DMeshSourceIUC3MF3_Pointer"""
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3___New_orig__()

def itkBinaryMask3DMeshSourceIUC3MF3_cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUC3MF3 *":
    """itkBinaryMask3DMeshSourceIUC3MF3_cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUC3MF3"""
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MF3_cast(obj)
# SWIG-generated proxy (do not edit by hand): Python front-end for one
# template instantiation of the C++ mesh source. Input image type is
# itkImageUC3 (see SetInput); every method forwards directly to the compiled
# extension module _itkBinaryMask3DMeshSourcePython. Instances are created
# via New(), never via the constructor.
class itkBinaryMask3DMeshSourceIUC3MSS3(itkImageToMeshFilterPython.itkImageToMeshFilterIUC3MSS3):
    """Proxy of C++ itkBinaryMask3DMeshSourceIUC3MSS3 class."""

    # Ownership flag of the underlying C++ object, managed through SWIG.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    def __init__(self, *args, **kwargs):
        # Direct construction is forbidden; use the New() factory below.
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    # Static factory implemented in the C extension.
    def __New_orig__() -> "itkBinaryMask3DMeshSourceIUC3MSS3_Pointer":
        """__New_orig__() -> itkBinaryMask3DMeshSourceIUC3MSS3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)

    def Clone(self) -> "itkBinaryMask3DMeshSourceIUC3MSS3_Pointer":
        """Clone(itkBinaryMask3DMeshSourceIUC3MSS3 self) -> itkBinaryMask3DMeshSourceIUC3MSS3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3_Clone(self)

    # Pixel value that identifies the object in the binary mask.
    def SetObjectValue(self, _arg: 'unsigned char const') -> "void":
        """SetObjectValue(itkBinaryMask3DMeshSourceIUC3MSS3 self, unsigned char const _arg)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3_SetObjectValue(self, _arg)

    def GetNumberOfNodes(self) -> "unsigned long long":
        """GetNumberOfNodes(itkBinaryMask3DMeshSourceIUC3MSS3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3_GetNumberOfNodes(self)

    def GetNumberOfCells(self) -> "unsigned long long":
        """GetNumberOfCells(itkBinaryMask3DMeshSourceIUC3MSS3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3_GetNumberOfCells(self)

    def SetInput(self, inputImage: 'itkImageUC3') -> "void":
        """SetInput(itkBinaryMask3DMeshSourceIUC3MSS3 self, itkImageUC3 inputImage)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3_SetInput(self, inputImage)

    def SetRegionOfInterest(self, iRegion: 'itkImageRegion3') -> "void":
        """SetRegionOfInterest(itkBinaryMask3DMeshSourceIUC3MSS3 self, itkImageRegion3 iRegion)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3_SetRegionOfInterest(self, iRegion)

    def GetRegionOfInterest(self) -> "itkImageRegion3 const &":
        """GetRegionOfInterest(itkBinaryMask3DMeshSourceIUC3MSS3 self) -> itkImageRegion3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3_GetRegionOfInterest(self)

    # SWIG hook: deletes the wrapped C++ object when the proxy is collected.
    __swig_destroy__ = _itkBinaryMask3DMeshSourcePython.delete_itkBinaryMask3DMeshSourceIUC3MSS3

    # Static down-cast from a generic itkLightObject to this concrete type.
    def cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUC3MSS3 *":
        """cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUC3MSS3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3_cast(obj)
    cast = staticmethod(cast)

    def New(*args, **kargs):
        """New() -> itkBinaryMask3DMeshSourceIUC3MSS3

        Create a new object of the class itkBinaryMask3DMeshSourceIUC3MSS3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBinaryMask3DMeshSourceIUC3MSS3.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBinaryMask3DMeshSourceIUC3MSS3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBinaryMask3DMeshSourceIUC3MSS3.__New_orig__()
        # Lazy import; itkTemplate.New applies args/kwargs to obj.
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Re-bind the flat C wrapper functions as true instance methods of the proxy
# class, then register the class with SWIG's runtime type system.
itkBinaryMask3DMeshSourceIUC3MSS3.Clone = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3_Clone, None, itkBinaryMask3DMeshSourceIUC3MSS3)
itkBinaryMask3DMeshSourceIUC3MSS3.SetObjectValue = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3_SetObjectValue, None, itkBinaryMask3DMeshSourceIUC3MSS3)
itkBinaryMask3DMeshSourceIUC3MSS3.GetNumberOfNodes = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3_GetNumberOfNodes, None, itkBinaryMask3DMeshSourceIUC3MSS3)
itkBinaryMask3DMeshSourceIUC3MSS3.GetNumberOfCells = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3_GetNumberOfCells, None, itkBinaryMask3DMeshSourceIUC3MSS3)
itkBinaryMask3DMeshSourceIUC3MSS3.SetInput = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3_SetInput, None, itkBinaryMask3DMeshSourceIUC3MSS3)
itkBinaryMask3DMeshSourceIUC3MSS3.SetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3_SetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUC3MSS3)
itkBinaryMask3DMeshSourceIUC3MSS3.GetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3_GetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUC3MSS3)
itkBinaryMask3DMeshSourceIUC3MSS3_swigregister = _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3_swigregister
itkBinaryMask3DMeshSourceIUC3MSS3_swigregister(itkBinaryMask3DMeshSourceIUC3MSS3)

# Flat, module-level forms of the static class members; they call the same
# extension entry points as the class's __New_orig__ / cast.
def itkBinaryMask3DMeshSourceIUC3MSS3___New_orig__() -> "itkBinaryMask3DMeshSourceIUC3MSS3_Pointer":
    """itkBinaryMask3DMeshSourceIUC3MSS3___New_orig__() -> itkBinaryMask3DMeshSourceIUC3MSS3_Pointer"""
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3___New_orig__()

def itkBinaryMask3DMeshSourceIUC3MSS3_cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUC3MSS3 *":
    """itkBinaryMask3DMeshSourceIUC3MSS3_cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUC3MSS3"""
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MSS3_cast(obj)
# SWIG-generated proxy (do not edit by hand): Python front-end for one
# template instantiation of the C++ mesh source. Input image type is
# itkImageUC3 (see SetInput); every method forwards directly to the compiled
# extension module _itkBinaryMask3DMeshSourcePython. Instances are created
# via New(), never via the constructor.
class itkBinaryMask3DMeshSourceIUC3MUC3(itkImageToMeshFilterPython.itkImageToMeshFilterIUC3MUC3):
    """Proxy of C++ itkBinaryMask3DMeshSourceIUC3MUC3 class."""

    # Ownership flag of the underlying C++ object, managed through SWIG.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    def __init__(self, *args, **kwargs):
        # Direct construction is forbidden; use the New() factory below.
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    # Static factory implemented in the C extension.
    def __New_orig__() -> "itkBinaryMask3DMeshSourceIUC3MUC3_Pointer":
        """__New_orig__() -> itkBinaryMask3DMeshSourceIUC3MUC3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)

    def Clone(self) -> "itkBinaryMask3DMeshSourceIUC3MUC3_Pointer":
        """Clone(itkBinaryMask3DMeshSourceIUC3MUC3 self) -> itkBinaryMask3DMeshSourceIUC3MUC3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3_Clone(self)

    # Pixel value that identifies the object in the binary mask.
    def SetObjectValue(self, _arg: 'unsigned char const') -> "void":
        """SetObjectValue(itkBinaryMask3DMeshSourceIUC3MUC3 self, unsigned char const _arg)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3_SetObjectValue(self, _arg)

    def GetNumberOfNodes(self) -> "unsigned long long":
        """GetNumberOfNodes(itkBinaryMask3DMeshSourceIUC3MUC3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3_GetNumberOfNodes(self)

    def GetNumberOfCells(self) -> "unsigned long long":
        """GetNumberOfCells(itkBinaryMask3DMeshSourceIUC3MUC3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3_GetNumberOfCells(self)

    def SetInput(self, inputImage: 'itkImageUC3') -> "void":
        """SetInput(itkBinaryMask3DMeshSourceIUC3MUC3 self, itkImageUC3 inputImage)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3_SetInput(self, inputImage)

    def SetRegionOfInterest(self, iRegion: 'itkImageRegion3') -> "void":
        """SetRegionOfInterest(itkBinaryMask3DMeshSourceIUC3MUC3 self, itkImageRegion3 iRegion)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3_SetRegionOfInterest(self, iRegion)

    def GetRegionOfInterest(self) -> "itkImageRegion3 const &":
        """GetRegionOfInterest(itkBinaryMask3DMeshSourceIUC3MUC3 self) -> itkImageRegion3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3_GetRegionOfInterest(self)

    # SWIG hook: deletes the wrapped C++ object when the proxy is collected.
    __swig_destroy__ = _itkBinaryMask3DMeshSourcePython.delete_itkBinaryMask3DMeshSourceIUC3MUC3

    # Static down-cast from a generic itkLightObject to this concrete type.
    def cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUC3MUC3 *":
        """cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUC3MUC3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3_cast(obj)
    cast = staticmethod(cast)

    def New(*args, **kargs):
        """New() -> itkBinaryMask3DMeshSourceIUC3MUC3

        Create a new object of the class itkBinaryMask3DMeshSourceIUC3MUC3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBinaryMask3DMeshSourceIUC3MUC3.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBinaryMask3DMeshSourceIUC3MUC3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBinaryMask3DMeshSourceIUC3MUC3.__New_orig__()
        # Lazy import; itkTemplate.New applies args/kwargs to obj.
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Re-bind the flat C wrapper functions as true instance methods of the proxy
# class, then register the class with SWIG's runtime type system.
itkBinaryMask3DMeshSourceIUC3MUC3.Clone = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3_Clone, None, itkBinaryMask3DMeshSourceIUC3MUC3)
itkBinaryMask3DMeshSourceIUC3MUC3.SetObjectValue = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3_SetObjectValue, None, itkBinaryMask3DMeshSourceIUC3MUC3)
itkBinaryMask3DMeshSourceIUC3MUC3.GetNumberOfNodes = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3_GetNumberOfNodes, None, itkBinaryMask3DMeshSourceIUC3MUC3)
itkBinaryMask3DMeshSourceIUC3MUC3.GetNumberOfCells = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3_GetNumberOfCells, None, itkBinaryMask3DMeshSourceIUC3MUC3)
itkBinaryMask3DMeshSourceIUC3MUC3.SetInput = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3_SetInput, None, itkBinaryMask3DMeshSourceIUC3MUC3)
itkBinaryMask3DMeshSourceIUC3MUC3.SetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3_SetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUC3MUC3)
itkBinaryMask3DMeshSourceIUC3MUC3.GetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3_GetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUC3MUC3)
itkBinaryMask3DMeshSourceIUC3MUC3_swigregister = _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3_swigregister
itkBinaryMask3DMeshSourceIUC3MUC3_swigregister(itkBinaryMask3DMeshSourceIUC3MUC3)

# Flat, module-level forms of the static class members; they call the same
# extension entry points as the class's __New_orig__ / cast.
def itkBinaryMask3DMeshSourceIUC3MUC3___New_orig__() -> "itkBinaryMask3DMeshSourceIUC3MUC3_Pointer":
    """itkBinaryMask3DMeshSourceIUC3MUC3___New_orig__() -> itkBinaryMask3DMeshSourceIUC3MUC3_Pointer"""
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3___New_orig__()

def itkBinaryMask3DMeshSourceIUC3MUC3_cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUC3MUC3 *":
    """itkBinaryMask3DMeshSourceIUC3MUC3_cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUC3MUC3"""
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUC3_cast(obj)
class itkBinaryMask3DMeshSourceIUC3MUS3(itkImageToMeshFilterPython.itkImageToMeshFilterIUC3MUS3):
    """Proxy of C++ itkBinaryMask3DMeshSourceIUC3MUS3 class.

    SWIG-generated proxy; every method forwards to the compiled
    _itkBinaryMask3DMeshSourcePython extension module.  The input image type
    is itkImageUC3 (see SetInput).  Create instances with New(), not the
    constructor.
    """
    # SWIG ownership flag for the underlying C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs):
        # Wrapped ITK objects cannot be constructed directly; use New().
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def __New_orig__() -> "itkBinaryMask3DMeshSourceIUC3MUS3_Pointer":
        """__New_orig__() -> itkBinaryMask3DMeshSourceIUC3MUS3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    def Clone(self) -> "itkBinaryMask3DMeshSourceIUC3MUS3_Pointer":
        """Clone(itkBinaryMask3DMeshSourceIUC3MUS3 self) -> itkBinaryMask3DMeshSourceIUC3MUS3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3_Clone(self)
    def SetObjectValue(self, _arg: 'unsigned char const') -> "void":
        """SetObjectValue(itkBinaryMask3DMeshSourceIUC3MUS3 self, unsigned char const _arg)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3_SetObjectValue(self, _arg)
    def GetNumberOfNodes(self) -> "unsigned long long":
        """GetNumberOfNodes(itkBinaryMask3DMeshSourceIUC3MUS3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3_GetNumberOfNodes(self)
    def GetNumberOfCells(self) -> "unsigned long long":
        """GetNumberOfCells(itkBinaryMask3DMeshSourceIUC3MUS3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3_GetNumberOfCells(self)
    def SetInput(self, inputImage: 'itkImageUC3') -> "void":
        """SetInput(itkBinaryMask3DMeshSourceIUC3MUS3 self, itkImageUC3 inputImage)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3_SetInput(self, inputImage)
    def SetRegionOfInterest(self, iRegion: 'itkImageRegion3') -> "void":
        """SetRegionOfInterest(itkBinaryMask3DMeshSourceIUC3MUS3 self, itkImageRegion3 iRegion)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3_SetRegionOfInterest(self, iRegion)
    def GetRegionOfInterest(self) -> "itkImageRegion3 const &":
        """GetRegionOfInterest(itkBinaryMask3DMeshSourceIUC3MUS3 self) -> itkImageRegion3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3_GetRegionOfInterest(self)
    # SWIG destructor hook for the wrapped C++ object.
    __swig_destroy__ = _itkBinaryMask3DMeshSourcePython.delete_itkBinaryMask3DMeshSourceIUC3MUS3
    def cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUC3MUS3 *":
        """cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUC3MUS3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3_cast(obj)
    cast = staticmethod(cast)
    def New(*args, **kargs):
        """New() -> itkBinaryMask3DMeshSourceIUC3MUS3
        Create a new object of the class itkBinaryMask3DMeshSourceIUC3MUS3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBinaryMask3DMeshSourceIUC3MUS3.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBinaryMask3DMeshSourceIUC3MUS3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBinaryMask3DMeshSourceIUC3MUS3.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Attach the wrapped C methods to the IUC3MUS3 proxy class.
itkBinaryMask3DMeshSourceIUC3MUS3.Clone = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3_Clone, None, itkBinaryMask3DMeshSourceIUC3MUS3)
itkBinaryMask3DMeshSourceIUC3MUS3.SetObjectValue = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3_SetObjectValue, None, itkBinaryMask3DMeshSourceIUC3MUS3)
itkBinaryMask3DMeshSourceIUC3MUS3.GetNumberOfNodes = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3_GetNumberOfNodes, None, itkBinaryMask3DMeshSourceIUC3MUS3)
itkBinaryMask3DMeshSourceIUC3MUS3.GetNumberOfCells = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3_GetNumberOfCells, None, itkBinaryMask3DMeshSourceIUC3MUS3)
itkBinaryMask3DMeshSourceIUC3MUS3.SetInput = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3_SetInput, None, itkBinaryMask3DMeshSourceIUC3MUS3)
itkBinaryMask3DMeshSourceIUC3MUS3.SetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3_SetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUC3MUS3)
itkBinaryMask3DMeshSourceIUC3MUS3.GetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3_GetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUC3MUS3)
# Register the proxy class with the SWIG runtime type system.
itkBinaryMask3DMeshSourceIUC3MUS3_swigregister = _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3_swigregister
itkBinaryMask3DMeshSourceIUC3MUS3_swigregister(itkBinaryMask3DMeshSourceIUC3MUS3)
def itkBinaryMask3DMeshSourceIUC3MUS3___New_orig__() -> "itkBinaryMask3DMeshSourceIUC3MUS3_Pointer":
    """itkBinaryMask3DMeshSourceIUC3MUS3___New_orig__() -> itkBinaryMask3DMeshSourceIUC3MUS3_Pointer"""
    # Module-level alias of the class's __New_orig__ factory, kept by SWIG
    # for backward compatibility with older generated call sites.
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3___New_orig__()
def itkBinaryMask3DMeshSourceIUC3MUS3_cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUC3MUS3 *":
    """itkBinaryMask3DMeshSourceIUC3MUS3_cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUC3MUS3"""
    # Module-level alias of the class's static cast(); converts a generic
    # itkLightObject reference to this wrapped type via the extension module.
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUC3MUS3_cast(obj)
class itkBinaryMask3DMeshSourceIUS3MD3(itkImageToMeshFilterPython.itkImageToMeshFilterIUS3MD3):
    """Proxy of C++ itkBinaryMask3DMeshSourceIUS3MD3 class.

    SWIG-generated proxy; every method forwards to the compiled
    _itkBinaryMask3DMeshSourcePython extension module.  The input image type
    is itkImageUS3 (see SetInput).  Create instances with New(), not the
    constructor.
    """
    # SWIG ownership flag for the underlying C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs):
        # Wrapped ITK objects cannot be constructed directly; use New().
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def __New_orig__() -> "itkBinaryMask3DMeshSourceIUS3MD3_Pointer":
        """__New_orig__() -> itkBinaryMask3DMeshSourceIUS3MD3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    def Clone(self) -> "itkBinaryMask3DMeshSourceIUS3MD3_Pointer":
        """Clone(itkBinaryMask3DMeshSourceIUS3MD3 self) -> itkBinaryMask3DMeshSourceIUS3MD3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3_Clone(self)
    def SetObjectValue(self, _arg: 'unsigned short const') -> "void":
        """SetObjectValue(itkBinaryMask3DMeshSourceIUS3MD3 self, unsigned short const _arg)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3_SetObjectValue(self, _arg)
    def GetNumberOfNodes(self) -> "unsigned long long":
        """GetNumberOfNodes(itkBinaryMask3DMeshSourceIUS3MD3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3_GetNumberOfNodes(self)
    def GetNumberOfCells(self) -> "unsigned long long":
        """GetNumberOfCells(itkBinaryMask3DMeshSourceIUS3MD3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3_GetNumberOfCells(self)
    def SetInput(self, inputImage: 'itkImageUS3') -> "void":
        """SetInput(itkBinaryMask3DMeshSourceIUS3MD3 self, itkImageUS3 inputImage)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3_SetInput(self, inputImage)
    def SetRegionOfInterest(self, iRegion: 'itkImageRegion3') -> "void":
        """SetRegionOfInterest(itkBinaryMask3DMeshSourceIUS3MD3 self, itkImageRegion3 iRegion)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3_SetRegionOfInterest(self, iRegion)
    def GetRegionOfInterest(self) -> "itkImageRegion3 const &":
        """GetRegionOfInterest(itkBinaryMask3DMeshSourceIUS3MD3 self) -> itkImageRegion3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3_GetRegionOfInterest(self)
    # SWIG destructor hook for the wrapped C++ object.
    __swig_destroy__ = _itkBinaryMask3DMeshSourcePython.delete_itkBinaryMask3DMeshSourceIUS3MD3
    def cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUS3MD3 *":
        """cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUS3MD3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3_cast(obj)
    cast = staticmethod(cast)
    def New(*args, **kargs):
        """New() -> itkBinaryMask3DMeshSourceIUS3MD3
        Create a new object of the class itkBinaryMask3DMeshSourceIUS3MD3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBinaryMask3DMeshSourceIUS3MD3.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBinaryMask3DMeshSourceIUS3MD3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBinaryMask3DMeshSourceIUS3MD3.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Attach the wrapped C methods to the IUS3MD3 proxy class.
itkBinaryMask3DMeshSourceIUS3MD3.Clone = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3_Clone, None, itkBinaryMask3DMeshSourceIUS3MD3)
itkBinaryMask3DMeshSourceIUS3MD3.SetObjectValue = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3_SetObjectValue, None, itkBinaryMask3DMeshSourceIUS3MD3)
itkBinaryMask3DMeshSourceIUS3MD3.GetNumberOfNodes = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3_GetNumberOfNodes, None, itkBinaryMask3DMeshSourceIUS3MD3)
itkBinaryMask3DMeshSourceIUS3MD3.GetNumberOfCells = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3_GetNumberOfCells, None, itkBinaryMask3DMeshSourceIUS3MD3)
itkBinaryMask3DMeshSourceIUS3MD3.SetInput = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3_SetInput, None, itkBinaryMask3DMeshSourceIUS3MD3)
itkBinaryMask3DMeshSourceIUS3MD3.SetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3_SetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUS3MD3)
itkBinaryMask3DMeshSourceIUS3MD3.GetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3_GetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUS3MD3)
# Register the proxy class with the SWIG runtime type system.
itkBinaryMask3DMeshSourceIUS3MD3_swigregister = _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3_swigregister
itkBinaryMask3DMeshSourceIUS3MD3_swigregister(itkBinaryMask3DMeshSourceIUS3MD3)
def itkBinaryMask3DMeshSourceIUS3MD3___New_orig__() -> "itkBinaryMask3DMeshSourceIUS3MD3_Pointer":
    """itkBinaryMask3DMeshSourceIUS3MD3___New_orig__() -> itkBinaryMask3DMeshSourceIUS3MD3_Pointer"""
    # Module-level alias of the class's __New_orig__ factory, kept by SWIG
    # for backward compatibility with older generated call sites.
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3___New_orig__()
def itkBinaryMask3DMeshSourceIUS3MD3_cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUS3MD3 *":
    """itkBinaryMask3DMeshSourceIUS3MD3_cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUS3MD3"""
    # Module-level alias of the class's static cast(); converts a generic
    # itkLightObject reference to this wrapped type via the extension module.
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MD3_cast(obj)
class itkBinaryMask3DMeshSourceIUS3MF3(itkImageToMeshFilterPython.itkImageToMeshFilterIUS3MF3):
    """Proxy of C++ itkBinaryMask3DMeshSourceIUS3MF3 class.

    SWIG-generated proxy; every method forwards to the compiled
    _itkBinaryMask3DMeshSourcePython extension module.  The input image type
    is itkImageUS3 (see SetInput).  Create instances with New(), not the
    constructor.
    """
    # SWIG ownership flag for the underlying C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs):
        # Wrapped ITK objects cannot be constructed directly; use New().
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def __New_orig__() -> "itkBinaryMask3DMeshSourceIUS3MF3_Pointer":
        """__New_orig__() -> itkBinaryMask3DMeshSourceIUS3MF3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    def Clone(self) -> "itkBinaryMask3DMeshSourceIUS3MF3_Pointer":
        """Clone(itkBinaryMask3DMeshSourceIUS3MF3 self) -> itkBinaryMask3DMeshSourceIUS3MF3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3_Clone(self)
    def SetObjectValue(self, _arg: 'unsigned short const') -> "void":
        """SetObjectValue(itkBinaryMask3DMeshSourceIUS3MF3 self, unsigned short const _arg)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3_SetObjectValue(self, _arg)
    def GetNumberOfNodes(self) -> "unsigned long long":
        """GetNumberOfNodes(itkBinaryMask3DMeshSourceIUS3MF3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3_GetNumberOfNodes(self)
    def GetNumberOfCells(self) -> "unsigned long long":
        """GetNumberOfCells(itkBinaryMask3DMeshSourceIUS3MF3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3_GetNumberOfCells(self)
    def SetInput(self, inputImage: 'itkImageUS3') -> "void":
        """SetInput(itkBinaryMask3DMeshSourceIUS3MF3 self, itkImageUS3 inputImage)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3_SetInput(self, inputImage)
    def SetRegionOfInterest(self, iRegion: 'itkImageRegion3') -> "void":
        """SetRegionOfInterest(itkBinaryMask3DMeshSourceIUS3MF3 self, itkImageRegion3 iRegion)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3_SetRegionOfInterest(self, iRegion)
    def GetRegionOfInterest(self) -> "itkImageRegion3 const &":
        """GetRegionOfInterest(itkBinaryMask3DMeshSourceIUS3MF3 self) -> itkImageRegion3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3_GetRegionOfInterest(self)
    # SWIG destructor hook for the wrapped C++ object.
    __swig_destroy__ = _itkBinaryMask3DMeshSourcePython.delete_itkBinaryMask3DMeshSourceIUS3MF3
    def cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUS3MF3 *":
        """cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUS3MF3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3_cast(obj)
    cast = staticmethod(cast)
    def New(*args, **kargs):
        """New() -> itkBinaryMask3DMeshSourceIUS3MF3
        Create a new object of the class itkBinaryMask3DMeshSourceIUS3MF3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBinaryMask3DMeshSourceIUS3MF3.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBinaryMask3DMeshSourceIUS3MF3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBinaryMask3DMeshSourceIUS3MF3.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Attach the wrapped C methods to the IUS3MF3 proxy class.
itkBinaryMask3DMeshSourceIUS3MF3.Clone = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3_Clone, None, itkBinaryMask3DMeshSourceIUS3MF3)
itkBinaryMask3DMeshSourceIUS3MF3.SetObjectValue = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3_SetObjectValue, None, itkBinaryMask3DMeshSourceIUS3MF3)
itkBinaryMask3DMeshSourceIUS3MF3.GetNumberOfNodes = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3_GetNumberOfNodes, None, itkBinaryMask3DMeshSourceIUS3MF3)
itkBinaryMask3DMeshSourceIUS3MF3.GetNumberOfCells = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3_GetNumberOfCells, None, itkBinaryMask3DMeshSourceIUS3MF3)
itkBinaryMask3DMeshSourceIUS3MF3.SetInput = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3_SetInput, None, itkBinaryMask3DMeshSourceIUS3MF3)
itkBinaryMask3DMeshSourceIUS3MF3.SetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3_SetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUS3MF3)
itkBinaryMask3DMeshSourceIUS3MF3.GetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3_GetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUS3MF3)
# Register the proxy class with the SWIG runtime type system.
itkBinaryMask3DMeshSourceIUS3MF3_swigregister = _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3_swigregister
itkBinaryMask3DMeshSourceIUS3MF3_swigregister(itkBinaryMask3DMeshSourceIUS3MF3)
def itkBinaryMask3DMeshSourceIUS3MF3___New_orig__() -> "itkBinaryMask3DMeshSourceIUS3MF3_Pointer":
    """itkBinaryMask3DMeshSourceIUS3MF3___New_orig__() -> itkBinaryMask3DMeshSourceIUS3MF3_Pointer"""
    # Module-level alias of the class's __New_orig__ factory, kept by SWIG
    # for backward compatibility with older generated call sites.
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3___New_orig__()
def itkBinaryMask3DMeshSourceIUS3MF3_cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUS3MF3 *":
    """itkBinaryMask3DMeshSourceIUS3MF3_cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUS3MF3"""
    # Module-level alias of the class's static cast(); converts a generic
    # itkLightObject reference to this wrapped type via the extension module.
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MF3_cast(obj)
class itkBinaryMask3DMeshSourceIUS3MSS3(itkImageToMeshFilterPython.itkImageToMeshFilterIUS3MSS3):
    """Proxy of C++ itkBinaryMask3DMeshSourceIUS3MSS3 class.

    SWIG-generated proxy; every method forwards to the compiled
    _itkBinaryMask3DMeshSourcePython extension module.  The input image type
    is itkImageUS3 (see SetInput).  Create instances with New(), not the
    constructor.
    """
    # SWIG ownership flag for the underlying C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs):
        # Wrapped ITK objects cannot be constructed directly; use New().
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def __New_orig__() -> "itkBinaryMask3DMeshSourceIUS3MSS3_Pointer":
        """__New_orig__() -> itkBinaryMask3DMeshSourceIUS3MSS3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    def Clone(self) -> "itkBinaryMask3DMeshSourceIUS3MSS3_Pointer":
        """Clone(itkBinaryMask3DMeshSourceIUS3MSS3 self) -> itkBinaryMask3DMeshSourceIUS3MSS3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3_Clone(self)
    def SetObjectValue(self, _arg: 'unsigned short const') -> "void":
        """SetObjectValue(itkBinaryMask3DMeshSourceIUS3MSS3 self, unsigned short const _arg)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3_SetObjectValue(self, _arg)
    def GetNumberOfNodes(self) -> "unsigned long long":
        """GetNumberOfNodes(itkBinaryMask3DMeshSourceIUS3MSS3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3_GetNumberOfNodes(self)
    def GetNumberOfCells(self) -> "unsigned long long":
        """GetNumberOfCells(itkBinaryMask3DMeshSourceIUS3MSS3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3_GetNumberOfCells(self)
    def SetInput(self, inputImage: 'itkImageUS3') -> "void":
        """SetInput(itkBinaryMask3DMeshSourceIUS3MSS3 self, itkImageUS3 inputImage)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3_SetInput(self, inputImage)
    def SetRegionOfInterest(self, iRegion: 'itkImageRegion3') -> "void":
        """SetRegionOfInterest(itkBinaryMask3DMeshSourceIUS3MSS3 self, itkImageRegion3 iRegion)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3_SetRegionOfInterest(self, iRegion)
    def GetRegionOfInterest(self) -> "itkImageRegion3 const &":
        """GetRegionOfInterest(itkBinaryMask3DMeshSourceIUS3MSS3 self) -> itkImageRegion3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3_GetRegionOfInterest(self)
    # SWIG destructor hook for the wrapped C++ object.
    __swig_destroy__ = _itkBinaryMask3DMeshSourcePython.delete_itkBinaryMask3DMeshSourceIUS3MSS3
    def cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUS3MSS3 *":
        """cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUS3MSS3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3_cast(obj)
    cast = staticmethod(cast)
    def New(*args, **kargs):
        """New() -> itkBinaryMask3DMeshSourceIUS3MSS3
        Create a new object of the class itkBinaryMask3DMeshSourceIUS3MSS3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBinaryMask3DMeshSourceIUS3MSS3.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBinaryMask3DMeshSourceIUS3MSS3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBinaryMask3DMeshSourceIUS3MSS3.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Attach the wrapped C methods to the IUS3MSS3 proxy class.
itkBinaryMask3DMeshSourceIUS3MSS3.Clone = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3_Clone, None, itkBinaryMask3DMeshSourceIUS3MSS3)
itkBinaryMask3DMeshSourceIUS3MSS3.SetObjectValue = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3_SetObjectValue, None, itkBinaryMask3DMeshSourceIUS3MSS3)
itkBinaryMask3DMeshSourceIUS3MSS3.GetNumberOfNodes = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3_GetNumberOfNodes, None, itkBinaryMask3DMeshSourceIUS3MSS3)
itkBinaryMask3DMeshSourceIUS3MSS3.GetNumberOfCells = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3_GetNumberOfCells, None, itkBinaryMask3DMeshSourceIUS3MSS3)
itkBinaryMask3DMeshSourceIUS3MSS3.SetInput = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3_SetInput, None, itkBinaryMask3DMeshSourceIUS3MSS3)
itkBinaryMask3DMeshSourceIUS3MSS3.SetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3_SetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUS3MSS3)
itkBinaryMask3DMeshSourceIUS3MSS3.GetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3_GetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUS3MSS3)
# Register the proxy class with the SWIG runtime type system.
itkBinaryMask3DMeshSourceIUS3MSS3_swigregister = _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3_swigregister
itkBinaryMask3DMeshSourceIUS3MSS3_swigregister(itkBinaryMask3DMeshSourceIUS3MSS3)
def itkBinaryMask3DMeshSourceIUS3MSS3___New_orig__() -> "itkBinaryMask3DMeshSourceIUS3MSS3_Pointer":
    """itkBinaryMask3DMeshSourceIUS3MSS3___New_orig__() -> itkBinaryMask3DMeshSourceIUS3MSS3_Pointer"""
    # Module-level alias of the class's __New_orig__ factory, kept by SWIG
    # for backward compatibility with older generated call sites.
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3___New_orig__()
def itkBinaryMask3DMeshSourceIUS3MSS3_cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUS3MSS3 *":
    """itkBinaryMask3DMeshSourceIUS3MSS3_cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUS3MSS3"""
    # Module-level alias of the class's static cast(); converts a generic
    # itkLightObject reference to this wrapped type via the extension module.
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MSS3_cast(obj)
class itkBinaryMask3DMeshSourceIUS3MUC3(itkImageToMeshFilterPython.itkImageToMeshFilterIUS3MUC3):
    """Proxy of C++ itkBinaryMask3DMeshSourceIUS3MUC3 class.

    SWIG-generated proxy; every method forwards to the compiled
    _itkBinaryMask3DMeshSourcePython extension module.  The input image type
    is itkImageUS3 (see SetInput).  Create instances with New(), not the
    constructor.
    """
    # SWIG ownership flag for the underlying C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs):
        # Wrapped ITK objects cannot be constructed directly; use New().
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def __New_orig__() -> "itkBinaryMask3DMeshSourceIUS3MUC3_Pointer":
        """__New_orig__() -> itkBinaryMask3DMeshSourceIUS3MUC3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    def Clone(self) -> "itkBinaryMask3DMeshSourceIUS3MUC3_Pointer":
        """Clone(itkBinaryMask3DMeshSourceIUS3MUC3 self) -> itkBinaryMask3DMeshSourceIUS3MUC3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3_Clone(self)
    def SetObjectValue(self, _arg: 'unsigned short const') -> "void":
        """SetObjectValue(itkBinaryMask3DMeshSourceIUS3MUC3 self, unsigned short const _arg)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3_SetObjectValue(self, _arg)
    def GetNumberOfNodes(self) -> "unsigned long long":
        """GetNumberOfNodes(itkBinaryMask3DMeshSourceIUS3MUC3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3_GetNumberOfNodes(self)
    def GetNumberOfCells(self) -> "unsigned long long":
        """GetNumberOfCells(itkBinaryMask3DMeshSourceIUS3MUC3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3_GetNumberOfCells(self)
    def SetInput(self, inputImage: 'itkImageUS3') -> "void":
        """SetInput(itkBinaryMask3DMeshSourceIUS3MUC3 self, itkImageUS3 inputImage)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3_SetInput(self, inputImage)
    def SetRegionOfInterest(self, iRegion: 'itkImageRegion3') -> "void":
        """SetRegionOfInterest(itkBinaryMask3DMeshSourceIUS3MUC3 self, itkImageRegion3 iRegion)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3_SetRegionOfInterest(self, iRegion)
    def GetRegionOfInterest(self) -> "itkImageRegion3 const &":
        """GetRegionOfInterest(itkBinaryMask3DMeshSourceIUS3MUC3 self) -> itkImageRegion3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3_GetRegionOfInterest(self)
    # SWIG destructor hook for the wrapped C++ object.
    __swig_destroy__ = _itkBinaryMask3DMeshSourcePython.delete_itkBinaryMask3DMeshSourceIUS3MUC3
    def cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUS3MUC3 *":
        """cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUS3MUC3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3_cast(obj)
    cast = staticmethod(cast)
    def New(*args, **kargs):
        """New() -> itkBinaryMask3DMeshSourceIUS3MUC3
        Create a new object of the class itkBinaryMask3DMeshSourceIUS3MUC3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBinaryMask3DMeshSourceIUS3MUC3.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBinaryMask3DMeshSourceIUS3MUC3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBinaryMask3DMeshSourceIUS3MUC3.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Attach the wrapped C methods to the IUS3MUC3 proxy class.
itkBinaryMask3DMeshSourceIUS3MUC3.Clone = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3_Clone, None, itkBinaryMask3DMeshSourceIUS3MUC3)
itkBinaryMask3DMeshSourceIUS3MUC3.SetObjectValue = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3_SetObjectValue, None, itkBinaryMask3DMeshSourceIUS3MUC3)
itkBinaryMask3DMeshSourceIUS3MUC3.GetNumberOfNodes = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3_GetNumberOfNodes, None, itkBinaryMask3DMeshSourceIUS3MUC3)
itkBinaryMask3DMeshSourceIUS3MUC3.GetNumberOfCells = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3_GetNumberOfCells, None, itkBinaryMask3DMeshSourceIUS3MUC3)
itkBinaryMask3DMeshSourceIUS3MUC3.SetInput = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3_SetInput, None, itkBinaryMask3DMeshSourceIUS3MUC3)
itkBinaryMask3DMeshSourceIUS3MUC3.SetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3_SetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUS3MUC3)
itkBinaryMask3DMeshSourceIUS3MUC3.GetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3_GetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUS3MUC3)
# Register the proxy class with the SWIG runtime type system.
itkBinaryMask3DMeshSourceIUS3MUC3_swigregister = _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3_swigregister
itkBinaryMask3DMeshSourceIUS3MUC3_swigregister(itkBinaryMask3DMeshSourceIUS3MUC3)
def itkBinaryMask3DMeshSourceIUS3MUC3___New_orig__() -> "itkBinaryMask3DMeshSourceIUS3MUC3_Pointer":
    """itkBinaryMask3DMeshSourceIUS3MUC3___New_orig__() -> itkBinaryMask3DMeshSourceIUS3MUC3_Pointer"""
    # Module-level alias of the class's __New_orig__ factory, kept by SWIG
    # for backward compatibility with older generated call sites.
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3___New_orig__()
def itkBinaryMask3DMeshSourceIUS3MUC3_cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUS3MUC3 *":
    """itkBinaryMask3DMeshSourceIUS3MUC3_cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUS3MUC3"""
    # Module-level alias of the class's static cast(); converts a generic
    # itkLightObject reference to this wrapped type via the extension module.
    return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUC3_cast(obj)
class itkBinaryMask3DMeshSourceIUS3MUS3(itkImageToMeshFilterPython.itkImageToMeshFilterIUS3MUS3):
    """Proxy of C++ itkBinaryMask3DMeshSourceIUS3MUS3 class.

    SWIG-generated proxy; every method forwards to the compiled
    _itkBinaryMask3DMeshSourcePython extension module.  The input image type
    is itkImageUS3 (see SetInput).  Create instances with New(), not the
    constructor.
    """
    # SWIG ownership flag for the underlying C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs):
        # Wrapped ITK objects cannot be constructed directly; use New().
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def __New_orig__() -> "itkBinaryMask3DMeshSourceIUS3MUS3_Pointer":
        """__New_orig__() -> itkBinaryMask3DMeshSourceIUS3MUS3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    def Clone(self) -> "itkBinaryMask3DMeshSourceIUS3MUS3_Pointer":
        """Clone(itkBinaryMask3DMeshSourceIUS3MUS3 self) -> itkBinaryMask3DMeshSourceIUS3MUS3_Pointer"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3_Clone(self)
    def SetObjectValue(self, _arg: 'unsigned short const') -> "void":
        """SetObjectValue(itkBinaryMask3DMeshSourceIUS3MUS3 self, unsigned short const _arg)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3_SetObjectValue(self, _arg)
    def GetNumberOfNodes(self) -> "unsigned long long":
        """GetNumberOfNodes(itkBinaryMask3DMeshSourceIUS3MUS3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3_GetNumberOfNodes(self)
    def GetNumberOfCells(self) -> "unsigned long long":
        """GetNumberOfCells(itkBinaryMask3DMeshSourceIUS3MUS3 self) -> unsigned long long"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3_GetNumberOfCells(self)
    def SetInput(self, inputImage: 'itkImageUS3') -> "void":
        """SetInput(itkBinaryMask3DMeshSourceIUS3MUS3 self, itkImageUS3 inputImage)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3_SetInput(self, inputImage)
    def SetRegionOfInterest(self, iRegion: 'itkImageRegion3') -> "void":
        """SetRegionOfInterest(itkBinaryMask3DMeshSourceIUS3MUS3 self, itkImageRegion3 iRegion)"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3_SetRegionOfInterest(self, iRegion)
    def GetRegionOfInterest(self) -> "itkImageRegion3 const &":
        """GetRegionOfInterest(itkBinaryMask3DMeshSourceIUS3MUS3 self) -> itkImageRegion3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3_GetRegionOfInterest(self)
    # SWIG destructor hook for the wrapped C++ object.
    __swig_destroy__ = _itkBinaryMask3DMeshSourcePython.delete_itkBinaryMask3DMeshSourceIUS3MUS3
    def cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUS3MUS3 *":
        """cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUS3MUS3"""
        return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3_cast(obj)
    cast = staticmethod(cast)
    def New(*args, **kargs):
        """New() -> itkBinaryMask3DMeshSourceIUS3MUS3
        Create a new object of the class itkBinaryMask3DMeshSourceIUS3MUS3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkBinaryMask3DMeshSourceIUS3MUS3.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkBinaryMask3DMeshSourceIUS3MUS3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkBinaryMask3DMeshSourceIUS3MUS3.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Attach the wrapped C methods to the IUS3MUS3 proxy class.
itkBinaryMask3DMeshSourceIUS3MUS3.Clone = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3_Clone, None, itkBinaryMask3DMeshSourceIUS3MUS3)
itkBinaryMask3DMeshSourceIUS3MUS3.SetObjectValue = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3_SetObjectValue, None, itkBinaryMask3DMeshSourceIUS3MUS3)
itkBinaryMask3DMeshSourceIUS3MUS3.GetNumberOfNodes = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3_GetNumberOfNodes, None, itkBinaryMask3DMeshSourceIUS3MUS3)
itkBinaryMask3DMeshSourceIUS3MUS3.GetNumberOfCells = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3_GetNumberOfCells, None, itkBinaryMask3DMeshSourceIUS3MUS3)
itkBinaryMask3DMeshSourceIUS3MUS3.SetInput = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3_SetInput, None, itkBinaryMask3DMeshSourceIUS3MUS3)
itkBinaryMask3DMeshSourceIUS3MUS3.SetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3_SetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUS3MUS3)
itkBinaryMask3DMeshSourceIUS3MUS3.GetRegionOfInterest = new_instancemethod(_itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3_GetRegionOfInterest, None, itkBinaryMask3DMeshSourceIUS3MUS3)
# Register the proxy class with the SWIG runtime type system.
itkBinaryMask3DMeshSourceIUS3MUS3_swigregister = _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3_swigregister
itkBinaryMask3DMeshSourceIUS3MUS3_swigregister(itkBinaryMask3DMeshSourceIUS3MUS3)
def itkBinaryMask3DMeshSourceIUS3MUS3___New_orig__() -> "itkBinaryMask3DMeshSourceIUS3MUS3_Pointer":
"""itkBinaryMask3DMeshSourceIUS3MUS3___New_orig__() -> itkBinaryMask3DMeshSourceIUS3MUS3_Pointer"""
return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3___New_orig__()
def itkBinaryMask3DMeshSourceIUS3MUS3_cast(obj: 'itkLightObject') -> "itkBinaryMask3DMeshSourceIUS3MUS3 *":
"""itkBinaryMask3DMeshSourceIUS3MUS3_cast(itkLightObject obj) -> itkBinaryMask3DMeshSourceIUS3MUS3"""
return _itkBinaryMask3DMeshSourcePython.itkBinaryMask3DMeshSourceIUS3MUS3_cast(obj)
def binary_mask3_d_mesh_source(*args, **kwargs):
"""Procedural interface for BinaryMask3DMeshSource"""
import itk
instance = itk.BinaryMask3DMeshSource.New(*args, **kwargs)
return instance.__internal_call__()
def binary_mask3_d_mesh_source_init_docstring():
import itk
import itkTemplate
if isinstance(itk.BinaryMask3DMeshSource, itkTemplate.itkTemplate):
binary_mask3_d_mesh_source.__doc__ = itk.BinaryMask3DMeshSource.values()[0].__doc__
else:
binary_mask3_d_mesh_source.__doc__ = itk.BinaryMask3DMeshSource.__doc__
| [
"274065539@qq.com"
] | 274065539@qq.com |
cbfdc2132564815458464e1f64c86110d7b3e056 | db3d4aa39bc6b3f521ba21afbfedd8164a68e4d5 | /asgiref/conformance_async.py | 5aeeeeacffef2d0bc4747777b7306d1a0c04b24e | [
"BSD-3-Clause"
] | permissive | EdwardBetts/asgiref | 808c55b5089d9c9d8ae33583b0a1728a6313f930 | 0ad52409735109a85238b5b068c77c0f4e60e59e | refs/heads/master | 2021-01-21T22:19:00.404420 | 2017-08-23T03:33:56 | 2017-08-23T03:33:56 | 102,147,619 | 0 | 0 | null | 2017-09-01T19:45:30 | 2017-09-01T19:45:30 | null | UTF-8 | Python | false | false | 743 | py | import asyncio
def test_receive_async(self):
"""
Tests that the asynchronous receive() method works.
"""
# Make sure we can run asyncio code
self.skip_if_no_extension("async")
try:
import asyncio
except ImportError:
raise unittest.SkipTest("No asyncio")
# Test that receive works
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(None)
@asyncio.coroutine
def test():
self.channel_layer.send("test_async", {"is it": "working"})
channel, message = yield from self.channel_layer.receive_async(["test_async"])
self.assertEqual(channel, "test_async")
self.assertEqual(message, {"is it": "working"})
self.loop.run_until_complete(test())
| [
"andrew@aeracode.org"
] | andrew@aeracode.org |
de6ff1b606ca0939e9cc25ea37d7b88e7f76c315 | b9b19792e1890b56679dc167fb99f9612af477f7 | /deeppy/graph/nodes.py | 17893ad9ede4ed472d8bf3fcd5e5d7a6a94a5bf0 | [
"MIT"
] | permissive | fullstackenviormentss/deeppy_experimental | 7990674a8eda0655671940d3baf25256af8a384b | dc06e294e37a30340c7d02ac12c4d00653baf96c | refs/heads/master | 2020-03-18T22:01:01.964338 | 2015-08-25T18:15:28 | 2015-08-25T18:15:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 667 | py | from ..base import Model, ParamMixin, PickleMixin, PhaseMixin
class Node(PhaseMixin, PickleMixin):
def _setup(self, **shapes):
pass
def fprop(self, **arrays):
pass
def bprop(self, **arrays):
pass
def out_shapes(self, **shapes):
pass
class SupervisedBatch(Node):
def __init__(self):
self.name = 'input'
pass
def _setup(self, x_shape, y_shape):
pass
def fprop(self, x, y):
return {'samples': x, 'labels': y}
def bprop(self, samples_grad, labels_grad):
pass
def out_shapes(self, x_shape, y_shape):
return {'samples': x_shape, 'labels': y_shape}
| [
"anders.bll@gmail.com"
] | anders.bll@gmail.com |
ec2c05a3ef7e22b1d5841dfc6f0483ca53a9c40d | 41911c73dec55eec3eac36b6d2e4346d949d2d96 | /snack machine.py | 12b1aefef3218920ce50c0e69db260c1c973573d | [] | no_license | mbarbour0/Practice | cff321a0e5e5090da266016f8e31a1902e1c5cb2 | 781cf1e10154f43adec59f736ef1716acba6c98d | refs/heads/master | 2021-09-07T17:02:37.501748 | 2018-02-26T14:02:35 | 2018-02-26T14:02:35 | 115,876,807 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 648 | py | sodas = ["Pepsi", "Cherry Coke Zero", "Sprite"]
chips = ["Doritos", "Fritos"]
candy = ["Snickers", "M&Ms", "Twizzlers"]
while True:
choice = input("Would you like a SODA, some CHIPS, or a CANDY? ").lower()
try:
if choice == "soda":
snack = sodas.pop()
elif choice == "chips":
snack = chips.pop()
elif choice == "candy":
snack = candy.pop()
else:
print("Sorry, I didn't understand that.")
continue
except IndexError:
print("We're all out of {}: Sorry!".format(choice))
else:
print("Here's your {}: {}".format(choice, snack)) | [
"matthew.barbour@icloud.com"
] | matthew.barbour@icloud.com |
d6064d20eaf987e05a303a1b57a32eb45ca259b1 | 2894904e1f4964ac627ac3d892684d8e1e4b5c6d | /plot_together.py | 2949b6927e5eb38e09f19b2762624149e2f880a1 | [] | no_license | kun0906/activity_recognition-1 | 5615a4048d53376e6f40a78ae1f766d08adef72d | 96b53fda3818ab9d00c0d865a76eb759df3e2bd0 | refs/heads/main | 2023-08-11T18:55:37.225360 | 2021-10-06T19:32:13 | 2021-10-06T19:32:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 845 | py | #minhui plot
fig, ax = plt.subplots(3, 1, figsize=(12, 6))
people = [1, 2, 3, 4, 5, 7, 8, 9]
for p in people:
xseries_total = x_tot[p]
yseries_total = y_tot[p]
zseries_total = z_tot[p]
ax[0].plot(timevec, xseries_total, marker=".", markersize=3)
ax[1].plot(timevec, yseries_total, marker=".", markersize=3)
ax[2].plot(timevec, zseries_total, marker=".", markersize=3)
ax[0].set_ylabel("x coordinate", fontsize=10)
ax[1].set_ylabel("y coordinate", fontsize=10)
ax[2].set_xlabel("frame number", fontsize=10)
ax[2].set_ylabel("z coordinate", fontsize=10)
labels = ["person 1", "person 2", "person 3", "person 4", "person 5", "person 7", "person 8",
"person 9"]
# labels_first5 = ["person 1", "person 2", "person 3", "person 4", "person 5"]
fig.legend(labels, loc='upper right')
fig.suptitle("put back item")
| [
"noreply@github.com"
] | kun0906.noreply@github.com |
79065302221ef451fff8e0c6027fe80e53c493f3 | 345a21611a97ab63d3fbea89c85a0e6968089354 | /icbc-sentiment/PeopleCN.py | 8831b16785c41cd4d450ad117ae2970c10517b40 | [] | no_license | ArP2018/WebCrawling | 3484294d53a786d385ed3099488c1812bade6ae5 | c92d104eaccdc9fa1507e621ce2c23bdb8df2b03 | refs/heads/master | 2021-05-16T19:07:17.822029 | 2020-03-31T03:14:05 | 2020-03-31T03:14:05 | 250,432,437 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,937 | py | # 爬取人民网 http://www.people.com.cn/
# author: Yin Yalin
import re
import threading
import time
import traceback
from bs4 import BeautifulSoup
from selenium.common.exceptions import TimeoutException, NoSuchElementException
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from Entity import Entity
from crawler_base import SentimentCrawler
from selenium.webdriver.support import expected_conditions as ec
from logger import CustomLogging, LogType
from utils import in_date_range, conv_pub_date
class PeopleCN(SentimentCrawler):
def __init__(self):
super().__init__()
self.url = 'http://search.people.com.cn/cnpeople/news/'
self.name = '人民网'
def crawl_main_page(self, keyword):
self.driver.get(self.url)
try:
self.wait.until(ec.presence_of_element_located((By.ID, 'keyword')))
except:
CustomLogging.log_to_file('人民网主页打开失败', LogType.ERROR)
self.driver.find_element_by_id('keyword').clear()
self.driver.find_element_by_id('keyword').send_keys(keyword + Keys.ENTER)
self.crawl_search_results()
def crawl_search_results(self):
exit_flag = 0
index = 0
while True:
try:
self.wait.until(ec.presence_of_element_located((By.CLASS_NAME, 'fr')))
except TimeoutException:
CustomLogging.log_to_file('人民网搜索结果页面加载失败', LogType.ERROR)
CustomLogging.log_to_file(traceback.format_exc(), LogType.ERROR)
break
try:
result_articles = self.driver.find_elements_by_xpath('//div[@class="fr w800"]//ul')
for each_article in result_articles:
item = Entity()
pub_date = each_article.find_elements_by_tag_name('li')[2].text
item.publish_date = re.search(re.compile(
'[1-9]\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])\s+(20|21|22|23|[0-1]\d):[0-5]\d:[0-5]\d'),
pub_date).group()
if not in_date_range(conv_pub_date(item.publish_date, 'peoplecn'), self.year_range):
exit_flag = 1
# 跳出for循环
break
item.title = each_article.find_element_by_tag_name('a').text
item.short_description = each_article.find_elements_by_tag_name('li')[1].text
if self.keyword not in item.short_description and self.keyword not in item.title:
continue
item.url = each_article.find_element_by_tag_name('a').get_attribute('href')
threading.Thread(target=self.download_and_save_item, args=(item,)).start()
except NoSuchElementException:
break
if exit_flag == 1:
break
try:
next_page = self.driver.find_element_by_xpath(
'//div[@class="show_nav_bar"]//a[contains(text(), "下一页")]')
next_page.click()
time.sleep(2)
except NoSuchElementException:
break
def parse_html(self, url, html):
bs = BeautifulSoup(html, 'lxml')
try:
full_content = bs.find('div', attrs={'id': 'rwb_zw'}).text # rwb_zw
return full_content
except Exception:
try:
full_content = bs.find('div', attrs={'class': re.compile('(show_text)|(con)|(gray box_text)')})
return full_content
except:
CustomLogging.log_to_file('页面解析错误: {0}|{1}'.format(self.name, url), LogType.ERROR)
return
if __name__ == '__main__':
test = PeopleCN()
test.start_crawl(('德勤', '德勤中国'), case_id=111)
| [
"yalin1012@126.com"
] | yalin1012@126.com |
1d165ff041aa75e01d583ed5e6a58ca9e318d535 | df8d37d670a1494e81eb5b90b30a245aee6d330a | /toolset.py | 066651f9a81410feaf7444670145d262f6514e97 | [] | no_license | SammieLJ/TDS-RPG-Python | 201ab0aa5a382b66a16c1e11c08116ae3e5326eb | 463032f4a2d98f03f242716626be2fd0dc36e32f | refs/heads/master | 2020-04-24T22:51:38.504127 | 2019-04-15T20:07:11 | 2019-04-15T20:07:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,721 | py | import sys
import random
def readFileNames(fileName):
text_file = open(fileName, "r")
lines = text_file.read().split()
text_file.close()
return lines
def random_names_from_list(nameList):
OnceOrTwice = random.choice([1, 2])
names = []
for index in range (0, OnceOrTwice):
names.append(random.choice(nameList))
return names
def two_dictionary_passwd_gen_banner():
two_dictionary_passwd_gen_banner = """
##############################################################
# PYTHON - Dictionary shuffler and Random Password Generetor #
##############################################################
# CONTACT #
##############################################################
# DEVELOPER : SAMMY 76 LJ #
# Mail Address : sammy76lj@gmail.com #
# DESC: Loads two dictionaries from file for shuffeling #
# DESC: Shuffles 1 or 2 Slovenian male and female name, #
# DESC: and adds rand. numbers to fill up min 25 chars #
# USAGE: Intended as internal tool, now it's o.s. #
##############################################################
"""
print(two_dictionary_passwd_gen_banner)
def get_arg(index):
try:
sys.argv[index]
except IndexError:
return ''
else:
return sys.argv[index]
def writeDownToFile(genList, fileName):
# Open a file
print("Write to file: " + fileName)
fo = open(fileName, "w+")
# Write sequence of lines at the end of the file.
fo.writelines( "%s\n" % item for item in genList )
# Close opend file
fo.close() | [
"samir.subasic@gmail.com"
] | samir.subasic@gmail.com |
7d1e1a3f3bcbb68d55b3ecb1da02f2067428146e | 11e05ac7640c269ec53af83c626691925b86e992 | /tf/cifar10/cifar10.py | 1baa1a7e797cd120ff0af81a43cc0158e2dee8d9 | [] | no_license | NeverMoes/practice | 1b921b9ea165a811cb33ff5d5c04bd1dfaaadca1 | 9e164bb176faaae99c12aeccaea7bb94de795d5d | refs/heads/master | 2021-01-19T23:26:25.268525 | 2017-05-10T03:27:40 | 2017-05-10T03:27:40 | 88,984,951 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,345 | py | import tensorflow as tf
import dataloader.path
import cifar10_input
max_steps = 100
batch_size = 120
data_dir = dataloader.path.CIFAR10
def variable_with_weight_loss(shape, stddev, wl):
# 对权重进行正则化
var = tf.Variable(tf.truncated_normal(shape, stddev=stddev))
if wl is not None:
weight_loss = tf.multiply(tf.nn.l2_loss(var), wl, name='weight_loss')
# 加入集合最后在计算cost的时候使用
tf.add_to_collection('losses', weight_loss)
return var
# 数据增强操作
images_train, labels_train = cifar10_input.distorted_inputs(data_dir=data_dir, batch_size=batch_size)
# 测试数据生成
images_test, labels_test = cifar10_input.inputs(eval_data=True,
data_dir=data_dir,
batch_size=batch_size)
# 输入
image_holder = tf.placeholder(tf.float32, [batch_size, 24, 24, 3])
label_holder = tf.placeholder(tf.int32, [batch_size])
# 第一层 #
# 第一层卷积核参数
weight1 = variable_with_weight_loss(shape=[5, 5, 3, 64], stddev=5e-2, wl=0.0)
kernel1 = tf.nn.conv2d(image_holder, weight1, [1, 1, 1,1], padding='SAME')
bias1 = tf.Variable(tf.constant(0.0, shape=[64]))
# 激活结果
conv1 = tf.nn.relu(tf.nn.bias_add(kernel1, bias1))
# 池化
pool1 = tf.nn.max_pool(conv1, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME')
# LRN层
norm1 = tf.nn.lrn(pool1, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75)
# 第二层 #
weight2 = variable_with_weight_loss(shape=[5, 5, 64, 64], stddev=5e-2, wl=0.0)
kernel2 = tf.nn.conv2d(norm1, weight2, [1, 1, 1,1], padding='SAME')
bias2 = tf.Variable(tf.constant(0.1, shape=[64]))
conv2 = tf.nn.relu(tf.nn.bias_add(kernel2, bias2))
# 调换了LRN和pool层的顺序
norm2 = tf.nn.lrn(conv2, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75)
pool2 = tf.nn.max_pool(norm2, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME')
# 全连接层 #
# flatten处理
reshape = tf.reshape(pool2, [batch_size, -1])
dim = reshape.get_shape()[1].value
weight3 = variable_with_weight_loss(shape=[dim, 384], stddev=0.04, wl=0.004)
bias3 = tf.Variable(tf.constant(0.1, shape=[384]))
local3 = tf.nn.relu(tf.matmul(reshape, weight3) + bias3)
weight4 = variable_with_weight_loss(shape=[384, 192], stddev=0.04, wl=0.004)
bias4 = tf.Variable(tf.constant(0.1, shape=[192]))
local4 = tf.nn.relu(tf.matmul(local3, weight4) + bias4)
# 输出层
weight5 = variable_with_weight_loss(shape=[192, 10], stddev=1/192.0, wl=0.0)
bias5 = tf.Variable(tf.constant(0.0, shape=[10]))
logits = tf.add(tf.matmul(local4, weight5), bias5)
def loss(logits, labels):
labels = tf.cast(labels, tf.int64)
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
logits=logits, labels=labels, name='cross_entropy_per_example')
cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
tf.add_to_collection('losses', cross_entropy_mean)
return tf.add_n(tf.get_collection('losses'), name='total_loss')
loss = loss(logits, label_holder)
train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)
# 输出分数最高那一类的准确率
top_k_op = tf.nn.in_top_k(logits, label_holder, 1)
# 启用session并初始化所有参数模型
sess = tf.InteractiveSession()
tf.global_variables_initializer().run()
# 启动图片数据增强的队列
tf.train.start_queue_runners()
# 开始训练
for step in range(max_steps):
image_batch, label_batch = sess.run([images_train, labels_train])
_, loss_value = sess.run([train_op, loss],
feed_dict={image_holder: image_batch, label_holder:label_batch})
if step % 10 == 0:
print('step: {step}, loss_value:{loss}'.format(step=step, loss=loss_value))
# 计算测试集准确率
num_examples = 10000
import math
import numpy as np
num_iter = int(math.ceil(num_examples / batch_size))
true_count = 0
total_sample_count = num_iter * batch_size
step = 0
while step < num_iter:
image_batch, label_batch = sess.run([images_test, labels_test])
predictions = sess.run([top_k_op], feed_dict={image_holder: image_batch,
label_holder: label_batch})
true_count += np.sum(predictions)
step += 1
precision = true_count / total_sample_count
print(precision)
| [
"nevermoes@gmail.com"
] | nevermoes@gmail.com |
98809dfea4ff4dba9a3ba0d6f49603d5b7cd8938 | f1d67722dcd4c2209eedc0a61e5ea0ee27c95470 | /examples/farmer/farmer_ama.py | 00a79662b473eef48f1d277a7ec361a36bbfb408 | [] | no_license | wangcj05/mpi-sppy | 08204019b466da5e0812b16dd5cb53da1bdbd793 | 42aff4c11dc42fcba8a9520da00e48c6e9ab7d85 | refs/heads/main | 2023-08-25T04:36:58.606490 | 2021-11-01T21:40:14 | 2021-11-01T21:40:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 951 | py | # Copyright 2021 by B. Knueven, D. Mildebrath, C. Muir, J-P Watson, and D.L. Woodruff
# This software is distributed under the 3-clause BSD License.
"""
An example of using amalgomator and solving directly the EF
To execute this:
python farmer_ama.py --num-scens=10 --crops-multiplier=3 --farmer-with-integer
WARNING:
num-scens must be specified !
"""
import mpisppy.utils.amalgomator as amalgomator
def main():
solution_files = {"first_stage_solution":"farmer_first_stage.csv",
}
ama_options = {"EF-2stage": True, # We are solving directly the EF
"write_solution":solution_files}
#The module can be a local file
ama = amalgomator.from_module("afarmer", ama_options)
ama.run()
print("first_stage_solution=", ama.first_stage_solution)
print("inner bound=", ama.best_inner_bound)
print("outer bound=", ama.best_outer_bound)
if __name__ == "__main__":
main() | [
"noreply@github.com"
] | wangcj05.noreply@github.com |
410d41b69f42c4a59e75e20e031f715661df380f | 0ae0acc1173692ea49127fb00676b8a41aa6b2f2 | /cmpreqs/compare_requirements.py | 0838f1e89dd93204082586299a68fc0cd1fa4ba8 | [
"MIT"
] | permissive | pombredanne/compare-requirements | c796cca4a428fa680dcf021b152ea99a59d6e956 | 1f95c1779234a65e0f0e732b5fd728c159508aff | refs/heads/master | 2021-06-22T19:08:03.429884 | 2017-01-16T14:32:03 | 2017-01-16T14:32:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,882 | py | #!/usr/bin/env python
import subprocess
from io import StringIO
def get_version(line):
if 'git+' in line or 'hg+' in line:
return line.split('@')[-1].split('#')[0]
else:
d = line.split('==')
if len(d) > 1:
return d[1]
else:
return
def get_package_name(line):
if 'git+' in line or 'hg+' in line:
return line.split('#egg=')[1]
else:
return line.split('==')[0]
def get_requirements(indata):
requirements = {}
lines = indata.readlines()
for line in lines:
if line.startswith('#'):
continue
line = line.replace('\n', '')
line = line.replace('\r', '')
requirements[get_package_name(line)] = get_version(line)
return requirements
def req_not_in(requirements1, requirements2):
return {key: requirements1[key] for key in list(set(requirements1) - set(requirements2))}
def print_columns(rows, titles=False):
rows = [titles] + rows if titles else rows
widths = [max(map(len, map(str, col))) for col in zip(*rows)]
if titles:
rows = [rows[0]] + [['-' * width for width in widths]] + rows[1:]
for row in rows:
print(" ".join((str(val).ljust(width) for val, width in zip(row, widths))))
def get_input_title(indata, index=None):
return getattr(indata, 'name', 'Input {}'.format(index or '?'))
def print_title(name):
print()
print(name)
print('=' * len(name))
def compare(input1, input2):
requirements1 = get_requirements(input1)
requirements2 = get_requirements(input2)
title1, title2 = get_input_title(input1, 1), get_input_title(input2, 2)
reqs = {name: (version, requirements2[name]) for name, version
in requirements1.items() if name in requirements2}
print_title('Different dependencies')
print_columns([[name, ver[0], ver[1]] for name, ver in reqs.items()
if ver[0] != ver[1]], ['Name', title1, title2])
print_title('Equal dependencies')
print_columns([[name, ver[0]] for name, ver in reqs.items()
if ver[0] == ver[1]], ['Name', 'Version'])
not_in = {title2: [requirements2, requirements1], title1: [requirements1, requirements2]}
for file, reqs in not_in.items():
print_title('Only available on {}'.format(file))
print_columns([[name, ver] for name, ver in req_not_in(*reqs).items()], ['Name', 'Version'])
def read_freeze():
return StringIO(subprocess.check_output(['pip', 'freeze']).decode('utf-8'))
def read_pipdeptree():
data = subprocess.check_output(['pipdeptree']).decode('utf-8')
data = [x for x in data.split('\n') if x and not x[0] in ['-', ' ', '*']]
return StringIO('\n'.join(data))
if __name__ == '__main__':
import sys
compare(open(sys.argv[1]), open(sys.argv[2]))
| [
"nekmo@alsur.es"
] | nekmo@alsur.es |
d3ff67e4264d3034d5ceb38352781f2f524189c2 | e2e1da4f241c59d0da8d6c267ea876adfd457f50 | /features/environment.py | 8c26f892e82fed577d3786edc5882b7c6398c0ba | [] | no_license | one-focus/python-selenium-behave | 51751af1875353750159226ffaa3f477bbc711c7 | 3a08ae526f02a5f27a8f32a48801a0ab77ddd650 | refs/heads/main | 2023-01-13T17:38:28.036398 | 2020-11-19T08:57:35 | 2020-11-19T08:57:35 | 312,779,333 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,285 | py | import configparser
import allure
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
# TODO check all context attributes on https://behave.readthedocs.io/en/latest/context_attributes.html#user-attributes
def before_all(context):
# chrome_options = Options()
# chrome_options.add_argument('--headless')
# context.driver = webdriver.Chrome(options=chrome_options)
capabilities = {
'browserName': 'chrome',
'version': '86.0',
'enableVNC': True,
'enableVideo': True
}
context.driver = webdriver.Remote(
command_executor='http://159.65.133.63:4444/wd/hub',
desired_capabilities=capabilities)
context.driver.implicitly_wait(5)
# read config
parser = configparser.ConfigParser()
parser.read('behave.ini')
context.config = parser
def before_scenario(context, scenario):
context.driver.delete_all_cookies()
def after_step(context, step) -> None:
if step.status == "failed":
try:
allure.attach(context.driver.get_screenshot_as_png(),
name="bug.png",
attachment_type=allure.attachment_type.PNG)
except:
pass
def after_all(context):
context.driver.quit()
| [
"sash.kardash@gmail.com"
] | sash.kardash@gmail.com |
378fa6b25eaa9e9486b6c7a12858055e6e0933d7 | e7e8649259d4aeb86445094099d72a051c4d13f6 | /djangoPr/test_git.py | c4de2cf598c64f88555e7a1208fa9c98f629640a | [] | no_license | shadowstarser/testP | 63fe12549dc75515fc246ab89230679490621045 | 5c201064d4391c7c59549e70cd033167a50c83c7 | refs/heads/main | 2023-06-23T01:41:01.144360 | 2021-07-19T16:27:42 | 2021-07-19T16:27:42 | 386,978,699 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26 | py | # test file for commit 555 | [
"shadowstarser@gmail.com"
] | shadowstarser@gmail.com |
8d10e7491951c0bd46c4a395dc2122f14caff8ef | 05effb8a3118d9ea458c2155daf659e73e91b15c | /ann_app/green_card_lot/green_card_lot/asgi.py | 152239c5d31cf19b6ea93279a0bdf68829482a65 | [] | no_license | Annahuljakevych/WebLabs | 01130c2327edf0cf35e25b9d9b6b85c034b3ee42 | 03e8f63f25b011092d31f4a79c318d86c2f6244e | refs/heads/master | 2021-01-08T16:44:02.953568 | 2020-12-18T00:06:37 | 2020-12-18T00:06:37 | 242,082,654 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | """
ASGI config for green_card_lot project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'green_card_lot.settings')
application = get_asgi_application()
| [
"61308675+Annahuljakevych@users.noreply.github.com"
] | 61308675+Annahuljakevych@users.noreply.github.com |
a1429e362075e48b30a0d426f80a88982dde38b1 | 190442b08588d62a154c9f56e13ba334246a870c | /configs/styleganv2/stylegan2_c2_lsun-car_384x512_b4x8.py | 2807777b140011dbe488a8ab3f185c823997efc0 | [
"Apache-2.0"
] | permissive | youtang1993/mmgeneration | a6260c65075279d2a04959035fed1597469d05e6 | 0169a9a0c4674229b2c3d3c386c98fd91fce8fa1 | refs/heads/master | 2023-04-12T14:55:57.908986 | 2021-04-28T09:06:42 | 2021-04-28T09:06:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,358 | py | _base_ = [
'../_base_/datasets/lsun-car_pad_512.py',
'../_base_/models/stylegan2_base.py', '../_base_/default_runtime.py'
]
model = dict(generator=dict(out_size=512), discriminator=dict(in_size=512))
data = dict(
samples_per_gpu=4,
train=dict(dataset=dict(imgs_root='./data/lsun/images/car')),
val=dict(imgs_root='./data/lsun/images/car'))
ema_half_life = 10. # G_smoothing_kimg
custom_hooks = [
dict(
type='VisualizeUnconditionalSamples',
output_dir='training_samples',
interval=5000),
dict(
type='ExponentialMovingAverageHook',
module_keys=('generator_ema', ),
interval=1,
interp_cfg=dict(momentum=0.5**(32. / (ema_half_life * 1000.))),
priority='VERY_HIGH')
]
checkpoint_config = dict(interval=10000, by_epoch=False, max_keep_ckpts=40)
lr_config = None
total_iters = 1800002
metrics = dict(
fid50k=dict(type='FID', num_images=50000, bgr2rgb=True),
pr50k3=dict(type='PR', num_images=50000, k=3),
ppl_wend=dict(type='PPL', space='W', sampling='end', num_images=50000))
evaluation = dict(
type='GenerativeEvalHook',
interval=10000,
metrics=dict(
type='FID',
num_images=50000,
inception_pkl='work_dirs/inception_pkl/lsun-car-512-50k-rgb.pkl',
bgr2rgb=True),
sample_kwargs=dict(sample_model='ema'))
| [
"yangyfaker@gmail.com"
] | yangyfaker@gmail.com |
339ed0442b7204075d1602bcd7c43bdf220c2b5e | ecf58d3880af1ae484644748579cc77ea54428bd | /codility/triangle_triplet.py | 09e9ca69e9b0ba4968555f03e4a42c03df22eebe | [] | no_license | vuamitom/Code-Exercises | 0601d186f4f505024d3f46b5c95a1c165d195830 | 3cc56d2d25c7b0832bd6e01154112bfd88d0ec52 | refs/heads/master | 2022-12-11T01:49:20.400506 | 2020-04-15T08:25:40 | 2020-04-15T08:25:40 | 8,831,614 | 13 | 14 | null | 2022-12-08T18:17:26 | 2013-03-17T07:59:58 | Jupyter Notebook | UTF-8 | Python | false | false | 1,576 | py | """
A zero-indexed array A consisting of N integers is given. A triplet (P, Q, R) is triangular if 0 ≤ P < Q < R < N and:
A[P] + A[Q] > A[R],
A[Q] + A[R] > A[P],
A[R] + A[P] > A[Q].
For example, consider array A such that:
A[0] = 10 A[1] = 2 A[2] = 5
A[3] = 1 A[4] = 8 A[5] = 20
Triplet (0, 2, 4) is triangular.
Write a function:
def solution(A)
that, given a zero-indexed array A consisting of N integers, returns 1 if there exists a triangular triplet for this array and returns 0 otherwise.
For example, given array A such that:
A[0] = 10 A[1] = 2 A[2] = 5
A[3] = 1 A[4] = 8 A[5] = 20
the function should return 1, as explained above. Given array A such that:
A[0] = 10 A[1] = 50 A[2] = 5
A[3] = 1
the function should return 0.
Assume that:
N is an integer within the range [0..100,000];
each element of array A is an integer within the range [−2,147,483,648..2,147,483,647].
Complexity:
expected worst-case time complexity is O(N*log(N));
expected worst-case space complexity is O(N), beyond input storage (not counting the storage required for input arguments).
Elements of input arrays can be modified.
"""
# you can use print for debugging purposes, e.g.
# print "this is a debug message"
def solution(A):
# write your code in Python 2.7
A.sort()
for i in xrange(0, len(A)):
j = len(A)- i -1
if j >= 2:
ap, aq, ar = A[j], A[j-1], A[j-2]
if ap < aq + ar:
# check
return 1
else:
return 0
| [
"tamvu@Tams-MacBook-Pro-9.local"
] | tamvu@Tams-MacBook-Pro-9.local |
7ed4820a7660c408e148bacbb5f423e52d2c67b9 | 0dd26a11895cd96f30ca02fad8dac7469574fdd6 | /feature_manager/FeatureManager_class.py | eac4d566bf1a2e6c7659baa14237efe36dec0d15 | [] | no_license | marc22alain/G-code-repositories | 6612d443962eb07a2139490595c0354c8ae376a2 | 4947dddd4942299314eeebb2f4895aa4a372ebf3 | refs/heads/master | 2022-02-01T15:16:13.906770 | 2021-04-25T01:57:22 | 2021-04-25T01:58:12 | 41,597,149 | 0 | 0 | null | 2021-04-11T20:33:18 | 2015-08-29T15:05:58 | Python | UTF-8 | Python | false | false | 2,154 | py | from AbstractFeatureManager_class import AbstractFeatureManager
from machines import SimpleMachine
from workpieces import SimpleWorkpiece
import json
class FeatureManager(AbstractFeatureManager):
"""Feature manager for the app."""
def __init__(self, view_space=None):
self.machine = SimpleMachine(self)
self.work_piece = SimpleWorkpiece(self, view_space)
self.view_space = view_space
self.g_code = None
AbstractFeatureManager.__init__(self)
def deleteChild(self, feature):
"""Delete child from app's list and from own list."""
self.app.feature_list.removeFeature(feature)
self.features.remove(feature)
def getGCode(self):
"""Get gcode from all app's features."""
# wrapping the features' gcode:
self.g_code = self.machine.setUpProgram()
for feature in self.features:
self.g_code += feature.getGCode()
self.g_code += self.machine.endProgram()
return self.g_code
def changeViewPlane(self):
"""Change view plane in response to user's selection."""
self.work_piece.drawGeometry()
for feature in self.features:
feature.changeViewPlane()
def reDrawAll(self):
"""Generic trigger for redrawing all feature geometry."""
for feature in self.features:
feature.drawGeometry()
def saveFeatureConfigs(self, file_name):
"""Saves the composition of the Feature Manager to disk.
Current format is JSON."""
collection = self.genFeatureCollection()
file = open(file_name + '.json', 'w')
json.dump(collection, file)
def genFeatureCollection(self):
"""Creates a dict representation of the FeatureManager's composition."""
collection = { 'machine': {}, 'work_piece': {}, 'features': [] }
collection['machine'] = self.machine.getRepresentationForCollection()
collection['work_piece'] = self.work_piece.getRepresentationForCollection()
for feat in self.features:
collection['features'].append(feat.getRepresentationForCollection())
return collection
| [
"marc22alain@users.noreply.github.com"
] | marc22alain@users.noreply.github.com |
4ede039a5f8e824cee79fba2efaf8cbcedf0a1bc | 11195ea809c363f834f3fb31eb7de26437e2eb53 | /course3/reachability.py | a1a09b13ad880b57067f789a2d3918fe4ab64d7b | [
"MIT"
] | permissive | ropable/algorithmic_toolbox | e8d517dbc00541ef10fdc8c3e586194ebbd1b30b | b4dcf4fda19c394da2baa6eced0732bf50585237 | refs/heads/master | 2021-09-09T12:15:37.378207 | 2018-03-16T01:58:41 | 2018-03-16T01:58:41 | 110,786,531 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,117 | py | # python3
import sys
def reach(adj, x, y):
# Determine if x can reach y by exploring all of the nodes that x can reach.
visited = [False] * len(adj) # List of all the edges, and whether they have been visited.
return explore(adj, x, y, visited)
def explore(adj, x, y, visited):
# Explore each edge pair.
if x == y: # Nodes are the same: we've reached y.
return 1
visited[x] = True
for i in range(len(adj[x])):
if not visited[adj[x][i]]: # Recurse into each node of the pair.
if explore(adj, adj[x][i], y, visited):
return 1
return 0
if __name__ == '__main__':
input = sys.stdin.read()
data = list(map(int, input.split()))
n, m = data[0:2] # No. of vertices and edges.
data = data[2:]
edges = list(zip(data[0:(2 * m):2], data[1:(2 * m):2]))
x, y = data[2 * m:] # u and v - is there a path between these?
x, y = x - 1, y - 1 # They are zero-indexed.
adj = [[] for _ in range(n)]
for (a, b) in edges:
adj[a - 1].append(b - 1)
adj[b - 1].append(a - 1)
print(reach(adj, x, y))
| [
"ashley@ropable.com"
] | ashley@ropable.com |
ba833629540aef748023c997802be077ac85a4ca | 8a185473649cb91500d6689fa06489f3187e6378 | /Prey-RK.py | 001d7e8c5122bfb750817994f397f986135958f9 | [] | no_license | Luis7523/Scientific-computting-hw | fbc4cf800c82db8038273a8bfbb34f2318fad88e | e69deb5a07a4d34e2bd02889d21168cf850afd26 | refs/heads/master | 2016-08-12T18:09:48.407141 | 2016-04-21T16:32:14 | 2016-04-21T16:32:14 | 51,376,797 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 690 | py | #Marzo 8 cosa esta para la relacion entre depredador y preza
#Luis Miguel Parra Rueda
import numpy
import matplotlib
import matplotlib.pyplot
x0 = 15
y0 = 100
Ky = 2.0
Kyx = 0.01
Kx = 1.06
Kxy = 0.01
delta = 0.1
y = range(101)
x = range(101)
t = range(101)
def f(x):
return (Kyx * y0 * x) - (Kx * x)
def f(y):
return (Ky * y) - (Kxy * x0 * y)
for i in range(101):
x1 = x0 + delta * f(x0 + 1/2 * delta * f(x0))
x0 = x1
x[i] = x1
y1 = y0 + delta * f(y0 + 1/2 * delta * f(y0))
y0 = y1
y[i] = y1
print x
print y
Xnew = numpy.array(x)
Ynew = numpy.array(y)
print Xnew
print Ynew
matplotlib.pyplot.plot(Ynew,Xnew)
matplotlib.pyplot.plot(Xnew,Ynew)
matplotlib.pyplot.show()
| [
"luismi0101@gmail.com"
] | luismi0101@gmail.com |
793f9e73ba5caba9ede7ba2ad51bf2b38a7c59fe | 1c9957d5b3e59fc8f2e04dc45a171a65d86b191a | /mm.py | cd0134b8e4d93b7972175691d42943c5b5d86a48 | [] | no_license | SantaDiver/mapReduce | 2d5e2313fbe2f129a55ba6aa65b7008740f3ce01 | ac27f782d8d591f58caa5f8e6e694694744745c4 | refs/heads/master | 2021-05-11T22:02:47.013363 | 2018-01-15T02:41:12 | 2018-01-15T02:41:12 | 117,483,819 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,759 | py | #!/usr/bin/python
import sys
import pandas
from nltk.tokenize import RegexpTokenizer
import mincemeat
import csv
import argparse
RESULTS_DIRECTORY = './results/'
import os
directory = os.path.dirname(RESULTS_DIRECTORY)
try:
os.stat(RESULTS_DIRECTORY)
except:
os.mkdir(RESULTS_DIRECTORY)
PATH_TO_RESULT = RESULTS_DIRECTORY + 'Matrix.csv'
DATA_DIR = './matrix/'
PASSWROD = 'dontforgetme'
# Read data
parser = argparse.ArgumentParser()
parser.add_argument('-m', help='csv file with first matricies', required=True)
parser.add_argument('-n', help='matricies size', required=True)
args = parser.parse_args()
PATH_TO_M = DATA_DIR + args.m
N = int(args.n)
data = {}
key=0
with open(PATH_TO_M, 'r') as file:
rows = csv.DictReader(file, delimiter=',')
for row in rows:
data[key] = row
data[key]['n'] = N
key += 1
# Server params
def mapfn(k, v):
for i in range(v['n']):
if v['matrix'] == 'A':
yield (int(v['row']), i) , (int(v['col']), int(v['val']))
else:
yield (i, int(v['col'])) , (int(v['row']), int(v['val']))
def reducefn(key, vals):
res = 0
used = {}
for val in vals:
if val[0] in used:
res += used[val[0]] * val[1]
else:
used[val[0]] = val[1]
return res%97
server = mincemeat.Server()
server.mapfn = mapfn
server.reducefn = reducefn
server.datasource = data
print('Server is now running...')
result = server.run_server(password=PASSWROD)
# Write result
with open(PATH_TO_RESULT, 'w') as csvFile:
csvFile.write(','.join(['matrix', 'row', 'col', 'val']) + '\n')
for key,value in result.iteritems():
csvFile.write('c,' + str(key[0]) + ',' + str(key[1]) + ',' + \
str(value) + '\n')
| [
"nik7101@yandex.ru"
] | nik7101@yandex.ru |
6c3755b67cc4a7bbc79be33a27724d07313eaf07 | 72f1a2515222f6e5475fbf2f7d115090b32685b5 | /minipnm/__init__.py | 6c483278c5cb9d20c6108a8c12e1d661c119d1ea | [
"MIT"
] | permissive | RodericDay/MiniPNM | a654970ee23bd9cc9c2cab484efa4bf63c251db8 | c0f89780da96fe7573f78752e8420d344fcc0c46 | refs/heads/master | 2020-05-17T08:43:07.739483 | 2015-04-02T22:00:40 | 2015-04-02T22:00:40 | 18,856,112 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 459 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
__title__ = 'minipnm'
__version__ = '0.1.0'
__author__ = 'PMEAL'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 PMEAL'
from .network import *
from .graphics import Scene
from .misc import *
from .utils import *
from .binary import *
from .algorithms import *
from . import simulations
from . import geometry
from . import image
from . import graphics
from . import models
from . import gui
| [
"roderic.day@gmail.com"
] | roderic.day@gmail.com |
f4defd7c3886056da601cb49f0c8fb7fe70a0fc3 | c4828ad9d88dab72c8957ea0e36b3a37751724ce | /apps/organization/migrations/0008_teacher_course_nums.py | 55b3f4267d73074081416fc97e319f0078705c7e | [] | no_license | laosuaidami/MxOline | be7d5a991427110478095fd1ac744533dc3015e3 | eeb5323cf7f76e1a28a05b3c508a67788cd4e0ce | refs/heads/master | 2020-04-08T17:48:36.571259 | 2018-12-09T06:54:50 | 2018-12-09T06:54:50 | 159,581,772 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 478 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-11-26 11:18
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('organization', '0007_teacher_image'),
]
operations = [
migrations.AddField(
model_name='teacher',
name='course_nums',
field=models.IntegerField(default=0, verbose_name='课程数'),
),
]
| [
"hewei@penguinsinnovate.com"
] | hewei@penguinsinnovate.com |
505cbf08fefd6c5cd0a65def56f233d67ca7365d | d854dbbd23e0d97b4a2d38445ae6694723eed15e | /models/Car.py | 005bab4fa6927980d69eca854da9bdc3d6de4d60 | [] | no_license | saradogg95/peppbilar | 0d50d2d0839f9c9c2d159f54f9fd1e6e2bce8b3d | c4ccc4989944ca14282e46221c25c26720172ef4 | refs/heads/master | 2020-04-09T11:47:31.307866 | 2018-12-14T19:26:21 | 2018-12-14T19:26:21 | 160,323,824 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,310 | py | class Car:
def __init__(self, reg_num="0", brand="", model="", category="",
category_price="", registration_date="", mileage="0"):
self.__reg_num = reg_num.upper()
self.__brand = brand.upper()
self.__model = model.upper()
self.__category = category.upper()
self.__registration_date = registration_date.upper()
self.__mileage = mileage.upper()
self.__category_price = category_price.upper()
def __str__(self):
car_info = self.__brand + " " + self.__model + ", " + self.__registration_date
return "{:<15s}{:<45s}{:<15s}{:<15s}".format(self.__reg_num, car_info, self.__category,
self.__category_price)
def get_reg_num(self):
return self.__reg_num
def get_brand(self):
return self.__brand
def get_category(self):
return self.__category
def get_model(self):
return self.__model
def get_category_price(self):
return self.__category_price
def get_registration_date(self):
return self.__registration_date
def get_mileage(self):
return self.__mileage
def set_mileage(self, mileage):
self.__mileage = mileage
| [
"sveppagreifinn@gmail.com"
] | sveppagreifinn@gmail.com |
f8b141b97813164e7f7a1cca232e9e9d9ad3eede | f0a6ce24b9bb306d8f7686033b6fd85ea2ab1f51 | /Day 1-Data Types/DataTypes.py | ab4df649f41f77ef764c5bbe6a5a4cf590e89c3b | [] | no_license | Aadi2001/HackerRank30DaysChallenge | d282d4da88009537d38a5f1bf1d7c692015be9a0 | 9fd8040f210bfe6b79909aa1e7cb7bb19c79cdc2 | refs/heads/main | 2023-06-20T14:38:10.627828 | 2021-07-22T06:16:51 | 2021-07-22T06:19:55 | 388,338,754 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,488 | py | '''
Task Complete the code in the editor below. The variables i, d, and s are
already declared and initialized for you. You must:
Declare 3 variables: one of type int, one of type double, and one of type
String. Read 3 lines of input from stdin (according to the sequence given in
the Input Format section below) and initialize your 3 variables. Use the +
operator to perform the following operations: Print the sum of i plus your
int variable on a new line. Print the sum of d plus your double variable to a
scale of one decimal place on a new line. Concatenate s with the string you
read as input and print the result on a new line. Note: If you are using a
language that doesn't support using + for string concatenation (e.g.: C), you
can just print one variable immediately following the other on the same line.
The string provided in your editor must be printed first, immediately
followed by the string you read as input.
Input Format
The first line contains an integer that you must sum with i. The second line
contains a double that you must sum with d. The third line contains a string
that you must concatenate with s.
Output Format
Print the sum of both integers on the first line, the sum of both doubles
(scaled to 1 decimal place) on the second line, and then the two concatenated
strings on the third line.
'''
i = 4
d = 4.0
s = 'HackerRank '
sumOne = int(input())
sumTwo = float(input())
sumThree = str(input())
print(i+sumOne)
print(d+sumTwo)
print(s+sumThree)
| [
"iamadi2001@gmail.com"
] | iamadi2001@gmail.com |
f4a6ff61bd09f097e3f78db368e0296793dad68d | f1e9f557c5d724dcabbfa17903de93bb82767e35 | /py_opencv_playrtsp.py | 48961e3539f940982eb4128f70fc2a9f5ce1a858 | [] | no_license | gregsheu/python | e5e9ff83dc0ce90541591e726c940e8a1f71a3d4 | 4a77295d58a522974ee85b201ab99cdbe410fd08 | refs/heads/master | 2023-08-18T08:30:15.611727 | 2023-08-08T06:55:44 | 2023-08-08T06:55:44 | 181,270,205 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 261 | py | import cv2
import ffmpeg
import time
vcap = cv2.VideoCapture("rtsp://admin:admin12345@192.168.1.77:554/cam/realmonitor?channel=1&subtype=0")
while(1):
ret, frame = vcap.read()
print(frame.tobytes())
cv2.imshow('channel2', frame)
cv2.waitKey(1)
| [
"greg@mymacpro.bluefinops.io"
] | greg@mymacpro.bluefinops.io |
74f3adb28be71f66fce3007a2bdcc98592206f91 | c7bcd58b8dbc09b3cbb25571a605074e161abe49 | /document/ArticulationPoints/code/Tarjan.py | 174f1755106b1b04915c3a3999503289ae3e0b02 | [] | no_license | anvalenciao/ArticulationPoints | d3d20f346aa1a5b5df088c68f26a81599d90256c | 97d9b824997d80a2bf899baed32dd1a9efd88612 | refs/heads/master | 2020-09-22T16:47:42.947767 | 2020-01-22T16:21:40 | 2020-01-22T16:21:40 | 225,276,882 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,328 | py | # Python program to find articulation points in an undirected graph
from collections import defaultdict
from var_dump import var_dump
#This class represents an undirected graph
#using adjacency list representation
class Graph:
def __init__(self,vertices):
self.V= vertices #No. of vertices
self.graph = defaultdict(list) # default dictionary to store graph
self.Time = 0
# function to add an edge to graph
def addEdge(self,u,v):
self.graph[u].append(v)
self.graph[v].append(u)
# var_dump(self.graph)
'''A recursive function that find articulation points
using DFS traversal
u --> The vertex to be visited next
visited[] --> keeps tract of visited vertices
disc[] --> Stores discovery times of visited vertices
parent[] --> Stores parent vertices in DFS tree
ap[] --> Store articulation points'''
def APUtil(self,u, visited, ap, parent, low, disc):
#Count of children in current node
children =0
# Mark the current node as visited and print it
visited[u]= True
# Initialize discovery time and low value
disc[u] = self.Time
low[u] = self.Time
self.Time += 1
#Recur for all the vertices adjacent to this vertex
for v in self.graph[u]:
# If v is not visited yet, then make it a child of u
# in DFS tree and recur for it
if visited[v] == False :
parent[v] = u
children += 1
self.APUtil(v, visited, ap, parent, low, disc)
# Check if the subtree rooted with v has a connection to
# one of the ancestors of u
low[u] = min(low[u], low[v])
# u is an articulation point in following cases
# (1) u is root of DFS tree and has two or more chilren.
if parent[u] == -1 and children > 1:
ap[u] = True
#(2) If u is not root and low value of one of its child is more
# than discovery value of u.
if parent[u] != -1 and low[v] >= disc[u]:
ap[u] = True
# Update low value of u for parent function calls
elif v != parent[u]:
low[u] = min(low[u], disc[v])
#The function to do DFS traversal. It uses recursive APUtil()
def AP(self):
# Mark all the vertices as not visited
# and Initialize parent and visited,
# and ap(articulation point) arrays
visited = [False] * (self.V)
disc = [float("Inf")] * (self.V)
low = [float("Inf")] * (self.V)
parent = [-1] * (self.V)
ap = [False] * (self.V) #To store articulation points
# Call the recursive helper function
# to find articulation points
# in DFS tree rooted with vertex 'i'
for i in range(self.V):
if visited[i] == False:
self.APUtil(i, visited, ap, parent, low, disc)
# var_dump(ap)
for index, value in enumerate (ap):
if value == True: print(index)
# Create a graph given in the above diagram
'''
g1 = Graph(5)
g1.addEdge(1, 0)
g1.addEdge(0, 2)
g1.addEdge(2, 1)
g1.addEdge(0, 3)
g1.addEdge(3, 4)
print "\nArticulation points in first graph "
g1.AP()
'''
g2 = Graph(4)
g2.addEdge(0, 1)
g2.addEdge(1, 2)
g2.addEdge(2, 3)
print ("\nArticulation points in second graph ")
g2.AP()
'''
g3 = Graph (7)
g3.addEdge(0, 1)
g3.addEdge(1, 2)
g3.addEdge(2, 0)
g3.addEdge(1, 3)
g3.addEdge(1, 4)
g3.addEdge(1, 6)
g3.addEdge(3, 5)
g3.addEdge(4, 5)
print "\nArticulation points in third graph "
g3.AP()
#This code is contributed by Neelam Yadav
''' | [
"andres.valencia@farmalisto.com.co"
] | andres.valencia@farmalisto.com.co |
689e917e38aef10a089a80107e8c7f64eb0b43f6 | 3dfb40aaac2388ad1a58a680468c17396f70f9e4 | /venv/Scripts/easy_install-3.7-script.py | ca1bdbcfeb723acaa7a0b97650bc512a77a340bf | [] | no_license | ShuherDa/Home-work-9 | af6d21cec99d8e1cd15001715b7be05d997920c9 | 242f881ef1332fde3cd1ccf83bfefa68fa8a1ea1 | refs/heads/master | 2020-03-25T08:35:42.021791 | 2018-08-06T14:03:46 | 2018-08-06T14:03:46 | 143,622,018 | 0 | 0 | null | null | null | null | ISO-8859-7 | Python | false | false | 454 | py | #!E:\Ξασχενθε\Home-work-9\venv\Scripts\python.exe -x
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install-3.7'
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install-3.7')()
)
| [
"shuherda@gmail.com"
] | shuherda@gmail.com |
9b5332d5c81fce935d7514fe4b9d76576e82731a | 77ffb288797e48c4c74b18f69bfd7d5ea19b8929 | /INF5620/project_2/project_2.py | 0aa958f14c4f78690c4bc0aebf295a848f92a51b | [] | no_license | evenmn/master | 59e555a3146997f6e4001e6b863eae69c6ea935a | cd35ea91514afc7557e4f634c0b1823c3c967324 | refs/heads/master | 2021-09-21T08:37:29.172587 | 2018-08-22T19:00:19 | 2018-08-22T19:00:19 | 105,077,958 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,433 | py | from fenics import *
import numpy as np
from plot_exact_solutions import *
def solve_diffusion(I, f, alpha, rho, T, Nx, Ny, Nz, Nt,
elements = 1, dimension = 3, Plot = False):
'''
Exercise c
This function solves the diffusion equation
rho*(du/dt)=grad*(alpha*grad(u)) + f(u)
given the value of rho and the functions alpha and f.
We need a (Diriclet) boundary function I to find an
initial function u_n
'''
# Define constants
dt = float(T)/Nt # Time step
C = dt/(2*rho) # To reduce the number of FLOPS
# Create mesh and define function space
mesh = [UnitIntervalMesh(Nx), UnitSquareMesh(Nx, Ny), UnitCubeMesh(Nx, Ny, Nz)]
V = FunctionSpace(mesh[dimension - 1], 'P', elements) # Function space
u_n = interpolate(I, V) # Define initial value
# Define variational problem
u = TrialFunction(V)
v = TestFunction(V)
a = u*v*dx + C*alpha*dot(grad(u), grad(v))*dx
L = u_n*v*dx - C*alpha*dot(grad(u_n), grad(v))*dx + 2*C*f*v*dx
u = Function(V) # This gonna be the u approximation
t_list = np.linspace(dt, T, Nt) # Time-stepping
for t in t_list:
# Update current time
f.t = t
alpha.u = u_n
solve(a == L, u) # Solve system
u_n.assign(u) # Update previous solution
# Plot and hold plot
if Plot:
plot(u, interactive=True)
return u, V
def test_constant_solution(T, Nx, Ny, Nz, Nt, tolerance = 1e-9, Plot = False):
'''
Exercise d
With a constant solution u(x,t) = C we expect rho and
alpha to be constants and f to be constant zero
independent of the values of C, rho and alpha
'''
rho = Constant(1.0)
alpha = Constant(1.0)
f = Constant(0.0)
u_D = Constant(1.0)
print '\nConstant solution'
for Ne in [1,2]:
for dim in [1,2,3]:
'''Find the solution for P1 and P2 elements studying 1D, 2D and 3D'''
if Ne == 2:
Plot_ = Plot
else:
Plot_ = False
u, V = solve_diffusion(u_D, f, alpha, rho, T, Nx, Ny, Nz, Nt,
Ne, dim, Plot=Plot)
# Compute error at vertices
u_e = interpolate(u_D, V)
error = np.abs(u_e.vector().array() - u.vector().array()).max()
print 'P{} element, {} dimensions, Error: '.format(Ne, dim), error
success = error < tolerance
msg = 'The script is apparently not running as it should'
assert success, msg
def test_linear_solution(T, Nz, tolerance = 1e-1, Plot=False):
'''
Exercise e
With a exponential fluctuating solution u(x,y,t) = exp(-pi^2*t) cos(pi*x)
we expect rho and
alpha to be constants and f to be constant zero
independent of the values of C, rho and alpha
'''
rho = Constant(1.0)
alpha = rho
f = Constant(0.0)
I = Expression('cos(pi*x[0])', degree=3)
u_D = Expression('exp(-pi*pi*t) * cos(pi*x[0])', degree=2, t=T)
h_list = [0.001, 0.0005, 0.0001, 0.00005, 0.00001]
print '\nLinear solution'
for dim in [1]:
'''Find the solution for P1 elements studying 1D, 2D and 3D.
dx = dy = dt = sqrt(h) => Nx = Ny = Nt = h^(-1/2)'''
for h in h_list:
if h == h_list[-1]:
Plot_ = Plot
else:
Plot_ = False
Nx = Ny = Nt = int(round(h**(-0.5))) # Steps
Ne = 1 # P1 element
u, V = solve_diffusion(I, f, alpha, rho, T, Nx, Ny, Nz, Nt,
Ne, dim, Plot=Plot_)
# Compute error at vertices
u_e = interpolate(u_D, V)
e = u_e.vector().array() - u.vector().array()
E = np.sqrt(np.sum(e**2)/u.vector().array().size)
print '{} dimensions, h = {}, Error = {}, E/h = {}'\
.format(dim, h, E, E/h)
success = E < tolerance
msg = 'The script is apparently not running as it should'
assert success, msg
def test_nonlinear_solution(T, Nx, Ny, Nz, Nt, tolerance = 1, Plot = False):
'''
Exercise f
A nonlinear solution u(x,y,t) = tx^2(1/2-x/3)
'''
rho = Constant(1.0)
I = Constant(0.0)
alpha = Expression('1 + u*u', degree=2, u=I)
f = Expression('-rho*pow(x[0], 3)/3 + rho*pow(x[0], 2)/2 + 8*pow(t, 3)*'
'pow(x[0],7)/9 - 28*pow(t, 3)*pow(x[0], 6)/9 + 7*pow(t, 3)*'
'pow(x[0], 5)/2 - 5*pow(t, 3)*pow(x[0], 4)/4 + 2*t*x[0] - t',
degree = 2, rho = rho, t=0)
u_D = Expression('t*x[0]*x[0]*(0.5 - x[0]/3)', degree = 2, t=0)
u, V = solve_diffusion(I, f, alpha, rho, T, Nx, Ny, Nz, Nt, 1, 1, Plot=Plot)
u_e = interpolate(u_D, V)
e = u_e.vector().array() - u.vector().array()
print '\nNon-linear solution \nError: {}'.format(np.max(abs(e)))
msg = 'The non-linear numerical solution has too big error'
assert np.max(abs(e)) < tolerance, msg
if __name__ == '__main__':
pass
set_log_active(False)
test_constant_solution(T=1.0, Nx=10, Ny=10, Nz=10, Nt=10, Plot=False)
test_linear_solution(T=1.0, Nz=10, Plot=False)
test_nonlinear_solution(T=1.0, Nx=10, Ny=10, Nz=10, Nt=100, Plot=True)
#plot_exact(f = u_linear, Nx = 100, T = 1.0)
plot_exact(f = u_non_linear, Nx = 100, T = 1.0)
'''
terminal >>> python2 project_2.py
Constant solution
P1 element, 1 dimensions, Error: 2.77555756156e-15
P1 element, 2 dimensions, Error: 3.5527136788e-15
P1 element, 3 dimensions, Error: 2.60902410787e-14
P2 element, 1 dimensions, Error: 1.26343380202e-13
P2 element, 2 dimensions, Error: 1.39666056498e-13
P2 element, 3 dimensions, Error: 8.69304628281e-14
Linear solution
1 dimensions, h = 0.001, Error = 3.1101672842e-06, E/h = 0.0031101672842
1 dimensions, h = 0.0005, Error = 1.58817190084e-06, E/h = 0.00317634380168
1 dimensions, h = 0.0001, Error = 3.23369943115e-07, E/h = 0.00323369943115
1 dimensions, h = 5e-05, Error = 1.62653483093e-07, E/h = 0.00325306966185
1 dimensions, h = 1e-05, Error = 3.23584499113e-08, E/h = 0.00323584499113
Non-linear solution
Error: 0.167165830348
'''
| [
"evenmn@fys.uio.no"
] | evenmn@fys.uio.no |
d8404e6a9aae98d70c84bba8bb0af8bf28a4f214 | ba0b63d03855ff034d8ac50694680028591b7316 | /webui/settings.py | 7c37c3020d44be6af3af4e9474ec6703b65f5dc9 | [] | no_license | stephenwilley/sge4vfx | 3d266e53c37b6d2940b8981c51d81482579aa26b | 8aaa4b92db5b2de7c41eaea8307d6ff1be7fc4ab | refs/heads/master | 2021-01-09T09:39:03.004604 | 2012-07-25T09:39:03 | 2012-07-25T09:39:03 | 1,156,099 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,000 | py | import logging
import tornado
import tornado.template
import os
from tornado.options import define, options
import environment
import logconfig
# Make filepaths relative to settings.
path = lambda root,*a: os.path.join(root, *a)
ROOT = os.path.dirname(os.path.abspath(__file__))
define("port", default=8888, help="run on the given port", type=int)
define("config", default=None, help="tornado config file")
define("debug", default=False, help="debug mode")
tornado.options.parse_command_line()
STATIC_ROOT = path(ROOT, 'static')
TEMPLATE_ROOT = path(ROOT, 'templates')
# Deployment Configuration
class DeploymentType:
PRODUCTION = "PRODUCTION"
DEV = "DEV"
SOLO = "SOLO"
STAGING = "STAGING"
dict = {
SOLO: 1,
PRODUCTION: 2,
DEV: 3,
STAGING: 4
}
if 'DEPLOYMENT_TYPE' in os.environ:
DEPLOYMENT = os.environ['DEPLOYMENT_TYPE'].upper()
else:
DEPLOYMENT = DeploymentType.SOLO
settings = {}
settings['debug'] = DEPLOYMENT != DeploymentType.PRODUCTION or options.debug
settings['static_path'] = STATIC_ROOT
settings['cookie_secret'] = "cookie-secret-ness"
settings['xsrf_cookies'] = True
settings['template_loader'] = tornado.template.Loader(TEMPLATE_ROOT)
SYSLOG_TAG = "theq2"
SYSLOG_FACILITY = logging.handlers.SysLogHandler.LOG_LOCAL2
# See PEP 391 and logconfig for formatting help. Each section of LOGGERS
# will get merged into the corresponding section of log_settings.py.
# Handlers and log levels are set up automatically based on LOG_LEVEL and DEBUG
# unless you set them here. Messages will not propagate through a logger
# unless propagate: True is set.
LOGGERS = {
'loggers': {
'theq2': {},
},
}
if settings['debug']:
LOG_LEVEL = logging.DEBUG
else:
LOG_LEVEL = logging.INFO
USE_SYSLOG = DEPLOYMENT != DeploymentType.SOLO
logconfig.initialize_logging(SYSLOG_TAG, SYSLOG_FACILITY, LOGGERS,
LOG_LEVEL, USE_SYSLOG)
if options.config:
tornado.options.parse_config_file(options.config)
| [
"stephen.willey@primefocusworld.com"
] | stephen.willey@primefocusworld.com |
35db68f4d801fb29393b71e9e2420fe4bcd46cad | cdd99ef2356d99fbf8c157af58dc3f49bdc9d096 | /backend/accounts/admin.py | 700c8ff4f7b8f53a832e630a054339b80b960500 | [] | no_license | abdullah-mjawaz/HalepliOgluProject | c6e5f44be86db9bb98d8acced65096bf0f0c2473 | 520d14519d3195c0fe33e2b209956e7ce8142528 | refs/heads/master | 2023-08-11T17:17:53.781262 | 2019-12-18T15:55:43 | 2019-12-18T15:55:43 | 228,851,540 | 0 | 0 | null | 2023-07-07T23:03:57 | 2019-12-18T13:58:44 | TypeScript | UTF-8 | Python | false | false | 195 | py | from django.contrib import admin
from .models import User
from django.contrib.auth.models import Permission
# Register your models here.
admin.site.register(User)
admin.site.register(Permission) | [
"abdullah.mjawaz@gmail.com"
] | abdullah.mjawaz@gmail.com |
f06068900ed58d8de660a790e7ee15db796b234a | 09d4e48c8932bae69302e7d63ac2905ea3407d7a | /JERSF_Analysis/JER/wide_eta_binning/steer.py | 3864338dae022a1c363d9dcd5c80acdbd22fa9f7 | [] | no_license | anmalara/DiJetJERC | 1060f7f6bdd52a9b3bcfdda24fe44d245a4b6e59 | 2d352ef03d4bbfe5ee4144741bb108164829eaab | refs/heads/master | 2022-10-24T13:29:49.541585 | 2022-09-25T19:47:38 | 2022-09-25T19:47:38 | 193,882,709 | 0 | 0 | null | 2019-06-26T10:26:14 | 2019-06-26T10:26:14 | null | UTF-8 | Python | false | false | 9,272 | py | #!/usr/bin/env python
import sys
import os
import time
import numpy as np
sys.path.append(os.environ["CMSSW_BASE"]+"/src/UHH2/DiJetJERC/conf/")
from utils import *
def getLabel(sample, year):
if year=="2022":
if sample == "C":
LABEL_LUMI_INV_FB = "[MC 124X] Run2022C 4.48 fb^{-1}"
elif sample == "D":
LABEL_LUMI_INV_FB = "[MC 124X] Run2022D 2.75 fb^{-1}"
elif sample == "CD":
LABEL_LUMI_INV_FB = "[MC 124X] Run2022CD 7.31 fb^{-1}"
else:
if sample == "A":
LABEL_LUMI_INV_FB = "[MC 102X] Run2018A 14.00 fb^{-1}"
elif sample == "B":
LABEL_LUMI_INV_FB = "[MC 102X] Run2018B 7.10 fb^{-1}"
elif sample == "C":
LABEL_LUMI_INV_FB = "[MC 102X] Run2018C 6.94 fb^{-1}"
elif sample == "D":
LABEL_LUMI_INV_FB = "[MC 102X] Run2018D 31.93 fb^{-1}"
elif sample == "ABC":
LABEL_LUMI_INV_FB = "[MC 102X] Run2018 28.04 fb^{-1}"
elif sample == "ABCD":
LABEL_LUMI_INV_FB = "[MC 102X] Run2018 59.97 fb^{-1}"
else:
LABEL_LUMI_INV_FB = "[MC 102X] (2018)"
return LABEL_LUMI_INV_FB
def main_function(gaustails=False, shiftForPLI="central", gaustail_num = 0.985):
outdir = out_path+pattern+QCDsample+"/"+run+"/"
shiftForPLI_num = 0.0
ref_shift = 3
if "barrel_check" in extraText:
ref_shift = int(extraText[-2])
if "eta_simple" in MC_file:
ref_shift = 1
if gaustails:
outdir = out_path+year+"/"+newJECVersion+"/"+newJetLabel+"/gaustails_"+str(gaustail_num)+"/"+QCDsample+"/"+run+"/"
if shiftForPLI=="up":
outdir = out_path+year+"/"+newJECVersion+"/"+newJetLabel+"/PLI/up/"+QCDsample+"/"+run+"/"
shiftForPLI_num = 0.25
if shiftForPLI=="down":
outdir = out_path+year+"/"+newJECVersion+"/"+newJetLabel+"/PLI/down/"+QCDsample+"/"+run+"/"
shiftForPLI_num = -0.25
print "outdir ", outdir
if os.path.isdir(outdir):
for el in sorted(os.listdir(outdir)):
cmd = "rm -fr %s" % (outdir+el)
a = os.system(cmd)
else:
os.makedirs(outdir)
programm ="mainRun"
# if "AK8" in outdir: programm += "AK8"
cmd = "cp %s.cxx %s" % (programm, outdir)
a = os.system(cmd)
cmd = "cp functions.C %s" % (outdir)
a = os.system(cmd)
cmd = "cp "+os.environ["CMSSW_BASE"]+"/src/UHH2/DiJetJERC/include/tdrstyle_all.h %s" % (outdir)
a = os.system(cmd)
cmd = "cp "+os.environ["CMSSW_BASE"]+"/src/UHH2/DiJetJERC/include/constants.h %s" % (outdir)
a = os.system(cmd)
os.chdir(outdir)
os.makedirs("pdfy")
os.makedirs("pdfy/MCTruth")
os.makedirs("pdfy/SFs")
os.makedirs("pdfy/NSC_SFs")
os.makedirs("pdfy/JERs")
os.makedirs("pdfy/widths")
os.makedirs("pdfy/maps")
os.makedirs("ClosureTest")
os.makedirs("output")
os.makedirs("output/asymmetries")
# print pattern+run
temp_time=time.time()
# f = open("log.txt",'w')
MC_type = '\\"MC\\"'
data_type = '\\"Data\\"'
trigger_type = '\\"'+study+'\\"'
# cmd = 'root -l -b -q "%s%s.cxx(%s, false, %s, %s, %s, %s , %s, %s, %s, %s, %s, %s)" >> log.txt &' % (outdir, programm, year, MC_file, Data_file, LABEL_LUMI_INV_FB, MC_type, data_type, trigger_type, '\\"'+outdir+'\\"', gaustail_num, shiftForPLI_num, ref_shift)
# cmd = 'root -l -b -q "%s%s.cxx(%s, false, %s, %s, %s, %s , %s, %s, %s, %s, %s, %s)" >> log.txt ' % (outdir, programm, year, MC_file, Data_file, LABEL_LUMI_INV_FB, MC_type, data_type, trigger_type, '\\"'+outdir+'\\"', gaustail_num, shiftForPLI_num, ref_shift)
cmd = 'root -l -b -q "%s%s.cxx(\\"%s\\", false, %s, %s, %s, %s , %s, %s, %s, %s, %s, %s)"' % (outdir, programm, year, MC_file, Data_file, LABEL_LUMI_INV_FB, MC_type, data_type, trigger_type, '\\"'+outdir+'\\"', gaustail_num, shiftForPLI_num, ref_shift)
print "cmd", cmd
a = os.system(cmd)
print ("time needed: "+str((time.time()-temp_time))+" s")
os.chdir(common_path)
source_path = os.environ["CMSSW_BASE"]+"/src/UHH2/DiJetJERC/JERSF_Analysis/hist_preparation/"
common_path = os.environ["CMSSW_BASE"]+"/src/UHH2/DiJetJERC/JERSF_Analysis/JER/wide_eta_binning/"
# year = "2018"
year = "UL16preVFP_split"
# year = "UL16preVFP"
# year = "UL16postVFP"
# year = "UL17"
# year = "UL18"
year = "Legacy"
year = "2022"
# year = sys.argv[1]
samples = {}
# samples["2018"] = ["A", "B", "C", "D", "ABC", "ABCD"]
# samples["2018"] = ["D", "ABC", "ABCD"]
# samples["UL17"] = ["B", "C", "D", "E", "F","BCDEF"]
# samples["UL18"] = ["ABC", "ABCD", "A", "B", "C", "D"]
# samples["UL16preVFP"] = ["B", "C", "D", "E", "F", "BCD", "EF", "BCDEF"]
# samples["UL16postVFP"] = ["F", "G", "H", "FG", "FGH"]
# samples["UL16preVFP"] = ["BCD", "EF", "BCDEF"]
samples["UL16preVFP_split"] = ["BCDEF"]
samples["UL16preVFP"] = ["BCDEF"]
samples["UL16postVFP"] = ["FGH"]
samples["UL17"] = ["BCDEF"]
samples["UL18"] = ["ABCD"]
samples["UL16preVFP"] = ["B", "C", "D", "E", "F"]
samples["UL16postVFP"] = ["FG", "H"]
samples["UL17"] = ["B", "C", "D", "E", "F"]
samples["UL18"] = ["A", "B", "C", "D", "AB", "CD", "ABC"]
samples["UL16preVFP"] = ["EF"]
samples["UL17"] = ["EF"]
samples["Legacy"] = ["II"]
# samples["2022"] = ["C", "D","CD"]
samples["2022"] = ["CD"]
QCDSamples = {}
QCDSamples["2018"] = ["QCDHT"]
# QCDSamples["UL17"] = ["QCDPt"]
QCDSamples["UL16preVFP_split"] = ["QCDHT"]
QCDSamples["UL16preVFP"] = ["QCDHT"]
QCDSamples["UL16postVFP"] = ["QCDHT"]
QCDSamples["UL17"] = ["QCDHT"]
QCDSamples["UL18"] = ["QCDHT"]
QCDSamples["Legacy"] = ["QCDHT"]
# QCDSamples["2022"] = ["QCDPt", "QCDFlat"]
QCDSamples["2022"] = ["QCDFlat"]
JECVersions = {}
JECVersions["2018"] = ["Autumn18_V19"]
JECVersions["UL16preVFP_split"] = ["Summer19UL16APV_V3"]
JECVersions["UL16preVFP"] = ["Summer19UL16APV_V3"]
JECVersions["UL16postVFP"] = ["Summer19UL16_V2"]
JECVersions["UL17"] = ["Summer19UL17_V5"]
JECVersions["UL18"] = ["Summer19UL18_V5"]
JECVersions["Legacy"] = ["Summer19Legacy"]
JECVersions["2022"] = ["Winter22Run3_V1"]
# JetLabels=["AK4CHS", "AK8Puppi", "AK4Puppi"]
JetLabels=["AK4CHS"]
dirs = ["", "up", "down"]
systematics=["", "PU", "JEC", "alpha", "JER"]
# systematics=["", "JER"]
# systematics=["JER"]
# systematics=["JEC"]
# systematics=["PU", "alpha"]
systematics=[""]
systematics = ["", "alpha"]
studies = []
# studies.append("Standard")
studies.append("L1L2Residual")
# studies.append("L1L2")
# studies.append("eta_JER")
# studies.append("eta_simple")
for extraText in [""]:
for study in studies:
out_path = common_path+"file/"+study+"/"+extraText
for newJECVersion in JECVersions[year]:
for newJetLabel in JetLabels:
for syst in systematics:
for dir in dirs:
if syst == "JER" and dir != "":
continue
if syst == "JER" and dir == "":
dir = "nominal"
if (syst == "" and dir != "") or (syst == "alpha" and dir != "") or ((syst != "" and syst != "alpha") and dir == ""):
continue
pattern = year+"/"+newJECVersion+"/"+newJetLabel
if syst == "":
pattern += "/standard/"
elif syst == "alpha":
pattern += "/"+syst+"/"
else:
pattern += "/"+syst+"/"+dir+"/"
print "pattern ", pattern
for QCDsample in QCDSamples[year]:
for sample in samples[year]:
run = "Run"+sample
LABEL_LUMI_INV_FB=getLabel(sample, year)
LABEL_LUMI_INV_FB = '\\"'+LABEL_LUMI_INV_FB+'\\"'
MC_file = '\\"'+source_path+"MC/wide_eta_bin/file/"+study+"/"+pattern.replace("/standard","")+QCDsample+"_"+year+extraText+"/histograms_mc_incl_full.root"+'\\"'
Data_file = '\\"'+source_path+"data/wide_eta_bin/file/"+study+"/"+pattern.replace("/standard","")+run+"_"+year+extraText+"/histograms_data_incl_full.root"+'\\"'
print MC_file, Data_file
if not os.path.isfile(str(MC_file.replace("\\","").strip("\""))) or not os.path.isfile(str(Data_file.replace("\\","").strip("\""))):
continue
# print MC_file, Data_file
main_function(gaustails=False)
if syst == "" and len(systematics)!=1:
main_function(gaustails=False, shiftForPLI="up")
main_function(gaustails=False, shiftForPLI="down")
main_function(gaustails=True, shiftForPLI="central")
main_function(gaustails=True, shiftForPLI="central", gaustail_num = 0.95)
## for gaustail_num in np.arange(0.8,1.0,0.005):
## main_function(gaustails=True, shiftForPLI="central", gaustail_num=gaustail_num)
| [
"andrea.malara@cern.ch"
] | andrea.malara@cern.ch |
2daa7490a61cc2719677837eea96644bd3d7879a | 83de24182a7af33c43ee340b57755e73275149ae | /aliyun-python-sdk-vod/aliyunsdkvod/request/v20170321/ListDynamicImageRequest.py | ebe94cb925af50eeef533e6e56955b712cd79567 | [
"Apache-2.0"
] | permissive | aliyun/aliyun-openapi-python-sdk | 4436ca6c57190ceadbc80f0b1c35b1ab13c00c7f | 83fd547946fd6772cf26f338d9653f4316c81d3c | refs/heads/master | 2023-08-04T12:32:57.028821 | 2023-08-04T06:00:29 | 2023-08-04T06:00:29 | 39,558,861 | 1,080 | 721 | NOASSERTION | 2023-09-14T08:51:06 | 2015-07-23T09:39:45 | Python | UTF-8 | Python | false | false | 1,447 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvod.endpoint import endpoint_data
class ListDynamicImageRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'vod', '2017-03-21', 'ListDynamicImage','vod')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_VideoId(self): # String
return self.get_query_params().get('VideoId')
def set_VideoId(self, VideoId): # String
self.add_query_param('VideoId', VideoId)
| [
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
279e2a94ca6dac62b4adabb84fb8c787243a4149 | 966a162e2aecb382b0c9599630dc36582f926507 | /code/fix_RepeatMasker_breaks.py | 11b223fa74009fc230296f332d657bd4c2d961e4 | [
"MIT"
] | permissive | knowah/vm-retrotransposons | b64124aaef0344ea873577a8d0111e31341d99c6 | 3c1fc49efcba34874014ee89da29f2aeb11e49ae | refs/heads/master | 2023-01-01T12:23:53.312180 | 2020-10-22T11:01:16 | 2020-10-22T11:01:16 | 306,093,444 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,879 | py | #!/usr/bin/env python3
import argparse
import sys
import gzip
import contextlib
# Column names carried by a RepeatMasker entry, in output order.
RM_FIELDS = ['chrom', 'start', 'end', 'strand', 'repName', 'repClass', 'repFamily', 'repStart', 'repEnd', 'element_ID']


class RM_Entry(object):
    """Lightweight record holding one RepeatMasker annotation row."""
    __slots__ = RM_FIELDS

    def __str__(self):
        # Tab-join the attribute values in RM_FIELDS order.
        parts = (str(getattr(self, field)) for field in RM_FIELDS)
        return "\t".join(parts)
def process_line(ln):
    """Parse one tab-separated RepeatMasker line (UCSC rmsk column layout)
    into an RM_Entry."""
    fields = ln.rstrip('\n').split('\t')
    # chrom start end strand repName repClass repFamily repStart repEnd element.ID
    entry = RM_Entry()
    # String-valued columns: attribute name -> column index.
    for name, idx in (('chrom', 5), ('strand', 9), ('repName', 10),
                      ('repClass', 11), ('repFamily', 12)):
        setattr(entry, name, fields[idx])
    # Integer-valued columns.
    for name, idx in (('start', 6), ('end', 7), ('repStart', 13),
                      ('repEnd', 14), ('element_ID', 16)):
        setattr(entry, name, int(fields[idx]))
    return entry
def process_line_alt(ln, lineno=0):
    """Parse one tab-separated line in the alternative RepeatMasker layout
    (chrom/chromStart/chromEnd/name/... as exported for mouse strain genomes).

    :param ln: the raw input line
    :param lineno: value stored as the entry's element_ID.
        BUG FIX: the original read a free variable ``lineno`` that is not
        defined at module scope (NameError on every call); it is now an
        optional parameter defaulting to 0, so existing single-argument
        callers keep working.
    """
    tokens = ln.rstrip('\n').split('\t')
    # chrom chromStart chromEnd name score strand swScore milliDiv milliDel milliIns genoLeft repClass repFamily repStart repEnd repLeft
    entry = RM_Entry()
    entry.chrom = tokens[0]
    entry.start = int(tokens[1])
    entry.end = int(tokens[2])
    entry.strand = tokens[5]
    entry.repName = tokens[3]
    entry.repClass = tokens[11]
    entry.repFamily = tokens[12]
    # repStart can be negative in this layout; store its magnitude.
    entry.repStart = abs(int(tokens[13]))
    entry.repEnd = int(tokens[14])
    entry.element_ID = lineno
    return entry
@contextlib.contextmanager
def fopen(filename=None, mode='rt'):
    """Context manager yielding an open file handle.

    ``None`` or ``"-"`` maps to stdin (read modes) / stdout (write modes);
    names ending in ``.gz`` are opened with gzip. Standard streams are not
    closed on exit; real files are.
    """
    use_std_stream = not filename or filename == "-"
    if use_std_stream:
        handle = sys.stdin if 'r' in mode else sys.stdout
    else:
        opener = gzip.open if filename.endswith('.gz') else open
        handle = opener(filename, mode)
    try:
        yield handle
    finally:
        if handle is not sys.stdin and handle is not sys.stdout:
            handle.close()
def fix_RM_breaks(infile, breaks=500000, seq_gap=0, rep_gap=1, alt_format=False, outfile=None):
    """Merge RepeatMasker elements that were artificially broken at fixed
    genomic breakpoints.

    Two consecutive entries are given the same element_ID when the second
    starts within *seq_gap* bp past a multiple of *breaks*, both share
    chromosome, strand, and repeat name/class/family, the genomic gap is at
    most *seq_gap*, and the repeat-consensus coordinates continue within
    *rep_gap* (direction depends on strand).

    :param infile: sorted RepeatMasker file ('-'/None for stdin; .gz ok)
    :param breaks: breakpoint spacing at which elements may have been split
    :param seq_gap: max genomic gap (bp) between broken subelements
    :param rep_gap: max gap between repEnd and repStart of broken subelements
    :param alt_format: use the alternative input column layout
    :param outfile: output path ('-'/None for stdout)
    :raises IOError: if the input is not sorted by start coordinate
    """
    # process RepeatMasker file by line
    read_entry = process_line if not alt_format else process_line_alt
    with fopen(infile, 'rt') as inf, fopen(outfile, 'wt') as outf:
        lineno = 1
        firstln = inf.readline()
        while firstln.startswith('#'):
            # skip past header lines
            firstln = inf.readline()
        prev = read_entry(firstln)
        merge_elements = False
        elem_ids = (None, None)  # when merge_elements == True, this stores the element IDs to merge
        for line in inf:
            lineno += 1
            curr = read_entry(line)
            # fail if input file not sorted by start
            if prev.chrom == curr.chrom and curr.start < prev.start:
                raise IOError("ERROR: Entries out of order (line {})".format(lineno))
            # check if this element and the previous one need to be merged
            if not merge_elements \
                    and curr.start % breaks <= seq_gap \
                    and curr.chrom == prev.chrom \
                    and curr.strand == prev.strand \
                    and curr.start - prev.end <= seq_gap \
                    and curr.repName == prev.repName \
                    and curr.repClass == prev.repClass \
                    and curr.repFamily == prev.repFamily \
                    and ((curr.strand == "+" and curr.repStart - prev.repEnd <= rep_gap) or \
                         (curr.strand == "-" and prev.repStart - curr.repEnd <= rep_gap)):
                merge_elements = True
                elem_ids = (prev.element_ID, curr.element_ID)
            # merge elements if the element ID matches the ID to be changed
            if merge_elements and curr.element_ID == elem_ids[1]:
                curr.element_ID = elem_ids[0]
            else:
                merge_elements = False
            outf.write(str(prev)+"\n")  # output new entry for the previous line
            prev = curr
        outf.write(str(prev)+"\n")  # print last line
if __name__ == '__main__':
    # Command-line entry point: parse arguments and run the merge.
    parser = argparse.ArgumentParser()
    parser.add_argument("infile", type=str, help="Input (tab-separated) RepeatMasker file (must be sorted)")
    # BUG FIX: 'breaks' advertised a default of 500000 but was a *required*
    # positional, so the default could never apply; nargs='?' makes the
    # argument genuinely optional while keeping existing invocations valid.
    parser.add_argument("breaks", type=int, nargs='?', default=500000, help="Breakpoints to merge upon [default: 500000]")
    parser.add_argument("-o", "--outfile", type=str, help="Output file (leave blank or - for stdout)")
    parser.add_argument("-s", "--seq_gap", type=int, default=0, help="Maximum (reference sequence) gap in bp between broken subelements")
    parser.add_argument("-r", "--rep_gap", type=int, default=1, help="Maximum gap between repEnd & repStart of broken subelements")
    parser.add_argument("--alt_format", action="store_true", help="Use alternative input format (e.g. from mouse strain genomes)")
    args = parser.parse_args()

    fix_RM_breaks(args.infile, args.breaks, args.seq_gap, args.rep_gap, args.alt_format, args.outfile)
| [
"njk40@cam.ac.uk"
] | njk40@cam.ac.uk |
c78e0f7af5816b19efcea2334f9803e925c03c0c | d25eebb25595c25b73fdc64447f7cf5998204b0d | /gtkApi/ReportEditor.py | ca6d3ae8746a0c2d9fb7a526f2f18423739f3bc5 | [] | no_license | BackupTheBerlios/baseui | a3867c0cc4aa30cf2a7b0dcaf9dbeec68dc5ef0b | a8296aa42f0de42c18f7dfb5d20966bad695709b | refs/heads/master | 2021-01-15T22:28:52.114731 | 2012-12-05T16:31:03 | 2012-12-05T16:31:03 | 39,894,612 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,685 | py | #!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
#===============================================================================
# ReportEditor module.
# by Mark Muzenhardt, published under LGPL-License.
#===============================================================================
import pygtk
pygtk.require('2.0')
import gtk
class ReportEditor:
    """Top-level GTK window holding a toolbar (print / navigate / cancel)
    and a placeholder label."""

    def __init__(self):
        win = gtk.Window(gtk.WINDOW_TOPLEVEL)
        win.set_title("Translation Editor")
        # Leave the GTK main loop when the window is closed.
        win.connect("destroy", lambda w: gtk.main_quit())

        layout = gtk.VBox()
        win.add(layout)

        bar = gtk.Toolbar()
        layout.pack_start(bar, expand=False, fill=True)

        print_btn = gtk.Button('Druck')
        print_btn.connect("clicked", self.on_button_print_clicked)
        bar.add(print_btn)

        # Backward / forward navigation buttons (no handlers wired up yet).
        bar.add(gtk.Button('<-'))
        bar.add(gtk.Button('->'))

        cancel_btn = gtk.Button('Abbruch')
        cancel_btn.connect("clicked", lambda w: gtk.main_quit())
        bar.add(cancel_btn)

        layout.add(gtk.Label('NIIX'))
        win.show_all()

    def on_button_print_clicked(self, widget=None, data=None):
        """Handler for the print button; not implemented yet."""
        pass
# Start the GTK mainloop ------------------------------------------------------
def main():
    """Run the GTK event loop until the application quits; returns 0."""
    gtk.main()
    return 0
if __name__ == "__main__":
    # Build the editor window, then block in the GTK main loop.
    ReportEditor()
    main()
| [
"devnull@localhost"
] | devnull@localhost |
56d09969c8320eebcb40434cfaf414b04eb22e28 | 8f6cf9b74db999a136c4b758d229a992064371e6 | /migrations/versions/8c43c5ea96ea_followers.py | 8055f7b8368148df6c6331e4da479927e0134534 | [] | no_license | hkwiseman/Microblog | 0aadfb48061a31402d1459e2a76a35d84e5d24a2 | 13f149d9cbd8bb6919e9fdef51c39ddb59a49c62 | refs/heads/master | 2022-11-20T17:45:41.716107 | 2020-07-23T19:37:53 | 2020-07-23T19:37:53 | 275,980,632 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 840 | py | """followers
Revision ID: 8c43c5ea96ea
Revises: 71948c2263cd
Create Date: 2020-07-14 16:13:48.173869
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8c43c5ea96ea'  # unique id of this migration
down_revision = '71948c2263cd'  # migration applied immediately before this one
branch_labels = None
depends_on = None
def upgrade():
    """Apply the migration: create the `followers` association table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('followers',
    sa.Column('follower_id', sa.Integer(), nullable=True),
    sa.Column('followed_id', sa.Integer(), nullable=True),
    # Both columns reference user.id: a self-referential many-to-many
    # (follower -> followed) link table.
    sa.ForeignKeyConstraint(['followed_id'], ['user.id'], ),
    sa.ForeignKeyConstraint(['follower_id'], ['user.id'], )
    )
    # ### end Alembic commands ###
def downgrade():
    """Revert the migration: drop the `followers` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('followers')
    # ### end Alembic commands ###
| [
"kwiseman@highpoint.edu"
] | kwiseman@highpoint.edu |
a19902b87214a992563e457f4968c347081091e4 | f77bf89a96ffec6b86900bbe5ce5257746946085 | /lab9/1.4.sqs.make.queues.py | 21729f28c675f2c6922acb9e5a582f5918a2d97b | [] | no_license | wwdillingham/aws_extension_course | eee568937bb1554e10cbb7a4a76a8190bc5d33bb | 977020f0c378812eae0fe486fafacb165c2042d7 | refs/heads/master | 2021-01-18T19:52:54.977563 | 2017-05-12T13:18:11 | 2017-05-12T13:18:11 | 86,919,103 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 307 | py | import boto3
# SQS client bound to the default AWS credentials/region from the environment.
client = boto3.client('sqs')

# create the queues (idempotent: create_queue returns the existing queue's
# URL if one with the same name and attributes already exists)
create_collection_queue_response = client.create_queue(
    QueueName='CreditCollectionQueue',
)

create_notification_queue_response = client.create_queue(
    QueueName='UserNotificationQueue',
)

# List all queue URLs in this account/region and print them so the two
# queues created above can be confirmed.
queues = client.list_queues()
print(queues)
| [
"wes_dillingham@harvard.edu"
] | wes_dillingham@harvard.edu |
553f276cba612be3fbf0e81adef87aa3d04097f5 | efec7d7cc21e5653bb9031b9b5611f825bd7cd46 | /kaypruning/data/base.py | fcf7d7343cf8f4ae923c51094620f057d5d44dfc | [
"MIT"
] | permissive | AmrMKayid/KayPruning | fbe864e99d6e3ab1d4c41f7d25cb34c2d2b417c0 | 63439abab57065661b58faba92246e539cf10870 | refs/heads/master | 2022-12-18T08:25:16.813479 | 2020-03-14T13:32:40 | 2020-03-14T13:32:40 | 211,220,243 | 2 | 0 | MIT | 2022-12-08T06:13:37 | 2019-09-27T02:30:23 | Jupyter Notebook | UTF-8 | Python | false | false | 1,901 | py | import tensorflow as tf
import tensorflow_datasets as tfds
from kaypruning.configs import *
from kaypruning.utils import describe, glogger
class DataBunch:
    r"""
    Loads a tensorflow-datasets dataset, flattens its images, and exposes
    ready-to-iterate train/test pipelines for use inside the trainer.
    """

    @classmethod
    def _convert_images(cls, dataset):
        r"""
        Map *dataset* so each image becomes a float32 vector of
        ``model_hparams.num_features`` values; labels pass through unchanged.
        """
        def to_vector(image):
            as_float = tf.image.convert_image_dtype(image, tf.float32)
            flat = tf.reshape(as_float, [model_hparams.num_features])
            return tf.cast(flat, tf.float32)

        return dataset.map(lambda image, label: (to_vector(image), label))

    def __init__(self, name: str = 'mnist', batch: int = 32,
                 cache: bool = True, split=None):
        self.name = name
        loaded = tfds.load(name=name, as_supervised=True, split=split)
        self.train = DataBunch._convert_images(loaded['train'])
        self.test = DataBunch._convert_images(loaded['test'])
        if cache:
            # speed things up considerably
            self.train = self.train.cache()
            self.test = self.test.cache()
        self.batch = batch
        glogger.info(describe(self))

    def get_train(self):
        """Shuffled, batched, prefetched, repeated training pipeline."""
        pipeline = self.train.shuffle(data_configs.shuffle)
        pipeline = pipeline.batch(self.batch)
        pipeline = pipeline.prefetch(data_configs.prefetch)
        return pipeline.repeat(data_configs.repeat)

    def get_test(self):
        """Batched, prefetched, repeated evaluation pipeline (no shuffle)."""
        pipeline = self.test.batch(self.batch)
        pipeline = pipeline.prefetch(data_configs.prefetch)
        return pipeline.repeat(data_configs.repeat)
| [
"amrkayid2027@gmail.com"
] | amrkayid2027@gmail.com |
b10a04128fcc917c4304d3f58318355dd33ade8e | 937e2a265e42d21a915f9aedf1b4b5e4aa28fb06 | /apps/mainApp/models.py | 8db7e25a395d3afb7fe02558014f58e7a79b19ab | [] | no_license | Briggs-Py/beltExam | 5f900fbef35f8f442400a19d724082aac37467d3 | 0b7376262266d7397137d60568611f5e0662ef24 | refs/heads/master | 2021-01-23T03:33:29.166442 | 2017-03-24T20:37:27 | 2017-03-24T20:37:27 | 86,088,664 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 424 | py | from __future__ import unicode_literals
from ..loginAndReg.models import User
from django.db import models
class Wishlist(models.Model):
    """A named wishlist created by one user that other users can join."""
    name = models.CharField(max_length=50)
    # Users who have added themselves to this wishlist.
    users = models.ManyToManyField(User, related_name="wishes")
    # The user who created the list (distinct from the joiners above).
    creator = models.ForeignKey(User, related_name="created_wishes")
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
| [
"briggs.mcknight@gmail.com"
] | briggs.mcknight@gmail.com |
519e8b05797206f9dbf3195e0d67c9a1df898f84 | 806a123fc1e26a9b0509de380f9dab1909f180ac | /aser/extract/raw_parser.py | a64f5147d3ee2e8561ca4eafd7c47e9b10fff33d | [
"MIT"
] | permissive | anshiquanshu66/CSKB-Population | df621b46bde97bf9794fa9c887f91715f591823e | c553bf769da661ff28f5c1cd35e1a683b7da67ad | refs/heads/main | 2023-08-13T22:06:05.395192 | 2021-10-19T05:43:29 | 2021-10-19T05:43:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,715 | py | import os
import random
import math
import errno
import argparse
from typing import List
from multiprocessing import Pool
from shutil import copyfile
from tqdm import tqdm
try:
import ujson as json
except:
import json
from aser.extract.utils import get_corenlp_client, parse_sentense_with_stanford
from aser.extract.entity_linker import LinkSharedSource, Mention, Entity, str_contain, acronym, DisjointSet, base_url, link
from aser.utils.config import get_raw_process_parser
# ------------------- class section -------------------
class FileName:
    """A file path split into a base directory (*root*) and a path relative
    to it (*fn*); *full* precomputes their join."""

    def __init__(self, root, fn):
        self.root = root
        self.fn = fn
        self.full = os.path.join(root, fn)

    def __str__(self):
        return f"{self.root} {self.fn}"

    def __repr__(self):
        return str(self)
class ParsingTask:
    """Unit of work for one worker process: a batch of raw files plus the
    CoreNLP settings needed to parse them."""

    def __init__(self, file_list: List[FileName], parsed_root, corenlp_path, port, annotators, link_flg=True):
        self.file_list = file_list        # raw files assigned to this task
        self.parsed_root = parsed_root    # output directory for parsed jsonl files
        self.corenlp_path = corenlp_path  # local Stanford CoreNLP installation
        self.port = port                  # CoreNLP server port for this worker
        self.annotators = annotators      # CoreNLP annotator pipeline
        self.link_flg = link_flg          # whether to run entity linking
# ------------------- utils section -------------------
def read_raw(file_name):
    """Read *file_name* and return its lines stripped of surrounding whitespace."""
    with open(file_name) as handle:
        return [line.strip() for line in handle]
def silent_remove(filename: str):
    """Delete *filename*, silently ignoring the case where it does not exist."""
    try:
        os.remove(filename)
    except OSError as exc:
        # Only "no such file" is expected; re-raise anything else.
        if exc.errno != errno.ENOENT:
            raise
def read_dir(root):
    """Recursively collect every file under *root* as FileName objects whose
    `fn` is the path relative to *root*."""
    def walk(folder):
        found = []
        for entry in os.listdir(folder):
            # Path of `folder` relative to `root` ('' at the top level).
            rel_dir = folder[len(root) + 1:]
            candidate = FileName(root, os.path.join(rel_dir, entry))
            if os.path.isfile(candidate.full):
                found.append(candidate)
            else:
                found.extend(walk(candidate.full))
        return found

    return walk(root)
def dump_paths(name: str, fn_list: List[FileName]):
    """Write the full path of every FileName in *fn_list* to *name*, one per line."""
    with open(name, 'w') as out:
        out.writelines(f"{item.full}\n" for item in fn_list)
def load_paths(name: str, root: str = None):
    """Read newline-separated paths from *name*.

    With *root* given, each path is wrapped as a FileName relative to *root*;
    otherwise the raw path strings are returned.
    """
    raw_paths = [line.strip() for line in open(name, 'r')]
    if root is not None:
        return [FileName(root, path[len(root) + 1:]) for path in raw_paths]
    return raw_paths
def change_file_extension(fn, new_extension='jsonl'):
    """Return *fn* with its final extension replaced by *new_extension*.

    BUG FIX: the original split on every '.', so an extensionless name
    collapsed to just 'jsonl' and a dot inside a *directory* component
    ('dir.d/file') mangled the whole path. os.path.splitext only considers
    the final component's extension, fixing both cases.
    """
    root, ext = os.path.splitext(fn)
    if not ext:
        # Nothing to replace: append the new extension instead.
        return fn + '.' + new_extension
    return root + '.' + new_extension
def check_func(task):
    """Audit one ParsingTask: classify each raw file as parsed / unparsed /
    empty, delete empty raw files and corrupted parsed files, and return
    (parsed_num, unparsed_num, empty_num, total_num, unparsed_raw_files).
    """
    parsed_num, unparsed_num, empty_num = 0, 0, 0
    file_list = task.file_list
    parsed_root = task.parsed_root
    check_unparsed_rawlist = []

    def check_file_empty(fn: str):
        # True only when the file opens and yields no lines.
        # NOTE(review): on a read error this returns False (treated as
        # non-empty) -- confirm that is the intended best-effort behavior.
        try:
            for _ in open(fn):
                return False
        except Exception as e:
            print(f'{fn} error: {e}')
            return False
        return True

    def check_file_integrity(fn: str):
        # The jsonl header's last cumulative value counts the header itself
        # (para_lens[0] += 1 in parse_func), so a complete file has exactly
        # that many lines in total.
        lens = None
        line_num, except_line_num = 0, None
        for line in open(fn):
            line = line.strip()
            if lens is None:
                lens = json.loads(line)['sentence_lens']
                except_line_num = lens[-1]
            line_num += 1
        if except_line_num == line_num:
            return True
        return False

    parsed_fn_list = [os.path.join(parsed_root, change_file_extension(f.fn)) for f in file_list]
    for i_f, item in enumerate(file_list):
        raw_file_empty = check_file_empty(item.full)
        # raw file empty
        if raw_file_empty:
            empty_num += 1
            silent_remove(item.full)
            silent_remove(parsed_fn_list[i_f])
        # raw file not empty (unparsed or parsed)
        else:
            if os.path.exists(parsed_fn_list[i_f]):
                parsed_file_flg = not check_file_empty(parsed_fn_list[i_f]) and check_file_integrity(parsed_fn_list[i_f])
                # unparsed or corrupted
                if not parsed_file_flg:
                    unparsed_num += 1
                    silent_remove(parsed_fn_list[i_f])
                    check_unparsed_rawlist.append(item)
                # parsed
                else:
                    parsed_num += 1
            else:
                unparsed_num += 1
                check_unparsed_rawlist.append(item)
    total_num = parsed_num + unparsed_num + empty_num
    return parsed_num, unparsed_num, empty_num, total_num, check_unparsed_rawlist
def parse_func(task):
    """Worker: parse every raw file in *task* with CoreNLP and write one
    jsonl file per input under task.parsed_root.

    Output layout: the first line is {'sentence_lens': cumulative sentence
    counts per paragraph (offset by 1 for the header)}; each following line
    is one parsed sentence.

    BUG FIXES versus the original:
      * the annotator list was read from an undefined global ``anno`` (a
        local of ``main``), a NameError at runtime; ``task.annotators`` is
        used throughout instead.
      * ``link_per_doc`` was likewise an undefined global; a guarded module
        lookup with a default of False (link per paragraph, matching a
        store_true CLI flag's default) is used instead.
    """
    threshold = 2000  # max characters handed to CoreNLP per request
    corenlp_path = task.corenlp_path
    annotators = task.annotators
    port = task.port
    file_list = task.file_list
    parsed_root = task.parsed_root
    # NOTE(review): honors a module-level `link_per_doc` if one exists;
    # confirm this matches the intended --link_per_doc CLI semantics.
    link_per_doc = globals().get("link_per_doc", False)
    parsed_fn_list = [os.path.join(parsed_root, change_file_extension(f.fn)) for f in file_list]
    client, _ = get_corenlp_client(corenlp_path=corenlp_path, corenlp_port=port, annotators=annotators)
    for i_f, item in enumerate(file_list):
        if os.path.exists(parsed_fn_list[i_f]):
            if 'ner' not in annotators:
                print('file:{} already parsed without ner, continue..'.format(parsed_fn_list[i_f]))
                continue
            # Skip files whose parsed output already contains linked mentions.
            no_link = True
            for i, line in enumerate(open(parsed_fn_list[i_f])):
                if i > 0:
                    if json.loads(line.strip()).get('mentions', None) is not None:
                        no_link = False
                        break
            if not no_link:
                print('file:{} already parsed and linked, continue..'.format(parsed_fn_list[i_f]))
                continue
        fn = item.full
        sentences = []
        para_lens = []
        for para in open(fn):
            sentences_unlinked = []
            # Feed the paragraph to CoreNLP in bounded-size chunks.
            for i_p in range(0, len(para), threshold):
                content = para[i_p:i_p + threshold].strip()
                if content:
                    tmp = parse_sentense_with_stanford(content, client, annotators=annotators)
                    sentences_unlinked.extend(tmp)
            if task.link_flg and not link_per_doc:
                link(sentences_unlinked)
            para_lens.append(len(sentences_unlinked))
            sentences.extend(sentences_unlinked)
        if task.link_flg and link_per_doc:
            link(sentences)
        if sentences == [] or para_lens == []:
            print('fn {} is empty'.format(fn))
            continue
        # Cumulative counts; the +1 accounts for the header line, so the
        # last value equals the total line count of the output file.
        para_lens[0] += 1
        for i in range(1, len(para_lens)):
            para_lens[i] += para_lens[i - 1]
        if not os.path.exists(os.path.dirname(parsed_fn_list[i_f])):
            os.makedirs(os.path.dirname(parsed_fn_list[i_f]), exist_ok=True)
        with open(parsed_fn_list[i_f], 'w') as fw:
            fw.write(json.dumps({'sentence_lens': para_lens}) + '\n')
            for s in sentences:
                fw.write(json.dumps(s) + '\n')
    client.stop()
def main():
    """CLI driver: optionally audit previously parsed output (--check) and/or
    parse raw ASER corpus files with a pool of CoreNLP workers (--parse),
    with optional entity linking (--link).
    """
    parser = get_raw_process_parser()
    args = parser.parse_args()
    check_flg = args.check
    link_flg = args.link
    parse_flg = args.parse
    # NOTE(review): this is a *local* of main, but parse_func reads a global
    # of the same name -- confirm how the flag is meant to reach the workers.
    link_per_doc = args.link_per_doc
    print("process dataset:{}".format(args.data))
    if check_flg and not parse_flg:
        print('only check dataset')
    elif check_flg and parse_flg:
        print('check before parse dataset')
    elif not check_flg and parse_flg:
        print('only parse dataset')
    else:
        # Neither mode selected: nothing to do.
        print('check and parse both not activated, error!')
        exit(-1)
    print(f'{"enable" if link_flg else "disable"} link')
    share_src = None
    if link_flg and parse_flg:
        print(f'link per {"doc" if link_per_doc else "paragraph"}')
        # Hard-coded wiki resource paths for the entity linker.
        disam_fn = '/home/hkeaa/data/nel/basic_data/wiki_disambiguation_pages.txt'
        name_id_fn = '/home/hkeaa/data/nel/basic_data/wiki_name_id_map.txt'
        redirect_fn = '/home/hkeaa/data/nel/basic_data/wiki_redirects.txt'
        ment_ent_fn = '/home/data/corpora/wikipedia/ment_ent'
        person_fn = '/home/hkeaa/data/nel/basic_data/p_e_m_data/persons.txt'
        # NOTE(review): share_src is never used again in this function --
        # presumably link() depends on it indirectly; verify.
        share_src = LinkSharedSource(disam_fn, redirect_fn, ment_ent_fn, person_fn)
    aser_root = '/home/data/corpora/aser/data'
    dataset_name = args.data
    raw_root = os.path.join(aser_root, dataset_name + '/raw')
    parsed_root = os.path.join(aser_root, dataset_name + '/parsed_new')
    worker_num = args.worker_num
    corenlp_path = '/home/software/stanford-corenlp/stanford-corenlp-full-2018-02-27'
    anno = ['tokenize', 'ssplit', 'pos', 'lemma', 'parse', 'ner']
    raw_inx_fn = os.path.join(raw_root, 'path_inx.json')
    # Reuse the cached path index unless we are re-checking the corpus.
    if os.path.exists(raw_inx_fn) and not check_flg:
        file_name_list = load_paths(raw_inx_fn, raw_root)
    else:
        file_name_list = read_dir(raw_root)
        dump_paths(raw_inx_fn, file_name_list)
        print(f'saved in {raw_inx_fn}')
    print('all raw file number: {}'.format(len(file_name_list)))

    def chunk_list(l):
        # Split l into chunks of at most 20000 files, sized so there are
        # somewhat more chunks than workers.
        chunk_size = min(20000, int(math.ceil(len(l) / (worker_num + 20.0))))
        for i in range(0, len(l), chunk_size):
            yield l[i:i + chunk_size]

    tasks = []
    server_num = 10  # number of CoreNLP server ports (9101..9110) cycled over
    if check_flg:
        print('check parsed result..')
        unparsed_fn = os.path.join(raw_root, 'path_inx.unparsed.json')
        parsed_num, unparsed_num, empty_num, total_num = 0, 0, 0, 0
        for i, fn_list in enumerate(chunk_list(file_name_list)):
            t = ParsingTask(fn_list, parsed_root, corenlp_path, 9101 + i % server_num, anno, link_flg)
            tasks.append(t)
        unparsed_list = []
        with Pool(worker_num) as pool:
            # Aggregate the per-task audit counters.
            for res in tqdm(pool.imap_unordered(check_func, tasks), total=len(tasks)):
                res_parsed_num, res_unparsed_num, res_empty_num, res_total_num, res_unparsed_list = res
                parsed_num += res_parsed_num
                unparsed_num += res_unparsed_num
                empty_num += res_empty_num
                total_num += res_total_num
                unparsed_list.extend(res_unparsed_list)
        print('parsed num:{} prob:{:.4f}'.format(parsed_num, parsed_num / total_num))
        print('unparsed num:{} prob:{:.4f}'.format(unparsed_num, unparsed_num / total_num))
        print('empty num:{} prob:{:.4f}'.format(empty_num, empty_num / total_num))
        print('total num:{}'.format(total_num))
        dump_paths(unparsed_fn, unparsed_list)
        print(f'unparsed save in {unparsed_fn}')
        # The unparsed list becomes the new path index for future runs.
        copyfile(unparsed_fn, raw_inx_fn)
        if parse_flg:
            file_name_list = unparsed_list
            print(f'read {len(file_name_list)} unparsed files after check')
    if parse_flg:
        # Only process the chunk_inx-th slice of chunk_size slices, so
        # several machine-level invocations can split the corpus.
        div_num = args.chunk_size
        index = args.chunk_inx
        internal = len(file_name_list) // div_num + 1
        file_name_list = file_name_list[index * internal:(index + 1) * internal]
        print(f'parsing file num:{len(file_name_list)} from {index*internal} to {(index+1)*internal}')
        tasks = []
        for i, fn_list in enumerate(chunk_list(file_name_list)):
            t = ParsingTask(fn_list, parsed_root, corenlp_path, 9101 + i % server_num, anno, link_flg)
            tasks.append(t)
        # print('task num:{} file num:{} for {} workers'.format(len(tasks), file_num, worker_num))
        print(f'file {len(tasks[-1].file_list)} per task')
        import time
        t = time.time()
        with Pool(worker_num) as pool:
            res = pool.map_async(parse_func, tasks)
            res.get()
            res.wait()
        print(f'cost {time.time()-t}f')
if __name__ == '__main__':
    # CLI entry point: check and/or parse the selected ASER corpus.
    main()
| [
"785850602@qq.com"
] | 785850602@qq.com |
462a7046e8a050379388b4c55914328f5e45deca | a34df0359b8aa5ef03c010fe91229e4cbb765d1f | /Step X/twilio/rest/studio/v1/flow/engagement/__init__.py | fe27c9983cdfb3ea2a9b071aeb5806fec9df053a | [
"Unlicense"
] | permissive | wrestlerdude/QuackathonRubeGoldberg2019 | f881d6c131ca8349946d01be29ff4ad272e11159 | fdaafb79add30a3de075fa0ab9c7c88900081f65 | refs/heads/master | 2020-04-20T11:52:01.937292 | 2019-02-04T18:10:54 | 2019-02-04T18:10:54 | 168,828,471 | 1 | 0 | Unlicense | 2019-02-02T21:50:33 | 2019-02-02T12:16:32 | PHP | UTF-8 | Python | false | false | 16,098 | py | # coding=utf-8
"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import serialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
from twilio.rest.studio.v1.flow.engagement.engagement_context import EngagementContextList
from twilio.rest.studio.v1.flow.engagement.step import StepList
class EngagementList(ListResource):
    """ """
    # NOTE: generated code (see file banner); regenerate from the API
    # definition rather than hand-editing.

    def __init__(self, version, flow_sid):
        """
        Initialize the EngagementList

        :param Version version: Version that contains the resource
        :param flow_sid: Flow Sid.

        :returns: twilio.rest.studio.v1.flow.engagement.EngagementList
        :rtype: twilio.rest.studio.v1.flow.engagement.EngagementList
        """
        super(EngagementList, self).__init__(version)

        # Path Solution
        self._solution = {'flow_sid': flow_sid, }
        self._uri = '/Flows/{flow_sid}/Engagements'.format(**self._solution)

    def stream(self, limit=None, page_size=None):
        """
        Streams EngagementInstance records from the API as a generator stream.
        This operation lazily loads records as efficiently as possible until the limit
        is reached.
        The results are returned as a generator, so this operation is memory efficient.

        :param int limit: Upper limit for the number of records to return. stream()
                          guarantees to never return more than limit.  Default is no limit
        :param int page_size: Number of records to fetch per request, when not set will use
                              the default value of 50 records.  If no page_size is defined
                              but a limit is defined, stream() will attempt to read the
                              limit with the most efficient page size, i.e. min(limit, 1000)

        :returns: Generator that will yield up to limit results
        :rtype: list[twilio.rest.studio.v1.flow.engagement.EngagementInstance]
        """
        limits = self._version.read_limits(limit, page_size)

        page = self.page(page_size=limits['page_size'], )

        return self._version.stream(page, limits['limit'], limits['page_limit'])

    def list(self, limit=None, page_size=None):
        """
        Lists EngagementInstance records from the API as a list.
        Unlike stream(), this operation is eager and will load `limit` records into
        memory before returning.

        :param int limit: Upper limit for the number of records to return. list() guarantees
                          never to return more than limit.  Default is no limit
        :param int page_size: Number of records to fetch per request, when not set will use
                              the default value of 50 records.  If no page_size is defined
                              but a limit is defined, list() will attempt to read the limit
                              with the most efficient page size, i.e. min(limit, 1000)

        :returns: Generator that will yield up to limit results
        :rtype: list[twilio.rest.studio.v1.flow.engagement.EngagementInstance]
        """
        # Materializes the stream() generator into a list.
        return list(self.stream(limit=limit, page_size=page_size, ))

    def page(self, page_token=values.unset, page_number=values.unset,
             page_size=values.unset):
        """
        Retrieve a single page of EngagementInstance records from the API.
        Request is executed immediately

        :param str page_token: PageToken provided by the API
        :param int page_number: Page Number, this value is simply for client state
        :param int page_size: Number of records to return, defaults to 50

        :returns: Page of EngagementInstance
        :rtype: twilio.rest.studio.v1.flow.engagement.EngagementPage
        """
        params = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })

        response = self._version.page(
            'GET',
            self._uri,
            params=params,
        )

        return EngagementPage(self._version, response, self._solution)

    def get_page(self, target_url):
        """
        Retrieve a specific page of EngagementInstance records from the API.
        Request is executed immediately

        :param str target_url: API-generated URL for the requested results page

        :returns: Page of EngagementInstance
        :rtype: twilio.rest.studio.v1.flow.engagement.EngagementPage
        """
        response = self._version.domain.twilio.request(
            'GET',
            target_url,
        )

        return EngagementPage(self._version, response, self._solution)

    def create(self, to, from_, parameters=values.unset):
        """
        Create a new EngagementInstance

        :param unicode to: The Contact phone number to start a Studio Flow Engagement.
        :param unicode from_: The Twilio phone number to send messages or initiate calls from during the Flow Engagement.
        :param dict parameters: JSON data that will be added to your flow's context and can accessed as variables inside your flow.

        :returns: Newly created EngagementInstance
        :rtype: twilio.rest.studio.v1.flow.engagement.EngagementInstance
        """
        data = values.of({'To': to, 'From': from_, 'Parameters': serialize.object(parameters), })

        payload = self._version.create(
            'POST',
            self._uri,
            data=data,
        )

        return EngagementInstance(self._version, payload, flow_sid=self._solution['flow_sid'], )

    def get(self, sid):
        """
        Constructs a EngagementContext

        :param sid: Engagement Sid.

        :returns: twilio.rest.studio.v1.flow.engagement.EngagementContext
        :rtype: twilio.rest.studio.v1.flow.engagement.EngagementContext
        """
        return EngagementContext(self._version, flow_sid=self._solution['flow_sid'], sid=sid, )

    def __call__(self, sid):
        """
        Constructs a EngagementContext

        :param sid: Engagement Sid.

        :returns: twilio.rest.studio.v1.flow.engagement.EngagementContext
        :rtype: twilio.rest.studio.v1.flow.engagement.EngagementContext
        """
        return EngagementContext(self._version, flow_sid=self._solution['flow_sid'], sid=sid, )

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        return '<Twilio.Studio.V1.EngagementList>'
class EngagementPage(Page):
    """ """
    # NOTE: generated code (see file banner); regenerate from the API
    # definition rather than hand-editing.

    def __init__(self, version, response, solution):
        """
        Initialize the EngagementPage

        :param Version version: Version that contains the resource
        :param Response response: Response from the API
        :param flow_sid: Flow Sid.

        :returns: twilio.rest.studio.v1.flow.engagement.EngagementPage
        :rtype: twilio.rest.studio.v1.flow.engagement.EngagementPage
        """
        super(EngagementPage, self).__init__(version, response)

        # Path Solution
        self._solution = solution

    def get_instance(self, payload):
        """
        Build an instance of EngagementInstance

        :param dict payload: Payload response from the API

        :returns: twilio.rest.studio.v1.flow.engagement.EngagementInstance
        :rtype: twilio.rest.studio.v1.flow.engagement.EngagementInstance
        """
        return EngagementInstance(self._version, payload, flow_sid=self._solution['flow_sid'], )

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        return '<Twilio.Studio.V1.EngagementPage>'
class EngagementContext(InstanceContext):
    """ """
    # NOTE: generated code (see file banner); regenerate from the API
    # definition rather than hand-editing.

    def __init__(self, version, flow_sid, sid):
        """
        Initialize the EngagementContext

        :param Version version: Version that contains the resource
        :param flow_sid: Flow Sid.
        :param sid: Engagement Sid.

        :returns: twilio.rest.studio.v1.flow.engagement.EngagementContext
        :rtype: twilio.rest.studio.v1.flow.engagement.EngagementContext
        """
        super(EngagementContext, self).__init__(version)

        # Path Solution
        self._solution = {'flow_sid': flow_sid, 'sid': sid, }
        self._uri = '/Flows/{flow_sid}/Engagements/{sid}'.format(**self._solution)

        # Dependents (lazily constructed in the properties below)
        self._steps = None
        self._engagement_context = None

    def fetch(self):
        """
        Fetch a EngagementInstance

        :returns: Fetched EngagementInstance
        :rtype: twilio.rest.studio.v1.flow.engagement.EngagementInstance
        """
        params = values.of({})

        payload = self._version.fetch(
            'GET',
            self._uri,
            params=params,
        )

        return EngagementInstance(
            self._version,
            payload,
            flow_sid=self._solution['flow_sid'],
            sid=self._solution['sid'],
        )

    def delete(self):
        """
        Deletes the EngagementInstance

        :returns: True if delete succeeds, False otherwise
        :rtype: bool
        """
        return self._version.delete('delete', self._uri)

    @property
    def steps(self):
        """
        Access the steps

        :returns: twilio.rest.studio.v1.flow.engagement.step.StepList
        :rtype: twilio.rest.studio.v1.flow.engagement.step.StepList
        """
        # Lazily create and cache the dependent list resource.
        if self._steps is None:
            self._steps = StepList(
                self._version,
                flow_sid=self._solution['flow_sid'],
                engagement_sid=self._solution['sid'],
            )
        return self._steps

    @property
    def engagement_context(self):
        """
        Access the engagement_context

        :returns: twilio.rest.studio.v1.flow.engagement.engagement_context.EngagementContextList
        :rtype: twilio.rest.studio.v1.flow.engagement.engagement_context.EngagementContextList
        """
        # Lazily create and cache the dependent list resource.
        if self._engagement_context is None:
            self._engagement_context = EngagementContextList(
                self._version,
                flow_sid=self._solution['flow_sid'],
                engagement_sid=self._solution['sid'],
            )
        return self._engagement_context

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Studio.V1.EngagementContext {}>'.format(context)
class EngagementInstance(InstanceResource):
    """Instance resource for a Twilio Studio v1 Engagement: one contact's
    run through a Studio Flow. All actions are proxied to an
    EngagementContext built lazily from this instance's solution."""

    class Status(object):
        """Allowed values for an Engagement's ``status`` property."""
        ACTIVE = "active"
        ENDED = "ended"

    def __init__(self, version, payload, flow_sid, sid=None):
        """
        Initialize the EngagementInstance

        :param version: Version that contains the resource.
        :param payload: decoded JSON payload describing this engagement.
        :param flow_sid: SID of the parent Flow.
        :param sid: Engagement SID; defaults to the SID found in the payload.
        :returns: twilio.rest.studio.v1.flow.engagement.EngagementInstance
        :rtype: twilio.rest.studio.v1.flow.engagement.EngagementInstance
        """
        super(EngagementInstance, self).__init__(version)

        # Marshaled Properties
        self._properties = {
            'sid': payload['sid'],
            'account_sid': payload['account_sid'],
            'flow_sid': payload['flow_sid'],
            'contact_sid': payload['contact_sid'],
            'contact_channel_address': payload['contact_channel_address'],
            'context': payload['context'],
            'status': payload['status'],
            # Timestamps arrive as ISO-8601 strings; convert to datetime.
            'date_created': deserialize.iso8601_datetime(payload['date_created']),
            'date_updated': deserialize.iso8601_datetime(payload['date_updated']),
            'url': payload['url'],
            'links': payload['links'],
        }

        # Context: built lazily by _proxy on first use.
        self._context = None
        self._solution = {'flow_sid': flow_sid, 'sid': sid or self._properties['sid'], }

    @property
    def _proxy(self):
        """
        Generate an instance context for the instance, the context is capable of
        performing various actions. All instance actions are proxied to the context

        :returns: EngagementContext for this EngagementInstance
        :rtype: twilio.rest.studio.v1.flow.engagement.EngagementContext
        """
        # Lazily construct and cache the context on first access.
        if self._context is None:
            self._context = EngagementContext(
                self._version,
                flow_sid=self._solution['flow_sid'],
                sid=self._solution['sid'],
            )
        return self._context

    @property
    def sid(self):
        """
        :returns: A string that uniquely identifies this Engagement.
        :rtype: unicode
        """
        return self._properties['sid']

    @property
    def account_sid(self):
        """
        :returns: Account Sid.
        :rtype: unicode
        """
        return self._properties['account_sid']

    @property
    def flow_sid(self):
        """
        :returns: Flow Sid.
        :rtype: unicode
        """
        return self._properties['flow_sid']

    @property
    def contact_sid(self):
        """
        :returns: Contact Sid.
        :rtype: unicode
        """
        return self._properties['contact_sid']

    @property
    def contact_channel_address(self):
        """
        :returns: The phone number, SIP address or Client identifier that triggered this Engagement.
        :rtype: unicode
        """
        return self._properties['contact_channel_address']

    @property
    def context(self):
        """
        :returns: Flow state.
        :rtype: dict
        """
        return self._properties['context']

    @property
    def status(self):
        """
        :returns: The Status of this Engagement
        :rtype: EngagementInstance.Status
        """
        return self._properties['status']

    @property
    def date_created(self):
        """
        :returns: The date this Engagement was created
        :rtype: datetime
        """
        return self._properties['date_created']

    @property
    def date_updated(self):
        """
        :returns: The date this Engagement was updated
        :rtype: datetime
        """
        return self._properties['date_updated']

    @property
    def url(self):
        """
        :returns: The URL of this resource.
        :rtype: unicode
        """
        return self._properties['url']

    @property
    def links(self):
        """
        :returns: Nested resource URLs.
        :rtype: unicode
        """
        return self._properties['links']

    def fetch(self):
        """
        Fetch a EngagementInstance

        :returns: Fetched EngagementInstance
        :rtype: twilio.rest.studio.v1.flow.engagement.EngagementInstance
        """
        return self._proxy.fetch()

    def delete(self):
        """
        Deletes the EngagementInstance

        :returns: True if delete succeeds, False otherwise
        :rtype: bool
        """
        return self._proxy.delete()

    @property
    def steps(self):
        """
        Access the steps

        :returns: twilio.rest.studio.v1.flow.engagement.step.StepList
        :rtype: twilio.rest.studio.v1.flow.engagement.step.StepList
        """
        return self._proxy.steps

    @property
    def engagement_context(self):
        """
        Access the engagement_context

        :returns: twilio.rest.studio.v1.flow.engagement.engagement_context.EngagementContextList
        :rtype: twilio.rest.studio.v1.flow.engagement.engagement_context.EngagementContextList
        """
        return self._proxy.engagement_context

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Studio.V1.EngagementInstance {}>'.format(context)
| [
"raishallan2@gmail.com"
] | raishallan2@gmail.com |
456514a59068288798944a3a97f656cb83dd4767 | af166a14d718fc52b0b4fc51fd07bd00e30be328 | /ncaa_basketball/views.py | 93900c9c18ad8dfa4f00ec6782d8e984420ff417 | [] | no_license | carlosrivera22/basketball-django-analysis | f1bd5f65476e9b0851c4603c85c8e523b6dd383c | 51eb8e7042e58c5edb118a4d7b77678fb245898b | refs/heads/master | 2022-12-18T10:43:56.547394 | 2020-09-07T15:52:55 | 2020-09-07T15:52:55 | 293,566,809 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,857 | py | from django.shortcuts import render
from .models import Team
import pandas as pd
import seaborn as sns
from .utils import get_image,get_correlation_plot
import matplotlib.pyplot as plt
# Create your views here.
def winning_correlation_view(request):
    """Render correlation plots of team efficiency metrics vs. winning %.

    On GET the page is shown with no chart.  On POST, the ``title`` form
    field selects which scatter/correlation plot to build from all ``Team``
    rows.  Any failure while loading or plotting the data falls back to an
    error message rather than a 500.

    :param request: Django ``HttpRequest``.
    :returns: rendered ``ncaa_basketball/correlations.html`` response with
        ``graph`` (base64 image or None), ``corr`` (correlation coefficient
        or None) and ``error_message`` in the template context.
    """
    graph = None
    error_message = None
    corr = None
    try:
        df = pd.DataFrame(Team.objects.all().values())
        if request.method == 'POST':
            chart_title = request.POST.get('title')
            if chart_title == "offensive efficiency vs. winning %":
                graph, corr = get_correlation_plot(
                    chart_title,
                    x='offensive_efficiency',
                    y='win_pct',
                    data=df,
                    x_label='Offensive Efficiency Rating \n (Points Scored per 100 Possessions)',
                    y_label='Winning %')
            elif chart_title == "defensive efficiency vs. winning %":
                graph, corr = get_correlation_plot(
                    chart_title,
                    x='defensive_efficiency',
                    y='win_pct',
                    data=df,
                    x_label='Defensive Efficiency Rating \n (Points Allowed per 100 Possessions)',
                    y_label='Winning %')
            else:
                error_message = 'Please select a chart to continue'
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # propagate.  NOTE(review): this message blames the database for any
        # failure (including plotting errors) -- consider logging the exception.
        error_message = 'No records in the database'
    context = {
        'graph': graph,
        'corr': corr,
        'error_message': error_message
    }
    return render(request, 'ncaa_basketball/correlations.html', context)
| [
"carlos.rivera22@upr.edu"
] | carlos.rivera22@upr.edu |
54dc0542608fb96fd12bb48b1d4e91e839992097 | be47bcdeca864634dfc3576b05d8cd2cc4e57379 | /utils.py | 4eaf24a995ecd12ee8b4ffee233c7d3e0ae2b285 | [
"MIT"
] | permissive | 4eyes4u/BouncingBall | 955de357904dc9de4c48b7b6c5cbb7c025a5f3c4 | b641227d332e3b2c6080b608344880d9cad9c985 | refs/heads/master | 2022-07-17T02:15:10.435841 | 2019-05-28T14:02:23 | 2019-05-28T14:02:23 | 188,619,804 | 0 | 0 | MIT | 2022-06-21T22:01:06 | 2019-05-25T23:00:49 | Jupyter Notebook | UTF-8 | Python | false | false | 965 | py | from matplotlib import pyplot as plt
def singleton_plot(ball, idx):
    """Draw the three-panel diagnostic figure for one simulated ball.

    Panels: x/y position vs. time (top-left), energy vs. x (top-right) and
    the trajectory y(x) (bottom row).  pyplot label/title calls act on the
    most recently created axes, so the add_subplot ordering is significant.

    NOTE(review): assumes ``ball`` exposes plot_xy/plot_E/plot_r(ax, idx)
    and that those calls add labelled artists -- confirm against the Ball class.
    """
    f = plt.figure(num='Singleton')
    # xy plot: position components against time (top-left panel)
    ax_xy = f.add_subplot(2, 2, 1)
    plt.xlabel('t [s]')
    plt.ylabel('x, y [m]')
    plt.title('r / t')
    ball.plot_xy(ax_xy, idx)
    plt.legend()
    # E plot: energy along x (top-right panel)
    ax_E = f.add_subplot(2, 2, 2)
    plt.xlabel('x [m]')
    plt.ylabel('E [J]')
    plt.title('E / x')
    ball.plot_E(ax_E, idx)
    plt.legend()
    # r plot: full trajectory, spanning the bottom row
    ax_r = f.add_subplot(2, 1, 2)
    plt.xlabel('x [m]')
    plt.ylabel('y [m]')
    plt.title('r (x, y)')
    ball.plot_r(ax_r, idx)
    plt.legend()
def comparison_plot(ball):
    """Plot all trajectories of ``ball`` on a single y(x) axes for comparison.

    NOTE(review): assumes ``ball.plot_r(ax)`` without an index draws every
    stored trajectory -- confirm against the Ball class.
    """
    f = plt.figure(num='Comparison')
    ax = f.add_subplot(1, 1, 1)
    plt.xlabel('x [m]')
    plt.ylabel('y [m]')
    plt.title('r (x, y)')
    ball.plot_r(ax)
    plt.legend()
def simulation(ball, indices):
    """Run the ball's simulation for every entry of *indices* (a list)."""
    # Thin delegation -- all of the work happens on the ball object.
    ball.simulation(indices)
| [
"noreply@github.com"
] | 4eyes4u.noreply@github.com |
f5b5d16702bc9a55d4736c220d1bcce79944a381 | b20f125ce198fd26b9048152592a6efd5de8aee7 | /fix_coverage.py | 60275ffdbfeb12880211645717d1a555be404c2b | [
"MIT"
] | permissive | greninger-lab/covid_swift_pipeline | dc39630ea21fe0f421f2d997f63872506350b7b5 | ad02013851e154fc8034817382ef6d25d9c921b7 | refs/heads/master | 2023-06-26T02:20:51.425404 | 2023-04-20T02:00:26 | 2023-06-08T16:30:10 | 286,556,061 | 5 | 6 | MIT | 2022-08-13T20:55:04 | 2020-08-10T18:58:02 | Nextflow | UTF-8 | Python | false | false | 937 | py | import os
import sys
import argparse
def main(argv=None):
    """Recompute the %N field of ``<sample>_summary.csv``.

    Reads ``<sample>_taylor.fasta``, computes the percentage of N bases in
    the sequence line (second line of the FASTA), and writes
    ``<sample>_summary_fixed.csv`` with the last column of the data row
    replaced by that percentage.

    Fixes vs. the original script: file handles are closed (the output was
    previously never flushed explicitly), and the trailing newline is no
    longer counted in the sequence length, which slightly under-reported %N.

    :param argv: optional argument list (defaults to ``sys.argv[1:]``).
    """
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('sample_name', help='sample name')
    args = parser.parse_args(argv)
    sample = str(args.sample_name)

    # Default to 100% N when no non-empty sequence line is found
    # (mirrors the original behaviour).
    n_percent = 100
    with open(sample + '_taylor.fasta') as fasta:
        for line_num, line in enumerate(fasta):
            if line_num == 1:
                sequence = line.strip()
                if sequence:
                    n_percent = sequence.lower().count('n') / len(sequence) * 100

    with open(sample + '_summary.csv') as src, \
            open(sample + '_summary_fixed.csv', 'w+') as dst:
        for line_num, line in enumerate(src):
            if line_num == 0:
                # Header row is copied through unchanged.
                dst.write(line)
            elif line_num == 1:
                # Replace the last field (the %N column) with the new value.
                fields = line.split(',')
                fields[-1] = n_percent
                dst.write(','.join(map(str, fields)))


if __name__ == '__main__':
    main()
"michellejadelin@gmail.com"
] | michellejadelin@gmail.com |
97dc0dee0ef8ce0ada8c9102b035a98d5717adee | e0045eec29aab56212c00f9293a21eb3b4b9fe53 | /account_voucher/__manifest__.py | 34480401b13ad5043af7067acd03109289d910d1 | [] | no_license | tamam001/ALWAFI_P1 | a3a9268081b9befc668a5f51c29ce5119434cc21 | 402ea8687c607fbcb5ba762c2020ebc4ee98e705 | refs/heads/master | 2020-05-18T08:16:50.583264 | 2019-04-30T14:43:46 | 2019-04-30T14:43:46 | 184,268,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,459 | py | # -*- coding: utf-8 -*-
# Part of ALWAFI. See LICENSE file for full copyright and licensing details.
{
'name' : 'Sale & Purchase Vouchers',
'version' : '1.0',
'summary': 'Manage your debts and credits thanks to simple sale/purchase receipts',
'description': """
TODO
old description:
Invoicing & Payments by Accounting Voucher & Receipts
=====================================================
The specific and easy-to-use Invoicing system in ALWAFI allows you to keep track of your accounting, even when you are not an accountant. It provides an easy way to follow up on your vendors and customers.
You could use this simplified accounting in case you work with an (external) account to keep your books, and you still want to keep track of payments.
The Invoicing system includes receipts and vouchers (an easy way to keep track of sales and purchases). It also offers you an easy method of registering payments, without having to encode complete abstracts of account.
This module manages:
* Voucher Entry
* Voucher Receipt [Sales & Purchase]
* Voucher Payment [Customer & Vendors]
""",
'category': 'Accounting',
'sequence': 20,
'depends' : ['account'],
'demo' : [],
'data' : [
'security/ir.model.access.csv',
'views/account_voucher_views.xml',
'security/account_voucher_security.xml',
'data/account_voucher_data.xml',
],
'auto_install': False,
'installable': True,
}
| [
"50145400+gilbertp7@users.noreply.github.com"
] | 50145400+gilbertp7@users.noreply.github.com |
b56d4fe821cd8462bbda70acd89752b0fbce8a74 | 7c91f92d2d82e0d9fd85af09f9d18226c747f7fa | /rhoci/forms/test.py | bb9d6fe3cf671e23e1b037366251aa9886986d9a | [
"Apache-2.0"
] | permissive | bregman-arie/rhoci | 5488afe8d884cb72a3475eef68ebc54944b45453 | bae1f1d737a12ede50d263a6496faf2b698515b5 | refs/heads/master | 2023-02-25T10:53:01.642377 | 2022-12-10T14:37:40 | 2022-12-10T14:37:40 | 90,493,854 | 12 | 8 | Apache-2.0 | 2023-02-16T07:11:11 | 2017-05-06T22:06:20 | CSS | UTF-8 | Python | false | false | 1,117 | py | # Copyright 2019 Arie Bregman
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from flask_wtf import FlaskForm
from wtforms import BooleanField
from wtforms import StringField
from wtforms import SubmitField
class TestSearch(FlaskForm):
    """WTForms form backing the test-search page.

    Each text field narrows the search by the corresponding test attribute;
    ``skipped`` toggles whether skipped tests are matched.
    """

    class_name = StringField('class name')
    test_name = StringField('test name')
    status = StringField('status')
    failed_since = StringField('failed since')
    skipped_message = StringField('skipped message')
    stdout = StringField('stdout')
    stderr = StringField('stderr')
    skipped = BooleanField()
    submit = SubmitField('Search')
| [
"abregman@redhat.com"
] | abregman@redhat.com |
0fa2b8c8ec819233bc34543f46cd4fd13fe8509b | 7d75018c3d8e2ac85ea0f5bbaf52ce5eae9761ca | /project/gohelp/settings.py | 3bfa30ab59e9abf68240589b9a17501126484713 | [] | no_license | AVS18/sdp-sem5 | fff484331d9b588558b928e557a974f05652adcb | 238dcc7dfe50dda9678383590a43b23bbcd99553 | refs/heads/main | 2023-01-14T01:01:18.297711 | 2020-11-14T13:43:55 | 2020-11-14T13:43:55 | 288,098,284 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,850 | py | """
Django settings for gohelp project.
Generated by 'django-admin startproject' using Django 3.1.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path

# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control -- rotate it and load
# it from an environment variable before any production deployment.
SECRET_KEY = '6-rp4=_omlx$ya3@dms@a8jnpamp#$dl^y(bx!0ptji47ag!qk'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Project apps
    'base',
    'worker',
    'customer',
    # django-storages: S3 media backend configured below
    'storages'
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'gohelp.urls'

# Custom user model defined in the 'base' app.
AUTH_USER_MODEL = 'base.User'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': ['templates'],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'gohelp.wsgi.application'

# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
# NOTE(review): database credentials are hard-coded in source control --
# move them to environment variables.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'gohelp',
        'USER': 'postgres',
        'PASSWORD': 'kamakshi@1234',
        'HOST': 'localhost'
    }
}

# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/

import os  # NOTE(review): mid-file import; conventionally belongs at the top.

STATIC_URL = '/static/'
STATICFILES_DIRS = [
    os.path.join(BASE_DIR,'static'),
]
STATIC_ROOT = os.path.join(BASE_DIR, 'assets')

# Amazon S3 media storage (django-storages).
AWS_ACCESS_KEY_ID = 'replace the credentials'
AWS_SECRET_ACCESS_KEY = "replace the credentials"
AWS_STORAGE_BUCKET_NAME = "gohelp"
AWS_S3_REGION_NAME = 'ap-south-1'
AWS_S3_FILE_OVERWRITE = False
AWS_DEFAULT_ACL = None
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'

# Outgoing e-mail via Gmail SMTP.
# NOTE(review): real account credentials are committed here -- revoke them
# and load from environment variables.
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 587
EMAIL_HOST_USER='adityaintern11@gmail.com'
EMAIL_HOST_PASSWORD='aditya12345'
EMAIL_USE_TLS = True
"avsadityavardhan18bcs@iiitkottayam.ac.in"
] | avsadityavardhan18bcs@iiitkottayam.ac.in |
da538ba6c5ed25378502cecfd4ab8a614450ca6f | 702f145cd55e40dd4d1bc9297100e1ba8f940847 | /ex12.py | 2e7fe5f3f9e5de9b2204fd37db7d9579cfc065f1 | [] | no_license | demianvle/PythonExercises | 531642a85685779f5f47e3816278d4223d98ee20 | dfa04e8b8e237d5460fcf6fd2972cca9d66bf3c4 | refs/heads/master | 2021-07-17T08:52:22.204963 | 2018-04-27T04:07:13 | 2018-04-27T04:07:13 | 96,750,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 479 | py | import sys
##################################################
# http://www.practicepython.org/exercise/2014/04/25/12-list-ends.html
"""Write a program that takes a list of numbers (for example, a = [5, 10, 15, 20, 25])
and makes a new list of only the first and last elements of the given list.
For practice, write this code inside a function."""


def list_ends(numbers):
    """Return a new list of only the first and last elements of *numbers*.

    :param numbers: non-empty sequence of numbers.
    :returns: two-element list ``[first, last]``.
    """
    return [numbers[0], numbers[-1]]


if __name__ == '__main__':
    # The exercise asks for the logic inside a function; the guard also makes
    # the module importable (sys.exit() no longer runs on import).
    a = [6, 8, 9, 5, 4, 7, 2, 3, 6, 6, 5, 4, 11, 3, 6, 5, 4, 8]
    print(list_ends(a))
    #################################################3
    sys.exit()
| [
"demianvle@protonmail.com"
] | demianvle@protonmail.com |
82d2e4dc48f033a7b3fd55ba860b7794abfed464 | 27262c99a938b100ad1dec3fde9bc0fdcf22e23c | /Day5/Lecture/day05_lecture.py | ef6eadd0ba997f336a14dfe5a29740448b50a92e | [] | no_license | weiyedeng/python_summer2021 | 7a8ac04554f4580438b9bdd5d646803f24f6b186 | 55d9c3e743d043b44d3495c37f9e2dc06dd49528 | refs/heads/main | 2023-07-17T10:31:09.360005 | 2021-09-03T02:15:53 | 2021-09-03T02:15:53 | 395,026,184 | 0 | 0 | null | 2021-08-11T14:57:41 | 2021-08-11T14:57:40 | null | UTF-8 | Python | false | false | 13,149 | py | # Python - 2021 Summer Course
# Day 5
# Topic: Regular Expressions and Naive Bayes Classifier
# Instructor: Ben Noble
# Former Instructors: Patrick Cunha Silva, Ryden Buttler,
# Erin Rossiter, Michele Torres
# David Carlson, and Betul Demirkaya
# First Instructor: Matt Dickenson
# Set Directory
import os
os.chdir('/Users/bennoble/Dropbox/Ben/GitHub/python_summer2021/Day5/Lecture/')
#---------- Regular Expressions ----------#
# Regular expressions are useful to extract information from text.
# Set of “rules” to identify or match a particular sequence of characters.
# Most text in ASCII: letters, digits, punctuation and symbols
# (but unicode can also be used)
# In Python, mainly through library re.
import re
# Load example text
# read in example text, remember:
# readlines makes a list of each line break in file
with open("obama-nh.txt", "r") as f:
text = f.readlines()
# How is this file structured?
# How does it impact our 'text' object?
print(text[0:3])
print(text[0])
print(text[1])
print(text[2])
# Join into one string
# What could we have done at the outset instead?
alltext = ''.join(text)
# OR
with open("obama-nh.txt", "r") as f:
alltext = f.read()
# Useful functions from re module:
# re.findall: Return all non-overlapping matches of pattern
# in string, as a list of strings
# re.split: Split string by the occurrences of pattern.
# re.match: If zero or more characters at the beginning of
# string match the regular expression pattern,
# return a corresponding match object.
# re.search: Scan through string looking for the first location where
# the regular expression pattern produces a match,
# and return a corresponding match object.
# re.compile: Compile a regular expression pattern into a regular
# expression object, which can be used for matching using i
# ts match(), search() and other methods
# See https://docs.python.org/3/library/re.html for more.
# Examples
re.findall(r"Yes we can", alltext) # All instance of Yes we can
re.findall(r"American", alltext) # All instances of American
re.findall(r"\n", alltext) # all breaklines
# we use "r" to signal the start of a pattern.
# "r" is Python's raw string notation for regular expression patterns
# used instead of escape character "\"
"\n"
print("\n")
"\\n"
print("\\n")
r"\n"
print(r"\n")
r"\\n"
print(r"\\n")
#---------- Basic special characters ----------#
# \d digits
re.findall(r"\d", alltext)
# \D non-digits
re.findall(r"\D", alltext)
# all instances of the char in []
re.findall(r"[a]", alltext)
# all instances of the from char 1 to char 2 in []
re.findall(r"[a-d]", alltext)
# all char, ^ except for of the from char 1 to char 2 in []
re.findall(r"[^a-d]", alltext)
# all char and digits (alphanumeric)
re.findall(r"[a-zA-Z0-9]", alltext)
re.findall(r"\w", alltext) # same as re.findall(r"[a-zA-Z0-9]", alltext)
# \W non-alphanumeric, one non-word char
re.findall(r"\W", alltext) # same as re.findall(r"[^a-zA-Z0-9]", alltext)
# \s whitespace
re.findall(r"\s", alltext)
# \S non-whitespace
re.findall(r"\S", alltext)
# . any char (include white spaces)
re.findall(r".", alltext)
# \ is an escape character (. has a special use)
re.findall(r"\.", alltext)
# At least one occurrence for the match to succeed ( 1 or many)
re.findall(r"\d+", alltext)
# Makes the preceding item optional.
re.findall(r"\S?", alltext)
# {x} exactly x times (numbers with exact number of digits)
re.findall(r"\d{3}", alltext)
re.findall(r"\d{2}", alltext)
re.findall(r"\d{1}", alltext)
# {x, y} from x to y times (numbers with exact number of digits from x to y)
re.findall(r"\d{1,3}", alltext)
# More here: https://www.regular-expressions.info/refrepeat.html
# And hear: https://www.debuggex.com/cheatsheet/regex/python
# Short Exercise: How would we grab 10/10 as it appears in text?
x = "Hi 10/10 hello 9/18 asdf 9/9"
# Answer
re.findall(r"\d{2}/\d{2}", x)
## Explain what's happening:
x = "American's lov\we McDonalds"
re.findall(r"\w", x)
# \w is Regular Expression Character Classes
x
# We need to add a escape
re.findall(r"\\w", x)
# get any word that starts with America
re.findall(r"America[a-z]*", alltext)
# get any complete word starting with an upper-case letter
re.findall(r"([A-Z]+\w*)", alltext)
# () group of characters
# starting with a letter A to Z
# + the next n characters
# * 0 or more of the item
#---------- re.split() ----------#
# splits at digits, deletes digits
re.split(r'\d', alltext)
# splits at non-digits, deletes char
re.split(r'\D', alltext)
# What is this doing?
re.split(r'\.', alltext) # remove separator
re.split(r'(\.)', alltext) # using () we split and keep separator
#---------- re.compile() ----------#
# compile the regular expression as an object
# then the regular expression has methods!
keyword = re.compile(r"America[a-z]*")
# search file for keyword in line by line version
for i, line in enumerate(text):
if keyword.search(line):
print(i)
print(line)
# enumerate() allows us to loop over something and have an automatic counter
# Create a regex object
pattern = re.compile(r'\d')
pattern.findall(alltext)
pattern.split(alltext)
# Can also search across lines in single strings with re.MULTILINE
mline = 'bin\nban\ncan'
print(mline)
# ^ check the start of the string
# looking for b
pattern = re.compile(r'^b\w*') # "^" words starting in b
pattern.findall(mline)
# looking for b in multilines
pattern = re.compile(r'^b\w*', re.MULTILINE)
pattern.findall(mline)
# Now, back to the speech as a single string...
# Explain the difference between these two lines
re.findall(r'^b\w*', alltext, re.MULTILINE)
re.findall(r'^b\w*', alltext)
# re.MULTILINE treats each line as its own string
# for the sake of the pattern
# Short Exercise
# Check if a line ends in a period
# How is this working?
re.findall(r'^.*\.$', alltext, re.MULTILINE)
# '^.' = starts with any char
# * returns up to the end of the line
# \. if the line has a period
# $ if the line ends with a period
#---------- search, match, and groups ----------#
t = '12 twelve'
# find a number and a word separated by a whitespace
pattern = re.compile(r'(\d*)\s(\w*)')
# create an instance
tsearch = pattern.search(t)
# tuple of all groups
tsearch.groups()
# the complete match
tsearch.group(0)
# the first group
tsearch.group(1)
# the second group
tsearch.group(2)
# Similar to using () alone, but the text
# matched by the group is then accessible
# (?P<Y>...) Capturing group named Y
pattern = re.compile(r'(?P<number>\d*)\s(?P<name>\w*)')
tsearch = pattern.search(t)
tsearch.groups()
tsearch.groupdict()
# Another example
mytext = '12 24'
pattern = re.compile(r'(\d*)\s(\d*)')
pattern.search(mytext).groups()
pattern.search(mytext).group(0)
pattern.search(mytext).group(1)
pattern.search(mytext).group(2)
# match starts search at beginning of string
# like an invisible ^
pattern.match(r"12 24").groups()
pattern.match(r"a12 24").groups() # fails
pattern.search(r"a12 24").groups() # works
#---------- Naive Bayes ----------#
# Some docs for this library:
# http://nltk.org/api/nltk.classify.html#module-nltk.classify.naivebayes
# pip install nltk
import nltk
nltk.download('names')
from nltk.corpus import names
import random
# Create a list of tuples with names
names = ([(name, 'male') for name in names.words('male.txt')] +
[(name, 'female') for name in names.words('female.txt')])
# Now, we shuffle
random.shuffle(names)
# We need training and test sets.
# Define training and test set sizes
len(names) # N of observations
train_size = 5000
# Split train and test objects
train_names = names[:train_size]
test_names = names[train_size:]
# A simple feature: Get the last letter of the name
def g_features1(word):
    """Feature dict containing only the final letter of *word*."""
    last = word[-1]
    return {'last_letter': last}
# Quick break—some syntax:
def return_two():
    """Demonstrate multiple return values (really a single 2-tuple)."""
    pair = (5, 10)
    return pair
# When a method returns two values, we can use this format:
x, y = return_two()
# Loop over names, return tuple of dictionary and label
train_set = [(g_features1(n), g) for (n, g) in train_names]
test_set = [(g_features1(n), g) for (n,g) in test_names]
# Run the naive Bayes classifier for the train set
classifier = nltk.NaiveBayesClassifier.train(train_set)
# Apply the classifier to some names
classifier.classify(g_features1('Neo'))
classifier.classify(g_features1('Trinity'))
classifier.classify(g_features1('Max'))
classifier.classify(g_features1('Lucy'))
# Get the probability of female:
classifier.prob_classify(g_features1('Lucy')).prob("female")
# Check the overall accuracy with test set
print(nltk.classify.accuracy(classifier, test_set))
# Lets see what is driving this
classifier.show_most_informative_features(5)
# Lets be smarter
# What all are we including now?
def g_features2(name):
    """Feature dict: first/last letter plus per-letter count and presence flags."""
    lowered = name.lower()
    features = {"firstletter": lowered[0], "lastletter": lowered[-1]}
    for letter in 'abcdefghijklmnopqrstuvwxyz':
        features["count(%s)" % letter] = lowered.count(letter)
        features["has(%s)" % letter] = letter in lowered
    return features
# Test function
g_features2('Rex')
# Run for train set
train_set = [(g_features2(n), g) for (n,g) in train_names]
# Run for test set
test_set = [(g_features2(n), g) for (n,g) in test_names]
# Run new classifier
classifier = nltk.NaiveBayesClassifier.train(train_set)
# Check the overall accuracy with test set
print(nltk.classify.accuracy(classifier, test_set))
# Lets see what is driving this
classifier.show_most_informative_features(10)
# Worse? Better? How can we refine?
# Lets look at the errors from this model
# and see if we can do better
errors = []
for (name, label) in test_names:
guess = classifier.classify(g_features2(name))
if guess != label:
prob = classifier.prob_classify(g_features2(name)).prob(guess)
errors.append((label, guess, prob, name))
for (label, guess, prob, name) in sorted(errors):
print('correct={} guess={} prob={:.2f} name={}'.format(label, guess, prob, name))
# What should we do here?
def g_features3(name):
    """Two boolean features: name ends in 'ie'/'y', and name ends in 'k'."""
    return {
        "last_ie": name[-2:] == "ie" or name[-1] == "y",
        "last_k": name[-1] == "k",
    }
train_set = [(g_features3(n), g) for (n,g) in train_names]
test_set = [(g_features3(n), g) for (n,g) in test_names]
classifier = nltk.NaiveBayesClassifier.train(train_set)
print(nltk.classify.accuracy(classifier, test_set))
classifier.show_most_informative_features(5)
# Now lets look at some bigger documents
from nltk.corpus import movie_reviews
nltk.download('movie_reviews')
# list of tuples
# ([words], label)
documents = [(list(movie_reviews.words(fileid)), category)
for category in movie_reviews.categories()
for fileid in movie_reviews.fileids(category)]
documents[0]
random.shuffle(documents)
# Dictionary of words and number of instances
all_words = nltk.FreqDist(w.lower() for w in movie_reviews.words())
len(all_words)
word_features = [k for k in all_words.keys() if all_words[k] > 5]
# Check the frequency of ','
all_words[',']
# Print frequency of all words
for w in word_features:
print(all_words[w])
# Function to get document features
def document_features(document):
    """Map each vocabulary word to whether it occurs in *document*.

    Relies on the module-level ``word_features`` vocabulary list.
    """
    # A set makes each membership test O(1) instead of scanning the document.
    present = set(document)
    return {'contains(%s)' % word: (word in present) for word in word_features}
print(document_features(movie_reviews.words('pos/cv957_8737.txt')))
## Now we have tuple of ({features}, label)
train_docs = documents[:500]
test_docs = documents[1000:1500]
train_set = [(document_features(d), c) for (d,c) in train_docs]
test_set = [(document_features(d), c) for (d,c) in test_docs]
classifier = nltk.NaiveBayesClassifier.train(train_set)
print(nltk.classify.accuracy(classifier, test_set[:50]))
classifier.show_most_informative_features(10)
# Copyright of the original version:
# Copyright (c) 2014 Matt Dickenson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
| [
"bennoble@Bens-MBP.attlocal.net"
] | bennoble@Bens-MBP.attlocal.net |
cf41492dbc05f545c6e1696e2f82b7b9ed0602ca | 85441ad60b397b6dc076a2d19a59c7ad97c6b361 | /python/mpu9250.py | 3bdf3052f9349a07483c1d075dd6bf3a040c1631 | [] | no_license | dangernme/Rover | cde06cfd45cf66cb6ef372b6cda6ba39aebbb89b | 89f3b03e736c41c570c669b44110017eb16fa937 | refs/heads/master | 2021-04-09T10:59:18.292102 | 2018-12-27T13:48:43 | 2018-12-27T13:48:43 | 125,483,848 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,827 | py | import smbus
import time
# Device addresses
mpu_adr = 0x68
ak8963_adr = 0x0c
# Mpu9250 register addresses
pwr_mgmt_1 = 0x6b # To wake up the 6050 module
int_pin_cfg = 0x37 # To enable the ak8963 module
# AK8963 register addresses
hxl = 0x03 # Magnetometer x axis low byte
hyl = 0x05 # Magnetometer y axis low byte
hzl = 0x07 # Magnetometer z axis low byte
st1 = 0x02 # Magnetometer status 1
st2 = 0x09 # Magnetometer status 2
cntl1 = 0x0A # Magnetometer status 1
cntl2 = 0x0B # Magnetometer status 2
class Mpu9250:
    """Minimal I2C driver for the MPU-9250 IMU and its on-die AK8963
    magnetometer, using smbus bus 1.

    NOTE(review): the register writes in __init__ follow a specific power-up
    sequence (enable bypass, enable magnetometer, wake the chip) -- do not
    reorder them.
    """

    def __init__(self):
        """Open the I2C bus, enable the magnetometer and wake the IMU."""
        # Power management registers
        self.bus = smbus.SMBus(1)
        # Enable the magnetometer module
        self.bus.write_byte_data(mpu_adr, int_pin_cfg, 0x22)
        self.bus.write_byte_data(ak8963_adr, cntl1, 0x01)
        # Now wake the 6050 up as it starts in sleep mode
        self.bus.write_byte_data(mpu_adr, pwr_mgmt_1, 0)

    def get_accel(self):
        """Return raw accelerometer readings as an (x, y, z) tuple."""
        accel_x = self._read_word_2c(mpu_adr, 0x3b)
        accel_y = self._read_word_2c(mpu_adr, 0x3d)
        accel_z = self._read_word_2c(mpu_adr, 0x3f)
        return(accel_x, accel_y, accel_z)

    def get_gyro(self):
        """Return raw gyroscope readings as an (x, y, z) tuple."""
        gyro_x = self._read_word_2c(mpu_adr, 0x43)
        gyro_y = self._read_word_2c(mpu_adr, 0x45)
        gyro_z = self._read_word_2c(mpu_adr, 0x47)
        return(gyro_x, gyro_y, gyro_z)

    def get_temp(self):
        """Return the raw (uncalibrated) on-die temperature reading."""
        temp = self._read_word_2c(mpu_adr, 0x41)
        return temp

    def get_mag_status(self):
        """Return the magnetometer's two raw status registers (ST1, ST2)."""
        status1 = self._read_byte(ak8963_adr, st1)
        status2 = self._read_byte(ak8963_adr, st2)
        return status1, status2

    def get_mag(self):
        """Trigger a single magnetometer measurement and return (x, y, z).

        NOTE(review): busy-waits on the ST1 data-ready bit with no timeout --
        this hangs if the sensor never becomes ready.
        """
        self.bus.write_byte_data(ak8963_adr, cntl1, 0x01)
        while True:
            status = self._read_byte(ak8963_adr, st1)
            if (status & 0x01) == 0x01:
                mag_x = self._read_word_2c(ak8963_adr, hxl)
                mag_y = self._read_word_2c(ak8963_adr, hyl)
                mag_z = self._read_word_2c(ak8963_adr, hzl)
                break;
        return mag_x, mag_y, mag_z

    def _read_byte(self, dev_adr, reg_adr):
        """Read one byte from register ``reg_adr`` of device ``dev_adr``."""
        byte = self.bus.read_byte_data(dev_adr, reg_adr)
        return byte

    def _read_word(self, dev_adr, reg_adr):
        """Read two consecutive registers as an unsigned big-endian word."""
        high = self.bus.read_byte_data(dev_adr, reg_adr)
        low = self.bus.read_byte_data(dev_adr, reg_adr+1)
        val = (high << 8) + low
        return val

    def _read_word_2c(self, dev_adr, reg_adr):
        """Read a 16-bit word and interpret it as two's-complement signed."""
        val = self._read_word(dev_adr, reg_adr)
        if (val >= 0x8000):
            return -((65535 - val) + 1)
        else:
            return val
| [
"netking1984@gmx.de"
] | netking1984@gmx.de |
0054d4a83babd6133180964e7028d6435c456eb9 | 402d32031b7a2c24ad7db105f413cf4a1c190210 | /Program Control Flow/ifelse_learyear.py | 24cd8959bf35334d764648aea95b4fad1d7de988 | [] | no_license | Ganes21/pythontAutomation | 367302c0e498b1dc3f8d2a9c1c56e9406029a4de | 779e554f08644e13215ad8bbabe407d7010914a6 | refs/heads/master | 2020-04-08T22:45:44.536668 | 2019-03-14T07:22:57 | 2019-03-14T07:22:57 | 159,799,385 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 326 | py | year=int(input("enter the year"))
def _is_leap(y):
    """Gregorian leap-year rule: divisible by 4, except centuries unless divisible by 400."""
    return y % 4 == 0 and (y % 100 != 0 or y % 400 == 0)

if _is_leap(year):
    print("{0} is leap year".format(year))
else:
    # fixed typo in the message: "lear year" -> "leap year"
    print("{0} is not leap year".format(year))
| [
"uday.mallampalli@gmail.com"
] | uday.mallampalli@gmail.com |
cb6446e424cac36c9370913ccb9d7b99f6b5afde | 0c9211cf8422fa5345da54214ccce70007d90341 | /Exercise-2/sensor_stick/scripts/template.py | 93d79265416dcf036d713e7230ef07fc0b6c6518 | [] | no_license | bennybitcoin/RoboND-Perception-Exercises | dd9eb917371756f422b69247be6531b445d9bd01 | c60b54ccdeeeab300ea53cda00840e65dd68b2d8 | refs/heads/master | 2021-08-16T15:39:39.339573 | 2017-11-20T03:36:02 | 2017-11-20T03:36:02 | 111,357,459 | 0 | 0 | null | 2017-11-20T03:30:51 | 2017-11-20T03:30:51 | null | UTF-8 | Python | false | false | 774 | py | #!/usr/bin/env python
# Import modules
from pcl_helper import *
# TODO: Define functions as required
# Callback function for your Point Cloud Subscriber
def pcl_callback(pcl_msg):
# TODO: Convert ROS msg to PCL data
# TODO: Voxel Grid Downsampling
# TODO: PassThrough Filter
# TODO: RANSAC Plane Segmentation
# TODO: Extract inliers and outliers
# TODO: Euclidean Clustering
# TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
# TODO: Convert PCL data to ROS messages
# TODO: Publish ROS messages
if __name__ == '__main__':
# TODO: ROS node initialization
# TODO: Create Subscribers
# TODO: Create Publishers
# Initialize color_list
# TODO: Spin while node is not shutdown
| [
"hvpandya@ncsu.edu"
] | hvpandya@ncsu.edu |
4443aa6863038875ca5ad3372f122475c4993118 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/monitor/azure-mgmt-monitor/azure/mgmt/monitor/v2015_04_01/aio/_monitor_management_client.py | 5640ee566505865cb91ec42008e9408f5e7a74d8 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 5,526 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import Any, Awaitable, TYPE_CHECKING
from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from .. import models as _models
from ..._serialization import Deserializer, Serializer
from ._configuration import MonitorManagementClientConfiguration
from .operations import (
ActivityLogsOperations,
AlertRulesOperations,
AutoscaleSettingsOperations,
EventCategoriesOperations,
Operations,
TenantActivityLogsOperations,
)
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class MonitorManagementClient:  # pylint: disable=client-accepts-api-version-keyword
    """Monitor Management Client.

    :ivar activity_logs: ActivityLogsOperations operations
    :vartype activity_logs: azure.mgmt.monitor.v2015_04_01.aio.operations.ActivityLogsOperations
    :ivar autoscale_settings: AutoscaleSettingsOperations operations
    :vartype autoscale_settings:
     azure.mgmt.monitor.v2015_04_01.aio.operations.AutoscaleSettingsOperations
    :ivar event_categories: EventCategoriesOperations operations
    :vartype event_categories:
     azure.mgmt.monitor.v2015_04_01.aio.operations.EventCategoriesOperations
    :ivar operations: Operations operations
    :vartype operations: azure.mgmt.monitor.v2015_04_01.aio.operations.Operations
    :ivar tenant_activity_logs: TenantActivityLogsOperations operations
    :vartype tenant_activity_logs:
     azure.mgmt.monitor.v2015_04_01.aio.operations.TenantActivityLogsOperations
    :ivar alert_rules: AlertRulesOperations operations
    :vartype alert_rules: azure.mgmt.monitor.v2015_04_01.aio.operations.AlertRulesOperations
    :param credential: Credential needed for the client to connect to Azure. Required.
    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
    :param subscription_id: The ID of the target subscription. Required.
    :type subscription_id: str
    :param base_url: Service URL. Default value is "https://management.azure.com".
    :type base_url: str
    """

    def __init__(
        self,
        credential: "AsyncTokenCredential",
        subscription_id: str,
        base_url: str = "https://management.azure.com",
        **kwargs: Any
    ) -> None:
        self._config = MonitorManagementClientConfiguration(
            credential=credential, subscription_id=subscription_id, **kwargs
        )
        self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
        # Collect every generated model class so the (de)serializers can resolve type names.
        client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)
        # Generated clients disable client-side validation of request parameters.
        self._serialize.client_side_validation = False
        self.activity_logs = ActivityLogsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.autoscale_settings = AutoscaleSettingsOperations(
            self._client, self._config, self._serialize, self._deserialize
        )
        self.event_categories = EventCategoriesOperations(
            self._client, self._config, self._serialize, self._deserialize
        )
        self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
        self.tenant_activity_logs = TenantActivityLogsOperations(
            self._client, self._config, self._serialize, self._deserialize
        )
        self.alert_rules = AlertRulesOperations(self._client, self._config, self._serialize, self._deserialize)

    def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
        """Runs the network request through the client's chained policies.

        >>> from azure.core.rest import HttpRequest
        >>> request = HttpRequest("GET", "https://www.example.org/")
        <HttpRequest [GET], url: 'https://www.example.org/'>
        >>> response = await client._send_request(request)
        <AsyncHttpResponse: 200 OK>

        For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request

        :param request: The network request you want to make. Required.
        :type request: ~azure.core.rest.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.rest.AsyncHttpResponse
        """
        # Deep-copy so the caller's request object is not mutated when the URL is formatted.
        request_copy = deepcopy(request)
        request_copy.url = self._client.format_url(request_copy.url)
        return self._client.send_request(request_copy, **kwargs)

    async def close(self) -> None:
        # Close the underlying pipeline client (and its transport).
        await self._client.close()

    async def __aenter__(self) -> "MonitorManagementClient":
        await self._client.__aenter__()
        return self

    async def __aexit__(self, *exc_details: Any) -> None:
        await self._client.__aexit__(*exc_details)
| [
"noreply@github.com"
] | Azure.noreply@github.com |
225cb8122ed2d03426f8094145ffb23f26efd779 | bfba78fdb93d57b6c8d23390ab6a55918201bc34 | /CDagency/spiders/cd11_zongjiaoju.py | fb268d4cd03ecdcb935388faf3b672a041b7e55a | [] | no_license | gongdx/CDagency | 61ce0359f3b0e735793fff9332106f5e30cc1509 | d2d66206d799afbfe68cafcc9bd7cd6d9533685d | refs/heads/master | 2021-03-07T14:49:41.810523 | 2020-03-26T14:24:28 | 2020-03-26T14:24:28 | 246,273,589 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,660 | py | # -*- coding: utf-8 -*-
#成都市民族宗教事务局
import scrapy
from CDagency.items import *
import re
from .browse_times import getBrowseTimes
class CdZongjiaojuSpider(scrapy.Spider):
    """Spider for the Chengdu Municipal Ethnic and Religious Affairs Bureau search results.

    Crawls up to 15 result pages for a fixed query, follows each article's
    JS-redirect page to the real URL, then extracts the article text,
    view count and activity-type keywords into CDagency items.
    """
    name = 'CD11zongjiaoju'
    allowed_domains = ['mzzj.chengdu.gov.cn']
    start_urls = ['http://mzzj.chengdu.gov.cn/search/s?q=1&qt=%E4%B8%8D%E5%BF%98%E5%88%9D%E5%BF%83%E7%89%A2%E8%AE%B0%E4%BD%BF%E5%91%BD&pageSize=10&sort=D&database=all&siteCode=5101000029&docQt=&page=1']
    def parse(self, response):
        """Parse one search-results page: yield one Request per article, then follow pagination."""
        news = response.xpath("//div[contains(@class,'discuss') or contains(@class,'topic')]")
        # print(response.text)
        for new in news:
            item = CDagency()
            item["col_name"] = "CD11zongjiaoju"
            # item['url'] = new.xpath('.//a[@class="link"]/@href').extract()
            item['pub_time'] = new.xpath('.//span[@class="colo-666"]/text()').extract_first()
            item['title'] = new.xpath('.//a[contains(@class,"fl")]/@title').extract_first()
            bureau = new.xpath('.//a[@class="link"]/text()').extract_first()
            bureau = "".join(bureau).strip()
            item['bureau'] = bureau
            # yield item
            href = new.xpath('.//a[@class="link"]/@href').extract_first()
            # print(href)
            initial_url = 'http://mzzj.chengdu.gov.cn/search/' + str(href)
            # print(detail_url)
            # detail_url = 'http://mzzj.chengdu.gov.cn' + re.search('location.href\ =\ "(.*?)";', response.text).group(1)
            # dont_filter=True: the redirect page must be fetched even if seen before
            yield scrapy.Request(url=initial_url, callback=self.get_detail_url, meta={'item': item, 'url': initial_url}, dont_filter=True)
        # Build the link to the next results page
        current_url = response.url # current page URL
        num = '' # current page number: digits after the last '=' in the URL
        for x in current_url[::-1]:
            if x == '=':
                break
            num = x + num
        next_url = 'http://mzzj.chengdu.gov.cn/search/s?q=1&qt=%E4%B8%8D%E5%BF%98%E5%88%9D%E5%BF%83%E7%89%A2%E8%AE%B0%E4%BD%BF%E5%91%BD&pageSize=10&sort=D&database=all&siteCode=5101000029&docQt=&page=' + str(int(num)+1)
        if int(num) < 15:
            yield scrapy.Request(next_url, callback=self.parse)
    def get_detail_url(self, response):
        """Extract the real article URL from the JavaScript redirect page and follow it."""
        item = response.meta['item']
        detail_url = 'http://mzzj.chengdu.gov.cn' + re.search('location.href\ =\ "(.*?)";', response.text).group(1)
        # print(detail_url)
        item['detail_url'] = detail_url
        yield scrapy.Request(url=detail_url, callback=self.get_text, meta={'item': item, 'url': detail_url}, dont_filter=True)
    def get_text(self, response):
        """Extract the article body, view count and activity-type tags; yield the finished item."""
        item = response.meta['item']
        # item['department'] = response.xpath('//span[@id="source"]/text()').extarct()
        content_list = response.xpath('.//div[@id="zoomcon"]//text()').getall()
        # Concatenate the text fragments, stripping whitespace and full-width commas
        remove = re.compile(r'\s')
        douhao = re.compile(',')
        content = ''
        for string in content_list:
            string = re.sub(remove, '', string)
            string = re.sub(douhao, '', string)
            content += string
        item['content'] = content
        # Fetch the article view count
        url = (response.meta['url'])
        item['browse_times'] = getBrowseTimes(url)
        # Classify the article by which activity-type keywords appear in the text
        item["type"] = []
        type_lists = ["主题教育","专题讲座","专题党课","主题党日","专题报告","知识竞赛","研讨交流会","座谈会","专题学习会","集中学习周","读书班"]
        for type_list in type_lists:
            if type_list in content:
                item["type"].append(type_list)
        yield item
| [
"1097456719@qq.com"
] | 1097456719@qq.com |
712dba93a2621c8f100b375020d6fe1a26b33587 | 155cfef4bb35d20bc574f63f3443039bfcc1ab7e | /srcs/mahjong/admin/admin/admin.py | dae8ebe48a3a7b3d835c15ac939a653d4622e62b | [] | no_license | rolllyroman/fish_web | 3116481a6a16484283f428eb7c98ecea7cee99d4 | eb5a05ea3d56c7d9f599009e8ab6f4cb322e9023 | refs/heads/master | 2020-03-28T01:08:57.346228 | 2018-09-06T03:34:05 | 2018-09-06T03:34:05 | 147,480,922 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 840 | py | #-*- coding:utf-8 -*-
#!/usr/bin/python
"""
Author:$Author$
Date:$Date$
Revision:$Revision$
Description:
后台APP应用入口
"""
from bottle import Bottle
from common.install_plugin import install_redis_plugin,install_session_plugin
# Back-office Bottle application: create the app, attach plugins, then import
# every route module so its handlers register against admin_app.
admin_app = Bottle()
install_redis_plugin(admin_app)
install_session_plugin(admin_app)
import admin_index
import admin_auth
# Member module
import admin_member
# Data-statistics module
import admin_statistics
# Personal-profile module
import admin_self
# Agent module
import admin_agent
# User-permission module
import admin_power
# Game module
import admin_game
# Order module
import admin_order
# Goods module
import admin_goods
# System settings
import admin_setting
# Message settings
import admin_notic
# Fishing module
import admin_fish
# Welfare module
import admin_benefit
'''
金币场模块
'''
import admin_gold
| [
"1983654762@qq.com"
] | 1983654762@qq.com |
c40becd033d3ea8a15e2c42b16e0245831010393 | f173b23a2ac3b05c35185e6071ea7468318be83d | /benchmarks/vote/pqueue/pqueue.py | a21e0e9f5669ecc3a6b03f014b6c78c34a76e61d | [
"Apache-2.0"
] | permissive | danieldzahka/cyclone | d6e550a70458c6d2ce47277ec9daed6577898510 | 45032a03506c918b2de4fd3846d711d47f56bbf0 | refs/heads/master | 2022-12-20T21:56:31.093423 | 2020-10-04T05:20:39 | 2020-10-04T05:20:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,198 | py | def launch_cmds_startup():
print("Configuring for vote application")
def launch_cmds_server_gen(f, q, r, m, quorums, replicas, clients, ports):
    """Write the shell commands that start one pqueue server replica to *f*.

    f        -- writable file-like object receiving the shell script lines
    r        -- replica id passed to the server binary
    m        -- machine id passed to the server binary
    clients  -- total client count passed to the server binary
    ports    -- port base appended to the command line
    q, quorums, replicas are accepted for interface compatibility but unused.
    """
    import os  # local import: this module never imports os at file level

    # sudo password comes from the environment, with a dummy fallback
    # (fixed: os.environ.has_key() is Python-2-only; 'in' works on 2 and 3)
    passwd = 'dummy'
    if 'CYCLONE_PASS' in os.environ:
        passwd = os.environ.get('CYCLONE_PASS')

    # Recreate the persistent-memory working directory.
    cmd = 'echo ' + passwd + ' | sudo -S '
    cmd = cmd + 'rm -rf /mnt/pmem0/pmemds\n'
    cmd = cmd + 'echo ' + passwd + ' | sudo -S '
    cmd = cmd + 'mkdir -p /mnt/pmem0/pmemds\n'
    f.write(cmd)

    # Launch the server in the background, logging to server_log.
    cmd = ''
    cmd = cmd + 'echo ' + passwd + ' | sudo -S '
    cmd = cmd + ' PMEM_IS_PMEM_FORCE=1 '
    cmd = cmd + ' LD_LIBRARY_PATH=/usr/lib:/usr/local/lib '
    cmd = cmd + '/home/pfernando/cyclone/cyclone.git/benchmarks/vote/pqueue/pqueue_server '
    cmd = cmd + str(r) + ' '
    cmd = cmd + str(m) + ' '
    cmd = cmd + str(clients) + ' '
    cmd = cmd + 'config_cluster.ini config_quorum.ini ' + str(ports) + ' &> server_log &\n'
    f.write(cmd)
def launch_cmds_preload_gen(f, m, c, quorums, replicas, clients, machines, ports):
    """Placeholder: the vote/pqueue benchmark has no preload phase, so nothing is written to *f*."""
    pass
def launch_cmds_client_gen(f, m, c, quorums, replicas, clients, machines, ports, bufsize):
    """Write the shell command that starts this machine's async pqueue clients to *f*.

    Machines [replicas, machines) host clients; the [0, clients) id range is
    dealt out evenly across them, with the last machine taking the remainder.
    Only core c == 0 of each client machine emits a command.

    Fixes vs. the original:
      * os.environ.has_key() (Python-2-only) replaced by the 'in' operator,
      * integer division (//) so client ids stay ints under Python 3,
      * 'os' imported locally since the module never imports it.
    """
    import os  # local import: this module never imports os at file level

    passwd = 'dummy'
    if m >= replicas:
        client_machines = machines - replicas
        if client_machines > clients:
            client_machines = clients
        # Integer division: even share per machine; remainder goes to the last one.
        clients_per_machine = clients // client_machines
        c_start = clients_per_machine * (m - replicas)
        c_stop = c_start + clients_per_machine
        if m == replicas + client_machines - 1:
            c_stop = clients
        if c == 0 and m < replicas + client_machines:
            cmd = ''
            # Optional workload knobs forwarded as environment assignments.
            if 'KV_FRAC_READ' in os.environ:
                cmd = cmd + 'KV_FRAC_READ=' + os.environ.get('KV_FRAC_READ') + ' '
            if 'KV_KEYS' in os.environ:
                cmd = cmd + 'KV_KEYS=' + os.environ.get('KV_KEYS') + ' '
            if 'ACTIVE' in os.environ:
                cmd = cmd + 'ACTIVE=' + os.environ.get('ACTIVE') + ' '
            if 'CYCLONE_PASS' in os.environ:
                passwd = os.environ.get('CYCLONE_PASS')
            cmd = cmd + ' echo ' + passwd + ' | sudo -S '
            cmd = cmd + ' LD_LIBRARY_PATH=/usr/lib:/usr/local/lib:/usr/local/lib64 '
            cmd = cmd + '/home/pfernando/cyclone/cyclone.git/benchmarks/vote/pqueue/pqueue_async_client '
            cmd = cmd + str(c_start) + ' '
            cmd = cmd + str(c_stop) + ' '
            cmd = cmd + str(m) + ' '
            cmd = cmd + str(replicas) + ' '
            cmd = cmd + str(clients) + ' '
            cmd = cmd + str(quorums) + ' '
            cmd = cmd + 'config_cluster.ini config_quorum ' + str(ports) + ' ' + str(bufsize) + ' &> client_log' + str(0) + '&\n'
            f.write(cmd)
def killall_cmds_gen(f):
    """Write pkill commands that stop every pqueue server/client process to *f*.

    Fixed: os.environ.has_key() is Python-2-only; use the 'in' operator, and
    import 'os' locally since the module never imports it at file level.
    """
    import os  # local import: this module never imports os at file level
    passwd = 'dummy'
    if 'CYCLONE_PASS' in os.environ:
        passwd = os.environ.get('CYCLONE_PASS')
    f.write('echo ' + passwd + ' | sudo -S pkill pqueue_server\n')
    f.write('echo ' + passwd + '| sudo -S pkill pqueue_client\n')
    f.write('echo ' + passwd + '| sudo -S pkill pqueue_async\n')
| [
"pradeepfn@gmail.com"
] | pradeepfn@gmail.com |
052c039c8740f46756fb0891f065a5816eba47a9 | df0e5307827cf0f3fb4099714ce6f848afc462ca | /main_formal.py | 2903fec404314849ebaa844b42702d1ec0b51335 | [
"BSD-3-Clause"
] | permissive | prtx/RANE | c80533c744e46df49798a17c8ad4469c78aedeb6 | 898bb7da2edd2a8b49257b4f0750d176a8b37f04 | refs/heads/master | 2023-04-28T20:33:35.265257 | 2021-05-21T19:49:00 | 2021-05-21T19:49:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 435 | py | from formal.seq_attack_formal import main
from os import path
class Config:
    """Static run configuration consumed by formal.seq_attack_formal.main().

    NOTE(review): field meanings below are inferred from names and the
    commented alternatives — confirm against seq_attack_formal.
    """
    # Verilog netlists of the designs to attack.
    module_paths = ['benchmarks/verilog/original/lat.v',
                    'benchmarks/verilog/original/dff.v']
    enable_async = False  # presumably toggles asynchronous-element handling -- TODO confirm
    cycsat = False        # presumably enables CycSAT-style handling -- TODO confirm
    depth = 20            # presumably the bounded-check unrolling depth -- TODO confirm
    step = 5              # presumably the depth increment per iteration -- TODO confirm
    # solver = 'jaspergold'
    solver = 'symbiyosys'  # formal backend; 'jaspergold' is the (commented) alternative
    engine = 'yices'       # engine name handed to the solver -- TODO confirm
    exe_path = path.expanduser('~') + "/lockbox/"  # presumably a working/output directory -- TODO confirm
if __name__ == "__main__":
main(Config)
| [
"shervinrs@gmail.com"
] | shervinrs@gmail.com |
d34c789dde64b5b39999009db01b1063b4be7c34 | 2b42b40ae2e84b438146003bf231532973f1081d | /spec/mgm4458015.3.spec | e9ca614706903a6b3dabcb4a519425f93b2f6d24 | [] | no_license | MG-RAST/mtf | 0ea0ebd0c0eb18ec6711e30de7cc336bdae7215a | e2ddb3b145068f22808ef43e2bbbbaeec7abccff | refs/heads/master | 2020-05-20T15:32:04.334532 | 2012-03-05T09:51:49 | 2012-03-05T09:51:49 | 3,625,755 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 14,311 | spec | {
"id": "mgm4458015.3",
"metadata": {
"mgm4458015.3.metadata.json": {
"format": "json",
"provider": "metagenomics.anl.gov"
}
},
"providers": {
"metagenomics.anl.gov": {
"files": {
"100.preprocess.info": {
"compression": null,
"description": null,
"size": 736,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/100.preprocess.info"
},
"100.preprocess.passed.fna.gz": {
"compression": "gzip",
"description": null,
"size": 862581,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/100.preprocess.passed.fna.gz"
},
"100.preprocess.passed.fna.stats": {
"compression": null,
"description": null,
"size": 311,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/100.preprocess.passed.fna.stats"
},
"100.preprocess.removed.fna.gz": {
"compression": "gzip",
"description": null,
"size": 33579,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/100.preprocess.removed.fna.gz"
},
"100.preprocess.removed.fna.stats": {
"compression": null,
"description": null,
"size": 306,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/100.preprocess.removed.fna.stats"
},
"205.screen.h_sapiens_asm.info": {
"compression": null,
"description": null,
"size": 450,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/205.screen.h_sapiens_asm.info"
},
"299.screen.info": {
"compression": null,
"description": null,
"size": 410,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/299.screen.info"
},
"299.screen.passed.fna.gcs": {
"compression": null,
"description": null,
"size": 1675,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/299.screen.passed.fna.gcs"
},
"299.screen.passed.fna.gz": {
"compression": "gzip",
"description": null,
"size": 532230,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/299.screen.passed.fna.gz"
},
"299.screen.passed.fna.lens": {
"compression": null,
"description": null,
"size": 392,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/299.screen.passed.fna.lens"
},
"299.screen.passed.fna.stats": {
"compression": null,
"description": null,
"size": 311,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/299.screen.passed.fna.stats"
},
"440.cluster.rna97.fna.gz": {
"compression": "gzip",
"description": null,
"size": 23712,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/440.cluster.rna97.fna.gz"
},
"440.cluster.rna97.fna.stats": {
"compression": null,
"description": null,
"size": 309,
"type": "fasta",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/440.cluster.rna97.fna.stats"
},
"440.cluster.rna97.info": {
"compression": null,
"description": null,
"size": 947,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/440.cluster.rna97.info"
},
"440.cluster.rna97.mapping": {
"compression": null,
"description": null,
"size": 1217940,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/440.cluster.rna97.mapping"
},
"440.cluster.rna97.mapping.stats": {
"compression": null,
"description": null,
"size": 49,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/440.cluster.rna97.mapping.stats"
},
"450.rna.expand.lca.gz": {
"compression": "gzip",
"description": null,
"size": 122822,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/450.rna.expand.lca.gz"
},
"450.rna.expand.rna.gz": {
"compression": "gzip",
"description": null,
"size": 46050,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/450.rna.expand.rna.gz"
},
"450.rna.sims.filter.gz": {
"compression": "gzip",
"description": null,
"size": 26663,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/450.rna.sims.filter.gz"
},
"450.rna.sims.gz": {
"compression": "gzip",
"description": null,
"size": 277435,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/450.rna.sims.gz"
},
"900.abundance.function.gz": {
"compression": "gzip",
"description": null,
"size": 6684,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/900.abundance.function.gz"
},
"900.abundance.lca.gz": {
"compression": "gzip",
"description": null,
"size": 5341,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/900.abundance.lca.gz"
},
"900.abundance.md5.gz": {
"compression": "gzip",
"description": null,
"size": 11391,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/900.abundance.md5.gz"
},
"900.abundance.ontology.gz": {
"compression": "gzip",
"description": null,
"size": 43,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/900.abundance.ontology.gz"
},
"900.abundance.organism.gz": {
"compression": "gzip",
"description": null,
"size": 15601,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/900.abundance.organism.gz"
},
"900.loadDB.sims.filter.seq": {
"compression": null,
"description": null,
"size": 12182568,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/900.loadDB.sims.filter.seq"
},
"900.loadDB.source.stats": {
"compression": null,
"description": null,
"size": 98,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/900.loadDB.source.stats"
},
"999.done.COG.stats": {
"compression": null,
"description": null,
"size": 1,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/999.done.COG.stats"
},
"999.done.KO.stats": {
"compression": null,
"description": null,
"size": 1,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/999.done.KO.stats"
},
"999.done.NOG.stats": {
"compression": null,
"description": null,
"size": 1,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/999.done.NOG.stats"
},
"999.done.Subsystems.stats": {
"compression": null,
"description": null,
"size": 1,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/999.done.Subsystems.stats"
},
"999.done.class.stats": {
"compression": null,
"description": null,
"size": 376,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/999.done.class.stats"
},
"999.done.domain.stats": {
"compression": null,
"description": null,
"size": 28,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/999.done.domain.stats"
},
"999.done.family.stats": {
"compression": null,
"description": null,
"size": 800,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/999.done.family.stats"
},
"999.done.genus.stats": {
"compression": null,
"description": null,
"size": 1298,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/999.done.genus.stats"
},
"999.done.order.stats": {
"compression": null,
"description": null,
"size": 422,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/999.done.order.stats"
},
"999.done.phylum.stats": {
"compression": null,
"description": null,
"size": 193,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/999.done.phylum.stats"
},
"999.done.rarefaction.stats": {
"compression": null,
"description": null,
"size": 22871,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/999.done.rarefaction.stats"
},
"999.done.sims.stats": {
"compression": null,
"description": null,
"size": 79,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/999.done.sims.stats"
},
"999.done.species.stats": {
"compression": null,
"description": null,
"size": 4675,
"type": "txt",
"url": "http://api.metagenomics.anl.gov/analysis/data/id/mgm4458015.3/file/999.done.species.stats"
}
},
"id": "mgm4458015.3",
"provider": "metagenomics.anl.gov",
"providerId": "mgm4458015.3"
}
},
"raw": {
"mgm4458015.3.fna.gz": {
"compression": "gzip",
"format": "fasta",
"provider": "metagenomics.anl.gov",
"url": "http://api.metagenomics.anl.gov/reads/mgm4458015.3"
}
}
} | [
"jared.wilkening@gmail.com"
] | jared.wilkening@gmail.com |
a9289a1f6b4ed942a127823b3c56eb3e4087cf50 | 83e372779472742eb5eaa4f4484a51e701b1c684 | /.emacs.d/backups/!home!rasorasi!p!q25.py~ | 0f15c76a3d5623b7b3416e19f48c9bed59f8b187 | [] | no_license | rasorasi/dotfiles | 168fd1ae40c0258f53381e0932606dbd7dcd10ec | ce89d06fe3d4d2bdbbc8946f42264f7c725ff77d | refs/heads/master | 2020-05-17T08:39:42.090089 | 2015-01-21T06:35:36 | 2015-01-21T06:35:36 | 16,072,139 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,660 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
from PyQt4 import QtGui, QtCore
class Button(QtGui.QPushButton):
    """Push button that can be dragged with the right mouse button.

    NOTE(review): PyQt4 / Python 2 code (print statements, drag.start();
    Qt5/PyQt5 renamed start() to exec_()).
    """
    def __init__(self, title, parent):
        super(Button, self).__init__(title, parent)
    def mouseMoveEvent(self, e):
        # Only right-button moves start a drag; everything else is ignored.
        if e.buttons() != QtCore.Qt.RightButton:
            return
        # Start a drag carrying empty MIME data; the parent window's drop
        # handler repositions this button at the drop point.
        mimeData = QtCore.QMimeData()
        drag = QtGui.QDrag(self)
        drag.setMimeData(mimeData)
        # Hot spot = where inside the button the cursor grabbed it.
        drag.setHotSpot(e.pos() - self.rect().topLeft())
        dropAction = drag.start(QtCore.Qt.MoveAction)
        print str(dropAction)
    def mousePressEvent(self, e):
        # Keep the default press behavior, then log left clicks.
        super(Button, self).mousePressEvent(e)
        if e.button() == QtCore.Qt.LeftButton:
            print 'press'
class Example(QtGui.QWidget):
    """Demo window: drag the Button with the right mouse button to move it."""
    def __init__(self):
        super(Example, self).__init__()
        self.initUI()
    def initUI(self):
        # Accept drops so dropEvent receives the button dragged inside this window.
        self.setAcceptDrops(True)
        self.button = Button('Button', self)
        self.button.move(100, 65)
        self.lbl = QtGui.QLabel('test', self)
        self.lbl.move(120, 140)
        self.setGeometry(300, 300, 300, 200)
        self.show()
    def dragEnterEvent(self, e):
        # Accept every drag so the move can complete anywhere in the window.
        e.accept()
    def dropEvent(self, e):
        # Move the dragged button to the drop position and echo it in the label.
        position = e.pos()
        self.button.move(position)
        e.setDropAction(QtCore.Qt.MoveAction)
        e.accept()
        self.lbl.setText(str(e.pos()))
    def keyPressEvent(self, e):
        # Close the window on Escape.
        if e.key() == QtCore.Qt.Key_Escape:
            self.close()
def main():
    """Create the Qt application, show the demo window and run the event loop."""
    application = QtGui.QApplication(sys.argv)
    window = Example()  # keep a reference so the widget is not garbage-collected
    sys.exit(application.exec_())
if __name__ == '__main__':
main()
| [
"raso_katoh@yahoo.co.jp"
] | raso_katoh@yahoo.co.jp | |
47231b4f6eff1ab3742c9509b45b9bc88decca39 | ed9c561a4b3882010366cd5a77f4f0baabfc077a | /dummylearning/plots/classification.py | 5a7f25f7d6890b836dac2fe62df124525863a531 | [
"MIT"
] | permissive | JuantonioMS/dummylearning | 031ad962e13961d72864554298463ed9e210b475 | 1780a7ba0f38633bcbfbacd4f35a31cce4c94a87 | refs/heads/main | 2023-03-08T03:38:32.067454 | 2021-02-23T16:07:08 | 2021-02-23T16:07:08 | 313,150,218 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,390 | py | import sys
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
sys.path.append("../utilities/")
from dummylearning.utilities.info import Info
from dummylearning.analysis.clasification import Analysis
class Plots(Info):
    def __init__(self, model, verbose = True):
        """Store the fitted classification *model* and build its Analysis helper.

        model   -- wrapper exposing .dataset (dict of datasets) and .model
                   (estimator with .classes_), as used by the plot methods
        verbose -- forwarded to Info to enable progress messages
        """
        super().__init__(verbose)
        self.model = model
        self.analysis = Analysis(model)
#____________________________________METRICS___________________________________
    def datasetMetrics(self, outfile: str, extension: str = "png") -> None:
        """
        Function -> datasetMetrics
        Plot metrics per dataset: a 2x2 grid with accuracy, precision,
        recall and F1 bars, grouping one bar per class within each dataset.

        Parameters
        ---------------------------------------------------------------------------
        outfile <str> (positional) => Plot name
        extension <str> (default: png) => Image extension

        Return
        ---------------------------------------------------------------------------
        None => Generate metrics plots per dataset
        """
        self.upgradeInfo("Generating datasets metrics plots")
        metrics = self.analysis.metrics()
        datasets = list(self.model.dataset.keys())
        # 2x2 grid: accuracy (0,0), precision (0,1), recall (1,0), F1 (1,1)
        _, ax = plt.subplots(nrows = 2, ncols = 2, figsize = (8, 10))
        # Accuracy
        accuracies = [metrics[dataset]["accuracy"] for dataset in datasets]
        ax[0, 0].bar(datasets, accuracies)
        ax[0, 0].axhline(0.5, color = "black", linestyle = "--")
        ax[0, 0].set_ylabel("Accuracy")
        ax[0, 0].set_xlabel("Dataset")
        ax[0, 0].set_title("Accuracy scores")
        # NOTE(review): upper ylim 1.5 leaves headroom above score bars (max 1.0) -- confirm intended
        ax[0, 0].set_ylim([0.0, 1.5])
        # Grouped-bar geometry: split the 0.8-wide group into one slot per class.
        borders = np.linspace(-0.4, 0.4, len(self.model.model.classes_) + 1)
        width = abs(borders[0] - borders[1])
        centers = np.linspace(-0.4 + width / 2, 0.4 - width / 2, len(self.model.model.classes_))
        # Precision
        for index, clas in enumerate(self.model.model.classes_):
            precisions = [metrics[dataset][clas]["precision"] for dataset in datasets]
            ax[0, 1].bar(np.arange(len(datasets)) + centers[index] , precisions, width, label = clas)
        ax[0, 1].axhline(0.5, color = "black", linestyle = "--")
        ax[0, 1].set_ylabel("Precision")
        ax[0, 1].set_xlabel("Dataset")
        ax[0, 1].set_xticks(np.arange(len(datasets)))
        ax[0, 1].set_xticklabels(datasets)
        ax[0, 1].set_title("Precision scores")
        ax[0, 1].legend()
        ax[0, 1].set_ylim([0.0, 1.5])
        # Recall
        for index, clas in enumerate(self.model.model.classes_):
            recalls = [metrics[dataset][clas]["recall"] for dataset in datasets]
            ax[1, 0].bar(np.arange(len(datasets)) + centers[index] , recalls, width, label = clas)
        ax[1, 0].axhline(0.5, color = "black", linestyle = "--")
        ax[1, 0].set_ylabel("Recall")
        ax[1, 0].set_xlabel("Dataset")
        ax[1, 0].set_xticks(np.arange(len(datasets)))
        ax[1, 0].set_xticklabels(datasets)
        ax[1, 0].set_title("Recall scores")
        ax[1, 0].legend()
        ax[1, 0].set_ylim([0.0, 1.5])
        # F1
        for index, clas in enumerate(self.model.model.classes_):
            f1s = [metrics[dataset][clas]["f1"] for dataset in datasets]
            ax[1, 1].bar(np.arange(len(datasets)) + centers[index] , f1s, width, label = clas)
        ax[1, 1].axhline(0.5, color = "black", linestyle = "--")
        ax[1, 1].set_ylabel("F1")
        ax[1, 1].set_xlabel("Dataset")
        ax[1, 1].set_xticks(np.arange(len(datasets)))
        ax[1, 1].set_xticklabels(datasets)
        ax[1, 1].set_title("F1 scores")
        ax[1, 1].legend()
        ax[1, 1].set_ylim([0.0, 1.5])
        plt.savefig(f"{outfile}.{extension}", dpi = 100, bbox_inches = "tight")
        plt.close()
def classMetrics(self, outfile: str, extension: str = "png") -> None:
    """
    Function -> classMetrics
    Plot metrics per class (bars grouped by dataset)
    Parameters
    ---------------------------------------------------------------------------
    outfile <str> (positional) => Plot name
    extension <str> (default: png) => Image extension
    Return
    ---------------------------------------------------------------------------
    None => Generate metrics plots per class
    """
    # Fixed copy-paste slips from datasetMetrics: docstring header and the
    # progress message below used to say "datasetMetrics"/"datasets".
    self.upgradeInfo("Generating classes metrics plots")
    metrics = self.analysis.metrics()
    classes = list(self.model.model.classes_)
    datasets = list(self.model.dataset.keys())
    _, ax = plt.subplots(nrows = 2, ncols = 2, figsize = (8, 10))
    # Accuracy: one bar per dataset (accuracy has no per-class breakdown)
    accuracies = [metrics[dataset]["accuracy"] for dataset in datasets]
    ax[0, 0].bar(datasets, accuracies)
    ax[0, 0].axhline(0.5, color = "black", linestyle = "--")
    ax[0, 0].set_ylabel("Accuracy")
    ax[0, 0].set_xlabel("Dataset")
    ax[0, 0].set_title("Accuracy scores")
    ax[0, 0].set_ylim([0.0, 1.5])
    # Bar geometry: split the [-0.4, 0.4] span around each x tick into one
    # sub-bar per dataset.
    borders = np.linspace(-0.4, 0.4, len(datasets) + 1)
    width = abs(borders[0] - borders[1])
    centers = np.linspace(-0.4 + width / 2, 0.4 - width / 2, len(datasets))
    # Precision
    for index, dataset in enumerate(datasets):
        precisions = [metrics[dataset][clas]["precision"] for clas in classes]
        ax[0, 1].bar(np.arange(len(classes)) + centers[index] , precisions, width, label = dataset)
    ax[0, 1].axhline(0.5, color = "black", linestyle = "--")
    ax[0, 1].set_ylabel("Precision")
    ax[0, 1].set_xlabel("Class")
    ax[0, 1].set_xticks(np.arange(len(classes)))
    ax[0, 1].set_xticklabels(classes)
    ax[0, 1].set_title("Precision scores")
    ax[0, 1].legend()
    ax[0, 1].set_ylim([0.0, 1.5])
    # Recall
    for index, dataset in enumerate(datasets):
        recalls = [metrics[dataset][clas]["recall"] for clas in classes]
        ax[1, 0].bar(np.arange(len(classes)) + centers[index] , recalls, width, label = dataset)
    # BUGFIX: this reference line was drawn on ax[1, 1] (the F1 panel),
    # leaving the recall panel without one and duplicating it on F1.
    ax[1, 0].axhline(0.5, color = "black", linestyle = "--")
    ax[1, 0].set_ylabel("Recall")
    ax[1, 0].set_xlabel("Class")
    ax[1, 0].set_xticks(np.arange(len(classes)))
    ax[1, 0].set_xticklabels(classes)
    ax[1, 0].set_title("Recall scores")
    ax[1, 0].legend()
    ax[1, 0].set_ylim([0.0, 1.5])
    # F1
    for index, dataset in enumerate(datasets):
        f1s = [metrics[dataset][clas]["f1"] for clas in classes]
        ax[1, 1].bar(np.arange(len(classes)) + centers[index] , f1s, width, label = dataset)
    ax[1, 1].axhline(0.5, color = "black", linestyle = "--")
    ax[1, 1].set_ylabel("F1")
    ax[1, 1].set_xlabel("Class")
    ax[1, 1].set_xticks(np.arange(len(classes)))
    ax[1, 1].set_xticklabels(classes)
    ax[1, 1].set_title("F1 scores")
    ax[1, 1].legend()
    ax[1, 1].set_ylim([0.0, 1.5])
    plt.savefig(f"{outfile}.{extension}", dpi = 100, bbox_inches = "tight")
    plt.close()
#_________________________________COEFFICIENTS_________________________________
def coefficients(self, outfile: str, extension: str = "png") -> None:
    """
    Function -> coefficients
    Draw one horizontal bar chart per class showing its non-zero
    model coefficients, sorted by value.
    Parameters
    ---------------------------------------------------------------------------
    outfile <str> (positional) => Plot name
    extension <str> (default: png) => Image extension
    Return
    ---------------------------------------------------------------------------
    None => Generate coefficients plots
    """
    self.upgradeInfo("Generating coefficients plots")
    for clas, coefs in self.analysis.coefficients().items():
        # Keep only non-zero coefficients; sort (value, name) pairs so the
        # bars appear in ascending coefficient order.
        pairs = [(value, name) for name, value in coefs.items() if value != 0]
        nonZeroValues, nonZeroCoefs = zip(*sorted(pairs))
        _, axis = plt.subplots()
        axis.barh(nonZeroCoefs, nonZeroValues, align = "center")
        axis.axvline(0, color = "black", linewidth = 2.0)
        axis.set_xlabel("Coefficient Value")
        axis.set_title(f"{clas} Coefficient")
        axis.grid(True)
        plt.savefig(f"{outfile}_{clas}.{extension}", dpi = 100, bbox_inches = "tight")
        plt.close()
def oddsRatios(self, outfile: str, extension: str = "png") -> None:
    """
    Function -> oddsRatios
    Draw one horizontal bar chart per class showing its non-zero
    odds ratios, sorted by value.
    Parameters
    ---------------------------------------------------------------------------
    outfile <str> (positional) => Plot name
    extension <str> (default: png) => Image extension
    Return
    ---------------------------------------------------------------------------
    None => Generate one odds-ratio plot per class
    """
    self.upgradeInfo("Generating odds ratio plots")
    for clas, ratios in self.analysis.oddsRatios().items():
        pairs = [(value, name) for name, value in ratios.items() if value != 0]
        nonZeroValues, nonZeroCoefs = zip(*sorted(pairs))
        _, axis = plt.subplots()
        axis.barh(nonZeroCoefs, nonZeroValues, align = "center")
        # An odds ratio of 1 means "no effect", hence the reference line at 1.
        axis.axvline(1, color = "black", linewidth = 2.0)
        axis.set_xlabel("Odds Ratio Value")
        axis.set_title(f"{clas} Odds Ratio")
        axis.grid(True)
        plt.savefig(f"{outfile}_{clas}.{extension}", dpi = 100, bbox_inches = "tight")
        plt.close()
def log2OddsRatios(self, outfile: str, extension: str = "png") -> None:
    """
    Function -> log2OddsRatios
    Draw one horizontal bar chart per class showing its non-zero
    log2 odds ratios, sorted by value.
    Parameters
    ---------------------------------------------------------------------------
    outfile <str> (positional) => Plot name
    extension <str> (default: png) => Image extension
    Return
    ---------------------------------------------------------------------------
    None => Generate one log2 odds-ratio plot per class
    """
    self.upgradeInfo("Generating log2 odds ratio plots")
    for clas, ratios in self.analysis.log2oddsRatios().items():
        pairs = [(value, name) for name, value in ratios.items() if value != 0]
        nonZeroValues, nonZeroCoefs = zip(*sorted(pairs))
        _, axis = plt.subplots()
        axis.barh(nonZeroCoefs, nonZeroValues, align = "center")
        # log2(odds ratio) of 0 means "no effect", hence the line at 0.
        axis.axvline(0, color = "black", linewidth = 2.0)
        axis.set_xlabel("Log2 Odds Ratio Value")
        axis.set_title(f"{clas} Log2 Odds Ratio")
        axis.grid(True)
        plt.savefig(f"{outfile}_{clas}.{extension}", dpi = 100, bbox_inches = "tight")
        plt.close()
#_______________________________CONFUSSION MATRIX______________________________
def confussionMatrix(self, outfile: str, extension:str = "png") -> None:
    """
    Function -> confussionMatrix
    Draw one confusion-matrix figure per dataset.
    Parameters
    ---------------------------------------------------------------------------
    outfile <str> (positional) => Plot name
    extension <str> (default: png) => Image extension
    Return
    ---------------------------------------------------------------------------
    None => Generate confussion matrix plots
    """
    self.upgradeInfo("Generating confussion matrix plot")
    # NOTE(review): plot_confusion_matrix was deprecated in scikit-learn 1.0
    # and removed in 1.2; ConfusionMatrixDisplay.from_estimator is the
    # modern replacement — confirm the pinned sklearn version before moving.
    from sklearn.metrics import plot_confusion_matrix
    for name, subset in self.model.dataset.items():
        display = plot_confusion_matrix(self.model.model,
                                        subset["values"],
                                        subset["tags"],
                                        display_labels = self.model.model.classes_)
        display.ax_.set_title(f"{name} dataset")
        plt.savefig(f"{outfile}_{name}.{extension}", dpi = 100, bbox_inches = "tight")
        plt.close()
#__________________________________ROC CURVES__________________________________
def rocCurve(self, outfile: str, extension: str = "png") -> None:
    """
    Function -> rocCurve
    Draw one ROC-curve figure per (dataset, class) pair.
    Parameters
    ---------------------------------------------------------------------------
    outfile <str> (positional) => Plot name
    extension <str> (default: png) => Image extension
    Return
    ---------------------------------------------------------------------------
    None => Generate ROC curves plots
    """
    self.upgradeInfo("Generating ROC curves plot")
    fpr, tpr, area = self.analysis.rocInfo()
    for name, perClass in fpr.items():
        for label in perClass:
            _, axis = plt.subplots()
            axis.plot(perClass[label], tpr[name][label],
                      color = "darkorange",
                      lw = 2,
                      label = f"ROC curve (area = {round(area[name][label], 3)})")
            # Chance-level diagonal for reference.
            axis.plot([0, 1], [0, 1], color = "black", lw = 2, linestyle = "--")
            axis.set_xlim([0.0, 1.0])
            axis.set_ylim([0.0, 1.05])
            axis.set_xlabel("False Positive Rate")
            axis.set_ylabel("True Positive Rate")
            axis.set_title(f"ROC curve for {label} class in {name} dataset")
            axis.legend(loc = "lower right")
            axis.grid(True)
            plt.savefig(f"{outfile}_{name}_{label}.{extension}", dpi = 100, bbox_inches = "tight")
            plt.close()
def datasetRocCurve(self, outfile: str, extension: str = "png") -> None:
    """
    Function -> datasetRocCurve
    Draw one ROC-curve figure per dataset, overlaying all classes
    (micro/macro averages emphasized with thicker dotted lines).
    Parameters
    ---------------------------------------------------------------------------
    outfile <str> (positional) => Plot name
    extension <str> (default: png) => Image extension
    Return
    ---------------------------------------------------------------------------
    None => Generate ROC curves plots
    """
    self.upgradeInfo("Generating ROC curves plot")
    fpr, tpr, area = self.analysis.rocInfo()
    for name, curves in fpr.items():
        _, axis = plt.subplots()
        for label in curves:
            emphasized = label in ["micro", "macro"]
            axis.plot(curves[label], tpr[name][label],
                      lw = 4 if emphasized else 2,
                      label = f"ROC curve {label} (area = {round(area[name][label], 3)})",
                      linestyle = ":" if emphasized else "-")
        axis.plot([0, 1], [0, 1], color = "black", lw = 2, linestyle = "--")
        axis.set_xlim([0.0, 1.0])
        axis.set_ylim([0.0, 1.05])
        axis.set_xlabel("False Positive Rate")
        axis.set_ylabel("True Positive Rate")
        axis.set_title(f"ROC curve for {name} dataset")
        axis.legend(loc = "lower right")
        axis.grid(True)
        plt.savefig(f"{outfile}_{name}.{extension}", dpi = 100, bbox_inches = "tight")
        plt.close()
def classRocCurve(self, outfile: str, extension: str = "png") -> None:
    """
    Function -> classRocCurve
    Plot ROC curves per class (one figure per class, all datasets overlaid)
    Parameters
    ---------------------------------------------------------------------------
    outfile <str> (positional) => Plot name
    extension <str> (default: png) => Image extension
    Return
    ---------------------------------------------------------------------------
    None => Generate ROC curves plots
    """
    # Typo fix: the progress message used to read "clas ROC curves".
    self.upgradeInfo("Generating class ROC curves plot")
    fpr, tpr, area = self.analysis.rocInfo()
    datasets = fpr.keys()
    # All datasets share the same class set; take it from the first one.
    classes = fpr[list(datasets)[0]].keys()
    for clas in classes:
        _, ax = plt.subplots()
        for dataset in datasets:
            # Emphasize the aggregate datasets with a thicker line.
            ax.plot(fpr[dataset][clas], tpr[dataset][clas],
                    lw = 3 if dataset in ["Complete", "Production"] else 1,
                    label = f"ROC curve {dataset} (area = {round(area[dataset][clas], 3)})")
        ax.plot([0, 1], [0, 1], color = "black", lw = 2, linestyle = "--")
        ax.set_xlim([0.0, 1.0])
        ax.set_ylim([0.0, 1.05])
        ax.set_xlabel("False Positive Rate")
        ax.set_ylabel("True Positive Rate")
        ax.set_title(f"ROC curve for {clas} class")
        ax.legend(loc = "lower right")
        ax.grid(True)
        plt.savefig(f"{outfile}_{clas}.{extension}", dpi = 100, bbox_inches = "tight")
        plt.close()
def effectRocCurve(self, outfile: str, extension: str = "png", maxCurves: int = 20) -> None:
    """
    Function -> effectRocCurve
    Plot accumulated coefficient effect ROC curves per class and dataset
    Parameters
    ---------------------------------------------------------------------------
    outfile <str> (positional) => Plot name
    extension <str> (default: png) => Image extension
    maxCurves <int> (default: 20) => Maximum number of coefficient curves per
                                     figure (previously a hard-coded constant)
    Return
    ---------------------------------------------------------------------------
    None => Generate ROC curves plots
    """
    # Progress log added for consistency with the other ROC-plotting methods.
    self.upgradeInfo("Generating accumulated effect ROC curves plot")
    fpr, tpr, area = self.analysis.accumulatedRocInfo()
    for dataset in fpr:
        for clas in fpr[dataset]:
            _, ax = plt.subplots()
            # enumerate replaces the manual counter; stop after maxCurves
            # coefficients to keep the legend readable.
            for counter, coefficient in enumerate(fpr[dataset][clas], start = 1):
                ax.plot(fpr[dataset][clas][coefficient],
                        tpr[dataset][clas][coefficient],
                        lw = 2,
                        label = f"{coefficient} area = {area[dataset][clas][coefficient]}")
                if counter >= maxCurves:
                    break
            ax.plot([0, 1], [0, 1], color = "black", lw = 2, linestyle = "--")
            ax.set_xlim([0.0, 1.0])
            ax.set_ylim([0.0, 1.05])
            ax.set_xlabel("False Positive Ratio")
            ax.set_ylabel("True Positive Ratio")
            ax.set_title(f"{dataset} {clas}")
            # Legend outside the axes: there can be up to maxCurves entries.
            ax.legend(bbox_to_anchor=(1.05, 1), loc='upper left', borderaxespad=0.)
            ax.grid(True)
            plt.savefig(f"{outfile}_{dataset}_{clas}.{extension}", dpi = 100, bbox_inches = "tight")
            plt.close()
#____________________________PRECISION RECALL CURVES___________________________
def precisionRecallCurve(self, outfile: str, extension: str = "png") -> None:
    """
    Function -> precisionRecallCurve
    Plot precision recall curves per class and dataset
    Parameters
    ---------------------------------------------------------------------------
    outfile <str> (positional) => Plot name
    extension <str> (default: png) => Image extension
    Return
    ---------------------------------------------------------------------------
    None => Generate Precision-Recall curves plots
    """
    # Fixed copy-paste slips: the docstring claimed "coefficients plots" and
    # the progress message claimed "ROC curves".
    self.upgradeInfo("Generating Precision-Recall curves plot")
    precision, recall, area = self.analysis.prcInfo()
    for datasetName in precision:
        for clas in precision[datasetName]:
            _, ax = plt.subplots()
            ax.plot(recall[datasetName][clas],
                    precision[datasetName][clas],
                    color = "darkorange",
                    lw = 2,
                    label = f"{clas} AP = {round(area[datasetName][clas], 3)}")
            # Reference anti-diagonal.
            ax.plot([0, 1], [1, 0], color = "black", lw = 2, linestyle = "--")
            ax.set_xlim([0.0, 1.0])
            ax.set_ylim([0.0, 1.05])
            ax.set_xlabel("Recall")
            ax.set_ylabel("Precision")
            ax.set_title(f"Precision-Recall curve for {clas} class in {datasetName} dataset")
            ax.legend(loc = "lower right")
            ax.grid(True)
            plt.savefig(f"{outfile}_{datasetName}_{clas}.{extension}", dpi = 100, bbox_inches = "tight")
            plt.close()
def datasetPrecisionRecallCurve(self, outfile: str, extension: str = "png") -> None:
    """
    Function -> datasetPrecisionRecallCurve
    Plot Precision Recall curves per dataset
    Parameters
    ---------------------------------------------------------------------------
    outfile <str> (positional) => Plot name
    extension <str> (default: png) => Image extension
    Return
    ---------------------------------------------------------------------------
    None => Generate Precision Recall curves plots
    """
    # Fixed copy-paste slip: the progress message claimed "ROC curves".
    self.upgradeInfo("Generating Precision-Recall curves plot")
    precision, recall, area = self.analysis.prcInfo()
    for datasetName in precision:
        _, ax = plt.subplots()
        for clas in precision[datasetName]:
            # Micro/macro averages are emphasized with thick dotted lines.
            ax.plot(recall[datasetName][clas],
                    precision[datasetName][clas],
                    lw = 4 if clas in ["micro", "macro"] else 2,
                    label = f"{clas} AP = {round(area[datasetName][clas], 3)}",
                    linestyle = ":" if clas in ["micro", "macro"] else "-")
        ax.plot([0, 1], [1, 0], color = "black", lw = 2, linestyle = "--")
        ax.set_xlim([0.0, 1.0])
        ax.set_ylim([0.0, 1.05])
        ax.set_xlabel("Recall")
        ax.set_ylabel("Precision")
        ax.set_title(f"Precision-Recall curve for {datasetName} dataset")
        ax.legend(loc = "lower right")
        ax.grid(True)
        plt.savefig(f"{outfile}_{datasetName}.{extension}", dpi = 100, bbox_inches = "tight")
        plt.close()
def classPrecisionRecallCurve(self, outfile: str, extension: str = "png") -> None:
    """
    Function -> classPrecisionRecallCurve
    Plot Precision Recall curves per class
    Parameters
    ---------------------------------------------------------------------------
    outfile <str> (positional) => Plot name
    extension <str> (default: png) => Image extension
    Return
    ---------------------------------------------------------------------------
    None => Generate Precision Recall curves plots
    """
    # Typo fix ("Generation" -> "Generating"); added the missing -> None
    # annotation for consistency with the sibling methods.
    self.upgradeInfo("Generating class Precision Recall curves plot")
    precision, recall, area = self.analysis.prcInfo()
    datasets = precision.keys()
    # All datasets share the same class set; take it from the first one.
    classes = precision[list(datasets)[0]].keys()
    for clas in classes:
        _, ax = plt.subplots()
        for dataset in datasets:
            ax.plot(recall[dataset][clas], precision[dataset][clas],
                    lw = 3 if dataset in ["Complete", "Production"] else 1,
                    label = f"{dataset} AP = {round(area[dataset][clas], 3)}")
        ax.plot([0, 1], [1, 0], color = "black", lw = 2, linestyle = "--")
        ax.set_xlim([0.0, 1.0])
        ax.set_ylim([0.0, 1.05])
        ax.set_xlabel("Recall")
        ax.set_ylabel("Precision")
        ax.set_title(f"Precision-Recall curve for {clas} class")
        ax.legend(loc = "lower right")
        ax.grid(True)
        plt.savefig(f"{outfile}_{clas}.{extension}", dpi = 100, bbox_inches = "tight")
        plt.close()
| [
"b32masaj@gmail.com"
] | b32masaj@gmail.com |
6e84b3bdb7cc737f67e5fd03795a6e4b8bac610f | c485671b5af21327d41458ea3d002682d79d9a9b | /src/basic/process_ubuntu.py | aa86bca2177b1ed250a63abb3f9212e45f902e87 | [
"MIT"
] | permissive | dowhilefalse/AdversarialDialogue | d5abef3057d73c60c18a449687d2c27c6ba61145 | de406d3b624f9500e79bef7c5bc10e24376fefa8 | refs/heads/master | 2021-04-04T01:56:22.031175 | 2019-02-06T03:43:43 | 2019-02-06T03:43:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,020 | py |
# coding: utf-8
# In[ ]:
# coding: utf-8
# In[25]:
"""
Todo!!
1. " ".join each utterance
2. Serve turns in order within one dialogue
(like in LM)
3. Never filter out examples with long sequence, otherwise turn progression will be lost.
4. Remember to shuffle all the dialogues first
3. When training, no need to modify "previous_context_input",
just set the "start_turn_index" correctly!
Todo: combine read_lines, load_pickle, etc... to one single function load_file(),
and use if statement to see which suffix the file has. Also keep an optional param
suffix=None just in case we want to force it to load with a certain format
"""
import numpy as np
import itertools
from pprint import pprint
import random
from nltk import FreqDist
from nltk.tokenize import word_tokenize
from nltk.tag.stanford import StanfordPOSTagger
from nltk.corpus import stopwords
from nltk.tokenize import sent_tokenize
from nltk import pos_tag
from src.basic.util import load_pickle, read_lines, zip_lsts, split_lst, unzip_lst
import math
import string
import copy
from pathlib import Path
from pattern import en
from functools import reduce
from nltk.corpus import wordnet
# In[26]:
# Location of the preprocessed Ubuntu Dialogue Corpus artifacts.
path = "/playpen/home/tongn/processed_ubuntu/"
dataset_dict_file = "Dataset.dict.pkl"
# Each pickle entry is a 4-tuple; only (token, index, _, _) are used here.
dataset_dict = load_pickle(path + dataset_dict_file)
index2token = {index: token for (token, index, _, _) in dataset_dict}
token2index = {token: index for (token, index, _, _) in dataset_dict}
vocab = list(token2index.keys())      # all known token strings
indices = list(token2index.values())  # all known token ids
# Special markers: end-of-turn, end-of-utterance, unknown word.
eot_token = "__eot__"
eou_token = "__eou__"
unk_token = "**unknown**"
eot = token2index[eot_token]
eou = token2index[eou_token]
unk = token2index[unk_token]
# NOTE(review): ``global`` at module level is a no-op; kept unchanged here.
global num_replaces
num_replaces = 0
# In[15]:
def to_str_lst(indices):
    """Translate a sequence of token ids into the corresponding strings."""
    # __getitem__ (not .get) preserves the original KeyError on unknown ids.
    return list(map(index2token.__getitem__, indices))
def index(token):
    """
    Return the vocabulary index of ``token``, or the unknown-token index
    ``unk`` when the token is out of vocabulary.
    """
    # dict.get replaces a bare ``except`` that also swallowed
    # KeyboardInterrupt/SystemExit.
    return token2index.get(token, unk)
def double_trans(translator, utterance):
    """
    Round-trip ("double") machine translation strategy: translate the
    indexed ``utterance`` en -> de -> en with ``translator`` (presumably a
    googletrans-style Translator — confirm) and re-index the result.
    On any failure the original utterance is returned unchanged.
    """
    try:
        utterance_str = ' '.join([index2token[index] for index in utterance])
        # print(utterance_str)
        utterance_de = translator.translate(utterance_str, dest="de", src="en").text
        utterance_adversarial = translator.translate(utterance_de, dest="en", src="de").text.lower()
        # print(utterance_adversarial)
        # correct a translation "error" where **unknown** is separated
        utterance_adversarial = utterance_adversarial.replace("** unknown **", "**unknown**")
        # print(utterance_adversarial)
        # print(" ".join(word_tokenize(utterance_adversarial)))
        # ``index`` maps any OOV tokens produced by the translator to unk.
        indexed_adversarial = list(map(index, word_tokenize(utterance_adversarial.lower())))
        return indexed_adversarial
    except:
        # NOTE(review): bare except is deliberate best-effort (translation
        # service/network failures), but it also hides programming errors.
        print("Warning: unsuccessful with utterance:", utterance)
        return utterance
# Pre-computed paraphrases: tuple of token ids -> paraphrased token-id list.
para_dict = load_pickle("/playpen/home/tongn/pointer_generator_log/ubuntu/para_dict.pkl")

def get_para(utterance):
    """
    Return the pre-computed paraphrase of ``utterance`` (a list of token
    ids) with the __eou__ marker appended; fall back to the original
    utterance when no paraphrase is recorded.
    """
    try:
        paraphrased_utterance = para_dict[tuple(utterance)]
    except KeyError:
        # Narrowed from a bare ``except`` that also hid unrelated errors.
        return utterance + [eou]
    return paraphrased_utterance + [eou]
def generative_paraphrase(utterance):
    """
    Strategy 7: replace every utterance in the turn with its generative
    paraphrase (via ``get_para``) and re-append the __eot__ marker.
    """
    # Drop the trailing __eot__, split the turn on __eou__, paraphrase each
    # segment, and flatten the results back into a single token-id list.
    segments = split_lst(utterance[:-1], eou, keep_delimiter=False)
    flattened = [token for segment in segments for token in get_para(segment)]
    return flattened + [eot]
# def generative_paraphrase(utterance):
# """strategy 7"""
# # translator = Translator()
# # turns = [
# # [double_trans(translator, utterance[:(-1)]) for utterance in split_lst(turn[:(-1)], eou)]
# # for turn in split_lst(utterance, eot)]
# # utterances_concat = [
# # reduce(lambda a, b: (a + [eou] + b), turn) + [eou] for turn in turns]
# # turns_concat = reduce(lambda a, b: (a + [eot] + b), utterances_concat)
# turns_concat = utterance # when testing with strategy 7, we already have parphrase.Test.dialogues.pkl
# return turns_concat
# In[16]:
# x = [1539, 4745, 10269, 17925, 6916, 8070, 19766, 9267, 18828, eou,
# 0, 4563, 8828, 11680, 7336, 19883, 5971, 1968, 780, eou,
# eot]
# generative_paraphrase(x)
# In[5]:
# utterance_str = "im trying to use ubuntu on my macbook pro **unknown** __eou__ i read in the forums that ubuntu has apple version now ? __eou__ __eot__ not that ive ever heard of .. normal ubutnu should work on an intel based mac . there is PPC version also . __eou__ you want total control ? or what are you wanting exactly ? __eou__"
# utterance = list(map(lambda x: token2index[x], utterance_str.split()))
# paraphrased_utterance = generative_paraphrase(utterance)
# print(paraphrased_utterance)
# print(utterance_str)
# print(' '.join(list(map(lambda x: index2token[x], paraphrased_utterance))))
# In[23]:
"""
• POS tags we need:
JJ adjective green
JJR adjective, comparative greener
JJS adjective, superlative greenest
VB: verb, base form
ask assemble assess assign assume atone attention avoid bake balkanize
bank begin behold believe bend benefit bevel beware bless boil bomb
boost brace break bring broil brush build ...
VBD: verb, past tense
dipped pleaded swiped regummed soaked tidied convened halted registered
cushioned exacted snubbed strode aimed adopted belied figgered
speculated wore appreciated contemplated ...
VBG: verb, present participle or gerund
telegraphing stirring focusing angering judging stalling lactating
hankerin' alleging veering capping approaching traveling besieging
encrypting interrupting erasing wincing ...
VBN: verb, past participle
multihulled dilapidated aerosolized chaired languished panelized used
experimented flourished imitated reunifed factored condensed sheared
unsettled primed dubbed desired ...
VBP: verb, present tense, not 3rd person singular
predominate wrap resort sue twist spill cure lengthen brush terminate
appear tend stray glisten obtain comprise detest tease attract
emphasize mold postpone sever return wag ...
VBZ: verb, present tense, 3rd person singular
bases reconstructs marks mixes displeases seals carps weaves snatches
slumps stretches authorizes smolders pictures emerges stockpiles
seduces fizzes uses bolsters slaps speaks pleads ...
VB verb be, base form be
VV verb, base form take
VBD verb be, past tense was, were
VVD verb, past tense took
VBG verb be, gerund/present participle being
VVG verb, gerund/present participle taking
VBN verb be, past participle been
VVN verb, past participle taken
VBP verb be, sing. present, non-3d am, are
VVP verb, sing. present, non-3d take
VBZ verb be, 3rd person sing. present is
VVZ verb, 3rd person sing. present takes
VH verb have, base form have
VHD verb have, past tense had
VHG verb have, gerund/present participle having
VHN verb have, past participle had
VHP verb have, sing. present, non-3d have
VHZ verb have, 3rd person sing. present has
• Might be different from StanfordPOSTagger:
• Think about how to change "does n't do sth." to "does sth." (i.e. how to change it back)
"""
# Stanford POS tagger (local installation), loaded once at import time.
# NOTE(review): st_pos is not referenced elsewhere in this chunk — the code
# below uses nltk.pos_tag instead; possibly dead, confirm before removing.
path_pos = "/playpen/home/tongn/stanford-postagger-full-2017-06-09/"
tagger_pos = "models/english-bidirectional-distsim.tagger"
jar_pos = "stanford-postagger.jar"
st_pos = StanfordPOSTagger(path_pos + tagger_pos, path_pos + jar_pos)
# path_to_jar = 'path_to/stanford-parser-full-2014-08-27/stanford-parser.jar'
# path_to_models_jar = 'path_to/stanford-parser-full-2014-08-27/stanford-parser-3.4.1-models.jar'
# dependency_parser = StanfordDependencyParser(path_to_jar=path_to_jar, path_to_models_jar=path_to_models_jar)
# result = dependency_parser.raw_parse('I shot an elephant in my sleep')
# dep = result.__next__()
# list(dep.triples())
# Token inserted to negate verbs in strategy 6.
not_token = "not"

def add_not(turn):
    """
    Strategy 6: negate (at most) one verb in every utterance of ``turn``.

    ``turn`` is a list of token ids ending in __eot__; each utterance inside
    it ends in __eou__.  For each utterance, the first verb found by
    nltk.pos_tag is negated according to its tag ("didn't"/"doesn't"/"n't"/
    inserted "not"), then the turn is reassembled with __eot__ re-appended.
    """
    utterances = split_lst(turn[:(-1)], eou) # get rid of __eot__
    if utterances == []:
        return turn
    adv_utterances = []
    for utterance in utterances:
        if utterance[:(-1)] == []: # if only __eou__ is present
            adv_utterances.append(utterance)
            continue
        str_utterance = [index2token[index] for index in utterance] # Convert to text
        tagged_utterance = pos_tag(str_utterance[:(-1)]) # get rid of __eou__
        for (token, tag) in tagged_utterance:
            if tag in ["VB", "VBD", "VBG", "VBN", "VBP", "VBZ"]:
                # NOTE(review): .index finds the FIRST occurrence of the
                # token string, which may not be the tagged position when
                # the token repeats — confirm this is acceptable.
                try:
                    i = str_utterance.index(token)
                except:
                    continue
                if tag in ["VB", "VBG", "VBN"]:
                    # Base form / participle: prepend "not".
                    str_utterance.insert(i, not_token)
                elif tag == "VBD":
                    if token in ["was", "were"]:
                        # str_utterance.insert(i + 1, not_token)
                        str_utterance[i] += "n't"
                    else:
                        # Regular past tense: "took" -> "didn't take";
                        # skip if the base form is out of vocabulary.
                        present = en.conjugate(token, person=1, tense=en.PRESENT)
                        if present != token and present in vocab:
                            # str_utterance[i: (i + 1)] = ["did", not_token, present]
                            str_utterance[i: (i + 1)] = ["didn't", present]
                        else:
                            continue
                elif tag == "VBP":
                    # if token in ["am", "are"]:
                    if token == "am":
                        str_utterance.insert(i + 1, not_token)
                    elif token == "are":
                        str_utterance[i] += "n't"
                    else:
                        # "take" -> "don't take".
                        if token in vocab:
                            # str_utterance[i: (i + 1)] = ["do", not_token, token]
                            str_utterance[i: (i + 1)] = ["don't", token]
                        else:
                            continue
                elif tag == "VBZ":
                    if token == "is":
                        # str_utterance.insert(i + 1, not_token)
                        str_utterance[i] += "n't"
                    else:
                        # "takes" -> "doesn't take".
                        present = en.conjugate(token, person=1, tense=en.PRESENT)
                        if present != token and present in vocab:
                            # str_utterance[i: (i + 1)] = ["does", not_token, present]
                            str_utterance[i: (i + 1)] = ["doesn't", present]
                        else:
                            continue
                break # only need to replace one token for each utterance
        # Convert back to token
        indexed_utterance = [token2index[str_token] for str_token in str_utterance]
        adv_utterances.append(indexed_utterance)
    final_turn = list(itertools.chain(*adv_utterances)) + [eot] # append back __eot__
    return final_turn
# In[24]:
# utterance = "i was big . __eou__ i went to class . __eou__ i see some cats . __eou__ he hates you . __eou__ __eot__".lower().split()
# indexed_utterance = [token2index[token] for token in utterance]
# not_utterance = add_not(indexed_utterance)
# print(not_utterance)
# print(" ".join([index2token[index] for index in not_utterance]))
# In[8]:
# def tag_everything(data):
# dialogues = [
# split_lst(dialogue, delimiter)
# for dialogue in data]
# In[9]:
# """
# Load pickled lists
# """
# data_path = "/playpen/home/tongn/processed_ubuntu/"
# tagged_test_file = data_path + "tagged_test.pkl"
# if Path(tagged_test_file).is_file():
# tagged_test = load_pickle(tagged_test_file)
# else:
# test = load_pickle(data_path + "Test.dialogues.pkl")
# tagged_test = tag_everything(test)
# In[10]:
# Token id -> collection of "error variant" token ids mined from AESW 2016.
grammar_error_dict = load_pickle("/playpen/home/tongn/adversarial/data/aesw-2016/bpe_grammar_error_dict.pkl")

def grammar_errors(utterance, adv_rate=1.0):
    """
    strategy 8:
    Introduce grammatical errors: each token is, with probability
    ``adv_rate``, replaced by a random error variant from
    ``grammar_error_dict``; tokens without variants are left unchanged.
    """
    utterance = copy.copy(utterance)  # shallow copy suffices: items are ints
    for i in range(len(utterance)):
        if random.random() <= adv_rate:
            try:
                utterance[i] = random.sample(grammar_error_dict[utterance[i]], 1)[0]
            except (KeyError, ValueError):
                # Narrowed from a bare ``except``: KeyError = no variants
                # recorded; ValueError = recorded variant set is empty.
                continue
    return utterance
# In[39]:
# Map NLTK "universal" tagset names to the WordNet POS constants accepted
# by wordnet.synsets(..., pos=...).
pos_tag_dict = {
    'VERB': wordnet.VERB,
    'NOUN': wordnet.NOUN,
    'ADJ': wordnet.ADJ,
    'ADV': wordnet.ADV}

def antonym(turn):
    """
    strategy 9:
    Replace a word from each utterance with its antonym

    For each utterance in ``turn`` (token ids, __eou__-delimited, ending in
    __eot__), the first word that has an in-vocabulary WordNet antonym is
    replaced by a randomly chosen one; forms of "to be" are skipped.
    """
    utterances = split_lst(turn[:(-1)], eou) # get rid of __eot__
    if utterances == []:
        return turn
    adv_utterances = []
    for utterance in utterances:
        if utterance[:(-1)] == []: # if only __eou__ is present
            adv_utterances.append(utterance)
            continue
        str_utterance = [index2token[index] for index in utterance] # Convert to text
        tagged_utterance = pos_tag(str_utterance[:(-1)], tagset='universal') # get rid of __eou__
        for (i, (token, tag)) in enumerate(tagged_utterance):
            if token in ['be', 'am', 'is', 'are', 'was', 'were', 'been']:
                continue
            # Unmapped tags fall back to pos=None (search all POS).
            try:
                pos = pos_tag_dict[tag]
            except:
                pos = None
            # Collect one antonym lemma name per lemma that has any.
            antonyms = [
                lemma.antonyms()[0].name()
                for synset in wordnet.synsets(token, pos=pos)
                for lemma in synset.lemmas()
                if lemma.antonyms() != []]
            in_vocab_antonyms = set(antonyms).intersection(set(vocab))
            if len(in_vocab_antonyms) == 0: # this includes the case where there are no antonyms
                continue
            else:
                # NOTE(review): random.sample on a set is rejected from
                # Python 3.11 on — confirm the runtime version.
                str_utterance[i] = random.sample(in_vocab_antonyms, 1)[0]
                break # only need to replace one token for each utterance
        # Convert back to token
        indexed_utterance = [token2index[str_token] for str_token in str_utterance]
        adv_utterances.append(indexed_utterance)
    final_turn = list(itertools.chain(*adv_utterances)) + [eot] # append back __eot__
    return final_turn
# In[41]:
# sent = [token2index[token] for token in "i hate you . __eou__ he was beautiful . __eou__ he left without a word . __eou__ __eot__".split()]
# print(sent)
# print([index2token[index] for index in antonym(sent)])
# In[17]:
# unigram2entity = load_pickle("/home/tongn/Ubuntu-Multiresolution-Tools/ActEntRepresentation/unigram2entity_dict.pkl")
# activities_dict = load_pickle("/home/tongn/Ubuntu-Multiresolution-Tools/ActEntRepresentation/activities_dict.pkl")
# entities = set(unigram2entity.keys()).intersection(set(vocab))
# activities = set(activities_dict.keys()).intersection(set(vocab))
# def confusing_actent(utterance):
# """
# strategy 7:
# Replace entity with a confusing one
# """
# utterance_str = [index2token[index] for index in utterance]
# for (i, token) in enumerate(utterance_str):
# if token in entities:
# utterance_str[i] = random.sample(entities, 1)[0]
# break
# elif token in activities:
# utterance_str[i] = random.sample(activities, 1)[0]
# break
# utterance = [token2index[token] for token in utterance_str]
# return utterance
# In[18]:
# utterance = "i was big . __eou__ i went to class . __eou__ i like talking . __eou__ he install hates you . __eou__ __eot__".lower().split()
# indexed_utterance = [token2index[token] for token in utterance]
# not_utterance = confusing_actent(indexed_utterance)
# print(not_utterance)
# print(" ".join([index2token[index] for index in not_utterance]))
# In[14]:
# x = list(grammar_error_dict.items())
# print(len(x))
# print(x[:20])
# In[19]:
"""
Adversarial strategies
"""
no_swap_lst = []
for token in list(string.punctuation) + [eou_token, eot_token]:
try:
no_swap_lst.append(token2index[token])
except:
pass
# num_stopwords = 100
eou = token2index["__eou__"]
# word_freq_list = read_lines("/home/tongn/vocab_book/word_freq_list.txt")
# stopwords_str = unzip_lst([line.split("\t") for line in word_freq_list])[1][:num_stopwords]
stopwords_str = stopwords.words("english")
# stopwords_str[1:2] = ["is", "are", "am", "was", "were", "been"]
# keep_lst = ["I", "you", "he", "we", "his", "not", "n't", "she",
# "what", "their", "who", "her", "my", "when", "which",
# "them", "me", "him", "your", "how", "our", "because"]
keep_lst = ['i', 'me', 'my', 'myself', 'we', 'our', 'ours', 'ourselves', 'you', "you're",
"you've", "you'll", "you'd", 'your', 'yours', 'yourself', 'yourselves', 'he',
'him', 'his', 'himself', 'she', "she's", 'her', 'hers', 'herself', 'it', "it's",
'its', 'itself', 'they', 'them', 'their', 'theirs', 'themselves',
'what', 'which', 'who', 'whom', 'when', 'where', 'why', 'how',
"not"]
stopwords_str = list(set(stopwords_str).difference(set(keep_lst)))
indexed_stopwords = []
for token in (stopwords_str + list(string.punctuation)):
if "n't" in token:
continue
try:
indexed_stopwords.append(token2index[token])
except:
pass
def get_boundary_indices(utterance):
    """Return swap boundaries: position 0, every punctuation/__eou__/__eot__
    position (per ``no_swap_lst``), and the utterance's last index."""
    middles = [pos for pos, tok in enumerate(utterance) if tok in no_swap_lst]
    return [0] + middles + [len(utterance) - 1]
def identity(utterance):
    """Baseline (no-op) strategy: return the utterance unchanged."""
    return utterance
def random_swap(utterance, swap_ratio=3):
    """
    strategy 1:
    Only swapping neighboring words for each sentence
    New:
    obtain a list of boundary indices, determined by punctuation/__eou__
    between each two boundary indices, randomly swap two tokens

    Roughly one swap is attempted per ``swap_ratio`` tokens in each segment;
    after each swap the position and its neighbors are removed from the
    candidate pool so the same token is not moved twice.
    """
    # Avoid modifying the original list
    utterance = copy.deepcopy(utterance)
    boundary_indices = get_boundary_indices(utterance)
    for (start, end) in zip(boundary_indices[:(-1)], boundary_indices[1:]):
        candidate_indices = set(range(start + 1, end - 1)) # exclude start. end is excluded by default.
        # if len(candidate_indices) != 0:
        #     first_letter_of_first_token = utterance[start + 1][0]
        #     if not (first_letter_of_first_token >= 'A' and first_letter_of_first_token <= 'Z'): # if the first letter is not capitalized
        #         candidate_indices.add(start + 1)
        num_swaps = (end - start - 1) // swap_ratio
        for i in range(num_swaps):
            if len(candidate_indices) == 0:
                break
            # NOTE(review): random.sample on a set is rejected from
            # Python 3.11 on — confirm the runtime version.
            index = random.sample(candidate_indices, 1)[0]
            # Swap the chosen token with its right neighbor.
            (utterance[index], utterance[index + 1]) = (utterance[index + 1], utterance[index])
            candidate_indices.remove(index)
            # Also retire both neighbors (they may be absent from the pool).
            try:
                candidate_indices.remove(index - 1)
            except:
                pass
            try:
                candidate_indices.remove(index + 1)
            except:
                pass
    return utterance
def stopword_dropout(utterance, adv_rate=1.0):
    """
    Strategy 2: drop every token that appears in indexed_stopwords.

    Returns a new list; the input is not modified. `adv_rate` is kept for
    signature compatibility with the other strategies but is currently unused
    (all stopword tokens are removed unconditionally).
    """
    # The comprehension already builds a fresh list, so the original
    # deepcopy + unused length variable were removed.
    return [token for token in utterance if token not in indexed_stopwords]
# paraphrase_dict: tuple --> [tuples]
# PPDB 2.0 paraphrase table mapping a phrase (tuple of token indices) to the
# list of its candidate paraphrases (each itself a tuple of token indices).
paraphrase_dict = load_pickle(
    "/playpen/home/tongn/ppdb2.0/ppdb-2.0-s-all_paraphrase_dict.pkl")
# NOTE(review): despite the name, `keys` actually holds the dict VALUES;
# `keys`/`lens` feed only the frequency inspection commented out below.
keys = list(paraphrase_dict.values())
lens = [len(key) for key in keys]
# print(FreqDist(lens).most_common())
def paraphrase(utterance, adv_rate=1.0):
    """
    Strategy 3: replace phrases with PPDB paraphrases.

    Scans the utterance from the end, matching the longest segment first.
    A matched segment is replaced (with probability adv_rate) by a random
    candidate from paraphrase_dict, and its positions are recorded so that
    overlapping segments are never paraphrased twice.
    """
    utterance = copy.deepcopy(utterance)
    utterance_len = len(utterance)
    paraphrased_indices = set()  # positions that are already paraphrased
    for i in range(utterance_len - 1, -1, -1):  # reverse order loop
        for j in range(utterance_len, i, -1):  # match the longest segment first
            # Skip any segment overlapping an already-paraphrased span.
            if not paraphrased_indices.isdisjoint(range(i, j)):
                continue
            segment = tuple(utterance[i: j])
            try:
                # KeyError: segment not in the table;
                # IndexError: empty candidate list for this segment.
                # (Narrowed from a bare `except`, which also hid real bugs.)
                paraphrased_segment = list(random.choice(paraphrase_dict[segment]))
            except (KeyError, IndexError):
                continue
            if random.random() <= adv_rate:
                utterance[i: j] = paraphrased_segment
                paraphrased_indices.update(range(i, j))  # update paraphrased indices
    return utterance
def random_input(utterance):
    """strategy 4"""
    # Replace the entire utterance with distinct random vocabulary indices of
    # the same length (the content of `utterance` is ignored; only its length
    # is used).
    # NOTE(review): `indices` is not defined in this function -- it must be a
    # module-level pool of vocabulary indices; confirm it is in scope when
    # this strategy is selected.
    utterance_len = len(utterance)
    return random.sample(indices, utterance_len)
def stammer(utterance, stammer_ratio=3):
    """
    Strategy 5: "stammer" by duplicating roughly 1/stammer_ratio of the
    tokens in place, never duplicating the __eou__/__eot__ marker tokens.
    """
    utterance = copy.deepcopy(utterance)
    utterance_len = len(utterance)
    num_stammers = utterance_len // stammer_ratio
    # Process positions from largest to smallest so previously chosen indices
    # remain valid as the list grows.
    # BUGFIX: the original filtered `index not in [eou, eot]`, comparing a
    # list POSITION against the marker TOKEN ids; the intent is to skip
    # positions holding the marker tokens themselves.
    indices = [
        index for index
        in sorted(random.sample(range(utterance_len), num_stammers), reverse=True)
        if utterance[index] not in (eou, eot)]
    for index in indices:  # from largest to smallest
        utterance[index: (index + 1)] = [utterance[index]] * 2
    return utterance
# Adversarial-strategy dispatch table; training code indexes into this list
# by strategy_index. add_not / generative_paraphrase / grammar_errors /
# antonym are defined elsewhere in this module.
strategies = [
    identity, #0
    random_swap, #1
    stopword_dropout, #2
    paraphrase, #3
    random_input, #4
    stammer, #5
    # stopword_insertion, #5
    add_not, #6
    generative_paraphrase, #7
    # confusing_actent, #7
    grammar_errors, #8
    antonym, #9
]
# In[3]:
def concat(turns):
    """Flatten a list of turns (list of token lists) into one flat list."""
    flattened = []
    for turn in turns:
        flattened.extend(turn)
    return flattened
def split_lst_by_length(lst, step_size):
    """Chop lst into consecutive chunks of step_size; the last chunk may be
    shorter. An empty list yields no chunks."""
    chunks = []
    start = 0
    total = len(lst)
    while start < total:
        chunks.append(lst[start: start + step_size])
        start += step_size
    return chunks
# In[1]:
class DataGenerator(object):
    """
    Batch generator for adversarial dialogue training.

    Each yielded batch contains, per example, a window of
    `max_dialogue_length` consecutive turns of a dialogue. Source turns may
    be perturbed by one of the module-level `strategies` (selected via
    strategy_index); the final (target) turn is left intact for most
    strategies. Yields None once the dialogue pool is exhausted.
    """
    def __init__(self, data, delimiter, glimpse_training=True, step_size=10,
                 max_dialogue_length=3,
                 batch_size=192, shuffle=True,
                 is_training=True, strategy_index=0,
                 feed_both_examples=False,
                 concat_turns=False,
                 bpe=False):
        # data: list of dialogues. When bpe=True, already split into turns;
        #   otherwise each dialogue is a flat token list split on `delimiter`.
        # delimiter: token index of the turn separator (__eot__).
        # feed_both_examples: each batch holds positive examples followed by
        #   their adversarial counterparts, so the per-half size is halved.
        self.is_training = is_training
        self.glimpse_training = glimpse_training
        # If we are training and use the truncated version,
        # we divide data by even chuncks of step_size, otherwise by turns ("__eot__")
        if bpe:
            self.dialogues = data
            strategy_index = 0 # no need to apply any strategy
        else:
            print("Dividing each dialogue by turns (i.e. __eot__)")
            self.dialogues = [
                split_lst(dialogue, delimiter)
                for dialogue in data]
        self.max_dialogue_length = max_dialogue_length
        self.shuffle = shuffle
        if self.shuffle:
            random.shuffle(self.dialogues)
        self.strategy_index = strategy_index
        self.feed_both_examples = feed_both_examples
        if self.feed_both_examples:
            # Feeding pos + neg only makes sense with a real strategy.
            assert strategy_index != 0
            assert batch_size % 2 == 0
            self.batch_size = batch_size // 2 # because we concatenate pos + neg examples in each batch
        else:
            self.batch_size = batch_size
        self.concat_turns = concat_turns
    @staticmethod
    def max_len(lst):
        # Length of the longest element of lst.
        return max([len(x) for x in lst])
    @staticmethod
    def to_str(batch_examples):
        # Render each turn as a space-joined string of token indices.
        str_batch_examples = [
            [" ".join([str(token) for token in turn])
            for turn in example]
            for example in batch_examples]
        return str_batch_examples
    def batch_generator(self):
        """
        Yield (dialogue_indices, start_turn_indices, batch_examples,
        turn_lengths_lst) tuples forever, then a single None when the
        dialogue pool runs out.
        """
        print("There are %d dialogues." % len(self.dialogues))
        turn_lengths_lst = [
            len(turn)
            for dialogue in self.dialogues
            for turn in dialogue]
        if not self.is_training:
            print("We are testing ==> no length threshold is applied.")
        # Training truncates each turn to the 90th-percentile length;
        # testing keeps everything (threshold = max length).
        length_threshold = int(np.percentile(turn_lengths_lst, 90)) if self.is_training else max(turn_lengths_lst)
        print("Length threshold:", length_threshold)
        print("All turns longer than this will be truncated to this length.")
        # Convert all indices to string_indices
        self.dialogues = [
            [[token
              for token in turn[(-length_threshold):]] # only keeping the last length_threshold tokens
             for turn in dialogue]
            for dialogue in self.dialogues
            if len(dialogue) >= self.max_dialogue_length]
        num_dialogues = len(self.dialogues)
        print("There are %d dialogues left." % num_dialogues)
        assert num_dialogues >= self.batch_size, "Error: Number of dialogues less than batch_size"
        acc = self.dialogues[:self.batch_size] # an accumlator that yields a batch when it is of batch_size
        dialogue_indices = list(range(self.batch_size)) # initialize dialogue indices
        next_dialogue_index = self.batch_size # initialize the index of the next dialogue
        start_turn_indices = [0] * self.batch_size # track which turn we are at
        while True:
            # Current window of turns for every active dialogue.
            batch_examples = [
                dialogue[start_turn_index: (start_turn_index + self.max_dialogue_length)]
                for (dialogue, start_turn_index)
                in zip(acc, start_turn_indices)]
            if self.strategy_index == 4: # if we are using random_source_input
                # Replace every source turn with a random turn sampled from
                # the whole corpus; keep the target turn.
                adv_batch_examples = [
                    [random.choice(random.choice(self.dialogues))
                     if (i != self.max_dialogue_length - 1)
                     else turn # avoid modifying target turn
                     for (i, turn) in enumerate(example)]
                    for example in batch_examples]
            elif self.strategy_index == 5: # if we are using random ground-truth target
                # Keep sources; replace the target with a random corpus turn.
                adv_batch_examples = [
                    [turn
                     if (i != self.max_dialogue_length - 1)
                     else random.choice(random.choice(self.dialogues)) # modify target turn
                     for (i, turn) in enumerate(example)]
                    for example in batch_examples]
            else:
                # Apply the configured per-turn strategy to source turns only.
                adv_batch_examples = [
                    [strategies[self.strategy_index](turn)
                     if (i != self.max_dialogue_length - 1)
                     else turn # avoid modifying target turn
                     for (i, turn) in enumerate(example)]
                    for example in batch_examples]
            if self.feed_both_examples:
                # Positive examples first, adversarial ones appended after.
                feed_dialogue_indices = dialogue_indices + dialogue_indices
                feed_start_turn_indices = start_turn_indices + start_turn_indices
                feed_batch_examples = batch_examples + adv_batch_examples
            else:
                feed_dialogue_indices = dialogue_indices
                feed_start_turn_indices = start_turn_indices
                feed_batch_examples = adv_batch_examples
            turn_lengths_lst = [
                [len(turn) for turn in example]
                for example in feed_batch_examples]
            if self.concat_turns:
                # Collapse lengths to [total_source_length, target_length].
                turn_lengths_lst = [
                    [sum(turn_lengths[:(-1)]), turn_lengths[-1]]
                    for turn_lengths in turn_lengths_lst]
            # When cancating turns, yield [int] rather than str
            if self.concat_turns:
                feed_batch_examples = [
                    [concat(example[:(-1)]), example[-1]]
                    for example in feed_batch_examples]
            else:
                feed_batch_examples = DataGenerator.to_str(feed_batch_examples)
            yield (feed_dialogue_indices,
                   feed_start_turn_indices,
                   feed_batch_examples,
                   turn_lengths_lst)
            # Advance each dialogue's window; swap in a fresh dialogue once
            # the current one has no more full windows.
            for (i, dialogue) in enumerate(acc):
                start_turn_indices[i] += 1 # move on to the next example
                if start_turn_indices[i] + self.max_dialogue_length > len(dialogue): # if we finish the current dialogue
                    acc[i] = self.dialogues[next_dialogue_index] # change examples at index i in acc
                    dialogue_indices[i] = next_dialogue_index
                    start_turn_indices[i] = 0 # reset example index
                    next_dialogue_index += 1
                    if next_dialogue_index >= num_dialogues:
                        """todo: let the remaining dialgoues finish when out of new dialgoues"""
                        yield None
                        return
# # Apply adversarial strategy
# examples = [
# (strategies[self.strategy_index](turn1),
# strategies[self.strategy_index](turn2),
# turn3)
# for (turn1, turn2, turn3) in examples]
# if concat_turns:
# examples = [
# (turn1 + turn2, turn3)
# for (turn1, turn2, turn3)
# in examples
# if len(turn3) <= self.max_iterations]
# num_examples = len(examples)
# acc_size = len(acc)
# if acc_size + num_examples < self.batch_size:
# acc += examples
# continue
# else:
# concat_lst = acc + examples
# acc = concat_lst[self.batch_size:]
# yield concat_lst[:self.batch_size
# In[4]:
# from src.basic.util import load_pickles, unzip_lst
# def get_vocab(data):
# vocab = unzip_lst(data)[0]
# return vocab
# """
# Load pickled lists
# """
# data_path = "/playpen/home/tongn/processed_ubuntu/"
# filenames = [
# "Dataset.dict.pkl",
# "Training.dialogues.pkl",
# "Validation.dialogues.pkl",
# "Test.dialogues.pkl"
# ]
# files = [data_path + filename for filename in filenames]
# # Load files
# data = load_pickles(files)
# vocab = get_vocab(data[0])
# splits = ["train", "valid", "test"]
# data_dict = {
# split: dialogues for (split, dialogues)
# in zip(splits, data[1:])}
# end_token_str = "__eot__"
# end_token = vocab.index(end_token_str)
# In[5]:
# generator = DataGenerator(
# data_dict["train"], end_token,
# 20001, step_size=20,
# max_dialogue_length=3, max_iterations=None,
# batch_size=8, shuffle=True,
# is_training=True,
# strategy_index=0)
# batch = generator.batch_generator()
# In[6]:
# pprint(next(batch)[1])
| [
"tn66@duke.edu"
] | tn66@duke.edu |
0565aa6f020b9a0cec1aadb20a7b89e216fe928b | f0d0ea29240c53b6ce1c4b06095b528ece02fdd7 | /core/championship.py | d714983cb55b99879f051866efec7695e0065120 | [] | no_license | zhifuliu/dianjing | 477529ccd6159329e1bc121aeb2ff328ee499f4a | 7b3f6d58f5bc0738651d8d72c9a24df4ade0ed36 | refs/heads/master | 2020-03-21T09:10:28.343268 | 2017-03-24T03:06:24 | 2017-03-24T03:06:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 41,317 | py | # -*- coding: utf-8 -*-
"""
Author: Wang Chao <yueyoum@gmail.com>
Filename: championship
Date Created: 2016-12-09 15:13
Description:
"""
import random
import arrow
import itertools
import requests
from django.conf import settings
from dianjing.exception import GameException
from core.mongo import (
MongoChampionship,
MongoChampionshipFormationWay1,
MongoChampionshipFormationWay2,
MongoChampionshipFormationWay3,
MongoChampionshipGroup,
MongoChampionshipLevel,
MongoChampionHistory,
MongoCharacter,
)
from core.plunder import PlunderFormation, Plunder, is_npc
from core.vip import VIP
from core.club import Club, get_club_property
from core.mail import MailManager
from core.resource import ResourceClassification
from core.match import ClubMatch, MatchRecord
from core.winning import WinningChampionship
from utils.message import MessagePipe, MessageFactory
from utils.functional import make_string_id, make_time_of_today, get_start_time_of_today
from utils.operation_log import OperationLog
from config import (
GlobalConfig,
ConfigErrorMessage,
ConfigChampionBet,
ConfigChampionRankReward,
ConfigChampionScoreReward,
ConfigChampionWinScore,
ConfigPlunderNPC,
ConfigNPCFormation,
)
from protomsg.common_pb2 import ACT_INIT, ACT_UPDATE
from protomsg.championship_pb2 import (
CHAMPION_LEVEL_1,
CHAMPION_LEVEL_2,
CHAMPION_LEVEL_4,
CHAMPION_LEVEL_8,
CHAMPION_LEVEL_16,
ChampionFormationNotify,
ChampionGroupNotify,
ChampionLevelNotify,
ChampionNotify,
ChampionClub as MsgChampionClub,
)
from protomsg.match_pb2 import ClubMatchServerSideRequest, ClubMatchServerSideResponse
from protomsg.plunder_pb2 import PlunderFormation as MsgPlunderFormation
from protomsg.formation_pb2 import FORMATION_SLOT_USE
from protomsg.leaderboard_pb2 import LeaderboardChampionshipNotify
# Knockout progression order: top-16 -> top-8 -> top-4 -> top-2 -> champion.
LEVEL_SEQ = [16, 8, 4, 2, 1]
LEVEL_NEXT_TABLE = {
    16: 8,
    8: 4,
    4: 2,
    2: 1,
}
LEVEL_PREVIOUS_TABLE = {v: k for k, v in LEVEL_NEXT_TABLE.iteritems()}
# Group-stage match times: (hour, minute), six rounds per group day.
GROUP_MATCH_TIME = [
    [14, 0],
    [15, 0],
    [16, 0],
    [17, 0],
    [18, 0],
    [19, 0],
]
# GROUP_MATCH_TIME = [
#     [16, 5],
#     [16, 7],
#     [16, 9],
#     [16, 11],
#     [16, 13],
#     [16, 15],
# ]
# Knockout round -> (hour, minute) at which that round's matches start.
LEVEL_MATCH_TIMES_TO_HOUR_MINUTE_TABLE = {
    16: [19, 30],
    8: [20, 0],
    4: [20, 30],
    2: [21, 0],
}
# LEVEL_MATCH_TIMES_TO_HOUR_MINUTE_TABLE = {
#     16: [16, 20],
#     8: [16, 25],
#     4: [16, 30],
#     2: [16, 35],
# }
# Formation changes and betting are forbidden this many minutes before a match.
MINUTES_LIMIT_FOR_FORMATION_AND_BET = 10
# [[(hour, minute), (hour, minute)] ...]
# Each element is a pair of (h, m) times; the span BETWEEN them is the
# forbidden window (lock starts MINUTES_LIMIT_FOR_FORMATION_AND_BET minutes
# before each scheduled match).
TIME_LIMIT = []
for __h, __m in itertools.chain(GROUP_MATCH_TIME, LEVEL_MATCH_TIMES_TO_HOUR_MINUTE_TABLE.values()):
    __m1 = __m - MINUTES_LIMIT_FOR_FORMATION_AND_BET
    if __m1 < 0:
        # Window start falls into the previous hour (e.g. 14:00 -> 13:50).
        __m1 += 60
        __h1 = __h - 1
        assert __h1 >= 0
    else:
        __h1 = __h
    TIME_LIMIT.append(((__h1, __m1), (__h, __m)))
# Matches actually start this many minutes ahead of the advertised time.
MATCH_AHEAD_MINUTE = 1
# Weekdays on which applying (sign-up) is allowed.
APPLY_WEEKDAY = [
    # 0, # Monday
    1, # Tuesday
    # 2, # Wednesday
    3, # Thursday
    # 4, # Friday
    5, # Saturday
    # 6, # Sunday
]
# Allowed apply time range on those days: (hour, minute) start / end.
APPLY_TIME_RANGE = [(8, 0), (13, 30)]
MATCH_SERVER_REQ_HEADERS = {'NMVC_APIRequest': 'StartCombat'}
AUTO_APPLY_VIP_LEVEL = GlobalConfig.value("CHAMPIONSHIP_AUTO_APPLY_VIP_LEVEL")
APPLY_CLUB_LEVEL_LIMIT = GlobalConfig.value("CHAMPIONSHIP_APPLY_LEVEL_LIMIT")
def find_level_match_at(lv):
    """Return the (arrow) time at which the matches that produced knockout
    round `lv` were played: the PREVIOUS round's time slot on the most recent
    apply weekday."""
    today = get_start_time_of_today()
    weekday = today.weekday()
    days_shift = 0
    # Walk backwards to the most recent weekday on which applying is allowed.
    while True:
        if weekday in APPLY_WEEKDAY:
            break
        weekday -= 1
        if weekday < 0:
            weekday = 6
        days_shift += 1
        if days_shift >= 7:
            # Should be impossible while APPLY_WEEKDAY is non-empty.
            raise RuntimeError("ChampionshipLevel find match at error!")
    # Legacy arrow API: replace(days=-n) shifts the date back n days.
    that_day = today.replace(days=-days_shift)
    prev_lv = LEVEL_PREVIOUS_TABLE[lv]
    hour, minute = LEVEL_MATCH_TIMES_TO_HOUR_MINUTE_TABLE[prev_lv]
    that_day = that_day.replace(hour=hour)
    that_day = that_day.replace(minute=minute)
    return that_day
def make_pairs_from_flat_list(items):
    """Pair up consecutive elements: [a, b, c, d] -> [(a, b), (c, d)].

    A trailing odd element is dropped.
    """
    return [
        (items[idx], items[idx + 1])
        for idx in range(0, len(items) - 1, 2)
    ]
def check_club_level(silence=True):
    """Decorator factory gating a Championship method on the club level.

    Below APPLY_CLUB_LEVEL_LIMIT the wrapped method either silently returns
    None (silence=True) or raises CLUB_LEVEL_NOT_ENOUGH (silence=False).
    """
    def deco(fun):
        def wrap(self, *args, **kwargs):
            """
            :type self: Championship
            """
            if self.club_level >= APPLY_CLUB_LEVEL_LIMIT:
                return fun(self, *args, **kwargs)
            if not silence:
                raise GameException(ConfigErrorMessage.get_error_id("CLUB_LEVEL_NOT_ENOUGH"))
            return None
        return wrap
    return deco
def check_time_limit(fun):
    """Decorator: forbid formation changes / betting inside the pre-match
    lock windows listed in TIME_LIMIT.

    BUGFIX: the previous check compared hour and minute independently
    (`_h1 <= hour <= _h2 and _m1 <= minute < _m2`), which is always False for
    any window ending on the hour (e.g. 13:50-14:00 gives `50 <= minute < 0`).
    Since ALL group-stage windows and two knockout windows end at minute 0,
    those locks never fired. Comparing minutes-of-day fixes this; none of the
    configured windows crosses midnight, so no wrap-around handling is needed.
    """
    def wrap(self, *args, **kwargs):
        now = arrow.utcnow().to(settings.TIME_ZONE)
        now_minutes = now.hour * 60 + now.minute
        for (_h1, _m1), (_h2, _m2) in TIME_LIMIT:
            start = _h1 * 60 + _m1
            end = _h2 * 60 + _m2
            if start <= now_minutes < end:
                raise GameException(ConfigErrorMessage.get_error_id("CHAMPIONSHIP_FORMATION_FORBIDDEN"))
        return fun(self, *args, **kwargs)
    return wrap
class ChampionshipFormationWay1(PlunderFormation):
    # Championship formation for way 1; persisted in its own mongo collection.
    __slots__ = []
    MONGO_COLLECTION = MongoChampionshipFormationWay1
class ChampionshipFormationWay2(PlunderFormation):
    # Championship formation for way 2; persisted in its own mongo collection.
    __slots__ = []
    MONGO_COLLECTION = MongoChampionshipFormationWay2
class ChampionshipFormationWay3(PlunderFormation):
    # Championship formation for way 3; persisted in its own mongo collection.
    __slots__ = []
    MONGO_COLLECTION = MongoChampionshipFormationWay3
# way_id -> formation class (each way persists to its own collection).
WAY_MAP = {
    1: ChampionshipFormationWay1,
    2: ChampionshipFormationWay2,
    3: ChampionshipFormationWay3,
}
# Clear leftovers of the previous championship before a new apply phase opens.
def before_apply(server_id):
    """Drop the old knockout data, reset every character's bets, and push
    fresh basic/level notifies to recently active characters."""
    MongoChampionshipLevel.db(server_id).drop()
    MongoChampionship.db(server_id).update_many(
        {},
        {'$set': {
            'bet': {},
            'has_bet': False
        }}
    )
    basic_notify = make_common_basic_notify_msg(server_id)
    basic_data = MessageFactory.pack(basic_notify)
    level_notify = ChampionshipLevel(server_id).make_protomsg()
    level_data = MessageFactory.pack(level_notify)
    char_ids = OperationLog.get_recent_action_char_ids(server_id)
    for cid in char_ids:
        mp = MessagePipe(cid)
        mp.put(data=basic_data)
        mp.put(data=level_data)
# Fetch the top clubs of the previous championship (champion history).
def get_history_top_clubs(server_id):
    """Return [(club_id, name, flag), ...] from the champion-history doc,
    or [] when no history exists."""
    history = MongoChampionHistory.db(server_id).find_one(
        {'_id': MongoChampionHistory.DOC_ID}
    )
    if not history:
        return []
    info = history['info']
    return [
        (member_id, info[member_id]['name'], info[member_id]['flag'])
        for member_id in history['member_ids']
    ]
# Shared part of ChampionNotify; per-character fields (applied, bet) are
# filled in later by each character (see Championship.send_basic_notify).
def make_common_basic_notify_msg(server_id):
    notify = ChampionNotify()
    notify.applied = False
    # One empty bet slot per knockout level.
    for lv in LEVEL_SEQ:
        notify_bet = notify.bet.add()
        notify_bet.level = lv
        # no bet info
    # Previous season's top clubs for display.
    top_clubs = get_history_top_clubs(server_id)
    for i, name, flag in top_clubs:
        notify_top_club = notify.top_clubs.add()
        notify_top_club.id = i
        notify_top_club.name = name
        notify_top_club.flag = flag
    return notify
# An empty group notify, used when the character belongs to no group.
def make_empty_group_notify_msg():
    """Return a ChampionGroupNotify with zeroed rank and score."""
    msg = ChampionGroupNotify()
    msg.my_rank = 0
    msg.my_score = 0
    return msg
# Completely reset all championship state on a server.
# NOTE: this method is currently unused.
def totally_reset(server_id, send_notify=False):
    """Wipe per-character apply/bet state and drop group & level data;
    optionally push reset notifies to recently active characters."""
    MongoChampionship.db(server_id).update_many(
        {},
        {'$set': {
            'applied': False,
            'bet': {},
            'has_bet': False,
        }}
    )
    MongoChampionshipGroup.db(server_id).drop()
    MongoChampionshipLevel.db(server_id).drop()
    if send_notify:
        basic_notify = make_common_basic_notify_msg(server_id)
        basic_data = MessageFactory.pack(basic_notify)
        group_notify = make_empty_group_notify_msg()
        group_data = MessageFactory.pack(group_notify)
        level_notify = ChampionshipLevel(server_id).make_protomsg()
        level_data = MessageFactory.pack(level_notify)
        char_ids = OperationLog.get_recent_action_char_ids(server_id)
        for cid in char_ids:
            mp = MessagePipe(cid)
            mp.put(data=basic_data)
            mp.put(data=group_data)
            mp.put(data=level_data)
def make_plunder_formation_msg(club, way_id):
    """
    Build a PlunderFormation protobuf for one way of a club's formation,
    accumulating the total staff power.

    :type club: core.abstract.AbstractClub
    :type way_id: int
    """
    message = MsgPlunderFormation()
    message.way = way_id
    total_power = 0
    slot_no = 0
    for staff in club.formation_staffs:
        slot_no += 1
        total_power += staff.power
        slot = message.formation.add()
        slot.slot_id = slot_no
        slot.status = FORMATION_SLOT_USE
        slot.staff_id = staff.id
        slot.unit_id = staff.unit.id
        slot.position = staff.formation_position
        slot.staff_oid = staff.oid
        slot.policy = 1
    message.power = total_power
    return message
class Match(object):
    """A 3-way championship match between two clubs (players or NPCs).

    Each side fields three formations ("ways"); each way fights the opposing
    way on a remote match server, producing three win/lose results and three
    match records.
    """
    __slots__ = ['server_id', 'id_one', 'info_one', 'id_two', 'info_two']
    def __init__(self, server_id, id_one, info_one, id_two, info_two):
        # id_* are string club ids (real char id or NPC id);
        # info_* are the {'name', 'flag', ...} dicts stored in the group doc.
        self.server_id = server_id
        self.id_one = id_one
        self.info_one = info_one
        self.id_two = id_two
        self.info_two = info_two
    def make_3_way_clubs(self, _id, _info):
        """
        :rtype: list[core.abstract.AbstractClub]
        """
        # Returns (clubs, skill_sequences): one club object and one skill
        # sequence per way. NPCs take their ways from _info['ways_npc'] and
        # have no skill sequence.
        clubs = []
        skill_sequences = []
        if is_npc(_id):
            for i in range(1, 4):
                npc_id = _info['ways_npc'][i - 1]
                club = ConfigNPCFormation.get(npc_id)
                club.id = _id
                club.name = _info['name']
                club.flag = _info['flag']
                clubs.append(club)
                skill_sequences.append({})
        else:
            cs = Championship(self.server_id, int(_id))
            for i in range(1, 4):
                way = cs.get_way_object(i)
                club = Club(self.server_id, int(_id), load_staffs=False)
                club.formation_staffs = way.formation_staffs
                clubs.append(club)
                skill_sequences.append(way.get_skill_sequence())
        return clubs, skill_sequences
    def start(self):
        """Run all three way-vs-way fights on a randomly chosen match server.

        Returns (one_wins, record_ids): one_wins is a list of three 0/1 flags
        from side one's perspective; record_ids are the stored match records.
        """
        def one_way_match(_club_one, _club_two, _skill_sequence_one, _skill_sequence_two):
            # Serialize the match, POST it to the match server, and read back
            # the result. star > 0 means side one won this way.
            _match = ClubMatch(_club_one, _club_two, 3, _skill_sequence_one, _skill_sequence_two)
            _msg = _match.start(auto_load_staffs=False, check_empty=False)
            _msg.key = ""
            _msg.map_name = GlobalConfig.value_string("MATCH_MAP_CHAMPIONSHIP")
            _req = ClubMatchServerSideRequest()
            _req.match.MergeFrom(_msg)
            _data = _req.SerializeToString()
            _res = requests.post(match_server_url, headers=MATCH_SERVER_REQ_HEADERS, data=_data)
            response = ClubMatchServerSideResponse()
            response.ParseFromString(_res.content)
            if response.star > 0:
                _win = 1
            else:
                _win = 0
            return _win, _msg.SerializeToString(), response.record
        host, port = random.choice(settings.MATCH_SERVERS)
        match_server_url = 'http://{0}:{1}/'.format(host, port)
        one_clubs, one_skill_sequences = self.make_3_way_clubs(self.id_one, self.info_one)
        two_clubs, two_skill_sequences = self.make_3_way_clubs(self.id_two, self.info_two)
        # [one_wins, record_ids]
        one_wins = []
        info_sets = []
        for i in range(3):
            club_one = one_clubs[i]
            club_two = two_clubs[i]
            win, club_match, record = one_way_match(
                one_clubs[i], two_clubs[i],
                one_skill_sequences[i], two_skill_sequences[i]
            )
            one_wins.append(win)
            info_sets.append((club_one.id, club_two.id, club_match, record))
        # Persist all three records in one batch.
        record_ids = MatchRecord.batch_create(self.server_id, info_sets)
        return one_wins, record_ids
class Championship(object):
    """Per-character championship state: apply status, bets, and the three
    battle formations ("ways"). Backed by MongoChampionship plus one
    collection per way."""
    __slots__ = ['server_id', 'char_id', 'doc', 'club_level']
    def __init__(self, server_id, char_id):
        self.server_id = server_id
        self.char_id = char_id
        self.doc = MongoChampionship.db(self.server_id).find_one({'_id': self.char_id})
        if not self.doc:
            # Lazily create the per-character document on first access.
            self.doc = MongoChampionship.document()
            self.doc['_id'] = self.char_id
            MongoChampionship.db(self.server_id).insert_one(self.doc)
        self.club_level = get_club_property(self.server_id, self.char_id, 'level')
    @check_club_level(silence=True)
    def try_initialize(self, send_notify=True):
        # One-time setup once the club level requirement is met.
        if self.doc['active']:
            return
        # Copy the initial formations from the plunder formations.
        p = Plunder(self.server_id, self.char_id)
        for i in [1, 2, 3]:
            way = p.get_way_object(i)
            doc = way.get_or_create_doc()
            WAY_MAP[i].MONGO_COLLECTION.db(self.server_id).delete_one({'_id': self.char_id})
            WAY_MAP[i].MONGO_COLLECTION.db(self.server_id).insert_one(doc)
        self.doc['active'] = True
        MongoChampionship.db(self.server_id).update_one(
            {'_id': self.char_id},
            {'$set': {
                'active': True
            }}
        )
        if send_notify:
            self.send_notify()
    def is_applied(self):
        # High-VIP characters are auto-applied without an explicit apply.
        if self.doc['applied']:
            return True
        if self.club_level < APPLY_CLUB_LEVEL_LIMIT:
            return False
        if VIP(self.server_id, self.char_id).level < AUTO_APPLY_VIP_LEVEL:
            return False
        return True
    @check_club_level(silence=False)
    def apply_in(self):
        """Apply for the championship; only valid on apply weekdays within
        the apply time range, and only once."""
        now = arrow.utcnow().to(settings.TIME_ZONE)
        if now.weekday() not in APPLY_WEEKDAY:
            raise GameException(ConfigErrorMessage.get_error_id("CHAMPIONSHIP_APPLY_NOT_OPEN"))
        range_start = make_time_of_today(APPLY_TIME_RANGE[0][0], APPLY_TIME_RANGE[0][1])
        range_end = make_time_of_today(APPLY_TIME_RANGE[1][0], APPLY_TIME_RANGE[1][1])
        if now < range_start or now >= range_end:
            raise GameException(ConfigErrorMessage.get_error_id("CHAMPIONSHIP_APPLY_NOT_OPEN"))
        if self.is_applied():
            raise GameException(ConfigErrorMessage.get_error_id("CHAMPIONSHIP_ALREADY_APPLIED"))
        self.doc['applied'] = True
        MongoChampionship.db(self.server_id).update_one(
            {'_id': self.char_id},
            {'$set': {
                'applied': True
            }}
        )
        self.send_basic_notify()
    @check_time_limit
    @check_club_level(silence=False)
    def bet(self, club_id, bet_id):
        """Place a bet on `club_id` for the current knockout level.

        One bet per level; betting is closed once the champion (level 1) is
        decided, and the target must still be in the current round."""
        cl = ChampionshipLevel(self.server_id)
        lv = cl.get_current_level()
        if lv == 1:
            raise GameException(ConfigErrorMessage.get_error_id("INVALID_OPERATE"))
        if str(lv) in self.doc['bet']:
            raise GameException(ConfigErrorMessage.get_error_id("CHAMPIONSHIP_ALREADY_BET"))
        if club_id not in cl.doc['levels'].get(str(lv), {}).get('member_ids', []):
            raise GameException(ConfigErrorMessage.get_error_id("INVALID_OPERATE"))
        config = ConfigChampionBet.get(bet_id)
        if not config:
            raise GameException(ConfigErrorMessage.get_error_id("INVALID_OPERATE"))
        if config.level != lv:
            raise GameException(ConfigErrorMessage.get_error_id("INVALID_OPERATE"))
        # Charge the bet cost up front.
        rc = ResourceClassification.classify(config.cost)
        rc.check_exist(self.server_id, self.char_id)
        rc.remove(self.server_id, self.char_id, message="Champion.bet:{0}".format(bet_id))
        bet_info = {
            'club_id': club_id,
            'bet_id': bet_id
        }
        self.doc['bet'][str(lv)] = bet_info
        self.doc['has_bet'] = True
        MongoChampionship.db(self.server_id).update_one(
            {'_id': self.char_id},
            {'$set': {
                'bet.{0}'.format(lv): bet_info,
                'has_bet': True,
            }}
        )
        self.send_basic_notify()
    def get_way_object(self, way_id):
        """
        :rtype: PlunderFormation
        """
        try:
            way_class = WAY_MAP[way_id]
        except KeyError:
            raise GameException(ConfigErrorMessage.get_error_id("INVALID_OPERATE"))
        return way_class(self.server_id, self.char_id, way_id)
    def find_way_id_by_staff_id(self, staff_id):
        # Which way (1-3) the staff is currently placed in; 0 if none.
        for i in [1, 2, 3]:
            if self.get_way_object(i).is_staff_in_formation(staff_id):
                return i
        return 0
    def find_way_id_by_unit_id(self, unit_id):
        # Which way (1-3) the unit is currently placed in; 0 if none.
        for i in [1, 2, 3]:
            if self.get_way_object(i).is_unit_in_formation(unit_id):
                return i
        return 0
    @check_time_limit
    @check_club_level(silence=False)
    def set_staff(self, way_id, slot_id, staff_id):
        # A staff may only be in one way at a time: remove it from the other
        # ways before placing it in the target slot.
        way_list = [1, 2, 3]
        if way_id not in way_list:
            raise GameException(ConfigErrorMessage.get_error_id("INVALID_OPERATE"))
        if slot_id not in [1, 2, 3]:
            raise GameException(ConfigErrorMessage.get_error_id("INVALID_OPERATE"))
        way_list.remove(way_id)
        for i in way_list:
            w = self.get_way_object(i)
            w.try_unset_staff(staff_id)
        w = self.get_way_object(way_id)
        w.set_staff(slot_id, staff_id)
        self.send_formation_notify()
    @check_time_limit
    @check_club_level(silence=False)
    def set_unit(self, way_id, slot_id, unit_id):
        if slot_id not in [1, 2, 3]:
            raise GameException(ConfigErrorMessage.get_error_id("INVALID_OPERATE"))
        w = self.get_way_object(way_id)
        w.set_unit(slot_id, unit_id)
        self.send_formation_notify()
    @check_time_limit
    @check_club_level(silence=False)
    def set_position(self, way_id, formation_slots):
        my_way = self.get_way_object(way_id)
        my_way.sync_slots(formation_slots)
        self.send_formation_notify()
    @check_time_limit
    @check_club_level(silence=False)
    def skill_sequence_set_staff(self, way_id, seq_id, index, staff_id):
        w = self.get_way_object(way_id)
        w.skill_sequence_set_staff(seq_id, index, staff_id)
        self.send_formation_notify()
    @check_club_level(silence=False)
    def sync_group(self):
        # Push the character's current group standing to the client.
        cg = ChampionshipGroup(self.server_id)
        cg.find_by_char_id(self.char_id)
        group_msg = cg.make_protomsg()
        MessagePipe(self.char_id).put(msg=group_msg)
    @check_club_level(silence=False)
    def sync_level(self):
        # Push only the current knockout level to the client.
        cl = ChampionshipLevel(self.server_id)
        current_lv = cl.doc['current_level']
        level_msg = cl.make_protomsg(level=current_lv)
        MessagePipe(self.char_id).put(msg=level_msg)
    @check_club_level(silence=True)
    def send_notify(self):
        # Full refresh: basic state, formations, group, and all levels.
        self.send_basic_notify()
        self.send_formation_notify()
        cg = ChampionshipGroup(self.server_id)
        cg.find_by_char_id(self.char_id)
        group_msg = cg.make_protomsg()
        MessagePipe(self.char_id).put(msg=group_msg)
        cl = ChampionshipLevel(self.server_id)
        level_msg = cl.make_protomsg()
        MessagePipe(self.char_id).put(msg=level_msg)
    def send_basic_notify(self, basic_notify=None):
        # Fill the shared notify template with this character's apply/bet data.
        if not basic_notify:
            basic_notify = make_common_basic_notify_msg(self.server_id)
        basic_notify.applied = self.is_applied()
        for bet in basic_notify.bet:
            bet_info = self.doc['bet'].get(str(bet.level), {})
            if bet_info:
                bet.bet_for = bet_info['club_id']
                bet.bet_id = bet_info['bet_id']
        MessagePipe(self.char_id).put(msg=basic_notify)
    @check_club_level(silence=True)
    def send_formation_notify(self):
        notify = ChampionFormationNotify()
        for i in [1, 2, 3]:
            notify_way = notify.formation.add()
            w = self.get_way_object(i)
            notify_way.MergeFrom(w.make_protobuf())
        MessagePipe(self.char_id).put(msg=notify)
class ChampionshipGroup(object):
    """One group of the group stage: its members (players and NPCs), scores,
    and per-member match logs. Backed by MongoChampionshipGroup."""
    __slots__ = ['server_id', 'group_id', 'doc', '_char_id', '_member_ids', '_info']
    def __init__(self, server_id):
        self.server_id = server_id
        self.group_id = None
        self.doc = None
        # _char_id is only filled by find_by_char_id when a group is found.
        self._char_id = None
        # These two only hold data while a new group is being built (via
        # add_club/finish); they are NOT filled for groups loaded from mongo.
        self._member_ids = []
        self._info = {}
    def find_by_char_id(self, char_id):
        # Locate the group containing this character (member ids are strings).
        self.doc = MongoChampionshipGroup.db(self.server_id).find_one(
            {'member_ids': str(char_id)}
        )
        if self.doc:
            self.group_id = self.doc['_id']
            self._char_id = char_id
    def find_by_group_id(self, group_id):
        self.doc = MongoChampionshipGroup.db(self.server_id).find_one(
            {'_id': group_id}
        )
        if self.doc:
            self.group_id = group_id
    @classmethod
    def new(cls, server_id):
        # Create an in-memory group with a fresh id; persisted by finish().
        obj = cls(server_id)
        obj.group_id = make_string_id()
        return obj
    def add_club(self, club_id, club_info):
        self._member_ids.append(club_id)
        self._info[club_id] = club_info
    def finish(self):
        # Persist the newly built group with zeroed scores and empty logs.
        doc = MongoChampionshipGroup.document()
        doc['_id'] = self.group_id
        doc['member_ids'] = self._member_ids
        doc['info'] = self._info
        doc['scores'] = {i: 0 for i in self._member_ids}
        doc['logs'] = {i: [] for i in self._member_ids}
        doc['match_times'] = 1
        MongoChampionshipGroup.db(self.server_id).insert_one(doc)
    def get_scores_sorted(self):
        # [(club_id, score), ...] sorted by score descending.
        # (Python 2: dict.items() returns a list, so .sort() is valid.)
        if not self.doc:
            return []
        scores = self.doc['scores'].items()
        scores.sort(key=lambda item: item[1], reverse=True)
        return scores
    def get_top_two(self):
        # The two group members that advance to the knockout stage.
        scores = self.get_scores_sorted()
        return [scores[0][0], scores[1][0]]
    def start_match(self):
        """Play one group-stage round: pair members by current standing,
        run the matches, update scores/logs, and mail score rewards.

        Returns the incremented match_times (7 once all 6 rounds are done)."""
        match_times = self.doc['match_times']
        if match_times == 7:
            # All six rounds already played.
            return match_times
        hour, minute = GROUP_MATCH_TIME[match_times - 1]
        match_at = make_time_of_today(hour, minute).timestamp
        scores = self.get_scores_sorted()
        # Adjacent standings play each other: 1st vs 2nd, 3rd vs 4th, ...
        pairs = make_pairs_from_flat_list(scores)
        for (id_one, _), (id_two, _) in pairs:
            info_one = self.doc['info'][id_one]
            info_two = self.doc['info'][id_two]
            m = Match(self.server_id, id_one, info_one, id_two, info_two)
            one_way_wins, record_ids = m.start()
            two_way_wins = [1 - _w for _w in one_way_wins]
            one_way_wins_count = len([_w for _w in one_way_wins if _w == 1])
            two_way_wins_count = len([_w for _w in two_way_wins if _w == 1])
            # Score awarded depends on how many of the 3 ways were won.
            one_got_score = ConfigChampionWinScore.get(one_way_wins_count).score
            two_got_score = ConfigChampionWinScore.get(two_way_wins_count).score
            self.doc['scores'][id_one] += one_got_score
            self.doc['scores'][id_two] += two_got_score
            one_name = self.doc['info'][id_one]['name']
            two_name = self.doc['info'][id_two]['name']
            one_log = self.make_match_log(match_at, two_name, one_got_score, one_way_wins, record_ids)
            two_log = self.make_match_log(match_at, one_name, two_got_score, two_way_wins, record_ids)
            self.doc['logs'][id_one].append(one_log)
            self.doc['logs'][id_two].append(two_log)
            self.send_score_reward_mail(id_one, self.doc['scores'][id_one])
            self.send_score_reward_mail(id_two, self.doc['scores'][id_two])
        self.doc['match_times'] += 1
        MongoChampionshipGroup.db(self.server_id).update_one(
            {'_id': self.group_id},
            {'$set': {
                'scores': self.doc['scores'],
                'logs': self.doc['logs'],
                'match_times': self.doc['match_times'],
            }}
        )
        return self.doc['match_times']
    def send_score_reward_mail(self, club_id, score):
        # Mail the score-threshold reward to real players (NPCs get nothing).
        if is_npc(club_id):
            return
        config = ConfigChampionScoreReward.get(score)
        if not config:
            return
        rc = ResourceClassification.classify(config.reward)
        attachment = rc.to_json()
        m = MailManager(self.server_id, int(club_id))
        m.add(config.mail_title, config.mail_content, attachment=attachment)
    @staticmethod
    def make_match_log(match_at, target_name, got_score, way_wins, record_ids):
        # Build one per-member match-log entry.
        doc = MongoChampionshipGroup.document_match_log()
        doc['timestamp'] = match_at
        doc['target_name'] = target_name
        doc['got_score'] = got_score
        doc['way_wins'] = way_wins
        doc['record_ids'] = record_ids
        return doc
    def make_clubs_msg(self, scores=None):
        # Protobuf club entries for the leaderboard display.
        msgs = []
        if not scores:
            scores = self.get_scores_sorted()
        for index, (club_id, score) in enumerate(scores):
            rank = index + 1
            if rank >= 10:
                # only the top 10 are sent
                break
            msg = MsgChampionClub()
            msg.id = club_id
            msg.name = self.doc['info'][club_id]['name']
            msg.flag = self.doc['info'][club_id]['flag']
            msg.rank = rank
            msg.score = score
            msgs.append(msg)
        return msgs
    def make_protomsg(self):
        """Build the ChampionGroupNotify for self._char_id (set via
        find_by_char_id); an empty notify when the char is in no group."""
        if not self.doc:
            return make_empty_group_notify_msg()
        my_score = 0
        my_rank = 0
        scores = self.get_scores_sorted()
        for _index, (_id, _score) in enumerate(scores):
            if _id == str(self._char_id):
                my_score = _score
                my_rank = _index + 1
                break
        notify = ChampionGroupNotify()
        notify.my_score = my_score
        notify.my_rank = my_rank
        clubs = self.make_clubs_msg(scores=scores)
        for c in clubs:
            notify_club = notify.clubs.add()
            notify_club.MergeFrom(c)
        for log in self.doc['logs'][str(self._char_id)]:
            notify_log = notify.logs.add()
            notify_log.timestamp = log['timestamp']
            notify_log.target_name = log['target_name']
            notify_log.got_score = log['got_score']
            notify_log.way_wins.extend(log['way_wins'])
            notify_log.match_record_ids.extend(log['record_ids'])
        match_times = self.doc['match_times']
        if match_times > 6:
            # Group stage over: no next match.
            notify.next_match_at = 0
        else:
            hour, minute = GROUP_MATCH_TIME[match_times - 1]
            notify.next_match_at = make_time_of_today(hour, minute).timestamp
        # Work out who this character faces next round.
        pairs = make_pairs_from_flat_list(scores)
        for (id_one, _), (id_two, _) in pairs:
            if id_one == str(self._char_id):
                notify.next_target.id = id_two
                notify.next_target.name = self.doc['info'][id_two]['name']
                notify.next_target.flag = self.doc['info'][id_two]['flag']
            elif id_two == str(self._char_id):
                notify.next_target.id = id_one
                notify.next_target.name = self.doc['info'][id_one]['name']
                notify.next_target.flag = self.doc['info'][id_one]['flag']
        return notify
class ChampionshipGroupManager(object):
    """Server-wide operations over all group-stage groups: finding applied
    clubs, splitting them into 8 groups, and running each round."""
    @classmethod
    def find_all_groups(cls, server_id):
        """
        :rtype: list[ChampionshipGroup]
        """
        groups = []
        """:type: list[ChampionshipGroup]"""
        group_docs = MongoChampionshipGroup.db(server_id).find({})
        for doc in group_docs:
            g = ChampionshipGroup(server_id)
            g.group_id = doc['_id']
            g.doc = doc
            groups.append(g)
        return groups
    @classmethod
    def find_applied_clubs(cls, server_id):
        # Explicitly applied clubs, plus high-VIP clubs that are auto-applied
        # when they also meet the club-level requirement.
        docs = MongoChampionship.db(server_id).find(
            {'applied': True},
            {'_id': 1}
        )
        club_ids = [doc['_id'] for doc in docs]
        club_ids = set(club_ids)
        vip_ids = VIP.query_char_ids(server_id, min_level=AUTO_APPLY_VIP_LEVEL)
        if vip_ids:
            club_docs = MongoCharacter.db(server_id).find(
                {'_id': {'$in': vip_ids}},
                {'level': 1}
            )
            for doc in club_docs:
                if doc['level'] >= APPLY_CLUB_LEVEL_LIMIT:
                    club_ids.add(doc['_id'])
        return list(club_ids)
    @classmethod
    def assign_to_groups(cls, server_id, club_ids):
        """Split the applied clubs (topped up with NPCs to at least 32 and to
        an even count) into 8 groups, then push the group notify to recently
        active characters."""
        club_amount = len(club_ids)
        if club_amount < 32:
            # Pad to the 32 participants the bracket expects.
            need_npc_amount = 32 - club_amount
        else:
            # Above 32, only pad to an even number so everyone can be paired.
            if club_amount % 2 == 0:
                need_npc_amount = 0
            else:
                need_npc_amount = 1
        info = {}
        if club_ids:
            club_docs = MongoCharacter.db(server_id).find(
                {'_id': {'$in': club_ids}},
                {'name': 1, 'flag': 1}
            )
            club_info = {doc['_id']: doc for doc in club_docs}
            for i in club_ids:
                info[str(i)] = {
                    'name': club_info[i]['name'],
                    'flag': club_info[i]['flag'],
                }
        for i in range(need_npc_amount):
            npc_doc = ConfigPlunderNPC.get(2).to_simple_doc()
            npc_id = npc_doc.pop('id')
            info[npc_id] = npc_doc
        # (Python 2: keys() returns a list, so in-place shuffle is valid.)
        ids = info.keys()
        random.shuffle(ids)
        # Distribute these ids round-robin into 8 groups.
        groups = []
        """:type: list[ChampionshipGroup]"""
        for i in range(8):
            g = ChampionshipGroup.new(server_id)
            groups.append(g)
        g_index = 0
        while True:
            try:
                _id = ids.pop(0)
            except IndexError:
                break
            groups[g_index].add_club(_id, info[_id])
            g_index += 1
            if g_index >= 8:
                g_index = 0
        for g in groups:
            g.finish()
        # Notify recently active characters of their new group.
        char_ids = OperationLog.get_recent_action_char_ids(server_id)
        for cid in char_ids:
            g = ChampionshipGroup(server_id)
            g.find_by_char_id(cid)
            msg = g.make_protomsg()
            MessagePipe(cid).put(msg=msg)
    @classmethod
    def start_match(cls, server_id):
        """Run one round for every group. Returns the number of completed
        rounds (0 when there are no groups)."""
        groups = cls.find_all_groups(server_id)
        if not groups:
            return 0
        match_times = 0
        for g in groups:
            match_times = g.start_match()
        if match_times == 7:
            # Group stage finished: bootstrap the knockout stage.
            # The drop is not strictly necessary, but kept just in case.
            MongoChampionshipLevel.db(server_id).drop()
            cl = ChampionshipLevel(server_id)
            cl.initialize()
            level_notify = cl.make_protomsg()
            level_data = MessageFactory.pack(level_notify)
            char_ids = OperationLog.get_recent_action_char_ids(server_id)
            for cid in char_ids:
                MessagePipe(cid).put(data=level_data)
        return match_times - 1
class ChampionshipLevel(object):
    """Knock-out stage of the championship (16 -> 8 -> 4 -> 2 -> 1).

    State is a single Mongo document holding, per level, the member ids,
    per-way win flags and match-record ids, plus a shared ``info`` map of
    club display data (name/flag).
    """
    __slots__ = ['server_id', 'doc']

    def __init__(self, server_id):
        # Load (or lazily create) the singleton level document for the server.
        self.server_id = server_id
        self.doc = MongoChampionshipLevel.db(self.server_id).find_one(
            {'_id': MongoChampionshipLevel.DOC_ID}
        )

        if not self.doc:
            self.doc = MongoChampionshipLevel.document()
            MongoChampionshipLevel.db(self.server_id).insert_one(self.doc)

    def initialize(self):
        # initialize the round of 16 from the finished group stage
        groups = ChampionshipGroupManager.find_all_groups(self.server_id)
        info = {}
        tops = []
        way_wins = {}
        record_ids = {}
        for g in groups:
            id_one, id_two = g.get_top_two()
            info[id_one] = g.doc['info'][id_one]
            info[id_two] = g.doc['info'][id_two]
            tops.append((id_one, id_two))

            # carry over each qualifier's last group-stage match result
            way_wins[id_one] = g.doc['logs'][id_one][-1]['way_wins']
            record_ids[id_one] = g.doc['logs'][id_one][-1]['record_ids']
            way_wins[id_two] = g.doc['logs'][id_two][-1]['way_wins']
            record_ids[id_two] = g.doc['logs'][id_two][-1]['record_ids']

        # winners of groups 1-4 vs runners-up of groups 5-8
        # runners-up of groups 1-4 vs winners of groups 5-8
        member_ids = []
        for i in range(4):
            member_ids.append(tops[i][0])
            member_ids.append(tops[i + 4][1])

        for i in range(4):
            member_ids.append(tops[i][1])
            member_ids.append(tops[i + 4][0])

        self.doc['info'] = info
        self.save(16, member_ids, way_wins, record_ids, info=info)

    def get_current_level(self):
        # current level: 16 / 8 / 4 / 2 / 1 (1 == champion decided)
        return self.doc['current_level']

    def save(self, level, member_ids, way_wins, record_ids, info=None):
        """Persist one level's bracket and send its rank-reward mail."""
        level_doc = MongoChampionshipLevel.document_level()
        level_doc['member_ids'] = member_ids
        level_doc['way_wins'] = way_wins
        level_doc['record_ids'] = record_ids

        self.doc['levels'][str(level)] = level_doc
        self.doc['current_level'] = level

        updater = {
            'levels.{0}'.format(level): level_doc,
            'current_level': level,
        }

        if info:
            updater['info'] = info

        MongoChampionshipLevel.db(self.server_id).update_one(
            {'_id': MongoChampionshipLevel.DOC_ID},
            {'$set': updater}
        )

        self.send_rank_reward_mail(level)

    def send_rank_reward_mail(self, level):
        # Mail the configured reward for reaching `level` to each real member.
        config = ConfigChampionRankReward.get(level)
        member_ids = self.doc['levels'][str(level)]['member_ids']

        rc = ResourceClassification.classify(config.reward)
        attachment = rc.to_json()

        for m in member_ids:
            if is_npc(m):
                continue

            # NOTE: `m` is rebound from member-id to a MailManager here
            m = MailManager(self.server_id, int(m))
            m.add(config.mail_title, config.mail_content, attachment=attachment)

    def send_bet_reward_mail(self, level, win_ids):
        # find all players who placed bets, then iterate over them
        docs = MongoChampionship.db(self.server_id).find({'has_bet': True})
        for doc in docs:
            bet_info = doc['bet'].get(str(level), {})
            if not bet_info:
                continue

            config = ConfigChampionBet.get(bet_info['bet_id'])
            if bet_info['club_id'] in win_ids:
                m_title = config.win_mail_title
                m_content = config.win_mail_content
                m_reward = config.win_reward
            else:
                m_title = config.lose_mail_title
                m_content = config.lose_mail_content
                m_reward = config.lose_reward

            rc = ResourceClassification.classify(m_reward)
            attachment = rc.to_json()

            m = MailManager(self.server_id, doc['_id'])
            m.add(m_title, m_content, attachment=attachment)

    def start_match(self):
        """Play the current level and advance the bracket.

        Returns 0 when no bracket exists, None when the champion is already
        decided (level 1), otherwise the level that was just played.
        """
        if not self.doc['levels']:
            return 0

        lv = self.doc['current_level']
        if lv == 1:
            return None

        next_level = LEVEL_NEXT_TABLE[lv]
        member_ids = self.doc['levels'][str(lv)]['member_ids']
        pairs = make_pairs_from_flat_list(member_ids)

        win_ids = []
        lose_ids = []
        way_wins = {}
        record_ids = {}

        for id_one, id_two in pairs:
            info_one = self.doc['info'][id_one]
            info_two = self.doc['info'][id_two]

            m = Match(self.server_id, id_one, info_one, id_two, info_two)
            one_way_wins, one_record_ids = m.start()
            # per-way flags are from club one's perspective; invert for club two
            two_way_wins = [1 - _w for _w in one_way_wins]

            # best of 3 ways
            one_way_wins_count = len([_w for _w in one_way_wins if _w == 1])
            if one_way_wins_count >= 2:
                win_ids.append(id_one)
                lose_ids.append(id_two)

                way_wins[id_one] = one_way_wins
                record_ids[id_one] = one_record_ids
            else:
                win_ids.append(id_two)
                lose_ids.append(id_one)

                way_wins[id_two] = two_way_wins
                # NOTE(review): record ids stored for the winner are club one's
                # list in both branches -- presumably records are shared per
                # match; confirm against Match.start()
                record_ids[id_two] = one_record_ids

        self.save(next_level, win_ids, way_wins, record_ids)
        # send betting reward mail for the level just played
        self.send_bet_reward_mail(lv, win_ids)
        if next_level == 1:
            self.after_final_match()

        return lv

    def after_final_match(self):
        # finals are done, but we still have to decide 3rd/4th place and
        # record the top four in the history document
        level_4_member_ids = self.doc['levels']['4']['member_ids'][:]
        level_2_member_ids = self.doc['levels']['2']['member_ids'][:]

        # semifinal losers = semifinalists minus finalists
        for i in level_2_member_ids:
            level_4_member_ids.remove(i)

        # 3rd-place play-off
        id_one = level_4_member_ids[0]
        id_two = level_4_member_ids[1]
        info_one = self.doc['info'][id_one]
        info_two = self.doc['info'][id_two]

        m = Match(self.server_id, id_one, info_one, id_two, info_two)
        one_way_wins, one_record_ids = m.start()
        # two_way_wins = [1 - _w for _w in one_way_wins]

        one_way_wins_count = len([_w for _w in one_way_wins if _w == 1])
        if one_way_wins_count >= 2:
            third = id_one
            fourth = id_two
        else:
            third = id_two
            fourth = id_one

        first = self.doc['levels']['1']['member_ids'][0]
        level_2_member_ids.remove(first)
        second = level_2_member_ids[0]

        first_info = self.doc['info'][first]
        second_info = self.doc['info'][second]
        third_info = self.doc['info'][third]
        fourth_info = self.doc['info'][fourth]

        # overwrite the single history document with this season's top four
        MongoChampionHistory.db(self.server_id).drop()
        history_doc = MongoChampionHistory.document()
        history_doc['member_ids'] = [first, second, third, fourth]
        history_doc['info'] = {
            first: first_info,
            second: second_info,
            third: third_info,
            fourth: fourth_info,
        }

        MongoChampionHistory.db(self.server_id).insert_one(history_doc)

        # clear the group stage
        MongoChampionshipGroup.db(self.server_id).drop()
        group_notify = make_empty_group_notify_msg()
        group_data = MessageFactory.pack(group_notify)

        # clear every player's application flag
        MongoChampionship.db(self.server_id).update_many(
            {},
            {'$set': {
                'applied': False
            }}
        )

        char_ids = OperationLog.get_recent_action_char_ids(self.server_id)
        basic_notify = make_common_basic_notify_msg(self.server_id)

        for _cid in char_ids:
            MessagePipe(_cid).put(data=group_data)
            Championship(self.server_id, _cid).send_basic_notify(basic_notify=basic_notify)

        # populate the winning (leaderboard) notify with the top three
        winning_notify = LeaderboardChampionshipNotify()
        winning_notify.session = ""
        for __id, __info in [(first, first_info), (second, second_info), (third, third_info)]:
            __match = Match(self.server_id, None, None, None, None)
            __clubs, __skill_sequence = __match.make_3_way_clubs(__id, __info)

            winning_notify_club = winning_notify.clubs.add()
            winning_notify_club.club.MergeFrom(__clubs[0].make_protomsg())
            for __way_id in [1, 2, 3]:
                winning_notify_club_formation = winning_notify_club.formation.add()
                winning_notify_club_formation.MergeFrom(make_plunder_formation_msg(__clubs[__way_id - 1], __way_id))

        WinningChampionship(self.server_id, None).set_to_common(winning_notify)

    def make_protomsg(self, level=None):
        """Build the ChampionLevelNotify message.

        With `level` given, emit an ACT_UPDATE for that level only;
        otherwise an ACT_INIT carrying every level plus the club roster.
        """
        if level:
            levels = [level]
            act = ACT_UPDATE
        else:
            levels = [CHAMPION_LEVEL_16, CHAMPION_LEVEL_8, CHAMPION_LEVEL_4, CHAMPION_LEVEL_2, CHAMPION_LEVEL_1]
            act = ACT_INIT

        notify = ChampionLevelNotify()
        notify.act = act

        if act == ACT_INIT:
            # the round-of-16 roster doubles as the full club list
            level16 = self.doc['levels'].get('16', {})
            if level16:
                for i in level16['member_ids']:
                    notify_club = notify.clubs.add()
                    notify_club.id = i
                    notify_club.name = self.doc['info'][i]['name']
                    notify_club.flag = self.doc['info'][i]['flag']

        for lv in levels:
            notify_level = notify.levels.add()
            notify_level.level = lv

            this_level = self.doc['levels'].get(str(lv), {})
            if this_level:
                for _mid in this_level['member_ids']:
                    notify_level_club = notify_level.clubs.add()
                    notify_level_club.id = _mid
                    notify_level_club.way_wins.extend(this_level['way_wins'][str(_mid)])
                    notify_level_club.match_record_ids.extend(this_level['record_ids'][str(_mid)])

            if lv == 16:
                # round of 16 is seeded from the group stage, never scheduled
                notify_level.match_at = 0
            else:
                notify_level.match_at = find_level_match_at(lv).timestamp

        return notify
| [
"yueyoum@gmail.com"
] | yueyoum@gmail.com |
911744a0becf71a9d8142dc9e796c3949f6243a8 | 26c0f80688f75a188097a232c229a73c8e7cc6ed | /user/migrations/0016_auto_20210511_1700.py | c17235302b993169c5ae1b568f59d2271a6b2144 | [] | no_license | creep1g/DjangoWebstore | 8207d7ea53c478fb7e5745e1c6ae6699102b5df5 | bd27340b86bf2289b8c14216462d932ccdf4986d | refs/heads/main | 2023-05-06T09:50:04.846489 | 2021-05-28T14:40:40 | 2021-05-28T14:40:40 | 371,730,158 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 444 | py | # Generated by Django 3.2 on 2021-05-11 17:00
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: alter Profile.searches to a nullable M2M.

    NOTE(review): `null=True` has no effect on ManyToManyField (Django
    ignores it and emits a warning); it is kept here only because the
    migration was auto-generated from the model definition.
    """

    dependencies = [
        ('user', '0015_auto_20210511_1655'),
    ]

    operations = [
        migrations.AlterField(
            model_name='profile',
            name='searches',
            field=models.ManyToManyField(null=True, to='user.SearchHistory'),
        ),
    ]
"thorgilshjalmarsson@gmail.com"
] | thorgilshjalmarsson@gmail.com |
ea8979d74e418a117695e41be3c993a3908baf15 | 1965343babd76b97b1ff8206991f3270baa33192 | /pos_session_pay_invoice_extend/wizard/cash_invoice_in.py | 6aa275837dde17096380c8591c5a3a3dcf2427cd | [] | no_license | leangjia/odoo-addons-1 | 62774d842561d5ce89a8d3628c29e41135a74415 | 12082e49ca6f23540d501e4dfa9ca73da28a117a | refs/heads/10.0 | 2020-04-15T06:23:07.488597 | 2019-01-04T12:43:26 | 2019-01-04T12:43:26 | 164,458,983 | 0 | 1 | null | 2019-01-07T16:20:20 | 2019-01-07T16:20:19 | null | UTF-8 | Python | false | false | 2,519 | py | # -*- encoding: utf-8 -*-
# Copyright 2018 Sistemas de Datos - Rodrigo Colombo Vlaeminch <rcolombo@sdatos.es>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0
from odoo import api, models, _, fields
from odoo.exceptions import UserError
class CashInvoiceIn(models.TransientModel):
    """Extend the cash-in-from-invoice wizard to work with POS sessions.

    When launched from an invoice, the wizard targets the current user's
    open POS session and restricts journals to that session's statements.
    """
    _inherit = 'cash.invoice.in'

    def _default_invoice(self):
        # Default invoice resolved from the wizard's active context.
        return self._default_value(self.default_invoice)

    def _default_session(self):
        # Default POS session resolved from the wizard's active context.
        return self._default_value(self.default_session)

    def _session_domain(self):
        # Only sessions that are open and owned by the current user.
        return [('state', '=', 'opened'),
                ('user_id', '=', self.env.uid)]

    # invoice being paid (required when the wizard runs)
    invoice_id = fields.Many2one('account.invoice', string='Invoice', required=True, default=_default_invoice)
    # POS session receiving the payment, limited to the user's open sessions
    pos_session_id = fields.Many2one('pos.session', string='Pos Session', default=_default_session, domain=_session_domain)
    # model the wizard was launched from (drives the invoice-specific paths)
    active_model = fields.Char(default=lambda self: self.env.context.get('active_model', False))

    def default_invoice(self, active_model, active_ids):
        # Only meaningful when launched from an invoice form/list.
        if active_model == 'account.invoice':
            return self.env[active_model].browse(active_ids).id

    def default_session(self, active_model, active_ids):
        if active_model == 'account.invoice':
            session = self.env['pos.session'].search([('state', '=', 'opened'),
                                                      ('user_id', '=', self.env.uid)]).id
            if not session:
                raise UserError(_('There is not pos session opened for yourself'))
            return session
        return False

    def default_journals(self, active_model, active_ids):
        # Restrict selectable journals to the session's bank statements.
        if active_model == 'account.invoice':
            session = self.env['pos.session'].browse(self._default_session())
            return self.env['account.journal'].browse([r.journal_id.id for r in session.statement_ids])
        return super(CashInvoiceIn, self).default_journals(active_model, active_ids)

    @api.multi
    def run(self):
        # From an invoice: register the cash move on the matching session
        # statement for the chosen journal; otherwise defer to the parent.
        if self.env.context.get('active_model', False) == 'account.invoice':
            bank_statements = [
                session.statement_ids.filtered(
                    lambda r: r.journal_id.id == self.journal_id.id
                )
                for session in self.env['pos.session'].browse(self._default_session())
            ]
            if not bank_statements:
                raise UserError(_('Bank Statement was not found'))
            return self._run(bank_statements)
        return super(CashInvoiceIn, self).run()
| [
"rodrigo.covl@gmail.com"
] | rodrigo.covl@gmail.com |
31c63484ece90ef1a58d4d8a1c917875e71e42ba | 0729bc2e2236fadb8fb2eac8b30534d939a45b2e | /DistAnnot/Annot/tests.py | e0c741e72f672231d4fd71b9ee91a723a70a444e | [] | no_license | JudoWill/pyMutF | 8ecdc24fbb2efe2a0a721aab164a2b060de11832 | aaf41ab41eb897c10a721c62913bb49c79f2cefc | refs/heads/master | 2021-01-16T20:34:06.705933 | 2010-10-11T16:55:08 | 2010-10-11T16:55:08 | 710,208 | 8 | 1 | null | null | null | null | UTF-8 | Python | false | false | 535 | py | """
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test import TestCase
from django.core.urlresolvers import reverse
from DistAnnot.Interaction.models import *
from forms import AnnotForm, InteractionEffectForm
from django.forms.formsets import formset_factory
class SimpleTest(TestCase):
    """Test case backed by the simple Interaction fixture data."""
    # Fixture loaded into the test database before each test runs.
    fixtures = ['Interaction.simple_data.yaml']
"judowill@gmail.com"
] | judowill@gmail.com |
2edffd4600267c56a246b76b1f853487487f3432 | e95ef5e61682a947e82d69842f75f4cf002d6203 | /lib/networks/net_labeled_fpn_fixed.py | e0be0c709bf3c684c0fc9f5b2734cb1a530c1509 | [
"MIT"
] | permissive | daweim0/Just-some-image-features | 4aaa4f6674ecb7c925b98d94cb5643de9e44bdcc | 23b2bc80f5acf3940a718dc462fb45625b61a853 | refs/heads/master | 2021-07-18T22:36:27.218646 | 2017-10-26T02:15:04 | 2017-10-26T02:15:04 | 100,301,844 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,371 | py | import tensorflow as tf
from networks.network import Network
from fcn.config import cfg
""" A network that produces dense features.
This particular network was heavily inspired by 'Feature Pyramid Networks for Object Detection' and adhere's more
closely to the paper than net_labeled_fpn.py
"""
class custom_network(Network):
    """FPN-style siamese network producing dense per-pixel features.

    Two weight-sharing VGG towers (left/right image) feed a top-down
    feature pyramid; features at each scale are trained with a triplet
    flow loss against downscaled ground-truth flow/occlusion/labels.
    """

    def __init__(self):
        # Config-driven hyperparameters.
        self.inputs = cfg.INPUT
        # self.input_format = input_format
        self.num_output_dimensions = 2  # formerly num_classes; flow has 2 channels (u, v)
        self.num_units = cfg.TRAIN.NUM_UNITS
        self.scale = 1 / cfg.TRAIN.SCALES_BASE[0]
        self.vertex_reg = cfg.TRAIN.VERTEX_REG

        # Placeholders that feed the input queue (batch, H, W, C).
        self.data_left = tf.placeholder(tf.float32, shape=[None, None, None, 3])
        self.data_right = tf.placeholder(tf.float32, shape=[None, None, None, 3])
        self.gt_flow = tf.placeholder(tf.float32, shape=[None, None, None, self.num_output_dimensions])
        self.occluded = tf.placeholder(tf.int32, shape=[None, None, None, 1])
        self.labels_left = tf.placeholder(tf.int32, shape=[None, None, None, None])
        self.labels_right = tf.placeholder(tf.int32, shape=[None, None, None, None])
        self.keep_prob = tf.placeholder(tf.float32)
        self.queue_size = 20

        # define a queue decoupling the data feeder from the training graph
        self.q = tf.FIFOQueue(self.queue_size, [tf.float32, tf.float32, tf.float32, tf.int32, tf.int32, tf.int32, tf.float32])
        self.enqueue_op = self.q.enqueue([self.data_left, self.data_right, self.gt_flow, self.occluded, self.labels_left, self.labels_right, self.keep_prob])
        data_left, data_right, gt_flow, occluded, left_labels, right_labels, self.keep_prob_queue = self.q.dequeue()
        self.layers = dict({'data_left': data_left, 'data_right': data_right, 'gt_flow': gt_flow, 'occluded': occluded,
                            'left_labels': left_labels, "right_labels": right_labels})
        self.close_queue_op = self.q.close(cancel_pending_enqueues=True)
        self.queue_size_op = self.q.size('queue_size')

        self.trainable = cfg.TRAIN.TRAINABLE

        # Skip-link gates (1.0 enables, 0.0 disables the lateral connection).
        # NOTE(review): these multipliers are defined but not applied in
        # setup() below -- the skip links are always added; confirm intent.
        if cfg.NET_CONF.CONV1_SKIP_LINK:
            self.skip_1_mult = tf.constant(1.0, tf.float32)
        else:
            self.skip_1_mult = tf.constant(0.0, tf.float32)
        if cfg.NET_CONF.CONV2_SKIP_LINK:
            self.skip_2_mult = tf.constant(1.0, tf.float32)
        else:
            self.skip_2_mult = tf.constant(0.0, tf.float32)
        if cfg.NET_CONF.CONV3_SKIP_LINK:
            self.skip_4_mult = tf.constant(1.0, tf.float32)
        else:
            self.skip_4_mult = tf.constant(0.0, tf.float32)

        self.setup()

    def setup(self):
        """Build the full graph: GT pyramids, both towers, and the loss."""
        trainable = self.trainable
        reuse = True  # right tower reuses the left tower's variables
        feature_len = 128

        # scaled versions of ground truth; flow values are halved at each
        # downscale so they stay in the coarser level's pixel units
        (self.feed('gt_flow')
         .avg_pool(2, 2, 2, 2, name='flow_pool1')
         .div_immediate(tf.constant(2.0, tf.float32), name='gt_flow_2x')
         .avg_pool(2, 2, 2, 2, name='flow_pool2')
         .div_immediate(tf.constant(2.0, tf.float32), name='gt_flow_4x')
         .avg_pool(2, 2, 2, 2, name='flow_pool3')
         .div_immediate(tf.constant(2.0, tf.float32), name='gt_flow_8x')
         .avg_pool(2, 2, 2, 2, name='flow_pool4')
         .div_immediate(tf.constant(2.0, tf.float32), name='gt_flow_16x'))

        # occlusion masks: average-pool then round back to {0,1} int masks
        (self.feed('occluded').cast(tf.float32)
         .avg_pool(2, 2, 2, 2, name='occluded_2x_avg')
         .avg_pool(2, 2, 2, 2, name='occluded_4x_avg')
         .avg_pool(2, 2, 2, 2, name='occluded_8x_avg')
         .avg_pool(2, 2, 2, 2, name='occluded_16x_avg'))
        self.feed('occluded_2x_avg').round().cast(tf.int32, name="occluded_2x")
        self.feed('occluded_4x_avg').round().cast(tf.int32, name="occluded_4x")
        self.feed('occluded_8x_avg').round().cast(tf.int32, name="occluded_8x")
        self.feed('occluded_16x_avg').round().cast(tf.int32, name="occluded_16x")

        # left-image label pyramids, same pool-and-round scheme
        (self.feed('left_labels').cast(tf.float32)
         .avg_pool(2, 2, 2, 2, name='left_labels_2x_avg')
         .avg_pool(2, 2, 2, 2, name='left_labels_4x_avg')
         .avg_pool(2, 2, 2, 2, name='left_labels_8x_avg')
         .avg_pool(2, 2, 2, 2, name='left_labels_16x_avg'))
        self.feed('left_labels_2x_avg').round().cast(tf.int32, name="left_labels_2x")
        self.feed('left_labels_4x_avg').round().cast(tf.int32, name="left_labels_4x")
        self.feed('left_labels_8x_avg').round().cast(tf.int32, name="left_labels_8x")
        self.feed('left_labels_16x_avg').round().cast(tf.int32, name="left_labels_16x")

        # right-image label pyramids
        (self.feed('right_labels').cast(tf.float32)
         .avg_pool(2, 2, 2, 2, name='right_labels_2x_avg')
         .avg_pool(2, 2, 2, 2, name='right_labels_4x_avg')
         .avg_pool(2, 2, 2, 2, name='right_labels_8x_avg')
         .avg_pool(2, 2, 2, 2, name='right_labels_16x_avg'))
        self.feed('right_labels_2x_avg').round().cast(tf.int32, name="right_labels_2x")
        self.feed('right_labels_4x_avg').round().cast(tf.int32, name="right_labels_4x")
        self.feed('right_labels_8x_avg').round().cast(tf.int32, name="right_labels_8x")
        self.feed('right_labels_16x_avg').round().cast(tf.int32, name="right_labels_16x")

        # left tower: VGG16-style conv1..conv5 backbone; the add_immediate(0)
        # nodes are named taps so later layers can feed from them
        (self.feed('data_left')
         .add_immediate(tf.constant(0.0, tf.float32), name='data_left_tap')
         .conv(3, 3, 64, 1, 1, name='conv1_1', c_i=3, trainable=trainable)
         .conv(3, 3, 64, 1, 1, name='conv1_2', c_i=64, trainable=trainable)
         .add_immediate(tf.constant(0.0, tf.float32), name='conv1_l')
         .max_pool(2, 2, 2, 2, name='pool1')
         .conv(3, 3, 128, 1, 1, name='conv2_1', c_i=64, trainable=trainable)
         .conv(3, 3, 128, 1, 1, name='conv2_2', c_i=128, trainable=trainable)
         .add_immediate(tf.constant(0.0, tf.float32), name='conv2_l')
         .max_pool(2, 2, 2, 2, name='pool2')
         .conv(3, 3, 256, 1, 1, name='conv3_1', c_i=128, trainable=trainable)
         .conv(3, 3, 256, 1, 1, name='conv3_2', c_i=256, trainable=trainable)
         .conv(3, 3, 256, 1, 1, name='conv3_3', c_i=256, trainable=trainable)
         .add_immediate(tf.constant(0.0, tf.float32), name='conv3_l')
         .max_pool(2, 2, 2, 2, name='pool3')
         .conv(3, 3, 512, 1, 1, name='conv4_1', c_i=256, trainable=trainable)
         .conv(3, 3, 512, 1, 1, name='conv4_2', c_i=512, trainable=trainable)
         .conv(3, 3, 512, 1, 1, name='conv4_3', c_i=512, trainable=trainable)
         .add_immediate(tf.constant(0.0, tf.float32), name='conv4_3_l')
         .max_pool(2, 2, 2, 2, name='pool4')
         .conv(3, 3, 512, 1, 1, name='conv5_1', c_i=512, trainable=trainable)
         .conv(3, 3, 512, 1, 1, name='conv5_2', c_i=512, trainable=trainable)
         .conv(3, 3, 512, 1, 1, name='conv5_3', c_i=512, trainable=trainable)
         .add_immediate(tf.constant(0.0, tf.float32), name='conv5_3_l'))

        # 16x scaling input: pyramid top from conv5, then upsample toward 8x
        (self.feed('conv5_3_l')
         .conv(1, 1, feature_len, 1, 1, name='16_conv_1', c_i=512, elu=True)
         # .conv(1, 1, 128, 1, 1, name='16_conv_2', c_i=128, elu=True, reuse=reuse)
         .add_immediate(tf.constant(0.0, tf.float32), name='features_16x_l')
         .deconv(4, 4, feature_len, 2, 2, name='upscale_16x_l', trainable=False))

        # 8x scaling input: 1x1 lateral from conv4 + upsampled 16x features
        (self.feed('conv4_3_l')
         .conv(1, 1, feature_len, 1, 1, name='8x_skip_cov_1', c_i=512, relu=False)
         .add_immediate(tf.constant(0.0, tf.float32), name='skip_link_8x_l'))
        (self.feed('upscale_16x_l', 'skip_link_8x_l')
         .add(name='8_add')
         .add_immediate(tf.constant(0.0, tf.float32), name='features_8x_l')
         .conv(3, 3, feature_len, 1, 1, name='8x_conv_2', relu=False)
         .deconv(4, 4, feature_len, 2, 2, name='upscale_8x_l', trainable=False))

        # 4x scaling input: lateral from conv3 + upsampled 8x features
        (self.feed('conv3_l')
         .conv(1, 1, feature_len, 1, 1, name='4x_skip_cov_1', c_i=256, relu=False)
         .add_immediate(tf.constant(0.0, tf.float32), name='skip_link_4x_l'))
        (self.feed('upscale_8x_l', 'skip_link_4x_l')
         .add(name='4_add')
         .add_immediate(tf.constant(0.0, tf.float32), name='features_4x_l')
         .conv(3, 3, feature_len, 1, 1, name='4x_conv_2', relu=False)
         .deconv(4, 4, feature_len, 2, 2, name='upscale_4x_l', trainable=False))

        # 2x scaling input: lateral from conv2 + upsampled 4x features
        (self.feed('conv2_l')
         .conv(1, 1, feature_len, 1, 1, name='2x_skip_cov_1', c_i=128, relu=False)
         .add_immediate(tf.constant(0.0, tf.float32), name='skip_link_2x_l'))
        (self.feed('upscale_4x_l', 'skip_link_2x_l')
         .add(name='2_add')
         .add_immediate(tf.constant(0.0, tf.float32), name='features_2x_l')
         .conv(3, 3, feature_len, 1, 1, name='2x_conv_2', relu=False)
         .deconv(4, 4, feature_len, 2, 2, name='upscale_2x_l', trainable=False))

        # # 1x scaling input
        # (self.feed('conv1_l')
        #  .conv(1, 1, feature_len, 1, 1, name='1x_skip_cov_1', c_i=64, relu=False)
        #  .add_immediate(tf.constant(0.0, tf.float32), name='skip_link_1x_l'))
        # (self.feed('upscale_2x_l', 'skip_link_1x_l')
        #  .add(name='1_add')
        #  .add_immediate(tf.constant(0.0, tf.float32), name='features_1x_l'))

        # right tower: identical structure, reuse=True shares all conv weights
        (self.feed('data_right')
         .add_immediate(tf.constant(0.0, tf.float32), name='data_right_tap')
         .conv(3, 3, 64, 1, 1, name='conv1_1', c_i=3, trainable=trainable, reuse=reuse)
         .conv(3, 3, 64, 1, 1, name='conv1_2', c_i=64, trainable=trainable, reuse=reuse)
         .add_immediate(tf.constant(0.0, tf.float32), name='conv1_r')
         .max_pool(2, 2, 2, 2, name='pool1')
         .conv(3, 3, 128, 1, 1, name='conv2_1', c_i=64, trainable=trainable, reuse=reuse)
         .conv(3, 3, 128, 1, 1, name='conv2_2', c_i=128, trainable=trainable, reuse=reuse)
         .add_immediate(tf.constant(0.0, tf.float32), name='conv2_r')
         .max_pool(2, 2, 2, 2, name='pool2')
         .conv(3, 3, 256, 1, 1, name='conv3_1', c_i=128, trainable=trainable, reuse=reuse)
         .conv(3, 3, 256, 1, 1, name='conv3_2', c_i=256, trainable=trainable, reuse=reuse)
         .conv(3, 3, 256, 1, 1, name='conv3_3', c_i=256, trainable=trainable, reuse=reuse)
         .add_immediate(tf.constant(0.0, tf.float32), name='conv3_r')
         .max_pool(2, 2, 2, 2, name='pool3')
         .conv(3, 3, 512, 1, 1, name='conv4_1', c_i=256, trainable=trainable, reuse=reuse)
         .conv(3, 3, 512, 1, 1, name='conv4_2', c_i=512, trainable=trainable, reuse=reuse)
         .conv(3, 3, 512, 1, 1, name='conv4_3', c_i=512, trainable=trainable, reuse=reuse)
         .add_immediate(tf.constant(0.0, tf.float32), name='conv4_3_r')
         .max_pool(2, 2, 2, 2, name='pool4')
         .conv(3, 3, 512, 1, 1, name='conv5_1', c_i=512, trainable=trainable, reuse=reuse)
         .conv(3, 3, 512, 1, 1, name='conv5_2', c_i=512, trainable=trainable, reuse=reuse)
         .conv(3, 3, 512, 1, 1, name='conv5_3', c_i=512, trainable=trainable, reuse=reuse)
         .add_immediate(tf.constant(0.0, tf.float32), name='conv5_3_r'))

        # 16x scaling input (right)
        (self.feed('conv5_3_r')
         .conv(1, 1, feature_len, 1, 1, name='16_conv_1', c_i=512, elu=True, reuse=reuse)
         # .conv(1, 1, 128, 1, 1, name='16_conv_2', c_i=128, elu=True, reuse=reuse)
         .add_immediate(tf.constant(0.0, tf.float32), name='features_16x_r')
         .deconv(4, 4, feature_len, 2, 2, name='upscale_16x_r', trainable=False))

        # 8x scaling input (right)
        (self.feed('conv4_3_r')
         .conv(1, 1, feature_len, 1, 1, name='8x_skip_cov_1', c_i=512, relu=False, reuse=reuse)
         .add_immediate(tf.constant(0.0, tf.float32), name='skip_link_8x_r'))
        (self.feed('upscale_16x_r', 'skip_link_8x_r')
         .add(name='8_add')
         .add_immediate(tf.constant(0.0, tf.float32), name='features_8x_r')
         .conv(3, 3, feature_len, 1, 1, name='8x_conv_2', relu=False, reuse=reuse)
         .deconv(4, 4, feature_len, 2, 2, name='upscale_8x_r', trainable=False))

        # 4x scaling input (right)
        (self.feed('conv3_r')
         .conv(1, 1, feature_len, 1, 1, name='4x_skip_cov_1', c_i=256, relu=False, reuse=reuse)
         .add_immediate(tf.constant(0.0, tf.float32), name='skip_link_4x_r'))
        (self.feed('upscale_8x_r', 'skip_link_4x_r')
         .add(name='4_add')
         .add_immediate(tf.constant(0.0, tf.float32), name='features_4x_r')
         .conv(3, 3, feature_len, 1, 1, name='4x_conv_2', relu=False, reuse=reuse)
         .deconv(4, 4, feature_len, 2, 2, name='upscale_4x_r', trainable=False))

        # 2x scaling input (right)
        (self.feed('conv2_r')
         .conv(1, 1, feature_len, 1, 1, name='2x_skip_cov_1', c_i=128, relu=False, reuse=reuse)
         .add_immediate(tf.constant(0.0, tf.float32), name='skip_link_2x_r'))
        (self.feed('upscale_4x_r', 'skip_link_2x_r')
         .add(name='2_add')
         .add_immediate(tf.constant(0.0, tf.float32), name='features_2x_r')
         .conv(3, 3, feature_len, 1, 1, name='2x_conv_2', relu=False, reuse=reuse)
         .deconv(4, 4, feature_len, 2, 2, name='upscale_2x_r', trainable=False))

        # # 1x scaling input
        # (self.feed('conv1_r')
        #  .conv(1, 1, feature_len, 1, 1, name='1x_skip_cov_1', c_i=64, relu=False, reuse=reuse)
        #  .add_immediate(tf.constant(0.0, tf.float32), name='skip_link_1x_r'))
        # (self.feed('upscale_2x_r', 'skip_link_1x_r')
        #  .add(name='1_add')
        #  .add_immediate(tf.constant(0.0, tf.float32), name='features_1x_r'))

        # expose the upsampled 2x maps under the 1x names (the true 1x path
        # with the conv1 skip link is commented out above)
        self.feed('upscale_2x_l')
        self.add_immediate(tf.constant(0.0, tf.float32), name='features_1x_l')
        self.feed('upscale_2x_r')
        self.add_immediate(tf.constant(0.0, tf.float32), name='features_1x_r')

        with tf.device("/cpu:0"):
            # triplet loss
            # (self.feed(['features_1x_l', 'features_1x_r', 'gt_flow', 'occluded', 'left_labels', 'right_labels'])
            #  .triplet_flow_loss(margin=1.0, negative_radius=2, positive_radius=1, name="triplet_loss_1x"))

            # only the 2x-scale loss is active; other scales are commented out
            (self.feed(['features_2x_l', 'features_2x_r', 'gt_flow_2x', 'occluded_2x', 'left_labels_2x', 'right_labels_2x'])
             .triplet_flow_loss(margin=1.0, negative_radius=2, positive_radius=1, name="triplet_loss_2x"))

            # (self.feed(['features_4x_l', 'features_4x_r', 'gt_flow_4x', 'occluded_4x', 'left_labels_4x', 'right_labels_4x'])
            #  .triplet_flow_loss(margin=1.0, negative_radius=4, positive_radius=2, name="triplet_loss_4x"))
            #
            # (self.feed(['features_8x_l', 'features_8x_r', 'gt_flow_8x', 'occluded_8x', 'left_labels_8x', 'right_labels_8x'])
            #  .triplet_flow_loss(margin=1.0, negative_radius=5, positive_radius=2, name="triplet_loss_8x"))
            #
            # final_output = (self.get_output('triplet_loss_8x')[0] + self.get_output('triplet_loss_2x')[0] +
            #                 self.get_output('triplet_loss_4x')[0] + self.get_output('triplet_loss_1x')[0]) / 4.0
            final_output = self.get_output('triplet_loss_2x')[0]
            self.layers["final_triplet_loss"] = [final_output]

            # (self.feed(['features_8x_l', 'features4x_l', 'features_2x_l', 'features_1x_l'])
            #  .concat(axis=3, name="final_features_l_out"))
            #
            # (self.feed(['features_8x_r', 'features4x_r', 'features_2x_r', 'features_1x_r'])
            #  .concat(axis=3, name="final_features_r_out"))
            pass
"daweim0@gmail.com"
] | daweim0@gmail.com |
ba0f8b5d3e6818f96a7f42132ea32967e054c957 | 2f330fc050de11676ab46b963b7878882e9b6614 | /memsource_cli/models/create_analyse_list_async_dto.py | 0679fd3b864b9449bd836de3615a6545e4f4fed0 | [
"Apache-2.0"
] | permissive | zerodayz/memsource-cli-client | 609f48c18a2b6daaa639d4cb8a61da43763b5143 | c2574f1467539a49e6637c874e88d75c7ef789b3 | refs/heads/master | 2020-08-01T12:43:06.497982 | 2019-09-30T11:14:13 | 2019-09-30T11:14:13 | 210,999,654 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,872 | py | # coding: utf-8
"""
Memsource REST API
Welcome to Memsource's API documentation. To view our legacy APIs please [visit our documentation](https://wiki.memsource.com/wiki/Memsource_API) and for more information about our new APIs, [visit our blog](https://www.memsource.com/blog/2017/10/24/introducing-rest-apis-qa-with-the-memsource-api-team/). If you have any questions, please contact [Memsource Support](<mailto:support@memsource.com>). # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from memsource_cli.models.id_reference import IdReference # noqa: F401,E501
from memsource_cli.models.uid_reference import UidReference # noqa: F401,E501
class CreateAnalyseListAsyncDto(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Maps python attribute name -> swagger type string; consumed by to_dict().
    swagger_types = {
        'jobs': 'list[UidReference]',
        'type': 'str',
        'include_fuzzy_repetitions': 'bool',
        'include_confirmed_segments': 'bool',
        'include_numbers': 'bool',
        'include_locked_segments': 'bool',
        'count_source_units': 'bool',
        'include_trans_memory': 'bool',
        'include_non_translatables': 'bool',
        'include_machine_translation_matches': 'bool',
        'trans_memory_post_editing': 'bool',
        'non_translatable_post_editing': 'bool',
        'machine_translate_post_editing': 'bool',
        'name': 'str',
        'net_rate_scheme': 'IdReference',
        'compare_workflow_level': 'int',
        'use_project_analysis_settings': 'bool',
        'callback_url': 'str'
    }
    # Maps python attribute name -> JSON field name used in the API payload.
    attribute_map = {
        'jobs': 'jobs',
        'type': 'type',
        'include_fuzzy_repetitions': 'includeFuzzyRepetitions',
        'include_confirmed_segments': 'includeConfirmedSegments',
        'include_numbers': 'includeNumbers',
        'include_locked_segments': 'includeLockedSegments',
        'count_source_units': 'countSourceUnits',
        'include_trans_memory': 'includeTransMemory',
        'include_non_translatables': 'includeNonTranslatables',
        'include_machine_translation_matches': 'includeMachineTranslationMatches',
        'trans_memory_post_editing': 'transMemoryPostEditing',
        'non_translatable_post_editing': 'nonTranslatablePostEditing',
        'machine_translate_post_editing': 'machineTranslatePostEditing',
        'name': 'name',
        'net_rate_scheme': 'netRateScheme',
        'compare_workflow_level': 'compareWorkflowLevel',
        'use_project_analysis_settings': 'useProjectAnalysisSettings',
        'callback_url': 'callbackUrl'
    }
    def __init__(self, jobs=None, type=None, include_fuzzy_repetitions=None, include_confirmed_segments=None, include_numbers=None, include_locked_segments=None, count_source_units=None, include_trans_memory=None, include_non_translatables=None, include_machine_translation_matches=None, trans_memory_post_editing=None, non_translatable_post_editing=None, machine_translate_post_editing=None, name=None, net_rate_scheme=None, compare_workflow_level=None, use_project_analysis_settings=None, callback_url=None):  # noqa: E501
        """CreateAnalyseListAsyncDto - a model defined in Swagger"""  # noqa: E501
        self._jobs = None
        self._type = None
        self._include_fuzzy_repetitions = None
        self._include_confirmed_segments = None
        self._include_numbers = None
        self._include_locked_segments = None
        self._count_source_units = None
        self._include_trans_memory = None
        self._include_non_translatables = None
        self._include_machine_translation_matches = None
        self._trans_memory_post_editing = None
        self._non_translatable_post_editing = None
        self._machine_translate_post_editing = None
        self._name = None
        self._net_rate_scheme = None
        self._compare_workflow_level = None
        self._use_project_analysis_settings = None
        self._callback_url = None
        self.discriminator = None
        # `jobs` is assigned unconditionally: its setter raises ValueError on
        # None, so it is effectively the only required field of this DTO.
        self.jobs = jobs
        if type is not None:
            self.type = type
        if include_fuzzy_repetitions is not None:
            self.include_fuzzy_repetitions = include_fuzzy_repetitions
        if include_confirmed_segments is not None:
            self.include_confirmed_segments = include_confirmed_segments
        if include_numbers is not None:
            self.include_numbers = include_numbers
        if include_locked_segments is not None:
            self.include_locked_segments = include_locked_segments
        if count_source_units is not None:
            self.count_source_units = count_source_units
        if include_trans_memory is not None:
            self.include_trans_memory = include_trans_memory
        if include_non_translatables is not None:
            self.include_non_translatables = include_non_translatables
        if include_machine_translation_matches is not None:
            self.include_machine_translation_matches = include_machine_translation_matches
        if trans_memory_post_editing is not None:
            self.trans_memory_post_editing = trans_memory_post_editing
        if non_translatable_post_editing is not None:
            self.non_translatable_post_editing = non_translatable_post_editing
        if machine_translate_post_editing is not None:
            self.machine_translate_post_editing = machine_translate_post_editing
        if name is not None:
            self.name = name
        if net_rate_scheme is not None:
            self.net_rate_scheme = net_rate_scheme
        if compare_workflow_level is not None:
            self.compare_workflow_level = compare_workflow_level
        if use_project_analysis_settings is not None:
            self.use_project_analysis_settings = use_project_analysis_settings
        if callback_url is not None:
            self.callback_url = callback_url
    @property
    def jobs(self):
        """Gets the jobs of this CreateAnalyseListAsyncDto.  # noqa: E501
        :return: The jobs of this CreateAnalyseListAsyncDto.  # noqa: E501
        :rtype: list[UidReference]
        """
        return self._jobs
    @jobs.setter
    def jobs(self, jobs):
        """Sets the jobs of this CreateAnalyseListAsyncDto.
        :param jobs: The jobs of this CreateAnalyseListAsyncDto.  # noqa: E501
        :type: list[UidReference]
        """
        if jobs is None:
            raise ValueError("Invalid value for `jobs`, must not be `None`")  # noqa: E501
        self._jobs = jobs
    @property
    def type(self):
        """Gets the type of this CreateAnalyseListAsyncDto.  # noqa: E501
        default: PreAnalyse  # noqa: E501
        :return: The type of this CreateAnalyseListAsyncDto.  # noqa: E501
        :rtype: str
        """
        return self._type
    @type.setter
    def type(self, type):
        """Sets the type of this CreateAnalyseListAsyncDto.
        default: PreAnalyse  # noqa: E501
        :param type: The type of this CreateAnalyseListAsyncDto.  # noqa: E501
        :type: str
        """
        # NOTE: None is not in allowed_values, so assigning None here raises;
        # __init__ avoids that by only assigning when a value was provided.
        allowed_values = ["PreAnalyse", "PostAnalyse", "Compare"]  # noqa: E501
        if type not in allowed_values:
            raise ValueError(
                "Invalid value for `type` ({0}), must be one of {1}"  # noqa: E501
                .format(type, allowed_values)
            )
        self._type = type
    @property
    def include_fuzzy_repetitions(self):
        """Gets the include_fuzzy_repetitions of this CreateAnalyseListAsyncDto.  # noqa: E501
        Default: true  # noqa: E501
        :return: The include_fuzzy_repetitions of this CreateAnalyseListAsyncDto.  # noqa: E501
        :rtype: bool
        """
        return self._include_fuzzy_repetitions
    @include_fuzzy_repetitions.setter
    def include_fuzzy_repetitions(self, include_fuzzy_repetitions):
        """Sets the include_fuzzy_repetitions of this CreateAnalyseListAsyncDto.
        Default: true  # noqa: E501
        :param include_fuzzy_repetitions: The include_fuzzy_repetitions of this CreateAnalyseListAsyncDto.  # noqa: E501
        :type: bool
        """
        self._include_fuzzy_repetitions = include_fuzzy_repetitions
    @property
    def include_confirmed_segments(self):
        """Gets the include_confirmed_segments of this CreateAnalyseListAsyncDto.  # noqa: E501
        Default: true  # noqa: E501
        :return: The include_confirmed_segments of this CreateAnalyseListAsyncDto.  # noqa: E501
        :rtype: bool
        """
        return self._include_confirmed_segments
    @include_confirmed_segments.setter
    def include_confirmed_segments(self, include_confirmed_segments):
        """Sets the include_confirmed_segments of this CreateAnalyseListAsyncDto.
        Default: true  # noqa: E501
        :param include_confirmed_segments: The include_confirmed_segments of this CreateAnalyseListAsyncDto.  # noqa: E501
        :type: bool
        """
        self._include_confirmed_segments = include_confirmed_segments
    @property
    def include_numbers(self):
        """Gets the include_numbers of this CreateAnalyseListAsyncDto.  # noqa: E501
        Default: true  # noqa: E501
        :return: The include_numbers of this CreateAnalyseListAsyncDto.  # noqa: E501
        :rtype: bool
        """
        return self._include_numbers
    @include_numbers.setter
    def include_numbers(self, include_numbers):
        """Sets the include_numbers of this CreateAnalyseListAsyncDto.
        Default: true  # noqa: E501
        :param include_numbers: The include_numbers of this CreateAnalyseListAsyncDto.  # noqa: E501
        :type: bool
        """
        self._include_numbers = include_numbers
    @property
    def include_locked_segments(self):
        """Gets the include_locked_segments of this CreateAnalyseListAsyncDto.  # noqa: E501
        Default: true  # noqa: E501
        :return: The include_locked_segments of this CreateAnalyseListAsyncDto.  # noqa: E501
        :rtype: bool
        """
        return self._include_locked_segments
    @include_locked_segments.setter
    def include_locked_segments(self, include_locked_segments):
        """Sets the include_locked_segments of this CreateAnalyseListAsyncDto.
        Default: true  # noqa: E501
        :param include_locked_segments: The include_locked_segments of this CreateAnalyseListAsyncDto.  # noqa: E501
        :type: bool
        """
        self._include_locked_segments = include_locked_segments
    @property
    def count_source_units(self):
        """Gets the count_source_units of this CreateAnalyseListAsyncDto.  # noqa: E501
        Default: true  # noqa: E501
        :return: The count_source_units of this CreateAnalyseListAsyncDto.  # noqa: E501
        :rtype: bool
        """
        return self._count_source_units
    @count_source_units.setter
    def count_source_units(self, count_source_units):
        """Sets the count_source_units of this CreateAnalyseListAsyncDto.
        Default: true  # noqa: E501
        :param count_source_units: The count_source_units of this CreateAnalyseListAsyncDto.  # noqa: E501
        :type: bool
        """
        self._count_source_units = count_source_units
    @property
    def include_trans_memory(self):
        """Gets the include_trans_memory of this CreateAnalyseListAsyncDto.  # noqa: E501
        Default: true  # noqa: E501
        :return: The include_trans_memory of this CreateAnalyseListAsyncDto.  # noqa: E501
        :rtype: bool
        """
        return self._include_trans_memory
    @include_trans_memory.setter
    def include_trans_memory(self, include_trans_memory):
        """Sets the include_trans_memory of this CreateAnalyseListAsyncDto.
        Default: true  # noqa: E501
        :param include_trans_memory: The include_trans_memory of this CreateAnalyseListAsyncDto.  # noqa: E501
        :type: bool
        """
        self._include_trans_memory = include_trans_memory
    @property
    def include_non_translatables(self):
        """Gets the include_non_translatables of this CreateAnalyseListAsyncDto.  # noqa: E501
        Default: false. Works only for type=PreAnalyse.  # noqa: E501
        :return: The include_non_translatables of this CreateAnalyseListAsyncDto.  # noqa: E501
        :rtype: bool
        """
        return self._include_non_translatables
    @include_non_translatables.setter
    def include_non_translatables(self, include_non_translatables):
        """Sets the include_non_translatables of this CreateAnalyseListAsyncDto.
        Default: false. Works only for type=PreAnalyse.  # noqa: E501
        :param include_non_translatables: The include_non_translatables of this CreateAnalyseListAsyncDto.  # noqa: E501
        :type: bool
        """
        self._include_non_translatables = include_non_translatables
    @property
    def include_machine_translation_matches(self):
        """Gets the include_machine_translation_matches of this CreateAnalyseListAsyncDto.  # noqa: E501
        Default: false. Works only for type=PreAnalyse.  # noqa: E501
        :return: The include_machine_translation_matches of this CreateAnalyseListAsyncDto.  # noqa: E501
        :rtype: bool
        """
        return self._include_machine_translation_matches
    @include_machine_translation_matches.setter
    def include_machine_translation_matches(self, include_machine_translation_matches):
        """Sets the include_machine_translation_matches of this CreateAnalyseListAsyncDto.
        Default: false. Works only for type=PreAnalyse.  # noqa: E501
        :param include_machine_translation_matches: The include_machine_translation_matches of this CreateAnalyseListAsyncDto.  # noqa: E501
        :type: bool
        """
        self._include_machine_translation_matches = include_machine_translation_matches
    @property
    def trans_memory_post_editing(self):
        """Gets the trans_memory_post_editing of this CreateAnalyseListAsyncDto.  # noqa: E501
        Default: false. Works only for type=PostAnalyse.  # noqa: E501
        :return: The trans_memory_post_editing of this CreateAnalyseListAsyncDto.  # noqa: E501
        :rtype: bool
        """
        return self._trans_memory_post_editing
    @trans_memory_post_editing.setter
    def trans_memory_post_editing(self, trans_memory_post_editing):
        """Sets the trans_memory_post_editing of this CreateAnalyseListAsyncDto.
        Default: false. Works only for type=PostAnalyse.  # noqa: E501
        :param trans_memory_post_editing: The trans_memory_post_editing of this CreateAnalyseListAsyncDto.  # noqa: E501
        :type: bool
        """
        self._trans_memory_post_editing = trans_memory_post_editing
    @property
    def non_translatable_post_editing(self):
        """Gets the non_translatable_post_editing of this CreateAnalyseListAsyncDto.  # noqa: E501
        Default: false. Works only for type=PostAnalyse.  # noqa: E501
        :return: The non_translatable_post_editing of this CreateAnalyseListAsyncDto.  # noqa: E501
        :rtype: bool
        """
        return self._non_translatable_post_editing
    @non_translatable_post_editing.setter
    def non_translatable_post_editing(self, non_translatable_post_editing):
        """Sets the non_translatable_post_editing of this CreateAnalyseListAsyncDto.
        Default: false. Works only for type=PostAnalyse.  # noqa: E501
        :param non_translatable_post_editing: The non_translatable_post_editing of this CreateAnalyseListAsyncDto.  # noqa: E501
        :type: bool
        """
        self._non_translatable_post_editing = non_translatable_post_editing
    @property
    def machine_translate_post_editing(self):
        """Gets the machine_translate_post_editing of this CreateAnalyseListAsyncDto.  # noqa: E501
        Default: false. Works only for type=PostAnalyse.  # noqa: E501
        :return: The machine_translate_post_editing of this CreateAnalyseListAsyncDto.  # noqa: E501
        :rtype: bool
        """
        return self._machine_translate_post_editing
    @machine_translate_post_editing.setter
    def machine_translate_post_editing(self, machine_translate_post_editing):
        """Sets the machine_translate_post_editing of this CreateAnalyseListAsyncDto.
        Default: false. Works only for type=PostAnalyse.  # noqa: E501
        :param machine_translate_post_editing: The machine_translate_post_editing of this CreateAnalyseListAsyncDto.  # noqa: E501
        :type: bool
        """
        self._machine_translate_post_editing = machine_translate_post_editing
    @property
    def name(self):
        """Gets the name of this CreateAnalyseListAsyncDto.  # noqa: E501
        :return: The name of this CreateAnalyseListAsyncDto.  # noqa: E501
        :rtype: str
        """
        return self._name
    @name.setter
    def name(self, name):
        """Sets the name of this CreateAnalyseListAsyncDto.
        :param name: The name of this CreateAnalyseListAsyncDto.  # noqa: E501
        :type: str
        """
        if name is not None and len(name) > 255:
            raise ValueError("Invalid value for `name`, length must be less than or equal to `255`")  # noqa: E501
        # NOTE: generated minimum-length guard; len(name) < 0 can never be true.
        if name is not None and len(name) < 0:
            raise ValueError("Invalid value for `name`, length must be greater than or equal to `0`")  # noqa: E501
        self._name = name
    @property
    def net_rate_scheme(self):
        """Gets the net_rate_scheme of this CreateAnalyseListAsyncDto.  # noqa: E501
        :return: The net_rate_scheme of this CreateAnalyseListAsyncDto.  # noqa: E501
        :rtype: IdReference
        """
        return self._net_rate_scheme
    @net_rate_scheme.setter
    def net_rate_scheme(self, net_rate_scheme):
        """Sets the net_rate_scheme of this CreateAnalyseListAsyncDto.
        :param net_rate_scheme: The net_rate_scheme of this CreateAnalyseListAsyncDto.  # noqa: E501
        :type: IdReference
        """
        self._net_rate_scheme = net_rate_scheme
    @property
    def compare_workflow_level(self):
        """Gets the compare_workflow_level of this CreateAnalyseListAsyncDto.  # noqa: E501
        Required for type=Compare  # noqa: E501
        :return: The compare_workflow_level of this CreateAnalyseListAsyncDto.  # noqa: E501
        :rtype: int
        """
        return self._compare_workflow_level
    @compare_workflow_level.setter
    def compare_workflow_level(self, compare_workflow_level):
        """Sets the compare_workflow_level of this CreateAnalyseListAsyncDto.
        Required for type=Compare  # noqa: E501
        :param compare_workflow_level: The compare_workflow_level of this CreateAnalyseListAsyncDto.  # noqa: E501
        :type: int
        """
        # Accepted range is 1..15 inclusive; None bypasses both bound checks.
        if compare_workflow_level is not None and compare_workflow_level > 15:  # noqa: E501
            raise ValueError("Invalid value for `compare_workflow_level`, must be a value less than or equal to `15`")  # noqa: E501
        if compare_workflow_level is not None and compare_workflow_level < 1:  # noqa: E501
            raise ValueError("Invalid value for `compare_workflow_level`, must be a value greater than or equal to `1`")  # noqa: E501
        self._compare_workflow_level = compare_workflow_level
    @property
    def use_project_analysis_settings(self):
        """Gets the use_project_analysis_settings of this CreateAnalyseListAsyncDto.  # noqa: E501
        Default: false. Use default project settings. Will be overwritten with setting sent in the API call.  # noqa: E501
        :return: The use_project_analysis_settings of this CreateAnalyseListAsyncDto.  # noqa: E501
        :rtype: bool
        """
        return self._use_project_analysis_settings
    @use_project_analysis_settings.setter
    def use_project_analysis_settings(self, use_project_analysis_settings):
        """Sets the use_project_analysis_settings of this CreateAnalyseListAsyncDto.
        Default: false. Use default project settings. Will be overwritten with setting sent in the API call.  # noqa: E501
        :param use_project_analysis_settings: The use_project_analysis_settings of this CreateAnalyseListAsyncDto.  # noqa: E501
        :type: bool
        """
        self._use_project_analysis_settings = use_project_analysis_settings
    @property
    def callback_url(self):
        """Gets the callback_url of this CreateAnalyseListAsyncDto.  # noqa: E501
        :return: The callback_url of this CreateAnalyseListAsyncDto.  # noqa: E501
        :rtype: str
        """
        return self._callback_url
    @callback_url.setter
    def callback_url(self, callback_url):
        """Sets the callback_url of this CreateAnalyseListAsyncDto.
        :param callback_url: The callback_url of this CreateAnalyseListAsyncDto.  # noqa: E501
        :type: str
        """
        self._callback_url = callback_url
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Recursively serialize nested swagger models (anything with to_dict).
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        # Generated escape hatch for models that subclass dict (not this one).
        if issubclass(CreateAnalyseListAsyncDto, dict):
            for key, value in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, CreateAnalyseListAsyncDto):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"cerninr@gmail.com"
] | cerninr@gmail.com |
0dc0c5dfb1070b7e877aca16428197e57351ccdc | 41db99b01c461d5d6acfa444eccb7d9cd2d92283 | /day_9/day9_pt1.py | cdf2ee5fbf8803330face30ebb406488dbb5dc25 | [
"MIT"
] | permissive | allisoncstafford/advent_of_code | 412867210fe77207fa3c9ec00c239cb9cbd40134 | a0f19a7b23e3112698944512efa2d23794c02a82 | refs/heads/master | 2022-07-02T00:47:12.283366 | 2020-05-12T01:05:45 | 2020-05-12T01:05:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 610 | py | import sys
sys.path.append('/Users/allisonhonold/ds0805/advent_of_code')
from day_5.day5_pt2 import get_input
from intcode9 import IntcodeComp
def main():
    """Solve Advent of Code day 9: run the Intcode program in both modes.

    Mode 1 produces the part-1 BOOST keycode, mode 2 the part-2
    distress-signal coordinates.
    """
    program = get_input('day9_input.txt')
    # Each pass: print the banner, then run a fresh computer on the program.
    for mode, banner in ((1, 'BOOST keycode:'), (2, 'Distress signal coordinates:')):
        print(banner)
        IntcodeComp(program, mode).compute()
if __name__ == "__main__":
    main()
"allisonhonold@gmail.com"
] | allisonhonold@gmail.com |
e95d2a546a58bdc936ed45628786ab9b51567146 | f0a1d5be3b92427757b8520cc78556e6a510afba | /layers/convolution.py | e02b79f55a8fb1537b002b1e6a08c5e13c3e783b | [] | no_license | yungu-imr/keita | 8bbabc91e000d164f688f44f4f45fee580c44c1b | 3f027445748eadd06111f3d385cecb1b8e8e5cf4 | refs/heads/master | 2023-08-17T23:12:52.817848 | 2017-08-19T17:02:49 | 2017-08-19T17:02:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,298 | py | """
Layers to do with convolution.
"""
from torch import nn
import torch.nn.functional as F
class SeparableConv2d(nn.Module):
    """Depthwise-separable 2D convolution: a per-channel 3x3 depthwise conv
    followed by a 1x1 pointwise conv, each with batch norm and ReLU.

    WARNING: Very slow! Unoptimized for PyTorch.
    """
    def __init__(self, in_channels, out_channels, stride):
        super().__init__()
        # 3x3 conv applied to each input channel independently (groups == in_channels).
        self.depthwise = nn.Conv2d(in_channels, in_channels, 3,
                                   stride=stride, padding=1,
                                   groups=in_channels, bias=False)
        self.batch_norm_in = nn.BatchNorm2d(in_channels)
        # 1x1 conv that mixes channels and changes the channel count.
        self.pointwise = nn.Conv2d(in_channels, out_channels, 1,
                                   stride=1, padding=0, bias=False)
        self.batch_norm_out = nn.BatchNorm2d(out_channels)
        self.activation = nn.ReLU(inplace=True)

    def forward(self, x):
        # Depthwise stage: conv -> BN -> ReLU.
        features = self.activation(self.batch_norm_in(self.depthwise(x)))
        # Pointwise stage: conv -> BN -> ReLU.
        return self.activation(self.batch_norm_out(self.pointwise(features)))
class CausalConv1d(nn.Conv1d):
    """Causal (left-padded) dilated 1D convolution.

    Output position t depends only on input positions <= t, and the output
    length equals the input length.
    """
    def __init__(self, in_channels, out_channels, kernel_size, stride=1, dilation=1, groups=1, bias=True):
        # Base conv gets zero symmetric padding; we pad asymmetrically ourselves.
        super().__init__(in_channels, out_channels, kernel_size, stride=stride,
                         padding=0, dilation=dilation, groups=groups, bias=bias)
        # Number of zeros to prepend so the output is the same length as the input.
        self.left_padding = dilation * (kernel_size - 1)

    def forward(self, inputs):
        """
        A 1D dilated convolution w/ padding such that the output
        is the same size as the input.
        :param inputs: (batch size, # channels, height)
        :return: (batch size, # channels, height)
        """
        # A 2-tuple pad argument pads the last dimension only: (left, right).
        padded = F.pad(inputs, (self.left_padding, 0))
        return super().forward(padded)
if __name__ == "__main__":
    import torch

    # Conv layers require floating-point input: on PyTorch >= 0.4,
    # torch.arange with integer bounds yields int64, which nn.Conv1d rejects,
    # so the dtype is made explicit. torch.autograd.Variable is deprecated —
    # plain tensors participate in autograd directly.
    image = torch.arange(0, 4, dtype=torch.float32).unsqueeze(0).unsqueeze(0)

    layer = CausalConv1d(in_channels=1, out_channels=1, kernel_size=2, dilation=1)
    # All-ones kernel with zero bias: output[t] = x[t-1] + x[t], x[-1] == 0.
    layer.weight.data.fill_(1)
    layer.bias.data.fill_(0)

    print(image.data.numpy())
    print(layer(image).round().data.numpy())
"kiwasaki@connect.ust.hk"
] | kiwasaki@connect.ust.hk |
a64c441f9fb98b7b2b1f876c18a4cdbd8043064c | e5d7cfc7b7212090d72e0040f30ae5ac89c34550 | /ui/ui_camera.py | 4344e31c17129ec8c9b4520a9138ec2510fe4e47 | [] | no_license | eagleqq/MyCV | bfba91af1a4fe7b7f29a7ab7bf3274b6d13b6216 | 298402fdd5f564f98fd80fb75875b570d5ae247d | refs/heads/master | 2020-12-30T04:24:43.923637 | 2020-03-11T05:27:08 | 2020-03-11T05:27:08 | 238,859,165 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,664 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui_camera.ui'
#
# Created by: PyQt5 UI code generator 5.14.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    def setupUi(self, MainWindow):
        """Build the static widget tree for the main window.

        Auto-generated by pyuic5 from ui_camera.ui; edits here are lost on
        regeneration. Creates the camera group, image group, tabbed parameter
        panel, radio-button group, menus and status bar, then applies the
        translatable strings via retranslateUi.
        """
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(1338, 800)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # groupBox: camera preview (label_camera) plus open/close/snapshot buttons.
        self.groupBox = QtWidgets.QGroupBox(self.centralwidget)
        self.groupBox.setGeometry(QtCore.QRect(10, 10, 651, 581))
        self.groupBox.setObjectName("groupBox")
        self.verticalLayout_4 = QtWidgets.QVBoxLayout(self.groupBox)
        self.verticalLayout_4.setObjectName("verticalLayout_4")
        self.verticalLayout_3 = QtWidgets.QVBoxLayout()
        self.verticalLayout_3.setObjectName("verticalLayout_3")
        self.label_camera = QtWidgets.QLabel(self.groupBox)
        self.label_camera.setMinimumSize(QtCore.QSize(0, 574))
        self.label_camera.setStyleSheet("background-color: rgb(255, 255, 255);")
        self.label_camera.setText("")
        self.label_camera.setScaledContents(True)
        self.label_camera.setObjectName("label_camera")
        self.verticalLayout_3.addWidget(self.label_camera)
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.pushButton_open = QtWidgets.QPushButton(self.groupBox)
        self.pushButton_open.setObjectName("pushButton_open")
        self.horizontalLayout.addWidget(self.pushButton_open)
        self.pushButton_close = QtWidgets.QPushButton(self.groupBox)
        self.pushButton_close.setObjectName("pushButton_close")
        self.horizontalLayout.addWidget(self.pushButton_close)
        self.pushButton_takephoto = QtWidgets.QPushButton(self.groupBox)
        self.pushButton_takephoto.setObjectName("pushButton_takephoto")
        self.horizontalLayout.addWidget(self.pushButton_takephoto)
        self.verticalLayout_3.addLayout(self.horizontalLayout)
        self.verticalLayout_4.addLayout(self.verticalLayout_3)
        # tabWidget: parameter tabs; tab_canny holds the Canny threshold sliders.
        self.tabWidget = QtWidgets.QTabWidget(self.centralwidget)
        self.tabWidget.setGeometry(QtCore.QRect(670, 440, 651, 271))
        self.tabWidget.setStyleSheet("background-color: rgb(239, 235, 231);")
        self.tabWidget.setUsesScrollButtons(True)
        self.tabWidget.setDocumentMode(True)
        self.tabWidget.setTabsClosable(False)
        self.tabWidget.setMovable(True)
        self.tabWidget.setTabBarAutoHide(True)
        self.tabWidget.setObjectName("tabWidget")
        self.tab_canny = QtWidgets.QWidget()
        self.tab_canny.setObjectName("tab_canny")
        # Sliders cover the 0-255 threshold range used by the Canny controls.
        self.horizontalSlider_canny_max = QtWidgets.QSlider(self.tab_canny)
        self.horizontalSlider_canny_max.setGeometry(QtCore.QRect(130, 80, 411, 16))
        self.horizontalSlider_canny_max.setMaximum(255)
        self.horizontalSlider_canny_max.setProperty("value", 0)
        self.horizontalSlider_canny_max.setOrientation(QtCore.Qt.Horizontal)
        self.horizontalSlider_canny_max.setObjectName("horizontalSlider_canny_max")
        self.horizontalSlider_canny_min = QtWidgets.QSlider(self.tab_canny)
        self.horizontalSlider_canny_min.setGeometry(QtCore.QRect(130, 120, 411, 16))
        self.horizontalSlider_canny_min.setMaximum(255)
        self.horizontalSlider_canny_min.setOrientation(QtCore.Qt.Horizontal)
        self.horizontalSlider_canny_min.setObjectName("horizontalSlider_canny_min")
        self.label_canny_max = QtWidgets.QLabel(self.tab_canny)
        self.label_canny_max.setGeometry(QtCore.QRect(10, 80, 111, 21))
        self.label_canny_max.setObjectName("label_canny_max")
        self.label_canny_min = QtWidgets.QLabel(self.tab_canny)
        self.label_canny_min.setGeometry(QtCore.QRect(10, 110, 111, 21))
        self.label_canny_min.setObjectName("label_canny_min")
        self.checkBox_canny_use = QtWidgets.QCheckBox(self.tab_canny)
        self.checkBox_canny_use.setGeometry(QtCore.QRect(570, 100, 61, 20))
        self.checkBox_canny_use.setObjectName("checkBox_canny_use")
        self.tabWidget.addTab(self.tab_canny, "")
        # tab_2..tab_4: placeholder pages populated only with titles (see retranslateUi).
        self.tab_2 = QtWidgets.QWidget()
        self.tab_2.setObjectName("tab_2")
        self.tabWidget.addTab(self.tab_2, "")
        self.tab_3 = QtWidgets.QWidget()
        self.tab_3.setObjectName("tab_3")
        self.tabWidget.addTab(self.tab_3, "")
        self.tab_4 = QtWidgets.QWidget()
        self.tab_4.setObjectName("tab_4")
        self.tabWidget.addTab(self.tab_4, "")
        # groupBox_2: row of mutually exclusive radio buttons.
        self.groupBox_2 = QtWidgets.QGroupBox(self.centralwidget)
        self.groupBox_2.setGeometry(QtCore.QRect(10, 610, 651, 121))
        self.groupBox_2.setObjectName("groupBox_2")
        self.radioButton = QtWidgets.QRadioButton(self.groupBox_2)
        self.radioButton.setGeometry(QtCore.QRect(30, 40, 101, 20))
        self.radioButton.setObjectName("radioButton")
        self.radioButton_2 = QtWidgets.QRadioButton(self.groupBox_2)
        self.radioButton_2.setGeometry(QtCore.QRect(140, 40, 91, 20))
        self.radioButton_2.setObjectName("radioButton_2")
        self.radioButton_3 = QtWidgets.QRadioButton(self.groupBox_2)
        self.radioButton_3.setGeometry(QtCore.QRect(260, 40, 101, 20))
        self.radioButton_3.setObjectName("radioButton_3")
        self.radioButton_4 = QtWidgets.QRadioButton(self.groupBox_2)
        self.radioButton_4.setGeometry(QtCore.QRect(390, 40, 101, 20))
        self.radioButton_4.setObjectName("radioButton_4")
        self.radioButton_5 = QtWidgets.QRadioButton(self.groupBox_2)
        self.radioButton_5.setGeometry(QtCore.QRect(520, 40, 101, 20))
        self.radioButton_5.setObjectName("radioButton_5")
        # groupBox_3: still-image display (label_image) with open/save buttons.
        self.groupBox_3 = QtWidgets.QGroupBox(self.centralwidget)
        self.groupBox_3.setGeometry(QtCore.QRect(670, 10, 641, 411))
        self.groupBox_3.setObjectName("groupBox_3")
        self.label_image = QtWidgets.QLabel(self.groupBox_3)
        self.label_image.setGeometry(QtCore.QRect(20, 40, 491, 361))
        self.label_image.setMinimumSize(QtCore.QSize(0, 340))
        self.label_image.setMaximumSize(QtCore.QSize(16777215, 16777215))
        self.label_image.setStyleSheet("background-color: rgb(255, 255, 255);")
        self.label_image.setText("")
        self.label_image.setScaledContents(True)
        self.label_image.setObjectName("label_image")
        self.pushButton_openPic = QtWidgets.QPushButton(self.groupBox_3)
        self.pushButton_openPic.setGeometry(QtCore.QRect(520, 40, 111, 26))
        self.pushButton_openPic.setObjectName("pushButton_openPic")
        self.pushButton_savePic = QtWidgets.QPushButton(self.groupBox_3)
        self.pushButton_savePic.setGeometry(QtCore.QRect(520, 370, 101, 26))
        self.pushButton_savePic.setObjectName("pushButton_savePic")
        self.label_imageSize = QtWidgets.QLabel(self.groupBox_3)
        self.label_imageSize.setGeometry(QtCore.QRect(30, 50, 121, 21))
        self.label_imageSize.setStyleSheet("color: rgb(255, 0, 0);")
        self.label_imageSize.setText("")
        self.label_imageSize.setObjectName("label_imageSize")
        MainWindow.setCentralWidget(self.centralwidget)
        # Menu bar with four top-level menus and their actions.
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 1338, 23))
        self.menubar.setStyleSheet("")
        self.menubar.setDefaultUp(False)
        self.menubar.setObjectName("menubar")
        self.menu = QtWidgets.QMenu(self.menubar)
        self.menu.setObjectName("menu")
        self.menu_2 = QtWidgets.QMenu(self.menubar)
        self.menu_2.setObjectName("menu_2")
        self.menu_3 = QtWidgets.QMenu(self.menubar)
        self.menu_3.setObjectName("menu_3")
        self.menu_4 = QtWidgets.QMenu(self.menubar)
        self.menu_4.setObjectName("menu_4")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.actionXuanzhuan = QtWidgets.QAction(MainWindow)
        self.actionXuanzhuan.setObjectName("actionXuanzhuan")
        self.actionXuanzhuan_2 = QtWidgets.QAction(MainWindow)
        self.actionXuanzhuan_2.setObjectName("actionXuanzhuan_2")
        self.actionRed = QtWidgets.QAction(MainWindow)
        self.actionRed.setObjectName("actionRed")
        self.actionGreen = QtWidgets.QAction(MainWindow)
        self.actionGreen.setObjectName("actionGreen")
        self.actionBlue = QtWidgets.QAction(MainWindow)
        self.actionBlue.setObjectName("actionBlue")
        self.actionOpenCv = QtWidgets.QAction(MainWindow)
        self.actionOpenCv.setObjectName("actionOpenCv")
        self.action = QtWidgets.QAction(MainWindow)
        self.action.setObjectName("action")
        self.action_2 = QtWidgets.QAction(MainWindow)
        self.action_2.setObjectName("action_2")
        self.actionRgb = QtWidgets.QAction(MainWindow)
        self.actionRgb.setObjectName("actionRgb")
        self.actionPingyi = QtWidgets.QAction(MainWindow)
        self.actionPingyi.setObjectName("actionPingyi")
        self.action_3 = QtWidgets.QAction(MainWindow)
        self.action_3.setObjectName("action_3")
        self.actionYasuo = QtWidgets.QAction(MainWindow)
        self.actionYasuo.setObjectName("actionYasuo")
        self.actionJieya = QtWidgets.QAction(MainWindow)
        self.actionJieya.setObjectName("actionJieya")
        self.menu.addAction(self.actionXuanzhuan)
        self.menu.addAction(self.actionXuanzhuan_2)
        self.menu.addAction(self.actionPingyi)
        self.menu.addAction(self.action_3)
        self.menu_2.addAction(self.actionRed)
        self.menu_2.addAction(self.actionGreen)
        self.menu_2.addAction(self.actionBlue)
        self.menu_2.addAction(self.actionRgb)
        self.menu_3.addAction(self.actionOpenCv)
        self.menu_3.addAction(self.action)
        self.menu_3.addAction(self.action_2)
        self.menu_4.addAction(self.actionYasuo)
        self.menu_4.addAction(self.actionJieya)
        self.menubar.addAction(self.menu.menuAction())
        self.menubar.addAction(self.menu_2.menuAction())
        self.menubar.addAction(self.menu_4.menuAction())
        self.menubar.addAction(self.menu_3.menuAction())
        # Apply translated display strings, select the first tab, and connect
        # auto-named slots (on_<object>_<signal>) by introspection.
        self.retranslateUi(MainWindow)
        self.tabWidget.setCurrentIndex(0)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
    """Apply all user-visible strings to the UI widgets.

    Auto-generated (pyuic-style) translation hook: every literal is routed
    through ``QCoreApplication.translate`` with the "MainWindow" context so
    Qt's translation tooling can substitute localized text. Invoked from the
    end of ``setupUi``.

    The UI text is Chinese; the app is an "OpenCV image-processing teaching
    demo system V1.0" with camera controls, Canny edge-detection settings,
    tabs for smoothing/geometric-transform/compression demos, color-tracking
    menus, and compress/decompress actions.

    :param MainWindow: top-level QMainWindow whose window title is set here.
    """
    _translate = QtCore.QCoreApplication.translate
    # Window title: "OpenCV image processing teaching demo system V1.0".
    MainWindow.setWindowTitle(_translate("MainWindow", "OpenCv图像处理教学演示系统V1.0"))
    # Camera group box and its control buttons (open / close / take photo).
    self.groupBox.setTitle(_translate("MainWindow", "摄像头"))
    self.pushButton_open.setText(_translate("MainWindow", "打开摄像头"))
    self.pushButton_close.setText(_translate("MainWindow", "关闭摄像头"))
    self.pushButton_takephoto.setText(_translate("MainWindow", "拍照"))
    # Canny threshold labels ("upper threshold: 0/255", "lower threshold: 0/255")
    # and the "enable" checkbox for edge detection.
    self.label_canny_max.setText(_translate("MainWindow", "最高阈值:0/255"))
    self.label_canny_min.setText(_translate("MainWindow", "最低阈值:0/255"))
    self.checkBox_canny_use.setText(_translate("MainWindow", "启用"))
    # Tab captions: edge detection / image smoothing / geometric transforms /
    # compression & decompression.
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_canny), _translate("MainWindow", "边缘检测"))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("MainWindow", "平滑图像"))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_3), _translate("MainWindow", "几何变换"))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_4), _translate("MainWindow", "压缩解压"))
    # "Advanced settings" group with display-mode radio buttons
    # ("original image" / "edge detection").
    self.groupBox_2.setTitle(_translate("MainWindow", "高级设置"))
    self.radioButton.setText(_translate("MainWindow", "原始图像"))
    self.radioButton_2.setText(_translate("MainWindow", "边缘检测"))
    self.radioButton_3.setText(_translate("MainWindow", "原始图像"))
    self.radioButton_4.setText(_translate("MainWindow", "原始图像"))
    self.radioButton_5.setText(_translate("MainWindow", "原始图像"))
    # "Picture" group: open / save image buttons.
    self.groupBox_3.setTitle(_translate("MainWindow", "图片"))
    self.pushButton_openPic.setText(_translate("MainWindow", "打开图片"))
    self.pushButton_savePic.setText(_translate("MainWindow", "保存图片"))
    # Menu-bar titles: geometric transforms / color-space conversion /
    # help / compression.
    self.menu.setTitle(_translate("MainWindow", "几何变换"))
    self.menu_2.setTitle(_translate("MainWindow", "颜色空间转换"))
    self.menu_3.setTitle(_translate("MainWindow", "帮助"))
    self.menu_4.setTitle(_translate("MainWindow", "压缩解压"))
    # Geometric-transform actions: flip / rotate.
    self.actionXuanzhuan.setText(_translate("MainWindow", "翻转"))
    self.actionXuanzhuan_2.setText(_translate("MainWindow", "旋转"))
    # Color-tracking actions: track red / green / blue / all three.
    self.actionRed.setText(_translate("MainWindow", "追踪红色"))
    self.actionGreen.setText(_translate("MainWindow", "追踪绿色"))
    self.actionBlue.setText(_translate("MainWindow", "追踪蓝色"))
    # Help-menu actions: OpenCV API reference / usage guide / copyright notice.
    self.actionOpenCv.setText(_translate("MainWindow", "OpenCv功能接口"))
    self.action.setText(_translate("MainWindow", "软件使用说明"))
    self.action_2.setText(_translate("MainWindow", "版权说明"))
    self.actionRgb.setText(_translate("MainWindow", "追踪红绿蓝"))
    # Remaining geometric-transform actions: translate / scale.
    self.actionPingyi.setText(_translate("MainWindow", "平移"))
    self.action_3.setText(_translate("MainWindow", "缩放"))
    # Compression actions: compress image / decompress image.
    self.actionYasuo.setText(_translate("MainWindow", "压缩图片"))
    self.actionJieya.setText(_translate("MainWindow", "解压图片"))
| [
"2260216684@qq.com"
] | 2260216684@qq.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.