text stringlengths 38 1.54M |
|---|
from functions import toString
from models import *
def printMenu():
    """Print the numbered menu of available expense-tracker commands."""
    options = (
        "Menu:",
        "1.Add a new expense into the list",
        "2.Insert a new expense into the list",
        "3.Remove all the expenses for a day",
        "4.Remove all the expenses between two days",
        "5.Remove all the expenses for a category",
        "6.List the entire list of expenses",
        "7.List all the expenses for a category",
        "8.List all the expenses with a property",
        "9.Calculate the total expense for a category",
        "10.Find the day with the maximum expenses",
        "11.Sort the total daily expenses in ascending order by amount of money spent",
        "12.Sort the daily expenses for a category in ascending order by amount of money spent",
        "13.Filter the expenses for a category",
        "14.Filter the expenses with a property",
        "15.Undo",
    )
    for option in options:
        print(option)
def writeList(Expenses):
    """Print every expense as "<day> <amount> RON <category>", one per line.

    ``Expenses`` is a list of per-day lists of expense records; accessor
    helpers (getDay/getAmount/getCategory) come from the models module.
    """
    for day_expenses in Expenses:
        for expense in day_expenses:
            print(toString(getDay(expense)) + " " + toString(getAmount(expense)) + " RON " + toString(getCategory(expense)))
def writeCategory(Expenses, new_category):
    """Print a header and then every expense belonging to ``new_category``."""
    print("Expenses for " + toString(new_category))
    for day_expenses in Expenses:
        for expense in day_expenses:
            if getCategory(expense) == new_category:
                print(toString(getDay(expense)) + " " + toString(getAmount(expense)) + " RON")
def writeProperty(Expenses, new_category, symbol, new_amount):
    """Print expenses of ``new_category`` whose amount compares to ``new_amount``.

    :param Expenses: list of per-day lists of expense records
    :param new_category: category the expense must belong to
    :param symbol: "<" or "=" selects that comparison; any other value
        behaves as ">" (same fallback as the original elif chain)
    :param new_amount: threshold; converted to int before comparing
    """
    new_amount = int(new_amount)
    comparators = {
        "<": lambda amount, limit: amount < limit,
        "=": lambda amount, limit: amount == limit,
    }
    # Unknown symbols fall through to ">" — matches the original `else` branch.
    matches = comparators.get(symbol, lambda amount, limit: amount > limit)
    for day_expenses in Expenses:
        for expense in day_expenses:
            if getCategory(expense) == new_category and matches(getAmount(expense), new_amount):
                print(toString(getDay(expense)) + " " + toString(getAmount(expense)) + " RON " + toString(getCategory(expense)))
def writeSortedList(Expenses):
    """Print a flat (already sorted) list of expenses, one per line."""
    for expense in Expenses:
        print(toString(getDay(expense)) + " " + toString(getAmount(expense)) + " RON " + toString(getCategory(expense)))
|
import numpy as np

# Advent-of-Code-style "Game of Life" light grid, part 2 (stuck corner lights).
GRID_SIZE = 100
STEPS = 100


def parse_grid(rows, size=GRID_SIZE):
    """Build a size x size 0/1 array from rows of '#' (on) / '.' (off)."""
    grid = np.zeros((size, size))
    for i, row in enumerate(rows):
        for j, light in enumerate(row):
            if light == '#':
                grid[i, j] = 1
            elif light == '.':
                grid[i, j] = 0
    return grid


def step(grid):
    """Return the next generation of ``grid`` under Game-of-Life rules.

    A lit cell stays lit with 2 or 3 lit neighbours; a dark cell lights up
    with exactly 3. BUG FIX: the original neighbour check used
    ``x < size_y-1`` for the (y+1, x+1) neighbour, silently dropping it on
    non-square grids; bounds are now checked against the correct axis.
    """
    size_y, size_x = grid.shape
    new_grid = np.zeros(grid.shape)
    for y in range(size_y):
        for x in range(size_x):
            lit_neighbours = 0
            for dy in (-1, 0, 1):
                for dx in (-1, 0, 1):
                    if dy == 0 and dx == 0:
                        continue
                    ny, nx = y + dy, x + dx
                    if 0 <= ny < size_y and 0 <= nx < size_x:
                        lit_neighbours += grid[ny, nx] == 1
            if grid[y, x] == 1:
                new_grid[y, x] = 1 if lit_neighbours in (2, 3) else 0
            else:
                new_grid[y, x] = 1 if lit_neighbours == 3 else 0
    return new_grid


def main():
    # `with` ensures the input file is closed (the original leaked the handle).
    with open('input.txt') as input_data:
        rows = [line for line in input_data]
    grid = parse_grid(rows)
    for _ in range(STEPS):
        grid = step(grid)
        # Part-2 rule, as in the original: the four corners are forced on
        # after every step (note: not applied to the initial grid).
        last = GRID_SIZE - 1
        grid[0, 0] = grid[0, last] = grid[last, 0] = grid[last, last] = 1.0
    print(np.count_nonzero(grid == 1))


if __name__ == '__main__':
    main()
# Generated by Django 3.0.2 on 2020-02-16 06:52
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema: admin, follow, image and Users tables plus seed rows.

    BUG FIX: both RunSQL seed statements were missing the closing
    parenthesis of their VALUES(...) clause, making the SQL invalid and the
    migration fail at apply time.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='admin',
            fields=[
                ('admin_id', models.AutoField(primary_key=True, serialize=False)),
                ('fname', models.CharField(max_length=25)),
                ('lname', models.CharField(max_length=25)),
                ('adminname', models.CharField(max_length=25)),
                ('password', models.CharField(max_length=32)),
            ],
            options={
                'db_table': 'admin',
            },
        ),
        migrations.CreateModel(
            name='follow',
            fields=[
                ('follow_id', models.AutoField(primary_key=True, serialize=False)),
                ('user_id', models.IntegerField()),
                ('profile_id', models.IntegerField()),
            ],
            options={
                'db_table': 'follow',
            },
        ),
        migrations.CreateModel(
            name='image',
            fields=[
                ('image_id', models.AutoField(primary_key=True, serialize=False)),
                ('image', models.ImageField(default='profile.jpg', upload_to='images/')),
                ('user_id', models.IntegerField()),
            ],
            options={
                'db_table': 'Image',
            },
        ),
        migrations.CreateModel(
            name='Users',
            fields=[
                ('user_id', models.AutoField(primary_key=True, serialize=False)),
                ('fname', models.CharField(max_length=25)),
                ('lname', models.CharField(max_length=25)),
                ('username', models.CharField(max_length=50)),
                ('email', models.CharField(max_length=50)),
                ('password', models.CharField(max_length=32)),
                ('bio', models.TextField(max_length=500)),
                ('image', models.ImageField(default='profile.jpg', upload_to='')),
            ],
            options={
                'db_table': 'Users',
            },
        ),
        # Seed rows; closing parentheses added to the VALUES clauses.
        migrations.RunSQL("INSERT into Users(username,email,password,image) VALUES('admin','admin@gmail.com','admin','avatar.jpg')"),
        migrations.RunSQL("INSERT into admin(adminname,password) VALUES('admin','admin')")
    ]
|
# -*- coding: utf-8 -*-
from urllib import request
import tempfile
import uuid
from datetime import datetime
from google.cloud import datastore
from google.cloud import storage
PROJECT_ID = "persian-172808"
DATA_STORE_KEY_PATH = "/home/vagrant/.json_keys/persian-3a9988725cae.json"
STORAGE_STORE_KEY_PATH = "/home/vagrant/.json_keys/persian-efe392f65854.json"
BUCKET_NAME = "persian-172808.appspot.com"
class Repository(object):
    '''Repository for blog data.

    Currently only the save path is implemented: images go to Cloud
    Storage, metadata goes to Cloud Datastore.
    '''

    def __init__(self):
        '''Create the Cloud Storage / Cloud Datastore clients and prepare
        the Storage bucket and the Datastore key used for blog entities.
        '''
        self.storage_client = storage.Client()
        self.datastore_client = datastore.Client.from_service_account_json(
            DATA_STORE_KEY_PATH, project=PROJECT_ID)
        self.bucket = self.storage_client.get_bucket(BUCKET_NAME)
        self.key = self.datastore_client.key('blog_data')

    def __call__(self, items):
        '''Entry point for saving data — alias of ``self._register()``.
        Call this rather than the private methods.
        '''
        self._register(items)

    def _register(self, items):
        '''Save *items* unless it has no image or is already stored.'''
        if not items['image'] or self._duplicate(items):
            return
        self._put(items)

    def _duplicate(self, items):
        '''Return True for an already-stored article_url, False for new data.'''
        query = self.datastore_client.query(kind='blog_data')
        query.add_filter('article_url', '=', items['url'])
        return len(list(query.fetch())) != 0

    def _put(self, items):
        '''Persist *items*: image to Storage, then entity to Datastore.'''
        self._put_storage(items)
        self._put_datastore(items)

    def _put_storage(self, items):
        '''Upload the article image to Cloud Storage and record its public URL
        back into *items* under 'image_store_url'.
        '''
        # items['image'] is a protocol-relative URL, hence the 'http:' prefix.
        image_response = request.urlopen('http:' + items['image'])
        image_blob = self.bucket.blob('blog_image' + '/' + uuid.uuid4().hex)
        image_blob.upload_from_string(image_response.read())
        image_blob.make_public()
        items['image_store_url'] = image_blob.public_url

    def _put_datastore(self, items):
        '''Create the Datastore entity describing *items*.'''
        entity = datastore.Entity(self.key, exclude_from_indexes=('text', 'title'))
        entity.update({
            'image_url': items['image_store_url'],
            'title': items['title'],
            'text': items['text'],
            'article_url': items['url'],
            'created': datetime.now(),
        })
        self.datastore_client.put(entity)
def binary_to_octal(biner):
    """Convert a string of binary digits to its octal digits, returned as an int.

    Equivalent to the original group-of-three lookup table: the binary string
    is (implicitly) left-padded to a multiple of three, each triple becomes one
    octal digit, and the resulting digit string is parsed with int(), which
    drops leading zeros — e.g. "1000" -> 10.

    :param biner: string of '0'/'1' characters
    :return: the octal digit string as an int
    :raises ValueError: on empty or non-binary input (the original silently
        skipped unrecognised triples, which hid bad input)
    """
    return int(format(int(biner, 2), 'o'))


if __name__ == '__main__':
    print(binary_to_octal(input()))
|
"""
Module with logic to handle different types of payloads in Slack
"""
import abc
import logging
from slackviews import View
from werkzeug.datastructures import ImmutableDict
# -- helper
def get_obj_attr(object, item, missing_value=None, join_with=None, transform=None):
    """
    Resolve a (possibly dotted) attribute path on *object*, with optional
    default, per-item transformation and joining of list values.

    :param object: The object to look for the attribute
    :param item: The attribute name, or a dot-separated path of names
    :param missing_value: Value returned when any path segment is missing
    :param join_with: If the value is a list, join its items with this string
    :param transform: Callable applied to the value, or to each list item;
        must be appropriate for the value's actual type
    :return: The (possibly transformed/joined) value, or *missing_value*
    """
    # Walk every parent segment of the dotted path first.
    *parents, leaf = item.split('.')
    for segment in parents:
        if not hasattr(object, segment):
            return missing_value
        object = getattr(object, segment)
    if not hasattr(object, leaf):
        return missing_value
    value = getattr(object, leaf)
    if isinstance(value, list):
        if transform and join_with:
            assert isinstance(join_with, str), f'{join_with} must be a string'
            return join_with.join([transform(element) for element in value])
        if transform:
            return [transform(element) for element in value]
        if join_with:
            return join_with.join(value)
        return value
    return transform(value) if transform else value
# -- model classes to handle data easier
class Serializable:
    """
    Interface for objects that can be serialized to a plain dictionary.
    """

    def serialize(self, *skip_fields):
        """
        Serialize this object's attributes as a dictionary.

        :param skip_fields: Field names to omit (applied recursively to
            nested serializable values as well)
        :return: A dictionary of attribute name -> (recursively serialized) value
        """
        return {
            name: (value.serialize(*skip_fields) if hasattr(value, 'serialize') else value)
            for name, value in self.__dict__.items()
            if name not in skip_fields
        }
class DictionaryField(Serializable):
    """
    Wraps a keyword mapping as an object, recursively converting nested
    dictionaries into DictionaryField instances so they can be accessed
    with attribute syntax.
    """

    def __init__(self, **kwargs):
        for field_name, field_value in kwargs.items():
            wrapped = DictionaryField(**field_value) if isinstance(field_value, dict) else field_value
            setattr(self, field_name, wrapped)
class Command(ImmutableDict):
    __metaclass__ = abc.ABCMeta
    """
    Wraps the form payload received when a Slack slash command is invoked.
    It is a thin layer over ImmutableDict exposing the well-known form
    fields through accessor methods instead of raw dictionary lookups.
    """

    def __init__(self, _form, **kwargs):
        super().__init__(_form)
        # Subclasses validate their own command/arguments on construction.
        self._verify_command()

    def token(self):
        """Verification token sent by Slack, as a string."""
        return self.get('token')

    def team_id(self):
        """Id of the workspace the command came from, as a string."""
        return self.get('team_id')

    def team_domain(self):
        """Domain of the workspace the command came from, as a string."""
        return self.get('team_domain')

    def channel_id(self):
        """Id of the channel where the command was typed, as a string."""
        return self.get('channel_id')

    def channel_name(self):
        """Name of the channel where the command was typed, as a string."""
        return self.get('channel_name')

    def user_id(self):
        """Id of the invoking user, as a string."""
        return self.get('user_id')

    def user_name(self):
        """Name of the invoking user, as a string."""
        return self.get('user_name')

    def command(self):
        """The slash command that was invoked, as a string."""
        return self.get('command')

    def text(self):
        """Raw argument string typed after the command."""
        return self.get('text')

    def response_url(self):
        """URL to send the command's response to."""
        return self.get('response_url')

    def trigger_id(self):
        """Unique id of the action that started the user/bot interaction."""
        return self.get('trigger_id')

    def contains_command(self, command):
        """
        Check whether *command* occurs in the invoked command string.
        :return: True if it does, False otherwise
        """
        return command in self.command()

    def contains_argument(self, arg):
        """
        Check whether *arg* occurs in the argument text.
        :return: True if it does, False otherwise
        """
        return arg in self.text()

    def parse_args(self):
        """
        Split the argument text into a list of arguments, in order.
        :return: A list of argument strings ([] when none were given)
        """
        text = self.text()
        return text.split(' ') if text else []

    def num_args(self):
        """
        :return: How many arguments were supplied, as an integer
        """
        return len(self.parse_args())

    @abc.abstractmethod
    def _verify_command(self):
        """
        Validate that the command carries correct arguments for execution.
        :return: True if arguments are correct. False otherwise
        """
        raise NotImplementedError()
# -- interactions
class HasBlocks:
    """
    Represents a interface that provides a method to get the blocks of an interaction
    """
    # NOTE(review): `__metaclass__ = ...` is Python 2 syntax and has no effect
    # on Python 3, so abstractness is not enforced by the metaclass here; the
    # explicit NotImplementedError raises below are what actually guard misuse.
    __metaclass__ = abc.ABCMeta

    def __init__(self):
        # Interface marker: direct instantiation is not supported.
        raise NotImplementedError()

    @abc.abstractmethod
    def blocks(self):
        """
        Provides the blocks of last interaction
        :return: An array with blocks or an empty array
        """
        raise NotImplementedError()
class HasInputAction:
    """
    Represents a interface that provides a method to obtain a given input action in 'values' or 'actions' fields
    """
    # NOTE(review): Python 2 style metaclass assignment — no effect on Python 3.
    __metaclass__ = abc.ABCMeta

    def __init__(self):
        # Interface marker: direct instantiation is not supported.
        raise NotImplementedError()

    @abc.abstractmethod
    def get_input_action(self, block_id, action_id):
        """
        Checks if given block_id has an action_id is in submitted 'values' or 'actions'
        :param block_id: The block id of the input being searched
        :param action_id: The action_id being searched
        :return: True if exists, False otherwise
        """
        raise NotImplementedError()
class HasPrivateMetadata:
    """
    Interface that provides access to privatemetadata
    """
    # NOTE(review): Python 2 style metaclass assignment — no effect on Python 3.
    __metaclass__ = abc.ABCMeta

    def private_metadata(self):
        """
        Provides the private metadata of interaction view, if any. In our
        model, if any content exists, it'll be given as a dictionary, since
        the format of private_metadata is field1=value1&field2=value2...
        A dictionary is built with supplied data
        :return: The private metadata content as dictionary
        """
        raise NotImplementedError()
class HasView(HasPrivateMetadata):
    """
    Represents an interface with methods to access interaction's view data
    """
    # NOTE(review): Python 2 style metaclass assignment — no effect on Python 3.
    __metaclass__ = abc.ABCMeta

    def is_home(self):
        """
        Returns whether or not, current view is a "Home"
        :return: True if view is home, False otherwise
        """
        raise NotImplementedError()
class Interaction(DictionaryField, HasBlocks, HasInputAction):
    """
    Encapsulates the payload occurred in a message interaction (button, combo, date etc...)
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.logger = logging.getLogger(self.__class__.__name__)

    def is_block_actions(self):
        """True when the payload type is 'block_actions'."""
        return getattr(self, 'type') == 'block_actions'

    def is_view_submission(self):
        """True when the payload type is 'view_submission'."""
        return getattr(self, 'type') == 'view_submission'

    def is_view_closed(self):
        """True when the payload type is 'view_closed'."""
        return getattr(self, 'type') == 'view_closed'

    def search_block(self, block_id):
        """
        Searches for a given block_id in view's blocks (exact match, or
        block_id contained in the block's id).
        :param block_id: The id of the block to look for
        :return: The first matching block, or None if no block matches
        """
        def _matches(block):
            candidate = block.get('block_id')
            # BUG FIX: a block without 'block_id' made `block_id in None`
            # raise TypeError; such blocks are now treated as non-matching.
            return candidate is not None and (candidate == block_id or block_id in candidate)

        matching = [block for block in self.blocks() if _matches(block)]
        return matching[0] if matching else None

    def user_slack_id(self):
        """
        Provides the slack_id of the user who performed the interaction
        :return: The slack id as a string, or None if absent
        """
        return get_obj_attr(self, 'user.id', None)

    def get_selectmenu_value(self, block_id, action_id):
        """
        Provides the text, and the value of the selected element in a SelectMenu
        :param block_id: The block id of the input being searched
        :param action_id: The action_id being searched
        :return: (text, value) of the selected element
        """
        element = self.get_input_action(block_id, action_id)
        assert element.type == 'static_select', f'Wrong element type, it should be a static_select'
        value = get_obj_attr(element, 'selected_option.value')
        text = get_obj_attr(element, 'selected_option.text.text')
        self.logger.debug(f'\t text, value -> {text}, {value}')
        return text, value

    def get_textinput_value(self, block_id, action_id):
        """
        Provides the value of a PlainTextInput element, if found
        :param block_id: The block id of the input being searched
        :param action_id: The action_id being searched
        :return: The value of the PlainTextInput element
        """
        element = self.get_input_action(block_id, action_id)
        assert element.type == 'plain_text_input', f'Wrong element type, it should be a plain_text_input'
        value = get_obj_attr(element, 'value')
        # BUG FIX: the debug message used '\f' (form feed) — clearly a typo for '\t'.
        self.logger.debug(f'\t value -> {value}')
        return value
class ViewInteraction(Interaction, HasView):
    """
    Represent any interaction that contains view data
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self._private_metadata = None  # lazy cache for private_metadata()

    @staticmethod
    def private_metadata_string(dictionary):
        """
        Creates a private_metadata string (key1=value1&key2=value2...) from
        supplied dictionary
        :return: A string representing private metadata
        """
        return '&'.join([f'{key}={value}' for key, value in dictionary.items()])

    @staticmethod
    def private_metadata_dictionary(string):
        """
        Parses a private_metadata string (key1=value1&key2=value2...) into a dict
        :return: A dictionary representing private metadata
        """
        dict_ = dict()
        for f in string.split('&'):
            # BUG FIX: split only on the FIRST '=' so values that themselves
            # contain '=' (e.g. URLs with query strings) are not truncated.
            f = f.split('=', 1)
            dict_[f[0]] = f[1]
        return dict_

    def is_home(self):
        """True when the interaction's view is the App Home."""
        return get_obj_attr(self, 'view.type') == 'home'

    def private_metadata(self):
        """
        Provides the private metadata of the interaction's view parsed into a
        dictionary (format: field1=value1&field2=value2...), or None when the
        payload has no view or no metadata. Parsed lazily and cached.
        """
        # depending on interaction, it could be a message, and not a view.
        # BUG FIX: getattr now has a default, so a view object that lacks the
        # private_metadata attribute no longer raises AttributeError.
        if not hasattr(self, 'view') or not getattr(getattr(self, 'view'), 'private_metadata', None):
            return None
        if not self._private_metadata:
            _metadata = getattr(getattr(self, 'view'), 'private_metadata')
            self.logger.debug(f'converting {_metadata} to a dictionary....')
            dict_ = ViewInteraction.private_metadata_dictionary(_metadata)
            self.logger.debug(f'\t -> OK {dict_}')
            self._private_metadata = dict_
        return self._private_metadata

    def blocks(self):
        """Blocks of the interaction's view ([] when there is no view)."""
        if not hasattr(self, 'view'):
            return []
        elif isinstance(getattr(self, 'view'), View):
            # Our own View wrapper keeps blocks in a private attribute.
            return getattr(getattr(self, 'view'), '_blocks')
        else:
            return getattr(getattr(self, 'view'), 'blocks')
class ViewSubmission(ViewInteraction):
    """Interaction payload produced by a modal 'view_submission'."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    def get_input_action(self, block_id, action_id):
        """Look up the submitted input element for (block_id, action_id) in the view state."""
        self.logger.debug(f'searching for __{action_id}__ in input block __{block_id}__')
        state_path = f'view.state.values.{block_id}.{action_id}'
        element = get_obj_attr(self, state_path)
        self.logger.debug(f'\t -> FOUND -> __{element}__')
        return element
class NoActionIdException(Exception):
    """
    Exception returned when no action_id is found in interaction message
    """
class BlockActions(Interaction):
    """Interaction payload of type 'block_actions' (user acted on an element)."""
    __metaclass__ = abc.ABCMeta

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    def _first_action(self, field):
        """Return *field* of the first entry in 'actions'.

        BUG FIX: the original methods *returned* a NoActionIdException
        instance instead of raising it, so callers silently received an
        exception object; the exception is now raised.
        :raises NoActionIdException: when the payload has no 'actions' field
        """
        if not hasattr(self, 'actions'):
            raise NoActionIdException('No actions field in payload')
        return self.actions[0][field]

    def action_id(self):
        """
        Provides the action_id name of the first action
        :return: Current action id name as a string
        """
        return self._first_action('action_id')

    def action_value(self):
        """
        Provides the action value of the first action
        :return: Current action value as a string
        """
        return self._first_action('value')

    def action_element_type(self):
        """
        Provides the type of element that started the interaction, i.e. "button", ....
        :return: The type of element as a string
        """
        return self._first_action('type')

    def get_input_action(self, block_id, action_id):
        """Find the action matching (block_id, action_id) in the 'actions' array, or None."""
        self.logger.debug(f'searching for __{action_id}__ in input block __{block_id}__')
        actions = [DictionaryField(**a) for a in get_obj_attr(self, 'actions', missing_value=[])]
        element = next((act_ for act_ in actions
                        if getattr(act_, 'block_id') == block_id and
                        getattr(act_, 'action_id') == action_id), None)
        self.logger.debug(f'\t -> FOUND -> __{element}__')
        return element
class ViewBlocksInteraction(BlockActions, ViewInteraction):
    """Block action that happened inside a view (modal / home), carrying the view's blocks."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    def blocks(self):
        """Blocks of the interaction's view ([] when the payload has no view)."""
        if not hasattr(self, 'view'):
            return []
        current_view = getattr(self, 'view')
        # Our own View wrapper keeps blocks in a private attribute.
        attr = '_blocks' if isinstance(current_view, View) else 'blocks'
        return getattr(current_view, attr)
class MessageBlocksInteraction(BlockActions):
    """Block action triggered from a plain message (not a modal or home view)."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    def message_ts(self):
        """Timestamp of the message this interaction came from, as a string."""
        assert hasattr(self, 'container'), 'Wrong interaction type'
        return get_obj_attr(self, 'container.message_ts')

    def channel_id(self):
        """Id of the channel where this interaction started, as a string."""
        assert hasattr(self, 'channel'), 'Wrong interaction type'
        return get_obj_attr(self, 'channel.id')

    def channel_name(self):
        """Name of the channel where this interaction started, as a string."""
        assert hasattr(self, 'channel'), 'Wrong interaction type'
        return get_obj_attr(self, 'channel.name')

    def blocks(self):
        """Blocks array of the interaction message."""
        assert hasattr(self, 'message'), 'Wrong interaction type'
        return get_obj_attr(self, 'message.blocks')
|
class movies:
    """Simple movie record with demo report methods that print to stdout.

    BUG FIX: the original used Python 2 print statements, which are a
    syntax error on Python 3; converted to print() calls (same output).
    """

    def __init__(self, moviename, runtime, Genres, lang):
        self.moviename = moviename
        self.runtime = runtime
        self.Genres = Genres
        self.lang = lang

    def famous(self):
        """Print a tagline based on the movie's language."""
        if self.lang == 'Telugu':
            print("Ultimate BlockBuster")
        else:
            print("Marvel Movie")

    def croresTurnover(self):
        """Print a budget label based on the movie's name."""
        if self.moviename == 'Bahubali':
            print("croresTurnover")
        else:
            print("High BudgetMovie")

    def displaymovies(self):
        """Print every field of the record, one per line."""
        print("moviename:", self.moviename)
        print("runtime:", self.runtime)
        print("Genres:", self.Genres)
        print("language:", self.lang)
from django.db import models
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
# Create your models here.
class Order(models.Model):
    """A customer order: contact and shipping details plus payment state.

    Field verbose names are Azerbaijani (e.g. 'Sifariş' = order); they are
    user-facing strings and must stay as-is.
    """
    # Optional link to a registered buyer; NULL allows guest checkouts.
    user = models.ForeignKey('base_user.MyUser', related_name='user_orders', on_delete=models.CASCADE, null=True,
                             blank=True)
    first_name = models.CharField(_('Ad'), max_length=50)
    last_name = models.CharField(_('Soyad'), max_length=50)
    phone = models.CharField(_('Əlaqə Nömrəsi'), max_length=15)
    email = models.EmailField(_('E-poçt'))
    address = models.CharField(_('Adres'), max_length=250)
    city = models.CharField(_('Şəhər'), max_length=100)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    # Flipped when payment completes; defaults to unpaid.
    paid = models.BooleanField(default=False)

    class Meta:
        # Newest orders first.
        ordering = ('-created',)
        verbose_name = _('Sifariş')
        verbose_name_plural = _('Sifarişlər')

    def __str__(self):
        return f'{_("Sifariş")} - {self.id}'

    @property
    @admin.display(description=_("Ümumi Qiymət"))
    def get_total_cost(self):
        # Sum of every line item's cost (price * quantity).
        return sum(item.get_total_item_cost for item in self.items.all())
class OrderItem(models.Model):
    """A single line of an Order: one product, its unit price and quantity."""
    order = models.ForeignKey(Order, on_delete=models.CASCADE, related_name='items')
    product = models.ForeignKey('product.Product', on_delete=models.CASCADE, related_name='order_items')
    # Unit price captured at order time (so later product price changes don't alter the order).
    price = models.DecimalField(max_digits=10, decimal_places=2)
    quantity = models.PositiveIntegerField(default=1)
    # Size is optional and survives size-record deletion (SET_NULL).
    product_size = models.ForeignKey('product.ProductSize', on_delete=models.SET_NULL, null=True, blank=True)

    def __str__(self):
        return '{}'.format(self.id)

    @property
    def get_total_item_cost(self):
        # Line total = unit price * quantity.
        return self.price * self.quantity
|
from flask.ext.sqlalchemy import get_debug_queries
from app.core.logging import Logging
from app.core.ansible import Ansi
from app.modules.domains.models import Domains, DomainDetails, DomainSSLDetails
from app.core.common import ModuleController
from passlib.hash import sha512_crypt
from sqlalchemy import func, distinct, exists
from config import DATABASE_QUERY_TIMEOUT, POSTS_PER_PAGE
from flask import abort, jsonify, url_for
from app import app, db
import datetime
@app.after_request
def after_request(response):
    """Log every DB query of the finished request that exceeded
    DATABASE_QUERY_TIMEOUT, then pass the response through unchanged."""
    for query in get_debug_queries():
        if query.duration >= DATABASE_QUERY_TIMEOUT:
            app.logger.warning("SLOW QUERY: %s\nParameters: %s\nDuration: %fs\nContext: %s\n" % (query.statement, query.parameters, query.duration, query.context))
    return response
def dump_datetime(value):
    """Deserialize a datetime into ["YYYY-MM-DD", "HH:MM:SS"] for JSON; None passes through."""
    if value is None:
        return
    date_part = value.strftime("%Y-%m-%d")
    time_part = value.strftime("%H:%M:%S")
    return [date_part, time_part]
class DomainController():
    """Handles domain CRUD plus the related vhost/php/csr files on disk.

    Expects a Flask *request* whose JSON body contains at least 'domain_name'.
    BUG FIXES: Python 2 `except ..., e` / `print` statements converted to
    Python 3; NameErrors in make_public_domain repaired (see notes there).
    """

    def __init__(self, request=None, database=DomainDetails, key='domain'):
        # NOTE(review): when no request is given, self.request/self.mc are
        # never set and most methods will fail — preserved original behaviour.
        if request:
            self.request = request
            self.mc = ModuleController(
                main_db=Domains,
                details_db=database,
                relationship='domain_name',
                key=key,
                name=self.request.json['domain_name']
            )

    def make_public_domain(self, fields):
        """Return *fields* with the relationship column replaced by a public URI.

        NOTE(review): self.key and self.relationship are never set on this
        object (they are passed to ModuleController in __init__) — presumably
        they should come from self.mc; confirm against ModuleController.
        """
        uri = "{0}s.get_{0}".format(self.key)
        new_domain = {}  # BUG FIX: was `new = {}` while the loop used `new_domain`
        for field in fields:
            if field == self.relationship:
                new_domain['uri'] = url_for(
                    uri,
                    name=fields[self.relationship],
                    _external=True
                )
            else:
                # BUG FIX: `domain[field]` referenced an undefined name; the
                # source mapping is `fields`.
                new_domain[field] = fields[field]
        return new_domain

    def dataAsJson(self, key, dictionary):
        """Wrap the public representation of *dictionary* in a JSON response."""
        return jsonify({key: self.make_public_domain(dictionary)})

    def checkFields(self):
        """Validate the request JSON, filling defaults for missing domain details.

        :return: True when valid; aborts with 404 when 'domain_name' is missing
        """
        if not self.request.json or 'domain_name' not in self.request.json:
            abort(404)
        details = self.request.json.setdefault('domain_details', {})
        details.setdefault('group', 'apache')
        details.setdefault('owner', 'apache')
        details.setdefault('port', '80')
        details.setdefault('document_root', '/var/www/vhosts/' + self.request.json['domain_name'])
        return True

    def create(self):
        """Provision the domain via Ansible and persist it."""
        created = datetime.datetime.utcnow()
        if self.checkFields():
            ansi = Ansi("domain")
            ansi.run(self.request)
            return self.mc.create(self.request, created)
        else:
            abort(500)

    def create_csr(self):
        """Generate a CSR via Ansible, read it back from disk, persist the record."""
        created = datetime.datetime.utcnow()
        if self.checkFields():
            ansi = Ansi("csr")
            ansi.run(self.request)
            self.request.json['domain_ssl_details']['csr'] = self.openCsr(
                domain=self.request.json['domain_name']
            )
            return self.mc.create(self.request, created)
        else:
            abort(500)

    def openFile(self, filepath):
        """Return the contents of *filepath*, or None (with a message) when unreadable."""
        try:
            with open(filepath, 'r') as f:
                return f.read()
        except Exception:  # BUG FIX: was Python 2 `except Exception, e` syntax
            print('file does not exist ' + filepath)

    def openVhost(self, protocol, domain):
        """Read the vhost config for *domain*/*protocol*."""
        path = '/var/www/vhosts/{0}/conf/{1}.{0}.conf'.format(domain, protocol)
        return self.openFile(path)

    def openPhp(self, domain):
        """Read the php.ini belonging to *domain*."""
        path = '/var/www/vhosts/{0}/etc/php.ini'.format(domain)
        return self.openFile(path)

    def openCsr(self, domain):
        """Read the CSR file belonging to *domain*."""
        path = '/var/www/vhosts/{0}/ssl/{0}.csr'.format(domain)
        return self.openFile(path)

    def saveFile(self, file, data):
        """Write *data* to *file*; no-op when data is falsy."""
        if data:
            with open(file, 'w') as f:
                f.write(data)

    def writeVhost(self, domain, protocol, data):
        """Write the vhost config for *domain*/*protocol*."""
        path = '/var/www/vhosts/{0}/conf/{1}.{0}.conf'.format(domain, protocol)
        self.saveFile(path, data)
|
from model.group import Group
import random
import string
from builtins import *
def random_string(prefix, maxlen):
    """Return *prefix* plus a random tail of 0..maxlen-1 characters.

    The alphabet is letters, digits, punctuation and spaces; spaces are
    repeated 10x so whitespace shows up often in generated test data.
    """
    alphabet = string.ascii_letters + string.digits + string.punctuation + " " * 10
    tail_length = random.randrange(maxlen)
    tail = "".join(random.choice(alphabet) for _ in range(tail_length))
    return prefix + tail
# Test fixture: one empty group plus two groups with randomised name/header/footer.
testdata = [Group("", "", "")] + [
    Group(name=random_string("name", 7), header=random_string("header", 5), footer=random_string("footer", 5))
    for i in range(2)]
from ballet import Feature
import ballet.eng
# Feature definition. NOTE(review): the module-level names `input`,
# `transformer` and `name` appear to follow a ballet convention (presumably
# discovered by the framework), so the builtin-shadowing `input` is left as-is.
input = "Screen Porch"  # source column the feature reads
# Maps the raw column to a boolean: is the screen-porch value positive?
transformer = ballet.eng.SimpleFunctionTransformer(lambda ser: ser > 0)
name = "Has screen porch"
feature = Feature(input=input, transformer=transformer, name=name)
|
#!/usr/bin/env python
PACKAGE = "particle_filter_cuda"
from dynamic_reconfigure.parameter_generator_catkin import *
gen = ParameterGenerator()
# Single reconfigurable parameter: yaw offset in degrees (default 0,
# clamped to [-180, 180]) between geographic EAST and the map's X axis.
gen.add("angular_map_offset", double_t, 0, "Angular offset of map - needed "+
        "to integrate map with IMU. That value is angle in degrees "+
        "between geographical EAST and X direction of map.", 0, -180, 180)
#gen.add("double_param", double_t, 0, "A double parameter", .5, 0, 1)
#gen.add("str_param", str_t, 0, "A string parameter", "Hello World")
#gen.add("bool_param", bool_t, 0, "A Boolean parameter", True)
# generate() emits the config headers; its return value becomes the exit status.
exit(gen.generate(PACKAGE, "particle_filter_cuda", "particle_filter_cuda"))
|
"""
Implements some common tasks for every type of postgresql relation.
"""
from ops.framework import Object
class PostgresqlRelation(Object):
    """Common behaviour shared by every type of postgresql relation."""

    def __init__(self, charm, relation_name, peer_rel):
        super().__init__(charm, relation_name)
        self._unit = charm.unit
        self._charm = charm
        self._relation_name = relation_name
        self._relation = self.framework.model.get_relation(self._relation_name)
        # The leader must publish information such as which password was
        # created for a given client relation, hence the peer-relation handle.
        self.peer_rel = peer_rel

    @property
    def unit(self):
        """The unit this relation helper runs on."""
        return self._unit

    @property
    def app(self):
        """The application this unit belongs to."""
        return self._charm.unit.app

    @property
    def relation(self):
        """A fresh lookup of the underlying relation object."""
        return self.framework.model.get_relation(self._relation_name)

    @property
    def peer_addresses(self):
        """ingress-address of every remote unit on this relation."""
        return [str(self.relation.data[remote_unit]["ingress-address"])
                for remote_unit in self.relation.units]

    @property
    def advertise_addr(self):
        """Ingress address of this unit's binding for the relation."""
        return str(self.model.get_binding(self._relation_name).network.ingress_address)

    @property
    def binding_addr(self):
        """Bind address of this unit's binding for the relation."""
        return str(self.model.get_binding(self._relation_name).network.bind_address)
|
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.core.paginator import Paginator
from .models import category ,post,tag
# Create your views here.
def showposts(request):
    """Render cat.html with every category and one 5-post page of all posts."""
    testcat = category.objects.all()
    all_posts = post.objects.all()
    paginator = Paginator(all_posts, 5)  # 5 posts per page
    requested_page = request.GET.get('page')
    try:
        walk = paginator.page(requested_page)
    except PageNotAnInteger:
        # Non-numeric page -> first page.
        walk = paginator.page(1)
    except EmptyPage:
        # Out-of-range page (e.g. 9999) -> last page.
        walk = paginator.page(paginator.num_pages)
    return render(request, 'cat.html', {"walk": walk, "testcat": testcat})
def postcat(request, categoryn):
    """Render ``index.html`` with one page of posts from category *categoryn*."""
    category_posts = post.objects.filter(post_category_id=categoryn)
    pager = Paginator(category_posts, 5)  # 5 posts per page
    requested_page = request.GET.get('page')
    try:
        box = pager.page(requested_page)
    except PageNotAnInteger:
        # Non-numeric page parameter -> deliver the first page.
        box = pager.page(1)
    except EmptyPage:
        # Out-of-range page -> deliver the last page.
        box = pager.page(pager.num_pages)
    return render(request, 'index.html', {"box": box})
def getpost(request, post_id):
    """Render ``posts.html`` for a single post and its tags.

    The original issued two identical ``post.objects.get(id=post_id)``
    queries; the tags are now derived from the already-fetched object,
    saving one database round-trip.
    """
    selected_post = post.objects.get(id=post_id)
    tags = selected_post.tag.all()
    return render(request, 'posts.html', {"selected_post": selected_post, "tags": tags})
def details(request, postn):
    """Render ``details.html`` for post *postn* and its tags.

    The original fetched the same post twice; the tags are now read off
    the already-fetched object, saving one database round-trip.
    """
    ptag = post.objects.get(id=postn)
    tags = ptag.tag.all()
    return render(request, 'details.html', {"tags": tags, "ptag": ptag})
def postbytag(request, t):
    """Render ``postbytag.html`` with the posts carrying tag id *t*.

    Uses a parameterized raw join over the post/tag through table.
    """
    tagged_posts = post.objects.raw('select * from users_app_post as a ,users_app_post_tag as b where b.tag_id=%s and a.id=b.post_id', [t])
    return render(request, 'postbytag.html', {"par": tagged_posts})
def comment_posted(request):
    """Redirect back to the page the comment was posted from.

    Falls back to the site root when the ``Referer`` header is absent;
    the original passed ``None`` to HttpResponseRedirect in that case.
    """
    return HttpResponseRedirect(request.META.get('HTTP_REFERER') or '/')
class Solution(object):
    def kthSmallest(self, root, k):
        """
        https://leetcode.com/problems/kth-smallest-element-in-a-bst/description/

        Return the k-th smallest value (1-indexed) in a BST.

        Replaces the original level-order walk that collected and sorted
        every value (O(n log n)) with an iterative in-order traversal
        that stops as soon as the k-th node is visited: O(h + k).

        :type root: TreeNode
        :type k: int
        :rtype: int
        :raises IndexError: if k exceeds the number of nodes (the
            original's ``sorted(li)[k-1]`` raised the same).
        """
        if not root:
            return []  # preserved from the original: empty tree -> []
        stack, node = [], root
        while stack or node:
            # Descend to the leftmost unvisited node.
            while node:
                stack.append(node)
                node = node.left
            node = stack.pop()  # next value in sorted order
            k -= 1
            if k == 0:
                return node.val
            node = node.right
        raise IndexError("k is larger than the number of nodes")
|
from openbabel import OBMol, OBConversion, OBResidueIter
import pyplif as pp
import operator
from IOhandle.models import Protein, Molecule
from LLOOMMPPAA.models import PlifMethod,PlifRes,PlifBit,Plif,SynthPoint,PlifBitInstance
from django.core.exceptions import ValidationError
import os
import ast
import sys
import tempfile
from rdkit import Chem
from auxfuns import *
def django_run(target, opt="XTAL"):
    """Function to take multiple confs of ONE ligand and generate their PLIFS against one template protein.

    target: project Target object; target.title selects the residue
    definition and the template protein (code = title + "TEMP").
    opt: "XTAL" (crystal molecules) or "LLOOMMPPAA" (SynthPoint molecules).
    Persists PlifMethod/Plif/PlifRes/PlifBit records as a side effect.
    """
    # Set up the OpenBaebel conversion modules
    sdconv = OBConversion()
    ligref = OBMol()
    # Define the residues and the proteisn to analyse
    if os.path.isfile(os.path.join(os.path.split(sys.argv[0])[0], 'data/res_def.py')):
        res_d = [trans_res(x) for x in ast.literal_eval(open(os.path.join(os.path.split(sys.argv[0])[0], 'data/res_def.py')).read())[target.title].split()]
    # NOTE(review): if the res_def.py file is missing, res_d is never bound
    # and the next line raises NameError -- confirm the file is guaranteed.
    print res_d
    # Molecules
    # Now read in the ligand
    plif_method = PlifMethod()
    plif_method.text= "PYPLIF"
    # One bit per feature, in this fixed order (indexes new_d[res] below).
    feature_list = ["POLAR","FACE","EDGE","ACCEPTOR","DONOR","NEGATIVE","POSITIVE"]
    # Get-or-create pattern via validate_unique/ValidationError.
    try:
        plif_method.validate_unique()
        plif_method.save()
    except ValidationError:
        plif_method = PlifMethod.objects.get(text="PYPLIF")
    out_d = {}
    counter = 0
    # Create a file for the protein
    t = tempfile.NamedTemporaryFile(suffix=".pdb",delete=False)
    my_prot = Protein.objects.get(code=target.title+"TEMP")
    # NOTE(review): this writes pdb_info.name (a path string), not the file
    # contents -- confirm that is what read_prot expects.
    t.write(my_prot.pdb_info.name)
    t.close()
    protref = read_prot(t.name, res_d)
    # Scratch SDF file reused for every molecule below.
    t = tempfile.NamedTemporaryFile(suffix=".sdf",delete=False)
    t.close()
    sdconv.SetInFormat("sdf")
    if opt == "XTAL":
        mols = Molecule.objects.exclude(prot_id__code__contains=target.title).filter(prot_id__target_id=target)
    elif opt == "LLOOMMPPAA":
        mols = []
        sps = SynthPoint.objects.filter(target_id=target)
        for s in sps:
            mols.extend([m for m in s.mol_id.all()])
    else:
        print "UNKNOWN OPTION"
        return
    for dj_mol in mols:
        # Round-trip the molecule through RDKit into the scratch SDF, then
        # read it back with OpenBabel.
        out_sd = Chem.SDWriter(t.name)
        out_sd.write(Chem.MolFromMolBlock(str(dj_mol.sdf_info)))
        out_sd.close()
        sdconv.ReadFile(ligref, t.name)
        # Now make the new plif
        new_plif = Plif()
        new_plif.mol_id = dj_mol
        new_plif.prot_id = my_prot
        new_plif.method_id = plif_method
        try:
            new_plif.validate_unique()
            new_plif.save()
        except ValidationError:
            new_plif = Plif.objects.get(mol_id=dj_mol,prot_id=my_prot,method_id=plif_method)
        lig_name = ligref.GetTitle().strip(",")
        prot_name = lig_name.split("_")[0]
        ligref.AddHydrogens()
        counter +=1
        refresdict = pp.getresiduedict(protref, res_d)
        new_d = get_fp(protref,ligref, res_d)
        for res in new_d:
            # Residue keys look like "VAL64": name in [:3], number in [3:].
            new_res = PlifRes()
            new_res.res_name = res[:3]
            new_res.res_num = int(res[3:])
            new_res.prot_id = my_prot
            try:
                new_res.validate_unique()
                new_res.save()
            except ValidationError:
                new_res = PlifRes.objects.get(res_name=res[:3],res_num=int(res[3:]),prot_id=my_prot)
            new_plif.res_id.add(new_res)
            for bit_num, bit in enumerate(new_d[res]):
                new_bit = PlifBit()
                new_bit.feature = feature_list[bit_num]
                new_bit.method_id = plif_method
                new_bit.res_id = new_res
                try:
                    new_bit.validate_unique()
                    new_bit.save()
                    my_fun(dj_mol,new_bit,new_plif,bit)
                except ValidationError:
                    # NOTE(review): bit_id.add/my_fun only run on the
                    # duplicate path here -- confirm that is intended.
                    new_bit = PlifBit.objects.get(feature=feature_list[bit_num],method_id=plif_method,res_id=new_res)
                    new_bit.save()
                    new_plif.bit_id.add(new_bit)
                    my_fun(dj_mol,new_bit,new_plif,bit)
        # Fresh OBMol for the next iteration; the Read() return value is
        # unused here (the for-loop, not `notatend`, drives iteration).
        ligref = OBMol()
        notatend = sdconv.Read(ligref)
def get_fp(protref, ligref, res_d):
    """Return the PLIF bit dictionary for one ligand/protein pair.

    Restricts pyplif's residue dictionary to the binding-site residues in
    res_d, then fills in the interaction bits in place.
    """
    all_res = pp.getresiduedict(protref, res_d)
    fingerprint = {k: v for (k, v) in all_res.iteritems() if k in res_d}
    pp.otherinteractions(fingerprint, res_d, protref, ligref, [])
    # fingerprint now holds the bit strings per residue.
    return fingerprint
def write_res(out_d, res_d):
    """Function to write the results out in a dictionary to out.csv."""
    out_f = open("out.csv","w")
    # Header: each residue name followed by five empty padding cells.
    header_cells = []
    for res in res_d:
        header_cells.extend([res, "", "", "", "", ""])
    out_f.write("mol,")
    out_f.write(",".join(header_cells))
    out_f.write("\n")
    # One row per molecule.
    # NOTE(review): consecutive residue groups are concatenated without a
    # separating comma, exactly as in the original output format.
    for mol in out_d:
        row = mol + ","
        for res in res_d:
            row += ",".join(str(int(bit)) for bit in list(out_d[mol][res]))
        out_f.write(row + "\n")
    out_f.close()
def read_prot(prot_file, res_d):
    """Function to read in a protein to an OBMol.

    prot_file: path to a PDB file. res_d: iterable of expected residue
    names of the form NAME+NUM (e.g. "VAL64").
    Relies on the module-level ``out_err`` file opened in __main__.
    """
    conv = OBConversion()
    protref = OBMol()
    conv.SetInFormat("pdb")
    conv.ReadFile(protref,prot_file)
    # Now assign the residue names
    i = 0
    my_res = []
    for residue in OBResidueIter(protref):
        i+=1
        # Rename e.g. "VAL" -> "VAL64" so names match res_d entries.
        residue.SetName(residue.GetName()+str(residue.GetNum()))
        my_res.append(residue.GetName())
    # Now check that all the residues exist and print out if not
    fail_counter = 0
    fail_list = []
    # Loop through the res and check they are in the list
    for res_me in res_d:
        if res_me not in my_res:
            fail_counter += 1
            fail_list.append(res_me)
    # If it's out of register by one do again
    if fail_counter > 0:
        i = 0
        my_res = []
        # NOTE(review): this second pass appends the residue number AGAIN
        # (e.g. "VAL64" -> "VAL6464"); it is unclear how that fixes an
        # off-by-one register -- confirm against real input data.
        for residue in OBResidueIter(protref):
            i+=1
            residue.SetName(residue.GetName()+str(residue.GetNum()))
            my_res.append(residue.GetName())
        # Now check that all the residues exist and print out if not
        fail_counter = 0
        fail_list = []
        # Loop through the res and check they are in the list
        for res_me in res_d:
            if res_me not in my_res:
                fail_counter += 1
                fail_list.append(res_me)
        # Record the remaining mismatches in the global error log.
        out_err.write(prot_file+",")
        out_err.write(str(fail_counter)+"\n")
        out_err.write(str(fail_list))
        out_err.write(str(my_res))
        out_err.write(str(res_d))
    protref.AddHydrogens()
    return protref
def trans_res(res):
    """Function to take a RES of form blah./^V64 and output VAL64."""
    three_to_one = {'CYS': 'C', 'ASP': 'D', 'SER': 'S', 'GLN': 'Q', 'LYS': 'K',
                    'ILE': 'I', 'PRO': 'P', 'THR': 'T', 'PHE': 'F', 'ASN': 'N',
                    'GLY': 'G', 'HIS': 'H', 'LEU': 'L', 'ARG': 'R', 'TRP': 'W',
                    'ALA': 'A', 'VAL': 'V', 'GLU': 'E', 'TYR': 'Y', 'MET': 'M'}
    # Invert the table: one-letter code -> three-letter name.
    one_to_three = dict((v, k) for (k, v) in three_to_one.items())
    # Everything after the '^': first char is the one-letter code, the
    # remainder is the residue number.
    tail = res.split("^")[1]
    return one_to_three[tail[0]] + tail[1:]
def get_dict(file):
    """Function to make a dictionary of residues for the binding site
    of each protein. Based on a text file.

    Returns (core_res, restrictive_list): the most common binding-site
    residues (top 16, de-duplicated) and up to 30 protein codes whose
    site contains all of them.
    """
    res_list = [x.rstrip() for x in open(file).readlines()]
    # Make the output dictionary
    out_d = {}
    # write it
    # Characters [2:6] of each line are taken as the protein code.
    for res in res_list:
        if res[2:6] in out_d:
            out_d[res[2:6]].append(trans_res(res))
        else:
            out_d[res[2:6]] = [trans_res(res)]
    # Define the lists to count the aligned residues
    prot_list = []
    tot_list = []
    restrictive_list = []
    tot_dict = {}
    # Only proteins whose first site residue is ILE10 are counted --
    # presumably an alignment anchor; TODO confirm.
    for prot in out_d:
        if out_d[prot][0] == "ILE10":
            prot_list.append(prot)
            tot_list.extend(out_d[prot])
            for res in out_d[prot]:
                if res in tot_dict:
                    tot_dict[res] +=1
                else:
                    tot_dict[res] =1
    # NOTE(review): the next statement's result is discarded (dead code).
    list(set(tot_list))
    # Rank residues by how many proteins contain them; keep the top 16.
    sorted_x = sorted(tot_dict.iteritems(), key=operator.itemgetter(1),reverse=True)
    core_res = [x[0] for x in sorted_x[:16]]
    # Now make a restrictive list - only those with all residues
    for prot in out_d:
        if set(core_res).issubset(set(out_d[prot])):
            restrictive_list.append(prot)
    return list(set(core_res)), restrictive_list[:30]
def all_to_all():
    """Function to compare all to all.

    Reads every ligand from ../mols.sdf and fingerprints it against every
    protein in the restrictive list; results go to out.csv via write_res.
    """
    # Set up the OpenBaebel conversion modules
    sdconv = OBConversion()
    ligref = OBMol()
    # Define the residues and the proteisn to analyse
    res_d, prot_list = get_dict("myFirstFile.txt")
    # Now read in the ligand
    sdconv.SetInFormat("sdf")
    notatend = sdconv.ReadFile(ligref,"../mols.sdf")
    out_d = {}
    counter = 0
    # Now read the ligand file
    while notatend:
        # Ligand titles look like "<protcode>_..." -- skip ligands whose
        # protein is not in the restrictive list.
        lig_name = ligref.GetTitle().strip(",")
        prot_name = lig_name.split("_")[0]
        if prot_name not in prot_list:
            ligref = OBMol()
            notatend = sdconv.Read(ligref)
            continue
        ligref.AddHydrogens()
        counter +=1
        print counter
        for j, my_prot in enumerate(prot_list):
            # Windows-specific path to the aligned PDBs -- TODO confirm.
            protref = read_prot(r"C:\www\Protoype\media_coninchi\pdb" + "\\" + my_prot + "al.pdb", res_d)
            # Get the reference dictionary
            refresdict = pp.getresiduedict(protref, res_d)
            # Update this dict, to only residues in the binding site
            new_d = get_fp(protref,ligref, res_d)
            # Make sure it is a unique name for the output
            while lig_name in out_d:
                lig_name = lig_name + "Z"
            # Add it to the dict
            out_d[lig_name+my_prot] = {}
            for res in new_d:
                # Assign each residue the scores for each molecule
                out_d[lig_name+my_prot][res] = new_d[res]
        # Make the ligand
        ligref = OBMol()
        notatend = sdconv.Read(ligref)
    # Now write the results out
    write_res(out_d, res_d)
def one_to_many():
    """Function to take multiple confs of ONE ligand and generate their PLIFS against one template protein.

    Reads conformations from ../out.sdf, fingerprints each against the
    hard-coded 1qmz template and writes the results via write_res.
    """
    # Set up the OpenBaebel conversion modules
    sdconv = OBConversion()
    ligref = OBMol()
    # Define the residues and the proteisn to analyse
    res_d, prot_list = get_dict("myFirstFile.txt")
    # Now read in the ligand
    sdconv.SetInFormat("sdf")
    notatend = sdconv.ReadFile(ligref,"../out.sdf")
    out_d = {}
    counter = 0
    # Template protein is fixed; loaded once outside the loop.
    my_prot = "1qmz"
    protref = read_prot(r"C:\www\Protoype\media_coninchi\pdb" + "\\" + my_prot + "al.pdb", res_d)
    # Now read the ligand file
    while notatend:
        lig_name = ligref.GetTitle().strip(",")
        prot_name = lig_name.split("_")[0]
        ligref.AddHydrogens()
        counter +=1
        print counter
        # Get the reference dictionary
        refresdict = pp.getresiduedict(protref, res_d)
        # Update this dict, to only residues in the binding site
        new_d = get_fp(protref,ligref, res_d)
        # Add it to the dict
        # The counter suffix keeps one key per conformation.
        out_d[lig_name+str(counter)] = {}
        for res in new_d:
            # Assign each residue the scores for each molecule
            out_d[lig_name+str(counter)][res] = new_d[res]
        # Make the ligand
        ligref = OBMol()
        notatend = sdconv.Read(ligref)
    # Now write the results out
    write_res(out_d, res_d)
# Script to read in a series of ligands (as an SD file) and output a CSV file of the bit vectors
# Define the OB objects
if __name__ == "__main__":
    # Make the error file
    # ``out_err`` is used as a global by read_prot().
    out_err = open("out.std.err","w")
    out_err.write("file,errors\n")
    one_to_many()#all_to_all()
|
import signal
import sys
import asab
from .service import BSPumpService
from .__version__ import __version__, __build__
class BSPumpApplication(asab.Application):
    """
    Application object for BSPump.
    """
    def __init__(self, args=None, web_listen=None):
        """Initialise the pump application.

        args: command-line arguments forwarded to asab.Application.
        web_listen: explicit web API listen address; when None it falls
        back to the --web-api CLI option and then to the [bspump:web]
        config section.
        """
        super().__init__(args=args)
        # Banner
        print("BitSwan BSPump version {}".format(__version__))
        from asab.metrics import Module
        self.add_module(Module)
        # TODO: Make sure that we don't occupy unnecessary high amount of threads
        from asab.proactor import Module
        self.add_module(Module)
        self.PumpService = BSPumpService(self)
        self.WebContainer = None
        # Conditionally activate LogMan.io service
        if asab.Config.has_section("logman.io"):
            from asab.logman import Module
            self.add_module(Module)
            logman_service = self.get_service('asab.LogManIOService')
            logman_service.configure_metrics(self.get_service('asab.MetricsService'))
            logman_service.configure_logging(self)
        try:
            # Signals are not available on Windows
            self.Loop.add_signal_handler(signal.SIGUSR1, self._on_signal_usr1)
        except (NotImplementedError, AttributeError):
            pass
        # Activate web frontend, if requested
        # self._web_listen is populated by parse_arguments() below,
        # which the asab Application base runs during startup.
        if web_listen is None:
            if self._web_listen is not None and len(self._web_listen) > 0:
                web_listen = self._web_listen
            elif "bspump:web" in asab.Config:
                web_listen = asab.Config["bspump:web"].get("listen", "")
        if web_listen is not None and len(web_listen) > 0:
            from .web import _initialize_web
            self.WebContainer = _initialize_web(self, web_listen)
    def create_argument_parser(self):
        """Build the CLI parser with a BSPump-specific prog name and banner."""
        prog = sys.argv[0]
        # When run as ``python -m bspump``, show a friendlier prog string.
        if prog[-11:] == '__main__.py':
            prog = sys.executable + " -m bspump"
        description = '''
BSPump is a stream processor. It is a part of BitSwan.
For more information, visit: https://github.com/LibertyAces/BitSwanPump

version: {}
build: {} [{}]
'''.format(__version__, __build__, __build__[:7])
        parser = super().create_argument_parser(
            prog=prog,
            description=description
        )
        return parser
    def parse_arguments(self, args=None):
        """Parse CLI args; remember the --web-api value for __init__."""
        args = super().parse_arguments(args=args)
        self._web_listen = args.web_api
        return args
    async def main(self):
        """Report readiness once all pipelines are constructed."""
        print("{} pipeline(s) ready.".format(len(self.PumpService.Pipelines)))
        # TODO: Come up with solution how to reconsile this with unittests, maybe as follows?
        # L.log(31, "{} pipeline(s) ready.".format(len(self.PumpService.Pipelines)))
        pass
    def _on_signal_usr1(self):
        '''
        To clear reset from all pipelines, run
        $ kill -SIGUSR1 xxxx
        Equivalently, you can use `docker kill -s SIGUSR1 ....` to reset containerized BSPump.
        '''
        # Reset errors from all pipelines
        for pipeline in self.PumpService.Pipelines.values():
            if not pipeline.is_error():
                continue # Focus only on pipelines that has errors
            pipeline.set_error(None, None, None)
|
import threading
import time
from colorama import Fore
# Shared counter bumped by all worker threads.
count = 0

def increment(lock, delay, color):
    """Increment the shared ``count`` up to exactly 100.

    Fixes a race in the original: ``count < 100`` was tested OUTSIDE the
    lock, so several threads could pass the test together and push the
    total past 100. The test now happens under the lock. Also replaces
    the deprecated ``threading.currentThread()`` alias.
    """
    global count
    print(color, threading.current_thread().name, '\t-> Worker_1 starting')
    while True:
        with lock:
            if count >= 100:
                break
            count += 1
            print(color, threading.current_thread().name, '\t-> Count is:', count)
        time.sleep(delay)
lock = threading.Lock()
i=0
colors = [Fore.RED, Fore.GREEN, Fore.CYAN, Fore.YELLOW, Fore.MAGENTA, Fore.WHITE]
# Spawn five workers T1..T5 that all bump the shared counter.
for t in range(5):
    # NOTE(review): i runs 1..5, so colors[0] (Fore.RED) is never used --
    # confirm whether the offset is intentional.
    i += 1 if i < 5 else 0
    # The loop variable 't' is rebound from the range index to the Thread.
    t = threading.Thread(name='T'+str(i), target=increment, args=(lock, 0.1, colors[i]))
    t.start()
|
#Albion Burrniku
#180714100040
#Rrjetat Kompjuterike #Prof:Blerim Rexha #Ass:Haxhi Lajqi
import socket
import threading
import _thread
from socket import gethostname
import time
import random
import math
import sys
import string
# IPADRESA
def IPADDRESS(IP):
    """Return the host part of an (ip, port) socket address pair."""
    host = IP[0]
    return host
# PORTI
def PORT(porti):
    """Return the port part of an (ip, port) socket address pair."""
    port_number = porti[1]
    return port_number
# BASHKETINGELLORE
def BASHKETINGELLORE(text):
    """Count consonant letters in *text* (both cases; 'y' is not counted)."""
    consonants = 'bcdfghjklmnpqrstvwxzBCDFGHJKLMNPQRSTVWXZ'
    return sum(1 for ch in text if ch in consonants)
#ZANORE
def ZANORE(text):
    """Count vowel letters in *text* (both cases; 'y' counts as a vowel)."""
    vowels = 'aeiouyAEIOUY'
    return sum(1 for ch in text if ch in vowels)
#REVERSE
def REVERSE(text):
    """Return *text* reversed."""
    return ''.join(reversed(text))
#PALINDROME
def PALINDROME(text):
    """Return an Albanian message saying whether *text* is a palindrome."""
    if text == text[::-1]:
        return "Eshte Palindrom"
    return "Nuk eshte Palindrom"
# KOHA
def TIME():
    """Return the current local time as a ctime-formatted string."""
    # time.ctime() already returns a str; the original's str() wrapper
    # and explicit time.time() argument were redundant.
    return time.ctime(time.time())
# LOJA
def GAME():
    """Return five random lottery numbers, each in [1, 35]."""
    return [random.randint(1, 35) for _ in range(5)]
#GCF
def GCF(n1, n2):
    """Return the greatest common factor of two integers.

    Uses Euclid's algorithm: O(log min(n1, n2)) instead of the original
    downward scan, and it also returns a value (rather than None) when
    one argument is 0.
    """
    n1, n2 = abs(n1), abs(n2)
    while n2:
        n1, n2 = n2, n1 % n2
    return n1
# KONVERTIMI
def CONVERT(option, nr):
    """Convert *nr* between units according to *option* (case-insensitive).

    Supported options: CMTOINCH, INCHTOCM, KMTOMILES, MILETOKM; anything
    else returns an Albanian usage message.
    """
    conversions = {
        "CMTOINCH": lambda v: v / 2.54,
        "INCHTOCM": lambda v: v * 2.54,
        "KMTOMILES": lambda v: v / 1.609,
        "MILETOKM": lambda v: v * 1.609,
    }
    option = str(option).upper()
    if option in conversions:
        return conversions[option](int(nr))
    return("Duhet te zgjidhni njeren nga opcionet : CMTOINCH, INCHTOCM , KMTOMILES, MILETOKM")
# ~~~* Metodat shtese *~~~
def MESATARJA(nr1, nr2):
    """Return the arithmetic mean of two numbers."""
    return (nr1 + nr2) / 2
# UDP server: the first datagram carries the command name; follow-up
# datagrams (where a command needs them) carry the operands.
try:
    serverPort=13000
    serverName='localhost'
    s=socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
except socket.error as e:
    print("Error ne krijimin e socket "+str(e))
try:
    s.bind((serverName,serverPort))
except socket.error as m:
    # FIX: the original concatenated a str with the exception object,
    # which raises TypeError; stringify the error first.
    print("Error: " + str(m))
print('---------------------------------------')
print('Serveri eshte startuar ne localhost ne portin: ' + str(serverPort))
print('Serveri eshte duke pritur per ndonje kerkese')
print('---------------------------------------\n')
while True:
    message,address=s.recvfrom(128)
    option=message.decode()
    print("Kerkesa "+str(option)+" u pranua nga IP: "+str(address[0])+" ne Portin : "+str(address[1]))
    if(option == 'IPADDRESS'):
        data = "Pergjigjia: Ip e klientit eshte : "+str(IPADDRESS(address))
        s.sendto(data.encode(),address)
    elif(option == 'PORT'):
        data = "Pergjigjia: Numri i portit te klientit eshte : " + str(PORT(address))
        s.sendto(data.encode(),address)
    elif(option == 'COUNT'):
        data1 , address= s.recvfrom(128)
        data2 = str(BASHKETINGELLORE(data1.decode()))
        data3 = str(ZANORE(data1.decode()))
        data="Pergjigjia: Teksti i pranuar përmban " + data3 +" zanore dhe "+ data2 +" bashkëtingëllore"
        s.sendto(data.encode(),address)
    elif(option== 'REVERSE'):
        data1 , address= s.recvfrom(128)
        data = REVERSE(data1.decode())
        s.sendto(data.encode(),address)
    elif(option == 'PALINDROME'):
        data , address= s.recvfrom(128)
        data1 = str(PALINDROME(data.decode()))
        s.sendto(data1.encode(),address)
    elif(option == 'TIME'):
        data = str(TIME())
        s.sendto(data.encode(),address)
    elif(option == 'GAME'):
        data = str(GAME())
        s.sendto(data.encode(),address)
    elif(option == 'GCF'):
        n1 , address= s.recvfrom(128)
        n2 , address= s.recvfrom(128)
        data ="Pergjigjia: GCF eshte: " + str(GCF(int(n1.decode()),int(n2.decode())))
        s.sendto(data.encode(),address)
    elif(option == 'CONVERT'):
        m , address= s.recvfrom(128)
        nr , address= s.recvfrom(128)
        data = str(CONVERT(m.decode(),nr.decode()))
        s.sendto(data.encode(),address)
    elif(option == 'MESATARJA'):
        nr1 , address=s.recvfrom(128)
        nr2 , address=s.recvfrom(128)
        # FIX: the original mistakenly called CONVERT(nr1, nr2) here,
        # treating the first number as a conversion option. Average the
        # two decoded integers with MESATARJA instead.
        data = str(MESATARJA(int(nr1.decode()),int(nr2.decode())))
        s.sendto(data.encode(),address)
|
#
# MLDB-1030_apply_stopwords.py
# mldb.ai inc, 2015
# This file is part of MLDB. Copyright 2015 mldb.ai inc. All rights reserved.
#
import datetime
import unittest
from mldb import mldb, MldbUnitTest, ResponseException
class Mldb1030Test(MldbUnitTest):
    """Regression test for MLDB-1030: applying the filter_stopwords function."""
    @classmethod
    def setUpClass(self):
        """Create the 'toy' dataset and register the 'stop' filter_stopwords function."""
        dataset_config = {
            'type' : 'sparse.mutable',
            'id' : "toy"
        }
        dataset = mldb.create_dataset(dataset_config)
        now = datetime.datetime.now()
        dataset.record_row("elem1", [ ["title", "patate where when poire when", now]])
        dataset.record_row("elem2", [ ["title", "allo where what he a allo", now]])
        dataset.commit()
        #add function
        func_conf = {
            "type":"filter_stopwords",
            "params": {}
        }
        func_output = mldb.put("/v1/functions/stop", func_conf)
        mldb.log(func_output)
    def test_it(self):
        """Tokenize titles into a bag-of-words dataset and verify stopword removal."""
        # baggify our words
        baggify_conf = {
            "type": "transform",
            "params": {
                "inputData": "select tokenize(title, {splitChars:' ', quoteChar:'', "
                             "minTokenLength: 2}) as * from toy",
                "outputDataset": {
                    "id": "bag_of_words",
                    "type": "sparse.mutable"
                }
            }
        }
        baggify_output = mldb.put("/v1/procedures/baggify", baggify_conf)
        mldb.log(baggify_output)
        run_output = mldb.post("/v1/procedures/baggify/runs")
        mldb.log(run_output)
        # query all
        rez = mldb.get("/v1/query",
                       q="select * from bag_of_words order by rowName() ASC")
        mldb.log(rez.json())
        def do_check(my_rez):
            # After stopword filtering only the content words must remain.
            words = [[x[0] for x in line["columns"]] for line in my_rez]
            assert set(["patate", "poire"]) == set(words[0])
            assert ["allo"] == words[1]
        # query while applying stopwords
        rez = mldb.get("/v1/query",
                       q="select stop({words: {*}})[words] as * from bag_of_words "
                         "order by rowName() ASC")
        js_rez = rez.json()
        mldb.log(js_rez)
        do_check(js_rez)
        #####
        # try both operations at once
        rez = mldb.get("/v1/query", q="""
                    select stop({
                            words: tokenize(title, {minTokenLength:2,
                                                    splitChars: ' ',
                                                    quoteChar: ''})
                        }
                    )[words] as *
                    from toy
                    order by rowName() ASC""")
        js_rez = rez.json()
        mldb.log(js_rez)
        do_check(js_rez)
        #####
        # the following shouldn't error out (MLDB-1689)
        self.assertTableResultEquals(
            mldb.query(""" select stop({ words: {} } ) """),
            [
                [
                    "_rowName"
                ],
                [
                    "result"
                ]
            ])
# Execute all MldbUnitTest cases defined above.
mldb.run_tests()
|
from home import app
from flask import render_template,request,redirect,url_for,session,flash,jsonify
#from mysql import MySQL
from flaskext.mysql import MySQL
from datetime import datetime
import os
from flask_mail import Mail,Message
# Bind the MySQL extension to the Flask app and open a module-level
# connection. NOTE(review): most handlers open their own connection via
# mysql.connect(); this module-level `db` appears to be a warm-up/legacy
# handle -- confirm it is still needed.
mysql = MySQL()
mysql.init_app(app)
db = mysql.connect()
mail = Mail(app)
#APP_ROOT = os.path.dirname(os.path.abspath(__file__))
@app.route('/admin')
def admin():
    """Show the admin sign-in page."""
    return render_template("signin_admin.html")
@app.route('/signout_admin')
def signout_admin():
    """Clear the admin session keys and return to the public index."""
    for session_key in ('id', 'role', 'name', 'img'):
        session.pop(session_key, None)
    return redirect(url_for('.index'))
@app.route('/signin_admin')
def signin_admin():
    """Authenticate an admin against admin_information and start a session.

    NOTE(review): the password is compared in plain text inside the SQL
    query -- password hashing should be confirmed/introduced. Credentials
    also arrive as GET query parameters, so they end up in server logs.
    """
    # 'a' is a sentinel; it survives only when no row matched.
    arr=['a']
    Email = request.args['Email']
    Pass = request.args['Pass']
    db = mysql.connect()
    cursor = db.cursor()
    cursor.execute("SELECT * FROM admin_information where email_id=%s and password=%s;", (Email,Pass))
    for raw in cursor.fetchall():
        arr=raw
    if arr[0] =='a':
        return "Unauthorized User"
    else:
        # Row layout: 0=admin_id, 1=first name, 3=last name, 9=profile pic.
        session['id'] = arr[0]
        session['name'] = arr[1] + " " + arr[3]
        session['img'] = arr[9]
        session['role'] = 'admin'
        return redirect(url_for('.admin_new'))
@app.route('/admin_home')
def admin_home():
    """Render the admin dashboard with notifications, or the sign-in page."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname=session['name']
        img=session['img']
        # The placeholder entry only fixes the list shape; it is cleared
        # immediately and refilled by notify().
        arrx=[{'time':1,'type':1,'message':'sds'}]
        arrx.clear()
        arrx=notify(arrx)
        count=noticount()
        return render_template("admin_home.html",fname=fname,img=img,arr=arrx,c=count)
def notify(arr):
    """Append one summary entry per notification type found in admin_notification.

    Counts rows by n_id (2=resubmission, 3=reviewer comment, 4=new paper,
    5=CRC copy) and appends a {time, type, message} dict for each type
    that occurred at least once. Returns the same list, mutated.
    """
    c=0
    d=0
    e=0
    f=0
    db = mysql.connect()
    cursor = db.cursor()
    cursor.execute("Select n_id from admin_notification")
    for raw in cursor.fetchall():
        if raw[0]==2:
            c=c+1
        if raw[0]==3:
            d=d+1
        if raw[0]==4:
            e=e+1
        if raw[0]==5:
            f=f+1
    # 'time' carries the occurrence count, not a timestamp.
    if c>0:
        arr.append({'time':c,'type':2,'message':'User has resubmitted Paper.'})
    if d>0:
        arr.append({'time':d,'type':3,'message':'Reviewer has commented on a Paper.'})
    if e>0:
        arr.append({'time':e,'type':4,'message':'User has uploaded new paper.'})
    if f>0:
        arr.append({'time':f,'type':5,'message':'User has Submitted CRC Copy.'})
    # if raw[0]!=None:
    # 	p_id=int(raw[0])
    # 	cursor.execute("select MAX(submission_id) from submission_of_paper where paper_id=%s",p_id)
    # 	id=cursor.fetchone()
    # 	cursor.execute("select title from submission_of_paper where submission_id=%s",id)
    # 	title=cursor.fetchone()
    # 	tt=str(title)
    # 	msg=str(raw[1])
    # 	msg=msg+' '+tt+' paper.'
    # 	arr.append({'index':raw[2],'paper_id':raw[0],'message':msg})
    # else:
    # 	arr.append({'index':raw[2],'paper_id':raw[0],'message':raw[1]})
    return arr
def noticount():
    """Return the total number of rows in admin_notification.

    The original counted rows one by one in a Python loop; the length of
    the fetched result set is the same number.
    """
    db = mysql.connect()
    cursor = db.cursor()
    cursor.execute("Select n_id from admin_notification")
    return len(cursor.fetchall())
@app.route('/Admin_Personal_Info')
def admin_pi():
    """Render the admin's personal-information page, or the sign-in page."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        id = session['id']
        fname=session['name']
        img = session['img']
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("Select * from admin_information where admin_id=%s",id)
        # Sentinel row in case the query returns nothing.
        arr=['a','a','a','a','a','a','a','a','a','a']
        for raw in cursor.fetchall():
            arr=raw
        # Placeholder establishes list shape; refilled by notify().
        arrx=[{'time':1,'type':1,'message':'sds'}]
        arrx.clear()
        arrx=notify(arrx)
        count=noticount()
        return render_template("pi_admin.html",name="Admin",k=arr,c=count,arr=arrx,fname=fname,img=img)
@app.route('/pi_admin1', methods=['POST'])
def pi_admin1():
    """Save the admin's personal-information form, including an optional
    profile picture upload, then return to the info page.

    Fixes ``f.filenam`` -> ``f.filename``: the original raised
    AttributeError whenever a file was actually uploaded.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        id = session['id']
        img = session['img']
        destination='temp'
        for f in request.files.getlist("file"):
            if f.filename == '':
                # No file selected; keep the existing picture.
                break
            else:
                # NOTE(review): upload directory is hard-coded to this
                # deployment's path -- consider making it configurable.
                target=os.path.join('/home/tp/python_flask/Research_Paper/static','Img/')
                filename=f.filename
                destination = "/".join([target,filename])
                f.save(destination)
                img=f.filename
                session['img']=img
        first = request.form['fname']
        middle = request.form['mname']
        last = request.form['lname']
        session['name'] = first + " " + last
        gender = request.form['gn']
        email = request.form['email']
        mobile = request.form['mobile']
        skill = request.form['skill']
        dob = request.form['date']
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute('''update admin_information set first_name=%s,middle_name=%s,last_name=%s,mobile_no=%s,email_id=%s,gender=%s,skills=%s,date_of_birth=%s,profile_pic=%s where admin_id=%s''', (first,middle,last,mobile,email,gender,skill,dob,img,id))
        db.commit()
        return redirect(url_for('.admin_pi'))
@app.route('/status')
def status():
    """Render the status-editing page with all rows from the status table."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        id = session['id']
        fname=session['name']
        img=session['img']
        # Placeholder establishes list shape; cleared and refilled below.
        arr2=[{'id':'1','name':'jkj'}]
        arr2.clear()
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("""select * from status""")
        for raw in cursor.fetchall():
            arr2.append({'id':raw[0],'name':raw[1]})
        arrx=[{'time':1,'type':1,'message':'sds'}]
        arrx.clear()
        arrx=notify(arrx)
        count=noticount()
        return render_template("Edit_Status.html",name="Status",arr2=arr2,c=count,arr=arrx,fname=fname,img=img)
@app.route('/status_1', methods=['POST'])
def status_1():
    """Insert a new status name posted from the edit form."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        name = request.form['Sname']
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("""insert into status (status_name) values(%s)""", name)
        db.commit()
        return redirect(url_for('.status'))
@app.route('/status_2', methods=['POST'])
def status_2():
    """Delete the statuses whose checkbox ids were posted."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        key = request.form.getlist('key')
        # NOTE(review): getlist already yields a list; the join/split
        # round-trip looks redundant -- confirm before simplifying.
        keys=','.join(key)
        keyx=keys.split(",")
        db = mysql.connect()
        cursor = db.cursor()
        for i in keyx:
            if i!='':
                cursor.execute("""delete from status where status_id=%s""", i)
                db.commit()
        return "Successfully deleted"
@app.route('/keyword')
def keyword():
    """Render the keyword-editing page with subjects, tracks and keywords."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        id = session['id']
        fname=session['name']
        img=session['img']
        # Placeholders establish list shapes; cleared and refilled below.
        arr2=[{'id':'1','name':'jkj'}]
        arr2.clear()
        arr3=[{'id':'1','name':'jkj'}]
        arr3.clear()
        arr4=[{'id':'1','name':'jkj'}]
        arr4.clear()
        page_count=''
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("select * from subject")
        for raw in cursor.fetchall():
            arr3.append({'id':raw[0],'name':raw[1]})
        cursor.execute("select * from track")
        for raw in cursor.fetchall():
            arr4.append({'id':raw[0],'name':raw[1]})
        cursor.execute("""select * from keyword""")
        for raw in cursor.fetchall():
            arr2.append({'id':raw[0],'name':raw[1]})
        arrx=[{'time':1,'type':1,'message':'sds'}]
        arrx.clear()
        arrx=notify(arrx)
        count=noticount()
        #page_count=0
        return render_template("Edit_Keyword.html",name="Keyword",page_count=page_count,img=img,c=count,arr=arrx,arr2=arr2,arr3=arr3,arr4=arr4,fname=fname)
@app.route('/keyword_1', methods=['POST'])
def keyword_1():
    """Insert a new keyword, linked to the selected track."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        t_id = request.form['track']
        name = request.form['keyword']
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("""insert into keyword (keyword_name,track_id) values(%s,%s)""", (name,t_id))
        db.commit()
        return "success"
        #return render_template("successful_message.html",page_count=page_count)
@app.route('/keyword_2', methods=['POST'])
def keyword_2():
    """Delete the keywords whose checkbox ids were posted."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        db = mysql.connect()
        cursor = db.cursor()
        key = request.form.getlist('key')
        # NOTE(review): the join/split round-trip of the posted list
        # looks redundant -- confirm before simplifying.
        keys=','.join(key)
        keyx=keys.split(",")
        for i in keyx:
            if i!='':
                cursor.execute("""delete from keyword where keyword_id=%s""", i)
                db.commit()
        return redirect(url_for('.keyword'))
@app.route('/track')
def track():
    """Render the track-editing page with all rows from the track table."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        id = session['id']
        fname=session['name']
        img=session['img']
        # Placeholder establishes list shape; cleared and refilled below.
        arr2=[{'id':'1','name':'jkj'}]
        arr2.clear()
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("""select * from track""")
        for raw in cursor.fetchall():
            arr2.append({'id':raw[0],'name':raw[1]})
        arrx=[{'time':1,'type':1,'message':'sds'}]
        arrx.clear()
        arrx=notify(arrx)
        count=noticount()
        return render_template("Edit_Track.html",name="Track",c=count,arr=arrx,img=img,arr2=arr2,fname=fname)
@app.route('/track_1', methods=['POST'])
def track_1():
    """Insert a new track name posted from the edit form."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        name = request.form['Tname']
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("""insert into track (track_name) values(%s)""", name)
        db.commit()
        return "success"
@app.route('/track_2', methods=['POST'])
def track_2():
    """Delete the tracks whose checkbox ids were posted."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        db = mysql.connect()
        cursor = db.cursor()
        key = request.form.getlist('key')
        keys=','.join(key)
        keyx=keys.split(",")
        # NOTE(review): a second cursor is created here, shadowing the
        # one above -- appears redundant; confirm before removing.
        cursor = db.cursor()
        for i in keyx:
            if i!='':
                cursor.execute("""delete from track where track_id=%s""", i)
                db.commit()
        return "success"
@app.route('/subject')
def subject():
    """Render the subject-editing page with subjects and their tracks."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        id = session['id']
        fname=session['name']
        img=session['img']
        # Placeholders establish list shapes; cleared and refilled below.
        arr2=[{'id':'1','name':'jkj'}]
        arr2.clear()
        arr4=[{'id':'1','name':'jkj'}]
        arr4.clear()
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("select * from track")
        for raw in cursor.fetchall():
            arr4.append({'id':raw[0],'name':raw[1]})
        cursor.execute("""select * from subject """,)
        for raw in cursor.fetchall():
            arr2.append({'id':raw[0],'name':raw[1]})
        arrx=[{'time':1,'type':1,'message':'sds'}]
        arrx.clear()
        arrx=notify(arrx)
        count=noticount()
        return render_template("edit_Subject.html",name="Subject",img=img,c=count,arr=arrx,arr2=arr2,arr4=arr4,fname=fname)
@app.route('/subject_1', methods=['POST'])
def subject_1():
    """Insert a new subject, linked to the selected track."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        t_id = request.form['track']
        name = request.form['Sname']
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("""insert into subject (subject_name,track_id) values(%s,%s)""", (name,t_id))
        db.commit()
        return redirect(url_for('.subject'))
@app.route('/subject_2', methods=['POST'])
def subject_2():
    """Delete the subjects whose checkbox ids were posted."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        key = request.form.getlist('key')
        # NOTE(review): the join/split round-trip looks redundant --
        # confirm before simplifying.
        keys=','.join(key)
        keyx=keys.split(",")
        db = mysql.connect()
        cursor = db.cursor()
        for i in keyx:
            if i!='':
                cursor.execute("""delete from subject where subject_id=%s""", i)
                db.commit()
        return "Deleted Successfully"
@app.route('/admin_announcements')
def admin_announcements():
    """Render the announcements page with users, experts and existing announcements."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        id = session['id']
        fname=session['name']
        img=session['img']
        db = mysql.connect()
        cursor = db.cursor()
        # Placeholders establish list shapes; cleared and refilled below.
        arry=[{'id':'1','details':'asa','relate':'as','date':'21'}]
        arr1=[{'id':'1','email':'asa','fname':'sasas','lname':'sasas'}]
        arr2=[{'id':'1','email':'asa','fname':'sasas','lname':'sasas'}]
        arr1.clear()
        arr2.clear()
        arry.clear()
        cursor.execute("select user_id,email_id,first_name,last_name from user_information")
        for raw in cursor.fetchall():
            arr1.append({'id':raw[0],'email':raw[1],'fname':raw[2],'lname':raw[3]})
        cursor.execute("select expert_id,email_id,first_name,last_name from expert_information")
        for raw in cursor.fetchall():
            arr2.append({'id':raw[0],'email':raw[1],'fname':raw[2],'lname':raw[3]})
        cursor.execute("select announcement_id,announcement_details,related_to,announcements_date from announcement")
        for raw in cursor.fetchall():
            arry.append({'id':raw[0],'details':raw[1],'relate':raw[2],'date':raw[3]})
        arrx=[{'time':1,'type':1,'message':'sds'}]
        arrx.clear()
        arrx=notify(arrx)
        count=noticount()
        return render_template("admin_announcement.html",name="Announcement",img=img,c=count,k=arr1,l=arr2,arr=arrx,fname=fname,arry=arry)
@app.route('/admin_announcements_1', methods=['POST'])
def admin_announcements_1():
    """Insert a new announcement, stamped with the current UTC time."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        id = session['id']
        name = request.form['Aname']
        relate=request.form['relate']
        dd=datetime.utcnow()
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("""insert into announcement (admin_id,announcement_details,related_to,announcements_date) values(%s,%s,%s,%s)""", (id,name,relate,dd))
        db.commit()
        return redirect(url_for('.admin_announcements'))
@app.route("/delete_announcement")
def delete_announcement():
    """Delete the announcement whose id arrives as a query parameter."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        a_id= request.args.get('id', default='', type=str)
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("delete from announcement where announcement_id=%s",a_id)
        db.commit()
        return redirect(url_for('.admin_announcements'))
@app.route('/site_wide_announcements')
def site_wide_announcements():
    """Admin page listing every site-wide announcement.

    Fix: removed the dummy-populate-then-clear list initialisation; rows are
    collected with a comprehension.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname = session['name']
        img = session['img']
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("select an_id,date_announcement,description from site_wide_announcement")
        # One dict per announcement row.
        arr1 = [{'a_id': r[0], 'date': r[1], 'dec': r[2]} for r in cursor.fetchall()]
        # Notification feed and unread count for the page header.
        arrx = notify([])
        count = noticount()
        return render_template("site_wide_announcements.html", name="Site Wide Announcement", arr1=arr1, img=img, c=count, arr=arrx, fname=fname)
@app.route('/admin_announcements_2', methods=['POST'])
def admin_announcements_2():
    """Create a site-wide announcement from the posted form."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        description = request.form['Aname_site']
        created_at = datetime.utcnow()
        connection = mysql.connect()
        cur = connection.cursor()
        cur.execute(
            """insert into site_wide_announcement (date_announcement,description) values(%s,%s)""",
            (created_at, description))
        connection.commit()
        return redirect(url_for('.site_wide_announcements'))
@app.route("/delete_site_announcement")
def delete_site_announcement():
if 'id' not in session:
return render_template("signin_admin.html")
else:
a_id= request.args.get('id', default='', type=str)
db = mysql.connect()
cursor = db.cursor()
cursor.execute("delete from site_wide_announcement where an_id=%s",a_id)
db.commit()
return redirect(url_for('.site_wide_announcements'))
@app.route('/add_expert')
def add_expert():
    """Render the reviewer signup form for the admin.

    Fix: removed the dummy-populate-then-clear notification list; `notify`
    receives a fresh empty list directly.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname = session['name']
        img = session['img']
        # Notification feed and unread count for the page header.
        arrx = notify([])
        count = noticount()
        return render_template("expert_signup.html", name="Add Reviewer", img=img, c=count, arr=arrx, fname=fname)
@app.route('/add_expert_1', methods=['POST'])
def add_expert_1():
    # Create a new expert/reviewer account from the signup form, saving the
    # uploaded profile picture and CV to disk and the record to the DB.
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        destination='temp'
        cv='temp'
        db = mysql.connect()
        cursor = db.cursor()
        # Profile picture: save under static/Img but store only the bare
        # filename in the DB; an empty upload falls back to Default.jpg.
        # NOTE(review): f.filename is used unsanitised — consider
        # werkzeug.utils.secure_filename to avoid path traversal.
        for f in request.files.getlist("file"):
            if f.filename == '':
                destination='Default.jpg'
            else:
                target=os.path.join('/home/tp/python_flask/Research_Paper/static','Img/')
                filename=f.filename
                destination = "/".join([target,filename])
                f.save(destination)
                destination=filename
        # CV upload, same scheme under static/Expert_CV.
        for f1 in request.files.getlist("cv"):
            if f1.filename == '':
                cv=''
            else:
                target1=os.path.join('/home/tp/python_flask/Research_Paper/static','Expert_CV/')
                filename1=f1.filename
                cv = "/".join([target1,filename1])
                f1.save(cv)
                cv=f1.filename
        dd=datetime.utcnow()
        first = request.form['fname']
        middle = request.form['mname']
        last = request.form['lname']
        gender = request.form['gn']
        email = request.form['email']
        mobile = request.form['mobile']
        password = request.form['password']
        skill = request.form['skill']
        dob = request.form['date']
        exp = request.form['experience']
        exp_words = request.form['experience_words']
        # NOTE(review): password is stored in plain text — should be hashed.
        cursor.execute("""insert into expert_information (first_name,middle_name,last_name,mobile_no,email_id,password,date_of_birth,gender,skills,total_experience,experience_in_words,profile_pic,date_registration,expert_cv) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)""", (first,middle,last,mobile,email,password,dob,gender,skill,exp,exp_words,destination,dd,cv))
        db.commit()
        return redirect(url_for('.add_expert'))
@app.route('/list_experts')
def list_experts():
    """List every reviewer with reviewed / remaining / total paper counts.

    Fix: removed the dummy-populate-then-clear list; the single-row COUNT(*)
    results are read with fetchone() instead of a loop.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname = session['name']
        img = session['img']
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("Select expert_id,first_name from expert_information")
        arr = []
        for expert in cursor.fetchall():
            # Papers still awaiting review (statuses 6 and 7).
            cursor.execute("Select count(paper_id) from paper_of_expert where expert_id=%s and (status_id=7 or status_id=6)", expert[0])
            remaining = cursor.fetchone()[0]
            # Papers already reviewed (statuses 2-5).
            cursor.execute("Select count(paper_id) from paper_of_expert where expert_id=%s and (status_id=2 or status_id=3 or status_id=4 or status_id=5)", expert[0])
            reviewed = cursor.fetchone()[0]
            arr.append({'id': expert[0], 'first_name': expert[1], 'Remaining': remaining, 'Reviewed': reviewed, 'Total': remaining + reviewed})
        arrx = notify([])
        count = noticount()
        return render_template("admin_list_experts.html", name="List Of Reviewers", k=arr, arr=arrx, c=count, img=img, fname=fname)
@app.route('/expert_details')
def expert_details():
    """List every paper assigned to one reviewer (`e_id`), with title/status.

    Fix: `title`/`last` were assigned only inside result loops, so a paper
    with no submission row would reuse the previous paper's values (or raise
    NameError on the first paper); they are now reset per paper.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname = session['name']
        img = session['img']
        mode = request.args.get('mode', default='', type=str)
        e_id = request.args.get('e_id', default='', type=str)
        arr = []
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("select paper_id,status_name from paper_of_expert,status where paper_of_expert.status_id=status.status_id and expert_id=%s ", e_id)
        for raw in cursor.fetchall():
            # Title and modification date come from the paper's latest submission.
            title = ''
            last = ''
            cursor.execute("select MAX(submission_id) from submission_of_paper where paper_id=%s", raw[0])
            for raw1 in cursor.fetchall():
                cursor.execute("select title,last_modified_date from submission_of_paper where submission_id=%s", raw1[0])
                for raw2 in cursor.fetchall():
                    title = raw2[0]
                    last = raw2[1]
            cursor.execute("select creation_date from paper_creation,status where paper_creation.status_id=status.status_id and paper_id=%s", raw[0])
            for raw3 in cursor.fetchall():
                arr.append({'paper_id': raw[0], 'title': title, 'creation_date': raw3[0], 'last_modified_date': last, 'status': raw[1]})
        arrx = notify([])
        count = noticount()
        return render_template("admin_expert_details.html", name="Papers of Reviewer", mode=mode, eid=e_id, arr=arrx, c=count, img=img, k=arr, fname=fname)
# @app.route('/list_papers')
# def list_papers():
# id = session['id']
# if id=='':
# return render_template("signin.html")
# else:
# fname=session['name']
# cursor.execute("Select paper_id from paper_creation")
# arr=[{'id':'12','f_name':'harsh','l_name':'shah','topic':'shyam','creation_date':'asd','last':'asd','status':'asasd'}]
# arr.clear()
# for raw in cursor.fetchall():
# cursor.execute("Select status_name,creation_date from paper_creation,status where paper_creation.status_id=status.status_id and paper_id=%s",raw[0])
# for raw2 in cursor.fetchall():
# id1=raw2[0]
# id2=raw2[1]
# cursor.execute("Select user_id from paper_creation where paper_id=%s",raw[0])
# u_id=cursor.fetchone()
# cursor.execute("Select first_name,last_name from user_information where user_id=%s",u_id)
# for raw3 in cursor.fetchall():
# name1=raw3[0]
# name2=raw3[1]
# cursor.execute("Select MAX(submission_id) from submission_of_paper where paper_id=%s",raw[0])
# id=cursor.fetchone()
# cursor.execute("Select title,last_modified_date from submission_of_paper where submission_id=%s",id)
# for raw1 in cursor.fetchall():
# arr.append({'id':id,'f_name':name1,'l_name':name2,'topic':raw1[0],'creation_date':id2,'last':raw1[1],'status':id1})
# return render_template("admin_list_papers.html",name="List Of Papers",k=arr,fname=fname)
@app.route('/admin_paper_details')
def admin_paper_details():
    """Show the latest submission of one paper: title row, keywords, track, subject.

    Bug fix: `a_id` was only bound when mode=='author_paper', and `track`/`sub`
    were only bound when their queries returned rows — either case raised a
    NameError at render time. All template arguments now have defaults.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname = session['name']
        img = session['img']
        i_id = request.args.get('i_id', default='', type=str)
        p_id = request.args.get('p_id', default='', type=str)
        search = request.args.get('search', default='', type=str)
        mode = request.args.get('mode', default='', type=str)
        # Defaults so every template argument is bound on every mode path.
        a_id = 0
        e_id = 0
        u_id = 0
        if mode == 'expert_':
            e_id = request.args.get('e_id', default='', type=str)
        elif mode == 'author_paper':
            a_id = request.args.get('a_id', default='', type=str)
        else:
            u_id = request.args.get('u_id', default='', type=str)
        key = []
        arr = []
        track = ''
        sub = ''
        db = mysql.connect()
        cursor = db.cursor()
        # The latest submission of the paper drives every detail shown.
        cursor.execute("select MAX(submission_id) from submission_of_paper where paper_id=%s", p_id)
        idd = cursor.fetchone()
        cursor.execute("select * from submission_of_paper where submission_id=%s", idd)
        for raw in cursor.fetchall():
            arr = raw
        cursor.execute("Select keyword_name from paper_keyword where paper_id=%s and submission_id=%s", (p_id, idd))
        for i in cursor.fetchall():
            key.append({'name': i[0]})
        cursor.execute("select track_name from track,submission_of_paper where submission_of_paper.track_id=track.track_id and submission_id=%s", idd)
        for r in cursor.fetchall():
            track = r[0]
        cursor.execute("select subject_name from subject,submission_of_paper where submission_of_paper.subject_id=subject.subject_id and submission_id=%s", idd)
        for s in cursor.fetchall():
            sub = s[0]
        arrx = notify([])
        count = noticount()
        return render_template("admin_paper_details.html", name="Paper Details", search=search, a_id=a_id, i_id=i_id, mode=mode, img=img, eid=e_id, uid=u_id, arr=arr, keyw=key, track=track, sub=sub, fname=fname, pp_id=p_id, arr1=arrx, c=count)
@app.route('/list_users')
def list_users():
    """List every user with the number of papers they created.

    Fix: removed the dummy-populate-then-clear list; the single-row COUNT(*)
    result is read with fetchone().
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname = session['name']
        img = session['img']
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("Select user_id,first_name from user_information")
        arr = []
        for raw in cursor.fetchall():
            cursor.execute("Select count(paper_id) from paper_creation where user_id=%s", raw[0])
            total = cursor.fetchone()[0]
            arr.append({'user_id': raw[0], 'first_name': raw[1], 'Total': total})
        arrx = notify([])
        count = noticount()
        return render_template("admin_list_users.html", name="List Of Users", img=img, k=arr, fname=fname, arr=arrx, c=count)
@app.route('/admin_user_details')
def admin_user_details():
    """List every paper created by one user (`u_id`), with title/date/status.

    Fix: the append used loop variables (`raw2`) leaked from an inner loop, so
    a paper without submissions reused the previous paper's title/date (or
    raised NameError); both values are now reset per paper.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname = session['name']
        img = session['img']
        u_id = request.args.get('u_id', default='', type=str)
        arr = []
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("select paper_id from paper_creation where user_id=%s", u_id)
        for raw in cursor.fetchall():
            # Title/date come from the paper's latest submission.
            title = ''
            last = ''
            cursor.execute("select MAX(submission_id) from submission_of_paper where paper_id=%s", raw[0])
            for raw1 in cursor.fetchall():
                cursor.execute("select title,last_modified_date from submission_of_paper where submission_id=%s", raw1[0])
                for raw2 in cursor.fetchall():
                    title = raw2[0]
                    last = raw2[1]
            cursor.execute("select creation_date,status_name from paper_creation,status where paper_creation.status_id=status.status_id and paper_id=%s", raw[0])
            for raw3 in cursor.fetchall():
                arr.append({'paper_id': raw[0], 'title': title, 'creation_date': raw3[0], 'last_modified_date': last, 'status': raw3[1]})
        arrx = notify([])
        count = noticount()
        return render_template("admin_expert_details.html", name="Papers of User", k=arr, uid=u_id, img=img, fname=fname, arr=arrx, c=count)
@app.route('/allocation')
def allocation():
    """Reviewer-allocation page: unallocated papers plus all reviewers.

    Fix: removed the dummy-populate-then-clear lists; the reviewer list is
    built with a comprehension.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname = session['name']
        img = session['img']
        arr = []
        db = mysql.connect()
        cursor = db.cursor()
        # Papers not yet handed to any reviewer.
        cursor.execute("select paper_id from paper_creation where status_allocation=%s", 'false')
        for raw in cursor.fetchall():
            paper_id = raw[0]
            cursor.execute("select MAX(submission_id) from submission_of_paper where paper_id=%s", paper_id)
            for raw2 in cursor.fetchall():
                cursor.execute("select title from submission_of_paper where submission_id=%s", raw2[0])
                for raw1 in cursor.fetchall():
                    arr.append({'id': paper_id, 'title': raw1[0]})
        cursor.execute("select expert_id,first_name,last_name from expert_information")
        arr1 = [{'id': r[0], 'fname': r[1], 'lname': r[2]} for r in cursor.fetchall()]
        # x tells the template whether there is nothing left to allocate.
        x = 'true' if not arr else 'false'
        arrx = notify([])
        count = noticount()
        return render_template("allocation.html", name="Allocation Of Paper", img=img, k=arr, l=arr1, x=x, fname=fname, arr=arrx, c=count)
@app.route('/allocation_1', methods=['POST'])
def allocation_1():
    """Assign the selected reviewers to every chosen paper, then reload."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        expert_csv = request.form['temp_12']
        paper_ids = request.form.getlist('papers_title')
        expert_ids = expert_csv.split(",")
        connection = mysql.connect()
        cur = connection.cursor()
        for paper in paper_ids:
            for expert in expert_ids:
                # A 0 id is the "no reviewer" placeholder sent by the form.
                if int(expert) != 0:
                    cur.execute("""insert into paper_of_expert (expert_id,paper_id,status_id) values(%s,%s,%s)""", (int(expert), paper, 7))
                    cur.execute("insert into expert_notification (expert_id,n_id) values(%s,%s)", (expert, 1))
            cur.execute('''update paper_creation set status_allocation="true" where paper_id=%s''', paper)
            cur.execute('''update paper_creation set status_id=7 where paper_id=%s''', paper)
        connection.commit()
        return redirect(url_for('.allocation'))
@app.route('/add_admin')
def add_admin():
    """Render the admin signup form.

    Fix: removed the dummy-populate-then-clear notification list; `notify`
    receives a fresh empty list directly.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname = session['name']
        img = session['img']
        # Notification feed and unread count for the page header.
        arrx = notify([])
        count = noticount()
        return render_template("admin_signup.html", name="Add Admin", fname=fname, img=img, c=count, arr=arrx)
@app.route('/add_admin_1', methods=['POST'])
def add_admin_1():
    """Create a new admin account from the signup form (with profile picture).

    Fix: removed the unused local `ext`.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        db = mysql.connect()
        cursor = db.cursor()
        destination = 'temp'
        # Profile picture: save under static/Img but store only the bare
        # filename in the DB; an empty upload falls back to Default.jpg.
        # NOTE(review): f.filename is used unsanitised — consider
        # werkzeug.utils.secure_filename to avoid path traversal.
        for f in request.files.getlist("file"):
            if f.filename == '':
                destination = 'Default.jpg'
            else:
                target = os.path.join('/home/tp/python_flask/Research_Paper/static', 'Img/')
                filename = f.filename
                destination = "/".join([target, filename])
                f.save(destination)
                destination = filename
        dd = datetime.utcnow()
        first = request.form['fname']
        middle = request.form['mname']
        last = request.form['lname']
        gender = request.form['gn']
        email = request.form['email']
        mobile = request.form['mobile']
        password = request.form['password']
        skill = request.form['skill']
        dob = request.form['date']
        # NOTE(review): password is stored in plain text — should be hashed.
        cursor.execute("""insert into admin_information (first_name,middle_name,last_name,mobile_no,email_id,password,gender,skills,profile_pic,date_of_birth,date_registration) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)""", (first, middle, last, mobile, email, password, gender, skill, destination, dob, dd))
        db.commit()
        return redirect(url_for('.add_admin'))
@app.route('/validate_email_admin', methods=['POST'])
def validate_email_admin():
    """Ajax check: "wrong" if the posted email already belongs to an admin.

    Fix: replaced the manual row-counting loop with a truthiness test on the
    fetched rows.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        email = request.form['email']
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("select * from admin_information where email_id=%s", email)
        # Any matching row means the address is already taken.
        if cursor.fetchall():
            return "wrong"
        else:
            return "success"
@app.route('/validate_email_expert', methods=['POST'])
def validate_email_expert():
    """Ajax check: "wrong" if the posted email already belongs to an expert.

    Fix: replaced the manual row-counting loop with a truthiness test on the
    fetched rows.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        email = request.form['email']
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("select * from expert_information where email_id=%s", email)
        # Any matching row means the address is already taken.
        if cursor.fetchall():
            return "wrong"
        else:
            return "success"
@app.route('/validate_email_admin_1', methods=['POST'])
def validate_email_admin_1():
    """Ajax check used when editing a profile: the admin's own current email is
    always valid; any other admin's email returns "wrong".

    Fix: single-row lookups use fetchone(); the manual row-counting loop is a
    truthiness test on the fetched rows.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        id = session['id']
        email = request.form['email']
        db = mysql.connect()
        cursor = db.cursor()
        # Current email of the logged-in admin (one row per admin_id).
        cursor.execute("select email_id from admin_information where admin_id=%s", id)
        row = cursor.fetchone()
        current = row[0] if row else ''
        if current == email:
            return "success"
        else:
            cursor.execute("select * from admin_information where email_id=%s", email)
            # Any matching row means another admin already uses this address.
            if cursor.fetchall():
                return "wrong"
            else:
                return "success"
@app.route('/admin_comments', methods=['POST'])
def admin_comments():
    """List reviewer comments (statuses 4-6) on the paper given by `pp_id`.

    Fix: removed the dummy-populate-then-clear list initialisations.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname = session['name']
        img = session['img']
        arr = []
        p_id = request.args.get('pp_id', default='', type=str)
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("Select date_of_comment,comment_message,expert_id from expert_comment where paper_id=%s and (status_id=4 or status_id=5 or status_id=6)", p_id)
        for raw in cursor.fetchall():
            # Reviewer's display name.
            cursor.execute("select first_name from expert_information where expert_id=%s", raw[2])
            for raw3 in cursor.fetchall():
                e_name = raw3[0]
            # Title comes from the paper's latest submission.
            cursor.execute("select MAX(submission_id) from submission_of_paper where paper_id=%s", p_id)
            for raw1 in cursor.fetchall():
                cursor.execute("select title from submission_of_paper where submission_id=%s", raw1[0])
                for raw2 in cursor.fetchall():
                    arr.append({'title': raw2[0], 'date': raw[0], 'msg': raw[1], 'e_name': e_name})
        arrx = notify([])
        count = noticount()
        return render_template("admin_comments.html", name="List Of Comments", k=arr, img=img, fname=fname, c=count, arr=arrx)
@app.route('/admin_change_pass')
def admin_change_pass():
    """Render the change-password form for the logged-in admin.

    Fix: removed the dummy-populate-then-clear notification list.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname = session['name']
        img = session['img']
        # Notification feed and unread count for the page header.
        arrx = notify([])
        count = noticount()
        return render_template("admin_change_password.html", name="Change Password", fname=fname, img=img, c=count, arr=arrx)
@app.route('/admin_change_pass_1', methods=['POST'])
def admin_change_pass_1():
    """Change the admin's password when the posted old password matches.

    Bug fix: the original read the stored password in a loop, so a missing
    admin row left `pas` unbound / fell through returning None (HTTP 500);
    it now always returns "success" or "wrong".
    NOTE(review): passwords are stored and compared in plain text — should be
    hashed (e.g. werkzeug.security.generate_password_hash).
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        id = session['id']
        old = request.form['old']
        new = request.form['new1']
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("select password from admin_information where admin_id=%s", id)
        row = cursor.fetchone()
        if row is not None and row[0] == old:
            cursor.execute("update admin_information set password=%s where admin_id=%s", (new, id))
            db.commit()
            return "success"
        else:
            return "wrong"
@app.route('/admin_archived')
def admin_archived():
    """Archived-papers page: every volume with its issues, for browsing.

    Fix: removed the dummy-populate-then-clear list initialisations.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname = session['name']
        img = session['img']
        arr = []
        arr1 = []
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("select * from volume")
        for raw in cursor.fetchall():
            arr.append({'id': raw[0], 'name': raw[1]})
            # Issues belonging to this volume.
            cursor.execute("select * from issue where volume_id=%s", raw[0])
            for raw1 in cursor.fetchall():
                arr1.append({'id': raw1[0], 'name': raw1[1], 'volume': raw[0]})
        arrx = notify([])
        count = noticount()
        return render_template("admin_archived_paper.html", name="Archived Paper", arr2=arr, arr1=arr1, img=img, fname=fname, c=count, arr=arrx)
# s_id = session['id']
# if s_id=='':
# return render_template("signin.html")
# else:
# fname = session['name']
# arr=[{'id':'1','name':'as'}]
# arr.clear()
# cursor.execute("select * from volume")
# for raw in cursor.fetchall():
# arr.append({'id':raw[0],'name':raw[1]})
# return render_template("admin_archived_paper.html",name="Archived Paper",k=arr,fname=fname)
@app.route('/admin_pending')
def admin_pending():
    """List papers still in review (statuses 2-7) not yet assigned to an issue.

    Fix: removed the dummy-populate-then-clear list initialisation.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname = session['name']
        img = session['img']
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("Select paper_id from paper_creation where (status_id=%s or status_id=%s or status_id=%s or status_id=%s or status_id=%s or status_id=%s) and issue_id is null", (2, 3, 4, 5, 6, 7))
        arr = []
        for raw in cursor.fetchall():
            # Status name and creation date for this paper.
            cursor.execute("Select status_name,creation_date from paper_creation,status where paper_creation.status_id=status.status_id and paper_id=%s", raw[0])
            for raw2 in cursor.fetchall():
                status_name = raw2[0]
                created = raw2[1]
            # Author's first name.
            cursor.execute("Select user_id from paper_creation where paper_id=%s", raw[0])
            u_id = cursor.fetchone()
            cursor.execute("Select first_name from user_information where user_id=%s", u_id)
            for raw3 in cursor.fetchall():
                name1 = raw3[0]
            # Title/date come from the latest submission.
            cursor.execute("Select MAX(submission_id) from submission_of_paper where paper_id=%s", raw[0])
            sub_id = cursor.fetchone()
            cursor.execute("Select title,last_modified_date from submission_of_paper where submission_id=%s", sub_id)
            for raw1 in cursor.fetchall():
                arr.append({'id': raw[0], 'f_name': name1, 'topic': raw1[0], 'creation_date': created, 'last': raw1[1], 'status': status_name})
        arrx = notify([])
        count = noticount()
        return render_template("admin_pending_paper.html", name="Pending Paper", k=arr, img=img, fname=fname, arr=arrx, c=count)
@app.route('/admin_new')
def admin_new():
    """List newly submitted papers (status 1).

    Fix: removed the dummy-populate-then-clear list initialisation.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname = session['name']
        img = session['img']
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("Select paper_id from paper_creation where status_id=%s", 1)
        arr = []
        for raw in cursor.fetchall():
            # Status name and creation date for this paper.
            cursor.execute("Select status_name,creation_date from paper_creation,status where paper_creation.status_id=status.status_id and paper_id=%s", raw[0])
            for raw2 in cursor.fetchall():
                status_name = raw2[0]
                created = raw2[1]
            # Author's first name.
            cursor.execute("Select user_id from paper_creation where paper_id=%s", raw[0])
            u_id = cursor.fetchone()
            cursor.execute("Select first_name from user_information where user_id=%s", u_id)
            for raw3 in cursor.fetchall():
                name1 = raw3[0]
            # Title/date come from the latest submission.
            cursor.execute("Select MAX(submission_id) from submission_of_paper where paper_id=%s", raw[0])
            sub_id = cursor.fetchone()
            cursor.execute("Select title,last_modified_date from submission_of_paper where submission_id=%s", sub_id)
            for raw1 in cursor.fetchall():
                arr.append({'id': raw[0], 'f_name': name1, 'topic': raw1[0], 'creation_date': created, 'last': raw1[1], 'status': status_name})
        arrx = notify([])
        count = noticount()
        return render_template("admin_list_papers.html", name="New Paper", k=arr, img=img, fname=fname, c=count, arr=arrx)
@app.route('/volume')
def volume():
    """Volume management page: every volume with its issues.

    Fix: removed the dummy-populate-then-clear list initialisations.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname = session['name']
        img = session['img']
        arr = []
        arr1 = []
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("select * from volume")
        for raw in cursor.fetchall():
            arr.append({'id': raw[0], 'name': raw[1]})
            # Issues belonging to this volume (column 3 is the issue number).
            cursor.execute("select * from issue where volume_id=%s", raw[0])
            for raw1 in cursor.fetchall():
                arr1.append({'id': raw1[0], 'name': raw1[1], 'volume': raw[0], 'i_no': raw1[3]})
        arrx = notify([])
        count = noticount()
        return render_template("admin_add_volume.html", name="Volume", arr2=arr, arr1=arr1, img=img, fname=fname, arr=arrx, c=count)
@app.route('/volume_1', methods=['POST'])
def volume_1():
    """Create a new volume unless one with the same name already exists.

    Fix: replaced the manual counter/break loop with any(). The duplicate
    check stays in Python to keep the original exact (case-sensitive)
    comparison semantics rather than relying on the DB collation.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        name = request.form['Vname']
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("select volume_name from volume")
        exists = any(row[0] == name for row in cursor.fetchall())
        if not exists:
            cursor.execute("""insert into volume (volume_name) values(%s)""", name)
            db.commit()
            return redirect(url_for('.volume'))
        else:
            return "Already Created volume"
@app.route('/issue')
def issue():
    """Issue management page: every volume with its issues.

    Fix: removed the dummy-populate-then-clear list initialisations and the
    dead commented-out code.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname = session['name']
        img = session['img']
        arr = []
        arr1 = []
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("select * from volume")
        for raw in cursor.fetchall():
            arr.append({'id': raw[0], 'name': raw[1]})
            # Issues belonging to this volume (column 3 is the issue number).
            cursor.execute("select * from issue where volume_id=%s", raw[0])
            for raw1 in cursor.fetchall():
                arr1.append({'id': raw1[0], 'name': raw1[1], 'volume': raw[0], 'i_no': raw1[3]})
        arrx = notify([])
        count = noticount()
        return render_template("admin_add_issue.html", name="Issue", arr2=arr, img=img, arr1=arr1, fname=fname, arr=arrx, c=count)
@app.route('/issue_check', methods=['POST'])
def issue_check():
    """Ajax check: "wrong" if the issue number already exists in the volume.

    Fix: the int() conversion of the form value was loop-invariant and is now
    hoisted; the manual counter is replaced with any().
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        issue_no = request.form['issue']
        v_id = request.form['volume']
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("select issue_no from issue where volume_id=%s", v_id)
        rows = cursor.fetchall()
        taken = False
        if rows:
            # The form sends a string; convert once (only when rows exist,
            # matching the original's behaviour on an empty result).
            wanted = int(issue_no)
            taken = any(row[0] == wanted for row in rows)
        if taken:
            return "wrong"
        else:
            return "success"
@app.route('/issue_1', methods=['POST'])
def issue_1():
    """Create a new issue named "<start> to <end>" inside the chosen volume."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        volume_id = request.form['volume']
        issue_number = request.form['issue_no']
        start_month = request.form['start_month']
        end_month = request.form['end_month']
        issue_name = start_month + ' to ' + end_month
        connection = mysql.connect()
        cur = connection.cursor()
        cur.execute(
            """insert into issue (issue_name,volume_id,issue_no) values(%s,%s,%s)""",
            (issue_name, volume_id, issue_number))
        connection.commit()
        return redirect(url_for('.issue'))
# cursor.execute("""insert into issue (issue_name,volume_id,issue_no) values(%s,%s,%s)""", (name,v_id,issue_no))
# db.commit()
# return redirect(url_for('.issue'))
@app.route('/issue_paper_list')
def issue_paper_list():
    """List the papers published in one issue (`i_id` query argument).

    Fix: removed the dummy-populate-then-clear list initialisation.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname = session['name']
        img = session['img']
        i_id = request.args.get('i_id', default='', type=str)
        arr = []
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("select paper_id from paper_creation where issue_id=%s", i_id)
        for raw in cursor.fetchall():
            # Author's name.
            cursor.execute("Select user_id from paper_creation where paper_id=%s", raw[0])
            u_id = cursor.fetchone()
            cursor.execute("Select first_name,last_name from user_information where user_id=%s", u_id)
            for raw3 in cursor.fetchall():
                name1 = raw3[0]
                name2 = raw3[1]
            # Title/date come from the latest submission; its id (a one-tuple
            # from fetchone, as in the original) is what the template links to.
            cursor.execute("Select MAX(submission_id) from submission_of_paper where paper_id=%s", raw[0])
            sub_id = cursor.fetchone()
            cursor.execute("Select title,last_modified_date from submission_of_paper where submission_id=%s", sub_id)
            for raw1 in cursor.fetchall():
                arr.append({'id': sub_id, 'f_name': name1, 'l_name': name2, 'topic': raw1[0], 'last': raw1[1]})
        arrx = notify([])
        count = noticount()
        return render_template("admin_issue_list_paper.html", name="List Of Papers", i_id=i_id, img=img, k=arr, fname=fname, arr=arrx, c=count)
@app.route('/paper_add_volume')
def paper_add_volume():
    """Page for placing accepted papers (status 8) into a volume/issue.

    Fix: removed the dummy-populate-then-clear list initialisations; the
    volume/issue lists are built with comprehensions.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        fname = session['name']
        img = session['img']
        arr = []
        db = mysql.connect()
        cursor = db.cursor()
        # Accepted papers, identified by their latest submission's id/title.
        cursor.execute("select paper_id from paper_creation where status_id=%s", (8))
        for raw in cursor.fetchall():
            paper_id = raw[0]
            cursor.execute("select MAX(submission_id) from submission_of_paper where paper_id=%s", paper_id)
            for raw2 in cursor.fetchall():
                cursor.execute("select title from submission_of_paper where submission_id=%s", raw2[0])
                for raw1 in cursor.fetchall():
                    arr.append({'id': raw2[0], 'title': raw1[0]})
        cursor.execute("select * from volume")
        arr1 = [{'id': r[0], 'year': r[1]} for r in cursor.fetchall()]
        cursor.execute("select * from issue")
        arr2 = [{'id': r[0], 'month': r[1], 'no': r[3]} for r in cursor.fetchall()]
        # x tells the template whether there is nothing to publish.
        x = 'true' if not arr else 'false'
        arrx = notify([])
        count = noticount()
        return render_template("admin_paper_in_volume.html", name="Add Paper In Volume", k=arr, l=arr1, m=arr2, x=x, img=img, fname=fname, arr=arrx, c=count)
@app.route('/paper_add_volume_1', methods=['POST'])
def paper_add_volume_1():
    """Publish an accepted paper into an issue.

    Updates the paper's issue/status, notifies the author, saves the final
    file, and clones the latest submission row plus its keywords so the
    published file becomes the newest submission.
    Fix: removed the unused local `ext`.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        y_name = request.form['year']   # read but unused; kept so a missing field still fails loudly
        m_name = request.form['month']  # issue id chosen on the form
        subm_id = request.form['paper']
        paper_path = ''
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("select paper_id from submission_of_paper where submission_id=%s", subm_id)
        for raw in cursor.fetchall():
            p_id = raw[0]
        # Attach the paper to the issue and mark it published (status 9).
        cursor.execute('''update paper_creation set issue_id=%s,status_id=%s where paper_id=%s''', (m_name, 9, p_id))
        cursor.execute("select user_id from paper_creation where paper_id=%s", p_id)
        u_id = cursor.fetchone()
        cursor.execute("insert into user_notification (user_id,n_id) values(%s,%s)", (u_id, 6))
        # Save the final (camera-ready) file under static/Paper.
        # NOTE(review): f.filename is used unsanitised — consider secure_filename.
        for f in request.files.getlist("file"):
            target = os.path.join('/home/tp/python_flask/Research_Paper/static', 'Paper/')
            filename = f.filename
            destination = "/".join([target, filename])
            f.save(destination)
            paper_path = f.filename
        # Clone the latest submission row, pointing at the published file...
        cursor.execute("select MAX(submission_id) from submission_of_paper where paper_id=%s", p_id)
        id = cursor.fetchone()
        cursor.execute("select * from submission_of_paper where submission_id=%s", id)
        for raw in cursor.fetchall():
            cursor.execute("insert into submission_of_paper (title,abstract,last_modified_date,path,track_id,paper_id,subject_id) values(%s,%s,%s,%s,%s,%s,%s)", (raw[1], raw[2], raw[3], paper_path, raw[5], raw[6], raw[7]))
        # ...and copy its keywords onto the newly created submission.
        cursor.execute("select MAX(submission_id) from submission_of_paper where paper_id=%s", p_id)
        new_id = cursor.fetchone()
        cursor.execute("select keyword_name from paper_keyword where paper_id=%s and submission_id=%s", (p_id, id))
        for raw in cursor.fetchall():
            cursor.execute("insert into paper_keyword (keyword_name,submission_id,paper_id) values(%s,%s,%s)", (raw[0], new_id, p_id))
        db.commit()
        return redirect(url_for('.paper_add_volume'))
@app.route("/admin_search",methods=['POST','GET'])
def admin_search():
if 'id' not in session:
return render_template("signin_admin.html")
else:
s_id = session['id']
fname=session['name']
img=session['img']
search=request.args.get('search', default='', type=str)
if search=='':
search = request.form['search']
arr=[{'id':'1','title':'as','date':'x','i_id':'x','v_id':'y'}]
arr.clear()
db = mysql.connect()
cursor = db.cursor()
cursor.execute("select paper_id from paper_creation")
for raw6 in cursor.fetchall():
id6=raw6[0]
cursor.execute("select MAX(submission_id) from submission_of_paper where paper_id=%s",id6)
for raw7 in cursor.fetchall():
id7=raw7[0]
cursor.execute("select distinct paper_id from submission_of_paper where submission_id=%s and title like %s",(id7,"%" + search + "%"))
for raw in cursor.fetchall():
id1=raw[0]
cursor.execute("select paper_id,issue_id from paper_creation where paper_id=%s and issue_id is not null",id1)
for raw3 in cursor.fetchall():
id=raw3[0]
i_id=raw3[1]
cursor.execute("select volume_id from issue where issue_id=%s",i_id)
for raw4 in cursor.fetchall():
cursor.execute("select volume_name from volume where volume_id=%s",raw4[0])
for raw5 in cursor.fetchall():
v_id=raw5[0]
cursor.execute("select MAX(submission_id) from submission_of_paper where paper_id=%s",id)
for raw2 in cursor.fetchall():
cursor.execute("select title,last_modified_date from submission_of_paper where submission_id=%s",raw2[0])
for raw1 in cursor.fetchall():
arr.append({'id':id,'title':raw1[0],'date':raw1[1],'i_id':i_id,'v_id':v_id})
if not arr:
x='true'
else:
x='false'
arrx=[{'time':1,'type':1,'message':'sds'}]
arrx.clear()
arrx=notify(arrx)
count=noticount()
return render_template("admin_search.html",name="Search",search=search,k=arr,img=img,fname=fname,x=x,arr=arrx,c=count)
@app.route('/admin_summary_paper')
def admin_summary_paper():
    # Build the review history of one paper: every submission (newest first),
    # plus the expert comments and admin comments attached to each submission.
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        id = session['id']
        fname=session['name']
        img=session['img']
        p_id= request.args.get('p_id', default='', type=str)
        # Counts down so submissions are labelled N, N-1, ..., 1 (newest = N).
        # NOTE: `max` shadows the builtin of the same name inside this view.
        max=0
        submission=[{'count':'1','id':'1','title':'abc'}]
        submission.clear()
        admin_comment=[{'id':'1','comment':'asdsa'}]
        admin_comment.clear()
        comment=[{'id':'1','status':'asa','comment':'dsd','name':'abc'}]
        comment.clear()
        db = mysql.connect()
        cursor = db.cursor()
        # Current status row of the paper, passed through to the template.
        cursor.execute("select status_id from paper_creation where paper_id=%s",p_id)
        sst=cursor.fetchone()
        cursor.execute("select count(submission_id) from submission_of_paper where paper_id=%s",p_id)
        for raw in cursor.fetchall():
            max=raw[0]
        cursor.execute("select submission_id from submission_of_paper where paper_id=%s order by submission_id DESC",p_id)
        for raw in cursor.fetchall():
            cursor.execute("select title from submission_of_paper where submission_id=%s",raw[0])
            for raw1 in cursor.fetchall():
                submission.append({'count':max,'id':raw[0],'title':raw1[0]})
            max=max-1
        # Gather expert comments (with status and reviewer name) and admin
        # comments for each submission collected above.
        for raw in submission:
            cursor.execute("select comment_message,status_name,first_name from expert_comment,status,expert_information where status.status_id=expert_comment.status_id and expert_comment.expert_id=expert_information.expert_id and submission_id=%s",raw['id'])
            for raw1 in cursor.fetchall():
                comment.append({'id':raw['id'],'status':raw1[1],'comment':raw1[0],'name':raw1[2]})
            cursor.execute("select comment_message from admin_comment where paper_id=%s and submission_id=%s",(p_id,raw['id']))
            for raw2 in cursor.fetchall():
                admin_comment.append({'id':raw['id'],'comment':raw2[0]})
        arrx=[{'time':1,'type':1,'message':'sds'}]
        arrx.clear()
        arrx=notify(arrx)
        count=noticount()
        return render_template("admin_summary_paper.html",name="Summary of paper",s=submission,d=comment,a=admin_comment,img=img,fname=fname,p_id=p_id,arr=arrx,c=count,sst=sst)
@app.route('/admin_submission_details')
def admin_submission_details():
    # Show one submission of a paper: the full submission row, its keywords,
    # and the track/subject names it was filed under.
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        id = session['id']
        fname=session['name']
        img=session['img']
        p_id= request.args.get('p_id', default='', type=str)
        s_id= request.args.get('s_id', default='', type=str)
        # View-mode flag, passed straight through to the template.
        mode= request.args.get('mode', default='', type=str)
        key=[{'name':'hgh'}]
        key.clear();
        arr=['a']
        arr.clear()
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("select * from submission_of_paper where submission_id=%s",s_id)
        for raw in cursor.fetchall():
            arr=raw
        cursor.execute("Select keyword_name from paper_keyword where paper_id=%s and submission_id=%s",(p_id,s_id))
        for i in cursor.fetchall():
            key.append({'name':i[0]})
        cursor.execute("select track_name from track,submission_of_paper where submission_of_paper.track_id=track.track_id and submission_id=%s",s_id)
        for r in cursor.fetchall():
            track=r[0]
        cursor.execute("select subject_name from subject,submission_of_paper where submission_of_paper.subject_id=subject.subject_id and submission_id=%s",s_id)
        for s in cursor.fetchall():
            sub=s[0]
        # NOTE(review): `track`/`sub` stay unbound (NameError at render) when
        # the joins return no rows -- confirm s_id is always a valid id here.
        arrx=[{'time':1,'type':1,'message':'sds'}]
        arrx.clear()
        arrx=notify(arrx)
        count=noticount()
        return render_template("admin_paper_details.html",name="Paper Details",mode=mode,p_id=p_id,img=img,arr=arr,keyw=key,track=track,sub=sub,fname=fname,s_id=s_id,c=count,arr1=arrx)
@app.route('/admin_review_comment')
def admin_review_comment():
    # List every expert comment on the LATEST submission of a paper so the
    # admin can select which ones to forward with the final decision.
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        id = session['id']
        fname=session['name']
        img=session['img']
        paper= request.args.get('pp_id', default='', type=str)
        arr=[{'c_id':'1','comment':'aas','status':'asa','name':'asa'}]
        arr.clear()
        db = mysql.connect()
        cursor = db.cursor()
        # MAX(...) always yields exactly one row; s_id is the latest
        # submission id (None when the paper has no submissions).
        cursor.execute("select MAX(submission_id) from submission_of_paper where paper_id=%s",paper)
        for raw in cursor.fetchall():
            s_id=raw[0]
            # Inner loop deliberately reuses the name `raw`; the outer result
            # set was already materialized by fetchall().
            cursor.execute("select comment_id,comment_message,first_name,status_name from expert_comment,expert_information,status where submission_id=%s and status.status_id=expert_comment.status_id and expert_comment.expert_id=expert_information.expert_id",s_id)
            for raw in cursor.fetchall():
                arr.append({'c_id':raw[0],'comment':raw[1],'name':raw[2],'status':raw[3]})
        arrx=[{'time':1,'type':1,'message':'sds'}]
        arrx.clear()
        arrx=notify(arrx)
        count=noticount()
        return render_template("admin_review_comment.html",name="Review Comment",k=arr,img=img,fname=fname,p_id=paper,s_id=s_id,c=count,arr=arrx)
@app.route('/admin_review_comment_1',methods=['POST'])
def admin_review_comment_1():
    """Record the admin's decision on a paper and notify the author.

    Form fields:
      hdd     -- decision keyword (accept / reject / rejectwithcomment /
                 acceptwithcomment / continuewithmodification)
      send    -- list of expert comment ids forwarded to the author
      comment -- admin comment, required for the *withcomment / modification
                 decisions
    Query args: p_id (paper id) and s_id (submission id).

    FIX: the five decision branches were copy-pasted with only the status id
    and the comment requirement differing; they are collapsed into a lookup
    table. An unused MAX(submission_id) query was removed (its result was
    never read).
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        a_id = session['id']
        fname=session['name']
        img=session['img']
        p_id=request.args.get('p_id', default='', type=str)
        ss_id=request.args.get('s_id', default='', type=str)
        send = request.form.getlist('send')
        db = mysql.connect()
        cursor = db.cursor()
        # Decision keyword -> (new status_id, admin comment required?).
        decisions = {
            'accept': (2, False),
            'reject': (3, False),
            'acceptwithcomment': (4, True),
            'rejectwithcomment': (5, True),
            'continuewithmodification': (6, True),
        }
        decision = request.form['hdd']
        if decision in decisions:
            status_id, needs_comment = decisions[decision]
            if needs_comment:
                # Validate before touching the database, as the original did.
                comment = request.form['comment']
                if comment == '':
                    return "Please give comment"
            cursor.execute("""update paper_creation set status_id=%s where paper_id=%s""",(status_id,p_id))
            # Forward each selected expert comment to the author.
            for s in send:
                cursor.execute("""insert into paper_comment (comment_id,paper_id,submission_id) values(%s,%s,%s)""",(s,p_id,ss_id))
            if needs_comment:
                cursor.execute("insert into admin_comment (comment_message,admin_id,paper_id,submission_id) values(%s,%s,%s,%s)",(comment,a_id,p_id,ss_id))
        # Notify the paper's author; n_id 3 is presumably the "decision made"
        # notification type -- confirm against the notification table.
        cursor.execute("select user_id from paper_creation where paper_id=%s",p_id)
        u_id=cursor.fetchone()
        cursor.execute("insert into user_notification (user_id,n_id) values(%s,%s)",(u_id,3))
        db.commit()
        return redirect(url_for('.admin_pending'))
@app.route('/admin_notification_delete',methods=['POST','GET'])
def admin_notification_delete():
    # Dismiss one admin notification (by its n_id) and jump to the admin page
    # the notification referred to.
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        s_id = session['id']
        noty=0  # unused
        index=request.args.get('index', default='', type=str)
        i=int(index)
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("delete from admin_notification where n_id=%s",i)
        db.commit()
        # Route by notification type: 4 -> new papers, 2/3 -> pending, 5 -> CRC.
        # NOTE(review): any other n_id falls through without a return (the view
        # returns None, a 500 in Flask) -- confirm whether that can happen.
        if i==4:
            return redirect(url_for('.admin_new'))
        if i==2 or i==3:
            return redirect(url_for('.admin_pending'))
        if i==5:
            return redirect(url_for('.admin_crc'))
#start
@app.route('/volume_issue',methods=['POST'])
def volume_issue():
    """Return the issues belonging to the selected volume (AJAX fragment)."""
    mode = ''
    volume_id = request.form['year']
    db = mysql.connect()
    cursor = db.cursor()
    cursor.execute("""select * from issue where volume_id=%s """, volume_id)
    issues = [
        {'id': row[0], 'month': row[1], 'no': row[3]}
        for row in cursor.fetchall()
    ]
    db.commit()
    return render_template("volume_issue.html", arr=issues, mode=mode)
@app.route('/admin_track_subject_delete',methods=['POST'])
def admin_track_subject_delete():
    """Render the subjects of one track so the admin can pick one to delete."""
    mode = ''
    track_key = request.form['track']
    db = mysql.connect()
    cursor = db.cursor()
    cursor.execute("""select * from subject where track_id=%s """, track_key)
    subjects = [
        {'id': row[0], 'name': row[1]}
        for row in cursor.fetchall()
    ]
    db.commit()
    return render_template("admin_track_subject_delete.html", arr=subjects, mode=mode)
@app.route('/admin_track_keyword',methods=['POST'])
def admin_track_keyword():
    """Render the keywords registered under one track (AJAX fragment)."""
    track_key = request.form['track']
    db = mysql.connect()
    cursor = db.cursor()
    cursor.execute("""select * from keyword where track_id=%s """, track_key)
    keywords = [
        {'id': row[0], 'name': row[1]}
        for row in cursor.fetchall()
    ]
    db.commit()
    return render_template("admin_track_keyword.html", arr=keywords)
@app.route('/send_by_mail',methods=['POST'])
def send_by_mail():
    """Mail an announcement to the selected users and experts.

    Form fields:
      for_user -- body text of the announcement
      temp_22  -- comma-separated user e-mail addresses
      temp_11  -- comma-separated expert e-mail addresses
    '###' and empty entries are placeholders and must be filtered out.

    FIX: the filter used `or` (`i!='###' or i!=''`), which is always true, so
    placeholder and empty addresses were included in the recipient list.
    Changed to `and`. A leftover debug print was removed.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        mm=request.form['for_user']
        u_id = request.form['temp_22']
        e_id = request.form['temp_11']
        uu_id=u_id.split(",")
        ee_id=e_id.split(",")
        ux_id=[]
        uu_id=uu_id + ee_id
        for i in uu_id:
            if i!='###' and i!='':
                ux_id.append(i)
        msg = Message('From Flask', sender = 'harshitus99@gmail.com', recipients = ux_id)
        msg.body=mm
        mail.send(msg)
        return redirect(url_for('.admin_announcements'))
@app.route('/admin_expert_personal_info',methods=['POST','GET'])
def admin_expert_personal_info():
    """Return the full expert_information row for one expert as JSON."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    # Read but unused; kept so a missing session key still errors as before.
    admin_name = session['name']
    avatar = session['img']
    expert_id = request.form['e_id']
    record = ['a']  # placeholder when the expert id matches no row
    db = mysql.connect()
    cursor = db.cursor()
    cursor.execute("""select * from expert_information where expert_id=%s """, expert_id)
    for row in cursor.fetchall():
        record = row
    # Notification helpers invoked exactly as the original did, even though
    # their results are not part of the JSON response.
    notifications = notify([])
    count = noticount()
    return jsonify({'dd':record})
@app.route('/search_allocation',methods=['POST','GET'])
def search_allocation():
    """Live-search experts by first/last name for the allocation screen.

    An empty search string lists every expert. The `arr1` form field (`temp`)
    is state passed straight back to the template.

    FIX: the connection/cursor setup and the row-building loop were duplicated
    in both branches; only the query differs, so the duplication is hoisted.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        s_id = session['id']
        arr=[]
        fname=session['name']
        img=session['img']
        text=request.form['ss']
        temp=request.form['arr1']
        db = mysql.connect()
        cursor = db.cursor()
        if text=='':
            cursor.execute("""select expert_id,first_name,last_name from expert_information """)
        else:
            cursor.execute("""select expert_id,first_name,last_name from expert_information where first_name like %s or last_name like %s """,("%" + text + "%","%" + text + "%"))
        for raw in cursor.fetchall():
            arr.append({'id':raw[0],'fname':raw[1],'lname':raw[2]})
        return render_template("search_allocation.html",l=arr,tat=temp)
@app.route('/search_announcement1',methods=['POST','GET'])
def search_announcement1():
    """Live-search experts (with e-mail) for the announcement mailer.

    FIX: deduplicated the connection/cursor setup and row-building loop that
    were copy-pasted into both branches; only the query differs.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        s_id = session['id']
        arr=[]
        fname=session['name']
        img=session['img']
        text=request.form['ss']
        temp=request.form['arrx']
        db = mysql.connect()
        cursor = db.cursor()
        if text=='':
            cursor.execute("""select expert_id,email_id,first_name,last_name from expert_information """)
        else:
            cursor.execute("""select expert_id,email_id,first_name,last_name from expert_information where first_name like %s or last_name like %s """,("%" + text + "%","%" + text + "%"))
        for raw in cursor.fetchall():
            arr.append({'id':raw[0],'email':raw[1],'fname':raw[2],'lname':raw[3]})
        return render_template("search_announcement1.html",l=arr,tat=temp)
@app.route('/search_announcement2',methods=['POST','GET'])
def search_announcement2():
    """Live-search registered users (with e-mail) for the announcement mailer.

    FIX: deduplicated the connection/cursor setup and row-building loop that
    were copy-pasted into both branches; only the query differs.
    """
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        s_id = session['id']
        fname=session['name']
        img=session['img']
        arr=[]
        text=request.form['ss']
        temp=request.form['arry']
        db = mysql.connect()
        cursor = db.cursor()
        if text=='':
            cursor.execute("""select user_id,email_id,first_name,last_name from user_information """)
        else:
            cursor.execute("""select user_id,email_id,first_name,last_name from user_information where first_name like %s or last_name like %s """,("%" + text + "%","%" + text + "%"))
        for raw in cursor.fetchall():
            arr.append({'id':raw[0],'email':raw[1],'fname':raw[2],'lname':raw[3]})
        return render_template("search_announcement2.html",k=arr,tat=temp)
@app.route("/add_editorial")
def add_editorial():
if 'id' not in session:
return render_template("signin_admin.html")
else:
s_id = session['id']
fname=session['name']
img=session['img']
arrx=[{'time':1,'type':1,'message':'sds'}]
arrx.clear()
arrx=notify(arrx)
count=noticount()
return render_template('add_editorial.html',name="Add Editorial",arr=arrx,c=count,fname=fname,img=img)
@app.route("/add_editorial_1",methods=['POST','GET'])
def add_editorial_1():
if 'id' not in session:
return render_template("signin_admin.html")
else:
fname=request.form['fname']
mname=request.form['mname']
lname=request.form['lname']
desc=request.form['desc']
designation=request.form['designation']
profile=''
db = mysql.connect()
cursor = db.cursor()
for f in request.files.getlist("file"):
if f.filename=='':
profile='Default.jpg'
else:
target=os.path.join('/home/tp/python_flask/Research_Paper/static','Img/')
filename=f.filename
ext=filename.split(".")
profile = "/".join([target,filename])
f.save(profile)
profile=filename
cursor.execute("insert into editorial_board (first_name,middle_name,last_name,description,profile,designation) values(%s,%s,%s,%s,%s,%s)",(fname,mname,lname,desc,profile,designation))
db.commit()
return redirect(url_for('.add_editorial'))
@app.route("/delete_editorial")
def delete_editorial():
if 'id' not in session:
return render_template("signin_admin.html")
else:
s_id = session['id']
fname=session['name']
img=session['img']
arrx=[{'time':1,'type':1,'message':'sds'}]
arrx.clear()
arrx=notify(arrx)
count=noticount()
db = mysql.connect()
cursor = db.cursor()
arry=[{'id':'1','fname':'abc','mname':'ds','lname':'sdada','desc':'sdsd','pic':'asa','designation':'asa'}]
arry.clear()
cursor.execute("select guest_id,first_name,middle_name,last_name,description,profile,designation from editorial_board")
for raw in cursor.fetchall():
arry.append({'id':raw[0],'fname':raw[1],'mname':raw[2],'lname':raw[3],'desc':raw[4],'pic':raw[5],'designation':raw[6]})
return render_template('delete_editorial.html',name="List Of Editorials",arr=arrx,c=count,fname=fname,img=img,arry=arry)
@app.route("/delete_editorial_1",methods=['POST'])
def delete_editorial_1():
print("abc")
if 'id' not in session:
return render_template("signin_admin.html")
else:
#g_id= request.args.get('id', default='', type=str)
print("abc")
g_id=request.form['id']
print(g_id)
db = mysql.connect()
cursor = db.cursor()
cursor.execute("delete from editorial_board where guest_id=%s",g_id)
db.commit()
return redirect(url_for('.delete_editorial'))
@app.route("/edit_editorial")
def edit_editorial():
if 'id' not in session:
return render_template("signin_admin.html")
else:
g_id= request.args.get('id', default='', type=str)
s_id = session['id']
fname=session['name']
img=session['img']
arrx=[{'time':1,'type':1,'message':'sds'}]
arrx.clear()
arrx=notify(arrx)
count=noticount()
db = mysql.connect()
cursor = db.cursor()
cursor.execute("select * from editorial_board where guest_id=%s",g_id)
arr=['a','a','a','a','a','a','a','a','a','a','a']
for raw in cursor.fetchall():
arr=raw
return render_template("edit_editorial.html",name="Edit Editorial",arr=arrx,c=count,fname=fname,img=img,k=arr)
@app.route("/edit_editorial_1",methods=['POST','GET'])
def edit_editorial_1():
if 'id' not in session:
return render_template("signin_admin.html")
else:
g_id=request.form['guest_id']
fname=request.form['fname']
mname=request.form['mname']
lname=request.form['lname']
desc=request.form['desc']
designation=request.form['designation']
profile=''
db = mysql.connect()
cursor = db.cursor()
for f in request.files.getlist("file"):
if f.filename=='':
cursor.execute("select profile from editorial_board where guest_id=%s",g_id)
for raw in cursor.fetchall():
profile=raw[0]
else:
target=os.path.join('/home/tp/python_flask/Research_Paper/static','Img/')
filename=f.filename
ext=filename.split(".")
profile = "/".join([target,filename])
f.save(profile)
profile=filename
cursor.execute("update editorial_board set first_name=%s,middle_name=%s,last_name=%s,description=%s,profile=%s,designation=%s where guest_id=%s",(fname,mname,lname,desc,profile,designation,g_id))
db.commit()
return redirect(url_for('.delete_editorial'))
@app.route("/admin_query")
def admin_query():
if 'id' not in session:
return render_template("signin_admin.html")
else:
s_id = session['id']
fname=session['name']
img=session['img']
arrx=[{'time':1,'type':1,'message':'sds'}]
arrx.clear()
arrx=notify(arrx)
count=noticount()
db = mysql.connect()
cursor = db.cursor()
arry=[{'id':'1','mobile':'abc','email':'ds','message':'sdada','name':'sdsd'}]
arry.clear()
cursor.execute("select c_id,mobile,email,message,name from contactus")
for raw in cursor.fetchall():
arry.append({'id':raw[0],'mobile':raw[1],'email':raw[2],'message':raw[3],'name':raw[4]})
return render_template('admin_queries.html',name="User Queries",arr=arrx,c=count,fname=fname,img=img,arry=arry)
@app.route("/admin_query_1",methods=['POST','GET'])
def admin_query_1():
if 'id' not in session:
return render_template("signin_admin.html")
else:
s_id = session['id']
db = mysql.connect()
cursor = db.cursor()
q_id=request.form.getlist('query')
for i in q_id:
cursor.execute("delete from contactus where c_id=%s",i)
db.commit()
return redirect(url_for('.admin_query'))
@app.route("/admin_crc")
def admin_crc():
if 'id' not in session:
return render_template("signin_admin.html")
else:
s_id = session['id']
fname=session['name']
img=session['img']
arrx=[{'time':1,'type':1,'message':'sds'}]
arrx.clear()
arrx=notify(arrx)
count=noticount()
db = mysql.connect()
cursor = db.cursor()
cursor.execute("Select paper_id from paper_creation where status_id=%s",8)
arry=[{'id':'12','f_name':'harsh','l_name':'shah','topic':'shyam','creation_date':'asd','last':'asd','status':'asasd','path':'c'}]
arry.clear()
for raw in cursor.fetchall():
cursor.execute("Select status_name,creation_date from paper_creation,status where paper_creation.status_id=status.status_id and paper_id=%s",raw[0])
for raw2 in cursor.fetchall():
id1=raw2[0]
id2=raw2[1]
cursor.execute("Select user_id from paper_creation where paper_id=%s",raw[0])
u_id=cursor.fetchone()
cursor.execute("Select first_name from user_information where user_id=%s",u_id)
for raw3 in cursor.fetchall():
name1=raw3[0]
cursor.execute("Select MAX(submission_id) from submission_of_paper where paper_id=%s",raw[0])
id=cursor.fetchone()
cursor.execute("Select title,last_modified_date,path from submission_of_paper where submission_id=%s",id)
for raw1 in cursor.fetchall():
arry.append({'id':raw[0],'f_name':name1,'topic':raw1[0],'creation_date':id2,'last':raw1[1],'status':id1,'path':raw1[2]})
return render_template('admin_crc.html',name="CRC Papers",arr=arrx,c=count,fname=fname,img=img,arry=arry)
@app.route("/admin_authors")
def admin_authors():
if 'id' not in session:
return render_template("signin_admin.html")
else:
s_id = session['id']
fname=session['name']
img=session['img']
arrx=[{'time':1,'type':1,'message':'sds'}]
arrx.clear()
arrx=notify(arrx)
count=noticount()
arr=[{'id':'1','name':'m','no_paper':'5'}]
arr.clear()
db = mysql.connect()
cursor = db.cursor()
cursor.execute("Select author_id,name from author")
for raw in cursor.fetchall():
cursor.execute("Select count(paper_id) from user_author where author_id=%s",raw[0])
for raw1 in cursor.fetchall():
no_paper=raw1[0]
print(no_paper)
arr.append({'id':raw[0],'name':raw[1],'no_paper':no_paper})
return render_template('admin_authors.html',name="List Of Authors",k=arr,arr=arrx,c=count,fname=fname,img=img)
@app.route('/admin_author_details')
def admin_author_details():
    """Display the full author record for one author id."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    admin_name = session['name']
    avatar = session['img']
    author_id = request.args.get('a_id', default='', type=str)
    db = mysql.connect()
    cursor = db.cursor()
    # Placeholder row keeps the template renderable when the id is unknown.
    record = ['a','a','a','a','a','a','a','a','a']
    cursor.execute("Select * from author where author_id=%s", author_id)
    for row in cursor.fetchall():
        record = row
    notifications = notify([])
    pending = noticount()
    return render_template("admin_author_details.html", name="Details Of Author",
                           a_id=author_id, arr=notifications, c=pending,
                           img=avatar, k=record, fname=admin_name)
@app.route('/author_paper_details')
def author_paper_details():
    # List every paper linked to one author, with the paper's status, creation
    # date, and the title/date of its latest submission.
    if 'id' not in session:
        return render_template("signin_admin.html")
    else:
        id = session['id']
        fname=session['name']
        img=session['img']
        a_id= request.args.get('a_id', default='', type=str)
        arr=[{'topic':'shyam','creation_date':'asd','last':'asd','status':'asasd','pap_id':'1'}]
        arr.clear()
        db = mysql.connect()
        cursor = db.cursor()
        cursor.execute("Select paper_id from user_author where author_id=%s",a_id)
        # nn becomes 1 as soon as one paper row is produced; the template uses
        # it as a "has results" flag.
        nn=0
        # fetchall() materializes the id list, so reusing `cursor` inside the
        # loop is safe.
        for raw in cursor.fetchall():
            cursor.execute("Select status_name,creation_date from paper_creation,status where paper_creation.status_id=status.status_id and paper_id=%s",raw[0])
            for raw2 in cursor.fetchall():
                id1=raw2[0]
                id2=raw2[1]
            # Title/date come from the paper's latest submission.
            cursor.execute("Select MAX(submission_id) from submission_of_paper where paper_id=%s",raw[0])
            id=cursor.fetchone()
            cursor.execute("Select title,last_modified_date from submission_of_paper where submission_id=%s",id)
            for raw1 in cursor.fetchall():
                nn=1
                arr.append({'topic':raw1[0],'creation_date':id2,'last':raw1[1],'status':id1,'pap_id':raw[0]})
        arrx=[{'time':1,'type':1,'message':'sds'}]
        arrx.clear()
        arrx=notify(arrx)
        count=noticount()
        return render_template("author_paper_details.html",name="Paper associated with Author",a_id=a_id,n=nn,arr=arrx,c=count,k=arr,img=img,fname=fname)
@app.route('/admin_show_comment_reviewer')
def admin_show_comment_reviewer():
    """Show every comment one reviewer made on one paper, newest submission first."""
    if 'id' not in session:
        return render_template("signin_admin.html")
    admin_name = session['name']
    avatar = session['img']
    expert_id = request.args.get('e_id', default='', type=str)
    paper_id = request.args.get('p_id', default='', type=str)
    db = mysql.connect()
    cursor = db.cursor()
    cursor.execute("Select comment_message,date_of_comment from expert_comment where paper_id=%s and expert_id=%s order by submission_id desc",(paper_id,expert_id))
    rows = cursor.fetchall()
    comments = [{'message': r[0], 'date': r[1]} for r in rows]
    # Template flag: 1 when at least one comment exists.
    nos = 1 if rows else 0
    notifications = notify([])
    pending = noticount()
    return render_template("admin_reviewer_comment.html", name="Comment of Reviewer",
                           nos=nos, eid=expert_id, arr=notifications, c=pending,
                           img=avatar, k=comments, fname=admin_name)
|
from PyQt5 import QtWidgets
from PyQt5.QtWidgets import QApplication, QMainWindow, QMessageBox
import sys, os
import string
import json
import numpy as np
import pandas as pd
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.neighbors import KNeighborsClassifier
from sklearn.cluster import KMeans
class KlasifikasiSMS(QMainWindow):
    """Small PyQt5 window that classifies SMS texts with k-nearest neighbours.

    The window takes a comma-pair (",, ") separated list of SMS texts and a
    value of k, fits a KNN classifier on the training matrix supplied by the
    caller, and shows the predicted class of each text in a message box.

    FIX: `np.int` was removed in NumPy 1.24; the prediction is now converted
    with plain int() on the first (only) element of the prediction array.
    """

    def __init__(self,a,b,c,d,e):
        """Store the preprocessing artefacts and training data, then build the UI.

        a -- stopword set, b -- punctuation set, c -- fitted TfidfVectorizer,
        d -- TF-IDF training matrix, e -- training labels.
        """
        super(KlasifikasiSMS,self).__init__()
        self.initUI()
        self.stopword = a
        self.tandaBaca = b
        self.vectorizer = c
        self.x_train = d
        self.y_train = e

    def button_clicked(self):
        """Classify each entered SMS and pop up the predicted class name."""
        clear = lambda: os.system('clear')
        clear()
        k = self.inputk.text()
        textsms = self.textsms.toPlainText()
        kkn = KNeighborsClassifier(n_neighbors=int(k), weights='distance')
        kkn.fit(self.x_train,self.y_train)
        sms = textsms.split(",, ")
        nama_label = ["normal", "penipuan", "penawaran"]
        for teks in sms:
            vektor = self.vectorizer.transform([teks])
            prediksi_label_knn = kkn.predict(vektor)
            # predict() returns a 1-element array; int() of its first element
            # replaces the removed np.int alias.
            label = nama_label[int(prediksi_label_knn[0])]
            QMessageBox.about(self, "Hasil Klasifikasi","\n " + "Kelompok : "+label+"\t\n\t")

    def initUI(self):
        """Lay out the input widgets and wire the classify button."""
        self.setGeometry(2000, 200, 380, 240)
        self.setWindowTitle("Klasifikasi SMS")
        self.lsms = QtWidgets.QLabel(self)
        self.lsms.setText("Masukkan SMS : ")
        self.lsms.move(20,30)
        self.textsms = QtWidgets.QTextEdit(self)
        self.textsms.setGeometry(130, 30,200,100)
        self.lk = QtWidgets.QLabel(self)
        self.lk.setText("Masukkan K : ")
        self.lk.move(20,135)
        self.inputk = QtWidgets.QLineEdit(self)
        self.inputk.setGeometry(130, 135, 200,30)
        self.hasil = QtWidgets.QLabel(self)
        self.hasil.setGeometry(370,20,200,50)
        self.b1 = QtWidgets.QPushButton(self)
        self.b1.setText("Klasifikasi")
        self.b1.setGeometry(130, 175,200,25)
        self.b1.clicked.connect(self.button_clicked)
def window():
    """Load the SMS dataset, fit the TF-IDF vectorizer, and launch the GUI.

    FIXES: the stopword file was opened without being closed (now a `with`
    block); the CSV was iterated twice with iterrows() just to collect two
    columns (now taken directly via .tolist()).
    """
    app = QApplication(sys.argv)
    with open('stopwords-id.json','r') as fh:
        data_stopword = json.load(fh)
    stopword = set(data_stopword)
    tandaBaca = set(string.punctuation)
    sms_csv = pd.read_csv('dataset_sms.csv')
    dataset = sms_csv["Teks"].tolist()
    y_train = sms_csv["label"].tolist()
    vectorizer = TfidfVectorizer(stop_words=data_stopword)
    x_train = vectorizer.fit_transform(dataset)
    ksms = KlasifikasiSMS(stopword,tandaBaca,vectorizer,x_train,y_train)
    ksms.show()
    sys.exit(app.exec_())
window()
|
#!/bin/env python
#coding:utf-8
# Python 2 module: reload/setdefaultencoding and the `thread` module do not
# exist in Python 3.
import sys
import thread
# Force the process-wide default encoding to UTF-8 so the C extension can
# receive non-ASCII text (a common, if hacky, Python 2 idiom).
reload(sys)
sys.setdefaultencoding('utf-8')
import mseg
class MsegAnalyzer:
    """Thin wrapper around the `mseg` C segmenter with one-time initialization.

    The dictionary is loaded at most once per process, guarded by a lock so
    concurrent constructions do not call mseg.init twice.

    FIXES: the lock is now released in a finally block (the original leaked
    the lock forever if mseg.init raised, deadlocking every later caller),
    and `initialized` is set inside the locked section, immediately after a
    successful init, instead of after the release.
    """
    initialized = False
    __lock = thread.allocate_lock()

    def __init__(self,path = '/data0/home/xiulei/workspace/mseg/dict/mseg.conf'):
        # Double-checked locking: fast unlocked test, then re-test under the
        # lock before initializing.
        if not MsegAnalyzer.initialized:
            MsegAnalyzer.__lock.acquire()
            try:
                if not MsegAnalyzer.initialized:
                    mseg.init(path)
                    MsegAnalyzer.initialized = True
            finally:
                MsegAnalyzer.__lock.release()

    def smart_split(self,content):
        # Segmentation using mseg's "smart" strategy.
        return mseg.smart_split(content)

    def forward_split(self,content):
        # Forward maximum-matching segmentation.
        return mseg.forward_split(content)

    def backward_split(self,content):
        # Backward maximum-matching segmentation.
        return mseg.backward_split(content)

    def tagging(self,content):
        # Segmentation with part-of-speech tags.
        return mseg.tagging(content)

    def full_split(self,content):
        # Exhaustive segmentation (all candidate words).
        return mseg.full_split(content)
|
__author__ = 'tabby'
# Python 2 script (raw_input and print-statement syntax): a minimal
# console chat loop that echoes canned replies from the Responses module
# until the user types something Responses.is_farewell recognizes.
import Responses
response = ''
while not Responses.is_farewell(response):
    response = raw_input('Your turn: ')
    # Lower-cased so matching in Responses is case-insensitive.
    response = response.lower()
    print Responses.respond(response)
|
import tensorflow as tf
import numpy as np
from tensorflow import square, exp, divide, log, scalar_mul, to_float, cast
from tensorflow.python import reduce_sum
"""
Exercise 1.1: Diagonal Gaussian Likelihood
Write a function which takes in Tensorflow symbols for the means and
log stds of a batch of diagonal Gaussian distributions, along with a
Tensorflow placeholder for (previously-generated) samples from those
distributions, and returns a Tensorflow symbol for computing the log
likelihoods of those samples.
"""
def gaussian_likelihood(x, mu, log_std):
    """
    Args:
        x: Tensor with shape [batch, dim]
        mu: Tensor with shape [batch, dim]
        log_std: Tensor with shape [batch, dim] or [dim]
    Returns:
        Tensor with shape [batch]
    """
    #######################
    #                     #
    #   YOUR CODE HERE    #
    #                     #
    #######################
    # These are module-level on purpose: the __main__ check below fetches
    # them via sess.run for debugging. NOTE(review): `sum` shadows the
    # builtin of the same name at module scope.
    global sum
    global factor1
    global result
    # d = last static dimension of x (per-sample dimensionality).
    shape = x.get_shape().as_list()[-1]
    # NOTE(review): in TF1 graph mode tf.print only builds an op; it will not
    # actually print unless that op is executed -- confirm this is intended.
    tf.print(shape)
    # Per-dimension term: (x - mu)^2 / sigma^2 + 2*log(sigma).
    sum = divide(square(x - mu), square(exp(log_std))) + scalar_mul(2, log_std)
    tf.print(sum)
    # Sum over the feature dimension -> shape [batch].
    factor1 = reduce_sum(sum, 1)
    # Constant term of the log-density: d * log(2*pi).
    factor2 = cast(shape * np.log(2.0 * np.pi), dtype=tf.float32)
    # log N(x | mu, sigma) = -0.5 * (sum_i [(x_i-mu_i)^2/sigma_i^2
    #                                       + 2 log sigma_i] + d log 2*pi)
    result = (factor1 + factor2) / -2.0
    return result
    # return tf.constant(0)
if __name__ == '__main__':
    """
    Run this file to verify your solution.
    """
    # Compare the implementation above against the reference solution on a
    # random batch (TF1 placeholder/session API).
    from spinup.exercises.problem_set_1_solutions import exercise1_1_soln
    from spinup.exercises.common import print_result
    sess = tf.Session()
    dim = 10
    x = tf.placeholder(tf.float32, shape=(None, dim))
    mu = tf.placeholder(tf.float32, shape=(None, dim))
    log_std = tf.placeholder(tf.float32, shape=(dim,))
    your_gaussian_likelihood = gaussian_likelihood(x, mu, log_std)
    true_gaussian_likelihood = exercise1_1_soln.gaussian_likelihood(x, mu, log_std)
    batch_size = 32
    feed_dict = {x: np.random.rand(batch_size, dim),
                 mu: np.random.rand(batch_size, dim),
                 log_std: np.random.rand(dim)}
    # `sum`, `factor1` and `result` are the debug globals published by
    # gaussian_likelihood above; fetched here purely for inspection.
    test, test2, test3, your_result, true_result = sess.run([sum, factor1, result, your_gaussian_likelihood, true_gaussian_likelihood],
                                                            feed_dict=feed_dict)
    print(test)
    print(test2)
    print(test3)
    correct = np.allclose(your_result, true_result)
    print_result(correct)
|
from django.contrib import admin
from body.models import *
# Register your models here.
class ProfileAdmin(admin.ModelAdmin):
    """Admin changelist for Profile: owner, contact number and account type."""
    list_display = ('name', 'phone', 'user', 'user_type')
class TrainerAdmin(admin.ModelAdmin):
    """Admin changelist for Trainer: identity, demographics and qualifications."""
    list_display = ('trainer', 'name', 'phone', 'age', 'gender', 'experience', 'skills')
class MemberAdmin(admin.ModelAdmin):
    """Admin changelist for Member: assigned trainer plus body metrics."""
    list_display = ('member', 'trainer', 'phone', 'age', 'gender', 'height', 'weight')
class WorkoutAdmin(admin.ModelAdmin):
    """Admin changelist for Workout: name and illustrative image."""
    list_display = ('name', 'image')
class SessionAdmin(admin.ModelAdmin):
    """Admin changelist for Session: participants and scheduling details."""
    list_display = ('member', 'trainer', 'name', 'date', 'time', 'duration')
# Attach each model to the admin site with its customized changelist.
for _model, _model_admin in (
    (Profile, ProfileAdmin),
    (Trainer, TrainerAdmin),
    (Member, MemberAdmin),
    (Workout, WorkoutAdmin),
    (Session, SessionAdmin),
):
    admin.site.register(_model, _model_admin)
|
import cv2,time,pandas as pd
import numpy as np
from datetime import datetime
# Webcam motion detector: compares each frame against the first captured
# frame, draws boxes around moving regions, and logs motion start/end times
# to a CSV when the user quits with 'q'.
#
# FIXES:
#  * motion-end condition: the original `status_list[-1] and
#    status_list[-2]==1` was true on EVERY consecutive motion frame, flooding
#    `times`; an end is the 1 -> 0 transition.
#  * cv2.dilate with iterations=0 was a no-op; 2 iterations actually fills
#    holes in the threshold mask.
#  * unchecked camera read: a failed read returned frame=None and crashed
#    cvtColor.
#  * quitting while motion was still active left `times` with an odd length,
#    so times[i+1] raised IndexError; the exit time is appended as the end.
#  * df.append was removed in pandas 2.0; rows are collected and written with
#    pd.DataFrame once at the end.
first_frame = None          # baseline frame everything is diffed against
status_list = [None, None]  # last two motion flags (1 = motion, 0 = still)
times = []                  # alternating start/end timestamps
video = cv2.VideoCapture(0)
while True:
    check, frame = video.read()
    if not check:
        # Camera disconnected or frame grab failed -- stop cleanly.
        break
    status = 0
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    gray = cv2.GaussianBlur(gray, (21, 21), 0)
    if first_frame is None:
        first_frame = gray
        continue
    delta_frame = cv2.absdiff(first_frame, gray)
    thresh_delta = cv2.threshold(delta_frame, 30, 255, cv2.THRESH_BINARY)[1]
    thresh_delta = cv2.dilate(thresh_delta, None, iterations=2)
    # NOTE(review): this 2-value unpack matches OpenCV 2.x/4.x; OpenCV 3.x
    # returned 3 values -- confirm the installed version.
    (cnts, _) = cv2.findContours(thresh_delta.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    for contour in cnts:
        if cv2.contourArea(contour) < 1000:
            continue  # ignore tiny noise blobs
        status = 1
        (x, y, w, h) = cv2.boundingRect(contour)
        cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 3)
    status_list.append(status)
    status_list = status_list[-2:]
    if status_list[-1] == 1 and status_list[-2] == 0:
        times.append(datetime.now())  # motion started
    if status_list[-1] == 0 and status_list[-2] == 1:
        times.append(datetime.now())  # motion ended
    cv2.imshow('frame', frame)
    cv2.imshow('Capturing', gray)
    cv2.imshow('delta', delta_frame)
    cv2.imshow('thresh', thresh_delta)
    key = cv2.waitKey(1)
    if key == ord('q'):
        break
print(status_list)
print(times)
if len(times) % 2 == 1:
    # Motion was still in progress at quit time; close the interval now.
    times.append(datetime.now())
df = pd.DataFrame(
    [{"Start": times[i], "End": times[i + 1]} for i in range(0, len(times), 2)],
    columns=["Start", "End"],
)
df.to_csv("A:\\TechLife\\Web Projects\\14-06-2019'\\Times.csv")
video.release()
cv2.destroyAllWindows()
"""
This type stub file was generated by pyright.
"""
import vtkmodules.vtkCommonCore as __vtkmodules_vtkCommonCore
class vtkAbstractPicker(__vtkmodules_vtkCommonCore.vtkObject):
"""
vtkAbstractPicker - define API for picking subclasses
Superclass: vtkObject
vtkAbstractPicker is an abstract superclass that defines a minimal
API for its concrete subclasses. The minimum functionality of a
picker is to return the x-y-z global coordinate position of a pick
(the pick itself is defined in display coordinates).
The API to this class is to invoke the Pick() method with a selection
point (in display coordinates - pixels) and a renderer. Then get the
resulting pick position in global coordinates with the
GetPickPosition() method.
vtkPicker fires events during the picking process. These events are
StartPickEvent, PickEvent, and EndPickEvent which are invoked prior
to picking, when something is picked, and after all picking
candidates have been tested. Note that during the pick process the
PickEvent of vtkProp (and its subclasses such as vtkActor) is fired
prior to the PickEvent of vtkPicker.
@warning
vtkAbstractPicker and its subclasses will not pick props that are
"unpickable" (see vtkProp) or are fully transparent (if transparency
is a property of the vtkProp).
@warning
There are two classes of pickers: those that pick using geometric
methods (typically a ray cast); and those that use rendering
hardware. Geometric methods return more information but are slower.
Hardware methods are much faster and return minimal information.
Examples of geometric pickers include vtkPicker, vtkCellPicker, and
vtkPointPicker. Examples of hardware pickers include
vtkWorldPointPicker and vtkPropPicker.
@sa
vtkPropPicker uses hardware acceleration to pick an instance of
vtkProp. (This means that 2D and 3D props can be picked, and it's
relatively fast.) If you need to pick cells or points, you might wish
to use vtkCellPicker or vtkPointPicker. vtkWorldPointPicker is the
fastest picker, returning an x-y-z coordinate value using the
hardware z-buffer. vtkPicker can be used to pick the bounding box of
3D props.
"""
def AddPickList(self, vtkProp):
"""
V.AddPickList(vtkProp)
C++: void AddPickList(vtkProp *)
Add an actor to the pick list.
"""
...
def DeletePickList(self, vtkProp):
"""
V.DeletePickList(vtkProp)
C++: void DeletePickList(vtkProp *)
Delete an actor from the pick list.
"""
...
def GetNumberOfGenerationsFromBase(self, string):
"""
V.GetNumberOfGenerationsFromBase(string) -> int
C++: vtkIdType GetNumberOfGenerationsFromBase(const char *type)
override;
Given a the name of a base class of this class type, return the
distance of inheritance between this class type and the named
class (how many generations of inheritance are there between this
class and the named class). If the named class is not in this
class's inheritance tree, return a negative value. Valid
responses will always be nonnegative. This method works in
combination with vtkTypeMacro found in vtkSetGet.h.
"""
...
def GetNumberOfGenerationsFromBaseType(self, string):
"""
V.GetNumberOfGenerationsFromBaseType(string) -> int
C++: static vtkIdType GetNumberOfGenerationsFromBaseType(
const char *type)
Given a the name of a base class of this class type, return the
distance of inheritance between this class type and the named
class (how many generations of inheritance are there between this
class and the named class). If the named class is not in this
class's inheritance tree, return a negative value. Valid
responses will always be nonnegative. This method works in
combination with vtkTypeMacro found in vtkSetGet.h.
"""
...
def GetPickFromList(self):
"""
V.GetPickFromList() -> int
C++: virtual vtkTypeBool GetPickFromList()
Use these methods to control whether to limit the picking to this
list (rather than renderer's actors). Make sure that the pick
list contains actors that referred to by the picker's renderer.
"""
...
def GetPickList(self):
"""
V.GetPickList() -> vtkPropCollection
C++: vtkPropCollection *GetPickList()
Return the list of actors in the PickList.
"""
...
def GetPickPosition(self):
"""
V.GetPickPosition() -> (float, float, float)
C++: virtual double *GetPickPosition()
Return position in global coordinates of pick point.
"""
...
def GetRenderer(self):
"""
V.GetRenderer() -> vtkRenderer
C++: virtual vtkRenderer *GetRenderer()
Get the renderer in which pick event occurred.
"""
...
def GetSelectionPoint(self):
"""
V.GetSelectionPoint() -> (float, float, float)
C++: virtual double *GetSelectionPoint()
Get the selection point in screen (pixel) coordinates. The third
value is related to z-buffer depth. (Normally should be =0.)
"""
...
def InitializePickList(self):
"""
V.InitializePickList()
C++: void InitializePickList()
Initialize list of actors in pick list.
"""
...
def IsA(self, string):
"""
V.IsA(string) -> int
C++: vtkTypeBool IsA(const char *type) override;
Return 1 if this class is the same type of (or a subclass of) the
named class. Returns 0 otherwise. This method works in
combination with vtkTypeMacro found in vtkSetGet.h.
"""
...
def IsTypeOf(self, string):
"""
V.IsTypeOf(string) -> int
C++: static vtkTypeBool IsTypeOf(const char *type)
Return 1 if this class type is the same type of (or a subclass
of) the named class. Returns 0 otherwise. This method works in
combination with vtkTypeMacro found in vtkSetGet.h.
"""
...
def NewInstance(self):
"""
V.NewInstance() -> vtkAbstractPicker
C++: vtkAbstractPicker *NewInstance()
"""
...
def Pick(self, p_float, p_float_1, p_float_2, vtkRenderer):
"""
V.Pick(float, float, float, vtkRenderer) -> int
C++: virtual int Pick(double selectionX, double selectionY,
double selectionZ, vtkRenderer *renderer)
V.Pick([float, float, float], vtkRenderer) -> int
C++: int Pick(double selectionPt[3], vtkRenderer *ren)
Perform pick operation with selection point provided. Normally
the first two values for the selection point are x-y pixel
coordinate, and the third value is =0. Return non-zero if
something was successfully picked.
"""
...
def Pick3DPoint(self, p_float=..., p_float=..., p_float=..., *args, **kwargs):
"""
V.Pick3DPoint([float, float, float], vtkRenderer) -> int
C++: virtual int Pick3DPoint(double[3], vtkRenderer *)
Perform pick operation with selection point provided. The
selectionPt is in world coordinates. Return non-zero if something
was successfully picked.
"""
...
def Pick3DRay(self, p_float=..., p_float=..., p_float=..., *args, **kwargs):
"""
V.Pick3DRay([float, float, float], [float, float, float, float],
vtkRenderer) -> int
C++: virtual int Pick3DRay(double[3], double[4], vtkRenderer *)
Perform pick operation with selection point and orientation
provided. The selectionPt is in world coordinates. Return
non-zero if something was successfully picked.
"""
...
def PickFromListOff(self):
"""
V.PickFromListOff()
C++: virtual void PickFromListOff()
Use these methods to control whether to limit the picking to this
list (rather than renderer's actors). Make sure that the pick
list contains actors that referred to by the picker's renderer.
"""
...
def PickFromListOn(self):
"""
V.PickFromListOn()
C++: virtual void PickFromListOn()
Use these methods to control whether to limit the picking to this
list (rather than renderer's actors). Make sure that the pick
list contains actors that referred to by the picker's renderer.
"""
...
def SafeDownCast(self, vtkObjectBase):
"""
V.SafeDownCast(vtkObjectBase) -> vtkAbstractPicker
C++: static vtkAbstractPicker *SafeDownCast(vtkObjectBase *o)
"""
...
def SetPickFromList(self, p_int):
"""
V.SetPickFromList(int)
C++: virtual void SetPickFromList(vtkTypeBool _arg)
Use these methods to control whether to limit the picking to this
list (rather than renderer's actors). Make sure that the pick
list contains actors that referred to by the picker's renderer.
"""
...
def __delattr__(self, *args, **kwargs):
""" Implement delattr(self, name). """
...
def __getattribute__(self, *args, **kwargs):
""" Return getattr(self, name). """
...
def __init__(self, *args, **kwargs) -> None:
...
@staticmethod
def __new__(*args, **kwargs):
""" Create and return a new object. See help(type) for accurate signature. """
...
def __repr__(self, *args, **kwargs):
""" Return repr(self). """
...
def __setattr__(self, *args, **kwargs):
""" Implement setattr(self, name, value). """
...
def __str__(self, *args, **kwargs) -> str:
""" Return str(self). """
...
__this__ = ...
__dict__ = ...
__vtkname__ = ...
|
import pickle
from preprocess.utils import create_action_object

# Root directory containing the raw openpose output to convert.
path_data = "/home/ximenes/Desktop/openpose/data/"

print("[INFO] Creating object list ...")  # fixed typo: "Cretating"
action_list = create_action_object(path_data)

print("[INFO] Creating pickle file ...")
pik = "database.dat"
with open(pik, "wb") as f:
    pickle.dump(action_list, f)
|
from __future__ import unicode_literals
import frappe
import json
import frappe.utils
from frappe.utils import cstr, flt, getdate, comma_and
from frappe import _
def item_query(doctype, txt, searchfield, start, page_len, filters):
	"""Link-field query returning the items attached to filters['location'].

	Signature matches frappe's custom query contract; only `filters` is used.
	Falls back to a single empty row when nothing matches.
	"""
	response = [['']]
	if filters.get('location'):
		# Parameterized query: the previous %-string interpolation was an
		# SQL-injection vector via the user-controlled location filter.
		data = frappe.db.sql(
			'''select item from `tabLocations Item` where parent = %s''',
			(filters.get('location'),), as_list=1, debug=1)
		response = data if data else response
	return response
def location_query(doctype, txt, searchfield, start, page_len, filters):
	"""Link-field query returning the locations of filters['company'].

	Signature matches frappe's custom query contract; only `filters` is used.
	Falls back to a single empty row when nothing matches.
	"""
	response = [['']]
	if filters.get('company'):
		# Parameterized query: the previous %-string interpolation was an
		# SQL-injection vector via the user-controlled company filter.
		data = frappe.db.sql(
			'''select location from `tabCompanies Location` where parent = %s''',
			(filters.get('company'),), as_list=1, debug=1)
		response = data if data else response
	return response
import network
import usocket as socket
from machine import Pin, Timer, PWM
import machine
from switch import Switch
import time
import _thread
import math
# Setup
# Station-mode WiFi interface and GPIO assignments (ESP32 pin numbers).
wlan = network.WLAN(network.STA_IF)
status_led = Pin(2, Pin.OUT)  # on-board status LED
led_red = Pin(21, mode=Pin.OUT)
led_green = Pin(22, mode=Pin.OUT)
led_blue = Pin(23, mode=Pin.OUT)
buzzer_button = Pin(18, mode=Pin.IN, pull=Pin.PULL_UP)  # active-low trigger button
buzzer_led = Pin(19, mode=Pin.OUT)
buzzer_pwm = PWM(buzzer_led, freq=1000)
buzzer_pwm.duty(0)  # start silent
pulsing = False  # shared flag read by the pulse thread
def _connect_wifi():
    """Block until connected to the 'photobooze' WiFi network.

    Blinks the red LED while waiting and pulses the buzzer LED; switches to
    the green LED once connected.
    NOTE(review): connect() is called without a password - assumes an open
    network; confirm.
    """
    print("connecting")
    pulse_on(buzzer_pwm, 50)
    led_blue.on()
    led_green.off()
    wlan.active(True)
    wlan.connect('photobooze')
    t=0.2
    dt = 0  # seconds spent waiting (approximate, blink-cycle granularity)
    while not wlan.isconnected():
        led_red.on()
        time.sleep(t)
        led_red.off()
        time.sleep(t)
        dt += 2*t
        if dt >= 10 and dt < 20:
            print("das wird nichts mehr ...")
        elif dt >= 20:
            print("gib es auf ...")
            dt = 0  # restart the complaint cycle
    # Connected: status + green on, every other indicator off.
    status_led.on()
    led_blue.off()
    led_red.off()
    led_green.on()
    pulse_off(buzzer_pwm)
def pulse_on(led, pause):
    """Start a background thread pulsing `led`; `pause` is the per-step delay in ms."""
    global pulsing
    pulsing = True
    _thread.start_new_thread(pulse_thread, (led,pause,))
def pulse_off(led):
    """Ask the pulse thread to stop; the thread zeroes the duty itself on exit."""
    global pulsing
    pulsing = False
def pulse_thread(led, pause):
    """Sinusoidal brightness sweep (duty 0..1000) until `pulsing` is cleared."""
    global pulsing
    while pulsing:
        for i in range(20):
            # One full sine period over 20 steps, mapped onto duty [0, 1000].
            led.duty(int(math.sin(i / 10 * math.pi) * 500 + 500))
            time.sleep_ms(pause)
    led.duty(0)
def take_photo():
    """Trigger a photo on the photobooze server via a raw HTTP GET.

    Visual feedback: blue LED + buzzer pulse while busy, red LED on failure.
    Reconnects WiFi first if the link dropped.
    """
    try:
        led_red.off()
        led_blue.on()
        pulse_on(buzzer_pwm, 50)
        if not wlan.isconnected():
            _connect_wifi()
        sock = socket.socket()
        addr_info = socket.getaddrinfo("photobooze.org", 80)
        target = addr_info[0][-1]
        print("connection to photobooze...")
        sock.connect(target)
        print("sending request...")
        sock.send(b"GET /api/v1/trigger HTTP/1.0\r\nHOST: photobooze.org\r\n\r\n")
        print(sock.recv(4096))
        sock.close()
        del sock
    except Exception as exc:
        print("Error requesting picture:" + str(exc))
        led_red.on()
    finally:
        led_blue.off()
        pulse_off(buzzer_pwm)
def rgb_off():
    """Turn all three RGB LED channels off."""
    led_red.off()
    led_green.off()
    led_blue.off()
def rgb_on():
    """Turn all three RGB LED channels on."""
    led_red.on()
    led_green.on()
    led_blue.on()
def request_photo():
    """Button callback: log and trigger a photo."""
    print("taking photo")
    take_photo()
def main():
    """Poll the debounced button forever; trigger a photo on each press."""
    my_switch = Switch(buzzer_button)
    while True:
        my_switch_new_value = False
        # Disable interrupts for a short time to read shared variable
        irq_state = machine.disable_irq()
        if my_switch.new_value_available:
            my_switch_value = my_switch.value
            my_switch_new_value = True
            my_switch.new_value_available = False
        machine.enable_irq(irq_state)
        # If my switch had a new value, print the new state
        if my_switch_new_value:
            # Pull-up input: a False reading means the button is pressed.
            if not my_switch_value:
                request_photo()
        #while True:
        #    sleep(0.1)
        #    if buzzer_button.value() == 1:
        #        photo_request()
# Entry point: connect to WiFi once, then poll the button forever.
print("starting up")
_connect_wifi()
main()
#buzzer_button.irq( trigger=Pin.IRQ_FALLING, handler=photo_request )
|
# -*- coding: utf-8 -*-
'''
My Accounts
'''
import sys
from urllib.parse import parse_qsl
from myaccounts.modules import control
control.set_active_monitor()

# Parse key=value pairs from the plugin invocation arguments. Split on the
# FIRST '=' only: values containing '=' (URL-encoded or base64 payloads)
# made the original dict([param.split('=')]) raise ValueError.
params = {}
for param in sys.argv[1:]:
    key, sep, value = param.partition('=')
    if sep:
        params[key] = value

action = params.get('action')
query = params.get('query')
addon_id = params.get('addon_id')

# Auth/Revoke flows manage the monitor themselves; release it for all others.
if action and not any(i in action for i in ['Auth', 'Revoke']):
    control.release_active_monitor()
# Action dispatch: each branch lazily imports only the provider module it
# needs, keeping startup cost low for this plugin entry point.
if action is None:
    control.openSettings(query, "script.module.myaccounts")
elif action == 'traktAcct':
    from myaccounts.modules import trakt
    trakt.Trakt().account_info_to_dialog()
elif action == 'traktAuth':
    from myaccounts.modules import trakt
    control.function_monitor(trakt.Trakt().auth)
elif action == 'traktRevoke':
    from myaccounts.modules import trakt
    control.function_monitor(trakt.Trakt().revoke)
elif action == 'alldebridAcct':
    from myaccounts.modules import alldebrid
    alldebrid.AllDebrid().account_info_to_dialog()
elif action == 'alldebridAuth':
    from myaccounts.modules import alldebrid
    control.function_monitor(alldebrid.AllDebrid().auth)
elif action == 'alldebridRevoke':
    from myaccounts.modules import alldebrid
    control.function_monitor(alldebrid.AllDebrid().revoke)
elif action == 'premiumizeAcct':
    from myaccounts.modules import premiumize
    premiumize.Premiumize().account_info_to_dialog()
elif action == 'premiumizeAuth':
    from myaccounts.modules import premiumize
    control.function_monitor(premiumize.Premiumize().auth)
elif action == 'premiumizeRevoke':
    from myaccounts.modules import premiumize
    control.function_monitor(premiumize.Premiumize().revoke)
elif action == 'realdebridAcct':
    from myaccounts.modules import realdebrid
    realdebrid.RealDebrid().account_info_to_dialog()
elif action == 'realdebridAuth':
    from myaccounts.modules import realdebrid
    control.function_monitor(realdebrid.RealDebrid().auth)
elif action == 'realdebridRevoke':
    from myaccounts.modules import realdebrid
    control.function_monitor(realdebrid.RealDebrid().revoke)
elif action == 'tmdbAuth':
    from myaccounts.modules import tmdb
    control.function_monitor(tmdb.Auth().create_session_id)
elif action == 'tmdbRevoke':
    from myaccounts.modules import tmdb
    control.function_monitor(tmdb.Auth().revoke_session_id)
elif action == 'ShowChangelog':
    from myaccounts.modules import changelog
    changelog.get()
elif action == 'ShowHelp':
    from myaccounts.help import help
    help.get(params.get('name'))
elif action == 'ShowOKDialog':
    # NOTE(review): int() raises ValueError when 'message' is missing or
    # non-numeric - presumably a localized string id; confirm callers
    # always pass one.
    control.okDialog(params.get('title', 'default'), int(params.get('message', '')))
elif action == 'tools_clearLogFile':
    from myaccounts.modules import log_utils
    cleared = log_utils.clear_logFile()
    if cleared == 'canceled': pass
    elif cleared: control.notification(message='My Accounts Log File Successfully Cleared')
    else: control.notification(message='Error clearing My Accounts Log File, see kodi.log for more info')
elif action == 'tools_viewLogFile':
    from myaccounts.modules import log_utils
    log_utils.view_LogFile(params.get('name'))
elif action == 'tools_uploadLogFile':
    from myaccounts.modules import log_utils
    log_utils.upload_LogFile()
""" Streaming twitter API example """
from __future__ import print_function
import sys
import tweepy
from ConfigParser import ConfigParser
class TwitterListener(tweepy.StreamListener):
    """Stream listener that saves up to `nooflines` tweets to data/<filename>."""
    def __init__(self, filename, nooflines):
        print('Twitter Listener constructed')
        super(TwitterListener, self).__init__()
        self.filename = filename
        self.file = open("data/" + filename, 'w')
        self.nolines = nooflines  # tweet limit (may arrive as a string)
        self.line = 0             # tweets written so far
    def on_status(self, tweet):
        """Write one tweet; close the file and exit once the limit is reached."""
        print("Receiving tweet no :" + str(self.line))
        ### Have a counter to limiit the number of tweets , once limit exceeded exit the program. Also write the tweets to a file
        if self.line < int(self.nolines):
            # NOTE(review): writing encoded bytes to a text-mode file only
            # works on Python 2 (this module targets py2 - see ConfigParser).
            self.file.write(tweet.text.encode('ascii', 'ignore'))
            self.line = self.line + 1
        else:
            self.file.close()
            exit()
    def on_error(self, msg):
        # Fix: the %s placeholder was never interpolated; print() received
        # two positional arguments instead.
        print('Error: %s' % msg)
    def on_timeout(self):
        """Back off for 10 seconds when the stream times out."""
        # Fix: `sleep` was an undefined name here (NameError at runtime).
        import time
        print('timeout : wait for next poll')
        time.sleep(10)
def get_config():
    """Load and return the mona configuration."""
    parser = ConfigParser()
    parser.read('../cfg/mona.cfg')
    return parser
def get_stream(filepath, no_of_lines):
    """Build an authenticated tweepy Stream feeding a TwitterListener."""
    cfg = get_config()
    auth = tweepy.OAuthHandler(cfg.get('twitter', 'consumer_key'),
                               cfg.get('twitter', 'consumer_secret'))
    auth.set_access_token(cfg.get('twitter', 'access_token'),
                          cfg.get('twitter', 'access_token_secret'))
    listener = TwitterListener(filepath, no_of_lines)
    return tweepy.Stream(auth=auth, listener=listener)
if __name__ == "__main__":
    if len(sys.argv) != 4:
        print("Usage: %s <word> <no_of_lines> <filepath>" % (sys.argv[0]))
    else:
        # Track both the plain word and its hashtag form.
        word, no_of_lines, filepath = sys.argv[1], sys.argv[2], sys.argv[3]
        stream = get_stream(filepath, no_of_lines)
        print("Listening to '%s' and '%s' ..." % ('#' + word, word))
        stream.filter(track=['#' + word, word])
|
from collections import OrderedDict
from dataclasses import dataclass
from enum import Enum
from typing import Dict
import torch
from torch.utils.tensorboard import SummaryWriter
from distributed import comm
class TaskState(Enum):
    """Lifecycle phases of a training task."""
    INIT = 1
    TRAIN = 2
    EVAL = 3
    DONE = 4
@dataclass
class TaskReturns:
    """Value bundle returned by a task step."""
    state: TaskState  # current lifecycle phase
    # Optional payload; NOTE(review): annotated Dict but defaults to None -
    # effectively Optional[Dict].
    value: Dict = None
class Output(object):
    """Named scalar output with a display weight, format spec and suffix.

    `weight` scales the value for display/aggregation (e.g. 100 to render a
    ratio as a percentage); `fmt` is the format spec used by __repr__.
    """

    def __init__(self, name, value=None, weight=1., fmt="5.4f", suffix=""):
        self._name = name
        self._value = value
        self._weight = weight
        self._fmt = fmt
        self._suffix = suffix
        # Fail fast on a bad format spec instead of at display time.
        # NOTE(review): when value is None, __repr__ returns before applying
        # fmt, so an invalid spec is only caught once a value is present.
        try:
            self.__repr__()
        except ValueError:
            raise ValueError(f'Invalid format specifier: {fmt}')  # fixed typo

    def __repr__(self):
        if self.value is None:
            return f"{self._name}:<empty value>"
        if (isinstance(self.value, torch.Tensor) and
                self.value.view(-1).size(0) > 1):
            return f"{self._name}:<none scalar value>"
        return f"{self._name}:{self.weighted_value:{self._fmt}}{self._suffix}"

    def show(self):
        """Like __repr__ but also displays the weight multiplier."""
        return self.__repr__() + f" (x{self._weight})"

    def is_scalar(self):
        """True when a value is present and is a scalar (number or 0/1-element tensor)."""
        if (self.value is None or
                (isinstance(self.value, torch.Tensor) and
                 self.value.view(-1).size(0) > 1)):
            return False
        return True

    @property
    def value(self):
        """Raw (unweighted) value; may be None."""
        # Simplified: the original had a redundant `if None: return None` branch.
        return self._value

    @property
    def weighted_value(self):
        """Value scaled by the display weight, or None when unset."""
        if self._value is None:
            return None
        return self._value * self._weight
class Loss(Output):
    """Output flavor for loss terms (display weight defaults to 1)."""

    def __init__(self, name, value=None, weight=1., fmt="5.4f", suffix=""):
        super().__init__(name, value, weight, fmt, suffix)


class Metric(Output):
    """Output flavor for metrics, rendered as percentages (weight 100)."""

    def __init__(self, name, value=None, weight=100., fmt="5.2f", suffix=""):
        super().__init__(name, value, weight, fmt, suffix)
class TrainerOutputs(object):
    """This class aids to extend trainer outputs without additional cost.

    Ordered, dict-like collection of Output objects keyed by name. Mapping
    access (``outputs[name]``, ``values()``) yields the *weighted* value,
    not the Output object itself.
    """
    def __init__(self, *outputs):
        # Reject non-Output arguments and duplicated names up front.
        if not all([isinstance(output, Output) for output in outputs]):
            raise ValueError("positional arguments have to be subclasses of "
                             "trainer_output.Output.")
        if not len(outputs) == len(set([output._name for output in outputs])):
            raise ValueError("duplicated name of trainer_output.Output.")
        self._outputs = OrderedDict({output._name: output for output in outputs})
    def __repr__(self):
        # Only scalar-valued outputs are rendered.
        return ", ".join([output.__repr__() for output in self._outputs.values()
                          if output.is_scalar()])
    def __getitem__(self, key):
        # Returns the WEIGHTED value for `key`, not the Output object.
        return self._outputs.__getitem__(key).weighted_value
    def __len__(self):
        return self._outputs.__len__()
    def __iter__(self):
        return self._outputs.__iter__()
    def __contains__(self, key):
        return key in self._outputs
    def __del__(self):
        del self._outputs
    def show(self):
        """Like __repr__ but each entry also shows its weight multiplier."""
        return ", ".join([output.show() for output in self._outputs.values()
                          if output.is_scalar()])
    def keys(self):
        """Output names, in insertion order."""
        return list(self._outputs.keys())
    def values(self):
        """Weighted values, in insertion order."""
        return [output.weighted_value for output in self._outputs.values()]
    def items(self):
        return list(zip(self.keys(), self.values()))
    def to_dict(self):
        # Uses keys()/__getitem__, so the dict holds weighted values.
        return OrderedDict(**self)
    def by_cls_name(self, cls_name):
        """Subset of outputs whose class name matches (e.g. 'Loss'); None if empty."""
        outputs = [output for output in self._outputs.values()
                   if output.__class__.__name__ == cls_name]
        if len(outputs) == 0:
            return None
        return TrainerOutputs(*outputs)
    def scalar_only(self):
        """Subset containing only scalar-valued outputs."""
        return TrainerOutputs(*[
            output for output in self._outputs.values()
            if output.is_scalar()])
    def sum_scalars(self):
        """Unweighted sum of all scalar values."""
        return sum([output.value for output in self._outputs.values()
                    if output.is_scalar()])
    def weighted_sum_scalars(self):
        """Weighted sum of all scalar values."""
        return sum([output.weighted_value for output in self._outputs.values()
                    if output.is_scalar()])
class TensorBoardWriter(object):
    """Thin TensorBoard wrapper that only writes from the main process.

    On non-main ranks `self.writer` is None and all methods are no-ops.
    """

    def __init__(self, interval=1, save_dir=None, flush_secs=120):
        self.interval = interval  # log every `interval` steps; <= 0 logs every step
        if comm.is_main_process():
            self.writer = SummaryWriter(log_dir=f"tboard/{save_dir}/",
                                        flush_secs=flush_secs)
        else:
            self.writer = None

    @classmethod
    def init_for_train_from_config(cls, cfg):
        """Alternate constructor reading interval/save_dir from a config object."""
        return cls(
            interval=cfg.train.tb_interval,
            save_dir=cfg.save_dir,
            flush_secs=120 if not cfg.debug else 5,
        )

    def __del__(self):
        # Flush and release the event file when the writer is collected.
        if self.writer is not None:
            self.writer.close()

    def add_outputs(self, outputs, global_step, prefix=""):
        """Log all scalar outputs at `global_step`; return True when written.

        Steps are skipped unless global_step is a multiple of `interval`.
        """
        if self.writer is None:
            return False
        # Guard with `interval > 0`: the original `interval >= 0` evaluated
        # `global_step % 0` when interval == 0, raising ZeroDivisionError.
        if self.interval > 0 and global_step % self.interval != 0:
            return False
        assert isinstance(outputs, TrainerOutputs)
        for key, value in outputs.scalar_only().items():
            key = f"{prefix}/{key}" if prefix else key
            self.writer.add_scalar(key, value, global_step)
        return True
|
import time
import numpy as np
import math
def rand_time_gen(mu=12, sigma=3):
    """Draw a random start time (hour of day) from N(mu, sigma).

    Parameters default to the previously hard-coded N(12, 3), so existing
    callers are unaffected. Returns a numpy array of shape (1,).
    """
    return np.random.normal(mu, sigma, 1)
print(rand_time_gen())
|
import datetime


def _parse_journal(line):
    """Split a journals.csv line into (username, title, body, date).

    Titles are assumed comma-free; commas inside the body are preserved by
    taking the last field as the date and the middle as the body (the
    original 4-way split crashed on bodies containing commas).
    """
    username, title, rest = line.rstrip("\n").split(",", 2)
    body, _, date = rest.rpartition(",")
    return username, title, body, date


def _load_journals(user):
    """Return the list of {'title', 'body'} journals belonging to `user`."""
    journals = []
    with open("notepad/journals.csv", "r") as file:
        for line in file.readlines():
            username, title, body, date = _parse_journal(line)
            if user == username:
                journals.append({"title": title, "body": body})
    return journals


def _pick_journal(journals, prompt):
    """Print a numbered journal list and return the user's 1-based choice."""
    print()
    for index, journal in enumerate(journals):
        print(index + 1, journal["title"])
    print()
    return int(input(prompt))


logged_in_user = False
while True:
    task = input("\nWelcome to the wackiest notepad ever \nPlease enter : \n# su for signup\n# si for signin\n# wr to add journal\n# r to read journal\n# del to delete jornal\n# stats to get stats\n\n> : ")
    if task == "su":
        ############# Sign UP ##############
        firstname = input("Please enter your name : ")
        surname = input("Please enter your surname : ")
        username = input("Please enter your username : ")
        password = input("Please enter your password : ")
        confirm_password = input("Please re-enter your password : ")
        while password != confirm_password:
            print("Sorry you passwords dont match !!\n")
            password = input("Please enter your password : ")
            confirm_password = input("Please re-enter your password : ")
        # `with` closes the handle even if the write fails.
        with open("notepad/db.csv", "a") as file:
            file.write(f"{firstname},{surname},{username},{password}\n")
    elif task == "si":
        ############ Sign in ###############
        input_username = input("Please enter your username : ")
        input_password = input("Please enter your password : ")
        with open("notepad/db.csv", "r") as file:
            for line in file.readlines():
                saved_username, saved_password = line.replace("\n", "").split(",")[2:]
                if saved_username.lower() == input_username.lower() and input_password == saved_password:
                    print(f"Welcome {saved_username}")
                    logged_in_user = saved_username  # remember who is signed in
                    break
            else:
                # for/else: reached only when NO row matched (the original
                # printed this once per non-matching row).
                print("Sorry you may not have an account, Please sign up.")
    elif task == "wr":
        if logged_in_user:
            ############# write journal ##############
            title = input("Please enter a Title : ")
            body = input("Please enter your mind : ")
            today = datetime.datetime.now()
            date = f"{today.day}-{today.month}-{today.year}"
            with open("notepad/journals.csv", "a") as file:
                file.write(f"{logged_in_user},{title},{body},{date}\n")
        else:
            print("Sorry you need to be logged in first.!!")
    elif task == "r":
        if logged_in_user:
            ############# read jornal ##############
            journals_found = _load_journals(logged_in_user)
            selected_option = _pick_journal(journals_found, "Above are your notes pick one \n> ")
            chosen = journals_found[selected_option - 1]
            print("\n###########################")
            print(chosen["title"].upper())
            print("###########################\n")
            print(chosen["body"])
            print("\n###########################")
            print("###########################\n")
        else:
            print("Sorry you need to be logged in first.!!")
    elif task == "del":
        if logged_in_user:
            ############# delete jornal ##############
            journals_found = _load_journals(logged_in_user)
            selected_option = _pick_journal(journals_found, "Select a jornal to delete \n> ")
            doomed_title = journals_found[selected_option - 1]["title"]
            new_db = []
            with open("notepad/journals.csv", "r") as file:
                for line in file.readlines():
                    username, title, body, date = _parse_journal(line)
                    if logged_in_user == username and title == doomed_title:
                        continue  # drop the selected journal
                    new_db.append(line)
            with open("notepad/journals.csv", "w") as file:
                for journal in new_db:
                    file.write(journal)
        else:
            print("Sorry you need to be logged in first.!!")
    elif task == "stats":
        if logged_in_user:
            ########### overview #########
            print("Total notes : ", len(_load_journals(logged_in_user)))
        else:
            print("Sorry you need to be logged in first.!!")
from django.contrib import admin

from planes.models import estadoPersonaPlan, personaPlan, plan

# Register your models here: each model gets the default ModelAdmin.
for _model in (estadoPersonaPlan, personaPlan, plan):
    admin.site.register(_model)
import tools

# Exercise the constants and helpers exposed by the local `tools` module.
for value in (
    tools.PI,
    tools.GRAVITY,
    tools.get_extension("test.txt"),
    tools.highest_number([1, 2, 698, -5978, 654, -65]),
):
    print(value)
# https://docs.python.org/3/py-modindex.html - index of modules already available in Python
#!/usr/bin/env python3
from utils import db_connect
# make bson ObjectId class available for referencing
# bson objects inside a mongo query string
from bson.objectid import ObjectId

# connect to database
db = db_connect()

# output some header html
print("Content-Type: text/html\n")
print("""<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Hello Caflucks</title>
</head>
<body>
<h1>Welcome to Catflucks</h1>""")

# Get one random document from the images collection. Materialize the
# cursor into a list: a cursor object is always truthy, so the original
# `if result:` could never take the else branch even on an empty result.
result = list(db.images.aggregate(
    [{ '$sample': { 'size': 1 } }]
))

# if a result came back
if result:
    # iterate through objects (should only be 1)
    for img in result:
        # pull out the img url and alt text
        img_src = img['url']
        img_alt = img['alt']
        img_id = img['_id']
        # count flucks with matching img_id where is_flucked is 1.
        # count_documents replaces Cursor.count(), which was removed in
        # PyMongo 4.
        num_flucks = db.flucks.count_documents({"image_id": ObjectId(img_id), "is_flucked": 1})
        print("""<p>You are viewing a random image of a cat.</p>
<img src="{}" alt="{}" width=500>
<p>This poor cat has been flucked {} times already.</p>
<a href="/cgi-bin/serve_cat.py" title="serve cat">Serve new cat</a>
""".format( img_src, img_alt, str(num_flucks) ))
else:
    print("<p>Oops. Something went wrong!</p>")

# output some footer html
print("</body></html>")
|
import torchvision as tv
from PIL import Image
import requests
import numpy as np
from configuration import Config
config = Config()
transform = tv.transforms.Compose([
tv.transforms.Resize((config.test_size, config.test_size)),
tv.transforms.ToTensor(),
tv.transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])])
def _fetch_rgb(url):
    """Download the image at `url` and return it as an RGB PIL image."""
    image = Image.open(requests.get(url, stream=True).raw)
    if image.mode != "RGB":
        image = image.convert("RGB")
    return image

def prepare_image(image_url, psuedo_url):
    """Fetch both images and return (image, psuedo_image) as 1xCxHxW tensors.

    Both are normalized by the module-level `transform`. The duplicated
    download/convert logic is factored into _fetch_rgb; download order
    (pseudo first) is preserved from the original.
    """
    psuedo_image = _fetch_rgb(psuedo_url)
    image = _fetch_rgb(image_url)
    image = transform(image).unsqueeze(0)
    psuedo_image = transform(psuedo_image).unsqueeze(0)
    return image, psuedo_image
def split_class(path, w, h):
    """Split a grayscale segmentation mask into two binary class masks.

    Returns (class_one, class_two) as PIL mode-'1' images.
    NOTE(review): resize receives size=(h, w) but PIL expects
    (width, height), so `h` is used as the output width - confirm against
    callers before relying on the parameter names.
    """
    im = Image.open(path).convert('L')
    im_array_red = np.array(im) # 0, 38
    im_array_green = np.array(im) # 0, 75
    # The second-lowest grey level marks the second class.
    # NOTE(review): IndexError if the mask contains a single grey level.
    uniquemidfinder = np.unique(im_array_red)
    mid = uniquemidfinder[1]
    print(np.unique(im_array_red))
    # Background (0) -> 255, everything else -> 0 (inverted binary mask).
    im_array_red[im_array_red != 0] = 1
    im_array_red[im_array_red == 0] = 255
    im_array_red[im_array_red == 1] = 0
    # Keep only pixels at exactly the `mid` grey level.
    im_array_green[im_array_green != mid] = 0
    im_array_green[im_array_green == mid] = 255
    # Class1 = GroundGlassOpacities
    # Class2 = Consolidation
    class_one = Image.fromarray(im_array_red).convert('1').resize(size=(h, w))
    class_two = Image.fromarray(im_array_green).convert('1').resize(size=(h, w))
    return class_one, class_two
|
#!/usr/bin/env python
#
# Usage:
# ./autocommit.py path ext1,ext2,extn cmd
#
# Blocks monitoring |path| and its subdirectories for modifications on
# files ending with suffix |extk|. Run |cmd| each time a modification
# is detected. |cmd| is optional and defaults to git commit all and push.
#
# Example:
# ./autocommit.py ./test/ svg
#
# Dependencies:
# Linux, Python 2.6, Pyinotify
#
import subprocess
import sys
import pyinotify
import shlex
from pprint import pprint
from git import *
class OnWriteHandler(pyinotify.ProcessEvent):
    # Reacts to IN_MODIFY events: either triggers a merge (for files named
    # chalkflow_diff_<orig>_<target>_*) or runs the commit+push commands.
    def my_init(self, cwd, extension, cmds):
        # pyinotify calls my_init() with the kwargs passed to the constructor.
        self.cwd = cwd
        self.extensions = extension.split(',')
        self.cmds = cmds  # [commit_cmd, push_cmd] as argv lists
    def _commit_push(self):
        print '==> Modification detected'
        subprocess.call(self.cmds[0], cwd=self.cwd)
        subprocess.call(self.cmds[1], cwd=self.cwd)
    def _merge(self, filename):
        # Expected filename shape: ..._<orig-sha>_<target-sha>_<suffix>
        filename_parts = filename.split('_')
        orig = filename_parts[-3]
        target = filename_parts[-2]
        print orig, target
        #check if orig and target are branch heads
        branch_heads = getBranchHeads()
        if (branch_heads.has_key(orig)) and (branch_heads.has_key(target)):
            #git merge
            orig_branch = branch_heads[orig]
            target_branch = branch_heads[target]
            # NOTE(review): cmd_merge is built but never executed - the merge
            # itself is dead code as written.
            cmd_merge = shlex.split(('git merge %s/%s') % (orig_branch, target_branch))
    def process_IN_MODIFY(self, event):
        # Merge-trigger files are handled specially and never committed.
        if event.pathname.split("/")[-1][:15] == "chalkflow_diff_":
            self._merge(event.pathname)
            return
        # Ignore files whose extension is not on the watch list.
        if all(not event.pathname.endswith(ext) for ext in self.extensions):
            return
        self._commit_push()
def getBranchHeads():
    # Map each branch's head commit sha -> branch name for the local repo.
    repo = Repo("~/swchalkflow/", odbt=GitDB) #open the local git repo
    assert repo.bare == False #assert that git repo already exists
    branch_heads = {}
    for branch in repo.branches:
        branch_heads[repo.commit(branch.name).hexsha] = branch.name
    return branch_heads
def auto_commit(path, extension, cmds):
    # Blocks forever: watch `path` recursively and dispatch filesystem
    # events to OnWriteHandler.
    wm = pyinotify.WatchManager()
    handler = OnWriteHandler(cwd=path, extension=extension, cmds=cmds)
    notifier = pyinotify.Notifier(wm, default_proc_fun=handler)
    wm.add_watch(path, pyinotify.ALL_EVENTS, rec=True, auto_add=True)
    print '==> Start monitoring %s (type c^c to exit)' % path
    notifier.loop()
if __name__ == '__main__':
    if len(sys.argv) < 3:
        print >> sys.stderr, "Command line error: missing argument(s)."
        sys.exit(1)
    # Required arguments
    path = sys.argv[1]
    extension = sys.argv[2]
    # Optional argument
    cmd_commit = shlex.split('git commit -a -m "automated commit from file change"')
    cmd_push = shlex.split('git push origin master')
    cmds = [cmd_commit, cmd_push]
    if len(sys.argv) == 4:
        # NOTE(review): this assigns the raw string, so cmds[0]/cmds[1] in
        # _commit_push become single characters - the custom-command path is
        # broken; it likely needs shlex.split plus a list wrapper.
        cmds = sys.argv[3]
    # Blocks monitoring
    auto_commit(path, extension, cmds)
|
class StudentOrder:
    """Orders students stably by (height, weight), both ascending."""

    def func(self, n: int, high: str, weight: str) -> str:
        """Return the concatenated 1-based original indices of the first
        `n` students after a stable sort by height, then weight."""
        heights = [int(tok) for tok in high.split(" ")]
        weights = [int(tok) for tok in weight.split(" ")]
        students = [(idx + 1, heights[idx], weights[idx]) for idx in range(n)]
        students.sort(key=lambda student: (student[1], student[2]))
        return ''.join(str(student[0]) for student in students)
# Read (count, heights, weights) triples from stdin until input runs out,
# printing the 1-based indices ordered by (height, weight) ascending.
while 1:
    try:
        count = int(input())
        heights = list(map(int, input().split()))
        weights = list(map(int, input().split()))
        ranked = sorted(
            ((idx, heights[idx - 1], weights[idx - 1]) for idx in range(1, count + 1)),
            key=lambda item: (item[1], item[2]),
        )
        print(" ".join(str(item[0]) for item in ranked))
    except Exception:
        # EOF (or malformed input) ends the loop.
        break
|
import netCDF4 as nc
from netCDF4 import Dataset
import numpy as np
def load_data(filename):
    """Open `filename` as a netCDF Dataset and return it (caller closes)."""
    dataset = nc.Dataset(filename)
    return dataset
def save_data(filename, max_values_ta, ds):
    """Write the 2D max-temperature grid to `filename` as NETCDF4_CLASSIC.

    Lat/lon coordinate values are copied from the source Dataset `ds`.
    Prints progress information and min/max statistics (NaNs excluded).
    """
    # Honor the caller-supplied path: the original ignored `filename` and
    # always wrote to the hard-coded 'data/max_ta.nc'.
    ncfile = Dataset(filename, mode='w', format='NETCDF4_CLASSIC')
    print(ncfile)
    lat_dim = ncfile.createDimension('lat', max_values_ta.shape[0]) # latitude axis, yc
    lon_dim = ncfile.createDimension('lon', max_values_ta.shape[1]) # longitude axis, xc
    ncfile.title = 'Max ta per cell'
    print(ncfile.title)
    ncfile.subtitle = "Test data containing max temperature average per cell"
    print(ncfile.subtitle)
    print(ncfile)
    # a conventional way to define "coordinate variables".
    lat = ncfile.createVariable('lat', np.float64, ('lat',))
    lat.units = 'degrees_north'
    lat.long_name = 'latitude'
    lon = ncfile.createVariable('lon', np.float64, ('lon',))
    lon.units = 'degrees_east'
    lon.long_name = 'longitude'
    # Define a 2D variable to hold the data
    max_ta = ncfile.createVariable('max_ta', np.float32, ('lat', 'lon'))
    max_ta.units = 'C' # degrees Celsius
    max_ta.standard_name = 'max_temperature_in_input_timeslot'
    print(max_ta)
    # Write latitudes, longitudes, from input Dataset
    lat[:] = ds['lat'][:]
    lon[:] = ds['lon'][:]
    # Write the data. This writes the whole 2D netCDF variable all at once.
    max_ta[:, :] = max_values_ta # Appends data along lat and lon dimension
    print("-- Wrote data, max_ta.shape is now ", max_ta.shape)
    # read data back from variable (by slicing it), print min and max, excluding nan values
    ta_max_masked = np.ma.masked_where(np.isnan(max_ta), max_ta)
    print("-- Min/Max values:", ta_max_masked[:, :].min(), ta_max_masked[:, :].max())
|
# Samuel Veloso - Instituto Federal de Alagoas
# Estrutura de dados - Prof. Ricardo
# Simulação de um atendimento de uma fila.
# A cada um segundo um cliente novo chega.
# A cada dois clientes chegarem (dois segundos), um cliente é atendido.
import collections
import time
from random import *
import string
from cliente import Cliente
# Module-level simulation state.
atender = True  # main-loop flag: keep simulating while True
quant_atendidos = 0  # clients served since the last counter reset
fila = collections.deque()  # the waiting queue (VIPs are appended at the front)
def gerar_nome():
    """Return a random five-letter uppercase name."""
    import random
    return ''.join(random.choice(string.ascii_uppercase) for _ in range(5))
def gerar_idade():
    """Return a random client age between 18 and 100 (inclusive)."""
    return randint(18, 100)
def eh_vip():
    """Return 0 or 1 at random; 1 means the client is VIP and queue-jumps."""
    return randint(0, 1)
def atender_cliente():
    """Serve one client from the queue.

    Fix: the original assigned the module-level counter without a ``global``
    declaration, so ``quant_atendidos += 1`` raised UnboundLocalError.
    """
    global quant_atendidos
    if quant_atendidos < 10:
        quant_atendidos += 1
        fila.popleft()
    else:
        # Every 10th service resets the counter and removes from the BACK.
        # NOTE(review): popping the back looks inconsistent with popleft —
        # presumably intentional (skip the queue tail); confirm with author.
        quant_atendidos = 0
        fila.pop()
def mostar_dados():
    """Print the first client, the queue length and the last client.

    Improvement: ``deque`` supports O(1) indexing at both ends, so the
    original popleft/appendleft and pop/append round-trips (which briefly
    mutated the shared queue) are replaced with plain reads.
    Raises IndexError if the queue is empty, like the original.
    """
    primeiro = fila[0]
    ultimo = fila[-1]  # equals primeiro when the queue has one element
    print("\nCliente no inicio: " + primeiro.nome)
    print("Tamanho da fila: " + str(len(fila)))
    print("Cliente no final: " + ultimo.nome)
# Main simulation loop: two new clients arrive per cycle (VIPs jump to the
# front of the queue) and one client is served, so the queue gains a net
# +1 client per cycle — the loop only stops if the queue somehow empties.
while atender:
    # First arrival of this cycle.
    if eh_vip():
        fila.appendleft(Cliente(gerar_nome(), gerar_idade()))
    else:
        fila.append(Cliente(gerar_nome(), gerar_idade()))
    print('\nNovo cliente')
    mostar_dados()
    time.sleep(1)
    # Second arrival of this cycle.
    if eh_vip():
        fila.appendleft(Cliente(gerar_nome(), gerar_idade()))
    else:
        fila.append(Cliente(gerar_nome(), gerar_idade()))
    print('\nNovo cliente - 1')
    mostar_dados()
    time.sleep(1)
    # Serve one client (same logic as atender_cliente, inlined here so the
    # counter assignment works at module level).
    if quant_atendidos < 10:
        quant_atendidos += 1
        fila.popleft()
    else:
        quant_atendidos = 0
        fila.pop()
    if len(fila) == 0:
        atender = False
    print('\nUm cliente atendido')
    mostar_dados()
# List-basics demo: indexing, slicing, mutation, add/remove, sort/reverse.
cities = ["New York ", 'Kiev', "new dehli", 'Toronto']
print(cities)
print(len(cities))
print(cities[0])
print(cities[:-2])  # everything except the last two entries
print(cities[2].upper())
cities[2] = "Tula"  # replace by index
print(cities)
cities.append('Lvov')  # add at the end
print(cities)
cities.insert(0, "Turka")  # add at the front
print(cities)
del cities[1]  # delete by index (removes "New York ")
print(cities)
cities.remove("Kiev")  # delete by value (first occurrence)
print(cities)
deleted_city = cities.pop()  # pop returns the removed (last) element
print("Deleted city is : " + deleted_city)
print(cities)
cities.sort(reverse=True)  # in-place descending sort
print(cities)
cities.reverse()  # in-place order flip (now ascending)
print(cities)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class KbAdvertCommissionClauseQuotaResponse(object):
    """Auto-generated Alipay SDK response model with a single field,
    quota_amount, plus the SDK's standard dict (de)serialisation helpers."""
    def __init__(self):
        self._quota_amount = None  # backing store for the quota_amount property
    @property
    def quota_amount(self):
        """The quota amount value (type defined by the Alipay API)."""
        return self._quota_amount
    @quota_amount.setter
    def quota_amount(self, value):
        self._quota_amount = value
    def to_alipay_dict(self):
        """Serialise to a plain dict for the Alipay gateway.

        NOTE(review): the truthiness test skips falsy values such as 0 —
        this matches the SDK's generated-code convention; confirm if 0 is
        ever a meaningful quota.
        """
        params = dict()
        if self.quota_amount:
            # Nested SDK models serialise themselves; plain values pass through.
            if hasattr(self.quota_amount, 'to_alipay_dict'):
                params['quota_amount'] = self.quota_amount.to_alipay_dict()
            else:
                params['quota_amount'] = self.quota_amount
        return params
    @staticmethod
    def from_alipay_dict(d):
        """Build an instance from a response dict; returns None for empty input."""
        if not d:
            return None
        o = KbAdvertCommissionClauseQuotaResponse()
        if 'quota_amount' in d:
            o.quota_amount = d['quota_amount']
        return o
|
from PyQt5 import QtWidgets, uic, QtGui
import sys
import cv2
import numpy as np
#1. Build the GUI program environment using Qt
class Ui(QtWidgets.QDialog):
    def __init__(self):
        """Load the Qt Designer layout and wire up widgets and handlers."""
        super(Ui, self).__init__()
        uic.loadUi('test4.ui', self)  # widget names below must match test4.ui
        self.loadBtn = self.findChild(QtWidgets.QPushButton, 'loadBtn')
        self.loadBtn.clicked.connect(self.loadBtnClicked)
        self.procBtn = self.findChild(QtWidgets.QPushButton, 'procBtn')
        self.procBtn.clicked.connect(self.procRunClicked)
        # Input-image view, pre-loaded with a sample image.
        self.photo = self.findChild(QtWidgets.QLabel, 'photo')
        self.photo.setPixmap(QtGui.QPixmap("visionImage/21 L90_OK.bmp"))
        self.photo.setScaledContents(True)
        self.result = self.findChild(QtWidgets.QLabel, 'result')  # output view
        self.fnameEdit = self.findChild(QtWidgets.QLineEdit,'fnameEdit')
        # self.slidBar_1 = self.findChild(QtWidgets.QSlider, 'sliderBar_1')
        # self.slidBar_1.valueChanged[int].connect(self.changeValue)
        # self.slide1view = self.findChild(QtWidgets.QLabel, 'slide1view')
        self.value = 50  # only used by the disabled slider handler above
        self.fnameEdit.clear()
        self.show()
# def changeValue(self,value):
# self.slide1view.setText('red : ' + str(value))
# self.value = value
# self.procRunClicked()
def processingImage(self, grayImage, rgbImage):
output = rgbImage.copy()
ret, binary = cv2.threshold(output, 150,255,cv2.THRESH_BINARY)
try:
src = cv2.pyrDown(output)
src_copy = src.copy()
gray = cv2.cvtColor(src, cv2.COLOR_BGR2GRAY)
ret, binary = cv2.threshold(gray, 150,255,cv2.THRESH_BINARY)
kernel = np.ones((7,7), np.uint8)
# 중앙 ====================
img_middle = binary[380:500,205:1100]
img_middle = cv2.morphologyEx(img_middle, cv2.MORPH_CLOSE, kernel)
contours_c, hierarchy = cv2.findContours(img_middle, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
mu = [None]*len(contours_c)
for i in range(len(contours_c)):
mu[i] = cv2.moments(contours_c[i])#중심점
mc = [None]*len(contours_c)
img_copy0 = src.copy()
img_copy0 = img_copy0[380:500,205:1100]
for i in range(len(contours_c)) :
mc[i] = (mu[i]['m10'] / (mu[i]['m00'] + 1e-5), mu[i]['m01'] / (mu[i]['m00'] + 1e-5))
center_i = 0
for i in range(len(contours_c)) :
c_area = cv2.contourArea(contours_c[i])
okcolor = (0, 255, 0)
ngcolor = (0, 0, 255)
cv2.drawContours(img_copy0, contours_c, i, okcolor, 2)
cv2.circle(img_copy0, (int(mc[i][0]), int(mc[i][1])), 4, okcolor, -1)
cv2.putText(img_copy0,"{} : {}".format(i,c_area) ,tuple(contours_c[i][0][0]),
cv2.FONT_HERSHEY_COMPLEX,0.4, (0,255,0),1)
if c_area > 3385 :
cv2.drawContours(img_copy0, contours_c, i, okcolor, 2)
cv2.circle(img_copy0, (int(mc[i][0]), int(mc[i][1])), 4, okcolor, -1)
cv2.putText(img_copy0,"{} : {}".format(i,c_area) ,tuple(contours_c[i][0][0]),
cv2.FONT_HERSHEY_COMPLEX,0.4, (0,255,0),1)
else:#NG일경우
cv2.drawContours(img_copy0, contours_c, i, ngcolor, 2)
cv2.circle(img_copy0, (int(mc[i][0]), int(mc[i][1])), 4, ngcolor, -1)
cv2.putText(img_copy0,"{} : {}".format(i,c_area) ,tuple(contours_c[i][0][0]),
cv2.FONT_HERSHEY_COMPLEX,0.4, (0,0,255),1)
center_i += 1
#왼쪽 ====================
ret_1, binary_1 = cv2.threshold(gray, 150,255,cv2.THRESH_BINARY)
kernel_1 = np.ones((7,7), np.uint8)
img_left = binary_1[220:470, 35:200]#y1:y2, x1:x2
img_left = cv2.morphologyEx(img_left, cv2.MORPH_CLOSE, kernel_1)
contours_1, hierarchy_1 = cv2.findContours(img_left, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
mu = [None]*len(contours_1)
for i in range(len(contours_1)):
mu[i] = cv2.moments(contours_1[i])#중심점
mc = [None]*len(contours_1)
img_copy1 = src.copy()
img_copy1 = img_copy1[220:470, 35:200]
for i in range(len(contours_1)) :
mc[i] = (mu[i]['m10'] / (mu[i]['m00'] + 1e-5), mu[i]['m01'] / (mu[i]['m00'] + 1e-5))
left_i = 0
for i in range(len(contours_1)) :
# print(i, ":", cv2.contourArea(contours_1[i]))
c_area = cv2.contourArea(contours_1[i])
okcolor = (0, 255, 0)
ngcolor = (0, 0, 255)
if c_area > 4000.0 :
cv2.drawContours(img_copy1, contours_1, i, okcolor, 2)
cv2.circle(img_copy1, (int(mc[i][0]), int(mc[i][1])), 4, okcolor, -1)
cv2.putText(img_copy1,"{} : {}".format(i,c_area) ,tuple(contours_1[i][0][0]),
cv2.FONT_HERSHEY_COMPLEX,0.4, (0,255,0),1)
elif c_area < 1050.0 and c_area > 850.0 :
cv2.drawContours(img_copy1, contours_1, i, okcolor, 2)
cv2.circle(img_copy1, (int(mc[i][0]), int(mc[i][1])), 4, okcolor, -1)
cv2.putText(img_copy1,"{} : {}".format(i,c_area) ,tuple(contours_1[i][0][0]),
cv2.FONT_HERSHEY_COMPLEX,0.4, (0,255,0),1)
else:#NG일경우
cv2.drawContours(img_copy1, contours_1, i, ngcolor, 2)
cv2.circle(img_copy1, (int(mc[i][0]), int(mc[i][1])), 4, ngcolor, -1)
cv2.putText(img_copy1,"{} : {}".format(i,c_area) ,tuple(contours_1[i][0][0]),
cv2.FONT_HERSHEY_COMPLEX,0.4, (0,0,255),1)
left_i += 1
#오른쪽 ====================
ret_2, binary_2 = cv2.threshold(gray, 150,255,cv2.THRESH_BINARY)
kernel_2 = np.ones((7,7), np.uint8)
img_right = binary_2[220:470, 1100:1270]#y1:y2, x1:x2
img_right = cv2.morphologyEx(img_right, cv2.MORPH_CLOSE, kernel_2)
contours_2, hierarchy_2 = cv2.findContours(img_right, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
mu = [None]*len(contours_2)
for i in range(len(contours_2)):
mu[i] = cv2.moments(contours_2[i])#중심점
mc = [None]*len(contours_2)
img_copy2 = src.copy()
img_copy2 = img_copy2[220:470, 1100:1270]
for i in range(len(contours_2)) :
mc[i] = (mu[i]['m10'] / (mu[i]['m00'] + 1e-5), mu[i]['m01'] / (mu[i]['m00'] + 1e-5))
right_i = 0
for i in range(len(contours_2)) :
# print(i, ":", cv2.contourArea(contours_2[i]))
c_area = cv2.contourArea(contours_2[i])
okcolor = (0, 255, 0)
ngcolor = (0, 0, 255)
if c_area > 3700.0 :
cv2.drawContours(img_copy2, contours_2, i, okcolor, 2)
cv2.circle(img_copy2, (int(mc[i][0]), int(mc[i][1])), 4, okcolor, -1)
cv2.putText(img_copy2,"{} : {}".format(i,c_area) ,tuple(contours_1[i][0][0]),
cv2.FONT_HERSHEY_COMPLEX,0.4, (0,255,0),1)
elif c_area < 1100.0 and c_area > 840.0 :
cv2.drawContours(img_copy2, contours_2, i, okcolor, 2)
cv2.circle(img_copy2, (int(mc[i][0]), int(mc[i][1])), 4, okcolor, -1)
cv2.putText(img_copy2,"{} : {}".format(i,c_area) ,tuple(contours_1[i][0][0]),
cv2.FONT_HERSHEY_COMPLEX,0.4, (0,255,0),1)
else:#NG일경우
cv2.drawContours(img_copy2, contours_2, i, ngcolor, 2)
cv2.circle(img_copy2, (int(mc[i][0]), int(mc[i][1])), 4, ngcolor, -1)
cv2.putText(img_copy2,"{} : {}".format(i,c_area) ,tuple(contours_1[i][0][0]),
cv2.FONT_HERSHEY_COMPLEX,0.4, (0,0,255),1)
right_i += 1
src_copy[220:470, 35:200] = img_copy1
src_copy[380:500,205:1100] = img_copy0
src_copy[220:470, 1100:1270] = img_copy2
result = left_i + center_i + right_i
if result == 0:
cv2.putText(src_copy,"OK",(500,632), cv2.FONT_HERSHEY_COMPLEX,3, (0,255,0),1)
else:
cv2.putText(src_copy,"NG",(500,632), cv2.FONT_HERSHEY_COMPLEX,3, (0,0,255),1)
cv2.rectangle(src_copy,(28,183) ,(200,491) ,(0,255,0),2)
cv2.rectangle(src_copy,(210,396) ,(1105,500) ,(0,255,0),2)
cv2.rectangle(src_copy,(1135,183) ,(1270,491) ,(0,255,0),2)
except Exception:
print(Exception.e)
return src_copy
    def displayOutputImage(self, outImage, mode):
        """Show a numpy image in one of the two QLabel widgets.

        outImage -- grayscale (2-D) or colour (3-D) uint8 array
        mode     -- 0: show in self.photo (input view); otherwise self.result
        """
        img_info = outImage.shape
        if outImage.ndim == 2 :
            # args: data, width, height, bytes-per-line, format
            qImg = QtGui.QImage(outImage, img_info[1], img_info[0], img_info[1]*1, QtGui.QImage.Format_Grayscale8)
        else :
            qImg = QtGui.QImage(outImage, img_info[1], img_info[0], img_info[1]*img_info[2], QtGui.QImage.Format_BGR888)
        pixmap = QtGui.QPixmap.fromImage(qImg)
        if mode == 0 :
            self.photo.setPixmap(pixmap)
            self.photo.setScaledContents(True)
        else :
            self.result.setPixmap(pixmap)
            self.result.setScaledContents(True)
    # cv2.imread cannot handle non-ASCII (Korean) paths, so assemble the file a different way
    def imread(self, filename, flags=cv2.IMREAD_COLOR, dtype=np.uint8):
        """Read an image via numpy + cv2.imdecode so non-ASCII paths work.

        Returns the decoded image, or None on any failure (error is printed).
        """
        try:
            n = np.fromfile(filename, dtype)
            img = cv2.imdecode(n, flags)
            return img
        except Exception as e:
            print(e)
            return None
    def procRunClicked(self):
        """Run the inspection on the loaded image and show the annotated result.

        Assumes loadBtnClicked has already set self.src — AttributeError otherwise.
        """
        # src = self.imread(self.filename)  # cv2.imread does not support Korean paths
        imgRGB = cv2.cvtColor(self.src, cv2.COLOR_BGR2RGB)
        imgGRAY = cv2.cvtColor(self.src, cv2.COLOR_BGR2GRAY)
        outImg = self.processingImage(imgGRAY,imgRGB)
        self.displayOutputImage(outImg,1)
def loadBtnClicked(self):
path = 'visionImage'
filter = "All Images(*.jpg; *.png; *.bmp);;JPG (*.jpg);;PNG(*.png);;BMP(*.bmp)"
fname = QtWidgets.QFileDialog.getOpenFileName(self, "파일로드", path, filter)
filename = str(fname[0])
self.fnameEdit.setText(filename)
self.src = self.imread(filename) #cv2.imread가 한글경로를 지원하지 않음
img_rgb = cv2.cvtColor(self.src,cv2.COLOR_BGR2RGB)
self.displayOutputImage(self.src,0)
if __name__ == '__main__':  # only launch the GUI when run as a script
    app = QtWidgets.QApplication(sys.argv)
    window = Ui()
    sys.exit(app.exec_())  # propagate Qt's exit status to the shell
from flask import render_template, request, redirect, url_for, session, abort
from sqlalchemy import or_
from app import app, db
from app.models.students import Student
from app.models.needs import Need
from app.models.speakers import Speaker
@app.route('/student/need/<int:id>')
def get_need_page_student(id):
    """
    Render the summary page of a need.
    :param id: id of the need
    :return: rendered need-page template
    """
    # Assumes the user is logged in ('uid' in session) — KeyError otherwise.
    student = Student.query.filter_by(id_user=session['uid']).first()
    try:
        need = Need.query.get(id)
    except:  # NOTE(review): bare except also hides programming errors
        abort(500)
    return render_template('students/need-page-student.html',
                           data={'need': need,
                                 'student': student},
                           title='Résumé du besoin')
@app.route('/student/need/<int:id>/close', methods=['POST'])
def close_need(id):
    """
    Handle the 'terminer' click on a need validated by a speaker.
    :param id: id of the need
    :return: redirect to the student dashboard
    """
    try:
        need = Need.query.get(id)
    except Exception:  # narrowed from a bare except (which also caught SystemExit)
        abort(500)
    need.status = 'Terminé'
    need.team_conclusion = request.form.get('team_conclusion')
    # Deduct the tokens this need consumed, never letting the balance go
    # negative.  (The original tested the balance *before* subtracting, so
    # a small positive balance could still be driven below zero.)
    need.team.tokens = max(0, need.team.tokens - need.used_tokens)
    try:
        db.session.commit()
    except Exception:
        abort(500)
    return redirect(url_for('get_student_dashboard'))
@app.route('/student/need/new/select-speaker')
def get_select_speaker():
    """
    Render the speaker-selection page.
    :return: rendered speaker-choice template
    """
    # role=False filters to plain speakers (non-admin, per the model's flag).
    speakers = Speaker.query.filter_by(role=False).all()
    student = Student.query.filter_by(id_user=session['uid']).first()
    return render_template(
        'students/speaker-choice.html',
        current_route='get_select_speaker',
        title='Choisir son intervenant',
        data=speakers,
        student=student)
@app.route('/students/need/new/<int:id>')
def get_create_need(id):
    """
    Render the need-creation page for the chosen speaker.
    :param id: id of the selected speaker
    :return: rendered create-need template
    """
    speaker = Speaker.query.get(id)
    student = Student.query.filter_by(id_user=session['uid']).first()
    return render_template('students/create-need.html',
                           title='Définir votre besoin',
                           data={'speaker': speaker,
                                 'student': student}
                           )
@app.route('/students/need/new', methods=['POST'])
def create_need():
    """
    Create the need in the database from the submitted form.
    :return: redirect to the student dashboard
    """
    student = Student.query.filter_by(id_user=session['uid']).first()
    title = request.form.get('title')
    description = request.form.get('description')
    speaker_id = request.form.get('speaker_id')
    # NOTE(review): int() raises if the field is missing/non-numeric — the
    # form presumably always supplies it; confirm.
    estimated_tokens = int(request.form.get('estimated_tokens'))
    if estimated_tokens < 0:
        estimated_tokens = 0  # clamp negative estimates to zero
    need = Need(
        title=title,
        description=description,
        estimated_tokens=estimated_tokens,
        status='En cours',
        id_assigned_team=student.team.id,
        id_assigned_speaker=speaker_id
    )
    db.session.add(need)
    try:
        db.session.commit()
    except:  # NOTE(review): bare except also hides programming errors
        abort(500)
    return redirect(url_for('get_student_dashboard'))
@app.route('/student/dashboard')
def get_student_dashboard():
    """
    Paginate and render the student dashboard (the team's needs).
    Query-string params: page (int, default 1), search (substring filter).
    :return: rendered project-stage template
    """
    student = Student.query.filter_by(id_user=session['uid']).first()
    q = Need.query.filter_by(id_assigned_team=student.team.id)
    page = request.args.get('page', default=1, type=int)
    searched = request.args.get('search', default='')
    if searched:
        # Case-insensitive substring match over title, description or status.
        q = q.filter(or_(
            Need.title.ilike('%' + searched + '%'),
            Need.description.ilike('%' + searched + '%'),
            Need.status.ilike('%' + searched + '%'),
        ))
    needs = q.paginate(page, 5, False)  # 5 needs per page, no 404 on overflow
    return render_template(
        'students/project-stage.html',
        current_route='get_student_dashboard',
        title=student.team.project.title,
        subtitle='Retrouvez ici l\'ensemble de vos demandes !',
        data=needs,
        student=student,
        searched=searched
    )
|
from WebLogAnalysis.logAnalysis import TotalPUv
from WebLogAnalysis.logAnalysis import TotalTopIp
from WebLogAnalysis.logAnalysis import TotalCode
import requests
import json
import pymysql
import time
def intoMysql(host, user, password, db, allpath):
    """Aggregate log statistics and insert them into the puv/ip/code tables.

    host/user/password/db -- MySQL connection parameters
    allpath               -- list of log file paths passed to the analysers

    Fixes: parameterised queries instead of str.format (correct quoting,
    no SQL injection); commit only after all inserts succeed (the original
    committed in ``finally`` even after a failure); keyword connect args.
    """
    date = time.strftime('%Y%m%d%H%M%S')
    client = pymysql.connect(host=host, user=user, password=password, db=db)
    try:
        with client.cursor() as cursors:
            pv, uv = TotalPUv(allpath)
            cursors.execute("insert into puv values(%s,%s,%s)", (date, pv, uv))
        with client.cursor() as cursors:
            TenIp = TotalTopIp(allpath)
            # Top four "ip-count" strings, same formatting as before.
            top4 = ['{}-{}'.format(item[0], item[1]) for item in TenIp[:4]]
            cursors.execute("insert into ip values(%s,%s,%s,%s,%s)",
                            (date, top4[0], top4[1], top4[2], top4[3]))
        with client.cursor() as cursors:
            totalCode = TotalCode(allpath)
            code = [element[1] for element in totalCode.items()]
            # NOTE(review): assumes TotalCode always yields exactly 7 status
            # codes — IndexError otherwise; confirm its contract.
            cursors.execute(
                "insert into code values(%s,%s,%s,%s,%s,%s,%s,%s)",
                (date, code[0], code[1], code[2], code[3], code[4], code[5], code[6]))
        client.commit()  # commit only on success
    finally:
        client.close()
# intoMysql('192.168.40.142','Swords','(Swords..0908)','logInfo',['./access_log'])
def DingTalk(token,allpath):
    """Build a markdown log-analysis report and post it to a DingTalk robot.

    token   -- DingTalk robot access token
    allpath -- list of log file paths passed to the analysers
    The message @-mentions the hard-coded phone number.
    """
    api = "https://oapi.dingtalk.com/robot/send?access_token={}".format(token)
    header = {'Content-type':'application/json'}
    date = time.strftime('%Y%m%d%H%M%S')
    # Gather the same aggregates that intoMysql stores.
    pv, uv = TotalPUv(allpath)
    TenIp = TotalTopIp(allpath)
    TenIp1 = (str(TenIp[0][0]) + '-' + str(TenIp[0][1]))
    TenIp2 = (str(TenIp[1][0]) + '-' + str(TenIp[1][1]))
    TenIp3 = (str(TenIp[2][0]) + '-' + str(TenIp[2][1]))
    TenIp4 = (str(TenIp[3][0]) + '-' + str(TenIp[3][1]))
    totalCode = TotalCode(allpath)
    # NOTE(review): assumes exactly 7 status codes are present — confirm.
    code = [element[1] for element in totalCode.items()]
    # messages = '''date:{}
    # pv: {} , uv: {},
    # ipfirst: {}, ipsecond: {}, ipthird: {}, ipfoutth: {},
    # 200: {}, 302: {}, 304: {}, 404: {}, 502: {}, 503: {}, 504: {}
    # '''.format(date, pv, uv, TenIp1, TenIp2, TenIp3, TenIp4, code[0], code[1], code[2], code[3], code[4], code[5],code[6])
    # phone ='15779847379'
    #
    # data = {"msgtype": "text", "text": {"content": "{}".format(messages)}, 'at': {'atMobiles': ["{}".format(phone)]}, 'isAtAll': 'false'}
    phone = '15779847379'
    data = {
        "msgtype": "markdown",
        "markdown": {
            "title":"日志分析报表",
            "text":"#### 分析时间为{} @{}\n".format(date,phone) +
                   "> - pv :{} ,uv : {}\n".format(pv,uv) +
                   "> - ipfirst:{} , ipsecond : {} ,ipthird : {}, ipfourth : {}\n".format(TenIp1, TenIp2, TenIp3, TenIp4) +
                   "> - 200: {} , 302: {} , 304 : {} , 404 : {} , 502 : {} ,503 :{} ,504 : {}".format(code[0], code[1], code[2], code[3], code[4], code[5],code[6])
        },
        "at": {
            "atMobiles": [
                "15779847379"
            ],
            "isAtAll": False
        }
    }
    sendDate = json.dumps(data)
    # Response is ignored; delivery is best-effort.
    requests.post(url=api,data=sendDate,headers=header)
# DingTalk('adac656b42c272b2e62e5b4e1a0831e431b1bd8d7f23781160d6e856fd32cf3c',['./access_log'])
|
import View.CloudStorgeView
import View.DomainView
import View.Platform
import View.WeixingDelegate
def config(app, api):
    """Register every view module's routes on the given app/api pair."""
    view_modules = (
        View.CloudStorgeView,
        View.DomainView,
        View.Platform,
        View.WeixingDelegate,
    )
    for view_module in view_modules:
        view_module.route_config(app, api)
|
from xicam.gui.widgets.imageviewmixins import XArrayView, DepthPlot, BetterTicks, BetterLayout, BetterPlots
class CatalogViewerBlend(BetterPlots, BetterLayout, DepthPlot, XArrayView):
    """Image viewer combining several xi-cam mixins via cooperative MRO."""
    def __init__(self, *args, **kwargs):
        # CatalogViewerBlend inherits methods from XArrayView and CatalogView
        # super allows us to access both methods when calling super() from Blend
        super(CatalogViewerBlend, self).__init__(*args, **kwargs)
from .. import effects
from .. import colors
from ..skills import Skill
from .hero import Hero
import random
class Skill1(Skill):
    """Necromancer skill: heal self by the magic stat, then gain +1 magic."""
    name = "Магическое восстановление"
    description = "Некромант восстанавливает себе HP на величину, равную его магии (magic). После этого он увеличивает свой показатель магии на 1."
    cooldown = 3
    def cast(self, hero, my_team, enemies_team):
        """Restore `hero.magic` HP to the caster and permanently add 1 magic."""
        print(f"{colors.CGREEN}{hero.name} восстанавливается {colors.CEND}")
        hero.regen_hp(hero.magic)
        hero.add_magic(1)
        self.classic_after_cast(hero)  # shared post-cast bookkeeping from Skill
class Skill2(Skill):
    """Necromancer skill: summon a Skeleton into the caster's team."""
    name = "Призыв скелетона"
    description = "Некромант призывает Скелетона"
    cooldown = 5
    def cast(self, hero, my_team, enemies_team):
        # The summon joins the caster's team/dead-list bookkeeping.
        skel = Skeleton(hero.team, hero.team_list, hero.dead_list)
        my_team.append(skel)
        self.classic_after_cast(hero)  # shared post-cast bookkeeping from Skill
class Nekromant(Hero):
    """Necromancer hero: low combat stats but a magic stat that keeps growing.

    Passive: each time he survives taking damage there is a 30% chance to
    gain +1 magic (see get_damage).
    """
    hp = max_hp = 17
    attack = 2
    armor = 0
    magic = 1  # starting magic; raised by skills and the damage passive
    name = 'Nekromant'
    skill1 = Skill1()
    skill2 = Skill2()
    def get_damage(self, damage):
        """Take damage normally; if still alive, 30% chance to gain +1 magic."""
        super().get_damage(damage)
        if self.alive and random.randint(1, 100) <= 30:
            self.magic += 1
    def add_magic(self, magic):
        """Increase the magic stat by `magic` and announce the new value."""
        self.magic += magic
        print(f"У {self.name} magic += {magic}. Теперь у него {self.magic}")
class Skeleton(Hero):
    """Summoned minion: weak stats and no real skills (plain Skill stubs)."""
    name = 'Skeleton'
    hp = max_hp = 5
    attack = 2
    armor = 0
    skill1 = Skill()  # no-op skill placeholders
    skill2 = Skill()
|
# 35. Write a Python program to iterate over dictionaries using for loops.
def dict_loop(data):
    """Build a dict from a flat [k1, v1, k2, v2, ...] list and print each pair.

    Fixes vs. original: no longer shadows the builtin ``dict``; an odd-length
    list no longer raises IndexError (the dangling key is dropped); the
    printed line now uses the loop's value directly; the mapping is returned
    (the original returned None, so this is backward-compatible).
    """
    mapping = dict(zip(data[0::2], data[1::2]))
    for key, value in mapping.items():
        print(key, 'corresponds to ', value)
    return mapping
# Read space-separated tokens and treat them as alternating key/value pairs.
newsDictData = input("Enter a list elements separated by space: ")
userList = newsDictData.split()
dict_loop(userList)
# Generated by Django 3.2.6 on 2021-08-26 12:53
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial coupon migration: a cart-wide percent coupon plus per-book
    percent and cash coupons (verbose names are in Persian). Auto-generated
    by Django — do not hand-edit operations."""
    initial = True
    dependencies = [
        ('book', '0001_initial'),  # Book model needed for the M2M fields below
    ]
    operations = [
        migrations.CreateModel(
            name='CartCoupon',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('code', models.CharField(max_length=30)),
                ('valid_from', models.DateTimeField()),
                ('valid_to', models.DateTimeField()),
                ('is_active', models.BooleanField(default=False)),
                ('discount_percent', models.IntegerField(default=0)),
            ],
            options={
                'verbose_name': 'تخفیف درصدی سبد',
                'verbose_name_plural': 'تخفیف درصدی سبدهای خریده',
            },
        ),
        migrations.CreateModel(
            name='BookPercentCoupon',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('valid_from', models.DateTimeField()),
                ('valid_to', models.DateTimeField()),
                ('is_active', models.BooleanField(default=False)),
                ('discount_percent', models.IntegerField(default=0)),
                ('books', models.ManyToManyField(blank=True, to='book.Book')),
            ],
            options={
                'verbose_name': 'تخفیف درصدی کتاب',
                'verbose_name_plural': 'تخفیف درصدی کتاب ها',
            },
        ),
        migrations.CreateModel(
            name='BookCashCoupon',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('valid_from', models.DateTimeField()),
                ('valid_to', models.DateTimeField()),
                ('is_active', models.BooleanField(default=False)),
                ('discount_price', models.IntegerField(default=0)),
                ('books', models.ManyToManyField(blank=True, to='book.Book')),
            ],
            options={
                'verbose_name': 'تخفیف نقدی کتاب',
                'verbose_name_plural': 'تخفیف نقدی کتاب ها',
            },
        ),
    ]
|
from corefunctions import core
import pandas as pd
def get_company_list():
    """Load the Singapore company list from the bundled CSV as a DataFrame."""
    return pd.read_csv("./sg_company_list.csv")
def sg_research_analysis():
    """Run theme and sentiment analysis over the SG research-article dump."""
    # NOTE(review): hard-coded absolute path — works only on the author's box.
    filename= "/home/kasun/Documents/mvp/data/sginvestors.csv"
    df = core.getData_fromcsv(filename) #all research analysis on singapore stock names
    df_company_list = get_company_list()
    core.find_key_themes(df_company_list['company'],df)
    core.find_sentiment_scores(df_company_list['company'],df)
def get_key_phrases(companyName):
    """Load the per-company key-phrase CSV produced by the analysis step."""
    # NOTE(review): "keyPrases" looks like a typo, but it must match the
    # filename the pipeline actually writes — confirm before renaming.
    df_keyPhrases = pd.read_csv("./"+companyName+"_keyPrases.csv")
    return df_keyPhrases
def get_analysis(companyName):
    """Plot the sentiment trend for every key phrase of a company.

    Fix: the original passed ``keyPrase`` (undefined — the loop variable is
    ``keyPhrase``), which raised NameError on the first iteration.
    """
    keyPrases = get_key_phrases(companyName)
    for keyPhrase in keyPrases['keyPhrase']:
        core.sentimentPlot(keyPhrase, companyName)
def show_analysis(key_phrase,company):
    """Print the positive and negative sentences matching a key phrase.

    Expects core.get_impact_with_keyword to return [positives_df, negatives_df],
    each with a 'Sentence' column.
    """
    df_list = core.get_impact_with_keyword(key_phrase,company)
    print("Postives : \n")
    for sent in df_list[0]['Sentence']:
        print(sent)
    print("\n")
    print("Negatives : \n")
    for sent in df_list[1]['Sentence']:
        print(sent)
show_analysis("private wealth","dbs") # issue a query with key phrase and company name
# -*- coding: utf-8 -*-
#Given an array of size n, find the majority element.
#The majority element is the element that appears more than ⌊ n/2 ⌋ times.
#You may assume that the array is non-empty and the majority element always exist in the array.
class Solution:
    # @param {integer[]} nums
    # @return {integer}
    def majorityElement(self, nums):
        """Return the element appearing more than n/2 times in nums.

        The problem guarantees a majority element exists; like the original,
        this returns the most frequent element, but uses collections.Counter
        instead of a hand-rolled running-max dict.
        """
        return Counter(nums).most_common(1)[0][0]
if __name__ == '__main__':
    # Quick manual check. Fix: the original used the Python-2 print
    # statement (`print x.majorityElement(...)`), a SyntaxError on Python 3.
    nums = [1, 2, 1, 2, 4, 2]
    x = Solution()
    print(x.majorityElement(nums))
|
#! /usr/bin/env python
#Calculation of dynamic IR spectra
import h5py
import numpy as np
import matplotlib.pyplot as pt
from molmod.units import *
from molmod.constants import *
from yaff import *
from molmod.periodic import periodic
from yaff.pes.ext import Cell
# Load the raw dipole-moment trajectory (columns 0-2 = x, y, z components).
f1 = np.genfromtxt('../dipole/E0/dipole.txt')
data = f1[:, 0:3]
step = np.arange(0, len(data), 1)
time = step * 2.0 * femtosecond  # 2 fs between stored frames
# Assure continuity of dipole moments: wrap each successive difference into
# the cell's minimum image and re-apply the correction cumulatively.
cell = np.genfromtxt('../dipole/cell.txt')
for i in range(len(data) - 1):  # fix: range, not the Python-2-only xrange
    value = data[i + 1, :] - data[i, :]
    ref = value.copy()
    rvecs = Cell(np.array([cell[:, 0], cell[:, 1], cell[:, 2]]))
    rvecs.mic(value)
    disp = value - ref
    data[i + 1, :] += disp
# Store the continuous dipole trajectory.
f = h5py.File('moments.h5', 'w')
f['trajectory/moments'] = data
f['trajectory/time'] = time
f['trajectory/step'] = step
f.close()
# Vibrational (IR) spectrum from the stored dipole trajectory.
f = h5py.File('moments.h5', 'a')
spectrum = Spectrum(f, path='trajectory/moments', start=0, end=10000, bsize=10000, key='ir', outpath='trajectory/IR_spectrum')
spectrum.compute_offline()
xunit = lightspeed / centimeter  # convert frequencies to cm^-1
spectrum.amps = spectrum.amps * (spectrum.freqs / xunit) ** 2
spectrum.freqs = spectrum.freqs / xunit
np.savetxt('IR_intensities', np.c_[spectrum.freqs, spectrum.amps])
f.close()  # fix: the original left the second HDF5 handle open
|
import json
import os
import re
from datetime import datetime, timedelta
from statistics import mean
import requests
import mskai.globals as globals
from mskai.DxLogging import print_debug
from mskai.veconfig import loadveconfig
import subprocess
class virtualization():
    def __init__(self, config, **kwargs):
        """Store config and optional paths; ensure the output directory exists.

        Recognised kwargs: config_file_path, outputdir, protocol, dxtoolkit_path.
        NOTE(review): outputdir/protocol are only set when passed — later
        methods assume they exist (AttributeError otherwise).
        """
        # self.scriptname = os.path.basename(__file__)
        # self.scriptdir = os.path.dirname(os.path.abspath(__file__))
        self.enginelistfile = globals.enginelistfile
        self.enginecpulistfile = globals.enginecpulistfile
        self.config = config
        if "config_file_path" in kwargs.keys():
            self.config_file_path = kwargs['config_file_path']
        if "outputdir" in kwargs.keys():
            self.outputdir = kwargs['outputdir']
        if "protocol" in kwargs.keys():
            self.protocol = kwargs['protocol']
        if "dxtoolkit_path" in kwargs.keys():
            self.dxtoolkit_path = kwargs['dxtoolkit_path']
        self.headers = {'Content-Type': 'application/json'}
        # Create the output dir if stat() fails (EAFP mkdir-on-miss).
        try:
            os.stat(self.outputdir)
        except:
            os.mkdir(self.outputdir)
            if self.config.debug:
                print_debug("Created directory {}".format(self.outputdir))
def create_api_session(self, ip_address, port=80):
protocol = self.protocol
print_debug("protocol = {}, port ={}".format(protocol,port))
if protocol == "https":
port = 443
print_debug("New protocol = {}, port ={}".format(protocol,port))
apiversion = {'type': 'APISession', 'version': {'type': 'APIVersion', "major": 1, "minor": 9, "micro": 3}}
api_url_base = '{}://{}:{}/resources/json/delphix/'.format(protocol, ip_address, port)
print_debug("api_url_base = {}".format(api_url_base))
headers = self.headers
api_url = '{0}session'.format(api_url_base)
try:
response = requests.post(api_url, headers=headers, json=apiversion, verify=False)
if response.status_code == 200:
data = json.loads(response.content.decode('utf-8'))
if data['status'] == "OK":
cookies = {'JSESSIONID': response.cookies['JSESSIONID']}
return cookies
else:
print_debug("Engine {} : Error connecting engine".format(ip_address))
return None
else:
print_debug("Engine {} : Error connecting engine".format(ip_address))
return None
except:
print_debug("Engine {} : Error connecting engine".format(ip_address))
return None
def login_api_session(self, ip_address, cookies, apicall, payload, port=80):
protocol = self.protocol
print_debug("protocol = {}, port ={}".format(protocol,port))
if protocol == "https":
port = 443
print_debug("New protocol = {}, port ={}".format(protocol,port))
api_url_base = '{}://{}:{}/resources/json/delphix/'.format(protocol,ip_address, port)
headers = self.headers
api_url = '{0}{1}'.format(api_url_base, apicall)
try:
response = requests.post(api_url, cookies=cookies, headers=headers, json=payload, verify=False)
if response.status_code == 200:
data = json.loads(response.content.decode('utf-8'))
if data['status'] == "OK":
cookies = {'JSESSIONID': response.cookies['JSESSIONID']}
return cookies
else:
print_debug("Engine {} : Error logging engine".format(ip_address))
return None
else:
print_debug("Engine {} : Error logging engine".format(ip_address))
return None
except:
print_debug("Engine {} : Error logging engine".format(ip_address))
return None
def get_api_response(self, ip_address, cookies, apicall, port=80):
protocol = self.protocol
print_debug("protocol = {}, port ={}".format(protocol,port))
if protocol == "https":
port = 443
print_debug("New protocol = {}, port ={}".format(protocol,port))
api_url_base = '{}://{}:{}/resources/json/delphix/'.format(protocol,ip_address, port)
headers = self.headers
api_url = '{0}{1}'.format(api_url_base, apicall)
try:
response = requests.get(api_url, cookies=cookies, headers=headers, verify=False)
if response.status_code == 200:
data = json.loads(response.content.decode('utf-8'))
if data['status'] == "OK":
return data['result']
else:
print_debug("Engine {} : Error fetching data".format(ip_address))
return None
else:
print_debug("Engine {} : Error fetching data".format(ip_address))
return None
except:
print_debug("Engine {} : Error fetching data".format(ip_address))
return None
    def post_api_response(self, ip_address, cookies, apicall, payload, mrthod, port=80):
        """POST `payload` to `apicall` and return the decoded 'result' payload.

        Returns None on any failure.
        NOTE(review): `mrthod` (sic) is accepted but never used — probably a
        leftover "method" parameter; kept for interface compatibility.
        """
        protocol = self.protocol
        print_debug("protocol = {}, port ={}".format(protocol,port))
        if protocol == "https":
            port = 443  # https always uses 443, overriding the caller's port
            print_debug("New protocol = {}, port ={}".format(protocol,port))
        api_url_base = '{}://{}:{}/resources/json/delphix/'.format(protocol,ip_address, port)
        headers = self.headers
        api_url = '{0}{1}'.format(api_url_base, apicall)
        try:
            response = requests.post(api_url, cookies=cookies, headers=headers, json=payload, verify=False)
            if response.status_code == 200:
                data = json.loads(response.content.decode('utf-8'))
                if data['status'] == "OK":
                    return data['result']
                else:
                    print_debug("Engine {} : Error fetching data".format(ip_address))
                    return None
            else:
                print_debug("Engine {} : Error fetching data".format(ip_address))
                return None
        except:  # NOTE(review): bare except also hides programming errors
            print_debug("Engine {} : Error fetching data".format(ip_address))
            return None
def gen_cpu_file(self):
f = open(self.enginecpulistfile, "w")
f.write("{},{}\n".format("ip_address", "cpu"))
f.close()
dlpxconfig = loadveconfig()
config_file_path = self.config_file_path
dxtoolkit_path = self.dxtoolkit_path
dlpxconfig.get_config(config_file_path)
for engine in dlpxconfig.dlpx_engines:
try:
# print_debug(dlpxconfig.dlpx_engines[engine])
# self.get_cpu_raw_data(dlpxconfig.dlpx_engines[engine])
# print("engine = {}".format(engine))
print_debug("dxtoolkit_path: {}, config_file_path:{}, engine: {}".format(dxtoolkit_path + '/dx_get_cpu',config_file_path, engine))
out = subprocess.Popen([dxtoolkit_path + '/dx_get_cpu', '-d', engine, '-configfile', config_file_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
#print_debug("out = {}".format(out))
stdout, stderr = out.communicate()
print_debug("stdout: {} ,stderr: {}".format(stdout, stderr))
r1 = re.findall(r"Can't connect",stdout.decode("utf-8"))
if not r1:
rs = stdout.split()[0]
rs = rs.decode("utf-8")
print_debug("rs: {}".format(rs))
if rs == "OK:" or "CRITICAL:" or "WARNING:":
cpuvalue = stdout.split()[-1:][0]
cpuvalue = cpuvalue.decode("utf-8")
f = open(self.enginecpulistfile, "a")
f.write("{},{}\n".format(engine, cpuvalue))
f.close()
print_debug("Engine {} : pulled cpu data - OK".format(engine))
else:
print("Engine {} : Unable to pull cpu data".format(engine))
f = open(self.enginecpulistfile, "a")
f.write("{},{}\n".format(engine, "0"))
f.close()
else:
print("Engine {} : Unable to connect and pull cpu data. Defualt 0".format(engine))
f = open(self.enginecpulistfile, "a")
f.write("{},{}\n".format(engine, "0"))
f.close()
except:
#print_debug("Engine {} : Error for get_cpu_raw_data".format(engine['ip_address']))
print_debug("Engine {} : Unable to pull cpu data".format(engine))
    def get_cpu_raw_data(self, engine):
        """Pull the last 5 minutes of CPU analytics from one Delphix engine
        via its REST API and append the mean utilization percentage to
        self.enginecpulistfile.

        :param engine: dict with 'ip_address', 'username', 'password' keys.
        """
        # engine = {'ip_address' : 'ajaydlpx6pri.dcenter.delphix.com' , 'username' : 'admin' , 'password' : 'delphix'}
        cookies = self.create_api_session(engine['ip_address'], port=80)
        if cookies is not None:
            print_debug("Engine {} : Session created".format(engine['ip_address']))
            apicall = "login"
            payload = {"type": "LoginRequest", "username": engine['username'], "password": engine['password']}
            logincookies = self.login_api_session(engine['ip_address'], cookies, apicall, payload, port=80)
            if logincookies is not None:
                print_debug("Engine {} : Login Successful".format(engine['ip_address']))
                apicall = "analytics"
                analytics_list = self.get_api_response(engine['ip_address'], logincookies, apicall, port=80)
                if analytics_list is not None:
                    cpu_data_list = []
                    # Only the 'default.cpu' slice carries CPU statistics.
                    for slice in analytics_list:
                        if slice['name'] == 'default.cpu':
                            five_minute = timedelta(minutes=5)
                            end_date = datetime.utcnow()
                            # end_date = datetime.today()
                            start_date = end_date - five_minute
                            # ISO-8601 timestamps required by the getData endpoint.
                            start_date_isostr = "{}T{}.000Z".format(start_date.strftime('%Y-%m-%d'),
                                                                    start_date.strftime('%H:%M:%S'))
                            end_date_isostr = "{}T{}.000Z".format(end_date.strftime('%Y-%m-%d'),
                                                                  end_date.strftime('%H:%M:%S'))
                            print_debug('Engine {} : Parameters ({}, {}, {}, {})'.format(engine['ip_address'],
                                                                                         slice['reference'],
                                                                                         "resolution=1",
                                                                                         start_date_isostr,
                                                                                         end_date_isostr))
                            cpu_analytics_list = []
                            try:
                                apicall = "analytics/{}/getData?&resolution={}&numberofDatapoints={}&startTime={}&endTime={}".format(
                                    slice['reference'], "1", "10000", start_date_isostr, end_date_isostr)
                                cpu_analytics_data = self.get_api_response(engine['ip_address'], logincookies, apicall,
                                                                           port=80)
                                if cpu_analytics_data == []:
                                    print_debug("Engine {} : No data found for engine".format(engine['ip_address']))
                                else:
                                    # util% = (user + kernel) / (idle + user + kernel);
                                    # negative samples are clamped (kernel floored at 1
                                    # to avoid an all-zero denominator).
                                    for row in cpu_analytics_data['datapointStreams'][0]['datapoints']:
                                        ts = row['timestamp'].split(".")[0].replace("T", " ")
                                        idle = 0 if row['idle'] <= 0 else row['idle']
                                        user = 0 if row['user'] <= 0 else row['user']
                                        kernel = 1 if row['kernel'] <= 0 else row['kernel']
                                        ttl_cpu = idle + kernel + user
                                        util = 0 if (ttl_cpu == 0) else (((user + kernel) / (ttl_cpu)) * 100)
                                        cpu_data_dict = {"ts": ts, "cpu": float(util)}
                                        cpu_data_list.append(cpu_data_dict)
                                    # print_debug(round(mean(k['cpu'] for k in cpu_data_list),2))
                                    # Mean over the 5-minute window, appended as one CSV row.
                                    cpu_usage = round(mean(k['cpu'] for k in cpu_data_list), 2)
                                    f = open(self.enginecpulistfile, "a")
                                    f.write("{},{}\n".format(engine['ip_address'], cpu_usage))
                                    f.close()
                                    print_debug(
                                        "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++")
                            except Exception as e:
                                print_debug(
                                    "Engine {} : Unable to pull cpu_analytics_data".format(engine['ip_address']))
                                return
                else:
                    print_debug("Engine {} : Unable to pull data".format(engine['ip_address']))
            else:
                print_debug("Engine {} : Unable to login".format(engine['ip_address']))
# GET: http://dlpx-5381-mds-1048-d2b9cd33.dc4.delphix.com:80/resources/json/delphix/service/time
# GET: http://dlpx-5381-mds-1048-d2b9cd33.dc4.delphix.com:80/resources/json/service/configure/currentSystemTime
# GET: http://dlpx-5381-mds-1048-d2b9cd33.dc4.delphix.com:80/resources/json/delphix/analytics/ANALYTICS_STATISTIC_SLICE-1/getData?&resolution=1&numberofDatapoints=10000&startTime=2020-05-21T16%3A49%3A16.000Z
|
# Demo: two parallel lists pairing stooge names with bank balances.
stooges = ["curly", "larry", "moe"]
bankbal = [200, 300, 150]
# Walk both lists in lockstep instead of indexing by position.
for name, balance in zip(stooges, bankbal):
    print(name, balance)
print("---------------------------------------------")
# Extend both lists with two more entries, then show the combined state.
stooges += ["shemp", "curly jo"]
bankbal += [400, 1159]
print(stooges, bankbal)
##
##print("---------------------------------------------")
##stooges.append("frank")
##bankbal.append(50)
##
##print(stooges,bankbal)
##
##print("---------------------------------------------")
##
##stname = input("Please eneter a stooge name: ").strip()
##
##
##
##stindex = stooges.index(stname)
##print(stooges[stindex],bankbal[stindex])
##
##print("---------------------------------------------")
##
##backupstooges = stooges
##
##
##stoogesSorted = sorted(stooges)
##print(backupstooges,stoogesSorted)
##
##print("---------------------------------------------")
##
##del stooges[2]
##print(stooges)
##
##stooges.remove("larry")
##print(stooges)
##
##print("---------------------------------------------")
##
##print("High Bank Bal: ", max(bankbal))
##
##print("Min bank balance: ", min(bankbal))
##
##print("total bankbal is: ", sum(bankbal))
##
##print("---------------------------------------------")
##
##stooges.insert(4,"abelar")
##
##print(stooges[4])
##print(stooges)
##
##===========
##
##num_list= []
##x=0
##for i in range(11):
## num_list.insert(i,i)
##
##print(num_list)
##
##
##
##for num in range(0,11,1):
## num_list.append(num)
##
##print(num_list)
##
##
##for num in range(11):
## if num %2 !=0:
## num_list.append(num)
##
##
##num_list = [x for x in range (0,11,1)if x %2 !=0] #Pyhton list comprehension feature
##print(num_list,format(sum(num_list),",.1f"))
##
##
##
##print(format(sum([x for x in range (0,11,1)if x %2 !=0]),",.1f"))
##
##
##========================================================================
##list1= [ "curly","larry","moe",]
##tuple1= tuple(list1)
##
##print(tuple1)
##
##
##list2= list(tuple1)
##
##for i in range(len(tuple1)):
## print(tuple1[i])
##
##infile = open("schools.txt","r")
##
##
##
##colleges =[]
##
##
##for school in infile:
## colleges.append(school.strip())
## print ("Schools: ", school.strip())
##
##
##colleges.sort()
##
##sorted_colleges = sorted(colleges)
##
##print("Colleges :",colleges)
##print(sorted_colleges)
##
##
##
##=============================================
##
##stooges_data = [['curly',40,60],['larry',50,100],["moe",]
##
|
# Generated by Django 2.2.5 on 2019-11-05 12:50
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds a nullable image field "File"
    # to the existing "teach" model of the VCS app.
    dependencies = [
        ('VCS', '0002_thongtin'),
    ]
    operations = [
        migrations.AddField(
            model_name='teach',
            name='File',
            # null=True so existing rows remain valid without a file.
            field=models.ImageField(null=True, upload_to=''),
        ),
    ]
|
"""Bitccoin ticker module for ppbot.
@package ppbot
Displays the current bitcoin pricing from mtgox
@syntax btc
"""
import requests
import string
import json
from xml.dom.minidom import parseString
from modules import *
class Bitcoin(Module):
    """Bot module: replies with the current bitcoin pricing from the mtgox ticker.

    NOTE(review): the Mt. Gox API has been offline since 2014, so lookup()
    will fail and btc() silently does nothing — verify/replace the endpoint.
    """
    def __init__(self, *args, **kwargs):
        """Constructor"""
        Module.__init__(self, kwargs=kwargs)
        # Public ticker endpoint queried by lookup().
        self.url = "https://data.mtgox.com/api/1/BTCUSD/ticker"
    def _register_events(self):
        """Register module commands."""
        self.add_command('btc')
    def btc(self, event):
        """Action to react/respond to user calls."""
        data = {}
        try:
            result = self.lookup()['return']
            data['high'] = result['high']['display_short']
            data['last'] = result['last_local']['display_short']
            data['low'] = result['low']['display_short']
            data['volume'] = result['vol']['display_short']
            message = "Last: %(last)s - High/Low: (%(high)s/%(low)s) - Volume: %(volume)s" % (data)
            self.reply(message)
        except:
            # Best-effort: any network/JSON/key failure drops the command silently.
            pass
    def lookup(self):
        """Fetch the ticker URL and return the decoded JSON payload."""
        result = requests.get(self.url)
        return result.json()
|
from cmath import exp
import pytest
from puzzles.ransom_note import can_construct
# Parametrized positive/negative cases for puzzles.ransom_note.can_construct.
# NOTE(review): the parameter is spelled "random_note" — presumably meant
# "ransom_note"; renaming requires updating the parametrize string as well.
@pytest.mark.parametrize(
    "random_note, magazine, expected",
    [
        ("a", "b", False),
        ("aa", "ab", False),
        ("aa", "aab", True),
    ],
)
def test_can_construct(random_note, magazine, expected):
    assert can_construct(random_note, magazine) == expected
|
# BubbleSort Using Recurssion
def solution(array):
    """Sort `array` in place using a recursive bubble sort and return it.

    `i` counts completed passes, `j` is the index compared within the current
    pass.  FIX: the original guard `len(array) == 1` missed the empty list,
    so `array[j] > array[j + 1]` raised IndexError on `[]`.
    """
    def _bubble(i, j, array):
        # One comparison/swap step; recursion advances j within a pass,
        # then i to the next pass, until n passes are complete.
        if array[j] > array[j + 1]:
            array[j], array[j + 1] = array[j + 1], array[j]
        if j < len(array) - 2:
            _bubble(i, j + 1, array)
        elif i < len(array):
            _bubble(i + 1, 0, array)
        return array

    # Lists of length 0 or 1 are already sorted.
    if len(array) <= 1:
        return array
    return _bubble(0, 0, array)
# Script entry: read the element count (unused) and the space-separated
# integers from stdin, then print the sorted list.
n = int(input())
array = list(map(int, input().split()))
print(solution(array))
|
from django.test import TestCase
from django.utils import timezone
from django.contrib.auth import get_user_model
from .forms import CookieForm
from .models import Cookie
# Create your tests here.
class CookieFormTest(TestCase):
    """Exercises CookieForm construction, valid-data saving and blank-data errors."""

    def setUp(self):
        # Keep references on self so test methods can reuse them — the
        # original test bodies referenced undefined globals `user`,
        # `request` and `cookie` (NameError at runtime).
        self.user = get_user_model().objects.create_user('beezlebub')
        self.post_date = timezone.now()
        self.cookie = Cookie.objects.create(cookie_type='samoas', seller=self.user, price=15, post_date=self.post_date)

    def test_init(self):
        # The form can be constructed for an existing cookie without bound data.
        CookieForm(cookie=self.cookie)

    def test_valid_data(self):
        form = CookieForm({
            'cookie_type': 'samoas',
            'seller': self.user,
            'price': 15,
            'post_date': self.post_date,
        }, cookie=self.cookie)
        self.assertTrue(form.is_valid())
        cookie = form.save()
        self.assertEqual(cookie.cookie_type, 'samoas')
        self.assertEqual(cookie.seller, self.user)
        self.assertEqual(cookie.price, 15)
        # Compare against the timestamp captured in setUp; a fresh
        # timezone.now() could never equal the stored value.
        self.assertEqual(cookie.post_date, self.post_date)

    def test_blank_data(self):
        form = CookieForm({}, cookie=self.cookie)
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors, {
            'cookie_type': ['required'],
            'seller': ['required'],
            'price': ['required'],
            'post_date': ['required'],
        })
|
import math
global a, b
def check(a, b):
    """Print and return the Euclidean distance from point (a, b) to the origin.

    Returning the value (the original returned None) lets callers use the
    result programmatically; existing call sites that ignore it are unaffected.
    """
    print("Euclidean distance from the points a and b to the origin (0, 0)")
    distance = math.sqrt(a * a + b * b)
    print(distance)
    return distance
check(7, 5)
check(2, 4)
check(4, 5)
check(3, 2) |
import sys
import os
from tensorflow.python.keras.preprocessing.image import ImageDataGenerator
from tensorflow.python.keras import optimizers
from tensorflow.python.keras.models import Sequential
from tensorflow.python.keras.layers import Dropout, Flatten, Dense, Activation
from tensorflow.python.keras.layers import Convolution2D, MaxPooling2D
from tensorflow.python.keras import backend as K
K.clear_session()
data_train = './data/train'
data_test = './data/test'
epoca = 10
length, width = 100, 100
batchSize = 10
iterEpoca = 100
iterTrain = 200
filter1Cnn = 32
filter2Cnn = 64
filter1SizeCnn = (3, 3)
filter2SizeCnn = (2, 2)
poolSize = (2, 2)
classes = 1
lr = 0.0005
# Augmenting generator for training images (rescale + light geometric noise).
dataGenerateTrain = ImageDataGenerator(
    rescale=1./255,
    shear_range=0.3,
    zoom_range=0.3,
    horizontal_flip=True
)
# Test images are only rescaled.
dataGenerateTest = ImageDataGenerator(
    rescale=1./255
)
# BUG FIX: the training flow read from `data_test` (so the model trained on
# the test set) and used the misspelled kwarg `class_model`; Keras expects
# `class_mode`.
imageTrain = dataGenerateTrain.flow_from_directory(
    data_train,
    target_size=(length, width),
    batch_size=batchSize,
    class_mode='categorical'
)
imageTest = dataGenerateTest.flow_from_directory(
    data_test,
    target_size=(length, width),
    batch_size=batchSize,
    class_mode='categorical'
)
# Build and train a small two-block CNN classifier, then persist it.
cnn = Sequential()
# BUG FIX: Convolution2D expects (filters, kernel_size); the original passed
# them swapped, i.e. Convolution2D(filter1SizeCnn, filter1Cnn, ...).
cnn.add(Convolution2D(filter1Cnn, filter1SizeCnn, padding='same', input_shape=(length, width, 3), activation='relu'))
cnn.add(MaxPooling2D(pool_size=poolSize))
cnn.add(Convolution2D(filter2Cnn, filter2SizeCnn, padding='same', activation='relu'))
cnn.add(MaxPooling2D(pool_size=poolSize))
cnn.add(Flatten())
cnn.add(Dense(150, activation='relu'))
cnn.add(Dropout(0.5))
# BUG FIX: `ccn.add` was a NameError; the model variable is `cnn`.
cnn.add(Dense(classes, activation='softmax'))
# An optimizer is required to train; use Adam with the configured learning
# rate and report accuracy during fit.
cnn.compile(loss='categorical_crossentropy', optimizer=optimizers.Adam(lr=lr), metrics=['accuracy'])
cnn.fit(imageTrain, steps_per_epoch=iterEpoca, epochs=epoca, validation_data=imageTest, validation_steps=iterTrain)
_dir = './model/'
if not os.path.exists(_dir):
    os.mkdir(_dir)
cnn.save(_dir+'/model.h5')
cnn.save_weights(_dir+'/pesos.h5')
# --------------------------- IMPORTS --------------------------- #
import mail_machine
import excel_machine
import word_machine
import corresponding_date
from prettytable import PrettyTable
# --------------------------- CONSTANT VARIABLES --------------------------- #
# Word:
WORD_TEMPLATE = r"C:\Users\Frederico\Desktop\Frederico Gago\Confere\Programas\mail_project\word_template\saft_mail_template.docx"
POPULATED_WORD = r"C:\Users\Frederico\Desktop\Frederico Gago\Confere\Programas\mail_project\word_template\Populated_template_V2.docx"
# Excel:
EXCEL_PATH = r"C:\Users\Frederico\Desktop\Frederico Gago\Confere\Programas\mail_project\excel_conference\Controle de Saft 2021 - Experiencia.xlsx"
SHEET = "Experiencia"
# --------------------------- DATE PAST MONTH --------------------------- #
month_year = corresponding_date.month_in_reference()
month = month_year[0]
year = month_year[1]
# --------------------------- EXCEL EXTRACTOR --------------------------- #
def excel_extractor():
    """Load the SAFT control spreadsheet and return its per-company client-info dict."""
    excel_data = excel_machine.ExcelMachine(EXCEL_PATH, SHEET)
    return excel_data.client_info
# --------------------------- EXCEL Printer --------------------------- #
def print_excel_list(company_info):
    """
    Print a pretty Table with the info of the company's to send email.
    :param company_info: Dict of all the company info
    """
    show_clients_list = PrettyTable(["Store", "Nº", "Company", "NIF", "Mail-to"])
    # company_info maps company -> {store: info-dict}; one table row per store.
    for company in company_info.values():
        for store, info in company.items():
            show_clients_list.add_row([store, info["Nº Emp."], info["EMPRESA"], info["NIF"], info["Mail - To"]])
    print(show_clients_list)
# --------------------------- Save & Send --------------------------- #
def send_save_mail(data, mode):
    """Save ("save") or send ("send") a SAFT reminder mail for every store.

    :param data: dict of companies -> {store: info-dict}, as returned by
                 excel_extractor().
    :param mode: "save" writes mail drafts for review; "send" sends mails,
                 de-duplicates by (company id, recipient) and records the
                 sent state back into the excel sheet.
    """
    for company in data.values():
        for store, info in company.items():
            company_name = info["EMPRESA"]
            id_company = info["Nº Emp."]
            nif_company = info["NIF"]
            mail_to = info["Mail - To"]
            mail_cc = info["Mail - CC"]
            # Class-level registry shared across calls (used for de-duplication).
            sent_mails_list = mail_machine.Mail.sent_mails
            # Populate Word with info of the company:
            word_transformation = word_machine.WordMachine(
                WORD_TEMPLATE,
                POPULATED_WORD,
                empresa=company_name,
                nif=nif_company,
                id_empresa=id_company,
            )
            # Get info to send mail
            mail_subject = word_transformation.subject_mail()
            word_transformation.populate_word()
            if mode == "save":
                body_save = word_transformation.word_to_html()
                mail = mail_machine.Mail(mail_subject, body_save, mail_to, mail_cc)
                mail.save_mails()
            elif mode == "send":
                # Checks if already sent mail to the same email with the same company name
                if id_company in [elem for sublist in sent_mails_list for elem in sublist] and \
                        mail_to in [elem for sublist in sent_mails_list for elem in sublist]:
                    continue
                else:
                    sent_mails_list.append([id_company, mail_to])
                    body_send = word_transformation.message_to_mail()
                    mail = mail_machine.Mail(mail_subject, body_send, mail_to, mail_cc)
                    mail.send_mail()
                    # take note in excel
                    # Create Instance
                    input_excel = excel_machine.ExcelMachine(EXCEL_PATH, SHEET)
                    input_excel.introduce_info(company_number=int(id_company), store_number=int(store), info_saft="mail_enviado")
# --------------------------- PROGRAM FLOW --------------------------- #
# Interactive CLI: option 1 loads and lists pending-SAFT clients; option 2
# saves drafts for review and optionally sends them after confirmation.
menu_string = """Menu
1 - Imprimir lista de clientes com o SAFT pendente
2 - Enviar Mail's
0 - Exit
"""
# Must run option 1 first so option 2 has data to work with.
companies_data = None
while True:
    menu_choice = input(menu_string)
    if menu_choice == "0":
        exit()
    elif menu_choice == "1":
        companies_data = excel_extractor()
        print_excel_list(companies_data)
    elif menu_choice == "2" and companies_data:
        # Drafts are saved first so the operator can inspect them before sending.
        send_save_mail(companies_data, "save")
        check_mails = input(f"Mail have been saved to {mail_machine.ABSOLUTE_PATH_SAVE_MAILS[:84]},"
                            f" please check if everything ok!\nWrite 'Enviar Mail' to send mails\n"
                            f"To Cancel and return to menu press any other key:\n")
        if check_mails.lower() == "enviar mail":
            send_save_mail(companies_data, "send")
            print(f"You've sent {mail_machine.Mail.count} e-mails")
    else:
        print("First choose option 1 to view data\n")
|
import os
import random
from datetime import datetime
import numpy as np
import pandas as pd
import torch
import torch.nn as nn
import torch.nn.functional as F
def fix_seed(seed: int) -> None:
    """Seed every RNG in use (stdlib random, numpy, torch CPU/CUDA) and put
    cuDNN into deterministic, non-benchmarking mode for reproducible runs."""
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False
def accuracy(output, target, topk=(1,)):
    """Computes the precision@k for the specified values of k.

    Args:
        output: (batch, num_classes) score/logit tensor.
        target: (batch,) ground-truth class indices.
        topk: iterable of k values to report.

    Returns:
        List of 0-dim tensors, one per k, each the top-k accuracy in percent.
    """
    maxk = max(topk)
    batch_size = target.size(0)
    _, pred = output.topk(maxk, 1, True, True)
    pred = pred.t()
    correct = pred.eq(target.view(1, -1).expand_as(pred))
    res = []
    for k in topk:
        # FIX: use .reshape instead of .view — for k > 1 the sliced mask is
        # non-contiguous and .view raises a RuntimeError in modern PyTorch.
        correct_k = correct[:k].reshape(-1).float().sum(0)
        res.append(correct_k.mul_(100.0 / batch_size))
    return res
class DistillationLoss(nn.Module):
    """Temperature-scaled KL divergence between two logit tensors.

    Computes the elementwise (unreduced) KL divergence of
    softmax(out2 / T) from log_softmax(out1 / T).
    """

    def __init__(self, temp: float):
        super(DistillationLoss, self).__init__()
        # Softening temperature applied to both logit tensors.
        self.T = temp

    def forward(self, out1, out2):
        log_p = F.log_softmax(out1 / self.T, dim=1)
        q = F.softmax(out2 / self.T, dim=1)
        # reduction="none" keeps the per-element divergence values.
        return F.kl_div(log_p, q, reduction="none")
class AverageMeter:
    """Tracks the latest value plus a running sum, count and average."""

    def __init__(self):
        self.reset()

    def reset(self):
        # Clear every statistic back to zero.
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record `val` observed `n` times and refresh the running average."""
        self.val = val
        self.count += n
        self.sum += val * n
        self.avg = self.sum / self.count
class ResultWriter:
    """Accumulates experiment result rows (timestamp + results + CLI args)
    into a CSV file.

    `directory` is the CSV file path; parent directories are created on
    demand and an existing CSV is reloaded so rows persist across runs.
    """

    def __init__(self, directory):
        self.dir = directory
        self.hparams = None
        self.load()
        self.writer = dict()

    def update(self, args, **results):
        """Append one row containing a timestamp, the `results` kwargs and
        every attribute of the argparse namespace `args`, then save."""
        now = datetime.now()
        date = "%s-%s %s:%s" % (now.month, now.day, now.hour, now.minute)
        self.writer.update({"date": date})
        self.writer.update(results)
        self.writer.update(vars(args))
        row = pd.DataFrame(self.writer, index=[0])
        if self.hparams is None:
            self.hparams = row
        else:
            # FIX: DataFrame.append was deprecated and removed in pandas 2.0;
            # pd.concat is the supported replacement.
            self.hparams = pd.concat([self.hparams, row], ignore_index=True)
        self.save()

    def save(self):
        assert self.hparams is not None
        self.hparams.to_csv(self.dir, index=False)

    def load(self):
        path = os.path.split(self.dir)[0]
        # FIX: guard against an empty dirname (bare filename) — the original
        # called os.makedirs("") and crashed.
        if path and not os.path.exists(path):
            os.makedirs(path)
            self.hparams = None
        elif os.path.exists(self.dir):
            self.hparams = pd.read_csv(self.dir)
        else:
            self.hparams = None
|
'''
Created on Feb 23, 2013
@author: nino
'''
import unittest
from marx import __version__
from distutils.version import StrictVersion
class Test(unittest.TestCase):
    # Sanity check: the package advertises a parseable, non-zero version.
    # NOTE(review): distutils.StrictVersion is deprecated and removed in
    # Python 3.12 — consider packaging.version.Version when deps allow.
    def test_version(self):
        assert StrictVersion(__version__) > StrictVersion('0.0.0')
|
from opentera.forms.TeraForm import *
from flask_babel import gettext
from opentera.db.models.TeraSessionType import TeraSessionType
class TeraSessionTypeConfigForm:
    @staticmethod
    def get_session_type_config_form(session_type: TeraSessionType):
        """Build the (currently empty) config form for a session type.

        `session_type` is accepted for interface symmetry but not yet used;
        returns the form serialized via to_dict().
        """
        # Handle session type configs for non-services session types
        form = TeraForm("session_type_config")
        return form.to_dict()
|
import os
import pickle
import argparse
import numpy as np
from tqdm import tqdm
# Configs
cifar10_dict = {
'list': ['data_batch_1', 'data_batch_2', 'data_batch_3', 'data_batch_4', 'data_batch_5'],
'sizes': [4, 25, 100, 400], # per-class
'num_classes': 10,
'val_size': 20 # per-class
}
def extract(base, lists):
    """Load CIFAR-style pickled batch files and return (data, targets) arrays.

    Each batch maps 'data' to an (n, 3072) uint8 array and stores its labels
    under 'labels' (CIFAR-10) or 'fine_labels' (CIFAR-100).  The regenerated
    files are smaller than the originals because redundant keys are dropped.
    """
    data, targets = [], []
    for file_name in lists:
        with open(os.path.join(base, file_name), 'rb') as handle:
            entry = pickle.load(handle, encoding='latin1')
        data.append(entry['data'])
        label_key = 'labels' if 'labels' in entry else 'fine_labels'
        targets.extend(entry[label_key])
    return np.vstack(data), np.array(targets)
def shuffle(data, targets, sizes, num_classes, base, seed):
    """Split a training set into labeled/unlabeled subsets for each size in `sizes`.

    Per class, rows are shuffled once; the first `sizes[j]` samples form the
    labeled split (so larger labeled sets contain the smaller ones) and the
    remainder the matching unlabeled split.  Each pair is pickled to
    `base/<size>_seed<seed>_{labeled,unlabeled}`.
    """
    print('Shuffling...')
    np.random.seed(seed)
    final_data_labeled = [None for _ in range(len(sizes))]
    final_targets_labeled = [[] for _ in range(len(sizes))]
    final_data_unlabeled = [None for _ in range(len(sizes))]
    final_targets_unlabeled = [[] for _ in range(len(sizes))]
    for i in tqdm(range(num_classes)):
        # Shuffle by class (as done by other methods)
        total = data[targets == i]
        np.random.shuffle(total)
        # Assured superiority on training sets (i.e. larger sets include smaller sets)
        for j in range(len(sizes)):
            if final_data_labeled[j] is None:
                final_data_labeled[j] = total[:sizes[j], :]
            else:
                final_data_labeled[j] = np.concatenate((final_data_labeled[j], total[:sizes[j], :]))
            if final_data_unlabeled[j] is None:
                final_data_unlabeled[j] = total[sizes[j]:, :]
            else:
                final_data_unlabeled[j] = np.concatenate((final_data_unlabeled[j], total[sizes[j]:, :]))
            final_targets_labeled[j] += [i for _ in range(sizes[j])]
            final_targets_unlabeled[j] += [i for _ in range(total.shape[0] - sizes[j])]
            # Invariants: data/label lengths agree and exactly sizes[j]
            # labeled samples were added per processed class.
            assert final_data_unlabeled[j].shape[0] == len(final_targets_unlabeled[j])
            assert final_data_labeled[j].shape[0] == len(final_targets_labeled[j])
            assert final_data_labeled[j].shape[0] / sizes[j] == (i + 1)
    # Store
    print('Saving splits...')
    for i in tqdm(range(len(sizes))):
        with open(os.path.join(base, str(sizes[i]) + '_seed' + str(seed) + '_labeled'), 'wb') as f:
            pickle.dump({'data': final_data_labeled[i], 'labels': final_targets_labeled[i]}, f)
        with open(os.path.join(base, str(sizes[i]) + '_seed' + str(seed) + '_unlabeled'), 'wb') as f:
            pickle.dump({'data': final_data_unlabeled[i], 'labels': final_targets_unlabeled[i]}, f)
def train_val_split(data, targets, val_size, num_classes, base, seed):
    """Split data into train and a tiny validation set (`val_size` per class),
    pickling 'trainval_seed<seed>', 'train_seed<seed>' and 'valtiny_seed<seed>'.

    NOTE(review): unlike shuffle(), this relies on the np.random state seeded
    by the caller — confirm the seed is set before calling.
    """
    # Split train & valtiny and store a full trainval as well
    print('Saving trainval...')
    assert data.shape[0] == len(targets)
    with open(os.path.join(base, 'trainval_seed' + str(seed)), 'wb') as f:
        pickle.dump({'data': data, 'labels': targets.tolist()}, f)
    print('Shuffling...')
    final_data_valtiny = None
    final_data_train = None
    final_targets_valtiny = []
    final_targets_train = []
    for i in tqdm(range(num_classes)):
        # Shuffle by class (as done by other methods)
        total = data[targets == i]
        np.random.shuffle(total)
        # Last `val_size` rows per class go to valtiny, the rest to train.
        if final_data_train is None:
            final_data_train = total[:-val_size, :]
        else:
            final_data_train = np.concatenate((final_data_train, total[:-val_size, :]))
        if final_data_valtiny is None:
            final_data_valtiny = total[-val_size:, :]
        else:
            final_data_valtiny = np.concatenate((final_data_valtiny, total[-val_size:, :]))
        final_targets_train += [i for _ in range(total.shape[0] - val_size)]
        final_targets_valtiny += [i for _ in range(val_size)]
    print('Saving train & valtiny...')
    assert final_data_train.shape[0] == len(final_targets_train)
    assert final_data_valtiny.shape[0] == len(final_targets_valtiny)
    with open(os.path.join(base, 'train_seed' + str(seed)), 'wb') as f:
        pickle.dump({'data': final_data_train, 'labels': final_targets_train}, f)
    with open(os.path.join(base, 'valtiny_seed' + str(seed)), 'wb') as f:
        pickle.dump({'data': final_data_valtiny, 'labels': final_targets_valtiny}, f)
if __name__ == '__main__':
    # Settings
    parser = argparse.ArgumentParser(description='Generator')
    parser.add_argument('--dataset', type=str, default='cifar10',
                        help='Cifar10 (default: cifar10)')
    parser.add_argument('--seed', type=int, default=1,
                        help='Random seed (default: 1)')
    parser.add_argument('--base', type=str, default='../',
                        help='Dataset directory (default: ../)')
    parser.add_argument('--train-file', type=str, default=None,
                        help='File for the training set, w.o. val, None means doing train-val split (default: None)')
    args = parser.parse_args()
    if args.dataset == 'cifar10':
        param_dict = cifar10_dict
    else:
        raise NotImplementedError
    # Two modes: without --train-file, carve trainval/train/valtiny from the
    # original batches; with it, carve labeled/unlabeled splits from train.
    if args.train_file is None:  # Split original data to trainval/train/valtiny
        data, targets = extract(base=args.base, lists=param_dict['list'])
        train_val_split(data=data, targets=targets, val_size=param_dict['val_size'],
                        num_classes=param_dict['num_classes'], base=args.base, seed=args.seed)
    else:  # Split train set to labeled & unlabeled
        data, targets = extract(base=args.base, lists=[args.train_file])
        shuffle(data=data, targets=targets, sizes=param_dict['sizes'], num_classes=param_dict['num_classes'],
                base=args.base, seed=args.seed)
    print('Splits done. Check ' + args.base)
|
#!/usr/bin/python
#coding=utf-8
import threading
from Queue import Queue
class ThreadUrl(threading.Thread):
    '''
    Worker thread wrapper for multi-threaded fetching (Python 2 code:
    note the `Queue` module import).
    '''
    def __init__(self,queue,site):
        threading.Thread.__init__(self)
        self.queue = queue
        self.site = site  # a callable (class or function) invoked once per queue item
    def run(self):
        # Consume queue items forever; worker exceptions are swallowed
        # (best effort) so one bad item does not kill the thread, and
        # task_done() is always issued so queue.join() can complete.
        while True:
            try:
                uu = self.queue.get()
                t = self.site(uu)
            except:
                pass
            self.queue.task_done()
def mythread(urls,run,num=20):
    '''
    urls: list of items/URLs to process
    run:  callable invoked per item (plays the role of Thread.run), receives one element of urls
    num:  number of worker threads driving the queue (default 20)
    '''
    queue = Queue()
    # Start daemon workers first so they exit together with the main thread.
    for i in range(num):
        t= ThreadUrl(queue,run)
        t.setDaemon(True)
        t.start()
    for url in urls:
        queue.put(url)
    # Block until every queued item has been processed.
    queue.join()
|
# Copyright 2018 Deep Topology All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# noinspection PyUnresolvedReferences
import pathmagic
import tensorflow as tf
import modules
class LstmLastHiddenModule(modules.BaseModule):
    """ LSTM network that outputs the last hidden state.

    NOTE(review): uses tf.contrib, so this requires TensorFlow 1.x.
    """
    def __init__(self, lstm_size, lstm_layers, num_frames, output_dim, scope_id=None):
        """ Initialize LSTM hidden module.
        :param lstm_size: int
        :param lstm_layers: int
        :param num_frames: num_frames x 1
        :param output_dim: int
        :param scope_id: Object
        """
        self.lstm_size = lstm_size
        self.lstm_layers = lstm_layers
        self.output_dim = output_dim
        self.num_frames = num_frames
        self.scope_id = scope_id
    def forward(self, inputs, **unused_params):
        """ Forward method for LstmLastHiddenModule.
        :param inputs: batch_size x max_frames x num_features
        :return: batch_size x output_dim
        """
        stacked_lstm = tf.contrib.rnn.MultiRNNCell(
            [
                tf.contrib.rnn.BasicLSTMCell(
                    self.lstm_size, forget_bias=1.0)
                for _ in range(self.lstm_layers)
            ])
        # num_frames masks out padded frames per sequence.
        outputs, state = tf.nn.dynamic_rnn(stacked_lstm, inputs,
                                           sequence_length=self.num_frames,
                                           dtype=tf.float32)
        # Only output the hidden state at the end.
        return state[-1].h
class LstmConcatAverageModule(modules.BaseModule):
    """ LSTM layers with stores the average of previous layers.

    NOTE(review): uses tf.contrib, so this requires TensorFlow 1.x.
    """
    def __init__(self, lstm_size, num_layers, max_frame):
        """ Initialize LSTM average concatenation module.
        :param lstm_size: int
        :param num_layers: int
        :param max_frame: num_frames x 1
        """
        self.lstm_size = lstm_size
        self.num_layers = num_layers
        self.max_frame = max_frame
    def forward(self, inputs, **unused_params):
        """ Forward method for LstmConcatAverageModule.
        :param inputs: batch_size x max_frames x num_features
        :return: batch_size x output_dim
        """
        # state_is_tuple=False makes `state` a single concatenated tensor,
        # which allows the tf.concat below.
        stacked_lstm = tf.contrib.rnn.MultiRNNCell(
            [
                tf.contrib.rnn.BasicLSTMCell(
                    self.lstm_size, forget_bias=1.0, state_is_tuple=False)
                for _ in range(self.num_layers)
            ], state_is_tuple=False)
        outputs, state = tf.nn.dynamic_rnn(stacked_lstm, inputs,
                                           sequence_length=self.max_frame,
                                           dtype=tf.float32)
        # L2-normalized sums of LSTM outputs and of the raw inputs,
        # concatenated with the final state into one feature vector.
        context_memory = tf.nn.l2_normalize(tf.reduce_sum(outputs, axis=1), dim=1)
        average_state = tf.nn.l2_normalize(tf.reduce_sum(inputs, axis=1), dim=1)
        final_state = tf.concat([context_memory, state, average_state], 1)
        return final_state
|
# The implementation is based on ULFD, available at
# https://github.com/Linzaer/Ultra-Light-Fast-Generic-Face-Detector-1MB
from ..transforms import Compose, Resize, SubtractMeans, ToTensor
class PredictionTransform:
    """Inference-time preprocessing pipeline for a single image:
    resize -> mean subtraction -> divide by std -> tensor conversion."""
    def __init__(self, size, mean=0.0, std=1.0):
        # The lambda adapts plain std-division to the (img, boxes, labels)
        # signature the Compose pipeline threads through each step.
        self.transform = Compose([
            Resize(size),
            SubtractMeans(mean), lambda img, boxes=None, labels=None:
            (img / std, boxes, labels),
            ToTensor()
        ])
    def __call__(self, image):
        # Boxes/labels are unused at prediction time; keep only the image.
        image, _, _ = self.transform(image)
        return image
|
import numpy as np
import commp as cp
import itertools
import subprocess as sp
from scipy.cluster.hierarchy import linkage, fcluster
from scipy.spatial.distance import squareform
# cmd call tmalign (tmscore.sh)
# input two gene names (in string)
def _tmscore(param):
    """Run the external tmscore.sh on a gene-name pair (param[0], param[1])
    and return its raw stdout (bytes)."""
    return sp.Popen(['tmscore.sh', param[0], param[1]], stdout=sp.PIPE).communicate()[0]
# test _tmscore function
# input two gene names
def tmscore(args):
    """CLI smoke test for _tmscore: print the score for two gene names."""
    assert len(args) == 2, 'Usage: python proc_dd575.py tmscore AG6000091 AG6000243'
    print(_tmscore(args))
# pair all the genes
def pairgenes(args):
    """Write every unordered pair of gene names from args[0], one
    'gene1 gene2' pair per line, to args[1]."""
    assert len(args) == 2, 'Usage: python proc_dd575.py pairgenes full.list out.pair.list'
    genes = cp.loadlines(args[0])
    # FIX: the original never closed the output handle; `with` guarantees
    # it is flushed and closed. combinations() replaces the index loops.
    with open(args[1], 'w') as fout:
        for g1, g2 in itertools.combinations(genes, 2):
            fout.write('%s %s\n' % (g1, g2))
    cp._info('save paired genes to %s' % args[1])
# print total number of genes in a flat cluster file
def printtotal(args):
    """Report total and unique gene counts across all clusters in a flat file."""
    assert len(args) == 1, 'Usage: python proc_dd575.py totalgenes flatclusterfile'
    flatfile = args[0]
    # Column 7 of each cluster row holds the comma-separated member list.
    glist = [g for c in cp.loadtuples(flatfile) for g in c[7].split(',')]
    cp._info('total genes in %s : %d / %d' % (flatfile, len(glist), len(set(glist))))
    #print ('%s\n' % '\n'.join(glist))
#print ('%s\n' % '\n'.join(glist))
# output list of genes in a flat cluster file
def flat2vec(args):
    """Dump every gene name found in a flat cluster file, one per line."""
    assert len(args) == 2, 'Usage: python proc_dd575.py flat2vec clusterfile outfile'
    flatfile, outfile = args[0], args[1]
    # Column 7 of each cluster row holds the comma-separated member list.
    glist = [g for c in cp.loadtuples(flatfile) for g in c[7].split(',')]
    with open(outfile, 'w') as fout:
        fout.write('%s\n' % '\n'.join(glist))
    cp._info('save %d genes to %s' % (len(glist), outfile))
# after cutoff expansion
# get rest single genes that cannot form clusters
def findsingle(args):
    """After cutoff expansion, list genes present in the reference clusters
    (args[0]) but absent from the selected clusters (args[1]); write the
    singlets to args[2].

    NOTE(review): the result is a set difference, so output line order is
    arbitrary across runs.
    """
    assert len(args) == 3, 'Usage: python proc_dd575.py findsingle 12100.f.out total.clusters out.single.list'
    reffile =args[0]
    clusterfile = args[1]
    outfile = args[2]
    rlist = set()
    for r in cp.loadtuples(reffile): # cluster information using 12100 as cutoff
        for g in r[7].split(','):
            rlist.add(g)
    cp._info('%d genes found in reference clusters' % len(rlist))
    clist = set()
    for c in cp.loadtuples(clusterfile):
        for g in c[7].split(','):
            clist.add(g)
    cp._info('%d genes found in selected clusters' % len(clist))
    # Genes only in the reference clusters are the leftover singlets.
    outlist = rlist - clist
    with open(outfile, 'w') as fout:
        fout.write('%s\n' %('\n'.join(outlist)))
    cp._info('save %d singlets to %s' % (len(outlist),outfile))
# output cluster information {id, number of members, tm.mean, tm.sd, tm.min, tm.max, member_list}
# input: the output from function clustering: cluster_id, member_id, sorted by cluster_id from func clustering()
# input: pairwise dist/similarity file, in this case tmalign scores
def flatcluster(args):
    """Summarize clustering output into one line per cluster:
    {id, n_members, tm.mean, tm.min, tm.max, tm.std, member_list},
    and emit a PyMOL alignment script per multi-member cluster.

    args: [clustering.out, tmscore.list, filterlist, outfile] — clusters whose
    members appear in the filter list are excluded first.
    """
    assert len(args) == 4, 'Usage: python proc_dd575.py flatcluster clustering.out tmscore.list filterlist outfile'
    clusterfile = args[0]
    scorefile = args[1]
    filterfile = args[2] # selected clusters with high tmscores
    outfile = args[3]
    # load dist/similarity file into a dictionary
    # AG6000091 AG6000243 0.36034
    # AG6000091 AG6000244 0.40890
    #print(cp.loadtuples(dfile))
    sdict = dict(('%s %s' % (s[0], s[1]), float(s[2])) for s in cp.loadtuples(scorefile))
    # print(take(10, dinfodict.iteritems())) # python 3.6
    # load clustering information
    # 431 AG6000091
    # 521 AG6000243
    full_cinfo = cp.loadtuples(clusterfile)
    # load filered clusters
    # 15500 7 2 0.6328 0.6328 0.6328 0.0000 AG6000799,AG6032177
    filterlist = []
    for s in cp.loadtuples(filterfile):
        for g in s[7].split(','):
            filterlist.append(g)
    # filter out selected genes
    cinfo = [c for c in full_cinfo if c[1] not in filterlist]
    cp._info('full: %d - select: %d : current: %d' % (len(full_cinfo), len(filterlist), len(cinfo)))
    # ('224', ['AG6006935', 'AG6007576', 'AG6010406', 'AG6033388']) ('xxx', [xxx])
    # NOTE(review): itertools.groupby requires cinfo sorted by cluster id.
    cdict = dict((k, [i[1] for i in list(g)]) for k, g in itertools.groupby(cinfo, lambda x: x[0]))
    # iterate all clusters to map pairwise scores (tm)
    # for each cluster ID
    fout = open(outfile, 'w')
    for c in cdict:
        genes = cdict[c]
        #print(c, cdict[c])
        # single member cluster
        if len(genes) < 2:
            outstr = '%s %d 0.0 0.0 0.0 0.0 %s' % (c, len(genes), ','.join(genes))
            #print(outstr)
            fout.write('%s\n' % outstr)
            continue
        # multi-member cluster
        scores = []
        # Pair keys may be stored in either order in the score file.
        for i in range(len(genes)):
            for j in range(i+1, len(genes)):
                k1 = '%s %s' % (genes[i], genes[j])
                k2 = '%s %s' % (genes[j], genes[i])
                score = sdict[k1] if k1 in sdict else sdict[k2]
                scores.append(score)
        ns = np.array(scores)
        #print(ns)
        # cluster_id, cluster_len, tm.mean, tm.min, tm.max, tm.std
        outstr = '%s %d %.4f %.4f %.4f %.4f %s' % (c, len(genes), ns.mean(), ns.min(), ns.max(), ns.std(), ','.join(genes))
        fout.write('%s\n' % outstr)
        #print(outstr)
        # output pymol scripts
        pml = []
        pml.append('delete all')
        for n in genes:
            pml.append('load %s.pdb' % n)
        for n in genes[1:]:
            pml.append('cealign %s, %s\ncenter' % (genes[0], n))
        # One PyMOL script per cluster, named after the cluster id.
        with open('%s' % c, 'w') as fml:
            fml.write('%s\n' % '\n'.join(pml))
    fout.close()
    cp._info('save %d flatclusters to %s' % (len(cdict), outfile))
    cp._info('cluster_id, cluster_len, tm.mean, tm.min, tm.max, tm.std')
# compare two clusters generated using different cutoffs
# input: cluster.12100.out cluster.13000.out (must be sorted by the clusters ID first)
# cluster.xxx.out format:
# 4 AG6000735
# 23 AG6000741
# 23 AG6000767
def cluster_comp(args):
    """Compare cluster memberships produced with two different cutoffs.

    For every old cluster, report each new cluster (of size > 3) that both
    overlaps it and contains genes outside it.
    """
    assert len(args) == 3, 'Usage: python proc_dd575.py cluster_comp cluster.12100.out cluster.13000.out'
    oldfile, newfile, outfile = args[0], args[1], args[2]
    # group "clusterID gene" tuples into {clusterID: [genes]}
    old_dict = dict((cid, [r[1] for r in grp])
                    for cid, grp in itertools.groupby(cp.loadtuples(oldfile), lambda r: r[0]))
    new_dict = dict((cid, [r[1] for r in grp])
                    for cid, grp in itertools.groupby(cp.loadtuples(newfile), lambda r: r[0]))
    # the coarser (older) clustering must have at least as many clusters
    if len(new_dict) > len(old_dict):
        cp._err('args order error: old:%s new:%s' % (oldfile, newfile))
    with open(outfile, 'w') as fout:
        for old_id in old_dict:
            old_set = set(old_dict[old_id])
            for new_id in new_dict:
                new_set = set(new_dict[new_id])
                if len(new_set) <= 3:
                    continue
                overlap = old_set.intersection(new_set)
                extra = new_set - old_set
                if len(overlap) == 0 or len(extra) == 0:
                    continue
                # small old clusters: report the whole new cluster;
                # otherwise report only the genes the new cluster gained
                reported = new_set if len(old_set) < 4 else extra
                fout.write('oldc: %s n: %d newc: %s n: %d diff: %s\n'
                           % (old_id, len(old_set), new_id, len(new_set), ','.join(reported)))
    cp._info('save comparison result to %s' % outfile)
# input: distance mat & names
# output sorted cluster information:
# 4 AG6000735
# 23 AG6000741
# 23 AG6000767
# python proc_dd575.py clustering 575.s.pairwise.pkl.txt 575.names.pkl.txt 7200 out
def clustering(args):
    """Complete-linkage hierarchical clustering from a pairwise-distance matrix.

    Writes one 'clusterID name' line per entry, sorted by cluster ID.
    """
    assert(len(args)==4), 'Usage: python proc_dd575.py clustering mat.txt name.txt cutoff outfile'
    matfile, namefile, outfile = args[0], args[1], args[3]
    cutoff = float(args[2])
    dist_mat = np.loadtxt(matfile, delimiter=' ')
    names = cp.loadlines(namefile)
    # condense the square matrix, build the tree, cut at the distance cutoff
    condensed = squareform(dist_mat)
    tree = linkage(condensed, 'complete')
    labels = fcluster(tree, t=cutoff, criterion='distance')
    ranked = sorted([(labels[i], names[i]) for i in range(len(labels))], key=lambda p: p[0])
    with open(outfile, 'w') as fout:
        fout.write('%s\n' % '\n'.join('%d %s' % (lbl, name) for lbl, name in ranked))
    cp._info('save %d %f clusters info to %s' % (len(set(labels)), cutoff, outfile))
# add cluster flags for coloring in dendrogram
# input: cat r0.cluster.txt r1.cluster.txt r2.cluster.txt r3.single.txt > combined.rcluster.txt
# output: genes, label
# output in .stub order to match the matrix index
def rlabelflat(args):
    """Attach a cluster color label to every gene, in .stub (matrix) order.

    Input rows are either 9-field r0/r1 cluster rows (flag '1'/'2'),
    multi-field r2 rows, or single-gene singlet rows; the latter two are
    labeled 'single'.
    """
    assert len(args)==3, 'Usage: python proc_dd575.py rlabelflat combined.rx.cluster.txt out.stub outfile'
    infile, stubfile, outfile = args[0], args[1], args[2]
    pairs = []
    for rec in cp.loadtuples(infile):
        if len(rec) == 9 and rec[8] in ['1','2']:
            # r0/r1 cluster rows: label each member "round.clusterid"
            label = '%s.%s' % (rec[0], rec[1])
            pairs.extend((label, g) for g in rec[7].split(','))
        elif len(rec) > 1:
            # r2 rows: members treated as singletons
            # assumes column 7 holds the gene list -- TODO confirm for short rows
            pairs.extend(('single', g) for g in rec[7].split(','))
        else:
            # singlet rows: one gene per line
            pairs.append(('single', rec[0]))
    # label_of[gene] = color label; stub order matches the matrix index
    label_of = dict((g, lbl) for lbl, g in pairs)
    ordered = ['%s %s' % (g, label_of[g]) for g in cp.loadlines(stubfile)]
    with open(outfile, 'w') as fout:
        fout.write('%s\n' % '\n'.join(ordered))
    cp._info('save labeled flat file: %s' % outfile)
# similar to rlabelflat
# for binary color labeling
def rlabelflat2(args):
    """Binary color labeling variant of rlabelflat.

    Genes in r0/r1 clusters keep their round flag ('1' or '2'); all other
    genes get '0'. Output is in .stub order to match the matrix index.
    """
    assert len(args)==3, 'Usage: python proc_dd575.py rlabelflat combined.rx.cluster.txt out.stub outfile'
    infile, stubfile, outfile = args[0], args[1], args[2]
    pairs = []
    for rec in cp.loadtuples(infile):
        if len(rec) == 9 and rec[8] in ['1','2']:
            # clusters from rounds r0/r1 keep their flag
            pairs.extend((rec[8], g) for g in rec[7].split(','))
        elif len(rec) > 1:
            # r2 rows
            pairs.extend(('0', g) for g in rec[7].split(','))
        else:
            # singlets (r3)
            pairs.append(('0', rec[0]))
    # flag_of[gene] = binary color flag
    flag_of = dict((g, flag) for flag, g in pairs)
    ordered = ['%s %s' % (g, flag_of[g]) for g in cp.loadlines(stubfile)]
    with open(outfile, 'w') as fout:
        fout.write('%s\n' % '\n'.join(ordered))
    cp._info('save binary labeled flat file: %s' % outfile)
if __name__=='__main__':
cp.dispatch(__name__) |
from django.contrib import admin
from django.contrib.admin import ModelAdmin
from django.contrib.auth.models import User
from django.db import models
from django.views.generic import TemplateView
from whwn.models import ItemCategory, Item, UserProfile, Message
from adminplus import AdminSitePlus
class ItemAdmin(admin.ModelAdmin):
    """Admin for Item rows that also lists soft-deleted items."""
    list_display = ('id', 'sku', 'possessor')
    # NOTE(review): Django >= 1.6 renamed this hook to get_queryset();
    # on newer Django this override is silently ignored -- confirm version.
    def queryset(self, request):
        # all_with_deleted() presumably includes soft-deleted rows that the
        # default manager would hide -- verify against the Item manager API.
        qs = self.model._default_manager.all_with_deleted()
        ordering = self.get_ordering(request)
        if ordering:
            qs = qs.order_by(*ordering)
        return qs
# Inline editor for the 1:1 UserProfile shown on the User change page.
# Bug fix: this class and UserAdmin below were each declared twice with
# identical bodies; the redundant duplicates have been removed (the second
# definition simply rebound the name, so behavior is unchanged).
class UserProfileInline(admin.StackedInline):
    model = UserProfile
    fk_name = 'user'

class UserAdmin(admin.ModelAdmin):
    """User admin that edits the related UserProfile inline."""
    list_display = ('username', 'email')
    list_select_related = True
    inlines = [UserProfileInline]
# Replace the default admin site with AdminSitePlus so that custom views
# (see register_view below) can be attached to the admin.
admin.site = AdminSitePlus()
admin.site.register(Message, ModelAdmin)
admin.site.register(ItemCategory, ModelAdmin)
admin.site.register(Item, ItemAdmin)
admin.site.register(User, UserAdmin)
class FaqAdminView(TemplateView):
    """Static admin page for entering FAQ questions."""
    template_name = "admin/question_input.html"
    def get_context_data(self, **kwargs):
        # Intentionally empty context -- the template is self-contained.
        return {}
# Expose the FAQ page under the admin (AdminSitePlus feature).
admin.site.register_view("faq", FaqAdminView.as_view())
|
import click
import time
import gym
import os
import numpy as np
import gym_goal
from agents.qpamdp import QPAMDPAgent
from agents.sarsa_lambda import SarsaLambdaAgent
from common.wrappers import ScaledStateWrapper, QPAMDPScaledParameterisedActionWrapper
from gym_goal.envs.config import GOAL_WIDTH, PITCH_WIDTH, PITCH_LENGTH
from gym.wrappers import Monitor
from common.goal_domain import CustomFourierBasis, GoalObservationWrapper
# Exploration variances, one per parameterised action.
variances = [0.01, 0.01, 0.01]
# Hand-coded feature weights scaled by the pitch geometry; used to seed the
# parameter policy when --initialise-params is set (see run()).
xfear = 50.0 / PITCH_LENGTH
yfear = 50.0 / PITCH_WIDTH
caution = 5.0 / PITCH_WIDTH
kickto_weights = np.array([[2.5, 1, 0, xfear, 0], [0, 0, 1 - caution, 0, yfear]])
# One initial weight matrix per action: kick-to, then the two shoot-goal actions.
initial_parameter_weights = [
    kickto_weights,
    np.array([[GOAL_WIDTH / 2 - 1, 0]]),
    np.array([[-GOAL_WIDTH / 2 + 1, 0]])
]
def evaluate(env, agent, episodes=1000):
    """Roll out `agent` in `env` and return the per-episode returns.

    Cleanup: removed the `timesteps`/`t` accumulation -- it was collected
    but never read or returned (dead code).

    :param env: environment whose reset() yields (state, _) and whose
        step(action) yields ((state, _), reward, terminal, info).
    :param agent: policy object exposing act(state).
    :param episodes: number of evaluation episodes to run.
    :return: numpy array of undiscounted episode returns.
    """
    returns = []
    for _ in range(episodes):
        state, _ = env.reset()
        terminal = False
        total_reward = 0.
        while not terminal:
            state = np.array(state, dtype=np.float32, copy=False)
            action = agent.act(state)
            (state, _), reward, terminal, _ = env.step(action)
            total_reward += reward
        returns.append(total_reward)
    return np.array(returns)
@click.command()
@click.option('--seed', default=7, help='Random seed.', type=int)
@click.option('--episodes', default=20000, help='Number of epsiodes.', type=int)
@click.option('--evaluation-episodes', default=100, help='Episodes over which to evaluate after training.', type=int)
@click.option('--scale', default=False, help='Scale inputs and actions.', type=bool) # default 50, 25 best
@click.option('--initialise-params', default=True, help='Initialise action parameters.', type=bool)
@click.option('--save-dir', default="results/goal", help='Output directory.', type=str)
@click.option('--title', default="QPAMDP", help="Prefix of output files", type=str)
def run(seed, episodes, evaluation_episodes, scale, initialise_params, save_dir, title):
    """Train a QPAMDP agent on the Goal domain, save returns, then evaluate.

    Note: the wrapper/seeding order below (observation wrapper, optional
    scaling wrappers, Monitor, then env/np seeding) matters for
    reproducibility -- do not reorder.
    """
    alpha_param = 0.1
    env = gym.make('Goal-v0')
    env = GoalObservationWrapper(env)
    # When scaling, the exploration variances, learning rate and the
    # hand-coded initial weights are all re-expressed in scaled coordinates.
    if scale:
        variances[0] = 0.0001
        variances[1] = 0.0001
        variances[2] = 0.0001
        alpha_param = 0.06
        initial_parameter_weights[0] = np.array([[-0.375, 0.5, 0, 0.0625, 0],
                                                 [0, 0, 0.8333333333333333333, 0, 0.111111111111111111111111]])
        initial_parameter_weights[1] = np.array([0.857346647646219686, 0])
        initial_parameter_weights[2] = np.array([-0.857346647646219686, 0])
        env = ScaledStateWrapper(env)
        env = QPAMDPScaledParameterisedActionWrapper(env)
    # NOTE(review): `dir` shadows the builtin of the same name.
    dir = os.path.join(save_dir, title)
    env = Monitor(env, directory=os.path.join(dir, str(seed)), video_callable=False, write_upon_reset=False, force=True)
    env.seed(seed)
    np.random.seed(seed)
    # First 14 observation entries feed the discrete action policy; the
    # per-action index arrays select features for each parameter policy.
    action_obs_index = np.arange(14)
    param_obs_index = np.array([
        np.array([10, 11, 14, 15]), # ball_features
        np.array([16]), # keeper_features
        np.array([16]), # keeper_features
    ])
    basis = CustomFourierBasis(14, env.observation_space.spaces[0].low[:14], env.observation_space.spaces[0].high[:14])
    discrete_agent = SarsaLambdaAgent(env.observation_space.spaces[0], env.action_space.spaces[0], basis=basis, seed=seed, alpha=0.01,
                                      lmbda=0.1, gamma=0.9, temperature=1.0, cooling=1.0, scale_alpha=False,
                                      use_softmax=True,
                                      observation_index=action_obs_index, gamma_step_adjust=False)
    agent = QPAMDPAgent(env.observation_space.spaces[0], env.action_space, alpha=alpha_param, initial_action_learning_episodes=4000,
                        seed=seed, action_obs_index=action_obs_index, parameter_obs_index=param_obs_index,
                        variances=variances, discrete_agent=discrete_agent, action_relearn_episodes=2000,
                        parameter_updates=1000, parameter_rollouts=50, norm_grad=True, print_freq=100,
                        phi0_func=lambda state: np.array([1, state[1], state[1]**2]),
                        phi0_size=3)
    # Alternating learning periods from original paper:
    # QPAMDP(1) : init(2000), parameter_updates(50), relearn(50)
    # QPAMDP(infinity) : init(2000), parameter_updates(1000), relearn(2000)
    # needed to increase initial action learning episodes to 4000
    if initialise_params:
        for a in range(3):
            agent.parameter_weights[a] = initial_parameter_weights[a]
    max_steps = 150
    start_time = time.time()
    agent.learn(env, episodes, max_steps)
    end_time = time.time()
    agent.plot_reward()
    agent.plot_p()
    print("Training took %.2f seconds" % (end_time - start_time))
    env.close()
    returns = np.array(env.get_episode_rewards())
    print("Saving training results to:",os.path.join(dir, "QPAMDP{}".format(str(seed))))
    np.save(os.path.join(dir, title + "{}".format(str(seed))), returns)
    print("Ave. return =", sum(returns) / len(returns))
    print("Ave. last 100 episode return =", sum(returns[-100:]) / 100.)
    # A return of 50 marks a scored goal, so these are success probabilities.
    print('Total P(S):{0:.4f}'.format((returns == 50.).sum() / len(returns)))
    print('Ave. last 100 episode P(S):{0:.4f}'.format((returns[-100:] == 50.).sum() / 100.))
    if evaluation_episodes > 0:
        print("Evaluating agent over {} episodes".format(evaluation_episodes))
        # Greedy evaluation: disable all exploration noise.
        agent.variances = 0
        agent.discrete_agent.epsilon = 0.
        agent.discrete_agent.temperature = 0.
        evaluation_returns = evaluate(env, agent, evaluation_episodes)
        print("Ave. evaluation return =", sum(evaluation_returns) / len(evaluation_returns))
        print("Ave. evaluation prob. =", sum(evaluation_returns == 50.) / len(evaluation_returns))
        np.save(os.path.join(dir, title + "{}e".format(str(seed))), evaluation_returns)

if __name__ == '__main__':
    run()
|
# Generated by Django 3.1.6 on 2021-02-14 15:28
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the contact Form model and widen resume.email to an EmailField."""
    dependencies = [
        ('resume', '0003_post_date'),
    ]
    operations = [
        migrations.CreateModel(
            name='Form',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('your_name', models.CharField(max_length=30)),
                ('your_email', models.EmailField(max_length=254)),
                ('your_subject', models.CharField(max_length=30)),
                ('your_message', models.TextField()),
            ],
        ),
        migrations.AlterField(
            model_name='resume',
            name='email',
            field=models.EmailField(max_length=254),
        ),
    ]
|
from __future__ import with_statement, division
import subprocess, sys
from memoize import memoize
# Tri-state probe results: GOOD = grep matched, BAD = no match,
# INVALID = the probe itself failed (error output or timeout).
GOOD = True
BAD = False
INVALID = None
# http://python3porting.com/problems.html
# b()/s() give a uniform str<->bytes boundary for subprocess pipes:
# identity on Python 2, encode/decode on Python 3.
if sys.version_info < (3,):
    def b(x):
        # Python 2: str is already bytes.
        return x
    def s(x):
        return x
else:
    def b(x):
        # Python 3: encode text to bytes for subprocess stdin.
        return x.encode()
    def s(x):
        return x.decode()
def binary_search_on_string(f, arg):
    """Return the longest prefix of `arg` that `f` classifies as GOOD.

    `f` maps a string to GOOD, BAD, or INVALID (e.g. the probe timed out).
    The search is a binary chop on the prefix length; INVALID results are
    handled by linearly scanning for the nearest valid cut point.
    """
    start = 0
    # `mid` = longest prefix length known GOOD; `end` = current upper bound.
    mid = 0
    end = len(arg)
    while mid < end and f(arg[start:end]) is not GOOD:
        new_end = (mid + end) // 2
        if new_end == end:
            new_end -= 1
        if new_end <= mid:
            # interval exhausted -- settle on the known-GOOD length
            end = mid
        else:
            ret = f(arg[start:new_end])
            if ret is GOOD:
                mid = new_end
            elif ret is BAD:
                end = new_end
            else:
                # INVALID probe: scan upward for the nearest valid cut...
                orig_new_end = new_end
                while ret is INVALID and new_end < end:
                    new_end += 1
                    ret = f(arg[start:new_end])
                if mid < new_end and ret is GOOD:
                    mid = new_end
                elif new_end < end and ret is BAD:
                    end = new_end
                else:
                    # ...then downward if the upward scan left the interval
                    new_end = orig_new_end
                    while ret is INVALID and mid < new_end:
                        new_end -= 1
                        ret = f(arg[start:new_end])
                    if mid == new_end:
                        end = new_end
                    elif ret is GOOD:
                        mid = new_end
                    elif ret is BAD:
                        end = new_end
                    else:
                        sys.stderr.write("This should be impossible\n")
                        end = mid
                        break
    # Defensive extension: should never trigger if the chop converged.
    while end + 1 < len(arg) and f(arg[start:end + 1]) == GOOD:
        sys.stderr.write("This should be impossible (2)\n")
        end += 1
    # Final linear sweep: keep the largest end whose prefix is still GOOD.
    orig_end = end
    while end + 1 < len(arg):
        end += 1
        if f(arg[start:end]) == GOOD:
            orig_end = end
    return arg[start:orig_end]
@memoize
def check_grep_for(in_str, search_for):
    """Pipe `in_str` into `grep search_for` (0.5 s budget) and classify:
    GOOD = matched, BAD = no match, INVALID = grep error or timeout."""
    proc = subprocess.Popen(
        ["timeout", "0.5s", "grep", search_for],
        universal_newlines=False,
        stderr=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stdin=subprocess.PIPE)
    stdout, stderr = proc.communicate(input=b(in_str))
    proc.stdin.close()
    proc.wait()
    # stderr output means the pattern was rejected; 124 is timeout(1)'s kill code
    if stderr != b('') or proc.returncode == 124:
        return INVALID
    return GOOD if proc.returncode == 0 else BAD
if __name__ == '__main__':
    if len(sys.argv) != 3:
        sys.stderr.write("USAGE: %s SEARCH_IN SEARCH_FOR\n" % sys.argv[0])
        sys.exit(1)
    # Probe: does this prefix of the pattern still match the input?
    def check_grep(search_for):
        return check_grep_for(sys.argv[1], search_for)
    # Longest prefix of the pattern that grep still accepts and matches.
    search_for = binary_search_on_string(check_grep, sys.argv[2])
    # Show the (possibly truncated) match with grep's own highlighting.
    p = subprocess.Popen(["grep", "--color=auto", search_for], universal_newlines=False, stdin=subprocess.PIPE)
    p.communicate(input=b(sys.argv[1]))
    p.stdin.close()
    p.wait()
    if len(search_for) < len(sys.argv[2]):
        # The full pattern did not match -- report where it diverges.
        print("Mismatch: good prefix:")
        p = subprocess.Popen(["grep", "-o", "--color=auto", search_for], universal_newlines=False, stdin=subprocess.PIPE)
        p.communicate(input=b(sys.argv[1]))
        p.stdin.close()
        p.wait()
        # Anchored re-run to measure how much input the good prefix consumes.
        p = subprocess.Popen(["grep", "-o", '^.*' + search_for], universal_newlines=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (stdout, stderr) = p.communicate(input=b(sys.argv[1]))
        p.stdin.close()
        p.wait()
        print("Mismatch: good prefix search:\n%s" % search_for)
        print("Mismatch: expected next characters at %d: %s" % (len(search_for), repr(sys.argv[2][len(search_for):][:10])))
        print("Mismatch: actual next characters at %d: %s" % (len(stdout)-1, repr(sys.argv[1][len(stdout)-1:][:10])))
        #sys.exit(p.errorcode)
|
# Sort a linked list in O(n log n) time using constant space complexity.
# Merge sort
# https://www.geeksforgeeks.org/merge-sort-for-linked-list/
class Node(object):
    """A singly linked list node: payload plus a pointer to the successor."""
    def __init__(self, data):
        self.data, self.next = data, None
def sort(head):
    """Merge sort a singly linked list; return the head of the sorted list.

    Bug fix: the original assigned the merged result to the local name
    `head`, which never propagated to the caller (nor through the recursive
    calls), so lists were left partially scrambled.  The sorted head is now
    returned; callers should use `head = sort(head)` (a None-tolerant form
    is `head = sort(head) or head`).
    """
    # Zero or one node: already sorted.
    if not head or not head.next:
        return head
    first_half, second_half = split_list_by_half(head)
    return merge(sort(first_half), sort(second_half))
def split_list_by_half(head):
    """Cut the list at its midpoint and return (front_head, back_head).

    Classic slow/fast pointer walk: `runner` advances two links per
    iteration while `middle` advances one, so `middle` stops on the last
    node of the front half; its `next` link is then severed.
    """
    middle = head
    runner = head.next
    while runner is not None:
        runner = runner.next
        if runner is not None:
            middle = middle.next
            runner = runner.next
    back = middle.next
    middle.next = None
    return (head, back)
def merge(a, b):
    """Merge two sorted linked lists in place and return the merged head.

    Stable: on equal keys a node from `a` is taken first (same ordering as
    the previous recursive version).  Rewritten iteratively -- the original
    recursed once per node, so lists longer than the interpreter's
    recursion limit (~1000) raised RecursionError.
    """
    if a is None:
        return b
    if b is None:
        return a
    # Pick the head node, then stitch the remaining nodes onto `tail`.
    if a.data <= b.data:
        head = a
        a = a.next
    else:
        head = b
        b = b.next
    tail = head
    while a is not None and b is not None:
        if a.data <= b.data:
            tail.next = a
            a = a.next
        else:
            tail.next = b
            b = b.next
        tail = tail.next
    # Append whichever list still has nodes.
    tail.next = a if a is not None else b
    return head
def print_list(head):
    """Print the list as its data values joined by '->' (e.g. '6->22->33')."""
    to_print = []
    while head:
        to_print.append(str(head.data))
        head = head.next
    # Parenthesised print of a single argument behaves identically on
    # Python 2 and 3; the original bare `print expr` statement was Py2-only.
    print('->'.join(to_print))
# Build the sample list 6 -> 22 -> 33 -> 4 -> 35 -> 61 -> 7.
head = Node(6)
it = head
for value in (22, 33, 4, 35, 61, 7):
    it.next = Node(value)
    it = it.next
print_list(head)
# Bug fix: the original discarded sort()'s result, so the second
# print_list showed a scrambled (not sorted) list.  Capture the new head;
# `or head` keeps this line working even with a legacy sort() that
# returns None.
head = sort(head) or head
print_list(head)
|
#!/usr/bin/python3
# Data reading, cleaning, and processing
import pandas as pd
import csv
import os
import re
# Database access
import sqlite3
# Get file path
cwd = os.getcwd()
file_path = cwd + r'/data/stem_center_sign_in'
# Takes in the name of a file, and returns the pandas dataframe containing that file's data
def read_file_as_df(file_name):
    """Read one STEM Center sign-in CSV export into a cleaned DataFrame.

    Export layout: metadata rows 0-5 (row 1, column 2 -- spreadsheet cell
    "2C" -- holds a label like 'STEM Center Consulting-AOA 130-Fall 2016'),
    the column-name row at index 6, then sign-in rows until the first
    blank line.

    Cleanup: removed the shadowed builtin name `file`, the redundant
    `pd.DataFrame(...)` wrap around read_csv's result (it already returns a
    DataFrame), and the duplicate initialisations of `semester`/`numRows`.

    :param file_name: path to the CSV export.
    :return: DataFrame of the sign-in rows with column names sanitised
        (spaces and '/' replaced by '_'), a 'Semester' column appended,
        and NaN values replaced by ''.
    """
    NUM_TO_SKIP = 6  # metadata rows before the column-name row
    with open(file_name, 'r') as f:
        rows = list(csv.reader(f))
    # Cell at row index 1, column index 2 names the semester.
    semester_cell = rows[1][2]
    # Data rows start at index 7 and run until the first blank line.
    last_row = 7
    while rows[last_row] != []:
        last_row += 1
    # 'STEM Center Consulting-AOA 130-Fall 2016' -> 'Fall 2016'
    # (split on runs of '-', keep the last piece, strip whitespace).
    semester = re.split('-+', semester_cell)[-1].strip()
    # Number of sign-in rows between the header row (index 6) and the blank line.
    num_rows = last_row - NUM_TO_SKIP - 1
    df = pd.read_csv(file_name, skiprows=NUM_TO_SKIP, nrows=num_rows)
    # Print file info:
    print('semester is: ' + semester + ', columns are: ' + ', '.join(df))
    # Tag every row with the semester this file covers.
    df['Semester'] = semester
    # Sanitise column names for SQLite: replace spaces and '/' with '_'.
    column_name_mapping = {col: col.replace(' ', '_').replace('/', '_') for col in df.columns}
    df = df.rename(index=str, columns=column_name_mapping)
    # Drop NaN values, and replace them with the empty string.
    df = df.fillna('')
    # TODO: Fix bug where "Semester" field is set to "STEM Center Tutoring"
    # for certain semesters from 2014-03-20 to 2015-03-18 (cell 2C lacks a
    # '-<semester>' suffix in those files).
    return df
# ***** Create database *****SELECT Semester, COUNT(*), MIN(Transaction_Date_Time) AS Start, MAX(Transaction_Date_Time) AS End, COUNT(DISTINCT Transaction_ID) FROM signins GROUP BY Semester ORDER BY Start;
# Delete the old database, if it exists
#if (os.path.exists('STEM_Center.db')):
# os.remove('STEM_Center.db')
# Read the database specification:
#rf = open('STEM_Center_Sign_In_Tables.sql', 'r') # Read database, if it already exists
rf = open('STEM_Center_Sign_In_Tables.sql') # Create a new database
db_spec = rf.read()
rf.close()
# Connect to database
conn = sqlite3.connect("STEM_Center.db")
# Make the database file and tables, using the specification
# NOTE(review): executescript is commented out, so db_spec is read but never
# applied -- presumably the 'signins' table already exists; confirm.
#conn.executescript(db_spec)
# For debugging (with only 1 file in list)
#file_list = ['STEM Center Consulti_20180921_1326_3419.csv']
# ***** Add all STEM Center signins to database *****
# Ingest every CSV export in the sign-in directory into the signins table.
for file_name in os.listdir(file_path): #file_list:
    # ensure it is a CSV file
    if (not(file_name.endswith(".csv"))):
        continue # Skip over files that are not csv files
    # Add file path to file name to get the path to read file from. Then read it as a pandas dataframe
    file_path_and_name = file_path + "/" + file_name
    print("Reading file: \"" + file_path_and_name + "\" into database\n\n\n")
    file_data = read_file_as_df(file_path_and_name)
    # Print (for debugging)
    #print(file_data.head()) #print top portion of data
    #print(file_data.to_string()) # print all data
    # Insert dataframe into a database (append keeps rows from earlier files)
    file_data.to_sql('signins', con=conn, if_exists='append')
'''
for i in range(len(file_data)):
rowVals = tuple(file_data.ix[i])
conn.execute("INSERT INTO signins VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);", rowVals)
'''
'''
for i in range(len(file_data)):
conn.execute( "INSERT INTO signin
+ "(Transaction_ID, Passed_Denied, First_Name, Last_Name, Email, CatCard_ID, Active) VALUES"
+ "();"
'''
# Test by printing all data from database table signins
'''
print ("\n\n\nQUERYING DATABASE\n\n\n")
query = conn.execute("SELECT * FROM signins;")
for row in query:
print(row)
'''
# TODO: Read Bio 1 data into database
conn.close()
#TODO: For some reason, there is a space in front of all semester names. But for some Fall 2016 entries, there is not. So there are 2 fall 2016 semesters: 'Fall 2016' and ' Fall 2016'. Must take this away.
# This is because of the files
# "STEM Center Consulti_20180921_1326_3419.csv" and
# "STEM Center Tutoring_20180921_1327_3084.csv" have different entries in cell 2C.
# The former has entry
# "STEM Center Consulting-AOA 130-Fall 2016" and the latter has entry
# "STEM Center Tutoring - Fall 2016" so the stripper will get everything after the last "-"
# Then leading and trailing whitespace is removed with the strip() function.
# So this used to cause 2 semesters for Fall 2016
|
from django.dispatch import receiver
from django.db.models.signals import post_save
from lawyer.models import UserPermission, Role, ContentType
from django.contrib.auth.models import Permission, Group
@receiver(post_save, sender=UserPermission)
def auto_create_Permission(sender, instance, created, **kwargs):
    """Mirror each newly created UserPermission as a django auth Permission."""
    if not created:
        return
    ctype = ContentType.objects.get_for_model(UserPermission)
    Permission.objects.get_or_create(
        name=instance.name,
        content_type=ctype,
        codename=instance.name,
    )
@receiver(post_save, sender=Role)
def auto_create_Group(sender, instance, created, **kwargs):
    """Create a matching auth Group for every newly created Role.

    Bug fix: removed a leftover `import pdb; pdb.set_trace()` breakpoint,
    which would hang any process (including production requests) that
    saved a Role.
    """
    if created:
        Group.objects.get_or_create(
            name = f'{instance.value} {str(instance.id)}'
        )
# Sentinel used to detect "argument not supplied"; unlike a None default,
# this still lets callers pass None explicitly as a real value.
_no_value = object()
def spam(a, b=_no_value):
    # Identity (`is`) test: only True when the caller omitted b entirely.
    if b is _no_value:
        print('No b value supplied')
    return b
spam(1)
# No b value supplied
spam(1, 2)
spam(1, None)
# A variable used as a default value is evaluated only once, at `def` time.
x = 42
def spam(a,b=x):
    print(a, b)
spam(1)
# 1 42
x = 43
spam(1)
# 1 42  -- rebinding x does not change the already-bound default
# Use only immutable defaults (None, True, False, numbers, strings); avoid
# mutable defaults like [] because the single list object would be shared
# and mutated across calls.
# To test for None, always use `is None`!!
|
"""block chain and smart contract helpers"""
import logging
import time
from binascii import hexlify, unhexlify
import eth_utils
import eth_abi
from web3 import Web3
import queue
from concurrent.futures import ThreadPoolExecutor
# const
CHAIN_FUNC_CALL_TIMEOUT_IN_S = 120  # max seconds to wait for a transaction receipt
CHAIN_EVENT_GET_FAILED_WAIT_TIME_IN_S = 5  # back-off after a failed event poll
CHAIN_MAX_TASK_NUM = 30  # worker-thread cap for chain-level ThreadManagers
CHAIN_MAX_FUNC_CALL_NUM = 20  # max retries for a contract execute_call
CHAIN_TASK_NAME_PREFIX = 'chain_task'
CONTRACT_MAX_TASK_NUM = 30  # worker-thread cap per SmartContract instance
CONTRACT_TASK_NAME_PREFIX = 'contract_task'
# Unique sentinel pushed onto an event queue to tell consumers to exit.
QUEUE_EXIT_SIGNAL = object()
class ThreadManager:
    """
    Thin wrapper over ThreadPoolExecutor supporting a cooperative,
    queue-signalled shutdown and a cap on the number of worker threads.
    """
    def __init__(self, _queue: queue.Queue = None, max_workers=CHAIN_MAX_TASK_NUM,
                 thread_name_prefix=CHAIN_TASK_NAME_PREFIX):
        self._pool = ThreadPoolExecutor(max_workers, thread_name_prefix=thread_name_prefix)
        self._shutdown = False
        self._queue = _queue
    def submit(self, func, *args, **kwargs):
        """Schedule func(*args, **kwargs) on a pool thread."""
        self._pool.submit(func, *args, **kwargs)
    def shutdown(self, wait=False):
        """
        Signal workers to quit and shut the pool down.
        Pushes QUEUE_EXIT_SIGNAL so any consumer blocked on the queue wakes up.
        :param wait: whether to block until all resources are released
        """
        if self._queue is not None:
            self._queue.put(QUEUE_EXIT_SIGNAL)
        self._shutdown = True
        self._pool.shutdown(wait)
    def has_shutdown(self):
        """
        True once shutdown() has been called.  Worker loops should use
        `while not manager.has_shutdown():` rather than `while True:` so
        they can exit cleanly.
        """
        return self._shutdown
class W3Helper:
"""Web3 helper"""
def __init__(self, endpoint: str):
"""
:param endpoint: example: 'http://localhost:8545' or 'ws://localhost:8546'
"""
# __nonce maintain correct number of transactions,
# avoid transaction of same nonce in pending state could not execution.
self.__nonce = {}
if endpoint.startswith("http://") or endpoint.startswith("https://"):
self.web3 = Web3(Web3.HTTPProvider(endpoint))
elif endpoint.startswith("ws://") or endpoint.startswith("wss://"):
self.web3 = Web3(Web3.WebsocketProvider(endpoint))
else:
raise Exception("Unsupported protocol")
def __getattr__(self, name):
"""
:param name:
:return:
"""
if hasattr(self.web3, name):
return getattr(self.web3, name)
# eth api in web3 is the most commonly used
eth = getattr(self.web3, 'eth')
if hasattr(eth, name):
return getattr(eth, name)
raise AttributeError
def call_api(self, method, *args):
"""
:param method:
:param args:
:return:
"""
return self.web3.manager.request_blocking(method, list(args))
def eth_call(self, transaction: dict):
"""
:param transaction:
:return:
"""
return self.eth.call(transaction)
def sign_transaction(self, src_private_key: str, transaction: dict):
"""
sign transaction with src_private_key
:param src_private_key:
:param transaction:
:return:
"""
return self.eth.account.signTransaction(transaction, src_private_key)
def send_raw_transaction(self, raw_transaction):
"""
:param raw_transaction:
:return:
"""
return self.eth.sendRawTransaction(raw_transaction)
def execute_transaction(self, src_private_key: str, transaction: dict):
"""
sign transaction and send it's raw transaction
:param src_private_key:
:param transaction:
:return:
"""
signed_txn = self.sign_transaction(src_private_key, transaction)
return self.send_raw_transaction(signed_txn.rawTransaction)
def execute_and_wait_for_transaction(self, src_private_key: str, transaction: dict,
timeout=CHAIN_FUNC_CALL_TIMEOUT_IN_S):
"""
:param src_private_key:
:param transaction:
:param timeout:
:return:
"""
tx_hash = self.execute_transaction(src_private_key, transaction)
return self.wait_receipt_for_transaction(tx_hash, timeout)
def wait_receipt_for_transaction(self, tx_hash: str, timeout=CHAIN_FUNC_CALL_TIMEOUT_IN_S):
"""
:param tx_hash:
:param timeout:
:return:
"""
receipt = self.eth.waitForTransactionReceipt(tx_hash, timeout)
if receipt is None:
raise TimeoutError("Transaction %r timed out" % hexlify(tx_hash))
while not receipt['blockNumber'] and timeout > 0:
time.sleep(2)
timeout -= 1
receipt = self.eth.waitForTransactionReceipt(tx_hash, timeout)
return receipt
def get_nonce_for_next_transaction(self, acc, block_identifier='latest'):
"""
:param acc: account
:param block_identifier:
:return:
"""
if hasattr(self.__nonce, acc) is False:
self.__nonce[acc] = self.eth.getTransactionCount(acc, block_identifier) - 1
self.__nonce[acc] += 1
return self.__nonce[acc]
# return self.eth.getTransactionCount(acc, block_identifier)
class SmartContractFuncCall:
"""Smart Contract Function Call"""
def __init__(self, w3h: W3Helper, contract_addr: str, attr: dict):
"""
initialize necessary environment for smart contract function call
:param w3h:
:param contract_addr:
:param attr:
"""
self.log = logging.getLogger(SmartContractFuncCall.__name__)
self.w3h = w3h
self.contract_addr = contract_addr
self.ret_type = None
self.need_data = False
self.events = []
if isinstance(attr, tuple):
self.signature = attr[0]
self.need_data = attr[1]
for event in attr[2]:
self.events.append(SmartContractFuncCall.parse_event_signature(event))
else:
self.signature = attr
idx = self.signature.find(")")
if idx > 0:
self.ret_type = self.signature[idx+1:]
self.signature = self.signature[:idx+1]
else:
raise Exception('function signature is wrong')
# pylint: disable=unsubscriptable-object
def __call__(self, *args, **kwargs):
"""
smart contract function call
perform execute_call if no return value
perform view_call if have return value
:param args:
:param kwargs:
:return:
"""
private_key = None
extra_data = None
if not self.ret_type:
private_key = args[0]
args = args[1:]
if self.need_data:
extra_data = args[-1]
args = args[:-1]
data = ''
if args:
data = SmartContractFuncCall.encode_funcall(self.signature, *args)
else:
data = SmartContractFuncCall.encode_funcall(self.signature)
if extra_data:
data = "%s%s" % (data, extra_data[2:])
if private_key:
receipt = self.execute_call(private_key, data, **kwargs)
receipt = dict(receipt)
if self.events:
receipt['events'] = SmartContractFuncCall.decode_logs(self.events, receipt['logs'])
return receipt
try:
ret_data = self.view_call(data)
ret = eth_abi.decode_abi([self.ret_type], ret_data)[0]
if self.ret_type == "string":
return ret.decode()
return ret
except Exception:
return None
def view_call(self, data):
"""
Call readonly method of smart contract
without private key and spend balance
:param data:
:return:
"""
tx = {
"value": 0,
"to": self.contract_addr,
"data": data,
"from": self.contract_addr,
"gasPrice": self.w3h.web3.eth.gasPrice
}
gas = self.w3h.web3.eth.estimateGas(tx)
tx["gas"] = gas
return self.w3h.eth_call(tx)
def execute_call(self, private_key: str, _data, max_recall_count=CHAIN_MAX_FUNC_CALL_NUM, **kwargs):
"""
to call function of smart contract with private key and wait for transaction.
calling smart contract with private key which is necessary.
private key will unlock balance of account to pay fees about calling smart contract.
:param private_key:
:param _data:
:param max_recall_count: a number of max recall this function.
:param kwargs: default timeout is 60
:return:
"""
if max_recall_count <= 0:
raise ValueError('max_recall_count is', max_recall_count)
addr = self.w3h.web3.eth.account.privateKeyToAccount(private_key).address
tx = {
"value": 0,
"to": self.contract_addr,
"data": _data,
"from": addr,
"gasPrice": self.w3h.web3.eth.gasPrice
}
gas = self.w3h.web3.eth.estimateGas(tx)
tx["gas"] = gas
tx['nonce'] = self.w3h.get_nonce_for_next_transaction(addr)
timeout = kwargs.get("timeout", 60)
#print('###############tx:', tx)
# return self.w3h.execute_and_wait_for_transaction(private_key, tx, timeout=timeout)
try:
receipt = self.w3h.execute_and_wait_for_transaction(private_key, tx, timeout=timeout)
#print('transaction was submitted:', receipt)
return receipt
except ValueError:
#print('recall', self.execute_call)
time.sleep(1)
return self.execute_call(private_key, _data, **kwargs, max_recall_count=max_recall_count-1)
@staticmethod
def encode_funcall(func_type, *args):
"""
Encode for contract method call
:param func_type:
:param args:
:return:
"""
func_type = func_type.replace(' ', '')
signature = hexlify(eth_utils.keccak(func_type.encode()))[:8].decode()
params = []
values = list(args)
idx = func_type.find("(")
parmstr = func_type[idx+1:-1]
data = ''
if parmstr:
for parm in func_type[idx+1:-1].split(","):
params.append(parm.strip())
data = hexlify(eth_abi.encode_abi(params, values)).decode()
return "0x%s%s" % (signature, data)
@staticmethod
def parse_event_signature(event_signature: str):
"""parse event signatuer to dictionary"""
event_signature = event_signature.strip()
lidx = event_signature.find('(')
ridx = event_signature.find(')')
event_name = event_signature[:lidx]
params_str = event_signature[lidx + 1:ridx]
params = params_str.split(",")
parameters = []
for i in range(len(params)):
param_tokens = params[i].strip().split()
param = {'indexed': False, 'type': param_tokens[0]}
if len(param_tokens) == 1:
param['name'] = "arg%d" % i
else:
if len(param_tokens) == 3:
param['indexed'] = True
param['name'] = param_tokens[-1]
parameters.append(param)
event_type = '%s(%s)' % (event_name, ','.join([x['type'] for x in parameters]))
signature = '0x{}'.format(hexlify(eth_utils.keccak(event_type.encode())).decode())
return {"event_name": event_name, "signature": signature, "parameters": parameters}
@staticmethod
def decode_log(event: dict, log):
    """
    Decode one receipt log entry against a parsed event description.

    :param event: dict produced by parse_event_signature
                  ("event_name" / "signature" / "parameters")
    :param log: web3 log entry exposing topics / data / blockNumber /
                transactionHash
    :return: decoded event dict, or None when topic0 does not match
    """
    event_name = event['event_name']
    parameters = event['parameters']
    signature = event['signature']
    # Guard clause: a non-matching topic0 means this log is another event.
    if log['topics'][0].hex() != signature:
        return None
    res = {
        "event_name": event_name,
        'blockNumber': log['blockNumber'],
        'transactionHash': log['transactionHash']
    }
    if parameters:
        j = 1  # topic 0 is the signature; indexed args occupy topics 1..n
        for param in parameters:
            if param['indexed']:
                value = eth_abi.decode_single(param['type'], log['topics'][j])
                j += 1
                if isinstance(value, bytes):
                    # BUG FIX: the original did res[param.name] (attribute
                    # access on a dict -> AttributeError) and hexlified the
                    # *decoded str* (TypeError). Hexlify the bytes, then
                    # decode, matching the unindexed branch below.
                    res[param['name']] = '0x{}'.format(hexlify(value).decode())
                else:
                    res[param['name']] = value
        unindexed_types = [x['type'] for x in parameters if not x['indexed']]
        if unindexed_types:
            # Non-indexed arguments are ABI-packed together in the data field
            # ("0x..." hex string).
            unindexed_values = eth_abi.decode_abi(
                unindexed_types, unhexlify(log['data'][2:]))
            unindexed_names = [x['name'] for x in parameters if not x['indexed']]
            for name, value in zip(unindexed_names, unindexed_values):
                if isinstance(value, bytes):
                    res[name] = '0x{}'.format(hexlify(value).decode())
                else:
                    res[name] = value
    return res
@staticmethod
def decode_logs(events, logs):
    """
    Decode every log in a receipt against a list of parsed events.

    :param events: parsed event dicts (see parse_event_signature), e.g. the
                   result for 'LogAdded(bytes, bytes)'
    :param logs: receipt log entries
    :return: list of decoded event dicts; unmatched or undecodable logs are
             skipped
    """
    ret = []
    for log in logs:
        for event in events:
            try:
                decoded = SmartContractFuncCall.decode_log(event, log)
                if decoded:
                    ret.append(decoded)
                    # A log's topic0 matches at most one event signature, so
                    # stop probing the remaining events for this log. (The
                    # original 'continue' here was a no-op at the end of the
                    # loop body.)
                    break
            except Exception:
                # Best effort by design: a log that fails to decode against
                # one event description must not abort the remaining logs.
                pass
    return ret
class SmartContract:
    """Contract base: dynamic contract-call dispatch plus chain-event watching."""
    def __init__(self, w3h: W3Helper, contract_addr: str, queue_size: int):
        """
        :param w3h: web3 helper used for all chain access
        :param contract_addr: contract address; normalized to checksum form
        :param queue_size: upper bound of the decoded-events queue
        """
        self.log = logging.getLogger(self.__class__.__name__)
        self.w3h = w3h
        self.contract_addr = Web3.toChecksumAddress(contract_addr)
        # Mapping of attribute name -> function signature string, expected to
        # be filled in by subclasses.
        self.attrs = {}  # type: dict
        self.events_queue = queue.Queue(queue_size)  # type: queue.Queue
        self._thread_manager = ThreadManager(
            _queue=self.events_queue,
            max_workers=CONTRACT_MAX_TASK_NUM,
            thread_name_prefix=CONTRACT_TASK_NAME_PREFIX
        )
    def __getattr__(self, name):
        """Resolve an unknown attribute as a contract function call.

        Looks `name` up in self.attrs and returns a SmartContractFuncCall
        bound to this contract's address and the registered signature.
        """
        if name in self.attrs:
            attr = self.attrs[name]
            if isinstance(attr, str) and ' ' in attr:
                # The original used replace(' ', '', -1); count -1 already
                # means "all occurrences", so spell it plainly.
                attr = attr.replace(' ', '')
            return SmartContractFuncCall(self.w3h, self.contract_addr, attr)
        # Include the attribute name so the failure is diagnosable.
        raise AttributeError(name)
    def watch(self, event: str, _from: int):
        """
        Poll chain logs for `event` and push decoded events onto events_queue.

        Loops until the thread manager shuts down; intended to run in its own
        worker thread.
        :param event: event signature, e.g. 'LogAdded(bytes,bytes)'
        :param _from: monitor from this block number (None = from next block)
        :return: None
        """
        self.log.debug("Start to watch event %s", event)
        parsed_event = SmartContractFuncCall.parse_event_signature(event)
        topics = [parsed_event['signature']]
        if _from is None:
            _from = self.w3h.blockNumber + 1
        curr_block = self.w3h.blockNumber
        event_filter = {
            "address": self.contract_addr,
            "topics": topics,
            "fromBlock": hex(_from),
            "toBlock": hex(curr_block)
        }
        while not self._thread_manager.has_shutdown():
            if _from > curr_block:
                # Caught up with the chain head: wait, then refresh the head.
                # BUG FIX: the original never updated curr_block in this
                # branch, so once _from exceeded it (true immediately with the
                # default _from = head + 1) the loop slept forever and no
                # events were ever fetched.
                time.sleep(CHAIN_EVENT_GET_FAILED_WAIT_TIME_IN_S)
                curr_block = self.w3h.blockNumber
                event_filter['toBlock'] = hex(curr_block)
            else:
                logs = self.w3h.eth.getLogs(event_filter)
                events = SmartContractFuncCall.decode_logs([parsed_event], logs)
                if events:
                    for evt in events:
                        self.log.debug("Got event %s", evt)
                        self.events_queue.put(evt)
                else:
                    # Empty window: back off before advancing the range.
                    time.sleep(CHAIN_EVENT_GET_FAILED_WAIT_TIME_IN_S)
                _from = curr_block + 1
                curr_block = self.w3h.blockNumber
                event_filter['fromBlock'] = hex(_from)
                event_filter['toBlock'] = hex(curr_block)
        self.log.debug("Stop to watch event %s", event)
    def exit_all_watch(self, wait=False):
        """Shut down the event worker threads; block until done if wait=True."""
        self._thread_manager.shutdown(wait=wait)
|
#!/usr/bin/python3
import time
import sys
import os
import random
import numpy as np
from execute.sequentialEvaluation import sequentialEvaluation #function
from execute.generateBatches import generateBatches #function
from execute.generateBehaviour import generateBehaviour #function
from execute.startHttpServer import startHttpServer #function
from execute.resultsVerification import resultsVerification #function
from batch.batch import Batch #class
from batchDefinition.aBatchDefinition import ABatchDefinition #class
def main2():
    """Run the sequential evaluation and report its wall-clock duration."""
    started_at = time.time()
    sequentialEvaluation()
    elapsed = time.time() - started_at
    print()
    print(f"Time: {elapsed:.5f} s")
if __name__ == "__main__":
    # Dispatch on the single optional command-line flag; with no arguments,
    # fall through to the timed sequential evaluation.
    flag = sys.argv[1] if len(sys.argv) == 2 else None
    if flag == "-generateBatches":
        generateBatches()
    if flag == "-generateBehaviours":
        generateBehaviour()
    if flag == "-startHttpServer":
        startHttpServer()
    if flag == "-resultVerification":
        resultsVerification()
    if len(sys.argv) == 1:
        main2()
|
import itertools
import struct
from binascii import unhexlify
from codecs import getincrementaldecoder
from typing import Dict, Optional, Tuple, Union
import pytest
from wsproto import extensions as wpext, frame_protocol as fp
class TestBuffer:
    """Tests for fp.Buffer: consume_at_most/consume_exactly, feed, rollback, commit."""

    def test_consume_at_most_zero_bytes(self) -> None:
        """Requesting zero bytes yields an empty result even when data is queued."""
        data = fp.Buffer(b"xxyyy")
        assert data.consume_at_most(0) == bytearray()

    def test_consume_at_most_with_no_data(self) -> None:
        """An empty buffer yields an empty result rather than blocking or raising."""
        data = fp.Buffer()
        assert data.consume_at_most(1) == bytearray()

    def test_consume_at_most_with_sufficient_data(self) -> None:
        """Exactly the requested amount is returned when it is all available."""
        data = fp.Buffer(b"xx")
        assert data.consume_at_most(2) == b"xx"

    def test_consume_at_most_with_more_than_sufficient_data(self) -> None:
        """Only the requested prefix is consumed; the rest stays buffered."""
        data = fp.Buffer(b"xxyyy")
        assert data.consume_at_most(2) == b"xx"

    def test_consume_at_most_with_insufficient_data(self) -> None:
        """A short buffer returns whatever it has instead of failing."""
        data = fp.Buffer(b"xx")
        assert data.consume_at_most(3) == b"xx"

    def test_consume_exactly_with_sufficient_data(self) -> None:
        """consume_exactly succeeds when the buffer holds exactly enough."""
        data = fp.Buffer(b"xx")
        assert data.consume_exactly(2) == b"xx"

    def test_consume_exactly_with_more_than_sufficient_data(self) -> None:
        """consume_exactly takes only the requested prefix."""
        data = fp.Buffer(b"xxyyy")
        assert data.consume_exactly(2) == b"xx"

    def test_consume_exactly_with_insufficient_data(self) -> None:
        """consume_exactly signals a shortfall with None, consuming nothing."""
        data = fp.Buffer(b"xx")
        assert data.consume_exactly(3) is None

    def test_feed(self) -> None:
        """Bytes fed after a failed read become consumable."""
        data = fp.Buffer()
        assert data.consume_at_most(1) == b""
        assert data.consume_exactly(1) is None
        data.feed(b"xy")
        assert data.consume_at_most(1) == b"x"
        assert data.consume_exactly(1) == b"y"

    def test_rollback(self) -> None:
        """rollback() restores everything consumed since the last commit."""
        data = fp.Buffer()
        data.feed(b"xyz")
        assert data.consume_exactly(2) == b"xy"
        assert data.consume_exactly(1) == b"z"
        assert data.consume_at_most(1) == b""
        data.rollback()
        assert data.consume_at_most(3) == b"xyz"

    def test_commit(self) -> None:
        """commit() discards consumed bytes so they cannot be rolled back."""
        data = fp.Buffer()
        data.feed(b"xyz")
        assert data.consume_exactly(2) == b"xy"
        assert data.consume_exactly(1) == b"z"
        assert data.consume_at_most(1) == b""
        data.commit()
        assert data.consume_at_most(3) == b""

    def test_length(self) -> None:
        """len() of the buffer reports the number of buffered bytes."""
        data = fp.Buffer()
        fed = b"xyzabc"
        data.feed(fed)
        assert len(data) == len(fed)
class TestMessageDecoder:
    """Tests for fp.MessageDecoder: assembling frames into messages and
    validating TEXT payloads as UTF-8 (including split multi-byte sequences)."""
    def test_single_binary_frame(self) -> None:
        """A finished BINARY frame passes through with payload intact."""
        payload = b"x" * 23
        decoder = fp.MessageDecoder()
        frame = fp.Frame(
            opcode=fp.Opcode.BINARY,
            payload=payload,
            frame_finished=True,
            message_finished=True,
        )
        frame = decoder.process_frame(frame)
        assert frame.opcode is fp.Opcode.BINARY
        assert frame.message_finished is True
        assert frame.payload == payload
    def test_follow_on_binary_frame(self) -> None:
        """A CONTINUATION frame inherits the in-progress BINARY opcode."""
        payload = b"x" * 23
        decoder = fp.MessageDecoder()
        decoder.opcode = fp.Opcode.BINARY
        frame = fp.Frame(
            opcode=fp.Opcode.CONTINUATION,
            payload=payload,
            frame_finished=True,
            message_finished=False,
        )
        frame = decoder.process_frame(frame)
        assert frame.opcode is fp.Opcode.BINARY
        assert frame.message_finished is False
        assert frame.payload == payload
    def test_single_text_frame(self) -> None:
        """A finished TEXT frame is decoded from UTF-8 bytes to str."""
        text_payload = "fñör∂"
        binary_payload = text_payload.encode("utf8")
        decoder = fp.MessageDecoder()
        frame = fp.Frame(
            opcode=fp.Opcode.TEXT,
            payload=binary_payload,
            frame_finished=True,
            message_finished=True,
        )
        frame = decoder.process_frame(frame)
        assert frame.opcode is fp.Opcode.TEXT
        assert frame.message_finished is True
        assert frame.payload == text_payload
    def test_follow_on_text_frame(self) -> None:
        """A mid-message TEXT continuation resumes incremental UTF-8 decoding.

        The first 4 bytes (2 chars, "f" + 2-byte "ñ") are pre-fed into the
        incremental decoder; the continuation then carries the middle slice.
        """
        text_payload = "fñör∂"
        binary_payload = text_payload.encode("utf8")
        decoder = fp.MessageDecoder()
        decoder.opcode = fp.Opcode.TEXT
        decoder.decoder = getincrementaldecoder("utf-8")()
        assert decoder.decoder.decode(binary_payload[:4]) == text_payload[:2]
        binary_payload = binary_payload[4:-2]
        text_payload = text_payload[2:-1]
        frame = fp.Frame(
            opcode=fp.Opcode.CONTINUATION,
            payload=binary_payload,
            frame_finished=True,
            message_finished=False,
        )
        frame = decoder.process_frame(frame)
        assert frame.opcode is fp.Opcode.TEXT
        assert frame.message_finished is False
        assert frame.payload == text_payload
    def test_final_text_frame(self) -> None:
        """The final continuation flushes the incremental UTF-8 decoder."""
        text_payload = "fñör∂"
        binary_payload = text_payload.encode("utf8")
        decoder = fp.MessageDecoder()
        decoder.opcode = fp.Opcode.TEXT
        decoder.decoder = getincrementaldecoder("utf-8")()
        assert decoder.decoder.decode(binary_payload[:-2]) == text_payload[:-1]
        binary_payload = binary_payload[-2:]
        text_payload = text_payload[-1:]
        frame = fp.Frame(
            opcode=fp.Opcode.CONTINUATION,
            payload=binary_payload,
            frame_finished=True,
            message_finished=True,
        )
        frame = decoder.process_frame(frame)
        assert frame.opcode is fp.Opcode.TEXT
        assert frame.message_finished is True
        assert frame.payload == text_payload
    def test_start_with_continuation(self) -> None:
        """A CONTINUATION with no message in progress is a protocol failure."""
        payload = b"x" * 23
        decoder = fp.MessageDecoder()
        frame = fp.Frame(
            opcode=fp.Opcode.CONTINUATION,
            payload=payload,
            frame_finished=True,
            message_finished=True,
        )
        with pytest.raises(fp.ParseFailed):
            decoder.process_frame(frame)
    def test_missing_continuation_1(self) -> None:
        """A new BINARY frame while a BINARY message is open must fail."""
        payload = b"x" * 23
        decoder = fp.MessageDecoder()
        decoder.opcode = fp.Opcode.BINARY
        frame = fp.Frame(
            opcode=fp.Opcode.BINARY,
            payload=payload,
            frame_finished=True,
            message_finished=True,
        )
        with pytest.raises(fp.ParseFailed):
            decoder.process_frame(frame)
    def test_missing_continuation_2(self) -> None:
        """A new BINARY frame while a TEXT message is open must fail."""
        payload = b"x" * 23
        decoder = fp.MessageDecoder()
        decoder.opcode = fp.Opcode.TEXT
        frame = fp.Frame(
            opcode=fp.Opcode.BINARY,
            payload=payload,
            frame_finished=True,
            message_finished=True,
        )
        with pytest.raises(fp.ParseFailed):
            decoder.process_frame(frame)
    def test_incomplete_unicode(self) -> None:
        """A finished TEXT message truncated mid-codepoint is invalid."""
        payload = "fñör∂".encode()
        payload = payload[:4]
        decoder = fp.MessageDecoder()
        frame = fp.Frame(
            opcode=fp.Opcode.TEXT,
            payload=payload,
            frame_finished=True,
            message_finished=True,
        )
        with pytest.raises(fp.ParseFailed) as excinfo:
            decoder.process_frame(frame)
        assert excinfo.value.code is fp.CloseReason.INVALID_FRAME_PAYLOAD_DATA
    def test_not_even_unicode(self) -> None:
        """Latin-1 bytes that are not valid UTF-8 are rejected immediately."""
        payload = "fñörd".encode("iso-8859-1")
        decoder = fp.MessageDecoder()
        frame = fp.Frame(
            opcode=fp.Opcode.TEXT,
            payload=payload,
            frame_finished=True,
            message_finished=False,
        )
        with pytest.raises(fp.ParseFailed) as excinfo:
            decoder.process_frame(frame)
        assert excinfo.value.code is fp.CloseReason.INVALID_FRAME_PAYLOAD_DATA
    def test_bad_unicode(self) -> None:
        """Bytes containing a UTF-16 surrogate (invalid UTF-8) are rejected."""
        payload = unhexlify("cebae1bdb9cf83cebcceb5eda080656469746564")
        decoder = fp.MessageDecoder()
        frame = fp.Frame(
            opcode=fp.Opcode.TEXT,
            payload=payload,
            frame_finished=True,
            message_finished=True,
        )
        with pytest.raises(fp.ParseFailed) as excinfo:
            decoder.process_frame(frame)
        assert excinfo.value.code is fp.CloseReason.INVALID_FRAME_PAYLOAD_DATA
    def test_split_message(self) -> None:
        """An ASCII message split across two frames decodes slice-for-slice."""
        text_payload = "x" * 65535
        payload = text_payload.encode("utf-8")
        split = 32777
        decoder = fp.MessageDecoder()
        frame = fp.Frame(
            opcode=fp.Opcode.TEXT,
            payload=payload[:split],
            frame_finished=False,
            message_finished=True,
        )
        frame = decoder.process_frame(frame)
        assert frame.opcode is fp.Opcode.TEXT
        assert frame.message_finished is False
        assert frame.payload == text_payload[:split]
        frame = fp.Frame(
            opcode=fp.Opcode.CONTINUATION,
            payload=payload[split:],
            frame_finished=True,
            message_finished=True,
        )
        frame = decoder.process_frame(frame)
        assert frame.opcode is fp.Opcode.TEXT
        assert frame.message_finished is True
        assert frame.payload == text_payload[split:]
    def test_split_unicode_message(self) -> None:
        """A multi-byte-char message split at a byte boundary decodes correctly.

        "∂" is 3 bytes in UTF-8, so 64 bytes cover only split // 3 characters;
        the remainder arrives with the continuation frame.
        """
        text_payload = "∂" * 64
        payload = text_payload.encode("utf-8")
        split = 64
        decoder = fp.MessageDecoder()
        frame = fp.Frame(
            opcode=fp.Opcode.TEXT,
            payload=payload[:split],
            frame_finished=False,
            message_finished=True,
        )
        frame = decoder.process_frame(frame)
        assert frame.opcode is fp.Opcode.TEXT
        assert frame.message_finished is False
        assert frame.payload == text_payload[: (split // 3)]
        frame = fp.Frame(
            opcode=fp.Opcode.CONTINUATION,
            payload=payload[split:],
            frame_finished=True,
            message_finished=True,
        )
        frame = decoder.process_frame(frame)
        assert frame.opcode is fp.Opcode.TEXT
        assert frame.message_finished is True
        assert frame.payload == text_payload[(split // 3) :]
    def send_frame_to_validator(self, payload: bytes, finished: bool) -> None:
        """Helper: run one TEXT frame through a fresh decoder for validation.

        NOTE(review): the decoded frame is assigned but never asserted on —
        presumably only the absence of ParseFailed matters here; confirm.
        """
        decoder = fp.MessageDecoder()
        frame = fp.Frame(
            opcode=fp.Opcode.TEXT,
            payload=payload,
            frame_finished=finished,
            message_finished=True,
        )
        frame = decoder.process_frame(frame)
class TestFrameDecoder:
    """Tests for fp.FrameDecoder: wire-format parsing, masks, lengths, splits."""
    def _single_frame_test(
        self,
        client: bool,
        frame_bytes: bytes,
        opcode: fp.Opcode,
        payload: bytes,
        frame_finished: bool,
        message_finished: bool,
    ) -> None:
        """Feed one complete wire frame and assert every decoded field."""
        decoder = fp.FrameDecoder(client=client)
        decoder.receive_bytes(frame_bytes)
        frame = decoder.process_buffer()
        assert frame is not None
        assert frame.opcode is opcode
        assert frame.payload == payload
        assert frame.frame_finished is frame_finished
        assert frame.message_finished is message_finished
    def _split_frame_test(
        self,
        client: bool,
        frame_bytes: bytes,
        opcode: fp.Opcode,
        payload: bytes,
        frame_finished: bool,
        message_finished: bool,
        split: int,
    ) -> None:
        """Feed a frame in two pieces; the first piece alone must yield nothing."""
        decoder = fp.FrameDecoder(client=client)
        decoder.receive_bytes(frame_bytes[:split])
        assert decoder.process_buffer() is None
        decoder.receive_bytes(frame_bytes[split:])
        frame = decoder.process_buffer()
        assert frame is not None
        assert frame.opcode is opcode
        assert frame.payload == payload
        assert frame.frame_finished is frame_finished
        assert frame.message_finished is message_finished
    def _split_message_test(
        self,
        client: bool,
        frame_bytes: bytes,
        opcode: fp.Opcode,
        payload: bytes,
        split: int,
    ) -> None:
        """Split inside the payload: expect a partial frame, then a CONTINUATION."""
        decoder = fp.FrameDecoder(client=client)
        decoder.receive_bytes(frame_bytes[:split])
        frame = decoder.process_buffer()
        assert frame is not None
        assert frame.opcode is opcode
        assert frame.payload == payload[: len(frame.payload)]
        assert frame.frame_finished is False
        assert frame.message_finished is True
        decoder.receive_bytes(frame_bytes[split:])
        frame = decoder.process_buffer()
        assert frame is not None
        assert frame.opcode is fp.Opcode.CONTINUATION
        assert frame.payload == payload[-len(frame.payload) :]
        assert frame.frame_finished is True
        assert frame.message_finished is True
    def _parse_failure_test(
        self, client: bool, frame_bytes: bytes, close_reason: fp.CloseReason
    ) -> None:
        """Feed bytes that must raise ParseFailed with the given close reason."""
        decoder = fp.FrameDecoder(client=client)
        with pytest.raises(fp.ParseFailed) as excinfo:
            decoder.receive_bytes(frame_bytes)
            decoder.process_buffer()
        assert excinfo.value.code is close_reason
    def test_zero_length_message(self) -> None:
        """An empty finished TEXT frame decodes to an empty payload."""
        self._single_frame_test(
            client=True,
            frame_bytes=b"\x81\x00",
            opcode=fp.Opcode.TEXT,
            payload=b"",
            frame_finished=True,
            message_finished=True,
        )
    def test_short_server_message_frame(self) -> None:
        """Unmasked 2-byte TEXT frame from a server decodes as-is."""
        self._single_frame_test(
            client=True,
            frame_bytes=b"\x81\x02xy",
            opcode=fp.Opcode.TEXT,
            payload=b"xy",
            frame_finished=True,
            message_finished=True,
        )
    def test_short_client_message_frame(self) -> None:
        """Masked client frame is unmasked (payload \\x19\\x1b under mask 'abcd')."""
        self._single_frame_test(
            client=False,
            frame_bytes=b"\x81\x82abcd\x19\x1b",
            opcode=fp.Opcode.TEXT,
            payload=b"xy",
            frame_finished=True,
            message_finished=True,
        )
    def test_reject_masked_server_frame(self) -> None:
        """Servers must not mask; a masked server frame is a protocol error."""
        self._parse_failure_test(
            client=True,
            frame_bytes=b"\x81\x82abcd\x19\x1b",
            close_reason=fp.CloseReason.PROTOCOL_ERROR,
        )
    def test_reject_unmasked_client_frame(self) -> None:
        """Clients must mask; an unmasked client frame is a protocol error."""
        self._parse_failure_test(
            client=False,
            frame_bytes=b"\x81\x02xy",
            close_reason=fp.CloseReason.PROTOCOL_ERROR,
        )
    def test_reject_bad_opcode(self) -> None:
        """Reserved opcode 0xE is rejected."""
        self._parse_failure_test(
            client=True,
            frame_bytes=b"\x8e\x02xy",
            close_reason=fp.CloseReason.PROTOCOL_ERROR,
        )
    def test_reject_unfinished_control_frame(self) -> None:
        """Control frames (PING here) may not have FIN clear."""
        self._parse_failure_test(
            client=True,
            frame_bytes=b"\x09\x02xy",
            close_reason=fp.CloseReason.PROTOCOL_ERROR,
        )
    def test_reject_reserved_bits(self) -> None:
        """RSV1/RSV2/RSV3 set without a negotiated extension are rejected."""
        self._parse_failure_test(
            client=True,
            frame_bytes=b"\x91\x02xy",
            close_reason=fp.CloseReason.PROTOCOL_ERROR,
        )
        self._parse_failure_test(
            client=True,
            frame_bytes=b"\xa1\x02xy",
            close_reason=fp.CloseReason.PROTOCOL_ERROR,
        )
        self._parse_failure_test(
            client=True,
            frame_bytes=b"\xc1\x02xy",
            close_reason=fp.CloseReason.PROTOCOL_ERROR,
        )
    def test_long_message_frame(self) -> None:
        """512-byte payload uses the 2-byte extended length (0x7e marker)."""
        payload = b"x" * 512
        payload_len = struct.pack("!H", len(payload))
        frame_bytes = b"\x81\x7e" + payload_len + payload
        self._single_frame_test(
            client=True,
            frame_bytes=frame_bytes,
            opcode=fp.Opcode.TEXT,
            payload=payload,
            frame_finished=True,
            message_finished=True,
        )
    def test_very_long_message_frame(self) -> None:
        """128 KiB payload uses the 8-byte extended length (0x7f marker)."""
        payload = b"x" * (128 * 1024)
        payload_len = struct.pack("!Q", len(payload))
        frame_bytes = b"\x81\x7f" + payload_len + payload
        self._single_frame_test(
            client=True,
            frame_bytes=frame_bytes,
            opcode=fp.Opcode.TEXT,
            payload=payload,
            frame_finished=True,
            message_finished=True,
        )
    def test_insufficiently_long_message_frame(self) -> None:
        """A 2-byte extended length for a payload that fits in 7 bits is invalid."""
        payload = b"x" * 64
        payload_len = struct.pack("!H", len(payload))
        frame_bytes = b"\x81\x7e" + payload_len + payload
        self._parse_failure_test(
            client=True,
            frame_bytes=frame_bytes,
            close_reason=fp.CloseReason.PROTOCOL_ERROR,
        )
    def test_insufficiently_very_long_message_frame(self) -> None:
        """An 8-byte extended length for a payload that fits in 2 bytes is invalid."""
        payload = b"x" * 512
        payload_len = struct.pack("!Q", len(payload))
        frame_bytes = b"\x81\x7f" + payload_len + payload
        self._parse_failure_test(
            client=True,
            frame_bytes=frame_bytes,
            close_reason=fp.CloseReason.PROTOCOL_ERROR,
        )
    def test_very_insufficiently_very_long_message_frame(self) -> None:
        """An 8-byte extended length for a payload that fits in 7 bits is invalid."""
        payload = b"x" * 64
        payload_len = struct.pack("!Q", len(payload))
        frame_bytes = b"\x81\x7f" + payload_len + payload
        self._parse_failure_test(
            client=True,
            frame_bytes=frame_bytes,
            close_reason=fp.CloseReason.PROTOCOL_ERROR,
        )
    def test_not_enough_for_header(self) -> None:
        """Split inside the 2-byte header: nothing decodes until it completes."""
        payload = b"xy"
        frame_bytes = b"\x81\x02" + payload
        self._split_frame_test(
            client=True,
            frame_bytes=frame_bytes,
            opcode=fp.Opcode.TEXT,
            payload=payload,
            frame_finished=True,
            message_finished=True,
            split=1,
        )
    def test_not_enough_for_long_length(self) -> None:
        """Split inside the 2-byte extended length field."""
        payload = b"x" * 512
        payload_len = struct.pack("!H", len(payload))
        frame_bytes = b"\x81\x7e" + payload_len + payload
        self._split_frame_test(
            client=True,
            frame_bytes=frame_bytes,
            opcode=fp.Opcode.TEXT,
            payload=payload,
            frame_finished=True,
            message_finished=True,
            split=3,
        )
    def test_not_enough_for_very_long_length(self) -> None:
        """Split inside the 8-byte extended length field."""
        payload = b"x" * (128 * 1024)
        payload_len = struct.pack("!Q", len(payload))
        frame_bytes = b"\x81\x7f" + payload_len + payload
        self._split_frame_test(
            client=True,
            frame_bytes=frame_bytes,
            opcode=fp.Opcode.TEXT,
            payload=payload,
            frame_finished=True,
            message_finished=True,
            split=7,
        )
    def test_eight_byte_length_with_msb_set(self) -> None:
        """The 8-byte length's most significant bit must be 0 (RFC 6455)."""
        frame_bytes = b"\x81\x7f\x80\x80\x80\x80\x80\x80\x80\x80"
        self._parse_failure_test(
            client=True,
            frame_bytes=frame_bytes,
            close_reason=fp.CloseReason.PROTOCOL_ERROR,
        )
    def test_not_enough_for_mask(self) -> None:
        """Split inside the 4-byte masking key of a client frame."""
        payload = bytearray(b"xy")
        mask = bytearray(b"abcd")
        masked_payload = bytearray([payload[0] ^ mask[0], payload[1] ^ mask[1]])
        frame_bytes = b"\x81\x82" + mask + masked_payload
        self._split_frame_test(
            client=False,
            frame_bytes=frame_bytes,
            opcode=fp.Opcode.TEXT,
            payload=payload,
            frame_finished=True,
            message_finished=True,
            split=4,
        )
    def test_partial_message_frames(self) -> None:
        """Payload drip-fed in 1 KiB chunks: first chunk is TEXT, rest CONTINUATION,
        and only the final chunk reports frame_finished."""
        chunk_size = 1024
        payload = b"x" * (128 * chunk_size)
        payload_len = struct.pack("!Q", len(payload))
        frame_bytes = b"\x81\x7f" + payload_len + payload
        header_len = len(frame_bytes) - len(payload)
        decoder = fp.FrameDecoder(client=True)
        decoder.receive_bytes(frame_bytes[:header_len])
        assert decoder.process_buffer() is None
        frame_bytes = frame_bytes[header_len:]
        payload_sent = 0
        expected_opcode = fp.Opcode.TEXT
        for offset in range(0, len(frame_bytes), chunk_size):
            chunk = frame_bytes[offset : offset + chunk_size]
            decoder.receive_bytes(chunk)
            frame = decoder.process_buffer()
            payload_sent += chunk_size
            all_payload_sent = payload_sent == len(payload)
            assert frame is not None
            assert frame.opcode is expected_opcode
            assert frame.frame_finished is all_payload_sent
            assert frame.message_finished is True
            assert frame.payload == payload[offset : offset + chunk_size]
            expected_opcode = fp.Opcode.CONTINUATION
    def test_partial_control_frame(self) -> None:
        """A control frame (PING) is buffered whole: no output until complete."""
        chunk_size = 11
        payload = b"x" * 64
        frame_bytes = b"\x89" + bytearray([len(payload)]) + payload
        decoder = fp.FrameDecoder(client=True)
        for offset in range(0, len(frame_bytes) - chunk_size, chunk_size):
            chunk = frame_bytes[offset : offset + chunk_size]
            decoder.receive_bytes(chunk)
            assert decoder.process_buffer() is None
        decoder.receive_bytes(frame_bytes[-chunk_size:])
        frame = decoder.process_buffer()
        assert frame is not None
        assert frame.opcode is fp.Opcode.PING
        assert frame.frame_finished is True
        assert frame.message_finished is True
        assert frame.payload == payload
    def test_long_message_sliced(self) -> None:
        """A 64 KiB frame split mid-payload yields two partial frames."""
        payload = b"x" * 65535
        payload_len = struct.pack("!H", len(payload))
        frame_bytes = b"\x81\x7e" + payload_len + payload
        self._split_message_test(
            client=True,
            frame_bytes=frame_bytes,
            opcode=fp.Opcode.TEXT,
            payload=payload,
            split=65535,
        )
    def test_overly_long_control_frame(self) -> None:
        """Control frame payloads are capped at 125 bytes; 128 must fail."""
        payload = b"x" * 128
        payload_len = struct.pack("!H", len(payload))
        frame_bytes = b"\x89\x7e" + payload_len + payload
        self._parse_failure_test(
            client=True,
            frame_bytes=frame_bytes,
            close_reason=fp.CloseReason.PROTOCOL_ERROR,
        )
class TestFrameDecoderExtensions:
    """Tests for extension hooks in fp.FrameDecoder / fp.FrameProtocol,
    using a stub extension that claims RSV3 and mutates payloads."""
    class FakeExtension(wpext.Extension):
        """Stub extension: uppercases inbound RSV3 payloads, appends markers,
        and can be tripped into returning failure close reasons."""
        name = "fake"
        def __init__(self) -> None:
            # Flags recording which hooks were invoked, for assertions.
            self._inbound_header_called = False
            self._inbound_rsv_bit_set = False
            self._inbound_payload_data_called = False
            self._inbound_complete_called = False
            self._fail_inbound_complete = False
            self._outbound_rsv_bit_set = False
        def enabled(self) -> bool:
            """Always active."""
            return True
        def offer(self) -> Union[bool, str]:
            """Offer string sent during negotiation."""
            return "fake"
        def frame_inbound_header(
            self,
            proto: Union[fp.FrameDecoder, fp.FrameProtocol],
            opcode: fp.Opcode,
            rsv: fp.RsvBits,
            payload_length: int,
        ) -> Union[fp.CloseReason, fp.RsvBits]:
            """Record the RSV3 bit; fail with MANDATORY_EXT on PONG frames.

            Returns the RSV bits this extension claims (RSV3 only).
            """
            self._inbound_header_called = True
            if opcode is fp.Opcode.PONG:
                return fp.CloseReason.MANDATORY_EXT
            self._inbound_rsv_bit_set = rsv.rsv3
            return fp.RsvBits(False, False, True)
        def frame_inbound_payload_data(
            self, proto: Union[fp.FrameDecoder, fp.FrameProtocol], data: bytes
        ) -> Union[bytes, fp.CloseReason]:
            """Uppercase data when RSV3 was set; trigger failures on magic payloads.

            b"party time" fails immediately; b"ragequit" arms a failure that
            frame_inbound_complete reports later.
            """
            self._inbound_payload_data_called = True
            if data == b"party time":
                return fp.CloseReason.POLICY_VIOLATION
            elif data == b"ragequit":
                self._fail_inbound_complete = True
            if self._inbound_rsv_bit_set:
                data = data.decode("utf-8").upper().encode("utf-8")
            return data
        def frame_inbound_complete(
            self, proto: Union[fp.FrameDecoder, fp.FrameProtocol], fin: bool
        ) -> Union[bytes, fp.CloseReason, None]:
            """Append a trademark sign on FIN when RSV3 was set; honor armed failure."""
            self._inbound_complete_called = True
            if self._fail_inbound_complete:
                return fp.CloseReason.ABNORMAL_CLOSURE
            if fin and self._inbound_rsv_bit_set:
                return "™".encode()
            return None
        def frame_outbound(
            self,
            proto: Union[fp.FrameDecoder, fp.FrameProtocol],
            opcode: fp.Opcode,
            rsv: fp.RsvBits,
            data: bytes,
            fin: bool,
        ) -> Tuple[fp.RsvBits, bytes]:
            """Set RSV3 on TEXT frames and append a registered sign on FIN."""
            if opcode is fp.Opcode.TEXT:
                rsv = fp.RsvBits(rsv.rsv1, rsv.rsv2, True)
                self._outbound_rsv_bit_set = True
            if fin and self._outbound_rsv_bit_set:
                data += "®".encode()
                self._outbound_rsv_bit_set = False
            return rsv, data
    def test_rsv_bit(self) -> None:
        """RSV3 frame (0x91) is accepted because the extension claims RSV3."""
        ext = self.FakeExtension()
        decoder = fp.FrameDecoder(client=True, extensions=[ext])
        frame_bytes = b"\x91\x00"
        decoder.receive_bytes(frame_bytes)
        frame = decoder.process_buffer()
        assert frame is not None
        assert ext._inbound_header_called
        assert ext._inbound_rsv_bit_set
    def test_wrong_rsv_bit(self) -> None:
        """RSV2 (0xa1) is not claimed by the extension, so parsing fails.

        NOTE(review): frame_bytes is fed once before the raises block and
        again inside it — presumably redundant; verify intent.
        """
        ext = self.FakeExtension()
        decoder = fp.FrameDecoder(client=True, extensions=[ext])
        frame_bytes = b"\xa1\x00"
        decoder.receive_bytes(frame_bytes)
        with pytest.raises(fp.ParseFailed) as excinfo:
            decoder.receive_bytes(frame_bytes)
            decoder.process_buffer()
        assert excinfo.value.code is fp.CloseReason.PROTOCOL_ERROR
    def test_header_error_handling(self) -> None:
        """PONG (0x9a = FIN|RSV3|PONG) makes the header hook fail with MANDATORY_EXT."""
        ext = self.FakeExtension()
        decoder = fp.FrameDecoder(client=True, extensions=[ext])
        frame_bytes = b"\x9a\x00"
        decoder.receive_bytes(frame_bytes)
        with pytest.raises(fp.ParseFailed) as excinfo:
            decoder.receive_bytes(frame_bytes)
            decoder.process_buffer()
        assert excinfo.value.code is fp.CloseReason.MANDATORY_EXT
    def test_payload_processing(self) -> None:
        """With RSV3 set (0x11), the payload hook uppercases the data."""
        ext = self.FakeExtension()
        decoder = fp.FrameDecoder(client=True, extensions=[ext])
        payload = "fñör∂"
        expected_payload = payload.upper().encode("utf-8")
        bytes_payload = payload.encode("utf-8")
        frame_bytes = b"\x11" + bytearray([len(bytes_payload)]) + bytes_payload
        decoder.receive_bytes(frame_bytes)
        frame = decoder.process_buffer()
        assert frame is not None
        assert ext._inbound_header_called
        assert ext._inbound_rsv_bit_set
        assert ext._inbound_payload_data_called
        assert frame.payload == expected_payload
    def test_no_payload_processing_when_not_wanted(self) -> None:
        """Without RSV3 (0x01), the payload passes through unmodified."""
        ext = self.FakeExtension()
        decoder = fp.FrameDecoder(client=True, extensions=[ext])
        payload = "fñör∂"
        expected_payload = payload.encode("utf-8")
        bytes_payload = payload.encode("utf-8")
        frame_bytes = b"\x01" + bytearray([len(bytes_payload)]) + bytes_payload
        decoder.receive_bytes(frame_bytes)
        frame = decoder.process_buffer()
        assert frame is not None
        assert ext._inbound_header_called
        assert not ext._inbound_rsv_bit_set
        assert ext._inbound_payload_data_called
        assert frame.payload == expected_payload
    def test_payload_error_handling(self) -> None:
        """The payload hook's POLICY_VIOLATION is surfaced as ParseFailed."""
        ext = self.FakeExtension()
        decoder = fp.FrameDecoder(client=True, extensions=[ext])
        payload = b"party time"
        frame_bytes = b"\x91" + bytearray([len(payload)]) + payload
        decoder.receive_bytes(frame_bytes)
        with pytest.raises(fp.ParseFailed) as excinfo:
            decoder.receive_bytes(frame_bytes)
            decoder.process_buffer()
        assert excinfo.value.code is fp.CloseReason.POLICY_VIOLATION
    def test_frame_completion(self) -> None:
        """On FIN with RSV3, the completion hook appends its "™" marker."""
        ext = self.FakeExtension()
        decoder = fp.FrameDecoder(client=True, extensions=[ext])
        payload = "fñör∂"
        expected_payload = (payload + "™").upper().encode("utf-8")
        bytes_payload = payload.encode("utf-8")
        frame_bytes = b"\x91" + bytearray([len(bytes_payload)]) + bytes_payload
        decoder.receive_bytes(frame_bytes)
        frame = decoder.process_buffer()
        assert frame is not None
        assert ext._inbound_header_called
        assert ext._inbound_rsv_bit_set
        assert ext._inbound_payload_data_called
        assert ext._inbound_complete_called
        assert frame.payload == expected_payload
    def test_no_frame_completion_when_not_wanted(self) -> None:
        """Without RSV3 (0x81), completion runs but leaves the payload alone."""
        ext = self.FakeExtension()
        decoder = fp.FrameDecoder(client=True, extensions=[ext])
        payload = "fñör∂"
        expected_payload = payload.encode("utf-8")
        bytes_payload = payload.encode("utf-8")
        frame_bytes = b"\x81" + bytearray([len(bytes_payload)]) + bytes_payload
        decoder.receive_bytes(frame_bytes)
        frame = decoder.process_buffer()
        assert frame is not None
        assert ext._inbound_header_called
        assert not ext._inbound_rsv_bit_set
        assert ext._inbound_payload_data_called
        assert ext._inbound_complete_called
        assert frame.payload == expected_payload
    def test_completion_error_handling(self) -> None:
        """b"ragequit" arms the completion hook to fail with ABNORMAL_CLOSURE."""
        ext = self.FakeExtension()
        decoder = fp.FrameDecoder(client=True, extensions=[ext])
        payload = b"ragequit"
        frame_bytes = b"\x91" + bytearray([len(payload)]) + payload
        decoder.receive_bytes(frame_bytes)
        with pytest.raises(fp.ParseFailed) as excinfo:
            decoder.receive_bytes(frame_bytes)
            decoder.process_buffer()
        assert excinfo.value.code is fp.CloseReason.ABNORMAL_CLOSURE
    def test_outbound_handling_single_frame(self) -> None:
        """Outbound single TEXT frame gets RSV3 set (0x91) and "®" appended."""
        ext = self.FakeExtension()
        proto = fp.FrameProtocol(client=False, extensions=[ext])
        payload = "😃😄🙃😉"
        data = proto.send_data(payload, fin=True)
        payload_bytes = (payload + "®").encode("utf8")
        assert data == b"\x91" + bytearray([len(payload_bytes)]) + payload_bytes
    def test_outbound_handling_multiple_frames(self) -> None:
        """"®" is appended only on the final (FIN, 0x80 continuation) frame."""
        ext = self.FakeExtension()
        proto = fp.FrameProtocol(client=False, extensions=[ext])
        payload = "😃😄🙃😉"
        data = proto.send_data(payload, fin=False)
        payload_bytes = payload.encode("utf8")
        assert data == b"\x11" + bytearray([len(payload_bytes)]) + payload_bytes
        payload = r"¯\_(ツ)_/¯"
        data = proto.send_data(payload, fin=True)
        payload_bytes = (payload + "®").encode("utf8")
        assert data == b"\x80" + bytearray([len(payload_bytes)]) + payload_bytes
class TestFrameProtocolReceive:
    """Receive-side tests for fp.FrameProtocol: text, CLOSE and control frames."""
    def test_long_text_message(self) -> None:
        """A 65535-char text message in one extended-length frame decodes whole."""
        payload = "x" * 65535
        encoded_payload = payload.encode("utf-8")
        payload_len = struct.pack("!H", len(encoded_payload))
        frame_bytes = b"\x81\x7e" + payload_len + encoded_payload
        protocol = fp.FrameProtocol(client=True, extensions=[])
        protocol.receive_bytes(frame_bytes)
        frames = list(protocol.received_frames())
        assert len(frames) == 1
        frame = frames[0]
        assert frame.opcode == fp.Opcode.TEXT
        assert len(frame.payload) == len(payload)
        assert frame.payload == payload
    def _close_test(
        self,
        code: Optional[int],
        reason: Optional[str] = None,
        reason_bytes: Optional[bytes] = None,
    ) -> None:
        """Build a CLOSE frame with the given code/reason and assert it round-trips.

        :param code: close code to encode, or None for an empty close payload
        :param reason: textual close reason, encoded as UTF-8
        :param reason_bytes: raw reason bytes, for testing invalid UTF-8
        """
        payload = b""
        if code:
            payload += struct.pack("!H", code)
        if reason:
            payload += reason.encode("utf8")
        elif reason_bytes:
            payload += reason_bytes
        frame_bytes = b"\x88" + bytearray([len(payload)]) + payload
        protocol = fp.FrameProtocol(client=True, extensions=[])
        protocol.receive_bytes(frame_bytes)
        frames = list(protocol.received_frames())
        assert len(frames) == 1
        frame = frames[0]
        assert frame.opcode == fp.Opcode.CLOSE
        # BUG FIX: the original read `frame.payload[0] == code or
        # fp.CloseReason.NO_STATUS_RCVD`; `==` binds tighter than `or`, so the
        # assertion was always truthy. Parenthesize so a missing code must
        # decode to NO_STATUS_RCVD.
        assert frame.payload[0] == (code or fp.CloseReason.NO_STATUS_RCVD)
        if reason:
            assert frame.payload[1] == reason
        else:
            assert not frame.payload[1]
    def test_close_no_code(self) -> None:
        """An empty CLOSE payload decodes to NO_STATUS_RCVD."""
        self._close_test(None)
    def test_close_one_byte_code(self) -> None:
        """A 1-byte close payload cannot hold a code: protocol error."""
        frame_bytes = b"\x88\x01\x0e"
        protocol = fp.FrameProtocol(client=True, extensions=[])
        with pytest.raises(fp.ParseFailed) as exc:
            protocol.receive_bytes(frame_bytes)
            list(protocol.received_frames())
        assert exc.value.code == fp.CloseReason.PROTOCOL_ERROR
    def test_close_bad_code(self) -> None:
        """Codes below 1000 are invalid on the wire."""
        with pytest.raises(fp.ParseFailed) as exc:
            self._close_test(123)
        assert exc.value.code == fp.CloseReason.PROTOCOL_ERROR
    def test_close_unknown_code(self) -> None:
        """Unassigned codes in the 1000-2999 range are rejected."""
        with pytest.raises(fp.ParseFailed) as exc:
            self._close_test(2998)
        assert exc.value.code == fp.CloseReason.PROTOCOL_ERROR
    def test_close_local_only_code(self) -> None:
        """NO_STATUS_RCVD (1005) is local-only and must not appear on the wire."""
        with pytest.raises(fp.ParseFailed) as exc:
            self._close_test(fp.CloseReason.NO_STATUS_RCVD)
        assert exc.value.code == fp.CloseReason.PROTOCOL_ERROR
    def test_close_no_payload(self) -> None:
        """A code with no reason is valid."""
        self._close_test(fp.CloseReason.NORMAL_CLOSURE)
    def test_close_easy_payload(self) -> None:
        """An ASCII reason round-trips."""
        self._close_test(fp.CloseReason.NORMAL_CLOSURE, "tarah old chap")
    def test_close_utf8_payload(self) -> None:
        """A multi-byte UTF-8 reason round-trips."""
        self._close_test(fp.CloseReason.NORMAL_CLOSURE, "fñør∂")
    def test_close_bad_utf8_payload(self) -> None:
        """A reason containing invalid UTF-8 (surrogate bytes) is rejected."""
        payload = unhexlify("cebae1bdb9cf83cebcceb5eda080656469746564")
        with pytest.raises(fp.ParseFailed) as exc:
            self._close_test(fp.CloseReason.NORMAL_CLOSURE, reason_bytes=payload)
        assert exc.value.code == fp.CloseReason.INVALID_FRAME_PAYLOAD_DATA
    def test_close_incomplete_utf8_payload(self) -> None:
        """A reason truncated mid-codepoint is rejected."""
        payload = "fñør∂".encode()[:-1]
        with pytest.raises(fp.ParseFailed) as exc:
            self._close_test(fp.CloseReason.NORMAL_CLOSURE, reason_bytes=payload)
        assert exc.value.code == fp.CloseReason.INVALID_FRAME_PAYLOAD_DATA
    def test_random_control_frame(self) -> None:
        """A PING frame is delivered with its payload intact."""
        payload = b"give me one ping vasily"
        frame_bytes = b"\x89" + bytearray([len(payload)]) + payload
        protocol = fp.FrameProtocol(client=True, extensions=[])
        protocol.receive_bytes(frame_bytes)
        frames = list(protocol.received_frames())
        assert len(frames) == 1
        frame = frames[0]
        assert frame.opcode == fp.Opcode.PING
        assert len(frame.payload) == len(payload)
        assert frame.payload == payload
class TestFrameProtocolSend:
    """Serialisation tests for FrameProtocol: close/ping/pong control frames,
    text vs. binary data frames, the three payload-length encodings
    (7-bit, 16-bit, 64-bit), and client-side masking."""

    def test_simplest_possible_close(self) -> None:
        """Close with no code and no reason: 0x88 opcode, zero-length payload."""
        proto = fp.FrameProtocol(client=False, extensions=[])
        data = proto.close()
        assert data == b"\x88\x00"

    def test_unreasoning_close(self) -> None:
        """Close with a code but no reason carries just the 2-byte code."""
        proto = fp.FrameProtocol(client=False, extensions=[])
        data = proto.close(code=fp.CloseReason.NORMAL_CLOSURE)
        assert data == b"\x88\x02\x03\xe8"

    def test_reasoned_close(self) -> None:
        """Close payload is the big-endian code followed by the UTF-8 reason."""
        proto = fp.FrameProtocol(client=False, extensions=[])
        reason = r"¯\_(ツ)_/¯"
        expected_payload = struct.pack(
            "!H", fp.CloseReason.NORMAL_CLOSURE
        ) + reason.encode("utf8")
        data = proto.close(code=fp.CloseReason.NORMAL_CLOSURE, reason=reason)
        assert data == b"\x88" + bytearray([len(expected_payload)]) + expected_payload

    def test_overly_reasoned_close(self) -> None:
        """An oversized reason is truncated so the control frame fits 125 bytes,
        and the truncation still yields valid UTF-8."""
        proto = fp.FrameProtocol(client=False, extensions=[])
        reason = r"¯\_(ツ)_/¯" * 10
        data = proto.close(code=fp.CloseReason.NORMAL_CLOSURE, reason=reason)
        assert bytes(data[0:1]) == b"\x88"
        assert len(data) <= 127
        assert data[4:].decode("utf8")

    def test_reasoned_but_uncoded_close(self) -> None:
        """A reason without a close code is a TypeError."""
        proto = fp.FrameProtocol(client=False, extensions=[])
        with pytest.raises(TypeError):
            proto.close(reason="termites")

    def test_no_status_rcvd_close_reason(self) -> None:
        """Local-only NO_STATUS_RCVD is not serialised onto the wire."""
        proto = fp.FrameProtocol(client=False, extensions=[])
        data = proto.close(code=fp.CloseReason.NO_STATUS_RCVD)
        assert data == b"\x88\x00"

    def test_local_only_close_reason(self) -> None:
        """Local-only ABNORMAL_CLOSURE is replaced by NORMAL_CLOSURE (0x03e8)."""
        proto = fp.FrameProtocol(client=False, extensions=[])
        data = proto.close(code=fp.CloseReason.ABNORMAL_CLOSURE)
        assert data == b"\x88\x02\x03\xe8"

    # --- ping/pong serialisation -------------------------------------------
    def test_ping_without_payload(self) -> None:
        proto = fp.FrameProtocol(client=False, extensions=[])
        data = proto.ping()
        assert data == b"\x89\x00"

    def test_ping_with_payload(self) -> None:
        proto = fp.FrameProtocol(client=False, extensions=[])
        payload = r"¯\_(ツ)_/¯".encode()
        data = proto.ping(payload)
        assert data == b"\x89" + bytearray([len(payload)]) + payload

    def test_pong_without_payload(self) -> None:
        proto = fp.FrameProtocol(client=False, extensions=[])
        data = proto.pong()
        assert data == b"\x8a\x00"

    def test_pong_with_payload(self) -> None:
        proto = fp.FrameProtocol(client=False, extensions=[])
        payload = r"¯\_(ツ)_/¯".encode()
        data = proto.pong(payload)
        assert data == b"\x8a" + bytearray([len(payload)]) + payload

    # --- data frames: opcode and FIN-bit handling --------------------------
    def test_single_short_binary_data(self) -> None:
        proto = fp.FrameProtocol(client=False, extensions=[])
        payload = b"it's all just ascii, right?"
        data = proto.send_data(payload, fin=True)
        assert data == b"\x82" + bytearray([len(payload)]) + payload

    def test_single_short_text_data(self) -> None:
        proto = fp.FrameProtocol(client=False, extensions=[])
        payload = "😃😄🙃😉"
        data = proto.send_data(payload, fin=True)
        payload_bytes = payload.encode("utf8")
        assert data == b"\x81" + bytearray([len(payload_bytes)]) + payload_bytes

    def test_multiple_short_binary_data(self) -> None:
        """fin=False yields BINARY without FIN; the follow-up is CONTINUATION."""
        proto = fp.FrameProtocol(client=False, extensions=[])
        payload = b"it's all just ascii, right?"
        data = proto.send_data(payload, fin=False)
        assert data == b"\x02" + bytearray([len(payload)]) + payload
        payload = b"sure no worries"
        data = proto.send_data(payload, fin=True)
        assert data == b"\x80" + bytearray([len(payload)]) + payload

    def test_multiple_short_text_data(self) -> None:
        proto = fp.FrameProtocol(client=False, extensions=[])
        payload = "😃😄🙃😉"
        data = proto.send_data(payload, fin=False)
        payload_bytes = payload.encode("utf8")
        assert data == b"\x01" + bytearray([len(payload_bytes)]) + payload_bytes
        payload = "🙈🙉🙊"
        data = proto.send_data(payload, fin=True)
        payload_bytes = payload.encode("utf8")
        assert data == b"\x80" + bytearray([len(payload_bytes)]) + payload_bytes

    def test_mismatched_data_messages1(self) -> None:
        """Switching from text to bytes mid-message is a TypeError."""
        proto = fp.FrameProtocol(client=False, extensions=[])
        payload = "😃😄🙃😉"
        data = proto.send_data(payload, fin=False)
        payload_bytes = payload.encode("utf8")
        assert data == b"\x01" + bytearray([len(payload_bytes)]) + payload_bytes
        payload_bytes = b"seriously, all ascii"
        with pytest.raises(TypeError):
            proto.send_data(payload_bytes)

    def test_mismatched_data_messages2(self) -> None:
        """Switching from bytes to text mid-message is a TypeError."""
        proto = fp.FrameProtocol(client=False, extensions=[])
        payload = b"it's all just ascii, right?"
        data = proto.send_data(payload, fin=False)
        assert data == b"\x02" + bytearray([len(payload)]) + payload
        payload_str = "✔️☑️✅✔︎☑"
        with pytest.raises(TypeError):
            proto.send_data(payload_str)

    # --- payload-length encoding boundaries --------------------------------
    def test_message_length_max_short(self) -> None:
        """125 bytes is the largest 7-bit length."""
        proto = fp.FrameProtocol(client=False, extensions=[])
        payload = b"x" * 125
        data = proto.send_data(payload, fin=True)
        assert data == b"\x82" + bytearray([len(payload)]) + payload

    def test_message_length_min_two_byte(self) -> None:
        """126 bytes switches to the 0x7e + 16-bit length form."""
        proto = fp.FrameProtocol(client=False, extensions=[])
        payload = b"x" * 126
        data = proto.send_data(payload, fin=True)
        assert data == b"\x82\x7e" + struct.pack("!H", len(payload)) + payload

    def test_message_length_max_two_byte(self) -> None:
        proto = fp.FrameProtocol(client=False, extensions=[])
        payload = b"x" * (2**16 - 1)
        data = proto.send_data(payload, fin=True)
        assert data == b"\x82\x7e" + struct.pack("!H", len(payload)) + payload

    def test_message_length_min_eight_byte(self) -> None:
        """2**16 bytes switches to the 0x7f + 64-bit length form."""
        proto = fp.FrameProtocol(client=False, extensions=[])
        payload = b"x" * (2**16)
        data = proto.send_data(payload, fin=True)
        assert data == b"\x82\x7f" + struct.pack("!Q", len(payload)) + payload

    # --- client-side masking (mask bit, 4-byte key, XOR of payload) --------
    def test_client_side_masking_short_frame(self) -> None:
        proto = fp.FrameProtocol(client=True, extensions=[])
        payload = b"x" * 125
        data = proto.send_data(payload, fin=True)
        assert data[0] == 0x82
        # High bit of the length byte is the MASK flag.
        assert struct.unpack("!B", data[1:2])[0] == len(payload) | 0x80
        masking_key = data[2:6]
        maskbytes = itertools.cycle(masking_key)
        assert data[6:] == bytearray(b ^ next(maskbytes) for b in bytearray(payload))

    def test_client_side_masking_two_byte_frame(self) -> None:
        proto = fp.FrameProtocol(client=True, extensions=[])
        payload = b"x" * 126
        data = proto.send_data(payload, fin=True)
        assert data[0] == 0x82
        assert data[1] == 0xFE
        assert struct.unpack("!H", data[2:4])[0] == len(payload)
        masking_key = data[4:8]
        maskbytes = itertools.cycle(masking_key)
        assert data[8:] == bytearray(b ^ next(maskbytes) for b in bytearray(payload))

    def test_client_side_masking_eight_byte_frame(self) -> None:
        proto = fp.FrameProtocol(client=True, extensions=[])
        payload = b"x" * 65536
        data = proto.send_data(payload, fin=True)
        assert data[0] == 0x82
        assert data[1] == 0xFF
        assert struct.unpack("!Q", data[2:10])[0] == len(payload)
        masking_key = data[10:14]
        maskbytes = itertools.cycle(masking_key)
        assert data[14:] == bytearray(b ^ next(maskbytes) for b in bytearray(payload))

    def test_control_frame_with_overly_long_payload(self) -> None:
        """Control frames are capped at 125 payload bytes."""
        proto = fp.FrameProtocol(client=False, extensions=[])
        payload = b"x" * 126
        with pytest.raises(ValueError):
            proto.pong(payload)

    def test_data_we_have_no_idea_what_to_do_with(self) -> None:
        """Non-str/bytes payloads are rejected with ValueError."""
        proto = fp.FrameProtocol(client=False, extensions=[])
        payload: Dict[str, str] = dict()
        with pytest.raises(ValueError):
            # Intentionally passing illegal type.
            proto.send_data(payload)  # type: ignore
def test_xor_mask_simple() -> None:
    """XorMaskerSimple applies the repeating 4-byte XOR mask to its input."""
    xor = fp.XorMaskerSimple(b"1234")
    assert xor.process(b"") == b""
    masked = xor.process(b"some very long data for masking by websocket")
    assert masked == (
        b"B]^Q\x11DVFH\x12_[_U\x13PPFR\x14W]A\x14\\S@_X\\T\x14SK\x13CTP@[RYV@"
    )
|
import codecs
import csv
import numpy as np
import pandas as pd
import xlrd
from numpy.random import choice
class Data():
    """Synthetic 150-order scheduling data set, with Excel/CSV export helpers.

    Columns (in `headers` order): arrival time, customer level, delay
    tolerance, four per-machine lead times, four per-machine daily
    capacities, and revenue.
    """

    def __init__(self):
        pass

    # Write a 2-D array to an Excel workbook.
    @staticmethod
    def excelWriter(A, headers):
        """Write array `A` to sheet 'page_1' of b.xlsx with `headers` columns.

        Declared @staticmethod: the original definition took no `self`, so it
        silently broke when called on an instance (the instance was bound to
        `A`); `Data.excelWriter(...)` keeps working unchanged.
        """
        frame = pd.DataFrame(columns=headers, data=A)
        # The context manager saves and closes the workbook on exit; the old
        # explicit writer.save()/writer.close() pair is deprecated in pandas.
        with pd.ExcelWriter('b.xlsx') as writer:
            frame.to_excel(writer, 'page_1', float_format='%.5f', index=True,
                           header=True)

    def xlsx_to_csv(self):
        """Convert the first sheet of b.xlsx to UTF-8 b.csv, row by row."""
        workbook = xlrd.open_workbook('b.xlsx')
        table = workbook.sheet_by_index(0)
        with codecs.open('b.csv', 'w', encoding='utf-8') as f:
            write = csv.writer(f)
            for row_num in range(table.nrows):
                write.writerow(table.row_values(row_num))

    # Order arrival times and attributes.
    def data(self, headers):
        """Return a DataFrame of 150 random orders labelled by `headers`.

        Arrival times start at 1 and advance by 0/1/2 periods with
        probabilities 0.4/0.3/0.3 (non-decreasing by construction).
        """
        steps = choice([0, 1, 2], size=149, p=[0.4, 0.3, 0.3])
        arrive_time = np.concatenate(([1], 1 + np.cumsum(steps)))
        customer_level = choice([1, 2, 3], size=150, p=[0.5, 0.3, 0.2])
        delay_time = choice([3, 4, 5, 6], size=150, p=[0.2, 0.3, 0.3, 0.2])
        # Four machines share the same lead-time distribution.
        lead_times = [choice([1, 2, 3], size=150, p=[0.5, 0.3, 0.2])
                      for _ in range(4)]
        # p=[0, 1, 0] makes every daily capacity exactly 2.
        daily_capacities = [choice([1, 2, 3], size=150, p=[0, 1, 0])
                            for _ in range(4)]
        revenue = choice([3, 4, 5, 6], size=150, p=[0.2, 0.3, 0.3, 0.2])
        columns = [arrive_time, customer_level, delay_time,
                   *lead_times, *daily_capacities, revenue]
        data = np.transpose(np.vstack(columns))
        return pd.DataFrame(columns=headers, data=data)
if __name__ == '__main__':
    # Column names: three order attributes, four lead times, four daily
    # capacities, then revenue — must match the column order in Data.data().
    headers = ['arrival_time', 'customer_level', 'delay_time']
    headers += ['lead_time_%d' % i for i in range(1, 5)]
    headers += ['daily_capacity_%d' % i for i in range(1, 5)]
    headers.append('revenue')
    data = Data()
    order_data = data.data(headers)
|
#!/usr/bin/env python
import ROOT
import math
from functools import partial
import CombineHarvester.CombineTools.plotting as plot
import json
import argparse
import os.path
import os
import sys
# ROOT global configuration: don't let ROOT consume argv, render off-screen
# (batch mode), and apply the CMS TDR plotting style.
ROOT.PyConfig.IgnoreCommandLineOptions = True
ROOT.gROOT.SetBatch(ROOT.kTRUE)
plot.ModTDRStyle(width=700, l=0.13)
ROOT.gStyle.SetNdivisions(510, "XYZ")
ROOT.gStyle.SetMarkerSize(0.7)
# Monotonic counter used to give each spline TF1 a unique ROOT name.
NAMECOUNTER = 0
def read(scan, param, files, ycut):
    """Build a sorted, de-duplicated TGraph of 2*deltaNLL vs. `param`
    from the Combine output files, truncated above `ycut`."""
    usable = [fname for fname in files if plot.TFileIsGood(fname)]
    chain = plot.MakeTChain(usable, 'limit')
    graph = plot.TGraphFromTree(chain, param, '2*deltaNLL', 'quantileExpected > -1.5')
    graph.SetName(scan)
    graph.Sort()
    plot.RemoveGraphXDuplicates(graph)
    plot.RemoveGraphYAbove(graph, ycut)
    return graph
def Eval(obj, x, params):
    # TF1 callable adapter: evaluate the wrapped object (a TSpline3 here) at
    # x[0]; `params` is required by the TF1 signature but unused.
    return obj.Eval(x[0])
def BuildScan(scan, param, files, color, yvals, ycut):
    """Assemble the likelihood-scan bundle for one POI: the graph, a spline
    fit, the best-fit point, and the crossings at each 2*deltaNLL level in
    `yvals` (e.g. [1., 4.] for 1 and 2 sigma). `ycut` truncates the graph.
    """
    graph = read(scan, param, files, ycut)
    bestfit = None
    # The best-fit point is where 2*deltaNLL is exactly zero.
    for i in xrange(graph.GetN()):
        if graph.GetY()[i] == 0.:
            bestfit = graph.GetX()[i]
    graph.SetMarkerColor(color)
    spline = ROOT.TSpline3("spline3", graph)
    global NAMECOUNTER
    # ROOT keys objects by name globally, so each TF1 needs a unique name.
    func = ROOT.TF1('splinefn'+str(NAMECOUNTER), partial(Eval, spline), graph.GetX()[0], graph.GetX()[graph.GetN() - 1], 1)
    NAMECOUNTER += 1
    func.SetLineColor(color)
    func.SetLineWidth(3)
    assert(bestfit is not None)
    crossings = {}
    cross_1sig = None
    cross_2sig = None
    other_1sig = []
    other_2sig = []
    val = None
    val_2sig = None
    for yval in yvals:
        crossings[yval] = plot.FindCrossingsWithSpline(graph, func, yval)
        for cr in crossings[yval]:
            cr["contains_bf"] = cr["lo"] <= bestfit and cr["hi"] >= bestfit
    # Keep the crossing interval that brackets the best fit as the quoted
    # uncertainty; disconnected solutions are collected in other_*.
    for cr in crossings[yvals[0]]:
        if cr['contains_bf']:
            val = (bestfit, cr['hi'] - bestfit, cr['lo'] - bestfit)
            cross_1sig = cr
        else:
            other_1sig.append(cr)
    if len(yvals) > 1:
        for cr in crossings[yvals[1]]:
            if cr['contains_bf']:
                val_2sig = (bestfit, cr['hi'] - bestfit, cr['lo'] - bestfit)
                cross_2sig = cr
            else:
                other_2sig.append(cr)
    else:
        val_2sig = (0., 0., 0.)
        cross_2sig = cross_1sig
    return {
        "graph" : graph,
        "spline" : spline,
        "func" : func,
        "crossings" : crossings,
        "val" : val,
        "val_2sig": val_2sig,
        "cross_1sig" : cross_1sig,
        "cross_2sig" : cross_2sig,
        "other_1sig" : other_1sig,
        "other_2sig" : other_2sig
    }
# Positional command-line arguments.
boson = sys.argv[1]
channel = sys.argv[2]
year = sys.argv[3]
input4buildscan = sys.argv[4] #higgsCombineTest.MultiDimFit.mH200.root
othersinput = [sys.argv[5]] #higgsCombine.freezeAll.MultiDimFit.mH200.root:FreezeAll:2
isBlind = sys.argv[6]
URL_xsec = sys.argv[7]
URL_pdf_scale = sys.argv[8]
output4buildscan = boson + '_' + channel + '_' + year + '_signalstrength'
POI = 'r'
main_color = 1
# 2*deltaNLL levels for the 1-sigma and 2-sigma crossings.
yvals = [1., 4.]
y_cut = 7.
main_scan = BuildScan(output4buildscan, POI, [input4buildscan], main_color, yvals, y_cut)
# Additional (e.g. freeze-all) scans, given as "file:label:color" triplets.
other_scans = [ ]
other_scans_opts = [ ]
for oargs in othersinput:
    splitargs = oargs.split(':')
    other_scans_opts.append(splitargs)
    other_scans.append(BuildScan(output4buildscan, POI, [splitargs[0]], int(splitargs[2]), yvals, y_cut))
canv = ROOT.TCanvas(output4buildscan, output4buildscan)
pads = plot.OnePad()
main_scan['graph'].SetMarkerColor(1)
main_scan['graph'].Draw('AP')
# Widen the x axis if any other scan extends past the main one.
axishist = plot.GetAxisHist(pads[0])
new_min = axishist.GetXaxis().GetXmin()
new_max = axishist.GetXaxis().GetXmax()
mins = []
maxs = []
for other in other_scans:
    mins.append(other['graph'].GetX()[0])
    maxs.append(other['graph'].GetX()[other['graph'].GetN()-1])
if len(other_scans) > 0:
    if min(mins) < main_scan['graph'].GetX()[0]:
        new_min = min(mins) - (main_scan['graph'].GetX()[0] - new_min)
    if max(maxs) > main_scan['graph'].GetX()[main_scan['graph'].GetN()-1]:
        new_max = max(maxs) + (new_max - main_scan['graph'].GetX()[main_scan['graph'].GetN()-1])
    axishist.GetXaxis().SetLimits(new_min, new_max)
crossings = main_scan['crossings']
val_nom = main_scan['val']
val_2sig = main_scan['val_2sig']
# Uncertainty breakdown: entry 0 = total, entry 1 = stat-only (frozen) scan.
breakdown = 'Syst,Stat'
breakdown = breakdown.split(',')
v_hi = [val_nom[1]]
v_lo = [val_nom[2]]
for other in other_scans:
    v_hi.append(other['val'][1])
    v_lo.append(other['val'][2])
assert(len(v_hi) == len(breakdown))
mu = val_nom[0]
#syst
# Systematic component = sqrt(total^2 - stat^2), guarded against a negative
# argument (stat larger than total) from numerical noise.
i = 0
if (abs(v_hi[1]) > abs(v_hi[0])):
    print 'ERROR SUBTRACTION IS NEGATIVE FOR Syst HI'
    hi_syst = 0.
else:
    hi_syst = math.sqrt(v_hi[0]*v_hi[0] - v_hi[1]*v_hi[1])
if (abs(v_lo[1]) > abs(v_lo[0])):
    print 'ERROR SUBTRACTION IS NEGATIVE FOR Syst LO'
    lo_syst = 0.
else:
    lo_syst = math.sqrt(v_lo[0]*v_lo[0] - v_lo[1]*v_lo[1])
#stat
hi_stat = v_hi[1]
lo_stat = v_lo[1]
# open theo xsec
#GenXsecAndErr = open(boson + '_' + channel + '_' + year + '_theoxsec.txt').readline().strip().split('(')[1].replace(')', '').replace(' +-', '')
#
#inputXsec = float(GenXsecAndErr.split(' ')[0]) * 1000
#inputXsecStatErr = float(GenXsecAndErr.split(' ')[1]) * 1000
# now theo xsec from RIVET
# Hard-coded theory cross sections (and MC stat errors) from RIVET.
if boson == 'WGG':
    inputXsec = 18.6953
    inputXsecStatErr = 0.0321321
if boson == 'ZGG':
    inputXsec = 5.95678
    inputXsecStatErr = 0.0113444
# Measured cross section = theory xsec scaled by the fitted signal strength.
combineXsec = float(inputXsec) * float(mu)
combineXsec_syst_hi = float(inputXsec) * float(hi_syst)
combineXsec_syst_lo = float(inputXsec) * float(lo_syst)
combineXsec_stat_hi = float(inputXsec) * float(hi_stat)
combineXsec_stat_lo = float(inputXsec) * float(lo_stat)
# open and read pdf unc
# pdf+scale uncertainty (percent) parsed from a fixed line of <boson>_PDF.txt;
# only filled for the Run2 combination.
pdf_scale = 0.
if year == 'Run2':
    linetoread = -1
    if channel == 'ch_ele': linetoread = 28
    elif channel == 'ch_muo': linetoread = 34
    pdf_scale = open(boson + '_PDF.txt').readlines()
    pdf_scale = float(pdf_scale[linetoread].strip().replace('+-', '').replace('(total error)', '').replace('--> ', '').replace('%', '').split(' ')[3])
# prepare strings
# Build the LaTeX cross-section label: 'exp' when blinded, 'meas' otherwise.
if isBlind == 'blind':
    if boson == 'WGG':
        if channel == 'ch_ele':
            xsec_string = '\sigma(\PW\PGg\PGg)^\\text{{exp}}_{{\Pe\PGn}}&={0:.2f}'.format(combineXsec)
        elif channel == 'ch_muo':
            xsec_string = '\sigma(\PW\PGg\PGg)^\\text{{exp}}_{{\PGm\PGn}}&={0:.2f}'.format(combineXsec)
    elif boson == 'ZGG':
        if channel == 'ch_ele':
            xsec_string = '\sigma(\PZ\PGg\PGg)^\\text{{exp}}_{{\Pe\Pe}}&={0:.2f}'.format(combineXsec)
        elif channel == 'ch_muo':
            xsec_string = '\sigma(\PZ\PGg\PGg)^\\text{{exp}}_{{\PGm\PGm}}&={0:.2f}'.format(combineXsec)
if isBlind == 'unblind':
    if boson == 'WGG':
        if channel == 'ch_ele':
            xsec_string = '\sigma(\PW\PGg\PGg)^\\text{{meas}}_{{\Pe\PGn}}&={0:.2f}'.format(combineXsec)
        elif channel == 'ch_muo':
            xsec_string = '\sigma(\PW\PGg\PGg)^\\text{{meas}}_{{\PGm\PGn}}&={0:.2f}'.format(combineXsec)
    elif boson == 'ZGG':
        if channel == 'ch_ele':
            xsec_string = '\sigma(\PZ\PGg\PGg)^\\text{{meas}}_{{\Pe\Pe}}&={0:.2f}'.format(combineXsec)
        elif channel == 'ch_muo':
            xsec_string = '\sigma(\PZ\PGg\PGg)^\\text{{meas.}}_{{\PGm\PGm}}&={0:.2f}'.format(combineXsec)
# syststat_string = ' ^{{{0:.2f}}}_{{{1:.2f}}} \mathrm{{(syst.)}} ^{{{2:.2f}}}_{{{3:.2f}}} \mathrm{{(stat.)}} '.format(abs(combineXsec_syst_hi), abs(combineXsec_syst_lo), abs(combineXsec_stat_hi), abs(combineXsec_stat_lo))
# output
# Write the human-readable summary plus the LaTeX snippet for the paper.
output_file = open(boson + "_" + channel + "_" + year + "_xsec4paper" + "_" + isBlind + ".txt","w")
output_file.write('Generator xsec from : ' + URL_xsec + '\n')
output_file.write('pdf + scale uncertainty from : ' + URL_pdf_scale + '\n')
output_file.write('\n')
output_file.write('Generator xsec = {0} +- {1}\n'.format(inputXsec, inputXsecStatErr))
output_file.write('pdf + scale uncertainty (%) = {0}\n'.format(pdf_scale))
# Convert the percentage into an absolute uncertainty on the measured xsec.
pdf_scale = pdf_scale * combineXsec / 100.
output_file.write('Mu = {0:.2f} +{1:.2f} -{2:.2f} (Syst.) +{3:.2f} -{4:.2f} (Stat.)\n'.format(abs(mu), abs(hi_syst), abs(lo_syst), abs(hi_stat), abs(lo_stat)))
output_file.write('Combine xsec = {0:.2f} +{1:.2f} -{2:.2f} (Stat.) +{3:.2f} -{4:.2f} (Syst.) + {5:.2f} (pdf + scale)\n'.format(abs(combineXsec), abs(combineXsec_stat_hi), abs(combineXsec_stat_lo), abs(combineXsec_syst_hi), abs(combineXsec_syst_lo), abs(pdf_scale)))
output_file.write('\n')
syststat_string = '^{{+'
syststat_string += '{0:.2f}'.format(abs(combineXsec_stat_hi))
syststat_string += '}}_{{-'
syststat_string += '{0:.2f}'.format(abs(combineXsec_stat_lo))
syststat_string += '}}\stat^{{+'
syststat_string += '{0:.2f}'.format(abs(combineXsec_syst_hi))
syststat_string += '}}_{{-'
syststat_string += '{0:.2f}'.format(abs(combineXsec_syst_lo))
syststat_string += '}}\syst'
pdf_scale_string = '\pm {0:.2f}\PDFscale'.format(abs(pdf_scale))
output_file.write('Latex 4 paper : ' + xsec_string + syststat_string + pdf_scale_string + '\fb')
output_file.close()
|
import unittest
import file_under_test
class TestIt(unittest.TestCase):
    """Checks for file_under_test.is_leap_year."""

    def test_something(self):
        # Use unittest's assertion method instead of a bare
        # `assert ... == True`: failures report the actual value, and bare
        # asserts are stripped when Python runs with -O.
        self.assertTrue(file_under_test.is_leap_year(2000))
if __name__ == '__main__':
    # Discover and run the tests when executed directly.
    unittest.main()
|
# Generated by Django 2.0.1 on 2019-02-26 03:12
from django.db import migrations, models
class Migration(migrations.Migration):
    """autoTest schema changes: drop the Encryption model, rename Function's
    id field, and replace func_name with a new function_name column."""

    dependencies = [
        ('autoTest', '0030_auto_20190226_0858'),
    ]

    operations = [
        migrations.DeleteModel(
            name='Encryption',
        ),
        # func_id -> function_id: a rename, so existing values are kept.
        migrations.RenameField(
            model_name='function',
            old_name='func_id',
            new_name='function_id',
        ),
        # NOTE(review): func_name is removed and function_name added as a
        # fresh column, so any existing name data is lost (this is not a
        # rename) — confirm that was intended.
        migrations.RemoveField(
            model_name='function',
            name='func_name',
        ),
        migrations.AddField(
            model_name='function',
            name='function_name',
            field=models.CharField(blank=True, default='', max_length=50),
        ),
        migrations.AlterField(
            model_name='function',
            name='description',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
    ]
|
import sys
import numpy as np
import cv2
import pymysql
import time
import serial
# ser = serial.Serial('/dev/ttyACM0', 115200)
# NOTE(review): the indentation of this script was reconstructed during
# review (the source arrived whitespace-mangled) — verify the nesting below
# against the original before relying on it.
# Model files: OpenCV DNN face detector, Haar eye cascades, tiny-YOLOv3.
model = './data/res10_300x300_ssd_iter_140000_fp16.caffemodel'
config = './data/deploy.prototxt'
eye_cascade = cv2.CascadeClassifier('./data/haarcascade_eye.xml')
eye_cascade1 = cv2.CascadeClassifier('./data/haarcascade_eye_tree_eyeglasses.xml')
eye_cascade2 = cv2.CascadeClassifier('./data/haarcascade_lefteye_2splits.xml')
eye_cascade3 = cv2.CascadeClassifier('./data/haarcascade_righteye_2splits.xml')
model_yolo = './data/yolov3-tiny.weights'
config_yolo = './data/yolov3-tiny.cfg'
class_labels = './data/coco.names'
confThreshold = 0.5
nmsThreshold = 0.4
cap = cv2.VideoCapture(0)
#init
# Per-session state: eye detection flags/counters and DB logging flags.
eye_det_l = 0
eye_det_r = 0
frame_num = 0
sum_l_rev = 0
sum_r_rev = 0
eyeDet_nRcnt=0
eyeDet_nLcnt=0
sleep_value=50
PHONE_FLAG = 0
PHONE_AWAKE_FLAG = 0
FACE_AWAKE_FLAG = 0
FACE_NOT_DETECT_NUM = 0
EYE_NOT_DETECT_NUM = 0
EYE_AWAKE_FLAG = 0
START_FLAG = 1
START_TRAN_FLAG = 0
if not cap.isOpened():
    print('Camera open failed!')
    sys.exit()
##yolo_net read
net_yolo = cv2.dnn.readNet(model_yolo, config_yolo)
if net_yolo.empty():
    print('Net open failed!')
    sys.exit()
classes = []
with open(class_labels, 'rt') as f:
    classes = f.read().rstrip('\n').split('\n')
colors = np.random.uniform(0, 255, size=(len(classes), 3))
layer_names = net_yolo.getLayerNames()
# NOTE(review): the i[0] indexing assumes an older OpenCV where
# getUnconnectedOutLayers() returns Nx1 arrays; newer versions return a flat
# array — confirm the installed cv2 version.
output_layers = [layer_names[i[0] - 1] for i in net_yolo.getUnconnectedOutLayers()]
#dnn_net read
net = cv2.dnn.readNet(model, config)
if net.empty():
    print('Net open failed!')
    sys.exit()
#INSERT db DATA
conn = pymysql.connect(host='192.168.0.194', user='root', password='qwerasdf12', db='raspi_db', charset='utf8')
cursor = conn.cursor()
cursor.execute("select * from collect_data")
sql = 0
while True:
    # if ser.readable():
    #     START_FLAG = not START_FLAG
    if START_FLAG :
        # Log the start of a study session exactly once per ON period.
        if not START_TRAN_FLAG :
            # ser.read(1)
            sql = "INSERT INTO collect_data(TextData, LastUpdate) VALUES ('studystart', NOW())"
            cursor.execute(sql)
            conn.commit()
            START_TRAN_FLAG = 1
        ret, frame = cap.read()
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        if not ret:
            break
        ##yolo blob img
        blob_yolo = cv2.dnn.blobFromImage(frame, 1/255., (320, 320), swapRB=True)
        net_yolo.setInput(blob_yolo)
        outs_yolo = net_yolo.forward(output_layers)
        h, w = frame.shape[:2]
        class_ids = []
        confidences = []
        boxes = []
        #dnn blob img
        blob = cv2.dnn.blobFromImage(frame, 1, (300, 300), (104, 177, 123))
        net.setInput(blob)
        out = net.forward()
        detect = out[0, 0, :, :]
        (h, w) = frame.shape[:2]
        #yolo detection
        for out in outs_yolo:
            for detection in out:
                scores = detection[5:]
                class_id = np.argmax(scores)
                confidence_yolo = scores[class_id]
                if confidence_yolo > confThreshold:
                    # Convert normalised centre/size boxes to pixel corner+size.
                    cx = int(detection[0] * w)
                    cy = int(detection[1] * h)
                    bw = int(detection[2] * w)
                    bh = int(detection[3] * h)
                    sx = int(cx - bw / 2)
                    sy = int(cy - bh / 2)
                    boxes.append([sx, sy, bw, bh])
                    confidences.append(float(confidence_yolo))
                    class_ids.append(int(class_id))
        indices = cv2.dnn.NMSBoxes(boxes, confidences, confThreshold, nmsThreshold)
        #yolo cell phone detection
        # Log 'phone' once when a phone appears and 'awake' once it is gone.
        for i in indices:
            i = i[0]
            if classes[class_ids[i]] == 'cell phone' :
                if not PHONE_FLAG and not PHONE_AWAKE_FLAG :
                    PHONE_FLAG = 1
                    sql = "INSERT INTO collect_data(TextData, LastUpdate) VALUES ('phone', NOW())"
                    cursor.execute(sql)
                    conn.commit()
                    PHONE_AWAKE_FLAG = 1
                sx, sy, bw, bh = boxes[i]
                label = '{0}: {1:.2f}'.format(classes[class_ids[i]],confidences[i])
                color = colors[class_ids[i]]
                cv2.rectangle(frame, (sx, sy, bw, bh), color, 2)
                cv2.putText(frame, label, (sx, sy - 10),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.7, color, 2, cv2.LINE_AA)
            else :
                PHONE_FLAG = 0
                if PHONE_AWAKE_FLAG :
                    sql = "INSERT INTO collect_data(TextData, LastUpdate) VALUES ('awake', NOW())"
                    cursor.execute(sql)
                    conn.commit()
                    PHONE_AWAKE_FLAG = 0
        t, _ = net_yolo.getPerfProfile()
        label = 'Inference time: %.2f ms' % (t * 1000.0 / cv2.getTickFrequency())
        cv2.putText(frame, label, (10, 30), cv2.FONT_HERSHEY_SIMPLEX,
                    0.7, (0, 0, 255), 1, cv2.LINE_AA)
        #dnn face detection
        if not PHONE_FLAG :
            FACE_DETECT_FLAG = 0
            for i in range(detect.shape[0]):
                confidence = detect[i, 2]
                if confidence < 0.5:
                    # No confident face: count the miss; after ~200
                    # consecutive misses log 'sleep' once.
                    if FACE_NOT_DETECT_NUM>200 and not FACE_AWAKE_FLAG :
                        sql = "INSERT INTO collect_data(TextData, LastUpdate) VALUES ('sleep', NOW())"
                        cursor.execute(sql)
                        conn.commit()
                        FACE_AWAKE_FLAG = 1
                    FACE_NOT_DETECT_NUM+=1
                    break
                FACE_DETECT_FLAG = 1
                if FACE_AWAKE_FLAG and FACE_NOT_DETECT_NUM>199 :
                    sql = "INSERT INTO collect_data(TextData, LastUpdate) VALUES ('awake', NOW())"
                    cursor.execute(sql)
                    conn.commit()
                    FACE_AWAKE_FLAG = 0
                FACE_NOT_DETECT_NUM = 0
                x1 = int(detect[i, 3] * w)
                y1 = int(detect[i, 4] * h)
                x2 = int(detect[i, 5] * w)
                y2 = int(detect[i, 6] * h)
                cv2.rectangle(frame, (x1, y1), (x2, y2), (255,0, 255))
                # NOTE(review): h and w are rebound here from frame size to
                # face-box size; later ROI code depends on that rebinding.
                h = y2 - y1
                w = x2 - x1
                x = x1
                y = y1
                # Left/right half-face ROIs for per-eye processing.
                roi_gray_left = gray[y:y+h, (int)(x+w/2):x+w]
                roi_gray_right = gray[y:y+h, x:x+(int)(w/2)]
                roi_color_left = frame[y:y+(int)(h/2)+20, x+(int)(w/2):x+w]
                roi_color_right = frame[y:y+(int)(h/2)+20, x:x+(int)(w/2)]
            #eyedetection
            if FACE_DETECT_FLAG :
                eye_left = eye_cascade2.detectMultiScale(roi_gray_left,1.3,5,minSize=(20,20))
                eye_right = eye_cascade3.detectMultiScale(roi_gray_right,1.3,5,minSize=(20,20))
                for(ex, ey, ew, eh) in eye_left:
                    cv2.rectangle(roi_color_left,(ex,(int)(ey+ey/6)),(ex+ew,ey+eh),(0,255,0),2)
                    roi_eye_left = roi_gray_left[ey+(int)(ey/6): ey+eh, ex:ex+ew]
                for(ex, ey, ew, eh) in eye_right:
                    cv2.rectangle(roi_color_right,(ex,(int)(ey+ey/6)),(ex+ew,ey+eh),(0,255,0),2)
                    roi_eye_right = roi_gray_right[ey+(int)(ey/6): ey+eh, ex:ex+ew]
                #get_eye_edge
                # NOTE(review): `eye_left != ()` compares a numpy array with a
                # tuple; it only behaves as an emptiness test when the cascade
                # returns an empty tuple — confirm against the cv2 version.
                if (eye_left != ()) :
                    roi_eye_left_canny = roi_eye_left.copy()
                    roi_eye_left_canny_ret, roi_eye_left_canny_b = cv2.threshold(roi_eye_left_canny, 50,255, cv2.THRESH_BINARY)
                    roi_eye_left_canny_e = cv2.Canny(roi_eye_left_canny, 70, 150)
                    roi_eye_left_canny_ret, roi_eye_left_canny_e = cv2.threshold(roi_eye_left_canny_e, 50,255, cv2.THRESH_BINARY)
                    eye_det_l = 1
                    if (str(type(roi_eye_left_canny_b)) != "<type 'NoneType'>"):
                        cv2.imshow('roi_eye_left_canny', roi_eye_left_canny_e)
                else:
                    eye_det_l = 0
                    eyeDet_nLcnt+=1
                if (eye_right != ()) :
                    roi_eye_right_canny = roi_eye_right.copy()
                    roi_eye_right_canny_ret, roi_eye_right_canny_b = cv2.threshold(roi_eye_right_canny, 50,250, cv2.THRESH_BINARY)
                    roi_eye_right_canny_e = cv2.Canny(roi_eye_right_canny, 70, 150)
                    roi_eye_right_canny_ret, roi_eye_right_canny_e = cv2.threshold(roi_eye_right_canny_e, 50,255, cv2.THRESH_BINARY)
                    eye_det_r = 1
                    if (str(type(roi_eye_right_canny_e)) != "<type 'NoneType'>"):
                        cv2.imshow('roi_eye_right_canny', roi_eye_right_canny_e)
                else :
                    eye_det_r = 0
                    eyeDet_nRcnt+=1
            #sleep detection
            # Count edge pixels in each eye ROI (open eyes => many edges).
            if eye_det_l == 1 :
                sum_l = 0
                if (str(type(roi_eye_left_canny_e)) != "<type 'NoneType'>"):
                    height, width = roi_eye_left_canny_e.shape
                    for i in range(0,height) :
                        for j in range(0,width) :
                            if (str(type(roi_eye_left_canny_e)) != "<type 'NoneType'>"):
                                sum_l += roi_eye_left_canny_e[i][j]/255
            else :
                sum_l = 0
            if eye_det_r == 1 :
                sum_r = 0
                if (str(type(roi_eye_right_canny_e)) != "<type 'NoneType'>"):
                    height, width = roi_eye_right_canny_e.shape
                    for i in range(0,height) :
                        for j in range(0,width) :
                            if (str(type(roi_eye_right_canny_e)) != "<type 'NoneType'>"):
                                sum_r += roi_eye_right_canny_e[i][j]/255
            else :
                sum_r = 0
            if FACE_DETECT_FLAG :
                # Accumulate edge counts over 10-frame windows, then average
                # and compare against sleep_value.
                if (frame_num<9):
                    sum_r_rev += sum_r
                    sum_l_rev += sum_l
                    frame_num+=1
                else :
                    sum_r_rev += sum_r
                    sum_l_rev += sum_l
                    frame_num = 0
                    if(eyeDet_nLcnt<10 and eyeDet_nRcnt<10) :
                        sum_r_rev/=(10-eyeDet_nRcnt)
                        sum_l_rev/=(10-eyeDet_nLcnt)
                    else :
                        sum_l_rev = 0
                        sum_r_rev = 0
                    if ((sum_r_rev<sleep_value) and (sum_l_rev<sleep_value)) :
                        print('sleep')
                        EYE_NOT_DETECT_NUM+=1
                        if EYE_NOT_DETECT_NUM>20 and not EYE_AWAKE_FLAG:
                            sql = "INSERT INTO collect_data(TextData, LastUpdate) VALUES ('sleep', NOW())"
                            cursor.execute(sql)
                            conn.commit()
                            EYE_AWAKE_FLAG = 1
                    else :
                        print('dont sleep')
                        if EYE_NOT_DETECT_NUM > 19 and EYE_AWAKE_FLAG :
                            sql = "INSERT INTO collect_data(TextData, LastUpdate) VALUES ('awake', NOW())"
                            cursor.execute(sql)
                            conn.commit()
                            EYE_AWAKE_FLAG = 0
                        EYE_NOT_DETECT_NUM = 0
                    # NOTE(review): '{0:>d}' raises ValueError once sum_*_rev
                    # becomes a float after the /= above — confirm intended.
                    print('sum_l_rev = {0:>d}, sum_r_rev = {1:>4d}'.format(sum_l_rev, sum_r_rev))
                    eyeDet_nLcnt = 0
                    eyeDet_nRcnt = 0
                print('sum_l = {0:>4d}, sum_r = {1:>4d}'.format(sum_l, sum_r))
            else :
                sum_l_rev = 0
                sum_r_rev = 0
                eyeDet_nLcnt = 0
                eyeDet_nRcnt = 0
            #initial value
            eye_det_l = 0
            eye_det_r = 0
            roi_eye_left_canny_b = 0
            roi_eye_left_canny_e = 0
            roi_eye_left_canny_ret = 0
            roi_eye_right_canny_b = 0
            roi_eye_right_canny_e = 0
            roi_eye_right_canny_ret = 0
        cv2.imshow('frame', frame)
        k = cv2.waitKey(1)
        if k == ord('q'):
            break
    else :
        # NOTE(review): this branch runs on every loop iteration while the
        # session is off, so 'studyfinish' is inserted repeatedly — likely
        # meant to be guarded by START_TRAN_FLAG; confirm.
        START_TRAN_FLAG = 0
        sql = "INSERT INTO collect_data(TextData, LastUpdate) VALUES ('studyfinish', NOW())"
        cursor.execute(sql)
        conn.commit()
cv2.destroyAllWindows()
"""
Author: Isaac Lance, Weigang An
Date: 03/11/19
CIS422
GoalTracker
"""
#Standard imports: pytest and the file to be tested
import pytest
from Goal import Goal, SubGoal
from datetime import datetime as dt, timedelta
from GErrors import FlagError
from Model import Model
from AnalysisGenerator import AnalysisGenerator
#Setup values for tests.
m = Model([], 0, 0, {}) #create an empty Model object where everything is set to 0 or empty
#Setup goals
adue = dt(2014, 6, 12)
bdue = dt(2019, 9, 5)
cdue = dt(2020, 3, 12)
agoalInformation = {"name": "atest", "category" : "atest_category", "priority" : 1, "memo" : "testa", "dueDate" : adue}
bgoalInformation = {"name": "btest", "category" : "btest_category", "priority" : 2, "memo" : "testb", "dueDate" : bdue}
cgoalInformation = {"name": "ctest", "category" : "ctest_category", "priority" : 3, "memo" : "testc", "dueDate" : cdue}
# Three effort windows ending 5h, 7h and 1h after their respective starts.
nows = [dt.now() - timedelta(days = 2, hours = 12),
        dt.now() - timedelta(days = 2, hours = 5),
        dt.now() - timedelta(hours = 2)]
laters = [nows[0] + timedelta(hours=5), nows[1] + timedelta(hours = 7), nows[2] + timedelta(hours = 1)]
goalsinfo = [agoalInformation, bgoalInformation, cgoalInformation]
# Create and label three goals (assumes addGoal returns ids 1, 2, 3).
for i in range(3):
    gid = m.addGoal()
    m.editGoal(gid, goalsinfo[gid-1])
# NOTE(review): `gid` below is whatever the loop left behind (the last
# goal), so all efforts and both subgoals attach to that goal — confirm
# that this matches the original nesting/intent.
for i in range(3):
    m.manuallyInputEffort(gid, nows[i], laters[i])
m.editSubGoal(gid, m.addSubGoal(gid), {"name" : "test_a"})
m.editSubGoal(gid, m.addSubGoal(gid), {"name" : "test_b"})
m.completeSubGoal(gid, 2)
def test_analysis():
    """AnalysisGenerator reports active time and subgoal progress for the
    goal set up at module level."""
    m.goalList[2].startDate = nows[0]
    ag = AnalysisGenerator(gid, m)
    # presumably average hours of effort per unit time since startDate —
    # TODO confirm getActiveTime's definition against AnalysisGenerator.
    assert round(ag.getActiveTime(), 1) == 2.5
    # 1 of 2 subgoals completed -> 50% progress.
    assert ag.trackProgress() == .5
|
import spacy
import pandas as pd
import numpy as np
from spacy.lang.en import English
import nltk
from nltk.stem.wordnet import WordNetLemmatizer
import random
# NOTE(review): the loaded pipeline is discarded — tokenisation below uses
# the bare English() tokenizer instead; confirm the load is still needed.
spacy.load("en_core_web_sm")
#clean our texts and return a list of tokens
parser = English()
def tokenize(text):
    """Tokenise `text` with spaCy, replacing URLs and @-mentions with
    placeholder tokens and dropping pure-whitespace tokens."""
    out = []
    for token in parser(text):
        if token.orth_.isspace():
            continue
        if token.like_url:
            out.append('URL')
        elif token.orth_.startswith('@'):
            out.append('SCREEN_NAME')
        else:
            out.append(token.lower_)
    return out
#find the meanings of words, synonyms, antonyms, and more
# Fetch the WordNet corpus on first run (no-op if already downloaded).
nltk.download('wordnet')
from nltk.corpus import wordnet as wn
def get_lemma(word):
    """Return the WordNet base form of `word`, or `word` itself if WordNet
    has no morphological analysis for it."""
    lemma = wn.morphy(word)
    return word if lemma is None else lemma
# Built once at import: the original constructed a new WordNetLemmatizer on
# every call, repeating that setup cost per word.
_WORDNET_LEMMATIZER = WordNetLemmatizer()

def get_lemma2(word):
    """Lemmatize `word` with NLTK's WordNet lemmatizer (noun POS default)."""
    return _WORDNET_LEMMATIZER.lemmatize(word)
#Filter out stop words:
nltk.download('stopwords')
# Set lookup keeps the per-token stopword test O(1).
en_stop = set(nltk.corpus.stopwords.words('english'))
#define a function to prepare the text for topic modelling:
def prepare_text_for_lda(text):
tokens = tokenize(text)
tokens = [token for token in tokens if len(token) > 4]
tokens = [token for token in tokens if token not in en_stop]
tokens = [get_lemma(token) for token in tokens]
return tokens
#apply those function
data = pd.read_csv("/Users/sognoneve/Desktop/Unstra Data/FinalProject/pppp.csv")
# Combine title and body into one searchable document per row.
data['Arrival'] = data.Title + ' ' + data.Text
text_data=[]
# with open(data1) as f:
# Tokenise/lemmatise every combined document.
for line in data['Arrival']:
    tokens = prepare_text_for_lda(line)
    text_data.append(tokens)
data['key']=text_data
# Collect search keywords interactively until the user types 'True'.
userinput = []
crit = False
while crit == False:
    inp = input("Enter the key word and enter the True if you end: ")
    if inp == 'True':
        crit = True
    else:
        userinput.append(inp.lower())
# Score each document by the fraction of keywords it contains.
# NOTE(review): divides by len(userinput) — raises ZeroDivisionError if the
# user types 'True' immediately; confirm desired behaviour.
num = []
for j in text_data:
    count = 0
    for i in userinput:
        if i in j:
            count = count + 1
    perc = count / len(userinput)
    num.append(perc)
data['match'] = num
# Show the URLs of the five best-matching documents.
d1=data.sort_values(by=['match'],ascending=False)
d1['url'].head(5)
|
import numpy as np
import matplotlib.pyplot as plt
class System:
    """Discrete-time double integrator with a full state/input history.

    Each state column is (position, velocity); `tick` advances one step of
    dt = 0.01 s under a commanded acceleration.
    """

    def __init__(self):
        self.state = np.array([[0], [0]], dtype="float")  # (pos, vel)T
        self.acc = [0]
        self.dt = 0.01
        # x_{k+1} = A x_k + B u_k
        self.A = np.array([[1, self.dt], [0, 1]], dtype="float")
        self.B = np.array([[0], [self.dt]], dtype="float")

    def tick(self, input_u):
        """Advance one time step under acceleration `input_u`, logging both
        the new state column and the applied input."""
        latest = self.state[:, -1].reshape(-1, 1)
        advanced = self.A @ latest + self.B * input_u
        self.state = np.hstack((self.state, advanced))
        self.acc = np.hstack((self.acc, input_u))

    def getSize(self):
        """Number of recorded states (columns), including the initial one."""
        return self.state.shape[1]

    def getState(self):
        """Most recent (position, velocity) as a 1-D array."""
        return self.state[:, -1]
class Control:
def __init__(self, target):
self.last_diff = 0.0
self.iterm = 0.0
self.dt = 0.1
self.target = target
def control(self, index, state):
pos = state[0]
diff = pos - target[index]
self.iterm += diff * self.dt
dterm = (diff - self.last_diff) / self.dt
self.last_diff = diff
output = -4.0 * diff - 2.0 * dterm
return min(max(output, -4.0), 4.0)
system = System()
time_size = 700
# Reference trajectory (701 samples incl. the initial 0): hold 0 for 50
# steps, step to 1.0 for 150 steps, ramp up by 0.005/step for 200 steps,
# then drop back to 1.0 and hold.
target = [0]
target += [0.0] * 50
target += [1.0] * 150
target += [1.0 + k * 0.005 for k in range(200)]
target += [1.0] * 300
pid_control = Control(target)
# control
# Run the closed loop: the controller output is recomputed only every 10th
# tick (zero-order hold); in between, the last command is re-applied.
# Relies on `output` being bound on the first iteration (i == 0).
for i in range(time_size):
    if i%10 == 0:
        state = system.getState()
        output = pid_control.control(i, state)
    system.tick(output)
# sys_size = system.getSize()
# x = np.linspace(0, sys_size * system.dt, sys_size)
# fig = plt.figure()
# ax_1 = fig.add_subplot(211)
# ax_1.plot(x, system.acc, marker='.', markersize=10, label="acc")
# ax_1.plot(x, system.state[1,:], marker='.', markersize=10, label="vel")
# ax_1.plot(x, system.state[0,:], marker='.', markersize=10, label="pos")
# ax_1.plot(x, target, marker='.', markersize=10, label="target")
# plt.legend(loc='best')
# error = [0]
# for i in range(time_size):
# error.append(system.state[0,i] - target[i])
# ax_2 = fig.add_subplot(212)
# ax_2.plot(x, error, marker='.', markersize=10, label="error")
# plt.legend(loc='best')
# plt.show()
# Stand-alone experiment: rebuild the system matrices and check that
# np.matmul broadcasts over a stack of (4, 4) matrices.
s_dt = 0.01
s_A = np.array([[1, s_dt], [0, 1]], dtype="float")
s_B = np.array([[0], [s_dt]], dtype="float")
G = np.zeros((2, 4, 4))
print(G)
print(np.matmul(G, G))
import datetime
from datetime import timedelta
from django.db import models
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.db.models.signals import post_save, pre_save
from django.dispatch import receiver
from django.conf import settings
from django.core.mail import send_mail
# Create your models here.
class Server(models.Model):
    """Inventory record for a machine that can be reserved."""
    name = models.CharField(max_length=150)
    # NOTE(review): IPAddressField is deprecated/removed in later Django
    # versions in favour of GenericIPAddressField -- confirm Django version.
    ip_address = models.IPAddressField(default="0.0.0.0")
    operating_system = models.CharField(max_length=150, default='')
    cpu = models.CharField(max_length=150)
    memory = models.IntegerField()  # amount of memory; unit not recorded here
    operational = models.BooleanField()  # False blocks new reservations (see Reservation.clean)
    reservable = models.BooleanField()
    notes = models.CharField(max_length=300, blank=True, null=True)
    def __unicode__(self):
        # Python 2 style string representation.
        return self.name
class Reservation(models.Model):
    """Ties a Server to the User who reserved it for a date range.

    A row whose reserved_by/start_date/end_date are all None represents an
    unreserved server (see create_reservation and expire_data below).
    """
    server = models.ForeignKey(Server, blank=True, null=True)
    reserved_by = models.ForeignKey(User, blank=True, null=True)
    # BUG FIX: pass the callable, not its result.  `datetime.date.today()`
    # was evaluated once at import time, freezing the default date at the
    # moment the process started instead of the day a row is created.
    start_date = models.DateField('start date', default=datetime.date.today, blank=True, null=True)
    end_date = models.DateField('reservation end', default=datetime.date.today, blank=True, null=True)
    # NOTE(review): these are plain class attributes, not model fields, so
    # assignments to them are never persisted by save() -- confirm intended.
    started = False
    expired = False
    upcoming_warn = False

    def clean(self):
        """Validate server state, date ordering, and overlap with other reservations."""
        if (self.server.operational == False):
            raise ValidationError(u"%s claims to be unoperational." % self.server.name)
        start_overlap = False
        end_overlap = False
        if(self.start_date > self.end_date):
            raise ValidationError(u"Reservation End cannot come before the Start Date")
        all_reservations = Reservation.objects.all()
        for reservation in all_reservations:
            # Only reservations of the same server (other than this one) can conflict.
            if (self.server == reservation.server) and not (self == reservation):
                if (self.start_date == reservation.start_date):
                    raise ValidationError(u"Start date on this server matches a reservation of this server by %s." % reservation.reserved_by)
                if (self.end_date == reservation.end_date):
                    raise ValidationError(u"End date on this server matches a reservation of this server by %s." % reservation.reserved_by)
                if (reservation.start_date < self.start_date) and (self.start_date < reservation.end_date):
                    start_overlap = True
                if (reservation.start_date < self.end_date) and (self.end_date < reservation.end_date):
                    end_overlap = True
                # Grammar fix in user-facing messages: "You're" -> "Your".
                if (start_overlap and end_overlap):
                    raise ValidationError(u"Your reservation falls inside of another reservation by %s, which starts %s and ends %s." % (reservation.reserved_by, reservation.start_date, reservation.end_date))
                elif (start_overlap):
                    raise ValidationError(u"Your reservation starts inside of %s's reservation of the same server, which starts %s and ends %s." % (reservation.reserved_by, reservation.start_date, reservation.end_date))
                elif (end_overlap):
                    raise ValidationError(u"Your reservation ends inside of %s's reservation of the same server, which starts %s and ends %s." % (reservation.reserved_by, reservation.start_date, reservation.end_date))
        if(self.start_date != None):
            # Mark reservations whose start date is today or already in the past.
            if(self.start_date == datetime.date.today()) or (self.start_date < datetime.date.today()):
                self.started = True

    def __unicode__(self):
        if self.reserved_by != None:
            reserved_string = u"%s is reserved by %s" % (self.server.name, self.reserved_by)
        else:
            reserved_string = u"%s is not reserved" % self.server.name
        return reserved_string
@receiver(post_save, sender=Server)
def create_reservation(sender, instance, created, **kwargs):
    """Auto-create an empty (unreserved) Reservation row for every new Server."""
    # `created` is only True on the initial save, so updates don't add rows.
    if(created):
        reservation = Reservation(server=instance, reserved_by=None, start_date=None, end_date=None)
        reservation.save()
def check_upcoming(res):
    """E-mail the reserver once when their reservation starts within a week.

    Sets res.upcoming_warn after sending so the reminder is only sent once.
    """
    if res.started == False and res.start_date != None and res.upcoming_warn == False:
        # BUG FIX: the original computed `today - start_date`, which is
        # negative for any future reservation, so *every* future reservation
        # triggered the warning immediately.  Warn only when the start falls
        # within the next 7 days.
        time_delta = res.start_date - datetime.date.today()
        if datetime.timedelta(0) <= time_delta < datetime.timedelta(7):
            email_subject = u"[Serveservation] Reservation for %s Coming Soon" % res.server
            email_body = u"Your reservation of a server is coming soon. \n\nReservation start date: %s\nReservation end date: %s\n\nServer Name: %s\nServer IP: %s\n" % (res.start_date, res.end_date, res.server.name, res.server.ip_address)
            email_from = "david.dropinski@unboundid.com"
            email_to = [res.reserved_by.email]
            send_mail(email_subject, email_body, email_from, email_to, fail_silently=False)
            res.upcoming_warn = True
            res.save()
def check_expired(res):
    """Expire a reservation whose end date has passed: e-mail the reserver,
    then clear the reservation fields via expire_data()."""
    if (res.end_date != None) :
        time_delta = res.end_date - datetime.date.today()
        # A negative delta means the end date is strictly in the past.
        if time_delta < datetime.timedelta(0):
            res.expired = True
        if res.expired:
            email_subject = u"[Serveservation] Reservation for %s Expired" % res.server
            email_body =u"Your reservation of a server has ended. \n\nReservation start date: %s\nReservation end date: %s\n\nServer Name: %s\nServer IP: %s\n" % (res.start_date, res.end_date, res.server.name, res.server.ip_address)
            email_from="david.dropinski@unboundid.com"
            email_to=[]
            email_to.append(res.reserved_by.email)
            send_mail(email_subject, email_body, email_from, email_to, fail_silently=False)
            # Reset the row to the unreserved state.
            expire_data(res)
def expire_data(res):
    """Reset a reservation to the unreserved state and persist it."""
    for field in ('reserved_by', 'start_date', 'end_date'):
        setattr(res, field, None)
    res.expired = False
    res.save()
|
# Print the first 15 negative odd numbers, starting from -1.
# range(-1, -30, -2) yields -1, -3, ..., -29 -- exactly 15 values, the same
# sequence the original hard-coded list contained.
for impar in range(-1, -30, -2):
    print("Los primeros 15 números impares negativos desde -1 son : {}".format(impar))
# Developed by Pedro Gómez / ID:000396221
|
from math import floor
from random import randint, random
def optimize(regionDemands, numServicesRunning, debug=True):
    """
    Distribute `numServicesRunning` service instances across regions, giving
    less-demanded regions proportionally more instances, then randomly nudge
    counts until the total matches exactly.

    :param regionDemands: [demand1, demand2, demand3, ...] (non-negative numbers)
    :param numServicesRunning: 0 < numServices < 1000
    :return newState: [region1Running, region2Running, ...]; sums to
        numServicesRunning, all entries >= 0.
    """
    if debug: print("region demands", regionDemands, "sum", sum(regionDemands))
    totalDemand = sum(regionDemands)
    if regionDemands and totalDemand == 0:
        # BUG FIX: the original divided by sum(regionDemands) and crashed with
        # ZeroDivisionError on an all-zero demand list; fall back to an even split.
        base, extra = divmod(numServicesRunning, len(regionDemands))
        return [base + (1 if i < extra else 0) for i in range(len(regionDemands))]
    # Hoist min()/sum() out of the comprehension (the original re-scanned the
    # list for every element, O(n^2) for no benefit).
    minDemand = min(regionDemands)
    dataRange = max(regionDemands) - minDemand + 1
    # Initial guess: weight each region by how far below the max its demand is
    # and by its share of total demand, halved, floored to integers.
    optimized = [floor(numServicesRunning * ((1 - ((regionDemand - minDemand)/dataRange)) * (1 - (regionDemand/totalDemand)) / 2)) for regionDemand in regionDemands]
    if debug: print("optimized:", optimized, sum(optimized))  # typo fix: was "optimzed"
    if sum(optimized) < numServicesRunning:
        # Too few allocated: add one at a time (25% to a random region,
        # otherwise to the currently largest region).
        while (numServicesRunning - sum(optimized)) != 0:
            if random() < 0.25:
                optimized[randint(0, len(optimized) - 1)] += 1
            else:
                optimized[optimized.index(max(optimized))] += 1
    else:
        # Too many allocated: remove one at a time, never going below zero.
        while (sum(optimized) - numServicesRunning) != 0:
            if random() < 0.25:
                randInt = randint(0, len(optimized) - 1)
                optimized[randInt] = optimized[randInt] - 1 if optimized[randInt] > 0 else 0
            else:
                optimized[optimized.index(max(optimized))] -= 1
    if debug: print("optimized", optimized, sum(optimized), sum(optimized) == numServicesRunning)
    return optimized
|
#!/usr/bin/env python
# Import required modules
from __future__ import print_function
from future import standard_library
standard_library.install_aliases()
from builtins import str
import os
import argparse
import subprocess
import ICA_AROMA_functions as aromafunc
import shutil
# Change to script directory
cwd = os.path.realpath(os.path.curdir)  # remember the launch directory; restored at the end of the script
scriptDir = os.path.dirname(os.path.abspath(__file__))
os.chdir(scriptDir)  # run from the script's own directory so bundled resources resolve relatively
#-------------------------------------------- PARSER --------------------------------------------#
parser = argparse.ArgumentParser(description='Script to run ICA-AROMA v0.3 beta (\'ICA-based Automatic Removal Of Motion Artifacts\') on fMRI data. See the companion manual for further information.')
# Required options
reqoptions = parser.add_argument_group('Required arguments')
reqoptions.add_argument('-o', '-out', dest="outDir", required=True, help='Output directory name')
# Required options in non-Feat mode (mutually exclusive with -feat below; enforced manually after parsing)
nonfeatoptions = parser.add_argument_group('Required arguments - generic mode')
nonfeatoptions.add_argument('-i', '-in', dest="inFile", required=False, help='Input file name of fMRI data (.nii.gz)')
nonfeatoptions.add_argument('-mc', dest="mc", required=False, help='File name of the motion parameters obtained after motion realingment (e.g., FSL mcflirt). Note that the order of parameters does not matter, should your file not originate from FSL mcflirt. (e.g., /home/user/PROJECT/SUBJECT.feat/mc/prefiltered_func_data_mcf.par')
nonfeatoptions.add_argument('-a', '-affmat', dest="affmat", default="", help='File name of the mat-file describing the affine registration (e.g., FSL FLIRT) of the functional data to structural space (.mat file). (e.g., /home/user/PROJECT/SUBJECT.feat/reg/example_func2highres.mat')
nonfeatoptions.add_argument('-w', '-warp', dest="warp", default="", help='File name of the warp-file describing the non-linear registration (e.g., FSL FNIRT) of the structural data to MNI152 space (.nii.gz). (e.g., /home/user/PROJECT/SUBJECT.feat/reg/highres2standard_warp.nii.gz')
nonfeatoptions.add_argument('-m', '-mask', dest="mask", default="", help='File name of the mask to be used for MELODIC (denoising will be performed on the original/non-masked input data)')
# Required options in Feat mode
featoptions = parser.add_argument_group('Required arguments - FEAT mode')
featoptions.add_argument('-f', '-feat', dest="inFeat", required=False, help='Feat directory name (Feat should have been run without temporal filtering and including registration to MNI152)')
# Optional options
optoptions = parser.add_argument_group('Optional arguments')
optoptions.add_argument('-tr', dest="TR", help='TR in seconds', type=float)
optoptions.add_argument('-den', dest="denType", default="nonaggr", help='Type of denoising strategy: \'no\': only classification, no denoising; \'nonaggr\': non-aggresssive denoising (default); \'aggr\': aggressive denoising; \'both\': both aggressive and non-aggressive denoising (seperately)')
optoptions.add_argument('-md', '-meldir', dest="melDir", default="",help='MELODIC directory name, in case MELODIC has been run previously.')
optoptions.add_argument('-dim', dest="dim", default=0, help='Dimensionality reduction into #num dimensions when running MELODIC (default: automatic estimation; i.e. -dim 0)', type=int)
optoptions.add_argument('-ow', '-overwrite', dest="overwrite", action='store_true', help='Overwrite existing output', default=False)
# Plots are generated by default; -np switches generate_plots to False.
optoptions.add_argument('-np', '-noplots', dest="generate_plots", action='store_false', help='Plot component classification overview similar to plot in the main AROMA paper', default=True)
print('\n------------------------------- RUNNING ICA-AROMA ------------------------------- ')
print('--------------- \'ICA-based Automatic Removal Of Motion Artifacts\' --------------- \n')
#--------------------------------------- PARSE ARGUMENTS ---------------------------------------#
args = parser.parse_args()
# Define variables based on the type of input (i.e. Feat directory or specific input arguments), and check whether the specified files exist.
cancel = False
if args.inFeat:
    # FEAT mode: derive every input path from the Feat directory layout.
    inFeat = args.inFeat
    # Check whether the Feat directory exists
    if not os.path.isdir(inFeat):
        print('The specified Feat directory does not exist.')
        print('\n----------------------------- ICA-AROMA IS CANCELED -----------------------------\n')
        exit()
    # Define the variables which should be located in the Feat directory
    inFile = os.path.join(args.inFeat, 'filtered_func_data.nii.gz')
    mc = os.path.join(args.inFeat, 'mc', 'prefiltered_func_data_mcf.par')
    affmat = os.path.join(args.inFeat, 'reg', 'example_func2highres.mat')
    warp = os.path.join(args.inFeat, 'reg', 'highres2standard_warp.nii.gz')
    # Check whether these files actually exist
    if not os.path.isfile(inFile):
        print('Missing filtered_func_data.nii.gz in Feat directory.')
        cancel = True
    if not os.path.isfile(mc):
        print('Missing mc/prefiltered_func_data_mcf.mat in Feat directory.')
        cancel = True
    if not os.path.isfile(affmat):
        print('Missing reg/example_func2highres.mat in Feat directory.')
        cancel = True
    if not os.path.isfile(warp):
        print('Missing reg/highres2standard_warp.nii.gz in Feat directory.')
        cancel = True
    # Check whether a melodic.ica directory exists; if so, reuse it instead of -md.
    if os.path.isdir(os.path.join(args.inFeat, 'filtered_func_data.ica')):
        melDir = os.path.join(args.inFeat, 'filtered_func_data.ica')
    else:
        melDir = args.melDir
else:
    # Generic mode: take all paths from the individual arguments.
    inFile = args.inFile
    mc = args.mc
    affmat = args.affmat
    warp = args.warp
    melDir = args.melDir
    # Check whether the files exist
    if not inFile:
        print('No input file specified.')
    else:
        if not os.path.isfile(inFile):
            print('The specified input file does not exist.')
            cancel = True
    if not mc:
        print('No mc file specified.')
    else:
        # NOTE(review): "does does" typo lives in the user-facing string below.
        if not os.path.isfile(mc):
            print('The specified mc file does does not exist.')
            cancel = True
    if affmat:
        if not os.path.isfile(affmat):
            print('The specified affmat file does not exist.')
            cancel = True
    if warp:
        if not os.path.isfile(warp):
            print('The specified warp file does not exist.')
            cancel = True
# Parse the arguments which do not depend on whether a Feat directory has been specified
outDir = args.outDir
dim = args.dim
denType = args.denType
# Check if the mask exists, when specified.
if args.mask:
    if not os.path.isfile(args.mask):
        print('The specified mask does not exist.')
        cancel = True
# Check if the type of denoising is correctly specified, when specified
if not (denType == 'nonaggr') and not (denType == 'aggr') and not (denType == 'both') and not (denType == 'no'):
    print('Type of denoising was not correctly specified. Non-aggressive denoising will be run.')
    denType = 'nonaggr'
# If the criteria for file/directory specifications have not been met. Cancel ICA-AROMA.
if cancel:
    print('\n----------------------------- ICA-AROMA IS CANCELED -----------------------------\n')
    exit()
#------------------------------------------- PREPARE -------------------------------------------#
# Define the FSL-bin directory (trailing '' keeps a path separator at the end).
fslDir = os.path.join(os.environ["FSLDIR"], 'bin', '')
# Create output directory if needed
if os.path.isdir(outDir) and args.overwrite is False:
    print('Output directory', outDir, """already exists.
AROMA will not continue.
Rerun with the -overwrite option to explicitly overwrite existing output.""")
    exit()
elif os.path.isdir(outDir) and args.overwrite is True:
    print('Warning! Output directory', outDir, 'exists and will be overwritten.\n')
    shutil.rmtree(outDir)
    os.makedirs(outDir)
else:
    os.makedirs(outDir)
# Get TR of the fMRI data, if not specified
if args.TR:
    TR = args.TR
else:
    # Read pixdim4 (the repetition time) from the NIfTI header via fslinfo.
    cmd = ' '.join([os.path.join(fslDir, 'fslinfo'),
                    inFile,
                    '| grep pixdim4 | awk \'{print $2}\''])
    TR = float(subprocess.getoutput(cmd))
# Check TR
if TR == 1:
    # A TR of exactly 1 s is a common bogus header default, so ask the user to verify.
    print('Warning! Please check whether the determined TR (of ' + str(TR) + 's) is correct!\n')
elif TR == 0:
    print('TR is zero. ICA-AROMA requires a valid TR and will therefore exit. Please check the header, or define the TR as an additional argument.\n----------------------------- ICA-AROMA IS CANCELED -----------------------------\n')
    exit()
# Define/create mask. Either by making a copy of the specified mask, or by creating a new one.
mask = os.path.join(outDir, 'mask.nii.gz')
if args.mask:
    shutil.copyfile(args.mask, mask)
else:
    # If a Feat directory is specified, and an example_func is present use example_func to create a mask
    if args.inFeat and os.path.isfile(os.path.join(inFeat, 'example_func.nii.gz')):
        # Brain-extract example_func with bet, then keep only the binary mask.
        os.system(' '.join([os.path.join(fslDir, 'bet'),
                            os.path.join(inFeat, 'example_func.nii.gz'),
                            os.path.join(outDir, 'bet'),
                            '-f 0.3 -n -m -R']))
        os.system(' '.join(['mv',
                            os.path.join(outDir, 'bet_mask.nii.gz'),
                            mask]))
        if os.path.isfile(os.path.join(outDir, 'bet.nii.gz')):
            os.remove(os.path.join(outDir, 'bet.nii.gz'))
    else:
        if args.inFeat:
            print(' - No example_func was found in the Feat directory. A mask will be created including all voxels with varying intensity over time in the fMRI data. Please check!\n')
        # Fallback: binarise the temporal standard deviation of the input data.
        os.system(' '.join([os.path.join(fslDir, 'fslmaths'),
                            inFile,
                            '-Tstd -bin',
                            mask]))
#---------------------------------------- Run ICA-AROMA ----------------------------------------#
print('Step 1) MELODIC')
aromafunc.runICA(fslDir, inFile, outDir, melDir, mask, dim, TR)
print('Step 2) Automatic classification of the components')
print(' - registering the spatial maps to MNI')
melIC = os.path.join(outDir, 'melodic_IC_thr.nii.gz')
melIC_MNI = os.path.join(outDir, 'melodic_IC_thr_MNI2mm.nii.gz')
aromafunc.register2MNI(fslDir, melIC, melIC_MNI, affmat, warp)
print(' - extracting the CSF & Edge fraction features')
edgeFract, csfFract = aromafunc.feature_spatial(fslDir, outDir, scriptDir, melIC_MNI)
print(' - extracting the Maximum RP correlation feature')
melmix = os.path.join(outDir, 'melodic.ica', 'melodic_mix')
maxRPcorr = aromafunc.feature_time_series(melmix, mc)
print(' - extracting the High-frequency content feature')
melFTmix = os.path.join(outDir, 'melodic.ica', 'melodic_FTmix')
HFC = aromafunc.feature_frequency(melFTmix, TR)
print(' - classification')
motionICs = aromafunc.classification(outDir, maxRPcorr, edgeFract, HFC, csfFract)
if args.generate_plots:
    # Imported only when plots are requested (disabled via the -np flag).
    from classification_plots import classification_plot
    classification_plot(os.path.join(outDir, 'classification_overview.txt'),
                        outDir)
if (denType != 'no'):
    print('Step 3) Data denoising')
    aromafunc.denoising(fslDir, inFile, outDir, melmix, denType, motionICs)
# Revert to old directory
os.chdir(cwd)
print('\n----------------------------------- Finished -----------------------------------\n')
|
# Inherit the HTC X-101-64x4d FPN 20-epoch COCO config and extend training.
_base_ = '../htc/htc_x101_64x4d_fpn_16x1_20e_coco.py'
# learning policy
# Step LR decay at epochs 24 and 27; train for 28 epochs total.
lr_config = dict(step=[24, 27])
runner = dict(type='EpochBasedRunner', max_epochs=28)
|
# coding: utf-8
# In[1]:
import xml.etree.cElementTree as ET
import pprint
import re
from collections import defaultdict
import csv
import codecs
import cerberus
import sqlite3
import schema
# In[20]:
#!/usr/bin/env python
#Section 1: Get partial records from original xml file
OSM_FILE = "shanghai_china.osm" # Replace this with your osm file
SAMPLE_FILE = "shanghai_sample.osm"
k = 100 # Parameter: take every k-th top level element
def get_element(osm_file, tags=('node', 'way', 'relation')):
    """Yield element if it is the right type of tag
    Reference:
    http://stackoverflow.com/questions/3095434/inserting-newlines-in-xml-file-generated-via-xml-etree-elementtree-in-python
    """
    # Incremental parse so the whole (large) OSM file is never held in memory.
    context = iter(ET.iterparse(osm_file, events=('start', 'end')))
    _, root = next(context)
    for event, elem in context:
        if event == 'end' and elem.tag in tags:
            yield elem
            # Drop already-yielded children from the root to cap memory usage.
            root.clear()
# NOTE(review): Python 2 style -- str written to a binary-mode file and mixed
# with ET.tostring() output; under Python 3 this raises TypeError.  Confirm
# the intended interpreter before porting.
with open(SAMPLE_FILE, 'wb') as output:
    output.write('<?xml version="1.0" encoding="UTF-8"?>\n')
    output.write('<osm>\n ')
    # Write every kth top level element
    for i, element in enumerate(get_element(OSM_FILE)):
        if i % k == 0:
            output.write(ET.tostring(element, encoding='utf-8'))
    output.write('</osm>')
# In[2]:
# Work on the 1-in-100 sample produced above rather than the full extract.
shanghai = "shanghai_sample.osm" #smaller file for faster process
# In[11]:
# Section 2: get number of tags
def count_tags(filename):
    """Return a dict mapping each element tag in the XML file to its count."""
    counts = {}
    for node in ET.parse(filename).getroot().iter():
        counts[node.tag] = counts.get(node.tag, 0) + 1
    return counts
# In[21]:
# Tally of element types in the sample (notebook cell; value displayed interactively).
count_tags(shanghai)
# In[23]:
#Section 3: Check to see the types of tags
lower = re.compile(r'^([a-z]|_)*$')  # plain lowercase keys, e.g. "highway"
lower_colon = re.compile(r'^([a-z]|_)*:([a-z]|_)*$')  # namespaced keys, e.g. "addr:street"
problemchars = re.compile(r'[=\+/&<>;\'"\?%#$@\,\. \t\r\n]')  # characters unsafe for CSV/SQL columns
def key_type(element, keys):
    """Bucket a <tag> element's 'k' attribute into one of the pattern
    counters in `keys` (lower / lower_colon / problemchars / other)."""
    if element.tag != "tag":
        return keys
    k = element.attrib["k"]
    if lower.search(k):
        bucket = "lower"
    elif lower_colon.search(k):
        bucket = "lower_colon"
    elif problemchars.search(k):
        bucket = "problemchars"
    else:
        bucket = "other"
    keys[bucket] += 1
    return keys
def find_colon(element, c_list):
    """Count occurrences of each colon-separated (namespaced) tag key in c_list."""
    if element.tag == 'tag' and lower_colon.search(element.attrib["k"]):
        key = element.attrib["k"]
        c_list[key] = c_list.get(key, 0) + 1
    return c_list
def process_map(filename):
    """Stream the OSM file once, returning (key-pattern counters, colon-key histogram)."""
    keys = {"lower": 0, "lower_colon": 0, "problemchars": 0, "other": 0}
    colon_counts = {}
    for _, element in ET.iterparse(filename):
        keys = key_type(element, keys)
        colon_counts = find_colon(element, colon_counts)
    return keys, colon_counts
# In[24]:
keys = process_map(shanghai)  # (pattern counters, colon-key histogram) tuple
pprint.pprint(keys)
# In[25]:
# Section 4 Optional: inspect the distinct values stored under name:en
def process_road_name(filename):
    """Return the set of unique English road names (k='name:en') in the file."""
    found = set()
    for _, element in ET.iterparse(filename):
        if element.tag == 'tag' and element.attrib['k'] == 'name:en':
            found.add(element.attrib['v'])
    return found
# In[26]:
pprint.pprint(process_road_name(shanghai))  # eyeball the raw English street names
# In[27]:
#Section 4: Check to see possible errors
OSMFILE = "shanghai_sample.osm"
street_type_re = re.compile(r'\b\S+\.?$', re.IGNORECASE)  # last word of a street name
ewsn_street_re = re.compile(r"\(([ewsn]|.)*\)", re.IGNORECASE)  # parenthesised direction, e.g. "(S)"
# Street-type suffixes considered correct as-is.
# NOTE(review): "River" appears twice in this list -- harmless but redundant.
expected = ["Street", "Avenue", "Boulevard", "Drive", "Court", "Place", "Square", "Lane", "Road",
            "Trail", "Parkway", "Commons", "Expressway", "Highway", "Tunnel", "River", "Campus", "Park", "River", "Mall", "Plaza", "Bridge", "Museum", "School"]
# UPDATE THIS VARIABLE
# Abbreviation / pinyin ("Lu") / direction -> canonical replacement.
mapping = { "St": "Street",
            "St.": "Street",
            "Hwy": "Highway",
            "Hwy.": "Highway",
            "Rd.": "Road",
            "Rd": "Road",
            "Ave": "Avenue",
            "Ave.": "Avenue",
            "(S)": "South",
            "(N)": "North",
            "(W)": "West",
            "(E)": "East",
            "(S.)": "South",
            "(N.)": "North",
            "(W.)": "West",
            "(E.)": "East",
            "Lu": "Road",
            }
def audit_street_type(street_types, street_name):
    """Record street_name under its trailing word when that word is not an
    expected street type (mutates the `street_types` dict of sets)."""
    match = street_type_re.search(street_name)
    if not match:
        return
    suffix = match.group()
    if suffix not in expected:
        street_types[suffix].add(street_name)
def is_english_name(elem):
    """True when the tag element holds the English name (k == 'name:en')."""
    return elem.attrib['k'] == "name:en"
def audit(osmfile):
    """Collect English street names (k='name:en' tags on nodes/ways) whose
    trailing word is not one of the expected street types.

    Returns a defaultdict(set): {unexpected_suffix: set(street names)}.
    """
    osm_file = open(osmfile, "r")
    street_types = defaultdict(set)
    for _, elem in ET.iterparse(osm_file):
        if elem.tag == "node" or elem.tag == "way":
            for tag in elem.iter("tag"):
                if is_english_name(tag):
                    audit_street_type(street_types, tag.attrib['v'])
                    #tag.attrib['v'] = update_name(tag.attrib['v'], mapping)
    osm_file.close()
    return street_types
#Section 5: name change function
def update_name(name, mapping):
    """Normalize an English street name using `mapping`.

    Passes:
      1. replace the name's last word via `mapping` ("Nanjing Rd" -> "Nanjing Road");
      2. replace a parenthesised direction group via `mapping` ("(S)" -> "South");
      3. after a direction match, rotate the last word to the front and retry
         the last-word replacement, to catch a street type that ended up
         second-to-last after pass 2.
    NOTE(review): pass 3 is order-sensitive and only runs when the "(...)"
    pattern matched -- verify against real data before restructuring.
    """
    m = street_type_re.search(name)
    eswn = ewsn_street_re.search(name)
    better_name = name
    # condition: if the street name does have a last word
    if m:
        # check if the street type is a key in your mapping dictionary:
        if m.group() in mapping.keys():
            better_street_type = mapping[m.group()]
            better_name = street_type_re.sub(better_street_type, name)
    # if road ends in (S) and similar types, make correction.
    if eswn:
        if eswn.group() in mapping.keys():
            better_street_type = mapping[eswn.group()]
            better_name = ewsn_street_re.sub(better_street_type, name)
        # Rotate the last word to the front, then retry the suffix replacement.
        st_list = better_name.split()
        old_end = st_list[-1]
        del st_list[-1]
        st_list.insert(0, old_end)
        not_better_name = ' '.join(st_list)
        d = street_type_re.search(not_better_name)
        if d:
            if d.group() in mapping.keys():
                better_street_type = mapping[d.group()]
                better_name = street_type_re.sub(better_street_type, not_better_name)
    return better_name
def test():
    """Audit the sample file and print each flagged name with its proposed fix."""
    st_types = audit(OSMFILE)
    pprint.pprint(dict(st_types))
    for st_type, ways in st_types.items():
        for name in ways:
            better_name = update_name(name, mapping)
            print(name, "=>", better_name)


if __name__ == '__main__':
    test()
# In[28]:
#!/usr/bin/env python
# Section 6: make corrections and save the result to csv
OSM_PATH = "shanghai_sample.osm"
NODES_PATH = "nodes.csv"
NODE_TAGS_PATH = "nodes_tags.csv"
WAYS_PATH = "ways.csv"
WAY_NODES_PATH = "ways_nodes.csv"
WAY_TAGS_PATH = "ways_tags.csv"
LOWER_COLON = re.compile(r'^([a-z]|_)+:([a-z]|_)+')  # namespaced keys like addr:street
PROBLEMCHARS = re.compile(r'[=\+/&<>;\'"\?%#$@\,\. \t\r\n]')  # keys containing these are skipped
SCHEMA = schema.schema  # cerberus validation schema for shaped elements
# Make sure the fields order in the csvs matches the column order in the sql table schema
NODE_FIELDS = ['id', 'lat', 'lon', 'user', 'uid', 'version', 'changeset', 'timestamp']
NODE_TAGS_FIELDS = ['id', 'key', 'value', 'type']
WAY_FIELDS = ['id', 'user', 'uid', 'version', 'changeset', 'timestamp']
WAY_TAGS_FIELDS = ['id', 'key', 'value', 'type']
WAY_NODES_FIELDS = ['id', 'node_id', 'position']
def _shape_tag(child, elem_id, problem_chars, default_tag_type):
    """Shape one <tag> child into a CSV row dict, or None if its key contains
    problem characters.  'name:en' values are corrected via update_name()."""
    if problem_chars.search(child.attrib['k']):
        return None
    row = {'id': elem_id}
    if child.attrib['k'] == 'name:en':
        row['value'] = update_name(child.attrib['v'], mapping)
    else:
        row['value'] = child.attrib['v']
    if LOWER_COLON.search(child.attrib['k']):
        # Split a namespaced key into (type, key) at the first colon.
        colon_position = child.attrib['k'].find(':')
        row['key'] = child.attrib['k'][colon_position + 1:]
        row['type'] = child.attrib['k'][:colon_position]
    else:
        row['key'] = child.attrib['k']
        row['type'] = default_tag_type
    return row


def shape_element(element, node_attr_fields=NODE_FIELDS, way_attr_fields=WAY_FIELDS,
                  problem_chars=PROBLEMCHARS, default_tag_type='regular'):
    """Clean and shape node or way XML element to Python dict

    FIX: the node and way branches contained two verbatim copies of the
    tag-shaping logic; both now delegate to _shape_tag().  That helper also
    honours the `default_tag_type` parameter, which the original accepted
    but ignored (it hard-coded 'regular'); callers using the default see
    identical behaviour.
    """
    node_attribs = {}
    way_attribs = {}
    way_nodes = []
    tags = []  # Handle secondary tags the same way for both node and way elements
    if element.tag == 'node':
        for key in element.attrib.keys():
            if key in node_attr_fields:
                node_attribs[key] = element.attrib[key]
        for child in element:
            if child.tag == 'tag':
                row = _shape_tag(child, element.attrib['id'], problem_chars, default_tag_type)
                if row is not None:
                    tags.append(row)
    if element.tag == 'way':
        for key in element.attrib.keys():
            if key in way_attr_fields:
                way_attribs[key] = element.attrib[key]
        position = 0  # order of the node references within the way
        for child in element:
            if child.tag == 'nd':
                way_nodes.append({'id': element.attrib['id'],
                                  'node_id': child.attrib['ref'],
                                  'position': position})
                position += 1
            if child.tag == 'tag':
                row = _shape_tag(child, element.attrib['id'], problem_chars, default_tag_type)
                if row is not None:
                    tags.append(row)
    if element.tag == 'node':
        return {'node': node_attribs, 'node_tags': tags}
    elif element.tag == 'way':
        return {'way': way_attribs, 'way_nodes': way_nodes, 'way_tags': tags}
# ================================================== #
#               Helper Functions                     #
# ================================================== #
def get_element(osm_file, tags=('node', 'way', 'relation')):
    """Yield each completed top-level element of the requested tag types,
    clearing the root after each yield to keep memory flat."""
    context = ET.iterparse(osm_file, events=('start', 'end'))
    _, root = next(context)
    for event, elem in context:
        if event != 'end' or elem.tag not in tags:
            continue
        yield elem
        root.clear()
def validate_element(element, validator, schema=SCHEMA):
    """Raise ValidationError if element does not match schema"""
    if validator.validate(element, schema) is not True:
        # NOTE(review): dict.iteritems() is Python 2 only -- this module is
        # written for Python 2 throughout (see also `unicode` below).
        field, errors = next(validator.errors.iteritems())
        message_string = "\nElement of type '{0}' has the following errors:\n{1}"
        error_string = pprint.pformat(errors)
        raise Exception(message_string.format(field, error_string))
class UnicodeDictWriter(csv.DictWriter, object):
    """Extend csv.DictWriter to handle Unicode input"""
    def writerow(self, row):
        # Encode every unicode value to UTF-8 bytes before the (Python 2)
        # csv module writes it; non-string values pass through unchanged.
        super(UnicodeDictWriter, self).writerow({
            k: (v.encode('utf-8') if isinstance(v, unicode) else v) for k, v in row.iteritems()
        })
    def writerows(self, rows):
        # Delegate row-by-row so each row gets the encoding treatment above.
        for row in rows:
            self.writerow(row)
# ================================================== #
#               Main Function                        #
# ================================================== #
def process_map(file_in, validate):
    """Iteratively process each XML element and write to csv(s)"""
    # Single streaming pass over the OSM file: each shaped element is appended
    # to the appropriate CSV, so memory use stays flat regardless of file size.
    with codecs.open(NODES_PATH, 'w') as nodes_file, codecs.open(NODE_TAGS_PATH, 'w') as nodes_tags_file, codecs.open(WAYS_PATH, 'w') as ways_file, codecs.open(WAY_NODES_PATH, 'w') as way_nodes_file, codecs.open(WAY_TAGS_PATH, 'w') as way_tags_file:
        nodes_writer = UnicodeDictWriter(nodes_file, NODE_FIELDS)
        node_tags_writer = UnicodeDictWriter(nodes_tags_file, NODE_TAGS_FIELDS)
        ways_writer = UnicodeDictWriter(ways_file, WAY_FIELDS)
        way_nodes_writer = UnicodeDictWriter(way_nodes_file, WAY_NODES_FIELDS)
        way_tags_writer = UnicodeDictWriter(way_tags_file, WAY_TAGS_FIELDS)
        nodes_writer.writeheader()
        node_tags_writer.writeheader()
        ways_writer.writeheader()
        way_nodes_writer.writeheader()
        way_tags_writer.writeheader()
        validator = cerberus.Validator()
        for element in get_element(file_in, tags=('node', 'way')):
            el = shape_element(element)
            if el:
                if validate is True:
                    validate_element(el, validator)
                if element.tag == 'node':
                    nodes_writer.writerow(el['node'])
                    node_tags_writer.writerows(el['node_tags'])
                elif element.tag == 'way':
                    ways_writer.writerow(el['way'])
                    way_nodes_writer.writerows(el['way_nodes'])
                    way_tags_writer.writerows(el['way_tags'])


if __name__ == '__main__':
    # Note: Validation is ~ 10X slower. For the project consider using a small
    # sample of the map when validating.
    process_map(OSM_PATH, validate=True)
# In[32]:
#Section 7: Create db file from all csv files
# NOTE(review): the .decode("utf-8") calls below treat csv values as byte
# strings -- Python 2 behaviour; under Python 3 str has no .decode.
osmdb = 'osm2.db'
connection = sqlite3.connect(osmdb)
write_cursor = connection.cursor()
write_cursor.execute('''
CREATE TABLE nodes(id INTEGER, lat TEXT, lon TEXT, user TEXT, uid INTEGER, version TEXT, changeset TEXT, timestamp TEXT)''')
connection.commit()
with open('nodes.csv', 'r') as csvfile:
    middleman = csv.DictReader(csvfile) # comma is default delimiter
    to_db = [(i['id'].decode("utf-8"), i['lat'].decode("utf-8"), i['lon'].decode("utf-8"), i['user'].decode("utf-8"), i["uid"].decode("utf-8"), i["version"].decode("utf-8"),i["changeset"].decode("utf-8"),i["timestamp"].decode("utf-8")) for i in middleman]
write_cursor.executemany("INSERT INTO nodes (id, lat, lon, user, uid, version, changeset, timestamp) VALUES (?,?,?,?,?,?,?,?);", to_db)
connection.commit()
connection.close()
# In[40]:
connection = sqlite3.connect(osmdb)
write_cursor = connection.cursor()
write_cursor.execute('''
CREATE TABLE nodes_tags(id INTEGER, key TEXT, value TEXT, type TEXT)''')
connection.commit()
with open('nodes_tags.csv', 'r') as csvfile:
    middleman = csv.DictReader(csvfile) # comma is default delimiter
    to_db = [(i['id'].decode("utf-8"), i['key'].decode("utf-8"), i['value'].decode("utf-8"), i['type'].decode("utf-8")) for i in middleman]
write_cursor.executemany("INSERT INTO nodes_tags(id, key, value, type) VALUES (?,?,?,?);", to_db)
connection.commit()
connection.close()
# In[44]:
# Drop a stale ways_nodes table from an earlier run before recreating it below.
connection = sqlite3.connect(osmdb)
write_cursor = connection.cursor()
write_cursor.execute('''DROP TABLE ways_nodes ''')
connection.commit()
# In[41]:
connection = sqlite3.connect(osmdb)
write_cursor = connection.cursor()
write_cursor.execute('''
CREATE TABLE ways(id INTEGER, user TEXT, uid TEXT, version TEXT, changeset TEXT, timestamp TEXT)''')
connection.commit()
with open('ways.csv', 'r') as csvfile:
    middleman = csv.DictReader(csvfile) # comma is default delimiter
    to_db = [(i['id'].decode("utf-8"), i['user'].decode("utf-8"), i['uid'].decode("utf-8"), i['version'].decode("utf-8"), i["changeset"].decode("utf-8"), i["timestamp"].decode("utf-8")) for i in middleman]
write_cursor.executemany("INSERT INTO ways (id, user, uid, version, changeset, timestamp) VALUES (?,?,?,?,?,?);", to_db)
connection.commit()
connection.close()
# In[42]:
connection = sqlite3.connect(osmdb)
write_cursor = connection.cursor()
write_cursor.execute('''
CREATE TABLE ways_tags(id INTEGER, key TEXT, value TEXT, type TEXT)''')
connection.commit()
with open('ways_tags.csv', 'r') as csvfile:
    middleman = csv.DictReader(csvfile) # comma is default delimiter
    to_db = [(i['id'].decode("utf-8"), i['key'].decode("utf-8"), i['value'].decode("utf-8"), i['type'].decode("utf-8")) for i in middleman]
write_cursor.executemany("INSERT INTO ways_tags(id, key, value, type) VALUES (?,?,?,?);", to_db)
connection.commit()
connection.close()
# In[45]:
connection = sqlite3.connect(osmdb)
write_cursor = connection.cursor()
write_cursor.execute('''
CREATE TABLE ways_nodes(id INTEGER, node_id INTEGER, position INTEGER)''')
connection.commit()
with open('ways_nodes.csv', 'r') as csvfile:
    middleman = csv.DictReader(csvfile) # comma is default delimiter
    to_db = [(i['id'].decode("utf-8"), i['node_id'].decode("utf-8"), i['position'].decode("utf-8")) for i in middleman]
write_cursor.executemany("INSERT INTO ways_nodes(id, node_id, position) VALUES (?,?,?);", to_db)
connection.commit()
connection.close()
# In[4]:
#section 8: Additional error
def process_regular_name(filename):
    """Return the set of unique values stored under generic k='name' tags."""
    names = set()
    for _, element in ET.iterparse(filename):
        if element.tag == 'tag' and element.attrib['k'] == 'name':
            names.add(element.attrib['v'])
    return names
# Print every distinct generic-name (k='name') value found in the sample.
pprint.pprint(process_regular_name(shanghai))
# In[ ]:
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.