blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2
values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313
values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107
values | src_encoding stringclasses 20
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 4 6.02M | extension stringclasses 78
values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
11c2af59915f855e5b91696c43428b3b72ded724 | a08d62043d2f78215d6ba19079b931dab39618be | /series.py | 047e2d1de6ea98b9b8b7e81f0592b7b6d6f76182 | [] | no_license | deschila/Blob | 52c0b69cc9b7f3ed99e08a478e05283979043b00 | 6269194be613476eb8d6c156232946738f5b661a | refs/heads/master | 2016-09-01T16:55:52.081404 | 2014-03-06T16:39:51 | 2014-03-06T16:40:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,241 | py | '''
Created on Feb 26, 2014
@author: deschild
'''
import scipy.signal
import pylab
import numpy
from numpy import pi,exp,sqrt
sigmas=[0.25,0.5,1,2,4,8,16,32,64,128]
g=[]
def mexican_hat(a, sigma):
    """Return a sampled "mexican hat" (negated Laplacian-of-Gaussian) kernel.

    a     -- approximate kernel width; samples are taken at integer offsets
             from -a//2 to a//2 inclusive, so the result has a//2*2 + 1 points.
    sigma -- scale of the kernel.

    Returns a 1-D numpy array; the value at the centre (x == 0) is
    1/(sigma*sqrt(2*pi)) scaled by sigma**2, i.e. positive peak at the centre.
    """
    # Force float arithmetic: the caller passes ints from `sigmas`, and under
    # Python 2 (this script uses raw_input) 1/sigma**2 would truncate to 0.
    sigma = float(sigma)
    x = numpy.arange(-a // 2, a // 2 + 1)
    return -sigma**2 / (sigma * sqrt(2 * pi)) * (x**2 / sigma**4 - 1 / sigma**2) * exp(-x**2 / (2 * sigma**2))
# Build one gaussian (g) and one mexican-hat (m) kernel per sigma.
m=[]
# m=mexican_hat(100,1)
# pylab.plt.plot(m)
# pylab.show()
for sigma in sigmas:
    # Kernel window spans ~10 sigma; force an odd width so there is a
    # single centre sample.
    w=10*sigma
    if w%2==0:w+=1
    c=(scipy.signal.gaussian(w, sigma))
    g.append(c)
    b=mexican_hat(w, sigma)
    m.append(b)
#     pylab.plot(w,b)
# The "raw" test signal is all gaussian kernels concatenated end to end.
raw=numpy.concatenate(g)
# # pylab.imshow((b,b))
# pylab.show()
# Difference-of-Gaussians: convolve raw with each normalised gaussian and
# take successive differences (b holds the previous scale's response).
DoGs = []
b = numpy.convolve(raw, g[0], mode="same")
for gaus in g[1:]:
    c = numpy.convolve(raw, gaus/gaus.sum(), mode="same")
    DoGs.append(b-c)
    b=c
# Laplacian-of-Gaussian responses: convolve raw with each mexican-hat kernel.
LoGs = [ numpy.convolve(raw, mh, mode="same") for mh in m]
# Plot LoG responses (top) and DoG responses (bottom), one curve per sigma,
# with the raw signal overlaid on each subplot for reference.
f2 = pylab.figure(2)
sp1 = f2.add_subplot(211)
for s,crv in zip(sigmas,LoGs):
    sp1.plot(crv,label="$\sigma$=%s"%s)
sp2 = f2.add_subplot(212)
for s,crv in zip(sigmas,DoGs):
    sp2.plot(crv,label="$\sigma$=%s"%s)
sp1.plot(raw,label="raw")
sp2.plot(raw,label="raw")
sp1.legend()
sp2.legend()
f2.show()
raw_input("enter to quit") | [
"auroredeschildre@gmail.com"
] | auroredeschildre@gmail.com |
def6c80510f1499535bb98a7f44dee2849ced408 | 309d86ef0dd2e3aab15f48476dc5464cd816001c | /get_gt_txt.py | 66c6dba090da1ed16d873e72a69a1df9012d9131 | [] | no_license | daiwei9501/pcb-defect | 424e4a0d2b802ac4f8a522e643f7a2f58d51c40b | 50cbda3963328f8f410cb9a927d6015a46581771 | refs/heads/master | 2022-11-07T17:55:27.839488 | 2020-06-22T11:42:40 | 2020-06-22T11:42:40 | 274,120,856 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,600 | py | #----------------------------------------------------#
#   Extract ground-truth annotation files for the test set
#----------------------------------------------------#
import sys
import os
import glob
import xml.etree.ElementTree as ET
# Directory holding the VOC-style XML annotation files for the train split.
xmlfilepath = r'./model_data/label_train/'

# Keep only the .xml annotation files from the directory listing.
total_xml = [xml for xml in os.listdir(xmlfilepath) if xml.endswith(".xml")]

# Write bare image ids (file names minus the ".xml" suffix), one per line.
# `with` guarantees the handle is closed (the original leaked it).
with open('image_list.txt', 'w') as ftrain:
    for xml_name in total_xml:
        ftrain.write(xml_name[:-4] + '\n')

# Re-read the id list back; closing via `with` fixes the original's
# unclosed `open('image_list.txt').read()`.
with open('image_list.txt') as id_file:
    image_ids = id_file.read().strip().split()
print(image_ids)

if not os.path.exists("./input"):
    os.makedirs("./input")
if not os.path.exists("./input/ground-truth"):
    os.makedirs("./input/ground-truth")

# For every image, emit one ground-truth file with a
# "<class> <xmin> <ymin> <xmax> <ymax>" line per object.
for image_id in image_ids:
    with open("./input/ground-truth/" + image_id + ".txt", "w") as new_f:
        root = ET.parse("model_data/label_train/" + image_id + ".xml").getroot()
        for obj in root.findall('object'):
            # Skip objects flagged difficult=1 when the tag is present.
            if obj.find('difficult') is not None:
                difficult = obj.find('difficult').text
                if int(difficult) == 1:
                    continue
            obj_name = obj.find('name').text
            if obj_name == 'double':
                # Trace which images contain the 'double' class.
                print(image_id)
            bndbox = obj.find('bndbox')
            left = bndbox.find('xmin').text
            top = bndbox.find('ymin').text
            right = bndbox.find('xmax').text
            bottom = bndbox.find('ymax').text
            new_f.write("%s %s %s %s %s\n" % (obj_name, left, top, right, bottom))
print("Conversion completed!")
| [
"kbe206@163.com"
] | kbe206@163.com |
9da29f74684274cea4a32592532b7803a906b035 | fd1616921e3611f8e2cfc1129cd23734879cd866 | /apps/guest_list/views.py | 38d58a78295cbcbb614cf948c99205bf86008dce | [] | no_license | tonjohn/wedding_planner | ae6ab8655cccd29831a767a8e0c02dc70e85a577 | 47c4f516b3ea38ea42f4ff39c843ac93786c93be | refs/heads/master | 2021-01-20T03:01:31.279355 | 2017-03-03T17:43:36 | 2017-03-03T17:43:36 | 83,824,238 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 837 | py | from django.shortcuts import render
from django.contrib.auth import login
# Create your views here.
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.urls import reverse_lazy
from .models import Wedding
from django.contrib.auth.models import User
def index(request):
    """Render the static placeholder landing page."""
    template_name = 'guest_list/placeholder/index.html'
    return render(request, template_name)
def guestlist_view(request):
    """Render the guest-list page.

    NOTE(review): this force-authenticates every request as the user with
    pk=1 — presumably a development shortcut; it will raise if that user
    does not exist. Confirm before shipping.
    """
    user = User.objects.get(pk=1)
    login(request, user)
    return render(request, "guest_list/guest_list_show.html")
class WeddingCreate(CreateView):
    """Generic create view for Wedding exposing every model field in the form."""
    model = Wedding
    fields = '__all__'
    # initial={'date_of_death':'12/10/2016',}
class WeddingUpdate(UpdateView):
    """Generic update view for Wedding limited to the listed fields.

    NOTE(review): the field names (date_of_birth/date_of_death) look copied
    from an Author tutorial example — confirm they exist on Wedding.
    """
    model = Wedding
    fields = ['first_name','last_name','date_of_birth','date_of_death']
class WeddingDelete(DeleteView):
    """Generic delete view for Wedding (no success_url configured yet)."""
    model = Wedding
    # success_url = reverse_lazy('authors')
"git@tonjohn.com"
] | git@tonjohn.com |
d49a088bb0cfd1df5be0927b59cd9782ace85d05 | d0e83b3f551c6af16aa0c8ed4ff074b3ec268120 | /processors/feat.py | e48cc144ee2ba12b7865cdbb61a44eb472849820 | [] | no_license | SamuelLAN/kaggle_SCTP | cfb0228a81d71b2f1c315352bd6435042066967f | 50ff2895baa6de29bdb19bfb20ca76718079d188 | refs/heads/master | 2020-04-25T16:22:07.803524 | 2019-04-03T09:06:12 | 2019-04-03T09:06:12 | 172,909,260 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 866 | py | #!/usr/bin/Python
# -*- coding: utf-8 -*-
import numpy as np
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
def lda(train_x, train_y, val_x, test_x):
    """Replace the features of each split with their LDA projection.

    The projection is fit on the training split only, then applied to the
    validation and test splits.
    """
    reducer = LDA()
    projected_train = reducer.fit_transform(train_x, train_y)
    projected_val = reducer.transform(val_x)
    projected_test = reducer.transform(test_x)
    return projected_train, projected_val, projected_test
def add_lda(train_x, train_y, val_x, test_x):
    """Append the LDA projection (fit on train) as extra columns to each split."""
    reducer = LDA()
    # Fit on train, then transform all three splits with the same model.
    projected_train = reducer.fit_transform(train_x, train_y)
    projected_val = reducer.transform(val_x)
    projected_test = reducer.transform(test_x)
    # Concatenate the projections onto the original feature matrices.
    augmented_train = np.hstack([train_x, projected_train])
    augmented_val = np.hstack([val_x, projected_val])
    augmented_test = np.hstack([test_x, projected_test])
    return augmented_train, augmented_val, augmented_test
| [
"412206186@qq.com"
] | 412206186@qq.com |
a00c26fde829171625876699fcb8f48c7456fb31 | b0f151047c8313fd18566b020dab374f0d696f96 | /academicstoday/tenant_foundation/migrations/0001_initial.py | 4697863b5fa9d9f2584ea2c1b7fe5bd71ebe30d6 | [
"BSD-3-Clause"
] | permissive | abhijitdalavi/Django-paas | 4c65477f3865a344a789e4ff0666f792dfda13a6 | cf58cf216d377ea97a2676cd594f96fb9d602a46 | refs/heads/master | 2022-04-01T06:27:50.409132 | 2018-04-09T23:35:32 | 2018-04-09T23:35:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,970 | py | # Generated by Django 2.0.4 on 2018-04-08 23:08
from decimal import Decimal
from django.db import migrations, models
import djmoney.models.fields
class Migration(migrations.Migration):
    """Auto-generated initial migration (Django 2.0.4): creates the Course table.

    Edit with care — this file was produced by `makemigrations`.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Course',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(db_index=True, help_text='The title of this course.', max_length=63, verbose_name='Title')),
                ('sub_title', models.CharField(blank=True, help_text='The sub-title of this course.', max_length=127, null=True, verbose_name='Sub-Title')),
                ('category_text', models.CharField(db_index=True, help_text='The category text of this course.', max_length=127, verbose_name='Category Text')),
                ('description', models.TextField(blank=True, help_text='The course description.', null=True, verbose_name='Description')),
                # NOTE(review): verbose_name 'Description' on 'status' looks like a copy-paste slip.
                ('status', models.PositiveSmallIntegerField(blank=True, default=0, verbose_name='Description')),
                ('purchase_fee_currency', djmoney.models.fields.CurrencyField(choices=[('XUA', 'ADB Unit of Account'), ('AFN', 'Afghani'), ('DZD', 'Algerian Dinar'), ('ARS', 'Argentine Peso'), ('AMD', 'Armenian Dram'), ('AWG', 'Aruban Guilder'), ('AUD', 'Australian Dollar'), ('AZN', 'Azerbaijanian Manat'), ('BSD', 'Bahamian Dollar'), ('BHD', 'Bahraini Dinar'), ('THB', 'Baht'), ('PAB', 'Balboa'), ('BBD', 'Barbados Dollar'), ('BYN', 'Belarussian Ruble'), ('BYR', 'Belarussian Ruble'), ('BZD', 'Belize Dollar'), ('BMD', 'Bermudian Dollar (customarily known as Bermuda Dollar)'), ('BTN', 'Bhutanese ngultrum'), ('VEF', 'Bolivar Fuerte'), ('BOB', 'Boliviano'), ('XBA', 'Bond Markets Units European Composite Unit (EURCO)'), ('BRL', 'Brazilian Real'), ('BND', 'Brunei Dollar'), ('BGN', 'Bulgarian Lev'), ('BIF', 'Burundi Franc'), ('XOF', 'CFA Franc BCEAO'), ('XAF', 'CFA franc BEAC'), ('XPF', 'CFP Franc'), ('CAD', 'Canadian Dollar'), ('CVE', 'Cape Verde Escudo'), ('KYD', 'Cayman Islands Dollar'), ('CLP', 'Chilean peso'), ('XTS', 'Codes specifically reserved for testing purposes'), ('COP', 'Colombian peso'), ('KMF', 'Comoro Franc'), ('CDF', 'Congolese franc'), ('BAM', 'Convertible Marks'), ('NIO', 'Cordoba Oro'), ('CRC', 'Costa Rican Colon'), ('HRK', 'Croatian Kuna'), ('CUP', 'Cuban Peso'), ('CUC', 'Cuban convertible peso'), ('CZK', 'Czech Koruna'), ('GMD', 'Dalasi'), ('DKK', 'Danish Krone'), ('MKD', 'Denar'), ('DJF', 'Djibouti Franc'), ('STD', 'Dobra'), ('DOP', 'Dominican Peso'), ('VND', 'Dong'), ('XCD', 'East Caribbean Dollar'), ('EGP', 'Egyptian Pound'), ('SVC', 'El Salvador Colon'), ('ETB', 'Ethiopian Birr'), ('EUR', 'Euro'), ('XBB', 'European Monetary Unit (E.M.U.-6)'), ('XBD', 'European Unit of Account 17(E.U.A.-17)'), ('XBC', 'European Unit of Account 9(E.U.A.-9)'), ('FKP', 'Falkland Islands Pound'), ('FJD', 'Fiji Dollar'), ('HUF', 'Forint'), ('GHS', 'Ghana Cedi'), ('GIP', 'Gibraltar Pound'), ('XAU', 'Gold'), ('XFO', 'Gold-Franc'), ('PYG', 'Guarani'), ('GNF', 'Guinea Franc'), ('GYD',
                 'Guyana Dollar'), ('HTG', 'Haitian gourde'), ('HKD', 'Hong Kong Dollar'), ('UAH', 'Hryvnia'), ('ISK', 'Iceland Krona'), ('INR', 'Indian Rupee'), ('IRR', 'Iranian Rial'), ('IQD', 'Iraqi Dinar'), ('IMP', 'Isle of Man Pound'), ('JMD', 'Jamaican Dollar'), ('JOD', 'Jordanian Dinar'), ('KES', 'Kenyan Shilling'), ('PGK', 'Kina'), ('LAK', 'Kip'), ('KWD', 'Kuwaiti Dinar'), ('AOA', 'Kwanza'), ('MMK', 'Kyat'), ('GEL', 'Lari'), ('LVL', 'Latvian Lats'), ('LBP', 'Lebanese Pound'), ('ALL', 'Lek'), ('HNL', 'Lempira'), ('SLL', 'Leone'), ('LSL', 'Lesotho loti'), ('LRD', 'Liberian Dollar'), ('LYD', 'Libyan Dinar'), ('SZL', 'Lilangeni'), ('LTL', 'Lithuanian Litas'), ('MGA', 'Malagasy Ariary'), ('MWK', 'Malawian Kwacha'), ('MYR', 'Malaysian Ringgit'), ('TMM', 'Manat'), ('MUR', 'Mauritius Rupee'), ('MZN', 'Metical'), ('MXV', 'Mexican Unidad de Inversion (UDI)'), ('MXN', 'Mexican peso'), ('MDL', 'Moldovan Leu'), ('MAD', 'Moroccan Dirham'), ('BOV', 'Mvdol'), ('NGN', 'Naira'), ('ERN', 'Nakfa'), ('NAD', 'Namibian Dollar'), ('NPR', 'Nepalese Rupee'), ('ANG', 'Netherlands Antillian Guilder'), ('ILS', 'New Israeli Sheqel'), ('RON', 'New Leu'), ('TWD', 'New Taiwan Dollar'), ('NZD', 'New Zealand Dollar'), ('KPW', 'North Korean Won'), ('NOK', 'Norwegian Krone'), ('PEN', 'Nuevo Sol'), ('MRO', 'Ouguiya'), ('TOP', 'Paanga'), ('PKR', 'Pakistan Rupee'), ('XPD', 'Palladium'), ('MOP', 'Pataca'), ('PHP', 'Philippine Peso'), ('XPT', 'Platinum'), ('GBP', 'Pound Sterling'), ('BWP', 'Pula'), ('QAR', 'Qatari Rial'), ('GTQ', 'Quetzal'), ('ZAR', 'Rand'), ('OMR', 'Rial Omani'), ('KHR', 'Riel'), ('MVR', 'Rufiyaa'), ('IDR', 'Rupiah'), ('RUB', 'Russian Ruble'), ('RWF', 'Rwanda Franc'), ('XDR', 'SDR'), ('SHP', 'Saint Helena Pound'), ('SAR', 'Saudi Riyal'), ('RSD', 'Serbian Dinar'), ('SCR', 'Seychelles Rupee'), ('XAG', 'Silver'), ('SGD', 'Singapore Dollar'), ('SBD', 'Solomon Islands Dollar'), ('KGS', 'Som'), ('SOS', 'Somali Shilling'), ('TJS', 'Somoni'), ('SSP', 'South Sudanese Pound'), ('LKR', 'Sri Lanka Rupee'),
                 ('XSU', 'Sucre'), ('SDG', 'Sudanese Pound'), ('SRD', 'Surinam Dollar'), ('SEK', 'Swedish Krona'), ('CHF', 'Swiss Franc'), ('SYP', 'Syrian Pound'), ('BDT', 'Taka'), ('WST', 'Tala'), ('TZS', 'Tanzanian Shilling'), ('KZT', 'Tenge'), ('XXX', 'The codes assigned for transactions where no currency is involved'), ('TTD', 'Trinidad and Tobago Dollar'), ('MNT', 'Tugrik'), ('TND', 'Tunisian Dinar'), ('TRY', 'Turkish Lira'), ('TMT', 'Turkmenistan New Manat'), ('TVD', 'Tuvalu dollar'), ('AED', 'UAE Dirham'), ('XFU', 'UIC-Franc'), ('USD', 'US Dollar'), ('USN', 'US Dollar (Next day)'), ('UGX', 'Uganda Shilling'), ('CLF', 'Unidad de Fomento'), ('COU', 'Unidad de Valor Real'), ('UYI', 'Uruguay Peso en Unidades Indexadas (URUIURUI)'), ('UYU', 'Uruguayan peso'), ('UZS', 'Uzbekistan Sum'), ('VUV', 'Vatu'), ('CHE', 'WIR Euro'), ('CHW', 'WIR Franc'), ('KRW', 'Won'), ('YER', 'Yemeni Rial'), ('JPY', 'Yen'), ('CNY', 'Yuan Renminbi'), ('ZMK', 'Zambian Kwacha'), ('ZMW', 'Zambian Kwacha'), ('ZWD', 'Zimbabwe Dollar A/06'), ('ZWN', 'Zimbabwe dollar A/08'), ('ZWL', 'Zimbabwe dollar A/09'), ('PLN', 'Zloty')], default='CAD', editable=False, max_length=3)),
                ('purchase_fee', djmoney.models.fields.MoneyField(blank=True, decimal_places=2, default=Decimal('0'), default_currency='CAD', help_text='The purchase fee that the student will be charged to enroll in this course.', max_digits=10, verbose_name='Purchase Fee')),
                ('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
                ('last_modified_at', models.DateTimeField(auto_now=True, db_index=True)),
            ],
            options={
                'verbose_name': 'Course',
                'verbose_name_plural': 'Courses',
                'db_table': 'at_applications',
            },
        ),
    ]
| [
"bart@mikasoftware.com"
] | bart@mikasoftware.com |
e3c3e76cd3f6345219ed73d91c75b8ea32a227b5 | eab1756b01717e81537133400f36aea4d7a0876f | /dawn/launch-tong.py | cc90b2066a548a7ed4ba16879b0631e9ccd5a8e5 | [] | no_license | bearpelican/cluster | d677fe392ac1196b77e3f8fb79e530ec8371080f | 2e316cf1def0b72b47f79a864ed3aa778c297b95 | refs/heads/master | 2020-03-21T06:52:57.514901 | 2018-08-10T10:20:26 | 2018-08-10T22:33:05 | 138,246,892 | 3 | 1 | null | 2018-06-22T02:51:07 | 2018-06-22T02:51:07 | null | UTF-8 | Python | false | false | 2,593 | py | #!/usr/bin/env python
# numpy01 image, see environment-numpy.org for construction
# (DL AMI v 3.0 based)
#
# us-east-1 AMIs
# numpy00: ami-f9d6dc83
# numpy01: ami-5b524f21
from collections import OrderedDict
import argparse
import os
import sys
import time
import boto3
module_path=os.path.dirname(os.path.abspath(__file__))
sys.path.append(module_path+'/..')
import util
util.install_pdb_handler()
# Command-line interface for the launcher script.
parser = argparse.ArgumentParser(description='launch')
parser.add_argument('--ami', type=str, default='ami-5b524f21',
                    help="name of AMI to use ")
parser.add_argument('--group', type=str, default='dawn_runs',
                    help="name of the current run")
parser.add_argument('--name', type=str, default='baseline5-tong',
                    help="name of the current run")
parser.add_argument('--instance-type', type=str, default='p3.16xlarge',
                    help="type of instance")
parser.add_argument('--zone', type=str, default='us-east-1f',
                    help='which availability zone to use')
parser.add_argument('--linux-type', type=str, default='ubuntu',
                    help='which linux to use: ubuntu or amazon')
parser.add_argument('--role', type=str, default='launcher',
                    help='launcher or worker')
# Parsed at import time: this module is a script, not a library.
args = parser.parse_args()
def main():
    """Provision an AWS instance for the run, set up TensorBoard in tmux,
    upload the training scripts, and kick off training asynchronously."""
    # Imported lazily so importing this module does not require AWS setup.
    import aws_backend

    run = aws_backend.make_run(args.name, ami=args.ami,
                               availability_zone=args.zone,
                               linux_type=args.linux_type)
    job = run.make_job('main', instance_type=args.instance_type)
    job.wait_until_ready()

    print(job.connect_instructions)

    # if tensorboard is running, kill it, it will prevent efs logdir from being
    # deleted
    job.run("tmux kill-session -t tb || echo ok")

    logdir = '/efs/runs/%s/%s'%(args.group, args.name)
    job.run('rm -Rf %s || echo failed' % (logdir,)) # delete prev logs

    # Launch tensorboard visualizer in separate tmux session
    job.run("tmux new-session -s tb -n 0 -d")
    job.run("tmux send-keys -t tb:0 'source activate mxnet_p36' Enter")
    job.run("tmux send-keys -t tb:0 'tensorboard --logdir %s' Enter"%(logdir,))

    job.run('source activate mxnet_p36')
    job.run('killall python || echo failed') # kill previous run
    # Pin a custom TF wheel built for p3 instances.
    job.run('pip install -U https://s3.amazonaws.com/inferno-dlami/tensorflow/p3/tensorflow-1.5.0-cp36-cp36m-linux_x86_64.whl')
    job.upload('imagenet_utils.py')
    job.upload('resnet_model.py')
    job.upload('resnet.b512.baseline.py')
    # Fire-and-forget: training keeps running after this script exits.
    job.run_async('python resnet.b512.baseline.py --logdir=%s'%(logdir,))
# Standard script entry point: only launch when executed directly.
if __name__=='__main__':
    main()
| [
"yaroslavvb@gmail.com"
] | yaroslavvb@gmail.com |
29a331d74f6806dca2a533c596b4dc2abd4096e1 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_14345.py | d79cee8f8289303a6c465582266bd21f8614a8c8 | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 93 | py | # convert hex ascii mixed string like r'\x74op' to 'top' in python
s.decode('string_escape')
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
6a4e9dc367ef4b1f0f0362c7eb17f087cd7e3543 | 6c3eb7cdb18757bff3d60a76eb060d0f2e24ab01 | /extra/tests.py | 142c114321cf7990ed7acadc150ec9b294a27f23 | [
"MIT"
] | permissive | aldnav/eventure | f8951c24849d3c15927742ddb9875beb5c852708 | 024e3720205265e3c69e6c2c65f89a09c5aafdd8 | refs/heads/master | 2020-12-02T20:53:48.953796 | 2017-07-06T07:33:21 | 2017-07-06T07:33:21 | 96,227,271 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,059 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core import serializers
from django.test import TestCase
from .views import ActivitySerializer
from .models import Post
from activity.models import Activity, Verb
class SerializerTestCase(TestCase):
    """Exercise ActivitySerializer against a minimal Activity fixture."""

    def setUp(self):
        """Create two users and one post that activities can reference."""
        self.user1 = User.objects.create_user(username='Gilfoyle', password='pass')
        self.user2 = User.objects.create_user(username='Dinesh', password='pass')
        self.post = Post.objects.create(title='I Have a Dream')

    def test_serialize(self):
        """Serializing a 'like' activity yields a mapping with the asked fields."""
        verb_like = Verb.objects.create(name='like')
        activity = Activity.objects.create(
            actor=self.user1,
            verb=verb_like,
            object_ref=self.post,
        )
        wanted = ['pk', 'published']
        result = ActivitySerializer().serialize(activity, wanted)
        self.assertIsNotNone(result)
        self.assertEquals(result['pk'], activity.pk)
        # data = serializers.serialize('json', [action])
"aldrinnavarro16@gmail.com"
] | aldrinnavarro16@gmail.com |
926b24d0a94b3e12e0e179860edc829514da68c5 | 44a240d2bda10e69e2d2ce20e1a947e4e0e41356 | /tests/sqlqa/tests/security/qi_ddl/tcase1.py | b479c91f8a9d49fefff843ad964d21f73cc6983a | [
"Apache-2.0"
] | permissive | trafodion/tests | 99c3da3a7b68805649df0950e5083eadc9cfc94a | d0227026fe9172f3749c453f76f249e27c91dc39 | refs/heads/master | 2021-01-10T01:56:22.515028 | 2015-06-25T18:37:27 | 2015-06-25T18:40:21 | 36,892,411 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 66,498 | py | # @@@ START COPYRIGHT @@@
#
# (C) Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");"""
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@@ END COPYRIGHT @@@
from ...lib import hpdci
import defs
import basic_defs
import unittest
import time
_testmgr = None
_testlist = []
_dci = None
def _init(hptestmgr, testlist=None):
    """Bind the module-level test state shared by every test case here.

    hptestmgr -- the framework test manager; also supplies the default dci proc.
    testlist  -- optional list of selected test names (defaults to empty).
    """
    global _testmgr
    global _testlist
    global _dci
    _testmgr = hptestmgr
    # Avoid the shared-mutable-default pitfall of the original 'testlist=[]'.
    _testlist = [] if testlist is None else testlist
    # default hpdci was created using 'SQL' as the proc name.
    # this default instance shows 'SQL>' as the prompt in the log file.
    _dci = _testmgr.get_default_dci_proc()
def testa01(desc="""DDL operations drop tables"""):
    """QI check for DROP TABLE: privileges granted to qauser11 work while
    the table exists, and every access fails with error 4082 once the
    owner drops it (cached plans must be invalidated)."""
    global _testmgr
    global _testlist
    global _dci
    if not _testmgr.testcase_begin(_testlist): return

    def run(dci, stmt, check, *args):
        # Execute one SQL statement and apply the named hpdci expectation
        # (expect_<check>) to its output.
        getattr(dci, 'expect_' + check)(dci.cmdexec(stmt), *args)

    # Owner (user2) creates schema/table and grants select+insert.
    mydci = basic_defs.switch_session_qi_user2()
    run(mydci, """create schema qi_schema1;""", 'complete_msg')
    run(mydci, """set schema qi_schema1;""", 'complete_msg')
    run(mydci, """create table a01tab1(a int not null primary key, b int);""", 'complete_msg')
    run(mydci, """ insert into a01tab1 values(11,10), (1,4);""", 'inserted_msg', 2)
    run(mydci, """grant select,insert on a01tab1 to qauser11;""", 'complete_msg')

    # Grantee (user3) can select/insert but not delete (error 4481).
    mydci = basic_defs.switch_session_qi_user3()
    run(mydci, """set schema qi_schema1;""", 'complete_msg')
    run(mydci, """insert into a01tab1 values(3,4);""", 'inserted_msg', 1)
    run(mydci, """ select * from a01tab1;""", 'selected_msg', 3)
    run(mydci, """ delete from a01tab1;""", 'error_msg', '4481')

    # Owner drops the table; owner's own access now fails with 4082.
    mydci = basic_defs.switch_session_qi_user2()
    run(mydci, """set schema qi_schema1;""", 'complete_msg')
    run(mydci, """ drop table a01tab1;""", 'complete_msg')
    run(mydci, """ insert into a01tab1 values(1,1);""", 'error_msg', '4082')

    # Grantee's previously-valid access must also fail with 4082 now.
    mydci = basic_defs.switch_session_qi_user3()
    run(mydci, """set schema qi_schema1;""", 'complete_msg')
    run(mydci, """insert into a01tab1 values(3,4);""", 'error_msg', '4082')
    run(mydci, """ select * from a01tab1;""", 'error_msg', '4082')
    run(mydci, """ delete from a01tab1;""", 'error_msg', '4082')

    # Cleanup.
    mydci = basic_defs.switch_session_qi_user2()
    run(mydci, """drop schema qi_schema1;""", 'complete_msg')

    _testmgr.testcase_end(desc)
def testa02(desc="""DDL operations,alter table add column/drop column"""):
    """QI check for ALTER TABLE ADD/DROP COLUMN: INSERT-only grantee access
    must track column changes (wrong arity fails with 4023, dropped columns
    with 4001), and all access fails with 4082 after DROP TABLE."""
    global _testmgr
    global _testlist
    global _dci
    if not _testmgr.testcase_begin(_testlist): return

    def run(dci, stmt, check, *args):
        # Execute one SQL statement and apply the named hpdci expectation
        # (expect_<check>) to its output.
        getattr(dci, 'expect_' + check)(dci.cmdexec(stmt), *args)

    # Owner (user2) creates the table and grants INSERT only.
    mydci = basic_defs.switch_session_qi_user2()
    run(mydci, """create schema qi_schema2;""", 'complete_msg')
    run(mydci, """set schema qi_schema2;""", 'complete_msg')
    run(mydci, """create table a02tab1(a int, b int)no partition;""", 'complete_msg')
    run(mydci, """ insert into a02tab1 values(1,3),(9,8);""", 'inserted_msg', 2)
    run(mydci, """ grant insert on a02tab1 to qauser11;""", 'complete_msg')

    # Grantee (user3) may insert but not select/delete (4481).
    mydci = basic_defs.switch_session_qi_user3()
    run(mydci, """set schema qi_schema2;""", 'complete_msg')
    run(mydci, """ select * from a02tab1;""", 'error_msg', '4481')
    run(mydci, """ delete from a02tab1;""", 'error_msg', '4481')
    run(mydci, """ insert into a02tab1 values(11,34),(34,55);""", 'inserted_msg', 2)

    # Owner adds column c.
    mydci = basic_defs.switch_session_qi_user2()
    run(mydci, """set schema qi_schema2;""", 'complete_msg')
    run(mydci, """alter table a02tab1 add column c int;""", 'complete_msg')
    run(mydci, """ select * from a02tab1;""", 'selected_msg', 4)
    run(mydci, """ insert into a02tab1 values (11,22,33);""", 'inserted_msg', 1)

    # Grantee: 2-value insert now has wrong arity (4023); 3-value works.
    mydci = basic_defs.switch_session_qi_user3()
    run(mydci, """set schema qi_schema2;""", 'complete_msg')
    run(mydci, """ select * from a02tab1;""", 'error_msg', '4481')
    run(mydci, """ delete from a02tab1;""", 'error_msg', '4481')
    run(mydci, """ insert into a02tab1 values(11,34),(34,55);""", 'error_msg', '4023')
    run(mydci, """ insert into a02tab1 values(11,34,77);""", 'inserted_msg', 1)

    # Owner adds column d and additionally grants SELECT.
    mydci = basic_defs.switch_session_qi_user2()
    run(mydci, """set schema qi_schema2;""", 'complete_msg')
    run(mydci, """alter table a02tab1 add column d int;""", 'complete_msg')
    run(mydci, """ grant select on a02tab1 to qauser11;""", 'complete_msg')

    mydci = basic_defs.switch_session_qi_user3()
    run(mydci, """set schema qi_schema2;""", 'complete_msg')
    run(mydci, """ insert into a02tab1 values(11,34,79,1);""", 'inserted_msg', 1)

    # Owner drops column b; references to b must fail with 4001.
    mydci = basic_defs.switch_session_qi_user2()
    run(mydci, """set schema qi_schema2;""", 'complete_msg')
    run(mydci, """alter table a02tab1 drop column b;""", 'complete_msg')

    mydci = basic_defs.switch_session_qi_user3()
    run(mydci, """set schema qi_schema2;""", 'complete_msg')
    run(mydci, """ select * from a02tab1 where(b>0);""", 'error_msg', '4001')
    run(mydci, """ insert into a02tab1 values(1,2,3,4);""", 'error_msg', '4023')
    run(mydci, """ insert into a02tab1 values(1,2,3);""", 'inserted_msg', 1)

    # Owner drops column d; same pattern for d.
    mydci = basic_defs.switch_session_qi_user2()
    run(mydci, """set schema qi_schema2;""", 'complete_msg')
    run(mydci, """alter table a02tab1 drop column d;""", 'complete_msg')

    mydci = basic_defs.switch_session_qi_user3()
    run(mydci, """set schema qi_schema2;""", 'complete_msg')
    run(mydci, """ select * from a02tab1 where(d>0);""", 'error_msg', '4001')
    run(mydci, """ insert into a02tab1 values(2,3,4);""", 'error_msg', '4023')
    run(mydci, """ insert into a02tab1 values(2,3);""", 'inserted_msg', 1)

    # Drop the table: every further access must fail with 4082.
    mydci = basic_defs.switch_session_qi_user2()
    run(mydci, """set schema qi_schema2;""", 'complete_msg')
    run(mydci, """drop table a02tab1;""", 'complete_msg')

    mydci = basic_defs.switch_session_qi_user3()
    run(mydci, """set schema qi_schema2;""", 'complete_msg')
    run(mydci, """ select * from a02tab1;""", 'error_msg', '4082')
    run(mydci, """ insert into a02tab1 values(2,3);""", 'error_msg', '4082')

    # Cleanup.
    mydci = basic_defs.switch_session_qi_user2()
    run(mydci, """drop schema qi_schema2;""", 'complete_msg')

    _testmgr.testcase_end(desc)
def testa03(desc="""DDL operations,alter table add/drop check constraint"""):
    """QI check for ALTER TABLE ADD/DROP CONSTRAINT (check): inserts by both
    owner and grantee must honour whichever check constraints are currently
    defined (violations fail with 8101)."""
    global _testmgr
    global _testlist
    # NOTE(review): the original declares 'global mydci' here (testa01/testa02
    # use '_dci'); kept as-is so module-level state is unchanged.
    global mydci
    if not _testmgr.testcase_begin(_testlist): return

    def run(dci, stmt, check, *args):
        # Execute one SQL statement and apply the named hpdci expectation
        # (expect_<check>) to its output.
        getattr(dci, 'expect_' + check)(dci.cmdexec(stmt), *args)

    # Owner (user2) creates a table with check constraint c1 (a>b) and
    # grants INSERT to qauser11.
    mydci = basic_defs.switch_session_qi_user2()
    run(mydci, """create schema qi_schema3;""", 'complete_msg')
    run(mydci, """set schema qi_schema3;""", 'complete_msg')
    run(mydci, """create table a03tab1(a int, b int, constraint c1 check (a>b));""", 'complete_msg')
    run(mydci, """insert into a03tab1 values (13,1);""", 'inserted_msg', 1)
    run(mydci, """insert into a03tab1 values (12,14);""", 'error_msg', '8101')
    run(mydci, """grant insert on a03tab1 to qauser11;""", 'complete_msg')

    # Grantee (user3) hits the same constraint.
    mydci = basic_defs.switch_session_qi_user3()
    run(mydci, """set schema qi_schema3;""", 'complete_msg')
    run(mydci, """insert into a03tab1 values (23,11);""", 'inserted_msg', 1)
    run(mydci, """insert into a03tab1 values (22,24);""", 'error_msg', '8101')

    # Owner adds a second, stricter constraint c2 (a > 2*b).
    mydci = basic_defs.switch_session_qi_user2()
    run(mydci, """set schema qi_schema3;""", 'complete_msg')
    run(mydci, """alter table a03tab1 add constraint c2 check (a > 2*b);""", 'complete_msg')
    run(mydci, """insert into a03tab1 values (33,11);""", 'inserted_msg', 1)
    run(mydci, """insert into a03tab1 values (32,31);""", 'error_msg', '8101')

    mydci = basic_defs.switch_session_qi_user3()
    run(mydci, """set schema qi_schema3;""", 'complete_msg')
    run(mydci, """insert into a03tab1 values (43,11);""", 'inserted_msg', 1)
    run(mydci, """insert into a03tab1 values (42,34);""", 'error_msg', '8101')

    # Owner drops c2; c1 (a>b) still applies.
    mydci = basic_defs.switch_session_qi_user2()
    run(mydci, """set schema qi_schema3;""", 'complete_msg')
    run(mydci, """alter table a03tab1 drop constraint c2;""", 'complete_msg')
    run(mydci, """insert into a03tab1 values (53,41);""", 'inserted_msg', 1)
    run(mydci, """insert into a03tab1 values (52,54);""", 'error_msg', '8101')

    mydci = basic_defs.switch_session_qi_user3()
    run(mydci, """set schema qi_schema3;""", 'complete_msg')
    run(mydci, """insert into a03tab1 values (63,51); """, 'inserted_msg', 1)
    run(mydci, """insert into a03tab1 values (54,62);""", 'error_msg', '8101')

    # Owner drops c1 too; previously-violating rows now insert fine.
    mydci = basic_defs.switch_session_qi_user2()
    run(mydci, """set schema qi_schema3;""", 'complete_msg')
    run(mydci, """alter table a03tab1 drop constraint c1;""", 'complete_msg')
    run(mydci, """insert into a03tab1 values (53,70);""", 'inserted_msg', 1)

    mydci = basic_defs.switch_session_qi_user3()
    run(mydci, """set schema qi_schema3;""", 'complete_msg')
    run(mydci, """insert into a03tab1 values (1,70);""", 'inserted_msg', 1)

    # Cleanup.
    mydci = basic_defs.switch_session_qi_user2()
    run(mydci, """set schema qi_schema3;""", 'complete_msg')
    run(mydci, """ drop table a03tab1;""", 'complete_msg')
    run(mydci, """drop schema qi_schema3;""", 'complete_msg')

    _testmgr.testcase_end(desc)
def testa04(desc="""DDL operations,alter table add/drop unique constraint"""):
    """Exercise adding/dropping unique constraints across two user sessions,
    checking each insert succeeds or fails with the expected SQL code
    before and after every ALTER TABLE / GRANT.
    """
    global _testmgr
    global _testlist
    global mydci
    if not _testmgr.testcase_begin(_testlist): return

    # Wrappers: run one statement on the current session (global mydci is
    # re-read on every call, so session switches are honored) and apply
    # the matching expectation to its output.
    def _ok(sql):
        mydci.expect_complete_msg(mydci.cmdexec(sql))

    def _ins(sql, rows):
        mydci.expect_inserted_msg(mydci.cmdexec(sql), rows)

    def _err(sql, code):
        mydci.expect_error_msg(mydci.cmdexec(sql), code)

    # qi_user2 creates the schema and a table with unique constraint cons4_1.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""create schema qi_schema4;""")
    _ok("""set schema qi_schema4;""")
    _ok("""create table a04tab1( a int not null primary key, b int, c int,constraint cons4_1 unique(b));""")
    _ins("""insert into a04tab1 values(1,2,3);""", 1)
    _err("""insert into a04tab1 values(1,2,3);""", '8102')
    _ok(""" grant insert on a04tab1 to qauser11;""")

    # qi_user3 has only INSERT so far; the insert is rejected (4481).
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema4;""")
    _err("""insert into a04tab1 values(2,1,4);""", '4481')

    # Grant SELECT as well, after which qi_user3 can insert.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema4;""")
    _ok(""" grant select on a04tab1 to qauser11;""")
    mydci = basic_defs.switch_session_qi_user3()
    _ins("""insert into a04tab1 values(2,1,4);""", 1)
    _ins("""insert into a04tab1 values(3,4,5);""", 1)

    # Add a second unique constraint on c; duplicates on c now fail.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema4;""")
    _ok("""alter table a04tab1 add constraint cons42 unique(c);""")
    _ins("""insert into a04tab1 values(11,7,6);""", 1)
    _err("""insert into a04tab1 values(11,1,5);""", '8102')
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema4;""")
    _ins("""insert into a04tab1 values(15,8,9);""", 1)
    _err("""insert into a04tab1 values(11,1,6);""", '8102')

    # Drop cons42; duplicates on c are allowed again, but b stays unique.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema4;""")
    _ok("""alter table a04tab1 drop constraint cons42;""")
    _ins("""insert into a04tab1 values(100,99,98);""", 1)
    _err("""insert into a04tab1 values(100,99,98);""", '8102')
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema4;""")
    _ins("""insert into a04tab1 values(87,98,98);""", 1)
    _err("""insert into a04tab1 values(86,99,98);""", '8102')

    # Cleanup.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema4;""")
    _ok("""alter table a04tab1 drop constraint cons4_1;""")
    _ok("""drop table a04tab1;""")
    _ok("""drop schema qi_schema4 cascade;""")
    _testmgr.testcase_end(desc)
def testa05(desc="""DDL operations,alter table add/drop primary key"""):
    """Exercise adding/dropping a primary key on a populated table across
    two user sessions: adding fails while duplicates exist (8110), then
    succeeds after a delete, enforcing uniqueness until it is dropped.
    """
    global _testmgr
    global _testlist
    global mydci
    if not _testmgr.testcase_begin(_testlist): return

    # Wrappers over the current session (global mydci re-read per call).
    def _ok(sql):
        mydci.expect_complete_msg(mydci.cmdexec(sql))

    def _ins(sql, rows):
        mydci.expect_inserted_msg(mydci.cmdexec(sql), rows)

    def _del(sql, rows):
        mydci.expect_deleted_msg(mydci.cmdexec(sql), rows)

    def _err(sql, code):
        mydci.expect_error_msg(mydci.cmdexec(sql), code)

    # qi_user2 creates a keyless table and seeds it.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""create schema qi_schema5;""")
    _ok("""set schema qi_schema5;""")
    _ok("""create table a05tab1(a int, b int) no partition;""")
    _ins(""" insert into a05tab1 values(9,1);""", 1)
    _ok("""grant insert,select on a05tab1 to qauser11;""")

    # qi_user3 adds two more rows, including a duplicate a=9.
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema5;""")
    _ins(""" insert into a05tab1 values(9,1),(8,2);""", 2)

    # Adding the PK fails on the duplicates (8110) until they are deleted.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema5;""")
    _err("""alter table a05tab1 add constraint cons51 primary key(a);""", '8110')
    _del("""delete from a05tab1 where a =9 ;""", 2)
    _ok("""alter table a05tab1 add constraint cons51 primary key(a);""")
    _err(""" insert into a05tab1 values(8,1);""", '8102')
    _ins(""" insert into a05tab1 values(7,1);""", 1)

    # The new PK is enforced for qi_user3 as well.
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema5;""")
    _err(""" insert into a05tab1 values(8,9);""", '8102')
    _ins(""" insert into a05tab1 values(6,3);""", 1)

    # Drop the PK; duplicate keys become insertable again for both users.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema5;""")
    _ok("""alter table a05tab1 drop constraint cons51;""")
    _ins(""" insert into a05tab1 values(9,1);""", 1)
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema5;""")
    _ins(""" insert into a05tab1 values(9,1);""", 1)

    # Cleanup.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""drop table a05tab1;""")
    _ok("""drop schema qi_schema5;""")
    _testmgr.testcase_end(desc)
def testa06(desc="""DDL operations,alter table add/drop foreign key (RI)"""):
    """Exercise adding/dropping a foreign-key (referential integrity)
    constraint across two user sessions: inserts violating the FK fail
    with 8103 while it exists and succeed once it is dropped.
    """
    global _testmgr
    global _testlist
    global mydci
    if not _testmgr.testcase_begin(_testlist): return

    # Wrappers over the current session (global mydci re-read per call).
    def _ok(sql):
        mydci.expect_complete_msg(mydci.cmdexec(sql))

    def _ins(sql, rows):
        mydci.expect_inserted_msg(mydci.cmdexec(sql), rows)

    def _err(sql, code):
        mydci.expect_error_msg(mydci.cmdexec(sql), code)

    # qi_user2 creates parent (a06tab1) and child (a06tab2) tables.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""create schema qi_schema6;""")
    _ok("""set schema qi_schema6;""")
    _ok("""create table a06tab1( a int not null primary key, b int);""")
    _ok(""" create table a06tab2(c int, d int)no partition;""")
    _ins(""" insert into a06tab1 values(1,1);""", 1)
    _ins(""" insert into a06tab2 values(1,1);""", 1)
    _ok(""" grant insert,select on a06tab2 to qauser11;""")

    # qi_user3 may insert into the child only (parent insert gets 4481).
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema6;""")
    _err("""insert into a06tab1 values(1,1);""", '4481')
    _ins(""" insert into a06tab2 values(1,1);""", 1)

    # Add FK a06tab2(d) -> a06tab1(a); unmatched d values now fail (8103).
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema6;""")
    _ok("""alter table a06tab2 add constraint c61 foreign key(d) references a06tab1(a);""")
    _ins(""" insert into a06tab1 values(2,2);""", 1)
    _err("""insert into a06tab2 values(1,3);""", '8103')
    _ins("""insert into a06tab2 values(1,2);""", 1)
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema6;""")
    _err(""" insert into a06tab2 values(1,3);""", '8103')
    _ins(""" insert into a06tab2 values(1,2);""", 1)

    # Drop the FK; previously-rejected child rows become insertable.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema6;""")
    _ok(""" alter table a06tab2 drop constraint c61;""")
    _ins("""insert into a06tab2 values(1,3);""", 1)
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema6;""")
    _ins(""" insert into a06tab2 values(1,3);""", 1)
    _ins(""" insert into a06tab2 values(1,2);""", 1)

    # Cleanup.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""drop table a06tab1;""")
    _ok("""drop table a06tab2;""")
    _ok("""drop schema qi_schema6;""")
    _testmgr.testcase_end(desc)
def testa07(desc="""DDL operations,create/drop index"""):
    """Exercise POPULATE INDEX privilege checks: it works while qi_user3
    holds select/delete/insert, fails (4481) after revocation, and is not
    re-enabled by the ALTER component privilege or an UPDATE grant alone.
    """
    global _testmgr
    global _testlist
    global mydci
    if not _testmgr.testcase_begin(_testlist): return

    # Wrappers over the current session (global mydci re-read per call).
    def _ok(sql):
        mydci.expect_complete_msg(mydci.cmdexec(sql))

    def _err(sql, code):
        mydci.expect_error_msg(mydci.cmdexec(sql), code)

    # qi_user2 creates the table, an index, and grants DML privileges.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""create schema qi_schema7;""")
    _ok("""set schema qi_schema7;""")
    _ok("""create table a07tab1( a int not null primary key, b int);""")
    _ok("""create index index1 on a07tab1(b desc);""")
    _ok("""grant select, delete, insert on a07tab1 to qauser11;""")
    _ok("""populate index index1 on a07tab1;""")

    # With the grants in place, qi_user3 may populate the index.
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema7;""")
    _ok(""" populate index index1 on a07tab1;""")

    # Revoke; qi_user3's populate now fails with 4481.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""revoke select, delete, insert on a07tab1 from qauser11;""")
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema7;""")
    _err(""" populate index index1 on a07tab1;""", '4481')

    # Granting the ALTER component privilege (via the admin session _dci)
    # does not by itself allow populate.
    _dci.expect_complete_msg(_dci.cmdexec(
        """grant component privilege "ALTER" on sql_operations to qauser11;"""))
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema7;""")
    _ok("""grant update on a07tab1 to qauser11;""")
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema7;""")
    _err(""" populate index index1 on a07tab1;""", '4481')

    # Nor does an UPDATE grant; after revoking it, populate still fails.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema7;""")
    _ok("""revoke update on a07tab1 from qauser11;""")
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema7;""")
    _err(""" populate index index1 on a07tab1;""", '4481')

    # Cleanup, including the component privilege granted above.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""drop table a07tab1;""")
    _ok(""" drop schema qi_schema7;""")
    _dci.expect_complete_msg(_dci.cmdexec(
        """revoke component privilege "ALTER" on sql_operations from qauser11;"""))
    _testmgr.testcase_end(desc)
def testa08(desc="""create view depends on one table, drop view"""):
    """View on a single table: after DROP VIEW, selecting the view fails
    with 4082 while the base table remains selectable.
    """
    global _testmgr
    global _testlist
    global mydci
    if not _testmgr.testcase_begin(_testlist): return

    # Wrappers over the current session (global mydci re-read per call).
    def _ok(sql):
        mydci.expect_complete_msg(mydci.cmdexec(sql))

    def _sel(sql, rows):
        mydci.expect_selected_msg(mydci.cmdexec(sql), rows)

    def _err(sql, code):
        mydci.expect_error_msg(mydci.cmdexec(sql), code)

    # qi_user2 sets up table + view and grants SELECT on both.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""create schema qi_schema8;""")
    _ok("""set schema qi_schema8;""")
    _ok("""create table a08tab1( a int not null primary key, b int);""")
    _ok("""create view a08view1 as select * from a08tab1;""")
    _ok("""grant select on a08tab1 to qauser11;""")
    _ok(""" grant select on a08view1 to qauser11;""")

    # qi_user3 can read both objects (0 rows each).
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema8;""")
    _sel(""" select * from a08tab1;""", 0)
    _sel(""" select * from a08view1;""", 0)

    # Drop only the view.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema8;""")
    _ok(""" drop view a08view1;""")

    # Table still selectable; view lookup now fails with 4082.
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema8;""")
    _sel(""" select * from a08tab1;""", 0)
    _err("""select * from a08view1;""", '4082')

    # Cleanup.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema8;""")
    _ok("""drop table a08tab1;""")
    _ok("""drop schema qi_schema8;""")
    _testmgr.testcase_end(desc)
def testa09(desc="""create view depends on one table, drop table"""):
    """View on a single table: after DROP TABLE ... CASCADE, both the
    table and the dependent view fail with 4082.
    """
    global _testmgr
    global _testlist
    global mydci
    if not _testmgr.testcase_begin(_testlist): return

    # Wrappers over the current session (global mydci re-read per call).
    def _ok(sql):
        mydci.expect_complete_msg(mydci.cmdexec(sql))

    def _sel(sql, rows):
        mydci.expect_selected_msg(mydci.cmdexec(sql), rows)

    def _err(sql, code):
        mydci.expect_error_msg(mydci.cmdexec(sql), code)

    # qi_user2 sets up table + view and grants SELECT on both.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""create schema qi_schema9;""")
    _ok("""set schema qi_schema9;""")
    _ok("""create table a09tab1( a int not null primary key, b int);""")
    _ok("""create view a09view1 as select * from a09tab1;""")
    _ok("""grant select on a09tab1 to qauser11;""")
    _ok(""" grant select on a09view1 to qauser11;""")

    # qi_user3 can read both objects (0 rows each).
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema9;""")
    _sel(""" select * from a09tab1;""", 0)
    _sel(""" select * from a09view1;""", 0)

    # Drop the base table with CASCADE (takes the view with it).
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema9;""")
    _ok(""" drop table a09tab1 cascade;""")

    # Both lookups now fail with 4082.
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema9;""")
    _err(""" select * from a09tab1;""", '4082')
    _err("""select * from a09view1;""", '4082')

    # Cleanup.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""drop schema qi_schema9;""")
    _testmgr.testcase_end(desc)
def testa10(desc="""create view depends on mixed tables, drop table"""):
    """View joining two tables: dropping one base table with CASCADE
    invalidates the view (4082) for a granted user.
    """
    global _testmgr
    global _testlist
    global mydci
    if not _testmgr.testcase_begin(_testlist): return

    # Wrappers over the current session (global mydci re-read per call).
    def _ok(sql):
        mydci.expect_complete_msg(mydci.cmdexec(sql))

    def _sel(sql, rows):
        mydci.expect_selected_msg(mydci.cmdexec(sql), rows)

    def _err(sql, code):
        mydci.expect_error_msg(mydci.cmdexec(sql), code)

    # qi_user2 creates two tables and a join view over them.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""create schema qi_schema10;""")
    _ok("""set schema qi_schema10;""")
    _ok("""create table a10tab1( a1 int not null primary key, b1 int,c1 varchar(10));""")
    _ok("""create table a10tab2( a2 int not null primary key, b2 int,c2 varchar(10));""")
    _ok("""create view a10view1 as select * from a10tab1,a10tab2 where a1=a2;""")
    _ok(""" grant select on a10view1 to qauser11;""")

    # qi_user3 can read the view (0 rows).
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema10;""")
    _sel(""" select * from a10view1;""", 0)

    # Drop one of the two base tables with CASCADE.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema10;""")
    _ok(""" drop table a10tab1 cascade;""")

    # The join view is gone: 4082.
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema10;""")
    _err(""" select * from a10view1;""", '4082')

    # Cleanup.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema10;""")
    _ok(""" drop table a10tab2 cascade;""")
    _ok("""drop schema qi_schema10;""")
    _testmgr.testcase_end(desc)
def testa11(desc="""create view depends on mixed table and view, drop table"""):
    """View chain over tables and another view: dropping a base table with
    CASCADE invalidates both dependent views (4082).
    """
    global _testmgr
    global _testlist
    global mydci
    if not _testmgr.testcase_begin(_testlist): return

    # Wrappers over the current session (global mydci re-read per call).
    def _ok(sql):
        mydci.expect_complete_msg(mydci.cmdexec(sql))

    def _sel(sql, rows):
        mydci.expect_selected_msg(mydci.cmdexec(sql), rows)

    def _err(sql, code):
        mydci.expect_error_msg(mydci.cmdexec(sql), code)

    # qi_user2 creates three tables, a view over two tables, and a second
    # view over a table plus the first view; grants SELECT on everything.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""create schema qi_schema11;""")
    _ok("""set schema qi_schema11;""")
    _ok("""create table a11tab1( a1 int not null primary key, b1 int,c1 varchar(10));""")
    _ok("""create table a11tab2( a2 int not null primary key, b2 int,c2 varchar(10));""")
    _ok("""create table a11tab3( a3 int not null primary key, b3 int,c3 varchar(10));""")
    _ok("""create view a11view1 as select a1,c2 from a11tab1, a11tab2 where a1>a2;""")
    _ok(""" create view a11view2 as select a1,b2 from a11tab2,a11view1 where a2=a1;""")
    _ok(""" grant select on a11view2 to qauser11;""")
    _ok(""" grant select on a11view1 to qauser11;""")
    _ok(""" grant select on a11tab1 to qauser11;""")
    _ok(""" grant select on a11tab2 to qauser11;""")
    _ok(""" grant select on a11tab3 to qauser11;""")

    # qi_user3 can read both views (0 rows each).
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema11;""")
    _sel(""" select * from a11view2;""", 0)
    _sel("""select * from a11view1;""", 0)

    # Dropping a11tab1 with CASCADE takes down the whole view chain.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema11;""")
    _ok("""drop table a11tab1 cascade;""")
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema11;""")
    _err(""" select * from a11view1;""", '4082')
    _err("""select * from a11view2;""", '4082')

    # Cleanup.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema11;""")
    _ok("""drop table a11tab2 cascade;""")
    _ok("""drop table a11tab3 cascade;""")
    _ok("""drop schema qi_schema11;""")
    _testmgr.testcase_end(desc)
def testa12(desc="""create view depends on mixed table and view, drop view"""):
    """View chain over tables and another view: dropping the intermediate
    view with CASCADE invalidates it and the view built on top of it (4082).
    """
    global _testmgr
    global _testlist
    global mydci
    if not _testmgr.testcase_begin(_testlist): return

    # Wrappers over the current session (global mydci re-read per call).
    def _ok(sql):
        mydci.expect_complete_msg(mydci.cmdexec(sql))

    def _sel(sql, rows):
        mydci.expect_selected_msg(mydci.cmdexec(sql), rows)

    def _err(sql, code):
        mydci.expect_error_msg(mydci.cmdexec(sql), code)

    # qi_user2 creates three tables, a view over two tables, and a second
    # view over a table plus the first view; grants SELECT on everything.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""create schema qi_schema12;""")
    _ok("""set schema qi_schema12;""")
    _ok("""create table a12tab1( a1 int not null primary key, b1 int,c1 varchar(10));""")
    _ok("""create table a12tab2( a2 int not null primary key, b2 int,c2 varchar(10));""")
    _ok("""create table a12tab3( a3 int not null primary key, b3 int,c3 varchar(10));""")
    _ok("""create view a12view1 as select a1,c2 from a12tab1, a12tab2 where a1>a2;""")
    _ok(""" create view a12view2 as select a1,b2 from a12tab2,a12view1 where a2=a1;""")
    _ok(""" grant select on a12view2 to qauser11;""")
    _ok(""" grant select on a12view1 to qauser11;""")
    _ok(""" grant select on a12tab1 to qauser11;""")
    _ok(""" grant select on a12tab2 to qauser11;""")
    _ok(""" grant select on a12tab3 to qauser11;""")

    # qi_user3 can read both views (0 rows each).
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema12;""")
    _sel(""" select * from a12view2;""", 0)
    _sel("""select * from a12view1;""", 0)

    # Drop the lower view with CASCADE; both views become invalid.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema12;""")
    _ok("""drop view a12view1 cascade;""")
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema12;""")
    _err(""" select * from a12view1;""", '4082')
    _err("""select * from a12view2;""", '4082')

    # Cleanup.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema12;""")
    _ok("""drop table a12tab1 cascade;""")
    _ok("""drop table a12tab2 cascade;""")
    _ok("""drop table a12tab3 cascade;""")
    _ok("""drop schema qi_schema12;""")
    _testmgr.testcase_end(desc)
def testa13(desc="""create view depends on an view, drop view"""):
    """Stacked views (view2 built on view1): dropping view1 with CASCADE
    makes it unselectable (4082) for a granted user.
    """
    global _testmgr
    global _testlist
    global mydci
    if not _testmgr.testcase_begin(_testlist): return

    # Wrappers over the current session (global mydci re-read per call).
    def _ok(sql):
        mydci.expect_complete_msg(mydci.cmdexec(sql))

    def _sel(sql, rows):
        mydci.expect_selected_msg(mydci.cmdexec(sql), rows)

    def _err(sql, code):
        mydci.expect_error_msg(mydci.cmdexec(sql), code)

    # qi_user2 builds table -> view1 -> view2 and grants SELECT on all three.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""create schema qi_schema13;""")
    _ok("""set schema qi_schema13;""")
    _ok("""create table a13tab1( a1 int not null primary key, b1 int,c1 varchar(10));""")
    _ok(""" create view a13view1 as select a1,b1 from a13tab1;""")
    _ok(""" create view a13view2 as select b1 from a13view1;""")
    _ok(""" grant select on a13tab1 to qauser11;""")
    _ok(""" grant select on a13view1 to qauser11;""")
    _ok(""" grant select on a13view2 to qauser11;""")

    # qi_user3 can read both views (0 rows each).
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema13;""")
    _sel(""" select * from a13view1;""", 0)
    _sel(""" select * from a13view2;""", 0)

    # Drop view1 with CASCADE; its lookup then fails with 4082.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema13;""")
    _ok(""" drop view a13view1 cascade;""")
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema13;""")
    _err(""" select * from a13view1;""", '4082')

    # Cleanup.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema13;""")
    _ok(""" drop table a13tab1 cascade;""")
    _ok(""" drop schema qi_schema13;""")
    _testmgr.testcase_end(desc)
def testa14(desc="""create view depends on an view, drop table"""):
    """Stacked views (view2 built on view1 built on a table): dropping the
    base table with CASCADE invalidates both views (4082).
    """
    global _testmgr
    global _testlist
    global mydci
    if not _testmgr.testcase_begin(_testlist): return

    # Wrappers over the current session (global mydci re-read per call).
    def _ok(sql):
        mydci.expect_complete_msg(mydci.cmdexec(sql))

    def _sel(sql, rows):
        mydci.expect_selected_msg(mydci.cmdexec(sql), rows)

    def _err(sql, code):
        mydci.expect_error_msg(mydci.cmdexec(sql), code)

    # qi_user2 builds table -> view1 -> view2 and grants SELECT on all three.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""create schema qi_schema14;""")
    _ok("""set schema qi_schema14;""")
    _ok("""create table a14tab1( a1 int not null primary key, b1 int,c1 varchar(10));""")
    _ok(""" create view a14view1 as select a1,b1 from a14tab1;""")
    _ok(""" create view a14view2 as select b1 from a14view1;""")
    _ok(""" grant select on a14tab1 to qauser11;""")
    _ok(""" grant select on a14view1 to qauser11;""")
    _ok(""" grant select on a14view2 to qauser11;""")

    # qi_user3 can read both views (0 rows each).
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema14;""")
    _sel(""" select * from a14view1;""", 0)
    _sel(""" select * from a14view2;""", 0)

    # Drop the base table with CASCADE; both views then fail with 4082.
    mydci = basic_defs.switch_session_qi_user2()
    _ok("""set schema qi_schema14;""")
    _ok(""" drop table a14tab1 cascade;""")
    mydci = basic_defs.switch_session_qi_user3()
    _ok("""set schema qi_schema14;""")
    _err(""" select * from a14view1;""", '4082')
    _err(""" select * from a14view2;""", '4082')

    # Cleanup.
    mydci = basic_defs.switch_session_qi_user2()
    _ok(""" drop schema qi_schema14;""")
    _testmgr.testcase_end(desc)
def testa15(desc="""create view depends on mixed views, drop table"""):
global _testmgr
global _testlist
global mydci
if not _testmgr.testcase_begin(_testlist): return
mydci = basic_defs.switch_session_qi_user2()
stmt = """create schema qi_schema15;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """set schema qi_schema15;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """create table a15tab1( a1 int not null primary key, b1 int,c1 varchar(10));"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """create table a15tab2( a2 int not null primary key, b2 int,c2 varchar(10));"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ create view a15view1 as select a1 from a15tab1;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ create view a15view2 as select b2 from a15tab2;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ create view a15view3 as select * from a15view1, a15view2 where a1>b2;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ grant select on a15tab1 to qauser11;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ grant select on a15tab2 to qauser11;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ grant select on a15view1 to qauser11;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ grant select on a15view2 to qauser11;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ grant select on a15view3 to qauser11;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
mydci = basic_defs.switch_session_qi_user3()
stmt = """set schema qi_schema15;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ select * from a15view3;"""
output = mydci.cmdexec(stmt)
mydci.expect_selected_msg(output,0)
stmt = """ select * from a15view1;"""
output = mydci.cmdexec(stmt)
mydci.expect_selected_msg(output,0)
stmt = """ select * from a15view2;"""
output = mydci.cmdexec(stmt)
mydci.expect_selected_msg(output,0)
mydci = basic_defs.switch_session_qi_user2()
stmt = """set schema qi_schema15;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ drop table a15tab1 cascade;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
mydci = basic_defs.switch_session_qi_user3()
stmt = """set schema qi_schema15;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ select * from a15view3;"""
output = mydci.cmdexec(stmt)
mydci.expect_error_msg(output, '4082')
stmt = """ select * from a15view1;"""
output = mydci.cmdexec(stmt)
mydci.expect_error_msg(output, '4082')
stmt = """ select * from a15view2;"""
output = mydci.cmdexec(stmt)
mydci.expect_selected_msg(output,0)
mydci = basic_defs.switch_session_qi_user2()
stmt = """set schema qi_schema15;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ drop table a15tab2 cascade;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """drop schema qi_schema15;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
_testmgr.testcase_end(desc)
def testa16(desc="""DDL operations,load/unload"""):
global _testmgr
global _testlist
global _dci
if not _testmgr.testcase_begin(_testlist): return
mydci = basic_defs.switch_session_qi_user2()
stmt = """create shared schema qi_schema16;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """set schema qi_schema16;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """create table a16tab1(a int, b int)no partition;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """create table a16tab2(a int, b int)no partition;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """insert into a16tab1 values(1,2),(3,4);"""
output = mydci.cmdexec(stmt)
mydci.expect_inserted_msg(output,2)
stmt = """ grant insert,select on a16tab1 to qauser11;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
mydci = basic_defs.switch_session_qi_user3()
stmt = """set schema qi_schema16;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """insert into a16tab1 values(2,13);"""
output = mydci.cmdexec(stmt)
mydci.expect_inserted_msg(output,1)
stmt = """load into a16tab2 select * from a16tab1;"""
output = mydci.cmdexec(stmt)
mydci.expect_error_msg(output, '4481')
stmt = """unload with purgedata from target into '/bulkload/4security'
select * from a16tab2;"""
output = mydci.cmdexec(stmt)
mydci.expect_error_msg(output, '4481')
mydci = basic_defs.switch_session_qi_user2()
stmt = """set schema qi_schema16;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ grant insert,select on a16tab2 to qauser11;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
mydci = basic_defs.switch_session_qi_user3()
stmt = """set schema qi_schema16;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """select * from a16tab2;"""
output = mydci.cmdexec(stmt)
mydci.expect_selected_msg(output,0)
stmt = """load into a16tab2 select * from a16tab1;"""
output = mydci.cmdexec(stmt)
mydci.expect_any_substr(output,'Rows Processed: 3' );
stmt = """unload with purgedata from target into '/bulkload/4security'
select * from a16tab2;"""
output = mydci.cmdexec(stmt)
mydci.expect_any_substr(output,'Rows Processed: 3' );
mydci = basic_defs.switch_session_qi_user2()
stmt = """set schema qi_schema16;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ revoke insert,select on a16tab1 from qauser11;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
mydci = basic_defs.switch_session_qi_user3()
stmt = """set schema qi_schema16;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """load into a16tab2 select * from a16tab1;"""
output = mydci.cmdexec(stmt)
mydci.expect_error_msg(output, '4481')
stmt = """unload with purgedata from target into '/bulkload/4security'
select * from a16tab2;"""
output = mydci.cmdexec(stmt)
#mydci.expect_error_msg(output, '4481')
mydci.expect_any_substr(output,'5 row(s) unloaded' );
mydci = basic_defs.switch_session_qi_user2()
stmt = """set schema qi_schema16;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """drop table a16tab1;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """drop table a16tab2;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """drop schema qi_schema16;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
_testmgr.testcase_end(desc)
def testa17(desc="""DDL operations,upsert"""):
global _testmgr
global _testlist
global _dci
if not _testmgr.testcase_begin(_testlist): return
mydci = basic_defs.switch_session_qi_user2()
stmt = """create shared schema qi_schema17;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """set schema qi_schema17;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """create table a17tab1(a int, b int)no partition;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
mydci = basic_defs.switch_session_qi_user3()
stmt = """set schema qi_schema17;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ upsert into a17tab1 values(1,2);"""
output = mydci.cmdexec(stmt)
mydci.expect_error_msg(output, '4481')
mydci = basic_defs.switch_session_qi_user2()
stmt = """set schema qi_schema17;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ grant insert on a17tab1 to qauser11;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
mydci = basic_defs.switch_session_qi_user3()
stmt = """set schema qi_schema17;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ upsert into a17tab1 values(1,2);"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
mydci = basic_defs.switch_session_qi_user2()
stmt = """set schema qi_schema17;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ revoke insert on a17tab1 from qauser11;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
mydci = basic_defs.switch_session_qi_user3()
stmt = """set schema qi_schema17;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ upsert into a17tab1 values(1,2);"""
output = mydci.cmdexec(stmt)
mydci.expect_error_msg(output, '4481')
mydci = basic_defs.switch_session_qi_user2()
stmt = """set schema qi_schema17;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ drop table a17tab1;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """drop schema qi_schema17;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
_testmgr.testcase_end(desc)
def testa18(desc="""DDL operations,merge"""):
global _testmgr
global _testlist
global _dci
if not _testmgr.testcase_begin(_testlist): return
mydci = basic_defs.switch_session_qi_user2()
stmt = """create shared schema qi_schema18;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """set schema qi_schema18;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """create table a18tab1(a int not null, b int, primary key (a));"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
mydci = basic_defs.switch_session_qi_user3()
stmt = """set schema qi_schema18;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ merge into a18tab1 on a= 10 when not matched then insert values(10,20);"""
output = mydci.cmdexec(stmt)
mydci.expect_error_msg(output, '4481')
mydci = basic_defs.switch_session_qi_user2()
stmt = """set schema qi_schema18;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ grant insert,update, select on a18tab1 to qauser11;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
mydci = basic_defs.switch_session_qi_user3()
stmt = """set schema qi_schema18;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ merge into a18tab1 on a= 10 when not matched then insert values(10,20);"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
mydci = basic_defs.switch_session_qi_user2()
stmt = """set schema qi_schema18;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ revoke insert on a18tab1 from qauser11;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
mydci = basic_defs.switch_session_qi_user3()
stmt = """set schema qi_schema18;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ merge into a18tab1 on a= 10 when not matched then insert values(10,20);"""
output = mydci.cmdexec(stmt)
mydci.expect_error_msg(output, '4481')
mydci = basic_defs.switch_session_qi_user2()
stmt = """set schema qi_schema18;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ drop table a18tab1;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """drop schema qi_schema18;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
_testmgr.testcase_end(desc)
def testa19(desc="""DDL operations,create table as"""):
global _testmgr
global _testlist
global _dci
if not _testmgr.testcase_begin(_testlist): return
mydci = basic_defs.switch_session_qi_user2()
stmt = """create shared schema qi_schema19;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """set schema qi_schema19;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """create table a19tab1(a int, b int)no partition;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
mydci = basic_defs.switch_session_qi_user3()
stmt = """set schema qi_schema19;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """create table a19tab2 as select * from a19tab1;"""
output = mydci.cmdexec(stmt)
mydci.expect_error_msg(output, '4481')
mydci = basic_defs.switch_session_qi_user2()
stmt = """set schema qi_schema19;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ grant select on a19tab1 to qauser11;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
mydci = basic_defs.switch_session_qi_user3()
stmt = """set schema qi_schema19;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """create table a19tab2 as select * from a19tab1;"""
output = mydci.cmdexec(stmt)
mydci.expect_inserted_msg(output,0)
stmt = """ insert into a19tab2 values(1,2);"""
output = mydci.cmdexec(stmt)
mydci.expect_inserted_msg(output,1)
mydci = basic_defs.switch_session_qi_user2()
stmt = """set schema qi_schema19;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ revoke select on a19tab1 from qauser11;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
mydci = basic_defs.switch_session_qi_user3()
stmt = """set schema qi_schema19;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """create table a19tab3 as select * from a19tab1;"""
output = mydci.cmdexec(stmt)
mydci.expect_error_msg(output, '4481')
mydci = basic_defs.switch_session_qi_user2()
stmt = """set schema qi_schema19;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ drop table a19tab1;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """ drop table a19tab2;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
stmt = """drop schema qi_schema19;"""
output = mydci.cmdexec(stmt)
mydci.expect_complete_msg(output)
_testmgr.testcase_end(desc)
| [
"steve.varnau@hp.com"
] | steve.varnau@hp.com |
3ef43777b05972b64a9d10046115d44bce3e8128 | 0c672b0b8431064617831d16bf0982d5d3ce6c27 | /utils/proxy_api.py | bf5056d222433e6c27a71950ba9f9d043be6d898 | [] | no_license | buxuele/amazon_books | 617327376044ffd4e760fdc1a71962119717cfe8 | 691bd3e48bd1730dbc4a4a855e84e0b1c3e9c2ec | refs/heads/master | 2023-03-09T23:18:14.730828 | 2021-03-01T10:53:47 | 2021-03-01T10:53:47 | 342,610,804 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,017 | py | import time
import requests
from utils.my_timer import timer
from utils.get_user_agent import get_a_ua
from utils.mongoDB import Mongo
import config # 根目录 数据库名称。
from pprint import pprint
from concurrent.futures import ThreadPoolExecutor, wait, ALL_COMPLETED
class SmallProxy:
def __init__(self, china=True):
self.country = china
self.m = Mongo(config.proxy_db, config.proxy_coll)
self.url = "https://ip.jiangxianli.com/api/proxy_ips"
self.headers = {'User-Agent': get_a_ua()}
self.real_ip = self.find_myself()
@staticmethod
def find_myself():
target = 'http://httpbin.org/ip'
resp = requests.get(target)
return resp.json()["origin"]
# 获取更多的代理。这一部分写的很漂亮啊。自己写的就是很得意。
def make_payloads(self):
nations = ["俄罗斯", "美国", "加拿大", "日本", "德国", "香港", "印度尼西亚", "法国"]
if self.country:
pay = [{"page": c, "country": "中国", "order_by": "speed"} for c in range(1, 5)]
else:
pay = [{"page": 1, "country": b, "order_by": "speed"} for b in nations]
return pay
def greet(self, pay):
resp = requests.get(self.url, params=pay, headers=self.headers)
if resp.status_code == 200:
return resp.json()
else:
print(f"Sorry! 这个代理网站有问题!")
return None
@timer
def get_all_proxy(self):
temp = []
for k in self.make_payloads():
d = self.greet(k) # d <dict>
if d:
all_data = d["data"]["data"]
for t in all_data:
# if t["anonymity"] == 2: # 按匿名度来排除。
a = t["protocol"] + "://" + t["ip"] + ":" + t["port"]
temp.append(a)
print(temp)
print(len(temp))
return temp
def speed_status(self, proxy=None):
url = "http://httpbin.org/ip"
resp = requests.get(url, proxies={"http": proxy}, timeout=1)
# 只有当前使用的代理与自己真实的ip 不相等的时候,才说明这个代理是有效的。
if resp.status_code == 200 and resp.json()["origin"] != self.real_ip:
print("test ip", proxy)
print("real ip : ", resp.json()["origin"])
self.m.add_to_db({"url": proxy})
@timer
def run(self):
fake_proxy = self.get_all_proxy()
# 这里设置为20就很合适了,太多反而不利。
with ThreadPoolExecutor(max_workers=16) as executor:
future_tasks = [executor.submit(self.speed_status, p) for p in fake_proxy]
wait(future_tasks, return_when=ALL_COMPLETED)
def show_product(self):
self.m.get_unique(show=True)
if __name__ == '__main__':
p = SmallProxy(china=True)
# p.main()
p.run()
time.sleep(.1)
p.show_product()
| [
"baogexuxuele@163.com"
] | baogexuxuele@163.com |
8454ccc4b597b6f6f9f3ef369f835a61140dab1a | 4795a38e5d924301be3b599576f0bb4a114ff742 | /lambdata_wel51x/setup.py | b8c96a711954da53cce862130b915e55cef52b99 | [] | no_license | wel51x/lambdata | 8e9a3cf9612f1bb157b27838a457b268da1dc4ed | 3d001df4641442838b7506a72f38cb176371738d | refs/heads/master | 2020-04-29T18:37:28.238625 | 2019-03-19T22:34:47 | 2019-03-19T22:34:47 | 176,329,400 | 0 | 1 | null | 2019-03-19T20:32:55 | 2019-03-18T16:44:56 | Python | UTF-8 | Python | false | false | 757 | py | """
lambdata - a collection of Data Science helper functions
"""
import setuptools
REQUIRED = [
"numpy",
"pandas"
]
with open("README.md", "r") as fh:
LONG_DESCRIPTION = fh.read()
setuptools.setup(
name="lambdata-wel51x",
version="0.0.5",
author="Winston",
description="A collection of Data Science helper functions",
long_description=LONG_DESCRIPTION,
long_description_content_type="text/markdown",
url="https://github.com/wel51x/lambdata",
packages=setuptools.find_packages(),
python_requires=">=3.5",
install_requires=REQUIRED,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
| [
"wxl51@yahoo.com"
] | wxl51@yahoo.com |
eecde9e85f8bbc1b9eda6d9cab643cadd93edcab | d970e32d23e84fe0f6b5ba1694e2958d52fce586 | /sample_scripts/sample_tokenization.py | f165ed859675d95ce1ca9d1aa24545228ddd3e2f | [
"MIT"
] | permissive | Kensuke-Mitsuzawa/sample-codes-supporters-tutorial | 8e6f1ed794732fa87176333286e65898e321f60f | ae9b544ddd3a782e76a30af257b43f88341ba696 | refs/heads/master | 2023-05-31T22:15:03.313349 | 2018-02-27T02:07:00 | 2018-02-27T02:07:00 | 79,502,186 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,661 | py | from JapaneseTokenizer import MecabWrapper
from typing import List, Tuple, Dict, Union, Any
import json
import logging
import collections
import itertools
logger = logging.getLogger()
logger.setLevel(10)
SLEEP_TIME = 2
"""形態素分割のサンプルコードを示します
Python3.5.1の環境下で動作を確認しています。
"""
__author__ = "Kensuke Mitsuzawa"
__author_email__ = "kensuke.mit@gmail.com"
__license_name__ = "MIT"
def tokenize_text(input_text:str,
tokenizer_obj:MecabWrapper,
pos_condition:List[Tuple[str,...]])->List[str]:
"""* What you can do
- 1文書に対して、形態素分割を実施する
"""
### 形態素分割;tokenize() -> 品詞フィルタリング;filter() -> List[str]に変換;convert_list_object()
return tokenizer_obj.tokenize(input_text).filter(pos_condition=pos_condition).convert_list_object()
### 原型(辞書系)に変換せず、活用された状態のまま、欲しい場合は is_surface=True のフラグを与える
#return tokenizer_obj.tokenize(input_text, is_surface=True).filter(pos_condition=pos_condition).convert_list_object()
def aggregate_words(seq_tokenized:List[List[str]])->collections.Counter:
"""* What you can do
- 形態素の集計カウントを実施する
* Params
- seq_tokenized
>>> [['スター・ウォーズ', 'エピソード4', '新たなる希望', 'スター・ウォーズ', 'エピソード4', 'なる', 'きぼう', 'STAR WARS', 'IV', 'A NEW HOPE', '1977年', 'する', 'アメリカ映画']]
"""
### 二次元リストを1次元に崩す; List[List[str]] -> List[str] ###
seq_words = itertools.chain.from_iterable(seq_tokenized)
word_frequency_obj = collections.Counter(seq_words)
return word_frequency_obj
def aggregate_words_by_label():
"""* What you can do
-
"""
pass
def main(tokenizer_obj:MecabWrapper,
seq_text_data:List[Dict[str,Any]],
pos_condition:List[Tuple[str,...]]):
"""* What you can do
- 形態素解析機の呼び出し
- 単語集計
"""
# --------------------------------------------------------------------------------------------------------------#
# 単純単語集計をする
### Python独特のリスト内包表記を利用する(リスト内包表記の方が実行速度が早い) ###
seq_tokenized_text = [
tokenize_text(input_text=wiki_text_obj['text'],tokenizer_obj=tokenizer_obj, pos_condition=pos_condition)
for wiki_text_obj in seq_text_data
]
### 単語集計を実施する ###
word_frequency_obj = aggregate_words(seq_tokenized_text)
### Counterオブジェクトはdict()関数で辞書化が可能 ###
dict(word_frequency_obj)
### 頻度順にソートするために [(word, 頻度)] の形にする
seq_word_frequency = [(word, frequency) for word, frequency in dict(word_frequency_obj).items()]
### 単語頻度順にソート ###
print('Top 100 word frequency without label')
print(sorted(seq_word_frequency, key=lambda x:x[1], reverse=True)[:100])
# --------------------------------------------------------------------------------------------------------------#
# ラベルごとに単語を集計する
### ラベル情報も保持しながら形態素分割の実行 ###
seq_tokenized_text = [
(wiki_text_obj['gold_label'], tokenize_text(input_text=wiki_text_obj['text'],tokenizer_obj=tokenizer_obj, pos_condition=pos_condition))
for wiki_text_obj in seq_text_data
]
#### ラベルごとの集約する ####
##### ラベルごとに集計するためのキーを返す匿名関数 #####
key_function= lambda x:x[0]
#### 必ず、groupbyの前にsortedを実施すること
g_object = itertools.groupby(sorted(seq_tokenized_text, key=key_function), key=key_function)
### リスト内包表記化も可能。わかりやすさのために、通常のループ表記をする ###
for label_name, element_in_label in g_object:
### element_in_label はgenerator objectで [(label, [word])]の構造を作る ###
seq_list_tokens_with_label = list(element_in_label)
seq_list_tokens = [label_tokens[1] for label_tokens in seq_list_tokens_with_label]
word_frequency_obj_label = aggregate_words(seq_list_tokens)
seq_word_frequency_label = [(word, frequency) for word, frequency in dict(word_frequency_obj_label).items()]
print('*'*30)
print('Top 100 words For label = {}'.format(label_name))
print(sorted(seq_word_frequency_label, key=lambda x:x[1], reverse=True)[:100])
if __name__ == '__main__':
### MecabWrapperを作る ###
mecab_obj = MecabWrapper(dictType='ipadic')
### 取得したい品詞だけを定義する ###
pos_condition = [('名詞', '固有名詞'), ('動詞', '自立'), ('形容詞', '自立')]
### wikipedia summaryデータを読み込み ###
print('=' * 50)
path_wikipedia_summary_json = './wikipedia_data/wikipedia-summary.json'
with open(path_wikipedia_summary_json, 'r') as f:
seq_wiki_summary_text = json.load(f)
main(tokenizer_obj=mecab_obj,
pos_condition=pos_condition,
seq_text_data=seq_wiki_summary_text)
### wikipedia fullデータを読み込み ###
print('=' * 50)
path_wikipedia_full_json = './wikipedia_data/wikipedia-full.json'
with open(path_wikipedia_full_json, 'r') as f:
seq_wiki_full_text = json.load(f)
main(tokenizer_obj=mecab_obj,
pos_condition=pos_condition,
seq_text_data=seq_wiki_full_text) | [
"kensuke.mit@gmail.com"
] | kensuke.mit@gmail.com |
7685a07ea0e0d44c960990af54d51824a35aed57 | 13153768dc35472d847353c6a3373fff24c159bb | /GA/aux.py | 7161765ea80b4841be6c60cd1e5c719085c61b3e | [] | no_license | enderteszla/TUNM | 8eab0716a7b22552f2b8a53c704bd59ee3965106 | d172e38691b21ce80c741cc0e6a8b76f96b54fd0 | refs/heads/master | 2016-09-06T01:03:21.384339 | 2015-03-29T23:32:37 | 2015-03-29T23:32:37 | 32,137,863 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 155 | py | __all__ = ['best']
best = lambda population: [
ind for ind in population
if ind.fitness.values == min([i.fitness.values for i in population])
][0] | [
"lev.bezborodov@b8cebac3-3f1a-1c64-7dfa-60fbc24048fd"
] | lev.bezborodov@b8cebac3-3f1a-1c64-7dfa-60fbc24048fd |
25e16b899e4063fcda6e3fafd0bc309ec46ee237 | 58e09fac582a76428819e167e42e60765d11bb11 | /space/lib/python3.7/encodings/euc_jp.py | f2043f483c14dcf9d4db30740df0f2159cfe1ea2 | [] | no_license | shanthimadugundi/DB_Project | 25eb2a0e7504f81484ad11c0fa9e902b038c85b4 | b5ba55af1bcddde164cecc60d331d615dd477165 | refs/heads/master | 2020-04-27T05:14:56.107466 | 2019-03-06T05:31:23 | 2019-03-06T05:31:23 | 174,075,941 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 67 | py | /Users/shanthimadugundi/anaconda3/lib/python3.7/encodings/euc_jp.py | [
"shanthimadugundi@Shanthis-MacBook-Pro.local"
] | shanthimadugundi@Shanthis-MacBook-Pro.local |
36592ba9364dac1e2332e9d9e3ec3f1905d902af | 55aabd8c929bce49d782052a8ee4c2bad10c054a | /PTI/Semanas/semana_12/Floofy/floofy_project/group_chat/consumers.py | f202efd173711d9b3c5dde3df69e8f94c5d424a9 | [] | no_license | fc51648/floofpoof | 0d6504595f94d99789adf86342ebda316976d2f6 | 56f51857440580cba66453a0198cc047b6650894 | refs/heads/master | 2021-01-05T01:13:14.818140 | 2020-05-31T12:03:10 | 2020-05-31T12:03:10 | 240,823,265 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,494 | py | import asyncio
import json
from datetime import datetime
from channels.consumer import AsyncConsumer
from channels.db import database_sync_to_async
from .models import Thread, Message
from login.models import User
class GroupChatConsumer(AsyncConsumer):
async def websocket_connect(self,event):
print("connected", event)
await self.send({
"type": "websocket.accept"
})
groupId = self.scope['url_route']['kwargs']['groupId']
thread_obj = await self.get_thread(groupId)
self.thread_obj = thread_obj
chat_room = f"thread_{thread_obj.id}"
self.chat_room = chat_room
await self.channel_layer.group_add(
chat_room,
self.channel_name
)
async def websocket_receive(self,event):
print("received", event)
front_txt = event.get('text', None)
if front_txt is not None:
loaded_dict_data = json.loads(front_txt)
msg = loaded_dict_data.get('message')
senderId = loaded_dict_data.get('from')
sender = await self.get_user(senderId)
timestamp = str(datetime.now().strftime("%B %d, %Y, %I:%M %p"))
sender_name= 'not_auth'
if sender.is_authenticated:
sender_name = sender.__str__()
myResponse = {
'message': msg,
'sender_name': sender_name,
'timestamp': timestamp
}
await self.create_message(sender, msg)
#broadcasts the message event to be sent
await self.channel_layer.group_send(
self.chat_room,
{
"type": "chat_message",
"text": json.dumps(myResponse)
}
)
async def chat_message(self,event):
#sends the message
await self.send({
"type": "websocket.send",
"text": event['text']
})
async def websocket_disconnect(self,event):
print("disconnected", event)
@database_sync_to_async
def get_thread(self, groupId):
return Thread.objects.get_or_new(groupId)[0]
@database_sync_to_async
def get_user(self, senderId):
return User.objects.get(id=senderId)
@database_sync_to_async
def create_message(self, me, msg):
thread_obj = self.thread_obj
return Message.objects.create(thread= thread_obj, user= me, message= msg) | [
"50746014+mika359@users.noreply.github.com"
] | 50746014+mika359@users.noreply.github.com |
8b50a7f79df8def13350eb875f2f7b867647b445 | e4d67c58fc137a43219bd0929b1726ea3e7c162e | /crypto.py | 3ca54435fb39617611e1c75a243571c415ddc288 | [] | no_license | hanumancode/Crypto_Price_Search-Python | c073e5fddeee50f692575386da116503e572382e | 6f79f33f9f79ed36dbbee667d6cd7d0743a7e7cf | refs/heads/master | 2020-03-21T06:25:49.824151 | 2018-06-23T04:52:25 | 2018-06-23T04:52:25 | 138,218,901 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,273 | py | import requests
while True:
# base URLs using version 1 of the CoinMarketCap API (version 1 expires Nov. 2018)
globalURL = "https://api.coinmarketcap.com/v1/global/"
tickerURL = "https://api.coinmarketcap.com/v1/ticker/"
# get data from CoinMarketCap GlobalURL
r = requests.get(globalURL)
data = r.json()
globalMarketCap = data['total_market_cap_usd']
bitcoin_percentage_of_market_cap = data['bitcoin_percentage_of_market_cap']
# menu
print()
print("Crypto Price Query")
print("Global cap of all cryptos: $" + str(globalMarketCap))
print("Bitcoin dominance: " + str(bitcoin_percentage_of_market_cap) + "%")
print("Enter 'all' or 'name of crypto' i.e. bitcoin, ripple, tron, ethereum,...")
print()
choice = input("Enter crypto symbol or name: ")
if choice == "all":
r = requests.get(tickerURL)
data = r.json()
for x in data:
ticker = x['symbol']
price = x['price_usd']
print(ticker + ":\t\t$" + price)
print()
else:
tickerURL += '/'+choice+'/'
r = requests.get(tickerURL)
data = r.json()
ticker = data[0]['symbol']
price = data[0]['price_usd']
print(ticker + ":\t\t$" + price)
print()
choice2 = input("Search for another cryptocurrency price? (y/n)")
if choice2 == "y":
continue
if choice2 == "n":
break
| [
"garethjones@cybertron.local"
] | garethjones@cybertron.local |
9659ced7af3577de6990b453926b3485f97d8211 | 5fac032f8886a0d1ea1c46b6bb3d5e691dd93263 | /post_tags/__init__.py | 6b7a5bc215c0fa1eae93caf1444a95951533a6ae | [] | no_license | nss-day-cohort-50/rare-server-cohort-50-python-prowlers | b0959104ca034f7b71410a9abebcb19a229a3b92 | d4117e2eb0128d23eb539f2467afb886a93b7543 | refs/heads/main | 2023-08-23T01:27:40.017102 | 2021-10-29T16:21:42 | 2021-10-29T16:21:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 35 | py | from .request import get_post_tags
| [
"t.hall89d@gmail.com"
] | t.hall89d@gmail.com |
fcef5eb2c6ab66ee64ce82f47999b9459c1ee354 | 833d4cc8ec460902d0a8beb7f2e1ab13ba9114d0 | /mppsolar/inout/mqttio.py | faa853fd2256a0d20b39218bd899ffb53f307496 | [
"MIT"
] | permissive | jblance/mpp-solar | 3d834e88715591ec63c2abbff97b41417286f451 | d541a7ec05754c570c44db21f271f5628c7f19e3 | refs/heads/master | 2023-08-31T10:09:36.558171 | 2023-08-24T22:42:36 | 2023-08-24T22:42:36 | 102,808,429 | 284 | 141 | MIT | 2023-09-11T20:47:11 | 2017-09-08T02:35:31 | Python | UTF-8 | Python | false | false | 4,050 | py | import binascii
import json as js
import logging
import time
import paho.mqtt.client as mqttc
# import paho.mqtt.publish as publish
# import paho.mqtt.subscribe as subscribe
from ..helpers import get_kwargs
from .baseio import BaseIO
log = logging.getLogger("MqttIO")
class MqttIO(BaseIO):
def __init__(self, *args, **kwargs) -> None:
# self._serial_port = device_path
# self._serial_baud = serial_baud
self.mqtt_broker = get_kwargs(kwargs, "mqtt_broker", "localhost")
self.mqtt_port = self.mqtt_broker.port
self.mqtt_user = self.mqtt_broker.username
self.mqtt_pass = self.mqtt_broker.password
# self.mqtt_port = get_kwargs(kwargs, "mqtt_port", 1883)
# self.mqtt_user = get_kwargs(kwargs, "mqtt_user")
# self.mqtt_pass = get_kwargs(kwargs, "mqtt_pass")
self.client_id = get_kwargs(kwargs, "client_id")
log.info(
f"__init__: client_id: {self.client_id}, mqtt_broker: {self.mqtt_broker}, port: {self.mqtt_port}, user: {self.mqtt_user}, pass: {self.mqtt_pass}"
)
self._msg = None
def sub_cb(self, client, userdata, message):
log.debug(f"Mqttio sub_cb got msg, topic: {message.topic}, payload: {message.payload}")
self._msg = message
def send_and_receive(self, *args, **kwargs) -> dict:
full_command = get_kwargs(kwargs, "full_command")
command = get_kwargs(kwargs, "command")
client_id = self.client_id
wait_time = 5
# response_line = None
command_topic = f"{client_id}/command"
result_topic = f"{client_id}/result"
# print(self.mqtt_broker)
# Create mqtt client
# Client(client_id="", clean_session=True, userdata=None, protocol=MQTTv311, transport="tcp")
mqtt_client = mqttc.Client()
# mqtt_client.on_connect = on_connect
if self.mqtt_user is not None and self.mqtt_pass is not None:
# auth = {"username": self.mqtt_user, "password": self.mqtt_pass}
log.info(
f"Using mqtt authentication, username: {self.mqtt_user}, password: [supplied]"
)
mqtt_client.username_pw_set(self.mqtt_user, password=self.mqtt_pass)
else:
log.debug("No mqtt authentication used")
# auth = None
mqtt_client.connect(self.mqtt_broker, port=self.mqtt_port)
command_hex = binascii.hexlify(full_command)
payload = {"command": command, "command_hex": command_hex.decode()}
payload = js.dumps(payload)
log.debug(f"Publishing {payload} to topic: {command_topic}")
mqtt_client.publish(command_topic, payload=payload)
mqtt_client.on_message = self.sub_cb
mqtt_client.subscribe(result_topic)
mqtt_client.loop_start()
time.sleep(wait_time)
mqtt_client.loop_stop(force=False)
if self._msg is None:
# Didnt get a result
return {
"ERROR": [
f"Mqtt result message not received on topic {result_topic} after {wait_time}sec",
"",
]
}
else:
msg_topic = self._msg.topic
# decode the payload
# payload should be a json dumped byte string
# payload: b'{"command_hex": "515049beac0d", "result": "", "command": "QPI"}'
log.debug(
f"mqtt raw response on {self._msg.topic} was: {self._msg.payload}, payload type: {type(self._msg.payload)}"
)
# Return the byte-string to a dict
payload_dict = js.loads(self._msg.payload)
# Get 'results', and convert back to bytes
result = binascii.unhexlify(payload_dict["result"])
# TODO: Currently ignoring this - might want to update return types at some point
cmd = payload_dict["command"]
self._msg = None
log.debug(f"mqtt response on {msg_topic} for command {cmd} was: {result}")
return result
| [
"john.blance@gmail.com"
] | john.blance@gmail.com |
963851975254a22ca724f644a061194ef41869cf | fd2f83e58ef2639e03645cc056208a05ed868321 | /linear_regression.py | d4b15c9d2f289a12c4ba30435b8f7ed025af9e0f | [] | no_license | shishirkumar1996/StockMarketPrediction | d642b6a87256e52127e627bff0c2d24d5f6a1598 | ebaa500dbdf1b992c5722c591da03193f3df92f2 | refs/heads/master | 2020-04-10T05:03:31.384883 | 2018-12-15T10:53:31 | 2018-12-15T10:53:31 | 124,264,408 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 224 | py | from statistics import mean
def best_fit_slope_and_intercept(xs, ys):
    """Return (slope, intercept) of the least-squares line through (xs, ys).

    Uses the classic normal-equation form:
        m = (mean(x)*mean(y) - mean(x*y)) / (mean(x)^2 - mean(x^2))
        b = mean(y) - m*mean(x)

    The element-wise products are built with zip()/generators, so plain
    Python sequences work as well as numpy arrays. (The original relied on
    `xs * ys` being element-wise, which is only true for numpy arrays; with
    lists it either crashes or silently repeats the sequence.)
    """
    mean_x = mean(xs)
    mean_y = mean(ys)
    mean_xy = mean(x * y for x, y in zip(xs, ys))
    mean_xx = mean(x * x for x in xs)
    m = (mean_x * mean_y - mean_xy) / (mean_x * mean_x - mean_xx)
    b = mean_y - m * mean_x
    return m, b
| [
"shishirsharad@yahoo.com"
] | shishirsharad@yahoo.com |
54291cfa23973b4e7bcde6cafed94401af11aaf0 | 9b96bc6591bae6c82f8548badc92b522380806b0 | /mini_internet_shop/mini_internet_shop/urls.py | 2af1bdf8074ab95c522338d32a81039a7dcc6bd4 | [
"MIT"
] | permissive | Aijana-github/mini_shop | f694156a13e0eddd95de591a4c82dcb47213e553 | 1413b61177cc5ced7075c4ebd9f5e065950be0e2 | refs/heads/main | 2023-03-13T04:55:44.193121 | 2021-02-26T15:19:18 | 2021-02-26T15:19:18 | 341,837,635 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 889 | py | """mini_internet_shop URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
    # Django admin site.
    path('admin/', admin.site.urls),
    # Everything else is delegated to the Online_shop app's URLconf.
    path('',include('Online_shop.urls')),
]
| [
"ayzhana.abdrahmanova@mail.ru"
] | ayzhana.abdrahmanova@mail.ru |
8d308bb5fcc1a686835c15b6f0c7d4dabfde7c44 | f9b7930e6f43eca26abf87b39961fc2d022db54a | /Python/medium/338. Counting Bits.py | 01ee506d021c0422aa75949e9d17355471bf95da | [] | no_license | LRenascence/LeetCode | 639452dd3bf65a14d0056c01e203a7082fbdc326 | 1a0e1d1503e0a7bff6917491a964a08c572827fb | refs/heads/master | 2021-05-12T03:41:35.346377 | 2021-01-07T23:39:14 | 2021-01-07T23:39:14 | 117,622,661 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 782 | py | """
Given a non negative integer number num. For every numbers i in the range 0 ≤ i ≤ num calculate the number of 1's in their binary representation and return them as an array.
Example 1:
Input: 2
Output: [0,1,1]
Example 2:
Input: 5
Output: [0,1,1,2,1,2]
Follow up:
It is very easy to come up with a solution with run time O(n*sizeof(integer)). But can you do it in linear time O(n) /possibly in a single pass?
Space complexity should be O(n).
Can you do it like a boss? Do it without using any builtin function like __builtin_popcount in c++ or in any other language.
"""
class Solution:
    def countBits(self, num: int) -> List[int]:
        """Return a list where entry i is the number of set bits in i, 0 <= i <= num.

        Dynamic programming in O(num): bits(i) = bits(i >> 1) + (i & 1),
        since shifting right drops exactly the lowest bit.
        """
        counts = [0] * (num + 1)
        for value in range(1, num + 1):
            counts[value] = counts[value >> 1] + (value & 1)
        return counts
"im.renascence@gmail.com"
] | im.renascence@gmail.com |
f2946a4c23ad594cc0f4b8c07936289cc61da596 | c851b56d27b8d9ccb8d288832ede6efe17449e50 | /app.py | cbf4b5f98c36c1df1c064e5d3b957662ebffdea6 | [] | no_license | Treviisolion/FlaskBlogly | 128aea5e98d3b1ea622ab56a31d9c283bbf9a730 | 5aacc997a304f07d91ce863b055eaafc5bccb099 | refs/heads/main | 2023-01-12T12:45:01.604980 | 2020-11-24T23:42:09 | 2020-11-24T23:42:09 | 310,968,801 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,118 | py | """Blogly application."""
from flask_debugtoolbar import DebugToolbarExtension
from flask import Flask, redirect, render_template, request, send_file
from models import db, connect_db, User, Post, Tag, PostTag, DEFAULT_IMAGE
import os
app = Flask(__name__)

# Database: use DATABASE_URL when deployed (e.g. Heroku), else a local Postgres DB.
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL', 'postgresql:///blogly')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
# Echo generated SQL to the log; useful in development, noisy in production.
app.config['SQLALCHEMY_ECHO'] = True
connect_db(app)
# Session signing key; the literal here is only a development fallback.
app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY', 'JohnathonAppleseed452')
# Debug toolbar is disabled; uncomment to enable it locally.
# app.config['DEBUG_TB_INTERCEPT_REDIRECTS'] = True
# debug = DebugToolbarExtension(app)
# Create tables for all models at import time (no-op when they already exist).
db.create_all()
@app.route('/')
def redirect_to_users():
    """Send the root URL to /users, the app's real landing page."""
    return redirect('/users')
@app.route(DEFAULT_IMAGE)
def return_default_user():
    """Serve the bundled default profile image at the models.DEFAULT_IMAGE URL."""
    # DEFAULT_IMAGE is an absolute URL path; strip the leading '/' so
    # send_file resolves it relative to the working directory.
    return send_file(DEFAULT_IMAGE[1:])
#########
# Users #
#########
@app.route('/users')
def show_users():
    """Render the listing of every user."""
    return render_template('users/users.html', users=User.query.all())
@app.route('/users/<int:userid>')
def show_user(userid):
    """Render one user's detail page along with their posts (404 if unknown)."""
    person = User.query.get_or_404(userid)
    return render_template('users/user.html', user=person, posts=person.posts)
@app.route('/users/new', methods=['GET'])
def show_new_user_form():
    """Render the blank form for creating a new user."""
    return render_template('users/new_user.html')
@app.route('/users/new', methods=['POST'])
def create_user():
    """Create a user from the submitted form.

    Re-renders the form with warnings when either name is missing;
    otherwise persists the user and redirects to the user list.
    """
    first_name = request.form.get('first_name', None)
    last_name = request.form.get('last_name', None)
    image_url = request.form.get('image_url', None)

    # If information is somehow not provided, show warnings to the user.
    missing_first_name = not first_name
    missing_last_name = not last_name
    if missing_first_name or missing_last_name:
        return render_template('users/new_user.html',
                               missing_first_name=missing_first_name,
                               missing_last_name=missing_last_name)

    # Only pass image_url through when one was supplied, so the model's
    # default image applies otherwise (replaces the duplicated constructor
    # branches in the original).
    extra = {'image_url': image_url} if image_url else {}
    new_user = User(first_name=first_name, last_name=last_name, **extra)

    db.session.add(new_user)
    db.session.commit()
    return redirect('/users')
@app.route('/users/<int:userid>/edit', methods=['GET'])
def show_edit_user_form(userid):
    """Render the edit form for the given user (404 if unknown)."""
    return render_template('users/edit_user.html', user=User.query.get_or_404(userid))
@app.route('/users/<int:userid>/edit', methods=['POST'])
def edit_user(userid):
    """Apply the submitted form changes to a user, then return to the list."""
    person = User.query.get_or_404(userid)
    form = request.form
    person.update_user(form.get('first_name', None),
                       form.get('last_name', None),
                       form.get('image_url', None))
    db.session.commit()
    return redirect('/users')
@app.route('/users/<int:userid>/delete', methods=['POST'])
def delete_user(userid):
    """Remove the given user and return to the user list."""
    doomed = User.query.get_or_404(userid)
    db.session.delete(doomed)
    db.session.commit()
    return redirect('/users')
#########
# Posts #
#########
@app.route('/users/<int:userid>/posts/new', methods=['GET'])
def show_new_post_form(userid):
    """Render the new-post form for the given user, offering every tag."""
    author = User.query.get_or_404(userid)
    return render_template('posts/new_post.html', user=author, tags=Tag.query.all())
@app.route('/users/<int:userid>/posts/new', methods=['POST'])
def create_post(userid):
    """Create a post (with its tag links) for the given user.

    Re-renders the form with warnings when title or content is missing.
    """
    user = User.query.get_or_404(userid)
    title = request.form.get('title', None)
    content = request.form.get('content', None)
    # Checked tag ids arrive as strings; keep them distinct from Tag rows
    # (the original reused one `tags` name for both, which was confusing).
    tag_ids = request.form.getlist('tags')

    # If information is somehow not provided, show warnings to the user.
    missing_title = not title
    missing_content = not content
    if missing_title or missing_content:
        all_tags = Tag.query.all()
        return render_template('posts/new_post.html', user=user,
                               missing_content=missing_content,
                               missing_title=missing_title, tags=all_tags)

    new_post = Post(title=title, content=content, user_id=user.id)
    db.session.add(new_post)
    # Commit first so new_post.id is assigned before creating the links.
    db.session.commit()

    db.session.add_all(
        [PostTag(post_id=new_post.id, tag_id=int(tag_id)) for tag_id in tag_ids]
    )
    db.session.commit()
    return redirect(f'/users/{userid}')
@app.route('/posts/<int:postid>')
def show_post(postid):
    """Render a single post with its author and tags (404 if unknown)."""
    entry = Post.query.get_or_404(postid)
    return render_template('posts/post.html', post=entry, user=entry.user, tags=entry.tags)
@app.route('/posts/<int:postid>/edit', methods=['GET'])
def show_edit_post_form(postid):
    """Render the edit form for a post, offering every tag for selection."""
    entry = Post.query.get_or_404(postid)
    return render_template('posts/edit_post.html', post=entry, user=entry.user,
                           tags=Tag.query.all())
@app.route('/posts/<int:postid>/edit', methods=['POST'])
def edit_post(postid):
    """Edit a post: replace its tag links wholesale, then update its fields.

    Cleanup relative to the original: the large commented-out tag-diffing
    experiment, the unused `Tag.query.all()` query it relied on, and a
    redundant back-to-back commit were removed; behavior is unchanged.
    """
    post = Post.query.get_or_404(postid)

    title = request.form.get('title', None)
    content = request.form.get('content', None)
    checked_tags = request.form.getlist('tags')

    # Drop every existing link; deletes are applied at flush time, so
    # iterating the relationship while scheduling deletes is safe.
    for post_tag in post.post_tags:
        db.session.delete(post_tag)
    # Recreate a link for every tag that was checked in the form.
    for checked_tag in checked_tags:
        db.session.add(PostTag(post_id=postid, tag_id=int(checked_tag)))
    db.session.commit()

    post.update_post(title, content)
    db.session.commit()
    return redirect(f'/posts/{postid}')
@app.route('/posts/<int:postid>/delete', methods=['POST'])
def delete_post(postid):
    """Delete the given post, then return to its author's page."""
    entry = Post.query.get_or_404(postid)
    author_id = entry.user.id
    db.session.delete(entry)
    db.session.commit()
    return redirect(f'/users/{author_id}')
########
# Tags #
########
@app.route('/tags')
def show_tags():
    """Render the listing of every tag."""
    return render_template('tags/tags.html', tags=Tag.query.all())
@app.route('/tags/<int:tagid>')
def show_tag(tagid):
    """Render one tag and the posts that carry it (404 if unknown)."""
    label = Tag.query.get_or_404(tagid)
    return render_template('tags/tag.html', tag=label, posts=label.posts)
@app.route('/tags/new', methods=['GET'])
def show_new_tag_form():
    """Render the blank form for creating a new tag."""
    return render_template('tags/new_tag.html')
@app.route('/tags/new', methods=['POST'])
def create_tag():
    """Create a tag from the submitted form.

    Re-renders the form with a warning when the name is missing; otherwise
    persists the tag. (The original committed twice in a row; the redundant
    second commit was removed.)
    """
    name = request.form.get('tag_name', None)

    # If information is somehow not provided, show a warning to the user.
    if not name:
        return render_template('tags/new_tag.html', missing_tag_name=True)

    db.session.add(Tag(name=name))
    db.session.commit()
    return redirect('/tags')
@app.route('/tags/<int:tagid>/edit', methods=['GET'])
def show_edit_tag_form(tagid):
    """Render the edit form for the given tag (404 if unknown)."""
    return render_template('tags/edit_tag.html', tag=Tag.query.get_or_404(tagid))
@app.route('/tags/<int:tagid>/edit', methods=['POST'])
def edit_tag(tagid):
    """Apply the submitted name change to a tag, then return to the list."""
    label = Tag.query.get_or_404(tagid)
    label.update_tag(request.form.get('tag_name', None))
    db.session.commit()
    return redirect('/tags')
@app.route('/tags/<int:tagid>/delete', methods=['POST'])
def delete_tag(tagid):
    """Remove the given tag and return to the tag list."""
    label = Tag.query.get_or_404(tagid)
    db.session.delete(label)
    db.session.commit()
    return redirect('/tags')
| [
"trevyncase@gmail.com"
] | trevyncase@gmail.com |
8cb10c2000d6f8c26f827835a9ea942e31a6b379 | a72ddd835be672c700d30b75131b2cf1ebc79aef | /xserver-build/.jhbuildrc | de7978b93bf9e5ca3893cdd7b97eeec9567b9c10 | [] | no_license | wzyy2/Scripts | b029f48a010249815dce1b695cb73e33f542d37d | eee34c4a8e01c9365501df3e0cc22fa3b519a76e | refs/heads/master | 2021-09-27T03:30:29.736506 | 2021-09-09T08:14:30 | 2021-09-09T08:14:30 | 60,768,097 | 4 | 6 | null | null | null | null | UTF-8 | Python | false | false | 3,397 | jhbuildrc | #!python
#######################################################################################
# This is a checkout and build configuration for building Xorg
#
# This can be copied to ~/.jhbuildrc and then run 'jhbuild build xserver'
#
#######################################################################################
# jhbuild configuration: where to check out, what to build, and install prefix.
moduleset = 'http://cgit.freedesktop.org/xorg/util/modular/plain/xorg.modules'
checkoutroot = '~/sources/xorg/git'
modules = [ 'xorg' ]
prefix = os.environ['DISCIMAGE'] +'/usr/local'
autogenargs = ' --disable-static'
# autogenargs += ' --disable-dri2 --with-driver=dri'
# NOTE(review): '-enable-dri' has a single dash -- looks like a typo for
# '--enable-dri'; configure will likely ignore it silently. TODO confirm.
autogenargs += ' --enable-dri2 -enable-dri'
autogenargs += ' --cache-file=' + checkoutroot + '/autoconf-cache'
# lots of people really like to always look in /var/log, but change if
# you want the log files out of place
autogenargs += ' --with-log-dir=/var/log'
autogenargs += ' --with-mesa-source=' + checkoutroot + '/mesa'
autogenargs += ' --enable-malloc0returnsnull'
os.environ['ACLOCAL'] = 'aclocal -I ' + prefix + '/share/aclocal/'
os.environ['INSTALL'] = os.path.expanduser('~/bin/install-check')
# Enabled debugging for xserver
os.environ['CFLAGS'] = '-g'
os.environ['CPPFLAGS'] = '-g'
# Setup environment for cross compiling
os.environ['BUILD'] = ''
os.environ['HOST'] = 'arm-linux-gnueabihf'
os.environ['TARGET'] = 'arm-linux-gnueabihf'
# CROSS_COMPILE must be exported before running jhbuild (e.g. 'arm-linux-gnueabihf-').
cross_compile_prefix = os.environ['CROSS_COMPILE']
# Map of env-var name -> toolchain binary; each is prefixed below.
tools = {'ADDR2LINE': 'addr2line',
    'AS': 'as', 'CC': 'gcc', 'CPP': 'cpp',
    'CPPFILT': 'c++filt', 'CXX': 'g++',
    'GCOV': 'gcov', 'LD': 'ld', 'NM': 'nm',
    'OBJCOPY': 'objcopy', 'OBJDUMP': 'objdump',
    'READELF': 'readelf', 'SIZE': 'size',
    'STRINGS': 'strings', 'AR': 'ar',
    'RANLIB': 'ranlib', 'STRIP': 'strip'}
# NOTE(review): tools_args is assigned but never used afterwards.
tools_args = str()
for tool in tools.keys():
    fullpath_tool = cross_compile_prefix + tools[tool]
    os.environ[tool] = fullpath_tool
autogenargs += ' --build='+os.environ['BUILD']
autogenargs += ' --host='+os.environ['HOST']
autogenargs += ' --target='+os.environ['TARGET']
for tool in ('AR', 'RANLIB', 'STRIP', 'AS', 'OBJDUMP', 'NM'):
    autogenargs += ' '+tool+'="'+os.environ[tool]+'" '
# Per-module configure-flag overrides (applied on top of autogenargs).
module_autogenargs['libGL'] = autogenargs + ' --without-demos --with-dri-drivers="swrast" --disable-glw'
module_autogenargs['mesa-mesa'] = autogenargs + ' --with-dri-drivers=swrast --without-gallium-drivers'
module_autogenargs['libXt'] = autogenargs + ' --disable-install-makestrs'
module_autogenargs['xserver'] = autogenargs + ' --enable-debug'
module_autogenargs['pixman'] = autogenargs + ' --disable-gtk'
module_autogenargs['hal'] = autogenargs + ' --disable-pci-ids'
module_autogenargs['libXfont'] = autogenargs + ' --disable-freetype'
# For expat and zlib
# NOTE(review): '-IHello' below looks like leftover debris, not a real include path.
os.environ['CFLAGS'] += ' -I' + os.environ['DISCIMAGE'] + '/usr/local/include/'
os.environ['CPPFLAGS'] += ' -IHello -I' + os.environ['DISCIMAGE'] + '/usr/local/include/'
os.environ['LDFLAGS'] = ' -L' + os.environ['DISCIMAGE'] + '/usr/local/lib/'
os.environ['LDFLAGS'] += ' -Wl,--rpath -Wl,' + '/usr/local/lib/' #rpath is relative to where it is run from - DISCIMAGE
# Just in case zlib or expat were installed here
os.environ['CFLAGS'] += ' -I' + os.environ['DISCIMAGE'] + '/usr/include/'
os.environ['CPPFLAGS'] += ' -I' + os.environ['DISCIMAGE'] + '/usr/include/'
os.environ['LDFLAGS'] += ' -L' + os.environ['DISCIMAGE'] + '/usr/lib/'
os.environ['LDFLAGS'] += ' -Wl,--rpath -Wl,' + '/usr/lib/'
| [
"jacob2.chen@rock-chips.com"
] | jacob2.chen@rock-chips.com |
34322ab0be08ec02c0cf670b8835ce5086251b9a | add5ca4ed6f5a5030cfcd60a09e502390ffc4936 | /full_code/paddle/conf/img_qa_gate2_gen.py | dd3f38a36d621d037da12a1a132552fe9d2eb6ae | [] | no_license | yangyi02/vision_language | 1f0b10e648a1ef0ea88edd30e41581d25969df27 | 9c55e5115d03bab58cf6165f63c9a6f426ed87ce | refs/heads/master | 2020-04-02T19:45:25.051432 | 2018-10-25T22:32:39 | 2018-10-25T22:32:39 | 154,745,259 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,331 | py | # -*- coding: utf-8 -*-
from math import sqrt
import os
import sys
from trainer.recurrent_units import LstmRecurrentUnit
# Paddle (v1 trainer-config DSL) network for image question answering with a
# gated language unit, in beam-search *generation* mode (learning_rate = 0).
model_type('recurrent_nn')
# data setting
gen_list = get_config_arg('gen_list', str, './gen.list')
result_file = get_config_arg('result_file', str, './result.txt')
# dictionary setting
dict_file = get_config_arg('dict_file', str, './dict.txt')
dict_pkl = get_config_arg('dict_pkl', str, './dict.pkl')
# image feature setting
img_feat_list = get_config_arg('img_feat_list', str, './img_feat.list')
# feature dimension setting
img_feat_dim = get_config_arg('img_feat_dim', int, 4096)
word_embedding_dim = 512
hidden_dim = 512
multimodal_dim = 1024
dict_dim = len(open(dict_file).readlines())
# Last two dictionary entries are the start-of-sequence / end-of-sequence ids.
start_index = dict_dim-2
end_index = dict_dim-1
# hyperparameter setting
Settings(
    batch_size = 8, # this must equal to trainer_count
    learning_rate = 0,
)
# data provider setting
TestData(
    PyData(
        files = gen_list,
        load_data_module = 'join_test',
        load_data_object = 'processData',
        load_data_args = ' '.join([dict_pkl, img_feat_list, str(img_feat_dim), '1.0'])
    )
)
##### network #####
Inputs('question_id', 'img_feat', 'question')
Outputs('predict_word')
# data layers
DataLayer(name = 'question_id', size = 1)
DataLayer(name = 'img_feat', size = img_feat_dim)
DataLayer(name = 'question', size = dict_dim)
# question embedding input: question_embedding
MixedLayer(name = 'question_embedding',
    size = word_embedding_dim,
    bias = False,
    inputs = TableProjection('question',
        parameter_name = 'word_embedding',
    ),
)
# question hidden input
MixedLayer(name = 'question_input',
    size = hidden_dim,
    active_type = 'stanh',
    inputs = FullMatrixProjection('question_embedding'),
)
# question hidden input: encoder
RecurrentLayerGroupBegin('encoder' + '_layer_group',
    in_links = ['question_input'],
    out_links = ['encoder'],
    seq_reversed = False,
)
# Unit size is hidden_dim/4 -- presumably because the hidden_dim projection
# packs the four LSTM gate blocks; verify against LstmRecurrentUnit.
LstmRecurrentUnit(name = 'encoder',
    size = hidden_dim/4,
    active_type = 'relu',
    state_active_type = 'linear',
    gate_active_type = 'sigmoid',
    inputs = [IdentityProjection('question_input')],
)
RecurrentLayerGroupEnd('encoder' + '_layer_group')
# get last of encoder
Layer(name = 'encoder_last',
    type = 'seqlastins',
    active_type = '',
    bias = False,
    inputs = [Input('encoder')],
)
# rnn1
# Decoder group: no in_links; runs as a generator (beam search, <= 20 steps).
RecurrentLayerGroupBegin('rnn1' + '_layer_group',
    in_links = [],
    out_links = ['predict_word'],
    seq_reversed = False,
    generator = Generator(
        max_num_frames = 20,
        beam_size = 5,
        num_results_per_sample = 1,
    ),
)
# Read-only memories carrying the image feature and the encoded question
# into every decoding step.
img_feat_memory = Memory(name = 'img_feat_memory',
    size = img_feat_dim,
    boot_layer = 'img_feat',
    is_sequence = False,
)
MixedLayer(name = 'img_feat_memory',
    size = img_feat_dim,
    bias = False,
    inputs = IdentityProjection(img_feat_memory),
)
question_memory = Memory(name = 'question_memory',
    size = hidden_dim/4,
    boot_layer = 'encoder_last',
    is_sequence = False,
)
MixedLayer(name = 'question_memory',
    size = hidden_dim/4,
    bias = False,
    inputs = IdentityProjection(question_memory),
)
# Feed the previous predicted word back in; bootstrapped with the start id.
predict_word_memory = Memory(name = 'predict_word',
    size = dict_dim,
    boot_with_const_id = start_index,
)
MixedLayer(name = 'predict_word_embedding',
    size = word_embedding_dim,
    bias = False,
    inputs = TableProjection(predict_word_memory,
        parameter_name = 'word_embedding',
    ),
)
# hidden1
MixedLayer(name = 'hidden1',
    size = hidden_dim,
    active_type = 'stanh',
    bias = Bias(parameter_name = '_hidden1.wbias'),
    inputs = FullMatrixProjection('predict_word_embedding',
        parameter_name = '_hidden1.w0'),
)
LstmRecurrentUnit(name = 'rnn1',
    size = hidden_dim/4,
    active_type = 'relu',
    state_active_type = 'linear',
    gate_active_type = 'sigmoid',
    inputs = [IdentityProjection('hidden1')],
)
# language unit
# Combines question context, previous word embedding and decoder state.
MixedLayer(name = 'language',
    size = multimodal_dim,
    active_type = 'linear',
    bias = Bias(parameter_name = '_language.wbias'),
    inputs = [FullMatrixProjection(question_memory, parameter_name = '_language.w0'),
        FullMatrixProjection('predict_word_embedding', parameter_name = '_language.w1'),
        FullMatrixProjection('rnn1', parameter_name = '_language.w2'),
    ],
    # drop_rate = 0.5,
)
# Scalar sigmoid gate; the -2.0 bias mean starts the gate mostly closed.
MixedLayer(name = 'language_gate',
    size = 1,
    active_type = 'sigmoid',
    bias = Bias(parameter_name = 'language_gate.b',
        initial_std = 0.0, initial_mean = -2.0),
    inputs = FullMatrixProjection('language',
        parameter_name = 'language_gate_proj')
)
# Broadcast the scalar gate across all multimodal_dim channels.
Layer(name = 'language_gate_expanded',
    type = 'featmap_expand',
    num_filters = multimodal_dim,
    inputs = FullMatrixProjection('language_gate')
)
MixedLayer(name = 'gated_language',
    size = multimodal_dim,
    bias = False,
    inputs = DotMulOperator(['language_gate_expanded', 'language'])
)
# hidden2
# Multimodal fusion of the gated language signal with the image feature.
MixedLayer(name = 'hidden2',
    size = multimodal_dim,
    active_type = 'stanh',
    bias = Bias(parameter_name = '_hidden2.wbias'),
    inputs = [IdentityProjection('gated_language', parameter_name = '_hidden2.w0'),
        FullMatrixProjection(img_feat_memory, parameter_name = '_hidden2.w1'),
    ],
    # drop_rate = 0.5,
)
# hidden3
#Layer(
#    name = 'hidden3',
#    type = 'mixed',
#    size = word_embedding_dim,
#    active_type = 'stanh',
#    inputs = FullMatrixProjection(
#        'hidden2',
#        initial_std = sqrt(1. / multimodal_dim)),
#)
# output
Layer(name = 'output',
    type = 'fc',
    size = dict_dim,
    active_type = 'softmax',
    bias = Bias(parameter_name = '_output.wbias'),
    inputs = [Input('hidden2', parameter_name = '_output.w0')],
    #inputs = TransposedFullMatrixProjection(
    #    'hidden3',
    #    parameter_name = 'wordvecs'),
)
Layer(
    name = 'predict_word',
    type = 'maxid',
    inputs = 'output',
)
# Generation stops when the end-of-sequence id is produced.
Layer(
    name = 'eos_check',
    type = 'eos_id',
    eos_id = end_index,
    inputs = ['predict_word'],
)
RecurrentLayerGroupEnd('rnn1' + '_layer_group')
# Write question and answer pairs to file
Evaluator(
    name = 'caption_printer',
    type = 'seq_text_printer',
    dict_file = dict_file,
    result_file = result_file,
    #delimited = False,
    inputs = ['question_id', 'question', 'predict_word'],
)
| [
"yangyi02@gmail.com"
] | yangyi02@gmail.com |
d6ec1defab5ed57216ed8a7c1927d4b569d4f5e7 | f8af2d190600221b7a597ef4de8ee15137e01266 | /django_mysite/polls/serializers.py | eef85178a606057b3aaaf04ed47a05c101d57c8e | [] | no_license | rifqirosyidi/REST-Framework-Searching | 3b4d64ca1d2217a48f1ec1c6591e1b7e1a42797d | 25481026728edfd564bb6ba18c8ce73040e07543 | refs/heads/master | 2023-04-26T02:11:43.684540 | 2021-04-12T09:43:09 | 2021-04-12T09:43:09 | 206,774,068 | 1 | 0 | null | 2023-04-21T20:36:46 | 2019-09-06T10:49:42 | Python | UTF-8 | Python | false | false | 202 | py | from rest_framework import serializers
from .models import Question, Choice
class QuestionSerializer(serializers.ModelSerializer):
    """DRF serializer exposing every field of the Question model."""
    class Meta:
        model = Question
        fields = '__all__'
| [
"rief.rosyidi@gmail.com"
] | rief.rosyidi@gmail.com |
35863bff5423a9fc84660b8948cb05b5a77d1862 | 16dc9fd93c3b19888d7762bfd3bbb81f40e8506f | /app/user/serializers.py | b2b85bc43ba7967f6725673d604147a094ad16d1 | [
"MIT"
] | permissive | nts2pd/recipe-app-api | 1c3c8832b550b8e5f55ed0e2bdf61a8dbfae2206 | e510fa826eb3f2f54a8cc568b7904725f9e2805f | refs/heads/master | 2022-12-11T21:25:38.733881 | 2020-09-04T21:04:50 | 2020-09-04T21:04:50 | 285,944,434 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,728 | py | from django.contrib.auth import get_user_model, authenticate
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
class UserSerializer(serializers.ModelSerializer):
    """Serializer for the user object."""

    class Meta:
        model = get_user_model()
        fields = ('email', 'password', 'name')
        # Never expose the password on reads; enforce a minimum length on input.
        extra_kwargs = {'password': {'write_only': True, 'min_length': 5}}

    def create(self, validated_data):
        """Create a new user with an encrypted password and return it."""
        return get_user_model().objects.create_user(**validated_data)

    def update(self, instance, validated_data):
        """Update a user, hashing a new password when one was supplied.

        (Fixes the original's misspelled 'validated_date' parameter; DRF
        calls update() positionally, so the rename is safe for callers.)
        """
        # Pop the password so the base update() doesn't store it in plain text.
        password = validated_data.pop('password', None)
        user = super().update(instance, validated_data)
        if password:
            user.set_password(password)
            user.save()
        return user
class AuthTokenSerializer(serializers.Serializer):
    """Serializer for the token-authentication payload (email + password)."""
    email = serializers.CharField()
    # Render as a password box in the browsable API; keep whitespace because
    # passwords may legitimately contain spaces.
    password = serializers.CharField(
        style={'input_type': 'password'},
        trim_whitespace=False
    )
    def validate(self, attrs):
        """Validate the credentials and attach the authenticated user.

        Raises a ValidationError (HTTP 400) when authentication fails; on
        success stores the user under attrs['user'] for the view to use.
        """
        email = attrs.get('email')
        password = attrs.get('password')
        # The email is passed as 'username' -- presumably USERNAME_FIELD is
        # set to email on the custom user model; verify in the models module.
        user = authenticate(
            request=self.context.get('request'),
            username=email,
            password=password
        )
        if not user:
            msg = _('Unable to authenticate with provided credentials')
            raise serializers.ValidationError(msg, code='authentication')
        attrs['user'] = user
        return attrs
| [
"nts2pd@gmail.com"
] | nts2pd@gmail.com |
6310185185262e0b8c9fbd9853f988e7da83bb3a | 9f45780bf3d8c45fb51ba3914cfa4b1b74d606c1 | /Through/dataset.py | 14fbd188e2c6c6e8027312a754e168ddcf03c3f3 | [] | no_license | panyang/NanGeMT | da6788d4be30e68bc7ae07491a3489ee6bbf5a68 | 251a6c421e44d706217692851e74837efab5002b | refs/heads/master | 2021-06-20T06:50:44.571301 | 2017-07-23T12:58:37 | 2017-07-23T12:58:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,414 | py | class cfgc:
    def __init__(self):
        # A grammar configuration: the reduction symbol plus its parts.
        self.reduct=""  # symbol this configuration reduces to -- TODO confirm
        self.part=[]  # ordered right-hand-side constituents -- TODO confirm
class prsrc:
    """A parse-rule source record: id, configuration, head index, binding."""
    def __init__(self):
        self.prsid=""  # rule identifier
        self.cfg=cfgc()  # the rule's configuration (reduction + parts)
        self.head=0  # index of the head constituent -- presumably; verify
        self.bind=""  # binding/agreement information -- TODO confirm
class segc:
    """A text segment together with a flag describing what the text is."""
    def __init__(self):
        # flag values: 'w' = word string; 't' = word and its tag; 'p' = phrase
        self.seq = ""
        self.flag = ""
class regionc:
    """A begin/end index span; defaults to the empty span at position 0."""
    def __init__(self, begin=0, end=0):
        self.begin = begin
        self.end = end
class tagc:
    """A tagged word occurrence within the sentence."""
    def __init__(self):
        self.sentreg=regionc()  # span the word covers in the sentence
        self.word=""
        self.cat=""  # category -- presumably part-of-speech; verify
        self.catid=""
        self.feat=""  # feature string
        self.ruleset=[]  # rules associated with this tag -- TODO confirm
class edgec:
    """A chart-parser edge linked into a derivation tree via son/father ids."""
    def __init__(self):
        self.id=None
        self.prsid=None  # id of the rule this edge instantiates
        self.ruleid=""
        self.feat=""
        self.dotreg=regionc()  # progress through the rule -- presumably the dot span; verify
        self.sentreg=regionc()  # sentence span covered so far
        self.son=[]  # child edge ids
        self.father=-2  # parent edge id; -2 appears to be the "no parent" sentinel -- verify
        self.source="" # dict or prsrbase or input, d or p or i
        self.state="" # active or silent or fix, a or s or f, fix
class loccatc:
    """A simple (location, category) pair."""
    def __init__(self, loc=0, cat=""):
        self.loc = loc
        self.cat = cat
class compedgekeyc:
    """Key for a complete edge: reduction symbol, sentence span and features."""
    def __init__(self, reduct, sentreg, feat):
        self.reduct = reduct
        self.sentreg = sentreg
        self.feat = feat
class incedgekeyc:
    """Key for an incomplete (active) edge.

    Fixes the original's mutable default arguments: the regionc() and []
    defaults were evaluated once at definition time, so every instance
    built with defaults aliased the very same region/list objects.
    """
    def __init__(self, prsid="", dotreg=None, sentreg=None, son=None):
        self.prsid = prsid
        # Create fresh objects per instance instead of sharing one default.
        self.dotreg = regionc() if dotreg is None else dotreg
        self.sentreg = regionc() if sentreg is None else sentreg
        self.son = [] if son is None else son
| [
"liuquncn@gmail.com"
] | liuquncn@gmail.com |
99b24238cb0af0983f78fec0cc28bb231aeb9fff | 40666ecc1de4c13e82c95a084b6b7cd98b413fc5 | /data/test.py | 8a2f9f794e8afcff2444c5191109265ab6b56d4f | [
"MIT"
] | permissive | aakashvarma/sms.ai | 6d7ee0b8e02449515237aaf7af9965b260df56ac | 87c07e279b38d25af4524c6738333710c9bc701c | refs/heads/master | 2021-10-25T03:44:55.271647 | 2021-10-18T18:30:53 | 2021-10-18T18:30:53 | 135,322,480 | 0 | 3 | MIT | 2018-10-30T17:49:53 | 2018-05-29T16:14:34 | Python | UTF-8 | Python | false | false | 2,223 | py | import os, csv, re, nltk
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn import preprocessing
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.naive_bayes import BernoulliNB
from sklearn.metrics import confusion_matrix
from sklearn import model_selection
from sklearn.metrics import accuracy_score
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize
from nltk.stem.porter import PorterStemmer
from nltk.stem.wordnet import WordNetLemmatizer
import snowballstemmer
import pickle
# data input: load the raw CSV as strings; the first row holds column names.
f = open('spam.csv', 'r')
file = csv.reader(f, delimiter = ',')
df = pd.DataFrame(np.array(list(file)))
df.columns = df.iloc[0]
df = df[1:]
# NOTE(review): the file handle `f` is never closed; a `with` block would fix it.
# Encode labels (v1: ham/spam) to integers.
le = preprocessing.LabelEncoder()
le.fit(df['v1'])
df['v1'] = le.transform(df['v1'])
# Clean texts
stop = set(stopwords.words('english'))
lmtzr = WordNetLemmatizer()
stemmer = snowballstemmer.stemmer('english')
corpus = np.array(df.v2)
c = []
# Strip non-letters, drop stopwords, then lemmatize and stem each message.
# NOTE(review): the comprehension reuses the loop variable `i` for words,
# shadowing the row index -- harmless here, but confusing.
for i in range(len(df.v2)):
    review = re.sub('[^a-zA-Z]', ' ', corpus[i])
    review = [i for i in review.lower().split() if i not in stop]
    l = [lmtzr.lemmatize(x) for x in review]
    s = stemmer.stemWords(l)
    review = ' '.join(s)
    c.append(review)
# Creating the Bag of Words model
# cv = CountVectorizer()
# X = cv.fit_transform(c).toarray()
y = df.v1
# TF-IDF features are used instead of raw counts.
vectorizer = TfidfVectorizer()
X = vectorizer.fit_transform(c).toarray()
# Splitting the dataset into the Training set and Test set
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.20)
# Fitting Naive Bayes to the Training set
classifier = BernoulliNB()
classifier.fit(X_train, y_train)
# Save the model to disk
filename = 'finalized_model.sav'
pickle.dump(classifier, open(filename, 'wb'))
# Some time later .......
# Reload the pickled model and report its accuracy on the held-out test set.
loaded_model = pickle.load(open(filename, 'rb'))
result = loaded_model.score(X_test, y_test)
# Predicting the Test set results
# y_pred = classifier.predict(X_test)
# Making the Confusion Matrix
# cm = confusion_matrix(y_test, y_pred)
# print (cm)
# Score
# print(accuracy_score(y_test, y_pred))
print (result)
| [
"aakashvarma18@gmail.com"
] | aakashvarma18@gmail.com |
9b1c082e80afc41f2c9bef919bf8a6880f827d6b | 585700f820e4eff76654735ec74a48350709dbab | /app/face_rec/setlcd.py | fe5e3b4bf985f2f4af916ea5cb4fb1473b11eb26 | [] | no_license | mozzielol/MonitoringSystem | 109259be0aaf06f36712e32fbf368146303e474c | e2fd68449dafe6e97d804da1e43a1a442dfa41ab | refs/heads/master | 2021-01-23T09:35:13.897172 | 2017-09-06T21:14:00 | 2017-09-06T21:14:00 | 102,583,062 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 577 | py | import sys
import time
import mraa
import pyupm_i2clcd as lcd
import threading
class lcdDisplay(threading.Thread):
    """Background thread that shows a message on a Grove RGB LCD.

    NOTE(review): the I2C addresses 0x3E/0x62 match the Grove JHD1313M1
    backpack on bus 0 -- confirm for the target board.
    """
    def __init__(self,message,color=(255,0,0),time_wait=10):
        # message: text to display; color: backlight (r, g, b);
        # time_wait: seconds to keep the message up once run() starts.
        threading.Thread.__init__(self)
        self.mLcd = lcd.Jhd1313m1(0, 0x3E, 0x62)
        self.mLcd.setCursor(0, 0)
        self.mLcd.setColor(color[0],color[1],color[2])
        self.message = message
        self.time_wait = time_wait
    def run(self):
        """Write the message, then block this thread for time_wait seconds."""
        self.mLcd.write(self.message)
        time.sleep(self.time_wait)
if __name__=='__main__':
    # Quick manual test: display a greeting for the default 10 seconds.
    lcdDisplay('hello').start()
| [
"noreply@github.com"
] | noreply@github.com |
ad2ce039c61d85c1c0c5640333adb3f2fc42b67e | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03369/s319661754.py | ec44e017019af609a02068a512dee343db5acc9e | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 48 | py | S = list(input())
# Price: 700 base plus 100 for each 'o' (topping present) in the input string S.
print(700+100*(S.count('o')))
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
28eaaf517b953e0bcb7b5cc92e74bf856f9820ef | 7552bac9fefb9c6076b34e30696f2f744cae0262 | /ode_methods/ode_range_methods.py | 73306fd7cc5995da98189d8d28ed90ccbe581e06 | [] | no_license | affanshahid/es-341-project | b9d3e6863fc21f6be973765a8f39523475b59122 | 642bb0240d1bfdff2b8943ddb9701b331371a04c | refs/heads/master | 2016-09-12T13:12:01.107085 | 2016-04-13T06:31:01 | 2016-04-13T06:31:01 | 55,982,902 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,011 | py | from utils import frange
from .midpoint import midpoint
from .euler import euler
from .euler_modified import euler_modified
from .runge_kutta_4 import runge_kutta_4
def range(ode_method, ode, initial_val, start, end, step_size=0.01):
    """Integrate `ode` from `start` to `end` using the one-step `ode_method`.

    Returns a list of (t, value) samples, starting with the initial value.
    NOTE(review): this name shadows the builtin `range`; the *_range
    wrappers below call it by this name, so renaming would touch them all.
    """
    values = []
    old_val = initial_val
    # The small epsilon keeps the endpoint included despite float drift.
    for t in frange(start, end + 0.00001, step_size):
        values.append((t, old_val))
        new_val = ode_method(ode, old_val, t, step_size)
        old_val = new_val
    return values
def euler_range(ode, initial_val, start, end, step_size):
    """Integrate `ode` over [start, end] with the forward Euler method."""
    return range(euler, ode, initial_val, start, end, step_size)
def midpoint_range(ode, initial_val, start, end, step_size):
    """Integrate `ode` over [start, end] with the midpoint method."""
    return range(midpoint, ode, initial_val, start, end, step_size)
def euler_modified_range(ode, initial_val, start, end, step_size):
    """Integrate `ode` over [start, end] with the modified Euler (Heun) method."""
    return range(euler_modified, ode, initial_val, start, end, step_size)
def runge_kutta_4_range(ode, initial_val, start, end, step_size):
    # Convenience wrapper: integrate `ode` with classic 4th-order Runge-Kutta.
    return range(runge_kutta_4, ode, initial_val, start, end, step_size)
| [
"affan.shahid.94@gmail.com"
] | affan.shahid.94@gmail.com |
ed81e2525e1801ed6e29af98eae7ec1f7769c79c | 62c11d52bad9deafb5839d9567dd4a443621d3c2 | /moreonseek.py | 2c57e5d3980e005174579b8bd77ecdfd5f6ee68e | [] | no_license | vijendra21vijendra/python | 1e5996d5cd71c4476ee6c9ef46d7b27a0065e79a | 16a69b46e4f163e0ce3bab75bb907a6bf15a33b0 | refs/heads/master | 2020-08-07T06:55:29.365026 | 2019-10-13T06:51:18 | 2019-10-13T06:51:18 | 213,342,122 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 122 | py | f = open("viju.txt")
print(f.tell())  # position is 0 right after open
f.seek(12)  # jump to byte offset 12
print(f.read())  # print everything from offset 12 to EOF
print(f.tell())  # now positioned at EOF
f.seek(0)  # rewind to the beginning
print(f.read())  # print the whole file
f.close()
"noreply@github.com"
] | noreply@github.com |
da5c3f07a89c6f202d1f08e7549f778c93418d77 | f9263163151d49e5e1ae570e2a1b79650457d695 | /trivia/console_interface.py | 75bd10146c2729788c05bddd0d6e0ac12f1058eb | [] | no_license | xvicmanx/py-practices | 08b0b6604e3ee9ee0227f4fc8c9e3c20f0122b0d | 15e6d37155f5212b210c74d3d29e0806c165a36f | refs/heads/main | 2023-02-26T13:09:56.504833 | 2021-02-02T14:36:32 | 2021-02-02T14:36:32 | 331,462,053 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 465 | py | class ConsoleInterface:
  def __init__(self, quiz):
    # Store the quiz engine; expected to provide has_next_question(),
    # get_next_question(), check_answer() and get_score().
    self.__quiz = quiz
  def run(self):
    """Drive the quiz on the console until there are no questions left,
    then print the final score."""
    while self.__quiz.has_next_question():
      question = self.__quiz.get_next_question()
      # The prompt expects a literal True/False answer from the user.
      answer = input(question + ' (True/False): ')
      correct = self.__quiz.check_answer(answer)
      if correct:
        print('Your answer is correct!\n\n')
      else:
        print('Your answer is wrong!\n\n')
    print('Your score is: ' + str(self.__quiz.get_score()))
"vic3jo@gmail.com"
] | vic3jo@gmail.com |
e95708b1aa8ef5936b84897bd1b1b8c5084aa0fd | 1065145ffd39e3b030a5c18ff6b219241fefadcd | /backend/src/settings.py | 982ba9e26a3d82ff470444f7f463d193a78431c0 | [
"MIT"
] | permissive | aleksandr-kiriushin/billogram-test-app | 4c2dfd13be4fa05b99a70a57becea92281bfb0d2 | e7be0bd6276e72693c539daf5cb7ab9e043c8802 | refs/heads/main | 2023-01-18T19:27:26.628312 | 2020-11-22T21:29:56 | 2020-11-22T21:29:56 | 314,905,113 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 205 | py | """ Settings for application set by envs """
import os
API_USER = os.getenv('API_USER')
API_PASSWORD = os.getenv('API_PASSWORD')
API_URL = os.getenv('API_URL')
SERVER_PORT = int(os.getenv('SERVER_PORT'))
| [
"aleksandr.kiriushin@gatech.global"
] | aleksandr.kiriushin@gatech.global |
dcfd94c35e67a747655ee2e3637d2bdc440bb4b4 | 803bc4e71e882463f1f1b43e1d8f10f5ca7e6a59 | /Python/serialTkinterMatplotlib.py | c0712746706bfc78bc4276fb8f8e08f1dbd125ba | [] | no_license | Freezesc/cubeUI | 924ca03fb77d949a62ef14bc179bca0140c314d8 | e6e0b09eb9362175428d35c5af7c8b0fcbe48433 | refs/heads/main | 2023-03-12T01:49:51.635291 | 2021-03-04T11:14:06 | 2021-03-04T11:14:06 | 341,109,144 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,872 | py | from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.figure import Figure
import tkinter as tk
import numpy as np
import serial as sr
# ------global variables
data = np.array([])  # rolling window of the most recent (<=100) samples
cond = False  # True while acquisition/plotting is running
# -----plot data-----
def plot_data():
    """Poll the serial port (when running) and refresh the rolling plot.

    Reschedules itself through Tk's event loop every 1 ms, so it keeps
    running for the lifetime of the window.
    """
    global cond, data
    if (cond == True):
        a = s.readline()
        # NOTE(review): the decode() result is discarded; float(a[0:4]) below
        # parses the raw bytes directly -- confirm this is intended.
        a.decode()
        if (len(data) < 100):
            # Still filling the 100-sample window.
            data = np.append(data, float(a[0:4]))
        else:
            # Window full: shift left by one and append the newest sample.
            data[0:99] = data[1:100]
            data[99] = float(a[0:4])
        lines.set_xdata(np.arange(0, len(data)))
        lines.set_ydata(data)
        canvas.draw()
    root.after(1, plot_data)
def plot_start():
    # Begin plotting; flush stale serial input so the trace starts fresh.
    global cond
    cond = True
    s.reset_input_buffer()
def plot_stop():
    # Pause plotting; plot_data keeps rescheduling but becomes a no-op.
    global cond
    cond = False
# -----Main GUI code-----
root = tk.Tk()
root.title('Real Time Plot')
root.configure(background='light blue')
root.geometry("700x500")  # set the window size
# ------create Plot object on GUI----------
# add figure canvas
fig = Figure()
ax = fig.add_subplot(111)
# ax = plt.axes(xlim=(0,100), ylim=(0, 120)); #displaying only 100 samples
ax.set_title('Serial Data')
ax.set_xlabel('Sample')
ax.set_ylabel('Voltage')
ax.set_xlim(0, 100)  # window of 100 samples, matching plot_data's buffer
ax.set_ylim(-0.5, 6)
lines = ax.plot([], [])[0]  # empty line; plot_data fills it in later
canvas = FigureCanvasTkAgg(fig, master=root)  # A tk.DrawingArea.
canvas.get_tk_widget().place(x=10, y=10, width=500, height=400)
canvas.draw()
# ----------create button---------
root.update()
start = tk.Button(root, text="Start", font=('calbiri', 12), command=lambda: plot_start())
start.place(x=100, y=450)
root.update()
# Place Stop immediately to the right of Start.
stop = tk.Button(root, text="Stop", font=('calbiri', 12), command=lambda: plot_stop())
stop.place(x=start.winfo_x() + start.winfo_reqwidth() + 20, y=450)
# ----start serial port----
# NOTE(review): COM port and baud rate are hard-coded for this setup.
s = sr.Serial('COM8', 115200)
s.reset_input_buffer()
root.after(1, plot_data)  # kick off the self-rescheduling poll loop
root.mainloop()
"tristan.renon@hes-so.ch"
] | tristan.renon@hes-so.ch |
8a9a508c011d5569a130e6b4262c6a5004ac248c | c44f38cb20d35aa54126fb0102baf40dae78dccd | /hhr/tools/test_tools.py | 314a6ef73ffda36dfde8a320bf885dbadf7ee421 | [] | no_license | aixin2000/Test_Scripts | d2221c93eaf3fd7e0d618ca646dce6200fa3cf75 | cf92e8e81ceb5cb67217bf36993cf94fe470fd0b | refs/heads/master | 2023-03-13T08:24:34.661616 | 2021-03-04T13:40:46 | 2021-03-04T13:40:46 | 344,461,401 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,420 | py | import configparser
import random
import os
import time
class Test_Tools:
    """Small grab-bag of test helpers: random phone numbers and config loading."""

    def randomPhone(self):
        """Return a random, plausibly-formatted 11-digit Chinese mobile number."""
        prefixes = ["130", "131", "132", "133", "134", "135", "136", "137", "138", "139",
                    "147", "150", "151", "152", "153", "155", "156", "157", "158", "159",
                    "186", "187", "188", "189"]
        # One random carrier prefix followed by eight random digits.
        head = random.choice(prefixes)
        tail = "".join(random.choice("0123456789") for _ in range(8))
        return head + tail

    def read_config(self):
        """Load config/config.ini (relative to the package root) and return the
        'testServer' section as a plain dict."""
        config_file = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + '/config/config.ini'
        parser = configparser.ConfigParser()
        parser.read(config_file, encoding='utf-8')
        # Only the 'testServer' section is consumed by callers; items() yields
        # (key, value) tuples which are folded into a dict.
        return dict(parser.items('testServer'))
# # time.sleep(1)
| [
"1454622738@qq.com"
] | 1454622738@qq.com |
e097a16f0379513c2092c5425fad847644f49308 | 091e97bcfe5acc0635bd601aa8497e377b74d41a | /openshift/installer/vendored/openshift-ansible-3.11.28-1/roles/lib_openshift/library/oc_serviceaccount_secret.py | 4670e3e95ddc2ebdd06ba5c38649e792aac37e53 | [
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] | permissive | openshift/openshift-tools | d59b63778f25cb8fb3c7a0253afe22a173e72f9d | e342f6659a4ef1a188ff403e2fc6b06ac6d119c7 | refs/heads/prod | 2023-08-30T01:52:04.108978 | 2022-03-23T21:07:28 | 2022-03-23T21:07:28 | 36,827,699 | 170 | 254 | Apache-2.0 | 2022-06-16T12:11:51 | 2015-06-03T20:09:22 | Python | UTF-8 | Python | false | false | 60,388 | py | #!/usr/bin/env python
# pylint: disable=missing-docstring
# flake8: noqa: T001
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
#
# Copyright 2016 Red Hat, Inc. and/or its affiliates
# and other contributors as indicated by the @author tags.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -*- -*- -*- Begin included fragment: lib/import.py -*- -*- -*-
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
from __future__ import print_function
import atexit
import copy
import fcntl
import json
import time
import os
import re
import shutil
import subprocess
import tempfile
# pylint: disable=import-error
try:
import ruamel.yaml as yaml
except ImportError:
import yaml
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: doc/serviceaccount_secret -*- -*- -*-
DOCUMENTATION = '''
---
module: oc_serviceaccount_secret
short_description: Module to manage openshift service account secrets
description:
- Manage openshift service account secrets programmatically.
options:
state:
description:
- If present, the service account will be linked with the secret if it is not already. If absent, the service account will be unlinked from the secret if it is already linked. If list, information about the service account secrets will be gathered and returned as part of the Ansible call results.
required: false
default: present
choices: ["present", "absent", "list"]
aliases: []
kubeconfig:
description:
- The path for the kubeconfig file to use for authentication
required: false
default: /etc/origin/master/admin.kubeconfig
aliases: []
debug:
description:
- Turn on debug output.
required: false
default: false
aliases: []
service_account:
description:
- Name of the service account.
required: true
default: None
aliases: []
namespace:
description:
- Namespace of the service account and secret.
required: true
default: None
aliases: []
secret:
description:
- The secret that should be linked to the service account.
required: false
default: None
aliases: []
author:
- "Kenny Woodson <kwoodson@redhat.com>"
extends_documentation_fragment: []
'''
EXAMPLES = '''
- name: get secrets of a service account
oc_serviceaccount_secret:
state: list
service_account: builder
namespace: default
register: sasecretout
- name: Link a service account to a specific secret
oc_serviceaccount_secret:
service_account: builder
secret: mynewsecret
namespace: default
register: sasecretout
'''
# -*- -*- -*- End included fragment: doc/serviceaccount_secret -*- -*- -*-
# -*- -*- -*- Begin included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
class YeditException(Exception):  # pragma: no cover
    """Raised for Yedit-specific failures (bad keys, type mismatches, write errors)."""
# pylint: disable=too-many-public-methods,too-many-instance-attributes
class Yedit(object):  # pragma: no cover
    '''Wrapper around a YAML/JSON document supporting dotted-path edits.

    Paths look like "a.b[0].c" (separator configurable). Supports
    get/put/delete/append/update plus optional on-disk persistence with
    backups. Prefers ruamel.yaml round-trip load/dump when available to
    preserve formatting, falling back to plain PyYAML.
    '''
    # Validates a complete key; %s is filled with the inactive separators.
    re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z%s/_-]+)).?)+$"
    # Captures either a [index] segment or a name segment of a key.
    re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z{}/_-]+)"
    # All separators Yedit understands; the active one is excluded when
    # the regexes above are instantiated.
    com_sep = set(['.', '#', '|', ':'])

    # pylint: disable=too-many-arguments
    def __init__(self,
                 filename=None,
                 content=None,
                 content_type='yaml',
                 separator='.',
                 backup_ext=None,
                 backup=False):
        '''Load `filename` or `content` into an in-memory dict.

        backup_ext defaults to a timestamp suffix when backups are enabled.
        '''
        self.content = content
        self._separator = separator
        self.filename = filename
        self.__yaml_dict = content
        self.content_type = content_type
        self.backup = backup
        if backup_ext is None:
            self.backup_ext = ".{}".format(time.strftime("%Y%m%dT%H%M%S"))
        else:
            self.backup_ext = backup_ext
        self.load(content_type=self.content_type)
        if self.__yaml_dict is None:
            self.__yaml_dict = {}

    @property
    def separator(self):
        ''' getter method for separator '''
        return self._separator

    @separator.setter
    def separator(self, inc_sep):
        ''' setter method for separator '''
        self._separator = inc_sep

    @property
    def yaml_dict(self):
        ''' getter method for yaml_dict '''
        return self.__yaml_dict

    @yaml_dict.setter
    def yaml_dict(self, value):
        ''' setter method for yaml_dict '''
        self.__yaml_dict = value

    @staticmethod
    def parse_key(key, sep='.'):
        '''parse the key allowing the appropriate separator'''
        common_separators = list(Yedit.com_sep - set([sep]))
        return re.findall(Yedit.re_key.format(''.join(common_separators)), key)

    @staticmethod
    def valid_key(key, sep='.'):
        '''validate the incoming key'''
        common_separators = list(Yedit.com_sep - set([sep]))
        if not re.match(Yedit.re_valid_key.format(''.join(common_separators)), key):
            return False

        return True

    # pylint: disable=too-many-return-statements,too-many-branches
    @staticmethod
    def remove_entry(data, key, index=None, value=None, sep='.'):
        '''Remove data at dotted path `key`; within a list, target an entry
        by `index` or by `value`. Returns True on removal, None/False otherwise.
        '''
        if key == '' and isinstance(data, dict):
            if value is not None:
                data.pop(value)
            elif index is not None:
                raise YeditException("remove_entry for a dictionary does not have an index {}".format(index))
            else:
                data.clear()

            return True

        elif key == '' and isinstance(data, list):
            ind = None
            if value is not None:
                try:
                    ind = data.index(value)
                except ValueError:
                    return False
            elif index is not None:
                ind = index
            else:
                del data[:]

            if ind is not None:
                data.pop(ind)

            return True

        if not (key and Yedit.valid_key(key, sep)) and \
           isinstance(data, (list, dict)):
            return None

        key_indexes = Yedit.parse_key(key, sep)
        # Walk down to the parent of the final segment.
        for arr_ind, dict_key in key_indexes[:-1]:
            if dict_key and isinstance(data, dict):
                data = data.get(dict_key)
            elif (arr_ind and isinstance(data, list) and
                  int(arr_ind) <= len(data) - 1):
                data = data[int(arr_ind)]
            else:
                return None

        # process last index for remove
        # expected list entry
        if key_indexes[-1][0]:
            if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:  # noqa: E501
                del data[int(key_indexes[-1][0])]
                return True

        # expected dict entry
        elif key_indexes[-1][1]:
            if isinstance(data, dict):
                del data[key_indexes[-1][1]]
                return True

    @staticmethod
    def add_entry(data, key, item=None, sep='.'):
        '''Store `item` in `data` at dotted path `key` (e.g. a.b.c or a.b[0]),
        creating intermediate dicts as needed. Returns the stored data, or
        None for an invalid key; raises YeditException on type conflicts.
        '''
        if key == '':
            pass
        elif (not (key and Yedit.valid_key(key, sep)) and
              isinstance(data, (list, dict))):
            return None

        key_indexes = Yedit.parse_key(key, sep)
        for arr_ind, dict_key in key_indexes[:-1]:
            if dict_key:
                if isinstance(data, dict) and dict_key in data and data[dict_key]:  # noqa: E501
                    data = data[dict_key]
                    continue

                elif data and not isinstance(data, dict):
                    raise YeditException("Unexpected item type found while going through key " +
                                         "path: {} (at key: {})".format(key, dict_key))

                data[dict_key] = {}
                data = data[dict_key]

            elif (arr_ind and isinstance(data, list) and
                  int(arr_ind) <= len(data) - 1):
                data = data[int(arr_ind)]
            else:
                raise YeditException("Unexpected item type found while going through key path: {}".format(key))

        if key == '':
            data = item

        # process last index for add
        # expected list entry
        elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:  # noqa: E501
            data[int(key_indexes[-1][0])] = item

        # expected dict entry
        elif key_indexes[-1][1] and isinstance(data, dict):
            data[key_indexes[-1][1]] = item

        # didn't add/update to an existing list, nor add/update key to a dict
        # so we must have been provided some syntax like a.b.c[<int>] = "data" for a
        # non-existent array
        else:
            raise YeditException("Error adding to object at path: {}".format(key))

        return data

    @staticmethod
    def get_entry(data, key, sep='.'):
        '''Fetch the value in `data` at dotted path `key`.
            d = {'a': {'b': 'c'}}
            key = a.b
            return c
        '''
        if key == '':
            pass
        elif (not (key and Yedit.valid_key(key, sep)) and
              isinstance(data, (list, dict))):
            return None

        key_indexes = Yedit.parse_key(key, sep)
        for arr_ind, dict_key in key_indexes:
            if dict_key and isinstance(data, dict):
                data = data.get(dict_key)
            elif (arr_ind and isinstance(data, list) and
                  int(arr_ind) <= len(data) - 1):
                data = data[int(arr_ind)]
            else:
                return None

        return data

    @staticmethod
    def _write(filename, contents):
        ''' Actually write the file contents to disk. This helps with mocking. '''
        # Write to a sibling temp file under an exclusive lock, then rename
        # atomically over the target.
        tmp_filename = filename + '.yedit'

        with open(tmp_filename, 'w') as yfd:
            fcntl.flock(yfd, fcntl.LOCK_EX | fcntl.LOCK_NB)
            yfd.write(contents)
            fcntl.flock(yfd, fcntl.LOCK_UN)

        os.rename(tmp_filename, filename)

    def write(self):
        ''' write to file '''
        if not self.filename:
            raise YeditException('Please specify a filename.')

        if self.backup and self.file_exists():
            shutil.copy(self.filename, '{}{}'.format(self.filename, self.backup_ext))

        # Try to set format attributes if supported
        try:
            self.yaml_dict.fa.set_block_style()
        except AttributeError:
            pass

        # Try to use RoundTripDumper if supported.
        if self.content_type == 'yaml':
            try:
                Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
            except AttributeError:
                Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
        elif self.content_type == 'json':
            Yedit._write(self.filename, json.dumps(self.yaml_dict, indent=4, sort_keys=True))
        else:
            raise YeditException('Unsupported content_type: {}.'.format(self.content_type) +
                                 'Please specify a content_type of yaml or json.')

        return (True, self.yaml_dict)

    def read(self):
        ''' read from file '''
        # check if it exists
        if self.filename is None or not self.file_exists():
            return None

        contents = None
        with open(self.filename) as yfd:
            contents = yfd.read()

        return contents

    def file_exists(self):
        ''' return whether file exists '''
        if os.path.exists(self.filename):
            return True

        return False

    def load(self, content_type='yaml'):
        '''Parse file contents (or self.content) into self.yaml_dict and return it.'''
        contents = self.read()

        if not contents and not self.content:
            return None

        if self.content:
            if isinstance(self.content, dict):
                self.yaml_dict = self.content
                return self.yaml_dict
            elif isinstance(self.content, str):
                contents = self.content

        # check if it is yaml
        try:
            if content_type == 'yaml' and contents:
                # Try to set format attributes if supported
                try:
                    self.yaml_dict.fa.set_block_style()
                except AttributeError:
                    pass

                # Try to use RoundTripLoader if supported.
                try:
                    self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
                except AttributeError:
                    self.yaml_dict = yaml.safe_load(contents)

                # Try to set format attributes if supported
                try:
                    self.yaml_dict.fa.set_block_style()
                except AttributeError:
                    pass

            elif content_type == 'json' and contents:
                self.yaml_dict = json.loads(contents)
        except yaml.YAMLError as err:
            # Error loading yaml or json
            raise YeditException('Problem with loading yaml file. {}'.format(err))

        return self.yaml_dict

    def get(self, key):
        ''' get a specified key'''
        try:
            entry = Yedit.get_entry(self.yaml_dict, key, self.separator)
        except KeyError:
            entry = None

        return entry

    def pop(self, path, key_or_item):
        ''' remove a key, value pair from a dict or an item for a list'''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None

        if entry is None:
            return (False, self.yaml_dict)

        if isinstance(entry, dict):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            if key_or_item in entry:
                entry.pop(key_or_item)
                return (True, self.yaml_dict)
            return (False, self.yaml_dict)

        elif isinstance(entry, list):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            ind = None
            try:
                ind = entry.index(key_or_item)
            except ValueError:
                return (False, self.yaml_dict)

            entry.pop(ind)
            return (True, self.yaml_dict)

        return (False, self.yaml_dict)

    def delete(self, path, index=None, value=None):
        ''' remove path from a dict'''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None

        if entry is None:
            return (False, self.yaml_dict)

        result = Yedit.remove_entry(self.yaml_dict, path, index, value, self.separator)
        if not result:
            return (False, self.yaml_dict)

        return (True, self.yaml_dict)

    def exists(self, path, value):
        ''' check if value exists at path'''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None

        if isinstance(entry, list):
            if value in entry:
                return True
            return False

        elif isinstance(entry, dict):
            if isinstance(value, dict):
                # Every key/value pair of `value` must match the entry.
                rval = False
                for key, val in value.items():
                    if entry[key] != val:
                        rval = False
                        break
                else:
                    rval = True
                return rval

            return value in entry

        return entry == value

    def append(self, path, value):
        '''append value to a list'''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None

        if entry is None:
            self.put(path, [])
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        if not isinstance(entry, list):
            return (False, self.yaml_dict)

        # AUDIT:maybe-no-member makes sense due to loading data from
        # a serialized format.
        # pylint: disable=maybe-no-member
        entry.append(value)
        return (True, self.yaml_dict)

    # pylint: disable=too-many-arguments
    def update(self, path, value, index=None, curr_value=None):
        '''Merge `value` into a dict at `path`, or replace/insert a list item
        located by `index` or `curr_value`.'''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None

        if isinstance(entry, dict):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            if not isinstance(value, dict):
                raise YeditException('Cannot replace key, value entry in dict with non-dict type. ' +
                                     'value=[{}] type=[{}]'.format(value, type(value)))

            entry.update(value)
            return (True, self.yaml_dict)

        elif isinstance(entry, list):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            ind = None
            if curr_value:
                try:
                    ind = entry.index(curr_value)
                except ValueError:
                    return (False, self.yaml_dict)

            elif index is not None:
                ind = index

            if ind is not None and entry[ind] != value:
                entry[ind] = value
                return (True, self.yaml_dict)

            # see if it exists in the list
            try:
                ind = entry.index(value)
            except ValueError:
                # doesn't exist, append it
                entry.append(value)
                return (True, self.yaml_dict)

            # already exists, return
            if ind is not None:
                return (False, self.yaml_dict)
        return (False, self.yaml_dict)

    def put(self, path, value):
        ''' put path, value into a dict '''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None

        if entry == value:
            return (False, self.yaml_dict)

        # deepcopy didn't work
        # Try to use ruamel.yaml and fallback to pyyaml
        try:
            tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
                                                      default_flow_style=False),
                                 yaml.RoundTripLoader)
        except AttributeError:
            tmp_copy = copy.deepcopy(self.yaml_dict)

        # set the format attributes if available
        try:
            tmp_copy.fa.set_block_style()
        except AttributeError:
            pass

        result = Yedit.add_entry(tmp_copy, path, value, self.separator)
        if result is None:
            return (False, self.yaml_dict)

        # When path equals "" it is a special case.
        # "" refers to the root of the document
        # Only update the root path (entire document) when its a list or dict
        if path == '':
            if isinstance(result, list) or isinstance(result, dict):
                self.yaml_dict = result
                return (True, self.yaml_dict)

            return (False, self.yaml_dict)

        self.yaml_dict = tmp_copy

        return (True, self.yaml_dict)

    def create(self, path, value):
        '''Like put(), but only applies when the backing file does not exist yet.'''
        if not self.file_exists():
            # deepcopy didn't work
            # Try to use ruamel.yaml and fallback to pyyaml
            try:
                tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
                                                          default_flow_style=False),
                                     yaml.RoundTripLoader)
            except AttributeError:
                tmp_copy = copy.deepcopy(self.yaml_dict)

            # set the format attributes if available
            try:
                tmp_copy.fa.set_block_style()
            except AttributeError:
                pass

            result = Yedit.add_entry(tmp_copy, path, value, self.separator)
            if result is not None:
                self.yaml_dict = tmp_copy
                return (True, self.yaml_dict)

        return (False, self.yaml_dict)

    @staticmethod
    def get_curr_value(invalue, val_type):
        '''return the current value'''
        if invalue is None:
            return None

        curr_value = invalue
        if val_type == 'yaml':
            curr_value = yaml.safe_load(str(invalue))
        elif val_type == 'json':
            curr_value = json.loads(invalue)

        return curr_value

    @staticmethod
    def parse_value(inc_value, vtype=''):
        '''determine value type passed'''
        true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE',
                      'on', 'On', 'ON', ]
        false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE',
                       'off', 'Off', 'OFF']

        # It came in as a string but you didn't specify value_type as string
        # we will convert to bool if it matches any of the above cases
        if isinstance(inc_value, str) and 'bool' in vtype:
            if inc_value not in true_bools and inc_value not in false_bools:
                raise YeditException('Not a boolean type. str=[{}] vtype=[{}]'.format(inc_value, vtype))
        elif isinstance(inc_value, bool) and 'str' in vtype:
            inc_value = str(inc_value)

        # There is a special case where '' will turn into None after yaml loading it so skip
        if isinstance(inc_value, str) and inc_value == '':
            pass
        # If vtype is not str then go ahead and attempt to yaml load it.
        elif isinstance(inc_value, str) and 'str' not in vtype:
            try:
                inc_value = yaml.safe_load(inc_value)
            except Exception:
                raise YeditException('Could not determine type of incoming value. ' +
                                     'value=[{}] vtype=[{}]'.format(type(inc_value), vtype))

        return inc_value

    @staticmethod
    def process_edits(edits, yamlfile):
        '''run through a list of edits and process them one-by-one'''
        results = []
        for edit in edits:
            value = Yedit.parse_value(edit['value'], edit.get('value_type', ''))
            if edit.get('action') == 'update':
                # pylint: disable=line-too-long
                curr_value = Yedit.get_curr_value(
                    Yedit.parse_value(edit.get('curr_value')),
                    edit.get('curr_value_format'))

                rval = yamlfile.update(edit['key'],
                                       value,
                                       edit.get('index'),
                                       curr_value)

            elif edit.get('action') == 'append':
                rval = yamlfile.append(edit['key'], value)

            else:
                rval = yamlfile.put(edit['key'], value)

            if rval[0]:
                results.append({'key': edit['key'], 'edit': rval[1]})

        return {'changed': len(results) > 0, 'results': results}

    # pylint: disable=too-many-return-statements,too-many-branches
    @staticmethod
    def run_ansible(params):
        '''perform the idempotent crud operations'''
        yamlfile = Yedit(filename=params['src'],
                         backup=params['backup'],
                         content_type=params['content_type'],
                         backup_ext=params['backup_ext'],
                         separator=params['separator'])

        state = params['state']

        if params['src']:
            rval = yamlfile.load()

            if yamlfile.yaml_dict is None and state != 'present':
                return {'failed': True,
                        'msg': 'Error opening file [{}]. Verify that the '.format(params['src']) +
                               'file exists, that it is has correct permissions, and is valid yaml.'}

        if state == 'list':
            if params['content']:
                content = Yedit.parse_value(params['content'], params['content_type'])
                yamlfile.yaml_dict = content

            if params['key']:
                rval = yamlfile.get(params['key'])

            return {'changed': False, 'result': rval, 'state': state}

        elif state == 'absent':
            if params['content']:
                content = Yedit.parse_value(params['content'], params['content_type'])
                yamlfile.yaml_dict = content

            if params['update']:
                rval = yamlfile.pop(params['key'], params['value'])
            else:
                rval = yamlfile.delete(params['key'], params['index'], params['value'])

            if rval[0] and params['src']:
                yamlfile.write()

            return {'changed': rval[0], 'result': rval[1], 'state': state}

        elif state == 'present':
            # check if content is different than what is in the file
            if params['content']:
                content = Yedit.parse_value(params['content'], params['content_type'])

                # We had no edits to make and the contents are the same
                if yamlfile.yaml_dict == content and \
                   params['value'] is None:
                    return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}

                yamlfile.yaml_dict = content

            # If we were passed a key, value then
            # we enapsulate it in a list and process it
            # Key, Value passed to the module : Converted to Edits list #
            edits = []
            _edit = {}
            if params['value'] is not None:
                _edit['value'] = params['value']
                _edit['value_type'] = params['value_type']
                _edit['key'] = params['key']

                if params['update']:
                    _edit['action'] = 'update'
                    _edit['curr_value'] = params['curr_value']
                    _edit['curr_value_format'] = params['curr_value_format']
                    _edit['index'] = params['index']

                elif params['append']:
                    _edit['action'] = 'append'

                edits.append(_edit)

            elif params['edits'] is not None:
                edits = params['edits']

            if edits:
                results = Yedit.process_edits(edits, yamlfile)

                # if there were changes and a src provided to us we need to write
                if results['changed'] and params['src']:
                    yamlfile.write()

                return {'changed': results['changed'], 'result': results['results'], 'state': state}

            # no edits to make
            if params['src']:
                # pylint: disable=redefined-variable-type
                rval = yamlfile.write()
                return {'changed': rval[0],
                        'result': rval[1],
                        'state': state}

            # We were passed content but no src, key or value, or edits. Return contents in memory
            return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}
        return {'failed': True, 'msg': 'Unkown state passed'}
# -*- -*- -*- End included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/base.py -*- -*- -*-
# pylint: disable=too-many-lines
# noqa: E301,E302,E303,T001
class OpenShiftCLIError(Exception):
    """Raised when an `oc`/`oadm` command wrapper hits an unrecoverable error."""
# Extra directories searched in addition to $PATH, since ansible/sudo
# environments often omit them (openshift-ansible issue #3410).
ADDITIONAL_PATH_LOOKUPS = ['/usr/local/bin', os.path.expanduser('~/bin')]


def locate_oc_binary():
    """Return the full path to the `oc` client, or the bare name when not found."""
    search_dirs = os.environ.get("PATH", os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS

    binary = 'oc'

    try:
        # shutil.which honours an explicit search path (Python 3).
        found = shutil.which(binary, path=os.pathsep.join(search_dirs))
    except AttributeError:
        # Python 2 fallback: naive scan of each candidate directory.
        for directory in search_dirs:
            candidate = os.path.join(directory, binary)
            if os.path.exists(candidate):
                binary = candidate
                break
    else:
        if found is not None:
            binary = found

    return binary
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
''' Class to wrap the command line tools '''
    def __init__(self,
                 namespace,
                 kubeconfig='/etc/origin/master/admin.kubeconfig',
                 verbose=False,
                 all_namespaces=False):
        ''' Constructor for OpenshiftCLI.

            namespace: default namespace for subsequent oc calls
            kubeconfig: path to the kubeconfig; a private temp copy is used
            verbose: emit extra debugging output
            all_namespaces: operate across all namespaces when supported
        '''
        self.namespace = namespace
        self.verbose = verbose
        # Work on a temporary copy so concurrent runs cannot corrupt the file.
        self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig)
        self.all_namespaces = all_namespaces
        self.oc_binary = locate_oc_binary()
# Pylint allows only 5 arguments to be passed.
# pylint: disable=too-many-arguments
    def _replace_content(self, resource, rname, content, edits=None, force=False, sep='.'):
        '''Fetch the object, apply `content` (key/value puts) or `edits`
        (a Yedit edit list), and `oc replace` it only when something changed.

        Returns the oc result dict, or {'returncode': 0, 'updated': False}
        when no update was needed.
        '''
        res = self._get(resource, rname)
        if not res['results']:
            # Object does not exist; nothing to replace.
            return res

        fname = Utils.create_tmpfile(rname + '-')

        yed = Yedit(fname, res['results'][0], separator=sep)
        updated = False

        if content is not None:
            changes = []
            for key, value in content.items():
                changes.append(yed.put(key, value))

            # Any (True, ...) result means the object was modified.
            if any([change[0] for change in changes]):
                updated = True

        elif edits is not None:
            results = Yedit.process_edits(edits, yed)

            if results['changed']:
                updated = True

        if updated:
            yed.write()
            atexit.register(Utils.cleanup, [fname])

            return self._replace(fname, force)

        return {'returncode': 0, 'updated': False}
def _replace(self, fname, force=False):
'''replace the current object with oc replace'''
# We are removing the 'resourceVersion' to handle
# a race condition when modifying oc objects
yed = Yedit(fname)
results = yed.delete('metadata.resourceVersion')
if results[0]:
yed.write()
cmd = ['replace', '-f', fname]
if force:
cmd.append('--force')
return self.openshift_cmd(cmd)
def _create_from_content(self, rname, content):
'''create a temporary file and then call oc create on it'''
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, content=content)
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._create(fname)
    def _create(self, fname):
        '''Run `oc create -f <fname>` and return the command result dict.'''
        return self.openshift_cmd(['create', '-f', fname])
def _delete(self, resource, name=None, selector=None):
'''call oc delete on a resource'''
cmd = ['delete', resource]
if selector is not None:
cmd.append('--selector={}'.format(selector))
elif name is not None:
cmd.append(name)
else:
raise OpenShiftCLIError('Either name or selector is required when calling delete.')
return self.openshift_cmd(cmd)
    def _process(self, template_name, create=False, params=None, template_data=None):  # noqa: E501
        '''process a template

           template_name: the name of the template to process
           create: whether to send to oc create after processing
           params: the parameters for the template
           template_data: the incoming template's data; instead of a file
        '''
        cmd = ['process']
        if template_data:
            # Template content is piped on stdin instead of read from a file.
            cmd.extend(['-f', '-'])
        else:
            cmd.append(template_name)
        if params:
            # Single quotes are swapped for double quotes so values survive
            # the shell-free subprocess invocation.
            param_str = ["{}={}".format(key, str(value).replace("'", r'"')) for key, value in params.items()]
            cmd.append('-p')
            cmd.extend(param_str)

        results = self.openshift_cmd(cmd, output=True, input_data=template_data)

        if results['returncode'] != 0 or not create:
            return results

        # Persist the processed template and create the resulting objects.
        fname = Utils.create_tmpfile(template_name + '-')
        yed = Yedit(fname, results['results'])
        yed.write()

        atexit.register(Utils.cleanup, [fname])

        return self.openshift_cmd(['create', '-f', fname])
    def _get(self, resource, name=None, selector=None, field_selector=None):
        '''return a resource by name

        When a selector or field_selector is supplied the explicit name is
        ignored, because `oc get` rejects a name combined with selectors.
        The result dict is normalized so rval['results'] is always a list.
        '''
        cmd = ['get', resource]
        if selector is not None:
            cmd.append('--selector={}'.format(selector))
        if field_selector is not None:
            cmd.append('--field-selector={}'.format(field_selector))
        # Name cannot be used with selector or field_selector.
        if selector is None and field_selector is None and name is not None:
            cmd.append(name)
        cmd.extend(['-o', 'json'])
        rval = self.openshift_cmd(cmd, output=True)
        # Ensure results are returned in an array
        if 'items' in rval:
            rval['results'] = rval['items']
        elif not isinstance(rval['results'], list):
            rval['results'] = [rval['results']]
        return rval
def _schedulable(self, node=None, selector=None, schedulable=True):
''' perform oadm manage-node scheduable '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
cmd.append('--schedulable={}'.format(schedulable))
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') # noqa: E501
def _list_pods(self, node=None, selector=None, pod_selector=None):
''' perform oadm list pods
node: the node in which to list pods
selector: the label selector filter if provided
pod_selector: the pod selector filter if provided
'''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
if pod_selector:
cmd.append('--pod-selector={}'.format(pod_selector))
cmd.extend(['--list-pods', '-o', 'json'])
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
    # pylint: disable=too-many-arguments
    def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
        ''' perform oadm manage-node evacuate

        node: explicit node name(s) to evacuate (takes precedence over selector)
        selector: label selector choosing nodes when no node is given
        pod_selector: restrict evacuation to matching pods
        dry_run: only report what would be migrated
        grace_period: seconds granted to pods to terminate gracefully
        force: evacuate even without a replacement controller
        '''
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector={}'.format(selector))
        if dry_run:
            cmd.append('--dry-run')
        if pod_selector:
            cmd.append('--pod-selector={}'.format(pod_selector))
        if grace_period:
            # coerce to int so string inputs from Ansible params are accepted
            cmd.append('--grace-period={}'.format(int(grace_period)))
        if force:
            cmd.append('--force')
        cmd.append('--evacuate')
        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _version(self):
''' return the openshift version'''
return self.openshift_cmd(['version'], output=True, output_type='raw')
def _import_image(self, url=None, name=None, tag=None):
''' perform image import '''
cmd = ['import-image']
image = '{0}'.format(name)
if tag:
image += ':{0}'.format(tag)
cmd.append(image)
if url:
cmd.append('--from={0}/{1}'.format(url, image))
cmd.append('-n{0}'.format(self.namespace))
cmd.append('--confirm')
return self.openshift_cmd(cmd)
    def _run(self, cmds, input_data):
        ''' Actually executes the command. This makes mocking easier.

        Returns (returncode, stdout, stderr) with both streams decoded as UTF-8.
        '''
        # Copy the environment so the caller's os.environ is not mutated.
        curr_env = os.environ.copy()
        curr_env.update({'KUBECONFIG': self.kubeconfig})
        proc = subprocess.Popen(cmds,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                env=curr_env)
        # communicate() feeds stdin, drains both pipes, and waits -- avoids
        # the deadlock that manual read loops can hit with full pipe buffers.
        stdout, stderr = proc.communicate(input_data)
        return proc.returncode, stdout.decode('utf-8'), stderr.decode('utf-8')
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
'''Base command for oc '''
cmds = [self.oc_binary]
if oadm:
cmds.append('adm')
cmds.extend(cmd)
if self.all_namespaces:
cmds.extend(['--all-namespaces'])
elif self.namespace is not None and self.namespace.lower() not in ['none', 'emtpy']: # E501
cmds.extend(['-n', self.namespace])
if self.verbose:
print(' '.join(cmds))
try:
returncode, stdout, stderr = self._run(cmds, input_data)
except OSError as ex:
returncode, stdout, stderr = 1, '', 'Failed to execute {}: {}'.format(subprocess.list2cmdline(cmds), ex)
rval = {"returncode": returncode,
"cmd": ' '.join(cmds)}
if output_type == 'json':
rval['results'] = {}
if output and stdout:
try:
rval['results'] = json.loads(stdout)
except ValueError as verr:
if "No JSON object could be decoded" in verr.args:
rval['err'] = verr.args
elif output_type == 'raw':
rval['results'] = stdout if output else ''
if self.verbose:
print("STDOUT: {0}".format(stdout))
print("STDERR: {0}".format(stderr))
if 'err' in rval or returncode != 0:
rval.update({"stderr": stderr,
"stdout": stdout})
return rval
class Utils(object):  # pragma: no cover
    ''' utilities for openshiftcli modules '''

    @staticmethod
    def _write(filename, contents):
        ''' Actually write the file contents to disk. This helps with mocking. '''
        with open(filename, 'w') as sfd:
            sfd.write(str(contents))

    @staticmethod
    def create_tmp_file_from_contents(rname, data, ftype='yaml'):
        ''' create a file in tmp with name and contents'''
        tmp = Utils.create_tmpfile(prefix=rname)
        if ftype == 'yaml':
            # AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
            # pylint: disable=no-member
            if hasattr(yaml, 'RoundTripDumper'):
                Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
            else:
                Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
        elif ftype == 'json':
            Utils._write(tmp, json.dumps(data))
        else:
            Utils._write(tmp, data)
        # Register cleanup when module is done
        atexit.register(Utils.cleanup, [tmp])
        return tmp

    @staticmethod
    def create_tmpfile_copy(inc_file):
        '''create a temporary copy of a file'''
        tmpfile = Utils.create_tmpfile('lib_openshift-')
        Utils._write(tmpfile, open(inc_file).read())
        # Cleanup the tmpfile
        atexit.register(Utils.cleanup, [tmpfile])
        return tmpfile

    @staticmethod
    def create_tmpfile(prefix='tmp'):
        ''' Generates and returns a temporary file name '''
        with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as tmp:
            return tmp.name

    @staticmethod
    def create_tmp_files_from_contents(content, content_type=None):
        '''Turn an array of dict: filename, content into a files array'''
        if not isinstance(content, list):
            content = [content]
        files = []
        for item in content:
            path = Utils.create_tmp_file_from_contents(item['path'] + '-',
                                                       item['data'],
                                                       ftype=content_type)
            files.append({'name': os.path.basename(item['path']),
                          'path': path})
        return files

    @staticmethod
    def cleanup(files):
        '''Clean up on exit '''
        for sfile in files:
            if os.path.exists(sfile):
                if os.path.isdir(sfile):
                    shutil.rmtree(sfile)
                elif os.path.isfile(sfile):
                    os.remove(sfile)

    @staticmethod
    def exists(results, _name):
        ''' Check to see if the results include the name '''
        if not results:
            return False
        if Utils.find_result(results, _name):
            return True
        return False

    @staticmethod
    def find_result(results, _name):
        ''' Find the specified result by name; returns the entry or None '''
        rval = None
        for result in results:
            if 'metadata' in result and result['metadata']['name'] == _name:
                rval = result
                break
        return rval

    @staticmethod
    def get_resource_file(sfile, sfile_type='yaml'):
        ''' return the service file parsed according to sfile_type '''
        contents = None
        with open(sfile) as sfd:
            contents = sfd.read()
        if sfile_type == 'yaml':
            # AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
            # pylint: disable=no-member
            if hasattr(yaml, 'RoundTripLoader'):
                contents = yaml.load(contents, yaml.RoundTripLoader)
            else:
                contents = yaml.safe_load(contents)
        elif sfile_type == 'json':
            contents = json.loads(contents)
        return contents

    @staticmethod
    def filter_versions(stdout):
        ''' filter the oc version output into a {tool: version} dict '''
        version_dict = {}
        version_search = ['oc', 'openshift', 'kubernetes']
        for line in stdout.strip().split('\n'):
            # hoisted out of the inner loop: an empty line can never match
            if not line:
                continue
            for term in version_search:
                if line.startswith(term):
                    version_dict[term] = line.split()[-1]
        # horrible hack to get openshift version in Openshift 3.2
        # By default "oc version" in 3.2 does not return an "openshift" version
        if "openshift" not in version_dict:
            version_dict["openshift"] = version_dict["oc"]
        return version_dict

    @staticmethod
    def add_custom_versions(versions):
        ''' create custom versions strings (<tech>_numeric and <tech>_short) '''
        versions_dict = {}
        for tech, version in versions.items():
            # clean up "-" from version
            if "-" in version:
                version = version.split("-")[0]
            if version.startswith('v'):
                version = version[1:]  # Remove the 'v' prefix
            versions_dict[tech + '_numeric'] = version.split('+')[0]
            # "3.3.0.33" is what we have, we want "3.3".  Guard against
            # versions with fewer than two components, which previously
            # raised IndexError inside str.format.
            parts = version.split('.')
            if len(parts) >= 2:
                versions_dict[tech + '_short'] = "{}.{}".format(parts[0], parts[1])
            else:
                versions_dict[tech + '_short'] = version
        return versions_dict

    @staticmethod
    def openshift_installed():
        ''' check if openshift is installed via the rpm database '''
        import rpm
        transaction_set = rpm.TransactionSet()
        rpmquery = transaction_set.dbMatch("name", "atomic-openshift")
        return rpmquery.count() > 0

    # Disabling too-many-branches. This is a yaml dictionary comparison function
    # pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
    @staticmethod
    def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
        ''' Given a user defined definition, compare it with the results given back by our query. '''
        # Currently these values are autogenerated and we do not need to check them
        skip = ['metadata', 'status']
        if skip_keys:
            skip.extend(skip_keys)
        for key, value in result_def.items():
            if key in skip:
                continue
            # Both are lists
            if isinstance(value, list):
                if key not in user_def:
                    if debug:
                        print('User data does not have key [%s]' % key)
                        print('User data: %s' % user_def)
                    return False
                if not isinstance(user_def[key], list):
                    if debug:
                        print('user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key]))
                    return False
                if len(user_def[key]) != len(value):
                    if debug:
                        print("List lengths are not equal.")
                        print("key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value)))
                        print("user_def: %s" % user_def[key])
                        print("value: %s" % value)
                    return False
                for values in zip(user_def[key], value):
                    if isinstance(values[0], dict) and isinstance(values[1], dict):
                        if debug:
                            print('sending list - list')
                            print(type(values[0]))
                            print(type(values[1]))
                        result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
                        if not result:
                            print('list compare returned false')
                            return False
                    elif value != user_def[key]:
                        if debug:
                            print('value should be identical')
                            print(user_def[key])
                            print(value)
                        return False
            # recurse on a dictionary
            elif isinstance(value, dict):
                if key not in user_def:
                    if debug:
                        print("user_def does not have key [%s]" % key)
                    return False
                if not isinstance(user_def[key], dict):
                    if debug:
                        print("dict returned false: not instance of dict")
                    return False
                # before passing ensure keys match
                api_values = set(value.keys()) - set(skip)
                user_values = set(user_def[key].keys()) - set(skip)
                if api_values != user_values:
                    if debug:
                        print("keys are not equal in dict")
                        print(user_values)
                        print(api_values)
                    return False
                result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
                if not result:
                    if debug:
                        print("dict returned false")
                        print(result)
                    return False
            # Verify each key, value pair is the same
            else:
                if key not in user_def or value != user_def[key]:
                    if debug:
                        print("value not equal; user_def does not have key")
                        print(key)
                        print(value)
                        if key in user_def:
                            print(user_def[key])
                    return False
        if debug:
            print('returning true')
        return True
class OpenShiftCLIConfig(object):
    '''Generic config holder that renders its options as CLI parameters.'''

    def __init__(self, rname, namespace, kubeconfig, options):
        self.kubeconfig = kubeconfig
        self.name = rname
        self.namespace = namespace
        self._options = options

    @property
    def config_options(self):
        ''' the raw option dictionary backing this config '''
        return self._options

    def to_option_list(self, ascommalist=''):
        '''return all options as a string
           if ascommalist is set to the name of a key, and
           the value of that key is a dict, format the dict
           as a list of comma delimited key=value pairs'''
        return self.stringify(ascommalist)

    def stringify(self, ascommalist=''):
        ''' return the options hash as cli params in a string
            if ascommalist is set to the name of a key, and
            the value of that key is a dict, format the dict
            as a list of comma delimited key=value pairs '''
        params = []
        for name in sorted(self.config_options.keys()):
            entry = self.config_options[name]
            if not entry['include']:
                continue
            value = entry['value']
            # skip unset values (ints, including 0, always count as set)
            if value is None and not isinstance(value, int):
                continue
            if name == ascommalist:
                value = ','.join('{}={}'.format(kk, vv) for kk, vv in sorted(value.items()))
            params.append('--{}={}'.format(name.replace('_', '-'), value))
        return params
# -*- -*- -*- End included fragment: lib/base.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/serviceaccount.py -*- -*- -*-
class ServiceAccountConfig(object):
    '''Service account config class

    Stores the options and builds a default v1 ServiceAccount dict.
    '''

    # pylint: disable=too-many-arguments
    def __init__(self, sname, namespace, kubeconfig, secrets=None, image_pull_secrets=None):
        self.name = sname
        self.kubeconfig = kubeconfig
        self.namespace = namespace
        self.secrets = secrets or []
        self.image_pull_secrets = image_pull_secrets or []
        self.data = {}
        self.create_dict()

    def create_dict(self):
        ''' populate self.data with a properly structured ServiceAccount '''
        self.data = {
            'apiVersion': 'v1',
            'kind': 'ServiceAccount',
            'metadata': {'name': self.name, 'namespace': self.namespace},
            'secrets': [{"name": sec} for sec in self.secrets],
            'imagePullSecrets': [{"name": sec} for sec in self.image_pull_secrets],
        }
class ServiceAccount(Yedit):
    ''' Yedit wrapper exposing a ServiceAccount object's secrets lists '''
    image_pull_secrets_path = "imagePullSecrets"
    secrets_path = "secrets"

    def __init__(self, content):
        '''ServiceAccount constructor'''
        super(ServiceAccount, self).__init__(content=content)
        self._secrets = None
        self._image_pull_secrets = None

    @property
    def image_pull_secrets(self):
        ''' property for image_pull_secrets (lazily read from the document) '''
        if self._image_pull_secrets is None:
            self._image_pull_secrets = self.get(ServiceAccount.image_pull_secrets_path) or []
        return self._image_pull_secrets

    @image_pull_secrets.setter
    def image_pull_secrets(self, secrets):
        ''' setter for image_pull_secrets '''
        self._image_pull_secrets = secrets

    @property
    def secrets(self):
        ''' property for secrets (lazily read from the document) '''
        if not self._secrets:
            self._secrets = self.get(ServiceAccount.secrets_path) or []
        return self._secrets

    @secrets.setter
    def secrets(self, secrets):
        ''' setter for secrets '''
        self._secrets = secrets

    def delete_secret(self, inc_secret):
        ''' remove a secret by name; return True when an entry was removed '''
        remove_idx = None
        for idx, sec in enumerate(self.secrets):
            if sec['name'] == inc_secret:
                remove_idx = idx
                break
        # Bug fix: compare against None explicitly -- `if remove_idx:` is
        # False for index 0, which made the first secret undeletable.
        if remove_idx is not None:
            del self.secrets[remove_idx]
            return True
        return False

    def delete_image_pull_secret(self, inc_secret):
        ''' remove an image_pull_secret by name; return True when removed '''
        remove_idx = None
        for idx, sec in enumerate(self.image_pull_secrets):
            if sec['name'] == inc_secret:
                remove_idx = idx
                break
        # Bug fix: same index-0 truthiness problem as delete_secret.
        if remove_idx is not None:
            del self.image_pull_secrets[remove_idx]
            return True
        return False

    def find_secret(self, inc_secret):
        '''find a secret by name; returns the entry or None'''
        for secret in self.secrets:
            if secret['name'] == inc_secret:
                return secret
        return None

    def find_image_pull_secret(self, inc_secret):
        '''find an image pull secret by name; returns the entry or None'''
        for secret in self.image_pull_secrets:
            if secret['name'] == inc_secret:
                return secret
        return None

    def add_secret(self, inc_secret):
        '''add secret'''
        if self.secrets:
            self.secrets.append({"name": inc_secret})  # pylint: disable=no-member
        else:
            # no existing list in the document: create it via Yedit
            self.put(ServiceAccount.secrets_path, [{"name": inc_secret}])

    def add_image_pull_secret(self, inc_secret):
        '''add image_pull_secret'''
        if self.image_pull_secrets:
            self.image_pull_secrets.append({"name": inc_secret})  # pylint: disable=no-member
        else:
            # no existing list in the document: create it via Yedit
            self.put(ServiceAccount.image_pull_secrets_path, [{"name": inc_secret}])
# -*- -*- -*- End included fragment: lib/serviceaccount.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: class/oc_serviceaccount_secret.py -*- -*- -*-
class OCServiceAccountSecret(OpenShiftCLI):
    ''' Class to wrap the oc command line tools for service-account secrets '''
    kind = 'sa'

    def __init__(self, config, verbose=False):
        ''' Constructor for OpenshiftOC '''
        super(OCServiceAccountSecret, self).__init__(config.namespace, kubeconfig=config.kubeconfig, verbose=verbose)
        self.config = config
        self.verbose = verbose
        self._service_account = None

    @property
    def service_account(self):
        ''' Property for the service account (lazily fetched from the master) '''
        if not self._service_account:
            self.get()
        return self._service_account

    @service_account.setter
    def service_account(self, data):
        ''' setter for the service account '''
        self._service_account = data

    def exists(self, in_secret):
        ''' verifies if secret exists in the service account '''
        result = self.service_account.find_secret(in_secret)
        if not result:
            return False
        return True

    def get(self):
        ''' get the service account definition from the master '''
        sao = self._get(OCServiceAccountSecret.kind, self.config.name)
        if sao['returncode'] == 0:
            self.service_account = ServiceAccount(content=sao['results'][0])
            # expose only the secrets list as the result payload
            sao['results'] = self.service_account.get('secrets')
        return sao

    def delete(self):
        ''' delete secrets from the service account '''
        modified = []
        for rem_secret in self.config.secrets:
            modified.append(self.service_account.delete_secret(rem_secret))
        if any(modified):
            return self._replace_content(OCServiceAccountSecret.kind, self.config.name, self.service_account.yaml_dict)
        return {'returncode': 0, 'changed': False}

    def put(self):
        ''' place secrets into the service account '''
        modified = False
        for add_secret in self.config.secrets:
            if not self.service_account.find_secret(add_secret):
                self.service_account.add_secret(add_secret)
                modified = True
        if modified:
            return self._replace_content(OCServiceAccountSecret.kind, self.config.name, self.service_account.yaml_dict)
        return {'returncode': 0, 'changed': False}

    @staticmethod
    # pylint: disable=too-many-return-statements,too-many-branches
    # TODO: This function should be refactored into its individual parts.
    def run_ansible(params, check_mode):
        ''' run the oc_serviceaccount_secret module'''
        sconfig = ServiceAccountConfig(params['service_account'],
                                       params['namespace'],
                                       params['kubeconfig'],
                                       [params['secret']],
                                       None)
        oc_sa_sec = OCServiceAccountSecret(sconfig, verbose=params['debug'])
        state = params['state']
        api_rval = oc_sa_sec.get()
        #####
        # Get
        #####
        if state == 'list':
            return {'changed': False, 'results': api_rval['results'], 'state': "list"}
        ########
        # Delete
        ########
        if state == 'absent':
            if oc_sa_sec.exists(params['secret']):
                if check_mode:
                    # Bug fix: mismatched quotes made this one literal string
                    # containing '" + "'; use real concatenation instead.
                    return {'changed': True, 'msg': 'Would have removed the ' +
                            'secret from the service account.'}
                api_rval = oc_sa_sec.delete()
                return {'changed': True, 'results': api_rval, 'state': "absent"}
            return {'changed': False, 'state': "absent"}
        if state == 'present':
            ########
            # Create
            ########
            if not oc_sa_sec.exists(params['secret']):
                if check_mode:
                    return {'changed': True, 'msg': 'Would have added the ' +
                            'secret to the service account.'}
                # Create it here
                api_rval = oc_sa_sec.put()
                if api_rval['returncode'] != 0:
                    return {'failed': True, 'msg': api_rval}
                # return the created object
                api_rval = oc_sa_sec.get()
                if api_rval['returncode'] != 0:
                    return {'failed': True, 'msg': api_rval}
                return {'changed': True, 'results': api_rval, 'state': "present"}
            return {'changed': False, 'results': api_rval, 'state': "present"}
        return {'failed': True,
                'changed': False,
                'msg': 'Unknown state passed. %s' % state,
                'state': 'unknown'}
# -*- -*- -*- End included fragment: class/oc_serviceaccount_secret.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: ansible/oc_serviceaccount_secret.py -*- -*- -*-
def main():
    '''
    ansible oc module to manage service account secrets.
    '''
    module = AnsibleModule(
        argument_spec=dict(
            kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
            state=dict(default='present', type='str',
                       choices=['present', 'absent', 'list']),
            debug=dict(default=False, type='bool'),
            namespace=dict(default=None, required=True, type='str'),
            secret=dict(default=None, type='str'),
            service_account=dict(required=True, type='str'),
        ),
        # check mode is handled explicitly inside run_ansible
        supports_check_mode=True,
    )
    rval = OCServiceAccountSecret.run_ansible(module.params, module.check_mode)
    if 'failed' in rval:
        module.fail_json(**rval)
    module.exit_json(**rval)
# Module entry point when invoked by Ansible.
if __name__ == '__main__':
    main()
# -*- -*- -*- End included fragment: ansible/oc_serviceaccount_secret.py -*- -*- -*-
| [
"mwoodson@redhat.com"
] | mwoodson@redhat.com |
691a09c696e5d06361215ef05998a05a23437589 | 6d1380a38aeb89df5db2f742ca0665f877a01133 | /extract.py | 294ccc36295e2534490615af52969899c62233dc | [] | no_license | marijnkoolen/constitution-reference-parser | 937ddbfdb56a1cba78093c7568e311ca6790f4f4 | 4083461abb4dd4cc8639625f9305b580eb69ec04 | refs/heads/master | 2021-01-02T09:27:17.951140 | 2015-09-29T12:47:49 | 2015-09-29T12:47:49 | 40,536,830 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,420 | py | import re
import patterns
from document import ReferenceList, Reference
def extract_refs(document, sentence):
    """Extract all constitution references from `sentence`.

    Repeatedly finds the start of a reference, then consumes any
    follow-up separator/conjunction parts of the same sequence, until
    no further start pattern is found.  Returns the ReferenceList.
    """
    sentenceDone = 0
    # returns a dictionary of document specific patterns
    pattern = patterns.makeRefPatterns(document.RefUnits())
    refList = ReferenceList(sentence, pattern)
    while not sentenceDone:
        # Start of a reference
        matchStart = re.search(refList.pattern['refStart'], refList.sentence)
        if matchStart:
            extract_start_ref(matchStart, refList, document)
            # the dummy marker stays in the sentence while the sequence
            # still has unconsumed follow-up references
            while pattern['refDummy'] in refList.sentence:
                extract_sequence_refs(refList, document)
        else:
            # assumption: there is no reference in this sentence
            # action: signal extraction is done
            refList.FinishCurrent()
            sentenceDone = 1
    # check if this is a complex reference sequence
    return refList
def extract_start_ref(matchStart, refList, document):
    """Consume the opening reference match and seed the reference list.

    The matched text is replaced by the dummy marker so follow-up
    sequence parts can be parsed relative to it.
    """
    # NOTE(review): matchStart.group(0) is used as a regex pattern here; if
    # the matched text ever contains regex metacharacters this substitution
    # could misbehave -- re.escape() would be safer.  Confirm against data.
    refList.sentence = re.sub(matchStart.group(0), refList.pattern['refDummy'], refList.sentence, 1)
    refType, num1, rangeSymbol, num2 = matchStart.groups()
    refType = refType.lower()
    refNums = makeRange(num1, rangeSymbol, num2)
    # units like paragraphs/sentences are too deep in the hierarchy to track
    if refType in document.SkipUnits:
        refList.sentence = re.sub(refList.pattern['refDummy'], "", refList.sentence, 1)
        return 0
    addToRefList(refType, refNums, refList)
    refList.UpdatePrev(refType)
    return 0
def extract_sequence_refs(refList, document):
    """Parse one follow-up element of a reference sequence.

    Dispatches on whether the element is introduced by a separator, a
    conjunction, or a part-of phrase; no match ends the sequence.
    """
    refNums = []
    refType = None
    sep, conj, part, refType, refNums = findSequenceType(refList, document)
    if refNums == []:
        # assumption: if there is no next pattern, the sequence is done
        # action: remove the reference dummy
        refList.sentence = re.sub(refList.pattern['refDummy'], "", refList.sentence, 1)
        refList.FinishCurrent()
        refList.UpdatePrev('')
        return 0
    elif refType:
        refType = refType.lower()
        # if found type is too deep in hierarchy, ignore it
        # e.g. we don't consider paragraphs and sentences as part of the reference
        if refType in document.SkipUnits:
            refList.UpdatePrev(refType)
            return 0
    elif refType == None:
        # if previous type is too deep in hierarchy, ignore it
        # e.g. we don't consider paragraphs and sentences as part of the reference
        if refList.prevUnit in document.SkipUnits:
            refNums = []
    if sep:
        parse_separator_ref(refType, refNums, refList, document)
    elif conj:
        parse_conjunction_ref(refType, refNums, refList, document)
    elif part:
        parse_part_of_ref(refType, refNums, refList)
    if refType != None:
        refList.UpdatePrev(refType)
def findSequenceType(refList, document):
    """Match the next sequence element after the dummy marker.

    Tries "separator/conjunction + number" first, then
    "separator/conjunction + part + type + number".  Returns the tuple
    (sep, conj, part, refType, refNums); refNums is [] when neither
    pattern matches.
    """
    mSepConjNumber = re.search(refList.pattern['refSepConjNumber'], refList.sentence)
    mSepConjPartTypeNumber = re.search(refList.pattern['refSepConjPartTypeNumber'], refList.sentence)
    sep = None
    conj = None
    part = None
    refType = None
    refNums = []
    if mSepConjNumber:
        # NOTE(review): the matched text is used as a regex pattern in re.sub;
        # re.escape() would be safer if it can contain metacharacters.
        refList.sentence = re.sub(mSepConjNumber.group(0), refList.pattern['refDummy'], refList.sentence, 1)
        sep, conj, num1, rangeSymbol, num2 = mSepConjNumber.groups()
        refNums = makeRange(num1, rangeSymbol, num2)
    elif mSepConjPartTypeNumber:
        refList.sentence = re.sub(mSepConjPartTypeNumber.group(0), refList.pattern['refDummy'], refList.sentence, 1)
        sep, conj, part, refType, num1, rangeSymbol, num2 = mSepConjPartTypeNumber.groups()
        refNums = makeRange(num1, rangeSymbol, num2)
    return (sep, conj, part, refType, refNums)
def parse_separator_ref(refType, refNums, refList, document):
    """Handle a reference introduced by a separator (e.g. a comma)."""
    # 1. ref sep number -> new ref of same type
    # assumption: type of new ref is implicit
    # action: add refs similar to previous type
    if refType == None:
        addToRefList(None, refNums, refList)
    # 2. ref sep type number -> new ref of same type
    # assumption: type of new ref is explicit and of same type
    elif refType == refList.prevUnit:
        addToRefList(None, refNums, refList)
    # 3. ref sep type number -> specification of existing ref
    # assumption: hierarchical relations are written from high to low
    # action: replace previous reference with hierarchical reference
    elif refType in document.ContainedBy and refList.prevUnit in document.ContainedBy[refType]:
        prevRef = refList.Last()
        refList.RemoveLast()
        for refNum in refNums:
            reference = Reference()
            reference.CopyFrom(prevRef)
            reference.AddPart(refType, refNum)
            refList.AddCurrent(reference)
    # 4. ref sep type number -> new ref of different type
    # assumption: previous ref was hierarchical, new ref is higher in hierarchy
    # action: add refType as new reference
    else:
        addToRefList(refType, refNums, refList)
def parse_conjunction_ref(refType, refNums, refList, document):
    """Handle a reference introduced by a conjunction (e.g. 'and')."""
    # ref conj number -> ref
    # assumptions:
    # 1. no mention of type suggests these are
    #    references of the same type as the
    #    previous reference
    if refType == None:
        addToRefList(None, refNums, refList)
    # ref conj type number -> ref
    # previous reference has same type and higher
    # level type
    # assumptions:
    # 2. explicit mention of type suggest this is a
    #    separate reference, but share higher level
    #    type
    elif refType == refList.prevUnit:
        prevRef = refList.Last()
        # copy the higher-level container part from the previous reference
        for container in document.ContainedBy[refType]:
            if container in prevRef.TargetParts:
                for refNum in refNums:
                    reference = Reference()
                    reference.CopyFrom(prevRef)
                    reference.AddPart(refType, refNum)
                    refList.AddCurrent(reference)
                break
    # ref conj type number -> ref
    # assumptions:
    # 3. explicit mention of type suggests these are
    #    separate references
    else:
        addToRefList(refType, refNums, refList)
def parse_part_of_ref(refType, refNums, refList):
    """Handle a 'part of' phrase, which closes the current sequence."""
    # ref part type number -> ref
    # assumptions:
    # 1. part of signals end of sequence
    # 2. new type is container of all refs in sequence
    for refNum in refNums:
        for reference in refList.current:
            reference.AddPart(refType, refNum)
    refList.prevUnit = ''
    refList.FinishCurrent()
    # remove dummy reference
    refList.sentence = re.sub(refList.pattern['refDummy'], "", refList.sentence, 1)
def addToRefList(refType, refNums, refList):
    """Append one Reference per number in refNums to the current list.

    A None refType means "same type as the previous reference": the last
    reference is copied and refList.prevUnit supplies the unit type.
    """
    for refNum in refNums:
        reference = Reference()
        if refType == None:
            reference.CopyFrom(refList.Last())
            refType = refList.prevUnit
        reference.AddPart(refType, refNum)
        refList.AddCurrent(reference)
def makeRange(num1, rangeSymbol, num2):
    """Expand a numeric range into a list of number strings.

    Returns [num1] when there is no range, or when the range is
    descending (treated as malformed).  Otherwise returns the inclusive
    sequence num1..num2 as text.
    """
    if rangeSymbol and num2:
        if int(num2) < int(num1):
            return [num1]
        # Bug fix: `unicode` only exists on Python 2; fall back to `str`
        # on Python 3 so the module keeps working on both interpreters.
        try:
            text_type = unicode  # Python 2
        except NameError:
            text_type = str  # Python 3
        return [text_type(num) for num in range(int(num1), int(num2) + 1)]
    return [num1]
| [
"marijn.koolen@gmail.com"
] | marijn.koolen@gmail.com |
804861121ec5dd38d2d654fa3b12e263b371c486 | fa346a2d5886420e22707a7be03599e634b230a9 | /temboo/Library/Amazon/IAM/__init__.py | 59787a0664534645fc9a01dd8d74b838ef9e46c0 | [] | no_license | elihuvillaraus/entity-resolution | cebf937499ed270c3436b1dd25ab4aef687adc11 | 71dd49118a6e11b236861289dcf36436d31f06bc | refs/heads/master | 2021-12-02T17:29:11.864065 | 2014-01-08T04:29:30 | 2014-01-08T04:29:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,056 | py | from ChangePassword import *
from UpdateGroup import *
from CreateAccessKey import *
from ListRolePolicies import *
from UpdateLoginProfile import *
from GetUserPolicy import *
from UpdateServerCertificate import *
from DeleteServerCertificate import *
from DeactivateMFADevice import *
from UpdateAccountPasswordPolicy import *
from ListAccessKeys import *
from DeleteAccessKey import *
from GetUser import *
from DeleteInstanceProfile import *
from RemoveUserFromGroup import *
from DeleteGroup import *
from GetAccountPasswordPolicy import *
from CreateUser import *
from ListInstanceProfilesForRole import *
from ListGroups import *
from ResyncMFADevice import *
from GetAccountSummary import *
from ListMFADevices import *
from CreateGroup import *
from DeleteGroupPolicy import *
from CreateLoginProfile import *
from GetLoginProfile import *
from DeleteRolePolicy import *
from GetRole import *
from GetGroupPolicy import *
from ListUsers import *
from EnableMFADevice import *
from ListVirtualMFADevices import *
from DeleteRole import *
from UpdateAccessKey import *
from ListUserPolicies import *
from UploadSigningCertificate import *
from RemoveRoleFromInstanceProfile import *
from AddUserToGroup import *
from ListServerCertificates import *
from GetServerCertificate import *
from ListInstanceProfiles import *
from CreateInstanceProfile import *
from ListSigningCertificates import *
from AddRoleToInstanceProfile import *
from CreateAccountAlias import *
from ListGroupPolicies import *
from ListRoles import *
from ListGroupsForUser import *
from UpdateSigningCertificate import *
from DeleteAccountAlias import *
from ListAccountAliases import *
from DeleteUser import *
from DeleteAccountPasswordPolicy import *
from DeleteLoginProfile import *
from UploadServerCertificate import *
from GetInstanceProfile import *
from UpdateUser import *
from DeleteUserPolicy import *
from DeleteSigningCertificate import *
from GetRolePolicy import *
from GetGroup import *
from DeleteVirtualMFADevice import *
from CreateVirtualMFADevice import *
| [
"cedric.warny@gmail.com"
] | cedric.warny@gmail.com |
a755aa7a12874059c62a013d32c7aff02bd0ef46 | 94f8127cfb1d4e98b9e249556fc8f15be6e7f92e | /interview/reversed_linkedlist.py | 69f28fb3e1be8c11a7d4a8c7d73010ea15f6c490 | [
"WTFPL"
] | permissive | pranavgarg/algorithms | dfcb987db4064e6930ed84200e983fca396e0263 | 09fbdce0062ca2ffa22de6c595525d09f8a2d086 | refs/heads/master | 2020-06-30T08:08:41.593316 | 2018-07-12T23:56:13 | 2018-07-12T23:56:13 | 74,384,440 | 4 | 1 | null | 2018-07-12T23:56:14 | 2016-11-21T16:44:34 | Python | UTF-8 | Python | false | false | 2,295 | py | from __future__ import print_function
class Node(object):
    """Singly-linked list node holding a payload and a next pointer."""

    def __init__(self, payload=None, next_node=None):
        self.payload = payload
        self.next_node = next_node

    def get_payload(self):
        """Return the stored payload."""
        return self.payload

    def get_next(self):
        """Return the next node, or None at the tail."""
        return self.next_node

    def has_next(self):
        """Return True when a next node is linked."""
        return bool(self.next_node)

    def set_next(self, new_next):
        """Link `new_next` after this node and return it."""
        self.next_node = new_next
        return self.next_node

    def __str__(self):
        # Bug fix: str() the payload so non-string payloads (ints, None)
        # no longer raise TypeError when the node is printed.
        return str(self.payload)
class LinkedList(object):
    """Singly-linked list implementation."""

    def __init__(self, head=None):
        self.head = head

    def get_head(self):
        """Return the head node, or None when the list is empty."""
        return self.head

    def get_tail(self):
        """Return the last node, or None when the list is empty."""
        current = self.head
        if not current or not current.has_next():
            return current
        while current.has_next():
            current = current.get_next()
        return current

    def insert(self, payload, new_node=None):
        """Prepend a node (wrapping `payload` unless `new_node` is given).

        Returns self so calls can be chained.
        """
        if not new_node:
            new_node = Node(payload)
        new_node.set_next(self.head)
        self.head = new_node
        return self

    def append(self, payload, new_node=None):
        """Append a node at the tail.  Returns self for chaining."""
        if not new_node:
            new_node = Node(payload)
        tail = self.get_tail()
        # Bug fix: appending to an empty list used to raise AttributeError
        # (get_tail() returns None); make the new node the head instead.
        if tail is None:
            self.head = new_node
        else:
            tail.set_next(new_node)
        return self

    def size(self):
        """Return the number of nodes in the list."""
        current = self.head
        node_count = 0
        while current:
            node_count += 1
            current = current.get_next()
        return node_count

    def reverse(self):
        """Return a NEW LinkedList with the payloads in reverse order.

        Bug fix: the old version inserted whole Node objects as payloads
        (so get_payload() on the result returned Nodes) and crashed on an
        empty list; insert each payload into a fresh list instead.
        """
        reversed_list = LinkedList()
        current = self.head
        while current:
            reversed_list.insert(current.get_payload())
            current = current.get_next()
        return reversed_list

    def __repr__(self):
        current = self.head
        line = ""
        while current:
            line += "({}) -> ".format(current.__str__())
            current = current.get_next()
        line += "None"
        return line
if __name__ == '__main__':
    # Demo: build A -> B -> C -> D with prepends, append E, then show
    # the list and its reversal.
    my_list = LinkedList( Node(payload='D') ).insert('C').insert('B').insert('A').append('E')
    print(my_list)
    new_list = my_list.reverse()
    print(new_list)
| [
"jo.chasinga@gmail.com"
] | jo.chasinga@gmail.com |
476e8f2d422cf9b9348b2be998dbf5b010ef7f87 | 620b58e17d4851e43bd1270cabc8c26f43629a7b | /lib/candy_editor/AssetEditor/EngineAsset/ShaderAsset.py | fd3756f724e956b29e72c34bc709b54269fc049f | [
"MIT"
] | permissive | lihaochen910/Candy | 78b9862cf06748b365b6fb35ac23f0e7a00ab558 | d12cb964768459c22f30c22531d3e1734901e814 | refs/heads/master | 2022-11-25T19:12:34.533828 | 2021-11-07T16:11:07 | 2021-11-07T16:11:07 | 141,284,960 | 1 | 1 | NOASSERTION | 2022-11-22T09:20:08 | 2018-07-17T12:12:02 | Lua | UTF-8 | Python | false | false | 2,551 | py | import os.path
import logging
import subprocess
import shutil
import json
from candy_editor.core import *
from candy_editor.moai.MOAIRuntime import _CANDY
##----------------------------------------------------------------##
class ShaderAssetManager(AssetManager):
    """Registers '.shader' definition files with the asset library."""

    def getName(self):
        return 'asset_manager.shader'

    def getMetaType(self):
        return 'script'

    def acceptAssetFile(self, filePath):
        """Accept only existing files whose extension is '.shader'."""
        if not os.path.isfile(filePath):
            return False
        _, ext = os.path.splitext(filePath)
        return ext in ['.shader']

    def importAsset(self, node, reload=False):
        """Tag the node as a shader and attach its definition file."""
        node.assetType = 'shader'
        node.setObjectFile('def', node.getFilePath())
        return True
##----------------------------------------------------------------##
class ShaderAssetCreator(AssetCreator):
    """Creates new, empty '.shader' assets from the editor."""

    def getAssetType(self):
        return 'shader'

    def getLabel(self):
        return 'Shader'

    def createAsset(self, name, contextNode, assetType):
        """Create an empty serialized 'candy.Shader' beside or inside contextNode."""
        filename = name + '.shader'
        if contextNode.isType('folder'):
            nodepath = contextNode.getChildPath(filename)
        else:
            nodepath = contextNode.getSiblingPath(filename)
        fullpath = AssetLibrary.get().getAbsPath(nodepath)
        _CANDY.createEmptySerialization(fullpath, 'candy.Shader')
        return nodepath
class ShaderScriptAssetManager(AssetManager):
    """Registers vertex ('.vsh') and fragment ('.fsh') shader source files."""

    def getName(self):
        return 'asset_manager.shader_script'

    def getMetaType(self):
        return 'script'

    def acceptAssetFile(self, filePath):
        """Accept only existing '.vsh'/'.fsh' files."""
        if not os.path.isfile(filePath):
            return False
        _, ext = os.path.splitext(filePath)
        return ext in ['.vsh', '.fsh']

    def importAsset(self, node, reload=False):
        """Derive assetType from the extension and attach the source file."""
        _, ext = os.path.splitext(node.getFilePath())
        if ext == '.vsh':
            node.assetType = 'vsh'
        elif ext == '.fsh':
            node.assetType = 'fsh'
        node.setObjectFile('src', node.getFilePath())
        return True
##----------------------------------------------------------------##
# Module side effects: register the shader managers/creator and their icons.
for _manager in (ShaderAssetManager(), ShaderAssetCreator(), ShaderScriptAssetManager()):
    _manager.register()

for _asset_type, _icon in (('shader', 'shader'), ('vsh', 'text-red'), ('fsh', 'text-yellow')):
    AssetLibrary.get().setAssetIcon(_asset_type, _icon)
| [
"lihaochen910@hotmail.com"
] | lihaochen910@hotmail.com |
706d2dfa25e4b34f7b1c276ec5e138a4f741136f | d140ad879742ecbe23138f9ff80d9fd71e9430f3 | /load_data.py | bbf01decf981664bb3d185521d3339ce88c1cc97 | [] | no_license | benjaminpjl/Data-Challenge | 78a6c6699ae78d81bae6b31e21389d1dc1d0e94b | bc803cc72f7fec5c9f43173a0c1ab5cec04a8824 | refs/heads/master | 2020-06-21T17:55:02.393488 | 2016-11-22T16:48:45 | 2016-11-22T16:48:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,114 | py | # -*- coding: utf-8 -*-
"""
Created on Fri Nov 4 19:16:46 2016
@author: Team BigInJapan
"""
import pandas as pd
import numpy as np
import datetime as dt
from functools import partial
def extract_date(string):
    """Parse a 'YYYY-MM-DD HH:MM:SS.000' timestamp string into a datetime."""
    return dt.datetime.strptime(string, "%Y-%m-%d %H:%M:%S.000")

def extract_weekday(date):
    """Day of week as an int, Monday == 0."""
    return date.weekday()

def extract_hour(date):
    """Hour of day, 0-23."""
    return date.hour

def extract_month(date):
    """Month number, 1-12."""
    return date.month

def extract_year(date):
    """Four-digit year."""
    return date.year
def hourlymean_past2weeks(date, y):
    """Mean CSPL_RECEIVED_CALLS at *date*'s hour over the 7 days before *date*.

    :param date: datetime of the slot being predicted.
    :param y: DataFrame indexed by datetime with 'HOUR' and
              'CSPL_RECEIVED_CALLS' columns.
    :return: mean over the matching-hour samples, or 0 when there are none.

    NOTE(review): the name says "past2weeks" but the window is 7 days — confirm
    which was intended.
    """
    nday_before = 7
    ysel = y.loc[(y.index > date - dt.timedelta(nday_before))]
    ysel = ysel.loc[(ysel.index < date)]
    h = date.hour  # same value extract_hour() returns, inlined
    same_hour = ysel.loc[ysel.loc[:, 'HOUR'] == h]
    if len(same_hour) == 0:
        # BUG FIX: the original guard was `len(...) < 0`, which can never be
        # true, so an empty selection fell through and .mean() returned NaN.
        return 0
    return same_hour.loc[:, "CSPL_RECEIVED_CALLS"].mean()
def lastvalue(date, y):
    """Estimate calls at *date* from the same timestamp 1 and 2 weeks earlier.

    Returns the weighted blend (3*v + w)/4 where v is the value 7 days before
    *date* and w the value 14 days before (w falls back to v when missing).
    """
    nday_before = 7
    try:
        # v: calls observed exactly 7 days before this timestamp (KeyError if absent).
        v = y.loc[date - dt.timedelta(nday_before)]["CSPL_RECEIVED_CALLS"]
        try:
            # w: calls observed 14 days before; default to v when missing.
            w = y.loc[date - 2*dt.timedelta(nday_before)]["CSPL_RECEIVED_CALLS"]
        except:
            w = v
    except KeyError:
        # No sample one week back: reuse the current slot's value jittered with
        # Gaussian noise. NOTE(review): this makes the feature
        # non-deterministic — confirm that is intended.
        v = y.loc[date]["CSPL_RECEIVED_CALLS"] + np.random.normal(scale = 3)
        w = v
    return((3*v + w)/4)
def load_data(path, ass, nrows = None):
    """Load the call-center CSV and build per-assignment feature/target frames.

    :param path: path to the ';'-separated training CSV.
    :param ass: list of ASS_ASSIGNMENT values; one frame is built per entry.
    :param nrows: optional row cap forwarded to pandas.read_csv.
    :return: (features_data, rcvcall_data, preproc_data, norm) — parallel lists
             indexed like *ass*. Targets and derived features are divided by
             their max; `norm` keeps each target's max for de-normalization.
    """
    ## Loading the data
    col_loaded = ['DATE', 'DAY_OFF', 'WEEK_END',
                  'ASS_ASSIGNMENT','TPER_TEAM', 'CSPL_RECEIVED_CALLS']
    if (nrows == None):
        data = pd.read_csv(path,
                           sep = ";",
                           usecols = col_loaded)
    else:
        data = pd.read_csv(path,
                           sep = ";",
                           usecols = col_loaded,
                           nrows = nrows)
    nrows = len(data.index)
    #data = data.set_index('DATE')
    print(str(nrows) + " rows imported.")
    #data.info()
    ## Creating the day/night feature from TPER_TEAM ("Jours" -> 0, else 1)
    print("Creating features Jour et Nuit.")
    tper_team = data['TPER_TEAM'].values.tolist()
    # jour = []
    nuit = []
    for i in range(nrows):
        if(tper_team[i] == "Jours"):
            # jour.append(1)
            nuit.append(0)
        else:
            nuit.append(1)
            # jour.append(0)
    # data['JOUR'] = jour
    data['NUIT'] = nuit
    ## Selecting Data
    print("Selecting used Data.")
    col_used = ['DATE', 'DAY_OFF', 'WEEK_END',
                'ASS_ASSIGNMENT', 'NUIT', 'CSPL_RECEIVED_CALLS']
    data = data[col_used]
    data.sort_values(["ASS_ASSIGNMENT", "DATE"], inplace = True)
    ## One frame per ASS_ASSIGNMENT (one model is trained for each)
    l = len(ass)
    preproc_data = []
    rcvcall_data = []
    features_data = []
    norm = []
    for i in range(l):
        print("Preprocessing n°" + str(i + 1) + "/" + str(l))
        assignement = ass[i]
        print("Selecting one ASS_ASSIGNEMENT : " + assignement)
        # NOTE(review): this appends a slice of `data`; the in-place drop below
        # can trigger pandas' SettingWithCopyWarning — verify on upgrade.
        preproc_data.append(data.loc[data.loc[:,"ASS_ASSIGNMENT"] == assignement, :])
        preproc_data[i].drop(["ASS_ASSIGNMENT"],1, inplace = True)
        nrows = len(preproc_data[i].index)
        print(str(nrows) + " rows to process.")
        print("Grouping by dates...")
        # Aggregate duplicated timestamps: target is summed, features averaged.
        rcvcall = preproc_data[i].groupby(["DATE"])['CSPL_RECEIVED_CALLS'].sum()
        preproc_data[i] = preproc_data[i].groupby(["DATE"]).mean()
        preproc_data[i].loc[:, 'CSPL_RECEIVED_CALLS'] = rcvcall
        preproc_data[i].loc[:, 'DATE'] = preproc_data[i].index
        nrows = len(preproc_data[i])
        print(str(nrows) + " rows to process.")
        # print(preproc_data)
        ## Feature engineering
        print("Feature engineering...")
        # Calendar features derived from the DATE strings.
        # NOTE(review): the positional `1` lands on Series.apply's
        # convert_dtype parameter — presumably harmless; verify.
        dates = preproc_data[i]['DATE'].apply(extract_date, 1)
        #print(dates)
        hours = dates.apply(extract_hour, 1)
        preproc_data[i].loc[:,"WEEKDAY"] = dates.apply(extract_weekday, 1)
        preproc_data[i].loc[:,"HOUR"] = hours
        preproc_data[i].loc[:,"MONTH"] = dates.apply(extract_month, 1)
        preproc_data[i].loc[:,"MONTH_YEAR"] = preproc_data[i].loc[:,"MONTH"] + dates.apply(extract_year, 1)
        preproc_data[i].loc[:,"DATE"] = dates
        preproc_data[i].index = dates
        #print(used_data.describe())
        # Per-(weekday, month+year) mean and std of the target.
        m = preproc_data[i].groupby(["WEEKDAY", "MONTH_YEAR"])["CSPL_RECEIVED_CALLS"].transform(np.mean)
        s = preproc_data[i].groupby(["WEEKDAY", "MONTH_YEAR"])["CSPL_RECEIVED_CALLS"].transform(np.std)
        preproc_data[i].loc[:, "WEEKDAY_MEAN"] = m
        preproc_data[i].loc[:, "WEEKDAY_STD"] = s
        print("Retrieving past data...")
        # RCV_7DAY: the lastvalue() blend of the same slot 1-2 weeks earlier.
        y = pd.DataFrame(rcvcall)
        y.loc[:,"HOUR"] = hours
        y.index = dates
        fun = partial(lastvalue, y = y)
        preproc_data[i].loc[:, 'RCV_7DAY'] = dates.apply(fun, 1)
        #print(preproc_data)
        rcvcall_data.append(preproc_data[i]['CSPL_RECEIVED_CALLS'])
        features_data.append(preproc_data[i])
        features_data[i].drop(["CSPL_RECEIVED_CALLS", "DATE", "WEEKDAY", "MONTH_YEAR"], 1, inplace = True)
        # print(len(preproc_data))
        # print(len(rcvcall_data))
        # print(len(features_data))
        # rcvcall_data[2].plot()
        ## Normalization (max-scaling; zeros are left untouched)
        print("Normalizing the data...")
        n_call = max(rcvcall_data[i])
        # std_call = rcvcall_data[i].std()
        n_mean = max(features_data[i].loc[:,"WEEKDAY_MEAN"])
        n_std = max(features_data[i].loc[:,"WEEKDAY_STD"])
        norm.append(n_call)
        # norm.append(std_call)
        if n_call!=0:
            # rcvcall_data[i] /= std_call
            rcvcall_data[i] /= n_call
            # features_data[i].loc[:,'RCV_7DAY'] /= std_call
            features_data[i].loc[:,'RCV_7DAY'] /= n_call
        if n_mean != 0:
            features_data[i].loc[:,'WEEKDAY_MEAN'] /= n_mean
        if n_std != 0:
            features_data[i].loc[:,'WEEKDAY_STD'] /= n_std
    return features_data, rcvcall_data, preproc_data, norm
#FOR TESTING"
if __name__ == '__main__':
    # Smoke run on the first 200k rows of the challenge training file.
    import os
    os.chdir("/home/nicolas/Documents/INF554 - Machine Learning/AXA Data Challenge")
    ass = ['CMS', 'Crises', 'Domicile', 'Gestion', 'Gestion - Accueil Telephonique',
           'Gestion Assurances', 'Gestion Relation Clienteles', 'Gestion Renault', 'Japon', 'Médical',
           'Nuit', 'RENAULT', 'Regulation Medicale', 'SAP', 'Services', 'Tech. Axa', 'Tech. Inter', 'Téléphonie',
           'Tech. Total', 'Mécanicien', 'CAT', 'Manager', 'Gestion Clients', 'Gestion DZ', 'RTC', 'Prestataires']
    features_data, rcvcall_data, preproc_data, norm = load_data(
        "train_2011_2012_2013.csv", ass=ass, nrows=200000)
| [
"nicolas.rahmouni@polytechnique.edu"
] | nicolas.rahmouni@polytechnique.edu |
9840040315f9fdf4d3c22de338e2ace8d80de7a0 | fad702beb35d587278010e570a923bc84a4dda4a | /code/pyorg/scripts/tests/uni_2nd_speedup.py | 13519cc3f8dc80adcdd125cde94a260a0bee67ba | [
"Apache-2.0"
] | permissive | anmartinezs/pyseg_system | f7769ec3dcaf243895ec1cf13ac6e1da1ab2a92a | 1370bfedae2ad5e6cdd1dc08395eb9e95b4a8596 | refs/heads/master | 2023-02-23T06:23:10.087737 | 2023-01-30T13:24:36 | 2023-01-30T13:24:36 | 227,147,753 | 15 | 4 | NOASSERTION | 2023-02-10T17:18:20 | 2019-12-10T14:58:22 | C | UTF-8 | Python | false | false | 7,837 | py | """
Measures the speed-up for computing univarite 2nd oder models and simulate CSRV instances
"""
################# Package import
import os
import sys
import math
import time
import numpy as np
import multiprocessing as mp
from scipy.optimize import curve_fit
from pyorg.surf.model import ModelCSRV, gen_tlist
from pyorg.surf.utils import disperse_io
from matplotlib import pyplot as plt, rcParams
plt.switch_backend('agg')  # headless backend: figures are only written to disk
###### Global variables
__author__ = 'Antonio Martinez-Sanchez'
########################################################################################
# PARAMETERS
########################################################################################
# Output root: first CLI argument, or the repository 'tests' folder by default.
try:
    root_path = sys.argv[1]
except IndexError:
    root_path = os.path.split(os.path.abspath(__file__))[0] + '/../../../tests'
out_dir = root_path + '/results'
# Synthetic data generation variables
sdat_surf = root_path + '/../pyorg/surf/test/in/sph_rad_5_surf.vtp'  # particle surface model
sdat_tomo_shape = (500, 500, 100)  # tomogram size in voxels
sdat_n_tomos = 5
sdat_n_sims = None # 20 -- None: simulations per tomogram = number of processes
sdat_n_part_tomo = 600 # 200
# Analysis variables
ana_npr_rg = [1, 2, 4, 8, 16, 24, 32, 36] # [1, 2, 4, 16] # It must start with 1
ana_rad_rg = np.arange(4, 250, 1) # np.arange(4, 180, 3)
ana_shell_thick = None  # None: function L; otherwise O with this shell thickness
ana_fmm = False # True -- False: Euclidean metric, True: geodesic (fast marching)
# Plotting settings
rcParams['axes.labelsize'] = 14
rcParams['xtick.labelsize'] = 14
rcParams['ytick.labelsize'] = 14
rcParams['patch.linewidth'] = 2
########################################################################################
# HELPING FUNCTIONS
########################################################################################
def gen_rect_voi_array(shape):
    """Build a boolean rectangular VOI.

    :param shape: 3-tuple with the length of the three rectangle sides
    :return: a bool ndarray of size shape+1 whose interior (indices 1..side-1
             on each axis) is True, leaving a one-voxel False border.
    """
    voi = np.zeros(np.asarray(shape) + 1, dtype=bool)
    voi[1:shape[0], 1:shape[1], 1:shape[2]] = True
    return voi
def amdahls(x, p):
    """Amdahl's-law speedup.

    :param x: speedup of the part of the task that benefits from more resources
    :param p: proportion of the execution time that part originally occupied
    :return: the overall speedup
    """
    serial_fraction = 1. - p
    return 1. / (serial_fraction + p / x)
########################################################################################
# MAIN ROUTINE
########################################################################################
########## Print initial message

# Echo the full run configuration so the log documents what was measured.
print('Test for measuring univariate 2nd order and simulations computation speed-up.')
print('\tAuthor: ' + __author__)
print('\tDate: ' + time.strftime("%c") + '\n')
print('\tSynthetic data generations settings: ')
print('\t\t-Particle surface path: ' + str(sdat_surf))
print('\t\t-Tomogram shape: ' + str(sdat_tomo_shape))
print('\t\t-Number of tomograms: ' + str(sdat_n_tomos))
if sdat_n_sims is None:
    print('\t\t-Number of simulations per tomogram are set to the number of processess.')
else:
    print('\t\t-Number of simulations per tomogram: ' + str(sdat_n_sims))
print('\t\t-Number of particles per tomogram: ' + str(sdat_n_part_tomo))
print('\tAnalysis settings: ')
print('\t\t-Number of parallel processes to check: ' + str(ana_npr_rg))
print('\t\t-Scale samplings array: ' + str(ana_rad_rg))
if ana_shell_thick is None:
    print('\t\t-Functions L is computed.')
else:
    print('\t\t-Function O is computed with shell thickness: ' + str(ana_shell_thick))
if ana_fmm:
    print('\t\t-Geodesic metric.')
else:
    print('\t\t-Euclidean metric.')
print('')

######### Main process

print('Main Routine: ')

print('\t-Initialization...')
# Synthetic ground truth: a rectangular VOI with CSRV-placed particles
# embedded in sdat_n_tomos tomograms.
voi = gen_rect_voi_array(sdat_tomo_shape)
part = disperse_io.load_poly(sdat_surf)
model_csrv = ModelCSRV()
ltomos_csrv = gen_tlist(sdat_n_tomos, sdat_n_part_tomo, model_csrv, voi, sdat_surf, mode_emb='center',
                        npr=max(ana_rad_rg))
cu_i = 1. / float(sdat_n_tomos * sdat_n_part_tomo)  # per computing-unit scale
cpus = mp.cpu_count()
print('\t\t+CPUs found: ' + str(cpus))

# Loop for the of processors
print('\t-Measurements loops: ')
comp_times = np.zeros(shape=len(ana_npr_rg), dtype=np.float32)
sim_times = np.zeros(shape=len(ana_npr_rg), dtype=np.float32)
for i, npr in enumerate(ana_npr_rg):
    print('\t\t+Number of processes: ' + str(npr))
    # Computations loop: time the 2nd-order analysis and the null-model
    # simulations for every tomogram at this process count.
    comp_time, sim_time = 0, 0
    for tkey in ltomos_csrv.get_tomo_fname_list():
        hold_time = time.time()
        hold_tomo = ltomos_csrv.get_tomo_by_key(tkey)
        hold_tomo.compute_uni_2nd_order(ana_rad_rg, thick=None, border=True, conv_iter=None, max_iter=None, fmm=ana_fmm,
                                        npr=npr)
        comp_time += (time.time() - hold_time)
        if sdat_n_sims is None:
            hold_n_sims = npr
        else:
            hold_n_sims = sdat_n_sims
        cu_sim_i = 1. / float(sdat_n_tomos * sdat_n_part_tomo * hold_n_sims)
        hold_time = time.time()
        # NOTE(review): hold_sim is assigned but never read afterwards; only
        # the wall time of the simulation call is recorded.
        hold_sim = hold_tomo.simulate_uni_2nd_order(hold_n_sims, model_csrv, part, 'center', ana_rad_rg, thick=None,
                                                    border=True, conv_iter=None, max_iter=None, fmm=ana_fmm,
                                                    npr=npr)
        sim_time += (time.time() - hold_time)
    # NOTE(review): cu_sim_i is set inside the tomogram loop, so the value from
    # the LAST tomogram scales the accumulated total — verify this is intended.
    comp_times[i], sim_times[i] = comp_time * cu_i, sim_time * cu_sim_i
    print('\t\t\t*Computation time per c.u.: ' + str(comp_times[i]) + ' [secs]')
    print('\t\t\t*Computation time per c.u. and null-model simulations time: ' + str(sim_times[i]) + ' [secs]')

print('\tPlotting: ')
# plt.figure()
# plt.xlabel('# processes')
# plt.ylabel('Time/c.u. [s]')
# plt.plot(ana_npr_rg, comp_times, linewidth=2.0, linestyle='-', color='b', label='C')
# plt.plot(ana_npr_rg, sim_times, linewidth=2.0, linestyle='-', color='g', label='C+S')
# plt.tight_layout()
# plt.legend(loc=0)
# if out_dir is not None:
#     out_fig_times = out_dir + '/times.png'
#     print '\t\t-Storing the time figure in: ' + out_fig_times
#     plt.savefig(out_fig_times)
# else:
#     plt.show(block=True)
# plt.close()

# Speed up fitting: fit Amdahl's law to the measured speedups.
processes = np.asarray(ana_npr_rg, dtype=float)
processes_ex = np.logspace(0, np.log2(cpus), num=50, base=2)
sup_comp = comp_times[0] / comp_times
sup_sim = sim_times[0] / sim_times
# NOTE(review): the fitted curves (sup_*_f) are computed but their plot lines
# below are commented out; only the raw measured speedups are drawn.
popt_comp, pcov_comp = curve_fit(amdahls, processes, sup_comp)
popt_sim, pcov_sim = curve_fit(amdahls, processes, sup_sim)
sup_comp_f = amdahls(processes_ex, popt_comp)
sup_sim_f = amdahls(processes_ex, popt_sim)
# Dual-axis figure: times on the left axis, speedups on the right one.
fig, ax1 = plt.subplots()
ax1.set_xlabel('# processes')
ax1.set_ylabel('Time/c.u. [s]')
# ax1.set_xlim((1, processes_ex.max()))
ax1.plot(ana_npr_rg, comp_times, linewidth=2.0, linestyle='--', color='b', label='C Time')
ax1.plot(ana_npr_rg, sim_times, linewidth=2.0, linestyle='--', color='g', label='C&S Time')
ax2 = ax1.twinx()
ax2.set_ylabel('Speedup')
# plt.plot(processes_ex, processes_ex, linewidth=1.0, linestyle='--', color='k', label='IDEAL')
# plt.plot((16, 16), (0, 16), linewidth=1.0, linestyle='-.', color='k')
# plt.plot((36, 36), (0, 36), linewidth=1.0, linestyle='-.', color='k')
ax2.plot(processes, sup_comp, linewidth=4.0, linestyle='-', marker='*', color='b', label='C Speedup')
# ax2.plot(processes_ex, sup_comp_f, linewidth=2.0, linestyle='-', color='b', label='C Speedup')
ax2.plot(processes, sup_sim, linewidth=4.0, linestyle='-', marker='s', color='g', label='C&S Speedup')
# ax2.plot(processes_ex, sup_sim_f, linewidth=2.0, linestyle='-', color='g', label='C&S Speedup')
# ax2.set_ylim((1, processes_ex.max()))
fig.tight_layout()
# fig.legend(loc=9)
if out_dir is not None:
    out_fig_speed = out_dir + '/speed_up_time.png'
    print('\t\t-Storing the time figure in: ' + out_fig_speed)
    plt.savefig(out_fig_speed)
else:
    plt.show(block=True)
plt.close()

print('Terminated. (' + time.strftime("%c") + ')')
| [
"an.martinez.s.sw@gmail.com"
] | an.martinez.s.sw@gmail.com |
110496e18fa67c64c20bfd271e9accc1b77ca647 | 615e9d142587c965d4f593ce68cae1811824026d | /19-functions/javoblar-19-07.py | 3078014c425e95b4785cee83aa845fd53d1e7442 | [] | no_license | XurshidbekDavronov/python-darslar | 0100bb8ea61c355949e81d1d3f3b923befeb80c9 | 4fcf9a3e0c2facdedaed9b53ef806cdc0095fd9d | refs/heads/main | 2023-06-21T03:33:19.509225 | 2021-07-13T13:04:56 | 2021-07-13T13:04:56 | 377,176,205 | 1 | 0 | null | 2021-06-15T13:40:33 | 2021-06-15T13:40:32 | null | UTF-8 | Python | false | false | 510 | py | """
16/12/2020
Dasturlash asoslari
#19-dars: FUNCTIONS (FUNKSIYALAR)
Muallif: Anvar Narzullaev
Web sahifa: https://python.sariq.dev
"""
# Foydalanuvchidan son qabul qilib, sonni 2, 3, 4 va 5 ga qoldiqsiz bo'linishini tekshiruvchi
# funksiya yozing.
# Natijalarni konsolga chiqaring ("15 soni 3 ga qoldiqsiz bo'linadi" ko'rinishida)
def bolinish_alomatlari(son):
    """Print every n in 2..10 that divides *son* with no remainder.

    (Exercise: read a number from the user and report its divisibility;
    output format: "<son> <n> ga qoldiqsiz bo'linadi".)
    """
    for n in range(2, 11):
        if son % n == 0:
            print(f"{son} {n} ga qoldiqsiz bo'linadi")
bolinish_alomatlari(20)  # demo run from the lesson
| [
"anvarbek@gmail.com"
] | anvarbek@gmail.com |
7df9dcc7b35ce702c5fdf33e237c3bb866b1708a | afbaa5685bf737ec7d16fee2bab54ae13caf96f9 | /geekbang/core/ch17/Demo1.py | 98dd62e83056057241e556d48e785f0e1f247874 | [] | no_license | ykdsg/myPython | 9dcc9afe6f595e51b72257875d66ada1ba04bba6 | 77d2eaa2acb172664b632cc2720cef62dff8f235 | refs/heads/master | 2023-06-10T20:11:08.061075 | 2023-06-03T11:39:53 | 2023-06-03T11:39:53 | 10,655,956 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 259 | py | def func(message):
print('got a message:{}'.format(message))
# Functions are first-class objects: bind the same function object to a new
# name and call it through the alias.
send_message = func
send_message('hello world')
def get_message(message):
    """Return *message* prefixed with the standard tag."""
    prefix = 'got a message:'
    return prefix + message
def root_call(func, message):
    """Invoke *func* on *message* and print the result (higher-order demo)."""
    result = func(message)
    print(result)
| [
"17173as@163.com"
] | 17173as@163.com |
1e4de7da1382a4829bfdea39369b835759395aa8 | 7f36bd66ff8a20ed25dfd1da6eebb3f5aeb8732c | /TAFEICTPRG301ScottMorrisProject1/scope.py | 71797b6efc31bf9615540796e5c71bcaee0a4c58 | [] | no_license | scottmmorris/tafe-web-programming | a58d19942ca2a10e811ad5629d9fc8e266b6ebfe | d87314c2fe56025b36bfdc8ccaaa07e8f5488ea0 | refs/heads/main | 2023-06-25T23:38:36.110034 | 2021-07-30T10:17:41 | 2021-07-30T10:17:41 | 391,021,293 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 133 | py | def name_function():
global firstName
firstName = input("Enter your first name: ")
name_function()
print("My name is "+ firstName) | [
"32733642+scottmmorris@users.noreply.github.com"
] | 32733642+scottmmorris@users.noreply.github.com |
54fde227ed8dfee25b2b3b501946d685bfb1ac18 | 23ff67b67ea8ea49bd8c31aaa622d05b38715463 | /Lineas_de_Barrer/lineas_de_barrer.py | 1503541eb3850b3ed65a70d8e75719c825b26448 | [] | no_license | JuanPabloRosas/Analisis-de-Algoritmos | d48d6db744a3322c99c320a562a6f2af56fb476e | 9ba76dec05ff6a596d0aac246bc92e31a73ae545 | refs/heads/master | 2021-07-06T03:26:20.884428 | 2021-05-19T18:43:18 | 2021-05-19T18:43:18 | 81,850,492 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,173 | py | # EQUIPO:
#Jose Anastacio Hernandez Saldaña
#Juan Pablo Rosas Baldazo
#######################################
from heapq import heappush, heappop
import bintrees as b
def lineas_de_barrer(archivo):
    """Sweep-line scan over segments read from *archivo* (one per line as
    'x1 y1 x2 y2'), printing intersection events as they are popped.

    Heap events are tuples (x, y, code, segment_index, extra) with codes
    'C' (start), 'F' (end) and 'i' (intersection) — presumably
    'comienzo'/'fin'/'interseccion'.
    """
    heap = []
    cont = -1
    # NOTE(review): `archivo.open(archivo, 'r')` is unusual; plain
    # open(archivo, 'r') may have been intended — verify the caller.
    file = archivo.open(archivo,'r')
    sl = 0  # NOTE(review): sl and sr are never used.
    sr = 0
    puntos = {}
    for line in file:
        cont += 1
        # NOTE(review): coordinates stay as strings, so the comparisons below
        # (v[0] <= v[2], ix > menor[0]) are lexicographic, not numeric.
        v = line.split(' ')
        puntos[cont] = v
        # Push the left endpoint as a start ('C') event, the right as end ('F').
        if v[0] <= v[2]:
            heappush(heap,(v[0],v[1],'C',cont,'-'))
            heappush(heap,(v[2],v[3],'F',cont,'-'))
        else:
            heappush(heap,(v[0],v[1],'F',cont,'-'))
            heappush(heap,(v[2],v[3],'C',cont,'-'))
    # Sweep status structure, keyed by the event's y coordinate.
    arbol = b.AVLTree()
    while len(heap) > 0:
        menor = heappop(heap)
        if menor[2] =='C':
            arbol[menor[1]] = menor
            try:
                # NOTE(review): bintrees prev_item()/succ_item() return a
                # (key, value) pair, so pred[3]/suc[3] below index past the
                # pair — pred[1][3] was probably intended. Verify.
                pred = arbol.prev_item(menor[1])
            except:
                pred = None
            if pred:
                cont1 = menor[3]
                cont2 = pred[3]
                ix,iy = interseccion(puntos[cont1][0],puntos[cont1][1],puntos[cont1][2],puntos[cont1][3],puntos[cont2][0],puntos[cont2][1],puntos[cont2][2],puntos[cont2][3])
                if ix > menor[0]:
                    heappush(heap,(ix,iy,'i',pred[3],cont))
            try:
                suc = arbol.succ_item(menor[1])
            except:
                suc = None
            if suc:
                cont1 = menor[3]
                cont2 = suc[3]
                ix,iy = interseccion(puntos[cont1][0],puntos[cont1][1],puntos[cont1][2],puntos[cont1][3],puntos[cont2][0],puntos[cont2][1],puntos[cont2][2],puntos[cont2][3])
                if ix > menor[0]:
                    # NOTE(review): this is the successor branch but pushes
                    # pred[3] — suc[3] was probably intended.
                    heappush(heap,(ix,iy,'i',pred[3],cont))
        if menor[2] == 'F':
            try:
                pred = arbol.prev_item(menor[1])
            except:
                pred = None
            try:
                suc = arbol.succ_item(menor[1])
            except:
                suc = None
            # NOTE(review): AVLTree.remove() expects a key; the full event
            # tuple is passed here — verify against the bintrees API.
            arbol.remove(menor)
            # NOTE(review): this branch only runs when BOTH neighbours are
            # None, yet immediately indexes them, so it always raises when
            # entered — `is not None` was almost certainly intended.
            if pred is None and suc is None:
                cont1 = pred[3]
                cont2 = suc[3]
                ix,iy = interseccion(puntos[cont1][0],puntos[cont1][1],puntos[cont1][2],puntos[cont1][3],puntos[cont2][0],puntos[cont2][1],puntos[cont2][2],puntos[cont2][3])
                if ix > menor[0]:
                    heappush(heap,(ix,iy,'i',pred[3],suc[3]))
        if menor[2] == 'i':
            # Intersection event: report it.
            i = menor[3]
            j = menor[4]
            print(menor)
def interseccion(xc1, yc1, xf1, yf1, xc2, yc2, xf2, yf2):
    """Intersection of the infinite lines through (xc1,yc1)-(xf1,yf1) and
    (xc2,yc2)-(xf2,yf2).

    :return: (x, y) of the intersection point.

    As in the original, vertical segments (xf == xc) and parallel lines
    (m1 == m2) raise ZeroDivisionError.
    """
    m1 = (yf1 - yc1) / (xf1 - xc1)
    m2 = (yf2 - yc2) / (xf2 - xc2)
    b1 = yc1 - (m1 * xc1)
    b2 = yc2 - (m2 * xc2)
    # BUG FIX: the original computed x from an undefined name `x1` with wrong
    # algebra and set y to line 2's value at xc2. Solving
    # m1*x + b1 == m2*x + b2 gives the true intersection:
    x = (b2 - b1) / (m1 - m2)
    y = (m1 * x) + b1
    return x, y
| [
"juanpablo.rosasbaldazo@gmail.com"
] | juanpablo.rosasbaldazo@gmail.com |
fd7cdd39e9a8db86129719f700f436d19b4bc19f | 1b36425f798f484eda964b10a5ad72b37b4da916 | /posthog/models/event/event.py | 2e6d0625403431f36a01778187c27ed6f634ddce | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | dorucioclea/posthog | 0408baa2a7ae98e5bea352c516f741ddc17c0a3e | 8848981baf237117fb22d28af0770a0165881423 | refs/heads/master | 2023-01-23T11:01:57.942146 | 2023-01-13T09:03:00 | 2023-01-13T09:03:00 | 241,222,000 | 0 | 0 | MIT | 2020-02-17T22:34:37 | 2020-02-17T22:34:36 | null | UTF-8 | Python | false | false | 5,830 | py | import copy
import datetime
import re
from collections import defaultdict
from typing import Dict, List, Optional, Union
from dateutil.relativedelta import relativedelta
from django.db import models
from django.utils import timezone
from posthog.models.team import Team
# Matches `tag[attr='value']` / `tag[attr="value"]`: group 1 = tag name,
# group 2 = attribute name, group 3 = attribute value.
SELECTOR_ATTRIBUTE_REGEX = r"([a-zA-Z]*)\[(.*)=[\'|\"](.*)[\'|\"]\]"
# Module-level caches, keyed by team id.
LAST_UPDATED_TEAM_ACTION: Dict[int, datetime.datetime] = {}
TEAM_EVENT_ACTION_QUERY_CACHE: Dict[int, Dict[str, tuple]] = defaultdict(dict)
# TEAM_EVENT_ACTION_QUERY_CACHE looks like team_id -> event ex('$pageview') -> query
TEAM_ACTION_QUERY_CACHE: Dict[int, str] = {}
DEFAULT_EARLIEST_TIME_DELTA = relativedelta(weeks=1)
class SelectorPart:
    """One segment of a parsed CSS selector, normalized into query filters.

    `data` holds Django-ORM-style filter keys (tag_name, nth_child,
    attr_class__contains, attributes__attr__*); `ch_attributes` keeps the raw
    attribute name/value pairs — presumably for ClickHouse ("CH") queries;
    verify against callers.
    """

    direct_descendant = False
    unique_order = 0

    def __init__(self, tag: str, direct_descendant: bool, escape_slashes: bool):
        self.direct_descendant = direct_descendant
        self.data: Dict[str, Union[str, List]] = {}
        self.ch_attributes: Dict[str, Union[str, List]] = {}  # attributes for CH

        result = re.search(SELECTOR_ATTRIBUTE_REGEX, tag)
        # `[id=...]` is special-cased into the dedicated attr_id field.
        if result and "[id=" in tag:
            self.data["attr_id"] = result[3]
            self.ch_attributes["attr_id"] = result[3]
            tag = result[1]
        # Any other `[attr=value]` selector (the first branch strips `[...]`
        # from `tag`, so at most one of these two branches fires).
        if result and "[" in tag:
            self.data[f"attributes__attr__{result[2]}"] = result[3]
            self.ch_attributes[result[2]] = result[3]
            tag = result[1]
        if "nth-child(" in tag:
            parts = tag.split(":nth-child(")
            self.data["nth_child"] = parts[1].replace(")", "")
            self.ch_attributes["nth-child"] = self.data["nth_child"]
            tag = parts[0]
        # `.class1.class2` — optionally unescaping backslash-escaped dots.
        if "." in tag:
            parts = tag.split(".")
            # Strip all slashes that are not followed by another slash
            self.data["attr_class__contains"] = [self._unescape_class(p) if escape_slashes else p for p in parts[1:]]
            tag = parts[0]
        # Whatever remains is the element's tag name.
        if tag:
            self.data["tag_name"] = tag

    @property
    def extra_query(self) -> Dict[str, List[Union[str, List[str]]]]:
        """Build raw SQL WHERE fragments plus parameters from `self.data`."""
        where: List[Union[str, List[str]]] = []
        params: List[Union[str, List[str]]] = []
        for key, value in self.data.items():
            if "attr__" in key:
                where.append(f"(attributes ->> 'attr__{key.split('attr__')[1]}') = %s")
            else:
                if "__contains" in key:
                    # Postgres array-contains operator for the class list.
                    where.append(f"{key.replace('__contains', '')} @> %s::varchar(200)[]")
                else:
                    where.append(f"{key} = %s")
            params.append(value)
        return {"where": where, "params": params}

    def _unescape_class(self, class_name):
        r"""Separate all double slashes "\\" (replace them with "\") and remove all single slashes between them."""
        return "\\".join([p.replace("\\", "") for p in class_name.split("\\\\")])
class Selector:
    """A full CSS selector parsed into SelectorPart objects.

    `parts` is ordered target-first: the token list is reversed before parsing
    so matching starts at the element the selector points to.
    """

    parts: List[SelectorPart] = []

    def __init__(self, selector: str, escape_slashes=True):
        self.parts = []
        # Sometimes people manually add *, just remove them as they don't do anything
        selector = selector.replace("> * > ", "").replace("> *", "").strip()
        tags = list(self._split(selector))
        tags.reverse()

        # Detecting selector parts
        for index, tag in enumerate(tags):
            if tag == ">" or tag == "":
                continue
            # In the reversed list, a preceding ">" token means this part must
            # be the direct parent of the previous one.
            direct_descendant = index > 0 and tags[index - 1] == ">"
            part = SelectorPart(tag, direct_descendant, escape_slashes)
            # unique_order disambiguates repeated identical parts (e.g. div div).
            part.unique_order = len([p for p in self.parts if p.data == part.data])
            self.parts.append(copy.deepcopy(part))

    def _split(self, selector):
        """Yield selector tokens split on spaces, ignoring spaces that fall
        inside `[...]` attribute selectors or inside quotes."""
        in_attribute_selector = False
        in_quotes: Optional[str] = None
        part: List[str] = []
        for char in selector:
            if char == "[" and in_quotes is None:
                in_attribute_selector = True
            if char == "]" and in_quotes is None:
                in_attribute_selector = False
            if char in "\"'":
                # Track which quote character opened the current string.
                if in_quotes is not None:
                    if in_quotes == char:
                        in_quotes = None
                else:
                    in_quotes = char
            if char == " " and not in_attribute_selector:
                yield "".join(part)
                part = []
            else:
                part.append(char)
        yield "".join(part)
class Event(models.Model):
    """A captured analytics event belonging to a team (Django ORM model)."""

    class Meta:
        indexes = [
            models.Index(fields=["elements_hash"]),
            models.Index(fields=["timestamp", "team_id", "event"]),
            # Separately managed:
            # models.Index(fields=["created_at"]),
            # NOTE: The below index has been added as a manual migration in
            # `posthog/migrations/0024_add_event_distinct_id_index.py, but I'm
            # adding this here to improve visibility.
            # models.Index(fields=["distinct_id"], name="idx_distinct_id"),
        ]

    # Row insertion time (set once); nullable for pre-existing rows.
    created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, null=True, blank=True)
    # Owning team; deleting the team cascades to its events.
    team: models.ForeignKey = models.ForeignKey(Team, on_delete=models.CASCADE)
    # Event name (e.g. '$pageview').
    event: models.CharField = models.CharField(max_length=200, null=True, blank=True)
    # Identifier of the sender — presumably a person/device id; see callers.
    distinct_id: models.CharField = models.CharField(max_length=200)
    # Arbitrary JSON payload attached to the event.
    properties: models.JSONField = models.JSONField(default=dict)
    # When the event happened (defaults to insertion time).
    timestamp: models.DateTimeField = models.DateTimeField(default=timezone.now, blank=True)
    # Hash linking the event to its captured element group (see index above).
    elements_hash: models.CharField = models.CharField(max_length=200, null=True, blank=True)
    site_url: models.CharField = models.CharField(max_length=200, null=True, blank=True)

    # DEPRECATED: elements are stored against element groups now
    elements: models.JSONField = models.JSONField(default=list, null=True, blank=True)
| [
"noreply@github.com"
] | noreply@github.com |
b98865a3d5843d01b5c45a64127f469187453a5c | 6ca384633472d8fe303262dca320ee711acc0a6f | /DeepLearning/1. tf_practice/placeholder.py | d5c2a9a03dd773fa48ca366eef3ae88c7475d9ec | [] | no_license | lim3944/DeepLearning | 97e019996adf993e4d2eb6832f2d41334fefdd3e | 8402f26f0283cafcc49491c7b9ed493e52cab4bb | refs/heads/master | 2020-05-29T20:18:52.038121 | 2019-06-04T14:00:39 | 2019-06-04T14:00:39 | 189,349,256 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py | import tensorflow as tf
# TF1-style graph construction: X is a placeholder fed at run time.
X = tf.placeholder(tf.float32, [None,3])
print(X)
x_data = [[1,2,3],[4,5,6]]
# Variables initialised from a standard normal distribution.
W = tf.Variable(tf.random_normal([3,2]))
b = tf.Variable(tf.random_normal([2,1]))
# expr = X @ W + b. NOTE(review): matmul yields [batch, 2] while b is [2, 1];
# the addition broadcasts to [2, 2] and only lines up because the demo batch
# is 2 — presumably [2] or [1, 2] was intended. Verify.
expr = tf.matmul(X,W) +b
sess = tf.Session()
sess.run(tf.global_variables_initializer())
print(x_data)
print(sess.run(W))
print(sess.run(b))
# Evaluate the graph with the placeholder bound to x_data.
print(sess.run(expr,feed_dict={X: x_data}))
sess.close()
| [
"lim3944@gmail.com"
] | lim3944@gmail.com |
22254545f9a1cc0c5bd2eb4c3f056ed34bc7a22d | bcddca991afe606180dbb5ce6c033d8fb611154c | /docs/idf_extensions/include_build_file.py | b11a2128667b50bd2c713b8038e7b3dbc90675fd | [
"Apache-2.0"
] | permissive | EmbeddedSystemClass/esp-idf | 8ac5a312be41936b1e2dc5c68b7b68c9b4c1e488 | 92db6a3dabc1106b72865b8bd91d9bdd54fbdf6c | refs/heads/master | 2022-12-31T19:57:49.052365 | 2020-10-22T19:19:01 | 2020-10-22T19:19:01 | 259,859,439 | 0 | 0 | Apache-2.0 | 2020-04-29T07:47:48 | 2020-04-29T07:47:47 | null | UTF-8 | Python | false | false | 764 | py | import os.path
from docutils.parsers.rst import directives
from docutils.parsers.rst.directives.misc import Include as BaseInclude
from sphinx.util.docutils import SphinxDirective
class IncludeBuildFile(BaseInclude, SphinxDirective):
    """
    Like the standard "Include" directive, but relative to the app
    build directory
    """
    def run(self):
        # Resolve the directive argument against the configured build directory
        # so documents can include generated files.
        abspath = os.path.join(self.env.config.build_dir, self.arguments[0])
        self.arguments[0] = abspath
        # Tell the build environment about the included file — presumably so
        # changes to it invalidate the document; verify against Sphinx's
        # BuildEnvironment.note_included.
        self.env.note_included(abspath)
        return super(IncludeBuildFile, self).run()
def setup(app):
    """Sphinx extension entry point: register the 'include-build-file' directive."""
    directives.register_directive('include-build-file', IncludeBuildFile)
    extension_metadata = {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
    return extension_metadata
| [
"koson.trachu@gmail.com"
] | koson.trachu@gmail.com |
eeab12b862d7bd0661014cd1be6c5a6eb28aa58a | 571374f5ef4c5e5c460eccdf87225f5e3768023c | /CS271HMK_Assignment7_Duong_3857/q7.py | 89466682eaee2159194c0abc1f9a3aadb1ed1706 | [] | no_license | giangduong96/Master-Machine_Learning-CS-271 | 80b8a3455ebaca0a11df80a7820442b88a8c2647 | 7a16a9e9d179829d2cf94a4a99373c62daab5c54 | refs/heads/main | 2023-01-28T05:53:54.042338 | 2020-12-08T00:39:04 | 2020-12-08T00:39:04 | 319,477,804 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 994 | py | """
Name: Giang Duong
ID : 014533857
# Chapter 6 Question 7:
"""
import math
# Observed successes out of 10 trials per experiment (binomial pmf in f()).
X1, X2, X3, X4, X5 = (8, 5, 9, 4, 7) #hardcoded data in (6.14)
XList = (X1, X2, X3, X4, X5)
#After the 1st EM-Step, then parameters converged to: (Section 6.5.3)
# theta1/theta2: per-component success probabilities; t1/t2: mixture weights.
theta1 = 0.6918
theta2 = 0.5597
t1 = 0.7593
t2 = 0.2407
def nCr(n, r):
    """Binomial coefficient C(n, r) computed from factorials."""
    numerator = math.factorial(n)
    return numerator // (math.factorial(r) * math.factorial(n - r))
def f(xi, theta):
    """Binomial(10, theta) pmf evaluated at xi successes."""
    comb = math.factorial(10) // (math.factorial(xi) * math.factorial(10 - xi))
    return comb * math.pow(theta, xi) * math.pow(1 - theta, 10 - xi)
def getPji(t1, theta1, t2, theta2):
    """E-step: responsibility p_{j,i} of each component j for observation i.

    Returns [p_1, p_2], each a list aligned with the module-level XList
    (equation (6.10)).
    """
    responsibilities = [[], []]
    for xi in XList:
        weighted1 = t1 * f(xi, theta1)
        weighted2 = t2 * f(xi, theta2)
        total = weighted1 + weighted2
        responsibilities[0].append(weighted1 / total)
        responsibilities[1].append(weighted2 / total)
    return responsibilities
#get all the pji values for the second E step:
pjiList = getPji(t1, theta1, t2, theta2)
print("pji probabilities for the second E step:")
# Print p_{j,i} for every observation i and each component j in {1, 2}.
for i in range(len(XList)):
    for j in range(2):
        print("p_%d,%d = %.4f" % (j+1, i+1, pjiList[j][i]))
| [
"noreply@github.com"
] | noreply@github.com |
947e92b0bbb03b5bdf5cca5308740ce0b6e02f4d | 7f79ab2e15d05da67ed264ad79f0b46d338dd61f | /Analysis/CrimeDesc_vs_Boroughs_map.py | ff26e505e4a2eecb0a6c695da8c811bf43f8ec82 | [] | no_license | amiteshsah/Big-Data-Project | befd559c1da98044cf4f11bf2183a5b05c9d8810 | c37f77fd2f24db516cd57c7023d2a49913e8da44 | refs/heads/master | 2021-01-23T03:53:41.290757 | 2017-10-31T02:26:05 | 2017-10-31T02:26:05 | 86,132,104 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 771 | py | #!/usr/bin/env python
# Hadoop-streaming map step: for each input CSV row emit
# "<crime description>, <borough>\t1", skipping rows whose crime
# description or borough is missing or a placeholder value.
import sys
import os
import csv


def is_valid_record(crime_det, boroughs):
    """Return True when both fields are present and not placeholder values.

    Bug fix: the original condition ``x != 'NULL' or 'INVALID' or ""`` was
    always truthy because of operator precedence (``or 'INVALID'`` is a
    constant truthy operand), so no record was ever filtered out.
    """
    placeholders = ('', 'NULL', 'INVALID')
    return crime_det not in placeholders and boroughs not in placeholders


def main():
    """Read CSV rows from stdin and print one counting pair per valid row."""
    for line in csv.reader(sys.stdin):
        try:
            # Indexing happens inside the try block so short/malformed rows
            # are skipped instead of crashing the mapper (previously the
            # indexing was outside the try and an IndexError was fatal).
            crime_det = line[7]
            boroughs = line[13]
            if is_valid_record(crime_det, boroughs):
                print(crime_det + ", " + boroughs + "\t1")
        except Exception:
            continue


if __name__ == '__main__':
    main()
"prasad@Prasads-MacBook-Air.local"
] | prasad@Prasads-MacBook-Air.local |
93b24835b5b197d6dfa82c2fe29f8f644663b0b2 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/contrib/cv/detection/FairMOT/src/lib/datasets/dataset/jde.py | 3e7156e32fca35205bf1eac8143fcd1fc065e94c | [
"GPL-1.0-or-later",
"BSD-3-Clause",
"MIT",
"Apache-2.0",
"BSD-2-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 23,094 | py | # BSD 3-Clause License
#
# Copyright (c) 2017 xxxx
# All rights reserved.
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ============================================================================
import glob
import math
import os
import os.path as osp
import random
import time
from collections import OrderedDict
import cv2
import json
import numpy as np
import torch
import copy
from torch.utils.data import Dataset
from torchvision.transforms import transforms as T
from cython_bbox import bbox_overlaps as bbox_ious
from opts import opts
from utils.image import gaussian_radius, draw_umich_gaussian, draw_msra_gaussian
from utils.utils import xyxy2xywh, generate_anchors, xywh2xyxy, encode_delta
class LoadImages:  # for inference
    """Iterable/indexable loader yielding letterboxed images from a
    directory or a single image file.

    Each item is ``(img_path, img, img0)`` where ``img`` is the resized,
    RGB, CHW, float32 image scaled to [0, 1] and ``img0`` is the original
    BGR image as read by OpenCV.
    """

    def __init__(self, path, img_size=(1088, 608)):
        # Accept either a directory (all images inside) or one image file.
        if os.path.isdir(path):
            image_format = ['.jpg', '.jpeg', '.png', '.tif']
            self.files = sorted(glob.glob('%s/*.*' % path))
            self.files = list(filter(lambda x: os.path.splitext(x)[1].lower() in image_format, self.files))
        elif os.path.isfile(path):
            self.files = [path]

        self.nF = len(self.files)  # number of image files
        self.width = img_size[0]   # network input width
        self.height = img_size[1]  # network input height
        self.count = 0

        assert self.nF > 0, 'No images found in ' + path

    def _load(self, img_path):
        """Read one image; return (letterboxed float32 CHW RGB, original BGR).

        Factored out of __next__/__getitem__, which previously duplicated
        this code verbatim.
        """
        img0 = cv2.imread(img_path)  # BGR
        assert img0 is not None, 'Failed to load ' + img_path

        # Padded resize to the requested network input size.
        img, _, _, _ = letterbox(img0, height=self.height, width=self.width)

        # BGR -> RGB, HWC -> CHW, scale to [0, 1].
        img = img[:, :, ::-1].transpose(2, 0, 1)
        img = np.ascontiguousarray(img, dtype=np.float32)
        img /= 255.0
        return img, img0

    def __iter__(self):
        self.count = -1
        return self

    def __next__(self):
        self.count += 1
        if self.count == self.nF:
            raise StopIteration
        img_path = self.files[self.count]
        img, img0 = self._load(img_path)
        return img_path, img, img0

    def __getitem__(self, idx):
        idx = idx % self.nF  # wrap out-of-range indices
        img_path = self.files[idx]
        img, img0 = self._load(img_path)
        return img_path, img, img0

    def __len__(self):
        return self.nF  # number of files
class LoadVideo:  # for inference
    """Frame-by-frame iterator over a video file for inference.

    Each iteration yields ``(frame_index, img, img0)`` where ``img`` is the
    letterboxed, RGB, CHW, float32 frame scaled to [0, 1] and ``img0`` is
    the raw BGR frame resized to ``(self.w, self.h)``.
    """
    def __init__(self, path, img_size=(1088, 608)):
        # path: video file readable by OpenCV.
        # img_size: (width, height) of the network input for letterboxing.
        self.cap = cv2.VideoCapture(path)
        self.frame_rate = int(round(self.cap.get(cv2.CAP_PROP_FPS)))
        self.vw = int(self.cap.get(cv2.CAP_PROP_FRAME_WIDTH))   # source width
        self.vh = int(self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT))  # source height
        self.vn = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT))   # total frame count
        self.width = img_size[0]
        self.height = img_size[1]
        self.count = 0
        # NOTE(review): every frame is force-resized to a fixed 1920x1080
        # before letterboxing, regardless of the source resolution — confirm
        # this is intentional.
        self.w, self.h = 1920, 1080
        print('Lenth of the video: {:d} frames'.format(self.vn))

    def get_size(self, vw, vh, dw, dh):
        """Return (vw, vh) scaled to fit inside (dw, dh), preserving the
        aspect ratio. (Not referenced elsewhere in this class.)"""
        wa, ha = float(dw) / vw, float(dh) / vh
        a = min(wa, ha)
        return int(vw * a), int(vh * a)

    def __iter__(self):
        # Restart iteration; __next__ increments before reading.
        self.count = -1
        return self

    def __next__(self):
        self.count += 1
        if self.count == len(self):
            raise StopIteration
        # Read image
        res, img0 = self.cap.read()  # BGR
        assert img0 is not None, 'Failed to load frame {:d}'.format(self.count)
        img0 = cv2.resize(img0, (self.w, self.h))

        # Padded resize
        img, _, _, _ = letterbox(img0, height=self.height, width=self.width)

        # Normalize RGB: BGR -> RGB, HWC -> CHW, scale to [0, 1]
        img = img[:, :, ::-1].transpose(2, 0, 1)
        img = np.ascontiguousarray(img, dtype=np.float32)
        img /= 255.0

        # cv2.imwrite(img_path + '.letterbox.jpg', 255 * img.transpose((1, 2, 0))[:, :, ::-1]) # save letterbox image
        return self.count, img, img0

    def __len__(self):
        return self.vn  # number of files
class LoadImagesAndLabels:  # for training
    """Single-source training dataset: an image-list file plus per-image
    label files.

    ``__getitem__`` returns ``(img, labels, img_path, (h, w))`` where
    ``labels`` rows are ``[class, track_id, x, y, w, h]`` with the box
    fields normalized to the letterboxed network-input size.
    """
    def __init__(self, path, img_size=(1088, 608), augment=False, transforms=None):
        # ``path`` is a text file with one image path per line.
        with open(path, 'r') as file:
            self.img_files = file.readlines()
            self.img_files = [x.replace('\n', '') for x in self.img_files]
            self.img_files = list(filter(lambda x: len(x) > 0, self.img_files))

        # Matching label file lives under labels_with_ids/ with a .txt suffix.
        self.label_files = [x.replace('images', 'labels_with_ids').replace('.png', '.txt').replace('.jpg', '.txt')
                            for x in self.img_files]

        self.nF = len(self.img_files)  # number of image files
        self.width = img_size[0]
        self.height = img_size[1]
        self.augment = augment
        self.transforms = transforms

    def __getitem__(self, files_index):
        img_path = self.img_files[files_index]
        label_path = self.label_files[files_index]
        return self.get_data(img_path, label_path)

    def get_data(self, img_path, label_path):
        """Load one image and its labels, optionally augment, and return
        (img, labels, img_path, (orig_h, orig_w))."""
        height = self.height
        width = self.width
        img = cv2.imread(img_path)  # BGR
        if img is None:
            raise ValueError('File corrupt {}'.format(img_path))
        augment_hsv = True
        if self.augment and augment_hsv:
            # SV augmentation by 50%: random gain on saturation and value.
            fraction = 0.50
            img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
            S = img_hsv[:, :, 1].astype(np.float32)
            V = img_hsv[:, :, 2].astype(np.float32)

            a = (random.random() * 2 - 1) * fraction + 1
            S *= a
            if a > 1:
                np.clip(S, a_min=0, a_max=255, out=S)

            a = (random.random() * 2 - 1) * fraction + 1
            V *= a
            if a > 1:
                np.clip(V, a_min=0, a_max=255, out=V)

            img_hsv[:, :, 1] = S.astype(np.uint8)
            img_hsv[:, :, 2] = V.astype(np.uint8)
            cv2.cvtColor(img_hsv, cv2.COLOR_HSV2BGR, dst=img)

        h, w, _ = img.shape
        img, ratio, padw, padh = letterbox(img, height=height, width=width)

        # Load labels
        if os.path.isfile(label_path):
            labels0 = np.loadtxt(label_path, dtype=np.float32).reshape(-1, 6)

            # Normalized xywh to pixel xyxy format, accounting for the
            # letterbox scale and padding.
            labels = labels0.copy()
            labels[:, 2] = ratio * w * (labels0[:, 2] - labels0[:, 4] / 2) + padw
            labels[:, 3] = ratio * h * (labels0[:, 3] - labels0[:, 5] / 2) + padh
            labels[:, 4] = ratio * w * (labels0[:, 2] + labels0[:, 4] / 2) + padw
            labels[:, 5] = ratio * h * (labels0[:, 3] + labels0[:, 5] / 2) + padh
        else:
            labels = np.array([])

        # Augment image and labels with a random affine transform.
        if self.augment:
            img, labels, M = random_affine(img, labels, degrees=(-5, 5), translate=(0.10, 0.10), scale=(0.50, 1.20))

            plotFlag = False  # debug: dump the augmented image with boxes drawn
            if plotFlag:
                import matplotlib
                matplotlib.use('Agg')
                import matplotlib.pyplot as plt
                plt.figure(figsize=(50, 50))
                plt.imshow(img[:, :, ::-1])
                plt.plot(labels[:, [1, 3, 3, 1, 1]].T, labels[:, [2, 2, 4, 4, 2]].T, '.-')
                plt.axis('off')
                plt.savefig('test.jpg')
                time.sleep(10)

        nL = len(labels)
        if nL > 0:
            # convert pixel xyxy back to xywh, normalized to input size
            labels[:, 2:6] = xyxy2xywh(labels[:, 2:6].copy())  # / height
            labels[:, 2] /= width
            labels[:, 3] /= height
            labels[:, 4] /= width
            labels[:, 5] /= height
        if self.augment:
            # random left-right flip (50% chance); mirror x centers too
            lr_flip = True
            if lr_flip & (random.random() > 0.5):
                img = np.fliplr(img)
                if nL > 0:
                    labels[:, 2] = 1 - labels[:, 2]

        img = np.ascontiguousarray(img[:, :, ::-1])  # BGR to RGB

        if self.transforms is not None:
            img = self.transforms(img)

        return img, labels, img_path, (h, w)

    def __len__(self):
        return self.nF  # number of batches
def letterbox(img, height=608, width=1088,
              color=(127.5, 127.5, 127.5)):  # resize a rectangular image to a padded rectangular
    """Resize ``img`` to fit inside (height, width) while preserving its
    aspect ratio, then pad the borders with ``color`` to the exact target.

    Returns (padded image, scale ratio, x padding, y padding).
    """
    orig_h, orig_w = img.shape[:2]
    ratio = min(float(height) / orig_h, float(width) / orig_w)
    new_w = round(orig_w * ratio)
    new_h = round(orig_h * ratio)
    pad_x = (width - new_w) / 2   # width padding per side
    pad_y = (height - new_h) / 2  # height padding per side
    # The +/- 0.1 nudges make the two sides round to a total that exactly
    # fills the target when the padding is a half pixel.
    top = round(pad_y - 0.1)
    bottom = round(pad_y + 0.1)
    left = round(pad_x - 0.1)
    right = round(pad_x + 0.1)
    img = cv2.resize(img, (new_w, new_h), interpolation=cv2.INTER_AREA)  # resized, no border
    img = cv2.copyMakeBorder(img, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color)  # padded rectangular
    return img, ratio, pad_x, pad_y
def random_affine(img, targets=None, degrees=(-10, 10), translate=(.1, .1), scale=(.9, 1.1), shear=(-2, 2),
                  borderValue=(127.5, 127.5, 127.5)):
    """Apply a random rotation/scale/translation/shear to ``img`` and warp
    the target boxes consistently.

    img: HxWx3 OpenCV image.
    targets: optional array with rows [cls, id, x1, y1, x2, y2] in pixels.
    Returns:
        (warped_img, surviving_targets, M) when ``targets`` is given;
        warped_img alone when ``targets`` is None.
    M is the combined 3x3 transform (shear @ translation @ rotation).

    Bug fix: previously a non-None but *empty* ``targets`` array fell
    through every branch and the function implicitly returned None, which
    crashed callers that unpack three values; it now returns the empty
    targets unchanged.
    """
    # torchvision.transforms.RandomAffine(degrees=(-10, 10), translate=(.1, .1), scale=(.9, 1.1), shear=(-10, 10))
    # https://medium.com/uruvideo/dataset-augmentation-with-random-homographies-a8f4b44830d4

    border = 0  # width of added border (optional)
    height = img.shape[0]
    width = img.shape[1]

    # Rotation and scale
    R = np.eye(3)
    a = random.random() * (degrees[1] - degrees[0]) + degrees[0]
    s = random.random() * (scale[1] - scale[0]) + scale[0]
    R[:2] = cv2.getRotationMatrix2D(angle=a, center=(img.shape[1] / 2, img.shape[0] / 2), scale=s)

    # Translation
    T = np.eye(3)
    T[0, 2] = (random.random() * 2 - 1) * translate[0] * img.shape[0] + border  # x translation (pixels)
    T[1, 2] = (random.random() * 2 - 1) * translate[1] * img.shape[1] + border  # y translation (pixels)

    # Shear
    S = np.eye(3)
    S[0, 1] = math.tan((random.random() * (shear[1] - shear[0]) + shear[0]) * math.pi / 180)  # x shear (deg)
    S[1, 0] = math.tan((random.random() * (shear[1] - shear[0]) + shear[0]) * math.pi / 180)  # y shear (deg)

    M = S @ T @ R  # Combined rotation matrix. ORDER IS IMPORTANT HERE!!
    imw = cv2.warpPerspective(img, M, dsize=(width, height), flags=cv2.INTER_LINEAR,
                              borderValue=borderValue)  # BGR order borderValue

    # Return warped points also
    if targets is not None:
        if len(targets) > 0:
            n = targets.shape[0]
            points = targets[:, 2:6].copy()
            area0 = (points[:, 2] - points[:, 0]) * (points[:, 3] - points[:, 1])

            # warp the four corners of every box
            xy = np.ones((n * 4, 3))
            xy[:, :2] = points[:, [0, 1, 2, 3, 0, 3, 2, 1]].reshape(n * 4, 2)  # x1y1, x2y2, x1y2, x2y1
            xy = (xy @ M.T)[:, :2].reshape(n, 8)

            # create new axis-aligned boxes around the warped corners
            x = xy[:, [0, 2, 4, 6]]
            y = xy[:, [1, 3, 5, 7]]
            xy = np.concatenate((x.min(1), y.min(1), x.max(1), y.max(1))).reshape(4, n).T

            # apply angle-based reduction so rotated boxes do not inflate
            radians = a * math.pi / 180
            reduction = max(abs(math.sin(radians)), abs(math.cos(radians))) ** 0.5
            x = (xy[:, 2] + xy[:, 0]) / 2
            y = (xy[:, 3] + xy[:, 1]) / 2
            w = (xy[:, 2] - xy[:, 0]) * reduction
            h = (xy[:, 3] - xy[:, 1]) * reduction
            xy = np.concatenate((x - w / 2, y - h / 2, x + w / 2, y + h / 2)).reshape(4, n).T

            # reject boxes that became degenerate, tiny, or too distorted
            w = xy[:, 2] - xy[:, 0]
            h = xy[:, 3] - xy[:, 1]
            area = w * h
            ar = np.maximum(w / (h + 1e-16), h / (w + 1e-16))
            i = (w > 4) & (h > 4) & (area / (area0 + 1e-16) > 0.1) & (ar < 10)

            targets = targets[i]
            targets[:, 2:6] = xy[i]
            # drop boxes that left the image entirely
            targets = targets[targets[:, 2] < width]
            targets = targets[targets[:, 4] > 0]
            targets = targets[targets[:, 3] < height]
            targets = targets[targets[:, 5] > 0]

        # Empty targets: nothing to warp or filter — return them as-is so
        # callers can always unpack three values.
        return imw, targets, M
    else:
        return imw
def collate_fn(batch):
    """Collate (img, labels, path, size) samples into batch tensors.

    Labels are zero-padded along the box dimension to the largest label
    count in the batch; the per-sample label counts are returned as a
    (batch, 1) tensor.
    """
    imgs, labels, paths, sizes = zip(*batch)
    n_samples = len(labels)
    imgs = torch.stack(imgs, 0)
    widest = max(lab.shape[0] for lab in labels)
    labels = [torch.from_numpy(lab) for lab in labels]
    filled_labels = torch.zeros(n_samples, widest, 6)
    labels_len = torch.zeros(n_samples)
    for idx, lab in enumerate(labels):
        count = lab.shape[0]
        if count > 0:
            filled_labels[idx, :count, :] = lab
        labels_len[idx] = count
    return imgs, filled_labels, paths, sizes, labels_len.unsqueeze(1)
class JointDataset(LoadImagesAndLabels):  # for training
    """Multi-source tracking dataset producing CenterNet-style targets.

    Combines several datasets (one image-list file each), remaps their
    per-dataset track ids into one disjoint global id space, and for every
    sample builds the detection/re-id training targets: center heatmap,
    box size, sub-pixel offset and identity labels.
    """
    default_resolution = [1088, 608]
    mean = None
    std = None
    num_classes = 1

    def __init__(self, opt, root, paths, img_size=(1088, 608), augment=False, transforms=None):
        # opt: parsed options; K (max objects per image), down_ratio, ltrb
        #      and mse_loss are read here.
        # root: directory prepended to every relative image path.
        # paths: mapping dataset name -> text file listing its image paths.
        self.opt = opt
        dataset_names = paths.keys()
        self.img_files = OrderedDict()
        self.label_files = OrderedDict()
        self.tid_num = OrderedDict()
        self.tid_start_index = OrderedDict()
        self.num_classes = 1

        for ds, path in paths.items():
            with open(path, 'r') as file:
                self.img_files[ds] = file.readlines()
                self.img_files[ds] = [osp.join(root, x.strip()) for x in self.img_files[ds]]
                self.img_files[ds] = list(filter(lambda x: len(x) > 0, self.img_files[ds]))

            # Label file path mirrors the image path under labels_with_ids/.
            self.label_files[ds] = [
                x.replace('images', 'labels_with_ids').replace('.png', '.txt').replace('.jpg', '.txt')
                for x in self.img_files[ds]]

        # Highest track id per dataset (column 1 of each label row).
        for ds, label_paths in self.label_files.items():
            max_index = -1
            for lp in label_paths:
                lb = np.loadtxt(lp)
                if len(lb) < 1:
                    continue
                if len(lb.shape) < 2:
                    img_max = lb[1]
                else:
                    img_max = np.max(lb[:, 1])
                if img_max > max_index:
                    max_index = img_max
            self.tid_num[ds] = max_index + 1

        # Offset each dataset's ids so the global id ranges do not overlap.
        last_index = 0
        for i, (k, v) in enumerate(self.tid_num.items()):
            self.tid_start_index[k] = last_index
            last_index += v

        self.nID = int(last_index + 1)
        self.nds = [len(x) for x in self.img_files.values()]
        self.cds = [sum(self.nds[:i]) for i in range(len(self.nds))]  # cumulative start index per dataset
        self.nF = sum(self.nds)
        self.width = img_size[0]
        self.height = img_size[1]
        self.max_objs = opt.K
        self.augment = augment
        self.transforms = transforms

        print('=' * 80)
        print('dataset summary')
        print(self.tid_num)
        print('total # identities:', self.nID)
        print('start index')
        print(self.tid_start_index)
        print('=' * 80)

    def __getitem__(self, files_index):
        """Build the training target dict for one global sample index."""
        # Pick the dataset whose cumulative start is the largest <= index
        # (self.cds is ascending, so the last match wins).
        for i, c in enumerate(self.cds):
            if files_index >= c:
                ds = list(self.label_files.keys())[i]
                start_index = c

        img_path = self.img_files[ds][files_index - start_index]
        label_path = self.label_files[ds][files_index - start_index]

        imgs, labels, img_path, (input_h, input_w) = self.get_data(img_path, label_path)
        # Shift local track ids into the global id space (-1 means no id).
        for i, _ in enumerate(labels):
            if labels[i, 1] > -1:
                labels[i, 1] += self.tid_start_index[ds]

        output_h = imgs.shape[1] // self.opt.down_ratio
        output_w = imgs.shape[2] // self.opt.down_ratio
        num_classes = self.num_classes
        num_objs = labels.shape[0]
        hm = np.zeros((num_classes, output_h, output_w), dtype=np.float32)
        if self.opt.ltrb:
            # Box regressed as left/top/right/bottom distances from center.
            wh = np.zeros((self.max_objs, 4), dtype=np.float32)
        else:
            wh = np.zeros((self.max_objs, 2), dtype=np.float32)
        reg = np.zeros((self.max_objs, 2), dtype=np.float32)
        ind = np.zeros((self.max_objs, ), dtype=np.int64)
        reg_mask = np.zeros((self.max_objs, ), dtype=np.uint8)
        ids = np.zeros((self.max_objs, ), dtype=np.int64)
        bbox_xys = np.zeros((self.max_objs, 4), dtype=np.float32)

        draw_gaussian = draw_msra_gaussian if self.opt.mse_loss else draw_umich_gaussian
        for k in range(num_objs):
            label = labels[k]
            bbox = label[2:]
            cls_id = int(label[0])
            # Scale normalized center/size to output-feature coordinates.
            bbox[[0, 2]] = bbox[[0, 2]] * output_w
            bbox[[1, 3]] = bbox[[1, 3]] * output_h
            # Amodal box in xyxy (may extend beyond the image bounds).
            bbox_amodal = copy.deepcopy(bbox)
            bbox_amodal[0] = bbox_amodal[0] - bbox_amodal[2] / 2.
            bbox_amodal[1] = bbox_amodal[1] - bbox_amodal[3] / 2.
            bbox_amodal[2] = bbox_amodal[0] + bbox_amodal[2]
            bbox_amodal[3] = bbox_amodal[1] + bbox_amodal[3]
            bbox[0] = np.clip(bbox[0], 0, output_w - 1)
            bbox[1] = np.clip(bbox[1], 0, output_h - 1)
            h = bbox[3]
            w = bbox[2]

            # Clipped box in xyxy for the 'bbox' output.
            bbox_xy = copy.deepcopy(bbox)
            bbox_xy[0] = bbox_xy[0] - bbox_xy[2] / 2
            bbox_xy[1] = bbox_xy[1] - bbox_xy[3] / 2
            bbox_xy[2] = bbox_xy[0] + bbox_xy[2]
            bbox_xy[3] = bbox_xy[1] + bbox_xy[3]

            if h > 0 and w > 0:
                radius = gaussian_radius((math.ceil(h), math.ceil(w)))
                radius = max(0, int(radius))
                radius = 6 if self.opt.mse_loss else radius  # fixed radius for MSE loss
                #radius = max(1, int(radius)) if self.opt.mse_loss else radius
                ct = np.array(
                    [bbox[0], bbox[1]], dtype=np.float32)
                ct_int = ct.astype(np.int32)
                draw_gaussian(hm[cls_id], ct_int, radius)
                if self.opt.ltrb:
                    wh[k] = ct[0] - bbox_amodal[0], ct[1] - bbox_amodal[1], \
                            bbox_amodal[2] - ct[0], bbox_amodal[3] - ct[1]
                else:
                    wh[k] = 1. * w, 1. * h
                ind[k] = ct_int[1] * output_w + ct_int[0]  # flat heatmap index of the center
                reg[k] = ct - ct_int  # sub-pixel center offset
                reg_mask[k] = 1
                ids[k] = label[1]
                bbox_xys[k] = bbox_xy

        ret = {'input': imgs, 'hm': hm, 'reg_mask': reg_mask, 'ind': ind, 'wh': wh, 'reg': reg, 'ids': ids, 'bbox': bbox_xys}
        return ret
class DetDataset(LoadImagesAndLabels):  # for training
    """Multi-source detection dataset for training.

    Mirrors :class:`JointDataset`'s bookkeeping: one image-list file per
    dataset, derived label paths, and per-dataset track ids remapped into a
    disjoint global id space via ``tid_start_index``.
    """

    def __init__(self, root, paths, img_size=(1088, 608), augment=False, transforms=None):
        # root: directory prepended to every relative image path.
        # paths: mapping dataset name -> text file listing its image paths.
        # img_size: (width, height) of the network input.
        dataset_names = paths.keys()
        self.img_files = OrderedDict()
        self.label_files = OrderedDict()
        self.tid_num = OrderedDict()
        self.tid_start_index = OrderedDict()

        for ds, path in paths.items():
            with open(path, 'r') as file:
                self.img_files[ds] = file.readlines()
                self.img_files[ds] = [osp.join(root, x.strip()) for x in self.img_files[ds]]
                self.img_files[ds] = list(filter(lambda x: len(x) > 0, self.img_files[ds]))

            # Label file path mirrors the image path under labels_with_ids/.
            self.label_files[ds] = [
                x.replace('images', 'labels_with_ids').replace('.png', '.txt').replace('.jpg', '.txt')
                for x in self.img_files[ds]]

        # Highest track id per dataset (column 1 of each label row).
        for ds, label_paths in self.label_files.items():
            max_index = -1
            for lp in label_paths:
                lb = np.loadtxt(lp)
                if len(lb) < 1:
                    continue
                if len(lb.shape) < 2:
                    img_max = lb[1]
                else:
                    img_max = np.max(lb[:, 1])
                if img_max > max_index:
                    max_index = img_max
            self.tid_num[ds] = max_index + 1

        # Offset each dataset's ids so the global id ranges do not overlap.
        last_index = 0
        for i, (k, v) in enumerate(self.tid_num.items()):
            self.tid_start_index[k] = last_index
            last_index += v

        self.nID = int(last_index + 1)
        self.nds = [len(x) for x in self.img_files.values()]
        self.cds = [sum(self.nds[:i]) for i in range(len(self.nds))]  # cumulative start index per dataset
        self.nF = sum(self.nds)
        self.width = img_size[0]
        self.height = img_size[1]
        self.augment = augment
        self.transforms = transforms

        print('=' * 80)
        print('dataset summary')
        print(self.tid_num)
        print('total # identities:', self.nID)
        print('start index')
        print(self.tid_start_index)
        print('=' * 80)

    def __getitem__(self, files_index):
        """Return (imgs, labels0, img_path, (h, w)) for one global index."""
        # Pick the dataset whose cumulative start is the largest <= index
        # (self.cds is ascending, so the last match wins).
        for i, c in enumerate(self.cds):
            if files_index >= c:
                ds = list(self.label_files.keys())[i]
                start_index = c

        img_path = self.img_files[ds][files_index - start_index]
        label_path = self.label_files[ds][files_index - start_index]
        if os.path.isfile(label_path):
            labels0 = np.loadtxt(label_path, dtype=np.float32).reshape(-1, 6)
        else:
            # Bug fix: labels0 was previously left undefined when the label
            # file is missing, raising NameError at the return statement.
            labels0 = np.empty((0, 6), dtype=np.float32)

        imgs, labels, img_path, (h, w) = self.get_data(img_path, label_path)
        # Shift local track ids into the global id space (-1 means no id).
        for i, _ in enumerate(labels):
            if labels[i, 1] > -1:
                labels[i, 1] += self.tid_start_index[ds]
        return imgs, labels0, img_path, (h, w)
| [
"wangjiangben@huawei.com"
] | wangjiangben@huawei.com |
920a987d618391c105ce6e38bd90c99b02c18e9a | 29430dba82162e97038db14934ed4e5411a46a8b | /mygui.py | 642b24bda64b6cb11d12a4306182976c928e9555 | [] | no_license | PavelGnedin/Pocker | 8795d6c9aab1a90086c544b0f34a23fa54239921 | 53913228320d94606456b24ef1af5fd298a8331a | refs/heads/master | 2020-12-25T05:44:52.204354 | 2016-06-06T19:17:59 | 2016-06-06T19:17:59 | 60,551,404 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 137,603 | py | # -*- coding:utf-8 -*-
import tkinter
from tkinter import *
import math
from CheckHOLEcards import CheckHoleCards
from CheckHOLEcards import CheckHoleCardsForBot
from CheckOuts import CheckOuts
from Game import Game
from checkCombOnHand import CheckComb
class Diler:
    def __init__(self):
        """Set up a fresh table: per-hand betting flags, chip stacks for up
        to ten players, and the seat positions for the configured table
        size. ("stavka" is Russian for bet/stake.)"""
        # --- per-hand state flags ---
        self.flagOfblind=0
        self.flagofDiler=0
        self.flagOfCall=False
        self.mystavka=0          # the human player's current bet
        self.flagOfBet=False
        self.flagGetWinner=0
        self.flagRound=0
        self.botpower=0          # bot hand-strength value
        self.flagOfReStavka=0    # re-raise flag
        self.flagofbet = 0
        self.stavka = 0          # current table bet
        self.amount_players = 10
        # --- chip stacks (pot plus every seat starts with 2000) ---
        self.bankStack = 0
        self.myPlayerstack = 2000
        self.player1Stack = 2000
        self.player2Stack = 2000
        self.player3Stack = 2000
        self.player4Stack = 2000
        self.player5Stack = 2000
        self.player6Stack = 2000
        self.player7Stack = 2000
        self.player8Stack = 2000
        self.player9Stack = 2000
        self.flagstep = 0
        # Assign seat positions clockwise from the dealer for the current
        # table size; positions that do not exist at a given size (e.g.
        # hiJack with 4 players) are simply not set.
        if (self.amount_players == 10):
            self.diler = 0
            self.smallBlind = self.diler + 1
            self.BigBlind = self.diler + 2
            self.utg = self.diler + 3
            self.utg1 = self.diler + 4
            self.utg2 = self.diler + 5
            self.utg3 = self.diler + 6
            self.utg4 = self.diler + 7
            self.cutOff = self.amount_players - 1
            self.hiJack = self.amount_players - 2
        elif (self.amount_players == 9):
            self.diler = 0
            self.smallBlind = self.diler + 1
            self.BigBlind = self.diler + 2
            self.utg = self.diler + 3
            self.utg1 = self.diler + 4
            self.utg2 = self.diler + 5
            self.utg3 = self.diler + 6
            self.cutOff = self.amount_players - 1
            self.hiJack = self.amount_players - 2
        elif (self.amount_players == 8):
            self.diler = 0
            self.smallBlind = self.diler + 1
            self.BigBlind = self.diler + 2
            self.utg = self.diler + 3
            self.utg1 = self.diler + 4
            self.utg2 = self.diler + 5
            self.cutOff = self.amount_players - 1
            self.hiJack = self.amount_players - 2
        elif (self.amount_players == 7):
            self.diler = 0
            self.smallBlind = self.diler + 1
            self.BigBlind = self.diler + 2
            self.utg = self.diler + 3
            self.utg1 = self.diler + 4
            self.cutOff = self.amount_players - 1
            self.hiJack = self.amount_players - 2
        elif (self.amount_players == 6):
            self.diler = 0
            self.smallBlind = self.diler + 1
            self.BigBlind = self.diler + 2
            self.utg = self.diler + 3
            self.cutOff = self.amount_players - 1
            self.hiJack = self.amount_players - 2
        elif (self.amount_players == 5):
            self.diler = 0
            self.smallBlind = self.diler + 1
            self.BigBlind = self.diler + 2
            self.cutOff = self.amount_players - 1
            self.hiJack = self.amount_players - 2
        elif (self.amount_players == 4):
            self.diler = 0
            self.smallBlind = self.diler + 1
            self.BigBlind = self.diler + 2
            self.cutOff = self.amount_players - 1
        elif (self.amount_players == 3):
            self.diler = 0
            self.smallBlind = self.diler + 1
            self.BigBlind = self.diler + 2
        elif (self.amount_players == 2):
            self.diler = 0
            self.smallBlind = self.diler + 1
def potOdds(self, playermoney):
if self.bankStack == 0:
return -1
else:
return math.ceil(playermoney /(playermoney +self.bankStack) * 100)
def playerOdds(self, amountOfouts):
if amountOfouts > 1 and amountOfouts <= 3:
return amountOfouts * 2
elif amountOfouts > 3 and amountOfouts <= 11:
return amountOfouts * 2 + 1
else:
return amountOfouts * 2 + 2
def update_position(self):
if (self.amount_players == 2):
if (self.diler == 0):
self.diler = 1
self.smallBlind = 0
else:
self.diler = 0
self.smallBlind = 1
elif (self.amount_players == 3):
if (self.diler == 0):
self.BigBlind = self.diler
self.diler = self.diler + 1
self.smallBlind = self.smallBlind + 1
elif (self.diler == 1):
self.smallBlind = self.BigBlind
self.BigBlind = self.diler
self.diler = self.diler + 1
else:
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.BigBlind + 1
elif (self.amount_players == 4):
if (self.diler == 0):
self.cutOff = self.diler
self.diler = self.diler + 1
self.smallBlind = self.smallBlind + 1
self.BigBlind = self.BigBlind + 1
elif (self.diler == 1):
self.smallBlind = self.BigBlind
self.BigBlind = self.cutOff
self.cutOff = self.diler
self.diler = self.diler + 1
elif (self.diler == 2):
self.smallBlind = self.BigBlind
self.BigBlind = self.cutOff
self.cutOff = self.diler
self.diler = self.diler + 1
else:
self.smallBlind = self.BigBlind
self.BigBlind = self.cutOff
self.cutOff = self.diler
self.diler = 0
elif (self.amount_players == 5):
if (self.diler == 0):
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.diler + 1
self.smallBlind = self.smallBlind + 1
self.BigBlind = self.BigBlind + 1
elif (self.diler == 1):
self.smallBlind = self.BigBlind
self.BigBlind = self.hiJack
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.diler + 1
elif (self.diler == 2):
self.smallBlind = self.BigBlind
self.BigBlind = self.hiJack
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.diler + 1
elif (self.diler == 3):
self.smallBlind = self.BigBlind
self.BigBlind = self.hiJack
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.diler + 1
else:
self.smallBlind = self.BigBlind
self.BigBlind = self.hiJack
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = 0
elif (self.amount_players == 6):
if (self.diler == self.amount_players - 1):
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = 0
elif (self.smallBlind == self.amount_players - 1):
self.BigBlind = self.utg
self.utg = self.utg + 1
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = 0
elif (self.BigBlind == self.amount_players - 1):
self.utg = self.utg + 1
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = 0
elif (self.utg == self.amount_players - 1):
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = 0
elif (self.hiJack == self.amount_players - 1):
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.hiJack = 0
elif (self.cutOff == self.amount_players - 1):
self.hiJack = self.cutOff
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.cutOff = 0
else:
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
elif (self.amount_players == 7):
if (self.diler == self.amount_players - 1):
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = 0
elif (self.smallBlind == self.amount_players - 1):
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = 0
elif (self.BigBlind == self.amount_players - 1):
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = 0
elif (self.utg == self.amount_players - 1):
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = 0
self.utg1 = self.utg1 + 1
elif (self.hiJack == self.amount_players - 1):
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.hiJack = 0
elif (self.cutOff == self.amount_players - 1):
self.hiJack = self.cutOff
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.cutOff = 0
elif (self.utg1 == self.amount_players - 1):
self.hiJack = self.cutOff
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = 0
self.cutOff = self.cutOff + 1
else:
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
elif (self.amount_players == 8):
if (self.diler == self.amount_players - 1):
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = 0
elif (self.smallBlind == self.amount_players - 1):
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = 0
elif (self.BigBlind == self.amount_players - 1):
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = 0
elif (self.utg == self.amount_players - 1):
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = 0
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
elif (self.hiJack == self.amount_players - 1):
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.hiJack = 0
elif (self.cutOff == self.amount_players - 1):
self.hiJack = self.cutOff
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.cutOff = 0
elif (self.utg1 == self.amount_players - 1):
self.hiJack = self.cutOff
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg2 = self.utg2 + 1
self.utg1 = 0
self.cutOff = self.cutOff + 1
elif (self.utg2 == self.amount_players - 1):
self.hiJack = self.cutOff
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = 0
self.cutOff = self.cutOff + 1
else:
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
elif (self.amount_players == 9):
if (self.diler == self.amount_players - 1):
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.utg3 = self.utg3 + 1
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = 0
elif (self.smallBlind == self.amount_players - 1):
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.utg3 = self.utg3 + 1
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = 0
elif (self.BigBlind == self.amount_players - 1):
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.utg3 = self.utg3 + 1
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = 0
elif (self.utg == self.amount_players - 1):
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = 0
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.utg3 = self.utg3 + 1
elif (self.hiJack == self.amount_players - 1):
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.utg3 = self.utg3 + 1
self.hiJack = 0
elif (self.cutOff == self.amount_players - 1):
self.hiJack = self.cutOff
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.utg3 = self.utg3 + 1
self.cutOff = 0
elif (self.utg1 == self.amount_players - 1):
self.hiJack = self.cutOff
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg2 = self.utg2 + 1
self.utg3 = self.utg3 + 1
self.utg1 = 0
self.cutOff = self.cutOff + 1
elif (self.utg2 == self.amount_players - 1):
self.hiJack = self.cutOff
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg3 = self.utg3 + 1
self.utg2 = 0
self.cutOff = self.cutOff + 1
elif (self.utg3 == self.amount_players - 1):
self.hiJack = self.cutOff
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.utg3 = 0
self.cutOff = self.cutOff + 1
else:
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
elif (self.amount_players == 10):
if (self.diler == self.amount_players - 1):
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.utg3 = self.utg3 + 1
self.utg4 = self.utg4 + 1
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = 0
elif (self.smallBlind == self.amount_players - 1):
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.utg3 = self.utg3 + 1
self.utg4 = self.utg4 + 1
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = 0
elif (self.BigBlind == self.amount_players - 1):
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.utg3 = self.utg3 + 1
self.utg4 = self.utg4 + 1
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = 0
elif (self.utg == self.amount_players - 1):
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = 0
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.utg3 = self.utg3 + 1
self.utg4 = self.utg4 + 1
elif (self.hiJack == self.amount_players - 1):
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.utg3 = self.utg3 + 1
self.utg4 = self.utg4 + 1
self.hiJack = 0
elif (self.cutOff == self.amount_players - 1):
self.hiJack = self.cutOff
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.utg3 = self.utg3 + 1
self.utg4 = self.utg4 + 1
self.cutOff = 0
elif (self.utg1 == self.amount_players - 1):
self.hiJack = self.cutOff
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg2 = self.utg2 + 1
self.utg3 = self.utg3 + 1
self.utg4 = self.utg4 + 1
self.utg1 = 0
self.cutOff = self.cutOff + 1
elif (self.utg2 == self.amount_players - 1):
self.hiJack = self.cutOff
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg3 = self.utg3 + 1
self.utg4 = self.utg4 + 1
self.utg2 = 0
self.cutOff = self.cutOff + 1
elif (self.utg3 == self.amount_players - 1):
self.hiJack = self.cutOff
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.utg4 = self.utg4 + 1
self.utg3 = 0
self.cutOff = self.cutOff + 1
elif (self.utg4 == self.amount_players - 1):
self.hiJack = self.cutOff
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
self.utg1 = self.utg1 + 1
self.utg2 = self.utg2 + 1
self.utg3 = self.utg3 + 1
self.utg4 = 0
self.cutOff = self.cutOff + 1
else:
self.hiJack = self.cutOff
self.cutOff = self.diler
self.diler = self.smallBlind
self.smallBlind = self.BigBlind
self.BigBlind = self.utg
self.utg = self.utg + 1
def show_position(self):
if (self.diler == 0):
return "diler"
elif (self.smallBlind == 0):
return "smallBlind"
elif (self.BigBlind == 0):
return "BigBlind"
elif (self.utg == 0):
return "utg"
elif (self.utg1 == 0):
return "utg1"
elif (self.utg2 == 0):
return "utg2"
elif (self.utg3 == 0):
return "utg3"
elif (self.utg4 == 0):
return "utg4"
elif (self.hiJack == 0):
return "hijack"
elif (self.cutOff == 0):
return "cutOff"
def show_diler(self):
return self.diler
class mGUI:
def metod(self):
self.diler = Diler()
self.root = Toplevel() # окно
self.root.title("POKER")
def checkcomb(self, comb):
# t=self.comb.check_para()
# if t==0:
# self.comblpara=Label(self.master,text="НЕТ ПАРЫ! ",bg="red")
# self.comblpara.place(relx=0.75, rely=0.70, anchor=CENTER)
# else:
# self.comblpara=Label(self.master,text="ПАРА! "+str(t),bg="red")
# self.comblpara.place(relx=0.75, rely=0.70, anchor=CENTER)
#
# t=self.comb.check_2para()
# if t==[]:
# self.combl2para=Label(self.master,text="НЕТ 2ПАРЫ! ",bg="red")
# self.combl2para.place(relx=0.75, rely=0.75, anchor=CENTER)
# else:
# print("2para : "+str(t[0])+" "+str(t[1]))
# self.combl2para=Label(self.master,text="2ПАРЫ! "+str(t[0])+" "+str(t[1]),bg="red")
# self.combl2para.place(relx=0.75, rely=0.75, anchor=CENTER)
t = self.comb.mycombination(self.comb)
# if t==0:
self.comblset = Label(self.separator1, text="Current Combination: " + str(t),bg="sea green",fg='#4B0082')
#self.comblset.place(relx=0.85, rely=0.80, anchor=CENTER)
self.comblset.place(relx=0.5, rely=0.4, anchor=CENTER)
# else:
# self.comblset=Label(self.master,text="Cэт! "+str(t),bg="red")
# self.comblset.place(relx=0.75, rely=0.80, anchor=CENTER)
def fall(self):
if (self.diler.flagofbet == 1):
self.UpdateLabel()
self.diler.flagofbet = 0
self.diler.mystavka=0
else:
popupmsg1("Wait Your Step !")
def check(self):
if self.diler.stavka==0:
self.diler.flagofbet = 0
self.diler.mystavka=0
else:
popupmsg1("You Can not check!")
def call(self):
if (self.diler.flagofbet == 1):
if self.diler.myPlayerstack>=self.diler.stavka:
if self.diler.stavka==0:
self.diler.stavka=5
self.diler.bankStack += self.diler.stavka
self.BANKbet.destroy()
self.diler.myPlayerstack -= self.diler.stavka
self.playerbet = Label(self.root,text="YOUR STACK : " + str(self.diler.myPlayerstack), border=3, font=("Times",10,"bold"),bg="green4", fg="#4B0082")
self.playerbet.place(relx=0.057, rely=0.785, anchor=CENTER)
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
self.diler.flagofbet = 0
self.diler.mystavka=self.diler.stavka
else:
popupmsg1("You Have No Money!")
else:
popupmsg1("Wait Your Step !")
def botcall(self, botstack, bot):
if self.game.tableCards==[]:
cl=CheckHoleCardsForBot(bot.card1,bot.card2,'diler',9,'act_call')
mystr=cl.check_power()
print (mystr)
if mystr=="Fall !":
bot.active=False
if (bot == self.game.players[1]):
print("OFFFFFF")
self.player1s.destroy()
self.player1s = Label(self.root,text="OFF", bg="PaleGreen2")
self.player1s.place(relx=0.23, rely=0.03, anchor=CENTER)
elif (bot == self.game.players[2]):
self.player2s = Label(self.root,text="OFF", bg="PaleGreen2")
self.player2s.place(relx=0.33, rely=0.03, anchor=CENTER)
elif (bot == self.game.players[3]):
self.player3s = Label(self.root,text="OFF", bg="PaleGreen2")
self.player3s.place(relx=0.43, rely=0.08, anchor=CENTER)
elif (bot == self.game.players[4]):
self.player4s = Label(self.root,text="OFF", bg="PaleGreen2")
self.player4s.place(relx=0.43, rely=0.16, anchor=CENTER)
elif (bot == self.game.players[5]):
self.player5s = Label(self.root,text="OFF", bg="PaleGreen2")
self.player5s.place(relx=0.43, rely=0.24, anchor=CENTER)
elif (bot == self.game.players[6]):
self.player6s = Label(self.root,text="OFF", bg="PaleGreen2")
self.player6s.place(relx=0.33, rely=0.28, anchor=CENTER)
elif (bot == self.game.players[7]):
self.player7s = Label(self.root,text="OFF", bg="PaleGreen2")
self.player7s.place(relx=0.23, rely=0.28, anchor=CENTER)
elif (bot == self.game.players[8]):
self.player8s = Label(self.root,text="OFF", bg="PaleGreen2")
self.player8s.place(relx=0.093, rely=0.24, anchor=CENTER)
elif (bot == self.game.players[9]):
self.player9s = Label(self.root,text="OFF", bg="PaleGreen2")
self.player9s.place(relx=0.083, rely=0.09, anchor=CENTER)
elif mystr=="Raise !":
raise_stavka=self.diler.stavka+50
if bot == self.game.players[1]:
self.diler.bankStack+=raise_stavka
self.diler.player1Stack -= raise_stavka
self.diler.botpower=0
self.diler.stavka=raise_stavka
elif bot == self.game.players[2]:
self.diler.bankStack+=raise_stavka
self.diler.player2Stack -= raise_stavka
self.diler.botpower=0
self.diler.stavka=raise_stavka
elif bot == self.game.players[3]:
self.diler.bankStack+=raise_stavka
self.diler.player3Stack -= raise_stavka
self.diler.botpower=0
self.diler.stavka=raise_stavka
elif bot == self.game.players[4]:
self.diler.bankStack+=raise_stavka
self.diler.player4Stack -= raise_stavka
self.diler.botpower=0
self.diler.stavka=raise_stavka
elif bot == self.game.players[5]:
self.diler.bankStack+=raise_stavka
self.diler.player5Stack -= raise_stavka
self.diler.botpower=0
self.diler.stavka=raise_stavka
elif bot == self.game.players[6]:
self.diler.bankStack+=raise_stavka
self.diler.player6Stack -= raise_stavka
self.diler.botpower=0
self.diler.stavka=raise_stavka
elif bot == self.game.players[7]:
self.diler.bankStack+=raise_stavka
self.diler.player7Stack -= raise_stavka
self.diler.botpower=0
self.diler.stavka=raise_stavka
elif bot == self.game.players[8]:
self. diler.bankStack+=raise_stavka
self.diler.player8Stack -= raise_stavka
self.diler.botpower=0
self.diler.stavka=raise_stavka
elif bot == self.game.players[9]:
self.diler.bankStack+=raise_stavka
self.diler.player9Stack -= raise_stavka
self.diler.botpower=0
self.diler.stavka=raise_stavka
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
if bot == self.game.players[1]:
self.player1s = Label(self.root,text=self.diler.player1Stack, bg="PaleGreen2")
self.player1s.place(relx=0.23, rely=0.03, anchor=CENTER)
elif bot == self.game.players[2]:
self.player2s = Label(self.root,text=self.diler.player2Stack, bg="PaleGreen2")
self.player2s.place(relx=0.33, rely=0.03, anchor=CENTER)
elif bot == self.game.players[3]:
self.player3s = Label(self.root,text=self.diler.player3Stack, bg="PaleGreen2")
self.player3s.place(relx=0.43, rely=0.08, anchor=CENTER)
elif bot == self.game.players[4]:
self.player4s = Label(self.root,text=self.diler.player4Stack, bg="PaleGreen2")
self.player4s.place(relx=0.43, rely=0.16, anchor=CENTER)
elif bot == self.game.players[5]:
self.player5s = Label(self.root,text=self.diler.player5Stack, bg="PaleGreen2")
self.player5s.place(relx=0.43, rely=0.24, anchor=CENTER)
elif bot == self.game.players[6]:
self.player6s = Label(self.root,text=self.diler.player6Stack, bg="PaleGreen2")
self.player6s.place(relx=0.33, rely=0.28, anchor=CENTER)
elif bot == self.game.players[7]:
self.player7s = Label(self.root,text=self.diler.player7Stack, bg="PaleGreen2")
self.player7s.place(relx=0.23, rely=0.28, anchor=CENTER)
elif bot == self.game.players[8]:
self.player8s = Label(self.root,text=self.diler.player8Stack, bg="PaleGreen2")
self.player8s.place(relx=0.093, rely=0.24, anchor=CENTER)
elif bot == self.game.players[9]:
self.player9s = Label(self.root,text=self.diler.player9Stack, bg="PaleGreen2")
self.player9s.place(relx=0.083, rely=0.09, anchor=CENTER)
elif mystr=="Call !":
self.diler.bankStack += self.diler.stavka
if bot == self.game.players[1]:
self.diler.player1Stack -=self.diler.stavka
elif bot == self.game.players[2]:
self.diler.player2Stack -= self.diler.stavka
elif bot == self.game.players[3]:
self.diler.player3Stack -= self.diler.stavka
elif bot == self.game.players[4]:
self.diler.player4Stack -=self.diler.stavka
elif bot == self.game.players[5]:
self.diler.player5Stack -= self.diler.stavka
elif bot == self.game.players[6]:
self.diler.player6Stack -= self.diler.stavka
elif bot == self.game.players[7]:
self.diler.player7Stack -= self.diler.stavka
elif bot == self.game.players[8]:
self.diler.player8Stack -= self.diler.stavka
elif bot == self.game.players[9]:
self.diler.player9Stack -= self.diler.stavka
botstack -= self.diler.stavka
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
if bot == self.game.players[1]:
self.player1s = Label(self.root,text=self.diler.player1Stack, bg="PaleGreen2")
self.player1s.place(relx=0.23, rely=0.03, anchor=CENTER)
elif bot == self.game.players[2]:
self.player2s = Label(self.root,text=self.diler.player2Stack, bg="PaleGreen2")
self.player2s.place(relx=0.33, rely=0.03, anchor=CENTER)
elif bot == self.game.players[3]:
self.player3s = Label(self.root,text=self.diler.player3Stack, bg="PaleGreen2")
self.player3s.place(relx=0.43, rely=0.08, anchor=CENTER)
elif bot == self.game.players[4]:
self.player4s = Label(self.root,text=self.diler.player4Stack, bg="PaleGreen2")
self.player4s.place(relx=0.43, rely=0.16, anchor=CENTER)
elif bot == self.game.players[5]:
self.player5s = Label(self.root,text=self.diler.player5Stack, bg="PaleGreen2")
self.player5s.place(relx=0.43, rely=0.24, anchor=CENTER)
elif bot == self.game.players[6]:
self.player6s = Label(self.root,text=self.diler.player6Stack, bg="PaleGreen2")
self.player6s.place(relx=0.33, rely=0.28, anchor=CENTER)
elif bot == self.game.players[7]:
self.player7s = Label(self.root,text=self.diler.player7Stack, bg="PaleGreen2")
self.player7s.place(relx=0.23, rely=0.28, anchor=CENTER)
elif bot == self.game.players[8]:
self.player8s = Label(self.root,text=self.diler.player8Stack, bg="PaleGreen2")
self.player8s.place(relx=0.093, rely=0.24, anchor=CENTER)
elif bot == self.game.players[9]:
self.player9s = Label(self.root,text=self.diler.player9Stack, bg="PaleGreen2")
self.player9s.place(relx=0.083, rely=0.09, anchor=CENTER)
else:
if (bot == self.game.players[1]):
if self.player1s['text']=="OFF":
print("OFFFFFF")
return 0
elif (bot == self.game.players[2]):
if self.player2s['text']=="OFF":
print("OFFFFFF")
return 0
elif (bot == self.game.players[3]):
if self.player3s['text']=="OFF":
print("OFFFFFF")
return 0
elif (bot == self.game.players[4]):
if self.player4s['text']=="OFF":
print("OFFFFFF")
return 0
elif (bot == self.game.players[5]):
if self.player5s['text']=="OFF":
print("OFFFFFF")
return 0
elif (bot == self.game.players[6]):
if self.player6s['text']=="OFF":
print("OFFFFFF")
return 0
elif (bot == self.game.players[7]):
if self.player7s['text']=="OFF":
print("OFFFFFF")
return 0
elif (bot == self.game.players[8]):
if self.player8s['text']=="OFF":
print("OFFFFFF")
return 0
elif (bot == self.game.players[9]):
if self.player9s['text']=="OFF":
print("OFFFFFF")
return 0
botandtable = self.game.tableCards.copy()
if (bot == self.game.players[1]):
botandtable.append(self.game.players[1].card1)
print("КАРТА БОТА " + str(self.game.players[1].card1.card_rating) + " " + (
str(self.game.players[1].card1.card_suit)))
botandtable.append(self.game.players[1].card2)
print("КАРТА БОТА " + str(self.game.players[1].card2.card_rating) + " " + (
str(self.game.players[1].card2.card_suit)))
gamebot = Game(10)
gamebot.my__init__(10, self.game.players[1].card1.card_suit,
self.game.players[1].card1.card_rating, self.game.players[1].card2.card_suit,
self.game.players[1].card2.card_rating)
gamebot.tableCards = self.game.tableCards.copy()
print("TABLE CARDS")
for i in gamebot.tableCards:
print(str(i.card_rating) + " " + (str(i.card_suit)))
print("----------------------")
gamebot.myCardWithTable = botandtable
gamebot.koloda = []
for k in range(0, 4):
gamebot.koloda.append([])
for i in range(13):
gamebot.koloda[k].append(i + 2)
gamebot.koloda[self.game.players[1].card1.card_suit - 1][self.game.players[1].card1.card_rating - 2] = 0
gamebot.koloda[self.game.players[1].card2.card_suit - 1][self.game.players[1].card2.card_rating - 2] = 0
if (gamebot.tableCards != []):
gamebot.koloda[gamebot.tableCards[0].card_suit - 1][gamebot.tableCards[0].card_rating - 2] = 0
gamebot.koloda[gamebot.tableCards[1].card_suit - 1][gamebot.tableCards[1].card_rating - 2] = 0
gamebot.koloda[gamebot.tableCards[2].card_suit - 1][gamebot.tableCards[2].card_rating - 2] = 0
#
#
print("TABLE CARDS BOTANDTABLE")
for i in botandtable:
print(str(i.card_rating) + " " + (str(i.card_suit)))
print("----------------------")
bot1hand =CheckComb(botandtable).mycombinationForOut()
if (bot1hand=="kiker"):
bot1power=0
elif (bot1hand=="para"):
bot1power=1
elif (bot1hand=="2para"):
bot1power=2
elif (bot1hand=="set"):
bot1power=3
elif (bot1hand=="strit"):
bot1power=4
elif (bot1hand=="flesh"):
bot1power=5
elif (bot1hand=="FH"):
bot1power=6
elif (bot1hand=="kare"):
bot1power=7
elif (bot1hand=="SF"):
bot1power=8
elif (bot1hand=="royal"):
bot1power=9
self.diler.botpower=bot1power
print("DILERBOTPOWER:"+str(self.diler.botpower))
botout = CheckOuts(gamebot, CheckComb(botandtable))
print("1 bot")
botout.checkOut()
amountOfoutsofbot = botout.outs
elif (bot == self.game.players[2]):
botandtable.append(self.game.players[2].card1)
botandtable.append(self.game.players[2].card2)
gamebot = Game(10)
gamebot.my__init__(10, self.game.players[2].card1.card_suit,
self.game.players[2].card1.card_rating, self.game.players[2].card2.card_suit,
self.game.players[2].card2.card_rating)
gamebot.tableCards = self.game.tableCards
gamebot.myCardWithTable = botandtable
gamebot.koloda = []
for k in range(0, 4):
gamebot.koloda.append([])
for i in range(13):
gamebot.koloda[k].append(i + 2)
gamebot.koloda[self.game.players[2].card1.card_suit - 1][self.game.players[2].card1.card_rating - 2] = 0
gamebot.koloda[self.game.players[2].card2.card_suit - 1][self.game.players[2].card2.card_rating - 2] = 0
if (gamebot.tableCards != []):
gamebot.koloda[gamebot.tableCards[0].card_suit - 1][gamebot.tableCards[0].card_rating - 2] = 0
gamebot.koloda[gamebot.tableCards[1].card_suit - 1][gamebot.tableCards[1].card_rating - 2] = 0
gamebot.koloda[gamebot.tableCards[2].card_suit - 1][gamebot.tableCards[2].card_rating - 2] = 0
print("TABLE CARDS BOTANDTABLE")
for i in botandtable:
print(str(i.card_rating) + " " + (str(i.card_suit)))
print("----------------------")
bot1hand =CheckComb(botandtable).mycombinationForOut()
if (bot1hand=="kiker"):
bot1power=0
elif (bot1hand=="para"):
bot1power=1
elif (bot1hand=="2para"):
bot1power=2
elif (bot1hand=="set"):
bot1power=3
elif (bot1hand=="strit"):
bot1power=4
elif (bot1hand=="flesh"):
bot1power=5
elif (bot1hand=="FH"):
bot1power=6
elif (bot1hand=="kare"):
bot1power=7
elif (bot1hand=="SF"):
bot1power=8
elif (bot1hand=="royal"):
bot1power=9
self.diler.botpower=bot1power
print("DILERBOTPOWER:"+str(self.diler.botpower))
botout = CheckOuts(gamebot, CheckComb(botandtable))
print("2 bot")
botout.checkOut()
amountOfoutsofbot = botout.outs
elif (bot == self.game.players[3]):
botandtable.append(self.game.players[3].card1)
botandtable.append(self.game.players[3].card2)
gamebot = Game(10)
gamebot.my__init__(10, self.game.players[3].card1.card_suit,
self.game.players[3].card1.card_rating, self.game.players[3].card2.card_suit,
self.game.players[3].card2.card_rating)
gamebot.tableCards = self.game.tableCards
gamebot.myCardWithTable = botandtable
gamebot.koloda = []
for k in range(0, 4):
gamebot.koloda.append([])
for i in range(13):
gamebot.koloda[k].append(i + 2)
gamebot.koloda[self.game.players[3].card1.card_suit - 1][self.game.players[3].card1.card_rating - 2] = 0
gamebot.koloda[self.game.players[3].card2.card_suit - 1][self.game.players[3].card2.card_rating - 2] = 0
if (gamebot.tableCards != []):
gamebot.koloda[gamebot.tableCards[0].card_suit - 1][gamebot.tableCards[0].card_rating - 2] = 0
gamebot.koloda[gamebot.tableCards[1].card_suit - 1][gamebot.tableCards[1].card_rating - 2] = 0
gamebot.koloda[gamebot.tableCards[2].card_suit - 1][gamebot.tableCards[2].card_rating - 2] = 0
print("TABLE CARDS BOTANDTABLE")
for i in botandtable:
print(str(i.card_rating) + " " + (str(i.card_suit)))
print("----------------------")
bot1hand =CheckComb(botandtable).mycombinationForOut()
if (bot1hand=="kiker"):
bot1power=0
elif (bot1hand=="para"):
bot1power=1
elif (bot1hand=="2para"):
bot1power=2
elif (bot1hand=="set"):
bot1power=3
elif (bot1hand=="strit"):
bot1power=4
elif (bot1hand=="flesh"):
bot1power=5
elif (bot1hand=="FH"):
bot1power=6
elif (bot1hand=="kare"):
bot1power=7
elif (bot1hand=="SF"):
bot1power=8
elif (bot1hand=="royal"):
bot1power=9
self.diler.botpower=bot1power
print("DILERBOTPOWER:"+str(self.diler.botpower))
botout = CheckOuts(gamebot, CheckComb(botandtable))
print("3 bot")
botout.checkOut()
amountOfoutsofbot = botout.outs
elif (bot == self.game.players[4]):
botandtable.append(self.game.players[4].card1)
botandtable.append(self.game.players[4].card2)
gamebot = Game(10)
gamebot.my__init__(10, self.game.players[4].card1.card_suit,
self.game.players[4].card1.card_rating, self.game.players[4].card2.card_suit,
self.game.players[4].card2.card_rating)
gamebot.tableCards = self.game.tableCards
gamebot.myCardWithTable = botandtable
gamebot.koloda = []
for k in range(0, 4):
gamebot.koloda.append([])
for i in range(13):
gamebot.koloda[k].append(i + 2)
gamebot.koloda[self.game.players[4].card1.card_suit - 1][self.game.players[4].card1.card_rating - 2] = 0
gamebot.koloda[self.game.players[4].card2.card_suit - 1][self.game.players[4].card2.card_rating - 2] = 0
if (gamebot.tableCards != []):
gamebot.koloda[gamebot.tableCards[0].card_suit - 1][gamebot.tableCards[0].card_rating - 2] = 0
gamebot.koloda[gamebot.tableCards[1].card_suit - 1][gamebot.tableCards[1].card_rating - 2] = 0
gamebot.koloda[gamebot.tableCards[2].card_suit - 1][gamebot.tableCards[2].card_rating - 2] = 0
print("TABLE CARDS BOTANDTABLE")
for i in botandtable:
print(str(i.card_rating) + " " + (str(i.card_suit)))
print("----------------------")
bot1hand =CheckComb(botandtable).mycombinationForOut()
if (bot1hand=="kiker"):
bot1power=0
elif (bot1hand=="para"):
bot1power=1
elif (bot1hand=="2para"):
bot1power=2
elif (bot1hand=="set"):
bot1power=3
elif (bot1hand=="strit"):
bot1power=4
elif (bot1hand=="flesh"):
bot1power=5
elif (bot1hand=="FH"):
bot1power=6
elif (bot1hand=="kare"):
bot1power=7
elif (bot1hand=="SF"):
bot1power=8
elif (bot1hand=="royal"):
bot1power=9
self.diler.botpower=bot1power
print("DILERBOTPOWER:"+str(self.diler.botpower))
botout = CheckOuts(gamebot, CheckComb(botandtable))
print("4 bot")
botout.checkOut()
amountOfoutsofbot = botout.outs
elif (bot == self.game.players[5]):
botandtable.append(self.game.players[5].card1)
botandtable.append(self.game.players[5].card2)
gamebot = Game(10)
gamebot.my__init__(10, self.game.players[5].card1.card_suit,
self.game.players[5].card1.card_rating, self.game.players[5].card2.card_suit,
self.game.players[5].card2.card_rating)
gamebot.tableCards = self.game.tableCards
gamebot.myCardWithTable = botandtable
gamebot.koloda = []
for k in range(0, 4):
gamebot.koloda.append([])
for i in range(13):
gamebot.koloda[k].append(i + 2)
gamebot.koloda[self.game.players[5].card1.card_suit - 1][self.game.players[5].card1.card_rating - 2] = 0
gamebot.koloda[self.game.players[5].card2.card_suit - 1][self.game.players[5].card2.card_rating - 2] = 0
if (gamebot.tableCards != []):
gamebot.koloda[gamebot.tableCards[0].card_suit - 1][gamebot.tableCards[0].card_rating - 2] = 0
gamebot.koloda[gamebot.tableCards[1].card_suit - 1][gamebot.tableCards[1].card_rating - 2] = 0
gamebot.koloda[gamebot.tableCards[2].card_suit - 1][gamebot.tableCards[2].card_rating - 2] = 0
print("TABLE CARDS BOTANDTABLE")
for i in botandtable:
print(str(i.card_rating) + " " + (str(i.card_suit)))
print("----------------------")
bot1hand =CheckComb(botandtable).mycombinationForOut()
if (bot1hand=="kiker"):
bot1power=0
elif (bot1hand=="para"):
bot1power=1
elif (bot1hand=="2para"):
bot1power=2
elif (bot1hand=="set"):
bot1power=3
elif (bot1hand=="strit"):
bot1power=4
elif (bot1hand=="flesh"):
bot1power=5
elif (bot1hand=="FH"):
bot1power=6
elif (bot1hand=="kare"):
bot1power=7
elif (bot1hand=="SF"):
bot1power=8
elif (bot1hand=="royal"):
bot1power=9
self.diler.botpower=self.bot1power
print("DILERBOTPOWER:"+str(self.diler.botpower))
botout = CheckOuts(gamebot, CheckComb(botandtable))
print("5 bot")
botout.checkOut()
amountOfoutsofbot = botout.outs
elif (bot == self.game.players[6]):
botandtable.append(self.game.players[6].card1)
botandtable.append(self.game.players[6].card2)
gamebot = Game(10)
gamebot.my__init__(10, self.game.players[6].card1.card_suit,
self.game.players[6].card1.card_rating, self.game.players[6].card2.card_suit,
self.game.players[6].card2.card_rating)
gamebot.tableCards = self.game.tableCards
gamebot.myCardWithTable = botandtable
gamebot.koloda = []
for k in range(0, 4):
gamebot.koloda.append([])
for i in range(13):
gamebot.koloda[k].append(i + 2)
gamebot.koloda[self.game.players[6].card1.card_suit - 1][self.game.players[6].card1.card_rating - 2] = 0
gamebot.koloda[self.game.players[6].card2.card_suit - 1][self.game.players[6].card2.card_rating - 2] = 0
if (gamebot.tableCards != []):
gamebot.koloda[gamebot.tableCards[0].card_suit - 1][gamebot.tableCards[0].card_rating - 2] = 0
gamebot.koloda[gamebot.tableCards[1].card_suit - 1][gamebot.tableCards[1].card_rating - 2] = 0
gamebot.koloda[gamebot.tableCards[2].card_suit - 1][gamebot.tableCards[2].card_rating - 2] = 0
print("TABLE CARDS BOTANDTABLE")
for i in botandtable:
print(str(i.card_rating) + " " + (str(i.card_suit)))
print("----------------------")
bot1hand =CheckComb(botandtable).mycombinationForOut()
if (bot1hand=="kiker"):
bot1power=0
elif (bot1hand=="para"):
bot1power=1
elif (bot1hand=="2para"):
bot1power=2
elif (bot1hand=="set"):
bot1power=3
elif (bot1hand=="strit"):
bot1power=4
elif (bot1hand=="flesh"):
bot1power=5
elif (bot1hand=="FH"):
bot1power=6
elif (bot1hand=="kare"):
bot1power=7
elif (bot1hand=="SF"):
bot1power=8
elif (bot1hand=="royal"):
bot1power=9
self.diler.botpower=bot1power
print("DILERBOTPOWER:"+str(self.diler.botpower))
botout = CheckOuts(gamebot, CheckComb(botandtable))
print("6 bot")
botout.checkOut()
amountOfoutsofbot = botout.outs
elif (bot == self.game.players[7]):
botandtable.append(self.game.players[7].card1)
botandtable.append(self.game.players[7].card2)
gamebot = Game(10)
gamebot.my__init__(10, self.game.players[7].card1.card_suit,
self.game.players[7].card1.card_rating, self.game.players[7].card2.card_suit,
self.game.players[7].card2.card_rating)
gamebot.tableCards = self.game.tableCards
gamebot.myCardWithTable = botandtable
gamebot.koloda = []
for k in range(0, 4):
gamebot.koloda.append([])
for i in range(13):
gamebot.koloda[k].append(i + 2)
gamebot.koloda[self.game.players[7].card1.card_suit - 1][self.game.players[7].card1.card_rating - 2] = 0
gamebot.koloda[self.game.players[7].card2.card_suit - 1][self.game.players[7].card2.card_rating - 2] = 0
if (gamebot.tableCards != []):
gamebot.koloda[gamebot.tableCards[0].card_suit - 1][gamebot.tableCards[0].card_rating - 2] = 0
gamebot.koloda[gamebot.tableCards[1].card_suit - 1][gamebot.tableCards[1].card_rating - 2] = 0
gamebot.koloda[gamebot.tableCards[2].card_suit - 1][gamebot.tableCards[2].card_rating - 2] = 0
print("TABLE CARDS BOTANDTABLE")
for i in botandtable:
print(str(i.card_rating) + " " + (str(i.card_suit)))
print("----------------------")
bot1hand =CheckComb(botandtable).mycombinationForOut()
if (bot1hand=="kiker"):
bot1power=0
elif (bot1hand=="para"):
bot1power=1
elif (bot1hand=="2para"):
bot1power=2
elif (bot1hand=="set"):
bot1power=3
elif (bot1hand=="strit"):
bot1power=4
elif (bot1hand=="flesh"):
bot1power=5
elif (bot1hand=="FH"):
bot1power=6
elif (bot1hand=="kare"):
bot1power=7
elif (bot1hand=="SF"):
bot1power=8
elif (bot1hand=="royal"):
bot1power=9
self.diler.botpower=bot1power
print("DILERBOTPOWER:"+str(self.diler.botpower))
botout = CheckOuts(gamebot, CheckComb(botandtable))
print("7 bot")
botout.checkOut()
amountOfoutsofbot = botout.outs
elif (bot == self.game.players[8]):
botandtable.append(self.game.players[8].card1)
botandtable.append(self.game.players[8].card2)
gamebot = Game(10)
gamebot.my__init__(10, self.game.players[8].card1.card_suit,
self.game.players[8].card1.card_rating, self.game.players[8].card2.card_suit,
self.game.players[8].card2.card_rating)
gamebot.tableCards = self.game.tableCards
gamebot.myCardWithTable = botandtable
gamebot.koloda = []
for k in range(0, 4):
gamebot.koloda.append([])
for i in range(13):
gamebot.koloda[k].append(i + 2)
gamebot.koloda[self.game.players[8].card1.card_suit - 1][self.game.players[8].card1.card_rating - 2] = 0
gamebot.koloda[self.game.players[8].card2.card_suit - 1][self.game.players[8].card2.card_rating - 2] = 0
if (gamebot.tableCards != []):
gamebot.koloda[gamebot.tableCards[0].card_suit - 1][gamebot.tableCards[0].card_rating - 2] = 0
gamebot.koloda[gamebot.tableCards[1].card_suit - 1][gamebot.tableCards[1].card_rating - 2] = 0
gamebot.koloda[gamebot.tableCards[2].card_suit - 1][gamebot.tableCards[2].card_rating - 2] = 0
print("TABLE CARDS BOTANDTABLE")
for i in botandtable:
print(str(i.card_rating) + " " + (str(i.card_suit)))
print("----------------------")
bot1hand =CheckComb(botandtable).mycombinationForOut()
if (bot1hand=="kiker"):
bot1power=0
elif (bot1hand=="para"):
bot1power=1
elif (bot1hand=="2para"):
bot1power=2
elif (bot1hand=="set"):
bot1power=3
elif (bot1hand=="strit"):
bot1power=4
elif (bot1hand=="flesh"):
bot1power=5
elif (bot1hand=="FH"):
bot1power=6
elif (bot1hand=="kare"):
bot1power=7
elif (bot1hand=="SF"):
bot1power=8
elif (bot1hand=="royal"):
bot1power=9
self.diler.botpower=bot1power
print("DILERBOTPOWER:"+str(self.diler.botpower))
botout = CheckOuts(gamebot, CheckComb(botandtable))
print("8 bot")
botout.checkOut()
amountOfoutsofbot = botout.outs
elif (bot == self.game.players[9]):
botandtable.append(self.game.players[9].card1)
botandtable.append(self.game.players[9].card2)
gamebot = Game(10)
gamebot.my__init__(10, self.game.players[9].card1.card_suit,
self.game.players[9].card1.card_rating, self.game.players[9].card2.card_suit,
self.game.players[9].card2.card_rating)
gamebot.tableCards = self.game.tableCards
gamebot.myCardWithTable = botandtable
gamebot.koloda = []
for k in range(0, 4):
gamebot.koloda.append([])
for i in range(13):
gamebot.koloda[k].append(i + 2)
gamebot.koloda[self.game.players[9].card1.card_suit - 1][self.game.players[9].card1.card_rating - 2] = 0
gamebot.koloda[self.game.players[9].card2.card_suit - 1][self.game.players[9].card2.card_rating - 2] = 0
if (gamebot.tableCards != []):
gamebot.koloda[gamebot.tableCards[0].card_suit - 1][gamebot.tableCards[0].card_rating - 2] = 0
gamebot.koloda[gamebot.tableCards[1].card_suit - 1][gamebot.tableCards[1].card_rating - 2] = 0
gamebot.koloda[gamebot.tableCards[2].card_suit - 1][gamebot.tableCards[2].card_rating - 2] = 0
print("TABLE CARDS BOTANDTABLE")
for i in botandtable:
print(str(i.card_rating) + " " + (str(i.card_suit)))
print("----------------------")
bot1hand =CheckComb(botandtable).mycombinationForOut()
if (bot1hand=="kiker"):
bot1power=0
elif (bot1hand=="para"):
bot1power=1
elif (bot1hand=="2para"):
bot1power=2
elif (bot1hand=="set"):
bot1power=3
elif (bot1hand=="strit"):
bot1power=4
elif (bot1hand=="flesh"):
bot1power=5
elif (bot1hand=="FH"):
bot1power=6
elif (bot1hand=="kare"):
bot1power=7
elif (bot1hand=="SF"):
bot1power=8
elif (bot1hand=="royal"):
bot1power=9
self.diler.botpower=bot1power
print("DILERBOTPOWER:"+str(self.diler.botpower))
botout = CheckOuts(gamebot, CheckComb(botandtable))
print("9 bot")
botout.checkOut()
amountOfoutsofbot = botout.outs
# print("BOT-------------------------------------------------------")
# print("TABLE CARDS :")
# for i in botandtable:
# print(str(i.card_suit) + " " + str(i.card_rating))
# print("ENDTABLE CARDS :")
# print(str(bot.card1.card_suit) + " " + str(bot.card1.card_rating))
# print(str(bot.card2.card_suit) + " " + str(bot.card2.card_rating))
# print(amountOfoutsofbot)
# print(diler.playerOdds(amountOfoutsofbot))
# print(diler.potOdds(diler.stavka))
# print("END BOT-------------------------------------------------------")
# botstack-=5
if self.diler.playerOdds(amountOfoutsofbot) > self.diler.potOdds(self.diler.stavka) or self.diler.botpower>0 :
print ("AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA")
if (self.diler.botpower>0):
if bot == self.game.players[1]:
self.diler.bankStack+=50
self.diler.player1Stack -= 50
self.diler.botpower=0
self.diler.stavka=50
elif bot == self.game.players[2]:
self.diler.bankStack+=50
self.diler.player2Stack -= 50
self.diler.botpower=0
self.diler.stavka=50
elif bot == self.game.players[3]:
self.diler.bankStack+=50
self.diler.player3Stack -= 50
self.diler.botpower=0
self.diler.stavka=50
elif bot == self.game.players[4]:
self.diler.bankStack+=50
self.diler.player4Stack -= 50
self.diler.botpower=0
self.diler.stavka=50
elif bot == self.game.players[5]:
self.diler.bankStack+=50
self.diler.player5Stack -= 50
self.diler.botpower=0
self.diler.stavka=50
elif bot == self.game.players[6]:
self.diler.bankStack+=50
self.diler.player6Stack -= 50
self.diler.botpower=0
self.diler.stavka=50
elif bot == self.game.players[7]:
self.diler.bankStack+=50
self.diler.player7Stack -= 50
self.diler.botpower=0
self.diler.stavka=50
elif bot == self.game.players[8]:
self.diler.bankStack+=50
self.diler.player8Stack -= 50
self.diler.botpower=0
self.diler.stavka=50
elif bot == self.game.players[9]:
self.diler.bankStack+=50
self.diler.player9Stack -= 50
self.diler.botpower=0
self.diler.stavka=50
else:
print("PO AUTAM!")
self.diler.bankStack += self.diler.stavka
if bot == self.game.players[1]:
self.diler.player1Stack -= self.diler.stavka
elif bot == self.game.players[2]:
self.diler.player2Stack -= self.diler.stavka
elif bot == self.game.players[3]:
self.diler.player3Stack -= self.diler.stavka
elif bot == self.game.players[4]:
self.diler.player4Stack -= self.diler.stavka
elif bot == self.game.players[5]:
self.diler.player5Stack -= self.diler.stavka
elif bot == self.game.players[6]:
self.diler.player6Stack -= self.diler.stavka
elif bot == self.game.players[7]:
self.diler.player7Stack -= self.diler.stavka
elif bot == self.game.players[8]:
self.diler.player8Stack -= self.diler.stavka
elif bot == self.game.players[9]:
self.diler.player9Stack -= self.diler.stavka
botstack -= self.diler.stavka
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
if bot == self.game.players[1]:
self.player1s = Label(self.root,text=self.diler.player1Stack, bg="PaleGreen2")
self.player1s.place(relx=0.23, rely=0.03, anchor=CENTER)
elif bot == self.game.players[2]:
self.player2s = Label(self.root,text=self.diler.player2Stack, bg="PaleGreen2")
self.player2s.place(relx=0.33, rely=0.03, anchor=CENTER)
elif bot == self.game.players[3]:
self.player3s = Label(self.root,text=self.diler.player3Stack, bg="PaleGreen2")
self.player3s.place(relx=0.43, rely=0.08, anchor=CENTER)
elif bot == self.game.players[4]:
self.player4s = Label(self.root,text=self.diler.player4Stack, bg="PaleGreen2")
self.player4s.place(relx=0.43, rely=0.16, anchor=CENTER)
elif bot == self.game.players[5]:
self.player5s = Label(self.root,text=self.diler.player5Stack, bg="PaleGreen2")
self.player5s.place(relx=0.43, rely=0.24, anchor=CENTER)
elif bot == self.game.players[6]:
self.player6s = Label(self.root,text=self.diler.player6Stack, bg="PaleGreen2")
self.player6s.place(relx=0.33, rely=0.28, anchor=CENTER)
elif bot == self.game.players[7]:
self.player7s = Label(self.root,text=self.diler.player7Stack, bg="PaleGreen2")
self.player7s.place(relx=0.23, rely=0.28, anchor=CENTER)
elif bot == self.game.players[8]:
self.player8s = Label(self.root,text=self.diler.player8Stack, bg="PaleGreen2")
self.player8s.place(relx=0.093, rely=0.24, anchor=CENTER)
elif bot == self.game.players[9]:
self.player9s = Label(self.root,text=self.diler.player9Stack, bg="PaleGreen2")
self.player9s.place(relx=0.083, rely=0.09, anchor=CENTER)
else:
if (bot == self.game.players[1]):
print("OFFFFFF")
self.player1s.destroy()
self.player1s = Label(self.root,text="OFF", bg="PaleGreen2")
self.player1s.place(relx=0.23, rely=0.03, anchor=CENTER)
elif (bot == self.game.players[2]):
self.player2s = Label(self.root,text="OFF", bg="PaleGreen2")
self.player2s.place(relx=0.33, rely=0.03, anchor=CENTER)
elif (bot == self.game.players[3]):
self.player3s = Label(self.root,text="OFF", bg="PaleGreen2")
self.player3s.place(relx=0.43, rely=0.08, anchor=CENTER)
elif (bot == self.game.players[4]):
self.player4s = Label(self.root,text="OFF", bg="PaleGreen2")
self.player4s.place(relx=0.43, rely=0.16, anchor=CENTER)
elif (bot == self.game.players[5]):
self.player5s = Label(self.root,text="OFF", bg="PaleGreen2")
self.player5s.place(relx=0.43, rely=0.24, anchor=CENTER)
elif (bot == self.game.players[6]):
self.player6s = Label(self.root,text="OFF", bg="PaleGreen2")
self.player6s.place(relx=0.33, rely=0.28, anchor=CENTER)
elif (bot == self.game.players[7]):
self.player7s = Label(self.root,text="OFF", bg="PaleGreen2")
self.player7s.place(relx=0.23, rely=0.28, anchor=CENTER)
elif (bot == self.game.players[8]):
self.player8s = Label(self.root,text="OFF", bg="PaleGreen2")
self.player8s.place(relx=0.093, rely=0.24, anchor=CENTER)
elif (bot == self.game.players[9]):
self.player9s = Label(self.root,text="OFF", bg="PaleGreen2")
self.player9s.place(relx=0.083, rely=0.09, anchor=CENTER)
botandtable.pop()
botandtable.pop()
self.diler.botpower=0
print("AEEEEEEEE"+str(self.player1s.cget("text")))
if (str(self.player1s.cget("text"))=="OFF" and str(self.player2s.cget("text"))=="OFF"and str(self.player3s.cget("text"))=="OFF"
and str(self.player4s.cget("text"))=="OFF"and str(self.player5s.cget("text"))=="OFF"and str(self.player6s.cget("text"))=="OFF"
and str(self.player7s.cget("text"))=="OFF"and str(self.player8s.cget("text"))=="OFF"and str(self.player9s.cget("text"))=="OFF"):
self.diler.myPlayerstack+=self.diler.bankStack
self.diler.bankStack=0
self.playerbet = Label(self.root, text="YOUR STACK : " + str(self.diler.myPlayerstack), font=("Times",10,"bold"), border=3,bg="green4", fg="#4B0082")
self.playerbet.place(relx=0.057, rely=0.785, anchor=CENTER)
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
popupmsg1(" You won! Other players fall")
def step1(self):
if(self.diler.flagofDiler==0):
# if (self.diler.flagofbet == 0):
self.diler.flagOfBet=True
if(self.diler.flagOfblind==0):
if(self.diler.BigBlind==2):
self.diler.bankStack+=6
self.diler.player1Stack -= 6
self.diler.botpower=0
self.diler.stavka=6
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
self.player1s = Label(self.root,text=self.diler.player1Stack, bg="PaleGreen2")
self.player1s.place(relx=0.23, rely=0.03, anchor=CENTER)
self.diler.bankStack+=12
self.diler.player2Stack -= 12
self.diler.botpower=0
self.diler.stavka=12
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
self.player2s = Label(self.root,text=self.diler.player2Stack, bg="PaleGreen2")
self.player2s.place(relx=0.33, rely=0.03, anchor=CENTER)
#self.diler.flagstep=self.diler.utg
elif(self.diler.BigBlind==3):
self.diler.bankStack+=6
self.diler.player2Stack -= 6
self.diler.botpower=0
self.diler.stavka=6
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
self.player2s = Label(self.root,text=self.diler.player2Stack, bg="PaleGreen2")
self.player2s.place(relx=0.33, rely=0.03, anchor=CENTER)
#self.diler.flagstep=self.diler.utg
self.diler.bankStack+=12
self.diler.player3Stack -= 12
self.diler.botpower=0
self.diler.stavka=12
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
self.player3s = Label(self.root,text=self.diler.player3Stack, bg="PaleGreen2")
self.player3s.place(relx=0.43, rely=0.08, anchor=CENTER)
#self.diler.flagstep=self.diler.utg
elif(self.diler.BigBlind==4):
self.diler.bankStack+=6
self.diler.player3Stack -= 6
self.diler.botpower=0
self.diler.stavka=6
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
self.player3s = Label(self.root,text=self.diler.player3Stack, bg="PaleGreen2")
self.player3s.place(relx=0.43, rely=0.08, anchor=CENTER)
self.diler.bankStack+=12
self.diler.player4Stack -= 12
self.diler.botpower=0
self.diler.stavka=12
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
self.player4s = Label(self.root,text=self.diler.player4Stack, bg="PaleGreen2")
self.player4s.place(relx=0.43, rely=0.16, anchor=CENTER)
elif(self.diler.BigBlind==5):
self.diler.bankStack+=6
self.diler.player4Stack -= 6
self.diler.botpower=0
self.diler.stavka=6
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
self.player4s = Label(self.root,text=self.diler.player4Stack, bg="PaleGreen2")
self.player4s.place(relx=0.43, rely=0.16, anchor=CENTER)
self.diler.bankStack+=12
self.diler.player5Stack -= 12
self.diler.botpower=0
self.diler.stavka=12
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
self.player5s = Label(self.root,text=self.diler.player5Stack, bg="PaleGreen2")
self.player5s.place(relx=0.43, rely=0.24, anchor=CENTER)
elif(self.diler.BigBlind==6):
self.diler.bankStack+=6
self.diler.player5Stack -= 6
self.diler.botpower=0
self.diler.stavka=6
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
self.player5s = Label(self.root,text=self.diler.player5Stack, bg="PaleGreen2")
self.player5s.place(relx=0.43, rely=0.24, anchor=CENTER)
self.diler.bankStack+=12
self.diler.player6Stack -= 12
self.diler.botpower=0
self.diler.stavka=12
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
self.player6s = Label(self.root,text=self.diler.player6Stack, bg="PaleGreen2")
self.player6s.place(relx=0.33, rely=0.28, anchor=CENTER)
elif(self.diler.BigBlind==7):
self.diler.bankStack+=6
self.diler.player6Stack -= 6
self.diler.botpower=0
self.diler.stavka=6
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
self.player6s = Label(self.root,text=self.diler.player6Stack, bg="PaleGreen2")
self.player6s.place(relx=0.33, rely=0.28, anchor=CENTER)
self.diler.bankStack+=12
self.diler.player7Stack -= 12
self.diler.botpower=0
self.diler.stavka=12
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
self.player7s = Label(self.root,text=self.diler.player7Stack, bg="PaleGreen2")
self.player7s.place(relx=0.23, rely=0.28, anchor=CENTER)
elif(self.diler.BigBlind==8):
self.diler.bankStack+=6
self.diler.player7Stack -= 6
self.diler.botpower=0
self.diler.stavka=6
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
self.player7s = Label(self.root,text=self.diler.player7Stack, bg="PaleGreen2")
self.player7s.place(relx=0.23, rely=0.28, anchor=CENTER)
self.diler.bankStack+=12
self.diler.player8Stack -= 12
self.diler.botpower=0
self.diler.stavka=12
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
self.player8s = Label(self.root,text=self.diler.player8Stack, bg="PaleGreen2")
self.player8s.place(relx=0.093, rely=0.24, anchor=CENTER)
# elif(self.diler.BigBlind==9):
#
# self.diler.bankStack+=6
# self.diler.player8Stack -= 6
# self.diler.botpower=0
# self.diler.stavka=6
#
#
# strin="BANK : " + str(self.diler.bankStack)
# self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
# self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
#
#
# self.player8s = Label(self.root,text=self.diler.player8Stack, bg="PaleGreen2")
# self.player8s.place(relx=0.093, rely=0.24, anchor=CENTER)
#
#
# self.diler.bankStack+=12
# self.diler.player9Stack -= 12
# self.diler.botpower=0
# self.diler.stavka=12
#
#
# strin="BANK : " + str(self.diler.bankStack)
# self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
# self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
#
#
# self.player9s = Label(self.root,text=self.diler.player9Stack, bg="PaleGreen2")
# self.player9s.place(relx=0.083, rely=0.09, anchor=CENTER)
self.diler.flagstep=self.diler.utg
while (self.diler.flagstep != 9):
if(self.diler.flagstep==1):
self.diler.stavka=self.diler.stavka-6;
self.step()
self.diler.stavka=self.diler.stavka+6;
else:
self.step()
self.botcall(self.diler.player9Stack, self.game.players[9])
self.diler.flagstep = 1
self.diler.flagofbet = 1
self.diler.flagofDiler=1
# else:
# self.diler.flagofbet = 1
# popupmsg1("You Dont Make Step")
else:
if (self.diler.flagofbet == 0):
self.diler.flagOfBet=True
self.diler.flagstep=1
while (self.diler.flagstep != self.diler.utg):
self.step()
self.botcall(self.diler.player9Stack, self.game.players[9])
self.diler.flagstep = 1
self.diler.flagofbet = 1
self.diler.flagofDiler=0
else:
self.diler.flagofbet = 1
popupmsg1("You Dont Make Step")
if (self.diler.potOdds(self.diler.stavka) == -1):
self.lbodds = Label(self.separator, text="Pot odds can not be determined at this time ",
bg="sea green",fg='#4B0082')
else:
self.lbodds = Label(self.separator, text="Pot Odds: " + str(self.diler.potOdds(self.diler.mystavka)) + " %",
bg="sea green",fg='#4B0082')
self.lbodds.place(relx=0.5, rely=0.4, anchor=CENTER)
def step2(self):
if (self.diler.flagofbet == 0):
self.diler.flagOfBet=True
while (self.diler.flagstep != 9):
self.step()
self.botcall(self.diler.player9Stack, self.game.players[9])
self.diler.flagstep = 1
self.diler.flagofbet = 1
else:
popupmsg1("You Dont Make Step")
def step(self):
if (self.diler.flagstep == 1):
self.botcall(self.diler.player1Stack, self.game.players[1])
self.diler.flagstep = 2
elif (self.diler.flagstep == 2):
self.botcall(self.diler.player2Stack, self.game.players[2])
self.diler.flagstep = 3
elif (self.diler.flagstep == 3):
self.botcall(self.diler.player3Stack, self.game.players[3])
self.diler.flagstep = 4
elif (self.diler.flagstep == 4):
self.botcall(self.diler.player4Stack, self.game.players[4])
self.diler.flagstep = 5
elif (self.diler.flagstep == 5):
self.botcall(self.diler.player5Stack, self.game.players[5])
self.diler.flagstep = 6
elif (self.diler.flagstep == 6):
self.botcall(self.diler.player6Stack, self.game.players[6])
self.diler.flagstep = 7
elif (self.diler.flagstep == 7):
self.botcall(self.diler.player7Stack, self.game.players[7])
self.diler.flagstep = 8
elif (self.diler.flagstep == 8):
self.botcall(self.diler.player8Stack, self.game.players[8])
self.diler.flagstep = 9
elif (self.diler.flagstep == 9):
self.botcall(self.diler.player9Stack, self.game.players[9])
self.diler.flagstep = 1
def mraise(self):
if (self.diler.flagofbet == 1):
self.scal = self.scale1.get()
if (self.scal > self.diler.myPlayerstack):
popupmsg("YOU HAVE NO MONEY!Try to make make less bet")
else:
if self.scal<=self.diler.stavka:
popupmsg1("You Cant Bet Less Then Current: "+str(self.diler.stavka))
else:
self.diler.stavka = self.scal
self.diler.bankStack += self.scal
self.BANKbet.destroy()
self.diler.myPlayerstack -= self.scal
self.playerbet = Label(self.root,text="YOUR STACK : " + str(self.diler.myPlayerstack), font=("Times",10,"bold"),border=3,bg="green4", fg="#4B0082")
self.playerbet.place(relx=0.057, rely=0.785, anchor=CENTER)
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
self.diler.mystavka=self.scal
self.diler.flagofbet = 0
else:
popupmsg1("Wait Your Step!")
def getWinner(self):
if (self.flagGetWinner==1):
bot= self.game.players[self.comb.checkWinner1(self.comb, self.game.players, self.game.tableCards)]
if bot == self.game.players[1]:
self.diler.player1Stack+=self.diler.bankStack
self.diler.bankStack=0
self.player1s = Label(self.root,text=self.diler.player1Stack, bg="PaleGreen2")
self.player1s.place(relx=0.23, rely=0.03, anchor=CENTER)
elif bot == self.game.players[2]:
self.diler.player2Stack+=self.diler.bankStack
self.diler.bankStack=0
self.player2s = Label(self.root,text=self.diler.player2Stack, bg="PaleGreen2")
self.player2s.place(relx=0.33, rely=0.03, anchor=CENTER)
elif bot == self.game.players[3]:
self.diler.player3Stack+=self.diler.bankStack
self.diler.bankStack=0
self.player3s = Label(self.root,text=self.diler.player3Stack, bg="PaleGreen2")
self.player3s.place(relx=0.43, rely=0.08, anchor=CENTER)
elif bot == self.game.players[4]:
self.diler.player4Stack+=self.diler.bankStack
self.diler.bankStack=0
self.player4s = Label(self.root,text=self.diler.player4Stack, bg="PaleGreen2")
self.player4s.place(relx=0.43, rely=0.16, anchor=CENTER)
elif bot == self.game.players[5]:
self.diler.player5Stack+=self.diler.bankStack
self.diler.bankStack=0
self.player5s = Label(self.root,text=self.diler.player5Stack, bg="PaleGreen2")
self.player5s.place(relx=0.43, rely=0.24, anchor=CENTER)
elif bot == self.game.players[6]:
self.diler.player6Stack+=self.diler.bankStack
self.diler.bankStack=0
self.player6s = Label(self.root,text=self.diler.player6Stack, bg="PaleGreen2")
self.player6s.place(relx=0.33, rely=0.28, anchor=CENTER)
elif bot == self.game.players[7]:
self.diler.player7Stack+=self.diler.bankStack
self.diler.bankStack=0
self.player7s = Label(self.root,text=self.diler.player7Stack, bg="PaleGreen2")
self.player7s.place(relx=0.23, rely=0.28, anchor=CENTER)
elif bot == self.game.players[8]:
self.diler.player8Stack+=self.diler.bankStack
self.diler.bankStack=0
self.player8s = Label(self.root,text=self.diler.player8Stack, bg="PaleGreen2")
self.player8s.place(relx=0.093, rely=0.24, anchor=CENTER)
elif bot == self.game.players[9]:
self.diler.player9Stack+=self.diler.bankStack
self.diler.bankStack=0
self.player9s = Label(self.root,text=self.diler.player9Stack, bg="PaleGreen2")
self.player9s.place(relx=0.083, rely=0.09, anchor=CENTER)
elif bot == self.game.players[0]:
self.diler.myPlayerstack+=self.diler.bankStack
self.diler.bankStack=0
self.playerbet = Label(self.root, text="YOUR STACK : " + str(self.diler.myPlayerstack), font=("Times",10,"bold"), border=3,bg="green4", fg="#4B0082")
self.playerbet.place(relx=0.057, rely=0.785, anchor=CENTER)
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
popupmsg2(self.comb.checkWinner(self.comb, self.game.players, self.game.tableCards),
self.game.players[self.comb.checkWinner1(self.comb, self.game.players, self.game.tableCards)])
# self.diler.massive=[]
# if (bot == self.game.players[0]):
# self.diler.massive.append(self.game.players[0])
#
# if (bot == self.game.players[1]):
# if self.player1s['text']!="OFF":
# self.diler.massive.append(self.game.players[1])
#
# elif (bot == self.game.players[2]):
# if self.player2s['text']!="OFF":
# self.diler.massive.append(self.game.players[2])
#
# elif (bot == self.game.players[3]):
# if self.player3s['text']!="OFF":
# self.diler.massive.append(self.game.players[3])
# elif (bot == self.game.players[4]):
# if self.player4s['text']!="OFF":
# self.diler.massive.append(self.game.players[4])
# elif (bot == self.game.players[5]):
# if self.player5s['text']!="OFF":
# self.diler.massive.append(self.game.players[5])
# elif (bot == self.game.players[6]):
# if self.player6s['text']!="OFF":
# self.diler.massive.append(self.game.players[6])
# elif (bot == self.game.players[7]):
# if self.player7s['text']!="OFF":
# self.diler.massive.append(self.game.players[7])
# elif (bot == self.game.players[8]):
# if self.player8s['text']!="OFF":
# self.diler.massive.append(self.game.players[8])
# elif (bot == self.game.players[9]):
# if self.player9s['text']!="OFF":
# self.diler.massive.append(self.game.players[9])
#
#
# strin="BANK : " + str(self.diler.bankStack)
# self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
# self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
#
#
# popupmsg2(self.comb.checkWinner(self.comb, self.diler.massive, self.game.tableCards),
# self.diler.massive[self.comb.checkWinner1(self.comb, self.diler.massive, self.game.tableCards)])
#
    def openriver(self):
        """Deal the river (5th board card), draw it and refresh the odds labels.

        Runs only when the player has matched the current bet
        (flagOfReStavka == 0), a betting round happened (flagOfBet), the
        turn is already open (flagtern) and the river was not dealt yet
        (flagriver guard).
        """
        self.diler.flagofDiler=0
        print(self.diler.mystavka)
        print(self.diler.stavka)
        # The player must have equalized the table bet before opening.
        if self.diler.mystavka<self.diler.stavka:
            self.diler.flagOfReStavka=1
        else:
            self.diler.flagOfReStavka=0
        if(self.diler.flagOfReStavka==0):
            if (self.diler.flagOfBet):
                self.diler.flagOfBet=False
                self.lbodds.destroy()
                self.lpodds.destroy()
                self.diler.flagGetWinner=0
                # Turn must be open and the river not dealt yet.
                if (self.flagtern != 0):
                    if (self.flagriver != 1):
                        self.diler.stavka=0
                        # self.comblpara.destroy()
                        # self.combl2para.destroy()
                        self.comblset.destroy()
                        # self.combl.destroy()
                        self.flagriver = 1
                        self.game.update_table_riverRandom()
                        self.river = self.game.tableCards[4]
                        # Suit code -> single-letter prefix used by the card
                        # image file names (runtime strings; not translated).
                        if (self.river.card_suit == 1):
                            self.flag5 = "б"
                        elif (self.river.card_suit == 2):
                            self.flag5 = "ч"
                        elif (self.river.card_suit == 3):
                            self.flag5 = "п"
                        elif (self.river.card_suit == 4):
                            self.flag5 = "к"
                        self.card5flop = str(self.flag5) + str(self.river.card_rating)
                        self.photof5 = PhotoImage(file="./images/" + self.card5flop + ".gif")
                        self.cardf5 = Label(self.root, image=self.photof5)
                        self.cardf5.place(relx=0.93, rely=0.225, anchor=CENTER)
                        # Recompute the player's combination with the full board.
                        self.comb = CheckComb(self.game.myCardWithTable)
                        self.combl = Label(self.separator1, text="Combinations: ", bg="green",fg='#4B0082')
                        #self.combl.place(relx=0.75, rely=0.65, anchor=CENTER)
                        self.combl.place(relx=0.3, rely=0.2, anchor=CENTER)
                        self.checkcomb(self.comb)
                        self.mcombl.destroy()
                        if (self.diler.potOdds(self.diler.mystavka) == -1):
                            self.lbodds = Label(self.separator, text="Pot odds can not be determined at this time ",
                                                bg="sea green",fg='#4B0082')
                        else:
                            self.lbodds = Label(self.separator, text="Pot Odds: " + str(self.diler.potOdds(self.diler.mystavka)) + " %",
                                                bg="sea green",fg='#4B0082')
                        #self.lbodds.place(relx=0.144, rely=0.90, anchor=CENTER)
                        self.lbodds.place(relx=0.5, rely=0.4, anchor=CENTER)
                        self.lpodds = Label(self.separator, text="Your ODDS: " + str(self.diler.playerOdds(self.amountOfouts)) + " %",
                                            bg="sea green",fg='#4B0082')
                        #self.lpodds.place(relx=0.112, rely=0.95, anchor=CENTER)
                        self.lpodds.place(relx=0.5, rely=0.6, anchor=CENTER)
                        for i in self.game.tableCards:
                            print(str(i.card_rating) + " " + str(i.card_suit))
                        # NOTE(review): sets self.flagGetWinner while other
                        # code clears self.diler.flagGetWinner -- verify
                        # which flag getWinner() should read.
                        self.flagGetWinner=1
                        # popupmsg2(self.comb.checkWinner(self.comb, self.game.players, self.game.tableCards),
                        #     self.game.players[self.comb.checkWinner1(self.comb, self.game.players, self.game.tableCards)])
                        self.flagofbet = 0
                        self.diler.flagofbet = 0
                        self.diler.flagOfblind=1
            else:
                popupmsg1("You Can not make open because you don't equalize!")
    def opentern(self):
        """Deal the turn (4th board card), draw it, recompute outs and odds.

        Runs only when the player has matched the current bet
        (flagOfReStavka == 0) and a betting round happened (flagOfBet);
        the flagtern guard makes it a once-per-hand action.
        """
        self.diler.flagofDiler=0
        print(self.diler.mystavka)
        print(self.diler.stavka)
        # The player must have equalized the table bet before opening.
        if self.diler.mystavka<self.diler.stavka:
            self.diler.flagOfReStavka=1
        else:
            self.diler.flagOfReStavka=0
        if(self.diler.flagOfReStavka==0):
            if (self.diler.flagOfBet):
                self.diler.flagOfBet=False
                self.diler.flagGetWinner=0
                self.lbodds.destroy()
                self.lpodds.destroy()
                # Deal the turn only once per hand.
                if (self.flagtern != 1):
                    self.diler.stavka=0
                    # self.comblpara.destroy()
                    # self.combl2para.destroy()
                    self.comblset.destroy()
                    self.mcombl.destroy()
                    # self.combl.destroy()
                    self.flagtern = 1
                    self.game.update_table_ternRandom()
                    self.tern = self.game.tableCards[3]
                    # Suit code -> single-letter prefix of the card image
                    # file names (runtime strings; not translated).
                    if (self.tern.card_suit == 1):
                        self.flag5 = "б"
                    elif (self.tern.card_suit == 2):
                        self.flag5 = "ч"
                    elif (self.tern.card_suit == 3):
                        self.flag5 = "п"
                    elif (self.tern.card_suit == 4):
                        self.flag5 = "к"
                    self.card4flop = str(self.flag5) + str(self.tern.card_rating)
                    self.photof4 = PhotoImage(file="./images/" + self.card4flop + ".gif")
                    self.cardf4 = Label(self.root, image=self.photof4)
                    self.cardf4.place(relx=0.84, rely=0.225, anchor=CENTER)
                    # Recompute combination and outs for the 4-card board.
                    self.comb = CheckComb(self.game.myCardWithTable)
                    self.combl = Label(self.separator1, text="Combinations: ", bg="green",fg='#4B0082')
                    #self.combl.place(relx=0.75, rely=0.65, anchor=CENTER)
                    self.combl.place(relx=0.3, rely=0.2, anchor=CENTER)
                    self.checkcomb(self.comb)
                    self.out = CheckOuts(self.game, CheckComb(self.game.myCardWithTable))
                    self.out.checkOut()
                    self.amountOfouts = self.out.outs
                    self.mcombl = Label(self.separator1, text="Amounts of OUTS: " + str(self.amountOfouts), bg="sea green",fg='#4B0082')
                    #self.mcombl.place(relx=0.85, rely=0.65, anchor=CENTER)
                    self.mcombl.place(relx=0.5, rely=0.6, anchor=CENTER)
                    # NOTE(review): the guard tests potOdds(stavka) but the
                    # label shows potOdds(mystavka) -- confirm intent.
                    if (self.diler.potOdds(self.diler.stavka) == -1):
                        self.lbodds = Label(self.separator,text="Pot odds can not be determined at this time ",
                                            bg="sea green",fg='#4B0082')
                    else:
                        self.lbodds = Label(self.separator,text="Pot Odds: " + str(self.diler.potOdds(self.diler.mystavka)) + " %",
                                            bg="sea green",fg='#4B0082')
                    #self.lbodds.place(relx=0.144, rely=0.90, anchor=CENTER)
                    self.lbodds.place(relx=0.5, rely=0.4, anchor=CENTER)
                    self.lpodds = Label(self.separator, text="Your ODDS: " + str(self.diler.playerOdds(self.amountOfouts)) + " %",
                                        bg="sea green",fg='#4B0082')
                    #self.lpodds.place(relx=0.112, rely=0.95, anchor=CENTER)
                    self.lpodds.place(relx=0.5, rely=0.6, anchor=CENTER)
                    self.flagofbet = 0
                    self.diler.flagofbet = 0
                    self.diler.flagOfblind=1
            else:
                popupmsg1("You Can not make open because you don't equalize!")
    def openflop(self):
        """Deal the three flop cards, draw them, compute combination/outs/odds.

        Runs only when the player has matched the current bet
        (flagOfReStavka == 0) and a betting round happened (flagOfBet);
        the flagofflop guard makes it a once-per-hand action.
        """
        self.diler.flagofDiler=0
        # The player must have equalized the table bet before opening.
        if self.diler.mystavka<self.diler.stavka:
            print(self.diler.mystavka)
            print(self.diler.stavka)
            self.diler.flagOfReStavka=1
        else:
            print(self.diler.mystavka)
            print(self.diler.stavka)
            self.diler.flagOfReStavka=0
        if(self.diler.flagOfReStavka==0):
            if (self.diler.flagOfBet):
                self.diler.flagOfBet=False
                self.diler.flagGetWinner=0
                # Deal the flop only once per hand.
                if (self.flagofflop != 1):
                    self.flagofflop = 1
                    self.diler.stavka=0
                    self.game.update_table_flopRandom()
                    self.flop = self.game.tableCards
                    # Suit code -> single-letter prefix of the card image
                    # file names, for each flop card (runtime strings;
                    # not translated).
                    if (self.flop[0].card_suit == 1):
                        self.flag2 = "б"
                    elif (self.flop[0].card_suit == 2):
                        self.flag2 = "ч"
                    elif (self.flop[0].card_suit == 3):
                        self.flag2 = "п"
                    elif (self.flop[0].card_suit == 4):
                        self.flag2 = "к"
                    if (self.flop[1].card_suit == 1):
                        self.flag3 = "б"
                    elif (self.flop[1].card_suit == 2):
                        self.flag3 = "ч"
                    elif (self.flop[1].card_suit == 3):
                        self.flag3 = "п"
                    elif (self.flop[1].card_suit == 4):
                        self.flag3 = "к"
                    if (self.flop[2].card_suit == 1):
                        self.flag4 = "б"
                    elif (self.flop[2].card_suit == 2):
                        self.flag4 = "ч"
                    elif (self.flop[2].card_suit == 3):
                        self.flag4 = "п"
                    elif (self.flop[2].card_suit == 4):
                        self.flag4 = "к"
                    self.card1flop = str(self.flag2) + str(self.flop[0].card_rating)
                    self.card2flop = str(self.flag3) + str(self.flop[1].card_rating)
                    self.card3flop = str(self.flag4) + str(self.flop[2].card_rating)
                    # self.flopCard = Label(self.master, text="Карты стола: ", bg="red")
                    # self.flopCard.place(relx=0.75, rely=0.03, anchor=CENTER)
                    self.photof1 = PhotoImage(file="./images/" + self.card1flop + ".gif")
                    self.cardf1 = Label(self.root, image=self.photof1)
                    self.cardf1.place(relx=0.55, rely=0.225, anchor=CENTER)
                    self.photof2 = PhotoImage(file="./images/" + self.card2flop + ".gif")
                    self.cardf2 = Label(self.root, image=self.photof2)
                    self.cardf2.place(relx=0.65, rely=0.225, anchor=CENTER)
                    self.photof3 = PhotoImage(file="./images/" + self.card3flop + ".gif")
                    self.cardf3 = Label(self.root, image=self.photof3)
                    self.cardf3.place(relx=0.75, rely=0.225, anchor=CENTER)
                    # Compute the player's combination and outs on the flop.
                    self.comb = CheckComb(self.game.myCardWithTable)
                    self.combl = Label(self.separator1, text="Combinations: ", bg="green",fg='#4B0082')
                    #self.combl.place(relx=0.75, rely=0.65, anchor=CENTER)
                    self.combl.place(relx=0.3, rely=0.2, anchor=CENTER)
                    self.checkcomb(self.comb)
                    self.out = CheckOuts(self.game, CheckComb(self.game.myCardWithTable))
                    self.out.checkOut()
                    self.amountOfouts = self.out.outs
                    self.mcombl = Label(self.separator1, text="Amounts of OUTS: " + str(self.amountOfouts),bg="sea green",fg='#4B0082')
                    #self.mcombl.place(relx=0.85, rely=0.65, anchor=CENTER)
                    self.mcombl.place(relx=0.5, rely=0.6, anchor=CENTER)
                    # NOTE(review): the guard tests potOdds(stavka) but the
                    # label shows potOdds(mystavka) -- confirm intent.
                    if (self.diler.potOdds(self.diler.stavka) == -1):
                        self.lbodds = Label(self.separator, text="Pot odds can not be determined at this time ",
                                            bg="sea green",fg='#4B0082')
                    else:
                        self.lbodds = Label(self.separator, text="Pot Odds: " + str(self.diler.potOdds(self.diler.mystavka)) + " %",
                                            bg="sea green",fg='#4B0082')
                    #self.lbodds.place(relx=0.144, rely=0.90, anchor=CENTER)
                    self.lbodds.place(relx=0.5, rely=0.4, anchor=CENTER)
                    self.lpodds = Label(self.separator, text="Your ODDS: " + str(self.diler.playerOdds(self.amountOfouts)) + " %",
                                        bg="sea green",fg='#4B0082')
                    # self.lpodds.place(relx=0.112, rely=0.95, anchor=CENTER)
                    self.lpodds.place(relx=0.5, rely=0.6, anchor=CENTER)
                    self.flagofbet = 0
                    self.diler.flagofbet = 0
                    self.diler.flagOfblind=1
            else:
                popupmsg1("You Can not make open because you don't equalize!")
def UpdateLabel(self):
#self.lbodds.destroy()
#self.lpodds.destroy()
self.diler.flagOfblind=0
self.diler.flagofDiler=0
self.flagofbet = 0
self.diler.flagofbet = 0
self.diler.stavka=0
self.diler.flagOfBet=False
self.diler.flagGetWinner=0
self.diler.bankStack = 0
self.BANKbet.destroy()
strin="BANK : " + str(self.diler.bankStack)
self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
self.delete()
for i in self.game.players:
i.active=True
self.MyPosition.destroy()
self.dilerPosition.destroy()
self.my__init__(self.root, self.diler)
self.diler.update_position()
self.MyPosition.destroy()
self.MyPosition = Label(self.root,text=self.diler.show_position(), bg='green4',fg='#00ffBB')
self.MyPosition.place(relx=0.1, rely=0.43, anchor=CENTER)
self.dilerPosition.destroy()
self.dilerPosition = Label(self.root,text="Diler", bg='gray20',fg='#00ffBB',font=("Times", 10,"bold"))
if (self.diler.show_diler() == 0):
self.playerYOU = Label(self.root,self.root, text="YOU : ", bg='gray20',fg='red2',font=("Times", 15,"bold"))
self.playerYOU.place(relx=0.132, rely=0.03, anchor=CENTER)
self.dilerPosition.place(relx=0.102, rely=0.43, anchor=CENTER)
elif (self.diler.show_diler() == 1):
self.dilerPosition.place(relx=0.25, rely=0.04, anchor=CENTER)
elif (self.diler.show_diler() == 2):
self.dilerPosition.place(relx=0.35, rely=0.04, anchor=CENTER)
elif (self.diler.show_diler() == 3):
self.dilerPosition.place(relx=0.45, rely=0.09, anchor=CENTER)
elif (self.diler.show_diler() == 4):
self.dilerPosition.place(relx=0.45, rely=0.17, anchor=CENTER)
elif (self.diler.show_diler() == 5):
self.dilerPosition.place(relx=0.45, rely=0.25, anchor=CENTER)
elif (self.diler.show_diler() == 6):
self.dilerPosition.place(relx=0.35, rely=0.25, anchor=CENTER)
elif (self.diler.show_diler() == 7):
self.dilerPosition.place(relx=0.25, rely=0.25, anchor=CENTER)
elif (self.diler.show_diler() == 8):
self.dilerPosition.place(relx=0.113, rely=0.25, anchor=CENTER)
elif (self.diler.show_diler() == 9):
self.dilerPosition.place(relx=0.103, rely=0.1, anchor=CENTER)
# self.mcombl.destroy()
    def my__init__(self, master, diler):
        """(Re)build the whole table GUI for a new hand.

        Creates a fresh ``Game``, renders the hole-card images, opponent
        name/stack labels, bank label, position labels, the bet slider and
        all action buttons on ``self.root``.

        :param master: Tk parent widget (stored on ``self.master``).
        :param diler: dealer/game-state object supplying stacks, the
            player's position and the dealer seat index.
        """
        #self.metod()
        # Reset per-hand street flags (used later by delete()).
        self.diler.flagGetWinner=0
        self.flagtern = 0
        self.flagofflop = 0
        self.flagriver = 0
        self.master = master
        self.game = Game(10)
        self.bankStack = diler.bankStack
        self.playerbet = diler.myPlayerstack
        self.comb = CheckComb(self.game.myCardWithTable)
        # print(self.game.myPlayer.card1.card_suit)
        # print(self.game.myPlayer.card1.card_rating)
        # print(self.game.myPlayer.card2.card_suit)
        # print(self.game.myPlayer.card2.card_rating)
        # Map card suit codes (1..4) to the one-letter prefixes used in
        # the card image file names under ./images/.
        if (self.game.myPlayer.card1.card_suit == 1):
            self.flag = "б"
        elif (self.game.myPlayer.card1.card_suit == 2):
            self.flag = "ч"
        elif (self.game.myPlayer.card1.card_suit == 3):
            self.flag = "п"
        elif (self.game.myPlayer.card1.card_suit == 4):
            self.flag = "к"
        if (self.game.myPlayer.card2.card_suit == 1):
            self.flag1 = "б"
        elif (self.game.myPlayer.card2.card_suit == 2):
            self.flag1 = "ч"
        elif (self.game.myPlayer.card2.card_suit == 3):
            self.flag1 = "п"
        elif (self.game.myPlayer.card2.card_suit == 4):
            self.flag1 = "к"
        # Image base names for the two hole cards, e.g. "б10".
        self.card1inc = str(self.flag) + str(self.game.myPlayer.card1.card_rating)
        self.card2inc = str(self.flag1) + str(self.game.myPlayer.card2.card_rating)
        self.playerCard = Label(self.root, text="YOUR POSITION : ", bg="green4", fg="#4B0082",font=("Times",10,"bold"))
        self.playerCard.place(relx=0.048, rely=0.43, anchor=CENTER)
        #self.playerb = Label(self.root, text="Your BET : ", bg="black", fg="green")
        #self.playerb.place(relx=0.058, rely=0.79, anchor=CENTER)
        # Bet-size slider.
        self.scale1 = Scale(self.root, orient=HORIZONTAL, width=5, length=300, from_=5, to=2000, tickinterval=350,
                            resolution=5, bg='gray5',fg='#00ffBB',font=("Times", 11,"bold"))
        self.scale1.place(relx=0.118, rely=0.650, anchor=CENTER)
        # NOTE(review): self.playerbet was assigned a number above and is
        # overwritten with a Label here -- confirm nothing reads the number.
        self.playerbet = Label(self.root, text="YOUR STACK : " + str(diler.myPlayerstack), font=("Times",10,"bold"),border=3,bg="green4", fg="#4B0082")
        self.playerbet.place(relx=0.057, rely=0.785, anchor=CENTER)
        strin="BANK : " + str(self.diler.bankStack)
        self.BANKbet = Label(self.root,text='%-13s' % strin, bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
        self.BANKbet.place(relx=0.74, rely=0.05, anchor=CENTER)
        # Opponent name labels positioned around the table.
        self.player1 = Label(self.root, text="Lucke : ", bg='gray20',fg='RoyalBlue2',font=("Times", 10,"bold"))
        self.player1.place(relx=0.2, rely=0.03, anchor=CENTER)
        self.player2 = Label(self.root, text="Olivia : ", bg='gray20',fg='RoyalBlue2',font=("Times", 10,"bold"))
        self.player2.place(relx=0.3, rely=0.03, anchor=CENTER)
        self.player3 = Label(self.root, text="Ashley : ", bg='gray20',fg='RoyalBlue2',font=("Times", 10,"bold"))
        self.player3.place(relx=0.4, rely=0.08, anchor=CENTER)
        self.player4 = Label(self.root, text="Jacob : ", bg='gray20',fg='RoyalBlue2',font=("Times", 10,"bold"))
        self.player4.place(relx=0.4, rely=0.16, anchor=CENTER)
        self.player5 = Label(self.root, text="Ethan : ", bg='gray20',fg='RoyalBlue2',font=("Times", 10,"bold"))
        self.player5.place(relx=0.4, rely=0.24, anchor=CENTER)
        self.player6 = Label(self.root, text="Daniel : ", bg='gray20',fg='RoyalBlue2',font=("Times", 10,"bold"))
        self.player6.place(relx=0.3, rely=0.28, anchor=CENTER)
        self.player7 = Label(self.root, text="Kayla : ", bg='gray20',fg='RoyalBlue2',font=("Times", 10,"bold"))
        self.player7.place(relx=0.2, rely=0.28, anchor=CENTER)
        self.player8 = Label(self.root, text="John : ", bg='gray20',fg='RoyalBlue2',font=("Times", 10,"bold"))
        self.player8.place(relx=0.062, rely=0.24, anchor=CENTER)
        self.player9 = Label(self.root, text="Tyler : ", bg='gray20',fg='RoyalBlue2',font=("Times", 10,"bold"))
        self.player9.place(relx=0.062, rely=0.09, anchor=CENTER)
        self.playerYOU = Label(self.root, text="YOU : ", bg='gray20',fg='#00ffBB',font=("Times", 15,"bold"))
        self.playerYOU.place(relx=0.132, rely=0.03, anchor=CENTER)
        # Opponent stack-amount labels, one per seat.
        self.player1s = Label(self.root, text=diler.player1Stack,bg='turquoise4',fg='PaleGreen2')
        self.player1s.place(relx=0.23, rely=0.03, anchor=CENTER)
        self.player2s = Label(self.root, text=diler.player2Stack,bg='turquoise4',fg='PaleGreen2')
        self.player2s.place(relx=0.33, rely=0.03, anchor=CENTER)
        self.player3s = Label(self.root, text=diler.player3Stack, bg='turquoise4',fg='PaleGreen2')
        self.player3s.place(relx=0.43, rely=0.08, anchor=CENTER)
        self.player4s = Label(self.root, text=diler.player4Stack, bg='turquoise4',fg='PaleGreen2')
        self.player4s.place(relx=0.43, rely=0.16, anchor=CENTER)
        self.player5s = Label(self.root, text=diler.player5Stack, bg='turquoise4',fg='PaleGreen2')
        self.player5s.place(relx=0.43, rely=0.24, anchor=CENTER)
        self.player6s = Label(self.root, text=diler.player6Stack, bg='turquoise4',fg='PaleGreen2')
        self.player6s.place(relx=0.33, rely=0.28, anchor=CENTER)
        self.player7s = Label(self.root, text=diler.player7Stack, bg='turquoise4',fg='PaleGreen2')
        self.player7s.place(relx=0.23, rely=0.28, anchor=CENTER)
        self.player8s = Label(self.root, text=diler.player8Stack, bg='turquoise4',fg='PaleGreen2')
        self.player8s.place(relx=0.093, rely=0.24, anchor=CENTER)
        self.player9s = Label(self.root, text=diler.player9Stack, bg='turquoise4',fg='PaleGreen2')
        self.player9s.place(relx=0.083, rely=0.09, anchor=CENTER)
        # Hole-card images; the PhotoImage references are kept on self so
        # Tkinter does not garbage-collect them.
        self.photoMycard1 = PhotoImage(file="./images/" + self.card1inc + ".gif")
        self.card1 = Label(self.root,image=self.photoMycard1)
        self.card1.place(relx=0.032, rely=0.53, anchor=CENTER)
        self.photoMycard2 = PhotoImage(file="./images/" + self.card2inc + ".gif")
        self.card2 = Label(self.root, image=self.photoMycard2)
        self.card2.place(relx=0.1, rely=0.53, anchor=CENTER)
        self.amount_players = self.game.amount_players
        # (removed: large commented-out table assigning diler/blind/utg
        # seat indexes per player count -- dead code superseded by the
        # diler object's own position handling)
        self.MyPosition = Label(self.root, text=diler.show_position(), bg='green4',fg='#00ffBB')
        self.MyPosition.place(relx=0.1, rely=0.43, anchor=CENTER)
        self.dilerPosition = Label(self.root, text="Diler", bg='gray20',fg='#00ffBB',font=("Times", 10,"bold"))
        # Place the dealer-button label next to the current dealer's seat.
        if (diler.show_diler() == 0):
            self.dilerPosition.place(relx=0.102, rely=0.43, anchor=CENTER)
        elif (diler.show_diler() == 1):
            self.dilerPosition.place(relx=0.102, rely=0.5, anchor=CENTER)
        elif (diler.show_diler() == 2):
            self.dilerPosition.place(relx=0.102, rely=0.7, anchor=CENTER)
        elif (diler.show_diler() == 3):
            self.dilerPosition.place(relx=0.102, rely=0.03, anchor=CENTER)
        elif (diler.show_diler() == 4):
            self.dilerPosition.place(relx=0.102, rely=0.03, anchor=CENTER)
        elif (diler.show_diler() == 5):
            self.dilerPosition.place(relx=0.102, rely=0.03, anchor=CENTER)
        elif (diler.show_diler() == 6):
            self.dilerPosition.place(relx=0.102, rely=0.29, anchor=CENTER)
        elif (diler.show_diler() == 7):
            self.dilerPosition.place(relx=0.102, rely=0.43, anchor=CENTER)
        elif (diler.show_diler() == 8):
            self.dilerPosition.place(relx=0.102, rely=0.03, anchor=CENTER)
        # NOTE(review): this unconditional place() overrides the whole
        # elif chain above (last place() wins in Tk) -- looks like a bug
        # or a missing "elif == 9" case; confirm intended coordinates.
        self.dilerPosition.place(relx=0.102, rely=0.43, anchor=CENTER)
        # Pre-flop hand-strength evaluation for the advice label below.
        self.preflop = CheckHoleCards(self.game.myPlayer.card1, self.game.myPlayer.card2, diler.show_position(),
                                      self.game.amount_players, "act_raise")
        self.powerPreflop = self.preflop.check_power()
        self.separator = Frame(self.root,bg="green",height=150,width=300, bd=1, relief=SUNKEN)
        self.separator.place(relx=0.3, rely=0.9, anchor=CENTER)
        self.separator1 = Frame(self.root,bg="green",height=150,width=300, bd=1, relief=SUNKEN)
        self.separator1.place(relx=0.64, rely=0.9, anchor=CENTER)
        self.power = Label(self.separator, text="Actions On Preflop: " + str(self.powerPreflop), bg="sea green",fg='#4B0082')
        self.power.place(relx=0.5, rely=0.2, anchor=CENTER)
        #self.power.place(relx=0.113, rely=0.85, anchor=CENTER)
        # self.powerpreflop=Label(master,text=str(self.powerPreflop),bg="grey")
        # self.powerpreflop.place(relx=0.062, rely=0.59, anchor=CENTER)
        # Action buttons: game-flow controls on the right, betting actions
        # on the left.
        self.buttonNewGame = Button(self.root, text="New Game: ",width=10,fg='#00ffBB', bg='gray20',font=("Times", 15,"bold"), command=self.UpdateLabel)
        # self.buttonNewGame.bind("<Button-1>",newGame)
        self.buttonNewGame.place(relx=0.87, rely=0.6, anchor=CENTER)
        self.buttonflop = Button(self.root, text="Open Flop: ",width=10,fg='#00ffBB', bg='gray20',font=("Times", 15,"bold"), command=self.openflop)
        # self.buttonNewGame.bind("<Button-1>",newGame)
        self.buttonflop.place(relx=0.87, rely=0.68, anchor=CENTER)
        self.buttontern = Button(self.root, text="Open Tern: ",width=10,fg='#00ffBB', bg='gray20',font=("Times", 15,"bold"), command=self.opentern)
        # self.buttonNewGame.bind("<Button-1>",newGame)
        self.buttontern.place(relx=0.87, rely=0.74, anchor=CENTER)
        self.buttonriver = Button(self.root, text="Open River: ", width=10,fg='#00ffBB', bg='gray20',font=("Times", 15,"bold"),command=self.openriver)
        # self.buttonNewGame.bind("<Button-1>",newGame)
        self.buttonriver.place(relx=0.87, rely=0.8, anchor=CENTER)
        self.buttonCALL = Button(self.root, text="Call", command=self.call, fg='#00ffBB', bg='gray20',width=10,font=("Times", 15,"bold"),)
        # self.buttonNewGame.bind("<Button-1>",newGame)
        self.buttonCALL.place(relx=0.067, rely=0.72, anchor=CENTER)
        self.buttonFALL = Button(self.root, text="Fall", fg='#00ffBB', bg='gray20',width=10,font=("Times", 15,"bold"),command=self.fall)
        # self.buttonNewGame.bind("<Button-1>",newGame)
        self.buttonFALL.place(relx=0.165, rely=0.72, anchor=CENTER)
        self.buttonRaise = Button(self.root, text="Raise", command=self.mraise, fg='#00ffBB', bg='gray20',width=10,font=("Times", 15,"bold"),)
        # self.buttonNewGame.bind("<Button-1>",newGame)
        self.buttonRaise.place(relx=0.287, rely=0.65, anchor=CENTER)
        self.buttonCheck = Button(self.root, text="Check", command=self.check, fg='#00ffBB', bg='gray20',width=10,font=("Times", 15,"bold"),)
        # self.buttonNewGame.bind("<Button-1>",newGame)
        self.buttonCheck.place(relx=0.287, rely=0.725, anchor=CENTER)
        self.buttonSTEP = Button(self.root, text="STEP", fg='#00ffBB', bg='gray20',width=10,font=("Times", 15,"bold"),command=self.step1)
        # self.buttonNewGame.bind("<Button-1>",newGame)
        self.buttonSTEP.place(relx=0.87, rely=0.9, anchor=CENTER)
        self.buttonGetWinner = Button(self.root, text="Get Winner", fg='#00ffBB',width=10, bg='gray20',font=("Times", 15,"bold"),command=self.getWinner)
        # self.buttonNewGame.bind("<Button-1>",newGame)
        self.buttonGetWinner.place(relx=0.87, rely=0.95, anchor=CENTER)
def delete(self):
self.power.destroy()
self.BANKbet.destroy()
if self.flagofflop == 1:
self.cardf1.destroy()
self.cardf2.destroy()
self.cardf3.destroy()
# self.comblpara.destroy()
# self.combl2para.destroy()
self.comblset.destroy()
self.mcombl.destroy()
if self.flagtern == 1:
self.cardf4.destroy()
# self.comblpara.destroy()
# self.combl2para.destroy()
self.comblset.destroy()
self.mcombl.destroy()
if self.flagriver == 1:
self.cardf5.destroy()
# self.comblpara.destroy()
# self.combl2para.destroy()
self.comblset.destroy()
self.mcombl.destroy()
def popupmsg(msg):
popup = Toplevel()
popup["bg"] = "OrangeRed3"
popup.wm_title("!Be careful!")
label = Label(popup, text=msg, fg="white", bg="red4")
label.pack(side="top", fill="x", pady=10)
B1 = Button(popup, text="Ok", command=popup.destroy, fg="white", bg="red4")
B1.pack()
popup.mainloop()
def popupmsg1(msg):
popup = Toplevel()
popup["bg"] = "OrangeRed3"
popup.wm_title("!Be careful!")
label = Label(popup, text=msg, fg="white", bg="red4")
label.pack(side="top", fill="x", pady=10)
B1 = Button(popup, text="Ok", command=popup.destroy, fg="white", bg="red4")
B1.pack()
popup.mainloop()
def popupmsg2(msg, player):
mpopup = Toplevel()
mpopup["bg"] = "OrangeRed3"
mpopup.wm_title("!winner!")
flag = ""
flag1 = ""
if (player.card1.card_suit == 1):
flag = "б"
elif (player.card1.card_suit == 2):
flag = "ч"
elif (player.card1.card_suit == 3):
flag = "п"
elif (player.card1.card_suit == 4):
flag = "к"
if (player.card2.card_suit == 1):
flag1 = "б"
elif (player.card2.card_suit == 2):
flag1 = "ч"
elif (player.card2.card_suit == 3):
flag1 = "п"
elif (player.card2.card_suit == 4):
flag1 = "к"
card1inc = str(flag) + str(player.card1.card_rating)
card2inc = str(flag1) + str(player.card2.card_rating)
label = Label(mpopup, text=msg, fg='#00ffBB', bg='gray20')
label.pack(side="top", fill="x", pady=10)
photoMycard111 = PhotoImage(file="./images/" + card1inc + ".gif")
card1 = Label(mpopup, image=photoMycard111)
card1.pack(side="left", pady=50, padx=10)
photoMycard222 = PhotoImage(file="./images/" + card2inc + ".gif")
card2 = Label(mpopup, image=photoMycard222)
card2.pack(side="right", pady=50, padx=10)
B1 = Button(mpopup, text="OK", command=mpopup.destroy, fg='#00ffBB', bg='gray20')
B1.pack(side="bottom")
x = (mpopup.winfo_screenwidth() - mpopup.winfo_reqwidth()) / 2
y = (mpopup.winfo_screenheight() - mpopup.winfo_reqheight()) / 2
mpopup.wm_geometry("+%d+%d" % (x, y))
mpopup.mainloop()
    def mymain():
        """Build the main window (dividers, avatars, table image) and run Tk.

        NOTE(review): defined at method indentation but without ``self`` --
        presumably invoked as a plain function; confirm the call site.
        """
        t=mGUI()
        t.metod()
        # Vertical and horizontal divider lines separating the panels.
        canvas = Canvas(t.root,width=50, height=900,highlightbackground='green4', bg = 'green4')
        canvas.place(relx=0.47, rely=0.5, anchor=CENTER)
        canvas.create_line(25, 0, 25, 900,fill="black",width=5)
        csanvas = Canvas(t.root,width=700, height=50,highlightbackground='green4', bg = 'green4')
        csanvas.place(relx=0.21, rely=0.41, anchor=CENTER)
        csanvas.create_line(0, 25, 700, 25,fill="black",width=5)
        # csanvas1 = Canvas(t.root,width=1700, height=50,highlightbackground='green4', bg = 'green4')
        # csanvas1.place(relx=0.9, rely=0.63, anchor=CENTER)
        # csanvas1.create_line(0, 25, 1700, 25,fill="black")
        # Avatar canvases for the ten seats.  Image references are kept on
        # ``t`` (or in locals alive for the duration of mainloop) so that
        # Tkinter does not garbage-collect the PhotoImage objects.
        canvas1 = Canvas(t.root,width=50, height=50,highlightbackground='green4', bg = 'green4')
        canvas1.place(relx=0.132, rely=0.09, anchor=CENTER)
        image1 = tkinter.PhotoImage(file = './ImPeople/p11.png')
        canvas1.create_image(0, 0,image = image1, anchor = NW)
        t.canvas2 = Canvas(t.root,width=50, height=50,highlightbackground='green4', bg = 'green4')
        t.canvas2.place(relx=0.22, rely=0.09, anchor=CENTER)
        t.image2 = tkinter.PhotoImage(file = './ImPeople/p222.png')
        t.canvas2.create_image(0, 0,image = t.image2, anchor = NW)
        t.canvas3 = Canvas(t.root,width=50, height=50,highlightbackground='green4', bg = 'green4')
        t.canvas3.place(relx=0.32, rely=0.09, anchor=CENTER)
        t.image3 = tkinter.PhotoImage(file = './ImPeople/p101010.png')
        t.canvas3.create_image(0, 0,image = t.image3, anchor = NW)
        t.canvas4 = Canvas(t.root,width=50, height=50,highlightbackground='green4', bg = 'green4')
        t.canvas4.place(relx=0.42, rely=0.13, anchor=CENTER)
        t.image4 = tkinter.PhotoImage(file = './ImPeople/p333.png')
        t.canvas4.create_image(0, 0,image = t.image4, anchor = NW)
        t.canvas5 = Canvas(t.root,width=50, height=50,highlightbackground='green4', bg = 'green4')
        t.canvas5.place(relx=0.42, rely=0.215, anchor=CENTER)
        t.image5 = tkinter.PhotoImage(file = './ImPeople/p444.png')
        t.canvas5.create_image(0, 0,image = t.image5, anchor = NW)
        t.canvas6 = Canvas(t.root,width=50, height=50,highlightbackground='green4', bg = 'green4')
        t.canvas6.place(relx=0.42, rely=0.3, anchor=CENTER)
        t.image6 = tkinter.PhotoImage(file = './ImPeople/p666.png')
        t.canvas6.create_image(0, 0,image = t.image6, anchor = NW)
        t.canvas7 = Canvas(t.root,width=50, height=50,highlightbackground='green4', bg = 'green4')
        t.canvas7.place(relx=0.32, rely=0.33, anchor=CENTER)
        t.image7 = tkinter.PhotoImage(file = './ImPeople/p777.png')
        t.canvas7.create_image(0, 0,image = t.image7, anchor = NW)
        t.canvas8 = Canvas(t.root,width=50, height=50,highlightbackground='green4', bg = 'green4')
        t.canvas8.place(relx=0.08, rely=0.3, anchor=CENTER)
        t.image8 = tkinter.PhotoImage(file = './ImPeople/p888.png')
        t.canvas8.create_image(0, 0,image = t.image8, anchor = NW)
        t.canvas9 = Canvas(t.root,width=50, height=50,highlightbackground='green4', bg = 'green4')
        t.canvas9.place(relx=0.08, rely=0.15, anchor=CENTER)
        t.image9 = tkinter.PhotoImage(file = './ImPeople/p555.png')
        t.canvas9.create_image(0, 0,image = t.image9, anchor = NW)
        t.canvas10 = Canvas(t.root,width=50, height=50,highlightbackground='green4', bg = 'green4')
        t.canvas10.place(relx=0.22, rely=0.34, anchor=CENTER)
        t.image10 = tkinter.PhotoImage(file = './ImPeople/p999.png')
        t.canvas10.create_image(0, 0,image = t.image10, anchor = NW)
        # Table background image.
        t.canvas = Canvas(t.root,width=700, height=550, highlightbackground='green4',bg = 'green4')
        t.canvas.place(relx=0.74, rely=0.37, anchor=CENTER)
        t.image = tkinter.PhotoImage(file = './mfon/table1.png')
        t.canvas.create_image(0, 0, image = t.image, anchor = NW)
        t.root["bg"] = "green4"
        # t.root.protocol('WM_DELETE_WINDOW', t.root.destroy())
        t.root.state("zoomed")
        t.root.resizable(False, False)
        # Build the per-hand widgets, then enter the event loop.
        t.my__init__(t.root, t.diler)
        t.root.mainloop()
| [
"endnikita@gmail.com"
] | endnikita@gmail.com |
f205af874bfd19c543b990383520db2dc51ce796 | 297c30dc0120c2920c86c8257bc530db1bb1114a | /Application/Application_Pandas/panda_DataFrame_Test_1.py | e7ad323507d330adb254dc3f79e9571b82741412 | [] | no_license | whoiszyc/Repo_python | 76e248b350a3f109c53bfb1f3abe59b903a98e46 | bdc3f39883aed5b2e85624525c662c00f60d35e3 | refs/heads/master | 2021-07-06T04:48:04.973680 | 2020-07-27T03:55:58 | 2020-07-27T03:55:58 | 139,599,645 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,236 | py | import numpy as np
import pandas as pd

# Demo: building DataFrames from lists of dicts, inspecting them, and
# exporting to CSV/Excel.

# use string as key
mydict0 = [{'a': 10, 'b': 20, 'c': 30, 'd': 40},{'a': 100, 'b': 200, 'c': 300, 'd': 400},{'a': 1000, 'b': 2000, 'c': 3000, 'd': 4000 }]

# use int number as key
mydict1 = [{0: 10, 1: 20, 2: 30, 3: 40},{0: 100, 1: 200, 2: 300, 3: 400},{0: 1000, 1: 2000, 2: 3000, 3: 4000 }]

# test the first data frame
df = pd.DataFrame(mydict0)
print(df)

# general information of the data frame
print('Total number of data entries in the data frame is {}'.format(df.size))
print('Dimension of data entries in the data frame is {} by {}'.format(df.shape[0], df.shape[1]))

# get entry by location.
# NOTE: iloc slicing is end-exclusive, so 1:2 selects ONLY the second
# column/row (the original messages wrongly said "second to third").
print('Second column of the data frame')
print(df.iloc[:, 1])
print('Second column of the data frame, as a one-column DataFrame')
print(df.iloc[:, 1:2])
print('Second row of the data frame, as a one-row DataFrame')
print(df.iloc[1:2, :])

# get entry by key
print('The column that key equals to "a" is:')
print(df['a'])

# save data frame to csv / excel
# NOTE(review): writing legacy .xls requires the xlwt engine, which modern
# pandas no longer supports -- consider switching to .xlsx (openpyxl).
df.to_csv('test_1.csv')
df.to_excel('test_1.xls')

# test the second data frame
# get entry by key (int column labels work the same way as strings)
df = pd.DataFrame(mydict1)
print(df)
print('The column that key equals to 0 is:')
print(df[0])

# save data frame to csv
df.to_csv('test_2.csv', encoding='utf-8')
df.to_excel('test_2.xls')
"31248391+whoiszyc@users.noreply.github.com"
] | 31248391+whoiszyc@users.noreply.github.com |
619e237a4faf772e892747ff7c3e8b05f3a6b00e | 8c917dc4810e2dddf7d3902146280a67412c65ea | /v_7/Dongola/wafi/account_custom_wafi/account_custom.py | f19ef70a021724ff48985fea969d72415668236c | [] | no_license | musabahmed/baba | d0906e03c1bbd222d3950f521533f3874434b993 | 0b997095c260d58b026440967fea3a202bef7efb | refs/heads/master | 2021-10-09T02:37:32.458269 | 2018-12-20T06:00:00 | 2018-12-20T06:00:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,557 | py | # -*- coding: utf-8 -*-
##############################################################################
#
# NCTR, Nile Center for Technology Research
# Copyright (C) 2011-2012 NCTR (<http://www.nctr.sd>).
#
##############################################################################
from openerp.osv import osv, fields, orm
from datetime import datetime
from datetime import timedelta
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT, DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP, float_compare
from openerp.tools.translate import _
#----------------------------------------------------------
# Account Account (Inherit)
#----------------------------------------------------------
class account_account(osv.Model):
    """Extend accounts with ceiling amounts.

    The ceilings are read by ``account_journal.check_ceiling`` in this
    module to warn when an account balance crosses its configured bounds.
    """
    _inherit = "account.account"
    _columns = {
        # Maximum balance allowed before a warning is raised.
        'ceiling': fields.float('Ceiling'),
        # Minimum balance allowed before a warning is raised.
        'min_ceiling': fields.float('Minimum Ceiling'),
        # Ceiling applied to payments.
        # NOTE(review): no consumer of this field is visible in this file.
        'payment_ceiling': fields.float('Payment Ceiling'),
    }
class account_journal(osv.Model):
    _inherit = 'account.journal'

    def check_ceiling(self, cr, uid, ids, context=None):
        """Validate the ceilings of the journal's default accounts.

        Checks the default debit and credit accounts of the first journal
        in ``ids`` against their maximum (``ceiling``) and minimum
        (``min_ceiling``) balance bounds.  Every violation is collected;
        when any exists, a notification is posted to the journal's users
        and an exception listing all violations is raised.

        :return: True when no ceiling is violated.
        :raises orm.except_orm: when at least one ceiling is exceeded.
        """
        journal = self.browse(cr, uid, ids, context=context)[0]
        # (4, id) subscribes each journal user's partner to the message.
        recipient_partners = [(4, user.partner_id.id) for user in journal.user_id]
        ceil_msg = []
        debit_account = journal.default_debit_account_id
        credit_account = journal.default_credit_account_id
        if debit_account.balance >= debit_account.ceiling:
            ceil_msg.append(_(" Maximum ceiling %s for %s ' %s ' has been exceed") % (debit_account.ceiling, debit_account.name, debit_account.balance))
        if credit_account.balance >= credit_account.ceiling:
            ceil_msg.append(_("\nMaximum ceiling %s for %s ' %s ' has been exceed") % (credit_account.ceiling, credit_account.name, credit_account.balance))
        if debit_account.balance <= debit_account.min_ceiling:
            ceil_msg.append(_("\nMinimum ceiling %s for %s ' %s ' has been exceed") % (debit_account.min_ceiling, debit_account.name, debit_account.balance))
        if credit_account.balance <= credit_account.min_ceiling:
            ceil_msg.append(_("\nMinimum ceiling %s for %s ' %s ' has been exceed") % (credit_account.min_ceiling, credit_account.name, credit_account.balance))
        if ceil_msg:
            # Bugfix: the original referenced the undefined names
            # ``min_msg`` (in the message-join loop) and ``account`` (in
            # the final raise), both of which raised NameError here.
            msg = ','.join(ceil_msg)
            post_vars = {
                'subject': "notification about ceiling",
                'body': msg,
                'partner_ids': recipient_partners,
            }
            thread_pool = self.pool.get('mail.thread')
            thread_pool.message_post(
                cr, uid, False,
                type="notification",
                subtype="mt_comment",
                context=context,
                **post_vars)
            raise orm.except_orm(_('Warning !'), msg)
        return True
class account_period(osv.Model):
    _inherit = "account.period"

    def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=100):
        """Restrict name search to open ('draft') periods by default.

        Callers that need closed periods as well pass ``closed=True``
        in the context, which skips the extra domain term.

        :return: result of the parent name_search
        """
        if args is None:
            args = []
        context = context or {}
        if not context.get('closed', False):
            args.append(('state', '=', 'draft'))
        return super(account_period, self).name_search(
            cr, uid, name, args=args, operator='ilike', context=context, limit=limit)

    def action_draft(self, cr, uid, ids, context=None):
        """Re-open periods, refusing when the owning fiscal year is closed.

        :return: result of the parent action_draft
        """
        closed_year_periods = self.search(
            cr, uid,
            [('id', 'in', ids), ('fiscalyear_id.state', '!=', 'draft')],
            context=context)
        if closed_year_periods:
            raise osv.except_osv(_('Warning!'), _('You can not re-open a period which belongs to closed fiscal year'))
        return super(account_period, self).action_draft(cr, uid, ids, context)
class account_fiscalyear(osv.Model):
    """Fiscal year with the extended (government-style) locking workflow:
    Open -> Locked Temporarily -> Open/Close Extension Period ->
    First Lock -> Final Lock."""
    _inherit = "account.fiscalyear"

    _columns = {
        'state': fields.selection(
            [('draft', 'Open'),
             ('locked_temp', 'Locked Temporarily'),
             ('open_ext_period', 'Open Extension Period'),
             ('close_ext_period', 'Close Extension Period'),
             ('first_lock', 'First Lock'),
             ('done', 'Final Lock')],
            'State', size=64, readonly=True),
    }

    def action_locked_temporarily(self, cr, uid, ids, context=None):
        """Move to 'Locked Temporarily' after checking that every period
        of the fiscal year(s) has been closed; raise otherwise."""
        period_obj = self.pool.get('account.period')
        open_periods = period_obj.search(
            cr, uid,
            [('state', '=', 'draft'), ('fiscalyear_id', 'in', ids)],
            context=context)
        if open_periods:
            raise orm.except_orm(_('Error'), _('You Must Close Open Periods First'))
        return self.write(cr, uid, ids, {'state': 'locked_temp'}, context=context)

    def action_close_extension_period(self, cr, uid, ids, context=None):
        """Move to 'Close Extension Period'."""
        return self.write(cr, uid, ids, {'state': 'close_ext_period'}, context=context)

    def action_first_lock(self, cr, uid, ids, context=None):
        """Move to 'First Lock' and hand control to the P&L closing action."""
        self.write(cr, uid, ids, {'state': 'first_lock'}, context=context)
        return {
            'id': 'account_custom.action_account_pl_close',
            'context': {'default_fiscalyear_id': ids},
        }
#----------------------------------------------------------
# Account Move Line(Inherit)
#----------------------------------------------------------
class account_move_line(osv.Model):
_inherit = 'account.move.line'
def _query_get(self, cr, uid, obj='l', context=None):
"""
use in account arabic reports and chart of account to balance the credit and debit
@return: string of the where statement
"""
fiscalyear_obj = self.pool.get('account.fiscalyear')
company_obj = self.pool.get('res.company')
fiscalperiod_obj = self.pool.get('account.period')
account_obj = self.pool.get('account.account')
journal_obj = self.pool.get('account.journal')
initial_bal = context.get('initial_bal', False)
fiscalyear_ids = []
if context is None:
context = {}
#Only Valid Move Lines (BALANCE MOVES)
query = obj+".state <> 'draft' "
#Filter by Company
if context.get('company_id', False):
query += " AND " +obj+".company_id = %s" % context['company_id']
if context.get('unit_type', False):
if context.get('unit_type', False) == 'ministry':
company_ids = company_obj.search(cr,uid, [ ('type', '=', 'other')])
elif context.get('unit_type', False) == 'locality':
company_ids = company_obj.search(cr,uid, [ ('type', '=', 'loc_sub')])
else:
types=('other','loc_sub')
company_ids = company_obj.search(cr,uid, [ ('type', 'in', types)])
company_ids2 = ','.join(map(str, company_ids))
query += " AND " +obj+".company_id in (%s)" % company_ids2
#Filter by Move State
if context.get('state', False):
if type(context['state']) in (list,tuple) :
query += " AND "+obj+".move_id IN (SELECT id FROM account_move WHERE state !='reversed') "
# query += " AND "+obj+".move_id IN (SELECT id FROM account_move WHERE state IN ("+st+")) "
elif context['state'].lower() != 'all':
query += " AND "+obj+".move_id IN (SELECT id FROM account_move WHERE account_move.state != '"+context['state']+"') "
#Get Selected FiscalYear
if not context.get('fiscalyear', False):
if context.get('all_fiscalyear', False):
fiscalyear_ids = fiscalyear_obj.search(cr, uid, [('company_id', 'in', company_ids)])
else:
if context.get('date_from', False):
#fiscalyear_ids = fiscalyear_obj.search(cr, uid, [])
date_from=context.get('date_from', False)
date_from2 = datetime.strptime( date_from, '%Y-%m-%d')
f_code=date_from2.year
fiscalyear_ids = fiscalyear_obj.search(cr,uid, [('company_id', 'in', company_ids), ('code', '=', f_code)])
else:
fiscalyear_ids = fiscalyear_obj.search(cr, uid, [('company_id', 'in', company_ids)])
else:
#make the context['fiscalyear'] in one dimention list or ids
fiscalyear_ids = type(context['fiscalyear']) is list and context['fiscalyear'] or [context['fiscalyear']]
fiscalyear_clause = (','.join(map(str, fiscalyear_ids)))
#Duration Filters
if context.get('date_from', False) and context.get('date_to', False):
if initial_bal:
init_period = fiscalperiod_obj.search(cr, uid, [('special', '=', True), ('fiscalyear_id', 'in', fiscalyear_ids)])
date_start = fiscalperiod_obj.browse(cr, uid, init_period[0], context=context).date_start
query += " AND "+obj+".period_id IN (SELECT id FROM account_period WHERE fiscalyear_id IN (%s) ) " % (fiscalyear_clause,)
date_from=context['date_from']
if context.get('date_from', False)==date_start:
date_1 = datetime.strptime(date_from, DEFAULT_SERVER_DATE_FORMAT)
date_from= date_1+timedelta(days=1)
query += " AND " +obj+".move_id IN (SELECT id FROM account_move WHERE date <='%s') " %(context['date_from'],)
query += " AND " +obj+".move_id IN (SELECT id FROM account_move WHERE date <'%s') " %(date_from,)
else:
if context['type']=='statement':
query += " AND " +obj+".move_id IN (SELECT id FROM account_move WHERE date >= '%s' AND date <= '%s') "%(context['date_from'],context['date_to'])
elif context['type']=='balance':
init_period = fiscalperiod_obj.search(cr, uid, [('special', '=', True), ('fiscalyear_id', 'in', fiscalyear_ids)])
date_start = fiscalperiod_obj.browse(cr, uid, init_period[0], context=context).date_start
date_from=context['date_from']
if context.get('date_from', False)==date_start:
date_1 = datetime.strptime(date_from, DEFAULT_SERVER_DATE_FORMAT)
date_from= date_1+timedelta(days=1)
query += " AND " +obj+".move_id IN (SELECT id FROM account_move WHERE date > '%s' AND date <= '%s') "%(date_from,context['date_to'])
query += " AND " +obj+".move_id IN (SELECT id FROM account_move WHERE date >= '%s' AND date <= '%s') "%(context['date_from'],context['date_to'])
if context.get('period_from', False) and context.get('period_to', False) and not context.get('periods', False) and context.get('type', False)!='statement':
if initial_bal:
period_company_id = fiscalperiod_obj.browse(cr, uid, context['period_from'], context=context).company_id.id
first_period = fiscalperiod_obj.search(cr, uid, [('company_id', '=', period_company_id), ('fiscalyear_id', 'in', fiscalyear_ids)], order='date_start')
context['periods'] = fiscalperiod_obj.build_ctx_periods(cr, uid, first_period[0], first_period[first_period.index(context['period_from'])-1])
else:
context['periods'] = fiscalperiod_obj.build_ctx_periods(cr, uid, context['period_from'], context['period_to'])
if context.get('periods', False) and context.get('type', False)!='statement':
period_ids = ','.join(map(str, context['periods']))
query += " AND "+obj+".period_id IN (SELECT id FROM account_period WHERE fiscalyear_id IN (%s) AND id IN (%s)) " % (fiscalyear_clause, period_ids)
else:
sub_query = ""
if not context.get('date_from', False) or context.get('period_from', False):
special = initial_bal and (not context.get('date_from', False))
sub_query = "AND special = %s"%(special,)
query += " AND "+obj+".period_id IN (SELECT id FROM account_period WHERE fiscalyear_id IN (%s) %s) " % (fiscalyear_clause, sub_query)
#Filter by Journal
#situation_journal = set(journal_obj.search(cr, uid, [('type', '=', 'situation')], context=context))
#selected_journals = set(context.get('journal_ids', False) or journal_obj.search(cr, uid, [], context=context))
#TEST: situation journal when opening balance & not
#journal_ids = context.get('selected_journals', False) and selected_journals or \
# (initial_bal and list(selected_journals | situation_journal) or list(selected_journals-situation_journal))
# if journal_ids:
# query += ' AND '+obj+'.journal_id IN (%s) ' % ','.join(map(str, journal_ids))
#if not context.get('selected_journals', False) and not initial_bal and situation_journal:
#query += ' AND '+obj+'.journal_id NOT IN (%s) ' % ','.join(map(str, situation_journal))
#Filter by chart of Account
if context.get('chart_account_id', False):
child_ids = account_obj._get_children_and_consol(cr, uid, [context['chart_account_id']], context=context)
query += ' AND '+obj+'.account_id IN (%s) ' % ','.join(map(str, child_ids))
#Filter by Move Line Statement
if 'statement_id' in context:
if context.get('statement_id', False):
query += ' AND '+obj+'.statement_id IN (%s) ' % ','.join(map(str, context['statement_id']))
else:
query += ' AND '+obj+'.statement_id IS NULL '
#Filter by Move Line
if context.get('move_line_ids', False):
query += ' AND '+obj+'.id IN (%s) ' % ','.join(map(str, context['move_line_ids']))
#Filter by Analytic Account Type
if context.get('analytic_display', False):
query += ' AND '+obj+".analytic_account_id IN (SELECT id FROM account_analytic_account WHERE analytic_type=%s) " % (context.get('analytic_display', False).id,)
return query
class account_voucher(osv.osv):
    """
    Customize account voucher.

    Extends the stock account.voucher model with a link back to the
    invoice the voucher was generated from.
    """
    _inherit='account.voucher'
    _columns = {
        # Invoice this voucher pays; set by the payment workflow.
        'invoice_id': fields.many2one('account.invoice','Invoice'),
    }
class res_company(osv.Model):
    """
    Inherit company model to add restricted payment scheduler as configurable option
    """
    _inherit = "res.company"
    _columns = {
        # Number of periods between scheduled payments (company-wide setting,
        # surfaced in account.config.settings via a related field).
        'interval_number': fields.integer('Interval Number'),
    }
    _defaults = {
        # Default scheduler interval for newly created companies.
        'interval_number': 2,
    }
#----------------------------------------------------------
# Account Config (Inherit)
#----------------------------------------------------------
class account_config_settings(osv.osv_memory):
    """
    Inherit the account configuration settings wizard to expose the
    company's restricted payment scheduler interval as an editable field.
    """
    _inherit = 'account.config.settings'
    _columns = {
        'interval_number': fields.related('company_id', 'interval_number', type='integer', string='Interval Number'),
    }

    def onchange_company_id(self, cr, uid, ids, company_id, context=None):
        """
        Extend the parent onchange so the interval_number field tracks
        the newly selected company.
        @param company_id: id of the selected company (may be falsy)
        @return: onchange dict of the form {'value': {...}}
        """
        # Start from the related-field values computed by the parent wizard.
        parent_result = super(account_config_settings, self).onchange_company_id(
            cr, uid, ids, company_id, context=context)
        vals = parent_result.get('value', {})
        if company_id:
            company = self.pool.get('res.company').browse(
                cr, uid, company_id, context=context)
            vals['interval_number'] = company.interval_number
        return {'value': vals}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| [
"bakry@exp-sa.com"
] | bakry@exp-sa.com |
08241ca33d0d08c4b7977714c1b9eef5676f3ab5 | dd694c300d0380df35c62f7ab7667346214ea296 | /rwlaunchpad/test/mano_ut.py | ee095d1f55d1a6facbe64532bbb9685047d453d1 | [] | no_license | RIFTIO/SO | 9412858132db0430217a2c5c55fb4b1db89290fa | 697160573011d47f45bd0b955a291a46063d3b15 | refs/heads/RIFT.ware-4.3.3 | 2021-06-22T13:42:40.860291 | 2016-12-29T21:47:25 | 2016-12-29T21:47:25 | 75,762,640 | 1 | 1 | null | 2017-02-08T19:31:28 | 2016-12-06T19:11:20 | Python | UTF-8 | Python | false | false | 45,990 | py | #!/usr/bin/env python3
#
# Copyright 2016 RIFT.IO Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import asyncio
import os
import sys
import unittest
import uuid
import xmlrunner
import argparse
import logging
import time
import types
import gi
gi.require_version('RwCloudYang', '1.0')
gi.require_version('RwDts', '1.0')
gi.require_version('RwNsmYang', '1.0')
gi.require_version('RwLaunchpadYang', '1.0')
gi.require_version('RwResourceMgrYang', '1.0')
gi.require_version('RwcalYang', '1.0')
gi.require_version('RwNsrYang', '1.0')
gi.require_version('NsrYang', '1.0')
gi.require_version('RwlogMgmtYang', '1.0')
from gi.repository import (
RwCloudYang as rwcloudyang,
RwDts as rwdts,
RwLaunchpadYang as launchpadyang,
RwNsmYang as rwnsmyang,
RwNsrYang as rwnsryang,
NsrYang as nsryang,
RwResourceMgrYang as rmgryang,
RwcalYang as rwcalyang,
RwConfigAgentYang as rwcfg_agent,
RwlogMgmtYang
)
from gi.repository.RwTypes import RwStatus
import rift.mano.examples.ping_pong_nsd as ping_pong_nsd
import rift.tasklets
import rift.test.dts
import rw_peas
# Credentials for the live OpenStack instance exercised by the
# openstack_static / openstack_dynamic cloud-account tests below.
openstack_info = {
    'username': 'pluto',
    'password': 'mypasswd',
    'auth_url': 'http://10.66.4.27:5000/v3/',
    'project_name': 'demo',
    'mgmt_network': 'private',
}


# asyncio.ensure_future() appeared in Python 3.4.4; on older interpreters
# fall back to the legacy alias.  The alias must be fetched with getattr()
# because "async" became a reserved keyword in Python 3.7, making the
# literal attribute access "asyncio.async" a SyntaxError on modern
# interpreters even though this branch would never run there.
if sys.version_info < (3, 4, 4):
    asyncio.ensure_future = getattr(asyncio, "async")
class XPaths(object):
    """XPath builders for the yang records these tests read and write.

    Every builder returns the unkeyed (wildcard) path when its key argument
    is omitted, and the fully keyed path otherwise.
    """

    @staticmethod
    def _key(fmt, value):
        """Render key predicate *fmt* when *value* is supplied, else ''."""
        return fmt.format(value) if value is not None else ""

    @staticmethod
    def nsd(k=None):
        return "C,/nsd:nsd-catalog/nsd:nsd" + XPaths._key("[nsd:id='{}']", k)

    @staticmethod
    def vld(k=None):
        return "C,/vld:vld-catalog/vld:vld" + XPaths._key("[vld:id='{}']", k)

    @staticmethod
    def vnfd(k=None):
        return "C,/vnfd:vnfd-catalog/vnfd:vnfd" + XPaths._key("[vnfd:id='{}']", k)

    @staticmethod
    def vnfr(k=None):
        return "D,/vnfr:vnfr-catalog/vnfr:vnfr" + XPaths._key("[vnfr:id='{}']", k)

    @staticmethod
    def vlr(k=None):
        return "D,/vlr:vlr-catalog/vlr:vlr" + XPaths._key("[vlr:id='{}']", k)

    @staticmethod
    def nsd_ref_count(k=None):
        return ("D,/nsr:ns-instance-opdata/rw-nsr:nsd-ref-count"
                + XPaths._key("[rw-nsr:nsd-id-ref='{}']", k))

    @staticmethod
    def vnfd_ref_count(k=None):
        # NOTE(review): key predicate uses the rw-nsr prefix/nsd-id-ref even
        # though this is a vnfd ref count — verify against the yang model.
        return ("D,/vnfr:vnfr-catalog/rw-vnfr:vnfd-ref-count"
                + XPaths._key("[rw-nsr:nsd-id-ref='{}']", k))

    @staticmethod
    def nsr_config(k=None):
        return "C,/nsr:ns-instance-config/nsr:nsr" + XPaths._key("[nsr:id='{}']", k)

    @staticmethod
    def nsr_opdata(k=None):
        return ("D,/nsr:ns-instance-opdata/nsr:nsr"
                + XPaths._key("[nsr:ns-instance-config-ref='{}']", k))

    @staticmethod
    def nsr_config_status(k=None):
        # With no key this collapses to the plain nsr-opdata path (no
        # /config_status leaf is appended).
        return ("D,/nsr:ns-instance-opdata/nsr:nsr"
                + XPaths._key("[nsr:ns-instance-config-ref='{}']/config_status", k))

    @staticmethod
    def cm_state(k=None):
        return ("D,/rw-conman:cm-state/rw-conman:cm-nsr"
                + XPaths._key("[rw-conman:id='{}']", k))

    @staticmethod
    def nsr_scale_group_instance(nsr_id=None, group_name=None, index=None):
        return ("D,/nsr:ns-instance-opdata/nsr:nsr"
                + XPaths._key("[nsr:ns-instance-config-ref='{}']", nsr_id)
                + "/nsr:scaling-group-record"
                + XPaths._key("[nsr:scaling-group-name-ref='{}']", group_name)
                + "/nsr:instance"
                + XPaths._key("[nsr:scaling-group-index-ref='{}']", index))

    @staticmethod
    def nsr_scale_group_instance_config(nsr_id=None, group_name=None, index=None):
        return ("C,/nsr:ns-instance-config/nsr:nsr"
                + XPaths._key("[nsr:id='{}']", nsr_id)
                + "/nsr:scaling-group"
                + XPaths._key("[nsr:scaling-group-name-ref='{}']", group_name)
                + "/nsr:instance"
                + XPaths._key("[nsr:index='{}']", index))
class ManoQuerier(object):
    """Convenience wrapper around DTS queries for MANO records.

    Bundles the read/delete/update operations the tests need so callers get
    plain record lists instead of raw DTS result iterators.
    """

    def __init__(self, log, dts):
        self.log = log
        self.dts = dts

    @asyncio.coroutine
    def _read_query(self, xpath, do_trace=False):
        """Run a read query on *xpath* and collect all results into a list."""
        self.log.debug("Running XPATH read query: %s (trace: %s)", xpath, do_trace)
        flags = rwdts.XactFlag.MERGE
        if do_trace:
            flags += rwdts.XactFlag.TRACE
        res_iter = yield from self.dts.query_read(xpath, flags=flags)

        records = []
        for fut in res_iter:
            query_result = yield from fut
            if query_result is not None:
                records.append(query_result.result)

        return records

    @asyncio.coroutine
    def _advise_delete(self, xpath):
        """Delete *xpath* inside a transaction with the ADVISE flag."""
        with self.dts.transaction() as xact:
            yield from self.dts.query_delete(
                xpath,
                rwdts.XactFlag.ADVISE,
            )

    @asyncio.coroutine
    def _advise_update(self, xpath, flags, msg):
        """Update *xpath* with *msg* inside a transaction."""
        with self.dts.transaction() as xact:
            yield from self.dts.query_update(
                xpath,
                flags,
                msg,
            )

    @asyncio.coroutine
    def get_cm_state(self, nsr_id=None):
        return (yield from self._read_query(XPaths.cm_state(nsr_id), False))

    @asyncio.coroutine
    def get_nsr_opdatas(self, nsr_id=None):
        return (yield from self._read_query(XPaths.nsr_opdata(nsr_id), False))

    @asyncio.coroutine
    def get_nsr_scale_group_instance_opdata(self, nsr_id=None, group_name=None, index=None):
        return (yield from self._read_query(
            XPaths.nsr_scale_group_instance(nsr_id, group_name, index), False))

    @asyncio.coroutine
    def get_nsr_configs(self, nsr_id=None):
        return (yield from self._read_query(XPaths.nsr_config(nsr_id)))

    @asyncio.coroutine
    def get_nsr_config_status(self, nsr_id=None):
        return (yield from self._read_query(XPaths.nsr_config_status(nsr_id)))

    @asyncio.coroutine
    def get_vnfrs(self, vnfr_id=None):
        return (yield from self._read_query(XPaths.vnfr(vnfr_id)))

    @asyncio.coroutine
    def get_vlrs(self, vlr_id=None):
        return (yield from self._read_query(XPaths.vlr(vlr_id)))

    @asyncio.coroutine
    def get_nsd_ref_counts(self, nsd_id=None):
        return (yield from self._read_query(XPaths.nsd_ref_count(nsd_id)))

    @asyncio.coroutine
    def get_vnfd_ref_counts(self, vnfd_id=None):
        return (yield from self._read_query(XPaths.vnfd_ref_count(vnfd_id)))

    @asyncio.coroutine
    def delete_nsr(self, nsr_id):
        # NS termination is driven by removing the instance-config entry;
        # no ADVISE flag here (flags argument is 0), matching the NSM flow.
        with self.dts.transaction() as xact:
            yield from self.dts.query_delete(
                XPaths.nsr_config(nsr_id),
                0,
            )

    @asyncio.coroutine
    def delete_nsd(self, nsd_id):
        nsd_xpath = XPaths.nsd(nsd_id)
        self.log.debug("Attempting to delete NSD with path = %s", nsd_xpath)
        yield from self._advise_delete(nsd_xpath)

    @asyncio.coroutine
    def delete_vnfd(self, vnfd_id):
        vnfd_xpath = XPaths.vnfd(vnfd_id)
        self.log.debug("Attempting to delete VNFD with path = %s", vnfd_xpath)
        yield from self._advise_delete(vnfd_xpath)

    @asyncio.coroutine
    def update_nsd(self, nsd_id, nsd_msg):
        nsd_xpath = XPaths.nsd(nsd_id)
        self.log.debug("Attempting to update NSD with path = %s", nsd_xpath)
        yield from self._advise_update(nsd_xpath, rwdts.XactFlag.ADVISE, nsd_msg)

    @asyncio.coroutine
    def update_vnfd(self, vnfd_id, vnfd_msg):
        vnfd_xpath = XPaths.vnfd(vnfd_id)
        # NOTE(review): log text says "delete" but this performs an update.
        self.log.debug("Attempting to delete VNFD with path = %s", vnfd_xpath)
        yield from self._advise_update(vnfd_xpath, rwdts.XactFlag.ADVISE, vnfd_msg)

    @asyncio.coroutine
    def update_nsr_config(self, nsr_id, nsr_msg):
        nsr_xpath = XPaths.nsr_config(nsr_id)
        self.log.debug("Attempting to update NSR with path = %s", nsr_xpath)
        yield from self._advise_update(
            nsr_xpath, rwdts.XactFlag.ADVISE | rwdts.XactFlag.REPLACE, nsr_msg)
class ManoTestCase(rift.test.dts.AbstractDTSTest):
    """DTS-backed assertion helpers for NSR/VNFR/VLR records.

    NOTE(review): a second ``class ManoTestCase`` is defined later in this
    module and rebinds the name, so this helper class is effectively
    shadowed dead code — confirm whether the two were meant to be merged.
    All helpers rely on ``self.querier`` (a ManoQuerier) being set up by
    the concrete test case.
    """

    @asyncio.coroutine
    def verify_nsr_state(self, nsr_id, state):
        # Exactly one opdata record must exist and be in *state*.
        nsrs = yield from self.querier.get_nsr_opdatas(nsr_id)
        self.assertEqual(1, len(nsrs))
        nsr = nsrs[0]
        self.log.debug("Got nsr = %s", nsr)
        self.assertEqual(state, nsr.operational_status)

    @asyncio.coroutine
    def verify_vlr_state(self, vlr_id, state):
        vlrs = yield from self.querier.get_vlrs(vlr_id)
        self.assertEqual(1, len(vlrs))
        vlr = vlrs[0]
        self.assertEqual(state, vlr.operational_status)

    def verify_vdu_state(self, vdu, state):
        # Synchronous check: *vdu* is a record the caller already fetched.
        self.assertEqual(state, vdu.operational_status)

    @asyncio.coroutine
    def verify_vnf_state(self, vnfr_id, state):
        vnfrs = yield from self.querier.get_vnfrs(vnfr_id)
        self.assertEqual(1, len(vnfrs))
        vnfr = vnfrs[0]
        self.assertEqual(state, vnfr.operational_status)

    @asyncio.coroutine
    def terminate_nsr(self, nsr_id):
        # Deleting the NSR instance-config triggers NS termination.
        self.log.debug("Terminating nsr id: %s", nsr_id)
        yield from self.querier.delete_nsr(nsr_id)

    @asyncio.coroutine
    def verify_nsr_deleted(self, nsr_id):
        # After termination, both opdata and config records must be gone.
        nsr_opdatas = yield from self.querier.get_nsr_opdatas(nsr_id)
        self.assertEqual(0, len(nsr_opdatas))
        nsr_configs = yield from self.querier.get_nsr_configs(nsr_id)
        self.assertEqual(0, len(nsr_configs))

    @asyncio.coroutine
    def verify_num_vlrs(self, num_vlrs):
        vlrs = yield from self.querier.get_vlrs()
        self.assertEqual(num_vlrs, len(vlrs))

    @asyncio.coroutine
    def get_nsr_vlrs(self, nsr_id):
        # VLR ids referenced by the NSR opdata record.
        nsrs = yield from self.querier.get_nsr_opdatas(nsr_id)
        return [v.vlr_ref for v in nsrs[0].vlr]

    @asyncio.coroutine
    def get_nsr_vnfs(self, nsr_id):
        nsrs = yield from self.querier.get_nsr_opdatas(nsr_id)
        return nsrs[0].constituent_vnfr_ref

    @asyncio.coroutine
    def get_vnf_vlrs(self, vnfr_id):
        # Internal VLR ids referenced by the VNFR.
        vnfrs = yield from self.querier.get_vnfrs(vnfr_id)
        return [i.vlr_ref for i in vnfrs[0].internal_vlr]

    @asyncio.coroutine
    def verify_num_nsr_vlrs(self, nsr_id, num_vlrs):
        vlrs = yield from self.get_nsr_vlrs(nsr_id)
        self.assertEqual(num_vlrs, len(vlrs))

    @asyncio.coroutine
    def verify_num_nsr_vnfrs(self, nsr_id, num_vnfs):
        vnfs = yield from self.get_nsr_vnfs(nsr_id)
        self.assertEqual(num_vnfs, len(vnfs))

    @asyncio.coroutine
    def verify_num_vnfr_vlrs(self, vnfr_id, num_vlrs):
        vlrs = yield from self.get_vnf_vlrs(vnfr_id)
        self.assertEqual(num_vlrs, len(vlrs))

    @asyncio.coroutine
    def get_vnf_vdus(self, vnfr_id):
        vnfrs = yield from self.querier.get_vnfrs(vnfr_id)
        return [i for i in vnfrs[0].vdur]

    @asyncio.coroutine
    def verify_num_vnfr_vdus(self, vnfr_id, num_vdus):
        vdus = yield from self.get_vnf_vdus(vnfr_id)
        self.assertEqual(num_vdus, len(vdus))

    @asyncio.coroutine
    def verify_num_vnfrs(self, num_vnfrs):
        vnfrs = yield from self.querier.get_vnfrs()
        self.assertEqual(num_vnfrs, len(vnfrs))

    @asyncio.coroutine
    def verify_nsd_ref_count(self, nsd_id, num_ref):
        nsd_ref_counts = yield from self.querier.get_nsd_ref_counts(nsd_id)
        self.assertEqual(num_ref, nsd_ref_counts[0].instance_ref_count)
class DescriptorPublisher(object):
    """Registers descriptor paths as a DTS publisher and creates elements."""

    def __init__(self, log, loop, dts):
        self.log = log
        self.loop = loop
        self.dts = dts
        self._registrations = []

    @asyncio.coroutine
    def publish(self, w_path, path, desc):
        """Register *w_path* as a publisher and create *desc* at *path*.

        Blocks until the registration is ready and the element has been
        created, then returns the registration handle.
        """
        published_evt = asyncio.Event(loop=self.loop)

        @asyncio.coroutine
        def on_ready(regh, status):
            # Registration is live: create the descriptor element inside a
            # transaction and unblock the waiting publish() coroutine.
            self.log.debug("Create element: %s, obj-type:%s obj:%s",
                           path, type(desc), desc)
            with self.dts.transaction() as xact:
                regh.create_element(path, desc, xact.xact)
                self.log.debug("Created element: %s, obj:%s", path, desc)
                published_evt.set()

        self.log.debug("Registering path: %s, obj:%s", w_path, desc)
        registration = yield from self.dts.register(
            w_path,
            rift.tasklets.DTS.RegistrationHandler(on_ready=on_ready),
            flags=rwdts.Flag.PUBLISHER | rwdts.Flag.NO_PREP_READ,
        )
        self._registrations.append(registration)
        self.log.debug("Registered path : %s", w_path)

        yield from published_evt.wait()
        return registration

    def unpublish_all(self):
        """Deregister every registration created by publish()."""
        self.log.debug("Deregistering all published descriptors")
        for registration in self._registrations:
            registration.deregister()
class PingPongNsrConfigPublisher(object):
    """Builds and publishes a ping-pong NS instance-config record over DTS,
    with helpers to mutate it afterwards (scaling groups, extra VLs,
    per-VNF cloud account maps).
    """

    XPATH = "C,/nsr:ns-instance-config"

    def __init__(self, log, loop, dts, ping_pong, cloud_account_name):
        self.dts = dts
        self.log = log
        self.loop = loop
        self.ref = None

        self.querier = ManoQuerier(log, dts)

        self.nsr_config = rwnsryang.YangData_Nsr_NsInstanceConfig()

        # Build a single NSR entry referencing the generated ping-pong NSD.
        nsr = rwnsryang.YangData_Nsr_NsInstanceConfig_Nsr()
        nsr.id = str(uuid.uuid4())
        nsr.name = "ns1.{}".format(nsr.id)
        nsr.nsd = nsryang.YangData_Nsr_NsInstanceConfig_Nsr_Nsd()
        nsr.nsd.from_dict(ping_pong.ping_pong_nsd.nsd.as_dict())
        nsr.cloud_account = cloud_account_name

        # Pin the first constituent VNF to the RiftCA config agent.
        nsr.vnf_cloud_account_map.add().from_dict({
            'member_vnf_index_ref': nsr.nsd.constituent_vnfd[0].member_vnf_index,
            'config_agent_account': 'RiftCA',
            #'cloud_account':'mock_account1'
        })

        # Override the NSD name via an input parameter.
        inputs = nsryang.YangData_Nsr_NsInstanceConfig_Nsr_InputParameter()
        inputs.xpath = "/nsd:nsd-catalog/nsd:nsd[nsd:id={}]/nsd:name".format(ping_pong.nsd_id)
        inputs.value = "inigo montoya"

        # NSD-level placement groups mapped to openstack host aggregates.
        fast_cpu = {'metadata_key': 'FASTCPU', 'metadata_value': 'True'}
        self.create_nsd_placement_group_map(nsr,
                                            group_name = 'Orcus',
                                            cloud_type = 'openstack',
                                            construct_type = 'host_aggregate',
                                            construct_value = [fast_cpu])

        fast_storage = {'metadata_key': 'FASTSSD', 'metadata_value': 'True'}
        self.create_nsd_placement_group_map(nsr,
                                            group_name = 'Quaoar',
                                            cloud_type = 'openstack',
                                            construct_type = 'host_aggregate',
                                            construct_value = [fast_storage])

        # VNFD-level placement groups for the ping and pong VNFDs.
        fast_cpu = {'metadata_key': 'BLUE_HW', 'metadata_value': 'True'}
        self.create_vnfd_placement_group_map(nsr,
                                             group_name = 'Eris',
                                             vnfd_id = ping_pong.ping_vnfd_id,
                                             cloud_type = 'openstack',
                                             construct_type = 'host_aggregate',
                                             construct_value = [fast_cpu])

        fast_storage = {'metadata_key': 'YELLOW_HW', 'metadata_value': 'True'}
        self.create_vnfd_placement_group_map(nsr,
                                             group_name = 'Weywot',
                                             vnfd_id = ping_pong.pong_vnfd_id,
                                             cloud_type = 'openstack',
                                             construct_type = 'host_aggregate',
                                             construct_value = [fast_storage])

        nsr.input_parameter.append(inputs)

        self._nsr = nsr
        self.nsr_config.nsr.append(nsr)

        self._ready_event = asyncio.Event(loop=self.loop)
        # Registration happens in the background; publish() waits on the event.
        asyncio.ensure_future(self.register(), loop=loop)

    @asyncio.coroutine
    def register(self):
        """Register this object as publisher of the ns-instance-config path."""
        @asyncio.coroutine
        def on_ready(regh, status):
            self._ready_event.set()

        self.log.debug("Registering path: %s", PingPongNsrConfigPublisher.XPATH)
        self.reg = yield from self.dts.register(
            PingPongNsrConfigPublisher.XPATH,
            flags=rwdts.Flag.PUBLISHER,
            handler=rift.tasklets.DTS.RegistrationHandler(
                on_ready=on_ready,
            ),
        )

    @asyncio.coroutine
    def publish(self):
        """Publish the prepared NSR config and return the NSR id."""
        self.log.debug("Publishing NSR: {}".format(self.nsr_config))
        # Wait for the background registration to complete first.
        yield from self._ready_event.wait()
        with self.dts.transaction() as xact:
            self.reg.create_element(
                PingPongNsrConfigPublisher.XPATH,
                self.nsr_config,
                xact=xact.xact,
            )
        return self._nsr.id

    @asyncio.coroutine
    def create_scale_group_instance(self, group_name, index):
        """Add a scaling-group instance to the NSR config and push the update.

        NOTE(review): the *index* argument is overwritten below, so instances
        are always created with index 1 — confirm whether intentional.
        """
        index = 1
        scaling_group = self.nsr_config.nsr[0].scaling_group.add()
        scaling_group.from_dict({
            "scaling_group_name_ref": group_name,
            "instance": [{"index": index}],
        })
        with self.dts.transaction() as xact:
            self.reg.update_element(
                PingPongNsrConfigPublisher.XPATH,
                self.nsr_config,
                xact=xact.xact,
            )
        return index

    def create_nsd_placement_group_map(self,
                                       nsr,
                                       group_name,
                                       cloud_type,
                                       construct_type,
                                       construct_value):
        # Map an NSD-level placement group onto a cloud construct
        # (e.g. construct_type 'host_aggregate' with metadata key/values).
        placement_group = nsr.nsd_placement_group_maps.add()
        placement_group.from_dict({
            "placement_group_ref" : group_name,
            "cloud_type" : cloud_type,
            construct_type : construct_value,
        })

    def create_vnfd_placement_group_map(self,
                                        nsr,
                                        group_name,
                                        vnfd_id,
                                        cloud_type,
                                        construct_type,
                                        construct_value):
        # Same as create_nsd_placement_group_map but scoped to one VNFD.
        placement_group = nsr.vnfd_placement_group_maps.add()
        placement_group.from_dict({
            "placement_group_ref" : group_name,
            "vnfd_id_ref" : vnfd_id,
            "cloud_type" : cloud_type,
            construct_type : construct_value,
        })

    @asyncio.coroutine
    def delete_scale_group_instance(self, group_name, index):
        """Delete one scaling-group instance directly via a keyed config path."""
        self.log.debug("Deleting scale group %s instance %s", group_name, index)
        #del self.nsr_config.nsr[0].scaling_group[0].instance[0]
        xpath = XPaths.nsr_scale_group_instance_config(self.nsr_config.nsr[0].id, group_name, index)
        yield from self.dts.query_delete(xpath, flags=rwdts.XactFlag.ADVISE)
        #with self.dts.transaction() as xact:
        #    self.reg.update_element(
        #            PingPongNsrConfigPublisher.XPATH,
        #            self.nsr_config,
        #            flags=rwdts.XactFlag.REPLACE,
        #            xact=xact.xact,
        #            )

    def deregister(self):
        # Safe to call even if register() never completed.
        if self.reg is not None:
            self.reg.deregister()

    def create_nsr_vl(self):
        # Append a toy VL to the in-memory NSD copy held by the NSR config.
        vld = self.nsr_config.nsr[0].nsd.vld.add()
        vld.id = 'ping_pong_vld_2'
        vld.name = 'ping_pong_vld_2'  # hard coded
        vld.short_name = vld.name
        vld.vendor = 'RIFT.io'
        vld.description = 'Toy VL'
        vld.version = '1.0'
        vld.type_yang = 'ELAN'

        # cpref = vld.vnfd_connection_point_ref.add()
        # cpref.member_vnf_index_ref = cp[0]
        # cpref.vnfd_id_ref = cp[1]
        # cpref.vnfd_connection_point_ref = cp[2]

        # NOTE: the name "vld" is reused here for the VL cloud-account map entry.
        vld = self.nsr_config.nsr[0].vl_cloud_account_map.add()
        vld.vld_id_ref = 'ping_pong_vld_2'
        vld.cloud_accounts = ["mock_account"]

    @asyncio.coroutine
    def add_nsr_vl(self):
        """Add the toy VL and push the updated NSR config through DTS."""
        self.create_nsr_vl()
        yield from self.querier.update_nsr_config(
            self.nsr_config.nsr[0].id,
            self.nsr_config.nsr[0],
        )

    @asyncio.coroutine
    def del_nsr_vl(self):
        """Remove the toy VL (if present) and push the updated NSR config."""
        for vld in self.nsr_config.nsr[0].nsd.vld:
            if vld.id == 'ping_pong_vld_2':
                self.nsr_config.nsr[0].nsd.vld.remove(vld)
                break

        yield from self.querier.update_nsr_config(
            self.nsr_config.nsr[0].id,
            self.nsr_config.nsr[0],
        )

    def update_vnf_cloud_map(self,vnf_cloud_map):
        """Set per-VNF cloud accounts in the local config (not pushed here).

        @param vnf_cloud_map: dict of member_vnf_index -> cloud account name
        """
        self.log.debug("Modifying NSR to add VNF cloud account map: {}".format(vnf_cloud_map))
        for vnf_index,cloud_acct in vnf_cloud_map.items():
            # Update an existing map entry for this index, or add a new one.
            vnf_maps = [vnf_map for vnf_map in self.nsr_config.nsr[0].vnf_cloud_account_map if vnf_index == vnf_map.member_vnf_index_ref]
            if vnf_maps:
                vnf_maps[0].cloud_account = cloud_acct
            else:
                self.nsr_config.nsr[0].vnf_cloud_account_map.add().from_dict({
                    'member_vnf_index_ref':vnf_index,
                    'cloud_account':cloud_acct
                })
class PingPongDescriptorPublisher(object):
    """Generates the ping-pong VNFDs/NSD and publishes them over DTS.

    NOTE(review): the *num_ping_vms* parameter is never used — the generator
    below is always invoked with num_vnf_vms=2; confirm intent.
    """

    def __init__(self, log, loop, dts, num_external_vlrs=1, num_internal_vlrs=1, num_ping_vms=1):
        self.log = log
        self.loop = loop
        self.dts = dts
        self.querier = ManoQuerier(self.log, self.dts)
        self.publisher = DescriptorPublisher(self.log, self.loop, self.dts)
        self.ping_vnfd, self.pong_vnfd, self.ping_pong_nsd = \
                ping_pong_nsd.generate_ping_pong_descriptors(
                        pingcount=1,
                        external_vlr_count=num_external_vlrs,
                        internal_vlr_count=num_internal_vlrs,
                        num_vnf_vms=2,
                        mano_ut=True,
                        use_scale_group=True,
                        use_mon_params=False,
                        )

        # On-disk directory where the NS config scripts are expected to live.
        self.config_dir = os.path.join(os.getenv('RIFT_ARTIFACTS'),
                                       "launchpad/libs",
                                       self.ping_pong_nsd.id,
                                       "config")

    @property
    def nsd_id(self):
        return self.ping_pong_nsd.id

    @property
    def ping_vnfd_id(self):
        return self.ping_vnfd.id

    @property
    def pong_vnfd_id(self):
        return self.pong_vnfd.id

    @asyncio.coroutine
    def publish_desciptors(self):
        # NOTE(review): method name misspells "descriptors"; kept because
        # callers elsewhere may use this spelling.
        # Publish ping_vnfd
        xpath = XPaths.vnfd(self.ping_vnfd_id)
        xpath_wild = XPaths.vnfd()
        for obj in self.ping_vnfd.descriptor.vnfd:
            self.log.debug("Publishing ping_vnfd path: %s - %s, type:%s, obj:%s",
                           xpath, xpath_wild, type(obj), obj)
            yield from self.publisher.publish(xpath_wild, xpath, obj)

        # Publish pong_vnfd
        xpath = XPaths.vnfd(self.pong_vnfd_id)
        xpath_wild = XPaths.vnfd()
        for obj in self.pong_vnfd.descriptor.vnfd:
            self.log.debug("Publishing pong_vnfd path: %s, wild_path: %s, obj:%s",
                           xpath, xpath_wild, obj)
            yield from self.publisher.publish(xpath_wild, xpath, obj)

        # Publish ping_pong_nsd
        xpath = XPaths.nsd(self.nsd_id)
        xpath_wild = XPaths.nsd()
        for obj in self.ping_pong_nsd.descriptor.nsd:
            self.log.debug("Publishing ping_pong nsd path: %s, wild_path: %s, obj:%s",
                           xpath, xpath_wild, obj)
            yield from self.publisher.publish(xpath_wild, xpath, obj)

        self.log.debug("DONE - publish_desciptors")

    def unpublish_descriptors(self):
        # Deregister all descriptor publisher registrations.
        self.publisher.unpublish_all()

    @asyncio.coroutine
    def delete_nsd(self):
        yield from self.querier.delete_nsd(self.ping_pong_nsd.id)

    @asyncio.coroutine
    def delete_ping_vnfd(self):
        yield from self.querier.delete_vnfd(self.ping_vnfd.id)

    @asyncio.coroutine
    def update_nsd(self):
        yield from self.querier.update_nsd(
            self.ping_pong_nsd.id,
            self.ping_pong_nsd.descriptor.nsd[0]
        )

    @asyncio.coroutine
    def update_ping_vnfd(self):
        yield from self.querier.update_vnfd(
            self.ping_vnfd.id,
            self.ping_vnfd.descriptor.vnfd[0]
        )
class ManoTestCase(rift.test.dts.AbstractDTSTest):
"""
DTS GI interface unittests
Note: Each tests uses a list of asyncio.Events for staging through the
test. These are required here because we are bring up each coroutine
("tasklet") at the same time and are not implementing any re-try
mechanisms. For instance, this is used in numerous tests to make sure that
a publisher is up and ready before the subscriber sends queries. Such
event lists should not be used in production software.
"""
@classmethod
def configure_suite(cls, rwmain):
vns_dir = os.environ.get('VNS_DIR')
vnfm_dir = os.environ.get('VNFM_DIR')
nsm_dir = os.environ.get('NSM_DIR')
rm_dir = os.environ.get('RM_DIR')
rwmain.add_tasklet(vns_dir, 'rwvnstasklet')
rwmain.add_tasklet(vnfm_dir, 'rwvnfmtasklet')
rwmain.add_tasklet(nsm_dir, 'rwnsmtasklet')
rwmain.add_tasklet(rm_dir, 'rwresmgrtasklet')
rwmain.add_tasklet(rm_dir, 'rwconmantasklet')
    @classmethod
    def configure_schema(cls):
        # Yang schema the DTS test framework loads for this suite.
        return rwnsmyang.get_schema()
    @classmethod
    def configure_timeout(cls):
        # Overall suite timeout in seconds; NS instantiation is slow.
        return 240
@staticmethod
def get_cal_account(account_type, account_name):
"""
Creates an object for class RwcalYang.Clo
"""
account = rwcloudyang.CloudAccount()
if account_type == 'mock':
account.name = account_name
account.account_type = "mock"
account.mock.username = "mock_user"
elif ((account_type == 'openstack_static') or (account_type == 'openstack_dynamic')):
account.name = account_name
account.account_type = 'openstack'
account.openstack.key = openstack_info['username']
account.openstack.secret = openstack_info['password']
account.openstack.auth_url = openstack_info['auth_url']
account.openstack.tenant = openstack_info['project_name']
account.openstack.mgmt_network = openstack_info['mgmt_network']
return account
    @asyncio.coroutine
    def configure_cloud_account(self, dts, cloud_type, cloud_name="cloud1"):
        """Create a cloud account of *cloud_type* named *cloud_name* via DTS."""
        account = self.get_cal_account(cloud_type, cloud_name)
        account_xpath = "C,/rw-cloud:cloud/rw-cloud:account[rw-cloud:name='{}']".format(cloud_name)
        self.log.info("Configuring cloud-account: %s", account)
        yield from dts.query_create(account_xpath,
                                    rwdts.XactFlag.ADVISE,
                                    account)
    @asyncio.coroutine
    def wait_tasklets(self):
        # Crude startup barrier: give the tasklets time to register with DTS.
        yield from asyncio.sleep(5, loop=self.loop)
    def configure_test(self, loop, test_id):
        """Per-test setup: create the DTS handle, descriptor/NSR publishers
        and the record querier used by the assertions.
        """
        self.log.debug("STARTING - %s", self.id())
        self.tinfo = self.new_tinfo(self.id())
        self.dts = rift.tasklets.DTS(self.tinfo, self.schema, self.loop)
        self.ping_pong = PingPongDescriptorPublisher(self.log, self.loop, self.dts)
        self.querier = ManoQuerier(self.log, self.dts)
        # NSR config publisher bound to the mock cloud account.
        self.nsr_publisher = PingPongNsrConfigPublisher(
            self.log,
            loop,
            self.dts,
            self.ping_pong,
            "mock_account",
        )
def test_create_nsr_record(self):
@asyncio.coroutine
def verify_cm_state(termination=False, nsrid=None):
self.log.debug("Verifying cm_state path = %s", XPaths.cm_state(nsrid))
#print("###>>> Verifying cm_state path:", XPaths.cm_state(nsrid))
loop_count = 10
loop_sleep = 10
while loop_count:
yield from asyncio.sleep(loop_sleep, loop=self.loop)
loop_count -= 1
cm_nsr = None
cm_nsr_i = yield from self.querier.get_cm_state(nsr_id=nsrid)
if (cm_nsr_i is not None and len(cm_nsr_i) != 0):
self.assertEqual(1, len(cm_nsr_i))
cm_nsr = cm_nsr_i[0].as_dict()
#print("###>>> cm_nsr=", cm_nsr)
if termination:
if len(cm_nsr_i) == 0:
print("\n###>>> cm-state NSR deleted OK <<<###\n")
return
elif (cm_nsr is not None and
'state' in cm_nsr and
(cm_nsr['state'] == 'ready')):
self.log.debug("Got cm_nsr record %s", cm_nsr)
print("\n###>>> cm-state NSR 'ready' OK <<<###\n")
return
# if (len(cm_nsr_i) == 1 and cm_nsr_i[0].state == 'ready'):
# self.log.debug("Got cm_nsr record %s", cm_nsr)
# else:
# yield from asyncio.sleep(10, loop=self.loop)
print("###>>> Failed cm-state, termination:", termination)
self.assertEqual(1, loop_count)
@asyncio.coroutine
def verify_nsr_opdata(termination=False):
self.log.debug("Verifying nsr opdata path = %s", XPaths.nsr_opdata())
while True:
nsrs = yield from self.querier.get_nsr_opdatas()
if termination:
if len(nsrs) != 0:
for i in range(10):
nsrs = yield from self.querier.get_nsr_opdatas()
if len(nsrs) == 0:
self.log.debug("No active NSR records found. NSR termination successful")
return
else:
self.assertEqual(0, len(nsrs))
self.log.error("Active NSR records found. NSR termination failed")
else:
self.log.debug("No active NSR records found. NSR termination successful")
self.assertEqual(0, len(nsrs))
return
nsr = nsrs[0]
self.log.debug("Got nsr record %s", nsr)
if nsr.operational_status == 'running':
self.log.debug("!!! Rcvd NSR with running status !!!")
self.assertEqual("configuring", nsr.config_status)
break
self.log.debug("Rcvd NSR with %s status", nsr.operational_status)
self.log.debug("Sleeping for 10 seconds")
yield from asyncio.sleep(10, loop=self.loop)
@asyncio.coroutine
def verify_nsr_config(termination=False):
self.log.debug("Verifying nsr config path = %s", XPaths.nsr_config())
nsr_configs = yield from self.querier.get_nsr_configs()
self.assertEqual(1, len(nsr_configs))
nsr_config = nsr_configs[0]
self.assertEqual(
"/nsd:nsd-catalog/nsd:nsd[nsd:id={}]/nsd:name".format(self.ping_pong.nsd_id),
nsr_config.input_parameter[0].xpath,
)
@asyncio.coroutine
def verify_nsr_config_status(termination=False, nsrid=None):
if termination is False and nsrid is not None:
self.log.debug("Verifying nsr config status path = %s", XPaths.nsr_opdata(nsrid))
loop_count = 6
loop_sleep = 10
while loop_count:
loop_count -= 1
yield from asyncio.sleep(loop_sleep, loop=self.loop)
nsr_opdata_l = yield from self.querier.get_nsr_opdatas(nsrid)
self.assertEqual(1, len(nsr_opdata_l))
nsr_opdata = nsr_opdata_l[0].as_dict()
self.log.debug("NSR opdata: {}".format(nsr_opdata))
if ("configured" == nsr_opdata['config_status']):
print("\n###>>> NSR Config Status 'configured' OK <<<###\n")
return
self.assertEqual("configured", nsr_opdata['config_status'])
@asyncio.coroutine
def verify_vnfr_record(termination=False):
self.log.debug("Verifying vnfr record path = %s, Termination=%d",
XPaths.vnfr(), termination)
if termination:
for i in range(10):
vnfrs = yield from self.querier.get_vnfrs()
if len(vnfrs) == 0:
return True
for vnfr in vnfrs:
self.log.debug("VNFR still exists = %s", vnfr)
yield from asyncio.sleep(.5, loop=self.loop)
assert len(vnfrs) == 0
while True:
vnfrs = yield from self.querier.get_vnfrs()
if len(vnfrs) != 0 and termination is False:
vnfr = vnfrs[0]
self.log.debug("Rcvd VNFR with %s status", vnfr.operational_status)
if vnfr.operational_status == 'running':
self.log.debug("!!! Rcvd VNFR with running status !!!")
return True
elif vnfr.operational_status == "failed":
self.log.debug("!!! Rcvd VNFR with failed status !!!")
return False
self.log.debug("Sleeping for 10 seconds")
yield from asyncio.sleep(10, loop=self.loop)
@asyncio.coroutine
def verify_vnfr_cloud_account(vnf_index, cloud_account):
self.log.debug("Verifying vnfr record Cloud account for vnf index = %d is %s", vnf_index,cloud_account)
vnfrs = yield from self.querier.get_vnfrs()
cloud_accounts = [vnfr.cloud_account for vnfr in vnfrs if vnfr.member_vnf_index_ref == vnf_index]
self.log.debug("VNFR cloud account for index %d is %s", vnf_index,cloud_accounts[0])
assert cloud_accounts[0] == cloud_account
@asyncio.coroutine
def verify_vlr_record(termination=False):
vlr_xpath = XPaths.vlr()
self.log.debug("Verifying vlr record path = %s, termination: %s",
vlr_xpath, termination)
res_iter = yield from self.dts.query_read(vlr_xpath)
for i in res_iter:
result = yield from i
if termination:
self.assertIsNone(result)
self.log.debug("Got vlr record %s", result)
@asyncio.coroutine
def verify_vlrs(nsr_id, count=0):
while True:
nsrs = yield from self.querier.get_nsr_opdatas()
nsr = nsrs[0]
self.log.debug("Got nsr record %s", nsr)
if nsr.operational_status == 'running':
self.log.debug("!!! Rcvd NSR with running status !!!")
# Check the VLR count
if (len(nsr.vlr)) == count:
self.log.debug("NSR %s has %d VLRs", nsr_id, count)
break
self.log.debug("Rcvd NSR %s with %s status", nsr_id, nsr.operational_status)
self.log.debug("Sleeping for 10 seconds")
yield from asyncio.sleep(10, loop=self.loop)
@asyncio.coroutine
def verify_nsd_ref_count(termination):
self.log.debug("Verifying nsd ref count= %s", XPaths.nsd_ref_count())
res_iter = yield from self.dts.query_read(XPaths.nsd_ref_count())
for i in res_iter:
result = yield from i
self.log.debug("Got nsd ref count record %s", result)
@asyncio.coroutine
def verify_vnfd_ref_count(termination):
self.log.debug("Verifying vnfd ref count= %s", XPaths.vnfd_ref_count())
res_iter = yield from self.dts.query_read(XPaths.vnfd_ref_count())
for i in res_iter:
result = yield from i
self.log.debug("Got vnfd ref count record %s", result)
        @asyncio.coroutine
        def verify_scale_group_reaches_state(nsr_id, scale_group, index, state, timeout=1000):
            """Poll until the first scaling-group instance of *nsr_id* reaches
            *state*, or fail after *timeout* seconds.

            NOTE(review): *scale_group* is accepted but unused; only
            scaling_group_record[0] is inspected. Also, the two ``continue``
            statements skip the 1 s sleep below, so an NSR without scaling
            records is polled in a tight loop -- confirm this is acceptable.
            """
            start_time = time.time()
            instance_state = None
            while (time.time() - start_time) < timeout:
                results = yield from self.querier.get_nsr_opdatas(nsr_id=nsr_id)
                if len(results) == 1:
                    result = results[0]
                    if len(result.scaling_group_record) == 0:
                        continue
                    if len(result.scaling_group_record[0].instance) == 0:
                        continue
                    instance = result.scaling_group_record[0].instance[0]
                    self.assertEqual(instance.scaling_group_index_ref, index)
                    instance_state = instance.op_status
                    if instance_state == state:
                        self.log.debug("Scale group instance reached %s state", state)
                        return
                yield from asyncio.sleep(1, loop=self.loop)
            # Timed out: fails unless the last observed state matches.
            self.assertEqual(state, instance_state)
        @asyncio.coroutine
        def verify_results(termination=False, nsrid=None):
            """Run the full battery of record checks for one NSR lifecycle
            phase (instantiation when termination=False, teardown otherwise)."""
            yield from verify_vnfr_record(termination)
            #yield from verify_vlr_record(termination)
            yield from verify_nsr_opdata(termination)
            yield from verify_nsr_config(termination)
            yield from verify_nsd_ref_count(termination)
            yield from verify_vnfd_ref_count(termination)
            # Config Manager
            yield from verify_cm_state(termination, nsrid)
            yield from verify_nsr_config_status(termination, nsrid)
        @asyncio.coroutine
        def verify_scale_instance(index):
            """Wait for the scaled VNFR to terminate or reach a final state.

            NOTE(review): *index* is accepted but unused, and ``termination``
            is read from the enclosing scope, not a parameter -- confirm
            intended. In the termination branch, up to 5 reads are made
            (without sleeping) before asserting all VNFRs are gone.
            """
            self.log.debug("Verifying scale record path = %s, Termination=%d",
                           XPaths.vnfr(), termination)
            if termination:
                for i in range(5):
                    vnfrs = yield from self.querier.get_vnfrs()
                    if len(vnfrs) == 0:
                        return True
                    for vnfr in vnfrs:
                        self.log.debug("VNFR still exists = %s", vnfr)
                assert len(vnfrs) == 0
            while True:
                vnfrs = yield from self.querier.get_vnfrs()
                if len(vnfrs) != 0 and termination is False:
                    vnfr = vnfrs[0]
                    self.log.debug("Rcvd VNFR with %s status", vnfr.operational_status)
                    if vnfr.operational_status == 'running':
                        self.log.debug("!!! Rcvd VNFR with running status !!!")
                        return True
                    elif vnfr.operational_status == "failed":
                        self.log.debug("!!! Rcvd VNFR with failed status !!!")
                        return False
                self.log.debug("Sleeping for 10 seconds")
                yield from asyncio.sleep(10, loop=self.loop)
        @asyncio.coroutine
        def terminate_ns(nsr_id):
            """Terminate the network service by deleting its NSR config."""
            xpath = XPaths.nsr_config(nsr_id)
            self.log.debug("Terminating network service with path %s", xpath)
            yield from self.dts.query_delete(xpath, flags=rwdts.XactFlag.ADVISE)
            self.log.debug("Terminated network service with path %s", xpath)
        @asyncio.coroutine
        def run_test():
            """Main test scenario: descriptor CRUD-while-in-use checks, a
            single-site NS lifecycle with VL add/remove, then a multi-site
            (two cloud accounts) NS lifecycle, then real descriptor deletion."""
            yield from self.wait_tasklets()
            cloud_type = "mock"
            yield from self.configure_cloud_account(self.dts, cloud_type, "mock_account")
            yield from self.configure_cloud_account(self.dts, cloud_type, "mock_account1")
            yield from self.ping_pong.publish_desciptors()
            # Attempt updating VNFD not in use
            yield from self.ping_pong.update_ping_vnfd()
            # Attempt updating NSD not in use
            yield from self.ping_pong.update_nsd()
            # Attempt deleting VNFD not in use
            yield from self.ping_pong.delete_ping_vnfd()
            # Attempt deleting NSD not in use
            yield from self.ping_pong.delete_nsd()
            # Re-publish since the descriptors were just deleted.
            yield from self.ping_pong.publish_desciptors()
            nsr_id = yield from self.nsr_publisher.publish()
            yield from verify_results(nsrid=nsr_id)
            # yield from self.nsr_publisher.create_scale_group_instance("ping_group", 1)
            # yield from verify_scale_group_reaches_state(nsr_id, "ping_group", 1, "running")
            # yield from self.nsr_publisher.delete_scale_group_instance("ping_group", 1)
            yield from asyncio.sleep(10, loop=self.loop)
            # Attempt deleting VNFD in use
            yield from self.ping_pong.delete_ping_vnfd()
            # Attempt updating NSD in use
            yield from self.ping_pong.update_nsd()
            # Update NSD in use with new VL
            yield from self.nsr_publisher.add_nsr_vl()
            # Verify the new VL has been added
            yield from verify_vlrs(nsr_id, count=2)
            # Delete the added VL
            yield from self.nsr_publisher.del_nsr_vl()
            # Verify the added VL has been removed
            yield from verify_vlrs(nsr_id, count=1)
            # Attempt deleting NSD in use
            yield from self.ping_pong.delete_nsd()
            yield from terminate_ns(nsr_id)
            # Give termination time to propagate before checking records.
            yield from asyncio.sleep(25, loop=self.loop)
            self.log.debug("Verifying termination results")
            yield from verify_results(termination=True, nsrid=nsr_id)
            self.log.debug("Verified termination results")
            # Multi site NS case
            self.log.debug("Testing multi site NS")
            self.nsr_publisher.update_vnf_cloud_map({1:"mock_account1",2:"mock_account"})
            nsr_id = yield from self.nsr_publisher.publish()
            yield from verify_results(nsrid=nsr_id)
            yield from verify_vnfr_cloud_account(1,"mock_account1")
            yield from verify_vnfr_cloud_account(2,"mock_account")
            yield from verify_vlrs(nsr_id, count=2)
            yield from terminate_ns(nsr_id)
            yield from asyncio.sleep(25, loop=self.loop)
            self.log.debug("Verifying termination results for multi site NS")
            yield from verify_results(termination=True, nsrid=nsr_id)
            self.log.debug("Verified termination results for multi site NS")
            self.log.debug("Attempting to delete VNFD for real")
            yield from self.ping_pong.delete_ping_vnfd()
            self.log.debug("Attempting to delete NSD for real")
            yield from self.ping_pong.delete_nsd()
future = asyncio.ensure_future(run_test(), loop=self.loop)
self.run_until(future.done)
if future.exception() is not None:
self.log.error("Caught exception during test")
raise future.exception()
def main():
    """Point the tasklet environment variables at the installed plugins and
    hand control to the unittest runner (XML results unless -n is given)."""
    plugin_dir = os.path.join(os.environ["RIFT_INSTALL"], "usr/lib/rift/plugins")
    # Each tasklet directory defaults to its plugin subdirectory unless the
    # caller already exported an override.
    for env_name, subdir in (('VNS_DIR', 'rwvns'),
                             ('VNFM_DIR', 'rwvnfm'),
                             ('NSM_DIR', 'rwnsm'),
                             ('RM_DIR', 'rwresmgrtasklet')):
        os.environ.setdefault(env_name, os.path.join(plugin_dir, subdir))
    runner = xmlrunner.XMLTestRunner(output=os.environ["RIFT_MODULE_TEST"])
    parser = argparse.ArgumentParser()
    parser.add_argument('-v', '--verbose', action='store_true')
    parser.add_argument('-n', '--no-runner', action='store_true')
    args, unittest_args = parser.parse_known_args()
    if args.no_runner:
        runner = None
    # Verbose run turns on debug logging for the test cases.
    ManoTestCase.log_level = logging.DEBUG if args.verbose else logging.WARN
    # Forward any unrecognised arguments straight to unittest.
    unittest.main(testRunner=runner, argv=[sys.argv[0]] + unittest_args)
# Script entry point: run the MANO system test suite.
if __name__ == '__main__':
    main()
# vim: sw=4
| [
"Leslie.Giles@riftio.com"
] | Leslie.Giles@riftio.com |
5b23b60fb17671eaecb7874ceadf7a5a0779f564 | b2705a15339f8620b70cf7a1759c3d4a4e8073ec | /widgets.py | f5f9edfe6da8721d7b29bff0232581e18eecd46c | [] | no_license | JonahGoetze/CSC-CAN-logger-Display | 4f2cd0ebe93052ad0775d426fd05e67817e54571 | facd3c2d384ce74a25024b787984463d83feb0e9 | refs/heads/master | 2020-04-11T10:13:44.731163 | 2019-03-04T03:59:53 | 2019-03-04T03:59:53 | 161,707,481 | 0 | 1 | null | 2019-03-04T03:59:54 | 2018-12-13T23:38:27 | Python | UTF-8 | Python | false | false | 2,530 | py | import random
import math
import queue as Q
from kivy.uix.widget import Widget
from kivy.properties import NumericProperty, ReferenceListProperty, ObjectProperty, ListProperty, StringProperty
class Gague(Widget):
    """Horizontal bar gauge whose fill colour escalates through thresholds.

    (The name is kept as ``Gague`` -- renaming would break the .kv rules
    and the Root widget that reference it.)
    """
    max_value = NumericProperty(1)  # reading that corresponds to a full bar
    value = NumericProperty(0)  # most recent reading
    current_gague_width = NumericProperty(100)  # fill width in pixels
    threshold_1 = NumericProperty(65)  # first (warning) threshold; 0 disables
    threshold_2 = NumericProperty(80)  # second threshold; 0 disables
    threshold_3 = NumericProperty(95)  # third (critical) threshold; 0 disables
    threshold_1_color = ListProperty([1, 1, 0, 1])  # yellow
    threshold_2_color = ListProperty([1, 0.5, 0, 1])  # orange
    threshold_3_color = ListProperty([1, 0, 0, 1])  # red
    default_bar_color = ListProperty([0, 1, 0, 1])  # green
    bar_color = ListProperty([0, 1, 0, 1])  # colour currently drawn
    title = StringProperty("Temperature")

    def set_value(self, value):
        """Record *value*, resize the fill bar and choose the bar colour.

        The highest enabled threshold that *value* reaches wins; a
        threshold of 0 disables that colour band.
        """
        self.value = value
        percent = self.value / self.max_value
        self.current_gague_width = math.floor(self.width * percent)
        # (A stray no-op ``percent`` expression statement was removed here.)
        if (self.threshold_3 != 0 and
            percent >= (self.threshold_3/self.max_value)):
            self.bar_color = self.threshold_3_color
        elif(self.threshold_2 != 0 and
            percent >= (self.threshold_2/self.max_value)):
            self.bar_color = self.threshold_2_color
        elif(self.threshold_1 != 0 and
            percent >= (self.threshold_1/self.max_value)):
            self.bar_color = self.threshold_1_color
        else:
            self.bar_color = self.default_bar_color
class Root(Widget):
    """Top-level dashboard widget: drains the data queues each frame and
    refreshes the gauges (only the GPS speed gauge is currently active)."""
    speed_gague = ObjectProperty(None)
    throttle_gague = ObjectProperty(None)
    rpm_gague = ObjectProperty(None)
    temp_gague = ObjectProperty(None)
    count = 0
    gps_speed = 0
    engine_speed = 0
    rpm = 0
    throttle = 0
    coolant_temp = 0
    def update(self, delta):
        """Clock callback: pull the latest GPS speed (non-blocking) and push
        it to the speed gauge. *delta* is the frame time step (unused).

        NOTE(review): ``self.gps_queue`` (and the disabled ``obdii_queue``)
        are assumed to be attached by the application before scheduling --
        confirm against the app setup code.
        """
        #if self.count < 100:
        #    self.count = min(self.count+1, 100)
        #else:
        #    self.count = max(self.count-random.randint(0, 50), 0)
        try:
            self.gps_speed = self.gps_queue.get_nowait()
            # Readings of 3 or below are treated as stationary noise.
            self.gps_speed = self.gps_speed if self.gps_speed > 3 else 0
        except Q.Empty as e:
            pass # don't change speed
        #try:
        #    self.engine_speed, self.rpm, self.throttle, self.coolant_temp = self.obdii_queue.get_nowait()
        #except Q.Empty as e:
        #    pass # don't change speed
        self.speed_gague.set_value(self.gps_speed)
        #self.rpm_gague.set_value(self.rpm)
        #self.throttle_gague.set_value(self.throttle)
        #self.temp_gague.set_value(self.coolant_temp)
| [
"jghibiki.games@gmail.com"
] | jghibiki.games@gmail.com |
55ca9242e06690db3374ef83a64763ce439b5b3d | cf028679971e64b48e357d8dcc5f7ed4423a7c8b | /keyboard/tests.py | 614f5acf28879b408b6bfb257ae8c4f3c7152b6f | [] | no_license | tuantphan/KeyboardReview2021 | f2cf3756c5f582310c010ccf6f21b3830baa88c3 | 35b3336ed06fb8c5dfddada482e7e09a10f2c8bf | refs/heads/main | 2023-03-27T07:54:23.820924 | 2021-03-27T22:39:04 | 2021-03-27T22:39:04 | 352,071,413 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,997 | py | from django.test import TestCase
from django.contrib.auth.models import User
from .models import KeyboardReview, KeyboardName, KeyboardType
from .forms import KeyboardForm, KeyboardImageForm
import datetime
from django.urls import reverse_lazy, reverse
# Create your tests here.
class KeyboardTypeTest(TestCase):
    """Unit tests for the KeyboardType model."""
    def setUp(self):
        # Unsaved in-memory instance; these checks never hit the database.
        self.type=KeyboardType(typename='membrane')
    def test_typestring(self):
        # __str__ must return the type name.
        self.assertEqual(str(self.type), 'membrane')
    def test_tablename(self):
        # The model must map to the explicit 'keyboardtype' table.
        self.assertEqual(str(KeyboardType._meta.db_table), 'keyboardtype')
class KeyboardNameTest(TestCase):
    """Unit tests for the KeyboardName model."""
    def setUp(self):
        # Related objects are built in memory only (not saved).
        self.type=KeyboardType(typename='mechanical')
        self.user=User(username='batman')
        self.keyboardname=KeyboardName(keyboardname='K65 Mini', keyboardtype=self.type, user=self.user, dateentered=datetime.date(2021, 10, 20), price=90, keyboardurl='https://www.corsair.com/us/en/Categories/Products/Gaming-Keyboards/RGB-Mechanical-Gaming-Keyboards/K65-RGB-MINI-60%25-Mechanical-Gaming-Keyboard/p/CH-9194014-NA', description="good keyboard")
    def test_typestring(self):
        # __str__ must return the keyboard name.
        self.assertEqual(str(self.keyboardname), 'K65 Mini')
    def test_tablename(self):
        # The model must map to the explicit 'keyboardname' table.
        self.assertEqual(str(KeyboardName._meta.db_table), 'keyboardname')
class KeyboardReviewTest(TestCase):
    """Unit tests for the KeyboardReview model."""
    def setUp(self):
        # Related objects are built in memory only (not saved).
        self.name=KeyboardName(keyboardname='K65 Mini')
        self.user=User(username='batman')
        self.keyboardreview=KeyboardReview(keyboardname=self.name, user=self.user, dateentered=datetime.date(2021, 10, 20), keyboardimage="https://www.vortez.net/news_file/17397_logitech_g_pro_x_mechanical_gaming_keyboard.jpg", keyboardreview="A good keyboard")
    def test_typestring(self):
        # __str__ must return the image URL.
        self.assertEqual(str(self.keyboardreview), 'https://www.vortez.net/news_file/17397_logitech_g_pro_x_mechanical_gaming_keyboard.jpg')
    def test_tablename(self):
        # The model must map to the explicit 'keyboardimage' table.
        self.assertEqual(str(KeyboardReview._meta.db_table), 'keyboardimage')
class NewKeyboardForm(TestCase):
    """Validation test for KeyboardForm."""
    def test_keyboardform(self):
        """A fully-populated submission should validate."""
        data={
            'keyboardname' : 'K65 Mini',
            'keyboardtype' : 'mechanical',
            'user' : 'batman',
            'dateentered' : '2021-10-20',
            'price' : '90',
            'keyboardurl' : 'https://www.vortez.net/news_file/17397_logitech_g_pro_x_mechanical_gaming_keyboard.jpg',
            'description' : 'A good keyboard'
        }
        form=KeyboardForm(data)
        # Bug fix: is_valid is a method. The previous
        # assertTrue(form.is_valid) asserted on the bound-method object,
        # which is always truthy, so the test could never fail.
        self.assertTrue(form.is_valid())
class newKeyboardImage(TestCase):
    """Validation test for KeyboardImageForm."""
    def test_keyboardimageform(self):
        """A fully-populated submission should validate."""
        data={
            'keyboardname' : 'K65 Mini',
            'user' : 'batman',
            'dateentered' : '2021-10-20',
            'keyboardimage' : 'https://www.vortez.net/news_file/17397_logitech_g_pro_x_mechanical_gaming_keyboard.jpg',
            'keyboardreview' : 'A good keyboard'
        }
        form=KeyboardImageForm(data)
        # Bug fix: is_valid is a method. The previous
        # assertTrue(form.is_valid) asserted on the bound-method object,
        # which is always truthy, so the test could never fail.
        self.assertTrue(form.is_valid())
class New_Keyboard_Authentication_Test(TestCase):
    """Authentication tests for the 'newkeyboard' view."""
    def setUp(self):
        self.test_user=User.objects.create_user(username='user1', password='123456')
        self.type=KeyboardType.objects.create(typename='membrane')
        # NOTE(review): the two objects below are instantiated but never
        # saved/created, so they never reach the test database -- confirm
        # whether they are needed at all for this test.
        self.name=KeyboardName(keyboardname='K65 Mini', keyboardtype=self.type, user=self.test_user, dateentered=datetime.date(2021, 10, 20), price=90, keyboardurl='https://www.corsair.com/us/en/Categories/Products/Gaming-Keyboards/RGB-Mechanical-Gaming-Keyboards/K65-RGB-MINI-60%25-Mechanical-Gaming-Keyboard/p/CH-9194014-NA', description="good keyboard")
        self.review=KeyboardReview(keyboardname=self.name, user=self.test_user, dateentered=datetime.date(2021, 10, 20), keyboardimage="https://www.vortez.net/news_file/17397_logitech_g_pro_x_mechanical_gaming_keyboard.jpg", keyboardreview="A good keyboard")
    def test_redirect_if_not_logged_in(self):
        # Anonymous users must be redirected to the login page.
        response=self.client.get(reverse('newkeyboard'))
        self.assertRedirects(response, '/accounts/login/?next=/keyboard/newkeyboard/')
"tuantphan83@gmail.com"
] | tuantphan83@gmail.com |
48633be75b15841f7563e8d03e543ca67de4603c | 51293d7faa6a2d32f6dbbcb1ce6c1bcfc470274a | /Layer.py | d8bcb258f7f6939940990f43c16689619725857e | [] | no_license | Clientrace/BasicANN | c4e6ada54c0c3d66accbc99db48b2f27e7801641 | e7bb327de9d55b0d845cfd9c978ee9590d280882 | refs/heads/master | 2020-12-26T00:45:59.886771 | 2016-09-23T09:51:05 | 2016-09-23T09:51:05 | 68,792,010 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 449 | py | import random
class Layer(object):
    """A flat network layer: *node_number* nodes (initialised to 0), each
    paired with a weight drawn uniformly from [0, 1)."""
    def __init__(self, name, node_number):
        self.nodes = []
        self.name = name
        self.data = 0
        self.weights = []
        for i in range(0, node_number):
            self.nodes.append(0)
            self.weights.append(random.random())
    def getData(self, n):
        """Return the value stored in node *n*."""
        return self.nodes[n]
    def setData(self, data, n):
        """Store *data* in node *n*."""
        self.nodes[n] = data
    def setWeight(self, w, n):
        """Set weight *n* to *w*.

        Bug fix: the attribute is ``weights`` -- the original referenced
        the non-existent ``self.weight`` and raised AttributeError.
        """
        self.weights[n] = w
    def getWeight(self, n):
        """Return weight *n*."""
        return self.weights[n]
"pkimclarence@yahoo.com.ph"
] | pkimclarence@yahoo.com.ph |
9201198e9def073496cc6d1892b30ee684872cbc | 4d7f7bed6bbe8ea23de2bffd36f15992bcfe4707 | /tools/test.py | 84317b73364d78fa09b8353dba13cea67230303a | [
"MIT"
] | permissive | saketkunwar/cvwc2019_pose | 9ac3bd4776aed38544597519a1f4039320c3b85b | 3c4dce527c05b7a903eab17bb6f97d14d25207b6 | refs/heads/master | 2020-06-24T23:28:25.440338 | 2019-07-27T09:39:34 | 2019-07-27T09:39:34 | 199,124,841 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,000 | py | # ------------------------------------------------------------------------------
# pose.pytorch
# Copyright (c) 2018-present Microsoft
# Licensed under The Apache-2.0 License [see LICENSE for details]
# Written by Bin Xiao (Bin.Xiao@microsoft.com)
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import pprint
import torch
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data
import torch.utils.data.distributed
import torchvision.transforms as transforms
import _init_paths
from config import cfg
from config import update_config
from core.loss import JointsMSELoss
from core.function import validate, test
from utils.utils import create_logger
import dataset
import models
def parse_args():
    """Parse the command-line options of the keypoint test harness.

    --cfg is mandatory; the four directory overrides default to ''; any
    trailing tokens are collected verbatim into ``opts`` for config
    overriding.
    """
    parser = argparse.ArgumentParser(description='Train keypoints network')
    # general
    parser.add_argument('--cfg',
                        help='experiment configure file name',
                        required=True,
                        type=str)
    parser.add_argument('opts',
                        help="Modify config options using the command-line",
                        default=None,
                        nargs=argparse.REMAINDER)
    # The optional directory overrides all share the same shape, so they
    # are registered from a table instead of four copy-pasted blocks.
    directory_flags = (
        ('--modelDir', 'model directory'),
        ('--logDir', 'log directory'),
        ('--dataDir', 'data directory'),
        ('--prevModelDir', 'prev Model directory'),
    )
    for flag, description in directory_flags:
        parser.add_argument(flag, help=description, type=str, default='')
    return parser.parse_args()
def main():
    """Evaluate a pose-estimation model on the configured validation set.

    Loads the network named in the config, restores weights either from
    cfg.TEST.MODEL_FILE or from final_state.pth in the run's output dir,
    wraps it in DataParallel on the configured GPUs and runs validate().
    """
    args = parse_args()
    update_config(cfg, args)

    logger, final_output_dir, tb_log_dir = create_logger(
        cfg, args.cfg, 'valid')

    logger.info(pprint.pformat(args))
    logger.info(cfg)

    # cudnn related setting
    cudnn.benchmark = cfg.CUDNN.BENCHMARK
    torch.backends.cudnn.deterministic = cfg.CUDNN.DETERMINISTIC
    torch.backends.cudnn.enabled = cfg.CUDNN.ENABLED

    # NOTE(review): eval() on a config-derived string -- safe only as long
    # as config files are trusted.
    model = eval('models.'+cfg.MODEL.NAME+'.get_pose_net')(
        cfg, is_train=False
    )

    if cfg.TEST.MODEL_FILE:
        logger.info('=> loading model from {}'.format(cfg.TEST.MODEL_FILE))
        # strict=False tolerates missing/unexpected keys in the checkpoint.
        model.load_state_dict(torch.load(cfg.TEST.MODEL_FILE), strict=False)
    else:
        model_state_file = os.path.join(
            final_output_dir, 'final_state.pth'
        )
        logger.info('=> loading model from {}'.format(model_state_file))
        model.load_state_dict(torch.load(model_state_file))

    model = torch.nn.DataParallel(model, device_ids=cfg.GPUS).cuda()

    # define loss function (criterion) and optimizer
    criterion = JointsMSELoss(
        use_target_weight=cfg.LOSS.USE_TARGET_WEIGHT
    ).cuda()

    # Data loading code: ImageNet normalisation statistics.
    normalize = transforms.Normalize(
        mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
    )
    valid_dataset = eval('dataset.'+cfg.DATASET.DATASET)(
        cfg, cfg.DATASET.ROOT, cfg.DATASET.TEST_SET, False,
        transforms.Compose([
            transforms.ToTensor(),
            normalize,
        ])
    )
    valid_loader = torch.utils.data.DataLoader(
        valid_dataset,
        batch_size=cfg.TEST.BATCH_SIZE_PER_GPU*len(cfg.GPUS),
        shuffle=False,
        num_workers=cfg.WORKERS,
        pin_memory=True
    )

    # evaluate on validation set
    validate(cfg, valid_loader, valid_dataset, model, criterion,
             final_output_dir, tb_log_dir)
# Script entry point: evaluate the pose network on the validation set.
if __name__ == '__main__':
    main()
| [
"saketkunwar2005@gmail.com"
] | saketkunwar2005@gmail.com |
09a65523f111e9651bfad0dac1d89f22fb18732a | 6b7230155432f8122931e1ef657079125de77ef2 | /bgunfolding/svd.py | bdf3c36d7ca324d9fb9e91905128cc9a9db1ca5a | [
"MIT"
] | permissive | lrsppp/bgunfolding | 1ce241682445bb2b5a997437e81e5dd5b2d6811c | 0e60ebd88906d5cd50df6e926e855d5a7ee3051f | refs/heads/main | 2023-08-28T20:31:00.824607 | 2021-11-06T23:39:37 | 2021-11-06T23:39:37 | 425,059,087 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,477 | py | import numpy as np
import matplotlib.pyplot as plt
from bgunfolding.base import UnfoldingBase
from bgunfolding.tikhonov_matrices import *
from scipy.optimize import minimize
import numdifftools as nd
from joblib import Parallel, delayed
class SVD(UnfoldingBase):
    """Tikhonov-regularised unfolding with SVD-based tools.

    The regularisation strength tau can be chosen either by minimising the
    mean global correlation coefficient (estimate_tau) or by the L-curve
    maximum-curvature criterion (curvature_criterion).
    """
    def __init__(self, C = 'second_order', weighted = False):
        '''
        C: str
            Tikhonov matrix. Choose between: `second_order`, `identity`, `selective_identity`
            second_order is default. Forces flat spectrum.
        weighted: bool
            If True, residuals are weighted with the inverse of the
            diagonal data covariance in `residuals`.
        '''
        # NOTE(review): super(UnfoldingBase, self) skips UnfoldingBase's own
        # __init__ and dispatches to *its* parent -- super(SVD, self) would
        # be the usual spelling; confirm this is intended.
        super(UnfoldingBase, self).__init__()
        self.C = C
        self.weighted = weighted

    def __repr__(self):
        return 'svd'

    def _parallel_estimate_tau(self, i, tau):
        """Worker for estimate_tau: unfold at one *tau* and append the mean
        global correlation coefficient to the returned tuple."""
        f_est, cov, lx, ly = self.predict(tau)
        glob_cc = self.calc_global_cc(cov)
        return f_est, cov, lx, ly, glob_cc

    def estimate_tau(self, tau_min, tau_max, n_tau, n_jobs = 2):
        """
        Scan n_tau regularisation strengths on a log grid and pick the one
        minimising the mean global correlation coefficient; also evaluates
        the L-curve curvature criterion on the same grid.

        Parameters
        ----------
        tau_min : float
            10**tau_min
        tau_max : float
            10**tau_max
        n_tau : int
        n_jobs : int
            Number of joblib worker processes.

        Returns
        -------
        dict with tau_est, cov_array, tau_space, glob_cc, nan_idx,
        f_est_array, plus the curvature_criterion entries.
        """
        tau_space = np.logspace(tau_min, tau_max, n_tau)
        f_est_array = np.zeros((n_tau, self.n_bins_true))
        cov_array = np.zeros((n_tau, self.n_bins_true, self.n_bins_true))
        glob_cc = np.zeros(n_tau)
        LX = np.zeros(n_tau)
        LY = np.zeros(n_tau)
        # parallelize
        r = Parallel(n_jobs = n_jobs, backend = 'loky', verbose = 0)(delayed(self._parallel_estimate_tau)(i, tau) for i, tau in enumerate(tau_space))
        # store
        f_est_array = np.array([r_[0] for r_ in r])
        cov_array = np.array([r_[1] for r_ in r])
        LX = np.array([r_[2] for r_ in r])
        LY = np.array([r_[3] for r_ in r])
        glob_cc = np.array([r_[4] for r_ in r])
        nan_idx = np.isnan(glob_cc)
        glob_cc_ = glob_cc[nan_idx == False]
        # NOTE(review): the argmin index is taken in the NaN-filtered array
        # but used to index the unfiltered tau_space -- if any NaNs occur
        # before the minimum, tau_est is shifted; confirm.
        tau_est = tau_space[np.where(glob_cc_ == np.min(glob_cc_))[0][0]]
        self.tau_est = tau_est
        d = {'tau_est': tau_est,
             'cov_array': cov_array,
             'tau_space': tau_space,
             'glob_cc': glob_cc,
             'nan_idx': nan_idx,
             'f_est_array': f_est_array}
        # l-curve criterion
        d_curv = self.curvature_criterion(tau_space, LX, LY)
        d.update(d_curv)
        return d

    def curvature_criterion(self, tau_space, lx, ly):
        """
        Find maximum of L-Curve curvature.

        lx, ly are the residual and regularisation norms per tau; the
        curvature of (log lx, log ly) is computed with numerical gradients
        and its maximum is taken as tau_est_curv.
        """
        rho = np.log(lx)
        xi = np.log(ly)
        drho = np.gradient(rho)
        ddrho = np.gradient(drho)
        dxi = np.gradient(xi)
        ddxi = np.gradient(dxi)
        # Signed curvature of the parametric curve (rho(tau), xi(tau)).
        curv = 2 * (drho * ddxi - ddrho * dxi) / (drho**2 + dxi**2)**(3/2)
        max_idx = np.where(curv == np.max(curv))[0][0]
        tau_est_curv = tau_space[max_idx]
        curv_max = curv[max_idx]
        # l curve max curvature
        rho_max = rho[max_idx]
        xi_max = xi[max_idx]
        d = {'rho': rho,
             'xi': xi,
             'lx': lx,
             'ly': ly,
             'drho': drho,
             'ddrho': ddrho,
             'dxi': dxi,
             'ddxi': ddxi,
             'curv': curv,
             'max_idx': max_idx,
             'tau_est_curv': tau_est_curv,
             'curv_max': curv_max,
             'rho_max': rho_max,
             'xi_max': xi_max}
        return d

    def calc_global_cc(self, cov):
        """Mean global correlation coefficient of covariance matrix *cov*."""
        return np.mean(np.sqrt(1 - 1 / ( np.diag(cov) * np.diag(np.linalg.inv(cov)))))

    def svd(self):
        """
        Singular value decomposition of the response matrix A.
        Caches u, s, vh on the instance and returns them; returns None
        (after printing) if fit() has not been called yet.
        """
        if self.is_fitted == True:
            self.u, self.s, self.vh = np.linalg.svd(self.A)
            return self.u, self.s, self.vh
        else:
            print('Not fitted yet.')

    def calc_filter_factors(self, tau, s):
        """Tikhonov filter factors s^2 / (s^2 + tau^2) for singular values *s*."""
        filter_factors = s**2 / (s**2 + tau**2)
        return filter_factors

    def predict(self, tau):
        """
        Unfold with regularisation strength *tau*.

        Parameters
        ----------
        tau : float
            Regularization Parameter

        Returns
        -------
        f_est : array-like
        cov : array-like
            Inverse Hessian of the objective at the minimum.
        resid : float
            Represents Lx in an L-Curve plot.
        regul : float
            Represents Ly in an L-Curve plot.
        """
        if tau == None:
            raise Exception('Regularization Parameter not defined (None-Type).')
        # Weighted and regularized least squares fit
        cov_n = self.cov_n
        cov_n_inv = np.linalg.pinv(cov_n)
        # https://stats.stackexchange.com/questions/52704/covariance-of-linear-regression-coefficients-in-weighted-least-squares-method
        # Tikhonov Matrices
        if self.C == 'second_order':
            # second order derivative linear operator
            self.rmatrix= second_order_central(len(self.f) - 2) # cut off over- and underflow
        elif self.C == 'identity':
            # identity matrix
            self.rmatrix = np.eye(len(self.f) - 2)
        elif self.C == 'selective_identity':
            # svd
            u, s, vt = self.svd()
            x = tau**2 - s**2
            # no damping of solution components with small index
            # NOTE(review): np.max(..., 0) here passes 0 as the *axis*
            # argument, not as a floor value -- confirm intended.
            self.rmatrix = np.diag(np.max(np.diag(x[1:-1]), 0))
        # minimize (bounds keep the estimated density strictly positive)
        x0 = np.ones(self.n_bins_true)
        bounds = [[1e-4, None] for i in range(self.n_bins_true)]
        res = minimize(self.residuals, x0, bounds = bounds, args = (tau, self.weighted))
        # estimated density
        f_est = res.x
        # Covariance from the inverse Hessian of the objective.
        hess = nd.Hessian(self.residuals, method = 'complex')
        cov = np.linalg.inv(hess(f_est, tau, self.weighted))
        # l curve (unweighted )
        lx = np.linalg.norm(self.A @ f_est + self.b - self.g)
        ly = np.linalg.norm(self.rmatrix @ np.log10(f_est / self.acceptance)[1:-1])
        self.f_est = f_est
        self.cov = cov
        return f_est, cov, lx, ly

    def residuals(self, f, tau, weighted = False):
        """Regularised least-squares objective for candidate density *f*.

        NOTE(review): ``f.any() <= 0`` compares the single boolean
        f.any() against 0 rather than testing element-wise positivity
        ((f <= 0).any() may have been intended) -- confirm.
        """
        # Diagonal weights from the observed (signal + background) counts.
        self.W = np.linalg.inv(np.diag(self.g + self.b))
        if f.any() <= 0:
            return np.inf
        else:
            # Regularise the log-spectrum corrected for acceptance,
            # excluding under-/overflow bins.
            f_eff = np.log10(f[1:-1] / self.acceptance[1:-1])
            # weight residuals with errors
            if weighted:
                S = (self.A @ f + self.b - self.g).T @ self.W @ (self.A @ f + self.b - self.g) + tau**2 * (self.rmatrix @ f_eff).T @ (self.rmatrix @ f_eff)
            else:
                S = (self.A @ f + self.b - self.g).T @ (self.A @ f + self.b - self.g) + tau**2 * (self.rmatrix @ f_eff).T @ (self.rmatrix @ f_eff)
            return S

    def estimate_minimum(self, tau_space, glob_cc):
        """Return the tau minimising glob_cc, or None on failure."""
        try:
            tau_est = tau_space[np.where(glob_cc == np.min(glob_cc))[0][0]]
            return tau_est
        except:
            print('Could not estimate regularization parameter.')
            return None

    def plot_glob_cc(self):
        """Plot the mean global correlation coefficient over the scanned
        tau grid (requires a prior estimate_tau-style scan that set
        self.tau_space / self.glob_cc)."""
        plt.plot(self.tau_space, self.glob_cc, label = 'Mean of Global Correlation Coefficients')
        plt.xscale('log')
        plt.xlabel(r'$\mathrm{Regularization\,Parameter}\,\tau$')
        plt.ylabel(r'$\hat{\rho}_{\tau}$')
        plt.legend(loc = 'best')
        plt.tight_layout()
"lars.poppe@tu-dortmund.de"
] | lars.poppe@tu-dortmund.de |
0f7299c58842c2c3bee15207f4016fe5b990612d | db00be688b0e7bfb3a31c5620126fe4c9c0f79a8 | /snippets/formatting.py | d2faee63fb7c75b27466e5187e74e995f5818482 | [] | no_license | daveckw/eliteoneSales | 82c7bbe79ce4da04f5f7f5c67d90b534279939e7 | 8503e473e9fd4d9ff85a1d4f280db616ad47b459 | refs/heads/master | 2022-12-15T04:54:14.559826 | 2019-04-19T18:21:59 | 2019-04-19T18:21:59 | 181,590,177 | 0 | 0 | null | 2022-12-08T04:59:24 | 2019-04-16T01:16:22 | HTML | UTF-8 | Python | false | false | 527 | py | # Python format() function example
# Variable declaration
val = 10
# Calling function: integer presentation types.
# (Idiom fix: removed the redundant trailing semicolons.)
print("decimal: {0:d}".format(val))  # display decimal result
print("hex: {0:x}".format(val))  # display hexadecimal result
print("octal: {0:o}".format(val))  # display octal result
print("binary: {0:b}".format(val))  # display binary result
val = 100000000
# Calling function: float presentation types.
print("decimal: {:,.2f}".format(val))  # thousands separator, two decimals
print("decimal: {:.2%}".format(56/9))  # percentage with two decimals
"daveckw@gmail.com"
] | daveckw@gmail.com |
eb52b1979452a41d387d08c3d84777d0e8880f46 | e627fe13adc0a7b542f90bedac343796d6009c8a | /Day8.py | 476db44c1b67f9880aa89cc7b578de944361b4f9 | [] | no_license | MPomeroy/AdventOfCodeSolutions | 664204c830e71fbfc116bc67a7541b6953ba8a50 | 9165b9744445ed91bcf32028ffddd28895b2e7b3 | refs/heads/master | 2021-01-10T17:03:36.577034 | 2016-03-07T04:08:09 | 2016-03-07T04:08:09 | 48,891,510 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,120 | py | # To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.
__author__ = "Mason"
__date__ = "$9-Jan-2016 6:59:33 PM$"


def decode_line(line):
    """Decode one quoted string literal to its in-memory characters
    (Advent of Code 2015, day 8, part 1).

    ``\\x..`` escapes are replaced by a single placeholder character,
    since only the decoded *length* matters for the puzzle.
    """
    body = line[1:len(line) - 1]  # strip the surrounding quotes
    out = []
    i = 0
    while i < len(body):
        ch = body[i]
        if ch == "\\":
            nxt = body[i + 1]
            if nxt == 'x':
                out.append('a')  # placeholder for the escaped byte
                i += 4  # backslash + 'x' + two hex digits
            else:
                out.append(nxt)  # \" or \\ decode to one character
                i += 2
        else:
            out.append(ch)
            i += 1
    return ''.join(out)


def encode_line(line):
    """Re-escape one literal, wrapping it in new quotes
    (Advent of Code 2015, day 8, part 2)."""
    body = line[1:len(line) - 1]  # strip the surrounding quotes
    out = ['"\\"']  # opening quote plus escaped original quote
    for ch in body:
        if ch == "\\":
            out.append("\\\\")
        elif ch == '"':
            out.append('\\"')
        else:
            out.append(ch)
    out.append('\\""')  # escaped original quote plus closing quote
    return ''.join(out)


def main():
    """Interactively solve Advent of Code 2015 day 8 (either part).

    Bug fixes vs. the original: removed a bogus ``file.close()`` on a
    name that was never bound (NameError after printing the results),
    removed the unused ``import binascii``, and made an invalid y/n
    answer fail with a clear message instead of an unbound ``loader``.
    """
    import FileLoadWrapper

    puzzleNumber = input('Which Puzzle number?')
    runTestCase = input("Run the test case?(y/n):")
    if runTestCase in ('y', 'Y'):
        loader = FileLoadWrapper.FileLoader('TestCase', '8')
    elif runTestCase in ('n', 'N'):
        loader = FileLoadWrapper.FileLoader('Input', '8')
    else:
        raise SystemExit("Please answer 'y' or 'n'.")
    firstReading = loader.read()
    # Character count of the file with newlines excluded ("code" length).
    fileLength = len(firstReading.replace("\n", ''))
    lines = firstReading.split("\n")
    if puzzleNumber == '1':
        decoded = ''.join(decode_line(line) for line in lines)
        print(fileLength)
        print(len(decoded))
        print('The difference between the file and string is:' + str(fileLength - len(decoded)))
    else:
        encoded = ''
        for line in lines:
            enc = encode_line(line)
            print(enc)
            encoded += enc
        print(fileLength)
        print('The condensedLine has ' + str(len(encoded)) + ' chars(in this puzzle this is actually an expanded string.)')
        print('The difference between the file and string is:' + str(len(encoded) - fileLength))


# Guarded entry point so the module can be imported without prompting.
if __name__ == '__main__':
    main()
| [
"mason.pomeroy@hotmail.com"
] | mason.pomeroy@hotmail.com |
bff9bc282bfc2698e045bd967b62ba879fee5036 | dd7faf12b2e05d7dde21eca637447ad3facf032e | /app01/models.py | 93de89bf39c112dd5fd852b80fc612aaf44d3160 | [
"Apache-2.0"
] | permissive | CrackerCat/ksDjango | 6cf8b3cc45a67e3f6bfb06f7a7d37b4c9ccf7d2b | 0c0f4a5842cf225e77035b716979fcf9b8d03311 | refs/heads/master | 2023-03-28T15:46:31.224423 | 2021-03-24T00:57:50 | 2021-03-24T00:57:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,757 | py | from django.db import models
from datetime import datetime
# Create your models here.
class UserTitle(models.Model):
    """Kuaishou user profile record (one row per crawled user)."""
    # Legacy note: gender used to be 'F'/'M'; it is now an int choice.
    GENDER = [
        (0,"未知"),
        (1,"男"),
        (2,"女")
    ]
    # Crawl progress of this user's media (videos / lives / mp4 downloads).
    STATE = [
        (0,"0初次爬取"),
        (1,"1ksVideo"),
        (2,"1ksLive"),
        (3,"2ksVideo+ksLive"),
        (4,"3videoMP4"),
        (5,"4vieo+liveMP4")
    ]
    # Fallback avatar URL used as the default for userImg.
    USERIMG = "https://tx2.a.yximgs.com/uhead/AB/2020/08/17/09/BMjAyMDA4MTcwOTM2MDNfMjQ0NzAyMDZfMV9oZDM4Nl8xODU=_s.jpg"
    userID = models.CharField(max_length=256,unique=True,verbose_name="用户id")
    userName = models.CharField(max_length=256,verbose_name="用户名")
    createTime = models.DateTimeField(default=datetime.now,verbose_name="创建时间")
    stateUser = models.IntegerField(choices=STATE,verbose_name="用户信息状态",default=0)
    ksID = models.CharField(max_length=128,verbose_name="快手id",default="xxxxxxxxxxxxxx")
    user_text = models.CharField(max_length=2560,verbose_name="用户简述",default="xxxxxxxxxxxxx")
    gender = models.IntegerField(choices=GENDER,verbose_name="性别",default=0)
    fan = models.CharField(max_length=32,verbose_name="粉丝数",default="-1")
    xinzuo = models.CharField(max_length=32,verbose_name="星座",default="未知")
    cityName = models.CharField(max_length=32,verbose_name="地址",default="未知")
    follow = models.CharField(max_length=32,verbose_name="关注的数量",default="-1")
    photo = models.CharField(max_length=32,verbose_name="作品数量",default="-1")
    userImg = models.CharField(max_length=256,verbose_name="图片地址",default=USERIMG)

    def __str__(self):
        return self.userName

    # Bug fix: Django's model options class must be named ``Meta``; it was
    # misspelled ``Mate``, so the verbose names were silently ignored.
    class Meta:
        verbose_name = verbose_name_plural = "用户ID和名字"
class UserVideo(models.Model):
    """A single video belonging to a crawled user."""
    # Crawl state of this video record.
    STATE = [
        (1,"默认ksVideo"),
        (2,"ksVideo+ksLive")
    ]
    # Deleted together with the referenced user (CASCADE).
    theUser = models.ForeignKey(UserTitle,on_delete=models.CASCADE)
    videoID = models.CharField(max_length=128,default="xxxxxxxxxxxxxx",verbose_name="视频id")
    caption = models.CharField(max_length=512,default="暂无",verbose_name="视频描述")
    coversUrl = models.CharField(max_length=512,default="xxxxxxxxxxx",verbose_name="视频封面")
    videoPath = models.CharField(max_length=512,default="xxxxxxxxxxxxx",verbose_name="视频地址")
    realLikeCount = models.CharField(max_length=64,default="xxxxxxxxxxx",verbose_name="具体点赞数量")
    animatedCoverUrl = models.CharField(max_length=512,default="xxxxxxxx",verbose_name="封面动画")
    stateVideo = models.IntegerField(choices=STATE,default=1,verbose_name="状态")
    displayView = models.CharField(max_length=64,default="-1",verbose_name="播放量")
    displayComment = models.CharField(max_length=64,default="-1",verbose_name="评论数")

    def __str__(self):
        return self.videoID

    # Bug fix: Django's model options class must be named ``Meta``; it was
    # misspelled ``Mate``, so the verbose names were silently ignored.
    class Meta:
        verbose_name = verbose_name_plural = "视频信息"
class UserPhoto(models.Model):
    """A photo album belonging to a crawled user."""
    # Deleted together with the referenced user (CASCADE).
    thephotoUser = models.ForeignKey(UserTitle,on_delete=models.CASCADE)
    photoID = models.CharField(max_length=128,verbose_name="相册id",default="xxxxxxxx")
    caption = models.CharField(max_length=512,verbose_name="相册描述",default="暂无")
    displayView = models.CharField(max_length=32,verbose_name="播放量",default="-1")
    displayLike = models.CharField(max_length=32,verbose_name="点赞数",default="-1")
    displayComment = models.CharField(max_length=32,verbose_name="评论数",default="-1")
    # Image URLs of the album, stored as one concatenated string.
    imgUrls = models.CharField(max_length=5000,default=" ")

    def __str__(self):
        return self.photoID

    # Bug fix: Django's model options class must be named ``Meta``; it was
    # misspelled ``Mate``, so the verbose names were silently ignored.
    class Meta:
        verbose_name = verbose_name_plural = "相册信息"
"liangxinxin5102@163.com"
] | liangxinxin5102@163.com |
7a28f24d0a6faf49ea00304d8ca51cfb2d5b84ef | f305f84ea6f721c2391300f0a60e21d2ce14f2a5 | /6_tree/经典题/后序dfs统计信息/换根dp/hard/abc-233-G - Vertex Deletion-每个点是否在树的最大匹配中.py | 284d716c8e41ab42dbe6165859649c030080a298 | [] | no_license | 981377660LMT/algorithm-study | f2ada3e6959338ae1bc21934a84f7314a8ecff82 | 7e79e26bb8f641868561b186e34c1127ed63c9e0 | refs/heads/master | 2023-09-01T18:26:16.525579 | 2023-09-01T12:21:58 | 2023-09-01T12:21:58 | 385,861,235 | 225 | 24 | null | null | null | null | UTF-8 | Python | false | false | 1,506 | py | # abc-233-G - Vertex Deletion-每个点是否在树的最大匹配中
# https://atcoder.jp/contests/abc223/tasks/abc223_g
# 给定一棵树
# 对每个结点i为根,删除根连接的所有边后,
# !使得剩下的树的最大匹配和原树最大匹配相等
# 求这样的根的个数
# !解:即不参与二分图的最大匹配
# https://yukicoder.me/problems/2085
# 二分图博弈
# Alice和Bob在树上博弈
# 先手放一个棋子,后手在相邻的结点放一个棋子
# 交替放棋子,直到不能放棋子的时候,输
# !问先手是否必胜 => 如果起点不在二分图的最大匹配中,先手必胜
from Rerooting import Rerooting
if __name__ == "__main__":
    # DP state type: whether the current node participates in the maximum
    # matching of its subtree (0: not matched, 1: matched).
    E = int
    def e(root: int) -> E:
        # Base case: a node with no processed children starts out unmatched.
        return 0
    def op(childRes1: E, childRes2: E) -> E:
        # Merge contributions from two child subtrees with OR.
        return childRes1 | childRes2
    def composition(fromRes: E, parent: int, cur: int, direction: int) -> E:
        """direction: 0: cur -> parent, 1: parent -> cur"""
        # If the child is matched, the parent is not — and vice versa.
        return fromRes ^ 1
    n = int(input())
    edges = []
    for _ in range(n - 1):
        u, v = map(int, input().split())
        edges.append((u - 1, v - 1))  # convert to 0-based vertex ids
    R = Rerooting(n)
    for u, v in edges:
        R.addEdge(u, v)
    dp = R.rerooting(e=e, op=op, composition=composition, root=0)
    print(dp.count(0))  # number of vertices not in any maximum matching
| [
"lmt2818088@gmail.com"
] | lmt2818088@gmail.com |
dca4f6431539dae2d455d8de6975681729cfe086 | 1a0a41b7e34de5e32c6c357d301ae47c9e6aaabe | /scripts/ui/progress_bar.py | 04e420865006eabbf65a67e557d3378bc8c112b3 | [
"MIT"
] | permissive | Kateskim/git-clone | b8d9f987c0a53bdf4db00d1373cd55e34d1c412b | 5b78ff0604239955a112001f2116a2892e9f316a | refs/heads/master | 2021-05-25T08:57:25.414937 | 2020-04-07T09:48:53 | 2020-04-07T09:48:53 | 253,750,948 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,500 | py |
import maya.cmds as cmds
import maya.mel as mel
class ProgressBar(object):
    """Thin wrapper around Maya's main progress bar.

    Instances can also decorate a function: the wrapped call runs between
    ``run()`` and ``stop()``, and the returned wrapper exposes
    ``increment``/``status``/``interrupt`` helpers.
    """

    def __init__(self, status='Busy...', start=0, end=100, interruptable=False):
        # Resolve Maya's global progress-bar control once up front.
        self._progress_bar = mel.eval('$tmp = $gMainProgressBar')
        self._status = status
        self._start = start
        self._end = end
        self._interruptable = interruptable

    def increment(self, val=1):
        """Advance the bar by ``val`` steps."""
        cmds.progressBar(self._progress_bar, e=1, s=val)

    def status(self, msg):
        """Replace the status text shown next to the bar."""
        cmds.progressBar(self._progress_bar, e=1, st=msg)

    def interrupted(self):
        """Query whether the user has cancelled the progress bar."""
        return cmds.progressBar(self._progress_bar, q=1, ic=True)

    def run(self):
        """Begin the progress bar with the configured range and status."""
        cmds.progressBar(self._progress_bar,
                         e=1,
                         bp=1,
                         ii=self._interruptable,
                         status=self._status,
                         min=self._start,
                         max=self._end
                         )
        cmds.refresh()

    def stop(self):
        """End the progress bar."""
        cmds.progressBar(self._progress_bar, e=1, ep=1)

    def __call__(self, func):
        # Decorator form: surround the call with run()/stop() and re-export
        # the helper methods on the wrapper so callers can drive the bar.
        def decorated(*args, **kwargs):
            self.run()
            outcome = func(*args, **kwargs)
            self.stop()
            return outcome
        decorated.increment = self.increment
        decorated.status = self.status
        decorated.interrupt = self.interrupted
        return decorated
| [
"anno_schachner@gmx.at"
] | anno_schachner@gmx.at |
771ccd414db8453576bddfd17a4d33078ee24a06 | 49b86797ac3b08f256dc8b2e4e076380dd218bdd | /venv/Scripts/pip3-script.py | 6a2384ebd80b9fc6a73bda4c8f73319884e29a24 | [] | no_license | AlphaJet45/ParkingRennes | 1e8837fa6da6f04f0bf12bd6ee15003197ee0efe | 1915cb3cdc57b10c8e87a6c8182529a6b142d700 | refs/heads/master | 2020-04-07T02:53:11.283797 | 2018-11-17T13:12:10 | 2018-11-17T13:12:10 | 157,984,545 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 431 | py | #!"C:\Users\damie\Documents\MDS\Outils de la data\Parking\venv\Scripts\python.exe"
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # setuptools-generated console launcher: strip the "-script.py"/".exe"
    # suffix so argv[0] matches the console-script name, then dispatch to
    # pip's registered "pip3" entry point and exit with its return code.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==10.0.1', 'console_scripts', 'pip3')()
    )
| [
"damien.m.45@gmail.com"
] | damien.m.45@gmail.com |
ab952754fa0a7759070cf3e509300c8ac2bcf78b | 17c7495075c8a48f9fe7337610ef1490159c66e8 | /addons/udes_common/tests/test_selection_display_name.py | ad66bb2bf2c96f517a56e45ca148ef705332b5b0 | [] | no_license | unipartdigital/udes-open | 7ad017843348712d2b33b080e5914724d2a76244 | 0f69491b1538892c1921ae8063d9ea269e15d9ce | refs/heads/14.0 | 2023-09-01T02:44:41.653640 | 2023-08-31T15:52:14 | 2023-08-31T15:52:14 | 138,622,881 | 7 | 6 | null | 2023-09-14T16:20:39 | 2018-06-25T16:40:40 | Python | UTF-8 | Python | false | false | 4,555 | py | """Tests for selection field display name"""
from odoo.exceptions import ValidationError
from odoo.tests import common, tagged
@tagged("post_install")
class TestSelectionDisplayName(common.SavepointCase):
    """Tests for selection_display_name()"""
    @classmethod
    def setUpClass(cls):
        """
        Pull in recordsets defined in `base` modules data, to allow us to test
        without needing to mock selection fields everywhere
        NB: No related selection fields exist in `base` i.e
        my_field = fields.Selection(related="state")
        and it is not possible to mock fields in tests (as far as i am aware)
        so there is no unittest for this scenario, however it has been manually tested
        """
        super(TestSelectionDisplayName, cls).setUpClass()
        ResPartnerBank = cls.env["res.partner.bank"]
        IrFilters = cls.env["ir.filters"]
        # A random recordset on a model with a normal selection field
        cls.model_overview_report = cls.env.ref("base.report_ir_model_overview")
        # A random recordset on a model with a generated selection field (function)
        cls.main_partner = cls.env.ref("base.main_partner")
        # There are no recordsets on res.partner.bank, but it has a lambda generated
        # selection field, so we just create one
        cls.res_partner_bank_recordset = ResPartnerBank.create(
            dict(
                acc_number="60-16-13 31926819",
                partner_id=cls.main_partner.id,
            )
        )
        # There are no recordsets on ir.filters, but it has a generated
        # selection field (by string name of function), so we just create one
        cls.ir_filters_recordset = IrFilters.create(
            dict(
                name="TestABC",
                user_id=cls.env.user.id,
                model_id="res.groups",
            )
        )
    def test_basic_selection_field(self):
        """Check function gives the expected value from a normal selection field"""
        # Raw stored value vs the human-readable label of the same field.
        normal_output = self.model_overview_report.report_type
        self.assertEqual(normal_output, "qweb-pdf")
        alt_output = self.model_overview_report.selection_display_name("report_type")
        self.assertEqual(alt_output, "PDF")
    def test_generated_selection_field(self):
        """Check function gives the expected value from a selection field
        which uses a function to determine its selection values
        (where the function is defined in the field as a callable that lives outside the class)
        """
        normal_output = self.main_partner.lang
        self.assertEqual(normal_output, "en_US")
        alt_output = self.main_partner.selection_display_name("lang")
        self.assertEqual(alt_output, "English (US)")
    def test_generated_selection_field2(self):
        """Check function gives the expected value from a selection field
        which uses a function to determine its selection values
        (where the function is defined in the field as a string)
        """
        normal_output = self.ir_filters_recordset.model_id
        self.assertEqual(normal_output, "res.groups")
        alt_output = self.ir_filters_recordset.selection_display_name("model_id")
        self.assertEqual(alt_output, "Access Groups")
    def test_generated_lambda_selection_field(self):
        """Check function gives the expected value from a selection field
        which uses a lambda function to determine its selection values
        """
        normal_output = self.res_partner_bank_recordset.acc_type
        self.assertEqual(normal_output, "bank")
        alt_output = self.res_partner_bank_recordset.selection_display_name("acc_type")
        self.assertEqual(alt_output, "Normal")
    def test_fetching_nonexistent_field(self):
        """Ensure function fails with appropriate warning message if developer
        attempts to get a selection field which does not exist on the model
        in place of a KeyError
        """
        # "state" is not a field on res.partner.bank, so this must raise.
        with self.assertRaises(ValidationError):
            self.res_partner_bank_recordset.selection_display_name("state")
    def test_fetching_unset_selection_field(self):
        """Ensure function returns False if the selection field on the recordset is not set"""
        # Clear the field first; both the raw value and the display name
        # are then expected to be False.
        self.model_overview_report.report_type = False
        normal_output = self.model_overview_report.report_type
        self.assertEqual(normal_output, False)
        alt_output = self.model_overview_report.selection_display_name("report_type")
        self.assertEqual(alt_output, False)
| [
"peteralabaster@gmail.com"
] | peteralabaster@gmail.com |
8005f712888eadc76c4175f8f80c9cffde0caff8 | 5793c121656d8db90eec3d8e271ef1e94c84fe69 | /shape_generator/example_cross_sections.py | 73cedb9dc1cbd33227674711f28e8687be0ff0c3 | [
"MIT"
] | permissive | timebridge/SWMM_xsections_shape_generator | 01c02cd145d8d771fcf1ed664912b71c99046c22 | f237e472ab5e4ab13c332c69d3d9cba6546bf943 | refs/heads/master | 2023-01-15T05:25:25.210545 | 2020-11-18T16:02:40 | 2020-11-18T16:02:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,986 | py | from . import Circle, CrossSection
class EggSection(CrossSection):
    """
    egg shaped cross section
    .. figure:: images/ei.gif
        :align: center
        :alt: egg
        :figclass: align-center
        Egg Section (DWA-A 110, 2006)
    """
    def __init__(self, r, label=None, description=None):
        """init
        egg shaped cross section
        Args:
            r (float): radius of the egg
            label (str): name/label/number of the cross section; default = "Ei <width>/<height>"
            description (str): optional description of the cross section
        """
        # Standard egg proportions: total height 3r, total width 2r,
        # large side-arc radius R = 3r, bottom-arc radius roh = r/2.
        R = 3 * r
        roh = r / 2
        height = r * 3
        width = r * 2
        # h1 = roh - (r + roh) / (R - roh) * roh
        # NOTE(review): h1 is hard-coded to 0.2*r instead of the commented
        # geometric formula above — confirm this approximation is intended.
        h1 = 0.2 * r
        if label is None:
            label = 'Ei {:0.0f}/{:0.0f}'.format(width, height)
        CrossSection.__init__(self, label=label, description=description, width=width, height=height)
        # Build the boundary piecewise; presumably add() appends contour
        # segments (arc or straight run) — verify against CrossSection.add.
        self.add(Circle(roh, x_m=roh))
        self.add(h1)
        self.add(Circle(R, x_m=2 * r, y_m=-(R - r)))
        self.add(2 * r)
        self.add(Circle(r, x_m=2 * r))
class CircleSection(CrossSection):
    """
    circle cross section
    .. figure:: images/kreis1.gif
        :align: center
        :alt: circle
        :figclass: align-center
        Circle Section (DWA-A 110, 2006)
    """
    def __init__(self, r, label=None, description=None):
        """init
        circle cross section
        Args:
            r (float): radius of the circle
            label (str): name/label/number of the cross section; dafault = "DN <diameter>"
            description (str): optional description of the cross section
        """
        # A circular pipe is fully described by its diameter.
        diameter = 2 * r
        if label is None:
            # Conventional "DN <diameter>" nominal-size label.
            label = 'DN {:0.0f}'.format(diameter)
        CrossSection.__init__(self, label=label, description=description,
                              width=diameter, height=diameter)
        # Single full circle centred at x = r defines the whole contour.
        self.add(Circle(r, x_m=r))
| [
"markus.pichler@tugraz.at"
] | markus.pichler@tugraz.at |
b0d451dea5d617604a2cb9d1c05eab2bd487e4d5 | bae75bf1de75fb1b76e19b0d32c778e566de570a | /smodels-database/8TeV/CMS/CMS-EXO-12-026/validation/TRHadUM1_2EqMassAx.py | 096085bd68122b2c206bfd2aa0ce12b8264addfb | [] | no_license | andlessa/RDM | 78ae5cbadda1875c24e1bb726096b05c61627249 | ac6b242871894fee492e089d378806c2c2e7aad8 | refs/heads/master | 2023-08-16T00:47:14.415434 | 2021-09-21T20:54:25 | 2021-09-21T20:54:25 | 228,639,778 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,099 | py | validationData = [{'slhafile': 'TRHadUM1_1000_1000.slha', 'error': 'no results', 'axes': {'x': 1000.0, 'y': 1000.0}}, {'slhafile': 'TRHadUM1_1020_1020.slha', 'error': 'no results', 'axes': {'x': 1020.0, 'y': 1020.0}}, {'slhafile': 'TRHadUM1_1040_1040.slha', 'error': 'no results', 'axes': {'x': 1040.0, 'y': 1040.0}}, {'slhafile': 'TRHadUM1_1060_1060.slha', 'error': 'no results', 'axes': {'x': 1060.0, 'y': 1060.0}}, {'slhafile': 'TRHadUM1_1080_1080.slha', 'error': 'no results', 'axes': {'x': 1080.0, 'y': 1080.0}}, {'slhafile': 'TRHadUM1_1100_1100.slha', 'error': 'no results', 'axes': {'x': 1100.0, 'y': 1100.0}}, {'slhafile': 'TRHadUM1_1120_1120.slha', 'error': 'no results', 'axes': {'x': 1120.0, 'y': 1120.0}}, {'slhafile': 'TRHadUM1_1140_1140.slha', 'error': 'no results', 'axes': {'x': 1140.0, 'y': 1140.0}}, {'slhafile': 'TRHadUM1_1160_1160.slha', 'error': 'no results', 'axes': {'x': 1160.0, 'y': 1160.0}}, {'slhafile': 'TRHadUM1_1180_1180.slha', 'error': 'no results', 'axes': {'x': 1180.0, 'y': 1180.0}}, {'slhafile': 'TRHadUM1_1200_1200.slha', 'error': 'no results', 'axes': {'x': 1200.0, 'y': 1200.0}}, {'slhafile': 'TRHadUM1_1220_1220.slha', 'error': 'no results', 'axes': {'x': 1220.0, 'y': 1220.0}}, {'slhafile': 'TRHadUM1_1240_1240.slha', 'error': 'no results', 'axes': {'x': 1240.0, 'y': 1240.0}}, {'slhafile': 'TRHadUM1_1260_1260.slha', 'error': 'no results', 'axes': {'x': 1260.0, 'y': 1260.0}}, {'slhafile': 'TRHadUM1_1280_1280.slha', 'error': 'no results', 
'axes': {'x': 1280.0, 'y': 1280.0}}, {'slhafile': 'TRHadUM1_1300_1300.slha', 'error': 'no results', 'axes': {'x': 1300.0, 'y': 1300.0}}, {'slhafile': 'TRHadUM1_1320_1320.slha', 'error': 'no results', 'axes': {'x': 1320.0, 'y': 1320.0}}, {'slhafile': 'TRHadUM1_1340_1340.slha', 'error': 'no results', 'axes': {'x': 1340.0, 'y': 1340.0}}, {'slhafile': 'TRHadUM1_1360_1360.slha', 'error': 'no results', 'axes': {'x': 1360.0, 'y': 1360.0}}, {'slhafile': 'TRHadUM1_1380_1380.slha', 'error': 'no results', 'axes': {'x': 1380.0, 'y': 1380.0}}, {'slhafile': 'TRHadUM1_1400_1400.slha', 'error': 'no results', 'axes': {'x': 1400.0, 'y': 1400.0}}, {'slhafile': 'TRHadUM1_1420_1420.slha', 'error': 'no results', 'axes': {'x': 1420.0, 'y': 1420.0}}, {'slhafile': 'TRHadUM1_1440_1440.slha', 'error': 'no results', 'axes': {'x': 1440.0, 'y': 1440.0}}, {'slhafile': 'TRHadUM1_1460_1460.slha', 'error': 'no results', 'axes': {'x': 1460.0, 'y': 1460.0}}, {'slhafile': 'TRHadUM1_1480_1480.slha', 'error': 'no results', 'axes': {'x': 1480.0, 'y': 1480.0}}, {'slhafile': 'TRHadUM1_1500_1500.slha', 'error': 'no results', 'axes': {'x': 1500.0, 'y': 1500.0}}, {'slhafile': 'TRHadUM1_1550_1550.slha', 'error': 'no results', 'axes': {'x': 1550.0, 'y': 1550.0}}, {'slhafile': 'TRHadUM1_1600_1600.slha', 'error': 'no results', 'axes': {'x': 1600.0, 'y': 1600.0}}, {'slhafile': 'TRHadUM1_1650_1650.slha', 'error': 'no results', 'axes': {'x': 1650.0, 'y': 1650.0}}, {'slhafile': 'TRHadUM1_1700_1700.slha', 'error': 'no results', 'axes': {'x': 1700.0, 'y': 1700.0}}, {'slhafile': 'TRHadUM1_1750_1750.slha', 'error': 'no results', 'axes': {'x': 1750.0, 'y': 1750.0}}, {'slhafile': 'TRHadUM1_1800_1800.slha', 'error': 'no results', 'axes': {'x': 1800.0, 'y': 1800.0}}, {'slhafile': 'TRHadUM1_1850_1850.slha', 'error': 'no results', 'axes': {'x': 1850.0, 'y': 1850.0}}, {'slhafile': 'TRHadUM1_1900_1900.slha', 'error': 'no results', 'axes': {'x': 1900.0, 'y': 1900.0}}, {'slhafile': 'TRHadUM1_1950_1950.slha', 'error': 'no results', 
'axes': {'x': 1950.0, 'y': 1950.0}}, {'slhafile': 'TRHadUM1_400_400.slha', 'axes': {'x': 400.0}, 't': 0.12478996423574594, 'signal': 356.83, 'UL': 3.9098280556377634, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_420_420.slha', 'axes': {'x': 420.0}, 't': 0.12478996423574594, 'signal': 262.683, 'UL': 3.5613225806451614, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_440_440.slha', 'axes': {'x': 440.0}, 't': 0.12478996423574594, 'signal': 195.812, 'UL': 3.2128171056525603, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_460_460.slha', 'axes': {'x': 460.0}, 't': 0.12478996423574594, 'signal': 147.49200000000002, 'UL': 2.8643116306599588, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_480_480.slha', 'axes': {'x': 480.0}, 't': 0.12478996423574594, 'signal': 112.24099999999999, 'UL': 2.515806155667357, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_500_500.slha', 'axes': {'x': 500.0}, 't': 0.12478996423574594, 'signal': 85.5847, 'UL': 2.1673006806747557, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_520_520.slha', 'axes': {'x': 520.0}, 't': 0.12478996423574594, 'signal': 66.0189, 'UL': 1.9066133516148691, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_540_540.slha', 'axes': {'x': 540.0}, 't': 0.12478996423574594, 'signal': 51.174699999999994, 'UL': 1.6489229189518586, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_560_560.slha', 'axes': {'x': 560.0}, 't': 0.12478996423574594, 'signal': 39.9591, 'UL': 1.3912324862888483, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_580_580.slha', 'axes': {'x': 580.0}, 't': 0.12478996423574594, 'signal': 31.3654, 'UL': 1.133542053625838, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_600_600.slha', 'axes': {'x': 600.0}, 't': 0.12478996423574594, 'signal': 
24.8009, 'UL': 0.8877338919241085, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_620_620.slha', 'axes': {'x': 620.0}, 't': 0.12478996423574594, 'signal': 19.6331, 'UL': 0.900095072017483, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_640_640.slha', 'axes': {'x': 640.0}, 't': 0.12478996423574594, 'signal': 15.5809, 'UL': 0.9124562521108572, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_660_660.slha', 'axes': {'x': 660.0}, 't': 0.12478996423574594, 'signal': 12.539299999999999, 'UL': 0.9248174322042316, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_680_680.slha', 'axes': {'x': 680.0}, 't': 0.12478996423574594, 'signal': 10.0516, 'UL': 0.937178612297606, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_700_700.slha', 'axes': {'x': 700.0}, 't': 0.12478996423574594, 'signal': 8.1141, 'UL': 0.9494650238379022, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_720_720.slha', 'axes': {'x': 720.0}, 't': 0.12478996423574594, 'signal': 6.56729, 'UL': 0.9547053893524037, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_740_740.slha', 'axes': {'x': 740.0}, 't': 0.12478996423574594, 'signal': 5.3260499999999995, 'UL': 0.959945754866905, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_760_760.slha', 'axes': {'x': 760.0}, 't': 0.12478996423574594, 'signal': 4.336880000000001, 'UL': 0.9651861203814065, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_780_780.slha', 'axes': {'x': 780.0}, 't': 0.12478996423574594, 'signal': 3.5421099999999996, 'UL': 0.9704264858959079, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_800_800.slha', 'axes': {'x': 800.0}, 't': 0.12478996423574594, 'signal': 2.89588, 'UL': 0.9756668514104093, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_820_820.slha', 
'axes': {'x': 820.0}, 't': 0.12478996423574594, 'signal': 2.37168, 'UL': 0.9964521451776649, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_840_840.slha', 'axes': {'x': 840.0}, 't': 0.12478996423574594, 'signal': 1.9517200000000001, 'UL': 1.017611536040609, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_860_860.slha', 'axes': {'x': 860.0}, 't': 0.12478996423574594, 'signal': 1.60403, 'UL': 1.0387709269035534, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_880_880.slha', 'axes': {'x': 880.0}, 't': 0.12478996423574594, 'signal': 1.32077, 'UL': 1.0599303177664976, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_900_900.slha', 'axes': {'x': 900.0}, 't': 0.12478996423574594, 'signal': 1.09501, 'UL': 1.0817074977325407, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_920_920.slha', 'axes': {'x': 920.0}, 't': 0.12478996423574594, 'signal': 0.907494, 'UL': 1.1148627935100273, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_940_940.slha', 'axes': {'x': 940.0}, 't': 0.12478996423574594, 'signal': 0.753768, 'UL': 1.1480180892875138, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_960_960.slha', 'axes': {'x': 960.0}, 't': 0.12478996423574594, 'signal': 0.626876, 'UL': 1.1811733850650004, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}, {'slhafile': 'TRHadUM1_980_980.slha', 'axes': {'x': 980.0}, 't': 0.12478996423574594, 'signal': 0.5224949999999999, 'UL': 1.214328680842487, 'condition': 0.0, 'dataset': None, 'kfactor': 1.0}] | [
"lessa.a.p@gmail.com"
] | lessa.a.p@gmail.com |
1a8a98d1d72ed575dc8ecaae762fb47701f670cd | 859e55b5463ed97bde29d2e1528867a17d75a44c | /tests/test_tiktok-bot.py | 1f41a673db436fd1635d5c49fda7ef5950caa165 | [
"MIT"
] | permissive | steffanjensen/tiktok_bot | 9005aaf187f56c36fd9ece98bf6c96b9cbe75edd | 30404c0cd9ae1d52eb5b8818fbf282af1f68ee7a | refs/heads/master | 2020-09-20T16:20:18.220850 | 2019-11-26T16:06:45 | 2019-11-26T16:06:45 | 224,535,231 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 91 | py | from tiktok_bot import __version__
def test_version():
    """The package reports the expected version string."""
    assert __version__ == "0.6.1"
| [
"eskemerov@gmail.com"
] | eskemerov@gmail.com |
fbc1bd3dfea20f09348cd0214822aab875ca4c53 | 5fb547b95a08282f4037901fac5f2d6e25190395 | /src/app_with_sidebar.py | 85f21b31250e0815ac784cf08e1fd7ac58b929db | [] | no_license | TomHarned/fooled_by_covid | 10a965705a1f74a07e9d5aa29a5aa2e335798360 | 33a8157f064ddfc5ec3c0b70283ef0659c3656c7 | refs/heads/main | 2023-03-21T11:30:14.704050 | 2021-03-02T03:00:59 | 2021-03-02T03:00:59 | 315,403,543 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,981 | py | import dash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
import dash_bootstrap_components as dbc
import plotly.express as px
import pandas as pd
import json
from import_and_calculate import import_data, group_filter_and_calculate
from filter_and_display_graph import display_graph
# from layout_files.layouts import sidebar
# !!!save this off in a different file and try to mimic the example without the
# sidebar, then add the sidebar back in later
df = import_data()
# This function allows for more adjustment in the future
df = group_filter_and_calculate(df)
df['date'] = df.index
with open('../data/states.json', 'r') as f:
states = json.load(f)
# Create graph to display in main pane
app = dash.Dash(external_stylesheets=[dbc.themes.BOOTSTRAP])
# the style arguments for the sidebar. We use position:fixed and a fixed width
# SIDEBAR_STYLE = {
# "position": "fixed",
# "top": 0,
# "left": 0,
# "bottom": 0,
# "width": "16rem",
# "padding": "2rem 1rem",
# "background-color": "#f8f9fa",
# }
#
# # the styles for the main content position it to the right of the sidebar and
# # add some padding.
# CONTENT_STYLE = {
# "margin-left": "18rem",
# "margin-right": "2rem",
# "padding": "2rem 1rem",
# }
#
# sidebar = html.Div(
# [
# html.H2("Sidebar", className="display-4"),
# html.Hr(),
# html.P(
# "A simple sidebar layout with navigation links", className="lead"
# ),
# dcc.Dropdown(
# id='state-input',
# options=[
# states
# ],
# value='All'
# ),
# # style=SIDEBAR_STYLE
# ])
# content = html.Div(id="page-content", style=CONTENT_STYLE)
# app.layout = html.Div([dcc.Location(id="url"), sidebar, content])
# Top-level page layout: a single dropdown selecting the state whose COVID
# data is graphed. Fixed: the original had a SyntaxError (missing comma after
# id and an empty `options=`), and its id 'select-state' matched nothing —
# the callback below listens on 'state-selection'.
app.layout = html.Div([
    html.Div([
        dcc.Dropdown(
            id='state-selection',
            # NOTE(review): `states` is loaded from states.json; assumed to be
            # an iterable of state names usable as option labels — confirm.
            options=[{'label': s, 'value': s} for s in states],
            value='All',
        )
    ])
])
@app.callback(
    Output(component_id='output-graph', component_property='children'),
    Input(component_id='state-selection', component_property='value')
)
def display_graph(df: pd.DataFrame,
                  state: str = 'All') -> pd.DataFrame:
    """
    Takes the full cleaned COVID data set and filters it by state
    NOTE(review): Dash passes the single Input value as the FIRST positional
    argument, so the dropdown value arrives in `df`, not `state` — the
    signature appears inconsistent with the decorator; confirm intent.
    NOTE(review): this also shadows the `display_graph` imported from
    filter_and_display_graph at the top of the file, and the function
    actually returns a plotly Figure, not a DataFrame as annotated.
    Inputs:
        df (pd.DataFrame): Dataframe containing the cleaned NYT COVID dataset
        state (str): The full name of the state to filter by, e.g. "Ohio"
    Returns:
        df_state (pd.DataFrame): Dataframe containing cleaned NYT COVID data for
        the selected state.
    """
    if state == 'All':
        # No state filter: aggregate daily totals over all states.
        df_state = df.groupby('date').sum()
    else:
        state_filter_exp = df.state == state
        df_state = df.copy().loc[state_filter_exp]
        df_state = df_state.groupby(['date', 'state']).sum()
    fig = px.bar(df_state, x='date', y='daily_cases')
    return fig
@app.callback(Output("page-content", "children"), [Input("url", "pathname")])
def render_page_content(pathname):
    """Route the current URL path to the matching page content.

    Fixed: `page_one` was previously built only inside the "/" branch, so
    navigating to "/page-1" raised a NameError. It is now built up front and
    shared by both routes. Unknown paths return a 404 jumbotron.
    """
    # NOTE(review): `fig` is not defined at module scope in this file (the
    # code that built the main-pane figure is commented out) — confirm where
    # the initial figure should come from.
    page_one = html.Div(children=[
        html.H1(children='Hello Dash'),
        html.Div(children='''
            Dash: A web application framework for Python.
        '''),
        dcc.Graph(
            id='output-graph',
            figure=fig
        )
    ])
    if pathname == "/":
        return page_one
    elif pathname == "/page-1":
        return page_one
    elif pathname == "/page-2":
        return html.P("Oh cool, this is page 2!")
    # If the user tries to reach a different page, return a 404 message
    return dbc.Jumbotron(
        [
            html.H1("404: Not found", className="text-danger"),
            html.Hr(),
            html.P(f"The pathname {pathname} was not recognised..."),
        ]
    )
if __name__ == "__main__":
    # Serve the Dash app locally on port 8080 with debug mode enabled.
    app.run_server(port=8080, debug=True)
| [
"thomas.harned@gmail.com"
] | thomas.harned@gmail.com |
d1fd86edc5d3190435ea38f5012768e9d9dc15a1 | 5c39e166d0c6ec68f71c4c7627956fdd6f28c15e | /andrew/hypothesis_testing/exercise_1.py | a41999f0c624325725871690b4b0bb155c55d97c | [] | no_license | zero-one-group/zot-internship | a8ae21029086188f4c8ca348c50e445a86a01179 | 9bbc421252b42f1bc85de66d22f48266c59113f0 | refs/heads/master | 2023-01-12T07:48:39.766136 | 2020-10-15T09:03:24 | 2020-10-15T09:03:24 | 279,190,490 | 3 | 3 | null | 2020-11-08T21:30:30 | 2020-07-13T02:32:56 | Python | UTF-8 | Python | false | false | 1,376 | py | '''
Let X ~ Beta(1.8, 1). An unsuspecting researcher has 100 i.i.d samples of X, and would like to conduct the following test at 10% significance - H0: E(X) = 2, H1: E(X) < 1.
■ What’s the probability that the researcher rejects the null hypothesis?
■ What does 10% in 10% significance level mean? Show your argument using a simulation.
■ Explain, in your own words, what Type I and Type II errors are.
'''
import numpy as np
from scipy import stats
num_of_samples = 100
beta_dist = np.random.beta(1.8, 1, size=num_of_samples)
sample_mean = np.mean(beta_dist)
# Fixed: np.std defaults to ddof=0 (population formula); the t-statistic
# below requires the unbiased SAMPLE standard deviation (ddof=1).
sample_std = np.std(beta_dist, ddof=1)
# NOTE(review): the module docstring states H0: E(X) = 2, but the tested
# null value here is 0.65 (E of Beta(1.8, 1) is 1.8/2.8 ~= 0.643) — confirm.
h_0 = 0.65
alpha = 0.1
# Test statistic is not normal and population standard deviation is unknown, use T-test
t_value = (sample_mean-h_0) / (sample_std/np.sqrt(num_of_samples))
# NOTE(review): for the left-tailed alternative H1: E(X) < h_0 the critical
# value would conventionally be ppf(alpha) (negative); this prints the
# positive right-tail quantile — confirm intent.
critical_value = stats.t.ppf(1-alpha, df=num_of_samples-1) #one-sided test
p_value = stats.t.sf(np.abs(t_value), num_of_samples-1) #one-sided test
print("T-value =", t_value)
print("Critical value =", critical_value)
print("Probability of rejecting null hypothesis =", 1-p_value)
| [
"AHW@macbook-pro-2.mynet"
] | AHW@macbook-pro-2.mynet |
cfb87afb87aaff28c9ee4fdbd07bbda5cbe18cf5 | 9be731c909b1b81acb57515e70202a65d4cf4c0b | /dvr_2d.py | cfbe4dc245a77fa8d7238ad9e272328537def0fe | [
"MIT"
] | permissive | adatar2003/dvr_py | 01db12508af841172290e3f54b86d804806ba8bf | d549c56647828e0652c351e54f219de6c8fdc220 | refs/heads/master | 2021-12-22T04:38:03.987187 | 2017-10-10T19:06:46 | 2017-10-10T19:06:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,186 | py | """
Use a simple Discrete Variable Representation method to solve
one-dimensional potentials.
A good general introduction to DVR methods is
Light and Carrington, Adv. Chem. Phys. 114, 263 (2000)
"""
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import scipy.sparse.linalg as sla
import scipy.sparse as sp
import scipy.special.orthogonal as ortho
import dvr_1d
# These are the "Tableau 20" colors as RGB.
tableau20 = [(31, 119, 180), (174, 199, 232), (255, 127, 14), (255, 187, 120),
             (44, 160, 44), (152, 223, 138), (214, 39, 40), (255, 152, 150),
             (148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148),
             (227, 119, 194), (247, 182, 210), (127, 127, 127), (199, 199, 199),
             (188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229)]
# These are the "Tableau 10 Medium" colors as RGB.
tableau10m = [(114, 158, 206), (255, 158, 74), (103, 191, 92), (237, 102, 93),
              (173, 139, 201), (168, 120, 110), (237, 151, 202),
              (162, 162, 162), (205, 204, 93), (109, 204, 218)]
# These are the Tableau "Color Blind 10" colors as RGB
colorblind10 = [(0, 107, 164), (255, 128, 14), (171, 171, 171), (89, 89, 89),
                (95, 158, 209), (200, 82, 0), (137, 137, 137), (162, 200, 236),
                (255, 188, 121), (207, 207, 207)]
# Scale the RGB values to [0, 1] range, which is the format matplotlib accepts.
for i in range(len(tableau20)):
    r, g, b = tableau20[i]
    tableau20[i] = (r / 255., g / 255., b / 255.)
# This second loop rescales both remaining palettes in one pass; tableau10m
# and colorblind10 both contain exactly 10 entries, so sharing the index is safe.
for i in range(len(tableau10m)):
    r, g, b = tableau10m[i]
    tableau10m[i] = (r / 255., g / 255., b / 255.)
    r, g, b = colorblind10[i]
    colorblind10[i] = (r / 255., g / 255., b / 255.)
class DVR(object):
    """2-D DVR built as the tensor product of a 1-D DVR on both axes.

    NOTE: this module is Python 2 (print statements, builtin `reduce`).
    """
    def __cartesian_product(self, arrays):
        """A fast cartesion product function that I blatantly stole from
        user senderle on stackoverflow.com"""
        broadcastable = np.ix_(*arrays)
        broadcasted = np.broadcast_arrays(*broadcastable)
        # `reduce` here is the Python 2 builtin (needs functools on py3).
        rows, cols = reduce(np.multiply, broadcasted[0].shape), len(broadcasted)
        out = np.empty(rows * cols, dtype=broadcasted[0].dtype)
        start, end = 0, rows
        for a in broadcasted:
            out[start:end] = a.reshape(-1)
            start, end = end, end + rows
        return out.reshape(cols, rows).T
    def __init__(self, dvr1d):
        """Build the 2-D grid from a 1-D DVR.

        @param[in] dvr1d a 1-D DVR object providing grid points `x`,
                         kinetic matrix `t()` and point count `npts`
        """
        self.dvr1d = dvr1d
        self.x = dvr1d.x
        # Both axes deliberately share the same 1-D grid.
        self.y = dvr1d.x
        # xy holds all (x, y) grid pairs, one row per 2-D grid point.
        self.xy = np.fliplr(self.__cartesian_product([self.x, self.y]))
    def v(self, V):
        """Return the potential matrix with the given potential.
        Usage:
            v_matrix = self.v(V)
        @param[in] V potential function
        @returns v_matrix potential matrix
        """
        # Potential is diagonal in the DVR basis.
        return sp.diags(diagonals=V(self.xy), offsets=0)
    def t(self):
        """Return the kinetic energy matrix.
        Usage:
            T = self.t()
        @returns T kinetic energy matrix
        """
        # 2-D kinetic energy: T (x) 1 + 1 (x) T (Kronecker sum of the 1-D T).
        t1d = self.dvr1d.t()
        eye = sp.identity(self.dvr1d.npts)
        return sp.kron(eye, t1d) + sp.kron(t1d, eye)
    def h(self, V):
        """Return the hamiltonian matrix with the given potential.
        Usage:
            H = self.h(V)
        @param[in] V potential function
        @returns H potential matrix
        """
        return self.t() + self.v(V)
    def plot(self, V, E, U, **kwargs):
        """Surface-plot the potential and the first `nplot` eigenfunctions,
        each offset vertically by its eigenvalue.

        @param[in] V potential function
        @param[in] E eigenvalues (1-D array)
        @param[in] U eigenvectors, one per column
        Keyword args: doshow, nplot, uscale, xmin/xmax, ymin/ymax, zmin/zmax.
        """
        doshow = kwargs.get('doshow', False)
        nplot = kwargs.get('nplot', 5)
        uscale = kwargs.get('uscale', 1.)
        xmin = kwargs.get('xmin', self.xy[:,0].min())
        xmax = kwargs.get('xmax', self.xy[:,0].max())
        ymin = kwargs.get('ymin', self.xy[:,1].min())
        ymax = kwargs.get('ymax', self.xy[:,1].max())
        zmin = kwargs.get('zmin', np.ceil(V(self.xy).min() - 1.))
        zmax = kwargs.get('zmax',
                          np.floor(max(U.max()+E.max()+1.,
                                       V(self.xy).max()+1.)))
        npts = self.dvr1d.npts
        xy = self.xy.reshape((npts, npts, 2))
        vp = V(self.xy).reshape((npts, npts))
        colors = tableau20
        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')
        ax.plot_surface(xy[:,:,0], xy[:,:,1], vp,
                        alpha=0.15, rstride=2, cstride=2)
        for i in range(nplot):
            if i == 0:
                # Ground state plotted as |u| so its sign convention
                # doesn't flip the surface below the energy offset.
                ax.plot_surface(xy[:,:,0], xy[:,:,1],
                                uscale * abs(U[:, i].reshape((npts, npts))) + E[i],
                                alpha=0.3, color=colors[i],
                                rstride=2, cstride=2)
            else:
                ax.plot_surface(xy[:,:,0], xy[:,:,1],
                                uscale * U[:, i].reshape((npts, npts)) + E[i],
                                alpha=0.3, color=colors[i],
                                rstride=2, cstride=2)
        ax.set_xlim3d(xmin, xmax)
        ax.set_ylim3d(ymin, ymax)
        ax.set_zlim3d(zmin, zmax)
        if doshow: plt.show()
        return
    def test_potential(self, V, num_eigs = 5, **kwargs):
        """Diagonalize H for potential V, print the lowest `num_eigs`
        energies, optionally plot, and return (E, U)."""
        h = self.h(V)
        # Get the eigenpairs
        # There are multiple options here.
        # If the user is asking for all of the eigenvalues,
        # then we need to use np.linalg.eigh()
        if num_eigs == h.shape[0]:
            E, U = np.linalg.eigh(h)
        # But if we don't need all eigenvalues, only the smallest ones,
        # then when the size of the H matrix becomes large enough, it is
        # better to use sla.eigsh() with a shift-invert method. Here we
        # have to have a good guess for the smallest eigenvalue so we
        # ask for eigenvalues closest to the minimum of the potential.
        else:
            E, U = sla.eigsh(h, k=num_eigs, which='LM',
                             sigma=V(self.xy).min())
        precision = kwargs.get('precision', 8)
        # Print and plot stuff
        print 'The first {n:d} energies are:'.format(n=num_eigs)
        print np.array_str(E[:num_eigs], precision=precision)
        doshow = kwargs.get('doshow', False)
        if doshow:
            uscale = kwargs.get('uscale', 1.)
            xmin = kwargs.get('xmin', self.xy[:,0].min())
            xmax = kwargs.get('xmax', self.xy[:,0].max())
            ymin = kwargs.get('ymin', self.xy[:,1].min())
            ymax = kwargs.get('ymax', self.xy[:,1].max())
            zmin = kwargs.get('zmin', np.ceil(V(self.xy).min() - 1.))
            zmax = kwargs.get('zmax',
                              np.floor(max(U.max()+E.max()+1.,
                                           V(self.xy).max()+1.)))
            self.plot(V, E, U, nplot=num_eigs,
                      xmin=xmin, xmax=xmax,
                      ymin=ymin, ymax=ymax,
                      zmin=zmin, zmax=zmax,
                      uscale=uscale, doshow=doshow)
        return E, U
    def sho_test(self, k = 1., num_eigs=5, precision=8,
                 uscale=1., doshow=False):
        """Sanity-check the 2-D DVR against a simple harmonic oscillator
        potential with force constant k; returns (E, U)."""
        print 'Testing 2-D DVR with an SHO potential'
        vF = VFactory()
        V = vF.sho(k=k)
        E, U = self.test_potential(V, doshow=doshow, num_eigs=num_eigs,
                                   precision=precision, uscale=uscale,
                                   xmin=-3.5, xmax=3.5,
                                   ymin=-3.5, ymax=3.5,
                                   zmin=-0.05, zmax=4.)
        print
        return E, U
# Factory functions to build different potentials:
# A factory is a function that makes a function.
class VFactory(object):
    """Factory functions to build different potentials.

    A factory is a method that builds and returns another function
    (the potential ``V``), closed over the supplied parameters.

    NOTE(review): this class previously carried several large blocks of
    commented-out 1-D potential factories (square well, double well,
    power, Morse, sombrero, Woods-Saxon).  That dead code has been
    removed; recover it from version control if a 1-D factory is ever
    needed again.
    """

    def sho(self, k = 1., x0 = 0., y0 = 0.):
        """Build a two-dimensional harmonic oscillator potential.

            V(x, y) = 1/2 * k * ((x - x0)^2 + (y - y0)^2)

        Keyword arguments
        @param[in] k   wavenumber of the SHO potential (default=1)
        @param[in] x0  x-displacement from origin (default=0)
        @param[in] y0  y-displacement from origin (default=0)
        @returns   V   callable; V(xy) takes an (N, 2) array of (x, y)
                       points and returns an (N,) array of energies
        """
        def V(xy):
            # Squared displacement along each axis, scaled by k/2.
            return 0.5 * k * (np.square(xy[:,0] - x0)
                              + np.square(xy[:,1] - y0))
        return V
| [
"richford@uw.edu"
] | richford@uw.edu |
33a71e71ff9018b19823a1c3481dabfbf256ef91 | f3360b809d7e8e26c8904365b5e4df0dca69225d | /userprofile/migrations/0005_catatanmodal_parent_id.py | ea8293c76b529176e1c90697977c68a4d2c09e6b | [] | no_license | cursecan/epayment | 0bcd272a6479847ad60507daf2cf74ee95002924 | be9df7034261fa9f9eaafb157309b4955b793cfb | refs/heads/master | 2020-03-15T05:52:34.556971 | 2018-07-30T12:01:21 | 2018-07-30T12:01:21 | 131,996,100 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 513 | py | # Generated by Django 2.0.4 on 2018-05-27 10:10
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated Django migration: adds a self-referential,
    # nullable one-to-one "parent_id" link to CatatanModal.
    # Must be applied after 0004_catatanmodal (which created the model).
    dependencies = [
        ('userprofile', '0004_catatanmodal'),
    ]
    operations = [
        migrations.AddField(
            model_name='catatanmodal',
            name='parent_id',
            # SET_NULL keeps child rows when the parent row is deleted.
            field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='userprofile.CatatanModal'),
        ),
    ]
| [
"anderi.setiawan@gmail.com"
] | anderi.setiawan@gmail.com |
1c8d7e664a52297cfd690bdbac1717ee6262c187 | a50386f9b6c0cc2c0789a84c3acedfd33a4eaf0f | /CursoOpenCVcomDaniel/esqueleto/test.py | 18cc78f446fc6a4fd48d48f195ac38f37d13e0f9 | [] | no_license | juanengml/RemoteWebCamWithOpenCV | 66b19e174d724b2584a7f1d07c5d9ee698ff0809 | caa4a0b52be1ac66bcb1b401485fb427746c31ef | refs/heads/master | 2020-03-23T02:49:56.853490 | 2019-07-23T01:15:33 | 2019-07-23T01:15:33 | 140,994,675 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 111 | py | from skimage.morphology import skeletonize
# NOTE(review): the original code called skeletonize("01.jpg"//255),
# which floor-divides a *string* by an int and raises a TypeError, and
# it referenced np without importing it.  The image must be loaded
# into an array first.
import numpy as np
from skimage.io import imread

# Load the image, binarise it to 0/1 (skeletonize expects a binary
# image), then scale the boolean skeleton back to an 8-bit 0/255 image.
img = imread("01.jpg")
ske = (skeletonize(img // 255) * 255).astype(np.uint8)
print(ske)
| [
"juanengml@gmail.com"
] | juanengml@gmail.com |
9b02a2388f7584c36eb653440a59a11092a4e816 | 76c9df5cd69e202d6bc9204616cc1d83a85de31c | /assignment3/HashRing.py | 8ee1901503c8d383e1021f48cae4c9fb2cf667ce | [
"MIT"
] | permissive | siddharth-daftari/Expense-Management-System-data-sharding | d805252aae23a243b916310d44c1a0f5ead8bcf4 | 3ba6464845434fd4a11ba80cddc34fdd53241a1c | refs/heads/master | 2021-01-13T16:37:46.445702 | 2017-01-19T14:56:20 | 2017-01-19T14:56:20 | 78,920,695 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,018 | py | import bisect
import hashlib
import md5  # legacy Python-2 module; retained, no longer used directly
class ConsistentHashRing(object):
    """Implement a consistent hashing ring.

    Node names are mapped onto a ring of MD5 hash positions (one per
    replica); keys are resolved by walking clockwise to the nearest
    node replica.
    """

    def __init__(self, replicas=1):
        """Create a new ConsistentHashRing.

        :param replicas: number of replicas (virtual nodes) per node.
        """
        self.replicas = replicas
        self._keys = []    # sorted list of replica hash positions
        self._nodes = {}   # replica hash position -> node object

    def _hash(self, key):
        """Given a string key, return its position on the ring.

        NOTE(review): uses hashlib.md5 and int()/range() instead of the
        Python-2-only ``md5`` module, ``long`` and ``xrange``; the
        produced hash values are identical, and this works on both
        Python 2 and Python 3.
        """
        if not isinstance(key, bytes):
            key = key.encode('utf-8')
        return int(hashlib.md5(key).hexdigest(), 16)

    def _repl_iterator(self, nodename):
        """Given a node name, return an iterable of replica hashes."""
        return (self._hash("%s:%s" % (nodename, i))
                for i in range(self.replicas))

    def __setitem__(self, nodename, node):
        """Add a node, given its name.

        The node is inserted once per replica.  Raises ValueError if
        the node name (or a colliding replica hash) is already present.
        """
        for hash_ in self._repl_iterator(nodename):
            if hash_ in self._nodes:
                raise ValueError("Node name %r is "
                                 "already present" % nodename)
            self._nodes[hash_] = node
            bisect.insort(self._keys, hash_)

    def __delitem__(self, nodename):
        """Remove a node, given its name."""
        for hash_ in self._repl_iterator(nodename):
            # will raise KeyError for nonexistent node name
            del self._nodes[hash_]
            index = bisect.bisect_left(self._keys, hash_)
            del self._keys[index]

    def __getitem__(self, key):
        """Return a node, given a key.

        The node replica with a hash value nearest but not less than
        that of the given key is returned.  If the key's hash is
        greater than the greatest replica hash, the lowest-hashed
        replica is returned (wrap-around).
        """
        hash_ = self._hash(key)
        start = bisect.bisect(self._keys, hash_)
        if start == len(self._keys):
            start = 0
        return self._nodes[self._keys[start]]
"siddharthrajesh.daftari@sjsu.edu"
] | siddharthrajesh.daftari@sjsu.edu |
132c2ed038ea4aad456f0596f83bff90e7476e46 | 4942ff3e1a416efe700bcc4dd9aeb2721b7cfb48 | /mysite/mysite/settings.py | 71e6ef76d44d89bb18606414b09fa862754510fe | [] | no_license | Micontrerash/gymnotita | 48e9e8a6ea086b3d6464d0506eaf647e5e6c991c | c500b796693a1adf50c4bb0cbd5e9225843c18d1 | refs/heads/master | 2020-09-26T12:04:06.363190 | 2019-12-06T05:23:37 | 2019-12-06T05:23:37 | 226,251,352 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,207 | py | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 2.2.7.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to version control — rotate it and
# load it from the environment before any production use.
SECRET_KEY = '87enh(87&)1b($oa!u$wl6bztd7yk&*_x8tfdi8n=cod@n3o4i'
# SECURITY WARNING: don't run with debug turned on in production!
# NOTE(review): DEBUG is True while ALLOWED_HOSTS includes a public
# pythonanywhere domain — confirm this file is never used in production.
DEBUG = True
ALLOWED_HOSTS = ['127.0.0.1', '.pythonanywhere.com']
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Project app.
    'blog.apps.BlogConfig',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
# SQLite database stored alongside the project.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'es-es'
TIME_ZONE = 'Europe/Berlin'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
| [
"miguel@mail.com"
] | miguel@mail.com |
acb2fc903d2a0616fd16767c00059ce86cc7baa7 | 0116bfbdff160b028b18040df9b59d99d4a824e4 | /social/migrations/0011_question_user_name.py | 3ce91059651fb603746d5d812d40a3346826f1af | [] | no_license | Subhash1998/social-welfare | d9cd2897154f2da0afd9484fe33be7f8cf1a0390 | d2e59d511481fcb33a45c0d6d65ad1e97070f0b4 | refs/heads/master | 2022-12-14T15:49:23.851170 | 2018-06-02T03:36:41 | 2018-06-02T03:36:41 | 125,677,783 | 3 | 0 | null | 2022-11-22T02:05:53 | 2018-03-17T23:39:24 | Python | UTF-8 | Python | false | false | 471 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2018-03-17 07:39
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('social', '0010_remove_question_user_name'),
]
operations = [
migrations.AddField(
model_name='question',
name='user_name',
field=models.CharField(blank=True, max_length=100),
),
]
| [
"you@example.com"
] | you@example.com |
1af412ff0d24a8876c3345bd2bfffe447cf4d939 | 1493997bb11718d3c18c6632b6dd010535f742f5 | /wintst.py | 409947c6ddc3e24a514f1bbbe46278b39d660f5b | [] | no_license | kovrov/scrap | cd0cf2c98a62d5af6e4206a2cab7bb8e4560b168 | b0f38d95dd4acd89c832188265dece4d91383bbb | refs/heads/master | 2021-01-20T12:21:34.742007 | 2010-01-12T19:53:23 | 2010-01-12T19:53:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,876 | py | from ctypes import *
from ctypes import wintypes
WM_NCCREATE = 0x81
WM_DESTROY = 0x02
CS_VREDRAW = 0x01
CS_HREDRAW = 0x02
WS_OVERLAPPEDWINDOW = 0xCF0000
CW_USEDEFAULT = -0x80000000
SW_SHOWNORMAL = 0x01
FORMAT_MESSAGE_FROM_SYSTEM = 0x1000
# LRESULT is defined as LONG_PTR (signed type)
if sizeof(c_int32) == sizeof(c_void_p): LRESULT = c_int32
elif sizeof(c_int64) == sizeof(c_void_p): LRESULT = c_int64
WNDPROC = WINFUNCTYPE(LRESULT, wintypes.HWND, wintypes.UINT, wintypes.WPARAM, wintypes.LPARAM)
class WNDCLASS(Structure):
_fields_ = [('style', wintypes.UINT),
('lpfnWndProc', WNDPROC),
('cbClsExtra', c_int),
('cbWndExtra', c_int),
('hInstance', wintypes.HINSTANCE),
('hIcon', wintypes.HICON),
('hCursor', wintypes.HICON),
('hbrBackground', wintypes.HBRUSH),
('lpszMenuName', wintypes.LPCWSTR),
('lpszClassName', wintypes.LPCWSTR)]
def assert_nonzerro(res):
if res == 0:
errno = windll.kernel32.GetLastError()
message = create_string_buffer(1024)
windll.kernel32.FormatMessageA(FORMAT_MESSAGE_FROM_SYSTEM,
c_void_p(), errno, 0,
message, len(message),
c_void_p())
raise Exception(message.value)
return res
RegisterClass = windll.user32.RegisterClassW
RegisterClass.restype = assert_nonzerro
RegisterClass.argtypes = [POINTER(WNDCLASS)]
CreateWindowEx = windll.user32.CreateWindowExW
CreateWindowEx.restype = assert_nonzerro
CreateWindowEx.argtypes = [wintypes.DWORD, wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.DWORD, c_int, c_int, c_int, c_int,
wintypes.HWND, wintypes.HMENU, wintypes.HINSTANCE,
wintypes.LPVOID]
DefWindowProc = windll.user32.DefWindowProcW
DefWindowProc.restype = LRESULT
DefWindowProc.argtypes = [wintypes.HWND, wintypes.UINT, wintypes.WPARAM, wintypes.LPARAM]
GetMessage = windll.user32.GetMessageW
GetMessage.restype = wintypes.BOOL
GetMessage.argtypes = [POINTER(wintypes.MSG), wintypes.HWND, wintypes.UINT, wintypes.UINT]
DispatchMessage = windll.user32.DispatchMessageW
DispatchMessage.restype = LRESULT
DispatchMessage.argtypes = [POINTER(wintypes.MSG)]
class CREATESTRUCT(Structure):
_fields_ = [('lpCreateParams', wintypes.LPVOID),
('hInstance', wintypes.HINSTANCE),
('hMenu', wintypes.HMENU),
('hwndParent', wintypes.HWND),
('cy', c_int),
('cx', c_int),
('y', c_int),
('x', c_int),
('style', wintypes.LONG),
('lpszName', wintypes.LPCWSTR),
('lpszClass', wintypes.LPCWSTR),
('dwExStyle', wintypes.DWORD)]
#-------------------------------------------------------------------------------
def wnd_proc(hWnd, message, wParam, lParam):
if message == WM_NCCREATE:
cs = CREATESTRUCT.from_address(lParam)
print cs.lpszName
print cs.lpszClass
assert 1 == DefWindowProc(hWnd, message, wParam, lParam)
return 1
elif message == WM_DESTROY:
windll.user32.PostQuitMessage(0)
else:
return DefWindowProc(hWnd, message, wParam, lParam)
return 0
WndProc = WNDPROC(wnd_proc)
def main():
# register wndclass
wndclass = WNDCLASS()
wndclass.style = CS_HREDRAW | CS_VREDRAW
wndclass.lpfnWndProc = WndProc
wndclass.hInstance = windll.kernel32.GetModuleHandleW(None)
wndclass.hbrBackground = 1
wndclass.lpszClassName = u"WINTEST"
RegisterClass(byref(wndclass))
# create window
hWnd = CreateWindowEx(0, u"WINTEST", u"test window", WS_OVERLAPPEDWINDOW,
CW_USEDEFAULT, 0, CW_USEDEFAULT, 0,
0, 0, windll.kernel32.GetModuleHandleW(None), None)
windll.user32.ShowWindow(hWnd, SW_SHOWNORMAL)
windll.user32.UpdateWindow(hWnd)
# pump messages
msg = wintypes.MSG()
pMsg = pointer(msg)
while GetMessage(pMsg, None, 0, 0):
#windll.user32.TranslateMessage(pMsg)
DispatchMessage(pMsg)
return msg.wParam
if __name__ == "__main__": main()
| [
"kovrov@gmail.com"
] | kovrov@gmail.com |
b15598eadeb78e2994d75c0367214ad181d903fe | 7d2692834242cf63ad10cb5a39476210c92dbc5d | /utils/linematch.py | 0b2066976bad0557413cf2acd73e55fbfaafbaa3 | [
"MIT"
] | permissive | shengzhang90/deep_gcns_torch | 0ebec94568f8035c425138a4cf23276829ffe16d | 9282e2e7a928029e549150c5dcab2d0db279fed3 | refs/heads/master | 2020-08-22T18:28:50.883259 | 2019-10-20T12:43:47 | 2019-10-20T12:43:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 807 | py | import subprocess
import os
import sys
def find_last_line(path, exp):
    """Collect files under *path* whose last ten lines contain *exp*.

    Each file's tail is obtained via the external ``tail -10`` utility.
    Returns a list of (full_path, tail_text) tuples; raises Exception
    when *path* does not exist.
    """
    if not os.path.exists(path):
        raise Exception('Path does not exist')
    matches = []
    needle = exp.encode('utf-8')
    for dirpath, _dirnames, filenames in os.walk(path):
        for name in filenames:
            candidate = os.path.join(dirpath, name)
            # Inspect only the tail of each file, not its whole body.
            tail = subprocess.check_output(['tail', '-10', candidate])
            if needle in tail:
                matches.append((candidate, tail.decode()))
    return matches
# CLI entry point: argv = [search_path, expression].  Prints each
# matching tail (from find_last_line) together with its file path.
def main(argv):
    path = argv[0]
    exp = argv[1]
    matches = find_last_line(path, exp)
    if len(matches) == 0:
        print("Couldn't find any match")
    else:
        for match in matches:
            print('{} in file {}'.format(match[1], match[0]))
if __name__ == "__main__":
    main(sys.argv[1:])
| [
"gordonqian2017@gmail.com"
] | gordonqian2017@gmail.com |
800fa6caac9000d9001428fc19bd06a386df9d03 | 5f4bf4c3caff64a6e82e6a522c85c4029f07ad68 | /pydatastream/__init__.py | 6f89e9ccaf7ff5a05e3e9239a55edcf315bb4fc5 | [
"MIT"
] | permissive | ceaza/pydatastream | ee8ecffd766823b76a0b579d1c34e2b25375d77c | 062b56ca1a6aebf00f9191a6c02e8e4eea063702 | refs/heads/master | 2021-01-18T02:28:04.830572 | 2016-03-16T19:10:00 | 2016-03-16T19:10:00 | 54,023,093 | 0 | 0 | null | 2016-03-16T10:37:14 | 2016-03-16T10:37:13 | null | UTF-8 | Python | false | false | 51 | py | ### Dummy __init__ file
from pydatastream import *
| [
"vfilimonov@ethz.ch"
] | vfilimonov@ethz.ch |
9a9c205fa38d5e32eb069139605bfd188370c64b | 99c353ac90be2b607881498b05075cd21b216314 | /simulatedData/plotter.py | b02336e0cd1319caeb0a557cb73b98f0957eb4b0 | [] | no_license | sahil00199/Covering-Simulations | 0014eb9f5b4138cf5d41153b111988c7a4071669 | 39da59439c53b0c63a6b80fe469824a63d685de1 | refs/heads/master | 2020-03-22T05:19:57.104657 | 2018-07-09T13:02:34 | 2018-07-09T13:02:34 | 139,558,444 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | import matplotlib.pyplot as plt
# Name of the input data file; also used for the output image name.
name = "right"

# Read the whole data file inside a context manager.  (The original
# concatenated it line by line with ``+=`` into a string, never used
# ``with``, and shadowed the ``file`` builtin.)
with open(name, 'r') as infile:
    lines = [entry for entry in infile.read().split("\n") if entry != ""]

# Each non-empty line holds two whitespace-separated floats: x then y.
x = []
y = []
for line in lines:
    fields = line.split()  # split once instead of twice per line
    x.append(float(fields[0]))
    y.append(float(fields[1]))

plt.plot(x, y)
plt.axis([0, 3.8, 0, 1.1])
plt.grid(True)
plt.savefig("../plots/"+name+".png")
| [
"sahilshah00199@gmail.com"
] | sahilshah00199@gmail.com |
391eea6aba9ccba16e5fc376b74dcd70462bc6fb | 74975046450684cb9c9770f42119575a16ce4c18 | /basics.py | 299ebde08f574d2174785993aeff6db20e152531 | [] | no_license | d0uble0deven/Python_Playground_One | 1324cb9d3002ed3b77aa9f1a27ef7a48237c5a8c | 1f09cee08d8b9685cd93eedd80e185b23e9e4721 | refs/heads/master | 2023-02-03T18:19:54.091657 | 2019-10-29T21:42:16 | 2019-10-29T21:42:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,167 | py | import datetime
# Tutorial script: basic types, lists, dicts and simple statistics.
now = datetime.datetime.now()
print("The date and time is", now)
myNumber = 10
myText = "Hello"
print(myNumber, myText)
# int, str and float behave differently under "+".
x = 10
y = "10"
z = 10.1
sum1 = x + x
sum2 = y + y
print(sum1, sum2)
print(type(x), type(y), type(z))
student_grades = [9.1, 9.8, 7.5]
grade_range = list(range(1, 10))
range_step = list(range(1, 10, 3))
print(student_grades)
print(grade_range)
print(range_step)
# Mean of a list: sum divided by length.
mySum = sum(student_grades)
length = len(student_grades)
mean = mySum / length
print(mean)
# Mean of a dict's values.
names_grades = {"Mary": 9.1, "Sim": 8.8, "John": 7.5}
newSum = sum(names_grades.values())
newLength = len(names_grades)
newMean = newSum / newLength
print(newMean)
keys = names_grades.keys()
print(keys)
monday_temperatures = [8.8, 9.1, 9.9]
print(monday_temperatures)
print("printing mean2 below")
def mean2(value):
    """Return the arithmetic mean of *value*.

    *value* may be a dict (its values are averaged) or any sized
    iterable of numbers.
    """
    numbers = value.values() if type(value) == dict else value
    return sum(numbers) / len(value)
# Exercise mean2 on the sample data defined above.
print(names_grades)
print(monday_temperatures)
print(mean2(monday_temperatures))
print(type(mean2), type(sum))
if 3 > 1:
    print('a')
    print('aa')
    print('aaa')
# Placeholder function; intentionally does nothing.
def foo():
    pass
| [
"friendsofdev94@gmail.com"
] | friendsofdev94@gmail.com |
406b33597d9802bc2210fa169fe5e9edff9992d3 | 5ffc57ca00b370751fb65a1f1efe973781100a0e | /appone/views.py | cdc62c4821cdd007d9de93bc92a4cb1a54f0fc62 | [] | no_license | swapnali0212/schoolapp | af9ed65b61034a75ead8d0fdc403d36a74c9c3b9 | 70032c9b35cf913ec7e807e02146fef19b306258 | refs/heads/main | 2023-01-23T23:10:43.807172 | 2020-11-30T19:54:29 | 2020-11-30T19:54:29 | 317,327,581 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 789 | py | from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from django.shortcuts import render
from rest_framework.viewsets import ModelViewSet
from appone.serializer import *
from appone.models import *
# Authenticated smoke-test endpoint: GET returns a fixed greeting.
class HelloView(APIView):
    permission_classes = (IsAuthenticated,)
    def get(self, request):
        content = {'message': 'Hello, World!'}
        return Response(content)
# CRUD endpoints for Student.
# NOTE(review): the permission line is commented out, so these endpoints
# are reachable without authentication — confirm this is intentional.
class StudentOps(ModelViewSet):
    #permission_classes = (IsAuthenticated,)
    queryset =Student.objects.all()
    serializer_class = StudentSerializer
# CRUD endpoints for UserProfi.
# NOTE(review): permissions commented out here too — endpoints are open.
class UserProfiOps(ModelViewSet):
    #permission_classes = (IsAuthenticated,)
    queryset =UserProfi.objects.all()
    serializer_class = UserProfiSerializer
| [
"swapnalikapare02@gmail.com"
] | swapnalikapare02@gmail.com |
9e3014da0abcabebac335b1dc52a83d5201e2749 | a8fe10debc89eac786ebdfa9cbaa5dd7b6315bf2 | /EngageApp/urls.py | be77b0598e3dc032613d0185af6dac1eec3f90ba | [] | no_license | shekarneo/backup_recom | d9dc7dc1a752eccfeec70a13e112545955ff8e24 | eb17354620013c94297d1805c403ae92fa5f3c55 | refs/heads/main | 2023-03-20T06:38:32.070302 | 2021-03-11T09:53:36 | 2021-03-11T09:53:36 | 346,997,434 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,092 | py | from django.conf.urls import include, url
from . import views
from EngageProject import settings
# from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
app_name = 'EngageApp'
# URL routes for the EngageApp API: read endpoints first, then the
# registration/POST endpoints (see the marker comment below).
urlpatterns = [
    url(r'^date_filter/upcoming3/$', views.filter_date_event1),
    url(r'^date_filter/upcomingall/$', views.filter_date_event2),
    url(r'^date_filter/past3/$', views.filter_date_event3),
    url(r'^date_filter/pastall/$', views.filter_date_event4),
    url(r'^association_list/$', views.association_data),
    url(r'^association_list_order/$', views.association_data_order),
    url(r'^event_detail/$', views.Eventslist.as_view()),
    url(r'^alleventslist/$', views.AllEventslist.as_view()),
    url(r'^delegateslist/$', views.EventDelegateslist.as_view()),
    url(r'^eventdates/$', views.EventDateslist.as_view()),
    url(r'^loglist/$', views.EventLoglist.as_view()),
    url(r'^partners/$', views.EventPartnerslist.as_view()),
    url(r'^partnerships/$', views.EventPartnershipslist.as_view()),
    url(r'^salesperson/$', views.EventSalespersonlist.as_view()),
    url(r'^speakers/$', views.EventSpeakerslist.as_view()),
    url(r'^supported_by/$', views.EventSupportedlist.as_view()),
    url(r'^testimonials/$', views.EventTestimonialslist.as_view()),
    url(r'^visitors/$', views.EventVisitorslist.as_view()),
    url(r'^event_data/$', views.EventData.as_view()),
    url(r'^about_us/$', views.AboutUsView.as_view()),
    url(r'^gallery/$',views.EventGallary.as_view()),
    url(r'^child_event_data/$',views.ChildEventData.as_view()),
    # API'S FOR POSTING THE DATA INTO THE TABLES ###
    url(r'^event_register/$', views.EventRegisterView.as_view()),
    url(r'^event_date_register/$', views.EventDateRegisterView.as_view()),
    url(r'^contact_register/$', views.ContactDetailsRegister.as_view()),
    url(r'^partnership_register/$', views.PartnershipRegister.as_view()),
    url(r'^exhibitor_register/$', views.ExhibitorRegister.as_view()),
    url(r'^conference_register/$', views.ConferenceRegister.as_view()),
    url(r'^visitor_register/$', views.VisitorRegister.as_view()),
    url(r'^virtual_register/$', views.VirtualRegisterView.as_view()),
    url(r'^required/$', views.EventRequired.as_view()),
    url(r'^digital_partners/$', views.DigitalPartner.as_view()),
    url(r'^associate_partners/$', views.AssociatePartner.as_view()),
    url(r'^media_partners/$', views.MediaPartner.as_view()),
    url(r'^media/$', views.MediaPartnerView.as_view()),
    url(r'^associations/$', views.AssociateView.as_view()),
    url(r'^coupon/$', views.CouponValidate.as_view()),
    url(r'^get_in_touch/$', views.GetInRegister.as_view()),
    url(r'^test/$', views.AndroidTest.as_view()),
]
# if settings.DEBUG:
#     urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
#     urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# NOTE(review): the DEBUG guard above is commented out, so static/media
# files are served by Django unconditionally — confirm intentional.
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"shekaransd@gmail.com"
] | shekaransd@gmail.com |
47b5da87fe99d20e11b17a2a3b32e0c365054fce | 5fba99e6ec9f23ae4c0b8d28dbb86cd6d23c3887 | /djcelery_ses/__init__.py | e7c12d2851b5fb9f4f5c09ac96f1550b3b94c82b | [
"MIT"
] | permissive | StreetVoice/django-celery-ses | 2e8fe75c9c83028ba345bf3a4296a290ef06e54f | 0c2a2cae6f3ff641c496c1e0815e2cd5c697f8a1 | refs/heads/master | 2023-08-31T16:12:29.493185 | 2023-08-24T05:31:19 | 2023-08-24T05:31:19 | 6,350,221 | 19 | 9 | MIT | 2023-08-24T05:31:20 | 2012-10-23T10:00:37 | Python | UTF-8 | Python | false | false | 22 | py | __version__ = '2.0.3'
| [
"cha798215@gmail.com"
] | cha798215@gmail.com |
b4e786cac92727c050d451fab734c2221ae028c1 | 7d19797caa0abf8875dba77f5e63e19c3aa646b4 | /redis2kfk.py | 430efc252fb9a7857f400542b8fec07deaf94662 | [] | no_license | xiaoerli520/redis2kafka | e2d58d860220e6b14d1005f492d4aae1ef2532eb | 8b297da10f1bbedf8a49ec10ea53d1d750dac796 | refs/heads/master | 2020-04-03T01:02:38.403133 | 2018-10-27T02:58:50 | 2018-10-27T02:58:50 | 154,917,511 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,614 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import signal
import sys
import time
import logging
import threading
import traceback
import yaml
from kafka.errors import KafkaTimeoutError, NotLeaderForPartitionError
from redis import ConnectionError
from model import kfk_producer, redis_consumer, redis_monitor, noticer, global_vars as gl
# Initialise the shared flag store used for cross-thread signalling:
# NEED_KILL asks worker threads to exit, IS_DEBUG toggles verbose
# output, TASK_NUM records how many redis->kafka tasks were configured.
gl._init()
gl.set_value('NEED_KILL', False)
gl.set_value('IS_DEBUG', False)
gl.set_value('TASK_NUM', 0)
# One log file per day; ERROR level only.
logging.basicConfig(
    level=logging.ERROR,
    filename='/data1/ms/log/redis2kafka/redis2kfk-%s.log' % time.strftime("%Y-%m-%d"),
    format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(process)d %(threadName)s %(message)s',
    datefmt='%a, %d %b %Y %H:%M:%S',
)
def do_exit(signum=None, frame=None):
    """Signal handler: ask all worker threads to shut down gracefully.

    Registered via signal.signal() in bind_signal(), which invokes the
    handler with (signum, frame).  The original definition took no
    arguments, so any delivered signal raised a TypeError; both
    parameters now default to None so the function can still be called
    directly with no arguments as before.
    """
    gl.set_value('NEED_KILL', True)
def bind_signal():
    """Route the usual termination signals to do_exit for a graceful stop."""
    signal.signal(signal.SIGINT, do_exit)
    signal.signal(signal.SIGTERM, do_exit)
    signal.signal(signal.SIGHUP, do_exit)
    signal.signal(signal.SIGQUIT, do_exit)
def main():
    """Read .env and the per-mode YAML config, then spawn worker threads.

    .env is expected to hold two lines, "mode=<dev|prod>" and
    "debug=<0|1>"; the mode selects ./conf/<mode>.yml.  For every
    (service, action) pair in the config this starts
    ThreadsNumberPerTask consumer threads (multi_work) plus one
    monitor thread (redis_list_monitor), then idles until NEED_KILL
    is set by a signal.
    """
    try:
        with open(".env") as env:
            env_context = env.readline()
            env_debug = env.readline()
    except Exception as e:
        print "failed to read env"
        logging.error(e)
        exit()
    mode = env_context.split("=")
    mode[1] = mode[1].strip('\n\r') # remove special chars
    if "dev" not in mode and "prod" not in mode:
        print "env:mode is not valid"
        exit()
    debug = env_debug.split("=")
    debug = debug[1].strip("\n\r")
    if int(debug) not in [1, 0]:
        print "env:debug is not valid"
    if debug == "1":
        gl.set_value('IS_DEBUG', True)
    mode = mode[1]
    print "Curr Mode is " + mode
    try:
        with open("./conf/" + mode + ".yml") as conf:
            conf_context = conf.read()
    # NOTE(review): on failure this logs but does not exit, so the
    # yaml.load below would hit an undefined conf_context — confirm.
    except Exception as e:
        print "failed to read conf"
        logging.error(e)
    conf_map = yaml.load(conf_context)
    params = conf_map['settings']
    threadRate = params['ThreadsNumberPerTask']
    conf_map = conf_map['kafka']
    # Flatten the nested service/action config into one task dict per
    # (service, action) pair.
    redis_map = []
    for service in conf_map:
        for act in conf_map[service]['actionTopics']:
            redis_task = {}
            redis_task['service'] = service
            redis_task['act'] = act
            redis_task['dsn'] = conf_map[service]['actionTopics'][act]['redis']['dsn']
            redis_task['redis_host'] = redis_task['dsn'].split(":")[0]
            redis_task['redis_port'] = redis_task['dsn'].split(":")[1]
            redis_task['redis_key'] = conf_map[service]['actionTopics'][act]['redis']['queue_key']
            redis_task['servers'] = conf_map[service]['servers']
            redis_task['topic'] = conf_map[service]['actionTopics'][act]['topic']
            redis_task['kfk_username'] = conf_map[service]['actionTopics'][act]['username']
            redis_task['kfk_password'] = conf_map[service]['actionTopics'][act]['password']
            redis_task['api_version'] = conf_map[service]['actionTopics'][act]['apiVersion']
            redis_map.append(redis_task)
    if gl.get_value('IS_DEBUG', False):
        print redis_map
    gl.set_value('TASK_NUM', len(redis_map))
    # start multi task
    for item in redis_map:
        # start consumer
        ThreadsNumberPerTask = 0
        while ThreadsNumberPerTask < threadRate:
            _thread = threading.Thread(target=multi_work, args=(
                item['servers'], item['topic'], item['redis_host'], item['redis_port'], item['redis_key'], params,
                item['kfk_username'],
                item['kfk_password'], item['api_version']))
            _thread.setDaemon(True)
            _thread.start()
            ThreadsNumberPerTask = ThreadsNumberPerTask + 1
        # start monitor
        _thread_monitor = threading.Thread(target=redis_list_monitor,
                                           args=(item['redis_host'], item['redis_port'], item['redis_key'],
                                                 params))
        _thread_monitor.setDaemon(True)
        _thread_monitor.start()
    # Main thread idles; daemon workers die with the process on exit.
    while True:
        if gl.get_value('NEED_KILL', False) is True:
            print "you kill this process"
            exit(0)
        else:
            time.sleep(0.5)
def redis_list_monitor(redis_host, redis_port, redis_key, params):
    """Watch one redis queue and alert when its backlog grows too long.

    Loops until NEED_KILL is set: when the list length exceeds
    params['maxRedisList'], sends a WeChat notice (and, outside debug
    mode, an email), then sleeps for a mode-dependent interval
    (debugListLenSleep / listLongSleep / listSleep).
    """
    while True:
        rm = redis_monitor.RedisMonitor(redis_host, redis_port, redis_key)
        if rm.list_length() > int(params['maxRedisList']):
            is_list_long = True
            noticer.Noticer.send_wehcat("redis2kafka", "RedisListTooLong", params['mailTo'],
                                        "RedisListTooLong :" + redis_host + "::" + redis_port + " " + " :: " + redis_key + "\n Length is " + str(rm.list_length()))
            # Email alerts are suppressed in debug mode.
            if not gl.get_value('IS_DEBUG', False):
                noticer.Noticer.send_email("redis2kafka", "redis2Kafka_Warning", params['mailTo'],
                                           "redisList too Long :" + redis_host + "::" + redis_port + " " + redis_key + " curr length: " + str(
                                               rm.list_length()))
        else:
            is_list_long = False
        # Graceful shutdown requested by the signal handler.
        if gl.get_value('NEED_KILL', False) is True:
            return
        if gl.get_value('IS_DEBUG', False):
            print "redisList :" + redis_host + "::" + redis_port + " " + redis_key + " curr length: " + str(
                rm.list_length())
            time.sleep(params['debugListLenSleep'])
        elif is_list_long:
            time.sleep(params['listLongSleep'])
        else:
            time.sleep(params['listSleep'])
def multi_work(kfk_servers, kfk_topic, redis_host, redis_port, redis_key, params, username=None, password=None,
               api_version=None):
    # Worker thread body: pops entries from a Redis list and forwards them to
    # a Kafka topic until the global NEED_KILL flag is set.
    # Python 2 code: print statements and the Python-2-only e.message below.
    print "Thread" + " " + redis_key + "\n"
    # Kafka bootstrap servers as one comma-separated string
    servers = ",".join(kfk_servers)
    while True:
        try:
            if gl.get_value('NEED_KILL', False):
                print "Thread gracefully quit thread"
                return
            # producer/consumer are rebuilt on every outer iteration, i.e.
            # after each handled exception, which re-establishes connections
            producer = kfk_producer.KfkProducer(servers, username, password, api_version, int(params['maxBlock']),
                                                int(params['metaDataMaxAge']), int(params['requestTimeOut']),
                                                int(params['kafkaTimeOut']))
            redis = redis_consumer.RedisClient(redis_host, redis_port, int(params['redisPopTimeOut']))
            while True:
                # NOTE(review): if an exception fires before the first
                # load_data() succeeds, 'item' is unbound and the item[1]
                # references in the handlers raise NameError - confirm/guard.
                item = redis.load_data(redis_key)
                if item is not None:
                    producer.send_data(item[1], kfk_topic)
                if gl.get_value('NEED_KILL', False):
                    print "Thread gracefully quit thread"
                    return
        except ConnectionError as e:
            # Redis unreachable: alert, log and retry after a pause
            # (the popped item is NOT pushed back here, unlike the Kafka cases)
            noticer.Noticer.send_wehcat("redis2kafka", "RedisConnectionError", params['mailTo'],
                                        "redis cannot connect :" + redis_host + "::" + redis_port + " " + redis_key + "\n err msg: " + e.message)
            noticer.Noticer.send_email("redis2kafka", "redis2Kafka_Warning", params['mailTo'],
                                       "redis cannot connect :" + redis_host + "::" + redis_port + " " + redis_key + "\n err msg: " + e.message)
            logging.error('%s %s::%s \nData: %s' % (e, redis_host, redis_port, item[1]))
            time.sleep(params['redisTimeoutSleep'])
        except KafkaTimeoutError as e:
            # Kafka send timed out: return the item to Redis, alert, retry
            redis.push_back(redis_key, item[1])
            noticer.Noticer.send_wehcat("redis2kafka", "KafkaTimeoutError", params['mailTo'],
                                        "kafka cannot connect :" + servers + "::" + kfk_topic + " " + "\n err msg: " + e.message)
            noticer.Noticer.send_email("redis2kafka", "redis2Kafka_Warning", params['mailTo'],
                                       "kafka cannot connect :" + servers + "::" + kfk_topic + " " + "\n err msg: " + e.message)
            logging.error('%s %s %s \nData: %s' % (e, servers, kfk_topic, item[1]))
            time.sleep(params['kafkaTimeoutSleep'])
        except NotLeaderForPartitionError as e:
            # partition leadership changed: return the item, alert, retry
            redis.push_back(redis_key, item[1])
            noticer.Noticer.send_wehcat("redis2kafka", "NotLeaderForPartitionError", params['mailTo'],
                                        "kafka errors :" + servers + "::" + kfk_topic + " " + "\n err msg: " + e.message)
            noticer.Noticer.send_email("redis2kafka", "redis2Kafka_Warning", params['mailTo'],
                                       "kafka errors :" + servers + "::" + kfk_topic + " " + "\n err msg: " + e.message)
            logging.error('%s %s %s \nData: %s' % (e, servers, kfk_topic, e.message))
            time.sleep(params['kafkaNoLeaderSleep'])
        except Exception as e:
            # push back data
            # catch-all: alert, return the item to Redis, log traceback, retry
            noticer.Noticer.send_wehcat("redis2kafka", "Exception", params['mailTo'],
                                        "Exception :" + "\n err msg: " + e.message)
            noticer.Noticer.send_email("redis2kafka", "Exception", params['mailTo'],
                                       "Exception :" + "\n err msg: " + e.message)
            redis.push_back(redis_key, item[1])
            traceback.print_exc(e)
            logging.error('%s \nData: %s' % (e, item[1]))
            time.sleep(params['BaseExceptionSleep'])
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        # Ctrl-C: signal every worker thread to stop via the shared flag
        gl.set_value('NEED_KILL', True)
        # wait until only the per-task monitor threads plus the main thread
        # remain (TASK_NUM was set to the number of redis->kafka tasks)
        while gl.get_value('NEED_KILL', False) and threading.active_count() > gl.get_value('TASK_NUM', 0) + 1:
            print "waiting for produce threads exit..."
            time.sleep(1)
        print "Main Thread Exit"
        sys.exit()
| [
"shixi_qingzhe@staff.sina.com.cn"
] | shixi_qingzhe@staff.sina.com.cn |
6aef1372f3256427de61edd2f22042759ba8be37 | 26e4e8cc485a00bac7a0a7b4b7e215ef48314bf1 | /django_mongo_web/django_mongo_web/gcs_app/models.py | a26b31fa2856a3b9917bea5a0b1de174782827ff | [] | no_license | Gogistics/prjDjangoMongoOnGCE | b2ae9c24c55dd637809dda52d658a048946feb9f | b64720cc5ed33cf5e281bb14124772d122e6ab16 | refs/heads/master | 2020-07-25T07:23:37.897311 | 2014-12-02T07:31:32 | 2014-12-02T07:31:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 273 | py | from mongoengine import *
from django_mongo_web.settings import DBNAME
connect (DBNAME)
class Post(Document):
    """Blog post document persisted to MongoDB through mongoengine."""
    # headline, at most 120 characters
    title = StringField(max_length = 120, required = True)
    # body text, at most 500 characters
    content = StringField(max_length = 500, required = True)
    # last modification time (presumably set by the caller on save -
    # not enforced here; TODO confirm against the views that create posts)
    last_update = DateTimeField(required = True)
| [
"root@gogistics-server.c.gogistics-cloud-1014.internal"
] | root@gogistics-server.c.gogistics-cloud-1014.internal |
4b92ff7e467ed8e25c639496ee88dec5d048fa53 | 1fc6386856f6145d74c2a4efdc6597fc8512badb | /formatters/harmoniaParser.py | ec5a34a456fcc9c7b9c03365c7f3aab82aee4b52 | [] | no_license | lab11/gatd-lab11 | 6684ea94748ddc9f41d95917d4e8de522c4ab661 | 806c7aac1d99ff405c108933ac1c7bcbd136c13c | refs/heads/master | 2021-01-21T04:40:16.898997 | 2016-06-13T21:49:00 | 2016-06-13T21:49:00 | 16,458,375 | 0 | 0 | null | 2014-10-23T21:45:14 | 2014-02-02T16:51:44 | JavaScript | UTF-8 | Python | false | false | 812 | py | import IPy
import json
import struct
import parser
import binascii
import datetime
import pytz
import time
class harmoniaParser():
    """Parser for packets from the Harmonia harmonic localization system."""
    name = 'Harmonia'
    description = 'Harmonic Localization System.'
    def __init__(self):
        pass
    def parse(self, data, meta, extra, settings):
        """Unpack one 40-byte Harmonia packet into a result dict.

        data: 40 bytes laid out as '!10s I I H H 8B H 8B' (network order).
        meta/extra/settings: unused here; kept for the common parser API.
        Returns a dict with sequence/reset counters, node ids, the raw
        timestamp byte lists and the derived range in meters.

        Cleanup: the original defined an unused nested convert_bcd() helper
        and computed two unused locals (a datetime from meta['time'] and the
        profile id), all removed.
        """
        ret = {}
        # fields: profile id (s[0], ignored), seq, reset counter, node id,
        # last-heard id, 8-byte full time, time-of-flight ticks, 8-byte
        # last full time
        s = struct.unpack('!10s I I H H 8B H 8B', data)
        ret['seq'] = s[1]
        ret['reset_counter'] = s[2]
        ret['id'] = s[3]
        ret['last_heard_id'] = s[4]
        ret['m_full_time'] = list(s[5:13])
        ret['dt_ul_rf'] = s[13]
        ret['last_full_time'] = list(s[14:22])
        # time-of-flight ticks at 32 kHz times the speed of sound
        # (340.29 m/s), minus a 0.12 m calibration offset
        ret['range'] = float(ret['dt_ul_rf'])/32000.0 * 340.29 - .12
        return ret
| [
"bpkempke@umich.edu"
] | bpkempke@umich.edu |
fd383de4e6b89efa815286ba137152c793ddc76d | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03261/s807506520.py | 99d06e27e156d735988991a822f6233b91e07407 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 236 | py | N=int(input())
# Shiritori validity check (N was read on the previous line): each of the
# following N-1 words must be new and must start with the last letter of the
# previous word.
# Fix: membership was tested against a list (O(n) per word, O(n^2) overall);
# a set gives O(1) lookups with identical output.
H = input()
seen = {H}
for _ in range(N - 1):
    S = input()
    # a repeated word or a broken chain both make the sequence invalid
    if S in seen or H[-1] != S[0]:
        print('No')
        exit()
    seen.add(S)
    H = S
print('Yes')
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
2a8671467188ac4a0c4de391016897269906b824 | cbf8b6e17389bb544c98c86a59d5f64f30e78625 | /ReuterCrawler.py | 2fad26761a9b5895e0c4deadd2e9b598d102113b | [] | no_license | jinyi12/StockMarketPrediction | d76f3ac0a4eeab3bab847e6793498f0ae638ac82 | 0618b671bce39a666dc653af00b40d4ec68e83c2 | refs/heads/master | 2022-09-26T04:17:11.032643 | 2020-06-07T15:08:05 | 2020-06-07T15:08:05 | 270,252,520 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 305 | py | import os
import sys
import datetime
# import news data scraping utilities from parent directory
import inspect
# directory containing this file, resolved at runtime via inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
# two levels above this file's directory
parentdir = os.path.dirname(os.path.dirname(currentdir))
# prepend it to the module search path so the sibling 'utilities' module
# can be imported below
sys.path.insert(0, parentdir)
import utilities
| [
"jinyi12@outlook.com"
] | jinyi12@outlook.com |
ae2f22f7a27f5e9ef722dd41bdc6c39b09e1074f | 7cbcaaf610b69c61b1aa2834d43ae7999f472c9e | /accounts/apps.py | c073b96d51055de37de686ff356664f2437d4ce7 | [] | no_license | youcanshine/djangomall | c7da0e34d3beb04ab2b1c4c53492c33d0319e7f0 | 976dab8948039212f6adb20690ce3ae36afa1df3 | refs/heads/master | 2023-01-30T12:48:13.358251 | 2020-12-13T07:35:16 | 2020-12-13T07:35:16 | 315,939,931 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 119 | py | from django.apps import AppConfig
class AccountsConfig(AppConfig):
    """Django application configuration for the ``accounts`` app."""
    name = 'accounts'
    # human-readable label shown in the Django admin ("account" in Chinese)
    verbose_name = '账户'
| [
"youcanshine@foxmail.com"
] | youcanshine@foxmail.com |
0797f8a1e6c95d8a19772812284eb5df51f20fa4 | 753b238c50e4f456d6e4cc81d8301a868c117d33 | /src/GraphicsView.py | 00981faf3c31fd133c794d561edb542d1a80ed67 | [
"MIT"
] | permissive | chiefenne/PyAero | 7c04a81e479bbe3e722779ba1b8a4ef530cd84c7 | 04f12b3fe84f97efcfd48a815934fa5af330cd31 | refs/heads/master | 2023-06-22T02:27:43.049172 | 2023-06-15T12:59:36 | 2023-06-15T12:59:36 | 145,753,681 | 127 | 23 | MIT | 2023-06-15T12:59:38 | 2018-08-22T19:30:13 | Python | UTF-8 | Python | false | false | 17,730 | py | import os
import math
from PySide6 import QtGui, QtCore, QtWidgets
from Settings import ZOOMANCHOR, SCALEINC, MINZOOM, MAXZOOM, \
MARKERSIZE, RUBBERBANDSIZE, VIEWSTYLE
import logging
logger = logging.getLogger(__name__)
# clamp the rubberband zoom threshold (relative rectangle width as a
# fraction of the view) to the usable range [0.05, 1.0]
RUBBERBANDSIZE = min(RUBBERBANDSIZE, 1.0)
RUBBERBANDSIZE = max(RUBBERBANDSIZE, 0.05)
class GraphicsView(QtWidgets.QGraphicsView):
    """The graphics view is the canvas where airfoils are drawn upon
    Its coordinates are in pixels or "physical" coordinates.

    Attributes:
        origin (QPoint): stores location of mouse press
        parent (QMainWindow): mainwindow instance
        rubberband (QRubberBand): an instance of the custom rubberband class
            used for zooming and selecting
        sceneview (QRectF): stores current view in scene coordinates
    """
    def __init__(self, parent=None, scene=None):
        """Default settings for graphicsview instance

        Args:
            parent (QMainWindow, optional): mainwindow instance
        """
        super().__init__(scene)
        self.parent = parent
        self._leftMousePressed = False
        # allow drops from drag and drop
        self.setAcceptDrops(True)
        # use custom rubberband
        self.rubberband = RubberBand(QtWidgets.QRubberBand.Rectangle, self)
        # needed for correct mouse wheel zoom
        # otherwise mouse anchor is wrong; it would use (0, 0)
        self.setInteractive(True)
        # set QGraphicsView attributes
        self.setRenderHints(QtGui.QPainter.Antialiasing |
                            QtGui.QPainter.TextAntialiasing)
        self.setViewportUpdateMode(QtWidgets.QGraphicsView.FullViewportUpdate)
        self.setResizeAnchor(QtWidgets.QGraphicsView.AnchorViewCenter)
        # view behaviour when zooming
        if ZOOMANCHOR == 'mouse':
            # point under mouse pointer stays fixed during zoom
            self.setTransformationAnchor(
                QtWidgets.QGraphicsView.AnchorUnderMouse)
        else:
            # view center stays fixed during zoom
            self.setTransformationAnchor(
                QtWidgets.QGraphicsView.AnchorViewCenter)
        self.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        self.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        # normally (0, 0) is upperleft corner of view
        # swap y-axis in order to make (0, 0) lower left
        # and y-axis pointing upwards
        self.scale(1, -1)
        # cache view to be able to keep it during resize
        self.getSceneFromView()
        # set background style and color for view
        self.setBackground(VIEWSTYLE)
    def setBackground(self, styletype):
        """Switches between gradient and simple background using style sheets.
        border-color (in HTML) works only if border-style is set.
        """
        if styletype == 'gradient':
            style = """
            border-style:solid; border-color: lightgrey;
            border-width: 1px; background-color: QLinearGradient(x1: 0.0, y1: 0.0,
            x2: 0.0, y2: 1.0, stop: 0.3 white, stop: 1.0 #263a5a);
            """
            # if more stops are needed
            # stop: 0.3 white, stop: 0.6 #4b73b4, stop: 1.0 #263a5a); } """)
        else:
            style = ("""
            border-style:solid; border-color: lightgrey; \
            border-width: 1px; background-color: white;""")
        self.setStyleSheet(style)
    def resizeEvent(self, event):
        """Re-implement QGraphicsView's resizeEvent handler"""
        # call corresponding base class method
        super().resizeEvent(event)
        # scrollbars need to be switched off when calling fitinview from
        # within resize event otherwise strange recursion can occur
        # NOTE(review): the keyword is spelled 'aspectRadioMode' here while
        # Qt's parameter is 'aspectRatioMode' - confirm PySide6 accepts this
        self.fitInView(self.sceneview,
                       aspectRadioMode=QtCore.Qt.KeepAspectRatio)
    def mousePressEvent(self, event):
        """Re-implement QGraphicsView's mousePressEvent handler"""
        # status of CTRL key
        ctrl = event.modifiers() == QtCore.Qt.ControlModifier
        # if a mouse event happens in the graphics view
        # put the keyboard focus to the view as well
        self.setFocus()
        self.origin = event.pos()
        # do rubberband zoom only with left mouse button
        if event.button() == QtCore.Qt.LeftButton:
            self._leftMousePressed = True
            self._dragPos = event.pos()
            if ctrl:
                self.setCursor(QtCore.Qt.ClosedHandCursor)
            else:
                # initiate rubberband origin and size (zero at first)
                self.rubberband.setGeometry(QtCore.QRect(self.origin,
                                            QtCore.QSize()))
                # show, even at zero size
                # allows to check later using isVisible()
                self.rubberband.show()
        # call corresponding base class method
        super().mousePressEvent(event)
    def mouseMoveEvent(self, event):
        """Re-implement QGraphicsView's mouseMoveEvent handler"""
        # if a mouse event happens in the graphics view
        # put the keyboard focus to the view as well
        self.setFocus()
        # status of CTRL key
        ctrl = event.modifiers() == QtCore.Qt.ControlModifier
        # pan the view with the left mouse button and CRTL down
        if self._leftMousePressed and ctrl:
            self.setCursor(QtCore.Qt.ClosedHandCursor)
            newPos = event.pos()
            diff = newPos - self._dragPos
            self._dragPos = newPos
            # this actually does the pan
            # no matter if scroll bars are displayed or not
            self.horizontalScrollBar().setValue(
                self.horizontalScrollBar().value() - diff.x())
            self.verticalScrollBar().setValue(
                self.verticalScrollBar().value() - diff.y())
        if self.rubberband.isVisible() and not ctrl:
            self.setInteractive(False)
            self.rubberband.setGeometry(
                QtCore.QRect(self.origin, event.pos()).normalized())
        # call corresponding base class method
        super().mouseMoveEvent(event)
    def mouseReleaseEvent(self, event):
        """Re-implement QGraphicsView's mouseReleaseEvent handler"""
        self._leftMousePressed = False
        self.setCursor(QtCore.Qt.ArrowCursor)
        # do zoom wrt to rect of rubberband
        if self.rubberband.isVisible():
            self.rubberband.hide()
            rect = self.rubberband.geometry()
            rectf = self.mapToScene(rect).boundingRect()
            # zoom the selected rectangle (works on scene coordinates)
            # zoom rect must be at least 5% of view width to allow zoom
            if self.rubberband.allow_zoom:
                # NOTE(review): same 'aspectRadioMode' keyword spelling as in
                # resizeEvent - confirm it is accepted by PySide6
                self.fitInView(rectf,
                               aspectRadioMode=QtCore.Qt.KeepAspectRatio)
                # rescale markers during zoom
                # i.e. keep them constant size
                self.adjustMarkerSize()
            # reset to True, so that mouse wheel zoom anchor works
            self.setInteractive(True)
        # reset ScrollHandDrag if it was active
        if self.dragMode() == QtWidgets.QGraphicsView.ScrollHandDrag:
            self.setDragMode(QtWidgets.QGraphicsView.NoDrag)
        # call corresponding base class method
        super().mouseReleaseEvent(event)
    def wheelEvent(self, event):
        """Re-implement QGraphicsView's wheelEvent handler"""
        f = SCALEINC
        # wheelevent.angleDelta() returns a QPoint instance
        # the angle increment of the wheel is stored on the .y() attribute
        angledelta = event.angleDelta().y()
        if math.copysign(1, angledelta) > 0:
            f = 1.0 / SCALEINC
        self.scaleView(f)
        # DO NOT CONTINUE HANDLING EVENTS HERE!!!
        # this would destroy the mouse anchor
        # call corresponding base class method
        # super().wheelEvent(event)
    def keyPressEvent(self, event):
        """Re-implement QGraphicsView's keyPressEvent handler"""
        key = event.key()
        if key == QtCore.Qt.Key_Plus or key == QtCore.Qt.Key_PageDown:
            f = SCALEINC
            # if scaling with the keys, do not use mouse as zoom anchor
            anchor = self.transformationAnchor()
            self.setTransformationAnchor(QtWidgets.QGraphicsView.AnchorViewCenter)
            self.scaleView(f)
            self.setTransformationAnchor(anchor)
            if key == QtCore.Qt.Key_PageDown:
                # return here so that later base class is NOT called
                # because QAbstractScrollArea would otherwise handle
                # the event and do something we do not want
                return
        elif key == QtCore.Qt.Key_Minus or key == QtCore.Qt.Key_PageUp:
            f = 1.0 / SCALEINC
            # if scaling with the keys, do not use mouse as zoom anchor
            anchor = self.transformationAnchor()
            self.setTransformationAnchor(QtWidgets.QGraphicsView.AnchorViewCenter)
            self.scaleView(f)
            self.setTransformationAnchor(anchor)
            if key == QtCore.Qt.Key_PageUp:
                # return here so that later base class is NOT called
                # because QAbstractScrollArea would otherwise handle
                # the event and do something we do not want
                return
        elif key == QtCore.Qt.Key_Home:
            self.parent.slots.onViewAll()
        elif key == QtCore.Qt.Key_Delete:
            # removes all selected airfoils
            self.parent.slots.removeAirfoil()
        # call corresponding base class method
        super().keyPressEvent(event)
    def keyReleaseEvent(self, event):
        """Re-implement QGraphicsView's keyReleaseEvent handler"""
        # call corresponding base class method
        super().keyReleaseEvent(event)
    def dragEnterEvent(self, event):
        """Accept a drag entering the view only when it carries URLs."""
        if event.mimeData().hasUrls():
            event.accept()
        else:
            event.ignore()
    def dragLeaveEvent(self, event):
        """Nothing to clean up when a drag leaves the view."""
        pass
    def dragMoveEvent(self, event):
        """Allow a copy drop while the drag carries URLs with text."""
        if event.mimeData().hasUrls():
            if event.mimeData().hasText():
                event.setDropAction(QtCore.Qt.CopyAction)
                event.accept()
        else:
            event.ignore()
    def dropEvent(self, event):
        """Load every dropped local file as an airfoil."""
        for url in event.mimeData().urls():
            path = url.toLocalFile()
            if os.path.isfile(path):
                self.parent.slots.loadAirfoil(path, comment='#')
    def scaleView(self, factor):
        """Scale the view by factor, keeping the zoom within MIN/MAXZOOM."""
        # check if zoom limits are exceeded
        # m11 = x-scaling
        sx = self.transform().m11()
        too_big = sx > MAXZOOM and factor > 1.0
        too_small = sx < MINZOOM and factor < 1.0
        if too_big or too_small:
            return
        # do the actual zooming
        self.scale(factor, factor)
        # rescale markers during zoom, i.e. keep them constant size
        self.adjustMarkerSize()
        # cache view to be able to keep it during resize
        self.getSceneFromView()
    def adjustMarkerSize(self):
        """Adjust marker size during zoom. Marker items are circles
        which are otherwise affected by zoom. Using MARKERSIZE from
        Settings a fixed markersize (e.g. 3 pixels) can be kept.
        This method immitates the behaviour of pen.setCosmetic()
        """
        # FIXME
        # FIXME this fixes an accidential call of this method
        # FIXME should be fixed by checking when called
        # FIXME
        if not self.parent.airfoil:
            return
        #
        # map the current zoom linearly onto a 1x..4x marker scale factor
        current_zoom = self.transform().m11()
        scale_marker = 1. + 3. * (current_zoom - MINZOOM) / (MAXZOOM - MINZOOM)
        # scale_marker = 100.
        # logger.info(f'Current zoom value {current_zoom}')
        # logger.info(f'Scale factor for markers {scale_marker}')
        # markers are drawn in GraphicsItem using scene coordinates
        # in order to keep them constant size, also when zooming
        # a fixed pixel size (MARKERSIZE from settings) is mapped to
        # scene coordinates
        # depending on the zoom, this leads to always different
        # scene coordinates
        # map a square with side length of MARKERSIZE to the scene coords
        mappedMarker = self.mapToScene(
            QtCore.QRect(0, 0, MARKERSIZE*scale_marker, MARKERSIZE*scale_marker))
        mappedMarkerWidth = mappedMarker.boundingRect().width()
        if self.parent.airfoil.contourPolygon:
            markers = self.parent.airfoil.polygonMarkers
            x, y = self.parent.airfoil.raw_coordinates
            for i, marker in enumerate(markers):
                # in case of circle, args is a QRectF
                marker.args = [QtCore.QRectF(x[i] - mappedMarkerWidth,
                                             y[i] - mappedMarkerWidth,
                                             2. * mappedMarkerWidth,
                                             2. * mappedMarkerWidth)]
        # if self.parent.airfoil.contourSpline:
        if hasattr(self.parent.airfoil, 'contourSpline'):
            markers = self.parent.airfoil.splineMarkers
            x, y = self.parent.airfoil.spline_data[0]
            for i, marker in enumerate(markers):
                # in case of circle, args is a QRectF
                marker.args = [QtCore.QRectF(x[i] - mappedMarkerWidth,
                                             y[i] - mappedMarkerWidth,
                                             2. * mappedMarkerWidth,
                                             2. * mappedMarkerWidth)]
    def getSceneFromView(self):
        """Cache view to be able to keep it during resize"""
        # map view rectangle to scene coordinates
        polygon = self.mapToScene(self.rect())
        # sceneview describes the rectangle which is currently
        # being viewed in scene coordinates
        # this is needed during resizing to be able to keep the view
        self.sceneview = QtCore.QRectF(polygon[0], polygon[2])
    def contextMenuEvent(self, event):
        """creates popup menu for the graphicsview"""
        menu = QtWidgets.QMenu(self)
        fitairfoil = menu.addAction('Fit airfoil in view')
        fitairfoil.setShortcut('CTRL+f')
        fitall = menu.addAction('Fit all items in view')
        fitall.setShortcut('HOME, CTRL+SHIFT+f')
        menu.addSeparator()
        delitems = menu.addAction('Delete airfoil')
        delitems.setShortcut('Del')
        menu.addSeparator()
        togglebg = menu.addAction('Toggle background')
        togglebg.setShortcut('CTRL+b')
        # exec_ blocks until the user picks an action (or dismisses the menu)
        action = menu.exec_(self.mapToGlobal(event.pos()))
        if action == togglebg:
            self.parent.slots.onBackground()
        elif action == fitairfoil:
            self.parent.slots.fitAirfoilInView()
        elif action == fitall:
            self.parent.slots.onViewAll()
        # remove all selected items from the scene
        elif action == delitems:
            self.parent.slots.removeAirfoil()
        # call corresponding base class method
        super().contextMenuEvent(event)
class RubberBand(QtWidgets.QRubberBand):
    """Custom rubberband
    from: http://stackoverflow.com/questions/25642618
    """
    def __init__(self, *args, **kwargs):
        """Configure pen/brush; args[1] is the owning graphics view."""
        super().__init__(*args, **kwargs)
        self.view = args[1]
        # set pen and brush (filling)
        self.pen = QtGui.QPen()
        self.pen.setStyle(QtCore.Qt.DotLine)
        self.pen.setColor(QtGui.QColor(80, 80, 100))
        self.brush = QtGui.QBrush()
        color = QtGui.QColor(20, 20, 80, 30)
        self.brush.setColor(color)
        # self.brush.setStyle(QtCore.Qt.NoBrush)
        self.brush.setStyle(QtCore.Qt.SolidPattern)
        # set style selectively for the rubberband like that
        # see: http://stackoverflow.com/questions/25642618
        # required as opacity might not work
        # NOTE: opacity removed here
        self.setStyle(QtWidgets.QStyleFactory.create('windowsvista'))
        # set boolean for allowing zoom
        self.allow_zoom = False
    def paintEvent(self, QPaintEvent):
        """Draw the band; fill it only when it is big enough to zoom."""
        painter = QtGui.QPainter(self)
        self.pen.setColor(QtGui.QColor(80, 80, 100))
        self.pen.setWidthF(1.5)
        self.pen.setStyle(QtCore.Qt.DotLine)
        # zoom rect must be at least RUBBERBANDSIZE % of view to allow zoom
        if (QPaintEvent.rect().width() < RUBBERBANDSIZE * self.view.width()) \
                or \
                (QPaintEvent.rect().height() < RUBBERBANDSIZE * self.view.height()):
            self.brush.setStyle(QtCore.Qt.NoBrush)
            # set boolean for allowing zoom
            self.allow_zoom = False
        else:
            # if rubberband rect is big enough indicate this by fill color
            color = QtGui.QColor(10, 30, 140, 45)
            self.brush.setColor(color)
            self.brush.setStyle(QtCore.Qt.SolidPattern)
            # set boolean for allowing zoom
            self.allow_zoom = True
        painter.setBrush(self.brush)
        painter.setPen(self.pen)
        painter.drawRect(QPaintEvent.rect())
| [
"andreas.ennemoser@aon.at"
] | andreas.ennemoser@aon.at |
ea1150a925ad744777f4984d38f49553a2e72068 | 11611a4c44e3da661990bf0ca585b24932fe4d8d | /5-多线程/11.py | 26c742d993f2c807ea4ddc701d7e2ccd9441e32a | [] | no_license | Marina-lyy/basics | d94532bac0d45eb73a9e8bfd317770f8b660b3f2 | 333ec857b79ef516e655779849fe13769281471e | refs/heads/master | 2022-04-01T23:49:55.366658 | 2020-02-02T13:35:56 | 2020-02-02T13:35:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 636 | py | import threading
# shared counter mutated concurrently by both workers (deliberately not
# thread-safe); NOTE: the name shadows the builtin sum()
sum = 0
# each worker performs loopSum - 1 updates (range() below starts at 1)
loopSum = 1000000
def myAdd():
    """Increment the module-level counter ``sum`` (loopSum - 1) times.

    Not thread-safe on purpose: ``sum += 1`` is a read-modify-write race
    used to demonstrate lost updates under threading.
    """
    global sum, loopSum
    step = 1
    while step < loopSum:
        sum += 1
        step += 1
def myMinu():
    """Decrement the module-level counter ``sum`` (loopSum - 1) times.

    Bug fix: the original body read ``sum -+ 1`` - subtraction of a unary
    plus, i.e. a no-op expression - so the counter was never written back.
    It now mirrors myAdd with ``sum -= 1``.
    """
    global sum, loopSum
    for i in range(1, loopSum):
        sum -= 1
'''
if __name__ == '__main__':
    myAdd()
    print(sum)
    myMinu()
    print(sum)
'''
if __name__ == '__main__':
    print("Starting....{0}".format(sum))
    # run the increment and decrement workers concurrently to see whether
    # the unsynchronized updates still produce the expected result
    t1 = threading.Thread(target=myAdd, args=())
    t2 = threading.Thread(target=myMinu,args=())
    t1.start()
    t2.start()
    # wait for both workers before printing the final counter value
    t1.join()
    t2.join()
    print("Done....{0}".format(sum))
| [
"1295278001@qq.com"
] | 1295278001@qq.com |
34ae09386144daba8c693cd839c94b11c7223dcd | e3edf9f0e8cd1c0471f59f1fd81f37b3694b5211 | /photo_organizer/logic/photo_size_adjuster.py | 0a34f0f1d8bd79addae1ce353071e21ff5e111ce | [] | no_license | not-so-fat/photo_organizer_py | f1ff4663b834f91cece3180cc02664fb7b3d1df1 | 2268685ad788c32f5d6cf056991a8efb787d6b4c | refs/heads/master | 2023-02-17T20:05:02.625355 | 2021-01-09T17:14:22 | 2021-01-09T17:14:22 | 326,556,289 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,235 | py | from photo_organizer.settings import TOP_BUTTON_HEIGHT, MARGIN_BOTTOM, MARGIN_LEFT_RIGHT
def get_new_image(pil_image, screenheight, screenwidth):
    """Shrink pil_image to fit the drawable screen area and center it.

    Returns the (possibly resized) image together with the x/y position at
    which it should be drawn.
    """
    width0, height0 = pil_image.width, pil_image.height
    # drawable area left over after the top button bar and the margins
    avail_height = screenheight - TOP_BUTTON_HEIGHT - MARGIN_BOTTOM
    avail_width = screenwidth - 2 * MARGIN_LEFT_RIGHT
    factor = determine_scale(height0, width0, avail_height, avail_width)
    if factor == 1:
        # already fits - keep the original image and dimensions
        new_width, new_height = width0, height0
    else:
        new_width = int(width0 * factor)
        new_height = int(height0 * factor)
        pil_image = pil_image.resize((new_width, new_height))
    # center the image inside the drawable area
    new_x = int((avail_width - new_width) / 2)
    new_y = int((TOP_BUTTON_HEIGHT + avail_height - new_height) / 2)
    return pil_image, new_x, new_y
def determine_scale(original_height, original_width, max_height, max_width):
    """Return the shrink factor that makes the image fit the maximum box.

    Each axis contributes a factor of 1 when it already fits; otherwise the
    ratio max/original. The smaller factor wins so both axes fit.
    """
    height_factor = max_height / original_height if original_height > max_height else 1
    width_factor = max_width / original_width if original_width > max_width else 1
    return min(width_factor, height_factor)
| [
"conjurer.not.so.fat@gmail.com"
] | conjurer.not.so.fat@gmail.com |
2870d8fd2d7988664ac85e49aef508886060949c | 4b678cec44b3630b19589ad7fe54f0002badce04 | /Fifa_2019.py | aa4d3c5ebabac40ebdb9d9353cfc798ab4204dbb | [] | no_license | Ibravic/FIFA_2019 | 4d2d83cccdfa487d2a970a349d735da2fcd2b8d5 | 9797b11bc542e012e138b15f618914d1bcaaae9f | refs/heads/main | 2023-03-18T10:28:24.705764 | 2021-03-13T19:18:27 | 2021-03-13T19:18:27 | 345,064,932 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,610 | py | # -*- coding: utf-8 -*-
"""
Created on Sun Jan 24 21:40:01 2021
@author: Ibrahim
"""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import scipy.stats as stats
import seaborn as sns
from sklearn import linear_model
df_fifa=pd.read_excel("f:\\Personal\\Data Analysis\\Fifa2019Data.xlsx")
#Missing Data
freq=df_fifa["Crossing"].value_counts().idxmax()
df_fifa["Crossing"].replace(np.nan,freq , inplace=True)
df_fifa.dropna(axis=0,inplace=True)
#Data Formatting
real_madrid={"R.Madrid":"Real Madrid","Real M.":"Real Madrid","R.M":"Real Madrid"}
df_fifa["Club"].replace(real_madrid.keys(),real_madrid.values(),inplace=True)
df_fifa["Value"]=df_fifa["Value"]/1000000
df_fifa.rename(columns={"Value":"Value in Millions"},inplace=True)
#Categorize Data
bins=np.linspace(min(df_fifa["Value in Millions"]),max(df_fifa["Value in Millions"]),4)
group_names=["low","medium","high"]
df_fifa["value-group"]=pd.cut(df_fifa["Value in Millions"],bins,labels=group_names,include_lowest=True)
#Descriptive Statistics
value_group_count=df_fifa["value-group"].value_counts().to_frame()
value_group_count.rename(columns={"value-group":"category-counts"},inplace=True)
#using box plot
df_fifa["Age"].plot(kind="box")
df_fifa["Age"].plot(kind="box",figsize=(15,10),grid="on")
#Scatter plot
plt.figure(figsize=(15,10))
plt.scatter(df_fifa["Age"],df_fifa["Value in Millions"])
plt.grid()
plt.xlabel("Age")
plt.ylabel("Values in Millions")
plt.show()
plt.figure(figsize=(15,10))
plt.scatter(df_fifa["Overall"],df_fifa["Value in Millions"])
plt.grid()
plt.xlabel("Overall")
plt.ylabel("Values in Millions")
plt.show()
#group data
df_nation=df_fifa[["Nationality","Overall"]]
df_grp=df_nation.groupby(["Nationality"],as_index=False).mean()
df_grp.sort_values(by="Overall",ascending=False, inplace=True)
df_nation=df_fifa[["Nationality","Position","Overall"]]
df_grp=df_nation.groupby(["Nationality","Position"],as_index=False).mean()
df_grp.pivot(index="Nationality",columns="Position")
#Correlation
p_coef,p_value=stats.pearsonr(df_fifa["Overall"],df_fifa["Value in Millions"])
p_coef2,p_value2=stats.pearsonr(df_fifa["Age"],df_fifa["Value in Millions"])
#headmap
corr=df_fifa.corr()
sns.heatmap(corr, xticklabels=corr.columns, yticklabels=corr.columns)
df_corr=df_fifa[["Age","Overall","Potential","Finishing","SprintSpeed","Value in Millions"]]
corr2=df_corr.corr()
sns.heatmap(corr2, xticklabels=corr2.columns, yticklabels=corr2.columns, cmap="YlGnBu")
#Linear Regression
lm=linear_model.LinearRegression()
X=df_fifa[["Overall"]]
Y=df_fifa["Value in Millions"]
lm.fit(X,Y)
Yhat=lm.predict(X)
b0=lm.intercept_
b1=lm.coef_
results=pd.DataFrame({"Overall":df_fifa["Overall"], "Actual value":df_fifa["Value in Millions"],"Predicted value":Yhat})
new_player=pd.DataFrame({"Overall":[99,95,89]})
Yhat2=lm.predict(new_player[["Overall"]])
fig,ax=plt.subplots(figsize=(15,10))
sns.regplot(x="Overall",y="Value in Millions",data=df_fifa,ax=ax)
plt.grid()
plt.show()
#Multiple Linear Regression
Z=df_fifa[["Overall","Age"]]
lm.fit(Z,df_fifa["Value in Millions"])
Yhat3=lm.predict(Z)
results=pd.DataFrame({"Overall":df_fifa["Overall"],"Age":df_fifa["Age"], "Actual value":df_fifa["Value in Millions"],"Predicted value":Yhat3})
fig,ax=plt.subplots(figsize=(15,10))
sns.residplot(x="Age",y="Value in Millions",data=df_fifa,ax=ax)
plt.grid()
plt.show()
#polynomial Regression
from sklearn.preprocessing import PolynomialFeatures
pr=PolynomialFeatures(degree=2, include_bias=False)
x_poly=pr.fit_transform(df_fifa[["Overall","Age"]])
lin_reg2 = linear_model.LinearRegression()
lin_reg2.fit(x_poly,Y)
Yhat4=lin_reg2.predict(x_poly)
results=pd.DataFrame({"Overall":df_fifa["Overall"],"Age":df_fifa["Age"], "Actual value":df_fifa["Value in Millions"],"Linear value":Yhat3,"Polynomial value":Yhat4})
#pipeline
from sklearn.pipeline import Pipeline
Input=[(('polynomial'),PolynomialFeatures(degree=2)),("mode",linear_model.LinearRegression())]
pipe=Pipeline(Input)
pipe.fit(df_fifa[["Overall","Age"]],df_fifa["Value in Millions"])
Yhat5=pipe.predict(df_fifa[["Overall","Age"]])
results_compare=pd.DataFrame({"Polynomial value":Yhat4,"Pipeline":Yhat5})
Input=[(('polynomial'),PolynomialFeatures(degree=3)),("mode",linear_model.LinearRegression())]
pipe=Pipeline(Input)
pipe.fit(df_fifa[["Overall","Age"]],df_fifa["Value in Millions"])
Yhat6=pipe.predict(df_fifa[["Overall","Age"]])
results2=pd.DataFrame({"Overall":df_fifa["Overall"],"Age":df_fifa["Age"], "Actual value":df_fifa["Value in Millions"],"Linear value":Yhat3,"2nd-degree":Yhat5,"3rd-degree":Yhat6})
#Mean Sqaure Error
from sklearn.metrics import mean_squared_error
MSE_SL=mean_squared_error(df_fifa["Value in Millions"],Yhat)
MSE_ML=mean_squared_error(df_fifa["Value in Millions"],Yhat3)
MSE_2d=mean_squared_error(df_fifa["Value in Millions"],Yhat5)
MSE_3d=mean_squared_error(df_fifa["Value in Millions"],Yhat6)
#R-Squared
from sklearn.metrics import r2_score
r_SL=r2_score(df_fifa[["Value in Millions"]],Yhat)
r_ML=r2_score(df_fifa[["Value in Millions"]],Yhat3)
r_2d=r2_score(df_fifa[["Value in Millions"]],Yhat4)
r_3d=r2_score(df_fifa[["Value in Millions"]],Yhat6)
#Use for loop to get the best model
from sklearn.pipeline import Pipeline
r2=[]
MSE=[]
degree=[1,2,3,4,5,6,7,8,9,10]
for i in degree:
Input=[(('polynomial'),PolynomialFeatures(degree=i)),("mode",linear_model.LinearRegression())]
pipe=Pipeline(Input)
pipe.fit(df_fifa[["Overall","Age"]],df_fifa["Value in Millions"])
Yhat=pipe.predict(df_fifa[["Overall","Age"]])
r2_value=r2_score(df_fifa[["Value in Millions"]],Yhat)
r2.append(r2_value)
MSE_value=mean_squared_error(df_fifa["Value in Millions"],Yhat)
MSE.append(MSE_value)
plt.figure(figsize=(15,8))
plt.scatter(df_fifa.index,df_fifa["Value in Millions"])
plt.plot(df_fifa.index,Yhat)
plt.grid()
plt.figure(figsize=(15,10))
plt.plot(degree,r2)
plt.grid()
plt.xlabel("degree")
plt.ylabel("r2")
#Choosen Model
Input=[(('polynomial'),PolynomialFeatures(degree=9)),("mode",linear_model.LinearRegression())]
pipe=Pipeline(Input)
pipe.fit(df_fifa[["Overall","Age"]],df_fifa["Value in Millions"])
Yhat=pipe.predict(df_fifa[["Overall","Age"]])
result_final=pd.DataFrame({"Overall":df_fifa["Overall"],"Age":df_fifa["Age"], "Actual value":df_fifa["Value in Millions"],"Predicted value":Yhat}) | [
"noreply@github.com"
] | noreply@github.com |
9080767c9492cc2dc984b1b52d3f9dbdbf137669 | 3a83a18c97e27bc89b3718b50edd32a1bc57eaaf | /qualitylevel_jccs.py | 5f4884209b49a0ebeac84d03a73e9df4b6c48f13 | [] | no_license | liudoudou86/Bug_calc | 3217856442f34ad679b1b38120e5ff5246b53b93 | 81dd96ce7a8d8a926b44f3bbab2efb67353d1d71 | refs/heads/main | 2023-02-25T11:02:20.639069 | 2021-02-03T05:24:27 | 2021-02-03T05:24:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,945 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author:lz
import data
# 集成测试的质量等级计算
def jccs_x():
    """Print the weighted integration-test defect score and its grade.

    Thresholds: A (0, 10], B (10, 20], C (20, 30], D (30, 100]; anything
    else is out of range. Fix: the original wrapped the if/elif chain in a
    ``while True`` that broke on every branch (dead scaffolding) and
    duplicated the score print in all five branches.
    """
    # weighted defect score; slight defects have weight 0 but the accessor
    # call is kept so any side effect of data.cs.slight_jccs() is preserved
    bug_jccs = (data.cs.fatal_jccs()*15)+(data.cs.serious_jccs()*1.5)+(data.cs.general_jccs()*0.3)+(data.cs.slight_jccs()*0)
    print("Bug值:",bug_jccs)
    if 30 < bug_jccs <= 100:
        print("质量等级:D")
    elif 20 < bug_jccs <= 30:
        print("质量等级:C")
    elif 10 < bug_jccs <= 20:
        print("质量等级:B")
    elif 0 < bug_jccs <= 10:
        print("质量等级:A")
    else:
        print("质量等级超出范围")
def jccs_z():
    """Print the weighted bug score and quality grade for integration testing.

    The ``_z`` suffix presumably denotes a medium project-size tier --
    TODO confirm against project conventions.

    Weighted score = fatal*15 + serious*1.5 + general*0.3 + slight*0.
    Bands: A (0, 20], B (20, 30], C (30, 40], D (40, 100]; anything
    outside (0, 100] is reported as out of range.
    """
    # The original wrapped this in a `while True` whose every branch broke
    # on the first pass; compute and classify once instead.
    bug_jccs = (data.cs.fatal_jccs() * 15
                + data.cs.serious_jccs() * 1.5
                + data.cs.general_jccs() * 0.3
                + data.cs.slight_jccs() * 0)  # slight defects carry zero weight
    print("Bug值:", bug_jccs)
    if 40 < bug_jccs <= 100:
        print("质量等级:D")
    elif 30 < bug_jccs <= 40:
        print("质量等级:C")
    elif 20 < bug_jccs <= 30:
        print("质量等级:B")
    elif 0 < bug_jccs <= 20:
        print("质量等级:A")
    else:
        # bug_jccs <= 0 or > 100: outside the defined grading bands.
        print("质量等级超出范围")
def jccs_jd():
    """Print the weighted bug score and quality grade for integration testing.

    The ``_jd`` suffix presumably denotes a fairly-large project-size
    tier -- TODO confirm against project conventions.

    Weighted score = fatal*15 + serious*1.5 + general*0.3 + slight*0.
    Bands: A (0, 30], B (30, 40], C (40, 50], D (50, 100]; anything
    outside (0, 100] is reported as out of range.
    """
    # The original wrapped this in a `while True` whose every branch broke
    # on the first pass; compute and classify once instead.
    bug_jccs = (data.cs.fatal_jccs() * 15
                + data.cs.serious_jccs() * 1.5
                + data.cs.general_jccs() * 0.3
                + data.cs.slight_jccs() * 0)  # slight defects carry zero weight
    print("Bug值:", bug_jccs)
    if 50 < bug_jccs <= 100:
        print("质量等级:D")
    elif 40 < bug_jccs <= 50:
        print("质量等级:C")
    elif 30 < bug_jccs <= 40:
        print("质量等级:B")
    elif 0 < bug_jccs <= 30:
        print("质量等级:A")
    else:
        # bug_jccs <= 0 or > 100: outside the defined grading bands.
        print("质量等级超出范围")
def jccs_d():
    """Print the weighted bug score and quality grade for integration testing.

    The ``_d`` suffix presumably denotes a large project-size tier --
    TODO confirm against project conventions.

    Weighted score = fatal*15 + serious*1.5 + general*0.3 + slight*0.
    Bands: A (0, 80], B (80, 100], C (100, 120], D (120, 150]; anything
    outside (0, 150] is reported as out of range.
    """
    # The original wrapped this in a `while True` whose every branch broke
    # on the first pass; compute and classify once instead.
    bug_jccs = (data.cs.fatal_jccs() * 15
                + data.cs.serious_jccs() * 1.5
                + data.cs.general_jccs() * 0.3
                + data.cs.slight_jccs() * 0)  # slight defects carry zero weight
    print("Bug值:", bug_jccs)
    if 120 < bug_jccs <= 150:
        print("质量等级:D")
    elif 100 < bug_jccs <= 120:
        print("质量等级:C")
    elif 80 < bug_jccs <= 100:
        print("质量等级:B")
    elif 0 < bug_jccs <= 80:
        print("质量等级:A")
    else:
        # bug_jccs <= 0 or > 150: outside the defined grading bands.
        print("质量等级超出范围")
def jccs_td():
    """Print the weighted bug score and quality grade for integration testing.

    The ``_td`` suffix presumably denotes an extra-large project-size
    tier -- TODO confirm against project conventions.

    Weighted score = fatal*15 + serious*1.5 + general*0.3 + slight*0.
    Bands: A (0, 100], B (100, 150], C (150, 180], D (180, 240]; anything
    outside (0, 240] is reported as out of range.
    """
    # The original wrapped this in a `while True` whose every branch broke
    # on the first pass; compute and classify once instead.
    bug_jccs = (data.cs.fatal_jccs() * 15
                + data.cs.serious_jccs() * 1.5
                + data.cs.general_jccs() * 0.3
                + data.cs.slight_jccs() * 0)  # slight defects carry zero weight
    print("Bug值:", bug_jccs)
    if 180 < bug_jccs <= 240:
        print("质量等级:D")
    elif 150 < bug_jccs <= 180:
        print("质量等级:C")
    elif 100 < bug_jccs <= 150:
        print("质量等级:B")
    elif 0 < bug_jccs <= 100:
        print("质量等级:A")
    else:
        # bug_jccs <= 0 or > 240: outside the defined grading bands.
        print("质量等级超出范围")
def jccs_cd():
    """Print the weighted bug score and quality grade for integration testing.

    The ``_cd`` suffix presumably denotes the largest project-size tier
    (widest grading bands) -- TODO confirm against project conventions.

    Weighted score = fatal*15 + serious*1.5 + general*0.3 + slight*0.
    Bands: A (0, 120], B (120, 180], C (180, 200], D (200, 300]; anything
    outside (0, 300] is reported as out of range.
    """
    # The original wrapped this in a `while True` whose every branch broke
    # on the first pass; compute and classify once instead.
    bug_jccs = (data.cs.fatal_jccs() * 15
                + data.cs.serious_jccs() * 1.5
                + data.cs.general_jccs() * 0.3
                + data.cs.slight_jccs() * 0)  # slight defects carry zero weight
    print("Bug值:", bug_jccs)
    if 200 < bug_jccs <= 300:
        print("质量等级:D")
    elif 180 < bug_jccs <= 200:
        print("质量等级:C")
    elif 120 < bug_jccs <= 180:
        print("质量等级:B")
    elif 0 < bug_jccs <= 120:
        print("质量等级:A")
    else:
        # bug_jccs <= 0 or > 300: outside the defined grading bands.
        print("质量等级超出范围")
| [
"noreply@github.com"
] | noreply@github.com |
cde33c3fb73f89f70fcf662ed9338c01dcaaa9c0 | 95a3ca5eed239f4084d9f1788cc8e355ad9caa3f | /setup.py | b3da51f2332290688d8fb1d5252849d9ddfc0190 | [
"BSD-2-Clause",
"MIT"
] | permissive | ktp-forked-repos/esperanto-analyzer | 5290e45613d0aa48c3a0a7aa463901d0c9d3b4b7 | af1e8609ec0696e3d1975aa0ba0c88e5f04f8468 | refs/heads/master | 2020-05-14T10:26:02.148538 | 2018-09-06T21:33:11 | 2018-09-06T21:33:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,799 | py | # pylint: disable=missing-docstring,no-self-use,invalid-name
# Learn more: https://github.com/kennethreitz/setup.py
from setuptools import setup, find_packages
# Long-form description shown on PyPI comes straight from the README.
with open('README.md') as readme_fh:
    readme = readme_fh.read()

# NOTE(review): this feeds the *entire* LICENSE text into setup()'s
# `license` field; setuptools normally expects a short identifier such
# as "MIT" -- confirm this is intentional.
with open('LICENSE') as license_fh:
    lib_license = license_fh.read()

# Requirement files carry one specifier per line; strip trailing newlines.
with open('requirements.txt') as spec_fh:
    requirements = [spec.rstrip() for spec in spec_fh]

with open('test_requirements.txt') as spec_fh:
    test_requirements = [spec.rstrip() for spec in spec_fh]

# Trove classifiers advertised on PyPI.
CLASSIFIERS = [
    'Programming Language :: Python :: 3.7',
    "Programming Language :: Python :: 3",
    'Intended Audience :: Developers',
    'Intended Audience :: Education',
    'Intended Audience :: Information Technology',
    'Intended Audience :: Science/Research',
    'Topic :: Scientific/Engineering',
    'Topic :: Scientific/Engineering :: Artificial Intelligence',
    'Topic :: Scientific/Engineering :: Human Machine Interfaces',
    'Topic :: Scientific/Engineering :: Information Analysis',
    'Topic :: Text Processing',
    'Topic :: Text Processing :: Filters',
    'Topic :: Text Processing :: General',
    'Topic :: Text Processing :: Indexing',
    'Topic :: Text Processing :: Linguistic',
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
]

setup(
    name='esperanto-analyzer',
    version='0.0.3',
    description='Morphological and syntactic analysis of Esperanto sentences.',
    long_description=readme,
    author='Rafael Fidelis',
    author_email='rafaelfid3lis@gmail.com',
    url='https://github.com/fidelisrafael/esperanto-analyzer',
    license=lib_license,
    packages=find_packages(exclude=('tests', 'docs')),
    install_requires=requirements,
    tests_require=test_requirements,
    classifiers=CLASSIFIERS,
)
| [
"rafa_fidelis@yahoo.com.br"
] | rafa_fidelis@yahoo.com.br |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.