blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e4382c577641a37d4901e2851a5234ef5ca7c454 | 2d7d3dab4ca34a85be82d9b3acaf0875af28e65d | /broadcast.py | 48193142b4704117c3a4edf377699464642a51b3 | [] | no_license | home9464/selfdrivingcar | 1ae21d63e17f1eebe1786a236c15659106ca141f | 29dde037b583cee9f33b029c5fb67e3eb4a3e344 | refs/heads/master | 2023-07-02T07:22:58.431879 | 2021-07-26T17:06:45 | 2021-07-26T17:06:45 | 228,827,200 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,192 | py | import asyncio
import threading
import ctypes
import time
from camsrv import broadcast
class VideoBroadcastThread(threading.Thread):
def __init__(self, name='thread1'):
threading.Thread.__init__(self)
self.name = name
def run(self):
# target function of the thread class
try:
video_loop = asyncio.new_event_loop()
video_loop.create_task(broadcast(video_loop))
video_loop.run_forever()
finally:
video_loop.close()
print('broadcast ended')
def get_id(self):
# returns id of the respective thread
if hasattr(self, '_thread_id'):
return self._thread_id
for id, thread in threading._active.items():
if thread is self:
return id
def raise_exception(self):
thread_id = self.get_id()
res = ctypes.pythonapi.PyThreadState_SetAsyncExc(thread_id,
ctypes.py_object(SystemExit))
if res > 1:
print(res)
ctypes.pythonapi.PyThreadState_SetAsyncExc(thread_id, 0)
print('Exception raise failure')
| [
"home9464@gmail.com"
] | home9464@gmail.com |
8cbf854e20549e3c55fba9a464eed3e5afcac7a8 | 4297039ec28e275dc4f091d59d3eec4d01cbeb87 | /Problem3.py | 8c9fc0c926c2089c47f1de1d46579a21e3cf72c4 | [] | no_license | shantanu609/Binary-Search-4 | 6218015f272f2edf5c300b90110f12520c587134 | 6bc949768388b697bb3db8a7c1aa789951a5fe08 | refs/heads/master | 2022-12-02T12:34:32.611823 | 2020-07-21T00:24:04 | 2020-07-21T00:24:04 | 280,953,150 | 0 | 0 | null | 2020-07-19T21:24:55 | 2020-07-19T21:24:54 | null | UTF-8 | Python | false | false | 1,429 | py | # Time Complexity : O(log(n)) where n is the length of smaller array.
# Space Complexity : O(1)
# Did this code successfully run on Leetcode : Yes
# Any problem you faced while coding this : No
# Your code here along with comments explaining your approach
class Solution:
def findMedianSortedArrays(self, nums1, nums2):
n1 = len(nums1)
n2 = len(nums2)
if n1 > n2:
return self.findMedianSortedArrays(nums2, nums1)
low = 0
high = n1
while low <= high:
partX = (low+high)//2
partY = (n1+n2+1) // 2 - partX
l1 = float('-inf') if partX == 0 else nums1[partX-1]
R1 = float('inf') if partX == n1 else nums1[partX]
l2 = float('-inf') if partY == 0 else nums2[partY-1]
R2 = float('inf') if partY == n2 else nums2[partY]
if (l1 <= R2 and l2 <= R1):
# correct partition
if (n1+n2) % 2 == 0: # even case
return (max(l1, l2) + min(R1, R2)) / 2
else:
return max(l1, l2)
elif l1 > R2:
high = partX - 1
else:
low = partX + 1
return -1
if __name__ == "__main__":
s = Solution()
# Test Case 1
nums1 = [0,0]
nums2 = [0,0]
res = s.findMedianSortedArrays(nums1, nums2)
print(res)
| [
"shantanu_shinde_@mail.fresnostate.edu"
] | shantanu_shinde_@mail.fresnostate.edu |
0ca3f2b59f63bd59d912c3a689244f7d0f73d269 | 2b05f6a7c8719cba8c18131f51bfabc1beb99786 | /userExtends/models.py | 43d097aaf1cfda349902405e4f112cd4110aa5eb | [] | no_license | peoplein333/peoplein333 | 272e8266f9ae09d68cb2b8ed6bc0718249793026 | eb1e4526aa775d2e9add66420293d0d0529fda4c | refs/heads/master | 2020-08-03T06:50:54.212471 | 2019-10-06T11:52:22 | 2019-10-06T11:52:22 | 211,659,209 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,831 | py |
from __future__ import unicode_literals
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Profile(models.Model):
GANGNAM = 1
SUNGDONG = 2
YEONGDEUNGPO = 3
GWANGJIN = 4
MAPO = 5
SONGPA =6
AREA_CHOICES = (
(GANGNAM, '강남구'),
(SUNGDONG, '성동구'),
(YEONGDEUNGPO, '영등포구'),
(GWANGJIN, '광진구'),
(MAPO, '마포구'),
(SONGPA, '송파구')
)
GENDER = (
('여성', '여성'),
('남성', '남성'),
)
user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
name = models.CharField(('name'), max_length=30, blank=True)
birthdate = models.DateField(null=True, blank=True, verbose_name='생일')
gender = models.CharField(max_length=7, choices=GENDER, default='', verbose_name='성별')
in_area = models.PositiveSmallIntegerField(choices=AREA_CHOICES, null=True, blank=True, verbose_name='관심지역')
class Meta:
verbose_name = 'profile'
verbose_name_plural = 'profiles'
def __str__(self):
return self.user.username
# @receiver(post_save, sender=User)
# def create_or_update_user_profile(sender, instance, created, **kwargs):
# if created:
# Profile.objects.create(user=instance)
# instance.profile.save()
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
@receiver(post_save, sender=User)
def save_user_profile(sender, instance, **kwargs):
instance.profile.save() | [
"peoplein3c@gmail.com"
] | peoplein3c@gmail.com |
6262dbbc67ab32cdde9d64c08ae395031c327f52 | 6657685c66c5741dc6e944f00fe9a4c74eba3a61 | /model 1.0/Temperature.py | 5f0598f3ba53b106c4ade440b9b139ab29366204 | [] | no_license | vgmontenegro/AgriculturalCropModels | c5db923234d7efabf1233e547d65208357c33bb8 | 56a6159525ac83a4533ceb0c6980ceede57aac45 | refs/heads/master | 2023-05-09T10:23:51.829676 | 2021-06-03T12:56:31 | 2021-06-03T12:56:31 | 358,241,622 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 620 | py | def ftar(Tar, Tb, To1, To2, TB):
global FTar
if Tar < Tb:
FTar = float(0)
return FTar
else:
if Tar < To1:
FTar = float((Tar - Tb) / (To1 - Tb))
return FTar
else:
if Tar <= To2:
FTar = float(1)
return FTar
else:
if Tar < TB:
FTar = float((TB - Tar) / (TB - To2))
return FTar
else:
FTar = float(0)
return FTar
def grausdia(Tar, Tb, GD=0):
GD = GD + (float(Tar) - Tb)
return GD | [
"82038809+vgmontenegro@users.noreply.github.com"
] | 82038809+vgmontenegro@users.noreply.github.com |
97894c654a06e3ef8c20e075e4b88d4cb949fcf4 | 78de1bbbdf92678d14005f50498abb569f36b1d2 | /tests/settings.py | 27238f45e9c6ceb2ff9f88d9a80817036d7b9fcc | [] | no_license | auf/auf_django_permissions | d1138cfff19ca86de67ebdfe6bd5bbf1e6f326ed | 0f078b890be71ec13ae7373f54c2b08c2094d471 | refs/heads/master | 2021-01-16T21:07:58.483239 | 2016-08-04T20:09:01 | 2016-08-04T20:09:01 | 64,491,411 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 589 | py | DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'auf.django.permissions',
'tests.simpletests',
)
AUTHENTICATION_BACKENDS = (
'auf.django.permissions.AuthenticationBackend',
)
TEMPLATE_LOADERS = (
'django.template.loaders.app_directories.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.request',
)
ROLE_PROVIDERS = (
'tests.simpletests.role_provider',
)
SECRET_KEY = 'not-very-secret'
| [
"eric.mcsween@auf.org"
] | eric.mcsween@auf.org |
c460207202a359d3d379b774a17bac103c0adcb8 | 05b2bfe4c5bb716aa2e283567095d613b26f3667 | /listings/models.py | 0a00986df3ec557a00b5cddd29ba9caeae4881d7 | [] | no_license | anshupal11/Ashiyana-Estate | b0b8d09e42846b1cf4fc556e41557fe8523bd114 | ffdb8e0ae477c189b3002b6765d66eef638b0a4a | refs/heads/main | 2023-06-06T21:01:31.698523 | 2021-07-14T11:44:43 | 2021-07-14T11:44:43 | 360,515,280 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,433 | py | from django.db import models
from datetime import datetime
from realtors.models import Realtor
class Listing(models.Model):
relators = models.ForeignKey(Realtor, on_delete=models.DO_NOTHING)
title = models.CharField(max_length=200)
address = models.CharField(max_length=200)
city = models.CharField(max_length=100)
state = models.CharField(max_length=80)
zipcode = models.CharField(max_length=20)
description = models.TextField(blank=True)
price = models.IntegerField()
bedrooms = models.IntegerField()
bathrooms = models.DecimalField(max_digits=2, decimal_places=1)
garbage = models.IntegerField(default=0)
sqft = models.IntegerField()
lot_size = models.DecimalField(max_digits=5, decimal_places=1)
photo_main = models.ImageField(upload_to='photos/%Y/%m/%d/')
photo_1 = models.ImageField(upload_to='photos/%Y/%m/%d/',blank=True)
photo_2 = models.ImageField(upload_to='photos/%Y/%m/%d/',blank=True)
photo_3 = models.ImageField(upload_to='photos/%Y/%m/%d/',blank=True)
photo_4 = models.ImageField(upload_to='photos/%Y/%m/%d/',blank=True)
photo_5 = models.ImageField(upload_to='photos/%Y/%m/%d/',blank=True)
photo_6 = models.ImageField(upload_to='photos/%Y/%m/%d/',blank=True)
is_published = models.BooleanField(default=True)
list_date = models.DateTimeField(default=datetime.now, blank=True)
def __str__(self):
return self.title
| [
"anshu.pal108@gmail.com"
] | anshu.pal108@gmail.com |
9e3871b7925111c5f1251f76b0cfd2efe5d46095 | bd47c4c5f40184443889593e1f8885618c22b396 | /Versions/GPSAutoScripter_beta.py | d6c7ea2c51fcc0a75aa44ab834c48c4029431d79 | [] | no_license | AlexanderLutz75/GPSAutoUpdater | ad265582dfdc497abbf68abeca7b68a86e0f9f9d | 35e9351d2a151d690c8761ee0a483dbb01176c7a | refs/heads/master | 2020-04-27T19:50:18.507939 | 2019-03-09T01:54:00 | 2019-03-09T01:54:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,706 | py | from tkinter import *
from tkinter import filedialog
import serial
import serial.tools.list_ports
import time
import re
class GPSUpdater:
def __init__(self):
self.status = []
self.GUIstatus=[]
def statusUpdater(self,statusIndex,message): #helper function used to accomplish the repetative task of updating the status array
try:
self.status[statusIndex] = message
self.GUIstatus[statusIndex].initialize(message)
root.update()
except Exception:
self.status.insert(statusIndex,message)
self.GUIstatus[statusIndex].initialize(message)
root.update()
def selectSource(self):
file = filedialog.askopenfilename()
if(file)=='':
print("no file selected")
return
txtfile=open(file,"r")
if(txtfile)=='':
print("file opening failed")
return
self.message=txtfile.read()
txtfile.close()
print("file slected and ready for upload")
def scanPorts(self):
self.ports = list(serial.tools.list_ports.comports())
self.ports.sort()
self.ports[0]="NOT A GPS"
if(self.ports==[]): #keeps scanning for ports until one connects
print("no devices connected, Waiting for device")
time.sleep(1)
self.scanPorts()
for index, p in enumerate(self.ports):
Label(root, text=p[0]).grid(row=(index+1),column=0) # com port label creation
self.var = StringVar() #create a empty stringVar
self.GUIstatus.insert(index,self.var) #put into the array as a placeholder
Label(root, textvariable=self.GUIstatus[index]).grid(row=(index+1),column=1) #status label creation uses the GUIstatus as text
def findUnits(self): #Creates the logic for checking all connected units
for index, p in enumerate(self.ports):
print("----------------------------------------")
self.handleUnit(p[0],index) #pass the status index
print(self.status[index] + ": " + p[0])
print(self.status) #prints the entire status array for debugging.
def handleUnit(self,port,statusIndex): #Opens a single COM port and updates the status
#unfortunately the first pass will crash because the status array doesnt exist yet.
#This try-except block attempts to work around that.
try:
#try to read status array index
currentStatus = self.status[statusIndex]
except:
#if the status array index doesnt exist it is created
self.status.insert(statusIndex,"NOT CONNECTED")
self.GUIstatus[statusIndex].initialize("NOT CONNECTED")
root.update()
#-------------Port opening------------------------------
print("opening serial port: " + str(port))
#create the serial connection to the GPS
try:
ser = serial.Serial(port, 115200, timeout=1)
except Exception:
print("Cannot open port")
time.sleep(1)
return
#--------------------------------------------------------
#---------------is this port a factory reset GPS unit?-----------------
#Since a programmed GPS is 9600 this will not restart the upload process
if(self.status[statusIndex] == "NOT CONNECTED"):
print("checking for gps on: " + str(port))
checkIfGPS = "AT!GXBREAK"
ser.write(checkIfGPS.encode())
response = ser.readline()
print(response)
if(response == b'AT!GXBREAK'): #is connected
print("GPS detected on :" + str(port))
self.statusUpdater(statusIndex,"READY")
if(response == b''):
self.statusUpdater(statusIndex,"NOT CONNECTED")
if(response == b'\x00\x00'): #if interupted while waiting we need to skip
self.statusUpdater(statusIndex,"WAITING FOR GPS")
#--------------------------------------------------------
#-----------------is the unit ready for upload?----------
if(self.status[statusIndex] == "READY"):
ser.write(self.message.encode())
#check if the commands actually got written
while True: #read responses until its finishd
response = ser.readline()
print(response)
if(response == b'AT!GXAPP SETPARAM UART_BAUD=3; AT!GXAPP SETPARAM UART_FUNCTION=15;\n'):
print("Standard Script sent on: " + str(port))
self.statusUpdater(statusIndex,"DOWNLOADING")
break
if(response == b'AT!GXAPP GETFILE VIAFTP 64.87.28.100 FILENAME G604_08_02kX_KEYCRC_757E.gxe OTAP;'):
print("Upgrade Script sent on: " + str(port))
self.statusUpdater(statusIndex,"DOWNLOADING")
break
#------------------------------------------------------------
#----------------send poll to see if download finished-------
if(self.status[statusIndex] == "DOWNLOADING"):
try:
ser.baudrate = 115200
except Exception:
print("Couldnt Open port at 115200 baud")
try:
ser.baudrate = 9600
except Exception:
print("Couldnt Open port at 9600 baud")
print("Sending BREAK to check if download finished")
checkIfGPS = "AT!GXBREAK"
ser.write(checkIfGPS.encode())
response = ser.readline()
print("the response to AT!BREAK is: " + str(response))
if(response == b''): #Means GPS is not on 9600 baud yet.
self.statusUpdater(statusIndex,"DOWNLOADING")
if(response == b'AT!GXBREAK'): #means GPS accepts inputs again
self.statusUpdater(statusIndex,"WAITING FOR GPS")
#--------------When finished we need to check for disconects--------------
#The GPS should be on 9600 while we send on 9600
if(self.status[statusIndex] == "WAITING FOR GPS"):
try:
ser.baudrate = 9600
except Exception:
print("Couldnt Open port at 9600 baud")
poll = "AT!GXAPP POLL"
ser.write(poll.encode())
response = ser.read(2000)
print("the response to AT!POLL is: " + str(response))
#Keep spamming the unit with POLL until it provides GPS coordinates
GPScoordinates = re.search('LL:(.+?),', str(response))
if GPScoordinates: #resonds to poll
GotGPS = GPScoordinates.group(1)
if(float(GotGPS) > 0): #GPS coordinates aquired.
print("Remove this UNIT: " + str(port))
self.statusUpdater(statusIndex,"READY TO REMOVE")
else: #doesnt respond to the poll so we keep waiting
print("Unit waiting for GPS: " + str(port))
self.statusUpdater(statusIndex,"WAITING FOR GPS")
#The GPS should be fully programmed and ready to be removed
if(self.status[statusIndex] == "READY TO REMOVE"):
try:
ser.baudrate = 9600
except Exception:
print("Couldnt Open port at 9600 baud")
poll = "AT!GXAPP POLL"
ser.write(poll.encode())
response = ser.readline()
if(response != b''):#if the GPS answers then its ready to be removed but still connected
print("Remove this unit: " + str(port))
self.statusUpdater(statusIndex,"READY TO REMOVE")
if(response == b''): #if the GPS doenst answer its not connected
print("Completed unit has been unplugged: " + str(port))
self.statusUpdater(statusIndex,"NOT CONNECTED")
#Python Main method
if __name__ == "__main__":
root = Tk()
root.title("FieldLogix GPS Updater")
root.geometry("250x250")
Label(root, text="PORT NUMBER: ").grid(row=0,column=0)
Label(root, text="STATUS: ").grid(row=0,column=1)
myUpdater = GPSUpdater()
#Select Source
myUpdater.selectSource()
#Scan available com ports
myUpdater.scanPorts()
#check unit status THIS REPEATS FORVEVER
while(True):
myUpdater.findUnits()
root.update()
time.sleep(1)
| [
"noreply@github.com"
] | AlexanderLutz75.noreply@github.com |
f2c25027c45b40a93490886664660304da59568a | eab7a30463a6176a3fdf24cd43a1919e14284376 | /conftest.py | f52a1b640b5acc64fdf4a5ab9c82b336c4100e43 | [] | no_license | WwuTT/R28_Camera_autotest | 04635a2000231a1ee1f675e275774703110d8b88 | 747064f3b8d4482c85acacd45389c1739a5fc695 | refs/heads/master | 2022-10-12T08:40:17.463589 | 2020-06-09T07:02:27 | 2020-06-09T07:02:27 | 270,927,093 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,431 | py | # 写在conftest.py
import os
import allure
from selenium import webdriver
import pytest
# 添加报错截图到allure报告里
driver = None
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
'''
hook pytest失败
:param item:
:param call:
:return:
'''
# execute all other hooks to obtain the report object
outcome = yield
rep = outcome.get_result()
# we only look at actual failing test calls, not setup/teardown
if rep.when == "call" and rep.failed:
mode = "a" if os.path.exists("failures") else "w"
with open("failures", mode) as f:
# let's also access a fixture for the fun of it
if "tmpdir" in item.fixturenames:
extra = " (%s)" % item.funcargs["tmpdir"]
else:
extra = ""
f.write(rep.nodeid + extra + "\n")
# pic_info = adb_screen_shot()
with allure.step('添加失败截图...'):
allure.attach(driver.get_screenshot_as_png(), "失败截图", allure.attachment_type.PNG)
# def adb_screen_shot():
# driver.get_screenshot_as_png()
# driver.get_screenshot_as_base64()
# driver.get_screenshot_as_file("122.jpg")
# os.popen("adb screen -p testfailue.jpg")
@pytest.fixture(scope='session', autouse=True)
def browser():
global driver
if driver is None:
driver = webdriver.Chrome()
return driver | [
"18770210865@163.com"
] | 18770210865@163.com |
f8ba0392696152c9b0153c42e7340ebb511a2e0a | 32bfc07c9661b0820e525158ef9a03c1d3256ecd | /Week 2/mysite-link1/django-polls/polls/migrations/0001_initial.py | 8f55db1363fc94de11712f49c0f9b7f97cca9bdc | [] | no_license | Aktoty00/BFDjango | c4d42d0f8d11a14813dbf2d67830531193b81417 | 95e28e9c56b1c1a3a286a1919b942512efdd585a | refs/heads/master | 2021-09-25T15:35:16.722971 | 2020-04-19T11:43:27 | 2020-04-19T11:43:27 | 234,919,812 | 0 | 0 | null | 2021-09-22T18:39:00 | 2020-01-19T15:16:34 | Python | UTF-8 | Python | false | false | 1,178 | py | # Generated by Django 2.1.7 on 2020-01-22 15:19
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Choice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('choice_text', models.CharField(max_length=200)),
('votes', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question_text', models.CharField(max_length=200)),
('pub_date', models.DateTimeField(verbose_name='date published')),
],
),
migrations.AddField(
model_name='choice',
name='question',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='django-polls.polls.Question'),
),
]
| [
"aktoty.rysdaulet@gmail.com"
] | aktoty.rysdaulet@gmail.com |
63bf625090b06ae9b28675a62fe4a5cf740ede90 | c22b5c68727ef7d351be6db6e4f68965dfd02146 | /blockchain/Mail-Spam-Filtering-master/Mail-Spam-Filtering-master/enron-spamfilter.py | 81199ced603b28acc1bcce61a7b7880418000384 | [] | no_license | louiewuliyu/mail-by-blockchain | b3baf8a297c78aa0adcba83e5117e079047e0b6d | 5f36f1d0a73027c7db26585ec26b85c36e8f40bb | refs/heads/master | 2020-03-30T15:40:52.549313 | 2018-10-03T07:16:59 | 2018-10-03T07:16:59 | 151,374,742 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,198 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Jan 31 15:00:44 2017
@author: Abhijeet Singh
"""
import os
import numpy as np
from collections import Counter
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import MultinomialNB
from sklearn.metrics import confusion_matrix
from sklearn.svm import LinearSVC
def make_Dictionary(root_dir):
emails_dirs = [os.path.join(root_dir,f) for f in os.listdir(root_dir)]
all_words = []
for emails_dir in emails_dirs:
dirs = [os.path.join(emails_dir,f) for f in os.listdir(emails_dir)]
for d in dirs:
emails = [os.path.join(d,f) for f in os.listdir(d)]
for mail in emails:
with open(mail) as m:
for line in m:
words = line.split()
all_words += words
dictionary = Counter(all_words)
list_to_remove = dictionary.keys()
for item in list_to_remove:
if item.isalpha() == False:
del dictionary[item]
elif len(item) == 1:
del dictionary[item]
dictionary = dictionary.most_common(3000)
np.save('dict_enron.npy',dictionary)
return dictionary
def extract_features(root_dir):
emails_dirs = [os.path.join(root_dir,f) for f in os.listdir(root_dir)]
docID = 0
features_matrix = np.zeros((33716,3000))
train_labels = np.zeros(33716)
for emails_dir in emails_dirs:
dirs = [os.path.join(emails_dir,f) for f in os.listdir(emails_dir)]
for d in dirs:
emails = [os.path.join(d,f) for f in os.listdir(d)]
for mail in emails:
with open(mail) as m:
all_words = []
for line in m:
words = line.split()
all_words += words
for word in all_words:
wordID = 0
for i,d in enumerate(dictionary):
if d[0] == word:
wordID = i
features_matrix[docID,wordID] = all_words.count(word)
train_labels[docID] = int(mail.split(".")[-2] == 'spam')
docID = docID + 1
return features_matrix,train_labels
#Create a dictionary of words with its frequency
root_dir = 'Enron-data-set'
dictionary = make_Dictionary(root_dir)
#Prepare feature vectors per training mail and its labels
features_matrix,labels = extract_features(root_dir)
np.save('enron_features_matrix.npy',features_matrix)
np.save('enron_labels.npy',labels)
#train_matrix = np.load('enron_features_matrix.npy');
#labels = np.load('enron_labels.npy');
print features_matrix.shape
print labels.shape
print sum(labels==0),sum(labels==1)
X_train, X_test, y_train, y_test = train_test_split(features_matrix, labels, test_size=0.40)
## Training models and its variants
model1 = LinearSVC()
model2 = MultinomialNB()
model1.fit(X_train,y_train)
model2.fit(X_train,y_train)
result1 = model1.predict(X_test)
result2 = model2.predict(X_test)
print confusion_matrix(y_test, result1)
print confusion_matrix(y_test, result2)
| [
"lywu0420@hotmail.com"
] | lywu0420@hotmail.com |
895df5a001edf0d2ca8348d795a5ff6e44f9bb5f | 9869e159caaefe0d0d5dc4f3d723a4c0315b5c8e | /apc_util/setup.py | 36167cb74a89f269ba58ac6a92933f8cb180245a | [] | no_license | yazici/motoman_control | 2208bd2d5a3355de3493f47c0d00b82fcbe26e01 | dd2fb2f47b07c9ff1cb4cd4f6c2962b181514de2 | refs/heads/master | 2020-05-21T02:35:55.745700 | 2015-05-27T23:03:42 | 2015-05-27T23:03:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 308 | py | ## ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
# fetch values from package.xml
setup_args = generate_distutils_setup(
packages=['apc_util'],
package_dir={'': 'src'},
)
setup(**setup_args)
| [
"alex@thoriumrobotics.com"
] | alex@thoriumrobotics.com |
a789ad6f90b611c1ab8c53baa204e144607c2690 | e7dfccc8136776443461b6580752c7f0f50556b3 | /matrix_webhook/__main__.py | 18d4fccae9584210927760e0ca5fa6e165449fa1 | [
"BSD-2-Clause"
] | permissive | nim65s/matrix-webhook | f223e404922860dfae711b3017664b976fd9d4e2 | ad74f632c630a748577ba201c5e89dfa02eece4d | refs/heads/master | 2023-09-01T01:02:28.097429 | 2023-08-01T11:09:14 | 2023-08-01T11:09:14 | 171,114,171 | 97 | 32 | NOASSERTION | 2023-09-06T13:53:04 | 2019-02-17T11:29:31 | Python | UTF-8 | Python | false | false | 334 | py | """Matrix Webhook module entrypoint."""
import logging
from . import app, conf
def main():
"""Start everything."""
log_format = "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s"
logging.basicConfig(level=50 - 10 * conf.VERBOSE, format=log_format)
app.run()
if __name__ == "__main__":
main()
| [
"guilhem.saurel@laas.fr"
] | guilhem.saurel@laas.fr |
6e21093a45d1d4012510faa84dcab11f52ef934d | 4de7bf04f09fcafcfdc029f2ec47f49a9eead50f | /Iterators_and_Generators/prime_numbers.py | b90067fc85f8a43d437a1e14fdff8293c67cb5ab | [] | no_license | Vikadie/Python-OOP | 33ca51b2169a2611f4707863b960bfef0ca4b673 | b1abe1f79f333148e3dd1bc3dc55fbaf814d4a39 | refs/heads/master | 2023-03-24T06:06:18.687388 | 2021-03-26T20:12:48 | 2021-03-26T20:12:48 | 308,573,013 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 321 | py | def get_primes(lst):
def is_prime(num):
if num <= 1:
return False
for i in range(2, num):
if num % i == 0:
return False
return True
for num in lst:
if is_prime(num):
yield num
print(list(get_primes([2, 4, 3, 5, 6, 9, 1, 0]))) | [
"68245263+Vikadie@users.noreply.github.com"
] | 68245263+Vikadie@users.noreply.github.com |
7bbe6ab67c86d308fae5338d2825b658c95f31c9 | 6fab0568ba5244b31e3c260df72b07bd40396429 | /models.py | c551c6a0aa70b3c94fdf8c6e115ba4e6c6c7b9d8 | [] | no_license | okamomosan/my_public_goods | 729928b6b7991072e755e2eca3227e78b7945758 | a18cd93067b5a34afd3615147169a751e4332d71 | refs/heads/master | 2022-12-09T11:02:23.516530 | 2020-08-29T09:14:39 | 2020-08-29T09:14:39 | 291,196,062 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,201 | py |
import random
from otree.api import (
models,
widgets,
BaseConstants,
BaseSubsession,
BaseGroup,
BasePlayer,
Currency as c,
currency_range,
)
author = 'Your name here'
doc = """
Your app description
"""
class Constants(BaseConstants):
name_in_url = 'my_public_goods'
players_per_group = 3
num_rounds = 1
rd = random.randint(50, 100)
endowment = c(rd) # c() means it's a currency
multiplier = 2
class Subsession(BaseSubsession):
pass
class Group(BaseGroup):
total_contribution = models.CurrencyField()
individual_share = models.CurrencyField()
def set_payoffs(self):
players = self.get_players()
contributions = [p.contribution for p in players]
self.total_contribution = sum(contributions)
self.individual_share = self.total_contribution * Constants.multiplier / Constants.players_per_group
for p in players:
p.payoff = Constants.endowment - p.contribution + self.individual_share
class Player(BasePlayer):
contribution = models.CurrencyField(
min=0,
max=Constants.endowment,
label="いくら公共財に投資しますか?"
)
| [
"jst0515@live.jp"
] | jst0515@live.jp |
15c83f62c9fd56c469799186fc20478de46552d4 | 054eefaa17157b32869ea986347b3e539d2bf06b | /big_o_coding/Blue_13/Homework/day_12_eko_spoj.py | 23dcd5c8db290cfe538fb92b5da5ca59e51c778e | [] | no_license | baocogn/self-learning | f2cb2f45f05575b6d195fc3c407daf4edcfe7d0e | f50a3946966354c793cac6b28d09cb5dba2ec57a | refs/heads/master | 2021-07-12T23:32:14.728163 | 2019-02-10T14:24:46 | 2019-02-10T14:24:46 | 143,170,276 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py | import sys
input = sys.stdin.readline
N, M = map(int, input().split())
heights = list(map(int, input().split()))
def getCutted(height):
return sum(max(0, h - height) for h in heights)
left = 0
right = max(heights)
res = 0
while (left <= right):
mid = left + (right - left) // 2
if getCutted(mid) >= M:
res = mid
left = mid + 1
else:
right = mid - 1
print(res) | [
"baocogn123@gmail.com"
] | baocogn123@gmail.com |
064bb76c7c62f304ae205b982893d13f9243fac9 | 1c4110a0bdbb888fd7a82579810cda2c73b52dba | /20210715 Pycharm/Pycharm/venv/Lib/site-packages/bamboo/common/colours.py | 389df001c9cd8b21e7310bebdda8bb08960fbeee | [] | no_license | DrillND/python | d09786e2937a10c9c67170826131b8ee204e0b37 | f6aa1d4d29e4519f89a63af4c3c8f83ed60630ea | refs/heads/main | 2023-06-19T11:51:14.307597 | 2021-07-16T07:18:52 | 2021-07-16T07:18:52 | 355,095,502 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 205 | py |
class bcolours:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
| [
"gornhub13@gmail.com"
] | gornhub13@gmail.com |
b20007cdf4b7fcb8e0d8a1ea67f2a59e65abc61b | 487473cd4aa7a823140b86de5030f5bc8f7d7e64 | /horizon/dashboards/syspanel/registers/tables.py | a5b989efaf6bdff6e60ae8ed24dec5e34d7edf61 | [] | no_license | sunxin3/horizon_application | 7a30abb1e5ae3d25e57551f22ca00bee2219bb40 | 157b79b8ed59685143b2082bd1a45b9695569d07 | refs/heads/master | 2020-05-17T10:30:32.627608 | 2012-11-26T06:48:49 | 2012-11-26T06:48:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,905 | py | import logging
from django import shortcuts
from django.contrib import messages
from django.utils.translation import ugettext_lazy as _
from horizon import api
from horizon import tables
LOG = logging.getLogger(__name__)
class CreateUserLink(tables.LinkAction):
name = "create"
verbose_name = _("Create User")
url = "horizon:registers:users:create"
classes = ("ajax-modal", "btn-create")
class EditUserLink(tables.LinkAction):
name = "edit"
verbose_name = _("Edit")
url = "horizon:registers:users:update"
classes = ("ajax-modal", "btn-edit")
class EnableUsersAction(tables.Action):
name = "enable"
verbose_name = _("Enable")
verbose_name_plural = _("Enable Users")
classes = ("btn-enable",)
def allowed(self, request, user):
return not user.enabled
def handle(self, data_table, request, object_ids):
failures = 0
enabled = []
for obj_id in object_ids:
try:
api.keystone.user_update_enabled(request, obj_id, True)
enabled.append(obj_id)
except Exception, e:
failures += 1
messages.error(request, _("Error enabling user: %s") % e)
LOG.exception("Error enabling user.")
if failures:
messages.info(request, _("Enabled the following users: %s")
% ", ".join(enabled))
else:
messages.success(request, _("Successfully enabled users: %s")
% ", ".join(enabled))
return shortcuts.redirect('horizon:registers:users:index')
class DisableUsersAction(tables.Action):
name = "disable"
verbose_name = _("Disable")
verbose_name_plural = _("Disable Users")
classes = ("btn-disable",)
def allowed(self, request, user):
return user.enabled
def handle(self, data_table, request, object_ids):
failures = 0
disabled = []
for obj_id in object_ids:
if obj_id == request.user.id:
messages.info(request, _('You cannot disable the user you are '
'currently logged in as.'))
continue
try:
api.keystone.user_update_enabled(request, obj_id, False)
disabled.append(obj_id)
except Exception, e:
failures += 1
messages.error(request, _("Error disabling user: %s") % e)
LOG.exception("Error disabling user.")
if failures:
messages.info(request, _("Disabled the following users: %s")
% ", ".join(disabled))
else:
if disabled:
messages.success(request, _("Successfully disabled users: %s")
% ", ".join(disabled))
return shortcuts.redirect('horizon:registers:users:index')
class DeleteUsersAction(tables.DeleteAction):
    """Batch delete action; the logged-in user may never delete itself."""
    data_type_singular = _("User")
    data_type_plural = _("Users")

    def allowed(self, request, datum):
        # Offer deletion for every row except the requesting user's own.
        return not (datum and datum.id == request.user.id)

    def delete(self, request, obj_id):
        # Delegate the actual removal to the Keystone API layer.
        api.keystone.user_delete(request, obj_id)
class UserFilterAction(tables.FilterAction):
    def filter(self, table, users, filter_string):
        """Really naive case-insensitive search over name and email."""
        # FIXME(gabriel): This should be smarter. Written for demo purposes.
        needle = filter_string.lower()

        def comp(user):
            name = (user.name or "").lower()
            email = (user.email or "").lower()
            return needle in name or needle in email

        return filter(comp, users)
class UsersTable(tables.DataTable):
    """Data table listing Keystone users with enable/disable/delete actions."""
    # Maps the string form of the ``enabled`` flag to its boolean status.
    STATUS_CHOICES = (
        ("true", True),
        ("false", False)
    )
    id = tables.Column('id', verbose_name=_('ID'))
    name = tables.Column('name', verbose_name=_('User Name'))
    email = tables.Column('email', verbose_name=_('Email'))
    # Default tenant is not returned from Keystone currently.
    #default_tenant = tables.Column('default_tenant',
    #                               verbose_name=_('Default Project'))
    enabled = tables.Column('enabled', verbose_name=_('Enabled'),
                            status=True,
                            status_choices=STATUS_CHOICES)

    class Meta:
        name = "users"
        verbose_name = _("Users")
        # Editing actions are only exposed when the Keystone backend
        # actually allows user modification.
        if api.keystone_can_edit_user():
            row_actions = (EditUserLink, EnableUsersAction, DisableUsersAction,
                           DeleteUsersAction)
            table_actions = (UserFilterAction, CreateUserLink,
                             DeleteUsersAction)
        else:
            row_actions = (EnableUsersAction, DisableUsersAction)
            table_actions = (UserFilterAction,)
| [
"sunxin3@lenovo.com"
] | sunxin3@lenovo.com |
920de23a7e0cea19331ff01b03c78d59e3cc6c47 | 338f35a5b1e556cba067bc6ffa3778332267fcdd | /niji/tests.py | 0ca66ce396962321b2f159f3be7467a063224192 | [
"LicenseRef-scancode-sata"
] | permissive | fdl66/OnlineJudge | 50f3273b9d967ca870175a2edf9cb99cedaba6e4 | 4f0ae896694c93788bbb42eddb509fd6fc7aa41a | refs/heads/master | 2020-12-30T13:46:11.188596 | 2017-12-12T10:25:09 | 2017-12-12T10:25:09 | 91,249,578 | 0 | 1 | null | 2017-05-14T14:39:26 | 2017-05-14T14:39:26 | null | UTF-8 | Python | false | false | 34,441 | py | # -*- coding: utf-8 -*-
from django.test import TestCase, LiveServerTestCase
from django.utils.translation import ugettext as _
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
from django.core.urlresolvers import reverse
from rest_framework.reverse import reverse as api_reverse
from django.contrib.auth.models import User
from .models import Topic, Node, Post, Notification, Appendix
from django.test.utils import override_settings
import random
import requests
import json
import time
import os
if os.environ.get('TEST_USE_FIREFOX'):
from selenium.webdriver.firefox.webdriver import WebDriver
elif os.environ.get('TEST_USE_CHROME'):
from selenium.webdriver.chrome.webdriver import WebDriver
else:
from selenium.webdriver.phantomjs.webdriver import WebDriver
def login(browser, username_text, password_text):
    """Log in through the site header using the given credentials."""
    # Open the login form via the header button.
    browser.find_element_by_xpath(
        "//*[@id=\"main\"]/div/div[2]/div[1]/div[2]/div/div[1]/a"
    ).click()
    user_field = browser.find_element_by_name('username')
    pass_field = browser.find_element_by_name('password')
    user_field.send_keys(username_text)
    pass_field.send_keys(password_text)
    # Submit by pressing Enter in the password field.
    pass_field.send_keys(Keys.RETURN)
class APITest(LiveServerTestCase):
    """Exercise the JSON API endpoints: anonymous/regular users are rejected,
    while a superuser can reorder, close/open and hide topics and posts."""

    def setUp(self):
        self.browser = WebDriver()
        self.browser.implicitly_wait(3)
        self.n1 = Node.objects.create(
            title='TestNodeOne',
            description='The first test node'
        )
        self.u1 = User.objects.create_user(
            username='test1', email='1@q.com', password='111'
        )
        # BUG FIX: this second user was previously assigned to self.u1 as
        # well, silently overwriting the first user; every sibling test
        # class binds "test2" to self.u2.
        self.u2 = User.objects.create_user(
            username='test2', email='2@q.com', password='222'
        )
        self.super_user = User.objects.create_user(
            username='super', email='super@example.com', password='123'
        )
        self.super_user.is_superuser = True
        self.super_user.is_staff = True
        self.super_user.save()
        # Create 99 topics
        for i in range(1, 100):
            setattr(
                self,
                't%s' % i,
                Topic.objects.create(
                    title='Test Topic %s' % i,
                    user=self.u1,
                    content_raw='This is test topic __%s__' % i,
                    node=self.n1
                )
            )
        # Create 99 replies to self.t1
        for i in range(1, 100):
            Post.objects.create(
                topic=self.t1,
                user=self.u1,
                content_raw='This is reply to topic 1 (__%s__)' % i
            )

    def tearDown(self):
        self.browser.quit()

    def _login_session(self, username, password):
        """Log in through the browser and return a requests.Session carrying
        the browser's cookies (minus csrftoken), ready for JSON API calls."""
        self.browser.get(self.live_server_url + reverse("niji:index"))
        login(self.browser, username, password)
        s = requests.Session()
        s.headers = {'Content-Type': 'application/json'}
        for cookie in self.browser.get_cookies():
            if cookie['name'] == 'csrftoken':
                continue
            s.cookies.set(cookie['name'], cookie['value'])
        return s

    def test_unauthorized_access(self):
        # Anonymous requests are rejected outright.
        d = requests.get(self.live_server_url + api_reverse('niji:topic-list'))
        self.assertEqual(d.status_code, 403)
        d = requests.get(self.live_server_url + api_reverse('niji:topic-detail', kwargs={"pk": self.t1.pk}))
        self.assertEqual(d.status_code, 403)
        # A logged-in regular user must not reach the admin API either.
        s = self._login_session('test1', '111')
        d = s.get(self.live_server_url + api_reverse('niji:topic-list'))
        self.assertEqual(d.status_code, 403)
        d = s.get(self.live_server_url + api_reverse('niji:topic-detail', kwargs={"pk": self.t1.pk}))
        self.assertEqual(d.status_code, 403)

    def test_move_topic_up(self):
        lucky_topic1 = getattr(self, 't%s' % random.randint(1, 50))
        # Anonymous PATCH is rejected.
        d = requests.patch(
            self.live_server_url + api_reverse('niji:topic-detail', kwargs={"pk": lucky_topic1.pk}),
            json.dumps({"order": 1})
        )
        self.assertEqual(d.status_code, 403)
        # Superuser can raise the topic's order.
        s = self._login_session('super', '123')
        d = s.patch(
            self.live_server_url + api_reverse('niji:topic-detail', kwargs={"pk": lucky_topic1.pk}),
            json.dumps({"order": 1})
        ).json()
        self.assertEqual(d["order"], 1)

    def test_close_open_topic(self):
        lucky_topic1 = getattr(self, 't%s' % random.randint(1, 50))
        d = requests.patch(
            self.live_server_url + api_reverse('niji:topic-detail', kwargs={"pk": lucky_topic1.pk}),
            json.dumps({"closed": True})
        )
        self.assertEqual(d.status_code, 403)
        # Superuser can close and then re-open the topic.
        s = self._login_session('super', '123')
        d = s.patch(
            self.live_server_url + api_reverse('niji:topic-detail', kwargs={"pk": lucky_topic1.pk}),
            json.dumps({"closed": True})
        ).json()
        self.assertEqual(d["closed"], True)
        d = s.patch(
            self.live_server_url + api_reverse('niji:topic-detail', kwargs={"pk": lucky_topic1.pk}),
            json.dumps({"closed": False})
        ).json()
        self.assertEqual(d["closed"], False)

    def test_hide_topic(self):
        lucky_topic1 = getattr(self, 't%s' % random.randint(1, 50))
        d = requests.patch(
            self.live_server_url + api_reverse('niji:topic-detail', kwargs={"pk": lucky_topic1.pk}),
            json.dumps({"closed": True})
        )
        self.assertEqual(d.status_code, 403)
        s = self._login_session('super', '123')
        d = s.patch(
            self.live_server_url + api_reverse('niji:topic-detail', kwargs={"pk": lucky_topic1.pk}),
            json.dumps({"hidden": True})
        ).json()
        self.assertEqual(d["hidden"], True)

    def test_hide_post(self):
        lucky_post = random.choice(Post.objects.visible().all())
        d = requests.patch(
            self.live_server_url + api_reverse('niji:post-detail', kwargs={"pk": lucky_post.pk}),
            json.dumps({"hidden": True})
        )
        self.assertEqual(d.status_code, 403)
        s = self._login_session('super', '123')
        self.assertIn("Log out", self.browser.page_source)
        d = s.patch(
            self.live_server_url + api_reverse('niji:post-detail', kwargs={"pk": lucky_post.pk}),
            json.dumps({"hidden": True})
        ).json()
        self.assertEqual(d["hidden"], True)
class StickToTopTest(LiveServerTestCase):
    """Verify that an admin can pin ("move up") a topic to the top of the index."""

    def setUp(self):
        self.browser = WebDriver()
        self.browser.implicitly_wait(3)
        self.n1 = Node.objects.create(
            title='TestNodeOne',
            description='The first test node'
        )
        self.u1 = User.objects.create_user(
            username='test1', email='1@q.com', password='111'
        )
        self.super_user = User.objects.create_user(
            username='super', email='super@example.com', password='123'
        )
        self.super_user.is_superuser = True
        self.super_user.is_staff = True
        self.super_user.save()
        # Create 99 topics
        for i in range(1, 100):
            setattr(
                self,
                't%s' % i,
                Topic.objects.create(
                    title='Test Topic %s' % i,
                    user=self.u1,
                    content_raw='This is test topic __%s__' % i,
                    node=self.n1
                )
            )

    def tearDown(self):
        self.browser.quit()

    def test_stick_to_top_admin(self):
        """Pin a random topic via the UI and check it tops the index page."""
        self.browser.get(self.live_server_url + reverse("niji:index"))
        login(self.browser, 'super', '123')
        self.assertIn("Log out", self.browser.page_source)
        lucky_topic1 = getattr(self, 't%s' % random.randint(1, 50))
        self.browser.get(self.live_server_url+reverse('niji:topic', kwargs={"pk": lucky_topic1.pk}))
        self.browser.find_element_by_class_name('move-topic-up').click()
        # Wait for the "move up" modal to render its level selector.
        up_level = WebDriverWait(
            self.browser, 10
        ).until(
            expected_conditions.presence_of_element_located(
                (By.NAME, 'move-topic-up-level')
            )
        )
        up_level = Select(up_level)
        up_level.select_by_visible_text('1')
        time.sleep(1)
        # Confirm the modal via jQuery; the confirm button is rendered by JS.
        self.browser.execute_script("$('.modal-confirm').click()")
        self.browser.get(self.live_server_url+reverse('niji:index'))
        first_topic_title = self.browser.find_elements_by_class_name('entry-link')[0].text
        self.assertEqual(first_topic_title, lucky_topic1.title)
class TopicOrderingTest(LiveServerTestCase):
    """Check topic ordering: the default (last replied), the
    NIJI_DEFAULT_TOPIC_ORDERING setting, and user-selected orderings on the
    index, node, and search views, including pagination."""

    def setUp(self):
        self.browser = WebDriver()
        self.browser.implicitly_wait(3)
        self.n1 = Node.objects.create(
            title='TestNodeOne',
            description='The first test node'
        )
        self.u1 = User.objects.create_user(
            username='test1', email='1@q.com', password='111'
        )
        # Create 99 topics
        for i in range(1, 100):
            setattr(
                self,
                't%s' % i,
                Topic.objects.create(
                    title='Test Topic %s' % i,
                    user=self.u1,
                    content_raw='This is test topic __%s__' % i,
                    node=self.n1
                )
            )

    def tearDown(self):
        self.browser.quit()

    def test_default_ordering_without_settings(self):
        # Default is "last replied": a new reply bumps its topic to the top.
        self.browser.get(self.live_server_url+reverse("niji:index"))
        first_topic_title = self.browser.find_element_by_class_name(
            "entry-link"
        ).text
        self.assertEqual(first_topic_title, self.t99.title)
        Post.objects.create(
            topic=self.t1,
            content_raw='reply to post __1__',
            user=self.u1,
        )
        self.browser.get(self.browser.current_url)
        first_topic_title = self.browser.find_element_by_class_name(
            "entry-link"
        ).text
        self.assertEqual(first_topic_title, self.t1.title)

    @override_settings(NIJI_DEFAULT_TOPIC_ORDERING="-pub_date")
    def test_default_ordering_with_settings(self):
        # With "-pub_date" configured, a new reply must NOT affect ordering.
        self.browser.get(self.live_server_url+reverse("niji:index"))
        first_topic_title = self.browser.find_element_by_class_name(
            "entry-link"
        ).text
        self.assertEqual(first_topic_title, self.t99.title)
        Post.objects.create(
            topic=self.t1,
            content_raw='reply to post __1__',
            user=self.u1,
        )
        self.browser.get(self.browser.current_url)
        first_topic_title = self.browser.find_element_by_class_name(
            "entry-link"
        ).text
        self.assertEqual(first_topic_title, self.t99.title)

    def test_user_specified_ordering_last_replied(self):
        self.browser.get(self.live_server_url+reverse("niji:index"))
        self.browser.find_element_by_link_text(
            "Last Replied"
        ).click()
        first_topic_title = self.browser.find_element_by_class_name(
            "entry-link"
        ).text
        self.assertEqual(first_topic_title, self.t99.title)

    def test_user_specified_ordering_pub_date(self):
        Post.objects.create(
            topic=self.t1,
            content_raw='reply to post __1__',
            user=self.u1,
        )
        self.browser.get(self.live_server_url+reverse("niji:index"))
        self.browser.find_element_by_link_text(
            "Topic Date"
        ).click()
        first_topic_title = self.browser.find_element_by_class_name(
            "entry-link"
        ).text
        self.assertEqual(first_topic_title, self.t99.title)

    def test_user_specified_ordering_last_replied_pagination(self):
        # The chosen ordering must survive pagination links.
        self.browser.get(self.live_server_url+reverse("niji:index"))
        self.browser.find_element_by_link_text(
            "Last Replied"
        ).click()
        res = self.client.get(self.browser.current_url)
        request = res.wsgi_request
        self.assertEqual(request.GET.get("order"), "-last_replied")
        self.browser.find_element_by_link_text("»").click()
        res = self.client.get(self.browser.current_url)
        request = res.wsgi_request
        self.assertEqual(request.GET.get("order"), "-last_replied")

    def test_user_specified_ordering_node_view(self):
        Post.objects.create(
            topic=self.t1,
            content_raw='reply to post __1__',
            user=self.u1,
        )
        self.browser.get(
            self.live_server_url+reverse(
                "niji:node",
                kwargs={"pk": self.n1.pk}
            )
        )
        self.browser.find_element_by_link_text(
            "Topic Date"
        ).click()
        first_topic_title = self.browser.find_element_by_class_name(
            "entry-link"
        ).text
        self.assertEqual(first_topic_title, self.t99.title)

    def test_user_specified_ordering_search_view(self):
        Post.objects.create(
            topic=self.t1,
            content_raw='reply to post __1__',
            user=self.u1,
        )
        self.browser.get(
            self.live_server_url+reverse(
                "niji:search",
                kwargs={"keyword": "test"}
            )
        )
        self.browser.find_element_by_link_text(
            "Topic Date"
        ).click()
        first_topic_title = self.browser.find_element_by_class_name(
            "entry-link"
        ).text
        self.assertEqual(first_topic_title, self.t99.title)
class LoginRegUrlSettingsTest(LiveServerTestCase):
    """Check that NIJI_LOGIN_URL_NAME / NIJI_REG_URL_NAME settings redirect
    the "Log in" / "Reg" links to the configured URL names."""

    def setUp(self):
        self.browser = WebDriver()
        self.browser.implicitly_wait(3)
        self.n1 = Node.objects.create(
            title='TestNodeOne',
            description='The first test node'
        )
        self.u1 = User.objects.create_user(
            username='test1', email='1@q.com', password='111'
        )
        self.t1 = Topic.objects.create(
            title='Test Topic 1',
            user=self.u1,
            content_raw='This is test topic __1__',
            node=self.n1,
        )

    def tearDown(self):
        self.browser.quit()

    @override_settings(NIJI_LOGIN_URL_NAME="niji:reg")
    def test_login_url_name(self):
        # Deliberately point the login link at the reg view to verify the
        # setting is respected in both the header and the topic page.
        self.browser.get(self.live_server_url+reverse("niji:index"))
        login_btn = self.browser.find_element_by_link_text("Log in")
        self.assertEqual(login_btn.get_attribute("href"), self.live_server_url+reverse("niji:reg"))
        self.browser.get(self.live_server_url+reverse("niji:topic", kwargs={"pk": self.t1.pk}))
        login_link = self.browser.find_element_by_link_text("Login")
        self.assertEqual(login_link.get_attribute("href"), self.live_server_url+reverse("niji:reg"))

    @override_settings(NIJI_REG_URL_NAME="niji:login")
    def test_reg_url_name(self):
        # Same trick in the other direction for the registration link.
        self.browser.get(self.live_server_url+reverse("niji:index"))
        reg_btn = self.browser.find_element_by_link_text("Reg")
        self.assertEqual(reg_btn.get_attribute("href"), self.live_server_url+reverse("niji:login"))
        self.browser.get(self.live_server_url+reverse("niji:topic", kwargs={"pk": self.t1.pk}))
        reg_link = self.browser.find_element_by_link_text("Create a New User")
        self.assertEqual(reg_link.get_attribute("href"), self.live_server_url+reverse("niji:login"))
class TopicModelTest(TestCase):
    """Unit tests for the Topic model: visibility, ordering, content hashing,
    markdown rendering, reply bookkeeping, and @-mention notifications."""

    def setUp(self):
        self.n1 = Node.objects.create(
            title='TestNodeOne',
            description='The first test node'
        )
        self.u1 = User.objects.create_user(
            username='test1', email='1@q.com', password='111'
        )
        self.u2 = User.objects.create_user(
            username='test2', email='2@q.com', password='222'
        )
        self.t1 = Topic.objects.create(
            title='Test Topic 1',
            user=self.u1,
            content_raw='This is test topic __1__',
            node=self.n1,
        )
        self.t2 = Topic.objects.create(
            title='Test Topic 2',
            user=self.u1,
            content_raw='This is test topic __2__',
            node=self.n1,
        )

    def test_hidden_topic(self):
        # Hidden topics drop out of the visible() queryset.
        self.assertEqual(Topic.objects.visible().count(), 2)
        self.t1.hidden = True
        self.t1.save()
        self.assertEqual(Topic.objects.visible().count(), 1)

    def test_topic_order(self):
        # A higher "order" value pins the topic above newer ones.
        self.assertEqual(Topic.objects.visible().first(), self.t2)
        self.t1.order = 9
        self.t1.save()
        self.assertEqual(Topic.objects.visible().first(), self.t1)

    def test_topic_content_hash(self):
        # The raw-content hash must track edits and be stable on revert.
        original_hash = self.t1.raw_content_hash
        self.t1.content_raw = 'fdsfds'
        self.t1.save()
        self.assertNotEqual(original_hash, self.t1.raw_content_hash)
        self.t1.content_raw = 'This is test topic __1__'
        self.t1.save()
        self.assertEqual(original_hash, self.t1.raw_content_hash)

    def test_content_render(self):
        # Markdown __bold__ renders as <strong> on save.
        self.assertIn('<strong>1</strong>', self.t1.content_rendered)
        self.t1.content_raw = 'This is the __first__ topic'
        self.t1.save()
        self.assertIn('<strong>first</strong>', self.t1.content_rendered)

    def test_last_replied(self):
        # last_replied follows the newest reply and falls back to pub_date.
        p = Post()
        p.topic = self.t1
        p.content_raw = 'reply to post __1__'
        p.user = self.u1
        p.save()
        self.assertEqual(self.t1.last_replied, p.pub_date)
        p2 = Post()
        p2.topic = self.t1
        p2.content_raw = '2nd reply to post __1__'
        p2.user = self.u1
        p2.save()
        self.assertEqual(self.t1.last_replied, p2.pub_date)
        p2.delete()
        self.assertEqual(self.t1.last_replied, p.pub_date)
        p.delete()
        self.assertEqual(self.t1.last_replied, self.t1.pub_date)

    def test_reply_count(self):
        # reply_count counts only visible replies and updates on delete.
        p = Post()
        p.topic = self.t1
        p.content_raw = '2nd reply to post __1__'
        p.user = self.u1
        p.save()
        self.assertEqual(self.t1.reply_count, 1)
        # Bumping the pk and saving creates a second, distinct Post row.
        p.pk += 1
        p.save()
        self.assertEqual(self.t1.reply_count, 2)
        p.hidden = True
        p.save()
        self.assertEqual(self.t1.reply_count, 1)
        p.hidden = False
        p.save()
        self.assertEqual(self.t1.reply_count, 2)
        p.delete()
        self.assertEqual(self.t1.reply_count, 1)

    @override_settings(CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
                       CELERY_ALWAYS_EAGER=True,
                       BROKER_BACKEND='memory')
    def test_other_user_mention(self):
        # @mentioning another user creates a Notification and links the name.
        t = Topic.objects.create(
            title='topic mention test',
            user=self.u1,
            content_raw='test mention @test2',
            node=self.n1,
        )
        self.assertEqual(self.u2.received_notifications.count(), 1)
        notification = Notification.objects.get(pk=1)
        self.assertIn(
            '<a href="%s">test2</a>' % (reverse("niji:user_info", kwargs={"pk": self.u2.pk})),
            t.content_rendered
        )
        self.assertEqual(notification.topic_id, t.pk)
        self.assertEqual(notification.sender_id, self.u1.pk)
        self.assertEqual(notification.to_id, self.u2.pk)

    @override_settings(CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
                       CELERY_ALWAYS_EAGER=True,
                       BROKER_BACKEND='memory')
    def test_self_mention(self):
        # Mentioning yourself must not generate a notification.
        Topic.objects.create(
            title='topic mention test',
            user=self.u1,
            content_raw='test mention myself @test1',
            node=self.n1,
        )
        self.assertEqual(self.u1.received_notifications.count(), 0)
class PostModelTest(TestCase):
    """Unit tests for the Post model: rendering, visibility, and mentions."""

    def setUp(self):
        self.n1 = Node.objects.create(
            title='TestNodeOne',
            description='The first test node'
        )
        self.u1 = User.objects.create_user(
            username='test1', email='1@q.com', password='111'
        )
        self.u2 = User.objects.create_user(
            username='test2', email='2@q.com', password='222'
        )
        self.t1 = Topic.objects.create(
            title='Test Topic 1',
            user=self.u1,
            content_raw='This is test topic __1__',
            node=self.n1,
        )
        self.p1 = Post.objects.create(
            topic=self.t1,
            content_raw='reply to post __1__',
            user=self.u1,
        )
        self.p2 = Post.objects.create(
            topic=self.t1,
            content_raw='reply to post __2__',
            user=self.u1,
        )

    def test_content_render(self):
        # Markdown __bold__ renders as <strong> on save.
        self.assertIn('<strong>1</strong>', self.p1.content_rendered)
        self.p1.content_raw = 'This is the __first__ reply'
        self.p1.save()
        self.assertIn('<strong>first</strong>', self.p1.content_rendered)

    def test_hidden(self):
        # Hidden posts drop out of the topic's visible() replies.
        self.assertEqual(self.t1.replies.visible().count(), 2)
        self.p1.hidden = True
        self.p1.save()
        self.assertEqual(self.t1.replies.visible().count(), 1)

    @override_settings(CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
                       CELERY_ALWAYS_EAGER=True,
                       BROKER_BACKEND='memory')
    def test_other_user_mention(self):
        # @mentioning another user in a reply creates a Notification.
        p = Post.objects.create(
            user=self.u1,
            content_raw='test mention @test2',
            topic=self.t1,
        )
        self.assertEqual(self.u2.received_notifications.count(), 1)
        notification = Notification.objects.get(pk=1)
        self.assertEqual(notification.post_id, p.pk)
        self.assertEqual(notification.sender_id, self.u1.pk)
        self.assertEqual(notification.to_id, self.u2.pk)

    @override_settings(CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
                       CELERY_ALWAYS_EAGER=True,
                       BROKER_BACKEND='memory')
    def test_self_mention(self):
        # Mentioning yourself must not generate a notification.
        Post.objects.create(
            user=self.u1,
            content_raw='test to mention myself @test1',
            topic=self.t1,
        )
        self.assertEqual(self.u1.received_notifications.count(), 0)
class AppendixModelTest(TestCase):
    """Unit tests for the Appendix model's markdown rendering."""

    def setUp(self):
        self.n1 = Node.objects.create(
            title='TestNodeOne',
            description='The first test node'
        )
        self.u1 = User.objects.create_user(
            username='test1', email='1@q.com', password='111'
        )
        self.t1 = Topic.objects.create(
            title='Test Topic 1',
            user=self.u1,
            content_raw='This is test topic __1__',
            node=self.n1,
        )
        self.a1 = Appendix.objects.create(
            topic=self.t1,
            content_raw='appendix to topic __1__',
        )

    def test_content_render(self):
        # Markdown __bold__ renders as <strong> on save.
        self.assertIn('<strong>1</strong>', self.a1.content_rendered)
        self.a1.content_raw = 'appendix to the __first__ topic'
        self.a1.save()
        self.assertIn('<strong>first</strong>', self.a1.content_rendered)
class VisitorTest(LiveServerTestCase):
    """
    Test as a visitor (unregistered user): browsing, hidden content,
    login, registration, user pages, search, and pagination.
    """

    def setUp(self):
        self.browser = WebDriver()
        self.browser.implicitly_wait(3)
        self.n1 = Node.objects.create(
            title='TestNodeOne',
            description='The first test node'
        )
        self.u1 = User.objects.create_user(
            username='test1', email='1@q.com', password='111'
        )
        self.u2 = User.objects.create_user(
            username='test2', email='2@q.com', password='222'
        )
        # Create 99 topics
        for i in range(1, 100):
            setattr(
                self,
                't%s' % i,
                Topic.objects.create(
                    title='Test Topic %s' % i,
                    user=self.u1,
                    content_raw='This is test topic __%s__' % i,
                    node=self.n1
                )
            )

    def tearDown(self):
        self.browser.quit()

    def test_index(self):
        self.browser.get(self.live_server_url+reverse('niji:index'))
        self.assertIn('niji', self.browser.page_source.lower())

    def test_topic_page_content(self):
        self.browser.get(self.live_server_url+reverse('niji:topic', kwargs={'pk': self.t88.pk}))
        self.assertIn('This is test topic <strong>88</strong>', self.browser.page_source)

    def test_hidden_post(self):
        # A reply disappears from the page once marked hidden.
        hidden_post = Post.objects.create(
            topic=self.t1,
            content_raw="i'm a reply 12138",
            user=self.u1
        )
        self.browser.get(self.live_server_url+reverse('niji:topic', kwargs={'pk': self.t1.pk}))
        self.assertIn("i'm a reply 12138", self.browser.page_source)
        hidden_post.hidden = True
        hidden_post.save()
        self.browser.get(self.browser.current_url)
        self.assertNotIn("i'm a reply 12138", self.browser.page_source)

    def test_node_page(self):
        # Node listing paginates at 30 topics per page.
        self.browser.get(self.live_server_url+reverse('niji:node', kwargs={'pk': self.n1.pk}))
        topics = self.browser.find_elements_by_css_selector('ul.topic-list > li')
        self.assertEqual(len(topics), 30)

    def test_user_login(self):
        self.browser.get(self.live_server_url+reverse('niji:index'))
        self.assertNotIn("Log out", self.browser.page_source)
        login(self.browser, "test1", "111")
        self.assertEqual(self.browser.current_url, self.live_server_url+reverse("niji:index"))
        self.assertIn("Log out", self.browser.page_source)

    def test_usr_reg(self):
        # Register a brand-new user through the reg form and end up logged in.
        self.browser.get(self.live_server_url+reverse('niji:index'))
        self.browser.find_element_by_link_text("Reg").click()
        self.assertEqual(self.browser.current_url, self.live_server_url+reverse("niji:reg"))
        username = self.browser.find_element_by_name('username')
        email = self.browser.find_element_by_name('email')
        password1 = self.browser.find_element_by_name('password1')
        password2 = self.browser.find_element_by_name('password2')
        username.send_keys("test3")
        password1.send_keys("333")
        password2.send_keys("333")
        email.send_keys("test3@example.com")
        password1.send_keys(Keys.RETURN)
        self.assertEqual(self.browser.current_url, self.live_server_url+reverse("niji:index"))
        self.assertIn("Log out", self.browser.page_source)
        self.assertIn("test3", self.browser.page_source)

    def test_user_topic(self):
        self.browser.get(self.live_server_url+reverse("niji:user_topics", kwargs={"pk": self.u1.id}))
        self.assertIn("UID:", self.browser.page_source)

    def test_user_info(self):
        self.browser.get(self.live_server_url+reverse("niji:user_info", kwargs={"pk": self.u1.id}))
        self.assertIn("Topics created by %s" % self.u1.username, self.browser.page_source)

    def test_search(self):
        self.browser.get(self.live_server_url+reverse("niji:search", kwargs={"keyword": "test"}))
        self.assertIn("Search: test", self.browser.page_source)

    def test_pagination(self):
        # « (prev) only on later pages, » (next) only when more pages exist.
        self.browser.get(self.live_server_url+reverse("niji:index", kwargs={"page": 2}))
        self.assertIn("«", self.browser.page_source)
        prev = self.browser.find_element_by_link_text("«")
        prev.click()
        self.assertNotIn("«", self.browser.page_source)
        self.assertIn("»", self.browser.page_source)
        nxt = self.browser.find_element_by_link_text("»")
        nxt.click()
        self.assertEqual(self.browser.current_url, self.live_server_url+reverse("niji:index", kwargs={"page": 2}))
class RegisteredUserTest(LiveServerTestCase):
    """
    Test as a registered user: editing own/others' topics, replying,
    closed topics, topic creation, and appendices.
    """

    def setUp(self):
        self.browser = WebDriver()
        self.browser.implicitly_wait(3)
        self.n1 = Node.objects.create(
            title='TestNodeOne',
            description='The first test node'
        )
        self.u1 = User.objects.create_user(
            username='test1', email='1@q.com', password='111'
        )
        self.u2 = User.objects.create_user(
            username='test2', email='2@q.com', password='222'
        )
        # Create 198 topics: t1..t99 owned by u1, t100..t198 owned by u2.
        for i in range(1, 100):
            setattr(
                self,
                't%s' % i,
                Topic.objects.create(
                    title='Test Topic %s' % i,
                    user=self.u1,
                    content_raw='This is test topic __%s__' % i,
                    node=self.n1
                )
            )
        for i in range(100, 199):
            setattr(
                self,
                't%s' % i,
                Topic.objects.create(
                    title='Test Topic %s' % i,
                    user=self.u2,
                    content_raw='This is test topic __%s__' % i,
                    node=self.n1
                )
            )

    def tearDown(self):
        self.browser.quit()

    def test_edit_own_topic(self):
        self.browser.get(self.live_server_url+reverse('niji:index'))
        login(self.browser, "test1", "111")
        self.assertIn("Log out", self.browser.page_source)
        # Pick one of u1's own topics (t1..t99).
        own_topic = getattr(self, "t%s" % (random.choice(range(1, 100))))
        self.browser.get(self.live_server_url+reverse("niji:topic", kwargs={"pk": own_topic.id}))
        self.browser.find_element_by_link_text("Edit").click()
        content_raw = self.browser.find_element_by_name("content_raw")
        content_raw.clear()
        content_raw.send_keys("This topic is edited")
        self.browser.find_element_by_name("submit").click()
        self.assertIn("This topic is edited", self.browser.page_source)

    def test_edit_others_topic(self):
        self.browser.get(self.live_server_url+reverse('niji:index'))
        login(self.browser, "test1", "111")
        self.assertIn("Log out", self.browser.page_source)
        # Pick one of u2's topics (t100..t198): no edit button, direct URL denied.
        others_topic = getattr(self, "t%s" % (random.choice(range(100, 199))))
        self.browser.get(self.live_server_url+reverse("niji:topic", kwargs={"pk": others_topic.id}))
        self.assertNotIn(
            "<span class=\"label label-success\">Edit</span>",
            self.browser.page_source
        )
        self.browser.get(
            self.live_server_url+reverse("niji:edit_topic", kwargs={"pk": others_topic.id})
        )
        self.assertIn("You are not allowed to edit other's topic", self.browser.page_source)

    def test_reply_topic(self):
        self.browser.get(self.live_server_url+reverse('niji:index'))
        login(self.browser, "test1", "111")
        self.assertIn("Log out", self.browser.page_source)
        topic = getattr(self, "t%s" % (random.choice(range(1, 199))))
        self.browser.get(self.live_server_url+reverse("niji:topic", kwargs={"pk": topic.id}))
        content_raw = self.browser.find_element_by_name("content_raw")
        content_raw.clear()
        content_raw.send_keys("This is a reply")
        self.browser.find_element_by_name("submit").click()
        self.assertIn("This is a reply", self.browser.page_source)

    def test_closed_topic(self):
        # A closed topic shows the localized "closed" notice instead of a form.
        self.browser.get(self.live_server_url + reverse('niji:index'))
        login(self.browser, "test1", "111")
        self.assertIn("Log out", self.browser.page_source)
        topic = getattr(self, "t%s" % (random.choice(range(1, 199))))
        topic.closed = True
        topic.save()
        self.browser.get(self.live_server_url + reverse("niji:topic", kwargs={"pk": topic.id}))
        self.assertIn(_("This topic is closed"), self.browser.page_source)

    def test_create_topic(self):
        self.browser.get(self.live_server_url+reverse('niji:index'))
        login(self.browser, "test1", "111")
        self.assertIn("Log out", self.browser.page_source)
        self.browser.get(self.live_server_url+reverse("niji:create_topic"))
        node = self.browser.find_element_by_name("node")
        node = Select(node)
        title = self.browser.find_element_by_name("title")
        content_raw = self.browser.find_element_by_name("content_raw")
        node.select_by_visible_text(self.n1.title)
        title.send_keys("test title")
        content_raw.send_keys("this is content")
        self.browser.find_element_by_name("submit").click()
        self.assertIn("this is content", self.browser.page_source)

    def test_create_appendix(self):
        self.browser.get(self.live_server_url+reverse('niji:index'))
        login(self.browser, "test1", "111")
        self.assertIn("Log out", self.browser.page_source)
        own_topic = getattr(self, "t%s" % (random.choice(range(1, 100))))
        self.browser.get(self.live_server_url+reverse("niji:topic", kwargs={"pk": own_topic.id}))
        self.browser.find_element_by_link_text("Append").click()
        content_raw = self.browser.find_element_by_name("content_raw")
        content_raw.clear()
        content_raw.send_keys("This is an appendix")
        self.browser.find_element_by_name("submit").click()
        self.assertIn("This is an appendix", self.browser.page_source)
| [
"fan0816fan@163.com"
] | fan0816fan@163.com |
9e9471568bc5a8d75796a3682326046ac8831a50 | a80f56fef7b398fae646ec3220f8230d28513930 | /Recognition.py | aea6eb491cec7da925da6cf6ac9ef3d97e360c89 | [] | no_license | ev108/MFFBot | 244b646f08ec9058fae2f7c50d5e78f6aeb5dbff | bb47391c5f9d1b1e6b06e643aeb7022294cd9db8 | refs/heads/master | 2020-06-26T07:22:56.838482 | 2020-04-29T18:02:43 | 2020-04-29T18:02:43 | 199,570,276 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 925 | py | import pyautogui
import Constants
import time
def locate(s):
    """Search the screen for image ``s`` (relative to Constants.ImagePathENG).

    Makes up to 3 attempts. Returns 1 as soon as the image is found
    (printing its centre coordinates), or 0 after 3 failed attempts.
    """
    for _ in range(3):
        # NOTE: the previous version also took an unused full screenshot
        # (pyautogui.screenshot()) on every attempt; locateOnScreen already
        # captures the screen itself, so that dead work is removed.
        coord = pyautogui.locateOnScreen(Constants.ImagePathENG + s, confidence=0.8)
        if coord is not None:
            x, y = pyautogui.center(coord)
            time.sleep(0.5)
            print(x, y)
            return 1
    return 0
def getXYCoord(s):
    """Return the centre (x, y) of image ``s`` on screen.

    Makes up to 3 attempts (image path is relative to
    Constants.ImagePathENG); returns the sentinel (-999, -999) when the
    image is not found.
    """
    for _ in range(3):
        # NOTE: the previous version also took an unused full screenshot
        # (pyautogui.screenshot()) on every attempt; locateOnScreen already
        # captures the screen itself, so that dead work is removed.
        coord = pyautogui.locateOnScreen(Constants.ImagePathENG + s, confidence=0.8)
        if coord is not None:
            x, y = pyautogui.center(coord)
            time.sleep(0.5)
            return x, y
    return -999, -999
| [
"noreply@github.com"
] | ev108.noreply@github.com |
caae4574f3a9d4ee99d07f1fe8a8fa13f4a68803 | dea56c4d044a55ccbbc63224e99cdf5c0a37fd8a | /python/ccxt/probit.py | 3fa95319921ee39c8030da45e246abf27193cb2a | [
"MIT"
] | permissive | Biboxcom/ccxt | ece93a53e6dc3b402f068a5aa39bbf9a47b88e47 | a82a15718aa2fe430dbc09fe10cc99575e5d2b35 | refs/heads/master | 2023-04-12T12:56:25.782008 | 2020-12-24T00:08:07 | 2020-12-24T00:08:07 | 324,118,781 | 2 | 0 | MIT | 2023-03-21T09:05:59 | 2020-12-24T09:31:56 | null | UTF-8 | Python | false | false | 48,425 | py | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
import math
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import BadSymbol
from ccxt.base.errors import BadResponse
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidAddress
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import DDoSProtection
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.decimal_to_precision import TRUNCATE
from ccxt.base.decimal_to_precision import TICK_SIZE
class probit(Exchange):
def describe(self):
return self.deep_extend(super(probit, self).describe(), {
'id': 'probit',
'name': 'ProBit',
'countries': ['SC', 'KR'], # Seychelles, South Korea
'rateLimit': 250, # ms
'has': {
'CORS': True,
'fetchTime': True,
'fetchMarkets': True,
'fetchCurrencies': True,
'fetchTickers': True,
'fetchTicker': True,
'fetchOHLCV': True,
'fetchOrderBook': True,
'fetchTrades': True,
'fetchBalance': True,
'createOrder': True,
'createMarketOrder': True,
'cancelOrder': True,
'fetchOrder': True,
'fetchOpenOrders': True,
'fetchClosedOrders': True,
'fetchMyTrades': True,
'fetchDepositAddress': True,
'withdraw': True,
'signIn': True,
},
'timeframes': {
'1m': '1m',
'3m': '3m',
'5m': '5m',
'10m': '10m',
'15m': '15m',
'30m': '30m',
'1h': '1h',
'4h': '4h',
'6h': '6h',
'12h': '12h',
'1d': '1D',
'1w': '1W',
'1M': '1M',
},
'version': 'v1',
'urls': {
'logo': 'https://user-images.githubusercontent.com/51840849/79268032-c4379480-7ea2-11ea-80b3-dd96bb29fd0d.jpg',
'api': {
'accounts': 'https://accounts.probit.com',
'public': 'https://api.probit.com/api/exchange',
'private': 'https://api.probit.com/api/exchange',
},
'www': 'https://www.probit.com',
'doc': [
'https://docs-en.probit.com',
'https://docs-ko.probit.com',
],
'fees': 'https://support.probit.com/hc/en-us/articles/360020968611-Trading-Fees',
'referral': 'https://www.probit.com/r/34608773',
},
'api': {
'public': {
'get': [
'market',
'currency',
'currency_with_platform',
'time',
'ticker',
'order_book',
'trade',
'candle',
],
},
'private': {
'post': [
'new_order',
'cancel_order',
'withdrawal',
],
'get': [
'balance',
'order',
'open_order',
'order_history',
'trade_history',
'deposit_address',
],
},
'accounts': {
'post': [
'token',
],
},
},
'fees': {
'trading': {
'tierBased': False,
'percentage': True,
'maker': 0.2 / 100,
'taker': 0.2 / 100,
},
},
'exceptions': {
'exact': {
'UNAUTHORIZED': AuthenticationError,
'INVALID_ARGUMENT': BadRequest, # Parameters are not a valid format, parameters are empty, or out of range, or a parameter was sent when not required.
'TRADING_UNAVAILABLE': ExchangeNotAvailable,
'NOT_ENOUGH_BALANCE': InsufficientFunds,
'NOT_ALLOWED_COMBINATION': BadRequest,
'INVALID_ORDER': InvalidOrder, # Requested order does not exist, or it is not your order
'RATE_LIMIT_EXCEEDED': RateLimitExceeded, # You are sending requests too frequently. Please try it later.
'MARKET_UNAVAILABLE': ExchangeNotAvailable, # Market is closed today
'INVALID_MARKET': BadSymbol, # Requested market is not exist
'INVALID_CURRENCY': BadRequest, # Requested currency is not exist on ProBit system
'TOO_MANY_OPEN_ORDERS': DDoSProtection, # Too many open orders
'DUPLICATE_ADDRESS': InvalidAddress, # Address already exists in withdrawal address list
},
},
'requiredCredentials': {
'apiKey': True,
'secret': True,
},
'precisionMode': TICK_SIZE,
'options': {
'createMarketBuyOrderRequiresPrice': True,
'timeInForce': {
'limit': 'gtc',
'market': 'ioc',
},
},
'commonCurrencies': {
'BTCBEAR': 'BEAR',
'BTCBULL': 'BULL',
'CBC': 'CryptoBharatCoin',
'UNI': 'UNICORN Token',
},
})
def fetch_markets(self, params={}):
response = self.publicGetMarket(params)
#
# {
# "data":[
# {
# "id":"MONA-USDT",
# "base_currency_id":"MONA",
# "quote_currency_id":"USDT",
# "min_price":"0.001",
# "max_price":"9999999999999999",
# "price_increment":"0.001",
# "min_quantity":"0.0001",
# "max_quantity":"9999999999999999",
# "quantity_precision":4,
# "min_cost":"1",
# "max_cost":"9999999999999999",
# "cost_precision":8,
# "taker_fee_rate":"0.2",
# "maker_fee_rate":"0.2",
# "show_in_ui":true,
# "closed":false
# },
# ]
# }
#
markets = self.safe_value(response, 'data', [])
result = []
for i in range(0, len(markets)):
market = markets[i]
id = self.safe_string(market, 'id')
baseId = self.safe_string(market, 'base_currency_id')
quoteId = self.safe_string(market, 'quote_currency_id')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
closed = self.safe_value(market, 'closed', False)
active = not closed
amountPrecision = self.safe_integer(market, 'quantity_precision')
costPrecision = self.safe_integer(market, 'cost_precision')
precision = {
'amount': 1 / math.pow(10, amountPrecision),
'price': self.safe_float(market, 'price_increment'),
'cost': 1 / math.pow(10, costPrecision),
}
takerFeeRate = self.safe_float(market, 'taker_fee_rate')
makerFeeRate = self.safe_float(market, 'maker_fee_rate')
result.append({
'id': id,
'info': market,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'active': active,
'precision': precision,
'taker': takerFeeRate / 100,
'maker': makerFeeRate / 100,
'limits': {
'amount': {
'min': self.safe_float(market, 'min_quantity'),
'max': self.safe_float(market, 'max_quantity'),
},
'price': {
'min': self.safe_float(market, 'min_price'),
'max': self.safe_float(market, 'max_price'),
},
'cost': {
'min': self.safe_float(market, 'min_cost'),
'max': self.safe_float(market, 'max_cost'),
},
},
})
return result
def fetch_currencies(self, params={}):
response = self.publicGetCurrencyWithPlatform(params)
#
# {
# "data":[
# {
# "id":"USDT",
# "display_name":{"ko-kr":"테더","en-us":"Tether"},
# "show_in_ui":true,
# "platform":[
# {
# "id":"ETH",
# "priority":1,
# "deposit":true,
# "withdrawal":true,
# "currency_id":"USDT",
# "precision":6,
# "min_confirmation_count":15,
# "require_destination_tag":false,
# "display_name":{"name":{"ko-kr":"ERC-20","en-us":"ERC-20"}},
# "min_deposit_amount":"0",
# "min_withdrawal_amount":"1",
# "withdrawal_fee":[
# {"amount":"0.01","priority":2,"currency_id":"ETH"},
# {"amount":"1.5","priority":1,"currency_id":"USDT"},
# ],
# "deposit_fee":{},
# "suspended_reason":"",
# "deposit_suspended":false,
# "withdrawal_suspended":false
# },
# {
# "id":"OMNI",
# "priority":2,
# "deposit":true,
# "withdrawal":true,
# "currency_id":"USDT",
# "precision":6,
# "min_confirmation_count":3,
# "require_destination_tag":false,
# "display_name":{"name":{"ko-kr":"OMNI","en-us":"OMNI"}},
# "min_deposit_amount":"0",
# "min_withdrawal_amount":"5",
# "withdrawal_fee":[{"amount":"5","priority":1,"currency_id":"USDT"}],
# "deposit_fee":{},
# "suspended_reason":"wallet_maintenance",
# "deposit_suspended":false,
# "withdrawal_suspended":false
# }
# ],
# "stakeable":false,
# "unstakeable":false,
# "auto_stake":false,
# "auto_stake_amount":"0"
# }
# ]
# }
#
currencies = self.safe_value(response, 'data')
result = {}
for i in range(0, len(currencies)):
currency = currencies[i]
id = self.safe_string(currency, 'id')
code = self.safe_currency_code(id)
displayName = self.safe_value(currency, 'display_name')
name = self.safe_string(displayName, 'en-us')
platforms = self.safe_value(currency, 'platform', [])
platformsByPriority = self.sort_by(platforms, 'priority')
platform = self.safe_value(platformsByPriority, 0, {})
precision = self.safe_integer(platform, 'precision')
depositSuspended = self.safe_value(platform, 'deposit_suspended')
withdrawalSuspended = self.safe_value(platform, 'withdrawal_suspended')
active = not (depositSuspended and withdrawalSuspended)
withdrawalFees = self.safe_value(platform, 'withdrawal_fee', {})
withdrawalFeesByPriority = self.sort_by(withdrawalFees, 'priority')
withdrawalFee = self.safe_value(withdrawalFeesByPriority, 0, {})
fee = self.safe_float(withdrawalFee, 'amount')
result[code] = {
'id': id,
'code': code,
'info': currency,
'name': name,
'active': active,
'fee': fee,
'precision': precision,
'limits': {
'amount': {
'min': math.pow(10, -precision),
'max': math.pow(10, precision),
},
'price': {
'min': math.pow(10, -precision),
'max': math.pow(10, precision),
},
'cost': {
'min': None,
'max': None,
},
'deposit': {
'min': self.safe_float(platform, 'min_deposit_amount'),
'max': None,
},
'withdraw': {
'min': self.safe_float(platform, 'min_withdrawal_amount'),
'max': None,
},
},
}
return result
def fetch_balance(self, params={}):
self.load_markets()
response = self.privateGetBalance(params)
#
# {
# data: [
# {
# "currency_id":"XRP",
# "total":"100",
# "available":"0",
# }
# ]
# }
#
data = self.safe_value(response, 'data')
result = {'info': data}
for i in range(0, len(data)):
balance = data[i]
currencyId = self.safe_string(balance, 'currency_id')
code = self.safe_currency_code(currencyId)
account = self.account()
account['total'] = self.safe_float(balance, 'total')
account['free'] = self.safe_float(balance, 'available')
result[code] = account
return self.parse_balance(result)
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'market_id': market['id'],
}
response = self.publicGetOrderBook(self.extend(request, params))
#
# {
# data: [
# {side: 'buy', price: '0.000031', quantity: '10'},
# {side: 'buy', price: '0.00356007', quantity: '4.92156877'},
# {side: 'sell', price: '0.1857', quantity: '0.17'},
# ]
# }
#
data = self.safe_value(response, 'data', [])
dataBySide = self.group_by(data, 'side')
return self.parse_order_book(dataBySide, None, 'buy', 'sell', 'price', 'quantity')
def fetch_tickers(self, symbols=None, params={}):
self.load_markets()
request = {}
if symbols is not None:
marketIds = self.market_ids(symbols)
request['market_ids'] = ','.join(marketIds)
response = self.publicGetTicker(self.extend(request, params))
#
# {
# "data":[
# {
# "last":"0.022902",
# "low":"0.021693",
# "high":"0.024093",
# "change":"-0.000047",
# "base_volume":"15681.986",
# "quote_volume":"360.514403624",
# "market_id":"ETH-BTC",
# "time":"2020-04-12T18:43:38.000Z"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_tickers(data, symbols)
def parse_tickers(self, rawTickers, symbols=None):
tickers = []
for i in range(0, len(rawTickers)):
tickers.append(self.parse_ticker(rawTickers[i]))
return self.filter_by_array(tickers, 'symbol', symbols)
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'market_ids': market['id'],
}
response = self.publicGetTicker(self.extend(request, params))
#
# {
# "data":[
# {
# "last":"0.022902",
# "low":"0.021693",
# "high":"0.024093",
# "change":"-0.000047",
# "base_volume":"15681.986",
# "quote_volume":"360.514403624",
# "market_id":"ETH-BTC",
# "time":"2020-04-12T18:43:38.000Z"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
ticker = self.safe_value(data, 0)
if ticker is None:
raise BadResponse(self.id + ' fetchTicker() returned an empty response')
return self.parse_ticker(ticker, market)
def parse_ticker(self, ticker, market=None):
#
# {
# "last":"0.022902",
# "low":"0.021693",
# "high":"0.024093",
# "change":"-0.000047",
# "base_volume":"15681.986",
# "quote_volume":"360.514403624",
# "market_id":"ETH-BTC",
# "time":"2020-04-12T18:43:38.000Z"
# }
#
timestamp = self.parse8601(self.safe_string(ticker, 'time'))
marketId = self.safe_string(ticker, 'market_id')
symbol = self.safe_symbol(marketId, market, '-')
close = self.safe_float(ticker, 'last')
change = self.safe_float(ticker, 'change')
percentage = None
open = None
if change is not None:
if close is not None:
open = close - change
percentage = (change / open) * 100
baseVolume = self.safe_float(ticker, 'base_volume')
quoteVolume = self.safe_float(ticker, 'quote_volume')
vwap = self.vwap(baseVolume, quoteVolume)
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'high'),
'low': self.safe_float(ticker, 'low'),
'bid': None,
'bidVolume': None,
'ask': None,
'askVolume': None,
'vwap': vwap,
'open': open,
'close': close,
'last': close,
'previousClose': None, # previous day close
'change': change,
'percentage': percentage,
'average': None,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
market = None
request = {
'limit': 100,
'start_time': self.iso8601(0),
'end_time': self.iso8601(self.milliseconds()),
}
if symbol is not None:
market = self.market(symbol)
request['market_id'] = market['id']
if since is not None:
request['start_time'] = self.iso8601(since)
if limit is not None:
request['limit'] = limit
response = self.privateGetTradeHistory(self.extend(request, params))
#
# {
# data: [
# {
# "id":"BTC-USDT:183566",
# "order_id":"17209376",
# "side":"sell",
# "fee_amount":"0.657396569175",
# "fee_currency_id":"USDT",
# "status":"settled",
# "price":"6573.96569175",
# "quantity":"0.1",
# "cost":"657.396569175",
# "time":"2018-08-10T06:06:46.000Z",
# "market_id":"BTC-USDT"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_trades(data, market, since, limit)
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'market_id': market['id'],
'limit': 100,
'start_time': '1970-01-01T00:00:00.000Z',
'end_time': self.iso8601(self.milliseconds()),
}
if since is not None:
request['start_time'] = self.iso8601(since)
if limit is not None:
request['limit'] = limit
response = self.publicGetTrade(self.extend(request, params))
#
# {
# "data":[
# {
# "id":"ETH-BTC:3331886",
# "price":"0.022981",
# "quantity":"12.337",
# "time":"2020-04-12T20:55:42.371Z",
# "side":"sell",
# "tick_direction":"down"
# },
# {
# "id":"ETH-BTC:3331885",
# "price":"0.022982",
# "quantity":"6.472",
# "time":"2020-04-12T20:55:39.652Z",
# "side":"sell",
# "tick_direction":"down"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_trades(data, market, since, limit)
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# {
# "id":"ETH-BTC:3331886",
# "price":"0.022981",
# "quantity":"12.337",
# "time":"2020-04-12T20:55:42.371Z",
# "side":"sell",
# "tick_direction":"down"
# }
#
# fetchMyTrades(private)
#
# {
# "id":"BTC-USDT:183566",
# "order_id":"17209376",
# "side":"sell",
# "fee_amount":"0.657396569175",
# "fee_currency_id":"USDT",
# "status":"settled",
# "price":"6573.96569175",
# "quantity":"0.1",
# "cost":"657.396569175",
# "time":"2018-08-10T06:06:46.000Z",
# "market_id":"BTC-USDT"
# }
#
timestamp = self.parse8601(self.safe_string(trade, 'time'))
id = self.safe_string(trade, 'id')
marketId = None
if id is not None:
parts = id.split(':')
marketId = self.safe_string(parts, 0)
marketId = self.safe_string(trade, 'market_id', marketId)
symbol = self.safe_symbol(marketId, market, '-')
side = self.safe_string(trade, 'side')
price = self.safe_float(trade, 'price')
amount = self.safe_float(trade, 'quantity')
cost = None
if price is not None:
if amount is not None:
cost = price * amount
orderId = self.safe_string(trade, 'order_id')
feeCost = self.safe_float(trade, 'fee_amount')
fee = None
if feeCost is not None:
feeCurrencyId = self.safe_string(trade, 'fee_currency_id')
feeCurrencyCode = self.safe_currency_code(feeCurrencyId)
fee = {
'cost': feeCost,
'currency': feeCurrencyCode,
}
return {
'id': id,
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'order': orderId,
'type': None,
'side': side,
'takerOrMaker': None,
'price': price,
'amount': amount,
'cost': cost,
'fee': fee,
}
def fetch_time(self, params={}):
response = self.publicGetTime(params)
#
# {"data":"2020-04-12T18:54:25.390Z"}
#
timestamp = self.parse8601(self.safe_string(response, 'data'))
return timestamp
def normalize_ohlcv_timestamp(self, timestamp, timeframe, after=False):
duration = self.parse_timeframe(timeframe)
if timeframe == '1M':
iso8601 = self.iso8601(timestamp)
parts = iso8601.split('-')
year = self.safe_string(parts, 0)
month = self.safe_integer(parts, 1)
if after:
month = self.sum(month, 1)
if month < 10:
month = '0' + str(month)
else:
month = str(month)
return year + '-' + month + '-01T00:00:00.000Z'
elif timeframe == '1w':
timestamp = int(timestamp / 1000)
firstSunday = 259200 # 1970-01-04T00:00:00.000Z
difference = timestamp - firstSunday
numWeeks = self.integer_divide(difference, duration)
previousSunday = self.sum(firstSunday, numWeeks * duration)
if after:
previousSunday = self.sum(previousSunday, duration)
return self.iso8601(previousSunday * 1000)
else:
timestamp = int(timestamp / 1000)
timestamp = duration * int(timestamp / duration)
if after:
timestamp = self.sum(timestamp, duration)
return self.iso8601(timestamp * 1000)
def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
interval = self.timeframes[timeframe]
limit = 100 if (limit is None) else limit
requestLimit = self.sum(limit, 1)
requestLimit = min(1000, requestLimit) # max 1000
request = {
'market_ids': market['id'],
'interval': interval,
'sort': 'asc', # 'asc' will always include the start_time, 'desc' will always include end_time
'limit': requestLimit, # max 1000
}
now = self.milliseconds()
duration = self.parse_timeframe(timeframe)
startTime = since
endTime = now
if since is None:
if limit is None:
raise ArgumentsRequired(self.id + ' fetchOHLCV requires either a since argument or a limit argument')
else:
startTime = now - limit * duration * 1000
else:
if limit is None:
endTime = now
else:
endTime = self.sum(since, self.sum(limit, 1) * duration * 1000)
startTimeNormalized = self.normalize_ohlcv_timestamp(startTime, timeframe)
endTimeNormalized = self.normalize_ohlcv_timestamp(endTime, timeframe, True)
request['start_time'] = startTimeNormalized
request['end_time'] = endTimeNormalized
response = self.publicGetCandle(self.extend(request, params))
#
# {
# "data":[
# {
# "market_id":"ETH-BTC",
# "open":"0.02811",
# "close":"0.02811",
# "low":"0.02811",
# "high":"0.02811",
# "base_volume":"0.0005",
# "quote_volume":"0.000014055",
# "start_time":"2018-11-30T18:19:00.000Z",
# "end_time":"2018-11-30T18:20:00.000Z"
# },
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_ohlcvs(data, market, timeframe, since, limit)
def parse_ohlcv(self, ohlcv, market=None):
#
# {
# "market_id":"ETH-BTC",
# "open":"0.02811",
# "close":"0.02811",
# "low":"0.02811",
# "high":"0.02811",
# "base_volume":"0.0005",
# "quote_volume":"0.000014055",
# "start_time":"2018-11-30T18:19:00.000Z",
# "end_time":"2018-11-30T18:20:00.000Z"
# }
#
return [
self.parse8601(self.safe_string(ohlcv, 'start_time')),
self.safe_float(ohlcv, 'open'),
self.safe_float(ohlcv, 'high'),
self.safe_float(ohlcv, 'low'),
self.safe_float(ohlcv, 'close'),
self.safe_float(ohlcv, 'base_volume'),
]
    def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
        """Fetch all currently-open orders, optionally restricted to one market."""
        self.load_markets()
        # NOTE(review): parse8601 expects an ISO-8601 string, but `since` is a
        # millisecond timestamp here, so this presumably yields None and the
        # since-filter in parse_orders becomes a no-op - confirm against the
        # base Exchange implementation
        since = self.parse8601(since)
        request = {}
        market = None
        if symbol is not None:
            market = self.market(symbol)
            request['market_id'] = market['id']
        response = self.privateGetOpenOrder(self.extend(request, params))
        data = self.safe_value(response, 'data')
        return self.parse_orders(data, market, since, limit)
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
request = {
'start_time': self.iso8601(0),
'end_time': self.iso8601(self.milliseconds()),
'limit': 100,
}
market = None
if symbol is not None:
market = self.market(symbol)
request['market_id'] = market['id']
if since:
request['start_time'] = self.iso8601(since)
if limit:
request['limit'] = limit
response = self.privateGetOrderHistory(self.extend(request, params))
data = self.safe_value(response, 'data')
return self.parse_orders(data, market, since, limit)
def fetch_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrder requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'market_id': market['id'],
}
clientOrderId = self.safe_string_2(params, 'clientOrderId', 'client_order_id')
if clientOrderId is not None:
request['client_order_id'] = clientOrderId
else:
request['order_id'] = id
query = self.omit(params, ['clientOrderId', 'client_order_id'])
response = self.privateGetOrder(self.extend(request, query))
data = self.safe_value(response, 'data', [])
order = self.safe_value(data, 0)
return self.parse_order(order, market)
    def parse_order_status(self, status):
        """Map a ProBit order status onto the unified ccxt status string."""
        statuses = {
            'open': 'open',
            'cancelled': 'canceled',
            'filled': 'closed',
        }
        # fall back to the raw status when it is not in the map
        return self.safe_string(statuses, status, status)
def parse_order(self, order, market=None):
#
# {
# id: string,
# user_id: string,
# market_id: string,
# type: 'orderType',
# side: 'side',
# quantity: string,
# limit_price: string,
# time_in_force: 'timeInForce',
# filled_cost: string,
# filled_quantity: string,
# open_quantity: string,
# cancelled_quantity: string,
# status: 'orderStatus',
# time: 'date',
# client_order_id: string,
# }
#
status = self.parse_order_status(self.safe_string(order, 'status'))
id = self.safe_string(order, 'id')
type = self.safe_string(order, 'type')
side = self.safe_string(order, 'side')
marketId = self.safe_string(order, 'market_id')
symbol = self.safe_symbol(marketId, market, '-')
timestamp = self.parse8601(self.safe_string(order, 'time'))
price = self.safe_float(order, 'limit_price')
filled = self.safe_float(order, 'filled_quantity')
remaining = self.safe_float(order, 'open_quantity')
canceledAmount = self.safe_float(order, 'cancelled_quantity')
if canceledAmount is not None:
remaining = self.sum(remaining, canceledAmount)
amount = self.safe_float(order, 'quantity', self.sum(filled, remaining))
cost = self.safe_float_2(order, 'filled_cost', 'cost')
if type == 'market':
price = None
average = None
if filled is not None:
if cost is None:
if price is not None:
cost = price * filled
if cost is not None:
if filled > 0:
average = cost / filled
clientOrderId = self.safe_string(order, 'client_order_id')
if clientOrderId == '':
clientOrderId = None
timeInForce = self.safe_string_upper(order, 'time_in_force')
return {
'id': id,
'info': order,
'clientOrderId': clientOrderId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'symbol': symbol,
'type': type,
'timeInForce': timeInForce,
'side': side,
'status': status,
'price': price,
'stopPrice': None,
'amount': amount,
'filled': filled,
'remaining': remaining,
'average': average,
'cost': cost,
'fee': None,
'trades': None,
}
    def cost_to_precision(self, symbol, cost):
        # truncate(not round) the cost to the market's 'cost' precision,
        # honoring self.precisionMode(TICK_SIZE for this exchange)
        return self.decimal_to_precision(cost, TRUNCATE, self.markets[symbol]['precision']['cost'], self.precisionMode)
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
market = self.market(symbol)
options = self.safe_value(self.options, 'timeInForce')
defaultTimeInForce = self.safe_value(options, type)
timeInForce = self.safe_string_2(params, 'timeInForce', 'time_in_force', defaultTimeInForce)
request = {
'market_id': market['id'],
'type': type,
'side': side,
'time_in_force': timeInForce,
}
clientOrderId = self.safe_string_2(params, 'clientOrderId', 'client_order_id')
if clientOrderId is not None:
request['client_order_id'] = clientOrderId
costToPrecision = None
if type == 'limit':
request['limit_price'] = self.price_to_precision(symbol, price)
request['quantity'] = self.amount_to_precision(symbol, amount)
elif type == 'market':
# for market buy it requires the amount of quote currency to spend
if side == 'buy':
cost = self.safe_float(params, 'cost')
createMarketBuyOrderRequiresPrice = self.safe_value(self.options, 'createMarketBuyOrderRequiresPrice', True)
if createMarketBuyOrderRequiresPrice:
if price is not None:
if cost is None:
cost = amount * price
elif cost is None:
raise InvalidOrder(self.id + " createOrder() requires the price argument for market buy orders to calculate total order cost(amount to spend), where cost = amount * price. Supply a price argument to createOrder() call if you want the cost to be calculated for you from price and amount, or, alternatively, add .options['createMarketBuyOrderRequiresPrice'] = False and supply the total cost value in the 'amount' argument or in the 'cost' extra parameter(the exchange-specific behaviour)")
else:
cost = amount if (cost is None) else cost
costToPrecision = self.cost_to_precision(symbol, cost)
request['cost'] = costToPrecision
else:
request['quantity'] = self.amount_to_precision(symbol, amount)
query = self.omit(params, ['timeInForce', 'time_in_force', 'clientOrderId', 'client_order_id'])
response = self.privatePostNewOrder(self.extend(request, query))
#
# {
# data: {
# id: string,
# user_id: string,
# market_id: string,
# type: 'orderType',
# side: 'side',
# quantity: string,
# limit_price: string,
# time_in_force: 'timeInForce',
# filled_cost: string,
# filled_quantity: string,
# open_quantity: string,
# cancelled_quantity: string,
# status: 'orderStatus',
# time: 'date',
# client_order_id: string,
# }
# }
#
data = self.safe_value(response, 'data')
order = self.parse_order(data, market)
# a workaround for incorrect huge amounts
# returned by the exchange on market buys
if (type == 'market') and (side == 'buy'):
order['amount'] = None
order['cost'] = float(costToPrecision)
order['remaining'] = None
return order
def cancel_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'market_id': market['id'],
'order_id': id,
}
response = self.privatePostCancelOrder(self.extend(request, params))
data = self.safe_value(response, 'data')
return self.parse_order(data)
def parse_deposit_address(self, depositAddress, currency=None):
address = self.safe_string(depositAddress, 'address')
tag = self.safe_string(depositAddress, 'destination_tag')
currencyId = self.safe_string(depositAddress, 'currency_id')
code = self.safe_currency_code(currencyId)
self.check_address(address)
return {
'currency': code,
'address': address,
'tag': tag,
'info': depositAddress,
}
def fetch_deposit_address(self, code, params={}):
self.load_markets()
currency = self.currency(code)
request = {
'currency_id': currency['id'],
}
response = self.privateGetDepositAddress(self.extend(request, params))
#
# {
# "data":[
# {
# "currency_id":"ETH",
# "address":"0x12e2caf3c4051ba1146e612f532901a423a9898a",
# "destination_tag":null
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
firstAddress = self.safe_value(data, 0)
if firstAddress is None:
raise InvalidAddress(self.id + ' fetchDepositAddress returned an empty response')
return self.parse_deposit_address(firstAddress, currency)
def fetch_deposit_addresses(self, codes=None, params={}):
self.load_markets()
request = {}
if codes:
currencyIds = []
for i in range(0, len(codes)):
currency = self.currency(codes[i])
currencyIds.append(currency['id'])
request['currency_id'] = ','.join(codes)
response = self.privateGetDepositAddress(self.extend(request, params))
data = self.safe_value(response, 'data', [])
return self.parse_deposit_addresses(data)
def parse_deposit_addresses(self, addresses):
result = {}
for i in range(0, len(addresses)):
address = self.parse_deposit_address(addresses[i])
code = address['currency']
result[code] = address
return result
def withdraw(self, code, amount, address, tag=None, params={}):
# In order to use self method
# you need to allow API withdrawal from the API Settings Page, and
# and register the list of withdrawal addresses and destination tags on the API Settings page
# you can only withdraw to the registered addresses using the API
self.check_address(address)
self.load_markets()
currency = self.currency(code)
if tag is None:
tag = ''
request = {
'currency_id': currency['id'],
# 'platform_id': 'ETH', # if omitted it will use the default platform for the currency
'address': address,
'destination_tag': tag,
'amount': self.currency_to_precision(code, amount),
# which currency to pay the withdrawal fees
# only applicable for currencies that accepts multiple withdrawal fee options
# 'fee_currency_id': 'ETH', # if omitted it will use the default fee policy for each currency
# whether the amount field includes fees
# 'include_fee': False, # makes sense only when fee_currency_id is equal to currency_id
}
response = self.privatePostWithdrawal(self.extend(request, params))
data = self.safe_value(response, 'data')
return self.parse_transaction(data, currency)
def parse_transaction(self, transaction, currency=None):
id = self.safe_string(transaction, 'id')
amount = self.safe_float(transaction, 'amount')
address = self.safe_string(transaction, 'address')
tag = self.safe_string(transaction, 'destination_tag')
txid = self.safe_string(transaction, 'hash')
timestamp = self.parse8601(self.safe_string(transaction, 'time'))
type = self.safe_string(transaction, 'type')
currencyId = self.safe_string(transaction, 'currency_id')
code = self.safe_currency_code(currencyId)
status = self.parse_transaction_status(self.safe_string(transaction, 'status'))
feeCost = self.safe_float(transaction, 'fee')
fee = None
if feeCost is not None and feeCost != 0:
fee = {
'currency': code,
'cost': feeCost,
}
return {
'id': id,
'currency': code,
'amount': amount,
'addressFrom': None,
'address': address,
'addressTo': address,
'tagFrom': None,
'tag': tag,
'tagTo': tag,
'status': status,
'type': type,
'txid': txid,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'fee': fee,
'info': transaction,
}
    def parse_transaction_status(self, status):
        """Map a ProBit transaction status onto the unified ccxt status."""
        statuses = {
            # every in-flight state maps to 'pending'
            'requested': 'pending',
            'pending': 'pending',
            'confirming': 'pending',
            'confirmed': 'pending',
            'applying': 'pending',
            'done': 'ok',
            # both cancel states collapse to 'canceled'
            'cancelled': 'canceled',
            'cancelling': 'canceled',
        }
        # fall back to the raw status when it is not in the map
        return self.safe_string(statuses, status, status)
    def nonce(self):
        # use the current millisecond timestamp as an increasing nonce
        return self.milliseconds()
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'][api] + '/'
query = self.omit(params, self.extract_params(path))
if api == 'accounts':
self.check_required_credentials()
url += self.implode_params(path, params)
auth = self.apiKey + ':' + self.secret
auth64 = self.string_to_base64(auth)
headers = {
'Authorization': 'Basic ' + self.decode(auth64),
'Content-Type': 'application/json',
}
if query:
body = self.json(query)
else:
url += self.version + '/'
if api == 'public':
url += self.implode_params(path, params)
if query:
url += '?' + self.urlencode(query)
elif api == 'private':
now = self.milliseconds()
self.check_required_credentials()
expires = self.safe_integer(self.options, 'expires')
if (expires is None) or (expires < now):
raise AuthenticationError(self.id + ' access token expired, call signIn() method')
accessToken = self.safe_string(self.options, 'accessToken')
headers = {
'Authorization': 'Bearer ' + accessToken,
}
url += self.implode_params(path, params)
if method == 'GET':
if query:
url += '?' + self.urlencode(query)
elif query:
body = self.json(query)
headers['Content-Type'] = 'application/json'
return {'url': url, 'method': method, 'body': body, 'headers': headers}
    def sign_in(self, params={}):
        """Obtain an OAuth bearer token and cache it in self.options for sign().

        Sample response:
            {access_token: '...', token_type: 'bearer', expires_in: 900}
        """
        self.check_required_credentials()
        request = {
            'grant_type': 'client_credentials',  # the only supported value
        }
        response = self.accountsPostToken(self.extend(request, params))
        expiresIn = self.safe_integer(response, 'expires_in')
        accessToken = self.safe_string(response, 'access_token')
        # cache the token and its absolute expiry(ms) so sign() can reuse them
        self.options['accessToken'] = accessToken
        self.options['expires'] = self.sum(self.milliseconds(), expiresIn * 1000)
        return response
    def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
        """Translate ProBit error payloads into the mapped ccxt exception types."""
        if response is None:
            return  # fallback to default error handler
        if 'errorCode' in response:
            errorCode = self.safe_string(response, 'errorCode')
            message = self.safe_string(response, 'message')
            if errorCode is not None:
                feedback = self.id + ' ' + body
                # try an exact match on the human-readable message first
                self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
                # NOTE(review): broad matching reuses the 'exact' map because
                # no 'broad' map is defined for this exchange - confirm intended
                self.throw_broadly_matched_exception(self.exceptions['exact'], errorCode, feedback)
                raise ExchangeError(feedback)
| [
"travis@travis-ci.org"
] | travis@travis-ci.org |
25133cfe18a4adf5b30ef8c1543ad2e7d90b2d30 | 7295ca77099253173c206eee321396b6fe0ea5fa | /manage.py | a466e5d957c38efac4486a0f70ab2ac3e475a3c0 | [] | no_license | SharukhEqbal/FetchStockWithDjango | 2111c200c7114cef53a7568a81f7e01664d90ab9 | 0db203a91ed62a8ff233d49ed6280571e58372c9 | refs/heads/main | 2023-02-02T08:57:06.583332 | 2020-12-18T06:41:44 | 2020-12-18T06:41:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 668 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Entry point: point Django at the project settings, then hand the
    command line over to Django's management CLI."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'stockProgram.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as import_error:
        # Most commonly: Django not installed, or the virtualenv not active.
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from import_error
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
    main()
| [
"shahrukheqbal@gmail.com"
] | shahrukheqbal@gmail.com |
531d4f9a63300f7e4eafeafe171fedd41b1fc595 | 90555b4724b85480fc584dc7cb5085bf24f86b9c | /src/dataset.py | 307b94fcd355f897f920a90060ce5cc717eefa23 | [] | no_license | skasai5296/pytorch_template | fc482ca4ede6190cb701fbc7327f0e213adfce39 | 0a77446ae1432971081e55b75498dbf4b98bba35 | refs/heads/master | 2021-01-03T03:39:56.155681 | 2020-02-15T03:24:38 | 2020-02-15T03:24:38 | 239,906,739 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,642 | py | import torch
from torch.utils.data import DataLoader, Dataset
class SampleDataset(Dataset):
    """Toy dataset: 100 synthetic records of random features and labels."""
    def __init__(self, CONFIG, mode):
        # Only the three canonical splits are accepted.
        assert mode in ("train", "val", "test")
        self.CONFIG = CONFIG
        # Each record: integer id, 100-dim feature vector, scalar target.
        self.data = [
            {"id": idx, "hoge": torch.randn(100), "label": torch.randn(1)}
            for idx in range(100)
        ]
    def __getitem__(self, index):
        """
        Args:
            index (int): Index
        Returns:
            {
                'id': int; the id of hoge
                'hoge': torch.tensor; features
                'label': torch.tensor; label of hoge
            }
        """
        record = self.data[index]
        return {
            "id": record["id"],
            "hoge": record["hoge"],
            "label": record["label"],
        }
    def __len__(self):
        return len(self.data)
def get_collater(mode):
    """Return a DataLoader collate_fn that batches id/hoge/label sample dicts.

    Ids stay a plain Python list; feature and label tensors are stacked along
    a new leading batch dimension.
    """
    assert mode in ("train", "val", "test")
    def collater(datalist):
        ids = [sample["id"] for sample in datalist]
        hoges = torch.stack([sample["hoge"] for sample in datalist])
        labels = torch.stack([sample["label"] for sample in datalist])
        return {"id": ids, "hoge": hoges, "label": labels}
    return collater
# for debugging
if __name__ == "__main__":
    mode = "train"
    # BUG FIX: SampleDataset takes (CONFIG, mode); the original passed only
    # `mode`, so running this file raised TypeError. An empty config is enough
    # here because __init__ only stores it.
    ds = SampleDataset({}, mode)
    loader = DataLoader(ds, batch_size=4, collate_fn=get_collater(mode))
    for data in loader:
        print(data)
        break
| [
"seito5296@gmail.com"
] | seito5296@gmail.com |
df83034f4140affc980783d35c7d568b676e9a6a | 6a3a3f5a8fd3eadc9485ff87f181966c0e76c9f6 | /lib/autoAccept.py | 48aac07473b6b2f5704f70fe70e88d26960593b6 | [] | no_license | toothlessG22/Summer2017 | 5be5493a73cd416cf1aba87bc91d0f05b8f1b215 | fb0b37e2c33921106dcd273369cc73a77006ff94 | refs/heads/master | 2021-01-21T20:29:57.901250 | 2017-06-14T01:14:38 | 2017-06-14T01:14:38 | 92,243,310 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 742 | py | import time
import pyautogui
# BUG FIX: the original tested __name__ against "__main)" (a typo for
# "__main__"), so the direct-run branch was unreachable and running this file
# as a script crashed on `from lib import openCVLocate`.
# Run directly: import the sibling module and use the image dir one level up.
# Imported as lib.autoAccept: import through the package and use "img".
if __name__ == "__main__":
    import openCVLocate
    imgpath = "../img"
else:
    from lib import openCVLocate
    imgpath = "img"
def autoAccept():
    """Poll the screen for League's "accept match" button and click it.

    Loops forever until champion select is detected: while waiting it scans
    every `delay` seconds; once a click has been issued it tightens the poll
    interval to 1s and exits when the champ-select ward image appears.
    """
    delay = 7
    accepted = False
    while True:
        print("accepting")
        # Locate the accept button on screen via template matching.
        pos = openCVLocate.locateCenter(imgpath + '/accept.PNG')
        if( pos != None):
            acceptCoords = pos
            # Click the button, then click slightly above it (dismisses the
            # post-accept overlay) — presumably; confirm against the client UI.
            pyautogui.click(acceptCoords, duration=.1)
            pyautogui.click(acceptCoords[0], acceptCoords[1]-50, duration=.5)
            accepted = True
            delay = 1
        # Champ select reached: the ward image is only visible there.
        if(accepted and openCVLocate.locateCenter(imgpath + '/wardInChampSelect.PNG')):
            break
        time.sleep(delay)
if __name__ == "__main__":
    autoAccept()
"toothlessG22@gmail.com"
] | toothlessG22@gmail.com |
e5f8425c94bde2476eccb9e15a75fbb2e5a1f41b | 3ac81aae7c268f0dc8b2709875b44e99f79065b9 | /homeassistant/components/rachio/device.py | 64066ca7bd7f6ff6288ef9df24b987e01fe66732 | [
"Apache-2.0"
] | permissive | ronal2do/core | fd304d4cacc45a6065a585d064c383c8c12a7452 | d2e22c653a50ef3ce62ea2d1997b9a2fa7f850ee | refs/heads/dev | 2022-12-28T18:36:54.024714 | 2020-10-17T17:10:31 | 2020-10-17T17:10:31 | 304,744,797 | 1 | 0 | Apache-2.0 | 2020-10-17T17:10:32 | 2020-10-16T21:44:06 | null | UTF-8 | Python | false | false | 6,821 | py | """Adapter to wrap the rachiopy api for home assistant."""
import logging
from typing import Optional
from homeassistant.const import EVENT_HOMEASSISTANT_STOP, HTTP_OK
from .const import (
KEY_DEVICES,
KEY_ENABLED,
KEY_EXTERNAL_ID,
KEY_FLEX_SCHEDULES,
KEY_ID,
KEY_MAC_ADDRESS,
KEY_MODEL,
KEY_NAME,
KEY_SCHEDULES,
KEY_SERIAL_NUMBER,
KEY_STATUS,
KEY_USERNAME,
KEY_ZONES,
)
from .webhooks import LISTEN_EVENT_TYPES, WEBHOOK_CONST_ID
_LOGGER = logging.getLogger(__name__)
class RachioPerson:
    """Represent a Rachio user."""
    def __init__(self, rachio, config_entry):
        """Create an object from the provided API instance."""
        # Use API token to get user ID
        self.rachio = rachio
        self.config_entry = config_entry
        self.username = None
        self._id = None
        self._controllers = []
    def setup(self, hass):
        """Rachio device setup."""
        # NOTE(review): assert statements are stripped under `python -O`,
        # which would silently skip these API status checks — consider raising.
        response = self.rachio.person.info()
        assert int(response[0][KEY_STATUS]) == HTTP_OK, "API key error"
        self._id = response[1][KEY_ID]
        # Use user ID to get user data
        data = self.rachio.person.get(self._id)
        assert int(data[0][KEY_STATUS]) == HTTP_OK, "User ID error"
        self.username = data[1][KEY_USERNAME]
        devices = data[1][KEY_DEVICES]
        for controller in devices:
            webhooks = self.rachio.notification.get_device_webhook(controller[KEY_ID])[
                1
            ]
            # The API does not provide a way to tell if a controller is shared
            # or if they are the owner. To work around this problem we fetch the webhooks
            # before we setup the device so we can skip it instead of failing.
            # webhooks are normally a list, however if there is an error
            # rachio hands us back a dict
            if isinstance(webhooks, dict):
                _LOGGER.error(
                    "Failed to add rachio controller '%s' because of an error: %s",
                    controller[KEY_NAME],
                    webhooks.get("error", "Unknown Error"),
                )
                continue
            rachio_iro = RachioIro(hass, self.rachio, controller, webhooks)
            rachio_iro.setup()
            self._controllers.append(rachio_iro)
        _LOGGER.info('Using Rachio API as user "%s"', self.username)
    @property
    def user_id(self) -> str:
        """Get the user ID as defined by the Rachio API."""
        return self._id
    @property
    def controllers(self) -> list:
        """Get a list of controllers managed by this account."""
        return self._controllers
class RachioIro:
    """Represent a Rachio Iro."""
    def __init__(self, hass, rachio, data, webhooks):
        """Initialize a Rachio device."""
        self.hass = hass
        self.rachio = rachio
        self._id = data[KEY_ID]
        self.name = data[KEY_NAME]
        self.serial_number = data[KEY_SERIAL_NUMBER]
        self.mac_address = data[KEY_MAC_ADDRESS]
        self.model = data[KEY_MODEL]
        self._zones = data[KEY_ZONES]
        self._schedules = data[KEY_SCHEDULES]
        self._flex_schedules = data[KEY_FLEX_SCHEDULES]
        self._init_data = data
        self._webhooks = webhooks
        _LOGGER.debug('%s has ID "%s"', str(self), self.controller_id)
    def setup(self):
        """Rachio Iro setup for webhooks."""
        # Listen for all updates
        self._init_webhooks()
    def _init_webhooks(self) -> None:
        """Start getting updates from the Rachio API."""
        # Captured by the _deinit_webhooks closure below; rebound after the
        # new webhook is registered so the shutdown hook can delete it by ID.
        current_webhook_id = None
        # First delete any old webhooks that may have stuck around
        def _deinit_webhooks(_) -> None:
            """Stop getting updates from the Rachio API."""
            if not self._webhooks:
                # We fetched webhooks when we created the device, however if we call _init_webhooks
                # again we need to fetch again
                self._webhooks = self.rachio.notification.get_device_webhook(
                    self.controller_id
                )[1]
            for webhook in self._webhooks:
                # Only remove webhooks this integration created (external-id
                # prefix) or the one registered during this run.
                if (
                    webhook[KEY_EXTERNAL_ID].startswith(WEBHOOK_CONST_ID)
                    or webhook[KEY_ID] == current_webhook_id
                ):
                    self.rachio.notification.delete(webhook[KEY_ID])
            self._webhooks = None
        _deinit_webhooks(None)
        # Choose which events to listen for and get their IDs
        event_types = []
        for event_type in self.rachio.notification.get_webhook_event_type()[1]:
            if event_type[KEY_NAME] in LISTEN_EVENT_TYPES:
                event_types.append({"id": event_type[KEY_ID]})
        # Register to listen to these events from the device
        url = self.rachio.webhook_url
        auth = WEBHOOK_CONST_ID + self.rachio.webhook_auth
        new_webhook = self.rachio.notification.add(
            self.controller_id, auth, url, event_types
        )
        # Save ID for deletion at shutdown
        current_webhook_id = new_webhook[1][KEY_ID]
        self.hass.bus.listen(EVENT_HOMEASSISTANT_STOP, _deinit_webhooks)
    def __str__(self) -> str:
        """Display the controller as a string."""
        return f'Rachio controller "{self.name}"'
    @property
    def controller_id(self) -> str:
        """Return the Rachio API controller ID."""
        return self._id
    @property
    def current_schedule(self) -> str:
        """Return the schedule that the device is running right now."""
        return self.rachio.device.current_schedule(self.controller_id)[1]
    @property
    def init_data(self) -> dict:
        """Return the information used to set up the controller."""
        return self._init_data
    def list_zones(self, include_disabled=False) -> list:
        """Return a list of the zone dicts connected to the device."""
        # All zones
        if include_disabled:
            return self._zones
        # Only enabled zones
        return [z for z in self._zones if z[KEY_ENABLED]]
    def get_zone(self, zone_id) -> Optional[dict]:
        """Return the zone with the given ID."""
        for zone in self.list_zones(include_disabled=True):
            if zone[KEY_ID] == zone_id:
                return zone
        return None
    def list_schedules(self) -> list:
        """Return a list of fixed schedules."""
        return self._schedules
    def list_flex_schedules(self) -> list:
        """Return a list of flex schedules."""
        return self._flex_schedules
    def stop_watering(self) -> None:
        """Stop watering all zones connected to this controller."""
        self.rachio.device.stop_water(self.controller_id)
        _LOGGER.info("Stopped watering of all zones on %s", str(self))
| [
"noreply@github.com"
] | ronal2do.noreply@github.com |
a44a8301d9cf018c0b5ff5bc64748a1262eda343 | b9eb496c4551fd091954675a61382636fc68e715 | /src/ABC1xx/ABC14x/ABC140/ABC140B.py | 8357fa46e4c56c5d78f10b2adcc2a1f6074cfb70 | [] | no_license | kttaroha/AtCoder | af4c5783d89a61bc6a40f59be5e0992980cc8467 | dc65ce640954da8c2ad0d1b97580da50fba98a55 | refs/heads/master | 2021-04-17T16:52:09.508706 | 2020-11-22T05:45:08 | 2020-11-22T05:45:08 | 249,460,649 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 339 | py | def main():
_ = int(input())
A = list(map(int, input().split()))
B = list(map(int, input().split()))
C = list(map(int, input().split()))
prev = -100
s = 0
for a in A:
s += B[a-1]
if a - prev == 1:
s += C[prev-1]
prev = a
print(s)
if __name__ == '__main__':
main()
| [
"kthamano1994@gmail.com"
] | kthamano1994@gmail.com |
1ff154cc6b33963eeeca9cf11be68ba8088d0637 | 0be96465a1c0acd6b5a29080ca75a56d7d2c88a8 | /django_views/django_views/settings.py | 9419dea9fedf92fc5922f6e59f4b224260e3ee6d | [] | no_license | KenZP/tulingxueyuan | 975dd9d92127005d89e69ec063efac83e71d5910 | 458ebc9aabe3a0854141c7f1ad6a7a0c3d58ecae | refs/heads/master | 2020-05-05T14:05:39.882969 | 2019-05-08T07:06:13 | 2019-05-08T07:06:13 | 180,106,816 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,715 | py | """
Django settings for django_views project.
Generated by 'django-admin startproject' using Django 1.8.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control — rotate it and load
# it from the environment before any deployment.
SECRET_KEY = '%th2$wtpu0tvklo5r7)7vzssm$42d10+ts0%nphlju-6f(25n2'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# '*' accepts any Host header — acceptable for local development only.
ALLOWED_HOSTS = ["*"]
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'teacher_views',
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    # NOTE(review): CSRF protection is commented out, leaving every POST form
    # open to cross-site request forgery — re-enable before real deployment.
    #'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'django_views.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR,"templates")],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'django_views.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
| [
"398984762@qq.com"
] | 398984762@qq.com |
e54a1974ed4cc824b54c34c1c0ee294ae44db7f2 | 90f7540b6ea01b4d594c54f515fce063f684c7b0 | /astar/test_run_4_cython.py | 7802c9173c6e4fb20e83a09d58e938fd8d3b5a15 | [
"MIT"
] | permissive | rokujyouhitoma/tips | 0c0bd4d1f6e5653466834691bcb4eb55b3d21d7e | dfe0b4211716c1648064769e9118f2d748d20d73 | refs/heads/master | 2023-05-04T09:44:13.107960 | 2023-04-24T10:46:28 | 2023-04-24T10:46:28 | 15,069,738 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,237 | py | import pyximport; pyximport.install()
import astar
# Grid dimensions of the demo map below (rows x cols).
MAP_ROW = 7
MAP_COL = 5
# 1 = wall, 0 = walkable; indexed as map[y][x].
# NOTE(review): this shadows the builtin `map`; renaming would touch callers.
map = [[1,1,1,1,1],
       [1,0,0,0,1],
       [1,1,1,0,1],
       [1,0,0,0,1],
       [1,0,1,1,1],
       [1,0,0,0,1],
       [1,1,1,1,1]]
def is_outside_map(x, y):
    """True when (x, y) lies outside the MAP_COL x MAP_ROW grid."""
    return not (0 <= x < MAP_COL and 0 <= y < MAP_ROW)
def is_block(x, y):
    """True when the map cell at (x, y) is a wall (value 1)."""
    return map[y][x] == 1
def is_movable(p):
    """True when position p = (x, y) is inside the map and not a wall."""
    x, y = p
    return not is_outside_map(x, y) and not is_block(x, y)
def neighbor_nodes(p):
    """Return an iterator over the movable 4-neighbours (L, U, R, D) of p."""
    x, y = p
    candidates = [(x - 1, y), (x, y - 1), (x + 1, y), (x, y + 1)]
    return filter(is_movable, candidates)
def heuristic_cost_estimate(p1, p2):
    """A* heuristic: currently the Manhattan distance between p1 and p2."""
    return manhattan_distance(p1, p2)
def manhattan_distance(p1, p2):
    """L1 (taxicab) distance between two 2-D points."""
    dx = abs(p1[0] - p2[0])
    dy = abs(p1[1] - p2[1])
    return dx + dy
def euclidean_distance(p1, p2):
    """L2 (straight-line) distance between two 2-D points.

    BUG FIX: the original called math.sqrt, but this module never imports
    math, so calling this function raised NameError. ``** 0.5`` computes the
    same value without the import.
    """
    return ((p1[0] - p2[0]) ** 2 + (p1[1] - p2[1]) ** 2) ** 0.5
if __name__ == '__main__':
    # Demo: search from (2, 2) to (1, 1) on the map above and print the path.
    start = (2, 2)
    goal = (1, 1)
    # NOTE(review): the same heuristic function is passed twice; presumably the
    # cython astar expects separate heuristic and distance callables — confirm.
    path = astar.astar(start, goal, neighbor_nodes, heuristic_cost_estimate, heuristic_cost_estimate)
    if path:
        print("====")
        # The path comes back goal-to-start, so print it reversed.
        for position in reversed(path):
            x,y = position
            print(x,y)
| [
"ike.toru@dena.jp"
] | ike.toru@dena.jp |
ee7891e1261dd8494dc3b2baa92032c912d1e53a | 63d328c2286e441a1474c85a04d3ac4d5b1ef817 | /main/models.py | 9f1b2be9483346ae70a34e11e4d316cf651a111d | [] | no_license | rajsingh1505/Review_web | 31ceb6cce9eb1a4183442137e23df34c65f5d6b3 | b93471a2beab326a485a26e390a76a19b7b6f773 | refs/heads/main | 2023-04-05T08:09:30.771705 | 2021-04-06T11:29:00 | 2021-04-06T11:29:00 | 351,867,798 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 798 | py | from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Movie(models.Model):
    """A movie that users can review."""
    # fields for the movie table
    name = models.CharField(max_length=300)
    # NOTE(review): price is stored as free text (CharField), not a number.
    price = models.CharField(max_length=800)
    description = models.TextField(max_length=5000,blank=True)
    image = models.URLField(default=None, null=True)
    def __str__(self):
        return self.name
    def __unicode__(self):
        # Python 2 relic; harmless under Python 3, where __str__ is used.
        return self.name
class Review(models.Model):
    """A user's rating and comment on a movie; cascades with movie/user."""
    movie = models.ForeignKey(Movie, on_delete=models.CASCADE)
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    comment = models.TextField(max_length=5000)
    rating = models.FloatField(default=0)
    def __str__(self):
        return self.user.username
| [
"noreply@github.com"
] | rajsingh1505.noreply@github.com |
40e7d551c96dcf303d4fb6a2dba273abb261c1a7 | bd0d772947efe708bfb6c5af2c715f5414a52c20 | /DA/DI/app/urls.py | 5a74619fbe21ffba9b7f21c34b7708dd8d48f943 | [] | no_license | kut-info-ase-2020/G1 | 65933fa095313dfcb9ca8fc03dc454b895a584ba | 07ac1f4081b815822b96c1d6e57780e01a1243ba | refs/heads/master | 2022-12-31T03:57:26.146925 | 2020-10-14T03:39:03 | 2020-10-14T03:39:03 | 274,355,532 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 298 | py | from django.conf.urls import url
from django.views.static import serve
from DI.settings import MEDIAS_ROOT
from app.views import allPage, refresh
urlpatterns = [
    # Landing page rendering all entries.
    url(r'^$', allPage),
    # Endpoint that regenerates the page data.
    url(r'^refresh/$', refresh),
    # Serve uploaded media straight from MEDIAS_ROOT (development use only).
    url(r'^medias/(?P<path>.*)$', serve, {'document_root': MEDIAS_ROOT}),
]
| [
"chunyuan.lan@foxmail.com"
] | chunyuan.lan@foxmail.com |
eefb34c4f1c10fed44c2715638eee63974cde218 | ec02c2d3a48c5a1613a69db94f5e8b133bd817e2 | /OSRGAN/realesrgan/archs/__init__.py | 15e1837d4b550ed2c4e3e8d0f7ffcc7e3683470a | [
"MIT"
] | permissive | 170744039/QDL-CMFD | c2cbf8d7aba7f76a3960b40a18dbe04763a5bfa4 | 7a95ee8c9f29c98461987977180c1e84fb1b4919 | refs/heads/main | 2023-08-31T18:45:34.962814 | 2021-10-22T14:30:19 | 2021-10-22T14:30:19 | 424,864,886 | 1 | 0 | MIT | 2021-11-05T07:40:08 | 2021-11-05T07:40:07 | null | UTF-8 | Python | false | false | 499 | py | import importlib
from basicsr.utils import scandir
from os import path as osp
# automatically scan and import arch modules for registry
# scan all the files that end with '_arch.py' under the archs folder
arch_folder = osp.dirname(osp.abspath(__file__))
arch_filenames = [osp.splitext(osp.basename(v))[0] for v in scandir(arch_folder) if v.endswith('_arch.py')]
# import all the arch modules
# (imported only for their side effect of registering each architecture)
_arch_modules = [importlib.import_module(f'realesrgan.archs.{file_name}') for file_name in arch_filenames]
"mr.digital@live.com"
] | mr.digital@live.com |
04ce0cfd08c81634d1d820c5f34b2032aac5460c | f1df7e0a5c7c4c60edfc2cad83ccdecfce9a684f | /bigwebsite/include/tables.py | 29d70b2a56f0e2a0fefa7fa7504290cac322e99f | [
"MIT"
] | permissive | lilwebsite/bigwebsite-public | c68bfb76c6c36afffca701c416ef2b2f3769ab3a | 4178f3cfb0d5575907fef0916c04c975687a48a5 | refs/heads/master | 2020-05-19T21:47:26.711926 | 2019-06-02T12:59:37 | 2019-06-02T12:59:37 | 185,233,110 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 178 | py | from passlib.hash import bcrypt
from sqlalchemy import (
Column,
Integer,
Text,
Boolean,
DateTime
)
from ..meta import (
dbsession,
Base
)
import subprocess
import re
| [
"carl@bigwebsite.cool"
] | carl@bigwebsite.cool |
6c897b9a56505dc586e95b5f8b828384eeb280f2 | 92023290c3ae7a1ce5398696a7e8feab87d11a04 | /alice.py | b9b9082a655cd40769cccb33a46077f8c211eda7 | [] | no_license | EigenAlexa/flask-alice | 58234ed91f6be14cfe632c79e06e499ea6570a71 | 8299d32d8f1ceed300d842f9f51904eaede7984d | refs/heads/master | 2021-03-24T12:52:05.869282 | 2017-07-22T17:10:16 | 2017-07-22T17:10:16 | 76,002,079 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,112 | py | import aiml
import os
class Alice:
    """Thin wrapper around an AIML interpreter loaded from ./aiml/*.aiml."""
    def __init__(self):
        # All .aiml rule files live next to this module under aiml/.
        DIR = os.path.dirname(os.path.realpath(__file__)) + '/aiml/'
        files = [DIR + f for f in os.listdir(DIR) if os.path.isfile(DIR + f) and '.aiml' in f]
        interpretor = aiml.Kernel()
        interpretor.verbose(False)
        # TODO uncomment when this works
        # if os.path.isfile("bot_brain.brn"):
        #     interpretor.bootstrap(brainFile = "bot_brain.brn")
        # else:
        interpretor.bootstrap(learnFiles=files)
        # Load key=value bot predicates (name, species, ...).
        # NOTE(review): the file handle is never closed, `value` keeps its
        # trailing newline, and a '=' inside a value is truncated — confirm.
        properties_file = open(os.sep.join([DIR, 'bot.properties']))
        for line in properties_file:
            parts = line.split('=')
            key = parts[0]
            value = parts[1]
            interpretor.setBotPredicate(key, value)
        # interpretor.saveBrain("bot_brain.brn")
        self.interpretor = interpretor
    def message(self, string, sessionID=None):
        """Return the bot's reply to `string`, optionally per-session."""
        if sessionID:
            return self.interpretor.respond(string, sessionID)
        else:
            return self.interpretor.respond(string)
if __name__ == "__main__":
    alice = Alice()
    # Python 2 print statement — this file targets Python 2.
    print alice.message("what language are you speaking")
| [
"superpker@gmail.com"
] | superpker@gmail.com |
c944de8fab0409d084fe3201a216d40e86f28bb9 | b11f1b168f4976e5ded32cbf18524ddf89ba6a67 | /resources/notifications.py | 6e4c51048fc22f67e703ae7fbc213537ff3703b3 | [] | no_license | X0GT0X/emotion-chat | 0a5943803d4d8559fc519339ee6da80c87050308 | 176a11e9f4930ef48371c4e19d55b3d519746b7e | refs/heads/master | 2023-05-23T16:28:13.475860 | 2021-06-12T15:23:25 | 2021-06-12T15:23:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,415 | py | from flask import Response, request
from flask_restful import Resource
from database.models import Subscriber
from pywebpush import webpush
import datetime
import json
class Subscription(Resource):
    """REST resource that registers a browser's Web Push subscription."""
    def post(self):
        """Create (or reuse) a Subscriber for the posted subscription info.

        Returns the subscriber id as JSON with HTTP 201, or 400 on failure.
        """
        try:
            subscription_info = request.get_json()
            # Reuse an existing subscriber with identical subscription info
            # so repeated registrations stay idempotent.
            subscriber = Subscriber.objects(subscriptionInfo=subscription_info).first()
            if not subscriber:
                subscriber = Subscriber()
                subscriber.created = datetime.datetime.utcnow()
                subscriber.subscriptionInfo = subscription_info
            subscriber.modified = datetime.datetime.utcnow()
            subscriber.save()
            # BUG FIX: the original serialized {id: subscriber.id} — keyed on
            # the builtin `id` function with a raw ObjectId value — which made
            # json.dumps raise and the endpoint always answer 400. Use a
            # string key and stringify the id.
            return Response(
                json.dumps({"id": str(subscriber.id)}),
                mimetype="application/json",
                status=201,
            )
        except Exception:
            return {"message": "Something went wrong."}, 400
class Notifications(Resource):
    """REST resource that pushes a Web Push notification to one subscriber."""
    # Placeholder value — the real VAPID private key must come from config,
    # never be committed to source control.
    WEBPUSH_VAPID_PRIVATE_KEY = 'xxx'
    def post(self, id):
        """Send the posted payload's "data" field to subscriber `id`."""
        body = request.get_json()
        try:
            subscriber = Subscriber.objects(id=id).first()
            webpush(
                subscription_info=subscriber.subscriptionInfo,
                data=body.get("data"),
                vapid_private_key=self.WEBPUSH_VAPID_PRIVATE_KEY
            )
        except Exception as e:
            # NOTE(review): collapses every failure (unknown id, push error)
            # into one generic 400 and drops `e` — consider logging it.
            return {"message": "Something went wrong."}, 400
        return {"message": "Notification was sent"}, 201
| [
"a.vozniuk00@gmail.com"
] | a.vozniuk00@gmail.com |
41f29315eaba29aa2bfd4bc89269680dcbd5ce9f | d875ebc7f6fb0195aa140de27111df6db86ccfb7 | /crossmath/dfs2.py | d12c80cfa73d7f914f8f44d097eed4621984217a | [] | no_license | kebzilla/4511 | a6502409022177d958cd7211a4fa0dd96fc92951 | cd3139c9a9047420f2b66ab14e4686152392a774 | refs/heads/master | 2021-01-11T17:01:59.974272 | 2016-09-28T21:20:38 | 2016-09-28T21:20:38 | 69,504,683 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 881 | py | from collections import deque,defaultdict
class Solution(object):
    def findItinerary(self, tickets):
        """Reconstruct the lexicographically-smallest itinerary starting at
        JFK that uses every ticket exactly once (LeetCode 332).

        BUG FIX: the loop used the Python-2-only `xrange`, which raises
        NameError under Python 3; `range` behaves identically here.
        """
        def build_graph(tickets):
            # Adjacency deques sorted descending so the lexicographically
            # smallest destination is popped (from the right) first.
            G = defaultdict(list)
            for S, E in tickets:
                G[S].append(E)
            for A in G:
                G[A].sort(reverse=True)
                G[A] = deque(G[A])
            return G
        def dfs(G, S):
            trip.append(S)
            if len(trip) == length:
                return True
            if S in G:
                queue = G[S]
                for i in range(len(queue)):
                    A = queue.pop()
                    if dfs(G, A):
                        return True
                    # Backtrack: rotate the failed destination to the left end.
                    queue.appendleft(A)
            trip.pop()
            return False
        G = build_graph(tickets)
        # A valid trip visits len(tickets) + 1 airports.
        trip, length = [], len(tickets) + 1
        dfs(G, "JFK")
        return trip
"condition.grounded@gmail.com"
] | condition.grounded@gmail.com |
2b67e235a3490fd768faa695ff32d76ed01f6f61 | a6bd25c3508d45134436bc3a39345e2565debec0 | /Assignment1/urls.py | 1fec19327932a4d972c8807b1a1ec09c09df8b86 | [] | no_license | gitNikhilsahu/Django-Business-Employee-Management | 2a869dbf9c0aac078662b09db708b7c03b372c5c | e1c6d1588561abf193d70ca4cb91c912c3ea66d1 | refs/heads/master | 2022-12-17T07:58:25.655611 | 2020-09-25T08:43:18 | 2020-09-25T08:43:18 | 298,517,130 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 478 | py | from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
    # Business app owns the site root; employee app is namespaced under it.
    path('', include('business.urls')),
    path('employee/', include('employee.urls')),
    path('admin/', admin.site.urls),
]
# Serve static and user-uploaded media from Django in DEBUG mode only;
# production should serve these from the web server instead.
if settings.DEBUG:
    urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"Nikhilsahu.in@gmail.com"
] | Nikhilsahu.in@gmail.com |
f72df796efd23d5fe5956bf5c335d28e7338693a | c29daba4b0c5d649cf10e1c016fd12a11d2e21e6 | /Day2/day2.py | 9f928792bbaab35a1cbece1bb2a59b9ad4a7fc19 | [] | no_license | 16kalai/pythonselenium | 4be985df708d678993a952b4d7dc00985ffe7b7f | 53cb5f8b92737a86f94da84b5f88cdae642a5efd | refs/heads/main | 2023-07-12T18:20:30.008240 | 2021-08-17T17:35:40 | 2021-08-17T17:35:40 | 397,333,315 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 328 | py | #string method
# Demo of assorted string methods and literal types.
k = 'Kalai'
s = ['selvam']
N = ('Natarajan',"a b c")
K = {'kannan'}
a = {'Amma':'1,2,3'}
# NOTE: ("kala") is just a string — a 1-tuple needs a trailing comma — so m
# is a set of two strings.
m= {("kala"),'1,2,3,5,9'}
print(k.upper())
# join() places the separator BETWEEN elements; with one element none is added.
print(k.join(s))
# NOTE(review): str.translate expects a translation table; passing a list only
# leaves the string unchanged because out-of-range lookups are ignored —
# confirm this line's intent.
print(k.translate(s))
print(k.isnumeric())
print(type(k))
print(type(s))
print(type(K))
print(type(a))
print(type(N),'N:')
print(type(m))
print(len(N))
print()
| [
"16kalai@gmail.com"
] | 16kalai@gmail.com |
76a3692170a887972c6130eaa29633efdcaadd87 | 7dc86226b44a3a00903926a7f7d4930c954c2973 | /python-fundamentals/07 Dictionaries - Exercise/01. Count Chars in a String.py | e08fa2a330a8c0e16d136d49dfbbbe4b11e65f82 | [] | no_license | rescenic/Softuni-Python-v2 | db49ded54fd6eac2179b5a2795bb58b7bc6af1f8 | e5a0d6b13570239a32245e8126cd3cf6abd58d3f | refs/heads/main | 2023-08-10T18:31:01.225334 | 2021-10-05T06:50:37 | 2021-10-05T06:50:37 | 576,778,410 | 1 | 0 | null | 2022-12-11T00:07:25 | 2022-12-11T00:07:24 | null | UTF-8 | Python | false | false | 203 | py | from collections import defaultdict
# Count occurrences of each non-whitespace character in one line of input.
string = input()
chars = defaultdict(int)
# Iterate the string directly — wrapping it in list() only built a throwaway
# copy of every character.
for c in string:
    if not c.isspace():
        chars[c] += 1
for k, v in chars.items():
    print(f'{k} -> {v}')
| [
"nkolew@gmail.com"
] | nkolew@gmail.com |
447eb1a1867e23d4dd3d1e0ba385ec65d16efe35 | 7dcd605564f63eb242003aa757d9e22a58c81b9a | /OpenCV_Python/Trackbar_bind_to_OpenCV_win.py | d5e6fd10236f5d13fd2b695cb0c3f6b0f2ba0790 | [] | no_license | satishrawat730/OpenCV | 70fa57cbbe930defc5fc88bed691ec62f2156129 | a930d2d6fb5a22400e45b2ff00df5215b277eca3 | refs/heads/master | 2022-11-20T18:50:51.888344 | 2020-07-26T08:04:35 | 2020-07-26T08:04:35 | 268,498,670 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,083 | py | '''
examples of TrackBar :
including using Trackbar as the Color Palette
get user input with OpenCV trackbars.
'''
import cv2 as cv
import numpy as np
def nothing(x):
    """Trackbar callback; OpenCV requires one, so just echo the new value."""
    print(x)
# create black image
img = np.zeros((300,512,3), np.uint8)
# namedWindow(winname, flags=None)
cv.namedWindow('image')
# createTrackbar(trackbarName, windowName, value, count, callback func)
cv.createTrackbar('B', 'image', 0, 255, nothing)
cv.createTrackbar('G', 'image', 0, 255, nothing)
cv.createTrackbar('R', 'image', 0, 255, nothing)
# A fourth trackbar doubles as an on/off switch for the colour fill.
switch = '0 : OFF\n 1 : ON'
cv.createTrackbar(switch, 'image', 0, 1, nothing)
while(True):
    cv.imshow('image', img)
    # ESC (key code 27) exits the loop.
    k = cv.waitKey(1) & 0xFF
    if k==27:
        break
    # getTrackbarPos(trackbarname, winname)
    # The function returns the current position of the specified trackbar
    b = cv.getTrackbarPos('B', 'image')
    g = cv.getTrackbarPos('G', 'image')
    r = cv.getTrackbarPos('R', 'image')
    s = cv.getTrackbarPos(switch, 'image')
    # Switch off: paint black; switch on: fill with the selected BGR colour.
    if s == 0:
        img[:] = 0
    else:
        img[:] = [b,g,r]
cv.destroyAllWindows()
| [
"satishrawat730@gmail.com"
] | satishrawat730@gmail.com |
fafc0114c286ee909152b1f42b45d295f24f7c64 | 68e548e5e4d9a9ffa71cfc691644ef8032537439 | /Projects/Bootcamp/(8)Con/animal.py | e03535638793e4e061c3fd0ebf7b1b9380fe39d4 | [] | no_license | Elemento24/Python3-Bootcamp | cd4529cae8508bff4c02d2180a87e9b1afb3aaf4 | fd4538a1ec77312925b333db4f0572c63274d77b | refs/heads/master | 2022-10-22T10:03:39.019364 | 2020-06-12T18:52:30 | 2020-06-12T18:52:30 | 271,868,118 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 142 | py | animal = input("Enter your fav animal - ")
# Non-empty input earns agreement; empty input earns a nudge.
if animal:
    reply = animal + " is my favorite too!"
else:
    reply = "You didn't say anything!"
print(reply)
| [
"mittalvishesh021@gmail.com"
] | mittalvishesh021@gmail.com |
6635c9e91f58f28952d46417bf7414b4f7167f97 | 4860fdddb247f261906cb0e38f415ba605578d06 | /project35/project35/settings.py | f1f803fcbc3bd9b2ac7cb8401624829052da8351 | [] | no_license | adi-23/WADprojectgroup35 | 176bec5eba05999a7e7c20cf103aec9e841a0b15 | 8c8fb8980ebc9a13fcaa48a78eb9b25f345d592d | refs/heads/main | 2023-04-22T14:51:02.918864 | 2021-05-10T07:23:18 | 2021-05-10T07:23:18 | 365,946,190 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,130 | py | """
Django settings for project35 project.
Generated by 'django-admin startproject' using Django 3.1.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control — rotate it and load
# it from the environment before any deployment.
SECRET_KEY = '_qyp+c7-h)=_ge9zukk(@7=fdc-9k1#bq206%%$s2bc&9#w#nq'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'Visitplace.apps.VisitplaceConfig',
'travel.apps.TravelConfig',
'authentication.apps.AuthenticationConfig',
'hotels.apps.HotelsConfig',
'restaurants.apps.RestaurantsConfig',
'hospitals.apps.HospitalsConfig',
'CinemaHalls.apps.CinemahallsConfig',
'ShoppingComplex.apps.ShoppingcomplexConfig',
'widget_tweaks',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_filters',
'crispy_forms',
'bootstrapform',
'shops.apps.ShopsConfig'
]
CRISPY_TEMPLATE_PACK = 'bootstrap4'
AUTH_USER_MODEL = 'authentication.User'
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'project35.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'project35.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'users',
        'USER': 'root',
        # NOTE(review): plaintext root credentials committed to the repo —
        # move these to environment variables and rotate the password.
        'PASSWORD': 'varshini23',
        'HOST': 'localhost',
        'PORT': '3306',
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL='/static/'
STATICFILES_DIRS=[
os.path.join(BASE_DIR, 'hotels/static/hotels'),
os.path.join(BASE_DIR,'hospitals/static/hospitals'),
os.path.join(BASE_DIR,'ShoppingComplex/static/ShoppingComplex'),
os.path.join(BASE_DIR,'CinemaHalls/static/CinemaHalls'),
]
STATIC_ROOT=os.path.join(BASE_DIR, 'hotels/assets')
MEDIA_URL='/media/'
MEDIA_ROOT=os.path.join(BASE_DIR,'media') | [
"vallinarasimhaswamy.k19@iiits.in"
] | vallinarasimhaswamy.k19@iiits.in |
3b34d116209b819b985bb04c1ef6bd279dc5b23a | a291a13a448bfaee5f130d01b5f0c421cc6d0c46 | /silky-squirrels/chat/models.py | c139c81f38e60d45f2c00e01ae2f3519521f37ce | [
"MIT",
"Python-2.0"
] | permissive | EdwardZhou538/summer-code-jam-2020 | 8e7fe1a22f7e742ba107e23918aaf82c97d643f9 | 89b68b9b080e6a9f5733bc4f1f045f91254fb28e | refs/heads/master | 2022-12-05T01:18:53.384200 | 2020-08-08T04:08:53 | 2020-08-08T04:08:53 | 285,943,330 | 0 | 0 | MIT | 2020-08-08T00:16:22 | 2020-08-08T00:16:22 | null | UTF-8 | Python | false | false | 923 | py | from django.contrib.auth.models import User
from django.db import models
from django.utils import timezone
class Room(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
class RoomMember(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
room = models.ForeignKey(Room, on_delete=models.CASCADE)
def __str__(self):
return f"room='{self.room}' user='{self.user}'"
class Message(models.Model):
room_member = models.ForeignKey(RoomMember, on_delete=models.CASCADE)
room = models.ForeignKey(Room, on_delete=models.CASCADE)
timestamp = models.DateTimeField(default=timezone.now)
text = models.CharField(max_length=1000)
def __str__(self):
return (
f"user='{self.room_member.user.username}' room='{self.room}' text='{self.text}' "
f"timestamp='{self.timestamp}'"
)
| [
"kmui2@wisc.edu"
] | kmui2@wisc.edu |
4dd78f82cf1a0f806669929f566eba7a2f515de4 | 57ebb9ac0419e80d7763fd65cde5425372ef2bfe | /data_clean.py | 978a13e3ceacae5160c52e47e733ea5a2d18b3e6 | [] | no_license | ZiqianXie/ECoG_analysis | ac090a84db2c6736f2fcbbb4faf6bf1bbd881485 | 5d947472a3788edb73a4e3192d27b7bfd6c778a3 | refs/heads/master | 2021-01-10T14:42:07.340627 | 2015-11-13T01:42:11 | 2015-11-13T01:42:11 | 43,315,952 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 684 | py | # -*- coding: utf-8 -*-
import numpy as np
from operator import add
def clean(X, thres=15, length=400, stride=40):
"""
X is of shape (time/samples, channels)
it calculates the square difference of the
original signal and reject the top 0.05 percentile.
return the row number of the remaining data matrix.
"""
dX2 = (X[1:, :]-X[:-1, :])**2
tmp = dX2 > np.percentile(dX2, 99.95, 0)
idx = np.unique((np.where(np.sum(tmp, 1) > thres)[0] + 1) / stride)
def r(x):
return range(max(0, x - length/stride + 1), x)
return [i for i in range((X.shape[0] - length)/stride + 1) if i not in
np.unique(reduce(add, map(r, idx), []))]
| [
"z.xie4@umiami.edu"
] | z.xie4@umiami.edu |
bf63fe697c539ec382672dc75ea18cf93dae240b | 71d4cc88c68f957a37a2db8234f8178ad2c1c769 | /graphgallery/data/npz_dataset.py | 8e769181796d82f0fa694a5ba370dd41a5b82c3e | [
"MIT"
] | permissive | MLDL/GraphGallery | 3159e0b8ddb1d2fa6b7cea4a27ba075f97db0a03 | 2474622286f135ca693c62981f5a4c4b31bcd2e6 | refs/heads/master | 2022-12-28T03:03:48.516408 | 2020-09-26T16:08:05 | 2020-09-26T16:08:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,080 | py | import os
import zipfile
import os.path as osp
import numpy as np
from graphgallery.data import Dataset
from graphgallery.data.io import makedirs, files_exist, download_file
from graphgallery.data.graph import Graph, load_dataset
_DATASETS = ('citeseer', 'cora', 'cora_ml', 'cora_full', 'amazon_cs', 'amazon_photo',
'coauthor_cs', 'coauthor_phy', 'polblogs', 'pubmed', 'flickr', 'flickr_sdm', 'blogcatalog')
class NPZDataset(Dataset):
github_url = "https://raw.githubusercontent.com/EdisonLeeeee/GraphData/master/datasets/npz/{}.npz"
supported_datasets = _DATASETS
def __init__(self, name, root=None, url=None, standardize=False, verbose=True):
if not name.lower() in self.supported_datasets:
print(f"Dataset not Found. Using custom dataset: {name}.\n")
super().__init__(name, root, verbose)
self._url = url
self.download_dir = osp.join(self.root, "npz")
self.standardize = standardize
makedirs(self.download_dir)
self.download()
self.process()
def download(self):
if files_exist(self.raw_paths):
print(f"Downloaded dataset files have existed.")
if self.verbose:
self.print_files(self.raw_paths)
return
self.print_files(self.raw_paths)
print("Downloading...")
download_file(self.raw_paths, self.urls)
if self.verbose:
self.print_files(self.raw_paths)
print("Downloading completed.")
def process(self):
print("Processing...")
graph = load_dataset(
self.raw_paths[0]).eliminate_selfloops().to_undirected()
if self.standardize:
graph = graph.standardize()
self.graph = graph
print("Processing completed.")
@property
def url(self):
if isinstance(self._url, str):
return self._url
else:
return self.github_url.format(self.name)
@property
def raw_paths(self):
return [f"{osp.join(self.download_dir, self.name)}.npz"]
| [
"cnljt@outlook.com"
] | cnljt@outlook.com |
2f3dc9b1091c9ce8bf8b3f455db007cea527544b | f55ed49e77f2983f9118a5228a0f6d777c4eac97 | /apps/beeswax/gen-py/TCLIService/ttypes.py | 78d1609bf3c6ecb2180d62fcf47162e7936e3d04 | [
"Apache-2.0"
] | permissive | mravi/hue | feb8543e1490fdbfdaff069c021ae168f72b28c6 | 1190bc41c560edf239c5dfc9689d25f3b4b3ab95 | refs/heads/master | 2020-12-25T21:55:41.294305 | 2013-11-07T11:49:05 | 2013-11-08T01:36:42 | 14,227,040 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | true | 169,041 | py | #
# Autogenerated by Thrift Compiler (0.9.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py:new_style
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class TProtocolVersion(object):
HIVE_CLI_SERVICE_PROTOCOL_V1 = 0
HIVE_CLI_SERVICE_PROTOCOL_V2 = 1
_VALUES_TO_NAMES = {
0: "HIVE_CLI_SERVICE_PROTOCOL_V1",
1: "HIVE_CLI_SERVICE_PROTOCOL_V2",
}
_NAMES_TO_VALUES = {
"HIVE_CLI_SERVICE_PROTOCOL_V1": 0,
"HIVE_CLI_SERVICE_PROTOCOL_V2": 1,
}
class TTypeId(object):
BOOLEAN_TYPE = 0
TINYINT_TYPE = 1
SMALLINT_TYPE = 2
INT_TYPE = 3
BIGINT_TYPE = 4
FLOAT_TYPE = 5
DOUBLE_TYPE = 6
STRING_TYPE = 7
TIMESTAMP_TYPE = 8
BINARY_TYPE = 9
ARRAY_TYPE = 10
MAP_TYPE = 11
STRUCT_TYPE = 12
UNION_TYPE = 13
USER_DEFINED_TYPE = 14
DECIMAL_TYPE = 15
NULL_TYPE = 16
DATE_TYPE = 17
_VALUES_TO_NAMES = {
0: "BOOLEAN_TYPE",
1: "TINYINT_TYPE",
2: "SMALLINT_TYPE",
3: "INT_TYPE",
4: "BIGINT_TYPE",
5: "FLOAT_TYPE",
6: "DOUBLE_TYPE",
7: "STRING_TYPE",
8: "TIMESTAMP_TYPE",
9: "BINARY_TYPE",
10: "ARRAY_TYPE",
11: "MAP_TYPE",
12: "STRUCT_TYPE",
13: "UNION_TYPE",
14: "USER_DEFINED_TYPE",
15: "DECIMAL_TYPE",
16: "NULL_TYPE",
17: "DATE_TYPE",
}
_NAMES_TO_VALUES = {
"BOOLEAN_TYPE": 0,
"TINYINT_TYPE": 1,
"SMALLINT_TYPE": 2,
"INT_TYPE": 3,
"BIGINT_TYPE": 4,
"FLOAT_TYPE": 5,
"DOUBLE_TYPE": 6,
"STRING_TYPE": 7,
"TIMESTAMP_TYPE": 8,
"BINARY_TYPE": 9,
"ARRAY_TYPE": 10,
"MAP_TYPE": 11,
"STRUCT_TYPE": 12,
"UNION_TYPE": 13,
"USER_DEFINED_TYPE": 14,
"DECIMAL_TYPE": 15,
"NULL_TYPE": 16,
"DATE_TYPE": 17,
}
class TStatusCode(object):
SUCCESS_STATUS = 0
SUCCESS_WITH_INFO_STATUS = 1
STILL_EXECUTING_STATUS = 2
ERROR_STATUS = 3
INVALID_HANDLE_STATUS = 4
_VALUES_TO_NAMES = {
0: "SUCCESS_STATUS",
1: "SUCCESS_WITH_INFO_STATUS",
2: "STILL_EXECUTING_STATUS",
3: "ERROR_STATUS",
4: "INVALID_HANDLE_STATUS",
}
_NAMES_TO_VALUES = {
"SUCCESS_STATUS": 0,
"SUCCESS_WITH_INFO_STATUS": 1,
"STILL_EXECUTING_STATUS": 2,
"ERROR_STATUS": 3,
"INVALID_HANDLE_STATUS": 4,
}
class TOperationState(object):
INITIALIZED_STATE = 0
RUNNING_STATE = 1
FINISHED_STATE = 2
CANCELED_STATE = 3
CLOSED_STATE = 4
ERROR_STATE = 5
UKNOWN_STATE = 6
PENDING_STATE = 7
_VALUES_TO_NAMES = {
0: "INITIALIZED_STATE",
1: "RUNNING_STATE",
2: "FINISHED_STATE",
3: "CANCELED_STATE",
4: "CLOSED_STATE",
5: "ERROR_STATE",
6: "UKNOWN_STATE",
7: "PENDING_STATE",
}
_NAMES_TO_VALUES = {
"INITIALIZED_STATE": 0,
"RUNNING_STATE": 1,
"FINISHED_STATE": 2,
"CANCELED_STATE": 3,
"CLOSED_STATE": 4,
"ERROR_STATE": 5,
"UKNOWN_STATE": 6,
"PENDING_STATE": 7,
}
class TOperationType(object):
EXECUTE_STATEMENT = 0
GET_TYPE_INFO = 1
GET_CATALOGS = 2
GET_SCHEMAS = 3
GET_TABLES = 4
GET_TABLE_TYPES = 5
GET_COLUMNS = 6
GET_FUNCTIONS = 7
UNKNOWN = 8
_VALUES_TO_NAMES = {
0: "EXECUTE_STATEMENT",
1: "GET_TYPE_INFO",
2: "GET_CATALOGS",
3: "GET_SCHEMAS",
4: "GET_TABLES",
5: "GET_TABLE_TYPES",
6: "GET_COLUMNS",
7: "GET_FUNCTIONS",
8: "UNKNOWN",
}
_NAMES_TO_VALUES = {
"EXECUTE_STATEMENT": 0,
"GET_TYPE_INFO": 1,
"GET_CATALOGS": 2,
"GET_SCHEMAS": 3,
"GET_TABLES": 4,
"GET_TABLE_TYPES": 5,
"GET_COLUMNS": 6,
"GET_FUNCTIONS": 7,
"UNKNOWN": 8,
}
class TGetInfoType(object):
CLI_MAX_DRIVER_CONNECTIONS = 0
CLI_MAX_CONCURRENT_ACTIVITIES = 1
CLI_DATA_SOURCE_NAME = 2
CLI_FETCH_DIRECTION = 8
CLI_SERVER_NAME = 13
CLI_SEARCH_PATTERN_ESCAPE = 14
CLI_DBMS_NAME = 17
CLI_DBMS_VER = 18
CLI_ACCESSIBLE_TABLES = 19
CLI_ACCESSIBLE_PROCEDURES = 20
CLI_CURSOR_COMMIT_BEHAVIOR = 23
CLI_DATA_SOURCE_READ_ONLY = 25
CLI_DEFAULT_TXN_ISOLATION = 26
CLI_IDENTIFIER_CASE = 28
CLI_IDENTIFIER_QUOTE_CHAR = 29
CLI_MAX_COLUMN_NAME_LEN = 30
CLI_MAX_CURSOR_NAME_LEN = 31
CLI_MAX_SCHEMA_NAME_LEN = 32
CLI_MAX_CATALOG_NAME_LEN = 34
CLI_MAX_TABLE_NAME_LEN = 35
CLI_SCROLL_CONCURRENCY = 43
CLI_TXN_CAPABLE = 46
CLI_USER_NAME = 47
CLI_TXN_ISOLATION_OPTION = 72
CLI_INTEGRITY = 73
CLI_GETDATA_EXTENSIONS = 81
CLI_NULL_COLLATION = 85
CLI_ALTER_TABLE = 86
CLI_ORDER_BY_COLUMNS_IN_SELECT = 90
CLI_SPECIAL_CHARACTERS = 94
CLI_MAX_COLUMNS_IN_GROUP_BY = 97
CLI_MAX_COLUMNS_IN_INDEX = 98
CLI_MAX_COLUMNS_IN_ORDER_BY = 99
CLI_MAX_COLUMNS_IN_SELECT = 100
CLI_MAX_COLUMNS_IN_TABLE = 101
CLI_MAX_INDEX_SIZE = 102
CLI_MAX_ROW_SIZE = 104
CLI_MAX_STATEMENT_LEN = 105
CLI_MAX_TABLES_IN_SELECT = 106
CLI_MAX_USER_NAME_LEN = 107
CLI_OJ_CAPABILITIES = 115
CLI_XOPEN_CLI_YEAR = 10000
CLI_CURSOR_SENSITIVITY = 10001
CLI_DESCRIBE_PARAMETER = 10002
CLI_CATALOG_NAME = 10003
CLI_COLLATION_SEQ = 10004
CLI_MAX_IDENTIFIER_LEN = 10005
_VALUES_TO_NAMES = {
0: "CLI_MAX_DRIVER_CONNECTIONS",
1: "CLI_MAX_CONCURRENT_ACTIVITIES",
2: "CLI_DATA_SOURCE_NAME",
8: "CLI_FETCH_DIRECTION",
13: "CLI_SERVER_NAME",
14: "CLI_SEARCH_PATTERN_ESCAPE",
17: "CLI_DBMS_NAME",
18: "CLI_DBMS_VER",
19: "CLI_ACCESSIBLE_TABLES",
20: "CLI_ACCESSIBLE_PROCEDURES",
23: "CLI_CURSOR_COMMIT_BEHAVIOR",
25: "CLI_DATA_SOURCE_READ_ONLY",
26: "CLI_DEFAULT_TXN_ISOLATION",
28: "CLI_IDENTIFIER_CASE",
29: "CLI_IDENTIFIER_QUOTE_CHAR",
30: "CLI_MAX_COLUMN_NAME_LEN",
31: "CLI_MAX_CURSOR_NAME_LEN",
32: "CLI_MAX_SCHEMA_NAME_LEN",
34: "CLI_MAX_CATALOG_NAME_LEN",
35: "CLI_MAX_TABLE_NAME_LEN",
43: "CLI_SCROLL_CONCURRENCY",
46: "CLI_TXN_CAPABLE",
47: "CLI_USER_NAME",
72: "CLI_TXN_ISOLATION_OPTION",
73: "CLI_INTEGRITY",
81: "CLI_GETDATA_EXTENSIONS",
85: "CLI_NULL_COLLATION",
86: "CLI_ALTER_TABLE",
90: "CLI_ORDER_BY_COLUMNS_IN_SELECT",
94: "CLI_SPECIAL_CHARACTERS",
97: "CLI_MAX_COLUMNS_IN_GROUP_BY",
98: "CLI_MAX_COLUMNS_IN_INDEX",
99: "CLI_MAX_COLUMNS_IN_ORDER_BY",
100: "CLI_MAX_COLUMNS_IN_SELECT",
101: "CLI_MAX_COLUMNS_IN_TABLE",
102: "CLI_MAX_INDEX_SIZE",
104: "CLI_MAX_ROW_SIZE",
105: "CLI_MAX_STATEMENT_LEN",
106: "CLI_MAX_TABLES_IN_SELECT",
107: "CLI_MAX_USER_NAME_LEN",
115: "CLI_OJ_CAPABILITIES",
10000: "CLI_XOPEN_CLI_YEAR",
10001: "CLI_CURSOR_SENSITIVITY",
10002: "CLI_DESCRIBE_PARAMETER",
10003: "CLI_CATALOG_NAME",
10004: "CLI_COLLATION_SEQ",
10005: "CLI_MAX_IDENTIFIER_LEN",
}
_NAMES_TO_VALUES = {
"CLI_MAX_DRIVER_CONNECTIONS": 0,
"CLI_MAX_CONCURRENT_ACTIVITIES": 1,
"CLI_DATA_SOURCE_NAME": 2,
"CLI_FETCH_DIRECTION": 8,
"CLI_SERVER_NAME": 13,
"CLI_SEARCH_PATTERN_ESCAPE": 14,
"CLI_DBMS_NAME": 17,
"CLI_DBMS_VER": 18,
"CLI_ACCESSIBLE_TABLES": 19,
"CLI_ACCESSIBLE_PROCEDURES": 20,
"CLI_CURSOR_COMMIT_BEHAVIOR": 23,
"CLI_DATA_SOURCE_READ_ONLY": 25,
"CLI_DEFAULT_TXN_ISOLATION": 26,
"CLI_IDENTIFIER_CASE": 28,
"CLI_IDENTIFIER_QUOTE_CHAR": 29,
"CLI_MAX_COLUMN_NAME_LEN": 30,
"CLI_MAX_CURSOR_NAME_LEN": 31,
"CLI_MAX_SCHEMA_NAME_LEN": 32,
"CLI_MAX_CATALOG_NAME_LEN": 34,
"CLI_MAX_TABLE_NAME_LEN": 35,
"CLI_SCROLL_CONCURRENCY": 43,
"CLI_TXN_CAPABLE": 46,
"CLI_USER_NAME": 47,
"CLI_TXN_ISOLATION_OPTION": 72,
"CLI_INTEGRITY": 73,
"CLI_GETDATA_EXTENSIONS": 81,
"CLI_NULL_COLLATION": 85,
"CLI_ALTER_TABLE": 86,
"CLI_ORDER_BY_COLUMNS_IN_SELECT": 90,
"CLI_SPECIAL_CHARACTERS": 94,
"CLI_MAX_COLUMNS_IN_GROUP_BY": 97,
"CLI_MAX_COLUMNS_IN_INDEX": 98,
"CLI_MAX_COLUMNS_IN_ORDER_BY": 99,
"CLI_MAX_COLUMNS_IN_SELECT": 100,
"CLI_MAX_COLUMNS_IN_TABLE": 101,
"CLI_MAX_INDEX_SIZE": 102,
"CLI_MAX_ROW_SIZE": 104,
"CLI_MAX_STATEMENT_LEN": 105,
"CLI_MAX_TABLES_IN_SELECT": 106,
"CLI_MAX_USER_NAME_LEN": 107,
"CLI_OJ_CAPABILITIES": 115,
"CLI_XOPEN_CLI_YEAR": 10000,
"CLI_CURSOR_SENSITIVITY": 10001,
"CLI_DESCRIBE_PARAMETER": 10002,
"CLI_CATALOG_NAME": 10003,
"CLI_COLLATION_SEQ": 10004,
"CLI_MAX_IDENTIFIER_LEN": 10005,
}
class TFetchOrientation(object):
FETCH_NEXT = 0
FETCH_PRIOR = 1
FETCH_RELATIVE = 2
FETCH_ABSOLUTE = 3
FETCH_FIRST = 4
FETCH_LAST = 5
_VALUES_TO_NAMES = {
0: "FETCH_NEXT",
1: "FETCH_PRIOR",
2: "FETCH_RELATIVE",
3: "FETCH_ABSOLUTE",
4: "FETCH_FIRST",
5: "FETCH_LAST",
}
_NAMES_TO_VALUES = {
"FETCH_NEXT": 0,
"FETCH_PRIOR": 1,
"FETCH_RELATIVE": 2,
"FETCH_ABSOLUTE": 3,
"FETCH_FIRST": 4,
"FETCH_LAST": 5,
}
class TPrimitiveTypeEntry(object):
"""
Attributes:
- type
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'type', None, None, ), # 1
)
def __init__(self, type=None,):
self.type = type
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.type = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TPrimitiveTypeEntry')
if self.type is not None:
oprot.writeFieldBegin('type', TType.I32, 1)
oprot.writeI32(self.type)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.type is None:
raise TProtocol.TProtocolException(message='Required field type is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TArrayTypeEntry(object):
"""
Attributes:
- objectTypePtr
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'objectTypePtr', None, None, ), # 1
)
def __init__(self, objectTypePtr=None,):
self.objectTypePtr = objectTypePtr
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.objectTypePtr = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TArrayTypeEntry')
if self.objectTypePtr is not None:
oprot.writeFieldBegin('objectTypePtr', TType.I32, 1)
oprot.writeI32(self.objectTypePtr)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.objectTypePtr is None:
raise TProtocol.TProtocolException(message='Required field objectTypePtr is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TMapTypeEntry(object):
"""
Attributes:
- keyTypePtr
- valueTypePtr
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'keyTypePtr', None, None, ), # 1
(2, TType.I32, 'valueTypePtr', None, None, ), # 2
)
def __init__(self, keyTypePtr=None, valueTypePtr=None,):
self.keyTypePtr = keyTypePtr
self.valueTypePtr = valueTypePtr
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.keyTypePtr = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.valueTypePtr = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TMapTypeEntry')
if self.keyTypePtr is not None:
oprot.writeFieldBegin('keyTypePtr', TType.I32, 1)
oprot.writeI32(self.keyTypePtr)
oprot.writeFieldEnd()
if self.valueTypePtr is not None:
oprot.writeFieldBegin('valueTypePtr', TType.I32, 2)
oprot.writeI32(self.valueTypePtr)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.keyTypePtr is None:
raise TProtocol.TProtocolException(message='Required field keyTypePtr is unset!')
if self.valueTypePtr is None:
raise TProtocol.TProtocolException(message='Required field valueTypePtr is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TStructTypeEntry(object):
"""
Attributes:
- nameToTypePtr
"""
thrift_spec = (
None, # 0
(1, TType.MAP, 'nameToTypePtr', (TType.STRING,None,TType.I32,None), None, ), # 1
)
def __init__(self, nameToTypePtr=None,):
self.nameToTypePtr = nameToTypePtr
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.MAP:
self.nameToTypePtr = {}
(_ktype1, _vtype2, _size0 ) = iprot.readMapBegin()
for _i4 in xrange(_size0):
_key5 = iprot.readString();
_val6 = iprot.readI32();
self.nameToTypePtr[_key5] = _val6
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TStructTypeEntry')
if self.nameToTypePtr is not None:
oprot.writeFieldBegin('nameToTypePtr', TType.MAP, 1)
oprot.writeMapBegin(TType.STRING, TType.I32, len(self.nameToTypePtr))
for kiter7,viter8 in self.nameToTypePtr.items():
oprot.writeString(kiter7)
oprot.writeI32(viter8)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.nameToTypePtr is None:
raise TProtocol.TProtocolException(message='Required field nameToTypePtr is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TUnionTypeEntry(object):
"""
Attributes:
- nameToTypePtr
"""
thrift_spec = (
None, # 0
(1, TType.MAP, 'nameToTypePtr', (TType.STRING,None,TType.I32,None), None, ), # 1
)
def __init__(self, nameToTypePtr=None,):
self.nameToTypePtr = nameToTypePtr
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.MAP:
self.nameToTypePtr = {}
(_ktype10, _vtype11, _size9 ) = iprot.readMapBegin()
for _i13 in xrange(_size9):
_key14 = iprot.readString();
_val15 = iprot.readI32();
self.nameToTypePtr[_key14] = _val15
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TUnionTypeEntry')
if self.nameToTypePtr is not None:
oprot.writeFieldBegin('nameToTypePtr', TType.MAP, 1)
oprot.writeMapBegin(TType.STRING, TType.I32, len(self.nameToTypePtr))
for kiter16,viter17 in self.nameToTypePtr.items():
oprot.writeString(kiter16)
oprot.writeI32(viter17)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.nameToTypePtr is None:
raise TProtocol.TProtocolException(message='Required field nameToTypePtr is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TUserDefinedTypeEntry(object):
"""
Attributes:
- typeClassName
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'typeClassName', None, None, ), # 1
)
def __init__(self, typeClassName=None,):
self.typeClassName = typeClassName
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.typeClassName = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TUserDefinedTypeEntry')
if self.typeClassName is not None:
oprot.writeFieldBegin('typeClassName', TType.STRING, 1)
oprot.writeString(self.typeClassName)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.typeClassName is None:
raise TProtocol.TProtocolException(message='Required field typeClassName is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TTypeEntry(object):
"""
Attributes:
- primitiveEntry
- arrayEntry
- mapEntry
- structEntry
- unionEntry
- userDefinedTypeEntry
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'primitiveEntry', (TPrimitiveTypeEntry, TPrimitiveTypeEntry.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'arrayEntry', (TArrayTypeEntry, TArrayTypeEntry.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'mapEntry', (TMapTypeEntry, TMapTypeEntry.thrift_spec), None, ), # 3
(4, TType.STRUCT, 'structEntry', (TStructTypeEntry, TStructTypeEntry.thrift_spec), None, ), # 4
(5, TType.STRUCT, 'unionEntry', (TUnionTypeEntry, TUnionTypeEntry.thrift_spec), None, ), # 5
(6, TType.STRUCT, 'userDefinedTypeEntry', (TUserDefinedTypeEntry, TUserDefinedTypeEntry.thrift_spec), None, ), # 6
)
def __init__(self, primitiveEntry=None, arrayEntry=None, mapEntry=None, structEntry=None, unionEntry=None, userDefinedTypeEntry=None,):
self.primitiveEntry = primitiveEntry
self.arrayEntry = arrayEntry
self.mapEntry = mapEntry
self.structEntry = structEntry
self.unionEntry = unionEntry
self.userDefinedTypeEntry = userDefinedTypeEntry
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.primitiveEntry = TPrimitiveTypeEntry()
self.primitiveEntry.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.arrayEntry = TArrayTypeEntry()
self.arrayEntry.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.mapEntry = TMapTypeEntry()
self.mapEntry.read(iprot)
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.structEntry = TStructTypeEntry()
self.structEntry.read(iprot)
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRUCT:
self.unionEntry = TUnionTypeEntry()
self.unionEntry.read(iprot)
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.STRUCT:
self.userDefinedTypeEntry = TUserDefinedTypeEntry()
self.userDefinedTypeEntry.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TTypeEntry')
if self.primitiveEntry is not None:
oprot.writeFieldBegin('primitiveEntry', TType.STRUCT, 1)
self.primitiveEntry.write(oprot)
oprot.writeFieldEnd()
if self.arrayEntry is not None:
oprot.writeFieldBegin('arrayEntry', TType.STRUCT, 2)
self.arrayEntry.write(oprot)
oprot.writeFieldEnd()
if self.mapEntry is not None:
oprot.writeFieldBegin('mapEntry', TType.STRUCT, 3)
self.mapEntry.write(oprot)
oprot.writeFieldEnd()
if self.structEntry is not None:
oprot.writeFieldBegin('structEntry', TType.STRUCT, 4)
self.structEntry.write(oprot)
oprot.writeFieldEnd()
if self.unionEntry is not None:
oprot.writeFieldBegin('unionEntry', TType.STRUCT, 5)
self.unionEntry.write(oprot)
oprot.writeFieldEnd()
if self.userDefinedTypeEntry is not None:
oprot.writeFieldBegin('userDefinedTypeEntry', TType.STRUCT, 6)
self.userDefinedTypeEntry.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TTypeDesc(object):
"""
Attributes:
- types
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'types', (TType.STRUCT,(TTypeEntry, TTypeEntry.thrift_spec)), None, ), # 1
)
def __init__(self, types=None,):
self.types = types
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.types = []
(_etype21, _size18) = iprot.readListBegin()
for _i22 in xrange(_size18):
_elem23 = TTypeEntry()
_elem23.read(iprot)
self.types.append(_elem23)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TTypeDesc')
if self.types is not None:
oprot.writeFieldBegin('types', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.types))
for iter24 in self.types:
iter24.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.types is None:
raise TProtocol.TProtocolException(message='Required field types is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TColumnDesc(object):
"""
Attributes:
- columnName
- typeDesc
- position
- comment
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'columnName', None, None, ), # 1
(2, TType.STRUCT, 'typeDesc', (TTypeDesc, TTypeDesc.thrift_spec), None, ), # 2
(3, TType.I32, 'position', None, None, ), # 3
(4, TType.STRING, 'comment', None, None, ), # 4
)
def __init__(self, columnName=None, typeDesc=None, position=None, comment=None,):
self.columnName = columnName
self.typeDesc = typeDesc
self.position = position
self.comment = comment
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.columnName = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.typeDesc = TTypeDesc()
self.typeDesc.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I32:
self.position = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.comment = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TColumnDesc')
if self.columnName is not None:
oprot.writeFieldBegin('columnName', TType.STRING, 1)
oprot.writeString(self.columnName)
oprot.writeFieldEnd()
if self.typeDesc is not None:
oprot.writeFieldBegin('typeDesc', TType.STRUCT, 2)
self.typeDesc.write(oprot)
oprot.writeFieldEnd()
if self.position is not None:
oprot.writeFieldBegin('position', TType.I32, 3)
oprot.writeI32(self.position)
oprot.writeFieldEnd()
if self.comment is not None:
oprot.writeFieldBegin('comment', TType.STRING, 4)
oprot.writeString(self.comment)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.columnName is None:
raise TProtocol.TProtocolException(message='Required field columnName is unset!')
if self.typeDesc is None:
raise TProtocol.TProtocolException(message='Required field typeDesc is unset!')
if self.position is None:
raise TProtocol.TProtocolException(message='Required field position is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TTableSchema(object):
  """Thrift struct: the schema of a result set, as a list of TColumnDesc.

  Attributes:
   - columns
  """

  thrift_spec = (
    None, # 0
    (1, TType.LIST, 'columns', (TType.STRUCT,(TColumnDesc, TColumnDesc.thrift_spec)), None, ), # 1
  )

  def __init__(self, columns=None,):
    self.columns = columns

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unknown fields."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.LIST:
          self.columns = []
          (_etype28, _size25) = iprot.readListBegin()
          # range() instead of Python-2-only xrange() keeps this module
          # importable on both Python 2 and Python 3.
          for _i29 in range(_size25):
            _elem30 = TColumnDesc()
            _elem30.read(iprot)
            self.columns.append(_elem30)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TTableSchema')
    if self.columns is not None:
      oprot.writeFieldBegin('columns', TType.LIST, 1)
      oprot.writeListBegin(TType.STRUCT, len(self.columns))
      for iter31 in self.columns:
        iter31.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if the required 'columns' field is unset."""
    if self.columns is None:
      raise TProtocol.TProtocolException(message='Required field columns is unset!')
    return

  def __repr__(self):
    # .items() instead of Python-2-only .iteritems() (Py2/Py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TBoolValue(object):
  """Thrift struct: a nullable BOOL column value (None means SQL NULL).

  Attributes:
   - value
  """

  thrift_spec = (
    None, # 0
    (1, TType.BOOL, 'value', None, None, ), # 1
  )

  def __init__(self, value=None,):
    self.value = value

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unknown fields."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.BOOL:
          self.value = iprot.readBool()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; a None value is omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TBoolValue')
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.BOOL, 1)
      oprot.writeBool(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields; always valid.
    return

  def __repr__(self):
    # .items() instead of Python-2-only .iteritems() (Py2/Py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TByteValue(object):
  """Thrift struct: a nullable BYTE column value (None means SQL NULL).

  Attributes:
   - value
  """

  thrift_spec = (
    None, # 0
    (1, TType.BYTE, 'value', None, None, ), # 1
  )

  def __init__(self, value=None,):
    self.value = value

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unknown fields."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.BYTE:
          self.value = iprot.readByte()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; a None value is omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TByteValue')
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.BYTE, 1)
      oprot.writeByte(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields; always valid.
    return

  def __repr__(self):
    # .items() instead of Python-2-only .iteritems() (Py2/Py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TI16Value(object):
  """Thrift struct: a nullable I16 column value (None means SQL NULL).

  Attributes:
   - value
  """

  thrift_spec = (
    None, # 0
    (1, TType.I16, 'value', None, None, ), # 1
  )

  def __init__(self, value=None,):
    self.value = value

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unknown fields."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I16:
          self.value = iprot.readI16()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; a None value is omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TI16Value')
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.I16, 1)
      oprot.writeI16(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields; always valid.
    return

  def __repr__(self):
    # .items() instead of Python-2-only .iteritems() (Py2/Py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TI32Value(object):
  """Thrift struct: a nullable I32 column value (None means SQL NULL).

  Attributes:
   - value
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'value', None, None, ), # 1
  )

  def __init__(self, value=None,):
    self.value = value

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unknown fields."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.value = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; a None value is omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TI32Value')
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.I32, 1)
      oprot.writeI32(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields; always valid.
    return

  def __repr__(self):
    # .items() instead of Python-2-only .iteritems() (Py2/Py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TI64Value(object):
  """Thrift struct: a nullable I64 column value (None means SQL NULL).

  Attributes:
   - value
  """

  thrift_spec = (
    None, # 0
    (1, TType.I64, 'value', None, None, ), # 1
  )

  def __init__(self, value=None,):
    self.value = value

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unknown fields."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I64:
          self.value = iprot.readI64()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; a None value is omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TI64Value')
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.I64, 1)
      oprot.writeI64(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields; always valid.
    return

  def __repr__(self):
    # .items() instead of Python-2-only .iteritems() (Py2/Py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TDoubleValue(object):
  """Thrift struct: a nullable DOUBLE column value (None means SQL NULL).

  Attributes:
   - value
  """

  thrift_spec = (
    None, # 0
    (1, TType.DOUBLE, 'value', None, None, ), # 1
  )

  def __init__(self, value=None,):
    self.value = value

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unknown fields."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.DOUBLE:
          self.value = iprot.readDouble()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; a None value is omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TDoubleValue')
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.DOUBLE, 1)
      oprot.writeDouble(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields; always valid.
    return

  def __repr__(self):
    # .items() instead of Python-2-only .iteritems() (Py2/Py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TStringValue(object):
  """Thrift struct: a nullable STRING column value (None means SQL NULL).

  Attributes:
   - value
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'value', None, None, ), # 1
  )

  def __init__(self, value=None,):
    self.value = value

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unknown fields."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.value = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; a None value is omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TStringValue')
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.STRING, 1)
      oprot.writeString(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields; always valid.
    return

  def __repr__(self):
    # .items() instead of Python-2-only .iteritems() (Py2/Py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TColumn(object):
  """Thrift union-like struct: one column of a row set, column-oriented.

  Exactly one of the per-type lists is expected to be populated; the
  others remain None.

  Attributes:
   - boolColumn
   - byteColumn
   - i16Column
   - i32Column
   - i64Column
   - doubleColumn
   - stringColumn
  """

  thrift_spec = (
    None, # 0
    (1, TType.LIST, 'boolColumn', (TType.STRUCT,(TBoolValue, TBoolValue.thrift_spec)), None, ), # 1
    (2, TType.LIST, 'byteColumn', (TType.STRUCT,(TByteValue, TByteValue.thrift_spec)), None, ), # 2
    (3, TType.LIST, 'i16Column', (TType.STRUCT,(TI16Value, TI16Value.thrift_spec)), None, ), # 3
    (4, TType.LIST, 'i32Column', (TType.STRUCT,(TI32Value, TI32Value.thrift_spec)), None, ), # 4
    (5, TType.LIST, 'i64Column', (TType.STRUCT,(TI64Value, TI64Value.thrift_spec)), None, ), # 5
    (6, TType.LIST, 'doubleColumn', (TType.STRUCT,(TDoubleValue, TDoubleValue.thrift_spec)), None, ), # 6
    (7, TType.LIST, 'stringColumn', (TType.STRUCT,(TStringValue, TStringValue.thrift_spec)), None, ), # 7
  )

  def __init__(self, boolColumn=None, byteColumn=None, i16Column=None, i32Column=None, i64Column=None, doubleColumn=None, stringColumn=None,):
    self.boolColumn = boolColumn
    self.byteColumn = byteColumn
    self.i16Column = i16Column
    self.i32Column = i32Column
    self.i64Column = i64Column
    self.doubleColumn = doubleColumn
    self.stringColumn = stringColumn

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unknown fields."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # range() instead of Python-2-only xrange() keeps this module
    # importable on both Python 2 and Python 3.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.LIST:
          self.boolColumn = []
          (_etype35, _size32) = iprot.readListBegin()
          for _i36 in range(_size32):
            _elem37 = TBoolValue()
            _elem37.read(iprot)
            self.boolColumn.append(_elem37)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.LIST:
          self.byteColumn = []
          (_etype41, _size38) = iprot.readListBegin()
          for _i42 in range(_size38):
            _elem43 = TByteValue()
            _elem43.read(iprot)
            self.byteColumn.append(_elem43)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.LIST:
          self.i16Column = []
          (_etype47, _size44) = iprot.readListBegin()
          for _i48 in range(_size44):
            _elem49 = TI16Value()
            _elem49.read(iprot)
            self.i16Column.append(_elem49)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.LIST:
          self.i32Column = []
          (_etype53, _size50) = iprot.readListBegin()
          for _i54 in range(_size50):
            _elem55 = TI32Value()
            _elem55.read(iprot)
            self.i32Column.append(_elem55)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.LIST:
          self.i64Column = []
          (_etype59, _size56) = iprot.readListBegin()
          for _i60 in range(_size56):
            _elem61 = TI64Value()
            _elem61.read(iprot)
            self.i64Column.append(_elem61)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.LIST:
          self.doubleColumn = []
          (_etype65, _size62) = iprot.readListBegin()
          for _i66 in range(_size62):
            _elem67 = TDoubleValue()
            _elem67.read(iprot)
            self.doubleColumn.append(_elem67)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 7:
        if ftype == TType.LIST:
          self.stringColumn = []
          (_etype71, _size68) = iprot.readListBegin()
          for _i72 in range(_size68):
            _elem73 = TStringValue()
            _elem73.read(iprot)
            self.stringColumn.append(_elem73)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TColumn')
    if self.boolColumn is not None:
      oprot.writeFieldBegin('boolColumn', TType.LIST, 1)
      oprot.writeListBegin(TType.STRUCT, len(self.boolColumn))
      for iter74 in self.boolColumn:
        iter74.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.byteColumn is not None:
      oprot.writeFieldBegin('byteColumn', TType.LIST, 2)
      oprot.writeListBegin(TType.STRUCT, len(self.byteColumn))
      for iter75 in self.byteColumn:
        iter75.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.i16Column is not None:
      oprot.writeFieldBegin('i16Column', TType.LIST, 3)
      oprot.writeListBegin(TType.STRUCT, len(self.i16Column))
      for iter76 in self.i16Column:
        iter76.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.i32Column is not None:
      oprot.writeFieldBegin('i32Column', TType.LIST, 4)
      oprot.writeListBegin(TType.STRUCT, len(self.i32Column))
      for iter77 in self.i32Column:
        iter77.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.i64Column is not None:
      oprot.writeFieldBegin('i64Column', TType.LIST, 5)
      oprot.writeListBegin(TType.STRUCT, len(self.i64Column))
      for iter78 in self.i64Column:
        iter78.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.doubleColumn is not None:
      oprot.writeFieldBegin('doubleColumn', TType.LIST, 6)
      oprot.writeListBegin(TType.STRUCT, len(self.doubleColumn))
      for iter79 in self.doubleColumn:
        iter79.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.stringColumn is not None:
      oprot.writeFieldBegin('stringColumn', TType.LIST, 7)
      oprot.writeListBegin(TType.STRUCT, len(self.stringColumn))
      for iter80 in self.stringColumn:
        iter80.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields; always valid.
    return

  def __repr__(self):
    # .items() instead of Python-2-only .iteritems() (Py2/Py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TColumnValue(object):
  """Thrift union-like struct: a single cell value in a row.

  Exactly one of the typed wrappers is expected to be set; the others
  remain None.

  Attributes:
   - boolVal
   - byteVal
   - i16Val
   - i32Val
   - i64Val
   - doubleVal
   - stringVal
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'boolVal', (TBoolValue, TBoolValue.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'byteVal', (TByteValue, TByteValue.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'i16Val', (TI16Value, TI16Value.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'i32Val', (TI32Value, TI32Value.thrift_spec), None, ), # 4
    (5, TType.STRUCT, 'i64Val', (TI64Value, TI64Value.thrift_spec), None, ), # 5
    (6, TType.STRUCT, 'doubleVal', (TDoubleValue, TDoubleValue.thrift_spec), None, ), # 6
    (7, TType.STRUCT, 'stringVal', (TStringValue, TStringValue.thrift_spec), None, ), # 7
  )

  def __init__(self, boolVal=None, byteVal=None, i16Val=None, i32Val=None, i64Val=None, doubleVal=None, stringVal=None,):
    self.boolVal = boolVal
    self.byteVal = byteVal
    self.i16Val = i16Val
    self.i32Val = i32Val
    self.i64Val = i64Val
    self.doubleVal = doubleVal
    self.stringVal = stringVal

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unknown fields."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.boolVal = TBoolValue()
          self.boolVal.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.byteVal = TByteValue()
          self.byteVal.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.i16Val = TI16Value()
          self.i16Val.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.i32Val = TI32Value()
          self.i32Val.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRUCT:
          self.i64Val = TI64Value()
          self.i64Val.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.STRUCT:
          self.doubleVal = TDoubleValue()
          self.doubleVal.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 7:
        if ftype == TType.STRUCT:
          self.stringVal = TStringValue()
          self.stringVal.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TColumnValue')
    if self.boolVal is not None:
      oprot.writeFieldBegin('boolVal', TType.STRUCT, 1)
      self.boolVal.write(oprot)
      oprot.writeFieldEnd()
    if self.byteVal is not None:
      oprot.writeFieldBegin('byteVal', TType.STRUCT, 2)
      self.byteVal.write(oprot)
      oprot.writeFieldEnd()
    if self.i16Val is not None:
      oprot.writeFieldBegin('i16Val', TType.STRUCT, 3)
      self.i16Val.write(oprot)
      oprot.writeFieldEnd()
    if self.i32Val is not None:
      oprot.writeFieldBegin('i32Val', TType.STRUCT, 4)
      self.i32Val.write(oprot)
      oprot.writeFieldEnd()
    if self.i64Val is not None:
      oprot.writeFieldBegin('i64Val', TType.STRUCT, 5)
      self.i64Val.write(oprot)
      oprot.writeFieldEnd()
    if self.doubleVal is not None:
      oprot.writeFieldBegin('doubleVal', TType.STRUCT, 6)
      self.doubleVal.write(oprot)
      oprot.writeFieldEnd()
    if self.stringVal is not None:
      oprot.writeFieldBegin('stringVal', TType.STRUCT, 7)
      self.stringVal.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields; always valid.
    return

  def __repr__(self):
    # .items() instead of Python-2-only .iteritems() (Py2/Py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TRow(object):
  """Thrift struct: a single result row, as a list of TColumnValue cells.

  Attributes:
   - colVals
  """

  thrift_spec = (
    None, # 0
    (1, TType.LIST, 'colVals', (TType.STRUCT,(TColumnValue, TColumnValue.thrift_spec)), None, ), # 1
  )

  def __init__(self, colVals=None,):
    self.colVals = colVals

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unknown fields."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.LIST:
          self.colVals = []
          (_etype84, _size81) = iprot.readListBegin()
          # range() instead of Python-2-only xrange() (Py2/Py3 compatible).
          for _i85 in range(_size81):
            _elem86 = TColumnValue()
            _elem86.read(iprot)
            self.colVals.append(_elem86)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TRow')
    if self.colVals is not None:
      oprot.writeFieldBegin('colVals', TType.LIST, 1)
      oprot.writeListBegin(TType.STRUCT, len(self.colVals))
      for iter87 in self.colVals:
        iter87.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if the required 'colVals' field is unset."""
    if self.colVals is None:
      raise TProtocol.TProtocolException(message='Required field colVals is unset!')
    return

  def __repr__(self):
    # .items() instead of Python-2-only .iteritems() (Py2/Py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TRowSet(object):
  """Thrift struct: a batch of result rows, row-oriented and/or
  column-oriented.

  Attributes:
   - startRowOffset
   - rows
   - columns
  """

  thrift_spec = (
    None, # 0
    (1, TType.I64, 'startRowOffset', None, None, ), # 1
    (2, TType.LIST, 'rows', (TType.STRUCT,(TRow, TRow.thrift_spec)), None, ), # 2
    (3, TType.LIST, 'columns', (TType.STRUCT,(TColumn, TColumn.thrift_spec)), None, ), # 3
  )

  def __init__(self, startRowOffset=None, rows=None, columns=None,):
    self.startRowOffset = startRowOffset
    self.rows = rows
    self.columns = columns

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unknown fields."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # range() instead of Python-2-only xrange() (Py2/Py3 compatible).
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I64:
          self.startRowOffset = iprot.readI64()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.LIST:
          self.rows = []
          (_etype91, _size88) = iprot.readListBegin()
          for _i92 in range(_size88):
            _elem93 = TRow()
            _elem93.read(iprot)
            self.rows.append(_elem93)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.LIST:
          self.columns = []
          (_etype97, _size94) = iprot.readListBegin()
          for _i98 in range(_size94):
            _elem99 = TColumn()
            _elem99.read(iprot)
            self.columns.append(_elem99)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TRowSet')
    if self.startRowOffset is not None:
      oprot.writeFieldBegin('startRowOffset', TType.I64, 1)
      oprot.writeI64(self.startRowOffset)
      oprot.writeFieldEnd()
    if self.rows is not None:
      oprot.writeFieldBegin('rows', TType.LIST, 2)
      oprot.writeListBegin(TType.STRUCT, len(self.rows))
      for iter100 in self.rows:
        iter100.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.columns is not None:
      oprot.writeFieldBegin('columns', TType.LIST, 3)
      oprot.writeListBegin(TType.STRUCT, len(self.columns))
      for iter101 in self.columns:
        iter101.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if a required field is still unset."""
    if self.startRowOffset is None:
      raise TProtocol.TProtocolException(message='Required field startRowOffset is unset!')
    if self.rows is None:
      raise TProtocol.TProtocolException(message='Required field rows is unset!')
    return

  def __repr__(self):
    # .items() instead of Python-2-only .iteritems() (Py2/Py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TStatus(object):
  """Thrift struct: the outcome of an RPC call (code plus diagnostics).

  Attributes:
   - statusCode
   - infoMessages
   - sqlState
   - errorCode
   - errorMessage
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'statusCode', None, None, ), # 1
    (2, TType.LIST, 'infoMessages', (TType.STRING,None), None, ), # 2
    (3, TType.STRING, 'sqlState', None, None, ), # 3
    (4, TType.I32, 'errorCode', None, None, ), # 4
    (5, TType.STRING, 'errorMessage', None, None, ), # 5
  )

  def __init__(self, statusCode=None, infoMessages=None, sqlState=None, errorCode=None, errorMessage=None,):
    self.statusCode = statusCode
    self.infoMessages = infoMessages
    self.sqlState = sqlState
    self.errorCode = errorCode
    self.errorMessage = errorMessage

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unknown fields."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # range() instead of Python-2-only xrange() (Py2/Py3 compatible).
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.statusCode = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.LIST:
          self.infoMessages = []
          (_etype105, _size102) = iprot.readListBegin()
          for _i106 in range(_size102):
            _elem107 = iprot.readString()
            self.infoMessages.append(_elem107)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.sqlState = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.errorCode = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRING:
          self.errorMessage = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TStatus')
    if self.statusCode is not None:
      oprot.writeFieldBegin('statusCode', TType.I32, 1)
      oprot.writeI32(self.statusCode)
      oprot.writeFieldEnd()
    if self.infoMessages is not None:
      oprot.writeFieldBegin('infoMessages', TType.LIST, 2)
      oprot.writeListBegin(TType.STRING, len(self.infoMessages))
      for iter108 in self.infoMessages:
        oprot.writeString(iter108)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.sqlState is not None:
      oprot.writeFieldBegin('sqlState', TType.STRING, 3)
      oprot.writeString(self.sqlState)
      oprot.writeFieldEnd()
    if self.errorCode is not None:
      oprot.writeFieldBegin('errorCode', TType.I32, 4)
      oprot.writeI32(self.errorCode)
      oprot.writeFieldEnd()
    if self.errorMessage is not None:
      oprot.writeFieldBegin('errorMessage', TType.STRING, 5)
      oprot.writeString(self.errorMessage)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if the required 'statusCode' is unset."""
    if self.statusCode is None:
      raise TProtocol.TProtocolException(message='Required field statusCode is unset!')
    return

  def __repr__(self):
    # .items() instead of Python-2-only .iteritems() (Py2/Py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class THandleIdentifier(object):
  """Thrift struct: an opaque handle id carrying a guid and a secret.

  Attributes:
   - guid
   - secret
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'guid', None, None, ), # 1
    (2, TType.STRING, 'secret', None, None, ), # 2
  )

  def __init__(self, guid=None, secret=None,):
    self.guid = guid
    self.secret = secret

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unknown fields."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.guid = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.secret = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('THandleIdentifier')
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 1)
      oprot.writeString(self.guid)
      oprot.writeFieldEnd()
    if self.secret is not None:
      oprot.writeFieldBegin('secret', TType.STRING, 2)
      oprot.writeString(self.secret)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if a required field is still unset."""
    if self.guid is None:
      raise TProtocol.TProtocolException(message='Required field guid is unset!')
    if self.secret is None:
      raise TProtocol.TProtocolException(message='Required field secret is unset!')
    return

  def __repr__(self):
    # .items() instead of Python-2-only .iteritems() (Py2/Py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TSessionHandle(object):
  """Handle naming one open session; wraps a THandleIdentifier.

  Thrift-generated struct.

  Attributes:
   - sessionId: THandleIdentifier (required per validate())
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionId', (THandleIdentifier, THandleIdentifier.thrift_spec), None, ), # 1
  )

  def __init__(self, sessionId=None,):
    self.sessionId = sessionId

  def read(self, iprot):
    # Fast path: C fastbinary decode under the accelerated protocol.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: skip unknown fields for forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionId = THandleIdentifier()
          self.sessionId.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C fastbinary encode under the accelerated protocol.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TSessionHandle')
    if self.sessionId is not None:
      oprot.writeFieldBegin('sessionId', TType.STRUCT, 1)
      self.sessionId.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.sessionId is None:
      raise TProtocol.TProtocolException(message='Required field sessionId is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TOperationHandle(object):
  """Handle naming one server-side operation.

  Thrift-generated struct.

  Attributes:
   - operationId: THandleIdentifier (required per validate())
   - operationType: i32 enum value (required per validate())
   - hasResultSet: bool, whether the operation produces a result set
     (required per validate())
   - modifiedRowCount: double, optional (not checked in validate())
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'operationId', (THandleIdentifier, THandleIdentifier.thrift_spec), None, ), # 1
    (2, TType.I32, 'operationType', None, None, ), # 2
    (3, TType.BOOL, 'hasResultSet', None, None, ), # 3
    (4, TType.DOUBLE, 'modifiedRowCount', None, None, ), # 4
  )

  def __init__(self, operationId=None, operationType=None, hasResultSet=None, modifiedRowCount=None,):
    self.operationId = operationId
    self.operationType = operationType
    self.hasResultSet = hasResultSet
    self.modifiedRowCount = modifiedRowCount

  def read(self, iprot):
    # Fast path: C fastbinary decode under the accelerated protocol.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: skip unknown fields for forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.operationId = THandleIdentifier()
          self.operationId.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I32:
          self.operationType = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.BOOL:
          self.hasResultSet = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.DOUBLE:
          self.modifiedRowCount = iprot.readDouble();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C fastbinary encode under the accelerated protocol.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TOperationHandle')
    # None-valued fields are omitted from the wire.
    if self.operationId is not None:
      oprot.writeFieldBegin('operationId', TType.STRUCT, 1)
      self.operationId.write(oprot)
      oprot.writeFieldEnd()
    if self.operationType is not None:
      oprot.writeFieldBegin('operationType', TType.I32, 2)
      oprot.writeI32(self.operationType)
      oprot.writeFieldEnd()
    if self.hasResultSet is not None:
      oprot.writeFieldBegin('hasResultSet', TType.BOOL, 3)
      oprot.writeBool(self.hasResultSet)
      oprot.writeFieldEnd()
    if self.modifiedRowCount is not None:
      oprot.writeFieldBegin('modifiedRowCount', TType.DOUBLE, 4)
      oprot.writeDouble(self.modifiedRowCount)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # modifiedRowCount is deliberately not required.
    if self.operationId is None:
      raise TProtocol.TProtocolException(message='Required field operationId is unset!')
    if self.operationType is None:
      raise TProtocol.TProtocolException(message='Required field operationType is unset!')
    if self.hasResultSet is None:
      raise TProtocol.TProtocolException(message='Required field hasResultSet is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TOpenSessionReq(object):
  """Request to open a new session.

  Thrift-generated struct.

  Attributes:
   - client_protocol: i32 protocol version; defaults to 1 via
     thrift_spec[1][4] (required per validate())
   - username: optional STRING
   - password: optional STRING
   - configuration: optional map<STRING, STRING> of session settings
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'client_protocol', None, 1, ), # 1
    (2, TType.STRING, 'username', None, None, ), # 2
    (3, TType.STRING, 'password', None, None, ), # 3
    (4, TType.MAP, 'configuration', (TType.STRING,None,TType.STRING,None), None, ), # 4
  )

  # Default for client_protocol is read out of thrift_spec at class
  # definition time (the IDL default value, 1).
  def __init__(self, client_protocol=thrift_spec[1][4], username=None, password=None, configuration=None,):
    self.client_protocol = client_protocol
    self.username = username
    self.password = password
    self.configuration = configuration

  def read(self, iprot):
    # Fast path: C fastbinary decode under the accelerated protocol.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: skip unknown fields for forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.client_protocol = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.username = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.password = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.MAP:
          self.configuration = {}
          (_ktype110, _vtype111, _size109 ) = iprot.readMapBegin()
          for _i113 in xrange(_size109):
            _key114 = iprot.readString();
            _val115 = iprot.readString();
            self.configuration[_key114] = _val115
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C fastbinary encode under the accelerated protocol.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TOpenSessionReq')
    # None-valued fields are omitted from the wire.
    if self.client_protocol is not None:
      oprot.writeFieldBegin('client_protocol', TType.I32, 1)
      oprot.writeI32(self.client_protocol)
      oprot.writeFieldEnd()
    if self.username is not None:
      oprot.writeFieldBegin('username', TType.STRING, 2)
      oprot.writeString(self.username)
      oprot.writeFieldEnd()
    if self.password is not None:
      oprot.writeFieldBegin('password', TType.STRING, 3)
      oprot.writeString(self.password)
      oprot.writeFieldEnd()
    if self.configuration is not None:
      oprot.writeFieldBegin('configuration', TType.MAP, 4)
      oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.configuration))
      for kiter116,viter117 in self.configuration.items():
        oprot.writeString(kiter116)
        oprot.writeString(viter117)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.client_protocol is None:
      raise TProtocol.TProtocolException(message='Required field client_protocol is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TOpenSessionResp(object):
  """Response to TOpenSessionReq.

  Thrift-generated struct.

  Attributes:
   - status: TStatus of the call (required per validate())
   - serverProtocolVersion: i32, defaults to 1 via thrift_spec[2][4]
     (required per validate())
   - sessionHandle: optional TSessionHandle for the opened session
   - configuration: optional map<STRING, STRING> of effective settings
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.I32, 'serverProtocolVersion', None, 1, ), # 2
    (3, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 3
    (4, TType.MAP, 'configuration', (TType.STRING,None,TType.STRING,None), None, ), # 4
  )

  # serverProtocolVersion default (1) is pulled from thrift_spec at
  # class definition time.
  def __init__(self, status=None, serverProtocolVersion=thrift_spec[2][4], sessionHandle=None, configuration=None,):
    self.status = status
    self.serverProtocolVersion = serverProtocolVersion
    self.sessionHandle = sessionHandle
    self.configuration = configuration

  def read(self, iprot):
    # Fast path: C fastbinary decode under the accelerated protocol.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: skip unknown fields for forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I32:
          self.serverProtocolVersion = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.MAP:
          self.configuration = {}
          (_ktype119, _vtype120, _size118 ) = iprot.readMapBegin()
          for _i122 in xrange(_size118):
            _key123 = iprot.readString();
            _val124 = iprot.readString();
            self.configuration[_key123] = _val124
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C fastbinary encode under the accelerated protocol.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TOpenSessionResp')
    # None-valued fields are omitted from the wire.
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.serverProtocolVersion is not None:
      oprot.writeFieldBegin('serverProtocolVersion', TType.I32, 2)
      oprot.writeI32(self.serverProtocolVersion)
      oprot.writeFieldEnd()
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 3)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    if self.configuration is not None:
      oprot.writeFieldBegin('configuration', TType.MAP, 4)
      oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.configuration))
      for kiter125,viter126 in self.configuration.items():
        oprot.writeString(kiter125)
        oprot.writeString(viter126)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    if self.serverProtocolVersion is None:
      raise TProtocol.TProtocolException(message='Required field serverProtocolVersion is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TCloseSessionReq(object):
  """Request to close an open session.

  Thrift-generated struct.

  Attributes:
   - sessionHandle: TSessionHandle to close (required per validate())
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
  )

  def __init__(self, sessionHandle=None,):
    self.sessionHandle = sessionHandle

  def read(self, iprot):
    # Fast path: C fastbinary decode under the accelerated protocol.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: skip unknown fields for forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C fastbinary encode under the accelerated protocol.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TCloseSessionReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TCloseSessionResp(object):
  """Response to TCloseSessionReq.

  Thrift-generated struct.

  Attributes:
   - status: TStatus of the call (required per validate())
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
  )

  def __init__(self, status=None,):
    self.status = status

  def read(self, iprot):
    # Fast path: C fastbinary decode under the accelerated protocol.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: skip unknown fields for forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C fastbinary encode under the accelerated protocol.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TCloseSessionResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TGetInfoValue(object):
  """Polymorphic value returned by GetInfo.

  Thrift-generated struct. All fields are optional and validate() is a
  no-op, so this behaves like a union: typically only one field is set.

  Attributes:
   - stringValue: STRING variant
   - smallIntValue: i16 variant
   - integerBitmask: i32 variant
   - integerFlag: i32 variant
   - binaryValue: i32 variant (despite the name, carried as I32)
   - lenValue: i64 variant
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'stringValue', None, None, ), # 1
    (2, TType.I16, 'smallIntValue', None, None, ), # 2
    (3, TType.I32, 'integerBitmask', None, None, ), # 3
    (4, TType.I32, 'integerFlag', None, None, ), # 4
    (5, TType.I32, 'binaryValue', None, None, ), # 5
    (6, TType.I64, 'lenValue', None, None, ), # 6
  )

  def __init__(self, stringValue=None, smallIntValue=None, integerBitmask=None, integerFlag=None, binaryValue=None, lenValue=None,):
    self.stringValue = stringValue
    self.smallIntValue = smallIntValue
    self.integerBitmask = integerBitmask
    self.integerFlag = integerFlag
    self.binaryValue = binaryValue
    self.lenValue = lenValue

  def read(self, iprot):
    # Fast path: C fastbinary decode under the accelerated protocol.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: skip unknown fields for forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.stringValue = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I16:
          self.smallIntValue = iprot.readI16();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.integerBitmask = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.integerFlag = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.I32:
          self.binaryValue = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.I64:
          self.lenValue = iprot.readI64();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C fastbinary encode under the accelerated protocol.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetInfoValue')
    # Only fields that are set get written.
    if self.stringValue is not None:
      oprot.writeFieldBegin('stringValue', TType.STRING, 1)
      oprot.writeString(self.stringValue)
      oprot.writeFieldEnd()
    if self.smallIntValue is not None:
      oprot.writeFieldBegin('smallIntValue', TType.I16, 2)
      oprot.writeI16(self.smallIntValue)
      oprot.writeFieldEnd()
    if self.integerBitmask is not None:
      oprot.writeFieldBegin('integerBitmask', TType.I32, 3)
      oprot.writeI32(self.integerBitmask)
      oprot.writeFieldEnd()
    if self.integerFlag is not None:
      oprot.writeFieldBegin('integerFlag', TType.I32, 4)
      oprot.writeI32(self.integerFlag)
      oprot.writeFieldEnd()
    if self.binaryValue is not None:
      oprot.writeFieldBegin('binaryValue', TType.I32, 5)
      oprot.writeI32(self.binaryValue)
      oprot.writeFieldEnd()
    if self.lenValue is not None:
      oprot.writeFieldBegin('lenValue', TType.I64, 6)
      oprot.writeI64(self.lenValue)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TGetInfoReq(object):
  """Request for a single piece of server metadata (GetInfo).

  Thrift-generated struct.

  Attributes:
   - sessionHandle: TSessionHandle (required per validate())
   - infoType: i32 enum selecting which info value to fetch
     (required per validate())
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
    (2, TType.I32, 'infoType', None, None, ), # 2
  )

  def __init__(self, sessionHandle=None, infoType=None,):
    self.sessionHandle = sessionHandle
    self.infoType = infoType

  def read(self, iprot):
    # Fast path: C fastbinary decode under the accelerated protocol.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: skip unknown fields for forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I32:
          self.infoType = iprot.readI32();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C fastbinary encode under the accelerated protocol.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetInfoReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    if self.infoType is not None:
      oprot.writeFieldBegin('infoType', TType.I32, 2)
      oprot.writeI32(self.infoType)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    if self.infoType is None:
      raise TProtocol.TProtocolException(message='Required field infoType is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TGetInfoResp(object):
  """Response to TGetInfoReq.

  Thrift-generated struct.

  Attributes:
   - status: TStatus of the call (required per validate())
   - infoValue: TGetInfoValue carrying the result
     (required per validate())
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'infoValue', (TGetInfoValue, TGetInfoValue.thrift_spec), None, ), # 2
  )

  def __init__(self, status=None, infoValue=None,):
    self.status = status
    self.infoValue = infoValue

  def read(self, iprot):
    # Fast path: C fastbinary decode under the accelerated protocol.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: skip unknown fields for forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.infoValue = TGetInfoValue()
          self.infoValue.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C fastbinary encode under the accelerated protocol.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetInfoResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.infoValue is not None:
      oprot.writeFieldBegin('infoValue', TType.STRUCT, 2)
      self.infoValue.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    if self.infoValue is None:
      raise TProtocol.TProtocolException(message='Required field infoValue is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TExecuteStatementReq(object):
  """Request to execute a statement within a session.

  Thrift-generated struct.

  Attributes:
   - sessionHandle: TSessionHandle (required per validate())
   - statement: statement text, STRING (required per validate())
   - confOverlay: optional map<STRING, STRING> of per-statement
     configuration overrides
   - runAsync: bool; defaults to False via thrift_spec[4][4]
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
    (2, TType.STRING, 'statement', None, None, ), # 2
    (3, TType.MAP, 'confOverlay', (TType.STRING,None,TType.STRING,None), None, ), # 3
    (4, TType.BOOL, 'runAsync', None, False, ), # 4
  )

  # runAsync default (False) is pulled from thrift_spec at class
  # definition time.
  def __init__(self, sessionHandle=None, statement=None, confOverlay=None, runAsync=thrift_spec[4][4],):
    self.sessionHandle = sessionHandle
    self.statement = statement
    self.confOverlay = confOverlay
    self.runAsync = runAsync

  def read(self, iprot):
    # Fast path: C fastbinary decode under the accelerated protocol.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: skip unknown fields for forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.statement = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.MAP:
          self.confOverlay = {}
          (_ktype128, _vtype129, _size127 ) = iprot.readMapBegin()
          for _i131 in xrange(_size127):
            _key132 = iprot.readString();
            _val133 = iprot.readString();
            self.confOverlay[_key132] = _val133
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.BOOL:
          self.runAsync = iprot.readBool();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C fastbinary encode under the accelerated protocol.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TExecuteStatementReq')
    # None-valued fields are omitted from the wire.
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    if self.statement is not None:
      oprot.writeFieldBegin('statement', TType.STRING, 2)
      oprot.writeString(self.statement)
      oprot.writeFieldEnd()
    if self.confOverlay is not None:
      oprot.writeFieldBegin('confOverlay', TType.MAP, 3)
      oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.confOverlay))
      for kiter134,viter135 in self.confOverlay.items():
        oprot.writeString(kiter134)
        oprot.writeString(viter135)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    if self.runAsync is not None:
      oprot.writeFieldBegin('runAsync', TType.BOOL, 4)
      oprot.writeBool(self.runAsync)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    if self.statement is None:
      raise TProtocol.TProtocolException(message='Required field statement is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TExecuteStatementResp(object):
  """Response to TExecuteStatementReq.

  Thrift-generated struct.

  Attributes:
   - status: TStatus of the call (required per validate())
   - operationHandle: optional TOperationHandle for the started
     operation (absent on failure)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 2
  )

  def __init__(self, status=None, operationHandle=None,):
    self.status = status
    self.operationHandle = operationHandle

  def read(self, iprot):
    # Fast path: C fastbinary decode under the accelerated protocol.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: skip unknown fields for forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C fastbinary encode under the accelerated protocol.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TExecuteStatementResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 2)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # operationHandle is deliberately not required.
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TGetTypeInfoReq(object):
  """Request for type metadata (GetTypeInfo).

  Thrift-generated struct.

  Attributes:
   - sessionHandle: TSessionHandle (required per validate())
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
  )

  def __init__(self, sessionHandle=None,):
    self.sessionHandle = sessionHandle

  def read(self, iprot):
    # Fast path: C fastbinary decode under the accelerated protocol.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: skip unknown fields for forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C fastbinary encode under the accelerated protocol.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetTypeInfoReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TGetTypeInfoResp(object):
  """Response to TGetTypeInfoReq.

  Thrift-generated struct.

  Attributes:
   - status: TStatus of the call (required per validate())
   - operationHandle: optional TOperationHandle for fetching results
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 2
  )

  def __init__(self, status=None, operationHandle=None,):
    self.status = status
    self.operationHandle = operationHandle

  def read(self, iprot):
    # Fast path: C fastbinary decode under the accelerated protocol.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: skip unknown fields for forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C fastbinary encode under the accelerated protocol.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetTypeInfoResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 2)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # operationHandle is deliberately not required.
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TGetCatalogsReq(object):
  """Request struct for the GetCatalogs RPC.

  Attributes:
   - sessionHandle: TSessionHandle; required (validate() raises if unset).
  """
  # thrift_spec: per-field-id tuples of (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
  )
  def __init__(self, sessionHandle=None,):
    self.sessionHandle = sessionHandle
  def read(self, iprot):
    """Deserialize this struct from the Thrift input protocol *iprot*."""
    # Fast path: decode via the accelerated C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk fields until STOP, skipping unknown ids / mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; fields set to None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetCatalogsReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required 'sessionHandle' field is unset."""
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    return
  def __repr__(self):
    # NOTE: iteritems() is Python 2 only, matching the rest of this generated module.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetCatalogsResp(object):
  """Response struct for the GetCatalogs RPC.

  Attributes:
   - status: TStatus result; required (validate() raises if unset).
   - operationHandle: optional TOperationHandle (not checked by validate()).
  """
  # thrift_spec: per-field-id tuples of (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 2
  )
  def __init__(self, status=None, operationHandle=None,):
    self.status = status
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Deserialize this struct from the Thrift input protocol *iprot*."""
    # Fast path: decode via the accelerated C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk fields until STOP, skipping unknown ids / mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; fields set to None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetCatalogsResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 2)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required 'status' field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    # NOTE: iteritems() is Python 2 only, matching the rest of this generated module.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetSchemasReq(object):
  """Request struct for the GetSchemas RPC.

  Attributes:
   - sessionHandle: TSessionHandle; required (validate() raises if unset).
   - catalogName: optional string.
   - schemaName: optional string.
  """
  # thrift_spec: per-field-id tuples of (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
    (2, TType.STRING, 'catalogName', None, None, ), # 2
    (3, TType.STRING, 'schemaName', None, None, ), # 3
  )
  def __init__(self, sessionHandle=None, catalogName=None, schemaName=None,):
    self.sessionHandle = sessionHandle
    self.catalogName = catalogName
    self.schemaName = schemaName
  def read(self, iprot):
    """Deserialize this struct from the Thrift input protocol *iprot*."""
    # Fast path: decode via the accelerated C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk fields until STOP, skipping unknown ids / mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.catalogName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.schemaName = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; fields set to None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetSchemasReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    if self.catalogName is not None:
      oprot.writeFieldBegin('catalogName', TType.STRING, 2)
      oprot.writeString(self.catalogName)
      oprot.writeFieldEnd()
    if self.schemaName is not None:
      oprot.writeFieldBegin('schemaName', TType.STRING, 3)
      oprot.writeString(self.schemaName)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required 'sessionHandle' field is unset."""
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    return
  def __repr__(self):
    # NOTE: iteritems() is Python 2 only, matching the rest of this generated module.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetSchemasResp(object):
  """Response struct for the GetSchemas RPC.

  Attributes:
   - status: TStatus result; required (validate() raises if unset).
   - operationHandle: optional TOperationHandle (not checked by validate()).
  """
  # thrift_spec: per-field-id tuples of (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 2
  )
  def __init__(self, status=None, operationHandle=None,):
    self.status = status
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Deserialize this struct from the Thrift input protocol *iprot*."""
    # Fast path: decode via the accelerated C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk fields until STOP, skipping unknown ids / mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; fields set to None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetSchemasResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 2)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required 'status' field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    # NOTE: iteritems() is Python 2 only, matching the rest of this generated module.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetTablesReq(object):
  """Request struct for the GetTables RPC.

  Attributes:
   - sessionHandle: TSessionHandle; required (validate() raises if unset).
   - catalogName: optional string.
   - schemaName: optional string.
   - tableName: optional string.
   - tableTypes: optional list of strings.
  """
  # thrift_spec: per-field-id tuples of (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
    (2, TType.STRING, 'catalogName', None, None, ), # 2
    (3, TType.STRING, 'schemaName', None, None, ), # 3
    (4, TType.STRING, 'tableName', None, None, ), # 4
    (5, TType.LIST, 'tableTypes', (TType.STRING,None), None, ), # 5
  )
  def __init__(self, sessionHandle=None, catalogName=None, schemaName=None, tableName=None, tableTypes=None,):
    self.sessionHandle = sessionHandle
    self.catalogName = catalogName
    self.schemaName = schemaName
    self.tableName = tableName
    self.tableTypes = tableTypes
  def read(self, iprot):
    """Deserialize this struct from the Thrift input protocol *iprot*."""
    # Fast path: decode via the accelerated C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk fields until STOP, skipping unknown ids / mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.catalogName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.schemaName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.tableName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.LIST:
          # Element-by-element read of a list<string>.
          self.tableTypes = []
          (_etype139, _size136) = iprot.readListBegin()
          for _i140 in xrange(_size136):
            _elem141 = iprot.readString();
            self.tableTypes.append(_elem141)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; fields set to None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetTablesReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    if self.catalogName is not None:
      oprot.writeFieldBegin('catalogName', TType.STRING, 2)
      oprot.writeString(self.catalogName)
      oprot.writeFieldEnd()
    if self.schemaName is not None:
      oprot.writeFieldBegin('schemaName', TType.STRING, 3)
      oprot.writeString(self.schemaName)
      oprot.writeFieldEnd()
    if self.tableName is not None:
      oprot.writeFieldBegin('tableName', TType.STRING, 4)
      oprot.writeString(self.tableName)
      oprot.writeFieldEnd()
    if self.tableTypes is not None:
      oprot.writeFieldBegin('tableTypes', TType.LIST, 5)
      oprot.writeListBegin(TType.STRING, len(self.tableTypes))
      for iter142 in self.tableTypes:
        oprot.writeString(iter142)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required 'sessionHandle' field is unset."""
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    return
  def __repr__(self):
    # NOTE: iteritems() is Python 2 only, matching the rest of this generated module.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetTablesResp(object):
  """Response struct for the GetTables RPC.

  Attributes:
   - status: TStatus result; required (validate() raises if unset).
   - operationHandle: optional TOperationHandle (not checked by validate()).
  """
  # thrift_spec: per-field-id tuples of (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 2
  )
  def __init__(self, status=None, operationHandle=None,):
    self.status = status
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Deserialize this struct from the Thrift input protocol *iprot*."""
    # Fast path: decode via the accelerated C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk fields until STOP, skipping unknown ids / mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; fields set to None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetTablesResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 2)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required 'status' field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    # NOTE: iteritems() is Python 2 only, matching the rest of this generated module.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetTableTypesReq(object):
  """Request struct for the GetTableTypes RPC.

  Attributes:
   - sessionHandle: TSessionHandle; required (validate() raises if unset).
  """
  # thrift_spec: per-field-id tuples of (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
  )
  def __init__(self, sessionHandle=None,):
    self.sessionHandle = sessionHandle
  def read(self, iprot):
    """Deserialize this struct from the Thrift input protocol *iprot*."""
    # Fast path: decode via the accelerated C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk fields until STOP, skipping unknown ids / mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; fields set to None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetTableTypesReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required 'sessionHandle' field is unset."""
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    return
  def __repr__(self):
    # NOTE: iteritems() is Python 2 only, matching the rest of this generated module.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetTableTypesResp(object):
  """Response struct for the GetTableTypes RPC.

  Attributes:
   - status: TStatus result; required (validate() raises if unset).
   - operationHandle: optional TOperationHandle (not checked by validate()).
  """
  # thrift_spec: per-field-id tuples of (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 2
  )
  def __init__(self, status=None, operationHandle=None,):
    self.status = status
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Deserialize this struct from the Thrift input protocol *iprot*."""
    # Fast path: decode via the accelerated C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk fields until STOP, skipping unknown ids / mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; fields set to None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetTableTypesResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 2)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required 'status' field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    # NOTE: iteritems() is Python 2 only, matching the rest of this generated module.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetColumnsReq(object):
  """Request struct for the GetColumns RPC.

  Attributes:
   - sessionHandle: TSessionHandle; required (validate() raises if unset).
   - catalogName: optional string.
   - schemaName: optional string.
   - tableName: optional string.
   - columnName: optional string.
  """
  # thrift_spec: per-field-id tuples of (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
    (2, TType.STRING, 'catalogName', None, None, ), # 2
    (3, TType.STRING, 'schemaName', None, None, ), # 3
    (4, TType.STRING, 'tableName', None, None, ), # 4
    (5, TType.STRING, 'columnName', None, None, ), # 5
  )
  def __init__(self, sessionHandle=None, catalogName=None, schemaName=None, tableName=None, columnName=None,):
    self.sessionHandle = sessionHandle
    self.catalogName = catalogName
    self.schemaName = schemaName
    self.tableName = tableName
    self.columnName = columnName
  def read(self, iprot):
    """Deserialize this struct from the Thrift input protocol *iprot*."""
    # Fast path: decode via the accelerated C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk fields until STOP, skipping unknown ids / mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.catalogName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.schemaName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.tableName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRING:
          self.columnName = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; fields set to None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetColumnsReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    if self.catalogName is not None:
      oprot.writeFieldBegin('catalogName', TType.STRING, 2)
      oprot.writeString(self.catalogName)
      oprot.writeFieldEnd()
    if self.schemaName is not None:
      oprot.writeFieldBegin('schemaName', TType.STRING, 3)
      oprot.writeString(self.schemaName)
      oprot.writeFieldEnd()
    if self.tableName is not None:
      oprot.writeFieldBegin('tableName', TType.STRING, 4)
      oprot.writeString(self.tableName)
      oprot.writeFieldEnd()
    if self.columnName is not None:
      oprot.writeFieldBegin('columnName', TType.STRING, 5)
      oprot.writeString(self.columnName)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required 'sessionHandle' field is unset."""
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    return
  def __repr__(self):
    # NOTE: iteritems() is Python 2 only, matching the rest of this generated module.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetColumnsResp(object):
  """Response struct for the GetColumns RPC.

  Attributes:
   - status: TStatus result; required (validate() raises if unset).
   - operationHandle: optional TOperationHandle (not checked by validate()).
  """
  # thrift_spec: per-field-id tuples of (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 2
  )
  def __init__(self, status=None, operationHandle=None,):
    self.status = status
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Deserialize this struct from the Thrift input protocol *iprot*."""
    # Fast path: decode via the accelerated C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk fields until STOP, skipping unknown ids / mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; fields set to None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetColumnsResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 2)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required 'status' field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    # NOTE: iteritems() is Python 2 only, matching the rest of this generated module.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetFunctionsReq(object):
  """Request struct for the GetFunctions RPC.

  Attributes:
   - sessionHandle: TSessionHandle; required (validate() raises if unset).
   - catalogName: optional string.
   - schemaName: optional string.
   - functionName: string; required (validate() raises if unset).
  """
  # thrift_spec: per-field-id tuples of (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
    (2, TType.STRING, 'catalogName', None, None, ), # 2
    (3, TType.STRING, 'schemaName', None, None, ), # 3
    (4, TType.STRING, 'functionName', None, None, ), # 4
  )
  def __init__(self, sessionHandle=None, catalogName=None, schemaName=None, functionName=None,):
    self.sessionHandle = sessionHandle
    self.catalogName = catalogName
    self.schemaName = schemaName
    self.functionName = functionName
  def read(self, iprot):
    """Deserialize this struct from the Thrift input protocol *iprot*."""
    # Fast path: decode via the accelerated C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk fields until STOP, skipping unknown ids / mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.catalogName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.schemaName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.functionName = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; fields set to None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetFunctionsReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    if self.catalogName is not None:
      oprot.writeFieldBegin('catalogName', TType.STRING, 2)
      oprot.writeString(self.catalogName)
      oprot.writeFieldEnd()
    if self.schemaName is not None:
      oprot.writeFieldBegin('schemaName', TType.STRING, 3)
      oprot.writeString(self.schemaName)
      oprot.writeFieldEnd()
    if self.functionName is not None:
      oprot.writeFieldBegin('functionName', TType.STRING, 4)
      oprot.writeString(self.functionName)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if either required field (sessionHandle, functionName) is unset."""
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    if self.functionName is None:
      raise TProtocol.TProtocolException(message='Required field functionName is unset!')
    return
  def __repr__(self):
    # NOTE: iteritems() is Python 2 only, matching the rest of this generated module.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetFunctionsResp(object):
  """Response struct for the GetFunctions RPC.

  Attributes:
   - status: TStatus result; required (validate() raises if unset).
   - operationHandle: optional TOperationHandle (not checked by validate()).
  """
  # thrift_spec: per-field-id tuples of (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 2
  )
  def __init__(self, status=None, operationHandle=None,):
    self.status = status
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Deserialize this struct from the Thrift input protocol *iprot*."""
    # Fast path: decode via the accelerated C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk fields until STOP, skipping unknown ids / mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; fields set to None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetFunctionsResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 2)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required 'status' field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    # NOTE: iteritems() is Python 2 only, matching the rest of this generated module.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetOperationStatusReq(object):
  """Request struct for the GetOperationStatus RPC.

  Attributes:
   - operationHandle: TOperationHandle; required (validate() raises if unset).
  """
  # thrift_spec: per-field-id tuples of (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 1
  )
  def __init__(self, operationHandle=None,):
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Deserialize this struct from the Thrift input protocol *iprot*."""
    # Fast path: decode via the accelerated C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk fields until STOP, skipping unknown ids / mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; fields set to None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetOperationStatusReq')
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 1)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required 'operationHandle' field is unset."""
    if self.operationHandle is None:
      raise TProtocol.TProtocolException(message='Required field operationHandle is unset!')
    return
  def __repr__(self):
    # NOTE: iteritems() is Python 2 only, matching the rest of this generated module.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetOperationStatusResp(object):
  """
  Auto-generated Thrift struct (do not edit by hand; regenerate from the IDL).

  Attributes:
   - status: required TStatus (field id 1)
   - operationState: optional i32 (field id 2)
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.I32, 'operationState', None, None, ), # 2
  )
  def __init__(self, status=None, operationState=None,):
    self.status = status
    self.operationState = operationState
  def read(self, iprot):
    """Deserialize this struct from *iprot* (fastbinary C path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I32:
          self.operationState = iprot.readI32();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot* (fastbinary C path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetOperationStatusResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.operationState is not None:
      oprot.writeFieldBegin('operationState', TType.I32, 2)
      oprot.writeI32(self.operationState)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    # Python 2 only: relies on dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TCancelOperationReq(object):
  """
  Auto-generated Thrift struct (do not edit by hand; regenerate from the IDL).

  Attributes:
   - operationHandle: required TOperationHandle (field id 1)
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 1
  )
  def __init__(self, operationHandle=None,):
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Deserialize this struct from *iprot* (fastbinary C path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot* (fastbinary C path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TCancelOperationReq')
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 1)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.operationHandle is None:
      raise TProtocol.TProtocolException(message='Required field operationHandle is unset!')
    return
  def __repr__(self):
    # Python 2 only: relies on dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TCancelOperationResp(object):
  """
  Auto-generated Thrift struct (do not edit by hand; regenerate from the IDL).

  Attributes:
   - status: required TStatus (field id 1)
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
  )
  def __init__(self, status=None,):
    self.status = status
  def read(self, iprot):
    """Deserialize this struct from *iprot* (fastbinary C path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot* (fastbinary C path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TCancelOperationResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    # Python 2 only: relies on dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TCloseOperationReq(object):
  """
  Auto-generated Thrift struct (do not edit by hand; regenerate from the IDL).

  Attributes:
   - operationHandle: required TOperationHandle (field id 1)
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 1
  )
  def __init__(self, operationHandle=None,):
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Deserialize this struct from *iprot* (fastbinary C path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot* (fastbinary C path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TCloseOperationReq')
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 1)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.operationHandle is None:
      raise TProtocol.TProtocolException(message='Required field operationHandle is unset!')
    return
  def __repr__(self):
    # Python 2 only: relies on dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TCloseOperationResp(object):
  """
  Auto-generated Thrift struct (do not edit by hand; regenerate from the IDL).

  Attributes:
   - status: required TStatus (field id 1)
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
  )
  def __init__(self, status=None,):
    self.status = status
  def read(self, iprot):
    """Deserialize this struct from *iprot* (fastbinary C path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot* (fastbinary C path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TCloseOperationResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    # Python 2 only: relies on dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetResultSetMetadataReq(object):
  """
  Auto-generated Thrift struct (do not edit by hand; regenerate from the IDL).

  Attributes:
   - operationHandle: required TOperationHandle (field id 1)
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 1
  )
  def __init__(self, operationHandle=None,):
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Deserialize this struct from *iprot* (fastbinary C path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot* (fastbinary C path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetResultSetMetadataReq')
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 1)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.operationHandle is None:
      raise TProtocol.TProtocolException(message='Required field operationHandle is unset!')
    return
  def __repr__(self):
    # Python 2 only: relies on dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetResultSetMetadataResp(object):
  """
  Auto-generated Thrift struct (do not edit by hand; regenerate from the IDL).

  Attributes:
   - status: required TStatus (field id 1)
   - schema: optional TTableSchema (field id 2)
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'schema', (TTableSchema, TTableSchema.thrift_spec), None, ), # 2
  )
  def __init__(self, status=None, schema=None,):
    self.status = status
    self.schema = schema
  def read(self, iprot):
    """Deserialize this struct from *iprot* (fastbinary C path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.schema = TTableSchema()
          self.schema.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot* (fastbinary C path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetResultSetMetadataResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.schema is not None:
      oprot.writeFieldBegin('schema', TType.STRUCT, 2)
      self.schema.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    # Python 2 only: relies on dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TFetchResultsReq(object):
  """
  Auto-generated Thrift struct (do not edit by hand; regenerate from the IDL).

  Attributes:
   - operationHandle: required TOperationHandle (field id 1)
   - orientation: required i32, defaults to 0 per thrift_spec (field id 2)
   - maxRows: required i64 (field id 3)
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 1
    (2, TType.I32, 'orientation', None, 0, ), # 2
    (3, TType.I64, 'maxRows', None, None, ), # 3
  )
  def __init__(self, operationHandle=None, orientation=thrift_spec[2][4], maxRows=None,):
    self.operationHandle = operationHandle
    self.orientation = orientation
    self.maxRows = maxRows
  def read(self, iprot):
    """Deserialize this struct from *iprot* (fastbinary C path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I32:
          self.orientation = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I64:
          self.maxRows = iprot.readI64();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot* (fastbinary C path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TFetchResultsReq')
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 1)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    if self.orientation is not None:
      oprot.writeFieldBegin('orientation', TType.I32, 2)
      oprot.writeI32(self.orientation)
      oprot.writeFieldEnd()
    if self.maxRows is not None:
      oprot.writeFieldBegin('maxRows', TType.I64, 3)
      oprot.writeI64(self.maxRows)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.operationHandle is None:
      raise TProtocol.TProtocolException(message='Required field operationHandle is unset!')
    if self.orientation is None:
      raise TProtocol.TProtocolException(message='Required field orientation is unset!')
    if self.maxRows is None:
      raise TProtocol.TProtocolException(message='Required field maxRows is unset!')
    return
  def __repr__(self):
    # Python 2 only: relies on dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TFetchResultsResp(object):
  """
  Auto-generated Thrift struct (do not edit by hand; regenerate from the IDL).

  Attributes:
   - status: required TStatus (field id 1)
   - hasMoreRows: optional bool (field id 2)
   - results: optional TRowSet (field id 3)
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.BOOL, 'hasMoreRows', None, None, ), # 2
    (3, TType.STRUCT, 'results', (TRowSet, TRowSet.thrift_spec), None, ), # 3
  )
  def __init__(self, status=None, hasMoreRows=None, results=None,):
    self.status = status
    self.hasMoreRows = hasMoreRows
    self.results = results
  def read(self, iprot):
    """Deserialize this struct from *iprot* (fastbinary C path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.BOOL:
          self.hasMoreRows = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.results = TRowSet()
          self.results.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot* (fastbinary C path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TFetchResultsResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.hasMoreRows is not None:
      oprot.writeFieldBegin('hasMoreRows', TType.BOOL, 2)
      oprot.writeBool(self.hasMoreRows)
      oprot.writeFieldEnd()
    if self.results is not None:
      oprot.writeFieldBegin('results', TType.STRUCT, 3)
      self.results.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    # Python 2 only: relies on dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetLogReq(object):
  """
  Auto-generated Thrift struct (do not edit by hand; regenerate from the IDL).

  Attributes:
   - operationHandle: required TOperationHandle (field id 1)
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 1
  )
  def __init__(self, operationHandle=None,):
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Deserialize this struct from *iprot* (fastbinary C path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot* (fastbinary C path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetLogReq')
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 1)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.operationHandle is None:
      raise TProtocol.TProtocolException(message='Required field operationHandle is unset!')
    return
  def __repr__(self):
    # Python 2 only: relies on dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetLogResp(object):
  """
  Auto-generated Thrift struct (do not edit by hand; regenerate from the IDL).

  Attributes:
   - status: required TStatus (field id 1)
   - log: required string (field id 2)
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRING, 'log', None, None, ), # 2
  )
  def __init__(self, status=None, log=None,):
    self.status = status
    self.log = log
  def read(self, iprot):
    """Deserialize this struct from *iprot* (fastbinary C path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.log = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot* (fastbinary C path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetLogResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.log is not None:
      oprot.writeFieldBegin('log', TType.STRING, 2)
      oprot.writeString(self.log)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    if self.log is None:
      raise TProtocol.TProtocolException(message='Required field log is unset!')
    return
  def __repr__(self):
    # Python 2 only: relies on dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
| [
"romain@cloudera.com"
] | romain@cloudera.com |
1e246da55235765c4e1d884dea487732336a89e7 | fc08d687fd1373fabde8d78c6915d656735d4275 | /venv/Scripts/easy_install-3.7-script.py | 50ae5a79b523e54deaeabbadee074c9dbefa9d78 | [] | no_license | muhammadqasim3/OOP-in-Python | 35f8e35e67af53fb84ccf5dcca727c4447d32e50 | 33e2ccd9e2d78d17fc27bb077ed9309b8d5b0c75 | refs/heads/master | 2020-07-02T17:00:00.965435 | 2019-08-10T07:43:42 | 2019-08-10T07:43:42 | 201,597,433 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 462 | py | #!C:\Users\QASIM\PycharmProjects\OOPinPython\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install-3.7'
# Console-script shim auto-generated by setuptools: resolves the
# 'easy_install-3.7' entry point from the pinned setuptools distribution
# and exits with its return code. Do not edit by hand.
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip the '-script.py'/'.pyw'/'.exe' suffix that Windows launchers
    # append to argv[0] so the program sees its plain command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install-3.7')()
    )
"muhammadqasimaslam3@gmail.com"
] | muhammadqasimaslam3@gmail.com |
bfb3295ccebada59f4dbb8a494a9fc11ebb8eaeb | 4c8a639b925fdbdb4aceb8740125d0b58a78301f | /regex.py | 9e6396f0d9e927bc7e792c2e13a3e1080d32944a | [] | no_license | krunalbapodara/python-core | 75a0155d7fb168b56cd3773ed928ad56a1378d56 | d48e55a10eaf3caa80de030812e1b104b74f3362 | refs/heads/master | 2023-01-20T01:49:32.191719 | 2020-11-17T21:04:11 | 2020-11-17T21:04:11 | 313,742,619 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 110 | py | import re
x = re.match('^a...s$','alfias')
if x:
print('Search successfull')
else:
print('Not found') | [
"krunalb@aditiconsulting.com"
] | krunalb@aditiconsulting.com |
6836a941e6848ab1bd5820853ca611f8a76de081 | e1fd174ac836f8c73d966687fd484d5c7fe40ba0 | /code/blogCSVConstructor.py | d606d78751b10c0eef8b7cff8dddbb5c22dc478a | [] | no_license | shaguniitb/politeness | 296c7b4b7d80f0b1a4b894e3237d3c599f24a5b6 | 86cf1644bf7675571a60bc8f157cf0f67b1a9b25 | refs/heads/master | 2021-05-01T16:19:56.698448 | 2013-12-22T22:58:57 | 2013-12-22T22:58:57 | 14,853,149 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 708 | py | import csv
import string
import glob
import os
input_dir = "politeness/data/blogs";
output_file = "/home/shagun/blogs.csv";
os.chdir(input_dir)
firstLine = True
for input_file in glob.glob("*.txt"):
print input_file
fo = open(input_file, "rw+")
lines = fo.readlines()
request = ''.join(lines)
with open(output_file, 'a') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
request = request.replace('\n', '')
request = request.replace('\r', '')
if firstLine:
firstLine = False
spamwriter.writerow(['text', '@@class@@'])
spamwriter.writerow([request, '?'])
fo.close()
| [
"shaguniitb@gmail.com"
] | shaguniitb@gmail.com |
0bbd1ab0a6bc645096ae013a61ce705e7e674e84 | e478f2d975a42e98d62a090c20abdec4792152df | /word_count.py | bcbae81bba968635dc6547511e5c0350d233eeec | [] | no_license | JaydSchumacher/word_count | a8386fb3020090f7bbf06122ef4466435043d7e7 | 8740321647ae753ae620457012e7137e03de6c25 | refs/heads/master | 2021-09-03T20:09:12.821471 | 2018-01-11T16:39:36 | 2018-01-11T16:39:36 | 117,125,555 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 215 | py | def word_count(a_str):
str = a_str.lower()
list = str.split(" ")
my_dict = {word:list.count(word) for word in list}
return my_dict
print(word_count("I do not like it Sam I Am I Sam not")) | [
"jaydschumacher@gmail.com"
] | jaydschumacher@gmail.com |
0cff417d3d3fdf0cafcb6fb57b8a3e46c800916d | b7a427c500816ea6410954fb63939db29f03ffc7 | /S7/model.py | 42305c40c64ffff16cb252045cd21177f5e747e8 | [] | no_license | shritigupta/EVA | ca83efdcd8bfd958d6c8a8829768e67c4b2abe94 | a8c22deb4558b9b6f3a36a2633095553a44e8313 | refs/heads/master | 2021-01-04T22:06:45.045580 | 2020-04-05T01:10:39 | 2020-04-05T01:10:39 | 240,778,250 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,292 | py | import torch.nn as nn
import torch.nn.functional as F
class Net(nn.Module):
    """Small CNN classifier producing 10 logits per input image.

    Pipeline: two conv stages with max-pooling, a dilated conv stage,
    a grouped conv stage ending in a 1x1 projection to 10 channels,
    3x3 average pooling, and a final 10->10 linear layer.
    """

    def __init__(self):
        super(Net, self).__init__()
        self.pool = nn.MaxPool2d(2, 2)
        self.fc3 = nn.Linear(10, 10)
        # Stage 1: 3 -> 32 -> 64 channels, each conv followed by BatchNorm.
        self.conv11 = nn.Sequential(nn.Conv2d(3, 32, 3, padding=1), nn.BatchNorm2d(32))
        self.conv12 = nn.Sequential(nn.Conv2d(32, 64, 3, padding=1), nn.BatchNorm2d(64))
        # Stage 2: 64 -> 128 -> 128 channels.
        self.conv21 = nn.Sequential(nn.Conv2d(64, 128, 3, padding=1), nn.BatchNorm2d(128))
        self.conv22 = nn.Sequential(nn.Conv2d(128, 128, 3, padding=1), nn.BatchNorm2d(128))
        # Dilated 3x3 conv (dilation=2 with padding=2 preserves spatial size).
        self.conv3 = nn.Sequential(nn.Conv2d(128, 256, 3, dilation=2, padding=2), nn.BatchNorm2d(256))
        # Grouped 3x3 conv, then a 1x1 projection down to the 10 class channels.
        self.conv4 = nn.Sequential(nn.Conv2d(256, 512, 3, padding=1, groups=32),
                                   nn.BatchNorm2d(512),
                                   nn.Conv2d(512, 10, 1),
                                   nn.BatchNorm2d(10))
        self.gap = nn.AvgPool2d(3)

    def forward(self, x):
        out = F.relu(self.conv11(x))
        out = self.pool(F.relu(self.conv12(out)))
        out = F.relu(self.conv21(out))
        out = self.pool(F.relu(self.conv22(out)))
        out = self.pool(F.relu(self.conv3(out)))
        out = self.conv4(out)
        out = self.gap(out)
        out = out.view(-1, 10)
        return self.fc3(out)
| [
"noreply@github.com"
] | shritigupta.noreply@github.com |
33c101538df11c6010c67e6bebf131c73d036b14 | be5e5aebd753ed1f376dc18ce411f0fac6d2f762 | /natuurpunt_purchase_invoice_line/natuurpunt_purchase_invoice_line.py | 33a495d038111220ae374a7dccc018222c6f58a3 | [] | no_license | smart-solution/natuurpunt-purchase | 7d9fcfdde769b6294d8dc705cecc99a177b4573c | 0ac94cb68cee4ef464158720e04007ee12036179 | refs/heads/master | 2021-05-22T04:43:21.594422 | 2020-11-02T13:32:27 | 2020-11-02T13:32:27 | 39,186,322 | 0 | 2 | null | 2020-11-02T13:32:28 | 2015-07-16T08:42:31 | Python | UTF-8 | Python | false | false | 7,272 | py | # -*- coding: utf-8 -*-
##############################################################################
#
# Smart Solution bvba
# Copyright (C) 2010-Today Smart Solution BVBA (<http://www.smartsolution.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
"""
This Module allows to:
1/ Generate invoice lines from purchase order lines for existing invoices
2/ Make the purchase order lines available again when an invoice is deleted
"""
from openerp.osv import fields, osv
from openerp.tools.translate import _
#
#class purchase_order_line_add_to_invoice(osv.osv_memory):
#
# _name = "purchase.order.line.add_to_invoice"
#
# _columns = {
# 'invoice_id': fields.many2one('account.invoice','Factuur', required=True, domain=[('type','=','in_invoice'),('state','in',['draft'])])
# }
#
#
# def add_to_invoice(self, cr, uid, ids, context=None):
# """
# Generate an invoice lines from a purchase order line for an existing invoice
# """
#
# PurchaseOrder = self.pool.get('purchase.order')
# PurchaseOrderLine = self.pool.get('purchase.order.line')
# Invoice = self.pool.get('account.invoice')
# InvoiceLine = self.pool.get('account.invoice.line')
#
# wizard = self.browse(cr, uid, ids[0], context=context)
# inv = Invoice.browse(cr, uid, wizard.invoice_id.id)
# origin = inv.origin
#
# record_ids = context.get('active_ids', [])
# if record_ids:
# for line in PurchaseOrderLine.browse(cr, uid, record_ids, context=context):
# # Do not generate invoice lines for 'confirmed' po lines, only 'approved' should match
# if (not line.invoiced) and (line.state not in ('draft', 'confirmed', 'cancel')):
# acc_id = PurchaseOrder._choose_account_from_po_line(cr, uid, line, context=context)
# invoice_line_data = PurchaseOrder._prepare_inv_line(cr, uid, acc_id, line, context=context)
# invoice_line_data.update({'origin': line.order_id.name, 'invoice_id':wizard.invoice_id.id})
# invoice_line_id = InvoiceLine.create(cr, uid, invoice_line_data, context=context)
# PurchaseOrderLine.write(cr, uid, [line.id], {'invoiced': True, 'invoice_lines': [(4, invoice_line_id)]})
# # Add the PO reference in Brondocument
# if line.order_id.name not in origin:
# origin = origin + ' ' + line.order_id.name
# Invoice.write(cr, uid, [wizard.invoice_id.id], {'origin': origin})
#
# return {
# 'domain': "[('id','in', [%s])]"%(wizard.invoice_id.id),
# 'name': _('Supplier Invoices'),
# 'view_type': 'form',
# 'view_mode': 'tree,form',
# 'res_model': 'account.invoice',
# 'view_id': False,
# 'context': "{'type':'in_invoice', 'journal_type': 'purchase'}",
# 'type': 'ir.actions.act_window'
# }
class invoice_purchase_order_line_add_to_invoice(osv.osv_memory):
    """Wizard that turns approved, not-yet-invoiced purchase order lines
    into invoice lines on the active draft supplier invoice."""

    _name = "invoice.purchase.order.line.add_to_invoice"

    _columns = {
        # FIX: label typo 'Suppler' -> 'Supplier'.
        'partner_id': fields.many2one('res.partner', 'Supplier'),
        'purchase_order_line_ids': fields.many2many('purchase.order.line', 'invoice_po_line_add_rel', 'invoice_id', 'po_line_id', string="Aankooplijnen", domain=[('order_id.invoice_method', '<>', 'picking'), ('state', 'in', ('approved', 'done')), ('invoiced', '=', False)]),
    }

    def default_get(self, cr, uid, fields, context=None):
        """
        Set partner from the active invoice.
        """
        if context is None:
            context = {}
        result = super(invoice_purchase_order_line_add_to_invoice, self).default_get(cr, uid, fields, context=context)
        inv = self.pool.get('account.invoice').browse(cr, uid, context.get('active_id', False))
        result['partner_id'] = inv.partner_id.id
        return result

    def add_to_invoice(self, cr, uid, ids, context=None):
        """
        Generate invoice lines from the selected purchase order lines for the
        active (draft) invoice, and record each PO reference once in the
        invoice's source document ('origin').
        """
        if context is None:
            context = {}

        PurchaseOrder = self.pool.get('purchase.order')
        PurchaseOrderLine = self.pool.get('purchase.order.line')
        Invoice = self.pool.get('account.invoice')
        InvoiceLine = self.pool.get('account.invoice.line')

        wizard = self.browse(cr, uid, ids[0], context=context)
        inv = Invoice.browse(cr, uid, context.get('active_id', False))
        # BUG FIX: 'origin' is False on an invoice without a source document;
        # the original code then crashed on string concatenation and on the
        # 'in' membership test below.
        origin = inv.origin or ''
        for line in wizard.purchase_order_line_ids:
            # Do not generate invoice lines for 'confirmed' PO lines, only
            # 'approved'/'done' should match, and only on a draft invoice.
            if (not line.invoiced) and (line.state not in ('draft', 'confirmed', 'cancel')) and inv.state == 'draft':
                acc_id = PurchaseOrder._choose_account_from_po_line(cr, uid, line, context=context)
                invoice_line_data = PurchaseOrder._prepare_inv_line(cr, uid, acc_id, line, context=context)
                invoice_line_data.update({'origin': line.order_id.name, 'invoice_id': inv.id})
                invoice_line_id = InvoiceLine.create(cr, uid, invoice_line_data, context=context)
                PurchaseOrderLine.write(cr, uid, [line.id], {'invoiced': True, 'invoice_lines': [(4, invoice_line_id)]})
                # Add the PO reference in Brondocument (once per order).
                if line.order_id.name not in origin:
                    origin = (origin + ' ' + line.order_id.name).strip()
        # Write the accumulated origin once, instead of once per line as the
        # original did (same final state, fewer ORM writes).
        Invoice.write(cr, uid, [inv.id], {'origin': origin})
        return False
class account_invoice(osv.osv):
    _inherit = "account.invoice"

    def unlink(self, cr, uid, id, context=None):
        """
        Reset the purchase order line visibility when an invoice is deleted.

        The previous version only processed the first record of ``id`` and
        passed ``context=None`` to super, discarding the caller's context.
        """
        PurchaseOrderLine = self.pool.get('purchase.order.line')
        po_line_ids = []
        for inv in self.browse(cr, uid, id, context=context):
            for line in inv.invoice_line:
                for po_line in line.purchase_order_line_ids:
                    po_line_ids.append(po_line.id)
        if po_line_ids:
            PurchaseOrderLine.write(cr, uid, po_line_ids, {'invoiced': False, 'invoice_lines': False}, context=context)
        return super(account_invoice, self).unlink(cr, uid, id, context=context)
class account_invoice_line(osv.osv):
    _inherit = "account.invoice.line"

    def unlink(self, cr, uid, id, context=None):
        """
        Reset the purchase order line visibility when an invoice line is deleted.

        The previous version only processed the first record of ``id`` and
        passed ``context=None`` to super, discarding the caller's context.
        """
        PurchaseOrderLine = self.pool.get('purchase.order.line')
        po_line_ids = []
        for line in self.browse(cr, uid, id, context=context):
            for po_line in line.purchase_order_line_ids:
                po_line_ids.append(po_line.id)
        if po_line_ids:
            PurchaseOrderLine.write(cr, uid, po_line_ids, {'invoiced': False, 'invoice_lines': False}, context=context)
        return super(account_invoice_line, self).unlink(cr, uid, id, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| [
"fabian@deviance-network.com"
] | fabian@deviance-network.com |
227da0ccd56f8c0552de0a25bbc4096f57b38a0a | 88fde4aa672d1db9197d67a7627470645ab71c94 | /tutorial/Jochen_Testing.py | e5646450fbeae0ef2ecf3ac1aff623082b9c3ef1 | [
"MIT"
] | permissive | ErikaNicole/Gene-Expression-Data-Analysis-with-Gaussian-Process | 28d92e3e6763cd47f07ed7cdfcdcc04e524579a0 | 33e49d9bc824cb4107a2fa02f3608fb1bced0bff | refs/heads/master | 2023-02-16T01:00:38.381767 | 2021-01-11T17:02:10 | 2021-01-11T17:02:10 | 289,285,332 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,591 | py | # 1. Import
import sys
sys.path.append('../') # Necessary to access classes functions from other folder
import numpy as np
import matplotlib.pyplot as plt
import os
import pandas as pd
import csv as cv
import classes.data_prep as prep
import classes.gp as gp
import classes.data_visualisation as visualisation
import classes.optimisation as optimisation
import classes.model_selection as model_selection
from matplotlib.backends.backend_pdf import PdfPages
import warnings # Necessary to avoid certain common warnings from coming up. Won't affect inference.
warnings.filterwarnings('ignore')
# warnings.filterwarnings(action='once')
my_path = os.getcwd()
# my_path = os.path.dirname(os.path.realpath('__file__'))
# 2. Import Data from Excel and Prep it
# Collecting real data from an excel spreadsheet is made easy thanks to the gel_excel() function in the data_prep class.
# Similarly, to prep the collected data we need to convert time to hours, normalise the data and detrend it.
# The choice of the detrending parameter is made below (initial value exp(-2); presumably tuned per dataset - confirm).
# NOTE(review): get_excel() appears to prompt interactively for each dataset - confirm before batch use.
print("Observed Cells Dataset")
df = prep.Data_Input().get_excel()
print("Control Cells Dataset")
df_control = prep.Data_Input().get_excel()
# Hard-coded cell counts for the two spreadsheets loaded above.
number_of_observed_cells = 19
number_of_control_cells = 25
# Check no NaNs are present/remove if so, then store in list.
observed_cells = prep.Data_Input().remove_nans(number_of_observed_cells, df)
control_cells = prep.Data_Input().remove_nans(number_of_control_cells, df_control)
# Time in Hours
# Column 0 holds timestamps; rows 0-1 are skipped (assumed header rows - TODO confirm)
# and values are converted from milliseconds to hours.
times = df.iloc[:,0]
times = times.to_numpy()[2:len(times)]
times = times/(60*60*1000)
control_times = df_control.iloc[:,0]
control_times = control_times.to_numpy()[2:len(control_times)]
control_times = control_times/(60*60*1000)
# Normalisation
normalised_cells = prep.Normalisation().normalise_many_cells_from_list(observed_cells)
normalised_control_cells = prep.Normalisation().normalise_many_cells_from_list(control_cells)
# Detrending
detrending_parameter_initial_value = np.exp(-2)
detrended_data = prep.Detrending(alpha = detrending_parameter_initial_value).detrend_data_from_list(times, normalised_cells)
detrended_control_data = prep.Detrending(alpha = detrending_parameter_initial_value).detrend_data_from_list(control_times, normalised_control_cells)[0]
# - - Plot of one Detrended Traces against the Original Normalised Signal
# detrend_data_from_list appears to return (detrended, fit_x, fit_y) - confirm against data_prep.
trendfit_x = detrended_data[1]
trendfit_y_cell = detrended_data[2][0]
detrended_cell = detrended_data[0][0]
detrended_data = detrended_data[0]
Fig = plt.figure("Detrended Data", figsize = (10, 6))
plt.xlabel("Time (Hours)")
plt.ylabel("Gene Expression")
plt.plot(times[0:len(detrended_cell)], detrended_cell, label = "detrended cell")
plt.plot(times[0:len(detrended_cell)], normalised_cells[0], label = "normalised cell")
plt.plot(trendfit_x, trendfit_y_cell, ls = '--', color = "black", label = "SE fit")
plt.legend()
plt.xlim(0, times[0:len(detrended_cell)][-1])
plt.ylim(-3, 4)
Fig.savefig(os.path.join(my_path, "results/Example_of_a_Detrended Cell.pdf"))
print("Detrending Completed")
# 3. Model Selection Process for all observed and control cells
# This model selection relies on the bootstrap approximation of the distribution of LLRs for a population of non-oscillating cells.
# This is then used as a frame of reference to compare our observed LLR distributions to identify both visually and analytically whether
# our sample of observed cells are behaving in an oscillatory or non-oscillatory manner. Analytically the estimation of q-values is used.
# For more information on the Model Selection process in detail I recommend checking out the documentation.
# Warning: model selection is currently the most expensive in terms of running time.
print("Starting Model Selection...")
modelselection = model_selection.ModelSelection()
modelselection_obs = modelselection.model_selection_for_list_new(observed_timepoints = times, observed_cells = detrended_data, number_of_synthetic_cells = 2000, control_q_value = 0.05, initial_guess = [0.001, 0.5, 0.5, 0.5])
# Rename the generically-named output plot so the control run below does not overwrite it.
os.rename(os.path.join(my_path, "results/LLR_Distribution_Plot.pdf"), os.path.join(my_path, "results/LLR_Distribution_Plot_(Observed Group).pdf"))
modelselection_control = modelselection.model_selection_for_list_new(observed_timepoints = control_times, observed_cells = detrended_control_data, number_of_synthetic_cells = 2000, control_q_value = 0.05, initial_guess = [0.001, 0.5, 0.5, 0.5])
# Rename plot for the control group as well.
os.rename(os.path.join(my_path, "results/LLR_Distribution_Plot.pdf"), os.path.join(my_path, "results/LLR_Distribution_Plot_(Control Group).pdf"))
print("... Success!!!!!")
| [
"63018077+ErikaNicole@users.noreply.github.com"
] | 63018077+ErikaNicole@users.noreply.github.com |
59c65295bbf233c1466985d1aa33bafac20aa3fe | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_95/1152.py | a5a2f7fcec24d2ae43109115e3074698189fdd34 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,234 | py | #!/usr/bin/env python
#-*- coding:utf-8 -*-
from string import ascii_lowercase
from pprint import pprint
import sys, os
# Known googlerese/plaintext pair used to derive the substitution cipher.
sample_googlerese = """ejp mysljylc kd kxveddknmc re jsicpdrysi
rbcpc ypc rtcsra dkh wyfrepkym veddknkmkrkcd
de kr kd eoya kw aej tysr re ujdr lkgc jv
"""
sample_answer = """our language is impossible to understand
there are twenty six factorial possibilities
so it is okay if you want to just give up
"""
# Mapping from googlerese character to plain character; filled in by
# make_char_mapping().  'q' -> 'z' is given by the problem statement and
# spaces map to themselves.
char_map = dict()
for c in ascii_lowercase:
    char_map[c] = ""
char_map['q'] = 'z'
char_map[' '] = ' '
def make_char_mapping():
    """Derive the googlerese -> plaintext mapping from the sample pair,
    then deduce the single letter the samples never decode: it must be
    the image of 'z'."""
    for plain, coded in zip(sample_answer, sample_googlerese):
        if coded in ascii_lowercase:
            char_map[coded] = plain
    mapped = set(char_map.values())
    for letter in ascii_lowercase:
        if letter not in mapped:
            char_map['z'] = letter
def decode(input_str):
    """Translate one line of googlerese via char_map, dropping newlines."""
    return ''.join(char_map[ch] for ch in input_str if ch != '\n')
if __name__ == "__main__":
    # Build the cipher table once, then decode each test case from the
    # input file given as the first CLI argument.  (Python 2: xrange and
    # the print statement.)
    make_char_mapping()
    filename = sys.argv[1]
    template = "Case #%d: %s"
    with open(filename) as r:
        # First line of the file is the number of test cases.
        casenum = int(r.readline())
        for i in xrange(casenum):
            input_str = r.readline()
            print template % (i + 1, decode(input_str))
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
e7ed020d3d17b161765a66a3e7f4d7b554458f09 | 1cddb2023b2ca7137102dd4696e9c1f1ace70b6b | /administrator/views.py | 435d5c625efadc1db5bf8356a8e7b48e25894763 | [] | no_license | tafelaj/headway | df3532245ec67ea2118a5b6365cf8c68fc0fe4ef | c37aff6f009ab3d62607da3f5e01bd6284af9ce2 | refs/heads/master | 2022-12-31T22:10:56.832159 | 2020-10-23T16:39:37 | 2020-10-23T16:39:37 | 285,689,406 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,153 | py | from django.shortcuts import render, redirect
from django.urls import reverse_lazy, reverse
from django.views.generic import TemplateView, UpdateView, CreateView, DetailView, DeleteView, ListView, FormView
from django.db import transaction
from headway.models import Program, News, Course, Exam, Lecturer, Student, ExamRegister, Institution, User, GradingList
from administrator.models import Invoice, Charges, Payments
from django.db.models import Q
from django.utils.decorators import method_decorator
from .import forms
# from headway.mixins import RequestFormKwargsMixin
from Sentinel.decorators import user_is_admin
from django.core.paginator import Paginator
from django.contrib import messages
from django.contrib.messages.views import SuccessMessageMixin
import datetime
@method_decorator(user_is_admin, name='get')
class InstitutionDetail(DetailView):
    """Read-only detail page for a single institution (admins only)."""
    template_name = 'administrator/institute_details.html'
    model = Institution
@method_decorator(user_is_admin, name='get')
class DashBoard(TemplateView): # add pagination
template_name = 'administrator/dashboard.html'
def get(self, request, *args, **kwargs):
programs = Program.objects.filter(institution=self.request.user.institution).order_by('id')
paginator = Paginator(programs, 10)
page = request.GET.get('page')
programs = paginator.get_page(page)
context = {'programs': programs, }
return render(request, self.template_name, context)
# #####################Lecturers#############
@method_decorator(user_is_admin, name='get')
class Lecturers(TemplateView):
    """List every lecturer attached to the admin's institution."""
    template_name = 'administrator/lecturers.html'

    def get(self, request, *args, **kwargs):
        institution_lecturers = Lecturer.objects.filter(
            user__institution=self.request.user.institution)
        return render(request, self.template_name,
                      {'lecturers': institution_lecturers})
@method_decorator(user_is_admin, name='get')
@method_decorator(user_is_admin, name='post')
class LecturerUpdate(FormView):
    """Edit a lecturer's user account and lecturer profile together."""
    template_name = 'administrator/lecturer_details.html'

    def get(self, request, *args, **kwargs):
        lecturer = Lecturer.objects.get(pk=self.kwargs['pk'])
        user_form = forms.UserForm(instance=lecturer.user)
        lecturer_form = forms.LecturerForm(instance=lecturer)
        context = {'user_form': user_form,
                   'lecturer_form': lecturer_form, }
        return render(request, self.template_name, context)

    def post(self, request, *args, **kwargs):
        lecturer = Lecturer.objects.get(pk=self.kwargs['pk'])
        user_form = forms.UserForm(request.POST, instance=lecturer.user)
        lecturer_form = forms.LecturerForm(request.POST, instance=lecturer)
        if user_form.is_valid() and lecturer_form.is_valid():
            user_form.save()
            lecturer_form.save()
            # Only flash success when the save actually happened; the old
            # code reported success and redirected even for invalid input.
            messages.success(request, "Lecturer Information updated")
            return redirect('administrator:lecturers')
        # Re-render with the bound forms so validation errors are visible.
        context = {'user_form': user_form,
                   'lecturer_form': lecturer_form, }
        return render(request, self.template_name, context)
# #####################Students#############
@method_decorator(user_is_admin, name='get')
@method_decorator(user_is_admin, name='post')
class Students(FormView):
    """Filter and list the active students of one program/year/semester."""
    template_name = 'administrator/students.html'

    def get(self, request, *args, **kwargs):
        """Show the program list and the year/semester filter form."""
        filter_form = forms.StudentFilterForm()
        program_list = Program.objects.filter(institution=self.request.user.institution)
        context = {
            'program_list': program_list,
            'filter_form': filter_form,
        }
        return render(request, self.template_name, context)

    def post(self, request, *args, **kwargs):
        """Apply the filter and render the matching active students."""
        filter_form = forms.StudentFilterForm(request.POST)
        if not filter_form.is_valid():
            # The old code fell through with students=None and crashed on
            # students.first(); re-show the filter page instead.
            messages.error(request, 'Invalid Filter - Please Try Again.')
            return self.get(request, *args, **kwargs)
        program_pk = self.request.POST.get('program_pk')
        year = filter_form.cleaned_data['year']
        semester = filter_form.cleaned_data['semester']
        program = Program.objects.get(pk=program_pk)
        students = Student.objects.filter(Q(program=program) &
                                          Q(year=year) &
                                          Q(semester=semester) &
                                          Q(is_active=True))
        context = {
            'students': students,
            'program': program,
            'first_student': students.first(),
            'year': year,
            'semester': semester,
        }
        return render(request, self.template_name, context)
@method_decorator(user_is_admin, name='get')
class StudentDetails(DetailView):
    """Detail page for one student; exposed to the template as 'student'."""
    template_name = 'administrator/student_detail.html'
    model = Student
    context_object_name = 'student'
# adding multiple students
@method_decorator(user_is_admin, name='get')
@method_decorator(user_is_admin, name='post')
class StudentsAdd(FormView):
    """Bulk-create student accounts for one program/year/semester.

    Each submitted row creates a User (username-derived email, fixed
    initial password) plus its Student profile inside one transaction.
    """
    template_name = 'administrator/students_add.html'

    def get(self, request, *args, **kwargs):
        program_pk = self.request.GET.get('program_pk')
        year = self.request.GET.get('student_year')
        semester = self.request.GET.get('student_semester')
        # queries
        program = Program.objects.get(pk=program_pk)
        queryset = User.objects.filter(Q(institution=self.request.user.institution) &
                                       Q(student__program=program) &
                                       Q(student__year=year) &
                                       Q(student__semester=semester))
        formset = forms.StudentSignUpFormSet(queryset=queryset)
        context = {'formset': formset,
                   'program': program,
                   'year': year,
                   'semester': semester, }
        return render(request, self.template_name, context)

    def post(self, request, *args, **kwargs):
        program_pk = self.request.POST.get('program_pk')
        year = self.request.POST.get('student_year')
        semester = self.request.POST.get('student_semester')
        # queries
        program = Program.objects.get(pk=program_pk)
        queryset = User.objects.filter(Q(institution=self.request.user.institution) &
                                       Q(student__program=program) &
                                       Q(student__year=year) &
                                       Q(student__semester=semester))
        formset = forms.StudentSignUpFormSet(request.POST, queryset=queryset)
        if formset.is_valid():
            instances = formset.save(commit=False)
            for instance in instances:
                with transaction.atomic():
                    # get info from non-committed instance
                    # (debug print of the username removed here)
                    username = instance.username
                    institution = self.request.user.institution
                    email = str(username) + '@' + str(institution.short_name) + '.com'
                    # NOTE(review): fixed initial password - students should
                    # be forced to change it on first login.
                    password = 'password1234'
                    # use data to create user
                    user = User.objects.create_user(username, email, password)
                    # update Extra User fields
                    user.first_name, user.middle_name, user.last_name = \
                        instance.first_name, instance.middle_name, instance.last_name
                    user.institution = institution
                    user.save()
                    Student.objects.create(user=user, year=year, semester=semester, program=program)
            messages.success(request, 'The Students Were Added Successfully.')
            if not instances:
                messages.info(request, 'No Changes Made.')
        else:
            messages.warning(request, 'Something Went Wrong, We Could Not Validate The User Data.')
            messages.error(request, formset.errors)
        # students for display on redirect to students list.
        students = Student.objects.filter(Q(program=program) &
                                          Q(year=year) &
                                          Q(semester=semester))
        context = {
            'students': students,
            'program': program,
            'first_student': students.first(),
            'year': year,
            'semester': semester,
        }
        return render(request, 'administrator/students.html', context)
# #####################ExamsViews#############
@method_decorator(user_is_admin, name='get')
class ExamsView(ListView):
    """Paginated list of this institution's exams, earliest start first."""
    template_name = 'administrator/exams.html'
    model = Exam
    ordering = 'start_date'
    context_object_name = 'exams'
    paginate_by = '10'

    def get_queryset(self):
        # Every other listing in this module is scoped to the admin's
        # institution; without this filter exams from all institutions
        # were shown.
        return Exam.objects.filter(
            institution=self.request.user.institution).order_by('start_date')
@method_decorator(user_is_admin, name='post')
@method_decorator(user_is_admin, name='get')
class ExamsAdd(SuccessMessageMixin, CreateView):
    """Create a new exam owned by the admin's institution."""
    model = Exam
    form_class = forms.ExamForm
    template_name = 'administrator/exams_form.html'
    success_message = "%(name)s Exam was Created Successfully"
    success_url = reverse_lazy('administrator:exams')

    def form_valid(self, form):
        # Stamp ownership on the instance before the mixin saves it.
        exam = form.save(commit=False)
        exam.institution = self.request.user.institution
        exam.created_by = self.request.user
        # The original dropped this return value, so the view returned
        # None instead of the redirect response.
        return super(ExamsAdd, self).form_valid(form)
@method_decorator(user_is_admin, name='get')
@method_decorator(user_is_admin, name='post')
class ExamsUpdate(SuccessMessageMixin, UpdateView):
    """Edit an existing exam and flash a per-exam success message."""
    template_name = 'administrator/exams_update_form.html'
    model = Exam
    form_class = forms.ExamForm
    success_url = reverse_lazy('administrator:exams')
    success_message = "%(name)s Was Updated Successfully"
@method_decorator(user_is_admin, name='get')
class ExamsDetail(TemplateView):
    """Exam detail page with the count of registered students."""
    template_name = 'administrator/exam_details.html'

    def get(self, request, *args, **kwargs):
        exam = Exam.objects.get(pk=self.kwargs['pk'])
        registered_count = ExamRegister.objects.filter(exam=exam).count()
        return render(request, self.template_name,
                      {'student_num': registered_count, 'exam': exam})
@method_decorator(user_is_admin, name='post')
class PublishExam(FormView):
    """Publish an exam's results by deactivating the exam."""

    def post(self, request, *args, **kwargs):
        try:
            exam = Exam.objects.get(pk=request.POST.get('exam_id'))
        except Exam.DoesNotExist:
            # The original kept going with exam=None and crashed below on
            # exam.pk; fall back to the exams list instead.
            messages.error(request, 'It Seems The Exam Was Just Deleted')
            return redirect('administrator:exams')
        exam.active = False
        exam.save()
        messages.success(request, 'The Results Were Successfully Published')
        return redirect('administrator:exam_details', pk=exam.pk)
# #####################Notifications#############
class Notifications(ListView):
    """Newest-first news items for the admin's institution."""
    # implement better filtering
    template_name = 'administrator/notifications.html'
    model = News
    context_object_name = 'news'
    paginate_by = 15

    def get_queryset(self):
        return News.objects.filter(
            institution=self.request.user.institution.id).order_by('-create_date')
class NotificationAdd(SuccessMessageMixin, CreateView):
    """Post a news item, stamped with the author and their institution."""
    model = News
    fields = ['title', 'content', 'staff_only', ]
    template_name = 'administrator/add_notification.html'
    success_url = reverse_lazy('administrator:notifications')
    success_message = "%(title)s Was Successfully Posted"

    def form_valid(self, form):
        announcement = form.instance
        announcement.institution = self.request.user.institution
        announcement.created_by = self.request.user
        if self.request.POST.get('staff_only'):
            announcement.staff_only = True
        return super(NotificationAdd, self).form_valid(form)
class NotificationDeleteView(DeleteView):
    """Confirm (GET) and perform (POST) deletion of a news item."""
    model = News
    success_url = reverse_lazy('administrator:notifications')
    template_name = 'administrator/news_confirm_delete.html'

    def delete(self, request, *args, **kwargs):
        # The original flashed "Deleted Successfully" from get(), i.e. on
        # the confirmation page before anything was deleted; flash only
        # after the deletion actually happens.
        response = super(NotificationDeleteView, self).delete(request, *args, **kwargs)
        messages.success(request, 'Notification Deleted Successfully')
        return response
# #####################Courses#############
@method_decorator(user_is_admin, name='get')
@method_decorator(user_is_admin, name='post')
class CourseUpdate(SuccessMessageMixin, UpdateView):
    """Edit the editable fields of one course."""
    template_name = 'administrator/update_course.html'
    model = Course
    fields = ('name', 'code', 'summary', 'credits', 'mandatory', 'semester', 'year', )
    success_url = reverse_lazy('administrator:dash')
    pk_url_kwarg = 'pk'
    success_message = "%(name)s Was Successfully Updated"
@method_decorator(user_is_admin, name='get')
@method_decorator(user_is_admin, name='post')
class CourseCreate(SuccessMessageMixin, CreateView):
    """Create a single course inside the program given by program_pk."""
    template_name = 'administrator/create_course.html'
    form_class = forms.CourseAddForm
    success_url = reverse_lazy('administrator:dash')
    success_message = "%(name)s Was Created Successfully"

    def form_valid(self, form):
        # Attach ownership before the mixin saves the instance.
        form.instance.institution = self.request.user.institution
        form.instance.program = Program.objects.get(pk=self.kwargs['program_pk'])
        return super(CourseCreate, self).form_valid(form)

    def get_context_data(self, **kwargs):
        ctx = super(CourseCreate, self).get_context_data(**kwargs)
        ctx['program'] = Program.objects.get(pk=self.kwargs['program_pk'])
        return ctx
@method_decorator(user_is_admin, name='get')
@method_decorator(user_is_admin, name='post')
class CoursesCreateView(FormView):
    """Bulk add/edit the courses of one program via a model formset."""
    template_name = 'administrator/add_courses.html'

    def get(self, request, *args, **kwargs):
        # Bind the formset to the program's existing courses for editing.
        program = Program.objects.get(pk=self.kwargs['program_pk'])
        formset = forms.AddCourseFormSet(queryset=Course.objects.filter(program=program))
        context = {'program': program,
                   'formset': formset, }
        return render(request, self.template_name, context)

    def post(self, request, *args, **kwargs):
        program = Program.objects.get(pk=self.kwargs['program_pk'])
        formset = forms.AddCourseFormSet(request.POST, queryset=Course.objects.filter(program=program))
        if formset.is_valid():
            instances = formset.save(commit=False)
            for instance in instances:
                # Stamp ownership on each new/changed course before saving.
                instance.institution = self.request.user.institution
                instance.program = Program.objects.get(pk=self.kwargs['program_pk'])
                instance.save()
            messages.success(request, "Courses Were Successfully Added/Updated")
            if not instances:
                messages.info(request, 'No Changes Made')
        else:
            # NOTE(review): redirecting away on error loses the bound
            # formset, so the user cannot correct the flagged fields.
            messages.warning(request, "Sorry Something Went Wrong And We Could Not Add The Course")
            messages.error(request, formset.errors)
        return redirect('administrator:dash')
# #####################Programs#############
@method_decorator(user_is_admin, name='get')
@method_decorator(user_is_admin, name='post')
class AddProgram(SuccessMessageMixin, CreateView):
    """Create a program, then jump straight to adding its courses."""
    template_name = 'administrator/program_add.html'
    model = Program
    fields = ('name', 'summary', 'level')
    success_message = "%(name)s Was Successfully Added"

    def form_valid(self, form):
        # Stamp the owning institution before the mixin saves the instance.
        form.instance.institution = self.request.user.institution
        return super(AddProgram, self).form_valid(form)

    def get_success_url(self):
        return reverse('administrator:add_courses',
                       kwargs={'program_pk': self.object.pk})
@method_decorator(user_is_admin, name='get')
class DeleteProgram(DeleteView):
    """Confirm (GET) and perform (POST) deletion of a program."""
    template_name = 'administrator/program_confirm_delete.html'
    model = Program
    success_url = reverse_lazy('administrator:dash')

    def delete(self, request, *args, **kwargs):
        # The original flashed "Deleted Successfully" from get(), i.e. on
        # the confirmation page before anything was deleted; flash only
        # after the deletion actually happens.
        response = super(DeleteProgram, self).delete(request, *args, **kwargs)
        messages.success(request, 'Program Deleted Successfully')
        return response
@method_decorator(user_is_admin, name='get')
@method_decorator(user_is_admin, name='post')
class ProgramUpdate(SuccessMessageMixin, UpdateView):
    """Edit a program's name, summary and level."""
    template_name = 'administrator/program_update.html'
    model = Program
    fields = ('name', 'summary', 'level')
    success_url = reverse_lazy('administrator:dash')
    success_message = "%(name)s Was Successfully Updated"
# ######## Accounts######
class Accounts(TemplateView):
    """Static landing page for the accounts section."""
    template_name = 'administrator/accounts.html'
@method_decorator(user_is_admin, name='get')
@method_decorator(user_is_admin, name='post')
class InvoiceStudentsFilter(FormView):
    """Pick the program/year/semester group to invoice, then hand off to
    CreateInvoices with the chosen number of charge lines."""
    template_name = 'administrator/invoice_student_filter.html'

    def get(self, request, *args, **kwargs):
        filter_form = forms.InvoiceStudentFilterForm()
        program_list = Program.objects.filter(institution=self.request.user.institution)
        context = {
            'program_list': program_list,
            'filter_form': filter_form,
        }
        return render(request, self.template_name, context)

    def post(self, request, *args, **kwargs):
        filter_form = forms.InvoiceStudentFilterForm(request.POST)
        if not filter_form.is_valid():
            # The original fell through and redirected with None kwargs,
            # raising NoReverseMatch; re-render the form with its errors.
            program_list = Program.objects.filter(institution=self.request.user.institution)
            return render(request, self.template_name,
                          {'program_list': program_list, 'filter_form': filter_form})
        program_pk = self.request.POST.get('program_pk')
        year = filter_form.cleaned_data['year']
        semester = filter_form.cleaned_data['semester']
        # Default to 5 charge rows when the field was left blank.
        number_of_charges = 5
        if filter_form.cleaned_data['number_of_charges']:
            number_of_charges = filter_form.cleaned_data['number_of_charges']
        return redirect('administrator:add_invoice', program_pk=program_pk, year=year, semester=semester,
                        extra=number_of_charges)
class CreateInvoices(FormView):
    """Create one invoice per active student of a program/year/semester,
    each carrying its own copy of the submitted charge lines."""
    # TODO: add a way for students who just arrived to get an invoice.
    template_name = 'administrator/invoice_create.html'

    def get(self, request, *args, **kwargs):
        program = Program.objects.get(pk=self.kwargs['program_pk'])
        invoice_form = forms.InvoiceForm
        extra = None
        if self.kwargs['extra']:
            extra = int(self.kwargs['extra'])
        ChargesFormset = forms.forms.modelformset_factory(Charges, form=forms.ChargesForm, extra=extra)
        charges_formset = ChargesFormset(queryset=None)
        context = {'program': program,
                   'year': self.kwargs['year'],
                   'semester': self.kwargs['semester'],
                   'invoice_form': invoice_form,
                   'charges_formset': charges_formset,
                   'extra': extra, }
        return render(request, self.template_name, context)

    def post(self, request, *args, **kwargs):
        extra = self.kwargs['extra']
        ChargesFormset = forms.forms.modelformset_factory(Charges, form=forms.ChargesForm, extra=extra)
        charges_formset = ChargesFormset(request.POST)
        invoice_form = forms.InvoiceForm(request.POST)
        program = Program.objects.get(pk=self.kwargs['program_pk'])
        students = Student.objects.filter(Q(program=program) &
                                          Q(year=self.kwargs['year']) &
                                          Q(semester=self.kwargs['semester']) &
                                          Q(is_active=True))
        if not (invoice_form.is_valid() and charges_formset.is_valid()):
            # The original flashed success and redirected even when
            # validation failed and nothing was created.
            messages.error(request, 'The Invoices Could Not Be Created - Please Check The Form.')
            return redirect('administrator:invoice_list')
        charges = charges_formset.save(commit=False)
        # Total the charges once so the invoices never need a second save.
        total_amount = sum(charge.amount for charge in charges)
        invoice_list = [Invoice(institution=self.request.user.institution,
                                prepared_by=self.request.user,
                                student=student, total_amount=total_amount)
                        for student in students]
        # Remember the time just before the bulk insert so the freshly
        # created invoices can be re-selected afterwards (bulk_create does
        # not return primary keys on all backends).
        now = datetime.datetime.now()
        Invoice.objects.bulk_create(invoice_list)
        invoices = Invoice.objects.filter(institution=self.request.user.institution,
                                          prepared_by=self.request.user,
                                          date__gte=now).values_list('id', flat=True)
        for invoice_id in invoices:
            for charge in charges:
                # Reset the pk so each invoice gets its own copy of the
                # charge; the old code re-pointed the same rows at every
                # invoice in turn, leaving all charges on the last one.
                charge.pk = None
                charge.invoice_id = invoice_id
                charge.save()
        messages.success(request, 'Invoices Were Successfully Created')
        return redirect('administrator:invoice_list')
class RecordPayments(SuccessMessageMixin, CreateView):
    """Record a payment against a student looked up by student number."""
    template_name = 'administrator/make_payment.html'
    model = Payments
    success_message = 'Payment For %(amount)s From %(student_id)s Successfully Recorded.'
    form_class = forms.PaymentForm

    def form_valid(self, form):
        payment = form.save(commit=False)
        student_number = form.cleaned_data['student_id']
        try:
            student = Student.objects.get(user__username=student_number)
        except (Student.DoesNotExist, Student.MultipleObjectsReturned):
            # The original wrote ``except A or B`` which evaluates to A and
            # therefore never caught MultipleObjectsReturned.
            messages.error(self.request, 'The Student Could Not Be Found. Check The Student ID you Entered.')
            return redirect('administrator:add_payment')
        payment.student = student
        payment.paid_to = self.request.user
        return super(RecordPayments, self).form_valid(form)
class PaymentList(ListView):
    """Newest-first payments received by this institution's staff."""
    template_name = 'administrator/payment_list.html'
    context_object_name = 'payments'
    # The original set ``ordering = 20`` - an integer is not a valid
    # ordering and was ignored because get_queryset() orders explicitly.
    # Matching ExamsView, this was almost certainly meant as a page size.
    paginate_by = 20

    def get_queryset(self):
        return Payments.objects.filter(
            Q(paid_to__institution=self.request.user.institution)).order_by('-date')
class PaymentDetails(DetailView):
    """Detail page for one payment; exposed to the template as 'payment'."""
    template_name = 'administrator/payment_details.html'
    model = Payments
    context_object_name = 'payment'
class InvoiceList(ListView):
    """Newest-first invoices issued by the admin's institution."""
    template_name = 'administrator/invoice_list.html'
    context_object_name = 'invoices'
    # ``ordering = 20`` was invalid and ignored (get_queryset orders
    # explicitly); matching ExamsView this was meant as a page size.
    paginate_by = 20

    def get_queryset(self):
        return Invoice.objects.filter(
            Q(institution=self.request.user.institution)).order_by('-date')
class InvoiceUpdate(SuccessMessageMixin, UpdateView):
    """Edit an existing invoice."""
    model = Invoice
    # UpdateView raises ImproperlyConfigured when neither ``fields`` nor
    # ``form_class`` is set; the original defined neither, so every
    # request to this view failed.
    fields = '__all__'  # TODO(review): restrict to the editable subset
    template_name = 'administrator/invoice_update.html'
    success_url = reverse_lazy('administrator:invoice_list')
    success_message = 'Invoice Updated!'
class InvoiceDelete(DeleteView):
    """Confirm (GET) and perform (POST) deletion of an invoice."""
    model = Invoice
    template_name = 'administrator/invoice_confirm_delete.html'
    success_url = reverse_lazy('administrator:invoice_list')

    def delete(self, request, *args, **kwargs):
        # The original flashed "Deleted Successfully" from get(), i.e. on
        # the confirmation page before anything was deleted; flash only
        # after the deletion actually happens.
        response = super(InvoiceDelete, self).delete(request, *args, **kwargs)
        messages.success(request, 'Invoice Deleted Successfully')
        return response
class AddGradingValues(FormView):
    """Maintain the institution's grade-band table via a model formset."""
    model = GradingList
    template_name = 'administrator/add_gradings.html'
    success_message = 'Grading Values Have Been Updated.'

    def get(self, request, *args, **kwargs):
        queryset = GradingList.objects.filter(institution=self.request.user.institution)
        grade_list_formset = forms.GradeListFormSet(queryset=queryset)
        context = {
            'grade_list_formset': grade_list_formset,
        }
        return render(request, self.template_name, context)

    def post(self, request, *args, **kwargs):
        grade_list_formset = forms.GradeListFormSet(request.POST)
        if grade_list_formset.is_valid():
            instances = grade_list_formset.save(commit=False)
            for instance in instances:
                instance.institution = self.request.user.institution
                # Widen each band by a tiny epsilon so marks exactly on a
                # boundary fall into a band - presumably; TODO confirm.
                instance.lower = instance.lower - 0.0001
                instance.upper = instance.upper + 0.0001
                instance.save()
            messages.success(request, 'Grading List Updated')
        else:
            # The original flashed success and left stray debug print()
            # calls even when validation failed; report the failure.
            messages.error(request, 'Grading List Could Not Be Updated - Please Check The Form.')
        return redirect('administrator:institution_details', pk=self.request.user.institution_id)
"tafelaphiri@live.com"
] | tafelaphiri@live.com |
77dcd58897fa39cc6326e1fc2178a0adc30ff87b | cbdef2e8ed259adc4653ade34db12d8bcc0cea9f | /dominion/cards/Wizard_Student.py | fa8a42e3234ec5e594f4503326b3c3dd61788893 | [] | no_license | dwagon/pydominion | 8dd5afef8ec89c63ade74c4ae6c7473cd676799f | 545709f0a41529de74f33aa83b106c456900fa5b | refs/heads/main | 2023-08-29T10:02:26.652032 | 2023-08-23T02:25:00 | 2023-08-23T02:25:00 | 18,776,204 | 1 | 0 | null | 2023-08-23T02:25:02 | 2014-04-14T20:49:28 | Python | UTF-8 | Python | false | false | 3,568 | py | #!/usr/bin/env python
import unittest
from dominion import Game, Card, Piles
###############################################################################
class Card_Student(Card.Card):
    """Student - a Wizard/Liaison action from the Allies expansion."""

    def __init__(self):
        Card.Card.__init__(self)
        self.cardtype = [
            Card.CardType.ACTION,
            Card.CardType.WIZARD,  # pylint: disable=no-member
            Card.CardType.LIAISON,
        ]
        self.base = Card.CardExpansion.ALLIES
        self.cost = 3
        self.name = "Student"
        self.actions = 1
        self.desc = """+1 Action;
        You may rotate the Wizards;
        Trash a card from your hand. If it's a Treasure, +1 Favor and put this onto your deck."""

    def special(self, game, player):
        """Offer a Wizard-pile rotation, then force a trash; trashing a
        Treasure earns a favor and puts this card back on the deck."""
        wants_rotation = player.plr_choose_options(
            "Do you want to rotate the Wizards?",
            ("Don't change", False),
            ("Rotate", True),
        )
        if wants_rotation:
            game["Wizards"].rotate()
        choice = player.plr_trash_card(
            prompt="Pick a card to trash", num=1, force=True
        )
        if choice and choice[0].isTreasure():
            player.favors.add(1)
            player.piles[Piles.PLAYED].remove(self)
            player.add_card(self, "deck")
###############################################################################
class TestStudent(unittest.TestCase):
    """Tests for the Student wizard card."""

    def setUp(self):
        self.g = Game.TestGame(numplayers=1, initcards=["Wizards"], use_liaisons=True)
        self.g.start_game()
        self.plr = self.g.player_list()[0]

    def _draw_student(self):
        """Pull cards off the Wizards pile until the Student turns up.

        The same while-loop was copy-pasted into all three tests.
        """
        while True:
            card = self.g["Wizards"].remove()
            if card.name == "Student":
                return card

    def test_play_trash_treas(self):
        """Play a student - don't rotate, but trash treasure"""
        card = self._draw_student()
        self.plr.piles[Piles.HAND].set("Copper", "Silver", "Gold", "Estate")
        self.plr.add_card(card, Piles.HAND)
        self.plr.test_input = ["Don't change", "Trash Copper"]
        favors = self.plr.favors.get()
        self.plr.play_card(card)
        self.assertIn("Copper", self.g.trashpile)
        self.assertIn("Student", self.plr.piles[Piles.DECK])
        self.assertEqual(self.plr.favors.get(), favors + 1)

    def test_play_trash_non_treas(self):
        """Play a student - don't rotate, but trash a non treasure"""
        card = self._draw_student()
        self.plr.piles[Piles.HAND].set("Copper", "Silver", "Gold", "Estate")
        self.plr.add_card(card, Piles.HAND)
        self.plr.test_input = ["Don't change", "Trash Estate"]
        favors = self.plr.favors.get()
        self.plr.play_card(card)
        self.assertIn("Estate", self.g.trashpile)
        self.assertNotIn("Student", self.plr.piles[Piles.DECK])
        self.assertEqual(self.plr.favors.get(), favors)

    def test_play_trash_rotate(self):
        """Play a student - rotate, and trash a non treasure"""
        card = self._draw_student()
        self.plr.piles[Piles.HAND].set("Copper", "Silver", "Gold", "Estate")
        self.plr.add_card(card, Piles.HAND)
        self.plr.test_input = ["Rotate", "Trash Estate"]
        self.plr.play_card(card)
        # After rotation the next Wizard off the pile is the Conjurer.
        card = self.g["Wizards"].remove()
        self.assertEqual(card.name, "Conjurer")
###############################################################################
# Run the test suite when this module is executed directly.
if __name__ == "__main__":  # pragma: no cover
    unittest.main()
# EOF
| [
"dougal.scott@gmail.com"
] | dougal.scott@gmail.com |
3bc6136b64489930bc574a66c873d9d395954d08 | 08249246cf7afc719d7b1a4945f0a3299856eff7 | /Make a dic.py | 3e0102bc9d67830fab1898c3a1ac4156ac76f5e2 | [] | no_license | dashasur/insert-table | b44ef1f983c5f30e27344b0ff4b8ffee0ded23bc | 35a3a9429544c7a853948b159e7e74c69dc747d5 | refs/heads/master | 2020-06-03T05:30:24.964698 | 2019-07-08T22:36:27 | 2019-07-08T22:36:27 | 191,461,536 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 675 | py | import psycopg2
# Build a {currency_name: id} lookup from the "Public".currency table and
# print it (plus the id for 'BTC') as a smoke test.
#
# conn/cursor are pre-initialised so that the except/finally blocks cannot
# themselves raise NameError when psycopg2.connect() fails (previously both
# referenced `conn` before it was ever bound in that case).
conn = None
cursor = None
try:
    conn = psycopg2.connect(dbname='daria', user='daria', password='dasha50', host='192.168.4.12', port='5432')
    cursor = conn.cursor()
    postgres_select_query = """Select id,name from "Public".currency"""
    cursor.execute(postgres_select_query)
    # Map each currency name to its primary key.
    d = {name: row_id for row_id, name in cursor.fetchall()}
    print(d)
    print(d['BTC'])
    conn.commit()
except (Exception, psycopg2.Error) as error:
    # Report unconditionally: the original only printed when `conn` already
    # existed, silently hiding connection failures.
    print("Failed", error)
finally:
    # Closing database connection.
    if cursor:
        cursor.close()
    if conn:
        conn.close()
        print("PostgreSQL connection is closed")
| [
"surtaevad@gmail.com"
] | surtaevad@gmail.com |
cf6089cec9f0d2fe47ea4317a46c9d8b5f5e8db2 | 452c7e30ffca48a1796ef96acbbc96877578a501 | /urls.py | 6cb576c77ac02ce9a5ed6889d5cf21bd49448b67 | [] | no_license | willadamskeane/obieconnect | 96599933383e1e7f6ea6046503979a1b5e41302b | 6f07c7e6fa5b86d9f28943352623f698cc2d658d | refs/heads/master | 2016-08-04T10:27:31.213855 | 2012-04-28T21:34:21 | 2012-04-28T21:34:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,265 | py | from django.conf.urls.defaults import patterns, include, url
from django.contrib import admin
from django.views.generic.simple import direct_to_template
from bootstrap.views import *
admin.autodiscover()  # register each installed app's admin.py with the admin site

# URL routing (legacy Django 1.x `patterns()` syntax; views come from the
# star-import of bootstrap.views above).
urlpatterns = patterns('',
    (r'^admin/', include(admin.site.urls)),
    # Registration (django-registration app)
    (r'^accounts/', include('registration.urls')),
    (r'^$', home),
    (r'^inside/$', inside),
    (r'^directory', directory),
    (r'^course/(?P<course_id>\d+)/$', course_detail),
    # Ajax Form
    (r'^ajax_form', ajax_form),
    (r'^ajax_example', ajax_example),
    # modal dialog
    (r'^modal_dialog/$', modal_dialog),
    (r'^text_modal_dialog/$', text_modal_dialog),
    # ajax autocomplete
    (r'^ajax_autocomplete/$', ajax_autocomplete),
    (r'^ajax_autocomplete_lookup/$', ajax_autocomplete_lookup),
    (r'^ajax_autocomplete_get_selected_item/$', ajax_autocomplete_get_selected_item),
    # Popovers
    (r'^popover/$', popover),
    # Geolocation
    (r'^geolocation/$', geolocation),
    # humans.txt / robots.txt rendered straight from templates as text/plain
    ('^humans.txt$', direct_to_template, {'template':'humans.txt', 'mimetype':'text/plain'}),
    ('^robots.txt$', direct_to_template, {'template':'robots.txt', 'mimetype':'text/plain'}),
)
| [
"willadamskeane@gmail.com"
] | willadamskeane@gmail.com |
49f96025b75bc42f6ff032811ed55e1eaeca7270 | c8615c0e06197ae8968e884ab0620076f923e9a9 | /num_str.py | 0dbc6949919fbd526341134e9b54b71fa8bfde40 | [] | no_license | bafss/python | 95c20a10800d5a47dc5a516456f7401376936ecb | 3499c02b1f2106e060aa5abf11da1cae1820e2bf | refs/heads/master | 2020-03-25T15:08:52.925050 | 2018-08-07T12:38:24 | 2018-08-07T12:38:24 | 143,868,545 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,800 | py | # -*- coding:utf-8 -*-
# Basic types demo: int, float, str, bool, None, list, dict, set.
# (Comments translated to English; locals renamed so they no longer shadow
# the builtins `str` and `sum`.)
print(type(int('11111')))
print(type(11111111111111111111111))
print(type(11111111111.11111111111111111111))
print(4/2,9/3)
print(r'''i'am ok "{a}\t \\\\''' . format(a = 5)) # r prefix: raw string, backslashes are kept literally
# In memory Python 3 str is Unicode; encode to UTF-8 when saving to disk or transmitting.
print('123文档')
print(b'\xe4\xb8\xad'.decode('utf-8').encode('utf-8')) # bytes: one byte per element; the type used for files/transport
print(b'123\xe6\x96\x87\xe6\xa1\xa3'.decode('utf-8'))
print(b'123\xe6\x96\x87\xe6\xa1\xe6\x96\x87'.decode('utf-8', errors='ignore')) # ignore undecodable byte sequences
print(type('中文'))
print(len('中文')) # 2: len() counts code points (characters), not bytes
s = 'abc'  # renamed from `str` to avoid shadowing the builtin
print(s.replace("a","AB"))
print(s)
print(" ss ".strip()) #ss
print(type(True),type(False)) #and or not
print(type(None))
list1 = [4,1,2,5,-8] # lists are ordered
print(type(list1))
print(len(list1))
list1.insert(1,'insert')
list1.append('append')
print(list1)
print(list1.pop()) #append
arr = [2,-9,5,1]
arr.sort()
print(arr)
dict1 = {'name':'覃佳','age':18}
print(type(dict1))
print(dict1['name'])
print('name' in dict1)
print('sex' in dict1)
print(dict1.get('name'))
print(dict1.get('sex',-1))
del dict1['name']
print(dict1)
print(dict1.pop('age'))
print(dict1)
print(set([5,5,1,1,2,2,3,4])) # sets: unique elements, unordered
age = 6
if age > 18:
    print('adult')
elif age > 6:
    print('teenager')
else:
    print('kid')
total = 0  # renamed from `sum` to avoid shadowing the builtin
for i in range(0,101):
    total += i
print(total)
print(type(range(1,100)))
total = 0
n = 0
while n < 10:
    n += 1
    print(n)
    if n > 7:
        break
    if n % 2 == 0:
        continue
    total += n
    print(n)
print("///",total,n)
| [
"lilizhao@sogou-inc.com"
] | lilizhao@sogou-inc.com |
19c2317d62bdc44429333c7da02931c3fcf51573 | 64f471e42e2abe481a721e254f499c5b965e1464 | /captive_portal/policy_list/policylist_control.py | bfc230ee12fbf307f00bbb558e3506ed2dc6802e | [] | no_license | gwnauto/GWN_Cloud_regressiontest | 5a57a05e01ef24ee31612a1406a22e7e16646836 | 9cff9c9e828636192c215e85e041593719fa4a59 | refs/heads/master | 2020-03-23T14:42:49.641470 | 2018-07-20T09:48:09 | 2018-07-20T09:48:09 | 131,081,863 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,281 | py | #coding=utf-8
# Author: Zeng Xiangwei
# Date: 2018.07.10
# Description: control layer for Captive Portal -> Policy List
import time, subprocess
from data import data
from publicControl.public_control import PublicControl
from captive_portal.splash_page.splashpage_business import SplashPageBusiness
class PolicyListControl(PublicControl):
    """Control layer for the Captive Portal "Policy List" feature.

    Wraps the add / edit / lookup / delete API calls; `s` is the HTTP
    session shared with PublicControl.
    """

    def __init__(self, s):
        # Inherit PublicControl's attributes and methods.
        PublicControl.__init__(self, s)

    # Add one policy list; returns the new list's id (int).
    # NOTE(review): data_dict={} is a mutable default argument -- safe only
    # if replaceConfig never mutates it; confirm.
    def add_policylist(self, list_name, expiration_time, \
                        splashpage_name, data_dict={}):
        # First resolve the splash page name to its id.
        tmp = SplashPageBusiness(self.s)
        splashpage_id = tmp.get_splashpage_id(splashpage_name)
        # API endpoint URL.
        api = self.loadApi()['portalConfigAdd']
        request = PublicControl(self.s)
        # Default payload, overridden by entries in data_dict.
        pre_dict = {'name': list_name,
                    'splash_page': "0",
                    'expiration': expiration_time,
                    'portal_page_path': "{}".format(splashpage_id)}
        aft_dict = self.replaceConfig(pre_dict, data_dict)
        # Send the API request.
        recvdata = request.apiRequest(api, aft_dict)
        # Return the list's id (int).
        list_id = recvdata['data']['value']
        return list_id

    # Edit an existing policy list; returns the list's id (int).
    def edit_policylist(self, list_name, expiration_time, \
                        splashpage_name, data_dict={}):
        # First resolve the splash page name to its id.
        tmp = SplashPageBusiness(self.s)
        splashpage_id = tmp.get_splashpage_id(splashpage_name)
        # Look up the policy list's id by name.
        policylist_id = self.get_policylist_id(list_name)
        # API endpoint URL.
        api = self.loadApi()['portalConfigEdit']
        request = PublicControl(self.s)
        # Default payload, overridden by entries in data_dict.
        pre_dict = {'name': list_name,
                    'id': policylist_id,
                    'splash_page': "0",
                    'expiration': expiration_time,
                    'portal_page_path': "{}".format(splashpage_id)}
        aft_dict = self.replaceConfig(pre_dict, data_dict)
        # Send the API request.
        recvdata = request.apiRequest(api, aft_dict)
        # Return the list's id (int).
        list_id = recvdata['data']['value']
        return list_id

    # Look up a policy list's id by its name (first page of up to 10 rows).
    # NOTE(review): returns None implicitly when no name matches -- callers
    # (e.g. delete_policylist) do not check for this; verify server handling.
    def get_policylist_id(self, list_name):
        api = self.loadApi()['portalPolicyList']
        request = PublicControl(self.s)
        recvdata = request.apiRequest(api,{'pageNum': 1, 'pageSize': 10})
        policylist_lists = recvdata['data']['result']
        for i in range(len(policylist_lists)):
            if list_name == policylist_lists[i]['name']:
                policylist_id = policylist_lists[i]['id']
                print "policylist's id is %d"%policylist_id
                return policylist_id

    # Delete the policy list with the given name.
    def delete_policylist(self, list_name):
        # Resolve the list's id first.
        list_id = self.get_policylist_id(list_name)
        api = self.loadApi()['portalPoilcyDelete']
        request = PublicControl(self.s)
        recvdata = request.apiRequest(api, {'id': list_id})
        return recvdata
| [
"gwn-automation@grandstream.cn"
] | gwn-automation@grandstream.cn |
01f32c1f857b3e6cb6206443d4778d3411fa38fa | 85de10a9467b3cd88ce83227bee0d71706e2c2b0 | /c15/point1.py | bcd93dd9a81cdd42b3999ae7c53212cba3aa9078 | [] | no_license | sreejithev/thinkpythonsolutions | f0bbfc0951e57e9b81f50aabf968860484081524 | 59481fd3d2976e73691a3fff97e083c336070cea | refs/heads/master | 2019-07-22T14:08:54.890004 | 2017-09-15T05:06:26 | 2017-09-15T05:06:26 | 94,759,672 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,480 | py | """
Code example from Think Python, by Allen B. Downey.
Available from http://thinkpython.com
Copyright 2012 Allen B. Downey.
Distributed under the GNU General Public License at gnu.org/licenses/gpl.html.
"""
class Point(object):
    """Represents a point in 2-D space (attributes: x, y; set by callers)."""
def print_point(p):
    """Print a Point object in human-readable format, e.g. ``(3, 4.5)``.

    The argument is parenthesized: with a single argument this statement
    behaves identically under Python 2 and Python 3, whereas the former
    ``print '...' % ...`` form was a SyntaxError under Python 3.
    """
    print('(%g, %g)' % (p.x, p.y))
class Rectangle(object):
    """Represents a rectangle.

    attributes: width, height, corner (a Point; find_center treats it as
    the minimum-x/minimum-y corner).
    """
def find_center(rect):
    """Return a new Point at the geometric center of *rect*.

    The center is the corner offset by half of each extent; *rect* is
    not modified.
    """
    center = Point()
    center.x = rect.corner.x + rect.width / 2.0
    center.y = rect.corner.y + rect.height / 2.0
    return center
def grow_rectangle(rect, dwidth, dheight):
    """Resize a Rectangle in place by the given deltas.

    rect: Rectangle object (mutated; nothing is returned).
    dwidth: amount added to the width (may be negative).
    dheight: amount added to the height (may be negative).
    """
    rect.width = rect.width + dwidth
    rect.height = rect.height + dheight
def main():
blank = Point()
blank.x = 3
blank.y = 4
print 'blank',
print_point(blank)
box = Rectangle()
box.width = 100.0
box.height = 200.0
box.corner = Point()
box.corner.x = 0.0
box.corner.y = 0.0
center = find_center(box)
print 'center',
print_point(center)
print box.width
print box.height
print 'grow'
grow_rectangle(box, 50, 100)
print box.width
print box.height
if __name__ == '__main__':
main()
| [
"sreejithevwyd@gmail.com"
] | sreejithevwyd@gmail.com |
8b85bd2c6c9b054817167698608b01c1bde8b42e | 8fa3f6a0b64fba033adda723b229cf4cc74898ab | /truthing/pyqtgraph/console/Console.py | 3ea1580f6b18a207f2d470401d2854be9c5c97f5 | [
"Apache-2.0",
"LicenseRef-scancode-free-unknown"
] | permissive | aldopareja/MCS | 3f86a562e9427302b1fd46b2a6d539aab5a0e9b3 | d8cf9172460dec281e8ec2d714835226a75a6966 | refs/heads/master | 2021-04-05T16:46:38.706045 | 2020-03-20T13:21:52 | 2020-03-20T13:21:52 | 248,579,433 | 1 | 0 | Apache-2.0 | 2020-03-19T18:43:56 | 2020-03-19T18:43:55 | null | UTF-8 | Python | false | false | 15,433 | py | import sys, re, os, time, traceback, subprocess
import pickle
from ..Qt import QtCore, QtGui, USE_PYSIDE, USE_PYQT5
from ..python2_3 import basestring
from .. import exceptionHandling as exceptionHandling
from .. import getConfigOption
if USE_PYSIDE:
from . import template_pyside as template
elif USE_PYQT5:
from . import template_pyqt5 as template
else:
from . import template_pyqt as template
class ConsoleWidget(QtGui.QWidget):
    """
    Widget displaying console output and accepting command input.
    Implements:

    - eval python expressions / exec python statements
    - storable history of commands
    - exception handling allowing commands to be interpreted in the context of any level in the exception stack frame

    Why not just use python in an interactive shell (or ipython) ? There are a few reasons:

    - pyside does not yet allow Qt event processing and interactive shell at the same time
    - on some systems, typing in the console _blocks_ the qt event loop until the user presses enter. This can
      be baffling and frustrating to users since it would appear the program has frozen.
    - some terminals (eg windows cmd.exe) have notoriously unfriendly interfaces
    - ability to add extra features like exception stack introspection
    - ability to have multiple interactive prompts, including for spawned sub-processes
    """

    def __init__(self, parent=None, namespace=None, historyFile=None, text=None, editor=None):
        """
        ==============  ============================================================================
        **Arguments:**
        namespace       dictionary containing the initial variables present in the default namespace
        historyFile     optional file for storing command history
        text            initial text to display in the console window
        editor          optional string for invoking code editor (called when stack trace entries are
                        double-clicked). May contain {fileName} and {lineNum} format keys. Example::

                          editorCommand --loadfile {fileName} --gotoline {lineNum}
        ==============  =============================================================================
        """
        QtGui.QWidget.__init__(self, parent)
        if namespace is None:
            namespace = {}
        self.localNamespace = namespace
        self.editor = editor
        self.multiline = None   # buffer for a partially-entered multi-line command
        self.inCmd = False

        self.ui = template.Ui_Form()
        self.ui.setupUi(self)
        self.output = self.ui.output
        self.input = self.ui.input
        self.input.setFocus()

        if text is not None:
            self.output.setPlainText(text)

        self.historyFile = historyFile

        history = self.loadHistory()
        if history is not None:
            self.input.history = [""] + history
            self.ui.historyList.addItems(history[::-1])
        self.ui.historyList.hide()
        self.ui.exceptionGroup.hide()

        # Wire UI signals to their handlers.
        self.input.sigExecuteCmd.connect(self.runCmd)
        self.ui.historyBtn.toggled.connect(self.ui.historyList.setVisible)
        self.ui.historyList.itemClicked.connect(self.cmdSelected)
        self.ui.historyList.itemDoubleClicked.connect(self.cmdDblClicked)
        self.ui.exceptionBtn.toggled.connect(self.ui.exceptionGroup.setVisible)

        self.ui.catchAllExceptionsBtn.toggled.connect(self.catchAllExceptions)
        self.ui.catchNextExceptionBtn.toggled.connect(self.catchNextException)
        self.ui.clearExceptionBtn.clicked.connect(self.clearExceptionClicked)
        self.ui.exceptionStackList.itemClicked.connect(self.stackItemClicked)
        self.ui.exceptionStackList.itemDoubleClicked.connect(self.stackItemDblClicked)
        self.ui.onlyUncaughtCheck.toggled.connect(self.updateSysTrace)

        self.currentTraceback = None

    def loadHistory(self):
        """Return the list of previously-invoked command strings (or None)."""
        if self.historyFile is not None:
            # Context manager so the file handle is closed promptly
            # (previously the handle from open() was never closed).
            with open(self.historyFile, 'rb') as fh:
                return pickle.load(fh)

    def saveHistory(self, history):
        """Store the list of previously-invoked command strings."""
        if self.historyFile is not None:
            # Bug fix: pickle.dump takes (obj, file) -- the arguments were
            # previously swapped, which raised and lost the history.
            with open(self.historyFile, 'wb') as fh:
                pickle.dump(history, fh)

    def runCmd(self, cmd):
        """Echo *cmd* into the output pane and execute it, with stdout and
        stderr redirected into this widget for the duration."""
        #cmd = str(self.input.lastCmd)
        self.stdout = sys.stdout
        self.stderr = sys.stderr
        # Escape HTML-special characters so the command displays literally:
        # the echoed string is inserted via insertHtml() below.
        encCmd = re.sub(r'>', '&gt;', re.sub(r'<', '&lt;', cmd))
        encCmd = re.sub(r' ', '&nbsp;', encCmd)
        self.ui.historyList.addItem(cmd)
        self.saveHistory(self.input.history[1:100])
        try:
            sys.stdout = self
            sys.stderr = self
            if self.multiline is not None:
                self.write("<br><b>%s</b>\n"%encCmd, html=True)
                self.execMulti(cmd)
            else:
                self.write("<br><div style='background-color: #CCF'><b>%s</b>\n"%encCmd, html=True)
                self.inCmd = True
                self.execSingle(cmd)

            if not self.inCmd:
                self.write("</div>\n", html=True)

        finally:
            sys.stdout = self.stdout
            sys.stderr = self.stderr

            # Keep both panes scrolled to the bottom.
            sb = self.output.verticalScrollBar()
            sb.setValue(sb.maximum())
            sb = self.ui.historyList.verticalScrollBar()
            sb.setValue(sb.maximum())

    def globals(self):
        """Globals dict used for command execution: the selected exception
        frame's globals when frame-execution is enabled, else this module's."""
        frame = self.currentFrame()
        if frame is not None and self.ui.runSelectedFrameCheck.isChecked():
            return self.currentFrame().tb_frame.f_globals
        else:
            return globals()

    def locals(self):
        """Locals dict used for command execution: the selected exception
        frame's locals when frame-execution is enabled, else the namespace
        given to __init__."""
        frame = self.currentFrame()
        if frame is not None and self.ui.runSelectedFrameCheck.isChecked():
            return self.currentFrame().tb_frame.f_locals
        else:
            return self.localNamespace

    def currentFrame(self):
        ## Return the currently selected exception stack frame (or None if there is no exception)
        if self.currentTraceback is None:
            return None
        index = self.ui.exceptionStackList.currentRow()
        tb = self.currentTraceback
        for i in range(index):
            tb = tb.tb_next
        return tb

    def execSingle(self, cmd):
        """Run a one-line command: try eval (printing the repr of the
        result), fall back to exec; an 'unexpected EOF' SyntaxError starts
        multi-line entry mode."""
        try:
            output = eval(cmd, self.globals(), self.locals())
            self.write(repr(output) + '\n')
        except SyntaxError:
            try:
                exec(cmd, self.globals(), self.locals())
            except SyntaxError as exc:
                if 'unexpected EOF' in exc.msg:
                    self.multiline = cmd
                else:
                    self.displayException()
            except:
                self.displayException()
        except:
            self.displayException()

    def execMulti(self, nextLine):
        """Accumulate lines of a multi-line command; a blank line ends
        entry and triggers execution of the whole buffer."""
        #self.stdout.write(nextLine+"\n")
        if nextLine.strip() != '':
            self.multiline += "\n" + nextLine
            return
        else:
            cmd = self.multiline

        try:
            output = eval(cmd, self.globals(), self.locals())
            self.write(str(output) + '\n')
            self.multiline = None
        except SyntaxError:
            try:
                exec(cmd, self.globals(), self.locals())
                self.multiline = None
            except SyntaxError as exc:
                if 'unexpected EOF' in exc.msg:
                    self.multiline = cmd
                else:
                    self.displayException()
                    self.multiline = None
            except:
                self.displayException()
                self.multiline = None
        except:
            self.displayException()
            self.multiline = None

    def write(self, strn, html=False):
        """Append text to the output pane; with html=True the string is
        interpreted as rich text. Also serves as the stdout/stderr sink
        while a command runs (see runCmd)."""
        self.output.moveCursor(QtGui.QTextCursor.End)
        if html:
            self.output.textCursor().insertHtml(strn)
        else:
            if self.inCmd:
                self.inCmd = False
                self.output.textCursor().insertHtml("</div><br><div style='font-weight: normal; background-color: #FFF;'>")
                #self.stdout.write("</div><br><div style='font-weight: normal; background-color: #FFF;'>")
            self.output.insertPlainText(strn)
        #self.stdout.write(strn)

    def displayException(self):
        """
        Display the current exception and stack.
        """
        tb = traceback.format_exc()
        lines = []
        indent = 4
        prefix = ''
        for l in tb.split('\n'):
            lines.append(" "*indent + prefix + l)
        self.write('\n'.join(lines))
        self.exceptionHandler(*sys.exc_info())

    def cmdSelected(self, item):
        """Single click on a history entry: recall it into the input line."""
        index = -(self.ui.historyList.row(item)+1)
        self.input.setHistory(index)
        self.input.setFocus()

    def cmdDblClicked(self, item):
        """Double click on a history entry: recall and execute it."""
        index = -(self.ui.historyList.row(item)+1)
        self.input.setHistory(index)
        self.input.execCmd()

    def flush(self):
        # File-like no-op so this widget can stand in for sys.stdout/stderr.
        pass

    def catchAllExceptions(self, catch=True):
        """
        If True, the console will catch all unhandled exceptions and display the stack
        trace. Each exception caught clears the last.
        """
        self.ui.catchAllExceptionsBtn.setChecked(catch)
        if catch:
            self.ui.catchNextExceptionBtn.setChecked(False)
            self.enableExceptionHandling()
            self.ui.exceptionBtn.setChecked(True)
        else:
            self.disableExceptionHandling()

    def catchNextException(self, catch=True):
        """
        If True, the console will catch the next unhandled exception and display the stack
        trace.
        """
        self.ui.catchNextExceptionBtn.setChecked(catch)
        if catch:
            self.ui.catchAllExceptionsBtn.setChecked(False)
            self.enableExceptionHandling()
            self.ui.exceptionBtn.setChecked(True)
        else:
            self.disableExceptionHandling()

    def enableExceptionHandling(self):
        exceptionHandling.register(self.exceptionHandler)
        self.updateSysTrace()

    def disableExceptionHandling(self):
        exceptionHandling.unregister(self.exceptionHandler)
        self.updateSysTrace()

    def clearExceptionClicked(self):
        """Forget the stored exception and reset the exception UI."""
        self.currentTraceback = None
        self.ui.exceptionInfoLabel.setText("[No current exception]")
        self.ui.exceptionStackList.clear()
        self.ui.clearExceptionBtn.setEnabled(False)

    def stackItemClicked(self, item):
        pass

    def stackItemDblClicked(self, item):
        """Open the selected stack frame's source in the configured editor."""
        editor = self.editor
        if editor is None:
            editor = getConfigOption('editorCommand')
        if editor is None:
            return
        tb = self.currentFrame()
        lineNum = tb.tb_lineno
        fileName = tb.tb_frame.f_code.co_filename
        # Bug fix: use the locally-resolved `editor` (which may have come
        # from the config option); `self.editor.format(...)` crashed with
        # AttributeError whenever self.editor was None.
        subprocess.Popen(editor.format(fileName=fileName, lineNum=lineNum), shell=True)

    #def allExceptionsHandler(self, *args):
        #self.exceptionHandler(*args)

    #def nextExceptionHandler(self, *args):
        #self.ui.catchNextExceptionBtn.setChecked(False)
        #self.exceptionHandler(*args)

    def updateSysTrace(self):
        ## Install or uninstall  sys.settrace handler

        if not self.ui.catchNextExceptionBtn.isChecked() and not self.ui.catchAllExceptionsBtn.isChecked():
            if sys.gettrace() == self.systrace:
                sys.settrace(None)
            return

        if self.ui.onlyUncaughtCheck.isChecked():
            if sys.gettrace() == self.systrace:
                sys.settrace(None)
        else:
            if sys.gettrace() is not None and sys.gettrace() != self.systrace:
                self.ui.onlyUncaughtCheck.setChecked(False)
                raise Exception("sys.settrace is in use; cannot monitor for caught exceptions.")
            else:
                sys.settrace(self.systrace)

    def exceptionHandler(self, excType, exc, tb):
        """Record *tb* and populate the exception info label and stack list."""
        if self.ui.catchNextExceptionBtn.isChecked():
            self.ui.catchNextExceptionBtn.setChecked(False)
        elif not self.ui.catchAllExceptionsBtn.isChecked():
            return

        self.ui.clearExceptionBtn.setEnabled(True)
        self.currentTraceback = tb

        excMessage = ''.join(traceback.format_exception_only(excType, exc))
        self.ui.exceptionInfoLabel.setText(excMessage)
        self.ui.exceptionStackList.clear()
        for index, line in enumerate(traceback.extract_tb(tb)):
            self.ui.exceptionStackList.addItem('File "%s", line %s, in %s()\n  %s' % line)

    def systrace(self, frame, event, arg):
        # sys.settrace callback: forward interesting 'exception' events.
        if event == 'exception' and self.checkException(*arg):
            self.exceptionHandler(*arg)
        return self.systrace

    def checkException(self, excType, exc, tb):
        ## Return True if the exception is interesting; False if it should be ignored.

        filename = tb.tb_frame.f_code.co_filename
        function = tb.tb_frame.f_code.co_name

        filterStr = str(self.ui.filterText.text())
        if filterStr != '':
            if isinstance(exc, Exception):
                msg = exc.message   # NOTE(review): Python-2-only attribute; file keeps py2 compat via python2_3
            elif isinstance(exc, basestring):
                msg = exc
            else:
                msg = repr(exc)
            match = re.search(filterStr,  "%s:%s:%s" % (filename, function, msg))
            return match is not None

        ## Go through a list of common exception points we like to ignore:
        if excType is GeneratorExit or excType is StopIteration:
            return False
        if excType is KeyError:
            if filename.endswith('python2.7/weakref.py') and function in ('__contains__', 'get'):
                return False
            if filename.endswith('python2.7/copy.py') and function == '_keep_alive':
                return False
        if excType is AttributeError:
            if filename.endswith('python2.7/collections.py') and function == '__init__':
                return False
            if filename.endswith('numpy/core/fromnumeric.py') and function in ('all', '_wrapit', 'transpose', 'sum'):
                return False
            if filename.endswith('numpy/core/arrayprint.py') and function in ('_array2string'):
                return False
            if filename.endswith('MetaArray.py') and function == '__getattr__':
                for name in ('__array_interface__', '__array_struct__', '__array__'):  ## numpy looks for these when converting objects to array
                    if name in exc:
                        return False
            if filename.endswith('flowchart/eq.py'):
                return False
            if filename.endswith('pyqtgraph/functions.py') and function == 'makeQImage':
                return False
        if excType is TypeError:
            if filename.endswith('numpy/lib/function_base.py') and function == 'iterable':
                return False
        if excType is ZeroDivisionError:
            if filename.endswith('python2.7/traceback.py'):
                return False

        return True
| [
"clark.dorman@nextcentury.com"
] | clark.dorman@nextcentury.com |
d7a3715564bf502e8f7675515f39437fd16aea6d | 1adc05008f0caa9a81cc4fc3a737fcbcebb68995 | /hardhat/recipes/libsecret.py | b42a31342843ec94f44a8536408092b7348707ab | [
"MIT",
"BSD-3-Clause"
] | permissive | stangelandcl/hardhat | 4aa995518697d19b179c64751108963fa656cfca | 1ad0c5dec16728c0243023acb9594f435ef18f9c | refs/heads/master | 2021-01-11T17:19:41.988477 | 2019-03-22T22:18:44 | 2019-03-22T22:18:52 | 79,742,340 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 672 | py | from .base import GnuRecipe
class LibSecretRecipe(GnuRecipe):
    """Build recipe for GNOME libsecret 0.18.5."""

    def __init__(self, *args, **kwargs):
        super(LibSecretRecipe, self).__init__(*args, **kwargs)
        # Package identity and where to fetch the tarball from.
        self.name = 'libsecret'
        self.version = '0.18.5'
        self.version_regex = r'(?P<version>\d+\.\d+(\.\d+)?)'
        self.url = ('http://ftp.gnome.org/pub/gnome/sources/libsecret/'
                    '$short_version/libsecret-$version.tar.xz')
        # Expected SHA-256 of the downloaded tarball.
        self.sha256 = ('9ce7bd8dd5831f2786c935d82638ac42'
                       '8fa085057cc6780aba0e39375887ccb3')
        # Build dependencies; the vala bindings are disabled below.
        self.depends = ['gcrypt', 'glib', 'gobject-introspection', 'vala']
        self.configure_args += ['--enable-vala=no']
| [
"clayton.stangeland@gmail.com"
] | clayton.stangeland@gmail.com |
16bc8676616f58fc61217f3c23d885a36eac8473 | 5cbd70905f76c63058ba96edcc91650a131e2911 | /node_modules/webpack-dev-server/node_modules/fsevents/build/config.gypi | 86e7c075dc794be1a39a73d36b2276978010e880 | [
"MIT"
] | permissive | lfry610/monsters-rolodex | 24590bcbe48c0d37e18cca3d8a459306d9a8fe92 | 7f27a93aaedca6f0029ebf6f21b48f061aa29980 | refs/heads/master | 2023-02-14T22:37:26.806549 | 2021-01-10T22:16:58 | 2021-01-10T22:16:58 | 328,454,322 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,746 | gypi | # Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"asan": 0,
"build_v8_with_gn": "false",
"coverage": "false",
"dcheck_always_on": 0,
"debug_nghttp2": "false",
"debug_node": "false",
"enable_lto": "false",
"enable_pgo_generate": "false",
"enable_pgo_use": "false",
"error_on_warn": "false",
"force_dynamic_crt": 0,
"host_arch": "x64",
"icu_data_in": "../../deps/icu-tmp/icudt67l.dat",
"icu_endianness": "l",
"icu_gyp_path": "tools/icu/icu-generic.gyp",
"icu_path": "deps/icu-small",
"icu_small": "false",
"icu_ver_major": "67",
"is_debug": 0,
"llvm_version": "11.0",
"napi_build_version": "7",
"node_byteorder": "little",
"node_debug_lib": "false",
"node_enable_d8": "false",
"node_install_npm": "true",
"node_module_version": 83,
"node_no_browser_globals": "false",
"node_prefix": "/usr/local",
"node_release_urlbase": "https://nodejs.org/download/release/",
"node_shared": "false",
"node_shared_brotli": "false",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_nghttp2": "false",
"node_shared_openssl": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_target_type": "executable",
"node_use_bundled_v8": "true",
"node_use_dtrace": "true",
"node_use_etw": "false",
"node_use_node_code_cache": "true",
"node_use_node_snapshot": "true",
"node_use_openssl": "true",
"node_use_v8_platform": "true",
"node_with_ltcg": "false",
"node_without_node_options": "false",
"openssl_fips": "",
"openssl_is_fips": "false",
"ossfuzz": "false",
"shlib_suffix": "83.dylib",
"target_arch": "x64",
"v8_enable_31bit_smis_on_64bit_arch": 0,
"v8_enable_gdbjit": 0,
"v8_enable_i18n_support": 1,
"v8_enable_inspector": 1,
"v8_enable_lite_mode": 0,
"v8_enable_object_print": 1,
"v8_enable_pointer_compression": 0,
"v8_no_strict_aliasing": 1,
"v8_optimized_debug": 1,
"v8_promise_internal_field_count": 1,
"v8_random_seed": 0,
"v8_trace_maps": 0,
"v8_use_siphash": 1,
"want_separate_host_toolset": 0,
"xcode_version": "11.0",
"nodedir": "/Users/lawrencefryer/Library/Caches/node-gyp/14.15.4",
"standalone_static_library": 1,
"dry_run": "",
"legacy_bundling": "",
"save_dev": "",
"browser": "",
"commit_hooks": "true",
"only": "",
"viewer": "man",
"also": "",
"rollback": "true",
"sign_git_commit": "",
"audit": "true",
"usage": "",
"globalignorefile": "/usr/local/etc/npmignore",
"init_author_url": "",
"maxsockets": "50",
"shell": "/bin/zsh",
"metrics_registry": "https://registry.npmjs.org/",
"parseable": "",
"shrinkwrap": "true",
"init_license": "ISC",
"timing": "",
"if_present": "",
"cache_max": "Infinity",
"init_author_email": "",
"sign_git_tag": "",
"cert": "",
"git_tag_version": "true",
"local_address": "",
"long": "",
"preid": "",
"fetch_retries": "2",
"registry": "https://registry.npmjs.org/",
"key": "",
"message": "%s",
"versions": "",
"globalconfig": "/usr/local/etc/npmrc",
"always_auth": "",
"logs_max": "10",
"prefer_online": "",
"cache_lock_retries": "10",
"global_style": "",
"update_notifier": "true",
"audit_level": "low",
"heading": "npm",
"fetch_retry_mintimeout": "10000",
"offline": "",
"read_only": "",
"searchlimit": "20",
"access": "",
"json": "",
"allow_same_version": "",
"description": "true",
"engine_strict": "",
"https_proxy": "",
"init_module": "/Users/lawrencefryer/.npm-init.js",
"userconfig": "/Users/lawrencefryer/.npmrc",
"cidr": "",
"node_version": "14.15.4",
"user": "",
"save": "true",
"auth_type": "legacy",
"editor": "vi",
"ignore_prepublish": "",
"script_shell": "",
"tag": "latest",
"before": "",
"global": "",
"progress": "true",
"ham_it_up": "",
"optional": "true",
"searchstaleness": "900",
"bin_links": "true",
"force": "",
"save_prod": "",
"searchopts": "",
"depth": "Infinity",
"node_gyp": "/usr/local/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js",
"rebuild_bundle": "true",
"sso_poll_frequency": "500",
"unicode": "true",
"fetch_retry_maxtimeout": "60000",
"ca": "",
"save_prefix": "^",
"scripts_prepend_node_path": "warn-only",
"sso_type": "oauth",
"strict_ssl": "true",
"tag_version_prefix": "v",
"save_exact": "true",
"dev": "",
"fetch_retry_factor": "10",
"group": "20",
"cache_lock_stale": "60000",
"prefer_offline": "",
"version": "",
"cache_min": "10",
"otp": "",
"cache": "/Users/lawrencefryer/.npm",
"searchexclude": "",
"color": "true",
"package_lock": "true",
"fund": "true",
"package_lock_only": "",
"save_optional": "",
"user_agent": "npm/6.14.10 node/v14.15.4 darwin x64",
"ignore_scripts": "",
"cache_lock_wait": "10000",
"production": "",
"save_bundle": "",
"send_metrics": "",
"init_version": "1.0.0",
"node_options": "",
"umask": "0022",
"scope": "",
"git": "git",
"init_author_name": "",
"onload_script": "",
"tmp": "/var/folders/xq/5rc9rv6d28v1hd5t7sg6xmxh0000gn/T",
"unsafe_perm": "true",
"format_package_lock": "true",
"link": "",
"prefix": "/usr/local"
}
}
| [
"lawrencefryer@Lawrences-MacBook-Air.local"
] | lawrencefryer@Lawrences-MacBook-Air.local |
0708beed159ee8612e7a5cb4b59e3e475cc257ab | f5c60e2a20ce8d935b97349a7040e59ffd2b442b | /yash/views.py | c44db3c6588a1b8fca00fd9efc90f9b08a96881f | [] | no_license | yasobantakumar/ONLINE-FOOD-MARKET | cb1e168f2aa8c1585b311d078a4f04c3f8612969 | 1d41154e2e261f4a2e15f6ce6051584f6203ff48 | refs/heads/master | 2022-12-10T03:16:40.286722 | 2020-09-08T09:44:26 | 2020-09-08T09:44:26 | 293,413,992 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,815 | py | from django.contrib import messages
from django.shortcuts import render, redirect
from yash.models import AdminLoginModel,StateModel1,CityModel1,CuisineModel
def showIndex(request):
    # Entry point: render the admin login page.
    return render(request,"yash/login.html")
def login_check(request):
    """Authenticate the admin on POST; any other method acts as logout.

    POST: look up the submitted credentials; on a match set the
    ``admin_status`` session flag and redirect to the welcome page,
    otherwise re-show the login page with an error.
    Non-POST: clear the session flag and show a logout message.

    NOTE(review): credentials are compared in plaintext against the
    database -- consider hashing passwords.
    """
    if request.method == "POST":
        try:
            AdminLoginModel.objects.get(username=request.POST.get("yash_username"),
                                        password=request.POST.get("yash_password"))
            request.session["admin_status"] = True
            return redirect('welcome')
        except Exception:
            # Narrowed from a bare ``except:`` (which would also swallow
            # SystemExit/KeyboardInterrupt); still covers DoesNotExist and
            # MultipleObjectsReturned from objects.get().
            return render(request, "yash/login.html", {"error": "Invalid User"})
    else:
        request.session["admin_status"] = False
        return render(request, "yash/login.html", {"error": "Admin Logout Success"})
def welcome(request):
    # Post-login landing page.
    return render(request,"yash/home.html")
def openstate(request):
    # List all states for the state-management page.
    data=StateModel1.objects.all()
    return render(request,"yash/openstate1.html",{"DATA":data})
def opencity(request):
    # Show all states (for the selection dropdown) and all cities.
    sm = StateModel1.objects.all()
    cm = CityModel1.objects.all()
    return render(request, "yash/opencity.html", {'state': sm, 'city': cm})
def addingstate(request):
    """Create a new state from the submitted name (t1) and photo (t2),
    then re-render the state list."""
    StateModel1(name=request.POST.get("t1"), photo=request.FILES["t2"]).save()
    return openstate(request)
def updatestate(request):
    # Load the state picked for editing (?id=) plus the full list for redisplay.
    sid = request.GET.get('id')
    sm = StateModel1.objects.get(id=sid)
    sm_all=StateModel1.objects.all()
    return render(request, 'yash/openstate1.html', {'update_state': sm,'data':sm_all})
def updatestateid(request):
    # Apply the edited name/photo to the state row identified by ?state_id=.
    s_id = request.GET.get('state_id')
    s_name = request.POST.get('t1')
    s_photo = request.FILES.get('t2')  # NOTE(review): None when no photo uploaded -- would null the field; confirm intended
    StateModel1.objects.filter(id=s_id).update(name=s_name,photo=s_photo)
    return redirect('openstate')
def sdelete(request):
    """Delete the state identified by ?state_id= and return to the list."""
    state_id = request.GET.get('state_id')
    StateModel1.objects.filter(id=state_id).delete()
    messages.success(request, 'state is removed')
    return redirect('openstate')
def savecity(request):
cid = request.POST.get('t2')
name = request.POST.get('t1')
print(cid, name)
CityModel1(name=request.POST.get('t1'), photo=request.FILES["t3"], city_state_id=cid).save()
messages.success(request, 'city is added')
return redirect('opencity')
def updatecity(request):
cm = CityModel1.objects.filter(id=request.GET.get('cid'))
return render(request,'yash/opencity.html',{'ucity':cm})
def updatecityid(request):
CityModel1.objects.filter(id=request.GET.get('cid')).update(name=request.POST.get('t1'),photo =request.FILES.get('t3'))
messages.success(request,'updated success')
return opencity(request)
def cdelete(request):
CityModel1.objects.filter(id=request.GET.get('cid')).delete()
messages.success(request, 'City deleted')
return redirect('opencity')
def openCusine(request):
cm = CuisineModel.objects.all()
return render(request,"yash/opencuisine.html",{'data':cm})
def savecuisine(request):
CuisineModel(type=request.POST.get('t1'),photo=request.FILES.get('t2')).save()
messages.success(request,'cuisine saved')
return openCusine(request)
def updatecuisine(request):
cid = request.GET.get('cid')
cm = CuisineModel.objects.filter(id=cid)
return render(request,'yash/opencuisine.html',{'update':cm})
def updatecuisineid(request):
CuisineModel.objects.filter(id=request.GET.get('cid')).update(type=request.POST.get('t1'),
photo=request.FILES.get('t2'))
messages.success(request,'cuisine updated')
return redirect('cuisine')
def dcuisine(request):
CuisineModel.objects.filter(id=request.GET.get('cid')).delete()
messages.success(request, 'cuisine deleted')
return redirect('cuisine')
| [
"yashbardhan69@gmail.com"
] | yashbardhan69@gmail.com |
f7506da0c51002450d4c296cd3c9a8b07112962d | 517a4a9ce0ec7ad958981affef479b9097b9a023 | /gen-go-build.py | 199410b4c29e6cf16124c8ce73c16d88a8f8f748 | [] | no_license | wisechengyi/binaries | b9aa39bec865fc75739413a35447468815e46026 | 8d5d6fed66706ec9f52965bad9c6d5ed7d7430b6 | refs/heads/master | 2023-02-28T13:24:20.686886 | 2018-11-20T17:03:33 | 2018-11-20T17:03:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,312 | py | #!/usr/bin/python
# Helper for creating Go build.sh files. This script will create a default build.sh file
# for Go/macOS/Linux versions listed below, unless the directory already exists. If a custom
# build.sh script is needed, just create it and this helper will ignore it going forward.
import os
from textwrap import dedent
go_versions = [
'1.9.4',
'1.10',
]
mac_versions = [
'10.8',
'10.9',
'10.10',
'10.11',
'10.12',
'10.13',
]
tmpl = dedent("""\
#!/bin/bash
set -o xtrace
curl https://storage.googleapis.com/golang/go{go_version}.{arch}.tar.gz -o go.tar.gz
""")
def maybe_gen(dir, go_version, arch):
if not os.path.exists(dir):
os.makedirs(dir)
filename = os.path.join(dir, 'build.sh')
with open(filename, 'w') as fh:
fh.write(tmpl.format(go_version=go_version, arch=arch))
os.chmod(filename, 0755)
def mac():
for mac_version in mac_versions:
for go_version in go_versions:
dir = 'build-support/bin/go/mac/%s/%s' % (mac_version, go_version)
maybe_gen(dir, go_version, 'darwin-amd64')
def x86_64():
for go_version in go_versions:
dir = 'build-support/bin/go/linux/x86_64/%s' % go_version
maybe_gen(dir, go_version, 'linux-amd64')
mac()
x86_64()
| [
"stuhood@twitter.com"
] | stuhood@twitter.com |
e50972f3b85ec4897e5d40a0ed5ad4c465cd8100 | 38f267668c75ed37a17e514e18f263c872b0b5d6 | /repos/blender_tools/breakout/breakout_controler.py | f39d1752069d0f8283c2fe47c64c5358dc10f708 | [] | no_license | BlenderCN-Org/working_files | c264d1390a2bfdd4781376fd1ec1747d8c81c251 | 0788f00283d7c8c083aa5d554eb1f32c201adbd6 | refs/heads/master | 2020-05-27T08:35:12.899432 | 2016-01-06T21:52:21 | 2016-01-06T21:52:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,786 | py | # ##### BEGIN GPL LICENSE BLOCK #####
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# ##### END GPL LICENSE BLOCK #####
# ##### BEGIN SCRIPT COMMENTS #####
# Main Tool: "Breakout Tool"
# Parent: "Breakout Launcher"
# Name: "Breakout Controler",
# Author: "Jordan Goddard",
# Company: "Tangent Animation"
# Blender: (2, 74, 0),
# Description: "Controls what the breakout tool will run on, and how many times it will run"
# <pep8 compliant>
# ##### END SCRIPT COMMENTS #####
"""
Import the necesary libraries for the application to call as well as declare any global variables
"""
import os
import sys
import subprocess
import re
response = None
file_path = None
def create_file():
"""
Creates the breakout list that the breakout tool will gather information from
"""
file = open("C:\\Temp\\breakout_tool_data\\breakout_log.txt", "w")
file.write("file=C:\\Temp\n000.0000")
file.close()
def check_file():
"""
Checks if there is a file for breakout to run and read from
"""
try:
file = open("C:\\Temp\\breakout_tool_data\\breakout_log.txt", "r")
except:
create_file()
print("\nThe file is empty, please enter data into the file\n")
def read_file():
"""
Reads the file that has the data of what to breakout and from where, then prints it for the user
"""
file = open("C:\\Temp\\breakout_tool_data\\breakout_log.txt", "r")
print("\n-------------------------------------------------------------------------\n")
for line in file:
print(line)
print("\n-------------------------------------------------------------------------\n")
def question_one():
"""
Asks the user if they want to breakout the files that have been read from the breakout log file
"""
print("\nThe following shots have been set for breakout: ")
read_file()
print("\nWould you like to break these out? (y/n)")
response = input()
question_one_answer(response)
def question_one_answer(response):
"""
Handles the response from the user for wether or not to breakout the files listed
"""
if response == 'y':
print("\nBreakout is beginning\n")
run_scripts()
print("\n\n\nrun\n\n\n")
elif response == 'n':
print("Please update the file with correct data")
question_two()
else:
print("\nInvalid Entry!\nPlease try again!")
question_one()
def execute():
"""
Runs functions in order of execution
"""
print("\n-------------------------------------------------------------------------\n")
directory_creation()
check_file()
question_one()
def directory_creation():
"""
Checks if the proper directory of files already exists, and if it doesnt or it is partial, it creates it.
"""
new_path = ["C:\\Temp\\breakout_tool_data", "C:\\Temp\\breakout_tool_data\\blender_files", "C:\\Temp\\breakout_tool_data\\error_logs"]
if not os.path.exists(new_path[0]):
os.makedirs(new_path[0])
if not os.path.exists(new_path[1]):
os.makedirs(new_path[1])
if not os.path.exists(new_path[2]):
os.makedirs(new_path[2])
def run_scripts():
"""
Runs breakout tool based on the number of files to be broken out
"""
try:
file = open("C:\\Temp\\breakout_tool_data\\breakout_log.txt", "r")
except:
print("File Error!")
file = open("C:\\Temp\\breakout_tool_data\\breakout_log.txt", "w")
file.close()
else:
for line in file.readlines():
if line.startswith('file='):
match=re.match("file=(\S+)", line)
file_path = match.group(1)
else:
file_new = open("C:\\Temp\\pass_temp.txt", "w")
match = re.match(r'(\d+\.\d+)',line)
scene_name = match.group(1)
file_new.write("%s\n%s" %(scene_name,file_path))
file_new.close()
subprocess.call("C:\\pipeline\\non_client\\breakout\\breakout_batch_code.bat")
finally:
print("\n\nComplete\n\n")
file.close()
execute() | [
"jordan.goddard@tangent-animation.com"
] | jordan.goddard@tangent-animation.com |
8528501e9a70578cfb338083bb1c11b49ca821f7 | fc5ff290909092ee2d01abc7610623966bc532ad | /warmup-2/last2.py | 774a08a30ec5aba96c95b033f395f165e726d07f | [] | no_license | aishuannu/codingbat | 7fe1e486010bf421095a19e084136834f97f2a70 | 25b819be3587ecdea0f7de132ad24bb3bdbe7a63 | refs/heads/master | 2021-01-12T13:37:49.619866 | 2016-10-18T13:51:07 | 2016-10-18T13:51:07 | 68,989,228 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 132 | py | def last2(a):
i = 0
b = 0
while i<len(a)-2:
if a[i:i+2] == a[len(a)-2: ]:
b = b+1
i += 1
else:
i += 1
return b
| [
"aishuradhakrishnan21@gmail.com"
] | aishuradhakrishnan21@gmail.com |
b59e5fc9e79aa3e28a8c97c4345cbe549c9e0b42 | 3386fd7bf665797c028e4b46f7e9ce0c9b101a9d | /MadLib.py | 1357deeb3ab0352fad743a31ca8b3ea43a6af9a0 | [] | no_license | Julie789/PythonClass1 | d1ac0f214b9434408d47d87a38536a3c6b069ed1 | d77873dc20b10ae1d99d1d4f23ec81b2266c42c7 | refs/heads/master | 2022-12-06T18:46:37.446309 | 2020-09-05T22:08:02 | 2020-09-05T22:08:02 | 293,113,060 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 123 | py | #MadLib.py
#Name:
#Date:
def main():
#Ask user for words
#Print the story with the user supplied words.
main()
| [
"noreply@github.com"
] | Julie789.noreply@github.com |
da3f14eb4676c866d47a2784491765e6f5abcac8 | 0bbeb0bbe788ec5a8ba15acf159e4b913985bba4 | /tests/testsuite/a_basic/tests_03_networking.py | 5173cf96368f92efba523e5f790107970eeb035a | [
"Apache-2.0"
] | permissive | GlenDC/0-core | 629bd9836ab4ff2fe0c40628419b58205bb64648 | 807fa1939199fa3aa3b3e57679f61bb6c72cc57f | refs/heads/master | 2021-06-17T19:52:40.405225 | 2017-06-14T16:42:39 | 2017-06-14T16:42:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,190 | py | from utils.utils import BaseTest
import time
import unittest
class BasicNetworking(BaseTest):
def setUp(self):
super(BasicNetworking, self).setUp()
self.check_g8os_connection(BasicNetworking)
def test001_join_leave_list_zerotier(self):
""" g8os-012
*Test case for testing joining, listing, leaving zerotier networks*
**Test Scenario:**
#. Get NetworkId using zerotier API
#. Join zerotier network (N1), should succeed
#. List zerotier network
#. Join fake zerotier network (N1), should fail
#. Leave zerotier network (N1), should succeed
#. List zerotier networks, N1 should be gone
#. Leave zerotier network (N1), should fail
"""
self.lg('{} STARTED'.format(self._testID))
self.lg('Get NetworkId using zerotier API')
networkId = self.getZtNetworkID()
self.lg('Join zerotier network (N1), should succeed')
self.client.zerotier.join(networkId)
self.lg('List zerotier network')
r = self.client.zerotier.list()
self.assertIn(networkId, [x['nwid'] for x in r])
self.lg('Join fake zerotier network (N1), should fail')
with self.assertRaises(RuntimeError):
self.client.zerotier.join(self.rand_str())
self.lg('Leave zerotier network (N1), should succeed')
self.client.zerotier.leave(networkId)
self.lg('List zerotier networks, N1 should be gone')
r = self.client.zerotier.list()
self.assertNotIn(networkId, [x['nwid'] for x in r])
self.lg('Leave zerotier network (N1), should fail')
with self.assertRaises(RuntimeError):
self.client.zerotier.leave(networkId)
self.lg('{} ENDED'.format(self._testID))
def test002_create_delete_list_bridges(self):
""" g8os-013
*Test case for testing creating, listing, deleting bridges*
**Test Scenario:**
#. Create bridge (B1), should succeed
#. List bridges, B1 should be listed
#. Create bridge with same name of (B1), should fail
#. Delete bridge B1, should succeed
#. List bridges, B1 should be gone
#. Delete bridge B1, should fail
"""
self.lg('{} STARTED'.format(self._testID))
self.lg('Create bridge (B1), should succeed')
bridge_name = self.rand_str()
self.client.bridge.create(bridge_name)
self.lg('List bridges, B1 should be listed')
response = self.client.bridge.list()
self.assertIn(bridge_name, response)
self.lg('Create bridge with same name of (B1), should fail')
with self.assertRaises(RuntimeError):
self.client.bridge.create(bridge_name)
self.lg('Delete bridge B1, should succeed')
self.client.bridge.delete(bridge_name)
self.lg('List bridges, B1 should be gone')
response = self.client.bridge.list()
self.assertNotIn(bridge_name, response)
self.lg('Delete bridge B1, should fail')
with self.assertRaises(RuntimeError):
self.client.bridge.delete(bridge_name)
self.lg('{} ENDED'.format(self._testID))
| [
"deboeck.jo@gmail.com"
] | deboeck.jo@gmail.com |
8db1be4351b09baa57321eff9665ef48aaacf2e5 | 81df54e1245fbc50ffaa6c08108deeef43710596 | /Basics/simple_recursion_01.py | 28fafdb19a8df91174492771fd7f22a7f4a8167f | [] | no_license | gurupsv/LearnPython | 1abdca29ac910e8c99af43fd431da02618175bf2 | e0b25a0a4fa70ee3e0ce5f96cc7bf35eeba8b15c | refs/heads/master | 2021-08-29T03:09:05.366315 | 2021-08-19T03:36:40 | 2021-08-19T03:36:40 | 117,851,031 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 705 | py |
def sumofall(n):
if n == 1 :
return 1
else :
return sumofall(n-1)+n
print(sumofall(5))
print(sumofall(2))
word_list = ['cat','dog','rabbit']
letter_set = set()
letter_list=[]
for a_word in word_list:
for a_letter in a_word:
if a_letter in letter_list:
continue
else :
letter_list.append(a_letter)
letter_set.update(a_letter)
print(list(letter_set))
print(letter_list)
def extendList(val, list=[]):
list.append(val)
return list
list1 = extendList(10)
list2 = extendList(123,[])
list3 = extendList('a')
list2 = extendList(143,list2)
print("list1 = %s" % list1)
print("list2 = %s" % list2)
print("list3 = %s" % list3) | [
"guruprasad.sv@gmail.com"
] | guruprasad.sv@gmail.com |
523c65f5d46898de218134794522c807284fe7d3 | b636634de73bc8208e244158f0a68932ed2c0682 | /exercises/501/solution.py | 6789cd15173eba585603de05988c838f7fa044df | [] | no_license | augustinkrug/hackinscience | 3de39d3a938d0f38295b8e54bcaf45735036fbda | fe50bd8b5e16851c0e650b65a8063442f92fe5d9 | refs/heads/master | 2020-06-15T02:21:47.450981 | 2015-09-25T15:40:14 | 2015-09-25T15:40:14 | 42,852,896 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 594 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Sep 22 13:33:07 2015
@author: A.KRUG
"""
def changes2(amount, coins_rv):
count = 0
while coins_rv[0] > amount:
del coins_rv[0]
maxi = amount // coins_rv[0]
for i in reversed(range(0, maxi + 1)):
rest = amount - i * coins_rv[0]
if rest == 0:
count = count + 1
else:
if len(coins_rv) > 1:
count = count + changes2(rest, coins_rv[1:])
return count
def changes(amount, coins):
coins_rv = sorted(coins, reverse=True)
return changes2(amount, coins_rv)
| [
"a.krug@ackr.net"
] | a.krug@ackr.net |
52d164c079e7024407ee033d66648507ebb48c67 | 99052370591eadf44264dbe09022d4aa5cd9687d | /install/lib/python2.7/dist-packages/cwru_msgs/msg/_NavSatFix.py | 7fc5ec0e156d178ec0e03f7841197d2a80f8b5d4 | [] | no_license | brucemingxinliu/ros_ws | 11b1a3e142132925d35b3adf929f1000392c5bdc | 45f7e553ea20b79e3e93af5f77a1b14b64184875 | refs/heads/master | 2021-01-24T03:36:47.043040 | 2018-02-26T00:53:37 | 2018-02-26T00:53:37 | 122,892,702 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,328 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from cwru_msgs/NavSatFix.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import cwru_msgs.msg
import std_msgs.msg
class NavSatFix(genpy.Message):
_md5sum = "2d3a8cd499b9b4a0249fb98fd05cfa48"
_type = "cwru_msgs/NavSatFix"
_has_header = True #flag to mark the presence of a Header object
_full_text = """# Navigation Satellite fix for any Global Navigation Satellite System
#
# Specified using the WGS 84 reference ellipsoid
# Header specifies ROS time and frame of reference for this fix.
Header header
# satellite fix status information
cwru_msgs/NavSatStatus status
# Latitude [degrees]. Positive is north of equator; negative is south.
float64 latitude
# Longitude [degrees]. Positive is east of prime meridian; negative is west.
float64 longitude
# Altitude [m]. Positive is above the WGS 84 ellipsoid.
float64 altitude
# Position covariance [m^2] defined relative to a tangential plane
# through the reported position. The components are East, North, and
# Up (ENU), in row-major order.
#
# Beware: this coordinate system exhibits singularities at the poles.
float64[9] position_covariance
# If the covariance of the fix is known, fill it in completely. If the
# GPS receiver provides the variance of each measurement, put them
# along the diagonal. If only Dilution of Precision is available,
# estimate an approximate covariance from that.
uint8 COVARIANCE_TYPE_UNKNOWN = 0
uint8 COVARIANCE_TYPE_APPROXIMATED = 1
uint8 COVARIANCE_TYPE_DIAGONAL_KNOWN = 2
uint8 COVARIANCE_TYPE_KNOWN = 3
uint8 position_covariance_type
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: cwru_msgs/NavSatStatus
# Navigation Satellite fix status for any Global Navigation Satellite System
# Whether to output an augmented fix is determined by both the fix
# type and the last time differential corrections were received. A
# fix is valid when status >= STATUS_FIX.
int8 STATUS_NO_FIX = -1 # unable to fix position
int8 STATUS_FIX = 0 # unaugmented fix
int8 STATUS_SBAS_FIX = 1 # with satellite-based augmentation
int8 STATUS_GBAS_FIX = 2 # with ground-based augmentation
int8 status
# Bits defining which Global Navigation Satellite System signals were
# used by the receiver.
uint16 SERVICE_GPS = 1
uint16 SERVICE_GLONASS = 2
uint16 SERVICE_COMPASS = 4 # includes BeiDou.
uint16 SERVICE_GALILEO = 8
uint16 service
"""
# Pseudo-constants
COVARIANCE_TYPE_UNKNOWN = 0
COVARIANCE_TYPE_APPROXIMATED = 1
COVARIANCE_TYPE_DIAGONAL_KNOWN = 2
COVARIANCE_TYPE_KNOWN = 3
__slots__ = ['header','status','latitude','longitude','altitude','position_covariance','position_covariance_type']
_slot_types = ['std_msgs/Header','cwru_msgs/NavSatStatus','float64','float64','float64','float64[9]','uint8']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
header,status,latitude,longitude,altitude,position_covariance,position_covariance_type
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(NavSatFix, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.header is None:
self.header = std_msgs.msg.Header()
if self.status is None:
self.status = cwru_msgs.msg.NavSatStatus()
if self.latitude is None:
self.latitude = 0.
if self.longitude is None:
self.longitude = 0.
if self.altitude is None:
self.altitude = 0.
if self.position_covariance is None:
self.position_covariance = [0.,0.,0.,0.,0.,0.,0.,0.,0.]
if self.position_covariance_type is None:
self.position_covariance_type = 0
else:
self.header = std_msgs.msg.Header()
self.status = cwru_msgs.msg.NavSatStatus()
self.latitude = 0.
self.longitude = 0.
self.altitude = 0.
self.position_covariance = [0.,0.,0.,0.,0.,0.,0.,0.,0.]
self.position_covariance_type = 0
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_struct_bH3d.pack(_x.status.status, _x.status.service, _x.latitude, _x.longitude, _x.altitude))
buff.write(_struct_9d.pack(*self.position_covariance))
buff.write(_struct_B.pack(self.position_covariance_type))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
if self.status is None:
self.status = cwru_msgs.msg.NavSatStatus()
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
_x = self
start = end
end += 27
(_x.status.status, _x.status.service, _x.latitude, _x.longitude, _x.altitude,) = _struct_bH3d.unpack(str[start:end])
start = end
end += 72
self.position_covariance = _struct_9d.unpack(str[start:end])
start = end
end += 1
(self.position_covariance_type,) = _struct_B.unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_struct_bH3d.pack(_x.status.status, _x.status.service, _x.latitude, _x.longitude, _x.altitude))
buff.write(self.position_covariance.tostring())
buff.write(_struct_B.pack(self.position_covariance_type))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
if self.status is None:
self.status = cwru_msgs.msg.NavSatStatus()
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
_x = self
start = end
end += 27
(_x.status.status, _x.status.service, _x.latitude, _x.longitude, _x.altitude,) = _struct_bH3d.unpack(str[start:end])
start = end
end += 72
self.position_covariance = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=9)
start = end
end += 1
(self.position_covariance_type,) = _struct_B.unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
_struct_3I = struct.Struct("<3I")
_struct_bH3d = struct.Struct("<bH3d")
_struct_9d = struct.Struct("<9d")
_struct_B = struct.Struct("<B")
| [
"mxl592@case.edu"
] | mxl592@case.edu |
d57cb07ed57a82b555564b74948badf8c7d3afbb | 30ee4a21565cb20c046a300879c88992f78d9ebd | /JumpGameII.py | 10e9fccb26b3ee0c6f93a9addfb28dbf4d11f6c1 | [] | no_license | pranjay01/leetcode_python | 93ecead7d1a954970074e5cde894dff5537389a5 | b7a625e6fafd856c933c3d2bbbed00ae8992ef9e | refs/heads/master | 2022-11-06T20:19:21.810048 | 2020-06-30T20:56:34 | 2020-06-30T20:56:34 | 276,206,935 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 547 | py | nums=[1,2,1,1,1]
def findindexOfMax(nums,cur,maxD,l):
#l=len(nums)-1
limit=cur+maxD-1
if limit==(l):
return (l)
elif limit>(l):
limit=l
maxv=nums[cur]
index=cur
for i in range(cur,limit):
if maxv<nums[i]:
maxv=nums[i]
index=i
return index
l=len(nums)
if l==1:
print (0)
maxD=nums[0]
count=0
cur=0
while l-cur>maxD and cur<l-1:
count+=1
cur=findindexOfMax(nums,cur+1,maxD,l-1)
maxD=nums[cur]
if l-cur<=maxD and cur<l-1:
count+=1
print (count) | [
"pranjay.sagar01@gmail.com"
] | pranjay.sagar01@gmail.com |
38fd0d89ac93cda1cf7c1026d0604cb9e0057c48 | 44075f81ed374c860c731cbc8713d60e177495a7 | /Selenium/Log_In/practice.py | 7418d4bcfad45936920810a113c7f44a08d85f41 | [] | no_license | skashem/Python | a6740dc4240f90a639caa9eebcaf14e1578af594 | cd314e73ef1fab19dc90464c649a90e2c31e33ee | refs/heads/master | 2021-01-10T22:01:12.843946 | 2013-03-12T16:19:51 | 2013-03-12T16:19:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 628 | py | from Log_In import driver
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver import ActionChains
#method fo send keys
def Edit_Field( user_name,password ):
driver.find_element_by_xpath("//*[@id='email']").send_keys(user_name)
return Edit_Field
#Login Credentials
#Edit_Field("//*[@id='email']", "skashem+0965@totsy.com")
#Edit_Field("//*[@id='pass']", "user123")
#click_on_objects("//*[@id='submit-button']")
#clicking on sign out
#click_on_objects("//*[@id='userAccount']/a")
#click_on_objects("//*[@id='userAccount']/ul/li[10]/a")
| [
"skashem@totsy.com"
] | skashem@totsy.com |
456dccc0d0b058daea30a5811b67c32f327eaad5 | eb9f655206c43c12b497c667ba56a0d358b6bc3a | /python/testData/breadcrumbs/exceptAs.py | 98450c359de443a2f944b026192782eee6f6b9cc | [
"Apache-2.0"
] | permissive | JetBrains/intellij-community | 2ed226e200ecc17c037dcddd4a006de56cd43941 | 05dbd4575d01a213f3f4d69aa4968473f2536142 | refs/heads/master | 2023-09-03T17:06:37.560889 | 2023-09-03T11:51:00 | 2023-09-03T12:12:27 | 2,489,216 | 16,288 | 6,635 | Apache-2.0 | 2023-09-12T07:41:58 | 2011-09-30T13:33:05 | null | UTF-8 | Python | false | false | 61 | py | try:
print "abc"
except KeyError as e:
print "d<caret>ef" | [
"Semyon.Proshev@jetbrains.com"
] | Semyon.Proshev@jetbrains.com |
6b3e10704b67a05bbd5fc73fe408618d870f0728 | 262311e60529868e38c2c57ee3db573f8e11c458 | /qa-automated/runner.py | c841c2e6d2e393b0fa9c3ef97393f624bae447f1 | [] | no_license | huileizhan227/untitled | 1c5604736d9ffcce6f7cb7e308cdc0ebd07e116a | 07df74c89291b1664a28e3c8dcba51a917f1835f | refs/heads/master | 2023-01-27T11:51:37.609210 | 2020-04-16T11:49:59 | 2020-04-16T11:49:59 | 150,606,504 | 1 | 0 | null | 2023-01-09T12:00:12 | 2018-09-27T15:12:18 | HTML | UTF-8 | Python | false | false | 2,700 | py | import os
import sys
import time
import qasite
import pytest
import config
from multiprocessing import Pool
from performance import Report as Perf
from common import devicectl
from common import serverctl
from common import utils
def run(project_name=None, build_id=None, test_name_filter=None):
# before
if (not project_name) or (not build_id):
log_folder = os.path.join(config.LOG_FOLDER, utils.get_formated_time())
else:
log_folder = os.path.join(config.LOG_FOLDER, project_name, str(build_id))
# run server
serverctl.run_servers(log_folder=log_folder)
devicectl.uninstall_apk()
devicectl.uninstall_ua2()
devicectl.wakeup()
# run cases
devices = config.devices
# case_process_list = []
args_list = []
for device in devices:
report_folder = os.path.join(log_folder, device['name'])
if not os.path.exists(report_folder):
os.makedirs(report_folder)
perf_log = os.path.join(report_folder, 'performance.csv')
perf_report = os.path.join(report_folder, 'performance.html')
ui_report = os.path.join(report_folder, 'report.html')
device['perf_report'] = perf_report
device['ui_report'] = ui_report
args=(perf_log, perf_report, ui_report, device['id'], test_name_filter)
args_list.append(args)
pool = Pool(len(args_list))
pool.starmap(run_cases, args_list)
pool.close()
pool.join()
# stop server
print('run cases over, killing servers...')
serverctl.stop_servers()
# upload report
# todo 先上传一个测试报告,多报告需qasite支持
if (project_name is not None) and (build_id is not None):
print('uploading aotomated testing report...')
if not qasite.upload_report(devices[0]['ui_report'], 0, project_name, build_id):
print('upload failed')
print('uploading performance testing report...')
if not qasite.upload_report(devices[0]['perf_report'], 1, project_name, build_id):
print('upload failed')
print('test finished.')
def run_cases(perf_log, perf_report, ui_report, device_id, test_name_filter):
# runpytest
arg_list = [
'cases/app',
'--html={}'.format(ui_report),
'--self-contained-html',
'--device-id={}'.format(device_id),
'--perf-log={}'.format(perf_log),
'--perf-report={}'.format(perf_report)
]
if test_name_filter:
arg_list.extend(['-k', test_name_filter])
pytest.main(arg_list)
if __name__ == "__main__":
test_name_filter = None
if len(sys.argv) > 1:
test_name_filter = sys.argv[1]
run(test_name_filter=test_name_filter)
| [
"374826581@qq.com"
] | 374826581@qq.com |
2ac4989e7ee98f671dc7f66b5079039b4e0c3f88 | 1f30b545c14cdeb63b4409dccf75e99e561e94ae | /welleng/visual.py | 318f7a427a7219af39455e3876061ada7993597d | [
"Apache-2.0"
] | permissive | huangkai31/welleng | 1dbff0383e8b67e54003e01f10e758382c21dfe8 | 860b78d2fc484e40426b2a2cf6a7830a859807cf | refs/heads/main | 2023-07-12T17:20:55.223218 | 2021-08-02T22:43:52 | 2021-08-02T22:43:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,901 | py | try:
import trimesh
TRIMESH = True
except ImportError:
TRIMESH = False
try:
from vedo import show, Box, Axes, trimesh2vedo, Lines, Sphere
VEDO = True
except ImportError:
VEDO = False
import numpy as np
from .version import __version__ as VERSION
class World:
def __init__(
self,
bb_center,
length,
width,
height
):
self.bb_center = bb_center
self.length = length
self.width = width
self.height = height
self.world = Box(
bb_center,
length,
width,
height
).wireframe()
def plot(
    data,
    names=None,
    colors=None,
    lines=None,
    targets=None,
    arrows=None,
    text=None,
    boxes=None,
    points=None,
    interactive=True,
):
    """
    A vedo wrapper for quickly visualizing well trajectories for QAQC purposes.

    Parameters
    ----------
    data: a trimesh.Trimesh object or a list of trimesh.Trimesh
        objects or a trmiesh.scene object
    names: list of strings (default: None)
        A list of names, index aligned to the list of well meshes.
    colors: list of strings (default: None)
        A list of color or colors. If a single color is listed then this is
        applied to all meshes in data, otherwise the list of colors is
        indexed to the list of meshes.
    lines, targets, arrows, boxes: vedo actors (default: None)
        Passed straight through to ``vedo.show``.
    text: (default: None)
        Currently unused.  # NOTE(review): kept for interface compatibility.
    points: list of 3D coordinates (default: None)
        Rendered as grey spheres.
    interactive: bool (default: True)
        Whether the vedo window accepts user interaction.
    """
    assert all((VEDO, TRIMESH)), "ImportError: try pip install welleng[easy]"
    if isinstance(data, trimesh.scene.scene.Scene):
        meshes = [v for k, v in data.geometry.items()]
        if names is None:
            names = list(data.geometry.keys())
    # handle a single mesh being passed
    elif isinstance(data, trimesh.Trimesh):
        meshes = [data]
    else:
        meshes = data
    if names is not None:
        # Bug fix: validate against the normalized mesh list; ``data`` may be
        # a Scene or a single Trimesh, neither of which supports len().
        assert len(names) == len(meshes), \
            "Names must be length of meshes list else None"
    if colors is not None:
        if len(colors) == 1:
            colors = colors * len(meshes)
        else:
            assert len(colors) == len(meshes), \
                "Colors must be length of meshes list, 1 else None"
    if points is not None:
        points = [
            Sphere(p, r=30, c='grey')
            for p in points
        ]
    meshes_vedo = []
    for i, mesh in enumerate(meshes):
        # Accumulate all vertices (for the bounding box) and each well's
        # start location as we convert the meshes.
        if i == 0:
            vertices = np.array(mesh.vertices)
            start_locations = np.array([mesh.vertices[0]])
        else:
            vertices = np.concatenate(
                (vertices, np.array(mesh.vertices)),
                axis=0
            )
            start_locations = np.concatenate(
                (start_locations, np.array([mesh.vertices[0]])),
                axis=0
            )
        # convert to vedo mesh
        m_vedo = trimesh2vedo(mesh)
        if colors is not None:
            m_vedo.c(colors[i])
        if names is not None:
            m_vedo.name = names[i]
            m_vedo.flag()
        meshes_vedo.append(m_vedo)
    w = get_bb(vertices)
    axes = get_axes(w.world)
    # try and figure out a nice start camera position
    pos = w.bb_center
    vec1 = pos - [w.length, w.width, 0]
    vec2 = np.array([vec1[1], vec1[0], 0])
    pos_new = [pos[0], pos[1], -4000] + vec2 * 3
    camera_opts = dict(
        pos=pos_new,
        focalPoint=pos,
        viewup=[0., 0., -1.]
    )
    show(
        meshes_vedo,
        w.world,
        lines,
        targets,
        arrows,
        boxes,
        axes,
        points,
        bg='lightgrey',
        bg2='lavender',
        camera=camera_opts,
        interactorStyle=10,
        resetcam=True,
        # Bug fix: honor the caller's flag instead of hard-coding True.
        interactive=interactive,
        # verbose=True,
        title=f'welleng {VERSION}'
    )
def get_start_location(start_locations):
    """Return the column-wise mean of *start_locations* (an (n, 3) array),
    with the z component replaced by the shallowest (minimum) z value."""
    centroid = np.average(start_locations, axis=0)
    centroid[2] = start_locations[:, 2].min()
    return centroid
def get_bb(vertices, min_size=[1000., 1000., 0.]):
    """Construct a ``World`` bounding box that encloses *vertices*.

    Each extent is floored at the corresponding entry of *min_size* so that
    very small scenes still get a usable view volume.
    """
    upper = np.amax(vertices, axis=0)
    lower = np.amin(vertices, axis=0)
    extent = upper - lower
    l, w, h = np.amax(np.array([extent, min_size]), axis=0)
    center = lower + extent / 2
    return World(
        center,
        l,
        w,
        h
    )
# Build the vedo Axes assembly for the world box; axis titles are swapped so
# the on-screen axes read as North/East/TVD for the well-engineering frame.
def get_axes(world):
    """Return a configured vedo.Axes assembly for *world* (a vedo actor)."""
    assert VEDO, "ImportError: try pip install welleng[easy]"
    axes = Axes(
        world,
        xtitle='y: North (m)', # swap axis to plot correctly
        ytitle='x: East (m)',
        ztitle='z: TVD (m)',
        xTitleJustify='bottom-right',
        yTitleJustify='top-right',
        zTitleJustify='top-right',
        xyGrid2=True, xyGrid=False,
        zxGrid=True, yzGrid2=True,
        zxGridTransparent=True, yzGrid2Transparent=True,
        yzGrid=False,
        xLabelRotation=-1,
        yLabelRotation=1,
        zLabelRotation=1,
    )
    for a in axes.unpack(): # unpack the Assembly to access its elements
        # Mirror titles/labels so text reads correctly in the flipped frame.
        if 'title' in a.name or 'NumericLabel' in a.name:
            a.mirror('y')
        if 'yNumericLabel' in a.name:
            a.scale(0.8)
    return axes
def get_lines(clearance):
    """
    Build lines between the closest points on the reference well and the
    offset well for each reference-well interval, colored by the Separation
    Factor (SF) computed between the wells at those points.

    Parameters
    ----------
    clearance: welleng.clearance object

    Returns
    -------
    lines: vedo.Lines object
        A vedo.Lines object colored by the object's SF values.
    """
    assert VEDO, "ImportError: try pip install welleng[easy]"
    separation_factors = clearance.SF
    starts, ends = clearance.get_lines()
    sf_lines = Lines(starts, ends).cmap('hot_r', separation_factors, on='cells')
    sf_lines.addScalarBar(title='SF')
    return sf_lines
| [
"jonnycorcutt@gmail.com"
] | jonnycorcutt@gmail.com |
4926ffe92721d5b449773c2caff35eabfbef1e6a | b410490f4249b4075eab92e3a16000a8b839e18c | /object_detection/YOLOv3/dataset.py | 835b5b26d48edf15af90e53cc530340dfc619848 | [] | no_license | TaeYeon-kim-ai/Pytorch | 5936145643a2b36b5c52e43f735bda81264ed6d5 | 452e5543a959f2b280b088635953985e1101041d | refs/heads/master | 2023-07-01T17:29:30.558774 | 2021-08-12T19:01:36 | 2021-08-12T19:01:36 | 387,499,162 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,282 | py | #import config
import numpy as np
import os
import pandas as pd
import torch
from PIL import Image, ImageFile
from torch.utils.data import Dataset, DataLoader
from utils import (
iou_width_height as iou,
non_max_suppression_as_nms,
)
ImageFile.LOAD_TRUNCATED_IMAGES = True
class YOLODataset(Dataset):
    """YOLOv3 dataset that builds per-scale training targets.

    Each sample yields the image plus one target tensor per prediction scale;
    every target row is ``[objectness, x, y, w, h, class]``.
    """

    def __init__(
        self,
        csv_file,
        img_dir, label_dir,
        anchors,
        image_size=416,
        S=(13, 26, 52),
        C=20,
        transform=None,
    ):
        """
        Args:
            csv_file: CSV (with header) listing image filename, label filename.
            img_dir: directory containing the images.
            label_dir: directory containing YOLO-format label files.
            anchors: three lists of (w, h) anchor pairs, one list per scale.
            image_size: nominal input image size (bookkeeping only here).
            S: grid sizes of the three prediction scales.  A tuple default
                avoids the shared-mutable-default pitfall; callers passing
                their own list are unaffected.
            C: number of classes.
            transform: optional albumentations-style callable accepting
                ``image=...`` and ``bboxes=...`` keyword arguments.
        """
        self.annotations = pd.read_csv(csv_file)
        self.img_dir = img_dir
        self.label_dir = label_dir
        self.transform = transform
        self.S = S
        # Bug fix: the original had a bare ``self.anchors`` expression, which
        # raised AttributeError. Flatten the per-scale anchors into one
        # (9, 2) tensor so argsort indices map to scales via // and %.
        self.anchors = torch.tensor(anchors[0] + anchors[1] + anchors[2])
        self.num_anchors = self.anchors.shape[0]
        self.num_anchors_per_scale = self.num_anchors // 3
        self.C = C
        # IoU above this threshold for a non-assigned anchor is ignored
        # (objectness -1) so the loss does not penalize it.
        self.ignore_iou_thresh = 0.5

    def __len__(self):
        # Bug fix: the attribute is ``annotations`` (the original referenced
        # a non-existent ``self.annotation``).
        return len(self.annotations)

    def __getitem__(self, index):
        label_path = os.path.join(self.label_dir, self.annotations.iloc[index, 1])
        # Roll the class id from column 0 to the end: [x, y, w, h, class].
        # Bug fix: the np.loadtxt keyword is ``ndmin`` (was misspelled "ndmim").
        bboxes = np.roll(np.loadtxt(fname=label_path, delimiter=" ", ndmin=2), 4, axis=1).tolist()
        img_path = os.path.join(self.img_dir, self.annotations.iloc[index, 0])
        image = np.array(Image.open(img_path).convert("RGB"))
        if self.transform:
            # Bug fix: the keyword is ``image`` (was misspelled "iamge").
            augmentation = self.transform(image=image, bboxes=bboxes)
            image = augmentation["image"]
            bboxes = augmentation["bboxes"]
        targets = [torch.zeros((self.num_anchors // 3, S, S, 6)) for S in self.S]  # [p_o, x, y, w, h, c]
        for box in bboxes:
            # Rank all 9 anchors by shape-IoU with this box, best first.
            iou_anchors = iou(torch.tensor(box[2:4]), self.anchors)
            anchor_indices = iou_anchors.argsort(descending=True, dim=0)
            x, y, width, height, class_label = box
            has_anchor = [False, False, False]  # at most one anchor per scale
            for anchor_idx in anchor_indices:
                scale_idx = anchor_idx // self.num_anchors_per_scale  # 0, 1, 2
                anchor_on_scale = anchor_idx % self.num_anchors_per_scale  # 0, 1, 2
                S = self.S[scale_idx]
                i, j = int(S * y), int(S * x)  # grid cell holding the box centre
                anchor_taken = targets[scale_idx][anchor_on_scale, i, j, 0]
                if not anchor_taken and not has_anchor[scale_idx]:
                    targets[scale_idx][anchor_on_scale, i, j, 0] = 1
                    x_cell, y_cell = S * x - j, S * y - i  # both in [0, 1)
                    width_cell, height_cell = (
                        width * S,
                        height * S,
                    )
                    box_coordinates = torch.tensor(
                        [x_cell, y_cell, width_cell, height_cell]
                    )
                    targets[scale_idx][anchor_on_scale, i, j, 1:5] = box_coordinates
                    targets[scale_idx][anchor_on_scale, i, j, 5] = int(class_label)
                    has_anchor[scale_idx] = True
                elif not anchor_taken and iou_anchors[anchor_idx] > self.ignore_iou_thresh:
                    targets[scale_idx][anchor_on_scale, i, j, 0] = -1  # ignore this prediction
        return image, tuple(targets)
| [
"lovehjty@gmail.com"
] | lovehjty@gmail.com |
04f7c65a1415eda08f8795e92296d6e74b3ec043 | 3b645c0180db61a1dbc870ea4b48c27f96683965 | /model_and_simulate/road_traffic_microscopic/vehicle.py | 73418cc0dca39e335cdc473566014b1b075cfc1e | [
"CC0-1.0",
"BSD-2-Clause"
] | permissive | tomtuamnuq/model_and_simulate | 245c361bbbfb3ad4a3b11f6bae10cab1fe6aedf1 | 40a37159ef03ca992558924c5b9cdbbfba9c5a85 | refs/heads/main | 2023-06-30T00:25:13.146440 | 2021-07-29T13:57:32 | 2021-07-29T13:57:32 | 380,200,699 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,440 | py | """Module with `Vehicle` class."""
from __future__ import annotations
from typing import Optional
from numpy.random import default_rng
from .section import Cell, Section
SEED = 1234  # fixed seed so every simulation run is reproducible
rng = default_rng(seed=SEED)  # module-wide generator shared by all Vehicle instances
class Vehicle:
    """Stochastic cellular automaton implementing the Nagel-Schreckenberg rules."""

    def __init__(
        self,
        identity: int,
        velocity: int,
        velocity_max: int,
        dawdling_factor: float,
    ):
        """
        Args:
            identity (int): Non changeable identification number.
            velocity (int): The initial velocity along the x-Axis.
            velocity_max (int): The maximum allowed velocity of this vehicle.
            dawdling_factor (float): The probability in [0, 1] for dawdling.
        """
        self._identity = identity
        self._velocity = velocity
        self._velocity_max = velocity_max
        self._dawdling_factor = dawdling_factor
        self._cell = None  # type: Optional[Cell]
        self._successor = None  # type: Optional[Vehicle]

    @property
    def successor(self) -> Optional[Vehicle]:
        """The vehicle on the right side of this vehicle."""
        return self._successor

    @successor.setter
    def successor(self, successor: Vehicle) -> None:
        """Setter of successor."""
        self._successor = successor

    @property
    def identity(self) -> int:
        """The identification number."""
        return self._identity

    @property
    def position(self) -> int:
        """The position in units of cell lengths."""
        return self._cell.number

    @property
    def velocity(self) -> int:
        """The current velocity."""
        return self._velocity

    @velocity.setter
    def velocity(self, vel: int) -> None:
        """Setter for velocity."""
        self._velocity = vel

    def distance_to_successor(self, right_border: int) -> int:
        """Get the number of free cells in front of this vehicle.

        Without a successor, the maximum velocity is used as the free
        distance; negative gaps (successor wrapped around the ring) are
        corrected with the section's right border.
        """
        if self.successor is None:
            gap = self._velocity_max
        else:
            gap = self.successor.position - self.position - 1
        if gap < 0:
            gap = right_border + gap
        return gap

    def _accelerate(self) -> None:
        # Rule 1: speed up by one cell/step, capped at the maximum velocity.
        self.velocity = min(self._velocity_max, self.velocity + 1)

    def _brake(self, distance: int) -> None:
        # Rule 2: never drive further than the free distance ahead.
        self.velocity = min(self.velocity, distance)

    def _is_dawdling(self) -> bool:
        # One Bernoulli draw with the vehicle's dawdling probability.
        return rng.binomial(1, self._dawdling_factor) == 1

    def _dawdle(self) -> None:
        # Rule 3: randomly slow down by one, never below standstill.
        self.velocity = max(0, self.velocity - 1)

    def update_velocity(self, right_border: int) -> None:
        """Perform the NaSch rules (accelerate, brake, dawdle) for one step.

        The right border is used to correct wrapped-around distances.
        """
        self._accelerate()
        self._brake(self.distance_to_successor(right_border))
        if self._is_dawdling():
            self._dawdle()

    def move(self, section: Section) -> None:
        """Empty the current cell and occupy the next cell on `section`."""
        if self.velocity == 0:
            return
        self._cell.make_empty()
        self._cell = section.get_cell(self.position + self.velocity)
        self._cell.make_occupied()

    def place_into_cell(self, cell: Cell):
        """Puts this vehicle into `cell` and marks it occupied."""
        self._cell = cell
        cell.make_occupied()
| [
"tomtuamnuq@users.noreply.github.com"
] | tomtuamnuq@users.noreply.github.com |
94ab3c6e0fc557517bcc2017e61c5afc8ca33fa7 | c538edef188725501ad164bd7c9b8c537c024376 | /blog/handler/test2.py | 24c5ff0ed6466a6d4a96e16d8234ff800d61f201 | [] | no_license | daishitong12345/PersonalBlog | cdfcda40cf30edd4558eb2c79074fa0ac6280e64 | 83f21422e6b201d923fdcbb4156f612f75df3a93 | refs/heads/master | 2022-11-28T04:49:32.635581 | 2018-03-07T08:40:13 | 2018-03-07T08:40:13 | 124,204,737 | 0 | 1 | null | 2022-11-20T20:03:13 | 2018-03-07T08:33:47 | Python | UTF-8 | Python | false | false | 344 | py | #_*_coding:utf_8_*_
__author__ = 'dst'
from magweb import MageWeb
from ..model import User,session
from ..util import jsonify
# All routes below are mounted under the /user prefix.
# NOTE(review): both handlers are stubs that only log the request; the User
# model, session, and jsonify imports above are not yet wired in.
user_router = MageWeb.Router(prefix='/user')
@user_router.post('/reg')
def reg(ctx,request:MageWeb.Request):
    # POST /user/reg - user registration endpoint (stub).
    print(request)
@user_router.post('/login')
def login(ctx,request:MageWeb.Request):
    # POST /user/login - user login endpoint (stub).
    print(request)
| [
"291415593@qq.com"
] | 291415593@qq.com |
9767187549aa3969e1f4c38295e879146bb40233 | 2d1cf59d4ad9321664a3e4b846395107c42f520c | /sdk/search/azure-search-documents/azure/search/documents/_paging.py | 15968c0066934d71c94f6aca6e8a30159cbfc3d4 | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | RGKarthik/azure-sdk-for-python | ad73755eed6760d5c0dfa3c4ae9b9e58005f8626 | 7927f8468808a6938972d32bad69448c98297c03 | refs/heads/master | 2023-05-14T05:40:53.086308 | 2021-05-25T19:08:31 | 2021-05-25T19:08:31 | 370,263,477 | 1 | 0 | MIT | 2021-05-24T19:19:31 | 2021-05-24T07:21:17 | null | UTF-8 | Python | false | false | 5,136 | py | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import base64
import itertools
import json
from azure.core.paging import ItemPaged, PageIterator, ReturnType
from ._generated.models import SearchRequest
if TYPE_CHECKING:
# pylint:disable=unused-import,ungrouped-imports
from typing import Any, Union
def convert_search_result(result):
    """Flatten a generated SearchResult into a plain dict.

    The dynamic document properties form the base dict (which is mutated in
    place), with the service-computed score and highlights exposed under
    their OData-style keys.
    """
    document = result.additional_properties
    document["@search.score"] = result.score
    document["@search.highlights"] = result.highlights
    return document
def pack_continuation_token(response, api_version="2020-06-30"):
    """Encode the paging state of *response* as a base64 token.

    Returns None when the response has no next page.
    """
    if response.next_page_parameters is None:
        return None
    token = json.dumps({
        "apiVersion": api_version,
        "nextLink": response.next_link,
        "nextPageParameters": response.next_page_parameters.serialize(),
    })
    return base64.b64encode(token.encode("utf-8"))
def unpack_continuation_token(token):
    """Decode a continuation token back into (next_link, SearchRequest)."""
    payload = json.loads(base64.b64decode(token))
    request = SearchRequest.deserialize(payload["nextPageParameters"])
    return payload["nextLink"], request
class SearchItemPaged(ItemPaged[ReturnType]):
    """ItemPaged specialization exposing search-level aggregates
    (facets, coverage, total count) alongside item iteration."""

    def __init__(self, *args, **kwargs):
        super(SearchItemPaged, self).__init__(*args, **kwargs)
        self._first_page_iterator_instance = None

    def __next__(self):
        # type: () -> ReturnType
        if self._page_iterator is None:
            self._page_iterator = itertools.chain.from_iterable(
                self._first_iterator_instance()
            )
        return next(self._page_iterator)

    def _first_iterator_instance(self):
        # Lazily create-and-cache the page iterator so the aggregate getters
        # and item iteration share one underlying iterator.
        if self._first_page_iterator_instance is None:
            self._first_page_iterator_instance = self.by_page()
        return self._first_page_iterator_instance

    def get_facets(self):
        # type: () -> Union[dict, None]
        """Return any facet results if faceting was requested."""
        return self._first_iterator_instance().get_facets()

    def get_coverage(self):
        # type: () -> float
        """Return the coverage percentage, if `minimum_coverage` was
        specified for the query.
        """
        return self._first_iterator_instance().get_coverage()

    def get_count(self):
        # type: () -> float
        """Return the count of results if `include_total_count` was
        set for the query.
        """
        return self._first_iterator_instance().get_count()
# The pylint error silenced below seems spurious, as the inner wrapper does, in
# fact, become a method of the class when it is applied.
def _ensure_response(f):
# pylint:disable=protected-access
def wrapper(self, *args, **kw):
if self._current_page is None:
self._response = self._get_next(self.continuation_token)
self.continuation_token, self._current_page = self._extract_data(
self._response
)
return f(self, *args, **kw)
return wrapper
class SearchPageIterator(PageIterator):
    """Page iterator that drives Search-service paging via continuation
    tokens, and surfaces per-query aggregates (facets, coverage, count)."""
    def __init__(self, client, initial_query, kwargs, continuation_token=None):
        super(SearchPageIterator, self).__init__(
            get_next=self._get_next_cb,
            extract_data=self._extract_data_cb,
            continuation_token=continuation_token,
        )
        self._client = client
        self._initial_query = initial_query
        self._kwargs = kwargs
        self._facets = None
        # api_version is popped so it is not forwarded to search_post calls.
        self._api_version = kwargs.pop("api_version", "2020-06-30")
    def _get_next_cb(self, continuation_token):
        # First page: run the original query. Later pages: replay the
        # request decoded from the continuation token.
        if continuation_token is None:
            return self._client.documents.search_post(
                search_request=self._initial_query.request, **self._kwargs
            )
        _next_link, next_page_request = unpack_continuation_token(continuation_token)
        return self._client.documents.search_post(search_request=next_page_request, **self._kwargs)
    def _extract_data_cb(self, response):  # pylint:disable=no-self-use
        # Pack the next-page state into a token and flatten each result.
        continuation_token = pack_continuation_token(response, api_version=self._api_version)
        results = [convert_search_result(r) for r in response.results]
        return continuation_token, results
    @_ensure_response
    def get_facets(self):
        # NOTE(review): resetting continuation_token after the decorator's
        # fetch appears intended to restart paging for subsequent iteration —
        # confirm against PageIterator semantics.
        self.continuation_token = None
        facets = self._response.facets
        # Convert the generated facet models to plain dicts, once.
        if facets is not None and self._facets is None:
            self._facets = {k: [x.as_dict() for x in v] for k, v in facets.items()}
        return self._facets
    @_ensure_response
    def get_coverage(self):
        self.continuation_token = None
        return self._response.coverage
    @_ensure_response
    def get_count(self):
        self.continuation_token = None
        return self._response.count
| [
"noreply@github.com"
] | RGKarthik.noreply@github.com |
aa61f02c9495f71cfaf5f623af8b8972ba1ad99c | 763278b0a6357e88deda2c5c734e22f2337cc926 | /Monitor/Monitor/urlgenerator.py | a40b2fb14b2cd0c0765f309063a3c587aa98d724 | [] | no_license | abirAdhikari/Website_Monitoring | 1e8a4c2162dcd89b16fe4552a32a9c14b10c1137 | 8cc4a0f4a4037fdaf37ee823ee3529e35456001d | refs/heads/master | 2020-05-18T11:52:34.083834 | 2019-05-06T17:17:26 | 2019-05-06T17:17:26 | 184,391,907 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,565 | py | # Author: AADHIKARI 06/06/2018
'''
How to parse JSON files?
https://stackoverflow.com/questions/21058935/python-json-loads-shows-valueerror-extra-data
'''
import os
import sys
import logging
import random
import pandas as pd
import json
from pandas.io.json import json_normalize
import urllib
from lxml import etree
import xml.etree.ElementTree as ET
from timedecorator import TimeDecorator
from sqlite3 import dbapi2 as sqlite
import hashlib
def print_x(msg):
    """Print *msg* to stdout.

    Single indirection point for console output so it can be silenced or
    redirected to ``logging`` in one place.  (Removed the dead ``pass``
    statement that followed the print in the original.)
    """
    print(msg)
class UrlGenerator(object):
    '''Singleton Start'''
    # Private payload class instantiated at most once (see __init__ below).
    # It loads all vendor city tables and the priority-split route tables.
    class __UrlGenerator:
        def __init__(self):
            self.init_vendor_routes()
            self.init_polling_routes()
        def init_vendor_routes(self):
            # NOTE(review): ``init_gozo_routes`` is not defined anywhere in
            # this class, so this first call raises AttributeError at
            # construction time — confirm against the full file.
            self._gozo_df = self.init_gozo_routes()
            self._makemytrip_df = self.init_makemytrip_routes()
            self._ahataxis_df = self.init_ahataxis_routes()
            self._wiwigo_df = self.init_wiwigo_routes()
            self._oneway_df = self.init_onewaycab_routes()
            self._mytaxiindia_df = self.init_mytaxiindia_routes()
            self._getmecab_df = self.init_getmecab_routes()
            self._hippocabs_df = self.init_hippocabs_routes()
            # NOTE(review): dead code — this nested function is never called,
            # and ``x.gozoLoc`` / ``self.Instance`` look broken as written.
            def find_city_name(x):
                gozoLoc = self.Instance._gozo_df.loc[x]
                print(x.gozoLoc.city_name)
                return gozoLoc.city_name
        def init_makemytrip_routes(self):
            # The resource file holds comma-separated JSON objects; wrapping
            # the content in [...] turns it into a valid JSON array.
            mmt_input_json = os.getcwd() + '.\\spiders\\resources\\served_cities_makemytrip.json'
            with open(mmt_input_json) as data_file:
                content = data_file.read()
                content = '[' + content + ']'
                content = json.loads(content)
                mmt_df = pd.DataFrame(json_normalize(content))
            return mmt_df.reset_index()
        def init_mytaxiindia_routes(self):
            # Same wrap-in-brackets trick as the MakeMyTrip loader above.
            mytaxiindia_input_json = os.getcwd() + '.\\spiders\\resources\\served_cities_mytaxiindia.json'
            with open(mytaxiindia_input_json) as data_file:
                content = data_file.read()
                content = '[' + content + ']'
                content = json.loads(content)
                mytaxiindia_df = pd.DataFrame(json_normalize(content))
            return mytaxiindia_df.reset_index()
        # The following vendors have no city table yet; the corresponding
        # *_df attributes are left as None.
        def init_ahataxis_routes(self):
            pass
        def init_wiwigo_routes(self):
            pass
        def init_onewaycab_routes(self):
            pass
        def init_getmecab_routes(self):
            pass
        def init_hippocabs_routes(self):
            # NOTE(review): reuses the MyTaxiIndia city file — confirm this
            # is intentional rather than a copy/paste leftover.
            mytaxiindia_input_json = os.getcwd() + '.\\spiders\\resources\\served_cities_mytaxiindia.json'
            with open(mytaxiindia_input_json) as data_file:
                content = data_file.read()
                content = '[' + content + ']'
                content = json.loads(content)
                hippocabs_df = pd.DataFrame(json_normalize(content))
            return hippocabs_df.reset_index()
        def init_polling_routes(self):
            # Split the master routes table into three priority buckets used
            # by populate_cities().
            #resource_file = os.getcwd() + '.\\spiders\\resources\\routes_data.csv'
            resource_file = os.getcwd() + '.\\spiders\\resources\\routes_data_stripped.csv'
            df = pd.read_csv(resource_file, low_memory=False)
            df = df.filter(['INDEX', 'SOURCE','DESTINATION','POLL_PRIORITY'], axis=1)
            self._high_priority_df = df.loc[df['POLL_PRIORITY'] == 'HIGH_PRIORITY']
            self._moderate_priority_df = df.loc[df['POLL_PRIORITY'] == 'MODERATE_PRIORITY']
            self._low_priority_df = df.loc[df['POLL_PRIORITY'] == 'LOW_PRIORITY']
Instance = None
'''Singelton End'''
# MakeMyTrip
_makemytrip_domain_url = 'https://cabs.makemytrip.com/dt_cabsListing?'
_makemytrip_query_url = 'fromCity=%s&toCity=%s&tripType=OW&'
_makemytrip_date_url = 'departDate=%s&returnDate=%s&pickupTime=%s'
_makemytrip_src_dest_url_docorator = '{"address":"%s","latitude":%s,"longitude":%s,"pincode":"","place_id":"%s","is_city":true}'
# AhaTaxis
_ahataxis_domain_url = 'https://www.ahataxis.com/select/?'
_ahataxis_query_url = 'trip=%s&fromCity=%s&toCity=%s'
_ahataxis_date_url = '&startDate=%s&pickupTime=%s'
# OnewayCab
_onewaycab_domain_url = 'https://oneway.cab/outstation/'
_onewaycab_query_url = '%s-to-%s-taxi'
# Wiwigo
_wiwigo_domain_url = 'https://www.wiwigo.com/onesearch?'
_wiwigo_query_url = 'from=%s&to=%s'
_wiwigo_date_url = '&start=%s'
# GozoCabs
_gozo_domain_url = 'https://www.gozocabs.com/api/agent/booking/getCurrentQuote?'
_gozo_api_key = 'api=DC4E575B3EFFEF7C64F2C40A6E829627&'
_gozo_query_url = 'pickupCity=%s&dropCity=%s&tripType=1&'
_gozo_date_url = 'pickupDate=%s&pickupTime=%s'
#getmecab
_getmecab_domain_url = 'https://www.getmecab.com/one-way/'
_getmecab_query_url = '%s/%s'
#_getmecab_date_url = '&start=%s'
# MyTaxiIndia
_mytaxiindia_domain_url = 'http://www.mytaxiindia.com/outstationcar'
#HippoCabs
#_hippocabs_domain_url= 'https://hippocabs.com/web/'
_hippocabs_domain_url = 'https://hippocabs.com/web/search_cabs.php'
    def __init__(self, scheduler_id, vendor, dtimeDecorator, forced_refresh):
        """
        Args:
            scheduler_id: polling priority bucket ('HIGH_PRIORITY',
                'MODERATE_PRIORITY' or 'LOW_PRIORITY').
            vendor: cab vendor key (e.g. 'makemytrip', 'gozo').
            dtimeDecorator: TimeDecorator used to enumerate trip dates.
            forced_refresh: when True, skip the sqlite de-duplication cache.
        """
        # Build the shared route/vendor dataframes exactly once, lazily.
        if not UrlGenerator.Instance:
            UrlGenerator.Instance = UrlGenerator.__UrlGenerator()
        self._scheduler_id = scheduler_id
        self._vendor = vendor
        self._timeDecorator = dtimeDecorator
        self._forced_refresh = forced_refresh
        self._src_cities = []
        self._dst_cities = []
        self.populate_cities()
        # Reserved for the (currently commented-out) hippocabs state/city
        # lookup helper.
        self.hippo_content = None
    # Normalization hooks: placeholders that currently pass the city lists
    # through unchanged. They exist so per-vendor spelling fixes (e.g.
    # 'Bengaluru' vs 'Bangalore') can be added without touching the builders.
    def normalize_makemytrip_city_names(self, cities):
        return cities
    def normalize_ahataxis_city_names(self, cities):
        return cities
    def normalize_onewaycab_city_names(self, cities):
        return cities
    def normalize_wiwigo_city_names(self, cities):
        return cities
    def normalize_mytaxiindia_city_names(self, cities):
        return cities
    def normalize_gozo_city_names(self, cities):
        return cities
    def normalize_getmecab_city_names(self, cities):
        return cities
    def normalize_hippocabs_city_names(self,cities):
        return cities
# def get_hippo_pickup_dropoff(self,elem):
# if self.hippo_content == None:
# hippocabs_input_json= os.getcwd()+ '.\\spiders\\resources\\india_states_cities.json'
# with open(hippocabs_input_json,'r') as _file:
# self.hippo_content = json.loads(_file.read())
# print (self.hippo_content)
# for state in self.hippo_content.keys():
# if elem in self.hippo_content[state]:
# content = '{},{},{}'.format(elem,state,'India')
# print(content)
# return '{},{},{}'.format(elem,state,'India')
# #return elem+','+state+',India'
def gen_phone(self):
first = '998' #str(random.randint(100,999))
second = str(random.randint(1,888)).zfill(3)
last = (str(random.randint(1,9998)).zfill(4))
while last in ['1111','2222','3333','4444','5555','6666','7777','8888']:
last = (str(random.randint(1,9998)).zfill(4))
return '{}{}{}'.format(first,second, last)
    def populate_cities(self):
        """Load the src/dst city lists for this scheduler's polling priority.

        SOURCE[i] pairs with DESTINATION[i]: one route per dataframe row.
        """
        # HIGH_PRIORITY doubles as the fallback when the id is unrecognised.
        df = self.Instance._high_priority_df
        if self._scheduler_id == 'HIGH_PRIORITY':
            df = self.Instance._high_priority_df
        elif self._scheduler_id == 'MODERATE_PRIORITY':
            df = self.Instance._moderate_priority_df
        elif self._scheduler_id == 'LOW_PRIORITY':
            df = self.Instance._low_priority_df
        self._src_cities = df['SOURCE'].tolist()
        self._dst_cities = df['DESTINATION'].tolist()
        for pair in zip(self._src_cities, self._dst_cities):
            logging.log(logging.INFO, "UrlGenerator::populate_cities(#####) %s: From City: %s, To City: %s" % (self._scheduler_id, pair[0], pair[1]))
def get_filtered_urls(self, selected_days):
urls = self.get_urls(selected_days)
if self._forced_refresh == True:
return urls
else:
filtered_urls = []
connection = sqlite.connect('spiders\sqlDb\competitiondatabase.db')
cursor = connection.cursor()
for url_meta in urls:
m=hashlib.md5(bytes(str(url_meta),"ascii")) # python 3
url_md5 = m.hexdigest()
print ("url_meta = %s, url_md5 = %s" % (url_meta, url_md5))
cursor.execute("select * from competitiondata where url_md5=?", url_md5[0])
result = cursor.fetchone()
if result == False:
filtered_urls.append(url_meta)
connection.close()
return filtered_urls
def get_urls(self, selected_days):
if self._vendor == 'makemytrip':
return self.get_makemytrip_urls(selected_days)
if self._vendor == 'ahataxis':
return self.get_ahataxis_urls(selected_days)
if self._vendor == 'onewaycab':
return self.get_onewaycab_urls(selected_days)
if self._vendor == 'gozo':
return self.get_gozo_urls(selected_days)
if self._vendor == 'wiwigo':
return self.get_wiwigo_urls(selected_days)
if self._vendor == 'mytaxiindia':
return self.get_mytaxiindia_urls(selected_days)
if self._vendor == 'getmecab':
return self.get_getmecab_urls(selected_days)
if self._vendor== 'hippocabs':
return self.get_hippocabs_urls(selected_days)
print_x ("NO VENDOR FOUND")
    def get_makemytrip_urls(self, selected_days):
        """Build MakeMyTrip one-way search URLs for every route/date pair.

        Returns a list of dicts: src_city, dst_city, trip_date, url.
        Routes whose cities are missing from the MakeMyTrip city table are
        skipped silently.
        """
        normalized_src_cities = self.normalize_makemytrip_city_names(self._src_cities)
        normalized_dst_cities = self.normalize_makemytrip_city_names(self._dst_cities)
        df = self.Instance._makemytrip_df.copy()
        urls = []
        for trip_date in self._timeDecorator.get_oneway_date_range('makemytrip', selected_days):
            for pair in zip(normalized_src_cities, normalized_dst_cities):
                src_city = pair[0]
                dst_city = pair[1]
                if src_city == dst_city:
                    continue
                src_df = df[df['city_name'] == src_city]
                if (src_df.empty):
                    continue
                dest_df = df[df['city_name'] == dst_city]
                if (dest_df.empty):
                    continue
                print_x("*****[Scheduler=%s, Vendor=%s] From City: %s - To City: %s *****" % (self._scheduler_id, self._vendor, src_city, dst_city))
                # If several rows match a city name, the LAST row wins.
                src_url = ''
                for src_row in src_df.itertuples():
                    src_url = (self._makemytrip_src_dest_url_docorator % (src_row.city_name, src_row.latitude, src_row.longitude, src_row.place_id))
                dest_url = ''
                for dest_row in dest_df.itertuples():
                    dest_url = (self._makemytrip_src_dest_url_docorator % (dest_row.city_name, dest_row.latitude, dest_row.longitude, dest_row.place_id))
                # Fixed 09:00 pickup time; return date left empty for one-way.
                query_url = (self._makemytrip_query_url % (src_url, dest_url))
                date_url = (self._makemytrip_date_url % (trip_date, '', '09:00'))
                this_url = self._makemytrip_domain_url + query_url + date_url
                print_x(this_url)
                metadata = {'src_city': src_city, 'dst_city': dst_city, 'trip_date': trip_date, 'url': this_url}
                urls.append(metadata)
        return urls
    def get_makemytrip_popular_destination_urls(self, selected_days):
        """Build MakeMyTrip URLs from every served city to its popular
        destinations (the ``pop_dest`` city-code list of each row).

        Returns a list of dicts: src_city, dst_city, trip_date, url.
        """
        # NOTE(review): these two normalized lists are computed but unused —
        # this builder iterates the vendor city table directly.
        normalized_src_cities = self.normalize_makemytrip_city_names(self._src_cities)
        normalized_dst_cities = self.normalize_makemytrip_city_names(self._dst_cities)
        pop_dest_df = self.Instance._makemytrip_df.copy()
        urls = []
        for trip_date in self._timeDecorator.get_oneway_date_range('makemytrip', selected_days):
            for src_row in pop_dest_df.itertuples():
                src_url = (self._makemytrip_src_dest_url_docorator % (src_row.city_name, src_row.latitude, src_row.longitude, src_row.place_id))
                for pop_dest in src_row.pop_dest:
                    # Resolve each popular-destination city code to its row(s).
                    dest_df = pop_dest_df[pop_dest_df['city_code'] == pop_dest]
                    for dest_row in dest_df.itertuples():
                        dest_url = (self._makemytrip_src_dest_url_docorator % (dest_row.city_name, dest_row.latitude, dest_row.longitude, dest_row.place_id))
                        query_url = (self._makemytrip_query_url % (src_url, dest_url))
                        print_x("***** From City: %s - To City: %s *****" % (src_row.city_name, dest_row.city_name))
                        date_url = (self._makemytrip_date_url % (trip_date, '', '09:00'))
                        this_url = self._makemytrip_domain_url + query_url + date_url
                        metadata = {'src_city': src_row.city_name, 'dst_city': dest_row.city_name, 'trip_date': trip_date, 'url': this_url }
                        urls.append(metadata)
        return urls
    def get_gozo_urls(self, selected_days):
        """Build GozoCabs quote-API URLs for every route/date pair.

        Gozo addresses cities by dataframe index (city id), so both endpoints
        are resolved through the gozo city table first.

        NOTE(review): ``_gozo_df`` is filled by ``init_gozo_routes``, which is
        missing from __UrlGenerator — confirm this path works in the full file.
        """
        normalized_src_cities = self.normalize_gozo_city_names(self._src_cities)
        normalized_dst_cities = self.normalize_gozo_city_names(self._dst_cities)
        df = self.Instance._gozo_df.copy()
        urls = []
        for pair in zip(normalized_src_cities, normalized_dst_cities):
            src_city = pair[0]
            dst_city = pair[1]
            if src_city == dst_city:
                continue
            src_df = df[df['city_name'] == src_city]
            if (src_df.empty):
                continue
            dest_df = df[df['city_name'] == dst_city]
            if (dest_df.empty):
                continue
            # If several rows match a city name, the LAST row wins.
            src_city_name = ''
            src_city_id = ''
            for src_row in src_df.itertuples():
                src_city_name = src_row.city_name
                src_city_id = src_row.Index
            dest_city_name = ''
            dest_city_id = ''
            for dest_row in dest_df.itertuples():
                dest_city_name = dest_row.city_name
                dest_city_id = dest_row.Index
            print("***** From City: %s (%s) - To City: %s (%s)*****" % (src_city, src_city_id, dst_city, dest_city_id))
            query_url = (self._gozo_query_url % (src_city_id, dest_city_id))
            # One URL per requested trip date, fixed 09:00 pickup time.
            for trip_date in self._timeDecorator.get_oneway_date_range('gozo', selected_days):
                date_url = (self._gozo_date_url % (str(trip_date), '09:00'))
                this_url = self._gozo_domain_url + self._gozo_api_key + query_url + date_url
                metadata = {'src_city': src_city, 'dst_city': dst_city, 'trip_date': trip_date, 'url': this_url}
                urls.append(metadata)
        return urls
def get_ahataxis_urls(self, selected_days):
normalized_src_cities = self.normalize_ahataxis_city_names(self._src_cities)
normalized_dst_cities = self.normalize_ahataxis_city_names(self._dst_cities)
urls = []
for trip_date in self._timeDecorator.get_oneway_date_range('ahataxis', selected_days):
for pair in zip(normalized_src_cities, normalized_dst_cities):
src_city = pair[0]
dst_city = pair[1]
if src_city == dst_city:
continue
print_x("*****[Scheduler=%s, Vendor=%s] From City: %s - To City: %s *****" % (self._scheduler_id, self._vendor, src_city, dst_city))
query_url = (self._ahataxis_query_url % ('one-way', src_city, dst_city))
date_url = (self._ahataxis_date_url % (trip_date,'09:00'))
this_url = self._ahataxis_domain_url + query_url + date_url
print_x(this_url)
metadata = {'src_city': src_city, 'dst_city': dst_city, 'trip_date': trip_date, 'url': this_url }
urls.append(metadata)
return urls
def get_onewaycab_urls(self, selected_days):
normalized_src_cities = self.normalize_onewaycab_city_names(self._src_cities)
normalized_dst_cities = self.normalize_onewaycab_city_names(self._dst_cities)
urls = []
for trip_date in self._timeDecorator.get_oneway_date_range('onewaycab', selected_days):
for pair in zip(normalized_src_cities, normalized_dst_cities):
src_city = pair[0]
dst_city = pair[1]
if src_city == dst_city:
continue
print_x("*****[Scheduler=%s, Vendor=%s] From City: %s - To City: %s *****" % (self._scheduler_id, self._vendor, src_city, dst_city))
query_url = (self._onewaycab_query_url % (src_city, dst_city))
this_url = self._onewaycab_domain_url + query_url
metadata = {'src_city': src_city, 'dst_city': dst_city,'trip_date':trip_date, 'url': this_url }
print_x(this_url)
urls.append(metadata)
return urls
def get_wiwigo_urls(self, selected_days):
normalized_src_cities = self.normalize_wiwigo_city_names(self._src_cities)
normalized_dst_cities = self.normalize_wiwigo_city_names(self._dst_cities)
urls = []
for trip_date in self._timeDecorator.get_oneway_date_range('wiwigo', selected_days):
for pair in zip(normalized_src_cities, normalized_dst_cities):
src_city = pair[0]
dst_city = pair[1]
if src_city == dst_city:
continue
print_x("*****[Scheduler=%s, Vendor=%s] From City: %s - To City: %s *****" % (self._scheduler_id, self._vendor, src_city, dst_city))
query_url = (self._wiwigo_query_url % (src_city, dst_city))
date_url = (self._wiwigo_date_url % trip_date)
this_url = self._wiwigo_domain_url + query_url + date_url
metadata = {'src_city': src_city, 'dst_city': dst_city, 'trip_date': trip_date, 'url': this_url }
print_x(this_url)
urls.append(metadata)
return urls
def get_getmecab_urls(self, selected_days):
    """Build getmecab search URLs for each (source, destination) city pair.

    The travel date is not part of the URL for this vendor (the date
    fragment is intentionally omitted); it is still recorded in the
    returned metadata so downstream code knows which day the query is for.

    Returns a list of metadata dicts with keys:
        src_city, dst_city, trip_date, url
    """
    src_cities = self.normalize_getmecab_city_names(self._src_cities)
    dst_cities = self.normalize_getmecab_city_names(self._dst_cities)
    urls = []
    for trip_date in self._timeDecorator.get_oneway_date_range('getmecab', selected_days):
        for src_city, dst_city in zip(src_cities, dst_cities):
            # Skip same-city "trips".
            if src_city == dst_city:
                continue
            print_x("*****[Scheduler=%s, Vendor=%s] From City: %s - To City: %s *****" % (self._scheduler_id, self._vendor, src_city, dst_city))
            full_url = self._getmecab_domain_url + (self._getmecab_query_url % (src_city, dst_city))
            print_x(full_url)
            urls.append({'src_city': src_city, 'dst_city': dst_city, 'trip_date': trip_date, 'url': full_url})
    return urls
def get_mytaxiindia_urls(self, selected_days):
    """Build POST payloads for mytaxiindia one-way searches.

    mytaxiindia is queried via a form POST rather than a GET URL, so each
    returned entry carries the domain URL plus a 'formdata' dict, e.g.:

        trip_type : 'One Way Trip'
        OS_From / f_cit_val / f_cit_name : source city label / id / label
        OS_To   / t_cit_val / t_cit_name : destination label / id / label
        osDate, duration                 : trip date

    City ids and labels come from the cached mytaxiindia city dataframe;
    pairs whose source or destination is missing from it are skipped.
    """
    src_cities = self.normalize_mytaxiindia_city_names(self._src_cities)
    dst_cities = self.normalize_mytaxiindia_city_names(self._dst_cities)
    city_df = self.Instance._mytaxiindia_df.copy()
    urls = []
    for trip_date in self._timeDecorator.get_oneway_date_range('mytaxiindia', selected_days):
        for src_city, dst_city in zip(src_cities, dst_cities):
            if src_city == dst_city:
                continue
            src_rows = city_df[city_df['city'] == src_city]
            if src_rows.empty:
                continue
            dst_rows = city_df[city_df['city'] == dst_city]
            if dst_rows.empty:
                continue
            print_x("*****[Scheduler=%s, Vendor=%s] From City: %s - To City: %s *****" % (self._scheduler_id, self._vendor, src_city, dst_city))
            # If several rows match a city, the last one wins (mirrors the
            # original lookup behaviour).
            src_value = ''
            src_label = ''
            for row in src_rows.itertuples():
                src_value = row.value
                src_label = row.label
            dst_value = ''
            dst_label = ''
            for row in dst_rows.itertuples():
                dst_value = row.value
                dst_label = row.label
            query_url = self._mytaxiindia_domain_url
            formdata = {'trip_type': 'One Way Trip',
                        'OS_From': src_label, 'f_cit_val': src_value, 'f_cit_name': src_label,
                        'OS_To': dst_label, 't_cit_val': dst_value, 't_cit_name': dst_label,
                        'osDate': trip_date, 'duration': trip_date}
            metadata = {'src_city': src_city, 'dst_city': dst_city, 'trip_date': trip_date, 'url': query_url, 'formdata': formdata}
            print_x("query_url=%s, formdata=%s" % (query_url, formdata))
            urls.append(metadata)
    return urls
def get_hippocabs_urls(self, selected_days):
    """Build POST payloads for hippocabs one-way searches.

    hippocabs is queried via a form POST; each returned entry carries the
    domain URL plus a 'formdata' dict with the fields the site expects:

        drp_start_city / drp_end_city : source / destination city name
        txt_pickup / txt_dropoff      : pickup / dropoff labels
        txt_datetime                  : trip date
        drp_time                      : fixed pickup time ("09:00 AM")
        pas_contact1                  : randomly generated phone number

    City pairs not present in the cached hippocabs city dataframe are
    skipped.  (Dead commented-out payload variants and a no-op string
    literal that was evaluated inside the loop have been removed.)
    """
    src_cities = self.normalize_hippocabs_city_names(self._src_cities)
    dst_cities = self.normalize_hippocabs_city_names(self._dst_cities)
    df = self.Instance._hippocabs_df.copy()
    # NOTE(review): sibling vendor methods log via print_x; the plain print
    # calls below are kept as-is to preserve existing output behaviour.
    print('Hippocabs Dataframe: ', df)
    urls = []
    for trip_date in self._timeDecorator.get_oneway_date_range('hippocabs', selected_days):
        for src_city, dst_city in zip(src_cities, dst_cities):
            if src_city == dst_city:
                continue
            src_df = df[df['city'] == src_city]
            if src_df.empty:
                continue
            dest_df = df[df['city'] == dst_city]
            if dest_df.empty:
                continue
            print("*****[Scheduler=%s, Vendor=%s] From City: %s - To City: %s, Trip Data: %s*****" % (self._scheduler_id, self._vendor, src_city, dst_city, trip_date))
            # If several rows match a city, the last one wins (matches the
            # original lookup behaviour).
            src_label = ''
            for src_row in src_df.itertuples():
                src_label = src_row.city
            dst_label = ''
            for dst_row in dest_df.itertuples():
                dst_label = dst_row.city
            query_url = self._hippocabs_domain_url
            mobile_no = self.gen_phone()
            formdata = {
                'drp_start_city': src_label,
                'drp_end_city': dst_label,
                'txt_pickup': src_label,
                'txt_dropoff': dst_label,
                'txt_datetime': trip_date,
                'drp_time': "09:00 AM",
                'pas_contact1': mobile_no}
            metadata = {'src_city': src_city, 'dst_city': dst_city, 'trip_date': trip_date, 'url': query_url, 'formdata': formdata}
            print("query_url=%s, formdata=%s" % (query_url, formdata))
            urls.append(metadata)
    return urls
| [
"noreply@github.com"
] | abirAdhikari.noreply@github.com |
c0f054d4e60c35d007c3451890947d020565fbc5 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02783/s809384037.py | b1bac9c9f8363c745905fad4ee60b29b26e6e608 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | n,d=list(map(int,input().split()))
# Number of size-d groups needed to cover n items, i.e. ceil(n / d).
# (n + d - 1) // d is the standard integer ceiling-division idiom and
# replaces the explicit divisible / not-divisible branch.
print((n + d - 1) // d)
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
406df6ef0eb1d39f1092d6c53dd1dce093229a1e | c60d4905da857517c8c9713624c9c4d3d4dd0a33 | /shop/migrations/0001_initial.py | 6d0fc6d5540caac5fa0af66df281d64b7d6909e0 | [] | no_license | ianb-/pojshop | 3037ee94df9f095f45a650ec9ab51d5f3a3a7667 | edf46479bfca453e1d704ad0efad494dbe62f201 | refs/heads/master | 2021-01-21T14:07:42.800642 | 2015-05-17T23:13:44 | 2015-05-17T23:13:44 | 28,342,960 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,990 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import easy_thumbnails.fields
class Migration(migrations.Migration):
    """Initial schema for the shop app.

    Creates the catalogue models (Category, Product, ProductImage,
    Product_to_cat), stock tracking (Item), purchasing (Supplier, Contact,
    PurchaseOrder, PurchaseDetail) and sales (SaleOrder, SaleDetail), then
    wires up the inter-model foreign keys.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Self-referencing category tree (via 'parent').
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=30)),
                ('level', models.PositiveSmallIntegerField(default=3)),
                ('slug', models.SlugField(unique=True)),
                ('details', models.TextField(null=True, blank=True)),
                ('image', models.ImageField(upload_to=b'categories/')),
                ('views', models.IntegerField(default=0)),
                ('likes', models.IntegerField(default=0)),
                ('parent', models.ForeignKey(blank=True, to='shop.Category', null=True)),
            ],
            options={
                'verbose_name_plural': 'Categories',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Contact',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=24)),
                ('firstname', models.CharField(default=None, max_length=32)),
                ('lastname', models.CharField(max_length=32)),
                ('job_title', models.CharField(default=None, max_length=32, null=True, blank=True)),
                ('phone', models.CharField(max_length=12)),
                ('email', models.EmailField(max_length=64)),
                ('notes', models.TextField()),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Item',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('serial', models.CharField(max_length=64, null=True)),
                ('is_for_sale', models.BooleanField(default=False)),
                ('purchase_price', models.DecimalField(default=None, max_digits=6, decimal_places=2)),
                ('sale_price', models.DecimalField(default=None, null=True, max_digits=6, decimal_places=2)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=64)),
                ('SKU', models.CharField(max_length=64)),
                ('slug', models.SlugField(unique=True)),
                ('details', models.TextField()),
                ('shelf', models.BooleanField(default=False)),
                ('stock', models.PositiveSmallIntegerField(default=0)),
                ('price', models.DecimalField(max_digits=6, decimal_places=2)),
                ('views', models.IntegerField(default=0)),
                ('likes', models.IntegerField(default=0)),
                ('sales', models.IntegerField(default=0)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Product_to_cat',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('category', models.ForeignKey(to='shop.Category')),
                ('product', models.ForeignKey(to='shop.Product')),
            ],
            options={
                'verbose_name_plural': 'Product Categorisations',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='ProductImage',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('image', easy_thumbnails.fields.ThumbnailerImageField(upload_to=b'products')),
                ('product', models.ForeignKey(to='shop.Product')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='PurchaseDetail',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('quantity', models.PositiveSmallIntegerField(default=1)),
                ('price', models.DecimalField(max_digits=6, decimal_places=2)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='PurchaseOrder',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('price', models.DecimalField(max_digits=8, decimal_places=2)),
                ('date', models.DateField()),
                ('final', models.BooleanField(default=False)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='SaleDetail',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('price', models.DecimalField(max_digits=6, decimal_places=2)),
                ('quantity', models.PositiveSmallIntegerField(default=0)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='SaleOrder',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('order_id', models.CharField(unique=True, max_length=128)),
                ('date', models.DateTimeField(auto_now_add=True)),
                ('total_price', models.DecimalField(max_digits=8, decimal_places=2)),
                ('shipping_price', models.DecimalField(max_digits=5, decimal_places=2)),
                ('paid', models.BooleanField(default=False)),
                ('address1', models.CharField(max_length=128)),
                ('address2', models.CharField(max_length=128)),
                ('city', models.CharField(max_length=128)),
                ('county', models.CharField(max_length=128)),
                ('postcode', models.CharField(max_length=8)),
                ('phone', models.CharField(max_length=12)),
                ('shipped', models.NullBooleanField(default=False)),
                ('tracking_number', models.CharField(default=None, max_length=64, null=True)),
                ('first_name', models.CharField(max_length=64)),
                ('last_name', models.CharField(max_length=64)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Supplier',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=128)),
                ('address1', models.CharField(max_length=128)),
                ('address2', models.CharField(max_length=128, null=True, blank=True)),
                ('city', models.CharField(max_length=128)),
                ('county', models.CharField(max_length=128)),
                ('postcode', models.CharField(max_length=8)),
                ('phone', models.CharField(max_length=12)),
                ('email', models.EmailField(max_length=64)),
                ('notes', models.TextField()),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # Cross-model foreign keys are added after all models exist.
        migrations.AddField(
            model_name='saledetail',
            name='order',
            field=models.ForeignKey(to='shop.SaleOrder'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='saledetail',
            name='product',
            field=models.ForeignKey(to='shop.Product'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='purchaseorder',
            name='supplier',
            field=models.ForeignKey(to='shop.Supplier'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='purchasedetail',
            name='order',
            field=models.ForeignKey(default=None, to='shop.PurchaseOrder'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='purchasedetail',
            name='product',
            field=models.ForeignKey(to='shop.Product'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='item',
            name='product',
            field=models.ForeignKey(to='shop.Product'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='item',
            name='purchase_order',
            field=models.ForeignKey(to='shop.PurchaseOrder'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='item',
            name='sale_order',
            field=models.ForeignKey(default=None, to='shop.SaleOrder', null=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='contact',
            name='company',
            field=models.ForeignKey(to='shop.Supplier'),
            preserve_default=True,
        ),
    ]
| [
"ijhbrwn@gmail.com"
] | ijhbrwn@gmail.com |
40ac61ccebbeae3f39a88ba709f64e382cafcc52 | 8252d36ace0bcd68c40ddfc53a4b3179d3065ace | /Miller-Rabin.py | 8d61e419c4971b6a9ed96298ca8838c3f90166fb | [] | no_license | White898/MATH-IA | 81e730a3d83895a393e9656589c75524f113b0ef | 8e6e7a12e66790eeba3f44c01f019a46f70e79ea | refs/heads/master | 2022-04-15T18:44:53.950315 | 2020-04-10T09:06:33 | 2020-04-10T09:06:33 | 254,591,105 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,250 | py | # Python3 program Miller-Rabin primality test
import random
import time as t
# Utility function to do
# modular exponentiation.
# It returns (x^y) % p
def power(x, y, p):
# Initialize result
res = 1;
# Update x if it is more than or
# equal to p
x = x % p;
while (y > 0):
# If y is odd, multiply
# x with result
if (y & 1):
res = (res * x) % p;
# y must be even now
y = y >> 1; # y = y/2
x = (x * x) % p;
return res;
# This function is called
# for all k trials. It returns
# false if n is composite and
# returns false if n is
# probably prime. d is an odd
# number such that d*2<sup>r</sup> = n-1
# for some r >= 1
def miillerTest(d, n):
# Pick a random number in [2..n-2]
# Corner cases make sure that n > 4
a = 2 + random.randint(1, n - 4);
# Compute a^d % n
x = power(a, d, n);
if (x == 1 or x == n - 1):
return True;
# Keep squaring x while one
# of the following doesn't
# happen
# (i) d does not reach n-1
# (ii) (x^2) % n is not 1
# (iii) (x^2) % n is not n-1
while (d != n - 1):
x = (x * x) % n;
d *= 2;
if (x == 1):
return False;
if (x == n - 1):
return True;
# Return composite
return False;
# It returns false if n is
# composite and returns true if n
# is probably prime. k is an
# input parameter that determines
# accuracy level. Higher value of
# k indicates more accuracy.
def isPrime(n, k):
# Corner cases
if (n <= 1 or n == 4):
return False;
if (n <= 3):
return True;
# Find r such that n =
# 2^d * r + 1 for some r >= 1
d = n - 1;
while (d % 2 == 0):
d //= 2;
# Iterate given nber of 'k' times
for i in range(k):
if (miillerTest(d, n) == False):
return False;
return True;
# Driver Code
# Number of iterations
k = 4;
start=t.time()
for n in range(10000000, 10001000):
if (isPrime(n, k)):
print(n);
# This code is contributed by mits
end=t.time()-start
print(end) | [
"noreply@github.com"
] | White898.noreply@github.com |
0dd58fe6ef1aab7424246548476e00b0bfa10837 | f8af1c9be123aa2fd9ea524681277f489936fe5d | /blog/views/blog/demoview.py | bfd6709788ffbac56561f040e537c3aa0ff7620c | [] | no_license | xuejiacore/Loj | a4e949619ec4be878c9792abaa8dcb33595fc07f | af4c9a7ac540a86a465ef5469243dbebb6c03011 | refs/heads/master | 2021-01-10T01:20:46.917007 | 2016-04-04T14:48:20 | 2016-04-04T14:48:20 | 53,296,454 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 787 | py | from django.shortcuts import render
from Personal import settings
from lib.io.HttpIO import FileIO
import logging
# Module-level logger shared by all views in this file.
logger = logging.getLogger('app')
def demo(request, whose):
    """Render the demo page; *whose* is the blog owner's username."""
    # Logged to stdout rather than via the module logger (kept as-is).
    print("username = {}".format(whose))
    return render(request, 'blog/demo.html')
def ckeditor(request, whose):
    """Render the CKEditor page; *whose* (the blog owner) is unused here."""
    return render(request, 'blog/ckeditor.html')
def upload_file(request, whose):
    """Save an uploaded file under FILE_UPLOAD_PATH and log the stored path.

    NOTE(review): this view returns None, which is not a valid Django HTTP
    response — confirm whether a response object should be returned.
    """
    uploader = FileIO(logger)
    upload_result = uploader.file_uploader(request, f_path=settings.FILE_UPLOAD_PATH)
    logger.debug("* 上传成功:{}".format(upload_result.full_path))
def download_file(request, whose):
    """Stream a hard-coded archive back to the client.

    The lambda supplies the timestamp string consumed by the downloader.
    """
    downloader = FileIO(logger)
    archive_path = r'F:\Backup\2016-02-01160728_周一_跟据PPT内容进行修改wxzh.7z'
    return downloader.single_file_downloader(archive_path, lambda: "2016-02-21 16:58:26")
| [
"xuejiacore@sina.com"
] | xuejiacore@sina.com |
193cd2f8c6a0023673e14b6491214ea7ea79cbdc | 61d3d7864aadf9686bf8b67a6a52c6ce6218a32e | /A. Calculating Function.py | b0dff93a5dad1ce2d5c585523e9bb3c90c4528e2 | [] | no_license | jonckjunior/Codeforce | c47dc45f0cfa7217d7ab713a39a6be107b7bc3ec | b49a6914fc57bbdd947f15906926d501672cd93e | refs/heads/master | 2021-09-11T23:31:58.115876 | 2018-04-13T01:23:08 | 2018-04-13T01:23:08 | 114,308,537 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 139 | py | import math
def f(n: int) -> int:
    """Return f(n) = -1 + 2 - 3 + 4 - ... + (-1)**n * n.

    Pairing consecutive terms gives n // 2 when n is even and
    -(n + 1) // 2 when n is odd.  Pure integer arithmetic replaces the
    original math.ceil(n / 2) / math.floor(n / 2), whose float round-trip
    can lose precision for very large n.
    """
    return n // 2 if n % 2 == 0 else -((n + 1) // 2)


if __name__ == "__main__":
    # Read n from stdin and print f(n), exactly as the original script did.
    # The guard also keeps the script importable without blocking on input().
    print(f(int(input())))
"jonckjuniorr@gmail.com"
] | jonckjuniorr@gmail.com |
ca70f69e90688b58b891a97d5a821d57cafae928 | 3f8ecf5e820ae9b4c6bdc77f39c69e0b2c4bfeb4 | /manage.py | b8b5d14c48df11ddfce66d44bec00c74a3eabb7d | [] | no_license | chaweewatp/Envest_3 | 8c677c4c4d9f63ca619330908c1c4de2ef40507e | d7dc31d3f4ceca5c93f4b8e8616e4585ea9c4fed | refs/heads/master | 2023-01-07T17:39:54.395948 | 2020-10-08T08:48:57 | 2020-10-08T08:48:57 | 288,642,873 | 0 | 0 | null | 2020-11-13T03:25:49 | 2020-08-19T05:35:17 | Python | UTF-8 | Python | false | false | 664 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run administrative tasks."""
    # Point Django at this project's settings before anything else runs.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Envest_3.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Dispatch to the management command named on the command line.
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
| [
"chaweewat.p@gmail.com"
] | chaweewat.p@gmail.com |
707dc6a7cfd0a657d590461eb85f4204b49e0385 | 7945cd610e6f57ff97e434db6fcf66cfd70d3872 | /driver/migrations/0001_initial.py | c3eb59c605d71f23d452067361a784fe4d9f8cce | [] | no_license | hyu-i/django_lesson_1 | 4700413e590368f047005e7ca0d69446a7f3c6b1 | e6f8d9eb9ca03b272fd09cd2672830af8e2f116f | refs/heads/master | 2020-09-11T10:50:31.476092 | 2019-11-16T03:40:56 | 2019-11-16T03:40:56 | 222,040,477 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,130 | py | # Generated by Django 2.2.6 on 2019-11-02 13:47
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial schema for the driver app: Company, Course and Driver."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Company',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(default='ティートレジャー', max_length=255, verbose_name='会社名')),
                ('created_at', models.DateTimeField(default=django.utils.timezone.now, verbose_name='日付')),
            ],
        ),
        migrations.CreateModel(
            name='Course',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, verbose_name='コース')),
                ('created_at', models.DateTimeField(default=django.utils.timezone.now, verbose_name='日付')),
            ],
        ),
        migrations.CreateModel(
            name='Driver',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=20, verbose_name='名前')),
                ('age', models.IntegerField(blank=True, verbose_name='年齢')),
                ('gender', models.CharField(blank=True, choices=[('1', '男性'), ('2', '女性')], max_length=2, verbose_name='性別')),
                ('phone', models.CharField(blank=True, max_length=255, verbose_name='電話番号')),
                ('created_at', models.DateTimeField(default=django.utils.timezone.now, verbose_name='日付')),
                ('company', models.ForeignKey(default='T', on_delete=django.db.models.deletion.PROTECT, to='driver.Company', verbose_name='会社名')),
                ('course', models.ManyToManyField(to='driver.Course', verbose_name='コース')),
            ],
        ),
    ]
| [
"hyui.games@gmail.com"
] | hyui.games@gmail.com |
e50cae72db9459a5c8e7ef3a90a37c22cf33b31a | aa933da50c726f54e1ee31af1e5483c104655992 | /nova/migrations/0027_task_execute_user.py | b97f76b2b570b833c4aaa639119b2ee858fbfdb7 | [] | no_license | Lovezhe4ever/nova | ee638f7480277f62923fa81cba159f534b0efa92 | aede1b7276d5e5d53ed1021e954cd8b35d296723 | refs/heads/master | 2020-09-11T22:26:07.019141 | 2019-08-30T02:30:17 | 2019-08-30T02:30:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 513 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-10-10 02:07
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the optional 'execute_user' (operator) field to Task."""

    dependencies = [
        ('nova', '0026_assetgroup_asset_groups'),
    ]

    operations = [
        migrations.AddField(
            model_name='task',
            name='execute_user',
            field=models.CharField(blank=True, max_length=30, null=True, verbose_name='\u64cd\u4f5c\u4eba'),
        ),
    ]
| [
"qiuyy_128@163.com"
] | qiuyy_128@163.com |
bdcabe865379a6f52e93ef38d4b03568127e97b0 | 7fda1a5d5a0de7043a4055408cf092f09d0ab9e8 | /Aula15/Mediador/plataforma/plataforma/plataformas/__init__.py | fbe2853bb6e1703565c1c6e8bad6b6ffb1fc6ed0 | [] | no_license | felipem4rtins/CES-22 | af5e2361eef2f5c8bbbcb4e462f8b8ba523e5b82 | 228a32034b170158ecffeb26410624788786c9e5 | refs/heads/master | 2020-03-13T18:20:05.786968 | 2018-07-04T16:50:57 | 2018-07-04T16:50:57 | 131,233,822 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 88 | py | from plataforma.plataformas.ps4 import PS4
from plataforma.plataformas.wiiu import WiiU
| [
"fel-mg@hotmail.com"
] | fel-mg@hotmail.com |
401771ebfea3ded08cebf3e5a5112f495535892d | d2ee4260f476b2246761d0b32891e6773b2185d8 | /pythonread.py | e727a4ed2d19b2cb0a565c114c8dc04da3153894 | [] | no_license | lyx12311/spokes | fa8832be4f9b2e0e459218900c46f68282baf018 | e6e4560936d273f1a471107252e45e48a8fc2f28 | refs/heads/master | 2020-05-07T09:47:14.955452 | 2019-07-11T18:18:26 | 2019-07-11T18:18:26 | 180,391,762 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,345 | py | #!/usr/bin/env python
import numpy as np
import idlsave
import math
import operator
import os
import sys
import glob
import matplotlib.image as mpimg
import operator
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
from matplotlib import cm
from matplotlib.ticker import LinearLocator, FormatStrFormatter
from helperFn import *
from sklearn.cluster import MeanShift, estimate_bandwidth
from sklearn.datasets.samples_generator import make_blobs
from itertools import cycle
##### row threadholds
thmn=0.01 # min threadhold for fraction of pixals that are below median
thmx=1 # max threadhold for fraction of pixals that are below median
th_med=0.95 # threadhold to identify as posible spokes (th_med*median) will be count towards the pixals that are spokes
dk_ring_med=0.1 # if the median of this row is lower than this number, then it is just a dark ring not contain any spokes or it is hard to identify
spokes_sep_r=0.05 # if seperated more than *spokes_sep* fraction of the total observed longtitude then this is another spoke
shortspokes_r=0.05 # if the number of pixals in this row that contains spokes is smaller than *shortspokes_r* fraction of the total observed longtitude then it is probably just noise...
##### column threadholds
th_col=0.051 # how much deviation from mean when identify spokes columns
spokes_sep_c=0.01 # see *spokes_sep_r* for reference
shortspokes_c=0.01 # see *shortspokes_r* for reference
##### parameters for finding spokes after identifying the "spoke boxes"
stdvalue=0.002 # there is spokes in this row if the standard deviation is larger than 0.002 while calculating minimum to determine the darkest pixels
qufit=0.2 # for clustering while finding extra spokes (should be between [0, 1] 0.5 means that the median of all pairwise distances is used.)
spoke_pix=0.05 # if the brighness of the pixals next to that of the darkest spot of the spokes is within *spoke_pix* fraction then it is also a spoke pixal
pixelcheck=3 # how many neighbor pixels to check
totchange_pix=1 # how many to stop adding to the list
totchange_pixmax= 0.01 # how many fraction of the total pixal increase is max increase and will break after that (this idicates the threadhold is too lose, result not converging)
iteration=100 # while adding pixels to spokes, if the iteration is greater than 100, break...
# read files and set up initial conditions
a=idlsave.read('W1597976395_1_cal.rpj1')
datapoints=a.rrpi # load data in
m,n=datapoints.shape # get the shape of the brightness data
print('image size: ',(m,n))
totpix=m*n
print('total number of pixcel:, ',totpix)
m,n=datapoints.shape # get the shape of the brightness data
# get row numbers with spokes
withspi=getrows(datapoints,th_med,dk_ring_med,thmn,thmx,spokes_sep_r,shortspokes_r)
withspi_1d=np.hstack(withspi) # put into 1d
# get column numbers with spokes
withspj=getcols(withspi,datapoints,th_col,spokes_sep_c,shortspokes_c)
withspj_1d=np.hstack(withspj) # put into 1d
if len(withspi_1d)==0 or len(withspj_1d)==0:
print("No spokes!")
exit(1)
################## plotting original image and smoothed image ####################################
# 2D original plot without spokes
plt.figure()
plt.title('original image')
plt.imshow(datapoints, cmap = plt.get_cmap('gray'))
# smooth background with median
plt.figure()
plt.title('original image - median of each row')
for i in range(m):
med=np.median(datapoints[i,:])
datapoints[i,:]=datapoints[i,:]-med
plt.imshow(datapoints, cmap = plt.get_cmap('gray'))
############################################################################################################
# cropped the data so only ones with spokes are left
datacrop=datapoints[np.reshape(withspi_1d,(len(withspi_1d),1)),np.reshape(withspj_1d,(1,len(withspj_1d)))]
# spoke boxes
spkbox_prebpx=[[] for i in range(len(withspi)*len(withspj))] # pixal values
spkbox_ind_lon_prebpx=[[] for i in range(len(withspi)*len(withspj))] # pixal indices in lon
spkbox_ind_rad_prebpx=[[] for i in range(len(withspi)*len(withspj))] # pixal indices in rad
numb=0
for i in range(len(withspi)):
for j in range(len(withspj)):
spkbox_prebpx[numb]=datapoints[min(withspi[i]):max(withspi[i]),min(withspj[j]):max(withspj[j])]
spkbox_ind_lon_prebpx[numb]=range(min(withspj[j]),max(withspj[j]))
spkbox_ind_rad_prebpx[numb]=range(min(withspi[i]),max(withspi[i]))
numb=numb+1
################## subplot of all the data ################
# plot subplots of all the data
'''
# find overall min
plt.figure()
spokes_ind_lon=[] # get spokes center in lon
spokes_ind_rad=[] # get spokes center in rad
minidx=[]
subpltn=1 # subplot counts
for i in range(len(withspi)*len(withspj)):
plt.subplot(len(withspi),len(withspj),subpltn)
plt.imshow(spkbox[i], cmap = plt.get_cmap('gray'))
locallon=[]
spkbox[i]=np.array(spkbox[i])
spkflat=spkbox[i].flatten()
#minidx=(np.argsort(spkflat)[:spkpeak])
minidx=(np.argsort(spkflat))
k=0
unrmin=np.unravel_index(minidx[k], spkbox[i].shape)
longind=unrmin[1]
locallon.append(longind)
spokes_ind_lon.append(spkbox_ind_lon[i][unrmin[1]])
spokes_ind_rad.append(spkbox_ind_rad[i][unrmin[0]])
k=1
indcout=1
while k<spkpeak and indcout<len(minidx):
unrmin=np.unravel_index(minidx[k], spkbox[i].shape)
longind=unrmin[1]
if longind not in locallon:
spokes_ind_lon.append(spkbox_ind_lon[i][unrmin[1]])
spokes_ind_rad.append(spkbox_ind_rad[i][unrmin[0]])
locallon.append(longind)
plt.plot(unrmin[1],unrmin[0],'bo')
k=k+1
indcout=indcout+1
else:
indcout=indcout+1
subpltn=subpltn+1
'''
'''
# find min in each row and column
plt.figure()
spokes_ind_lon=[] # get spokes center in lon
spokes_ind_rad=[] # get spokes center in rad
minidx=[]
subpltn=1 # subplot counts
for i in range(len(withspi)*len(withspj)):
plt.subplot(len(withspi),len(withspj),subpltn)
plt.imshow(spkbox[i], cmap = plt.get_cmap('gray'))
locallon=[]
spkbox[i]=np.array(spkbox[i])
for k in range(len(spkbox[i])):
index, value = min(enumerate(spkbox[i][k]), key=operator.itemgetter(1))
spokes_ind_lon.append(spkbox_ind_lon[i][index])
spokes_ind_rad.append(spkbox_ind_rad[i][k])
plt.plot(index,k,'bo')
for j in range(len(spkbox[i][:][0])):
rownum=[spkbox[i][p][j] for p in range(len(spkbox[i]))]
index, value = min(enumerate(rownum), key=operator.itemgetter(1))
spokes_ind_lon.append(spkbox_ind_lon[i][j])
spokes_ind_rad.append(spkbox_ind_rad[i][index])
plt.plot(j,index,'ro')
subpltn=subpltn+1
'''
# find min in each row
spokesnumb=len(withspi)*len(withspj) # spokes number
plt.figure()
plt.title('spoke boxes and spokes before clustering')
spokes_ind_lon_prebpx=[[] for i in range(spokesnumb)] # get spokes center in lon for each spoke
spokes_ind_rad_prebpx=[[] for i in range(spokesnumb)] # get spokes center in rad for each spoke
minidx=[]
subpltn=1 # subplot counts
cmap = plt.get_cmap('viridis')
colors = cmap(np.linspace(0, 1, spokesnumb))
for i in range(spokesnumb):
plt.subplot(len(withspi),len(withspj),subpltn)
plt.imshow(spkbox_prebpx[i], cmap = plt.get_cmap('gray'))
locallon=[]
spkbox_prebpx[i]=np.array(spkbox_prebpx[i])
for k in range(len(spkbox_prebpx[i])):
index, value = min(enumerate(spkbox_prebpx[i][k]), key=operator.itemgetter(1))
if np.std(spkbox_prebpx[i][k])<stdvalue:
continue
else:
spokes_ind_lon_prebpx[i].append(spkbox_ind_lon_prebpx[i][index])
spokes_ind_rad_prebpx[i].append(spkbox_ind_rad_prebpx[i][k])
plt.plot(index,k,'o',c=colors[i])
subpltn=subpltn+1
# check if there are other spokes within one box (clustering)
plt.figure()
plt.title('spokes after clustering')
plt.imshow(datapoints, cmap = plt.get_cmap('gray'))
distant=[[] for i in range(spokesnumb)]
colors = cycle('bgrcmykbgrcmykbgrcmykbgrcmyk')
spokes_ind_rad_1d=np.hstack(spokes_ind_rad_prebpx)
spokes_ind_lon_1d=np.hstack(spokes_ind_lon_prebpx)
centers = [spokes_ind_rad_prebpx[0][0], spokes_ind_rad_prebpx[0][0]]
X=np.array(zip(spokes_ind_rad_1d,spokes_ind_lon_1d))
bandwidth = estimate_bandwidth(X,quantile=qufit)
ms = MeanShift(bandwidth=bandwidth, bin_seeding=True)
ms.fit(X)
labels = ms.labels_
cluster_centers = ms.cluster_centers_
labels_unique = np.unique(labels)
n_clusters_ = len(labels_unique)
spkbox=[[] for i in range(n_clusters_)] # pixal values
spokes_ind_lon=[[] for i in range(n_clusters_)] # get spokes center in lon for each spoke
spokes_ind_rad=[[] for i in range(n_clusters_)] # get spokes center in rad for each spoke
for k, col in zip(range(n_clusters_), colors):
my_members = labels == k
# reasign boxes
spokes_ind_lon[k]=X[my_members, 1]
spokes_ind_rad[k]=X[my_members, 0]
spkbox[k]=datapoints[min(spokes_ind_rad[k]):max(spokes_ind_rad[k]),min(spokes_ind_lon[k]):max(spokes_ind_lon[k])]
cluster_center = cluster_centers[k]
plt.plot(X[my_members, 1], X[my_members, 0], 'o')
plt.plot(cluster_center[1], cluster_center[0], 'o', markerfacecolor=col,markeredgecolor='k', markersize=14)
spokesnumb=n_clusters_
print('length of lon: ',len(spokes_ind_lon[0]))
# get the spokes (check if the surrounding pixals are part of the spokes)
countnew=0
# average increase in pixels
totchange=[totchange_pix+1 for j in range(spokesnumb)]
totchangep=[totchange_pix+1 for j in range(spokesnumb)]
# each direction count
totchange_ev_p=[0 for i in range(6)]
britness=[0 for j in range(spokesnumb)]
spokes_ind_lon_newadd=[[] for j in range(spokesnumb)]
spokes_ind_rad_newadd=[[] for j in range(spokesnumb)]
while countnew<iteration and any([totchange[i]>totchange_pix for i in range(spokesnumb)]) and any([totchange[i]<totpix*totchange_pixmax for i in range(spokesnumb)]): #and any(diff[i]>totchange_pix for i in range(6)):
totchange=[0 for j in range(spokesnumb)]
totchange_ev=[0 for j in range(6)]
for k in range(spokesnumb):
if totchangep[k]>totchange_pix and totchangep[k]<totpix*totchange_pixmax:
if countnew==0:
spokes_ind_lon_newadd_p=spokes_ind_lon[k]
spokes_ind_rad_newadd_p=spokes_ind_rad[k]
#britness[k]=np.mean([datapoints[a][b] for a,b in zip(spokes_ind_rad_newadd_p,spokes_ind_lon_newadd_p)]) # get average brightness of the spoke
else:
spokes_ind_lon_newadd_p=spokes_ind_lon_newadd[k]
spokes_ind_rad_newadd_p=spokes_ind_rad_newadd[k]
spokes_ind_lon_newadd[k]=[]
spokes_ind_rad_newadd[k]=[]
#print('brightness is:',britness)
for i in range(len(spokes_ind_lon_newadd_p)):
x=spokes_ind_lon_newadd_p[i]
y=spokes_ind_rad_newadd_p[i]
britness[k]=datapoints[y][x]
for np1 in range(pixelcheck):
if (y+np1+1<m) and ((x,y+np1+1) not in zip(spokes_ind_lon[k],spokes_ind_rad[k])) and (abs(datapoints[y+np1+1][x]-britness[k])<abs(spoke_pix*britness[k])):
spokes_ind_lon[k]=np.append(spokes_ind_lon[k],x)
spokes_ind_rad[k]=np.append(spokes_ind_rad[k],y+np1+1)
spokes_ind_lon_newadd[k].append(x)
spokes_ind_rad_newadd[k].append(y+np1+1)
totchange[k]=totchange[k]+1
totchange_ev[0]=totchange_ev[0]+1
if (x+np1+1<n) and ((x+np1+1,y) not in zip(spokes_ind_lon[k],spokes_ind_rad[k])) and (abs(datapoints[y][x+np1+1]-britness[k])<abs(spoke_pix*britness[k])):
spokes_ind_lon[k]=np.append(spokes_ind_lon[k],x+np1+1)
spokes_ind_rad[k]=np.append(spokes_ind_rad[k],y)
spokes_ind_lon_newadd[k].append(x+np1+1)
spokes_ind_rad_newadd[k].append(y)
totchange[k]=totchange[k]+1
totchange_ev[1]=totchange_ev[1]+1
if (x+np1+1<n and y+np1+1<m) and ((x+np1+1,y+np1+1) not in zip(spokes_ind_lon[k],spokes_ind_rad[k])) and (abs(datapoints[y+np1+1][x+np1+1]-britness[k])<abs(spoke_pix*britness[k])):
spokes_ind_lon[k]=np.append(spokes_ind_lon[k],x+np1+1)
spokes_ind_rad[k]=np.append(spokes_ind_rad[k],y+np1+1)
spokes_ind_lon_newadd[k].append(x+np1+1)
spokes_ind_rad_newadd[k].append(y+np1+1)
totchange[k]=totchange[k]+1
totchange_ev[2]=totchange_ev[2]+1
if (y-np1-1>0) and ((x,y-np1-1) not in zip(spokes_ind_lon[k],spokes_ind_rad[k])) and (abs(datapoints[y-np1-1][x]-britness[k])<abs(spoke_pix*britness[k])):
spokes_ind_lon[k]=np.append(spokes_ind_lon[k],x)
spokes_ind_rad[k]=np.append(spokes_ind_rad[k],y-np1-1)
spokes_ind_lon_newadd[k].append(x)
spokes_ind_rad_newadd[k].append(y-np1-1)
totchange[k]=totchange[k]+1
totchange_ev[3]=totchange_ev[3]+1
if (x-np1-1>0) and ((x-np1-1,y) not in zip(spokes_ind_lon[k],spokes_ind_rad[k])) and (abs(datapoints[y][x-np1-1]-britness[k])<abs(spoke_pix*britness[k])):
spokes_ind_lon[k]=np.append(spokes_ind_lon[k],x-np1-1)
spokes_ind_rad[k]=np.append(spokes_ind_rad[k],y)
spokes_ind_lon_newadd[k].append(x-np1-1)
spokes_ind_rad_newadd[k].append(y)
totchange[k]=totchange[k]+1
totchange_ev[4]=totchange_ev[4]+1
if (x-np1-1>0 and y-np1-1>0) and ((x-np1-1,y-np1-1) not in zip(spokes_ind_lon[k],spokes_ind_rad[k])) and (abs(datapoints[y-np1-1][x-np1-1]-britness[k])<abs(spoke_pix*britness[k])):
spokes_ind_lon[k]=np.append(spokes_ind_lon[k],x-np1-1)
spokes_ind_rad[k]=np.append(spokes_ind_rad[k],y-np1-1)
spokes_ind_lon_newadd[k].append(x-np1-1)
spokes_ind_rad_newadd[k].append(y-np1-1)
totchange[k]=totchange[k]+1
totchange_ev[5]=totchange_ev[5]+1
totchange_ev_p=totchange_ev
print('Total pixal changes per spoke: ',totchange)
print('Total pixal changes per direction: ',totchange_ev)
totchangep=totchange
countnew=countnew+1
if any([totchange[i]>totpix*totchange_pixmax for i in range(spokesnumb)]):
print('Error: Choose a smaller spoke_pix value so result can converage!')
print('Total change: ',totchange)
exit(1)
elif countnew>=iteration:
print("Warning: Result hasn't converage after ",iteration," iteration")
else:
print("Finished identifying spokes")
# 2D original plot with spokes
plt.figure()
colors = cmap(np.linspace(0, 1, spokesnumb))
plt.title('original image with identified spokes')
plt.imshow(datapoints, cmap = plt.get_cmap('gray'))
print('length of lon: ',len(spokes_ind_lon[0]))
for i in range(spokesnumb):
plt.plot(spokes_ind_lon[i],spokes_ind_rad[i],'o',c=colors[i])
# plot lines to show which columns and rows are identified as having spokes
plt.figure()
plt.imshow(datapoints, cmap = plt.get_cmap('gray'))
for i in withspi_1d:
plt.plot(np.arange(n),i*np.ones(n),'b')
for j in withspj_1d:
plt.plot(j*np.ones(m),np.arange(m),'r')
plt.show()
| [
"noreply@github.com"
] | lyx12311.noreply@github.com |
b51db30d31e4565ca19e3049e01ce56bbcadda9e | 1483cbcadfa336c97b8eaf6936fe2b5e208d0eea | /code/torrentData.py | fe2030be5649b8669ba2391729acd788349dbe69 | [] | no_license | meta-engineer/CS456-Peer2Peer | a777023e7f5a811145de8af175081e07cf026b4b | af66c58a5d42767adad47bb7f59f6252375b62c4 | refs/heads/main | 2022-12-31T11:42:27.329490 | 2020-10-14T18:57:24 | 2020-10-14T18:57:24 | 304,108,433 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,270 | py | # defines a listing of peers and files available in the network
class torrentData:
    """Tracker-style registry of peers and the file chunks each peer owns.

    peerDict maps peer id -> [ip, port].
    fileDict maps filename -> {"filesize": bytes, "totalchunks": n,
    "chunkDict": {chunk_index: [peer ids owning that chunk]}}.
    """

    CHUNK_SIZE = 512  # bytes per chunk

    def __init__(self):
        self.peerDict = {
            # ID : [IP, PORT]
        }
        self.fileDict = {
            # "FILENAME": {"filesize": SIZE, "totalchunks": N, "chunkDict": {0: [ID, ...], ...}}
        }

    # adds file with no owned chunks
    def addFile(self, name, size):
        """Register a new file of `size` bytes; False on duplicate name."""
        if name in self.fileDict:
            return False
        numChunks = -(-size // self.CHUNK_SIZE)  # ceiling division
        self.fileDict[name] = {
            "filesize": size,
            "totalchunks": numChunks,
            "chunkDict": {i: [] for i in range(numChunks)},
        }
        return True

    # use fileDict.pop("filename") if ever needed

    # adds to known peers, but does not inform on owned chunks
    def addPeer(self, pid, ip, port):
        """Register a peer; False if the id is already known."""
        if pid in self.peerDict:
            return False  # no duplicate
        self.peerDict[pid] = [ip, port]
        return True

    # removes from peerDict and removes from chunkDicts
    def removePeer(self, pid):
        """Forget a peer and drop its ownership of every chunk it holds."""
        if pid not in self.peerDict:
            return False
        for f in self.fileDict:
            for owners in self.fileDict[f]["chunkDict"].values():
                # Bug fix: list.remove raises ValueError when the peer never
                # acquired this particular chunk, so guard the removal.
                if pid in owners:
                    owners.remove(pid)
        self.peerDict.pop(pid)
        return True

    # peer obtains single chunk of file
    def peerAquireFileChunk(self, pid, filename, chunk):
        """Record that `pid` now owns `chunk` of `filename`; False on any invalid input."""
        if pid not in self.peerDict:
            return False  # pid not registered
        if filename not in self.fileDict:
            return False  # non-existent file to own
        if chunk >= self.fileDict[filename]["totalchunks"]:
            return False  # non-existent chunk
        # chunk has been registered
        if chunk in self.fileDict[filename]["chunkDict"]:
            if pid in self.fileDict[filename]["chunkDict"][chunk]:
                return False  # already owns
        self.fileDict[filename]["chunkDict"][chunk].append(pid)
        return True

    # peer obtains all chunks of file (when they first connect)
    def peerAquireWholeFile(self, pid, filename):
        """Record that `pid` owns every chunk of `filename`."""
        if pid not in self.peerDict:
            return False
        if filename not in self.fileDict:
            return False
        for c in range(self.fileDict[filename]["totalchunks"]):
            self.peerAquireFileChunk(pid, filename, c)
        return True
| [
"noreply@github.com"
] | meta-engineer.noreply@github.com |
4680448fb41df2983cfc6d58ce6ec8f03465b75e | fe4f569d6e2c01f7059bf029cd58ee2fbd882e92 | /viceversa_project/asgi.py | d085cbc71a97b681174236cb776660fed3e1bce2 | [] | no_license | Revival-coder/viceversa_project | 51dc0511b5678294aa00ad66cdecefd6c94ed17a | 73c20e682e6984514f32c7db2039242587c96f9c | refs/heads/main | 2023-07-24T15:09:23.816145 | 2021-09-08T23:32:55 | 2021-09-08T23:32:55 | 404,504,549 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 411 | py | """
ASGI config for viceversa_project project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Point Django at the project settings before the application is created.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'viceversa_project.settings')
# Module-level `application` is the callable ASGI servers import and serve.
application = get_asgi_application()
| [
"nastia8425@gmail.com"
] | nastia8425@gmail.com |
fa653f9c0963489e50b7ebe54873f2359c9252e1 | 3d19e1a316de4d6d96471c64332fff7acfaf1308 | /Users/P/pere/postliste-ruter.py | 6868c570393c7a8e844c70e499b5f1ed041bc480 | [] | no_license | BerilBBJ/scraperwiki-scraper-vault | 4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc | 65ea6a943cc348a9caf3782b900b36446f7e137d | refs/heads/master | 2021-12-02T23:55:58.481210 | 2013-09-30T17:02:59 | 2013-09-30T17:02:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,876 | py | # -*- coding: UTF-8 -*-
import scraperwiki
import json
from BeautifulSoup import BeautifulSoup
import datetime
import dateutil.parser
import lxml.html
import resource
import sys
import urlparse
import re
# Record the source URL with the ScraperWiki platform before scraping.
scraperwiki.scrape('http://www2.ruter.no/verdt-a-vite/presse/offentlig-journal/')
# Shared ScraperWiki helper libraries, imported by name from the platform.
lazycache=scraperwiki.swimport('lazycache')
postlistelib=scraperwiki.swimport('postliste-python-lib')
agency = 'Ruter AS'  # agency name stored with every parsed journal entry
def report_errors(errors):
    # Print every collected error and abort with a non-zero exit code.
    # Does nothing (and returns None) when the list is empty.
    if not errors:
        return
    print("Errors:")
    for err in errors:
        print(err)
    exit(1)
def out_of_cpu(arg, spent, hard, soft):
    # Callback invoked by postlistelib's CPU-limit watchdog; `arg` is the
    # shared error list, which report_errors() prints before exiting.
    report_errors(arg)
def process_pdf(parser, pdfurl, errors):
    """Download one journal PDF and hand it to the parser's preprocess queue.

    Errors are appended to the caller-supplied `errors` list so that
    report_errors() can display them later.  (The original re-bound `errors`
    to a fresh local list here, silently discarding every collected error.)
    """
    postlistelib.exit_if_no_cpu_left(0, out_of_cpu, errors)
    try:
        pdfcontent = scraperwiki.scrape(pdfurl)
        parser.preprocess(pdfurl, pdfcontent)
        pdfcontent = None  # release the (potentially large) download
    except IndexError as e:  # "as" form works on Python 2.6+ and 3.x
        errors.append(e)
def process_page_queue(parser, errors):
    """Parse all queued PDF pages; record an error if the CPU budget runs out."""
    try:
        parser.process_pages()
        postlistelib.exit_if_no_cpu_left(0, out_of_cpu, errors)
    # The exception value was never used, so drop the binding; the bare
    # "except X:" form also replaces the Python-2-only "except X, e" syntax.
    except scraperwiki.CPUTimeExceededError:
        errors.append("Processing pages interrupted")
def process_journal_pdfs(parser, listurl, errors):
    """Scrape the journal list page and process every new PDF linked from it."""
    html = scraperwiki.scrape(listurl)
    root = lxml.html.fromstring(html)
    html = None  # free the page source once parsed
    for ahref in root.cssselect("div.vedlegg a"):
        href = ahref.attrib['href']
        url = urlparse.urljoin(listurl, href)
        # Skip non-http references and anything that is not a PDF.
        if -1 != href.find("file://") or -1 == url.find(".pdf"):
            continue
        # Only fetch documents that have not been stored already.
        # (The original had an "if already scraped: True" no-op branch;
        # the bare `True` statement did nothing and is removed.)
        if not parser.is_already_scraped(url):
            process_pdf(parser, url, errors)
def test_small_pdfs(parser):
    # Test with some smaller PDFs
    # Debug-only helper: fetch a single known-small journal PDF, parse it,
    # report any errors, then stop the whole script (exit(0)) so the full
    # scrape below never runs.
    errors = []
    process_pdf(parser, "http://www2.ruter.no/Documents/Offentlig_journal/2012_Uke_24.pdf?epslanguage=no", errors)
    process_page_queue(parser, errors)
    report_errors(errors)
    exit(0)
# Main flow: scan the public journal page for new PDFs, parse every queued
# page, then report (and exit non-zero on) any collected errors.
errors = []
parser = postlistelib.PDFJournalParser(agency=agency)
#test_small_pdfs(parser)
process_journal_pdfs(parser, "http://www2.ruter.no/verdt-a-vite/presse/offentlig-journal/", errors)
process_page_queue(parser, errors)
report_errors(errors)
# -*- coding: UTF-8 -*-
import scraperwiki
import json
from BeautifulSoup import BeautifulSoup
import datetime
import dateutil.parser
import lxml.html
import resource
import sys
import urlparse
import re
# Record the source URL with the ScraperWiki platform before scraping.
scraperwiki.scrape('http://www2.ruter.no/verdt-a-vite/presse/offentlig-journal/')
# Shared ScraperWiki helper libraries, imported by name from the platform.
lazycache=scraperwiki.swimport('lazycache')
postlistelib=scraperwiki.swimport('postliste-python-lib')
agency = 'Ruter AS'  # agency name stored with every parsed journal entry
def report_errors(errors):
    # Print every collected error and abort with a non-zero exit code.
    # Does nothing (and returns None) when the list is empty.
    if not errors:
        return
    print("Errors:")
    for err in errors:
        print(err)
    exit(1)
def out_of_cpu(arg, spent, hard, soft):
    # Callback invoked by postlistelib's CPU-limit watchdog; `arg` is the
    # shared error list, which report_errors() prints before exiting.
    report_errors(arg)
def process_pdf(parser, pdfurl, errors):
    """Download one journal PDF and hand it to the parser's preprocess queue.

    Errors are appended to the caller-supplied `errors` list so that
    report_errors() can display them later.  (The original re-bound `errors`
    to a fresh local list here, silently discarding every collected error.)
    """
    postlistelib.exit_if_no_cpu_left(0, out_of_cpu, errors)
    try:
        pdfcontent = scraperwiki.scrape(pdfurl)
        parser.preprocess(pdfurl, pdfcontent)
        pdfcontent = None  # release the (potentially large) download
    except IndexError as e:  # "as" form works on Python 2.6+ and 3.x
        errors.append(e)
def process_page_queue(parser, errors):
    """Parse all queued PDF pages; record an error if the CPU budget runs out."""
    try:
        parser.process_pages()
        postlistelib.exit_if_no_cpu_left(0, out_of_cpu, errors)
    # The exception value was never used, so drop the binding; the bare
    # "except X:" form also replaces the Python-2-only "except X, e" syntax.
    except scraperwiki.CPUTimeExceededError:
        errors.append("Processing pages interrupted")
def process_journal_pdfs(parser, listurl, errors):
    """Scrape the journal list page and process every new PDF linked from it."""
    html = scraperwiki.scrape(listurl)
    root = lxml.html.fromstring(html)
    html = None  # free the page source once parsed
    for ahref in root.cssselect("div.vedlegg a"):
        href = ahref.attrib['href']
        url = urlparse.urljoin(listurl, href)
        # Skip non-http references and anything that is not a PDF.
        if -1 != href.find("file://") or -1 == url.find(".pdf"):
            continue
        # Only fetch documents that have not been stored already.
        # (The original had an "if already scraped: True" no-op branch;
        # the bare `True` statement did nothing and is removed.)
        if not parser.is_already_scraped(url):
            process_pdf(parser, url, errors)
def test_small_pdfs(parser):
    # Test with some smaller PDFs
    # Debug-only helper: fetch a single known-small journal PDF, parse it,
    # report any errors, then stop the whole script (exit(0)) so the full
    # scrape below never runs.
    errors = []
    process_pdf(parser, "http://www2.ruter.no/Documents/Offentlig_journal/2012_Uke_24.pdf?epslanguage=no", errors)
    process_page_queue(parser, errors)
    report_errors(errors)
    exit(0)
# Main flow: scan the public journal page for new PDFs, parse every queued
# page, then report (and exit non-zero on) any collected errors.
errors = []
parser = postlistelib.PDFJournalParser(agency=agency)
#test_small_pdfs(parser)
process_journal_pdfs(parser, "http://www2.ruter.no/verdt-a-vite/presse/offentlig-journal/", errors)
process_page_queue(parser, errors)
report_errors(errors)
| [
"pallih@kaninka.net"
] | pallih@kaninka.net |
482e93f7d67ed7308db05da1194268e8dc55f176 | 81e5c2fc8c5eead47821f5e5dc21c5bb70f55fa9 | /A7/src/subsitute_you/subsituteYou.py | 875d2aa330d5db6bc52dd032a91832c9f483c331 | [] | no_license | oduprogrammer16/cs532-s16 | 4763cf8941d4d0b92232ef3566a211f8a9af88c1 | 65ed4e5e7ec6c9fd43135e311a3a260113d87816 | refs/heads/master | 2020-07-14T04:09:15.636154 | 2018-07-07T14:39:03 | 2018-07-07T14:39:03 | 49,685,204 | 0 | 0 | null | 2016-01-15T00:46:26 | 2016-01-15T00:46:25 | null | UTF-8 | Python | false | false | 6,402 | py | import argparse
import logging
import sys
from data_extractor.data_set import Data_Set
# Log everything (DEBUG and up) to a file, truncating it on every run ('w+').
logging.basicConfig(level=logging.DEBUG,format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',datefmt='%m-%d %H:%M:%S',filename='subsitute_you.log',filemode='w+')
# Module-wide logger used by all functions below.
defaultLogger = logging.getLogger('default')
def get_prefs(data):
    """Build {user_id: {movie_title: rating}} from the data set's ratings.

    (Adapted from page 26 of the Collective Intelligence book.)
    """
    # Map movie ids onto titles first so ratings can be keyed by title.
    titles_by_id = {m['movie_id']: m['movie_title'] for m in data.movie_list}
    prefs = {}
    for record in data.rating_list:
        uid = int(record['user_id'])
        title = titles_by_id[record['item_id']]
        prefs.setdefault(uid, {})[title] = float(record['rating'])
    return prefs
# Returns the best matches for person from the prefs dictionary.
# Number of results and similarity function are optional params.
#def topMatches(prefs,person,n=5,similarity=sim_pearson):
# scores=[(similarity(prefs,person,other),other)
#for other in prefs if other!=person]
# scores.sort()
# scores.reverse()
# return scores[0:n]
def printResults(similarUsers):
    """Pretty-print each similar user's three best- and worst-rated movies."""
    divider = '-----------------------------------------------------'
    top_hdr = '-----------------------Top-3-------------------------'
    bottom_hdr = '--------------------Bottom-Three---------------------'
    summary_hdr = '----------------------Summary------------------------'
    users_hdr = '--------------------Similar Users--------------------'
    print(users_hdr)
    for user_id, info in similarUsers.items():
        print("User: {0}:".format(user_id))
        print(top_hdr)
        for rank, entry in enumerate(info['top_three_favorite'], 1):
            print("{0}){1:.<48}{2}".format(rank, entry['movie_title'], entry['rating']))
        print(bottom_hdr)
        for rank, entry in enumerate(info['bottom_three_favorite'], 1):
            print("{0}){1:.<48}{2}".format(rank, entry['movie_title'], entry['rating']))
        print(divider)
        print(divider)
    print(summary_hdr)
    print("{0:.<52}{1}".format('Number of Users', len(similarUsers)))
def get_user_match(data, name=None, gender=None, age=None, occupation=None):
    """Find users whose age, gender and occupation match, and summarise their tastes.

    Returns {user_id: {'top_three_favorite': [...], 'bottom_three_favorite': [...]}}
    where each list entry is a {'movie_title', 'rating'} dict.

    `name` is accepted for interface compatibility but never influences the
    match — the critic records carry no name field to compare against.
    """
    # Documented defaults when a demographic field is not supplied.
    _gender = gender if gender else 'M'
    _age = age if age else 21
    _occupation = occupation if occupation else 'student'
    # {user_id: {movie_title: rating}} for every user in the data set.
    prefs = get_prefs(data)
    similarUsers = [c for c in data.critic_list
                    if c['age'] == _age and c['gender'] == _gender and c['occupation'] == _occupation]
    defaultLogger.info("{0} similar users found".format(len(similarUsers)))
    userInformation = {}
    for critic in similarUsers:
        user_id = int(critic['user_id'])
        defaultLogger.debug("Getting movie ratings for user: {0}".format(user_id))
        userPrefs = prefs[user_id]
        # Fixed log format: the original used "{0}" twice, printing the
        # rating count in place of the user id.
        defaultLogger.debug("{0} movie ratings found for user: {1}".format(len(userPrefs), user_id))
        # Build a sortable list of {movie_title, rating} entries.
        rankings = [{'movie_title': title, 'rating': rating}
                    for title, rating in userPrefs.items()]
        lowestToHigh = sorted(rankings, key=lambda e: e['rating'])
        highestToLow = sorted(rankings, key=lambda e: e['rating'], reverse=True)
        defaultLogger.info("Determining top three favorite movies and top three least favorite films for user: {0}".format(user_id))
        userInformation[user_id] = {'top_three_favorite': highestToLow[:3],
                                    'bottom_three_favorite': lowestToHigh[:3]}
    return userInformation
if __name__ == '__main__':
    gender = None
    age = None
    occupation = None
    name = None
    occupationFileName = '../data_files/u.occupation'
    data = Data_Set()
    # Guard: stays None when open() fails, otherwise the later
    # `if occupationFile is not None` check raised NameError.
    occupationFile = None
    try:
        occupationFile = open(occupationFileName, 'r')
    except IOError as e:
        # e.strerror works on both Python 2 and 3; indexing the exception
        # object (e[1]), as the original did, fails on Python 3.
        defaultLogger.error("Error getting occupation list file: {0},{1}".format(occupationFileName, e.strerror))
        print("Error getting occupation list file: {0},{1}".format(occupationFileName, e.strerror))
    if occupationFile is not None:
        # Create a list of valid occupations (one per line in the data file).
        occupationList = [line.strip('\n') for line in occupationFile.readlines()]
        occupationFile.close()
        # Parse the command-line arguments.  This block now only runs when
        # the occupation file loaded, since the choices depend on it.
        parser = argparse.ArgumentParser(description='Finds three users who are closest to you in terms of age, gender, and occupation')
        parser.add_argument('-n', '--name', action='store', dest='name', help='Your Name')
        parser.add_argument('-a', '--age', action='store', dest='age', help='Your age.')
        parser.add_argument('-g', '--gender', choices=['m', 'f', 'male', 'female'], dest='gender', help='Gender of the user')
        parser.add_argument('-o', '--occupation', choices=occupationList, dest='occupation', help='Occupation')
        args = parser.parse_args()
        if args.name:
            name = args.name
            defaultLogger.info("Using custom name: {0}".format(name))
        else:
            defaultLogger.info("Using default name: User")
        if args.age:
            age = int(args.age)
            defaultLogger.info("Using custom age: {0}".format(age))
        else:
            defaultLogger.info("Using default age: 21")
        if args.gender:
            defaultLogger.info("Using custom gender: {0}".format(args.gender))
            if args.gender == 'm' or args.gender == 'male':
                gender = 'M'
                defaultLogger.debug("gender set to: M")
            if args.gender == 'f' or args.gender == 'female':
                gender = 'F'
                defaultLogger.debug("gender set to: F")
        else:
            defaultLogger.info("Using default gender: M")
        if args.occupation:
            # Assign before logging — the original logged `occupation`
            # while it was still None.
            occupation = args.occupation
            defaultLogger.info("Custom occupation given: {0}".format(occupation))
        else:
            defaultLogger.info("Using default occupation: student")
        # Get a match of the users and print the summary.
        result = get_user_match(data, name=name, gender=gender, age=age, occupation=occupation)
        printResults(result)
| [
"kclem014@odu.edu"
] | kclem014@odu.edu |
8a0f762a9c144565a0590d6ca5eecf3299bf8672 | d02d25541fc2738db4c8c38624b9ac529d82abf5 | /pokemonAI/pokemon.py | 709895f4c140a166dc6afca9315975de25c9f213 | [] | no_license | michaelostrow/pokemonAI | d356c1a1e6f299966e24492c52e3a3322a25d768 | 1bf81cd494892e7f56ebdd687bd12e033448f5e7 | refs/heads/master | 2020-03-31T01:34:45.302733 | 2015-01-05T19:25:23 | 2015-01-05T19:25:23 | 28,827,781 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,332 | py | from pokemon_classes import Type, Pokemon, Attack, calculate_damage
from poke_gamestate import GameState
import minimax_poke_agent
import sys
# given a text file, parses it and generates
# an initial gamestate
def generate_state(fileName):
    """Parse a two-team definition file into an initial GameState.

    The file holds two consecutive six-Pokemon team listings; the handle is
    closed as soon as both are read (the original leaked the open file).
    """
    with open(fileName) as f:
        team1 = generate_team_loop(f)
        team2 = generate_team_loop(f)
    return GameState(team1, team2)
# subroutine for team generation function
def generate_team_loop(openFile):
    """Read one six-Pokemon team from the open team-definition file.

    Expected layout per team: two header lines (display only), then for each
    of 6 Pokemon one attribute line, one type line, and four attack lines,
    followed by one trailing separator line.
    """
    team = []
    # ignore first two lines, used only to show team
    openFile.readline()
    openFile.readline()
    for i in range(6):
        attacks = []
        attributes = openFile.readline().split()
        poke = Pokemon(*attributes)
        types = openFile.readline().split()
        poke.types = Type(*types)
        for x in range(4):
            atk = openFile.readline().split()
            attacks.append(Attack(*atk))
        poke.attacks = attacks
        team.append(poke)
    # Consume the separator line that follows each team block.
    openFile.readline()
    return team
# function that is given an initial gamestate
# and then uses minimax search to run a game,
# displaying actions taken along the way
def run_game(gamestate):
    """Play the game to completion, printing each move as it happens.

    Each iteration: check for a winner, ask the minimax agent for the current
    player's action, print it as either an attack or a switch, and advance
    the state.  Returns when a terminal state is reached.
    """
    agent = minimax_poke_agent.MinimaxPokeAgent()
    attacker = None
    defender = None
    player = None
    form_str = '%s attacks with %s for %i damage.'
    while True:
        attacked = False
        if gamestate.is_terminal_state():
            winner = gamestate.get_winner()
            print 'Game over: ' + winner + ' wins!'
            return
        action = agent.alpha_beta_search(gamestate)
        # uncomment the line below to see the nodes expanded to
        # determine which action to take
        #print agent.NODES_EXPANDED
        agent.NODES_EXPANDED = 0
        # gamestate.turn selects whose team is attacking this round.
        if gamestate.turn == 1:
            player = 'Player 1'
            attacker = gamestate.team1
            defender = gamestate.team2
        if gamestate.turn == 2:
            player = 'Player 2'
            attacker = gamestate.team2
            defender = gamestate.team1
        # If the chosen action names one of the active Pokemon's attacks,
        # report the damage; otherwise it must be a switch to another Pokemon.
        for a in attacker[0].attacks:
            if a.name == action:
                attacked = True
                print form_str % (player, action, calculate_damage(defender[0], attacker[0], a))
        if not attacked:
            print player + ' switches out to ' + action + '.'
        gamestate = gamestate.get_next_state(action)
# main routine for instantiating and then running a game
if len(sys.argv) < 2:
    # Guard: the original raised IndexError when run with no argument.
    print('usage: pokemon.py {basic|intermediate|advanced}')
elif sys.argv[1] == 'basic':
    run_game(generate_state('basic.txt'))
elif sys.argv[1] == 'intermediate':
    run_game(generate_state('intermediate.txt'))
elif sys.argv[1] == 'advanced':
    run_game(generate_state('advanced.txt'))
| [
"michaelostrow321@gmail.com"
] | michaelostrow321@gmail.com |
274db1c7e4366ef5d355c5f0b6718f4d5f41f569 | 33524b5c049f934ce27fbf046db95799ac003385 | /Дистанционная_подготовка/Программирование_на_python/9_списки/zadache_N.py | bb6732945983d5ab8c93702cbfcc871d84f63cc6 | [] | no_license | mgbo/My_Exercise | 07b5f696d383b3b160262c5978ad645b46244b70 | 53fb175836717493e2c813ecb45c5d5e9d28dd23 | refs/heads/master | 2022-12-24T14:11:02.271443 | 2020-10-04T04:44:38 | 2020-10-04T04:44:38 | 291,413,440 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 118 | py |
# Read whitespace-separated integers, swap every adjacent pair in place,
# and print the result.  An odd trailing element is left untouched.
values = list(map(int, input().split()))
for left in range(0, len(values) - 1, 2):
    values[left], values[left + 1] = values[left + 1], values[left]
print(*values)
"mgbo433@gmail.com"
] | mgbo433@gmail.com |
c2d90777355d93fc65291fc42863c08c8a844589 | 3e4d738379ab4c0ffc6eda76dc31254aaced694b | /src/ex28/client.py | 2d254bb65aeb50a0ca497b5d90ab13282439b238 | [] | no_license | sofiane8384/csd | 21e0883ecdad412320f23957193cd0b2750b8eac | 097d458ada69a464387f73a0996f9779123410a6 | refs/heads/master | 2020-07-06T06:08:50.890391 | 2016-11-19T12:26:55 | 2016-11-19T12:26:55 | 74,056,290 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 302 | py |
# Query the GlobalWeather SOAP service through a suds client.
from suds.client import Client

url = "http://www.webservicex.com/globalweather.asmx?wsdl"
client = Client(url)
# Inspect the generated client object (lists the available service methods).
print(client)
# Bug fix: the original called the undefined name `clientclient` (NameError).
client.service.GetCitiesByCountry("France")
client.service.GetWeather("France", "Besancon")
| [
"sofiane8384@gmail.com"
] | sofiane8384@gmail.com |
5049c7f8f9cc224352783ce24a1150664d01bb7d | c887ab97b1f793753b2d8d1db395c53e958e5343 | /main.py | 7a0703ea261be4e926cc9198b089f86e307630ef | [] | no_license | mmdaz/feature_testing_chat_bots | 70ce75639fe7b13090d3bcdec9ff43e22c50003a | 3dae6561b9d58c3c025916720ee0b1e1d507cd93 | refs/heads/master | 2020-06-26T15:08:45.315843 | 2020-05-31T20:01:15 | 2020-05-31T20:01:15 | 199,668,478 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 313 | py | from telegram.ext import Updater
from bot.test_controller import TestController
if __name__ == '__main__':
    # Build the bot, wire the test controller into the dispatcher, then
    # poll Telegram for updates until interrupted.
    updater = Updater(token="YOUR_BOT_TOKEN")
    dispatcher = updater.dispatcher
    # TestController presumably registers its handlers on construction —
    # verify against bot/test_controller.py.
    test_controller = TestController(dispatcher=dispatcher)
    updater.start_polling(poll_interval=1)  # check for updates every second
    updater.idle()  # block until Ctrl-C / SIGTERM
| [
"mohammad.azhdari.22@gmail.com"
] | mohammad.azhdari.22@gmail.com |
58d8ac2606cbf3b94e8e4f59dbdf4f4ca6b72e0c | 8c51b8513f0bfaa41b2c6d00f67a7fcee48e0b0d | /user/models.py | 5481b35dfa94b908eaacd399621c5db291505808 | [] | no_license | manik912/internship_fair | 82ca6f5d455b7816b6b29cd5d6c2c8133c9d77fe | 09dbe17fd6053f92be21f5574635522202a20644 | refs/heads/master | 2023-04-11T07:31:26.943770 | 2021-04-24T18:51:18 | 2021-04-24T18:51:18 | 357,615,713 | 0 | 11 | null | 2021-04-24T23:55:24 | 2021-04-13T16:13:56 | CSS | UTF-8 | Python | false | false | 934 | py | from django.contrib.auth.base_user import BaseUserManager
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from phonenumber_field.modelfields import PhoneNumberField
from django.contrib.auth.models import AbstractUser
class CustomUserManager(BaseUserManager):
    """Manager for the email-as-username User model defined below."""

    def create_superuser(self, email, password=None):
        # Superuser factory used by `manage.py createsuperuser`.  No
        # create_user counterpart is defined here, so regular accounts are
        # presumably created elsewhere — verify before relying on it.
        user = self.model(email=email, is_staff=True, is_superuser=True)
        user.set_password(password)  # hashes rather than storing plaintext
        user.save()
        return user
class User(AbstractUser):
    """Custom user model that authenticates with email instead of a username."""

    # Drop AbstractUser's username column entirely.
    username = None
    name = models.CharField(max_length=60, default='')
    email = models.EmailField(verbose_name='Email Address', unique=True)
    # Placeholder default number; blank=False makes the field required in forms.
    contact = PhoneNumberField(default='+919999999999', blank=False)
    # Authenticate against the email column; createsuperuser prompts for
    # nothing beyond email and password.
    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = []
    objects = CustomUserManager()

    def __str__(self):
        return self.email
| [
"guptamanik1921@gmail.com"
] | guptamanik1921@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.