blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e5fc3b96e27fd580d60715aa2a9faae4dfa0113f | 7f7ba9fa96eb7741e3a7956aae439802376099d1 | /excel_sync/db/models/mixins.py | 27dc97215b789f0bf7f1c8a652037d36b68db776 | [
"BSD-3-Clause"
] | permissive | FriedrichK/django-excel-sync | bd34911960fab6580985378da7427f7823163bf7 | 3e649231dcdd26b29278dc2e9563ad0ab67d9f1c | refs/heads/master | 2021-01-02T14:46:34.420031 | 2014-04-21T09:59:47 | 2014-04-21T09:59:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,270 | py | from excel_sync.db.models.fields import SpreadsheetMixin
class SpreadsheetModelMixin:
    """Mixin that equips a Django model class with spreadsheet importing."""
    @staticmethod
    def import_spreadsheet_data(klass):
        """Load every row from klass's configured spreadsheet source and
        save each row as a new klass instance.
        """
        datasource = get_spreadsheet_datasource(klass)
        spreadsheet_fields = get_fields_with_spreadsheet_metadata(klass)
        settings = build_field_settings(spreadsheet_fields)
        for row in datasource.get_rows(settings):
            klass(**row).save()
def get_spreadsheet_datasource(klass):
    """Return the spreadsheet datasource configured on klass's ``_meta``."""
    meta = klass._meta
    return meta.spreadsheet_source
def get_fields_with_spreadsheet_metadata(klass):
    """Return the subset of klass's model fields that carry spreadsheet metadata."""
    return [field for field in klass._meta.fields if has_spreadsheet_metadata(field)]
def has_spreadsheet_metadata(field):
    # A field opts in to spreadsheet syncing by mixing in SpreadsheetMixin.
    return isinstance(field, SpreadsheetMixin)
def build_field_settings(fields_with_spreadsheet_metadata):
    """Collect each field's spreadsheet settings, preserving field order."""
    return [field.get_spreadsheet_settings() for field in fields_with_spreadsheet_metadata]
| [
"friedrich@cartogami.com"
] | friedrich@cartogami.com |
17044f509ab7656dd2a0f90537d7b43bd6c089c7 | b026d650dd82704c89e4dd1ead291985758340f5 | /docadmin/uploadfile/uploadFileController/uploadFileController.py | 65f3d871b537c1991aea1b762d84e1b934ada82a | [] | no_license | RiteshM1993/document-management-system | 9c253405f62f501a5dcd463cbe47a95305f809cb | 31c142bfa9450c2fd91f7909a400d575d3e5d305 | refs/heads/master | 2020-04-08T09:19:14.702209 | 2018-11-26T19:04:54 | 2018-11-26T19:04:54 | 159,217,916 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,223 | py | import json
from rest_framework.decorators import api_view
from django.http import HttpResponse
from django.core.serializers.json import DjangoJSONEncoder
from docadmin.uploadfile.uploadFileService.uploadFileService import docsUpload
import time
@api_view(['POST'])
def savepdf(request):
    # Persist an uploaded PDF (multipart form field "file") via the upload service.
    pdfFile = request.FILES['file']
    # Creation timestamp taken in UTC (gmtime).
    createdDate = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())
    ezedocs_pdf_upload = docsUpload()
    result = ezedocs_pdf_upload.savePdfFile(pdfFile,createdDate)
    # Wrap the service result as {"data": ...} JSON.
    dataobj = {'data': result}
    return HttpResponse(json.dumps(dataobj, cls=DjangoJSONEncoder), content_type='application/json', status=200)
@api_view(['GET'])
def listPdf(request):
    # Return every stored PDF record as {"data": ...} JSON.
    ezedocs_pdf_upload = docsUpload()
    result = ezedocs_pdf_upload.listPdfFile()
    dataobj = {'data': result}
    return HttpResponse(json.dumps(dataobj, cls=DjangoJSONEncoder), content_type='application/json', status=200)
@api_view(['DELETE'])
def delPdf(request):
    # Delete the PDF record identified by the "id" query-string parameter.
    # NOTE(review): request.GET['id'] raises when "id" is absent - confirm
    # callers always supply it.
    id = request.GET['id']
    ezedocs_pdf_upload = docsUpload()
    result = ezedocs_pdf_upload.delPdfFile(id)
    dataobj = {'data': result}
return HttpResponse(json.dumps(dataobj, cls=DjangoJSONEncoder), content_type='application/json', status=200) | [
"riteshmahajan302@gmail.com"
] | riteshmahajan302@gmail.com |
d0d69744673378fb17a20fe4185d3a6f630df2a7 | e278ebf57ad195d6790c02f255a4aef872b82761 | /store/migrations/0010_auto_20170305_1808.py | cceb45739aebadc1c9746211614fa2251a32ddcd | [] | no_license | Georgerowberry/ArthursAntlers-WebShop | 97c63abcffed12dacc4adddbf49c0e70a691b403 | 74787f0f4a29d21956d9231f7efa816fcf111c01 | refs/heads/master | 2020-05-20T18:00:25.267667 | 2017-09-16T11:26:01 | 2017-09-16T11:26:01 | 84,499,198 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 537 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-06 02:08
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Renames the AboutTom model to Info and its "description" field to
    # "about_tom"; runs after the previous store migration.
    dependencies = [
        ('store', '0009_auto_20170305_1805'),
    ]
    operations = [
        migrations.RenameModel(
            old_name='AboutTom',
            new_name='Info',
        ),
        migrations.RenameField(
            model_name='info',
            old_name='description',
            new_name='about_tom',
        ),
    ]
| [
"george.rowberry@gmail.com"
] | george.rowberry@gmail.com |
2657d73570f09dbfee9cfc8dbda13278d9b56ded | b33515c51737edef52d8e578118b240e198b404b | /Refactoring/utils/deprecatedWarn.py | be494bc79ce1e7dc1d8bb884a15ecd4568f137b8 | [] | no_license | Panlq/NoteBook | 8e82419b3bf92872257697775610cd5c95beae40 | 0bc0b9605db750787bf0ef629f52f6f8df759052 | refs/heads/master | 2021-07-08T02:46:15.486364 | 2020-07-30T05:08:45 | 2020-07-30T05:08:45 | 164,087,743 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,019 | py |
"""
警告消息通常写入 sys.stderr
警告过滤器可以用来控制是否发出警告信息, 是一些匹配规则和动作的序列
reference: http://blog.konghy.cn/2017/12/16/python-warnings/
"""
import warnings
import functools
def deprecated(func):
    """Decorator that marks func as deprecated.

    Every call to the wrapped function emits a DeprecationWarning before
    delegating to the original implementation.
    """
    @functools.wraps(func)
    def inner(*args, **kwargs):
        # Force the warning to be shown on every call, then restore the
        # default filtering behaviour.
        warnings.simplefilter('always', DeprecationWarning)
        warnings.warn(f'Call to deprecated function {func.__name__}', category=DeprecationWarning, stacklevel=2)
        warnings.simplefilter('default', DeprecationWarning)
        return func(*args, **kwargs)
    return inner
# Example
@deprecated
def oldAdd(a, b):
    # Demonstration target: calling oldAdd emits a DeprecationWarning first.
    return a + b
class SomeClass(object):
    # Demonstrates that @deprecated also works on instance methods.
    @deprecated
    def someOldFunc(self, x, y):
        return x + y
if __name__ == '__main__':
    # Manual smoke test: emits a DeprecationWarning, then prints 3.
    print(oldAdd(1, 2))
"18960311163@163.com"
] | 18960311163@163.com |
ff93f4c4e03c2723185097e82af8b56b2598c151 | 16be53c2dc4eee5602d3f7a38c599917009fb802 | /account/migrations/0002_remove_profile_address.py | 02e0e499ef82da7cbf16355972f4610131d12c84 | [] | no_license | surajit003/mubango | 35f37fb992782ae168a407922b494c3be0605e00 | 603e13cd07417d200330ca7292d9032af568a0b9 | refs/heads/main | 2023-03-28T07:59:57.967026 | 2021-03-27T09:34:46 | 2021-03-27T09:34:46 | 315,040,701 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 319 | py | # Generated by Django 3.0.4 on 2021-01-12 21:06
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("account", "0001_initial"),
]
operations = [
migrations.RemoveField(
model_name="profile",
name="address",
),
]
| [
"surajit@poweredbypeople.io"
] | surajit@poweredbypeople.io |
e8e45e3e8945903fca914b84a2bd036df033d91b | 96c432c6d1083ca664111fd9b90161f077b43fbc | /001 - valid binary search tree/my_solution_01.py | 24d59df9d88ae4737a686570c6b5a017f218e93c | [] | no_license | rafa761/tech-series-algorithms | e9a5c1e9f99f70c50b88b997a56b7b37dfae4f12 | 1f88410f1c4f943c4af300e714b383c5a93588cd | refs/heads/master | 2022-12-01T23:12:45.909146 | 2020-08-06T11:20:49 | 2020-08-06T11:20:49 | 285,551,563 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,085 | py | def print_tree_traverse(root):
""" Just to print the tree, not used in the solution"""
current_level = [root]
while current_level:
print(' '.join(str(node) for node in current_level))
next_level = list()
for n in current_level:
if n.left:
next_level.append(n.left)
if n.right:
next_level.append(n.right)
current_level = next_level
class Node(object):
    """Binary-tree node holding a comparable value."""
    def __init__(self, val, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right
    def __gt__(self, other):
        # NOTE(review): returns True or None (never False); callers use it in
        # boolean context only, where None is falsy - confirm before reuse.
        if self.val > other.val:
            return True
    def __le__(self, other):
        # NOTE(review): compares self.val against the raw *other*, not
        # other.val - inconsistent with __gt__; verify intended semantics.
        if self.val < other:
            return True
    def __repr__(self):
        return str(self.val)
class ValidateTree:
    """Validates ordering between a root node's left and right subtrees.

    validate() returns 'VALID', 'INVALID', or 'EMPTY' (root has no children).
    NOTE(review): only left-subtree values are compared against right-subtree
    values; children are never compared against their own ancestors, so this
    is not a complete BST check.
    """
    def __init__(self, tree):
        self.tree = tree
    def validate(self):
        """
        To Validate the tree we need to traverse all branches, storing values on left and right branches
        """
        left_branch = []
        right_branch = []
        if self.tree.left:
            left_branch = self.traverse_tree(self.tree.left)
        if self.tree.right:
            right_branch = self.traverse_tree(self.tree.right)
        # Bug fix: the original condition `not left_branch and right_branch`
        # reported 'EMPTY' for any tree that had only a right child; 'EMPTY'
        # should mean *both* subtrees are absent.
        if not left_branch and not right_branch:
            return 'EMPTY'
        # Any left value greater than any right value violates BST ordering.
        for left_value in left_branch:
            for right_value in right_branch:
                if left_value > right_value:
                    return 'INVALID'
        return 'VALID'
    def traverse_tree(self, start_node):
        """Return every node of the subtree rooted at start_node (BFS order)."""
        values_list = [start_node]
        current_level = [start_node]
        while current_level:
            next_level = []
            for node in current_level:
                if node.left:
                    next_level.append(node.left)
                    values_list.append(node.left)
                if node.right:
                    next_level.append(node.right)
                    values_list.append(node.right)
            current_level = next_level
        return values_list
if __name__ == '__main__':
    # Tree where 2 sits in the right subtree of 5 - reported as invalid.
    invalid_tree = Node(5)
    invalid_tree.left = Node(4)
    invalid_tree.right = Node(7)
    invalid_tree.right.left = Node(2)
    # print_tree_traverse(invalid_tree)
    validation = ValidateTree(invalid_tree).validate()
    print('invalid tree', validation)
    # Properly ordered tree: left child smaller, right child larger.
    valid_tree = Node(5)
    valid_tree.left = Node(4)
    valid_tree.right = Node(7)
    validation = ValidateTree(valid_tree).validate()
    print('valid tree', validation)
| [
"rafael.antunes@terceiroslinx.com.br"
] | rafael.antunes@terceiroslinx.com.br |
a4d09a2daee01e432a211c7250828e7db3f6b2a1 | 250538d7e39c4757e90afa2aae91e5141ac4c1bf | /task/chapter3_game.py | f02594d94399dfc1eda517c9e18a5afaa7cdc9f7 | [] | no_license | SakaiMasato/pythonTest | e9c518d43cf19b2cf5003475830b3f5d3d7b7518 | a653f1f2870a37212457f5cdf8188be097ed78da | refs/heads/master | 2020-05-30T04:39:02.024203 | 2020-01-09T10:17:00 | 2020-01-09T10:17:00 | 189,543,125 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 665 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
' guess number game '
__author__ = 'Bob Bao'
try_number = 7  # maximum number of guesses the player gets
def game_logic(num):
    """Interactive number-guessing loop: the player has try_number attempts
    to guess *num*, with higher/lower hints after each wrong guess.

    NOTE(review): int(input()) raises ValueError on non-numeric input.
    """
    # 'global' is unnecessary (try_number is only read here) but harmless.
    global try_number
    index = 0
    while index < try_number:
        print('please guess the number, range from 1~10: ', end='')
        guess_number = int(input())
        if (guess_number == num):
            print('bingo! you did it!')
            return
        elif (guess_number < num):
            print('your number is less than answer')
        else:
            print('your number is greater than answer')
        index += 1
    print('you have run out of all the chances')
if __name__ == '__main__':
game_logic(4) | [
"bob.bao@ericsson.com"
] | bob.bao@ericsson.com |
a1707473ebf79ec49b00af1832a855ac2897e02f | 8b2d1894d99c332950362042d2a12c138ab696fa | /main.py | b85bf401d3262e63b069a477e9fc0c4599b810a1 | [] | no_license | bijoyh1/assignment3 | 0a1f50ad43fd225abd499a9dde45c37c8b90372d | 3a5b08f5797e0cd5afc1c3ce6d3693e9b892e5ca | refs/heads/main | 2023-04-10T05:19:26.767289 | 2021-04-12T02:15:59 | 2021-04-12T02:15:59 | 357,005,747 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,036 | py | import math
def feistel(input, k):
    """One Feistel round over an 8-bit binary string, with a final half-swap.

    Splits input into 4-bit halves L and R and returns
    (L XOR F(R, k)) concatenated with R.
    """
    left, right = input[:4], input[4:]
    mixed = exclusiveOr(left, F(right, k))
    # The swapped halves form the round output.
    return mixed + right
def exclusiveOr(a, b):
    """Bitwise XOR of the first four bits of two 4-bit binary strings."""
    return ''.join('0' if a[i] == b[i] else '1' for i in range(4))
def F(R0, k):
    """Round function: (2 * R0 ** k) mod 16, as a zero-padded 4-bit string."""
    value = (2 * (math.pow(BinarytoDecimal(R0), BinarytoDecimal(k)))) % 16
    return DecimaltoBinary(value).rjust(4, "0")
def DecimaltoBinary(a):
    """Unpadded binary representation of int(a); returns "" for zero."""
    n = int(a)
    digits = ""
    while n >= 1:
        digits = str(n % 2) + digits
        n //= 2
    return digits
def BinarytoDecimal(a):
    """Decimal value (as a float, via math.pow) of a 4-bit binary string."""
    return sum(int(a[3 - power]) * math.pow(2, power) for power in range(4))
def ASCIIBinary(x):
    """8-bit, zero-padded binary string for character x's code point."""
    return DecimaltoBinary(str(ord(x))).zfill(8)
word = input("Enter word")
for x in word:
print(feistel(ASCIIBinary(x),"0011"))
| [
"noreply@github.com"
] | bijoyh1.noreply@github.com |
6435cc8890aee49f3317639662d17b1144870d8f | 0a96c45ea627fea8aacce176c30d8c4e3e31d798 | /trapezoid.py | 0d9bbc4dc73b523226cbc533d425602c77793e27 | [] | no_license | aomsk/GeoCal | 6e87407e81a43c30e0e43acba3f67d5dade0e649 | 63245a8453c052231099eff816bac85373377c27 | refs/heads/master | 2023-03-19T01:29:53.773751 | 2019-12-14T11:28:03 | 2019-12-14T11:28:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,481 | py | import turtle, re
import tkinter as tk
from math import pi, sin, asin, radians, degrees, sqrt, acos
root = tk.Tk()
screen = turtle.Screen()
root.title("Geometric Shape Calculator")
# Physical screen size in pixels; combined with the size in millimetres
# below to estimate how many pixels make up one centimetre.
w_px = root.winfo_screenwidth()
h_px = root.winfo_screenheight()
#Estimation of the amount of pixel in a 1 cm line in any direction
px_cm = ((w_px / (root.winfo_screenmmwidth() / 10) + h_px / (root.winfo_screenmmheight() / 10)) / 2)
"""Trapezoid"""
def trapezoid(t):
    """Prompt for trapezoid dimensions (in cm), draw it with turtle *t*, and
    return the five sizes plus the two computed base angles (degrees)."""
    #enter trapezoid parts
    size = turtle.textinput("Enter the sizes:", "Base_A Base_B Leg_A Leg_B Height")
    size = [float(i) for i in size.split()]
    # Convert centimetres to screen pixels using the global px_cm estimate.
    base_a = size[0] * px_cm
    base_b = size[1] * px_cm
    leg_a = size[2] * px_cm
    leg_b = size[3] * px_cm
    height = size[4] * px_cm
    #find base angles
    # NOTE(review): asin/acos require height <= leg length, otherwise a math
    # domain error is raised - inputs are not validated.
    angles_1 = degrees(asin(height/leg_a))
    angles_2 = degrees(acos(height/leg_b))
    #Turtle
    t.fd(max(base_a, base_b))
    t.lt(90+(90-angles_1))
    t.fd(leg_a)
    t.lt(angles_1)
    t.fd(min(base_a, base_b))
    t.lt(90-angles_2)
    t.fd(leg_b)
    #Info for trapezoid
    return size + [angles_1, angles_2]
def info_trap(size):
    """Return a human-readable summary (area, perimeter, angles) for the
    trapezoid described by *size*, as produced by trapezoid().

    Bug fixes: the original "def info_trap(size)" was missing its colon
    (SyntaxError), and the assembled text was silently discarded - it is
    now returned.  The trailing call to the undefined main() was removed
    (it would raise NameError if reached).
    """
    text = "The area of this trapezoid is %0.2f sqcm."%(1/2*(size[0]+size[1])*size[4])
    text += "The Perimeter of this trapezoid is %0.2f cm."%(size[0]+size[1]+size[2]+size[3])
    text += "All angles in this trapezoid is %0.2f, %0.2f, %0.2f, %0.2f."%(size[5], 180-size[5], size[6]+90, 180-(size[6]+90))
    return text
| [
"50584701+R0C0KY@users.noreply.github.com"
] | 50584701+R0C0KY@users.noreply.github.com |
299477b4f7e66032e1ed0e16cfad287e693edfdb | 59d7b904615c7a387b99d180c144d16ac8d2fd43 | /e_beautiful_strings.py | 667d93902ed65cca39b8a78ffaa208f38356c99a | [] | no_license | gtenorio/Code-Eval-Challenges | 03b8365d112eb594dde2d3a021cdcb7b212ac33f | 670c223d6029bb5ce1d38f3c56a8c1ca6a3dd2ca | refs/heads/master | 2021-01-20T02:15:59.847947 | 2015-01-07T03:08:27 | 2015-01-07T03:08:27 | 26,451,973 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 404 | py | __author__ = 'Gio'
#Code Eval: Beautiful Strings
import sys
# For each line of the input file, compute the maximum "beauty": letters are
# assigned values 1..26 so the most frequent letter gets the highest value.
test_cases = open(sys.argv[1], 'r')
for test in test_cases:
    # Frequency count for each of the 26 lowercase ASCII letters.
    lib = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
    test = test.lower()
    total = 0
    mult = 1
    for i in test:
        s = ord(i)
        # Count ASCII letters only; every other character is ignored.
        if s >= 97 and s <= 122:
            s -= 97
            lib[s] += 1
    # Ascending sort pairs the largest count with the largest value (26).
    lib = sorted(lib)
    for j in lib:
        total += j*mult
        mult += 1
    print(total)
test_cases.close()
| [
"gtenorio868@gmail.com"
] | gtenorio868@gmail.com |
7111f32084156c05493026e38ec44b185622bb09 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_198/ch73_2019_09_03_19_31_47_647277.py | 33be594abb99268b65cbb1ae31306829a17afa62 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 126 | py | def remove_vogais(palavra):
return palavra.replace('a','').replace('e','').replace('i','').replace('o','').replace('u','') | [
"you@example.com"
] | you@example.com |
0589f953505604a5789640d057e5e0234c0d7c2c | 6dda9390f5f7a1063a295a58a3303ec03239774c | /counting-characters/counting.py | 8168c0cfb74257de639a70f4af034501de35dfc9 | [] | no_license | vinshe1169/build-a-blog | 32d70da1056bae5228d85393d1bdd5dcb1d6ecb1 | ca85d7f14bc81a5226438f5552402d3b9b785ee8 | refs/heads/master | 2021-01-11T14:23:51.601312 | 2017-02-23T14:09:32 | 2017-02-23T14:09:32 | 81,372,490 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 682 | py | text = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nunc accumsan sem ut ligula scelerisque sollicitudin. Ut at sagittis augue. Praesent quis rhoncus justo. Aliquam erat volutpat. Donec sit amet suscipit metus, non lobortis massa. Vestibulum augue ex, dapibus ac suscipit vel, volutpat eget massa. Donec nec velit non ligula efficitur luctus."
# Tally how many times each character occurs in `text`.
countchars = {}
for k in text:
    # Bug fix: dict.has_key() is Python 2-only (AttributeError on Python 3);
    # dict.get with a default handles both first and repeat sightings.
    countchars[k] = countchars.get(k, 0) + 1
#keylist = list(countchars.keys())
#keylist.sort()
#print(keylist)
for eachitem in countchars:
    print(eachitem,countchars[eachitem])
#print(countchars)
| [
"vinman99@gmail.com"
] | vinman99@gmail.com |
417a581449ba402868c72f98069ca79db1a6fbc1 | 461a52411199fd05853ea7044ce0170226e70049 | /scheduler/post_scheduler/migrations/0002_schedule.py | 51842ed5b5838e5d9175c3eb007ce414219ccc9e | [
"MIT"
] | permissive | Awinja-j/Social-Media-post-Scheduler | 46a6c1306c672bd689f6b9c6c51abe0c7da209d1 | 4f95b4bb2ca3f890d3e22bcda859b94ebc483b87 | refs/heads/main | 2023-08-25T19:34:56.066898 | 2021-10-29T12:31:40 | 2021-10-29T12:31:40 | 339,370,705 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 529 | py | # Generated by Django 3.2.8 on 2021-10-28 20:45
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the Schedule model pairing a post_id with a scheduled_id
    # (scheduled_id is the primary key).
    dependencies = [
        ('post_scheduler', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Schedule',
            fields=[
                ('post_id', models.CharField(default=None, max_length=11)),
                ('scheduled_id', models.CharField(default=None, max_length=11, primary_key=True, serialize=False)),
            ],
        ),
    ]
| [
"ingari61@hotmail.com"
] | ingari61@hotmail.com |
2398d673bdef1187105868a48ac3e87d4555d20f | 3c01d7928029e74a19d646f5a40b3bf099b281a7 | /typeshed/stdlib/mimetypes.pyi | 5a3ec91acbcdb71fb39f59656555c7233be0d66e | [
"MIT"
] | permissive | arpancodes/protectsql | f3ced238c103fca72615902a9cb719c44ee2b5ba | 6392bb7a86d1f62b86faf98943a302f7ea3fce4c | refs/heads/main | 2023-08-07T16:33:57.496144 | 2021-09-24T19:44:51 | 2021-09-24T19:44:51 | 409,894,807 | 0 | 1 | MIT | 2021-09-24T19:44:52 | 2021-09-24T08:46:02 | Python | UTF-8 | Python | false | false | 1,554 | pyi | import sys
from _typeshed import StrPath
from typing import IO, Sequence, Tuple
# Module-level convenience API of the mimetypes stub.
if sys.version_info >= (3, 8):
    def guess_type(url: StrPath, strict: bool = ...) -> Tuple[str | None, str | None]: ...
else:
    def guess_type(url: str, strict: bool = ...) -> Tuple[str | None, str | None]: ...
def guess_all_extensions(type: str, strict: bool = ...) -> list[str]: ...
def guess_extension(type: str, strict: bool = ...) -> str | None: ...
def init(files: Sequence[str] | None = ...) -> None: ...
def read_mime_types(file: str) -> dict[str, str] | None: ...
def add_type(type: str, ext: str, strict: bool = ...) -> None: ...
# Module-level state and mapping tables.
inited: bool
knownfiles: list[str]
suffix_map: dict[str, str]
encodings_map: dict[str, str]
types_map: dict[str, str]
common_types: dict[str, str]
class MimeTypes:
    # Mapping database between filename suffixes/encodings and MIME types.
    # NOTE(review): the paired dicts in types_map / types_map_inv are presumed
    # (non-strict, strict) variants - confirm against the CPython docs.
    suffix_map: dict[str, str]
    encodings_map: dict[str, str]
    types_map: Tuple[dict[str, str], dict[str, str]]
    types_map_inv: Tuple[dict[str, str], dict[str, str]]
    def __init__(self, filenames: Tuple[str, ...] = ..., strict: bool = ...) -> None: ...
    def guess_extension(self, type: str, strict: bool = ...) -> str | None: ...
    def guess_type(self, url: str, strict: bool = ...) -> Tuple[str | None, str | None]: ...
    def guess_all_extensions(self, type: str, strict: bool = ...) -> list[str]: ...
    def read(self, filename: str, strict: bool = ...) -> None: ...
    def readfp(self, fp: IO[str], strict: bool = ...) -> None: ...
    if sys.platform == "win32":
        def read_windows_registry(self, strict: bool = ...) -> None: ...
| [
"arpanforbusiness@gmail.com"
] | arpanforbusiness@gmail.com |
18c97960a1dea255841b8afaa63e1c9984fe45ed | 034158bee04f7b03dc3dcf53dfed4bdfe6923121 | /gbdxtools/__init__.py | 5d52720ad43871087ffc77fcc9fe9e2a819f41dd | [
"MIT"
] | permissive | SeanFoley123/gbdxtools | 3141de4884b4276366046c28c4f7e583438354a8 | aa82a5c684f2dd959b4be93cc038dc5d0da7c392 | refs/heads/master | 2021-01-19T06:24:14.144019 | 2017-04-06T18:21:10 | 2017-04-06T18:21:10 | 87,461,645 | 1 | 0 | null | 2017-04-06T18:26:53 | 2017-04-06T18:26:53 | null | UTF-8 | Python | false | false | 71 | py | from __future__ import absolute_import
from .interface import Interface | [
"nate@rickl.in"
] | nate@rickl.in |
8be67a4123d82e52566b9ddbcd25f0c0e664ead7 | f9fe197a099f92b6467775e4584b19783ac0a835 | /阿里云服务器上传文件/python/momosite/pay/models.py | 49c69d681b2d5877156951e13e7e1aa9ac1ab3e0 | [] | no_license | MoMoLT/Mo- | 2b7d9ec2b123a56e1f6816544f8839d835d7899c | 4a26e232cbe6c83cc09cffe3ca85117cd2c87c61 | refs/heads/master | 2021-02-06T17:29:02.502936 | 2020-02-29T09:39:15 | 2020-02-29T09:39:15 | 243,935,000 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 796 | py | from django.db import models
# Create your models here.
# One orders table, two top-up detail tables (translated from Chinese).
class OrdersInfo(models.Model):
    # Order record; field identifiers are Chinese: 订单号=order number,
    # 充值类型=top-up type, 充值金额=amount, 创建时间=created at, 支付状态=paid flag.
    订单号 = models.CharField(max_length=100, unique=True)
    充值类型 = models.CharField(max_length=20)
    充值金额 = models.FloatField()
    创建时间 = models.DateTimeField(auto_now_add=True)
    支付状态 = models.BooleanField(default=False)
    def __str__(self):
        return self.订单号
class OrderSOWInfo(models.Model):
    # Student top-up details: 学号=student id, 姓名=name; deleted with its order.
    订单号 = models.ForeignKey(OrdersInfo, on_delete=models.CASCADE)
    学号 = models.CharField(max_length=15)
    姓名 = models.CharField(max_length=15)
class OrderDormInfo(models.Model):
    # Dormitory top-up details: 住址=address; deleted with its order.
    订单号 = models.ForeignKey(OrdersInfo, on_delete=models.CASCADE)
    住址 = models.TextField(null=False)
| [
"957869702@qq.com"
] | 957869702@qq.com |
b4891a5b540f1bd8e420aa57dab3d7ec38f825b6 | 232d0a99df3ad03ce7811b4e96ebb6982cc0f865 | /aggregate.py | 396c21a0343add8ef7e949e48692f0f5f77f2ad8 | [] | no_license | tomalrussell/aggregation-case-study | 9e6db17e3a43f436dfce8cb84e608cb61b607239 | d23217d94e61d89043baffcfd157883fc474b8ae | refs/heads/master | 2020-06-23T03:46:44.340892 | 2016-11-24T11:23:43 | 2016-11-24T11:23:43 | 74,666,760 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,478 | py | from __future__ import print_function
import argparse
from collections import namedtuple
import fiona
from shapely.geometry import shape
from rtree import index
def proportion_of_a_intersecting_b(a, b):
    """Fraction of geometry a's area that falls inside geometry b."""
    overlap = a.intersection(b)
    return overlap.area / a.area
# Pairs a geometry (built via shapely's shape()) with the attribute value
# being aggregated.
ShapeWithValue = namedtuple('ShapeWithValue', ['shape', 'value'])
def aggregate(input_file, output_file, reporting_geometry, reporting_initial_value, reporting_attribute, reporting_attribute_type):
    """Areal-weighted aggregation.

    Reads features from *input_file*, spreads each feature's
    *reporting_attribute* value over the geometries of *reporting_geometry*
    proportionally to overlap area, and writes the reporting features (with
    the summed attribute) to *output_file* as an ESRI Shapefile.
    """
    input_features = []
    idx = index.Index()
    with fiona.drivers():
        with fiona.open(input_file) as input_src:
            for feature in input_src:
                s = ShapeWithValue(
                    shape=shape(feature['geometry']),
                    value=feature['properties'][reporting_attribute]
                )
                input_features.append(s)
            # Populate R-tree index with bounds of input features
            for pos, feature in enumerate(input_features):
                idx.insert(pos, feature.shape.bounds)
        with fiona.open(reporting_geometry) as reporting_src:
            # Output schema mirrors the reporting layer, plus the attribute.
            sink_schema = reporting_src.schema.copy()
            sink_schema['properties'][reporting_attribute] = reporting_attribute_type
            with fiona.open(
                output_file, 'w',
                crs=reporting_src.crs,
                driver="ESRI Shapefile",
                schema=sink_schema) as reporting_sink:
                for reporting_feature in reporting_src:
                    reporting_shape = shape(reporting_feature['geometry'])
                    reporting_value = reporting_initial_value
                    # look up bbox intersecting features in R-tree
                    intersecting_features = [input_features[pos] for pos in idx.intersection(reporting_shape.bounds)]
                    for input_feature in intersecting_features:
                        # find proportion of input feature that intersects
                        proportion = proportion_of_a_intersecting_b(input_feature.shape, reporting_shape)
                        # add that proportion of the attribute_to_report to the reporting_value
                        reporting_value = reporting_value + proportion * input_feature.value
                    print(reporting_value)  # NOTE(review): leftover debug output - consider removing
                    reporting_feature['properties'][reporting_attribute] = reporting_value
                    reporting_sink.write(reporting_feature)
def setup_parser():
    """Parse command line arguments

    Returns the parsed namespace with reporting_initial_value coerced to the
    declared -rt type.
    """
    parser = argparse.ArgumentParser(description='Aggregate a value from one geometry to another.')
    parser.add_argument('-i', '--input-file',
                        required=True,
                        help='Path to the input file, containing the data to be aggregated.')
    parser.add_argument('-o', '--output-file',
                        required=True,
                        help='Path to the output file.')
    parser.add_argument('-rg', '--reporting-geometry',
                        required=True,
                        help='Path to the reporting geometry file, containing geometry to be used as output.')
    parser.add_argument('-ri', '--reporting-initial-value',
                        required=True,
                        help='Initial value for the attribute to output (used if no geometries intersect)')
    parser.add_argument('-ra', '--reporting-attribute',
                        required=True,
                        help='Attribute name')
    parser.add_argument('-rt', '--reporting-attribute-type',
                        required=True,
                        choices=['int', 'str', 'float'],
                        help='Type of value (can be "int", "str" or "float")')
    parsed_args = parser.parse_args()
    # argparse yields strings; coerce the initial value to the declared type.
    if parsed_args.reporting_attribute_type == 'int':
        parsed_args.reporting_initial_value = int(parsed_args.reporting_initial_value)
    if parsed_args.reporting_attribute_type == 'str':
        parsed_args.reporting_initial_value = str(parsed_args.reporting_initial_value)
    if parsed_args.reporting_attribute_type == 'float':
        parsed_args.reporting_initial_value = float(parsed_args.reporting_initial_value)
    return parsed_args
if __name__ == '__main__':
    args = setup_parser()
    """Example usage:
    python aggregate.py \
        -i data/oa/england_oa_2011_clipped_with_pop.shp \
        -o data/grid_with_pop.shp \
        -rg data/grid.shp \
        -ri 0 -ra pop -rt int
    """
    # Run the aggregation with the parsed command-line options.
    aggregate(
        args.input_file,
        args.output_file,
        args.reporting_geometry,
        args.reporting_initial_value,
        args.reporting_attribute,
        args.reporting_attribute_type
    )
| [
"tomalrussell@gmail.com"
] | tomalrussell@gmail.com |
846b95f2235f52cf23529b41d7ec6866aa1da566 | 8fdb93b6dfddf3d67281523fb10c7da52b5b501b | /testing/nested_patch/mocked_my_module.py | ebbe62715b4ae57d46f451d6158a306bd7a6e1e6 | [] | no_license | ishtiaq2/python | e40a0f8e7ab151d5dc460513f267a943d9ec3b9f | dc96a63f2a3755634752267182950dfaf515b698 | refs/heads/master | 2020-12-19T23:01:39.510101 | 2020-04-27T15:46:10 | 2020-04-27T15:46:10 | 235,876,722 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 69 | py | def mocked_db_get_data():
return {
'Car': 'BMW'
} | [
"ishtiaq2@kth.se"
] | ishtiaq2@kth.se |
3b8168d0607de48f6eda586dccf7a0aa45bfa983 | 038a9ab71cb7d4283f8bda1a881a6ea0926aff6f | /questions/migrations/0003_auto_20190514_0120.py | 08456216eed7c88f56549947b86ecb255b07f217 | [] | no_license | mikeaerickson/interviewrepo | ff35e8b20c6152fde19cb7e1927d0a5f90d7b601 | 593bf30f20f124d0369334d0e56ff8a77761d8dd | refs/heads/master | 2023-05-01T16:51:26.763552 | 2019-06-30T22:21:00 | 2019-06-30T22:21:00 | 194,566,649 | 0 | 0 | null | 2023-04-21T20:33:10 | 2019-06-30T22:22:41 | Python | UTF-8 | Python | false | false | 458 | py | # Generated by Django 2.2.1 on 2019-05-14 01:20
from django.db import migrations, models
class Migration(migrations.Migration):
    # Makes Question.belongs_to nullable with related_name "question".
    # NOTE(review): on_delete='SET_NULL' is a *string*; Django normally takes
    # the callable models.SET_NULL - verify this migration applies/behaves.
    dependencies = [
        ('questions', '0002_auto_20190512_2248'),
    ]
    operations = [
        migrations.AlterField(
            model_name='question',
            name='belongs_to',
            field=models.ForeignKey(null=True, on_delete='SET_NULL', related_name='question', to='questions.Job'),
        ),
    ]
| [
"michaelanderickson@gmail.com"
] | michaelanderickson@gmail.com |
ba683fa2671b6bcd12fa5fce3c7356675c5f5a60 | db697271157368eb39ee9d9479d0c6a7eb9d06dd | /virtual/bin/easy_install | f22e90e735bfeae51bc3a727bc7716715ebadcd1 | [
"MIT"
] | permissive | amoskipz/instagram | 5edaf03fd784c44fb325dc9f294fab41acc7bc4c | 120e5ef1213567297689e04d5b8620508ce18fea | refs/heads/master | 2023-04-09T13:09:27.379801 | 2021-04-10T17:29:14 | 2021-04-10T17:29:14 | 352,740,611 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 278 | #!/home/moringa/Desktop/amoz/amosinstagram/virtual/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    # Strip Windows launcher suffixes ("-script.py(w)" / ".exe") from argv[0]
    # before handing control to setuptools' easy_install entry point.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"oduorthomas14@gmail.com"
] | oduorthomas14@gmail.com | |
720cdd79d38734149af95e62f0400a403dd01f09 | f747153527426d40e999ae9e1c8c7f17d74eb408 | /ML/pa2/KNN.py | bed265e545bdf42732c933aa080c118944713d4e | [] | no_license | philipz1/ML | 927ed7b1f38034321af405af0d837d50b89e8cb2 | 9198ca6f6503e378610a715cbdda88c87050c930 | refs/heads/master | 2021-03-08T19:26:15.313846 | 2017-06-01T04:11:29 | 2017-06-01T04:11:29 | 56,735,355 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,480 | py | import pandas as pd
import numpy as np
import time
def dist(p1, p2):
    """Euclidean distance between equal-length points p1 and p2."""
    squared_sum = sum((p1[i] - p2[i]) ** 2 for i in range(len(p1)))
    return np.sqrt(squared_sum)
def knn(data, k):
    '''
    returns a dictionary of dictionaries where key: key: distance between keys
    (i.e. {point: {neighbour: distance}} for each point's k nearest points)
    '''
    # data is 2-D array-like (n samples x p features).
    n, p = data.shape
    knn_dict = {}
    for i, p1 in enumerate(data):
        k_list = []
        k_dict = {}
        # Distance from p1 to every point, including p1 itself.
        for j, p2 in enumerate(data):
            k_list.append([p2, dist(p1,p2)])
        k_list.sort(key = lambda x : x[1])
        # Skip sorted index 0 (p1 itself at distance 0), keep the next k.
        # NOTE(review): this inner 'i' shadows the outer loop variable
        # (harmless here); duplicate points may evict the wrong entry.
        for i in range(1, k+1): #gets rid of self
            k_dict[tuple(k_list[i][0])] = k_list[i][1]
        knn_dict[tuple(p1)] = k_dict
    return knn_dict
# def knn(data, k):
# '''
# returns a dictionary of dictionaries where key: key: distance between keys
# '''
# n, p = data.shape
# knn_dict = {}
# for i, p1 in enumerate(data):
# k_list = [0] * k
# k_dist = [0] * k
# k_dict = {}
# for j, p2 in enumerate(data):
# zero_check = sum([1 if type(i).__module__ == 'numpy' else 0 for i in k_list]) != k
# if zero_check and dist(p1, p2) != 0:
# k_list[k_dist.index(0)] = p2
# k_dist[k_dist.index(0)] = dist(p1, p2)
# elif dist(p1, p2) < max(k_dist) and dist(p1, p2) != 0:
# k_list[k_dist.index(max(k_dist))] = p2
# k_dist[k_dist.index(max(k_dist))] = dist(p1, p2)
# for i in range(len(k_list)):
# k_dict[tuple(k_list[i])] = k_dist[i]
# knn_dict[tuple(p1)] = k_dict
# return knn_dict | [
"noreply@github.com"
] | philipz1.noreply@github.com |
7649515b25b4617c91af001997ef656027e0ba82 | fea1a96ae970490ff582b031b6c78aae38829cb5 | /.history/app_20210831071818.py | 51d29ef19efdab19aa229919d2dac37dcc1f8313 | [] | no_license | endomasato/flask_hayatasu | 03fc77be9e44dc50d796640c4c1fd84d5ea03074 | 08f933b920c909d43f3b87d2638e6963d577cb6f | refs/heads/master | 2023-08-01T06:28:54.605717 | 2021-09-04T13:36:57 | 2021-09-04T13:36:57 | 400,898,843 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,120 | py | from datetime import datetime
from flask import Flask, render_template, request, redirect
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
# Todos are stored in a local SQLite database file.
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///todo.db'
db = SQLAlchemy(app)
class Post(db.Model):
    # A single todo item with a required title and due date.
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(30), nullable=False)
    detail = db.Column(db.String(100))
    due = db.Column(db.DateTime, nullable=False)
@app.route('/', methods=['GET', 'POST'])
def index():
    # GET: render the full todo list. POST: create a new post from form data.
    if request.method == 'GET':
        posts = Post.query.all()
        return render_template('index.html', posts=posts)
    else:
        title = request.form.get('title')
        detail = request.form.get('detail')
        due = request.form.get('due')
        # The date field arrives as 'YYYY-MM-DD'; convert it to a datetime.
        due = datetime.strptime(due, '%Y-%m-%d')
        new_post = Post(title=title, detail=detail, due=due)
        db.session.add(new_post)
        db.session.commit()
        return redirect('/')
@app.route('/create')
def create():
    # Bug fix: this view was registered at '/', the same rule index() already
    # handles, which made it unreachable; serve the creation form at /create.
    return render_template('create.html')
if __name__ == '__main__':
app.run(debug=True) | [
""
] | |
0a33b5f374481795feb5819c3626f7fee2d520e3 | a5cdd65d990693950410502575ff6ceb5a8bae46 | /Main Program (зубр, ценовое позиционирование)/zavedis AKOM/zavECO.py | 0bc612f62437c3ee11d04c58cd15bec5c3db5db9 | [] | no_license | AlexCruel/Web-Scrapping-Application | be696ce1d3ce5b158c90e138857537689ac9fe0b | 8e971cc745be9b38f2d36c189e0f18391f628295 | refs/heads/master | 2023-07-10T21:46:28.083003 | 2021-08-10T17:58:59 | 2021-08-10T17:58:59 | 276,294,591 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,472 | py | from main import Site
zav59 = Site('https://www.rimir.by/', 'https://zavedis.by/vibor-po-marke-akb/akkumulyator-rusbat-6st-75-evro')
find_str = "find('div', class_='product-price-old').get_text(strip=True)"
zav59.parser('div', 'product-price-old', find_str, 'G14')
zav59 = Site('https://www.rimir.by/', 'https://zavedis.by/vibor-po-marke-akb/akkumulyator-rusbat-6st-90-evro')
find_str = "find('div', class_='product-price-old').get_text(strip=True)"
zav59.parser('div', 'product-price-old', find_str, 'G15')
zav59 = Site('https://www.rimir.by/', 'https://zavedis.by/vibor-po-marke-akb/akkumulyator-rusbat-6st-55e')
find_str = "find('div', class_='product-price-old').get_text(strip=True)"
zav59.parser('div', 'product-price-old', find_str, 'G17')
zav59 = Site('https://www.rimir.by/', 'https://zavedis.by/vibor-po-marke-akb/akkumulyator-rusbat-6st-60e-2')
find_str = "find('div', class_='product-price-old').get_text(strip=True)"
zav59.parser('div', 'product-price-old', find_str, 'G18')
zav59 = Site('https://www.rimir.by/', 'https://zavedis.by/vibor-po-marke-akb/akkumulyator-rusbat-6st-75e')
find_str = "find('div', class_='product-price-old').get_text(strip=True)"
zav59.parser('div', 'product-price-old', find_str, 'G19')
zav59 = Site('https://www.rimir.by/', 'https://zavedis.by/vibor-po-marke-akb/akkumulyator-rusbat-6st-100e')
find_str = "find('div', class_='product-price-old').get_text(strip=True)"
zav59.parser('div', 'product-price-old', find_str, 'G20') | [
"sasha.karpenkov.34@gmail.com"
] | sasha.karpenkov.34@gmail.com |
7206562dcaae23b8ffd1ffd4ad6f50ea934181ac | 066d6c62f7271e99694a8e54190d192b9f73df8b | /level1/행렬의 덧셈.py | d510eae418f7cf6bd045212d0f026002547becf8 | [] | no_license | chankyu11/programmers | ebb904461a2585ac298c9d00d14973e472b7a36d | dfa8b1dd3948db66c404f8ccc15ce1c0804a2bb2 | refs/heads/master | 2023-07-12T02:38:45.808813 | 2021-08-23T09:08:47 | 2021-08-23T09:08:47 | 381,300,809 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 379 | py | # https://programmers.co.kr/learn/courses/30/lessons/12950
def solution(arr1, arr2):
answer = []
for i, j in zip(arr1,arr2):
tmp = []
for m in range(len(i)):
tmp.append(i[m]+j[m])
answer.append(tmp)
return answer
def solution(arr1, arr2):
answer = [[c + d for c, d in zip(a, b)] for a, b in zip(arr1,arr2)]
return answer | [
"lck1130@naver.com"
] | lck1130@naver.com |
0571f647ae3ab197917c302a532b8449f7afe44d | f504253210cec1c4ec6c3ea50a45564db7d6cd7f | /scripts/parse_pyi.py | d4b35524274e7a8a2fb6d2ba5b401434ca6486d5 | [
"MIT"
] | permissive | phil65/PrettyQt | b1150cb4dce982b9b8d62f38f56694959b720a3e | f00500d992d1befb0f2c2ae62fd2a8aafba7fd45 | refs/heads/master | 2023-08-30T21:00:08.905444 | 2023-08-17T12:24:45 | 2023-08-17T12:24:45 | 177,451,205 | 17 | 5 | MIT | 2020-08-15T22:21:18 | 2019-03-24T18:10:21 | Python | UTF-8 | Python | false | false | 1,049 | py | import pathlib
import sys
from typed_ast import ast3
def add_parents(tree):
for node in ast3.walk(tree):
for child in ast3.iter_child_nodes(node):
child.parent = node # type: ignore
def find_enums(tree):
for node in ast3.walk(tree):
if not isinstance(node, ast3.Assign):
continue
if node.type_comment is None:
continue
if "." not in node.type_comment:
continue
if not node.type_comment.startswith("'"):
continue
comment = node.type_comment.strip("'")
mod, cls = comment.rsplit(".", maxsplit=1)
assert len(node.targets) == 1
name = node.targets[0].id # type: ignore
yield (mod, cls, name)
def main():
for filename in sys.argv[1:]:
tree = ast3.parse(pathlib.Path(filename).read_text())
for mod, cls, name in find_enums(tree):
old = f"{mod}.{name}"
new = f"{mod}.{cls}.{name}"
print(f"{old} {new}")
if __name__ == "__main__":
main()
| [
"philipptemminghoff@googlemail.com"
] | philipptemminghoff@googlemail.com |
59de5f4d684f1fcbb412201731cec5a17abe4081 | 08f42d68aaea64c986a532b86562365f2a6fe3b5 | /src/addlayouttable.py | 3e32ab12ae34fdcd43ccd0ad5cf36d01a2dfbff6 | [
"MIT"
] | permissive | sos82/micropython-microbit-v2 | da65da0c4ae8a3e2189bb598f75fec43d4628007 | a44573c623a9e935257c8db51a3f0c2e75ea10aa | refs/heads/master | 2023-07-19T07:03:40.101099 | 2021-09-11T15:13:42 | 2021-09-11T15:13:42 | 402,835,132 | 0 | 0 | MIT | 2021-09-11T15:13:43 | 2021-09-03T16:35:34 | null | UTF-8 | Python | false | false | 7,683 | py | #!/usr/bin/env python3
"""
Add a flash layout table to a hex firmware for MicroPython on the micro:bit.
Usage: ./addlayouttable.py <firmware.hex> <firmware.map> [-o <combined.hex>]
Output goes to stdout if no filename is given.
The layout table is a sequence of 16-byte entries. The last entry contains the
header (including magic numbers) and is aligned to the end of a page such that
the final byte of the layout table is the final byte of the page it resides in.
This is so it can be quickly and easily searched for.
The layout table has the following format. All integer values are unsigned and
store little endian.
0x00 0x01 0x02 0x03 0x04 0x05 0x06 0x07 0x08 0x09 0x0a 0x0b 0x0c 0x0d 0x0e 0x0f
ID HT REG_PAGE REG_LEN HASH_DATA
(additional regions)
...
MAGIC1 VERSION TABLE_LEN NUM_REG PSIZE_LOG2 MAGIC2
The values are:
ID - 1 byte - region id for this entry, defined by the region
HT - 1 byte - hash type of the region hash data
REG_PAGE - 2 bytes - starting page number of the region
REG_LEN - 4 bytes - length in bytes of the region
HASH_DATA - 8 bytes - data for the hash of this region
HT=0: hash data is empty
HT=1: hash data contains 8 bytes of verbatim data
HT=2: hash data contains a 4-byte pointer to a string
MAGIC1 - 4 bytes - 0x597F30FE
VERSION - 2 bytes - table version (currently 1)
TABLE_LEN - 2 bytes - length in bytes of the table excluding this header row
NUM_REG - 2 bytes - number of regions
PSIZE_LOG2 - 2 bytes - native page size of the flash, log-2
MAGIC2 - 4 bytes - 0xC1B1D79D
"""
import argparse
import binascii
import struct
import sys
IHEX_TYPE_DATA = 0
IHEX_TYPE_EXT_LIN_ADDR = 4
NRF_PAGE_SIZE_LOG2 = 12
NRF_PAGE_SIZE = 1 << NRF_PAGE_SIZE_LOG2
class FlashLayout:
MAGIC1 = 0x597F30FE
MAGIC2 = 0xC1B1D79D
VERSION = 1
REGION_HASH_NONE = 0
REGION_HASH_DATA = 1
REGION_HASH_PTR = 2
def __init__(self):
self.data = b""
self.num_regions = 0
def add_region(
self, region_id, region_addr, region_len, region_hash_type, region_hash=None
):
# Compute/validate the hash data.
if region_addr % NRF_PAGE_SIZE != 0:
assert 0, region_addr
if region_hash_type == FlashLayout.REGION_HASH_NONE:
assert region_hash is None
region_hash = b"\x00" * 8
elif region_hash_type == FlashLayout.REGION_HASH_DATA:
assert len(region_hash) == 8
elif region_hash_type == FlashLayout.REGION_HASH_PTR:
region_hash = struct.pack("<II", region_hash, 0)
# Increase number of regions.
self.num_regions += 1
# Add the region data.
self.data += struct.pack(
"<BBHI8s",
region_id,
region_hash_type,
region_addr // NRF_PAGE_SIZE,
region_len,
region_hash,
)
def finalise(self):
# Add padding to data to align it to 16 bytes.
if len(self.data) % 16 != 0:
self.data += b"\xff" * 16 - len(self.data) % 16
# Add 16-byte "header" at the end with magic numbers and meta data.
self.data += struct.pack(
"<IHHHHI",
FlashLayout.MAGIC1,
FlashLayout.VERSION,
len(self.data),
self.num_regions,
NRF_PAGE_SIZE_LOG2,
FlashLayout.MAGIC2,
)
def make_ihex_record(addr, type, data):
record = struct.pack(">BHB", len(data), addr & 0xFFFF, type) + data
checksum = (-(sum(record))) & 0xFF
return ":%s%02X" % (str(binascii.hexlify(record), "utf8").upper(), checksum)
def parse_map_file(filename, symbols):
parse_symbols = False
with open(filename) as f:
for line in f:
line = line.strip()
if line == "Linker script and memory map":
parse_symbols = True
elif parse_symbols and line.startswith("0x00"):
line = line.split()
if len(line) >= 2 and line[1] in symbols:
symbols[line[1]] = int(line[0], 16)
def output_firmware(dest, firmware, layout_addr, layout_data):
# Output head of firmware.
for line in firmware[:-2]:
print(line, end="", file=dest)
# Output layout data.
print(
make_ihex_record(
0,
IHEX_TYPE_EXT_LIN_ADDR,
struct.pack(">H", layout_addr >> 16),
),
file=dest,
)
for i in range(0, len(layout_data), 16):
chunk = layout_data[i : min(i + 16, len(layout_data))]
print(
make_ihex_record(layout_addr + i, IHEX_TYPE_DATA, chunk),
file=dest,
)
# Output tail of firmware.
print(firmware[-2], end="", file=dest)
print(firmware[-1], end="", file=dest)
def main():
arg_parser = argparse.ArgumentParser(
description="Add UICR region to hex firmware for the micro:bit."
)
arg_parser.add_argument(
"-o",
"--output",
default=sys.stdout,
type=argparse.FileType("wt"),
help="output file (default is stdout)",
)
arg_parser.add_argument("firmware", nargs=1, help="input MicroPython firmware")
arg_parser.add_argument(
"mapfile",
nargs=1,
help="input map file",
)
args = arg_parser.parse_args()
# Read in the firmware from the given hex file.
with open(args.firmware[0], "rt") as f:
firmware = f.readlines()
# Parse the linker map file, looking for the following symbols.
symbols = {
key: None
for key in [
"_binary_softdevice_bin_start",
"__isr_vector",
"__etext",
"__data_start__",
"__data_end__",
"_fs_start",
"_fs_end",
"microbit_version_string",
]
}
parse_map_file(args.mapfile[0], symbols)
# Get the required symbol addresses.
sd_start = symbols["_binary_softdevice_bin_start"]
sd_end = symbols["__isr_vector"]
mp_start = symbols["__isr_vector"]
data_len = symbols["__data_end__"] - symbols["__data_start__"]
mp_end = symbols["__etext"] + data_len
mp_version = symbols["microbit_version_string"]
fs_start = symbols["_fs_start"]
fs_end = symbols["_fs_end"]
# Make the flash layout information table.
layout = FlashLayout()
layout.add_region(1, sd_start, sd_end - sd_start, FlashLayout.REGION_HASH_NONE)
layout.add_region(
2, mp_start, mp_end - mp_start, FlashLayout.REGION_HASH_PTR, mp_version
)
layout.add_region(3, fs_start, fs_end - fs_start, FlashLayout.REGION_HASH_NONE)
layout.finalise()
# Compute layout address.
layout_addr = (
((mp_end >> NRF_PAGE_SIZE_LOG2) << NRF_PAGE_SIZE_LOG2)
+ NRF_PAGE_SIZE
- len(layout.data)
)
if layout_addr < mp_end:
layout_addr += NRF_PAGE_SIZE
if layout_addr >= fs_start:
print("ERROR: Flash layout information overlaps with filesystem")
sys.exit(1)
# Print information.
if args.output is not sys.stdout:
fmt = "{:13} 0x{:05x}..0x{:05x}"
print(fmt.format("SoftDevice", sd_start, sd_end))
print(fmt.format("MicroPython", mp_start, mp_end))
print(fmt.format("Layout table", layout_addr, layout_addr + len(layout.data)))
print(fmt.format("Filesystem", fs_start, fs_end))
# Output the new firmware as a hex file.
output_firmware(args.output, firmware, layout_addr, layout.data)
if __name__ == "__main__":
main()
| [
"damien@micropython.org"
] | damien@micropython.org |
af3d10e72f46f60903f229b8c409d1372fd9b5e8 | b282ccdc65f969e45b36630f9008f54d2087b9c1 | /thrift-0.11.0/test/py/gen-py-slots/ThriftTest/ThriftTest.py | 3f67ce151c41cbf8af305c8a581a5e09db97f816 | [
"MIT",
"LicenseRef-scancode-public-domain-disclaimer",
"FSFAP",
"Apache-2.0"
] | permissive | jdencala10/Proyecto-Distribuidos | 36727fb314159393eda86385d8908bb3ae70a549 | 5b34ece54cc9e8240e9f0106ffbf76449fea507d | refs/heads/master | 2020-03-24T02:26:52.556503 | 2018-08-23T07:44:08 | 2018-08-23T07:44:08 | 142,375,884 | 1 | 0 | null | 2018-07-26T04:59:33 | 2018-07-26T02:10:13 | Python | UTF-8 | Python | false | true | 160,877 | py | #
# Autogenerated by Thrift Compiler (0.11.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py:slots
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
all_structs = []
class Iface(object):
def testVoid(self):
"""
Prints "testVoid()" and returns nothing.
"""
pass
def testString(self, thing):
"""
Prints 'testString("%s")' with thing as '%s'
@param string thing - the string to print
@return string - returns the string 'thing'
Parameters:
- thing
"""
pass
def testBool(self, thing):
"""
Prints 'testBool("%s")' where '%s' with thing as 'true' or 'false'
@param bool thing - the bool data to print
@return bool - returns the bool 'thing'
Parameters:
- thing
"""
pass
def testByte(self, thing):
"""
Prints 'testByte("%d")' with thing as '%d'
The types i8 and byte are synonyms, use of i8 is encouraged, byte still exists for the sake of compatibility.
@param byte thing - the i8/byte to print
@return i8 - returns the i8/byte 'thing'
Parameters:
- thing
"""
pass
def testI32(self, thing):
"""
Prints 'testI32("%d")' with thing as '%d'
@param i32 thing - the i32 to print
@return i32 - returns the i32 'thing'
Parameters:
- thing
"""
pass
def testI64(self, thing):
"""
Prints 'testI64("%d")' with thing as '%d'
@param i64 thing - the i64 to print
@return i64 - returns the i64 'thing'
Parameters:
- thing
"""
pass
def testDouble(self, thing):
"""
Prints 'testDouble("%f")' with thing as '%f'
@param double thing - the double to print
@return double - returns the double 'thing'
Parameters:
- thing
"""
pass
def testBinary(self, thing):
"""
Prints 'testBinary("%s")' where '%s' is a hex-formatted string of thing's data
@param binary thing - the binary data to print
@return binary - returns the binary 'thing'
Parameters:
- thing
"""
pass
def testStruct(self, thing):
"""
Prints 'testStruct("{%s}")' where thing has been formatted into a string of comma separated values
@param Xtruct thing - the Xtruct to print
@return Xtruct - returns the Xtruct 'thing'
Parameters:
- thing
"""
pass
def testNest(self, thing):
"""
Prints 'testNest("{%s}")' where thing has been formatted into a string of the nested struct
@param Xtruct2 thing - the Xtruct2 to print
@return Xtruct2 - returns the Xtruct2 'thing'
Parameters:
- thing
"""
pass
def testMap(self, thing):
"""
Prints 'testMap("{%s")' where thing has been formatted into a string of 'key => value' pairs
separated by commas and new lines
@param map<i32,i32> thing - the map<i32,i32> to print
@return map<i32,i32> - returns the map<i32,i32> 'thing'
Parameters:
- thing
"""
pass
def testStringMap(self, thing):
"""
Prints 'testStringMap("{%s}")' where thing has been formatted into a string of 'key => value' pairs
separated by commas and new lines
@param map<string,string> thing - the map<string,string> to print
@return map<string,string> - returns the map<string,string> 'thing'
Parameters:
- thing
"""
pass
def testSet(self, thing):
"""
Prints 'testSet("{%s}")' where thing has been formatted into a string of values
separated by commas and new lines
@param set<i32> thing - the set<i32> to print
@return set<i32> - returns the set<i32> 'thing'
Parameters:
- thing
"""
pass
def testList(self, thing):
"""
Prints 'testList("{%s}")' where thing has been formatted into a string of values
separated by commas and new lines
@param list<i32> thing - the list<i32> to print
@return list<i32> - returns the list<i32> 'thing'
Parameters:
- thing
"""
pass
def testEnum(self, thing):
"""
Prints 'testEnum("%d")' where thing has been formatted into it's numeric value
@param Numberz thing - the Numberz to print
@return Numberz - returns the Numberz 'thing'
Parameters:
- thing
"""
pass
def testTypedef(self, thing):
"""
Prints 'testTypedef("%d")' with thing as '%d'
@param UserId thing - the UserId to print
@return UserId - returns the UserId 'thing'
Parameters:
- thing
"""
pass
def testMapMap(self, hello):
"""
Prints 'testMapMap("%d")' with hello as '%d'
@param i32 hello - the i32 to print
@return map<i32,map<i32,i32>> - returns a dictionary with these values:
{-4 => {-4 => -4, -3 => -3, -2 => -2, -1 => -1, }, 4 => {1 => 1, 2 => 2, 3 => 3, 4 => 4, }, }
Parameters:
- hello
"""
pass
def testInsanity(self, argument):
"""
So you think you've got this all worked, out eh?
Creates a the returned map with these values and prints it out:
{ 1 => { 2 => argument,
3 => argument,
},
2 => { 6 => <empty Insanity struct>, },
}
@return map<UserId, map<Numberz,Insanity>> - a map with the above values
Parameters:
- argument
"""
pass
def testMulti(self, arg0, arg1, arg2, arg3, arg4, arg5):
"""
Prints 'testMulti()'
@param i8 arg0 -
@param i32 arg1 -
@param i64 arg2 -
@param map<i16, string> arg3 -
@param Numberz arg4 -
@param UserId arg5 -
@return Xtruct - returns an Xtruct with string_thing = "Hello2, byte_thing = arg0, i32_thing = arg1
and i64_thing = arg2
Parameters:
- arg0
- arg1
- arg2
- arg3
- arg4
- arg5
"""
pass
def testException(self, arg):
"""
Print 'testException(%s)' with arg as '%s'
@param string arg - a string indication what type of exception to throw
if arg == "Xception" throw Xception with errorCode = 1001 and message = arg
elsen if arg == "TException" throw TException
else do not throw anything
Parameters:
- arg
"""
pass
def testMultiException(self, arg0, arg1):
"""
Print 'testMultiException(%s, %s)' with arg0 as '%s' and arg1 as '%s'
@param string arg - a string indication what type of exception to throw
if arg0 == "Xception" throw Xception with errorCode = 1001 and message = "This is an Xception"
elsen if arg0 == "Xception2" throw Xception2 with errorCode = 2002 and struct_thing.string_thing = "This is an Xception2"
else do not throw anything
@return Xtruct - an Xtruct with string_thing = arg1
Parameters:
- arg0
- arg1
"""
pass
def testOneway(self, secondsToSleep):
"""
Print 'testOneway(%d): Sleeping...' with secondsToSleep as '%d'
sleep 'secondsToSleep'
Print 'testOneway(%d): done sleeping!' with secondsToSleep as '%d'
@param i32 secondsToSleep - the number of seconds to sleep
Parameters:
- secondsToSleep
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def testVoid(self):
"""
Prints "testVoid()" and returns nothing.
"""
self.send_testVoid()
self.recv_testVoid()
def send_testVoid(self):
self._oprot.writeMessageBegin('testVoid', TMessageType.CALL, self._seqid)
args = testVoid_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testVoid(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testVoid_result()
result.read(iprot)
iprot.readMessageEnd()
return
def testString(self, thing):
"""
Prints 'testString("%s")' with thing as '%s'
@param string thing - the string to print
@return string - returns the string 'thing'
Parameters:
- thing
"""
self.send_testString(thing)
return self.recv_testString()
def send_testString(self, thing):
self._oprot.writeMessageBegin('testString', TMessageType.CALL, self._seqid)
args = testString_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testString(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testString_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testString failed: unknown result")
def testBool(self, thing):
"""
Prints 'testBool("%s")' where '%s' with thing as 'true' or 'false'
@param bool thing - the bool data to print
@return bool - returns the bool 'thing'
Parameters:
- thing
"""
self.send_testBool(thing)
return self.recv_testBool()
def send_testBool(self, thing):
self._oprot.writeMessageBegin('testBool', TMessageType.CALL, self._seqid)
args = testBool_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testBool(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testBool_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testBool failed: unknown result")
def testByte(self, thing):
"""
Prints 'testByte("%d")' with thing as '%d'
The types i8 and byte are synonyms, use of i8 is encouraged, byte still exists for the sake of compatibility.
@param byte thing - the i8/byte to print
@return i8 - returns the i8/byte 'thing'
Parameters:
- thing
"""
self.send_testByte(thing)
return self.recv_testByte()
def send_testByte(self, thing):
self._oprot.writeMessageBegin('testByte', TMessageType.CALL, self._seqid)
args = testByte_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testByte(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testByte_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testByte failed: unknown result")
def testI32(self, thing):
"""
Prints 'testI32("%d")' with thing as '%d'
@param i32 thing - the i32 to print
@return i32 - returns the i32 'thing'
Parameters:
- thing
"""
self.send_testI32(thing)
return self.recv_testI32()
def send_testI32(self, thing):
self._oprot.writeMessageBegin('testI32', TMessageType.CALL, self._seqid)
args = testI32_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testI32(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testI32_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testI32 failed: unknown result")
def testI64(self, thing):
"""
Prints 'testI64("%d")' with thing as '%d'
@param i64 thing - the i64 to print
@return i64 - returns the i64 'thing'
Parameters:
- thing
"""
self.send_testI64(thing)
return self.recv_testI64()
def send_testI64(self, thing):
self._oprot.writeMessageBegin('testI64', TMessageType.CALL, self._seqid)
args = testI64_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testI64(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testI64_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testI64 failed: unknown result")
def testDouble(self, thing):
"""
Prints 'testDouble("%f")' with thing as '%f'
@param double thing - the double to print
@return double - returns the double 'thing'
Parameters:
- thing
"""
self.send_testDouble(thing)
return self.recv_testDouble()
def send_testDouble(self, thing):
self._oprot.writeMessageBegin('testDouble', TMessageType.CALL, self._seqid)
args = testDouble_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testDouble(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testDouble_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testDouble failed: unknown result")
def testBinary(self, thing):
"""
Prints 'testBinary("%s")' where '%s' is a hex-formatted string of thing's data
@param binary thing - the binary data to print
@return binary - returns the binary 'thing'
Parameters:
- thing
"""
self.send_testBinary(thing)
return self.recv_testBinary()
def send_testBinary(self, thing):
self._oprot.writeMessageBegin('testBinary', TMessageType.CALL, self._seqid)
args = testBinary_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testBinary(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testBinary_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testBinary failed: unknown result")
def testStruct(self, thing):
"""
Prints 'testStruct("{%s}")' where thing has been formatted into a string of comma separated values
@param Xtruct thing - the Xtruct to print
@return Xtruct - returns the Xtruct 'thing'
Parameters:
- thing
"""
self.send_testStruct(thing)
return self.recv_testStruct()
def send_testStruct(self, thing):
self._oprot.writeMessageBegin('testStruct', TMessageType.CALL, self._seqid)
args = testStruct_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testStruct(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testStruct_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testStruct failed: unknown result")
def testNest(self, thing):
"""
Prints 'testNest("{%s}")' where thing has been formatted into a string of the nested struct
@param Xtruct2 thing - the Xtruct2 to print
@return Xtruct2 - returns the Xtruct2 'thing'
Parameters:
- thing
"""
self.send_testNest(thing)
return self.recv_testNest()
def send_testNest(self, thing):
self._oprot.writeMessageBegin('testNest', TMessageType.CALL, self._seqid)
args = testNest_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testNest(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testNest_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testNest failed: unknown result")
def testMap(self, thing):
"""
Prints 'testMap("{%s")' where thing has been formatted into a string of 'key => value' pairs
separated by commas and new lines
@param map<i32,i32> thing - the map<i32,i32> to print
@return map<i32,i32> - returns the map<i32,i32> 'thing'
Parameters:
- thing
"""
self.send_testMap(thing)
return self.recv_testMap()
def send_testMap(self, thing):
self._oprot.writeMessageBegin('testMap', TMessageType.CALL, self._seqid)
args = testMap_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testMap(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testMap_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testMap failed: unknown result")
def testStringMap(self, thing):
"""
Prints 'testStringMap("{%s}")' where thing has been formatted into a string of 'key => value' pairs
separated by commas and new lines
@param map<string,string> thing - the map<string,string> to print
@return map<string,string> - returns the map<string,string> 'thing'
Parameters:
- thing
"""
self.send_testStringMap(thing)
return self.recv_testStringMap()
def send_testStringMap(self, thing):
self._oprot.writeMessageBegin('testStringMap', TMessageType.CALL, self._seqid)
args = testStringMap_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testStringMap(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testStringMap_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testStringMap failed: unknown result")
def testSet(self, thing):
"""
Prints 'testSet("{%s}")' where thing has been formatted into a string of values
separated by commas and new lines
@param set<i32> thing - the set<i32> to print
@return set<i32> - returns the set<i32> 'thing'
Parameters:
- thing
"""
self.send_testSet(thing)
return self.recv_testSet()
def send_testSet(self, thing):
self._oprot.writeMessageBegin('testSet', TMessageType.CALL, self._seqid)
args = testSet_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testSet(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testSet_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testSet failed: unknown result")
def testList(self, thing):
"""
Prints 'testList("{%s}")' where thing has been formatted into a string of values
separated by commas and new lines
@param list<i32> thing - the list<i32> to print
@return list<i32> - returns the list<i32> 'thing'
Parameters:
- thing
"""
self.send_testList(thing)
return self.recv_testList()
def send_testList(self, thing):
self._oprot.writeMessageBegin('testList', TMessageType.CALL, self._seqid)
args = testList_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testList(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testList_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testList failed: unknown result")
def testEnum(self, thing):
"""
Prints 'testEnum("%d")' where thing has been formatted into it's numeric value
@param Numberz thing - the Numberz to print
@return Numberz - returns the Numberz 'thing'
Parameters:
- thing
"""
self.send_testEnum(thing)
return self.recv_testEnum()
def send_testEnum(self, thing):
self._oprot.writeMessageBegin('testEnum', TMessageType.CALL, self._seqid)
args = testEnum_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
    def recv_testEnum(self):
        """Read the testEnum reply and return the value, or raise the transported exception."""
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = testEnum_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "testEnum failed: unknown result")
    def testTypedef(self, thing):
        """
        Prints 'testTypedef("%d")' with thing as '%d'
        @param UserId thing - the UserId to print
        @return UserId - returns the UserId 'thing'
        Parameters:
         - thing
        """
        # Synchronous round trip: serialize the request, then block on the reply.
        self.send_testTypedef(thing)
        return self.recv_testTypedef()
    def send_testTypedef(self, thing):
        """Serialize a testTypedef call (envelope + args struct) and flush the transport."""
        self._oprot.writeMessageBegin('testTypedef', TMessageType.CALL, self._seqid)
        args = testTypedef_args()
        args.thing = thing
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_testTypedef(self):
        """Read the testTypedef reply and return the value, or raise the transported exception."""
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = testTypedef_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "testTypedef failed: unknown result")
    def testMapMap(self, hello):
        """
        Prints 'testMapMap("%d")' with hello as '%d'
        @param i32 hello - the i32 to print
        @return map<i32,map<i32,i32>> - returns a dictionary with these values:
          {-4 => {-4 => -4, -3 => -3, -2 => -2, -1 => -1, }, 4 => {1 => 1, 2 => 2, 3 => 3, 4 => 4, }, }
        Parameters:
         - hello
        """
        # Synchronous round trip: serialize the request, then block on the reply.
        self.send_testMapMap(hello)
        return self.recv_testMapMap()
    def send_testMapMap(self, hello):
        """Serialize a testMapMap call (envelope + args struct) and flush the transport."""
        self._oprot.writeMessageBegin('testMapMap', TMessageType.CALL, self._seqid)
        args = testMapMap_args()
        args.hello = hello
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_testMapMap(self):
        """Read the testMapMap reply and return the nested map, or raise the transported exception."""
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = testMapMap_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "testMapMap failed: unknown result")
    def testInsanity(self, argument):
        """
        So you think you've got this all worked out, eh?
        Creates the returned map with these values and prints it out:
          { 1 => { 2 => argument,
                   3 => argument,
                 },
            2 => { 6 => <empty Insanity struct>, },
          }
        @return map<UserId, map<Numberz,Insanity>> - a map with the above values
        Parameters:
         - argument
        """
        # Synchronous round trip: serialize the request, then block on the reply.
        self.send_testInsanity(argument)
        return self.recv_testInsanity()
    def send_testInsanity(self, argument):
        """Serialize a testInsanity call (envelope + args struct) and flush the transport."""
        self._oprot.writeMessageBegin('testInsanity', TMessageType.CALL, self._seqid)
        args = testInsanity_args()
        args.argument = argument
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_testInsanity(self):
        """Read the testInsanity reply and return the map, or raise the transported exception."""
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = testInsanity_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "testInsanity failed: unknown result")
    def testMulti(self, arg0, arg1, arg2, arg3, arg4, arg5):
        """
        Prints 'testMulti()'
        @param i8 arg0 -
        @param i32 arg1 -
        @param i64 arg2 -
        @param map<i16, string> arg3 -
        @param Numberz arg4 -
        @param UserId arg5 -
        @return Xtruct - returns an Xtruct with string_thing = "Hello2", byte_thing = arg0, i32_thing = arg1
        and i64_thing = arg2
        Parameters:
         - arg0
         - arg1
         - arg2
         - arg3
         - arg4
         - arg5
        """
        # Synchronous round trip: serialize the request, then block on the reply.
        self.send_testMulti(arg0, arg1, arg2, arg3, arg4, arg5)
        return self.recv_testMulti()
    def send_testMulti(self, arg0, arg1, arg2, arg3, arg4, arg5):
        """Serialize a testMulti call (envelope + args struct) and flush the transport."""
        self._oprot.writeMessageBegin('testMulti', TMessageType.CALL, self._seqid)
        args = testMulti_args()
        args.arg0 = arg0
        args.arg1 = arg1
        args.arg2 = arg2
        args.arg3 = arg3
        args.arg4 = arg4
        args.arg5 = arg5
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_testMulti(self):
        """Read the testMulti reply and return the Xtruct, or raise the transported exception."""
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = testMulti_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "testMulti failed: unknown result")
    def testException(self, arg):
        """
        Print 'testException(%s)' with arg as '%s'
        @param string arg - a string indicating what type of exception to throw
        if arg == "Xception" throw Xception with errorCode = 1001 and message = arg
        else if arg == "TException" throw TException
        else do not throw anything
        Parameters:
         - arg
        """
        # Synchronous round trip; recv may re-raise a declared Xception.
        self.send_testException(arg)
        self.recv_testException()
    def send_testException(self, arg):
        """Serialize a testException call (envelope + args struct) and flush the transport."""
        self._oprot.writeMessageBegin('testException', TMessageType.CALL, self._seqid)
        args = testException_args()
        args.arg = arg
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_testException(self):
        """Read the (void) testException reply; re-raise any declared or transport exception."""
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = testException_result()
        result.read(iprot)
        iprot.readMessageEnd()
        # err1 carries the IDL-declared Xception, if the server raised one.
        if result.err1 is not None:
            raise result.err1
        return
    def testMultiException(self, arg0, arg1):
        """
        Print 'testMultiException(%s, %s)' with arg0 as '%s' and arg1 as '%s'
        @param string arg - a string indicating what type of exception to throw
        if arg0 == "Xception" throw Xception with errorCode = 1001 and message = "This is an Xception"
        else if arg0 == "Xception2" throw Xception2 with errorCode = 2002 and struct_thing.string_thing = "This is an Xception2"
        else do not throw anything
        @return Xtruct - an Xtruct with string_thing = arg1
        Parameters:
         - arg0
         - arg1
        """
        # Synchronous round trip; recv may re-raise Xception or Xception2.
        self.send_testMultiException(arg0, arg1)
        return self.recv_testMultiException()
    def send_testMultiException(self, arg0, arg1):
        """Serialize a testMultiException call (envelope + args struct) and flush the transport."""
        self._oprot.writeMessageBegin('testMultiException', TMessageType.CALL, self._seqid)
        args = testMultiException_args()
        args.arg0 = arg0
        args.arg1 = arg1
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_testMultiException(self):
        """Read the testMultiException reply: return the Xtruct, or re-raise a declared exception."""
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = testMultiException_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        # err1/err2 carry the IDL-declared Xception/Xception2 alternatives.
        if result.err1 is not None:
            raise result.err1
        if result.err2 is not None:
            raise result.err2
        raise TApplicationException(TApplicationException.MISSING_RESULT, "testMultiException failed: unknown result")
    def testOneway(self, secondsToSleep):
        """
        Print 'testOneway(%d): Sleeping...' with secondsToSleep as '%d'
        sleep 'secondsToSleep'
        Print 'testOneway(%d): done sleeping!' with secondsToSleep as '%d'
        @param i32 secondsToSleep - the number of seconds to sleep
        Parameters:
         - secondsToSleep
        """
        # Fire-and-forget: oneway calls never read a reply.
        self.send_testOneway(secondsToSleep)
    def send_testOneway(self, secondsToSleep):
        """Serialize a testOneway message (ONEWAY envelope, so no reply) and flush the transport."""
        self._oprot.writeMessageBegin('testOneway', TMessageType.ONEWAY, self._seqid)
        args = testOneway_args()
        args.secondsToSleep = secondsToSleep
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
class Processor(Iface, TProcessor):
    """Server-side dispatcher for the ThriftTest service.

    ``process`` reads one call off the input protocol, looks up the matching
    ``process_*`` method by wire name, invokes the user handler, and writes the
    reply (or an exception frame) to the output protocol.
    """
    def __init__(self, handler):
        # handler implements Iface; one entry per RPC, wire name -> unbound method.
        self._handler = handler
        self._processMap = {}
        self._processMap["testVoid"] = Processor.process_testVoid
        self._processMap["testString"] = Processor.process_testString
        self._processMap["testBool"] = Processor.process_testBool
        self._processMap["testByte"] = Processor.process_testByte
        self._processMap["testI32"] = Processor.process_testI32
        self._processMap["testI64"] = Processor.process_testI64
        self._processMap["testDouble"] = Processor.process_testDouble
        self._processMap["testBinary"] = Processor.process_testBinary
        self._processMap["testStruct"] = Processor.process_testStruct
        self._processMap["testNest"] = Processor.process_testNest
        self._processMap["testMap"] = Processor.process_testMap
        self._processMap["testStringMap"] = Processor.process_testStringMap
        self._processMap["testSet"] = Processor.process_testSet
        self._processMap["testList"] = Processor.process_testList
        self._processMap["testEnum"] = Processor.process_testEnum
        self._processMap["testTypedef"] = Processor.process_testTypedef
        self._processMap["testMapMap"] = Processor.process_testMapMap
        self._processMap["testInsanity"] = Processor.process_testInsanity
        self._processMap["testMulti"] = Processor.process_testMulti
        self._processMap["testException"] = Processor.process_testException
        self._processMap["testMultiException"] = Processor.process_testMultiException
        self._processMap["testOneway"] = Processor.process_testOneway
    def process(self, iprot, oprot):
        """Dispatch one incoming message; unknown methods get an UNKNOWN_METHOD exception reply."""
        (name, type, seqid) = iprot.readMessageBegin()
        if name not in self._processMap:
            # Drain the unknown call's payload so the stream stays in sync.
            iprot.skip(TType.STRUCT)
            iprot.readMessageEnd()
            x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
            x.write(oprot)
            oprot.writeMessageEnd()
            oprot.trans.flush()
            return
        else:
            self._processMap[name](self, seqid, iprot, oprot)
        return True
    def process_testVoid(self, seqid, iprot, oprot):
        """Template for all process_* methods: read args, call the handler,
        map exceptions to reply types, write the result envelope.
        TTransportExceptions propagate (connection-level, not answerable)."""
        args = testVoid_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testVoid_result()
        try:
            self._handler.testVoid()
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testVoid", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testString(self, seqid, iprot, oprot):
        args = testString_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testString_result()
        try:
            result.success = self._handler.testString(args.thing)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testString", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testBool(self, seqid, iprot, oprot):
        args = testBool_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testBool_result()
        try:
            result.success = self._handler.testBool(args.thing)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testBool", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testByte(self, seqid, iprot, oprot):
        args = testByte_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testByte_result()
        try:
            result.success = self._handler.testByte(args.thing)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testByte", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testI32(self, seqid, iprot, oprot):
        args = testI32_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testI32_result()
        try:
            result.success = self._handler.testI32(args.thing)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testI32", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testI64(self, seqid, iprot, oprot):
        args = testI64_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testI64_result()
        try:
            result.success = self._handler.testI64(args.thing)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testI64", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testDouble(self, seqid, iprot, oprot):
        args = testDouble_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testDouble_result()
        try:
            result.success = self._handler.testDouble(args.thing)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testDouble", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testBinary(self, seqid, iprot, oprot):
        args = testBinary_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testBinary_result()
        try:
            result.success = self._handler.testBinary(args.thing)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testBinary", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testStruct(self, seqid, iprot, oprot):
        args = testStruct_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testStruct_result()
        try:
            result.success = self._handler.testStruct(args.thing)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testStruct", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testNest(self, seqid, iprot, oprot):
        args = testNest_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testNest_result()
        try:
            result.success = self._handler.testNest(args.thing)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testNest", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testMap(self, seqid, iprot, oprot):
        args = testMap_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testMap_result()
        try:
            result.success = self._handler.testMap(args.thing)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testMap", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testStringMap(self, seqid, iprot, oprot):
        args = testStringMap_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testStringMap_result()
        try:
            result.success = self._handler.testStringMap(args.thing)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testStringMap", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testSet(self, seqid, iprot, oprot):
        args = testSet_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testSet_result()
        try:
            result.success = self._handler.testSet(args.thing)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testSet", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testList(self, seqid, iprot, oprot):
        args = testList_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testList_result()
        try:
            result.success = self._handler.testList(args.thing)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testList", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testEnum(self, seqid, iprot, oprot):
        args = testEnum_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testEnum_result()
        try:
            result.success = self._handler.testEnum(args.thing)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testEnum", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testTypedef(self, seqid, iprot, oprot):
        args = testTypedef_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testTypedef_result()
        try:
            result.success = self._handler.testTypedef(args.thing)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testTypedef", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testMapMap(self, seqid, iprot, oprot):
        args = testMapMap_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testMapMap_result()
        try:
            result.success = self._handler.testMapMap(args.hello)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testMapMap", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testInsanity(self, seqid, iprot, oprot):
        args = testInsanity_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testInsanity_result()
        try:
            result.success = self._handler.testInsanity(args.argument)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testInsanity", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testMulti(self, seqid, iprot, oprot):
        args = testMulti_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testMulti_result()
        try:
            result.success = self._handler.testMulti(args.arg0, args.arg1, args.arg2, args.arg3, args.arg4, args.arg5)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testMulti", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testException(self, seqid, iprot, oprot):
        args = testException_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testException_result()
        try:
            self._handler.testException(args.arg)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        # Declared IDL exception travels as a normal REPLY field, not EXCEPTION.
        except Xception as err1:
            msg_type = TMessageType.REPLY
            result.err1 = err1
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testException", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testMultiException(self, seqid, iprot, oprot):
        args = testMultiException_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = testMultiException_result()
        try:
            result.success = self._handler.testMultiException(args.arg0, args.arg1)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        # Declared IDL exceptions travel as normal REPLY fields, not EXCEPTION.
        except Xception as err1:
            msg_type = TMessageType.REPLY
            result.err1 = err1
        except Xception2 as err2:
            msg_type = TMessageType.REPLY
            result.err2 = err2
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("testMultiException", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_testOneway(self, seqid, iprot, oprot):
        # Oneway: consume the args but never write a reply; errors are only logged.
        args = testOneway_args()
        args.read(iprot)
        iprot.readMessageEnd()
        try:
            self._handler.testOneway(args.secondsToSleep)
        except TTransport.TTransportException:
            raise
        except Exception:
            logging.exception('Exception in oneway handler')
# HELPER FUNCTIONS AND STRUCTURES
class testVoid_args(object):
    """Argument struct for testVoid; the call carries no fields."""
    __slots__ = (
    )
    def read(self, iprot):
        """Deserialize this struct from *iprot* (C fast-path when available)."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # No declared fields: skip anything unexpected on the wire.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot* (C fast-path when available)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testVoid_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Field-by-field equality over the declared slots.
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True
    def __ne__(self, other):
        return not (self == other)
# Register the struct for fastbinary (de)serialization; empty spec = no fields.
all_structs.append(testVoid_args)
testVoid_args.thrift_spec = (
)
class testVoid_result(object):
    """Result struct for testVoid; a void return carries no fields."""
    __slots__ = (
    )
    def read(self, iprot):
        """Deserialize this struct from *iprot* (C fast-path when available)."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # No declared fields: skip anything unexpected on the wire.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot* (C fast-path when available)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testVoid_result')
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Field-by-field equality over the declared slots.
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True
    def __ne__(self, other):
        return not (self == other)
# Register the struct for fastbinary (de)serialization; empty spec = no fields.
all_structs.append(testVoid_result)
testVoid_result.thrift_spec = (
)
class testString_args(object):
    """Argument struct for testString.

    Attributes:
     - thing: the string to echo (wire field id 1)
    """
    __slots__ = (
        'thing',
    )
    def __init__(self, thing=None,):
        self.thing = thing
    def read(self, iprot):
        """Deserialize this struct from *iprot* (C fast-path when available)."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 delivers bytes off the wire; decode to unicode there.
                    self.thing = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot* (C fast-path when available)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testString_args')
        if self.thing is not None:
            oprot.writeFieldBegin('thing', TType.STRING, 1)
            oprot.writeString(self.thing.encode('utf-8') if sys.version_info[0] == 2 else self.thing)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Field-by-field equality over the declared slots.
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True
    def __ne__(self, other):
        return not (self == other)
# Register the struct and its field spec for fastbinary (de)serialization.
all_structs.append(testString_args)
testString_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'thing', 'UTF8', None, ),  # 1
)
class testString_result(object):
    """Result struct for testString.

    Attributes:
     - success: the echoed string (wire field id 0)
    """
    __slots__ = (
        'success',
    )
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        """Deserialize this struct from *iprot* (C fast-path when available)."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    # Python 2 delivers bytes off the wire; decode to unicode there.
                    self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot* (C fast-path when available)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testString_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Field-by-field equality over the declared slots.
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True
    def __ne__(self, other):
        return not (self == other)
# Register the struct and its field spec for fastbinary (de)serialization.
all_structs.append(testString_result)
testString_result.thrift_spec = (
    (0, TType.STRING, 'success', 'UTF8', None, ),  # 0
)
class testBool_args(object):
    """Argument struct for testBool.

    Attributes:
     - thing: the bool to echo (wire field id 1)
    """
    __slots__ = (
        'thing',
    )
    def __init__(self, thing=None,):
        self.thing = thing
    def read(self, iprot):
        """Deserialize this struct from *iprot* (C fast-path when available)."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.BOOL:
                    self.thing = iprot.readBool()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot* (C fast-path when available)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testBool_args')
        if self.thing is not None:
            oprot.writeFieldBegin('thing', TType.BOOL, 1)
            oprot.writeBool(self.thing)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Field-by-field equality over the declared slots.
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True
    def __ne__(self, other):
        return not (self == other)
all_structs.append(testBool_args)
testBool_args.thrift_spec = (
    None,  # 0
    (1, TType.BOOL, 'thing', None, None, ),  # 1
)
class testBool_result(object):
    """Thrift-generated result struct (field id 0 = return value) for testBool.

    Attributes:
     - success

    """
    __slots__ = (
        'success',
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BOOL:
                    self.success = iprot.readBool()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testBool_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testBool_result)
testBool_result.thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ),  # 0
)
class testByte_args(object):
    """Thrift-generated argument struct for the testByte RPC.

    Attributes:
     - thing

    """
    __slots__ = (
        'thing',
    )

    def __init__(self, thing=None,):
        self.thing = thing

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.BYTE:
                    self.thing = iprot.readByte()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testByte_args')
        if self.thing is not None:
            oprot.writeFieldBegin('thing', TType.BYTE, 1)
            oprot.writeByte(self.thing)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testByte_args)
testByte_args.thrift_spec = (
    None,  # 0
    (1, TType.BYTE, 'thing', None, None, ),  # 1
)
class testByte_result(object):
    """Thrift-generated result struct (field id 0 = return value) for testByte.

    Attributes:
     - success

    """
    __slots__ = (
        'success',
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BYTE:
                    self.success = iprot.readByte()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testByte_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BYTE, 0)
            oprot.writeByte(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testByte_result)
testByte_result.thrift_spec = (
    (0, TType.BYTE, 'success', None, None, ),  # 0
)
class testI32_args(object):
    """Thrift-generated argument struct for the testI32 RPC.

    Attributes:
     - thing

    """
    __slots__ = (
        'thing',
    )

    def __init__(self, thing=None,):
        self.thing = thing

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.thing = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testI32_args')
        if self.thing is not None:
            oprot.writeFieldBegin('thing', TType.I32, 1)
            oprot.writeI32(self.thing)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testI32_args)
testI32_args.thrift_spec = (
    None,  # 0
    (1, TType.I32, 'thing', None, None, ),  # 1
)
class testI32_result(object):
    """Thrift-generated result struct (field id 0 = return value) for testI32.

    Attributes:
     - success

    """
    __slots__ = (
        'success',
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I32:
                    self.success = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testI32_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.I32, 0)
            oprot.writeI32(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testI32_result)
testI32_result.thrift_spec = (
    (0, TType.I32, 'success', None, None, ),  # 0
)
class testI64_args(object):
    """Thrift-generated argument struct for the testI64 RPC.

    Attributes:
     - thing

    """
    __slots__ = (
        'thing',
    )

    def __init__(self, thing=None,):
        self.thing = thing

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I64:
                    self.thing = iprot.readI64()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testI64_args')
        if self.thing is not None:
            oprot.writeFieldBegin('thing', TType.I64, 1)
            oprot.writeI64(self.thing)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testI64_args)
testI64_args.thrift_spec = (
    None,  # 0
    (1, TType.I64, 'thing', None, None, ),  # 1
)
class testI64_result(object):
    """Thrift-generated result struct (field id 0 = return value) for testI64.

    Attributes:
     - success

    """
    __slots__ = (
        'success',
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I64:
                    self.success = iprot.readI64()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testI64_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.I64, 0)
            oprot.writeI64(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testI64_result)
testI64_result.thrift_spec = (
    (0, TType.I64, 'success', None, None, ),  # 0
)
class testDouble_args(object):
    """Thrift-generated argument struct for the testDouble RPC.

    Attributes:
     - thing

    """
    __slots__ = (
        'thing',
    )

    def __init__(self, thing=None,):
        self.thing = thing

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.DOUBLE:
                    self.thing = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testDouble_args')
        if self.thing is not None:
            oprot.writeFieldBegin('thing', TType.DOUBLE, 1)
            oprot.writeDouble(self.thing)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testDouble_args)
testDouble_args.thrift_spec = (
    None,  # 0
    (1, TType.DOUBLE, 'thing', None, None, ),  # 1
)
class testDouble_result(object):
    """Thrift-generated result struct (field id 0 = return value) for testDouble.

    Attributes:
     - success

    """
    __slots__ = (
        'success',
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.DOUBLE:
                    self.success = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testDouble_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.DOUBLE, 0)
            oprot.writeDouble(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testDouble_result)
testDouble_result.thrift_spec = (
    (0, TType.DOUBLE, 'success', None, None, ),  # 0
)
class testBinary_args(object):
    """Thrift-generated argument struct for the testBinary RPC.

    Attributes:
     - thing

    """
    __slots__ = (
        'thing',
    )

    def __init__(self, thing=None,):
        self.thing = thing

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        # Binary fields travel as TType.STRING on the wire but stay as bytes.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.thing = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testBinary_args')
        if self.thing is not None:
            oprot.writeFieldBegin('thing', TType.STRING, 1)
            oprot.writeBinary(self.thing)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testBinary_args)
testBinary_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'thing', 'BINARY', None, ),  # 1
)
class testBinary_result(object):
    """Thrift-generated result struct (field id 0 = return value) for testBinary.

    Attributes:
     - success

    """
    __slots__ = (
        'success',
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    self.success = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testBinary_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeBinary(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testBinary_result)
testBinary_result.thrift_spec = (
    (0, TType.STRING, 'success', 'BINARY', None, ),  # 0
)
class testStruct_args(object):
    """Thrift-generated argument struct for the testStruct RPC.

    Attributes:
     - thing

    """
    __slots__ = (
        'thing',
    )

    def __init__(self, thing=None,):
        self.thing = thing

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        # The nested field delegates to Xtruct's own read().
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.thing = Xtruct()
                    self.thing.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testStruct_args')
        if self.thing is not None:
            oprot.writeFieldBegin('thing', TType.STRUCT, 1)
            self.thing.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testStruct_args)
testStruct_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'thing', [Xtruct, None], None, ),  # 1
)
class testStruct_result(object):
    """Thrift-generated result struct (field id 0 = return value) for testStruct.

    Attributes:
     - success

    """
    __slots__ = (
        'success',
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Xtruct()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testStruct_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testStruct_result)
testStruct_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Xtruct, None], None, ),  # 0
)
class testNest_args(object):
    """Thrift-generated argument struct for the testNest RPC.

    Attributes:
     - thing

    """
    __slots__ = (
        'thing',
    )

    def __init__(self, thing=None,):
        self.thing = thing

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        # The nested field delegates to Xtruct2's own read().
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.thing = Xtruct2()
                    self.thing.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testNest_args')
        if self.thing is not None:
            oprot.writeFieldBegin('thing', TType.STRUCT, 1)
            self.thing.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testNest_args)
testNest_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'thing', [Xtruct2, None], None, ),  # 1
)
class testNest_result(object):
    """Thrift-generated result struct (field id 0 = return value) for testNest.

    Attributes:
     - success

    """
    __slots__ = (
        'success',
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Xtruct2()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testNest_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testNest_result)
testNest_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Xtruct2, None], None, ),  # 0
)
class testMap_args(object):
    """Thrift-generated argument struct for the testMap RPC (map<i32, i32>).

    Attributes:
     - thing

    """
    __slots__ = (
        'thing',
    )

    def __init__(self, thing=None,):
        self.thing = thing

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.MAP:
                    self.thing = {}
                    (_ktype264, _vtype265, _size263) = iprot.readMapBegin()
                    for _i267 in range(_size263):
                        _key268 = iprot.readI32()
                        _val269 = iprot.readI32()
                        self.thing[_key268] = _val269
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testMap_args')
        if self.thing is not None:
            oprot.writeFieldBegin('thing', TType.MAP, 1)
            oprot.writeMapBegin(TType.I32, TType.I32, len(self.thing))
            for kiter270, viter271 in self.thing.items():
                oprot.writeI32(kiter270)
                oprot.writeI32(viter271)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testMap_args)
testMap_args.thrift_spec = (
    None,  # 0
    (1, TType.MAP, 'thing', (TType.I32, None, TType.I32, None, False), None, ),  # 1
)
class testMap_result(object):
    """Thrift-generated result struct (field id 0 = return value) for testMap.

    Attributes:
     - success

    """
    __slots__ = (
        'success',
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.MAP:
                    self.success = {}
                    (_ktype273, _vtype274, _size272) = iprot.readMapBegin()
                    for _i276 in range(_size272):
                        _key277 = iprot.readI32()
                        _val278 = iprot.readI32()
                        self.success[_key277] = _val278
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testMap_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.MAP, 0)
            oprot.writeMapBegin(TType.I32, TType.I32, len(self.success))
            for kiter279, viter280 in self.success.items():
                oprot.writeI32(kiter279)
                oprot.writeI32(viter280)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testMap_result)
testMap_result.thrift_spec = (
    (0, TType.MAP, 'success', (TType.I32, None, TType.I32, None, False), None, ),  # 0
)
class testStringMap_args(object):
    """Thrift-generated argument struct for the testStringMap RPC (map<string, string>).

    Attributes:
     - thing

    """
    __slots__ = (
        'thing',
    )

    def __init__(self, thing=None,):
        self.thing = thing

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        # Strings are decoded to unicode on Python 2.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.MAP:
                    self.thing = {}
                    (_ktype282, _vtype283, _size281) = iprot.readMapBegin()
                    for _i285 in range(_size281):
                        _key286 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val287 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.thing[_key286] = _val287
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testStringMap_args')
        if self.thing is not None:
            oprot.writeFieldBegin('thing', TType.MAP, 1)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.thing))
            for kiter288, viter289 in self.thing.items():
                oprot.writeString(kiter288.encode('utf-8') if sys.version_info[0] == 2 else kiter288)
                oprot.writeString(viter289.encode('utf-8') if sys.version_info[0] == 2 else viter289)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testStringMap_args)
testStringMap_args.thrift_spec = (
    None,  # 0
    (1, TType.MAP, 'thing', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 1
)
class testStringMap_result(object):
    """Thrift-generated result struct (field id 0 = return value) for testStringMap.

    Attributes:
     - success

    """
    __slots__ = (
        'success',
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Deserialize from iprot, preferring the accelerated C decoder.
        # Strings are decoded to unicode on Python 2.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.MAP:
                    self.success = {}
                    (_ktype291, _vtype292, _size290) = iprot.readMapBegin()
                    for _i294 in range(_size290):
                        _key295 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val296 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.success[_key295] = _val296
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot, preferring the accelerated C encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testStringMap_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.MAP, 0)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.success))
            for kiter297, viter298 in self.success.items():
                oprot.writeString(kiter297.encode('utf-8') if sys.version_info[0] == 2 else kiter297)
                oprot.writeString(viter298.encode('utf-8') if sys.version_info[0] == 2 else viter298)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testStringMap_result)
testStringMap_result.thrift_spec = (
(0, TType.MAP, 'success', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 0
)
class testSet_args(object):
    """
    Argument struct for the testSet RPC; 'thing' is a set<i32>.

    Attributes:
     - thing

    """

    __slots__ = (
        'thing',
    )

    def __init__(self, thing=None,):
        self.thing = thing

    def read(self, iprot):
        # Deserialize from iprot; accelerated C decoder path when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.SET:
                    self.thing = set()
                    (_etype302, _size299) = iprot.readSetBegin()
                    for _i303 in range(_size299):
                        _elem304 = iprot.readI32()
                        self.thing.add(_elem304)
                    iprot.readSetEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; accelerated C encoder path first.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testSet_args')
        if self.thing is not None:
            oprot.writeFieldBegin('thing', TType.SET, 1)
            oprot.writeSetBegin(TType.I32, len(self.thing))
            for iter305 in self.thing:
                oprot.writeI32(iter305)
            oprot.writeSetEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testSet_args)
testSet_args.thrift_spec = (
    None,  # 0
    (1, TType.SET, 'thing', (TType.I32, None, False), None, ),  # 1
)
class testSet_result(object):
    """
    Result struct for the testSet RPC; 'success' is the returned set<i32>.

    Attributes:
     - success

    """

    __slots__ = (
        'success',
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Deserialize from iprot; accelerated C decoder path when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.SET:
                    self.success = set()
                    (_etype309, _size306) = iprot.readSetBegin()
                    for _i310 in range(_size306):
                        _elem311 = iprot.readI32()
                        self.success.add(_elem311)
                    iprot.readSetEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; accelerated C encoder path first.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testSet_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.SET, 0)
            oprot.writeSetBegin(TType.I32, len(self.success))
            for iter312 in self.success:
                oprot.writeI32(iter312)
            oprot.writeSetEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testSet_result)
testSet_result.thrift_spec = (
    (0, TType.SET, 'success', (TType.I32, None, False), None, ),  # 0
)
class testList_args(object):
    """
    Argument struct for the testList RPC; 'thing' is a list<i32>.

    Attributes:
     - thing

    """

    __slots__ = (
        'thing',
    )

    def __init__(self, thing=None,):
        self.thing = thing

    def read(self, iprot):
        # Deserialize from iprot; accelerated C decoder path when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.LIST:
                    self.thing = []
                    (_etype316, _size313) = iprot.readListBegin()
                    for _i317 in range(_size313):
                        _elem318 = iprot.readI32()
                        self.thing.append(_elem318)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; accelerated C encoder path first.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testList_args')
        if self.thing is not None:
            oprot.writeFieldBegin('thing', TType.LIST, 1)
            oprot.writeListBegin(TType.I32, len(self.thing))
            for iter319 in self.thing:
                oprot.writeI32(iter319)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testList_args)
testList_args.thrift_spec = (
    None,  # 0
    (1, TType.LIST, 'thing', (TType.I32, None, False), None, ),  # 1
)
class testList_result(object):
    """
    Result struct for the testList RPC; 'success' is the returned list<i32>.

    Attributes:
     - success

    """

    __slots__ = (
        'success',
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Deserialize from iprot; accelerated C decoder path when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    self.success = []
                    (_etype323, _size320) = iprot.readListBegin()
                    for _i324 in range(_size320):
                        _elem325 = iprot.readI32()
                        self.success.append(_elem325)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; accelerated C encoder path first.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testList_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.I32, len(self.success))
            for iter326 in self.success:
                oprot.writeI32(iter326)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testList_result)
testList_result.thrift_spec = (
    (0, TType.LIST, 'success', (TType.I32, None, False), None, ),  # 0
)
class testEnum_args(object):
    """
    Argument struct for the testEnum RPC; 'thing' is an enum carried
    on the wire as i32.

    Attributes:
     - thing

    """

    __slots__ = (
        'thing',
    )

    def __init__(self, thing=None,):
        self.thing = thing

    def read(self, iprot):
        # Deserialize from iprot; accelerated C decoder path when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.thing = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; accelerated C encoder path first.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testEnum_args')
        if self.thing is not None:
            oprot.writeFieldBegin('thing', TType.I32, 1)
            oprot.writeI32(self.thing)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testEnum_args)
testEnum_args.thrift_spec = (
    None,  # 0
    (1, TType.I32, 'thing', None, None, ),  # 1
)
class testEnum_result(object):
    """
    Result struct for the testEnum RPC; 'success' is the returned enum
    value (wire type i32).

    Attributes:
     - success

    """

    __slots__ = (
        'success',
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Deserialize from iprot; accelerated C decoder path when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I32:
                    self.success = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; accelerated C encoder path first.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testEnum_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.I32, 0)
            oprot.writeI32(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testEnum_result)
testEnum_result.thrift_spec = (
    (0, TType.I32, 'success', None, None, ),  # 0
)
class testTypedef_args(object):
    """
    Argument struct for the testTypedef RPC; 'thing' is a typedef whose
    underlying wire type is i64.

    Attributes:
     - thing

    """

    __slots__ = (
        'thing',
    )

    def __init__(self, thing=None,):
        self.thing = thing

    def read(self, iprot):
        # Deserialize from iprot; accelerated C decoder path when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I64:
                    self.thing = iprot.readI64()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; accelerated C encoder path first.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testTypedef_args')
        if self.thing is not None:
            oprot.writeFieldBegin('thing', TType.I64, 1)
            oprot.writeI64(self.thing)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testTypedef_args)
testTypedef_args.thrift_spec = (
    None,  # 0
    (1, TType.I64, 'thing', None, None, ),  # 1
)
class testTypedef_result(object):
    """
    Result struct for the testTypedef RPC; 'success' carries the returned
    typedef'd value (wire type i64).

    Attributes:
     - success

    """

    __slots__ = (
        'success',
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Deserialize from iprot; accelerated C decoder path when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I64:
                    self.success = iprot.readI64()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; accelerated C encoder path first.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testTypedef_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.I64, 0)
            oprot.writeI64(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testTypedef_result)
testTypedef_result.thrift_spec = (
    (0, TType.I64, 'success', None, None, ),  # 0
)
class testMapMap_args(object):
    """
    Argument struct for the testMapMap RPC; 'hello' is a plain i32.

    Attributes:
     - hello

    """

    __slots__ = (
        'hello',
    )

    def __init__(self, hello=None,):
        self.hello = hello

    def read(self, iprot):
        # Deserialize from iprot; accelerated C decoder path when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.hello = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; accelerated C encoder path first.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testMapMap_args')
        if self.hello is not None:
            oprot.writeFieldBegin('hello', TType.I32, 1)
            oprot.writeI32(self.hello)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testMapMap_args)
testMapMap_args.thrift_spec = (
    None,  # 0
    (1, TType.I32, 'hello', None, None, ),  # 1
)
class testMapMap_result(object):
    """
    Result struct for the testMapMap RPC; 'success' is a nested
    map<i32, map<i32, i32>>.

    Attributes:
     - success

    """

    __slots__ = (
        'success',
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Deserialize from iprot; accelerated C decoder path when available.
        # The nested map is read with two levels of readMapBegin/End.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.MAP:
                    self.success = {}
                    (_ktype328, _vtype329, _size327) = iprot.readMapBegin()
                    for _i331 in range(_size327):
                        _key332 = iprot.readI32()
                        _val333 = {}
                        (_ktype335, _vtype336, _size334) = iprot.readMapBegin()
                        for _i338 in range(_size334):
                            _key339 = iprot.readI32()
                            _val340 = iprot.readI32()
                            _val333[_key339] = _val340
                        iprot.readMapEnd()
                        self.success[_key332] = _val333
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; accelerated C encoder path first.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testMapMap_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.MAP, 0)
            oprot.writeMapBegin(TType.I32, TType.MAP, len(self.success))
            for kiter341, viter342 in self.success.items():
                oprot.writeI32(kiter341)
                oprot.writeMapBegin(TType.I32, TType.I32, len(viter342))
                for kiter343, viter344 in viter342.items():
                    oprot.writeI32(kiter343)
                    oprot.writeI32(viter344)
                oprot.writeMapEnd()
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testMapMap_result)
testMapMap_result.thrift_spec = (
    (0, TType.MAP, 'success', (TType.I32, None, TType.MAP, (TType.I32, None, TType.I32, None, False), False), None, ),  # 0
)
class testInsanity_args(object):
    """
    Argument struct for the testInsanity RPC; 'argument' is an Insanity
    struct defined elsewhere in this module's ttypes.

    Attributes:
     - argument

    """

    __slots__ = (
        'argument',
    )

    def __init__(self, argument=None,):
        self.argument = argument

    def read(self, iprot):
        # Deserialize from iprot; accelerated C decoder path when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    # NOTE(review): Insanity.read is invoked class-level as a
                    # factory here, unlike the Xtruct()/instance.read pattern
                    # used for other structs in this file — confirm Insanity
                    # defines read that way in its ttypes module.
                    self.argument = Insanity.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; accelerated C encoder path first.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testInsanity_args')
        if self.argument is not None:
            oprot.writeFieldBegin('argument', TType.STRUCT, 1)
            self.argument.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testInsanity_args)
testInsanity_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'argument', [Insanity, None], None, ),  # 1
)
class testInsanity_result(object):
    """
    Result struct for the testInsanity RPC; 'success' is a nested
    map<i64, map<i32, Insanity>>.

    Attributes:
     - success

    """

    __slots__ = (
        'success',
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Deserialize from iprot; accelerated C decoder path when available.
        # Outer map is keyed by i64, inner map by i32 with Insanity values.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.MAP:
                    self.success = {}
                    (_ktype346, _vtype347, _size345) = iprot.readMapBegin()
                    for _i349 in range(_size345):
                        _key350 = iprot.readI64()
                        _val351 = {}
                        (_ktype353, _vtype354, _size352) = iprot.readMapBegin()
                        for _i356 in range(_size352):
                            _key357 = iprot.readI32()
                            # Class-level factory read, matching testInsanity_args.
                            _val358 = Insanity.read(iprot)
                            _val351[_key357] = _val358
                        iprot.readMapEnd()
                        self.success[_key350] = _val351
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; accelerated C encoder path first.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testInsanity_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.MAP, 0)
            oprot.writeMapBegin(TType.I64, TType.MAP, len(self.success))
            for kiter359, viter360 in self.success.items():
                oprot.writeI64(kiter359)
                oprot.writeMapBegin(TType.I32, TType.STRUCT, len(viter360))
                for kiter361, viter362 in viter360.items():
                    oprot.writeI32(kiter361)
                    viter362.write(oprot)
                oprot.writeMapEnd()
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testInsanity_result)
testInsanity_result.thrift_spec = (
    (0, TType.MAP, 'success', (TType.I64, None, TType.MAP, (TType.I32, None, TType.STRUCT, [Insanity, None], False), False), None, ),  # 0
)
class testMulti_args(object):
    """
    Argument struct for the testMulti RPC: one field per parameter,
    covering byte, i32, i64, map<i16, string>, and two more numerics.

    Attributes:
     - arg0
     - arg1
     - arg2
     - arg3
     - arg4
     - arg5

    """

    __slots__ = (
        'arg0',
        'arg1',
        'arg2',
        'arg3',
        'arg4',
        'arg5',
    )

    def __init__(self, arg0=None, arg1=None, arg2=None, arg3=None, arg4=None, arg5=None,):
        self.arg0 = arg0
        self.arg1 = arg1
        self.arg2 = arg2
        self.arg3 = arg3
        self.arg4 = arg4
        self.arg5 = arg5

    def read(self, iprot):
        # Deserialize from iprot; accelerated C decoder path when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.BYTE:
                    self.arg0 = iprot.readByte()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.arg1 = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I64:
                    self.arg2 = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.MAP:
                    self.arg3 = {}
                    (_ktype364, _vtype365, _size363) = iprot.readMapBegin()
                    for _i367 in range(_size363):
                        _key368 = iprot.readI16()
                        # Decode bytes to text on Python 2 only.
                        _val369 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.arg3[_key368] = _val369
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.I32:
                    self.arg4 = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.I64:
                    self.arg5 = iprot.readI64()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; accelerated C encoder path first.  Each field
        # is written only when set (not None).
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testMulti_args')
        if self.arg0 is not None:
            oprot.writeFieldBegin('arg0', TType.BYTE, 1)
            oprot.writeByte(self.arg0)
            oprot.writeFieldEnd()
        if self.arg1 is not None:
            oprot.writeFieldBegin('arg1', TType.I32, 2)
            oprot.writeI32(self.arg1)
            oprot.writeFieldEnd()
        if self.arg2 is not None:
            oprot.writeFieldBegin('arg2', TType.I64, 3)
            oprot.writeI64(self.arg2)
            oprot.writeFieldEnd()
        if self.arg3 is not None:
            oprot.writeFieldBegin('arg3', TType.MAP, 4)
            oprot.writeMapBegin(TType.I16, TType.STRING, len(self.arg3))
            for kiter370, viter371 in self.arg3.items():
                oprot.writeI16(kiter370)
                oprot.writeString(viter371.encode('utf-8') if sys.version_info[0] == 2 else viter371)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.arg4 is not None:
            oprot.writeFieldBegin('arg4', TType.I32, 5)
            oprot.writeI32(self.arg4)
            oprot.writeFieldEnd()
        if self.arg5 is not None:
            oprot.writeFieldBegin('arg5', TType.I64, 6)
            oprot.writeI64(self.arg5)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testMulti_args)
testMulti_args.thrift_spec = (
    None,  # 0
    (1, TType.BYTE, 'arg0', None, None, ),  # 1
    (2, TType.I32, 'arg1', None, None, ),  # 2
    (3, TType.I64, 'arg2', None, None, ),  # 3
    (4, TType.MAP, 'arg3', (TType.I16, None, TType.STRING, 'UTF8', False), None, ),  # 4
    (5, TType.I32, 'arg4', None, None, ),  # 5
    (6, TType.I64, 'arg5', None, None, ),  # 6
)
class testMulti_result(object):
    """
    Result struct for the testMulti RPC; 'success' is an Xtruct struct.

    Attributes:
     - success

    """

    __slots__ = (
        'success',
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Deserialize from iprot; accelerated C decoder path when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Xtruct()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; accelerated C encoder path first.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testMulti_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testMulti_result)
testMulti_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Xtruct, None], None, ),  # 0
)
class testException_args(object):
    """
    Argument struct for the testException RPC; 'arg' is a UTF-8 string.

    Attributes:
     - arg

    """

    __slots__ = (
        'arg',
    )

    def __init__(self, arg=None,):
        self.arg = arg

    def read(self, iprot):
        # Deserialize from iprot; accelerated C decoder path when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Decode bytes to text on Python 2 only.
                    self.arg = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; accelerated C encoder path first.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testException_args')
        if self.arg is not None:
            oprot.writeFieldBegin('arg', TType.STRING, 1)
            oprot.writeString(self.arg.encode('utf-8') if sys.version_info[0] == 2 else self.arg)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testException_args)
testException_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'arg', 'UTF8', None, ),  # 1
)
class testException_result(object):
    """
    Result struct for the testException RPC; carries no return value,
    only the declared exception 'err1' (an Xception) when raised.

    Attributes:
     - err1

    """

    __slots__ = (
        'err1',
    )

    def __init__(self, err1=None,):
        self.err1 = err1

    def read(self, iprot):
        # Deserialize from iprot; accelerated C decoder path when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.err1 = Xception()
                    self.err1.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; accelerated C encoder path first.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testException_result')
        if self.err1 is not None:
            oprot.writeFieldBegin('err1', TType.STRUCT, 1)
            self.err1.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testException_result)
testException_result.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'err1', [Xception, None], None, ),  # 1
)
class testMultiException_args(object):
    """
    Attributes:
     - arg0
     - arg1
    """
    # Thrift-generated request-argument struct for testMultiException();
    # do not edit by hand -- regenerate from the .thrift IDL instead.
    __slots__ = (
        'arg0',
        'arg1',
    )

    def __init__(self, arg0=None, arg1=None,):
        self.arg0 = arg0
        self.arg1 = arg1

    def read(self, iprot):
        # Fast path: C-accelerated decoder when transport and spec allow it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Decode UTF-8 on Python 2 only; Python 3 already yields str.
                    self.arg0 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.arg1 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testMultiException_args')
        if self.arg0 is not None:
            oprot.writeFieldBegin('arg0', TType.STRING, 1)
            oprot.writeString(self.arg0.encode('utf-8') if sys.version_info[0] == 2 else self.arg0)
            oprot.writeFieldEnd()
        if self.arg1 is not None:
            oprot.writeFieldBegin('arg1', TType.STRING, 2)
            oprot.writeString(self.arg1.encode('utf-8') if sys.version_info[0] == 2 else self.arg1)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
# Register the args struct; 'UTF8' marks string fields for text decoding.
all_structs.append(testMultiException_args)
testMultiException_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'arg0', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'arg1', 'UTF8', None, ),  # 2
)
class testMultiException_result(object):
    """
    Attributes:
     - success
     - err1
     - err2
    """
    # Thrift-generated result struct for testMultiException(): exactly one of
    # the normal return (success) or an exception (err1/err2) is populated.
    __slots__ = (
        'success',
        'err1',
        'err2',
    )

    def __init__(self, success=None, err1=None, err2=None,):
        self.success = success
        self.err1 = err1
        self.err2 = err2

    def read(self, iprot):
        # Fast path: C-accelerated decoder when transport and spec allow it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Xtruct()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.err1 = Xception()
                    self.err1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.err2 = Xception2()
                    self.err2.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testMultiException_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.err1 is not None:
            oprot.writeFieldBegin('err1', TType.STRUCT, 1)
            self.err1.write(oprot)
            oprot.writeFieldEnd()
        if self.err2 is not None:
            oprot.writeFieldBegin('err2', TType.STRUCT, 2)
            self.err2.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
# Register the result struct; each struct field carries its class + spec pair.
all_structs.append(testMultiException_result)
testMultiException_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [Xtruct, None], None, ),  # 0
    (1, TType.STRUCT, 'err1', [Xception, None], None, ),  # 1
    (2, TType.STRUCT, 'err2', [Xception2, None], None, ),  # 2
)
class testOneway_args(object):
    """
    Attributes:
     - secondsToSleep
    """
    # Thrift-generated argument struct for the oneway testOneway() call.
    # Oneway calls have no matching *_result struct.
    __slots__ = (
        'secondsToSleep',
    )

    def __init__(self, secondsToSleep=None,):
        self.secondsToSleep = secondsToSleep

    def read(self, iprot):
        # Fast path: C-accelerated decoder when transport and spec allow it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.secondsToSleep = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('testOneway_args')
        if self.secondsToSleep is not None:
            oprot.writeFieldBegin('secondsToSleep', TType.I32, 1)
            oprot.writeI32(self.secondsToSleep)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to enforce.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, getattr(self, key))
             for key in self.__slots__]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for attr in self.__slots__:
            my_val = getattr(self, attr)
            other_val = getattr(other, attr)
            if my_val != other_val:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)
all_structs.append(testOneway_args)
testOneway_args.thrift_spec = (
    None,  # 0
    (1, TType.I32, 'secondsToSleep', None, None, ),  # 1
)
# Resolve forward references inside the collected specs, then drop the
# temporary registry so it does not leak into the module namespace.
fix_spec(all_structs)
del all_structs
| [
"lufezuro@gmail.com"
] | lufezuro@gmail.com |
7d1848df9c7503861b5d6999a25c43feae4a4c14 | d6b984433d6674f6f31a441726659ec65d2d7eb1 | /teco/dproj/piel/models.py | 1062722c5e1a327b1715ffe0af4fe0417ba2c564 | [] | no_license | jjconti/twisted-examples | f0de8dc7a3b2b97c9f9f81fcac969805a7573167 | badbec8196164919b773b427f1130bf432ea98f4 | refs/heads/master | 2016-09-06T03:13:53.252380 | 2011-03-09T20:29:56 | 2011-03-09T20:29:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,541 | py | # This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin.py sqlcustom [appname]'
# into your database.
from django.db import models
class Magnitud(models.Model):
    # Physical magnitude (e.g. temperature) with its valid range and units.
    id = models.IntegerField(primary_key=True, unique=True)
    nombre = models.CharField(max_length=120, blank=True)
    minimo = models.DecimalField(null=True, max_digits=13, decimal_places=1, blank=True)  # lower bound
    maximo = models.DecimalField(null=True, max_digits=13, decimal_places=1, blank=True)  # upper bound
    unidad = models.CharField(max_length=60, blank=True)  # full unit name
    uni = models.CharField(max_length=15, blank=True)  # abbreviated unit

    class Meta:
        db_table = u'magnitudes'

    def __unicode__(self):
        return u"%s en %s" % (self.nombre, self.uni)
class Medida(models.Model):
    # Free-text measurement description.
    id = models.IntegerField(primary_key=True, unique=True)
    texto = models.CharField(max_length=240, blank=True)

    class Meta:
        db_table = u'medidas'

    def __unicode__(self):
        return self.texto
class Provincia(models.Model):
    # Province, grouped by zone.
    id = models.IntegerField(primary_key=True, unique=True)
    # NOTE(review): plain integer rather than a ForeignKey -- presumably
    # references a zone table that is not modeled here; confirm before use.
    zona_id = models.IntegerField(null=True, blank=True)
    nombre = models.CharField(max_length=150, blank=True)

    class Meta:
        db_table = u'provincias'
class Sitio(models.Model):
    # Physical site that hosts robots, reachable on a dedicated port.
    id = models.IntegerField(primary_key=True, unique=True)
    nombre = models.CharField(max_length=120, blank=True)
    ccc = models.CharField(max_length=9, blank=True)  # site code, used as display name
    port = models.IntegerField(unique=True)
    online = models.IntegerField()  # 0: online, 1: error

    class Meta:
        db_table = u'sitios'

    def __unicode__(self):
        return self.ccc
class RobotTipoIO(models.Model):
    # Kind of I/O channel a robot exposes (analog input, register, digital
    # output, digital input). ``campo`` names the matching column in Valor.
    id = models.IntegerField(primary_key=True, unique=True)
    nombre = models.CharField(max_length=60, blank=True)
    campo = models.CharField(max_length=10, blank=True)  # column name in the Valores table

    class Meta:
        db_table = u'robots_tipoio'

    # The four predicates classify the channel by its human-readable name.
    def esEA(self):
        return self.nombre.startswith('Entrada Analogica')

    def esRE(self):
        return self.nombre.startswith('Registro')

    def esSD(self):
        return self.nombre.startswith('Salida Digital')

    def esED(self):
        return self.nombre.startswith('Entrada Digital')

    def __unicode__(self):
        return self.nombre
class RobotTipo(models.Model):
    # Robot model/type with its channel mask.
    id = models.IntegerField(primary_key=True, unique=True)
    nombre = models.CharField(max_length=120, blank=True)
    mascara = models.CharField(max_length=500, blank=True)  # channel mask string

    class Meta:
        db_table = u'robots_tipos'

    def __unicode__(self):
        return self.nombre
class Robot(models.Model):
    """Modbus device installed at a site, with its latest reading cached."""
    id = models.IntegerField(primary_key=True, unique=True)
    tipo = models.ForeignKey(RobotTipo, null=True, db_column='tipo', blank=True)
    nombre = models.CharField(max_length=120)
    mbdir = models.CharField(max_length=6, blank=True)  # Modbus address
    observaciones = models.TextField(blank=True)
    sitio = models.ForeignKey(Sitio, null=True, db_column='sitio', blank=True)
    # NOTE(review): this related_name contains a dot, which is not a valid
    # reverse query name; left untouched because fixing it would change the
    # public ORM API for existing callers.
    last_valor = models.ForeignKey('Valor', null=True, blank=True, db_column='last_valor', related_name='Valor.robot')
    online = models.IntegerField()  # 0: online, 1: error

    class Meta:
        db_table = u'robots'

    def __unicode__(self):
        return u"%s en %s" % (self.mbdir, self.sitio)

    def config_dict(self, gcond=None):
        """Group this robot's RobotConfig rows by I/O kind.

        gcond: optional value to filter on ``graficable``; ``None`` keeps all
        rows. Returns a dict with keys 'entradasanalogicas', 'registros',
        'salidasdigitales' and 'entradasdigitales' mapping to lists of
        RobotConfig instances.
        """
        configuracion = self.robotconfig_set.all()
        if gcond is not None:
            configuracion = configuracion.filter(graficable=gcond)
        return {
            'entradasanalogicas': [c for c in configuracion if c.tipoio.esEA()],
            'registros': [c for c in configuracion if c.tipoio.esRE()],
            'salidasdigitales': [c for c in configuracion if c.tipoio.esSD()],
            'entradasdigitales': [c for c in configuracion if c.tipoio.esED()],
        }

    def confignames_dict(self, gcond=None):
        """Same grouping as config_dict(), but each group holds the Valor
        column names (RobotTipoIO.campo) instead of the RobotConfig rows.

        Derived from config_dict() so the two groupings cannot drift apart.
        """
        grouped = self.config_dict(gcond)
        return dict((key, [c.tipoio.campo for c in values])
                    for key, values in grouped.items())
class RobotConfig(models.Model):
    # One configured I/O channel of a robot: ties the channel kind to the
    # magnitude/measurement it reports and whether users may edit/plot it.
    id = models.IntegerField(primary_key=True, unique=True)
    robot = models.ForeignKey(Robot, db_column='robot')
    tipoio = models.ForeignKey(RobotTipoIO, null=True, db_column='tipoio', blank=True)
    magnitud = models.ForeignKey(Magnitud, null=True, db_column='magnitud', blank=True)
    medida = models.ForeignKey(Medida, null=True, db_column='medida', blank=True)
    editable = models.BooleanField()
    graficable = models.BooleanField()  # whether the channel is plotted

    class Meta:
        db_table = u'robots_config'
class Valor(models.Model):
    # One reading snapshot from a robot: up to 10 analog inputs (ea*),
    # 10 registers (re*), 10 digital outputs (sd*) and 10 digital inputs
    # (ed*). Which columns are meaningful is defined by RobotConfig/
    # RobotTipoIO.campo.
    id = models.IntegerField(primary_key=True, unique=True)
    robot = models.ForeignKey(Robot, null=True, db_column='robot', blank=True)
    # Analog inputs.
    ea1 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    ea2 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    ea3 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    ea4 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    ea5 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    ea6 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    ea7 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    ea8 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    ea9 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    ea10 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    # Registers.
    re1 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    re2 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    re3 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    re4 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    re5 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    re6 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    re7 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    re8 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    re9 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    re10 = models.DecimalField(null=True, max_digits=5, decimal_places=2, blank=True)
    # Digital outputs.
    sd1 = models.IntegerField(null=True, blank=True)
    sd2 = models.IntegerField(null=True, blank=True)
    sd3 = models.IntegerField(null=True, blank=True)
    sd4 = models.IntegerField(null=True, blank=True)
    sd5 = models.IntegerField(null=True, blank=True)
    sd6 = models.IntegerField(null=True, blank=True)
    sd7 = models.IntegerField(null=True, blank=True)
    sd8 = models.IntegerField(null=True, blank=True)
    sd9 = models.IntegerField(null=True, blank=True)
    sd10 = models.IntegerField(null=True, blank=True)
    # Digital inputs.
    ed1 = models.IntegerField(null=True, blank=True)
    ed2 = models.IntegerField(null=True, blank=True)
    ed3 = models.IntegerField(null=True, blank=True)
    ed4 = models.IntegerField(null=True, blank=True)
    ed5 = models.IntegerField(null=True, blank=True)
    ed6 = models.IntegerField(null=True, blank=True)
    ed7 = models.IntegerField(null=True, blank=True)
    ed8 = models.IntegerField(null=True, blank=True)
    ed9 = models.IntegerField(null=True, blank=True)
    ed10 = models.IntegerField(null=True, blank=True)
    timestamp = models.DateTimeField(null=True)

    class Meta:
        db_table = u'valores'
        ordering = ('id',)
# Severity levels for Evento.tipo (info / warning / alert).
EVENTO_CHOICES = (
    ('I', 'Informacion'),
    ('W', 'Advertencia'),
    ('A', 'Alerta'),
)
class Evento(models.Model):
    # Timestamped system event with a severity level.
    timestamp = models.DateTimeField(null=True, auto_now=True)
    tipo = models.CharField(max_length=1, choices=EVENTO_CHOICES)
    texto = models.CharField(max_length=60, blank=True)

    def __unicode__(self):
        return "%s: %s - %s" % (self.timestamp, self.tipo, self.texto)
# Signals
from django.db.models.signals import post_save
def record_last_valor(sender, instance, created, **kwargs):
    """post_save handler for Valor: cache the newest reading on its robot.

    Only fires for newly created rows; updates to an existing Valor leave
    robot.last_valor untouched. (Leftover debug ``print`` statements were
    removed -- they spammed stdout on every Valor save.)
    """
    if created:
        instance.robot.last_valor = instance
        instance.robot.save()
# Register the handler so it runs after every Valor save.
post_save.connect(record_last_valor, sender=Valor)
| [
"jjconti@c17424ab-9666-0410-ad2c-99304602335c"
] | jjconti@c17424ab-9666-0410-ad2c-99304602335c |
053d0a2b38d37b96a8c34261610917f248080b94 | a27d1d9d264df6667def06ec7f17c47ec9ab1e94 | /apps/auth/migrations/0001_initial.py | 5a118c35532a74c15fe9282bfd914be1699f979e | [] | no_license | maxrevilo/barachiel-server | 47d3cad9b0319e32824623863d2c3e6ea25c7bae | 0d1506e1b33ad7d160355027ca4a6b356a1a3c9a | refs/heads/master | 2021-01-14T11:48:57.524534 | 2015-04-12T22:28:58 | 2015-04-12T22:47:42 | 25,367,746 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,049 | py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration for the ``auth`` app: creates the Permission,
    Group (plus its permissions M2M table) and Confirmation tables.

    Auto-generated by South; the ``models`` dict below is the frozen ORM
    snapshot and must stay in sync with any manual change.
    """

    def forwards(self, orm):
        # Apply the migration: create tables, the M2M table and constraints.
        # Adding model 'Permission'
        db.create_table(u'auth_permission', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=50)),
            ('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])),
            ('codename', self.gf('django.db.models.fields.CharField')(max_length=100)),
        ))
        db.send_create_signal(u'auth', ['Permission'])
        # Adding unique constraint on 'Permission', fields ['content_type', 'codename']
        db.create_unique(u'auth_permission', ['content_type_id', 'codename'])
        # Adding model 'Group'
        db.create_table(u'auth_group', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=80)),
        ))
        db.send_create_signal(u'auth', ['Group'])
        # Adding M2M table for field permissions on 'Group'
        m2m_table_name = db.shorten_name(u'auth_group_permissions')
        db.create_table(m2m_table_name, (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('group', models.ForeignKey(orm[u'auth.group'], null=False)),
            ('permission', models.ForeignKey(orm[u'auth.permission'], null=False))
        ))
        db.create_unique(m2m_table_name, ['group_id', 'permission_id'])
        # Adding model 'Confirmation'
        db.create_table(u'auth_confirmation', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='confirmations', to=orm['users.User'])),
            ('token', self.gf('django.db.models.fields.CharField')(unique=True, max_length=8)),
            ('confirmation_type', self.gf('django.db.models.fields.CharField')(max_length=1)),
            ('email', self.gf('django.db.models.fields.EmailField')(max_length=75, null=True, blank=True)),
            ('phone', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, blank=True)),
            ('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
        ))
        db.send_create_signal(u'auth', ['Confirmation'])

    def backwards(self, orm):
        # Reverse the migration: drop constraints first, then the tables.
        # Removing unique constraint on 'Permission', fields ['content_type', 'codename']
        db.delete_unique(u'auth_permission', ['content_type_id', 'codename'])
        # Deleting model 'Permission'
        db.delete_table(u'auth_permission')
        # Deleting model 'Group'
        db.delete_table(u'auth_group')
        # Removing M2M table for field permissions on 'Group'
        db.delete_table(db.shorten_name(u'auth_group_permissions'))
        # Deleting model 'Confirmation'
        db.delete_table(u'auth_confirmation')

    # Frozen ORM snapshot used by South while running this migration.
    models = {
        u'auth.confirmation': {
            'Meta': {'object_name': 'Confirmation'},
            'confirmation_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'phone': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
            'token': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '8'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'confirmations'", 'to': u"orm['users.User']"})
        },
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'likes.like': {
            'Meta': {'ordering': "['_created_at']", 'unique_together': "(('liker', 'liked'),)", 'object_name': 'Like'},
            '_created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            '_updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
            'anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'geo_lat': ('django.db.models.fields.FloatField', [], {'default': '0'}),
            'geo_lon': ('django.db.models.fields.FloatField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'liked': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'likes_to'", 'to': u"orm['users.User']"}),
            'liker': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'likes_from'", 'to': u"orm['users.User']"})
        },
        u'multimedia.media': {
            'Meta': {'ordering': "['-_created_at']", 'object_name': 'Media'},
            '_created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'type': ('django.db.models.fields.CharField', [], {'default': "'I'", 'max_length': '1'}),
            'uploader': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'uploads'", 'to': u"orm['users.User']"})
        },
        u'users.user': {
            'Meta': {'object_name': 'User'},
            '_created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            '_email_wtc': ('django.db.models.fields.EmailField', [], {'default': 'None', 'max_length': '75', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
            '_updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
            'age': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'bio': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
            'birthday': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '75'}),
            'geo_lat': ('django.db.models.fields.FloatField', [], {'default': '0'}),
            'geo_lon': ('django.db.models.fields.FloatField', [], {'default': '0'}),
            'geo_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'how_can_see_age': ('django.db.models.fields.CharField', [], {'default': "'E'", 'max_length': '1'}),
            'how_can_see_bio': ('django.db.models.fields.CharField', [], {'default': "'E'", 'max_length': '1'}),
            'how_can_see_email': ('django.db.models.fields.CharField', [], {'default': "'L'", 'max_length': '1'}),
            'how_can_see_name': ('django.db.models.fields.CharField', [], {'default': "'E'", 'max_length': '1'}),
            'how_can_see_picture': ('django.db.models.fields.CharField', [], {'default': "'E'", 'max_length': '1'}),
            'how_can_see_ss': ('django.db.models.fields.CharField', [], {'default': "'E'", 'max_length': '1'}),
            'how_can_see_tel': ('django.db.models.fields.CharField', [], {'default': "'L'", 'max_length': '1'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'liked_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'likes': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'liked'", 'symmetrical': 'False', 'through': u"orm['likes.Like']", 'to': u"orm['users.User']"}),
            'likes_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'off_radar': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'picture': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'user'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['multimedia.Media']"}),
            'r_interest': ('django.db.models.fields.CharField', [], {'default': "'U'", 'max_length': '1'}),
            'sentimental_status': ('django.db.models.fields.CharField', [], {'default': "'U'", 'max_length': '1'}),
            'sex': ('django.db.models.fields.CharField', [], {'default': "'U'", 'max_length': '1'}),
            'tel': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'blank': 'True'})
        }
    }

    complete_apps = ['auth']
"oliver.a.perez.c@gmail.com"
] | oliver.a.perez.c@gmail.com |
41bfd5f933f91531e6dc13ac226c7483fe24d43f | 6b7a05e55adacfebf9374ad83bc4c7221b070089 | /1_learning/setup_notes.py | 352dfa532a78363b176521df3c17676d61a283f5 | [] | no_license | xcaperton/sandbox | 33c051dce900aea40b6e28a7c2efaa78a37fed96 | 824ab0d445bce7b5eb6608085f1dff0f1f7f5aec | refs/heads/master | 2021-01-20T11:50:22.750251 | 2017-09-26T05:23:08 | 2017-09-26T05:23:08 | 101,691,740 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 569 | py |
'''
- Install Xcode
- Homebrew
- Install Python3
- Install virtualenv
- Set up virtualenv
- Install Sublime Text 3
- Install Package Manager
	- Install Anaconda, BracketHighlighter, Enhanced Sidebar
	- Adjust Anaconda settings
- Install Virtual Env and add to the path with virtualenv locations
'''
'''
1. Start Sublime Text
2. Magic > VirtualEnv > Activate > py3
3. Magic > VirtualEnv > SublimeREPL - Python
'''
'''
To install matplotlib:
brew install pkg-config
brew install freetype
pip install matplotlib
import matplotlib
matplotlib.use('TkAgg')
''' | [
"capertja@gmail.com"
] | capertja@gmail.com |
732f101b290d2baa7f1a5ae0864a6806f02bd45d | 8648ab97e7bc1c2027c72c4fd02a5409e0af4338 | /App/opencv_detect.py | baeecee4dcf0ec9a01b123bba1fd8863427a789a | [] | no_license | Dom1nik/cloud-face-recognition | 2d10223be6cb829b23c6d61c2066fbe8f446f096 | 43a9896e8ef7107ca28e52b66bac2ffac95d6076 | refs/heads/master | 2020-09-21T06:01:28.684474 | 2016-09-08T13:29:30 | 2016-09-08T13:29:30 | 67,703,078 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 634 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
@author: Dominik
DESCRIPTION:
Function for detecting frontal faces on an input frame.
Detection is done with the haar classifier library using pretrained face model .xml file (included in standard OpenCV build).
INPUT:
img = cv2.imread("/path/to/image/img")
"""
import cv2
def detect(img):
    """Return face bounding boxes [x, y, w, h] found in ``img``.

    Uses OpenCV's pretrained frontal-face Haar cascade. Returns an empty
    list when no face is detected, so callers can always rely on len().
    """
    # Load the cascade only once and cache it on the function: re-parsing
    # the XML model on every frame is needlessly expensive.
    cascade = getattr(detect, "_cascade", None)
    if cascade is None:
        cascade = cv2.CascadeClassifier("haarcascade_frontalface_default.xml")
        detect._cascade = cascade
    rects = cascade.detectMultiScale(img, 1.3, 4, cv2.cv.CV_HAAR_SCALE_IMAGE, (20, 20))
    # detectMultiScale returns an empty tuple when nothing matched;
    # normalise that to a plain list.
    if len(rects) == 0:
        return []
    return rects
| [
"dominik.bezevan@gmail.com"
] | dominik.bezevan@gmail.com |
65e514a1d5bbf9f7ee9f075596c4c977c659d98a | a6f2e58d4afd73386477cfd55a868016ac36061a | /Tkinter/07_windows_example.py | 8a30d77b1d7e622a2035385fcb4a19c3039ba894 | [] | no_license | griadooss/HowTos | c3d2d4862da7072aa4dcba47fcd0c448aecd9e74 | 97c881f7b4855ba73fda5648ecd5b9e45a5fd0fb | refs/heads/master | 2021-01-06T20:41:48.196407 | 2014-11-22T06:28:36 | 2014-11-22T06:28:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,355 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
ZetCode Tkinter tutorial
In this script, we use the grid
manager to create a more complicated
layout.
author: Jan Bodnar
last modified: December 2010
website: www.zetcode.com
"""
#In this example, we will use a Label widget, a Text widget and four buttons.
from Tkinter import Tk, Text, BOTH, W, N, E, S
from ttk import Frame, Button, Label, Style
class Example(Frame):
    """Demonstrates the Tkinter grid geometry manager with a label, a text
    area and four buttons (from the ZetCode tutorial)."""

    def __init__(self, parent):
        Frame.__init__(self, parent)
        self.parent = parent
        self.initUI()

    def initUI(self):
        # Build and lay out all widgets.
        self.parent.title("Windows")
        self.style = Style()
        self.style.theme_use("default")
        self.pack(fill=BOTH, expand=1)
        # Column 1 / row 3 absorb extra space on resize; ``pad`` adds spacing
        # around the button column (3) and the bottom row (5).
        self.columnconfigure(1, weight=1)
        self.columnconfigure(3, pad=7)
        self.rowconfigure(3, weight=1)
        self.rowconfigure(5, pad=7)
        # Label in the first free cell (row 0, column 0), anchored west with
        # some padding around its text.
        lbl = Label(self, text="Windows")
        lbl.grid(sticky=W, pady=4, padx=5)
        # The Text widget starts at row 1, column 0, spans 2 columns and
        # 4 rows, and sticks to all four sides, so it grows in every
        # direction when the window is resized.
        area = Text(self)
        area.grid(row=1, column=0, columnspan=2, rowspan=4,
            padx=5, sticky=E+W+S+N)
        # Button column on the right, plus Help/OK in the bottom row.
        abtn = Button(self, text="Activate")
        abtn.grid(row=1, column=3)
        cbtn = Button(self, text="Close")
        cbtn.grid(row=2, column=3, pady=4)
        hbtn = Button(self, text="Help")
        hbtn.grid(row=5, column=0, padx=5)
        obtn = Button(self, text="OK")
        obtn.grid(row=5, column=3)
def main():
    """Create the root window, build the UI and start the Tk event loop."""
    root = Tk()
    root.geometry("350x300+300+300")
    # The Frame registers itself with ``root``; no need to keep a reference
    # (the previous unused ``app`` local has been dropped).
    Example(root)
    root.mainloop()
root.mainloop()
if __name__ == '__main__':
main() | [
"github@zimpics.com"
] | github@zimpics.com |
ddee9b71d0ccc3475deacdbbc5ebffbed153f8ab | 2b6d1170a26480e2b54a01a47448247cbb7ace72 | /blog/forms.py | 8a3fbb3b983996a908fbb1cfcf4b71edacb7d717 | [] | no_license | BillyBoy26/MyBlog | 9a03db86cc58b679f76e4d1ad03e11b8ae970387 | 4718c585da569c7b252dc32253db0aec8980cca6 | refs/heads/master | 2021-01-01T19:41:26.035524 | 2017-08-08T18:26:47 | 2017-08-08T18:26:47 | 98,651,535 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 602 | py | from django import forms
from blog.models import Article
from blog.widgets import RichText
class ContactForm(forms.Form):
    # Simple contact form; ``resend`` lets the sender request a copy of the
    # message by email.
    sujet = forms.CharField(max_length=100)
    message = forms.CharField(widget=forms.Textarea)
    sender = forms.EmailField(label="Votre adresse email")
    resend = forms.BooleanField(help_text="Cochez si vous souhaitez obtenir une copie du mail envoyé.", required=False)
class ArticleForm(forms.ModelForm):
    # ModelForm over Article exposing every model field, with a rich-text
    # editor widget for the article body.
    class Meta:
        model = Article
        fields = '__all__'
        widgets = {
            'content': RichText(attrs={'rows': 15, 'cols': 100}),
        }
| [
"benjamin.scheinhardt@gmail.com"
] | benjamin.scheinhardt@gmail.com |
ffca2ea84bc267e1b18b0a67ac1f41fbc0a91093 | fb5d1d93c7432912f7f5e1d9ff50309f49bf6b16 | /average_diff.py | eea61b3f8dcf731fcdab15da8a4e21fc6cfeea17 | [] | no_license | Eileencaraway/Joyjit-s-Python-Script | 0089635bdd8609d9ae28aa03f0029304d16542b0 | 48c29fa7f1a0060b7fc5b791ce635e1ecdeb2e98 | refs/heads/master | 2021-09-20T15:21:14.717982 | 2018-08-11T06:56:57 | 2018-08-11T06:56:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,970 | py | #! /usr/bin/env python
# given a datafile with energy, pressure, stress,... data, perform averages
import sys
import os
import StringIO
import math
import numpy
from numpy import *
def readlines(filename):
file = open(filename)
sys.stderr.write('import '+filename)
data = file.readlines()
file.close()
return data
if len(sys.argv) < 2:
sys.exit(0)
if not os.path.exists(sys.argv[1]):
sys.exit(0)
xmin = 1.5
xmax = 2.5
data0 = readlines(sys.argv[1])
Nlines = len(data0)
sys.stderr.write(' '+str(Nlines)+'\n')
linetype = empty(Nlines, bool)
vals = []
vals2 = []
Ndatalines = 0
meanvals = []
meanvalues = empty(0)
for i in range(0, Nlines):
words = data0[i].split()
if words[0][0]=="#" or words[0]=="&":
linetype[i] = False
vals.append(empty(0))
vals2.append(empty(0))
print data0[i],
else:
linetype[i] = True
values = fromstring(data0[i],sep=" ")
values[1:] /= values[0]
vals.append(values)
vals2.append(values*values)
if values[0] >= xmin and values[0] <= xmax:
if Ndatalines == 0:
meanvalues = zeros(len(values))
meanvalues += values
Ndatalines += 1
meanvalues /= Ndatalines
meanvals.append(meanvalues)
Nfiles = 1
for filename in sys.argv[2:]:
data = readlines(filename)
sys.stderr.write(' '+str(len(data)))
if len(data) != Nlines:
sys.stderr.write(' size mismatch')
if len(data)>=Nlines:
Ndatalines = 0
for i in range(0, Nlines):
if linetype[i]:
if data[i][0] != data0[i][0]:
sys.stderr.write(' file mismatch '+str(data0[i][0])+' '+sys.argv[1]+' '+filename+'\n')
values = fromstring(data[i],sep=" ")
values[1:] /= values[0]
vals[i] += values
vals2[i] += values*values
if values[0] >= xmin and values[0] <= xmax:
if Ndatalines == 0:
meanvalues = zeros(len(values))
meanvalues += values
Ndatalines += 1
meanvalues /= Ndatalines
meanvals.append(meanvalues)
Nfiles += 1
sys.stderr.write('\n')
for i in range(0, Nlines):
if linetype[i]:
vals[i] /= Nfiles
vals2[i] /= Nfiles
print vals[i][0],
for j in range(1,len(vals[i])):
print vals[i][j], 1.3*sqrt((vals2[i][j]-vals[i][j]*vals[i][j])/(Nfiles-1)),
print
meanmean = meanvals[0]
mean2mean = meanvals[0] * meanvals[0]
for i in range(1,len(meanvals)):
meanmean += meanvals[i]
mean2mean += meanvals[i] * meanvals[i]
meanmean /= len(meanvals)
mean2mean /= len(meanvals)
Ninst = Nfiles
if Ninst == 1:
Ninst = 2
print "# mean values (",Nfiles,"files, ",Ndatalines,"lines) = ",
for i in range(1,len(meanmean)):
print meanmean[i],1.3*sqrt((mean2mean[i]-meanmean[i]*meanmean[i])/(Ninst-1)),
print
| [
"noreply@github.com"
] | Eileencaraway.noreply@github.com |
1132ad23972287ae21036c12c2e792127aab84a5 | 79eaba10d7ab519a374766abbb136f3789dafb02 | /src/torch/src/transforms/random_rotate.py | dc1a20bffd53c216d77b501db786a7c95b0bfcf4 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | jelmr/pc_temporal_interpolation | 878e28c35fb9ec3689a5058de66c1206c0db08f7 | 0bc230ca0855876ef7aa8c1fe8a16764097a8a25 | refs/heads/master | 2022-01-22T02:33:25.710936 | 2022-01-13T18:34:19 | 2022-01-13T18:34:19 | 202,586,854 | 8 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,215 | py | import math
import random
import torch
class RandomRotate(object):
r""" Rotates along the specified axis.
Args:
degree_range (tuple): Range from which rotation degree is sampled
axis (int, optional): The rotation axis
"""
def __init__(self, degree_range=(0, 360), axis=0):
self.degree_range = degree_range
self.axis = axis
def __call__(self, data):
degree = math.pi * random.uniform(*self.degree_range) / 180.0
sin, cos = math.sin(degree), math.cos(degree)
if self.axis == 0:
matrix = [[1, 0, 0], [0, cos, sin], [0, -sin, cos]]
elif self.axis == 1:
matrix = [[cos, 0, -sin], [0, 1, 0], [sin, 0, cos]]
else:
matrix = [[cos, sin, 0], [-sin, cos, 0], [0, 0, 1]]
matrix = torch.tensor(matrix)
data.pos[..., :3] = torch.mm(data.pos[..., :3], matrix.to(data.pos.dtype).to(data.pos.device))
data.y[..., :3] = torch.mm(data.y[..., :3], matrix.to(data.y.dtype).to(data.y.device))
return data
def __repr__(self):
return '{}({}, axis={})'.format(self.__class__.__name__, self.degree_range,
self.axis)
| [
"jelmer@jelmermulder.dev"
] | jelmer@jelmermulder.dev |
5a9b7f424fc004b04e1fb24d613acf643a7e9ce9 | 7f664a130d13acbbdc52d93cf7a674b58694383e | /graph2geojson.py | dfc8a4f13fa06cea69058d9e70da1401bb5b63ec | [] | no_license | mitragithub/MorseAnalysis | 0569c7d4ae9f8ce02dce00cfa38e0488a63d57f0 | be91e80972b873fcb4345171a66582e1ba69995a | refs/heads/master | 2021-10-24T21:03:35.118160 | 2019-03-28T22:22:51 | 2019-03-28T22:22:51 | 175,226,117 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,570 | py | import os
import sys
from geojson import Feature, FeatureCollection, LineString
import geojson as gjson
def make_geojson(vertices, edges, output_path):
dir_path = os.path.dirname(output_path)
if not os.path.isdir(dir_path):
os.mkdir(dir_path)
features = []
for i in range(len(edges)):
e = edges[i]
u, v = vertices[e[0]], vertices[e[1]]
features.append(Feature(id=i, geometry=LineString([(u[0], u[1]), (v[0], v[1])]),
properties={"stroke-width": 1}))
with open(output_path, 'w') as file:
file.write(gjson.dumps(FeatureCollection(features), sort_keys=True))
if __name__ == '__main__':
id = int(sys.argv[1])
input_folder = sys.argv[2]
output_folder = sys.argv[3]
input_vertices = os.path.join(input_folder, 'new_vert.txt')
input_edges = os.path.join(input_folder, 'new_edge.txt')
# input_vertices = sys.argv[1]
# input_edges = sys.argv[2]
neuron_name = sys.argv[4]
vertices =[]
edges = []
with open(input_vertices) as input_v:
for line in input_v:
data = line.strip().split()[:2]
v = [int(x) for x in data]
v[1] = -v[1] # flip y axis
vertices.append(v)
with open(input_edges) as input_e:
for line in input_e:
data = line.strip().split()
e = [int(x) for x in data]
edges.append(e)
output_path = os.path.join(output_folder, neuron_name+ '_' + '{0:04d}'.format(id) + '.json')
make_geojson(vertices, edges, output_path) | [
"1257332448@qq.com"
] | 1257332448@qq.com |
9b7adb5678f71064de435ee4ec62090b3e6ba879 | 951fd820620ce0056574399fbc349f86b70be61e | /pic2video.py | 8df9d002c0d0eedd0546aafc975edc37f358e23c | [] | no_license | nzhang258/script | 7adbf8e583f6c09a2de7b1324ffd903e3ff6f6f4 | dade3fdf5351cb5da9ed9a3ca97bb79011bacdd6 | refs/heads/master | 2021-08-08T02:19:32.544219 | 2020-09-08T03:48:58 | 2020-09-08T03:48:58 | 217,058,304 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,336 | py | import os
import cv2
import sys
def v2p(path):
cap = cv2.VideoCapture(path)
suc = cap.isOpened() # 是否成功打开
frame_count = 0
pics=[]
while suc:
frame_count += 1
suc, frame = cap.read()
if suc:
pics.append(frame)
cap.release()
return pics
def p2v(pic, out, fps, ith):
f_all = {1: cv2.VideoWriter_fourcc('P','I','M','1'),
2: cv2.VideoWriter_fourcc('M','J','P','G'),
3:cv2.VideoWriter_fourcc('M','P','4','2'),
4:cv2.VideoWriter_fourcc('D','I','V','3'),
5:cv2.VideoWriter_fourcc('D','I','V','X'),
6:cv2.VideoWriter_fourcc('U','2','6','3'),
7:cv2.VideoWriter_fourcc('I','2','6','3'),
8:cv2.VideoWriter_fourcc('F','L','V','1')}
fourcc = f_all[ith]
h,w,c = pic[0].shape
vw = cv2.VideoWriter(out, fourcc, fps, (w,h))
for i in pic:
vw.write(i)
vw.release()
def p2v1(pic, out, fps, ith):
'''
fourcc = cv2.VideoWriter_fourcc('P','I','M','1')
fourcc = cv2.VideoWriter_fourcc('M','J','P','G')
fourcc = cv2.VideoWriter_fourcc('M','P','4','2')
fourcc = cv2.VideoWriter_fourcc('D','I','V','3')
fourcc = cv2.VideoWriter_fourcc('D','I','V','X')
fourcc = cv2.VideoWriter_fourcc('U','2','6','3')
fourcc = cv2.VideoWriter_fourcc('I','2','6','3')
fourcc = cv2.VideoWriter_fourcc('F','L','V','1')
'''
f_all = {1: cv2.VideoWriter_fourcc('P','I','M','1'),
2: cv2.VideoWriter_fourcc('M','J','P','G'),
3:cv2.VideoWriter_fourcc('M','P','4','2'),
4:cv2.VideoWriter_fourcc('D','I','V','3'),
5:cv2.VideoWriter_fourcc('D','I','V','X'),
6:cv2.VideoWriter_fourcc('U','2','6','3'),
7:cv2.VideoWriter_fourcc('I','2','6','3'),
8:cv2.VideoWriter_fourcc('F','L','V','1')}
fourcc = f_all[ith]
#fourcc = cv2.VideoWriter_fourcc(*"mp4v")
a = os.listdir(pic)
a.sort()
img0 = cv2.imread(os.path.join(pic,a[0]))
h,w,c = img0.shape
vw = cv2.VideoWriter(out, fourcc, fps, (w,h))
for i in a:
img = cv2.imread(os.path.join(pic,i))
vw.write(img)
vw.release()
if __name__ == '__main__':
in_vid = sys.argv[1]
out = sys.argv[2]
fps = 30
imgs = v2p(in_vid)
p2v(imgs,out,fps,5)
| [
"zhangning2@sensetime.com"
] | zhangning2@sensetime.com |
68288fa5a676dae1fae0a2c54eae585c3ac4a8fc | 185219fb50e0c39ac15bd3a748995890072bd3e3 | /twitoff/__init__.py | 4dff86edd3e9ba852e9f39dbd436e0ebbffc3841 | [
"MIT"
] | permissive | standroidbeta/TwitOff | 02e2631f64c12da726b526e6edba3859a6002fb8 | e036efc9fabefa285b9455bef666b59a2c8ca190 | refs/heads/master | 2022-12-14T11:58:00.403599 | 2020-03-31T00:46:53 | 2020-03-31T00:46:53 | 192,377,327 | 0 | 1 | MIT | 2022-12-08T03:12:53 | 2019-06-17T15:57:42 | Python | UTF-8 | Python | false | false | 101 | py | """
Entry point of TwitOff flask application.
"""
from .app import create_app
APP = create_app()
| [
"43893890+standroidbeta@users.noreply.github.com"
] | 43893890+standroidbeta@users.noreply.github.com |
0d1da58ff7e050c877c7671d26db53edf573d298 | bc0f6c2b0e2a338e26c8999d82420f1c46d5914f | /0x0F-python-object_relational_mapping/12-model_state_update_id_2.py | b77314b0283c5d2e3bb85f68d553ceb5d22e7ce6 | [] | no_license | kingsleyabonyi/alx-higher_level_programming | 4ea72b42a14dbdf0ae3d7c1064118fc70ddf491f | e7331951fda92475181fae63829699fe7e1ad5be | refs/heads/main | 2023-03-18T15:40:53.473874 | 2023-01-23T17:13:57 | 2023-01-23T17:13:57 | 507,995,009 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 776 | py | #!/usr/bin/python3
""" a script that changes an object property and saves changes to the
database and lists all State objects from the database hbtn_0e_6_usa
"""
from sys import argv
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from model_state import Base, State
if __name__ == "__main__":
engine = create_engine('mysql+mysqldb://{}:{}@localhost/{}'
.format(argv[1], argv[2], argv[3]),
pool_pre_ping=True)
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
session = Session()
for state in session.query(State).order_by(State.id).all():
if state.id == 2:
state.name = 'New Mexico'
session.commit()
session.close()
| [
"kinabonyi@gmail.com"
] | kinabonyi@gmail.com |
4ae8bba43babc3a0e6cb78977983aa4716d69be8 | 7e4ab0da41d47d953f517a4e15e6163ecd2368ef | /ufindAPI/urls.py | 69d8a19a65e3fa04923c79d0b28df397e63fc0d6 | [] | no_license | EkhlasMridha/ufind | 8318d7117274a726da649d74df85a4131ace0aa6 | f5d3eb2d962cbabf586a1c3bf48047416bc79fcb | refs/heads/master | 2023-03-06T12:30:37.317579 | 2021-02-21T13:42:02 | 2021-02-21T13:42:02 | 335,329,161 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,241 | py | from django.urls import path, include
from ufindAPI.identityapi import (
login_api_view,
register_api_view,
get_profile_api,
get_userlist_view,
reset_request_view,
change_password
)
from ufindAPI.missingpersonapi import (
submit_case_view,
get_cases_view,
case_data_found,
get_all_cases,
match_person_view,
delete_case_view,
mark_as_solved,
get_solved_cases
)
urlpatterns = [
path('login', login_api_view, name="login"),
path('register', register_api_view, name='register'),
path('profile', get_profile_api, name="profile"),
path('case', submit_case_view, name="case"),
path('get-cases', get_cases_view, name='get-cases'),
path('found', case_data_found, name="found"),
path('allcase', get_all_cases, name="allcase"),
path('alluser', get_userlist_view, name='alluser'),
path('match', match_person_view, name="match"),
path('reset-password', reset_request_view, name='reset-password'),
path('change-password', change_password, name='change-password'),
path('delete-case', delete_case_view, name='delete-case'),
path('mark-solved', mark_as_solved, name='make-solved'),
path('solved-case', get_solved_cases, name='solved-cases')
]
| [
"41969194+EkhlasMridha@users.noreply.github.com"
] | 41969194+EkhlasMridha@users.noreply.github.com |
afaefbecf4649d448df7f63bf2a5ebeca8d36bd7 | e96a98171cfa49a318717ce0189f3143109846ad | /StartupOracle.py | 532fa20c7ea23452a8dba5104715848f35a6cab4 | [] | no_license | ixxl/python | cab4343ccad8f665b02582e922d3ccaa58e27c0f | 98e8ad7afec021ea0e2840aa00f4f50ab606b862 | refs/heads/main | 2023-08-04T16:34:00.023484 | 2021-09-10T01:28:30 | 2021-09-10T01:28:30 | 398,481,745 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,058 | py | import cx_Oracle
import xlrd
import paramiko
Sheet = xlrd.open_workbook(r'C:\Users\Administrator\Desktop\Python\工控机程序更新\IPList.xlsx')
data = Sheet.sheets()[0]
nrow = data.nrows
TimeoutList = []
# 获取连接信息
def _create_ssh(ip):
transport = paramiko.Transport((ip, 22))
transport.connect(username='root', password='dhERIS@2018*#')
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # 自动添加策略,保存服务器的主机名和秘钥信息,不添加,不在本地hnow_hosts文件中的记录将无法连接
ssh._transport = transport
sftp = paramiko.SFTPClient.from_transport(transport)
return ssh,sftp
for i in range(1, nrow):
ip = data.row_values(i)[0]
conInfo = "ERIS/ERIS@%s:1521/ERIS"%(ip)
try:
ssh,sftp = _create_ssh(ip)
print("%s 连接成功!"%(ip))
ssh.exec_command("su - oracle;sqlplus / as sysdba;")
except Exception as err:
print("%s 连接超时: "%(ip),err)
TimeoutList.append(ip) | [
"15251768772@163.com"
] | 15251768772@163.com |
d885da387b228b60ffedd347951d8fb392ee92ea | 4f2f3888a1f3085224459ea3860ca1ad1abc0c96 | /include/cpf_cnpj.py | 736662742ea564f942bede00e421f7560417c276 | [] | no_license | iegawa/Cadastro | 3ae228c43f422893b21d8c53b5d85b0205cb997c | 2e958609dd881c2232792b6166a69b9e9c760a69 | refs/heads/main | 2023-02-16T02:15:58.005606 | 2021-01-07T13:26:03 | 2021-01-07T13:26:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,247 | py | from validate_docbr import CPF, CNPJ
class Documento():
@staticmethod
def criaDocumento(documento):
tamanho_documento = len(str(documento))
if tamanho_documento == 11:
return Cpf(documento)
elif tamanho_documento == 14:
return Cnpj(documento)
else:
raise ValueError("Quantidade de digitos incorreta")
class Cpf():
def __init__(self, documento):
if self.valido(documento):
self.cpf = documento
else:
raise ValueError("CPF inválido!")
def __str__(self):
return self.formatar()
def valido(self, documento):
validador = CPF()
return validador.validate(documento)
def formatar(self):
mascara = CPF()
return mascara.mask(self.cpf)
class Cnpj():
def __init__(self, documento):
if self.valido(documento):
self.cnpj = documento
else:
raise ValueError("CNPJ inválido!")
def __str__(self):
return self.formatar()
def valido(self, documento):
validador = CNPJ()
return validador.validate(documento)
def formatar(self):
mascara = CNPJ()
return mascara.mask(self.cnpj)
| [
"fabianaiegawa@gmail.com"
] | fabianaiegawa@gmail.com |
e2c9dd04f446e45635efa13aa746f7b4f1de12b4 | 91ac211b67cc7bffffecbc0faf514e69f5a7d22c | /app/models.py | baf994975e5b27ec40ae2f1bfa963191b23237fc | [] | no_license | UDTechLLC/python-web-camera | 35acfb36bec245b08e912d8761090ea3ad6dc5ac | ff71cca791b8bb7e4d1c777f284a7e360698c75f | refs/heads/master | 2020-04-13T08:37:05.535156 | 2018-12-25T13:47:59 | 2018-12-25T13:47:59 | 163,086,446 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,606 | py | from sqlalchemy.orm import validates
from sqlalchemy import event
import transliterate
from transliterate import detect_language, translit
from web.app import db
from web.app.serverside.serverside_table import ServerSideTable
from web.app.serverside import table_schemas
class Package(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(140))
manufacturer = db.Column(db.String(100))
amount = db.Column(db.Integer, default=0)
mg_content = db.Column(db.SmallInteger, default=0)
ml_volume = db.Column(db.SmallInteger, default=0)
release_form = db.Column(db.String(100))
width = db.Column(db.Integer)
length = db.Column(db.Integer)
height = db.Column(db.Integer)
barcode = db.Column(db.String(100))
train_status = db.Column(db.SmallInteger)
timestamp = db.Column(db.DateTime)
unique_name = db.Column(db.String(140))
def __init__(self, form):
self.name = form['name']
self.width = form['width']
self.height = form['height']
self.length = form['length']
self.manufacturer = form['manufacturer']
self.amount = form['amount']
self.release_form = form['release_form']
self.mg_content = form['mg_content']
self.ml_volume = form['ml_volume']
self.barcode = form['barcode']
self.unique_name = self.translite_string()
self.train_status = 0
@validates('amount', 'mg_content', 'ml_volume')
def validate_name(self, key, value):
if value == '':
value = 0
return value
def __repr__(self):
return '<Package %r>' % (self.name)
def save_changes(self, form, new=False):
"""
Save the changes to the database
"""
# Get data from form and assign it to the correct attributes
# of the SQLAlchemy table object
self.name = form['name']
self.width = form['width']
self.height = form['height']
self.length = form['length']
self.manufacturer = form['manufacturer']
self.amount = form['amount']
self.release_form = form['release_form']
self.mg_content = form['mg_content']
self.ml_volume = form['ml_volume']
self.barcode = form['barcode']
self.unique_name = self.translite_string()
# utilities.change_folder_name(self.barcode, utilities.DATASET_NEW_NAME)
if new:
# Add the new album to the database
db.session.add(self)
# commit the data to the database
db.session.commit()
def translite_string(self):
language = detect_language(self.name)
print(language)
# ru uk
# translite_string = translit(self.name, 'uk', reversed=True)
try:
translite_string = translit(self.name, language, reversed=True)
except transliterate.exceptions.LanguageDetectionError:
translite_string = self.name
translite_string = translite_string.lower()
exists_package = Package.get_by_unique(translite_string)
# check dataset path folder
if exists_package:
translite_string += str(self.barcode)
result = translite_string.replace(' ', '_').lower()
return result
@staticmethod
def get_by_unique(name):
model = db.session.query(Package).filter_by(unique_name=name).first()
return model
@property
def serialize(self):
return {
'id': self.id,
'name': self.name,
'manufacturer': self.manufacturer,
'barcode': self.barcode,
'release_form': self.release_form,
'amount': self.amount,
'mg_content': self.mg_content,
'ml_volume': self.ml_volume,
'train_status': self.train_status,
}
@staticmethod
def make_data_packages_from_db():
newlist = []
for row in Package.query.all():
newlist.append(row.serialize)
return newlist
# standard decorator style
@event.listens_for(Package, 'before_delete')
def receive_before_delete(mapper, connection, target):
print(target)
print(connection)
print(mapper)
event.listen(Package, 'before_delete', receive_before_delete)
class TableBuilder(object):
def collect_data_clientside(self):
return {'data': Package.make_data_packages_from_db()}
@staticmethod
def collect_data_serverside(request):
columns = table_schemas.SERVERSIDE_TABLE_COLUMNS
data = Package.make_data_packages_from_db()
result = ServerSideTable(request, data, columns).output_result()
return result
class Setting(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(140))
value = db.Column(db.String(140))
def __init__(self, name=None, value=None):
self.name = name
self.value = value
def __repr__(self):
return '<Setting %r>' % (self.name)
@staticmethod
def init_setting_model(name, value):
s = Setting(name, value)
db.session.add(s)
db.session.commit()
return s
@staticmethod
def get_by_name(name):
setting = db.session.query(Setting).filter_by(name=name).first()
return setting
def change_value(self, value):
"""
Save the changes to the database
"""
# Get data from form and assign it to the correct attributes
# of the SQLAlchemy table object
self.value = value
# commit the data to the database
db.session.commit()
| [
"kolyvayko@gmail.com"
] | kolyvayko@gmail.com |
2f2592e1db11f7bff6d903d41fb51c84de5aea81 | 2095905eb285d2faae2df8367db6a99049b654cf | /stvenv/Scripts/rst2odt.py | 86f8bb13fe84d1e246459200c571495ea18811ef | [] | no_license | andreishvaidyuk/sattracker | f9124cfa7ec7775af0f240554c32cabeeaadb77d | e19ba543667c135fc67d04b188bcc68e1b1096d6 | refs/heads/master | 2020-04-27T12:20:45.679874 | 2019-05-31T12:07:46 | 2019-05-31T12:07:46 | 174,328,522 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 820 | py | #!C:\Users\a.shvaidyuk\PythonProjects\sattracker\stvenv\Scripts\python.exe
# $Id: rst2odt.py 5839 2009-01-07 19:09:28Z dkuhlman $
# Author: Dave Kuhlman <dkuhlman@rexx.com>
# Copyright: This module has been placed in the public domain.
"""
A front end to the Docutils Publisher, producing OpenOffice documents.
"""
import sys
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline_to_binary, default_description
from docutils.writers.odf_odt import Writer, Reader
description = ('Generates OpenDocument/OpenOffice/ODF documents from '
'standalone reStructuredText sources. ' + default_description)
writer = Writer()
reader = Reader()
output = publish_cmdline_to_binary(reader=reader, writer=writer,
description=description)
| [
"a.shvaidyuk@Ghalam.local"
] | a.shvaidyuk@Ghalam.local |
5c15640b4f7eb55c5d541c9c3602492e2224363a | 3eedbb6f656cda07068177bf8959800c6b5a3948 | /main.py | e92505bbc7bde7077980eaffefae33c1cfdd130d | [] | no_license | 0-Raigi-0/SPO | 904a7d54e9ed0b12463d330cf6c063a0f42f1640 | 52ed3dcf9de7dc962785385488ab0a690cbd195a | refs/heads/main | 2023-06-08T03:08:57.880591 | 2021-07-01T17:31:10 | 2021-07-01T17:31:10 | 382,109,197 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 332 | py | from Core import Lex, StM, Par
if __name__ == '__main__':
L = Lex()
L.get_term('code.txt')
print('Tokens:', L.l_tokens)
try:
P = Par(L.l_tokens)
Tree = P.S()
print('Tree:\n', Tree)
StM = StM(Tree.children)
StM.start()
except BaseException:
print('Syntax error')
| [
"noreply@github.com"
] | 0-Raigi-0.noreply@github.com |
0e992fc36e36b4801a93c466630e5c1fdd4a911a | 18cee46b76f00ba73fcaa0aa00f694dcf8c453ec | /coroutine/study/queue.py | 8317ceed7ed6d8d8a93e88578ba5858a5477bf21 | [] | no_license | segmond/pythonThingz | 7450b9c36c6853627495b75512e78af80b238065 | 87404f3ee24c01e4db0d32595bfd2bc54090a9ee | refs/heads/master | 2021-01-18T22:43:57.861336 | 2017-01-18T03:26:57 | 2017-01-18T03:26:57 | 40,515,604 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,242 | py | # produce/consume simulation
from random import randint
class Queue:
queue = []
queue_capacity = 5
def full(self):
if len(self.queue) >= self.queue_capacity:
return True
else:
return False
def empty(self):
return len(self.queue) <= 0
def add(self, item):
if len(self.queue) < self.queue_capacity:
self.queue.append(item)
def remove(self):
if len(self.queue) > 0:
item = self.queue[0]
self.queue.remove(item)
return item
else:
return None
queue = Queue()
def produce():
item = 0
while True:
produce_n = randint(0,5)
while produce_n > 0 and not queue.full():
item = item + 1
print "producer made ", item
queue.add(item)
produce_n = produce_n - 1
yield
def consume():
while True:
consume_n = randint(0,5)
while consume_n > 0 and not queue.empty():
item = queue.remove()
print "consumer got ", item
consume_n = consume_n - 1
yield
p = produce()
c = consume()
while True:
while p.next():
pass
while c.next():
pass
| [
"segmond@gmail.com"
] | segmond@gmail.com |
6fbfbe2d2e8c8222273f9d6a135e3b09b4b3a9ca | 72cb090038db3195013db7908c3922e7a58e220f | /python基础/code/chapter17.py | 68b786622c42d930de939962794f8b64b026fc18 | [] | no_license | iceAcmen/whole-Python | 086f568a6ac29f5d8ffba735cc0b8ae2521b3063 | 114bf86a071317d592cff2ba2ce92360b87cd48a | refs/heads/master | 2020-03-21T17:03:40.219813 | 2018-02-12T08:35:37 | 2018-02-12T08:35:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 695 | py | # -*- coding: utf-8 -*-
# @Author: cbbfcd
# @Date: 2017-09-17 00:57:16
# @Last Modified by: cbbfcd
# @Last Modified time: 2017-09-17 01:33:07
# type()动态创建一个类
def fn(self, name='Tom'):
print('Hello, {}'.format(name))
Hello = type('Hello', (object,), dict(hello=fn)) # 创建Hello class
s = Hello()
s.hello() # Hello, Tom
print(type(s)) # <class '__main__.Hello'>
print(type(Hello)) # <class 'type'>
class ListMetaClass(type): #=> 必须继承type
def __new__(cls, name, bases, fns):
fns['add'] = lambda self, value: self.append(value)
return type.__new__(cls, name, bases, fns)
class myList(list, metaclass=ListMetaClass):
pass
L = myList()
L.add(1)
print(L) #[1] | [
"2890636389@qq.com"
] | 2890636389@qq.com |
ea6e2ff4f377fb08bbe4ce580e46de2cced32cc3 | 5096eb6e15b30cf37093b775bcf5ecdcbd1d83d5 | /mc_oi.spec | 7a7010a012f8bc3962a7666dce4e1a49c232cfef | [] | no_license | rs9960821/mc_oi | 1680d591d82f20f90c6fba9fb6336c20bb82086f | 3cb28f0599f1d62788bfad7757fadf2c6ff6bf75 | refs/heads/master | 2022-10-11T17:54:03.805344 | 2020-05-26T09:36:27 | 2020-05-26T09:47:24 | 266,990,167 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 857 | spec | # -*- mode: python ; coding: utf-8 -*-
block_cipher = None
a = Analysis(['mc_oi.py'],
pathex=['C:\\Users\\rs996\\Desktop\\mc_oi'],
binaries=[],
datas=[],
hiddenimports=[],
hookspath=[],
runtime_hooks=[],
excludes=[],
win_no_prefer_redirects=False,
win_private_assemblies=False,
cipher=block_cipher,
noarchive=False)
pyz = PYZ(a.pure, a.zipped_data,
cipher=block_cipher)
exe = EXE(pyz,
a.scripts,
a.binaries,
a.zipfiles,
a.datas,
[],
name='mc_oi',
debug=False,
bootloader_ignore_signals=False,
strip=False,
upx=True,
upx_exclude=[],
runtime_tmpdir=None,
console=True )
| [
"rs9960821@gmail.com"
] | rs9960821@gmail.com |
d2e0b7afe0e2cbcce829206bfb592a728881c7f8 | 1b0f0258a1ac6f606a8ba473cef785aa7eb20792 | /screen.py | 6a33ba4866d2185b9642c42c47280b0384463ad6 | [
"MIT"
] | permissive | pierav/pyhack | 653294a8cef4cce11c1318914d516b106116ed1b | af5c86fb721053d8a3e819ab772c8144a23b86bf | refs/heads/master | 2022-03-27T13:43:15.541079 | 2019-12-23T15:41:47 | 2019-12-23T15:41:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,036 | py | #!/usr/bin/env python3
# pylint: disable=C0103
"""
Définie la classe Screen
"""
# http://nicofo.tuxfamily.org/index.php?post/2007/01/10/19-mouvement-du-curseur-dans-le-terminal
# https://rosettacode.org/wiki/Terminal_control/Preserve_screen#Python
import sys
import termios
import os
from vect import Vect
class Screen:
"""
Class Screen
Permet d'afficher des elements a l'écran
init > update > del
"""
def __init__(self):
"""
Initialise l'affichage
"""
# Sauvegarde du terminal
self._my_fd = sys.stdin.fileno()
self._old = termios.tcgetattr(self._my_fd)
# Désactivation echo stdin
new = termios.tcgetattr(self._my_fd)
new[3] = new[3] & ~termios.ECHO # lflags
termios.tcsetattr(self._my_fd, termios.TCSADRAIN, new)
# Lancement ecran second + Désactivation curseur
print("\033[?1049h\033[H" + "\033[?25l", end="")
sys.stdout.flush()
# Bufferisarion de stdout
self._BUFFER_SIZE = 500*500
sys.stdout = open(sys.stdout.fileno(), "w", self._BUFFER_SIZE)
# Méthodes d'édition
self.my_print = sys.stdout.write
self.my_flush = sys.stdout.flush
# Taille de l'écran
self.size = Vect(0, 0)
def stop(self):
"""
Remet la console dans l'état initial
"""
# Remise de l'état initial console
termios.tcsetattr(self._my_fd, termios.TCSADRAIN, self._old)
# Fermeture ecran second + Activation curseur
print("\033[?1049l" + "\033[?25h", end="")
def get_size(self):
"""
Met a jour la taille de l'écran
"""
# bof ...
H, W = map(int, os.popen('stty size', 'r').read().split())
assert H * W <= self._BUFFER_SIZE
self.size = Vect(W, H)
return self.size
def g_pos(self):
"""
Retourne un générateur sur tous les position de l'écran
dans l'ordre d'affichage
"""
for scr_y in range(self.size.y-1, 0, -1):
for scr_x in range(self.size.x):
yield Vect(scr_x, scr_y)
def update(self, scr_tab):
"""
Actualise l'écran
"""
self.my_print('\033[0;0H')
for scr_pos in self.g_pos():
self.my_print(scr_tab[scr_pos.x][scr_pos.y])# <=> print(char, end='', flush=False)
self.my_flush()
def main():
"""
TU
"""
import time
from random import choice
tab = [[' ' for x in range(500)] for y in range(500)]
try:
screen = Screen()
while True:
for pos in screen.g_pos():
tab[pos.x][pos.y] = choice([' ', '.'])
str_size = str(screen.get_size())
for i, c in enumerate(str_size):
tab[i+1][1] = c
screen.update(tab)
time.sleep(0.1)
finally:
# Pour avoir les erreures à la fin du programme
screen.stop()
if __name__ == "__main__":
main()
| [
"pierreafe@gmail.com"
] | pierreafe@gmail.com |
66d5f8a4a8f55335aea9dc12b7b063af44729eef | ace7319dffe3856b0bf1323a2c91a0a506a19356 | /covidportal/covidportal/urls.py | e2c98ff40ec025617b192420ee3e764896689b66 | [] | no_license | Gayathritl/django-apps | 8ba8861be16b7bc56ec175ed682dbe3c2e1c8ce8 | e3ffeae804c9a0f1f62c12a921f3e2312079a2dc | refs/heads/master | 2023-04-27T08:45:30.387492 | 2021-05-21T16:18:03 | 2021-05-21T16:18:03 | 369,592,319 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 802 | py | """covidportal URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('beds/',include('beds.urls')),
path('admin/', admin.site.urls),
]
| [
"gayathri219018@sahrdaya.ac.in"
] | gayathri219018@sahrdaya.ac.in |
0d00c9e9aa486a1e5076f911892411a3a5d9780d | 54ab8e214bf5d8909aa8a7c503ca001f74009d08 | /Desafio094 - Unindo Dicionários e Listas.py | f0f2e2f9d3d01c3bcf342fc71724bec43e444a5c | [
"MIT"
] | permissive | kleberfsobrinho/python | a43fb6fd98d731e6ef052d9760792ee2412d7018 | 34739d127c1a3908f5a2fd5a7ef07d4c78658802 | refs/heads/main | 2023-03-05T19:24:36.981429 | 2021-02-13T03:28:41 | 2021-02-13T03:28:41 | 338,488,129 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,225 | py | dados = dict()
grupo = list()
soma_idades = 0
while True:
dados['Nome'] = str(input('Nome: '))
sexo = str(input('Sexo: [M/F] ')).upper()[0]
if sexo not in 'MmFf':
while True:
sexo = str(input('Tente novamente. Sexo: [M/F] ')).upper()[0]
if sexo in 'MF':
break
dados['Sexo'] = sexo
dados['Idade'] = int(input('Idade: '))
soma_idades += dados['Idade']
grupo.append(dados.copy())
opcao = str(input('Deseja continuar? [S/N] '))
if opcao in 'Nn':
break
elif opcao not in 'Ss':
while True:
opcao = str(input('Tente novamente. Deseja continuar? [S/N] '))
if opcao in 'Nn':
break
if opcao in 'Nn':
break
print()
print(f'Um total de {len(grupo)} pessoas foram cadastradas.')
media = soma_idades / len(grupo)
print(f'A média das idades é de {media:.2f} anos.')
print()
print('Mulheres cadastradas: ')
for i in grupo:
if i["Sexo"] in 'Ff':
print(f'{i["Nome"]} - {i["Idade"]} anos')
print()
print('Pessoas cadastradas acima da média de idades: : ')
for i in grupo:
if i["Idade"] > media:
print(f'{i["Nome"]} - [{i["Sexo"]}] - {i["Idade"]} anos')
| [
"kleber.sobrinho@ccc.ufcg.edu.br"
] | kleber.sobrinho@ccc.ufcg.edu.br |
f5ff0644bf5236dde7c565093912b84160b487ec | cd8a6f89a28c302840f6860b9e82f28123442a22 | /zwad/nn.py | 1286632706d8856c99296a06ac14a42cbb1409a4 | [] | no_license | RossyNguyen/zwad | 2d2bcc97c9ac936b4bfa849d47a5bb4311706876 | 58e25d1c5bce54ccf187282131a7d417f64a4a98 | refs/heads/master | 2023-04-09T15:29:41.566721 | 2021-04-13T13:46:44 | 2021-04-13T13:46:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,921 | py | #!/usr/bin/env python3
import pandas as pd
import numpy as np
import sys
import argparse
from sklearn.neighbors import NearestNeighbors
from zwad.utils import load_data
# Command-line interface.  --oid/--feature are repeatable and must come in
# matching pairs of data files; --lookup names the query objects; --neighbors
# and --algorithm tune the k-NN search.
parser = argparse.ArgumentParser(description='Lookup for nearest neighbors')
parser.add_argument('--oid', metavar='FILENAME', action='append', help='Filepath to oid.dat', required=True)
parser.add_argument('--feature', metavar='FILENAME', action='append', help='Filepath to feature.dat', required=True)
parser.add_argument('--lookup', metavar='OID', action='append', help='OID to lookup for the neighbors', required=True)
parser.add_argument('--neighbors', metavar='NUMBER', action='store', help='A number of neighbors to look for', type=int, default=5)
parser.add_argument('--algorithm', metavar='ALGO', action='store', help='ball_tree or kd_tree', default='kd_tree')
def oid_to_index(oids, oid):
    """Map each object id in *oid* to its positional index within *oids*."""
    positions = {value: pos for pos, value in enumerate(oids)}
    return np.array([positions[value] for value in oid])
def main(argv=None):
    """Entry point: parse arguments, z-score the features and print the
    k nearest neighbours of every requested OID as a table."""
    argv = sys.argv if argv is None else argv
    args = parser.parse_args(argv[1:])

    oids, features = load_data(args.oid, args.feature)
    # Standardize each feature column (z-score) before distance computation.
    features = (features - features.mean(axis=0)) / features.std(axis=0)

    query_oids = np.array(args.lookup, dtype=oids.dtype)
    query_idx = oid_to_index(oids, query_oids)

    finder = NearestNeighbors(algorithm=args.algorithm)
    finder.fit(features)
    distances, indices = finder.kneighbors(
        features[query_idx, :], n_neighbors=args.neighbors, return_distance=True)

    # One output row per (query, neighbour) pair.
    lookup_col = np.repeat(query_oids.reshape(-1, 1), repeats=args.neighbors, axis=1)
    res = pd.DataFrame.from_dict({
        "lookup": lookup_col.reshape(-1),
        "neighbor": oids[indices].reshape(-1),
        "distance": distances.reshape(-1),
    })
    print(res.to_string())
print(res.to_string())
def execute_from_commandline(argv=None):
    """Console-script entry point; delegates to :func:`main`."""
    main(argv)
# Allow running the module directly as a script.
if __name__ == "__main__":
    main()
| [
"matwey.kornilov@gmail.com"
] | matwey.kornilov@gmail.com |
67b2a9dd24d52509e01bfca824c18c8a25229ef6 | 52e2224538bfa7e69f9e0688dc49c53a247070d7 | /app/api/serializers/offers.py | 5a8539cca5bad81d59223205eb8855f955c825cc | [] | no_license | averdier/epsi_my_learning_chain_api | 2958f8ab6333e87f8b5d1a97f8d485361cdcba9d | ea85dced579f6285f3acd0edd0d64ead4f6f2332 | refs/heads/master | 2022-12-09T19:56:27.836637 | 2018-04-13T09:23:46 | 2018-04-13T09:23:46 | 129,227,310 | 1 | 0 | null | 2022-12-08T01:02:30 | 2018-04-12T09:32:18 | Python | UTF-8 | Python | false | false | 1,666 | py | # -*- coding: utf-8 -*-
from flask_restplus import fields
from .nested import facilitator_nested, api
# Payload accepted when creating an offer (all mutable fields, price required).
offer_post_model = api.model('Offer POST model', {
    'name': fields.String(required=True, min_length=4, description='Name'),
    'tags': fields.List(fields.String(), required=False, description='Tags'),
    'price': fields.Integer(required=True, min=0, description='Price'),
    'description': fields.String(required=False, description='Description')
})

# Payload accepted when partially updating an offer (every field optional).
offer_patch_model = api.model('Offer PATCH model', {
    'name': fields.String(required=False, min_length=4, description='Name'),
    'tags': fields.List(fields.String(), required=False, description='Tags'),
    'price': fields.Integer(required=False, min=0, description='Price'),
    'description': fields.String(required=False, description='Description')
})

# Compact representation used in listings; facilitator_id is flattened from
# the related facilitator object via the attribute lambda.
offer_minimal_model = api.model('Offer minimal model', {
    'id': fields.String(required=True, description='Offer ID'),
    'facilitator_id': fields.String(required=True, description='Facilitator ID', attribute=lambda o: o.facilitator.id),
    'name': fields.String(required=True, description='Name'),
    'tags': fields.List(fields.String(), required=True, description='Tags'),
    'price': fields.Integer(required=True, description='Price')
})

# Full representation: minimal fields plus description and nested facilitator.
offer_model = api.inherit('Offer model', offer_minimal_model, {
    'description': fields.String(required=True, description='Description'),
    'facilitator': fields.Nested(facilitator_nested, required=True, description='Facilitator')
})

# Envelope for list responses.
offer_container = api.model('Offer container', {
    'offers': fields.List(fields.Nested(offer_minimal_model), required=True, description='Offers list')
})
"a.verdier@outlook.fr"
] | a.verdier@outlook.fr |
2c526fc3a80edcb39c640cc7e108a307f03c6a26 | 7964ef4f32540c35cf3c092da86b5fe11990c38b | /ch10 - Files and Exceptions/10-11. Favorite Number.py | 1442fe89790ddb6cfa47ace7d2903c890a586f21 | [] | no_license | ziolkowskid06/Python_Crash_Course | eb5135756f24af230b234be5a739b559fc1fe67a | d7aab8ffd4856f39a4cc18fcf6e0389beca1c800 | refs/heads/main | 2023-01-07T22:02:14.438349 | 2020-11-13T20:43:20 | 2020-11-13T20:43:20 | 306,506,992 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 438 | py | """
Save and load a file in the json format.
"""
"""
write: favorite_number_write.py
import json
"""
number = input("What's your favorite number? ")
with open('favorite_number.json', 'w') as f:
json.dump(number, f)
print("Number is saved!")
"""
write: favorite_number_read.py
import json
"""
with open('favorite_number.json') as f:
number = json.load(f)
print(f"Number loaded! It is {number}")
| [
"noreply@github.com"
] | ziolkowskid06.noreply@github.com |
d68e16cc929d228633b588ce362aa9510f90ddaa | b83fe134f6040b26eebe2d7d34a1f612760102d5 | /migrations/versions/8d98d38b2349_.py | 98b2d95bea1afd124f7dddb53b5b3077c7f810f1 | [] | no_license | Rdubya54/6_Gun_Backend | 378224ebbea5dd418258d062e4bbbc00ba75440a | d5197219835c934c474a96fa92cafcfeadcdcb26 | refs/heads/main | 2023-06-18T04:41:54.061812 | 2021-03-27T23:36:37 | 2021-03-27T23:36:37 | 352,198,800 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 763 | py | """empty message
Revision ID: 8d98d38b2349
Revises:
Create Date: 2021-03-24 23:45:45.777933
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8d98d38b2349'  # unique id of this migration
down_revision = None  # no parent: this is the first migration in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Apply this migration: create the ``Posts`` table (id PK, post text, user id)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('Posts',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('post', sa.String(), nullable=True),
    sa.Column('user', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade():
    """Revert this migration: drop the ``Posts`` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('Posts')
    # ### end Alembic commands ###
| [
"ryan.wortmann@yahoo.com"
] | ryan.wortmann@yahoo.com |
9fa6b44e795832c396a26de1768c92c41cc0470e | 019457a7027be385ba117a58a3579b7e527120e5 | /LeetCode/Top Interview Questions/Arrays and Strings/Longest Palindromic Substring.py | b857e1d1b17623455327b866614bfd3c50cf4516 | [] | no_license | msaei/coding | e3bc5a31ee775a575b95cf05fd799525e7bc5b76 | 47d48d261e15d567e4a6c0bb2ff5abcbf206fcb4 | refs/heads/master | 2020-11-29T02:55:22.760934 | 2020-01-26T15:38:29 | 2020-01-26T15:38:29 | 230,000,702 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 520 | py | #Longest Palindromic Substring
#https://leetcode.com/explore/interview/card/top-interview-questions-medium/103/array-and-strings/780/
class Solution:
def longestPalindrome(self, s: str) -> str:
def isPalindrome(word: str) -> bool:
return word == word[::-1]
l = len(s)
while l > 0:
for i in range(len(s) - l + 1):
if isPalindrome(s[i:i + l]):
return s[i:i+l]
l -= 1
return ''
| [
"mahdi.saeikhalili@gmail.com"
] | mahdi.saeikhalili@gmail.com |
ec04d383ac47c225f76a07964b2467c6ad5a0251 | 79eb0177704239bb8b243c8baa77fadcdcb56d57 | /celerytasks/chef_mock.py | 61e19abfc5600b7afad3e76cb576bfd119dbf481 | [
"Apache-2.0"
] | permissive | wojons/deis | 5b2490d49dd6f7a7736aa64fcd2e5adb93e94ff5 | 07e62395492dafff012d773349beeb5b8c0edc1b | refs/heads/master | 2021-01-18T09:40:21.684840 | 2013-09-15T18:48:40 | 2013-09-15T18:48:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,458 | py | """
https://github.com/coderanger/pychef
We want a simpler version for making API calls
"""
import json
class ChefAPI(object):
    """Minimal mock of a Chef server API client (see pychef for the real one).

    No network I/O is performed: ``request`` only validates its arguments and
    returns None, which every convenience wrapper passes through.
    """

    def __init__(self, chef_url, client_name, client_key):
        self.server_url = chef_url
        self.client_name = client_name
        self.client_key = client_key

    def request(self, verb, path, body=''):
        """Validate a mock API call and return None.

        BUG FIX: ``body`` used to be asserted unconditionally, so every
        GET/DELETE issued with the default empty body raised AssertionError.
        A payload is now required only for the mutating verbs.
        """
        assert verb in ('GET', 'DELETE', 'PUT', 'POST')
        assert path
        if verb in ('PUT', 'POST'):
            assert body

    def create_databag(self, name):
        """POST a new (empty) data bag."""
        body = json.dumps({'name': name, 'id': name})
        return self.request('POST', '/data', body)

    def create_databag_item(self, name, item_name, item_value):
        """POST a new item into data bag ``name``."""
        item_dict = {'id': item_name}
        item_dict.update(item_value)
        body = json.dumps(item_dict)
        return self.request('POST', "/data/%s" % name, body)

    def get_databag(self, bag_name):
        """GET the contents of a data bag."""
        return self.request('GET', "/data/%s" % bag_name)

    def delete_databag(self, bag_name):
        """DELETE a data bag."""
        return self.request('DELETE', "/data/%s" % bag_name)

    def update_databag_item(self, bag_name, item_name, item_value):
        """PUT new content for an existing data bag item."""
        body = json.dumps(item_value)
        return self.request('PUT', "/data/%s/%s" % (bag_name, item_name), body)

    def get_databag_item(self, bag_name, item_name):
        """GET a single data bag item."""
        return self.request('GET', "/data/%s/%s" % (bag_name, item_name))

    def get_all_cookbooks(self):
        """GET the list of cookbooks."""
        return self.request('GET', '/cookbooks')
| [
"matt@opdemand.com"
] | matt@opdemand.com |
68bce261a32438af944cc7a39d301fd1bed0c9b1 | e260e72c6d8f24ef250173025b2ecf129960cf03 | /src/python/py27hash/key.py | 34b6e925ab62923b6e110a518a7371a6aa5cb145 | [
"MIT"
] | permissive | neuml/py27hash | 1cdce30b9480ff338ca0488c2d80a196fc783370 | 4d814de4ab616f33bb2d74c687e74fa57c399a56 | refs/heads/master | 2022-11-23T21:21:56.220330 | 2022-11-20T12:45:21 | 2022-11-20T12:45:21 | 223,510,843 | 11 | 3 | MIT | 2022-03-27T14:46:02 | 2019-11-23T00:57:33 | Python | UTF-8 | Python | false | false | 5,425 | py | """
Compatibility methods to support Python 2.7 style key iteration in Python 3.X+
This is designed for compatibility not performance.
"""
import ctypes
# pylint: disable = E0401
from .hash import Hash
class Keys(object):
    """
    Compatibility class to support Python 2.7 style iteration in Python 3.X+

    Logic ported from the 2.7 Python branch: cpython/Objects/dictobject.c
    Logic ported from the 2.7 Python branch: cpython/Objects/setobject.c
    """

    # Min dict size
    MINSIZE = 8

    # Hash collisions
    PERTURB_SHIFT = 5

    def __init__(self):
        """
        Initializes a keys object.
        """
        # keylist holds keys in insertion order; keysort caches the
        # Python-2.7-hash-ordered view and is invalidated on mutation.
        self.keylist = []
        self.keysort = None

        # Python 2 dict default size
        self.mask = Keys.MINSIZE - 1

    def __setstate__(self, state):
        """
        Overrides default pickling object to force re-adding all keys and match Python 2.7 deserialization logic.

        Args:
            state: input state
        """
        self.__dict__ = state
        keys = self.keys()

        # Clear keys and re-add to match deserialization logic
        self.__init__()

        for k in keys:
            self.add(k)

    def __iter__(self):
        """
        Default iterator.

        Returns:
            iterator
        """
        return iter(self.keys())

    def keys(self):
        """
        Returns keys ordered using Python 2.7's iteration algorithm.

        Method: static PyDictEntry *lookdict(PyDictObject *mp, PyObject *key, register long hash)

        Returns:
            list of keys
        """
        if not self.keysort:
            keys = []
            hids = set()

            # Simulate CPython 2.7 open addressing: each key is assigned the
            # slot it would occupy in the hash table, then keys are emitted
            # in slot order.
            for k in self.keylist:
                # C API uses unsigned values
                h = ctypes.c_size_t(Hash.hash(k)).value
                i = h & self.mask

                hid = i
                perturb = h

                # Collision probing, same recurrence as lookdict().
                while hid in hids:
                    i = (i << 2) + i + perturb + 1
                    hid = i & self.mask
                    perturb >>= Keys.PERTURB_SHIFT

                keys.append((hid, k))
                hids.add(hid)

            # Cache result - performance - clear if more keys added
            self.keysort = [v for (k, v) in sorted(keys, key=lambda x: x[0])]

        return self.keysort

    def add(self, key):
        """
        Called each time a new item is inserted. Tracks via insertion order and will maintain the same order
        as a dict in Python 2.7.

        Method: static int dict_set_item_by_hash_or_entry(register PyObject *op, PyObject *key, long hash,
        PyDictEntry *ep, PyObject *value)

        Args:
            key: key to add
        """
        # Add key to list. If this is a replace/update then size won't change.
        # NOTE(review): the truthiness test also skips falsy keys (0, '',
        # False, None) — confirm that is intended and not just a None guard.
        if key and key not in self.keylist:
            # Append key to list
            self.keylist.append(key)

            # Clear cached keys
            self.keysort = None

            # Resize dict if 2/3 capacity
            if len(self.keylist) * 3 >= ((self.mask + 1) * 2):
                # Reset key list to simulate the dict resize + copy operation
                self.keylist = self.keys()
                self.keysort = None

                self.setMask()

    def remove(self, key):
        """
        Remove a key from the backing list.

        Args:
            key: key to remove
        """
        if key in self.keylist:
            # Remove key from list
            self.keylist.remove(key)

            # Clear cached keys
            self.keysort = None

    def merge(self, d):
        """
        Merges keys from an existing iterable into this key list.

        Method: int PyDict_Merge(PyObject *a, PyObject *b, int override)

        Args:
            d: input dict
        """
        # PyDict_Merge initial merge size is double the size of the current + incoming dict
        if (len(self.keylist) + len(d)) * 3 >= (self.mask + 1) * 2:
            self.setMask((len(self.keylist) + len(d)) * 2)

        # Copy actual keys
        for k in d:
            self.add(k)

    def copy(self):
        """
        Makes a copy of self.

        Method: PyObject *PyDict_Copy(PyObject *o)

        Returns:
            copy of self
        """
        # Copy creates a new object and merges keys in
        new = Keys()
        new.merge(self.keys())

        return new

    def pop(self):
        """
        Pops the top element from the sorted keys if it exists. Returns None otherwise.

        Method: static PyObject *dict_popitem(PyDictObject *mp)

        Return:
            top element or None if Keys is empty
        """
        if self.keylist:
            # Pop the top element
            value = self.keys()[0]
            self.remove(value)

            return value

        return None

    def setMask(self, request=None):
        """
        Key based on the total size of this dict. Matches ma_mask in Python 2.7's dict.

        Method: static int dictresize(PyDictObject *mp, Py_ssize_t minused)
        """
        if not request:
            length = len(self.keylist)

            # Python 2 dict increases by a factor of 4 for small dicts, 2 for larger ones
            request = length * (2 if length > 50000 else 4)

        newsize = Keys.MINSIZE
        while newsize <= request:
            newsize <<= 1

        self.mask = newsize - 1
| [
"561939+davidmezzetti@users.noreply.github.com"
] | 561939+davidmezzetti@users.noreply.github.com |
36a243a77291606073c6f41f866295e112ab605a | 632400054c08a0074e13dd5def5f78dbb870519b | /tensorbreeze/data/samplers/balanced_batch_sampler.py | 2b4b9c9961d4ca29c03a1990114797fc8dea4583 | [
"MIT"
] | permissive | mingruimingrui/TensorBreeze | ab14d78a28148e549834a08ef9aebf8c9832300e | 5373ab1d10f276b1e9c08c75af471ff74da635fe | refs/heads/master | 2020-04-09T00:27:27.914452 | 2019-04-10T16:27:12 | 2019-04-10T16:27:12 | 159,867,939 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,181 | py | import random
import torchvision
class BalancedBatchSampler(object):
    """Batch sampler yielding ``img_per_cls`` items from each of
    ``cls_per_batch`` randomly chosen classes of a DatasetFolder."""

    def __init__(self, dataset, num_iter=1, cls_per_batch=40, img_per_cls=20):
        assert isinstance(dataset, torchvision.datasets.DatasetFolder)

        # Bucket dataset item indices by their class index
        # (sample[1] is the class index in a DatasetFolder sample tuple).
        buckets = [[] for _ in dataset.classes]
        for item_idx, sample in enumerate(dataset.samples):
            buckets[sample[1]].append(item_idx)

        self.num_iter = num_iter
        self.cls_per_batch = cls_per_batch
        self.img_per_cls = img_per_cls
        self.num_classes = len(dataset.classes)
        self.class_idx_to_item_idx = buckets

    def __iter__(self):
        for _ in range(self.num_iter):
            # First sample the classes, then the items within each class;
            # the random.sample call order matches the original exactly.
            chosen = random.sample(
                range(self.num_classes),
                k=self.cls_per_batch
            )
            yield [
                item_idx
                for cls_idx in chosen
                for item_idx in random.sample(
                    self.class_idx_to_item_idx[cls_idx],
                    k=self.img_per_cls
                )
            ]

    def __len__(self):
        return self.num_iter
| [
"mingruimingrui@hotmail.com"
] | mingruimingrui@hotmail.com |
168a739a384072e0b09f5869c67327961f27634c | 78ecfb3ec5d747e11cfff98e473bf820ebad0e2b | /inventario/models.py | 7f5097f7312253c8f11f87cd0a303eb4b4d8f249 | [] | no_license | leonardo1497/inventario-upshirts | 0a53aea7702c4bd5c21b8aa724f6ab802bc8e88d | 5d7ce0775760aa5b1410187ef5f189b27e5b79e2 | refs/heads/master | 2021-06-19T21:16:38.944509 | 2019-07-27T23:48:06 | 2019-07-27T23:48:06 | 198,448,227 | 0 | 0 | null | 2021-06-10T21:45:29 | 2019-07-23T14:33:17 | Python | UTF-8 | Python | false | false | 310 | py | from django.db import models
from django.contrib.postgres.fields import JSONField
# Create your models here.
class Inventario(models.Model):
Modelo = models.CharField(max_length=200,null=False)
Imagen = models.TextField(null=False)
Precio = models.FloatField(null=False)
Cantidad = JSONField() | [
"1497enrique@gmail.com"
] | 1497enrique@gmail.com |
f30eba5f8057a09dfde4f71d67c088c7ba54857f | cd2fdedba5baf3e049f712f885ea38ed2d31c94a | /env/lib/python3.10/site-packages/lazy_object_proxy/slots.py | 24d2f7e72b6c904b6faa25d7bcf50cbdb973100b | [] | no_license | Michael-okojie/contact-app | b090908b7624d713a04fc03e136a7669e9028828 | d6f473e54ea08a62c3e5b915b95434079c76dd8d | refs/heads/master | 2023-07-08T12:01:32.747768 | 2023-06-21T15:20:57 | 2023-06-21T15:20:57 | 266,363,404 | 0 | 0 | null | 2022-12-02T04:18:27 | 2020-05-23T15:22:41 | JavaScript | UTF-8 | Python | false | false | 12,502 | py | import operator
from .compat import PY2
from .compat import PY3
from .compat import string_types
from .compat import with_metaclass
from .utils import await_
from .utils import identity
class _ProxyMethods(object):
    """Mixin whose properties delegate metadata attributes to ``__wrapped__``."""

    # We use properties to override the values of __module__ and
    # __doc__. If we add these in ObjectProxy, the derived class
    # __dict__ will still be setup to have string variants of these
    # attributes and the rules of descriptors means that they appear to
    # take precedence over the properties in the base class. To avoid
    # that, we copy the properties into the derived class type itself
    # via a meta class. In that way the properties will always take
    # precedence.

    @property
    def __module__(self):
        return self.__wrapped__.__module__

    @__module__.setter
    def __module__(self, value):
        self.__wrapped__.__module__ = value

    @property
    def __doc__(self):
        return self.__wrapped__.__doc__

    @__doc__.setter
    def __doc__(self, value):
        self.__wrapped__.__doc__ = value

    # We similar use a property for __dict__. We need __dict__ to be
    # explicit to ensure that vars() works as expected.

    @property
    def __dict__(self):
        return self.__wrapped__.__dict__

    # Need to also propagate the special __weakref__ attribute for case
    # where decorating classes which will define this. If do not define
    # it and use a function like inspect.getmembers() on a decorator
    # class it will fail. This can't be in the derived classes.

    @property
    def __weakref__(self):
        return self.__wrapped__.__weakref__
class _ProxyMetaType(type):
    """Metaclass that copies ``_ProxyMethods``' properties onto every subclass."""

    def __new__(cls, name, bases, dictionary):
        # Copy our special properties into the class so that they
        # always take precedence over attributes of the same name added
        # during construction of a derived class. This is to save
        # duplicating the implementation for them in all derived classes.
        dictionary.update(vars(_ProxyMethods))

        return type.__new__(cls, name, bases, dictionary)
class Proxy(with_metaclass(_ProxyMetaType)):
    """
    A proxy implementation in pure Python, using slots. You can subclass this to add
    local methods or attributes, or enable __dict__.

    The most important internals:

    * ``__factory__`` is the callback that "materializes" the object we proxy to.
    * ``__target__`` will contain the object we proxy to, once it's "materialized".
    * ``__resolved__`` is a boolean, `True` if factory was called.
    * ``__wrapped__`` is a property that does either:

      * return ``__target__`` if it's set.
      * calls ``__factory__``, saves result to ``__target__`` and returns said result.
    """

    __slots__ = '__target__', '__factory__'

    def __init__(self, factory):
        # Stored via object.__setattr__ to bypass this class's own
        # __setattr__, which would otherwise delegate to the (not yet
        # materialized) wrapped object.
        object.__setattr__(self, '__factory__', factory)

    @property
    def __resolved__(self, __getattr__=object.__getattribute__):
        # True once the factory has run and '__target__' exists.
        try:
            __getattr__(self, '__target__')
        except AttributeError:
            return False
        else:
            return True

    @property
    def __wrapped__(self, __getattr__=object.__getattribute__, __setattr__=object.__setattr__,
                    __delattr__=object.__delattr__):
        # Lazily materialize the target on first access.
        try:
            return __getattr__(self, '__target__')
        except AttributeError:
            try:
                factory = __getattr__(self, '__factory__')
            except AttributeError:
                raise ValueError("Proxy hasn't been initiated: __factory__ is missing.")
            target = factory()
            __setattr__(self, '__target__', target)
            return target

    @__wrapped__.deleter
    def __wrapped__(self, __delattr__=object.__delattr__):
        __delattr__(self, '__target__')

    @__wrapped__.setter
    def __wrapped__(self, target, __setattr__=object.__setattr__):
        __setattr__(self, '__target__', target)

    @property
    def __name__(self):
        return self.__wrapped__.__name__

    @__name__.setter
    def __name__(self, value):
        self.__wrapped__.__name__ = value

    @property
    def __class__(self):
        return self.__wrapped__.__class__

    @__class__.setter  # noqa: F811
    def __class__(self, value):  # noqa: F811
        self.__wrapped__.__class__ = value

    @property
    def __annotations__(self):
        # BUG FIX: previously read the misspelled attribute '__anotations__',
        # which raised AttributeError for every wrapped object.
        return self.__wrapped__.__annotations__

    @__annotations__.setter
    def __annotations__(self, value):
        self.__wrapped__.__annotations__ = value

    def __dir__(self):
        return dir(self.__wrapped__)

    def __str__(self):
        return str(self.__wrapped__)

    if PY3:
        def __bytes__(self):
            return bytes(self.__wrapped__)

    def __repr__(self, __getattr__=object.__getattribute__):
        # Deliberately avoids materializing the target just to print the proxy.
        try:
            target = __getattr__(self, '__target__')
        except AttributeError:
            return '<{} at 0x{:x} with factory {!r}>'.format(
                type(self).__name__, id(self),
                self.__factory__
            )
        else:
            return '<{} at 0x{:x} wrapping {!r} at 0x{:x} with factory {!r}>'.format(
                type(self).__name__, id(self),
                target, id(target),
                self.__factory__
            )

    def __fspath__(self):
        wrapped = self.__wrapped__
        if isinstance(wrapped, string_types):
            return wrapped
        else:
            fspath = getattr(wrapped, '__fspath__', None)
            if fspath is None:
                return wrapped
            else:
                return fspath()

    def __reversed__(self):
        return reversed(self.__wrapped__)

    if PY3:
        def __round__(self):
            return round(self.__wrapped__)

    # --- rich comparisons, hash and truthiness: defer to the wrapped object ---

    def __lt__(self, other):
        return self.__wrapped__ < other

    def __le__(self, other):
        return self.__wrapped__ <= other

    def __eq__(self, other):
        return self.__wrapped__ == other

    def __ne__(self, other):
        return self.__wrapped__ != other

    def __gt__(self, other):
        return self.__wrapped__ > other

    def __ge__(self, other):
        return self.__wrapped__ >= other

    def __hash__(self):
        return hash(self.__wrapped__)

    def __nonzero__(self):
        return bool(self.__wrapped__)

    def __bool__(self):
        return bool(self.__wrapped__)

    # --- attribute access: attributes defined on the proxy type itself are
    # handled locally, everything else is forwarded to the wrapped object ---

    def __setattr__(self, name, value, __setattr__=object.__setattr__):
        if hasattr(type(self), name):
            __setattr__(self, name, value)
        else:
            setattr(self.__wrapped__, name, value)

    def __getattr__(self, name):
        if name in ('__wrapped__', '__factory__'):
            raise AttributeError(name)
        else:
            return getattr(self.__wrapped__, name)

    def __delattr__(self, name, __delattr__=object.__delattr__):
        if hasattr(type(self), name):
            __delattr__(self, name)
        else:
            delattr(self.__wrapped__, name)

    # --- binary arithmetic ---

    def __add__(self, other):
        return self.__wrapped__ + other

    def __sub__(self, other):
        return self.__wrapped__ - other

    def __mul__(self, other):
        return self.__wrapped__ * other

    def __div__(self, other):
        # Python 2 only (classic division).
        return operator.div(self.__wrapped__, other)

    def __truediv__(self, other):
        return operator.truediv(self.__wrapped__, other)

    def __floordiv__(self, other):
        return self.__wrapped__ // other

    def __mod__(self, other):
        return self.__wrapped__ % other

    def __divmod__(self, other):
        return divmod(self.__wrapped__, other)

    def __pow__(self, other, *args):
        return pow(self.__wrapped__, other, *args)

    def __lshift__(self, other):
        return self.__wrapped__ << other

    def __rshift__(self, other):
        return self.__wrapped__ >> other

    def __and__(self, other):
        return self.__wrapped__ & other

    def __xor__(self, other):
        return self.__wrapped__ ^ other

    def __or__(self, other):
        return self.__wrapped__ | other

    # --- reflected (right-hand) arithmetic ---

    def __radd__(self, other):
        return other + self.__wrapped__

    def __rsub__(self, other):
        return other - self.__wrapped__

    def __rmul__(self, other):
        return other * self.__wrapped__

    def __rdiv__(self, other):
        # Python 2 only (classic division).
        return operator.div(other, self.__wrapped__)

    def __rtruediv__(self, other):
        return operator.truediv(other, self.__wrapped__)

    def __rfloordiv__(self, other):
        return other // self.__wrapped__

    def __rmod__(self, other):
        return other % self.__wrapped__

    def __rdivmod__(self, other):
        return divmod(other, self.__wrapped__)

    def __rpow__(self, other, *args):
        return pow(other, self.__wrapped__, *args)

    def __rlshift__(self, other):
        return other << self.__wrapped__

    def __rrshift__(self, other):
        return other >> self.__wrapped__

    def __rand__(self, other):
        return other & self.__wrapped__

    def __rxor__(self, other):
        return other ^ self.__wrapped__

    def __ror__(self, other):
        return other | self.__wrapped__

    # --- in-place arithmetic: mutate/rebind __wrapped__, return the proxy ---

    def __iadd__(self, other):
        self.__wrapped__ += other
        return self

    def __isub__(self, other):
        self.__wrapped__ -= other
        return self

    def __imul__(self, other):
        self.__wrapped__ *= other
        return self

    def __idiv__(self, other):
        # Python 2 only (classic division).
        self.__wrapped__ = operator.idiv(self.__wrapped__, other)
        return self

    def __itruediv__(self, other):
        self.__wrapped__ = operator.itruediv(self.__wrapped__, other)
        return self

    def __ifloordiv__(self, other):
        self.__wrapped__ //= other
        return self

    def __imod__(self, other):
        self.__wrapped__ %= other
        return self

    def __ipow__(self, other):
        self.__wrapped__ **= other
        return self

    def __ilshift__(self, other):
        self.__wrapped__ <<= other
        return self

    def __irshift__(self, other):
        self.__wrapped__ >>= other
        return self

    def __iand__(self, other):
        self.__wrapped__ &= other
        return self

    def __ixor__(self, other):
        self.__wrapped__ ^= other
        return self

    def __ior__(self, other):
        self.__wrapped__ |= other
        return self

    # --- unary operators and numeric conversions ---

    def __neg__(self):
        return -self.__wrapped__

    def __pos__(self):
        return +self.__wrapped__

    def __abs__(self):
        return abs(self.__wrapped__)

    def __invert__(self):
        return ~self.__wrapped__

    def __int__(self):
        return int(self.__wrapped__)

    if PY2:
        def __long__(self):
            return long(self.__wrapped__)  # noqa

    def __float__(self):
        return float(self.__wrapped__)

    def __oct__(self):
        return oct(self.__wrapped__)

    def __hex__(self):
        return hex(self.__wrapped__)

    def __index__(self):
        if hasattr(self.__wrapped__, '__index__'):
            return operator.index(self.__wrapped__)
        else:
            return int(self.__wrapped__)

    # --- container protocol ---

    def __len__(self):
        return len(self.__wrapped__)

    def __contains__(self, value):
        return value in self.__wrapped__

    def __getitem__(self, key):
        return self.__wrapped__[key]

    def __setitem__(self, key, value):
        self.__wrapped__[key] = value

    def __delitem__(self, key):
        del self.__wrapped__[key]

    def __getslice__(self, i, j):
        # Python 2 only; slicing goes through __getitem__ on Python 3.
        return self.__wrapped__[i:j]

    def __setslice__(self, i, j, value):
        self.__wrapped__[i:j] = value

    def __delslice__(self, i, j):
        del self.__wrapped__[i:j]

    # --- context manager, iteration and call ---

    def __enter__(self):
        return self.__wrapped__.__enter__()

    def __exit__(self, *args, **kwargs):
        return self.__wrapped__.__exit__(*args, **kwargs)

    def __iter__(self):
        return iter(self.__wrapped__)

    def __next__(self):
        return next(self.__wrapped__)

    def __call__(self, *args, **kwargs):
        return self.__wrapped__(*args, **kwargs)

    # Pickling reduces to the materialized target; the proxy wrapper itself
    # is not preserved across a round-trip.

    def __reduce__(self):
        return identity, (self.__wrapped__,)

    def __reduce_ex__(self, protocol):
        return identity, (self.__wrapped__,)

    if await_:
        # Async protocol support is only wired in when the running Python
        # provides 'await' (see .utils).
        from .utils import __aenter__
        from .utils import __aexit__
        from .utils import __aiter__
        from .utils import __anext__
        from .utils import __await__
        __aiter__, __anext__, __await__, __aenter__, __aexit__  # noqa
| [
"michaelokojie1@gmail.com"
] | michaelokojie1@gmail.com |
7274e0ec3bae37dc044fe026a5f528c981f2e609 | 65a3b321cb2e39902b20aa184483f756e615e442 | /SyntheticGenerator/Distribution/Distribution.py | 3e38a3a72b92c19bc23d06923e52c933f3f614a4 | [
"MIT"
] | permissive | duncankmckinnon/SyntheticDataGenerator | cfa8c32d421e9753c0fcabe2771f6faa6c474542 | 5a69401d6920e640af30f998b8ed2f8d511fe811 | refs/heads/master | 2020-12-10T22:14:58.406762 | 2020-01-16T20:40:36 | 2020-01-16T20:40:36 | 233,727,226 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,079 | py | import numpy as np
class Distribution:
    """
    Distribution class - generates values to be used when weighting
    attribute prevalence

    Properties:
        - name (str) - the object name (Distribution)
        - type (str) - the type of distribution (constant, uniform, etc.)
        - weights (np.array) - the array of weights corresponding to unique values of an attribute

    Implements:
        - generate(n -> int, sparsity -> float, **kwargs -> dict) --> list
    """
    def __init__(self, distribution_type, weights=None):
        self.name = 'Distribution'
        self.type = distribution_type
        self.weights = weights

    def generate(self, n, sparsity=0, **kwargs):
        """
        generate a distribution of weights for n cardinality attribute
        with sparsity% missing data

        Parameters:
            - n (0 < integer) - number of unique attribute values
            - sparsity (0 <= float <= 1) - percentage of missing values (None)
            - **kwargs - extra keyword arguments (consumed by subclasses)
        """
        # 'is None' avoids NumPy's elementwise comparison when weights is an
        # array (the original '== None' would raise on truth-testing).
        if self.weights is None:
            self.weights = np.array([1 / n] * n)
        # BUG FIX: was self.sparse_scale(sparsity), which raised
        # AttributeError because the method is named _sparse_scale_.
        self._sparse_scale_(sparsity)
        return list(self.weights)

    def _sparse_scale_(self, sparsity):
        """
        scale weights to collectively sum to 100 - (100 * sparsity%)

        Parameters:
            - sparsity (0 <= float <= 1) - percentage of missing values (None)
        """
        # Shift negative weights up so every weight is non-negative before
        # normalizing.
        minw = np.min(self.weights)
        if minw < 0:
            self.weights -= minw
        self.weights = (100 * (((1 - sparsity) * (self.weights)) / np.sum(self.weights)))

    def __str__(self):
        return '{}{}():{}'.format(self.type, self.name, self.weights)

    def __repr__(self):
        # BUG FIX: was misspelled __rep__, so repr() never used it.
        return self.__str__()
class UniformDistribution(Distribution):
    """Weights drawn i.i.d. from the uniform distribution on [0, 1)."""

    def __init__(self):
        super().__init__('Uniform')

    def generate(self, n, sparsity=0, **kwargs):
        """Draw *n* uniform weights, then rescale them for *sparsity*."""
        self.weights = np.random.uniform(size=n)
        self._sparse_scale_(sparsity)
        return list(self.weights)
class NormalDistribution(Distribution):
    """Weights drawn from a zero-mean normal distribution."""

    def __init__(self):
        super().__init__('Normal')

    def generate(self, n, sparsity=0, **kwargs):
        """Draw *n* normal weights; ``scale`` (std dev, default 1.0) may be
        passed as a keyword argument."""
        spread = kwargs.get('scale', 1.0)
        self.weights = np.random.normal(size=n, scale=spread)
        self._sparse_scale_(sparsity)
        return list(self.weights)
class PowerLawDistribution(Distribution):
    """Weights drawn from a power-law distribution with exponent ``a``."""

    def __init__(self):
        super().__init__('Power')

    def generate(self, n, sparsity=0, **kwargs):
        """Draw *n* power-law weights; ``a`` (default 0.5) must be > 0."""
        exponent = kwargs.get('a', 0.5)
        if exponent <= 0:
            raise AttributeError('Parameter a must be > 0')
        self.weights = np.random.power(size=n, a=exponent)
        self._sparse_scale_(sparsity)
        return list(self.weights)
class ExponentialDistribution(Distribution):
    """Weights drawn from an exponential distribution."""

    def __init__(self):
        super().__init__('Exponential')

    def generate(self, n, sparsity=0, **kwargs):
        """Draw *n* exponential weights; ``scale`` (default 2) must be nonzero."""
        spread = kwargs.get('scale', 2)
        if spread == 0:
            raise AttributeError('scale cannot be 0')
        self.weights = np.random.exponential(size=n, scale=spread)
        self._sparse_scale_(sparsity)
        return list(self.weights)
class LaplaceDistribution(Distribution):
    """Weights drawn from a Laplace (double exponential) distribution."""

    def __init__(self):
        super().__init__('Laplace')

    def generate(self, n, sparsity=0, **kwargs):
        """Draw *n* Laplace weights; ``loc`` (default 0) and ``scale``
        (default 2, must be nonzero) may be passed as keyword arguments."""
        center = kwargs.get('loc', 0)
        spread = kwargs.get('scale', 2)
        if spread == 0:
            raise AttributeError('scale cannot be 0')
        self.weights = np.random.laplace(size=n, loc=center, scale=spread)
        self._sparse_scale_(sparsity)
        return list(self.weights)
| [
"noreply@github.com"
] | duncankmckinnon.noreply@github.com |
5d579d00095c6fb5555a6ecc7604850a52b775ad | 272c2841e9c573c84db4e66dbdde991791d37a65 | /plz.py | b88f32ebf02481cea8d55d2201720ef40baeb239 | [] | no_license | LiamMZ/Robotics-Project | 88296bec54e0f72fe48ce586719ba9b1511c9193 | 488e1db801174d3601ff15c1a1fab79e41531f2f | refs/heads/master | 2021-08-06T11:31:39.388739 | 2019-12-18T21:15:23 | 2019-12-18T21:15:23 | 224,291,728 | 0 | 0 | null | 2019-12-12T18:43:09 | 2019-11-26T21:46:04 | JavaScript | UTF-8 | Python | false | false | 2,663 | py | import intera_interface
import rospy
import copy
from geometry_msgs.msg import Pose, Point, Quaternion
g_limb = None
g_orientation_hand_down = None
g_position_neutral = None
def init():
    """Initialize the ROS node, the Sawyer right-arm limb handle, and the
    module-level pose constants used by :func:`main`."""
    global g_limb, g_orientation_hand_down, g_position_neutral
    rospy.init_node('cairo_sawyer_ik_example')
    g_limb = intera_interface.Limb('right')
    # This quaternion will have the hand face straight down (ideal for picking tasks)
    g_orientation_hand_down = Quaternion()
    g_orientation_hand_down.x = 0.704238785359
    g_orientation_hand_down.y =0.709956638597
    g_orientation_hand_down.z = -0.00229009932359
    g_orientation_hand_down.w = 0.00201493272073
    # This is the default neutral position for the robot's hand (no guarantee this will move the joints to neutral though)
    g_position_neutral = Point()
    g_position_neutral.x = 0.449559195663
    g_position_neutral.y = 0.16070379419
    g_position_neutral.z = 0.212938808947
def main():
    """Move the Sawyer arm to neutral, then 20 cm away in x and y via an
    IK solve, logging the hand pose and joint angles before and after."""
    global g_limb, g_position_neutral, g_orientation_hand_down
    init()
    # Move the arm to its neutral position
    g_limb.move_to_neutral()
    rospy.loginfo("Old Hand Pose:\n %s" % str(g_limb._tip_states.states[0].pose))
    rospy.loginfo("Old Joint Angles:\n %s" % str(g_limb.joint_angles()))
    # Create a new pose (Position and Orientation) to solve for
    target_pose = Pose()
    target_pose.position = copy.deepcopy(g_position_neutral)
    target_pose.orientation = copy.deepcopy(g_orientation_hand_down)
    # Offset the target 20cm along both x and y; the hand keeps facing down.
    target_pose.position.x += 0.2 # Add 20cm to the x axis position of the hand
    target_pose.position.y += 0.2
    # Call the IK service to solve for joint angles for the desired pose
    target_joint_angles = g_limb.ik_request(target_pose, "right_hand")
    # The IK Service returns false if it can't find a joint configuration
    if target_joint_angles is False:
        rospy.logerr("Couldn't solve for position %s" % str(target_pose))
        return
    # Set the robot speed (takes a value between 0 and 1)
    g_limb.set_joint_position_speed(0.3)
    # Send the robot arm to the joint angles in target_joint_angles, wait up to 2 seconds to finish
    g_limb.move_to_joint_positions(target_joint_angles, timeout=2)
    # Find the new coordinates of the hand and the angles the motors are currently at
    new_hand_pose = copy.deepcopy(g_limb._tip_states.states[0].pose)
    new_angles = g_limb.joint_angles()
    rospy.loginfo("New Hand Pose:\n %s" % str(new_hand_pose))
    rospy.loginfo("Target Joint Angles:\n %s" % str(target_joint_angles))
    rospy.loginfo("New Joint Angles:\n %s" % str(new_angles))
if __name__ == "__main__":
main()
| [
"noreply@github.com"
] | LiamMZ.noreply@github.com |
a05e4847b2d7972adce0b18ac5c613042fe367e5 | c658112eb18c9533687265af06f8203ad26201f6 | /patient/management/commands/sync_employee.py | de06002513e35c90440df1bd4f4e522084414c00 | [] | no_license | yagami-cerberus/ukump_linebot | 7f3281779d33a15f0a73a4d1852f9752284832ff | af72a4aabc0bac098f93c2aeea2685d832a850fe | refs/heads/master | 2021-05-11T13:56:50.174425 | 2018-01-24T02:39:22 | 2018-01-24T02:39:22 | 117,690,055 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 836 | py |
from django.core.management.base import BaseCommand
from ukumpcore.crm.agile import get_employees_from_crm_document, update_employee_from_from_csv
class Command(BaseCommand):
    help = 'Sync employee from CRM document'

    def handle(self, *args, **options):
        """Pull every employee document from the CRM and sync it into the
        local store, tallying how each record was matched."""
        counts = {'created': 0, 'hr_id': 0, 'email': 0}
        for doc in get_employees_from_crm_document():
            was_created, by_hr_id, by_email = update_employee_from_from_csv(doc)
            # Count only the first matching outcome, mirroring precedence:
            # created > matched-by-hr-id > matched-by-email.
            if was_created:
                counts['created'] += 1
            elif by_hr_id:
                counts['hr_id'] += 1
            elif by_email:
                counts['email'] += 1
        print('Created: %i\nUpdated from hr id: %i\nUpdated from email: %i' % (
            counts['created'], counts['hr_id'], counts['email']))
| [
"yagami-cerberus@users.noreply.github.com"
] | yagami-cerberus@users.noreply.github.com |
47d761b1e62239c2d51a5ce04645eb29f2697524 | d7b3865f1669739ca2487f65b4af24ec0087f8cf | /plot/interference_3d_static.py | 7dd4d4f6bfe8e15ce586d7a4a7e01ac374e724ce | [] | no_license | agutikov/stuff | 78bee46eacfb67da4ce445dd5b7bd90924aad7de | 41ab186219440de6acfa537758dcfd66c35d320d | refs/heads/master | 2023-01-21T13:27:35.569670 | 2023-01-10T04:07:26 | 2023-01-10T04:07:26 | 8,577,077 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 451 | py | #!/usr/bin/env ipython3
from mpl_toolkits.mplot3d import axes3d
import matplotlib.pyplot as plt
import numpy as np
# Static 3-D interference pattern of two point sources on the x axis,
# one at the origin and one at (4*pi, 0), each emitting sin(r)/r waves.
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
X = np.linspace(-5*np.pi, 10*np.pi, 100)
Y = np.linspace(-5*np.pi, 10*np.pi, 100)
X, Y = np.meshgrid(X, Y)
R1 = np.sqrt(X**2 + Y**2)  # distance from the source at the origin
R2 = np.sqrt((X-4*np.pi)**2 + Y**2)  # distance from the source at (4*pi, 0)
Z = np.sin(R1)/R1 + np.sin(R2)/R2  # superposition of the two radial waves
ax.plot_wireframe(X, Y, Z, rstride=1, cstride=1)
plt.show()
| [
"gutikoff@gmail.com"
] | gutikoff@gmail.com |
c3cfc3ac8bc731907ffbc6077dd34a584ed27d63 | 6881046de7ea357c00003c4e0aefb82b6ce4156f | /app/recipe/views.py | ac4f8d3a3af79fc6d77fea6d1a60b9bb3256cc20 | [
"MIT"
] | permissive | MGellman88/recipe-app-api | 9fa9bdda5bec7c958d218f7c393129f3a33975fb | 2831b67f8a10b01cfc5278f47981dd9ca2fb25df | refs/heads/master | 2020-04-24T07:03:53.974919 | 2019-03-25T23:00:26 | 2019-03-25T23:00:26 | 171,786,162 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,360 | py | from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework import viewsets, mixins, status
from rest_framework.authentication import TokenAuthentication
from rest_framework.permissions import IsAuthenticated
from core.models import Tag, Ingredient, Recipe
from recipe import serializers
class BaseRecipeAttrViewSet(viewsets.GenericViewSet,
                            mixins.ListModelMixin,
                            mixins.CreateModelMixin):
    """Base viewset for user-owned recipe attributes (tags, ingredients)."""
    authentication_classes = (TokenAuthentication,)
    permission_classes = (IsAuthenticated,)

    def get_queryset(self):
        """Limit results to objects owned by the requesting user."""
        owner = self.request.user
        return self.queryset.filter(user=owner).order_by('-name')

    def perform_create(self, serializer):
        """Attach the requesting user as owner when saving a new object."""
        serializer.save(user=self.request.user)
class TagViewSet(BaseRecipeAttrViewSet):
    """Manage Tags in the database"""
    # List/create behaviour and per-user filtering come from the base class.
    queryset = Tag.objects.all()
    serializer_class = serializers.TagSerializer
class IngredientViewSet(BaseRecipeAttrViewSet):
    """Manage ingredients in the database"""
    # List/create behaviour and per-user filtering come from the base class.
    queryset = Ingredient.objects.all()
    serializer_class = serializers.IngredientSerializer
class RecipeViewSet(viewsets.ModelViewSet):
    """Manage recipes in the database"""
    serializer_class = serializers.RecipeSerializer
    queryset = Recipe.objects.all()
    authentication_classes = (TokenAuthentication,)
    permission_classes = (IsAuthenticated,)

    def _params_to_ints(self, qs):
        """Convert a comma-separated string of IDs to a list of integers"""
        return [int(str_id) for str_id in qs.split(',')]

    def get_queryset(self):
        """Retrieve the recipes for the authenticated user, optionally
        filtered by the ?tags=1,2 and/or ?ingredients=3,4 query params."""
        tags = self.request.query_params.get('tags')
        ingredients = self.request.query_params.get('ingredients')
        queryset = self.queryset
        if tags:
            tag_ids = self._params_to_ints(tags)
            queryset = queryset.filter(tags__id__in=tag_ids)
        if ingredients:
            ingredient_ids = self._params_to_ints(ingredients)
            queryset = queryset.filter(ingredients__id__in=ingredient_ids)
        # Bug fix: return the locally filtered queryset. The original
        # returned self.queryset.filter(...), silently discarding the
        # tag/ingredient filters built above.
        return queryset.filter(user=self.request.user)

    def get_serializer_class(self):
        """Return the appropriate serializer class for the current action"""
        if self.action == 'retrieve':
            return serializers.RecipeDetailSerializer
        elif self.action == 'upload_image':
            return serializers.RecipeImageSerializer
        return self.serializer_class

    def perform_create(self, serializer):
        """Create a new recipe owned by the authenticated user"""
        serializer.save(user=self.request.user)

    @action(methods=['POST'], detail=True, url_path='upload-image')
    def upload_image(self, request, pk=None):
        """Upload an image to a recipe"""
        recipe = self.get_object()
        serializer = self.get_serializer(
            recipe,
            data=request.data
        )
        if serializer.is_valid():
            serializer.save()
            return Response(
                serializer.data,
                status=status.HTTP_200_OK
            )
        return Response(
            serializer.errors,
            status=status.HTTP_400_BAD_REQUEST
        )
| [
"mikegellman88@gmail.com"
] | mikegellman88@gmail.com |
b8a96f06ba637a59d59b4af4aed4b0643dd6e7bf | 02ee9dd5b30093e6435340c460f245f0d922a406 | /cloudorite/wsgi.py | a27d7450e764e6ec8c6653b67f9f4e55e4ffad4f | [] | no_license | aavilagallego/Cloud-o-rite | 547c93908d5b62d80d9676b21ed755d4a3518389 | b7853d1e9ca6887f3287af1ce9834ffb54471812 | refs/heads/master | 2021-01-01T15:30:41.110224 | 2012-09-26T19:08:47 | 2012-09-26T19:08:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,142 | py | """
WSGI config for cloudorite project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# Point Django at this project's settings before the application is built.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cloudorite.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| [
"aavila@stock.ovh.net"
] | aavila@stock.ovh.net |
f1310946b614eb233fa50231567ff73d07c47218 | b02b1df555c9ede3bc422030b681966e0d2a389c | /IC/Stacks_Queues/max_stack.py | 004a81f9c34fb58e86941c05c92dded89f81e63a | [] | no_license | april-april/CTCI | 015709393550ff582c8ef775d0771b47209dc881 | cffaffa36cef807ae59d199fc7c1a9b0e4ed12da | refs/heads/master | 2020-03-20T16:55:31.584943 | 2018-09-07T04:32:28 | 2018-09-07T04:32:28 | 137,548,589 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 975 | py | class MaxStack(object):
    def __init__(self):
        # Two parallel stacks: `stack` holds every item, `maxes_stack` holds
        # the running maxima so get_max() is O(1).
        # NOTE(review): `Stack` is not defined or imported in this file --
        # presumably a plain stack with push/pop/peek; confirm where it lives.
        self.stack = Stack()
        self.maxes_stack = Stack()
    def push(self, item):
        """Add a new item to the top of our stack."""
        self.stack.push(item)
        # If the item is greater than or equal to the last item in maxes_stack,
        # it's the new max! So we'll add it to maxes_stack.
        if self.maxes_stack.peek() is None or item >= self.maxes_stack.peek():
            self.maxes_stack.push(item)
    def pop(self):
        """Remove and return the top item from our stack."""
        item = self.stack.pop()
        # If it equals the top item in maxes_stack, they must have been pushed
        # in together. So we'll pop it out of maxes_stack too.
        if item == self.maxes_stack.peek():
            self.maxes_stack.pop()
        return item
    def get_max(self):
        """The last item in maxes_stack is the max item in our stack."""
        return self.maxes_stack.peek() | [
"vincentluoster93@gmail.com"
] | vincentluoster93@gmail.com |
fc451ae5171ff6413eb5371ac7f7c792cf544866 | e874e3b4312b2beebaa42fa1489b50c618055190 | /venv/Lib/site-packages/onyx/core/datatypes/rdate.py | 96d0f5c2af82ec4900dac49253b490f6f1a8a386 | [] | no_license | CarlosDinart/PUC-SP | 611a9acb6a82b7db2174d2d439b5666db48a530e | 5f5f1ea4b9c55c7d20b2dcd92c461b3d8ebbb664 | refs/heads/master | 2023-01-23T06:46:42.492764 | 2020-12-09T19:41:01 | 2020-12-09T19:41:01 | 320,058,535 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,973 | py | ###############################################################################
#
# Copyright: (c) 2015 Carlo Sbraccia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###############################################################################
from .date import Date
from .holiday_cal import HolidayCalendar
from dateutil.relativedelta import relativedelta, MO, WE, FR
import datetime
import re
__all__ = ["RDate"]
QUARTER_FIRST_MTH = [1, 1, 1, 4, 4, 4, 7, 7, 7, 10, 10, 10]
SPLITTER = re.compile("([\+,\-]\d*\w+)")
OPERANDS = {"+", "-"}
###############################################################################
class RDate(object):
    """
    A date shift object that can be added to Dates to generate shifted dates.
    """
    __slots__ = ("date_rule", "calendar")
    # -------------------------------------------------------------------------
    def __init__(self, date_rule, calendar=None):
        """
        Inputs:
        date_rule - a string specifying relative shift (see below for valid
                    date rules).
        calendar  - a holiday calendar used to identify business days
        Rule definitions:
            d = add calendar day
            b = add business day
            w = add calendar week
            m = add calendar month
            y = add calendar year
            c = go to the required day in the month
            e = go to end of month (ignores num)
            J = go to first calendar day of month (ignores num)
            M = go to closest Monday as specified by num
            W = go to closest Wednesday as specified by num
            F = go to closest Friday as specified by num
            q = go to beginning of the quarter (ignores num)
            Q = go to end of the quarter (ignores num)
            A = go to beginning of the year (ignores num)
            E = go to end of the year (ignores num)
        """
        # --- use parent class setattr because RDate is implemented as an
        # immutable class
        super().__setattr__("date_rule", date_rule)
        super().__setattr__("calendar", calendar or HolidayCalendar())
    # -------------------------------------------------------------------------
    def __setattr__(self, attr, value):
        raise AttributeError("attribute '{0:s}' of RDate is not settable "
                             "as RDate is an immutable class".format(attr))
    # -------------------------------------------------------------------------
    def apply_rule(self, d):
        """Apply each atomic sub-rule of date_rule to d in sequence and
        return the shifted Date."""
        # --- rule processing. If no operator is defined assume it's "+"
        if self.date_rule[0] in OPERANDS:
            atomic = SPLITTER.split(self.date_rule)[1::2]
        else:
            atomic = SPLITTER.split("+" + self.date_rule)[1::2]
        # --- iteratively apply each atomic rule
        for rule in atomic:
            # op is the signed count (e.g. "+3"), r the one-letter rule code
            op = rule[0:-1]
            r = rule[-1]
            if op in OPERANDS:
                op += "1"
            # --- look for the proper rule to apply
            if r == "d":
                d += relativedelta(days=int(op))
            elif r == "b":
                nb = int(op[1:])
                op1 = int(op[0] + "1")
                if nb == 0 and self.calendar.is_holiday(d):
                    # --- go to the next (or previous) business day only if
                    # d is not already a business day
                    nb = 1
                for i in range(nb):
                    d += relativedelta(days=op1)
                    while self.calendar.is_holiday(d):
                        d += relativedelta(days=op1)
            elif r == "w":
                d += relativedelta(weeks=int(op))
            elif r == "m":
                d += relativedelta(months=int(op))
            elif r == "y":
                d += relativedelta(years=int(op))
            elif r == "c":
                d += relativedelta(day=int(op))
            elif r == "e":
                d += relativedelta(day=31)
            elif r == "J":
                d += relativedelta(day=1)
            elif r == "M":
                d += relativedelta(weekday=MO(int(op)))
            elif r == "W":
                d += relativedelta(weekday=WE(int(op)))
            elif r == "F":
                d += relativedelta(weekday=FR(int(op)))
            elif r == "q":
                d = d.replace(day=1, month=QUARTER_FIRST_MTH[d.month-1])
            elif r == "Q":
                d = d.replace(day=1, month=QUARTER_FIRST_MTH[d.month-1]+2)
                d += relativedelta(day=31)
            elif r == "A":
                d = d.replace(day=1, month=1)
            elif r == "E":
                d = d.replace(day=31, month=12)
            else:
                raise NameError("Atomic rule {0:s} is unknown. "
                                "Full rule is {1:s}".format(r, rule))
        # --- conversion to Date is needed here because applying a
        # relativedelta to a Date returns a datetime object
        return Date.parse(d)
    # -------------------------------------------------------------------------
    # relative date algebra
    def __radd__(self, date):
        # --- check against the superclass datetime.datetime
        if not isinstance(date, (datetime.date, datetime.datetime)):
            raise ValueError("RDate can only be applied to a Date. "
                             "{0!s} was passed instead".format(date.__class__))
        return self.apply_rule(date)
| [
"cdinart@hotmail.com"
] | cdinart@hotmail.com |
b64be8f6065e5f3cd6c50ef246393c68a057757f | 767276e6c64a091c9b700330de2c0063a36e9259 | /search.py | 6a611f62e4890827f2ea951d919380a4199bd2dc | [] | no_license | PykeChen/pythonBasicGramer | 02ad11ceada4ed45ab9f503d90b40e2c12946e22 | 2599be5274ffb736973a7cef69751e699ff98095 | refs/heads/master | 2020-05-26T23:36:34.560476 | 2019-07-28T13:35:55 | 2019-07-28T13:35:55 | 188,412,856 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,674 | py | """ 深度搜索和广度搜索算法 """
import queue
class Node:
    """Binary-tree node: a value plus left/right child links."""

    def __init__(self, value, left, right):
        self.value, self.left, self.right = value, left, right
# Build a complete binary tree:        1
#                                    /   \
#                                   2     3
#                                  / \   / \
#                                 4   5 6   7
node4 = Node(4, None, None)
node5 = Node(5, None, None)
node6 = Node(6, None, None)
node7 = Node(7, None, None)
node2 = Node(2, node4, node5)
node3 = Node(3, node6, node7)
node1 = Node(1, node2, node3)
# Breadth-first search: visit level by level using a FIFO queue
myQue = queue.Queue()
myQue.put(node1)
while not myQue.empty():
    curNode = myQue.get()
    if curNode.left is not None:
        myQue.put(curNode.left)
    if(curNode.right is not None):
        myQue.put(curNode.right)
    print(curNode.value, end=', ')
print('\n-------')
# Depth-first search -- recursive version
def dsfSearch(curnode):
    """Pre-order DFS: print each node's value followed by '-'."""
    if curnode is None:
        return
    print(curnode.value, end='-')
    for child in (curnode.left, curnode.right):
        dsfSearch(child)
# Pre-order traversal of the tree above prints: 1-2-4-5-3-6-7-
dsfSearch(node1)
print('\n-------')
# Depth-first search -- recursive version that returns a list
def dsfSearchList(curnode):
    """Pre-order DFS returning the visited values as a list."""
    if curnode is None:
        return []
    left_vals = dsfSearchList(curnode.left)
    right_vals = dsfSearchList(curnode.right)
    return [curnode.value] + left_vals + right_vals
# Same pre-order traversal, collected into a list instead of printed
sdfList = dsfSearchList(node1)
print(sdfList)
# Depth-first search -- iterative (non-recursive) version
def dsfSearchStack(myList, curnode):
    """Iterative pre-order DFS: append each visited value to myList.

    Bug fix: the original tested one child but pushed the other
    (`if left: push right` / `if right: push left`), which pushed None and
    crashed (None.value) on any lopsided tree. Each child is now tested and
    pushed consistently. A plain list replaces queue.deque, which relied on
    an undocumented re-export inside the queue module.
    """
    if curnode is None:
        return
    stack = [curnode]
    while stack:
        node = stack.pop()
        myList.append(node.value)
        # Push the right child first so the left child is visited first.
        if node.right is not None:
            stack.append(node.right)
        if node.left is not None:
            stack.append(node.left)
# Iterative pre-order traversal into an explicit list
myList = []
dsfSearchStack(myList, node1)
print(myList)
| [
"cpy@meitu.com"
] | cpy@meitu.com |
e3b445263cacce5e32b206c66090017617afc249 | 20d3f13b70142794a34e7029b939eb680583d3db | /Kick Start 2019/Practice Round/mural.py | 85f40c75a8e88b2f6241c8ae961f1bcc9dbdac76 | [] | no_license | ninilo97/CodeWithGoogle-2019 | 6b5c6c3e77a683de78e18c9eecb58453a62cfe74 | 4874c7a85f4c83193736c945b4ac8ca1d815cf35 | refs/heads/master | 2020-05-05T10:33:58.448778 | 2019-07-12T15:38:00 | 2019-07-12T15:38:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 812 | py | def simple_beauty(mural):
    # A "window" is half the mural rounded up; score each of the first
    # mural_size_to_check start positions by its digit sum and keep the best.
    mural_size_to_check = int((len(mural)+1)/2)
    lst = []
    for i in range(mural_size_to_check):
        lst.append(sum(mural[i:i+mural_size_to_check]))
    return max(lst)
def complex_beauty():
    """Read one test case from stdin and return the maximum digit sum over
    all contiguous windows of ceil(n/2) digits (sliding-window version)."""
    _ = int(input())  # declared length; the digit string's own length is used
    digits = [int(ch) for ch in input()]
    window = int((len(digits) + 1) / 2)
    extra = 0 if len(digits) % 2 else 1
    sums = [sum(digits[:window])]
    # Slide the window: drop the digit leaving on the left, add the new one.
    for start in range(1, window + extra):
        sums.append(sums[start - 1] - digits[start - 1] + digits[start + window - 1])
    return max(sums)
#nb=+NB
if __name__ == "__main__":
    # One line per test case: print "Case #k: <answer>" for each of t cases.
    t = int(input())
    for i in range(1,t+1):
        #print("Case #{}: {}".format(i,simple_beauty(mural))) //failed submission
        print("Case #{}: {}".format(i,complex_beauty()))
| [
"nikhillondhe36@gmail.com"
] | nikhillondhe36@gmail.com |
d90237cf9cb94f95a505e029df2cd390ab0b7cb5 | 71be505051c80905ec0e1712bbb8b6d58e600c01 | /baseline/optimizer/__init__.py | 6e375ac06e1aa463ca234e5ba6634d75f7e371a8 | [] | no_license | justHungryMan/tf-template | 77ae46db0ae72e44cfb6805960f5433fe3bed749 | 82680686f85d58580fbfd4eb6b2949f8593a15dc | refs/heads/main | 2023-08-22T09:15:39.005135 | 2021-10-13T04:55:50 | 2021-10-13T04:55:50 | 411,306,897 | 0 | 0 | null | 2021-10-13T04:55:50 | 2021-09-28T14:05:06 | null | UTF-8 | Python | false | false | 948 | py | import tensorflow as tf
import tensorflow_addons as tfa
from baseline.utils import get_logger
log = get_logger(__name__)
def create(config, model=None):
    """Build a Keras/TensorFlow-Addons optimizer from a config dict.

    config["type"] selects the optimizer (case-insensitive) and
    config["params"] is forwarded to its constructor. Raises AttributeError
    for unknown optimizer types.
    """
    # Lambdas defer the tf/tfa attribute lookups until a type matches.
    factories = {
        "sgd": lambda params: tf.keras.optimizers.SGD(**params),
        "sgdw": lambda params: tfa.optimizers.SGDW(**params),
        "adam": lambda params: tf.keras.optimizers.Adam(**params),
        "adamw": lambda params: tfa.optimizers.AdamW(**params),
        "lamb": lambda params: tfa.optimizers.LAMB(**params),
    }
    opt_type = config["type"].lower()
    factory = factories.get(opt_type)
    if factory is None:
        raise AttributeError(f"not support optimizer config: {config}")
    log.info(f"[optimizer] create {opt_type}")
    return factory(config["params"])
| [
"justhungryman@gmail.com"
] | justhungryman@gmail.com |
6828a845e8b4a33abe9c434db3f96ee98f8001fa | 36b624c0b7f0e691772f7521695c02f0709f9f89 | /day8.py | cc9d00263a5a71ed2da284db2e9df17031b7dd4f | [] | no_license | alex-huff/advent-of-code-2020 | 4ee30fdcd6b67aceb0c0fb919de9a18f61d9987c | 701cfcba1c952710c0bf0b9336f670141a9ac276 | refs/heads/main | 2023-02-02T13:51:37.722815 | 2020-12-22T01:32:21 | 2020-12-22T01:32:21 | 323,081,866 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,564 | py | def getValueBeforeLoop(code):
    # Run the boot-code program until an instruction is about to repeat
    # (infinite loop) or the final instruction index is reached.
    # Returns (accumulator value, finished-cleanly flag).
    # NOTE(review): termination tests pc == len(code) - 1, i.e. it stops on
    # reaching the last instruction without executing it; the conventional
    # halting condition is pc == len(code). Confirm against expected output.
    acc = 0
    pc = 0
    executionSet = set()  # instruction indices already executed
    finished = False
    while True:
        if pc == len(code) - 1:
            finished = True
            break
        if pc in executionSet:  # about to re-execute: infinite loop detected
            break
        inst = code[pc][0]
        value = code[pc][1]
        executionSet.add(pc)
        if inst == NOP:
            pc += 1
        elif inst == ACC:
            acc += value
            pc += 1
        elif inst == JMP:
            pc += value
    return (acc, finished)
def getInstruction(line, iSet):
    """Parse 'op ±N' into [opcode-index-in-iSet, signed int argument]."""
    mnemonic, _, arg = line.partition(' ')
    return [iSet.index(mnemonic), int(arg)]
raw_input = []
# Opcode constants used by getValueBeforeLoop
NOP = 0
ACC = 1
JMP = 2
instructionSet = ['nop', 'acc', 'jmp']
with open('input/day8input.txt') as file:
    raw_input = [line.rstrip() for line in file]
code = [getInstruction(line, instructionSet) for line in raw_input]
# part 1
print(getValueBeforeLoop(code))
# part 2
# Flip one nop<->jmp at a time (in place); the variant that terminates
# cleanly is the answer. The flip is reverted when it doesn't terminate.
for i, operation in enumerate(code):
    if operation[0] == NOP:
        operation[0] = JMP
        result = getValueBeforeLoop(code)
        if result[1] == True:
            print(i, result[0], operation[0], operation[1])
            break
        else:
            # revert
            operation[0] = NOP
    elif operation[0] == JMP:
        operation[0] = NOP
        result = getValueBeforeLoop(code)
        if result[1] == True:
            print(i, result[0], operation[0], operation[1])
            break
        else:
            # revert
            operation[0] = JMP
| [
"unconfigured@null.spigotmc.org"
] | unconfigured@null.spigotmc.org |
e3ffc675d896a299e817c812e15db8153b2ae2e7 | 8d09c41c5d91c01a6f39b8c57b3003cd7ee6e5df | /gena/codes/mesh_parse/sup_mirn_bar.py | 9cf679dce9ba50f098484132d2c91745e8d622f8 | [] | no_license | w4-dykim/Capstone-2017-2 | 56007dcfc46c07c661d621cf4a7369389a198a9d | db9efba5ff77c3ad1f2c09f47a6018997f579c45 | refs/heads/master | 2021-01-02T09:28:17.517931 | 2017-08-02T16:10:22 | 2017-08-02T16:11:12 | 99,217,084 | 0 | 0 | null | 2017-08-03T09:45:13 | 2017-08-03T09:45:13 | null | UTF-8 | Python | false | false | 499 | py | import pymysql
import re
# For every SUP row whose S_NAME contains 'MIRN-', strip all hyphens from
# its PROCESSED column and write the result back (autocommit is on).
conn = pymysql.connect(autocommit ='True', host='localhost', user='hogking', password='',db='HUBMED', charset='utf8')
curs = conn.cursor(pymysql.cursors.DictCursor)
query = "SELECT * FROM SUP WHERE S_NAME REGEXP 'MIRN-';"
curs.execute(query)
rows = curs.fetchall()
for row in rows:
    original = row['PROCESSED']
    processed = re.sub(r"-", r'', original)  # drop every '-' character
    query = "UPDATE SUP SET PROCESSED = (%s) WHERE S_ID = (%s)"
    curs.execute(query,(processed, row['S_ID']))
| [
"noreply@github.com"
] | w4-dykim.noreply@github.com |
8637b6b9113c9b3cdbea2416eece9da64b7b3193 | 3a967c9583209dc7b3a3528dc45f057851194327 | /isim soy isim deneme.py | a67c0530a6121a521a0bb95a41d4fea4581657b0 | [] | no_license | mechibo/Isim-soyisim-eslestirme | d87968e369828f25c32e4a437c79688db7577e05 | 93322894466a336627ceddee58cfea64db54498f | refs/heads/main | 2023-06-11T22:21:28.286014 | 2021-07-01T13:16:41 | 2021-07-01T13:16:41 | 382,038,864 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 357 | py | isimler = ["Kerim", "Tarık", "Ezgi", "Kemal", "İlkay", "Şükran", "Merve"]
soyisimler = ["Yılmaz", "Öztürk", "Dağdeviren", "Atatürk", "Dikmen", "Kaya", "Polat"]
# Pair each first name with the surname at the same index
liste = list(zip(isimler,soyisimler))
def eşleştirme(demet):
    """Join a (first-name, surname) pair with a tab character."""
    ad, soyad = demet[0], demet[1]
    return ad + "\t" + soyad
# Format every (name, surname) pair and print one per line
liste2 = list(map(eşleştirme,liste))
for i in liste2:
    print(i) | [
"noreply@github.com"
] | mechibo.noreply@github.com |
5fab325e48d296dbf2d5195cff7aac8bee3addb0 | 43a70287e5facf33105463dfff3e55ae380440b0 | /bg_matching/vis/demo.py | fae7664d91cfbfc4200251e2bd4a2d696ff07a6a | [] | no_license | rohitgirdhar-cmu-experimental/MemexScripts | 84a5caaaaea6b688cbe744556faee648b3956936 | c37b4b4d59118ebe0c702580541b218ab13161a2 | refs/heads/master | 2021-01-13T02:02:59.478684 | 2015-12-22T22:34:15 | 2015-12-22T22:34:15 | 33,696,076 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,917 | py | import sys
sys.path.append('/home/rgirdhar/data/Software/coding-utils/PyHTMLWriter/src');
from Element import Element
from TableRow import TableRow
from Table import Table
from TableWriter import TableWriter
import os
def readRetrievals(fpath):
    """Parse a retrieval-results file.

    Each line looks like "<rawid>;<id1>:<score1> <id2>:<score2> ...", where
    raw ids pack the image index as index * 10000.
    Returns (imids, allmatches): imids[i] is the query image index for line i
    and allmatches[i] its list of (image index, score) tuples.
    """
    f = open(fpath, 'r')
    lines = f.read().splitlines()
    f.close()
    imids = []
    allmatches = []
    for line in lines:
        imgid = int(line.split(';')[0])/10000
        imids.append(imgid)
        matches = line.split(';')[1].strip()
        # Bug fix: the original divided by an undefined name DIV (NameError
        # at runtime); every other reader in this file uses the 10000
        # id-packing factor.
        matches = [(int(m.split(':')[0])/10000, float(m.split(':')[1])) for m in matches.split()]
        allmatches.append(matches)
    return (imids, allmatches)
# (Python 2 script) Paths for the Backpage dataset visualisation page.
t = Table()
methods = ['full.txt']
imgsdir = 'http://10.1.94.128:8000/~rgirdhar/memex/dataset/0001_Backpage/Images/'
imgslistpath = '/memexdata/Dataset/processed/0001_Backpage/Images/lists/Images.txt'
respath = '/memexdata/Dataset/processed/0001_Backpage/Search/results/temp/res'
segdir = 'http://10.1.94.128:8000/~rgirdhar/memex/dataset/0001_Backpage/Features/Segments'
respath_f = '/memexdata/Dataset/processed/0001_Backpage/Search/results/temp/res_fullImg'
# Load the id -> image-filename list once; readers index it by rawid / 10000.
f = open(imgslistpath)
imgslist = f.read().splitlines()
f.close()
print len(imgslist)
def readScores(fpath):
    """Read one result file (a single line of "rawid:score" pairs) and
    return the top-50 matches as parallel (image-name, score) lists.

    Raw ids pack the image index as index * 10000, so imgslist (module
    global) is indexed with rawid // 10000. Returns ([], []) when the
    result line is empty.
    """
    # Bug fix: close the handle before the early return; the original
    # placed f.close() after `return ([], [])`, leaking the file on
    # empty results.
    f = open(fpath)
    s = f.read().splitlines()[0]
    f.close()
    if len(s) == 0:
        return ([], [])
    matches = s.strip().split()
    imnames = []
    scores = []
    for m in matches[:50]:
        temp = m.strip().split(':')
        # '//' keeps the Python 2 integer-division semantics under Python 3
        imnames.append(imgslist[int(temp[0]) // 10000])
        scores.append(float(temp[1]))
    return (imnames, scores)
def readScores2(fpath):
    """Variant of readScores for files whose packed ids are 1-based:
    imgslist is indexed with rawid // 10000 - 1. Same return contract.
    """
    # Bug fix: close the handle before the early return; the original
    # placed f.close() after `return ([], [])`, leaking the file on
    # empty results.
    f = open(fpath)
    s = f.read().splitlines()[0]
    f.close()
    if len(s) == 0:
        return ([], [])
    matches = s.strip().split()
    imnames = []
    scores = []
    for m in matches[:50]:
        temp = m.strip().split(':')
        # '//' keeps the Python 2 integer-division semantics under Python 3
        imnames.append(imgslist[int(temp[0]) // 10000 - 1])
        scores.append(float(temp[1]))
    return (imnames, scores)
t = Table()
# Header row. NOTE(review): this row is built but never passed to t.addRow;
# confirm whether the header was meant to appear in the output table.
r= TableRow(isHeader=True)
r.addElement(Element('Sno'))
r.addElement(Element('Method'))
r.addElement(Element('Query/Top Match'))
r.addElement(Element('Matches...'))
for i in range(500):
    # NOTE(review): background results come from file i+1 but full-image
    # results from i+2 -- confirm the off-by-one is intentional.
    fpath = os.path.join(respath, str(i + 1) + '.txt');
    fpath2 = os.path.join(respath_f, str(i + 2) + '.txt');
    imnames, scores = readScores(fpath)
    imnames2, scores2 = readScores2(fpath2)
    if len(imnames) == 0 or len(imnames2) == 0:
        continue
    # Row of the top-20 background-matched results, with segment overlays...
    r = TableRow(rno=i)
    r.addElement(Element('Background'))
    for j in range(20):
        e = Element()
        e.addImg(os.path.join(imgsdir, imnames[j]),
                 overlay_path=os.path.join(segdir, imnames[j]))
        e.addTxt(scores[j])
        r.addElement(e)
    t.addRow(r)
    # ...followed by a row of whole-image results for comparison.
    r = TableRow(rno=i)
    r.addElement(Element('Full Image'))
    for j in range(20):
        e = Element()
        e.addImg(os.path.join(imgsdir, imnames2[j]))
        e.addTxt(scores2[j])
        r.addElement(e)
    t.addRow(r)
tw = TableWriter(t, 'out')
tw.write()
| [
"rgirdhar@andrew.cmu.edu"
] | rgirdhar@andrew.cmu.edu |
53051da9bc0451d17f7ded9986eaae7087ff679c | f013772126aae88ca306462d26b5becf63735803 | /scanhosts/admin.py | 31537473700cbc66f7a61cbe58b79fd724144adc | [] | no_license | ninesunzhang/CMDB | 1e8fd89d31a146c5844cf8f8343cf3a404930d92 | fd765c21438d0f532a1efa9a5a5a65445870e137 | refs/heads/master | 2022-11-21T00:59:36.944450 | 2020-07-30T02:56:25 | 2020-07-30T02:56:25 | 283,654,509 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 116 | py | from django.contrib import admin
from .models import Server
# Register your models here.
# Expose the Server model in the Django admin interface.
admin.site.register(Server) | [
"823260878@qq.com"
] | 823260878@qq.com |
4a1df82c4f42ea938817956162ff327bf22882ec | f19779713d72b0471309a2813dc8efffd4017f8f | /books/migrations/0022_auto_20200819_0543.py | 145aa6588c8dcf3eace2108262d417406be8a3c9 | [] | no_license | skyeoh06/assign4-ecommence | 4fbfca1f1bff4825c30f87f83d8494a681881c6c | b2a3932ebc3b7da5bf67b4e27249824d61234260 | refs/heads/master | 2022-12-07T12:17:38.366261 | 2020-08-28T15:39:58 | 2020-08-28T15:39:58 | 285,970,589 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 800 | py | # Generated by Django 2.2.14 on 2020-08-19 05:43
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: drops the 'cover' field from Author
    # and Book and replaces it with a required 'image' URL field.
    dependencies = [
        ('books', '0021_auto_20200818_1148'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='author',
            name='cover',
        ),
        migrations.RemoveField(
            model_name='book',
            name='cover',
        ),
        migrations.AddField(
            model_name='author',
            name='image',
            field=models.URLField(default=None),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='book',
            name='image',
            field=models.URLField(default=None),
            preserve_default=False,
        ),
    ]
| [
"felyciayeoh@yahoo.com"
] | felyciayeoh@yahoo.com |
301d507040d644d6ddc8e77d11c75c42c9c382f2 | 81fe7f2faea91785ee13cb0297ef9228d832be93 | /HackerRank/Contests/101Hack42/cutting_paper_squares.py | fc51e8c2e28f90db86021f38bf68df6bf3be5567 | [] | no_license | blegloannec/CodeProblems | 92349c36e1a35cfc1c48206943d9c2686ea526f8 | 77fd0fa1f1a519d4d55265b9a7abf12f1bd7d19e | refs/heads/master | 2022-05-16T20:20:40.578760 | 2021-12-30T11:10:25 | 2022-04-22T08:11:07 | 54,330,243 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 143 | py | #!/usr/bin/env python
import sys
# any way of cutting is minimal and uses n*m-1 cuts
# (Python 2 script) Read the grid dimensions and print the cut count.
n,m = map(int,sys.stdin.readline().split())
print n*m-1
| [
"blg@gmx.com"
] | blg@gmx.com |
e3f89f91d8c69abb44601e70897a53673388b28d | 87fdd8850df131b1f5df60ecad781ce0e79b1d28 | /models4CWProject/models4CWProject/urls.py | c80e8822580ca1366f5b958b11c6c5ceffeca4e2 | [] | no_license | cs-fullstack-2019-spring/django-models4-cw-Joshtg1104 | 3e1094fca12df27abbed756b137ff5f8d14a730c | 7a3e1e8a6e965ad63f9a9cc1799f55a89aa9f4d6 | refs/heads/master | 2020-04-24T12:09:08.907751 | 2019-02-24T22:17:19 | 2019-02-24T22:17:19 | 171,947,722 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 808 | py | """models4CWProject URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/dev/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import include, path
urlpatterns = [
    path('admin/', admin.site.urls),  # Django admin site
    path('', include('models4App.urls'))  # delegate everything else to the app
]
| [
"joshtg1104@gmail.com"
] | joshtg1104@gmail.com |
300abebbed2333e357357a47e73f19850524efd9 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/392/usersdata/310/71048/submittedfiles/formula.py | ea9ddc63827097528e7fa6c3641fd8e29889c94f | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 208 | py | # -*- coding: utf-8 -*-
# STARTING
# Future value of an annuity: v = p * (((1 + i)**n - 1) / i)
p = float(input('Digite o valor de p: '))
i = float(input('Digite o valor de i: '))
n = float(input('Digite o valor de n: '))
v = p*((((1+i)**n)-1)/i)
print ('%.2f' %v )
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
1d41b0546aac11192f42c3e5c6f39a712bc4a9e3 | 546afbb1e9d0f4a7852e71f9d5f19e1e06ed71bc | /mliac3/trees.py | 83f82c72659b6001eaa2a9327c16d653fbfa80e4 | [] | no_license | QuentinYao/mlia | 93f3c4f96f97e063aa7debf32e46c86f1eb6187f | aa17d2fad9c3ebbd1a55b696038c99578f76c86b | refs/heads/master | 2020-05-23T11:38:37.240710 | 2019-05-29T01:56:28 | 2019-05-29T01:56:28 | 186,740,796 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,394 | py | '''
Created on Oct 12, 2010
Decision Tree Source Code for Machine Learning in Action Ch. 3
@author: Peter Harrington
'''
from math import log
import operator
import treePlotter
def createDataSet():
    """Return the toy classification data set and its feature labels.

    Each sample is [feature0, feature1, class-label]; the labels name the
    two boolean features.
    """
    samples = [
        [1, 1, 'yes'],
        [1, 1, 'yes'],
        [1, 0, 'no'],
        [0, 1, 'no'],
        [0, 1, 'no'],
    ]
    feature_names = ['no surfacing', 'flippers']
    return samples, feature_names
def calcShannonEnt(dataSet):
numEntries = len(dataSet)
labelCounts = {}
for featVec in dataSet: # the the number of unique elements and their occurance
currentLabel = featVec[-1]
if currentLabel not in labelCounts.keys(): labelCounts[currentLabel] = 0
labelCounts[currentLabel] += 1
shannonEnt = 0.0
for key in labelCounts:
prob = float(labelCounts[key]) / numEntries
shannonEnt -= prob * log(prob, 2) # log base 2
return shannonEnt
def splitDataSet(dataSet, axis, value):
retDataSet = []
for featVec in dataSet:
if featVec[axis] == value:
reducedFeatVec = featVec[:axis] # chop out axis used for splitting
reducedFeatVec.extend(featVec[axis + 1:])
retDataSet.append(reducedFeatVec)
return retDataSet
def chooseBestFeatureToSplit(dataSet):
numFeatures = len(dataSet[0]) - 1 # the last column is used for the labels
baseEntropy = calcShannonEnt(dataSet)
bestInfoGain = 0.0;
bestFeature = -1
for i in range(numFeatures): # iterate over all the features
featList = [example[i] for example in dataSet] # create a list of all the examples of this feature
uniqueVals = set(featList) # get a set of unique values
newEntropy = 0.0
for value in uniqueVals:
subDataSet = splitDataSet(dataSet, i, value)
prob = len(subDataSet) / float(len(dataSet))
newEntropy += prob * calcShannonEnt(subDataSet)
infoGain = baseEntropy - newEntropy # calculate the info gain; ie reduction in entropy
if (infoGain > bestInfoGain): # compare this to the best gain so far
bestInfoGain = infoGain # if better than current best, set to best
bestFeature = i
return bestFeature # returns an integer
def majorityCnt(classList):
classCount = {}
for vote in classList:
if vote not in classCount.keys(): classCount[vote] = 0
classCount[vote] += 1
sortedClassCount = sorted(classCount.items(), key=operator.itemgetter(1), reverse=True)
return sortedClassCount[0][0]
def createTree(dataSet, labels):
classList = [example[-1] for example in dataSet]
if classList.count(classList[0]) == len(classList):
return classList[0] # stop splitting when all of the classes are equal
if len(dataSet[0]) == 1: # stop splitting when there are no more features in dataSet
return majorityCnt(classList)
bestFeat = chooseBestFeatureToSplit(dataSet)
bestFeatLabel = labels[bestFeat]
myTree = {bestFeatLabel: {}}
del (labels[bestFeat])
featValues = [example[bestFeat] for example in dataSet]
uniqueVals = set(featValues)
for value in uniqueVals:
subLabels = labels[:] # copy all of labels, so trees don't mess up existing labels
myTree[bestFeatLabel][value] = createTree(splitDataSet(dataSet, bestFeat, value), subLabels)
return myTree
def classify(inputTree, featLabels, testVec):
firstStr = inputTree.keys()[0]
secondDict = inputTree[firstStr]
featIndex = featLabels.index(firstStr)
key = testVec[featIndex]
valueOfFeat = secondDict[key]
if isinstance(valueOfFeat, dict):
classLabel = classify(valueOfFeat, featLabels, testVec)
else:
classLabel = valueOfFeat
return classLabel
def storeTree(inputTree, filename):
import pickle
fw = open(filename, 'w')
pickle.dump(inputTree, fw)
fw.close()
def grabTree(filename):
import pickle
fr = open(filename)
return pickle.load(fr)
def main():
fr = open('/home/quentinyao/PycharmProjects/mlia/mliac3/lenses.txt')
lenses = [inst.strip().split('\t') for inst in fr.readlines()]
lenseLabels = ['age', 'prescript', 'astigmatic', 'tearRate']
lensesTree = createTree(lenses, lenseLabels)
print(lensesTree)
treePlotter.createPlot(lensesTree)
if __name__ == "__main__":
main()
| [
"quentinyao75@gmail.com"
] | quentinyao75@gmail.com |
b4bba40f994e2408d97fcfb2cfd899c4a3556edb | d41043cec2e07369327219c8773c0fda94f5494c | /RawData/ProtozoaAnalysis/Figure2.py | 476438f2becd6ef187979c3a9857e8136a6cc1d3 | [] | no_license | Gurkan-Korkmaz/Stop-Codon-Frequencies | 652145c7d4cbe2336f7ee7ba469e6ebc7f405dba | bffd805719b4a8e61f6cbdd2b81bb16dd18479d0 | refs/heads/master | 2021-10-18T22:46:55.198613 | 2019-02-14T22:51:21 | 2019-02-14T22:51:21 | 112,545,321 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,000 | py | from matplotlib import pyplot as plt
from matplotlib import style
import csv
import pylab
#style.use('ggplot') #Uses the style ggplot
Title = 'Protozoa - CDS Off Stops'
plt.title(Title)
plt.xlabel ('GC (%)')
plt.ylabel ('frequency')
#pylab.xlim([0.1,0.8])#Changes the range of the x axis
pylab.xlim([0,1])
GC = []
TAG = []
TGA = []
TAA = []
with open ( 'AnalyzedAll.csv', 'r') as csvfile:
reader = csv.reader(csvfile, delimiter = ',')
next(reader)
for row in reader:
temp=row[24]
GC.append(temp)
temp=row[15]
TAG.append(temp)
temp=row[14]
TGA.append(temp)
temp=row[16]
TAA.append(temp)
plt.scatter(GC,TAG, label = 'TAG')
plt.scatter(GC,TGA, label = 'TGA')
plt.scatter(GC,TAA, label = 'TAA')
plt.legend(loc=2) #Various locations available
plt.savefig('ProtozoaCDSOffStopsFigure2.png', dpi=1000)
plt.show() #Otherwise it wont show the actual plot | [
"noreply@github.com"
] | Gurkan-Korkmaz.noreply@github.com |
513c734e5f42fa3c0906eb3309cba7ef169d6c1b | a7da58ad91b007b3650003708eb91928f1e3684a | /bt5/erp5_pdm/SkinTemplateItem/portal_skins/erp5_pdm/SupplyLine_init.py | c4b6e6d09c91538afb94bb6ecf72091943ac25a9 | [] | no_license | jgpjuniorj/j | 042d1bd7710fa2830355d4312a6b76103e29639d | dc02bfa887ffab9841abebc3f5c16d874388cef5 | refs/heads/master | 2021-01-01T09:26:36.121339 | 2020-01-31T10:34:17 | 2020-02-07T04:39:18 | 239,214,398 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 569 | py | # XXX Set a resource for the supply line inside a resource
parent_value = context.getParentValue()
if parent_value.getPortalType() in context.getPortalResourceTypeList():
context.setResourceValue(parent_value)
# Predicate ?
context.setMappedValuePropertyList([
'base_price', 'additional_price',
'discount_ratio', 'exclusive_discount_ratio',
'surcharge_ratio', 'variable_additional_price',
'non_discountable_additional_price',
'priced_quantity', 'base_unit_price',
])
| [
"georgios.dagkakis@nexedi.com"
] | georgios.dagkakis@nexedi.com |
83cfd75e6a045e7d8436f6ddee6100018566b76e | 9c693f7c8494270b66c892d46ae864f1db645ff4 | /PyPoll/main.py | 5cb25604235357416b1f5145faa1ac3697fccb9f | [] | no_license | epayne323/python-challenge | 861c55c6453b6d7d9b96a5c8f8de59d8d677e3e2 | 89d83bf01239751d6c6dc7271e5ca1935ffe10ad | refs/heads/master | 2020-08-21T22:33:16.944470 | 2019-10-25T02:51:48 | 2019-10-25T02:51:48 | 216,262,223 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,374 | py | import os
import csv
# the instructions state:
# Your scripts should work for each dataset provided. Run your script for each dataset separately
# to make sure that the code works for different data.
# However, there are only two csv files for this assignment, and they are formatted differently.
# to make this applicable to different csvs with the same format, I would use inputs:
# filename = input("Please enter name of csv file: ")
# filepath = os.path.join('..',filename)
electionPath = os.path.join('..','election_data.csv')
resultsPath = os.path.join('..','election_data_results.csv')
with open(electionPath, 'r', newline = '') as file1:
electionReader = csv.reader(file1, delimiter = ',')
electionHeader = next(electionReader)
totalVotes = 0
# store the candidate names as keys in a dictionary, with vote totals as values
candidateTotals = {}
for row in electionReader:
totalVotes += 1
# if the candidate name is already a key in the dictionary, add 1 to their vote total
if row[2] in candidateTotals:
candidateTotals[row[2]] += 1
# otherwise set their vote total to 1
else:
candidateTotals[row[2]] = 1
print("Election Results")
print("----------------")
print(f"Total Votes: {totalVotes}")
print("----------------")
for key in candidateTotals:
print(f"{key}: {candidateTotals[key]*100/totalVotes}% ({candidateTotals[key]})")
print("----------------")
# truly, lambdas are the intended solution.
# no really, use the built in max function, setting the key function to look at the values
print("Winner: " + max(candidateTotals, key = lambda x: candidateTotals[x]))
print("----------------")
with open(resultsPath, 'w', newline = '') as file2:
electionWriter = csv.writer(file2, delimiter = ',')
electionWriter.writerow(["Election Results"])
electionWriter.writerow(["----------------"])
electionWriter.writerow([f"Total Votes: {totalVotes}"])
electionWriter.writerow(["----------------"])
for key in candidateTotals:
electionWriter.writerow([f"{key}: {candidateTotals[key]*100/totalVotes}% ({candidateTotals[key]})"])
electionWriter.writerow(["----------------"])
electionWriter.writerow(["Winner: " + max(candidateTotals, key = lambda x: candidateTotals[x])])
electionWriter.writerow(["----------------"]) | [
"epayne323@gmail.com"
] | epayne323@gmail.com |
5e31ad4713e4435f047cee7f725c63e2f902aae6 | 193414c23fbda38b58111faa5478a6fad99555bc | /venv/bin/sqlformat | 15328fb53550ff26c0fcad5024e3691bca08b0c4 | [] | no_license | davidchoi-dev/facebookclone | 8efc7b9661785e55a786c8ea3c73de525ff6f1f0 | 81b3585e2b03b1fd8296f6c06bd2fb28570fee88 | refs/heads/master | 2022-04-16T01:41:26.115332 | 2020-04-14T16:38:34 | 2020-04-14T16:38:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 249 | #!/workspace/facebook/facebookclone/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from sqlparse.__main__ import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"woody@spacecloud.kr"
] | woody@spacecloud.kr | |
ca721489a0ab13c9992fa01fbfad93d381e71012 | 85b8a0b07942e206c1c0eb66035fd90ba3493865 | /resturants/jrlm.py | a9014d703cf7242eb23c9c674d9b0a9678466b25 | [] | no_license | XGYu/rrs | fe8b35dcb413b0309b80ed01b1ff390fdb7a8539 | 00206e5fa4b426f28f877f0ba138cd0ecde6299b | refs/heads/main | 2023-04-28T19:55:20.462160 | 2021-05-13T08:31:29 | 2021-05-13T08:31:29 | 349,459,470 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,067 | py | from gensim.models import word2vec
import numpy as np
sentences = word2vec.LineSentence('./data/nyc_jrlm_sentence.txt')
model = word2vec.Word2Vec(sentences, sg=0)
vec_dict = {}
for item in model.wv.vocab:
vec_dict[item] = model.wv.get_vector(item)
def pairwise_distance(vector_a, vector_b):
vector_a = np.mat(vector_a)
vector_b = np.mat(vector_b)
num = float(vector_a * vector_b.T)
denom = np.linalg.norm(vector_a) * np.linalg.norm(vector_b)
sim = num / denom
return 1.0 - sim
# 根据user和location进行一个top-k推荐,返回待推荐地点list
def RecTop(w, k, user, location):
vec_user = model.wv.get_vector(user)
vec_location = model.wv.get_vector(location)
vec_agg = w*vec_user + (1-w)*vec_location
score_dict = {}
for item in model.wv.vocab:
if item[0]=='l':
vec_item = vec_dict[item]
score = pairwise_distance(vec_agg, vec_item)
score_dict[item] = score
result = [x for x, v in sorted(score_dict.items(), key=lambda item: item[1])[:k]]
return result
| [
"1643513827@qq.com"
] | 1643513827@qq.com |
437f732ec69d6729576a3de3bdcfdc71d1bfb0b6 | 08d3b4903ed9aac0022724e7d9ef46ffa25de4d5 | /meiduo_mall/meiduo_mall/apps/verifications/serializers.py | 8dd33381b87e27cb25e318ead4556858849a9804 | [] | no_license | 22014471/shangchen_Django | d8dfcafd896f29141de8d4a31420fc315ce87c9c | bc1a908d91084e88061437435f4e9bed30072f69 | refs/heads/master | 2020-09-08T21:37:02.770088 | 2019-11-12T15:34:11 | 2019-11-12T15:34:11 | 221,249,023 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,079 | py | from django_redis import get_redis_connection
from rest_framework import serializers
class ImageCodeCheckSerializer(serializers.Serializer):
image_code_id = serializers.UUIDField()
text = serializers.CharField(max_length=4,min_length=4)
def validate(self, attrs):
redis_conn = get_redis_connection('verify_codes')
image_code_id = attrs["image_code_id"]
text = attrs["text"]
real_text = redis_conn.get('img_%s' % image_code_id)
if not real_text:
raise serializers.ValidationError("图片验证码已失效")
real_text = real_text.decode()
redis_conn.delete("img_%s" % image_code_id)
if not real_text:
raise serializers.ValidationError("图片验证码不存在")
if real_text.lower() != text.lower():
raise serializers.ValidationError("图片验证码不正确")
mobile = self.context["view"].kwargs["mobile"]
if redis_conn.get("send_sms_%s" % mobile):
raise serializers.ValidationError("不能频繁访问")
return attrs | [
"22014471@qq.com"
] | 22014471@qq.com |
2d157e6b043c49bbf4392ba1010da1ab43617f94 | 1698fe3ff15a6737c70501741b32b24fe68052f4 | /py-elasticsearch-django-master/spider/ESearch/spiders/haoyang_spider.py | a227d4b4b41603f8a992a695342ceb534df4a4bb | [] | no_license | menhswu/djangoapps | 4f3718244c8678640af2d2a095d20a405e337884 | 039a42aa9d1537e7beb4071d86bea7a42253d8b3 | refs/heads/master | 2023-03-04T03:56:01.070921 | 2021-01-28T07:35:02 | 2021-01-28T07:35:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,937 | py | # -*- coding:utf-8 -*-
import scrapy
import re
import urllib2
from scrapy.http import Request
from scrapy import Selector
from ESearch.items import XiangmuItem
from ESearch.utils.common import get_md5
# 32406
class DmozSpider(scrapy.Spider):
name = "haoyang"
start_urls = []
main_url = "http://www.9lizhi.com"
def start_requests(self):
file_object = open(r'haoyang_url.csv', 'r')
try:
for line in file_object:
x = line.strip()
self.start_urls.append(x)
for url in self.start_urls:
yield self.make_requests_from_url(url)
finally:
file_object.close()
def parse(self, response):
item = XiangmuItem()
item["book_name"] = ''
item["book_author"] = ''
item["book_type"] = ''
item["book_format"] = ''
item["book_time"] = ''
item["book_url"] = ''
item["book_size"] = ''
item["book_downl_url"] = ''
item["book_source"] = ''
item["book_intro"] = ''
item["book_content"] = ''
item["book_zip_pswd"] = ''
item["book_chinese"] = ''
item["book_id"] = ''
selector = Selector(response)
is_lists_page = selector.xpath('//ul[@id="resultsContainer"]')
if is_lists_page:
info_lists = is_lists_page.xpath('li/div[@class="item_title"]/strong/h2/a/@href').extract()
for each in info_lists:
yield Request(each, callback=self.parse)
page_lists = is_lists_page.xpath('//select[@name="select"]/option/@value').extract()
for each_page in page_lists[1:-1]:
yield Request(self.main_url + each_page, callback=self.parse)
pass
is_info_page = selector.xpath('//div[@id="detail"]')
if is_info_page:
item['book_url'] = response.url
item['book_id'] = get_md5(response.url)
item['book_downl_url'] = response.url
type = selector.xpath('//div[@class="posi"]/a/text()').extract()
type_url = selector.xpath('//div[@class="posi"]/a/@href').extract()
if "http://www" in type_url[-1]:
item['book_type'] = type[-2]
else:
item['book_type'] = type[-1]
information = is_info_page.xpath('div[@class="tb-detail-hd"]')
item['book_name'] = information.xpath('h1/text()').extract()
time = information.xpath('li[@class="dated"]/span[@class="datetime"]/text()').extract()
time = ''.join(time).split(':')[-1]
item['book_time'] = time
author = information.xpath('li[@class="dated"]/span[@class="author"]/text()').extract()
item['book_author'] = ''.join(author).replace('\r', '').replace('\n', '')
yield item
| [
"jinxufang@tencent.com"
] | jinxufang@tencent.com |
44b3f8b9bf6336102b52df080e47ad6e0a0e1c8d | 297497957c531d81ba286bc91253fbbb78b4d8be | /third_party/python/gyp/test/win/gyptest-quoting-commands.py | c6e3167e76c582ad10c5852afaa2c21f62439533 | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | marco-c/gecko-dev-comments-removed | 7a9dd34045b07e6b22f0c636c0a836b9e639f9d3 | 61942784fb157763e65608e5a29b3729b0aa66fa | refs/heads/master | 2023-08-09T18:55:25.895853 | 2023-08-01T00:40:39 | 2023-08-01T00:40:39 | 211,297,481 | 0 | 0 | NOASSERTION | 2019-09-29T01:27:49 | 2019-09-27T10:44:24 | C++ | UTF-8 | Python | false | false | 565 | py |
"""
Make sure batch files run as actions. Regression test for previously missing
trailing quote on command line. cmd typically will implicitly insert a missing
quote, but if the command ends in a quote, it will not insert another, so the
command can sometimes become unterminated.
"""
import TestGyp
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'batch-file-action'
test.run_gyp('batch-file-action.gyp', chdir=CHDIR)
test.build('batch-file-action.gyp', test.ALL, chdir=CHDIR)
test.pass_test()
| [
"mcastelluccio@mozilla.com"
] | mcastelluccio@mozilla.com |
5405911e05a84503d8d66160aa95f7173d528a58 | e173eb4404c512e010492804e1d866016f070d41 | /cqbtest.py | 44b71d79294e18a7376bc15444e194d9cf2e20e8 | [] | no_license | biao-666/game01 | 721e363ec5973c5aa89fc1ad0a530c500ca4c9ca | 201215d9b11de2a385279eb547cd625873569c06 | refs/heads/master | 2023-03-21T14:54:38.139029 | 2021-03-18T15:55:47 | 2021-03-18T15:55:47 | 343,121,405 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 360 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
' a test module '
__author__ = 'cqb'
class Test:
def __init__(self):
self.aaa = 111
print("我是类被实例化时就会执行")
def test001(self):
print(123)
self.test003 = "wc"
def test002(self):
self.test001()
print(self.test003)
print(Test)
print(Test())
# Test()
a = Test().test002()
| [
"1160410692@qq.com"
] | 1160410692@qq.com |
4ea5efef6caeb4a300a0dd8119c8a18fa5e25b88 | d7d24dc45da25922e84ac2536167689fbfde09a4 | /00 Renderer - Creating dataset/createh5format.py | e94b04589f0ccb3ccfce34262fe02943ff364b1d | [] | no_license | sarathcani/3D-Reconstruction-from-Single-Image | 3b8a4eab1b890be9fa6fc57185993abb70821b99 | 163dae7da4b701753c10ec4af91d9197f5102285 | refs/heads/master | 2023-06-03T10:50:34.079048 | 2021-06-21T11:14:12 | 2021-06-21T11:14:12 | 359,438,299 | 2 | 3 | null | 2021-06-08T17:31:17 | 2021-04-19T11:42:08 | Jupyter Notebook | UTF-8 | Python | false | false | 2,055 | py | ### Script for creating the rendered data into hdf5 format
#
#
# Install
# - h5py
# - numpy
# - imageio
# - tqdm (not necessary)
#
# Define
# - CURR_DIR
# - DATA_DIR (recommended to keep the data in './data')
# - LABEL
# - FILENAME
# Importing Libraries
import h5py
import os
import numpy as np
import imageio
from tqdm import tqdm
### Define these
CURR_DIR = os.getcwd()
DATA_DIR = os.path.join(CURR_DIR , "data" , "car")
LABEL = "CAR"
FILENAME = "ShapenetRendering_{}.h5".format(LABEL)
IMG_NAMES = ['00.png', '01.png', '02.png', '03.png', '04.png', '05.png', '06.png', '07.png', '08.png', '09.png', '10.png', '11.png', '12.png', '13.png', '14.png', '15.png', '16.png', '17.png', '18.png', '19.png', '20.png', '21.png', '22.png', '23.png']
## -------------------------------------------------------
def main() :
### This counter used for keeping track of the number of images/groups present in the h5 file
counter = 0
### Open the file with FILENAME
with h5py.File(FILENAME , 'w') as hdf :
### Loop through the models
### For each image in the model create a group
### with img and pose as dataset
for model_name in tqdm(os.listdir(DATA_DIR)) :
model_path = os.path.join(DATA_DIR , model_name , "rendering" )
r_metadata_path = os.path.join(model_path , "rendering_metadata.txt")
with open(r_metadata_path , 'r') as r_metadata :
for i in range(24) :
group_name = "{}_{}".format(model_name , i)
## Create a group in the h5 file with group_name
group = hdf.create_group(group_name)
line = r_metadata.readline()
azim = float(line.split()[0])
elev = float(line.split()[1])
pose = np.array([azim , elev])
img_path = os.path.join(model_path , IMG_NAMES[i])
## import image
## create it into a np array
img = imageio.imread(img_path)
group.create_dataset("image" , data = img)
group.create_dataset("pose" , data = pose)
counter += 1
print("Successfully created h5 format with {} images".format(counter))
if __name__ == '__main__':
main() | [
"mailsforsarath@cet.ac.in"
] | mailsforsarath@cet.ac.in |
e60bc54cc535f4ab73531ba9f4f5eac45fce071c | 09145b3b07273e0e064a855cf2302220cebcf181 | /textwrap_ex.py | 95b08fe738e17f49e6c04d9e5cc134b0a91c315a | [] | no_license | 01x01/python3-standard-library | 7fa762a12cbcb3535bd8b31128bd9c3aed167e10 | 1af424a04d3f16abf1c6bc42abf80ae357e35920 | refs/heads/master | 2020-06-24T12:23:38.563609 | 2019-08-12T09:42:34 | 2019-08-12T09:42:34 | 168,808,293 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,217 | py | # coding: utf-8
# 文本段落格式化处理
# 主要功能: 类似文件编辑器覆盖或填充特性的编程功能
# 文本内容
sample_text = '''The textwrap module can be used to format text for output in
situations where pretty-printing is desired. It offers
programmatic functionality similar to the paragraph wrapping
or filling features found in many text editors.
'''
# textwrap 函数总结
# wrap(text,width=70,**kw) 输出一个list,每一行为list中的一个元素
# fill(text,width=70,**kw) 文本第一行缩进
# shorten(text,width,**kw) 摘要功能
# dedent(text) 移除缩进,空格
# indent(text,prefix,predicate=None)
# wrap
import textwrap
t1 = textwrap.wrap(sample_text)
print(t1)
# fill
t2 = textwrap.fill(sample_text,initial_indent="*"*4)
print(t2)
# shorten
t3 = textwrap.shorten(sample_text,width=10)
print(t3) # The [...]
# dedent
dedent_text = textwrap.dedent(sample_text)
print(dedent_text)
# indent
final = textwrap.indent(sample_text,'>')
print(final)
for width in [45,80]:
print("{} Columns is : \n".format(width))
t = textwrap.fill(sample_text,width=width)
print(t)
t5 = textwrap.fill(sample_text,subsequent_indent="*")
print(t5) | [
"John.Wen@ehealth.com"
] | John.Wen@ehealth.com |
3c1d55877a00b6212503b3eab710b25288f0ecfb | 7e7982178d17a956ec6bf10a4282858a6db709fa | /pro_etc_2017/make_text.py | f8d6c3b87dae35f722638b308fe3450363d1a6ea | [] | no_license | beomso0/datajournalism-team-1 | 675622a6df14bc4edda3673b00f1f52321b6e62c | 26b547d3825f1732983217f9b465fcc640ccf183 | refs/heads/master | 2021-10-08T18:44:56.298234 | 2018-12-16T04:21:18 | 2018-12-16T04:21:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,517 | py | from bs4 import BeautifulSoup
import urllib.request
#1quarter
pro1 = 'balchick'
pro2 = 'voguemom'
pro3 = 'semobang'
pro4 = 'sectiontv'
pro5 = 'showmusic'
pro6 = 'ojimagic'
urlfile = '{}2017_url{}.txt'
textfile = '{}2017_text{}qt.txt'
'''
with open(urlfile.format(pro1, '1'), 'r') as f:
naver_url_balch1 = f.readlines()
with open(urlfile.format(pro1, '2'), 'r') as f:
naver_url_balch2 = f.readlines()
with open(urlfile.format(pro1, '3'), 'r') as f:
naver_url_balch3 = f.readlines()
with open(urlfile.format(pro1, '4'), 'r') as f:
naver_url_balch4 = f.readlines()
with open(urlfile.format(pro2, '1'), 'r') as f:
naver_url_vogue1 = f.readlines()
with open(urlfile.format(pro2, '2'), 'r') as f:
naver_url_vogue2 = f.readlines()
with open(urlfile.format(pro2, '3'), 'r') as f:
naver_url_vogue3 = f.readlines()
with open(urlfile.format(pro2, '4'), 'r') as f:
naver_url_vogue4 = f.readlines()
with open(urlfile.format(pro3, '1'), 'r') as f:
naver_url_semo1 = f.readlines()
with open(urlfile.format(pro3, '2'), 'r') as f:
naver_url_semo2 = f.readlines()
with open(urlfile.format(pro3, '3'), 'r') as f:
naver_url_semo3 = f.readlines()
with open(urlfile.format(pro3, '4'), 'r') as f:
naver_url_semo4 = f.readlines()
with open(urlfile.format(pro4, '1'), 'r') as f:
naver_url_section1 = f.readlines()
with open(urlfile.format(pro4, '2'), 'r') as f:
naver_url_section2 = f.readlines()
with open(urlfile.format(pro4, '3'), 'r') as f:
naver_url_section3 = f.readlines()
with open(urlfile.format(pro4, '4'), 'r') as f:
naver_url_section4 = f.readlines()
with open(urlfile.format(pro5, '1'), 'r') as f:
naver_url_show1 = f.readlines()
with open(urlfile.format(pro5, '2'), 'r') as f:
naver_url_show2 = f.readlines()
with open(urlfile.format(pro5, '3'), 'r') as f:
naver_url_show3 = f.readlines()
with open(urlfile.format(pro5, '4'), 'r') as f:
naver_url_show4 = f.readlines()
with open(urlfile.format(pro6, '1'), 'r') as f:
naver_url_oji1 = f.readlines()
'''
with open(urlfile.format(pro6, '2'), 'r') as f:
naver_url_oji2 = f.readlines()
with open(urlfile.format(pro6, '3'), 'r') as f:
naver_url_oji3 = f.readlines()
with open(urlfile.format(pro6, '4'), 'r') as f:
naver_url_oji4 = f.readlines()
'''
#1qt_balch
naver_url_list = []
for i in naver_url_balch1:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro1, '1'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#2qt
naver_url_list = []
for i in naver_url_balch2:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro1, '2'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#3qt
naver_url_list = []
for i in naver_url_balch3:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro1, '3'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#4qt
naver_url_list = []
for i in naver_url_balch4:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro1, '4'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#1qt_vogue
naver_url_list = []
for i in naver_url_vogue1:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro2, '1'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#2qt
naver_url_list = []
for i in naver_url_vogue2:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro2, '2'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#3qt
naver_url_list = []
for i in naver_url_vogue3:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro2, '3'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#4qt
naver_url_list = []
for i in naver_url_vogue4:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro2, '4'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#2qt
naver_url_list = []
for i in naver_url_semo2:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro3, '2'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#3qt
naver_url_list = []
for i in naver_url_semo3:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro3, '3'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#4qt
naver_url_list = []
for i in naver_url_semo4:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro3, '4'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#1qt_section
naver_url_list = []
for i in naver_url_section1:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro4, '1'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#2qt
naver_url_list = []
for i in naver_url_section2:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro4, '2'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#3qt
naver_url_list = []
for i in naver_url_section3:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro4, '3'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#4qt
naver_url_list = []
for i in naver_url_section4:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro4, '4'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#1qt_show
naver_url_list = []
for i in naver_url_show1:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro5, '1'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#2qt
naver_url_list = []
for i in naver_url_show2:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro5, '2'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#3qt
naver_url_list = []
for i in naver_url_show3:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro5, '3'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#4qt
naver_url_list = []
for i in naver_url_show4:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro5, '4'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
'''
#2qt_oji
naver_url_list = []
for i in naver_url_oji2:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro6, '2'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#3qt
naver_url_list = []
for i in naver_url_oji3:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro6, '3'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data)
#4qt
naver_url_list = []
for i in naver_url_oji4:
naver_url_list.append(i[0:len(i)-1])
print(len(naver_url_list))
text_list = []
for i in range(len(naver_url_list)):
print(i)
try:
with urllib.request.urlopen(naver_url_list[i]) as url:
doc = url.read()
soup = BeautifulSoup(doc, "html.parser")
try:
divs = soup.find_all("div", class_="article_body font1 size3")
for div in divs:
body = div.text.strip()
text_list.append(body)
except:
pass
except:
pass
with open(textfile.format(pro6, '4'), 'w', encoding='utf-8') as f:
for text in text_list:
data = text + '\n'
f.write(data) | [
"noreply@github.com"
] | beomso0.noreply@github.com |
cd94e17736402eba7a404538056255ad735a4111 | 6a8cb51c24fc08cfe0cbf79173b8b52cf1e11e98 | /parking/parking.py | acedfa2a32a295ab6869e0cf750749da9756265c | [] | no_license | ouyangqinjp/selenium-projects | 7f4a482f89de8550dd349223d8fe9b4089da47d1 | 370779d7de9914d470d86f8105bd7fdfdce808aa | refs/heads/master | 2021-01-10T06:01:11.248746 | 2015-11-30T22:06:02 | 2015-11-30T22:06:02 | 47,145,889 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,510 | py | import unittest
import time
#from datetime import datetime, date, time
from selenium.webdriver.common.by import By
from selenium import webdriver
import page
import os
import sys
class PythonOrgSearch(unittest.TestCase):
    """Selenium UI test for the ParkCalc demo parking-cost calculator.

    Drives http://adam.goucher.ca/parkcalc/ through a page object
    (``page.MainPage``) and exercises the entry/leave calendar popups with a
    small table of sample dates.

    NOTE(review): written for Python 2 (``print`` statements) and the legacy
    ``switch_to_window`` Selenium API.
    """

    def setUp(self):
        """Start a fresh Firefox session and open the ParkCalc page."""
        self.driver = webdriver.Firefox()
        self.driver.get("http://adam.goucher.ca/parkcalc/")
        # self.driver.maximize_window()
        # implicit wait covers slow page loads for every find_element call
        self.driver.implicitly_wait(3)
        # self.results_file = open("results.txt", "w")
        # self.radio_results_file = open("radio_results.txt", "w")

    def first_page(self):
        """Fill entry/leave dates from the parallel sample tables below.

        Index i selects one entry/leave combination across all six lists.
        """
        self.main_page = page.MainPage(self.driver)
        # self.main_page.list_all_options()
        entry_time_list = ["02:00", "12:00", "13:50", "20:59"]
        entry_month_list = ["June", "Auguest", "May", "December"]
        entry_date_list = ["2", "5", "10", "30"]
        leave_time_list = ["00:03", "12:00", "13:50", "23:59"]
        leave_month_list = ["June", "September", "July", "December"]
        leave_date_list = ["3", "8", "15", "30"]
        i = 0
        for i in range(len(entry_month_list)):
            entry_month = entry_month_list[i]
            entry_date = entry_date_list[i]
            entry_time = entry_time_list[i]
            leave_month = leave_month_list[i]
            leave_date = leave_date_list[i]
            leave_time = leave_time_list[i]
            # Python 2 print statements (module targets py2)
            print "entry i=%s %s %s -- %s " \
                % (i, entry_month, entry_date, entry_time)
            print "leave i=%s %s %s -- %s " \
                % (i, leave_month, leave_date, leave_time)
            """ Entry Date """
            self.main_page.input_entry_date = entry_time
            self.main_page.click_radio_PM()
            self.main_page.click_calender_img("EntryDate")
            # the calendar opens in a new window: switch to the newest handle
            self.driver.switch_to_window(self.driver.window_handles[-1])
            self.main_page.click_calender_date(entry_month, entry_date)
            time.sleep(5)
            """ Leave Date """
            self.main_page.input_exist_date = leave_time
            self.main_page.click_calender_img("LeaveDate")
            self.driver.switch_to_window(self.driver.window_handles[-1])
            self.main_page.click_calender_date(leave_month, leave_date)
            time.sleep(5)
        # print self.driver.find_element(By.NAME, 'EntryDate').text

    def test_search_in_python_org(self):
        """unittest entry point: run the date-picker scenario."""
        self.first_page()

    def tearDown(self):
        """Close the browser window opened in setUp."""
        self.driver.close()
# Allow running this test module directly (python parking.py).
if __name__ == "__main__":
    unittest.main()
| [
"ouyangqinjp@yahoo.co.jp"
] | ouyangqinjp@yahoo.co.jp |
2901fab515399e58fb5deb81bdf31895765d505b | 742dd08e388faaf15d3d30134246cac39c8d5f20 | /src/network/layer/detection_target_region_mask.py | 16a85c483b50a711ddf7a94ef69b022c109322b3 | [] | no_license | taashi-s/Mask_RCNN_Kreas | b10b4636706795263af9b0eeb5dd894c3a8807ed | e88e9ace2ec40a57d129729cd033c27588b632cf | refs/heads/master | 2020-03-22T23:56:42.022715 | 2018-07-19T06:12:03 | 2018-07-19T06:12:03 | 140,835,596 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,093 | py | """
TODO : Write description
Detection Target Region Mask Layer Module
"""
import tensorflow as tf
import keras.engine.base_layer as KELayer
import keras.backend as KB
import keras.utils.conv_utils as KCUtils
from utils.regions_utils import RegionsUtils
class DetectionTargetRegionMask(KELayer.Layer):
    """Keras layer that builds Mask R-CNN detection training targets.

    For each image in the batch it matches proposal regions against
    ground-truth boxes by IoU, samples a fixed number of positive and
    negative proposals, and emits per-proposal class labels, box offsets,
    cropped/resized ground-truth masks and the sampled regions themselves.

    Inputs  (call): [class labels, box labels, mask labels, proposal regions]
    Outputs: [classes (N, 1), offsets (N, 4), masks (N, H, W), regions (N, 4)]
    per batch element, where N = ``count_per_batch``.
    """
    def __init__(self, positive_threshold=0.5, positive_ratio=0.33, image_shape=None
                 , batch_size=5, exclusion_threshold=0.1, count_per_batch=64
                 , mask_size=28, **kwargs):
        # positive_threshold : IoU >= this makes a proposal a positive sample
        # positive_ratio     : fraction of count_per_batch reserved for positives
        # image_shape        : (h, w, c); when given, box labels get normalized
        # exclusion_threshold: IoU below this is ignored (neither pos nor neg)
        # mask_size          : output mask resolution, int or (h, w) pair
        super(DetectionTargetRegionMask, self).__init__(**kwargs)
        self.__th = positive_threshold
        self.__excl_th = exclusion_threshold
        self.__count_per_batch = count_per_batch
        self.__ratio = positive_ratio
        self.__image_shape = image_shape
        self.__batch_size = batch_size
        (mask_h, mask_w) = KCUtils.normalize_tuple(mask_size, 2, 'mask_size')
        self.__mask_size_h = mask_h
        self.__mask_size_w = mask_w

    def call(self, inputs, **kwargs):
        # inputs: [cls_labels, reg_labels, msk_labels, regions]
        return self.__detection_target_region_mask(*inputs)

    def __detection_target_region_mask(self, cls_labels, reg_labels, msk_labels, regions):
        # Optionally normalize ground-truth boxes to [0, 1] image coordinates.
        norm_reg_labels = reg_labels
        if self.__image_shape is not None:
            img_h, img_w, _ = self.__image_shape
            norm_reg_labels = RegionsUtils(reg_labels).normalize(img_h, img_w)

        # Process the batch one image at a time, then re-stack the results.
        target_clss = []
        target_ofss = []
        target_regs = []
        target_msks = []
        zip_data = self.__zip_by_batch(cls_labels, norm_reg_labels, msk_labels
                                       , regions, self.__batch_size)
        for data in zip_data:
            data_s = self.__shaping_inputs(*data)
            target_data = self.__get_target_data(*data_s)
            target_reg, target_ofs, target_cls, target_msk = target_data
            target_clss.append(target_cls)
            target_ofss.append(target_ofs)
            target_regs.append(target_reg)
            target_msks.append(target_msk)
        return [KB.stack(target_clss), KB.stack(target_ofss)
                , KB.stack(target_msks), KB.stack(target_regs)]

    def __zip_by_batch(self, cls_labels, reg_labels, msk_labels, regions, batch_size):
        # Split every batched tensor into per-image tensors and zip them up.
        split_cls_labels = tf.split(cls_labels, batch_size)
        split_reg_labels = tf.split(reg_labels, batch_size)
        split_msk_labels = tf.split(msk_labels, batch_size)
        split_regions = tf.split(regions, batch_size)
        return zip(split_cls_labels, split_reg_labels, split_msk_labels, split_regions)

    def __shaping_inputs(self, cls_label, reg_label, msk_label, region):
        # Drop the leading (size-1) batch axis ...
        cls_label_2d = KB.squeeze(cls_label, 0)
        reg_label_2d = KB.squeeze(reg_label, 0)
        msk_label_3d = KB.squeeze(msk_label, 0)
        region_2d = KB.squeeze(region, 0)
        # ... then keep only rows that are not all-zero padding.
        target_lbl_ids = KB.flatten(tf.where(KB.any(reg_label_2d, axis=1)))
        target_reg_ids = KB.flatten(tf.where(KB.any(region_2d, axis=1)))
        cls_lbl = KB.gather(cls_label_2d, target_lbl_ids)
        reg_lbl = KB.gather(reg_label_2d, target_lbl_ids)
        msk_lbl = KB.gather(msk_label_3d, target_lbl_ids)
        reg = KB.gather(region_2d, target_reg_ids)
        return cls_lbl, reg_lbl, msk_lbl, reg

    def __get_positive(self, ious):
        # Proposals whose best IoU clears the positive threshold.
        max_iou = KB.max(ious, axis=1)
        ids = KB.flatten(tf.where(max_iou >= self.__th))
        count = round(self.__count_per_batch * self.__ratio)
        return self.__get_shuffle_sample(ids, count)

    def __get_negative(self, ious, positive_count):
        # Proposals in the [exclusion, positive) IoU band become negatives;
        # they fill the remainder of the per-batch sample budget.
        max_iou = KB.max(ious, axis=1)
        ids = KB.flatten(tf.where((self.__excl_th <= max_iou) & (max_iou < self.__th)))
        count = self.__count_per_batch - positive_count
        return self.__get_shuffle_sample(ids, count)

    def __get_shuffle_sample(self, sample, count):
        # Randomly pick up to `count` ids; pass empty samples through as-is.
        sample_num = KB.shape(sample)[0]
        limit = KB.minimum(count, sample_num)
        shuffle_sample = tf.random_shuffle(sample)[:limit]
        return KB.switch(sample_num > 0, shuffle_sample, sample)

    def __get_target_data(self, cls_lbl, reg_lbl, msk_lbl, reg):
        # Match proposals to ground truth by IoU and build all four targets.
        ious = RegionsUtils(reg).calc_iou(reg_lbl)
        positive_ids = self.__get_positive(ious)
        negative_ids = self.__get_negative(ious, KB.shape(positive_ids)[0])
        target_reg_ids = KB.concatenate((positive_ids, negative_ids))
        # Each sampled proposal is assigned its best-IoU ground-truth box.
        max_iou_ids = KB.argmax(ious, axis=1)
        target_reg_lbl_ids = KB.gather(max_iou_ids, target_reg_ids)
        target_cls_lbl_ids = KB.gather(max_iou_ids, positive_ids)
        target_reg = KB.gather(reg, target_reg_ids)
        target_ofs = self.__get_target_offset(reg_lbl, target_reg_lbl_ids, target_reg)
        target_cls = self.__get_target_class_label(cls_lbl, target_cls_lbl_ids, negative_ids)
        target_msk = self.__get_target_mask_label(msk_lbl, target_reg_lbl_ids, target_reg)
        return self.__padding_data(target_reg, target_ofs, target_cls, target_msk)

    def __get_target_offset(self, reg_lbl, target_reg_lbl_ids, target_reg):
        # Regression target: offset from each sampled region to its GT box.
        target_reg_lbl = KB.gather(reg_lbl, target_reg_lbl_ids)
        return RegionsUtils(target_reg_lbl).calc_offset(target_reg)

    def __get_target_class_label(self, cls_lbl, target_cls_lbl_ids, negative_ids):
        # Positives keep their GT class id; negatives get class 0 (background).
        target_cls = KB.squeeze(KB.cast(KB.gather(cls_lbl, target_cls_lbl_ids), 'int32'), 1)
        padding = KB.zeros([KB.shape(negative_ids)[0]], dtype='int32')
        return KB.expand_dims(KB.concatenate((target_cls, padding)), 1)

    def __get_target_mask_label(self, msk_lbl, target_reg_lbl_ids, target_reg):
        # Crop each matched GT mask to its sampled region, resize to the fixed
        # mask output size, and round back to a binary mask.
        target_msk = KB.gather(msk_lbl, target_reg_lbl_ids)
        target_msk_4d = tf.expand_dims(target_msk, -1)
        ids = KB.arange(0, KB.shape(target_reg)[0])
        target_msk_crop = tf.image.crop_and_resize(target_msk_4d, target_reg, ids
                                                   , (self.__mask_size_h, self.__mask_size_w))
        target_msk_3d = tf.squeeze(target_msk_crop, axis=3)
        return tf.round(target_msk_3d)

    def __padding_data(self, regs, ofss, clss, msks):
        # Zero-pad every output up to exactly count_per_batch rows so all
        # batch elements have identical static shapes.
        padding_count = KB.maximum(self.__count_per_batch - KB.shape(regs)[0], 0)
        paddings = [(0, padding_count), (0, 0)]
        mask_shape = (self.__count_per_batch, self.__mask_size_h, self.__mask_size_w)
        padding_regs = KB.reshape(tf.pad(regs, paddings), (self.__count_per_batch, 4))
        padding_ofss = KB.reshape(tf.pad(ofss, paddings), (self.__count_per_batch, 4))
        padding_clss = KB.reshape(tf.pad(clss, paddings), (self.__count_per_batch, 1))
        padding_msks = KB.reshape(tf.pad(msks, [(0, padding_count), (0, 0), (0, 0)]), mask_shape)
        return padding_regs, padding_ofss, padding_clss, padding_msks

    def compute_output_shape(self, input_shape):
        # Fixed-size outputs: classes, offsets, masks, regions.
        return [(None, self.__count_per_batch, 1)
                , (None, self.__count_per_batch, 4)
                , (None, self.__count_per_batch, self.__mask_size_h, self.__mask_size_w)
                , (None, self.__count_per_batch, 4)
                ]
| [
"x7286x_ayk@yahoo.co.jp"
] | x7286x_ayk@yahoo.co.jp |
451b6207a23cffde0aefe69af7d33dc267b5371d | 6f1eb859e88bd2ce069a733531bf47a1fb0b0ccc | /src/trainer.py | b220941aa43c064ccf9e13767acc4cce4631dbf2 | [] | no_license | caoqian1995/simple-fairseq | e21ae523a7fbb5be7144774df762f3d8be66283a | 32a1886026b1993933ed022568d162ffbf452b3e | refs/heads/master | 2020-05-16T09:46:37.867599 | 2019-05-02T08:30:31 | 2019-05-02T08:30:31 | 182,961,193 | 0 | 0 | null | 2019-04-28T14:10:08 | 2019-04-23T07:40:17 | Python | UTF-8 | Python | false | false | 4,140 | py | from tqdm import tqdm
from logging import getLogger
import torch
from torch.nn.utils import clip_grad_norm_
from src.utils import get_optimizer
import torch.distributed as dist
logger = getLogger()
class TrainerMT():
    """Machine-translation trainer for an encoder/decoder pair.

    Runs epoch- and step-level training with optional gradient accumulation
    (``params.update_freq``) and multi-GPU data sharding
    (``params.gpu_num`` / ``params.rank``).
    """

    def __init__(self, encoder, decoder, data, params, num_updates):
        """Store the model parts, build the optimizer, reset statistics.

        :param encoder: encoder network (torch.nn.Module)
        :param decoder: decoder network; must expose ``loss_fn`` and ``n_words``
        :param data: dataset providing ``get_iterator`` and ``total``
        :param params: experiment configuration namespace
        :param num_updates: optimizer update counter to resume from
        """
        self.encoder = encoder
        self.decoder = decoder
        self.data = data
        self.params = params
        self.enc_dec_params = list(self.encoder.parameters()) + list(self.decoder.parameters())

        # single optimizer over encoder + decoder parameters
        self.optimizer = get_optimizer(self.enc_dec_params, self.params.optim)
        self.optimizer._num_updates = num_updates

        # training statistics
        self.epoch = getattr(params, 'now_epoch', 0)
        self.n_iter = 0
        self.oom = 0          # steps skipped due to exceptions (typically CUDA OOM)
        self.n_sentences = 0
        self.stats = {
            'processed_s': 0,  # sentences seen
            'processed_w': 0,  # target words seen
            'loss': []
        }

    def train_epoch(self):
        """Run one full pass over the training data, then checkpoint.

        Only rank 0 writes the checkpoint when training on multiple GPUs.
        """
        self.iterator = self.get_iterator()
        for (sent1, len1), (sent2, len2) in tqdm(self.iterator, mininterval=2,
                                                 desc=' - (Training) ', leave=False,
                                                 total=self.data.total):
            self.train_step(sent1, len1, sent2, len2)

        # save epoch checkpoint (single-GPU runs always save; otherwise rank 0)
        save = self.params.gpu_num == 1 or self.params.rank == 0
        if save:
            checkpoint = {
                'encoder': self.encoder.state_dict(),
                'decoder': self.decoder.state_dict(),
                'params': self.params,
                'epoch': self.epoch,
                'num_updates': self.optimizer._num_updates
            }
            torch.save(checkpoint, '{}/model_epoch{}.pt'.format(self.params.checkpoint_dir, self.epoch))
        self.epoch += 1

    def train_step(self, sent1, len1, sent2, len2):
        """One forward/backward step with gradient accumulation.

        Gradients are zeroed on the first of every ``update_freq`` steps and
        the optimizer steps (with clipping) on the last one.  An exception in
        the forward/backward pass (typically CUDA OOM) skips the step and is
        counted in ``self.oom``.
        """
        if self.params.update_freq == 1:
            need_zero = True
            need_reduction = True
        else:
            need_reduction = (self.n_iter + 1) % self.params.update_freq == 0
            need_zero = self.n_iter % self.params.update_freq == 0

        self.encoder.train()
        self.decoder.train()
        sent1, sent2 = sent1.cuda(), sent2.cuda()
        try:
            if need_zero:
                self.optimizer.zero_grad()
            encoded = self.encoder(sent1, len1)
            # teacher forcing: decoder sees the target shifted right by one
            scores = self.decoder(encoded, sent2[:-1])
            loss = self.decoder.loss_fn(scores.view(-1, self.decoder.n_words), sent2[1:].view(-1))

            # check NaN (NaN != NaN)
            if (loss != loss).data.any():
                logger.error("NaN detected")
                exit()

            loss.backward()
        except Exception as e:
            # most commonly CUDA OOM: log, free cached memory and skip the step
            logger.error(e)
            torch.cuda.empty_cache()
            self.n_iter += 1
            self.oom += 1
            return

        if need_reduction:
            try:
                clip_grad_norm_(self.enc_dec_params, self.params.clip_grad_norm)
                self.optimizer.step()
            except Exception as e:
                logger.error(e)
                exit(0)

        # number of processed sentences / words
        self.stats['processed_s'] += len2.size(0)
        self.stats['processed_w'] += len2.sum()
        self.n_iter += 1
        del loss
        torch.cuda.empty_cache()

    def get_iterator(self):
        """Return the shuffled data iterator, rank-partitioned on multi-GPU."""
        if self.params.gpu_num == 1:
            return self.data.get_iterator(shuffle=True, group_by_size=True)()
        return self.data.get_iterator(shuffle=True, group_by_size=True,
                                      partition=self.params.rank)()
| [
"rgwt1234@gmail.com"
] | rgwt1234@gmail.com |
10a1ae6338cc526f0c59cb27113e894dc878228c | 260ca90bc92f59bad610c8dbff428dba9d8dd4c4 | /preprocessing/ulog2csv_core.py | 5303b81adaf9d358c63eb7289bb999fb1092817b | [
"BSD-3-Clause"
] | permissive | majuid/DeepNav | 22d770c75384fe19d70f58db8a846662e3bff91c | 2ac3cbed300a6d599bcce47650956414f0d1c692 | refs/heads/master | 2023-07-29T17:07:51.540262 | 2021-09-14T17:52:36 | 2021-09-14T17:52:36 | 313,142,695 | 50 | 5 | null | null | null | null | UTF-8 | Python | false | false | 33,456 | py | """
Main Module to load and parse an ULog file
This script was obtained from
https://github.com/PX4/pyulog/blob/master/pyulog/core.py
without any modification
"""
from __future__ import print_function
import struct
import sys
import numpy as np
#pylint: disable=too-many-instance-attributes, unused-argument, missing-docstring
#pylint: disable=protected-access, too-many-branches
__author__ = "Beat Kueng"
# check python version: pick a bytes->str conversion that works on both
# Python 2 (bytes and str are the same type) and Python 3 (explicit decode).
if sys.hexversion >= 0x030000F0:
    _RUNNING_PYTHON3 = True
    def _parse_string(cstr, errors='strict'):
        # decode a C string (bytes) as UTF-8; `errors` follows str() semantics
        return str(cstr, 'utf-8', errors)
else:
    _RUNNING_PYTHON3 = False
    def _parse_string(cstr):
        # Python 2: no decoding needed
        return str(cstr)
class ULog(object):
"""
This class parses an ulog file
"""
## constants ##
HEADER_BYTES = b'\x55\x4c\x6f\x67\x01\x12\x35'
SYNC_BYTES = b'\x2F\x73\x13\x20\x25\x0C\xBB\x12'
# message types
MSG_TYPE_FORMAT = ord('F')
MSG_TYPE_DATA = ord('D')
MSG_TYPE_INFO = ord('I')
MSG_TYPE_INFO_MULTIPLE = ord('M')
MSG_TYPE_PARAMETER = ord('P')
MSG_TYPE_ADD_LOGGED_MSG = ord('A')
MSG_TYPE_REMOVE_LOGGED_MSG = ord('R')
MSG_TYPE_SYNC = ord('S')
MSG_TYPE_DROPOUT = ord('O')
MSG_TYPE_LOGGING = ord('L')
MSG_TYPE_LOGGING_TAGGED = ord('C')
MSG_TYPE_FLAG_BITS = ord('B')
_UNPACK_TYPES = {
'int8_t': ['b', 1, np.int8],
'uint8_t': ['B', 1, np.uint8],
'int16_t': ['h', 2, np.int16],
'uint16_t': ['H', 2, np.uint16],
'int32_t': ['i', 4, np.int32],
'uint32_t': ['I', 4, np.uint32],
'int64_t': ['q', 8, np.int64],
'uint64_t': ['Q', 8, np.uint64],
'float': ['f', 4, np.float32],
'double': ['d', 8, np.float64],
'bool': ['?', 1, np.int8],
'char': ['c', 1, np.int8]
}
    @staticmethod
    def get_field_size(type_str):
        """
        get the field size in bytes.
        :param type_str: basic ULog type string, eg. 'int8_t'
        :return: size in bytes (int); raises KeyError for unknown type names
        """
        return ULog._UNPACK_TYPES[type_str][1]
# pre-init unpack structs for quicker use
_unpack_ushort_byte = struct.Struct('<HB').unpack
_unpack_ushort = struct.Struct('<H').unpack
_unpack_uint64 = struct.Struct('<Q').unpack
# when set to True disables string parsing exceptions
_disable_str_exceptions = False
    @staticmethod
    def parse_string(cstr):
        """
        wrapper for _parse_string with
        parametrized exception handling
        """
        ret = ''
        if _RUNNING_PYTHON3 and ULog._disable_str_exceptions:
            # lossily drop undecodable bytes instead of raising UnicodeDecodeError
            ret = _parse_string(cstr, 'ignore')
        else:
            ret = _parse_string(cstr)
        return ret
    def __init__(self, log_file, message_name_filter_list=None, disable_str_exceptions=True):
        """
        Initialize the object & load the file (the whole file is parsed
        eagerly here; afterwards all data is available in memory).

        :param log_file: a file name (str) or a readable file object
        :param message_name_filter_list: list of strings, to only load messages
               with the given names. If None, load everything.
        :param disable_str_exceptions: If True, ignore string parsing errors
        """
        self._debug = False              # verbose parser diagnostics
        self._file_corrupt = False

        self._start_timestamp = 0
        self._last_timestamp = 0
        self._msg_info_dict = {}
        self._msg_info_multiple_dict = {}
        self._initial_parameters = {}
        self._changed_parameters = []
        self._message_formats = {}
        self._logged_messages = []
        self._logged_messages_tagged = {}
        self._dropouts = []
        self._data_list = []

        self._subscriptions = {} # dict of key=msg_id, value=_MessageAddLogged
        self._filtered_message_ids = set() # _MessageAddLogged id's that are filtered
        self._missing_message_ids = set() # _MessageAddLogged id's that could not be found
        self._file_version = 0
        self._compat_flags = [0] * 8
        self._incompat_flags = [0] * 8
        self._appended_offsets = [] # file offsets for appended data
        self._has_sync = True # set to false when first file search for sync fails
        self._sync_seq_cnt = 0 # number of sync packets found in file
        # NOTE: this flag is class-level, so the last-constructed ULog wins
        ULog._disable_str_exceptions = disable_str_exceptions

        self._load_file(log_file, message_name_filter_list)
    ## parsed data (read-only views over the state filled in by _load_file) ##

    @property
    def start_timestamp(self):
        """ timestamp of file start (us) """
        return self._start_timestamp

    @property
    def last_timestamp(self):
        """ timestamp of last message (us) """
        return self._last_timestamp

    @property
    def msg_info_dict(self):
        """ dictionary of all information messages (key is a string, value
        depends on the type, usually string or int) """
        return self._msg_info_dict

    @property
    def msg_info_multiple_dict(self):
        """ dictionary of all information multiple messages (key is a string, value
        is a list of lists that contains the messages) """
        return self._msg_info_multiple_dict

    @property
    def initial_parameters(self):
        """ dictionary of all initially set parameters (key=param name) """
        return self._initial_parameters

    @property
    def changed_parameters(self):
        """ list of all changed parameters (tuple of (timestamp, name, value))"""
        return self._changed_parameters

    @property
    def message_formats(self):
        """ dictionary with key = format name (MessageFormat.name),
        value = MessageFormat object """
        return self._message_formats

    @property
    def logged_messages(self):
        """ list of MessageLogging objects """
        return self._logged_messages

    @property
    def logged_messages_tagged(self):
        """ dict of MessageLoggingTagged objects """
        return self._logged_messages_tagged

    @property
    def dropouts(self):
        """ list of MessageDropout objects """
        return self._dropouts

    @property
    def data_list(self):
        """ extracted data: list of Data objects """
        return self._data_list

    @property
    def has_data_appended(self):
        """ returns True if the log has data appended, False otherwise """
        return self._incompat_flags[0] & 0x1

    @property
    def file_corruption(self):
        """ True if a file corruption got detected """
        return self._file_corrupt
def get_dataset(self, name, multi_instance=0):
""" get a specific dataset.
example:
try:
gyro_data = ulog.get_dataset('sensor_gyro')
except (KeyError, IndexError, ValueError) as error:
print(type(error), "(sensor_gyro):", error)
:param name: name of the dataset
:param multi_instance: the multi_id, defaults to the first
:raises KeyError, IndexError, ValueError: if name or instance not found
"""
return [elem for elem in self._data_list
if elem.name == name and elem.multi_id == multi_instance][0]
    class Data(object):
        """ contains the final topic data for a single topic and instance """
        def __init__(self, message_add_logged_obj):
            # identity and layout of the subscription this data belongs to
            self.multi_id = message_add_logged_obj.multi_id
            self.name = message_add_logged_obj.message_name
            self.field_data = message_add_logged_obj.field_data
            self.timestamp_idx = message_add_logged_obj.timestamp_idx

            # get data as numpy.ndarray: a view over the accumulated raw
            # message buffer, typed with the dtype built from the format
            np_array = np.frombuffer(message_add_logged_obj.buffer,
                                     dtype=message_add_logged_obj.dtype)
            # convert into dict of np.array (which is easier to handle)
            self.data = {}
            for name in np_array.dtype.names:
                self.data[name] = np_array[name]

        def list_value_changes(self, field_name):
            """ get a list of (timestamp, value) tuples, whenever the value
            changes. The first data point with non-zero timestamp is always
            included, messages with timestamp = 0 are ignored """

            t = self.data['timestamp']
            x = self.data[field_name]
            indices = t != 0 # filter out 0 values
            t = t[indices]
            x = x[indices]
            if len(t) == 0: return []
            ret = [(t[0], x[0])]
            # positions where the value differs from its predecessor
            indices = np.where(x[:-1] != x[1:])[0] + 1
            ret.extend(zip(t[indices], x[indices]))
            return ret
## Representations of the messages from the log file ##
class _MessageHeader(object):
""" 3 bytes ULog message header """
def __init__(self):
self.msg_size = 0
self.msg_type = 0
def initialize(self, data):
self.msg_size, self.msg_type = ULog._unpack_ushort_byte(data)
    class _MessageInfo(object):
        """ ULog info message representation """
        def __init__(self, data, header, is_info_multiple=False):
            if is_info_multiple: # INFO_MULTIPLE message
                # first byte: does this entry continue the previous one?
                self.is_continued, = struct.unpack('<B', data[0:1])
                data = data[1:]
            key_len, = struct.unpack('<B', data[0:1])
            # the key is stored as "<type> <name>", e.g. "char[5] sys_name"
            type_key = ULog.parse_string(data[1:1+key_len])
            type_key_split = type_key.split(' ')
            self.type = type_key_split[0]
            self.key = type_key_split[1]
            if self.type.startswith('char['): # it's a string
                self.value = ULog.parse_string(data[1+key_len:])
            elif self.type in ULog._UNPACK_TYPES:
                unpack_type = ULog._UNPACK_TYPES[self.type]
                self.value, = struct.unpack('<'+unpack_type[0], data[1+key_len:])
            else: # probably an array (or non-basic type)
                # keep the raw bytes; the caller decides how to interpret them
                self.value = data[1+key_len:]
    class _MessageFlagBits(object):
        """ ULog message flag bits: 8 compat bytes + 8 incompat bytes +
        three uint64 file offsets of appended data """
        def __init__(self, data, header):
            if header.msg_size > 8 + 8 + 3*8:
                # we can still parse it but might miss some information
                print('Warning: Flags Bit message is longer than expected')
            self.compat_flags = list(struct.unpack('<'+'B'*8, data[0:8]))
            self.incompat_flags = list(struct.unpack('<'+'B'*8, data[8:16]))
            self.appended_offsets = list(struct.unpack('<'+'Q'*3, data[16:16+3*8]))
            # remove the 0's at the end (unused offset slots)
            while len(self.appended_offsets) > 0 and self.appended_offsets[-1] == 0:
                self.appended_offsets.pop()
class MessageFormat(object):
""" ULog message format representation """
def __init__(self, data, header):
format_arr = ULog.parse_string(data).split(':')
self.name = format_arr[0]
types_str = format_arr[1].split(';')
self.fields = [] # list of tuples (type, array_size, name)
for t in types_str:
if len(t) > 0:
self.fields.append(self._extract_type(t))
@staticmethod
def _extract_type(field_str):
field_str_split = field_str.split(' ')
type_str = field_str_split[0]
name_str = field_str_split[1]
a_pos = type_str.find('[')
if a_pos == -1:
array_size = 1
type_name = type_str
else:
b_pos = type_str.find(']')
array_size = int(type_str[a_pos+1:b_pos])
type_name = type_str[:a_pos]
return type_name, array_size, name_str
class MessageLogging(object):
""" ULog logged string message representation """
def __init__(self, data, header):
self.log_level, = struct.unpack('<B', data[0:1])
self.timestamp, = struct.unpack('<Q', data[1:9])
self.message = ULog.parse_string(data[9:])
def log_level_str(self):
return {ord('0'): 'EMERGENCY',
ord('1'): 'ALERT',
ord('2'): 'CRITICAL',
ord('3'): 'ERROR',
ord('4'): 'WARNING',
ord('5'): 'NOTICE',
ord('6'): 'INFO',
ord('7'): 'DEBUG'}.get(self.log_level, 'UNKNOWN')
class MessageLoggingTagged(object):
""" ULog tagged log string message representation """
def __init__(self, data, header):
self.log_level, = struct.unpack('<B', data[0:1])
self.tag = struct.unpack('<H', data[1:3])
self.timestamp, = struct.unpack('<Q', data[3:11])
self.message = ULog.parse_string(data[11:])
def log_level_str(self):
return {ord('0'): 'EMERGENCY',
ord('1'): 'ALERT',
ord('2'): 'CRITICAL',
ord('3'): 'ERROR',
ord('4'): 'WARNING',
ord('5'): 'NOTICE',
ord('6'): 'INFO',
ord('7'): 'DEBUG'}.get(self.log_level, 'UNKNOWN')
class MessageDropout(object):
""" ULog dropout message representation """
def __init__(self, data, header, timestamp):
self.duration, = struct.unpack('<H', data)
self.timestamp = timestamp
    class _FieldData(object):
        """ Type and name of a single ULog data field """
        def __init__(self, field_name, type_str):
            # field_name: flattened field path, e.g. 'q[0]' or 'pose.x'
            # type_str: basic ULog type name (a key of ULog._UNPACK_TYPES)
            self.field_name = field_name
            self.type_str = type_str
    class _MessageAddLogged(object):
        """ ULog add logging data message representation: a subscription that
        binds a msg_id to a message format and accumulates its raw data """
        def __init__(self, data, header, message_formats):
            self.multi_id, = struct.unpack('<B', data[0:1])
            self.msg_id, = struct.unpack('<H', data[1:3])
            self.message_name = ULog.parse_string(data[3:])
            self.field_data = [] # list of _FieldData
            self.timestamp_idx = -1
            self._parse_format(message_formats)

            # byte offset of the timestamp field within one serialized message
            self.timestamp_offset = 0
            for field in self.field_data:
                if field.field_name == 'timestamp':
                    break
                self.timestamp_offset += ULog._UNPACK_TYPES[field.type_str][1]

            self.buffer = bytearray() # accumulate all message data here

            # construct types for numpy (one record per logged message)
            dtype_list = []
            for field in self.field_data:
                numpy_type = ULog._UNPACK_TYPES[field.type_str][2]
                dtype_list.append((field.field_name, numpy_type))
            self.dtype = np.dtype(dtype_list).newbyteorder('<')

        def _parse_format(self, message_formats):
            # flatten the (possibly nested) format into self.field_data
            self._parse_nested_type('', self.message_name, message_formats)

            # remove padding fields at the end
            while (len(self.field_data) > 0 and
                   self.field_data[-1].field_name.startswith('_padding')):
                self.field_data.pop()

        def _parse_nested_type(self, prefix_str, type_name, message_formats):
            # we flatten nested types (arrays become name[i], members name.sub)
            message_format = message_formats[type_name]
            for (type_name_fmt, array_size, field_name) in message_format.fields:
                if type_name_fmt in ULog._UNPACK_TYPES:
                    if array_size > 1:
                        for i in range(array_size):
                            self.field_data.append(ULog._FieldData(
                                prefix_str+field_name+'['+str(i)+']', type_name_fmt))
                    else:
                        self.field_data.append(ULog._FieldData(
                            prefix_str+field_name, type_name_fmt))
                    if prefix_str+field_name == 'timestamp':
                        self.timestamp_idx = len(self.field_data) - 1
                else: # nested type
                    if array_size > 1:
                        for i in range(array_size):
                            self._parse_nested_type(prefix_str+field_name+'['+str(i)+'].',
                                                    type_name_fmt, message_formats)
                    else:
                        self._parse_nested_type(prefix_str+field_name+'.',
                                                type_name_fmt, message_formats)
    class _MessageData(object):
        """ ULog data message: one serialized sample for a subscribed topic """
        def __init__(self):
            self.timestamp = 0

        def initialize(self, data, header, subscriptions, ulog_object):
            msg_id, = ULog._unpack_ushort(data[:2])
            if msg_id in subscriptions:
                subscription = subscriptions[msg_id]
                # accumulate data to a buffer, will be parsed later
                subscription.buffer += data[2:]
                t_off = subscription.timestamp_offset
                # TODO: the timestamp can have another size than uint64
                self.timestamp, = ULog._unpack_uint64(data[t_off+2:t_off+10])
            else:
                if not msg_id in ulog_object._filtered_message_ids:
                    # this is an error, but make it non-fatal (warn only once
                    # per unknown msg_id)
                    if not msg_id in ulog_object._missing_message_ids:
                        ulog_object._missing_message_ids.add(msg_id)
                        if ulog_object._debug:
                            print(ulog_object._file_handle.tell())
                        print('Warning: no subscription found for message id {:}. Continuing,'
                              ' but file is most likely corrupt'.format(msg_id))
                self.timestamp = 0
    def _add_message_info_multiple(self, msg_info):
        """ add a message info multiple to self._msg_info_multiple_dict.

        Continued messages are appended to the last list for that key;
        otherwise a new list is started. """
        if msg_info.key in self._msg_info_multiple_dict:
            if msg_info.is_continued:
                self._msg_info_multiple_dict[msg_info.key][-1].append(msg_info.value)
            else:
                self._msg_info_multiple_dict[msg_info.key].append([msg_info.value])
        else:
            self._msg_info_multiple_dict[msg_info.key] = [[msg_info.value]]
    def _load_file(self, log_file, message_name_filter_list):
        """ load and parse an ULog file into memory.

        Accepts either a path (opened in binary mode) or an already-open
        readable file object; the handle is closed when parsing finishes. """
        if isinstance(log_file, str):
            self._file_handle = open(log_file, "rb")
        else:
            self._file_handle = log_file

        # parse the whole file
        self._read_file_header()
        self._last_timestamp = self._start_timestamp
        self._read_file_definitions()

        if self.has_data_appended and len(self._appended_offsets) > 0:
            if self._debug:
                print('This file has data appended')
            # parse each section up to the next appended-data offset, then
            # jump there (the gap may contain a partially written message)
            for offset in self._appended_offsets:
                self._read_file_data(message_name_filter_list, read_until=offset)
                self._file_handle.seek(offset)

        # read the whole file, or the rest if data appended
        self._read_file_data(message_name_filter_list)
        self._file_handle.close()
        del self._file_handle
def _read_file_header(self):
    """Read and validate the 16-byte ULog file header.

    Sets self._file_version and self._start_timestamp; raises on a
    short or unrecognized header.
    """
    raw = self._file_handle.read(16)
    if len(raw) != 16:
        raise Exception("Invalid file format (Header too short)")
    if raw[:7] != self.HEADER_BYTES:
        raise Exception("Invalid file format (Failed to parse header)")
    self._file_version, = struct.unpack('B', raw[7:8])
    if self._file_version > 1:
        print("Warning: unknown file version. Will attempt to read it anyway")

    # read the logging start timestamp (last 8 header bytes)
    self._start_timestamp, = ULog._unpack_uint64(raw[8:])
def _read_file_definitions(self):
    """Parse the definitions section of the log: everything between the
    file header and the first ADD_LOGGED_MSG / LOGGING message — info
    messages, message formats, initial parameters and the FLAG_BITS
    message. Leaves the file position at the start of the data section.
    """
    header = self._MessageHeader()
    while True:
        data = self._file_handle.read(3)
        if not data:
            break  # EOF
        header.initialize(data)
        data = self._file_handle.read(header.msg_size)
        try:
            if header.msg_type == self.MSG_TYPE_INFO:
                msg_info = self._MessageInfo(data, header)
                self._msg_info_dict[msg_info.key] = msg_info.value
            elif header.msg_type == self.MSG_TYPE_INFO_MULTIPLE:
                msg_info = self._MessageInfo(data, header, is_info_multiple=True)
                self._add_message_info_multiple(msg_info)
            elif header.msg_type == self.MSG_TYPE_FORMAT:
                msg_format = self.MessageFormat(data, header)
                self._message_formats[msg_format.name] = msg_format
            elif header.msg_type == self.MSG_TYPE_PARAMETER:
                msg_info = self._MessageInfo(data, header)
                self._initial_parameters[msg_info.key] = msg_info.value
            elif (header.msg_type == self.MSG_TYPE_ADD_LOGGED_MSG or
                  header.msg_type == self.MSG_TYPE_LOGGING or
                  header.msg_type == self.MSG_TYPE_LOGGING_TAGGED):
                # first data-section message: rewind over its header+payload
                # so _read_file_data() sees it again
                self._file_handle.seek(-(3+header.msg_size), 1)
                break  # end of section
            elif header.msg_type == self.MSG_TYPE_FLAG_BITS:
                # make sure this is the first message in the log
                # (16-byte file header + 3-byte msg header + payload)
                if self._file_handle.tell() != 16 + 3 + header.msg_size:
                    print('Error: FLAGS_BITS message must be first message. Offset:',
                          self._file_handle.tell())
                msg_flag_bits = self._MessageFlagBits(data, header)
                self._compat_flags = msg_flag_bits.compat_flags
                self._incompat_flags = msg_flag_bits.incompat_flags
                self._appended_offsets = msg_flag_bits.appended_offsets
                if self._debug:
                    print('compat flags: ', self._compat_flags)
                    print('incompat flags:', self._incompat_flags)
                    print('appended offsets:', self._appended_offsets)
                # check if there are bits set that we don't know:
                # only bit 0 of the first incompat byte is accepted here
                # (presumably the data-appended flag — see _load_file)
                unknown_incompat_flag_msg = \
                    "Unknown incompatible flag set: cannot parse the log"
                if self._incompat_flags[0] & ~1:
                    raise Exception(unknown_incompat_flag_msg)
                for i in range(1, 8):
                    if self._incompat_flags[i]:
                        raise Exception(unknown_incompat_flag_msg)
            else:
                if self._debug:
                    print('read_file_definitions: unknown message type: %i (%s)' %
                          (header.msg_type, chr(header.msg_type)))
                    file_position = self._file_handle.tell()
                    print('file position: %i (0x%x) msg size: %i' % (
                        file_position, file_position, header.msg_size))
                if self._check_packet_corruption(header):
                    # seek back to advance only by a single byte instead of
                    # skipping the message
                    self._file_handle.seek(-2-header.msg_size, 1)
        except IndexError:
            # payload shorter than the parser expected: flag once, keep going
            if not self._file_corrupt:
                print("File corruption detected while reading file definitions!")
                self._file_corrupt = True
def _find_sync(self, last_n_bytes=-1):
    """
    read the file from a given location until the end of sync_byte sequence is found
    or an end condition is met(reached EOF or searched all last_n_bytes).
    :param last_n_bytes: optional arg to search only last_n_bytes for sync_bytes.
    when provided, _find_sync searches for sync_byte sequence in the last_n_bytes
    from current location, else, from current location till end of file.
    return true if successful, else return false and seek back to initial position and
    set _has_sync to false if searched till end of file
    """
    sync_seq_found = False
    initial_file_position = self._file_handle.tell()
    current_file_position = initial_file_position
    search_chunk_size = 512  # number of bytes that are searched at once
    if last_n_bytes != -1:
        # backwards search: rewind and limit the scan to one chunk
        current_file_position = self._file_handle.seek(-last_n_bytes, 1)
        search_chunk_size = last_n_bytes
    chunk = self._file_handle.read(search_chunk_size)
    while len(chunk) >= len(ULog.SYNC_BYTES):
        current_file_position += len(chunk)
        chunk_index = chunk.find(ULog.SYNC_BYTES)
        if chunk_index >= 0:
            if self._debug:
                print("Found sync at %i" % (current_file_position - len(chunk) + chunk_index))
            # seek to end of sync sequence and break
            current_file_position = self._file_handle.seek(
                current_file_position - len(chunk) + chunk_index + len(ULog.SYNC_BYTES), 0)
            sync_seq_found = True
            break
        elif last_n_bytes != -1:
            # we read the whole last_n_bytes and did not find sync
            break
        # seek back 7 bytes to handle boundary condition and read next chunk
        # (a sync sequence may straddle two consecutive chunks)
        current_file_position = self._file_handle.seek(-(len(ULog.SYNC_BYTES)-1), 1)
        chunk = self._file_handle.read(search_chunk_size)
    if not sync_seq_found:
        # restore the original position on failure
        current_file_position = self._file_handle.seek(initial_file_position, 0)
        if last_n_bytes == -1:
            # searched until EOF: stop trying to re-sync for the rest of the file
            self._has_sync = False
            if self._debug:
                print("Failed to find sync in file from %i" % initial_file_position)
        else:
            if self._debug:
                print("Failed to find sync in (%i, %i)" %
                      (initial_file_position - last_n_bytes, initial_file_position))
    else:
        # declare file corrupt if we skipped bytes to sync sequence
        self._file_corrupt = True
    return sync_seq_found
def _read_file_data(self, message_name_filter_list, read_until=None):
    """
    read the file data section
    :param message_name_filter_list: optional list of message names; when set,
        subscriptions for other messages are filtered out
    :param read_until: an optional file offset: if set, parse only up to
    this offset (smaller than)
    """
    if read_until is None:
        read_until = 1 << 50  # make it larger than any possible log file
    try:
        # pre-init reusable objects
        header = self._MessageHeader()
        msg_data = self._MessageData()
        curr_file_pos = self._file_handle.tell()
        while True:
            data = self._file_handle.read(3)
            curr_file_pos += len(data)
            header.initialize(data)
            data = self._file_handle.read(header.msg_size)
            curr_file_pos += len(data)
            if len(data) < header.msg_size:
                break  # less data than expected. File is most likely cut
            if curr_file_pos > read_until:
                if self._debug:
                    print('read until offset=%i done, current pos=%i' %
                          (read_until, curr_file_pos))
                break
            try:
                if header.msg_type == self.MSG_TYPE_INFO:
                    msg_info = self._MessageInfo(data, header)
                    self._msg_info_dict[msg_info.key] = msg_info.value
                elif header.msg_type == self.MSG_TYPE_INFO_MULTIPLE:
                    msg_info = self._MessageInfo(data, header, is_info_multiple=True)
                    self._add_message_info_multiple(msg_info)
                elif header.msg_type == self.MSG_TYPE_PARAMETER:
                    # parameter changed during logging: record when it changed
                    msg_info = self._MessageInfo(data, header)
                    self._changed_parameters.append((self._last_timestamp,
                                                     msg_info.key, msg_info.value))
                elif header.msg_type == self.MSG_TYPE_ADD_LOGGED_MSG:
                    msg_add_logged = self._MessageAddLogged(data, header,
                                                            self._message_formats)
                    if (message_name_filter_list is None or
                            msg_add_logged.message_name in message_name_filter_list):
                        self._subscriptions[msg_add_logged.msg_id] = msg_add_logged
                    else:
                        # remember filtered ids so their DATA messages are
                        # skipped silently instead of reported as missing
                        self._filtered_message_ids.add(msg_add_logged.msg_id)
                elif header.msg_type == self.MSG_TYPE_LOGGING:
                    msg_logging = self.MessageLogging(data, header)
                    self._logged_messages.append(msg_logging)
                elif header.msg_type == self.MSG_TYPE_LOGGING_TAGGED:
                    msg_log_tagged = self.MessageLoggingTagged(data, header)
                    if msg_log_tagged.tag in self._logged_messages_tagged:
                        self._logged_messages_tagged[msg_log_tagged.tag].append(msg_log_tagged)
                    else:
                        self._logged_messages_tagged[msg_log_tagged.tag] = [msg_log_tagged]
                elif header.msg_type == self.MSG_TYPE_DATA:
                    msg_data.initialize(data, header, self._subscriptions, self)
                    # track the latest timestamp seen (0 means none extracted)
                    if msg_data.timestamp != 0 and msg_data.timestamp > self._last_timestamp:
                        self._last_timestamp = msg_data.timestamp
                elif header.msg_type == self.MSG_TYPE_DROPOUT:
                    msg_dropout = self.MessageDropout(data, header,
                                                      self._last_timestamp)
                    self._dropouts.append(msg_dropout)
                elif header.msg_type == self.MSG_TYPE_SYNC:
                    self._sync_seq_cnt = self._sync_seq_cnt + 1
                else:
                    if self._debug:
                        print('_read_file_data: unknown message type: %i (%s)' %
                              (header.msg_type, chr(header.msg_type)))
                        print('file position: %i msg size: %i' % (
                            curr_file_pos, header.msg_size))
                    if self._check_packet_corruption(header):
                        # seek back to advance only by a single byte instead of
                        # skipping the message
                        curr_file_pos = self._file_handle.seek(-2-header.msg_size, 1)
                        # try recovery with sync sequence in case of unknown msg_type
                        if self._has_sync:
                            self._find_sync()
                    else:
                        # seek back msg_size to look for sync sequence in payload
                        if self._has_sync:
                            self._find_sync(header.msg_size)
            except IndexError:
                # payload shorter than the parser expected: flag once, continue
                if not self._file_corrupt:
                    print("File corruption detected while reading file data!")
                    self._file_corrupt = True
    except struct.error:
        pass  # we read past the end of the file
    # convert into final representation
    while self._subscriptions:
        _, value = self._subscriptions.popitem()
        if len(value.buffer) > 0:  # only add if we have data
            data_item = ULog.Data(value)
            self._data_list.append(data_item)
def _check_packet_corruption(self, header):
"""
check for data corruption based on an unknown message type in the header
set _file_corrupt flag to true if a corrupt packet is found
We need to handle 2 cases:
- corrupt file (we do our best to read the rest of the file)
- new ULog message type got added (we just want to skip the message)
return true if packet associated with header is corrupt, else return false
"""
data_corrupt = False
if header.msg_type == 0 or header.msg_size == 0 or header.msg_size > 10000:
if not self._file_corrupt and self._debug:
print('File corruption detected')
data_corrupt = True
self._file_corrupt = True
return data_corrupt
def get_version_info(self, key_name='ver_sw_release'):
    """
    get the (major, minor, patch, type) version information as tuple.
    Returns None if not found
    definition of type is:
    >= 0: development
    >= 64: alpha version
    >= 128: beta version
    >= 192: RC version
    == 255: release version
    """
    if key_name not in self._msg_info_dict:
        return None
    # packed as one 32-bit word: major|minor|patch|type (high to low byte)
    val = self._msg_info_dict[key_name]
    return ((val >> 24) & 0xff, (val >> 16) & 0xff, (val >> 8) & 0xff, val & 0xff)
def get_version_info_str(self, key_name='ver_sw_release'):
    """
    get version information in the form 'v1.2.3 (RC)', or None if version
    tag either not found or it's a development version
    """
    version = self.get_version_info(key_name)
    if version is None or version[3] < 64:
        # no tag, or a development build (< alpha threshold)
        return None
    release_type = version[3]
    if release_type < 128:
        suffix = ' (alpha)'
    elif release_type < 192:
        suffix = ' (beta)'
    elif release_type < 255:
        suffix = ' (RC)'
    else:
        suffix = ''  # 255 = release: no suffix
    return 'v{}.{}.{}{}'.format(version[0], version[1], version[2], suffix)
| [
"62520510+majuid@users.noreply.github.com"
] | 62520510+majuid@users.noreply.github.com |
b741fb5ad71c5f866583c59b99471308f3687b26 | f3fb46ec8167c3d7e451265a76e1646aef78233f | /world/stat_checks/utils.py | 3d228a15fb3d369bed4b3c54c83fd1a965011308 | [
"MIT"
] | permissive | Arx-Game/arxcode | d1196941db4b551bb8ec96024241787cf4f34af3 | 363a1f14fd1a640580a4bf4486a1afe776757557 | refs/heads/stable_orphan | 2023-08-03T04:27:24.388330 | 2023-07-29T15:10:38 | 2023-07-29T15:10:38 | 144,421,010 | 52 | 45 | MIT | 2023-08-19T00:52:23 | 2018-08-11T22:06:07 | Python | UTF-8 | Python | false | false | 685 | py | """
Just a few utilities. Should NOT import anything in global scope to avoid
circular imports.
"""
def get_check_by_name(name: str):
    """Look up a StatCheck instance by name.

    Raises StatCheck.DoesNotExist when no check has that name. The import
    is deferred to call time to avoid circular imports.
    """
    from world.stat_checks.models import StatCheck

    found = StatCheck.get_instance_by_name(name)
    if found:
        return found
    raise StatCheck.DoesNotExist(f"No check exists by name '{name}'")
def get_check_maker_by_name(name: str, character, **kwargs):
    """Build a DefinedCheckMaker for the named check and given character.

    Extra keyword arguments are forwarded to DefinedCheckMaker. Import is
    deferred to call time to avoid circular imports.
    """
    from world.stat_checks.check_maker import DefinedCheckMaker

    resolved_check = get_check_by_name(name)
    return DefinedCheckMaker(character=character, check=resolved_check, **kwargs)
| [
"surly.mime@gmail.com"
] | surly.mime@gmail.com |
b3ceee13afd1ce7d15f2359a57e07019d7e67fac | c518166735f921122d7de9043a6cd386041e8668 | /examples/cifar10/edit_model.py | 3361c72a41107c76b78bcfa412da0680e4c47624 | [
"LicenseRef-scancode-generic-cla",
"BSD-2-Clause"
] | permissive | AkiChen/Correlative-Filters-Caffe | a84eeb81e983fe115684d0316a82bdfce907645b | 406153c3c4c5311ac5a5ad5054b9d9b653b2003b | refs/heads/master | 2021-01-11T07:36:07.648880 | 2015-08-27T11:24:09 | 2015-08-27T11:24:09 | 40,585,857 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,639 | py | #!/usr/bin/python
import sys
import re
def append(file_name):
    """Append a TEST-phase Softmax layer to the prototxt file and switch
    'multi_view: false' to 'multi_view: true'.

    The new Softmax layer takes as bottom the non-"label" blob that feeds
    the existing Accuracy layer. The file is rewritten in place.
    """
    with open(file_name, 'r') as handle:
        text = handle.read()

    # locate the Accuracy layer and the (non-label) blob it consumes
    acc_block = re.search(r'layers?\s\{[^}]*type:?\s"Accuracy"[^}]*\}', text)
    blob = re.search(r'bottom:?\s\"((?!label)\w+)\"', acc_block.group(0))

    softmax_layer = ('layer {\nname: "softmax"\ntype: "Softmax"\nbottom: "bottom_name"\n'
                     'top: "softmax"\ninclude { phase: TEST }\n}')
    softmax_layer = softmax_layer.replace('bottom_name', blob.group(1))

    # flip the multi_view flag on for test-time augmentation
    off_line = re.search(r'multi_view:\s*false', text)
    on_line = off_line.group(0).replace('false', 'true')

    text = text + softmax_layer
    text = text.replace(off_line.group(0), on_line)

    with open(file_name, 'w') as handle:
        handle.write(text)
def remove(file_name):
    """Undo append(): delete the TEST-phase Softmax layer from the prototxt
    file and switch 'multi_view: true' back to 'multi_view: false'.

    The layer text to delete is reconstructed from the Accuracy layer's
    non-"label" bottom blob, so it must match what append() wrote.
    """
    with open(file_name, 'r') as handle:
        text = handle.read()

    # rebuild the exact layer text that append() inserted
    acc_block = re.search(r'layers?\s\{[^}]*type:?\s"Accuracy"[^}]*\}', text)
    blob = re.search(r'bottom:?\s\"((?!label)\w+)\"', acc_block.group(0))

    softmax_layer = ('layer {\nname: "softmax"\ntype: "Softmax"\nbottom: "bottom_name"\n'
                     'top: "softmax"\ninclude { phase: TEST }\n}')
    softmax_layer = softmax_layer.replace('bottom_name', blob.group(1))

    # flip the multi_view flag back off
    on_line = re.search(r'multi_view:\s*true', text)
    off_line = on_line.group(0).replace('true', 'false')

    text = text.replace(softmax_layer, "")
    text = text.replace(on_line.group(0), off_line)

    with open(file_name, 'w') as handle:
        handle.write(text)
if __name__ == '__main__':
    # CLI entry point: edit_model.py <append|remove> <model>_train_test.prototxt
    # Fixes vs. the original: the error message only mentioned the file
    # argument although two arguments are required, and the getattr()-based
    # dispatch allowed calling arbitrary module attributes from user input.
    operations = {'append': append, 'remove': remove}
    args = sys.argv
    if len(args) != 3 or args[1] not in operations:
        print('Usage: {} <append|remove> <path/to/_train_test.prototxt>'.format(args[0]))
    else:
        operations[args[1]](args[2])
"payenjoe@qq.com"
] | payenjoe@qq.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.