blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2 values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313 values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107 values | src_encoding stringclasses 20 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 4 6.02M | extension stringclasses 78 values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4b076855d9faf7d4b9b52c1ba3bcabde38de220d | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_200/3477.py | 445b282b68ddf7bc5304da572da944985b261730 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,508 | py | """
Problem B. Tidy Numbers
Problem
Tatiana likes to keep things tidy. Her toys are sorted from smallest to largest,
her pencils are sorted from shortest to longest and her computers from oldest to
newest. One day, when practicing her counting skills, she noticed that some
integers, when written in base 10 with no leading zeroes, have their digits
sorted in non-decreasing order. Some examples of this are 8, 123, 555, and
224488. She decided to call these numbers tidy. Numbers that do not have this
property, like 20, 321, 495 and 999990, are not tidy.
She just finished counting all positive integers in ascending order from 1 to N.
What was the last tidy number she counted?
Input
The first line of the input gives the number of test cases, T. T lines follow.
Each line describes a test case with a single integer N, the last number counted
by Tatiana.
Output
For each test case, output one line containing Case #x: y, where x is the test
case number (starting from 1) and y is the last tidy number counted by Tatiana.
Limits
1 ≤ T ≤ 100.
Small dataset
1 ≤ N ≤ 1000.
Large dataset
1 ≤ N ≤ 1018.
Sample
Input
Output
4
132
1000
7
111111111111111110
Case #1: 129
Case #2: 999
Case #3: 7
Case #4: 99999999999999999
"""
def get_closest_tidy_number(n):
if n < 10:
return n
n_str = str(n)
n_len = len(n_str)
prev_value = -1
break_idx = -1
# find position and value of the first digit to the left that breaks
# non-decreasing order
for idx in range(len(n_str)):
value = int(n_str[idx])
if value < prev_value:
break_idx = idx
break
prev_value = value
if break_idx == -1:
return n
# decimal place from the right: 0 means 1s, 1 means 10s and so on
# place = len(n_str) - break_idx - 1
tidy_value = int(n_str[:break_idx] + '0' * (n_len - break_idx)) - 1
n_str = str(tidy_value)
while break_idx > 1:
break_idx -= 1
if int(n_str[break_idx]) < int(n_str[break_idx - 1]):
tidy_value = int(n_str[:break_idx] + '0' * (n_len - break_idx)) - 1
n_str = str(tidy_value)
else:
return tidy_value
return tidy_value
test_cases = int(input())
for i in range(1, test_cases + 1):
input_str = int(input())
tidy_number = get_closest_tidy_number(input_str)
print("Case #{}: {}".format(i, tidy_number))
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
64309c48dc23fcf4e0244500c43fc8cc8d36779a | 618ff263c58557f7dbcd509b35b91555aef1f94d | /This-Day-Web/mycalendar/migrations/0003_auto_20190115_0752.py | fc019db1c0419c04abf260b71d7207858fb1c63a | [] | no_license | all0pen/This-Day-iOS | 33a78ad065373a12b7d9133d34eab0a44b70a403 | 43e311655ed054319770826c91c9aaa566ec45f2 | refs/heads/main | 2023-03-27T18:12:52.147617 | 2021-03-31T11:27:28 | 2021-03-31T11:27:28 | 353,315,993 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 370 | py | # Generated by Django 2.1.5 on 2019-01-15 07:52
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('mycalendar', '0002_auto_20190115_0517'),
]
operations = [
migrations.RenameField(
model_name='calendarinfo',
old_name='user',
new_name='userID',
),
]
| [
"dadada1028@naver.com"
] | dadada1028@naver.com |
60606fec2139a4f63c1875b06161f26f583ba0f6 | f42fbea44f4d434d44367292c62f8e283e456373 | /leaderboard.py | f427560d982626c7812eb74d5d0a0bbffd7a5ea4 | [] | no_license | 2qar/pingbot | 4ead52191614255871f11acd83e841dbddbc3351 | b772df875bc7c7166f4d11fba5c70833ee542a97 | refs/heads/master | 2023-04-27T21:55:52.499572 | 2018-09-01T22:06:52 | 2018-09-01T22:06:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,011 | py | from config import error
from discord import Embed
from discord import Colour
import pingutils
import json
class Leaderboard:
colors = [Colour.gold(), Colour.light_grey(), Colour.dark_orange()]
async def check_server_leaderboard(msg_obj, client):
try:
data = None
with open("leaderboards/" + msg_obj.server.id + ".json") as file:
data = json.load(file)
for ping in range(len(data)):
await client.send_message(msg_obj.channel, embed=Leaderboard.embed_constructor(data[ping], msg_obj.server, ping + 1, Leaderboard.colors[ping]))
except Exception as e:
print(e)
await client.send_message(msg_obj.channel, error + "No leaderboard for this server!")
#TODO: Maybe divide interval by ping_count to determine a ping's "value" and compare values for leaderboard spots
# maybe only use the worth as a modifier to multiply ping_count by for a "value"
def try_add_to_leaderboard(server_id, ping):
# try to open an existing leaderboard and if possible add this ping
try:
leaderboard_path = "leaderboards/" + server_id + ".json"
print("checking if ping should be added")
data = None
insert_index = -1
ping_json = ping.asJSON()
with open(leaderboard_path) as file:
data = json.load(file)
ping_count = ping_json['ping_count']
for i in range(0, len(data)):
if data[i]['ping_count'] < ping_count:
insert_index = i
print("ping marked for insertion")
break
if insert_index == -1 and len(data) < 3:
insert_index = -2
# add the ping if insert_index was actually changed
if insert_index > -1:
with open(leaderboard_path, "w") as outfile:
data.insert(insert_index, ping_json)
if len(data) == 4:
del data[3]
json.dump(data, outfile)
print("ping inserted")
# if there's room for the ping, just add it
elif insert_index == -2:
with open(leaderboard_path, "w") as outfile:
data.append(ping_json)
print("ping appended")
json.dump(data, outfile)
# if there's no existing leaderboard for this server, make one and add this ping
except Exception as e:
print("error ", e)
print("making leaderboard")
with open("leaderboards/" + server_id + ".json", "w") as outfile:
data = []
data.append(ping.asJSON())
json.dump(data, outfile)
#TODO: Add discriminator to the JSON
def embed_constructor(ping_json, server, num, color):
embed = Embed()
author = ping_json["author"]
author_icon = ping_json["avatar"]
embed.colour = color
embed.set_author(name="#{0}".format(num))
embed.set_thumbnail(url=author_icon)
embed.add_field(name="Author", value = author)
embed.add_field(name="Message", value=ping_json["user_message"])
mentions = ping_json["mentions"]
embed.add_field(name="Mentions", value=pingutils.get_formatted_name_list(server, mentions))
embed.add_field(name="Ping Count", value=ping_json["ping_count"])
interval = ping_json["interval"]
embed.add_field(name="Interval", value="{0} seconds".format(interval))
return embed
| [
"tucker1014@hotmail.com"
] | tucker1014@hotmail.com |
b1b17de27b17b10b04b0d215f31b42d2845350ab | c31ee8136a57a96649196081e1cfde0676c2a481 | /larcv/app/arxiv/arxiv/LArOpenCVHandle/cfg/mac/arxiv/analyze.py | 1ff1c17fba6bd79428e15f9dc424c3ee27064942 | [
"MIT"
] | permissive | DeepLearnPhysics/larcv2 | b12b46168e5c6795c70461c9495e29b427cd88b5 | 31863c9b094a09db2a0286cfbb63ccd2f161e14d | refs/heads/develop | 2023-06-11T03:15:51.679864 | 2023-05-30T17:51:19 | 2023-05-30T17:51:19 | 107,551,725 | 16 | 19 | MIT | 2023-04-10T10:15:13 | 2017-10-19T13:42:39 | C++ | UTF-8 | Python | false | false | 1,113 | py | from larcv import larcv
import ROOT, sys
from ROOT import std
if len(sys.argv) < 2:
print 'Usage: python',sys.argv[0],'CONFIG_FILE [LARCV_FILE1 LARCV_FILE2 ...]'
sys.exit(1)
proc = larcv.ProcessDriver('ProcessDriver')
print "Loading config... ",sys.argv[1]
proc.configure(sys.argv[1])
print "Loaded"
print sys.argv
if len(sys.argv) > 1:
flist=ROOT.std.vector('std::string')()
for x in xrange(len(sys.argv)-6):
print "Pushing back...",sys.argv[x+6]
flist.push_back(sys.argv[x+6])
proc.override_input_file(flist)
proc.override_ana_file(sys.argv[2] + ".root")
proc.override_output_file(sys.argv[3] + ".root")
ana_id = proc.process_id("LArbysImageAna")
ext_id = proc.process_id("LArbysImageExtract")
out_id = proc.process_id("LArbysImageOut")
ana_proc = proc.process_ptr(ana_id)
ext_proc = proc.process_ptr(ext_id)
out_proc = proc.process_ptr(out_id)
out_proc.SetLArbysImageAna(ana_proc)
out_proc.SetLArbysImageExtract(ext_proc)
ana_proc.SetInputLArbysMCFile(sys.argv[4]);
ana_proc.SetInputLArbysRecoFile(sys.argv[5])
proc.initialize()
proc.batch_process()
proc.finalize()
| [
"kazuhiro@nevis.columbia.edu"
] | kazuhiro@nevis.columbia.edu |
7e221de13be5313611640449392570d027655ac8 | 0a7d76af2d8dced3c65fbcbda9af6d17b1e429c0 | /tests/forms_tests/tests/test_media.py | 2b73a041becbfaf07ab7b9065f321e9886cac50a | [
"Python-2.0",
"BSD-3-Clause"
] | permissive | manhnd1112/GR | 607d4e9add987dd994c0fe20629b03631769c02a | 2ee9da122afeb33b3ee589a7f64d3f74d2654a1a | refs/heads/master | 2022-12-11T00:36:05.143147 | 2018-05-31T10:03:35 | 2018-05-31T10:03:35 | 125,654,350 | 0 | 0 | BSD-3-Clause | 2022-12-08T00:58:26 | 2018-03-17T17:42:34 | Python | UTF-8 | Python | false | false | 24,831 | py | import warnings
from django.forms import CharField, Form, Media, MultiWidget, TextInput
from django.template import Context, Template
from django.test import SimpleTestCase, override_settings
@override_settings(
STATIC_URL='http://media.example.com/static/',
)
class FormsMediaTestCase(SimpleTestCase):
"""Tests for the media handling on widgets and forms"""
def test_construction(self):
# Check construction of media objects
m = Media(
css={'all': ('path/to/css1', '/path/to/css2')},
js=('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3'),
)
self.assertEqual(
str(m),
"""<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/path/to/js1"></script>
<script type="text/javascript" src="http://media.other.com/path/to/js2"></script>
<script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>"""
)
self.assertEqual(
repr(m),
"Media(css={'all': ('path/to/css1', '/path/to/css2')}, "
"js=('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3'))"
)
class Foo:
css = {
'all': ('path/to/css1', '/path/to/css2')
}
js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3')
m3 = Media(Foo)
self.assertEqual(
str(m3),
"""<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/path/to/js1"></script>
<script type="text/javascript" src="http://media.other.com/path/to/js2"></script>
<script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>"""
)
# A widget can exist without a media definition
class MyWidget(TextInput):
pass
w = MyWidget()
self.assertEqual(str(w.media), '')
def test_media_dsl(self):
###############################################################
# DSL Class-based media definitions
###############################################################
# A widget can define media if it needs to.
# Any absolute path will be preserved; relative paths are combined
# with the value of settings.MEDIA_URL
class MyWidget1(TextInput):
class Media:
css = {
'all': ('path/to/css1', '/path/to/css2')
}
js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3')
w1 = MyWidget1()
self.assertEqual(
str(w1.media),
"""<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/path/to/js1"></script>
<script type="text/javascript" src="http://media.other.com/path/to/js2"></script>
<script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>"""
)
# Media objects can be interrogated by media type
self.assertEqual(
str(w1.media['css']),
"""<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" />"""
)
self.assertEqual(
str(w1.media['js']),
"""<script type="text/javascript" src="/path/to/js1"></script>
<script type="text/javascript" src="http://media.other.com/path/to/js2"></script>
<script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>"""
)
def test_combine_media(self):
# Media objects can be combined. Any given media resource will appear only
# once. Duplicated media definitions are ignored.
class MyWidget1(TextInput):
class Media:
css = {
'all': ('path/to/css1', '/path/to/css2')
}
js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3')
class MyWidget2(TextInput):
class Media:
css = {
'all': ('/path/to/css2', '/path/to/css3')
}
js = ('/path/to/js1', '/path/to/js4')
class MyWidget3(TextInput):
class Media:
css = {
'all': ('path/to/css1', '/path/to/css3')
}
js = ('/path/to/js1', '/path/to/js4')
w1 = MyWidget1()
w2 = MyWidget2()
w3 = MyWidget3()
self.assertEqual(
str(w1.media + w2.media + w3.media),
"""<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/path/to/js1"></script>
<script type="text/javascript" src="http://media.other.com/path/to/js2"></script>
<script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>
<script type="text/javascript" src="/path/to/js4"></script>"""
)
# media addition hasn't affected the original objects
self.assertEqual(
str(w1.media),
"""<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/path/to/js1"></script>
<script type="text/javascript" src="http://media.other.com/path/to/js2"></script>
<script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>"""
)
# Regression check for #12879: specifying the same CSS or JS file
# multiple times in a single Media instance should result in that file
# only being included once.
class MyWidget4(TextInput):
class Media:
css = {'all': ('/path/to/css1', '/path/to/css1')}
js = ('/path/to/js1', '/path/to/js1')
w4 = MyWidget4()
self.assertEqual(str(w4.media), """<link href="/path/to/css1" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/path/to/js1"></script>""")
def test_media_property(self):
###############################################################
# Property-based media definitions
###############################################################
# Widget media can be defined as a property
class MyWidget4(TextInput):
def _media(self):
return Media(css={'all': ('/some/path',)}, js=('/some/js',))
media = property(_media)
w4 = MyWidget4()
self.assertEqual(str(w4.media), """<link href="/some/path" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/some/js"></script>""")
# Media properties can reference the media of their parents
class MyWidget5(MyWidget4):
def _media(self):
return super().media + Media(css={'all': ('/other/path',)}, js=('/other/js',))
media = property(_media)
w5 = MyWidget5()
self.assertEqual(str(w5.media), """<link href="/some/path" type="text/css" media="all" rel="stylesheet" />
<link href="/other/path" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/some/js"></script>
<script type="text/javascript" src="/other/js"></script>""")
def test_media_property_parent_references(self):
# Media properties can reference the media of their parents,
# even if the parent media was defined using a class
class MyWidget1(TextInput):
class Media:
css = {
'all': ('path/to/css1', '/path/to/css2')
}
js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3')
class MyWidget6(MyWidget1):
def _media(self):
return super().media + Media(css={'all': ('/other/path',)}, js=('/other/js',))
media = property(_media)
w6 = MyWidget6()
self.assertEqual(
str(w6.media),
"""<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" />
<link href="/other/path" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/path/to/js1"></script>
<script type="text/javascript" src="http://media.other.com/path/to/js2"></script>
<script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>
<script type="text/javascript" src="/other/js"></script>"""
)
def test_media_inheritance(self):
###############################################################
# Inheritance of media
###############################################################
# If a widget extends another but provides no media definition, it inherits the parent widget's media
class MyWidget1(TextInput):
class Media:
css = {
'all': ('path/to/css1', '/path/to/css2')
}
js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3')
class MyWidget7(MyWidget1):
pass
w7 = MyWidget7()
self.assertEqual(
str(w7.media),
"""<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/path/to/js1"></script>
<script type="text/javascript" src="http://media.other.com/path/to/js2"></script>
<script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>"""
)
# If a widget extends another but defines media, it extends the parent widget's media by default
class MyWidget8(MyWidget1):
class Media:
css = {
'all': ('/path/to/css3', 'path/to/css1')
}
js = ('/path/to/js1', '/path/to/js4')
w8 = MyWidget8()
self.assertEqual(
str(w8.media),
"""<link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" />
<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/path/to/js1"></script>
<script type="text/javascript" src="http://media.other.com/path/to/js2"></script>
<script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>
<script type="text/javascript" src="/path/to/js4"></script>"""
)
def test_media_inheritance_from_property(self):
# If a widget extends another but defines media, it extends the parents widget's media,
# even if the parent defined media using a property.
class MyWidget1(TextInput):
class Media:
css = {
'all': ('path/to/css1', '/path/to/css2')
}
js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3')
class MyWidget4(TextInput):
def _media(self):
return Media(css={'all': ('/some/path',)}, js=('/some/js',))
media = property(_media)
class MyWidget9(MyWidget4):
class Media:
css = {
'all': ('/other/path',)
}
js = ('/other/js',)
w9 = MyWidget9()
self.assertEqual(
str(w9.media),
"""<link href="/some/path" type="text/css" media="all" rel="stylesheet" />
<link href="/other/path" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/some/js"></script>
<script type="text/javascript" src="/other/js"></script>"""
)
# A widget can disable media inheritance by specifying 'extend=False'
class MyWidget10(MyWidget1):
class Media:
extend = False
css = {
'all': ('/path/to/css3', 'path/to/css1')
}
js = ('/path/to/js1', '/path/to/js4')
w10 = MyWidget10()
self.assertEqual(str(w10.media), """<link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" />
<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/path/to/js1"></script>
<script type="text/javascript" src="/path/to/js4"></script>""")
def test_media_inheritance_extends(self):
# A widget can explicitly enable full media inheritance by specifying 'extend=True'
class MyWidget1(TextInput):
class Media:
css = {
'all': ('path/to/css1', '/path/to/css2')
}
js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3')
class MyWidget11(MyWidget1):
class Media:
extend = True
css = {
'all': ('/path/to/css3', 'path/to/css1')
}
js = ('/path/to/js1', '/path/to/js4')
w11 = MyWidget11()
self.assertEqual(
str(w11.media),
"""<link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" />
<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/path/to/js1"></script>
<script type="text/javascript" src="http://media.other.com/path/to/js2"></script>
<script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>
<script type="text/javascript" src="/path/to/js4"></script>"""
)
def test_media_inheritance_single_type(self):
# A widget can enable inheritance of one media type by specifying extend as a tuple
class MyWidget1(TextInput):
class Media:
css = {
'all': ('path/to/css1', '/path/to/css2')
}
js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3')
class MyWidget12(MyWidget1):
class Media:
extend = ('css',)
css = {
'all': ('/path/to/css3', 'path/to/css1')
}
js = ('/path/to/js1', '/path/to/js4')
w12 = MyWidget12()
self.assertEqual(
str(w12.media),
"""<link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" />
<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/path/to/js1"></script>
<script type="text/javascript" src="/path/to/js4"></script>"""
)
def test_multi_media(self):
###############################################################
# Multi-media handling for CSS
###############################################################
# A widget can define CSS media for multiple output media types
class MultimediaWidget(TextInput):
class Media:
css = {
'screen, print': ('/file1', '/file2'),
'screen': ('/file3',),
'print': ('/file4',)
}
js = ('/path/to/js1', '/path/to/js4')
multimedia = MultimediaWidget()
self.assertEqual(
str(multimedia.media),
"""<link href="/file4" type="text/css" media="print" rel="stylesheet" />
<link href="/file3" type="text/css" media="screen" rel="stylesheet" />
<link href="/file1" type="text/css" media="screen, print" rel="stylesheet" />
<link href="/file2" type="text/css" media="screen, print" rel="stylesheet" />
<script type="text/javascript" src="/path/to/js1"></script>
<script type="text/javascript" src="/path/to/js4"></script>"""
)
def test_multi_widget(self):
###############################################################
# Multiwidget media handling
###############################################################
class MyWidget1(TextInput):
class Media:
css = {
'all': ('path/to/css1', '/path/to/css2')
}
js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3')
class MyWidget2(TextInput):
class Media:
css = {
'all': ('/path/to/css2', '/path/to/css3')
}
js = ('/path/to/js1', '/path/to/js4')
class MyWidget3(TextInput):
class Media:
css = {
'all': ('path/to/css1', '/path/to/css3')
}
js = ('/path/to/js1', '/path/to/js4')
# MultiWidgets have a default media definition that gets all the
# media from the component widgets
class MyMultiWidget(MultiWidget):
def __init__(self, attrs=None):
widgets = [MyWidget1, MyWidget2, MyWidget3]
super().__init__(widgets, attrs)
mymulti = MyMultiWidget()
self.assertEqual(
str(mymulti.media),
"""<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/path/to/js1"></script>
<script type="text/javascript" src="http://media.other.com/path/to/js2"></script>
<script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>
<script type="text/javascript" src="/path/to/js4"></script>"""
)
def test_form_media(self):
###############################################################
# Media processing for forms
###############################################################
class MyWidget1(TextInput):
class Media:
css = {
'all': ('path/to/css1', '/path/to/css2')
}
js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3')
class MyWidget2(TextInput):
class Media:
css = {
'all': ('/path/to/css2', '/path/to/css3')
}
js = ('/path/to/js1', '/path/to/js4')
class MyWidget3(TextInput):
class Media:
css = {
'all': ('path/to/css1', '/path/to/css3')
}
js = ('/path/to/js1', '/path/to/js4')
# You can ask a form for the media required by its widgets.
class MyForm(Form):
field1 = CharField(max_length=20, widget=MyWidget1())
field2 = CharField(max_length=20, widget=MyWidget2())
f1 = MyForm()
self.assertEqual(
str(f1.media),
"""<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/path/to/js1"></script>
<script type="text/javascript" src="http://media.other.com/path/to/js2"></script>
<script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>
<script type="text/javascript" src="/path/to/js4"></script>"""
)
# Form media can be combined to produce a single media definition.
class AnotherForm(Form):
field3 = CharField(max_length=20, widget=MyWidget3())
f2 = AnotherForm()
self.assertEqual(
str(f1.media + f2.media),
"""<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/path/to/js1"></script>
<script type="text/javascript" src="http://media.other.com/path/to/js2"></script>
<script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>
<script type="text/javascript" src="/path/to/js4"></script>"""
)
# Forms can also define media, following the same rules as widgets.
class FormWithMedia(Form):
field1 = CharField(max_length=20, widget=MyWidget1())
field2 = CharField(max_length=20, widget=MyWidget2())
class Media:
js = ('/some/form/javascript',)
css = {
'all': ('/some/form/css',)
}
f3 = FormWithMedia()
self.assertEqual(
str(f3.media),
"""<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" />
<link href="/some/form/css" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/path/to/js1"></script>
<script type="text/javascript" src="http://media.other.com/path/to/js2"></script>
<script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>
<script type="text/javascript" src="/path/to/js4"></script>
<script type="text/javascript" src="/some/form/javascript"></script>"""
)
# Media works in templates
self.assertEqual(
Template("{{ form.media.js }}{{ form.media.css }}").render(Context({'form': f3})),
"""<script type="text/javascript" src="/path/to/js1"></script>
<script type="text/javascript" src="http://media.other.com/path/to/js2"></script>
<script type="text/javascript" src="https://secure.other.com/path/to/js3"></script>
<script type="text/javascript" src="/path/to/js4"></script>
<script type="text/javascript" src="/some/form/javascript"></script>"""
"""<link href="http://media.example.com/static/path/to/css1" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css2" type="text/css" media="all" rel="stylesheet" />
<link href="/path/to/css3" type="text/css" media="all" rel="stylesheet" />
<link href="/some/form/css" type="text/css" media="all" rel="stylesheet" />"""
)
def test_html_safe(self):
media = Media(css={'all': ['/path/to/css']}, js=['/path/to/js'])
self.assertTrue(hasattr(Media, '__html__'))
self.assertEqual(str(media), media.__html__())
def test_merge(self):
test_values = (
(([1, 2], [3, 4]), [1, 2, 3, 4]),
(([1, 2], [2, 3]), [1, 2, 3]),
(([2, 3], [1, 2]), [1, 2, 3]),
(([1, 3], [2, 3]), [1, 2, 3]),
(([1, 2], [1, 3]), [1, 2, 3]),
(([1, 2], [3, 2]), [1, 3, 2]),
)
for (list1, list2), expected in test_values:
with self.subTest(list1=list1, list2=list2):
self.assertEqual(Media.merge(list1, list2), expected)
def test_merge_warning(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
self.assertEqual(Media.merge([1, 2], [2, 1]), [1, 2])
self.assertEqual(
str(w[-1].message),
'Detected duplicate Media files in an opposite order:\n1\n2'
)
| [
"manhnd@kaopiz.com"
] | manhnd@kaopiz.com |
ab0d84a6fac49ca7cccc019147bb426f5600a37a | d663659c446d13baefae58bb8207898349fc4bc0 | /Project Euler/3.py | 568ba13e8118d43108b353e6215406f27fc38f95 | [] | no_license | CanSevgi/Python | e1c9bcd6fd0220203aa514389a858f1286fd1dbf | 2afc6de29e3dfadf3da4431a6c64a3629c0ed754 | refs/heads/master | 2020-03-08T11:52:20.393579 | 2018-10-13T15:06:05 | 2018-10-13T15:06:05 | 128,110,368 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 756 | py | # -*- coding: utf-8 -*-
"""
Created on Fri May 4 19:02:01 2018
@author:Can Sevgi (github.com/CanSevgi)
We'll check for x%y==0 and y%z != 0 where y = (from x-1, to 0) and z = (from y-1, to 0)
"""
x= 600851475143
#i=0
#n=0
#m=0
c=0
a=0
for c in range (1,10000,1):
if x%c==0:
a +=1
print (a, " st. prime factor : " ,c)
x= (x//c)
print ("Result of the divide :",x)
#for i in range (1,x,1):
# if x%(x-i) ==0:
# a = (x-i)
# m=0
# for n in range (2,a,1):
# if (a)%(n)==0:
# break
# else :
# m +=1
# if m==(a-2) :
# print (a)
# break
| [
"noreply@github.com"
] | noreply@github.com |
9efff393f901a81783cc42050f4002da07a3b111 | 657d7416d0639ec3c6f42aace7ee02324c7833cd | /web_app/main/apps.py | e2c08b95ba3268cefa35a202191f58936add33b6 | [] | no_license | v-skiff/blogs | 4e4bac096df0d69fc49bef830d264fdb411eb1fc | 5261e9fbd83e34e41e4af8184a230e1c0ef78c83 | refs/heads/master | 2023-04-28T16:18:05.746701 | 2020-03-17T21:40:37 | 2020-03-17T21:40:37 | 247,424,787 | 0 | 0 | null | 2023-04-21T20:54:12 | 2020-03-15T08:01:10 | Python | UTF-8 | Python | false | false | 167 | py | from django.apps import AppConfig
class MainConfig(AppConfig):
    """Django application configuration for the ``main`` app."""

    name = 'main'
    verbose_name = 'Blog platform'

    def ready(self):
        """Run once Django's app registry is fully populated."""
        # Imported purely for its side effects — presumably registers the
        # signal receivers declared in main/signals.py (TODO confirm).
        # Importing here rather than at module top is the Django convention:
        # models are not yet loaded when this module is first imported.
        import main.signals
"skiff.udev@gmail.com"
] | skiff.udev@gmail.com |
810ab3e5093004285645359f5b89fd224183d58f | 872e7437a22e684c0fecce76e679464bb68e434d | /Anna_old/database/old/py_scripts/trypa_new_functions.py | 1e5e98334cae68c54ea163cc33e71e861d2141df | [] | no_license | tskalicky/ngs | d9a3c4da69544c9d5ae72c71167f2078137d1775 | 6056c627cdf23eca357bbb7cba0465abf6398f43 | refs/heads/master | 2020-12-30T22:34:28.112079 | 2018-10-31T09:09:39 | 2018-10-31T09:09:39 | 80,538,797 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 106,391 | py | #!/usr/bin/python
trypa_new_functions={
"Tb04.24M18.150": "hypothetical protein, conserved",
"Tb09.v4.0018": "expression site-associated gene (ESAG) protein, putative, expression site-associated gene 3 (ESAG3) protein, putative, chrIX additional, unordered contigs",
"Tb10.v4.0045": "prohibitin, putative, chrX additional, unordered contigs",
"Tb11.02.5380": "exosome complex exonuclease RRP44p homologue (RRP44)",
"Tb11.02.5390": "mRNA processing protein, putative",
"Tb11.0400": "P27 protein, putative (P27)",
"Tb927.1.1000": "developmentally regulated phosphoprotein",
"Tb927.1.1130": "glycerol-3-phosphate dehydrogenase (FAD-dependent), putative",
"Tb927.1.1160": "kinetoplast ribosomal PPR-repeat containing protein 3 (KRIPP3)",
"Tb927.1.120": "retrotransposon hot spot protein 4 (RHS4), putative",
"Tb927.1.1200": "SSU ribosomal protein, mitochondrial (MRPS15)",
"Tb927.1.1210": "conserved protein, unknown function",
"Tb927.1.1330": "Mitochondrial Editosome-like Complex TUTase (MEAT1)",
"Tb927.1.1580": "cytochrome c oxidase assembly factor, putative, electron transport protein SCO1/2, putative",
"Tb927.1.1610": "hypothetical protein, conserved",
"Tb927.1.1690": "Mitochondrial protein 90, RNA editing endoribonuclease, RNA-editing nuclease 1 (KREN1)",
"Tb927.1.1730": "conserved protein, unknown function",
"Tb927.1.1770": "glutaredoxin, putative",
"Tb927.1.180": "retrotransposon hot spot protein 1 (RHS1), putative",
"Tb927.1.2120": "Calpain-like protein CALP1.3, cysteine peptidase, Clan CA, family C2, putative (CALP1.3)",
"Tb927.1.2230": "calpain-like protein fragment, putative",
"Tb927.1.2260": "calpain-like protein fragment, putative",
"Tb927.1.2320": "hypothetical protein, conserved",
"Tb927.1.2330": "beta tubulin",
"Tb927.1.2340": "alpha tubulin",
"Tb927.1.2430": "histone H3, putative",
"Tb927.1.2730": "hypothetical protein, conserved",
"Tb927.1.2990": "PPR repeat family, putative",
"Tb927.1.3010": "mRNA processing protein, putative",
"Tb927.1.3030": "KREL2 (KREL2)",
"Tb927.1.3040": "hypothetical protein, conserved",
"Tb927.1.3110": "soluble N-ethylmaleimide sensitive factor (NSF) attachment protein, putative",
"Tb927.1.3150": "hypothetical protein, conserved",
"Tb927.1.3450": "hypothetical protein, conserved",
"Tb927.1.3800": "hypothetical protein, conserved (POMP18)",
"Tb927.1.3950": "alanine aminotransferase (ALAT)",
"Tb927.1.4010": "primase 2",
"Tb927.1.4050": "protein phosphatase with EF-Hand domains (PPEF), ser/thr protein phosphatase, putative",
"Tb927.1.4100": "cytochrome oxidase subunit IV (COXIV)",
"Tb927.1.420": "retrotransposon hot spot protein 5 (RHS5), putative",
"Tb927.1.4230": "hypothetical protein, conserved",
"Tb927.1.4310": "hypothetical protein, conserved",
"Tb927.1.4420": "ABC transporter, putative",
"Tb927.1.4970": "hypothetical protein",
"Tb927.1.5000": "hypothetical protein, conserved",
"Tb927.1.5030": "leucine-rich repeat protein (LRRP), putative",
"Tb927.1.710": "phosphoglycerate kinase (PGKB)",
"Tb927.1.720": "phosphoglycerate kinase (PGKA)",
"Tb927.1.730": "NADH-ubiquinone oxidoreductase complex I subunit, putative",
"Tb927.1.740": "phosphatidylinositol-4-phosphate 5-kinase related",
"Tb927.1.790": "hypothetical transmembrane protein, conserved",
"Tb927.1.840": "transmembrane protein, putative",
"Tb927.1.860": "transmembrane protein, putative",
"Tb927.1.880": "Midasin, putative (MDN1)",
"Tb927.1.90": "retrotransposon hot spot protein (RHS, pseudogene), putative",
"Tb927.10.10110": "PIG-P, putative",
"Tb927.10.10120": "hypothetical protein, conserved",
"Tb927.10.10130": "mitochondrial RNA binding complex 1 subunit (MRB10130)",
"Tb927.10.10140": "paraflagellar rod component, putative (PFC19)",
"Tb927.10.10160": "pentatricopeptide repeat domain containing protein, putative",
"Tb927.10.10280": "microtubule-associated protein, putative",
"Tb927.10.10300": "hypothetical protein, conserved",
"Tb927.10.10310": "hypothetical protein, conserved",
"Tb927.10.10360": "microtubule-associated protein, putative",
"Tb927.10.10390": "trypanothione reductase",
"Tb927.10.10400": "hypothetical protein, conserved",
"Tb927.10.10420": "monothiol glutaredoxin, putative",
"Tb927.10.10440": "mitochondrial carrier protein (MCP8)",
"Tb927.10.10460": "histone H2B, putative",
"Tb927.10.1050": "serine peptidase, Clan SC, Family S10 (CBP1)",
"Tb927.10.10610": "protein tyrosine phosphatase, putative",
"Tb927.10.10680": "hypothetical protein, conserved",
"Tb927.10.1070": "cdc2-related kinase 1, putative (CRK1)",
"Tb927.10.10740": "hypothetical protein, conserved",
"Tb927.10.10780": "hypothetical protein, conserved",
"Tb927.10.10830": "RGG-containing protein 2, MRB1-associated protein (RGG2)",
"Tb927.10.1100": "60S ribosomal protein L9, putative",
"Tb927.10.11030": "Archaic translocase of outer membrane 11 kDa subunit (ATOM11)",
"Tb927.10.11050": "hypothetical protein, conserved",
"Tb927.10.11160": "HIRA-interacting protein 5, putative",
"Tb927.10.11220": "procyclic form surface phosphoprotein (PSSA-2)",
"Tb927.10.11260": "hypothetical protein, conserved",
"Tb927.10.11280": "hypothetical protein",
"Tb927.10.11300": "paraflagellar rod component, putative (PFC16)",
"Tb927.10.11340": "hypothetical protein, conserved",
"Tb927.10.11350": "hypothetical protein, conserved",
"Tb927.10.11370": "hypothetical protein, conserved",
"Tb927.10.11390": "60S ribosomal protein L6, putative",
"Tb927.10.11420": "hypothetical protein, conserved",
"Tb927.10.11500": "hypothetical protein, conserved",
"Tb927.10.11520": "hypothetical protein, conserved",
"Tb927.10.1160": "hypothetical protein, conserved",
"Tb927.10.11770": "hypothetical protein, conserved",
"Tb927.10.11820": "hypothetical protein, conserved",
"Tb927.10.11870": "mitochondrial RNA binding protein (MRB11870)",
"Tb927.10.11890": "hypothetical protein, conserved",
"Tb927.10.11900": "thioredoxin, putative",
"Tb927.10.11930": "beta-D-hydroxybutyrate dehydrogenase",
"Tb927.10.11940": "ABC1 protein, putative",
"Tb927.10.12000": "iron-sulfur cluster assembly protein, putative",
"Tb927.10.12050": "hypothetical protein, conserved",
"Tb927.10.12080": "hypothetical protein, conserved",
"Tb927.10.12180": "hypothetical protein, conserved",
"Tb927.10.12240": "3-ketoacyl-CoA reductase, putative",
"Tb927.10.12250": "hypothetical protein, conserved",
"Tb927.10.12300": "hypothetical protein, conserved",
"Tb927.10.12330": "zinc finger protein family member, putative (ZC3H34)",
"Tb927.10.1240": "hypothetical protein, conserved",
"Tb927.10.12500": "P-type H+-ATPase, putative",
"Tb927.10.12510": "P-type H+-ATPase, putative",
"Tb927.10.12520": "hypothetical protein, conserved",
"Tb927.10.12540": "NADH-ubiquinone oxidoreductase complex I subunit, putative (NDUFS1)",
"Tb927.10.12550": "hypothetical protein, conserved",
"Tb927.10.12560": "hypothetical protein",
"Tb927.10.1260": "aspartyl-tRNA synthetase, putative",
"Tb927.10.12630": "hypothetical protein, conserved",
"Tb927.10.12690": "hypothetical protein, conserved",
"Tb927.10.12700": "pyruvate dehydrogenase E1 alpha subunit, putative",
"Tb927.10.12810": "cholinephosphate cytidylyltransferase A, putative",
"Tb927.10.12840": "mitochondrial carrier protein (MCP12)",
"Tb927.10.12880": "SET domain containing protein, putative",
"Tb927.10.12890": "bifunctional aminoacyl-tRNA synthetase, putative",
"Tb927.10.12910": "hypothetical protein, conserved",
"Tb927.10.12930": "hypothetical protein, conserved",
"Tb927.10.12960": "Ras-related protein Rab5A (RAB5A)",
"Tb927.10.12970": "AN1-like zinc finger, putative",
"Tb927.10.12980": "Multisite-specific tRNA:(cytosine-C(5))-methyltransferase, putative",
"Tb927.10.13040": "receptor-type adenylate cyclase GRESAG 4, putative (GRESAG4)",
"Tb927.10.13120": "mitochondrial carrier protein (MCP14)",
"Tb927.10.13200": "hypothetical protein, conserved",
"Tb927.10.13300": "30S ribosomal protein S8, putative",
"Tb927.10.13360": "Elongation factor Tu, mitochondrial (EF-Tu)",
"Tb927.10.13430": "citrate synthase, putative",
"Tb927.10.13480": "protein kinase, putative",
"Tb927.10.13510": "zinc metallopeptidase, putative",
"Tb927.10.13600": "hypothetical protein, conserved",
"Tb927.10.13620": "NADH-ubiquinone oxidoreductase complex I subunit,FT putative",
"Tb927.10.1370": "hypothetical protein, conserved",
"Tb927.10.13700": "phosphatidic acid phosphatase alpha, putative",
"Tb927.10.13740": "synaptotagmin, putative",
"Tb927.10.13760": "hypothetical protein, conserved",
"Tb927.10.13810": "hypothetical protein, conserved",
"Tb927.10.13820": "Domain of unknown function (DUF4379), putative",
"Tb927.10.13830": "chaperone protein DNAj, putative",
"Tb927.10.13850": "ATP12 chaperone protein, putative",
"Tb927.10.13860": "GPI-anchor transamidase subunit 8 (GPI8)",
"Tb927.10.13920": "hypothetical protein, conserved",
"Tb927.10.13990": "hypothetical protein, conserved",
"Tb927.10.14000": "aconitase (ACO)",
"Tb927.10.14010": "tubulin cofactor C domain-containing protein RP2 (rp2)",
"Tb927.10.14030": "hypothetical protein, conserved",
"Tb927.10.14050": "hypothetical protein",
"Tb927.10.14090": "transporter, putative",
"Tb927.10.1410": "hypothetical protein, conserved",
"Tb927.10.14150": "nuclear segregation protein, putative",
"Tb927.10.14170": "aquaglyceroporin 2 (AQP2)",
"Tb927.10.14210": "TFIIH basal transcription factor subunit (TFB5)",
"Tb927.10.14250": "hypothetical protein, conserved",
"Tb927.10.14280": "mitochondrial carrier protein (MCP20)",
"Tb927.10.1430": "hypothetical protein, conserved",
"Tb927.10.14350": "hypothetical protein, conserved",
"Tb927.10.14370": "hypothetical protein, conserved",
"Tb927.10.14490": "hypothetical protein, conserved",
"Tb927.10.14500": "hypothetical protein, conserved",
"Tb927.10.14510": "root hair defective 3 GTP-binding protein (RHD3), putative",
"Tb927.10.14600": "40S ribosomal protein S2, putative (RPS2)",
"Tb927.10.14740": "hypothetical protein, conserved",
"Tb927.10.14780": "mitogen-activated protein kinase kinase kinase, putative (CBPK1)",
"Tb927.10.14820": "Mitochondrial ADP/ATP carrier protein 5c (MCP5c)",
"Tb927.10.14830": "Mitochondrial ADP/ATP carrier protein 5b, putative (MCP5b)",
"Tb927.10.14840": "Mitochondrial ADP/ATP carrier protein 5a, putative (MCP5a)",
"Tb927.10.14860": "Complex 1 protein (LYR family), putative",
"Tb927.10.14930": "Zinc finger CCCH domain-containing protein 39 (ZC3H39)",
"Tb927.10.1500": "methionyl-tRNA synthetase, putative (MetRS)",
"Tb927.10.15010": "lipoic acid synthetase, mitochondrial precursor, putative",
"Tb927.10.15210": "nuclear cap binding complex subunit CBP30 (CBP30)",
"Tb927.10.15250": "paraflagellar rod component, putative (PFC15)",
"Tb927.10.15400": "kinesin, putative",
"Tb927.10.15410": "glycosomal malate dehydrogenase (gMDH)",
"Tb927.10.15420": "methyltransferase domain containing protein, putative",
"Tb927.10.15490": "GDP-mannose 4,6 dehydratase, putative",
"Tb927.10.1560": "hypothetical protein, conserved",
"Tb927.10.15650": "tRNA pseudouridine synthase A-like protein",
"Tb927.10.15660": "hypothetical protein, conserved",
"Tb927.10.1570": "ATPase subunit 9, putative",
"Tb927.10.15710": "mitochondrial carrier protein (MCP7)",
"Tb927.10.15750": "hypothetical protein, conserved",
"Tb927.10.15760": "hypothetical protein, conserved",
"Tb927.10.15790": "CHCH domain containing protein, putative",
"Tb927.10.1580": "Domain of unknown function (DUF543), putative",
"Tb927.10.15830": "hypothetical protein, conserved",
"Tb927.10.15900": "hypothetical protein, conserved",
"Tb927.10.15940": "cation transporter, putative",
"Tb927.10.160": "RNA polymerase II",
"Tb927.10.16000": "receptor-type adenylate cyclase GRESAG 4, putative, expression site-associated gene 4 (ESAG4) protein, putative",
"Tb927.10.16010": "hypothetical protein",
"Tb927.10.16080": "hypothetical protein, conserved",
"Tb927.10.16090": "hypothetical protein, conserved",
"Tb927.10.16120": "inosine-5'-monophosphate dehydrogenase",
"Tb927.10.16150": "ATP-dependent zinc metallopeptidase, putative",
"Tb927.10.16170": "potassium voltage-gated channel, putative",
"Tb927.10.1660": "hypothetical protein, conserved",
"Tb927.10.1730": "hypothetical protein, conserved",
"Tb927.10.1750": "hypothetical protein, conserved",
"Tb927.10.180": "ATP synthase F1 subunit gamma protein, putative",
"Tb927.10.1820": "hypothetical protein, conserved",
"Tb927.10.1870": "hypothetical protein, conserved",
"Tb927.10.1880": "hypothetical protein, conserved",
"Tb927.10.1930": "hypothetical protein, conserved",
"Tb927.10.1950": "leucine-rich repeat protein (LRRP), putative",
"Tb927.10.2000": "actin-like protein, putative",
"Tb927.10.2030": "hypothetical protein, conserved",
"Tb927.10.2050": "hypothetical protein, conserved",
"Tb927.10.2060": "hypothetical protein, conserved",
"Tb927.10.210": "GPI transamidase component GAA1 (TbGAA1)",
"Tb927.10.2100": "elongation factor 1-alpha, EF-1-alpha (TEF1)",
"Tb927.10.2190": "Protein of unknown function (DUF667), putative",
"Tb927.10.2230": "NADPH:adrenodoxin oxidoreductase, mitochondrial, putative",
"Tb927.10.2300": "hypothetical protein, conserved",
"Tb927.10.2350": "pyruvate dehydrogenase complex E3 binding protein, putative",
"Tb927.10.2440": "Metacaspase-4 (MCA4)",
"Tb927.10.2530": "adenylate kinase, putative",
"Tb927.10.2540": "adenylate kinase, putative",
"Tb927.10.2550": "malate dehydrogenase-related",
"Tb927.10.2560": "mitochondrial malate dehydrogenase (mMDH)",
"Tb927.10.2590": "methyltransferase domain containing protein, putative",
"Tb927.10.2680": "pyridine nucleotide-disulphide oxidoreductase, putative",
"Tb927.10.280": "cytochrome oxidase subunit VI (COXVI)",
"Tb927.10.2800": "hypothetical protein, conserved",
"Tb927.10.2880": "Voltage-dependent calcium channel subunit, putative, Flagellum surface protein 179 (FS179)",
"Tb927.10.2890": "enolase",
"Tb927.10.2960": "Translation factor GUF1, mitochondrial, putative (GUF1)",
"Tb927.10.2970": "hypothetical protein, conserved",
"Tb927.10.3040": "hypothetical protein, conserved",
"Tb927.10.3080": "methionine biosynthetic protein, putative",
"Tb927.10.3120": "cytochrome c oxidase assembly protein, putative",
"Tb927.10.3210": "delta-1-pyrroline-5-carboxylate dehydrogenase, putative",
"Tb927.10.3250": "hypothetical protein, conserved",
"Tb927.10.3260": "Long-chain-fatty-acid--CoA ligase 5 (EC 6.2.1.3) (Long-chain acyl-CoA synthetase 5) (LACS 5), putative",
"Tb927.10.3320": "hypothetical protein, conserved",
"Tb927.10.3370": "60S acidic ribosomal protein P2, putative",
"Tb927.10.340": "mitochondrial structure specific endonuclease I (SSE-1), putative",
"Tb927.10.3570": "RNA editing exonuclease 2 (KREX2)",
"Tb927.10.3580": "hypothetical protein, conserved",
"Tb927.10.3590": "hypothetical protein, conserved",
"Tb927.10.3610": "hypothetical protein, conserved",
"Tb927.10.3640": "nuclear transmembrane protein, putative (NP)",
"Tb927.10.3650": "NADH-dependent fumarate reductase, putative",
"Tb927.10.3690": "rRNA dimethyltransferase, putative",
"Tb927.10.3770": "predicted TPR repeat protein",
"Tb927.10.380": "mitochondrial RNA binding complex 1 subunit (PPR5)",
"Tb927.10.3810": "Nup53/35/40-type RNA recognition motif containing protein, putative",
"Tb927.10.3940": "40S ribosomal protein S3A, putative",
"Tb927.10.4000": "methylglutaconyl-CoA hydratase, mitochondrial precursor, putative",
"Tb927.10.4020": "hypothetical protein, conserved",
"Tb927.10.4040": "3-keto-dihydrosphingosine reductase",
"Tb927.10.4080": "hypothetical protein, conserved",
"Tb927.10.4130": "NDUFA5/B13 subunit, putative",
"Tb927.10.4240": "hypothetical protein, conserved",
"Tb927.10.4280": "ubiquinol-cytochrome c reductase complex 14kD subunit, putative",
"Tb927.10.430": "peroxidase, putative",
"Tb927.10.4310": "prohibitin 2, putative (PHB2)",
"Tb927.10.4330": "2-oxoisovalerate dehydrogenase beta subunit, mitochondrial precursor, putative",
"Tb927.10.4550": "hypothetical protein, conserved",
"Tb927.10.4560": "elongation factor 2",
"Tb927.10.4590": "mitochondrial inner membrane signal peptidase, putative",
"Tb927.10.470": "choline dehydrogenase, putative",
"Tb927.10.4760": "Integral membrane protein DUF106, putative",
"Tb927.10.4850": "hypothetical protein, conserved",
"Tb927.10.4880": "hypothetical protein, conserved",
"Tb927.10.4910": "mitochondrial carrier protein (MCP4)",
"Tb927.10.4940": "Nuclear Dbf2-related kinase (PK50)",
"Tb927.10.4990": "cdc2-related kinase 3, putative (CRK3)",
"Tb927.10.5010": "Elongation factor G1, mitochondrial, putative (EF-G1)",
"Tb927.10.5050": "Mitochondrial ATP synthase epsilon chain, putative",
"Tb927.10.5060": "Protein of unknown function (DUF3592), putative",
"Tb927.10.510": "hypothetical protein, conserved (POMP19)",
"Tb927.10.5110": "KREPA4 (KREPA4)",
"Tb927.10.5120": "KREPA6 (KREPA6)",
"Tb927.10.520": "hypothetical protein, conserved",
"Tb927.10.5220": "hypothetical protein, conserved",
"Tb927.10.5290": "HIRA-interacting protein 5, putative",
"Tb927.10.5320": "RNA editing endoribonuclease (KREN3)",
"Tb927.10.5330": "40S ribosomal protein S18, putative",
"Tb927.10.5400": "hypothetical protein, conserved",
"Tb927.10.5440": "RNA editing endoribonuclease (KREN2)",
"Tb927.10.5500": "hypothetical protein, conserved",
"Tb927.10.5540": "hypothetical protein, conserved",
"Tb927.10.5560": "GPI alpha-mannosyltransferase III (GPI10)",
"Tb927.10.5570": "hypothetical protein, conserved",
"Tb927.10.5590": "protein phosphatase 2C, putative",
"Tb927.10.5610": "40S ribosomal protein S9, putative",
"Tb927.10.5620": "fructose-bisphosphate aldolase, glycosomal (ALD)",
"Tb927.10.5660": "heat shock protein, putative",
"Tb927.10.5800": "hypothetical protein, conserved",
"Tb927.10.5820": "predicted RanGDP binding protein",
"Tb927.10.5830": "hypothetical protein, conserved",
"Tb927.10.5840": "translation elongation factor 1-beta, putative",
"Tb927.10.5900": "hypothetical protein, conserved",
"Tb927.10.600": "hypothetical protein, conserved",
"Tb927.10.6030": "proteasome subunit alpha type-1, putative, 20s proteasome subunit, putative (PSA1)",
"Tb927.10.6050": "clathrin heavy chain (CHC)",
"Tb927.10.6060": "universal minicircle sequence binding protein 2 (UMSBP2)",
"Tb927.10.6090": "tRNA pseudouridine synthase A, putative",
"Tb927.10.610": "hypothetical protein, conserved",
"Tb927.10.6120": "Peptidase M76 family, putative",
"Tb927.10.6170": "hypothetical protein, conserved",
"Tb927.10.6190": "aldehyde dehydrogenase, putative (ALDH)",
"Tb927.10.6200": "hypothetical protein, conserved",
"Tb927.10.6300": "Ribosomal protein S5, C-terminal domain containing protein, putative",
"Tb927.10.6400": "chaperonin HSP60, mitochondrial precursor (HSP60)",
"Tb927.10.6430": "hypothetical protein",
"Tb927.10.6510": "chaperonin HSP60, mitochondrial precursor (HSP60)",
"Tb927.10.660": "2-oxoisovalerate dehydrogenase alpha subunit, putative",
"Tb927.10.6610": "chaperone protein DNAj, putative",
"Tb927.10.6640": "transmembrane emp24 domain-containing protein",
"Tb927.10.670": "hypothetical protein, conserved",
"Tb927.10.6730": "Plasma-membrane choline transporter, putative",
"Tb927.10.680": "Protein of unknown function (DUF971), putative",
"Tb927.10.6840": "hypothetical protein, conserved",
"Tb927.10.6850": "Mitochondrial ribosomal protein S18, putative, mitochondrial RNA binding complex 1 subunit, mitochondrial edited mRNA stability factor 1 subunit, kinteoplast poly(A) polymerase complex 1 subunit (MRPS18)",
"Tb927.10.6910": "Sterol methyltransferase, putative",
"Tb927.10.7040": "hypothetical protein, conserved",
"Tb927.10.7090": "Alternative oxidase, mitochondrial, Trypanosome alternative oxidase, mitochondrial (AOX)",
"Tb927.10.710": "hypothetical protein, conserved",
"Tb927.10.7310": "terminal uridylyltransferase 3, putative (TUT3)",
"Tb927.10.7380": "hypothetical protein, conserved",
"Tb927.10.7410": "succinyl-CoA ligase [GDP-forming] beta-chain, putative",
"Tb927.10.7520": "folylpolyglutamate synthase, putative (FPGS)",
"Tb927.10.7570": "dihydrolipoamide acetyltransferase E2 subunit, putative",
"Tb927.10.760": "hypothetical protein, conserved",
"Tb927.10.7610": "hypothetical protein, conserved",
"Tb927.10.7620": "metallo-peptidase, Clan MA(E) Family M41, mitochondrial ATP-dependent zinc metallopeptidase, putative",
"Tb927.10.770": "RF-1 domain containing protein, putative",
"Tb927.10.7700": "ABC transporter, putative",
"Tb927.10.7720": "hypothetical protein, conserved",
"Tb927.10.7760": "hypothetical protein, conserved",
"Tb927.10.7770": "hypothetical protein",
"Tb927.10.7910": "hypothetical protein, conserved",
"Tb927.10.8010": "hypothetical protein, conserved",
"Tb927.10.8020": "serine peptidase clan SC, family S9A, putative, prolyl endopeptidase (POP)",
"Tb927.10.8030": "hypothetical protein, conserved",
"Tb927.10.8050": "TFIIF-stimulated CTD phosphatase, putative",
"Tb927.10.810": "pentatricopeptide repeat domain containing protein, putative",
"Tb927.10.8110": "Protein of unknown function (DUF2453), putative",
"Tb927.10.8160": "hypothetical protein, conserved",
"Tb927.10.8210": "KREPA2 (KREPA2)",
"Tb927.10.8230": "bloodstream- specific protein 2 precursor, protein disulfide isomerase 2 (PDI2)",
"Tb927.10.8320": "cytochrome oxidase subunit IX (COXIX)",
"Tb927.10.8450": "glucose transporter 1E (THT1E)",
"Tb927.10.8490": "glucose transporter, putative",
"Tb927.10.8610": "hypothetical protein, conserved",
"Tb927.10.8620": "hypothetical protein, conserved",
"Tb927.10.8630": "Sucrase/ferredoxin-like, putative",
"Tb927.10.8730": "ABC1 family, putative",
"Tb927.10.880": "thymidine kinase, putative",
"Tb927.10.8830": "hypothetical protein, conserved",
"Tb927.10.8890": "kinetoplast DNA-associated protein, putative",
"Tb927.10.8900": "choline/ethanolamine phosphotransferase (CEPT)",
"Tb927.10.8940": "flagellum targeting protein kharon1, putative (KH1)",
"Tb927.10.8950": "kinetoplast DNA-associated protein, putative",
"Tb927.10.8970": "kinetoplast DNA-associated protein, putative",
"Tb927.10.8980": "hypothetical protein, conserved",
"Tb927.10.900": "hypothetical protein, conserved",
"Tb927.10.9000": "hypothetical protein, conserved",
"Tb927.10.9080": "pteridine transporter, putative",
"Tb927.10.9120": "hypothetical protein, conserved",
"Tb927.10.9140": "FAD dependent oxidoreductase, putative (POMP20)",
"Tb927.10.9150": "hypothetical protein, conserved",
"Tb927.10.9190": "isoleucyl-tRNA synthetase, putative (IleRS)",
"Tb927.10.9270": "emp24/gp25L/p24 family/GOLD, putative",
"Tb927.10.9280": "hypothetical protein, conserved",
"Tb927.10.9420": "mitochondrial chaperone BCS1, putative",
"Tb927.10.9440": "NADH dehydrogenase (54 NDH2)",
"Tb927.10.9720": "RNA-editing-associated protein 1, RNA-binding protein, oligo (U) binding protein, RNA editing complex protein (REAP-1)",
"Tb927.10.9760": "alternative oxidase",
"Tb927.10.9790": "hypothetical protein, conserved",
"Tb927.10.9810": "hypothetical protein, conserved",
"Tb927.10.9820": "mitochondrial intermediate peptidase, putative, metallo-peptidase, Clan MA(E) Family M3",
"Tb927.10.9830": "hypothetical protein, conserved",
"Tb927.10.9860": "Mitochondrial N(5)-glutamine methyltransferase MTQ1, putative",
"Tb927.10.9900": "ABC1 protein, putative",
"Tb927.10.9940": "Predicted membrane protein, putative",
"Tb927.10.9970": "hypothetical protein, conserved",
"Tb927.11.10020": "short-chain dehydrogenase, putative",
"Tb927.11.10040": "hypothetical protein, conserved",
"Tb927.11.10050": "hypothetical protein, conserved",
"Tb927.11.10080": "hypothetical protein, conserved",
"Tb927.11.1010": "chaperone protein DNAj, putative",
"Tb927.11.10140": "hypothetical protein, conserved",
"Tb927.11.10150": "Enoyl-CoA hydratase/isomerase family, putative",
"Tb927.11.10170": "hypothetical protein, conserved",
"Tb927.11.10220": "hypothetical protein, conserved",
"Tb927.11.10240": "hslVU complex proteolytic subunit, threonine peptidase, Clan T(1), family T1B, ATP-dependent protease subunit HslV (HsIV)",
"Tb927.11.10400": "hypothetical protein, conserved",
"Tb927.11.10430": "hypothetical protein, conserved",
"Tb927.11.10510": "ubiquinone biosynthesis methyltransferase, putative",
"Tb927.11.10570": "hypothetical protein, conserved",
"Tb927.11.10590": "Glutathione S-transferase, C-terminal domain containing protein, putative",
"Tb927.11.10600": "hypothetical protein, conserved",
"Tb927.11.10690": "hypothetical protein, conserved",
"Tb927.11.10720": "hypothetical protein, conserved",
"Tb927.11.10760": "kinesin-like protein, putative",
"Tb927.11.10780": "hypothetical protein, conserved",
"Tb927.11.10870": "32 kDa ER-associated protein (ERAP32)",
"Tb927.11.1090": "cytoskeleton assocociated protein, putative, calpain-like protein, putative",
"Tb927.11.10950": "DnaJ domain containing protein, putative",
"Tb927.11.10960": "2OG-Fe(II) oxygenase superfamily, putative",
"Tb927.11.11010": "hypothetical protein, conserved",
"Tb927.11.11120": "hypothetical protein, conserved",
"Tb927.11.11130": "hypothetical protein, conserved",
"Tb927.11.11280": "hypothetical protein, conserved",
"Tb927.11.11290": "heat shock protein 70, putative",
"Tb927.11.11330": "heat shock protein 70",
"Tb927.11.11380": "hypothetical protein, conserved",
"Tb927.11.11390": "2OG-Fe(II) oxygenase superfamily, putative",
"Tb927.11.11440": "A/G-specific adenine glycosylase, putative",
"Tb927.11.11460": "Mitochondrial import receptor subunit ATOM69, Archaic Translocase of outer membrane 69 kDa subunit (ATOM69)",
"Tb927.11.11470": "hypothetical protein, conserved",
"Tb927.11.11490": "2,3-bisphosphoglycerate-independent phosphoglycerate mutase-like protein, putative",
"Tb927.11.11540": "unspecified product",
"Tb927.11.11620": "hypothetical protein, conserved",
"Tb927.11.11630": "hypothetical protein, conserved",
"Tb927.11.11680": "2-oxoglutarate dehydrogenase E2 component, putative",
"Tb927.11.11730": "MRP protein, putative",
"Tb927.11.11740": "membrane-bound acid phosphatase, putative",
"Tb927.11.11780": "acyl-CoA dehydrogenase, putative",
"Tb927.11.11820": "40S ribosomal protein S17, putative",
"Tb927.11.11870": "ADP-ribosylglycohydrolase, putative",
"Tb927.11.11950": "cytochrome oxidase assembly protein, putative",
"Tb927.11.11980": "corset-associated protein 15, cytoskeleton-associated protein 15 (CAP15)",
"Tb927.11.12040": "hypothetical protein, conserved",
"Tb927.11.12160": "hypothetical protein, conserved",
"Tb927.11.12220": "catalytic subunit of the vacuolar transporter chaperone 4",
"Tb927.11.12230": "ATP-dependent protease ATPase subunit HslU2 (HslU2)",
"Tb927.11.12240": "ubiquitin carboxyl-terminal hydrolase, putative, cysteine peptidase, Clan CA, family C19, putative",
"Tb927.11.12350": "DNA photolyase, putative",
"Tb927.11.1250": "Mitochondrial SSU ribosomal protein, putative",
"Tb927.11.12590": "Der1-like family, putative",
"Tb927.11.1270": "hypothetical protein, conserved",
"Tb927.11.12850": "oligopeptidase b, serine peptidase, clan SC, family S9A-like protein (OPB)",
"Tb927.11.1290": "hypothetical protein, conserved",
"Tb927.11.12960": "hypothetical protein, conserved",
"Tb927.11.13020": "calmodulin",
"Tb927.11.1310": "NADH-cytochrome b5 reductase, putative (B5R)",
"Tb927.11.13140": "cytochrome oxidase subunit X (COXX)",
"Tb927.11.13180": "hypothetical protein conserved (POMP10)",
"Tb927.11.1320": "NADH-ubiquinone oxidoreductase 20 kDa subunit, mitochondrial precursor, NADH-ubiquinone oxidoreductase complex I subunit, putative, NADH dehydrogenase [ubiquinone] iron-sulfur protein 7, mitochondrial (NDHK)",
"Tb927.11.13220": "phospholipid:diacylglycerol acyltransferase-like protein",
"Tb927.11.13230": "MSP (Major sperm protein) domain containing protein, putative",
"Tb927.11.13280": "mitochondrial RNA binding protein 2 (GBP25)",
"Tb927.11.13290": "mitochondrial import inner membrane translocase subunit tim17 (Tim17)",
"Tb927.11.13440": "mitochondrial carrier protein (MCP21)",
"Tb927.11.1350": "calcium uniporter protein, mitochondrial (MCU)",
"Tb927.11.13500": "par1",
"Tb927.11.13510": "hypothetical protein, conserved",
"Tb927.11.13520": "hypothetical protein, conserved",
"Tb927.11.13580": "ubiquinone biosynthesis protein-like protein",
"Tb927.11.13590": "hypothetical protein, conserved",
"Tb927.11.13610": "rhodanese-like domain containing protein, putative",
"Tb927.11.13620": "rhodanese-like domain containing protein, putative",
"Tb927.11.13650": "cytochrome b5, putative (CYB5)",
"Tb927.11.1370": "glutaredoxin, putative",
"Tb927.11.13710": "hypothetical protein, conserved",
"Tb927.11.13750": "Ras-related protein Rab4 (RAB4)",
"Tb927.11.13760": "hypothetical protein, conserved",
"Tb927.11.13830": "DnaJ domain containing protein, putative",
"Tb927.11.13840": "hypothetical protein, conserved",
"Tb927.11.13890": "AKAP7 2'5' RNA ligase-like domain containing protein, putative",
"Tb927.11.13910": "hypothetical protein, conserved",
"Tb927.11.1400": "hypothetical protein, conserved",
"Tb927.11.14080": "cyclin 2 (CYC2)",
"Tb927.11.14120": "phenylalanyl-tRNA synthetase alpha chain, putative",
"Tb927.11.14170": "hypothetical protein, conserved",
"Tb927.11.1430": "Repeat of unknown function (DUF1126)/EF-hand domain pair, putative",
"Tb927.11.14350": "mitochondrial carrier protein (MCP22)",
"Tb927.11.14360": "mitochondrial carrier protein (MCP2)",
"Tb927.11.14380": "kinetoplast polyadenylation/uridylation factor 2 (KPAF2)",
"Tb927.11.14440": "PPR repeat family, putative",
"Tb927.11.14480": "ubiquitin carboxyl-terminal hydrolase, putative",
"Tb927.11.1450": "2-oxoglutarate dehydrogenase E1 component, putative",
"Tb927.11.14570": "LicD family, putative",
"Tb927.11.14700": "hypothetical protein, conserved",
"Tb927.11.14730": "Metalloprotease M41 FtsH, putative",
"Tb927.11.14790": "DNA repair and recombination helicase protein PIF4 (TbPIF4)",
"Tb927.11.14940": "hypothetical protein, conserved",
"Tb927.11.14980": "phosphatidylethanolamine-binding protein, putative",
"Tb927.11.15020": "iron superoxide dismutase (TbSODB)",
"Tb927.11.15040": "heat shock protein 60, Chaperonin HSP60, mitochondrial (HSP60)",
"Tb927.11.15070": "hypothetical protein, conserved",
"Tb927.11.15130": "heat shock protein-like protein, putative, DnaJ homolog, putative",
"Tb927.11.15150": "1-acyl-sn-glycerol-3-phosphate acyltransferase, putative",
"Tb927.11.15230": "cytosolic coat protein, putative",
"Tb927.11.15240": "Ras-related protein RAB2B, putative (RAB2B)",
"Tb927.11.15300": "hypothetical protein, conserved",
"Tb927.11.1540": "isovaleryl-coA dehydrogenase, putative",
"Tb927.11.15440": "NADH-ubiquinone oxidoreductase complex I subunit, putative (TAP151)",
"Tb927.11.15470": "methionyl-tRNA formyltransferase",
"Tb927.11.15500": "hypothetical protein, conserved",
"Tb927.11.15530": "C-14 sterol reductase, putative",
"Tb927.11.15550": "NADH-cytochrome b5 reductase, putative (B5R)",
"Tb927.11.15620": "hypothetical protein, conserved",
"Tb927.11.15640": "mitochondrial RNA binding complex 1 subunit (MERS1)",
"Tb927.11.15650": "poly(A) polymerase, putative",
"Tb927.11.15700": "hypothetical protein, conserved",
"Tb927.11.15730": "cyclic nucleotide-binding domain containing protein, putative",
"Tb927.11.15750": "AMP deaminase, putative",
"Tb927.11.15760": "GPI transamidase subunit Tta1 (TTA1)",
"Tb927.11.15780": "hypothetical protein, conserved",
"Tb927.11.15810": "NADH dehydrogenase subunit NI2M, putative",
"Tb927.11.15820": "superoxide dismutase, putative",
"Tb927.11.15840": "L-Lysine transport protein, putative",
"Tb927.11.15850": "kinteoplast poly(A) polymerase complex 1 subunit",
"Tb927.11.15860": "L-Lysine transport protein, putative",
"Tb927.11.15870": "hypothetical protein, conserved",
"Tb927.11.15950": "amino acid transporter, putative",
"Tb927.11.16080": "Soluble NSF attachment protein, SNAP, putative",
"Tb927.11.16130": "nucleoside diphosphate kinase (NDPK)",
"Tb927.11.1620": "hypothetical protein, conserved",
"Tb927.11.16200": "cytoskeleton-associated protein 17, corset-associated protein 17 (CAP17)",
"Tb927.11.16220": "hypothetical protein, conserved",
"Tb927.11.1630": "hypothetical protein, conserved",
"Tb927.11.16360": "hypothetical protein, conserved",
"Tb927.11.16380": "TPR repeat, putative",
"Tb927.11.16400": "kinetoplast-associated protein 3, putative (KAP3)",
"Tb927.11.16420": "Methyltransferase TYW3, putative",
"Tb927.11.16430": "18 kDa ER-associated protein (ERAP18)",
"Tb927.11.16480": "enoyl-CoA hydratase/isomerase family protein, putative",
"Tb927.11.1650": "hypothetical protein, conserved",
"Tb927.11.16510": "hypothetical protein, conserved",
"Tb927.11.16530": "hypothetical protein, conserved",
"Tb927.11.1670": "cysteine desulfurase",
"Tb927.11.16710": "hypothetical protein, conserved",
"Tb927.11.16730": "dihydrolipoyl dehydrogenase (GCVL-2)",
"Tb927.11.16740": "chaperone protein DNAj, putative",
"Tb927.11.16750": "hypothetical protein, conserved",
"Tb927.11.1680": "vesicular-fusion protein SEC18, putative",
"Tb927.11.16860": "mitochondrial RNA binding protein (MRB8620)",
"Tb927.11.16870": "NADH dehydrogenase subunit NI8M, putative",
"Tb927.11.16930": "ABC transporter of the mitochondrion, putative (ABCB7)",
"Tb927.11.16940": "MIZ/SP-RING zinc finger, putative",
"Tb927.11.16990": "50S ribosome-binding GTPase, putative",
"Tb927.11.170": "hypothetical protein, conserved",
"Tb927.11.1700": "hypothetical protein, conserved",
"Tb927.11.17000": "leucine-rich repeat protein (LRRP), putative",
"Tb927.11.1710": "mitochondrial RNA binding protein 1, guide RNA-binding protein of 21 kDa (gBP21)",
"Tb927.11.1780": "protein phosphatase 2C, putative",
"Tb927.11.180": "electron transfer flavoprotein, putative",
"Tb927.11.1830": "hypothetical protein, conserved",
"Tb927.11.1840": "hypothetical protein, conserved",
"Tb927.11.1850": "hypothetical protein, conserved",
"Tb927.11.190": "elongation factor, putative",
"Tb927.11.1950": "hypothetical protein, conserved",
"Tb927.11.2000": "Cupin-like domain containing protein, putative",
"Tb927.11.2010": "hypothetical protein, conserved",
"Tb927.11.2160": "hypothetical protein, conserved",
"Tb927.11.2360": "phenylalanyl-tRNA synthetase (beta subunit), putative",
"Tb927.11.240": "hypothetical protein, conserved",
"Tb927.11.2410": "hypothetical protein, conserved",
"Tb927.11.2500": "carboxypeptidase, putative, metallo-peptidase, Clan MA(E) Family M32",
"Tb927.11.2530": "Mitochondrial SSU ribosomal protein, putative, mitochondrial RNA binding complex 1 subunit",
"Tb927.11.2580": "hypothetical protein, conserved (POMP11)",
"Tb927.11.2610": "hypothetical protein, conserved",
"Tb927.11.2650": "heat shock protein 84, putative",
"Tb927.11.2690": "succinyl-coA:3-ketoacid-coenzyme A transferase, mitochondrial precursor, putative",
"Tb927.11.270": "mitochondrial carrier protein (MCP10)",
"Tb927.11.2720": "hypothetical protein, conserved",
"Tb927.11.2750": "hypothetical protein, conserved (POMP12)",
"Tb927.11.2800": "hypothetical protein, conserved",
"Tb927.11.2910": "phosphoglycerate mutase, putative (iPGAM)",
"Tb927.11.2920": "glycerolphosphate mutase, putative",
"Tb927.11.2930": "hypothetical protein, conserved",
"Tb927.11.2990": "KREPB4 (KREPB4)",
"Tb927.11.3010": "hypothetical protein, conserved",
"Tb927.11.3040": "hypothetical protein",
"Tb927.11.3100": "hypothetical protein, conserved (POMP13)",
"Tb927.11.3220": "metacaspase, cysteine peptidase, Clan CD, family C13 (mca1)",
"Tb927.11.3250": "dynein heavy chain, putative",
"Tb927.11.3260": "mitochondrial DNA polymerase I protein D, putative (POLID)",
"Tb927.11.3270": "squalene monooxygenase, putative",
"Tb927.11.3290": "hypothetical protein, conserved (p166)",
"Tb927.11.3300": "T-complex protein 10 C-terminus, putative",
"Tb927.11.3530": "Complex1_LYR-like, putative",
"Tb927.11.3590": "40S ribosomal protein S4, putative",
"Tb927.11.3640": "hypothetical protein, conserved",
"Tb927.11.3670": "hypothetical protein, conserved",
"Tb927.11.3690": "Alpha/beta hydrolase family, putative",
"Tb927.11.3700": "Uncharacterised ACR, YagE family COG1723, putative",
"Tb927.11.3730": "leucyl-tRNA synthetase, putative (LeuRS)",
"Tb927.11.3750": "NADH-cytochrome b5 reductase, putative (B5R)",
"Tb927.11.3860": "hypothetical protein, conserved",
"Tb927.11.3900": "glycosyltransferase family-like protein, putative",
"Tb927.11.3940": "hypothetical protein, conserved",
"Tb927.11.3980": "mitochondrial processing peptidase alpha subunit, putative, metallo-peptidase, Clan ME, Family M16",
"Tb927.11.4020": "Complex1_LYR-like, putative",
"Tb927.11.4140": "hypothetical protein, conserved",
"Tb927.11.4160": "predicted C2 domain protein",
"Tb927.11.4200": "mannose-specific lectin, putative",
"Tb927.11.4320": "Uncharacterized ACR, COG1678, putative",
"Tb927.11.4330": "hypothetical protein, conserved",
"Tb927.11.4480": "radial spoke protein RSP4/6, putative",
"Tb927.11.4490": "long-chain-fatty-acid-CoA ligase, putative, fatty acid thiokinase (long chain), putative",
"Tb927.11.4650": "hypothetical protein, conserved",
"Tb927.11.4680": "SURF1 family, putative",
"Tb927.11.4690": "mitochondrial DNA polymerase I protein B (POLIB)",
"Tb927.11.4700": "prostaglandin f synthase",
"Tb927.11.4780": "pyruvate dehydrogenase (lipoamide) kinase, putative",
"Tb927.11.480": "small GTP-binding protein domain containing protein, putative",
"Tb927.11.4810": "hypothetical protein, conserved",
"Tb927.11.4850": "hypothetical protein, conserved",
"Tb927.11.4870": "hypothetical protein, conserved",
"Tb927.11.5030": "hypothetical protein, conserved",
"Tb927.11.5050": "fumarate hydratase, class I (FHm)",
"Tb927.11.5060": "Mitochondrial small ribosomal subunit Rsm22, putative",
"Tb927.11.5090": "aspartate aminotransferase, mitochondrial",
"Tb927.11.5180": "hypothetical protein, conserved",
"Tb927.11.5240": "Methyltransferase TRM13, putative",
"Tb927.11.5280": "ATPase subunit 9, putative",
"Tb927.11.5290": "mitochondrial carrier protein (MCP9)",
"Tb927.11.530": "RNA-binding protein, putative (RBP3)",
"Tb927.11.5370": "hypothetical protein, conserved",
"Tb927.11.5380": "Protein of unknown function (DUF523), putative",
"Tb927.11.5390": "hypothetical protein, conserved",
"Tb927.11.540": "ABC transporter, mitochondrial, putative, multidrug resistance protein, mitochondrial, putative (ABCT)",
"Tb927.11.5440": "malic enzyme",
"Tb927.11.5450": "malic enzyme",
"Tb927.11.5500": "Mitochondrial SSU ribosomal protein, putative (KRIPP1)",
"Tb927.11.5520": "triosephosphate isomerase (TIM)",
"Tb927.11.5530": "hypothetical protein, conserved",
"Tb927.11.5600": "Archaic Translocase of outer membrane 14 kDa subunit (ATOM14)",
"Tb927.11.5680": "dynein light chain, putative",
"Tb927.11.570": "DnaJ domain containing protein, putative",
"Tb927.11.5740": "formin-like protein",
"Tb927.11.5760": "hypothetical protein, conserved",
"Tb927.11.5780": "mitochondrial DNA-directed RNA polymerase (MTRNAP)",
"Tb927.11.5820": "Inhibitor of apoptosis-promoting Bax1, putative",
"Tb927.11.5880": "hypothetical protein, conserved",
"Tb927.11.590": "hypothetical protein, conserved",
"Tb927.11.5920": "hypothetical protein, conserved",
"Tb927.11.5960": "hypothetical protein, conserved",
"Tb927.11.5970": "phosphoinositide-specific phospholipase C, putative",
"Tb927.11.5990": "hypothetical protein, conserved",
"Tb927.11.600": "hypothetical protein, conserved",
"Tb927.11.6000": "ribosomal protein L4/L1 family, putative",
"Tb927.11.6010": "conserved protein, unknown function",
"Tb927.11.6040": "Nodulin-like, putative",
"Tb927.11.6100": "hypothetical protein, conserved",
"Tb927.11.6140": "40S ribosomal protein S15A, putative",
"Tb927.11.6150": "60Kd inner membrane protein, putative",
"Tb927.11.6210": "sterol 14-alpha-demethylase (CYP51)",
"Tb927.11.6230": "protein transport protein SEC61 subunit alpha, putative, pretranslocation protein, alpha subunit, putative",
"Tb927.11.6250": "hypothetical protein, conserved",
"Tb927.11.6280": "pyruvate phosphate dikinase (PPDK)",
"Tb927.11.6300": "40S ribosomal protein S5, putative",
"Tb927.11.6390": "START domain containing protein, putative",
"Tb927.11.640": "pentatricopeptide repeat domain containing protein, putative",
"Tb927.11.6460": "hypothetical protein, conserved",
"Tb927.11.6470": "hypothetical protein, conserved",
"Tb927.11.6550": "PUF nine target 1 (PNT1)",
"Tb927.11.6570": "Nodulin-like, putative",
"Tb927.11.6610": "hypothetical protein, conserved",
"Tb927.11.6630": "3-methylcrotonoyl-CoA carboxylase beta subunit, putative",
"Tb927.11.6640": "cytochrome b5, putative (CYB5)",
"Tb927.11.6660": "Tex-like protein N-terminal domain/Helix-hairpin-helix motif containing protein, putative",
"Tb927.11.6700": "hypothetical protein, conserved",
"Tb927.11.6770": "hypothetical protein, conserved",
"Tb927.11.6830": "Domain of unknown function(DUF2779), putative",
"Tb927.11.6890": "DNA repair and recombination helicase protein PIF1 (PIF1)",
"Tb927.11.6900": "DNA repair and recombination helicase protein PIF2 (PIF2)",
"Tb927.11.6970": "NADH-cytochrome b5 reductase-like protein",
"Tb927.11.6980": "hypothetical protein, conserved",
"Tb927.11.7040": "pterin-4-alpha-carbinolamine dehydratase, putative",
"Tb927.11.7170": "seryl-tRNA synthetase",
"Tb927.11.720": "hypothetical protein, conserved",
"Tb927.11.7212": "hypothetical protein, conserved",
"Tb927.11.7214": "hypothetical protein, conserved",
"Tb927.11.7218": "mitochondrial edited mRNA stability factor 1 subunit",
"Tb927.11.7250": "hypothetical protein, conserved",
"Tb927.11.7290": "pantothenate kinase subunit, putative",
"Tb927.11.7380": "glycerol-3-phosphate dehydrogenase (FAD-dependent), mitochondrial",
"Tb927.11.7390": "NADH-ubiquinone oxidoreductase complex I, 21 kDa subunit, putative",
"Tb927.11.7460": "glucose-regulated protein 78, putative, luminal binding protein 1 (BiP), putative (BiP)",
"Tb927.11.7470": "hypothetical protein, conserved",
"Tb927.11.7540": "electron-transfer-flavoprotein, alpha polypeptide, putative",
"Tb927.11.7590": "hypothetical protein, conserved",
"Tb927.11.7780": "Archaic translocase outer mitochondrial membrane 46 kDa subunit, Mitochondrial import receptor subunit ATOM46 (ATOM46)",
"Tb927.11.7790": "hypothetical protein, conserved",
"Tb927.11.7900": "mitochondrial RNA binding protein 16 (RBP16)",
"Tb927.11.7920": "hypothetical protein, conserved",
"Tb927.11.7960": "kinetoplast poly(A) polymerase 1",
"Tb927.11.8040": "hypothetical protein, conserved",
"Tb927.11.8090": "protein phosphatase 1, putative",
"Tb927.11.8110": "hypothetical protein, conserved",
"Tb927.11.8380": "NADPH--cytochrome P450 reductase, putative (CPR)",
"Tb927.11.8400": "Mitochondrial RNA processing endonuclease 1 (mRPN1)",
"Tb927.11.870": "hypothetical protein, conserved",
"Tb927.11.8800": "hypothetical protein, conserved",
"Tb927.11.8870": "DEAD-box ATP-dependent RNA helicase, mitochondrial (MHEL61)",
"Tb927.11.8910": "NADH dehydrogenase subunit NB6M, putative",
"Tb927.11.8980": "Elongation factor G 2, mitochondrial, putative (EF-G2)",
"Tb927.11.8990": "cation transporter, putative",
"Tb927.11.900": "isocitrate dehydrogenase, putative (IDH)",
"Tb927.11.9140": "mitochondrial RNA binding complex 1 subunit (MRB0880)",
"Tb927.11.9150": "hypothetical protein, conserved",
"Tb927.11.9280": "hypothetical protein, conserved",
"Tb927.11.9310": "tRNA pseudouridine synthase, putative",
"Tb927.11.9330": "helicase-like protein",
"Tb927.11.9360": "prenyltransferase, putative",
"Tb927.11.9390": "Octanoyltransferase, putative",
"Tb927.11.940": "KREPB5 (KREPB5)",
"Tb927.11.9450": "cyclophilin type peptidyl-prolyl cis-trans isomerase, putative (PPIase)",
"Tb927.11.9560": "oxidoreductase, putative",
"Tb927.11.9570": "hypothetical protein, conserved",
"Tb927.11.9600": "hypothetical protein, conserved",
"Tb927.11.9670": "aminomethyltransferase, mitochondrial, putative, aminomethyltransferase, mitochondrial, putative, glycine synthase, putative (GCV)",
"Tb927.11.9720": "40S ribosomal protein S27, putative",
"Tb927.11.9750": "Protein of unknown function (DUF498/DUF598), putative",
"Tb927.11.9820": "Protein of unknown function (DUF1619), putative",
"Tb927.11.9830": "hypothetical protein, conserved",
"Tb927.11.9860": "EF-hand domain pair, putative",
"Tb927.11.9900": "phytoene synthase, putative",
"Tb927.11.9920": "polyubiquitin, putative",
"Tb927.11.9930": "NADH-ubiquinone oxidoreductase complex I subunit, putative",
"Tb927.11.9980": "2-oxoglutarate dehydrogenase E1 component, putative",
"Tb927.2.100": "retrotransposon hot spot protein 1 (RHS1), putative",
"Tb927.2.1210": "retrotransposon hot spot protein 4 (RHS4), putative",
"Tb927.2.1330": "retrotransposon hot spot protein (RHS, pseudogene), putative, retrotransposon hot spot protein 6 (RHS6), degenerate",
"Tb927.2.1560": "cyclophilin-type peptidyl-prolyl cis-trans isomerase, putative",
"Tb927.2.1680": "cyclophilin-type peptidyl-prolyl cis-trans isomerase, putative",
"Tb927.2.1860": "mitochondrial RNA binding complex 1 subunit (MRB1860)",
"Tb927.2.2140": "hypothetical protein, conserved",
"Tb927.2.2210": "hypothetical protein, conserved",
"Tb927.2.2220": "hypothetical protein, conserved",
"Tb927.2.2400": "glycosyltransferase (GlcNAc), putative",
"Tb927.2.2430": "casein kinase II, alpha chain (CK2A2)",
"Tb927.2.2470": "Kinetoplastid RNA editing protein 1, Mitochondrial protein 81 (KREPA1)",
"Tb927.2.2510": "voltage-dependent anion-selective channel 1, Mitochondrial outer membrane protein porin 1 (VDAC1)",
"Tb927.2.2520": "voltage-dependent anion-selective channel 2, Mitochondrial outer membrane protein porin 2 (VDAC2)",
"Tb927.2.2940": "hypothetical protein, conserved",
"Tb927.2.2970": "mitochondrial carrier protein (MCP13)",
"Tb927.2.3030": "ATP-dependent Clp protease subunit, heat shock protein 78 (HSP78), putative",
"Tb927.2.3080": "fatty acid desaturase, putative, oleate desaturase, putative, cytochrome b5-dependent oleate desaturase, conserved protein",
"Tb927.2.3180": "kinetoplast polyadenylation/uridylation factor 1 (KPAF1)",
"Tb927.2.3330": "hypothetical protein",
"Tb927.2.3420": "hypothetical protein, conserved",
"Tb927.2.3460": "cysteine peptidase, Clan CA, family C51, putative",
"Tb927.2.3610": "hypothetical protein, conserved",
"Tb927.2.3800": "MRB1-associated protein, guide RNA associated protein 1 (GAP1)",
"Tb927.2.4090": "signal recognition particle receptor beta subunit, putative",
"Tb927.2.4110": "metallo-peptidase, Clan ME, Family M16, Mitochondrial-processing peptidase subunit alpha (MPPA)",
"Tb927.2.4130": "enoyl-CoA hydratase/Enoyl-CoA isomerase/3-hydroxyacyl-CoA dehydrogenase, putative",
"Tb927.2.4210": "glycosomal phosphoenolpyruvate carboxykinase (PEPCK)",
"Tb927.2.4230": "NUP-1 protein, putative",
"Tb927.2.4240": "GTP binding protein, putative",
"Tb927.2.4380": "hypothetical protein, conserved",
"Tb927.2.4400": "hypothetical protein, conserved",
"Tb927.2.4445": "hypothetical protein, conserved",
"Tb927.2.4590": "branched-chain amino acid aminotransferase, putative",
"Tb927.2.4610": "branched-chain amino acid aminotransferase, putative",
"Tb927.2.470": "retrotransposon hot spot protein 4 (RHS4), putative",
"Tb927.2.4700": "hypothetical protein, conserved",
"Tb927.2.4740": "ribosomal protein L11, putative",
"Tb927.2.4800": "hypothetical protein, conserved",
"Tb927.2.4830": "TFIIF-stimulated CTD phosphatase, putative",
"Tb927.2.4890": "ribosomal protein L11, putative",
"Tb927.2.4990": "hypothetical protein, conserved",
"Tb927.2.5020": "acyl-CoA oxidase, putative",
"Tb927.2.5140": "hypothetical protein, conserved",
"Tb927.2.5180": "aldo-keto reductase, putative",
"Tb927.2.5210": "3-oxoacyl-ACP reductase, putative",
"Tb927.2.5280": "trans-sialidase, putative",
"Tb927.2.5410": "ABC transporter family-like protein",
"Tb927.2.5530": "hypothetical protein, conserved (POMP22A)",
"Tb927.2.5720": "hypothetical protein",
"Tb927.2.5890": "hypothetical protein, conserved",
"Tb927.2.5900": "hypothetical protein, conserved",
"Tb927.2.5930": "hypothetical protein, conserved",
"Tb927.2.5970": "hypothetical protein, conserved",
"Tb927.2.6070": "mitochondrial RNA binding complex 1 subunit (MRB6070)",
"Tb927.2.830": "retrotransposon hot spot protein (RHS, pseudogene), putative, retrotransposon hot spot protein 1 (RHS1), interrupted",
"Tb927.3.1000": "frataxin-like, mitochondrial precursor, putative",
"Tb927.3.1010": "hypothetical protein, conserved",
"Tb927.3.1080": "Protein of unknown function (DUF1295), putative (POMP23)",
"Tb927.3.1100": "exonuclease, putative",
"Tb927.3.1120": "GTP-binding nuclear protein rtb2, putative (rtb2)",
"Tb927.3.1160": "hypothetical protein, conserved",
"Tb927.3.1180": "hypothetical protein, conserved",
"Tb927.3.1380": "ATP synthase subunit beta, mitochondrial, ATP synthase F1, beta subunit (ATPB)",
"Tb927.3.1410": "cytochrome oxidase subunit VII (COXVII)",
"Tb927.3.1590": "mitochondrial RNA binding complex 1 subunit (MRB1590)",
"Tb927.3.1600": "Tim10/DDP family zinc finger, putative",
"Tb927.3.1690": "hypothetical protein, conserved",
"Tb927.3.1710": "hypothetical protein, conserved",
"Tb927.3.1720": "hypothetical protein, conserved",
"Tb927.3.1730": "hypothetical protein, conserved",
"Tb927.3.1760": "chaperone protein DNAj, putative",
"Tb927.3.1790": "pyruvate dehydrogenase E1 beta subunit, putative",
"Tb927.3.1810": "hypothetical protein, conserved",
"Tb927.3.1820": "mitochondrial RNA binding complex 1 subunit (MRB1820)",
"Tb927.3.1840": "3-oxo-5-alpha-steroid 4-dehydrogenase, putative",
"Tb927.3.1890": "cytochrome c oxidase assembly protein, putative",
"Tb927.3.1900": "hypothetical protein, conserved",
"Tb927.3.1940": "TLP18.3, Psb32 and MOLO-1 founding proteins of phosphatase, putative",
"Tb927.3.2010": "hypothetical protein, conserved",
"Tb927.3.2050": "hypothetical protein, conserved",
"Tb927.3.2080": "hypothetical protein, conserved",
"Tb927.3.2130": "RNA pseudouridylate synthase, putative",
"Tb927.3.2150": "protein phosphatase 2C, putative",
"Tb927.3.2180": "hypothetical protein, conserved",
"Tb927.3.2230": "succinyl-CoA synthetase alpha subunit, putative",
"Tb927.3.2260": "Ubiquitin family, putative",
"Tb927.3.2300": "DNL zinc finger, putative",
"Tb927.3.2310": "PACRGA",
"Tb927.3.2370": "hypothetical protein, conserved",
"Tb927.3.2390": "hypothetical protein, conserved",
"Tb927.3.2420": "Microsomal signal peptidase 12 kDa subunit (SPC12), putative",
"Tb927.3.2630": "hypothetical protein, conserved",
"Tb927.3.2650": "cytochrome c oxidase copper chaperone, putative",
"Tb927.3.2670": "hypothetical protein, conserved",
"Tb927.3.2840": "inorganic pyrophosphatase, putative",
"Tb927.3.2860": "hypothetical protein, conserved",
"Tb927.3.2880": "hypothetical protein, conserved",
"Tb927.3.2920": "hypothetical protein, conserved",
"Tb927.3.2970": "hypothetical protein, conserved",
"Tb927.3.2980": "mitochondrial carrier protein (MCP17)",
"Tb927.3.3050": "hypothetical protein, conserved",
"Tb927.3.3130": "hypothetical protein, conserved (POMP24)",
"Tb927.3.3180": "Nucleoporin (TbNup98)",
"Tb927.3.3270": "ATP-dependent phosphofructokinase (PFK)",
"Tb927.3.3300": "hypothetical protein, conserved",
"Tb927.3.3330": "heat shock protein 20, putative",
"Tb927.3.3460": "hypothetical protein, conserved",
"Tb927.3.3520": "hypothetical protein, conserved (POMP25)",
"Tb927.3.3560": "U-box domain containing protein, putative",
"Tb927.3.3580": "heat shock protein 90, putative (LPG3)",
"Tb927.3.3620": "hypothetical protein, conserved",
"Tb927.3.3630": "Elongation factor Ts, mitochondrial, putative (EF-Ts)",
"Tb927.3.3660": "NADH-ubiquinone oxidoreductase complex I subunit, putative",
"Tb927.3.3680": "hypothetical protein, conserved",
"Tb927.3.3890": "hypothetical protein, conserved",
"Tb927.3.3900": "carnitine O-palmitoyltransferase II, putative (CPT II)",
"Tb927.3.3990": "KREPB6 (KREPB6)",
"Tb927.3.4030": "hypothetical protein, conserved",
"Tb927.3.4070": "Nodulin-like, putative",
"Tb927.3.4130": "hypothetical protein, conserved",
"Tb927.3.4140": "hypothetical protein, conserved",
"Tb927.3.4190": "endosomal integral membrane protein, putative",
"Tb927.3.4210": "hypothetical protein, conserved",
"Tb927.3.4260": "Thioesterase superfamily, putative",
"Tb927.3.4290": "73 kDa paraflagellar rod protein (PFRC)",
"Tb927.3.4380": "Tob55, putative",
"Tb927.3.4390": "dihydrolipoamide dehydrogenase, putative (GCVL-1)",
"Tb927.3.4420": "hypothetical protein, conserved",
"Tb927.3.4530": "hypothetical protein, conserved",
"Tb927.3.4550": "hypothetical protein, conserved",
"Tb927.3.4630": "UDP-glucose:glycoprotein glucosyltransferase, putative",
"Tb927.3.4640": "VIT family, putative (POMP26)",
"Tb927.3.4650": "C-8 sterol isomerase, putative",
"Tb927.3.4700": "Domain of unknown function (DUF4379), putative",
"Tb927.3.4740": "NAD-dependent glycerol-3-phosphate dehydrogenase N-terminus, putative",
"Tb927.3.4780": "NAD-dependent glycerol-3-phosphate dehydrogenase N-terminus, putative",
"Tb927.3.4820": "acyltransferase, putative",
"Tb927.3.4850": "enoyl-CoA hydratase, mitochondrial precursor, putative",
"Tb927.3.4890": "ubiquinone biosynthesis protein COQ7 homolog, putative",
"Tb927.3.4920": "LETM1 and EF-hand domain-containing protein 1, putative",
"Tb927.3.4950": "hypothetical protein, conserved",
"Tb927.3.4990": "oxidoreductase, putative",
"Tb927.3.5000": "hypothetical protein, conserved",
"Tb927.3.5130": "exonuclease, putative",
"Tb927.3.5210": "hypothetical protein, conserved",
"Tb927.3.5240": "hypothetical protein, conserved",
"Tb927.3.5350": "hypothetical protein, conserved",
"Tb927.3.5360": "FHA domain containing protein, putative",
"Tb927.3.5370": "hypothetical protein, conserved",
"Tb927.3.5430": "hypothetical protein, conserved",
"Tb927.3.5570": "syntaxin, putative",
"Tb927.3.5600": "hypothetical protein, conserved",
"Tb927.3.5610": "ribosomal protein L3 mitochondrial, putative",
"Tb927.3.5630": "3,2-trans-enoyl-CoA isomerase, mitochondrial precursor, putative",
"Tb927.3.590": "adenosine transporter, putative",
"Tb927.3.640": "hypothetical protein, conserved",
"Tb927.3.680": "cytochrome P450, putative",
"Tb927.3.700": "hypothetical protein, conserved",
"Tb927.3.750": "SET domain containing protein, putative",
"Tb927.3.770": "hypothetical protein, conserved",
"Tb927.3.800": "Vacuolar iron transporter 1, putative (VIT1)",
"Tb927.3.820": "hypothetical protein, conserved",
"Tb927.3.860": "Acyl carrier protein, mitochondrial, NADH-ubiquinone oxidoreductase complex I subunit, putative (ACP)",
"Tb927.3.930": "dynein heavy chain, putative",
"Tb927.3.940": "Complex 1 protein (LYR family), putative",
"Tb927.3.950": "2OG-Fe(II) oxygenase superfamily, putative",
"Tb927.3.960": "protein transport protein Sec61 gamma subunit, putative",
"Tb927.3.970": "NAD binding domain of 6-phosphogluconate dehydrogenase/NAD-binding of NADP-dependent 3-hydroxyisobutyrate dehydrogenase, putative",
"Tb927.4.1070": "50S ribosomal protein L13, putative",
"Tb927.4.1130": "Complex 1 protein (LYR family), putative",
"Tb927.4.1260": "hypothetical protein, conserved",
"Tb927.4.1280": "Tetratricopeptide repeat, putative",
"Tb927.4.130": "receptor-type adenylate cyclase GRESAG 4, pseudogene, putative, receptor-type adenylate cyclase GRESAG 4, degenerate",
"Tb927.4.1300": "amidinotransferase, putative",
"Tb927.4.1440": "probable rRNA maturation factor YbeY, putative",
"Tb927.4.1470": "Domain of unknown function (DUF4379), putative",
"Tb927.4.1500": "RNA editing associated helicase 2 (REH2)",
"Tb927.4.1540": "NAD dependent epimerase/dehydratase family, putative (POMP27)",
"Tb927.4.1610": "hypothetical protein, conserved",
"Tb927.4.1660": "mitochondrial carrier protein, putative (MCP6)",
"Tb927.4.1670": "cytochrome b5-like Heme/Steroid binding domain containing protein, putative (POMP29)",
"Tb927.4.1750": "hypothetical protein, conserved",
"Tb927.4.1760": "hypothetical protein, conserved",
"Tb927.4.1810": "hypothetical protein, conserved",
"Tb927.4.1920": "GPI transamidase subunit 16, putative (GPI16)",
"Tb927.4.2080": "C2 domain containing protein (CC2D)",
"Tb927.4.2150": "hypothetical protein, conserved",
"Tb927.4.2180": "60S ribosomal protein L35a, putative",
"Tb927.4.2260": "centrin, putative (centrin1)",
"Tb927.4.2310": "asparaginyl-tRNA synthetase, putative",
"Tb927.4.2360": "hypothetical protein, conserved",
"Tb927.4.2440": "SET domain containing protein, putative",
"Tb927.4.2450": "thioredoxin, putative",
"Tb927.4.2480": "8-oxoguanine DNA glycosylase, putative",
"Tb927.4.2530": "hypothetical protein, conserved",
"Tb927.4.2550": "hypothetical protein, conserved",
"Tb927.4.2560": "cardiolipin synthetase (cls)",
"Tb927.4.2580": "hypothetical protein, conserved",
"Tb927.4.2700": "Hydroxymethylglutaryl-CoA lyase, mitochondrial, putative",
"Tb927.4.2720": "ATP dependent DEAD-box helicase (RH)",
"Tb927.4.2740": "p25-alpha, putative",
"Tb927.4.2790": "hypothetical protein, conserved",
"Tb927.4.2900": "hypothetical protein, conserved",
"Tb927.4.2910": "hypothetical protein, conserved",
"Tb927.4.2940": "hypothetical protein, conserved",
"Tb927.4.2950": "mitochondrial DNA polymerase I protein A, putative",
"Tb927.4.3010": "hypothetical protein, conserved",
"Tb927.4.3040": "hypothetical protein, conserved",
"Tb927.4.3060": "hypothetical protein, conserved",
"Tb927.4.3070": "hypothetical protein, conserved",
"Tb927.4.3080": "Uncharacterised ACR, YggU family COG1872, putative",
"Tb927.4.3150": "hypothetical protein, conserved",
"Tb927.4.320": "Eukaryotic protein of unknown function (DUF914), putative",
"Tb927.4.330": "hypothetical protein, conserved",
"Tb927.4.3300": "mitochondrial ATP-dependent zinc metallopeptidase, putative, metallo-peptidase, Clan MA(E) Family M41",
"Tb927.4.3390": "tuzin, putative",
"Tb927.4.340": "hypothetical protein, conserved",
"Tb927.4.3430": "hypothetical protein, conserved",
"Tb927.4.3450": "hypothetical protein, conserved",
"Tb927.4.3660": "hypothetical protein, conserved",
"Tb927.4.3680": "protein phosphatase 2C, putative",
"Tb927.4.3690": "Iron/manganese superoxide dismutases, C-terminal domain containing protein, putative",
"Tb927.4.3800": "hypothetical protein, conserved",
"Tb927.4.3950": "cytoskeleton-associated protein CAP5.5, putative, cysteine peptidase, Clan CA, family C2, putative, Calpain-like protein 1 (CAP5.5)",
"Tb927.4.3960": "hypothetical protein, conserved",
"Tb927.4.4140": "hypothetical protein, conserved",
"Tb927.4.4150": "mitochondrial RNA binding complex 1 subunit (MRB4150)",
"Tb927.4.4160": "mitochondrial RNA binding protein (MRB4160)",
"Tb927.4.420": "phosphatidylinositol 3-kinase (tor2) (TOR2)",
"Tb927.4.4210": "metallo-peptidase, Clan MA(E) Family M41, ATP-dependent zinc metalloprotease AFG3-like, putative",
"Tb927.4.4300": "hypothetical protein, conserved",
"Tb927.4.4380": "Pyrophosphate-energized vacuolar membrane proton pump 1, Vacuolar proton pyrophosphatase 1 (VP1)",
"Tb927.4.440": "NADH:ubiquinone oxidoreductase, ESSS subunit, putative, NADH-ubiquinone oxidoreductase complex I subunit, putative",
"Tb927.4.4490": "multidrug resistance protein E, p-glycoprotein (MRPE)",
"Tb927.4.4600": "Mitochondrial ribosomal protein L51 / S25 / CI-B8 domain containing protein, putative",
"Tb927.4.4610": "hypothetical protein, conserved",
"Tb927.4.4620": "cytochrome oxidase subunit VIII (COXVIII)",
"Tb927.4.4680": "DNA replication complex GINS protein PSF3 (psf3)",
"Tb927.4.4700": "hypothetical protein, conserved",
"Tb927.4.4720": "hypothetical protein, conserved",
"Tb927.4.480": "Cullin binding, putative",
"Tb927.4.4910": "3,2-trans-enoyl-CoA isomerase, mitochondrial precursor, putative",
"Tb927.4.4980": "adrenodoxin precursor, putative",
"Tb927.4.5010": "calreticulin, putative",
"Tb927.4.5040": "dihydrolipoamide dehydrogenase, putative",
"Tb927.4.5050": "dihydrolipoamide dehydrogenase, pseudogene, putative, acetoin dehydrogenase e3 component, point mutation, dihydrolipoamide dehydrogenase, point mutation",
"Tb927.4.5130": "KH domain/Domain of unknown function (DUF1771)/Smr domain containing protein, putative",
"Tb927.4.5390": "serine/threonine-protein kinase NrkA, nek1/NIMA-related kinase A (NrkA)",
"Tb927.4.570": "Replication factor-A C terminal domain containing protein, putative",
"Tb927.4.590": "PQQ-like domain/Protein of unknown function (DUF1620), putative",
"Tb927.4.600": "Alpha/beta hydrolase family, putative",
"Tb927.4.630": "hypothetical protein, conserved",
"Tb927.4.650": "chaperone protein DNAj, putative",
"Tb927.4.700": "hypothetical protein, conserved",
"Tb927.4.720": "hypothetical protein, conserved",
"Tb927.4.830": "cytochrome c oxidase assembly protein PET191, putative",
"Tb927.4.930": "50S ribosomal protein L14, putative",
"Tb927.4.940": "hypothetical protein, conserved",
"Tb927.4.950": "hypothetical protein, conserved",
"Tb927.5.1020": "disulfide isomerase, putative",
"Tb927.5.1030": "Iron-sulfur assembly protein 2 (Isa2)",
"Tb927.5.1060": "mitochondrial processing peptidase, beta subunit, putative, metallo-peptidase, Clan ME, Family M16",
"Tb927.5.1130": "hypothetical protein, conserved",
"Tb927.5.1210": "short-chain dehydrogenase, putative",
"Tb927.5.1240": "LicD family, putative",
"Tb927.5.1310": "protoheme IX farnesyltransferase, putative",
"Tb927.5.1470": "NADH-cytochrome b5 reductase, putative (B5R)",
"Tb927.5.1510": "hypothetical protein, conserved",
"Tb927.5.1520": "ATP-dependent protease ATPase subunit HslU1 (HslU1)",
"Tb927.5.1530": "Flavin-binding monooxygenase-like, putative",
"Tb927.5.1550": "mitochondrial carrier protein (MCP23)",
"Tb927.5.1630": "hypothetical protein, conserved",
"Tb927.5.1710": "ribonucleoprotein p18, mitochondrial precursor, putative",
"Tb927.5.1720": "hypothetical protein, conserved",
"Tb927.5.1780": "hypothetical protein, conserved",
"Tb927.5.1790": "hypothetical protein, conserved",
"Tb927.5.1930": "signal peptidase subunit, putative",
"Tb927.5.1940": "hypothetical protein, conserved",
"Tb927.5.2000": "hypothetical protein",
"Tb927.5.2030": "hypothetical protein, conserved",
"Tb927.5.2070": "hypothetical protein, conserved",
"Tb927.5.2100": "RNA-binding protein, putative (RBP30)",
"Tb927.5.2150": "hypothetical protein, conserved",
"Tb927.5.2180": "hypothetical protein, conserved",
"Tb927.5.2260": "conserved protein",
"Tb927.5.2380": "hydrolase, alpha/beta fold family, putative",
"Tb927.5.2550": "hypothetical protein, conserved",
"Tb927.5.2560": "hypothetical protein, conserved",
"Tb927.5.2580": "hypothetical protein, conserved",
"Tb927.5.2590": "Macro domain containing protein, putative",
"Tb927.5.2700": "otubain cysteine peptidase, Clan CA, family C65, putative",
"Tb927.5.2780": "mitochondrial DNA polymerase beta",
"Tb927.5.2790": "mitochondrial DNA polymerase beta-PAK (Pol beta-PAK)",
"Tb927.5.2810": "hypothetical protein, conserved",
"Tb927.5.2830": "hypothetical protein, conserved",
"Tb927.5.2930": "hypothetical protein, conserved",
"Tb927.5.3000": "Thiopurine S-methyltransferase (TPMT), putative",
"Tb927.5.3010": "guide RNA binding protein, putative, MRB1 complex subunit MRB3010 (MRB3010)",
"Tb927.5.3040": "MIX protein",
"Tb927.5.3060": "hypothetical protein, conserved",
"Tb927.5.3090": "hypothetical protein, conserved",
"Tb927.5.3110": "hypothetical protein, conserved",
"Tb927.5.3140": "hypothetical protein, conserved",
"Tb927.5.320": "adenylyl cyclase, putative, receptor-type adenylate cyclase GRESAG 4, putative",
"Tb927.5.3220": "signal peptidase type I, putative",
"Tb927.5.3300": "hypothetical protein, conserved",
"Tb927.5.3340": "Tim10/DDP family zinc finger, putative",
"Tb927.5.3350": "iron superoxide dismutase, putative",
"Tb927.5.3360": "50S ribosomal protein L2, putative",
"Tb927.5.3390": "ADG1, pseudogene",
"Tb927.5.3400": "calcium-translocating P-type ATPase, calcium pump",
"Tb927.5.3410": "hypothetical protein, conserved",
"Tb927.5.3420": "ABC1 family, putative",
"Tb927.5.3440": "hypothetical protein, conserved",
"Tb927.5.3640": "hypothetical protein, conserved",
"Tb927.5.3690": "hypothetical protein, conserved",
"Tb927.5.3710": "essential neutral sphingomyelinase (TbnSMase)",
"Tb927.5.3770": "hypothetical protein, conserved",
"Tb927.5.3870": "hypothetical protein, conserved",
"Tb927.5.3910": "small nuclear RNA-activating protein (SNAP2)",
"Tb927.5.3980": "hypothetical protein, conserved",
"Tb927.5.4020": "hypothetical protein",
"Tb927.5.4040": "hypothetical protein, conserved",
"Tb927.5.4070": "PrimPol-like protein 1 (ppl1)",
"Tb927.5.4080": "hypothetical protein, conserved",
"Tb927.5.4120": "hypothetical protein, conserved",
"Tb927.5.4130": "hypothetical protein, conserved",
"Tb927.5.4170": "histone H4, putative",
"Tb927.5.4330": "dihydrolipoamide branched chain transacylase, putative",
"Tb927.5.4360": "hypothetical protein, conserved",
"Tb927.5.440": "trans-sialidase, putative",
"Tb927.5.450": "NADH-ubiquinone oxidoreductase, mitochondrial, putative (NUBM)",
"Tb927.5.500": "hypothetical protein, conserved",
"Tb927.5.510": "hypothetical protein, conserved",
"Tb927.5.520": "stomatin-like protein, putative",
"Tb927.5.530": "hypothetical protein, conserved",
"Tb927.5.560": "hypothetical protein, conserved",
"Tb927.5.680": "hypothetical protein, conserved",
"Tb927.5.690": "hypothetical protein, conserved",
"Tb927.5.740": "hypothetical protein, conserved",
"Tb927.5.770": "hypothetical protein, conserved",
"Tb927.5.800": "casein kinase I, isoform 2 (CK1.2)",
"Tb927.5.900": "oligosaccharyl transferase subunit, putative",
"Tb927.5.920": "rhodanese-like domain containing protein, putative",
"Tb927.5.930": "NADH-dependent fumarate reductase (FRDg)",
"Tb927.5.960": "ATPase family associated with various cellular activities (AAA), putative",
"Tb927.5.980": "2OG-Fe(II) oxygenase superfamily, putative",
"Tb927.6.1010": "cysteine peptidase, Clan CA, family C1, Cathepsin L-like (CP)",
"Tb927.6.1140": "dolichyl-P-Man:GDP-Man5GlcNAc2-PP-dolichyl alpha-1,2-mannosyltransferase, putative (ALG9)",
"Tb927.6.1200": "hypothetical protein, conserved",
"Tb927.6.1250": "Mitochondrial SSU ribosomal protein 29, Mitochondrial Ribosomal Protein of Small Subunit 29 (MRPS29)",
"Tb927.6.1410": "hypothetical protein, conserved",
"Tb927.6.1440": "hypothetical protein, conserved",
"Tb927.6.1510": "lysyl-tRNA synthetase, putative",
"Tb927.6.1520": "Aquaglyceroporin 1 (AQP1)",
"Tb927.6.1550": "leucine-rich repeat protein (LRRP), putative (POMP30)",
"Tb927.6.1570": "2-hydroxy-3-oxopropionate reductase, putative",
"Tb927.6.1590": "hypothetical protein, conserved",
"Tb927.6.1640": "single strand-specific nuclease, putative",
"Tb927.6.1680": "mitochondrial RNA binding protein 1 (MRB1680)",
"Tb927.6.1710": "hypothetical protein, conserved",
"Tb927.6.1840": "Diacylglycerol kinase catalytic domain containing protein, putative",
"Tb927.6.1860": "conserved protein",
"Tb927.6.1890": "hypothetical protein, conserved",
"Tb927.6.2010": "AMP-binding enzyme, putative",
"Tb927.6.2060": "histidyl-tRNA synthetase",
"Tb927.6.2070": "hypothetical protein, conserved",
"Tb927.6.2080": "hypothetical protein, conserved",
"Tb927.6.2140": "hypothetical protein, conserved",
"Tb927.6.2160": "hypothetical protein, conserved",
"Tb927.6.2170": "co-chaperone GrpE, putative",
"Tb927.6.2180": "hypothetical protein, conserved",
"Tb927.6.2230": "RGG protein",
"Tb927.6.2260": "hypothetical protein, conserved",
"Tb927.6.2290": "50S ribosome-binding GTPase, putative",
"Tb927.6.2350": "50S ribosome-binding GTPase, putative",
"Tb927.6.2380": "hypothetical protein, conserved",
"Tb927.6.2390": "conserved hypothetical protein, putative",
"Tb927.6.2420": "p22 protein precursor",
"Tb927.6.2470": "hypothetical protein, conserved",
"Tb927.6.2480": "chaperone protein DNAj, putative",
"Tb927.6.2490": "hypothetical protein, conserved",
"Tb927.6.2510": "Flavinator of succinate dehydrogenase, putative",
"Tb927.6.2540": "DREV methyltransferase, putative",
"Tb927.6.2560": "hypothetical protein, conserved",
"Tb927.6.2590": "hypothetical protein, conserved",
"Tb927.6.2600": "Protein of unknown function (DUF1077), putative",
"Tb927.6.2610": "hypothetical protein, conserved",
"Tb927.6.2630": "Hsp33 protein, putative",
"Tb927.6.2790": "L-threonine 3-dehydrogenase, putative",
"Tb927.6.3050": "aldehyde dehydrogenase family, putative",
"Tb927.6.3350": "hypothetical protein, conserved (YCF45)",
"Tb927.6.3360": "hypothetical protein, conserved",
"Tb927.6.3420": "Oligomerisation domain containing protein, putative",
"Tb927.6.3510": "tRNA modification enzyme, putative",
"Tb927.6.3530": "hypothetical protein, conserved",
"Tb927.6.3600": "hypothetical protein, conserved",
"Tb927.6.3610": "SET domain containing protein, putative",
"Tb927.6.3630": "sphingosine 1-phosphate lyase, putative (SPL)",
"Tb927.6.3680": "hypothetical protein, conserved (POMP31)",
"Tb927.6.3730": "chaperone protein DNAj, putative",
"Tb927.6.3740": "heat shock 70 kDa protein, mitochondrial precursor, putative",
"Tb927.6.3750": "heat shock 70 kDa protein, mitochondrial precursor, putative",
"Tb927.6.3800": "heat shock 70 kDa protein, mitochondrial precursor, putative",
"Tb927.6.3840": "reticulon domain protein",
"Tb927.6.3850": "chaperone protein DNAj, putative",
"Tb927.6.3930": "hypothetical protein, conserved",
"Tb927.6.4030": "superoxide dismutase, putative",
"Tb927.6.4040": "hypothetical protein, conserved",
"Tb927.6.4070": "hypothetical protein, conserved",
"Tb927.6.4080": "hypothetical protein, conserved",
"Tb927.6.4090": "chaperonin HSP60, mitochondrial precursor, putative (HSP60)",
"Tb927.6.4130": "hypothetical protein, conserved",
"Tb927.6.4150": "hypothetical protein, conserved",
"Tb927.6.4180": "FUN14 family, putative (POMP32)",
"Tb927.6.4200": "hypothetical protein, conserved",
"Tb927.6.4210": "aldehyde dehydrogenase, putative (ALDH)",
"Tb927.6.4270": "hypothetical protein, conserved",
"Tb927.6.4280": "glyceraldehyde 3-phosphate dehydrogenase, glycosomal (GAPDH)",
"Tb927.6.4320": "Protein of unknown function (DUF2817), putative",
"Tb927.6.4400": "hypothetical protein, conserved",
"Tb927.6.4440": "RNA-binding protein 42 (RNA-binding motif protein 42) (RBP42)",
"Tb927.6.4450": "hypothetical protein, conserved",
"Tb927.6.4500": "conserved repeat domain containing protein, putative",
"Tb927.6.4540": "3-hydroxy-3-methylglutaryl-CoA reductase, putative",
"Tb927.6.4560": "hypothetical protein, conserved",
"Tb927.6.4580": "hypothetical protein, conserved",
"Tb927.6.4700": "hypothetical protein, conserved",
"Tb927.6.4760": "T-complex protein 11, putative",
"Tb927.6.4790": "hypothetical protein, conserved",
"Tb927.6.4930": "thiosulfate sulfurtransferase, mitochondrial, putative",
"Tb927.6.4980": "40S ribosomal protein S14 (RPS14)",
"Tb927.6.4990": "ATP synthase, epsilon chain, putative",
"Tb927.6.5070": "hypothetical protein, conserved",
"Tb927.6.5080": "Protein of unknown function (DUF3808), putative",
"Tb927.6.5090": "hypothetical protein, conserved",
"Tb927.6.590": "hypothetical protein, conserved",
"Tb927.6.610": "hypothetical protein, conserved",
"Tb927.6.680": "hypothetical protein, conserved",
"Tb927.6.700": "alanyl-tRNA synthetase, putative",
"Tb927.6.710": "dephospho-CoA kinase, putative",
"Tb927.7.1000": "DNA repair and recombination helicase protein PIF1, putative (TbPIF8)",
"Tb927.7.1010": "hypothetical protein, conserved",
"Tb927.7.1030": "heat shock 70 kDa protein, putative (HSP70)",
"Tb927.7.1040": "40S ribosomal protein S16, putative",
"Tb927.7.1070": "KREX1 (KREX1)",
"Tb927.7.1080": "hypothetical protein, conserved",
"Tb927.7.1130": "trypanothione/tryparedoxin dependent peroxidase 2, glutathione peroxidase-like 2 (TDPX2)",
"Tb927.7.1200": "Protein of unknown function (DUF525), putative",
"Tb927.7.1250": "hypothetical protein, conserved",
"Tb927.7.1270": "hypothetical protein, conserved",
"Tb927.7.1290": "Protein of unknown function (DUF2012), putative",
"Tb927.7.1300": "protein disulfide isomerase, putative",
"Tb927.7.1320": "10 kDa heat shock protein, putative (HSP10)",
"Tb927.7.1340": "10 kDa heat shock protein, putative (HSP10)",
"Tb927.7.1350": "PPR repeat family, putative",
"Tb927.7.1370": "spliced leader RNA PSE-promoter transcription factor, putative (PPB1)",
"Tb927.7.1440": "non-canonical poly (A) polymerase (ncPAP2)",
"Tb927.7.1490": "hypothetical protein, conserved",
"Tb927.7.1550": "KRET2 (KRET2)",
"Tb927.7.1640": "ras-like small GTPase, putative (TbEAR)",
"Tb927.7.1720": "HIRA-interacting protein 5, putative (HIRIP5)",
"Tb927.7.190": "thimet oligopeptidase, putative (THOP1)",
"Tb927.7.1900": "protein kinase, putative, cdc2-related related kinase, putative (CRK7)",
"Tb927.7.1910": "pyridoxal phosphate containing glycine decarboxylase, putative (GCVP)",
"Tb927.7.2090": "hypothetical protein, conserved",
"Tb927.7.210": "proline dehydrogenase",
"Tb927.7.2190": "translocon-associated protein (TRAP), alpha subunit, putative",
"Tb927.7.220": "CDP-DAG synthase (CDS)",
"Tb927.7.2200": "Tim10/DDP family zinc finger, putative",
"Tb927.7.2220": "hypothetical protein, conserved",
"Tb927.7.2280": "hypothetical protein, conserved",
"Tb927.7.2390": "hypothetical protein, conserved",
"Tb927.7.2410": "Dynamin family, putative",
"Tb927.7.2490": "hypothetical protein, conserved",
"Tb927.7.2560": "hypothetical protein, conserved",
"Tb927.7.2570": "guide RNA associated protein, GAP2, mitochondrial RNA binding protein 1",
"Tb927.7.2620": "hypothetical protein, conserved",
"Tb927.7.2630": "50S ribosome-binding GTPase, putative",
"Tb927.7.2690": "hypothetical protein, conserved",
"Tb927.7.2700": "NADH-cytochrome b5 reductase, putative (B5R)",
"Tb927.7.2710": "NADH-cytochrome b5 reductase, putative",
"Tb927.7.2760": "ribosomal protein L22p/L17e, putative",
"Tb927.7.280": "cyclophilin-type peptidyl-prolyl cis-trans isomerase, putative (PPIase)",
"Tb927.7.2820": "histone H2A, putative",
"Tb927.7.2960": "hypothetical protein, conserved (POMP33)",
"Tb927.7.2980": "Nitroreductase family, putative",
"Tb927.7.2990": "hypothetical protein, conserved",
"Tb927.7.3030": "hypothetical protein, conserved",
"Tb927.7.3050": "hypothetical protein, conserved",
"Tb927.7.3060": "hypothetical protein, conserved",
"Tb927.7.3100": "hypothetical protein, conserved",
"Tb927.7.3140": "hypothetical protein, conserved",
"Tb927.7.3240": "hypothetical protein, conserved",
"Tb927.7.3270": "hypothetical protein, conserved",
"Tb927.7.3280": "translation initiation factor IF-2, putative",
"Tb927.7.3330": "hypothetical protein, conserved",
"Tb927.7.340": "Alpha/beta hydrolase family, putative",
"Tb927.7.3410": "centrin-4 (Centrin4)",
"Tb927.7.3430": "cyclophilin-type peptidyl-prolyl cis-trans isomerase, putative (PPIase)",
"Tb927.7.3440": "I/6 autoantigen",
"Tb927.7.3460": "hypothetical protein, conserved",
"Tb927.7.3470": "p22 protein precursor, putative",
"Tb927.7.3500": "glutathione-S-transferase/glutaredoxin, putative",
"Tb927.7.3510": "hypothetical protein, conserved",
"Tb927.7.3520": "mitochondrial pyruvate carrier protein 2, putative",
"Tb927.7.3550": "hypothetical protein, conserved",
"Tb927.7.3590": "hypothetical protein, conserved",
"Tb927.7.3750": "TFIIF-stimulated CTD phosphatase, putative",
"Tb927.7.3770": "YjeF family N-terminal domain/YjeF family C-terminal domain containing protein, putative",
"Tb927.7.3800": "hypothetical protein, conserved",
"Tb927.7.3810": "'Cold-shock' DNA-binding domain containing protein, putative",
"Tb927.7.3910": "hypothetical protein, conserved",
"Tb927.7.3940": "mitochondrial carrier protein, ADP/ATP mitochondrial translocase, putative, adenine nucleotide mitochondrial translocator, putative (MCP16)",
"Tb927.7.3950": "RNA-editing complex protein, RNA-editing 3' terminal uridylyl transferase 1, KRET1 (KRET1)",
"Tb927.7.3960": "50S ribosomal protein L16, putative",
"Tb927.7.3970": "hypothetical protein, conserved",
"Tb927.7.3990": "mitochondrial DNA polymerase I protein C (POL1C)",
"Tb927.7.4050": "hypothetical protein, conserved",
"Tb927.7.4070": "calpain-like cysteine peptidase, putative, cysteine peptidase, Clan CA, family C2, putative",
"Tb927.7.4140": "ribosomal protein L21, putative",
"Tb927.7.4160": "fatty acid elongase, putative, fatty acid elongase, putative",
"Tb927.7.4170": "fatty acid elongase, putative",
"Tb927.7.4180": "fatty acid elongase, putative",
"Tb927.7.4200": "hypothetical protein, conserved",
"Tb927.7.4210": "hypothetical protein, conserved",
"Tb927.7.4270": "hypothetical protein, conserved",
"Tb927.7.4310": "pyridine nucleotide-disulphide oxidoreductase, putative",
"Tb927.7.4420": "proteasome alpha 3 subunit, putative",
"Tb927.7.4430": "hypothetical protein, conserved",
"Tb927.7.4440": "NAD dependent epimerase/dehydratase family, putative",
"Tb927.7.4460": "von Willebrand factor type A domain containing protein, putative",
"Tb927.7.4470": "hypothetical protein, conserved",
"Tb927.7.4480": "adenosine 5'-monophosphoramidase, putative",
"Tb927.7.4550": "60S ribosomal protein-like",
"Tb927.7.4620": "hypothetical protein, conserved",
"Tb927.7.4710": "39S mitochondrial ribosomal protein L46, putative",
"Tb927.7.4730": "pumilio/PUF RNA binding protein 5",
"Tb927.7.4760": "hypothetical protein, conserved",
"Tb927.7.4770": "cyclophilin-type peptidyl-prolyl cis-trans isomerase, putative (PPIase)",
"Tb927.7.4810": "HD domain containing protein, putative",
"Tb927.7.4890": "hypothetical protein, conserved (POMP34)",
"Tb927.7.4910": "hypothetical protein, conserved",
"Tb927.7.4940": "oligopeptidase B protein, putative, serine peptidase, clan SC, family S9A-like protein",
"Tb927.7.4950": "NAD(P)-dependent steroid dehydrogenase protein, putative",
"Tb927.7.4980": "zinc finger protein family member, putative (ZC3H23)",
"Tb927.7.510": "hypothetical protein, conserved",
"Tb927.7.5110": "hypothetical protein, conserved",
"Tb927.7.5120": "SpoU rRNA Methylase family, putative",
"Tb927.7.5130": "hypothetical protein, conserved",
"Tb927.7.5160": "deoxyuridine triphosphatase, putative, dUTP diphosphatase",
"Tb927.7.5260": "Cytochrome c oxidase biogenesis protein Cmc1 like, putative",
"Tb927.7.5280": "hypothetical protein, conserved",
"Tb927.7.5340": "hypothetical protein, conserved",
"Tb927.7.540": "chaperone protein DNAj, putative",
"Tb927.7.5440": "hypothetical protein, conserved",
"Tb927.7.5470": "hypothetical protein, conserved (POMP36)",
"Tb927.7.5480": "dihydrofolate reductase-thymidylate synthase (DHFR-TS)",
"Tb927.7.5510": "hypothetical protein, conserved",
"Tb927.7.5540": "2,4-dienoyl-coa reductase-like protein",
"Tb927.7.5550": "TLD, putative",
"Tb927.7.560": "hypothetical protein, conserved",
"Tb927.7.5680": "deoxyribose-phosphate aldolase, putative",
"Tb927.7.5700": "hypothetical protein, conserved",
"Tb927.7.5720": "hypothetical protein, conserved",
"Tb927.7.5770": "Nuclear Dbf2-related kinase (PK53)",
"Tb927.7.5790": "protein disulfide isomerase, putative",
"Tb927.7.5820": "Monooxygenase, putative",
"Tb927.7.5840": "hypothetical protein, conserved",
"Tb927.7.590": "hypothetical protein, conserved",
"Tb927.7.5970": "protein associated with differentiation 5, putative (PAD5)",
"Tb927.7.600": "mitochondrial DNA ligase homolog, LIG k-beta",
"Tb927.7.610": "mitochondrial DNA ligase homolog, LIG k-alpha",
"Tb927.7.6100": "hypothetical protein",
"Tb927.7.6200": "chaperone protein DNAj, putative",
"Tb927.7.6260": "TPR repeat, putative",
"Tb927.7.630": "hypothetical protein, conserved",
"Tb927.7.6350": "NADH-ubiquinone oxidoreductase, mitochondrial, putative",
"Tb927.7.640": "hypothetical protein, conserved",
"Tb927.7.6410": "hypothetical protein, conserved",
"Tb927.7.6460": "FG-GAP repeat protein, putative, intergrin alpha chain protein, putative",
"Tb927.7.6660": "chaperone protein DNAj, putative",
"Tb927.7.6670": "hypothetical protein, conserved",
"Tb927.7.680": "chaperone protein DNAj, putative",
"Tb927.7.6800": "Alpha/beta hydrolase family, putative",
"Tb927.7.6830": "trans-sialidase, putative",
"Tb927.7.6850": "trans-sialidase (TS)",
"Tb927.7.6930": "ATPase, putative",
"Tb927.7.6940": "glutaminyl cyclase, putative",
"Tb927.7.6990": "hypothetical protein, conserved",
"Tb927.7.7010": "hypothetical protein, conserved",
"Tb927.7.7080": "mitochondrial glycoprotein-like protein",
"Tb927.7.7090": "hypothetical protein, conserved",
"Tb927.7.7170": "CYC2-like cyclin, putative (CYC4)",
"Tb927.7.7210": "conserved hypothetical protein, putative (POMP37)",
"Tb927.7.7230": "nitroreductase, NADH dehydrogenase, putative (NTR)",
"Tb927.7.7330": "hypothetical protein, conserved",
"Tb927.7.7360": "cdc2-related kinase 2 (CRK2)",
"Tb927.7.740": "chaperone protein DNAj, putative",
"Tb927.7.7410": "oxidoreductase, putative",
"Tb927.7.7420": "ATP synthase alpha chain, mitochondrial precursor, ATP synthase F1, alpha subunit",
"Tb927.7.7430": "ATP synthase alpha chain, mitochondrial precursor, ATP synthase F1, alpha subunit",
"Tb927.7.7440": "hypothetical protein, conserved",
"Tb927.7.800": "mitochondrial RNA binding complex 1 subunit (MRB800)",
"Tb927.7.840": "hypothetical protein, conserved",
"Tb927.7.870": "hypothetical protein, conserved",
"Tb927.7.890": "electron transfer protein, putative",
"Tb927.7.900": "hypothetical protein, conserved",
"Tb927.7.910": "hypothetical protein, conserved",
"Tb927.7.940": "protein kinase C substrate protein, heavy chain, putative, glucosidase II beta subunit, putative",
"Tb927.7.990": "chaperone protein DNAj, putative",
"Tb927.8.1010": "chaperone protein DNAj, putative",
"Tb927.8.1020": "6-phosphofructo-2-kinase/fructose-2,6-biphosphatase, putative",
"Tb927.8.1030": "chaperone protein DNAj, putative",
"Tb927.8.1060": "malonyl-CoA decarboxylase, mitochondrial precursor, putative",
"Tb927.8.1120": "hypothetical protein, conserved",
"Tb927.8.1160": "vacuolar-type Ca2+-ATPase, putative",
"Tb927.8.1240": "electron transfer flavoprotein-ubiquinone oxidoreductase, putative",
"Tb927.8.1310": "mitochondrial carrier protein, ADP/ATP mitochondrial translocase, putative, adenine nucleotide mitochondrial translocator, putative (MCP15)",
"Tb927.8.1420": "acyl-CoA dehydrogenase, mitochondrial precursor, putative",
"Tb927.8.1430": "hypothetical protein, conserved",
"Tb927.8.1440": "maoC-like dehydratase, putative",
"Tb927.8.1470": "hypothetical protein, conserved (POMP38)",
"Tb927.8.1490": "Protein of unknown function (DUF1674), putative",
"Tb927.8.1570": "Uncharacterised protein family (UPF0172), putative",
"Tb927.8.1590": "ubiquitin-protein ligase, putative (upl3)",
"Tb927.8.1620": "MSP-B, putative",
"Tb927.8.1700": "hypothetical protein, conserved",
"Tb927.8.1720": "phosphatidylglycerolphosphate synthase, mitochondrial (pgps)",
"Tb927.8.1740": "hypothetical protein, conserved",
"Tb927.8.1770": "hypothetical protein, conserved",
"Tb927.8.1850": "mitochondrial calcium uptake 1",
"Tb927.8.1860": "pitrilysin-like metalloprotease, metallo-peptidase, Clan ME, Family M16C",
"Tb927.8.1880": "hypothetical protein, conserved",
"Tb927.8.1890": "cytochrome c1, heme protein, mitochondrial precursor",
"Tb927.8.1990": "peroxidoxin (TRYP2)",
"Tb927.8.2020": "agmatinase, putative",
"Tb927.8.2030": "hypothetical protein, conserved",
"Tb927.8.2050": "GDP-mannose pyrophosphorylase",
"Tb927.8.2070": "hypothetical protein, conserved (POMP39A)",
"Tb927.8.2110": "Alpha/beta hydrolase family, putative",
"Tb927.8.2160": "multidrug resistance protein A, p-glycoprotein (PGPA)",
"Tb927.8.2170": "hypothetical protein, conserved",
"Tb927.8.2180": "hypothetical protein, conserved",
"Tb927.8.2190": "hypothetical protein, conserved",
"Tb927.8.2240": "Tryptophanyl-tRNA synthetase 2",
"Tb927.8.2300": "hypothetical protein, conserved",
"Tb927.8.2340": "hypothetical protein, conserved",
"Tb927.8.2400": "hypothetical protein, conserved",
"Tb927.8.2460": "hypothetical protein, conserved",
"Tb927.8.2470": "hypothetical protein, conserved",
"Tb927.8.2500": "ribosomal RNA methyltransferase, putative",
"Tb927.8.2530": "hypothetical protein, conserved",
"Tb927.8.2540": "3-ketoacyl-CoA thiolase, putative",
"Tb927.8.2550": "mitochondrial DNA primase (PRI1)",
"Tb927.8.2570": "hypothetical protein, conserved",
"Tb927.8.2630": "kinesin, putative",
"Tb927.8.2650": "metallo-beta-lactamase-like protein, putative",
"Tb927.8.2670": "Putative S-adenosyl-L-methionine-dependent methyltransferase, putative",
"Tb927.8.2700": "hypothetical protein, conserved",
"Tb927.8.2740": "mitochondrial RNA binding protein (TbRBP38)",
"Tb927.8.2750": "hypothetical protein, conserved",
"Tb927.8.2760": "50S ribosome-binding GTPase, putative",
"Tb927.8.2770": "inositol 1,4,5-trisphosphate receptor (IP3R)",
"Tb927.8.2820": "hypothetical protein, conserved",
"Tb927.8.2850": "Poly(A)-specific ribonuclease PARN-1",
"Tb927.8.2880": "hypothetical protein, conserved",
"Tb927.8.2920": "mannosyl-oligosaccharide 1,2-alpha-mannosidase IB, putative",
"Tb927.8.2970": "hypothetical protein, conserved",
"Tb927.8.2990": "5-formyltetrahydrofolate cyclo-ligase family, putative",
"Tb927.8.3040": "hypothetical protein, conserved",
"Tb927.8.3060": "cytosolic leucyl aminopeptidase, putative, metallo-peptidase, Clan MF, Family M17",
"Tb927.8.3070": "hypothetical protein, conserved",
"Tb927.8.3090": "hypothetical protein, conserved",
"Tb927.8.3110": "ribosomal protein S9/S16, putative",
"Tb927.8.3130": "S-adenosyl-methyltransferase mraW-like protein, putative",
"Tb927.8.3160": "hypothetical protein, conserved",
"Tb927.8.3170": "hypothetical protein, conserved",
"Tb927.8.3230": "hypothetical protein, conserved",
"Tb927.8.3260": "YceI-like domain containing protein, putative",
"Tb927.8.3300": "hypothetical protein, conserved",
"Tb927.8.3320": "hypothetical protein, conserved",
"Tb927.8.3330": "mitochondrial carrier protein (MCP18)",
"Tb927.8.3380": "electron transfer protein, putative",
"Tb927.8.3530": "glycerol-3-phosphate dehydrogenase [NAD+], glycosomal",
"Tb927.8.3560": "DNA repair and recombination helicase protein PIF5 (PIF5)",
"Tb927.8.3580": "ATP-dependent chaperone, putative, mitochondrial chaperone BCS1, putative",
"Tb927.8.3690": "isocitrate dehydrogenase [NADP], mitochondrial precursor, putative (IDH)",
"Tb927.8.3740": "pterin 4 alpha carbinolamine dehydratase, putative",
"Tb927.8.3810": "hypothetical protein, conserved",
"Tb927.8.3830": "ATP-dependent DEAD/H RNA helicase, putative, mitochondrial",
"Tb927.8.3840": "hypothetical protein, conserved",
"Tb927.8.3960": "hypothetical protein, conserved",
"Tb927.8.4000": "ABC1 family, putative",
"Tb927.8.4010": "flagellum-adhesion glycoprotein (fla1)",
"Tb927.8.4040": "endonuclease G, putative",
"Tb927.8.4050": "hypothetical protein, conserved",
"Tb927.8.4090": "endonuclease G, putative",
"Tb927.8.4150": "hypothetical protein, conserved",
"Tb927.8.4230": "hypothetical protein, conserved",
"Tb927.8.4240": "hypothetical protein, conserved",
"Tb927.8.4250": "hypothetical protein, conserved",
"Tb927.8.4330": "small GTP-binding protein Rab11 (RAB11)",
"Tb927.8.4380": "Archaic translocase of outer membrane 12 kDa subunit (ATOM12)",
"Tb927.8.4440": "mitochondrial carrier protein (MCP19)",
"Tb927.8.4470": "chaperone protein DNAj, putative",
"Tb927.8.4540": "PAB1-binding protein , putative (PBP1)",
"Tb927.8.4550": "hypothetical protein, conserved",
"Tb927.8.4580": "Tumour suppressor, Mitostatin, putative",
"Tb927.8.4610": "Ras-related protein RabX1 (RABX1)",
"Tb927.8.4640": "flagellar protofilament ribbon protein, putative",
"Tb927.8.4810": "prohibitin 1 (PHB1)",
"Tb927.8.4860": "hypothetical protein, conserved",
"Tb927.8.4920": "hypothetical protein, conserved",
"Tb927.8.4930": "hypothetical protein, conserved",
"Tb927.8.4950": "kinesin, putative",
"Tb927.8.4960": "hypothetical protein, conserved",
"Tb927.8.4970": "Paraflagellar rod protein 2 (PFR2)",
"Tb927.8.5100": "hypothetical protein, conserved",
"Tb927.8.5120": "cytochrome c",
"Tb927.8.5130": "hypothetical protein, conserved (POMP41)",
"Tb927.8.5140": "diacylglycerol kinase, putative",
"Tb927.8.5160": "hypothetical protein, conserved",
"Tb927.8.5200": "hypothetical protein, conserved",
"Tb927.8.5220": "hypothetical protein, conserved",
"Tb927.8.5280": "hypothetical protein, conserved",
"Tb927.8.5370": "hypothetical protein, conserved",
"Tb927.8.5380": "ubiquitin fold modifier protein, putative (UFM1)",
"Tb927.8.5420": "methyltransferase domain containing protein, putative",
"Tb927.8.5450": "amino acid permease 24 (AAT6)",
"Tb927.8.5460": "44 kDa calflagin, 44 kDa calcimedin, Flagellar calcium-binding protein 44 (Tb-44)",
"Tb927.8.5480": "hypothetical protein, conserved",
"Tb927.8.5540": "Iron-sulfur assembly protein 1 (Isa1)",
"Tb927.8.5560": "hypothetical protein, conserved",
"Tb927.8.560": "hypothetical protein, conserved",
"Tb927.8.5640": "hypothetical protein, conserved",
"Tb927.8.5660": "hypothetical protein, conserved",
"Tb927.8.5690": "KREPB8 (KREPB8)",
"Tb927.8.5790": "hypothetical protein",
"Tb927.8.580": "hypothetical protein, conserved",
"Tb927.8.5810": "mitochondrial carrier protein, mitochondrial carnitine/acylcarnitine carrier protein (MCP24)",
"Tb927.8.5860": "50S ribosomal protein L17, putative",
"Tb927.8.590": "carnitine O-palmitoyltransferase, putative",
"Tb927.8.6010": "hypothetical predicted multi-pass transmembrane protein",
"Tb927.8.6040": "hypothetical protein, conserved",
"Tb927.8.6050": "hypothetical protein, conserved",
"Tb927.8.6060": "2-amino-3-ketobutyrate coenzyme A ligase, putative, glycine acetyltransferase, putative",
"Tb927.8.6080": "Glycerophosphoryl diester phosphodiesterase family, putative (POMP42)",
"Tb927.8.6110": "3-hydroxy-3-methylglutaryl-CoA synthase, putative (HMGS)",
"Tb927.8.620": "KREPA3, RNA-editing complex protein MP42 (KREPA3)",
"Tb927.8.6240": "STOP axonemal protein",
"Tb927.8.630": "lipoate-protein ligase, putative, lipoyl ligase, putative, lipoyltransferase, putative, lipoate biosynthesis protein, putative",
"Tb927.8.6390": "lysophospholipase, putative, alpha/beta hydrolase, putative (TbLysoPLA)",
"Tb927.8.6400": "hypothetical protein, conserved",
"Tb927.8.6410": "short-chain dehydrogenase, putative",
"Tb927.8.6420": "beta-ketoacyl-ACP reductase 2 (KAR2)",
"Tb927.8.650": "cation-transporting ATPase, putative",
"Tb927.8.6520": "hypothetical protein, conserved",
"Tb927.8.6560": "SpoU rRNA Methylase family, putative",
"Tb927.8.6580": "succinate dehydrogenase flavoprotein, putative",
"Tb927.8.6590": "Outer mitochondrial membrane transport complex protein/Glutathione S-transferase, C-terminal domain containing protein, putative",
"Tb927.8.6600": "Glutathione S-transferase, C-terminal domain containing protein, putative",
"Tb927.8.6620": "hypothetical protein, conserved",
"Tb927.8.6640": "Male sterility protein, putative",
"Tb927.8.6750": "translationally controlled tumor protein (TCTP), putative",
"Tb927.8.680": "KREPA5 (KREPA5)",
"Tb927.8.6800": "hypothetical protein, conserved",
"Tb927.8.6820": "hypothetical protein, conserved",
"Tb927.8.6890": "hypothetical protein, conserved",
"Tb927.8.6960": "hypothetical protein, conserved",
"Tb927.8.6970": "3-methylcrotonyl-CoA carboxylase alpha subunit, putative",
"Tb927.8.700": "DNA repair and recombination helicase protein PIF7 (TbPIF7)",
"Tb927.8.7010": "chaperone protein DNAj, putative",
"Tb927.8.7040": "hypothetical protein, conserved",
"Tb927.8.7100": "acetyl-CoA carboxylase",
"Tb927.8.7120": "squalene synthase, putative",
"Tb927.8.7170": "inositol polyphosphate 1-phosphatase, putative",
"Tb927.8.7260": "kinetoplast-associated protein, putative",
"Tb927.8.7290": "Regulator of chromosome condensation (RCC1) repeat, putative",
"Tb927.8.7380": "dihydrolipoamide dehydrogenase, point mutation, acetoin dehydrogenase e3 component, putative",
"Tb927.8.7430": "ubiquinol-cytochrome c reductase, putative",
"Tb927.8.7530": "3,2-trans-enoyl-CoA isomerase, mitochondrial precursor, putative",
"Tb927.8.7600": "amino acid transporter, putative",
"Tb927.8.7730": "Sphingosine N-acyltransferase, putative, Ceramide synthase component Lag1/Lac1, putative, dihydroceramide synthase, putative (DHCS)",
"Tb927.8.7770": "aldehyde dehydrogenase family, putative",
"Tb927.8.7980": "Pyrophosphate-energized vacuolar membrane proton pump 2, putative, Vacuolar proton pyrophosphatase 2, putative (VP2)",
"Tb927.8.8020": "monoglyceride lipase, putative",
"Tb927.8.8170": "mitochondrial RNA binding protein 1, putative (MRB8170)",
"Tb927.8.8180": "mitochondrial RNA binding complex 1 subunit (MRB8180)",
"Tb927.8.8190": "hypothetical protein, conserved",
"Tb927.8.8300": "amino acid transporter, putative",
"Tb927.8.8360": "receptor-type adenylate cyclase GRESAG 4, putative",
"Tb927.8.890": "Ras-related protein Rab1A (Rab1A)",
"Tb927.8.920": "ubiquitin-conjugating enzyme E2, putative, ubiquitin-protein ligase, putative, ubiquitin carrier protein, putative (PEX4)",
"Tb927.8.940": "hypothetical protein, conserved",
"Tb927.9.10010": "chaperone protein DNAj, putative",
"Tb927.9.10070": "hypothetical protein, conserved",
"Tb927.9.10080": "phosphatidylserine decarboxylase, putative",
"Tb927.9.10160": "hypothetical protein, conserved",
"Tb927.9.10310": "mitochondrial carrier protein (MCP11)",
"Tb927.9.10400": "hypothetical protein, conserved",
"Tb927.9.10470": "hypothetical protein, conserved",
"Tb927.9.10500": "HAD hydrolase, TIGR01456 family/HAD hydrolase, family IIA, putative",
"Tb927.9.10520": "hypothetical protein, conserved",
"Tb927.9.10560": "hypothetical protein, conserved (POMP6)",
"Tb927.9.10580": "3-demethylubiquinone-9 3-methyltransferase, putative",
"Tb927.9.10640": "synaptojanin (N-terminal domain), putative",
"Tb927.9.10770": "Polyadenylate-binding protein 2 (Poly(A)-binding protein 2) (Poly(A)-binding protein II) (PABII) (Polyadenylate-binding nuclear protein 1) (Nuclear poly(A)-binding protein 1) (PABP2), putative (PABP2)",
"Tb927.9.11000": "small GTPase, putative, GTP-binding protein, putative (RAB7)",
"Tb927.9.11040": "mitochondrial carrier protein (MCP3)",
"Tb927.9.11120": "Chromosome passenger complex (CPC) protein INCENP N terminal, putative",
"Tb927.9.11220": "hypothetical protein, conserved",
"Tb927.9.11280": "unspecified product",
"Tb927.9.11350": "hypothetical protein, conserved",
"Tb927.9.11370": "hypothetical protein, conserved",
"Tb927.9.11510": "hypothetical protein, conserved",
"Tb927.9.11540": "hypothetical protein, conserved",
"Tb927.9.11580": "Gim5A protein, glycosomal membrane protein (gim5A)",
"Tb927.9.11660": "hypothetical protein, conserved",
"Tb927.9.11720": "Iron-sulfur cluster assembly protein",
"Tb927.9.11880": "hypothetical protein, conserved",
"Tb927.9.11900": "acyl transferase-like protein",
"Tb927.9.11910": "peroxidase, putative",
"Tb927.9.11940": "replication factor A protein 3, putative",
"Tb927.9.11970": "hypothetical protein, conserved",
"Tb927.9.12100": "Tetrapyrrole (Corrin/Porphyrin) Methylases, putative",
"Tb927.9.12120": "unspecified product",
"Tb927.9.12160": "hypothetical protein, conserved",
"Tb927.9.12480": "dynein light chain type 1, putative",
"Tb927.9.12500": "hypothetical protein, conserved (POMP7)",
"Tb927.9.12530": "DnaJ domain containing protein, putative",
"Tb927.9.12550": "glycerol kinase, glycosomal (glk1)",
"Tb927.9.12570": "glycerol kinase, glycosomal (glk1)",
"Tb927.9.12590": "glycerol kinase, glycosomal (glk1)",
"Tb927.9.12610": "glycerol kinase, glycosomal (glk1)",
"Tb927.9.12630": "glycerol kinase, glycosomal (glk1)",
"Tb927.9.12650": "ubiquitin-activating enzyme E1, putative (UBA2)",
"Tb927.9.12680": "hypothetical protein, conserved",
"Tb927.9.12730": "chaperone protein DNAj, putative",
"Tb927.9.12770": "hypothetical protein, conserved",
"Tb927.9.12850": "SpoU rRNA Methylase family, putative",
"Tb927.9.13010": "50S ribosome-binding GTPase, putative",
"Tb927.9.13040": "hypothetical protein, conserved",
"Tb927.9.13120": "hypothetical protein, conserved (POMP8)",
"Tb927.9.13200": "hypothetical protein",
"Tb927.9.13250": "NADPH--cytochrome P450 reductase, putative (CPR)",
"Tb927.9.13530": "hypothetical protein, conserved",
"Tb927.9.13540": "hypothetical protein, conserved",
"Tb927.9.13580": "hypothetical protein, conserved",
"Tb927.9.1360": "hypothetical protein, conserved",
"Tb927.9.13780": "hypothetical protein, conserved",
"Tb927.9.1380": "hypothetical protein, conserved",
"Tb927.9.13820": "kinetoplastid membrane protein KMP-11",
"Tb927.9.13880": "unspecified product",
"Tb927.9.13920": "kinetoplastid membrane protein KMP-11 (KMP-11)",
"Tb927.9.1400": "Monooxygenase, putative",
"Tb927.9.14050": "Lipase (class 3), putative",
"Tb927.9.14070": "short-chain dehydrogenase, putative",
"Tb927.9.14100": "hypothetical protein, conserved",
"Tb927.9.14160": "rieske iron-sulfur protein, mitochondrial precursor (RISP)",
"Tb927.9.1420": "hypothetical protein, conserved",
"Tb927.9.14200": "methyltransferase domain containing protein, putative",
"Tb927.9.14420": "cyclophilin-like protein, putative (PPIase)",
"Tb927.9.14990": "hypothetical protein, conserved",
"Tb927.9.15010": "hypothetical protein, conserved",
"Tb927.9.15090": "cytosolic coat protein, putative",
"Tb927.9.15230": "hypothetical protein, conserved",
"Tb927.9.15240": "ATP11 protein, putative",
"Tb927.9.15270": "hypothetical protein, conserved",
"Tb927.9.1530": "hypothetical protein, conserved",
"Tb927.9.15350": "hypothetical protein, conserved",
"Tb927.9.15360": "40S ribosomal protein S6, putative",
"Tb927.9.15380": "NADH-ubiquinone oxidoreductase complex I subunit, putative",
"Tb927.9.15460": "calcium motive p-type ATPase, putative",
"Tb927.9.1640": "hypothetical protein, conserved",
"Tb927.9.1650": "hypothetical protein, conserved",
"Tb927.9.1780": "sec1 family transport protein, putative (SLY1)",
"Tb927.9.1950": "hypothetical protein, conserved",
"Tb927.9.1960": "nitrilase, putative",
"Tb927.9.2050": "membrane protein YIP1, putative",
"Tb927.9.2100": "hypothetical protein, conserved",
"Tb927.9.2270": "hypothetical protein, conserved",
"Tb927.9.2320": "methyltransferase domain containing protein, putative (POMP1)",
"Tb927.9.2450": "electon transport protein SCO1/SCO2, putative",
"Tb927.9.2470": "nucleolar protein (NOP86)",
"Tb927.9.2560": "TLC domain containing protein, putative",
"Tb927.9.2620": "Domain of unknown function (DUF4460), putative",
"Tb927.9.2650": "hypothetical protein, conserved (POMP2)",
"Tb927.9.2670": "engulfment and cell motility domain 2, putative (POMP3)",
"Tb927.9.2700": "hypothetical protein, conserved",
"Tb927.9.3100": "hypothetical protein, conserved",
"Tb927.9.3170": "cytochrome oxidase subunit V (COXV)",
"Tb927.9.3340": "hypothetical protein, conserved",
"Tb927.9.3350": "pseudouridylate synthase, putative",
"Tb927.9.3370": "thioredoxin (trx)",
"Tb927.9.3470": "low molecular weight protein tyrosine phosphatase, putative",
"Tb927.9.3540": "hypothetical protein, conserved",
"Tb927.9.3590": "monothiol glutaredoxin, putative",
"Tb927.9.3600": "Glycosyl transferase family 11, putative",
"Tb927.9.3640": "hypothetical protein, conserved",
"Tb927.9.3670": "Endonuclease/Exonuclease/phosphatase family, putative",
"Tb927.9.3780": "Uncharacterised protein family (UPF0041), putative",
"Tb927.9.3990": "ribosomal protein S7, putative",
"Tb927.9.4190": "fatty acyl CoA syntetase 1 (ACS1)",
"Tb927.9.4210": "fatty acyl CoA synthetase 3 (ACS3)",
"Tb927.9.4230": "fatty acyl CoA synthetase 4 (ACS4)",
"Tb927.9.4310": "tricarboxylate carrier, putative",
"Tb927.9.4360": "KREL1 (KREL1)",
"Tb927.9.4440": "hypothetical protein, conserved",
"Tb927.9.4500": "heat shock protein, putative, HSP70-like protein",
"Tb927.9.4520": "metallo-peptidase, Clan ME, Family M16, Mitochondrial-processing peptidase subunit beta (MPPB)",
"Tb927.9.4620": "ubiquitin-activating enzyme E1, putative",
"Tb927.9.4680": "ATP-dependent DEAD box helicase, putative, eukaryotic initiation factor 4a, putative",
"Tb927.9.4810": "hypothetical protein, conserved",
"Tb927.9.4950": "radical SAM domain protein, putative",
"Tb927.9.5280": "unspecified product",
"Tb927.9.5370": "2OG-Fe(II) oxygenase superfamily, putative",
"Tb927.9.5590": "DNA topoisomerase ii (TOP2)",
"Tb927.9.5630": "KREPB7 (KREPB7)",
"Tb927.9.5650": "E2-like ubiquitin-conjugation enzyme, putative (UFC1)",
"Tb927.9.5690": "60S acidic ribosomal protein, putative",
"Tb927.9.5750": "unspecified product",
"Tb927.9.5890": "solanesyl-diphosphate synthase, putative",
"Tb927.9.5900": "glutamate dehydrogenase (GDH)",
"Tb927.9.5960": "succinate dehydrogenase, putative",
"Tb927.9.6040": "E1-like ubiquitin-activating enzyme, putative (UBA5)",
"Tb927.9.6060": "2Fe-2S iron-sulfur cluster binding domain containing protein, putative (POMP4)",
"Tb927.9.6090": "PTP1-interacting protein, 39 kDa",
"Tb927.9.6100": "TFIIF-stimulated CTD phosphatase, putative",
"Tb927.9.6170": "unspecified product",
"Tb927.9.6230": "unspecified product",
"Tb927.9.6270": "hypothetical protein, conserved",
"Tb927.9.6310": "ABC transporter, putative",
"Tb927.9.6410": "hypothetical protein, conserved",
"Tb927.9.6420": "hypothetical protein, conserved",
"Tb927.9.6460": "hypothetical protein, conserved",
"Tb927.9.6510": "hypothetical protein, conserved",
"Tb927.9.6620": "hypothetical protein, conserved",
"Tb927.9.6700": "hypothetical protein, unlikely",
"Tb927.9.6710": "flavoprotein monooxygenase, putative",
"Tb927.9.6820": "hypothetical protein, unlikely",
"Tb927.9.6910": "hypothetical protein, conserved",
"Tb927.9.6920": "hypothetical protein, conserved",
"Tb927.9.6980": "hypothetical protein, unlikely",
"Tb927.9.7020": "hypothetical protein, conserved",
"Tb927.9.7110": "GRAM domain containing protein, putative",
"Tb927.9.7170": "Mitochondrial 39-S ribosomal protein L47 (MRP-L47), putative",
"Tb927.9.7190": "oxidoreductase, putative",
"Tb927.9.7200": "hypothetical protein, conserved",
"Tb927.9.7210": "mitochondrial exoribonuclease DSS-1, exoribonuclease DSS-1 (EMBL:AY233297)",
"Tb927.9.7260": "phytanoyl-CoA dioxygenase (PhyH), putative",
"Tb927.9.7470": "purine nucleoside transporter (NT10)",
"Tb927.9.7770": "spermidine synthase (SpSyn)",
"Tb927.9.7800": "hypothetical protein, conserved",
"Tb927.9.7830": "hypothetical protein, conserved",
"Tb927.9.7890": "hypothetical protein, conserved",
"Tb927.9.7920": "hypothetical protein, conserved",
"Tb927.9.7980": "hypothetical protein, conserved",
"Tb927.9.8030": "hypothetical protein, conserved",
"Tb927.9.8070": "60S ribosomal protein L10, putative, QM-like protein (QM)",
"Tb927.9.8160": "chaperone protein DNAj, putative",
"Tb927.9.8210": "Peptide deformylase 2 (PDF2)",
"Tb927.9.8260": "rhomboid-like protein, serine peptidase, Clan S- , family S54, putative",
"Tb927.9.8290": "hypothetical protein, conserved",
"Tb927.9.8410": "chaperone protein DNAj, putative",
"Tb927.9.8680": "cytochrome c oxidase assembly factor, putative",
"Tb927.9.8710": "hypothetical protein, conserved",
"Tb927.9.8950": "CAAX prenyl protease 1, putative, metallo- peptidase, Clan M- Family M48 (FACE1)",
"Tb927.9.9080": "hypothetical protein, conserved (POMP5)",
"Tb927.9.9100": "hypothetical protein, conserved",
"Tb927.9.9150": "GTP-binding protein, putative",
"Tb927.9.9310": "hypothetical protein, conserved",
"Tb927.9.9550": "hypothetical protein, conserved",
"Tb927.9.9630": "TPR repeat/Tetratricopeptide repeat, putative",
"Tb927.9.9660": "Archaic translocase outer mitochondrial membrane 40, Mitochondrial import receptor subunit ATOM40 (ATOM40)",
"Tb927.9.9700": "hypothetical protein, conserved",
"Tb927.9.9710": "Histidine phosphatase superfamily (branch 1), putative",
"Tb927.9.9740": "AMP deaminase, putative",
"Tb927.9.9810": "hypothetical protein, conserved",
"Tb927.9.9840": "lipoic acid containing carrier protein, putative (GCVH)",
"Tb927.9.9860": "Hsp70 protein, putative",
"Tb927.9.9940": "PACRGB"
} | [
"tomas.skalicky@seznam.cz"
] | tomas.skalicky@seznam.cz |
46f10627d989389d4136ee96d1c3b804dbc77652 | a4447a60b67798cd90cbf6c99dc18b7465ecbfae | /customize_offerings.py | 0fdb8acd467de667c8fe2bba5ed3b84b874d7e19 | [] | no_license | gowshalinirajalingam/K-Means-Clustering | a8320c3a7e1c14b28e2474c2bc275071abdcbae9 | 15c281a7ee7cfc57620319fac33c9a1e1603cb12 | refs/heads/master | 2020-05-01T16:36:01.598113 | 2019-03-26T11:44:05 | 2019-03-26T11:44:05 | 177,576,274 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,266 | py | import pandas as pd
# Exploratory k-means clustering of customer survey data
# ('customize_offerings.csv'): standardise the features, choose the number
# of clusters with the elbow method, then fit the final model and plot it.
# NOTE(review): written notebook-style -- bare expressions such as
# `clusters_df[0:10]` and `centers` only display output in a REPL.
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sn
data = pd.read_csv('customize_offerings.csv')
data.info()
x=data.values
# Scale the data so each feature contributes comparably to the distances.
from sklearn.preprocessing import StandardScaler
scaler=StandardScaler()
x_scaled=scaler.fit_transform(x)
# Figure out the optimal number of clusters: fit k-means for k = 1..9 and
# record the within-cluster sum of squares (inertia) for each k.
from sklearn.cluster import KMeans
cluster_range=range(1,10)
cluster_errors=[]
for num_clusters in cluster_range:
    clusters=KMeans(num_clusters)
    clusters.fit(x_scaled)
    cluster_errors.append(clusters.inertia_)
clusters_df=pd.DataFrame({"num_clusters":cluster_range,"cluster_errors":cluster_errors})
clusters_df[0:10]
# Elbow plot: WSS (inertia) against the number of clusters.
plt.figure(figsize=(10,7))
plt.plot(clusters_df.num_clusters,clusters_df.cluster_errors,marker="o")
plt.xlabel("Number of clusters")
plt.ylabel("wss value")
# The elbow turning point is at cluster 4, so use k=4 for the final model.
kmeans=KMeans(n_clusters=4)
kmeans.fit(x_scaled)
y_kmeans=kmeans.predict(x_scaled) # cluster label predicted for each row
centers = kmeans.cluster_centers_
centers
plt.scatter(x_scaled[:, 0], x_scaled[:, 1], c=y_kmeans) # c= colours points by cluster label
plt.scatter(centers[:, 0], centers[:, 1], c='black', s=200, alpha=0.5)
plt.xlabel('Premium savings are important')
plt.ylabel('Agent not important')
| [
"gowshalinirajalingam@gmail.com"
] | gowshalinirajalingam@gmail.com |
4fe96ae1f1d4635374a86f592f2827175c26e759 | b4e257546367368b2a980366b53796a8176939b6 | /ShoppingWeb/asgi.py | 1e51a5d441284cfb2c9cf865386508bf98806c69 | [] | no_license | faizan352/Eshop-2021 | 548b5ee7eff6ccf3cd5fc221ff98920397104cc9 | ee78ae2ab7c9660a98ac87507ff193cd2f9da1f8 | refs/heads/master | 2023-07-01T02:30:41.645754 | 2021-08-04T08:02:07 | 2021-08-04T08:02:07 | 392,601,365 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | """
ASGI config for ShoppingWeb project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# The settings module must be configured before get_asgi_application()
# imports and initialises Django.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ShoppingWeb.settings')
application = get_asgi_application()
| [
"faizanm352@gmail.com"
] | faizanm352@gmail.com |
9704f2152ae475830dc15c917f3fe61bda494b73 | 55ceefc747e19cdf853e329dba06723a44a42623 | /_CodeTopics/LeetCode/201-400/000387/000387.py | f9281d3ccb22db12c9f847e92d1c3e8f262be557 | [] | no_license | BIAOXYZ/variousCodes | 6c04f3e257dbf87cbe73c98c72aaa384fc033690 | ee59b82125f100970c842d5e1245287c484d6649 | refs/heads/master | 2023-09-04T10:01:31.998311 | 2023-08-26T19:44:39 | 2023-08-26T19:44:39 | 152,967,312 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 793 | py | class Solution(object):
def firstUniqChar(self, s):
"""
:type s: str
:rtype: int
"""
def str_to_dict(s):
dic = {}
for ch in s:
if ch in dic:
dic[ch] += 1
else:
dic[ch] = 1
return dic
dic = str_to_dict(s)
for i in range(len(s)):
if dic[s[i]] == 1:
return i
return -1
"""
https://leetcode-cn.com/submissions/detail/133018753/
104 / 104 个通过测试用例
状态:通过
执行用时: 108 ms
内存消耗: 13.8 MB
执行用时:108 ms, 在所有 Python 提交中击败了79.54%的用户
内存消耗:13.8 MB, 在所有 Python 提交中击败了21.14%的用户
"""
| [
"noreply@github.com"
] | noreply@github.com |
117aa7ef39c815b04a798008549fece016b10341 | 7989b18da1fbbe4c78b6b858683f56fd8017331a | /API_Review/review/serializers.py | e88098491b4dde917ab47c4b0b8e1ae535b0c231 | [] | no_license | Skladkansaeng/ReviewTutorial | 0d9e6f42a7d43b10996a438b19c44f8229127366 | d27159ec0d09303a7561ee4a2cd5dec77569a4f2 | refs/heads/master | 2020-05-31T15:28:53.181395 | 2019-06-05T08:47:15 | 2019-06-05T08:47:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | from django.conf import settings
from rest_framework import serializers
from .models import Review
class ReviewSerializer(serializers.Serializer):
user = serializers.CharField()
score = serializers.IntegerField()
review = serializers.CharField(max_length=255)
| [
"noreply@github.com"
] | noreply@github.com |
65d730747c16c944a0023b252b6f89c390b3257a | 1d496285bc93a6d30258868a0cb0af7d2a09f996 | /problem16/threeSumClosest.py | 36dbcd0e03cf9584af5537cc1aee30cbfd59f8ac | [] | no_license | hongtaozhang1/MyLeetCode | 5bb101bee65d7bc6c05e277feaa716b856df5fe5 | 315e85b13d80a861f1cf263e164bc30a4726e77e | refs/heads/master | 2020-05-02T19:36:04.225847 | 2015-08-23T23:15:14 | 2015-08-23T23:15:14 | 41,015,080 | 0 | 0 | null | 2015-08-19T05:35:22 | 2015-08-19T05:26:07 | Python | UTF-8 | Python | false | false | 664 | py | __author__ = 'hongtao'
class Solution:
    def threeSumClosest(self, nums, target):
        """Return the sum of the three numbers in nums that is closest to target.

        Fixes over the previous version: the caller's list is no longer
        mutated (we sort a copy), the magic sentinel 10000000000 is replaced
        by float("inf"), and the two mutually exclusive pointer moves are an
        if/elif instead of two separate tests.

        Algorithm: anchor each index in turn and sweep the remaining suffix
        with two pointers -- a too-small sum advances the left pointer, a
        too-large sum retreats the right one.  O(n^2) time after the
        O(n log n) sort; O(n) space for the sorted copy.

        :type nums: List[int]
        :type target: int
        :rtype: int
        """
        ordered = sorted(nums)        # work on a copy; do not mutate the input
        best_diff = float("inf")      # target minus the closest sum seen so far
        for i in range(len(ordered) - 2):
            lo, hi = i + 1, len(ordered) - 1
            while lo < hi:
                diff = target - ordered[i] - ordered[lo] - ordered[hi]
                if diff == 0:
                    return target     # exact hit cannot be improved upon
                if diff > 0:
                    lo += 1           # current sum too small -> grow it
                else:
                    hi -= 1           # current sum too large -> shrink it
                if abs(diff) < abs(best_diff):
                    best_diff = diff
        return target - best_diff
solution = Solution()
print solution.threeSumClosest([-1, 2, 1, -4], 1) | [
"zhang490@indiana.edu"
] | zhang490@indiana.edu |
05a469cc480e500bf829d0d976976b2b96478216 | 2d4af29250dca8c72b74e190e74d92f1467120a0 | /TaobaoSdk/Response/SimbaAdgroupOnlineitemsGetResponse.py | f660d7d21eb7fabf204fd071e5a8506e12f10f55 | [] | no_license | maimiaolmc/TaobaoOpenPythonSDK | 2c671be93c40cf487c0d7d644479ba7e1043004c | d349aa8ed6229ce6d76a09f279a0896a0f8075b3 | refs/heads/master | 2020-04-06T03:52:46.585927 | 2014-06-09T08:58:27 | 2014-06-09T08:58:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,175 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim: set ts=4 sts=4 sw=4 et:
## @brief 获取用户上架在线销售的全部宝贝
# @author wuliang@maimiaotech.com
# @date 2013-03-07 19:54:48
# @version: 0.0.0
from datetime import datetime
import os
import sys
import time
# Resolve a JSON encode function, preferring demjson, then simplejson, then
# the stdlib json module.  Whatever is found is bound to _jsonEnode; if no
# JSON library is importable at all, module import fails loudly.
_jsonEnode = None
try:
    import demjson
    _jsonEnode = demjson.encode
except Exception:
    try:
        import simplejson
    except Exception:
        try:
            import json
        except Exception:
            raise Exception("Can not import any json library")
        else:
            _jsonEnode = json.dumps
    else:
        _jsonEnode = simplejson.dumps
def __getCurrentPath():
    """Return the normalized absolute directory containing this module."""
    here = os.path.realpath(__file__)
    return os.path.normpath(os.path.join(here, os.path.pardir))
__parentPath = os.path.normpath(os.path.join(__getCurrentPath(), os.path.pardir))
if __parentPath not in sys.path:
sys.path.insert(0, __parentPath)
from Domain.SimbaItemPartition import SimbaItemPartition
## @brief <SPAN style="font-size:16px; font-family:'宋体','Times New Roman',Georgia,Serif;">Response: 获取用户上架在线销售的全部宝贝</SPAN>
# <UL>
# </UL>
class SimbaAdgroupOnlineitemsGetResponse(object):
    """Response wrapper for taobao.simba.adgroup.onlineitems.get.

    Holds the raw transport status/body, the standard Taobao error fields
    (code/msg/sub_code/sub_msg) and the typed payload ``page_item`` -- a
    paginated SimbaItemPartition of the seller's items on sale.

    NOTE: this module targets Python 2 (uses ``basestring`` and
    ``dict.has_key``), matching the rest of the SDK.
    """

    def __init__(self, kargs=dict()):
        # The mutable default is only ever read, never mutated, so it is safe.
        super(self.__class__, self).__init__()
        # Raw request status information (dict), e.g. HTTP-level state.
        self.responseStatus = None
        # Raw response body text (str).
        self.responseBody = None
        # Taobao error fields; all stay None on success (see isSuccess()).
        self.code = None
        self.msg = None
        self.sub_code = None
        self.sub_msg = None
        # Typed payload: paginated items, deserialized to SimbaItemPartition.
        self.page_item = None
        self.__init(kargs)

    def isSuccess(self):
        """Return True when the API reported neither an error code nor a sub-code."""
        return self.code is None and self.sub_code is None

    def _newInstance(self, name, value):
        """Convert the raw JSON value for property *name* into typed object(s).

        Array-valued properties arrive wrapped in a single-key dict
        (e.g. {"item": [...]}) and are unwrapped before element conversion.
        """
        types = self._getPropertyType(name)
        propertyType = types[0]
        isArray = types[1]
        if propertyType == bool:
            if isArray:
                if not value:
                    return []
                return [x for x in value[value.keys()[0]]]
            else:
                return value
        elif propertyType == datetime:
            fmt = "%Y-%m-%d %H:%M:%S"
            if isArray:
                if not value:
                    return []
                return [datetime.strptime(x, fmt) for x in value[value.keys()[0]]]
            else:
                return datetime.strptime(value, fmt)
        elif propertyType == str:
            if isArray:
                if not value:
                    return []
                return [x for x in value[value.keys()[0]]]
            else:
                # Some APIs (e.g. taobao.simba.rpt.adgroupbase.get) return
                # structured data for nominally-string fields; re-encode such
                # values back into a JSON string.
                if not isinstance(value, basestring):
                    return _jsonEnode(value)
                return value
        else:
            if isArray:
                if not value:
                    return []
                return [propertyType(x) for x in value[value.keys()[0]]]
            else:
                return propertyType(value)

    def _getPropertyType(self, name):
        """Return (python_type, is_array) for the named response property."""
        properties = {
            "page_item": "SimbaItemPartition",
        }
        levels = {
            "page_item": "Object",
        }
        nameType = properties[name]
        pythonType = None
        if nameType == "Number":
            pythonType = int
        elif nameType == "String":
            pythonType = str
        elif nameType == 'Boolean':
            pythonType = bool
        elif nameType == "Date":
            pythonType = datetime
        elif nameType == 'Field List':
            # BUG FIX: this branch previously used '==' (a no-op comparison)
            # instead of assignment, leaving pythonType as None for
            # 'Field List' typed properties.
            pythonType = str
        elif nameType == 'Price':
            pythonType = float
        elif nameType == 'byte[]':
            pythonType = str
        else:
            # Domain objects are resolved dynamically from the Domain package.
            pythonType = getattr(sys.modules["Domain.%s" % nameType], nameType)
        # Array-level properties come back as lists; everything else is scalar.
        level = levels[name]
        if "Array" in level:
            return (pythonType, True)
        else:
            return (pythonType, False)

    def __init(self, kargs):
        """Populate the typed fields from the raw response dict *kargs*."""
        if kargs.has_key("page_item"):
            self.page_item = self._newInstance("page_item", kargs["page_item"])
        if kargs.has_key("code"):
            self.code = kargs["code"]
        if kargs.has_key("msg"):
            self.msg = kargs["msg"]
        if kargs.has_key("sub_code"):
            self.sub_code = kargs["sub_code"]
        if kargs.has_key("sub_msg"):
            self.sub_msg = kargs["sub_msg"]
| [
"liyangmin@maimiaotech.com"
] | liyangmin@maimiaotech.com |
8a0d027cde859b98af687b1771f43aed9411ad40 | cd8af225356bd5bd97dbd032bb9001b43281b579 | /in-pr/aaaaaaaaaaa.py | dfcbe8defb2dd1cbc4995c369f5278d41dba3118 | [] | no_license | Forgetaccount/pythonfile | e8aca1ebd7b87746c11646562fe12be8d0ff428f | d94aacb6ddff44c8bf5030e4dd743805db10573f | refs/heads/master | 2022-12-19T23:28:58.650432 | 2020-09-18T02:13:39 | 2020-09-18T02:13:39 | 296,490,081 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,558 | py | from sklearn.model_selection import KFold
import numpy as np
from sklearn.model_selection import train_test_split
import math
f=open('node_num_n.txt','r')#1
node_num={}#node---num
node_n={}#node---asso
for line in f.readlines():
line=line.strip()
s=line.split()
x=s[0]
pinjie=s
node_num[s[0]]=s[1]
del(pinjie[0])
del(pinjie[0])
node_n[x]=pinjie
f.close()
f1=open('node_class.txt','r')#2
node_c={}#node---class
c_num={}#node-----num0--2188
num_c={}#所有数字对应node num0-2188----node
ll=0
for l in f1.readlines():
line=l.strip()
s=line.split()
node_c[s[0]]=s[1]
c_num[s[0]]=ll
num_c[ll] = s[0]
ll=ll+1
class_cl=['Technology','Healthcare','Financial','Energy','BasicMaterials',
'CapitalGoods','Utilities','ConsumerNonCyclical','Transportation',
'Conglomerates','ConsumerCyclical','Services']
weight={}
ff=open('pr-weight.txt','r')#2
for line in ff.readlines():
line=line.strip()
s=line.split(",")
pinjie=s[0]+","+s[1]
weight[pinjie] = s[2]
ff.close()
jj = 0
zl = 0
sum = 0
accu = 0
for jj in range(1):
v4 = open('v4.txt', 'w')
v5 = open('v5.txt', 'w')
v6 = open('v6.txt', 'w')
v7 = open('v7.txt', 'w')
tw = {}
vw = {}
print(jj)
wvrn = {}
train_data = []
valid_data = []
# 每个类按比例抽取
for c in class_cl:
list_class = []
class_dict = {} # 数字对应node的字典
i = 0
ii = 0
for i in range(2189): # 7 实体的个数-1 0--1440
no = num_c[i] # num对应的node
if node_c[no] == c: # node对应的class是否为该类
class_dict[ii] = no # 该类 num_update=node
list_class.append(no) # 每类的个数
#v7.write(str(ii) + " " + no + " " + node_c[no] + " " + node_num[no] + "\n")
ii = ii + 1
kf = KFold(n_splits=5, shuffle=True)
for train, valid in kf.split(list_class):
for k in train:
no = class_dict[k]
train_data.append(no)
#v4.write(str(k) + " " + no + " " + node_c[no] + " " + node_num[no] + "\n")
for k in valid:
no = class_dict[k]
valid_data.append(no)
#v6.write(str(k) + " " + no + " " + node_c[no] + " " + node_num[no] + "\n")
break
for key in class_cl:
for jk in valid_data:
vw[jk] = 0.5
val = str(vw[jk]) + "," +key
v7.write(jk + " " + str(vw[jk]) + " " + node_c[jk] + "\n")
for kk in train_data:
if node_c[kk] == key:
tw[kk] = 1
else:
tw[kk] = 0
v4.write(kk+" "+str(tw[kk])+" "+node_c[kk]+"\n")
for bianli in range(3):#5次迭代
aa = 0
for j in valid_data: # 每个节点对每一类的P值
v5.write(key+"\n")
ww = 0
ww2 = 0
asso = node_n[j]
for i in asso:
s1 = j + "," + i
s2 = i + "," + j
wei = 0
if s1 in weight:
wei = weight[s1]
v5.write(s1+" ")
else:
wei = weight[s2]
v5.write(s2 + " ")
if i in train_data:
ww2 = ww2 + float(wei)
v5.write("222train-weight-"+" "+str(i)+"关系节点i的类别:"+node_c[i]+" "+str(ww2)+"\n")
if node_c[i] == key:
ww = ww + float(wei) #* tw[i]
v5.write("wwtrain-weight-"+" "+str(i)+"关系节点i的类别:"+node_c[i]+" "+str(ww)+"\n")
if i in valid_data:
ww = ww + float(wei) * vw[i]
ww2 = ww2 + float(wei)
v5.write("222valid-weight-" + " " + str(i) + " ww2 :" + str(ww2) + "\n")
v5.write("wwvalid-weight-" + " " + str(i) + " ww: " + str(ww) + "\n")
v5.write(str(asso)+"\n")
if ww2 == 0:
aa=aa+1
continue
pvalue = ww / ww2
v5.write(j + " 属于该类的概率:" + key +" 真实类别:"+node_c[j]+ " " + str(ww)+" 所有的权重和:"+str(ww2)+"属于该类的概率:"+str(pvalue) + "\n")
vw[j] = ww / ww2
v5.write(str(j)+" 新的vw "+str(vw[j])+"\n" + "\n")
print(vw)
for k in vw:
if k in wvrn:
q = wvrn[k]
qq=q.split(",")
qq1 = qq[0]
if vw[k] > float(qq1):
#print(vw[k])
#print(q)
kagi = str(vw[k]) + "," + key
wvrn[k] = kagi
else:
wvrn[k]=str(vw[k]) + "," + key
#print(vw[k])
wv =open('www.txt','w')
zl = 0
fl = 0
for p in vw:
s=wvrn[p]
cla=s.split(",")
cla_c=cla[1]
sum=sum+1
if node_num[p]==0:
continue
if cla_c==node_c[p]:
zl=zl+1
else:
fl=fl+1
wv.write(str(p) + " " + s + " "+node_c[str(p)]+ "\n")
print(zl)
print(fl)
print(zl/sum)
accu=accu+zl/(zl+fl)
# 存储 节点,vw最大的值和类别,两个值都需要替换
print(accu/30)
| [
"yukito9@163.com"
] | yukito9@163.com |
68e4482c14a3dab16659aa7b39e7e1d5f4c639ed | edd1adb88112045d16d3e6417117d45ceed4a634 | /classical/tidybot-opt14-strips/api.py | fb141663dc9a4e046bd1d3dc18576e2df06bd7ef | [] | no_license | AI-Planning/classical-domains | 26de25bf23622f95c877960c1d52f444922d8737 | 4bd0b42d89ea02bd38af6f93cf20a0ab0cbda9d9 | refs/heads/main | 2023-04-27T07:55:55.832869 | 2023-03-29T01:46:11 | 2023-03-29T01:46:11 | 253,298,999 | 24 | 12 | null | 2023-04-18T01:45:39 | 2020-04-05T18:02:53 | PDDL | UTF-8 | Python | false | false | 2,822 | py | domains = [
{'description': 'The Tidybot domain models a household cleaning task, in which one or more robots must pick up a set of objects and put them into goal locations. The world is structured as a 2d grid, divided into navigable locations and surfaces on which objects may lie. Robots have a gripper, which moves relative to the robot, up to some maximum radius. Existing objects block the gripper, so that it may be necessary to move one object out of the way to put another one down. Robots can carry one object at a time in the gripper, but may also make use of a cart, that can hold multiple objects. The instance generator creates worlds that contain rectangular surfaces ("tables"), as well as U-shaped enclosures ("cupboards"), which are the goal locations of objects.',
'ipc': '2014',
'name': 'tidybot',
'problems': [('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p01.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p02.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p03.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p04.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p05.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p06.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p07.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p08.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p09.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p10.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p11.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p12.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p13.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p14.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p15.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p16.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p17.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p18.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p19.pddl'),
('tidybot-opt14-strips/domain.pddl',
'tidybot-opt14-strips/p20.pddl')]}
] | [
"christian.muise@gmail.com"
] | christian.muise@gmail.com |
86232a9f6b590d5de4eed9f2c2d9903e9cba69bc | 6f2bb5f2e628d1877f5c91d5f523837be96269cc | /users/models.py | deb8b8f802d03b0140d80d5ea45ab5e563151fac | [] | no_license | boryskuczkowski/django-react-tut | 54efa0c08e60af4efcbed617f4bf54698322cf27 | 19d3e47f22afe969f63cd87f16d9843816bd1666 | refs/heads/master | 2022-12-14T15:00:01.051710 | 2018-12-24T21:40:33 | 2018-12-24T21:40:33 | 163,010,578 | 0 | 0 | null | 2022-12-08T03:00:23 | 2018-12-24T17:11:54 | JavaScript | UTF-8 | Python | false | false | 418 | py | from django.db import models
from django.contrib.auth.models import AbstractUser
class User(AbstractUser):
    """Custom auth user extended with simple role flags."""
    # Role flags: new accounts default to students; teacher accounts are
    # expected to set is_teacher instead.  NOTE(review): nothing here prevents
    # both flags being True at once -- confirm whether that is intended.
    is_student = models.BooleanField(default=True)
    is_teacher = models.BooleanField(default=False)
    def __str__(self):
        """Use the login name wherever the user is displayed."""
        return self.username
class Student(models.Model):
    """Student profile attached one-to-one to a User account."""
    # Cascade: deleting the User also deletes this profile row.
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    def __str__(self):
        """Display the student as the linked account's username."""
        return self.user.username
| [
"boryskuczkowski@gmail.com"
] | boryskuczkowski@gmail.com |
32751f05ccc8c3974aaefa29038d9385903ee1be | 1f237c3fde884789a8ed82ee877620c45c7efa0a | /2021-2-6/test.py | ea24c1cee5282e4aeb277058fd3e6a42d5814b4c | [] | no_license | shenxuehao/A-C-language-novice | 302cc708b78a31edd1a7870c35b1683692ca81d8 | af1a963d6e7857685f9ae3d846a286317c8a6763 | refs/heads/master | 2023-04-08T15:41:42.809758 | 2021-04-12T02:45:11 | 2021-04-12T02:45:11 | 303,631,938 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 708 | py | import multiprocessing
import time
# Dance task: print a progress message five times, pausing 0.2s between prints.
def dance():
    remaining = 5
    while remaining > 0:
        print("跳舞中...")
        time.sleep(0.2)
        remaining -= 1
# Sing task: print a progress message five times, pausing 0.2s between prints.
def sing():
    count = 0
    while count < 5:
        print("唱歌中...")
        time.sleep(0.2)
        count += 1
if __name__ == '__main__':
    # Create the child process that runs the dance task.
    # group: the process group; currently this must always be None.
    # target: the callable the child process will execute (function/method).
    # name: the process name; defaults to Process-1, Process-2, ...
    dance_process = multiprocessing.Process(target=dance, name="myprocess1")
    sing_process = multiprocessing.Process(target=sing)
    # Start both children so the two tasks run concurrently.
    dance_process.start()
    sing_process.start()
"2398444863@qq.com"
] | 2398444863@qq.com |
2b96a882466a0091c2c06f2b655f16faae4ba04a | c30e9b18ad1c3c08cc44b03bef94fbea15919724 | /emojilibrary.py | f92e8f743d873c90db247eb1f10ca04fb0a8cc2a | [] | no_license | jlollis/ranmoji | c335f055684fa67f2394ac2ba73467b47d473755 | 6996ed4fb65b9d88197726a220ecc82d9ed359a2 | refs/heads/master | 2020-03-26T10:32:49.593383 | 2016-10-29T23:49:10 | 2016-10-29T23:49:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,042 | py | # Thanks to the emoji library from @chazgiese found in emojigen
# https://github.com/chazgiese/EmojiGen/blob/master/emojigen.js
# as a part of the EmojiGen project, https://github.com/chazgiese/EmojiGen
emojis = [
"😀", "😬", "😁", "😂", "😃",
"😄", "😅", "😆", "😇", "😉",
"😊", "🙂", "🙃", "☺️", "😋",
"😌", "😍", "😘", "😗", "😙",
"😚", "😜", "😝", "😛", "🤑",
"🤓", "😎", "🤗", "😏", "😶",
"😐", "😑", "😒", "🙄", "🤔",
"😳", "😞", "😟", "😠", "😡",
"😔", "😕", "🙁", "☹️", "😣",
"😖", "😫", "😩", "😤", "😮",
"😱", "😨", "😰", "😯", "😦",
"😧", "😢", "😥", "😪", "😓",
"😭", "😵", "😲", "🤐", "😷",
"🤒", "🤕", "😴", "💤", "💩",
"😈", "👿", "👹", "👺", "💀",
"👻", "👽", "🤖", "😺", "😸",
"😹", "😻", "😼", "😽", "🙀",
"😿", "😾", "🙌", "👏", "👋",
"👍", "👎", "👊", "✊", "✌️",
"👌", "✋", "👐", "💪", "🙏",
"☝️", "👆", "👇", "👈", "👉",
"🖕", "🖐", "🤘", "🖖", "✍️",
"💅", "👄", "👅", "👂", "👃",
"👁", "👀", "👤", "👥", "🗣",
"👶", "👦", "👧", "👨", "👩",
"👱", "👴", "👵", "👲", "👳",
"👮", "👷", "💂", "🕵", "🎅",
"👼", "👸", "👰", "🚶", "🏃",
"💃", "👯", "👫", "👬", "👭",
"🙇", "💁", "🙅", "🙆", "🙋",
"🙎", "🙍", "💇", "💆", "💑",
"👩‍❤️‍👩",
"👨‍❤️‍👨", "💏",
"👩‍❤️‍💋‍👩",
"👨‍❤️‍💋‍👨",
"👪", "👨‍👩‍👧",
"👨‍👩‍👧‍👦",
"👨‍👩‍👦‍👦",
"👨‍👩‍👦‍👦",
"👨‍👩‍👧‍👧",
"👩‍👩‍👦",
"👩‍👩‍👧",
"👩‍👩‍👧‍👦",
"👩‍👩‍👦‍👦",
"👩‍👩‍👧‍👧",
"👨‍👨‍👦",
"👨‍👨‍👧",
"👨‍👨‍👧‍👦",
"👨‍👨‍👦‍👦",
"👨‍👨‍👧‍👧",
"👚", "👕", "👖", "👔", "👗",
"👙", "👘", "💄", "💋", "👣",
"👠", "👡", "👢", "👞", "👟",
"👒", "🎩", "⛑", "🎓", "👑",
"🎒", "👝", "👛", "👜", "💼",
"👓", "🕶", "💍", "🌂",
"🐶", "🐱", "🐭", "🐹", "🐰",
"🐻", "🐼", "🐨", "🐯", "🦁",
"🐮", "🐷", "🐽", "🐸", "🐙",
"🐵", "🙈", "🙉", "🙊", "🐒",
"🐔", "🐧", "🐦", "🐤", "🐣",
"🐥", "🐺", "🐗", "🐴", "🦄",
"🐝", "🐛", "🐌", "🐞", "🐜",
"🕷", "🦂", "🦀", "🐍", "🐢",
"🐠", "🐟", "🐡", "🐬", "🐳",
"🐋", "🐊", "🐆", "🐅", "🐃",
"🐂", "🐄", "🐪", "🐫", "🐘",
"🐐", "🐏", "🐑", "🐎", "🐖",
"🐀", "🐁", "🐓", "🦃", "🕊",
"🐕", "🐩", "🐈", "🐇", "🐿",
"🐾", "🐉", "🐲", "🌵", "🎄",
"🌲", "🌳", "🌴", "🌱", "🌿",
"☘", "🍀", "🎍", "🎋", "🍃",
"🍂", "🍁", "🌾", "🌺", "🌻",
"🌹", "🌷", "🌼", "🌸", "💐",
"🍄", "🌰", "🎃", "🐚", "🕸",
"🌎", "🌍", "🌏", "🌕", "🌖",
"🌗", "🌘", "🌑", "🌒", "🌓",
"🌔", "🌚", "🌝", "🌛", "🌜",
"🌞", "🌙", "⭐️", "🌟", "💫",
"✨", "☄️", "☀️", "🌤",
"⛅️",
"🌥", "🌦", "☁️", "🌧", "⛈",
"🌩", "⚡️", "🔥", "💥",
"❄️",
"🌨", "☃️", "⛄️", "🌬",
"💨",
"🌪", "🌫", "☂️", "☔️",
"💧",
"💦", "🌊",
"🍏", "🍎", "🍐", "🍊", "🍋",
"🍌", "🍉", "🍇", "🍓", "🍈",
"🍒", "🍑", "🍍", "🍅", "🍆",
"🌶", "🌽", "🍠", "🍯", "🍞",
"🧀", "🍗", "🍖", "🍤", "🍳",
"🍔", "🍟", "🌭", "🍕", "🍝",
"🌮", "🌯", "🍜", "🍲", "🍥",
"🍣", "🍱", "🍛", "🍙", "🍚",
"🍘", "🍢", "🍡", "🍧", "🍨",
"🍦", "🍰", "🎂", "🍮", "🍬",
"🍭", "🍫", "🍿", "🍩", "🍪",
"🍺", "🍻", "🍷", "🍸", "🍹",
"🍾", "🍶", "🍵", "☕️", "🍼",
"🍴", "🍽",
"⚽️", "🏀", "🏈", "⚾️",
"🎾",
"🏐", "🏉", "🎱", "⛳️", "🏌",
"🏓", "🏸", "🏒", "🏑", "🏏",
"🎿", "⛷", "🏂", "⛸", "🏹",
"🎣", "🚣", "🏊", "🏄", "🛀",
"⛹", "🏋", "🚴", "🚵", "🏇",
"🕴", "🏆", "🎽", "🏅", "🎖",
"🎗", "🏵", "🎫", "🎟", "🎭",
"🎨", "🎪", "🎤", "🎧", "🎼",
"🎹", "🎷", "🎺", "🎸", "🎻",
"🎬", "🎮", "👾", "🎯", "🎲",
"🎰", "🎳",
"🚗", "🚕", "🚙", "🚌", "🚎",
"🏎", "🚓", "🚑", "🚒", "🚐",
"🚚", "🚛", "🚜", "🏍", "🚲",
"🚨", "🚔", "🚍", "🚘", "🚖",
"🚡", "🚠", "🚟", "🚃", "🚋",
"🚝", "🚄", "🚅", "🚈", "🚞",
"🚂", "🚆", "🚇", "🚊", "🚉",
"🚁", "🛩", "✈️", "🛫", "🛬",
"⛵️", "🛥", "🚤", "⛴", "🛳",
"🚀", "🛰", "💺", "⚓️", "🚧",
"⛽️", "🚏", "🚦", "🚥", "🏁",
"🚢", "🎡", "🎢", "🎠", "🏗",
"🌁", "🗼", "🏭", "⛲️", "🎑",
"⛰", "🏔", "🗻", "🌋", "🗾",
"🏕", "⛺️", "🏞", "🛣", "🛤",
"🌅", "🌄", "🏜", "🏖", "🏝",
"🌇", "🌆", "🏙", "🌃", "🌉",
"🌌", "🌠", "🎇", "🎆", "🌈",
"🏘", "🏰", "🏯", "🏟", "🗽",
"🏠", "🏡", "🏚", "🏢", "🏬",
"🏣", "🏤", "🏥", "🏦", "🏨",
"🏪", "🏫", "🏩", "💒", "🏛",
"⛪️", "🕌", "🕍", "🕋", "⛩",
"⌚️", "📱", "📲", "💻", "⌨️",
"🖥", "🖨", "🖱", "🖲", "🕹",
"🗜", "💽", "💾", "💿", "📀",
"📼", "📷", "📸", "📹", "🎥",
"📽", "🎞", "📞", "☎️", "📟",
"📠", "📺", "📻", "🎙", "🎚",
"🎛", "⏱", "⏲", "⏰", "🕰",
"⏳", "⌛️", "📡", "🔋", "🔌",
"💡", "🔦", "🕯", "🗑", "🛢",
"💸", "💵", "💴", "💶", "💷",
"💰", "💳", "💎", "⚖", "🔧",
"🔨", "⚒", "🛠", "⛏", "🔩",
"⚙", "⛓", "💣", "🔪",
"🗡", "⚔", "🛡", "🚬", "☠️",
"⚰", "⚱", "🏺", "🔮", "📿",
"💈", "⚗", "🔭", "🔬", "🕳",
"💊", "💉", "🌡", "🏷", "🔖",
"🚽", "🚿", "🛁", "🔑", "🗝",
"🛋", "🛌", "🛏", "🚪", "🛎",
"🖼", "🗺", "⛱", "🗿", "🛍",
"🎈", "🎏", "🎀", "🎁", "🎊",
"🎉", "🎎", "🎐", "🎌", "🏮",
"✉️", "📩", "📨", "📧", "💌",
"📮", "📪", "📫", "📬", "📭",
"📦", "📯", "📥", "📤", "📜",
"📃", "📑", "📊", "📈", "📉",
"📄", "📅", "📆", "🗓", "📇",
"🗃", "🗳", "🗄", "📋", "🗒",
"📁", "📂", "🗂", "🗞", "📰",
"📓", "📕", "📗", "📘", "📙",
"📔", "📒", "📚", "📖", "🔗",
"📎", "🖇", "✂️", "📐", "📏",
"📌", "📍", "🚩", "🏳", "🏴",
"🔐", "🔒", "🔓", "🔏", "🖊",
"🖋", "✒️", "📝", "✏️", "🖍",
"🖌", "🔍", "🔎",
"❤️", "💛", "💚", "💙", "💜",
"💔", "❣️", "💕", "💞", "💓",
"💗", "💖", "💘", "💝", "💟",
"☮️", "✝️", "☪️", "🕉",
"☸️",
"✡️", "🔯", "🕎", "☯️",
"☦️",
"🛐", "⛎", "♈️", "♉️",
"♊️",
"♋️", "♌️", "♍️", "♎️",
"♏️",
"♐️", "♑️", "♒️", "♓️",
"🆔",
"⚛",
#"🈳",
"🈹", "☢️", "☣️",
"📴", "📳",
#jp specific symbols
#"🈶", "🈚️", "🈸", "🈺", "🈷️",
"✴️", "🆚",
"🉑",
"💮",
#jp specific symbols
#"🉐", "㊙️", "㊗️", "🈴", "🈵", "🈲",
"🅰️", "🅱️",
"🆎",
"🆑", "🅾️", "🆘", "⛔️",
"📛",
"🚫", "❌", "⭕️", "💢", "♨️",
"🚷", "🚯", "🚳", "🚱", "🔞",
"📵", "❗️", "❕", "❓", "❔",
"‼️", "⁉️", "💯", "🔅", "🔆",
"🔱", "⚜", "〽️", "⚠️", "🚸",
"🔰", "♻️",
#jp specific symbols
#"🈯️",
"💹",
"❇️",
"✳️", "❎", "✅", "💠", "🌀",
"➿", "🌐", "Ⓜ️", "🏧", "🈂️",
"🛂", "🛃", "🛄", "🛅", "♿️",
"🚭", "🚾", "🅿️", "🚰", "🚹",
"🚺", "🚼", "🚻", "🚮", "🎦",
"📶", "🈁", "🆖", "🆗", "🆙",
"🆒", "🆕", "🆓", "0️⃣",
"1️⃣",
"2️⃣", "3️⃣",
"4️⃣", "5️⃣",
"6️⃣", "7️⃣",
"8️⃣", "9️⃣",
"🔟", "🔢",
"▶️", "⏸", "⏯", "⏹", "⏺",
"⏭", "⏮", "⏩", "⏪", "🔀",
"🔁", "🔂", "◀️", "🔼", "🔽",
"⏫", "⏬", "➡️", "⬅️",
"⬆️",
"⬇️", "↗️", "↘️", "↙️",
"↖️",
"↕️", "↔️", "🔄", "↪️",
"↩️",
"⤴️", "⤵️", "#️⃣",
"*️⃣", "ℹ️",
"🔤", "🔡", "🔠", "🔣", "🎵",
"🎶", "〰️", "➰", "✔️", "🔃",
"➕", "➖", "➗", "✖️", "💲",
"💱", "©️", "®️", "™️",
"🔚",
"🔙", "🔛", "🔝", "🔜", "☑️",
"🔘", "⚪️", "⚫️", "🔴", "🔵",
"🔸", "🔹", "🔶", "🔷", "🔺",
"▪️", "▫️", "⬛️",
"⬜️", "🔻",
"◼️", "◻️", "◾️", "◽️",
"🔲",
"🔳", "🔈", "🔉", "🔊", "🔇",
"📣", "📢", "🔔", "🔕", "🃏",
"🀄️", "♠️", "♣️", "♥️",
"♦️",
"🎴", "👁‍🗨", "💭", "🗯",
"💬",
"🕐", "🕑", "🕒", "🕓", "🕔",
"🕕", "🕖", "🕗", "🕘", "🕙",
"🕚", "🕛", "🕜", "🕝", "🕞",
"🕟", "🕠", "🕡", "🕢", "🕣",
"🕤", "🕥", "🕦", "🕧",
# Flags Disabled
# "🇦🇫", "🇦🇽", "🇦🇱",
# "🇩🇿", "🇦🇸",
# "🇦🇩", "🇦🇴", "🇦🇮",
# "🇦🇶", "🇦🇬",
# "🇦🇷", "🇦🇲", "🇦🇼",
# "🇦🇺", "🇦🇹",
# "🇦🇿", "🇧🇸", "🇧🇭",
# "🇧🇩", "🇧🇧",
# "🇧🇾", "🇧🇪", "🇧🇿",
# "🇧🇯", "🇧🇲",
# "🇧🇹", "🇧🇴", "🇧🇶",
# "🇧🇦", "🇧🇼",
# "🇧🇷", "🇮🇴", "🇻🇬",
# "🇧🇳", "🇧🇬",
# "🇧🇫", "🇧🇮", "🇨🇻",
# "🇰🇭", "🇨🇲",
# "🇨🇦", "🇮🇨", "🇰🇾",
# "🇨🇫", "🇹🇩",
# "🇨🇱", "🇨🇳", "🇨🇽",
# "🇨🇨", "🇨🇴",
# "🇰🇲", "🇨🇬", "🇨🇩",
# "🇨🇰", "🇨🇷",
# "🇭🇷", "🇨🇺", "🇨🇼",
# "🇨🇾", "🇨🇿",
# "🇩🇰", "🇩🇯", "🇩🇲",
# "🇩🇴", "🇪🇨",
# "🇪🇬", "🇸🇻", "🇬🇶",
# "🇪🇷", "🇪🇪",
# "🇪🇹", "🇪🇺", "🇫🇰",
# "🇫🇴", "🇫🇯",
# "🇫🇮", "🇫🇷", "🇬🇫",
# "🇵🇫", "🇹🇫",
# "🇬🇦", "🇬🇲", "🇬🇪",
# "🇩🇪", "🇬🇭",
# "🇬🇮", "🇬🇷", "🇬🇱",
# "🇬🇩", "🇬🇵",
# "🇬🇺", "🇬🇹", "🇬🇬",
# "🇬🇳", "🇬🇼",
# "🇬🇾", "🇭🇹", "🇭🇳",
# "🇭🇰", "🇭🇺",
# "🇮🇸", "🇮🇳", "🇮🇩",
# "🇮🇷", "🇮🇶",
# "🇮🇪", "🇮🇲", "🇮🇱",
# "🇮🇹", "🇨🇮",
# "🇯🇲", "🇯🇵", "🇯🇪",
# "🇯🇴", "🇰🇿",
# "🇰🇪", "🇰🇮", "🇽🇰",
# "🇰🇼", "🇰🇬",
# "🇱🇦", "🇱🇻", "🇱🇧",
# "🇱🇸", "🇱🇷",
# "🇱🇾", "🇱🇮", "🇱🇹",
# "🇱🇺", "🇲🇴",
# "🇲🇰", "🇲🇬", "🇲🇼",
# "🇲🇾", "🇲🇻",
# "🇲🇱", "🇲🇹", "🇲🇭",
# "🇲🇶", "🇲🇷",
# "🇲🇺", "🇾🇹", "🇲🇽",
# "🇫🇲", "🇲🇩",
# "🇲🇨", "🇲🇳", "🇲🇪",
# "🇲🇸", "🇲🇦",
# "🇲🇿", "🇲🇲", "🇳🇦",
# "🇳🇷", "🇳🇵",
# "🇳🇱", "🇳🇨", "🇳🇿",
# "🇳🇮", "🇳🇪",
# "🇳🇬", "🇳🇺", "🇳🇫",
# "🇲🇵", "🇰🇵",
# "🇳🇴", "🇴🇲", "🇵🇰",
# "🇵🇼", "🇵🇸",
# "🇵🇦", "🇵🇬", "🇵🇾",
# "🇵🇪", "🇵🇭",
# "🇵🇳", "🇵🇱", "🇵🇹",
# "🇵🇷", "🇶🇦",
# "🇷🇪", "🇷🇴", "🇷🇺",
# "🇷🇼", "🇧🇱",
# "🇸🇭", "🇰🇳", "🇱🇨",
# "🇵🇲", "🇻🇨",
# "🇼🇸", "🇸🇲", "🇸🇹",
# "🇸🇦", "🇸🇳",
# "🇷🇸", "🇸🇨", "🇸🇱",
# "🇸🇬", "🇸🇽",
# "🇸🇰", "🇸🇮", "🇸🇧",
# "🇸🇴", "🇿🇦",
# "🇬🇸", "🇰🇷", "🇸🇸",
# "🇪🇸", "🇱🇰",
# "🇸🇩", "🇸🇷", "🇸🇿",
# "🇸🇪", "🇨🇭",
# "🇸🇾", "🇹🇼", "🇹🇯",
# "🇹🇿", "🇹🇭",
# "🇹🇱", "🇹🇬", "🇹🇰",
# "🇹🇴", "🇹🇹",
# "🇹🇳", "🇹🇷", "🇹🇲",
# "🇹🇨", "🇹🇻",
# "🇺🇬", "🇺🇦", "🇦🇪",
# "🇬🇧", "🇺🇸",
# "🇻🇮", "🇺🇾", "🇺🇿",
# "🇻🇺", "🇻🇦",
# "🇻🇪", "🇻🇳", "🇼🇫",
# "🇪🇭", "🇾🇪",
# "🇿🇲", "🇿🇼"
] | [
"ghagata@hugeinc.com"
] | ghagata@hugeinc.com |
95ffdd80c52479f6f3280b20217f6b35e155403f | 818b8d509146545c6462f91eb67c2281228e9658 | /src/test/resources/PageObjectRepository/STG/TOIHomePage.spec | 0f8ab03d5797c197ac505f67fab10f4ffb972eb8 | [] | no_license | shaliniqa/TOI_Test_V8 | aef79664fb5e7138cc6534b651c5af458f7cea1b | 59c29c1ebd235f21e14eee5ecca659845fcd8da6 | refs/heads/master | 2020-03-23T00:11:04.243344 | 2018-07-16T11:41:42 | 2018-07-16T11:41:42 | 140,848,893 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 240 | spec | Times of India
=======================================================
TOI_title xpath //img[@title='News']
PopUp_dactive xpath //a[@class='ntfc_dactive']
=======================================================
| [
"Shalinichaurasia@qainfotech.com"
] | Shalinichaurasia@qainfotech.com |
c0a8667cdf5331a1785f146d4e0cc61f940bbe1b | 65ff9281d4793a1ed3a38fa9d453ae870099be10 | /mysql_query_as_csv.py | e3060322d7bb472f8e80c57e456d09961fe5438d | [] | no_license | gooddestiny/test_script | 417753ebaecfeec338c732ea2f6f2b44c25f4d1d | f0cccfca17e9a75e3101a29755c86b9c19bfd77d | refs/heads/master | 2022-04-20T05:35:18.586864 | 2020-04-21T12:40:08 | 2020-04-21T12:40:08 | 257,578,158 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,447 | py | -*- coding: utf-8 -*-_
!/bin/python
import csv
import MySQLdb
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
mysql_host = '127.0.0,1'
mysql_user = 'test'
mysql_passwd = 'yourpw'
dbname = 'db_table'
def main():
# 连接数据库
conn = MySQLdb.connect(
host = mysql_host,
user = mysql_user,
passwd = mysql_passwd,
charset='utf8',
)
cur = conn.cursor()
# 以写的方式打开 csv 文件并将内容写入到w
f = open("./out.csv", 'w')
write_file = csv.writer(f)
#channel_list = [2009, 2011]
#for channel in channel_list:
#print("query channel: %d" % channel)
# 从表里面读出数据,写入到 csv 文件里
for i in range(10):
for j in range(100):
print "query collect: %d, tableNo: %d" % (i, j)
#str(j).rjust(2,'0') 按照00~99格式
dbstr = "select * from db_%d.t_table_%s where create_time between '2020-04-13 00:00:00' and '2020-04-20 00:00:00' and F_channel_id in ( 2007, 2009, 2011, 2023 ) order by create_time" % (i, str(j).rjust(2,'0') )
cur.execute( dbstr )
while True:
row = cur.fetchone() #获取下一个查询结果集为一个对象
if not row:
break
write_file.writerow(row) #csv模块方法一行一行写入
f.close()
# 关闭连接
if cur:
cur.close()
if conn:
conn.close()
if __name__ == '__main__':
main()
print("Exec finish, please check\n")
| [
"noreply@github.com"
] | noreply@github.com |
00a5019f8170820d450523811adbc8132e5d6e18 | 7db050d1505ed83a820934e6a8593165df4a2659 | /log_food.py | 74dc3c460844d4b09f7693a13d3df9223c5f26a9 | [
"MIT"
] | permissive | closeAI/deepchef-deploy | 992e65fc7a289533602eb984ee01417644a0ee5d | 1ed4f106615b6fc0ccf40c7e1b46780e331c86ea | refs/heads/master | 2020-05-24T03:56:43.145825 | 2019-05-16T21:44:28 | 2019-05-16T21:44:28 | 187,082,959 | 0 | 0 | MIT | 2019-05-16T20:50:43 | 2019-05-16T18:41:24 | Python | UTF-8 | Python | false | false | 2,120 | py | import os.path
import json
from datetime import datetime
class LogFood:
"""Logs useful information for data analysis.
# Arguments
log_record: Dictionary which contains the information. Clears out after every request.
path: Path to the json file.
"""
def __init__(self, log_record={}, path=''):
self.log_record = log_record
self.path = path
def new_request(self):
"""Create new dictionary and time stamp."""
self.log_record['request'] = {}
self.log_record['request']['time_stamp'] = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
def new_calc_time(self, food_time):
"""Time needed for calculating the result food list."""
self.log_record['request']['calc_time_seconds'] = food_time
def new_food_ids(self, food_ids):
"""Result food ids."""
self.log_record['request']['result_food_ids'] = food_ids
def new_image_indexes(self, image_indexes):
"""Result food image indexes."""
self.log_record['request']['image_indexes'] = image_indexes
def new_inc_result(self, inc_result):
"""Predicted categories with probability."""
self.log_record['request']['inc_result'] = inc_result
def new_ann_result(self, ann_result):
"""Closest neighbor indexes and distance."""
self.log_record['request']['ann_result'] = ann_result
def get_log_record(self):
return self.log_record
def flush(self):
"""
https://stackoverflow.com/questions/18087397
This opens the file for both reading and writing. Then, it goes to the end
of the file (zero bytes from the end) to find out the file end's
position (relatively to the beginning of the file) and goes
last one byte back, which in a json file is expected to represent
character ]. In the end, it appends a new dictionary to the structure,
overriding the last character of the file and keeping it to be valid json.
It does not read the file into the memory.
"""
with open(self.path, 'r+') as json_file:
json_file.seek(0,2)
position = json_file.tell() - 2
json_file.seek(position)
json_file.write( ",{}]}}".format(json.dumps(self.log_record, sort_keys=True, indent=4)))
self.log_record = {} | [
"muriz.se@gmail.com"
] | muriz.se@gmail.com |
9e739f30c1f17d12582f91627d1ac5bc795c4cc8 | 6114855b02b3a7f8bf1dc233f80823e879b68076 | /d23_mulltilevel.py | b2d9b38d5a94dd58eb15d6e66c585ac2b9505f33 | [] | no_license | anilmondi/Python-Programs | 79bbf0305b4678779dff23492bbd6d8cd10e1257 | b64f3707fdbc802024522a5da022430766cce312 | refs/heads/master | 2022-11-26T06:48:17.903705 | 2020-07-31T12:17:30 | 2020-07-31T12:17:30 | 284,029,197 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 259 | py | class A:
def x(self):
print("I am from class A")
class B(A):
def y(self):
print("I am from class B")
class C(B):
def z(self):
print("I am from class C")
class D(C):
pass
o=D()
o.x()
o.y()
o.z()
| [
"noreply@github.com"
] | noreply@github.com |
7b0209b5129a33a20957245a3ed25f1bda5ed1ce | e6d556d97081576da6469cf1e8c1dd14565db2da | /code/tkinter/icons.py | 32bdcc5c4a086dae60cb06cb946bb8bd9480cc34 | [] | no_license | Scotth72/codePractice | 0b7c795050d08a34dff2b99507b20094d233739a | 475482fab0d69f93d936dc1ba8c2511174089b7c | refs/heads/master | 2023-01-19T02:58:40.977634 | 2020-11-26T15:55:47 | 2020-11-26T15:55:47 | 313,863,106 | 0 | 0 | null | 2020-11-26T15:55:48 | 2020-11-18T08:00:39 | Python | UTF-8 | Python | false | false | 234 | py | from tkinter import *
from PIL import ImageTk, Image
root = Tk()
root.title("Learn to use Icons")
root.iconbitmap('../icons/mando.png')
btn_quit = Button(root, text="Exit Program", command=root.quit)
btn_quit.pack()
root.mainloop() | [
"you@example.com"
] | you@example.com |
b9b22331c0e74d708642b6e3f5ee2b5eae60c052 | 702c8d4b3778c97afacb7bb5d861a989e571d878 | /tests/test_http.py | 0354485994227a93cbb73c74db6e002195c2a8c9 | [
"MIT"
] | permissive | JeremyGrosser/clay | 1945b4d79b1a6429d48aae3b07d758531992458f | a54209b234ce195eba7a0dcf0c5ae884a3d83a58 | refs/heads/master | 2021-01-18T06:17:33.253383 | 2015-08-04T21:05:48 | 2015-08-04T21:05:48 | 40,208,420 | 0 | 1 | null | 2018-04-15T15:56:56 | 2015-08-04T20:50:42 | Python | UTF-8 | Python | false | false | 4,839 | py | from __future__ import absolute_import
import httplib
import mock
import clay.config
from clay import http
import urllib2
import tempfile
import shutil
import os.path
from unittest import TestCase
s = mock.sentinel
class RequestTestCase(TestCase):
def test_method_with_method(self):
req = http.Request(url='http://www.uber.com', method=s.method)
self.assertEqual(req.get_method(), s.method)
def test_method_no_data(self):
req = http.Request(url='http://www.uber.com', data=None)
self.assertEqual(req.get_method(), 'GET')
def test_method_data(self):
req = http.Request(url='http://www.uber.com', data={'1': 2})
self.assertEqual(req.get_method(), 'POST')
@mock.patch('ssl.get_server_certificate')
@mock.patch('urllib2.urlopen')
class LittleRequestTestCase(TestCase):
def test_error_returns_response(self, mock_urlopen, mock_get_cert):
e = urllib2.HTTPError('http://www.google.com', 404, 'Some message', {}, None)
mock_urlopen.side_effect = e
response = http.request('GET', 'http://www.google.com')
self.assertEqual(response, http.Response(status=404, headers={}, data=None))
def test_http_only(self, mock_urlopen, mock_get_cert):
self.assertRaises(urllib2.URLError, http.request, 'GET', 'ftp://google.com')
def test_good(self, mock_urlopen, mock_get_cert):
mock_response = mock.Mock(name='resp')
mock_response.getcode.return_value = 200
mock_response.read.return_value = s.body
mock_response.headers = {}
mock_urlopen.return_value = mock_response
response = http.request('GET', 'http://www.google.com')
self.assertEqual(response, http.Response(status=200, headers={}, data=s.body))
def test_timeout_passed(self, mock_urlopen, mock_get_cert):
http.request('GET', 'http://www.google.com', timeout=10)
mock_urlopen.assert_called_once_with(mock.ANY, timeout=10)
def create_mock_http_connection():
mock_conn = mock.Mock(name='https_connection')
mock_resp = mock.Mock(name='https_response')
mock_resp.read.return_value = ''
mock_resp.recv.return_value = ''
mock_resp.status = 200
mock_resp.reason = 'A OK'
mock_conn.getresponse.return_value = mock_resp
conn = mock.MagicMock(spec=httplib.HTTPSConnection, return_value=mock_conn)
return conn
@mock.patch('httplib.HTTPSConnection', new_callable=create_mock_http_connection)
@mock.patch('ssl.get_server_certificate')
class SSLTestCase(TestCase):
def setUp(self, *args, **kwargs):
self.wd = tempfile.mkdtemp()
with open(os.path.join(self.wd, 'ca.crt'), 'w') as fd:
fd.write('')
def tearDown(self, *args, **kwargs):
if self.wd is not None and os.path.exists(self.wd):
shutil.rmtree(self.wd)
def test_ssl_checks_if_enabled(self, mock_get_cert, mock_conn):
config_dict = {
'http': {
'ca_certs_file': os.path.join(self.wd, 'ca.crt'),
'verify_server_certificates': True,
}
}
with mock.patch.dict(clay.config.CONFIG.config, config_dict):
http.request('GET', 'https://www.google.com')
mock_get_cert.assert_called_once_with(('www.google.com', 443), ca_certs=os.path.join(self.wd, 'ca.crt'))
def test_ssl_checks_not_enabled(self, mock_get_cert, mock_conn):
config_dict = {
'http': {
'ca_certs_file': os.path.join(self.wd, 'ca.crt'),
'verify_server_certificates': False,
}
}
with mock.patch.dict(clay.config.CONFIG.config, config_dict):
http.request('GET', 'https://www.google.com')
self.assertEqual(mock_get_cert.call_count, 0)
def test_ssl_certs_disabled_if_no_file(self, mock_get_cert, mock_conn):
config_dict = {
'http': {
'ca_certs_file': os.path.join(self.wd, 'does_not_exist.crt'),
'verify_server_certificates': True,
}
}
with mock.patch.dict(clay.config.CONFIG.config, config_dict):
http.request('GET', 'https://www.google.com')
self.assertEqual(mock_get_cert.call_count, 0)
def test_ssl_checks_honored(self, mock_get_cert, mock_conn):
config_dict = {
'http': {
'ca_certs_file': os.path.join(self.wd, 'ca.crt'),
'verify_server_certificates': True,
}
}
mock_get_cert.side_effect = ValueError('Invalid SSL certificate')
with mock.patch.dict(clay.config.CONFIG.config, config_dict):
self.assertRaises(ValueError, http.request, 'GET', 'https://www.google.com')
mock_get_cert.assert_called_once_with(('www.google.com', 443), ca_certs=os.path.join(self.wd, 'ca.crt'))
| [
"jbrown@uber.com"
] | jbrown@uber.com |
8c5267a13b73d13e3e212e033e0f7f72e9eb1be1 | 3657f1b941cf5343645f92179516ebf3e2a49e86 | /backend/core/urls.py | 2e3774c5632f987e2c597501a8cda6d3dc5f9ff2 | [] | no_license | Highoc/folder_explorer | e489c4d8ef4d6199c9ecaff58ee70bfa25244e47 | fa41c7fff2d1829f900ebbd6ff153b598a491158 | refs/heads/master | 2022-12-06T19:52:42.736145 | 2019-09-23T20:42:49 | 2019-09-23T20:42:49 | 209,986,254 | 0 | 0 | null | 2022-11-22T04:15:53 | 2019-09-21T13:03:53 | JavaScript | UTF-8 | Python | false | false | 737 | py | from django.urls import re_path
from .views import FolderCreateView, FolderUpdateView,\
ImageCreateView, ImageUpdateView, ImageDeleteView,\
SearchView, ImageDownloadView
urlpatterns = [
re_path(r'^folder/create/$', FolderCreateView.as_view()),
re_path(r'^folder/update/(?P<key>[0-9a-f]{32})/$', FolderUpdateView.as_view()),
re_path(r'^image/create/$', ImageCreateView.as_view()),
re_path(r'^image/update/(?P<key>[0-9a-f]{32})/$', ImageUpdateView.as_view()),
re_path(r'^image/delete/(?P<key>[0-9a-f]{32})/$', ImageDeleteView.as_view()),
re_path(r'^image/download/(?P<key>[0-9a-f]{32})/$', ImageDownloadView.as_view()),
re_path(r'^search/$', SearchView.as_view()),
]
| [
"door0172@gmail.com"
] | door0172@gmail.com |
1398cabe4b4b3d902467f14f0abec5a76178baca | 6dc31356f9ee4bf984b54f1f00c6d249199a37ac | /No.448_Find All Numbers Disappeared in an Array.py | cf49d3ff95b6d91ed7a71826bb46444b9557fd0d | [] | no_license | miayuxin/leetcode | 0016af492744caab7717f9802e823739e64445eb | 0f565cc8a83c9f6143e0b7363a2bb47fb6dea7d0 | refs/heads/master | 2023-03-27T20:58:08.693006 | 2021-04-02T01:21:53 | 2021-04-02T01:21:53 | 261,366,360 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 406 | py | class Solution:
def findDisappearedNumbers(self, nums):
i = 1
appeardSet = set(nums)
output = []
while i <= len(nums):
if i not in appeardSet:
output.append(i)
i += 1
else:
i += 1
return output
s = Solution()
nums = [4, 3, 2, 7, 8, 2, 3, 1]
print(s.findDisappearedNumbers(nums)) | [
"ikuyuhsin@gmail.com"
] | ikuyuhsin@gmail.com |
eb6724585a47c16c7058930111a03405d5276fc7 | 69439004c494c2d56018468d3fec8c9e56036fc8 | /tests/zeus/utils/test_auth.py | 5c2197a339a137df799456193c58afdd897db536 | [
"Apache-2.0"
] | permissive | buchiputaobutuputaopi/zeus-1 | 6dbc54e65c925040b1c1e01683625cea49299b4e | 8a606642d9ef8f239df2e8d7079ea4d130d78cb3 | refs/heads/master | 2021-06-25T07:26:52.278251 | 2017-09-06T03:53:04 | 2017-09-06T03:53:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | py | from zeus import auth
def test_login_user(client, default_user):
with client.session_transaction() as session:
auth.login_user(default_user.id, session=session)
assert session['uid'] == default_user.id
assert session['expire']
| [
"dcramer@gmail.com"
] | dcramer@gmail.com |
4136dbd6bfdbf3a09a15f84027bfc86322f96d4c | eed07a6f822f032718a1b2edb3fef95323353ae6 | /app/views/v1/common.py | 0ffdf937f14538f0f3bf79cd69b652fe1b0acdd4 | [
"MIT"
] | permissive | daghan/Ostrich | 6ffdf586c719ab10ecf6b37ecd6a5e5ef12c2b9d | b12057bee7b8b92aedf09ec40edc97a60340527b | refs/heads/master | 2020-12-03T20:24:31.387673 | 2020-01-02T23:52:31 | 2020-01-02T23:52:31 | 231,474,913 | 0 | 0 | MIT | 2020-01-02T23:16:16 | 2020-01-02T23:16:16 | null | UTF-8 | Python | false | false | 2,277 | py | from app import webapp
from app.models import Search, Order, User, Collection, Utils
from flask import jsonify, request
from app.decorators import async
from pymongo import MongoClient
from app.scripts.related_items import getRelatedItems
import json
@webapp.route('/startSession')
def startSession():
if 'android_id' in request.args:
android_id = request.args.get('android_id')
if android_id in []:
return jsonify({'debug':'True', 'ip': '52.74.20.228'})
# VERSION SPECIFIC
app_version = int(request.headers.get('App-Version')) if 'App-Version' in request.headers else 0
# Search().getContentData(key="recommendations")
# Search().getContentData(key="most_searched")
reading_multiplier = webapp.config['NEW_READING_RATE'] if app_version >= 6030000 else webapp.config['NEW_READING_RATE'] - 0.01
data = {
'most_searched': Collection(4).getObj()['items'],
'recommendations': Collection(5).getObj()['items'],
'categories': Search.getSearchCategoriesForApp(),
'return_days': webapp.config['DEFAULT_RETURN_DAYS'],
'reading_multiplier': reading_multiplier,
'time_slots': Order.getTimeSlotsForOrder(),
'user_model': None
}
if 'user_id' in request.args:
user_id = request.args.get('user_id')
if user_id and int(user_id) > -1:
user = User(user_id)
user.getOrderSlots()
data['user_model'] = user.getObj()
user.logMetadata(app_version)
return jsonify(data)
@webapp.route('/getRelatedItems')
def getRelatedItemsApi():
client = MongoClient(webapp.config['MONGO_DB'])
db = client.ostrich
item_id = Utils.getParam(request.args, 'item_id', 'int')
related_items_cursor = db.related_item_ids.find({'_id': item_id})
related_item_ids = [_ for _ in related_items_cursor]
if len(related_item_ids) == 0:
#check redis queue
getRelatedItemsAsyncWrapper(item_id)
return jsonify({'status': 'wait', 'message':'Crawling in progress'})
related_item_ids = related_item_ids[0]['item_ids']
items = Search().getById(related_item_ids)
return json.dumps(items)
@async
def getRelatedItemsAsyncWrapper(item_id):
getRelatedItems(item_id)
return
| [
"anant718@gmail.com"
] | anant718@gmail.com |
88748e4c1737c0724d79ac3be6be3022e4666080 | 3247f3d9b24e680543cd2a11538e126c520b4c8c | /dictpy.py | 56addbdcfb7ac2f8447890fbee798022c275c7fd | [] | no_license | TheM4st3r/DictyPy | f5c804ce49624aa807550d57a4664f879ee23610 | 2fda4a6d859c4fb66e7fee25b0db6b07d266440f | refs/heads/master | 2021-08-26T07:07:36.916547 | 2017-11-22T03:15:47 | 2017-11-22T03:15:47 | 111,629,772 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,204 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#Por Derick Santos
from PIL import Image
import pytesseract
import urllib.request, urllib.parse, urllib.error
import platform
import time
import os
import re
#Banner
def banner():
if platform.system() == "Windows":
os.system("cls")
else:
os.system("clear")
print("=======================================")
print("==== DictPy - Dicionário em Python ====")
print("==== Por Derick Santos ====")
print("==== Versão 1.0 ====")
print("=======================================")
print("==== https://fsocietybrasil.org/ ====")
print("=======================================")
index = banner()
time.sleep(5)
print("\nPressione CTRL + C para cancelar!\n")
time.sleep(3)
#Programa
def programa(index):
#Requisição
site = "https://s.dicio.com.br/"
termo = input("[D] - Digite uma palavra que você quer saber o significado: ")
url = site+termo+".jpg"
r = urllib.request.urlretrieve(url,termo+".jpg")
#Manipulação de imagens
imagem = termo+".jpg"
print(pytesseract.image_to_string(Image.open(imagem)))
print("")
while True:
programa(index)
| [
"noreply@github.com"
] | noreply@github.com |
6dc26d209cbda790e1095acf3e4839194eb290be | b3ce060c84549a0742393f4f0430a5e55b4cac29 | /backend/devices/migrations/0006_auto_20161222_1133.py | edf5eb771a0a4078aeebcfcb0c2a5b76e24e1658 | [] | no_license | aljazb/Website | 0443026d3ab631a5836fb02b9874ab3162ba9fbd | 88ea46883f95a87c5dce384e137848916e51d34e | refs/heads/master | 2021-01-13T08:06:18.907613 | 2017-01-14T12:11:04 | 2017-01-14T12:11:04 | 71,723,791 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 682 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-22 11:33
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('devices', '0005_auto_20161222_1132'),
]
operations = [
migrations.AlterField(
model_name='brand',
name='os',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='devices.Os'),
),
migrations.AlterField(
model_name='device',
name='rating',
field=models.IntegerField(),
),
]
| [
"aljaz.blazej@gmail.com"
] | aljaz.blazej@gmail.com |
994c474893829f7b9975bc0918d1ea72557c4f24 | 2aa3560f607f4ba04f8e989a95c55f34a1464ebb | /project/Forms/models.py | 822d5e0d836338ec74ee132737868290ed2f9a75 | [] | no_license | amilalizada/metm_forms | 1822ac21a5fc037aed1943d0f52914d3c5821852 | c75f02fb13fcfaaf04bd635f69b84628afdbfd05 | refs/heads/master | 2023-06-13T21:59:43.584475 | 2021-06-30T22:17:11 | 2021-06-30T22:17:11 | 381,845,016 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,655 | py | from django.db import models
from datetime import datetime
from django.utils.safestring import mark_safe
import json
# Create your models here.
class Forms(models.Model):
name = models.CharField('Name',max_length=1024)
is_published = models.BooleanField(default=False)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
verbose_name = ("Forma")
verbose_name_plural = ("Formalar")
def __str__(self):
return self.name
class Fields(models.Model):
form = models.ForeignKey(Forms,on_delete=models.CASCADE, db_index=True, related_name='form_relation')
field_choices = [
('1', 'text'),
('2', 'integer'),
('3', 'datetime'),
('4', 'date'),
('5', 'textarea'),
('6', 'email'),
('7', 'select'),
]
label = models.CharField('Label', max_length=256)
types = models.CharField('Tipler',max_length=50, choices=field_choices)
default_value = models.CharField('Default Value', max_length=256, blank=True , null=True)
requirement = models.BooleanField(default=False)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
verbose_name = ("Fields")
verbose_name_plural = ("Fields")
def __str__(self):
return self.label
def get_type(self):
return self.types
class Values(models.Model):
fields = models.ForeignKey(Fields,on_delete=models.CASCADE, db_index=True, related_name='field_relation')
value = models.TextField(max_length=1000)
class Meta:
verbose_name = ("Deyer")
verbose_name_plural = ("Deyerler")
class Types(models.Model):
#informations
types = models.CharField('Adı',max_length=20)
# moderations
is_published = models.BooleanField('is published', default=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
verbose_name = 'Tag'
verbose_name_plural = 'Tag'
def __str__(self):
return self.tag_name
class Emails(models.Model):
#informations
forms = models.ForeignKey(Forms,on_delete=models.CASCADE, db_index=True, related_name='email_relation')
email = models.CharField('Email',max_length=100)
# moderations
is_published = models.BooleanField('is published', default=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
verbose_name = 'email'
verbose_name_plural = 'emails'
def __str__(self):
return self.email
class SecondValues(models.Model):
forms = models.ForeignKey(Forms, verbose_name='Forms', on_delete=models.CASCADE,blank= True,null=True, related_name = 'list_of_value')
datas = models.CharField('Datalar', max_length=1000)
class Question(models.Model):
type_choise = [
('1', 'Test'),
('2', 'Video'),
('3', ' Voice Record'),
('4', ' Text'),
]
title = models.CharField('Title',max_length=50)
description = models.TextField('Description')
correct_answer = models.CharField('Correct answer',max_length=125)
type = models.CharField('Tipler',max_length=50, choices=type_choise)
is_auto = models.BooleanField('Is aouto', default=1)
# subject = models.ForeignKey()
class Meta():
verbose_name = 'Question'
verbose_name_plural = 'Questions'
# ordering = ('-created_at', '-title')
def __str__(self):
return f"{self.title}" | [
"elizade_amil@inbox.ru"
] | elizade_amil@inbox.ru |
c7eb94d3802446203c14a9fe04bd8e1df2843806 | 7f8cf59da8ade5ea3b6018722be1bbd70959dc4a | /main.py | 7441d446e830e290e73df09e03291d875c4fada3 | [
"MIT"
] | permissive | Amenkba/jackbot | 4d95462ad6c36a9c1b5506958e4e6bb0bd71ec9b | 9f1ef40f6a16d0d6a2fe34acda852ba846ee7d2b | refs/heads/master | 2023-01-30T01:35:39.981258 | 2020-12-10T17:00:02 | 2020-12-10T17:00:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,334 | py | #!/usr/bin/env python3
import discord # discord library
from discord.ext import commands # discord library extension to make stuff easier
import random
import minespy # library to make minesweeper boards
import ttt as tictt # library to help with tic tac toe
import c4 as con4 # library to help with connect 4
import re # regex
import json # json
import extra
import asyncio
# read token
with open("tokenfile","r") as tokenfile:
token = tokenfile.read()
# Help command specification
# (declaring everything needed for the help command)
with open("help.json", "r") as helpfile:
jsonhelp = json.loads(helpfile.read())
empty_string = " "
help_embed = discord.Embed(title="Help")
help_message_list = []
for category in jsonhelp:
field_text = ""
for command in jsonhelp[category]:
syntax = jsonhelp[category][command]["syntax"]
usage = jsonhelp[category][command]["usage"]
field_text += f"**{command}**: j!{command} {empty_string.join(syntax)}\n*{usage}*\n"
help_message_list.append(field_text)
help_embed.add_field(
name=category, value=help_message_list[len(help_message_list) - 1])
client = commands.Bot(command_prefix="j!",activity=discord.Game("connect 4"))
client.remove_command("help")
repomsg = discord.Embed(title="Repo",description="https://github.com/Vresod/jackbot")
log_channel = client.get_channel(784583344188817428)
# print message when bot turns on and also print every guild that its in
@client.event
async def on_ready():
print(f"logged in as {client.user}")
print(f"https://discord.com/oauth2/authorize?client_id={client.user.id}&permissions=8192&scope=bot")
for guild in client.guilds:
print(f"In guild: {guild.name}")
print(f"In {len(client.guilds)} guilds")
global log_channel
log_channel = client.get_channel(784583344188817428)
await log_channel.send("waking up")
# and also print every time it joins a guild
@client.event
async def on_guild_join(guild):
print(f"Joined guild: {guild.name}")
@client.command()
async def minesweeper(ctx, length: int = 6, width: int = 6, mines: int = 7):
if length * width > 196:
await ctx.send(embed=discord.Embed(title="Error",description="Board too large. Try something smaller."))
return
if mines >= (length * width):
mines = (length * width) - 1
gridstr = minespy.generategrid(length,width,mines)
while "0" in gridstr or "1" in gridstr or "2" in gridstr or "3" in gridstr or "4" in gridstr or "5" in gridstr or "6" in gridstr or "7" in gridstr or "7" in gridstr or "B" in gridstr: # stole this from stackoverflow
gridstr = gridstr.replace("0","||:zero:||")
gridstr = gridstr.replace("1","||:one:||")
gridstr = gridstr.replace("2","||:two:||")
gridstr = gridstr.replace("3","||:three:||")
gridstr = gridstr.replace("4","||:four:||")
gridstr = gridstr.replace("5","||:five:||")
gridstr = gridstr.replace("6","||:six:||")
gridstr = gridstr.replace("7","||:seven:||")
gridstr = gridstr.replace("8","||:eight:||")
gridstr = gridstr.replace("B","||:boom:||")
gridstr = extra.replacenth(gridstr,"||:zero:||",":zero:",random.randint(0,gridstr.count("||:zero:||")))
embed = discord.Embed(title=f"{length}x{width} with {mines} mines",description=gridstr)
await ctx.send(embed=embed)
@client.command()
async def rps(ctx,member):
otherguy = ctx.message.mentions[0]
if ctx.author.dm_channel == None:
await ctx.author.create_dm()
if otherguy.dm_channel == None:
await otherguy.create_dm()
authormsg = await ctx.author.dm_channel.send("Rock, paper, or scissors?")
otherguymsg = await otherguy.dm_channel.send("Rock, paper, or scissors?")
for i in u"\U0001f5ff\U0001f4f0\u2702": # rock/paper/scissors
await authormsg.add_reaction(i)
await otherguymsg.add_reaction(i)
def check(reaction,user):
return (user.id == ctx.author.id or user.id == otherguy.id) and (reaction.message == authormsg or reaction.message == otherguymsg)
players = []
winner = None
while len(players) < 2:
try:
reaction,user = await client.wait_for('reaction_add', timeout=60.0, check=check)
except asyncio.exceptions.TimeoutError:
await ctx.send("Game closed due to inactivity.")
return
stop = False
for i in players:
if user in i:
stop = True
if stop:
continue
players.append([reaction,user])
if str(players[0][0].emoji) == u"\U0001f5ff" and str(players[1][0].emoji) == u"\U0001f4f0": # rock < paper
winner = players[1][1].name
elif str(players[0][0].emoji) == u"\U0001f4f0" and str(players[1][0].emoji) == u"\U0001f5ff": # paper > rock
winner = players[0][1].name
elif str(players[0][0].emoji) == u"\u2702" and str(players[1][0].emoji) == u"\U0001f4f0": # paper < scissors
winner = players[0][1].name
elif str(players[0][0].emoji) == u"\U0001f4f0" and str(players[1][0].emoji) == u"\u2702": # scissors > paper
winner = players[1][1].name
elif str(players[0][0].emoji) == u"\u2702" and str(players[1][0].emoji) == u"\U0001f5ff": # scissors < rock
winner = players[1][1].name
elif str(players[0][0].emoji) == u"\U0001f5ff" and str(players[1][0].emoji) == u"\u2702": # rock > scissors
winner = players[0][1].name
else:
description = f"{players[0][0].emoji} v {players[1][0].emoji}\n\nIts a tie!"
if winner != None:
description = f"{players[0][0].emoji} v {players[1][0].emoji}\n\n{winner} wins!"
title = f"{players[0][1].name} v {players[1][1].name}"
game_embed = discord.Embed(title=title,description=description)
await ctx.send(embed=game_embed)
await otherguy.dm_channel.send(embed=game_embed)
await ctx.author.dm_channel.send(embed=game_embed)
# Keyboard-style inputs accepted by tictactoe: WASD keys (optionally combined)
# pick a cell, "." is the centre, "q" quits.
# NOTE(review): 'sd' appears twice and 'ds' is missing, but reversed two-key
# combos are normalised below before matching, so this still works — confirm.
valid_t_movements = ['w', 'a', 's', 'd', 'wa', 'wd', 'sa', 'sd', '.', 'q', 'aw', 'dw', 'as', 'sd']
@client.command()
async def tictactoe(ctx,member):
	# Play tic-tac-toe against the first mentioned member.  The board lives in
	# `g` as a newline-separated string whose free cells hold the digits 1-9;
	# `gs` is the emoji-rendered copy shown in the embed.
	opponent = ctx.message.mentions[0]
	await ctx.send(f"playing tic tac toe with {opponent.display_name}")
	g = tictt.generategrid()
	gs = g
	# Replace every still-free cell digit with a blue square for display.
	for i in gs:
		if str(i) in "123456789":
			gs = gs.replace(i,":blue_square:")
	msgembed = discord.Embed(title=f"Tic Tac Toe: {ctx.author.display_name} vs {opponent.display_name}")
	msgembed.description = gs
	bmsg = await ctx.send(embed=msgembed)
	moves = 1
	def check(message):
		# Accept a message from the player whose turn it is, or "q" from anyone.
		# NOTE(review): `message.content in valid_t_movements or message.content`
		# is truthy for ANY non-empty message, so the membership test here is
		# vacuous; invalid inputs are filtered by the if/elif chain below.
		user = message.author
		return ((user == opponent if moves % 2 == 0 else user == ctx.author) and (message.content in valid_t_movements or message.content)) or message.content == "q"
	while moves <= 9:
		try:
			m = await client.wait_for('message',timeout=60.0,check=check)
		except asyncio.exceptions.TimeoutError:
			await ctx.send("Game closed due to inactivity.")
			return
		c = m.content.lower()
		# Normalise reversed two-key combos ("as"->"sa", "aw"->"wa", etc.).
		if c in ["as","ds","aw","dw"]:
			c = c[::-1]
		og = g  # snapshot so we can tell whether the move landed on a free cell
		char = "X" if moves % 2 == 1 else "O"  # invoker is X (odd moves), opponent is O
		if c == "q":
			await ctx.send("Game closed.")
			return
		if c == "r":
			# Re-send the board (e.g. if it scrolled out of view).
			bmsg = await ctx.send(embed=msgembed)
			continue
		# Map the normalised combo to its numbered cell; str.replace is a
		# no-op when the cell digit was already replaced by a mark.
		if c == "wa":
			g = g.replace("1",char)
		elif c == "w":
			g = g.replace("2",char)
		elif c == "wd":
			g = g.replace("3",char)
		elif c == "a":
			g = g.replace("4",char)
		elif c == ".":
			g = g.replace("5",char)
		elif c == "d":
			g = g.replace("6",char)
		elif c == "sa":
			g = g.replace("7",char)
		elif c == "s":
			g = g.replace("8",char)
		elif c == "sd":
			g = g.replace("9",char)
		else:
			continue
		if og != g:
			# Only a move that actually changed the board consumes a turn.
			moves += 1
		try:
			await m.delete()
		except discord.Forbidden:
			pass
		# Render the board: marks become emojis, remaining digits blue squares.
		# NOTE(review): "O" renders as :zero: (the digit 0 emoji) — confirm
		# this is the intended marker for the second player.
		gs = g
		gs = gs.replace("X",":regional_indicator_x:")
		gs = gs.replace("O",":zero:")
		for i in gs:
			if str(i) in "123456789":
				gs = gs.replace(i,":blue_square:")
		msgembed.description = gs
		await bmsg.edit(embed=msgembed)
		# Convert the board string into a list of row character-lists for tictt.
		glist = []
		for i in g.split("\n"):
			if i == "":
				continue
			gltmp = []
			for j in i:
				gltmp.append(j)
			glist.append(gltmp)
		if tictt.checkWin(glist):
			# moves was already incremented above, so the parity is flipped
			# relative to the player who just placed a mark.
			winner = ctx.author.display_name if moves % 2 == 0 else opponent.display_name
			await ctx.send(f"{winner} has won!")
			return
		elif moves > 9:
			await ctx.send("Nobody won, the game is tied.")
			return
# Accepted Connect Four inputs: column numbers 1-7, plus q (quit) and r (re-send board).
valid_c_movements = [str(i) for i in range(1, 8)] + ["q", "r"]
@client.command()
async def connectfour(ctx,member):
	# Play Connect Four between the invoker and the first mentioned member.
	# Players drop a piece by sending a column number (1-7); "q" quits and
	# "r" re-sends the board.  The game now times out after 60s of inactivity
	# (the original used timeout=None, which left the handler pending forever).
	opponent = ctx.message.mentions[0]
	await ctx.send(f"playing connect 4 with {opponent.display_name}")
	# Board rows are stored bottom-up; each free cell holds its row digit so
	# empty cells ("1".."6") can be told apart from pieces ("X"/"O").
	g = ["1111111\n", "2222222\n", "3333333\n", "4444444\n", "5555555\n", "6666666\n"]
	nums = [str(i) for i in range(1, 7)]
	gridstr = "".join(g)
	for i in nums:
		gridstr = gridstr.replace(i,":blue_square:")
	gridstr += ":one::two::three::four::five::six::seven:"
	msgembed = discord.Embed(title=f"Connect 4: {ctx.author.display_name} vs {opponent.display_name}")
	msgembed.description = gridstr
	bmsg = await ctx.send(embed=msgembed)
	moves = 1
	while moves <= 42:
		def check(message):
			# BUGFIX: the original tested membership in valid_t_movements (the
			# tic-tac-toe list) followed by `or message.content`, which made
			# the content test vacuously true.  Accept only real Connect Four
			# inputs: the turn player may move; either player may quit/refresh.
			user = message.author
			if message.content in ["q", "r"]:
				return user == opponent or user == ctx.author
			turn_player = opponent if moves % 2 == 0 else ctx.author
			return user == turn_player and message.content in valid_c_movements
		try:
			m = await client.wait_for('message',timeout=60.0,check=check)
		except asyncio.exceptions.TimeoutError:
			# Consistent with tictactoe: do not leave the command hanging.
			await ctx.send("Game closed due to inactivity.")
			return
		c = m.content
		if c not in valid_c_movements:
			continue
		if c == "q":
			await ctx.send("game ended")
			return
		elif c == "r":
			# Re-send the current board in a fresh message.
			msgembed = discord.Embed(title=f"Connect 4: {ctx.author.display_name} vs {opponent.display_name}")
			msgembed.description = gridstr
			bmsg = await ctx.send(embed=msgembed)
		bg = list(g)  # snapshot to detect whether the move actually landed
		if c in "1234567":
			for y in g:
				# Skip rows whose target cell is already occupied by a piece.
				if not y[int(c) - 1] in nums: continue
				t = list(y)
				t[int(c) - 1] = "X" if moves % 2 == 1 else "O"
				g[g.index(y)] = "".join(t)
				break
			# A full column leaves the board unchanged and costs no turn.
			moves += 1 if bg != g else 0
		else:
			continue
		gridstr = "".join(g[::-1])  # render top row first
		for i in nums:
			gridstr = gridstr.replace(i,":blue_square:")
		gridstr = gridstr.replace("O", ":yellow_circle:").replace("X",":red_circle:")
		gridstr += ":one::two::three::four::five::six::seven:"
		msgembed = discord.Embed(title=f"Connect 4: {ctx.author.display_name} vs {opponent.display_name}")
		msgembed.description = gridstr
		await bmsg.edit(embed=msgembed)
		await m.delete()
		# con4.check_win expects a list of character lists (each row keeps its
		# trailing "\n", exactly as before).
		glist = [list(row) for row in g]
		if con4.check_win(glist,"X") or con4.check_win(glist,"O"):
			# moves was already incremented, so parity is flipped relative to
			# the player who just moved.
			winner = ctx.author.display_name if moves % 2 == 0 else opponent.display_name
			await ctx.send(f"{winner} has won!")
			return
		elif moves > 42:
			await ctx.send("Nobody won, the game is tied. How did you manage to do that in connect 4?")
			return
@client.command()
async def roll(ctx, number_of_dice: int, number_of_sides: int):
	# Roll `number_of_dice` dice with `number_of_sides` sides each and reply
	# with the comma-separated results, e.g. "3, 1, 6".
	# random.randint(1, n) is the idiomatic equivalent of choosing from
	# range(1, n + 1) without materialising the range first.
	dice = [str(random.randint(1, number_of_sides)) for _ in range(number_of_dice)]
	await ctx.send(', '.join(dice))
@client.command()
async def help(ctx, cmd=None):
	# Overridden help command: the general help embed by default, or the
	# per-game control hints when a command name is given.
	if cmd is None:
		await ctx.send(embed=help_embed)
		return
	if cmd == "tictactoe":
		await ctx.send("controls: ```aw w wd\na . d\nas s sd```")
	elif cmd == "connect4":
		await ctx.send(f"controls: {' '.join(str(i) for i in range(1, 8))}")
@client.command()
async def repo(ctx):
	# Reply with the pre-built embed (module-level `repomsg`) linking to the
	# bot's source repository.
	await ctx.send(embed=repomsg)
# aliases
# Short-name wrappers that simply forward to the full commands defined above.
@client.command()
async def ms(ctx, length: int = 6, width: int = 6, mines = 7):
	# Alias for minesweeper (defaults: 6x6 board, 7 mines).
	# NOTE(review): `mines` lacks the `: int` annotation its siblings have;
	# adding it would change discord.py's argument conversion — confirm intent.
	await minesweeper(ctx,length,width,mines)
@client.command()
async def ttt(ctx,member):
	# Alias for tictactoe.
	await tictactoe(ctx,member)
@client.command()
async def c4(ctx,member):
	# Alias for connectfour.
	await connectfour(ctx,member)
client.run(token)  # Blocking: starts the bot's event loop; must be the last statement.
# vim: noet ci pi sts=0 sw=4 ts=4:
| [
"lukebinkofsky@gmail.com"
] | lukebinkofsky@gmail.com |
e46adcbf04ac884ff740ca93934d7d39b5327460 | a48483634bc14ab1dbad3579c0f6bf5778235466 | /Mod 19 Naive Bayes/Spam Py.py | 9ca619449f0fd78ce377c32b249226ea00a93fd8 | [] | no_license | RajKumar1809/360DigitMG | 00a6d57aa9ad1a4d83744a43c413ab92b91e3cb3 | fd2c078ea29d1fe1f6b3f641bc39ca226d837108 | refs/heads/main | 2023-08-29T09:55:27.264523 | 2021-10-21T18:27:37 | 2021-10-21T18:27:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,779 | py | import pandas as pd #data manipulation
import numpy as np
from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer
from sklearn.naive_bayes import MultinomialNB
from sklearn.naive_bayes import GaussianNB
from sklearn.naive_bayes import BernoulliNB
import re #cleaning the data
# Load the raw SMS dataset (one label column + one text column).
# NOTE(review): hard-coded absolute Windows path — parameterise before reuse.
sms_raw = pd.read_csv("C:\\Users\\gopal\\Documents\\360DigiTMG\\mod 19\\sms_raw_NB.csv",encoding = "ISO-8859-1")
sms_raw.shape
#Cleaning data
stop_words = []
with open("C:\\Users\\gopal\\Documents\\360DigiTMG\\mod 19\\stopwords_en.txt") as f:stop_words = f.read() #loading the stop words
stop_words = stop_words.split("\n") #breaking the single string to list
# NOTE(review): `stop_words` is loaded but never used later in this script —
# confirm whether stop-word removal was meant to happen in the cleaning step.
#Function to clean the data
def cleaningdata(text):
    """Normalize one SMS message for bag-of-words vectorization.

    Replaces every run of non-alphabetic characters with a single space,
    lowercases the result, and keeps only tokens longer than 3 characters.

    The original code ran a second, digit-stripping regex pass, but the
    first substitution already removes digits, so that pass was dead code
    and has been dropped (behavior is unchanged).

    :param text: raw message string
    :return: cleaned, space-joined string (empty when no token survives)
    """
    text = re.sub("[^A-Za-z]+", " ", text).lower()
    return " ".join(word for word in text.split(" ") if len(word) > 3)
#Applying the custom cleaning function to the SMS text column
sms_raw["text"]= sms_raw["text"].apply(cleaningdata)
#Removing the Empty row
# NOTE(review): cleaningdata() yields "" (empty string), not " ", for rows
# with no surviving tokens — verify this filter actually drops them.
sms_raw = sms_raw.loc[sms_raw.text != " ",:]
#creating the predictors and target (column 1 = text, column 0 = label)
predictors = sms_raw.iloc[:,1]
target = sms_raw.iloc[:,0]
#Splitting the data (80/20 train/test)
from sklearn.model_selection import train_test_split
x_train,x_test,y_train,y_test = train_test_split(predictors, target, test_size = 0.2)
#Creating a matrix of token counts for the entire text document
def split_if_words(i):
    """Tokenizer for CountVectorizer: split a message on single spaces."""
    return i.split(" ")
#Convert sms text into word count matrix (bag of words)
# NOTE(review): the vectorizer and the TF-IDF transformer below are fitted on
# the FULL corpus (train + test), which leaks test vocabulary/IDF statistics
# into training — consider fitting on x_train only.
sms_count = CountVectorizer(analyzer = split_if_words).fit(sms_raw["text"])
#Applying the count matrix on entire sms data
sms_matrix = sms_count.transform(sms_raw["text"])
sms_matrix.shape
#For training data
train_matrix = sms_count.transform(x_train)
train_matrix.shape
#For test data
test_matrix = sms_count.transform(x_test)
test_matrix.shape
#TFIDF transformation on word count matrix
tfidf = TfidfTransformer().fit(sms_matrix)
#Applying Tfidf on train matrix data
train_tfidf = tfidf.transform(train_matrix)
train_tfidf.shape
#Applying Tfidf on test matrix data
test_tfidf = tfidf.transform(test_matrix)
test_tfidf.shape
#Multinomial Naive Bayes model (works directly on sparse TF-IDF input)
M_NB = MultinomialNB()
M_NB.fit(train_tfidf,y_train)
M_NB_train = M_NB.predict(train_tfidf)
M_NB_train_Accu = np.mean(M_NB_train == y_train)
M_NB_train_Accu #.9718
pd.crosstab(M_NB_train, y_train)
M_NB_test = M_NB.predict(test_tfidf)
M_NB_test_Accu = np.mean(M_NB_test == y_test)
M_NB_test_Accu #0.9559
pd.crosstab(M_NB_test, y_test)
#Gaussian naive bayes model (needs dense arrays, hence .toarray())
G_NB = GaussianNB()
G_NB.fit(train_tfidf.toarray(),y_train.values)
G_NB_train = G_NB.predict(train_tfidf.toarray())
G_NB_train_Accu = np.mean(G_NB_train == y_train)
G_NB_train_Accu #.8994
pd.crosstab(G_NB_train,y_train)
G_NB_test = G_NB.predict(test_tfidf.toarray())
accuracy_testgb_tfidf = np.mean(G_NB_test == y_test)
accuracy_testgb_tfidf #.8417
pd.crosstab(G_NB_test,y_test)
#Gaussian naive bayes model
# NOTE(review): this block is an exact duplicate of the Gaussian section above
# (same data, same model); it re-fits and overwrites the same variables, so it
# can almost certainly be deleted — confirm and remove.
G_NB = GaussianNB()
G_NB.fit(train_tfidf.toarray(),y_train.values)
G_NB_train = G_NB.predict(train_tfidf.toarray())
G_NB_train_Accu = np.mean(G_NB_train == y_train)
G_NB_train_Accu #.9060
pd.crosstab(G_NB_train,y_train)
G_NB_test = G_NB.predict(test_tfidf.toarray())
accuracy_testgb_tfidf = np.mean(G_NB_test == y_test)
accuracy_testgb_tfidf #.8417
pd.crosstab(G_NB_test,y_test)
#Bernoulli Naive Bayes Model (binarises the TF-IDF features internally)
# NOTE(review): the test-accuracy variable below reuses the Gaussian name
# (accuracy_testgb_tfidf) — rename for clarity.
B_NB = BernoulliNB()
B_NB.fit(train_tfidf.toarray(),y_train.values)
B_NB_train = B_NB.predict(train_tfidf.toarray())
B_NB_train_Accu = np.mean(B_NB_train == y_train)
B_NB_train_Accu #.9813
pd.crosstab(B_NB_train,y_train)
B_NB_test = B_NB.predict(test_tfidf.toarray())
accuracy_testgb_tfidf = np.mean(B_NB_test == y_test)
accuracy_testgb_tfidf #.9703
pd.crosstab(B_NB_test,y_test) | [
"70655245+Gopal-Data@users.noreply.github.com"
] | 70655245+Gopal-Data@users.noreply.github.com |
074a6d628a6afadd4503afdd629125f1a3ede4c6 | ac681706a3170c20a315f779c570195ae90cf7c2 | /SEPP_Project/Menu/migrations/0002_product_details_special.py | 1f547fabe6db5b13919211690ca33ae5a59f7aa6 | [] | no_license | Vrundan28/SEPP-Project | 8f0c635f9414b7eea464004c5e39367b5e7682d1 | 3daf29bb7deb214ef9cb5f73347207715d0d1835 | refs/heads/master | 2023-03-28T18:06:59.279931 | 2021-04-02T12:04:27 | 2021-04-02T12:04:27 | 329,265,454 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 415 | py | # Generated by Django 3.1.5 on 2021-03-09 17:13
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the `special` integer column to the Menu.product_details model.

    `default=0` is supplied only so existing rows can be backfilled during
    the migration; `preserve_default=False` removes the default from the
    field definition afterwards.
    """

    dependencies = [
        ('Menu', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='product_details',
            name='special',
            field=models.IntegerField(default=0),
            preserve_default=False,
        ),
    ]
| [
"jainil04trivedi@gmail.com"
] | jainil04trivedi@gmail.com |
7df41672480b20260359890a5b467b6b881f32cc | 2872aeb06424b381204ee60a970be154cfa15c07 | /day3/part2.py | 69c97b27edb891110178a7e1cc89c27717d87072 | [
"MIT"
] | permissive | jwplante/adventofcode2019 | 9d099abc65a6e9f8f6fb058ba56a31dc12d2d257 | bd1728f7a93651104cd5f42af813b3cb77940146 | refs/heads/master | 2020-09-22T13:10:05.874140 | 2019-12-17T19:35:12 | 2019-12-17T19:35:12 | 225,211,955 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,559 | py | class Point:
	# Constructor
	def __init__(self, x, y):
		# 2-D grid coordinates (integers in this puzzle).
		self.x = x
		self.y = y
def __str__(self):
return "Point(" + str(self.x) + ", " + str(self.y) + ")"
# Calculates the distance between two points
def distanceTo(self, p):
return abs(self.x - p.x) + abs(self.y - p.y)
# Generates a point a given distance away from itself
# given a direction string.
def pointFrom(self, string):
direction = string[0]
distance = int(string[1:])
if (direction == 'U'):
return Point(self.x, self.y + distance)
elif (direction == 'D'):
return Point(self.x, self.y - distance)
elif (direction == 'L'):
return Point(self.x - distance, self.y)
elif (direction == 'R'):
return Point(self.x + distance, self.y)
class Vector:
	# An axis-aligned wire segment.  `steps` is the cumulative wire length
	# (in grid units) from the wire's origin up to this segment's END point.
	# Constructor
	def __init__(self, startPt, mag_direc, prev_steps):
		self.startPt = startPt # Start point
		self.endPt = startPt.pointFrom(mag_direc) # End point
		self.isHorizontal = True
		self.mag_direc = mag_direc
		self.steps = prev_steps + int(mag_direc[1:])
		direc = mag_direc[0]
		if (direc == 'U' or direc == 'D'):
			self.isHorizontal = False
	def __str__(self):
		return "Vector(" + str(self.startPt) + ", " + str(self.endPt) + ", " + self.mag_direc + ", " + str(self.isHorizontal) +", " + str(self.steps) + ") "
	# Calculates if perpendicular vectors are intersecting
	# NOTE: Does not account for if vectors are part of same line
	def ifIntersecting(self, other):
		# Parallel segments (both horizontal or both vertical) never count as
		# intersecting; for perpendicular ones, check that the vertical
		# segment's x lies within the horizontal span and vice versa.
		if (self.isHorizontal == other.isHorizontal):
			return False
		else:
			if (self.isHorizontal):
				left_x = min(self.startPt.x, self.endPt.x)
				right_x = max(self.startPt.x, self.endPt.x)
				bottom_y = min(other.startPt.y, other.endPt.y)
				top_y = max(other.startPt.y, other.endPt.y)
				return (left_x <= other.startPt.x and right_x >= other.startPt.x and bottom_y <= self.startPt.y and top_y >= self.startPt.y)
			else:
				left_x = min(other.startPt.x, other.endPt.x)
				right_x = max(other.startPt.x, other.endPt.x)
				bottom_y = min(self.startPt.y, self.endPt.y)
				top_y = max(self.startPt.y, self.endPt.y)
				return (left_x <= self.startPt.x and right_x >= self.startPt.x and bottom_y <= other.startPt.y and top_y >= other.startPt.y)
	# Calculates the intersection point of two vectors
	def intersectionPoint(self, other):
		# Returns the crossing Point of two perpendicular segments, or None.
		if (self.ifIntersecting(other)):
			print(str(self) + " and " + str(other) + "intersect!")
			if (self.isHorizontal):
				return Point(other.startPt.x, self.startPt.y)
			else:
				return Point(self.startPt.x, other.startPt.y)
		else:
			return None
	# Gets the real cost to an intersection given current steps
	def realDistanceToIntersection(self, intersectionPoint):
		# steps counts up to endPt, so walk back by the distance from endPt
		# to the intersection to get the wire length at the crossing.
		return abs(self.steps - self.endPt.distanceTo(intersectionPoint))
def getVectorArray(wires):
	# Build the chain of Vector segments described by a comma-separated
	# direction string (e.g. "R8,U5,L5,D3"), tracking cumulative steps.
	segments = []
	steps_so_far = 0
	cursor = Point(0,0)
	for leg in wires.split(','):
		segment = Vector(cursor, leg, steps_so_far)
		print(segment)
		segments.append(segment)
		steps_so_far = segment.steps
		cursor = segment.endPt
	return segments
# Open and parse the file
f = open("input.txt", 'r')  # NOTE(review): handle is never closed — acceptable for a one-shot script
wires_1 = f.readline()
wires_2 = f.readline()
vector_list_1 = getVectorArray(wires_1)
vector_list_2 = getVectorArray(wires_2)
origin = Point(0,0)
intersection_array = []  # NOTE(review): never populated or read — confirm it can be removed
# Sentinel "infinity" values; any real crossing on this puzzle input is smaller.
least_distance = 999999
least_steps = 999999
# Compare every segment of wire 1 against every segment of wire 2.
for vector1 in vector_list_1:
	for vector2 in vector_list_2:
		intersectionPoint = vector1.intersectionPoint(vector2)
		# Intersection has been found
		if (intersectionPoint != None):
			print("at" + str(intersectionPoint))
			current_distance = origin.distanceTo(intersectionPoint)
			# Ignore the trivial self-crossing at the origin (distance 0).
			if (current_distance > 0):
				least_distance = min(least_distance, current_distance)
				least_steps = min(least_steps, vector1.realDistanceToIntersection(intersectionPoint) + vector2.realDistanceToIntersection(intersectionPoint))
print("Least distance is " + str(least_distance))
print("Least steps is " + str(least_steps))
| [
"jplante@wpi.edu"
] | jplante@wpi.edu |
cb0efe63e46a6d718265b4dce800debafedace6d | d276defb88bfca8e4619f94df70531bdd41d8c8d | /tests/test_binary_search.py | a72b95b33d8dd4eb5b2b8885216bad8c96ed7b47 | [] | no_license | ArturMalkov/Algorithms-in-Python | 037549d5fad981e34a4613951087c79ad6b7e279 | 043137d8622594f0ae3180264db6d38453877a4e | refs/heads/master | 2022-01-15T04:23:06.728621 | 2019-01-29T15:12:21 | 2019-01-29T15:12:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,226 | py | import random
from unittest import TestCase
from algorithms.search.binary_search import binary_search
class TestBinarySearch(TestCase):
    """Unit tests for binary_search over a sorted sequence.

    The fixture uses `range(0, length)`, so every element equals its own
    index — which is why the tests can compare the returned position
    directly against the searched value.
    """

    # NOTE(review): annotated as `list`, but setUp assigns a `range` object;
    # this works as long as binary_search only needs indexing and len(), yet
    # the annotation is misleading — confirm and align.
    arr: list

    def setUp(self):
        # Fresh, randomly sized sorted sequence before every test.
        length = random.randint(10, 1000)
        self.arr = range(0, length)

    def test_find_value_in_array(self):
        # Any value inside the range must be found at position == value.
        val = random.randint(0, len(self.arr) - 1)
        pos = binary_search(self.arr, val)
        self.assertEqual(
            val,
            pos,
            f'Result {pos} not equal to expected {val}'
        )

    def test_find_non_existent(self):
        # Negative values are never in range(0, n): expect None.
        val = random.randint(1, 100) * -1
        result = binary_search(self.arr, val)
        self.assertIsNone(
            result,
            f'Result {result} not None as expected'
        )

    def test_find_first_element(self):
        # Boundary case: first element.
        val = 0
        result = binary_search(self.arr, val)
        self.assertEqual(
            result,
            0,
            f'Result {result} is not 0 as expected'
        )

    def test_find_last_item(self):
        # Boundary case: last element.
        val = len(self.arr) - 1
        result = binary_search(self.arr, val)
        self.assertEqual(
            result,
            val,
            f'Result {result} is not {val} as expected'
        )
| [
"peter.j.ullrich@gmail.com"
] | peter.j.ullrich@gmail.com |
96b9713d9cbdcfaf580b86638d5ce9c0f08f5285 | 56f5b2ea36a2258b8ca21e2a3af9a5c7a9df3c6e | /CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/GluGluToHToTauTau_M-100_8TeV-powheg-pythia6/Summer12_DR53X-PU_S10_START53_V7C-v1/AODSIM/PAT_CMG_V5_16_0_1377467448/HTT_24Jul_newTES_manzoni_Up_Jobs/Job_4/run_cfg.py | d8c7fb4def9e957dabac9d51c5ec12ae8fa44d92 | [] | no_license | rmanzoni/HTT | 18e6b583f04c0a6ca10142d9da3dd4c850cddabc | a03b227073b2d4d8a2abe95367c014694588bf98 | refs/heads/master | 2016-09-06T05:55:52.602604 | 2014-02-20T16:35:34 | 2014-02-20T16:35:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,507 | py | import FWCore.ParameterSet.Config as cms
import os,sys
sys.path.append('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/GluGluToHToTauTau_M-100_8TeV-powheg-pythia6/Summer12_DR53X-PU_S10_START53_V7C-v1/AODSIM/PAT_CMG_V5_16_0_1377467448/HTT_24Jul_newTES_manzoni_Up_Jobs')
from base_cfg import *
process.source = cms.Source("PoolSource",
noEventSort = cms.untracked.bool(True),
inputCommands = cms.untracked.vstring('keep *',
'drop cmgStructuredPFJets_cmgStructuredPFJetSel__PAT'),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck'),
fileNames = cms.untracked.vstring('/store/cmst3/group/cmgtools/CMG/GluGluToHToTauTau_M-100_8TeV-powheg-pythia6/Summer12_DR53X-PU_S10_START53_V7C-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_23_1_Yum.root',
'/store/cmst3/group/cmgtools/CMG/GluGluToHToTauTau_M-100_8TeV-powheg-pythia6/Summer12_DR53X-PU_S10_START53_V7C-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_24_1_892.root',
'/store/cmst3/group/cmgtools/CMG/GluGluToHToTauTau_M-100_8TeV-powheg-pythia6/Summer12_DR53X-PU_S10_START53_V7C-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_25_1_9AW.root',
'/store/cmst3/group/cmgtools/CMG/GluGluToHToTauTau_M-100_8TeV-powheg-pythia6/Summer12_DR53X-PU_S10_START53_V7C-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_26_1_347.root',
'/store/cmst3/group/cmgtools/CMG/GluGluToHToTauTau_M-100_8TeV-powheg-pythia6/Summer12_DR53X-PU_S10_START53_V7C-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_27_1_dAe.root')
)
| [
"riccardo.manzoni@cern.ch"
] | riccardo.manzoni@cern.ch |
e62a5a241308dfd1cd33f0003ec928e127b1c4f6 | 59509a21a049f50dbd43e109c51cc1b35cef9b81 | /ros/src/twist_controller/twist_controller.py | 2eaeabeb5e7e46a33342b908e5766b27f37fdba2 | [] | no_license | cooloney/system-integration-project | d8667add91e7ea940ca2107fbe68b82070f22e1a | 48ae6ce8127294c602a48550ce51f5465ee19cb7 | refs/heads/master | 2021-03-27T09:11:09.840227 | 2018-01-08T15:49:36 | 2018-01-10T05:59:49 | 116,696,187 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,242 | py | from pid import PID
from yaw_controller import YawController
import rospy
GAS_DENSITY = 2.858 # this is in kg / gallon it seems
MPH_TO_MPS = 0.44704
MAX_STEERING = 0.43 # 25 degrees in radians
class Controller(object):
    """Drive-by-wire controller producing throttle, brake and steering commands.

    Wraps a PID controller for the throttle and a YawController for the
    steering.  `control()` is expected to be called at the DBW node's
    refresh rate with current and target twist values.
    (Cleanup: removed commented-out dead code from earlier iterations.)
    """

    def __init__(self, **kwargs):
        # Vehicle physical parameters and actuation limits, as passed in by
        # the DBW node.
        self.vehicle_mass = kwargs["vehicle_mass"]
        self.fuel_capacity = kwargs["fuel_capacity"]
        self.brake_deadband = kwargs["brake_deadband"]
        self.decel_limit = kwargs["decel_limit"]
        self.accel_limit = kwargs["accel_limit"]
        self.wheel_radius = kwargs["wheel_radius"]
        self.wheel_base = kwargs["wheel_base"]
        self.steer_ratio = kwargs["steer_ratio"]
        self.max_lat_accel = kwargs["max_lat_accel"]
        self.max_steer_angle = kwargs["max_steer_angle"]
        self.min_speed = kwargs["min_speed"]
        self.refresh_rate = kwargs["refresh_rate"]
        # Maximum speed comes from the waypoint updater's config (mph -> m/s).
        self.max_speed = rospy.get_param('/waypoint_updater/max_speed_mph', 10) * MPH_TO_MPS
        # Throttle PID: output clamped to the valid throttle range [0, 1].
        self.throttle_control = PID(1.0, 0.1, 0.0, mn=0.0, mx=1.0)
        # Steering PID clamped to +/- 25 degrees (MAX_STEERING, radians).
        # NOTE(review): only reset() touches this controller at the moment;
        # control() relies solely on the yaw controller for steering.
        self.angular_velocity_PID = PID(5.0, 0.1, 0.5, mn=-MAX_STEERING, mx=MAX_STEERING)
        self.yaw_controller = YawController(self.wheel_base, self.steer_ratio, self.min_speed, self.max_lat_accel, self.max_steer_angle)
        # Timestamp of the previous control() call; None until first invocation.
        self.timestamp = None

    def reset(self):
        """Clear the integrators of both PID controllers (e.g. when DBW toggles off)."""
        self.throttle_control.reset()
        self.angular_velocity_PID.reset()

    def control(self, current_linear_velocity, current_angular_velocity, target_linear_velocity, target_angular_velocity):
        """Compute (throttle, brake, steer) for one control cycle.

        Returns zeros on the very first call, which only initialises the
        internal timestamp used to measure the PID sample time.  Throttle is
        in [0, 1], brake is a torque (N*m), steer is in radians.
        """
        if self.timestamp is None:
            self.timestamp = rospy.get_time()
            return 0.0, 0.0, 0.0
        delta_time = rospy.get_time() - self.timestamp
        self.timestamp = rospy.get_time()
        # Total mass depends on remaining fuel; it does not change in the
        # simulator, but recomputing it each cycle is good practice.
        vehicle_mass = self.vehicle_mass + self.fuel_capacity * GAS_DENSITY
        # Desired acceleration, clamped to the configured accel/decel limits.
        acceleration = max(min((target_linear_velocity - current_linear_velocity), self.accel_limit), self.decel_limit)
        throttle = self.throttle_control.step(acceleration, delta_time)
        steer = self.yaw_controller.get_steering(target_linear_velocity, target_angular_velocity, current_linear_velocity)
        # Simple braking strategy: coast inside the deadband, brake beyond it.
        brake = 0
        if acceleration < 0:
            brake = 0
            throttle = 0
        if acceleration < -self.brake_deadband:
            throttle = 0
            # Brake torque = desired deceleration * mass * wheel radius,
            # negated because acceleration is negative here.
            brake = max(acceleration + self.brake_deadband, self.decel_limit) * vehicle_mass * self.wheel_radius * -1
        return throttle, brake, steer
| [
"bryan@xmotors.ai"
] | bryan@xmotors.ai |
bc1e4713048fff7e4cc96fdf6e7e0c26fb0e0f23 | ccf94dcb6b1500fcbbd56964ae8c4832a496b8b3 | /python/baiduads-sdk-auto/baiduads/dpacreativefeed/model/update_creative_feed_response_wrapper_body.py | b75f4e07354b58dd0fce557dbdad17c8b1b70fd0 | [
"Apache-2.0"
] | permissive | baidu/baiduads-sdk | 24c36b5cf3da9362ec5c8ecd417ff280421198ff | 176363de5e8a4e98aaca039e4300703c3964c1c7 | refs/heads/main | 2023-06-08T15:40:24.787863 | 2023-05-20T03:40:51 | 2023-05-20T03:40:51 | 446,718,177 | 16 | 11 | Apache-2.0 | 2023-06-02T05:19:40 | 2022-01-11T07:23:17 | Python | UTF-8 | Python | false | false | 11,241 | py | """
dev2 api schema
'dev2.baidu.com' api schema # noqa: E501
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from baiduads.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from baiduads.exceptions import ApiAttributeError
def lazy_import():
from baiduads.dpacreativefeed.model.dpa_creative_feed_type import DpaCreativeFeedType
globals()['DpaCreativeFeedType'] = DpaCreativeFeedType
class UpdateCreativeFeedResponseWrapperBody(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'data': ([DpaCreativeFeedType],), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'data': 'data', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""UpdateCreativeFeedResponseWrapperBody - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
data ([DpaCreativeFeedType]): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""UpdateCreativeFeedResponseWrapperBody - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
data ([DpaCreativeFeedType]): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
| [
"jiangyuan04@baidu.com"
] | jiangyuan04@baidu.com |
868b7ff049ad97d4c8b0eddbdb0bf75e9774f4a1 | 84ccc074405ff485f9a9f803358d3d08363e86b8 | /phylogenomics/generate_au_topologies.py | 38343ae3e2815b4ffea2310d503def513d7e7bb2 | [
"MIT"
] | permissive | maxemil/picozoa-scripts | dad98aba99eb2dd0804f5e553c7621b0531de255 | 89a4e961e9a9bdc298a7d73cc3f03122c4df4229 | refs/heads/master | 2023-04-09T20:19:19.787002 | 2021-08-17T13:44:23 | 2021-08-17T13:44:23 | 307,336,011 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,585 | py | import ete3
def get_ancestor(nodes):
    # Walk the node list, repeatedly widening the common ancestor.
    # NOTE(review): the first iteration asks nodes[0] for its common ancestor
    # with itself — verify ete3 tolerates a single-node query here.
    ancestor = nodes[0]
    for n in nodes:
        ancestor = ancestor.get_common_ancestor(n)
    return ancestor
def get_clade(tree, clades):
    # Collect every leaf whose name mentions any of the clade labels, then
    # return the smallest subtree containing all of them.
    prr = [l for l in tree.get_leaves() if any([c in l.name for c in clades])]
    return get_ancestor(prr)
def make_sisters(tree, clade1, clade2):
    # Detach clade1's subtree and re-graft it as the sister of clade2's
    # subtree.  Mutates `tree` in place.
    anc = get_clade(tree, clade1)
    anc_up = anc.up
    anc.detach()
    anc_up.delete()  # remove the now single-child internal node
    sis = get_clade(tree, clade2)
    sis_up = sis.up
    sis.detach()
    new_node = ete3.PhyloNode()
    sis_up.add_child(new_node)
    new_node.add_child(sis)
    new_node.add_child(anc)
topologies = [(['Picozoa'], ['Rhodelphis', 'Rhodophyta']),
(['Picozoa'], ['Rhodophyta']),
(['Picozoa'], ['Rhodelphis']),
(['Picozoa'], ['Viridiplantae', 'Glaucophyta']),
(['Picozoa'], ['Glaucophyta']),
(['Picozoa'], ['Viridiplantae']),
(['Picozoa'], ['Archaeplastida']),
(['Picozoa'], ['Telonemia']),
(['Picozoa'], ['Telonemia', 'Rhizaria', 'Stramenopila', 'Alveolata']),
(['Picozoa'], ['Cryptista']),
(['Picozoa', 'Cryptista'], ['Rhodophyta', 'Rhodelphis']),
(['Picozoa', 'Cryptista'], ['Rhodophyta']),
(['Picozoa', 'Cryptista'], ['Viridiplantae', 'Glaucophyta']),
(['Picozoa', 'Cryptista'], ['Glaucophyta']),
(['Picozoa', 'Cryptista'], ['Viridiplantae'])]
tree = ete3.PhyloTree("orig_topology.new", format=0)
with open('all_topologies2test.new', 'w') as out:
for c1, c2 in topologies:
# print("({}),({})".format(','.join([c for c in c1]), ','.join([c for c in c2])))
make_sisters(tree, c1, c2)
print(tree.write(format=9), file=out)
| [
"max-emil.schon@icm.uu.se"
] | max-emil.schon@icm.uu.se |
c126c43fda055ac72e8fe2b31e62891056509d59 | 8e7ab055201e6afd26a36686c9a806e41d34d099 | /Quiz/views.py | aaa8d25afe37a826705fa99e9c3955ed0afe6df1 | [] | no_license | grupo15quiz/QUIZ_CHACO | 98be3e56dbda81fc70652b666ce1d767eb7e94cb | 5430a655fb53d5993a2b9e3ded6c157e259676ec | refs/heads/main | 2023-07-14T21:35:04.881852 | 2021-09-08T18:35:51 | 2021-09-08T18:35:51 | 402,938,004 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,932 | py | from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth import authenticate, login, logout
from .forms import RegistroFormulario, UsuarioLoginFormulario
from .models import QuizUsuario, Pregunta, PreguntasRespondidas
def inicio(request):
context = {
"bienvenido": "Bienvenido"
}
return render(request, "inicio.html", context)
def HomeUsuario(request):
return render(request, "Usuario/home.html")
def tablero(request):
total_usuarios_quiz = QuizUsuario.objects.order.by("-puntaje_total")[:10]
contador = total_usuarios_quiz.count()
context = {
"usuario_quiz":total_usuarios_quiz,
"contar_user":contador
}
return render(request, "play/tablero.html", context)
def jugar(request):
QuizUser, created = QuizUsuario.objects.get_or_create(usuario=request.user)
if request.method == "POST":
pregunta_pk = request.POST.get("pregunta_pk")
pregunta_respondida = QuizUser.intentos.selec_related("pregunta").get(pregunta__pk=pregunta_pk)
respuesta_pk = request.POST.get("respuesta_pk")
try:
opcion_seleccionada = pregunta_respondida.pregunta.opciones.get(pk=respuesta_pk)
except ObjectDoesNotExist:
raise Http404
QuizUser.validar_intento(pregunta_respondida, opcion_seleccionada)
return redirect("resultado", pregunta_respondida.pk)
else:
pregunta = QuizUser.obtener_nuevas_preguntas()
if pregunta is not None:
QuizUser.crear_intentos(pregunta)
context = {
"pregunta": pregunta
}
return render(request,"play/jugar.html", context)
def resultado_pregunta(request, pregunta_respondida_pk):
respondida = get_object_or_404(PreguntasRespondidas, pk=pregunta_respondida_pk)
context = {
"respondida" : respondida
}
return render(request, "play/resultados.html", context)
def loginView(request):
titulo = "login"
form = UsuarioLoginFormulario(request.POST or None)
if form.is_valid():
username = form.cleaned_data.get("username")
password = form.cleaned_data.get("password")
usuario = authenticate(username=username,password=password)
login(request,usuario)
return redirect("HomeUsuario")
context = {
"form":form,
"titulo":titulo
}
return render(request,"Usuario/login.html", context)
def registro(request):
titulo = "Crear una Cuenta"
if request.method == "POST":
form = RegistroFormulario(request.POST)
if form.is_valid():
form.save()
return redirect("login")
else:
form = RegistroFormulario()
context = {
"form":form,
"titulo":titulo
}
return render(request,"Usuario/registro.html", context)
def logout_vista(request):
logout(request)
return redirect("/") | [
"carlaavaleria@gmail.com"
] | carlaavaleria@gmail.com |
c9a9244ebd51e60c2febd39b2b422a97a3369412 | 4152d8ee5c8fd172663938a70d595679078040f0 | /getter.py | de9cc397780f6513cb5bc1b3ab7ce97225810f71 | [] | no_license | jjapp/ibtrade | c6055afabb2852d7e05a3157aa0cc7027d3245fd | 574a4e7d8ea189eeb6366d25fc68d612ccea051b | refs/heads/master | 2021-01-19T00:43:57.680643 | 2016-11-11T23:57:53 | 2016-11-11T23:57:53 | 73,217,799 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 608 | py | # -*- coding: utf-8 -*-
'''
file to get and print real time quotes from interactive brokers
'''
from ib.opt import Connection
def print_message_from_ib(msg):
print(msg)
def main():
conn = Connection.create(port=7496, clientId=100)
conn.registerAll(print_message_from_ib)
conn.connect()
#In future blog posts, this is where we'll write code that actually does
#something useful, like place orders, get real-time prices, etc.
import time
time.sleep(1) #Simply to give the program time to print messages sent from IB
conn.disconnect()
if __name__ == "__main__": main()
| [
"appertjt@gmail.com"
] | appertjt@gmail.com |
5e58216533cbea36156578ada9e27bf29bc35781 | b51d62fff72dadb82a8aef037a2923e738d8399d | /ganttlogger/modules/WinObservePackages.py | 47941e9faece46e7f39b3c7170f90700a7bee469 | [
"MIT"
] | permissive | KagenoMoheji/GanttLogger | 58e3d96384b97e77192f07d99e49654b643ce558 | 2d7c88e1c48d56126904d14e780a2588c69336fc | refs/heads/master | 2022-01-18T06:25:12.793540 | 2019-08-07T16:11:33 | 2019-08-07T16:11:33 | 193,551,382 | 0 | 2 | null | 2019-08-07T16:11:34 | 2019-06-24T17:39:49 | Python | UTF-8 | Python | false | false | 3,399 | py | from datetime import datetime
import time
import psutil
import win32gui as wg
import win32process as wp
import ganttlogger.modules.Global as global_v
class ActiveTabObserver:
uuid = ""
data_process = None
def __init__(self, uuid, is_alone):
self.uuid = uuid
if is_alone:
self.data_process = self.enqueue_data
else:
self.data_process = self.send_json
def run(self):
try:
recent_active_tab_text = "START!"
while not global_v.is_sleeping:
try:
fw = wg.GetForegroundWindow()
active_pid = wp.GetWindowThreadProcessId(fw)[-1]
active_name = psutil.Process(active_pid).name()
active_tab_text = wg.GetWindowText(fw)
if recent_active_tab_text != active_tab_text.upper():
switched_time = datetime.now().strftime("%Y/%m/%d %H:%M:%S.%f")
recent_active_tab_text = active_tab_text.upper()
splitted_active_tab_text = active_tab_text.split(" - ")
if len(splitted_active_tab_text) > 1:
# Remove application name from tab text
active_tab_text = " - ".join(splitted_active_tab_text[:-1])
self.data_process(switched_time, active_name, active_tab_text)
# print("ActiveTab[{time}]: {pid}: {active_name}({tab_text})".format(
# time=switched_time,
# pid=active_pid,
# active_name=active_name,
# tab_text=active_tab_text))
time.sleep(0.001)
except (KeyError, ValueError, psutil.NoSuchProcess):
# If not in time to get pid
# print("Warning: Failed in getting process information")
continue
except KeyboardInterrupt:
continue
# Output the last log
switched_time = datetime.now().strftime("%Y/%m/%d %H:%M:%S.%f")
splitted_active_tab_text = active_tab_text.split(" - ")
if len(splitted_active_tab_text) > 1:
# Remove application name from tab text
active_tab_text = " - ".join(splitted_active_tab_text[:-1])
self.data_process(switched_time, active_name, active_tab_text)
except:
# If this thread stopped by rebooting from sleep, maybe...
import traceback
print("Thread loop exited by any problem!!!!")
global_v.is_threadloop_error = True
global_v.is_sleeping = True
traceback.print_exc()
def send_json(self, t, active_name, tab_text):
pass
def enqueue_data(self, t, active_name, tab_text):
global_v.tab_id += 1
global_v.tab_queue.append({
"uuid": self.uuid,
"type": "t",
"id": global_v.tab_id,
"activeName": active_name,
"tabText": tab_text,
"startTime": t
})
'''
# If neccesary to implement observer for each OS, implement below.
class MouseObserver:
pass
class KeyboardObserver:
pass
''' | [
"reeg.gold.vercerms@gmail.com"
] | reeg.gold.vercerms@gmail.com |
19656a2f3fca9bd207d78405f357aba554eddc54 | 4164f6b7908fe18892cf50693603d67ffeea88b8 | /pyWorkHorse.py | d46a26cdbca039912bcb21925f4e317eb26f64f3 | [] | no_license | cpaternostro/pyWorkHorse | c7cded1267a0b8dede4903ed839e843b9351ecb6 | b493dda31fa9442ab9d3ec19e2911ede7ec6624d | refs/heads/master | 2023-03-17T06:18:20.795669 | 2020-09-24T06:59:38 | 2020-09-24T06:59:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,278 | py | #!/usr/bin/python
#-*- coding: utf-8 -*-
import sys
import os
import struct as st
import argparse as ap
import datetime
from utils.pyGeneralClass import *
# convenience function reused for header, length, and checksum
def __nextLittleEndianUnsignedShort(file):
"""Get next little endian unsigned short from file"""
raw = file.read(2)
return (raw, st.unpack('<H', raw)[0])
# factored for readability
def __computeChecksum(header, length, ensemble):
"""Compute a checksum from header, length, and ensemble"""
cs = 0
for byte in header:
cs += byte
for byte in length:
cs += byte
for byte in ensemble:
cs += byte
return cs & 0xffff
#----------------------------------------
#- Date validation for input parameter -
#----------------------------------------
def valid_datetime_type(arg_datetime_str):
"""custom argparse type for user datetime values given from the command line"""
try:
return datetime.datetime.strptime(arg_datetime_str, "%d-%m-%Y %H:%M:%S.%f")
except ValueError:
msg = "Given Datetime ({0}) not valid! Expected format, 'dd-mm-YYYY hh:mm:ss.ss'!".format(arg_datetime_str)
raise ap.ArgumentTypeError(msg)
#----------------------------------------
#--- MAIN ---
#----------------------------------------
def main():
# Parameters management
parser = ap.ArgumentParser()
parser.add_argument('-i', '-infile',
dest='infile',
required=True,
help="ADCP file to read")
parser.add_argument('-o', '-outfile',
dest='outfile',
required=False,
default='./export-ADCP.txt',
help="ADCP file to write. Default is <export-ADCP.txt>")
parser.add_argument('-s', '--start-datetime',
dest='start_datetime',
type=valid_datetime_type,
default=None,
required=False,
help='start datetime in format "dd-mm-YYYY hh:mm:ss.ss"')
parser.add_argument('-e', '--end-datetime',
dest='end_datetime',
type=valid_datetime_type,
default=None,
required=False,
help='end datetime in format "dd-mm-YYYY hh:mm:ss.ss"')
parser.add_argument("-c", "--count",
dest='count',
type=int,
default=-1,
help="Number of element to read")
parser.add_argument("-size", "--size",
dest='size',
type=int,
default=0,
required=False,
help='split output file every <size> kilo bytes. Default=0 (not split)')
parser.add_argument("-sys", "--system",
dest='coordinatesystem',
default='BEAM',
required=False,
help='Coordinate system for velocities. Default: BEAM. Valid values: BEAM, INSTRUMENT, EARTH')
parser.add_argument("-b", "--binary",
dest='binary',
type=bool,
default=False,
help="Outputs in binary format for numpy use")
parser.add_argument("-d", "--data",
dest='data',
default='VEL,INT,PG,CORR',
help="Data output: Default: VEL,INT,PG,CORR. VEL: velocity, INT: intensity, PG: percent good, \
CORR: correlation. Choose a combination of data separated by a comma.")
args = parser.parse_args()
# Test validity of date time if given
if args.start_datetime != None and args.end_datetime != None:
if args.start_datetime > args.end_datetime:
msg = "Start date can not be after end date !"
raise ap.ArgumentTypeError(msg)
if args.start_datetime == None and args.end_datetime != None:
msg = "End date is given but start date is not given\n\tYou should provide both dates"
raise ap.ArgumentTypeError(msg)
# Test validity of ADCP file name
if not os.path.isfile(args.infile):
raise IOError('%s is not a valid file ADCP file name' % args.infile)
# Opening the ADCP file
try:
infile = open(args.infile,'rb')
except:
raise IOError('Unable to open file {}'.format(args.infile))
# Set default name of output file if needed
if args.outfile == './export-ADCP.txt':
args.outfile = './export-{}.{}'.format(args.infile.split('.')[0],'txt')
# Test and open output file
try:
if args.binary:
outfile = open(args.outfile, 'wb')
else:
outfile = open(args.outfile,'w')
except:
raise IOError('Unable to create file {}'.format(args.outfile))
# Test validity of coordinate system
if (args.coordinatesystem != 'BEAM') & (args.coordinatesystem != 'INSTRUMENT') & (args.coordinatesystem != 'EARTH'):
msg = 'Invalid coordinate system ({}). Valid value: BEAM, INSTRUMENT, EARTH'.format(args.coordinatesystem)
raise ap.ArgumentTypeError(msg)
coordSystem = args.coordinatesystem
# End of argument management
# Variable initiatilization
# Number of element written
elementCount = 0
# Control of the file size
outfileSize = 0
# Control of the file number (in case of multiple files)
fileCount = 0
# Retreive first element of interest, i.e. wave or current
firstCurrents, firstWaves = getFirstWavesCurrentsID(infile)
# get the starting point by throwing out unfound headers
# and selecting the minumum
firstEnsemble = min(filter(lambda x: x >= 0,(firstWaves,firstCurrents)))
#seeks to the first occurence of a waves or currents data
infile.seek(firstEnsemble-2)
# Get file information: file size
#fileSize = os.stat(args.infile).st_size
# Read header ID
rawHeader, header = __nextLittleEndianUnsignedShort(infile)
# loop through raw data
while (header == WAVESID) or (header == PD0HEADERID):
# print statistics
#sys.stdout.write("{:2.1}%\r".format(str((infile.tell()/fileSize)*100.0)))
#sys.stdout.flush()
# get ensemble length
rawLength, length = __nextLittleEndianUnsignedShort(infile)
# TODO: deal with wave data also
# actually just jump to next ensemble
if header == WAVESID:
print("Wave data found\n")
infile.seek(length+2,1)
continue
# read up to the checksum
rawEnsemble = infile.read(length-4)
# Read the current ensemble and get the data
re = readEnsemble(rawEnsemble)
re.readEnsembleData()
nbDataTypes = re.getEnsembleItem(0).GetNbDataTypes()
if nbDataTypes > 100:
raise IOError('Incorrect number of data types ({})'.format(nbDataTypes))
# Manage actions
if args.end_datetime != None and args.start_datetime != None:
if re.getEnsembleItem(2).getStartDateTime() > args.start_datetime and re.getEnsembleItem(2).getStartDateTime() < args.end_datetime :
if args.count != -1:
if elementCount < args.count:
# both dates and a count
outfile.write('{}'.format(re.write(coordSystem)))
elementCount = elementCount + 1
if args.size > 0:
outfileSize = outfile.tell()
else:
break
else:
# both dates only
outfile.write('{}'.format(re.write(coordSystem)))
# Stop just after end date
if re.getEnsembleItem(2).getStartDateTime() > args.end_datetime:
break
else:
if args.start_datetime != None:
if re.getEnsembleItem(2).getStartDateTime() > args.start_datetime:
if args.count != -1:
if elementCount < args.count:
# only start date and a count
outfile.write('{}'.format(re.write(coordSystem)))
elementCount = elementCount + 1
if args.size > 0:
outfileSize = outfile.tell()
else:
break
else:
# only start date
outfile.write('{}'.format(re.write(coordSystem)))
else:
if args.count != -1:
if elementCount < args.count:
# only count
outfile.write('{}'.format(re.write(coordSystem)))
elementCount = elementCount + 1
if args.size > 0:
outfileSize = outfile.tell()
else:
break
else:
# Total file
outfile.write('{}'.format(re.write(coordSystem)))
if args.size > 0:
outfileSize = outfile.tell()
# get checksum
rawChecksum, checksum = __nextLittleEndianUnsignedShort(infile)
computedChecksum = __computeChecksum(rawHeader, rawLength, rawEnsemble)
if checksum != computedChecksum:
print('Position:{}\tSize to read:{}'.format(hex(infile.tell()),length))
print('Checksum error\nChecksum:{}\tComputed:{}'.format(checksum,computedChecksum))
outfile.write('Checksum error::{}'.format(checksum))
#raise IOError('Checksum error\nChecksum:{}\tComputed:{}'.format(checksum,computedChecksum))
# TODO: Manage output file size here
if outfileSize >= (args.size*1000) and args.size > 0:
outfile.close()
try:
outfile = open('{}{}.{}'.format(args.outfile.split('.')[0],fileCount+1,args.outfile.split('.')[1]),'w')
fileCount += 1
except:
raise IOError('Unable to create file {}{}'.format(args.outfile,fileCount+1))
try:
rawHeader, header = __nextLittleEndianUnsignedShort(infile)
except st.error:
break
infile.close()
outfile.close()
if __name__== "__main__":
main()
| [
"emmanuel.poizot@lecnam.net"
] | emmanuel.poizot@lecnam.net |
060f125f2240b9ac37d4ccf9903b17cea4f5ac6f | 89bac5c002bbf76547066ded9446de50355b3771 | /PythonSource/executionnode/components/base/pc.py | 233bec1bdfa8bff9aa83ee54532de50a56756422 | [] | no_license | Zylanx/TIS-100_EvalKit | c74aaae885182a92831461ebc8b2c3d1ae7ed357 | 651f3628edc5cc9859ec225834ca1ff0973c91e3 | refs/heads/master | 2021-05-01T10:12:32.984351 | 2018-03-05T08:14:57 | 2018-03-05T08:14:57 | 121,105,542 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 677 | py | __author__ = 'Zylanx'
from myhdl import *
from executionnode.utils import PCControlInterface
@block
def PC(clk, rst, clkEnable, controlBits: PCControlInterface, dataLoad, pcOut):
""" PC (Program Counter).
inputs:
clk -- clock
rst -- reset
clkEnable -- clock enable
load -- load value into program counter
dataLoad -- data to load in
outputs:
pcOut -- PC output value
"""
count = Signal(modbv(0, 0, 16))
@always_comb
def outProc():
pcOut.next = count
@always_seq(clk.posedge, reset=rst)
def clkProc():
if clkEnable:
if controlBits.load:
count.next = dataLoad
elif controlBits.inc:
count.next = count + 1
return outProc, clkProc | [
"zylanx@users.noreply.github.com"
] | zylanx@users.noreply.github.com |
fc0385958dcc0472b849ea4e05c33d17a5e4d777 | 23a636ea83b936e3f8ae4e45c4602647b352c591 | /src/receive/receive_aplication.py | 20c86ed45b90672497016a5a206fcef5e2135d2b | [] | no_license | enricofd/camada_fisica_client_server | d319864df0a814a97325e69c14ce51f6f53a2d8c | a383ab7a16fab356bcd350b5e50965315451f714 | refs/heads/master | 2022-12-09T18:40:16.713114 | 2020-09-04T22:08:32 | 2020-09-04T22:08:32 | 292,954,702 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,674 | py | # -*- coding: utf-8 -*-
# Enrico F. D.
# 09/03/2020
# Insper - Camada Física da Computação
from src.lower_levels.enlace import *
import time
serial_name = "COM7"
def receive():
try:
com = Enlace(serial_name)
com.enable()
rx_buffer, time_sent = com.get_data()
time_sent_main_raw = [
str(int.from_bytes([x], byteorder="big")) for x in time_sent[0:10]
]
time_sent_mili_raw = [
str(int.from_bytes([x], byteorder="big")) for x in time_sent[10::]
]
separator = ""
time_sent = float(
separator.join(time_sent_main_raw)
+ "."
+ separator.join(time_sent_mili_raw)
)
total_time = round(time.time() - time_sent, 3)
print("\n-------------------------")
print("-------------------------\n")
path = input("Enter path for saving file: ")
f = open(path, "wb")
f.write(rx_buffer)
f.close()
print("\nReceived")
print("Port: " + serial_name)
print("Payload size: " + str(len(rx_buffer)) + " bytes")
print("Total elapsed time: " + str(total_time) + " seconds")
print(
"Total transfer speed: "
+ str(round((len(rx_buffer) + 20) / total_time, 3))
+ " bytes/seconds"
)
print("End of communication")
print("-------------------------")
print("-------------------------")
com.disable()
except:
print("Message not received, please try again")
print("-------------------------")
print("-------------------------")
com.disable()
| [
"noreply@github.com"
] | noreply@github.com |
2e41be46d93b69388225a8069b630b3049d72b40 | 3cf11edf4468545e72bdada3121367a586fc5774 | /CustomContent/settings.py | 8bea5c3934dd99aa9444a5148414be2ec68855dd | [] | no_license | SharmaLlama/CustomContentAggregator | 0e6cdaf35584e1b84b81fb1441b997551e093058 | c5307c48ab81f9a4020ae3c33da50b7c85cb356a | refs/heads/master | 2023-03-29T05:55:13.681695 | 2021-03-31T15:02:05 | 2021-03-31T15:02:05 | 352,039,886 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,164 | py | """
Django settings for CustomContent project.
Generated by 'django-admin startproject' using Django 2.2.5.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'o2mgj52jed(ffmxl@vmk4&8ua!=$lzr_em)xqbyh3wgu!$t)8b'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# own
'NewsData'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'CustomContent.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, "templates")],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'CustomContent.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
| [
"sharmautkarsh0504@gmail.com"
] | sharmautkarsh0504@gmail.com |
6429ff3a5cdd451090741ad95d4eb7c834662443 | 7ae0f100b49763f79b276260bbc0e87bd904da3e | /src/wdf/management/commands/prepare_dump.py | e65ea353701bb3108f1a5dec39c80cfd359756f9 | [] | no_license | wondersell/wildsearch-indexer | d88a5b3bce17acc1cb61d365f55ab5d9f63f61ae | 67d5f29f6d405c055cfa211ddf0b70521382a671 | refs/heads/master | 2023-07-19T00:33:34.371231 | 2020-12-31T11:20:00 | 2020-12-31T11:20:00 | 285,488,583 | 2 | 0 | null | 2021-07-19T06:26:44 | 2020-08-06T06:09:51 | Python | UTF-8 | Python | false | false | 1,339 | py | import logging
from django.core.management.base import BaseCommand
from wdf.exceptions import DumpStateError
from wdf.indexer import Indexer
from wdf.tasks import prepare_dump
class Command(BaseCommand):
help = 'Prepares job for importing' # noqa: VNE003
def add_arguments(self, parser):
parser.add_argument('job_id', type=str)
parser.add_argument('--chunk_size', type=int, default=5000, required=False)
parser.add_argument('--background', choices=['yes', 'no'], default='yes')
def handle(self, *args, **options):
console = logging.StreamHandler()
console.setLevel(logging.INFO)
console.setFormatter(logging.Formatter('[%(levelname)s] %(name)s: %(message)s'))
logger = logging.getLogger('')
logger.addHandler(console)
job_id = options['job_id']
if options['background'] == 'yes':
prepare_dump.delay(job_id=job_id)
self.stdout.write(self.style.SUCCESS(f'Job #{job_id} added to process queue for preparing'))
else:
try:
indexer = Indexer(get_chunk_size=options['chunk_size'])
indexer.prepare_dump(job_id=options['job_id'])
except DumpStateError as error:
self.stdout.write(self.style.ERROR(f'Job #{job_id} processing failed: {error}'))
| [
"artem.kiselev@gmail.com"
] | artem.kiselev@gmail.com |
5fc93e7f2f9831b87b87d703437ee1b48abb649a | 4348bd57a84f8cb25fa84595cc494f61ff78f56e | /bentoml/artifact/fastai_model_artifact.py | 4b0b5769b30f99356c8bf27fff13778d4fec339d | [
"Apache-2.0"
] | permissive | Korusuke/BentoML | 656db91e6e8ae61a24631a70add4ef8a52efab92 | 46267647e896006d436305f107422b7a96cdbec8 | refs/heads/master | 2022-12-02T03:03:45.449686 | 2020-07-22T05:35:59 | 2020-07-22T05:35:59 | 271,658,473 | 2 | 0 | Apache-2.0 | 2020-06-11T22:28:36 | 2020-06-11T22:28:36 | null | UTF-8 | Python | false | false | 4,027 | py | # Copyright 2019 Atalaya Tech, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import logging
from bentoml.artifact import BentoServiceArtifact
from bentoml.exceptions import MissingDependencyException, InvalidArgument
from bentoml.service_env import BentoServiceEnv
logger = logging.getLogger(__name__)
def _import_fastai_module():
try:
import fastai.basic_train
except ImportError:
raise MissingDependencyException(
"fastai package is required to use " "bentoml.artifacts.FastaiModelArtifact"
)
return fastai
class FastaiModelArtifact(BentoServiceArtifact):
"""Saving and Loading FastAI Model
Args:
name (str): Name for the fastai model
Raises:
MissingDependencyException: Require fastai package to use Fast ai model artifact
InvalidArgument: invalid argument type, model being packed must be instance of
fastai.basic_train.Learner
Example usage:
>>> from fastai.tabular import *
>>>
>>> # prepare data
>>> data = TabularList.from_df(...)
>>> learn = tabular_learner(data, ...)
>>> # train model
>>>
>>> import bentoml
>>> from bentoml.adapters import DataframeInput
>>> from bentoml.artifact import FastaiModelArtifact
>>>
>>> @bentoml.artifacts([FastaiModelArtifact('model')])
>>> @bentoml.env(auto_pip_dependencies=True)
>>> class FastaiModelService(bentoml.BentoService):
>>>
>>> @api(input=DataframeInput())
>>> def predict(self, df):
>>> results = []
>>> for _, row in df.iterrows():
>>> prediction = self.artifacts.model.predict(row)
>>> results.append(prediction[0].obj)
>>> return results
>>>
>>> svc = FastaiModelService()
>>>
>>> # Pack fastai basic_learner directly
>>> svc.pack('model', learn)
"""
def __init__(self, name):
super(FastaiModelArtifact, self).__init__(name)
self._file_name = name + '.pkl'
self._model = None
def _model_file_path(self, base_path):
return os.path.join(base_path, self._file_name)
def pack(self, model): # pylint:disable=arguments-differ
fastai_module = _import_fastai_module()
if not isinstance(model, fastai_module.basic_train.Learner):
raise InvalidArgument(
"Expect `model` argument to be `fastai.basic_train.Learner` instance"
)
self._model = model
return self
def load(self, path):
fastai_module = _import_fastai_module()
model = fastai_module.basic_train.load_learner(path, self._file_name)
return self.pack(model)
def set_dependencies(self, env: BentoServiceEnv):
logger.warning(
"BentoML by default does not include spacy and torchvision package when "
"using FastaiModelArtifact. To make sure BentoML bundle those packages if "
"they are required for your model, either import those packages in "
"BentoService definition file or manually add them via "
"`@env(pip_dependencies=['torchvision'])` when defining a BentoService"
)
env.add_pip_dependencies_if_missing(['torch', "fastai"])
def save(self, dst):
self._model.export(file=self._file_name)
shutil.copyfile(
os.path.join(self._model.path, self._file_name), self._model_file_path(dst),
)
def get(self):
return self._model
| [
"noreply@github.com"
] | noreply@github.com |
604de96287cbc0f5d49f5b3f1962d89231a0d7f7 | 08a843580249deff3475c0585ed8d132c813b697 | /src/oomugi/model.py | 5639bd6f4c3cdfe59e0f6e977019d53c6a6e23d3 | [] | no_license | umeykato/omg_instance_segmentation | 854603afadd657f93c4a12f74ef0c910ff9b0656 | ef7b484f7a6ae36f1b7ab36710939e47e0b7df8e | refs/heads/master | 2020-04-07T06:51:16.827300 | 2018-12-26T02:07:31 | 2018-12-26T02:07:31 | 158,152,905 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,723 | py | """
This implementation is based on following code:
https://github.com/milesial/Pytorch-UNet
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
class double_conv(nn.Module):
'''(conv => BN => ReLU) * 2'''
def __init__(self, in_ch, out_ch):
super(double_conv, self).__init__()
self.conv = nn.Sequential(
nn.Conv2d(in_ch, out_ch, 3, padding=1),
nn.BatchNorm2d(out_ch),
nn.ReLU(inplace=True),
nn.Conv2d(out_ch, out_ch, 3, padding=1),
nn.BatchNorm2d(out_ch),
nn.ReLU(inplace=True)
)
def forward(self, x):
x = self.conv(x)
return x
class inconv(nn.Module):
def __init__(self, in_ch, out_ch):
super(inconv, self).__init__()
self.conv = double_conv(in_ch, out_ch)
def forward(self, x):
x = self.conv(x)
return x
class down(nn.Module):
def __init__(self, in_ch, out_ch):
super(down, self).__init__()
self.mpconv = nn.Sequential(
nn.MaxPool2d(2),
double_conv(in_ch, out_ch)
)
def forward(self, x):
x = self.mpconv(x)
return x
class up(nn.Module):
def __init__(self, in_ch, out_ch, bilinear=True):
super(up, self).__init__()
# would be a nice idea if the upsampling could be learned too,
# but my machine do not have enough memory to handle all those weights
if bilinear:
self.up = nn.Upsample(scale_factor=2, mode='bilinear')
else:
self.up = nn.ConvTranspose2d(in_ch, out_ch, 2, stride=2)
self.conv = double_conv(in_ch, out_ch)
def forward(self, x1, x2):
x1 = self.up(x1)
diffX = x1.size()[2] - x2.size()[2]
diffY = x1.size()[3] - x2.size()[3]
x2 = F.pad(x2, (diffX // 2, int(diffX / 2),
diffY // 2, int(diffY / 2)))
x = torch.cat([x2, x1], dim=1)
x = self.conv(x)
return x
class outconv(nn.Module):
def __init__(self, in_ch, out_ch):
super(outconv, self).__init__()
# self.conv = nn.Conv2d(in_ch, out_ch, 1)
self.conv = nn.Sequential(
nn.Conv2d(in_ch, in_ch//2, 1),
nn.BatchNorm2d(in_ch//2),
nn.ReLU(inplace=True),
nn.Conv2d(in_ch//2, out_ch, 1),
)
def forward(self, x):
x = self.conv(x)
return x
class UNet(nn.Module):
    """U-Net with a shared encoder/decoder trunk and two prediction heads:
    a 2-channel semantic head and a 60-channel instance head."""

    def __init__(self):
        super(UNet, self).__init__()
        # Encoder (contracting path); input is a 3-channel image.
        # self.inc = inconv(1, 64)
        self.inc = inconv(3, 64)
        self.down1 = down(64, 128)
        self.down2 = down(128, 256)
        self.down3 = down(256, 512)
        self.down4 = down(512, 512)
        # Decoder (expanding path); input channels double due to skip concat.
        self.up1 = up(1024, 256)
        self.up2 = up(512, 128)
        self.up3 = up(256, 64)
        self.up4 = up(128, 64)
        # Task-specific output heads sharing the decoder features.
        self.sem_out = outconv(64, 2)
        # self.ins_out = outconv(64, 16)
        self.ins_out = outconv(64, 60)

    def forward(self, x):
        # Contracting path: keep every scale for the skip connections.
        x1 = self.inc(x)
        x2 = self.down1(x1)
        x3 = self.down2(x2)
        x4 = self.down3(x3)
        x5 = self.down4(x4)
        # Expanding path: fuse each level with its matching encoder map.
        feat = self.up1(x5, x4)
        feat = self.up2(feat, x3)
        feat = self.up3(feat, x2)
        feat = self.up4(feat, x1)
        # Two heads over the same decoder output.
        sem = self.sem_out(feat)
        ins = self.ins_out(feat)
        return sem, ins
| [
"y_kato@umelab.jp"
] | y_kato@umelab.jp |
1eee8f7e222c26b2fd3155eedee17fd766f05e79 | a483fa5309fde5a915308c51791c2d34786e2035 | /bin/forward_ivec_ln_lstmp.py | a81f085fb144716a221a1a2e48b1f826bc96df84 | [] | no_license | gunkisu/asr | 699d3ce14b68aea0dab2024461a122e4b8e769b4 | 0244e9c91d69f51df1251afda2a4739e7dd81ac7 | refs/heads/master | 2021-07-06T21:49:45.744659 | 2017-09-27T19:50:50 | 2017-09-27T19:50:50 | 105,699,109 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,179 | py | from __future__ import print_function
import pickle
import sys
import argparse
import theano
from theano import tensor as T
import numpy
from lasagne.layers import get_all_params, count_params
from lasagne.layers import get_output
from libs.lasagne_libs.utils import set_model_param_value
from models.gating_hyper_nets import deep_projection_ivector_ln_model_fix
from data.wsj.fuel_utils import get_feat_stream, get_uttid_stream
import kaldi_io
floatX = theano.config.floatX
input_dim = 123
output_dim = 3436
def add_params(parser):
    """Register every command-line option used by this script on *parser*.

    Registration order is preserved so the generated --help output and the
    positional-argument handling are unchanged.
    """
    option_table = [
        ('--batch_size', dict(default=16, help='batch size', type=int)),
        ('--num_conds', dict(default=1, help='number of hidden units', type=int)),
        ('--num_layers', dict(default=3, help='number of hidden units', type=int)),
        ('--num_units', dict(default=512, help='number of hidden units', type=int)),
        ('--num_factors', dict(default=64, help='number of factors', type=int)),
        ('--learn_rate', dict(default=0.001, help='learning rate', type=float)),
        ('--grad_clipping', dict(default=1.0, help='gradient clipping', type=float)),
        ('--dropout', dict(default=0.2, help='dropout', type=float)),
        ('--data_path', dict(help='data path', default='/u/songinch/song/data/speech/wsj_fbank123.h5')),
        ('--save_path', dict(help='save path', default='./')),
        ('--num_epochs', dict(help='number of epochs', default=50, type=int)),
        ('--updater', dict(help='sgd or momentum', default='momentum')),
        ('--train_disp_freq', dict(help='how ferquently to display progress', default=100, type=int)),
        ('--feat_reg', dict(default=0.0, help='feat_reg', type=float)),
        ('--train_dataset', dict(help='dataset for training', default='train_si284')),
        ('--valid_dataset', dict(help='dataset for validation', default='test_dev93')),
        ('--test_dataset', dict(help='dataset for test', default='test_eval92')),
        ('--reload_model', dict(help='model path to load')),
        ('--tmpdir', dict(help='directory name in the /Tmp directory to save data locally',
                          default='/Tmp/taesup/data/speech')),
        ('--no-copy', dict(help='do not copy data from NFS to local machine', action='store_true')),
        ('--model', dict(default=None)),
        ('--dataset', dict(default='test_eval92')),
    ]
    for flag, kwargs in option_table:
        parser.add_argument(flag, **kwargs)
    # Positional argument: the Kaldi output wspecifier for the matrix writer.
    parser.add_argument('wxfilename')
def get_arg_parser():
    """Build the ArgumentParser for this script (defaults shown in --help)."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    add_params(parser)
    return parser
def ff(network, input_data, input_cond, input_mask):
    """Compile a Theano function mapping the three inputs to log-softmax scores.

    The output is the network's deterministic output turned into log
    probabilities over the last axis, computed in a numerically stable way
    (subtract the row max before exponentiating).
    """
    scores = get_output(network, deterministic=True)
    scores = scores - T.max(scores, axis=-1, keepdims=True)
    log_probs = scores - T.log(T.sum(T.exp(scores), axis=-1, keepdims=True))
    return theano.function(inputs=[input_data, input_cond, input_mask],
                           outputs=[log_probs])
if __name__ == '__main__':
    # Parse CLI options and log them to stderr (stdout may be piped to Kaldi).
    parser = get_arg_parser()
    args = parser.parse_args()
    print(args, file=sys.stderr)
    # Symbolic Theano inputs: features, i-vector condition, and frame mask.
    input_data = T.ftensor3('input_data')
    input_cond = T.ftensor3('input_cond')
    input_mask = T.fmatrix('input_mask')
    # Build the LN-LSTMP network conditioned on i-vectors; the helper returns
    # a tuple and the output layer is its first element.
    network = deep_projection_ivector_ln_model_fix(input_var=input_data,
                                                   cond_var=input_cond,
                                                   mask_var=input_mask,
                                                   num_inputs=input_dim,
                                                   num_outputs=output_dim,
                                                   num_conds=args.num_conds,
                                                   num_layers=args.num_layers,
                                                   num_factors=args.num_factors,
                                                   num_units=args.num_units,
                                                   grad_clipping=args.grad_clipping,
                                                   dropout=args.dropout)[0]
    network_params = get_all_params(network, trainable=True)
    param_count = count_params(network, trainable=True)
    print('Number of parameters of the network: {:.2f}M'.format(float(param_count) / 1000000), file=sys.stderr)
    print('Loading Parameters...', file=sys.stderr)
    # Forwarding requires a trained model; restore its weights or abort.
    if args.model:
        with open(args.model, 'rb') as f:
            [pretrain_network_params_val,
             pretrain_update_params_val,
             pretrain_total_epoch_cnt] = pickle.load(f)
            set_model_param_value(network_params, pretrain_network_params_val)
    else:
        print('Must specfiy network to load', file=sys.stderr)
        sys.exit(1)
    # Compile the log-softmax feed-forward function once.
    ff_fn = ff(network, input_data, input_cond, input_mask)
    # Parallel streams: one yields (feats, mask, ivectors, ...), the other the
    # matching utterance ids, batched identically.
    test_datastream = get_feat_stream(path=args.data_path,
                                      which_set=args.dataset,
                                      batch_size=args.batch_size,
                                      use_ivectors=True)
    uttid_datastream = get_uttid_stream(path=args.data_path,
                                        which_set=args.dataset,
                                        batch_size=args.batch_size)
    writer = kaldi_io.BaseFloatMatrixWriter(args.wxfilename)
    for batch_idx, (feat_batch, uttid_batch) in enumerate(zip(test_datastream.get_epoch_iterator(),
                                                              uttid_datastream.get_epoch_iterator())):
        # NOTE: these rebind the symbolic-variable names above with numpy
        # arrays; harmless here because ff_fn was already compiled.
        input_data, input_mask, input_cond, _ = feat_batch
        feat_lens = input_mask.sum(axis=1)
        print('Feed-forwarding...', file=sys.stderr)
        net_output = ff_fn(input_data, input_cond, input_mask)
        print('Writing outputs...', file=sys.stderr)
        # Trim each padded utterance back to its true frame count before writing.
        for out_idx, (output, uttid) in enumerate(zip(net_output[0], uttid_batch[0])):
            valid_len = int(feat_lens[out_idx])
            writer.write(uttid.encode('ascii'), output[:valid_len])
    writer.close()
| [
"taesup.kim@umontreal.ca"
] | taesup.kim@umontreal.ca |
d1e8d70b961b1be945693a91169e369f2646ef5b | ac216a2cc36f91625e440247986ead2cd8cce350 | /appengine/findit/pipelines/test/send_notification_for_culprit_pipeline_test.py | 511524ebf3afcb0224df7cc05d4923d14340ae07 | [
"BSD-3-Clause"
] | permissive | xinghun61/infra | b77cdc566d9a63c5d97f9e30e8d589982b1678ab | b5d4783f99461438ca9e6a477535617fadab6ba3 | refs/heads/master | 2023-01-12T21:36:49.360274 | 2019-10-01T18:09:22 | 2019-10-01T18:09:22 | 212,168,656 | 2 | 1 | BSD-3-Clause | 2023-01-07T10:18:03 | 2019-10-01T18:22:44 | Python | UTF-8 | Python | false | false | 1,037 | py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import mock
from common.waterfall import failure_type
from pipelines.send_notification_for_culprit_pipeline import (
SendNotificationForCulpritPipeline)
from services import constants
from services import culprit_action
from services.parameters import SendNotificationForCulpritParameters
from waterfall.test import wf_testcase
class SendNotificationForCulpritPipelineTest(wf_testcase.WaterfallTestCase):
  """Tests that the pipeline relays the culprit-notification result."""

  @mock.patch.object(
      culprit_action, 'SendNotificationForCulprit', return_value=True)
  def testSendNotification(self, _):
    # With the service call mocked to succeed, run() should report True.
    params = SendNotificationForCulpritParameters(
        cl_key='mockurlsafekey',
        force_notify=True,
        revert_status=constants.CREATED_BY_SHERIFF,
        failure_type=failure_type.COMPILE)
    self.assertTrue(SendNotificationForCulpritPipeline(params).run(params))
| [
"commit-bot@chromium.org"
] | commit-bot@chromium.org |
3624b72aec7c47907590fc95a3e60425fa317756 | 6d0b699ccd256917999a75b1ea082f413528d607 | /Day 9: Recursion 3 | 3ea7c157fd6c8faf6f9753668a9bdd878e25234f | [] | no_license | usatya99/30-Days-of-Code | 80da412cee4e121392d8afe356cc9c09411c87b6 | d9a0d656d1b8c8c6475f776520d039d5174645dc | refs/heads/master | 2022-11-26T01:03:59.984712 | 2020-08-02T06:11:50 | 2020-08-02T06:11:50 | 277,282,049 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 374 | #!/bin/python3
import math
import os
import random
import re
import sys
# Complete the factorial function below.
def factorial(n):
    """Return n! for a non-negative integer n (0! == 1).

    Uses math.factorial, the exact, C-implemented stdlib routine, instead of
    the previous hand-rolled accumulator loop; behavior is identical for all
    non-negative inputs (the file already imports math).
    """
    return math.factorial(n)
if __name__ == '__main__':
    # HackerRank harness: read n from stdin, write n! to the grader's file.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    n = int(input())
    result = factorial(n)
    fptr.write(str(result) + '\n')
    fptr.close()
| [
"noreply@github.com"
] | noreply@github.com | |
b8ecdf70b95876d080b65ecd3955f70116dff885 | 002b6fc2b459fe2b63c782aef80f3e1f851c9b39 | /myTeam2.py | 526a5d78a5eb746bdc0e2d5682236b728b19050c | [] | no_license | EricMLee/pacman | 0e4b2f0c2b368bab779b6786a0398adbc6a9220c | ef1f2d46c5e0bbd6ccab3fbab1d37fe18c539202 | refs/heads/master | 2023-03-31T20:29:37.494332 | 2021-04-08T04:13:01 | 2021-04-08T04:13:01 | 343,594,710 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,614 | py | from pacai.util import reflection
from pacai.core.directions import Directions
from pacai.util import counter
from pacai.agents.capture.reflex import ReflexCaptureAgent
from pacai.agents.capture.capture import CaptureAgent
from pacai.core import distanceCalculator
import random
import math
def createTeam(firstIndex, secondIndex, isRed,
        first = 'pacai.student.team_TeamHardcode',
        second = 'pacai.student.team_TeamHardcode'):
    """
    Return the two agents forming the capture team: an attacker built from
    `firstIndex` and a defender built from `secondIndex`.

    `isRed` is True when the red team is being created.  The `first`/`second`
    keyword arguments are accepted for framework compatibility, but the agent
    classes are hard-coded here.
    """
    return [AttackAgent(firstIndex), DefensiveAgent(secondIndex)]
class AttackAgent(ReflexCaptureAgent):
    """Offensive agent: crosses the border toward a rotating midpoint target,
    then chases reachable food/capsules while avoiding nearby defenders.

    NOTE(review): almost all per-game state is kept in module-level globals
    (shared with DefensiveAgent's module), not on the instance — the class
    attributes below appear unused in favor of the globals. Two AttackAgents
    in one process would clobber each other's state; confirm before reuse.
    """

    # Class attributes; the code below actually uses same-named module globals.
    midpointTiles = []
    targetTile = 0
    badTile = 0
    maxDistance = 0
    pacmanCounter = 0
    totalFood = 0
    totalCapsules = 0
    def __init__(self, index, **kwargs):
        super().__init__(index)
        # Declare (and partially reset) the module-level globals this agent uses.
        global midpointTiles2
        global targetTile
        global badTile
        badTile = 0
        global maxDistance
        global pacmanCounter
        global totalFood
        global totalCapsules
    def chooseAction(self, gameState):
        """
        Picks among the actions with the highest return from `ReflexCaptureAgent.evaluate`.
        """
        global midpointTiles2
        global badTile
        global pacmanCounter
        global targetTile
        actions = gameState.getLegalActions(self.index)
        myState = gameState.getAgentState(self.index)
        # Track how long we've been on the offensive (pacman) side.
        if myState.isPacman():
            pacmanCounter += 1
        else:
            pacmanCounter = 0
            badTile += 1
        # Every 20 ghost-side turns, re-pick the border target: sample a few
        # random midpoint tiles and keep the one farthest from the current target.
        if badTile % 20 == 0:
            temp = midpointTiles2.index(random.choice(midpointTiles2))
            temp2 = midpointTiles2.index(random.choice(midpointTiles2))
            for i in range(1,3):
                if abs(targetTile - temp) < abs(targetTile - temp2):
                    temp = temp2
                    temp2 = midpointTiles2.index(random.choice(midpointTiles2))
            targetTile = temp
        # Split visible opponents into attackers (pacmen) and defenders (ghosts).
        global oldDefenders
        enemies = self.getOpponents(gameState)
        oldDefenders = []
        oldAttackers = []
        for enemy in enemies:
            if gameState.getAgentState(enemy).isPacman():
                oldAttackers.append(gameState.getAgentState(enemy))
            else:
                oldDefenders.append(gameState.getAgentState(enemy))
        global defenderID
        if len(oldDefenders) != 0:
            defenderID = oldDefenders[0]
        # *** Gamestate Ghost Distance ***
        # Cache the pre-move closest-defender distance for getFeatures to compare.
        global oldClosestDefender
        global myOldPos
        myOldPos = gameState.getAgentState(self.index).getPosition()
        oldClosestDefender = float("inf")
        for defender in oldDefenders:
            defenderDistance = self.getMazeDistance(defender.getPosition(), myOldPos)
            if defenderDistance < oldClosestDefender:
                oldClosestDefender = defenderDistance
        if len(oldDefenders) == 0:
            oldClosestDefender = 0
        # *** Gamestate Food ***
        # "Gettable" food = food at least as close to us as to the defender.
        global oldTotalFoodDistance
        global oldClosestFood
        global oldFoodList
        global oldGettableFood
        oldTotalFoodDistance = 0
        oldClosestFood = float("inf")
        oldFoodList = self.getFood(gameState).asList()
        oldGettableFood = float("inf")
        for food in oldFoodList:
            currentFoodDistance = self.getMazeDistance(myOldPos, food)
            oldTotalFoodDistance += currentFoodDistance
            if len(oldDefenders) != 0:
                defendFood = self.getMazeDistance(defenderID.getPosition(), food)
                if currentFoodDistance < oldClosestFood and defendFood >= currentFoodDistance:
                    oldGettableFood = currentFoodDistance
            if currentFoodDistance < oldClosestFood:
                oldGettableFood = currentFoodDistance
                oldClosestFood = currentFoodDistance
        # *** Gamestate Capsules ***
        global oldNumCapsules
        global oldClosestCapsule
        oldNumCapsules = self.getCapsules(gameState)
        oldClosestCapsule = float('inf')
        for capsule in oldNumCapsules:
            oldCapsuleDistance = self.getMazeDistance(myOldPos, capsule)
            if oldCapsuleDistance < oldClosestCapsule:
                oldClosestCapsule = oldCapsuleDistance
        if oldClosestCapsule == float('inf'):
            oldClosestCapsule = 0
        # Pick the move via a depth-1 alpha-beta search over our evaluation.
        return self.minimax(gameState, 1)
    def legalActions(self, state, agent):
        """Return `agent`'s legal actions with STOP removed."""
        actions = state.getLegalActions(agent)
        if Directions.STOP in actions:
            actions.remove(Directions.STOP)
        return actions
    def maximum(self, state, depth, alpha, beta):
        """Alpha-beta max node: our agent moves; leaves score via evaluate()."""
        if state.isWin():
            return float("inf")
        if state.isLose():
            return -float("inf")
        if depth == 0:
            return self.evaluate(state, Directions.STOP)
        maximumValue = - float("inf")
        for action in self.legalActions(state, self.index):
            successor = state.generateSuccessor(self.index, action)
            opponents = self.getOpponents(state)
            current = self.minimum(successor, depth, alpha, beta, opponents, 0)
            maximumValue = max(maximumValue, current)
            alpha = max(alpha, current)
            if beta <= alpha:
                break
        return maximumValue
    def minimum(self, state, depth, alpha, beta, ghost, index):
        """Alpha-beta min node: opponent `ghost[index]` moves; the last
        opponent hands control back to `maximum` with depth - 1."""
        if state.isWin():
            return float("inf")
        if state.isLose():
            return -float("inf")
        if depth == 0:
            return self.evaluate(state, Directions.STOP)
        minimumValue = float("inf")
        actions = self.legalActions(state, ghost[index])
        for action in actions:
            successor = state.generateSuccessor(ghost[index], action)
            if index == len(ghost) - 1:
                current = self.maximum(successor, depth - 1, alpha, beta)
            else:
                current = self.minimum(successor, depth, alpha, beta, ghost, index + 1)
            minimumValue = min(minimumValue, current)
            beta = min(beta, current)
            if beta <= alpha:
                break
        return minimumValue
    def minimax(self, state, depth):
        """Root of the alpha-beta search: return the best action for us."""
        # initialize the first action to STOP
        # initialize score negative infinity
        # get all the legal moves for given state
        Action = Directions.STOP
        Score = - float("inf")
        actions = self.legalActions(state, self.index)
        # go through all the legal actions
        for action in actions:
            # get the successor state for the agent after the given action is
            # taken
            successor = state.generateSuccessor(self.index, action)
            # apply the maximum function with the given successor state
            # and the depth given initially, and negative and positive infinity
            score = self.maximum(successor, depth, - float("inf"), float("inf"))
            # if this newly calculated score is larger than the initial
            # score, make the action in this loop the new Action and
            # update the new score into Score
            if score > Score:
                Action = action
                Score = score
        return Action
    def registerInitialState(self, gameState):
        """Precompute maze distances and the list of friendly-side border
        tiles (`midpointTiles2`) that the agent targets when returning home."""
        self.red = gameState.isOnRedTeam(self.index)
        self.distancer = distanceCalculator.Distancer(gameState.getInitialLayout())
        self.distancer.getMazeDistances()
        abc = gameState.getInitialLayout()
        width = abc.getWidth()
        height = abc.getHeight()
        midpoint = int(width/2)
        global midpointTiles2
        global targetTile
        global maxDistance
        global totalCapsules
        global totalFood
        totalFood = len(self.getFood(gameState).asList())
        totalCapsules = len(self.getCapsules(gameState))
        midpointTiles2 = []
        # Column offsets depend on which side of the board we own.
        redPenalty = 1
        redPenalty2 = -1
        if self.red:
            redPenalty = -1
            redPenalty2 = -1
        longestPath = 0
        shortestPath = float("inf")
        # Keep border tiles that are open on both sides of the midline, and
        # record the longest distance from our start to any of them.
        for i in range(1, height - 1):
            if not abc.isWall((midpoint + redPenalty2, i)) and not abc.isWall((midpoint + redPenalty2 + (redPenalty*-1), i)):
                midpointTiles2.append(((midpoint + redPenalty2), i))
                temp = self.getMazeDistance((midpoint + redPenalty2, i), gameState.getAgentState(self.index).getPosition())
                if shortestPath > temp:
                    shortestPath = temp
                    targetTile = len(midpointTiles2) - 1
                if longestPath < temp:
                    maxDistance = temp
        # print("Initial Target Tile",targetTile)
        targetTile = 0
    def aStarSearch(problem, heuristic):
        """
        Search the node that has the lowest combined cost and heuristic first.
        """
        # NOTE(review): this method is missing `self` and references
        # `PriorityQueue`, which is not imported in this module — calling it
        # would raise NameError. It appears to be unused dead code; confirm
        # before removing.
        # *** Your Code Here ***
        visited_nodes = []
        # using priority queue again
        created_priorityQueue = PriorityQueue()
        # cost and heuristics
        created_priorityQueue.push((problem.startingState(), [], 0),
            heuristic(problem.startingState(), problem))
        while not created_priorityQueue.isEmpty():
            state, actions, cost = created_priorityQueue.pop()
            if problem.isGoal(state):
                return actions
            if state in visited_nodes:
                continue
            visited_nodes.append(state)
            Successors = problem.successorStates(state)
            for s in Successors:
                # taking the cost and heuristics into account while pushing
                created_priorityQueue.push((s[0], actions + [s[1]], cost + s[2]),
                    s[2] + cost + heuristic(s[0], problem))
        return None
    def getFeatures(self, gameState, action):
        """Feature vector for `action`, comparing the successor state against
        the pre-move snapshot cached in module globals by chooseAction()."""
        features = counter.Counter()
        successor = self.getSuccessor(gameState, action)
        myState = successor.getAgentState(self.index)
        myPos = myState.getPosition()
        global midpointTiles2
        global targetTile
        global pacmanCounter
        global totalFood
        global totalCapsules
        global oldDefenders
        global oldClosestDefender
        global myOldPos
        global oldTotalFoodDistance
        global oldClosestFood
        global oldFoodList
        global oldGettableFood
        global oldNumCapsules
        global oldClosestCapsule
        global defenderID
        global targetTile
        # if abs(midpointTiles2[targetTile][0] - myPos[0]) > 4:
        # features['isGhost'] = abs(midpointTiles2[targetTile][0] - myPos[0])
        # NOTE(review): `myState.isGhost` is not called here (truthy bound
        # method, always True) — presumably `isGhost()` was intended; confirm.
        if myState.isGhost:
            features['newPath'] = self.getMazeDistance(midpointTiles2[targetTile], myPos)
        else:
            features['isPacman'] = 1
        enemies = self.getOpponents(successor)
        defenders = []
        attackers = []
        for enemy in enemies:
            if successor.getAgentState(enemy).isPacman():
                attackers.append(successor.getAgentState(enemy))
            else:
                defenders.append(successor.getAgentState(enemy))
        # features['newPath'] = self.getMazeDistance(midpointTiles2[targetTile], myPos)
        # *** Successor Ghost Distance ***
        closestDefender = float("inf")
        for defender in defenders:
            defenderDistance = self.getMazeDistance(defender.getPosition(), myPos)
            if defenderDistance < closestDefender:
                closestDefender = defenderDistance
                defenderID = defender
        if len(defenders) == 0:
            closestDefender = 0
        # *** Ghost Comparing ***
        if closestDefender > 5 and myState.isGhost():
            features['isGhost'] = 1
        if closestDefender <= 5:
            features['closestGhost'] = closestDefender
        if closestDefender <= 1:
            if not defenderID.isScared():
                features['tooClose'] = 1
        # *** Successor Food ***
        totalFoodDistance = 0
        closestFood = float("inf")
        foodList = self.getFood(successor).asList()
        gettableFood = float("inf")
        for food in foodList:
            currentFoodDistance = self.getMazeDistance(myPos, food)
            totalFoodDistance += currentFoodDistance
            if len(defenders) != 0:
                defendFood = self.getMazeDistance(defenderID.getPosition(), food)
                if currentFoodDistance < closestFood and defendFood >= currentFoodDistance:
                    gettableFood = currentFoodDistance
            if currentFoodDistance < closestFood:
                closestFood = currentFoodDistance
                gettableFood = currentFoodDistance
        # *** Food Comparing ***
        # Same food count => we didn't eat; reward closing in on reachable food.
        if len(oldFoodList) - len(foodList) == 0:
            if gettableFood < oldGettableFood:
                features['gotCloserToGettableFood'] = 1
            elif closestFood < oldClosestFood:
                features['ghostDistance'] = closestDefender
                # features['gotCloserToFood'] = 1
        else:
            features['eatFood'] = 1
        # *** Successor Capsules ***
        closestCapsuleID = 0
        numCapsules = self.getCapsules(successor)
        closestCapsule = float('inf')
        for capsule in numCapsules:
            currentCapsuleDistance = self.getMazeDistance(myPos, capsule)
            if len(defenders) != 0:
                defendCapsule = self.getMazeDistance(defenderID.getPosition(), capsule)
                if currentCapsuleDistance < closestCapsule and defendCapsule > currentCapsuleDistance:
                    closestCapsule = currentCapsuleDistance
        # *** Capsules Comparing ***
        if len(oldNumCapsules) > len(numCapsules):
            features['gotCapsule'] = 1
        if closestCapsule < oldClosestCapsule:
            features['gotCloserToCapsule'] = 1
        if closestCapsule == float('inf'):
            closestCapsule = 0
        return features
    def getWeights(self, gameState, action):
        """Hand-tuned weights combined linearly with getFeatures()."""
        return {
            'newPath': -1000,
            'tooClose': -100,
            'isGhost': -10000,
            'isPacman': 0,
            'closestGhost': 100,
            'gotCloserToGettableFood': 200,
            'eatFood': 1000,
            'gotCapsule': 20000,
            'gotCloserToCapsule': 1500,
            'gotCloserToFood': 50,
            'ghostDistance': 5000
        }
class DefensiveAgent(ReflexCaptureAgent):
    """Defensive agent: patrols a precomputed set of near-border tiles
    (`midpointTiles`), chases visible invaders, and exploits precomputed
    dead-end cells (`deadends`) to trap attackers.

    NOTE(review): like AttackAgent, per-game state lives in module-level
    globals shared across instances; confirm before running two of these in
    one process.
    """

    def __init__(self, index, **kwargs):
        # Declare/reset the module-level globals used by this agent.
        global test
        test = 0
        global assumedAttacker
        assumedAttacker = 0
        global tracker
        global midpointTiles
        midpointTiles = []
        global invader
        global deadends
        # tracker = [0, 0, 0, 0]
        super().__init__(index)
    def chooseAction(self, gameState):
        """
        Picks among the actions with the highest return from `ReflexCaptureAgent.evaluate`.
        """
        # Record whether any opponent is currently invading our side.
        enemies = self.getOpponents(gameState)
        invaders = []
        for enemy in enemies:
            if gameState.getAgentState(enemy).isPacman():
                invaders.append(gameState.getAgentState(enemy))
        global invader
        if len(invaders) != 0:
            invader = 1
        else:
            invader = 0
        # Greedy one-step choice: evaluate each legal action, break ties randomly.
        actions = gameState.getLegalActions(self.index)
        values = [self.evaluate(gameState, a) for a in actions]
        maxValue = max(values)
        bestActions = [a for a, v in zip(actions, values) if v == maxValue]
        return random.choice(bestActions)
    def registerInitialState(self, gameState):
        """Precompute maze distances, per-row patrol tiles near the border
        (`midpointTiles`, indexed by y coordinate), and the map of dead-end
        entrances (`deadends`)."""
        self.red = gameState.isOnRedTeam(self.index)
        self.distancer = distanceCalculator.Distancer(gameState.getInitialLayout())
        self.distancer.getMazeDistances()
        global invader
        invader = 0
        abc = gameState.getInitialLayout()
        width = abc.getWidth()
        height = abc.getHeight()
        midpoint = int(width/2)
        global midpointTiles
        midpointTiles = []
        midpointTiles.append((0, 0))
        # Column offsets depend on which side of the board we own.
        redPenalty = 1
        redPenalty2 = 0
        if self.red:
            redPenalty = -1
            redPenalty2 = -1
        # For each row, walk inward from the midline until a non-wall tile is
        # found; that becomes the patrol tile for that row.
        for i in range(1, height - 1):
            counter = 0
            while abc.isWall((midpoint + redPenalty2 + ((2 + counter) * redPenalty), i)):
                counter += 1
            midpointTiles.append(((midpoint + redPenalty2 + ((2 + counter) * redPenalty)), i))
        # Find two-deep dead ends: a cell with exactly one neighbor whose
        # neighbor in turn has exactly two openings. deadends maps the
        # entrance cell to the two cells inside the pocket.
        global deadends
        deadends = {}
        for n in range(1, height - 1):
            for i in range(1, width - 1):
                if not abc.isWall((i, n)):
                    counter = 0
                    opening = (0, 0)
                    if not abc.isWall((i - 1, n)):
                        opening = (i - 1, n)
                        counter += 1
                    if not abc.isWall((i + 1, n)):
                        opening = (i + 1, n)
                        counter += 1
                    if not abc.isWall((i, n + 1)):
                        opening = (i, n + 1)
                        counter += 1
                    if not abc.isWall((i, n - 1)):
                        opening = (i, n - 1)
                        counter += 1
                    if counter == 1:
                        counter = 0
                        opening2 = (0,0)
                        if not abc.isWall((opening[0] - 1, opening[1])):
                            if((opening[0] - 1, opening[1]) != (i, n)):
                                opening2 = (opening[0] - 1, opening[1])
                                counter += 1
                        if not abc.isWall((opening[0] + 1, opening[1])):
                            if((opening[0] + 1, opening[1]) != (i, n)):
                                opening2 = (opening[0] + 1, opening[1])
                                counter += 1
                        if not abc.isWall((opening[0], opening[1] - 1)):
                            if((opening[0], opening[1] - 1) != (i, n)):
                                opening2 = (opening[0], opening[1] - 1)
                                counter += 1
                        if not abc.isWall((opening[0], opening[1] + 1)):
                            if((opening[0], opening[1] + 1) != (i, n)):
                                opening2 = (opening[0], opening[1] + 1)
                                counter += 1
                        if counter == 2:
                            deadends[opening2] = [opening, (i, n)]
        # NOTE(review): debug print left in; remove for quiet runs.
        print(deadends)
    def minimax(self, state, depth):
        """Root of an alpha-beta search; returns the best action for us.
        NOTE(review): appears unused — chooseAction picks greedily; confirm."""
        bestScore = -float("inf")
        bestMove = Directions.STOP
        listOfActions = state.getLegalActions(self.index)
        if Directions.STOP in listOfActions:
            listOfActions.remove(Directions.STOP) # Remove stop from moves
        for action in listOfActions:
            successorState = state.generateSuccessor(self.index, action)
            opponents = self.getOpponents(state)
            successorScore = self.minState(successorState, depth, opponents, 0, -1000000, 1000000)
            if bestScore <= successorScore:
                bestMove = action
                bestScore = successorScore
        return bestMove
    def minState(self, state, depth, ghosts, start, alpha, beta):
        """Alpha-beta min node over opponent `ghosts[start]`; the last
        opponent hands control back to maxState with depth - 1."""
        lowestScore = 1000000000
        score = 0
        if state.isWin():
            return 10000
        if state.isLose():
            return -10000
        if depth == 0:
            return self.evaluate(state, Directions.STOP)
        listOfActions = state.getLegalActions(ghosts[start])
        if Directions.STOP in listOfActions:
            listOfActions.remove(Directions.STOP) # Remove stop from moves
        for action in listOfActions:
            nextState = state.generateSuccessor(ghosts[start], action)
            if start == len(ghosts) - 1:
                score = self.maxState(nextState, depth - 1, alpha, beta)
            else:
                score = self.minState(nextState, depth, ghosts, start + 1, alpha, beta)
            if lowestScore > score:
                lowestScore = score
            if beta > score:
                beta = score
            if beta < alpha:
                break
        return lowestScore
    def maxState(self, state, depth, alpha, beta):
        """Alpha-beta max node: our agent moves; leaves score via evaluate()."""
        highestScore = -100000000
        if state.isWin():
            return 10000
        if state.isLose():
            return -10000
        if depth == 0:
            return self.evaluate(state, Directions.STOP)
        listOfActions = state.getLegalActions(self.index)
        if Directions.STOP in listOfActions:
            listOfActions.remove(Directions.STOP)
        for action in listOfActions:
            successorState = state.generateSuccessor(self.index, action)
            opponents = self.getOpponents(state)
            successorScore = self.minState(successorState, depth, opponents, 0, alpha, beta)
            if highestScore <= successorScore:
                highestScore = successorScore
            if alpha < successorScore:
                alpha = successorScore
            if beta < alpha:
                break
        return highestScore
    def getFeatures(self, gameState, action):
        """Defensive feature vector: stay on our side, close on invaders,
        flag dead-end traps, and patrol the row-matched border tile when no
        invader is visible."""
        global deadends
        features = counter.Counter()
        successor = self.getSuccessor(gameState, action)
        myState = successor.getAgentState(self.index)
        myPos = myState.getPosition()
        # Computes whether we're on defense (1) or offense (0).
        features['onDefense'] = 1
        if (myState.isPacman()):
            features['onDefense'] = 0
        else:
            features['isGhost'] = 1
        # Computes distance to invaders we can see.
        enemies = [successor.getAgentState(i) for i in self.getOpponents(successor)]
        invaders = [a for a in enemies if a.isPacman() and a.getPosition() is not None]
        # If we are sitting at a dead-end entrance with an invader inside
        # (and we are not scared), we have them trapped.
        if ((int(myPos[0]), int(myPos[1]))) in deadends:
            myList = deadends[myPos]
            for attacker in invaders:
                if attacker.getPosition() in myList and not myState.isScared():
                    features['trapped'] = 1
                    print("Trapped")
        if invader == 1:
            features['numInvaders'] = 1
            features['invaderDistance'] = 2
        if (len(invaders) > 0):
            dists = [self.getMazeDistance(myPos, a.getPosition()) for a in invaders]
            features['invaderDistance'] = min(dists)
            # When scared, keep a 2-tile buffer instead of engaging.
            if myState.isScared():
                if min(dists) == 2:
                    features['scared'] = 1
        enemies = self.getOpponents(gameState)
        defenders = []
        attackers = []
        d = 0
        for enemy in enemies:
            if gameState.getAgentState(enemy).isPacman():
                attackers.append(gameState.getAgentState(enemy))
                global assumedAttacker
                assumedAttacker = enemy
                global test
                test = 1
            else:
                defenders.append(gameState.getAgentState(enemy))
                d = enemy
        # Make defender not wait right on border
        # Shadow the opponent's row from our precomputed patrol tiles.
        if test == 1 and len(attackers) == 0:
            attackerState = successor.getAgentState(assumedAttacker)
            attPos = attackerState.getPosition()
            targetDest = midpointTiles[int(attPos[1])]
            features['chaser'] = self.getMazeDistance(targetDest, myPos)
        if len(attackers) == 0 and test == 0:
            attackerState = successor.getAgentState(d)
            attPos = attackerState.getPosition()
            targetDest = midpointTiles[int(attPos[1])]
            features['chaser'] = self.getMazeDistance(targetDest, myPos)
        return features
    def getWeights(self, gameState, action):
        """Hand-tuned weights combined linearly with getFeatures()."""
        return {
            'numInvaders': -100000,
            'onDefense': 100,
            'invaderDistance': -10,
            'closeFood': -.2,
            'xChase': -20,
            'yChase': -20,
            'chaser': -100,
            'isGhost': 100000,
            'trapped': 100000,
            'scared': 200
        }
| [
"EricLee0497@gmail.com"
] | EricLee0497@gmail.com |
1b2603db33a6d30fc510ef9b6fd941c16bf4721d | c4750ec6eeda0092e3a5515d4878cfe42e117e90 | /test/test_inference/test_compiled.py | d863528604a3ebdc39f003c9c320c12eab01a952 | [
"MIT"
] | permissive | phgn0/jedi | 6e5e83778fe699d9735ab52a46ee94dec2a8be99 | eb9af151ea0f447ab9d5d00d14e8fee542bc09d1 | refs/heads/master | 2020-09-02T23:38:36.442447 | 2019-11-10T14:03:49 | 2019-11-10T14:03:49 | 219,332,443 | 1 | 0 | NOASSERTION | 2019-11-03T16:42:27 | 2019-11-03T16:42:26 | null | UTF-8 | Python | false | false | 5,139 | py | from textwrap import dedent
import math
import sys
from collections import Counter
from datetime import datetime
import pytest
from jedi.inference import compiled
from jedi.inference.compiled.access import DirectObjectAccess
from jedi.inference.gradual.conversion import _stub_to_python_value_set
def test_simple(inference_state, environment):
    """`'_str_'.upper` executes to exactly one value: the env's string type."""
    obj = compiled.create_simple_object(inference_state, u'_str_')
    upper, = obj.py__getattribute__(u'upper')
    results = list(upper.execute_with_values())
    assert len(results) == 1
    expected = 'unicode' if environment.version_info.major == 2 else 'str'
    assert results[0].name.string_name == expected
def test_builtin_loading(inference_state):
    """`str.__init__` resolves to a stub tree node with an empty docstring."""
    string, = inference_state.builtins_module.py__getattribute__(u'str')
    init, = string.py__getattribute__(u'__init__')
    assert init.tree_node
    assert not init.py__doc__()  # It's a stub
def test_next_docstr(inference_state):
    """The `next` stub has no docstring; its runtime counterparts carry the real one."""
    next_ = compiled.builtin_from_name(inference_state, u'next')
    assert next_.tree_node is not None
    assert next_.py__doc__() == ''  # It's a stub
    for non_stub_value in _stub_to_python_value_set(next_):
        assert non_stub_value.py__doc__() == next.__doc__
def test_parse_function_doc_illegal_docstr():
    """A docstring whose signature bracket never closes parses to empty parts."""
    docstr = """
    test_func(o
    doesn't have a closing bracket.
    """
    assert compiled.value._parse_function_doc(docstr) == ('', '')
def test_doc(inference_state):
    """
    Even CompiledObject docs always return empty docstrings - not None, that's
    just a Jedi API definition.
    """
    str_ = compiled.create_simple_object(inference_state, u'')
    # Equals `''.__getnewargs__`
    method, = str_.py__getattribute__(u'__getnewargs__')
    assert method.py__doc__() == ''
def test_string_literals(Script, environment):
    """String-literal prefixes infer to the right builtin type per Python version."""
    def literal_type(literal):
        return Script("a = %s; a" % literal).goto_definitions()[0].name

    assert literal_type('""') == 'str'
    assert literal_type('r""') == 'str'
    if environment.version_info.major > 2:
        assert literal_type('br""') == 'bytes'
        assert literal_type('b""') == 'bytes'
        assert literal_type('u""') == 'str'
    else:
        assert literal_type('b""') == 'str'
        assert literal_type('u""') == 'unicode'
def test_method_completion(Script, environment):
    """Completing on a bound method's `__func__` offers exactly that attribute."""
    code = dedent('''
    class Foo:
        def bar(self):
            pass

    foo = Foo()
    foo.bar.__func__''')
    completion_names = [c.name for c in Script(code).completions()]
    assert completion_names == ['__func__']
def test_time_docstring(Script):
    """`time.sleep`'s completion docstring is the signature plus the C docstring."""
    import time
    completion, = Script('import time\ntime.sleep').completions()
    assert completion.docstring(raw=True) == time.sleep.__doc__
    assert completion.docstring() == 'sleep(secs: float) -> None\n\n' + time.sleep.__doc__
def test_dict_values(Script, environment):
    """Subscripting `sys.modules` with an unknown key still infers a value."""
    # It looks like typeshed for Python 2 returns Any.
    if environment.version_info.major == 2:
        pytest.skip()
    source = 'import sys\nsys.modules["alshdb;lasdhf"]'
    assert Script(source).goto_definitions()
def test_getitem_on_none(Script):
    """Indexing None yields no definitions and one not-subscriptable warning."""
    script = Script('None[1j]')
    assert not script.goto_definitions()
    analysis_issue, = script._inference_state.analysis
    assert analysis_issue.name == 'type-error-not-subscriptable'
def _return_int():
return 1
@pytest.mark.parametrize(
    'attribute, expected_name, expected_parent', [
        ('x', 'int', 'builtins'),
        ('y', 'int', 'builtins'),
        ('z', 'bool', 'builtins'),
        ('cos', 'cos', 'math'),
        ('dec', 'Decimal', 'decimal'),
        ('dt', 'datetime', 'datetime'),
        ('ret_int', '_return_int', 'test.test_inference.test_compiled'),
    ]
)
def test_parent_context(same_process_inference_state, attribute, expected_name, expected_parent):
    """Attributes of a compiled object report their defining module as the
    (single-level) parent context."""
    import decimal

    class C:
        x = 1
        y = int
        z = True
        cos = math.cos
        dec = decimal.Decimal(1)
        dt = datetime(2000, 1, 1)
        ret_int = _return_int

    access = DirectObjectAccess(same_process_inference_state, C)
    compiled_obj = compiled.CompiledObject(same_process_inference_state, access)
    value, = compiled_obj.py__getattribute__(attribute)
    assert value.py__name__() == expected_name

    module_name = value.parent_context.py__name__()
    if module_name == '__builtin__':
        module_name = 'builtins'  # Python 2
    assert module_name == expected_parent
    assert value.parent_context.parent_context is None
@pytest.mark.skipif(sys.version_info[0] == 2, reason="Ignore Python 2, because EOL")
@pytest.mark.parametrize(
    'obj, expected_names', [
        ('', ['str']),
        (str, ['str']),
        (''.upper, ['str', 'upper']),
        (str.upper, ['str', 'upper']),
        (math.cos, ['cos']),
        (Counter, ['Counter']),
        (Counter(""), ['Counter']),
        (Counter.most_common, ['Counter', 'most_common']),
        (Counter("").most_common, ['Counter', 'most_common']),
    ]
)
def test_qualified_names(same_process_inference_state, obj, expected_names):
    """Compiled objects expose their dotted qualified-name path as a tuple."""
    access = DirectObjectAccess(same_process_inference_state, obj)
    wrapped = compiled.CompiledObject(same_process_inference_state, access)
    assert wrapped.get_qualified_names() == tuple(expected_names)
| [
"davidhalter88@gmail.com"
] | davidhalter88@gmail.com |
d1761605154de9ab4c388192b6d4be3b6ab6c031 | f4b75bdd45185b90a1037bc9e448933653ea086e | /conda_primer_db/primer_db_app/primer_db_site/tests.py | 576dd6a57f15debf93aadff6e94a6ce1ccca5ebb | [] | no_license | Ashley-Pritchard/Primer_Database_App | 9965a3f9deddb85d76053ec01a74a477946bee28 | e94a212a3d316bfd50d60d066cca9e669441ee4a | refs/heads/master | 2022-12-10T03:32:29.017526 | 2020-09-09T12:27:39 | 2020-09-09T12:27:39 | 231,628,505 | 3 | 1 | null | 2022-12-08T04:19:55 | 2020-01-03T16:51:47 | Python | UTF-8 | Python | false | false | 5,369 | py | from django.test import SimpleTestCase, TestCase, Client
from django.urls import reverse, resolve
from primer_db_site.views import *
from primer_db_site.models import *
import json
#unit tests
#url tests
class TestUrls(SimpleTestCase):
    """Each named URL route must resolve back to its view function."""

    def _assert_resolves(self, route_name, view_func):
        # Reverse the route name to a path, then check that resolving the
        # path yields the expected view callable.
        self.assertEquals(resolve(reverse(route_name)).func, view_func)

    def test_search_url_resolves(self):
        self._assert_resolves('search', search)

    def test_primer_url_resolves(self):
        self._assert_resolves('primer', primer)

    def test_amplicon_url_resolves(self):
        self._assert_resolves('amplicon', amplicon)

    def test_order_url_resolves(self):
        self._assert_resolves('order', order)

    def test_order_form_url_resolves(self):
        self._assert_resolves('order_form', order_form)

    def test_submitted_url_resolves(self):
        self._assert_resolves('submitted', submitted)

    def test_ordered_url_resolves(self):
        self._assert_resolves('ordered', ordered)

    def test_order_to_amplicon_url_resolves(self):
        self._assert_resolves('order_to_amplicon', order_to_amplicon)

    def test_submitted_to_amplicon_url_resolves(self):
        self._assert_resolves('submitted_to_amplicon', submitted_to_amplicon)

    def test_reorder_primer_url_resolves(self):
        self._assert_resolves('reorder_primer', reorder_primer)

    def test_archive_primer_url_resolves(self):
        self._assert_resolves('archive_primer', archive_primer)

    def test_submit_order_url_resolves(self):
        self._assert_resolves('submit_order', submit_order)

    def test_order_placed_url_resolves(self):
        self._assert_resolves('order_placed', order_placed)

    def test_order_recieved_url_resolves(self):
        self._assert_resolves('order_recieved', order_recieved)

    def test_in_testing_url_resolves(self):
        self._assert_resolves('in_testing', in_testing)

    def test_tested_url_resolves(self):
        self._assert_resolves('tested', tested)

    def test_failed_url_resolves(self):
        self._assert_resolves('failed', failed)

    def test_remove_failed_url_resolves(self):
        self._assert_resolves('remove_failed', remove_failed)
#view tests
class TestViews(TestCase):
    """Smoke tests: every page must answer GET with 200 and its template."""

    def setUp(self):
        self.client = Client()
        self.index_url = reverse('index')
        self.order_url = reverse('order')
        self.ordered_url = reverse('ordered')
        self.order_to_amplicon_url = reverse('order_to_amplicon')
        self.reorder_primer_url = reverse('reorder_primer')
        self.submit_order_url = reverse('submit_order')
        self.order_placed_url = reverse('order_placed')
        self.order_recieved_url = reverse('order_recieved')
        self.in_testing_url = reverse('in_testing')
        self.tested_url = reverse('tested')
        self.failed_url = reverse('failed')
        self.remove_failed_url = reverse('remove_failed')

    def _assert_get_ok(self, url, template):
        # GET the page and check both the status code and rendered template.
        response = self.client.get(url)
        self.assertEquals(response.status_code, 200)
        self.assertTemplateUsed(response, template)

    def test_index_GET(self):
        self._assert_get_ok(self.index_url, 'index.html')

    def test_order_GET(self):
        self._assert_get_ok(self.order_url, 'order.html')

    def test_ordered_GET(self):
        self._assert_get_ok(self.ordered_url, 'ordered.html')

    def test_order_to_amplicon_GET(self):
        self._assert_get_ok(self.order_to_amplicon_url, 'order_to_amplicon.html')

    def test_reoder_primer_GET(self):
        self._assert_get_ok(self.reorder_primer_url, 'submitted_reorder_primer.html')

    def test_submit_order_GET(self):
        self._assert_get_ok(self.submit_order_url, 'submit_order.html')

    def test_order_placed_GET(self):
        self._assert_get_ok(self.order_placed_url, 'order_placed.html')

    def test_order_recieved_GET(self):
        self._assert_get_ok(self.order_recieved_url, 'order_recieved.html')

    def test_in_testing_GET(self):
        self._assert_get_ok(self.in_testing_url, 'in_testing.html')

    def test_tested_GET(self):
        self._assert_get_ok(self.tested_url, 'tested.html')

    def test_failed_GET(self):
        self._assert_get_ok(self.failed_url, 'failed.html')

    def test_remove_failed_GET(self):
        self._assert_get_ok(self.remove_failed_url, 'remove_failed.html')
| [
"ashley.pritchard@ouh.nhs.uk"
] | ashley.pritchard@ouh.nhs.uk |
f7ecb1e8ebd33827ff942b8c272360739540dd95 | 7fb1b5d89d4803c88d6da0adda7c913c9ceae884 | /KaggleLearn/DogVsCat/vgg16_getv.py | e13ef1e1f3fd70f48499813b2249c2f729e13082 | [] | no_license | hzy0110/PythonAI | 4102ba1f8a2ecace2bfa10e2c02534d11980a897 | f1f3d6c6ebd03ef70ea90ac67bfdf3b4289c7e9b | refs/heads/master | 2021-03-12T19:32:40.788382 | 2019-09-30T15:38:37 | 2019-09-30T15:38:37 | 102,879,764 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,665 | py | ########################################################################################
# Davi Frossard, 2016 #
# VGG16 implementation in TensorFlow #
# Details: #
# http://www.cs.toronto.edu/~frossard/post/vgg16/ #
# #
# Model from https://gist.github.com/ksimonyan/211839e770f7b538e2d8#file-readme-md #
# Weights from Caffe converted using https://github.com/ethereon/caffe-tensorflow #
########################################################################################
import tensorflow as tf
import numpy as np
from scipy.misc import imread, imresize
from KaggleLearn.DogVsCat.imagenet_classes import class_names
class vgg16_getv:
    """
    VGG16 network for TensorFlow 1.x.

    Builds the full VGG16 graph over the placeholder `imgs` and exposes the
    softmax class probabilities as `self.probs`.  Pretrained weights can be
    loaded from a .npz checkpoint (load_weights_tf) or from a
    Caffe-converted .npy checkpoint (load_weights_npy_tf).
    """
    def __init__(self, imgs, weights=None, sess=None):
        # imgs: float32 placeholder, shape [None, 224, 224, 3] (RGB, 0-255)
        self.imgs = imgs
        # Fix: the original called vgg16_getv.convlayers() and
        # vgg16_getv.load_weights_tf(weights, sess) as unbound class
        # attributes (TypeError: missing self); call them on the instance.
        logits = self.convlayers()
        self.probs = tf.nn.softmax(logits)
        if weights is not None and sess is not None:
            self.load_weights_tf(weights, sess)

    def _conv_relu(self, scope_name, bottom, shape):
        """3x3 'SAME' convolution + bias + ReLU under variable scope
        `scope_name`.

        shape is the kernel shape [kh, kw, in_channels, out_channels].
        Variables are named 'weights'/'biases' exactly as before so the
        checkpoint loaders can find them by scope.
        """
        with tf.variable_scope(scope_name) as scope:
            kernel = tf.get_variable(
                initializer=tf.truncated_normal(shape, dtype=tf.float32,
                                                stddev=1e-1),
                name='weights')
            conv = tf.nn.conv2d(bottom, kernel, [1, 1, 1, 1], padding='SAME')
            biases = tf.get_variable(
                initializer=tf.constant(0.0, shape=[shape[-1]],
                                        dtype=tf.float32),
                trainable=True, name='biases')
            return tf.nn.relu(tf.nn.bias_add(conv, biases), name=scope.name)

    def _max_pool(self, bottom, name):
        """2x2 stride-2 'SAME' max pooling."""
        return tf.nn.max_pool(bottom, ksize=[1, 2, 2, 1],
                              strides=[1, 2, 2, 1], padding='SAME', name=name)

    def _fc(self, scope_name, bottom, in_dim, out_dim, relu=True):
        """Fully connected layer; biases initialized to 1.0 as in the
        original code."""
        with tf.variable_scope(scope_name):
            w = tf.get_variable(
                initializer=tf.truncated_normal([in_dim, out_dim],
                                                dtype=tf.float32, stddev=1e-1),
                name='weights')
            b = tf.get_variable(
                initializer=tf.constant(1.0, shape=[out_dim],
                                        dtype=tf.float32),
                trainable=True, name='biases')
            out = tf.nn.bias_add(tf.matmul(bottom, w), b)
            return tf.nn.relu(out) if relu else out

    def convlayers(self):
        """Build the VGG16 graph; returns the fc8 logits (1000 classes)."""
        # Zero-mean the input with the ImageNet channel means.
        with tf.variable_scope('preprocess'):
            mean = tf.constant([123.68, 116.779, 103.939], dtype=tf.float32,
                               shape=[1, 1, 1, 3], name='img_mean')
            # Fix: the original subtracted the mean from a global `imgs`;
            # use the placeholder stored on the instance.
            net = self.imgs - mean
        # Block 1: two conv64 layers + pool
        net = self._conv_relu('conv1_1', net, [3, 3, 3, 64])
        net = self._conv_relu('conv1_2', net, [3, 3, 64, 64])
        net = self._max_pool(net, 'pool1')
        # Block 2: two conv128 layers + pool
        net = self._conv_relu('conv2_1', net, [3, 3, 64, 128])
        net = self._conv_relu('conv2_2', net, [3, 3, 128, 128])
        net = self._max_pool(net, 'pool2')
        # Block 3: three conv256 layers + pool
        net = self._conv_relu('conv3_1', net, [3, 3, 128, 256])
        net = self._conv_relu('conv3_2', net, [3, 3, 256, 256])
        net = self._conv_relu('conv3_3', net, [3, 3, 256, 256])
        net = self._max_pool(net, 'pool3')
        # Block 4: three conv512 layers + pool
        net = self._conv_relu('conv4_1', net, [3, 3, 256, 512])
        net = self._conv_relu('conv4_2', net, [3, 3, 512, 512])
        net = self._conv_relu('conv4_3', net, [3, 3, 512, 512])
        net = self._max_pool(net, 'pool4')
        # Block 5: three conv512 layers + pool
        net = self._conv_relu('conv5_1', net, [3, 3, 512, 512])
        net = self._conv_relu('conv5_2', net, [3, 3, 512, 512])
        net = self._conv_relu('conv5_3', net, [3, 3, 512, 512])
        pool5 = self._max_pool(net, 'pool5')
        # Classifier: flatten, then fc6/fc7 with ReLU and fc8 logits.
        flat_dim = int(np.prod(pool5.get_shape()[1:]))
        pool5_flat = tf.reshape(pool5, [-1, flat_dim])
        fc6 = self._fc('fc6', pool5_flat, flat_dim, 4096)
        fc7 = self._fc('fc7', fc6, 4096, 4096)
        return self._fc('fc8', fc7, 4096, 1000, relu=False)

    def load_weights_tf(self, weight_file, sess):
        """Load a .npz checkpoint whose keys look like 'conv1_1_W' /
        'conv1_1_b' into the graph variables created above."""
        weights = np.load(weight_file)
        for key in sorted(weights.keys()):
            # 'conv1_1_W' -> scope 'conv1_1', suffix 'W' (weights) or
            # 'b' (biases).  Fix: compare with ==, not `is`, which is an
            # unreliable identity check on strings.
            with tf.variable_scope(key[:-2], reuse=True):
                subkey = 'weights' if key[-1:] == 'W' else 'biases'
                sess.run(tf.get_variable(subkey).assign(weights[key]))

    def load_weights_npy_tf(self, weight_file, sess):
        """Load a Caffe-converted .npy checkpoint shaped
        {scope_name: [weight_array, bias_array]}.

        Fix: the original indexed the dict with the integers 0/1 and
        passed them to tf.get_variable without entering a scope, which
        could never work; mirror load_weights_tf instead.
        """
        weights = np.load(weight_file, encoding='latin1').item()
        for key in sorted(weights.keys()):
            with tf.variable_scope(key, reuse=True):
                for var_name, value in zip(('weights', 'biases'), weights[key]):
                    sess.run(tf.get_variable(var_name).assign(value))
if __name__ == '__main__':
    # Demo: classify one image with pretrained VGG16 weights.
    sess = tf.Session()
    imgs = tf.placeholder(tf.float32, [None, 224, 224, 3])
    vgg = vgg16_getv(imgs, './VGG16/vgg16_weights.npz', sess)
    # Alternative: load the Caffe-converted .npy checkpoint instead.
    # vgg = vgg16(imgs, './VGG16/vgg16.npy', sess)
    img1 = imread('./test_data/tiger.jpeg', mode='RGB')
    img1 = imresize(img1, (224, 224))  # VGG16 expects 224x224 input
    prob = sess.run(vgg.probs, feed_dict={vgg.imgs: [img1]})[0]
    # Report the top-5 ImageNet classes by probability.
    preds = (np.argsort(prob)[::-1])[0:5]
    for p in preds:
        print(class_names[p], prob[p])
    # Weights download website: https://www.cs.toronto.edu/~frossard/
| [
"hzy0110@163.com"
] | hzy0110@163.com |
c4e8dbc6684184e78245deb69b8a5f098817f5d9 | f6f632bee57875e76e1a2aa713fdbe9f25e18d66 | /python/CrackingTheCodingInterview_6/01_08_zero-matrix-lcci.py | 064080aa330c0cdb7f50aa9177f2c29ebc6ce08e | [] | no_license | Wang-Yann/LeetCodeMe | b50ee60beeeb3661869bb948bef4fbe21fc6d904 | 44765a7d89423b7ec2c159f70b1a6f6e446523c2 | refs/heads/master | 2023-08-07T05:31:23.428240 | 2021-09-30T15:33:53 | 2021-09-30T15:33:53 | 253,497,185 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,138 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author : Rock Wayne
# @Created : 2020-07-12 00:02:47
# @Last Modified : 2020-07-12 00:02:47
# @Mail : lostlorder@gmail.com
# @Version : 1.0.0
"""
# 编写一种算法,若M × N矩阵中某个元素为0,则将其所在的行与列清零。
#
#
#
# 示例 1:
#
# 输入:
# [
# [1,1,1],
# [1,0,1],
# [1,1,1]
# ]
# 输出:
# [
# [1,0,1],
# [0,0,0],
# [1,0,1]
# ]
#
#
# 示例 2:
#
# 输入:
# [
# [0,1,2,0],
# [3,4,5,2],
# [1,3,1,5]
# ]
# 输出:
# [
# [0,0,0,0],
# [0,4,5,0],
# [0,3,1,0]
# ]
#
# Related Topics 数组
# 👍 10 👎 0
"""
from typing import List
import pytest
# leetcode submit region begin(Prohibit modification and deletion)
class Solution:
    def setZeroes(self, matrix: List[List[int]]) -> None:
        """Zero out, in place, every row and column containing a 0.

        Two passes: first record the indices of rows/columns holding a
        zero, then clear every cell belonging to one of them.
        """
        if not matrix:
            return
        width = len(matrix[0])
        zero_rows = {r for r, row in enumerate(matrix)
                     if any(not v for v in row)}
        zero_cols = {c for c in range(width)
                     if any(not row[c] for row in matrix)}
        for r, row in enumerate(matrix):
            for c in range(width):
                if r in zero_rows or c in zero_cols:
                    row[c] = 0
# leetcode submit region end(Prohibit modification and deletion)
@pytest.mark.parametrize("args,expected", [
    ([[1, 1, 1],
      [1, 0, 1],
      [1, 1, 1]],
     [[1, 0, 1],
      [0, 0, 0],
      [1, 0, 1]]),
    pytest.param([[0, 1, 2, 0],
                  [3, 4, 5, 2],
                  [1, 3, 1, 5]],
                 [[0, 0, 0, 0],
                  [0, 4, 5, 0],
                  [0, 3, 1, 0]]),
])
def test_solutions(args, expected):
    """setZeroes mutates the matrix in place to the expected result."""
    Solution().setZeroes(args)
    assert args == expected
if __name__ == '__main__':
    # Run this module's tests directly: quiet, colored, capture passed through.
    pytest.main(["-q", "--color=yes", "--capture=tee-sys", __file__])
| [
"wzy-511@163.com"
] | wzy-511@163.com |
ed8c41ec34cf718b87ecb1eb8f5ac9e984af1550 | 00f3468d8917ac0c1b4df8b4dc50e82c0d9be3fa | /hhalign_with_hmms.py | d7ff5f7a284a3efec299ba27c0e89dd5efbc57e9 | [] | no_license | berkeleyphylogenomics/BPG_utilities | 4e332bb401b8c057502a1a0a1d532396bfff9542 | bbf5df137a0a459598c3f9073d80f0086e5f7550 | refs/heads/master | 2021-01-01T19:21:13.740575 | 2014-11-05T18:40:31 | 2014-11-05T18:40:31 | 24,867,074 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,914 | py | #!/usr/bin/python
"""
run_hhalign_with_input_hmms.py Terry Farrah May 2008 Sjolander lab
Align 2 HMMs using HHalign.
Do not let HHalign generate HMMs; instead, provide HMMs.
Optionally, use PSI-PRED to generate secondary structure and
tell HHalign to use this info in its alignment.
Warnings:
Must run in same dir as input hmms (because of SAM pickiness).
Path for PSIPRED data is hard-coded.
SAM hmms tend to be here:
/home/ruchira/SHMM-SHMM/current_method/1a7w/1a7w.mod
/home/ruchira/SHMM-SHMM/current_method/1a7w/ascii_ghmm/1a7w.mod
Those on Ohana are generated using a newer version of FlowerPower.
6/11/08
WORK REMAINING:
-- get psi-pred installed properly on ohana. Does psi-pred make use of
a shell variable to tell it where the psi-pred data is? If so, set it.
Get script/binary installed in /usr/bin.
-- remove hardcoded path to script_dir (should be non-issue after above is done)
"""
import os, sys, glob, BPG_common.fasta
from optparse import OptionParser
from matchmaker.shmm_shmm_lib import *
from matchmaker.align_originals_via_shmms_and_score import *
# Hard-coded paths; see the WORK REMAINING note in the module docstring.
script_dir = "/home/tfarrah/hmm-hmm"
# Default location of the PSI-PRED weight files (overridable via --ppdata).
psipred_data_dir = "/clusterfs/ohana/software/lib/psipred/data"
def add_ss_info_to_hmmer(hmm_filename, psipred_filename,
                         out_filename):
    """Merge PSIPRED secondary-structure predictions into a HMMER-format HMM.

    Input:  hmm_filename     -- HMM file in HMMER format
            psipred_filename -- .horiz file generated by the psipred suite
                                (specifically, by psipass2)
            out_filename     -- destination for the annotated HMM, suitable
                                as input to HHalign; falsy means stdout

    SSCIT/SSPRD/SSCNF records are inserted immediately before the XT line;
    any pre-existing SSPRD/SSCNF records are dropped.  Exits with status 1
    if the PSIPRED file is empty or inconsistent; warns on stderr if the
    HMM length disagrees with the prediction length.
    """
    # Collect the Pred: (structure codes) and Conf: (confidence digits)
    # strings from the .horiz file, stripping the 6-character prefixes.
    pred_lines = []
    conf_lines = []
    psipred_file = open(psipred_filename, "r")
    for line in psipred_file:
        if line.startswith("Pred:"):
            pred_lines.append(line[6:].strip())
        if line.startswith("Conf:"):
            conf_lines.append(line[6:].strip())
    psipred_file.close()

    pred_seqlen = sum(map(len, pred_lines))
    conf_seqlen = sum(map(len, conf_lines))
    # The prediction must be non-empty and the two record types must
    # describe the same number of residues.
    if pred_seqlen == 0:
        sys.stderr.write(
            "Error in file %s: no lines beginning with Pred:, or all such lines empty\n"
            % (psipred_filename))
        sys.exit(1)  # fix: was sys.exit(0) -- an error exit must be nonzero
    if pred_seqlen != conf_seqlen:
        sys.stderr.write(
            "Error in file %s: lengths of Pred, Conf strings differ (%d v. %d)\n"
            % (psipred_filename, pred_seqlen, conf_seqlen))
        sys.exit(1)  # fix: was sys.exit(0)

    hmm_file = open(hmm_filename, "r")
    if out_filename:
        out_file = open(out_filename, "w")
    else:
        out_file = sys.stdout
    # Copy the HMM through, splicing the SS records in just before the XT
    # line and discarding any stale SS records already present.
    for line in hmm_file:
        if line.startswith("XT"):
            out_file.write("SSCIT Secondary structure info generated by PSIPRED\n")
            for ssline in pred_lines:
                out_file.write("SSPRD %s\n" % ssline)
            for ssline in conf_lines:
                out_file.write("SSCNF %s\n" % ssline)
        if line.startswith("SSPRD") or line.startswith("SSCNF"):
            continue
        # Cross-check the HMM's declared length against the prediction.
        if line.startswith("LENG"):
            hmm_seqlen = int(line[6:])
            if hmm_seqlen != pred_seqlen:
                sys.stderr.write(
                    "Warning: lengths of SS prediction, HMM differ (%d v. %d)\n"
                    % (hmm_seqlen, pred_seqlen))
        out_file.write(line.rstrip() + "\n")
    hmm_file.close()
    if out_filename:
        out_file.close()  # fix: never close sys.stdout
def extract_XY_alignment(seqX, seqY, file):
    """Recover the X-Y seed alignment from HHalign standard output.

    HHalign's report (the -o file) shows only the aligned core of the
    query (Q, our Z master) and template (T, our W master), annotated with
    start/end residue numbers and total lengths.  (The -ofas output is
    fasta but omits residue numbers, so it cannot be used here.)  The
    full-length Z and W strings are re-assembled by padding the unreported
    termini with X characters (gaps '-' in the partner row), and the
    original seeds X and Y are then mapped onto them.

    seqX, seqY -- the unaligned seed sequences for the two HMMs
    file       -- path to the HHalign standard-output file

    Returns the 4-tuple of equal-length strings (X, Z, W, Y) from
    align_four_way.
    """
    def prefix_X_and_dash(numX_for_Z, numX_for_W, seqZ, seqW):
        # Pad the left ends: X's for the unaligned leading residues of
        # each sequence, matched by gaps in the other row.
        seqZ = '-' * numX_for_W + 'X' * numX_for_Z + seqZ
        seqW = 'X' * numX_for_W + '-' * numX_for_Z + seqW
        return (seqZ, seqW)

    def append_X_and_dash(numX_for_Z, numX_for_W, seqZ, seqW):
        # Pad the right ends symmetrically.
        seqZ = seqZ + 'X' * numX_for_Z + '-' * numX_for_W
        seqW = seqW + '-' * numX_for_Z + 'X' * numX_for_W
        return (seqZ, seqW)

    # Read the whole report; the alignment section starts after "No 1".
    hhalign_file = open(file, "r")
    lines = hhalign_file.readlines()
    hhalign_file.close()  # fix: the handle was never closed

    seq_start = False
    seqZ = ""
    seqW = ""
    seqZ_start = None
    seqW_start = None
    for line in lines:
        line = line.strip()
        if line.startswith("No 1"):
            seq_start = True
            continue
        if seq_start:
            # Sequence rows look like:
            #   Q <name> <start> <ALIGNED-SEQ> <end> (<length>)
            # skipping the Consensus and ss_pred annotation rows.
            if (line.startswith("Q") or line.startswith("T")) \
                    and not line[2:].startswith("Consensus") \
                    and not line[2:].startswith("ss_pred"):
                (symbol, seqname, start_res_string, seq, end_res_string,
                 seqlen_string) = line.split()
                start_res = int(start_res_string)
                end_res = int(end_res_string)
                seqlen = int(seqlen_string[1:-1])  # strip the parentheses
                if symbol == "Q":
                    seqZ = seqZ + seq
                    if seqZ_start is None:
                        seqZ_start = start_res
                    seqZ_end = end_res
                    seqZ_len = seqlen
                elif symbol == "T":
                    seqW = seqW + seq
                    if seqW_start is None:
                        seqW_start = start_res
                    seqW_end = end_res
                    seqW_len = seqlen
    # NOTE(review): if the report contains no alignment rows, the *_start
    # values stay None and the padding below raises; presumably HHalign
    # always reports at least one row -- confirm upstream.
    (seqZ, seqW) = prefix_X_and_dash(seqZ_start - 1, seqW_start - 1, seqZ, seqW)
    (seqZ, seqW) = append_X_and_dash(seqZ_len - seqZ_end, seqW_len - seqW_end,
                                     seqZ, seqW)
    # Create the X-Y alignment according to the W-Z alignment.
    return align_four_way(seqX, seqZ, seqW, seqY)
def main_test():
    # Ad-hoc debugging entry point: parse the HHalign output file named on
    # the command line.  seqX/seqY are omitted, so only the report parsing
    # (not the final four-way alignment) is meaningfully exercised.
    file = sys.argv[1]
    extract_XY_alignment(None, None, file)
def main():
    """Align two SAM HMMs with HHalign and print the induced seed-seed
    alignment in FASTA format.

    Per-HMM pipeline:
      1. convert.pl            : SAM .mod -> HMMER <base>.con.hmm
      2. (with --ss) sam2psi / makemat / psipred / psipass2 to predict
         secondary structure, then splice it into the HMM
         (add_ss_info_to_hmmer) for HHalign to use.
    Then hhalign is run on the pair and extract_XY_alignment maps the
    HMM-HMM alignment back onto the original seed sequences.

    Must be run from the directory holding the input files (SAM quirk).
    """
    #====================
    # Process command line
    #====================
    parser = OptionParser()
    parser.add_option("--hmm1",
        dest="hmm1_filename",
        help="first input HMM in SAM (.mod) format. NOTE: all input files must be in the same directory, and the program needs to be run from that directory. ALSO NOTE: the number of match states in the HMMs must equal the sequence length of the corresponding seed",
        metavar="FILE")
    parser.add_option("--hmm2",
        dest="hmm2_filename",
        help="second input HMM in SAM (.mod) format",
        metavar="FILE")
    parser.add_option("--seq1",
        dest="seq1_filename",
        help="first seed seq in fasta (.fa) format",
        metavar="FILE")
    parser.add_option("--seq2",
        dest="seq2_filename",
        help="second seed seq in fasta (.fa) format",
        metavar="FILE")
    parser.add_option("-o", "--out",
        dest="out_filename",
        help="alignment output file",
        metavar="FILE")
    parser.add_option("-s", "--ss",
        dest="use_sec_struc",
        default=False,
        action="store_true",
        help="use and display secondary structure information",
        metavar="SS",)
    parser.add_option("-w", "--ssw",
        dest="sec_struc_weight",
        metavar="FRACTION",
        type="float",
        default=0.11,
        help="Weight to give secondary structure info; default =0.11")
    parser.add_option("-d", "--ppdata",
        dest="psipred_data_dir",
        metavar="DIR",
        default="/clusterfs/ohana/software/lib/psipred/data",
        help="Location of weight files for PSI-PRED, default /clusterfs/ohana/software/lib/psipred/data")
    (options, args) = parser.parse_args()

    # All four input files are required; parser.error() exits with usage.
    if not options.hmm1_filename:
        parser.error("Option --hmm1 required")
    if not options.hmm2_filename:
        parser.error("Option --hmm2 required")
    if not options.seq1_filename:
        parser.error("Option --seq1 required")
    if not options.seq2_filename:
        parser.error("Option --seq2 required")
    hmm_filename = [options.hmm1_filename, options.hmm2_filename]
    seq_filename = [options.seq1_filename, options.seq2_filename]
    if options.out_filename:
        out_file = open(options.out_filename, "w")
    else:
        out_file = sys.stdout
    use_sec_struc = options.use_sec_struc
    sec_struc_weight = options.sec_struc_weight
    psipred_data_dir = options.psipred_data_dir

    runname = [None, None]
    for i in range(0, 2):
        hmm_file_basename = os.path.basename(hmm_filename[i])
        runname[i] = os.path.splitext(hmm_file_basename)[0]

        # Create a HMMER-format HMM.  convert.pl writes <base>.con.hmm
        # (consumed below) and <base>.asc.mod (ignored).
        os.system("convert.pl %s" % hmm_filename[i])

        if use_sec_struc:
            # Derive a PSI-BLAST checkpoint from the SAM model.
            os.system("sam2psi %s -i %s" % (runname[i], hmm_filename[i]))
            # makemat overwrites its input checkpoint, so run it on a copy.
            os.system("cp %s.ckp %s.makemat.ckp" % (runname[i], runname[i]))
            os.system("echo %s.makemat.ckp > %s.pn" % (runname[i], runname[i]))
            os.system("echo %s.cks > %s.sn" % (runname[i], runname[i]))
            os.system("makemat -P %s" % (runname[i]))
            # makemat records the name of the matrix file it wrote in <base>.mn.
            makemat_matrix_record_filename = runname[i] + ".mn"
            makemat_matrix_record_file = open(makemat_matrix_record_filename, "r")
            makemat_matrix_filename = makemat_matrix_record_file.readline().strip()
            makemat_matrix_record_file.close()  # fix: handle was never closed
            os.system("cp %s %s.mtx" % (makemat_matrix_filename, runname[i]))
            # Predict secondary structure with the PSIPRED pair of tools.
            os.system("psipred %s.mtx %s/weights.dat %s/weights.dat2 %s/weights.dat3 %s/weights.dat4 > %s.ss" %
                (runname[i], psipred_data_dir, psipred_data_dir, psipred_data_dir,
                 psipred_data_dir, runname[i]))
            os.system("psipass2 %s/weights_p2.dat 1 1.0 1.0 %s.ss2 %s.ss > %s.horiz" %
                (psipred_data_dir, runname[i], runname[i], runname[i]))
            # Splice the prediction into the HMMER model for HHalign.
            add_ss_info_to_hmmer("%s.con.hmm" % runname[i],
                "%s.horiz" % runname[i], "%s.ss.hmm" % runname[i])
        # Converting to HHalign's .hhm format with hhmake is optional and,
        # although the HHsearch docs say it improves performance, in
        # practice HHalign then seemed to ignore the secondary structure
        # information -- so the .hmm files are fed to hhalign directly.

    pair = "%s_%s" % (runname[0], runname[1])
    if use_sec_struc:
        hhalign_output_base_filename = "%s.ss.hhalign" % (pair)
        hhalign_alignment_filename = "%s.align" % (hhalign_output_base_filename)
        os.system("hhalign -i %s.ss.hmm -t %s.ss.hmm -ssw %f -o %s.align -ofas %s.fa > /dev/null" %
            (runname[0], runname[1], sec_struc_weight,
             hhalign_output_base_filename, hhalign_output_base_filename))
    else:
        hhalign_output_base_filename = "%s.hhalign" % (pair)
        hhalign_alignment_filename = "%s.align" % (hhalign_output_base_filename)
        os.system("hhalign -i %s.con.hmm -t %s.con.hmm -o %s.align -ofas %s.fa > /dev/null" %
            (runname[0], runname[1],
             hhalign_output_base_filename, hhalign_output_base_filename))

    # Map the HMM-HMM alignment back onto the original seed sequences.
    seqX = BPG_common.fasta.ReadOneSequence(seq_filename[0])
    seqY = BPG_common.fasta.ReadOneSequence(seq_filename[1])
    # alignment is a 4-tuple of equal-length strings: X, Z, W, Y
    alignment = extract_XY_alignment(seqX, seqY, hhalign_alignment_filename)
    out_file.write(">%s\n" % runname[0])
    out_file.write("%s\n" % alignment[0])
    out_file.write(">%s\n" % runname[1])
    out_file.write("%s\n" % alignment[3])
    if options.out_filename:
        out_file.close()  # fix: flush the alignment; never close sys.stdout
    # TODO: once a reference alignment is available, store the alignment
    # in a file and score it with compute_sp_cs_score (writing %s_CS.out).
# Script entry point: run the full HHalign pipeline from the command line.
if __name__ == "__main__":
    main()
| [
"afrasiabi@berkeley.edu"
] | afrasiabi@berkeley.edu |
0c9f915ad0956041421ba3152c8f1d36b03896a0 | b0a64cf2d36c7da2c81f920cab6f67e8a8e5b2d4 | /models/VGG_mini_BN_PReLU.py | c0390f9195d03450ae354830944220579419c08a | [] | no_license | OminiaVincit/chainer-cifar10 | 69407a114e35b9100af56142092ee9e14577a423 | 449c55f205ea5fd59313598af0f27feb51b18da4 | refs/heads/master | 2021-01-19T06:31:02.379472 | 2015-07-15T20:29:14 | 2015-07-15T20:29:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,699 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from chainer import Variable, FunctionSet
import chainer.functions as F
class VGG_mini_BN_PReLU(FunctionSet):
    """
    A small VGG-style network for CIFAR-10 (chainer FunctionSet).

    Three convolutional stages (64, 128, 256 channels) followed by two
    1024-unit fully connected layers and a 10-way classifier.  The first
    two stages use batch normalization; all activations are PReLU.
    Expects 3x32x32 inputs (4096 = 256 channels x 4 x 4 after 3 poolings).
    """
    def __init__(self):
        super(VGG_mini_BN_PReLU, self).__init__(
            # Stage 1: 3 -> 64 channels, conv + BN + PReLU, twice.
            conv1_1=F.Convolution2D(3, 64, 3, stride=1, pad=1),
            bn1_1=F.BatchNormalization(64, decay=0.9, eps=1e-5),
            prelu1_1=F.PReLU(64),
            conv1_2=F.Convolution2D(64, 64, 3, stride=1, pad=1),
            bn1_2=F.BatchNormalization(64, decay=0.9, eps=1e-5),
            prelu1_2=F.PReLU(64),

            # Stage 2: 64 -> 128 channels, conv + BN + PReLU, twice.
            conv2_1=F.Convolution2D(64, 128, 3, stride=1, pad=1),
            bn2_1=F.BatchNormalization(128, decay=0.9, eps=1e-5),
            prelu2_1=F.PReLU(128),
            conv2_2=F.Convolution2D(128, 128, 3, stride=1, pad=1),
            bn2_2=F.BatchNormalization(128, decay=0.9, eps=1e-5),
            prelu2_2=F.PReLU(128),

            # Stage 3: 128 -> 256 channels, four convs, no BN.
            conv3_1=F.Convolution2D(128, 256, 3, stride=1, pad=1),
            prelu3_1=F.PReLU(256),
            conv3_2=F.Convolution2D(256, 256, 3, stride=1, pad=1),
            prelu3_2=F.PReLU(256),
            conv3_3=F.Convolution2D(256, 256, 3, stride=1, pad=1),
            prelu3_3=F.PReLU(256),
            conv3_4=F.Convolution2D(256, 256, 3, stride=1, pad=1),
            prelu3_4=F.PReLU(256),

            # Classifier head.
            fc4=F.Linear(4096, 1024),
            prelu4=F.PReLU(),
            fc5=F.Linear(1024, 1024),
            prelu5=F.PReLU(),
            fc6=F.Linear(1024, 10)
        )

    def forward(self, x_data, y_data, train=True):
        """Run a forward pass.

        x_data, y_data -- raw input/label arrays, wrapped in Variables
        train          -- enables dropout; also selects the return shape

        Returns (loss, accuracy) when train is True, otherwise
        (loss, accuracy, logits).
        """
        x = Variable(x_data, volatile=not train)
        t = Variable(y_data, volatile=not train)

        # Stage 1: conv/BN/PReLU x2, 2x2 max pool, 25% dropout.
        h = self.prelu1_1(self.bn1_1(self.conv1_1(x)))
        h = self.prelu1_2(self.bn1_2(self.conv1_2(h)))
        h = F.max_pooling_2d(h, 2, stride=2)
        h = F.dropout(h, ratio=0.25, train=train)

        # Stage 2: conv/BN/PReLU x2, 2x2 max pool, 25% dropout.
        h = self.prelu2_1(self.bn2_1(self.conv2_1(h)))
        h = self.prelu2_2(self.bn2_2(self.conv2_2(h)))
        h = F.max_pooling_2d(h, 2, stride=2)
        h = F.dropout(h, ratio=0.25, train=train)

        # Stage 3: conv/PReLU x4, 2x2 max pool, 25% dropout.
        h = self.prelu3_1(self.conv3_1(h))
        h = self.prelu3_2(self.conv3_2(h))
        h = self.prelu3_3(self.conv3_3(h))
        h = self.prelu3_4(self.conv3_4(h))
        h = F.max_pooling_2d(h, 2, stride=2)
        h = F.dropout(h, ratio=0.25, train=train)

        # Fully connected head with 50% dropout, then the 10-way classifier.
        h = F.dropout(self.prelu4(self.fc4(h)), train=train, ratio=0.5)
        h = F.dropout(self.prelu5(self.fc5(h)), train=train, ratio=0.5)
        h = self.fc6(h)

        if train:
            return F.softmax_cross_entropy(h, t), F.accuracy(h, t)
        else:
            # Also expose the raw logits for evaluation/prediction.
            return F.softmax_cross_entropy(h, t), F.accuracy(h, t), h
| [
"shunta.saito@gmail.com"
] | shunta.saito@gmail.com |
63352d406cc1efddc9d08d0be72f31fc7f1f32b9 | c16ce21d7ee508f1e4330a06fe211210f3bc0fcd | /sumlinkedlist.py | 3b3fa1d9a759581c91b36aeab62ffe7421ac7313 | [] | no_license | lavi09/Linked-Lists | 9e2e171408c5443852d22d08203ef7174e47d2a2 | 3d5ad3dee49553f31d2fbb6521c32927877f37b1 | refs/heads/master | 2020-06-03T03:27:58.037550 | 2019-06-17T04:42:18 | 2019-06-17T04:42:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,398 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Jun 11 10:18:28 2019
@author: Lavi
"""
class Node:
    """A singly linked list node holding one digit of a number."""
    def __init__(self,data):
        # ``next`` is filled in later by the list operations.
        self.data=data
        self.next=None
class LinkedList:
    """Singly linked list of digits, with recursive digit-wise addition."""

    def __init__(self):
        self.head = None

    def push(self, new_data):
        """Insert a new node carrying ``new_data`` at the head of the list."""
        new_node = Node(new_data)
        new_node.next = self.head
        self.head = new_node

    def printlist(self):
        """Print each node's data from head to tail, one per line."""
        temp = self.head
        while temp:
            print(temp.data)
            temp = temp.next

    def add(self, a, b, carry=0):
        """Recursively add two digit lists node-by-node.

        :param a: head node of the first list (or None).
        :param b: head node of the second list (or None).
        :param carry: carry propagated from the previous digit.
        :return: head node of the sum list, or None when both inputs and
            the carry are exhausted.

        Bug fix vs. the original: it called the bare name ``add`` (NameError,
        the method must be reached via ``self``) and passed two positional
        arguments to ``Node``, whose __init__ accepts only ``data``.
        """
        if not a and not b and not carry:
            return None
        a_val = a.data if a else 0
        b_val = b.data if b else 0
        total = a_val + b_val + carry
        a_next = a.next if a else None
        b_next = b.next if b else None
        carry_next = 1 if total >= 10 else 0
        print(total % 10)
        node = Node(total % 10)
        node.next = self.add(a_next, b_next, carry_next)
        return node
# Build the two operand lists (push prepends, so 5->6->3 and 8->4->2).
firstlist = LinkedList()
firstlist.push(3)
firstlist.push(6)
firstlist.push(5)
print("First List is :")
firstlist.printlist()

secondlist = LinkedList()
secondlist.push(2)
secondlist.push(4)
secondlist.push(8)
print("second list is:")
secondlist.printlist()

print("Sum of the list:")
resultlist = LinkedList()
# Bug fix: capture the head returned by add(); the original discarded it,
# leaving resultlist empty so the final printlist() printed nothing.
resultlist.head = resultlist.add(firstlist.head, secondlist.head)
resultlist.printlist()
"noreply@github.com"
] | noreply@github.com |
b8432a646bf06d2c41da4bf584e5132b94d497b3 | bd9b3bab0573ea0740b9fad6f1964bee7e46cb65 | /cachehandler.py | dd5eae14653be931b318f8e3e12a5b11c0e547ab | [] | no_license | meain/cachehandler | de3140eadbf19271d22ce65eecd40b5c0c8395f2 | e0c392a2ec7bc792460e097801d1b5642e8fa39e | refs/heads/master | 2021-01-09T20:40:38.197453 | 2016-07-07T11:10:47 | 2016-07-07T11:11:11 | 62,623,966 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,389 | py | from unqlite import UnQLite
import datetime
import os
import urllib
try:
import pickle
except:
import cPickle as pickle
class apihandler:
    '''
    Apihandler:
    * Cache data is stored in a NoSQL database
    * On incoming api request check for availability in cache for those which can be cached, otherwise jsut make a request and ...
    * Image caching will be done using another class, make calls to that when we get image links
    * The final image returned from the api will be a address of image from eywa server
    '''
    def __init__(self):
        # "apicache" collection inside the shared UnQLite file; create() is
        # a no-op when the collection already exists.
        self.db = UnQLite('cachehandlerdata')
        self.api_cache = self.db.collection('apicache')
        self.api_cache.create()

    def write_cache(self, api_function, args, kwargs, data):
        # Entries are keyed by a fragment of the callable's repr plus the
        # stringified arguments; data and timestamp are pickled.
        # NOTE(review): str(api_function).split(' ')[2] extracts the
        # qualified name only for bound methods ('<bound method X of ...>');
        # for a plain function ('<function f at 0x..>') it yields 'at' —
        # confirm callers always pass bound methods.
        self.api_cache.store({'api_function':str(api_function).split(' ')[2], 'args':str(args), 'kwargs':str(kwargs), 'data':pickle.dumps(data), 'time':pickle.dumps(datetime.datetime.now())})

    def read_cache(self, api_function, args, kwargs):
        # Return the newest matching entry's unpickled payload, or None on a
        # miss (no entry, or an entry stored with a None payload).
        cache_data = self.api_cache.filter(lambda api: api['api_function']==str(api_function).split(' ')[2] and api['args']==str(args) and api['kwargs']==str(kwargs))
        if not cache_data or cache_data[-1]['data'] is None:
            return None
        else :
            return pickle.loads(cache_data[-1]['data'])

    def api_call(self, api_function, *args, **kwargs):
        """Return cached data for this exact call, querying the API and
        populating the cache on a miss."""
        cache_data = self.read_cache(api_function, args, kwargs)
        if cache_data is None:
            print ('Querying api as no data found in cache...')
            data = api_function(*args, **kwargs)
            self.write_cache(api_function, args, kwargs, data)
            return data
        else :
            print ('Getting data from cache...')
            data = cache_data
            return data
class imagehandler:
    '''
    imagehandler:
    * cache data is stored in a nosql database
    * downloads an image once and serves the local file path afterwards
    '''
    def __init__(self):
        # "imagecache" collection in the same UnQLite file as apihandler.
        self.db = UnQLite('cachehandlerdata')
        self.api_cache = self.db.collection('imagecache')
        self.api_cache.create()

    def write_cache(self,image_link):
        """Download ``image_link`` next to the script and record it."""
        # base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        # File is named after the next collection index, keeping the URL's
        # extension.  NOTE(review): len+1 can collide if entries are ever
        # deleted — confirm deletions never happen.
        name = self.api_cache.__len__() + 1
        extension = image_link.split('.')[-1]
        filename = str(name) + '.' + str(extension)
        image_location = filename
        # NOTE(review): urllib.urlretrieve is the Python 2 spelling; under
        # Python 3 this is urllib.request.urlretrieve — confirm interpreter.
        urllib.urlretrieve(image_link, image_location)
        # NOTE(review): 'time' is stored as a raw datetime here while
        # apihandler pickles it — verify UnQLite serializes both forms.
        self.api_cache.store({'image_link':image_link,'location':image_location, 'time':datetime.datetime.now()})
        return image_location

    def read_cache(self, image_link):
        # Returns the stored location of the newest matching entry, or the
        # empty result list itself on a miss (falsy either way).
        cache_data = self.api_cache.filter(lambda api: api['image_link']==image_link)
        if not cache_data:
            return cache_data
        else :
            return cache_data[-1]['location']

    def get_image(self, image_link):
        """Return a local path for ``image_link``, downloading on a miss or
        when the cached file was deleted from disk."""
        cache_data = self.read_cache(image_link)
        if not cache_data :
            print ('Downloading image...')
            location = self.write_cache(image_link)
            return location
        else :
            print ('Getting image from cache...')
            location = cache_data
            if os.path.isfile(location):
                return location
            else :
                # Cache row exists but the file is gone — re-download.
                print ('Cached but file deleted...')
                location = self.write_cache(image_link)
                return location
| [
"abinsimon10@gmail.com"
] | abinsimon10@gmail.com |
1a0cdef8bd9da9edcf9f2a4dd5a6df09997594fa | 7d23cbeaf8fae2dca34180d8fa084b14c28cd1fc | /src/test/common/browser.py | 187cfe84bc5d7f43e67989726a84ddecb5fe1a76 | [] | no_license | Cnnnnnnn/test-framework | e29dfb34474f8e8fe52a2007334cc1668d046077 | 73f6e8fb88b8dc38bb53d3a88b7ae2faa9aa3521 | refs/heads/master | 2020-04-03T09:52:25.326538 | 2018-10-30T05:27:40 | 2018-10-30T05:27:40 | 155,178,640 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,633 | py | import time
import os
from selenium import webdriver
from src.utils.config import DRIVER_PATH, REPORT_PATH
# 可根据需要自行扩展
CHROMEDRIVER_PATH = DRIVER_PATH + '\chromedriver.exe'
#FIREFOXDRIVER_PATH = DRIVER_PATH + '\geckodriver.exe'
TYPES = {'firefox': webdriver.Firefox, 'chrome': webdriver.Chrome}
EXECUTABLE_PATH = {'firefox': 'wires', 'chrome': CHROMEDRIVER_PATH}
class UnSupportBrowserTypeError(Exception):
pass
class Browser(object):
def __init__(self, browser_type='firefox'):
self._type = browser_type.lower()
if self._type in TYPES:
self.browser = TYPES[self._type]
else:
raise UnSupportBrowserTypeError('仅支持%s!' % ', '.join(TYPES.keys()))
self.driver = None
def get(self, url, maximize_window=True, implicitly_wait=10):
self.driver = self.browser(executable_path=EXECUTABLE_PATH[self._type])
self.driver.get(url)
if maximize_window:
self.driver.maximize_window()
self.driver.implicitly_wait(implicitly_wait)
return self
def save_screen_shot(self, name='screen_shot'):
day = time.strftime('%Y%m%d', time.localtime(time.time()))
screenshot_path = REPORT_PATH + '\screenshot_%s' % day
if not os.path.exists(screenshot_path):
os.makedirs(screenshot_path)
tm = time.strftime('%H%M%S', time.localtime(time.time()))
screenshot = self.driver.save_screenshot(screenshot_path + '\\%s_%s.png' % (name, tm))
return screenshot
def close(self):
self.driver.close()
def quit(self):
self.driver.quit() | [
"cnn@thinkerx.com"
] | cnn@thinkerx.com |
45896edf844d2e31964bee92787f3b953bb9ff0b | 82b4ec4abbdcdf6a790fc91365e1863517a28e1c | /build/week4_moveit_config/catkin_generated/pkg.installspace.context.pc.py | b4b8a74ab0cc056862c8e4a053b9251357fedf23 | [] | no_license | SiChiTong/ROS_factory_picknplace | 5fb21b703852465c1f87e7994803344db54f13f7 | 1365d465889a9960fce0b7a6872ba403d7fd2626 | refs/heads/master | 2022-04-17T10:36:57.887468 | 2020-04-12T03:41:35 | 2020-04-12T03:41:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 381 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# NOTE: every value below is substituted by catkin at CMake configure time;
# the "" != "" guards turn an empty substitution into [] rather than [""].
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "week4_moveit_config"
PROJECT_SPACE_DIR = "/home/lxu9/hrwros_ws/install"
PROJECT_VERSION = "0.3.0"
| [
"pring5@ford.com"
] | pring5@ford.com |
31dfa1864febcd5467c041ab7587d731a7dcf566 | 18ba42a5238a03c0ad651f30cff4fca324b8f0cf | /prediction_bias.py | 6e9aa60698beb6f81921183ded43c2d6af80bb85 | [] | no_license | nassimtaleb/feature_somellier | 381de7185490090197441cfc10c26824c187d62c | 67c90713a8e5d2c08b18c106e1c854b1b26fce99 | refs/heads/master | 2022-12-13T18:38:24.673292 | 2020-08-24T18:09:14 | 2020-08-24T18:09:14 | 289,996,132 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,464 | py | #prediction_bias.py
from .statistic_functions import *
import math
import scipy as sc
from scipy import stats
import pandas as pd
def object_columns(df):
    """Iterate over the names of *df*'s object-dtype (string-like) columns."""
    return (col for col in df.columns if df[col].dtype == object)
def continuous_columns(df):
    """Iterate over the names of *df*'s integer and float columns."""
    return (col for col in df.columns if df[col].dtype in (int, float))
def test_catgorical_fisher_asociation(df,significant_pval=0.8):
    """One-sided Fisher exact tests of every distinct value of every object
    column against the binary ``pred_state`` column.

    Returns a lazy map yielding, per object column, a filter of the
    (p-value, column, value) triples whose p-value is below
    ``significant_pval``.
    NOTE(review): the 0.8 default keeps almost every triple — confirm intent.
    """
    def get_pval_for_single_value(val,categorical_col):
        # 2x2 contingency table of pred_state vs. (column == value); a
        # degenerate table (any other shape) is treated as p = 1.
        tab_ocur=pd.crosstab(df["pred_state"],df[categorical_col]==val)
        if tab_ocur.shape != (2,2):
            return 1
        return stats.fisher_exact(tab_ocur,alternative="greater")[1]
    return map(lambda col: filter(lambda test_result: test_result[0]<significant_pval, map( lambda value: (get_pval_for_single_value(value,col),col,value), df[col].unique())),object_columns(df))
def multicategorical_significance(df,significant_correlation=0.1):
    """(corrected Cramér's V) symmetric association of each object column
    with ``pred_state``; yields (score, column) pairs above the cutoff."""
    return filter(lambda y :y[0]>=significant_correlation , map(lambda x: (cramers_corrected_stat(df[x],df["pred_state"]),x),object_columns(df)))

def forward_multicategorical_significance(df,significant_correlation=0.1):
    """Theil's U with the column as first argument — presumably U(column |
    pred_state); confirm direction against statistic_functions.theils_u."""
    return filter(lambda y :y[0]>=significant_correlation , map(lambda x: (theils_u(df[x],df["pred_state"]),x),object_columns(df)))

def backward_multicategorical_significance(df,significant_correlation=0.1):
    """Theil's U with the arguments in the opposite order to the forward
    variant; yields (score, column) pairs above the cutoff."""
    return filter(lambda y :y[0]>=significant_correlation , map(lambda x: (theils_u(df["pred_state"],df[x]),x),object_columns(df)))
def continuous_significance(df, significant_pval=0.01):
    """Point-biserial p-values of ``pred_state`` against every int/float
    column; yields (p-value, column) pairs at or below the cutoff."""
    # Work on a copy: infinities break the correlation, so they are turned
    # into NaN and rows with NaN in any numeric column are dropped.
    # NOTE(review): ``np`` is not imported in this module's visible imports —
    # presumably it comes from the ``statistic_functions`` star import.
    df=df.copy()
    df=df.replace([np.inf, -np.inf], np.nan)
    df=df.dropna(subset=list(continuous_columns(df)))
    return filter(lambda y :y[0]<=significant_pval, map(lambda x: (stats.pointbiserialr(df["pred_state"],df[x])[1],x),continuous_columns(df)))
class stat_summarizer:
    """Pair an evaluator with a legend printer.

    Calling the instance announces which evaluator is running, evaluates
    ``evaluator(df, **kwargs)`` and hands the result to ``legend``.
    """

    def __init__(self, evaluator, legend):
        self.evaluator = evaluator
        self.legend = legend

    def __call__(self, df, **kwargs):
        print("On evaluating {}".format(self.evaluator.__name__))
        outcome = self.evaluator(df, **kwargs)
        self.legend(outcome)
def make_per_value_summary(filter_result):
    """Print one report line per (p-value, column, value) triple found in
    each column's result iterable."""
    for col_result in filter_result:
        for pval, column, value in col_result:
            print("column {0} was found asociated to the target at value {2} with a significance of {1} ".format(column, pval, value))
def make_column_summary(filter_result):
    """Print one report line per (score, column) pair."""
    for score, column in filter_result:
        print("column {0} was found asociated to the target with a significance of {1} ".format(column, score))
def test_grouped_catgorical_fisher_asociation(df,groupby_col="question_id",significant_pval=0.8):
    """Like test_catgorical_fisher_asociation, but collapses rows to one
    observation per ``groupby_col`` group (taking each column's group
    maximum) before running the one-sided Fisher test.

    Returns a lazy map yielding, per object column, the (p-value, column,
    value) triples below ``significant_pval``.
    """
    def get_pval_for_single_value(val,categorical_col,groupby_col=groupby_col,df=df):
        df=df.copy()
        df[categorical_col]= df[categorical_col]==val
        # Bug fixes vs. the original:
        # * ``groupby_col`` must be part of the column selection, otherwise
        #   the groupby raises KeyError;
        # * GroupBy.max takes no ``axis`` argument — the intent is the
        #   per-group maximum of each selected column.
        grouped = df[[groupby_col,"pred_state",categorical_col]].groupby(groupby_col).max()
        tab_ocur=pd.crosstab(grouped["pred_state"],grouped[categorical_col])
        if tab_ocur.shape != (2,2):
            # Degenerate table: treat as non-significant.
            return 1
        return stats.fisher_exact(tab_ocur,alternative="greater")[1]
    return map(lambda col: filter(lambda test_result: test_result[0]<significant_pval, map( lambda value: (get_pval_for_single_value(value,col),col,value), df[col].unique())),object_columns(df))
| [
"felibivortha@ar0fvfch1rbl415.ml.com"
] | felibivortha@ar0fvfch1rbl415.ml.com |
3e59bc00c410829dd3189f765d7800bbf2858f7b | a626636f537685b580662cdaba59df545443707f | /apps/brand/models.py | 0937c60f0fc229bd19309bd7212fbf3d4dab1a50 | [] | no_license | friacode/rambutan | a26e9575cba94d2d37416a268935f6a3ad196083 | cb00f850293e3ad5e1a997f4e6db409643dbeb8f | refs/heads/main | 2023-04-28T13:31:20.566681 | 2021-05-10T12:47:55 | 2021-05-10T12:47:55 | 355,410,011 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 532 | py | from django.db import models
# Create your models here.
class Brand(models.Model):
    """Product brand; stored in table ``rt_brand``, newest-updated first."""
    # Brand name (verbose name is Korean for "product brand").
    name = models.CharField('상품 브랜드', max_length=100, null=False)
    # Set once on insert (auto_now_add) vs. refreshed on every save (auto_now).
    create_at = models.DateTimeField('Create Date', auto_now=False, auto_now_add=True)
    updated = models.DateTimeField('Update Date', auto_now=True, auto_now_add=False)

    class Meta:
        verbose_name = 'brand'
        verbose_name_plural = 'brands'
        db_table = 'rt_brand'
        ordering = ('-updated',)

    def __str__(self):
        return self.name
"friacode@gmail.com"
] | friacode@gmail.com |
aee8c7382e9ef929fa8a4ad1ab7d892c274853c4 | aef3c65af32b823178b14849fd041a3b8c5af4bf | /logisticR_example.py | d36f024f7f04ec3c276720316d42fcfcdc1848e9 | [
"MIT"
] | permissive | endsley/sklearn_example | dd57fcc4df84fcd19aa34be7adb4f3b3bb5d7abe | cc16219c6ca1314106f8eb991479329b4fac532c | refs/heads/master | 2021-06-16T10:12:10.244605 | 2021-05-11T17:32:55 | 2021-05-11T17:32:55 | 176,625,922 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 328 | py | #!/usr/bin/env python
import numpy as np
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

# Two Gaussian clusters of 4 points each; the second is shifted by +5 in
# both dimensions so the classes are (almost surely) linearly separable.
X = np.vstack((np.random.randn(4,2), np.random.randn(4,2)+5))
# Labels: first cluster -> 0, second cluster -> 1.
Y = np.hstack((np.zeros(4) , np.ones(4)))
# NOTE(review): load_iris is imported but never used.
clf = LogisticRegression(random_state=0).fit(X, Y)
clf.predict(X)  # predictions computed but discarded
print(clf.score(X, Y))  # mean accuracy on the training points
| [
"juliusctw@gmail.com"
] | juliusctw@gmail.com |
5e1a2a2c6ab177e2393c7ea72fd3730a6fa52ec4 | 83c585b9cb7318e5dfd2c13a1da25e1f32c41d2b | /SKIN_TEST/madgwickahrs.py | 56b9caa28e4dfa55345ac73336e6271a33a5f96e | [] | no_license | hehonglu123/IMU_3D_Tracking | d0f24252846406602311ae2325d033613392da33 | 1cae1d57413e810db9a73d170e00955365df051a | refs/heads/master | 2022-12-13T18:42:22.997290 | 2019-05-09T11:50:19 | 2019-05-09T11:50:19 | 181,344,661 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,041 | py | # -*- coding: utf-8 -*-
"""
Copyright (c) 2015 Jonas Böer, jonas.boeer@student.kit.edu
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import warnings
import numpy as np
from numpy.linalg import norm
from quaternion import Quaternion
class MadgwickAHRS:
    """Madgwick gradient-descent AHRS filter.

    Fuses gyroscope, accelerometer and (optionally) magnetometer samples
    into an orientation quaternion held in ``self.quaternion``.

    Bug fix vs. the original: the zero-norm guards compared with ``is 0``
    (identity, always False for a float norm), so zero-length sensor
    vectors were never rejected; they now use ``== 0``.

    NOTE(review): samplePeriod/quaternion/beta are class attributes shared
    by all instances until overridden per instance in __init__.
    """
    samplePeriod = 1/256
    quaternion = Quaternion(1, 0, 0, 0)
    beta = 1

    def __init__(self, sampleperiod=None, quaternion=None, beta=None):
        """
        Initialize the class with the given parameters.
        :param sampleperiod: The sample period
        :param quaternion: Initial quaternion
        :param beta: Algorithm gain beta
        :return:
        """
        if sampleperiod is not None:
            self.samplePeriod = sampleperiod
        if quaternion is not None:
            self.quaternion = quaternion
        if beta is not None:
            self.beta = beta

    def update(self, gyroscope, accelerometer, magnetometer):
        """
        Perform one update step with data from a AHRS sensor array
        :param gyroscope: A three-element array containing the gyroscope data in radians per second.
        :param accelerometer: A three-element array containing the accelerometer data. Can be any unit since a normalized value is used.
        :param magnetometer: A three-element array containing the magnetometer data. Can be any unit since a normalized value is used.
        :return:
        """
        q = self.quaternion

        gyroscope = np.array(gyroscope, dtype=float).flatten()
        accelerometer = np.array(accelerometer, dtype=float).flatten()
        magnetometer = np.array(magnetometer, dtype=float).flatten()

        # Normalise accelerometer measurement
        if norm(accelerometer) == 0:  # was `is 0`, which never triggered
            warnings.warn("accelerometer is zero")
            return
        accelerometer /= norm(accelerometer)

        # Normalise magnetometer measurement
        if norm(magnetometer) == 0:  # was `is 0`, which never triggered
            warnings.warn("magnetometer is zero")
            return
        magnetometer /= norm(magnetometer)

        # Reference direction of Earth's magnetic field in the earth frame.
        h = q * (Quaternion(0, magnetometer[0], magnetometer[1], magnetometer[2]) * q.conj())
        b = np.array([0, norm(h[1:3]), 0, h[3]])

        # Gradient descent algorithm corrective step
        f = np.array([
            2*(q[1]*q[3] - q[0]*q[2]) - accelerometer[0],
            2*(q[0]*q[1] + q[2]*q[3]) - accelerometer[1],
            2*(0.5 - q[1]**2 - q[2]**2) - accelerometer[2],
            2*b[1]*(0.5 - q[2]**2 - q[3]**2) + 2*b[3]*(q[1]*q[3] - q[0]*q[2]) - magnetometer[0],
            2*b[1]*(q[1]*q[2] - q[0]*q[3]) + 2*b[3]*(q[0]*q[1] + q[2]*q[3]) - magnetometer[1],
            2*b[1]*(q[0]*q[2] + q[1]*q[3]) + 2*b[3]*(0.5 - q[1]**2 - q[2]**2) - magnetometer[2]
        ])
        j = np.array([
            [-2*q[2], 2*q[3], -2*q[0], 2*q[1]],
            [2*q[1], 2*q[0], 2*q[3], 2*q[2]],
            [0, -4*q[1], -4*q[2], 0],
            [-2*b[3]*q[2], 2*b[3]*q[3], -4*b[1]*q[2]-2*b[3]*q[0], -4*b[1]*q[3]+2*b[3]*q[1]],
            [-2*b[1]*q[3]+2*b[3]*q[1], 2*b[1]*q[2]+2*b[3]*q[0], 2*b[1]*q[1]+2*b[3]*q[3], -2*b[1]*q[0]+2*b[3]*q[2]],
            [2*b[1]*q[2], 2*b[1]*q[3]-4*b[3]*q[1], 2*b[1]*q[0]-4*b[3]*q[2], 2*b[1]*q[1]]
        ])
        step = j.T.dot(f)
        step /= norm(step)  # normalise step magnitude

        # Compute rate of change of quaternion
        qdot = (q * Quaternion(0, gyroscope[0], gyroscope[1], gyroscope[2])) * 0.5 - self.beta * step.T

        # Integrate to yield quaternion
        q += qdot * self.samplePeriod
        self.quaternion = Quaternion(q / norm(q))  # normalise quaternion

    def update_imu(self, gyroscope, accelerometer):
        """
        Perform one update step with data from a IMU sensor array
        :param gyroscope: A three-element array containing the gyroscope data in radians per second.
        :param accelerometer: A three-element array containing the accelerometer data. Can be any unit since a normalized value is used.
        """
        q = self.quaternion

        gyroscope = np.array(gyroscope, dtype=float).flatten()
        accelerometer = np.array(accelerometer, dtype=float).flatten()

        # Normalise accelerometer measurement
        if norm(accelerometer) == 0:  # was `is 0`, which never triggered
            warnings.warn("accelerometer is zero")
            return
        accelerometer /= norm(accelerometer)

        # Gradient descent algorithm corrective step
        f = np.array([
            2*(q[1]*q[3] - q[0]*q[2]) - accelerometer[0],
            2*(q[0]*q[1] + q[2]*q[3]) - accelerometer[1],
            2*(0.5 - q[1]**2 - q[2]**2) - accelerometer[2]
        ])
        j = np.array([
            [-2*q[2], 2*q[3], -2*q[0], 2*q[1]],
            [2*q[1], 2*q[0], 2*q[3], 2*q[2]],
            [0, -4*q[1], -4*q[2], 0]
        ])
        step = j.T.dot(f)
        step /= norm(step)  # normalise step magnitude

        # Compute rate of change of quaternion
        qdot = (q * Quaternion(0, gyroscope[0], gyroscope[1], gyroscope[2])) * 0.5 - self.beta * step.T

        # Integrate to yield quaternion
        q += qdot * self.samplePeriod
        self.quaternion = Quaternion(q / norm(q))  # normalise quaternion

    def set_rate(self, sampleRate=1/256):
        """Set the integration sample period (seconds per sample)."""
        self.samplePeriod = sampleRate
"ppng2@illinois.edu"
] | ppng2@illinois.edu |
2bff614c1249f3b2c78aa00f910b568a2f77cbb8 | 3ff503a22bfbe8dde11d5fc49cdafc38dab75117 | /lib/log_config.py | cb3621e6d42b63954a38c2cb31fe27fc9b545fcc | [] | no_license | CharnyshMM/python_tracker | 893958892aa3278bf55cdb335461e766b377ae0b | 619b460724974ecdb83132de2da2075a68188fbb | refs/heads/master | 2020-06-29T22:08:27.523135 | 2018-06-18T10:19:18 | 2018-06-18T10:19:18 | 200,637,151 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 278 | py | import logging
import os
# Default location and file name for the tracker library's log output.
DEFAULT_DIR = os.path.join(os.environ['HOME'], 'py_tracker')
DEFAULT_LOGFILE = 'py_tracker_lib.log'
# Default verbosity and record layout for the library logger.
DEFAULT_LEVEL = logging.DEBUG
DEFAULT_FORMAT = '%(asctime)s - %(name)s : %(levelname)s : %(funcName)s : %(module)s : %(message)s'
# Master switch consulted by the library before emitting log records.
LOGGING_ENABLED = True
"mr.nick.cher@gmail.com"
] | mr.nick.cher@gmail.com |
8b339010a4f7f37b45ff078b967f4f02cbb8b8d1 | 37138f86b2704a87c3840cf3a7e6fa0e169ebc81 | /diccionary_scrapper.py | 8a953317a70600da881a9a843775dbd20b9681d2 | [] | no_license | vmoreno014/Python-practice | 1f989a6f751e643dd180e8c9e35f3d7b530c356c | 91f4781525c9935f1dadbe32f61f5950d7f83055 | refs/heads/main | 2023-03-22T15:15:06.347758 | 2021-03-13T22:17:26 | 2021-03-13T22:17:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 280 | py | import urllib.request, urllib.parse, urllib.error
# Fetch romeo.txt and count how often each whitespace-separated word occurs.
fhand = urllib.request.urlopen("http://data.pr4e.org/romeo.txt")
counts = dict()
for line in fhand:
    # The socket yields bytes; decode to str before splitting into words.
    words = line.decode().split()
    for word in words:
        counts[word] = counts.get(word, 0) + 1
print(counts)
| [
"noreply@github.com"
] | noreply@github.com |
ee28104231e39d74f9252de0420dfa501e38557a | 6efacaed48c9c2015b20baae5b1e7812cf2614a0 | /Po/test/Abackground_mouse_one.py | 533335d6d971031ab7fe5f3398b20fcedabe8681 | [] | no_license | Felixshao/play | 53e12b7b592634a3e5515addde978e1b2a2a4591 | 4364cb91141bbbca835688d19bddb87aa0beb6b4 | refs/heads/master | 2021-05-23T19:49:56.095083 | 2020-04-07T06:09:10 | 2020-04-07T06:09:10 | 253,441,825 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,318 | py | import pygame, os
from pygame.locals import *
from sys import exit
from config.GetProjectPath import get_project_path
path = get_project_path()
background_img_filepath = os.path.join(path, 'img', 'sushiplate.jpg')
mouse_img_filepath = os.path.join(path, 'img', 'fugu.png')

# Initialise pygame and prepare the hardware
pygame.init()
# Create the window; arguments: resolution, flags (0 = no special features), colour depth
screen = pygame.display.set_mode((1920, 1080), 0, 32)
# Set the window title
pygame.display.set_caption('Abcakground_mouse_one')
# Load and convert the images: convert() turns the image data into a Surface,
# convert_alpha() additionally preserves transparency
background = pygame.image.load(background_img_filepath).convert()
mouse = pygame.image.load(mouse_img_filepath).convert_alpha()

# Main game loop
while True:
    for event in pygame.event.get():
        # Exit the game when a QUIT event is received
        if event.type == QUIT:
            exit()
    # Draw the background; blit() takes a Surface and top-left coordinates
    screen.blit(background, (0, 0))
    # Read the mouse position
    x, y = pygame.mouse.get_pos()
    # Compute the top-left corner of the cursor image (centre it on the mouse)
    x -= mouse.get_width() / 2
    y -= mouse.get_height() / 2
    # Draw the cursor
    screen.blit(mouse, (x, y))
    # Refresh the display
    pygame.display.update()
"shaoyufei1234@163.com"
] | shaoyufei1234@163.com |
cce0a2deb48c56ec7c0fa8038dd01ce051024d95 | d3a9701c70c296f7a6e1abda0defe87e0a697c65 | /app.py | 2f623da99cc625e0fcccb2b7ba2e7fb6aba72ac5 | [] | no_license | MikeKerkhove/BonjourLaBiere | ed0c1025add84ddc5ea479bd3cc88576954a713d | 8a3d7a23df192cea8aad5cd7104d2d39c92e96e7 | refs/heads/master | 2022-11-07T08:58:23.659869 | 2020-06-24T15:29:49 | 2020-06-24T15:29:49 | 274,091,364 | 0 | 0 | null | 2020-06-24T15:29:52 | 2020-06-22T09:12:00 | Python | UTF-8 | Python | false | false | 4,122 | py | from flask import Flask, render_template, request, redirect, flash, make_response
from flask_sqlalchemy import SQLAlchemy
from datetime import date
from werkzeug.security import generate_password_hash, check_password_hash
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///database/beerdb.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.secret_key = b'_5#y2L"F4Q8z\n\xec]/'
admin_pass = 'pbkdf2:sha256:150000$QgbX2ojM$0956b0a6ce9f7165f1741ea30f7416b7f6f9056250109b03d4d3b3e51e097ce1'
db = SQLAlchemy(app)
### Init DB ###
class Pictures(db.Model):
    """A submitted picture: a named link, its submitter, and whether an
    admin has activated it for display (with the activation date)."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255), nullable=False)
    # Only active pictures are shown on the public home page.
    active = db.Column(db.Boolean, nullable=False, default=False)
    date = db.Column(db.Date, nullable=True, default=None)
    link = db.Column(db.Text, nullable=False)
    # Display name of the person who proposed the picture.
    by = db.Column(db.String(255), nullable=False)

    def __repr__(self):
        return '<Pictures "{}">'.format(self.name)
### pages url ###
@app.route('/', defaults={'page_num': 1})
@app.route('/page/<int:page_num>')
def home(page_num):
    """Public gallery: one active picture per page, newest date first."""
    pics = Pictures.query.filter_by(active=1).order_by(Pictures.date.desc()).paginate(max_per_page=1, page=page_num)
    return render_template('pages/home.html', pics=pics)

@app.route('/new')
def new():
    """Form for proposing a new picture."""
    return render_template('pages/newpics.html')

@app.route('/about')
def about():
    """Static about page."""
    return render_template('pages/about.html')
@app.route('/admin')
def admin():
    """Admin listing of every picture, behind HTTP Basic auth.

    The only accepted credentials are username 'Admin' with a password
    matching the module-level ``admin_pass`` hash.
    """
    if request.authorization and request.authorization.username == 'Admin' and check_password_hash(admin_pass, request.authorization.password):
        flash(u'Vous êtes bien connecté en tant qu\'administrateur', 'success')
        pics = Pictures.query.all()
        return render_template('pages/admin.html', pics=pics)
    # Missing/incorrect credentials: challenge the browser for Basic auth.
    return make_response('Erreur dans les identifiants/MDP.', 401, {'WWW-Authenticate' : 'Basic realm="Login requis"'})

@app.errorhandler(404)
def page_not_found(error):
    """Redirect unknown URLs to the home page with a flash message."""
    flash(u'La page que vous demandez n\'existe pas.', 'danger')
    return redirect("/", code=302)
### CRUD ###
@app.route('/post/create', methods=['post'])
def create():
    """Create a picture proposal from the submitted form (name/link/by).

    New rows rely on the model default ``active=False``, so proposals stay
    hidden until an admin activates them.
    """
    if request.form['name'] and request.form['link'] and request.form['by']:
        picName = request.form['name']
        picLink = request.form['link']
        picBy = request.form['by']
        newPic = Pictures(name=picName,link=picLink,by=picBy)
        db.session.add(newPic)
        db.session.commit()
        flash(u'Votre proposition a bien été transmise.', 'success')
        return redirect("/", code=302)
    else:
        flash(u'Vous n\'avez pas rempli le formulaire correctement.', 'danger')
        return redirect("/", code=302)
@app.route('/post/update', methods=['post'])
def update():
    """Store an updated picture from the submitted form.

    Fixes vs. the original:
    * the validation chained ``and`` *inside* the brackets
      (``request.form['date' and ...]``), so only one computed key was
      actually checked — each field is now checked individually;
    * the route lacked ``methods=['post']`` although it reads POST form
      data (now consistent with ``create``);
    * ``Pictures`` has no ``valid`` column, so passing ``valid=`` to its
      constructor raised a TypeError — the form's ``valid`` field is still
      required but no longer forwarded to the model.
    NOTE(review): this still inserts a *new* row rather than updating an
    existing one — the form carries no primary key; confirm intent.
    """
    if (request.form['name'] and request.form['link'] and request.form['by']
            and request.form['date'] and request.form['valid'] and request.form['active']):
        picName = request.form['name']
        picLink = request.form['link']
        picBy = request.form['by']
        picDate = request.form['date']
        picActive = request.form['active']
        updatePic = Pictures(name=picName, link=picLink, by=picBy,
                             date=picDate, active=picActive)
        db.session.add(updatePic)
        db.session.commit()
        flash(u'La photo a bien été mise à jour.', 'success')
        return redirect("/", code=302)
    else:
        flash(u'Il y a eu une erreur lors de la mise a jour de la photo.', 'danger')
        return redirect("/", code=302)
@app.route('/post/delete/<int:id>')
def delete(id):
    """Delete the picture with the given primary key.

    NOTE(review): ``query.get`` returns None for an unknown id, which would
    make ``db.session.delete`` fail — confirm ids always come from the
    admin listing.
    """
    picToDelete = Pictures.query.get(id)
    db.session.delete(picToDelete)
    db.session.commit()
    flash(u'La photo a bien été supprimé.', 'success')
    return redirect("/", code=302)

@app.route('/get/all')
def getAll():
    """Debug endpoint: print every picture to the server console."""
    allPics = Pictures.query.all()
    print(allPics)
    return redirect("/", code=302)

@app.route('/get/<int:id>')
def get(id):
    """Debug endpoint: print one picture to the server console."""
    picture = Pictures.query.get(id)
    print(picture)
    return redirect("/", code=302)

if __name__ == '__main__':
    # Create missing tables, then start the development server.
    db.create_all()
    app.run(debug=True)
"m.kerkhove92@gmail.com"
] | m.kerkhove92@gmail.com |
8cfa750d806ed9fc7f34c288b6df085114ffb7d0 | 4ab0b96b9dd131bb3ccffc3800699b94cd0f77fb | /Assignment-9.py | 3eea5f7fa6604df8729add5f814b13915ca7dcef | [] | no_license | pankajdeep/Assignment-9 | 9f53212d1ec1984642ae2a1a456b856d7c27598d | f11a93dba4031df8952a771598cccb70400cb067 | refs/heads/master | 2020-03-27T09:12:36.950460 | 2018-08-27T16:12:02 | 2018-08-27T16:12:02 | 146,322,367 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,674 | py | #Q.1- Name and handle the exception occured in the following program:
#a=3
#if a<4:
# a=a/(a-3)
# print(a)
a=3
try:
if(a<4):
a=a/(a-3)
print(a)
except ZeroDivisionError as msg:
print("The name of the exception is ZeroDivisionError and the message is:",msg)
#Q.2- Name and handle the exception occurred in the following program:
#l=[1,2,3]
#print(l[3])
l=[1,2,3]
try:
print(l[3])
except:
print("The name of the Exception is IndexError and the message is list index out of range")
#Q.3- What will be the output of the following code:
# Program to depict Raising Exception
#try:
# raise NameError("Hi there") # Raise Error
#except NameError:
# print("An exception")
# raise # To determine whether the exception was raised or not
# Output is:
#   An exception
#   NameError: Hi there
#Q.4- What will be the output of the following code:
# Function which returns a/b
#def AbyB(a , b):
# try:
# c = ((a+b) / (a-b))
# except ZeroDivisionError:
# print("a/b result in 0")
# else:
# print(c)
#Driver program to test above function
# AbyB(2.0, 3.0)
# AbyB(3.0, 3.0)
# Output is:
#   case 1 output: -5.0
#   case 2 output: a/b result in 0
#Q.5- Write a program to show and handle following exceptions:
#1. Import Error
#2. Value Error
#3. Index Error
# 1. Import Error
try:
import maths
except ImportError:
print("Import error Occured")
# 2. Value Error
try:
inp=int(input("Enter a string to throw value error exception"))
except ValueError:
print("Cant enter string to an integer variable")
# 3. Index Error
try:
arr=['a','b',3,4]
print(arr[4])
except IndexError:
print("Index out of bound")
| [
"pankajdeepsahota@gmail.com"
] | pankajdeepsahota@gmail.com |
1570ed0180207a321a0286bc17723f5f4ad582ad | 2fc6981c6ea90d8310b47be0b0cb5c41674ded63 | /test.py | 68fa407d2b0fba077166352521f020e96640b14f | [] | no_license | Akanksha461/Appium | 2a473f01a0407ee00921159a2720e5547706969b | 858aab9de66e672ab2bd978c6f2b9c9a5e383f8e | refs/heads/master | 2021-05-09T13:31:07.654707 | 2018-01-26T12:21:32 | 2018-01-26T12:21:32 | 119,037,935 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,104 | py | import os
from appium import webdriver
import time
web_contexts=[]
# Desired capabilities describing the device and app under test.
desired_caps={}
desired_caps['platformName']='Android'
#this is the android version you want to target- this should match the android version in the device
desired_caps['platformVersion']='6.0.1'
desired_caps['deviceName']='samsung'
# Absolute path of the APK to install; replace the placeholder.
desired_caps['app']=os.path.join('path/of/your/apk')
# Connect to the locally running Appium server.
driver = webdriver.Remote('http://localhost:4723/wd/hub',desired_caps)
#now locate the elements using chrome inspect
#switches context to webview since Appium cannot work with webview
print(driver.current_context)
web_contexts = driver.contexts
# it would print NATIVE_APP, WEBVIEW_com.app.destimoney
# NOTE(review): indexing [1] assumes exactly one webview context exists.
webview = web_contexts[1]
driver.switch_to.context('WEBVIEW_com.app.destimoney')
print(driver.current_context)
#just a test to see an element is displayed
print(driver.find_element_by_css_selector('.title').is_displayed())
#clicks on skip button on splash screen once
driver.find_element_by_xpath('//span[contains(text(),"Skip")]').click()
#clicks on Next button on splash screen once
# driver.find_element_by_xpath('//span[contains(text(),"Next")]').click()
#
# time.sleep(5)
#
# driver.find_element_by_xpath('//span[contains(text(),"Next")]').click()
#
# time.sleep(1)
#
# driver.find_element_by_xpath('//span[contains(text(),"Next")]').click()
# time.sleep(1)
# Open the login screen.
driver.find_element_by_id("login_btn").click()
# signup
# driver.find_element_by_xpath('//*[@class="text-input text-input-md"]').send_keys("akanksha")
#
# time.sleep(1)
#
# driver.find_element_by_xpath('(//*[@class="text-input text-input-md"])[2]').send_keys("ak")
# time.sleep(1)
#
# driver.find_element_by_xpath('(//*[@class="text-input text-input-md"])[3]').send_keys("verma")
# time.sleep(1)
#
# driver.find_element_by_xpath('(//*[@class="text-input text-input-md"])[4]').send_keys("id")
# time.sleep(1)
#
# driver.find_element_by_xpath('(//*[@class="text-input text-input-md"])[5]').send_keys("123455677")
# time.sleep(1)
#
# driver.find_element_by_xpath('(//*[@class="text-input text-input-md"])[6]').send_keys("111111")
# time.sleep(1)
# driver.find_element_by_class_name("item-cover disable-hover item-cover-md item-cover-default item-cover-default-mdcheckbox-icon").click()
# time.sleep(3)
# driver.find_element_by_id("undefined").click()
#
# driver.find_element_by_xpath('//span[contains(text(),"Sign Up")]').click()
#
# time.sleep(10)
# for otp
# driver.find_element_by_class_name("pinbox input input-md ng-pristine ng-valid ng-touched").send_keys("4")
# # driver.find_element_by_xpath('(//*[@class="item item-block item-md item-input item-input-disabled ng-untouched ng-pristine ng-valid"])[1]').send_keys("111111")
# driver.find_element_by_id("login_btn").click()
time.sleep(5)
# google sign in
driver.find_element_by_xpath('//span[contains(text(),"Google")]').click()
time.sleep(10)
# driver.find_elements_by_css_selector('[ng-reflect-name="menu"]').click()
# Bug fix: find_elements_* returns a list, which has no .click();
# a single element is needed here.
driver.find_element_by_css_selector("ion-icon.icon.icon-md.ion-md-menu").click()
time.sleep(10)
# driver.find_elements_by_css_selector('div[class="button-effect"]').click()
# driver.quit()
| [
"noreply@github.com"
] | noreply@github.com |
36000e3e071a83a02bce91911373d936ae50680c | c6c5db5310c61f7bd03691e48b89fb1f39d8533c | /src/rf/prediction.evolution.rf.bert.time.py | c756ba924a280d9531c7375a9562c0e1114c99a3 | [] | no_license | twoexponents/reddit | 1f5aab42aefd71e134c0f3489e54f906569a1097 | 9da7572424ff6f35f45f6c1d81921f05e279a1aa | refs/heads/master | 2020-05-05T01:48:02.928714 | 2020-01-17T04:58:20 | 2020-01-17T04:58:20 | 179,615,217 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 813 | py | import tensorflow as tf
import numpy as np
import sys
from myrflib import runRFModel
from myloaddatalib import makeLearnTestSet
def main(argv):
    """Train and evaluate the RF + BERT + time model for seq lengths 3..3.

    Command-line usage: script.py [input_length] [exclude_newbie]
    NOTE: options are read from sys.argv directly; the `argv` parameter
    handed over by tf.app.run is ignored (original behavior preserved).
    """
    exclude_newbie = 0; input_length = 1
    if len(sys.argv) >= 3:
        exclude_newbie = int(sys.argv[2])
    if len(sys.argv) >= 2:
        input_length = int(sys.argv[1])
    print ('exclude_newbie: %d'%(exclude_newbie))
    for seq_length in range(3, 4):
        # Fix: removed a dead read of data/seq.learn.%d.csv whose result
        # (learn_instances) was never used -- makeLearnTestSet loads the
        # data itself.
        learn_X, learn_Y, test_X, test_Y = makeLearnTestSet(seq_length, bert=1, time=1, exclude_newbie=exclude_newbie, rf=1)
        runRFModel(seq_length, learn_X, learn_Y, test_X, test_Y)
# Entry point: tf.app.run parses TF flags and then calls main(argv).
if __name__ == '__main__':
    tf.app.run(main=main, argv=[sys.argv])
| [
"twoexponents@gmail.com"
] | twoexponents@gmail.com |
765478bbc01b00211d961da6d0bd4bdab237f828 | 208baab269ddffab1a93e7dc70b052d07bf50560 | /hood/migrations/0002_auto_20200120_1140.py | a6e6fe56c70f1ee382edb53a3eebe174b83a3671 | [] | no_license | marysinaida/Neighborhood | a1035f09515ae9a24bed74ddf1263e06db134c94 | a285df5528bb99d6cb69f9ab41e320682422fe9d | refs/heads/master | 2020-12-13T23:29:18.148498 | 2020-01-21T15:04:53 | 2020-01-21T15:04:53 | 234,562,242 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,691 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2020-01-20 08:40
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Second migration of the `hood` app.

    Adds the Business, Neighborhood, Post and Profile models, tweaks the
    Editor ordering, and wires Business to its Neighborhood and owning user.
    """

    # Requires the user model and the app's initial migration to exist first.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('hood', '0001_initial'),
    ]
    operations = [
        # A business listed inside a neighborhood (FKs added further below).
        migrations.CreateModel(
            name='Business',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('bName', models.CharField(max_length=100)),
                ('bEmail', models.EmailField(max_length=100)),
            ],
            options={
                'ordering': ['bName'],
            },
        ),
        # A neighborhood with its admin user and emergency contact numbers.
        migrations.CreateModel(
            name='Neighborhood',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('location', models.CharField(max_length=50)),
                ('occupants', models.PositiveIntegerField()),
                ('health_contact', models.PositiveIntegerField()),
                ('police_contact', models.PositiveIntegerField()),
                ('hood_pic', models.ImageField(blank=True, upload_to='images/')),
                ('admin', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # A post made by a user inside a neighborhood, newest first.
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=50)),
                ('content', models.TextField()),
                ('image', models.ImageField(blank=True, upload_to='posts/')),
                ('date_posted', models.DateTimeField(auto_now_add=True)),
                ('hood', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hood.Neighborhood')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-date_posted'],
            },
        ),
        # One profile per user; hood is optional until the user joins one.
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('bio', models.TextField(blank=True)),
                ('email', models.EmailField(blank=True, max_length=100)),
                ('name', models.CharField(blank=True, max_length=50)),
                ('profile_pic', models.ImageField(blank=True, upload_to='images/')),
                ('hood', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='hood.Neighborhood')),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Editors are now listed alphabetically by first name.
        migrations.AlterModelOptions(
            name='editor',
            options={'ordering': ['first_name']},
        ),
        # Link each business to its neighborhood and owning user.
        migrations.AddField(
            model_name='business',
            name='hood',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hood.Neighborhood'),
        ),
        migrations.AddField(
            model_name='business',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
| [
"marydorcassinaida54@gmail.com"
] | marydorcassinaida54@gmail.com |
795a6ff4fb0dad2d56d19872bbe9ead37d8f7856 | a10b295054c882972a24456ac3de13788d236872 | /server/server.py | 7e5b01eab5ab22fd324fc5aa79e3975626545b2e | [] | no_license | mayur1064/DSHousingProject | 0333be33f7723c3f2f6e196a71222f75b290868a | 683f8f076f532a116c95a2e6c8f9947fd0fa4140 | refs/heads/master | 2023-01-12T16:07:35.893410 | 2020-11-20T11:24:55 | 2020-11-20T11:24:55 | 311,639,949 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,664 | py | from flask import Flask, request, jsonify
import util
import dutil
app = Flask(__name__)
@app.route('/get_location_names', methods=['GET'])
def get_location_names():
    """Return the known Bangalore location names as JSON (CORS enabled)."""
    payload = {'locations': util.get_location_names()}
    resp = jsonify(payload)
    resp.headers.add('Access-Control-Allow-Origin', '*')
    return resp
@app.route('/get_location_names_delhi', methods=['GET'])
def get_location_names_delhi():
    """Return the known Delhi location names as JSON (CORS enabled)."""
    payload = {'locations': dutil.get_location_names()}
    resp = jsonify(payload)
    resp.headers.add('Access-Control-Allow-Origin', '*')
    return resp
@app.route('/predict_home_price', methods=['GET', 'POST'])
def predict_home_price():
    """Estimate a Bangalore home price from form fields:
    total_sqft (float), location (str), bhk (int), bath (int)."""
    form = request.form
    location = form['location']
    total_sqft = float(form['total_sqft'])
    bhk = int(form['bhk'])
    bath = int(form['bath'])
    resp = jsonify({
        'estimated_price': util.get_estimated_price(location, total_sqft, bhk, bath)
    })
    resp.headers.add('Access-Control-Allow-Origin', '*')
    return resp
@app.route('/predict_home_price_delhi', methods=['GET', 'POST'])
def predict_home_price_delhi():
    """Estimate a Delhi home price from form fields:
    total_sqft (float), location (str), bhk (int), bath (int)."""
    form = request.form
    location = form['location']
    total_sqft = float(form['total_sqft'])
    bhk = int(form['bhk'])
    bath = int(form['bath'])
    resp = jsonify({
        'estimated_price': dutil.get_estimated_price(location, total_sqft, bhk, bath)
    })
    resp.headers.add('Access-Control-Allow-Origin', '*')
    return resp
if __name__ == "__main__":
print("Starting Python Flask Server For Home Price Prediction...")
util.load_saved_artifacts()
dutil.load_saved_artifacts()
app.run() | [
"mayurtra@gmail.com"
] | mayurtra@gmail.com |
39a93553eb9938131e1ee184e2e28fc42ce5fa64 | ad0fd89e22230aff5f73503decea1d20ba37f106 | /prosb.py | 49bf4f45ee6fda00e01b8eb17343a6d5c77fed70 | [] | no_license | arifin0/Bot5 | 7874de48798206e337a8d8388944e36dea585d37 | 5d939928bf040ab429109a5e53907942f3de11a9 | refs/heads/master | 2020-04-10T12:24:28.766289 | 2018-12-09T08:59:04 | 2018-12-09T08:59:04 | 161,021,595 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 152,433 | py | # -*- coding: utf-8 -*-
import linepy
from linepy import *
from akad.ttypes import *
from multiprocessing import Pool, Process
from time import sleep
import pytz, datetime, pafy, time, timeit, random, sys, ast, re, os, json, subprocess, threading, string, codecs, requests, tweepy, ctypes, urllib, wikipedia
from datetime import timedelta, date
from datetime import datetime
from bs4 import BeautifulSoup
from googletrans import Translator
import youtube_dl
#cl = LineClient()
cl = LineClient(authToken='Ez8dZeTUdMrwr2cGZAL5.mJlyH3NyhxOTPSgXiheu1q.YK+BZLN97A5up3vDOMDq2zzOs85AGE/5yNmbv0NgzlE=')
cl.log("Auth Token : " + str(cl.authToken))
channel = LineChannel(cl)
cl.log("Channel Access Token : " + str(channel.channelAccessToken))
#ki = LineClient()
ki = LineClient(authToken='Eznst4vd19e9k1HYoADf.BttbzOQwXc7Ncjw1Sn/VJW.JXRQtK6XBvRwP6QwyP2O7FfoALGiEASHNMCJoxn1IA4=')
ki.log("Auth Token : " + str(ki.authToken))
channel1 = LineChannel(ki)
ki.log("Channel Access Token : " + str(channel1.channelAccessToken))
#kk = LineClient()
kk = LineClient(authToken='EzAS2appcZv9kkFJus0d.GnkMXyBqiyqMfoxEnrxZpq.JvA5VFlxyPrD8n34MkVk+VaClnEtVJKYhwCFRUth970=')
kk.log("Auth Token : " + str(kk.authToken))
channel2 = LineChannel(kk)
kk.log("Channel Access Token : " + str(channel2.channelAccessToken))
#kc = LineClient()
kc = LineClient(authToken='Ez2xzac2y9m92ojdXBa3.Bq5ClrpnIru9E3wY4mSLmW.fJxI8oHZH+I5bFgo7L+Jc2UqlZ/3lVa7egYCMHwSSII=')
kc.log("Auth Token : " + str(kc.authToken))
channel3 = LineChannel(kc)
kc.log("Channel Access Token : " + str(channel3.channelAccessToken))
poll = LinePoll(cl)
call = cl
creator = ["u3232d88779d81d7a2da6c1b1207114d5"]
owner = ["u3232d88779d81d7a2da6c1b1207114d5"]
admin = ["u3232d88779d81d7a2da6c1b1207114d5"]
staff = ["u3232d88779d81d7a2da6c1b1207114d5"]
myMid = cl.profile.mid
mid = cl.getProfile().mid
Amid = ki.getProfile().mid
Bmid = kk.getProfile().mid
Cmid = kc.getProfile().mid
KAC = [cl,ki,kk,kc]
ABC = [ki,kk,kc]
Bots = [mid,Amid,Bmid,Cmid]
Team = admin + staff
protectqr = []
protectkick = []
protectjoin = []
protectinvite = []
protectcancel = []
welcome = []
responsename1 = ki.getProfile().displayName
responsename2 = kk.getProfile().displayName
responsename3 = kc.getProfile().displayName
settings = {
"Picture":False,
"group":{},
"groupPicture":False,
"changePicture":False,
"autoJoinTicket":False,
"userAgent": [
"Mozilla/5.0 (X11; U; Linux i586; de; rv:5.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (X11; U; Linux amd64; rv:5.0) Gecko/20100101 Firefox/5.0 (Debian)",
"Mozilla/5.0 (X11; U; Linux amd64; en-US; rv:5.0) Gecko/20110619 Firefox/5.0",
"Mozilla/5.0 (X11; Linux) Gecko Firefox/5.0",
"Mozilla/5.0 (X11; Linux x86_64; rv:5.0) Gecko/20100101 Firefox/5.0 FirePHP/0.5",
"Mozilla/5.0 (X11; Linux x86_64; rv:5.0) Gecko/20100101 Firefox/5.0 Firefox/5.0",
"Mozilla/5.0 (X11; Linux x86_64) Gecko Firefox/5.0",
"Mozilla/5.0 (X11; Linux ppc; rv:5.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (X11; Linux AMD64) Gecko Firefox/5.0",
"Mozilla/5.0 (X11; FreeBSD amd64; rv:5.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (Windows NT 6.2; WOW64; rv:5.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:5.0) Gecko/20110619 Firefox/5.0",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:5.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (Windows NT 6.1; rv:6.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (Windows NT 6.1.1; rv:5.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (Windows NT 5.2; WOW64; rv:5.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (Windows NT 5.1; U; rv:5.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (Windows NT 5.1; rv:2.0.1) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (Windows NT 5.0; WOW64; rv:5.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (Windows NT 5.0; rv:5.0) Gecko/20100101 Firefox/5.0"
]
}
wait = {
"limit": 1,
"owner":{},
"admin":{},
"addadmin":False,
"delladmin":False,
"staff":{},
"addstaff":False,
"dellstaff":False,
"bots":{},
"addbots":False,
"dellbots":False,
"blacklist":{},
"wblacklist":False,
"dblacklist":False,
"Talkblacklist":{},
"Talkwblacklist":False,
"Talkdblacklist":False,
"talkban":True,
"contact":False,
'autoJoin':True,
'autoAdd':True,
'autoLeave':False,
'autoLeave1':False,
"detectMention":True,
"Mentionkick":False,
"welcomeOn":False,
"sticker":False,
"selfbot":True,
"mention":"NGINTIPPP!!!",
"Respontag":"SEKANG TAG BESOK JATUH CINTA",
"welcome":"Selamat datang & betah",
"comment":"Like like & like by ARIFISTIFIK",
"message":"Terimakasih sudah add saya 😃",
}
# Read-tracking state: set points, seen members and timestamps per chat.
read = {
    "readPoint":{},
    "readMember":{},
    "readTime":{},
    "ROM":{},
}
# "Sider" (lurker) detection state, keyed by chat id.
cctv = {
    "cyduk":{},
    "point":{},
    "sidermem":{}
}
# Persistent settings (command key, etc.) loaded from setting.json.
# NOTE(review): the file handle is never closed; presumably harmless for
# a long-running bot, but a context manager would be cleaner.
Setbot = codecs.open("setting.json","r","utf-8")
Setmain = json.load(Setbot)
# Process start time and localized clock for uptime/status replies.
mulai = time.time()
tz = pytz.timezone("Asia/Jakarta")
timeNow = datetime.now(tz=tz)
def restart_program():
    """Replace the current process with a fresh interpreter running the
    same script and arguments (full bot restart)."""
    interpreter = sys.executable
    os.execl(interpreter, interpreter, *sys.argv)
def restartBot():
    """Re-exec the interpreter with identical argv.

    NOTE(review): duplicate of restart_program(); kept for callers that
    use this name.
    """
    exe = sys.executable
    os.execl(exe, exe, *sys.argv)
def waktu(secs):
    """Format a duration in seconds as 'DD Hari HH Jam MM Menit SS Detik'."""
    minutes, seconds = divmod(secs, 60)
    hours, minutes = divmod(minutes, 60)
    days, hours = divmod(hours, 24)
    return '%02d Hari %02d Jam %02d Menit %02d Detik' % (days, hours, minutes, seconds)
def runtime(secs):
    """Pretty-print an elapsed duration (seconds) in Indonesian units.

    NOTE(review): functionally identical to waktu(); kept because both
    names are called elsewhere in the file.
    """
    m, s = divmod(secs, 60)
    h, m = divmod(m, 60)
    d, h = divmod(h, 24)
    return '%02d Hari %02d Jam %02d Menit %02d Detik' % (d, h, m, s)
def mentionMembers(to, mid):
    """Send one message to chat *to* that @-mentions every user in *mid*.

    NOTE(review): this definition is shadowed by a later
    `mentionMembers(to, mids=[])` further down the file, so at runtime it
    is dead code.
    """
    try:
        arrData = ""
        textx = "[{}]MENTION MEMBER\n ".format(str(len(mid)))
        arr = []
        no = 1
        num = 2
        for i in mid:
            # 'S'/'E' are character offsets of the "@x" placeholder inside
            # textx; LINE substitutes the real display name there.
            mention = "@x\n"
            slen = str(len(textx))
            elen = str(len(textx) + len(mention) - 1)
            arrData = {'S':slen, 'E':elen, 'M':i}
            arr.append(arrData)
            textx += mention
            if no < len(mid):
                no += 1
                textx += "%i. " % (num)
                num=(num+1)
            else:
                # Last member: `no` is reused as a footer string but never
                # appended to textx (original behavior).
                try:
                    no = "\n╰──[ {} ]".format(str(cl.getGroup(to).name))
                except:
                    no = "\n╰──[ Success ]"
        cl.sendMessage(to, textx, {'MENTION': str('{"MENTIONEES":' + json.dumps(arr) + '}')}, 0)
    except Exception as error:
        # Report any failure back into the chat.
        cl.sendMessage(to, "Ada yang tidak beres...!!\n" + str(error))
def siderMembers(to, mid):
    """Call out read-but-silent members ("siders"): mention each mid in
    *mid* followed by the configured wait["mention"] taunt text."""
    try:
        arrData = ""
        textx = "Total Sider User[{}]\nHaii ".format(str(len(mid)))
        arr = []
        no = 1
        num = 2
        for i in mid:
            # 'S'/'E' mark the "@x" placeholder's offsets inside textx.
            mention = "@x\n"
            slen = str(len(textx))
            elen = str(len(textx) + len(mention) - 1)
            arrData = {'S':slen, 'E':elen, 'M':i}
            arr.append(arrData)
            textx += mention+wait["mention"]
            if no < len(mid):
                no += 1
                textx += "%i. " % (num)
                num=(num+1)
            else:
                # Footer computed but never appended (original behavior).
                try:
                    no = "\n╰──[ {} ]".format(str(cl.getGroup(to).name))
                except:
                    no = "\n╰──[ Success ]"
        cl.sendMessage(to, textx, {'MENTION': str('{"MENTIONEES":' + json.dumps(arr) + '}')}, 0)
    except Exception as error:
        # Report any failure back into the chat.
        cl.sendMessage(to, "Ada yang tidak beres...!!\n" + str(error))
def welcomeMembers(to, mid):
    """Greet newly-joined members: mention each mid in *mid* with the
    configured wait["welcome"] text plus the group's name."""
    try:
        arrData = ""
        textx = "Total Member Masuk[{}]\nHaii ".format(str(len(mid)))
        arr = []
        no = 1
        num = 2
        for i in mid:
            # Group info fetched per member (could be hoisted, but kept
            # as-is to preserve behavior).
            ginfo = cl.getGroup(to)
            mention = "@x\n"
            slen = str(len(textx))
            elen = str(len(textx) + len(mention) - 1)
            arrData = {'S':slen, 'E':elen, 'M':i}
            arr.append(arrData)
            textx += mention+wait["welcome"]+"\nNama grup : "+str(ginfo.name)
            if no < len(mid):
                no += 1
                textx += "%i " % (num)
                num=(num+1)
            else:
                # Footer computed but never appended (original behavior).
                try:
                    no = "\n╰──[ {} ]".format(str(cl.getGroup(to).name))
                except:
                    no = "\n╰──[ Success ]"
        cl.sendMessage(to, textx, {'MENTION': str('{"MENTIONEES":' + json.dumps(arr) + '}')}, 0)
    except Exception as error:
        # Report any failure back into the chat.
        cl.sendMessage(to, "Ada yang tidak beres...!!\n" + str(error))
def sendMention(to, mid, firstmessage):
    """Send a status card to chat *to*: *firstmessage*, then a mention of
    *mid*, followed by clock, group/friend counts, expiry countdown to
    2018-12-12, version and uptime."""
    try:
        arrData = ""
        text = "%s " %(str(firstmessage))
        arr = []
        mention = "@x \n"
        # 'S'/'E' are the offsets of the "@x" placeholder inside text.
        slen = str(len(text))
        elen = str(len(text) + len(mention) - 1)
        arrData = {'S':slen, 'E':elen, 'M':mid}
        arr.append(arrData)
        # Days remaining until the hard-coded expiry date; keep only the
        # "N days" part of the timedelta string (drop everything after ',').
        today = datetime.today()
        future = datetime(2018,12,12)
        hari = (str(future - today))
        comma = hari.find(",")
        hari = hari[:comma]
        # NOTE(review): getAllContactIdsx looks like a typo for
        # getAllContactIds -- confirm against the linepy client in use.
        teman = cl.getAllContactIdsx()
        gid = cl.getGroupIdsJoined()
        tz = pytz.timezone("Asia/Jakarta")
        timeNow = datetime.now(tz=tz)
        eltime = time.time() - mulai
        bot = runtime(eltime)
        text += mention+"✴ Jam : "+datetime.strftime(timeNow,'%H:%M:%S')+" Wib\n✴ Group : "+str(len(gid))+"\n✴ Teman : "+str(len(teman))+"\n✴ Expired : In "+hari+"\n✴ Version : ARIFISTIFIK\n✴ Tanggal : "+datetime.strftime(timeNow,'%Y-%m-%d')+"\n✴ Runtime : \n • "+bot
        cl.sendMessage(to, text, {'MENTION': str('{"MENTIONEES":' + json.dumps(arr) + '}')}, 0)
    except Exception as error:
        # Report any failure back into the chat.
        cl.sendMessage(to, "Ada yang tidak beres...!!\n" + str(error))
def mentionMembers(to, mids=None):
    """Mention every user in *mids* in chat *to*, 20 mentions per message.

    This definition shadows the earlier mentionMembers(to, mid) above.
    Fixes: the original used a mutable default argument (mids=[]) and
    mutated the caller's list via .remove(); the input is now copied.
    """
    mids = [] if mids is None else list(mids)  # defensive copy, no shared default
    if myMid in mids: mids.remove(myMid)       # never mention ourselves
    parsed_len = len(mids)//20+1               # number of 20-mention chunks
    result = '╭───[ Mention Members ]\n'
    mention = '@arifistifi\n'
    no = 0
    for point in range(parsed_len):
        mentionees = []
        for mid in mids[point*20:(point+1)*20]:
            no += 1
            result += '│ %i. %s' % (no, mention)
            # Offsets of the '@...' placeholder inside result for LINE's
            # MENTION metadata.
            slen = len(result) - 12
            elen = len(result) + 3
            mentionees.append({'S': str(slen), 'E': str(elen - 4), 'M': mid})
            if mid == mids[-1]:
                result += '╰───[ Mention Members]\n'
        if result:
            if result.endswith('\n'): result = result[:-1]
            cl.sendMessage(to, result, {'MENTION': json.dumps({'MENTIONEES': mentionees})}, 0)
            result = ''
def command(text):
    """Normalize an incoming chat line into a command token.

    Lower-cases *text*; when it starts with the configured key
    (Setmain["keyCommand"]) every occurrence of the key is stripped and
    the remainder returned, otherwise the literal string "command".
    """
    lowered = text.lower()
    prefix = Setmain["keyCommand"]
    if lowered.startswith(prefix):
        return lowered.replace(prefix, "")
    return "command"
def help():
    """Build the main help-menu string; every command is prefixed with
    the current command key (title-cased) from Setmain."""
    key = Setmain["keyCommand"]
    key = key.title()
    helpMessage = "━━━━┅═❉ইई❉═┅━━━━\n" + \
            "╭━━━━━━━━━━━━━━━━\n" + \
            "║╭❉ HELP MESSAGE\n" + \
            "║┝───────────────" + "\n" + \
            "║│ " + key + "help2\n" + \
            "║│ " + key + "help3\n" + \
            "║┝─[❇SELF COMMAND❇]" + "\n" + \
            "║│ " + key + "Me\n" + \
            "║│ " + key + "Mid「@」\n" + \
            "║│ " + key + "Info「@」\n" + \
            "║│ " + key + "Nk「@」\n" + \
            "║│ " + key + "Kick1「@」\n" + \
            "║│ " + key + "Mybot\n" + \
            "║│ " + key + "Status\n" + \
            "║│ " + key + "About\n" + \
            "║│ " + key + "Restart\n" + \
            "║│ " + key + "Runtime\n" + \
            "║│ " + key + "Creator\n" + \
            "║│ " + key + "Speed/Sp\n" + \
            "║│ " + key + "Sprespon\n" + \
            "║┝───────────────\n" + \
            "║┝─[❇GRUP COMMAND❇]" + "\n" + \
            "║│ " + key + "Tagall\n" + \
            "║│ " + key + "Joinall\n" + \
            "║│ " + key + "Byeall\n" + \
            "║│ " + key + "Byeme\n" + \
            "║│ " + key + "Leave「Namagrup」\n" + \
            "║│ " + key + "Ginfo\n" + \
            "║│ " + key + "Open\n" + \
            "║│ " + key + "Close\n" + \
            "║│ " + key + "Url grup\n" + \
            "║│ " + key + "Gruplist\n" + \
            "║│ " + key + "Infogrup「angka」\n" + \
            "║│ " + key + "Infomem「angka」\n" + \
            "║│ " + key + "Remove chat\n" + \
            "║│ " + key + "Lurking「on/off」\n" + \
            "║│ " + key + "Lurkers\n" + \
            "║│ " + key + "Sider「on/off」\n" + \
            "║│ " + key + "Updatefoto\n" + \
            "║│ " + key + "Updategrup\n" + \
            "║│ " + key + "Updatebot\n" + \
            "║│ " + key + "Broadcast:「Text」\n" + \
            "║│ " + key + "Setkey「New Key」\n" + \
            "║│ " + key + "Mykey\n" + \
            "║│ " + key + "Resetkey\n" + \
            "║┝───────────────\n" + \
            "║┝─[❇ HIBURAN ❇]──" + "\n" + \
            "║│ " + key + "Musik:「Judul Musik」\n" + \
            "║│ " + key + "Ytmp4:「Judul Video」\n" + \
            "║│ " + key + "Spamtag:「angka」\n" + \
            "║│ " + key + "Spamtag「@」\n" + \
            "║│ " + key + "Spamcall:「jml」\n" + \
            "║│ " + key + "Spamcall\n" + \
            "║╰❉──────────────\n" + \
            "╰━━━━━━━━━━━━━━━━\n" + \
            "━━━━┅═❉ইई❉═┅━━━━"
    return helpMessage
def helpbot():
    """Build the "help2" menu (ban/talkban management and set/cek
    message templates), prefixed with the current command key."""
    key = Setmain["keyCommand"]
    key = key.title()
    helpMessage1 = "━━━━┅═❉ইई❉═┅━━━━\n" + \
            "╭━━━━━━━━━━━━━━━━\n" + \
            "║╭❉ HELP2 ❇\n" + \
            "║┝───────────────" + "\n" + \
            "║│ " + key + "Blc\n" + \
            "║│ " + key + "Blc\n" + \
            "║│ " + key + "Ban:on\n" + \
            "║│ " + key + "Unban:on\n" + \
            "║│ " + key + "Ban「@」\n" + \
            "║│ " + key + "Unban「@」\n" + \
            "║│ " + key + "Talkban「@」\n" + \
            "║│ " + key + "Untalkban「@」\n" + \
            "║│ " + key + "Talkban:on\n" + \
            "║│ " + key + "Untalkban:on\n" + \
            "║│ " + key + "Banlist\n" + \
            "║│ " + key + "Talkbanlist\n" + \
            "║│ " + key + "Clearban\n" + \
            "║│ " + key + "Refresh\n" + \
            "║┝───────────────\n" + \
            "║│ " + key + "Cek sider\n" + \
            "║│ " + key + "Cek spam\n" + \
            "║│ " + key + "Cek pesan \n" + \
            "║│ " + key + "Cek respon \n" + \
            "║│ " + key + "Cek welcome\n" + \
            "║│ " + key + "Set sider:「Text」\n" + \
            "║│ " + key + "Set spam:「Text」\n" + \
            "║│ " + key + "Set pesan:「Text」\n" + \
            "║│ " + key + "Set respon:「Text」\n" + \
            "║│ " + key + "Set welcome:「Text」\n" + \
            "║│ " + key + "Myname:「Text」\n" + \
            "║│ " + key + "Bot1name:「Text」\n" + \
            "║│ " + key + "Bot2name:「Text」\n" + \
            "║│ " + key + "Bot3name:「Text」\n" + \
            "║│ " + key + "Bot1up\n" + \
            "║│ " + key + "Bot2up\n" + \
            "║│ " + key + "Bot3up\n" + \
            "║│ " + key + "Gift:「Mid 」「jml」\n" + \
            "║│ " + key + "Spam:「Mid 」「jml」\n" + \
            "║╰❉SELFBOT PROTECT\n" + \
            "╰━━━━━━━━━━━━━━━━\n" + \
            "━━━━┅═❉ইई❉═┅━━━━"
    return helpMessage1
def helpadmin():
    """Build the "help3" menu (protection toggles and admin/staff/bot
    management), prefixed with the current command key."""
    key = Setmain["keyCommand"]
    key = key.title()
    helpMessage2 = "━━━━┅═❉ইई❉═┅━━━━\n" + \
            "╭━━━━━━━━━━━━━━━━\n" + \
            "║╭❉ HELP3 ❇\n" + \
            "║┝───────────────" + "\n" + \
            "║┝─[❇PROTECTION❇]" + "\n" + \
            "║│ " + key + "Notag「on/off」\n" + \
            "║│ " + key + "Allpro「on/off」\n" + \
            "║│ " + key + "Protecturl「on/off」\n" + \
            "║│ " + key + "Protectjoin「on/off」\n" + \
            "║│ " + key + "Protectkick「on/off」\n" + \
            "║│ " + key + "Protectcancel「on/off」\n" + \
            "║│ " + key + "Sticker「on/off」\n" + \
            "║│ " + key + "Respon「on/off」\n" + \
            "║│ " + key + "Contact「on/off」\n" + \
            "║│ " + key + "Autojoin「on/off」\n" + \
            "║│ " + key + "Autoadd「on/off」\n" + \
            "║│ " + key + "Welcome「on/off」\n" + \
            "║│ " + key + "Autoleave「on/off」\n" + \
            "║┝───────────────\n" + \
            "║┝─[❇ADMIN COMMAND❇]" + "\n" + \
            "║│ " + key + "Admin:on\n" + \
            "║│ " + key + "Admin:repeat\n" + \
            "║│ " + key + "Staff:on\n" + \
            "║│ " + key + "Staff:repeat\n" + \
            "║│ " + key + "Bot:on\n" + \
            "║│ " + key + "Bot:repeat\n" + \
            "║│ " + key + "Adminadd「@」\n" + \
            "║│ " + key + "Admindell「@」\n" + \
            "║│ " + key + "Staffadd「@」\n" + \
            "║│ " + key + "Staffdell「@」\n" + \
            "║│ " + key + "Botadd「@」\n" + \
            "║│ " + key + "Botdell「@」\n" + \
            "║│ " + key + "Refresh\n" + \
            "║│ " + key + "Listbot\n" + \
            "║│ " + key + "Listadmin\n" + \
            "║│ " + key + "Listprotect\n" + \
            "║╰❉SELFBOT BY DPK\n" + \
            "╰━━━━━━━━━━━━━━━━\n" + \
            "━━━━┅═❉ইई❉═┅━━━━"
    return helpMessage2
def bot(op):
global time
global ast
global groupParam
try:
if op.type == 0:
return
if op.type == 11:
if op.param1 in protectqr:
try:
if cl.getGroup(op.param1).preventedJoinByTicket == False:
if op.param2 not in Bots and op.param2 not in owner and op.param2 not in admin and op.param2 not in staff:
cl.reissueGroupTicket(op.param1)
X = cl.getGroup(op.param1)
X.preventedJoinByTicket = True
cl.updateGroup(X)
cl.sendMessage(op.param1, None, contentMetadata={'mid': op.param2}, contentType=13)
except:
try:
if ki.getGroup(op.param1).preventedJoinByTicket == False:
if op.param2 not in Bots and op.param2 not in owner and op.param2 not in admin and op.param2 not in staff:
ki.reissueGroupTicket(op.param1)
X = ki.getGroup(op.param1)
X.preventedJoinByTicket = True
ki.updateGroup(X)
cl.sendMessage(op.param1, None, contentMetadata={'mid': op.param2}, contentType=13)
except:
try:
if kk.getGroup(op.param1).preventedJoinByTicket == False:
if op.param2 not in Bots and op.param2 not in owner and op.param2 not in admin and op.param2 not in staff:
kk.reissueGroupTicket(op.param1)
X = kk.getGroup(op.param1)
X.preventedJoinByTicket = True
kk.updateGroup(X)
cl.sendMessage(op.param1, None, contentMetadata={'mid': op.param2}, contentType=13)
except:
try:
if kc.getGroup(op.param1).preventedJoinByTicket == False:
if op.param2 not in Bots and op.param2 not in owner and op.param2 not in admin and op.param2 not in staff:
kc.reissueGroupTicket(op.param1)
X = kc.getGroup(op.param1)
X.preventedJoinByTicket = True
kc.updateGroup(X)
cl.sendMessage(op.param1, None, contentMetadata={'mid': op.param2}, contentType=13)
except:
try:
if cl.getGroup(op.param1).preventedJoinByTicket == False:
if op.param2 not in Bots and op.param2 not in owner and op.param2 not in admin and op.param2 not in staff:
cl.reissueGroupTicket(op.param1)
X = cl.getGroup(op.param1)
X.preventedJoinByTicket = True
cl.updateGroup(X)
cl.sendMessage(op.param1, None, contentMetadata={'mid': op.param2}, contentType=13)
except:
try:
if ki.getGroup(op.param1).preventedJoinByTicket == False:
if op.param2 not in Bots and op.param2 not in owner and op.param2 not in admin and op.param2 not in staff:
ki.reissueGroupTicket(op.param1)
X = ki.getGroup(op.param1)
X.preventedJoinByTicket = True
ki.updateGroup(X)
cl.sendMessage(op.param1, None, contentMetadata={'mid': op.param2}, contentType=13)
except:
pass
if op.type == 13:
if mid in op.param3:
if wait["autoLeave"] == True:
if op.param2 not in Bots and op.param2 not in owner and op.param2 not in admin and op.param2 not in staff:
cl.acceptGroupInvitation(op.param1)
ginfo = cl.getGroup(op.param1)
cl.sendMessage(op.param1,"Selamat Tinggal\n Group " +str(ginfo.name))
cl.leaveGroup(op.param1)
else:
cl.acceptGroupInvitation(op.param1)
ginfo = cl.getGroup(op.param1)
cl.sendMessage(op.param1,"Hai " + str(ginfo.name))
if op.type == 13:
if mid in op.param3:
if wait["autoJoin"] == True:
if op.param2 not in Bots and op.param2 not in owner and op.param2 not in admin and op.param2 not in staff:
cl.acceptGroupInvitation(op.param1)
ginfo = cl.getGroup(op.param1)
cl.sendMessage(op.param1,"Haii " +str(ginfo.name))
else:
cl.acceptGroupInvitation(op.param1)
ginfo = cl.getGroup(op.param1)
cl.sendMessage(op.param1,"Haii " + str(ginfo.name))
if Amid in op.param3:
if wait["autoJoin"] == True:
if op.param2 not in Bots and op.param2 not in owner and op.param2 not in admin and op.param2 not in staff:
ki.acceptGroupInvitation(op.param1)
ginfo = ki.getGroup(op.param1)
ki.sendMessage(op.param1,"Selamat Tinggal\n Group " +str(ginfo.name))
ki.leaveGroup(op.param1)
else:
ki.acceptGroupInvitation(op.param1)
ginfo = ki.getGroup(op.param1)
ki.sendMessage(op.param1,"Hai " + str(ginfo.name))
if Bmid in op.param3:
if wait["autoJoin"] == True:
if op.param2 not in Bots and op.param2 not in owner and op.param2 not in admin and op.param2 not in staff:
kk.acceptGroupInvitation(op.param1)
ginfo = kk.getGroup(op.param1)
ki.sendMessage(op.param1,"Selamat Tinggal\n Group " +str(ginfo.name))
kk.leaveGroup(op.param1)
else:
kk.acceptGroupInvitation(op.param1)
ginfo = kk.getGroup(op.param1)
kk.sendMessage(op.param1,"Hai " + str(ginfo.name))
if Cmid in op.param3:
if wait["autoJoin"] == True:
if op.param2 not in Bots and op.param2 not in owner and op.param2 not in admin and op.param2 not in staff:
kc.acceptGroupInvitation(op.param1)
ginfo = kc.getGroup(op.param1)
kc.sendMessage(op.param1,"Selamat Tinggal\n Group " +str(ginfo.name))
kc.leaveGroup(op.param1)
else:
kc.acceptGroupInvitation(op.param1)
ginfo = kc.getGroup(op.param1)
kc.sendMessage(op.param1,"Hai " + str(ginfo.name))
if op.type == 13:
if op.param1 in protectinvite:
if op.param2 not in Bots and op.param2 not in owner and op.param2 not in admin and op.param2 not in staff:
try:
group = cl.getGroup(op.param1)
gMembMids = [contact.mid for contact in group.invitee]
for _mid in gMembMids:
cl.cancelGroupInvitation(op.param1,[_mid])
except:
try:
group = ki.getGroup(op.param1)
gMembMids = [contact.mid for contact in group.invitee]
for _mid in gMembMids:
ki.cancelGroupInvitation(op.param1,[_mid])
except:
try:
group = kk.getGroup(op.param1)
gMembMids = [contact.mid for contact in group.invitee]
for _mid in gMembMids:
kk.cancelGroupInvitation(op.param1,[_mid])
except:
try:
group = kc.getGroup(op.param1)
gMembMids = [contact.mid for contact in group.invitee]
for _mid in gMembMids:
kc.cancelGroupInvitation(op.param1,[_mid])
except:
pass
if op.type == 17:
if op.param2 in wait["blacklist"]:
random.choice(ABC).kickoutFromGroup(op.param1,[op.param2])
else:
pass
if op.type == 17:
if op.param1 in welcome:
if op.param2 in Bots:
pass
ginfo = cl.getGroup(op.param1)
contact = cl.getContact(op.param2).picturePath
image = 'http://dl.profile.line.naver.jp'+contact
welcomeMembers(op.param1, [op.param2])
cl.sendImageWithURL(op.param1, image)
if op.type == 17:
if op.param1 in protectjoin:
if op.param2 not in Bots and op.param2 not in owner and op.param2 not in admin and op.param2 not in staff:
wait["blacklist"][op.param2] = True
try:
if op.param3 not in wait["blacklist"]:
kc.kickoutFromGroup(op.param1,[op.param2])
except:
try:
if op.param3 not in wait["blacklist"]:
ki.kickoutFromGroup(op.param1,[op.param2])
except:
try:
if op.param3 not in wait["blacklist"]:
kk.kickoutFromGroup(op.param1,[op.param2])
except:
try:
if op.param3 not in wait["blacklist"]:
cl.kickoutFromGroup(op.param1,[op.param2])
except:
pass
return
if op.type == 0:
return
if op.type == 5:
if wait["autoAdd"] == True:
if op.param2 not in Bots and op.param2 not in owner and op.param2 not in admin and op.param2 not in staff:
if (wait["message"] in [" "," ","\n",None]):
pass
else:
cl.sendText(op.param1, wait["message"])
if op.type == 19:
if op.param1 in protectkick:
if op.param2 not in Bots and op.param2 not in owner and op.param2 not in admin and op.param2 not in staff:
wait["blacklist"][op.param2] = True
random.choice(ABC).kickoutFromGroup(op.param1,[op.param2])
else:
pass
if op.type == 32:
if op.param1 in protectcancel:
if op.param2 not in Bots and op.param2 not in owner and op.param2 not in admin and op.param2 not in staff:
wait["blacklist"][op.param2] = True
try:
if op.param3 not in wait["blacklist"]:
ki.kickoutFromGroup(op.param1,[op.param2])
except:
try:
if op.param3 not in wait["blacklist"]:
kk.kickoutFromGroup(op.param1,[op.param2])
except:
try:
if op.param3 not in wait["blacklist"]:
kc.kickoutFromGroup(op.param1,[op.param2])
except:
try:
if op.param3 not in wait["blacklist"]:
ki.kickoutFromGroup(op.param1,[op.param2])
except:
try:
if op.param3 not in wait["blacklist"]:
kk.kickoutFromGroup(op.param1,[op.param2])
except:
try:
if op.param3 not in wait["blacklist"]:
cl.kickoutFromGroup(op.param1,[op.param2])
except:
pass
return
if op.type == 19:
if mid in op.param3:
if op.param2 in Bots:
pass
if op.param2 in owner:
pass
if op.param2 in admin:
pass
if op.param2 in staff:
pass
else:
wait["blacklist"][op.param2] = True
try:
ki.kickoutFromGroup(op.param1,[op.param2])
ki.inviteIntoGroup(op.param1,[op.param3])
cl.acceptGroupInvitation(op.param1)
except:
try:
kk.kickoutFromGroup(op.param1,[op.param2])
kk.inviteIntoGroup(op.param1,[op.param3])
cl.acceptGroupInvitation(op.param1)
except:
try:
kc.kickoutFromGroup(op.param1,[op.param2])
kc.inviteIntoGroup(op.param1,[op.param3])
cl.acceptGroupInvitation(op.param1)
except:
try:
G = ki.getGroup(op.param1)
G.preventedJoinByTicket = False
ki.kickoutFromGroup(op.param1,[op.param2])
ki.updateGroup(G)
Ticket = ki.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
kk.acceptGroupInvitationByTicket(op.param1,Ticket)
kc.acceptGroupInvitationByTicket(op.param1,Ticket)
G = ki.getGroup(op.param1)
G.preventedJoinByTicket = True
ki.updateGroup(G)
Ticket = ki.reissueGroupTicket(op.param1)
except:
try:
ki.kickoutFromGroup(op.param1,[op.param2])
ki.inviteIntoGroup(op.param1,[op.param3])
cl.acceptGroupInvitation(op.param1)
except:
try:
kk.kickoutFromGroup(op.param1,[op.param2])
kk.inviteIntoGroup(op.param1,[op.param3])
cl.acceptGroupInvitation(op.param1)
except:
pass
return
if Amid in op.param3:
if op.param2 in Bots:
pass
if op.param2 in owner:
pass
if op.param2 in admin:
pass
if op.param2 in staff:
pass
else:
wait["blacklist"][op.param2] = True
try:
kk.kickoutFromGroup(op.param1,[op.param2])
kk.inviteIntoGroup(op.param1,[op.param3])
ki.acceptGroupInvitation(op.param1)
except:
try:
kc.kickoutFromGroup(op.param1,[op.param2])
kc.inviteIntoGroup(op.param1,[op.param3])
ki.acceptGroupInvitation(op.param1)
except:
try:
cl.kickoutFromGroup(op.param1,[op.param2])
cl.inviteIntoGroup(op.param1,[op.param3])
ki.acceptGroupInvitation(op.param1)
except:
try:
G = kk.getGroup(op.param1)
G.preventedJoinByTicket = False
kk.kickoutFromGroup(op.param1,[op.param2])
kk.updateGroup(G)
Ticket = kk.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
kk.acceptGroupInvitationByTicket(op.param1,Ticket)
kc.acceptGroupInvitationByTicket(op.param1,Ticket)
G = kk.getGroup(op.param1)
G.preventedJoinByTicket = True
kk.updateGroup(G)
Ticket = kk.reissueGroupTicket(op.param1)
except:
try:
kk.kickoutFromGroup(op.param1,[op.param2])
kk.inviteIntoGroup(op.param1,[op.param3])
ki.acceptGroupInvitation(op.param1)
except:
try:
kc.kickoutFromGroup(op.param1,[op.param2])
kc.inviteIntoGroup(op.param1,[op.param3])
ki.acceptGroupInvitation(op.param1)
except:
pass
return
if Bmid in op.param3:
if op.param2 in Bots:
pass
if op.param2 in owner:
pass
if op.param2 in admin:
pass
if op.param2 in staff:
pass
else:
wait["blacklist"][op.param2] = True
try:
kc.kickoutFromGroup(op.param1,[op.param2])
kc.inviteIntoGroup(op.param1,[op.param3])
kk.acceptGroupInvitation(op.param1)
except:
try:
cl.kickoutFromGroup(op.param1,[op.param2])
cl.inviteIntoGroup(op.param1,[op.param3])
kk.acceptGroupInvitation(op.param1)
except:
try:
ki.kickoutFromGroup(op.param1,[op.param2])
ki.inviteIntoGroup(op.param1,[op.param3])
kk.acceptGroupInvitation(op.param1)
except:
try:
G = kc.getGroup(op.param1)
G.preventedJoinByTicket = False
kc.kickoutFromGroup(op.param1,[op.param2])
kc.updateGroup(G)
Ticket = kc.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
kk.acceptGroupInvitationByTicket(op.param1,Ticket)
kc.acceptGroupInvitationByTicket(op.param1,Ticket)
G = kc.getGroup(op.param1)
G.preventedJoinByTicket = True
kc.updateGroup(G)
Ticket = kc.reissueGroupTicket(op.param1)
except:
try:
ki.kickoutFromGroup(op.param1,[op.param2])
ki.inviteIntoGroup(op.param1,[op.param3])
kk.acceptGroupInvitation(op.param1)
except:
try:
kc.kickoutFromGroup(op.param1,[op.param2])
kc.inviteIntoGroup(op.param1,[op.param3])
kk.acceptGroupInvitation(op.param1)
except:
pass
return
if Cmid in op.param3:
if op.param2 in Bots:
pass
if op.param2 in owner:
pass
if op.param2 in admin:
pass
if op.param2 in staff:
pass
else:
wait["blacklist"][op.param2] = True
try:
cl.kickoutFromGroup(op.param1,[op.param2])
cl.inviteIntoGroup(op.param1,[op.param3])
kc.acceptGroupInvitation(op.param1)
except:
try:
ki.kickoutFromGroup(op.param1,[op.param2])
ki.inviteIntoGroup(op.param1,[op.param3])
kc.acceptGroupInvitation(op.param1)
except:
try:
kk.kickoutFromGroup(op.param1,[op.param2])
kk.inviteIntoGroup(op.param1,[op.param3])
kc.acceptGroupInvitation(op.param1)
except:
try:
G = cl.getGroup(op.param1)
G.preventedJoinByTicket = False
cl.kickoutFromGroup(op.param1,[op.param2])
cl.updateGroup(G)
Ticket = cl.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
kk.acceptGroupInvitationByTicket(op.param1,Ticket)
kc.acceptGroupInvitationByTicket(op.param1,Ticket)
G = cl.getGroup(op.param1)
G.preventedJoinByTicket = True
cl.updateGroup(G)
Ticket = cl.reissueGroupTicket(op.param1)
except:
try:
cl.kickoutFromGroup(op.param1,[op.param2])
cl.inviteIntoGroup(op.param1,[op.param3])
kc.acceptGroupInvitation(op.param1)
except:
try:
ki.kickoutFromGroup(op.param1,[op.param2])
ki.inviteIntoGroup(op.param1,[op.param3])
kc.acceptGroupInvitation(op.param1)
except:
pass
return
if admin in op.param3:
if op.param2 in Bots:
pass
if op.param2 in owner:
pass
if op.param2 in admin:
pass
if op.param2 in staff:
pass
else:
wait["blacklist"][op.param2] = True
try:
cl.kickoutFromGroup(op.param1,[op.param2])
cl.findAndAddContactsByMid(op.param1,admin)
cl.inviteIntoGroup(op.param1,admin)
except:
try:
ki.kickoutFromGroup(op.param1,[op.param2])
ki.findAndAddContactsByMid(op.param1,admin)
ki.inviteIntoGroup(op.param1,admin)
except:
try:
kk.kickoutFromGroup(op.param1,[op.param2])
kk.findAndAddContactsByMid(op.param1,admin)
kk.inviteIntoGroup(op.param1,admin)
except:
pass
return
if staff in op.param3:
if op.param2 in Bots:
pass
if op.param2 in owner:
pass
if op.param2 in admin:
pass
if op.param2 in staff:
pass
else:
wait["blacklist"][op.param2] = True
try:
ki.kickoutFromGroup(op.param1,[op.param2])
ki.findAndAddContactsByMid(op.param1,staff)
ki.inviteIntoGroup(op.param1,staff)
except:
try:
kk.kickoutFromGroup(op.param1,[op.param2])
kk.findAndAddContactsByMid(op.param1,staff)
kk.inviteIntoGroup(op.param1,staff)
except:
try:
kc.kickoutFromGroup(op.param1,[op.param2])
kc.findAndAddContactsByMid(op.param1,staff)
kc.inviteIntoGroup(op.param1,staff)
except:
pass
return
if op.type == 55:
if op.param2 in wait["blacklist"]:
random.choice(ABC).kickoutFromGroup(op.param1,[op.param2])
else:
pass
if op.type == 55:
try:
if op.param1 in Setmain["ARreadPoint"]:
if op.param2 in Setmain["ARreadMember"][op.param1]:
pass
else:
Setmain["ARreadMember"][op.param1][op.param2] = True
else:
pass
except:
pass
if cctv['cyduk'][op.param1]==True:
if op.param1 in cctv['point']:
Name = cl.getContact(op.param2).displayName
if Name in cctv['sidermem'][op.param1]:
pass
else:
cctv['sidermem'][op.param1] += "\n~ " + Name
siderMembers(op.param1, [op.param2])
contact = cl.getContact(op.param2)
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
cl.sendImageWithURL(op.param1, image)
if op.type == 26:
if wait["selfbot"] == True:
msg = op.message
if msg._from not in Bots:
if wait["talkban"] == True:
if msg._from in wait["Talkblacklist"]:
try:
random.choice(ABC).kickoutFromGroup(msg.to, [msg._from])
except:
try:
random.choice(ABC).kickoutFromGroup(msg.to, [msg._from])
except:
random.choice(ABC).kickoutFromGroup(msg.to, [msg._from])
if 'MENTION' in msg.contentMetadata.keys() != None:
if wait["detectMention"] == True:
name = re.findall(r'@(\w+)', msg.text)
mention = ast.literal_eval(msg.contentMetadata['MENTION'])
mentionees = mention['MENTIONEES']
for mention in mentionees:
if mention ['M'] in Bots:
cl.sendMessage(msg.to, wait["Respontag"])
cl.sendMessage(msg.to, None, contentMetadata={"STKID":"7839705","STKPKGID":"1192862","STKVER":"1"}, contentType=7)
break
if 'MENTION' in msg.contentMetadata.keys() != None:
if wait["Mentionkick"] == True:
name = re.findall(r'@(\w+)', msg.text)
mention = ast.literal_eval(msg.contentMetadata['MENTION'])
mentionees = mention['MENTIONEES']
for mention in mentionees:
if mention ['M'] in Bots:
cl.mentiontag(msg.to,[msg._from])
cl.sendMessage(msg.to, "Jangan tag saya....")
cl.kickoutFromGroup(msg.to, [msg._from])
break
if msg.contentType == 7:
if wait["sticker"] == True:
msg.contentType = 0
cl.sendMessage(msg.to,"[Cek ID Sticker]\n│ STKID : " + msg.contentMetadata["STKID"] + "\n│ STKPKGID : " + msg.contentMetadata["STKPKGID"] + "\n│ STKVER : " + msg.contentMetadata["STKVER"]+ "\n\n[Link Sticker]" + "\nline://shop/detail/" + msg.contentMetadata["STKPKGID"])
if msg.contentType == 13:
if wait["contact"] == True:
msg.contentType = 0
cl.sendMessage(msg.to,msg.contentMetadata["mid"])
if 'displayName' in msg.contentMetadata:
contact = cl.getContact(msg.contentMetadata["mid"])
path = cl.getContact(msg.contentMetadata["mid"]).picturePath
image = 'http://dl.profile.line.naver.jp'+path
cl.sendMessage(msg.to,"• Nama : " + msg.contentMetadata["displayName"] + "\n• MID : " + msg.contentMetadata["mid"] + "\n• Status Msg : " + contact.statusMessage + "\n• Picture URL : http://dl.profile.line-cdn.net/" + contact.pictureStatus)
cl.sendImageWithURL(msg.to, image)
if op.type == 25 or op.type == 26:
msg = op.message
text = msg.text
msg_id = msg.id
receiver = msg.to
sender = msg._from
if msg.toType == 0 or msg.toType == 2:
if msg.toType == 0:
to = receiver
elif msg.toType == 2:
to = receiver
if msg.contentType == 7:
if wait["sticker"] == True:
msg.contentType = 0
cl.sendMessage(msg.to,"STKID : " + msg.contentMetadata["STKID"] + "\nSTKPKGID : " + msg.contentMetadata["STKPKGID"] + "\nSTKVER : " + msg.contentMetadata["STKVER"]+ "\n\n[Link Sticker]" + "\nline://shop/detail/" + msg.contentMetadata["STKPKGID"])
if msg.contentType == 13:
if wait["contact"] == True:
msg.contentType = 0
cl.sendMessage(msg.to,msg.contentMetadata["mid"])
if 'displayName' in msg.contentMetadata:
contact = cl.getContact(msg.contentMetadata["mid"])
path = cl.getContact(msg.contentMetadata["mid"]).picturePath
image = 'http://dl.profile.line.naver.jp'+path
cl.sendMessage(msg.to,"• Nama : " + msg.contentMetadata["displayName"] + "\n• MID : " + msg.contentMetadata["mid"] + "\n• Status Msg : " + contact.statusMessage + "\n• Picture URL : http://dl.profile.line-cdn.net/" + contact.pictureStatus)
cl.sendImageWithURL(msg.to, image)
#ADD Bots
if msg.contentType == 13:
if msg._from in admin:
if wait["addbots"] == True:
if msg.contentMetadata["mid"] in Bots:
cl.sendMessage(msg.to,"Contact itu sudah jadi anggota bot")
wait["addbots"] = True
else:
Bots.append(msg.contentMetadata["mid"])
wait["addbots"] = True
cl.sendMessage(msg.to,"Berhasil menambahkan ke anggota bot")
if wait["dellbots"] == True:
if msg.contentMetadata["mid"] in Bots:
Bots.remove(msg.contentMetadata["mid"])
cl.sendMessage(msg.to,"Berhasil menghapus dari anggota bot")
else:
wait["dellbots"] = True
cl.sendMessage(msg.to,"Contact itu bukan anggota bot saints")
#ADD STAFF
if msg._from in admin:
if wait["addstaff"] == True:
if msg.contentMetadata["mid"] in staff:
cl.sendMessage(msg.to,"Contact itu sudah jadi staff")
wait["addstaff"] = True
else:
staff.append(msg.contentMetadata["mid"])
wait["addstaff"] = True
cl.sendMessage(msg.to,"Berhasil menambahkan ke staff")
if wait["dellstaff"] == True:
if msg.contentMetadata["mid"] in staff:
staff.remove(msg.contentMetadata["mid"])
cl.sendMessage(msg.to,"Berhasil menghapus dari staff")
wait["dellstaff"] = True
else:
wait["dellstaff"] = True
cl.sendMessage(msg.to,"Contact itu bukan staff")
#ADD ADMIN
if msg._from in admin:
if wait["addadmin"] == True:
if msg.contentMetadata["mid"] in admin:
cl.sendMessage(msg.to,"Contact itu sudah jadi admin")
wait["addadmin"] = True
else:
admin.append(msg.contentMetadata["mid"])
wait["addadmin"] = True
cl.sendMessage(msg.to,"Berhasil menambahkan ke admin")
if wait["delladmin"] == True:
if msg.contentMetadata["mid"] in admin:
admin.remove(msg.contentMetadata["mid"])
cl.sendMessage(msg.to,"Berhasil menghapus dari admin")
else:
wait["delladmin"] = True
cl.sendMessage(msg.to,"Contact itu bukan admin")
#ADD BLACKLIST
if msg._from in admin:
if wait["wblacklist"] == True:
if msg.contentMetadata["mid"] in wait["blacklist"]:
cl.sendMessage(msg.to,"Contact itu sudah ada di blacklist")
wait["wblacklist"] = True
else:
wait["blacklist"][msg.contentMetadata["mid"]] = True
wait["wblacklist"] = True
cl.sendMessage(msg.to,"Berhasil menambahkan ke blacklist user")
if wait["dblacklist"] == True:
if msg.contentMetadata["mid"] in wait["blacklist"]:
del wait["blacklist"][msg.contentMetadata["mid"]]
cl.sendMessage(msg.to,"Berhasil menghapus dari blacklist user")
else:
wait["dblacklist"] = True
cl.sendMessage(msg.to,"Contact itu tidak ada di blacklist")
#TALKBAN
if msg._from in admin:
if wait["Talkwblacklist"] == True:
if msg.contentMetadata["mid"] in wait["Talkblacklist"]:
cl.sendMessage(msg.to,"Contact itu sudah ada di Talkban")
wait["Talkwblacklist"] = True
else:
wait["Talkblacklist"][msg.contentMetadata["mid"]] = True
wait["Talkwblacklist"] = True
cl.sendMessage(msg.to,"Berhasil menambahkan ke Talkban user")
if wait["Talkdblacklist"] == True:
if msg.contentMetadata["mid"] in wait["Talkblacklist"]:
del wait["Talkblacklist"][msg.contentMetadata["mid"]]
cl.sendMessage(msg.to,"Berhasil menghapus dari Talkban user")
else:
wait["Talkdblacklist"] = True
cl.sendMessage(msg.to,"Contact itu tidak ada di Talkban")
#UPDATE FOTO
if msg.contentType == 1:
if msg._from in admin:
if Setmain["Addimage"] == True:
msgid = msg.id
fotoo = "https://obs.line-apps.com/talk/m/download.nhn?oid="+msgid
headers = cl.Talk.Headers
r = requests.get(fotoo, headers=headers, stream=True)
if r.status_code == 200:
path = os.path.join(os.path.dirname(__file__), 'dataPhotos/%s.jpg' % Setmain["Img"])
with open(path, 'wb') as fp:
shutil.copyfileobj(r.raw, fp)
cl.sendText(msg.to, "Berhasil menambahkan gambar")
Setmain["Img"] = {}
Setmain["Addimage"] = False
if msg.toType == 2:
if msg._from in admin:
if settings["groupPicture"] == True:
path = cl.downloadObjectMsg(msg_id)
settings["groupPicture"] = False
cl.updateGroupPicture(msg.to, path)
cl.sendMessage(msg.to, "Berhasil mengubah foto group")
if msg.contentType == 1:
if msg._from in admin:
if mid in Setmain["foto"]:
path = cl.downloadObjectMsg(msg_id)
del Setmain["foto"][mid]
cl.updateProfilePicture(path)
cl.sendMessage(msg.to,"Foto berhasil dirubah")
if msg.contentType == 1:
if msg._from in admin:
if Amid in Setmain["foto"]:
path = ki.downloadObjectMsg(msg_id)
del Setmain["foto"][Amid]
ki.updateProfilePicture(path)
ki.sendMessage(msg.to,"Foto berhasil dirubah")
elif Bmid in Setmain["foto"]:
path = kk.downloadObjectMsg(msg_id)
del Setmain["foto"][Bmid]
kk.updateProfilePicture(path)
kk.sendMessage(msg.to,"Foto berhasil dirubah")
elif Cmid in Setmain["foto"]:
path = kc.downloadObjectMsg(msg_id)
del Setmain["foto"][Cmid]
kc.updateProfilePicture(path)
kc.sendMessage(msg.to,"Foto berhasil dirubah")
if msg.contentType == 1:
if msg._from in admin:
if settings["changePicture"] == True:
path1 = ki.downloadObjectMsg(msg_id)
path2 = kk.downloadObjectMsg(msg_id)
path3 = kc.downloadObjectMsg(msg_id)
settings["changePicture"] = False
ki.updateProfilePicture(path1)
ki.sendMessage(msg.to, "Berhasil mengubah foto profile bot")
kk.updateProfilePicture(path2)
kk.sendMessage(msg.to, "Berhasil mengubah foto profile bot")
kc.updateProfilePicture(path3)
kc.sendMessage(msg.to, "Berhasil mengubah foto profile bot")
if msg.contentType == 0:
if Setmain["autoRead"] == True:
cl.sendChatChecked(msg.to, msg_id)
if text is None:
return
else:
cmd = command(text)
if cmd == "help":
if wait["selfbot"] == True:
if msg._from in admin:
helpMessage = help()
cl.sendMessage(msg.to, str(helpMessage))
if cmd == "self on":
if msg._from in admin:
wait["selfbot"] = True
cl.sendText(msg.to, "Selfbot diaktifkan")
elif cmd == "self off":
if msg._from in admin:
wait["selfbot"] = False
cl.sendText(msg.to, "Selfbot dinonaktifkan")
elif cmd == "help2":
if wait["selfbot"] == True:
if msg._from in admin:
helpMessage1 = helpbot()
cl.sendMessage(msg.to, str(helpMessage1))
elif cmd == "help3":
if wait["selfbot"] == True:
if msg._from in admin:
helpMessage2 = helpadmin()
cl.sendMessage(msg.to, str(helpMessage2))
elif cmd == "status":
if wait["selfbot"] == True:
if msg._from in admin:
tz = pytz.timezone("Asia/Jakarta")
timeNow = datetime.now(tz=tz)
md = "╭──[ STATUS PROTECTION]\n"
if wait["sticker"] == True: md+="│ ⚫ Sticker[ON]\n"
else: md+="│ ⭕ Sticker[OFF]\n"
if wait["contact"] == True: md+="│ ⚫ Contact[ON]\n"
else: md+="│ ⭕ Contact[OFF]\n"
if wait["talkban"] == True: md+="│ ⚫ Talkban[ON]\n"
else: md+="│ ⭕ Talkban[OFF]\n"
if wait["Mentionkick"] == True: md+="│ ⚫ Notag[ON]\n"
else: md+="│ ⭕ Notag[OFF]\n"
if wait["detectMention"] == True: md+="│ ⚫ Respon[ON]\n"
else: md+="│ ⭕ Respon[OFF]\n"
if wait["autoJoin"] == True: md+="│ ⚫ Autojoin[ON]\n"
else: md+="│ ⭕ Autojoin[OFF]\n"
if wait["autoAdd"] == True: md+="│ ⚫ Autoadd[ON]\n"
else: md+="│ ⭕ Autoadd[OFF]\n"
if msg.to in welcome: md+="│ ⚫ Welcome[ON]\n"
else: md+="│ ⭕ Welcome[OFF]\n"
if wait["autoLeave"] == True: md+="│ ⚫ Autoleave[ON]\n"
else: md+="│ ⭕ Autoleave[OFF]\n"
if msg.to in protectqr: md+="│ ⚫ Protecturl[ON]\n"
else: md+="│ ⭕ Protecturl[OFF]\n"
if msg.to in protectjoin: md+="│ ⚫ Protectjoin[ON]\n"
else: md+="│ ⭕ Protectjoin[OFF]\n"
if msg.to in protectkick: md+="│ ⚫ Protectkick[ON]\n"
else: md+="│ ⭕ Protectkick[OFF]\n"
if msg.to in protectcancel: md+="│ ⚫ Protectcancel[ON]\n"
else: md+="│ ⭕ Protectcancel[OFF]\n"
cl.sendMessage(msg.to, md+"┝───────────────\n│Tanggal : "+ datetime.strftime(timeNow,'%Y-%m-%d')+"\n│Jam [ "+ datetime.strftime(timeNow,'%H:%M:%S')+" ]\n╰──[STATUS PROTECTION]")
elif cmd == "creator" or text.lower() == 'creator':
if msg._from in admin:
cl.sendText(msg.to,"Creator ARIFISTIFIK")
ma = ""
for i in creator:
ma = cl.getContact(i)
cl.sendMessage(msg.to, None, contentMetadata={'mid': i}, contentType=13)
elif cmd == "about" or cmd == "informasi":
if wait["selfbot"] == True:
if msg._from in admin:
sendMention(msg.to, sender, "[ Type Selfbot ]\n")
cl.sendMessage(msg.to, None, contentMetadata={'mid': mid}, contentType=13)
elif cmd == "me" or text.lower() == 'me':
if wait["selfbot"] == True:
if msg._from in admin:
msg.contentType = 13
msg.contentMetadata = {'mid': mid}
cl.sendMessage1(msg)
elif text.lower() == "mid":
cl.sendMessage(msg.to, msg._from)
elif ("Mid " in msg.text):
if wait["selfbot"] == True:
if msg._from in admin:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
mi = cl.getContact(key1)
cl.sendMessage(msg.to, "Nama : "+str(mi.displayName)+"\nMID : " +key1)
cl.sendMessage(msg.to, None, contentMetadata={'mid': key1}, contentType=13)
elif ("Info " in msg.text):
if wait["selfbot"] == True:
if msg._from in admin:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
mi = cl.getContact(key1)
cl.sendMessage(msg.to, "│ Nama : "+str(mi.displayName)+"\n│ Mid : " +key1+"\n│ Status Msg"+str(mi.statusMessage))
cl.sendMessage(msg.to, None, contentMetadata={'mid': key1}, contentType=13)
if "videoProfile='{" in str(cl.getContact(key1)):
cl.sendVideoWithURL(msg.to, 'http://dl.profile.line.naver.jp'+str(mi.picturePath)+'/vp.small')
else:
cl.sendImageWithURL(msg.to, 'http://dl.profile.line.naver.jp'+str(mi.picturePath))
elif cmd == "mybot":
if wait["selfbot"] == True:
if msg._from in admin:
msg.contentType = 13
msg.contentMetadata = {'mid': mid}
cl.sendMessage1(msg)
msg.contentType = 13
msg.contentMetadata = {'mid': Amid}
cl.sendMessage1(msg)
msg.contentType = 13
msg.contentMetadata = {'mid': Bmid}
cl.sendMessage1(msg)
msg.contentType = 13
msg.contentMetadata = {'mid': Cmid}
cl.sendMessage1(msg)
elif text.lower() == "hapus chat":
if wait["selfbot"] == True:
if msg._from in admin:
try:
cl.removeAllMessages(op.param2)
except:
pass
elif text.lower() == "remove chat":
if wait["selfbot"] == True:
if msg._from in admin:
try:
cl.removeAllMessages(op.param2)
ki.removeAllMessages(op.param2)
kk.removeAllMessages(op.param2)
kc.removeAllMessages(op.param2)
cl.sendText(msg.to,"Chat dibersihkan...")
except:
pass
elif cmd.startswith("broadcast: "):
if wait["selfbot"] == True:
if msg._from in admin:
sep = text.split(" ")
pesan = text.replace(sep[0] + " ","")
saya = cl.getGroupIdsJoined()
for group in saya:
cl.sendMessage(group,"[ Broadcast ]\n" + str(pesan))
elif text.lower() == "mykey":
if wait["selfbot"] == True:
if msg._from in admin:
cl.sendMessage(msg.to, "[Mykey]\nSetkey bot mu[ " + str(Setmain["keyCommand"]) + " ]")
elif cmd.startswith("setkey "):
if wait["selfbot"] == True:
if msg._from in admin:
sep = text.split(" ")
key = text.replace(sep[0] + " ","")
if key in [""," ","\n",None]:
cl.sendMessage(msg.to, "Gagal mengganti key")
else:
Setmain["keyCommand"] = str(key).lower()
cl.sendMessage(msg.to, "[Setkey]\nSetkey diganti jadi[{}]".format(str(key).lower()))
elif text.lower() == "resetkey":
if wait["selfbot"] == True:
if msg._from in admin:
Setmain["keyCommand"] = ""
cl.sendMessage(msg.to, "[Setkey]\nSetkey mu kembali ke awal")
elif cmd == "restart":
if wait["selfbot"] == True:
if msg._from in admin:
cl.sendMessage(msg.to, "Tunggu sebentar...")
Setmain["restartPoint"] = msg.to
restartBot()
cl.sendMessage(msg.to, "Silahkan gunakan seperti semula...")
elif cmd == "runtime":
if wait["selfbot"] == True:
if msg._from in admin:
eltime = time.time() - mulai
bot = "Aktif " +waktu(eltime)
cl.sendMessage(msg.to,bot)
elif cmd == "ginfo":
if msg._from in admin:
try:
G = cl.getGroup(msg.to)
if G.invitee is None:
gPending = "0"
else:
gPending = str(len(G.invitee))
if G.preventedJoinByTicket == True:
gQr = "Tertutup"
gTicket = "Tidak ada"
else:
gQr = "Terbuka"
gTicket = "https://line.me/R/ti/g/{}".format(str(cl.reissueGroupTicket(G.id)))
timeCreated = []
timeCreated.append(time.strftime("%d-%m-%Y [ %H:%M:%S ]", time.localtime(int(G.createdTime) / 1000)))
cl.sendMessage(msg.to, "✴[Grup Info]\n\n│ Nama Group : {}".format(G.name)+ "\n│ ID Group : {}".format(G.id)+ "\n│ Pembuat : {}".format(G.creator.displayName)+ "\n│ Waktu Dibuat : {}".format(str(timeCreated))+ "\n│ Jumlah Member : {}".format(str(len(G.members)))+ "\n│ Jumlah Pending : {}".format(gPending)+ "\n│ Group Qr : {}".format(gQr)+ "\n│ Group Ticket : {}".format(gTicket))
cl.sendMessage(msg.to, None, contentMetadata={'mid': G.creator.mid}, contentType=13)
cl.sendImageWithURL(msg.to, 'http://dl.profile.line-cdn.net/'+G.pictureStatus)
except Exception as e:
cl.sendMessage(msg.to, str(e))
elif cmd.startswith("infogrup "):
if msg._from in admin:
separate = text.split(" ")
number = text.replace(separate[0] + " ","")
groups = cl.getGroupIdsJoined()
ret_ = ""
try:
group = groups[int(number)-1]
G = cl.getGroup(group)
try:
gCreator = G.creator.displayName
except:
gCreator = "Tidak ditemukan"
if G.invitee is None:
gPending = "0"
else:
gPending = str(len(G.invitee))
if G.preventedJoinByTicket == True:
gQr = "Tertutup"
gTicket = "Tidak ada"
else:
gQr = "Terbuka"
gTicket = "https://line.me/R/ti/g/{}".format(str(cl.reissueGroupTicket(G.id)))
timeCreated = []
timeCreated.append(time.strftime("%d-%m-%Y [ %H:%M:%S ]", time.localtime(int(G.createdTime) / 1000)))
ret_ += "╭───[Grup Info]"
ret_ += "\n│ Nama Group : {}".format(G.name)
ret_ += "\n│ ID Group : {}".format(G.id)
ret_ += "\n│ Pembuat : {}".format(gCreator)
ret_ += "\n│ Waktu Dibuat : {}".format(str(timeCreated))
ret_ += "\n│ Jumlah Member : {}".format(str(len(G.members)))
ret_ += "\n│ Jumlah Pending : {}".format(gPending)
ret_ += "\n│ Group Qr : {}".format(gQr)
ret_ += "\n│ Group Ticket : {}".format(gTicket)
ret_ += "\n╰───[Grup Info]"
cl.sendMessage(to, str(ret_))
except:
pass
elif cmd.startswith("infomem "):
if msg._from in admin:
separate = msg.text.split(" ")
number = msg.text.replace(separate[0] + " ","")
groups = cl.getGroupIdsJoined()
ret_ = ""
try:
group = groups[int(number)-1]
G = cl.getGroup(group)
no = 0
ret_ = ""
for mem in G.members:
no += 1
ret_ += "\n " "│ "+ str(no) + ". " + mem.displayName
cl.sendMessage(to,"│ Group Name : [ " + str(G.name) + " ]\n\n [ List Member ]\n" + ret_ + "\n\n[Total %i Members]" % len(G.members))
except:
pass
elif cmd.startswith("leave: "):
if msg._from in admin:
separate = msg.text.split(" ")
number = msg.text.replace(separate[0] + " ","")
groups = cl.getGroupIdsJoined()
group = groups[int(number)-1]
for i in group:
ginfo = cl.getGroup(i)
if ginfo == group:
ki.leaveGroup(i)
kk.leaveGroup(i)
kc.leaveGroup(i)
cl.sendMessage(msg.to,"Berhasil keluar di grup " +str(ginfo.name))
elif cmd == "fiendlist":
if wait["selfbot"] == True:
if msg._from in admin:
ma = ""
a = 0
gid = cl.getAllContactIds()
for i in gid:
G = cl.getContact(i)
a = a + 1
end = "\n"
ma += "│ • " + str(a) + ". " +G.displayName+ "\n"
cl.sendMessage(msg.to,"╭──[ FRIEND LIST ]\n│\n"+ma+"│\n╰──[ Total["+str(len(gid))+"]Friends ]")
elif cmd == "gruplist":
if wait["selfbot"] == True:
if msg._from in admin:
ma = ""
a = 0
gid = cl.getGroupIdsJoined()
for i in gid:
G = cl.getGroup(i)
a = a + 1
end = "\n"
ma += "│ • " + str(a) + ". " +G.name+ "\n"
cl.sendMessage(msg.to,"╭──[ GROUP LIST ]\n│\n"+ma+"│\n╰──[ Total["+str(len(gid))+"]Groups ]")
elif cmd == "gruplist1":
if msg._from in admin:
ma = ""
a = 0
gid = ki.getGroupIdsJoined()
for i in gid:
G = ki.getGroup(i)
a = a + 1
end = "\n"
ma += "│ • " + str(a) + ". " +G.name+ "\n"
ki.sendMessage(msg.to,"╭──[ GROUP LIST ]\n│\n"+ma+"│\n╰──[ Total["+str(len(gid))+"]Groups ]")
elif cmd == "gruplist2":
if msg._from in admin:
ma = ""
a = 0
gid = kk.getGroupIdsJoined()
for i in gid:
G = kk.getGroup(i)
a = a + 1
end = "\n"
ma += "│ • " + str(a) + ". " +G.name+ "\n"
kk.sendMessage(msg.to,"╭──[ GROUP LIST ]\n│\n"+ma+"│\n╰──[ Total["+str(len(gid))+"]Groups ]")
elif cmd == "gruplist3":
if msg._from in admin:
ma = ""
a = 0
gid = kc.getGroupIdsJoined()
for i in gid:
G = kc.getGroup(i)
a = a + 1
end = "\n"
ma += "│ • " + str(a) + ". " +G.name+ "\n"
kc.sendMessage(msg.to,"╭──[ GROUP LIST ]\n│\n"+ma+"│\n╰──[ Total["+str(len(gid))+"]Groups ]")
elif cmd == "open":
if wait["selfbot"] == True:
if msg._from in admin:
if msg.toType == 2:
X = cl.getGroup(msg.to)
X.preventedJoinByTicket = False
cl.updateGroup(X)
cl.sendMessage(msg.to, "Url Opened")
elif cmd == "close":
if wait["selfbot"] == True:
if msg._from in admin:
if msg.toType == 2:
X = cl.getGroup(msg.to)
X.preventedJoinByTicket = True
cl.updateGroup(X)
cl.sendMessage(msg.to, "Url Closed")
elif cmd == "url grup":
if wait["selfbot"] == True:
if msg._from in admin:
if msg.toType == 2:
x = cl.getGroup(msg.to)
if x.preventedJoinByTicket == True:
x.preventedJoinByTicket = False
cl.updateGroup(x)
gurl = cl.reissueGroupTicket(msg.to)
cl.sendMessage(msg.to, "Nama : "+str(x.name)+ "\nUrl grup : http://line.me/R/ti/g/"+gurl)
#===========BOT UPDATE============#
elif cmd == "updategrup":
if wait["selfbot"] == True:
if msg._from in admin:
if msg.toType == 2:
settings["groupPicture"] = True
cl.sendText(msg.to,"Kirim fotonya.....")
elif cmd == "updatebot":
if wait["selfbot"] == True:
if msg._from in admin:
settings["changePicture"] = True
cl.sendText(msg.to,"Kirim fotonya.....")
elif cmd == "updatefoto":
if wait["selfbot"] == True:
if msg._from in admin:
Setmain["foto"][mid] = True
cl.sendText(msg.to,"Kirim fotonya.....")
elif cmd == "bot1up":
if msg._from in admin:
Setmain["foto"][Amid] = True
ki.sendText(msg.to,"Kirim fotonya.....")
elif cmd == "bot2up":
if msg._from in admin:
Setmain["foto"][Bmid] = True
kk.sendText(msg.to,"Kirim fotonya.....")
elif cmd == "bot3up":
if msg._from in admin:
Setmain["foto"][Cmid] = True
kc.sendText(msg.to,"Kirim fotonya.....")
elif cmd.startswith("myname: "):
if msg._from in admin:
separate = msg.text.split(" ")
string = msg.text.replace(separate[0] + " ","")
if len(string) <= 10000000000:
profile = cl.getProfile()
profile.displayName = string
cl.updateProfile(profile)
cl.sendMessage(msg.to,"Nama diganti jadi " + string + "")
elif cmd.startswith("bot1name: "):
if msg._from in admin:
separate = msg.text.split(" ")
string = msg.text.replace(separate[0] + " ","")
if len(string) <= 10000000000:
profile = ki.getProfile()
profile.displayName = string
ki.updateProfile(profile)
ki.sendMessage(msg.to,"Nama diganti jadi " + string + "")
elif cmd.startswith("bot2name: "):
if msg._from in admin:
separate = msg.text.split(" ")
string = msg.text.replace(separate[0] + " ","")
if len(string) <= 10000000000:
profile = kk.getProfile()
profile.displayName = string
kk.updateProfile(profile)
kk.sendMessage(msg.to,"Nama diganti jadi " + string + "")
elif cmd.startswith("bot3name: "):
if msg._from in admin:
separate = msg.text.split(" ")
string = msg.text.replace(separate[0] + " ","")
if len(string) <= 10000000000:
profile = kc.getProfile()
profile.displayName = string
kc.updateProfile(profile)
kc.sendMessage(msg.to,"Nama diganti jadi " + string + "")
#===========BOT UPDATE============#
elif cmd == "tagall" or text.lower() == '😆':
if wait["selfbot"] == True:
if msg._from in admin:
members = []
if msg.toType == 1:
room = cl.getCompactRoom(to)
members = [mem.mid for mem in room.contacts]
elif msg.toType == 2:
group = cl.getCompactGroup(to)
members = [mem.mid for mem in group.members]
else:
return cl.sendMessage(to, 'Failed mentionall members, use this command only on room or group chat')
if members:
mentionMembers(to, members)
elif cmd == "listbot":
if wait["selfbot"] == True:
if msg._from in admin:
ma = ""
a = 0
for m_id in Bots:
a = a + 1
end = '\n'
ma += str(a) + ". " +cl.getContact(m_id).displayName + "\n"
cl.sendMessage(msg.to,"LIST MY BOTS\n"+ma+"\nTotal[%s] Bots" %(str(len(Bots))))
elif cmd == "listadmin":
if wait["selfbot"] == True:
if msg._from in admin:
ma = ""
mb = ""
mc = ""
a = 0
b = 0
c = 0
for m_id in owner:
a = a + 1
end = '\n'
ma += str(a) + ". " +cl.getContact(m_id).displayName + "\n"
for m_id in admin:
b = b + 1
end = '\n'
mb += str(b) + ". " +cl.getContact(m_id).displayName + "\n"
for m_id in staff:
c = c + 1
end = '\n'
mc += str(c) + ". " +cl.getContact(m_id).displayName + "\n"
cl.sendMessage(msg.to,"LIST ADMIN\nSuper admin:\n"+ma+"\nAdmin:\n"+mb+"\nStaff:\n"+mc+"\nTotal[%s]Family Team" %(str(len(owner)+len(admin)+len(staff))))
elif cmd == "listprotect":
if wait["selfbot"] == True:
if msg._from in admin:
ma = ""
mb = ""
mc = ""
md = ""
a = 0
b = 0
c = 0
d = 0
gid = protectqr
for group in gid:
a = a + 1
end = '\n'
ma += str(a) + ". " +cl.getGroup(group).name + "\n"
gid = protectkick
for group in gid:
b = b + 1
end = '\n'
mb += str(b) + ". " +cl.getGroup(group).name + "\n"
gid = protectjoin
for group in gid:
d = d + 1
end = '\n'
md += str(d) + ". " +cl.getGroup(group).name + "\n"
gid = protectcancel
for group in gid:
c = c + 1
end = '\n'
mc += str(c) + ". " +cl.getGroup(group).name + "\n"
cl.sendMessage(msg.to,"Protection\n\n PROTECT URL :\n"+ma+"\n PROTECT KICK :\n"+mb+"\n PROTECT JOIN :\n"+md+"\n PROTECT CANCEL:\n"+mc+"\nTotal[%s]Grup yg dijaga" %(str(len(protectqr)+len(protectkick)+len(protectjoin)+len(protectcancel))))
elif cmd == "respon":
if wait["selfbot"] == True:
if msg._from in admin:
ki.sendMessage(msg.to,responsename1)
kk.sendMessage(msg.to,responsename2)
kc.sendMessage(msg.to,responsename3)
elif cmd == "invitebot":
if wait["selfbot"] == True:
if msg._from in admin:
try:
anggota = [Bmid,Cmid,Amid]
cl.inviteIntoGroup(msg.to, anggota)
kk.acceptGroupInvitation(msg.to)
kc.acceptGroupInvitation(msg.to)
ki.acceptGroupInvitation(msg.to)
except:
pass
elif cmd == "joinall":
if wait["selfbot"] == True:
if msg._from in admin:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventedJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki.acceptGroupInvitationByTicket(msg.to,Ticket)
kk.acceptGroupInvitationByTicket(msg.to,Ticket)
kc.acceptGroupInvitationByTicket(msg.to,Ticket)
G = kc.getGroup(msg.to)
G.preventedJoinByTicket = True
kc.updateGroup(G)
elif cmd == "byeall":
if wait["selfbot"] == True:
if msg._from in admin:
G = cl.getGroup(msg.to)
ki.sendText(msg.to, "Bye bye fams "+str(G.name))
ki.leaveGroup(msg.to)
kk.leaveGroup(msg.to)
kc.leaveGroup(msg.to)
elif cmd == "byeme":
if wait["selfbot"] == True:
if msg._from in admin:
G = cl.getGroup(msg.to)
cl.sendText(msg.to, "Bye bye fams "+str(G.name))
cl.leaveGroup(msg.to)
elif cmd.startswith("leave "):
if msg._from in admin:
proses = text.split(" ")
ng = text.replace(proses[0] + " ","")
gid = cl.getGroupIdsJoined()
for i in gid:
h = cl.getGroup(i).name
if h == ng:
ki.sendMessage(i, "Silahkan admin invite atau masukan kembali")
ki.leaveGroup(i)
kk.leaveGroup(i)
kc.leaveGroup(i)
cl.sendMessage(to,"Berhasil keluar dari grup " +h)
elif cmd == "assist1":
if msg._from in admin:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventedJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki.acceptGroupInvitationByTicket(msg.to,Ticket)
G = ki.getGroup(msg.to)
G.preventedJoinByTicket = True
ki.updateGroup(G)
elif cmd == "assist2":
if msg._from in admin:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventedJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
kk.acceptGroupInvitationByTicket(msg.to,Ticket)
G = kk.getGroup(msg.to)
G.preventedJoinByTicket = True
kk.updateGroup(G)
elif cmd == "assist3":
if msg._from in admin:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventedJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
kc.acceptGroupInvitationByTicket(msg.to,Ticket)
G = kc.getGroup(msg.to)
G.preventedJoinByTicket = True
kc.updateGroup(G)
elif cmd == "sprespon":
if wait["selfbot"] == True:
if msg._from in admin:
get_profile_time_start = time.time()
get_profile = cl.getProfile()
get_profile_time = time.time() - get_profile_time_start
get_group_time_start = time.time()
get_group = cl.getGroupIdsJoined()
get_group_time = time.time() - get_group_time_start
get_contact_time_start = time.time()
get_contact = cl.getContact(mid)
get_contact_time = time.time() - get_contact_time_start
cl.sendMessage(msg.to, "Speed respon\n - Get Profile\n %.10f\n - Get Contact\n %.10f\n - Get Group\n %.10f" % (get_profile_time/3,get_contact_time/3,get_group_time/3))
elif cmd == "speed" or cmd == "sp":
if wait["selfbot"] == True:
if msg._from in admin:
start = time.time()
cl.sendMessage(msg.to, "Progres speed...")
elapsed_time = time.time() - start
cl.sendMessage(msg.to, "{} detik".format(str(elapsed_time)))
elif cmd == "lurking on":
if wait["selfbot"] == True:
if msg._from in admin:
tz = pytz.timezone("Asia/Jakarta")
timeNow = datetime.now(tz=tz)
Setmain['readPoint'][msg.to] = msg_id
Setmain['readMember'][msg.to] = {}
cl.sendText(msg.to, "Lurking berhasil diaktifkan\n\nTanggal : "+ datetime.strftime(timeNow,'%Y-%m-%d')+"\nJam [ "+ datetime.strftime(timeNow,'%H:%M:%S')+" ]")
elif cmd == "lurking off":
if wait["selfbot"] == True:
if msg._from in admin:
tz = pytz.timezone("Asia/Jakarta")
timeNow = datetime.now(tz=tz)
del Setmain['readPoint'][msg.to]
del Setmain['readMember'][msg.to]
cl.sendText(msg.to, "Lurking berhasil dinoaktifkan\n\nTanggal : "+ datetime.strftime(timeNow,'%Y-%m-%d')+"\nJam [ "+ datetime.strftime(timeNow,'%H:%M:%S')+" ]")
elif cmd == "lurkers":
if msg._from in admin:
if msg.to in Setmain['readPoint']:
if Setmain['readMember'][msg.to] != {}:
aa = []
for x in Setmain['readMember'][msg.to]:
aa.append(x)
try:
arrData = ""
textx = " [ Result {} member ] \n\n [ Lurkers ]\n1. ".format(str(len(aa)))
arr = []
no = 1
b = 1
for i in aa:
b = b + 1
end = "\n"
mention = "@x\n"
slen = str(len(textx))
elen = str(len(textx) + len(mention) - 1)
arrData = {'S':slen, 'E':elen, 'M':i}
arr.append(arrData)
tz = pytz.timezone("Asia/Jakarta")
timeNow = datetime.now(tz=tz)
textx += mention
if no < len(aa):
no += 1
textx += str(b) + ". "
else:
try:
no = "[ {} ]".format(str(cl.getGroup(msg.to).name))
except:
no = " "
msg.to = msg.to
msg.text = textx+"\nTanggal : "+ datetime.strftime(timeNow,'%Y-%m-%d')+"\nJam [ "+ datetime.strftime(timeNow,'%H:%M:%S')+" ]"
msg.contentMetadata = {'MENTION': str('{"MENTIONEES":' + json.dumps(arr) + '}')}
msg.contentType = 0
cl.sendMessage1(msg)
except:
pass
try:
del Setmain['readPoint'][msg.to]
del Setmain['readMember'][msg.to]
except:
pass
Setmain['readPoint'][msg.to] = msg.id
Setmain['readMember'][msg.to] = {}
else:
cl.sendText(msg.to, "User kosong...")
else:
cl.sendText(msg.to, "Ketik lurking on dulu")
elif cmd == "sider on":
if wait["selfbot"] == True:
if msg._from in admin:
try:
tz = pytz.timezone("Asia/Jakarta")
timeNow = datetime.now(tz=tz)
cl.sendMessage(msg.to, "Cek sider diaktifkan\n\nTanggal : "+ datetime.strftime(timeNow,'%Y-%m-%d')+"\nJam [ "+ datetime.strftime(timeNow,'%H:%M:%S')+" ]")
del cctv['point'][msg.to]
del cctv['sidermem'][msg.to]
del cctv['cyduk'][msg.to]
except:
pass
cctv['point'][msg.to] = msg.id
cctv['sidermem'][msg.to] = ""
cctv['cyduk'][msg.to]=True
elif cmd == "sider off":
if wait["selfbot"] == True:
if msg._from in admin:
if msg.to in cctv['point']:
tz = pytz.timezone("Asia/Jakarta")
timeNow = datetime.now(tz=tz)
cctv['cyduk'][msg.to]=False
cl.sendMessage(msg.to, "Cek sider dinonaktifkan\n\nTanggal : "+ datetime.strftime(timeNow,'%Y-%m-%d')+"\nJam [ "+ datetime.strftime(timeNow,'%H:%M:%S')+" ]")
else:
cl.sendMessage(msg.to, "Sudak tidak aktif")
#===========Hiburan============#
elif cmd.startswith("ytmp4: "):
if msg._from in admin:
try:
sep = msg.text.split(" ")
textToSearch = msg.text.replace(sep[0] + " ","")
query = urllib.parse.quote(textToSearch)
search_url="https://www.youtube.com/results?search_query="
mozhdr = {'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3'}
sb_url = search_url + query
sb_get = requests.get(sb_url, headers = mozhdr)
soupeddata = BeautifulSoup(sb_get.content, "html.parser")
yt_links = soupeddata.find_all("a", class_ = "yt-uix-tile-link")
x = (yt_links[1])
yt_href = x.get("href")
yt_href = yt_href.replace("watch?v=", "")
qx = "https://youtu.be" + str(yt_href)
vid = pafy.new(qx)
stream = vid.streams
best = vid.getbest()
best.resolution, best.extension
for s in stream:
me = best.url
hasil = ""
title = "Judul [ " + vid.title + " ]"
author = '\n\n│ Author : ' + str(vid.author)
durasi = '\n│ Duration : ' + str(vid.duration)
suka = '\n│ Likes : ' + str(vid.likes)
rating = '\n│ Rating : ' + str(vid.rating)
deskripsi = '\n│ Deskripsi : ' + str(vid.description)
cl.sendVideoWithURL(msg.to, me)
cl.sendText(msg.to,title+ author+ durasi+ suka+ rating+ deskripsi)
except Exception as e:
cl.sendText(msg.to,str(e))
elif cmd.startswith("musik: "):
if msg._from in admin:
try:
search = msg.text.replace("musik: ","")
r = requests.get("https://rest.farzain.com/api/joox.php?apikey=tjfoPmtksGg222NdQdZypSqEV&id={}".format(urllib.parse.quote(search)))
data = r.text
data = json.loads(data)
info = data["info"]
audio = data["audio"]
hasil = "「 Hasil Musik 」\n"
hasil += "\nPenyanyi : {}".format(str(info["penyanyi"]))
hasil += "\nJudul : {}".format(str(info["judul"]))
hasil += "\nAlbum : {}".format(str(info["album"]))
hasil += "\n\nLink : \n1. Image : {}".format(str(data["gambar"]))
hasil += "\n\nLink : \n2. MP3 : {}".format(str(audio["mp3"]))
cl.sendImageWithURL(msg.to, str(data["gambar"]))
cl.sendMessage(msg.to, str(hasil))
cl.sendAudioWithURL(msg.to, str(audio["mp3"]))
except Exception as e:
cl.sendMessage(msg.to, "Selamat Menikmati ")
elif cmd.startswith("spamtag: "):
if wait["selfbot"] == True:
if msg._from in admin:
proses = text.split(":")
strnum = text.replace(proses[0] + ":","")
num = int(strnum)
Setmain["limit"] = num
cl.sendText(msg.to,"Total Spamtag Diubah Menjadi " +strnum)
elif cmd.startswith("spamcall: "):
if wait["selfbot"] == True:
if msg._from in admin:
proses = text.split(":")
strnum = text.replace(proses[0] + ":","")
num = int(strnum)
wait["limit"] = num
cl.sendText(msg.to,"Total Spamcall Diubah Menjadi " +strnum)
elif cmd.startswith("spamtag "):
if wait["selfbot"] == True:
if msg._from in admin:
if 'MENTION' in msg.contentMetadata.keys()!=None:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
zx = ""
zxc = " "
zx2 = []
pesan2 = "@a"" "
xlen = str(len(zxc))
xlen2 = str(len(zxc)+len(pesan2)-1)
zx = {'S':xlen, 'E':xlen2, 'M':key1}
zx2.append(zx)
zxc += pesan2
msg.contentType = 0
msg.text = zxc
lol = {'MENTION':str('{"MENTIONEES":'+json.dumps(zx2).replace(' ','')+'}')}
msg.contentMetadata = lol
jmlh = int(Setmain["limit"])
if jmlh <= 1000:
for x in range(jmlh):
try:
cl.sendMessage1(msg)
except Exception as e:
cl.sendText(msg.to,str(e))
else:
cl.sendText(msg.to,"Jumlah melebihi 1000")
elif cmd == "spamcall":
if wait["selfbot"] == True:
if msg._from in admin:
if msg.toType == 2:
group = cl.getGroup(to)
members = [mem.mid for mem in group.members]
jmlh = int(wait["limit"])
cl.sendMessage(msg.to, "Berhasil mengundang {} undangan Call Grup".format(str(wait["limit"])))
if jmlh <= 1000:
for x in range(jmlh):
try:
call.acquireGroupCallRoute(to)
call.inviteIntoGroupCall(to, contactIds=members)
except Exception as e:
cl.sendText(msg.to,str(e))
else:
cl.sendText(msg.to,"Jumlah melebihi batas")
elif 'Gift: ' in msg.text:
if wait["selfbot"] == True:
if msg._from in admin:
korban = msg.text.replace('Gift: ','')
korban2 = korban.split()
midd = korban2[0]
jumlah = int(korban2[1])
if jumlah <= 1000:
for var in range(0,jumlah):
cl.sendMessage(midd, None, contentMetadata={'PRDID': 'a0768339-c2d3-4189-9653-2909e9bb6f58', 'PRDTYPE': 'THEME', 'MSGTPL': '6'}, contentType=9)
ki.sendMessage(midd, None, contentMetadata={'PRDID': 'a0768339-c2d3-4189-9653-2909e9bb6f58', 'PRDTYPE': 'THEME', 'MSGTPL': '6'}, contentType=9)
kk.sendMessage(midd, None, contentMetadata={'PRDID': 'a0768339-c2d3-4189-9653-2909e9bb6f58', 'PRDTYPE': 'THEME', 'MSGTPL': '6'}, contentType=9)
kc.sendMessage(midd, None, contentMetadata={'PRDID': 'a0768339-c2d3-4189-9653-2909e9bb6f58', 'PRDTYPE': 'THEME', 'MSGTPL': '6'}, contentType=9)
elif 'Spam: ' in msg.text:
if wait["selfbot"] == True:
if msg._from in admin:
korban = msg.text.replace('Spam: ','')
korban2 = korban.split()
midd = korban2[0]
jumlah = int(korban2[1])
if jumlah <= 1000:
for var in range(0,jumlah):
cl.sendMessage(midd, str(Setmain["message1"]))
ki.sendMessage(midd, str(Setmain["message1"]))
kk.sendMessage(midd, str(Setmain["message1"]))
kc.sendMessage(midd, str(Setmain["message1"]))
#===========Protection============#
elif 'Welcome ' in msg.text:
if msg._from in admin:
spl = msg.text.replace('Welcome ','')
if spl == 'on':
if msg.to in welcome:
msgs = "Welcome Msg sudah aktif"
else:
welcome.append(msg.to)
ginfo = cl.getGroup(msg.to)
msgs = "Welcome Msg diaktifkan\nDi Group : " +str(ginfo.name)
cl.sendMessage(msg.to, "[Diaktifkan]\n" + msgs)
elif spl == 'off':
if msg.to in welcome:
welcome.remove(msg.to)
ginfo = cl.getGroup(msg.to)
msgs = "Welcome Msg dinonaktifkan\nDi Group : " +str(ginfo.name)
else:
msgs = "Welcome Msg sudah tidak aktif"
cl.sendMessage(msg.to, "[Dinonaktifkan]\n" + msgs)
elif 'Protecturl ' in msg.text:
if msg._from in admin:
spl = msg.text.replace('Protecturl ','')
if spl == 'on':
if msg.to in protectqr:
msgs = "Protect url sudah aktif"
else:
protectqr.append(msg.to)
ginfo = cl.getGroup(msg.to)
msgs = "Protect url diaktifkan\nDi Group : " +str(ginfo.name)
cl.sendMessage(msg.to, "[Diaktifkan]\n" + msgs)
elif spl == 'off':
if msg.to in protectqr:
protectqr.remove(msg.to)
ginfo = cl.getGroup(msg.to)
msgs = "Protect url dinonaktifkan\nDi Group : " +str(ginfo.name)
else:
msgs = "Protect url sudah tidak aktif"
cl.sendMessage(msg.to, "[Dinonaktifkan]\n" + msgs)
elif 'Protectkick ' in msg.text:
if msg._from in admin:
spl = msg.text.replace('Protectkick ','')
if spl == 'on':
if msg.to in protectkick:
msgs = "Protect kick sudah aktif"
else:
protectkick.append(msg.to)
ginfo = cl.getGroup(msg.to)
msgs = "Protect kick diaktifkan\nDi Group : " +str(ginfo.name)
cl.sendMessage(msg.to, "[Diaktifkan]\n" + msgs)
elif spl == 'off':
if msg.to in protectkick:
protectkick.remove(msg.to)
ginfo = cl.getGroup(msg.to)
msgs = "Protect kick dinonaktifkan\nDi Group : " +str(ginfo.name)
else:
msgs = "Protect kick sudah tidak aktif"
cl.sendMessage(msg.to, "[Dinonaktifkan]\n" + msgs)
elif 'Protectjoin ' in msg.text:
if msg._from in admin:
spl = msg.text.replace('Protectjoin ','')
if spl == 'on':
if msg.to in protectjoin:
msgs = "Protect join sudah aktif"
else:
protectjoin.append(msg.to)
ginfo = cl.getGroup(msg.to)
msgs = "Protect join diaktifkan\nDi Group : " +str(ginfo.name)
cl.sendMessage(msg.to, "[Diaktifkan]\n" + msgs)
elif spl == 'off':
if msg.to in protectjoin:
protectjoin.remove(msg.to)
ginfo = cl.getGroup(msg.to)
msgs = "Protect join dinonaktifkan\nDi Group : " +str(ginfo.name)
else:
msgs = "Protect join sudah tidak aktif"
cl.sendMessage(msg.to, "[Dinonaktifkan]\n" + msgs)
elif 'Protectcancel ' in msg.text:
if msg._from in admin:
spl = msg.text.replace('Protectcancel ','')
if spl == 'on':
if msg.to in protectcancel:
msgs = "Protect cancel sudah aktif"
else:
protectcancel.append(msg.to)
ginfo = cl.getGroup(msg.to)
msgs = "Protect cancel diaktifkan\nDi Group : " +str(ginfo.name)
cl.sendMessage(msg.to, "[Diaktifkan]\n" + msgs)
elif spl == 'off':
if msg.to in protectcancel:
protectcancel.remove(msg.to)
ginfo = cl.getGroup(msg.to)
msgs = "Protect cancel dinonaktifkan\nDi Group : " +str(ginfo.name)
else:
msgs = "Protect cancel sudah tidak aktif"
cl.sendMessage(msg.to, "[Dinonaktifkan]\n" + msgs)
elif 'backup ' in msg.text:
if msg._from in admin:
spl = msg.text.replace('backup ','')
if spl == 'on':
if msg.to in protectqr:
msgs = ""
else:
protectqr.append(msg.to)
if msg.to in protectkick:
msgs = ""
else:
protectkick.append(msg.to)
if msg.to in protectjoin:
msgs = ""
else:
protectjoin.append(msg.to)
if msg.to in protectcancel:
ginfo = cl.getGroup(msg.to)
msgs = "bacup sudah on\nDi Group : " +str(ginfo.name)
else:
protectcancel.append(msg.to)
ginfo = cl.getGroup(msg.to)
msgs = "Berhasil mengaktifkan backup\nDi Group : " +str(ginfo.name)
cl.sendMessage(msg.to, "[Diaktifkan]\n" + msgs)
elif spl == 'off':
if msg.to in protectqr:
protectqr.remove(msg.to)
else:
msgs = ""
if msg.to in protectkick:
protectkick.remove(msg.to)
else:
msgs = ""
if msg.to in protectjoin:
protectjoin.remove(msg.to)
else:
msgs = ""
if msg.to in protectcancel:
protectcancel.remove(msg.to)
ginfo = cl.getGroup(msg.to)
msgs = "Berhasil menonaktifkan backup\nDi Group : " +str(ginfo.name)
else:
ginfo = cl.getGroup(msg.to)
msgs = "backup sudah off\nDi Group : " +str(ginfo.name)
cl.sendMessage(msg.to, "[Dinonaktifkan]\n" + msgs)
#===========KICKOUT============#
elif ("Kick " in msg.text):
if wait["selfbot"] == True:
if msg._from in admin:
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
targets = []
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
if target not in Bots:
try:
random.choice(ABC).kickoutFromGroup(msg.to, [target])
except:
pass
#===========ADMIN ADD============#
elif ("Adminadd " in msg.text):
if wait["selfbot"] == True:
if msg._from in admin:
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
targets = []
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
admin.append(target)
cl.sendMessage(msg.to,"Berhasil menambahkan admin")
except:
pass
elif ("Staffadd " in msg.text):
if wait["selfbot"] == True:
if msg._from in admin:
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
targets = []
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
staff.append(target)
cl.sendMessage(msg.to,"Berhasil menambahkan staff")
except:
pass
elif ("Botadd " in msg.text):
if wait["selfbot"] == True:
if msg._from in admin:
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
targets = []
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
Bots.append(target)
cl.sendMessage(msg.to,"Berhasil menambahkan bot")
except:
pass
elif ("Admindell " in msg.text):
if msg._from in admin:
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
targets = []
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
if target not in Team:
try:
admin.remove(target)
cl.sendMessage(msg.to,"Berhasil menghapus admin")
except:
pass
elif ("Staffdell " in msg.text):
if msg._from in admin:
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
targets = []
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
if target not in Team:
try:
staff.remove(target)
cl.sendMessage(msg.to,"Berhasil menghapus admin")
except:
pass
elif ("Botdell " in msg.text):
if msg._from in admin:
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
targets = []
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
if target not in Team:
try:
Bots.remove(target)
cl.sendMessage(msg.to,"Berhasil menghapus admin")
except:
pass
elif cmd == "admin:on" or text.lower() == 'admin:on':
if msg._from in admin:
wait["addadmin"] = True
cl.sendText(msg.to,"Kirim kontaknya...")
elif cmd == "admin:repeat" or text.lower() == 'admin:repeat':
if msg._from in admin:
wait["delladmin"] = True
cl.sendText(msg.to,"Kirim kontaknya...")
elif cmd == "staff:on" or text.lower() == 'staff:on':
if msg._from in admin:
wait["addstaff"] = True
cl.sendText(msg.to,"Kirim kontaknya...")
elif cmd == "staff:repeat" or text.lower() == 'staff:repeat':
if msg._from in admin:
wait["dellstaff"] = True
cl.sendText(msg.to,"Kirim kontaknya...")
elif cmd == "bot:on" or text.lower() == 'bot:on':
if msg._from in admin:
wait["addbots"] = True
cl.sendText(msg.to,"Kirim kontaknya...")
elif cmd == "bot:repeat" or text.lower() == 'bot:repeat':
if msg._from in admin:
wait["dellbots"] = True
cl.sendText(msg.to,"Kirim kontaknya...")
elif cmd == "refresh" or text.lower() == 'refresh':
if msg._from in admin:
wait["addadmin"] = False
wait["delladmin"] = False
wait["addstaff"] = False
wait["dellstaff"] = False
wait["addbots"] = False
wait["dellbots"] = False
wait["wblacklist"] = False
wait["dblacklist"] = False
wait["Talkwblacklist"] = False
wait["Talkdblacklist"] = False
cl.sendText(msg.to,"Berhasil di Refresh...")
elif cmd == "contact admin" or text.lower() == 'contact admin':
if msg._from in admin:
ma = ""
for i in admin:
ma = cl.getContact(i)
cl.sendMessage(msg.to, None, contentMetadata={'mid': i}, contentType=13)
elif cmd == "contact staff" or text.lower() == 'contact staff':
if msg._from in admin:
ma = ""
for i in staff:
ma = cl.getContact(i)
cl.sendMessage(msg.to, None, contentMetadata={'mid': i}, contentType=13)
elif cmd == "contact bot" or text.lower() == 'contact bot':
if msg._from in admin:
ma = ""
for i in Bots:
ma = cl.getContact(i)
cl.sendMessage(msg.to, None, contentMetadata={'mid': i}, contentType=13)
#===========COMMAND ON OFF============#
elif cmd == "notag on" or text.lower() == 'notag on':
if wait["selfbot"] == True:
if msg._from in admin:
wait["Mentionkick"] = True
cl.sendText(msg.to,"Notag diaktifkan")
elif cmd == "notag off" or text.lower() == 'notag off':
if wait["selfbot"] == True:
if msg._from in admin:
wait["MentionKick"] = False
cl.sendText(msg.to,"Notag dinonaktifkan")
elif cmd == "contact on" or text.lower() == 'contact on':
if wait["selfbot"] == True:
if msg._from in admin:
wait["contact"] = True
cl.sendText(msg.to,"Deteksi contact diaktifkan")
elif cmd == "contact off" or text.lower() == 'contact off':
if wait["selfbot"] == True:
if msg._from in admin:
wait["contact"] = False
cl.sendText(msg.to,"Deteksi contact dinonaktifkan")
elif cmd == "respon on" or text.lower() == 'respon on':
if wait["selfbot"] == True:
if msg._from in admin:
wait["detectMention"] = True
cl.sendText(msg.to,"Auto respon diaktifkan")
elif cmd == "respon off" or text.lower() == 'respon off':
if wait["selfbot"] == True:
if msg._from in admin:
wait["detectMention"] = False
cl.sendText(msg.to,"Auto respon dinonaktifkan")
elif cmd == "autojoin on" or text.lower() == 'autojoin on':
if wait["selfbot"] == True:
if msg._from in admin:
wait["autoJoin"] = True
cl.sendText(msg.to,"Autojoin diaktifkan")
elif cmd == "autojoin off" or text.lower() == 'autojoin off':
if wait["selfbot"] == True:
if msg._from in admin:
wait["autoJoin"] = False
cl.sendText(msg.to,"Autojoin dinonaktifkan")
elif cmd == "autoleave on" or text.lower() == 'autoleave on':
if wait["selfbot"] == True:
if msg._from in admin:
wait["autoLeave"] = True
cl.sendText(msg.to,"Autoleave diaktifkan")
elif cmd == "autoleave off" or text.lower() == 'autoleave off':
if wait["selfbot"] == True:
if msg._from in admin:
wait["autoLeave"] = False
cl.sendText(msg.to,"Autoleave dinonaktifkan")
elif cmd == "autoadd on" or text.lower() == 'autoadd on':
if wait["selfbot"] == True:
if msg._from in admin:
wait["autoAdd"] = True
cl.sendText(msg.to,"Auto add diaktifkan")
elif cmd == "autoadd off" or text.lower() == 'autoadd off':
if wait["selfbot"] == True:
if msg._from in admin:
wait["autoAdd"] = False
cl.sendText(msg.to,"Auto add dinonaktifkan")
elif cmd == "sticker on" or text.lower() == 'sticker on':
if wait["selfbot"] == True:
if msg._from in admin:
wait["sticker"] = True
cl.sendText(msg.to,"Deteksi sticker diaktifkan")
elif cmd == "sticker off" or text.lower() == 'sticker off':
if wait["selfbot"] == True:
if msg._from in admin:
wait["sticker"] = False
cl.sendText(msg.to,"Deteksi sticker dinonaktifkan")
elif cmd == "jointicket on" or text.lower() == 'jointicket on':
if wait["selfbot"] == True:
if msg._from in admin:
wait["autoJoinTicket"] = True
cl.sendText(msg.to,"Join ticket diaktifkan")
elif cmd == "jointicket off" or text.lower() == 'jointicket off':
if wait["selfbot"] == True:
if msg._from in admin:
wait["autoJoinTicket"] = False
cl.sendText(msg.to,"Notag dinonaktifkan")
#===========COMMAND BLACKLIST============#
elif ("Talkban " in msg.text):
if wait["selfbot"] == True:
if msg._from in admin:
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
targets = []
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
wait["Talkblacklist"][target] = True
cl.sendMessage(msg.to,"Berhasil menambahkan blacklist")
except:
pass
elif ("Untalkban " in msg.text):
if wait["selfbot"] == True:
if msg._from in admin:
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
targets = []
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
del wait["Talkblacklist"][target]
cl.sendMessage(msg.to,"Berhasil menghapus blacklist")
except:
pass
elif cmd == "talkban:on" or text.lower() == 'talkban:on':
if wait["selfbot"] == True:
if msg._from in admin:
wait["Talkwblacklist"] = True
cl.sendText(msg.to,"Kirim kontaknya...")
elif cmd == "untalkban:on" or text.lower() == 'untalkban:on':
if wait["selfbot"] == True:
if msg._from in admin:
wait["Talkdblacklist"] = True
cl.sendText(msg.to,"Kirim kontaknya...")
elif ("Ban " in msg.text):
if wait["selfbot"] == True:
if msg._from in admin:
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
targets = []
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
wait["blacklist"][target] = True
cl.sendMessage(msg.to,"Berhasil menambahkan blacklist")
except:
pass
elif ("Unban " in msg.text):
if wait["selfbot"] == True:
if msg._from in admin:
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
targets = []
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
del wait["blacklist"][target]
cl.sendMessage(msg.to,"Berhasil menghapus blacklist")
except:
pass
elif cmd == "ban:on" or text.lower() == 'ban:on':
if wait["selfbot"] == True:
if msg._from in admin:
wait["wblacklist"] = True
cl.sendText(msg.to,"Kirim kontaknya...")
elif cmd == "unban:on" or text.lower() == 'unban:on':
if wait["selfbot"] == True:
if msg._from in admin:
wait["dblacklist"] = True
cl.sendText(msg.to,"Kirim kontaknya...")
elif cmd == "banlist" or text.lower() == 'banlist':
if wait["selfbot"] == True:
if msg._from in admin:
if wait["blacklist"] == {}:
cl.sendMessage(msg.to,"Tidak ada blacklist")
else:
ma = ""
a = 0
for m_id in wait["blacklist"]:
a = a + 1
end = '\n'
ma += str(a) + ". " +cl.getContact(m_id).displayName + "\n"
cl.sendMessage(msg.to,"Blacklist User\n\n"+ma+"\nTotal[%s]Blacklist User" %(str(len(wait["blacklist"]))))
elif cmd == "talkbanlist" or text.lower() == 'talkbanlist':
if wait["selfbot"] == True:
if msg._from in admin:
if wait["Talkblacklist"] == {}:
cl.sendMessage(msg.to,"Tidak ada Talkban user")
else:
ma = ""
a = 0
for m_id in wait["Talkblacklist"]:
a = a + 1
end = '\n'
ma += str(a) + ". " +cl.getContact(m_id).displayName + "\n"
cl.sendMessage(msg.to,"Talkban User\n\n"+ma+"\nTotal[%s]Talkban User" %(str(len(wait["Talkblacklist"]))))
elif cmd == "blc" or text.lower() == 'blc':
if wait["selfbot"] == True:
if msg._from in admin:
if wait["blacklist"] == {}:
cl.sendMessage(msg.to,"Tidak ada blacklist")
else:
ma = ""
for i in wait["blacklist"]:
ma = cl.getContact(i)
cl.sendMessage(msg.to, None, contentMetadata={'mid': i}, contentType=13)
elif cmd == "clearban" or text.lower() == 'clearban':
if wait["selfbot"] == True:
if msg._from in admin:
wait["blacklist"] = {}
ragets = cl.getContacts(wait["blacklist"])
mc = "[%i]User Blacklist" % len(ragets)
cl.sendMessage(msg.to,"Sukses membersihkan " +mc)
#===========COMMAND SET============#
elif 'Set pesan: ' in msg.text:
if msg._from in admin:
spl = msg.text.replace('Set pesan: ','')
if spl in [""," ","\n",None]:
cl.sendMessage(msg.to, "Gagal mengganti Pesan Msg")
else:
wait["message"] = spl
cl.sendMessage(msg.to, "[Pesan Msg]\nPesan Msg diganti jadi :\n\n[{}]".format(str(spl)))
elif 'Set welcome: ' in msg.text:
if msg._from in admin:
spl = msg.text.replace('Set welcome: ','')
if spl in [""," ","\n",None]:
cl.sendMessage(msg.to, "Gagal mengganti Welcome Msg")
else:
wait["welcome"] = spl
cl.sendMessage(msg.to, "[Welcome Msg]\nWelcome Msg diganti jadi :\n\n[{}]".format(str(spl)))
elif 'Set respon: ' in msg.text:
if msg._from in admin:
spl = msg.text.replace('Set respon: ','')
if spl in [""," ","\n",None]:
cl.sendMessage(msg.to, "Gagal mengganti Respon Msg")
else:
wait["Respontag"] = spl
cl.sendMessage(msg.to, "[Respon Msg]\nRespon Msg diganti jadi :\n\n[{}]".format(str(spl)))
elif 'Set spam: ' in msg.text:
if msg._from in admin:
spl = msg.text.replace('Set spam: ','')
if spl in [""," ","\n",None]:
cl.sendMessage(msg.to, "Gagal mengganti Spam")
else:
Setmain["message1"] = spl
cl.sendMessage(msg.to, "[Spam Msg]\nSpam Msg diganti jadi :\n\n[{}]".format(str(spl)))
elif 'Set sider: ' in msg.text:
if msg._from in admin:
spl = msg.text.replace('Set sider: ','')
if spl in [""," ","\n",None]:
cl.sendMessage(msg.to, "Gagal mengganti Sider Msg")
else:
wait["mention"] = spl
cl.sendMessage(msg.to, "[Sider Msg]\nSider Msg diganti jadi :\n\n[{}]".format(str(spl)))
elif text.lower() == "cek pesan":
if msg._from in admin:
cl.sendMessage(msg.to, "[Pesan Msg]\nPesan Msg mu :\n\n[ " + str(wait["message"]) + " ]")
elif text.lower() == "cek welcome":
if msg._from in admin:
cl.sendMessage(msg.to, "[Welcome Msg]\nWelcome Msg mu :\n\n[ " + str(wait["welcome"]) + " ]")
elif text.lower() == "cek respon":
if msg._from in admin:
cl.sendMessage(msg.to, "[Respon Msg]\nRespon Msg mu :\n\n[ " + str(wait["Respontag"]) + " ]")
elif text.lower() == "cek spam":
if msg._from in admin:
cl.sendMessage(msg.to, "[Spam Msg]\nSpam Msg mu :\n\n[ " + str(Setmain["message1"]) + " ]")
elif text.lower() == "cek sider":
if msg._from in admin:
cl.sendMessage(msg.to, "[Sider Msg]\nSider Msg mu :\n\n[ " + str(wait["mention"]) + " ]")
#===========JOIN TICKET============#
elif "/ti/g/" in msg.text.lower():
if wait["selfbot"] == True:
if settings["autoJoinTicket"] == True:
link_re = re.compile('(?:line\:\/|line\.me\/R)\/ti\/g\/([a-zA-Z0-9_-]+)?')
links = link_re.findall(text)
n_links = []
for l in links:
if l not in n_links:
n_links.append(l)
for ticket_id in n_links:
group = cl.findGroupByTicket(ticket_id)
cl.acceptGroupInvitationByTicket(group.id,ticket_id)
cl.sendMessage(msg.to, "Masuk : %s" % str(group.name))
group1 = ki.findGroupByTicket(ticket_id)
ki.acceptGroupInvitationByTicket(group1.id,ticket_id)
ki.sendMessage(msg.to, "Masuk : %s" % str(group.name))
group2 = kk.findGroupByTicket(ticket_id)
kk.acceptGroupInvitationByTicket(group2.id,ticket_id)
kk.sendMessage(msg.to, "Masuk : %s" % str(group.name))
group3 = kc.findGroupByTicket(ticket_id)
kc.acceptGroupInvitationByTicket(group3.id,ticket_id)
kc.sendMessage(msg.to, "Masuk : %s" % str(group.name))
except Exception as error:
print (error)
while True:
    # Main polling loop: fetch up to 50 pending LINE operations and hand
    # each one to the bot() command handler defined above.
    try:
        ops = poll.singleTrace(count=50)
        if ops is not None:
            for op in ops:
                # Advance the revision cursor BEFORE dispatching so the
                # operation is never re-delivered, even if bot() raises.
                poll.setRevision(op.revision)
                # NOTE(review): start() immediately followed by join() makes
                # this effectively synchronous -- each operation is handled
                # to completion before the next one.  Kept deliberately,
                # because bot() mutates shared module state (wait, Setmain,
                # cctv, admin, ...) and is not thread-safe; confirm before
                # attempting real concurrency here.
                worker = threading.Thread(target=bot, args=(op,))
                worker.start()
                worker.join()
    except Exception as e:
        # Was a bare silent `pass`, which hid every polling failure.
        # Surface the error (matching the handler's own `print(error)`
        # style) and keep the loop alive -- best-effort polling.
        print(e)
| [
"noreply@github.com"
] | noreply@github.com |
940da212b1914c6cfacdba04faedca860cb3c8db | 03d0e2c1c0557e2d7a229a3e149cc5c39fb1d019 | /application/application/models/Category.py | 9524afab04def769de0343e468cba6a2a73910d9 | [
"MIT"
] | permissive | Terkea/kiosk | 6b8441431dbc968ce5abada3ed3d00448a23f9ef | 86a6520f9d91d7bacca915d3740da802e3efb510 | refs/heads/master | 2022-06-14T22:06:57.655553 | 2020-06-26T01:25:04 | 2020-06-26T01:25:04 | 239,501,379 | 0 | 0 | MIT | 2022-05-25T05:12:48 | 2020-02-10T12:00:15 | CSS | UTF-8 | Python | false | false | 431 | py | from sqlalchemy import Column, Integer, String, Sequence
from sqlalchemy.orm import relationship
from application.database import Base
class Category(Base):
__tablename__ = 'category'
id = Column(Integer, Sequence('id_seq'), primary_key=True)
name = Column(String(255), nullable=False)
autoload = True
event = relationship("Event")
def __repr__(self):
return str(self.__dict__) | [
"terkeabt@gmail.com"
] | terkeabt@gmail.com |
a26d82926450e0d98d636e666521f79b6eb9e2d3 | 04091b59cad8c3aadf9cde04e50e4aceb6698789 | /temperature.py | c5d2994ee6cae967fb8bf012d2c94cea12f85d03 | [] | no_license | trent-hodgins-01/ICS3U-Unit5-01-Python | 8093bd2ce31ef10dddb3e2ac37729f5b25325bda | d66bb316cbb41415fbd7013635663ea340ed8eb2 | refs/heads/main | 2023-08-18T07:12:30.738185 | 2021-10-12T16:43:23 | 2021-10-12T16:43:23 | 416,400,189 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 775 | py | # !/user/bin/env python3
# Created by Trent Hodgins
# Created on 10/12/2021
# This is the temperature program
# The user enters in the temperature in celsius
# The program displays the temperature in Fahrenheit
def celsius_to_fahrenheit():
# calculates celsius to fahrenheit
# input
celsius_as_string = input("Enter in the temperature(Celsius): ")
# process
try:
celsius_as_number = int(celsius_as_string)
fahrenheit = (9 / 5) * celsius_as_number + 32
# output
print("It is {0}° Fahrenheit".format(fahrenheit))
except Exception:
print("Invalid input")
def main():
# this function just calls other functions
# call functions
celsius_to_fahrenheit()
if __name__ == "__main__":
main()
| [
"ubuntu@ip-172-31-12-63.ec2.internal"
] | ubuntu@ip-172-31-12-63.ec2.internal |
a11e6a88e2df0cc8ed5a8508675ce893139ef5e2 | 9f3f0255e06cf399a6f2e984073c0dd401635e60 | /django/django_orm/LoginRegistrationCopy/apps/LoginRegistrationApps/migrations/0003_auto_20190918_0046.py | b4bf6a865dac09cb74e2a359e4294bfeb2366efd | [] | no_license | Nassertakkesh/Python | dd51ddeb7340060d1d31e1420b6beeae86ffdff7 | 23932ab6ee1d545cc022737e5cba8937e0ea1bba | refs/heads/master | 2022-10-09T01:19:47.895585 | 2019-10-21T04:09:02 | 2019-10-21T04:09:02 | 216,474,407 | 0 | 1 | null | 2022-10-02T20:27:50 | 2019-10-21T04:00:44 | Python | UTF-8 | Python | false | false | 430 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2019-09-18 00:46
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('LoginRegistrationApps', '0002_comment_post'),
]
operations = [
migrations.RenameField(
model_name='post',
old_name='posts',
new_name='users',
),
]
| [
"Nasser@Nassers-MacBook-Pro.local"
] | Nasser@Nassers-MacBook-Pro.local |
7735edf0ede0e46b192766f22a139d6b3d34941b | 712a568549a854b1457a327992b6e77d3a80528e | /framework/migrations/0003_auto_20160612_0323.py | f5d98047c21a97faf32958f20bebf255cbc761d4 | [] | no_license | hugoren/drf_frame | 62b96516350906bb93aba9510d888fbc03179cd1 | 0124967817918fc93555ede40703fbbd7f90d0e3 | refs/heads/master | 2021-01-17T08:36:52.868610 | 2016-07-05T07:42:00 | 2016-07-05T07:42:00 | 60,762,851 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 509 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('framework', '0002_auto_20160612_0245'),
]
operations = [
migrations.AlterField(
model_name='book',
name='owner',
field=models.ForeignKey(related_name='framework_book_owner', verbose_name=b'owner', to=settings.AUTH_USER_MODEL),
),
]
| [
"chenpengren@ymt.corp"
] | chenpengren@ymt.corp |
382bbfa2c7de9ffedc43567a39225596a5c39d23 | b88edefdc174435abac8692b309dc2a738117000 | /main.py | a8317f556834f5f6d9c775a06de2ce0e9b3de459 | [
"MIT"
] | permissive | chenneng/FashionAI | 17036196f2e8fdec91ad9276231290c20f85cd32 | a462bb2eafb70a4f3e6a59da5ba79046d6aef72f | refs/heads/master | 2020-04-19T10:42:30.425785 | 2019-02-19T11:43:44 | 2019-02-19T11:43:44 | 168,148,055 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,081 | py | import os
import sys
import os.path as osp
import argparse
import torch
import torch.nn as nn
import torch.optim as optim
from serialization import Logger
from model import creat_model
from dataset import fashionData
from train import train, evaluate
import torchvision.transforms as transforms
parser = argparse.ArgumentParser(description='fashion classification')
# ---------------------------------------------------------------------------
# Command-line configuration (the ArgumentParser is created above).
# NOTE(review): argparse `type = bool` does NOT parse "False"/"0" -- any
# non-empty string becomes True.  Kept as-is to preserve the CLI surface,
# but e.g. `--gpu False` will still enable the GPU path; verify callers.
# ---------------------------------------------------------------------------
parser.add_argument('--data_root', type = str, default = '../datasets/fashionAI_attributes_train1', help = 'data root path')
parser.add_argument('--batch', type = int, default = 128, help = 'batch size for training (default: 128)')
parser.add_argument('--test_batch', type = int, default = 64, help = 'batch size for testing (default: 64)')
parser.add_argument('--epochs', type = int, default = 30, help = 'number of epochs')
# Fix: the help text previously claimed "default: 0.001" while the actual
# default is 0.0001; the message now matches the code.
parser.add_argument('--lr', type = float, default = 0.0001, help = 'learning rate (default: 0.0001)')
parser.add_argument('--adjust_lr', type = bool, default = True, help = 'adjust learning rate')
parser.add_argument('--momentum', type = float, default = 0.9, help = 'SGD momentum (default: 0.9)')
parser.add_argument('--gpu', type = bool, default = True, help = 'GPU training')
parser.add_argument('--shuffle', type = bool, default = True, help = 'data shuffle')
parser.add_argument('--resume', type = str, default = None, help = 'resume model path')
parser.add_argument('--height', type = int, default = 224, help = 'height')
parser.add_argument('--width', type = int, default = 224, help = 'width')
parser.add_argument('--evaluate_interval', type = int, default = 1, help = 'epochs before evaluate model')
parser.add_argument('--save_interval', type = int, default = 5, help = 'epochs before save model')
parser.add_argument('--save_dir', type = str, default = './models', help = 'log and model save dir')
parser.add_argument('--test_only', type = bool, default = False, help = 'only evaluate the model')
args = parser.parse_args()
# Augmentation + normalization pipeline; mean/std presumably measured on the
# fashionAI training set -- TODO confirm.
data_transform = transforms.Compose(transforms = [
    transforms.RandomHorizontalFlip(),
    transforms.RandomRotation(10),
    transforms.Resize((args.height, args.width), interpolation = 3),  # 3 == PIL.Image.BICUBIC
    transforms.ColorJitter(brightness = 0.5, contrast = 0.5, hue = 0.5),
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.647, 0.609, 0.596], std=[0.089, 0.093, 0.094])
])
kwargs = {'num_workers': 6, 'pin_memory': True} if args.gpu else {}
model = creat_model()  # project helper defined elsewhere (sic: "creat")
if args.resume:
    #model.load_state_dict(torch.load(args.resume))
    # strip the 'module.' prefix that nn.DataParallel adds to checkpoint keys
    model.load_state_dict({k.replace('module.',''):v for k,v in torch.load(args.resume).items()})
if args.gpu:
    model = nn.DataParallel(model).cuda()
#optimizer = optim.SGD(model.parameters(), lr = args.lr, momentum = args.momentum)
optimizer = optim.Adam(model.parameters(), lr = args.lr)
def adjust_lr(base_lr, optimizer, epoch):
    """Step-decay schedule: divide the learning rate by 10 every 10 epochs.

    The decayed rate is written into every parameter group of *optimizer*.
    """
    decayed = base_lr * (0.1 ** (epoch // 10))
    for group in optimizer.param_groups:
        group['lr'] = decayed
# Create the output directory for logs and checkpoints.
if not os.path.exists(args.save_dir):
    os.makedirs(args.save_dir)
# Tee stdout into a log file (Logger is a project helper).
sys.stdout = Logger(osp.join(args.save_dir, 'log_train.txt'))
print('let us begin:')
# NOTE(review): the test split reuses the augmenting `data_transform`
# (random flip/rotation/jitter); evaluation is usually done with a
# deterministic transform -- confirm this is intended.
trainset = fashionData(args.data_root, split = 0.8, data_type = 'train', transform = data_transform)
testset = fashionData(args.data_root, split = 0.8, data_type = 'test', transform = data_transform)
train_loader = torch.utils.data.DataLoader(trainset, batch_size = args.batch, shuffle = True, **kwargs)
test_loader = torch.utils.data.DataLoader(testset, batch_size = args.test_batch, shuffle = True, **kwargs)
# Evaluation-only mode: requires a checkpoint to restore from.
if args.test_only and args.resume is not None:
    evaluate(model, test_loader)
# Training mode: periodic evaluation and checkpointing.
if args.test_only is False:
    start_epoch = 0
    for epoch in range(start_epoch, args.epochs):
        if args.adjust_lr:
            adjust_lr(args.lr, optimizer, epoch)
        train(epoch, model, optimizer, train_loader)
        # always evaluate/save on the final epoch as well
        if epoch % args.evaluate_interval == 0 or epoch == args.epochs - 1:
            evaluate(model, test_loader)
        if epoch % args.save_interval == 0 or epoch == args.epochs - 1:
            print('saving model..')
            torch.save(model.state_dict(), osp.join(args.save_dir, ('model_{}.pth'.format(epoch))))
| [
"beiluo1990@vip.qq.com"
] | beiluo1990@vip.qq.com |
b5ddb5c8af232999eb8ae226c25d305066c76157 | fddc2ed5301b00f668bcb772518e0348db459538 | /convokit/communityEmbedder.py | 143d36c2f5b9fefe78dddccf919d797401191a38 | [
"MIT"
] | permissive | deepthimhegde/Cornell-Conversational-Analysis-Toolkit | 289fd22a81d9d06c7aeb5270c11acc4ec40424bf | eb9a103f8d5e34d378b0b6d6bda3fa43587363a1 | refs/heads/master | 2020-05-29T11:16:01.765154 | 2019-05-17T18:29:27 | 2019-05-17T18:29:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,889 | py | import numpy as np
from sklearn.decomposition import TruncatedSVD
from sklearn.manifold import TSNE
from collections import defaultdict
from .transformer import Transformer
class CommunityEmbedder(Transformer):
    """
    Must be run after threadEmbedder.fit_transform().

    Groups threads together into communities in the thread-embedding space,
    for visualization or other such purposes.

    :param community_key: Key in the "meta" dictionary of each utterance
        whose corresponding value is used as the community label for that
        utterance (see threadEmbedder)
    :param n_components: Number of dimensions to embed communities into
    :param method: Embedding method; "svd", "tsne" or "none"
    """

    def __init__(self, community_key=None, n_components=2, method="none"):
        self.community_key = community_key
        self.n_components = n_components
        self.method = method

    def transform(self, corpus):
        """
        Same as fit_transform()
        """
        return self.fit_transform(corpus)

    def fit_transform(self, corpus):
        """
        :param corpus: the Corpus to use
        :return: a Corpus with new meta key: "communityEmbedder",
            value: Dict, containing "pts": an array with rows corresponding
            to embedded communities, and "labels": an array whose ith entry
            is the community of the ith row of X.
        """
        if self.community_key is None:
            raise RuntimeError("Must specify community_key to retrieve label information from utterance")

        corpus_meta = corpus.get_meta()
        if "threadEmbedder" not in corpus_meta:
            raise RuntimeError("Missing threadEmbedder metadata: "
                               "threadEmbedder.fit_transform() must be run on the Corpus first")

        thread_data = corpus_meta["threadEmbedder"]
        thread_matrix = thread_data["X"]
        thread_roots = thread_data["roots"]

        # Dispatch table from method name to dimensionality reducer
        # (None means "use the thread embedding as-is").
        reducers = {"svd": TruncatedSVD, "tsne": TSNE, "none": None}
        method_name = self.method.lower()
        if method_name not in reducers:
            raise Exception("Invalid embed_communities embedding method")
        reducer = reducers[method_name]

        if reducer is None:
            embedded = thread_matrix
        else:
            embedded = reducer(n_components=self.n_components).fit_transform(thread_matrix)

        # Community label of each thread root, read from utterance metadata.
        root_labels = [corpus.get_utterance(root).get("meta")[self.community_key]
                      for root in thread_roots]

        # Group the unit-normalized thread vectors by community label.
        grouped = defaultdict(list)
        for vec, label in zip(embedded, root_labels):
            grouped[label].append(vec / np.linalg.norm(vec))

        labels, groups = zip(*grouped.items())
        # One point per community: the mean of its normalized thread vectors.
        centroids = [np.mean(group, axis=0) for group in groups]

        corpus.add_meta("communityEmbedder", {"pts": centroids, "labels": labels})
        return corpus
| [
"calebchiam@gmail.com"
] | calebchiam@gmail.com |
742366b9a5cef021a809ed02a8e8bfe456dc5b99 | 9244dcf5374761c00f87e3b1f16ba7346601bb7a | /PyPoll/Main.py | 2843fdea5c634bf36725882c9782768c77c7ead4 | [] | no_license | MikeT9/Python-Challenge | 017ea89561722d691dbb05123c73bf878baae107 | 3198c2dbf25c2f75868abfee1cc266f2cc6e446d | refs/heads/main | 2023-02-27T02:15:22.578614 | 2021-02-06T06:07:51 | 2021-02-06T06:07:51 | 334,793,301 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,171 | py | # Import the os module
# This will allow us to create file paths across operating systems
import os
# Module for reading CSV files
import csv
# Path to the election data (relative to the working directory).
csvpath = os.path.join('Resources', 'election_data.csv')

# Tally the votes.  The original version hard-coded exactly four candidates
# (separate Khan/Correy/Li/O'Tooley lists) and crashed with an IndexError on
# any other ballot; a dict keyed by candidate generalizes this while keeping
# the report order (first appearance in the file) identical.
with open(csvpath) as csvfile:
    # CSV reader specifies delimiter and variable that holds contents
    csvreader = csv.reader(csvfile, delimiter=',')
    # Read (and skip) the header row first
    csv_header = next(csvreader)
    total_votes = 0
    vote_counts = {}  # candidate name -> number of votes, insertion-ordered
    for row in csvreader:
        total_votes += 1
        candidate = row[2]
        vote_counts[candidate] = vote_counts.get(candidate, 0) + 1

# Winner = candidate with the most votes; on a tie, the one appearing first
# in the file wins (same behaviour as the original index-of-max lookup).
winner = max(vote_counts, key=vote_counts.get)

# Build the report once; it is both printed and written to the output file.
report_lines = [
    'Election Results',
    '----------------',
    f'Total Votes: {total_votes}',
    '----------------',
]
for candidate, votes in vote_counts.items():
    report_lines.append(f'{candidate}: {((votes / total_votes) * 100):.3f}% ({votes})')
report_lines.append('----------------')
report_lines.append(f'Winner: {winner}')
report_lines.append('----------------')

# Print the analysis to the terminal.
for line in report_lines:
    print(line)

# Write the same analysis to a text file.
output_file = os.path.join("Analysis", "Election_Analysis.txt")
with open(output_file, "w", newline="") as datafile:
    writer = csv.writer(datafile, delimiter=',')
    for line in report_lines:
        writer.writerow([line])
"michaelraythomas.hou@gmail.com"
] | michaelraythomas.hou@gmail.com |
69c0254fc58677d9c90edb6ffe6b6a837050fef6 | 95aabdb737b611c9b228a15e625a702b5061c36d | /clickstream/oozie/throttle_ip_ua_ref.py | 79414a487cb0f6fade5c07248da0229cff5201e6 | [] | no_license | santhoshtr/wmf | 70f0cbcb9b07dbb532270abca1941e96c94ea8bf | 0d034c012176ae8510b701e73ac932ed39a90ffb | refs/heads/master | 2021-01-11T17:13:29.881896 | 2016-01-20T19:23:44 | 2016-01-20T19:23:44 | 54,969,878 | 1 | 0 | null | 2016-03-29T11:20:49 | 2016-03-29T11:20:49 | null | UTF-8 | Python | false | false | 2,348 | py | import sys
import math
"""
This script is used for removing traffic from fast moving crawlers.
It addresses the scenario in which the crawler requests a page
and then proceeds to request pages linked from that page at a higher
frequency than a human would, say, by opening tabs. It is myopic in the sense
that it only operates on one minute of requests at a time.
Input:
Records of the schema: [ip, user_agent, referer, minute, second, uri_path],
sorted by (ip, user_agent, referer, minute, second)
When using this script as a hive transform function be sure to
distribute by (ip, user_agent, referer, minute) and sort by
(ip, user_agent, referer, minute, second)
"""
def throttle(requests):
    """
    Computes the rate of requests with the same referer and compares it
    to a dynamic threshold.  If the rate is too high, the whole set of
    requests is discarded (assumed to be crawler traffic); otherwise the
    rows are emitted unchanged.
    """
    second_index = 4
    num_requests = float(len(requests))
    # one pageview per minute is certainly acceptable
    if num_requests == 1.0:
        emit(requests)
    # an empty referer may give too low entropy
    #elif len(requests[0][2]) < 2:
    #    emit(requests)
    # check the request rate
    else:
        # Rows are sorted by second; malformed fields fall back to the widest
        # possible interval bounds.  (Narrowed from a bare `except:` so that
        # unexpected errors are no longer silently swallowed.)
        try:
            start_second = float(requests[0][second_index])
        except (ValueError, TypeError, IndexError):
            start_second = 0.0
        try:
            stop_second = float(requests[-1][second_index])
        except (ValueError, TypeError, IndexError):
            stop_second = 59.0
        request_interval = 1.0 + stop_second - start_second
        rate = num_requests / request_interval
        # maximum rate at which a human could plausibly generate that many views
        max_rate = num_requests / (num_requests + 0.1 * num_requests ** 2)
        if rate < max_rate:
            emit(requests)


def emit(requests):
    """Print the rows back out tab-separated, one per line."""
    # Parenthesised single-argument print behaves identically under
    # Python 2 (the rest of this file's vintage) and Python 3.
    for r in requests:
        print('\t'.join(r))
def main():
    """
    Stream rows from stdin, collecting consecutive records that share the
    same (ip, user_agent, referer, minute) key, and hand each completed
    group to throttle() to decide whether it is emitted.
    """
    group_rows = []
    active_key = None
    flush_pending = False
    for raw_line in sys.stdin:
        fields = raw_line.rstrip('\n').split('\t')
        # silently skip malformed rows
        if len(fields) != 6:
            continue
        key = fields[:4]
        if key != active_key:
            # key changed: dispatch the finished group before starting anew
            if flush_pending:
                throttle(group_rows)
            group_rows = []
            active_key = key
            flush_pending = True
        group_rows.append(fields)
    # dispatch the trailing group, if any rows were seen at all
    if flush_pending:
        throttle(group_rows)
if __name__ == '__main__':
main() | [
"ellerywulczyn@gmail.com"
] | ellerywulczyn@gmail.com |
f7584104cbead887604e66f450506be4fe13361b | 3cf1cd66536f020041f601137338b2378040f133 | /syp/subrecipes/validate.py | f9b3025d2620f4de8bb7761317cc0680e7be4898 | [] | no_license | carlosfranzreb/syp | 7e8b4f2b71cf6ef29b47097ca5a8ee7a80f755b4 | c4a33e7ab2741e1c63b8fad20c1d48977ea18654 | refs/heads/master | 2022-07-02T16:04:47.881506 | 2020-05-03T19:13:25 | 2020-05-03T19:13:25 | 232,362,822 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,626 | py | """ Help functions to validate subrecipes.
Name must be unique for each user.
Ingredients must be chosen from the list.
At least one non-empty step. Max. length of steps is checked by the form. """
from flask_login import current_user
from syp.models.subrecipe import Subrecipe
from syp.models.ingredient import Ingredient
from syp.recipes.utils import get_url_from_name
def validate(form):
    """Validate a new subrecipe form.

    Checks that the name (and the URL derived from it) is unique among the
    current user's subrecipes and that the name placeholder was changed,
    then appends any ingredient errors.  Returns a list of error messages
    (empty when the form is valid).
    """
    name = form.name.data
    url = get_url_from_name(name)
    user_subrecipes = Subrecipe.query.filter_by(id_user=current_user.id)

    name_errors = []
    if name == 'Nueva subreceta':
        # the placeholder name must be replaced
        name_errors.append(
            'Cambia el nombre de la subreceta donde pone "Nueva subreceta"'
        )
    elif user_subrecipes.filter_by(name=name).first() is not None:
        name_errors.append(
            f'Ya existe una subreceta llamada "{name}". ¡Cambia el nombre!'
        )
    elif user_subrecipes.filter_by(url=url).first() is not None:
        name_errors.append(
            f'Ya existe una subreceta cuya URL es "{url}". ¡Cambia el nombre!'
        )

    return name_errors + validate_ingredients(form)
def validate_ingredients(form):
    """Verify that every chosen ingredient exists in the catalogue.

    Returns a list with one error message per unknown ingredient.
    """
    messages = []
    for entry in form.ingredients.data:
        ingredient_name = entry['ingredient']
        if Ingredient.query.filter_by(name=ingredient_name).first() is None:
            messages.append(
                f'El ingrediente "{ingredient_name}" no existe. Escoge uno de la lista.'
            )
    return messages
| [
"carlosfranzreb@gmail.com"
] | carlosfranzreb@gmail.com |
3983fd5b19edfe65dc64e462e25cb5f0984e844d | dcd6f098ad58b59277cc3de9ec665a16e16106c4 | /module/MyRBM.py | f5f01e7d10580250e5b56d2a8ad0263c086577e5 | [] | no_license | zanghu/MyDNNmodule | 37d956cceeeaaaa6dece51ca78ef24e6d4a8332b | 4cf776d7c110cc2979617a8e7c3d7fc6ce335b94 | refs/heads/master | 2016-08-04T02:16:31.277117 | 2014-03-08T04:01:59 | 2014-03-08T04:01:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,110 | py | import time
import os
from itertools import izip
import copy
import numpy
import theano
import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams
from theano.sandbox.rng_mrg import MRG_RandomStreams
from theano.compat.python2x import OrderedDict
from pylearn2.base import Block
from pylearn2.models.model import Model
from pylearn2.linear.matrixmul import MatrixMul
from pylearn2.costs.cost import Cost
from pylearn2.datasets.mnist import MNIST
from pylearn2.space import VectorSpace
from pylearn2.costs.cost import SumOfCosts
theano.config.compute_test_value = 'off'
class HonglakLeeSparse(Cost):
    """Lifetime-sparsity penalty in the style of Lee et al.: the squared
    deviation of each hidden unit's batch-mean activation from a target
    activation rate p, summed over hidden units."""

    def __init__(self, p=0.02):
        # target mean activation per hidden unit
        self.p = p

    def expr(self, model, data):
        """Symbolic sparsity cost for the visible batch `data`."""
        hidden_means = model.propup(data)[-1]
        per_unit_mean = T.mean(hidden_means, axis=0, dtype=theano.config.floatX)
        return T.sum(T.square(self.p - per_unit_mean))

    def get_data_specs(self, model):
        return (model.get_input_space(), model.get_input_source())
class MyContrastiveDivergence(Cost):
    """Base class for CD-k style costs (CD and persistent-CD variants).

    Training is driven entirely by get_gradients() in the subclasses, so
    expr() deliberately returns None: contrastive divergence has no
    tractable scalar objective.
    """

    def __init__(self, k, chain_num=None): # CD-k
        # number of Gibbs steps per update (the "k" of CD-k)
        self.k = k
        # number of persistent fantasy chains; only the PCD variants use it
        self.chain_num = chain_num

    def expr(self, model, data):
        # no scalar cost -- gradients come from get_gradients() instead
        return None

    def get_data_specs(self, model):
        return model.get_monitoring_data_specs()
class MyCD_for(MyContrastiveDivergence):
    # CD-k whose k Gibbs steps are unrolled with a plain Python loop
    # (building k copies of the sampling graph), unlike the scan variants.
    def get_gradients(self, model, data, ** kwargs):
        """Build CD-k gradients: positive phase on the data batch, negative
        phase on the visible sample after k Gibbs steps."""
        #print 'get_gradients'
        pos_v = data
        #chain_start = pos_v
        v_samples = pos_v
        #print 'v_samples', v_samples.ndim
        # k full v -> h -> v Gibbs steps; gibbs_vhv()[-1] is the visible sample
        for i in xrange(self.k):
            v_samples = model.gibbs_vhv(v_samples)[-1]
        #[act_hids, hid_mfs, hid_samples, act_vis, vis_mfs, vis_samples], updates = theano.scan(fn = model.gibbs_vhv, sequences=None,
        #outputs_info=[None, None, None, None, None, chain_start], non_sequences=None, n_steps=self.k)
        neg_v = v_samples
        # CD objective: mean free energy of data minus that of negative samples
        cost = -(- model.free_energy(pos_v).mean() + model.free_energy(neg_v).mean())
        params = list(model.get_params())
        # samples are treated as constants: no gradient flows through the chain
        grads = T.grad(cost, params, disconnected_inputs = 'ignore', consider_constant=[pos_v, neg_v])
        gradients = OrderedDict(izip(params, grads))
        updates = OrderedDict()
        return gradients, updates
class MyCD_scan(MyContrastiveDivergence):
    # CD-k implemented with theano.scan over gibbs_vhv (v -> h -> v chain).
    def get_gradients(self, model, data, ** kwargs):
        """CD-k gradients from the free-energy gap between the data batch
        and the visible sample after k scanned Gibbs steps."""
        #print 'get_gradients'
        pos_v = data
        #chain_start = pos_v
        v_samples = pos_v
        #print 'v_samples', v_samples.ndim
        # scan yields the six gibbs_vhv outputs per step plus the RNG updates
        [act_hids, hid_mfs, hid_samples, act_vis, vis_mfs, vis_samples], scan_updates = theano.scan(fn = model.gibbs_vhv, sequences=None,
            outputs_info=[None, None, None, None, None, v_samples], non_sequences=None, n_steps=self.k)
        # last visible sample of the chain is the negative particle
        neg_v = vis_samples[-1]
        cost = -(- model.free_energy(pos_v).mean() + model.free_energy(neg_v).mean())
        params = list(model.get_params())
        # chain samples are constants: no backprop through the sampler
        grads = T.grad(cost, params, disconnected_inputs = 'ignore', consider_constant=[pos_v, neg_v])
        gradients = OrderedDict(izip(params, grads))
        updates = OrderedDict()
        # the random-stream updates from scan must reach the train function
        updates.update(scan_updates) # add scan_updates
        return gradients, updates
class MyCD_energy_scan(MyContrastiveDivergence):
    # CD-k variant expressed with the joint energy E(v, h) rather than the
    # free energy F(v); the chain is scanned h -> v -> h via gibbs_hvh.
    def get_gradients(self, model, data, ** kwargs):
        """CD-k gradients from the energy gap between the (data, sampled h)
        pair and the (v, h) pair at the end of k Gibbs steps."""
        #print 'get_gradients'
        pos_v = data
        # positive hidden sample drawn once from the data batch
        pos_h = model.sample_h_given_v(pos_v)[-1]
        #chain_start = pos_v
        h_samples = pos_h
        #print 'v_samples', v_samples.ndim
        # scan yields the six gibbs_hvh outputs per step plus the RNG updates
        [act_vis, vis_mfs, vis_samples, act_hids, hid_mfs, hid_samples], scan_updates = theano.scan(fn = model.gibbs_hvh, sequences=None,
            outputs_info=[None, None, None, None, None, h_samples], non_sequences=None, n_steps=self.k)
        # last (v, h) pair of the chain is the negative particle
        neg_v = vis_samples[-1]
        neg_h = hid_samples[-1]
        cost = -(- model.energy(pos_v, pos_h).mean() + model.energy(neg_v, neg_h).mean())
        params = list(model.get_params())
        # all samples are constants: no backprop through the sampler
        grads = T.grad(cost, params, disconnected_inputs = 'ignore', consider_constant=[pos_v, pos_h, neg_v, neg_h])
        gradients = OrderedDict(izip(params, grads))
        updates = OrderedDict()
        # the random-stream updates from scan must reach the train function
        updates.update(scan_updates) # add scan_updates
        return gradients, updates
class MyCD_free_energy_scan(MyContrastiveDivergence):
    # CD-k using the free-energy formulation and theano.scan over gibbs_vhv;
    # this is the variant returned by MyRBM.get_default_cost().
    def get_gradients(self, model, data, ** kwargs):
        """CD-k gradients: free energy of the data batch vs. free energy of
        the visible sample after k scanned Gibbs steps started at the data."""
        #print 'get_gradients'
        pos_v = data
        #pos_h = model.sample_h_given_v(pos_v)[-1]
        #chain_start = pos_v
        #h_samples = pos_h
        #print 'v_samples', v_samples.ndim
        # scan yields the six gibbs_vhv outputs per step plus the RNG updates
        [act_hids, hid_mfs, hid_samples, act_vis, vis_mfs, vis_samples], scan_updates = theano.scan(fn = model.gibbs_vhv, sequences=None,
            outputs_info=[None, None, None, None, None, pos_v], non_sequences=None, n_steps=self.k)
        # last visible sample of the chain is the negative particle
        neg_v = vis_samples[-1]
        #neg_h = hid_samples[-1]
        cost = -(- model.free_energy(pos_v).mean() + model.free_energy(neg_v).mean())
        params = list(model.get_params())
        # chain samples are constants: no backprop through the sampler
        grads = T.grad(cost, params, disconnected_inputs = 'ignore', consider_constant=[pos_v, neg_v])
        gradients = OrderedDict(izip(params, grads))
        updates = OrderedDict()
        # the random-stream updates from scan must reach the train function
        updates.update(scan_updates) # add scan_updates
        return gradients, updates
class MyPCD_for(MyContrastiveDivergence):
    """Persistent CD-k with the k Gibbs steps unrolled in a Python loop.

    The fantasy chains live in a theano shared variable that is advanced
    through the returned updates dict, so they persist across SGD steps.
    """

    def get_gradients(self, model, data, ** kwargs):
        """Build PCD-k gradients: positive phase on the data batch, negative
        phase on the persistent fantasy chains after k Gibbs steps."""
        # Fix: the chain buffer previously defaulted to float64; it must be
        # created with theano.config.floatX (as MyPCD_scan already does) or
        # float32/GPU runs fail on a dtype mismatch.
        chain_start = theano.shared(numpy.zeros(shape=(self.chain_num, model.n_vis), dtype=theano.config.floatX), name=None, borrow=True)
        v_samples = chain_start
        # k full v -> h -> v Gibbs steps over the fantasy particles
        for i in xrange(self.k):
            v_samples = model.gibbs_vhv(v_samples)[-1]
        chain_end = v_samples
        #print 'chain_end', chain_end.ndim
        # persist the advanced chain state for the next update
        chain_updates = {}
        chain_updates[chain_start] = chain_end
        pos_v = data
        #neg_v = self.get_neg_v(model)
        cost = -(- model.free_energy(pos_v).mean() + model.free_energy(chain_end).mean())
        params = list(model.get_params())
        # samples are constants (now including pos_v, matching MyPCD_scan):
        # no gradient flows through the sampler or the data
        grads = T.grad(cost, params, disconnected_inputs = 'ignore', consider_constant=[pos_v, chain_end])
        gradients = OrderedDict(izip(params, grads))
        updates = OrderedDict()
        updates.update(chain_updates) # manual added
        return gradients, updates
class MyPCD_scan(MyContrastiveDivergence):
    # Persistent CD-k with the Gibbs chain expressed via theano.scan; the
    # fantasy particles live in a shared variable advanced through updates.
    def get_gradients(self, model, data, ** kwargs):
        """PCD-k gradients: positive phase on the data batch, negative phase
        on the persistent fantasy chains after k scanned Gibbs steps."""
        #print 'get_gradients'
        # chain_num persistent chains, dtype matching theano.config.floatX
        chain_start = theano.shared(numpy.zeros(shape=(self.chain_num, model.n_vis), dtype=theano.config.floatX), name='chain_start', borrow=True)
        # scan yields the six gibbs_vhv outputs per step plus the RNG updates
        [act_hids, hid_mfs, hid_samples, act_vis, vis_mfs, vis_samples], scan_updates = theano.scan(fn = model.gibbs_vhv, sequences=None,
            outputs_info=[None, None, None, None, None, chain_start], non_sequences=None, n_steps=self.k)
        chain_end = vis_samples[-1]
        # persist the advanced chain state for the next update
        scan_updates[chain_start] = chain_end
        pos_v = data
        cost = -(- model.free_energy(pos_v).mean() + model.free_energy(chain_end).mean())
        params = list(model.get_params())
        # samples are constants: no backprop through the sampler or the data
        grads = T.grad(cost, params, disconnected_inputs = 'ignore', consider_constant=[pos_v, chain_end])
        gradients = OrderedDict(izip(params, grads))
        updates = OrderedDict()
        # carries both the RNG updates and the chain-state update
        updates.update(scan_updates) # manual added
        return gradients, updates
# Is that necessary to inherit Layer class??
class MyRBM(Model, Block):
    """Restricted Boltzmann Machine (RBM) with binary visible and hidden
    units.  Doubles as a pylearn2 Model (for training with the CD costs
    above) and a Block (for use as a feature extractor)."""
    def __init__(self, n_vis, n_hid, W=None, h_bias=None, v_bias=None, numpy_rng=None,theano_rng=None):
        # n_vis / n_hid: number of visible / hidden units.
        # W, h_bias, v_bias: optional pre-built theano shared variables.
        Model.__init__(self) # self.names_to_del = set(); self._test_batch_size = 2
        Block.__init__(self) # self.fn = None; self.cpu_only = False
        self.n_vis = n_vis
        self.n_hid = n_hid
        self.input_space = VectorSpace(dim=self.n_vis) # add input_space
        self.output_space = VectorSpace(dim=self.n_hid) # add output_space
        if numpy_rng is None:
            # create a number generator
            numpy_rng = numpy.random.RandomState(seed=19900418)
        self.numpy_rng = numpy_rng
        if theano_rng is None:
            theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))
        self.theano_rng = theano_rng
        if W is None:
            # Glorot-style uniform init; the 4x factor is the classic
            # heuristic for sigmoid units
            init_W = numpy.asarray(numpy_rng.uniform(
                      low=-4 * numpy.sqrt(6. / (n_hid + n_vis)),
                      high=4 * numpy.sqrt(6. / (n_hid + n_vis)),
                      size=(n_vis, n_hid)),
                      dtype=theano.config.floatX)
            # theano shared variables for weights and biases
            W = theano.shared(value=init_W, name='W', borrow=True)
        if h_bias is None:
            # create shared variable for hidden units bias
            h_bias = theano.shared(value=numpy.zeros(n_hid, dtype=theano.config.floatX), name='h_bias', borrow=True)
        if v_bias is None:
            # create shared variable for visible units bias
            v_bias = theano.shared(value=numpy.zeros(n_vis, dtype=theano.config.floatX), name='v_bias', borrow=True)
        self.W = W
        self.h_bias = h_bias
        self.v_bias = v_bias
        # parameter order matters: energy() unpacks get_params() as (W, h_bias, v_bias)
        self._params = [self.W, self.h_bias, self.v_bias]
    def get_monitoring_data_specs(self):
        # monitor on the same data the model consumes as input
        return (self.get_input_space(), self.get_input_source())
    def get_monitoring_channels(self, data):
        """Monitoring channels; currently only the mean squared
        reconstruction error after one Gibbs step (the other channels
        below were disabled by the author)."""
        v = data
        #H = self.sample_h_given_v(v)[1]
        #h = H.mean(axis=0)
        channels = {}
        #channels = { 'bias_hid_min' : T.min(self.h_bias),
        #        'bias_hid_mean' : T.mean(self.h_bias),
        #        'bias_hid_max' : T.max(self.h_bias),
        #        'bias_vis_min' : T.min(self.v_bias),
        #        'bias_vis_mean' : T.mean(self.v_bias),
        #        'bias_vis_max': T.max(self.v_bias),
        #        'h_min' : T.min(h),
        #        'h_mean': T.mean(h),
        #        'h_max' : T.max(h),
        #'W_min' : T.min(self.weights),
        #'W_max' : T.max(self.weights),
        #'W_norms_min' : T.min(norms),
        #'W_norms_max' : T.max(norms),
        #'W_norms_mean' : T.mean(norms),
        #}
        # recon_error
        channel_name = 'recon_error'
        # gibbs_vhv(v)[4:] = (visible mean-field activation, visible sample)
        p_v_given_h, v_sample = self.gibbs_vhv(v)[4:]
        recon_error = ((p_v_given_h - v) ** 2).mean(axis=0).sum()
        channels[channel_name] = recon_error
        #pos_v = data
        #[h_act, h_mean, h_sample, v_act, v_mean, v_sample], scan_updates = theano.scan(fn = self.gibbs_vhv, sequences=None,
        #outputs_info=[None, None, None, None, None, pos_v], non_sequences=None, n_steps=1)
        #pos_h = h_sample[0]
        #neg_v = v_sample[-1]
        #neg_h = self.sample_h_given_v(v_sample[-1])[-1]
        #cost = -(- self.energy(pos_v, pos_h).mean() + self.energy(neg_v, neg_h).mean())
        #channels['energy_cost'] = cost
        #chain_start = theano.shared(numpy.zeros(shape=(20, self.n_vis), dtype=theano.config.floatX), name='chain_start', borrow=True)
        #[h_act, h_mean, h_sample, v_act, v_mean, v_sample], scan_updates = theano.scan(fn = self.gibbs_vhv, sequences=None,
        #outputs_info=[None, None, None, None, None, chain_start], non_sequences=None, n_steps=1)
        #chain_end = v_sample[-1]
        #scan_updates[chain_start] = chain_end
        #pos_v = data
        #cost = -(- self.free_energy(pos_v).mean() + self.free_energy(chain_end).mean())
        #channels['free_enegy_cost'] = cost
        #pseudo_likelihood
        #channel_name = 'pseudo_likelihood'
        #bit_i_idx = theano.shared(value=0, name='bit_i_idx')
        #xi = T.round(v)
        #print 'xi',xi.ndim
        #fe_xi = self.free_energy(xi)
        #xi_flip = T.set_subtensor(xi[:, bit_i_idx], 1 - xi[:, bit_i_idx])
        #print 'xi_flip', xi_flip.ndim
        #fe_xi_flip = self.free_energy(xi_flip)
        #cost = T.mean(self.n_vis * T.log(T.nnet.sigmoid(fe_xi_flip - fe_xi)))
        #updates[bit_i_idx] = (bit_i_idx + 1) % self.n_vis
        #channels[channel_name] = cost
        return channels
    def energy(self, v, h):
        """Joint energy E(v, h) for a batch; returns one value per row."""
        # get_params() returns [W, h_bias, v_bias], so here c is the hidden
        # bias and b the visible bias
        W, c, b = self.get_params()
        #energy = - (T.dot(v, b) + T.dot(h, c) + T.dot(T.dot(v, W), h.T) * T.eye(n=v.shape[0], m=h.shape[0]).sum(axis=0))
        energy = - (T.dot(v, b) + T.dot(h, c) + (T.dot(v, W) * h).sum(axis=1))
        #energy = - (T.dot(v, b) + T.dot(h, c) + T.dot((T.dot(v, W)).T, h))
        return energy
    def free_energy(self, v_sample):
        #print 'free_energy'
        ''' Function to compute the free energy F(v) = -v.b - sum_j softplus(W.v + c)_j '''
        wx_b = T.dot(v_sample, self.W) + self.h_bias
        #print 'wx_b', wx_b.ndim
        v_bias_term = T.dot(v_sample, self.v_bias)
        softplus_term = T.sum(T.nnet.softplus(wx_b), axis=1)
        return - v_bias_term - softplus_term
    def propup(self, vis):
        # hidden pre-activation and mean-field activation P(h=1|v)
        pre_sigmoid_activation = T.dot(vis, self.W) + self.h_bias
        return [pre_sigmoid_activation, T.nnet.sigmoid(pre_sigmoid_activation)]
    def sample_h_given_v(self, v0_sample):
        ''' This function infers state of hidden units given visible units '''
        # compute the activation of the hidden units given a sample of
        # the visibles
        pre_sigmoid_h1, h1_mean = self.propup(v0_sample)
        # draw a binary sample from the Bernoulli mean-field activations
        h1_sample = self.theano_rng.binomial(size=h1_mean.shape,
                                             n=1, p=h1_mean,
                                             dtype=theano.config.floatX)
        return [pre_sigmoid_h1, h1_mean, h1_sample]
    def propdown(self, hid):
        # visible pre-activation and mean-field activation P(v=1|h)
        pre_sigmoid_activation = T.dot(hid, self.W.T) + self.v_bias
        return [pre_sigmoid_activation, T.nnet.sigmoid(pre_sigmoid_activation)]
    def sample_v_given_h(self, h0_sample):
        ''' This function infers state of visible units given hidden units '''
        # compute the activation of the visible given the hidden sample
        pre_sigmoid_v1, v1_mean = self.propdown(h0_sample)
        # draw a binary sample from the Bernoulli mean-field activations
        v1_sample = self.theano_rng.binomial(size=v1_mean.shape,
                                             n=1, p=v1_mean,
                                             dtype=theano.config.floatX)
        return [pre_sigmoid_v1, v1_mean, v1_sample]
    def gibbs_hvh(self, h0_sample):
        ''' This function implements one step of Gibbs sampling,
            starting from the hidden state'''
        pre_sigmoid_v1, v1_mean, v1_sample = self.sample_v_given_h(h0_sample)
        pre_sigmoid_h1, h1_mean, h1_sample = self.sample_h_given_v(v1_sample)
        return [pre_sigmoid_v1, v1_mean, v1_sample,
                pre_sigmoid_h1, h1_mean, h1_sample]
    def gibbs_vhv(self, v0_sample):
        ''' This function implements one step of Gibbs sampling,
            starting from the visible state'''
        pre_sigmoid_h1, h1_mean, h1_sample = self.sample_h_given_v(v0_sample)
        pre_sigmoid_v1, v1_mean, v1_sample = self.sample_v_given_h(h1_sample)
        return [pre_sigmoid_h1, h1_mean, h1_sample, pre_sigmoid_v1, v1_mean, v1_sample]
    # interface for pylearn2.model.mlp PretraindLayer
    def upward_pass(self, state_below):
        # deterministic hidden activations (mean-field, not sampled)
        return self.propup(state_below)[1]
    # default cost is cd-1
    def get_default_cost(self):
        return MyCD_free_energy_scan(k=1)
    def make_dataset(self, dataset,sample=False): # use rbm as a feature extractor, daatset pass through the filter and produce a new datset
        # When sample is False the features are the mean-field hidden
        # activations; otherwise binary hidden samples.
        orin = T.matrix()
        if sample == False:
            f = theano.function([orin], self.propup(orin)[-1])
        else:
            f = theano.function([orin], self.sample_h_given_v(orin)[-1])
        X_new = f(dataset.X)
        # NOTE(review): DatasetFromDesign is not among this file's visible
        # imports -- verify it is provided elsewhere in the project.
        new_ds = DatasetFromDesign(design_matrix=X_new, label=dataset.y)
        #print new_ds.__dict__
        #print X_new.shape
        return new_ds
| [
"zanghu20080701@126.com"
] | zanghu20080701@126.com |
03a350f6bb5407dde0032e82c4975dac03d1e4e9 | 186c92eb8572fa1c6580b5171e71dac61175d246 | /pi_string.py | e80c43f88768406e1fbafab35c7ff2daa54dfb47 | [] | no_license | xieqing181/Pythowork-Chapter10 | a5807d3d4bdb8fa0b5a78c6449bb20465b9ec0b8 | 44c1e228f2fa8f993becebda527de1cf1ff34b27 | refs/heads/master | 2021-07-18T21:41:03.199357 | 2017-10-26T10:27:26 | 2017-10-26T10:27:26 | 108,112,118 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 535 | py | file_name = '/Users/xieqing/Pythonwork/pcc-master/chapter_10/pi_million_digits.txt'
# Read the digits file (path in `file_name` above) and collapse it into one
# long string with the surrounding whitespace/newlines stripped.
with open(file_name) as file_object:
    lines = file_object.readlines()
pi_string = ''
for line in lines:
    pi_string += line.strip()
'''
print(pi_string[:52] + '...')
print(len(pi_string))
'''
# Check whether the user's birthday (entered as a 6-digit string) occurs
# anywhere in the first million digits of pi.
birthday = input("Enter your birthday, in the form mmddyy: ")
if birthday in pi_string:
    print("Your birthday appears in the first million digits of pi!")
else:
    print("Your birthday does NOT appear in the first million digits of pi!")
| [
"33027473+xieqing181@users.noreply.github.com"
] | 33027473+xieqing181@users.noreply.github.com |
10296a2cb689ad888d9e13f671dcb33a755b3ac2 | 378e1e93cfc0c02aa347cd1682b019e5df17e746 | /tweets/forms.py | bae17f8b325f1775b8342d80c4fbe9bfa416f251 | [] | no_license | Robert0B0/TweetLite | d11dea6baab3b4ab21172b00590fa59bd57566cf | a797104603a89ae99e5229b27852ed258714fd5a | refs/heads/master | 2023-03-22T03:18:54.853446 | 2021-03-13T13:27:08 | 2021-03-13T13:27:08 | 347,006,855 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 392 | py | from django import forms
from .models import Tweet
# Maximum number of characters allowed in a tweet.
MAX_TWEET_LENGTH = 240
class TweetForm(forms.ModelForm):
    """ModelForm for creating a Tweet; only the content field is exposed."""
    class Meta:
        model = Tweet
        fields = ['content']
    def clean_content(self):
        """Reject tweets longer than MAX_TWEET_LENGTH characters."""
        content = self.cleaned_data.get('content')
        if len(content) > MAX_TWEET_LENGTH:
            # Fix: user-facing message read "to long" -- corrected grammar.
            raise forms.ValidationError("This tweet is too long")
        return content
| [
"robert.balog000@gmail.com"
] | robert.balog000@gmail.com |
537e212b0035f18ca59d4d106ffba818be1a76a6 | f7751d01dce3d0ad2446bbf0ba6ca1e1ed652ecc | /forceDirected/views.py | c84e061c4ea6c568dbf2bfaccd011a0d5bbab848 | [] | no_license | LyonsLab/Blacktea | 3f2b497f40e9e01a5512e8fce8e2c7aff181ed20 | 421e1ab0970e6cc207f6fa6178606d88ded7f0c4 | refs/heads/master | 2020-12-24T15:50:00.873866 | 2016-03-09T23:46:15 | 2016-03-09T23:46:15 | 9,042,798 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,610 | py | from django.http import HttpResponse
from django.shortcuts import render, get_object_or_404
from django.db.models import Count
import json
from .models import User, Log
# Create your views here.
def index(request):
    """Serve the landing page for the force-directed visualization."""
    # The third positional argument is forwarded exactly as in the original
    # call.  NOTE(review): in Django, render()'s third positional parameter
    # is the context -- presumably "text/html" was meant as a content type;
    # verify against the Django version in use.
    template_name = 'forceDirected/index.html'
    return render(request, template_name, "text/html")
def users(request):
    """Return every user as JSON: a tree rooted at "root" whose children
    are the users' __json__() representations."""
    children = [account.__json__() for account in User.objects.all()]
    payload = {"name": "root",
               "children": children}
    return HttpResponse(json.dumps(payload), "text/json")
def user_details(request, user_id):
    """Return a JSON list aggregating one user's log entries per page,
    with the number of distinct link hits per page as "size"."""
    per_page = Log.objects.filter(user=user_id).values('page', 'user').annotate(count=Count('link'))
    payload = [
        {
            "name": entry['page'],
            "user_id": entry['user'],
            "type": "Type",
            "size": entry['count'],
            "children": [],
        }
        for entry in per_page
    ]
    return HttpResponse(json.dumps(payload), "text/json")
def job(request, user_id, job):
    """Return a JSON list of one user's individual log entries for a single
    page (called a "job" here); each entry carries its link, timestamp and
    log id."""
    entries = Log.objects.filter(user=user_id, page=job).values('page', 'user', 'link', 'time', 'log_id')
    payload = [
        {
            "name": entry['page'],
            "user_id": entry['user'],
            "type": "Job",
            "link": entry['link'],
            "date": str(entry['time']),
            "log_id": entry['log_id'],
        }
        for entry in entries
    ]
    return HttpResponse(json.dumps(payload), "text/json")
| [
"rchasman@gmail.com"
] | rchasman@gmail.com |
579e3e2dec98370e537152c245c05df0dd77fa46 | 4ba58260d3e6fe7918324581be97aa01a3385804 | /main.py | 4c0abeb46aa822387e362ff8b17ee094785ff19e | [] | no_license | xvhaorong/lab2p | d937707aff05fc881c8eca7b65a73e371e4e5283 | 14beebaa5919871d9e7fec2701a47eba2ecb9d77 | refs/heads/master | 2022-12-17T09:21:52.442942 | 2020-09-10T20:57:50 | 2020-09-10T20:57:50 | 294,507,251 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 618 | py | # Author: Haorong Xu hxx5086@psu.edu
# Collaborator: Joshua Chang jvc6690@psu.edu
# Collaborator: Christian Dell'Edera cmd6705@psu.edu
# Section: 12
# Breakout: 16
def getLetterGrade(gd):
    """Map a numeric CMPSC 131 grade to its letter grade."""
    # (minimum score, letter) brackets from highest to lowest; anything
    # below 60 is an F.
    brackets = (
        (93.0, "A"),
        (90, "A-"),
        (87, "B+"),
        (83, "B"),
        (80, "B-"),
        (77, "C+"),
        (70, "C"),
        (60, "D"),
    )
    for cutoff, letter in brackets:
        if gd >= cutoff:
            return letter
    return "F"
def run():
    """Prompt for the final numeric grade and print the letter grade."""
    grade_value = float(input("Enter your CMPSC 131 grade: "))
    print(f"Your letter grade for CMPSC 131 is {getLetterGrade(grade_value)}.")
# Run interactively only when executed as a script.
if __name__ == "__main__":
    run()
| [
"hxx5086@psu.edu"
] | hxx5086@psu.edu |
045ab7be275b2619843d30d15a97cd7736e1886b | 7729ca00624a6ed7125f1ee3d71fa12856ec7268 | /psarticles/psarticles/middlewares.py | 1c9f285aa8578a68c12ecba9cf8190f2a091f5fd | [] | no_license | AJszabo/dissertation | f8957485953b99b4b62e7bf7680239cb06ef849c | 4bd6c078cedde9eb28fa3b6465171e534dc33369 | refs/heads/master | 2020-05-22T11:05:35.659018 | 2019-05-13T08:09:53 | 2019-05-13T08:09:53 | 186,314,409 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,605 | py | # -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
class PsarticlesSpiderMiddleware(object):
    """Spider middleware for the psarticles project.

    Pass-through implementation from the Scrapy project template: every
    hook forwards requests/responses/items unchanged; not all methods need
    to be defined -- Scrapy treats missing ones as no-ops.
    """

    @classmethod
    def from_crawler(cls, crawler):
        # Factory used by Scrapy; also subscribes to spider_opened so the
        # middleware can log spider start-up.
        middleware = cls()
        crawler.signals.connect(middleware.spider_opened, signal=signals.spider_opened)
        return middleware

    def process_spider_input(self, response, spider):
        # Returning None lets the response continue into the spider untouched.
        return None

    def process_spider_output(self, response, result, spider):
        # Forward every item/request produced by the spider unchanged.
        for produced in result:
            yield produced

    def process_spider_exception(self, response, exception, spider):
        # No special handling: fall through to other middlewares / defaults.
        pass

    def process_start_requests(self, start_requests, spider):
        # Start requests are passed along unmodified (requests only, no items).
        for start_request in start_requests:
            yield start_request

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
class PsarticlesDownloaderMiddleware(object):
    """Scrapy downloader middleware for the psarticles project.

    All hooks pass requests and responses through unchanged, deferring to
    Scrapy's default behaviour.
    """

    @classmethod
    def from_crawler(cls, crawler):
        # Factory Scrapy uses to build the middleware; also subscribes the
        # instance to the spider_opened signal.
        middleware = cls()
        crawler.signals.connect(
            middleware.spider_opened, signal=signals.spider_opened,
        )
        return middleware

    def process_request(self, request, spider):
        # None -> continue normal downloader processing for this request.
        return None

    def process_response(self, request, response, spider):
        # Hand the downloaded response back unchanged.
        return response

    def process_exception(self, request, exception, spider):
        # None lets other middlewares / Scrapy handle the download error.
        pass

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
| [
"noreply@github.com"
] | noreply@github.com |
cd1ed8d96f0e50c4d2a00bdb59e9d23fd8e220b3 | af05f5878780a085571cc4282ffd12ef943f61e8 | /main.py | f391055ec54276be7d6fe1a4a572143d28a3cf0e | [] | no_license | Introduction-to-Programming-OSOWSKI/2-4-closer-to-10-CarterWurst2023 | 2be2fa0cf49e71bd94e8638fb886f5e352b1f12b | c21a64e451fb6f72f344c23d3fc921112e34feb0 | refs/heads/master | 2023-08-30T15:16:39.709538 | 2021-09-28T14:38:07 | 2021-09-28T14:38:07 | 411,314,049 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 180 | py | def close10(x,y):
if abs(10-x) < abs(10-y):
return x
elif abs(10-x) > abs(10-y):
return y
else:
return 0
print(close10(7,13))
| [
"816568@mystma.org"
] | 816568@mystma.org |
e23bd3752e533ae891a8a458e4234a537b1a2759 | 0495e863a73124ff8041f675247c6914034066a2 | /check_directory_files.py | f0ca199a10f258a8b78774d0ae9aa2750e8d3ec0 | [] | no_license | thomas-nguyen-3/python | 923b59ef4944609344186980da4addf6f652110a | 2703e90d31178fcd4fb863005f88e8b728001eaf | refs/heads/master | 2020-04-11T04:57:26.880037 | 2018-12-20T16:57:35 | 2018-12-20T16:57:35 | 161,533,025 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,647 | py | import os,cgi,json,time,datetime,base64,MySQLdb,MySQLdb.cursors,paramiko
from pprint import pprint
"""
12/12/2018
goal: check path in FUSQA table and insert 1 or 0 if files exist
workflow note:
- query connect to FUSQA table (DFRandomForestHCCResponse.FUSQA to be exact)
- loop through the path and check if the path contain any files
key script: len(os.listdir(path))
- insert 1,0 accordingly into FUSDIRBool column
Note: the keyscript can actually show the number of files exists and compare with PACs to see if we get everything
"""
# Module-level MySQL connection shared by checkFUS4() below.
# NOTE(review): the connect() arguments were redacted in this copy —
# "xxxx" is not a defined name, so real host/credentials must be restored
# before this script can run.
db = MySQLdb.connect(xxxx)
##NORMAL CURSOR: DATA SAVE IN ARRAY - WE DONT USE THIS
cur = db.cursor()
##DICTIONARY CURSOR: DATA SAVE AS DICTIONARY (SAVE TIME) - USE THIS
d_cur = db.cursor(MySQLdb.cursors.DictCursor)
######
## this is for
# Scan every FUSDIRPath row of DFRandomForestHCCResponse.FUSQA and record
# in FUSDIRBool whether that directory currently contains any files
# ('1' = at least one entry, '0' = empty or unreadable).
# NOTE(review): indentation was lost in this copy of the file, so the exact
# placement of db.commit()/db.close() relative to the row loop cannot be
# confirmed from here.
def checkFUS4():
d_cur.execute ('SELECT id, FUSDIRPath, FUSDIRBool FROM DFRandomForestHCCResponse.FUSQA')
rdata = d_cur.fetchall();
print
print("start checking")
# One dict per row (dictionary cursor), keyed by the selected column names.
for row in rdata:
path = row['FUSDIRPath']
# NOTE(review): `id` shadows the builtin of the same name.
id = row['id']
# len(os.listdir(path)) doubles as an existence + non-empty check; any
# failure (missing directory, permission error) is treated as "empty".
try:
emptycheck = len(os.listdir(path))
except:
emptycheck = 0
##update 1,0 to Bool accordingly
# Safe-updates must be off for an UPDATE keyed on a non-PK expression.
d_cur.execute('SET SQL_SAFE_UPDATES = 0;')
q = """UPDATE DFRandomForestHCCResponse.FUSQA
SET FUSDIRBool = %s WHERE id =%s
"""
if emptycheck == 0:
v = ('0',id)
print(q % v)
cur.execute(q,v)
else:
v = ('1',id)
##print(q % v)
cur.execute(q,v)
db.commit()
print('done')
db.close()
# Dispatch on the module-level `action` flag (defined in the section above,
# redacted here — TODO confirm): 'run_me' performs the directory scan, any
# other value (e.g. 'blank') is a deliberate no-op safety latch so the DB
# update cannot run by accident.
# Fix: the original used the Python 2 print statement (`print"..."`), which
# is a SyntaxError under Python 3 even though the rest of the file uses
# print() calls; converted to call form ( print() emits the same blank line).
if action == 'blank':
    print()
    print("doing nothing to prevent the code run by accident")
elif action == 'run_me':
    checkFUS4()
| [
"noreply@github.com"
] | noreply@github.com |
48d6e9a8f1cd30cb302f9c81eb5ca4370302e805 | c190538d85c00e03bf655af83629a5bddfd6d797 | /src/dcos_e2e_cli/dcos_vagrant/commands/install_dcos.py | 111fd161ceac49b9b4021c8e8b78de2ff50e1e44 | [
"Apache-2.0"
] | permissive | yankcrime/dcos-e2e | e8d52aa10ecfba029c28b269354fea9fe0f85f7b | 449ca9ebc98399efc00e424d9131d2634de0471c | refs/heads/master | 2020-05-30T00:00:07.725954 | 2019-05-30T15:57:37 | 2019-05-30T15:57:37 | 189,449,013 | 0 | 0 | Apache-2.0 | 2019-05-30T16:42:28 | 2019-05-30T16:42:28 | null | UTF-8 | Python | false | false | 2,927 | py | """
Install DC/OS on a provisioned Vagrant cluster.
"""
from pathlib import Path
from typing import Any, Dict, Optional
import click
from dcos_e2e.backends import Vagrant
from dcos_e2e_cli.common.arguments import installer_argument
from dcos_e2e_cli.common.create import get_config
from dcos_e2e_cli.common.doctor import get_doctor_message
from dcos_e2e_cli.common.install import (
install_dcos_from_path,
run_post_install_steps,
)
from dcos_e2e_cli.common.options import (
cluster_id_option,
extra_config_option,
genconf_dir_option,
license_key_option,
security_mode_option,
variant_option,
verbosity_option,
)
from dcos_e2e_cli.common.utils import command_path
from dcos_e2e_cli.common.variants import get_install_variant
from dcos_e2e_cli.common.workspaces import workspace_dir_option
from ._common import ClusterVMs
from ._wait_for_dcos import wait_for_dcos_option
from .doctor import doctor
from .wait import wait
@click.command('install')
@installer_argument
@extra_config_option
@workspace_dir_option
@variant_option
@license_key_option
@genconf_dir_option
@security_mode_option
@cluster_id_option
@verbosity_option
@wait_for_dcos_option
@click.pass_context
def install_dcos(
    ctx: click.core.Context,
    installer: Path,
    extra_config: Dict[str, Any],
    variant: str,
    workspace_dir: Path,
    license_key: Optional[Path],
    security_mode: Optional[str],
    cluster_id: str,
    genconf_dir: Optional[Path],
    wait_for_dcos: bool,
) -> None:
    """
    Install DC/OS on a provisioned Vagrant cluster.
    """
    # Full CLI paths of the sibling ``doctor``/``wait`` commands, used in
    # user-facing help/error text.
    doctor_cmd = command_path(sibling_ctx=ctx, command=doctor)
    wait_cmd = command_path(sibling_ctx=ctx, command=wait)
    doctor_msg = get_doctor_message(doctor_command_name=doctor_cmd)

    # Determine which DC/OS variant the given installer artifact provides.
    install_variant = get_install_variant(
        given_variant=variant,
        installer_path=installer,
        workspace_dir=workspace_dir,
        doctor_message=doctor_msg,
    )

    backend = Vagrant()
    vms = ClusterVMs(cluster_id=cluster_id)
    config = get_config(
        cluster_representation=vms,
        extra_config=extra_config,
        dcos_variant=install_variant,
        security_mode=security_mode,
        license_key=license_key,
    )
    # Materialise the Cluster object once and reuse it for both steps below.
    cluster = vms.cluster

    install_dcos_from_path(
        cluster=cluster,
        cluster_representation=vms,
        dcos_config=config,
        ip_detect_path=backend.ip_detect_path,
        doctor_message=doctor_msg,
        dcos_installer=installer,
        local_genconf_dir=genconf_dir,
    )
    run_post_install_steps(
        cluster=cluster,
        cluster_id=cluster_id,
        dcos_config=config,
        doctor_command_name=doctor_cmd,
        http_checks=True,
        wait_command_name=wait_cmd,
        wait_for_dcos=wait_for_dcos,
    )
| [
"adamdangoor@gmail.com"
] | adamdangoor@gmail.com |
d33535490a49ccc63731773d42cd5a17f661d234 | a2948d87a8f1901c6faf922f7b8cfba825f84d9b | /resources.py | c5484e2f0cc861b20e66986f69bf1105fbfacb38 | [] | no_license | sourcery-ai-bot/4x2d | 03360fdcd5cfb135acbe0dfbdf571fb1e4d98a5a | 68a5daf2410ae6ffe1220bb7ce85b95647097157 | refs/heads/main | 2023-03-11T10:38:01.353467 | 2021-02-28T06:57:06 | 2021-02-28T06:57:06 | 344,102,678 | 0 | 0 | null | 2021-03-03T11:25:28 | 2021-03-03T11:25:27 | null | UTF-8 | Python | false | false | 412 | py | import os
import sys
def resource_path(relative_path):
    """Resolve *relative_path* against the application's resource root.

    Inside a PyInstaller bundle the root is the unpack directory exposed
    as ``sys._MEIPASS``; during development it is this file's directory.
    """
    if hasattr(sys, '_MEIPASS'):
        root = sys._MEIPASS
    else:
        root = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(root, relative_path)
"morganquirk@gmail.com"
] | morganquirk@gmail.com |
52425699c2b0f3f4f3701d850f4388930fbaf38d | 62babb33b9bede95aac217db04636956279bb2e2 | /sort/topological sort/1385E Directing Edges.py | 269591b4f10d040c69bde6a7be642cc5a8b56613 | [] | no_license | tycyd/codeforces | 0322e31daf18544944c769fd2a50c6d006015e34 | e0773f069c6c5793f9d9a07b61878a589e375a5f | refs/heads/master | 2023-08-12T05:00:39.467404 | 2021-09-30T16:39:21 | 2021-09-30T16:39:21 | 266,847,425 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,647 | py | from sys import stdin, stdout
if __name__ == '__main__':

    def directing_edges(n, m, ind, dic, seq):
        """Codeforces 1385E: orient undirected edges so the graph is acyclic.

        ``ind`` holds in-degrees over the directed edges only, ``dic`` the
        directed adjacency lists, ``seq`` every edge in input order. Runs
        Kahn's algorithm on the directed part; if it covers all n vertices,
        prints "YES" and each edge oriented from the earlier vertex in the
        topological order, otherwise prints "NO".
        """
        order = []
        stack = [v for v in range(n) if ind[v] == 0]
        while stack:
            node = stack.pop()
            order.append(node)
            for succ in dic.get(node, ()):
                ind[succ] -= 1
                if ind[succ] == 0:
                    stack.append(succ)
        if len(order) < n:
            # Directed edges alone contain a cycle -> impossible.
            stdout.write("NO\n")
            return
        stdout.write("YES\n")
        # pos[v] = rank of v in the topological order.
        pos = [0] * n
        for rank, node in enumerate(order):
            pos[node] = rank
        for a, b in seq:
            if pos[a] < pos[b]:
                print(a + 1, b + 1)
            else:
                print(b + 1, a + 1)

    t = int(stdin.readline())
    for _ in range(t):
        n, m = map(int, stdin.readline().split())
        dic = {}
        ind = [0] * n
        seq = []
        for _ in range(m):
            kind, x, y = map(int, stdin.readline().split())
            x -= 1
            y -= 1
            seq.append([x, y])
            if kind == 1:  # already-directed edge x -> y
                dic.setdefault(x, []).append(y)
                ind[y] += 1
        directing_edges(n, m, ind, dic, seq)
"tycyd@hotmail.com"
] | tycyd@hotmail.com |
60496ab10d5cda02c956024215be0374ea200b25 | c3b097df549aead765b92278e5fc948237c2e1bb | /app/controllers/api.py | f40541a0cdcd319c2e6093795030ddf2001e286d | [] | no_license | mareimorsy/genie | a8c5ff44945ecc7d3c17a868f5fcbaee3459ca8e | 571b7242d1b2a9827d479ee15d379610225ea183 | refs/heads/master | 2022-03-26T21:43:29.155665 | 2020-01-08T13:08:58 | 2020-01-08T13:08:58 | 232,563,565 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 644 | py | from app import app
from flask import render_template, jsonify, request
from app.helpers.k8s.deployment import get_pods, pod_cmd
import json
from flask import session
from app.config import config
@app.route('/api/namespaces')
def namespaces():
    """Render the front-end entry page for the namespaces view."""
    return render_template("index.html")


@app.route('/api/pod/exec', methods=['GET', 'POST'])
def pod_exec():
    """Run a command in a pod; the JSON body supplies name/namespace/command."""
    payload = request.get_json()
    return pod_cmd(payload["name"], payload["namespace"], payload["command"])


@app.route('/api/pod')
def pod():
    """Serve canned pod data from the bundled mock file."""
    return render_template("mocks/pods.json")


@app.route('/api/pods')
def pods():
    """Return the pod list produced by get_pods(config) as JSON."""
    return jsonify(get_pods(config))
"mareimorsy@Marei-MacBook-Pro.local"
] | mareimorsy@Marei-MacBook-Pro.local |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.