blob_id
stringlengths
40
40
directory_id
stringlengths
40
40
path
stringlengths
3
281
content_id
stringlengths
40
40
detected_licenses
listlengths
0
57
license_type
stringclasses
2 values
repo_name
stringlengths
6
116
snapshot_id
stringlengths
40
40
revision_id
stringlengths
40
40
branch_name
stringclasses
313 values
visit_date
timestamp[us]
revision_date
timestamp[us]
committer_date
timestamp[us]
github_id
int64
18.2k
668M
star_events_count
int64
0
102k
fork_events_count
int64
0
38.2k
gha_license_id
stringclasses
17 values
gha_event_created_at
timestamp[us]
gha_created_at
timestamp[us]
gha_language
stringclasses
107 values
src_encoding
stringclasses
20 values
language
stringclasses
1 value
is_vendor
bool
2 classes
is_generated
bool
2 classes
length_bytes
int64
4
6.02M
extension
stringclasses
78 values
content
stringlengths
2
6.02M
authors
listlengths
1
1
author
stringlengths
0
175
0f907498ef454193d80fa1dbac7f4ef357cb0485
f9308d5a8efe2dbb48e9cc87cd06405b60a9dc7b
/samples/python/apidocs/ee_featurecollection_aggregate_total_var.py
6f681ac3fa4a89f5f1bdf70d9c5cf99b8aef7c56
[ "Apache-2.0", "CC-BY-4.0" ]
permissive
google/earthengine-community
4e054b421f66f03507d58668084aee981062fc24
ce931040c518860f8788b4888c0acfdebd2952fc
refs/heads/master
2023-09-01T14:47:54.812703
2023-08-31T23:01:00
2023-08-31T23:01:39
200,732,820
428
552
Apache-2.0
2023-09-13T21:46:51
2019-08-05T21:42:11
Jupyter Notebook
UTF-8
Python
false
false
1,021
py
# Copyright 2023 The Google Earth Engine Community Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # [START earthengine__apidocs__ee_featurecollection_aggregate_total_var] # FeatureCollection of power plants in Belgium. fc = ee.FeatureCollection('WRI/GPPD/power_plants').filter( 'country_lg == "Belgium"') print('Total variance of power plant capacities (MW):', fc.aggregate_total_var('capacitymw').getInfo()) # 214307.38335169878 # [END earthengine__apidocs__ee_featurecollection_aggregate_total_var]
[ "copybara-worker@google.com" ]
copybara-worker@google.com
e479319f2f40f31a8fdd22b4981b19d8cc53f1f7
caa7e7d7d60d9f073b67661661ddd898b0bdb506
/setup.py
404c0124fe14f511cefcf51ad915fde6f68bf0e2
[ "MIT" ]
permissive
kanghyojun/s2protocol
aa687ef79c2d8e508fa3a4f9f87adac81c190bac
6e48a010938aa3adea0c9f7ec6ad9bea7b57540e
refs/heads/master
2021-10-22T20:10:54.604454
2014-02-05T16:46:23
2014-02-05T16:46:23
16,060,557
0
0
null
null
null
null
UTF-8
Python
false
false
225
py
# -*- coding: utf-8 -*- from setuptools import setup, find_packages setup(name='s2protocol', version='0.0.0', author='Blizzard', install_requires=[ 'mpyq' ], packages=find_packages())
[ "hyojun@admire.kr" ]
hyojun@admire.kr
6793a7e2ed84de8b67e05f62dbff2c37d60be349
6c0beed4cd8719bf48982a853183121cea35cadf
/thread_syn_scanr_final.py
a312b10f43247c3ca188d52d82f490d1d797ed92
[]
no_license
vicky288/pythonScripts
c90406dd2addc7a72275be0526daae7eba7d8c29
2b31c726b5924314b31a37f3a4eb86c132816859
refs/heads/master
2021-04-30T16:35:30.739238
2017-02-09T00:06:16
2017-02-09T00:06:16
80,077,402
0
0
null
null
null
null
UTF-8
Python
false
false
1,356
py
#!/usr/bin/env python import threading import Queue import time from scapy.all import * class WorkerThread(threading.Thread) : def __init__(self, queue, tid) : threading.Thread.__init__(self) self.queue = queue self.tid = tid print "Worker %d Reporting for Service Sir!" %self.tid def run(self) : total_ports = 0 while True : port = 0 try : port = self.queue.get(timeout=1) except Queue.Empty : print "Worker %d exiting. Scanned %d ports ..." % (self.tid, total_ports) return # port scanning to begin # we rely on scapy to do this ip = sys.argv[1] response = sr1(IP(dst=ip)/TCP(dport=port, flags="S"), verbose=False, timeout=.2) # only checking for SYN-ACK == flags = 18 # filtererd ports etc. is another story altogether if response : if response[TCP].flags == 18 : print "ThreadId %d: Received port number %d Status: OPEN" %(self.tid, port) self.queue.task_done() total_ports += 1 queue = Queue.Queue() threads = [] for i in range(1, 10) : print "Creating WorkerThread : %d"%i worker = WorkerThread(queue, i) worker.setDaemon(True) worker.start() threads.append(worker) print "WorkerThread %d Created!"%i for j in range (1,1000) : queue.put(j) queue.join() # wait for all threads to exit for item in threads : item.join() print "Scanning Complete!"
[ "root@localhost.localdomain" ]
root@localhost.localdomain
51dd7f85ef5c0d7b69d593c9c71aa7bfd3232709
31af83f735a4722ba3b7f6a3864d4b9c5be16d90
/manage.py
f4c005520e48f7273436e9f6bfd757eda1bcdf32
[]
no_license
DrewCCannedy/brohaus
300ba75e42324549d8bbe803338f050f31a5ae38
6dea8d22fc27ae2199bedd50211c4a4c49e6e055
refs/heads/master
2023-08-31T14:15:22.509059
2020-07-01T22:22:23
2020-07-01T22:22:23
276,479,305
0
0
null
2021-09-22T19:21:04
2020-07-01T20:50:44
Python
UTF-8
Python
false
false
648
py
#!/usr/bin/env python """Django's command-line utility for administrative tasks.""" import os import sys def main(): os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'brohaus.settings') try: from django.core.management import execute_from_command_line except ImportError as exc: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc execute_from_command_line(sys.argv) if __name__ == '__main__': main()
[ "dakillac@gmail.com" ]
dakillac@gmail.com
0fd14d1a2133a3e80d9f1f3ab67434b34c00c4e9
76eb7570deba15cd8df8ddd96f52e30623c680f0
/Proj1_v1.py
0f2f6a2fc69cf613966d0de6b2b8f4c0745e80fa
[]
no_license
tanveerm176/Flask-Knot
7978277ac042abd1e6dd10de96c193688eea3633
0b897dc4906bfabd4c42f638cec88542bfeedb61
refs/heads/master
2021-01-24T22:03:07.656733
2015-07-20T16:04:15
2015-07-20T16:04:15
null
0
0
null
null
null
null
UTF-8
Python
false
false
4,067
py
from flask import Flask, render_template, request from data import sentencegenerator as story from data import Reader,cipher,markov_v3 as chain Proj1_v1=Flask(__name__) @Proj1_v1.route('/') def root(): return render_template('main.html', title='Main Page') @Proj1_v1.route('/rotn', methods=["POST","GET"]) def rotn(): if request.method=="GET": return render_template('rotn.html', title='Caesar Cipher') elif request.method=="POST": request_data=request.form if request_data['key'].isdigit()==True: cypher=cipher.rotn(request_data['word'],int(request_data['key'])) return render_template('rotn.html', title='Caesar Cipher', error=cypher) else: return render_template('rotn.html', title='Caesar Cipher', error="Not a valid key") else: return 'yo' @Proj1_v1.route('/sengen', methods=['POST','GET']) def sengen(): if request.method=="GET": return render_template('sengen.html', title='Sentence Generator', Sentences='') elif request.method=="POST": sentences=request.form if int(sentences['numsen'])>0: return render_template('sengen.html', title='Sentence Generator', Sentences=story.senGen(int(sentences['numsen']))) else: return render_template('sengen.html', title='Sentence Generator', Sentences='Invalid number of sentences') else: return 'yo' @Proj1_v1.route('/markov', methods=['POST',"GET"]) def markov_result(): if request.method=='GET': return render_template('markov.html', title='Markov Text Generator', book='', text='') elif request.method=='POST': book=request.form['booklist'] if book!=' ': return render_template('markov.html', title='Markov Text Generator', book=book, text=chain.markov_generator(book)) else: yes='yes' else: return 'yo' def validate(u,p): request_data=request.form credentials=Reader.getCsvDict('./data/credentials.txt') if request_data['user'] in credentials and request_data['user']!='': if request_data['pswd']==credentials[request_data['user']][0]: return True else: return False else: return False @Proj1_v1.route('/login', methods=['POST','GET'])#Allows both 
Post (going through the form) and Get (going directly to the page) def log_in(): if request.method=="GET": return render_template('form.html', title='login') elif request.method=="POST":#Verifies the username and password against constants at the top request_data=request.form#Takes the immutable dictionary of the user's inputs and saves it in a variable credentials=Reader.getCsvDict('./data/credentials.txt') if validate(request_data['user'],request_data['pswd']): return render_template('form.html', title='login', error='Successful login') else: return render_template('form.html', error='Invalid username or password', title='Login') else: return 'yo' @Proj1_v1.route('/signup', methods=['POST', 'GET']) def signup(): previousCredentials=Reader.read_file('./data/credentials.txt') if request.method=="GET": return render_template('signup.html', title='Sign up') elif request.method=="POST": new_user=request.form['nuser'] new_pswd=request.form['npswd'] new_credentials='%s,%s'%(new_user,new_pswd) if new_user=='' or new_pswd=='' or new_user in previousCredentials or new_user.find(',')!=-1 or new_pswd.find(',')!=-1: return render_template('form.html', error='Invalid signup credentials', title='Login') else: Reader.write_file('./data/credentials.txt',new_credentials,'a') return render_template('form.html', error='Successfully signed up!', title='Login') else: return 'yo' if __name__=='__main__': Proj1_v1.debug=True Proj1_v1.run(host='0.0.0.0')
[ "benjihofing@gmail.com" ]
benjihofing@gmail.com
39d76bd576224e3946ff7daa7c0eafa7235abd11
cab7fbf7e7b1df18459ff46b8f5981eff2745104
/depth_and_motion_learning/transform_utils.py
08c12d61bc8cbeaae72f4af4ddc62d331e8ef828
[]
no_license
bolianchen/unsupervised-depth-predictors
d134d791103bc2f8a990c5bc88820b9e1ba65556
082103e6caa70f76559f3b38cd9daa30acafa8fe
refs/heads/main
2023-08-20T08:22:20.341063
2021-09-27T09:11:42
2021-09-27T09:11:42
387,762,636
0
2
null
null
null
null
UTF-8
Python
false
false
6,454
py
# coding=utf-8 # Copyright 2021 The Google Research Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Helper functions for geometric transforms.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow.compat.v1 as tf from tensorflow_graphics.geometry.transformation import euler from tensorflow_graphics.geometry.transformation import rotation_matrix_3d def matrix_from_angles(rot): """Create a rotation matrix from a triplet of rotation angles. Args: rot: a tf.Tensor of shape [..., 3], where the last dimension is the rotation angles, along x, y, and z. Returns: A tf.tensor of shape [..., 3, 3], where the last two dimensions are the rotation matrix. This function mimics _euler2mat from struct2depth/project.py, for backward compatibility, but wraps tensorflow_graphics instead of reimplementing it. The negation and transposition are needed to bridge the differences between the two. """ rank = tf.rank(rot) # Swap the two last dimensions perm = tf.concat([tf.range(rank - 1), [rank], [rank - 1]], axis=0) return tf.transpose(rotation_matrix_3d.from_euler(-rot), perm) def angles_from_matrix(matrix): """Get a triplet of rotation angles from a rotation matrix. Args: matrix: A tf.tensor of shape [..., 3, 3], where the last two dimensions are the rotation matrix. Returns: A tf.Tensor of shape [..., 3], where the last dimension is the rotation angles, along x, y, and z. 
This function mimics _euler2mat from struct2depth/project.py, for backward compatibility, but wraps tensorflow_graphics instead of reimplementing it. The negation and transposition are needed to bridge the differences between the two. """ rank = tf.rank(matrix) # Swap the two last dimensions perm = tf.concat([tf.range(rank - 2), [rank - 1], [rank - 2]], axis=0) return -euler.from_rotation_matrix(tf.transpose(matrix, perm)) def unstacked_matrix_from_angles(rx, ry, rz, name=None): """Create an unstacked rotation matrix from rotation angles. Args: rx: A tf.Tensor of rotation angles abound x, of any shape. ry: A tf.Tensor of rotation angles abound y (of the same shape as x). rz: A tf.Tensor of rotation angles abound z (of the same shape as x). name: A string, name for the op. Returns: A 3-tuple of 3-tuple of tf.Tensors of the same shape as x, representing the respective rotation matrix. The small 3x3 dimensions are unstacked into a tuple to avoid tensors with small dimensions, which bloat the TPU HBM memory. Unstacking is one of the recommended methods for resolving the problem. """ with tf.name_scope(name, 'BuildUnstackedRotationMatrix', [rx, ry, rz]): angles = [-rx, -ry, -rz] sx, sy, sz = [tf.sin(a) for a in angles] cx, cy, cz = [tf.cos(a) for a in angles] m00 = cy * cz m10 = (sx * sy * cz) - (cx * sz) m20 = (cx * sy * cz) + (sx * sz) m01 = cy * sz m11 = (sx * sy * sz) + (cx * cz) m21 = (cx * sy * sz) - (sx * cz) m02 = -sy m12 = sx * cy m22 = cx * cy return ((m00, m01, m02), (m10, m11, m12), (m20, m21, m22)) def invert_rot_and_trans(rot, trans): """Inverts a transform comprised of a rotation and a translation. Args: rot: a tf.Tensor of shape [..., 3] representing rotatation angles. trans: a tf.Tensor of shape [..., 3] representing translation vectors. Returns: a tuple (inv_rot, inv_trans), representing rotation angles and translation vectors, such that applting rot, transm inv_rot, inv_trans, in succession results in identity. 
""" inv_rot = inverse_euler(rot) # inv_rot = -rot for small angles inv_rot_mat = matrix_from_angles(inv_rot) inv_trans = -tf.matmul(inv_rot_mat, tf.expand_dims(trans, -1)) inv_trans = tf.squeeze(inv_trans, -1) return inv_rot, inv_trans def inverse_euler(angles): """Returns the euler angles that are the inverse of the input. Args: angles: a tf.Tensor of shape [..., 3] Returns: A tensor of the same shape, representing the inverse rotation. """ sin_angles = tf.sin(angles) cos_angles = tf.cos(angles) sz, sy, sx = tf.unstack(-sin_angles, axis=-1) cz, _, cx = tf.unstack(cos_angles, axis=-1) y = tf.asin((cx * sy * cz) + (sx * sz)) x = -tf.asin((sx * sy * cz) - (cx * sz)) / tf.cos(y) z = -tf.asin((cx * sy * sz) - (sx * cz)) / tf.cos(y) return tf.stack([x, y, z], axis=-1) def combine(rot_mat1, trans_vec1, rot_mat2, trans_vec2): """Composes two transformations, each has a rotation and a translation. Args: rot_mat1: A tf.tensor of shape [..., 3, 3] representing rotation matrices. trans_vec1: A tf.tensor of shape [..., 3] representing translation vectors. rot_mat2: A tf.tensor of shape [..., 3, 3] representing rotation matrices. trans_vec2: A tf.tensor of shape [..., 3] representing translation vectors. Returns: A tuple of 2 tf.Tensors, representing rotation matrices and translation vectors, of the same shapes as the input, representing the result of applying rot1, trans1, rot2, trans2, in succession. """ # Building a 4D transform matrix from each rotation and translation, and # multiplying the two, we'd get: # # ( R2 t2) . ( R1 t1) = (R2R1 R2t1 + t2) # (0 0 0 1 ) (0 0 0 1 ) (0 0 0 1 ) # # Where each R is a 3x3 matrix, each t is a 3-long column vector, and 0 0 0 is # a row vector of 3 zeros. We see that the total rotation is R2*R1 and the t # total translation is R2*t1 + t2. 
rot_mat1 = tf.cast(rot_mat1, tf.float64) rot_mat2 = tf.cast(rot_mat2, tf.float64) trans_vec1 = tf.cast(trans_vec1, tf.float64) r2r1 = tf.matmul(rot_mat2, rot_mat1) r2t1 = tf.matmul(rot_mat2, tf.expand_dims(trans_vec1, -1)) r2t1 = tf.squeeze(r2t1, axis=-1) r2r1 = tf.cast(r2r1, tf.float32) r2t1 = tf.cast(r2t1, tf.float32) return r2r1, r2t1 + trans_vec2
[ "bolianchen@gmail.com" ]
bolianchen@gmail.com
6e2ca1f18a82a23e4f8e995107279bf2358e4e14
04599835d6fd478900b2117689f3168e1ee728bc
/qa/rpc-tests/wallet-dump.py
5391b421d112f5e233678e2b9b78d2c9061a0ca4
[ "MIT" ]
permissive
cryptonclub/cryptoncoin
373371122d5986524831badd595c8d7c9b0c9c6a
70abd6815e30087b3367dca957be63d8bc7cd2fa
refs/heads/master
2020-12-14T22:58:26.288976
2020-01-19T13:55:47
2020-01-19T13:55:47
234,900,865
2
1
null
null
null
null
UTF-8
Python
false
false
5,243
py
#!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the dumpwallet RPC.""" from test_framework.test_framework import BitcoinTestFramework from test_framework.util import * def read_dump(file_name, addrs, hd_master_addr_old): """ Read the given dump, count the addrs that match, count change and reserve. Also check that the old hd_master is inactive """ with open(file_name, encoding='utf8') as inputfile: found_addr = 0 found_addr_chg = 0 found_addr_rsv = 0 hd_master_addr_ret = None for line in inputfile: # only read non comment lines if line[0] != "#" and len(line) > 10: # split out some data key_label, comment = line.split("#") # key = key_label.split(" ")[0] keytype = key_label.split(" ")[2] if len(comment) > 1: addr_keypath = comment.split(" addr=")[1] addr = addr_keypath.split(" ")[0] keypath = None if keytype == "inactivehdmaster=1": # ensure the old master is still available assert(hd_master_addr_old == addr) elif keytype == "hdmaster=1": # ensure we have generated a new hd master key assert(hd_master_addr_old != addr) hd_master_addr_ret = addr else: keypath = addr_keypath.rstrip().split("hdkeypath=")[1] # count key types for addrObj in addrs: if addrObj['address'] == addr and addrObj['hdkeypath'] == keypath and keytype == "label=": found_addr += 1 break elif keytype == "change=1": found_addr_chg += 1 break elif keytype == "reserve=1": found_addr_rsv += 1 break return found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_ret class WalletDumpTest(BitcoinTestFramework): def __init__(self): super().__init__() self.setup_clean_chain = False self.num_nodes = 1 self.extra_args = [["-keypool=90", "-usehd=1"]] def setup_chain(self): # TODO remove this when usehd=1 becomes the default # use our own cache and -usehd=1 as extra arg as the default cache is run with -usehd=0 
initialize_chain(self.options.tmpdir, self.num_nodes, self.options.cachedir + "/hd", ["-usehd=1"], redirect_stderr=True) set_cache_mocktime() def setup_network(self, split=False): # Use 1 minute timeout because the initial getnewaddress RPC can take # longer than the default 30 seconds due to an expensive # CWallet::TopUpKeyPool call, and the encryptwallet RPC made later in # the test often takes even longer. self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, self.extra_args, timewait=60, redirect_stderr=True) def run_test (self): tmpdir = self.options.tmpdir # generate 20 addresses to compare against the dump test_addr_count = 20 addrs = [] for i in range(0,test_addr_count): addr = self.nodes[0].getnewaddress() vaddr= self.nodes[0].validateaddress(addr) #required to get hd keypath addrs.append(vaddr) # Should be a no-op: self.nodes[0].keypoolrefill() # dump unencrypted wallet self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.unencrypted.dump") found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_unenc = \ read_dump(tmpdir + "/node0/wallet.unencrypted.dump", addrs, None) assert_equal(found_addr, test_addr_count) # all keys must be in the dump assert_equal(found_addr_chg, 50) # 50 blocks where mined assert_equal(found_addr_rsv, 180) # keypool size (external+internal) #encrypt wallet, restart, unlock and dump self.nodes[0].encryptwallet('test') bitcoind_processes[0].wait() self.nodes[0] = start_node(0, self.options.tmpdir, self.extra_args[0]) self.nodes[0].walletpassphrase('test', 10) # Should be a no-op: self.nodes[0].keypoolrefill() self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.encrypted.dump") found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_enc = \ read_dump(tmpdir + "/node0/wallet.encrypted.dump", addrs, hd_master_addr_unenc) assert_equal(found_addr, test_addr_count) # TODO clarify if we want the behavior that is tested below in Cryptoncoin (only when HD seed was generated and not user-provided) # assert_equal(found_addr_chg, 180 + 
50) # old reserve keys are marked as change now assert_equal(found_addr_rsv, 180) # keypool size if __name__ == '__main__': WalletDumpTest().main ()
[ "support@cryptonclub.com" ]
support@cryptonclub.com
b66a370d3cf4f34e4b12732477ebe0307503fb8e
2766d1c9d257a68306d03cca5ac2b05ebea92f50
/CatPic/views.py
ed197d93be594096be3a7e53d21a69aee730cad2
[]
no_license
DorkMage/CatPic
c11e59079f11838572ebcc6824711fb3889abdae
69d91b05a1d42b96ad6dab095df5915f0db69481
refs/heads/main
2023-01-29T04:19:10.641432
2020-12-13T17:08:02
2020-12-13T17:08:02
317,402,214
1
0
null
null
null
null
UTF-8
Python
false
false
1,104
py
from django.shortcuts import render from django.http import HttpResponse from django.template import loader from django.utils.safestring import mark_safe from .models import Pic from .models import Filter from random import randint import json def index(request): clowder = dict() catpics = Pic.objects.all() for i, pic in enumerate(catpics): url = pic.pic_url.url cats = pic.pic_cats text = pic.pic_name clowder[i] = ([url, cats, text]) filters_qs = Filter.objects.all() filters = dict() for i, fil in enumerate(filters_qs): fil_text = fil.fil_text fil_cat = fil.fil_cat filters[i] = ([fil_text, fil_cat]) template = loader.get_template('catpic/index.html') try: catpic = catpics[randint(0, len(catpics) - 1)] except: catpic = None #catpic = None context = { 'filters_qs': filters_qs, 'filters': mark_safe(json.dumps(filters)), 'catpic': catpic, 'clowder': mark_safe(json.dumps(clowder)) } return HttpResponse(template.render(context, request))
[ "kittywright@gmail.com" ]
kittywright@gmail.com
34c69a2e6e5163f82c16f2066cc150e2915edd2e
13a416a2694d1f6aa1a68cd47610236bf61cafbc
/CodePractice/Turtle/circleturtle.py
a465b8fe30527faaecef0725052e7c92e49ae8e1
[]
no_license
Highjune/Python
c637f7d0f9e5d1ac9d6ad87b4e54833b8ff4ae11
1be43816d22f5f3b8679cf0cd3939e9d9f54497a
refs/heads/master
2022-11-24T01:20:54.470172
2020-07-27T18:01:47
2020-07-27T18:01:47
263,271,337
0
0
null
null
null
null
UTF-8
Python
false
false
170
py
import turtle as t t.shape("turtle") num = int(input('num : ')) go = int((num-1)/2) for i in range(1,num): for j in range(1,i): t.forward(1) t.right(90)
[ "highjune37@gmail.com" ]
highjune37@gmail.com
623020b87f72239243d8c9c47e74679b83117736
32d96d224ce074758d0d740e77e97bb30982d4e3
/test.py
4c56e3bd72ae85110aa2532e377310cb5a11016c
[]
no_license
Moinahmed7777/Pac_man_Agent
a79c5b6e577f4c2fb61f2756a8d0912eaff0dbc2
07a92a16ec9930acfa6fd5f000efeab344945ae0
refs/heads/master
2022-11-27T20:52:38.780714
2020-08-13T20:34:51
2020-08-13T20:34:51
287,374,555
0
0
null
null
null
null
UTF-8
Python
false
false
689
py
# -*- coding: utf-8 -*- """ Created on Wed Jul 15 14:30:29 2020 @author: Necro """ X=[(4, 3), 'West', 1, [(5, 3), 'South', 1, [(5, 4), 'South', 1, ((5, 5), None, 0)]]] #X=[ ((4, 3), 'West', 1), [((5, 3), 'South', 1), [((5, 4), 'South', 1), [((5, 5), None, 0), None]] ] ] #[(((5, 3), 'South', 1), (((5, 4), 'South', 1), (((5, 5), None, 0), None)))] Y=[] #X=list(X) #popitemlist= while True: #X=list(X) popitem=X.pop() print(popitem) print('after pop:',X,len(X)) Y.append(X[1]) #X=list(popitem) print(popitem) #X=list(popitem) if len(popitem)==0: break X=list(popitem) print(Y) #print(popitem)
[ "noreply@github.com" ]
noreply@github.com
5095b5341c9fb69f1fd83369f1b8a9b3d1c26d60
05f9f0434623bcdc8983eec7510a0ebc80a8e86d
/Classes/DiPrime_Class.py
e25ca63ff3d614f32ff8e22d8c80a004b9f1fa0c
[]
no_license
shoryaconsul/DiPriMeFlip_Forest
e55af955906ca66d059eae732990c5c60b3d0b32
9f51f0e8e768b07b25b46da2e0bea235e3a03ca4
refs/heads/main
2023-02-27T08:37:09.677725
2021-02-05T03:16:37
2021-02-05T03:16:37
330,514,141
1
0
null
null
null
null
UTF-8
Python
false
false
16,242
py
import numpy as np import numpy.random as rn import random from copy import deepcopy # This file implements the proposed algorithm for classifcation (stores fractions). # The variables mean refer to fractions in this code. # %%################################################################ # Querying the value of a scalar attribute # x: True atrribute value # s: Sensitivity of attribute # e: Privacy budget def query(x, s, e): if s/e < 0: print('s = ', s, ' e = ', e) return rn.laplace(x, s/e) # Query median of continuous attribute # Xj: Values of feature for each data point (M,) # Xrange: Min and max value of Xj (2,) # num: Number of points to choose from (Deprecated) # eps: Privacy budget. None indicates no privatization # Returns: Value of attribute to split at def query_cont_med(Xj, Xrange, eps=None): M = np.shape(Xj)[0] Xj_sort = np.sort(Xj) # print('M: ',M) if M <= 1: return random.uniform(Xrange[0], Xrange[1]) elif eps is None: # Return true median return np.median(Xj) else: # Private median bin_size = np.concatenate(([Xj_sort[0]-Xrange[0]], np.diff(Xj_sort), [Xrange[1]-Xj_sort[-1]])) score_lr = np.abs(np.arange(-M, M+1, 2)) # Scoring function is N_L-N_R # print('SCORE: ', score) prob = np.exp(np.log(bin_size)-0.5*eps*score_lr) + 1e-12 # Probability disribution for median selection prob = prob/sum(prob) # Normalizing distribution ind_med = rn.choice(M+1, p=prob) if ind_med == 0: x_pick = random.uniform(Xrange[0], Xj_sort[0]) elif ind_med == M: x_pick = random.uniform(Xj_sort[-1], Xrange[1]) else: x_pick = random.uniform(Xj_sort[ind_med-1], Xj_sort[ind_med]) # h,x = np.histogram(Xj,bins=num+1,range=(Xrange[0],Xrange[1])) # N_left = np.cumsum(h)[:-1] # score = -np.abs(2*N_left - M) # prob = np.exp(0.5*eps*score)+1e-6 # Probability disribution for median selection # prob = prob/sum(prob) # Normalizing distribution # x_pick = rn.choice(x[1:-1],p=prob) return x_pick # Query median of categorical attribute # Xj: Values of feature for each data point (M,) # Xdict: 
Possible values of Xj (list of variable length) # eps: Privacy budget. None indicates no privatization # Returns: List of attributes split for both children def query_cat_med(Xj, Xdict, eps=None): M = np.shape(Xj)[0] Xnum = len(Xdict) unique, counts = np.unique(Xj, return_counts=True) num_val = np.zeros(Xnum) # Number of data points with with each possible value i = 0 for val in Xdict: if val in unique: num_val[i] = counts[unique == val][0] i = i+1 # Function to convert decimal to logical lists blist = lambda i, n: [False]*(n-int(np.ceil(np.log2(i+1)))-int(i == 0) ) + [bool(int(j)) for j in bin(i)[2:]] N_child = np.zeros(2**(Xnum-1)-1) # Number of points in child node for i in range(1, 2**(Xnum-1)): # Look at all possible split N_child[i-1] = np.sum(num_val[blist(i, Xnum)]) if eps is None: # Return true median i_min = np.argmin(np.abs(N_child-M/2)) split_log = blist(i_min+1, Xnum) # Membership to left or right chiled val_min = list(np.array(Xdict)[split_log]) val_comp = list(set(Xdict)-set(val_min)) return val_min, val_comp else: # Private median score = -np.abs(2*N_child - M) prob = np.exp(0.5*eps*score)+1e-6 # Probability disribution for median selection prob = prob/sum(prob) # Normalizing distribution i_pick = rn.choice(2**(Xnum-1)-1, p=prob) split_log = blist(i_pick+1, Xnum) # Membership to left or right chiled val_pick = list(np.array(Xdict)[split_log]) val_comp = list(set(Xdict)-set(val_pick)) return val_pick, val_comp # Function to find best split for selected attributes # X: Data (M,N) # y: Target (M,) # By: Max absolute value of target (1,) # A: Dictionary of feature ranges (for cont)/values (for categorical) # cat_idx: List of indices for categorical features # K: Number of features to consider for split # num_cand: Number of candidates for continuous split (Deprecated) # eps: Privacy budget # Returns: Splitting feature index, splitting value/categories def split_count(X, y, A, By, cat_idx, K, eps=None): M, N = np.shape(X) K1 = min(K, len(list(A.keys()))) 
# In case there are less than K attributes idx_cand = rn.permutation(list(A.keys()))[:K1] # Select k feature indices # Finding median splits val_cand = dict() if eps is None: eps_fn = None else: eps_fn = eps/2 for idx in idx_cand: if idx in cat_idx: val_cand[idx] = query_cat_med(X[:, idx], A[idx], eps_fn) else: val_cand[idx] = query_cont_med(X[:, idx], A[idx], eps_fn) # Finding indices in children sse_idx = np.zeros(K1) for idx in idx_cand: if idx in cat_idx: ind_upp = np.where(X[:, idx] == np.expand_dims( np.array(val_cand[idx][0]), axis=1))[1] ind_low = np.where(X[:, idx] == np.expand_dims( np.array(val_cand[idx][0]), axis=1))[1] else: ind_upp = np.where(X[:, idx] >= val_cand[idx])[0] ind_low = np.where(X[:, idx] < val_cand[idx])[0] y_upp = y[ind_upp] y_low = y[ind_low] pos = np.where(idx_cand == idx)[0][0] if len(y_upp) != 0 and len(y_low) != 0: # Checking that children are not empty sse_idx[pos] = (len(y_upp)*np.var(y_upp) + len(y_low)*np.var(y_low))/len(y) elif len(y_upp) == 0 and len(y_low) == 0: sse_idx[pos] = 40*By**2 else: sse_idx[pos] = np.var(y) if eps is None: idx_split = idx_cand[np.argmin(sse_idx)] else: # Exponential mechanism to pick split if len(y) != 0: score_split = np.exp(-0.5*sse_idx*eps_fn/(4*By**2/len(y))) else: # Empty node, so all splits are equivalent score_split = np.ones(K1) # print('# SPLIT: ',len(y_upp),len(y_low)) if np.any(np.isnan(score_split/np.sum(score_split))): score_split = np.ones(K1) idx_split = rn.choice(idx_cand, p=score_split/np.sum(score_split)) return idx_split, val_cand[idx_split] # %% Base classes for Extremely Random Trees with splits along median class DiPrimeCTree(): def __init__(self, depth=0, max_depth=np.inf, max_features=None, parent=None): self.mean = np.nan self.count = None # For book-keeping self.split_ind = None # Index of feature for split self.split_val = None # Feature value at split self.max_features = max_features # Features to consider per split self.left = None self.right = None self.parent = parent 
if depth > max_depth: raise ValueError('Depth larger than max depth') self.depth = depth # Depth = 0 as tree is empty self.max_depth = max_depth # %% Fitting random tree to data given target values - only leaf nodes noised # Left is larger than split, right is smaller than split # A: Dictionary of feature ranges (for cont)/values (for categorical) # features. A only contains values for features that can be split on # cat_idx: List of indices for categorical features # tbound: Class values. If single value given then classes assumed to be [-tbound, tbound] # eps: Privacy budget for tree # b_med: Fraction of privacy budget for determining median split def fit(self, X, y, A, cat_idx, tbound, eps=None, b_med=0.5): # X: considered to have M samples and N features (M x N) # y: value to be predicted (M,) if eps is not None and b_med is None: raise ValueError('Budget split for median required') if tbound is not None: # B_L, B_U are lower and upper bounds on y if len(tbound) == 2: # mean_def = np.mean(tbound) # Default value of mean for empty node B_L = tbound[0] B_U = tbound[1] elif isinstance(tbound, int) or isinstance(tbound, float): # mean_def = 0 # Default value of mean for empty node B_L = -np.abs(tbound) B_U = np.abs(tbound) else: raise ValueError("Invalid value passed to tbound.") mean_def = random.uniform(B_L, B_U) M, N = np.shape(X) self.count = int(M) if eps is not None: eps_med = eps*b_med/self.max_depth # Privacy budget for split eps_level = eps*(1-b_med) # Privacy budget for mean else: eps_med = None if self.depth == self.max_depth or not A: # Reached leaf node or no more attributes to split on # Private case if eps is not None: # Compute and noise sufficient statistics count_clip = len(y) # print('Depth: ',self.depth,' True count: ',len(y),' Stored count: ',self.count) if np.isnan(np.mean(y)): # Empty node self.mean = mean_def else: count_0 = max(0, query(x=np.sum(y == B_L), s=1, e=eps_level)) # Query class counts count_1 = max(0, query(x=np.sum(y == B_U), 
s=1, e=eps_level)) try: self.mean = count_1/(count_0+count_1) except: if count_1 > count_0: self.mean = 1 else: self.mean = 0.5 # Non-private case else: # Compute sufficient statistics if np.isnan(np.mean(y)): # Empty node self.mean = mean_def else: self.mean = np.sum(y == B_U)/len(y) return # Finding split # Number of features to consider at each split if self.max_features is None: K = int(N) else: K = int(self.max_features) By = np.amax(np.abs(tbound)) # num_cand = 30 # Number of candidates self.split_ind, feat_val = split_count(X, y, A, By, cat_idx, K, eps_med) if self.split_ind in cat_idx: # If categorical feat_left, feat_right = feat_val self.split_val = feat_left.copy() else: # If continuous self.split_val = 1*feat_val # Splitting data if self.split_ind in cat_idx: ind_upp = np.where(X[:, self.split_ind] == np.expand_dims( np.array(feat_left), axis=1))[1] ind_low = np.where(X[:, self.split_ind] == np.expand_dims( np.array(feat_right), axis=1))[1] A_upp = deepcopy(A) if len(feat_left) == 1: A_upp.pop(self.split_ind, None) # Remove from allowed splits else: A_upp[self.split_ind] = feat_left.copy() # Update feature values A_low = deepcopy(A) if len(feat_right) == 1: A_low.pop(self.split_ind, None) # Remove from allowed splits else: A_low[self.split_ind] = feat_right.copy() # Update feature values else: ind_upp = np.where(X[:, self.split_ind] >= self.split_val)[0] ind_low = np.where(X[:, self.split_ind] < self.split_val)[0] A_upp = deepcopy(A) if A_upp[self.split_ind][1] <= self.split_val: # If split no longer possible # print('POP: ',A_upp[self.split_ind][1],self.split_val) A_upp.pop(self.split_ind, None) else: A_upp[self.split_ind][0] = self.split_val # Updating lower bound A_low = deepcopy(A) if A_low[self.split_ind][0] >= self.split_val: # If split no longer possible # print('POP: ',A_low[self.split_ind][0],self.split_val) A_low.pop(self.split_ind, None) else: A_low[self.split_ind][1] = self.split_val # Updating upper bound X_upp = X[ind_upp, :] y_upp = 
y[ind_upp] X_low = X[ind_low, :] y_low = y[ind_low] # print('LEFT: ',len(y_upp),'RIGHT: ',len(y_low)) # Recursively splitting tree_upp = DiPrimeCTree(depth=self.depth+1, max_depth=self.max_depth, max_features=self.max_features, parent=self) tree_upp.fit(X_upp, y_upp, A_upp, cat_idx, tbound, eps, b_med) self.left = tree_upp tree_low = DiPrimeCTree(depth=self.depth+1, max_depth=self.max_depth, max_features=self.max_features, parent=self) tree_low.fit(X_low, y_low, A_low, cat_idx, tbound, eps, b_med) self.right = tree_low # %% Predicting target values based on attributes # X: M samples and N features (M x N) # cat_idx: List of indices for categorical features # Returns predicted y def predict(self, X, cat_idx): if self.depth == 0 and self.split_ind is None: raise ValueError('Tree not fit to data') else: M, N = np.shape(X) y = np.zeros(M) # Predicted values for i in range(M): y[i] = self.predict_y(X[i, :], cat_idx) return y # x: Sample of N features (N,) def predict_y(self, x, cat_idx): ind = self.split_ind if self.split_ind is None: # Leaf node return self.mean # If true, go right if ind in cat_idx: dirn = (x[ind] not in self.split_val) else: dirn = x[ind] < self.split_val if not dirn: # Go to right child return self.left.predict_y(x, cat_idx) else: # Go to left child return self.right.predict_y(x, cat_idx) # %############################################################################ # %% Extremely Random Forest # n_trees: Number of random trees # partition: If true, grow tree on disjoint subsets (rows) of data class DiPrimeCForest(): def __init__(self, n_trees=10, max_depth=np.inf, max_features=None, partition=True): self.num_trees = n_trees # Numm=ber of trees self.trees = [] # ExtRandTrees self.partition = partition for i in range(n_trees): # Initialize all trees self.trees.append(DiPrimeCTree( depth=0, max_depth=max_depth, max_features=max_features, parent=None)) # %% Fitting random tree to data given target values # Left is larger than split, right is smaller 
than split def fit(self, X, y, A, cat_idx, tbound, eps=None, b_med=0.5): if self.partition: # Growing trees on disjoint subsets of rows M, N = np.shape(X) ind_part = np.array_split(rn.permutation(M), self.num_trees) # Partition for i, tree in enumerate(self.trees): # Fit tree to partition if eps is None: tree.fit(X[ind_part[i], :], y[ind_part[i]], A, cat_idx, tbound) else: tree.fit(X[ind_part[i], :], y[ind_part[i]], A, cat_idx, tbound, eps, b_med) else: # Growing trees on all the data for (t, tree) in enumerate(self.trees): if eps is None: tree.fit(X, y, A, cat_idx, tbound) else: tree.fit(X, y, A, cat_idx, tbound, eps/self.num_trees, b_med) # %% Predicting target values based on attributes # X: M samples and N features (M x N) # cat_idx: List of indices for categorical features # Returns predicted y def predict(self, X, cat_idx): M, N = np.shape(X) pred = np.zeros((M, self.num_trees)) for i in range(self.num_trees): # Prediction from each tree pred[:, i] = self.trees[i].predict(X, cat_idx) return np.mean(pred, axis=1)
[ "shoryaconsul@gmail.com" ]
shoryaconsul@gmail.com
f546e9e52b380e7d078d7a83b9522f48799bf1fb
6e172edee44d5985d19327cf61865d861395d595
/2020/11/y2020_d11_p01.py
14c61f5a7ba331bb0fdd890b3ea3476e58d1cce5
[]
no_license
rHermes/adventofcode
bbac479ec1c84c55484effa2cd94889d621b3718
4cbe7a952678c5f09438702562b7f6f673a1cf83
refs/heads/master
2023-01-14T07:05:35.769426
2023-01-01T09:53:11
2023-01-01T10:13:29
225,170,580
1
0
null
null
null
null
UTF-8
Python
false
false
1,699
py
import fileinput import itertools as it # Let's build a jump table def jumptbl(M, ROWS, COLS, x, y): arounds = [] for dy, dx in [(-1,-1), (-1, 0), (-1, 1), (0,-1), (0, 1), (1,-1), (1,0), (1,1)]: zx = x + dx zy = y + dy idx = zy*COLS + zx if 0 <= zx < COLS and 0 <= zy < ROWS and M[idx] != None: arounds.append(idx) return arounds # Creates a compressed version of a jump array def compress(M, ROWS, COLS): comp = [] # translate from full to sparse trans = {} # Build spare index for y in range(COLS): for x in range(ROWS): idx = y*COLS + x if M[idx] == None: continue trans[idx] = len(comp) comp.append(M[idx]) # Build jump table jmp = {} for oidx, nidx in trans.items(): y = oidx // COLS x = oidx % COLS # Second pass, now to create jump table adj = frozenset(trans[k] for k in jumptbl(M, ROWS, COLS, x, y)) if len(adj) < 4: comp[nidx] = True else: jmp[nidx] = adj return (comp, jmp) # Step from M to N uing jmp def step(M, N, jmp): changed = False for idx, adj in jmp.items(): t = sum(M[x] for x in adj) N[idx] = (M[idx] and t < 4) or ((not M[idx]) and t == 0) changed |= N[idx] != M[idx] return changed lines = [line.rstrip() for line in fileinput.input()] ROWS = len(lines) COLS = len(lines[0]) # None takes the spot of Empty M = [{'L': False, '#': True, '.': None}[x] for x in it.chain(*lines)] comp, jmp = compress(M, ROWS, COLS) A = comp B = A.copy() while step(A, B, jmp): B, A = A, B print(sum(A))
[ "teodor@spaeren.no" ]
teodor@spaeren.no
3924fa132bfe328ae6a8faf35578e47d92f3d63e
2e5cac4d666bfd926112111060059feda98f5743
/online.py
d96915e0ad8a8f411dd31069315ef10ac790c2cc
[]
no_license
swifton/Template
014b471cce502b3f997fc5e6f8b269a0fe40288f
3eea5e31335163dfda0976e3b2ba9ea2bb8e51df
refs/heads/master
2021-01-01T05:30:58.946791
2018-04-11T06:14:06
2018-04-11T06:14:06
40,803,098
0
0
null
null
null
null
UTF-8
Python
false
false
1,444
py
import tornado.websocket import os.path players = [] update_rate = 60 class Application(tornado.web.Application): def __init__(self): handlers = [(r"/", MainHandler), (r"/socket", SocketHandler), (r"/lib/(.*)", tornado.web.StaticFileHandler, {'path': "../template/js"})] settings = dict( cookie_secret="IguyZ4KsTOOL0igwXWHJPnTQaZgULkmNom4uO6xXxrE", template_path=os.path.join(os.path.dirname(__file__), "templates"), static_path=os.path.join(os.path.dirname(__file__), "static"), xsrf_cookies=True, debug=True # TODO: make it impossible to forget to change this in production ) super(Application, self).__init__(handlers, **settings) class MainHandler(tornado.web.RequestHandler): def get(self): self.render("index.html") class SocketHandler(tornado.websocket.WebSocketHandler): def open(self): players.append(self) # handle new player def on_close(self): player_index = game.players.index(self) players.remove(self) # handle unplugged player def on_message(self, message): player_index = players.index(self) # handle the message if __name__ == "__main__": app = Application() app.listen(80) loop = tornado.ioloop.IOLoop.current() tornado.ioloop.PeriodicCallback(game.game_loop, 1000 / update_rate).start() loop.start()
[ "anton.swifton@gmail.com" ]
anton.swifton@gmail.com
94d6e7a4e245070004aceed465e415b48a91fdd8
5f7bc18fb99b00e8d47865621c42d19de406ae4f
/page/page_shop_transfer.py
60030d56af440882b4964de52c648dd2d7e2ee15
[]
no_license
ZYG-Test/WebTest
0958652095c59ac73978a8b3efc5ac153e6e050c
e1e6e180561f67e5368a858dff27c3975cb5696e
refs/heads/master
2022-12-20T08:21:54.969577
2020-09-09T09:38:36
2020-09-09T09:38:36
287,206,592
0
0
null
null
null
null
UTF-8
Python
false
false
2,952
py
import time import page from base.base import Base import faker f = faker.Faker() class PageTransfer(Base): # 点击跨境汇款 def page_click_money_transfer(self): self.base_click(page.shop_money_transfer) #收款人 def page_receive(self,name,lastname,account,amount,number): # 输入收款人姓名 self.base_input(page.shop_receive_name, name) # 输入姓氏 self.base_input(page.shop_receive_lastname, lastname) # 输入收款人账户 self.base_input(page.shop_receive_account, account) # 输入收款金额 self.base_input(page.shop_receive_amount, amount) # 输入收款人联系电话 self.base_input(page.shop_receive_number, number) # 选择汇款用途 self.base_click(page.shop_receive_use) time.sleep(1) self.base_click(page.shop_receive_use_select) # 汇款人 def page_certificate(self,country="CHN"): #输入姓名 self.base_input(page.shop_remitter_name,f.last_name()) #输入姓氏 self.base_input(page.shop_remitter_lastname,f.last_name()) #输入生日 self.base_input(page.shop_remitter_birthday,f.date()) #输入国籍 self.base_input(page.shop_remitter_country,country) #证件类型选择 self.base_click(page.shop_remitter_certificate) time.sleep(1) self.base_click(page.shop_certificate_select) #证件所属国籍 self.base_input(page.shop_certificate_country, country) #输入证件号 self.base_input(page.shop_certificate_number, f.credit_card_number()) #输入地址 self.base_input(page.shop_certificate_address,f.street_address()) #输入市区 self.base_input(page.shop_certificate_area, f.city_suffix()) # 勾选认证点击下一步 def page_check_next(self): self.base_click(page.shop_certification_check) self.base_click(page.shop_nextBtn) self.base_click(page.shop_nextBtn_two) # 输入支付密码 def page_input_pwd(self,pwd): self.base_input(page.shop_input_pay,pwd) # 点击确认汇款 def page_click_determine(self): self.base_click(page.shop_determine) # 获取跨境汇款成功返回信息 def page_get_remittance_info(self): return self.base_get_text(page.shop_remittance_info) '''跨境汇款组合业务''' def cross_remittance(self,name,lastname,account,amount,number,pwd): # 点击跨境汇款 self.page_click_money_transfer() # 输入收款人信息 self.page_receive(name,lastname,account,amount,number) # 输入汇款人信息 
self.page_certificate() # 勾选已认证点击下一步 self.page_check_next() # 输入支付密码 self.page_input_pwd(pwd) # 确认缴费 self.page_click_determine()
[ "18142651995@163.com" ]
18142651995@163.com
d8ff5b2120acc3d38e45ebe0b0362c23ee03ccad
6dfdc65b1d633847763fc5012aeb4ca1438fe6ae
/Python and Advanced Analytics/Database/Mongo/product.py
a612bd231d21867bec183f6463c9296e891fa1d1
[]
no_license
SaurabhRuikar/CdacRepo
71537432885c83995ea6f2248d2a20ef08b4c9d4
12c2a4be8141443e4a567104506a81f115baeab1
refs/heads/master
2022-05-26T02:22:02.748779
2021-03-03T15:33:13
2021-03-03T15:33:13
223,311,490
0
0
null
2022-04-12T21:57:09
2019-11-22T02:57:34
HTML
UTF-8
Python
false
false
2,092
py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Sat Dec 7 16:50:58 2019 @author: student """ from pymongo import MongoClient import sys client=MongoClient('localhost:27017') print('connection done') db=client.Product def insert(): try: pid=int(input('Enter Product id : ')) pName=input('Enter Product Name : ') pquantity=int(input('Enter Product Quantity : ')) price=int(input('Enter Price : ')) db.product.insert_one( { "id":pid, "name":pName, "pquantity":pquantity, "price":price } ) print('Inserted successfully') except Exception as e: print(str(e)) def read(): try: r=db.product.find() for i in r: print(i) except Exception as e: print(str(e)) def delete(): try: pid=int(input("\n Enter Product id to Delete : ")) db.product.delete_many({'id':pid}) print('\n Deletion Successful \n') except Exception as e: print(str(e)) def update(): try: pid=int(input('Enter Product id : ')) pName=input('Enter Product Name : ') pquantity=int(input('Enter Product Quantity : ')) price=int(input('Enter Price : ')) db.product.update_one( {'id':pid}, { "$set":{ "name":pName, 'pquantity':pquantity, "price":price } } ) print('\n Updated successfully') except Exception as e: print(str(e)) choice=0 while(1): choice=input('\n Select \n 1. Insert \n 2. Update \n 3. Read \n 4. Delete \n 5. Exit \n') if choice=='1': insert() elif choice=='2': update() elif choice=='3': read() elif choice=='4': delete() else: sys.exit(0)
[ "saurabh2830076@gmail.com" ]
saurabh2830076@gmail.com
9164e169998f84df100794fc7306eac40e464065
cd685857125d6649bb226e19bc6de48905457bd1
/C04Recursion/Creativity/c18.py
c96b0db58152e16942f286b1feb30eff9aa06b8b
[]
no_license
nickramos94/DataStructuresAndAlgorithmsInPython
b37dd22018628125001f2853a8e05812e021db3f
b5d5d322e720d3e2411500257f7befb07b6ee4be
refs/heads/master
2022-12-17T17:33:14.968439
2020-01-14T10:08:59
2020-01-14T10:08:59
292,007,780
0
0
null
2020-09-01T13:38:24
2020-09-01T13:38:23
null
UTF-8
Python
false
false
385
py
def conson_more(s): VOWELS = {*"aeiouAEIOU"} def consonants_minus_vowels(index): if index == -1: return 0 count = -1 if s[index] in VOWELS else 1 return count + consonants_minus_vowels(index - 1) return consonants_minus_vowels(len(s) - 1) > 0 if __name__ == "__main__": print(conson_more("abcde")) print(conson_more("abce"))
[ "axel.brisse@gmail.com" ]
axel.brisse@gmail.com
9c9dd3628ce7fc7213ab36ddaa75d362eea81405
c6bc84336e5ccb62494a91a7d79fb8998fb6cf66
/lottery_ndriver.py
dd3751d0bbb8929c70d94ceac0cea7fbfa814171
[]
no_license
wonjr/python_lottery
a702d0b287c89b9c9844fddb9b519f0f0c952b65
bf2f689d6bb52d77ca98da37d808eb5ec9a50761
refs/heads/main
2023-04-05T13:08:25.629941
2021-04-13T10:34:34
2021-04-13T10:34:34
357,500,721
0
0
null
null
null
null
UTF-8
Python
false
false
7,576
py
import lottery HEAD_HTML = ''' <head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="viewport" content="width=device-width, initial-scale=1"> <!-- The above 3 meta tags *must* come first in the head; any other head content must come *after* these tags --> <title>로또</title> <style type="text/css" media="screen"> body { background-color: #FAFAFA; padding-top: 80px; } .navbar { background-color: #29B895; border-radius: 0 !important; } .navbar-brand { color: white; } .navbar-brand:hover { color: white; } .jumbotron { border-radius: 10px; padding: 20px; } .jumbotron h1 { margin: 0 0 20px 0; font-size: 24px !important; } .cash-div { font-size: 24px; } .cash-div b { margin-right: 10px; } .red { background-color: #D84134; color: white; } .green { background-color: #6AC83B; } .yellow { background-color: #FBC34B; } .blue { background-color: #528FD2; color: white; } .black { background-color: #414141; } .plus { font-size: 30px; margin: 0 10px 0 10px; } .ball { color: white; font-size: 20px; border-radius: 50%; display: inline-block; width: 60px; height: 60px; text-align: center; line-height: 60px; } .attempt-numbers { font-size: 18px; outline: 1px solid black; display: inline-block; margin-bottom: 20px; margin-right: 20px; height: 30px; } .attempt-number { display: inline-block; width: 30px; text-align: center; line-height: 30px; } .attempt-prize { display: inline-block; font-size: 18px; } </style> <!-- Latest compiled and minified CSS --> <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css" integrity="sha384-1q8mTJOASx8j1Au+a5WDVnPi2lkFfwwEAa8hDDdjZlpLegxhjVME1fgjWPGmkzs7" crossorigin="anonymous"> <!-- Optional theme --> <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap-theme.min.css" integrity="sha384-fLW2N01lMqjakBkx3l/M9EahuwpSfeNvV63J5ezn3uZzapT0u7EYsXMjQV+0En5r" crossorigin="anonymous"> <!-- Latest compiled and minified JavaScript --> <script 
src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/js/bootstrap.min.js" integrity="sha384-0mSbJDEHialfmuBBQP6A4Qrprq5OVfW37PRR3j5ELqxss1yVqOtnepnHVP9aJ7xS" crossorigin="anonymous"></script> </head> ''' MAIN_HTML = ''' <!DOCTYPE html> <html> <body> <nav class="navbar navbar-fixed-top"> <div class="container-fluid"> <!-- Brand and toggle get grouped for better mobile display --> <div class="navbar-header"> <a class="navbar-brand" href="#">LOTTO</a> </div> </div><!-- /.container-fluid --> </nav> <div class="col-xs-10 col-xs-offset-1"> <div class="jumbotron"> <h1><b>당첨 번호</b></h1> {numbers} </div> </div> <div class="col-xs-5 col-xs-offset-1"> <div class="jumbotron cash-div"> <b>당첨 금액</b> <span>‎₩{total_prize}</span> </div> </div> <div class="col-xs-5"> <div class="jumbotron cash-div"> <b>쓴 금액</b> <span>‎₩{total_cost}</span> </div> </div> <div class="col-xs-10 col-xs-offset-1"> <div class="jumbotron"> <h1><b>내 번호</b></h1> {attempts} </div> </div> <!-- jQuery (necessary for Bootstrap's JavaScript plugins) --> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.3/jquery.min.js"></script> <!-- Include all compiled plugins (below), or include individual files as needed --> </body> </html> ''' def get_color(number): if number <= 10: return "yellow" elif number <= 20: return "blue" elif number <= 30: return "red" elif number <= 40: return "black" else: return "green" def generate_numbers_html(numbers): # template for winning ball ball_html = ''' <div class="ball {color}"> {number} </div> ''' html = "" for number in numbers[:6]: html += ball_html.format(number = number, color = get_color(number)) html += '<span class="plus">+</span>' html += ball_html.format(number = numbers[-1], color = get_color(numbers[-1])) return html def generate_attempt_html(attempt, winning_numbers): number_html = ''' <span class="attempt-number">{number}</span> ''' red_number_html = ''' <span class="attempt-number red">{number}</span> ''' blue_number_html = ''' <span class="attempt-number 
blue">{number}</span> ''' attempt_html = "" for num in attempt[0]: if num in winning_numbers[:6]: attempt_html += red_number_html.format(number=num) elif num in winning_numbers[6:]: attempt_html += blue_number_html.format(number=num) else: attempt_html += number_html.format(number=num) html = ''' <div class="attempt"> <div class="attempt-numbers"> {attempt} </div> <div class="attempt-prize"> ‎₩{prize} </div> </div> '''.format(attempt=attempt_html, prize=attempt[1]) return html def main(winning_numbers, tries, total_prize, total_cost): out_file = open('lottery.html', 'w', encoding='utf-8') winning_numbers_html = generate_numbers_html(winning_numbers) attempts_html = "" for attempt in tries: attempts_html += generate_attempt_html(attempt, winning_numbers) out_file.write(HEAD_HTML + MAIN_HTML.format( numbers=winning_numbers_html, attempts=attempts_html, total_prize=total_prize, total_cost=total_cost) ) out_file.close() NUM_TRIES = int(input("돈을 넣어주세요 한 개에 천원\n돈: ")) // 1000 WINNING_NUMBERS = lottery.draw_winning_numbers() tries = [] total_prize = 0 total_cost = 0 for i in range(NUM_TRIES): select_menu = int(input("1.자동 2.번호 선택\n입력: ")) if select_menu == 1: attempt = sorted(lottery.generate_numbers(6)) else: attempt = sorted(lottery.generate_input_numbers(6)) prize = lottery.check(attempt, WINNING_NUMBERS) tries.append((attempt, prize)) total_prize += prize total_cost += 1000 main( WINNING_NUMBERS, sorted(tries, key=lambda x: -x[1]), total_prize, total_cost )
[ "noreply@github.com" ]
noreply@github.com
a0322b2f81ed8ef731da2cc2a758f162c0d92b65
9b36652dafb58888b7a584806ee69a33fcb609d5
/objutils/pickleif.py
fd210c74a9e0aa035425e908bbf6ad39a83c3423
[]
no_license
pySART/objutils
db33e4576cf68111cb4debbafec06a0204844938
5ba4631b2245caae80d4dbe0053db0f2706ba53f
refs/heads/master
2020-06-29T03:35:24.485977
2016-11-21T14:21:56
2016-11-21T14:21:56
74,451,500
5
2
null
2016-11-22T08:36:10
2016-11-22T08:36:10
null
UTF-8
Python
false
false
1,385
py
#!/usr/bin/env python # -*- coding: utf-8 -*- __version__ = "0.1.0" __copyright__ = """ pyObjUtils - Object file library for Python. (C) 2010-2013 by Christoph Schueler <github.com/Christoph2, cpu12.gems@googlemail.com> All Rights Reserved This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. """ import abc DUMMY_PROTOCOL = None class PickleIF(object): __metaclass__ = abc.ABCMeta @abc.abstractmethod def dump(self, obj, file_, protocol = DUMMY_PROTOCOL): pass @abc.abstractmethod def dumps(self, obj, protocol = DUMMY_PROTOCOL): pass @abc.abstractmethod def load(self, file_): pass @abc.abstractmethod def loads(self, string_): pass
[ "cpu12.gems@googlemail.com" ]
cpu12.gems@googlemail.com
a856bc79b318b4773ae61f88951e1698c816a3f8
19b93af5cb896d93f5d962cec6b9f177eeff0339
/zinc-launcher
e90e21f845ca8b3f2f4a5ccb7c3b0b21efa24f3a
[]
no_license
mattn/zinc-launcher
0acccb454aafbe957d4599ee64891b0a0f7f6359
c1181cf7313797bf7ccef32aed7cc2f4741d2ad9
refs/heads/master
2023-06-27T05:42:18.735780
2013-09-16T15:44:06
2013-09-16T15:44:06
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,604
#!/usr/bin/python import gtk import os import subprocess import threading class zinc_thread(threading.Thread): def __init__(self, id): threading.Thread.__init__(self) self.id = id def run(self): os.system("padsp ./zinc " + self.id) gtk.gdk.threads_init() win = gtk.Window() win.set_title("ZiNc Launcher") win.set_default_size(400, 500) win.connect('destroy', gtk.main_quit) swin = gtk.ScrolledWindow() swin.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_ALWAYS) tree_model = gtk.ListStore(str, str) tree_view = gtk.TreeView(tree_model) renderer = gtk.CellRendererText() tree_view.append_column(gtk.TreeViewColumn("ID", renderer, text=0)) tree_view.append_column(gtk.TreeViewColumn("Title", renderer, text=1)) def on_activated(tree_view, path, column): model = tree_view.get_model() iter = model.get_iter(path) id = model.get_value(iter, 0) t = zinc_thread(id) t.start() tree_view.set_sensitive(False) while t.isAlive(): gtk.main_iteration(False) tree_view.set_sensitive(True) tree_view.connect('row-activated', on_activated) os.putenv("LD_LIBRARY_PATH", ".") p = subprocess.Popen("./zinc --list-games", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True) (stdouterr, stdin) = (p.stdout, p.stdin) stdouterr.readline() while True: line = stdouterr.readline().strip() if not line: break iter = tree_model.append() item = line.split(" ", 1) tree_model.set_value(iter, 0, item[0]) tree_model.set_value(iter, 1, item[1]) swin.add(tree_view) win.add(swin) win.show_all() gtk.main()
[ "mattn.jp@gmail.com" ]
mattn.jp@gmail.com
2c0f774cf9e57e9947263de535f7d1b98fb4245a
411e8d3400ec0f3b5b8647a37202a52fa4df2c24
/wx/wx_views.py
5430f525f4432f732c822af933ee912134eb0d1c
[]
no_license
skymaxu/abh-tornado
f987fe6b26814d54d8aa220c2b082bfc1f5b2d9e
e1d896d29a6383fc6829235a7e53636fff8a006b
refs/heads/master
2021-01-17T17:36:42.962059
2016-07-06T02:43:17
2016-07-06T02:43:17
62,541,089
0
0
null
null
null
null
UTF-8
Python
false
false
6,497
py
#!/usr/bin/python #coding: utf-8 import json import os import sys import time import traceback import tornado.ioloop import tornado.options import tornado.web from views import BaseHandler class MainHandler(BaseHandler): def get(self): return self.render('wx/index.html') # class LoginHandler(BaseHandler): # def get(self): # self.set_secure_cookie('user', 'xxxxxxxx') # resp = {} # resp['ret'] = retcode.OK # resp['ret_msg'] = retcode.get_ret_msg(resp['ret']) # jsonstr = json.dumps(resp) # self.set_header('Content-Type', 'application/json') # self.write(jsonstr) # # class LogoutHandler(BaseHandler): # @tornado.web.authenticated # def get(self): # self.clear_cookie('user') # ret = 0 # resp = {} # resp['ret'] = ret # resp['ret_msg'] = retcode.get_ret_msg(resp['ret']) # jsonstr = json.dumps(resp) # self.set_header('Content-Type', 'application/json') # self.write(jsonstr) # # class InterfaceMainHandler(BaseHandler): # def get(self): # self.write_log.critical('Ileggal intrution!') # if ('signature' not in self.request.arguments or # 'timestamp' not in self.request.arguments or # 'nonce' not in self.request.arguments or # 'echostr' not in self.request.arguments): # return # # # check signature # signature = self.get_argument('signature') # timestamp = self.get_argument('timestamp') # nonce = self.get_argument('nonce') # echostr = self.get_argument('echostr') # if not interface_logic.check_signature(signature, timestamp, nonce, config.OA_TOKEN): # self.write_log.critical('Ileggal intrution!') # return # # self.write(echostr) # # def post(self): # if ('signature' not in self.request.arguments or # 'timestamp' not in self.request.arguments or # 'nonce' not in self.request.arguments): # return # # # check signature # signature = self.get_argument('signature') # timestamp = self.get_argument('timestamp') # nonce = self.get_argument('nonce') # if not interface_logic.check_signature(signature, timestamp, nonce, config.OA_TOKEN): # self.write_log.critical('Ileggal intrution!') # 
return # # # parse msg # msg = self.request.body # self.write_log.info('Recv msg: ' + msg) # msgdict = msg_helper.parse_input_msg(msg) # # # check if send to the right oa # to_user_name = msg_helper.get_value_by_key(msgdict, 'ToUserName') # if to_user_name != config.OA_USERNAME: # self.write_log.critical('Send to the wrong official account!') # return # # # process msg # reply_msg = interface_logic.process_msg(msgdict) # if reply_msg != None: # self.write_log.info('Reply msg: ' + reply_msg) # self.write(reply_msg) # return # # class UserViewHandler(BaseHandler): # @tornado.web.authenticated # def get(self): # if ('id' not in self.request.arguments): # resp = {} # resp['ret'] = retcode.MISS_ARGUMENT # resp['ret_msg'] = retcode.get_ret_msg(resp['ret']) # jsonstr = json.dumps(resp) # self.set_header('Content-Type', 'application/json') # self.write(jsonstr) # return # # id = int(self.get_argument('id')) # # if (id < 0): # resp = {} # resp['ret'] = retcode.INVALID_ARGUMENT_VALUE # resp['ret_msg'] = retcode.get_ret_msg(resp['ret']) # jsonstr = json.dumps(resp) # self.set_header('Content-Type', 'application/json') # self.write(jsonstr) # return # # ret, data = admin_logic.view_user(id) # # resp = {} # resp['ret'] = ret # resp['ret_msg'] = retcode.get_ret_msg(resp['ret']) # resp['data'] = data # jsonstr = json.dumps(resp) # self.set_header('Content-Type', 'application/json') # self.write(jsonstr) class WxHandler(BaseHandler): def parse_request_xml(self, rootElem): msg = {} if rootElem.tag == 'xml': for child in rootElem: msg[child.tag] = child.text # 获得内容 return msg def get(self): # 获取输入参数 signature = self.get_argument('signature', '') timestamp = self.get_argument('timestamp', '') nonce = self.get_argument('nonce', '') echostr = self.get_argument('echostr', '') # 自己的token token = "abhweixin" # 这里改写你在微信公众平台里输入的token # 字典序排序 list = [token, timestamp, nonce] list.sort() sha1 = hashlib.sha1() map(sha1.update, list) hashcode = sha1.hexdigest() # sha1加密算法 # 
如果是来自微信的请求,则回复echostr if hashcode == signature: self.write(echostr) def post(self): rawstr = self.request.body msg = self.parse_request_xml(ET.fromstring(rawstr)) MsgType = tornado.escape.utf8(msg.get("MsgType")) Content = tornado.escape.utf8(msg.get("Content")) FromUserName = tornado.escape.utf8(msg.get("FromUserName")) CreateTime = tornado.escape.utf8(msg.get("CreateTime")) ToUserName = tornado.escape.utf8(msg.get("ToUserName")) if MsgType != "text": Content = "Sorry,亲,你的style我不懂!" if not Content: Content = "感谢您关注爱博华科技有限公司!我们拥有专业的技术团队,为您提供专业的项目外包服务!如果您有什么需要,可以随时联系我们,我们竭诚为您服务!" data = '''<xml> <ToUserName><![CDATA[%s]]></ToUserName> <FromUserName><![CDATA[%s]]></FromUserName> <CreateTime>%s</CreateTime> <MsgType><![CDATA[%s]]></MsgType> <Content><![CDATA[%s]]></Content> </xml> ''' % (FromUserName, ToUserName, int(time.time()), 'text', Content) self.write(data) # 提交信息
[ "xutianhua88@qq.com" ]
xutianhua88@qq.com
ea787b4ae53065aa2e9d7866bddf6587d52b9eab
a68518f4ae6381ee47628afb0f6c8f3a77ee41a8
/setup.py
ec9ad8435a7b1f14774a368e082a4ee79803c0c5
[ "MIT" ]
permissive
theSoenke/pytorch-trainer
d2c26fac6a28bb092bea0237fe671cd83152257e
e2bdd881afcc7619b0abd58006e594dac7daa789
refs/heads/master
2020-08-17T05:32:22.853416
2019-11-25T22:23:10
2019-11-25T22:23:10
215,616,393
1
0
null
null
null
null
UTF-8
Python
false
false
691
py
import setuptools with open("README.md", "r") as fh: long_description = fh.read() setuptools.setup( name="pytorch-trainer", version="0.8.1", description="Lightweight wrapper around PyTorch ", long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/theSoenke/pytorch-trainer", packages=setuptools.find_packages(), install_requires=[ 'torch>=1.2.0', 'tqdm>=4.35.0', 'pandas' ], classifiers=[ "Programming Language :: Python :: 3", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ], python_requires='>=3.6', )
[ "soenke.behrendt@gmail.com" ]
soenke.behrendt@gmail.com
f9e1d014f00ad100e068a2d024c3d380291478c1
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
/HBKAGJZ62JkCTgYX3_21.py
715e80114cfa7428c4cd12fde12de966ea9febf6
[]
no_license
daniel-reich/ubiquitous-fiesta
26e80f0082f8589e51d359ce7953117a3da7d38c
9af2700dbe59284f5697e612491499841a6c126f
refs/heads/master
2023-04-05T06:40:37.328213
2021-04-06T20:17:44
2021-04-06T20:17:44
355,318,759
0
0
null
null
null
null
UTF-8
Python
false
false
123
py
def last(a, n): if n == 0: return [] elif n > len(a): return "invalid" elif n <= len(a): return a[-n:]
[ "daniel.reich@danielreichs-MacBook-Pro.local" ]
daniel.reich@danielreichs-MacBook-Pro.local
b7ae2023cd59626ee6d6a26f868b5a4f029c5c81
a19abd5a7c8d0d963eaf6d86d3118dfb8e54973f
/utils.py
b445fb65836a0b97e46426300eea9a820179797a
[ "MIT" ]
permissive
AK391/vits
460039c33cba369550487e3e13a88515a7412582
1b6db7437cd5224baae9776139f890ca315fad8a
refs/heads/main
2023-05-26T23:19:52.615763
2021-06-14T23:23:56
2021-06-14T23:23:56
376,908,210
1
0
MIT
2021-06-14T17:42:08
2021-06-14T17:42:08
null
UTF-8
Python
false
false
7,396
py
import os import glob import sys import argparse import logging import json import subprocess import numpy as np from scipy.io.wavfile import read import torch MATPLOTLIB_FLAG = False logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) logger = logging def load_checkpoint(checkpoint_path, model, optimizer=None): assert os.path.isfile(checkpoint_path) checkpoint_dict = torch.load(checkpoint_path, map_location='cpu') iteration = checkpoint_dict['iteration'] learning_rate = checkpoint_dict['learning_rate'] if optimizer is not None: optimizer.load_state_dict(checkpoint_dict['optimizer']) saved_state_dict = checkpoint_dict['model'] if hasattr(model, 'module'): state_dict = model.module.state_dict() else: state_dict = model.state_dict() new_state_dict= {} for k, v in state_dict.items(): try: new_state_dict[k] = saved_state_dict[k] except: logger.info("%s is not in the checkpoint" % k) new_state_dict[k] = v if hasattr(model, 'module'): model.module.load_state_dict(new_state_dict) else: model.load_state_dict(new_state_dict) logger.info("Loaded checkpoint '{}' (iteration {})" .format( checkpoint_path, iteration)) return model, optimizer, learning_rate, iteration def save_checkpoint(model, optimizer, learning_rate, iteration, checkpoint_path): logger.info("Saving model and optimizer state at iteration {} to {}".format( iteration, checkpoint_path)) if hasattr(model, 'module'): state_dict = model.module.state_dict() else: state_dict = model.state_dict() torch.save({'model': state_dict, 'iteration': iteration, 'optimizer': optimizer.state_dict(), 'learning_rate': learning_rate}, checkpoint_path) def summarize(writer, global_step, scalars={}, histograms={}, images={}, audios={}, audio_sampling_rate=22050): for k, v in scalars.items(): writer.add_scalar(k, v, global_step) for k, v in histograms.items(): writer.add_histogram(k, v, global_step) for k, v in images.items(): writer.add_image(k, v, global_step, dataformats='HWC') for k, v in audios.items(): writer.add_audio(k, 
v, global_step, audio_sampling_rate) def latest_checkpoint_path(dir_path, regex="G_*.pth"): f_list = glob.glob(os.path.join(dir_path, regex)) f_list.sort(key=lambda f: int("".join(filter(str.isdigit, f)))) x = f_list[-1] print(x) return x def plot_spectrogram_to_numpy(spectrogram): global MATPLOTLIB_FLAG if not MATPLOTLIB_FLAG: import matplotlib matplotlib.use("Agg") MATPLOTLIB_FLAG = True mpl_logger = logging.getLogger('matplotlib') mpl_logger.setLevel(logging.WARNING) import matplotlib.pylab as plt import numpy as np fig, ax = plt.subplots(figsize=(10,2)) im = ax.imshow(spectrogram, aspect="auto", origin="lower", interpolation='none') plt.colorbar(im, ax=ax) plt.xlabel("Frames") plt.ylabel("Channels") plt.tight_layout() fig.canvas.draw() data = np.fromstring(fig.canvas.tostring_rgb(), dtype=np.uint8, sep='') data = data.reshape(fig.canvas.get_width_height()[::-1] + (3,)) plt.close() return data def plot_alignment_to_numpy(alignment, info=None): global MATPLOTLIB_FLAG if not MATPLOTLIB_FLAG: import matplotlib matplotlib.use("Agg") MATPLOTLIB_FLAG = True mpl_logger = logging.getLogger('matplotlib') mpl_logger.setLevel(logging.WARNING) import matplotlib.pylab as plt import numpy as np fig, ax = plt.subplots(figsize=(6, 4)) im = ax.imshow(alignment.transpose(), aspect='auto', origin='lower', interpolation='none') fig.colorbar(im, ax=ax) xlabel = 'Decoder timestep' if info is not None: xlabel += '\n\n' + info plt.xlabel(xlabel) plt.ylabel('Encoder timestep') plt.tight_layout() fig.canvas.draw() data = np.fromstring(fig.canvas.tostring_rgb(), dtype=np.uint8, sep='') data = data.reshape(fig.canvas.get_width_height()[::-1] + (3,)) plt.close() return data def load_wav_to_torch(full_path): sampling_rate, data = read(full_path) return torch.FloatTensor(data.astype(np.float32)), sampling_rate def load_filepaths_and_text(filename, split="|"): with open(filename, encoding='utf-8') as f: filepaths_and_text = [line.strip().split(split) for line in f] return filepaths_and_text 
def get_hparams(init=True): parser = argparse.ArgumentParser() parser.add_argument('-c', '--config', type=str, default="./configs/base.json", help='JSON file for configuration') parser.add_argument('-m', '--model', type=str, required=True, help='Model name') args = parser.parse_args() model_dir = os.path.join("./logs", args.model) if not os.path.exists(model_dir): os.makedirs(model_dir) config_path = args.config config_save_path = os.path.join(model_dir, "config.json") if init: with open(config_path, "r") as f: data = f.read() with open(config_save_path, "w") as f: f.write(data) else: with open(config_save_path, "r") as f: data = f.read() config = json.loads(data) hparams = HParams(**config) hparams.model_dir = model_dir return hparams def get_hparams_from_dir(model_dir): config_save_path = os.path.join(model_dir, "config.json") with open(config_save_path, "r") as f: data = f.read() config = json.loads(data) hparams =HParams(**config) hparams.model_dir = model_dir return hparams def get_hparams_from_file(config_path): with open(config_path, "r") as f: data = f.read() config = json.loads(data) hparams =HParams(**config) return hparams def check_git_hash(model_dir): source_dir = os.path.dirname(os.path.realpath(__file__)) if not os.path.exists(os.path.join(source_dir, ".git")): logger.warn("{} is not a git repository, therefore hash value comparison will be ignored.".format( source_dir )) return cur_hash = subprocess.getoutput("git rev-parse HEAD") path = os.path.join(model_dir, "githash") if os.path.exists(path): saved_hash = open(path).read() if saved_hash != cur_hash: logger.warn("git hash values are different. 
{}(saved) != {}(current)".format( saved_hash[:8], cur_hash[:8])) else: open(path, "w").write(cur_hash) def get_logger(model_dir, filename="train.log"): global logger logger = logging.getLogger(os.path.basename(model_dir)) logger.setLevel(logging.DEBUG) formatter = logging.Formatter("%(asctime)s\t%(name)s\t%(levelname)s\t%(message)s") if not os.path.exists(model_dir): os.makedirs(model_dir) h = logging.FileHandler(os.path.join(model_dir, filename)) h.setLevel(logging.DEBUG) h.setFormatter(formatter) logger.addHandler(h) return logger class HParams(): def __init__(self, **kwargs): for k, v in kwargs.items(): if type(v) == dict: v = HParams(**v) self[k] = v def keys(self): return self.__dict__.keys() def items(self): return self.__dict__.items() def values(self): return self.__dict__.values() def __len__(self): return len(self.__dict__) def __getitem__(self, key): return getattr(self, key) def __setitem__(self, key, value): return setattr(self, key, value) def __contains__(self, key): return key in self.__dict__ def __repr__(self): return self.__dict__.__repr__()
[ "jaywalnut310@gmail.com" ]
jaywalnut310@gmail.com
830a673b50b9c3faacb8e3f2012031d17ab895e6
daf830ddbe9a17c499c829eab41e9c483f05aadf
/client/views/Register.py
5ff3f4a3ed1d29c54d3432323d5b612b9d5cd0b6
[]
no_license
c475/fileshare
c751ea13e5e3636a29869a8a6c689e07b910361a
32f90b4ee73d308ea0a6e8cc57732c98ed3dbda4
refs/heads/master
2021-01-13T14:44:55.518861
2016-12-25T06:28:24
2016-12-25T06:28:24
76,702,143
0
0
null
null
null
null
UTF-8
Python
false
false
736
py
from client.models import User from client.forms import UserCreationForm from client.mixins import NotLoggedInRequired from django.views.generic.edit import CreateView from django.contrib.auth import ( login, authenticate ) class Register(NotLoggedInRequired, CreateView): model = User template_name = 'registration/register.html' success_url = '/' form_class = UserCreationForm def get_success_url(self): new_user = authenticate( username=self.request.POST.get("username", None), password=self.request.POST.get("password1", None) ) if new_user is not None: login(self.request, new_user) return super(Register, self).get_success_url()
[ "danjamesbond@gmail.com" ]
danjamesbond@gmail.com
115a66c23ea4661c8cc03e3030a4ed6b60a0b04e
bd521d50f33960207d9ad174099630f9ee47e6f8
/study/python-basic/tentoone.py
e4f7d1b0301c61ecf0b938b20c2dd1d338de6638
[]
no_license
GGRMR/python-study
c44660552daec255c1724a12c3c0a1dd301b012c
ba74c982283b98ec6bdb4b0569df75fa8a55094f
refs/heads/master
2020-05-04T21:05:51.427503
2019-05-21T15:05:12
2019-05-21T15:05:12
179,464,009
1
0
null
null
null
null
UTF-8
Python
false
false
134
py
# -*- coding: utf-8 -*- """ bubble_list @author: Park Jieun """ for i in range(10,0,-1): print(i)
[ "wldmsdl9707@naver.com" ]
wldmsdl9707@naver.com
82eca83d6d1d1d11af6b32a044d2bb6b30cf6621
b8fa1144dd2fc3202524ec4aebb480ad75f5c316
/categories/views.py
4efdd0a20671b10be6d0451b3803bf641b36eefb
[]
no_license
srijan113/Django-flix
17f7ca79fbfae023cc17075890164929da60162b
0eaf7a57357813cc93ea574b590ced0270b5d083
refs/heads/main
2023-04-21T11:45:18.362208
2021-05-11T16:41:48
2021-05-11T16:41:48
366,449,836
1
0
null
null
null
null
UTF-8
Python
false
false
1,136
py
from django.http.response import Http404 from django.shortcuts import render from django.db.models import Count from django.views import generic from .models import Category from playlist.models import Playlist from playlist.mixins import PlaylistMixin class CategoryList(generic.ListView): queryset = Category.objects.all().filter(active=True).annotate(pl_count = Count('playlists')).filter(pl_count__gt=0) class CategoryDetailView(PlaylistMixin, generic.ListView): """ Another list view for playlist """ def get_context_data(self): context = super().get_context_data() try: obj = Category.objects.get(slug = self.kwargs.get('slug')) except Category.DoesNotExist: raise Http404 except Category.MultipleObjectsReturned: raise Http404 except: obj = None context['object'] = obj if obj is not None: context['title'] = obj.title return context def get_queryset(self): slug = self.kwargs.get('slug') return Playlist.objects.filter(category__slug = slug).movie_or_show()
[ "srijan.pokhrel113@gmail.com" ]
srijan.pokhrel113@gmail.com
246e039f2cc85f5282bfb659128b315d0ff2d1f9
4244d1f3dc2acd98cd31b1db03645a233bc57a3b
/myapp/usertypes.py
4f4b45458f4f1c8279c495a17df9a1a95ec48684
[]
no_license
ekoputrapratama/plugin-architecture-pyqt5-qwebengine
371837ad8e32730b7a682359412edd37df366378
330d391b232de352d72c1dba86b3a74125b13f8a
refs/heads/master
2022-11-26T23:50:48.903616
2019-11-22T09:54:56
2019-11-22T09:54:56
null
0
0
null
null
null
null
UTF-8
Python
false
false
179
py
import enum import typing from PyQt5.QtCore import QUrl Url = typing.TypeVar('Url', str, QUrl) LoadEvent = enum.Enum( "LoadEvent", ["FINISHED", "STARTED", "BEFORE_LOAD"] )
[ "muhammad.sayuti94@gmail.com" ]
muhammad.sayuti94@gmail.com
f8818640916f448155ad0113d32a8f22b2140525
988dfbd1a762e40b9717398f7e8d40e3de962180
/cdnmanager/cdnmanager/cdn/videorepo/migrations/0002_auto__add_projectpolicy__add_videoproject.py
7a5104dd75908f0e5a7928252b351ffdffcff092
[]
no_license
carribeiro/vdeli
cf0e594c408ff913355dbd6ea1397cda48f3e77a
cf27b2d17aac2975e3eb48a17132e8bd22325876
refs/heads/master
2021-10-27T22:43:40.348580
2011-07-01T11:07:52
2011-07-01T11:07:52
null
0
0
null
null
null
null
UTF-8
Python
false
false
9,082
py
# encoding: utf-8 import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'ProjectPolicy' db.create_table('videorepo_projectpolicy', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('video_project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['videorepo.VideoProject'])), ('cdnregion', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['videorepo.CDNRegion'])), ('protocol', self.gf('django.db.models.fields.CharField')(max_length=5)), ('max_simultaneous_segments', self.gf('django.db.models.fields.IntegerField')()), ('segment_size', self.gf('django.db.models.fields.IntegerField')()), ('max_bandwidth_per_segment_mbps', self.gf('django.db.models.fields.IntegerField')()), )) db.send_create_signal('videorepo', ['ProjectPolicy']) # Adding model 'VideoProject' db.create_table('videorepo_videoproject', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('name', self.gf('django.db.models.fields.CharField')(max_length=30)), ('creation_date', self.gf('django.db.models.fields.DateTimeField')()), ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])), )) db.send_create_signal('videorepo', ['VideoProject']) def backwards(self, orm): # Deleting model 'ProjectPolicy' db.delete_table('videorepo_projectpolicy') # Deleting model 'VideoProject' db.delete_table('videorepo_videoproject') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 
'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'videorepo.cdnregion': { 'Meta': {'object_name': 'CDNRegion'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'region_name': ('django.db.models.fields.CharField', [], {'max_length': '60'}) }, 'videorepo.cdnserver': { 'Meta': {'object_name': 'CDNServer'}, 'cdn_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videorepo.CDNRegion']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'ip_address': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}), 'node_name': ('django.db.models.fields.CharField', [], {'max_length': '60'}), 'server_port': ('django.db.models.fields.IntegerField', [], {}) }, 'videorepo.projectpolicy': { 'Meta': {'object_name': 'ProjectPolicy'}, 'cdnregion': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videorepo.CDNRegion']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'max_bandwidth_per_segment_mbps': ('django.db.models.fields.IntegerField', [], {}), 'max_simultaneous_segments': ('django.db.models.fields.IntegerField', [], {}), 'protocol': ('django.db.models.fields.CharField', [], {'max_length': '5'}), 'segment_size': ('django.db.models.fields.IntegerField', [], {}), 'video_project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videorepo.VideoProject']"}) }, 'videorepo.segmentqueue': { 'Meta': {'object_name': 'SegmentQueue'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'queue_entry': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videorepo.TransferQueue']"}), 'segment_end': ('django.db.models.fields.IntegerField', [], {}), 'segment_start': ('django.db.models.fields.IntegerField', [], {}), 'segment_status': 
('django.db.models.fields.CharField', [], {'max_length': '15'}) }, 'videorepo.transferqueue': { 'Meta': {'object_name': 'TransferQueue'}, 'current_segments': ('django.db.models.fields.IntegerField', [], {}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'max_bandwidth_mbps': ('django.db.models.fields.IntegerField', [], {}), 'max_simultaneous_segments': ('django.db.models.fields.IntegerField', [], {}), 'protocol': ('django.db.models.fields.CharField', [], {'max_length': '5'}), 'segment_size': ('django.db.models.fields.IntegerField', [], {}), 'server': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videorepo.CDNServer']"}), 'transfer_status': ('django.db.models.fields.CharField', [], {'max_length': '15'}), 'transfer_type': ('django.db.models.fields.CharField', [], {'max_length': '15'}), 'video_file': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videorepo.VideoFile']"}) }, 'videorepo.videofile': { 'Meta': {'object_name': 'VideoFile'}, 'file_hash': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'file_name': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}), 'file_size': ('django.db.models.fields.IntegerField', [], {}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'upload_date': ('django.db.models.fields.DateTimeField', [], {}) }, 'videorepo.videoproject': { 'Meta': {'object_name': 'VideoProject'}, 'creation_date': ('django.db.models.fields.DateTimeField', [], {}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) } } complete_apps = ['videorepo']
[ "unitoff@gmail.com" ]
unitoff@gmail.com
8eabc5915442c74698de459405acdb8a6cb90fa6
18b3ad3b0e1f7f10969738251e1201d01dfbc6bf
/backup_files/practice/rect.py
00e007de1004f6dc31ae22f14c65ace2161a43fa
[]
no_license
sahthi/backup2
11d509b980e731c73733b1399a8143780779e75a
16bed38f0867fd7c766c2a008c8d43b0660f0cb0
refs/heads/master
2020-03-21T12:39:56.890129
2018-07-09T08:12:46
2018-07-09T08:12:46
138,565,151
0
0
null
null
null
null
UTF-8
Python
false
false
352
py
#!/usr/bin/python class rectangle: def __init__(self,length,breadth): self.length=length self.breadth=breadth def area(self): return self.breadth*self.length a=input("enter the length of rectangle:") b=input("enter the breadth of rectangle:") obj=rectangle(a,b) print("area of rectangle:",obj.area())
[ "siddamsetty.sahithi@votarytech.com" ]
siddamsetty.sahithi@votarytech.com
b28fdc07e948fed01cb39d809336fa86c0eb6f57
13789dc6b4faa7d708c45564b0ca6a62fe89b87d
/BEAC_ERP/urls.py
b0197cece49216609b13477e9efdbeab2feb1608
[]
no_license
MbadingaMoudouyi/beac_apc
343485a9cdddb7a76fa9450bb2dacd6dd5566907
6fbd82a62a72449572a6f1f73b19fd4162b2a7b1
refs/heads/main
2023-06-28T13:09:44.781339
2021-07-27T11:21:58
2021-07-27T11:21:58
389,919,907
0
0
null
null
null
null
UTF-8
Python
false
false
798
py
"""BEAC_ERP URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/3.2/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ from django.contrib import admin from django.urls import path, include urlpatterns = [ path('admin/', admin.site.urls), path('apc/', include('apc.urls')), ]
[ "mlionelstephen@gmail.com" ]
mlionelstephen@gmail.com
1ed418b0c91b56556c45e359d752ea4523d1576b
97c1558464f6e2c4350c207c9e856dcd76ea05af
/A Star Algorithm/AStarAlgorithm.py
50eeebcf335bf823433c8e465259841b473857d4
[]
no_license
harshpatel010202/Path-Finding-Python-Project
0810dce5e3a44e16be278ef89d51a31299f906ac
c06e39c97647ccaa626c9d2f62a055d90c4a0648
refs/heads/main
2023-04-19T10:36:23.482380
2021-05-05T03:33:52
2021-05-05T03:33:52
364,451,485
0
0
null
null
null
null
UTF-8
Python
false
false
7,327
py
from queue import PriorityQueue import pygame WIDTH = 1000 WINDOW = pygame.display.set_mode((WIDTH, WIDTH)) pygame.display.set_caption("A* SEARCH ALGORITHM") RED = (255, 0, 0) LIME = (0, 255, 0) YELLOW = (255, 255, 0) WHITE = (255, 255, 255) BLACK = (0, 0, 0) GRAY = (128, 128, 128) NAVY = (0, 0, 128) class Node: def __init__(self, row, col, width, sum_rows): self.row = row self.col = col self.x = row * width self.y = col * width self.sum_rows = sum_rows self.width = width self.color = BLACK self.nearest = [] def get_its_pos(self): return self.row, self.col def is_pos_closed(self): return self.color == YELLOW def is_pos_open(self): return self.color == NAVY def is_pos_obstacle(self): return self.color == WHITE def is_pos_commence(self): return self.color == YELLOW def is_pos_finish(self): return self.color == LIME def pos_reset(self): self.color = BLACK def create_pos_path(self): self.color = LIME def create_pos_obstacle(self): self.color = WHITE def create_pos_closed(self): self.color = YELLOW def create_pos_commence(self): self.color = RED def create_pos_open(self): self.color = NAVY def create_pos_finish(self): self.color = LIME def sketch(self, win): pygame.draw.rect(win, self.color, (self.x, self.y, self.width, self.width)) def update_nearest(self, grid): self.nearest = [] if self.row < self.sum_rows - 1 and not grid[self.row + 1][self.col].is_pos_obstacle(): self.nearest.append(grid[self.row + 1][self.col]) if self.col < self.sum_rows - 1 and not grid[self.row][self.col + 1].is_pos_obstacle(): self.nearest.append(grid[self.row][self.col + 1]) if self.row > 0 and not grid[self.row - 1][self.col].is_pos_obstacle(): self.nearest.append(grid[self.row - 1][self.col]) if self.col > 0 and not grid[self.row][self.col - 1].is_pos_obstacle(): self.nearest.append(grid[self.row][self.col - 1]) # lt means less than def __lt__(self, other): return False def area(pos1, pos2): x1, y1 = pos1 x2, y2 = pos2 return abs(x1 - x2) + abs(y1 - y2) # algorithm def algorithm(sketch, 
grid, commence, finish): counter = 0 available_set = PriorityQueue() available_set.put((0, counter, commence)) evolved = {} past_cost_score = {node: float("inf") for row in grid for node in row} past_cost_score[commence] = 0 heru_score = {node: float("inf") for row in grid for node in row} heru_score[commence] = area(commence.get_its_pos(), finish.get_its_pos()) available_set_hash = {commence} while not available_set.empty(): for event in pygame.event.get(): if event.type == pygame.QUIT: pygame.quit() currentNode = available_set.get()[2] available_set_hash.remove(currentNode) if currentNode == finish: final_step(evolved, finish, sketch) finish.create_pos_finish() return True for nearest in currentNode.nearest: node_past_cost_score = past_cost_score[currentNode] + 1 if node_past_cost_score < past_cost_score[nearest]: evolved[nearest] = currentNode past_cost_score[nearest] = node_past_cost_score heru_score[nearest] = node_past_cost_score + area(nearest.get_its_pos(), finish.get_its_pos()) if nearest not in available_set_hash: counter += 1 available_set.put((heru_score[nearest], counter, nearest)) available_set_hash.add(nearest) nearest.create_pos_open() sketch() if currentNode != commence: currentNode.create_pos_closed() return False def create_its_grid(MAKE_ROWS, width): grid = [] space = width // MAKE_ROWS for i in range(MAKE_ROWS): grid.append([]) for j in range(MAKE_ROWS): node = Node(i, j, space, MAKE_ROWS) grid[i].append(node) return grid def sketch_grid(win, MAKE_ROWS, width): space = width // MAKE_ROWS for i in range(MAKE_ROWS): pygame.draw.line(win, GRAY, (0, i * space), (width, i * space)) for j in range(MAKE_ROWS): pygame.draw.line(win, GRAY, (j * space, 0), (j * space, width)) def sketch(win, grid, MAKE_ROWS, width): win.fill(BLACK) for row in grid: for node in row: node.sketch(win) sketch_grid(win, MAKE_ROWS, width) pygame.display.update() def pos_clicked(pos, MAKE_ROWS, width): space = width // MAKE_ROWS y, x = pos row = y // space col = x // space 
return row, col # final step def final_step(evolved, currentNode, sketch): while currentNode in evolved: currentNode = evolved[currentNode] currentNode.create_pos_path() sketch() def main(win, width): MAKE_ROWS = 40 grid = create_its_grid(MAKE_ROWS, width) #SET BOTH COMMENCE AND FINISH TO NONE# commence = None finish = None run = True #THIS MEANS WHILE IT IS RUNNING THE FOLLOWING THING IN THE LOOP WILL OCCUR while run: sketch(win, grid, MAKE_ROWS, width) for event in pygame.event.get(): if event.type == pygame.QUIT: run = False #TO POSITION COMMENCE, FINISH AND OBSTACLES POSITIONS --> CAN BE DONE USING LEFT CLICK if pygame.mouse.get_pressed()[0]: pos = pygame.mouse.get_pos() row, col = pos_clicked(pos, MAKE_ROWS, width) node = grid[row][col] if not commence and node != finish: commence = node commence.create_pos_commence() elif not finish and node != commence: finish = node finish.create_pos_finish() elif node != finish and node != commence: node.create_pos_obstacle() #TO ERASE THE COMMENCE, FINISH AND OBSTACLES POSITIONS --> CAN BE DONE USING RIGHT CLICK elif pygame.mouse.get_pressed()[2]: pos = pygame.mouse.get_pos() row, col = pos_clicked(pos, MAKE_ROWS, width) node = grid[row][col] node.pos_reset() if node == commence: commence = None elif node == finish: finish = None # if space bar is clicked if event.type == pygame.KEYDOWN: if event.key == pygame.K_SPACE and commence and finish: for row in grid: for node in row: node.update_nearest(grid) algorithm(lambda: sketch(win, grid, MAKE_ROWS, width), grid, commence, finish) # if space bar is clicked if event.key == pygame.K_c: commence = None finish = None grid = create_its_grid(MAKE_ROWS, width) pygame.quit() main(WINDOW, WIDTH)
[ "harshpatel@uvic.ca" ]
harshpatel@uvic.ca
a3d20cd0cb67b56a2cee5a0ae9a699eb96bf1677
4f862ab15ad1a5ff562f4e068ac8e8051e5064ee
/training_stuff/cifar10_multi_gpu_train.py
583e81978bdbe2536fa3a48852d99301db7e793f
[]
no_license
skassam21/CSC411
cd4a64ed4c9879b85432c33fc293fb44650dca76
4bf4685338febabcadbadbef657d1273a0aa2db4
refs/heads/master
2020-06-27T10:49:40.620559
2016-11-23T22:26:14
2016-11-23T22:26:14
74,525,062
0
0
null
null
null
null
UTF-8
Python
false
false
10,163
py
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """A binary to train CIFAR-10 using multiple GPU's with synchronous updates. Accuracy: cifar10_multi_gpu_train.py achieves ~86% accuracy after 100K steps (256 epochs of data) as judged by cifar10_eval.py. Speed: With batch_size 128. System | Step Time (sec/batch) | Accuracy -------------------------------------------------------------------- 1 Tesla K20m | 0.35-0.60 | ~86% at 60K steps (5 hours) 1 Tesla K40m | 0.25-0.35 | ~86% at 100K steps (4 hours) 2 Tesla K20m | 0.13-0.20 | ~84% at 30K steps (2.5 hours) 3 Tesla K20m | 0.13-0.18 | ~84% at 30K steps 4 Tesla K20m | ~0.10 | ~84% at 30K steps Usage: Please see the tutorial and website for how to download the CIFAR-10 data set, compile the program and train the model. 
http://tensorflow.org/tutorials/deep_cnn/ """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from datetime import datetime import os.path import re import time import numpy as np from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf from training_stuff import cifar FLAGS = tf.app.flags.FLAGS tf.app.flags.DEFINE_string('train_dir', '/tmp/cifar10_train', """Directory where to write event logs """ """and checkpoint.""") tf.app.flags.DEFINE_integer('max_steps', 1000000, """Number of batches to run.""") tf.app.flags.DEFINE_integer('num_gpus', 1, """How many GPUs to use.""") tf.app.flags.DEFINE_boolean('log_device_placement', False, """Whether to log device placement.""") def tower_loss(scope): """Calculate the total loss on a single tower running the CIFAR model. Args: scope: unique prefix string identifying the CIFAR tower, e.g. 'tower_0' Returns: Tensor of shape [] containing the total loss for a batch of data """ # Get images and labels for CIFAR-10. images, labels = cifar.distorted_inputs() # Build inference Graph. logits = cifar.inference(images) # Build the portion of the Graph calculating the losses. Note that we will # assemble the total_loss using a custom function below. _ = cifar.loss(logits, labels) # Assemble all of the losses for the current tower only. losses = tf.get_collection('losses', scope) # Calculate the total loss for the current tower. total_loss = tf.add_n(losses, name='total_loss') # Attach a scalar summary to all individual losses and the total loss; do the # same for the averaged version of the losses. for l in losses + [total_loss]: # Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training # session. This helps the clarity of presentation on tensorboard. 
loss_name = re.sub('%s_[0-9]*/' % cifar.TOWER_NAME, '', l.op.name) tf.scalar_summary(loss_name, l) return total_loss def average_gradients(tower_grads): """Calculate the average gradient for each shared variable across all towers. Note that this function provides a synchronization point across all towers. Args: tower_grads: List of lists of (gradient, variable) tuples. The outer list is over individual gradients. The inner list is over the gradient calculation for each tower. Returns: List of pairs of (gradient, variable) where the gradient has been averaged across all towers. """ average_grads = [] for grad_and_vars in zip(*tower_grads): # Note that each grad_and_vars looks like the following: # ((grad0_gpu0, var0_gpu0), ... , (grad0_gpuN, var0_gpuN)) grads = [] for g, _ in grad_and_vars: # Add 0 dimension to the gradients to represent the tower. expanded_g = tf.expand_dims(g, 0) # Append on a 'tower' dimension which we will average over below. grads.append(expanded_g) # Average over the 'tower' dimension. grad = tf.concat(0, grads) grad = tf.reduce_mean(grad, 0) # Keep in mind that the Variables are redundant because they are shared # across towers. So .. we will just return the first tower's pointer to # the Variable. v = grad_and_vars[0][1] grad_and_var = (grad, v) average_grads.append(grad_and_var) return average_grads def train(): """Train CIFAR-10 for a number of steps.""" with tf.Graph().as_default(), tf.device('/cpu:0'): # Create a variable to count the number of train() calls. This equals the # number of batches processed * FLAGS.num_gpus. global_step = tf.get_variable( 'global_step', [], initializer=tf.constant_initializer(0), trainable=False) # Calculate the learning rate schedule. num_batches_per_epoch = (cifar.NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN / FLAGS.batch_size) decay_steps = int(num_batches_per_epoch * cifar.NUM_EPOCHS_PER_DECAY) # Decay the learning rate exponentially based on the number of steps. 
lr = tf.train.exponential_decay(cifar.INITIAL_LEARNING_RATE, global_step, decay_steps, cifar.LEARNING_RATE_DECAY_FACTOR, staircase=True) # Create an optimizer that performs gradient descent. opt = tf.train.GradientDescentOptimizer(lr) # Calculate the gradients for each model tower. tower_grads = [] for i in xrange(FLAGS.num_gpus): with tf.device('/gpu:%d' % i): with tf.name_scope('%s_%d' % (cifar.TOWER_NAME, i)) as scope: # Calculate the loss for one tower of the CIFAR model. This function # constructs the entire CIFAR model but shares the variables across # all towers. loss = tower_loss(scope) # Reuse variables for the next tower. tf.get_variable_scope().reuse_variables() # Retain the summaries from the final tower. summaries = tf.get_collection(tf.GraphKeys.SUMMARIES, scope) # Calculate the gradients for the batch of data on this CIFAR tower. grads = opt.compute_gradients(loss) # Keep track of the gradients across all towers. tower_grads.append(grads) # We must calculate the mean of each gradient. Note that this is the # synchronization point across all towers. grads = average_gradients(tower_grads) # Add a summary to track the learning rate. summaries.append(tf.scalar_summary('learning_rate', lr)) # Add histograms for gradients. for grad, var in grads: if grad is not None: summaries.append( tf.histogram_summary(var.op.name + '/gradients', grad)) # Apply the gradients to adjust the shared variables. apply_gradient_op = opt.apply_gradients(grads, global_step=global_step) # Add histograms for trainable variables. for var in tf.trainable_variables(): summaries.append(tf.histogram_summary(var.op.name, var)) # Track the moving averages of all trainable variables. variable_averages = tf.train.ExponentialMovingAverage( cifar.MOVING_AVERAGE_DECAY, global_step) variables_averages_op = variable_averages.apply(tf.trainable_variables()) # Group all updates to into a single train op. train_op = tf.group(apply_gradient_op, variables_averages_op) # Create a saver. 
saver = tf.train.Saver(tf.all_variables()) # Build the summary operation from the last tower summaries. summary_op = tf.merge_summary(summaries) # Build an initialization operation to run below. init = tf.global_variables_initializer() # Start running operations on the Graph. allow_soft_placement must be set to # True to build towers on GPU, as some of the ops do not have GPU # implementations. sess = tf.Session(config=tf.ConfigProto( allow_soft_placement=True, log_device_placement=FLAGS.log_device_placement)) sess.run(init) # Start the queue runners. tf.train.start_queue_runners(sess=sess) summary_writer = tf.train.SummaryWriter(FLAGS.train_dir, sess.graph) for step in xrange(FLAGS.max_steps): start_time = time.time() _, loss_value = sess.run([train_op, loss]) duration = time.time() - start_time assert not np.isnan(loss_value), 'Model diverged with loss = NaN' if step % 10 == 0: num_examples_per_step = FLAGS.batch_size * FLAGS.num_gpus examples_per_sec = num_examples_per_step / duration sec_per_batch = duration / FLAGS.num_gpus format_str = ('%s: step %d, loss = %.2f (%.1f examples/sec; %.3f ' 'sec/batch)') print (format_str % (datetime.now(), step, loss_value, examples_per_sec, sec_per_batch)) if step % 100 == 0: summary_str = sess.run(summary_op) summary_writer.add_summary(summary_str, step) # Save the model checkpoint periodically. if step % 1000 == 0 or (step + 1) == FLAGS.max_steps: checkpoint_path = os.path.join(FLAGS.train_dir, 'model.ckpt') saver.save(sess, checkpoint_path, global_step=step) def main(argv=None): # pylint: disable=unused-argument cifar.maybe_download_and_extract() if tf.gfile.Exists(FLAGS.train_dir): tf.gfile.DeleteRecursively(FLAGS.train_dir) tf.gfile.MakeDirs(FLAGS.train_dir) train() if __name__ == '__main__': tf.app.run()
[ "skassam21@gmail.com" ]
skassam21@gmail.com
9d5eed9ac51e2c80d2bf186b88f4046d7c5f5a3a
080c13cd91a073457bd9eddc2a3d13fc2e0e56ae
/MY_REPOS/awesome-4-new-developers/tensorflow-master/tensorflow/python/lib/io/file_io_test.py
e4767806896723cb6003db317ab58e1c9a241678
[ "Apache-2.0" ]
permissive
Portfolio-Projects42/UsefulResourceRepo2.0
1dccc8961a09347f124d3ed7c27c6d73b9806189
75b1e23c757845b5f1894ebe53551a1cf759c6a3
refs/heads/master
2023-08-04T12:23:48.862451
2021-09-15T12:51:35
2021-09-15T12:51:35
null
0
0
null
null
null
null
UTF-8
Python
false
false
28,674
py
# This Python file uses the following encoding: utf-8 # Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================= """Testing File IO operations in file_io.py.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import os.path from absl.testing import parameterized import numpy as np from tensorflow.python.framework import errors from tensorflow.python.lib.io import file_io from tensorflow.python.platform import gfile from tensorflow.python.platform import test class PathLike(object): """Backport of pathlib.Path for Python < 3.6""" def __init__(self, name): self.name = name def __fspath__(self): return self.name def __str__(self): return self.name run_all_path_types = parameterized.named_parameters( ("str", os.path.join), ("pathlike", lambda *paths: PathLike(os.path.join(*paths))) ) class FileIoTest(test.TestCase, parameterized.TestCase): def setUp(self): self._base_dir = os.path.join(self.get_temp_dir(), "base_dir") file_io.create_dir(self._base_dir) def tearDown(self): file_io.delete_recursively(self._base_dir) def testEmptyFilename(self): f = file_io.FileIO("", mode="r") with self.assertRaises(errors.NotFoundError): _ = f.read() @run_all_path_types def testFileDoesntExist(self, join): file_path = join(self._base_dir, "temp_file") self.assertFalse(file_io.file_exists(file_path)) with 
self.assertRaises(errors.NotFoundError): _ = file_io.read_file_to_string(file_path) @run_all_path_types def testWriteToString(self, join): file_path = join(self._base_dir, "temp_file") file_io.write_string_to_file(file_path, "testing") self.assertTrue(file_io.file_exists(file_path)) file_contents = file_io.read_file_to_string(file_path) self.assertEqual("testing", file_contents) def testAtomicWriteStringToFile(self): file_path = os.path.join(self._base_dir, "temp_file") file_io.atomic_write_string_to_file(file_path, "testing") self.assertTrue(file_io.file_exists(file_path)) file_contents = file_io.read_file_to_string(file_path) self.assertEqual("testing", file_contents) def testAtomicWriteStringToFileOverwriteFalse(self): file_path = os.path.join(self._base_dir, "temp_file") file_io.atomic_write_string_to_file(file_path, "old", overwrite=False) with self.assertRaises(errors.AlreadyExistsError): file_io.atomic_write_string_to_file(file_path, "new", overwrite=False) file_contents = file_io.read_file_to_string(file_path) self.assertEqual("old", file_contents) file_io.delete_file(file_path) file_io.atomic_write_string_to_file(file_path, "new", overwrite=False) file_contents = file_io.read_file_to_string(file_path) self.assertEqual("new", file_contents) @run_all_path_types def testReadBinaryMode(self, join): file_path = join(self._base_dir, "temp_file") file_io.write_string_to_file(file_path, "testing") with file_io.FileIO(file_path, mode="rb") as f: self.assertEqual(b"testing", f.read()) @run_all_path_types def testWriteBinaryMode(self, join): file_path = join(self._base_dir, "temp_file") file_io.FileIO(file_path, "wb").write("testing") with file_io.FileIO(file_path, mode="r") as f: self.assertEqual("testing", f.read()) def testAppend(self): file_path = os.path.join(self._base_dir, "temp_file") with file_io.FileIO(file_path, mode="w") as f: f.write("begin\n") with file_io.FileIO(file_path, mode="a") as f: f.write("a1\n") with file_io.FileIO(file_path, mode="a") as f: 
f.write("a2\n") with file_io.FileIO(file_path, mode="r") as f: file_contents = f.read() self.assertEqual("begin\na1\na2\n", file_contents) def testMultipleFiles(self): file_prefix = os.path.join(self._base_dir, "temp_file") for i in range(5000): f = file_io.FileIO(file_prefix + str(i), mode="w+") f.write("testing") f.flush() self.assertEqual("testing", f.read()) f.close() def testMultipleWrites(self): file_path = os.path.join(self._base_dir, "temp_file") with file_io.FileIO(file_path, mode="w") as f: f.write("line1\n") f.write("line2") file_contents = file_io.read_file_to_string(file_path) self.assertEqual("line1\nline2", file_contents) def testFileWriteBadMode(self): file_path = os.path.join(self._base_dir, "temp_file") with self.assertRaises(errors.PermissionDeniedError): file_io.FileIO(file_path, mode="r").write("testing") def testFileReadBadMode(self): file_path = os.path.join(self._base_dir, "temp_file") file_io.FileIO(file_path, mode="w").write("testing") self.assertTrue(file_io.file_exists(file_path)) with self.assertRaises(errors.PermissionDeniedError): file_io.FileIO(file_path, mode="w").read() @run_all_path_types def testFileDelete(self, join): file_path = join(self._base_dir, "temp_file") file_io.FileIO(file_path, mode="w").write("testing") file_io.delete_file(file_path) self.assertFalse(file_io.file_exists(file_path)) def testFileDeleteFail(self): file_path = os.path.join(self._base_dir, "temp_file") with self.assertRaises(errors.NotFoundError): file_io.delete_file(file_path) def testGetMatchingFiles(self): dir_path = os.path.join(self._base_dir, "temp_dir") file_io.create_dir(dir_path) files = ["file1.txt", "file2.txt", "file3.txt", "file*.txt"] for name in files: file_path = os.path.join(dir_path, name) file_io.FileIO(file_path, mode="w").write("testing") expected_match = [os.path.join(dir_path, name) for name in files] self.assertItemsEqual( file_io.get_matching_files(os.path.join(dir_path, "file*.txt")), expected_match, ) 
self.assertItemsEqual(file_io.get_matching_files(tuple()), []) files_subset = [ os.path.join(dir_path, files[0]), os.path.join(dir_path, files[2]), ] self.assertItemsEqual(file_io.get_matching_files(files_subset), files_subset) file_io.delete_recursively(dir_path) self.assertFalse(file_io.file_exists(os.path.join(dir_path, "file3.txt"))) def testGetMatchingFilesWhenParentDirContainsParantheses(self): dir_path = os.path.join(self._base_dir, "dir_(special)") file_io.create_dir(dir_path) files = ["file1.txt", "file(2).txt"] for name in files: file_path = os.path.join(dir_path, name) file_io.FileIO(file_path, mode="w").write("testing") expected_match = [os.path.join(dir_path, name) for name in files] glob_pattern = os.path.join(dir_path, "*") self.assertItemsEqual(file_io.get_matching_files(glob_pattern), expected_match) @run_all_path_types def testCreateRecursiveDir(self, join): dir_path = join(self._base_dir, "temp_dir/temp_dir1/temp_dir2") file_io.recursive_create_dir(dir_path) file_io.recursive_create_dir(dir_path) # repeat creation file_path = os.path.join(str(dir_path), "temp_file") file_io.FileIO(file_path, mode="w").write("testing") self.assertTrue(file_io.file_exists(file_path)) file_io.delete_recursively(os.path.join(self._base_dir, "temp_dir")) self.assertFalse(file_io.file_exists(file_path)) @run_all_path_types def testCopy(self, join): file_path = join(self._base_dir, "temp_file") file_io.FileIO(file_path, mode="w").write("testing") copy_path = join(self._base_dir, "copy_file") file_io.copy(file_path, copy_path) self.assertTrue(file_io.file_exists(copy_path)) f = file_io.FileIO(file_path, mode="r") self.assertEqual("testing", f.read()) self.assertEqual(7, f.tell()) def testCopyOverwrite(self): file_path = os.path.join(self._base_dir, "temp_file") file_io.FileIO(file_path, mode="w").write("testing") copy_path = os.path.join(self._base_dir, "copy_file") file_io.FileIO(copy_path, mode="w").write("copy") file_io.copy(file_path, copy_path, overwrite=True) 
self.assertTrue(file_io.file_exists(copy_path)) self.assertEqual("testing", file_io.FileIO(file_path, mode="r").read()) def testCopyOverwriteFalse(self): file_path = os.path.join(self._base_dir, "temp_file") file_io.FileIO(file_path, mode="w").write("testing") copy_path = os.path.join(self._base_dir, "copy_file") file_io.FileIO(copy_path, mode="w").write("copy") with self.assertRaises(errors.AlreadyExistsError): file_io.copy(file_path, copy_path, overwrite=False) @run_all_path_types def testRename(self, join): file_path = join(self._base_dir, "temp_file") file_io.FileIO(file_path, mode="w").write("testing") rename_path = join(self._base_dir, "rename_file") file_io.rename(file_path, rename_path) self.assertTrue(file_io.file_exists(rename_path)) self.assertFalse(file_io.file_exists(file_path)) def testRenameOverwrite(self): file_path = os.path.join(self._base_dir, "temp_file") file_io.FileIO(file_path, mode="w").write("testing") rename_path = os.path.join(self._base_dir, "rename_file") file_io.FileIO(rename_path, mode="w").write("rename") file_io.rename(file_path, rename_path, overwrite=True) self.assertTrue(file_io.file_exists(rename_path)) self.assertFalse(file_io.file_exists(file_path)) def testRenameOverwriteFalse(self): file_path = os.path.join(self._base_dir, "temp_file") file_io.FileIO(file_path, mode="w").write("testing") rename_path = os.path.join(self._base_dir, "rename_file") file_io.FileIO(rename_path, mode="w").write("rename") with self.assertRaises(errors.AlreadyExistsError): file_io.rename(file_path, rename_path, overwrite=False) self.assertTrue(file_io.file_exists(rename_path)) self.assertTrue(file_io.file_exists(file_path)) def testDeleteRecursivelyFail(self): fake_dir_path = os.path.join(self._base_dir, "temp_dir") with self.assertRaises(errors.NotFoundError): file_io.delete_recursively(fake_dir_path) @run_all_path_types def testIsDirectory(self, join): dir_path = join(self._base_dir, "test_dir") # Failure for a non-existing dir. 
self.assertFalse(file_io.is_directory(dir_path)) file_io.create_dir(dir_path) self.assertTrue(file_io.is_directory(dir_path)) file_path = join(str(dir_path), "test_file") file_io.FileIO(file_path, mode="w").write("test") # False for a file. self.assertFalse(file_io.is_directory(file_path)) # Test that the value returned from `stat()` has `is_directory` set. file_statistics = file_io.stat(dir_path) self.assertTrue(file_statistics.is_directory) @run_all_path_types def testListDirectory(self, join): dir_path = join(self._base_dir, "test_dir") file_io.create_dir(dir_path) files = ["file1.txt", "file2.txt", "file3.txt"] for name in files: file_path = join(str(dir_path), name) file_io.FileIO(file_path, mode="w").write("testing") subdir_path = join(str(dir_path), "sub_dir") file_io.create_dir(subdir_path) subdir_file_path = join(str(subdir_path), "file4.txt") file_io.FileIO(subdir_file_path, mode="w").write("testing") dir_list = file_io.list_directory(dir_path) self.assertItemsEqual(files + ["sub_dir"], dir_list) def testListDirectoryFailure(self): dir_path = os.path.join(self._base_dir, "test_dir") with self.assertRaises(errors.NotFoundError): file_io.list_directory(dir_path) def _setupWalkDirectories(self, dir_path): # Creating a file structure as follows # test_dir -> file: file1.txt; dirs: subdir1_1, subdir1_2, subdir1_3 # subdir1_1 -> file: file3.txt # subdir1_2 -> dir: subdir2 file_io.create_dir(dir_path) file_io.FileIO(os.path.join(dir_path, "file1.txt"), mode="w").write("testing") sub_dirs1 = ["subdir1_1", "subdir1_2", "subdir1_3"] for name in sub_dirs1: file_io.create_dir(os.path.join(dir_path, name)) file_io.FileIO(os.path.join(dir_path, "subdir1_1/file2.txt"), mode="w").write( "testing" ) file_io.create_dir(os.path.join(dir_path, "subdir1_2/subdir2")) @run_all_path_types def testWalkInOrder(self, join): dir_path_str = os.path.join(self._base_dir, "test_dir") dir_path = join(self._base_dir, "test_dir") self._setupWalkDirectories(dir_path_str) # Now test the walk 
(in_order = True) all_dirs = [] all_subdirs = [] all_files = [] for (w_dir, w_subdirs, w_files) in file_io.walk(dir_path, in_order=True): all_dirs.append(w_dir) all_subdirs.append(w_subdirs) all_files.append(w_files) self.assertItemsEqual( all_dirs, [dir_path_str] + [ os.path.join(dir_path_str, item) for item in ["subdir1_1", "subdir1_2", "subdir1_2/subdir2", "subdir1_3"] ], ) self.assertEqual(dir_path_str, all_dirs[0]) self.assertLess( all_dirs.index(os.path.join(dir_path_str, "subdir1_2")), all_dirs.index(os.path.join(dir_path_str, "subdir1_2/subdir2")), ) self.assertItemsEqual(all_subdirs[1:5], [[], ["subdir2"], [], []]) self.assertItemsEqual(all_subdirs[0], ["subdir1_1", "subdir1_2", "subdir1_3"]) self.assertItemsEqual(all_files, [["file1.txt"], ["file2.txt"], [], [], []]) self.assertLess(all_files.index(["file1.txt"]), all_files.index(["file2.txt"])) def testWalkPostOrder(self): dir_path = os.path.join(self._base_dir, "test_dir") self._setupWalkDirectories(dir_path) # Now test the walk (in_order = False) all_dirs = [] all_subdirs = [] all_files = [] for (w_dir, w_subdirs, w_files) in file_io.walk(dir_path, in_order=False): all_dirs.append(w_dir) all_subdirs.append(w_subdirs) all_files.append(w_files) self.assertItemsEqual( all_dirs, [ os.path.join(dir_path, item) for item in ["subdir1_1", "subdir1_2/subdir2", "subdir1_2", "subdir1_3"] ] + [dir_path], ) self.assertEqual(dir_path, all_dirs[4]) self.assertLess( all_dirs.index(os.path.join(dir_path, "subdir1_2/subdir2")), all_dirs.index(os.path.join(dir_path, "subdir1_2")), ) self.assertItemsEqual(all_subdirs[0:4], [[], [], ["subdir2"], []]) self.assertItemsEqual(all_subdirs[4], ["subdir1_1", "subdir1_2", "subdir1_3"]) self.assertItemsEqual(all_files, [["file2.txt"], [], [], [], ["file1.txt"]]) self.assertLess(all_files.index(["file2.txt"]), all_files.index(["file1.txt"])) def testWalkFailure(self): dir_path = os.path.join(self._base_dir, "test_dir") # Try walking a directory that wasn't created. 
all_dirs = [] all_subdirs = [] all_files = [] for (w_dir, w_subdirs, w_files) in file_io.walk(dir_path, in_order=False): all_dirs.append(w_dir) all_subdirs.append(w_subdirs) all_files.append(w_files) self.assertItemsEqual(all_dirs, []) self.assertItemsEqual(all_subdirs, []) self.assertItemsEqual(all_files, []) @run_all_path_types def testStat(self, join): file_path = join(self._base_dir, "temp_file") file_io.FileIO(file_path, mode="w").write("testing") file_statistics = file_io.stat(file_path) os_statistics = os.stat(str(file_path)) self.assertEqual(7, file_statistics.length) self.assertEqual( int(os_statistics.st_mtime), int(file_statistics.mtime_nsec / 1e9) ) self.assertFalse(file_statistics.is_directory) def testReadLine(self): file_path = os.path.join(self._base_dir, "temp_file") with file_io.FileIO(file_path, mode="r+") as f: f.write("testing1\ntesting2\ntesting3\n\ntesting5") self.assertEqual(36, f.size()) self.assertEqual("testing1\n", f.readline()) self.assertEqual("testing2\n", f.readline()) self.assertEqual("testing3\n", f.readline()) self.assertEqual("\n", f.readline()) self.assertEqual("testing5", f.readline()) self.assertEqual("", f.readline()) def testRead(self): file_path = os.path.join(self._base_dir, "temp_file") with file_io.FileIO(file_path, mode="r+") as f: f.write("testing1\ntesting2\ntesting3\n\ntesting5") self.assertEqual(36, f.size()) self.assertEqual("testing1\n", f.read(9)) self.assertEqual("testing2\n", f.read(9)) self.assertEqual("t", f.read(1)) self.assertEqual("esting3\n\ntesting5", f.read()) def testReadErrorReacquiresGil(self): file_path = os.path.join(self._base_dir, "temp_file") with file_io.FileIO(file_path, mode="r+") as f: f.write("testing1\ntesting2\ntesting3\n\ntesting5") with self.assertRaises(errors.InvalidArgumentError): # At present, this is sufficient to convince ourselves that the change # fixes the problem. That is, this test will seg fault without the change, # and pass with it. 
Unfortunately, this is brittle, as it relies on the # Python layer to pass the argument along to the wrapped C++ without # checking the argument itself. f.read(-2) def testTell(self): file_path = os.path.join(self._base_dir, "temp_file") with file_io.FileIO(file_path, mode="r+") as f: f.write("testing1\ntesting2\ntesting3\n\ntesting5") self.assertEqual(0, f.tell()) self.assertEqual("testing1\n", f.readline()) self.assertEqual(9, f.tell()) self.assertEqual("testing2\n", f.readline()) self.assertEqual(18, f.tell()) self.assertEqual("testing3\n", f.readline()) self.assertEqual(27, f.tell()) self.assertEqual("\n", f.readline()) self.assertEqual(28, f.tell()) self.assertEqual("testing5", f.readline()) self.assertEqual(36, f.tell()) self.assertEqual("", f.readline()) self.assertEqual(36, f.tell()) def testSeek(self): file_path = os.path.join(self._base_dir, "temp_file") with file_io.FileIO(file_path, mode="r+") as f: f.write("testing1\ntesting2\ntesting3\n\ntesting5") self.assertEqual("testing1\n", f.readline()) self.assertEqual(9, f.tell()) # Seek to 18 f.seek(18) self.assertEqual(18, f.tell()) self.assertEqual("testing3\n", f.readline()) # Seek back to 9 f.seek(9) self.assertEqual(9, f.tell()) self.assertEqual("testing2\n", f.readline()) f.seek(0) self.assertEqual(0, f.tell()) self.assertEqual("testing1\n", f.readline()) with self.assertRaises(errors.InvalidArgumentError): f.seek(-1) with self.assertRaises(TypeError): f.seek() # TODO(jhseu): Delete after position deprecation. 
with self.assertRaises(TypeError): f.seek(offset=0, position=0) f.seek(position=9) self.assertEqual(9, f.tell()) self.assertEqual("testing2\n", f.readline()) def testSeekFromWhat(self): file_path = os.path.join(self._base_dir, "temp_file") with file_io.FileIO(file_path, mode="r+") as f: f.write("testing1\ntesting2\ntesting3\n\ntesting5") self.assertEqual("testing1\n", f.readline()) self.assertEqual(9, f.tell()) # Seek to 18 f.seek(9, 1) self.assertEqual(18, f.tell()) self.assertEqual("testing3\n", f.readline()) # Seek back to 9 f.seek(9, 0) self.assertEqual(9, f.tell()) self.assertEqual("testing2\n", f.readline()) f.seek(-f.size(), 2) self.assertEqual(0, f.tell()) self.assertEqual("testing1\n", f.readline()) with self.assertRaises(errors.InvalidArgumentError): f.seek(0, 3) def testReadingIterator(self): file_path = os.path.join(self._base_dir, "temp_file") data = ["testing1\n", "testing2\n", "testing3\n", "\n", "testing5"] with file_io.FileIO(file_path, mode="r+") as f: f.write("".join(data)) actual_data = [] for line in f: actual_data.append(line) self.assertSequenceEqual(actual_data, data) def testReadlines(self): file_path = os.path.join(self._base_dir, "temp_file") data = ["testing1\n", "testing2\n", "testing3\n", "\n", "testing5"] f = file_io.FileIO(file_path, mode="r+") f.write("".join(data)) f.flush() lines = f.readlines() self.assertSequenceEqual(lines, data) def testUTF8StringPath(self): file_path = os.path.join(self._base_dir, "UTF8测试_file") file_io.write_string_to_file(file_path, "testing") with file_io.FileIO(file_path, mode="rb") as f: self.assertEqual(b"testing", f.read()) def testEof(self): """Test that reading past EOF does not raise an exception.""" file_path = os.path.join(self._base_dir, "temp_file") f = file_io.FileIO(file_path, mode="r+") content = "testing" f.write(content) f.flush() self.assertEqual(content, f.read(len(content) + 1)) @run_all_path_types def testUTF8StringPathExists(self, join): file_path = join(self._base_dir, 
"UTF8测试_file_exist") file_io.write_string_to_file(file_path, "testing") v = file_io.file_exists(file_path) self.assertEqual(v, True) def testFilecmp(self): file1 = os.path.join(self._base_dir, "file1") file_io.write_string_to_file(file1, "This is a sentence\n" * 100) file2 = os.path.join(self._base_dir, "file2") file_io.write_string_to_file(file2, "This is another sentence\n" * 100) file3 = os.path.join(self._base_dir, "file3") file_io.write_string_to_file(file3, u"This is another sentence\n" * 100) self.assertFalse(file_io.filecmp(file1, file2)) self.assertTrue(file_io.filecmp(file2, file3)) def testFilecmpSameSize(self): file1 = os.path.join(self._base_dir, "file1") file_io.write_string_to_file(file1, "This is a sentence\n" * 100) file2 = os.path.join(self._base_dir, "file2") file_io.write_string_to_file(file2, "This is b sentence\n" * 100) file3 = os.path.join(self._base_dir, "file3") file_io.write_string_to_file(file3, u"This is b sentence\n" * 100) self.assertFalse(file_io.filecmp(file1, file2)) self.assertTrue(file_io.filecmp(file2, file3)) def testFilecmpBinary(self): file1 = os.path.join(self._base_dir, "file1") file_io.FileIO(file1, "wb").write("testing\n\na") file2 = os.path.join(self._base_dir, "file2") file_io.FileIO(file2, "wb").write("testing\n\nb") file3 = os.path.join(self._base_dir, "file3") file_io.FileIO(file3, "wb").write("testing\n\nb") file4 = os.path.join(self._base_dir, "file4") file_io.FileIO(file4, "wb").write("testing\n\ntesting") self.assertFalse(file_io.filecmp(file1, file2)) self.assertFalse(file_io.filecmp(file1, file4)) self.assertTrue(file_io.filecmp(file2, file3)) def testFileCrc32(self): file1 = os.path.join(self._base_dir, "file1") file_io.write_string_to_file(file1, "This is a sentence\n" * 100) crc1 = file_io.file_crc32(file1) file2 = os.path.join(self._base_dir, "file2") file_io.write_string_to_file(file2, "This is another sentence\n" * 100) crc2 = file_io.file_crc32(file2) file3 = os.path.join(self._base_dir, "file3") 
file_io.write_string_to_file(file3, "This is another sentence\n" * 100) crc3 = file_io.file_crc32(file3) self.assertTrue(crc1 != crc2) self.assertEqual(crc2, crc3) def testFileCrc32WithBytes(self): file1 = os.path.join(self._base_dir, "file1") file_io.write_string_to_file(file1, "This is a sentence\n" * 100) crc1 = file_io.file_crc32(file1, block_size=24) file2 = os.path.join(self._base_dir, "file2") file_io.write_string_to_file(file2, "This is another sentence\n" * 100) crc2 = file_io.file_crc32(file2, block_size=24) file3 = os.path.join(self._base_dir, "file3") file_io.write_string_to_file(file3, "This is another sentence\n" * 100) crc3 = file_io.file_crc32(file3, block_size=-1) self.assertTrue(crc1 != crc2) self.assertEqual(crc2, crc3) def testFileCrc32Binary(self): file1 = os.path.join(self._base_dir, "file1") file_io.FileIO(file1, "wb").write("testing\n\n") crc1 = file_io.file_crc32(file1) file2 = os.path.join(self._base_dir, "file2") file_io.FileIO(file2, "wb").write("testing\n\n\n") crc2 = file_io.file_crc32(file2) file3 = os.path.join(self._base_dir, "file3") file_io.FileIO(file3, "wb").write("testing\n\n\n") crc3 = file_io.file_crc32(file3) self.assertTrue(crc1 != crc2) self.assertEqual(crc2, crc3) def testMatchingFilesPermission(self): # Create top level directory test_dir. dir_path = os.path.join(self._base_dir, "test_dir") file_io.create_dir(dir_path) # Create second level directories `noread` and `any`. noread_path = os.path.join(dir_path, "noread") file_io.create_dir(noread_path) any_path = os.path.join(dir_path, "any") file_io.create_dir(any_path) files = ["file1.txt", "file2.txt", "file3.txt"] for name in files: file_path = os.path.join(any_path, name) file_io.FileIO(file_path, mode="w").write("testing") file_path = os.path.join(noread_path, "file4.txt") file_io.FileIO(file_path, mode="w").write("testing") # Change noread to noread access. 
os.chmod(noread_path, 0) expected_match = [os.path.join(any_path, name) for name in files] self.assertItemsEqual( file_io.get_matching_files(os.path.join(dir_path, "*", "file*.txt")), expected_match, ) # Change noread back so that it could be cleaned during tearDown. os.chmod(noread_path, 0o777) def testFileSeekableWithZip(self): # Note: Test case for GitHub issue 27276, issue only exposed in python 3.7+. filename = os.path.join(self._base_dir, "a.npz") np.savez_compressed(filename, {"a": 1, "b": 2}) with gfile.GFile(filename, "rb") as f: info = np.load( f, allow_pickle=True ) # pylint: disable=unexpected-keyword-arg _ = [i for i in info.items()] def testHasAtomicMove(self): self.assertTrue(file_io.has_atomic_move("/a/b/c")) if __name__ == "__main__": test.main()
[ "bryan.guner@gmail.com" ]
bryan.guner@gmail.com
70252ccd8d751ddb991b9baf48cccda96d0787ae
00758be070825c33d9178c8a50d1a59ee2c3c790
/ppci/format/pefile/pefile.py
9828d6c93963c5f9a97d85c328a03344460d59d7
[ "BSD-2-Clause" ]
permissive
jsdelivrbot/ppci-mirror
d2a87f21a735a9495ad1130959b599ab317a62f6
67195d628275e2332ceaf44c9e13fc58d0877157
refs/heads/master
2020-04-10T06:23:38.964744
2018-12-07T17:05:05
2018-12-07T17:05:05
160,853,011
0
0
BSD-2-Clause
2018-12-07T17:07:00
2018-12-07T17:07:00
null
UTF-8
Python
false
false
312
py
from .headers import DosHeader, CoffHeader, PeOptionalHeader64
from .headers import ImageSectionHeader, PeHeader, DataDirectoryHeader
from .headers import ImportDirectoryTable


class PeFile:
    """In-memory representation of a PE (Portable Executable) file.

    Currently only carries the PE header; the other header types are
    imported for use by related code.
    """

    def __init__(self):
        # Every PE file starts with a PE header record.
        self.pe_header = PeHeader()


class ExeFile(PeFile):
    """A PE file that is an executable image (no extra state yet)."""

    pass
[ "windel@windel.nl" ]
windel@windel.nl
a6ad73f8fa4ad85278553a44d738a6860b414064
913ffcf29991e57c504bc639cfabe471dfd41782
/Tank Game/iniciar_juego.py
9b0bb5c20e6eb64d3db20df8b75aa549bc3c4941
[]
no_license
JaimeGo/PyQt-Projects
ef30761c5c2c025b9f98db7ed7e7d66b32d9b535
c54eeaff69424ab463d64391422005bba3ceabd7
refs/heads/master
2020-03-18T13:52:40.067614
2018-05-25T07:01:57
2018-05-25T07:01:57
134,814,650
0
0
null
null
null
null
UTF-8
Python
false
false
236
py
from mainwindow import comenzar_main_window
from PyQt4 import uic, QtCore, QtGui
from menu_inicial import MenuInicial


def _launch():
    """Create the Qt application, show the start menu and enter the event loop."""
    qt_app = QtGui.QApplication([])
    start_menu = MenuInicial()
    start_menu.show()
    qt_app.exec_()


if __name__ == '__main__':
    _launch()
[ "jgonzalez1@uc.cl" ]
jgonzalez1@uc.cl
07c1226fe221bea2d784ad146118712a1e324f3f
fd0def9fe74e934e14839f5f733977a096ef318f
/neural_network_example.py
eb0a671c50644f080fcbbdfc95134ddf20cc284b
[]
no_license
MarcoBz/neural_network
2043801f3ee9725e1325f62e476b267dea5c223b
29a70864f868cd208e023686e267414723fbb212
refs/heads/master
2020-03-25T08:27:18.858311
2018-08-05T13:05:08
2018-08-05T13:05:08
143,613,212
2
0
null
null
null
null
UTF-8
Python
false
false
3,449
py
# -*- coding: utf-8 -*-
"""Minimal three-layer (input/hidden/output) neural network trained with
plain backpropagation.

Weights are deliberately initialised to all ones, which keeps every run
deterministic (a normal-distribution initialiser is the usual alternative).
Created on Sun Aug 5 14:08:44 2018, @author: WinUser.
"""

import numpy

# scipy.special provides expit(), the logistic sigmoid
import scipy.special


class neuralNetwork:
    """Fully-connected 3-layer network using a sigmoid activation."""

    def __init__(self, inputnodes, hiddennodes, outputnodes, learningrate):
        # Layer sizes.
        self.inodes = inputnodes
        self.hnodes = hiddennodes
        self.onodes = outputnodes

        # Weight matrices: wih maps input->hidden, who maps hidden->output.
        # Entry w_ij is the weight of the link from node i to node j in the
        # next layer.  All-ones initialisation keeps runs reproducible.
        self.wih = numpy.ones((self.hnodes, self.inodes))
        self.who = numpy.ones((self.onodes, self.hnodes))

        # Learning rate for the gradient-descent updates.
        self.lr = learningrate

        # Activation function: logistic sigmoid.
        self.activation_function = lambda x: scipy.special.expit(x)

    def train(self, inputs_list, targets_list):
        """Run one backpropagation step on a single (inputs, targets) example."""
        # Convert the python lists to column vectors.
        inputs = numpy.array(inputs_list, ndmin=2).T
        targets = numpy.array(targets_list, ndmin=2).T

        # Forward pass through both layers.
        hidden_outputs = self.activation_function(numpy.dot(self.wih, inputs))
        final_outputs = self.activation_function(numpy.dot(self.who, hidden_outputs))

        # Output error is (target - actual); the hidden error is the output
        # error redistributed backwards through the output weights.
        output_errors = targets - final_outputs
        hidden_errors = numpy.dot(self.who.T, output_errors)

        # Gradient-descent weight updates (sigmoid derivative is y * (1 - y)).
        self.who += self.lr * numpy.dot(
            (output_errors * final_outputs * (1.0 - final_outputs)),
            numpy.transpose(hidden_outputs))
        self.wih += self.lr * numpy.dot(
            (hidden_errors * hidden_outputs * (1.0 - hidden_outputs)),
            numpy.transpose(inputs))

    def query(self, inputs_list):
        """Forward pass: return the network output for *inputs_list*."""
        inputs = numpy.array(inputs_list, ndmin=2).T
        hidden_outputs = self.activation_function(numpy.dot(self.wih, inputs))
        return self.activation_function(numpy.dot(self.who, hidden_outputs))
[ "noreply@github.com" ]
noreply@github.com
a4056e610f35a5a1bfbe93990398a2a61a725fde
b7620d0f1a90390224c8ab71774b9c906ab3e8e9
/aliyun-python-sdk-imm/aliyunsdkimm/request/v20200930/CreateFigureClusteringTaskRequest.py
cfa9780027b8c39cc94abaae16dbba18b98bda90
[ "Apache-2.0" ]
permissive
YaoYinYing/aliyun-openapi-python-sdk
e9c62940baee1a35b9ec4a9fbd1e4eb0aaf93b2f
e9a93cc94bd8290d1b1a391a9cb0fad2e6c64627
refs/heads/master
2022-10-17T16:39:04.515562
2022-10-10T15:18:34
2022-10-10T15:18:34
117,057,304
0
0
null
2018-01-11T06:03:02
2018-01-11T06:03:01
null
UTF-8
Python
false
false
2,476
py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdkimm.endpoint import endpoint_data
import json


class CreateFigureClusteringTaskRequest(RpcRequest):
    """RPC request for IMM ``CreateFigureClusteringTask`` (API version 2020-09-30)."""

    def __init__(self):
        RpcRequest.__init__(self, 'imm', '2020-09-30', 'CreateFigureClusteringTask', 'imm')
        self.set_method('POST')
        # Wire up endpoint resolution when the core SDK supports it.
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_UserData(self):
        """Return the ``UserData`` query parameter (String)."""
        return self.get_query_params().get('UserData')

    def set_UserData(self, UserData):
        """Set the ``UserData`` query parameter (String)."""
        self.add_query_param('UserData', UserData)

    def get_ProjectName(self):
        """Return the ``ProjectName`` query parameter (String)."""
        return self.get_query_params().get('ProjectName')

    def set_ProjectName(self, ProjectName):
        """Set the ``ProjectName`` query parameter (String)."""
        self.add_query_param('ProjectName', ProjectName)

    def get_NotifyTopicName(self):
        """Return the ``NotifyTopicName`` query parameter (String)."""
        return self.get_query_params().get('NotifyTopicName')

    def set_NotifyTopicName(self, NotifyTopicName):
        """Set the ``NotifyTopicName`` query parameter (String)."""
        self.add_query_param('NotifyTopicName', NotifyTopicName)

    def get_NotifyEndpoint(self):
        """Return the ``NotifyEndpoint`` query parameter (String)."""
        return self.get_query_params().get('NotifyEndpoint')

    def set_NotifyEndpoint(self, NotifyEndpoint):
        """Set the ``NotifyEndpoint`` query parameter (String)."""
        self.add_query_param('NotifyEndpoint', NotifyEndpoint)

    def get_DatasetName(self):
        """Return the ``DatasetName`` query parameter (String)."""
        return self.get_query_params().get('DatasetName')

    def set_DatasetName(self, DatasetName):
        """Set the ``DatasetName`` query parameter (String)."""
        self.add_query_param('DatasetName', DatasetName)

    def get_Tags(self):
        """Return the ``Tags`` query parameter (Map)."""
        return self.get_query_params().get('Tags')

    def set_Tags(self, Tags):
        """Set the ``Tags`` query parameter (Map); serialised as JSON on the wire."""
        self.add_query_param("Tags", json.dumps(Tags))
[ "sdk-team@alibabacloud.com" ]
sdk-team@alibabacloud.com
56f881b70b7d27484498f6ee6da3d31c88d37dc0
25ef3ecf50f3807197eb84ab617ad5051c2edfc6
/tweets/migrations/0003_friends_current_user.py
6b1cc18382d94c375bb8b609d87d323236f48b35
[]
no_license
hamiteksi/TwitterClone
bfa30f309d2514d51983dd328c961d56e664636f
e5b7c59015ca79588d40b16801748f5c765d58c0
refs/heads/master
2020-07-01T19:06:45.331751
2019-08-08T13:38:23
2019-08-08T13:38:23
201,266,799
0
0
null
null
null
null
UTF-8
Python
false
false
605
py
# Generated by Django 2.2.2 on 2019-08-05 10:11 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('tweets', '0002_friends'), ] operations = [ migrations.AddField( model_name='friends', name='current_user', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='Owner', to=settings.AUTH_USER_MODEL), ), ]
[ "hamiteksi@outlook.com" ]
hamiteksi@outlook.com
3e12ff790865811a2ef847850a4264105f4e606d
dfc3d113015ba71756271adf746e4e6976a29ab7
/apps/aboutus/migrations/0001_initial.py
86df78ee4264c1b5738200cb632c2454a124e99c
[]
no_license
abinabraham/capitalshipping
169f9a7be71b53b2fd515906ecfb79a5119bdfb0
1ff1950c322ea1221a44e2ce141e1926e6c3169c
refs/heads/main
2023-05-31T09:57:47.865671
2021-06-18T08:02:25
2021-06-18T08:02:25
378,076,804
0
0
null
null
null
null
UTF-8
Python
false
false
1,061
py
# Generated by Django 3.0.6 on 2020-05-30 11:41 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='AboutIntro', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=200, verbose_name='Titles')), ('des', models.TextField(verbose_name='Description')), ('image', models.ImageField(blank=True, upload_to='about/images/%y%m%d')), ('is_verified', models.BooleanField(default=True)), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ], options={ 'verbose_name': 'About Intro', 'verbose_name_plural': 'About Intro', 'ordering': ['-id'], }, ), ]
[ "abinabrahamcs@gmail.com" ]
abinabrahamcs@gmail.com
cdc1db5c843207dd2d1ee052bc775e821e56eb11
c639e5c7bb55268a13f756b265a67b18e9f27b52
/blog/migrations/0002_remove_blog_date.py
f2785517b16148b8fed0cdab49f24b555fca54a5
[]
no_license
LionMillion/django3-person-portfolio
b6b68456dd3c6ec0b426c1dd867d4a1e1847b0e8
f1ce466b64e8cf50e0f65b5672b310e277aef394
refs/heads/master
2022-11-30T10:04:55.632497
2020-03-18T18:01:01
2020-03-18T18:01:01
248,221,469
0
0
null
2022-11-22T05:24:34
2020-03-18T12:09:33
Python
UTF-8
Python
false
false
310
py
# Generated by Django 3.0.4 on 2020-03-17 18:29 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('blog', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='blog', name='date', ), ]
[ "lionmilion111@gmail.com" ]
lionmilion111@gmail.com
4306cf5a501f3de00edb62f24e757973416594b6
e1794a1da4728bfdedcb84edeb62c5e1362feba4
/government_covid19/government_covid19/spiders/getLinks_Connecticut.py
87bc385902e88e1f31f00c9a5203a58a58cdf755
[]
no_license
djEdwards/Government-Scraper
5ca871cbf71707cbd497412bee402da8b6b08c42
848c2dfaa370f0acf80bd768fc7f530e795ecb15
refs/heads/master
2023-02-09T10:47:55.038135
2020-11-27T22:56:32
2020-11-27T22:56:32
327,820,213
0
0
null
null
null
null
UTF-8
Python
false
false
472
py
#################
## Connecticut - GET LINKS - Scraper
## 06/29/20
## DJ Edwards
#################

import scrapy


class getLinks_Connecticut(scrapy.Spider):
    """Collect every press-release href from the Connecticut governor's
    COVID-19 page and append them, comma-separated, to ``all_CT_links.txt``."""

    name = "CT_links"
    start_urls = ['https://portal.ct.gov/Coronavirus/Pages/Governors-Press-Releases']

    def parse(self, response):
        out_path = 'all_CT_links.txt'
        hrefs = response.css('td a::attr(href)').getall()
        with open(out_path, 'a') as out_file:
            out_file.write(','.join(hrefs))
[ "DJ@DJ.attlocal.net" ]
DJ@DJ.attlocal.net
82b9644f113f5b32dff7bbc6ece5ed6b64b7a878
503210080fc382f929fd958d97fe56707143faa5
/DictionaryTagger.py
a1cd639610e258ba816ee3bc376ab1cad517eeb7
[]
no_license
KaptaanBarbosa/eldorado
dbf63e61cfb7abc5d3461f7141f82423502f170e
b577d9fbcaea6097fb7f3bd384d6c4af8f5cb9fc
refs/heads/master
2020-05-16T21:50:46.145650
2015-04-10T07:05:05
2015-04-10T07:05:05
32,032,167
0
0
null
null
null
null
UTF-8
Python
false
false
3,615
py
__author__ = 'shrivas'

import yaml


class DictionaryTagger(object):
    """Load sentiment dictionaries from YAML files into one merged mapping.

    :param dictionary_paths: list of YAML file paths; on duplicate keys the
        later file wins.
    """

    def __init__(self, dictionary_paths):
        # Bug fix: the original re-assigned ``self.dictionaries`` on every
        # loop iteration (so only the LAST file's contents survived) and it
        # never closed the file handles it opened.  Load each file inside a
        # context manager and merge the results instead.
        self.dictionaries = {}
        for path in dictionary_paths:
            with open(path, 'r') as dict_file:
                # safe_load: load() without an explicit Loader is deprecated
                # since PyYAML 5.1 (and an error in 6.x), and these files are
                # plain word lists that need no python-object tags.
                loaded = yaml.safe_load(dict_file)
                if loaded:
                    self.dictionaries.update(loaded)

    # NOTE(review): a commented-out draft of tag_sentence() (longest-match,
    # left-to-right tagging) was removed here; it referenced self.dictionary
    # and self.max_key_size, which are never initialised in this class.


contentArray = ['Starbucks is not doing very well lately.',
                'Overall, while it may seem there is already a Starbucks on every corner, Starbucks still has a lot of room to grow.',
                'They just began expansion into food products, which has been going quite well so far for them.',
                'I can attest that my own expenditure when going to Starbucks has increased, in lieu of these food products.',
                'Starbucks is also indeed expanding their number of stores as well.',
                'Starbucks still sees strong sales growth here in the united states, and intends to actually continue increasing this.',
                'Starbucks also has one of the more successful loyalty programs, which accounts for 30% of all transactions being loyalty-program-based.',
                'As if news could not get any more positive for the company, Brazilian weather has become ideal for producing coffee beans.',
                'Brazil is the world\'s #1 coffee producer, the source of about 1/3rd of the entire world\'s supply!',
                'Given the dry weather, coffee farmers have amped up production, to take as much of an advantage as possible with the dry weather.',
                'Increase in supply... well you know the rules...']

dicttagger = DictionaryTagger(['/users/shrivas/Desktop/positive.yml',
                               '/users/shrivas/Desktop/negative.yml'])
[ "shri.sid@gmail.com" ]
shri.sid@gmail.com
08aff1c9deeb30d9b508c41d8160a58e126e8571
7f2f46167e5c7362a703203a5c35d03ce7236489
/dtelbot/oop.py
de0e0370ac35779b8699bf041c5eb182538d19c7
[]
no_license
dalor/dsr-bot
fb15b97ceb2688f0c4455b1432e064de1d4f7886
a2889fdc2eba520da9d4f56bc8cdfe1ed1569381
refs/heads/master
2020-05-26T17:54:41.270765
2019-05-24T08:56:15
2019-05-24T08:56:15
188,327,528
0
0
null
null
null
null
UTF-8
Python
false
false
1,271
py
from .bot_core import BotCore


class Bot(BotCore):
    """Decorator-style registration facade over :class:`BotCore`.

    Each public method below is a decorator factory: it returns a decorator
    that registers the wrapped handler for one Telegram update type, matching
    *text* against the value found at *path* inside the update payload, and
    then returns the handler unchanged.
    """

    def __init__(self, token, proxy=None):
        super().__init__(token, proxy)
        # Flag indicating the decorator (OOP) registration API is in use.
        self.oop = True

    def _registrar(self, events, text, path):
        """Build the registering decorator (private helper).

        Factored out of the seven previously copy-pasted, near-identical
        decorator factories below.
        """
        def reg(handler):
            self.register(events, handler, text, path)
            return handler
        return reg

    # NOTE(review): the mutable list defaults below are preserved for
    # interface compatibility; they are only passed through to register(),
    # which is safe as long as register() never mutates them — confirm in
    # bot_core.

    def message(self, text, path=['text']):
        """Register a handler for ``message`` updates."""
        return self._registrar(['message'], text, path)

    def edited_message(self, text, path=['text']):
        """Register a handler for ``edited_message`` updates."""
        return self._registrar(['edited_message'], text, path)

    def channel_post(self, text, path=['text']):
        """Register a handler for ``channel_post`` updates."""
        return self._registrar(['channel_post'], text, path)

    def edited_channel_post(self, text, path=['text']):
        """Register a handler for ``edited_channel_post`` updates."""
        return self._registrar(['edited_channel_post'], text, path)

    def inline_query(self, text, path=['query']):
        """Register a handler for ``inline_query`` updates."""
        return self._registrar(['inline_query'], text, path)

    def chosen_inline_result(self, text, path=['result_id']):
        """Register a handler for ``chosen_inline_result`` updates."""
        return self._registrar(['chosen_inline_result'], text, path)

    def callback_query(self, text, path=['data']):
        """Register a handler for ``callback_query`` updates."""
        return self._registrar(['callback_query'], text, path)
[ "dalor@i.ua" ]
dalor@i.ua
b92f48348a9611da3ff38531ee8b12c81121f7ca
16e5c3118faf5184a3a5553d576cf5aa86d51cef
/stats-ms/src/measures/light.py
bde2871d0091f570f375c8e31e7ea2d811a5b1cf
[ "ISC" ]
permissive
7Rocky/IoT_Microservices
a2913372e3413622c6749e33a7404c815fda6e74
c42de668609965068f8feb87ee80fc618524b679
refs/heads/master
2023-03-15T13:18:32.016924
2021-06-12T10:02:13
2021-06-12T10:02:13
236,727,290
3
0
ISC
2023-03-05T01:11:59
2020-01-28T12:12:49
TypeScript
UTF-8
Python
false
false
1,289
py
from statistics import fmean

from src.measures.measure import Measure


class Light(Measure):
    """Light-sensor measure: reduces a batch of raw readings to summary stats."""

    def __init__(self, queue_collection, max_items):
        super().__init__(queue_collection, max_items)

    def calculate_stats(self, data):
        """Summarise *data* (a non-empty list of reading dicts) into one stats dict."""
        readings = [sample.get('digital_value') for sample in data]
        stamps = [sample.get('timestamp') for sample in data]
        dates = [sample.get('date') for sample in data]

        first_ts, last_ts = min(stamps), max(stamps)
        # Identifying fields are taken from the first sample; presumably the
        # whole batch comes from one sensor/user (confirm against the caller).
        head = data[0]

        return {
            'digital_values': readings,
            'end_date': max(dates),
            'end_timestamp': last_ts,
            'init_date': min(dates),
            'init_timestamp': first_ts,
            'ip': head.get('ip'),
            'mean_value': round(fmean(readings), 1),
            'measure': head.get('measure'),
            'n_samples': len(data),
            'sensor': head.get('sensor'),
            'time_span': last_ts - first_ts,
            'username': head.get('username'),
        }
[ "rockygitt@gmail.com" ]
rockygitt@gmail.com
f3202b4a5d15caf6eee16f017b65661b6f612ab3
dc30b23b8ae7cfc9c217b9d5704622403a35545a
/transfer_learning.py
2259c6cb20dbb789c8a014f0b80ae29bf559937b
[]
no_license
AlexTaguchi/transfer-learning-tutorial
ef1ee6fa8ac9a00ffa73556a5f2ee31c86ced78a
405eab6371649eeac2384d95a945b96c78b45248
refs/heads/master
2020-12-21T10:37:01.476687
2020-01-27T03:15:53
2020-01-27T03:15:53
236,404,558
0
0
null
null
null
null
UTF-8
Python
false
false
6,002
py
"""Transfer-learning script: fine-tunes a pretrained ResNet-18 on an
ImageFolder dataset laid out as ``data/train`` and ``data/val``, then plots a
grid of validation predictions.  Runs end-to-end at import time (loads data,
trains, visualises)."""

# Modules
import torch
import torch.nn as nn
import torch.optim as optim
from torch.optim import lr_scheduler
import numpy as np
import torchvision
from torchvision import datasets, models, transforms
import matplotlib.pyplot as plt
import time
import os
import copy

# Parameters
directory = 'data'
epochs = 20

# Normalize data and augmentation for training
# (ImageNet channel means/stds, matching the pretrained ResNet weights)
data_transforms = {
    'train': transforms.Compose([
        transforms.RandomResizedCrop(224),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    ]),
    'val': transforms.Compose([
        transforms.Resize(256),
        transforms.CenterCrop(224),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    ]),
}

# Read image data directory
image_datasets = {x: datasets.ImageFolder(os.path.join(directory, x),
                                          data_transforms[x])
                  for x in ['train', 'val']}
dataset_sizes = {x: len(image_datasets[x]) for x in ['train', 'val']}
class_names = image_datasets['train'].classes

# Initialize dataloader
dataloaders = {x: torch.utils.data.DataLoader(image_datasets[x], batch_size=10,
                                              shuffle=True, num_workers=4)
               for x in ['train', 'val']}

# Run on GPU if available
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')


# Train model
def train_model(model, criterion, optimizer, scheduler, epochs=10):
    """Fine-tune *model*, returning it loaded with the best-validation weights."""

    # Record start time of training
    start_time = time.time()

    # Preallocate best model weights and accuracy
    best_model_weights = copy.deepcopy(model.state_dict())
    best_accuracy = 0.0

    # Train over multiple epochs
    for epoch in range(1, epochs + 1):
        print(f'Epoch {epoch}/{epochs}')
        print('-' * 10)

        # Start with train phase then switch to validation phase
        for phase in ['train', 'val']:
            if phase == 'train':
                model.train()
            else:
                model.eval()

            # Preallocate the running loss and number of correct predictions
            running_loss = 0.0
            running_corrects = 0

            # Pass inputs through model
            for inputs, labels in dataloaders[phase]:
                inputs = inputs.to(device)
                labels = labels.to(device)

                # Zero the parameter gradients
                optimizer.zero_grad()

                # Remember gradients for backpropagation only in training phase
                with torch.set_grad_enabled(phase == 'train'):
                    outputs = model(inputs)
                    _, preds = torch.max(outputs, 1)
                    loss = criterion(outputs, labels)

                    # Optimize model only in training phase
                    if phase == 'train':
                        loss.backward()
                        optimizer.step()

                # Increment running statistics
                running_loss += loss.item() * inputs.size(0)
                running_corrects += torch.sum(preds == labels.data)

            # Schedule optimizer learning rate
            if phase == 'train':
                scheduler.step()

            # Report loss and accuracy of current phase
            epoch_loss = running_loss / dataset_sizes[phase]
            epoch_accuracy = running_corrects.double() / dataset_sizes[phase]
            print(f'{phase.capitalize()} Loss: {epoch_loss:.4f} Accuracy: {epoch_accuracy:.4f}')

            # Keep track of the best running model on the validation set
            if phase == 'val' and epoch_accuracy > best_accuracy:
                best_accuracy = epoch_accuracy
                best_model_weights = copy.deepcopy(model.state_dict())

        print()

    # Report final model performance
    time_elapsed = time.time() - start_time
    print(f'Training complete in {time_elapsed // 60:.0f}m {time_elapsed % 60:.0f}s')
    print(f'Best Validation Accuracy: {best_accuracy:4f}')

    # Return best model weights
    model.load_state_dict(best_model_weights)
    return model


# Visualize model
def visualize_model(model):
    """Plot a 3x3 grid of validation images with their predicted class names."""

    # Remember model training mode and switch to evaluation mode
    training_mode = model.training
    model.eval()

    # Pass a batch of validation inputs without recording gradients
    plt.figure()
    with torch.no_grad():
        for i, (inputs, labels) in enumerate(dataloaders['val']):

            # Predict labels from inputs
            inputs = inputs.to(device)
            labels = labels.to(device)
            outputs = model(inputs)
            _, preds = torch.max(outputs, 1)

            # Plot images and predicted labels
            # NOTE(review): this takes the i-th IMAGE of the i-th BATCH
            # (preds[i], inputs[...][i]) rather than iterating images within
            # one batch — works while batch_size >= 9 but is possibly not the
            # intended behaviour; confirm.
            if i < 9:
                ax = plt.subplot(3, 3, i + 1)
                ax.axis('off')
                ax.set_title('Predicted: {}'.format(class_names[preds[i]]))
                # Undo the ImageNet normalisation before displaying.
                inp = inputs.cpu().data[i].numpy().transpose((1, 2, 0))
                mean = np.array([0.485, 0.456, 0.406])
                std = np.array([0.229, 0.224, 0.225])
                inp = inp * std + mean
                inp = np.clip(inp, 0, 1)
                plt.imshow(inp)
                plt.pause(0.001)
            else:
                break

    # Return model back to original training mode
    model.train(mode=training_mode)


# Load pretrained model
model = models.resnet18(pretrained=True)

# Replace the final fully connected layer for one with the correct number of outputs
features = model.fc.in_features
model.fc = nn.Linear(features, len(class_names))
model = model.to(device)

# Set up loss function and optimizer
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(model.parameters(), lr=0.001, momentum=0.9)

# Decay learning rate by a factor of 0.1 every 5 epochs
scheduler = lr_scheduler.StepLR(optimizer, step_size=5, gamma=0.1)

# Train model
model = train_model(model, criterion, optimizer, scheduler, epochs=epochs)

# Visualize model performance
visualize_model(model)
plt.show()
[ "thatflippingfool@gmail.com" ]
thatflippingfool@gmail.com
9c5292e5596095a44765634c9f056877293f618d
b9a110d34d0de3ef91598c7557d41864de3faedb
/SystemCode/Level_Up_Chatbot/Level_Up_App/careerendpointrules.py
37da14129b3cc65d0b4626c860c6265b2cd4cc55
[]
no_license
raymondng76/IRS-CS-2019-07-29-IS1FT-GRP-Team10-Personal-Career-Manager-Software-Agent
d316570e6062e72c306942b19fa77d96406c5fb7
3f4dd28906575adc30e25e8a5abef545d27b06fd
refs/heads/master
2020-06-27T12:11:36.544329
2019-08-31T08:24:30
2019-08-31T08:24:30
199,951,810
1
2
null
2019-08-31T08:24:31
2019-08-01T01:10:30
Python
UTF-8
Python
false
false
313
py
from experta import * class QuestionaireFact(Fact): """Fact input from the questionaire""" pass careerendpoint = '' class CareerEndPointRecommender(KnowledgeEngine): @Rule() def recommend(self): """Recommend Career End Point""" global careerendpoint careerendpoint = ''
[ "35454616+raymondng76@users.noreply.github.com" ]
35454616+raymondng76@users.noreply.github.com
d440eb8ab95e9e6084cfda38196829a2ddf59596
4643079e397dff1bfa0e001f9afa00708386da2a
/apps/system/models.py
50dee401737268ee253bf99b8ceecbe731142cf1
[]
no_license
3x7r3m157/python_blackbelt
7bb27f2bd3e279ece9389a794ba0d5ea97bce974
fb2a70c3352e2604fe155dab33fc2ab46fea2c64
refs/heads/master
2020-03-17T22:13:27.082424
2018-05-18T19:09:04
2018-05-18T19:09:04
133,994,314
0
0
null
null
null
null
UTF-8
Python
false
false
3,770
py
# -*- coding: utf-8 -*- from __future__ import unicode_literals import bcrypt, re, random from django.db import models class UserManager(models.Manager): def validation(request, postData): status = { 'valid' : True, 'error': [] } if not re.match(r'^[a-zA-Z ]+$', postData['name']) or not re.search(r'[a-zA-Z]{3,50}',postData['name']): status['valid'] = False status['error'].append('Your name must be at least 3 characters and no longer than 50, sucka.') if not re.match(r'^[a-zA-Z0-9 ]+$', postData['username'] or not re.search(r'[a-zA-Z0-9]{3,50}', postData['username'])): status['valid'] = False status['error'].append('Your username must be at least 3 characters and no longer than 50, sucka.') if not re.match(r'^[a-zA-Z0-9.+_-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]+$', postData['email']): status['valid'] = False status['error'].append('Your email address sucks. Try again.') if len(User.objects.filter(email=postData['email'])) > 0: status['valid'] = False status['error'].append('That email has been taken.') if len(postData['password']) < 8: status['valid'] = False status['error'].append('Password has to be at least 8 characters long.') if not postData['password'] == postData['confirmpassword']: status['valid'] = False status['error'].append('Your password doesnt match.') if not postData['birthday']: status['valid'] = False status['error'].append('Please enter your birthday.') return status def startlogin(self, postData): user = User.objects.filter(email=postData['email']) status = { 'verified' : False, 'error' : 'Your email or password doesnt match.', } if len(user) > 0 and bcrypt.checkpw(postData['password'].encode(),user[0].password.encode()): status['user'] = user[0] status['verified'] = True return status def usercreator(self, postData): self.create(name=postData['name'],username=postData['username'],email=postData['email'],password=bcrypt.hashpw(postData['password'].encode(), bcrypt.gensalt()), birthday=postData['birthday']) class User(models.Model): name = 
models.CharField(max_length=50) username = models.CharField(max_length=50) email = models.CharField(max_length=50) password = models.CharField(max_length=50) birthday = models.DateField(auto_now=False) objects = UserManager() def __repr__(self): return "<User object: {} {} {} {}>".format(self.name, self.username, self.email, self.password) class ApptManager(models.Manager): def createAppt(self, postData, user_id): me = User.objects.get(id=user_id) self.create(task=postData['task'],time=postData['time'],date=postData['date'],madefor=me) def destroyAppt(self, postData): Appt.objects.get(id=postData['appt_id']).delete() def updateAppt(self, postData): appt = Appt.objects.get(id=postData['appt_id']) appt.task = postData['task'] appt.time = postData['time'] appt.date = postData['date'] appt.status = postData['status'] appt.save() class Appt(models.Model): task = models.CharField(max_length=50) time = models.TimeField(auto_now_add=False) date = models.DateField(auto_now_add=False) status = models.CharField(max_length=50, default='pending') madefor = models.ForeignKey(User, related_name='appointments') objects = ApptManager() def __repr__(self): return "<|||APPOINTMENT - task: {} time: {} date: {} status: {}|||>".format(self.task, self.time, self.date, self.status)
[ "austin@hackernest.com" ]
austin@hackernest.com
10a23acbe5b537f474ea2602ec0dc6108d8992ac
529e713a78e82de2ae5d44cfb8ef209e0894d72a
/subprocess/popen_pipe.py
35776238852d3f4187f52a4212983d822d6e614e
[ "MIT" ]
permissive
realpython/materials
cd2f548276be2c82f134ca03eadb1cd279e0f26e
d2d62756d3854f54a12a767f2bf9470486c0ceef
refs/heads/master
2023-09-05T22:12:29.806738
2023-08-31T20:56:28
2023-08-31T20:56:28
132,374,697
4,678
6,482
MIT
2023-09-12T22:22:06
2018-05-06T20:46:18
HTML
UTF-8
Python
false
false
393
py
""" **Only works on Linux or macOS** Demonstrates using `subprocess.Popen()` to pipe one command into the other. """ import subprocess ls_process = subprocess.Popen(["ls", "/usr/bin"], stdout=subprocess.PIPE) grep_process = subprocess.Popen( ["grep", "python"], stdin=ls_process.stdout, stdout=subprocess.PIPE ) for line in grep_process.stdout: print(line.decode("utf-8").strip())
[ "iansedano@gmail.com" ]
iansedano@gmail.com
f982ebb9e9cd158b5998ccb248565e40d9cecb2f
5ba2e42e54110b985aa4e9e35ad0f74fde750bd2
/fromsapp/migrations/0006_favorites.py
609dd28e3e5e678342c01586f8741f92f98c4c19
[]
no_license
BoulinCamille/afpaProject
3db33fa17d0a35c9dc05b9f541177471b4a6e75a
8fbcfdc9e6480836291e189b5257211a18f1db49
refs/heads/main
2022-12-28T00:39:56.709098
2020-10-13T12:17:59
2020-10-13T12:17:59
303,692,307
0
0
null
null
null
null
UTF-8
Python
false
false
1,205
py
# Generated by Django 3.0.3 on 2020-05-30 00:53 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('fromsapp', '0005_auto_20200529_1828'), ] operations = [ migrations.CreateModel( name='Favorites', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=100)), ('amazon_url', models.TextField(default='')), ('ldlc_url', models.TextField(default='')), ('maxgaming_url', models.TextField(default='')), ('amazon_price', models.CharField(default='', max_length=100)), ('ldlc_price', models.CharField(default='', max_length=100)), ('maxgaming_price', models.CharField(default='', max_length=100)), ('search_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), ]
[ "camille.boulin@outolook.fr" ]
camille.boulin@outolook.fr
40da64597bcd7c63aa5eb0ecc1d623febf2cbf13
5ff2a02cd7de012284e8fbecbab759477fc5af34
/venv/bin/django-admin
74aadd3beefb7f044425211421717f75bcb56e66
[ "MIT" ]
permissive
ezro1245/civilas_web
c34bf1084e414dfd7935f8c761628f305c0d756b
1195142ab5db94ffd1ad0526c389dcac043331c6
refs/heads/main
2023-04-22T07:49:32.464127
2021-05-10T06:38:02
2021-05-10T06:38:02
365,847,523
0
0
MIT
2021-05-10T03:17:11
2021-05-09T21:02:29
Python
UTF-8
Python
false
false
307
#!/Users/ezro/Desktop/repositorios/civilas_web/venv/bin/python # -*- coding: utf-8 -*- import re import sys from django.core.management import execute_from_command_line if __name__ == '__main__': sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) sys.exit(execute_from_command_line())
[ "ezrosoftware@gmail.com" ]
ezrosoftware@gmail.com
f8a77d8cbbd2ac50088d68d544b1ec1ffc16f9ec
1db123951c96f41cf8dde9fa0ad0f08dd9790c50
/books_authors_app/migrations/0001_initial.py
d0b394ab27f6402a272b3853db22d14a6ab18aa4
[]
no_license
Eliherc1/django-orm-libros
6fa7cf8e5e6c984ef50fc6d16112864a34bba866
69451c22006694d2a2376de3a44a3fd47b036a9e
refs/heads/main
2023-07-11T04:31:07.320351
2021-08-13T03:38:28
2021-08-13T03:38:28
395,134,169
0
0
null
null
null
null
UTF-8
Python
false
false
1,282
py
# Generated by Django 3.2.3 on 2021-08-11 00:48 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Book', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=255)), ('description', models.TextField()), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ], ), migrations.CreateModel( name='Author', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('first_name', models.CharField(max_length=45)), ('last_name', models.CharField(max_length=45)), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('books', models.ManyToManyField(related_name='authors', to='books_authors_app.Book')), ], ), ]
[ "elidezx@gmail.com" ]
elidezx@gmail.com
91cbf485313be09b32b5b61c68eca2dc7f3d26bc
35e5d3eae65f2c602304cf5a65f4f29be951ab9b
/mysite/firstapp/migrations/0005_blog_post.py
4860497af483c20d55668d83b96de6f08d34eb4a
[]
no_license
bor1senko/lab3
930fadcf51485edd28ad1fdaf6b56699c234bfe9
446c83df2f88a6637004549e96e43e050cd254e2
refs/heads/master
2021-01-17T10:20:46.086325
2016-06-09T20:18:00
2016-06-09T20:18:00
58,852,360
7
0
null
null
null
null
UTF-8
Python
false
false
671
py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('firstapp', '0004_auto_20160518_0124'), ] operations = [ migrations.CreateModel( name='blog_post', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('post_text', models.CharField(max_length=400)), ('image', models.ImageField(upload_to=b'')), ('add_name', models.ForeignKey(to='firstapp.Author')), ], ), ]
[ "i.borisenko97@gmail.com" ]
i.borisenko97@gmail.com
cd7a71b8a3f440db5afe0529105e12055373b712
a112b605c23bd76056e9d8fb2dc506748dfd7106
/recruiting_analytics_backend/cohort_management/business/queries/api/torre/candidate/fetch_candidate_information_by_id.py
b214b92b35b995fb20a22f329327f5d6714c31a7
[]
no_license
jmromeroeci/recruiting-analytics-project
ee41cd418901f5cd09825077e04cf7bf53a82138
126b0c3a7a01cc536238ef3bf2b4ff6c613d55ec
refs/heads/main
2023-02-09T01:45:21.779475
2021-01-05T04:46:21
2021-01-05T04:46:21
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,325
py
from ..torre_query import TorreQuery from typing import Dict, List from cohort_management.business.domain.candidate.candidate_information import CandidateInformation, JobInformation, LinkInformation from cohort_management.business.domain.cohort.cohort_information import OrganizationInformation class FetchCandidateById(TorreQuery): def execute(self, candidate_id: str, **kwargs) -> CandidateInformation: return self.get(TorreQuery.ROOT_URL_BIO + "bios/" + candidate_id, **kwargs) def _build_organization_dto(self, organization: Dict) -> OrganizationInformation: return OrganizationInformation( name=organization["name"], picture=organization.get("picture", "no-picture") ) def _build_job_dto(self, job: Dict) -> JobInformation: return JobInformation( name=job["name"], organizations=list(map(self._build_organization_dto, job["organizations"])) ) def _build_link_dto(self, link: Dict) -> LinkInformation: return LinkInformation( name=link["name"], url=link.get("address", "") ) def _build_dto(self, response: Dict, **kwargs) -> CandidateInformation: print(response) return CandidateInformation( username=response["person"].get("name", ""), country=response["person"]["location"]["country"] if "location" in response["person"] else "No Country", platform_name="Torre", public_id=response["person"]["publicId"], bio=response.get("summaryOfBio", ""), strengths=list(map(lambda strength: strength["name"], response.get("strengths", []))), interests=list(map(lambda interest: interest["name"], response.get("interests", []))), jobs=list(map(self._build_job_dto, response.get("jobs", []))), links=list(map(self._build_link_dto, response.get("strengths", []))), number_of_strengths=response["stats"].get("strengths", 0), number_of_awards=response["stats"].get("awards", 0), number_of_interests=response["stats"].get("interests", 0), number_of_jobs=response["stats"].get("jobs", 0), number_of_projects=response["stats"].get("projects", 0), cohort_id=kwargs["cohort_id"] )
[ "jmromeroe@gmail.com" ]
jmromeroe@gmail.com
0e007e2feb4aaf894b3051e459134496f45c63e5
e2173ceff2c17a48251755fce244a814fc893d85
/trading/backtest/__init__.py
383d2b4b14173b42ac98700c057e25154727d6d8
[ "MIT" ]
permissive
webclinic017/qfengine
687a99534f2ab7011c3a643881ae68d98b44eabe
f71c263becb82ee5b7022c17d7983b40d5df31bb
refs/heads/main
2023-02-27T12:37:39.021657
2021-02-08T01:39:47
2021-02-08T01:39:47
400,952,906
1
0
null
null
null
null
UTF-8
Python
false
false
2,997
py
#--| Pseudos from qfengine.trading.backtest.backtest import BacktestTradingSession ''' PriceHandler, Strategy, PortfolioHandler, PositionSizer, RiskManager and ExecutionHandler. The main components are the They handle portfolio/order management system and brokerage connection functionality. The system is event-driven and communicates via an events queue using subclassed Event objects. The full list of components is as follows: • Event - All "messages" of data within the system are encapsulated in an Event object. The various events include TickEvent, BarEvent, SignalEvent, SentimentEvent, OrderEvent and FillEvent. • Position - This class encapsulates all data associated with an open position in an asset. That is, it tracks the realised and unrealised profit and loss (PnL) by averaging the multiple "legs" of the transaction, inclusive of transaction costs. • Portfolio - The Portfolio class encapsulates a list of Positions, as well as a cash balance, equity and PnL. This object is used by the PositionSizer and RiskManager objects for portfolio construction and risk management purposes. • PortfolioHandler - The PortfolioHandler class is responsible for the management of the current Portfolio, interacting with the RiskManager and PositionSizer as well as submitting orders to be executed by an ExecutionHandler. • PriceHandler - The PriceHandler and derived subclasses are used to ingest financial asset pricing data from various sources. In particular, there are separate class hierarchies for bar and tick data. • Strategy - The Strategy object and subclasses contain the "alpha generation" code for creating trading signals. • PositionSizer - The PositionSizer class provides the PortfolioHandler with guidance on how to size positions once a strategy signal is received. For instance the PositionSizer could incorporate a Kelly Criterion approach or carry out monthly rebalancing of a fixedweight portfolio. 
• RiskManager - The RiskManager is used by the PortfolioHandler to verify, modify or veto any suggested trades that pass through from the PositionSizer, based on the current composition of the portfolio and external risk considerations (such as correlation to indices or volatility). • ExecutionHandler - This object is tasked with sending orders to brokerages and receiving "fills". For backtesting this behaviour is simulated, with realistic fees taken into account. • Statistics - This is used to produce performance reports from backtests. A "tearsheet" capability has recently been added providing detailed statistics on equity curve performance, with benchmark comparison. • Backtest - Encapsulates the event-driven behaviour of the system, including the handling of the events queue. Requires knowledge of all other components in order to simulate a full backtest. '''
[ "35281717+xphysics@users.noreply.github.com" ]
35281717+xphysics@users.noreply.github.com
680915b69a019fc0e3af9cad490ccd1a67d7f264
4bc29617a307de54a7fe300c8e863f03321bd003
/lib/python3.8/site-packages/trytond/tests/test_field_one2many.py
eee8fc6605089b9d591beb06b8f0da5ed94aa0eb
[]
no_license
Davidoff2103/tryton-training
f594970e77646f0ffeb42eb4f903252ff0b6c201
8d1ec4f2b623f7ca48f38bfda2ac15c01ded35a7
refs/heads/master
2023-06-01T11:55:05.400233
2021-06-09T10:06:56
2021-06-09T10:06:56
375,275,666
0
0
null
null
null
null
UTF-8
Python
false
false
15,817
py
# This file is part of Tryton. The COPYRIGHT file at the top level of # this repository contains the full copyright notices and license terms. import unittest from trytond.exceptions import UserError from trytond.pool import Pool from trytond.tests.test_tryton import activate_module, with_transaction class CommonTestCaseMixin: @with_transaction() def test_create(self): "Test create one2many" One2Many = self.One2Many() one2many, = One2Many.create([{ 'targets': [ ('create', [{ 'name': "Target", }]), ], }]) self.assertEqual(len(one2many.targets), 1) @with_transaction() def test_search_equals(self): "Test search one2many equals" One2Many = self.One2Many() one2many, = One2Many.create([{ 'targets': [('create', [{'name': "Target"}])], }]) one2manys = One2Many.search([ ('targets', '=', "Target"), ]) self.assertListEqual(one2manys, [one2many]) @with_transaction() def test_search_equals_no_link(self): "Test search one2many equals without link" One2Many = self.One2Many() one2many, no_link = One2Many.create([{ 'targets': [('create', [{'name': "Target"}])], }, { }]) one2manys = One2Many.search([ ('targets', '=', "Target"), ]) self.assertListEqual(one2manys, [one2many]) @with_transaction() def test_search_non_equals(self): "Test search one2many non equals" One2Many = self.One2Many() one2many, = One2Many.create([{ 'targets': [('create', [{'name': "Target"}])], }]) one2manys = One2Many.search([ ('targets', '!=', "Target"), ]) self.assertListEqual(one2manys, []) @with_transaction() def test_search_equals_none(self): "Test search one2many equals None" One2Many = self.One2Many() one2many1, one2many2 = One2Many.create([{ 'targets': [('create', [{'name': "Target"}])], }, { 'targets': None, }]) one2manys = One2Many.search([ ('targets', '=', None), ]) self.assertListEqual(one2manys, [one2many2]) @with_transaction() def test_search_non_equals_none(self): "Test search one2many non equals None" One2Many = self.One2Many() one2many1, one2many2 = One2Many.create([{ 'targets': [('create', 
[{'name': "Target"}])], }, { 'targets': None, }]) one2manys = One2Many.search([ ('targets', '!=', None), ]) self.assertListEqual(one2manys, [one2many1]) @with_transaction() def test_search_non_equals_no_link(self): "Test search one2many non equals without link" One2Many = self.One2Many() one2many, no_link = One2Many.create([{ 'targets': [('create', [{'name': "Target"}])], }, { }]) one2manys = One2Many.search([ ('targets', '!=', "Target"), ]) self.assertListEqual(one2manys, [no_link]) @with_transaction() def test_search_in(self): "Test search one2many in" One2Many = self.One2Many() one2many, = One2Many.create([{ 'targets': [('create', [{'name': "Target"}])], }]) target, = one2many.targets one2manys = One2Many.search([ ('targets', 'in', [target.id]), ]) self.assertListEqual(one2manys, [one2many]) @with_transaction() def test_search_in_0(self): "Test search one2many in [0]" One2Many = self.One2Many() one2many, = One2Many.create([{ 'targets': [('create', [{'name': "Target"}])], }]) one2manys = One2Many.search([ ('targets', 'in', [0]), ]) self.assertListEqual(one2manys, []) @with_transaction() def test_search_in_empty(self): "Test search one2many in []" One2Many = self.One2Many() one2many, = One2Many.create([{ 'targets': [('create', [{'name': "Target"}])], }]) one2manys = One2Many.search([ ('targets', 'in', []), ]) self.assertListEqual(one2manys, []) @with_transaction() def test_search_not_in(self): "Test search one2many not in" One2Many = self.One2Many() one2many, = One2Many.create([{ 'targets': [('create', [{'name': "Target"}])], }]) target, = one2many.targets one2manys = One2Many.search([ ('targets', 'not in', [target.id]), ]) self.assertListEqual(one2manys, []) @with_transaction() def test_search_not_in_0(self): "Test search one2many not in [0]" One2Many = self.One2Many() one2many, = One2Many.create([{ 'targets': [('create', [{'name': "Target"}])], }]) one2manys = One2Many.search([ ('targets', 'not in', [0]), ]) self.assertListEqual(one2manys, [one2many]) 
@with_transaction() def test_search_not_in_empty(self): "Test search one2many not in []" One2Many = self.One2Many() one2many, = One2Many.create([{ 'targets': [('create', [{'name': "Target"}])], }]) one2manys = One2Many.search([ ('targets', 'not in', []), ]) self.assertListEqual(one2manys, [one2many]) @with_transaction() def test_search_join(self): "Test search one2many equals" One2Many = self.One2Many() one2many, = One2Many.create([{ 'targets': [('create', [{'name': "Target"}])], }]) one2manys = One2Many.search([ ('targets.name', '=', "Target"), ]) self.assertListEqual(one2manys, [one2many]) @with_transaction() def test_search_where(self): "Test search one2many where" One2Many = self.One2Many() one2many, = One2Many.create([{ 'targets': [('create', [{'name': "Target"}])], }]) one2manys = One2Many.search([ ('targets', 'where', [('name', '=', "Target")]), ]) self.assertListEqual(one2manys, [one2many]) @with_transaction() def test_search_not_where(self): "Test search one2many not where" One2Many = self.One2Many() one2many, = One2Many.create([{ 'targets': [('create', [{'name': "Target"}])], }]) one2manys = One2Many.search([ ('targets', 'not where', [('name', '=', "Target")]), ]) self.assertListEqual(one2manys, []) @with_transaction() def test_write_write(self): "Test write one2many write" One2Many = self.One2Many() one2many, = One2Many.create([{ 'targets': [('create', [{'name': "Foo"}])], }]) target, = one2many.targets One2Many.write([one2many], { 'targets': [ ('write', [target.id], { 'name': "Bar", }), ], }) self.assertEqual(target.name, "Bar") @with_transaction() def test_write_add(self): "Test write one2many add" One2Many = self.One2Many() Target = self.One2ManyTarget() one2many, = One2Many.create([{}]) target, = Target.create([{}]) One2Many.write([one2many], { 'targets': [ ('add', [target.id]), ], }) self.assertTupleEqual(one2many.targets, (target,)) @with_transaction() def test_write_remove(self): "Test write one2many remove" One2Many = self.One2Many() Target = 
self.One2ManyTarget() one2many, = One2Many.create([{ 'targets': [('create', [{'name': "Foo"}])], }]) target, = one2many.targets One2Many.write([one2many], { 'targets': [ ('remove', [target.id]), ], }) targets = Target.search([('id', '=', target.id)]) self.assertTupleEqual(one2many.targets, ()) self.assertListEqual(targets, [target]) @with_transaction() def test_write_copy(self): "Test write one2many copy" One2Many = self.One2Many() Target = self.One2ManyTarget() one2many, = One2Many.create([{ 'targets': [('create', [{'name': "Foo"}])], }]) target1, = one2many.targets One2Many.write([one2many], { 'targets': [ ('copy', [target1.id], {'name': "Bar"}), ], }) target2, = Target.search([('id', '!=', target1.id)]) self.assertTupleEqual(one2many.targets, (target1, target2)) @with_transaction() def test_write_delete(self): "Test write one2many delete" One2Many = self.One2Many() Target = self.One2ManyTarget() one2many, = One2Many.create([{ 'targets': [( 'create', [{'name': "Foo"}, {'name': "Bar"}])], }]) target1, target2 = one2many.targets One2Many.write([one2many], { 'targets': [ ('delete', [target1.id]), ], }) targets = Target.search([]) self.assertTupleEqual(one2many.targets, (target2,)) self.assertListEqual(targets, [target2]) class FieldOne2ManyTestCase(unittest.TestCase, CommonTestCaseMixin): "Test Field One2Many" @classmethod def setUpClass(cls): activate_module('tests') def One2Many(self): return Pool().get('test.one2many') def One2ManyTarget(self): return Pool().get('test.one2many.target') @with_transaction() def test_create_required_with_value(self): "Test create one2many required with value" One2Many = Pool().get('test.one2many_required') one2many, = One2Many.create([{ 'targets': [ ('create', [{ 'name': "Target", }]), ], }]) self.assertEqual(len(one2many.targets), 1) @with_transaction() def test_create_required_without_value(self): "Test create one2many required without value" One2Many = Pool().get('test.one2many_required') with self.assertRaises(UserError): 
One2Many.create([{}]) @with_transaction() def test_create_size_valid(self): "Test create one2many size valid" One2Many = Pool().get('test.one2many_size') one2many, = One2Many.create([{ 'targets': [ ('create', [{}] * 3), ], }]) self.assertEqual(len(one2many.targets), 3) @with_transaction() def test_create_size_invalid(self): "Test create one2many size invalid" One2Many = Pool().get('test.one2many_size') with self.assertRaises(UserError): One2Many.create([{ 'targets': [ ('create', [{}] * 4), ], }]) @with_transaction() def test_create_size_pyson_valid(self): "Test create one2many size pyson valid" One2Many = Pool().get('test.one2many_size_pyson') one2many, = One2Many.create([{ 'limit': 4, 'targets': [ ('create', [{}] * 4), ], }]) self.assertEqual(len(one2many.targets), 4) @with_transaction() def test_create_size_pyson_invalid(self): "Test create one2many size pyson invalid" One2Many = Pool().get('test.one2many_size_pyson') with self.assertRaises(UserError): One2Many.create([{ 'limit': 3, 'targets': [ ('create', [{}] * 4), ], }]) @with_transaction() def test_create_filter(self): "Test create one2many with filter" One2Many = Pool().get('test.one2many_filter') filtered, = One2Many.create([{ 'targets': [ ('create', [{'value': x} for x in range(4)])], }]) filtered_target, = filtered.filtered_targets self.assertEqual(len(filtered.targets), 4) self.assertEqual(filtered_target.value, 3) @with_transaction() def test_create_filter_domain(self): "Test create one2many with filter and domain" One2Many = Pool().get('test.one2many_filter_domain') filtered, = One2Many.create([{ 'targets': [ ('create', [{'value': x} for x in range(4)])], }]) filtered_target, = filtered.filtered_targets self.assertEqual(len(filtered.targets), 4) self.assertEqual(filtered_target.value, 3) @with_transaction() def test_search_non_equals_filter(self): "Test search one2many non equals with filter" One2Many = Pool().get('test.one2many_filter') one2many, = One2Many.create([{ 'targets': [('create', [{'value': 
-1}])], }]) one2manys = One2Many.search([('targets', '!=', None)]) one2manys_filtered = One2Many.search( [('filtered_targets', '!=', None)]) self.assertListEqual(one2manys, [one2many]) self.assertListEqual(one2manys_filtered, []) @with_transaction() def test_search_join_filter(self): "Test search one2many join with filter" One2Many = Pool().get('test.one2many_filter') one2many, = One2Many.create([{ 'targets': [('create', [{'value': -1}])], }]) one2manys = One2Many.search([('targets.value', '=', -1)]) one2manys_filtered = One2Many.search( [('filtered_targets.value', '=', -1)]) self.assertListEqual(one2manys, [one2many]) self.assertListEqual(one2manys_filtered, []) class FieldOne2ManyReferenceTestCase(unittest.TestCase, CommonTestCaseMixin): "Test Field One2Many Reference" @classmethod def setUpClass(cls): activate_module('tests') def One2Many(self): return Pool().get('test.one2many_reference') def One2ManyTarget(self): return Pool().get('test.one2many_reference.target') def suite(): suite_ = unittest.TestSuite() loader = unittest.TestLoader() suite_.addTests(loader.loadTestsFromTestCase(FieldOne2ManyTestCase)) suite_.addTests( loader.loadTestsFromTestCase(FieldOne2ManyReferenceTestCase)) return suite_
[ "davidoff.d777@gmail.com" ]
davidoff.d777@gmail.com
208867d87a25d6d9b92890e6301ba8120c5999a2
3c6c9373298fca5549d4a753bf7ed3a336d5209d
/src/haptic_device_rotation/src/haptic_device_rotation.py
88f59fec9e71837053b2f74144a737734a711805
[]
no_license
JLSchol/omni_marco_gazebo
70077a4648b186bd09510e29f35664bd28f13fd0
050cb6dad674a5ff05b0cd5713d989d0d9bf05ee
refs/heads/master
2023-01-30T16:13:07.620255
2019-12-06T15:39:30
2019-12-06T15:39:30
211,870,305
0
0
null
null
null
null
UTF-8
Python
false
false
2,480
py
#!/usr/bin/env python from rospy import init_node, is_shutdown, Rate, loginfo, sleep, get_param from rospy import ROSInterruptException, Time #import messages from geometry_msgs.msg import TransformStamped from PyKDL import Rotation # import numpy as np from tf2_ros import StaticTransformBroadcaster class hapticDeviceRotation(): def __init__(self): init_node("haptic_device_rotation", anonymous=True) self._getParameters() def run(self): rosRate = Rate(30) broadcaster = StaticTransformBroadcaster() while not is_shutdown(): rot = Rotation(self._rotMatrixArray[0],self._rotMatrixArray[1],self._rotMatrixArray[2], self._rotMatrixArray[3],self._rotMatrixArray[4],self._rotMatrixArray[5], self._rotMatrixArray[6],self._rotMatrixArray[7],self._rotMatrixArray[8]) quat = rot.GetQuaternion() staticTransform = self._setTransform(self._robotBaseFrame,self._HDFrame,quat) broadcaster.sendTransform(staticTransform) rosRate.sleep() def _setTransform(self, parentName, childName, quat): static_transformStamped = TransformStamped() static_transformStamped.header.stamp = Time.now() static_transformStamped.header.frame_id = parentName static_transformStamped.child_frame_id = childName static_transformStamped.transform.translation.x = 0 static_transformStamped.transform.translation.y = 0 static_transformStamped.transform.translation.z = 0 static_transformStamped.transform.rotation.x = quat[0] static_transformStamped.transform.rotation.y = quat[1] static_transformStamped.transform.rotation.z = quat[2] static_transformStamped.transform.rotation.w = quat[3] return static_transformStamped def _getParameters(self): self._robotBaseFrame = get_param("~robot_base_frame_name") self._HDFrame = get_param("~HD_frame_name") rotMatrixString = get_param("~rot_matrix_array") self._rotMatrixArray = self._getMatrixList(rotMatrixString) def _getMatrixList(self, matrixString): matrixList = matrixString.split(" ") matrixListFloats = [float(char) for char in matrixList] return matrixListFloats if __name__ == 
"__main__": try: node = hapticDeviceRotation() node.run() except ROSInterruptException: pass
[ "jasper.schol@live.nl" ]
jasper.schol@live.nl
019ceff680a7b5a2b85774d7b7ef83055ad0bb38
520ad0df0ad20a764dfc7e5599a23d333d86f5d3
/day14/main.py
655eeddd68796139ba7c1c0190b70abd47c7dac5
[]
no_license
machalvan/advent-of-code-2020
9e83743ec52f2ee677a6323b0ca8c03416ef48bb
34717f2e665c2707e065dc3f980a79cb63ad509d
refs/heads/main
2023-02-06T11:39:39.802352
2020-12-25T06:15:33
2020-12-25T06:15:33
317,325,530
0
0
null
null
null
null
UTF-8
Python
false
false
1,625
py
def part1(input):
    """Sum of all memory values after running the v1 (value-masking) program."""
    mem = {}
    mask = None
    for line in input:
        target, _, value = line.partition(" = ")
        if target == "mask":
            mask = value
        else:
            # target looks like "mem[<address>]"; strip "mem[" and "]".
            mem[target[4:-1]] = masked(mask, to_bin(value))
    return sum(to_dec(v) for v in mem.values())


def part2(input):
    """Sum of all memory values after running the v2 (address-masking) program."""
    mem = {}
    mask = None
    for line in input:
        target, _, value = line.partition(" = ")
        if target == "mask":
            mask = value
        else:
            floating = masked2(mask, to_bin(target[4:-1]))
            for address in get_addresses(floating):
                mem[to_dec(address)] = int(value)
    return sum(mem.values())


def to_bin(dec):
    """Decimal (str or int) -> zero-padded 36-bit binary string."""
    return '{:036b}'.format(int(dec))


def to_dec(bin):
    """Binary string -> int."""
    return int(bin, 2)


def masked(m, v):
    """v1 mask: 'X' keeps the value bit, '0'/'1' overwrite it."""
    return ''.join(v[i] if c == 'X' else c for i, c in enumerate(m))


def masked2(m, v):
    """v2 mask: '0' keeps the address bit, '1' forces 1, 'X' floats."""
    out = []
    for mc, vc in zip(m, v):
        if mc == '0':
            out.append(vc)
        elif mc == '1':
            out.append('1')
        else:
            out.append('X')
    return ''.join(out)


def get_addresses(bin):
    """Expand every floating 'X' into both 0 and 1, yielding all addresses."""
    addresses = ['']
    for c in bin:
        if c == 'X':
            addresses = [prefix + bit for prefix in addresses for bit in '01']
        else:
            addresses = [prefix + c for prefix in addresses]
    return addresses


if __name__ == '__main__':
    with open('input.txt') as f:
        file = f.read().strip().split('\n')
    print(part1(list(file)))
    print(part2(list(file)))
[ "machalvan@hotmail.com" ]
machalvan@hotmail.com
24397dabfec84500b0351935d23351a69656c0c4
f5dad47c5f1ec2b89f5e004b3e7075bc33cb8802
/apps/practice_app/migrations/0001_initial.py
ff2307162a16c911f302854ade3a53c2ffd825b8
[]
no_license
sai-koushik-macha/Coding-Cubs
e8743b533a4926304c25fe29f853cb90897eec76
8df3d2ded2006c8a41a799073ab7bd4d8bee0728
refs/heads/master
2023-04-23T01:02:20.141240
2021-05-10T10:12:07
2021-05-10T10:12:07
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,160
py
# Generated by Django 3.1.5 on 2021-05-09 06:09 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='practice', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('category', models.CharField(blank=True, choices=[('Array', 'Array'), ('Matrix', 'Matrix'), ('String', 'String'), ('Searching & Sorting', 'Searching & Sorting'), ('LinkedList', 'LinkedList'), ('Binary Trees', 'Binary Trees'), ('Binary Search Trees', 'Binary Search Trees'), ('Greedy', 'Greedy'), ('BackTracking', 'BackTracking'), ('Stacks & Queues', 'Stacks & Queues'), ('Heap', 'Heap'), ('Graph', 'Graph'), ('Trie', 'Trie'), ('Dynamic Programming', 'Dynamic Programming'), ('Bit Manipulation', 'Bit Manipulation')], max_length=32)), ('question', models.TextField()), ('testinputs', models.CharField(blank=True, max_length=32)), ('answersource', models.TextField()), ], ), ]
[ "motrurishashank000@gmail.com" ]
motrurishashank000@gmail.com
bf45662e630c6cef37b042f2b344ba3a85276222
efa652178d6d4d2d8d216dc82588ae5a7c570c19
/py/pypg_wal_archive_clean.py
232073db8297e5b4231838c79aea00e9c2d18ef2
[]
no_license
juliano777/pypg_tools
40a1993e2fdd931b4fb73aa704075e295875c5ca
01765d21b09d95987e7cac47e8fe5212d9dba9f5
refs/heads/master
2023-01-24T03:15:39.891276
2023-01-21T20:05:58
2023-01-21T20:05:58
101,754,272
1
0
null
2023-01-21T20:05:59
2017-08-29T11:35:45
Python
UTF-8
Python
false
false
4,493
py
# _*_ coding:utf8 _*_

'''
This software is licensed under the New BSD Licence.

*******************************************************************************
Copyright (c) 2013, Juliano Atanazio - juliano777@gmail.com
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.

- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

- Neither the name of the Juliano Atanazio nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*******************************************************************************
'''

import sys  # sys built-in library
import os  # os built-in library


# Show help function
def show_help(code_err):
    '''
    Function that shows the application usage and returns a code at exit.
    '''
    msg = '''
    Clean up PostgreSQL WAL archive files.

    Use {} ((--list | --remove) (directory or path) | --help)

    --list      List the files that can be deleted.
    --remove    Remove the older xlog files that can be deleted.

    The second argument can be either a directory or simply the way for an
    archived xlog.
    If this second argument is a directory, all xlogs contained therein
    (including .backup files) will be deleted (--remove) or listed (--list).
    If the path to the archived xlog is this second parameter, which will be
    listed (--list) or removed (--remove) all xlogs older than it.
    '''
    print(msg.format(sys.argv[0]))
    sys.exit(code_err)


# Arguments
try:
    arg_1 = sys.argv[1]  # First argument
    arg_2 = sys.argv[2]  # Second argument
except IndexError:
    # Missing arguments are diagnosed by the filtering below; narrowed from
    # a bare "except:" so real errors are no longer swallowed.
    pass

# Arguments filtering
if ((len(sys.argv) == 1) or
        ((len(sys.argv) == 2) and (arg_1 != '--help')) or
        (len(sys.argv) > 4)):
    show_help(1)

if (arg_1 == '--help'):
    show_help(0)
elif ((len(sys.argv) > 2) and (arg_1 not in ('--list', '--remove'))):
    show_help(1)
else:
    pass


# List or Remove Function
def ls_or_rm(x, y):
    '''
    Function to list or remove older WAL files from PostgreSQL archives.

    x: action, either '--list' or '--remove'.
    y: archive directory, or the path of one archived xlog; every xlog whose
       name sorts before the reference name is considered removable.
    '''
    try:
        if (os.path.isdir(y)):
            directory = y
            bkp_files = [f for f in os.listdir(directory)
                         if f.endswith('.backup')]
            bkp_files.sort()
            # Newest base-backup label; its first 24 characters are the name
            # of the WAL segment it belongs to.
            ref_file = bkp_files[-1][0:24]
        else:
            directory = os.path.dirname(y)
            ref_file = os.path.basename(y)[0:24]
    except Exception as e:
        print('{}\n{}'.format(Exception, e))
        # BUG FIX: without this return, execution fell through and used the
        # undefined names "directory" / "ref_file", raising a NameError on
        # top of the original error.
        return

    # WAL segment names sort chronologically, so a plain string comparison
    # finds everything older than the reference segment.
    files = [f for f in os.listdir(directory) if (f < ref_file)]

    if (x == '--list'):
        print('\nUnnecessary Files:\n')
        for file in files:
            print(file)
    else:
        print('\nRemoving the following files in {}:\n'.format(directory))
        for file in files:
            try:
                os.remove('{}/{}'.format(directory, file))
                print(file)
            except Exception as e:
                # Best-effort: report the failure and keep removing the rest.
                print('{}\n{}'.format(Exception, e))


# The main function
def main():
    '''
    Main function
    '''
    ls_or_rm(arg_1, arg_2)


# Test: If it is executed in command line
if __name__ == '__main__':
    main()
[ "juliano777@gmail.com" ]
juliano777@gmail.com
2b15123cf7f9abeb1a80a12b23ceb0cbb34fd739
e1167fd508a2bae06463dd83c41dc8be2d2d9e6d
/osx/commandPlug.py
dcc9a7c04aaf120ed9fdc49b717fe3622090daf1
[]
no_license
babache/FreeScript
56e9fc3e09d0db05d55e60a151ce5aa1ab2b05da
159b820d3807e7ffb887310ad8c670fa57bae4b7
refs/heads/master
2021-08-31T08:21:04.013987
2017-12-20T19:06:35
2017-12-20T19:06:35
114,917,807
0
0
null
null
null
null
UTF-8
Python
false
false
709
py
"""Keep a Mac's battery between 20% and 95% by toggling a TP-Link smart
plug: discover the plug on the LAN, read the battery level via ``pmset``,
then switch the plug off above 95% and on below 20%."""
from pyHS100 import SmartPlug, SmartBulb, Discover
from pprint import pformat as pf
import subprocess
import re
import time

ip = ""
for dev in Discover.discover().values():
    # Dots escaped — the original pattern's bare "." matched any character.
    ip = re.search(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}", str(dev)).group(0)
    # BUG FIX: the original bare ``exit`` here was a no-op expression
    # (it only referenced the builtin); ``break`` stops after the first
    # discovered device, which is what was intended.
    break

if ip == "":
    # BUG FIX: bare ``exit`` did nothing; actually terminate when no
    # device was discovered.
    raise SystemExit

plug = SmartPlug(ip)

# "pmset -g batt" reports e.g. "... 87%; charging ..." — grab the percent.
output = subprocess.check_output(['pmset', '-g', 'batt'])
percent = re.search(r"\d{1,3}%", str(output)).group(0)
battery = int(percent.replace('%', ''))

if battery > 95 and plug.state == "ON":
    print("Turn OFF at %s" % time.strftime("%d/%m/%Y %H:%M:%S"))
    plug.turn_off()
if battery < 20 and plug.state == "OFF":
    print("Turn ON at %s" % time.strftime("%d/%m/%Y %H:%M:%S"))
    plug.turn_on()
[ "babache@Babache.local" ]
babache@Babache.local
6d7acb9ce754e362f6171af1764146fff1d16b0b
16508404abab893e7fb679827eec8e28866f1435
/blogproject/blog/models.py
47680a2e423e4caac14598cac62385877b4caeb8
[]
no_license
yuhuaxianfan/Yuhuaxianfan
60324571e1ee91d70bf9c4f869d4d4536d2a655c
29236a28a5d51f5d7862fb2f74e81c3419e96608
refs/heads/master
2021-01-21T13:26:15.619934
2017-10-20T05:03:29
2017-10-20T05:03:29
102,122,119
0
0
null
null
null
null
UTF-8
Python
false
false
832
py
from django.db import models
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
# NOTE(review): django.core.urlresolvers was removed in Django 2.0; on newer
# Django this import must come from django.urls instead — confirm the
# project's Django version before upgrading.


class Category(models.Model):
    # A blog post category; each Post references exactly one Category.
    name = models.CharField(max_length=100)

    def __str__(self):
        return self.name


class Tag(models.Model):
    # A free-form tag; posts may carry any number of tags.
    name = models.CharField(max_length=100)

    def __str__(self):
        return self.name


class Post(models.Model):
    # A single blog entry.
    title = models.CharField(max_length=70)
    body = models.TextField()
    created_time = models.DateField()       # date only (no time component)
    modified_time = models.DateTimeField()  # full timestamp
    excerpt = models.CharField(max_length=200, blank=True)  # optional summary
    category = models.ForeignKey(Category)  # required: one category per post
    tags = models.ManyToManyField(Tag, blank=True)  # tags are optional
    author = models.ForeignKey(User)

    def __str__(self):
        return self.title

    def get_absolute_url(self):
        # Canonical URL of this post: reverses the 'blog:detail' route by pk.
        return reverse('blog:detail', kwargs={'pk': self.pk})

# Create your models here.
[ "447724352@qq.com" ]
447724352@qq.com
9180b287d75bebfb527e3af7057b4cb7e090a0c2
78d31def976fc38898e37d1429a75978848c288b
/soundhound/B.py
823f35d9a93e39403b56805053e7d4c76c19a5a8
[]
no_license
RayNakagami/Atcoder
c04ad749b7cda6a2e96d3dac7e549273f34c8a2d
d8c26140f0e769c018877e7e3e8a1867d96feaf0
refs/heads/master
2020-03-31T02:15:24.868098
2018-10-06T07:36:28
2018-10-06T07:36:28
151,815,164
0
0
null
null
null
null
UTF-8
Python
false
false
46
py
# Read a string and a step width w, then print every w-th character of
# the string, starting from the first one.
text = input()
step = int(input())
print(text[::step])
[ "ray.nakagami@gmail.com" ]
ray.nakagami@gmail.com
79dc973cef96d3e5eef6f7cd3552f8a91bf78cb4
b71f656374293c5f1238fcb449aa4dde78632861
/eudplib/utils/blockstru.py
c3e9a5159a3ca9e5b4b729aab409a5ff123314d1
[ "MIT" ]
permissive
tobeinged/eudplib
ce1cdc15f7ec6af857b4b64b5c826b3dd95d3e48
066c0faa200dc19e70cdb6979daf8f008b8ae957
refs/heads/master
2023-05-04T08:49:01.180147
2019-03-18T14:30:29
2019-03-18T14:30:29
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,549
py
#!/usr/bin/python # -*- coding: utf-8 -*- ''' Copyright (c) 2014 trgk Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
''' from .eperror import ep_assert class BlockStruManager: def __init__(self): self._blockstru = [] self._lastblockdict = {} def empty(self): return not self._blockstru _current_bsm = BlockStruManager() # Default one def SetCurrentBlockStruManager(bsm): global _current_bsm old_bsm = _current_bsm _current_bsm = bsm return old_bsm def EUDCreateBlock(name, userdata): _blockstru = _current_bsm._blockstru _lastblockdict = _current_bsm._lastblockdict block = (name, userdata) _blockstru.append(block) if name not in _lastblockdict: _lastblockdict[name] = [] _lastblockdict[name].append(block) def EUDGetLastBlock(): _blockstru = _current_bsm._blockstru return _blockstru[-1] def EUDGetLastBlockOfName(name): _lastblockdict = _current_bsm._lastblockdict return _lastblockdict[name][-1] def EUDPeekBlock(name): lastblock = EUDGetLastBlock() ep_assert(lastblock[0] == name, 'Block starting/ending mismatch') return lastblock def EUDPopBlock(name): _blockstru = _current_bsm._blockstru _lastblockdict = _current_bsm._lastblockdict lastblock = _blockstru.pop() ep_assert(lastblock[0] == name, """\ Block starting/ending mismatch: - Started with %s - Ended with %s\ """ % (lastblock[0], name)) _lastblockdict[name].pop() return lastblock def EUDGetBlockList(): return _current_bsm._blockstru
[ "phu54321@naver.com" ]
phu54321@naver.com
e661fdec78b7319ffe2fcad7ed550a0469bf8d6d
564d6a4d305a8ac6a7e01c761831fb2081c02d0f
/sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_05_01/operations/_network_interfaces_operations.py
f2a6c07097ef93c19db4a57b35f48ccad53b3847
[ "LicenseRef-scancode-generic-cla", "LGPL-2.1-or-later", "MIT" ]
permissive
paultaiton/azure-sdk-for-python
69af4d889bac8012b38f5b7e8108707be679b472
d435a1a25fd6097454b7fdfbbdefd53e05029160
refs/heads/master
2023-01-30T16:15:10.647335
2020-11-14T01:09:50
2020-11-14T01:09:50
283,343,691
0
0
MIT
2020-07-28T22:43:43
2020-07-28T22:43:43
null
UTF-8
Python
false
false
59,447
py
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] class NetworkInterfacesOperations(object): """NetworkInterfacesOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.network.v2020_05_01.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. 
""" models = models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config def _delete_initial( self, resource_group_name, # type: str network_interface_name, # type: str **kwargs # type: Any ): # type: (...) -> None cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-05-01" accept = "application/json" # Construct URL url = self._delete_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore def 
begin_delete( self, resource_group_name, # type: str network_interface_name, # type: str **kwargs # type: Any ): # type: (...) -> LROPoller[None] """Deletes the specified network interface. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param network_interface_name: The name of the network interface. :type network_interface_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._delete_initial( resource_group_name=resource_group_name, network_interface_name=network_interface_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, 
deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore def get( self, resource_group_name, # type: str network_interface_name, # type: str expand=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> "models.NetworkInterface" """Gets information about the specified network interface. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param network_interface_name: The name of the network interface. :type network_interface_name: str :param expand: Expands referenced resources. :type expand: str :keyword callable cls: A custom type or function that will be passed the direct response :return: NetworkInterface, or the result of cls(response) :rtype: ~azure.mgmt.network.v2020_05_01.models.NetworkInterface :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterface"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-05-01" accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') if expand is not 
None: query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('NetworkInterface', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore def _create_or_update_initial( self, resource_group_name, # type: str network_interface_name, # type: str parameters, # type: "models.NetworkInterface" **kwargs # type: Any ): # type: (...) 
-> "models.NetworkInterface" cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterface"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-05-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self._create_or_update_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'NetworkInterface') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if response.status_code == 200: deserialized = self._deserialize('NetworkInterface', pipeline_response) if response.status_code == 201: deserialized = 
self._deserialize('NetworkInterface', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore def begin_create_or_update( self, resource_group_name, # type: str network_interface_name, # type: str parameters, # type: "models.NetworkInterface" **kwargs # type: Any ): # type: (...) -> LROPoller["models.NetworkInterface"] """Creates or updates a network interface. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param network_interface_name: The name of the network interface. :type network_interface_name: str :param parameters: Parameters supplied to the create or update network interface operation. :type parameters: ~azure.mgmt.network.v2020_05_01.models.NetworkInterface :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either NetworkInterface or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_05_01.models.NetworkInterface] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterface"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, network_interface_name=network_interface_name, parameters=parameters, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('NetworkInterface', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore def update_tags( self, resource_group_name, # type: str network_interface_name, # type: str parameters, # type: "models.TagsObject" **kwargs # type: Any ): # type: (...) -> "models.NetworkInterface" """Updates a network interface tags. :param resource_group_name: The name of the resource group. 
:type resource_group_name: str :param network_interface_name: The name of the network interface. :type network_interface_name: str :param parameters: Parameters supplied to update network interface tags. :type parameters: ~azure.mgmt.network.v2020_05_01.models.TagsObject :keyword callable cls: A custom type or function that will be passed the direct response :return: NetworkInterface, or the result of cls(response) :rtype: ~azure.mgmt.network.v2020_05_01.models.NetworkInterface :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterface"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-05-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.update_tags.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'TagsObject') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) 
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('NetworkInterface', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore def list_all( self, **kwargs # type: Any ): # type: (...) -> Iterable["models.NetworkInterfaceListResult"] """Gets all network interfaces in a subscription. :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_05_01.models.NetworkInterfaceListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterfaceListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-05-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list_all.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] 
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return ItemPaged( get_next, extract_data ) list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkInterfaces'} # type: ignore def list( self, resource_group_name, # type: str **kwargs # type: Any ): # type: (...) -> Iterable["models.NetworkInterfaceListResult"] """Gets all network interfaces in a resource group. :param resource_group_name: The name of the resource group. 
:type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_05_01.models.NetworkInterfaceListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterfaceListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-05-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) 
response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return ItemPaged( get_next, extract_data ) list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces'} # type: ignore def _get_effective_route_table_initial( self, resource_group_name, # type: str network_interface_name, # type: str **kwargs # type: Any ): # type: (...) -> Optional["models.EffectiveRouteListResult"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.EffectiveRouteListResult"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-05-01" accept = "application/json" # Construct URL url = self._get_effective_route_table_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: 
map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: deserialized = self._deserialize('EffectiveRouteListResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _get_effective_route_table_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveRouteTable'} # type: ignore def begin_get_effective_route_table( self, resource_group_name, # type: str network_interface_name, # type: str **kwargs # type: Any ): # type: (...) -> LROPoller["models.EffectiveRouteListResult"] """Gets all route tables applied to a network interface. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param network_interface_name: The name of the network interface. :type network_interface_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either EffectiveRouteListResult or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_05_01.models.EffectiveRouteListResult] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["models.EffectiveRouteListResult"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._get_effective_route_table_initial( resource_group_name=resource_group_name, network_interface_name=network_interface_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('EffectiveRouteListResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_get_effective_route_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveRouteTable'} # type: ignore def _list_effective_network_security_groups_initial( self, resource_group_name, # type: str network_interface_name, # type: str **kwargs # type: Any ): # type: (...) 
-> Optional["models.EffectiveNetworkSecurityGroupListResult"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.EffectiveNetworkSecurityGroupListResult"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-05-01" accept = "application/json" # Construct URL url = self._list_effective_network_security_groups_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: deserialized = self._deserialize('EffectiveNetworkSecurityGroupListResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _list_effective_network_security_groups_initial.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveNetworkSecurityGroups'} # type: ignore def begin_list_effective_network_security_groups( self, resource_group_name, # type: str network_interface_name, # type: str **kwargs # type: Any ): # type: (...) -> LROPoller["models.EffectiveNetworkSecurityGroupListResult"] """Gets all network security groups applied to a network interface. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param network_interface_name: The name of the network interface. :type network_interface_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either EffectiveNetworkSecurityGroupListResult or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_05_01.models.EffectiveNetworkSecurityGroupListResult] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["models.EffectiveNetworkSecurityGroupListResult"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._list_effective_network_security_groups_initial( resource_group_name=resource_group_name, network_interface_name=network_interface_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('EffectiveNetworkSecurityGroupListResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_list_effective_network_security_groups.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveNetworkSecurityGroups'} # type: ignore def list_virtual_machine_scale_set_vm_network_interfaces( self, resource_group_name, # type: str virtual_machine_scale_set_name, # type: str virtualmachine_index, # 
type: str **kwargs # type: Any ): # type: (...) -> Iterable["models.NetworkInterfaceListResult"] """Gets information about all network interfaces in a virtual machine in a virtual machine scale set. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param virtual_machine_scale_set_name: The name of the virtual machine scale set. :type virtual_machine_scale_set_name: str :param virtualmachine_index: The virtual machine index. :type virtualmachine_index: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_05_01.models.NetworkInterfaceListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterfaceListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-10-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list_virtual_machine_scale_set_vm_network_interfaces.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'), 'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = 
{} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return ItemPaged( get_next, extract_data ) list_virtual_machine_scale_set_vm_network_interfaces.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces'} # type: ignore def list_virtual_machine_scale_set_network_interfaces( self, resource_group_name, # type: str virtual_machine_scale_set_name, # type: str **kwargs # type: Any ): # type: (...) -> Iterable["models.NetworkInterfaceListResult"] """Gets all network interfaces in a virtual machine scale set. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param virtual_machine_scale_set_name: The name of the virtual machine scale set. 
:type virtual_machine_scale_set_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_05_01.models.NetworkInterfaceListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterfaceListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-10-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list_virtual_machine_scale_set_network_interfaces.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link 
or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return ItemPaged( get_next, extract_data ) list_virtual_machine_scale_set_network_interfaces.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/networkInterfaces'} # type: ignore def get_virtual_machine_scale_set_network_interface( self, resource_group_name, # type: str virtual_machine_scale_set_name, # type: str virtualmachine_index, # type: str network_interface_name, # type: str expand=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> "models.NetworkInterface" """Get the specified network interface in a virtual machine scale set. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param virtual_machine_scale_set_name: The name of the virtual machine scale set. :type virtual_machine_scale_set_name: str :param virtualmachine_index: The virtual machine index. :type virtualmachine_index: str :param network_interface_name: The name of the network interface. :type network_interface_name: str :param expand: Expands referenced resources. 
:type expand: str :keyword callable cls: A custom type or function that will be passed the direct response :return: NetworkInterface, or the result of cls(response) :rtype: ~azure.mgmt.network.v2020_05_01.models.NetworkInterface :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterface"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-10-01" accept = "application/json" # Construct URL url = self.get_virtual_machine_scale_set_network_interface.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'), 'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'), 'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') if expand is not None: query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('NetworkInterface', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_virtual_machine_scale_set_network_interface.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}'} # type: ignore def list_virtual_machine_scale_set_ip_configurations( self, resource_group_name, # type: str virtual_machine_scale_set_name, # type: str virtualmachine_index, # type: str network_interface_name, # type: str expand=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> Iterable["models.NetworkInterfaceIPConfigurationListResult"] """Get the specified network interface ip configuration in a virtual machine scale set. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param virtual_machine_scale_set_name: The name of the virtual machine scale set. :type virtual_machine_scale_set_name: str :param virtualmachine_index: The virtual machine index. :type virtualmachine_index: str :param network_interface_name: The name of the network interface. :type network_interface_name: str :param expand: Expands referenced resources. 
:type expand: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either NetworkInterfaceIPConfigurationListResult or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_05_01.models.NetworkInterfaceIPConfigurationListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterfaceIPConfigurationListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-10-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list_virtual_machine_scale_set_ip_configurations.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'), 'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'), 'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') if expand is not None: query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: 
Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): deserialized = self._deserialize('NetworkInterfaceIPConfigurationListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return ItemPaged( get_next, extract_data ) list_virtual_machine_scale_set_ip_configurations.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipConfigurations'} # type: ignore def get_virtual_machine_scale_set_ip_configuration( self, resource_group_name, # type: str virtual_machine_scale_set_name, # type: str virtualmachine_index, # type: str network_interface_name, # type: str ip_configuration_name, # type: str expand=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> "models.NetworkInterfaceIPConfiguration" """Get the specified network interface ip configuration in a virtual machine scale set. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param virtual_machine_scale_set_name: The name of the virtual machine scale set. :type virtual_machine_scale_set_name: str :param virtualmachine_index: The virtual machine index. :type virtualmachine_index: str :param network_interface_name: The name of the network interface. 
:type network_interface_name: str :param ip_configuration_name: The name of the ip configuration. :type ip_configuration_name: str :param expand: Expands referenced resources. :type expand: str :keyword callable cls: A custom type or function that will be passed the direct response :return: NetworkInterfaceIPConfiguration, or the result of cls(response) :rtype: ~azure.mgmt.network.v2020_05_01.models.NetworkInterfaceIPConfiguration :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterfaceIPConfiguration"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-10-01" accept = "application/json" # Construct URL url = self.get_virtual_machine_scale_set_ip_configuration.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'), 'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'), 'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'), 'ipConfigurationName': self._serialize.url("ip_configuration_name", ip_configuration_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') if expand is not None: query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') 
request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('NetworkInterfaceIPConfiguration', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_virtual_machine_scale_set_ip_configuration.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipConfigurations/{ipConfigurationName}'} # type: ignore
[ "noreply@github.com" ]
noreply@github.com
e525e75ccebdd3a3803682e6711866c1e53c3c8c
b69c9571db562908f03ab6e57d3f8d6550bf9fc4
/main_program.py
f7f323228e0607ece4470e049c4eb46a73cdb12e
[]
no_license
Snow-Crash/pc-host
b7f167727418bc4437d0e26daa6e7c7a1a9ef140
e4824fde89bdd4ba7b8ee31ce7f6722f04b33cca
refs/heads/master
2023-02-21T08:35:12.058256
2021-01-25T17:11:00
2021-01-25T17:11:00
192,278,045
0
0
null
null
null
null
UTF-8
Python
false
false
11,627
py
import tkinter import time import serial import numpy as np from timeit import default_timer as timer import matplotlib.pyplot as plt import tkinter as Tk import numpy as np import matplotlib.animation as animation from pyqtgraph.Qt import QtCore, QtGui #import pyqtgraph.opengl as gl import pyqtgraph as pg from PyQt5 import QtTest import sys import multiprocessing # UART Com Settings UART_PORT = 'COM6' UART_BAUDRATE = 230400 UART_INPUT_SIZE = 243 UART_OUTPUT_SIZE = 4 WINDOW = 450 # FPGA Command values CMD_START_NEURON = 1 CMD_RESET_NEURON = 2 CMD_SET_SPIKE = 3 CMD_SET_TEST_SPIKE = 4 CMD_CLEAR_BUFFERED_SPIKE = 5 class uart_com: """ A class that encapsulates the UART connection between a PC and FPGA via a USB UART.""" def __init__(self, port=None, baudrate=None, timeout=0.1): # Set internal variables if port is None: self.port = 'COM6' else: self.port = port if baudrate is None: self.baudrate = 230400 else: self.baudrate = baudrate self.serial = serial.Serial() self.serial.baudrate = self.baudrate self.serial.port = self.port self.serial.timeout = timeout # Open the serial connection self.serial.open() # Clear any excess data sitting in the buffers self.serial.reset_input_buffer() self.serial.reset_output_buffer() def __del__(self): # Release the serial connection self.serial.close() def read_blocking(self): """ A blocking read. stop until receive four 0xff or timeout""" stop_frame_count = 0 receive = bytearray() # receive.append(1) while(1): data_read = self.serial.read(1) #if read() returns empty array, break if len(data_read) == 0: print('timeout') break # print(data_read) if data_read[0] != 255: receive.append(data_read[0]) else: stop_frame_count += 1 if stop_frame_count == 4: break return receive def write_blocking(self, write_data): """ A blocking write. The expected number of bytes written is set during initialization. 
""" self.serial.write(write_data) def dataInReadBuffer(self): return True if self.serial.in_waiting > 0 else False class neuron_controller(): def __init__(self, window, port=None, baudrate=None, timeout=0.1): self.uart = uart_com(port=port, baudrate=baudrate, timeout=timeout) self.window = window self.decoding = 'ascii' def send_word(self, binary_string): byte_array = self.bin_str_to_bytes(binary_string) self.uart.write_blocking(byte_array) def read_data(self): data = self.uart.read_blocking() return data def bin_str_to_bytes(self, bin_str): ''' seperate every 8 bits by whitespace msb lsb 31 0 00000000 00000000 00000000 00000000 store msb in bytearray[3], lsb in bytearray[0] ''' bin_str = bin_str.split(' ') byte_array = bytearray() for field in reversed(bin_str): int_val = int(field, 2) byte_array.append(int_val) return byte_array def check_for_output(self): return self.connection.dataInReadBuffer() # Commands to read from the FPGA def read_cycle(self): ''' convert raw output from fpga to numpy ''' psp = np.zeros(110) voltage = np.zeros(10) spikes = np.zeros(10) data = self.read_data() if self.decoding == 'ascii': decoded = data.decode('ascii') split = decoded.split('\n') psp_str = split[0].split(',') voltage_str = split[1].split(',') #data in fpga is is represented by fixed(16,4) format, so divided by 2^12 for i in range(110): psp[i] = int(psp_str[i]) / 4096 for i in range(10): voltage[i] = int(voltage_str[i]) / 4096 spikes[i] = int(voltage[i] > 1) else: #data is a list, every 5 bytes belong to a packet #split data into chunks of 5 split_data = [data[i:i + 5] for i in range(0, len(data), 5)] #0-110 are psp psp_bytes = split_data[0:110] #110 to 120 are voltage voltage_bytes = split_data[110:] #convert raw bytes to integers for i,val_byte in enumerate(psp_bytes): #microblaze sends lsb fisrt, so lsb stores in [1], msb in [3] #byte order is little endian u16 = int.from_bytes(val_byte[1:], byteorder = 'little') int16 = self.u16toint(u16) psp[i] = int16 voltage_bytes = 
split_data[110:] for i,val_byte in enumerate(psp_bytes): u16 = int.from_bytes(val_byte[1:], byteorder = 'little') int16 = self.u16toint(u16) psp_bytes[i] = int16 return spikes, psp, voltage # Commands to write to the FPGA def start_cycle(self): cmd_start = bytearray([0,0,0,CMD_START_NEURON]) self.uart.write_blocking(cmd_start) def run_one_step(self): ''' run neuron for one time step, including send start command, and read results ''' self.start_cycle(); data = self.read_cycle() return data def reset_neuron(self): ''' reset psp ''' cmd_reset = bytearray([0,0,0,CMD_RESET_NEURON]) self.uart.write_blocking(cmd_reset) def set_spikes(self, spike_array: np.array): ''' spike_array: 1d numpy array, 1 for spike, 0 for nothing, each position is a synapse input ''' #find non zero positions input_spike_index = np.where(spike_array!=0)[0] for idx in input_spike_index: spike_packet = bytearray([0,0,idx,CMD_SET_SPIKE]) self.uart.write_blocking(spike_packet) def set_test_spikes(self): cmd_test_spikes = bytearray([0,0,0,CMD_SET_TEST_SPIKE]) self.uart.write_blocking(cmd_test_spikes) def clear_spikes(self): ''' set spike buffer in microblaze to 0 ''' cmd_clear_spikes = bytearray([0,0,0,CMD_CLEAR_BUFFERED_SPIKE]) self.uart.write_blocking(cmd_clear_spikes) def chunks(self, l, n): """Yield successive n-sized chunks from l. https://stackoverflow.com/questions/312443/how-do-you-split-a-list-into-evenly-sized-chunks """ for i in range(0, len(l), n): yield l[i:i + n] def u16toint(self, u16): ''' assume the u16 is unsigned integer, convert it to int ''' #value in fpga is represented by fixpoint(16,4), while in microblaze, #it is converted to u32. 
so only keep lower 16 bits u16 = u16 & 0xFFFF #convert unsigned to signed if u16 > 32767: return u16-65536 else: return u16 def disconnect(self): self.uart.serial.close() def run_one_step_fake(self): return np.random.rand(10), np.random.rand(110), np.random.rand(10) if __name__ == '__main__': spike = np.load('D:/islped_demo/snn/noise_train.npy') test_spike = spike[0] controller = neuron_controller(WINDOW, port='COM5', baudrate=230400, timeout=0.1) #manager = multiprocessing.Manager() #shared_list = manager.list() #n = 0 #process1 = multiprocessing.Process( # target=write_only, args=[shared_list,n]) #process2 = multiprocessing.Process( # target=read_only, args=[shared_list]) #process1.start() #process2.start() #process1.join() #process2.join() #record voltage[instance_idx, neuron_id, time_step] v_record = np.zeros([100,10,WINDOW]) controller.reset_neuron() test_pyqtgraph = False use_fake_data = False if test_pyqtgraph: start = timer() app = QtGui.QApplication(sys.argv) w = gl.GLViewWidget() w.setBackgroundColor('w') w.opts['azimuth'] = 90 w.opts['elevation'] = 0 w.setGeometry(0, 110, 1920, 1080) w.show() traces = dict() for i in range(10): x = np.array(range(450)) y = np.zeros(450) z = np.zeros(450) pts = np.vstack([x, y, z]).transpose() traces[i] = gl.GLLinePlotItem( pos=pts, color=pg.glColor((i, 10 * 1.3)), width=(i + 1) / 10, antialias=True, ) #if use white background #reference: https://github.com/pyqtgraph/pyqtgraph/issues/193 traces[i].setGLOptions('translucent') w.addItem(traces[i]) for j in range(450): for i in range(10): if use_fake_data: s,p,v = controller.run_one_step_fake() v_record[i,:,j] = v z = v_record[0,i,0:j] + i*5 else: controller.set_spikes(spike[i,j,:]) s,p,v = controller.run_one_step() v_record[i,:,j] = v # z coordinates represent voltage # + 5 to plac each trace at different vertical position z = v_record[0,i,0:j] + i*5 #reset psp at last step if (j == 450-1): controller.reset_neuron() x = np.array(range(0,j)) y = np.zeros(j) z = 
np.random.rand(j) + i * 5 pts = np.vstack([x, y, z]).transpose() traces[i].setData(pos=pts, color=pg.glColor((i, 10 * 1.3)), width=3) print(j) # QtTest.QTest.qWait(1000) app.processEvents() end = timer() print(end - start) # Time in seconds, e.g. 5.38091952400282 ############################################################################### else: fig, ax = plt.subplots() for i in range(10): line = ax.plot(np.random.randn(450)) plt.show(block=False) fig.canvas.draw() plt.ioff() #run for multiple samples start = timer() for i in range(1): #for every time step for j in range(WINDOW): controller.set_spikes(spike[i,j,:]) s,p,v = controller.run_one_step() v_record[i,:,j] = v #reset psp at last step if (j == 450-1): controller.reset_neuron() # if j % 5 == 0: ax.draw_artist(ax.patch) for n, l in enumerate(ax.lines): l.set_ydata( v_record[0,n,:]) # ax.draw_artist(l) fig.canvas.update() fig.canvas.flush_events() #plt.show() # ... end = timer() print(end - start) # Time in seconds, e.g. 5.38091952400282
[ "hfang02@syr.edu" ]
hfang02@syr.edu
219941a73b3a3abb917568111dfa3dca7b917452
0c67106d19d29092c02cbfc93e33cb077a8e4386
/machine_translation/nmt_zh/model_helper.py
e83f20932eeac34b26c463ff4bf2be129b34ffa0
[]
no_license
iamdsyang/chinese_nlp
b026c0b9c84b044638006bfa063b8523fe51aff9
22a47ad9e8cfb18593e8a6e089d7a7b5e1b6a838
refs/heads/master
2020-05-01T14:52:44.364216
2018-07-09T04:04:09
2018-07-09T04:04:09
null
0
0
null
null
null
null
UTF-8
Python
false
false
9,106
py
import six, os import collections import tensorflow as tf import numpy as np from utils import iterator_utils from utils import vocab_utils from utils import misc_utils as utils class TrainModel( collections.namedtuple("TrainModel", ("graph", "model", "iterator"))): pass def create_train_model( model_creator, hparams): """Create train graph, model, and iterator.""" src_file = "%s.%s" % (hparams.train_prefix, hparams.src) tgt_file = "%s.%s" % (hparams.train_prefix, hparams.tgt) src_vocab_file = hparams.src_vocab_file tgt_vocab_file = hparams.tgt_vocab_file graph = tf.Graph() with graph.as_default(), tf.container("train"): src_vocab_table, tgt_vocab_table = vocab_utils.create_vocab_tables( src_vocab_file, tgt_vocab_file, hparams.share_vocab) src_dataset = tf.data.TextLineDataset(src_file) tgt_dataset = tf.data.TextLineDataset(tgt_file) iterator = iterator_utils.get_iterator( src_dataset, tgt_dataset, src_vocab_table, tgt_vocab_table, batch_size=hparams.batch_size, sos=hparams.sos, eos=hparams.eos, source_reverse=hparams.source_reverse, random_seed=hparams.random_seed, num_buckets=hparams.num_buckets, src_max_len=hparams.src_max_len, tgt_max_len=hparams.tgt_max_len) model = model_creator( hparams, iterator=iterator, mode=tf.contrib.learn.ModeKeys.TRAIN, source_vocab_table=src_vocab_table, target_vocab_table=tgt_vocab_table) return TrainModel( graph=graph, model=model, iterator=iterator) class EvalModel( collections.namedtuple("EvalModel", ("graph", "model", "src_file_placeholder", "tgt_file_placeholder", "iterator"))): pass def create_eval_model(model_creator, hparams): """Create train graph, model, src/tgt file holders, and iterator.""" src_vocab_file = hparams.src_vocab_file tgt_vocab_file = hparams.tgt_vocab_file graph = tf.Graph() with graph.as_default(), tf.container("eval"): src_vocab_table, tgt_vocab_table = vocab_utils.create_vocab_tables( src_vocab_file, tgt_vocab_file, hparams.share_vocab) src_file_placeholder = tf.placeholder(shape=(), dtype=tf.string) 
tgt_file_placeholder = tf.placeholder(shape=(), dtype=tf.string) src_dataset = tf.data.TextLineDataset(src_file_placeholder) tgt_dataset = tf.data.TextLineDataset(tgt_file_placeholder) iterator = iterator_utils.get_iterator( src_dataset, tgt_dataset, src_vocab_table, tgt_vocab_table, hparams.batch_size, sos=hparams.sos, eos=hparams.eos, source_reverse=hparams.source_reverse, random_seed=hparams.random_seed, num_buckets=hparams.num_buckets, src_max_len=hparams.src_max_len_infer, tgt_max_len=hparams.tgt_max_len_infer) model = model_creator( hparams, iterator=iterator, mode=tf.contrib.learn.ModeKeys.EVAL, source_vocab_table=src_vocab_table, target_vocab_table=tgt_vocab_table) return EvalModel( graph=graph, model=model, src_file_placeholder=src_file_placeholder, tgt_file_placeholder=tgt_file_placeholder, iterator=iterator) class InferModel( collections.namedtuple("InferModel", ("graph", "model", "src_placeholder", "batch_size_placeholder", "iterator"))): pass def create_infer_model(model_creator, hparams): """Create inference model.""" src_vocab_file = hparams.src_vocab_file tgt_vocab_file = hparams.tgt_vocab_file graph = tf.Graph() with graph.as_default(), tf.container("infer"): src_vocab_table, tgt_vocab_table = vocab_utils.create_vocab_tables( src_vocab_file, tgt_vocab_file, hparams.share_vocab) reverse_tgt_vocab_table = tf.contrib.lookup.index_to_string_table_from_file( tgt_vocab_file, default_value=vocab_utils.UNK) src_placeholder = tf.placeholder(shape=[None], dtype=tf.string) batch_size_placeholder = tf.placeholder(shape=[], dtype=tf.int64) src_dataset = tf.data.Dataset.from_tensor_slices( src_placeholder) iterator = iterator_utils.get_infer_iterator( src_dataset, src_vocab_table, batch_size=batch_size_placeholder, eos=hparams.eos, source_reverse=hparams.source_reverse, src_max_len=hparams.src_max_len_infer) model = model_creator( hparams, iterator=iterator, mode=tf.contrib.learn.ModeKeys.INFER, source_vocab_table=src_vocab_table, 
target_vocab_table=tgt_vocab_table, reverse_target_vocab_table=reverse_tgt_vocab_table) return InferModel( graph=graph, model=model, src_placeholder=src_placeholder, batch_size_placeholder=batch_size_placeholder, iterator=iterator) def load_model(model, ckpt, session, name): model.saver.restore(session, ckpt) session.run(tf.tables_initializer()) utils.log("Load {} model parameters from {}".format(name, ckpt)) return model def create_or_load_model(model, model_dir, session, name): """Create translation model and initialize or load parameters in session.""" latest_ckpt = tf.train.latest_checkpoint(model_dir) if latest_ckpt: model.saver.restore(session, latest_ckpt) session.run(tf.tables_initializer()) utils.log("Load {} model parameters from {}".format(name, latest_ckpt)) else: session.run(tf.global_variables_initializer()) session.run(tf.tables_initializer()) utils.log("Create {} model with fresh parameters".format(name)) global_step = model.global_step.eval(session=session) return model, global_step def avg_checkpoints(model_dir, num_last_checkpoints, global_step, global_step_name): """ Average the last N checkpoints in the model_dir. """ checkpoint_state = tf.train.get_checkpoint_state(model_dir) if not checkpoint_state: utils.log("No checkpoint file found in directory: {}".format(model_dir)) return None # Checkpoints are ordered from oldest to newest. checkpoints = ( checkpoint_state.all_model_checkpoint_paths[-num_last_checkpoints:]) if len(checkpoints) < num_last_checkpoints: utils.log( "Skipping averaging checkpoints because not enough checkpoints is " "avaliable." ) return None avg_model_dir = os.path.join(model_dir, "avg_checkpoints") if not os.path.exists(avg_model_dir): utils.log( "Creating new directory {} for saving averaged checkpoints." 
.format( avg_model_dir)) os.makedirs(avg_model_dir) utils.log("Reading and averaging variables in checkpoints:") var_list = tf.contrib.framework.list_variables(checkpoints[0]) var_values, var_dtypes = {}, {} for (name, shape) in var_list: if name != global_step_name: var_values[name] = np.zeros(shape) for checkpoint in checkpoints: utils.log("{}".format(checkpoint)) reader = tf.contrib.framework.load_checkpoint(checkpoint) for name in var_values: tensor = reader.get_tensor(name) var_dtypes[name] = tensor.dtype var_values[name] += tensor for name in var_values: var_values[name] /= len(checkpoints) # Build a graph with same variables in the checkpoints, and save the averaged # variables into the avg_model_dir. with tf.Graph().as_default(): tf_vars = [ tf.get_variable(v, shape=var_values[v].shape, dtype=var_dtypes[name]) for v in var_values ] placeholders = [tf.placeholder(v.dtype, shape=v.shape) for v in tf_vars] assign_ops = [tf.assign(v, p) for (v, p) in zip(tf_vars, placeholders)] global_step_var = tf.Variable( global_step, name=global_step_name, trainable=False) saver = tf.train.Saver(tf.all_variables()) with tf.Session() as sess: sess.run(tf.initialize_all_variables()) for p, assign_op, (name, value) in zip(placeholders, assign_ops, six.iteritems(var_values)): sess.run(assign_op, {p: value}) # Use the built saver to save the averaged checkpoint. Only keep 1 # checkpoint and the best checkpoint will be moved to avg_best_metric_dir. saver.save( sess, os.path.join(avg_model_dir, "translate.ckpt")) return avg_model_dir
[ "1181856726@qq.com" ]
1181856726@qq.com
6515b896bb03a7ebb2cd72e4073d7b54a04e0c93
7e2945510ece2255c4902931edc6c53184043243
/integrationtest/vm/virtualrouter/ipsec/test_create_ipsec_ike_auth_sha384.py
13de0821af3cdc9d9d76d5db9742203980a750f7
[ "Apache-2.0" ]
permissive
mahaibin/zstack-woodpecker
181410d3ab8aefc5ec98280b1449d3ac84eec4c6
7ddd907ae67fa1432ae7c5f2035baabdd960993f
refs/heads/master
2021-01-23T02:43:55.532559
2017-03-23T06:43:09
2017-03-23T06:43:59
null
0
0
null
null
null
null
UTF-8
Python
false
false
4,340
py
''' Test IPsec creation when ikeAuthAlgorithm is sha384 @author: Glody ''' import zstackwoodpecker.test_util as test_util import zstackwoodpecker.test_lib as test_lib import zstackwoodpecker.test_state as test_state import zstackwoodpecker.operations.ipsec_operations as ipsec_ops import os test_stub = test_lib.lib_get_test_stub() test_obj_dict1 = test_state.TestStateDict() test_obj_dict2 = test_state.TestStateDict() ipsec1 = None ipsec2 = None mevoco1_ip = None mevoco2_ip = None def test(): global mevoco1_ip global mevoco2_ip global ipsec1 global ipsec2 mevoco1_ip = os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] mevoco2_ip = os.environ['secondZStackMnIp'] test_util.test_dsc('Create test vm in mevoco1') vm1 = test_stub.create_vlan_vm(os.environ.get('l3VlanNetworkName1')) test_obj_dict1.add_vm(vm1) vm1.check() pri_l3_uuid1 = vm1.vm.vmNics[0].l3NetworkUuid vr1 = test_lib.lib_find_vr_by_l3_uuid(pri_l3_uuid1)[0] l3_uuid1 = test_lib.lib_find_vr_pub_nic(vr1).l3NetworkUuid vip1 = test_stub.create_vip('ipsec1_vip', l3_uuid1) os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco2_ip test_util.test_dsc('Create test vm in mevoco2') vm2 = test_stub.create_vlan_vm(os.environ.get('l3VlanDNATNetworkName')) test_obj_dict2.add_vm(vm2) vm2.check() pri_l3_uuid2 = vm2.vm.vmNics[0].l3NetworkUuid vr2 = test_lib.lib_find_vr_by_l3_uuid(pri_l3_uuid2)[0] l3_uuid2 = test_lib.lib_find_vr_pub_nic(vr2).l3NetworkUuid vip2 = test_stub.create_vip('ipsec2_vip', l3_uuid2) os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco1_ip test_util.test_dsc('Create ipsec in mevoco1') ipsec1 = ipsec_ops.create_ipsec_connection('ipsec1', pri_l3_uuid1, vip2.get_vip().ip, '123456', vip1.get_vip().uuid, [os.environ['secondZStackCidrs']], ike_auth_algorithm="sha384") os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco2_ip test_util.test_dsc('Create ipsec in mevoco2') ipsec2 = ipsec_ops.create_ipsec_connection('ipsec2', pri_l3_uuid2, vip1.get_vip().ip, '123456', vip2.get_vip().uuid, [os.environ['firstZStackCidrs']], 
ike_auth_algorithm="sha384") os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco1_ip if not test_lib.lib_check_ping(vm1.vm, vm2.vm.vmNics[0].ip): test_util.test_fail('vm in mevoco1[MN:%s] could not connect to vm in mevoco2[MN:%s]' % (mevoco1_ip, mevoco2_ip)) os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco2_ip if not test_lib.lib_check_ping(vm2.vm, vm1.vm.vmNics[0].ip): test_util.test_fail('vm in mevoco1[MN:%s] could not connect to vm in mevoco2[MN:%s]' % (mevoco2_ip, mevoco1_ip)) os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco1_ip ipsec_ops.delete_ipsec_connection(ipsec1.uuid) if test_lib.lib_check_ping(vm1.vm, vm2.vm.vmNics[0].ip, no_exception=True): test_util.test_fail('vm in mevoco1[MN:%s] could still connect to vm in mevoco2[MN:%s] after Ipsec is deleted' % (mevoco1_ip, mevoco2_ip)) os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco2_ip if test_lib.lib_check_ping(vm2.vm, vm1.vm.vmNics[0].ip, no_exception=True): test_util.test_fail('vm in mevoco2[MN:%s] could still connect to vm in mevoco1[MN:%s] after Ipsec is deleted' % (mevoco2_ip, mevoco1_ip)) os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco1_ip test_lib.lib_error_cleanup(test_obj_dict1) os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco2_ip ipsec_ops.delete_ipsec_connection(ipsec2.uuid) test_lib.lib_error_cleanup(test_obj_dict2) os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco1_ip test_util.test_pass('Create Ipsec Success') #Will be called only if exception happens in test(). def error_cleanup(): global mevoco1_ip global mevoco2_ip os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco1_ip global test_obj_dict1 test_lib.lib_error_cleanup(test_obj_dict1) global ipsec1 if ipsec1 != None: ipsec_ops.delete_ipsec_connection(ipsec1.uuid) os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = mevoco2_ip global test_obj_dict2 test_lib.lib_error_cleanup(test_obj_dict2) global ipsec2 if ipsec2 != None: ipsec_ops.delete_ipsec_connection(ipsec2.uuid)
[ "glody.guo@gmail.com" ]
glody.guo@gmail.com
374e5e8a8f650eb8fa7df655eaf0a275bdd2792f
642cf7d391e1767beeba0c836f45b55f27b81867
/lib/topic_model.py
66b464ae200eb9274c6f638aaf9e2ebc88600313
[]
no_license
dataSuckers/Deep-Learning-models
ad664d341807cd4bb242b847e62d3a989c82f91b
6bac493d424f047121a9e871b707e110e4c45734
refs/heads/master
2021-08-30T20:20:10.218440
2017-12-19T09:21:24
2017-12-19T09:21:24
112,164,577
0
0
null
null
null
null
UTF-8
Python
false
false
3,283
py
from textblob import TextBlob from collections import Counter import re from nltk import ngrams import nltk from nltk.tokenize import TweetTokenizer import string from sklearn.feature_extraction.text import TfidfVectorizer from nltk.stem.porter import PorterStemmer from topics_utils import clean_sentence,train_model,test_model_log,pos_tagger,get_tokens,get_all_files_list from multitask_utils import multi_work import os import pandas as pd '****************************clean texts*********************************' #get the data DIR = './../coinscore_test/output/good_tweets/' FILES, senti_dfs = get_all_files_list(DIR) tweets = pd.concat(list(senti_dfs)) #tweets = senti_dfs[0] #tweets = senti_dfs[term] #from nltk.stem import WordNetLemmatizer #lemmatizer = WordNetLemmatizer() #clean text #CPU times: user 40 s, sys: 44 ms, total: 40 s #Wall time: 40 s #%time texts = [clean_sentence(x) for x in tweets.text] texts = multi_work(thelist=list(enumerate(tweets.tweet_text.values)),func=clean_sentence,arguments=[],iterable_input=False,scaling_number=4,on_disk=False) texts = sum(texts,[]) texts = sum(texts,[]) texts = list(dict(sorted(texts)).values()) '****************************train models*********************************' #model training shouldn't be put together #model1= multi_work(thelist=texts,func=train_model,arguments=[1,True]) #dict_whole = train_model(texts,N=2,norm=True) dict_whole = train_model(texts,N=1,norm=True) model = dict_whole '****************************tag models*********************************' #nltk pos tags #CPU times: user 4min 35s, sys: 1.55 s, total: 4min 36s #Wall time: 4min 37s #%time tags_nltk = [nltk.pos_tag(tknzr.tokenize(text)) for text in texts] tknzr = TweetTokenizer() #if arguments contain a function, it should be double bracketed tags = multi_work(thelist=list(enumerate(texts)),func=pos_tagger,arguments=[[tknzr.tokenize]],iterable_input=False) outs = [] for x in tags: outs+=x tags =outs tags = list(dict(sorted(tags)).values()) #textblob 
tags #CPU times: user 5min 58s, sys: 2.18 s, total: 6min #Wall time: 6min 1s #%time tags = [TextBlob(x).tags for x in texts] '****************************train noun models*********************************' #tag_list=['NN','NNS','NNP','NNPS'] tag_list=['NNP','NNPS'] noun_sentence = [' '.join([w for w,t in one_tag if t in tag_list]) for one_tag in tags] dict_noun = train_model(noun_sentence,N=1,norm=True) model = dict_noun '****************************test models*********************************' #test #CPU times: user 1min 51s, sys: 844 ms, total: 1min 52s #Wall time: 1min 52s tests = list(tweets.text.values) #%time outs= test_model_log(tests=tests,model=dict_whole,N=1) test_outs = multi_work(thelist=list(enumerate(tests)),func=test_model_log,arguments=[[model,1]],iterable_input=False) test_outs = sum(test_outs,[]) test_outs = list(dict(sorted(test_outs)).values()) '****************************tfidf encoding*********************************' tfidf = TfidfVectorizer(tokenizer=get_tokens,stop_words='english',analyzer='char',ngram_range=(3,7),smooth_idf=True) %time tfs = tfidf.fit_transform(texts) feature_names = tfidf.get_feature_names() response = tfs for col in response.nonzero()[1]: print(feature_names[col], ' - ', max(response[:, 135015][1]))
[ "noreply@github.com" ]
noreply@github.com
52a836d0343e1d518ac0e52bf9f10bca23d2531e
689b4573107af6e6d6d91dba6f81c77ab44f2ee5
/migrations/0009_auto_20171213_1815.py
e561af149598ef6f5e0fb136f61a7d84ffc8921b
[]
no_license
prajwal041/Provisioning_UI
3f112da972426cd4d5bd96aa193e3e342159a3f8
d42eadabaed3252c40314e7e19d0694f2a1cd410
refs/heads/master
2020-03-27T13:57:34.243833
2018-09-24T19:36:28
2018-09-24T19:36:28
146,636,759
0
0
null
null
null
null
UTF-8
Python
false
false
641
py
# -*- coding: utf-8 -*- # Generated by Django 1.10.4 on 2017-12-13 12:45 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('app', '0008_auto_20171213_1752'), ] operations = [ migrations.AddField( model_name='orchestration', name='imagelist', field=models.CharField(blank=True, max_length=255), ), migrations.AddField( model_name='orchestration', name='ssh_name', field=models.CharField(blank=True, max_length=255), ), ]
[ "prajwal041@gmail.com" ]
prajwal041@gmail.com
7df4448674925a4f5236bfda93a13a48ff6499c9
8cf82dffebc98a4f2166dc306663415c1a485aae
/trainModel_mine.py
27556cea603ac392fdbb4f23e0e1e5c71a0eefb5
[]
no_license
GUUUUG/MACS-Net-Overlapping-chromosome-segmentation-based-on-multi-scale-U-shaped-network
e93ae01a026876599251702a29c7f45ecf7b1a4c
bae91433b0e4251c38e196feb8c173f81be68f61
refs/heads/main
2023-01-03T23:59:42.957871
2020-11-02T13:45:52
2020-11-02T13:45:52
309,244,647
0
0
null
null
null
null
UTF-8
Python
false
false
8,621
py
import os os.environ["CUDA_VISIBLE_DEVICES"] = "0" ### #!/usr/bin/python3.5 # -*- coding: utf-8 -*- import matplotlib.pyplot as plt import numpy as np import utilities import os from MACS_Net import MACS_Net from MACS_Net_1 import MACS_Net_1 from CENet_ceshi import CENet_ceshi from MACS_Net_3layers import MACS_Net_3layers #from MACS_Net_3layers_TEST import MACS_Net_3layers_TEST from MACS_Net_4layers import MACS_Net_4layers #from UNet import UNet from ARMSNet_duibi.UNet_R_4l import UNet_R_4l from ARMSNet_duibi.UNet_AR_4l import UNet_AR_4l from ARMSNet_duibi.UNet_PR_4l import UNet_PR_4l from ARMSNet_duibi.UNet_DAR_4l import UNet_DAR_4l from ARMSNet_duibi.UNet_4l import UNet_4l from model import Deeplabv3 from keras.models import Model import tensorflow as tf from Loss_accuracy import LossHistory import math from keras.optimizers import Adam import random #import cv2 from keras.layers import Conv2D #from keras.callbacks import Callback #from keras.callbacks import EarlyStopping from keras.callbacks import ModelCheckpoint #from tensorflow.python.keras.callbacks import ModelCheckpoint from tensorflow.python.keras.callbacks import EarlyStopping from tensorflow.python.keras.callbacks import Callback #import warnings from keras import backend as K from test import binary_crossentropy from keras.utils import multi_gpu_model from time import * # Load data xdata = np.load('/home/guangjie.wang/new/data/xdata_128x128.npy') ############################ labels = np.load('/home/guangjie.wang/new/data/ydata_128x128_0123_onehot.npy') number = 1 a=np.load('/home/guangjie.wang/new/data_cls_new/'+str(number)+'/data_cls_4.npy') ################ 4 a=a.tolist() b=np.load('/home/guangjie.wang/new/data_cls_new/'+str(number)+'/data_cls_1.npy') ################ 1 b=b.tolist() x = xdata[a] y = labels[a] x_test = xdata[b] y_test = labels[b] #weight = [] #for i in range(4): # posative = np.sum(labels[:,:,:,i]) # negative = 220102656-posative # inter = round(float(negative)/float(posative),2) # 
mi = math.log(inter,1000) # if mi < 0: # mi = (1+np.abs(mi))**-1 # weight.append(mi) # else: # weight.append(1+mi) #ix = [] #for i in range(len(y)): # #print(i) # value = y[i,:,:,3].sum() # if value == 0: # ix.append(i) #print(len(ix)) #length = np.arange(len(y)).tolist() #z = [] #for m in length: # if m not in ix: # z.append(m) #x = x[z] #y = y[z] #ix = [] #for i in range(len(y_test)): # #print(i) # value = y_test[i,:,:,3].sum() # if value == 0: # ix.append(i) #length = np.arange(len(y_test)).tolist() #z = [] #for m in length: # if m not in ix: # z.append(m) #x_test = x_test[z] #y_test = y_test[z] #x_f = np.fliplr(x) #y_f = np.fliplr(y) #x_test_f = np.fliplr(x_test) #y_test_f = np.fliplr(y_test) # #x = np.stack((x,x_f),axis=0).reshape(-1,128,128,1) #y = np.stack((y,y_f),axis=0).reshape(-1,128,128,4) ############################################################################################################################################################################## ############################################################################################################################################################################## #Name = './MACSNet_4layers_zidaiweighted_original.h5' ################ model 2 #Name = '/home/guangjie.wang/new/MACSNet_duibi/CENet_1.h5' Name = '/home/guangjie.wang/new/ARMSNet_duibi/duibi_h5/UNet_4l_1.h5' Name2 = '/home/guangjie.wang/new/ARMSNet_duibi/duibi_h5/UNet_PR_'+str(number)+'.h5' ################ MODEL #model = CENet_ceshi(input_shape=(128,128,1)) #model = MACS_Net_1(input_shape=(128,128,1)) #model = Deeplabv3(input_shape=(128,128,1), classes=4) #model = UNet(input_shape=(128,128,1)) #model = UNet_R_4l(input_shape=(128,128,1)) #model = UNet_AR_4l(input_shape=(128,128,1)) #model = UNet_PR_4l(input_shape=(128,128,1)) #model = UNet_DAR_4l(input_shape=(128,128,1)) model = UNet_4l(input_shape=(128,128,1)) #model = MACS_Net(input_shape=(128,128,1)) #model = MACS_Net_4layers(input_shape=(128,128,1)) #model = 
MACS_Net_3layers(input_shape=(128,128,1)) #model = MACS_Net_3layers_TEST(input_shape=(128,128,1)) #model = multi_gpu_model(model, gpus=2) model.compile(loss='binary_crossentropy', optimizer='adam') ##loss='binary_crossentropy' , optimizer='adam' optimizer=Adam(lr=1e-4) default lr=1e-3, beta_1=0.9, beta_2=0.999 model.load_weights(Name) ############################################### 3 ############################################### predict img ix = 119 img = x_test[ix,:,:,0].reshape(1,128,128,1) label = y_test[ix,:,:,3] #print(np.sum(label)) img_pred = model.predict(img).round() plt.xticks(()) plt.yticks(()) plt.imshow(x_test[ix,:,:,0]) plt.savefig('./img.png') plt.imshow(label) plt.savefig('./label.png') plt.show() plt.imshow(img_pred[0,:,:,3]) plt.savefig('./pred.png') plt.show() testIOU = utilities.IOU(img_pred, y_test[ix,:,:,:].reshape(1,128,128,4)) print('Testing IOU: ' + str(testIOU)) ############################################## predict iou #y_pred_test = model.predict(x_test).round() #testIOU = utilities.IOU(y_pred_test, y_test) #print('Testing IOU: ' + str(testIOU)) ################################################ predict chrom iou #y_pred_test = model.predict(x_test).round() #testIOU = utilities.IOU_One(y_pred_test, y_test) #print('Testing Chrom IOU: ' + str(testIOU)) ############################################### predict Accuracy #y_pred_test = model.predict(x_test).round() #testIOU = utilities.global_chrom_Accuracy(y_pred_test, y_test) #print('Testing Chrom Acc: ' + str(testIOU)) ############################################### predict iou_set #y_pred_test = model.predict(x_test).round() #testIOU = utilities.IOU_set(y_pred_test, y_test) #np.save('/home/guangjie.wang/new/IOU_SET/IOU_test_UNet_'+str(number),testIOU) #for layer in model.layers[:-3]: # layer.trainable = False #for layer in model.layers[-3:]: # layer.trainable = True # Specify the number of epochs to run #num_epoch = 1 ############################################### 4 #for i in 
range(num_epoch): # print('epoch:', i) # # Fit # #history = LossHistory() # check_point = ModelCheckpoint(Name2, monitor='val_loss', verbose=0, save_best_only=True, save_weights_only=True, mode='auto', period=1) # # callback = EarlyStopping(monitor="val_loss", patience=10, verbose=0, mode='min') # # history = model.fit(x, y, epochs=100, validation_split=0.2, batch_size=32, callbacks=[check_point, callback]) ########################## [check_point, callback] # # # Calculate mIOU # model.load_weights(Name2) # y_pred_train = model.predict(x).round() # trainIOU = utilities.IOU(y_pred_train, y) # print('value: ',np.sum(y_pred_train[0,:,:,3])) # print('value: ',np.sum(y[0,:,:,3])) # print('Training IOU: ' + str(trainIOU)) # y_pred_test = model.predict(x_test).round() # testIOU = utilities.IOU(y_pred_test, y_test) # print('Testing Overlap IOU: ' + str(testIOU)) ## ## y_pred_test = model.predict(x_test).round() ## testIOU = utilities.IOU_One(y_pred_test, y_test) ## print('Testing Chrom IOU: ' + str(testIOU)) ## ## y_pred_test = model.predict(x_test).round() ## testIOU = utilities.global_chrom_Accuracy(y_pred_test, y_test) ## print('Testing Chrom Acc: ' + str(testIOU)) # # # # # #fig = plt.figure() # #plt.plot(history.history['loss'],label='training loss') # #plt.plot(history.history['val_loss'],label='val loss') # #plt.title('model loss') # #plt.ylabel('loss') # #plt.xlabel('epoch') # #plt.legend(loc='upper right') # #fig.savefig('/home/guangjie.wang/new/pic/6_'+str(i)+'.png') ####this is My_loss # # #y_pred_train = model.predict(x).round() # #trainIOU = utilities.IOU_set(y_pred_train, y) # #np.save('/home/guangjie.wang/new/IOU_SET/IOU_train_better_'+str(i),trainIOU) # #y_pred_test = model.predict(x_test).round() # #testIOU = utilities.IOU_set(y_pred_test, y_test) # #np.save('/home/guangjie.wang/new/IOU_SET/IOU_test_better_'+str(i),testIOU)
[ "953704875@qq.com" ]
953704875@qq.com
4a567d7a7c4e85a615d24491eb179078e930fe16
e68598b26d1f8d49adc22bab6f85b5a739ebda2c
/multi_weather/settings.py
be6ef288b9d091068116f6e02c63841c992b6cab
[]
no_license
ZackT403/multi_weather-1
5b1a1891c8ac8bd201e6802d666800620d3fb9ab
6f0191320d52e1ecd5dfb59472e980ddafbd07d8
refs/heads/master
2022-09-06T20:55:36.497176
2020-06-02T21:57:59
2020-06-02T21:57:59
null
0
0
null
null
null
null
UTF-8
Python
false
false
3,204
py
""" Django settings for multi_weather project. Generated by 'django-admin startproject' using Django 3.0.6. For more information on this file, see https://docs.djangoproject.com/en/3.0/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/3.0/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'm+t9z#k&)doq87d_amh@sw^u+(y+x)cr(o0z&1ut41v$b%&f=d' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'multi_weather.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR,'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'multi_weather.wsgi.application' # Database # https://docs.djangoproject.com/en/3.0/ref/settings/#databases DATABASES = { 'default': { 
'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/3.0/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/3.0/howto/static-files/ STATIC_URL = '/static/' STATICFILES_DIRS = ( os.path.join(BASE_DIR, 'static'), )
[ "andrew@loenhouse.com" ]
andrew@loenhouse.com
7abb5c21dc4c445762a6970a1c524ea1b1234e2c
e73cd01c238ae7931093f61ed4a93a5b6268b49c
/venv/bin/easy_install-3.8
b5ac80b8d46ffc3adc8569a7cdfa427d55aa6592
[]
no_license
riyadzaigirdar/django-redis
8532dd6511fe47e04a88bfea66dd5374f7452825
0bb75d6e415adac51599d3225af7dc99d4634d9f
refs/heads/master
2022-12-15T23:12:34.714320
2020-09-19T14:55:26
2020-09-19T14:55:26
296,893,101
0
0
null
null
null
null
UTF-8
Python
false
false
255
8
#!/home/riyad/Desktop/redis/venv/bin/python # -*- coding: utf-8 -*- import re import sys from setuptools.command.easy_install import main if __name__ == '__main__': sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) sys.exit(main())
[ "riyadzaigir280@gmail.com" ]
riyadzaigir280@gmail.com
6b6b8ed46de995cb4125b9f3eae5ad6f987cb563
1ff9adfdb9d559e6f81ed9470467bab25e93b5ab
/src/ta_lib/_vendor/tigerml/core/reports/lib.py
e68184e54ad7fe935d6209b79f71931a46f2af5f
[]
no_license
Seemant-tiger/housing-price-prediction
a39dbefcb11bc460edeeee92e6becf77d35ff3a8
be5d8cca769c7e267cfee1932eb82b70c2855bc1
refs/heads/main
2023-06-24T00:25:49.776720
2021-07-18T16:44:28
2021-07-18T16:44:28
387,222,852
0
0
null
null
null
null
UTF-8
Python
false
false
594
py
def create_report( contents, name="", path="", format=".html", split_sheets=True, tiger_template=False ): if format == ".xlsx": from .excel import create_excel_report create_excel_report(contents, name=name, path=path, split_sheets=split_sheets) elif format == ".pptx": from .ppt.lib import create_ppt_report create_ppt_report(contents, name=name, path=path, tiger_template=tiger_template) if format == ".html": from .html import create_html_report create_html_report(contents, name=name, path=path, split_sheets=split_sheets)
[ "seemantsingh1199@gmail.com" ]
seemantsingh1199@gmail.com
fd4852650ffaaeb54b696523ab7c48faf644901c
40e593f1872a6ab4ea2e3f1830bd1919fad80422
/e_commerce_project/cart/views.py
31a4447a01d731d2e370aaf00749a802cde1a039
[]
no_license
avodhaR/Ecommerce-app
b1def2391b5082f8ddf94db78a9b6de7508d3f7d
1f1865ffd41380983231d7ab4b9a15bb9ed9adfb
refs/heads/master
2023-04-08T03:17:47.375590
2021-04-05T06:47:31
2021-04-05T06:47:31
354,736,037
0
0
null
null
null
null
UTF-8
Python
false
false
2,111
py
from django.shortcuts import render,redirect,get_object_or_404 from e_commerce_app.models import Product from . models import Cart,CartItem from django.core.exceptions import ObjectDoesNotExist def _cart_id(request): cart=request.session.session_key if not cart: cart=request.session.create() return cart def add_cart(request,product_id): product=Product.objects.get(id=product_id) try: cart=Cart.objects.get(cart_id=_cart_id(request)) except Cart.DoesNotExist: cart=Cart.objects.create(cart_id=_cart_id(request)) cart.save() try: cart_item=CartItem.objects.get(product=product,cart=cart) if cart_item.quantity < cart_item.product.stock: cart_item.quantity +=1 cart_item.save(); except CartItem.DoesNotExist: cart_item=CartItem.objects.create( product=product, quantity=1, cart=cart ) cart_item.save() return redirect('cart:cart_detail') def cart_detail(request,total=0,counter=0,cart_items=None): try: cart=Cart.objects.get(cart_id=_cart_id(request)) cart_items=CartItem.objects.filter(cart=cart,active=True) for cart_item in cart_items: total +=(cart_item.product.price * cart_item.quantity) counter += cart_item.quantity except ObjectDoesNotExist: pass return render(request,'cart.html',dict(cart_items=cart_items,total=total,counter=counter)) def cart_remove(request,product_id): cart=Cart.objects.get(cart_id=_cart_id(request)) product=get_object_or_404(Product,id=product_id) cart_item=CartItem.objects.get(product=product,cart=cart) if cart_item.quantity >1: cart_item.quantity-=1 cart_item.save() else: cart_item.delete() return redirect('cart:cart_detail') def full_remove(request,product_id): cart=Cart.objects.get(cart_id=_cart_id(request)) product=get_object_or_404(Product,id=product_id) cart_item=CartItem.objects.get(product=product,cart=cart) cart_item.delete() return redirect('cart:cart_detail')
[ "riyasupri@gmail.com" ]
riyasupri@gmail.com
7213f10f28062eadf3bbacca50ac7ce3992e7b2e
854639c9f2ce6a1cf0a412a35a56a93f19f07e63
/Experimento/gerar_grafico_tempo_execucao_5_subplot.py
926b46fdf0f14c14769d9832f38bc579274e320b
[ "MIT" ]
permissive
RandelSouza/TCC
3fd82d94fe88bc00b0d2d6968c0772957ccc4dc7
aeaee2c26635db26ccc3802720d41d2426a062fa
refs/heads/master
2020-04-29T20:11:35.723032
2020-04-20T02:31:37
2020-04-20T02:31:37
176,377,555
1
0
null
null
null
null
UTF-8
Python
false
false
4,496
py
# -*- coding:UTF-8-*- import sys from glob import glob import pandas as pd import matplotlib.pyplot as plt import matplotlib.patches as mpatches import numpy as np plt.rcParams.update({'font.size': 25}) plt.rc('legend', fontsize=20) ''' # Configurações dos Gráficos que podem ser alteradas SMALL_SIZE = 40 MEDIUM_SIZE = 40 BIGGER_SIZE = 40 plt.rc('font', size=SMALL_SIZE) # controls default text sizes plt.rc('axes', titlesize=SMALL_SIZE) # fontsize of the axes title plt.rc('axes', labelsize=MEDIUM_SIZE) # fontsize of the x and y labels plt.rc('xtick', labelsize=SMALL_SIZE) # fontsize of the tick labels plt.rc('ytick', labelsize=SMALL_SIZE) # fontsize of the tick labels plt.rc('legend', fontsize=SMALL_SIZE) # legend fontsize plt.rc('figure', titlesize=BIGGER_SIZE) # fontsize of the figure title ''' def calc(df): mean = df[0].mean() std = df[0].std() margemdeerro = 1.96 * (std / np.sqrt(len(df[0]))) return mean, margemdeerro reload(sys) sys.setdefaultencoding('utf-8') path = "resultados_experimento" controllerSDN = [ "Ryu", "Floodlight", "POX" ] nodesQuantity = [ 26, 52, 104 ] dados = [] dados_processados = [] color = ["green", "#ff8c00", "m"] size = 20 x = [ 20, 30, 40 ] labels=[ str(nodesQuantity[0]), str(nodesQuantity[1]), str(nodesQuantity[2]) ] for controller in controllerSDN: for quantity in nodesQuantity: arquivo = "{}/{}{}{}/time_execucao_real_{}_{}.txt".format(path, controller, "_Nos_", quantity, controller, quantity) dados.append( pd.read_csv(glob(arquivo)[0], header=None ) ) for dado in dados: dados_processados.append( calc( dado ) ) print dados_processados plt.subplot(221) #plt.ylim(22, 41) plt.plot( [x+1 for x in range(len(dados[1]))], dados[1], label=controllerSDN[0], color= color[0], marker="", markersize=2, linestyle='-', lw=3) plt.plot([1, len(dados[1])], [calc(dados[1])[0], calc(dados[1])[0] ], color='#FFFF00', linewidth=3, linestyle='-', label='Média '+controllerSDN[0]) plt.legend(loc='best', numpoints=1 , fancybox=True, framealpha=0.5) 
plt.title('Tempo de execução \n com 52 nós IoT (a)') plt.ylabel('Tempo (s)') plt.xlabel('Número de coletas') plt.subplot(222) #plt.ylim(22, 41) plt.plot( [x+1 for x in range(len(dados[4]))], dados[4], label=controllerSDN[1], color= color[1], marker="", markersize=2, linestyle='-', lw=3) plt.plot([1, len(dados[4])], [calc(dados[4])[0], calc(dados[4])[0] ], color='black', linewidth=3, linestyle='-', label='Média '+controllerSDN[1]) plt.legend(loc='best', numpoints=1, fancybox=True, framealpha=0.5) plt.title('Tempo de execução \n com 52 nós IoT (b)') plt.ylabel('Tempo (s)') plt.xlabel('Número de coletas') plt.subplot(223) #plt.ylim(22, 41) plt.plot( [x+1 for x in range(len(dados[7]))], dados[7], label=controllerSDN[2], color= color[2], marker= "", markersize=2, linestyle='-', lw=3) plt.plot([1, len(dados[7])], [calc(dados[7])[0], calc(dados[7])[0] ], color='blue', linewidth=3, linestyle='-', label='Média '+controllerSDN[2]) plt.legend(loc='best', numpoints=1, fancybox=True, framealpha=0.5) plt.title('Tempo de execução \n com 52 nós IoT (c)') plt.ylabel('Tempo (s)') plt.xlabel('Número de coletas') plt.subplot(224) #plt.ylim(22, 41) plt.plot( [x+1 for x in range(len(dados[1]))], dados[1], label=controllerSDN[0], color= color[0], marker= "", markersize=2, linestyle='-', lw=3) plt.plot( [x+1 for x in range(len(dados[4]))], dados[4], label=controllerSDN[1], color= color[1], marker= "", markersize=2, linestyle='-', lw=3) plt.plot( [x+1 for x in range(len(dados[7]))], dados[7], label=controllerSDN[2], color= color[2], marker= "", markersize=2, linestyle='-', lw=3) plt.plot([1, len(dados[1])], [calc(dados[1])[0], calc(dados[1])[0] ], color='#FFFF00', linewidth=3, linestyle='-', label='Média '+controllerSDN[0]) plt.plot([1, len(dados[4])], [calc(dados[4])[0], calc(dados[4])[0] ], color='black', linewidth=3, linestyle='-', label='Média '+controllerSDN[1]) plt.plot([1, len(dados[7])], [calc(dados[7])[0], calc(dados[7])[0] ], color='blue', linewidth=3, linestyle='-', label='Média 
'+controllerSDN[2]) plt.legend(loc='upper center', numpoints=1, fancybox=True, framealpha=0.5, bbox_to_anchor=(0.5, 1.), ncol=2) plt.title('Tempo de execução \n com 52 nós IoT (d)') plt.ylabel('Tempo (s)') plt.xlabel('Número de coletas') plt.grid(True) plt.subplots_adjust(top=0.92, bottom=0.08, left=0.10, right=0.95, hspace=0.50, wspace=0.35) #plt.savefig("/home/randel/manhattan02.png") plt.show()
[ "randelsouza88@gmail.com" ]
randelsouza88@gmail.com
7d99e26a6d7d4b0a7f916ad07f46105c644061c7
ac2f43c8e0d9649a7f063c59b3dffdfed9fd7ed7
/tests2/common/base_slaac_test.py
459b17fe8aebffa9efbf641b36e553aada1068c0
[]
no_license
facebook/openbmc
bef10604ced226288600f55248b7f1be9945aea4
32777c66a8410d767eae15baabf71c61a0bef13c
refs/heads/helium
2023-08-17T03:13:54.729494
2023-08-16T23:24:18
2023-08-16T23:24:18
31,917,712
684
331
null
2023-07-25T21:19:08
2015-03-09T19:18:35
C
UTF-8
Python
false
false
2,578
py
#!/usr/bin/env python3 # # Copyright 2018-present Facebook. All Rights Reserved. # # This program file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by the # Free Software Foundation; version 2 of the License. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License # for more details. # # You should have received a copy of the GNU General Public License # along with this program in a file named COPYING; if not, write to the # Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, # Boston, MA 02110-1301 USA # import subprocess from common.base_interface_test import BaseInterfaceTest from utils.cit_logger import Logger class BaseSlaacTest(BaseInterfaceTest): def get_ipv6_address(self): """ Get inet6 address with highest length of a given interface overriding this method of BaseInterfaceTest class because we want to have inet6 address with highest length """ out = self.get_ip_addr_output_inet6() # trying to find inet6 address with highest length ipv6 = "" for value in out[1:]: if len(value.split("/")[0]) > len(ipv6): ipv6 = value.split("/")[0] Logger.debug("Got ip address for " + str(self.ifname)) return ipv6.lower() def get_mac_address(self): """ Get Ethernet MAC address """ f = subprocess.Popen( ["fw_printenv", "-n", "ethaddr"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) out, err = f.communicate() self.assertEqual( f.returncode, 0, "fw_printenv -n ethaddr " + "exited with returncode: " + str(f.returncode) + ", err: " + str(err), ) if out: out = out.decode("utf-8").rstrip() return out.lower() else: raise Exception("Couldn't find MAC address [FAILED]") def generate_modified_eui_64_mac_address(self): """ Get Modified EUI-64 Mac Address """ mac_address = self.get_mac_address().split(":") # reversing the 7th 
bit of the mac address mac_address[0] = hex(int(mac_address[0], 16) ^ 2)[2:] mac_address[2] = mac_address[2] + "fffe" return "".join(mac_address)
[ "facebook-github-bot@users.noreply.github.com" ]
facebook-github-bot@users.noreply.github.com
48b50a8ac8d47771a70791145c9359e97e3a9752
178b24f3d164eb498c03cf51762bd938ef76afab
/db/model.py
01bc9065564740e417b4729f28c5cc5511db9291
[]
no_license
yezixigua/homeTE
73175339370741b48d00ad83f04e598c3727da11
514dbb80ed397cc758bc86ae32d6a4c257a110f7
refs/heads/master
2020-04-05T03:34:18.032645
2018-11-24T07:10:04
2018-11-24T07:10:04
156,519,657
0
0
null
null
null
null
UTF-8
Python
false
false
6,978
py
from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker from sqlalchemy import Column, Integer, String from sqlalchemy.ext.declarative import declarative_base import time Base = declarative_base() # 保存ip的对象类型, 映射一个数据表 class Visitor(Base): __tablename__ = 'Visitor' id = Column('id', Integer, primary_key=True, autoincrement=True) time = Column('time', String(50)) ip = Column('ip', String(50)) # 用户&密码数据库 class User(Base): __tablename__ = 'User' id = Column('id', Integer, primary_key=True, autoincrement=True) name = Column('name', String(50)) password = Column('password', String(50)) email = Column('email', String(20)) phone = Column('phone', String(20)) createdTime = Column('createdTime', String(50)) lastVisitTime = Column('lastVisitTime', String(50)) isDeleted = Column('isDeleted', String(10)) def __init__(self, name='', pwd='', email='', phone=''): self.name = name self.password = pwd ct = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) self.email = email self.phone = phone self.createdTime = ct self.lastVisitTime = ct self.isDeleted = 'False' @staticmethod def init_data(): Base.metadata.create_all(engine) def add(self): session1 = DB_Session() session1.add(self) session1.commit() session1.close() def __repr__(self): string = '' \ 'id: {} user: {} pwd: {} ct: {} lvt: {} d: {}' \ ''.format( self.id, self.name, self.password, self.createdTime, self.lastVisitTime, self.isDeleted ) return string @classmethod def query_data(cls, **kwargs): # 5. 查询数据 # 5.1 返回结果集的第二项 session1 = DB_Session() for k, v in kwargs.items(): key, value = k, v print(key, value) # user = session1.query(cls).get(key) user = session1.query(cls).filter_by(id=1).all() session1.close() print(user) @classmethod def query_by_name(cls, name): # 5. 
查询数据 # 5.1 返回结果集的第二项 # user = session.query(cls).get(key) session1 = DB_Session() user = session1.query(cls).filter_by(name=name).all() print(user) session1.close() return user @classmethod def is_valid_user(cls, name, pwd): user = cls.query_by_name(name) if len(user) == 0: return False if user[0].password == pwd: return True else: return False @staticmethod def query_all(): # 5. 查询数据 session1 = DB_Session() users = session1.query(User)[:] for user in users: print(user) session1.close() # 连接数据库的一些变量, 会需要全局引用 # print(os.getcwd()) DB_CONNECT_STRING = 'sqlite:///db/test2.db' if __name__ == '__main__': DB_CONNECT_STRING = 'sqlite:///test2.db' engine = create_engine(DB_CONNECT_STRING, echo=False) DB_Session = sessionmaker(bind=engine) session = DB_Session() def init_data(): # 1. 创建表(如果表已经存在,则不会创建) Base.metadata.create_all(engine) def add_data(data_ip): # 2. 插入数据 session1 = DB_Session() localtime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) u = Visitor(time=localtime, ip=data_ip) session1.add(u) session1.commit() session1.close() def add_data_user(): # 2. 插入数据 session1 = DB_Session() ct = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) u = User(name='1', password='34', createdTime=ct, lastVisitTime=ct, isDeleted='False') session1.add(u) session1.commit() session1.close() def add_data_demo(): # 2. 插入数据 u = Visitor(time='tobi', ip='wdtf') u1 = Visitor(time='tobi', ip='wdtf') u2 = Visitor(time='tobi', ip='wdtf') r = Role(name='user') # 2.1 使用add,如果已经存在,会报错 session.add(u) session.add(u1) session.add(u2) session.add(r) session.commit() print(r.id) # # # 3 修改数据 # # 3.1 使用merge方法,如果存在则修改,如果不存在则插入(只判断主键,不判断unique列) # r.name = 'admin' # session.merge(r) # def update_data(id_data, ip='0.0.0.0'): # 3.2 也可以通过这种方式修改 localtime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) session.query(Visitor).filter(Visitor.id == id_data).update({'ip': ip, 'time': localtime}) def delete_data(id_data): # 4. 
删除数据 session.query(Role).filter(Role.id == id_data).delete() def query_data(id, database=User): # 5. 查询数据 # 5.1 返回结果集的第二项 # user = session.query(database).filter_by(id=1) user = session.query(database).filter(database.id == id).all() print(user) def query_all(database=User): # 5. 查询数据 users = session.query(database)[:] for user in users: print(user) # # # 5.3 查询条件 # user = session.query(User).filter(User.id < 6).first() # # # 5.4 排序 # users = session.query(User).order_by(User.name) # # # 5.5 降序(需要导入desc方法) # from sqlalchemy import desc # users = session.query(User).order_by(desc(User.name)) # # # 5.6 只查询部分属性 # users = session.query(User.name).order_by(desc(User.name)) # for user in users: # print user.name # # # 5.7 给结果集的列取别名 # users = session.query(User.name.label('user_name')).all() # for user in users: # print user.user_name # # # 5.8 去重查询(需要导入distinct方法) # from sqlalchemy import distinct # users = session.query(distinct(User.name).label('name')).all() # # # 5.9 统计查询 # user_count = session.query(User.name).order_by(User.name).count() # age_avg = session.query(func.avg(User.age)).first() # age_sum = session.query(func.sum(User.age)).first() # # # 5.10 分组查询 # users = session.query(func.count(User.name).label('count'), User.age).group_by(User.age) # for user in users: # print 'age:{0}, count:{1}'.format(user.age, user.count) # # # 6.1 exists查询(不存在则为~exists()) # from sqlalchemy.sql import exists # session.query(User.name).filter(~exists().where(User.role_id == Role.id)) # # SELECT name AS users_name FROM users WHERE NOT EXISTS (SELECT * FROM roles WHERE users.role_id = roles.id) # # # 6.2 除了exists,any也可以表示EXISTS # session.query(Role).filter(Role.users.any()) # # # 7 random # from sqlalchemy.sql.functions import random # user = session.query(User).order_by(random()).first() def my_test(): init_data() User.query_all() User.is_valid_user('12234', '123') if __name__ == '__main__': my_test() session.close()
[ "541889983@qq.com" ]
541889983@qq.com
a1d37c6673b71f08cb910ad6171f14c473dacd2e
d2310dd974620eb5af112d056e4518b086dea07d
/Apps/Inventario/migrations/0032_auto_20191209_1322.py
251ea7187a36e3fe6ebd47a07e968600f4c6ab2c
[]
no_license
diegoquirozramirez/Inventario
de824fbc06e6b64f9ca87c061fbded2a3c3df020
f3e6e89e03e633acb5729390b1ef5f59a55bdaf2
refs/heads/master
2022-03-25T17:02:37.830179
2019-12-16T08:22:57
2019-12-16T08:22:57
226,250,841
0
0
null
null
null
null
UTF-8
Python
false
false
405
py
# Generated by Django 2.0 on 2019-12-09 18:22 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('Inventario', '0031_auto_20191209_1321'), ] operations = [ migrations.AlterField( model_name='base0', name='codigo_interno', field=models.CharField(blank=True, max_length=5), ), ]
[ "ingquirozramirez@gmail.com" ]
ingquirozramirez@gmail.com
44b3c3e17a72f62f7418db2902479cb858d1cca6
c275fc8e52e852a82c240d363fc80d818c938549
/fastreid/modeling/backbones/resnest.py
54d052d0d66e2081c3a4416c82bce401e9447f8b
[]
no_license
JinkaiZheng/fast-reid_20200430
67c968698e6a1c837e7c1c49f0078afae96152a3
6832d28e8ddc9b743d2de2a1a089175b8dd4bfd4
refs/heads/master
2022-06-29T09:02:50.443834
2020-05-12T12:09:26
2020-05-12T12:09:26
262,527,154
0
1
null
null
null
null
UTF-8
Python
false
false
17,224
py
# encoding: utf-8 # based on: # https://github.com/zhanghang1989/ResNeSt/blob/master/resnest/torch/resnest.py """ResNeSt models""" import logging import math import torch from torch import nn from .build import BACKBONE_REGISTRY from ...layers import SplAtConv2d, IBN, Non_local _url_format = 'https://hangzh.s3.amazonaws.com/encoding/models/{}-{}.pth' _model_sha256 = {name: checksum for checksum, name in [ ('528c19ca', 'resnest50'), ('22405ba7', 'resnest101'), ('75117900', 'resnest200'), ('0cc87c48', 'resnest269'), ]} def short_hash(name): if name not in _model_sha256: raise ValueError('Pretrained model for {name} is not available.'.format(name=name)) return _model_sha256[name][:8] model_urls = {name: _url_format.format(name, short_hash(name)) for name in _model_sha256.keys() } class Bottleneck(nn.Module): """ResNet Bottleneck """ # pylint: disable=unused-argument expansion = 4 def __init__(self, inplanes, planes, with_ibn=False, stride=1, downsample=None, radix=1, cardinality=1, bottleneck_width=64, avd=False, avd_first=False, dilation=1, is_first=False, rectified_conv=False, rectify_avg=False, norm_layer=None, dropblock_prob=0.0, last_gamma=False): super(Bottleneck, self).__init__() group_width = int(planes * (bottleneck_width / 64.)) * cardinality self.conv1 = nn.Conv2d(inplanes, group_width, kernel_size=1, bias=False) if with_ibn: self.bn1 = IBN(group_width) else: self.bn1 = norm_layer(group_width) self.dropblock_prob = dropblock_prob self.radix = radix self.avd = avd and (stride > 1 or is_first) self.avd_first = avd_first if self.avd: self.avd_layer = nn.AvgPool2d(3, stride, padding=1) stride = 1 if radix > 1: self.conv2 = SplAtConv2d( group_width, group_width, kernel_size=3, stride=stride, padding=dilation, dilation=dilation, groups=cardinality, bias=False, radix=radix, rectify=rectified_conv, rectify_avg=rectify_avg, norm_layer=norm_layer, dropblock_prob=dropblock_prob) elif rectified_conv: from rfconv import RFConv2d self.conv2 = RFConv2d( group_width, 
group_width, kernel_size=3, stride=stride, padding=dilation, dilation=dilation, groups=cardinality, bias=False, average_mode=rectify_avg) self.bn2 = norm_layer(group_width) else: self.conv2 = nn.Conv2d( group_width, group_width, kernel_size=3, stride=stride, padding=dilation, dilation=dilation, groups=cardinality, bias=False) self.bn2 = norm_layer(group_width) self.conv3 = nn.Conv2d( group_width, planes * 4, kernel_size=1, bias=False) self.bn3 = norm_layer(planes * 4) if last_gamma: from torch.nn.init import zeros_ zeros_(self.bn3.weight) self.relu = nn.ReLU(inplace=True) self.downsample = downsample self.dilation = dilation self.stride = stride def forward(self, x): residual = x out = self.conv1(x) out = self.bn1(out) if self.dropblock_prob > 0.0: out = self.dropblock1(out) out = self.relu(out) if self.avd and self.avd_first: out = self.avd_layer(out) out = self.conv2(out) if self.radix == 1: out = self.bn2(out) if self.dropblock_prob > 0.0: out = self.dropblock2(out) out = self.relu(out) if self.avd and not self.avd_first: out = self.avd_layer(out) out = self.conv3(out) out = self.bn3(out) if self.dropblock_prob > 0.0: out = self.dropblock3(out) if self.downsample is not None: residual = self.downsample(x) out += residual out = self.relu(out) return out class ResNest(nn.Module): """ResNet Variants ResNest Parameters ---------- block : Block Class for the residual block. Options are BasicBlockV1, BottleneckV1. layers : list of int Numbers of layers in each block classes : int, default 1000 Number of classification classes. dilated : bool, default False Applying dilation strategy to pretrained ResNet yielding a stride-8 model, typically used in Semantic Segmentation. norm_layer : object Normalization layer used in backbone network (default: :class:`mxnet.gluon.nn.BatchNorm`; for Synchronized Cross-GPU BachNormalization). Reference: - He, Kaiming, et al. "Deep residual learning for image recognition." 
Proceedings of the IEEE conference on computer vision and pattern recognition. 2016. - Yu, Fisher, and Vladlen Koltun. "Multi-scale context aggregation by dilated convolutions." """ # pylint: disable=unused-variable def __init__(self, last_stride, with_ibn, with_nl, block, layers, non_layers, radix=1, groups=1, bottleneck_width=64, dilated=False, dilation=1, deep_stem=False, stem_width=64, avg_down=False, rectified_conv=False, rectify_avg=False, avd=False, avd_first=False, final_drop=0.0, dropblock_prob=0, last_gamma=False, norm_layer=nn.BatchNorm2d): self.cardinality = groups self.bottleneck_width = bottleneck_width # ResNet-D params self.inplanes = stem_width * 2 if deep_stem else 64 self.avg_down = avg_down self.last_gamma = last_gamma # ResNeSt params self.radix = radix self.avd = avd self.avd_first = avd_first super().__init__() self.rectified_conv = rectified_conv self.rectify_avg = rectify_avg if rectified_conv: from rfconv import RFConv2d conv_layer = RFConv2d else: conv_layer = nn.Conv2d conv_kwargs = {'average_mode': rectify_avg} if rectified_conv else {} if deep_stem: self.conv1 = nn.Sequential( conv_layer(3, stem_width, kernel_size=3, stride=2, padding=1, bias=False, **conv_kwargs), norm_layer(stem_width), nn.ReLU(inplace=True), conv_layer(stem_width, stem_width, kernel_size=3, stride=1, padding=1, bias=False, **conv_kwargs), norm_layer(stem_width), nn.ReLU(inplace=True), conv_layer(stem_width, stem_width * 2, kernel_size=3, stride=1, padding=1, bias=False, **conv_kwargs), ) else: self.conv1 = conv_layer(3, 64, kernel_size=7, stride=2, padding=3, bias=False, **conv_kwargs) self.bn1 = norm_layer(self.inplanes) self.relu = nn.ReLU(inplace=True) self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.layer1 = self._make_layer(block, 64, layers[0], with_ibn=with_ibn, norm_layer=norm_layer, is_first=False) self.layer2 = self._make_layer(block, 128, layers[1], stride=2, with_ibn=with_ibn, norm_layer=norm_layer) if dilated or dilation == 4: 
self.layer3 = self._make_layer(block, 256, layers[2], stride=1, with_ibn=with_ibn, dilation=2, norm_layer=norm_layer, dropblock_prob=dropblock_prob) self.layer4 = self._make_layer(block, 512, layers[3], stride=1, with_ibn=with_ibn, dilation=4, norm_layer=norm_layer, dropblock_prob=dropblock_prob) elif dilation == 2: self.layer3 = self._make_layer(block, 256, layers[2], stride=2, with_ibn=with_ibn, dilation=1, norm_layer=norm_layer, dropblock_prob=dropblock_prob) self.layer4 = self._make_layer(block, 512, layers[3], stride=1, with_ibn=with_ibn, dilation=2, norm_layer=norm_layer, dropblock_prob=dropblock_prob) else: self.layer3 = self._make_layer(block, 256, layers[2], stride=2, with_ibn=with_ibn, norm_layer=norm_layer, dropblock_prob=dropblock_prob) self.layer4 = self._make_layer(block, 512, layers[3], stride=last_stride, with_ibn=with_ibn, norm_layer=norm_layer, dropblock_prob=dropblock_prob) for m in self.modules(): if isinstance(m, nn.Conv2d): n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels m.weight.data.normal_(0, math.sqrt(2. 
/ n)) elif isinstance(m, norm_layer): m.weight.data.fill_(1) m.bias.data.zero_() if with_nl: self._build_nonlocal(layers, non_layers) else: self.NL_1_idx = self.NL_2_idx = self.NL_3_idx = self.NL_4_idx = [] def _make_layer(self, block, planes, blocks, stride=1, with_ibn=False, dilation=1, norm_layer=None, dropblock_prob=0.0, is_first=True): downsample = None if stride != 1 or self.inplanes != planes * block.expansion: down_layers = [] if self.avg_down: if dilation == 1: down_layers.append(nn.AvgPool2d(kernel_size=stride, stride=stride, ceil_mode=True, count_include_pad=False)) else: down_layers.append(nn.AvgPool2d(kernel_size=1, stride=1, ceil_mode=True, count_include_pad=False)) down_layers.append(nn.Conv2d(self.inplanes, planes * block.expansion, kernel_size=1, stride=1, bias=False)) else: down_layers.append(nn.Conv2d(self.inplanes, planes * block.expansion, kernel_size=1, stride=stride, bias=False)) down_layers.append(norm_layer(planes * block.expansion)) downsample = nn.Sequential(*down_layers) layers = [] if planes == 512: with_ibn = False if dilation == 1 or dilation == 2: layers.append(block(self.inplanes, planes, with_ibn, stride, downsample=downsample, radix=self.radix, cardinality=self.cardinality, bottleneck_width=self.bottleneck_width, avd=self.avd, avd_first=self.avd_first, dilation=1, is_first=is_first, rectified_conv=self.rectified_conv, rectify_avg=self.rectify_avg, norm_layer=norm_layer, dropblock_prob=dropblock_prob, last_gamma=self.last_gamma)) elif dilation == 4: layers.append(block(self.inplanes, planes, with_ibn, stride, downsample=downsample, radix=self.radix, cardinality=self.cardinality, bottleneck_width=self.bottleneck_width, avd=self.avd, avd_first=self.avd_first, dilation=2, is_first=is_first, rectified_conv=self.rectified_conv, rectify_avg=self.rectify_avg, norm_layer=norm_layer, dropblock_prob=dropblock_prob, last_gamma=self.last_gamma)) else: raise RuntimeError("=> unknown dilation size: {}".format(dilation)) self.inplanes = planes * 
block.expansion for i in range(1, blocks): layers.append(block(self.inplanes, planes, with_ibn, radix=self.radix, cardinality=self.cardinality, bottleneck_width=self.bottleneck_width, avd=self.avd, avd_first=self.avd_first, dilation=dilation, rectified_conv=self.rectified_conv, rectify_avg=self.rectify_avg, norm_layer=norm_layer, dropblock_prob=dropblock_prob, last_gamma=self.last_gamma)) return nn.Sequential(*layers) def _build_nonlocal(self, layers, non_layers): self.NL_1 = nn.ModuleList( [Non_local(256) for _ in range(non_layers[0])]) self.NL_1_idx = sorted([layers[0] - (i + 1) for i in range(non_layers[0])]) self.NL_2 = nn.ModuleList( [Non_local(512) for _ in range(non_layers[1])]) self.NL_2_idx = sorted([layers[1] - (i + 1) for i in range(non_layers[1])]) self.NL_3 = nn.ModuleList( [Non_local(1024) for _ in range(non_layers[2])]) self.NL_3_idx = sorted([layers[2] - (i + 1) for i in range(non_layers[2])]) self.NL_4 = nn.ModuleList( [Non_local(2048) for _ in range(non_layers[3])]) self.NL_4_idx = sorted([layers[3] - (i + 1) for i in range(non_layers[3])]) def forward(self, x): x = self.conv1(x) x = self.bn1(x) x = self.relu(x) x = self.maxpool(x) NL1_counter = 0 if len(self.NL_1_idx) == 0: self.NL_1_idx = [-1] for i in range(len(self.layer1)): x = self.layer1[i](x) if i == self.NL_1_idx[NL1_counter]: _, C, H, W = x.shape x = self.NL_1[NL1_counter](x) NL1_counter += 1 # Layer 2 NL2_counter = 0 if len(self.NL_2_idx) == 0: self.NL_2_idx = [-1] for i in range(len(self.layer2)): x = self.layer2[i](x) if i == self.NL_2_idx[NL2_counter]: _, C, H, W = x.shape x = self.NL_2[NL2_counter](x) NL2_counter += 1 # Layer 3 NL3_counter = 0 if len(self.NL_3_idx) == 0: self.NL_3_idx = [-1] for i in range(len(self.layer3)): x = self.layer3[i](x) if i == self.NL_3_idx[NL3_counter]: _, C, H, W = x.shape x = self.NL_3[NL3_counter](x) NL3_counter += 1 # Layer 4 NL4_counter = 0 if len(self.NL_4_idx) == 0: self.NL_4_idx = [-1] for i in range(len(self.layer4)): x = self.layer4[i](x) if i 
== self.NL_4_idx[NL4_counter]: _, C, H, W = x.shape x = self.NL_4[NL4_counter](x) NL4_counter += 1 return x @BACKBONE_REGISTRY.register() def build_resnest_backbone(cfg): """ Create a ResNest instance from config. Returns: ResNet: a :class:`ResNet` instance. """ # fmt: off pretrain = cfg.MODEL.BACKBONE.PRETRAIN last_stride = cfg.MODEL.BACKBONE.LAST_STRIDE with_ibn = cfg.MODEL.BACKBONE.WITH_IBN with_se = cfg.MODEL.BACKBONE.WITH_SE with_nl = cfg.MODEL.BACKBONE.WITH_NL depth = cfg.MODEL.BACKBONE.DEPTH num_blocks_per_stage = {50: [3, 4, 6, 3], 101: [3, 4, 23, 3], 200: [3, 24, 36, 3], 269: [3, 30, 48, 8]}[depth] nl_layers_per_stage = {50: [0, 2, 3, 0], 101: [0, 2, 3, 0]}[depth] stem_width = {50: 32, 101: 64, 200: 64, 269: 64}[depth] model = ResNest(last_stride, with_ibn, with_nl, Bottleneck, num_blocks_per_stage, nl_layers_per_stage, radix=2, groups=1, bottleneck_width=64, deep_stem=True, stem_width=stem_width, avg_down=True, avd=True, avd_first=False) if pretrain: # if not with_ibn: # original resnet state_dict = torch.hub.load_state_dict_from_url( model_urls['resnest' + str(depth)], progress=True, check_hash=True) # else: # raise KeyError('Not implementation ibn in resnest') # # ibn resnet # state_dict = torch.load(pretrain_path)['state_dict'] # # remove module in name # new_state_dict = {} # for k in state_dict: # new_k = '.'.join(k.split('.')[1:]) # if new_k in model.state_dict() and (model.state_dict()[new_k].shape == state_dict[k].shape): # new_state_dict[new_k] = state_dict[k] # state_dict = new_state_dict res = model.load_state_dict(state_dict, strict=False) logger = logging.getLogger(__name__) logger.info('missing keys is {}'.format(res.missing_keys)) logger.info('unexpected keys is {}'.format(res.unexpected_keys)) return model
[ "sherlockliao01@gmail.com" ]
sherlockliao01@gmail.com
afa45ecc7ae9304c49f9038d38efcbbae76a06e2
01579f20c71dbf868daac38653947eda5a663081
/mzitu_spider/mzitu/spiders/mzitu_spider.py
6492d41782ec9dcbc7075f2cf38ec91a2c2444e4
[]
no_license
yhxuanmo/meizitu_spider
652f3b91cadda7cee384e1a4ef6e0aae6fab6a76
e7ca197827a4f5855c4567839cf8d0606d2bbcfc
refs/heads/master
2020-03-22T14:19:04.690081
2018-07-08T13:28:44
2018-07-08T13:28:44
140,169,486
0
0
null
null
null
null
UTF-8
Python
false
false
2,080
py
from scrapy.spiders import Spider from scrapy.selector import Selector from scrapy import Request from mzitu.items import ImgBoxItem class MzituSpiser(Spider): name = 'mzitu' start_urls = ['http://www.mzitu.com/',] def parse(self, response): sel = Selector(response) typy_tag = sel.xpath('//ul[@id="menu-nav"]/li') for tag in typy_tag: # 排除首页的url if tag.xpath('./a/text()').extract_first() != '首页': if tag.xpath('./a/text()').extract_first() == '妹子自拍': continue if tag.xpath('./a/text()').extract_first() == '每日更新': continue # 分类的url type_url = tag.xpath('./a/@href').extract_first() # 分类名 type_name = tag.xpath('./a/text()').extract_first() yield Request(url=type_url,callback=self.type_url_parse, meta={'type_name':type_name}) def type_url_parse(self, response): # 分页类页面解析 sel = Selector(response) curent_url = response.url max_page = sel.xpath('//div[@class="nav-links"]/a[@class="page-numbers"]')[-1].xpath('./text()').extract_first() for i in range(1,int(max_page)+1): yield Request(url= curent_url+'page/'+ str(i) + '/', callback=self.img_box_url_parse, meta={'type_name':response.meta.get('type_name')} ) def img_box_url_parse(self, response): # 套图url解析 sel = Selector(response) img_box_lis = sel.xpath('//ul[@id="pins"]/li') item = ImgBoxItem() type_name = response.meta.get('type_name') for li in img_box_lis: # 套图url item['img_box_url'] = li.xpath('./a/@href').extract()[0] # 套图名称 item['img_box_name'] = li.xpath('./span/a/text()').extract()[0] # 分类名 item['type_name'] = type_name yield item
[ "yh_xuanmo@163.com" ]
yh_xuanmo@163.com
b443d6514d029cd986721f382abe20b94dd68d0a
991223fcbb355ffa278c708a452b8978e987f6dd
/Mega_Man.py
90d5e6b0902e30ab3dc47c3da6b0a611f276f28e
[]
no_license
jstambaugh/Mega-Man
2a4be4871c21768f0355874a8b7c0813b87e8ec5
6c380327e51605b72b1aca9ec0409cbac16ac795
refs/heads/master
2021-10-20T12:22:53.260645
2019-02-27T16:03:28
2019-02-27T16:03:28
172,946,185
0
0
null
null
null
null
UTF-8
Python
false
false
3,313
py
import pygame import Sprite_Sheet import Settings vector = pygame.math.Vector2 class Player(pygame.sprite.Sprite): def __init__(self,x,y): # The self class used for the player "Mega-Man" super().__init__() self.move_x = 5 # The x moving section self.move_y = 5 # The y moving section for jumping # Arrays used to store the sprites for each animation moment self.moving_left = [] self.moving_right = [] self.not_moving = [] self.direction = "Right" # Start the game off moving to the right like traditional megaman self.shooting = "False" # All of the sprites where mega man is facing to the right sprite_sheet = Sprite_Sheet.SpriteSheet("mega_man_sprite_sheet.png") image = sprite_sheet.get_image(150, 0, 50, 50) self.moving_right.append(image) image = sprite_sheet.get_image(200, 0, 50, 50) self.moving_right.append(image) image = sprite_sheet.get_image(250, 0, 50, 50) self.moving_right.append(image) # All of the sprites where mega man is facing to the left image = sprite_sheet.get_image(150, 0, 50, 50) image = pygame.transform.flip(image, True, False) self.moving_left.append(image) image = sprite_sheet.get_image(200, 0, 50, 50) image = pygame.transform.flip(image, True, False) self.moving_left.append(image) image = sprite_sheet.get_image(250, 0, 50, 50) image = pygame.transform.flip(image, True, False) self.moving_left.append(image) # Not moving sprite indicator image = sprite_sheet.get_image(0,0,50,50) self.not_moving.append(image) # Need to different not moving images one for the left and one for the right image = sprite_sheet.get_image(0,0,50,50) image = pygame.transform.flip(image, True, False) self.not_moving.append(image) # Shooting sprites image = sprite_sheet.get_image(100,50,50,50) self.not_moving.append(image) image = sprite_sheet.get_image(100,50,50,50) image = pygame.transform.flip(image, True, False) self.not_moving.append(image) # sprite to start with self.image = self.not_moving[0] self.rect = self.image.get_rect() def update(self): #This section will blit the 
correct sprite to the screen depending on which way megaman is moving and or if he is moving pos = self.rect.x if self.direction == "Right": frame = (pos // 30) % len(self.moving_right) self.image = self.moving_right[frame] elif self.direction == "Left": frame = (pos // 30) % len(self.moving_left) self.image = self.moving_left[frame] elif self.direction == "None-right": if self.shooting == "False": self.image = self.not_moving[0] elif self.shooting == "True": self.image = self.not_moving[2] elif self.direction == "None-left": if self.shooting == "False": self.image = self.not_moving[1] elif self.shooting == "True": self.image = self.not_moving[3]
[ "noreply@github.com" ]
noreply@github.com
ee4cc8f5d99dabc24f70760a8e975fedfe84cf5f
3cd2ec8afd70235f1271ee483373e0f131f4bf33
/Datasets/GTOS_mobile_single_size.py
123fbdfe5e345327fb6254e5a4c6fd1ee027ac86
[ "LicenseRef-scancode-unknown-license-reference", "MIT" ]
permissive
joeyee/Histogram_Layer
74678772210bdcc27f22c154af24c8dbf3c61d83
d396fc3e066afded3b208588ffccdeb8d1d52cf5
refs/heads/master
2023-03-28T17:00:37.421362
2021-04-05T18:01:59
2021-04-05T18:01:59
398,550,467
0
0
MIT
2021-08-21T12:19:12
2021-08-21T12:19:12
null
UTF-8
Python
false
false
3,216
py
# -*- coding: utf-8 -*- """ Created on Mon July 01 16:01:36 2019 GTOS data loader @author: jpeeples """ import os from PIL import Image from torch.utils.data import Dataset import pdb import torch class GTOS_mobile_single_data(Dataset): def __init__(self, texture_dir, train = True,image_size = 256, img_transform = None): # numset: 0~5 self.texture_dir = texture_dir self.img_transform = img_transform self.files = [] # empty list self.targets = [] #labels #pdb.set_trace() imgset_dir = os.path.join(self.texture_dir) if train: # train #Get training file sample_dir = os.path.join(imgset_dir,'train') class_names = sorted(os.listdir(sample_dir)) label = 0 #Loop through data frame and get each image for img_folder in class_names: #Set class label #Select folder and remove class number/space in name temp_img_folder = os.path.join(sample_dir,img_folder) for image in os.listdir(temp_img_folder): #Check for correct size image if image.startswith(str(image_size)): if(image=='Thumbs.db'): print('Thumb image') else: img_file = os.path.join(temp_img_folder,image) self.files.append({ # appends the images "img": img_file, "label": label }) self.targets.append(label) label +=1 else: # test sample_dir = os.path.join(imgset_dir,'test') class_names = sorted(os.listdir(sample_dir)) label = 0 #Loop through data frame and get each image for img_folder in class_names: #Set class label #Select folder and remove class number/space in name temp_img_folder = os.path.join(sample_dir,img_folder) for image in os.listdir(temp_img_folder): if(image=='Thumbs.db'): print('Thumb image') else: img_file = os.path.join(temp_img_folder,image) self.files.append({ # appends the images "img": img_file, "label": label }) self.targets.append(label) label +=1 def __len__(self): return len(self.files) def __getitem__(self, index): datafiles = self.files[index] img_file = datafiles["img"] img = Image.open(img_file).convert('RGB') label_file = datafiles["label"] label = torch.tensor(label_file) if 
self.img_transform is not None: img = self.img_transform(img) return img, label,index if __name__ == '__main__': path = 'gtos-mobile' # train = GTOS_mobile_single_data(path) test = GTOS_mobile_single_data(path,train=False)
[ "jpeeples@ufl.edu" ]
jpeeples@ufl.edu
fe7dc646e6803d550245c1e42b84b4e935c02343
3611ad1133a1d71448f5390bda1dc711ad3f0214
/Week_0_Search/tictactoe/tictactoe.py
f87331681677a0dc9542509bfdb98bb786e03b37
[]
no_license
michaeljs-c/CS50-Artificial-Intelligence
03de8eb05c5bf5367d06e476cfc78874be314f81
c2218c82394860533645d0464ab796bd44abb92d
refs/heads/master
2023-03-16T05:16:33.459488
2021-03-05T17:37:11
2021-03-05T17:37:11
null
0
0
null
null
null
null
UTF-8
Python
false
false
3,103
py
""" Tic Tac Toe Player """ import math import copy X = "X" O = "O" EMPTY = None def initial_state(): """ Returns starting state of the board. """ return [[EMPTY, EMPTY, EMPTY], [EMPTY, EMPTY, EMPTY], [EMPTY, EMPTY, EMPTY]] def player(board): """ Returns player who has the next turn on a board. """ values = [i for n in board for i in n] xs = values.count("X") os = values.count("O") if xs == os: return "X" else: return "O" def actions(board): """ Returns set of all possible actions (i, j) available on the board. """ moves = set() for i in range(0,3): for j in range(0,3): if board[i][j] is None: moves.add((i,j)) return moves def result(board, action): """ Returns the board that results from making move (i, j) on the board. """ a = copy.deepcopy(board) """if board[action[0]-1][action[1]-1] is not None: print("That move is already taken") return board""" a[action[0]][action[1]] = player(board) return a def winner(board): """ Returns the winner of the game, if there is one. """ a = (board[0][0],board[1][1],board[2][2]) b = (board[0][2],board[1][1],board[2][0]) c = (board[0][0],board[1][0],board[2][0]) d = (board[0][1],board[1][1],board[2][1]) e = (board[0][2],board[1][2],board[2][2]) f = (board[0][0],board[0][1],board[0][2]) g = (board[1][0],board[1][1],board[1][2]) h = (board[2][0],board[2][1],board[2][2]) for n in [a,b,c,d,e,f,g,h]: if ("X","X","X") == n: return "X" elif ("O","O","O") == n: return "O" return None def terminal(board): """ Returns True if game is over, False otherwise. """ return winner(board) is not None or None not in [i for n in board for i in n] def utility(board): """ Returns 1 if X has won the game, -1 if O has won, 0 otherwise. """ if winner(board) == "X": return 1 elif winner(board) == "O": return -1 else: return 0 def minimax(board): """ Returns the optimal action for the current player on the board. 
""" if player(board) == "X": max_value(board) return action_ else: min_value(board) return action_ action_ = () def max_value(board): v = -100 if terminal(board): return utility(board) maxa = [v,None] for action in actions(board): v = max(v, min_value(result(board,action))) if v > maxa[0]: maxa[1] = action maxa[0] = v global action_ if maxa[1] is not None: action_ = maxa[1] return v def min_value(board): v = 100 if terminal(board): return utility(board) mina = [v, None] for action in actions(board): v = min(v, max_value(result(board,action))) if v < mina[0]: mina[1] = action mina[0] = v global action_ if mina[1] is not None: action_ = mina[1] return v
[ "" ]
23e3ad4e01e0f76661ea461347891416a38d216c
a71a756203a07ccaece6db440410493b3b7ff77f
/helios/plugins/builtin/rpc_websocket_proxy_through_ipc_socket/plugin.py
f25a485fbaf58be719639402aa3e72f7562385ca
[ "MIT" ]
permissive
Helios-Protocol/py-helios-node
73735dc24cd4c816d55649ed2f5df822efabfdce
691b378938f0a36bf8774dc1ee4e4370b6cf7c63
refs/heads/master
2021-08-19T23:05:18.841604
2020-01-18T19:38:33
2020-01-18T19:38:33
134,452,574
21
10
MIT
2019-06-09T04:43:14
2018-05-22T17:39:10
Python
UTF-8
Python
false
false
1,344
py
from argparse import ( ArgumentParser, _SubParsersAction, ) from helios.extensibility import ( BaseIsolatedPlugin, ) from .websocket_proxy_server import Proxy as rpc_websocket_proxy ### # This one is not used anymore because it is synchronous. There is a new asynchronous one in the json_rpc folder # This one connects through IPC as well. So it wont be stopped by admin_stopRPC. ### class RpcWebsocketProxyPlugin(BaseIsolatedPlugin): @property def name(self) -> str: return "RPC Websocket Proxy" def should_start(self) -> bool: return (not self.context.args.disable_rpc_websocket_proxy) and self.context.chain_config.is_main_instance def configure_parser(self, arg_parser: ArgumentParser, subparser: _SubParsersAction) -> None: arg_parser.add_argument( '--disable_rpc_websocket_proxy-NOT_USED', action="store_true", help="Should we disable the RPC websocket proxy server?", ) def start(self) -> None: self.logger.info('RPC Websocket proxy started') self.context.event_bus.connect() proxy_url = "ws://0.0.0.0:" + str(self.context.chain_config.rpc_port) rpc_websocket_proxy_service = rpc_websocket_proxy(proxy_url, self.context.chain_config.jsonrpc_ipc_path) rpc_websocket_proxy_service.run()
[ "admin@hyperevo.com" ]
admin@hyperevo.com
9d0ceb0a983d177fb194fa88a84647305cb10f4a
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
/PE8XQipGLS5bhpLZ5_12.py
135603cb370bc577f19f614c4e375ea0e5b17c54
[]
no_license
daniel-reich/ubiquitous-fiesta
26e80f0082f8589e51d359ce7953117a3da7d38c
9af2700dbe59284f5697e612491499841a6c126f
refs/heads/master
2023-04-05T06:40:37.328213
2021-04-06T20:17:44
2021-04-06T20:17:44
355,318,759
0
0
null
null
null
null
UTF-8
Python
false
false
98
py
def check_equals(lst1, lst2): if lst1[:] == lst2[:]: return True else: return False
[ "daniel.reich@danielreichs-MacBook-Pro.local" ]
daniel.reich@danielreichs-MacBook-Pro.local
5c79994e4891957104782181f70c52182e20dc7e
7f1f51e8c70a6434f209b7dda970a52adff33143
/renew.py
d5d2defc64ae93ef31f01ef8dac3f2d3a2c28261
[]
no_license
axel-durham/solar
534a60e06cac39d7e2c0d305e52542ec3e97a0fc
1b7c16e93ff7823b1209a7b6308e07462dabb8ab
refs/heads/master
2020-05-19T05:05:01.200926
2019-05-09T18:29:15
2019-05-09T18:29:15
184,841,482
0
0
null
null
null
null
UTF-8
Python
false
false
8,952
py
import math as m def TimeMinutesToDecimal(time): """ This function converts time from the hours.minutes.seconds format to hours.decimal_minutes Input time as a string formatted 'hour.minute.second' """ time = time.split('.') time = [int(x) for x in time] return time[0] + time[1]/60 + time[2]/3600 def TimeDecimalToMinutes(x): """ This function converts time from the hours.decimal_minutes format to hours:minutes:seconds.decimal_milliseconds """ x = float(x) decimal_minutes1, hours = m.modf(x) hours = int(hours) decimal_minutes2 = decimal_minutes1*60 decimal_seconds, minutes = m.modf(decimal_minutes2) minutes = int(minutes) seconds = round(decimal_seconds*60, 2) return str(hours) + ':' + str(minutes) + ':' + str(seconds) def LatLong(coord): """ This function converts Latitude and Longitude from a deg.minute.second format to a decimal degree format Input is is a string formatted 'deg.minutes.seconds' """ coord = coord.split('.') coord = [int(x) for x in coord] return coord[0] + coord[1]/60 + coord[2]/3600 def DayOfTheYear(date): """ This function return the day of the year Input is a string formatted 'mm/dd' Leap years are not taken into account """ date = date.split('/') date = [int(x) for x in date] days_in_months = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] return sum(days_in_months[0:date[0]-1]) + date[1] def dateToWords(date): """ This function converts a date fronm a 'mm/dd' format to a 'Month dd' format Intended for use on labels for plots """ date = date.split('/') month_words = ['Jan.', 'Feb.', 'Mar.', 'Apr.', 'May', 'Jun.', 'Jul.', 'Aug', 'Sep.', 'Oct.', 'Nov.', 'Dec.'] return month_words[int(date[0])-1] + ' ' + date[1] def I_0(N): """ This function calculate the daily solar constant as a function of the the day of the year N=1 on January 1st This function does not take leap years into account """ return 1368*(1+0.034*m.cos(2*m.pi*(N-3)/365)) def AM(theta_z): """ This function calculates Air Mass # as a function of theta_z theta_z is in degrees """ 
theta_z_rad = theta_z/360*2*m.pi return 1/(m.cos(theta_z_rad)+0.50572*(96.07995-theta_z)**(-1.3636)) def LocalToSolarTime(std_time, long_std, long_loc, N): """ This function finds solar time (decimal) when local time, standard longitude, local longitude, and day of the year are inputted input standard time as a hours.decimal_minutes input longitude in decimal degrees input day of the year as a number """ #step 1: calculate tau for the day of the year tau = 2*m.pi*N/365 #step 2: calculate ET #eq 9.9 coefficients a = [-7.3412, -9.3795, -0.3179, -0.1739] b = [0.4944, -3.2568, -0.0774, -0.1283] ET = [] for j in range(4): ET.append(a[j]*m.sin((j+1)*tau) + b[j]*m.cos((j+1)*tau)) ET_tot_decimal = sum(ET)/60 #DST correction if N >= 69 and N < 307: std_time = std_time - 1 solar_time_decimal = std_time + 4/60*(long_std - long_loc) + ET_tot_decimal return solar_time_decimal def SolarToLocalTime(solar_time, long_std, long_loc, N): """ This function finds local time (decimal) when solar time, standard longitude, local longitude, and day of the year are inputted input solar time as a string formatted 'hours.minutes.seconds' input longitude as a string formatted 'degrees.minutes.seconds' input day of the year as a number """ #step 1: calculate tau for the day of the year tau = 2*m.pi*N/365 #step 2: calculate ET #eq 9.9 coefficients a = [-7.3412, -9.3795, -0.3179, -0.1739] b = [0.4944, -3.2568, -0.0774, -0.1283] ET = [] for j in range(4): ET.append(a[j]*m.sin((j+1)*tau) + b[j]*m.cos((j+1)*tau)) ET_tot_decimal = sum(ET)/60 local_time_decimal = TimeMinutesToDecimal(solar_time) - 4/60*(long_std - long_loc) - ET_tot_decimal if N < 69 or N >= 307: local_time_decimal = local_time_decimal + 1 return local_time_decimal def Declination(N): """ This function finds declination (delta) in degrees when the day of the year is inputted N=1 on January 1st """ return 23.45*m.sin(m.radians(360*(284+N)/365)) def AltitudeMax(lat, delta): """ This function finds altitude at solar noon (alpha_max) in 
degrees when the day of the year is inputted input latitude as a string of the form 'degrees.minutes.seconds' """ return 90 - LatLong(lat) + delta def HourAngle(solar_time): """ This function finds the hour angle (omega) in degrees when the solar time is supplied Input solar time in decimal minutes """ return solar_time*15 - 180 def ZenithAngle(delta, lat, omega): """ This function finds the zenith angle (theta_z) in degrees when the declination (delta), latitude, and hour angle (omega) are supplied Input delta in degrees Input latitude in decimal degrees Input omega in degrees """ delta_radians = m.radians(delta) lat_radians = m.radians(lat) omega_radians = m.radians(omega) theta_z_radians = m.acos(m.sin(delta_radians)*m.sin(lat_radians) + m.cos(delta_radians)*m.cos(lat_radians)*m.cos(omega_radians)) return theta_z_radians/2/m.pi*360 def Altitude(delta, lat, omega): """ This function finds the altitude (alpha) in degrees when the declination (delta), latitude, and hour angle (omega) are supplied Input delta in degrees Input latitude in decimal degrees Input omega in degrees """ delta_radians = m.radians(delta) lat_radians = m.radians(lat) omega_radians = m.radians(omega) alpha_radians = m.asin(m.sin(delta_radians)*m.sin(lat_radians) + m.cos(delta_radians)*m.cos(lat_radians)*m.cos(omega_radians)) return alpha_radians/2/m.pi*360 def SolarAzimuth(delta, omega, alpha): """ This function finds the solar azimuth (gamma_s) in degrees when the declination (delta), hour angle(omega), and altitude (alpha) are supplied Input delta in degrees Input omega in degrees Input alpha in degrees """ delta_radians = m.radians(delta) omega_radians = m.radians(omega) alpha_radians = m.radians(alpha) gamma_s_radians = m.asin(m.cos(delta_radians)*m.sin(omega_radians)/m.cos(alpha_radians)) return gamma_s_radians/2/m.pi*360 def AngleOfIncidence(alpha, beta, gamma, gamma_s): """ This function finds the angle of incidencee (theta_i) in degrees when the altitude (alpha), tilt angle (beta), 
azimuth (gamma), and solar azimuth (gamma_s) are supplied Input alpha in degrees Input beta in degrees (for a horizontal surface, beta = 0) Input gamma in degrees (for a south facing surface, gamma = 0) Input gamma_s in degrees """ alpha_radians = m.radians(alpha) beta_radians = m.radians(beta) gamma_radians = m.radians(gamma) gamma_s_radians = m.radians(gamma_s) theta_i_radians = m.acos(m.sin(alpha_radians)*m.cos(beta_radians) + m.cos(alpha_radians)*m.sin(beta_radians)*m.cos(gamma_radians - gamma_s_radians)) return theta_i_radians/2/m.pi*360 def I_Beta_b(I_n_b, theta_i): """ This function finds the hourly beam radiation normal to a tilted surface when the hourly beam radiation normal to the ground (I_n_b) and the angle of incidence (theta_i) is supplied Input theta_i in degrees """ theta_i_radians = m.radians(theta_i) return I_n_b*m.cos(theta_i_radians)/2/m.pi*360 def BeamTransmissivity(theta_z, N, A=0.149): """ This function finds the beam transmissivity, tau_b Input theta_z in degrees N is the day of the year A is the altitude in km (defualts to 0.149 for Austin, TX) The 23km haze model is used (for mid-latitude) """ #Haze Model if N == 172: #June 21st, middle of summer --- N = 172 r_0 = 0.97 r_1 = 0.99 r_k = 1.02 elif N == 355: #December 21st, middle of winter --- N = 355 r_0 = 1.03 r_1 = 1.01 r_k = 1.00 elif N >= 1 and N < 172: r_0 = (0.97-1.03)/(172+10)*N + 1.0267 r_1 = (0.99-1.01)/(172+10)*N + 1.0089 r_k = (1.02-1.00)/(172+10)*N + 1.001099 elif N > 172 and N < 355: r_0 = (1.03-0.97)/(355-172)*N + 0.91361 r_1 = (1.01-0.99)/(355-172)*N + 0.9712 r_k = (1.00-1.02)/(355-172)*N + 1.0388 elif N > 355 and N <= 365: r_0 = (0.97-1.03)/(536-355)*N + 1.14768 r_1 = (0.99-1.01)/(536-355)*N + 1.04923 r_k = (1.02-1.00)/(536-355)*N + 0.98077 a_0_star = 0.4237 - 0.008216*(6-A)**2 a_1_star = 0.5055 + 0.00595*(6.5-A)**2 k_star = 0.2711 + 0.01858*(2.5-A)**2 a_0 = r_0*a_0_star a_1 = r_1*a_1_star k = r_k*k_star theta_z_radians = m.radians(theta_z) return a_0 + 
a_1*m.e**(-k/m.cos(theta_z_radians)) def I_c_b(I_0, tau_b, theta_i): """ This function finds the clear-day beam Insolation in W/m^2 Input theta_i in degrees This works for flat and tilted panels (beta > 0) """ theta_i_radians = m.radians(theta_i) return I_0*tau_b*m.cos(theta_i_radians) def DiffuseTransmissivity(tau_b): """ This function finds the diffuse transmissivity, tau_d """ return 0.271 - 0.294*tau_b def I_c_d(tau_d, I_0, theta_z, beta): """ This function find the clear-day diffuse insolation in W/m^2 Input theta_z in degrees Input beta as radians """ theta_z_radians = m.radians(theta_z) beta_radians = m.radians(beta) return tau_d*I_0*m.cos(theta_z_radians)*(1+m.cos(beta_radians))/2
[ "axel.durham@utexas.edu" ]
axel.durham@utexas.edu
4f0864a0e24ce619fcf9319fc69630fd9e35a830
235f15f47ffa9b0534b0e823d5ac95bafdc9325e
/src/tests/test_delete.py
427b50dd17652ebac7b8cbde49dcd71ca106eda1
[]
no_license
gyuseok-dev/fastapi-sample-for-docker-compose
fca891255ffcead969f4af245b35cf03de61ce13
40b7e8769cf5c164eddd146eb4584557763784c8
refs/heads/main
2023-06-06T04:54:43.087769
2021-06-30T14:08:17
2021-06-30T14:08:17
381,697,715
0
0
null
null
null
null
UTF-8
Python
false
false
803
py
from starlette.testclient import TestClient from src import crud def test_delete_todo(client: TestClient, monkeypatch): async def mock_is_todo_exist(id, db): return True async def mock_delete_todo(id, db): return None monkeypatch.setattr(crud, "delete_todo", mock_delete_todo) monkeypatch.setattr(crud, "is_todo_exist", mock_is_todo_exist) r = client.delete("/api/v1/todos/1") assert r.status_code == 204 assert r.text == "null" def test_delete_todo_invalid_id(client: TestClient, monkeypatch): async def mock_is_todo_exist(id, db): return False monkeypatch.setattr(crud, "is_todo_exist", mock_is_todo_exist) r = client.delete("/api/v1/todos/1") assert r.status_code == 404 assert r.json() == {"detail": "Item Not Found"}
[ "gyskjng@gmail.com" ]
gyskjng@gmail.com
9f8d390f8b93600719eede63855cb61081c13b1c
89d3d9438ee0dfbfdfa9e32e5bb83f7ac33de820
/flask_app.py
5a39354584e396dbd241eec8f586dddf2b55b817
[]
no_license
Vaibhav148/Quote-Dispensing-Engine
a33226ca869843e12778396612057cb26124999b
cb36885514f3076c9a27207bdc23a0bb466992be
refs/heads/master
2022-10-10T05:19:14.584571
2020-06-14T19:28:40
2020-06-14T19:28:40
272,264,399
0
0
null
null
null
null
UTF-8
Python
false
false
3,201
py
from flask import Flask, render_template, request, url_for, Markup import os import pandas as pd import numpy as np from random import randrange import nltk nltk.download('vader_lexicon') from nltk.sentiment.vader import SentimentIntensityAnalyzer app = Flask(__name__) quotes = None @app.before_request def prepare_sentiment_quote_stash(): global quotes quotes = pd.read_csv('quotes.csv') sid = SentimentIntensityAnalyzer() all_compounds = [] for sentence in quotes['quote']: ss = sid.polarity_scores(sentence) for k in sorted(ss): if k == 'compound': all_compounds.append(ss[k]) quotes['sentiment_score'] = all_compounds quotes = quotes.sort_values('sentiment_score') quotes['index'] = [ix for ix in range(0, len(quotes))] def gimme_a_quote(direction = None, current_index = None, max_index_value = 0): rand_index = randrange(max_index_value) darker = None brighter = None if current_index is None: brighter = rand_index if direction == 'brighter': brighter = current_index else: darker = current_index if darker is not None: try: current_index = int(darker) except ValueError: current_index = rand_index if current_index > 0: rand_index = randrange(0, current_index) else: rand_index = rand_index elif brighter is not None: try: current_index = int(brighter) except ValueError: current_index = rand_index if current_index < max_index_value -1: rand_index = randrange(current_index, max_index_value) else: rand_index = rand_index else: rand_index = rand_index return (rand_index) @app.route("/") def quote_me(): quote_stash_tmp = quotes.copy() max_index_value = np.max(quote_stash_tmp['index'].values) rand_index_value = randrange(max_index_value) darker = request.args.get("darker") brighter = request.args.get("brighter") if darker is not None: try: current_index = int(darker) except ValueError: current_index = randrange(max_index_value) new_index = gimme_a_quote(direction = 'darker', current_index = current_index, max_index_value = max_index_value) elif brighter is not None: try: 
current_index = int(brighter) except ValueError: current_index = rand_index_value new_index = gimme_a_quote(direction = 'brighter', current_index = current_index, max_index_value = max_index_value) else: new_index = randrange(max_index_value) random_quote = quote_stash_tmp.iloc[new_index] quote=random_quote['quote'] author = random_quote['author'] current_id = random_quote['index'] return render_template("quote.html", quote=quote, author=author, current_id=current_id,) if __name__ == "__main__": app.run(debug=True, port=5000)
[ "vaibhavathani2740@gmail.com" ]
vaibhavathani2740@gmail.com
6d4d1d60f2c789f78d8d5f3257764908e635553d
809f263b77b525549cd945c39c4c9cf2b8e6a167
/pqcrypto/sign/sphincs_shake256_192s_simple.py
6a45aa9c023c21f67bb3b6b83ca198236bb3e8f7
[ "BSD-3-Clause" ]
permissive
Kayuii/pqcrypto
bdf5014b7590dfe363baedbf47171f4b4cb25349
dd8c56fd876a397caef06a00d35537a4f9c1db28
refs/heads/master
2022-12-14T00:34:36.632689
2020-09-08T10:40:26
2020-09-08T10:40:26
null
0
0
null
null
null
null
UTF-8
Python
false
false
447
py
# Thin binding module for the SPHINCS+-SHAKE256-192s-simple signature scheme:
# it pairs this scheme's compiled cffi ffi/lib objects with the shared
# factory helpers to expose a uniform sign/verify API.
from .._sign.sphincs_shake256_192s_simple import ffi as __ffi, lib as __lib
from .common import _sign_generate_keypair_factory, _sign_sign_factory, _sign_verify_factory

# Byte sizes reported by the underlying C implementation's CRYPTO_* macros.
PUBLIC_KEY_SIZE = __lib.CRYPTO_PUBLICKEYBYTES
SECRET_KEY_SIZE = __lib.CRYPTO_SECRETKEYBYTES
SIGNATURE_SIZE = __lib.CRYPTO_BYTES

# Public module API: callables bound to this scheme's ffi/lib pair.
generate_keypair = _sign_generate_keypair_factory(__ffi, __lib)
sign = _sign_sign_factory(__ffi, __lib)
verify = _sign_verify_factory(__ffi, __lib)
[ "inbox@philonas.net" ]
inbox@philonas.net
ab4d888257c3c89753a638575b9018d4733a7a00
087fdfcd157fa626f63ef22da7813b66531c18e9
/personality_analysis/vocab/vocab.py
538bf00325559c827348b3938e404be84db49a56
[ "MIT" ]
permissive
kayzhou/exp_user_behaviour
5316c5e04a1c624631219b46fdb32e16a17ef2e2
58490763c76880eb84211a86bcadb31b868b5c13
refs/heads/main
2023-07-26T00:51:50.486012
2021-08-25T02:37:59
2021-08-25T02:37:59
374,299,484
0
0
null
null
null
null
UTF-8
Python
false
false
48,603
py
financial_keywords = ['富有', '富于', '饶富', '超富', '最富', '饶有', '极富', '国最', '最高档', '装富', '贵有', '高富', '财产权', '人财物', '财产权利', '私产', '家庭财产', '个人收入', '巨额财产', '债权人', '私有财产', '共同财产', '合法财产', '家庭收入', '居民家庭', '财产性', '个人财产', '家庭事务', '财政支出', '公共财政', '财权', '地权', '合法权利', '公民权利', '共同利益', '生命安全', '生命权', '财产损失', '谋财害命', '房产信息', '财税', '装穷', '装钱', '有钱有势', '穷光蛋', '脏钱', '木有钱', '钱有', '很富', '搞钱', '太有钱', '有钱有闲', '有权人', '有钱出钱', '真有钱', '赚不赚钱', '钱那', '木有买', '闲钱', '有闲', '钱少', '挣大钱', '钱真', '挣了钱', '赚了钱', '没赚', '钱你', '那钱', '钱当', '没钱买', '钱就会', '钱比', '钱才', '钱让', '钱太', '有了钱', '太抠', '钱还没', '有钱', '贪钱', '莫装', '穷死', '哭穷', '金钱至上', '金钱游戏', '钱为', '价值观念', '钱权', '权钱', '徼', '权钱交易', '诉权', '分权', '印钱', '权宜', '钱与', '金钱观', '拜金主义', '钱而', '奴化', '钱养', '钱来', '钱更', '钱买', '赚回', '玩钱', '钱能', '钱用', '钱用在', '钱要', '钱去', '投资收益', '税收收入', '毛收入', '工人工资', '员工工资', '低工资', '低收入者', '净收入', '平均收入', '销售总额', '全年收入', '营业收入', '业务收入', '净营收', '零售额', '纯收入', '人均收入', '收入水平', '中低收入', '农民收入', '工资收入', '高收入者', '国民收入', '低收入', '居民收入', '总收入', '销售收入', '净利', '营收', '营业额', '外汇收入', '年收入', '实际收入', '增加收入', '财政收入', '财政投入', '中低收入者', '非税', '富人', '穷人家', '有钱人', '尿糖', '狂话', '乍富', '钱多人', '穷人', '钱都让', '贫富不均', '杀富济贫', '平权', '贫富悬殊', '中国贫富', '劫富济贫', '民穷', '为富不仁', '贫弱', '别替', '购房人', '买方', '房屋买卖', '买卖人', '买房者', '供房', '赎人', '买帐', '抵偿', '买房子', '亿万富豪', '大富豪', '富豪榜', '富豪', '新富', '民富国强', '中产阶层', '贫穷落后', '家财万贯', '家财', '腰缠万贯', '腰缠', '万户侯', '家资', '万贯', '家产', '家业', '身家', '贫家', '财阀', '兴家', '聚敛', '农资', '金融资产', '资不抵债', '国家税收', '国有资本', '百万千万', '家贫', '富户', '孩子家', '法家', '病愈', '贫寒', '家翁', '富家子弟', '万利', '本利', '一千亿', '一万四', '土豪', '大款', '大数', '大手大脚', '结款', '税款', '赃款', '货款', '借款人', '工程款', '外逃', '赔偿款', '赔款', '巨款', '煤老板', '富家女', '中国富豪', '大富翁', '大富贵', '富翁', '大富', '斗富', '富贾', '亿万富翁', '财富榜', '85亿', '百万富翁', '首富', '巨商', '富家', '巨富', '王公', '富婆', '官商', '负翁', '胡润', '胡润百富', '排名榜', '新财富', '榜上', '高居榜首', '金牌榜', '国家元首', '富商', '股神', '农家子弟', '中产阶级', '小资产阶级', '资产阶级', '无产', '工薪阶层', '有产者', '负资产', '中奖', '获奖者', '得奖', '名单', '获奖人', '未获奖', '中奖者', '得奖者', '获奖项', '中奖人', '得奖人', '中奖号码', '中头奖', '中奖号', '纪念奖', '颁奖人', '获奖', '未获', 
'兑奖', '弃奖', '奖池', '号码牌', '抽奖券', '单注', '帐号密码', '发奖', '周奖', '第三波', '抽奖', '选中', '没中', '抽选', '五等奖', '小奖', '四等奖', '刮奖', '机率', '行大运', '发财', '发发发', '万事大吉', '财源滚滚', '财运亨通', '宏图大展', '大吉大利', '发家致富', '脱贫致富', '致富', '发横财', '升官发财', '致富经', '发家', '勤劳致富', '兴旺发达', '升官', '发财致富', '升迁', '发大财', '谋财', '步步高升', '财路', '高升', '官运', '梦而已', '梦者', '益发', '国难财', '梦就', '恭贺新禧', '财神到', '马年行', '招财进宝', '接财神', '财源', '阖家欢乐', '和气生财', '生财有道', '生财之道', '聚财', '财气', '散财', '正财', '顺意', '阖家幸福', '事业兴旺', '恭喜发财', '等奖品', '赢大奖', '大奖', '裸钻', '来赢', '奖券', '巨奖', '中大奖', '来晒单', '礼品券', '等奖项', '等你拿', '拿大奖', '礼品', '单赢', '头奖', '创新奖', '单项奖', '优胜奖', '奖给', '票券', '抽送', '赠券', '欠缴', '发红包', '赚钱', '亏钱', '养家', '管钱', '赚大钱', '好赚', '理亏', '才玩', '钱堆', '大笔', '大钱', '大笔钱', '挣', '就赚', '多赚', '攒点', '挣点', '钱收', '钱得', '赚过', '挣来', '生命财产', '没钱', '粪土', '用钱', '均贫富', '买房人', '穷国', '一本万利', '壕', '傍大款', '携款', '款爷', '中产', '开奖', '抽中', '中奖率', '发财梦', '生财', '发财树', '奖品', '比较发达', '欠发达', '高度发达', '发达国家', '大红包', '抢红包', '挣钱', '赚', '难赚'] # 432 popularity_keywords = ['名作家', '词曲作家', '曲名', '作曲者', '遗作', '名作', '名剧', '剧作家', '词曲创作', '曲作者', '成家立业', '功成名就', '竖子成名', '无成', '成于', '功名富贵', '争名', '家翁', '名臣', '自矜', '成名成家', '小有成就', '功成身退', '功名', '建功立业', '那就成', '巨大成就', '竖子', '臭名', '成钢', '诗名', '褚遂良', '望子成龙', '名讳', '病成', '不成器', '伟大成就', '成批', '便成仁', '成仁', '有名气', '赫赫有名', '鼎鼎有名', '小有名气', '出名', '著名', '盛名', '大名鼎鼎', '有名有姓', '驰名', '系出名门', '倾巢而出', '名闻遐迩', '闻名于世', '遐迩闻名', '驰名于世', '闻名遐尔', '闻名', '世界闻名', '有名', '名气', '有名无实', '颇负盛名', '享有盛名', '着名', '出名门', '名门望族', '名匠', '名宅', '名门', '名牌产品', '名产', '而出名', '衙口', '报了名', '出了门', '出产地', '土产', '名吃', '名优', '生产地', '赤味', '土鸭', '名窑', '农产', '驰名中外', '赫赫的', '享有盛誉', '威名', '鼎鼎大名', '专有名词', '名声在外', '出了名', '知名', '著名演员', '著名品牌', '著名人士', '齐名', '最富', '久负盛名', '享誉', '享誉中外', '盛誉', '久闻大名', '大名府', '久仰大名', '单姓', '大姓', '马姓', '徒有其名', '复姓', '山名', '驰名世界', '远近驰名', '闻名中外', '名闻', '闻名全国', '名扬四海', '名扬中外', '相互理解', '相互信任', '互相爱护', '互爱', '互相理解', '互敬', '相互依赖', '尊重人权', '相互间', '互敬互爱', '值得尊敬', '尊重', '令人尊敬', '敬重', '值得羡慕', '受尊敬', '值得称赞', '尊重知识', '值不值得', '敬佩', '值得尊重', '崇敬', '敬仰', '钦佩', '尊敬', '敬慕', 
'尊老敬老', '民族尊严', '国格', '尊严感', '健全人格', '多重人格', '保障人权', '重惩', '严肃性', '人格尊严', '道德感', '沉重感', '相互尊重', '荣誉感', '珍存', '疼惜', '珍惜', '自珍', '惜才', '弥足', '怜惜', '爱惜', '轻视', '啧啧称奇', '交口称赞', '赞口不绝', '称赞', '赞叹不已', '夸赞', '叹服', '连称', '称羡', '广为人知', '称颂', '称奇', '津津乐道', '为人所知', '赞誉', '赞赏', '赞许', '广受', '赞扬', '盛赞', '褒奖', '称道', '高度评价', '颂扬', '表扬', '赞美', '石氏', '啧啧称赞', '众口', '赞不绝口', '有口皆碑', '大加', '赏识', '夸她', '夸奖', '令人钦佩', '评得', '值得反思', '阈值', '颇得', '惊叹不已', '惊叹', '令人叹服', '赞道', '叹为观止', '赞颂', '令人称奇', '哑然', '啧', '认同度', '认知度', '诚信度', '信誉度', '可控性', '可靠性', '美誉度', '贡献度', '接受程度', '程度较高', '赞同', '认同感', '认可', '苟同', '同意', '完全同意', '思想认识', '感性认识', '并不认为', '嘉奖', '谬奖', '褒扬', '抬爱', '荣誉奖', '赏赐', '认可度', '契合度', '高度肯定', '增长幅度', '信任感', '信赖感', '认同', '感受力', '安定感', '不信任感', '充分认识', '首肯', '充分反映', '充分条件', '充分考虑', '定给', '敢肯定', '肯请', '定案', '称许', '嘉许', '接受批评', '充分肯定', '稳定度', '高度一致', '高炳义', '高度重视', '肯定句', '威望', '声望', '名位', '企望', '品级', '声名显赫', '权势', '名望', '声誉', '享有声誉', '相差悬殊', '自由度', '权威性', '频度', '望族', '士族', '名居', '衙署', '企求', '物质欲望', '毫无希望', '渴盼', '悲观失望', '热望', '众望', '希求', '冀望', '企图心', '德高', '位高权重', '望众', '品德高尚', '行尊', '位尊', '向老', '望尘', '治民', '民怨', '于国', '生民', '瞻望', '民力', '忧民', '徼', '威廉堡', '德威', '威桑', '高洞', '高城', '威尼斯水城', '海德威', '德格县', '拉纳', '高庙', '王德威', '丘德威', '威拉德', '德威龙', '沃邦', '文德', '芒德', '高威', '武德', '怅望', '望之', '望云', '仰首', '望而', '举目望去', '喟', '迢遥', '南望', '望尽', '民望', '不负重望', '众志', '所望', '众望所归', '所归', '众人皆醉', '主导权', '领先地位', '主导者', '占据主动', '权位', '权欲', '身居高位', '权重', '财权', '官位', '重利', '归其位', '声名', '商誉', '名声', '争权', '利禄', '争权夺利', '本位', '不畏强权', '兴废', '要面子', '丢面子', '碍于面子', '不留情面', '死要面子', '情面', '留钱', '爱面子', '不孝子', '留面子', '没面子', '孙子辈', '甩脸子', '丢饭碗', '兜圈子', '要债', '死不要脸', '要脸', '装孙子', '碍于', '有求于', '不体面', '没规矩', '见过世面', '没脸', '没脑子', '没骨气', '要死不活', '要死要活', '贪财', '给脸', '力哈', '脸子', '挺凶', '抹不掉', '脱不开', '解不开', '晕开', '推不开', '脱开', '抹粉', '拔开', '抹头', '那开', '不讲情面', '忸怩', '不领情', '寡情', '无情无义', '不留情', '场面上', '伤自尊了', '不自重', '妄自尊大', '自尊', '伤害罪', '自伤', '自愧', '暗自神伤', '伤财', '伤自尊', '害己', '自尊心', '作贱自己', '狂妄自大', '骄傲自大', '妄自', '自欺', '自高自大', '自惭', '枉自', 
'自卑心理', '自信心', '自我中心', '自卑感', '自立自强', '才会爱', '自强自立', '学会爱', '寡断', '自励', '自我牺牲', '严于律己', '自我剖析', '自我解嘲', '自我教育', '自我管理', '自我实现', '自我暗示', '牺牲者', '自我认识', '自我表现', '敝帚自珍', '自悲', '自爱', '自贱', '自谦', '自恃', '自轻自贱', '自毙', '性心理', '逆反心理', '恐惧心理', '自私自利', '心理战', '心理疾病', '过于自信', '自信力', '信仰自由', '缺欠', '缺乏经验', '偏信', '仰慕者', '爱岗', '深恨', '爱慕', '兼爱', '谈爱', '爱生恨', '向爱', '倾慕', '仰慕', '爱慕者', '人心神', '爱怜', '倾吐', '真纯', '眷念', '怜爱', '爱憎', '怜悯之心', '生畏', '楚楚可怜', '虔敬', '敬服', '令人敬畏', '敬虔', '礼敬', '曝光', '照曝光', '网友曝', '被疑', '自曝', '媒曝', '爆出', '大曝光', '遭', '艳照', '谍照', '艳照门', '曝', '爆乳', '曝光量', '曝光率', '曝光度', '溢美', '赞美声', '颂赞', '旋律优美', '赞歌', '朗诵诗', '法语歌', '吟颂', '唱诗', '英文歌曲', '长诗', '传颂', '颂歌', '歌颂', '传诵', '歌吟', '唱国歌', '歌功颂德', '卓有成就', '小成', '学有所成', '学业有成', '杰出成就', '杀身成仁', '居功', '方能成', '头功', '陈立夫', '芮成刚', '成家', '立业', '家业', '功名利禄', '不求名利', '功业', '浮名', '追求名利', '建功', '丰功伟业', '奇功', '战功', '伟绩', '成大业', '辉煌成就', '巨大变化', '铸成大错', '成就动机', '巨大作用', '颇受', '颇受欢迎', '谬赞', '赞叹', '评议', '德高望重', '显赫', '声名大噪', '蜚声中外', '声名鹊起', '名声大噪', '名声鹊起', '名声大振', '蜚声国际', '有声书', '惠誉', '相有声', '悬殊', '相差太多', '贫富悬殊', '相差不多', '不相容', '相差太大', '相距甚远', '不相称', '相异', '篡位', '下位', '圣名', '争名夺利', '争利', '争功', '党争', '污名', '有权有势', '当权', '钱权', '权钱', '有钱有势', '权柄', '成名作', '易图', '名歌手', '早成', '珍视', '主导地位', '给面子', '抹不开', '缺乏自信', '过曝', '曝光台', '赞美诗', '言施'] # 627 image_keywords = ['吸引力', '赶潮流', '时髦', '时尚女装', '时尚饰品', '女装品牌', '时尚女性', '时装节', '时尚服饰', '时装展', '秋冬装', '时装剧', '街头时尚', '时尚秀', '裙装', '裙裤', '下装', '休闲裤', '休闲装', '西装裤', '翻领', '小脚裤', '毛衫', '套衫', '包型', '板型', '廓形', '搭扣', '挺括', '壁布', '斜纹', '短款', '便鞋', '美观大方', '造形', '美观', '运动感', '款型', '外形上', '纤秀', '形貌', '果形', '表面', '外观', '光鲜', '外貌', '内柔', '表相', '华丽却', '外型', '姣美', '身材苗条', '健硕', '细瘦', '纤瘦', '俊俏', '壮硕', '身形', '高挑', '俊朗', '气质佳', '温良', '文品', '形象气质', '年轻美貌', '气质高雅', '样貌', '贤淑', '品德高尚', '容颜', '面容', '姣好', '相貌', '花容月貌', '品貌', '老态', '丽质', '高雅而', '高贵而', '娴雅', '雅淡', '高贵和', '高贵又', '英气', '透气性', '气色好', '气质', '爆发力', '人格魅力', '英气逼人', '气韵', '天生丽质', '端丽', '玉质', '流丽', '清俊', '兰心蕙', '雅丽', '婉丽', '古拙', '匠气', '乡土气息', '文艺腔', '痞气', '范有', '温文儒雅', '良好形象', 
'形象思维', '形象化', '贵族般', '华丽而', '雍容', '高贵', '贵而', '华而', '穿着者', '裤装', '乔装打扮', '打扮', '衣着', '打底衫', '穿着打扮', '穿插着', '褴褛', '紧身裙', '穿着', '女舞者', '穿衣戴帽', '笔挺', '整身', '装束', '晚装', '得体', '衣衫褴褛', '衣衫不整', '纱衣', '薄衣', '夏衣', '僧袍', '肩披', '破衣', '裙裾', '曳地', '身着', '身穿', '睡裤', '短衣', '穿', '唐装', '皮夹克', '西装', '没穿', '戴帽', '带帽', '长衣', '衣帽', '白帽子', '裤袜', '套头毛衣', '绒裤', '血衣', '貂皮大衣', '衣衫', '遮体', '衣冠不整', '长袖衫', '薄衫', '紧身衣', '民族服装', '服饰文化', '民族风格', '服饰', '服饰店', '民族色彩', '传统服装', '妆饰', '衣裙', '藏装', '戏装', '衣料', '身材', '苗条身材', '魔鬼身材', '保持身材', '洁身自好', '白皙', '瘦削', '脸型', '腰身', '修长', '身型', '瘦小', '体态', '塑身', '好身材', '常保', '保持警惕', '保身', '长期保持', '苗条', '身材高大', '魔鬼般', '纹眉', '扮鬼脸', '身材矮小', '机灵鬼', '鬼气', '瘦身操', '竖条', '人高马大', '瘦高', '高大而', '身居高位', '背身', '娇小玲珑', '娇俏', '娇弱', '娇憨', '容貌', '易老', '娇容', '渐老', '岁月流逝', '枯槁', '老心', '姿容', '面庞', '脸庞', '面露', '面颊', '蜡黄', '长相', '平平的', '面相', '才貌', '美丑', '美眷', '女性美', '恬美', '年轻貌美', '年轻夫妻', '貌美', '美女蛇', '周美青', '年轻夫妇', '年逾花甲', '闭月羞花', '花容', '花月', '貌若', '花想容', '雪月', '春花秋月', '花易', '风花', '疲态', '媚态', '老态龙钟', '窘态', '颇显', '显形', '老气', '渐显', '年老色衰', '势态', '外在', '外显', '外因', '内在联系', '客观存在', '外境', '外化', '内在', '性感女郎', '性感美', '港女', '女裙', '新女性', '扮女', '邦女郎', '东方女性', '童女', '性感女', '野性美', '欧美大', '知性美', '东方美女', '韩范', '金发女郎', '兔女郎', '涩女郎', '摩登女郎', '色女郎', '豪乳', '半裸', '爆灯', '闪爆', '巨乳', '爆炸头', '裸模', '嫩模', '露背', '爆乳', '扮起', '迷死人', '火儿', '烈焰红唇', '狂野', '柔媚', '母性', '粗狂', '妖异', '风与', '抹胸', '包臀裙', '背带裙', '小衫', '吊带裙', '纱裙', '皮裙', '垫肩', '低胸', '丰臀', '御女', '宇豪', '香肩', '陈豪', '帅酷', '舞衣', '女人香', '男人味', '人味', '女人缘', '尚感', '俐落', '柔嫩', '蛙肉', '丰腴', '肥腴', '筋肉', '滑腻', '韧韧', '颗粒感', '光泽感', '细皮嫩肉', '大头贴', '自拍照', '张萌照', '小照', '张照', '照来', '寸照', '靓照', '照片儿', '上镜头', '演出照', '帅滴', '帅照', '张小花', '张小然', '造型照', '果照', '照发', '上照', '素颜照', '近照', '证件照', '裸照', '生活照', '张带', '偷发', '张美照', '洗照片', '照上', '图片吧', '大片儿', '来组', '选片', '纸片儿', '毛片儿', '洗出来', '张自拍', '大头照', '可拍照', '张合照', '才拍', '硬照', '明星照', '定妆照', '私照', '私房话', '新造型', '发型', '头型', '扎头发', '烫个', '发箍', '梳头发', '盘发', '短发', '寸头', '真俊', '新发型', '背头', '头箍', '挺帅', '红头发', '长发', '齐刘海', '刘海', '剪短', '超短', '披肩发', 
'短衫', '脸形', '高鼻梁', '体型', '唇纹', '造型', '新郎官', '新造', '型仔', '准新郎', '新机型', '发饰', '盘头', '发簪', '发夹', '发髻', '长卷发', '盘扣', '绑带', '美发师', '发型师', '造型各异', '拗', '凹造型', '花型', '人物造型', '烫头发', '留头发', '黄头发', '吹头发', '发剪', '头发丝', '长头发', '理发师', '发型设计', '剪发', '剪头发', '造型师', '美甲师', '民族舞', '彝族人', '畲族', '服装设计', '服装鞋帽', '服装类', '服装师', '服装服饰', '服装店', '服装节', '首饰店', '饰品店', '品牌服饰', '时装店', '名牌服饰', '服饰品牌', '家饰', '内衣店', '国际服装', '服装学院', '时装设计', '流行时尚', '鞋帽', '鞋包', '品牌服装', '皮装', '汉文化', '民族服饰', '龙山文化', '蜀文化', '徽文化', '文化性', '红山文化', '楚文化', '服务化', '奇装异服', '长袍', '韩服', '便服', '藏服', '蒙族', '装饰品', '珠宝饰品', '饰物', '首饰', '家居饰品', '耳饰', '手饰', '银饰', '头饰', '装饰画', '佩饰', '眼线', '唇妆', '眼影', '底妆', '裸妆', '腮红', '眼线笔', '眼线液', '眼唇', '定妆', '小妆', '妆容', '妆点', '打扮起来', '淡妆', '浓妆艳抹', '俏丽', '脱妆', '眼妆', '减龄', '遮瑕', '上妆', '裸色', '修颜', '粉底霜', '贴服', '祛皱', '浓妆', '化浓妆', '烟熏妆', '卸妆后', '卸完', '补妆', '敷上', '化妆水', '卸妆液', '护唇膏', '唇色', '唇线', '塑型', '斜肩', '熟龄', '光采', '妆扮', '装扮成', '装点', '扮萌', '短打', '入时', '跟着打', '撩起来', '抖起来', '梳起来', '踢起来', '捆起来', '拽起来', '滑起来', '演起来', '鼓起来', '梳洗打扮', '梳妆', '梳妆镜', '不得体', '扮酷', '嗲得', '乖得', '丑得', '装萌', '梳妆打扮', '梳洗', '洗洗涮涮', '洗涮', '洗濯', '打毛衣', '打昏', '扮得', '酷男', '装酷', '男扮女装', '军装', '装鬼', '色鬼', '遇鬼', '捣鬼', '扮作', '乔装', '扮成', '化妆', '彩妆师', '化妆间', '摄像师', '萧峻', '冯海', '化妆箱', '化妆棉', '化妆镜', '化妆袋', '卸妆水', '韩妆', '试妆', '彩妆品', '化妆刷', '花妆', '护肤品', '药妆店', '日用品', '美容品', '化妆品行业', '小烟', '美美容', '风小', '化妆盒', '化妆包', '硫化', '过厚', '化脓', '化妆室', '化妆舞会', '梳妆台', '更衣间', '试衣间', '小间', '除皱', '脱色', '湿敷', '防脱发', '护肤霜', '有吸引力的', '诱惑力', '吸引', '赶时髦', '髦', '酷似', '外表', '气场', '浪漫气质', '文气', '着装', '衣装', '娇小', '外在美', '惹火', '野性', '女人味', '肉感', '玉照', '皂片', '私房照', '服装', '饰品', '装扮', '扮鬼', '化妆师', '妆', '化妆品'] # 659 affiliation_keywords = ['父子关系', '夫妻关系', '亲戚关系', '亲密关系', '密切关系', '父女关系', '邻里关系', '互动关系', '相互关系', '关系人', '密切相关', '密切联系', '亲子关系', '两性关系', '紧密联系', '紧密结合', '互相联系', '伙伴关系', '婚姻观', '关系紧张', '姻亲', '婚姻制度', '婚姻状况', '暧昧关系', '休戚相关', '从属关系', '搞关系', '父系', '婆媳关系', '男女关系', '父女情', '正当关系', '友好合作', '友好往来', '友好相处', '搞好关系', '和平友好', '外交关系', '至亲至爱', '亲人或', '无话不谈', '至亲', '至亲好友', '血亲', 
'志趣相投', '亲亲的', '亲密', '朋友们', '好朋友', '亲戚朋友', '博友', '女性朋友', '老朋友', '男女朋友', '友', '亲朋好友', '戚友', '新朋友', '老友', '旧友', '新老朋友', '老战友', '新朋', '故友', '老同事', '明友', '把兄弟', '盟兄弟', '兄弟', '周兄', '大弟', '兄弟情', '亲兄妹', '难兄难弟', '四兄弟', '互祝', '亲朋', '祝家人', '友朋', '亲友们', '走亲访友', '祝全家', '亲切友好', '同窗好友', '加为好友', '谢友', '崔智友', '友台', '友有', '玩友', '友问', '道友', '文友', '知心朋友', '结识', '狐朋狗友', '良师益友', '良朋', '挚诚', '友聚', '够朋友', '友尽', '探友', '酒肉朋友', '租男友', '串亲戚', '亲戚', '走亲戚', '亲戚家', '志同道合', '同道', '有志者', '志趣', '合作者', '同心同德', '同工同酬', '共患难', '同行者', '伴同', '志同道合者', '相投', '兴趣爱好', '个人兴趣', '相契', '矢志', '相互信任', '臭味相投', '投缘', '相熟', '亲密无间', '道不同不相为谋', '同道中人', '尚道', '同辈', '同流', '携手同心', '意中人', '性情中人', '圈中人', '同路人', '同席', '同甘苦', '同日生', '爱好音乐', '共同之处', '共同话题', '共同利益', '互相爱护', '互勉', '结伴同游', '结伴同行', '游伴', '约同', '同行人', '履约', '搭伴', '商业伙伴', '同甘', '情投意合', '意气相投', '老相识', '同德', '同心协力', '同心结', '共同理想', '共同犯罪', '共苦', '同甘共苦', '同义', '同生共死', '荣辱与共', '同侪', '情义', '战友情', '情意', '深情厚谊', '共叙', '恩情', '情分', '交情', '情份', '增进感情', '增进', '相互促进', '互相促进', '友谊宫', '友谊桥', '之谊', '友谊路', '亲情', '友谊', '伉俪情深', '战友', '老战士', '情深意重', '情感世界', '世态人情', '人情世故', '思想感情', '感情丰富', '情感生活', '情感故事', '感情线', '不谙世事', '感情交流', '真情实感', '动感情', '重感情', '感情用事', '感情投资', '真心真意', '真情实意', '纠葛', '感情破裂', '不合情理', '情绪性', '纠缠不清', '起纠纷', '感情', '情感小说', '情感话题', '情感化', '感情世界', '感情纠葛', '感情戏', '夫妻感情', '真感情', '激情戏', '苦情戏', '言情片', '情色片', '哭戏', '伤感情', '解法', '破裂', '感官世界', '内心世界', '生活感', '婚姻生活', '生命感', '夫妻生活', '陪伴', '陪着', '陪着她', '相陪', '谢谢有', '搀扶着', '伴随', '曾陪我', '都陪', '牵挂着', '陪', '陪伴着', '有人陪', '陪他', '来陪', '作伴', '相随', '相携', '相依', '相扶', '与共', '相守到', '伴', '长相守', '再相逢', '来陪我', '陪到', '陪床', '陪住', '来陪你', '都爱我', '都爱你', '都祝你', '陪我玩', '来家', '奉陪', '来你', '想着她', '牵着手', '陪笑', '不知有汉', '难觅', '善知识', '知彼知己', '知己者', '无知己', '不知所终', '不知为不知', '相思苦', '知遇', '乐卡', '银卡', '知音难觅', '星享卡', '申卡', '运通卡', '铂金卡', '声卡', '卡密', '此卡', '亲眷', '亲人', '亲如家人', '亲疏', '双亲', '亲属', '情侣们', '恋人们', '许下爱', '浓情蜜意', '小情侣', '挚爱', '共度', '新婚夫妇', '向家人', '全家人', '两家人', '傣家人', '来英', '和和睦睦', '牵上', '知心人', '负心人', '扑上去', '美人痣', '心魂', '走上台', '再踏上', '心贴心', '才会爱', '自爱', 
'成人达', '推己及人', '人不为己', '爱民如子', '才爱', '不如人意', '初恋情人', '爱人', '旧情人', '爱亲', '至死不渝', '备至', '六亲', '至爱', '来亲', '夫妻恩爱', '永浴爱河', '恩爱夫妻', '恩爱', '爱相随', '于爱', '甜甜蜜蜜', '阖家幸福', '合家欢乐', '阖家欢', '家庭幸福', '阖家欢乐', '阖家', '合家', '阖家团圆', '合家幸福', '顺意', '祝全', '祝全国', '祝健康', '祝杨', '祝张', '祝老', '祝您健康', '欢欢乐乐', '阖家团聚', '家团圆', '团团圆圆', '团圆日', '生活美满', '祝你和', '和睦', '美满', '全家欢', '和和美美', '睦睦', '和和气气', '和睦相处', '和睦家医院', '吉祥和', '和平共处', '和平相处', '共处', '合谐', '于与人', '相互理解', '和气生财', '和睦村', '冀连梅', '何睦', '和谐家园', '和家园', '谐和', '侠气', '一团和气', '客客气气', '重情义', '和善', '圆圆满满', '美满生活', '美满婚姻', '气气', '和气', '莫生气', '暖暖和和', '呕气', '父母辈', '岳父母', '给母亲', '赡养父母', '父母双亡', '孝敬父母', '祖父母', '父母心', '父母', '冤亲', '眷属', '非亲非故', '亲恩', '任人唯亲', '那家人', '向家', '向人', '替人家', '乡邻', '自家人', '谨向', '亲如一家', '亲如兄弟', '苗家人', '亲家母', '父母亲', '故去', '守寡', '亲生父母', '老父亲', '畅叙', '把盏', '斟茶', '挚友', '益友', '师友', '良师', '善友', '知友', '叙旧', '叙叙旧', '旧识', '诸友', '诤友', '花友', '寻亲访友', '故交', '一友', '亲友', '宾朋', '真诚待人', '诚能', '真诚相待', '挚交', '心慈', '于诚', '诚诚恳恳', '心诚', '虔敬', '结善缘', '网友聚会', '送友人', '乐聚', '访友', '酒逢知己', '酒逢知己千杯少', '相知相惜', '知己', '贵在知心', '相识已久', '自斟', '知已', '酌酒', '自斟自饮', '才知酒', '自知者', '解己', '知己知彼', '知之者', '知之为知之', '士为知己者死', '谷丽媛', '小色', '红颜薄命', '重色轻友', '红颜', '待字闺中', '知心话', '千杯不醉', '三杯酒', '几杯酒', '知者', '悦己者', '谈谈天', '说古', '谈谈心', '叙谈', '对酌', '婚姻家庭', '婚姻关系', '感情生活', '婚姻', '关系型', '包办婚姻', '婚姻大事', '婚姻登记', '婚恋观', '爱情观', '婚姻法', '婚前婚后', '恋爱观', '家庭事务', '非婚', '友好关系', '亲密度', '朋友', '好兄弟', '耍朋友', '共同爱好', '情谊', '增进友谊', '友情', '友谊医院', '情感', '相伴', '知音卡', '爱侣', '家人', '心上人', '爱人如己', '恋人', '恩恩爱爱', '睦', '蓝颜', '碳粉', '古也'] # 549 selfacceptance_keywords = ['学业有成', '成长', '长成', '成长期', '增长期', '成长性', '成熟期', '成长型', '长期性', '高速成长', '高成长', '中长期', '个人成长', '小型企业', '专家型', '长期投资', '发展型', '专业型', '畸型', '个人经历', '个人努力', '成长史', '个人修养', '个人所有', '年成长', '茁壮成长', '史是', '创业史', '奋斗史', '小环境', '变宽', '环境恶劣', '适应能力', '反应力', '适应性', '应变力', '耐受力', '应变能力', '力弱', '抵抗能力', '应激反应', '适应症', '应变', '适应力', '随机应变', '抗压性', '感悟力', '思维力', '兴奋期', '短时期', '周期短', '自适', '自我调节', '弥合', '内调', '协调性', '安适', '调治', '适时地', '调节作用', '体察', '相适应', '适用性', '应用性', 
'普适性', '自适应', '融合性', '适应期', '艰难困苦', '窘困', '愁苦', '苦无', '困窘', '忍辱', '苦累', '恚', '适应环境', '聪颖', '聪明能干', '高智慧', '才智', '聪明才智', '聪敏', '明辨', '智性', '聪慧', '不智', '真智慧', '智是', '聪明智慧', '大智若愚', '才是大', '谋略', '胆略', '智慧', '高智', '高智伟', '商业智能', '德智体', '测智商', '智慧之门', '智谋', '普门', '众妙', '文殊师', '方便之门', '戒定慧', '智语', '智之虎', '智珠', '贤慧', '真和尚', '真聪明', '真邪', '理智型', '真善', '真与假', '禀性', '空性', '精神性', '智识', '能动性', '性灵', '寓于', '明心见性', '哲理性', '克服困难', '克服', '改善服务', '敬服', '碰到困难', '不畏艰难', '都佩服', '判断能力', '服输', '重重困难', '困难重重', '解决困难', '灾难深重', '不怕困难', '步履艰难', '遇到困难', '碰到问题', '磕磕碰碰', '碰得', '难以解决', '妥善解决', '解难', '急需解决', '决择', '解决矛盾', '解困', '解决目前', '就地解决', '能克服', '心理障碍', '破除', '摒除', '攻克', '不畏艰险', '艰难险阻', '险阻', '知难而上', '畏难', '耐苦', '艰苦环境', '困苦', '苦境', '困厄', '受苦受难', '危机重重', '十分困难', '不怕苦', '不怕牺牲', '不怕累', '二不怕死', '难缠', '怕累', '天不怕', '攻坚战', '攻坚', '攻无不克', '救难', '攻防战', '战略决策', '阻击战', '力主', '中坚', '纪律性', '规律性', '自主性', '自觉性', '政治性', '自然性', '职业性', '阶级性', '引导性', '规范性', '自怨', '互勉互励', '自惭', '自警', '枉自', '浊者自浊', '不自重', '严以律己', '寡断', '守规矩', '遵守纪律', '自觉遵守', '无规则', '遵行', '坚持原则', '不规矩', '交通规则', '守法者', '显规则', '遵守规则', '自觉自愿', '守法', '自律性', '严格遵守', '知法', '严于', '律己', '严于律已', '于己', '慎于', '自谦', '谦虚谨慎', '于人', '施于人', '自我实现', '自我认识', '自我牺牲', '自我完善', '自我教育', '自我中心', '自我剖析', '自我解嘲', '企业管理者', '严守', '信守', '恪尽职守', '谨守', '遵纪守法', '共守', '守信', '警觉性', '政治觉悟', '自主权', '独立性', '自立自强', '独立自主', '自强不息', '自立', '自学能力', '自爱', '自恃', '独立行使', '自力', '责权利', '财权', '失责', '诉权', '分权', '事权', '职权', '责任制', '责任方', '权利人', '自信力', '缺乏自信', '信心', '没信心', '自卑心理', '自尊心', '信心倍增', '自制力', '自信', '信仰自由', '公信度', '自信心', '自然力', '自理能力', '自控力', '过于自信', '缺欠', '自私自利', '缺乏经验', '偏信', '坚定信心', '充满信心', '信心十足', '满怀信心', '信心百倍', '坚定信念', '奸诈', '狂妄自大', '自贱', '自负盈亏', '自强自立', '自立更生', '坚强意志', '不畏强权', '自由权', '宗教自由', '宗教信仰者', '信仰者', '信仰危机', '公民自由', '自由思想', '自由化', '自欺', '强加于人', '信任他人', '严于律己', '自暴自弃', '惯于', '自珍', '才会爱', '学会爱', '自励', '聪明伶俐', '聪明绝顶', '能干', '学聪明', '少干', '绝顶聪明', '明白事理', '敢想敢干', '敢想敢说', '敢做敢当', '真干', '敢说敢做', '敢骂', '敢作敢当', '敢做敢为', '敢做', '钱少', '常干', '儿少', '敢干', '缺德事', '少想', '少是', '嫌少', '少问', '干出', 
'才力', '干才', '干好', '才是能', '巧干', '实干家', '办好事', '顾好', '好好儿', '活得好', '干点', '单干', '来干', '肯吃苦', '实干', '实干精神', '实干型', '勤恳', '实干者', '都肯', '肯做', '正事儿', '正事', '专干', '活儿干', '都干', '常跟', '老干', '都在干', '干不了', '才干', '逼出', '混出', '惹出', '蠢事', '祸事', '不干不净', '活不了', '脱不了', '管不了', '都忍', '有所不为', '有所为', '寸有所长', '尺有所短', '无所作为', '无所不为', '有所长', '有所作为', '有所感', '有所不知', '有所为有所不为', '所动', '若有所失', '所失', '所作所为', '一无所成', '无所不谈', '无所遁形', '不耻', '大有可为', '盛大有', '大有希望', '才有大', '大有可观', '都有为', '大有文章', '善为', '有恒', '所怀', '为所欲为', '不以为意', '为情所困', '发挥所长', '化发展', '长足发展', '畸形发展', '促发展', '发展前途', '社会发展', '不为所动', '己所不欲', '妄为', '不以为耻', '在所不惜', '老有所乐', '老有所养', '小有成就', '学棋', '所学', '成成', '有志于', '勤奋学习', '学习心得', '虚心', '学习态度', '学习者', '虚心接受', '业务学习', '互相学习', '心学', '好好学', '学习', '学下', '努力学习', '学得好', '虚心学习', '练下', '学习效果', '重新学习', '学懂', '习得', '学用', '学不完', '比学', '新闻学', '学习会', '学新闻', '新学年', '改学', '重新做人', '业务培训', '专业课程', '业务知识', '勤奋好学', '勤学苦练', '勤学', '勤练', '勤奋努力', '实际效果', '学习成绩', '学门', '学习气氛', '科学态度', '正确态度', '积极态度', '态度端正', '态度改变', '表明态度', '治学', '生态学', '教育者', '科学化', '学习型', '学术研究', '集中学习', '互学', '互勉', '交流学习', '互利', '应承', '承不承认', '承担风险', '担责任', '重责', '应负', '该当', '担负起', '正当权益', '当政者', '肩负起', '负起', '担起', '担负', '担不起', '负有责任', '责任者', '重担', '扛起', '重大责任', '担着', '敢当', '愧不敢当', '敢作敢为', '当得起', '负担得起', '赔得起', '承当', '尽到责任', '责任感', '承担责任', '承受', '更知', '欺辱', '当自强', '吾辈', '自当', '应尽', '更须', '害人者', '敢为人先', '敢为天下先', '健康成长', '环境变', '调适', '适应', '大智慧', '攻坚克难', '自我管理', '恪守', '权责', '自负', '肯干', '干事', '大有作为', '有所发展', '学有所成', '好好学习', '担当不起', '责任', '承担', '更当'] # 561 health_keywords = ['健健康康', '祝健康', '祝您健康', '康健', '都平安', '健康长寿', '健康成长', '身体健康', '康安', '都祝你', '真健康', '有益健康', '益健康', '少健康', '保健康', '健康操', '果蔬汁', '健骨', '健康歌', '健康证', '都健康', '祝张', '祝全家', '祝家人', '祝杨', '祝顺', '泰康人寿', '孙健', '乔健', '整体健康', '健康检查', '心理健康', '身心状态', '身心愉快', '心身', '体健', '长命百岁', '福寿绵长', '祝老', '瘦身操', '健康网', '操操操', '保健操', '操操', '何康康', '健身舞', '平平安安', '快快乐乐', '康美', '健美运动', '美体', '维康', '训练课', '训练馆', '康复训练', '训练法', '锻练', '训练员', '训练师', '冬训', '体能', '训练班', '耗体力', '脚力', '脑力', '体力活', '体力劳动', '精力', '体力不支', 
'耐力', '毅力', '整体素质', '心理素质', '基本素质', '人员素质', '文化素质', '质素', '国民素质', '道德素质', '提高素质', '科学素养', '加强锻炼', '体育锻炼', '煅', '锻炼', '锻炼身体', '体能训练', '练完', '身体疲劳', '勤练', '练车', '增强体质', '强身健体', '自我保健', '臂力', '费力气', '腰力', '支撑力', '运动量', '脚踏板', '耗氧量', '平衡力', '护脚', '耗力', '流体力学', '集中力', '才力', '体力', '抗力', '壮劳力', '防御力', '抵抗能力', '免疫能力', '吸收能力', '控制能力', '体液免疫', '抗病力', '力弱', '操作能力', '控制力', '掌控力', '控制感', '自控力', '防御能力', '不能抑制', '判断能力', '控制性', '力有', '精神压力', '力有木', '力也', '受限于', '反应力', '应变力', '心有余力', '物力', '财力', '人力', '人力资本', '工人工资', '人财物', '劳力', '为力', '承载力', '心力交瘁', '离心力', '殚', '费尽', '尽力去做', '向心力', '尽心竭力', '气力', '力气活', '没力气', '卖力气', '别费', '费点', '卯足', '憋足', '拼劲', '精神不振', '精神紧张', '精神振奋', '压抑感', '心理压力', '压力感', '抗压性', '应激', '精神力量', '精力充沛', '旺盛期', '活力充沛', '旺盛', '精神力', '干劲', '抗抑郁', '人力物力', '物流费', '空气阻力', '物质财富', '民力', '快生了', '病成', '找病', '药到病除', '久病成医', '病殃殃', '啥病', '生吞活剥', '起病', '犯病', '倒床', '瘫倒', '累倒', '卧病', '腰伤', '病愈', '如山倒', '昏倒', '昏迷不醒', '这病', '妇科病', '怪病', '脑癌', '肾癌', '生了病', '少得', '偏瘫', '要犯', '老毛病', '咽炎', '病倒', '发晕', '脑胀', '昏厥', '请医生', '换药', '拿药', '复诊', '开药', '打点滴', '王医生', '验血', '紧绷着', '累病', '绷着', '病怏怏', '怏怏', '想着她', '累着', '病体', '重感冒', '治感冒', '患感冒', '抗感冒', '风寒感冒', '咳嗽', '伤风感冒', '感冒药', '风热感冒', '鼻塞', '发低烧', '咳得', '发冷', '发高烧', '脑涨', '肠胃炎', '昏昏沉沉', '感冒', '低烧', '热伤风', '流涕', '嗓子疼', '高烧', '发烧时', '持续发烧', '退烧', '退烧药', '祝寿星', '长寿村', '长寿湖', '长山岛', '三长', '长寿菜', '永寿', '长寿果', '长谷', '寿县', '长生果', '圣药', '枇杷果', '圣果', '山楂粥', '降糖', '百果', '粟米羹', '长寿乡', '洪村', '兴寿', '滩村', '长屋', '湾村', '长溪村', '长生', '长命', '老鼠药', '心不老', '未老先衰', '灵药', '方长', '福寿康宁', '添寿', '增福', '福永', '享寿', '寿延', '福如东海', '福寿之', '福满', '祝李', '永葆', '永驻', '常保', '青春永驻', '童贞', '永葆青春', '佑护', '常存', '永在', '益寿延年', '延年', '益寿', '轻身', '老年性', '益肺', '防衰老', '益精', '防老', '抗疲劳', '养生粥', '养生茶', '养生酒', '养生菜', '汤最', '疗疾', '养生餐', '滋益', '养生经', '养脾', '来恋', '补养', '温补', '养神', '养血', '暖身', '养生', '温中', '养生学', '养生法', '温经', '养生之道', '养生方', '养生汤', '摄生', '温饮', '药补', '滋阳', '阴精', '滋肾', '补肾益精', '益胃', '固精', '固肾', '温肺', '杞菊', '生滚', '汤羹', '煲粥', '靓汤', '滋补品', '清补', '平补', '补阳', '食疗', '进补', '姜枣茶', '生茶', 
'红枣茶', '枣茶', '养生馆', '养肾', '修生', '食疗法', '养气', '营养学', '中医学', '生态学', '药学', '生理学', '理学', '疲劳感', '肌肉疲劳', '视疲劳', '身体虚弱', '疲惫感', '腺体', '淤滞', '体弱', '虚劳', '纾缓', '安康', '气血虚弱', '体虚', '脾胃虚弱', '虚弱者', '虚寒', '气虚', '血虚', '久病', '发肤', '玉体', '身体语言', '自体', '领受', '身教', '体肤', '受损害', '抱恙', '偶感', '欠安', '恙', '四肢无力', '轻微伤', '惫', '酸软', '运动状态', '平衡状态', '心态平衡', '状态栏', '混乱状态', '紧张状态', '中间状态', '动态平衡', '精神状态', '健康状况', '身体检查', '状况良好', '状况不佳', '甲状腺', '身体素质', '病状', '天气状况', '安歇', '微恙', '安份', '慰安', '徐安安', '缺欠', '安安全全', '还安', '健身室', '康乐活动', '商务会议', '健体', '娱乐室', '康年', '泉浴', '会务', '体育设施', '健身活动', '康体', '护理室', '桑拿浴', '按摩房', '蒸汽浴室', '淋浴室', '乒乓球室', '健身器', '健身球', '强身', '丰肌', '热浴', '操房', '健身', '来健身', '健身卡', '健身教练', '健身器材', '健身操', '健身运动', '健美操', '常运动', '室内运动', '热身运动', '全身性', '来运动', '下楼来', '爬下来', '来下', '文体活动', '团体活动', '外交活动', '科普活动', '竞赛活动', '教学活动', '运动器材', '洁身', '器械', '耗材', '铝材', '全钢', '充满活力', '精神饱满', '补充剂', '精力旺盛', '活跃性', '青春活力', '活力', '充氧', '充满热情', '朝气', '充满生机', '充满希望', '朝气蓬勃', '充溢', '青春少女', '青春不老', '魅力四射', '跃动', '蓬勃', '稚气', '勃勃的', '蓬勃向上', '热力四射', '艳光四射', '基情', '激情', '光芒四射', '才运动', '运动过度', '运动损伤', '运化', '运动强度', '运动性', '热热身', '热身', '动身前往', '活动量', '有氧运动', '量减少', '食量', '体育项目', '球类运动', '奥林匹克运动', '水上运动', '体育竞技', '足球运动', '民主运动', '运时', '动火', '肠蠕动', '懒得动', '健美', '肌力', '运动选手', '全面性', '性腺', '内室', '室内外', '室内环境', '室外机', '动力性', '运动感', '能动性', '波动性', '机动性', '操控性', '气动', '冲击性', '性冲动', '跑跑步', '加衣', '多加练习', '炼钢', '泌乳', '皂素', '冶炼', '颞', '吸汗', '阴囊', '慢跑', '晨跑', '跑完', '跑步者', '跑步机', '健步', '跑鞋', '爬山', '磨练', '历炼', '磨砺', '锤炼', '试炼', '磨练意志', '砥砺', '磨难', '淬炼', '受折磨', '刚跑', '快步走', '骑骑', '跑下来', '跑下去', '走步', '散散步', '爬楼梯', '跑不动', '闭门思过', '面壁思过', '常思', '小过', '静坐', '悔过', '梦过', '勤于思考', '苦思冥想', '知过', '救人者', '医病', '庸医', '救命恩人', '未病', '救命钱', '救死扶伤', '草菅', '急病', '治好病', '住院治疗', '诊治', '救治', '治病救人', '脑瘤', '急症', '根治', '病者', '尿毒', '妇科疾病', '肾衰', '疗毒', '病原', '系统疾病', '病毒性', '脑病', '夏治', '治疗法', '难治', '治法', '闭经', '防病毒', '治疗室', '自然疗法', '介入治疗', '药疗', '心理治疗', '舒经', '求医问药', '投医', '寻医', '良医', '转院', '脑血栓', '癫痫病', '血癌', '久治不愈', '喊救命', '钱养', '筹钱', '偷钱', '垫钱', 
'救人一命', '钱那', '能保命', '取药', '配药', '开给', '开药方', '打药', '喂药', '危重', '病故', '肾衰竭', '该病', '性肝炎', '看病难', '看病', '富贵病', '就医难', '看病贵', '查不出', '开点', '买药', '带药', '看医生', '胃药', '喝药', '买饭', '医务室', '挂号费', '专家号', '候诊', '排号', '挂不上', '挂号处', '特需', '叫号', '就诊', '保外就医', '诊疗费', '筛查', '用药', '就业难', '就医', '实际困难', '医案', '急难', '从医', '入园难', '医疗卫生', '复检', '查血', '尿检', '体格检查', '血常规', '化验', '查体', '检查报告', '血检', '定期检查', '体检表', '孕检', '待产', '妇产', '进产房', '顺产', '产科', '临产', '产检', '早孕', '临检', '不孕症', '验尸', '尸检', '填表', '验伤', '查完', '验尿', '抽血', '复查', '皮试', '胸片', '体检中心', '爱康', '初诊', '诊疗', '会诊', '试验性', '化验单', '检查结果', '洗胃', '消炎药', '退热药', '冲剂', '眼药膏', '止泻药', '喷药', '垫补', '补液', '止咳药', '镇痛药', '止疼药', '止疼', '止痛片', '喉痛', '苦药', '止痛', '颈椎痛', '喝凉水', '喝开水', '喝血', '照喝', '喝盐水', '喝口水', '消炎片', '消毒液', '立消', '止疼片', '止痛药', '药液', '疼疼', '止痒', '退热', '吃错', '吃了亏', '吃不准', '错怪', '吃准', '没治', '吃不香', '吃白饭', '咽喉炎', '咽喉痛', '胃肠炎', '胃炎', '口腔炎', '慢性咽炎', '胸痛', '咽痛', '才健康', '身心健康', '健康美', '精力有限', '心力', '力气', '得病', '病着', '发烧', '长生不老', '永保', '延年益寿', '养身', '食补', '身体发肤', '身体状况', '健身房', '大冲', '四射', '体育新闻', '体育系', '体育比赛', '体育台', '群众体育', '体育产业', '体育频道', '体育界', '体育运动', '跑步', '磨炼', '思过', '医治', '求医', '挂号', '慈铭', '吃错药'] # 833 community_keywords = ['改善社会', '帮助别人', '世界美好', '别人生活', '帮助', '乐于助人', '履行责任', '公益', '团队意识', '关心别人', '拯救灵魂', '世界和平', '服务社会', '世界更美好', '救世', '服务国家', '帮扶', '慈善', '利人利己', '帮助者', '给予帮助', '帮助人', '帮到', '协助', '度过难关', '热情帮助', '支助', '扶助', '助人', '于己', '施于人', '信任他人', '助人为乐', '乐善好施', '真诚待人', '心地善良', '公益行', '公益活动', '公益路', '公益事业', '公益性', '光明行', '公益金', '助学', '微公益', '救世主', '救度', '救难', '救主', '救苦', '宗教界', '世之', '主神', '世世', '中医界', '互帮互助', '守望相助', '受助', '孤残', '助老', '助残', '扶持', '慈善事业', '慈善家', '义拍', '慈善机构', '基金会', '募捐', '善款', '义卖', '损人利己', '利人', '损人不利己', '毫不利己', '损人害己', '谋利益', '求利', '资助者', '受助者', '救助者', '赞助者', '求助者', '辅助性', '捐助人', '捐血', '援手', '互相帮助', '互帮', '交人', '无助于', '助益', '帮上', '帮帮我', '帮帮', '帮不了', '帮个忙', '提供援助', '大力协助', '法律援助', '求助于', '通力协作', '共渡难关', '解决困难', '爱国热情', '帮教', '热情服务', '资助金', '扶助金', '捐资助学', '救助金', '无偿援助', '支援灾区', '扶养', '赡养', '天助', '积善', '正己', '于人', 
'严于律己', '于事', '勿施于人', '责己', '嫁祸于人', '我为人人', '何乐不为', '舍己为人', '乐善', '养德', '行善积德', '扬善', '心善', '水善利', '善思', '善道', '宽以待人', '纯良', '良善', '热心肠', '善谈', '温良', '捐书', '献爱心', '公共事业', '公益心','筹募', '公共财政', '助学金', '贫困学生', '资助', '捐助', '筹款', '爱心', '打拐', '民主权利', '普度众生', '救苦救难', '普渡众生','救穷', '援救', '相帮', '互助', '团结互助', '互转', '互爱', '相扶', '相助', '相持', '互相爱护', '相互支持', '互相支持', '敬老爱老', '尊老敬老', '助残日','拍品', '义捐', '义买', '拍卖会','竞拍', '义演', '中华慈善总会', '募捐箱', '爱德基金会', '联合国儿童基金会', '中国扶贫基金会', '中国青少年发展基金会', '中国红十字基金会', '捐款捐物', '募款', '捐款', '乐捐', '捐赠', '捐入', '赠款', '捐出', '害己', '损人', '害人者', '人不为己', '害人害己', '人人自危', '害民', '加害人', '利他主义', '利他', '民族利益','公共利益', '求善'] # 207 # print(len(financial_keywords),len(popularity_keywords),len(image_keywords),len(affiliation_keywords),len(selfacceptance_keywords),len(health_keywords))
[ "zhenkun91@outlook.com" ]
zhenkun91@outlook.com
21062a70c9c801d0fbd39a73c551d1b344d148ea
68def49f2d3290c7de76a1f08dc63b7db34b1989
/kinokorik/main/migrations/0007_comment.py
a4c2906cadd2f9800357182a356c75ed9d2cb3e4
[]
no_license
amirlanburakhanov/Kinokorik.TV
a7c433d1da63121b6aaf2f68e07d21fecf4b0a3e
db531498ac5445261767905f4229159be04d9efc
refs/heads/master
2020-09-30T23:35:47.587219
2019-12-11T15:51:39
2019-12-11T15:51:39
227,400,460
0
0
null
null
null
null
UTF-8
Python
false
false
1,169
py
# Generated by Django 2.2.3 on 2019-12-02 07:48 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('main', '0006_auto_20191130_2102'), ] operations = [ migrations.CreateModel( name='Comment', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('author', models.CharField(max_length=30, verbose_name='Author')), ('content', models.TextField(verbose_name='Content')), ('is_active', models.BooleanField(db_index=True, default=True, verbose_name='Display on screen?')), ('created_at', models.DateTimeField(auto_now_add=True, db_index=True, verbose_name='Published')), ('bb', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.Bb', verbose_name='Movie')), ], options={ 'verbose_name': 'Comment', 'verbose_name_plural': 'Comments', 'ordering': ['created_at'], }, ), ]
[ "amirlanburahanov@gmail.com" ]
amirlanburahanov@gmail.com
94d06d040b4b2ab63cd972227cd4465457230360
9bad373aba508fd5bdccfd81b527cec2415c4c2a
/FallingBody.py
49ffc5efbaffd4305b3faba7842eddef1a7a85b2
[]
no_license
JosephEmmanuel/python_files
a305329d8620028ed55bc4d58d56a0ea85d5091b
55eaeeebc94edcdf8a16486b73c1d542d3408ea5
refs/heads/master
2020-05-27T02:22:05.187329
2020-04-05T09:15:07
2020-04-05T09:15:07
188,450,943
0
0
null
null
null
null
UTF-8
Python
false
false
1,706
py
#Falling body simulation
#Author Ronald Jasper

import math

# Earth's standard acceleration due to gravity, a constant in m/(sec square).
g = 9.80665


def distanceTraveledWhileFall(t):
    """Return the distance (meters) a body falls from rest in t seconds."""
    return 0.5 * g * t * t


def timeTakenToFall(d):
    """Return the time (seconds) taken to fall d meters from rest."""
    return math.sqrt(2 * d / g)


def instantaneousVelocityAfterFallingElapsedTime(t):
    """Return the speed (m/s) after falling from rest for t seconds."""
    return g * t


def instantaneousVelocityAfterFallingDistance(d):
    """Return the speed (m/s) after falling d meters from rest."""
    return math.sqrt(2 * g * d)


def mps2kmphr(mpc):
    """Convert a speed from m/s to km/hr."""
    return mpc / 1000 * 3600


if __name__ == "__main__":
    # BUG FIX: the prompts previously ran at module top level, so merely
    # importing this file for its physics helpers blocked on input().
    c = input("choice: enter time or distance. t or d:")
    if (c == "t"):
        time = float(input("Enter time in seconds:"))
        distance = distanceTraveledWhileFall(time)
        velocity = instantaneousVelocityAfterFallingElapsedTime(time)
        print("")
        print("Input time of fall:", time, " seconds or ", time / 60.0, " minutes")
        print("Distance fell:", distance, " meters or ", distance / 1000, " km")
        print("Velocity at that time:", velocity, " meters/second or ", mps2kmphr(velocity), " km/hour")
        print("")
    if (c == "d"):
        distance = float(input("Enter distance of fall in meters:"))
        time = timeTakenToFall(distance)
        velocity = instantaneousVelocityAfterFallingDistance(distance)
        print("")
        print("Distance of fall:", distance, " meters or ", distance / 1000, " km")
        print("Time of fall:", time, " seconds or ", time / 60.0, " minutes")
        print("Velocity at that distance:", velocity, " meters/second or ", mps2kmphr(velocity), " km/hour")
        print("")
[ "jet.repository8@gmail.com" ]
jet.repository8@gmail.com
71c778cc56ea5d0a03b400a5fb63405ab5da9707
3a17437dce911b74c102756ce27cfade29e2c19b
/clients.py
a495552b38b4fbbad717c4c1f15c1481d152a162
[]
no_license
hacksnextdoor/mobility
a3c9208f78ef7e14c4cf7f4654c3856054959b2d
dc04e6106c2dee962247da742751e908db18286e
refs/heads/master
2020-05-14T07:50:01.721978
2019-05-10T05:21:53
2019-05-10T05:21:53
181,713,295
1
2
null
2019-05-10T16:19:13
2019-04-16T15:06:59
Python
UTF-8
Python
false
false
664
py
import requests


def get_all():
    """Return one instance of every supported bike-share client."""
    return [Jump()]


class BaseBikeClass:
    """Common interface for bike-share API clients.

    Subclasses set NAME / BASE_URL / METADATA and implement all_bikes().
    """

    NAME = ''
    BASE_URL = ''
    METADATA = {}

    def all_bikes(self, region):
        # Bug fix: this was declared as `def all_bikes():` (no `self`, no
        # `region`), so calling it on an instance raised TypeError instead of
        # NotImplementedError, and the signature did not match the subclass
        # override.
        raise NotImplementedError


class Jump(BaseBikeClass):
    """Client for the JUMP bikes open-data GBFS feed."""

    def __init__(self):
        self.NAME = 'jumpbikes'
        self.BASE_URL = 'jumpbikes.com/opendata'
        self.METADATA = {}

    def all_bikes(self, region):
        """Fetch the free_bike_status feed for `region` and return the parsed JSON dict.

        Raises:
            Exception: if `region` is not one of the supported city codes.
        """
        if region not in ['atx', 'nyc', 'chi', 'sc', 'dc', 'san', 'sac', 'pvd', 'mia']:
            raise Exception("There are no bikes in {0}".format(region))
        url = "https://{0}.{1}/free_bike_status.json".format(region, self.BASE_URL)
        r = requests.get(url)
        data = r.json()
        return data
[ "brandonsanchez05@gmail.com" ]
brandonsanchez05@gmail.com
e59d092404ad3bb41f179eac7e955fc6b6e3d84f
9b0b9ed1fe86355a8e7f71e9af76e21aa86fb17c
/Venter/forms.py
69543ee08c5508f893bdbac44f2a8e3d4171b252
[]
no_license
simranmadhok/Venter_CMS
b453d1f4b74bd4a7cfc72c37a1cebaa52c4ea809
05bf5e44ebbfbfe63209b496aa25d12c1b2c65a1
refs/heads/master
2020-04-26T04:26:38.414249
2019-02-04T06:29:27
2019-02-04T06:29:27
null
0
0
null
null
null
null
UTF-8
Python
false
false
3,823
py
from django import forms
from django.contrib.auth.models import User

from Backend import settings
from Venter.models import File, Profile

from .validate import csv_file_header_validation


class CSVForm(forms.ModelForm):
    """
    ModelForm backing the CSV upload page.

    Exposes the single `csv_file` field of the File model and validates the
    uploaded file's extension, size and header row.

    Usage:
        upload_file.html template — renders the upload form for logged-in
        users.
    """

    class Meta:
        model = File
        fields = ('csv_file',)

    def __init__(self, *args, **kwargs):
        # The view injects the current request so that header validation can
        # look up the logged-in user's organisation name.
        self.request = kwargs.pop("request")
        super(CSVForm, self).__init__(*args, **kwargs)

    def clean_csv_file(self):
        """
        Validate the uploaded file: .csv extension, 5 MB size cap, and the
        expected CSV header (delegated to csv_file_header_validation).

        Returns the cleaned file, or raises forms.ValidationError describing
        the first failed check.
        """
        uploaded_csv_file = self.cleaned_data['csv_file']
        # An empty upload is passed through untouched; field-level "required"
        # handling deals with it.
        if not uploaded_csv_file:
            return uploaded_csv_file
        if not uploaded_csv_file.name.endswith(settings.FILE_UPLOAD_TYPE):
            raise forms.ValidationError(
                "Please upload .csv extension files only")
        if not uploaded_csv_file.size < int(settings.MAX_UPLOAD_SIZE):
            raise forms.ValidationError(
                "File size must not exceed 5 MB")
        if not csv_file_header_validation(uploaded_csv_file, self.request):
            raise forms.ValidationError(
                "Incorrect headers, please contact your administrator")
        return uploaded_csv_file


class UserForm(forms.ModelForm):
    """
    ModelForm over Django's built-in User model.

    Usage:
        registration.html (signup page for new users) and
        update_profile.html (profile update page for existing users).
    """

    class Meta:
        model = User
        fields = ('username', 'password', 'email', 'first_name', 'last_name')

    def save(self):  # pylint: disable = W0221
        # Hash the raw password before persisting instead of storing it
        # verbatim on the model instance.
        new_user = super(UserForm, self).save(commit=False)
        new_user.set_password(self.cleaned_data.get('password'))
        new_user.save()
        return new_user


class ProfileForm(forms.ModelForm):
    """
    ModelForm over the project's Profile model.

    Usage:
        registration.html (signup page for new users) and
        update_profile.html (profile update page for existing users).
    """

    class Meta:
        model = Profile
        fields = ('organisation_name', 'phone_number', 'profile_picture')
[ "madhok.simran8@gmail.com" ]
madhok.simran8@gmail.com
6406b63bcacf384542fd445cf5ca113155b0db63
52f558dcd7cbea61e6b370bcf2ec65d3b8990087
/Observtions/Python/AttachmentVsSlope.py
2fd544ec9ba84a5ef48f0088d974265f319de0d3
[]
no_license
gauravcse/Graph-Learning
0f61abef73c30d6edca3313351e8db20601ea875
4098ade242c4b5ba906c335fe66b40174e6ee06f
refs/heads/master
2021-01-17T19:19:38.978666
2016-07-17T17:31:33
2016-07-17T17:31:33
63,544,016
0
0
null
null
null
null
UTF-8
Python
false
false
466
py
# coding: utf-8
# Exported from a Jupyter notebook: scatter plot of preferential attachment
# versus slope, read from pref_slope.csv.

import networkx as nx
import random
import matplotlib.pyplot as plt
import numpy as np
from sklearn import linear_model
import pandas as pd

# Bug fix: the notebook magic `%matplotlib inline` was exported as a bare
# call to get_ipython(), which raises NameError when this file is run as a
# plain script.  Only invoke it when an IPython shell is actually present.
try:
    get_ipython().magic(u'matplotlib inline')
except NameError:
    pass

# Load the (x, y) = (preferential attachment, slope) observations.
# NOTE(review): DataFrame.as_matrix() was removed in pandas 1.0; on modern
# pandas this should become data.to_numpy() — confirm the target pandas
# version before changing.
data = pd.read_csv("pref_slope.csv")
data = data.as_matrix()

# First column: preferential attachment; second column: slope.
x, y = data[:, [0]], data[:, [1]]

plt.xlabel("Prefferential Attachment")
plt.ylabel("Slope")
plt.scatter(x, y)
plt.show()
[ "gauravmitra95@gmail.com" ]
gauravmitra95@gmail.com
2a488fae216a650166cbcdea5467d76f54a01160
1fa678e96bffbb40aaec6eeacdad63d6702768c1
/part-3 files/generate_input_attributes.py
646701ff449296096aa6c10987db41a6c158fa05
[]
no_license
hiteshvaidya/Android-Security
b18f8d7abb94813c88f294d962215c995d7eb8ad
c48eb036c6c0f25978dacc985e68eecee1cf6995
refs/heads/master
2020-07-11T15:14:13.896262
2019-08-28T00:49:07
2019-08-28T00:49:07
204,580,179
1
0
null
null
null
null
UTF-8
Python
false
false
1,935
py
""" This program generates dataset for the machine learning model used in this project. :author: Hitesh Vaidya """ import numpy as np import math import csv def generatePrintBinary(n): # Create an empty queue from Queue import Queue q = Queue() numbers = [] # Enqueu the first binary number q.put("1") # This loop is like BFS of a tree with 1 as root # 0 as left child and 1 as right child and so on while(n>0): n-= 1 # Print the front of queue s1 = q.get() # print s1 numbers.append(s1) s2 = s1 # Store s1 before changing it # Append "0" to s1 and enqueue it q.put(s1+"0") # Append "1" to s2 and enqueue it. Note that s2 # contains the previous front q.put(s2+"1") return numbers def main(): numbers = generatePrintBinary(math.pow(2,10)-1) vectors = [] # print "main function" for index in numbers: # temp = index.split() num = '' while len(index) < 10: index = '0' + index num = [int(x) for x in index] vectors.append(num) # if len(vectors)%500: # print vectors[-1] vectors.insert(0, [0 for x in range(10)]) print "number of vectors = " + str(len(vectors)) print '\nfinal data\n' data = [] for index in range(1, 29): count = 0 for combo in vectors: temp = [x for x in combo] temp.append(index) data.append(temp) count += 1 print "data size = " + str(len(data)) + "; rise = " + str(count) # print 'after -> ' + str(data[np.random.randint(0, len(data))]) print '\nfinal' print 'len = ' + str(len(data)) for index in range(10): print str(-1*index) + '. ' + str(data[-index]) with open('random.csv', 'w') as csvfile: fp = csv.writer(csvfile) for index in data: fp.writerow(index) if __name__ == '__main__': main()
[ "noreply@github.com" ]
noreply@github.com
2a7bd36e3bdacdc5463a393dd2219babfd2e80ea
918b7f10e0f662aa756959fd67204405f14d4e00
/ddpg/Actor.py
7a66a045692040f70ee349ec1186670ead21f109
[ "MIT" ]
permissive
fstonezst/deep-rl
2b98c78dde3e164792aab068d702169f82457911
a7091badd36fac8a4b1782cf9110eb14b8addfd4
refs/heads/master
2021-09-11T14:30:55.766336
2018-01-23T14:41:16
2018-01-23T14:41:16
108,348,333
0
0
null
2017-12-15T06:11:53
2017-10-26T01:56:15
Python
UTF-8
Python
false
false
3,956
py
import tensorflow as tf
import tflearn
import numpy as np

class ActorNetwork(object):
    """
    Input to the network is the state, output is the action
    under a deterministic policy.
    The output layer activation is a tanh to keep the action
    between -action_bound and action_bound
    """

    def __init__(self, sess, state_dim, action_dim, action_bound, learning_rate, tau):
        # sess: TensorFlow session used for all graph executions.
        # tau: soft-update mixing coefficient for the target network.
        self.sess = sess
        self.s_dim = state_dim
        self.a_dim = action_dim
        self.action_bound = action_bound
        self.learning_rate = learning_rate
        self.tau = tau

        # Actor Network
        self.inputs, self.out, self.scaled_out = self.create_actor_network()
        self.network_params = tf.trainable_variables()

        # Target Network
        # NOTE: the slice below relies on this second create_actor_network()
        # call appending its variables AFTER the online network's variables in
        # tf.trainable_variables() — do not reorder these statements.
        self.target_inputs, self.target_out, self.target_scaled_out = self.create_actor_network()
        self.target_network_params = tf.trainable_variables()[
            len(self.network_params):]

        # Op for periodically updating target network with online network
        # weights
        # target <- tau * online + (1 - tau) * target  (soft update)
        self.update_target_network_params = \
            [self.target_network_params[i].assign(tf.multiply(self.network_params[i], self.tau) +
                                                  tf.multiply(self.target_network_params[i], 1. - self.tau))
             for i in range(len(self.target_network_params))]

        # This gradient will be provided by the critic network
        self.action_gradient = tf.placeholder(tf.float32, [None, self.a_dim])

        # Combine the gradients here
        # Negated so that applying them performs gradient ASCENT on the
        # critic's value estimate.
        self.actor_gradients = tf.gradients(
            self.scaled_out, self.network_params, -self.action_gradient)

        # Optimization Op
        self.optimize = tf.train.AdamOptimizer(self.learning_rate). \
            apply_gradients(zip(self.actor_gradients, self.network_params))

        self.num_trainable_vars = len(
            self.network_params) + len(self.target_network_params)

    def create_actor_network(self):
        """Build the actor graph: state input -> 2 batch-normalized ReLU
        layers -> tanh output scaled to [-action_bound, action_bound].

        Returns (inputs, out, scaled_out) tensors.
        """
        times = 1
        N_HIDDEN_1, N_HIDDEN_2 = 400 * times, 300 * times
        # NOTE(review): DROPOU_KEEP_PROB is defined but never used below.
        DROPOU_KEEP_PROB = 0.5
        inputs = tflearn.input_data(shape=[None, self.s_dim])
        inputLayer = tflearn.layers.normalization.batch_normalization(inputs)
        # Hidden-layer weights drawn from U[-1/sqrt(fan_in), 1/sqrt(fan_in)].
        w_init = tflearn.initializations.uniform(minval=-1/np.sqrt(self.s_dim), maxval=1/np.sqrt(self.s_dim))
        net = tflearn.fully_connected(inputLayer, N_HIDDEN_1, activation='relu',regularizer='L2', weight_decay=1.0E-2, weights_init=w_init)
        net = tflearn.layers.normalization.batch_normalization(net)
        w_init = tflearn.initializations.uniform(minval=-1/np.sqrt(N_HIDDEN_1), maxval=1/np.sqrt(N_HIDDEN_1))
        net = tflearn.fully_connected(net, N_HIDDEN_2, activation='relu',regularizer='L2', weight_decay=1.0E-2, weights_init=w_init)
        net = tflearn.layers.normalization.batch_normalization(net)
        # Final layer weights are init to Uniform[-3e-3, 3e-3]
        w_init = tflearn.initializations.uniform(minval=-3.0E-3, maxval=3.0E-3)
        out = tflearn.fully_connected(
            net, self.a_dim, activation='tanh', weights_init=w_init, bias=w_init)
        # Scale output to -action_bound to action_bound
        scaled_out = tf.multiply(out, self.action_bound)
        return inputs, out, scaled_out

    def train(self, inputs, a_gradient):
        """Run one optimizer step given states and critic-provided action gradients."""
        self.sess.run(self.optimize, feed_dict={
            self.inputs: inputs,
            self.action_gradient: a_gradient
        })

    def predict(self, inputs):
        """Return the online network's scaled actions for a batch of states."""
        return self.sess.run(self.scaled_out, feed_dict={
            self.inputs: inputs
        })

    def predict_target(self, inputs):
        """Return the target network's scaled actions for a batch of states."""
        return self.sess.run(self.target_scaled_out, feed_dict={
            self.target_inputs: inputs
        })

    def update_target_network(self):
        """Soft-update the target network parameters toward the online network."""
        self.sess.run(self.update_target_network_params)

    def get_num_trainable_vars(self):
        """Return the combined variable count of online and target networks."""
        return self.num_trainable_vars
[ "fstone.zh@gmail.com" ]
fstone.zh@gmail.com
be8baba074bc2b06e054dbc1762b979a90b1f361
c3a5c2886f47f2d5e6c43a315c4edbd252c7c95d
/archivos.py
23118e3c9819378191567445fe8f461cdad8d42f
[]
no_license
0danielfrozo/Clases_Phyton
1999501cc4f4fdac4b08433bd575dc6a73f04918
777e9685d6061d6f073000bbf10183eb840b829f
refs/heads/master
2020-03-25T18:57:32.364042
2018-08-08T19:27:53
2018-08-08T19:27:53
144,058,086
0
0
null
null
null
null
UTF-8
Python
false
false
32
py
print "mi primer repositorio"
[ "dg.melo@uniandes.edu.co" ]
dg.melo@uniandes.edu.co
d88feee59447268d7756cf064e8f61bfb542edd5
bbc33e3a01f3e14f12956e40eed43fd668bcf349
/rubiksolver.py
741267b1c70c9f56070eb6e450e2995a9297f071
[]
no_license
TristanKalloniatis/Rubiksolver
4cd576250646d7252b97a8d84ade4bfa3efeed90
3c9d7434d5b21ed6490940500736d2f77b10d0f9
refs/heads/master
2021-01-10T15:16:20.979526
2016-02-03T19:03:39
2016-02-03T19:03:39
51,023,071
0
0
null
null
null
null
UTF-8
Python
false
false
22,039
py
#Documentation for Rubik's solver
#In this file we specify the effect of each face turn on a 3x3x3 cube and give a solving algorithm
#Later will add computer vision functionality to automatically read state of scrambled cube from photos
#Cube representation: we represent the cube by an array cube[6][3][3] where first index covers the faces, second and third index cover the cubies
#Array is filled with entries 0,1,2,3,4,5 representing the 6 face colours
#Convention on cube orientation and numbering:
#UP, DOWN, LEFT, RIGHT, FRONT, BACK
#Hold cube so that ULF corner is closet to you
#U=cube[0] with first 3-index giving the rows and second 3-index the columns from this view
#L=cube[1] and F=cube[2] with subsidiary indices as above
#Rotate the cube a half turn about UD axis so that URB corner faces
#R=cube[3] and B=cube[4] with subsidiary indices as above
#Finally, rotate the cube so that F is in front, L is left, etc. Then perform a quarter turn around LR axis so D faces
#D=cube[5] with subsidiary indices as above
#Sanity check on numbering convention: here are the indices for the 8 corners
#ULF=cube[0][2][0]=cube[1][0][2]=cube[2][0][0]
#UFR=cube[0][2][2]=cube[2][0][2]=cube[3][0][0]
#URB=cube[0][0][2]=cube[3][0][2]=cube[4][0][0]
#UBL=cube[0][0][0]=cube[4][0][2]=cube[1][0][0]
#FLD=cube[2][2][0]=cube[1][2][2]=cube[5][0][0]
#RFD=cube[3][2][0]=cube[2][2][2]=cube[5][0][2]
#BRD=cube[4][2][0]=cube[3][2][2]=cube[5][2][2]
#LBD=cube[1][2][0]=cube[4][2][2]=cube[5][2][0]
#And for the 12 edges
#UL=cube[0][1][0]=cube[1][0][1]
#UF=cube[0][2][1]=cube[2][0][1]
#UR=cube[0][1][2]=cube[3][0][1]
#UB=cube[0][0][1]=cube[4][0][1]
#LF=cube[1][1][2]=cube[2][1][0]
#FR=cube[2][1][2]=cube[3][1][0]
#RB=cube[3][1][2]=cube[4][1][0]
#BL=cube[4][1][2]=cube[1][1][0]
#LD=cube[1][2][1]=cube[5][1][0]
#FD=cube[2][2][1]=cube[5][0][1]
#RD=cube[3][2][1]=cube[5][1][2]
#BD=cube[4][2][1]=cube[5][2][1]
#Since the centre pieces don't move, we initialise colour choice so that cube[i][1][1] has colour label i for all i
#NOTE(review): this file is Python 2 (print statements, raw_input) and appears
#unfinished — see the NOTE(review) comments at is3cycle and the driver loop.
#We now specify the effect of the 6 CLOCKWISE face turns
def Uturn(cube):
    """Apply a clockwise quarter turn of the U face (face 0) to `cube` in place."""
    face=0
    #Track the effect of the U turn on the corner pieces
    temp=cube[face][0][0] #This part can be copied for each face
    cube[face][0][0]=cube[face][2][0]
    cube[face][2][0]=cube[face][2][2]
    cube[face][2][2]=cube[face][0][2]
    cube[face][0][2]=temp
    temp=cube[1][0][0]
    cube[1][0][0]=cube[2][0][0]
    cube[2][0][0]=cube[3][0][0]
    cube[3][0][0]=cube[4][0][0]
    cube[4][0][0]=temp
    temp=cube[4][0][2]
    cube[4][0][2]=cube[1][0][2]
    cube[1][0][2]=cube[2][0][2]
    cube[2][0][2]=cube[3][0][2]
    cube[3][0][2]=temp
    #Track the effect of the U turn on the edge pieces
    temp=cube[face][1][0] #This part can be copied for each face
    cube[face][1][0]=cube[face][2][1]
    cube[face][2][1]=cube[face][1][2]
    cube[face][1][2]=cube[face][0][1]
    cube[face][0][1]=temp
    temp=cube[1][0][1]
    cube[1][0][1]=cube[2][0][1]
    cube[2][0][1]=cube[3][0][1]
    cube[3][0][1]=cube[4][0][1]
    cube[4][0][1]=temp

def Lturn(cube):
    """Apply a clockwise quarter turn of the L face (face 1) to `cube` in place."""
    face=1
    #Track the effect of the L turn on the corner pieces
    temp=cube[face][0][0] #This part can be copied for each face
    cube[face][0][0]=cube[face][2][0]
    cube[face][2][0]=cube[face][2][2]
    cube[face][2][2]=cube[face][0][2]
    cube[face][0][2]=temp
    temp=cube[4][0][2]
    cube[4][0][2]=cube[5][2][0]
    cube[5][2][0]=cube[2][2][0]
    cube[2][2][0]=cube[0][2][0]
    cube[0][2][0]=temp
    temp=cube[0][0][0]
    cube[0][0][0]=cube[4][2][2]
    cube[4][2][2]=cube[5][0][0]
    cube[5][0][0]=cube[2][0][0]
    cube[2][0][0]=temp
    #Track the effect of the L turn on the edge pieces
    temp=cube[face][1][0] #This part can be copied for each face
    cube[face][1][0]=cube[face][2][1]
    cube[face][2][1]=cube[face][1][2]
    cube[face][1][2]=cube[face][0][1]
    cube[face][0][1]=temp
    temp=cube[4][1][2]
    cube[4][1][2]=cube[5][1][0]
    cube[5][1][0]=cube[2][1][0]
    cube[2][1][0]=cube[0][1][0]
    cube[0][1][0]=temp

def Fturn(cube):
    """Apply a clockwise quarter turn of the F face (face 2) to `cube` in place."""
    face=2
    #Track the effect of the F turn on the corner pieces
    temp=cube[face][0][0] #This part can be copied for each face
    cube[face][0][0]=cube[face][2][0]
    cube[face][2][0]=cube[face][2][2]
    cube[face][2][2]=cube[face][0][2]
    cube[face][0][2]=temp
    temp=cube[1][0][2]
    cube[1][0][2]=cube[5][0][0]
    cube[5][0][0]=cube[3][2][0]
    cube[3][2][0]=cube[0][2][2]
    cube[0][2][2]=temp
    temp=cube[0][2][0]
    cube[0][2][0]=cube[1][2][2]
    cube[1][2][2]=cube[5][0][2]
    cube[5][0][2]=cube[3][0][0]
    cube[3][0][0]=temp
    #Track the effect of the F turn on the edge pieces
    temp=cube[face][1][0] #This part can be copied for each face
    cube[face][1][0]=cube[face][2][1]
    cube[face][2][1]=cube[face][1][2]
    cube[face][1][2]=cube[face][0][1]
    cube[face][0][1]=temp
    temp=cube[1][1][2]
    cube[1][1][2]=cube[5][0][1]
    cube[5][0][1]=cube[3][1][0]
    cube[3][1][0]=cube[0][2][1]
    cube[0][2][1]=temp

def Rturn(cube):
    """Apply a clockwise quarter turn of the R face (face 3) to `cube` in place."""
    face=3
    #Track the effect of the R turn on the corner pieces
    temp=cube[face][0][0] #This part can be copied for each face
    cube[face][0][0]=cube[face][2][0]
    cube[face][2][0]=cube[face][2][2]
    cube[face][2][2]=cube[face][0][2]
    cube[face][0][2]=temp
    temp=cube[2][0][2]
    cube[2][0][2]=cube[5][0][2]
    cube[5][0][2]=cube[4][2][0]
    cube[4][2][0]=cube[0][0][2]
    cube[0][0][2]=temp
    temp=cube[0][2][2]
    cube[0][2][2]=cube[2][2][2]
    cube[2][2][2]=cube[5][2][2]
    cube[5][2][2]=cube[4][0][0]
    cube[4][0][0]=temp
    #Track the effect of the R turn on the edge pieces
    temp=cube[face][1][0] #This part can be copied for each face
    cube[face][1][0]=cube[face][2][1]
    cube[face][2][1]=cube[face][1][2]
    cube[face][1][2]=cube[face][0][1]
    cube[face][0][1]=temp
    temp=cube[2][1][2]
    cube[2][1][2]=cube[5][1][2]
    cube[5][1][2]=cube[4][1][0]
    cube[4][1][0]=cube[0][1][2]
    cube[0][1][2]=temp

def Bturn(cube):
    """Apply a clockwise quarter turn of the B face (face 4) to `cube` in place."""
    face=4
    #Track the effect of the B turn on the corner pieces
    temp=cube[face][0][0] #This part can be copied for each face
    cube[face][0][0]=cube[face][2][0]
    cube[face][2][0]=cube[face][2][2]
    cube[face][2][2]=cube[face][0][2]
    cube[face][0][2]=temp
    temp=cube[3][0][2]
    cube[3][0][2]=cube[5][2][2]
    cube[5][2][2]=cube[1][2][0]
    cube[1][2][0]=cube[0][0][0]
    cube[0][0][0]=temp
    temp=cube[0][0][2]
    cube[0][0][2]=cube[3][2][2]
    cube[3][2][2]=cube[5][2][0]
    cube[5][2][0]=cube[1][0][0]
    cube[1][0][0]=temp
    #Track the effect of the B turn on the edge pieces
    temp=cube[face][1][0] #This part can be copied for each face
    cube[face][1][0]=cube[face][2][1]
    cube[face][2][1]=cube[face][1][2]
    cube[face][1][2]=cube[face][0][1]
    cube[face][0][1]=temp
    temp=cube[3][1][2]
    cube[3][1][2]=cube[5][2][1]
    cube[5][2][1]=cube[1][1][0]
    cube[1][1][0]=cube[0][0][1]
    cube[0][0][1]=temp

def Dturn(cube):
    """Apply a clockwise quarter turn of the D face (face 5) to `cube` in place."""
    face=5
    #Track the effect of the D turn on the corner pieces
    temp=cube[face][0][0] #This part can be copied for each face
    cube[face][0][0]=cube[face][2][0]
    cube[face][2][0]=cube[face][2][2]
    cube[face][2][2]=cube[face][0][2]
    cube[face][0][2]=temp
    temp=cube[1][2][2]
    cube[1][2][2]=cube[4][2][2]
    cube[4][2][2]=cube[3][2][2]
    cube[3][2][2]=cube[2][2][2]
    cube[2][2][2]=temp
    temp=cube[2][2][0]
    cube[2][2][0]=cube[1][2][0]
    cube[1][2][0]=cube[4][2][0]
    cube[4][2][0]=cube[3][2][0]
    cube[3][2][0]=temp
    #Track the effect of the D turn on the edge pieces
    temp=cube[face][1][0] #This part can be copied for each face
    cube[face][1][0]=cube[face][2][1]
    cube[face][2][1]=cube[face][1][2]
    cube[face][1][2]=cube[face][0][1]
    cube[face][0][1]=temp
    temp=cube[1][2][1]
    cube[1][2][1]=cube[4][2][1]
    cube[4][2][1]=cube[3][2][1]
    cube[3][2][1]=cube[2][2][1]
    cube[2][2][1]=temp

def turntype(num):
    """Return the move-notation suffix for a turn count (1 -> ' ', 2 -> '2', else prime)."""
    if num==1:
        return " "
    elif num==2:
        return "2"
    else:
        return "\'"

def name(face):
    """Return the sticker colour name associated with a face index 0..5."""
    if face==0:
        return "blue"
    elif face==1:
        return "orange"
    elif face==2:
        return "white"
    elif face==3:
        return "red"
    elif face==4:
        return "yellow"
    else:
        return "green"

def turn(cube,face,num):
    """Apply `num` clockwise quarter turns of `face` to `cube` in place.

    Negative counts are normalised (e.g. -1 becomes 3); the move is also
    printed (colour name plus suffix) the first time through the loop.
    """
    #num=1, 2, or -1 to represent clockwise, half, or anticlockwise turns of the face
    if num==-1:
        num=3
    if num==-2:
        num=2
    if num==-3:
        num=1
    if num==-4:
        num=0
    if num==4:
        num=0
    for n in range(num):
        if n==0:
            print name(face),turntype(num)
        if face==0:
            Uturn(cube)
        elif face==1:
            Lturn(cube)
        elif face==2:
            Fturn(cube)
        elif face==3:
            Rturn(cube)
        elif face==4:
            Bturn(cube)
        else:
            Dturn(cube)

def adjacent(face):
    #Returns the 4 adjacent faces to a given face followed by opposite face as an array of size 5, in the order above, to the left, to the right, below, opposite when face is oriented so subsidiary idices increase down and to the right
    if face==0:
        return [4,1,3,2,5]
    elif face==1:
        return [0,4,2,5,3]
    elif face==2:
        return [0,1,3,5,4]
    elif face==3:
        return [0,2,4,5,1]
    elif face==4:
        return [0,3,1,5,2]
    else:
        return [2,1,3,4,0]

def setcube(cube):
    #Build a setlike dictionary representation of the cubes state
    #Will not use turns on this representation, but will ue to track the position of pieces during the solve more easily
    cubedict=dict()
    cubedict['ULF']=set([cube[0][2][0],cube[1][0][2],cube[2][0][0]])
    cubedict['UFR']=set([cube[0][2][2],cube[2][0][2],cube[3][0][0]])
    cubedict['URB']=set([cube[0][0][2],cube[3][0][2],cube[4][0][0]])
    cubedict['UBL']=set([cube[0][0][0],cube[4][0][2],cube[1][0][0]])
    cubedict['FLD']=set([cube[2][2][0],cube[1][2][2],cube[5][0][0]])
    cubedict['RFD']=set([cube[3][2][0],cube[2][2][2],cube[5][0][2]])
    cubedict['BRD']=set([cube[4][2][0],cube[3][2][2],cube[5][2][2]])
    cubedict['LBD']=set([cube[1][2][0],cube[4][2][2],cube[5][2][0]])
    cubedict['UL']=set([cube[0][1][0],cube[1][0][1]])
    cubedict['UF']=set([cube[0][2][1],cube[2][0][1]])
    cubedict['UR']=set([cube[0][1][2],cube[3][0][1]])
    cubedict['UB']=set([cube[0][0][1],cube[4][0][1]])
    cubedict['LF']=set([cube[1][1][2],cube[2][1][0]])
    cubedict['FR']=set([cube[2][1][2],cube[3][1][0]])
    cubedict['RB']=set([cube[3][1][2],cube[4][1][0]])
    cubedict['BL']=set([cube[4][1][2],cube[1][1][0]])
    cubedict['LD']=set([cube[1][2][1],cube[5][1][0]])
    cubedict['FD']=set([cube[2][2][1],cube[5][0][1]])
    cubedict['RD']=set([cube[3][2][1],cube[5][1][2]])
    cubedict['BD']=set([cube[4][2][1],cube[5][2][1]])
    return cubedict

def piecename(i,j,k):
    #Returns the 2 or 3 character name of cube[i][j][k] in the dictionary keys above
    if (i,j,k)==(0,2,0) or (i,j,k)==(1,0,2) or (i,j,k)==(2,0,0):
        return 'ULF'
    elif (i,j,k)==(0,2,2) or (i,j,k)==(2,0,2) or (i,j,k)==(3,0,0):
        return 'UFR'
    elif (i,j,k)==(0,0,2) or (i,j,k)==(3,0,2) or (i,j,k)==(4,0,0):
        return 'URB'
    elif (i,j,k)==(0,0,0) or (i,j,k)==(4,0,2) or (i,j,k)==(1,0,0):
        return 'UBL'
    elif (i,j,k)==(2,2,0) or (i,j,k)==(1,2,2) or (i,j,k)==(5,0,0):
        return 'FLD'
    elif (i,j,k)==(3,2,0) or (i,j,k)==(2,2,2) or (i,j,k)==(5,0,2):
        return 'RFD'
    elif (i,j,k)==(4,2,0) or (i,j,k)==(3,2,2) or (i,j,k)==(5,2,2):
        return 'BRD'
    elif (i,j,k)==(1,2,0) or (i,j,k)==(4,2,2) or (i,j,k)==(5,2,0):
        return 'LBD'
    elif (i,j,k)==(0,1,0) or (i,j,k)==(1,0,1):
        return 'UL'
    elif (i,j,k)==(0,2,1) or (i,j,k)==(2,0,1):
        return 'UF'
    elif (i,j,k)==(0,1,2) or (i,j,k)==(3,0,1):
        return 'UR'
    elif (i,j,k)==(0,0,1) or (i,j,k)==(4,0,1):
        return 'UB'
    elif (i,j,k)==(1,1,2) or (i,j,k)==(2,1,0):
        return 'LF'
    elif (i,j,k)==(2,1,2) or (i,j,k)==(3,1,0):
        return 'FR'
    elif (i,j,k)==(3,1,2) or (i,j,k)==(4,1,0):
        return 'RB'
    elif (i,j,k)==(4,1,2) or (i,j,k)==(1,1,0):
        return 'BL'
    elif (i,j,k)==(1,2,1) or (i,j,k)==(5,1,0):
        return 'LD'
    elif (i,j,k)==(2,2,1) or (i,j,k)==(5,0,1):
        return 'FD'
    elif (i,j,k)==(3,2,1) or (i,j,k)==(5,1,2):
        return 'RD'
    else:
        return 'BD'

def edgecorrect(cube,face,edge):
    #Determines whether an edge on the cube face is positioned AND oriented correctly
    #Returns 0 if edge is correctly oriented, 1 if edge is correct but flipped, and -1 otherwise
    #Edges are numbered 0,1,2,3 corresponding to subsidiary indices [1][2],[0][1],[1][0],[2][1]
    #Numbering is set up so that turning face edge times puts relevant edge on the right when viewing face on
    adj=adjacent(face)
    if edge==0:
        j=1
        k=2
        a=2
    elif edge==1:
        j=0
        k=1
        a=0
    elif edge==2:
        j=1
        k=0
        a=1
    else:
        j=2
        k=1
        a=3
    name=piecename(face,j,k)
    otherside=(setcube(cube)[name].difference(set([cube[face][j][k]]))).pop()
    if cube[face][j][k]==cube[face][1][1] and otherside==cube[adj[a]][1][1]:
        return 0
    elif cube[face][j][k]==cube[adj[a]][1][1] and otherside==cube[face][1][1]:
        return 1
    else:
        return -1

def cornercorrect(cube,face,corner):
    #Determines whether a corner on the cube face is positioned AND oriented correctly
    #Returns 0 if corner is correctly oriented, 1 if corner is correct but needs to be rotated clockwise, 2 if corner is correct but needs to be rotated anticlockwise, and -1 otherwise
    #Corners are numbered 0,1,2,3 corresponding to subsidiary indices [0][2],[0][0],[2][0],[2][2]
    #Numbering is set up so that turning face corner times puts relevant corner on the top right when viewing face on
    adj=adjacent(face)
    #Here a1 will specify the face one place clockwise of the given face and a2 the face one place anticlockwise
    if corner==0:
        j=0
        k=2
        a1=0
        a2=2
    elif corner==1:
        j=0
        k=0
        a1=1
        a2=0
    elif corner==2:
        j=2
        k=0
        a1=3
        a2=1
    else:
        j=2
        k=2
        a1=2
        a2=3
    name=piecename(face,j,k)
    colours=setcube(cube)[name]
    if colours!=set([cube[face][1][1],cube[adj[a1]][1][1],cube[adj[a2]][1][1]]):
        return -1
    elif cube[face][j][k]==cube[face][1][1]:
        return 0
    elif cube[face][j][k]==cube[adj[a1]][1][1]:
        return 1
    else:
        return 2

def orientcorners(cube,face,c1,c2):
    #Turns c1 clockwise and c2 anticlockwise on a face. Corners are numbered 0,1,2,3 corresponding to subsidiary indices [0][2],[0][0],[2][0],[2][2]
    #Numbering is set up so that turning face c1 times puts relevant corner in top right when viewing face on
    adj=adjacent(face)
    turn(cube,face,c1)
    turn(cube,adj[2],1)
    turn(cube,adj[4],1)
    turn(cube,adj[2],3)
    turn(cube,adj[4],3)
    turn(cube,adj[2],1)
    turn(cube,adj[4],1)
    turn(cube,adj[2],3)
    turn(cube,face,c2-c1)
    turn(cube,adj[2],1)
    turn(cube,adj[4],3)
    turn(cube,adj[2],3)
    turn(cube,adj[4],1)
    turn(cube,adj[2],1)
    turn(cube,adj[4],3)
    turn(cube,adj[2],3)
    turn(cube,face,4-c2)

def orientedges(cube,face,e1,e2):
    #Flips edges e1 and d2 on a face. Edges are numbered 0,1,2,3 corresponding to subsidiary indices [1][2],[0][1],[1][0],[2][1]
    #Numbering is set up so that turning face e1 times puts relevant edge on the right when viewing face on
    adj=adjacent(face)
    turn(cube,face,e1)
    turn(cube,adj[2],1)
    turn(cube,adj[4],1)
    turn(cube,face,3)
    turn(cube,adj[0],1)
    turn(cube,face,e2-e1)
    turn(cube,adj[0],3)
    turn(cube,face,1)
    turn(cube,adj[4],3)
    turn(cube,adj[2],3)
    turn(cube,face,4-e2)

def permuteedges(cube,face,e1,e2,e3):
    #Swaps edges e1 and e2, then edges e3 and e4 on a face. Edges are numbered 0,1,2,3 corresponding to subsidiary indices [1][2],[0][1],[1][0],[2][1]
    #Assume e4-e3=e2-e1
    #Numbering is set up so that turning face e1 times puts relevant edge on the right when viewing face on
    #NOTE(review): the docstring mentions e4 but the signature only takes e3;
    #the interactive driver below calls this with SIX arguments, which would
    #raise TypeError — confirm the intended signature before use.
    adj=adjacent(face)
    a=[2,0,1,3]
    turn(cube,face,e1)
    turn(cube,adj[2],2)
    turn(cube,adj[4],e2-e1)
    turn(cube,adj[a[e2]],2)
    turn(cube,adj[4],(4-e2+e1)%4)
    turn(cube,adj[2],2)
    turn(cube,face,e3-e1)
    turn(cube,adj[2],2)
    turn(cube,adj[4],e2-e1)
    turn(cube,adj[a[e2]],2)
    turn(cube,adj[4],(4-e2+e1)%4)
    turn(cube,adj[2],2)
    turn(cube,face,4-e3)

def permutecorners(cube,face,c1,c2,c3):
    #Performs the 3 cycle c1->c2->c3->c1 on 3 corners. Corners are numbered 0,1,2,3 corresponding to subsidiary indices [0][2],[0][0],[2][0],[2][2]
    #Numbering is set up so that turning face c1 times puts relevant corner in top right when viewing face on
    #Corners c1 and c2 lie on the face, while c3 lies on the opposite face, with the numbering matching the numbering on the given face
    #Assume c3!=3
    adj=adjacent(face)
    turn(cube,face,c1)
    turn(cube,adj[2],1)
    if c3==0:
        turn(cube,adj[4],1)
    else:
        turn(cube,adj[4],4-c3)
    turn(cube,adj[2],3)
    turn(cube,face,c2-c1)
    turn(cube,adj[2],1)
    if c3==0:
        turn(cube,adj[4],3)
    else:
        turn(cube,adj[4],c3)
    turn(cube,adj[2],3)
    turn(cube,face,4-c2)

def edgepermutation(cube,face):
    #Determines the permutation of edges on a face, assuming all correct edges are already on the face
    #Edges are numbered 0,1,2,3 corresponding to subsidiary indices [1][2],[0][1],[1][0],[2][1] when viewing cube face on
    perm=[0,0,0,0]
    adj=adjacent(face)
    a=[2,0,1,3]
    for edge in range(4):
        if edge==0:
            j=1
            k=2
        elif edge==1:
            j=0
            k=1
        elif edge==2:
            j=1
            k=0
        else:
            j=2
            k=1
        name=piecename(face,j,k)
        othercolour=(setcube(cube)[name].difference(set([cube[face][1][1]]))).pop()
        for otheredge in range(4):
            if othercolour==cube[adj[a[otheredge]]][1][1]:
                perm[edge]=otheredge
    return perm

def permutationtype(perm):
    #Returns the cycle type of a permutation of [0,1,2,3]
    #NOTE(review): the `break` when i is already in `ans` ends the whole scan,
    #so any cycle after the first is dropped (e.g. [1,0,3,2] loses (2 3));
    #`continue` looks like the intended behaviour — confirm before relying on
    #this (sign() below inherits the same issue).
    ans=[]
    for i in range(4):
        if i in ans:
            break
        else:
            ans.append(i)
            j=i
            while True:
                j=perm[j]
                if j==i:
                    break
                else:
                    ans.append(j)
            ans.append(' ')
    return ans

def sign(perm):
    #Returns the sign of a permutation
    #Counts cycle lengths from permutationtype's output, flipping the sign for
    #each even-length cycle.
    cycles=permutationtype(perm)
    cyclelength=0
    sign=1
    for x in cycles:
        if x==" ":
            if cyclelength%2==0:
                sign=-sign
            cyclelength=0
        else:
            cyclelength=cyclelength+1
    return sign

def is3cycle(perm):
    #Returns 0 if perm is not a 3 cycle, and otherwise returns the 3 elements of the cycle
    #NOTE(review): this function has no body in the source — the file is
    #unfinished here and will not parse as-is; it needs at least a `pass` or
    #a real implementation.

#cube=[[[0,0,0],[0,0,0],[0,0,0]],[[0,0,0],[0,0,0],[0,0,0]],[[0,0,0],[0,0,0],[0,0,0]],[[0,0,0],[0,0,0],[0,0,0]],[[0,0,0],[0,0,0],[0,0,0]],[[0,0,0],[0,0,0],[0,0,0]]]
#Sample scrambled state below
cube=[[[3,2,5],[0,0,5],[2,0,3]],[[2,3,0],[1,1,1],[1,1,1]],[[1,4,4],[2,2,1],[4,3,3]],[[0,4,3],[0,3,2],[4,0,5]],[[2,3,0],[5,4,4],[1,3,4]],[[0,4,5],[5,5,2],[5,5,2]]]
#Interactive driver: repeatedly read a face and four edge numbers, apply
#permuteedges, and stop on any non-integer input.
while True:
    f=raw_input("Face: ")
    C1=raw_input("e1: ")
    C2=raw_input("e2: ")
    C3=raw_input("e3: ")
    C4=raw_input("e4: ")
    try:
        face=int(f)
        e1=int(C1)
        e2=int(C2)
        e3=int(C3)
        e4=int(C4)
    except:
        break
    permuteedges(cube,face,e1,e2,e3,e4)
#Dump the resulting cube state, one sticker per line.
for i in range(6):
    print "Face",i
    for j in range(3):
        for k in range(3):
            print cube[i][j][k]
    print "-----"

#Read scrambled state
#Later will replace this with computer vision code but for now manually enter the sticker colours
#for i in range(6):
#    print "Face",i
#    for j in range(3):
#        for k in range(3):
#            cube[i][j][k]=raw_input("C:")
#cube[i][j][k]=int(cubie) #May need above line or similar depending on representation of colours. For now keep as integers. Note that if this changes will need to change the labelling of faces
#    print "-----"

#Solving algorithm implented will be my intuitive/conjugator solver.
#D layer edges
#Decide which face to start on
face=5
correct=0
for i in range(5,-1,-1):
    edgescorrect=0
    for edge in range(4):
        if edgecorrect(cube,i,edge)==0:
            edgescorrect=edgescorrect+1
    if edgescorrect>correct:
        correct=edgescorrect
        face=i
adj=adjacent(face)
#Fix remaining edges on this face
for edge in range(4):
    if edgecorrect(cube,face,edge)==0:
        continue
    elif edgecorrect(cube,face,edge)==1:
        turn(cube,face,edge)
        turn(cube,adj[2],1)
        turn(cube,adj[4],1)
        turn(cube,face,3)
        turn(cube,adj[0],1)
        turn(cube,face,4-edge)
    else:
        pass
        #NEED TO THINK ABOUT CODE HERE
#Middle layer edges
#NEED TO THINK ABOUT CODE HERE
#Permute U layer edges
perm=edgepermutation(cube,adj[4])
if sign(perm)==-1:
    turn(cube,adj[4],1)
    perm=edgepermutation(cube,adj[4])
#Edges now require an even permutation to position
#CONTINUE FROM HERE
#Worth considering embedding detecting the permutation required in the sign function
#Orient U layer edges
#Permute corners
#Orient corners
#while True:
#    f=raw_input("Face: ")
#    C=raw_input("edge: ")
#    try:
#        face=int(f)
#        e1=int(C)
#    except:
#        break
#    print edgecorrect(cube,face,e1)
#for i in range(6):
#    print "Face",i
#    for j in range(3):
#        for k in range(3):
#            print cube[i][j][k]
#    print "-----"
[ "tristankalloniatis@gmail.com" ]
tristankalloniatis@gmail.com
b24f887ce614c49d40017dfe367f9809d91a2b74
1fbf39b1b51f222961dc2d8018adb5c358b26d22
/main.py
9aad821534c05359e1a53905963682d4ee477846
[]
no_license
mtokarev93/Flask
2b99fa97c7aa54cbfd89a85ad6944a002b96a9e5
dfdbaf1815b337c283da161ecd0686ceee34d2d9
refs/heads/main
2023-06-30T01:54:41.555045
2021-08-05T17:34:31
2021-08-05T17:34:31
393,123,619
0
0
null
null
null
null
UTF-8
Python
false
false
2,860
py
from flask import Flask, render_template, request, redirect from flask_sqlalchemy import SQLAlchemy from cloudipsp import Api, Checkout app = Flask(__name__) #создание объекта на основе класса Flask. Можем отслеживать те функции, которые будут отслеживать переходы на разные URL адреса. app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///shop.db' app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False db = SQLAlchemy(app) #Чтобы отследить главную страницу, нам необходимо создать декоратор @app.route('/') def index(): items = Item.query.order_by(Item.price).all() return render_template('index.html', data=items ) # ORM - Object-relational model class Item(db.Model): id = db.Column(db.Integer, primary_key=True) title = db.Column(db.String(100), nullable=False) price = db.Column(db.Integer, nullable=False) is_active = db.Column(db.Boolean, default=True) # item = Item(id=....) # item.save() => 'INSERT INTO items .....' # query = 'SELECT * ....' # db.execute(query) #text = db.Column(db.Text, nullable=True) def __repr__(self): return self.title @app.route('/about') def about(): return render_template('about.html') # GET /item/4 => { Холодильник, белый, 100 000 ₽} # GET /item/4/buying @app.route('/item/<int:id>/purchase') def item_buy(id): item = Item.query.get(id) api = Api(merchant_id=1396424, secret_key='test') checkout = Checkout(api=api) data = { "currency": "RUB", "amount": str(item.price) + "00" } url = checkout.url(data).get('checkout_url') return redirect(url) # https://google.ru/products # REST API # POST site.ru/item # GET site.ru/item/3 # site.ru/item # Это handler (обработчик) # POST | GET /item # POST /item # body: # title=Tovar&price=100 # GET site.ru/ # @app.route('/') => "<html><head></head><.....></html>" # Введите название: Товар # Введите цену: 20 000 # => POST /item # BODY # title=Товар&price=20000 # => request.form = { # 'title': 'Товар', # 'price': '20000', # } # a = dict() # 'Vasya' => +7985... # 'Petya' => +7999... # Petya? 
# a['Petya'] @app.route('/item', methods=['POST', 'GET']) def create(): if request.method == "POST": title = request.form['title'] price = request.form['price'] item = Item(title=title, price=price) try: db.session.add(item) db.session.commit() return redirect('/') except: return "Получилась ошибка" else: return render_template('create.html') if __name__ == "__main__": app.run(debug=True)
[ "micheal.93@mail.ru" ]
micheal.93@mail.ru