blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2
values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313
values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107
values | src_encoding stringclasses 20
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 4 6.02M | extension stringclasses 78
values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2b2789eba257e061d11d9b5b879f59907bb1a3fa | 1eb3d05443ca08403aecab7d7056eb6195ea3ca3 | /code/params/grec_params.py | a2b8cf90caa34a9a364b004210e463cdb714b87f | [] | no_license | kfirsalo/QGCN | 0582a33caf47c2f80de17b0035b82f152ec64e3e | 9a311134681648d9b924efef7e037439dfbb789a | refs/heads/master | 2020-12-21T18:59:31.716210 | 2020-01-27T16:22:05 | 2020-01-27T16:22:05 | 236,529,076 | 0 | 0 | null | 2020-01-27T15:57:18 | 2020-01-27T15:57:16 | null | UTF-8 | Python | false | false | 4,303 | py | from torch.nn.functional import relu, softmax, cross_entropy
from torch.optim import Adam
import os
from betweenness_centrality import BetweennessCentralityCalculator
from bfs_moments import BfsMomentsCalculator
from bilinear_model import LayeredBilinearModule
from dataset.dataset_model import BilinearDataset
from dataset.dataset_external_data import ExternalData
from feature_calculators import FeatureMeta
from multi_class_bilinear_activator import BilinearMultiClassActivator
from params.parameters import BilinearDatasetParams, BilinearActivatorParams, BilinearLayerParams, LinearLayerParams, \
LayeredBilinearModuleParams, DEG, CENTRALITY, BFS, NORM_REDUCED, ExternalDataParams
# --------------------------------------------------- PROTEIN ---------------------------------------------------------
class GrecAllExternalDataParams(ExternalDataParams):
    """External (per-node) data configuration for the full GREC dataset.

    Points the loader at the node-attribute CSV and names the columns that
    identify each graph/node and carry the attribute values.
    """

    def __init__(self):
        super().__init__()
        # CSV file holding one attribute row per graph node.
        self.FILE_NAME = "GREC_external_data_all.csv"
        # Columns identifying the graph and the node within it.
        self.GRAPH_COL = "g_id"
        self.NODE_COL = "node"
        # Categorical column(s) mapped through an embedding layer.
        self.EMBED_COLS = ["type"]
        # Numeric attribute columns used directly as features.
        self.VALUE_COLS = ["x", "y"]
class GrecDatasetAllParams(BilinearDatasetParams):
    """Edge-list dataset configuration for the full GREC graph collection."""

    def __init__(self):
        super().__init__()
        self.DATASET_NAME = "Web_train"
        self.DATASET_FILENAME = "GREC_all.csv"
        # Edge endpoints and the column naming which graph an edge belongs to.
        self.SRC_COL = "src"
        self.DST_COL = "dst"
        self.GRAPH_NAME_COL = "g_id"
        self.LABEL_COL = "label"
        # Use the whole file; graphs are treated as undirected.
        self.PERCENTAGE = 1
        self.DIRECTED = False
        # Topological features computed per node.
        self.FEATURES = [DEG, CENTRALITY, BFS]
# ----------------------------------------------------------------------------------------------------------------------
class GrecBilinearLayerParams(BilinearLayerParams):
    """Shape/activation configuration for the bilinear output layer on GREC."""

    def __init__(self, in_col_dim, ftr_len):
        super().__init__(in_col_dim, ftr_len)
        # Left factor: rows must match the previous layer's output width
        # (FirstLayerModelParams::OUT_DIM); a single output row is produced.
        self.LEFT_LINEAR_ROW_DIM = in_col_dim
        self.LEFT_LINEAR_COL_DIM = 1
        # Right factor: rows must match FirstLayerModelParams::ROW_DIM;
        # 22 output columns — presumably one per GREC class (TODO confirm).
        self.RIGHT_LINEAR_ROW_DIM = ftr_len
        self.RIGHT_LINEAR_COL_DIM = 22
        # Softmax over the last (class) axis of the [batch, 1, 22] output.
        self.ACTIVATION_FUNC = softmax
        self.ACTIVATION_FUNC_ARGS = {"dim": 2}
class GrecLinearLayerParams(LinearLayerParams):
    """One ReLU-activated linear layer (in_dim -> out_dim) with dropout."""

    def __init__(self, in_dim, out_dim, dropout=0.3):
        super().__init__(in_dim, out_dim, dropout)
        self.ROW_DIM = in_dim
        self.COL_DIM = out_dim
        self.ACTIVATION_FUNC = relu
        self.DROPOUT = dropout
class GrecLayeredBilinearModuleParams(LayeredBilinearModuleParams):
    """Hyper-parameters for the layered bilinear (QGCN) module on GREC.

    Builds a default two-layer linear stack when no explicit ``layer_dim``
    is supplied and wires the bilinear head to the stack's dimensions.
    """

    def __init__(self, ftr_len=6, layer_dim=None, embed_vocab_dim=None):
        super().__init__(ftr_len, layer_dim, embed_vocab_dim)
        # One embedding (of width 10) per categorical external-data column.
        self.EMBED_DIMS = [10]
        self.NORM = NORM_REDUCED
        self.DROPOUT = 0
        self.LR = 1e-3
        self.OPTIMIZER = Adam
        self.WEIGHT_DECAY = 1e-2
        if layer_dim is None:
            # Default stack: ftr_len -> 50 -> 10; only the first NUM_LAYERS
            # entries appear to be consumed.
            # NOTE(review): entries 3 and 4 look unused with NUM_LAYERS = 2,
            # and entry 3 duplicates entry 2 — confirm this is intentional.
            self.NUM_LAYERS = 2
            self.LINEAR_PARAMS_LIST = [
                GrecLinearLayerParams(in_dim=ftr_len, out_dim=50, dropout=self.DROPOUT),
                GrecLinearLayerParams(in_dim=50, out_dim=10, dropout=self.DROPOUT),
                GrecLinearLayerParams(in_dim=50, out_dim=10, dropout=self.DROPOUT),
                GrecLinearLayerParams(in_dim=200, out_dim=1, dropout=self.DROPOUT)
            ]
            # Bilinear head: columns of the last used linear layer on the left,
            # original feature width on the right.
            self.BILINEAR_PARAMS = GrecBilinearLayerParams(self.LINEAR_PARAMS_LIST[self.NUM_LAYERS - 1].COL_DIM,
                                                           self.LINEAR_PARAMS_LIST[0].ROW_DIM)
class GrecBilinearActivatorParams(BilinearActivatorParams):
    """Training-loop configuration for the multi-class GREC activator."""

    def __init__(self):
        super().__init__()
        # 70/15/15 train/dev/test split.
        self.DEV_SPLIT = 0.15
        self.TEST_SPLIT = 0.15
        # Multi-class objective (an alternative factor loss was considered).
        self.LOSS = cross_entropy
        self.BATCH_SIZE = 16
        self.EPOCHS = 500
        self.DATASET = "GREC - MultiClass"
if __name__ == '__main__':
    # Build the external node-attribute table and the GREC graph dataset,
    # then train the multi-class bilinear (QGCN) model end to end.
    # NOTE(review): the variable is named "aids_train_ds" — presumably a
    # copy-paste from the AIDS dataset script; it holds GREC data here.
    ext_train = ExternalData(GrecAllExternalDataParams())
    aids_train_ds = BilinearDataset(GrecDatasetAllParams(), external_data=ext_train)
    activator = BilinearMultiClassActivator(LayeredBilinearModule(GrecLayeredBilinearModuleParams(
        ftr_len=aids_train_ds.len_features, embed_vocab_dim=ext_train.len_embed())),
        GrecBilinearActivatorParams(), aids_train_ds)
    activator.train()
| [
"ovednagar@gmail.com"
] | ovednagar@gmail.com |
d6633f66e546763035964e64d33a6ef8f827ab98 | f15cc90617a9ba8139a9019fa654b04cd1a475d8 | /src/test/From_GRIB_to_GEOTIFF_Noah.py | 9b7ca39c0fef0775f6a704e053448d3fee515031 | [
"MIT"
] | permissive | AKotb/AGPS_PYT27 | 584fc1354c86ed96bf10cbbf3f9be9fa590f293c | 8fed04ac3525e967eafa169b0d1fd2539caabe9a | refs/heads/master | 2020-09-21T00:38:07.113762 | 2020-02-20T11:51:17 | 2020-02-20T11:51:17 | 224,630,396 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,611 | py | # Code to convert ".grb" files to geotiff files.
# Coded by Mohamed Ahmed
# Change the processed years and path to the input data
import os
import gdal
namesCLM = ["NSWRS", "NLWRS", "LHTFL", "SHTFL","var155","var131","var132","EVP","var235","var234","SNOM","var138","WEASD","TSOIL_1","TSOIL_2","TSOIL_3","TSOIL_4","TSOIL_5","TSOIL_6","TSOIL_7","TSOIL_8","TSOIL_9","TSOIL_10","SOILM_1","SOILM_2","SOILM_3","SOILM_4","SOILM_5","SOILM_6","SOILM_7","SOILM_8","SOILM_9","SOILM_10","TCDC","WIND","TMP","SPFH","PRES","var204","var205"]
namesNOAH = ["NSWRS", "NLWRS", "LHTFL", "SHTFL","var155","var131","var132","EVP","var235","var234","SNOM","var138","WEASD","TSOIL_1","TSOIL_2","TSOIL_3","TSOIL_4","SOILM_1","SOILM_2","SOILM_3","SOILM_4","TCDC","WIND","TMP","SPFH","PRES","var204","var205"]
namesVIC = ["var131","var132","EVP","SSRUN","BGRUN", "SNOM", "WEASD","SOILM_1","SOILM_2","SOILM_3","TCDC","WIND","TMP","SPFH","PRES","DSWRF", "DLWRF"]
namesMOSAIC = ["NSWRS", "NLWRS", "LHTFL", "SHTFL","var155","var131","var132","EVP","var235","var234","SNOM","var138","TSOIL_1","WEASD","SOILM_1","SOILM_2","SOILM_3","TCDC","WIND","TMP","SPFH","PRES","var204","var205"]
# Change the processed years and path to the input data
# Convert every NOAH ".grb" file for each year into one GeoTIFF per band,
# naming each output after the corresponding NOAH variable.
for year in range(2002, 2018):
    os.chdir("E:\\NOAH10_M\\" + str(year))
    for file in os.listdir(os.getcwd()):
        # Hoisted out of the band loop: the extension check does not depend
        # on the band index, so test it once per file.
        if file[-3:] != "grb":
            continue
        # gdal_translate bands are 1-based, hence enumerate(..., start=1).
        # Change namesNOAH to the matching list for other models.
        for band, var_name in enumerate(namesNOAH, start=1):
            outName = file[:-4] + "_" + var_name + ".tif"
            os.system("gdal_translate -b " + str(band) + " -of GTiff " + file + " " + outName)
| [
"A7madKotb@gmail.com"
] | A7madKotb@gmail.com |
d048220677e6d76e01cdd6e1eeaf484c06a1f347 | 93570735d20043e28c85747d248511e06cb9f231 | /lib/colors.py | 17e16233034b49bb05b91675da7d2a4a18401d1c | [] | no_license | Yeahboi12356/phoneinfoga | bc64483223428042102a3623c2eee51dcc5a32f4 | 7bbd570ceb51be6869ea03bfabe2cee12f868dde | refs/heads/main | 2023-08-29T23:40:34.075879 | 2021-11-08T06:43:34 | 2021-11-08T06:43:34 | 425,723,676 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 402 | py | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
#
# @name : PhoneInfoga - Phone numbers OSINT tool
# @url : https://github.com/sundowndev
# @author : Raphael Cerveaux (sundowndev)
import sys
import colorama
# On Windows, colorama translates ANSI escape sequences into Win32 calls.
if sys.platform == 'win32':
    colorama.init()
# ANSI color templates; "%s" is filled with a style code (e.g. 0 normal,
# 1 bold) before the foreground color is applied.
R = "\033[%s;31m"  # red
B = "\033[%s;34m"  # blue
G = "\033[%s;32m"  # green
W = "\033[%s;38m"  # white / default foreground
Y = "\033[%s;33m"  # yellow
E = "\033[0m"      # reset all attributes
BOLD = "\033[1m"   # bold on
"noreply@github.com"
] | noreply@github.com |
46ac07bad8a6e3103486ea0e16532532835096d7 | 4dcb08f3961590de108976ff9dc77f70ac17b1ae | /3. Exploring Data/assignment5.py | 1094b752ee4c23d6731af8aa992715f8def98a92 | [] | no_license | mjk276/DAt210x-Programming-with-Python-for-DS | 0ee3c6a20672787575c956901f7f8ec26aa5aca9 | bfe10e4895f34d6f2412e7d84e5f50d756cf3f85 | refs/heads/master | 2021-06-03T05:25:51.995703 | 2016-08-06T21:13:55 | 2016-08-06T21:13:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 696 | py | #
# This code is intentionally missing!
# Read the directions on the course lab page!
#
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib
from pandas.tools.plotting import andrews_curves
from pandas.tools.plotting import parallel_coordinates
# Look pretty...
matplotlib.style.use('ggplot')
# Load the wheat-seeds dataset, using the first column as the index.
df = pd.read_csv("Datasets/wheat.data", index_col=0, header=0)
# Andrews curves over all features, colored by wheat type.
plt.figure()
andrews_curves(df, 'wheat_type')
plt.show()
# Drop the 'area' and 'perimeter' features in a single call instead of two
# separate per-column drops.
df = df.drop(['area', 'perimeter'], axis=1)
# Andrews curves again on the reduced feature set, for comparison.
plt.figure()
andrews_curves(df, 'wheat_type')
plt.show() | [
"Puneet Narula"
] | Puneet Narula |
7e04d44c0839f69eba1fdbba4f985107bf70d72e | dce00a226c2c1b458a18c25e3ffa58aed05e21c1 | /continuous_contract/__init__.py | 5994660f507ffecd5d088c109582a0a628774e8d | [] | no_license | invcomb/Assignment3 | 5bc5d5cfa141877e05a7f34d7ee18546e1a07e08 | e114c39baf07ae663f79027c4839b9ab86811ba0 | refs/heads/master | 2023-05-01T06:41:47.467279 | 2021-05-13T12:57:07 | 2021-05-13T12:57:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 59 | py | from .ContinuousMainContract import ContinuousMainContract
| [
"219040029@link.cuhk.edu.cn"
] | 219040029@link.cuhk.edu.cn |
29481648eb96d39e6a7dd17bdbe4e7a2d1c35ac2 | 887ef52f59ba21dc74473c3607b10c1985007ae8 | /Scrapy_ZhongHongIndustryResearch_V2_51/Scrapy_ZhongHongIndustryResearch_V2_51/spiders/zhongHongIndustryResearch_V1.py | 76f290fd414f27b949551a219bfb1a0b331e0c6c | [] | no_license | GJJ121785021/first-upload-spider | 246834f9d2c761e5ec521b4a9f12eb3430539e21 | daf2bc6f01db90ae8bfb5c7452d1bffd204ec0d4 | refs/heads/master | 2021-04-04T15:24:16.579619 | 2020-03-20T02:11:24 | 2020-03-20T02:11:24 | 248,467,440 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,557 | py | # -*- coding: utf-8 -*-
import scrapy
from Scrapy_ZhongHongIndustryResearch_V2_51.items import ScrapyZhonghongindustryresearchV251Item
from scrapy.crawler import logger
import json
import requests
class ZhonghongindustryresearchV1Spider(scrapy.Spider):
    """Scrape industry statistics from the ZhongHong industry-research site.

    Walks the navigation tree below the targeted industry, mirroring each
    catalogue node into a local id-mapping HTTP service (to obtain
    parent ids for the items), and finally parses the per-indicator yearly
    data tables into items.
    """
    name = 'zhongHongIndustryResearch_V1'
    start_urls = ['http://zhcy.app.ckcest.cn/api/GetNavJson?id=10']
    # JSON endpoint listing the children of an archive node.
    url_get_id = 'http://zhcy.app.ckcest.cn/api/GetArchAnsyJson?id={}'
    # HTML table with yearly values (1990-2019) for one indicator id.
    url_get_data = 'http://zhcy.app.ckcest.cn/include/handler.ashx?ar=76&id={}&by=1990&ey=2019&new=1&nt=1&dv=2'
    # Local helper mapping (table_name, name, pid) -> row id, creating the
    # row when missing; the api is similar to mysql_for_id.py
    url_mysql_api = 'http://127.0.0.1:5591/?table_name={}&name={}&pid={}&must_create=1'
    table_name = 'entertainment_catalogue'
    root_id = 18005

    def parse(self, response):
        """Locate the target industry in the top-level navigation JSON."""
        datas = json.loads(response.text).get('subNavItem')
        for data in datas:
            eles = data.get('archInfoItem')
            for ele in eles:
                id = ele.get('aid')
                name = ele.get('aName')
                # Only follow the "stationery, art, sports and entertainment
                # goods manufacturing" industry (the literal must stay in
                # Chinese — it is compared against the site's JSON).
                if '文教、工美、体育和娱乐用品制造业' == name:
                    new_id = requests.get(self.url_mysql_api.format(self.table_name, name, self.root_id)).json().get(
                        'parent_id')
                    yield scrapy.Request(url=self.url_get_id.format(id), meta={'parent_id': new_id}, callback=self.parse_page)

    def parse_page(self, response):
        """Recurse through catalogue nodes; leaves go to parse_page_2."""
        # id needed for requesting the next page
        # id_data = eval(response.text.replace('null', '"null"')).get('indexItem')[0].get('id')
        datas = json.loads(response.text).get('subArchItem')
        for data in datas:
            isParent = data.get('isParent')
            id = data.get('id')
            if isParent:
                # Interior node: register it locally, then descend.
                name = data.get('name')
                new_id = requests.get(self.url_mysql_api.format(self.table_name, name, response.meta['parent_id'])).json().get(
                    'parent_id')
                yield scrapy.Request(url=self.url_get_id.format(id)+'&name='+name, meta={'parent_id': new_id},
                                     callback=self.parse_page)
            else:
                # Leaf node: fetch its indicator listing.
                yield scrapy.Request(self.url_get_id.format(id), meta={'parent_id': response.meta['parent_id']},
                                     callback=self.parse_page_2)

    def parse_page_2(self, response):
        """Request the yearly data table for every indicator of a leaf node."""
        datas = json.loads(response.text).get('indexItem')
        for data in datas:
            id = data.get('id')
            yield scrapy.Request(self.url_get_data.format(id), meta={'parent_id': response.meta['parent_id']},
                                 callback=self.parse_response)

    def parse_response(self, response):
        """Parse one indicator's HTML data table into yearly items."""
        # skip pages that return no data
        if response.text == '<err>Empty</err>':
            logger.info('该页面数据为空')
            return None
        indic_name = response.xpath('//tr[2]/td[2]/text()').get() # indicator name
        logger.info(indic_name)
        new_id = requests.get(self.url_mysql_api.format(self.table_name, indic_name, response.meta['parent_id'])).json().get('parent_id')
        unit = response.xpath('//tr[2]/td[4]/text()').get() # unit of measurement
        region = response.xpath('//tr[2]/td[3]/text()').get() # region: nationwide / province / city, etc.
        # Pull the year headers and the data cells out of the table as lists;
        # the first four columns are metadata, hence the [4:] slices.
        datatimes = response.xpath('//tr[1]/th/text()').getall()[4:]
        values = response.xpath('//tr[2]/td/text()').getall()[4:]
        for datatime, value in zip(datatimes, values):
            create_time = datatime # time the data point refers to
            # Strip a trailing "year" character (年) from headers like "2019年".
            if create_time.endswith('年'):
                create_time = create_time[:-1]
            item = ScrapyZhonghongindustryresearchV251Item()
            item['parent_id'] = str(new_id)
            item['root_id'] = '18'
            item['indic_name'] = indic_name
            item['frequency'] = 5
            item['unit'] = unit
            item['data_source'] = '中宏产业研究平台'
            item['region'] = region
            item['country'] = '0'
            item['sign'] = '03'
            item['status'] = 1
            item['cleaning_status'] = 0
            item['create_time'] = create_time
            item['data_year'] = int(datatime[:-1])
            item['data_day'] = 0
            item['data_month'] = 0
            item['data_value'] = float(value.replace(',', '')) # strip thousands-separator commas from the value
            yield item
import os
if __name__ == '__main__':
    # Convenience launcher: run the spider via the scrapy CLI.
    os.system('scrapy crawl zhongHongIndustryResearch_V1')
| [
"121785021@qq.com"
] | 121785021@qq.com |
214f46e4b4086618c4ebc13bfb000e9aef54231f | 69720c18b1a2d9259b913bf1cc0ff1ac05687d0d | /nets/inception_v3.py | a7969e24e45b42587c1fbf1d5904c8c7a89f8396 | [] | no_license | liujing1003/tfslim | 282bc6d347bd3428169202fea6f2da007b887768 | 9c54dbb75426a75645ab8cf95a75fd60cb1cd78f | refs/heads/master | 2021-05-07T18:07:33.498852 | 2017-04-29T21:04:36 | 2017-04-29T21:04:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27,254 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains the definition for inception v3 classification network."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tfslim.nets import inception_utils
slim = tf.contrib.slim  # short alias used throughout this file


def trunc_normal(stddev):
  """Return a truncated-normal initializer with mean 0.0 and the given stddev.

  Replaces the original ``lambda`` binding (PEP 8 E731) with an equivalent
  named function; call signature and behavior are unchanged.
  """
  return tf.truncated_normal_initializer(0.0, stddev)
def inception_v3_base(inputs,
final_endpoint='Mixed_7c',
min_depth=16,
depth_multiplier=1.0,
scope=None):
"""Inception model from http://arxiv.org/abs/1512.00567.
Constructs an Inception v3 network from inputs to the given final endpoint.
This method can construct the network up to the final inception block
Mixed_7c.
Note that the names of the layers in the paper do not correspond to the names
of the endpoints registered by this function although they build the same
network.
Here is a mapping from the old_names to the new names:
Old name | New name
=======================================
conv0 | Conv2d_1a_3x3
conv1 | Conv2d_2a_3x3
conv2 | Conv2d_2b_3x3
pool1 | MaxPool_3a_3x3
conv3 | Conv2d_3b_1x1
conv4 | Conv2d_4a_3x3
pool2 | MaxPool_5a_3x3
mixed_35x35x256a | Mixed_5b
mixed_35x35x288a | Mixed_5c
mixed_35x35x288b | Mixed_5d
mixed_17x17x768a | Mixed_6a
mixed_17x17x768b | Mixed_6b
mixed_17x17x768c | Mixed_6c
mixed_17x17x768d | Mixed_6d
mixed_17x17x768e | Mixed_6e
mixed_8x8x1280a | Mixed_7a
mixed_8x8x2048a | Mixed_7b
mixed_8x8x2048b | Mixed_7c
Args:
inputs: a tensor of size [batch_size, height, width, channels].
final_endpoint: specifies the endpoint to construct the network up to. It
can be one of ['Conv2d_1a_3x3', 'Conv2d_2a_3x3', 'Conv2d_2b_3x3',
'MaxPool_3a_3x3', 'Conv2d_3b_1x1', 'Conv2d_4a_3x3', 'MaxPool_5a_3x3',
'Mixed_5b', 'Mixed_5c', 'Mixed_5d', 'Mixed_6a', 'Mixed_6b', 'Mixed_6c',
'Mixed_6d', 'Mixed_6e', 'Mixed_7a', 'Mixed_7b', 'Mixed_7c'].
min_depth: Minimum depth value (number of channels) for all convolution ops.
Enforced when depth_multiplier < 1, and not an active constraint when
depth_multiplier >= 1.
depth_multiplier: Float multiplier for the depth (number of channels)
for all convolution ops. The value must be greater than zero. Typical
usage will be to set this value in (0, 1) to reduce the number of
parameters or computation cost of the model.
scope: Optional variable_scope.
Returns:
tensor_out: output tensor corresponding to the final_endpoint.
end_points: a set of activations for external use, for example summaries or
losses.
Raises:
ValueError: if final_endpoint is not set to one of the predefined values,
or depth_multiplier <= 0
"""
# end_points will collect relevant activations for external use, for example
# summaries or losses.
end_points = {}
if depth_multiplier <= 0:
raise ValueError('depth_multiplier is not greater than zero.')
depth = lambda d: max(int(d * depth_multiplier), min_depth)
with tf.variable_scope(scope, 'InceptionV3', [inputs]):
with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
stride=1, padding='VALID'):
# 299 x 299 x 3
end_point = 'Conv2d_1a_3x3'
net = slim.conv2d(inputs, depth(32), [3, 3], stride=2, scope=end_point)
end_points[end_point] = net
if end_point == final_endpoint: return net, end_points
# 149 x 149 x 32
end_point = 'Conv2d_2a_3x3'
net = slim.conv2d(net, depth(32), [3, 3], scope=end_point)
end_points[end_point] = net
if end_point == final_endpoint: return net, end_points
# 147 x 147 x 32
end_point = 'Conv2d_2b_3x3'
net = slim.conv2d(net, depth(64), [3, 3], padding='SAME', scope=end_point)
end_points[end_point] = net
if end_point == final_endpoint: return net, end_points
# 147 x 147 x 64
end_point = 'MaxPool_3a_3x3'
net = slim.max_pool2d(net, [3, 3], stride=2, scope=end_point)
end_points[end_point] = net
if end_point == final_endpoint: return net, end_points
# 73 x 73 x 64
end_point = 'Conv2d_3b_1x1'
net = slim.conv2d(net, depth(80), [1, 1], scope=end_point)
end_points[end_point] = net
if end_point == final_endpoint: return net, end_points
# 73 x 73 x 80.
end_point = 'Conv2d_4a_3x3'
net = slim.conv2d(net, depth(192), [3, 3], scope=end_point)
end_points[end_point] = net
if end_point == final_endpoint: return net, end_points
# 71 x 71 x 192.
end_point = 'MaxPool_5a_3x3'
net = slim.max_pool2d(net, [3, 3], stride=2, scope=end_point)
end_points[end_point] = net
if end_point == final_endpoint: return net, end_points
# 35 x 35 x 192.
# Inception blocks
with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
stride=1, padding='SAME'):
# mixed: 35 x 35 x 256.
end_point = 'Mixed_5b'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, depth(64), [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, depth(48), [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, depth(64), [5, 5],
scope='Conv2d_0b_5x5')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(net, depth(64), [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, depth(96), [3, 3],
scope='Conv2d_0b_3x3')
branch_2 = slim.conv2d(branch_2, depth(96), [3, 3],
scope='Conv2d_0c_3x3')
with tf.variable_scope('Branch_3'):
branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, depth(32), [1, 1],
scope='Conv2d_0b_1x1')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
end_points[end_point] = net
if end_point == final_endpoint: return net, end_points
# mixed_1: 35 x 35 x 288.
end_point = 'Mixed_5c'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, depth(64), [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, depth(48), [1, 1], scope='Conv2d_0b_1x1')
branch_1 = slim.conv2d(branch_1, depth(64), [5, 5],
scope='Conv_1_0c_5x5')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(net, depth(64), [1, 1],
scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, depth(96), [3, 3],
scope='Conv2d_0b_3x3')
branch_2 = slim.conv2d(branch_2, depth(96), [3, 3],
scope='Conv2d_0c_3x3')
with tf.variable_scope('Branch_3'):
branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, depth(64), [1, 1],
scope='Conv2d_0b_1x1')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
end_points[end_point] = net
if end_point == final_endpoint: return net, end_points
# mixed_2: 35 x 35 x 288.
end_point = 'Mixed_5d'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, depth(64), [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, depth(48), [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, depth(64), [5, 5],
scope='Conv2d_0b_5x5')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(net, depth(64), [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, depth(96), [3, 3],
scope='Conv2d_0b_3x3')
branch_2 = slim.conv2d(branch_2, depth(96), [3, 3],
scope='Conv2d_0c_3x3')
with tf.variable_scope('Branch_3'):
branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, depth(64), [1, 1],
scope='Conv2d_0b_1x1')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
end_points[end_point] = net
if end_point == final_endpoint: return net, end_points
# mixed_3: 17 x 17 x 768.
end_point = 'Mixed_6a'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, depth(384), [3, 3], stride=2,
padding='VALID', scope='Conv2d_1a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, depth(64), [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, depth(96), [3, 3],
scope='Conv2d_0b_3x3')
branch_1 = slim.conv2d(branch_1, depth(96), [3, 3], stride=2,
padding='VALID', scope='Conv2d_1a_1x1')
with tf.variable_scope('Branch_2'):
branch_2 = slim.max_pool2d(net, [3, 3], stride=2, padding='VALID',
scope='MaxPool_1a_3x3')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2])
end_points[end_point] = net
if end_point == final_endpoint: return net, end_points
# mixed4: 17 x 17 x 768.
end_point = 'Mixed_6b'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, depth(192), [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, depth(128), [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, depth(128), [1, 7],
scope='Conv2d_0b_1x7')
branch_1 = slim.conv2d(branch_1, depth(192), [7, 1],
scope='Conv2d_0c_7x1')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(net, depth(128), [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, depth(128), [7, 1],
scope='Conv2d_0b_7x1')
branch_2 = slim.conv2d(branch_2, depth(128), [1, 7],
scope='Conv2d_0c_1x7')
branch_2 = slim.conv2d(branch_2, depth(128), [7, 1],
scope='Conv2d_0d_7x1')
branch_2 = slim.conv2d(branch_2, depth(192), [1, 7],
scope='Conv2d_0e_1x7')
with tf.variable_scope('Branch_3'):
branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, depth(192), [1, 1],
scope='Conv2d_0b_1x1')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
end_points[end_point] = net
if end_point == final_endpoint: return net, end_points
# mixed_5: 17 x 17 x 768.
end_point = 'Mixed_6c'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, depth(192), [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, depth(160), [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, depth(160), [1, 7],
scope='Conv2d_0b_1x7')
branch_1 = slim.conv2d(branch_1, depth(192), [7, 1],
scope='Conv2d_0c_7x1')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(net, depth(160), [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, depth(160), [7, 1],
scope='Conv2d_0b_7x1')
branch_2 = slim.conv2d(branch_2, depth(160), [1, 7],
scope='Conv2d_0c_1x7')
branch_2 = slim.conv2d(branch_2, depth(160), [7, 1],
scope='Conv2d_0d_7x1')
branch_2 = slim.conv2d(branch_2, depth(192), [1, 7],
scope='Conv2d_0e_1x7')
with tf.variable_scope('Branch_3'):
branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, depth(192), [1, 1],
scope='Conv2d_0b_1x1')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
end_points[end_point] = net
if end_point == final_endpoint: return net, end_points
# mixed_6: 17 x 17 x 768.
end_point = 'Mixed_6d'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, depth(192), [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, depth(160), [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, depth(160), [1, 7],
scope='Conv2d_0b_1x7')
branch_1 = slim.conv2d(branch_1, depth(192), [7, 1],
scope='Conv2d_0c_7x1')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(net, depth(160), [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, depth(160), [7, 1],
scope='Conv2d_0b_7x1')
branch_2 = slim.conv2d(branch_2, depth(160), [1, 7],
scope='Conv2d_0c_1x7')
branch_2 = slim.conv2d(branch_2, depth(160), [7, 1],
scope='Conv2d_0d_7x1')
branch_2 = slim.conv2d(branch_2, depth(192), [1, 7],
scope='Conv2d_0e_1x7')
with tf.variable_scope('Branch_3'):
branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, depth(192), [1, 1],
scope='Conv2d_0b_1x1')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
end_points[end_point] = net
if end_point == final_endpoint: return net, end_points
# mixed_7: 17 x 17 x 768.
end_point = 'Mixed_6e'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, depth(192), [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, depth(192), [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, depth(192), [1, 7],
scope='Conv2d_0b_1x7')
branch_1 = slim.conv2d(branch_1, depth(192), [7, 1],
scope='Conv2d_0c_7x1')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(net, depth(192), [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, depth(192), [7, 1],
scope='Conv2d_0b_7x1')
branch_2 = slim.conv2d(branch_2, depth(192), [1, 7],
scope='Conv2d_0c_1x7')
branch_2 = slim.conv2d(branch_2, depth(192), [7, 1],
scope='Conv2d_0d_7x1')
branch_2 = slim.conv2d(branch_2, depth(192), [1, 7],
scope='Conv2d_0e_1x7')
with tf.variable_scope('Branch_3'):
branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
branch_3 = slim.conv2d(branch_3, depth(192), [1, 1],
scope='Conv2d_0b_1x1')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
end_points[end_point] = net
if end_point == final_endpoint: return net, end_points
# mixed_8: 8 x 8 x 1280.
end_point = 'Mixed_7a'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, depth(192), [1, 1], scope='Conv2d_0a_1x1')
branch_0 = slim.conv2d(branch_0, depth(320), [3, 3], stride=2,
padding='VALID', scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, depth(192), [1, 1], scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, depth(192), [1, 7],
scope='Conv2d_0b_1x7')
branch_1 = slim.conv2d(branch_1, depth(192), [7, 1],
scope='Conv2d_0c_7x1')
branch_1 = slim.conv2d(branch_1, depth(192), [3, 3], stride=2,
padding='VALID', scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.max_pool2d(net, [3, 3], stride=2, padding='VALID',
scope='MaxPool_1a_3x3')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2])
end_points[end_point] = net
if end_point == final_endpoint: return net, end_points
# mixed_9: 8 x 8 x 2048.
end_point = 'Mixed_7b'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, depth(320), [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, depth(384), [1, 1], scope='Conv2d_0a_1x1')
branch_1 = tf.concat(axis=3, values=[
slim.conv2d(branch_1, depth(384), [1, 3], scope='Conv2d_0b_1x3'),
slim.conv2d(branch_1, depth(384), [3, 1], scope='Conv2d_0b_3x1')])
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(net, depth(448), [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(
branch_2, depth(384), [3, 3], scope='Conv2d_0b_3x3')
branch_2 = tf.concat(axis=3, values=[
slim.conv2d(branch_2, depth(384), [1, 3], scope='Conv2d_0c_1x3'),
slim.conv2d(branch_2, depth(384), [3, 1], scope='Conv2d_0d_3x1')])
with tf.variable_scope('Branch_3'):
branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
branch_3 = slim.conv2d(
branch_3, depth(192), [1, 1], scope='Conv2d_0b_1x1')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
end_points[end_point] = net
if end_point == final_endpoint: return net, end_points
# mixed_10: 8 x 8 x 2048.
end_point = 'Mixed_7c'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, depth(320), [1, 1], scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(net, depth(384), [1, 1], scope='Conv2d_0a_1x1')
branch_1 = tf.concat(axis=3, values=[
slim.conv2d(branch_1, depth(384), [1, 3], scope='Conv2d_0b_1x3'),
slim.conv2d(branch_1, depth(384), [3, 1], scope='Conv2d_0c_3x1')])
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(net, depth(448), [1, 1], scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(
branch_2, depth(384), [3, 3], scope='Conv2d_0b_3x3')
branch_2 = tf.concat(axis=3, values=[
slim.conv2d(branch_2, depth(384), [1, 3], scope='Conv2d_0c_1x3'),
slim.conv2d(branch_2, depth(384), [3, 1], scope='Conv2d_0d_3x1')])
with tf.variable_scope('Branch_3'):
branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
branch_3 = slim.conv2d(
branch_3, depth(192), [1, 1], scope='Conv2d_0b_1x1')
net = tf.concat(axis=3, values=[branch_0, branch_1, branch_2, branch_3])
end_points[end_point] = net
if end_point == final_endpoint: return net, end_points
raise ValueError('Unknown final endpoint %s' % final_endpoint)
def inception_v3(inputs,
                 num_classes=1000,
                 is_training=True,
                 dropout_keep_prob=0.8,
                 min_depth=16,
                 depth_multiplier=1.0,
                 prediction_fn=slim.softmax,
                 spatial_squeeze=True,
                 reuse=None,
                 scope='InceptionV3'):
  """Inception model from http://arxiv.org/abs/1512.00567.

  "Rethinking the Inception Architecture for Computer Vision"
  Christian Szegedy, Vincent Vanhoucke, Sergey Ioffe, Jonathon Shlens,
  Zbigniew Wojna.

  With the default arguments this method constructs the exact model defined in
  the paper. However, one can experiment with variations of the inception_v3
  network by changing arguments dropout_keep_prob, min_depth and
  depth_multiplier.

  The default image size used to train this network is 299x299.

  Args:
    inputs: a tensor of size [batch_size, height, width, channels].
    num_classes: number of predicted classes.
    is_training: whether the network is being trained; forwarded to the
      batch_norm and dropout arg_scope below.
    dropout_keep_prob: the percentage of activation values that are retained.
    min_depth: Minimum depth value (number of channels) for all convolution ops.
      Enforced when depth_multiplier < 1, and not an active constraint when
      depth_multiplier >= 1.
    depth_multiplier: Float multiplier for the depth (number of channels)
      for all convolution ops. The value must be greater than zero. Typical
      usage will be to set this value in (0, 1) to reduce the number of
      parameters or computation cost of the model.
    prediction_fn: a function to get predictions out of logits.
    spatial_squeeze: if True, logits is of shape [B, C], if false logits is
      of shape [B, 1, 1, C], where B is batch_size and C is number of classes.
    reuse: whether or not the network and its variables should be reused. To be
      able to reuse 'scope' must be given.
    scope: Optional variable_scope.

  Returns:
    logits: the pre-softmax activations, a tensor of size
      [batch_size, num_classes]
    end_points: a dictionary from components of the network to the corresponding
      activation.

  Raises:
    ValueError: if 'depth_multiplier' is less than or equal to zero.
  """
  if depth_multiplier <= 0:
    raise ValueError('depth_multiplier is not greater than zero.')
  # Scale every channel count by depth_multiplier but never go below min_depth.
  depth = lambda d: max(int(d * depth_multiplier), min_depth)

  with tf.variable_scope(scope, 'InceptionV3', [inputs, num_classes],
                         reuse=reuse) as scope:
    with slim.arg_scope([slim.batch_norm, slim.dropout],
                        is_training=is_training):
      # Convolutional trunk: everything up to (and including) Mixed_7c.
      net, end_points = inception_v3_base(
          inputs, scope=scope, min_depth=min_depth,
          depth_multiplier=depth_multiplier)

      # Auxiliary Head logits, attached to the Mixed_6e (17x17) feature map.
      with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
                          stride=1, padding='SAME'):
        aux_logits = end_points['Mixed_6e']
        with tf.variable_scope('AuxLogits'):
          aux_logits = slim.avg_pool2d(
              aux_logits, [5, 5], stride=3, padding='VALID',
              scope='AvgPool_1a_5x5')
          aux_logits = slim.conv2d(aux_logits, depth(128), [1, 1],
                                   scope='Conv2d_1b_1x1')

          # Shape of feature map before the final layer; shrunk when the
          # input image is smaller than the canonical 299x299.
          kernel_size = _reduced_kernel_size_for_small_input(
              aux_logits, [5, 5])
          aux_logits = slim.conv2d(
              aux_logits, depth(768), kernel_size,
              weights_initializer=trunc_normal(0.01),
              padding='VALID', scope='Conv2d_2a_{}x{}'.format(*kernel_size))
          aux_logits = slim.conv2d(
              aux_logits, num_classes, [1, 1], activation_fn=None,
              normalizer_fn=None, weights_initializer=trunc_normal(0.001),
              scope='Conv2d_2b_1x1')
          if spatial_squeeze:
            aux_logits = tf.squeeze(aux_logits, [1, 2], name='SpatialSqueeze')
          end_points['AuxLogits'] = aux_logits

      # Final pooling and prediction
      with tf.variable_scope('Logits'):
        kernel_size = _reduced_kernel_size_for_small_input(net, [8, 8])
        net = slim.avg_pool2d(net, kernel_size, padding='VALID',
                              scope='AvgPool_1a_{}x{}'.format(*kernel_size))
        # 1 x 1 x 2048
        net = slim.dropout(net, keep_prob=dropout_keep_prob, scope='Dropout_1b')
        end_points['PreLogits'] = net
        # 2048
        logits = slim.conv2d(net, num_classes, [1, 1], activation_fn=None,
                             normalizer_fn=None, scope='Conv2d_1c_1x1')
        if spatial_squeeze:
          logits = tf.squeeze(logits, [1, 2], name='SpatialSqueeze')
      # 1000
      end_points['Logits'] = logits
      end_points['Predictions'] = prediction_fn(logits, scope='Predictions')
  return logits, end_points
# Canonical input resolution for this architecture (see docstring above:
# "The default image size used to train this network is 299x299").
inception_v3.default_image_size = 299
def _reduced_kernel_size_for_small_input(input_tensor, kernel_size):
"""Define kernel size which is automatically reduced for small input.
If the shape of the input images is unknown at graph construction time this
function assumes that the input images are is large enough.
Args:
input_tensor: input tensor of size [batch_size, height, width, channels].
kernel_size: desired kernel size of length 2: [kernel_height, kernel_width]
Returns:
a tensor with the kernel size.
TODO(jrru): Make this function work with unknown shapes. Theoretically, this
can be done with the code below. Problems are two-fold: (1) If the shape was
known, it will be lost. (2) inception.slim.ops._two_element_tuple cannot
handle tensors that define the kernel size.
shape = tf.shape(input_tensor)
return = tf.pack([tf.minimum(shape[1], kernel_size[0]),
tf.minimum(shape[2], kernel_size[1])])
"""
shape = input_tensor.get_shape().as_list()
if shape[1] is None or shape[2] is None:
kernel_size_out = kernel_size
else:
kernel_size_out = [min(shape[1], kernel_size[0]),
min(shape[2], kernel_size[1])]
return kernel_size_out
# Re-export the shared slim arg_scope helper so callers can wrap model
# construction in `with slim.arg_scope(inception_v3_arg_scope()): ...`.
inception_v3_arg_scope = inception_utils.inception_arg_scope
| [
"ModarTensai@gmail.com"
] | ModarTensai@gmail.com |
68d86c968f4b517b3d5b4ebd2e76fbe486ab4355 | bfb9b4e92bf583ae16013b4f5e55e114f9c2140c | /config.py | dc266dee44c26aa8c1905ea63e38910a19b107c6 | [] | no_license | dreyevihor/course_work_dbis | 52cc3bd5852e6ccefd32acb4bd14b8af5a497eb1 | a8c3e794ea497c203c010de790c5cffff1e4866e | refs/heads/master | 2020-04-13T18:33:34.097873 | 2018-12-28T07:03:51 | 2018-12-28T07:03:51 | 163,377,858 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 92 | py | import os
class Config(object):
    """Flask-style application settings.

    SECRET_KEY is read from the environment; an empty or missing variable
    falls back to the hard-coded development default.
    """
    SECRET_KEY = os.getenv('SECRET_KEY') or 'coursach'
"dreyevihor@gmail.com"
] | dreyevihor@gmail.com |
bb1b9cccef0279464e835bc30ec7610597df1bd6 | fca06f2bc23c08ccc2e573c4a4004e95f0ccb39a | /tkinter/udemy course/tablet project 2.py | bb198795bd7977cc00aa58caf65ba20fac89ee1a | [] | no_license | himanshugullaiya/PYTHON-GUI | 6cb786f52910a030c8fe1387df5ea911ab317ade | f42a9aadf9016e4b50eadd48aa39d79d743a219c | refs/heads/master | 2020-08-22T09:31:37.528810 | 2019-12-30T20:59:00 | 2019-12-30T20:59:00 | 216,366,398 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,083 | py | from tkinter import *
import random
# Application root window; every widget created below attaches to it.
root = Tk()
def get_table():
    """Fill the ten result labels with the multiplication table of the
    number currently typed in ``entry``.

    Improvement over the original: the Entry widget was read (and re-parsed
    with int()) twice on every loop iteration; the value is now read and
    parsed exactly once before the loop. The displayed strings are unchanged.

    Raises:
        ValueError: if the entry does not contain an integer (same as the
            original behavior — NOTE(review): consider a messagebox warning
            instead, as done elsewhere in this app).
    """
    value = entry.get()          # read the widget once, not per row
    factor = int(value)          # parse once; may raise ValueError (unchanged)
    for x in range(10):
        multiplier = 10 - x      # identical to the original 9 - x + 1
        label_txt[x].set('{}X{}={}'.format(value, multiplier, factor * multiplier))
def reset():
    """Restore every result label to its placeholder text and clear the entry box."""
    placeholder = '------------------'
    for slot in label_txt:
        slot.set(placeholder)
    entry.delete(0, END)
# Pool of background colors; each label below picks one at random.
colors = ["pink", "gray70", "indian red", "pale green", "deep sky blue", "cyan", "yellow", "magenta"]
# Input field for the number whose table should be shown.
entry = Entry(root, font = ('Courier',15))
entry.pack()
# Ten result rows: one Label per multiple, each backed by a StringVar so
# get_table()/reset() can update the text in place.
list_labels = []
label_txt = []
for x in range(10):
    tp = StringVar()
    tp.set('------------------')
    label_txt.append(tp)
    my_label = Label(root, textvariable = label_txt[x], font = ('Courier',20), bg = random.choice(colors), width = 10)
    list_labels.append(my_label)
    # side='bottom' stacks later labels above earlier ones.
    list_labels[x].pack(side = 'bottom')
# Action buttons wired to the callbacks defined above.
Button(root, text = "Show Table", command = get_table, width = 10).pack(padx = 45, pady = 5, side = 'left')
Button(root, text = "Reset", command = reset, width = 10).pack(padx = 2, pady = 5, side = 'left')
# Enter the Tk event loop (blocks until the window is closed).
root.mainloop()
"noreply@github.com"
] | noreply@github.com |
5df22d78b6e8dffc4a0c2173a282847ce7a447ee | 931f17611f3abf5f5384699d65f0de25ee1f98a3 | /code/utils/__init__.py | 537e4d50a3c296482ebec6a357bf45dadbbaffc8 | [] | no_license | alexliu2360/ai_comp | 3d44649f998e50ed436ca21fdbec680fcac562ee | 5b52f7f7e1931b45a62177197aa7ba049338f30d | refs/heads/master | 2020-04-04T13:28:02.190195 | 2018-11-10T13:55:23 | 2018-11-10T13:55:23 | 155,962,533 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 284 | py | # -*- coding: utf-8 -*-
from . import JoinAttLayer
def getClassification(arr):
arr = list(arr)
if arr.index(max(arr)) == 0:
return -2
elif arr.index(max(arr)) == 1:
return -1
elif arr.index(max(arr)) == 2:
return 0
else:
return 1
| [
"noreply@github.com"
] | noreply@github.com |
ba9cf4cf17502c5bb9565e840604e9a3f1ecbad3 | fbe214d6ca9a44a9a11482e1d6b06a01fcbdcc68 | /main.py | b02ce1a5eefbc99e949d01b105c22a13485b7ff5 | [] | no_license | EugenioAV/crypto-operation | af549fab2b3fd562990515ea36ec5795239ffeb0 | 6bff54b49f0e830c2d5a169e5a1c10cf83cedb72 | refs/heads/master | 2020-08-07T06:21:07.144868 | 2019-10-10T11:53:12 | 2019-10-10T11:53:12 | 213,330,965 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,839 | py | import tkinter
import rsa
from tkinter import filedialog
from tkinter import messagebox
class KeyInputBlock:
file_path = ''
def __init__(self, tk, label_text, file_format):
self.input_frame = tkinter.Frame(master=tk, bg="#f5f5f5", bd=2)
self.input_label = tkinter.Label(master=self.input_frame, text=label_text, justify=tkinter.CENTER, bd=2)
self.input_entry = tkinter.Entry(master=self.input_frame, justify=tkinter.LEFT, bd=2)
self.input_button = tkinter.Button(master=self.input_frame, width=5, text='...', command=self.__get_key_path)
self.file_format = file_format
def draw_interface(self, col):
self.input_frame.grid(column=col, row=0, sticky="WESN")
self.input_label.grid(column=0, row=0, sticky="WESN")
self.input_entry.grid(column=1, row=0, sticky="WESN")
self.input_button.grid(column=2, row=0, sticky="WESN")
self.input_frame.columnconfigure(1, weight=1)
def __get_key_path(self):
f_path = filedialog.askopenfilename()
f_format = f_path.split('/')[-1].split('.')[-1]
if f_format == self.file_format:
self.file_path = f_path
self.input_entry.delete(0, tkinter.END)
self.input_entry.insert(0, self.file_path)
else:
messagebox.showinfo("Invalid file format", f"The file must has '.{self.file_format}' format")
def get_key(self):
key = None
if self.file_path:
with open(self.file_path, "rb") as file:
key = file.read()
return key
class TextBlock:
def __init__(self, tk, button_text, func):
self.text_frame = tkinter.Frame(master=tk, bg="#f5f5f5", bd=2)
self.input_text = tkinter.Text(master=self.text_frame)
self.button = tkinter.Button(master=self.text_frame, text=button_text, command=func, bg='#e0ccff')
self.output_text = tkinter.Text(master=self.text_frame)
def draw_interface(self, col):
self.text_frame.grid(column=col, row=1, sticky="WESN")
self.input_text.grid(column=0, row=0, sticky="WESN")
self.button.grid(column=0, row=1, sticky="WESN")
self.output_text.grid(column=0, row=2, sticky="WESN")
self.text_frame.columnconfigure(0, weight=1)
self.text_frame.rowconfigure(0, weight=1)
self.text_frame.rowconfigure(2, weight=1)
class Application(tkinter.Tk):
def __init__(self):
super().__init__()
self.configure(background='#f5f5f5')
self.geometry("{0}x{1}+0+0".format(int(self.winfo_screenwidth() * 0.99), int(self.winfo_screenheight() * 0.9)))
self.bind('<Escape>', lambda e: self.destroy())
self.title("Text encryption")
self.iconbitmap('img/app_icon.ico')
self.public_key_block = KeyInputBlock(self, 'Public key', 'pub')
self.private_key_block = KeyInputBlock(self, 'Private key', 'prv')
self.encrypt_text = TextBlock(self, 'Encrypt', self.__encrypt)
self.decrypt_text = TextBlock(self, 'Load data & Decrypt', self.__decrypt)
def draw_interface(self):
self.columnconfigure(0, weight=1)
self.columnconfigure(1, weight=1)
self.rowconfigure(1, weight=1)
self.public_key_block.draw_interface(0)
self.private_key_block.draw_interface(1)
self.encrypt_text.draw_interface(0)
self.decrypt_text.draw_interface(1)
@staticmethod
def save_data(data):
with open("output.txt", "wb") as file:
file.write(data)
def open_data(self):
with open("output.txt", "rb") as file:
data = file.read()
self.decrypt_text.input_text.insert(tkinter.END, data)
return data
def __encrypt(self):
self.encrypt_text.output_text.delete(1.0, tkinter.END)
file_key = self.public_key_block.get_key()
if file_key:
key = rsa.PublicKey.load_pkcs1(file_key)
message = self.encrypt_text.input_text.get(1.0, 'end-1c').encode('utf-8')
crypto = rsa.encrypt(message, key)
self.save_data(crypto)
self.encrypt_text.output_text.insert(tkinter.END, crypto)
else:
messagebox.showinfo("Error", "Input the path to the public key")
def __decrypt(self):
file_key = self.private_key_block.get_key()
if file_key:
self.decrypt_text.output_text.delete(1.0, tkinter.END)
self.decrypt_text.input_text.delete(1.0, tkinter.END)
key = rsa.PrivateKey.load_pkcs1(file_key)
self.decrypt_text.output_text.insert(tkinter.END, rsa.decrypt(self.open_data(), key))
else:
messagebox.showinfo("Error", "Input the path to the private key")
if __name__ == '__main__':
app = Application()
app.draw_interface()
app.mainloop()
| [
"hedgehog0996@gmail.com"
] | hedgehog0996@gmail.com |
d125331a2cc55284fc603b28183c47dbaab2798d | 6d14fee4a337f75ca0364084ae68c37f443a8ffe | /crime.py | 8ab8d3f2eb0a2fa7d0c78c4793f5354b6ce6827d | [
"MIT"
] | permissive | aleranaudo/SizeItUp | e309379fff177844367caabab2448fb2ccea036f | cd412173b4f3f3ca049f30d128b9df08c4395b05 | refs/heads/master | 2020-06-28T16:27:55.752227 | 2019-08-02T19:03:11 | 2019-08-02T19:03:11 | 200,241,689 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 126 | py | import state_crime
list_of_report = state_crime.get_all_crimes()
state_crime.get_all_crimes(test=False)
print(state_crime[1])
| [
"alejandra.ranaudo@gmail.com"
] | alejandra.ranaudo@gmail.com |
dbc05861ff1d574b59cf13ace3a9c4f734503c16 | 2f330fc050de11676ab46b963b7878882e9b6614 | /memsource_cli/models/linguist_v2.py | 3fded445994ae511b98da76e5f74617d09e0bc7b | [
"Apache-2.0"
] | permissive | zerodayz/memsource-cli-client | 609f48c18a2b6daaa639d4cb8a61da43763b5143 | c2574f1467539a49e6637c874e88d75c7ef789b3 | refs/heads/master | 2020-08-01T12:43:06.497982 | 2019-09-30T11:14:13 | 2019-09-30T11:14:13 | 210,999,654 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,216 | py | # coding: utf-8
"""
Memsource REST API
Welcome to Memsource's API documentation. To view our legacy APIs please [visit our documentation](https://wiki.memsource.com/wiki/Memsource_API) and for more information about our new APIs, [visit our blog](https://www.memsource.com/blog/2017/10/24/introducing-rest-apis-qa-with-the-memsource-api-team/). If you have any questions, please contact [Memsource Support](<mailto:support@memsource.com>). # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from memsource_cli.models.abstract_project_dto_v2 import AbstractProjectDtoV2 # noqa: F401,E501
from memsource_cli.models.domain_reference import DomainReference # noqa: F401,E501
from memsource_cli.models.reference_file_reference import ReferenceFileReference # noqa: F401,E501
from memsource_cli.models.sub_domain_reference import SubDomainReference # noqa: F401,E501
from memsource_cli.models.user_reference import UserReference # noqa: F401,E501
class LinguistV2(AbstractProjectDtoV2):
    """Swagger-generated model for the Memsource "linguist" project view (v2).

    This subclass declares no fields of its own, so ``swagger_types`` and
    ``attribute_map`` are empty and serialization is driven entirely by
    those (empty) tables.
    """

    # attribute name -> attribute type (empty: no own fields)
    swagger_types = {}

    # attribute name -> JSON key in the API definition (empty: no own fields)
    attribute_map = {}

    def __init__(self):  # noqa: E501
        """LinguistV2 - a model defined in Swagger"""  # noqa: E501
        # No polymorphism discriminator for this model.
        self.discriminator = None

    def to_dict(self):
        """Return the model's swagger-declared properties as a plain dict."""
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: (val.to_dict() if hasattr(val, "to_dict") else val)
                    for key, val in value.items()
                }
            else:
                result[attr] = value
        # Generated safety net: if the model ever subclasses dict, merge its items.
        if issubclass(LinguistV2, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return a pretty-printed string of the model's dict form."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Models are equal when both are LinguistV2 with identical state."""
        return isinstance(other, LinguistV2) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
| [
"cerninr@gmail.com"
] | cerninr@gmail.com |
a923336f5999cb8e5aa571ccd2f842d30783123c | cd70f7f3339489e7ce347ae55f8893f493f5b459 | /model/model_data.py | 6379a35374a4285e13964781e3e1cf187d8cec2d | [] | no_license | MayaBenj/Facebook-Recruiting | c3f9aaa55e141f56a52b2aca2783b74c35efd79d | 7ca25c1708be5da5d927dc129b9c50a8292d54b7 | refs/heads/master | 2022-12-22T21:36:20.066057 | 2020-09-30T21:29:04 | 2020-09-30T21:29:04 | 298,805,957 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,638 | py | import pandas as pd
import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import f1_score, mean_squared_error, classification_report
from sklearn.model_selection import train_test_split, cross_val_score
from sklearn.neighbors import KNeighborsClassifier
from sklearn.preprocessing import PolynomialFeatures, StandardScaler
from sklearn.svm import SVC
# Load data: first column is assumed to be an id, last column the label —
# TODO confirm against train_with_features.csv's actual layout.
train_data = pd.read_csv("../data_csv/train_with_features.csv")
x = train_data.iloc[:, 1:-1]
y = train_data.iloc[:, -1]
# Create polynomial features.
# NOTE(review): degree=1 with include_bias=False reproduces the original
# columns, so the concat below simply duplicates every feature — verify
# this is intentional (the CV notes below suggest degree was tuned to 1).
poly = PolynomialFeatures(degree=1, include_bias=False)
polynomials = pd.DataFrame(poly.fit_transform(x))
x = pd.concat([x, polynomials], axis=1)
# Split data (fixed random_state for reproducibility).
x_train, x_test, y_train, y_test = train_test_split(x, y, random_state=5)
# Scale data: fit on the training split only, then apply to the test split.
scaler = StandardScaler()
x_train_scaled = scaler.fit_transform(x_train)
x_test_scaled = scaler.transform(x_test)
"""
Logistic Regression
Polynomial features
# Using CV to calculate accuracy and choose degree
# CV score for d 3: 0.9321 (+/- 0.02)
# CV score for d 2: 0.9415 (+/- 0.02)
# CV score for d 1: 0.9503 (+/- 0.00)
Model data
Using CV to calculate accuracy and choose regularization coefficient
Accuracy for c 0.9: 0.8958 (+/- 0.05)
Accuracy for c 1.0: 0.8965 (+/- 0.05)
Accuracy for c 1.1: 0.8978 (+/- 0.05)
Accuracy for c 1.3: 0.8992 (+/- 0.05)
Accuracy for c 1.5: 0.8992 (+/- 0.05)
"""
# clf = LogisticRegression(fit_intercept=True, max_iter=10000, C=1.3)
# clf.fit(x_train_scaled, y_train)
# scores = cross_val_score(clf, x_train_scaled, y_train, cv=5)
# print("Accuracy: %0.4f (+/- %0.2f)" % (scores.mean(), scores.std() * 2))
#
# cost_train = mean_squared_error(y_train, clf.predict(x_train_scaled))
# cost_test = mean_squared_error(y_test, clf.predict(x_test_scaled))
# print("Cost train: %f, cost test: %f" % (cost_train, cost_test))
#
# train_score = clf.score(x_train_scaled, y_train)
# test_score = clf.score(x_test_scaled, y_test)
# print("Train score: %f, Test score: %f" % (train_score, test_score))
#
# f1_train = f1_score(y_train, (clf.predict_proba(x_train_scaled)[:, 1] >= 0.3).astype(int))
# f1_test = f1_score(y_test, (clf.predict_proba(x_test_scaled)[:, 1] >= 0.3).astype(int))
# print("F1-score for train %f, For test: %f" % (f1_train, f1_test))
#
# print("Top 4 most influential features: " + np.array_str(x.axes[1][np.argpartition(abs(clf.coef_), -4)[0, -4:]]))
"""
Random Forest
# Using CV to calculate accuracy and choose degree
# CV score for max_depth 20: 0.9483 (+/- 0.02)
# CV score for max_depth 15: 0.9503 (+/- 0.01)
# CV score for max_depth 10: 0.9523 (+/- 0.01)
# CV score for max_depth 5: 0.9476 (+/- 0.01)
# CV score for max_depth None: 0.9462 (+/- 0.01)
# CV score for min_samples_split 100: 0.9429 (+/- 0.01)
# CV score for min_samples_split 50: 0.9436 (+/- 0.01)
# CV score for min_samples_split 20: 0.9483 (+/- 0.01)
# CV score for min_samples_split 2: 0.9476 (+/- 0.01)
# CV score for min_samples_leaf 10: 0.9462 (+/- 0.01)
# CV score for min_samples_leaf 5: 0.9469 (+/- 0.01)
# CV score for min_samples_leaf 1: 0.9489 (+/- 0.01)
# CV score for max_leaf_nodes 20: 0.9469 (+/- 0.01)
# CV score for max_leaf_nodes 10: 0.9483 (+/- 0.01)
# CV score for max_leaf_nodes 5: 0.9456 (+/- 0.01)
# CV score for ccp_alpha 0.001: 0.9462 (+/- 0.02)
# CV score for ccp_alpha 0: 0.9503 (+/- 0.01)
"""
# Random forest with hyperparameters chosen from the CV runs documented above.
clf = RandomForestClassifier(max_depth=10, min_samples_split=20)
clf.fit(x_train_scaled, y_train)
# 5-fold cross-validated accuracy on the training split.
scores = cross_val_score(clf, x_train_scaled, y_train, cv=5)
print("Accuracy: %0.4f (+/- %0.2f)" % (scores.mean(), scores.std() * 2))
# Mean squared error of the hard predictions (0/1 labels).
cost_train = mean_squared_error(y_train, clf.predict(x_train_scaled))
cost_test = mean_squared_error(y_test, clf.predict(x_test_scaled))
print("Cost train: %f, cost test: %f" % (cost_train, cost_test))
# Plain accuracy on each split.
train_score = clf.score(x_train_scaled, y_train)
test_score = clf.score(x_test_scaled, y_test)
print("Train score: %f, Test score: %f" % (train_score, test_score))
# F1 at a 0.3 decision threshold instead of the default 0.5 — presumably to
# favor recall on the rare positive class; confirm against the class balance.
f1_train = f1_score(y_train, (clf.predict_proba(x_train_scaled)[:, 1] >= 0.3).astype(int))
f1_test = f1_score(y_test, (clf.predict_proba(x_test_scaled)[:, 1] >= 0.3).astype(int))
print("F1-score for train %f, For test: %f" % (f1_train, f1_test))
# Report the four features with the highest impurity-based importance.
print("Top 4 most influential features: " + np.array_str(x.axes[1][np.argpartition(clf.feature_importances_, -4)[-4:]]))
"""
Support Vector Machine
# CV score for C
# CV score for c 2: 0.9422 (+/- 0.01)
# CV score for c 1: 0.9462 (+/- 0.00)
# CV score for kernel
# CV score for kernel linear: 0.9449 (+/- 0.00)
# CV score for poly linear: 0.9415 (+/- 0.01)
# CV score for poly sigmoid: 0.9281 (+/- 0.02)
# CV score for poly rbf: 0.9462 (+/- 0.00)
"""
# clf = SVC(probability=True)
# clf.fit(x_train_scaled, y_train)
# scores = cross_val_score(clf, x_train_scaled, y_train, cv=5)
# print("Accuracy: %0.4f (+/- %0.2f)" % (scores.mean(), scores.std() * 2))
#
# cost_train = mean_squared_error(y_train, clf.predict(x_train_scaled))
# cost_test = mean_squared_error(y_test, clf.predict(x_test_scaled))
# print("Cost train: %f, cost test: %f" % (cost_train, cost_test))
#
# train_score = clf.score(x_train_scaled, y_train)
# test_score = clf.score(x_test_scaled, y_test)
# print("Train score: %f, Test score: %f" % (train_score, test_score))
#
# f1_train = f1_score(y_train, (clf.predict_proba(x_train_scaled)[:, 1] >= 0.3).astype(int))
# f1_test = f1_score(y_test, (clf.predict_proba(x_test_scaled)[:, 1] >= 0.3).astype(int))
# print("F1-score for train %f, For test: %f" % (f1_train, f1_test))
"""
K-Nearest Neighbors
# CV score for n_neighbors
# CV score for n_neighbors 5: 0.9388 (+/- 0.02)
# CV score for n_neighbors 10: 0.9442 (+/- 0.01)
# CV score for n_neighbors 20: 0.9462 (+/- 0.00)
"""
# clf = KNeighborsClassifier(n_neighbors=20)
# clf.fit(x_train_scaled, y_train)
# scores = cross_val_score(clf, x_train_scaled, y_train, cv=5)
# print("Accuracy: %0.4f (+/- %0.2f)" % (scores.mean(), scores.std() * 2))
#
# cost_train = mean_squared_error(y_train, clf.predict(x_train_scaled))
# cost_test = mean_squared_error(y_test, clf.predict(x_test_scaled))
# print("Cost train: %f, cost test: %f" % (cost_train, cost_test))
#
# train_score = clf.score(x_train_scaled, y_train)
# test_score = clf.score(x_test_scaled, y_test)
# print("Train score: %f, Test score: %f" % (train_score, test_score))
#
# f1_train = f1_score(y_train, (clf.predict_proba(x_train_scaled)[:, 1] >= 0.3).astype(int))
# f1_test = f1_score(y_test, (clf.predict_proba(x_test_scaled)[:, 1] >= 0.3).astype(int))
# print("F1-score for train %f, For test: %f" % (f1_train, f1_test))
# Apply model to test data.
test_data = pd.read_csv("../data_csv/test_with_features.csv")
X = test_data.iloc[:, 1:]
# BUG FIX: mirror the training pipeline's order. Training concatenated the
# polynomial features onto the RAW columns and only then fitted the scaler,
# so `scaler` expects the widened column layout. The original code scaled
# the narrow frame first (a column-count mismatch for the fitted scaler)
# and then appended polynomials of already-scaled values.
poly = PolynomialFeatures(degree=1, include_bias=False)
polynomials = pd.DataFrame(poly.fit_transform(X))
X_full = pd.concat([X, polynomials], axis=1)
X_scaled = pd.DataFrame(scaler.transform(X_full))
# Probability of class 1 ("bot") for every bidder present in the feature file.
Y = pd.Series(clf.predict_proba(X_scaled)[:, 1])
Y.name = "prediction"
prediction_data = pd.concat([test_data.bidder_id, Y], axis=1)
# Add users with no bidding data as 0 (assumed human).
test_data_original = pd.read_csv("../data_csv/test.csv")
no_data_users = test_data_original[~test_data_original.bidder_id.isin(test_data.bidder_id)][["bidder_id"]]
no_data_users["prediction"] = 0
prediction_data = prediction_data.append(no_data_users)
# Create prediction csv (submission file).
prediction_data.to_csv("../data_csv/prediction.csv", index=False)
| [
"noreply@github.com"
] | noreply@github.com |
750eca92f6f411b38c8f0f6c0e42725c8525b87a | a4ca3d18b25e4aebb81f2ceb6408af051c94c8f1 | /p040.py | c188f13a21e7ccf9d5142c366228ca73c7163c47 | [] | no_license | ohassa/code-eval | e7784418d502ac86c8a132d5de3e851d2962d440 | 6980f51f18ac92ba4c15da1f7489b1deea6abd78 | refs/heads/master | 2020-04-10T14:15:17.292279 | 2015-03-01T22:02:09 | 2015-03-01T22:02:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 158 | py | import sys
def _is_self_describing(number):
    """True when the digit at index i equals how often digit i occurs in *number*."""
    return all(int(digit) == number.count(str(pos))
               for pos, digit in enumerate(number))


# CodeEval #40: for each input line print 1 if the number is self-describing,
# otherwise 0. FIX: the original leaked the file handle; `with` closes it.
with open(sys.argv[1]) as fh:
    for line in fh.read().splitlines():
        print(1 if _is_self_describing(line) else 0)
"theomer1@gmail.com"
] | theomer1@gmail.com |
5f347e6b6fc31503d5eb071e29e147c5e03c8963 | c94f888541c0c430331110818ed7f3d6b27b788a | /billing/python/antchain_sdk_billing/models.py | a84d6d9e51b2efd4b9c0729e85e28c8c46ab9f42 | [
"Apache-2.0",
"MIT"
] | permissive | alipay/antchain-openapi-prod-sdk | 48534eb78878bd708a0c05f2fe280ba9c41d09ad | 5269b1f55f1fc19cf0584dc3ceea821d3f8f8632 | refs/heads/master | 2023-09-03T07:12:04.166131 | 2023-09-01T08:56:15 | 2023-09-01T08:56:15 | 275,521,177 | 9 | 10 | MIT | 2021-03-25T02:35:20 | 2020-06-28T06:22:14 | PHP | UTF-8 | Python | false | false | 55,021 | py | # -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from Tea.model import TeaModel
class Config(TeaModel):
    """
    Model for initing client.

    Carries credential, endpoint, proxy and connection-pool settings used
    when constructing an API client.
    """

    # (python attribute, wire/JSON key) pairs; to_map/from_map below are
    # driven entirely by this table, in this order.
    _FIELDS = (
        ('access_key_id', 'accessKeyId'),
        ('access_key_secret', 'accessKeySecret'),
        ('security_token', 'securityToken'),
        ('protocol', 'protocol'),
        ('read_timeout', 'readTimeout'),
        ('connect_timeout', 'connectTimeout'),
        ('http_proxy', 'httpProxy'),
        ('https_proxy', 'httpsProxy'),
        ('endpoint', 'endpoint'),
        ('no_proxy', 'noProxy'),
        ('max_idle_conns', 'maxIdleConns'),
        ('user_agent', 'userAgent'),
        ('socks_5proxy', 'socks5Proxy'),
        ('socks_5net_work', 'socks5NetWork'),
        ('max_idle_time_millis', 'maxIdleTimeMillis'),
        ('keep_alive_duration_millis', 'keepAliveDurationMillis'),
        ('max_requests', 'maxRequests'),
        ('max_requests_per_host', 'maxRequestsPerHost'),
    )

    def __init__(
        self,
        access_key_id: str = None,
        access_key_secret: str = None,
        security_token: str = None,
        protocol: str = None,
        read_timeout: int = None,
        connect_timeout: int = None,
        http_proxy: str = None,
        https_proxy: str = None,
        endpoint: str = None,
        no_proxy: str = None,
        max_idle_conns: int = None,
        user_agent: str = None,
        socks_5proxy: str = None,
        socks_5net_work: str = None,
        max_idle_time_millis: int = None,
        keep_alive_duration_millis: int = None,
        max_requests: int = None,
        max_requests_per_host: int = None,
    ):
        # Credentials.
        self.access_key_id = access_key_id
        self.access_key_secret = access_key_secret
        self.security_token = security_token
        # Transport: http protocol and socket timeouts.
        self.protocol = protocol
        self.read_timeout = read_timeout
        self.connect_timeout = connect_timeout
        # Proxies and endpoint.
        self.http_proxy = http_proxy
        self.https_proxy = https_proxy
        self.endpoint = endpoint
        # Proxy white list (hosts that bypass the proxy).
        self.no_proxy = no_proxy
        # Connection-pool tuning.
        self.max_idle_conns = max_idle_conns
        self.user_agent = user_agent
        self.socks_5proxy = socks_5proxy
        self.socks_5net_work = socks_5net_work
        # Max idle time for a keep-alive connection (milliseconds).
        self.max_idle_time_millis = max_idle_time_millis
        # Max lifetime of a keep-alive connection (milliseconds).
        self.keep_alive_duration_millis = keep_alive_duration_millis
        # Max total connections (overall keep-alive cap).
        self.max_requests = max_requests
        # Max connections per target host (per-host keep-alive cap).
        self.max_requests_per_host = max_requests_per_host

    def validate(self):
        """Every field is optional; nothing to validate."""
        pass

    def to_map(self):
        """Serialize each non-None attribute under its wire key."""
        result = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        """Populate attributes from a wire-format dict; returns self."""
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class GetAccountBalanceRequest(TeaModel):
    """Request model for querying an account balance."""

    # Attribute names; the wire keys are identical for this model.
    _FIELDS = ('auth_token', 'from_channel', 'tenant')

    def __init__(
        self,
        auth_token: str = None,
        from_channel: str = None,
        tenant: str = None,
    ):
        # Authorization token under OAuth mode (optional).
        self.auth_token = auth_token
        # Origin channel: DEFAULT (official site) or
        # ANT_OPEN_SERVICE_MARKET (open-platform service market).
        self.from_channel = from_channel
        # Tenant id: unique external user id, e.g. an Ant passport id.
        self.tenant = tenant

    def validate(self):
        """Require the mandatory request fields."""
        self.validate_required(self.from_channel, 'from_channel')
        self.validate_required(self.tenant, 'tenant')

    def to_map(self):
        """Serialize each non-None attribute under its wire key."""
        return {name: getattr(self, name)
                for name in self._FIELDS
                if getattr(self, name) is not None}

    def from_map(self, m: dict = None):
        """Populate attributes from a wire-format dict; returns self."""
        m = m or dict()
        for name in self._FIELDS:
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class GetAccountBalanceResponse(TeaModel):
    """Response model carrying an account's balance details."""

    # Attribute names; the wire keys are identical for this model.
    _FIELDS = ('req_msg_id', 'result_code', 'result_msg',
               'account_balance', 'available_balance', 'currency_value')

    def __init__(
        self,
        req_msg_id: str = None,
        result_code: str = None,
        result_msg: str = None,
        account_balance: str = None,
        available_balance: str = None,
        currency_value: str = None,
    ):
        # Unique request id, used for tracing and troubleshooting.
        self.req_msg_id = req_msg_id
        # Result code; OK generally means the call succeeded.
        self.result_code = result_code
        # Textual description of the error, if any.
        self.result_msg = result_msg
        # Account balance.
        self.account_balance = account_balance
        # Available balance.
        self.available_balance = available_balance
        # Currency code.
        self.currency_value = currency_value

    def validate(self):
        """Every field is optional; nothing to validate."""
        pass

    def to_map(self):
        """Serialize each non-None attribute under its wire key."""
        return {name: getattr(self, name)
                for name in self._FIELDS
                if getattr(self, name) is not None}

    def from_map(self, m: dict = None):
        """Populate attributes from a wire-format dict; returns self."""
        m = m or dict()
        for name in self._FIELDS:
            if m.get(name) is not None:
                setattr(self, name, m.get(name))
        return self
class ChargeAccountBalanceRequest(TeaModel):
    """Request model for recharging an account balance."""

    # Wire-format keys; identical to the attribute names, serializer order.
    _FIELD_KEYS = ('auth_token', 'charge_amt', 'currency_value',
                   'from_channel', 'return_url')
    # Fields that must be present before the request is sent.
    _REQUIRED_KEYS = ('charge_amt', 'currency_value', 'from_channel',
                      'return_url')

    def __init__(
        self,
        auth_token: str = None,
        charge_amt: str = None,
        currency_value: str = None,
        from_channel: str = None,
        return_url: str = None,
    ):
        # Authorization token used in OAuth mode.
        self.auth_token = auth_token
        # Amount string, e.g. "10.11" means 10.11 yuan; finest granularity is
        # one cent (two decimal places).
        self.charge_amt = charge_amt
        # Currency code as used inside the Alipay system (156/840/...);
        # 156 is CNY.
        self.currency_value = currency_value
        # Channel origin: ANTCLOUD_OFFICIAL (official site),
        # ANT_OPEN_SERVICE_MARKET (open-platform service market).
        self.from_channel = from_channel
        # Callback url invoked after the recharge completes at the cashier.
        self.return_url = return_url

    def validate(self):
        """Ensure the mandatory fields are set."""
        for key in self._REQUIRED_KEYS:
            self.validate_required(getattr(self, key), key)

    def to_map(self):
        """Serialize all non-None fields into a plain dict."""
        return {key: getattr(self, key)
                for key in self._FIELD_KEYS
                if getattr(self, key) is not None}

    def from_map(self, m: dict = None):
        """Populate this model from *m*, skipping absent/None entries."""
        m = m or dict()
        for key in self._FIELD_KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class ChargeAccountBalanceResponse(TeaModel):
    """Response model with the cashier parameters for a balance recharge."""

    # Wire-format keys; identical to the attribute names, serializer order.
    _FIELD_KEYS = (
        'req_msg_id', 'result_code', 'result_msg', 'alipay_gateway',
        'input_charset', 'notify_url', 'out_trade_no', 'partner',
        'payment_type', 'return_url', 'seller_id', 'service', 'sign',
        'sign_type', 'subject', 'total_fee',
    )

    def __init__(
        self,
        req_msg_id: str = None,
        result_code: str = None,
        result_msg: str = None,
        alipay_gateway: str = None,
        input_charset: str = None,
        notify_url: str = None,
        out_trade_no: str = None,
        partner: str = None,
        payment_type: str = None,
        return_url: str = None,
        seller_id: str = None,
        service: str = None,
        sign: str = None,
        sign_type: str = None,
        subject: str = None,
        total_fee: str = None,
    ):
        # Unique request id, used for tracing and troubleshooting.
        self.req_msg_id = req_msg_id
        # Result code; OK generally means the call succeeded.
        self.result_code = result_code
        # Textual description of the error, if any.
        self.result_msg = result_msg
        # Alipay gateway.
        self.alipay_gateway = alipay_gateway
        # Character-encoding format (_input_charset).
        self.input_charset = input_charset
        # Path of the server-side asynchronous notification page.
        self.notify_url = notify_url
        # Unique order number on the merchant site.
        self.out_trade_no = out_trade_no
        # Payee PID.
        self.partner = partner
        # Payment type.
        self.payment_type = payment_type
        # Path of the synchronous-notification page the cashier redirects to.
        self.return_url = return_url
        # Payee id.
        self.seller_id = seller_id
        # Name of the invoked interface.
        self.service = service
        # Signature.
        self.sign = sign
        # Signature type.
        self.sign_type = sign_type
        # Goods name.
        self.subject = subject
        # Transaction amount.
        self.total_fee = total_fee

    def validate(self):
        """No required fields on a response."""
        pass

    def to_map(self):
        """Serialize all non-None fields into a plain dict."""
        return {key: getattr(self, key)
                for key in self._FIELD_KEYS
                if getattr(self, key) is not None}

    def from_map(self, m: dict = None):
        """Populate this model from *m*, skipping absent/None entries."""
        m = m or dict()
        for key in self._FIELD_KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class GetAccountUserRequest(TeaModel):
    """Request model for fetching account-user details."""

    # Wire-format keys; identical to the attribute names, serializer order.
    _FIELD_KEYS = ('auth_token', 'alipay_user_id')

    def __init__(
        self,
        auth_token: str = None,
        alipay_user_id: str = None,
    ):
        # Authorization token used in OAuth mode.
        self.auth_token = auth_token
        # Alipay managed sub-account id.
        self.alipay_user_id = alipay_user_id

    def validate(self):
        """Ensure the mandatory field is set."""
        self.validate_required(self.alipay_user_id, 'alipay_user_id')

    def to_map(self):
        """Serialize all non-None fields into a plain dict."""
        return {key: getattr(self, key)
                for key in self._FIELD_KEYS
                if getattr(self, key) is not None}

    def from_map(self, m: dict = None):
        """Populate this model from *m*, skipping absent/None entries."""
        m = m or dict()
        for key in self._FIELD_KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class GetAccountUserResponse(TeaModel):
    """Response model carrying the account-user profile."""

    # Wire-format keys; identical to the attribute names, serializer order.
    _FIELD_KEYS = ('req_msg_id', 'result_code', 'result_msg', 'tenant_id',
                   'login_name', 'real_name', 'mobile_num', 'firm_name',
                   'user_type')

    def __init__(
        self,
        req_msg_id: str = None,
        result_code: str = None,
        result_msg: str = None,
        tenant_id: str = None,
        login_name: str = None,
        real_name: str = None,
        mobile_num: str = None,
        firm_name: str = None,
        user_type: str = None,
    ):
        # Unique request id, used for tracing and troubleshooting.
        self.req_msg_id = req_msg_id
        # Result code; OK generally means the call succeeded.
        self.result_code = result_code
        # Textual description of the error, if any.
        self.result_msg = result_msg
        # Intelligent-tech user_id.
        self.tenant_id = tenant_id
        # Login name.
        self.login_name = login_name
        # User's real name.
        self.real_name = real_name
        # Mobile phone number.
        self.mobile_num = mobile_num
        # Company name.
        self.firm_name = firm_name
        # Account type, enterprise or individual:
        #   ENTERPRISE("1", enterprise account)
        #   INDIVIDUAL("2", individual account)
        self.user_type = user_type

    def validate(self):
        """No required fields on a response."""
        pass

    def to_map(self):
        """Serialize all non-None fields into a plain dict."""
        return {key: getattr(self, key)
                for key in self._FIELD_KEYS
                if getattr(self, key) is not None}

    def from_map(self, m: dict = None):
        """Populate this model from *m*, skipping absent/None entries."""
        m = m or dict()
        for key in self._FIELD_KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class CreateAcfeewebInstanceRequest(TeaModel):
    """Request model for creating an acfeeweb billing-verification instance."""

    # Wire-format keys; identical to the attribute names, serializer order.
    _FIELD_KEYS = (
        'auth_token', 'id', 'domain_code', 'domain_name', 'domain_version',
        'product_code', 'product_name', 'service_code', 'service_name',
        'offer_code', 'offer_name', 'oms_data', 'oms_biz_no',
        'verification_cache', 'verification_url', 'bpms_id', 'config_data',
        'status', 'creator', 'modifor',
    )

    def __init__(
        self,
        auth_token: str = None,
        id: str = None,
        domain_code: str = None,
        domain_name: str = None,
        domain_version: str = None,
        product_code: str = None,
        product_name: str = None,
        service_code: str = None,
        service_name: str = None,
        offer_code: str = None,
        offer_name: str = None,
        oms_data: str = None,
        oms_biz_no: str = None,
        verification_cache: str = None,
        verification_url: str = None,
        bpms_id: str = None,
        config_data: str = None,
        status: str = None,
        creator: str = None,
        modifor: str = None,
    ):
        # Authorization token used in OAuth mode.
        self.auth_token = auth_token
        # Primary key id of the billing-verification record.
        self.id = id
        # Metering-domain code.
        self.domain_code = domain_code
        # Metering-domain name.
        self.domain_name = domain_name
        # Metering-domain version.
        self.domain_version = domain_version
        # Business-product code.
        self.product_code = product_code
        # Business-product name.
        self.product_name = product_name
        # Channel-product code.
        self.service_code = service_code
        # Channel-product name.
        self.service_name = service_name
        # Offer (commodity) code.
        self.offer_code = offer_code
        # Offer (commodity) name.
        self.offer_name = offer_name
        # Metering data to be verified.
        self.oms_data = oms_data
        # Business idempotency number of the metering data.
        self.oms_biz_no = oms_biz_no
        # Cached payload.
        self.verification_cache = verification_cache
        # Cache link (URL).
        self.verification_url = verification_url
        # Approval-flow id.
        self.bpms_id = bpms_id
        # Configuration payload.
        self.config_data = config_data
        # Status.
        self.status = status
        # Creator.
        self.creator = creator
        # Last modifier.
        self.modifor = modifor

    def validate(self):
        """Every field except auth_token is mandatory."""
        for key in self._FIELD_KEYS[1:]:
            self.validate_required(getattr(self, key), key)

    def to_map(self):
        """Serialize all non-None fields into a plain dict."""
        return {key: getattr(self, key)
                for key in self._FIELD_KEYS
                if getattr(self, key) is not None}

    def from_map(self, m: dict = None):
        """Populate this model from *m*, skipping absent/None entries."""
        m = m or dict()
        for key in self._FIELD_KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class CreateAcfeewebInstanceResponse(TeaModel):
    """Response model for the acfeeweb instance-creation call."""

    # Wire-format keys; identical to the attribute names, serializer order.
    _FIELD_KEYS = ('req_msg_id', 'result_code', 'result_msg',
                   'process_instance_id')

    def __init__(
        self,
        req_msg_id: str = None,
        result_code: str = None,
        result_msg: str = None,
        process_instance_id: str = None,
    ):
        # Unique request id, used for tracing and troubleshooting.
        self.req_msg_id = req_msg_id
        # Result code; OK generally means the call succeeded.
        self.result_code = result_code
        # Textual description of the error, if any.
        self.result_msg = result_msg
        # Approval-flow instance id.
        self.process_instance_id = process_instance_id

    def validate(self):
        """No required fields on a response."""
        pass

    def to_map(self):
        """Serialize all non-None fields into a plain dict."""
        return {key: getattr(self, key)
                for key in self._FIELD_KEYS
                if getattr(self, key) is not None}

    def from_map(self, m: dict = None):
        """Populate this model from *m*, skipping absent/None entries."""
        m = m or dict()
        for key in self._FIELD_KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class UpdateAcfeewebStateRequest(TeaModel):
    """Request model for updating the state of an acfeeweb record."""

    # Wire-format keys; identical to the attribute names, serializer order.
    _FIELD_KEYS = (
        'auth_token', 'id', 'domain_code', 'domain_name', 'domain_version',
        'product_code', 'product_name', 'service_code', 'service_name',
        'offer_code', 'offer_name', 'oms_data', 'oms_biz_no',
        'verification_cache', 'verification_url', 'bpms_id', 'config_data',
        'status', 'creator', 'modifor',
    )

    def __init__(
        self,
        auth_token: str = None,
        id: str = None,
        domain_code: str = None,
        domain_name: str = None,
        domain_version: str = None,
        product_code: str = None,
        product_name: str = None,
        service_code: str = None,
        service_name: str = None,
        offer_code: str = None,
        offer_name: str = None,
        oms_data: str = None,
        oms_biz_no: str = None,
        verification_cache: str = None,
        verification_url: str = None,
        bpms_id: str = None,
        config_data: str = None,
        status: str = None,
        creator: str = None,
        modifor: str = None,
    ):
        # Authorization token used in OAuth mode.
        self.auth_token = auth_token
        # Primary key id of the billing-verification record.
        self.id = id
        # Metering-domain code.
        self.domain_code = domain_code
        # Metering-domain name.
        self.domain_name = domain_name
        # Metering-domain version.
        self.domain_version = domain_version
        # Business-product code.
        self.product_code = product_code
        # Business-product name.
        self.product_name = product_name
        # Channel-product code.
        self.service_code = service_code
        # Channel-product name.
        self.service_name = service_name
        # Offer (commodity) code.
        self.offer_code = offer_code
        # Offer (commodity) name.
        self.offer_name = offer_name
        # Metering data to be verified.
        self.oms_data = oms_data
        # Business idempotency number of the metering data.
        self.oms_biz_no = oms_biz_no
        # Cached payload.
        self.verification_cache = verification_cache
        # Cache link (URL).
        self.verification_url = verification_url
        # Approval-flow id.
        self.bpms_id = bpms_id
        # Configuration payload.
        self.config_data = config_data
        # Status.
        self.status = status
        # Creator.
        self.creator = creator
        # Last modifier.
        self.modifor = modifor

    def validate(self):
        """Every field except auth_token is mandatory."""
        for key in self._FIELD_KEYS[1:]:
            self.validate_required(getattr(self, key), key)

    def to_map(self):
        """Serialize all non-None fields into a plain dict."""
        return {key: getattr(self, key)
                for key in self._FIELD_KEYS
                if getattr(self, key) is not None}

    def from_map(self, m: dict = None):
        """Populate this model from *m*, skipping absent/None entries."""
        m = m or dict()
        for key in self._FIELD_KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class UpdateAcfeewebStateResponse(TeaModel):
    """Response model for the acfeeweb state-update call."""

    # Wire-format keys; identical to the attribute names, serializer order.
    _FIELD_KEYS = ('req_msg_id', 'result_code', 'result_msg', 'sync_result')

    def __init__(
        self,
        req_msg_id: str = None,
        result_code: str = None,
        result_msg: str = None,
        sync_result: bool = None,
    ):
        # Unique request id, used for tracing and troubleshooting.
        self.req_msg_id = req_msg_id
        # Result code; OK generally means the call succeeded.
        self.result_code = result_code
        # Textual description of the error, if any.
        self.result_msg = result_msg
        # Success / failure flag.
        self.sync_result = sync_result

    def validate(self):
        """No required fields on a response."""
        pass

    def to_map(self):
        """Serialize all non-None fields into a plain dict."""
        return {key: getattr(self, key)
                for key in self._FIELD_KEYS
                if getattr(self, key) is not None}

    def from_map(self, m: dict = None):
        """Populate this model from *m*, skipping absent/None entries."""
        m = m or dict()
        for key in self._FIELD_KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class CreateAccountVerificationRequest(TeaModel):
    """Request model for creating an account billing-verification record."""

    # Wire-format keys; identical to the attribute names, serializer order.
    _FIELD_KEYS = (
        'auth_token', 'id', 'domain_code', 'domain_name', 'domain_version',
        'product_code', 'product_name', 'service_code', 'service_name',
        'offer_code', 'offer_name', 'oms_data', 'oms_biz_no',
        'verification_cache', 'verification_url', 'bpms_id', 'config_data',
        'status', 'creator', 'modifor',
    )

    def __init__(
        self,
        auth_token: str = None,
        id: int = None,
        domain_code: str = None,
        domain_name: str = None,
        domain_version: str = None,
        product_code: str = None,
        product_name: str = None,
        service_code: str = None,
        service_name: str = None,
        offer_code: str = None,
        offer_name: str = None,
        oms_data: str = None,
        oms_biz_no: str = None,
        verification_cache: str = None,
        verification_url: str = None,
        bpms_id: str = None,
        config_data: str = None,
        status: str = None,
        creator: str = None,
        modifor: str = None,
    ):
        # Authorization token used in OAuth mode.
        self.auth_token = auth_token
        # Primary key id of the billing-verification record.
        self.id = id
        # Metering-domain code.
        self.domain_code = domain_code
        # Metering-domain name.
        self.domain_name = domain_name
        # Metering-domain version.
        self.domain_version = domain_version
        # Business-product code.
        self.product_code = product_code
        # Business-product name.
        self.product_name = product_name
        # Channel-product code.
        self.service_code = service_code
        # Channel-product name.
        self.service_name = service_name
        # Offer (commodity) code.
        self.offer_code = offer_code
        # Offer (commodity) name.
        self.offer_name = offer_name
        # Metering data to be verified.
        self.oms_data = oms_data
        # Business idempotency number of the metering data.
        self.oms_biz_no = oms_biz_no
        # Cached payload.
        self.verification_cache = verification_cache
        # Cache link (URL).
        self.verification_url = verification_url
        # Approval-flow id.
        self.bpms_id = bpms_id
        # Configuration payload.
        self.config_data = config_data
        # Status.
        self.status = status
        # Creator.
        self.creator = creator
        # Last modifier.
        self.modifor = modifor

    def validate(self):
        """Every field except auth_token is mandatory."""
        for key in self._FIELD_KEYS[1:]:
            self.validate_required(getattr(self, key), key)

    def to_map(self):
        """Serialize all non-None fields into a plain dict."""
        return {key: getattr(self, key)
                for key in self._FIELD_KEYS
                if getattr(self, key) is not None}

    def from_map(self, m: dict = None):
        """Populate this model from *m*, skipping absent/None entries."""
        m = m or dict()
        for key in self._FIELD_KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class CreateAccountVerificationResponse(TeaModel):
    """Response model for the account-verification creation call."""

    # Wire-format keys; identical to the attribute names, serializer order.
    _FIELD_KEYS = ('req_msg_id', 'result_code', 'result_msg',
                   'process_instance_id')

    def __init__(
        self,
        req_msg_id: str = None,
        result_code: str = None,
        result_msg: str = None,
        process_instance_id: str = None,
    ):
        # Unique request id, used for tracing and troubleshooting.
        self.req_msg_id = req_msg_id
        # Result code; OK generally means the call succeeded.
        self.result_code = result_code
        # Textual description of the error, if any.
        self.result_msg = result_msg
        # Approval-flow instance id.
        self.process_instance_id = process_instance_id

    def validate(self):
        """No required fields on a response."""
        pass

    def to_map(self):
        """Serialize all non-None fields into a plain dict."""
        return {key: getattr(self, key)
                for key in self._FIELD_KEYS
                if getattr(self, key) is not None}

    def from_map(self, m: dict = None):
        """Populate this model from *m*, skipping absent/None entries."""
        m = m or dict()
        for key in self._FIELD_KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class SyncAccountVerificationRequest(TeaModel):
    """Request model for synchronizing an account billing-verification record."""

    # Wire-format keys; identical to the attribute names, serializer order.
    _FIELD_KEYS = (
        'auth_token', 'id', 'domain_code', 'domain_name', 'domain_version',
        'product_code', 'product_name', 'service_code', 'service_name',
        'offer_code', 'offer_name', 'oms_data', 'oms_biz_no',
        'verification_cache', 'verification_url', 'bpms_id', 'config_data',
        'status', 'creator', 'modifor',
    )

    def __init__(
        self,
        auth_token: str = None,
        id: int = None,
        domain_code: str = None,
        domain_name: str = None,
        domain_version: str = None,
        product_code: str = None,
        product_name: str = None,
        service_code: str = None,
        service_name: str = None,
        offer_code: str = None,
        offer_name: str = None,
        oms_data: str = None,
        oms_biz_no: str = None,
        verification_cache: str = None,
        verification_url: str = None,
        bpms_id: str = None,
        config_data: str = None,
        status: str = None,
        creator: str = None,
        modifor: str = None,
    ):
        # Authorization token used in OAuth mode.
        self.auth_token = auth_token
        # Primary key id of the billing-verification record.
        self.id = id
        # Metering-domain code.
        self.domain_code = domain_code
        # Metering-domain name.
        self.domain_name = domain_name
        # Metering-domain version.
        self.domain_version = domain_version
        # Business-product code.
        self.product_code = product_code
        # Business-product name.
        self.product_name = product_name
        # Channel-product code.
        self.service_code = service_code
        # Channel-product name.
        self.service_name = service_name
        # Offer (commodity) code.
        self.offer_code = offer_code
        # Offer (commodity) name.
        self.offer_name = offer_name
        # Metering data to be verified.
        self.oms_data = oms_data
        # Business idempotency number of the metering data.
        self.oms_biz_no = oms_biz_no
        # Cached payload.
        self.verification_cache = verification_cache
        # Cache link (URL).
        self.verification_url = verification_url
        # Approval-flow id.
        self.bpms_id = bpms_id
        # Configuration payload.
        self.config_data = config_data
        # Status.
        self.status = status
        # Creator.
        self.creator = creator
        # Last modifier.
        self.modifor = modifor

    def validate(self):
        """Every field except auth_token is mandatory."""
        for key in self._FIELD_KEYS[1:]:
            self.validate_required(getattr(self, key), key)

    def to_map(self):
        """Serialize all non-None fields into a plain dict."""
        return {key: getattr(self, key)
                for key in self._FIELD_KEYS
                if getattr(self, key) is not None}

    def from_map(self, m: dict = None):
        """Populate this model from *m*, skipping absent/None entries."""
        m = m or dict()
        for key in self._FIELD_KEYS:
            value = m.get(key)
            if value is not None:
                setattr(self, key, value)
        return self
class SyncAccountVerificationResponse(TeaModel):
    """Response model for the account-verification sync call."""

    # Serialized field names, in wire order.
    _FIELDS = ('req_msg_id', 'result_code', 'result_msg', 'sync_result')

    def __init__(
        self,
        req_msg_id: str = None,
        result_code: str = None,
        result_msg: str = None,
        sync_result: bool = None,
    ):
        # Unique request id, used for tracing and troubleshooting.
        self.req_msg_id = req_msg_id
        # Result code; "OK" generally means the call succeeded.
        self.result_code = result_code
        # Human-readable description of any error.
        self.result_msg = result_msg
        # Whether the synchronization succeeded.
        self.sync_result = sync_result

    def validate(self):
        # No required fields on a response.
        pass

    def to_map(self):
        """Serialize all non-None fields into a plain dict."""
        result = dict()
        for field_name in self._FIELDS:
            value = getattr(self, field_name)
            if value is not None:
                result[field_name] = value
        return result

    def from_map(self, m: dict = None):
        """Populate this response from dict *m*; returns self."""
        data = m or dict()
        for field_name in self._FIELDS:
            value = data.get(field_name)
            if value is not None:
                setattr(self, field_name, value)
        return self
| [
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
fefd8b8b7dd964ec6ed10ab3fa8cbe2e98ec471b | 6fdf0a15d5c62ac8e7ccf02846b1068169f79377 | /chessForView.py/main.py | dcd703774c7d9756468575d2e8d56b809f4a2209 | [] | no_license | DivyeshMakwana12599/DCB-linux | e76940e81adbce0429761ed3f6ccb6aece9954f6 | dc22bb4df177372329f6a7023972b7912bdeba7e | refs/heads/master | 2023-05-20T01:06:16.474800 | 2021-06-07T11:21:18 | 2021-06-07T11:21:18 | 374,107,849 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,535 | py | import requests
import pygame as p
from requests.api import request
# Window geometry and rendering settings.
width = height = 550
running = True
MAX_FPS = 60
IMAGES = {}  # piece code (e.g. "wp") -> scaled pygame surface, filled by loadImages()
DIMENSION = 8
SQ_SIZE = height // DIMENSION
# Base URL of the game-state REST service polled in main().
url = "http://localhost:5000/"
p.init()
# Extra 100px of height below the board for the file letters.
screen = p.display.set_mode((width - 4, height + 100))
# creating position from fen
def fenToPos(fen):
    """Expand the board field of a FEN string into an 8x8 matrix.

    Each cell holds a two-letter sprite key ("wp", "br", ...) or "--" for
    an empty square. Rank 8 (Black's back rank) ends up in row 0.
    """
    piece_codes = {
        "r": "br", "n": "bn", "b": "bb", "q": "bq", "k": "bk", "p": "bp",
        "R": "wr", "N": "wn", "B": "wb", "Q": "wq", "K": "wk", "P": "wp",
    }
    board = [[""] * 8 for _ in range(8)]
    rank_idx = 0
    file_idx = 0
    for ch in fen:
        # Wrap back to the first file once a full rank has been emitted.
        if not file_idx % 8:
            file_idx = 0
        if ch == "/":
            rank_idx += 1
        elif ch.isdigit():
            # A digit encodes that many consecutive empty squares.
            run = int(ch)
            board[rank_idx][file_idx:file_idx + run] = ["--"] * run
            file_idx += run
        else:
            board[rank_idx][file_idx] = piece_codes[ch]
            file_idx += 1
    return board
# Module-level board state (starting position); refreshed from the server in main().
pos = fenToPos("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR")
print(pos)
# blit text to screen
def addText(text, x, y, FONT_SIZE):
    """Render *text* in black at (x, y) on the global screen surface."""
    rendered = p.font.Font("freesansbold.ttf", FONT_SIZE).render(text, True, (0, 0, 0))
    screen.blit(rendered, (x, y))
# load Images to python
def loadImages():
    """Load and scale every piece sprite into the global IMAGES cache."""
    # Same order as the original explicit list: all black pieces, then white.
    for colour in "bw":
        for kind in "rnbqkp":
            piece = colour + kind
            IMAGES[piece] = p.transform.scale(
                p.image.load("chessForBoard/images/" + piece + ".png"), (SQ_SIZE, SQ_SIZE)
            )
# Making 2d ChessBoard and displaying Images on it
def drawGameState(screen, pos):
    """Render one frame: board squares first, then the pieces on top."""
    drawBoard(screen)
    drawPieces(screen, pos)
# Making 2d Chess Board
def drawBoard(screen):
    """Paint the 8x8 checkerboard (light/dark squares alternate)."""
    colors = [p.Color("#f0d9b5"), p.Color("#b58863")]
    for square in range(DIMENSION * DIMENSION):
        r, c = divmod(square, DIMENSION)
        # Parity of rank+file selects light vs dark.
        p.draw.rect(
            screen, colors[(r + c) % 2], p.Rect(c * SQ_SIZE, r * SQ_SIZE, SQ_SIZE, SQ_SIZE)
        )
# displaying pieces on board according to input screen
def drawPieces(screen, board):
    """Blit each piece sprite, then the rank numbers and file letters."""
    files = ["A", "B", "C", "D", "E", "F", "G", "H"]
    ranks = ["8", "7", "6", "5", "4", "3", "2", "1"]
    for r in range(DIMENSION):
        for c in range(DIMENSION):
            square = board[r][c]
            if square != "--":
                screen.blit(
                    IMAGES[square], p.Rect(c * SQ_SIZE, r * SQ_SIZE, SQ_SIZE, SQ_SIZE)
                )
    # Coordinate labels: ranks down the left edge, files along the bottom.
    for i in range(8):
        addText(ranks[i], 0, i * SQ_SIZE, 13)
        addText(files[i], (i * SQ_SIZE) + SQ_SIZE + 2, height - 18, 13)
def main():
    """Poll the game service for the current FEN each frame and render it."""
    running = True  # NOTE(review): local; shadows the module-level `running`
    global url, pos
    gameId = input("Enter Game ID: ")
    loadImages()
    print(IMAGES)
    clock = p.time.Clock()
    p.display.set_caption("Digital Chess Board")
    p.display.set_icon(p.image.load("chessForBoard/images/Icon.png"))
    while running:
        for e in p.event.get():
            if e.type == p.QUIT:
                running = False
        # Best-effort refresh: on any network/JSON error keep the last board.
        try:
            r = requests.get(url + "api/games/" + gameId)
            r = r.json()
            pos = fenToPos(r["currentPosition"])
        except Exception as e:
            print(e)
        screen.fill(p.Color("#fffdd0"))
        drawGameState(screen, pos)
        clock.tick(MAX_FPS)
        p.display.flip()
# Script entry point.
if __name__ == "__main__":
    main()
"divyeshmakwana12599@gmail.com"
] | divyeshmakwana12599@gmail.com |
dce66acaecaeb92ead8da8165aa063f5144d1414 | 0c005f75771101fdea1f647f124343077af19c36 | /test_word_break.py | 3a437518d9123157cc2a7afd251265b23f0ac32f | [
"MIT"
] | permissive | brigitteunger/katas | 19ff80a43d1c8fe0e6a49a6790495e716f09f10d | 3f9af88fe5d98753360457084741f573c863dc25 | refs/heads/master | 2023-01-01T00:57:45.294204 | 2020-10-13T21:01:18 | 2020-10-13T21:01:18 | 265,810,827 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,603 | py | import unittest
from typing import List, Set
from data_word_break import s_2, wordDict_2
class Solution:
    def wordBreak(self, s: str, wordDict: List[str]) -> bool:
        """Return True if *s* can be segmented into words from *wordDict*.

        Classic O(len(s)^2) dynamic programme: dp[i] is True when the
        prefix s[:i] is segmentable.
        """
        # BUG FIX: the original returned [] (wrong type) for an empty
        # wordDict; the DP below already yields the correct bool (False for
        # any non-empty s, True for s == ""), so no special case is needed.
        words = set(wordDict)
        dp = [False] * (len(s) + 1)
        dp[0] = True  # the empty prefix is trivially segmentable
        for i in range(1, len(s) + 1):
            for j in range(i):
                if dp[j] and s[j:i] in words:
                    dp[i] = True
                    break
        return dp[-1]
class TestFindWords(unittest.TestCase):
def setUp(self):
self.sol = Solution()
def testWordBreak_1(self):
s = "leetcode"
word_dict = ["leet", "code"]
segmented = self.sol.wordBreak(s, word_dict)
self.assertTrue(segmented)
def testWordBreak_2(self):
s = "applepenapple"
word_dict = ["apple", "pen"]
segmented = self.sol.wordBreak(s, word_dict)
self.assertTrue(segmented)
def testWordBreak_3(self):
s = "catsandog"
word_dict = ["cats", "dog", "sand", "and", "cat"]
segmented = self.sol.wordBreak(s, word_dict)
self.assertFalse(segmented)
def testWordBreak_4(self):
s = "goalspecial"
word_dict = ["go", "goal", "goals", "special"]
segmented = self.sol.wordBreak(s, word_dict)
self.assertTrue(segmented)
def testWordBreak_5(self):
s = s_2
word_dict = wordDict_2
segmented = self.sol.wordBreak(s, word_dict)
self.assertFalse(segmented)
if __name__ == "__main__":
unittest.main()
| [
"brigitteunger@users.noreply.github.com"
] | brigitteunger@users.noreply.github.com |
a4bbedaf2b5ebe13a740b96d0d86a97e762347bc | 22a29f18a2c53158b9bbbc00e177dacc3e3081ca | /demo/accounts/views.py | 23ba455221351a210edd2b7574772e1c9c87aaaf | [] | no_license | loganbeast/Django-WebAPI | b2ff3a8d884d00330fdad9859cd0e5a95d8d759d | 7aa1529ecf6b48366ecf92b049364da8909a484f | refs/heads/master | 2023-02-08T16:40:02.373825 | 2020-12-28T07:02:11 | 2020-12-28T07:02:11 | 324,527,988 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,922 | py | from django.shortcuts import render
from django.contrib import messages
# Create your views here.
from django.shortcuts import render ,redirect
from django.http import HttpResponse
from django.contrib.auth.models import User,auth
# Create your views here.
def register(req):
    """Handle the sign-up form.

    GET renders the form; POST validates uniqueness and password match,
    creates the user, and redirects to the login page. All validation
    failures flash a message and redirect back to the registration form.
    """
    if req.method == 'POST':
        first_name = req.POST['first_name']
        last_name = req.POST['last_name']
        username = req.POST['username']
        password1 = req.POST['password1']
        password2 = req.POST['password2']
        email = req.POST['email']
        if password1 == password2:
            if User.objects.filter(username=username).exists():
                messages.info(req, 'Username taken')
                return redirect('register')
            elif User.objects.filter(email=email).exists():
                messages.info(req, 'email taken')
                return redirect('register')
            else:
                # create_user hashes the password before storing it.
                user = User.objects.create_user(username=username, password=password1, email=email, first_name=first_name, last_name=last_name)
                user.save()
                messages.info(req, 'user created')
                return redirect('login')
        else:
            messages.info(req, 'Password is not matching')
            return redirect('register')
        # BUG FIX: removed the original unreachable `return redirect('/')` —
        # every branch above already returns.
    else:
        return render(req, 'register.html')
def login(req):
    """Authenticate the posted credentials and start a session.

    GET renders the login form; a failed POST flashes a message and
    redirects back to the form.
    """
    if req.method != 'POST':
        return render(req, 'login.html')
    username = req.POST['username']
    password = req.POST['password']
    user = auth.authenticate(username=username, password=password)
    if user is None:
        messages.info(req, "Invalid credentials")
        return redirect('login')
    auth.login(req, user)
    return redirect('/')
def logout(req):
    """End the current session and send the visitor back home."""
    auth.logout(req)
    return redirect('/')
| [
"ss"
] | ss |
8639ae16bf226e4edb29dcac2032b57525c7275d | d8bb5c29c7600ffdf400cef5aaafe5e5feffce66 | /orders/migrations/0002_logs.py | dfa85452d4815487f411fe09efa2bd0b59cd504d | [] | no_license | mark-antonov/hillel_django | 0cc2f9dd124f9e91db8bd78ab8418f01b9cf7eb7 | 61913e7aeead2223a0870d4fcfa2de60641f668d | refs/heads/master | 2023-06-22T14:20:40.425547 | 2021-07-08T14:16:08 | 2021-07-08T14:16:08 | 374,808,336 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 633 | py | # Generated by Django 3.2.4 on 2021-06-13 12:10
from django.db import migrations, models
class Migration(migrations.Migration):
    # Must be applied after the app's initial schema migration.
    dependencies = [
        ('orders', '0001_initial'),
    ]
    operations = [
        # Simple request-audit table: one row per logged request.
        migrations.CreateModel(
            name='Logs',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('path', models.CharField(max_length=100)),
                ('method', models.CharField(max_length=100)),
                # auto_now: refreshed on every save, i.e. "last written", not "created".
                ('timestamp', models.DateTimeField(auto_now=True)),
            ],
        ),
    ]
| [
"antonov.marq@gmail.com"
] | antonov.marq@gmail.com |
271de5d07c5e65241a158766ad42ff511c972f5e | b60fb52d8b4a10215b2636a30bc206f2e4bbeacd | /credentials.py | 6de522808d94adc13cb14d615afeef80c076eca2 | [] | no_license | gabee1987/Project-66 | 3135161f44d1ea6f8c92e84e09f1201f8ce423bb | d577f60d35c10820d4e3e4a445798deced15d6bc | refs/heads/master | 2020-12-02T06:37:51.178544 | 2017-07-12T08:30:56 | 2017-07-12T08:30:56 | 96,866,953 | 0 | 0 | null | 2017-07-12T14:21:37 | 2017-07-11T07:57:12 | HTML | UTF-8 | Python | false | false | 455 | py | import os
import psycopg2
import urllib
# Database connection string
# DATABASE_URL=postgres://user:password@host:port/database
# Register the scheme so urlparse splits user/password/host/port correctly.
urllib.parse.uses_netloc.append('postgres')
url = urllib.parse.urlparse(os.environ.get('DATABASE_URL'))
# Connection keyword arguments — presumably passed to psycopg2.connect(**connection_data);
# verify at the call site.
connection_data = {
    'dbname': url.path[1:],
    'user': url.username,
    'host': url.hostname,
    'password': url.password,
    'port': url.port
}
# Secret key for sessions
secret_key = os.environ.get('SECRET_KEY')
| [
"GreywindGit@users.noreply.github.com"
] | GreywindGit@users.noreply.github.com |
901575547e133ec65da0dc673dc742585f27de73 | e70432549addc22fb8158d5515e07b261340153c | /ProtoPixel/scripts/waves/Noise.py | 47c46f87611000deff2fb07fa6324981936f104e | [
"MIT"
] | permissive | ImanolGo/IngoLightAndBuilding | ff998f10a3fdb3636416640481f396a3f007be0d | 23bfce0633ad056b2db5eb62822dc30c331ba800 | refs/heads/master | 2021-01-25T13:41:45.869247 | 2018-04-12T11:26:23 | 2018-04-12T11:26:23 | 123,606,618 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,633 | py | from protopixel import Content
from openframeworks import *
import os.path
from tempfile import mkdtemp
# ProtoPixel content object: an FBO-backed generator named "Noise".
content = Content("Noise")
side = 256
content.FBO_SIZE = (side, side)
shaderfile = content.add_asset('shader')
shader = ofShader()
# Shader sources are assembled on disk in a temp dir at setup() time.
temp_dir = mkdtemp()
frag_file = os.path.join(temp_dir, 's.frag')
vert_file = os.path.join(temp_dir, 's.vert')
shader_file_of = os.path.join(temp_dir, 's')
# a global variable
color = ofColor(255)  # working color, mutated by the parameter callbacks below
elapsedTime = 0.0  # seconds accumulated inside the current color fade
startColorIndex = 1  # which of color1..color3 the fade starts from
endColorIndex = 2  # ...and which it fades towards
scaleFactor = 10  # base fade duration (seconds) before color_speed scaling
currentTime = 0  # smoothed shader clock, advanced in update()
timeFactor = 0.5
speedFactor = 2.0
# User-facing parameters shown in the ProtoPixel UI.
content.add_parameter("color1", type="color", value=ofColor(255, 255, 255))
content.add_parameter("color2", type="color", value=ofColor(255, 255, 255))
content.add_parameter("color3", type="color", value=ofColor(255, 255, 255))
content.add_parameter("change_hue", value=True)
content.add_parameter("color_speed", min=0.00, max=1.0, value=0.1)
content.add_parameter("speed", min=0.0, max=1.0, value=0.3)
content.add_parameter("noise_grain", min=0.00, max=1.0, value=0.1)
content.add_parameter("stage_mode", value=False)
@content.parameter_changed('change_hue')
def parameter_changed(value):
    """Called when the 'change_hue' toggle flips.

    Turning hue-cycling off snaps the working color back to color1 and
    resets the fade state.
    """
    # BUG FIX: the original declared only `color` global, so the three reset
    # assignments below created dead local variables instead of resetting the
    # module-level fade state.
    global color, elapsedTime, startColorIndex, endColorIndex
    if value == False:
        color.r = content['color1'].r
        color.g = content['color1'].g
        color.b = content['color1'].b
        elapsedTime = 0
        startColorIndex = 1
        endColorIndex = 2
@content.parameter_changed('color1')
def parameter_changed(value):
    """Called whenever 'color1' changes; when hue-cycling is off the
    working color mirrors it directly."""
    global color
    print(value)
    if content['change_hue'] == False:
        base = content['color1']
        color.r = base.r
        color.g = base.g
        color.b = base.b
def setup():
    """One-time init: (re)build the shader sources on disk, load the shader,
    and seed the working color and the smoothed clock."""
    global currentTime, color
    # NOTE(review): 'shader path' is never add_parameter'd above — confirm
    # Content returns a falsy default for unknown parameters.
    if content['shader path']:
        shader_path_changed(content['shader path'])
    with open(frag_file, 'w') as f:
        f.write(frag_contents_prefix)
        f.write(frag_contents)
        f.write(frag_contents_suffix)
    with open(vert_file, 'w') as f:
        f.write(vert_contents)
    shader.load(shader_file_of)
    color = ofColor(content['color1'].r, content['color1'].g, content['color1'].b)
    # BUG FIX: the original read the undefined name `timefactor` (lowercase f),
    # which raised NameError here; the module-level constant is `timeFactor`.
    currentTime = ofGetElapsedTimef() * content['speed'] * timeFactor
def update():
    """Per-frame state update (runs before draw): advances the color fade
    between color1/color2/color3 and the smoothed shader clock."""
    # Nothing to animate when hue-cycling is off.
    if content['change_hue'] == False:
        return
    global elapsedTime, color, startColorIndex, endColorIndex, timeFactor, scaleFactor, currentTime
    elapsedTime += ofGetLastFrameTime()
    # Fade duration: higher color_speed -> shorter fade.
    time = ofMap(content['color_speed'], 0, 1, scaleFactor, scaleFactor / 20.0)
    if elapsedTime > time:
        # Fade finished: advance to the next color pair (indices cycle 1->2->3->1).
        elapsedTime = 0
        startColorIndex = endColorIndex
        endColorIndex = (endColorIndex + 1) % 3 + 1
    amt = elapsedTime / (time)
    startColorStr = 'color' + str(startColorIndex)
    endColorStr = 'color' + str(endColorIndex)
    # Linear blend of the two UI colors, written into the working color.
    color.r = int(ofLerp(content[startColorStr].r, content[endColorStr].r, amt))
    color.g = int(ofLerp(content[startColorStr].g, content[endColorStr].g, amt))
    color.b = int(ofLerp(content[startColorStr].b, content[endColorStr].b, amt))
    # Smoothed clock: exponential approach towards the target time.
    targetTime = ofGetElapsedTimef() * content['speed'] * timeFactor
    currentTime = currentTime + (targetTime - currentTime) * 0.1
def draw():
    """Render one frame of the noise shader onto the content FBO."""
    global color, currentTime, speedFactor
    red = color.r / 255.0
    green = color.g / 255.0
    blue = color.b / 255.0
    speed = ofMap(content['speed'], 0.0, 1.0, 0.0, speedFactor)
    if not shader.isLoaded():
        return
    shader.begin()
    shader.setUniform3f('iColor', red, green, blue)
    shader.setUniform3f('iResolution', float(content.FBO_SIZE[0]), float(content.FBO_SIZE[1]), 0.0)
    shader.setUniform1f('iGlobalTime', ofGetElapsedTimef() * speed)
    shader.setUniform1f('inoise_grain', content['noise_grain'] * 10.0)
    # Full-FBO quad centred on the origin.
    ofDrawRectangle(-side / 2., -side / 2., side, side)
    shader.end()
@content.parameter_changed('shader path')
def shader_path_changed(p):
    """Rebuild the on-disk shader sources from the user-supplied fragment
    file at *p* and reload the shader."""
    print(p)
    # BUG FIX: read via a context manager so the source file handle is closed
    # promptly (the original `open(p).read()` leaked it until GC).
    with open(p) as src:
        frag_contents = src.read()
    with open(frag_file, 'w') as f:
        f.write(frag_contents_prefix)
        f.write(frag_contents)
        f.write(frag_contents_suffix)
    with open(vert_file, 'w') as f:
        f.write(vert_contents)
    shader.load(shader_file_of)
# Pass-through vertex shader: forwards clip-space position to the fragment stage.
vert_contents = """
#version 150
in vec4 position;
out vec4 position_frag;
void main() {
    gl_Position = position;
    position_frag = position;
}
"""
# Prepended to the user fragment source: declares the uniforms set in draw().
frag_contents_prefix = """
#version 150
out vec4 outputColor;
uniform vec3 iResolution;
uniform float iGlobalTime;
uniform float inoise_grain;
in vec4 position_frag;
"""
# Default fragment body (Shadertoy-style mainImage), used until a custom
# 'shader path' is supplied.
frag_contents = """
// This code can be found in
// https://www.shadertoy.com/view/Ms3SWs
// and it's property of its creator.
// This is distributed for illustration purposes only.
uniform vec3 iColor = vec3(1.0,1.0,1.0);
float hash(vec2 p)
{
    vec3 p3 = fract(vec3(p.xyx) * 0.1031);
    p3 += dot(p3, p3.yzx + 19.19);
    return fract((p3.x + p3.y) * p3.z);
}
float ang(vec2 uv, vec2 center){
    return atan((uv.y-center.y),(uv.x-center.x));
}
float spir(vec2 uv, vec2 loc){
    float dist1=length(uv-loc);
    float dist2=dist1*dist1;
    float layer6=sin((ang(uv,loc)+dist2-iGlobalTime)*6.0);
    layer6 = layer6*dist1;
    return layer6;
}
float ripl(vec2 uv, vec2 loc, float speed, float frequency){
    return sin(iGlobalTime*speed-length(uv-loc)*frequency);
}
float height(in vec2 uv){
    float layer1=sin(iGlobalTime*8.54-inoise_grain*sin(length(uv-vec2(-0.41,-0.47)))*55.0);
    float layer2=sin(iGlobalTime*7.13-inoise_grain*sin(length(uv-vec2(1.35,1.32)))*43.0);
    float layer3=sin(iGlobalTime*7.92-inoise_grain*sin(length(uv-vec2(-0.34,1.28)))*42.5);
    float layer4=sin(iGlobalTime*6.71-inoise_grain*sin(length(uv-vec2(1.23,-0.24)))*47.2);
    float spiral=spir(uv,vec2(0.5,0.5));
    spiral*=3.0;
    float temp = layer1+layer2+layer3+layer4+spiral;
    float b=smoothstep(-1.5,7.0,temp);
    return b*2.0;
}
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    vec2 uv=fragCoord.xy/iResolution.x;
    float waveHeight=0.02+height(uv);
    vec3 color=vec3(waveHeight*iColor.r,waveHeight*iColor.g,waveHeight*iColor.b);
    fragColor = vec4( color, 1.0 );
}
"""
# Appended after the fragment body: maps clip space to pixel coords and calls mainImage.
frag_contents_suffix = """
void main()
{
    vec2 pos = position_frag.xy;
    pos.x /= 2.0;
    pos.y /= 2.0;
    pos.x += 0.5;
    pos.y += 0.5;
    pos.x *= iResolution.x;
    pos.y *= iResolution.y;
    mainImage( outputColor, pos);
}
"""
"yo@imanolgomez.net"
] | yo@imanolgomez.net |
cb456b71bb665743d0f76205cebc055b2afff359 | 7051c1463ae32b7a5ea50697a0b1f97a760431c0 | /Strings/Design_Door_Mat.py | 9c6b9ae80384a8dbc0bf3019c7db28bd57fa4411 | [] | no_license | mukherjeeritwik3/HackerRank-Solutions-python | 15de2bcd56d9f0e072f7913af9417ca1bebaf876 | 2978a23a78bdc4a2818c318afd778b04ca3cb5ba | refs/heads/master | 2022-07-10T08:24:06.197925 | 2020-05-13T10:52:32 | 2020-05-13T10:52:32 | 263,233,025 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 214 | py | N, M = map(int, input().split())
# Top half: growing ".|." pattern, centred with dashes.
for i in range(1, N, 2):
    print(''.join(['.|.'] * i).center(M, '-'))
# Centre line.
print("WELCOME".center(M, '-'))
# Bottom half mirrors the top.
for i in range(N-2, -1, -2):
    print(''.join(['.|.'] * i).center(M, '-'))
| [
"noreply@github.com"
] | noreply@github.com |
c8a53968f8a9b7ca674b33705a8a3e2e2e7088f7 | 0909dd85eecccd7d908bdb235ce850e6b4dda8f9 | /02-program-flow/ranges.py | ab760cd4e7b9c6d680b42b261b43eb1520ac4fab | [] | no_license | NeoGlanding/python-masterclass | 88895812ee5d43ecdd7e1145338bb1394bbbf443 | d60c964408d390b4c813d32983d05b145c8db30e | refs/heads/main | 2023-08-11T15:52:00.595729 | 2021-09-27T11:36:21 | 2021-09-27T11:36:21 | 408,334,065 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 227 | py | for i in range(1,11):
print(i)
for i in range(10, -1, -1): #looping backwards
    print(i)
# Nested ranges: 5 "sets", each printing repetitions 0..10 (messages in Indonesian).
for i in range(1,6):
    print(f'Ini adalah sets ke {i}')
    for j in range(0,11):
        print(f'Ini adalah repetisi ke {j}')
| [
"neoglanding@protonmail.com"
] | neoglanding@protonmail.com |
f104bb6f8281c3057c60069d41856b978e1b533f | c46754b9600a12df4f9d7a6320dfc19aa96b1e1d | /tests/models/x_clip/test_modeling_x_clip.py | 5c602d3d3ef732ee29ccbde5441ab84d15c0e2f7 | [
"Apache-2.0"
] | permissive | huggingface/transformers | ccd52a0d7c59e5f13205f32fd96f55743ebc8814 | 4fa0aff21ee083d0197a898cdf17ff476fae2ac3 | refs/heads/main | 2023-09-05T19:47:38.981127 | 2023-09-05T19:21:33 | 2023-09-05T19:21:33 | 155,220,641 | 102,193 | 22,284 | Apache-2.0 | 2023-09-14T20:44:49 | 2018-10-29T13:56:00 | Python | UTF-8 | Python | false | false | 26,961 | py | # coding=utf-8
# Copyright 2022 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Testing suite for the PyTorch XCLIP model. """
import inspect
import os
import tempfile
import unittest
import numpy as np
from huggingface_hub import hf_hub_download
from transformers import XCLIPConfig, XCLIPTextConfig, XCLIPVisionConfig
from transformers.testing_utils import require_torch, require_torch_multi_gpu, require_vision, slow, torch_device
from transformers.utils import is_torch_available, is_vision_available
from ...test_configuration_common import ConfigTester
from ...test_modeling_common import (
ModelTesterMixin,
_config_zero_init,
floats_tensor,
ids_tensor,
random_attention_mask,
)
from ...test_pipeline_mixin import PipelineTesterMixin
if is_torch_available():
import torch
from torch import nn
from transformers import XCLIPModel, XCLIPTextModel, XCLIPVisionModel
from transformers.models.x_clip.modeling_x_clip import XCLIP_PRETRAINED_MODEL_ARCHIVE_LIST
if is_vision_available():
from transformers import XCLIPProcessor
class XCLIPVisionModelTester:
    """Builds a tiny XCLIPVisionConfig plus random inputs and runs
    shape checks for XCLIPVisionModel."""

    def __init__(
        self,
        parent,
        batch_size=8,
        image_size=30,
        patch_size=2,
        num_channels=3,
        num_frames=8,  # important; the batch size * time must be divisible by the number of frames
        is_training=True,
        hidden_size=32,
        num_hidden_layers=2,
        num_attention_heads=4,
        intermediate_size=37,
        mit_hidden_size=64,
        dropout=0.1,
        attention_dropout=0.1,
        initializer_range=0.02,
        scope=None,
    ):
        self.parent = parent
        self.batch_size = batch_size
        self.image_size = image_size
        self.patch_size = patch_size
        self.num_channels = num_channels
        self.num_frames = num_frames
        self.is_training = is_training
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.intermediate_size = intermediate_size
        self.mit_hidden_size = mit_hidden_size
        self.dropout = dropout
        self.attention_dropout = attention_dropout
        self.initializer_range = initializer_range
        self.scope = scope

        # in ViT, the seq length equals the number of patches + 1 (we add 1 for the [CLS] token)
        num_patches = (image_size // patch_size) ** 2
        self.seq_length = num_patches + 1

    def prepare_config_and_inputs(self):
        # Frames are flattened into the batch axis: (batch * frames, C, H, W).
        pixel_values = floats_tensor(
            [self.batch_size * self.num_frames, self.num_channels, self.image_size, self.image_size]
        )
        config = self.get_config()

        return config, pixel_values

    def get_config(self):
        return XCLIPVisionConfig(
            image_size=self.image_size,
            patch_size=self.patch_size,
            num_channels=self.num_channels,
            num_frames=self.num_frames,
            hidden_size=self.hidden_size,
            num_hidden_layers=self.num_hidden_layers,
            num_attention_heads=self.num_attention_heads,
            intermediate_size=self.intermediate_size,
            mit_hidden_size=self.mit_hidden_size,
            dropout=self.dropout,
            attention_dropout=self.attention_dropout,
            initializer_range=self.initializer_range,
        )

    def create_and_check_model(self, config, pixel_values):
        # Forward pass in eval mode; only output shapes are asserted.
        model = XCLIPVisionModel(config=config)
        model.to(torch_device)
        model.eval()
        with torch.no_grad():
            result = model(pixel_values)
        # expected sequence length = num_patches + 1 (we add 1 for the [CLS] token)
        image_size = (self.image_size, self.image_size)
        patch_size = (self.patch_size, self.patch_size)
        num_patches = (image_size[1] // patch_size[1]) * (image_size[0] // patch_size[0])
        self.parent.assertEqual(
            result.last_hidden_state.shape, (self.batch_size * self.num_frames, num_patches + 1, self.hidden_size)
        )
        self.parent.assertEqual(result.pooler_output.shape, (self.batch_size * self.num_frames, self.hidden_size))

    def prepare_config_and_inputs_for_common(self):
        config_and_inputs = self.prepare_config_and_inputs()
        config, pixel_values = config_and_inputs
        inputs_dict = {"pixel_values": pixel_values}
        return config, inputs_dict
@require_torch
class XCLIPVisionModelTest(ModelTesterMixin, unittest.TestCase):
    """
    Here we also overwrite some of the tests of test_modeling_common.py, as X-CLIP does not use input_ids, inputs_embeds,
    attention_mask and seq_length.
    """

    all_model_classes = (XCLIPVisionModel,) if is_torch_available() else ()
    fx_compatible = False
    test_pruning = False
    test_resize_embeddings = False
    test_head_masking = False

    def setUp(self):
        self.model_tester = XCLIPVisionModelTester(self)
        # Vision-only config: no text modality to validate.
        self.config_tester = ConfigTester(
            self, config_class=XCLIPVisionConfig, has_text_modality=False, hidden_size=37
        )

    def test_config(self):
        self.config_tester.run_common_tests()

    @unittest.skip(reason="X-CLIP does not use inputs_embeds")
    def test_inputs_embeds(self):
        pass

    def test_model_common_attributes(self):
        config, _ = self.model_tester.prepare_config_and_inputs_for_common()

        for model_class in self.all_model_classes:
            model = model_class(config)
            self.assertIsInstance(model.get_input_embeddings(), (nn.Module))
            x = model.get_output_embeddings()
            self.assertTrue(x is None or isinstance(x, nn.Linear))

    def test_forward_signature(self):
        config, _ = self.model_tester.prepare_config_and_inputs_for_common()

        for model_class in self.all_model_classes:
            model = model_class(config)
            signature = inspect.signature(model.forward)
            # signature.parameters is an OrderedDict => so arg_names order is deterministic
            arg_names = [*signature.parameters.keys()]

            expected_arg_names = ["pixel_values"]
            self.assertListEqual(arg_names[:1], expected_arg_names)

    def test_model(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_model(*config_and_inputs)

    # Training is deliberately not exercised for this backbone.
    def test_training(self):
        pass

    def test_training_gradient_checkpointing(self):
        pass

    @unittest.skip(reason="XCLIPVisionModel has no base class and is not available in MODEL_MAPPING")
    def test_save_load_fast_init_from_base(self):
        pass

    @unittest.skip(reason="XCLIPVisionModel has no base class and is not available in MODEL_MAPPING")
    def test_save_load_fast_init_to_base(self):
        pass

    @slow
    def test_model_from_pretrained(self):
        for model_name in XCLIP_PRETRAINED_MODEL_ARCHIVE_LIST[:1]:
            model = XCLIPVisionModel.from_pretrained(model_name)
            self.assertIsNotNone(model)

    def test_gradient_checkpointing_backward_compatibility(self):
        config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()

        for model_class in self.all_model_classes:
            if not model_class.supports_gradient_checkpointing:
                continue

            # NOTE(review): leftover debug print.
            print("Model class:", model_class)

            config.gradient_checkpointing = True
            model = model_class(config)
            self.assertTrue(model.is_gradient_checkpointing)

    def test_attention_outputs(self):
        config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
        config.return_dict = True

        # we add 1 here due to the special message token in X-CLIP's vision encoder
        seq_len = getattr(self.model_tester, "seq_length", None) + 1
        encoder_seq_length = getattr(self.model_tester, "encoder_seq_length", seq_len)

        for model_class in self.all_model_classes:
            inputs_dict["output_attentions"] = True
            inputs_dict["output_hidden_states"] = False
            config.return_dict = True
            model = model_class(config)
            model.to(torch_device)
            model.eval()
            with torch.no_grad():
                outputs = model(**self._prepare_for_class(inputs_dict, model_class))
            self.assertEqual(len(outputs.attentions), self.model_tester.num_hidden_layers)

            # check that output_attentions also work using config
            del inputs_dict["output_attentions"]
            config.output_attentions = True
            model = model_class(config)
            model.to(torch_device)
            model.eval()
            with torch.no_grad():
                outputs = model(**self._prepare_for_class(inputs_dict, model_class))
            self.assertEqual(len(outputs.attentions), self.model_tester.num_hidden_layers)

            self.assertListEqual(
                list(outputs.attentions[0].shape[-3:]),
                [self.model_tester.num_attention_heads, encoder_seq_length, encoder_seq_length],
            )
            out_len = len(outputs)

            # Check attention is always last and order is fine
            inputs_dict["output_attentions"] = True
            inputs_dict["output_hidden_states"] = True
            model = model_class(config)
            model.to(torch_device)
            model.eval()
            with torch.no_grad():
                outputs = model(**self._prepare_for_class(inputs_dict, model_class))
            self.assertEqual(out_len + 1, len(outputs))

            self_attentions = outputs.attentions

            self.assertEqual(len(self_attentions), self.model_tester.num_hidden_layers)
            self.assertListEqual(
                list(self_attentions[0].shape[-3:]),
                [self.model_tester.num_attention_heads, encoder_seq_length, encoder_seq_length],
            )

    @require_torch_multi_gpu
    def test_multi_gpu_data_parallel_forward(self):
        config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()

        # some params shouldn't be scattered by nn.DataParallel
        # so just remove them if they are present.
        blacklist_non_batched_params = ["head_mask", "decoder_head_mask", "cross_attn_head_mask"]
        for k in blacklist_non_batched_params:
            inputs_dict.pop(k, None)

        # move input tensors to cuda:O
        for k, v in inputs_dict.items():
            if torch.is_tensor(v):
                inputs_dict[k] = v.to(0)

        for model_class in self.all_model_classes:
            model = model_class(config=config)
            model.to(0)
            model.eval()

            # Wrap model in nn.DataParallel
            model = nn.DataParallel(model)
            with torch.no_grad():
                test = self._prepare_for_class(inputs_dict, model_class)
                for k, v in test.items():
                    # NOTE(review): leftover debug prints of the scattered inputs.
                    if isinstance(v, torch.Tensor):
                        print(k, v.shape)
                    else:
                        print(k, v)
                _ = model(**self._prepare_for_class(inputs_dict, model_class))
class XCLIPTextModelTester:
    """Builds a tiny XCLIPTextConfig plus random token inputs and runs
    shape checks for XCLIPTextModel."""

    def __init__(
        self,
        parent,
        batch_size=8,
        seq_length=7,
        is_training=True,
        use_input_mask=True,
        use_labels=True,
        vocab_size=99,
        hidden_size=32,
        num_hidden_layers=2,
        num_attention_heads=4,
        intermediate_size=37,
        dropout=0.1,
        attention_dropout=0.1,
        max_position_embeddings=512,
        initializer_range=0.02,
        scope=None,
    ):
        self.parent = parent
        self.batch_size = batch_size
        self.seq_length = seq_length
        self.is_training = is_training
        self.use_input_mask = use_input_mask
        self.use_labels = use_labels
        self.vocab_size = vocab_size
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.intermediate_size = intermediate_size
        self.dropout = dropout
        self.attention_dropout = attention_dropout
        self.max_position_embeddings = max_position_embeddings
        self.initializer_range = initializer_range
        self.scope = scope

    def prepare_config_and_inputs(self):
        input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size)

        input_mask = None
        if self.use_input_mask:
            input_mask = random_attention_mask([self.batch_size, self.seq_length])

        if input_mask is not None:
            # Rewrite the mask into a contiguous prefix of 1s per row
            # (attended prefix followed by padding), with a random split point.
            batch_size, seq_length = input_mask.shape
            rnd_start_indices = np.random.randint(1, seq_length - 1, size=(batch_size,))
            for batch_idx, start_index in enumerate(rnd_start_indices):
                input_mask[batch_idx, :start_index] = 1
                input_mask[batch_idx, start_index:] = 0

        config = self.get_config()

        return config, input_ids, input_mask

    def get_config(self):
        return XCLIPTextConfig(
            vocab_size=self.vocab_size,
            hidden_size=self.hidden_size,
            num_hidden_layers=self.num_hidden_layers,
            num_attention_heads=self.num_attention_heads,
            intermediate_size=self.intermediate_size,
            dropout=self.dropout,
            attention_dropout=self.attention_dropout,
            max_position_embeddings=self.max_position_embeddings,
            initializer_range=self.initializer_range,
        )

    def create_and_check_model(self, config, input_ids, input_mask):
        model = XCLIPTextModel(config=config)
        model.to(torch_device)
        model.eval()
        with torch.no_grad():
            # Run both with and without the attention mask; shapes are
            # asserted on the unmasked result.
            result = model(input_ids, attention_mask=input_mask)
            result = model(input_ids)
        self.parent.assertEqual(result.last_hidden_state.shape, (self.batch_size, self.seq_length, self.hidden_size))
        self.parent.assertEqual(result.pooler_output.shape, (self.batch_size, self.hidden_size))

    def prepare_config_and_inputs_for_common(self):
        config_and_inputs = self.prepare_config_and_inputs()
        config, input_ids, input_mask = config_and_inputs
        inputs_dict = {"input_ids": input_ids, "attention_mask": input_mask}
        return config, inputs_dict
@require_torch
class XCLIPTextModelTest(ModelTesterMixin, unittest.TestCase):
all_model_classes = (XCLIPTextModel,) if is_torch_available() else ()
fx_compatible = False
test_pruning = False
test_head_masking = False
def setUp(self):
self.model_tester = XCLIPTextModelTester(self)
self.config_tester = ConfigTester(self, config_class=XCLIPTextConfig, hidden_size=37)
def test_config(self):
self.config_tester.run_common_tests()
def test_model(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_model(*config_and_inputs)
def test_training(self):
pass
def test_training_gradient_checkpointing(self):
pass
@unittest.skip(reason="X-CLIP does not use inputs_embeds")
def test_inputs_embeds(self):
pass
@unittest.skip(reason="XCLIPTextModel has no base class and is not available in MODEL_MAPPING")
def test_save_load_fast_init_from_base(self):
pass
@unittest.skip(reason="XCLIPTextModel has no base class and is not available in MODEL_MAPPING")
def test_save_load_fast_init_to_base(self):
pass
@slow
def test_model_from_pretrained(self):
for model_name in XCLIP_PRETRAINED_MODEL_ARCHIVE_LIST[:1]:
model = XCLIPTextModel.from_pretrained(model_name)
self.assertIsNotNone(model)
class XCLIPModelTester:
def __init__(
self,
parent,
text_kwargs=None,
vision_kwargs=None,
projection_dim=64,
mit_hidden_size=64,
is_training=True,
):
if text_kwargs is None:
text_kwargs = {}
if vision_kwargs is None:
vision_kwargs = {}
self.parent = parent
self.projection_dim = projection_dim
self.mit_hidden_size = mit_hidden_size
self.text_model_tester = XCLIPTextModelTester(parent, **text_kwargs)
self.vision_model_tester = XCLIPVisionModelTester(parent, **vision_kwargs)
self.is_training = is_training
def prepare_config_and_inputs(self):
text_config, input_ids, attention_mask = self.text_model_tester.prepare_config_and_inputs()
vision_config, _ = self.vision_model_tester.prepare_config_and_inputs()
pixel_values = floats_tensor(
[
self.vision_model_tester.batch_size,
self.vision_model_tester.num_frames,
self.vision_model_tester.num_channels,
self.vision_model_tester.image_size,
self.vision_model_tester.image_size,
]
)
config = self.get_config()
return config, input_ids, attention_mask, pixel_values
def get_config(self):
return XCLIPConfig.from_text_vision_configs(
self.text_model_tester.get_config(),
self.vision_model_tester.get_config(),
projection_dim=self.projection_dim,
)
def create_and_check_model(self, config, input_ids, attention_mask, pixel_values):
model = XCLIPModel(config).to(torch_device).eval()
with torch.no_grad():
result = model(input_ids, pixel_values, attention_mask)
self.parent.assertEqual(
result.logits_per_video.shape,
(self.vision_model_tester.batch_size, self.text_model_tester.batch_size),
)
self.parent.assertEqual(
result.logits_per_text.shape,
(self.text_model_tester.batch_size, self.vision_model_tester.batch_size),
)
def prepare_config_and_inputs_for_common(self):
config_and_inputs = self.prepare_config_and_inputs()
config, input_ids, attention_mask, pixel_values = config_and_inputs
inputs_dict = {
"input_ids": input_ids,
"attention_mask": attention_mask,
"pixel_values": pixel_values,
"return_loss": True,
}
return config, inputs_dict
@require_torch
class XCLIPModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
all_model_classes = (XCLIPModel,) if is_torch_available() else ()
pipeline_model_mapping = {"feature-extraction": XCLIPModel} if is_torch_available() else {}
fx_compatible = False
test_head_masking = False
test_pruning = False
test_resize_embeddings = False
test_attention_outputs = False
test_torchscript = False
maxdiff = None
def setUp(self):
self.model_tester = XCLIPModelTester(self)
def test_model(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_model(*config_and_inputs)
@unittest.skip(reason="Hidden_states is tested in individual model tests")
def test_hidden_states_output(self):
pass
@unittest.skip(reason="Inputs_embeds is tested in individual model tests")
def test_inputs_embeds(self):
pass
@unittest.skip(reason="Retain_grad is tested in individual model tests")
def test_retain_grad_hidden_states_attentions(self):
pass
@unittest.skip(reason="XCLIPModel does not have input/output embeddings")
def test_model_common_attributes(self):
pass
@unittest.skip(reason="XCLIPModel does not support feedforward chunking")
def test_feed_forward_chunking(self):
pass
# override as the `logit_scale`, `prompts_generator.alpha` parameters require special treatment
def test_initialization(self):
config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
configs_no_init = _config_zero_init(config)
for model_class in self.all_model_classes:
model = model_class(config=configs_no_init)
for name, param in model.named_parameters():
if param.requires_grad:
# check if `logit_scale` is initilized as per the original implementation
if name == "logit_scale":
self.assertAlmostEqual(
param.data.item(),
np.log(1 / 0.07),
delta=1e-3,
msg=f"Parameter {name} of model {model_class} seems not properly initialized",
)
elif name == "prompts_generator.alpha":
self.assertAlmostEqual(param.data.mean().item(), model.config.prompt_alpha)
else:
self.assertIn(
((param.data.mean() * 1e9).round() / 1e9).item(),
[0.0, 1.0],
msg=f"Parameter {name} of model {model_class} seems not properly initialized",
)
def _create_and_check_torchscript(self, config, inputs_dict):
if not self.test_torchscript:
return
configs_no_init = _config_zero_init(config) # To be sure we have no Nan
configs_no_init.torchscript = True
configs_no_init.return_dict = False
for model_class in self.all_model_classes:
model = model_class(config=configs_no_init)
model.to(torch_device)
model.eval()
try:
input_ids = inputs_dict["input_ids"]
pixel_values = inputs_dict["pixel_values"] # X-CLIP needs pixel_values
traced_model = torch.jit.trace(model, (input_ids, pixel_values))
except RuntimeError:
self.fail("Couldn't trace module.")
with tempfile.TemporaryDirectory() as tmp_dir_name:
pt_file_name = os.path.join(tmp_dir_name, "traced_model.pt")
try:
torch.jit.save(traced_model, pt_file_name)
except Exception:
self.fail("Couldn't save module.")
try:
loaded_model = torch.jit.load(pt_file_name)
except Exception:
self.fail("Couldn't load module.")
model.to(torch_device)
model.eval()
loaded_model.to(torch_device)
loaded_model.eval()
model_state_dict = model.state_dict()
loaded_model_state_dict = loaded_model.state_dict()
non_persistent_buffers = {}
for key in loaded_model_state_dict.keys():
if key not in model_state_dict.keys():
non_persistent_buffers[key] = loaded_model_state_dict[key]
loaded_model_state_dict = {
key: value for key, value in loaded_model_state_dict.items() if key not in non_persistent_buffers
}
self.assertEqual(set(model_state_dict.keys()), set(loaded_model_state_dict.keys()))
model_buffers = list(model.buffers())
for non_persistent_buffer in non_persistent_buffers.values():
found_buffer = False
for i, model_buffer in enumerate(model_buffers):
if torch.equal(non_persistent_buffer, model_buffer):
found_buffer = True
break
self.assertTrue(found_buffer)
model_buffers.pop(i)
models_equal = True
for layer_name, p1 in model_state_dict.items():
p2 = loaded_model_state_dict[layer_name]
if p1.data.ne(p2.data).sum() > 0:
models_equal = False
self.assertTrue(models_equal)
def test_load_vision_text_config(self):
config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
# Save XCLIPConfig and check if we can load XCLIPVisionConfig from it
with tempfile.TemporaryDirectory() as tmp_dir_name:
config.save_pretrained(tmp_dir_name)
vision_config = XCLIPVisionConfig.from_pretrained(tmp_dir_name)
self.assertDictEqual(config.vision_config.to_dict(), vision_config.to_dict())
# Save XCLIPConfig and check if we can load XCLIPTextConfig from it
with tempfile.TemporaryDirectory() as tmp_dir_name:
config.save_pretrained(tmp_dir_name)
text_config = XCLIPTextConfig.from_pretrained(tmp_dir_name)
self.assertDictEqual(config.text_config.to_dict(), text_config.to_dict())
@slow
def test_model_from_pretrained(self):
for model_name in XCLIP_PRETRAINED_MODEL_ARCHIVE_LIST[:1]:
model = XCLIPModel.from_pretrained(model_name)
self.assertIsNotNone(model)
# We will verify our results on a spaghetti video
def prepare_video():
file = hf_hub_download(
repo_id="hf-internal-testing/spaghetti-video", filename="eating_spaghetti_8_frames.npy", repo_type="dataset"
)
video = np.load(file)
return list(video)
@require_vision
@require_torch
class XCLIPModelIntegrationTest(unittest.TestCase):
@slow
def test_inference(self):
model_name = "microsoft/xclip-base-patch32"
model = XCLIPModel.from_pretrained(model_name).to(torch_device)
processor = XCLIPProcessor.from_pretrained(model_name)
video = prepare_video()
inputs = processor(
text=["playing sports", "eating spaghetti", "go shopping"], videos=video, return_tensors="pt", padding=True
).to(torch_device)
# forward pass
with torch.no_grad():
outputs = model(**inputs)
# verify the logits
self.assertEqual(
outputs.logits_per_video.shape,
torch.Size((inputs.pixel_values.shape[0], inputs.input_ids.shape[0])),
)
self.assertEqual(
outputs.logits_per_text.shape,
torch.Size((inputs.input_ids.shape[0], inputs.pixel_values.shape[0])),
)
expected_logits = torch.tensor([[14.0181, 20.2771, 14.4776]], device=torch_device)
self.assertTrue(torch.allclose(outputs.logits_per_video, expected_logits, atol=1e-3))
| [
"noreply@github.com"
] | noreply@github.com |
eaf536317ff38e57c7364db06474e7bcc194795b | 4b805cedcab65eab46e10b5b4b88eb0e0aba3c9c | /account/serializers.py | 0561eba5328e499248087aaf98ad6b814dcae27b | [] | no_license | ayiza/insta_project | 80f5c19326af486c0749646d9cef3703f8689c51 | ce509825c6308b782552bfd1ff639adec05098bd | refs/heads/master | 2022-12-02T11:06:42.069544 | 2020-08-25T05:35:35 | 2020-08-25T05:35:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,164 | py | from django.contrib.auth import get_user_model, authenticate
from rest_framework import serializers
from django.utils.translation import gettext_lazy as _
from account.utils import send_activation_email
User = get_user_model()
class RegisterSerializer(serializers.ModelSerializer):
password = serializers.CharField(min_length=6, write_only=True)
password_confirmation = serializers.CharField(min_length=6, write_only=True)
class Meta:
model = User
fields = ('email', 'password', 'password_confirmation', 'image')
def validate_email(self, email):
if User.objects.filter(email=email).exists():
raise serializers.ValidationError('User with given email already exist')
return email
def validate(self, validated_data):
password = validated_data.get('password')
password_confirmation = validated_data.get('password_confirmation')
if password != password_confirmation:
raise serializers.ValidationError('Passwords don\'t match')
return validated_data
def create(self, validated_data):
email = validated_data.get('email')
password = validated_data.get('password')
image = validated_data.get('image')
user = User.objects.create_user(email, password, image)
send_activation_email(user.email, user.activation_code)
return user
class LoginSerializer(serializers.Serializer):
email = serializers.EmailField()
password = serializers.CharField()
def validate(self, attrs):
email = attrs.get('email')
password = attrs.get('password')
if email and password:
user = authenticate(request=self.context.get('request'),
username=email, password=password)
if not user:
msg = _('Unable to log in with provided credentials.')
raise serializers.ValidationError(msg, code='authorization')
else:
msg = _('Must include "username" and "password".')
raise serializers.ValidationError(msg, code='authorization')
attrs['user'] = user
return attrs
| [
"aleks.98.x.28@gmail.com"
] | aleks.98.x.28@gmail.com |
9aa75ed11b12907f0de6614609748aa8ad37e042 | c03a2369d346c3a289671349cb175dfaae6e8dda | /lambda.py | 5a43d5635ab8f44de4ba622282329354c30b8a2b | [] | no_license | yoliskdeveloper/basic_python | c5bdd9ce22f9b0183ecacb4064e8325d3211d46d | d2216db3edb938a929e5c922b06c2bf55c73a98c | refs/heads/main | 2023-05-19T22:50:10.826012 | 2021-06-11T10:58:36 | 2021-06-11T10:58:36 | 375,972,926 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 326 | py | # anonymous function atau lambda
x = lambda a : a ** 2
print(x(10))
# dengan banyak argument
x = lambda a, b: a * b
print(x(12, 24))
x = lambda p, l, t : print('panjang persegi:', p * l * t)
x(12, 12, 45)
# lambda di dalam function
def my_funct(n):
return lambda a : a * n
mydoubler = my_funct(2)
print(mydoubler(11))
| [
"yolisk.developer@gmail.com"
] | yolisk.developer@gmail.com |
382d3f7f4b1c2235c1b24d6616eff5dea08bb6e6 | 9cfbf2519d6df441389a80a125c33c5f5889c601 | /healthnetproject/users/migrations/0005_auto_20161205_2139.py | 8ac1bc00425806f952e4a581ffd9e187a722f214 | [] | no_license | BenAlderfer/HealthNet | a5c1e8c9fac2bc9e1eff7cd161828407fce85f72 | a4bab3559a04d6f151958b1c1087555e9b316eb3 | refs/heads/master | 2021-03-19T16:20:20.150735 | 2018-08-14T21:18:33 | 2018-08-14T21:18:33 | 81,754,199 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 642 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-12-05 21:39
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0004_patient_admitted_dr'),
]
operations = [
migrations.AlterField(
model_name='patient',
name='insurance_num',
field=models.CharField(max_length=13, validators=[django.core.validators.RegexValidator('^[a-zA-Z]{1}[a-zA-Z0-9]{12}', 'Must match insurance number format.')], verbose_name='Insurance Number'),
),
]
| [
"benalderfer974@gmail.com"
] | benalderfer974@gmail.com |
150e7a95d89ecdee3ea16019ac75400cb370d44d | 506320a43335e453483ccbaab15504cd1856bad8 | /upload/migrations/0001_initial.py | 85fde439d64b1ebe450191619d3d1dc774f3362a | [] | no_license | zzdd1558/multiFileUpload | 8b008f1777afbaa807109202b346c56bf99db8aa | ff597f6330dd7bbd6bd9d9038e0a12f06f4fbe84 | refs/heads/master | 2020-03-19T01:24:02.049861 | 2018-05-31T06:42:44 | 2018-05-31T06:42:44 | 135,542,688 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 504 | py | # Generated by Django 2.0.5 on 2018-05-30 10:11
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='UploadFileModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('file', models.FileField(null=True, upload_to='')),
],
),
]
| [
"root@p04-centos-lb06.novalocal"
] | root@p04-centos-lb06.novalocal |
53dfe5edd7e128c9d9c296ed09fd9eeddca65929 | 231eff40055f697a8d6f9fc20485ad13b9b0c4d1 | /src/pyco/device.py | 56a6486f9f9e92175d2a5614f0d5db281c0f04bf | [] | no_license | attdona/pyco | 58d0b8c2138b0c5008ea0659229c6eef9adb01ca | 747dd63685d21cd1d61a9f989305e8f430103226 | refs/heads/master | 2021-01-19T13:00:34.174343 | 2015-11-17T10:37:36 | 2015-11-17T10:37:36 | 39,942,273 | 1 | 1 | null | 2015-11-11T14:45:13 | 2015-07-30T09:23:31 | Python | UTF-8 | Python | false | false | 51,642 | py | # coding=utf-8
'''
Created on Mar 15, 2011
@author: Attilio Donà
'''
import sys
import os
import re
import time
from mako.template import Template
from mako.runtime import Context
from io import StringIO
from validate import Validator
from pkg_resources import resource_filename, resource_string, iter_entry_points #@UnresolvedImport
import pyco.log
# create logger
log = pyco.log.getLogger("device")
from configobj import ConfigObj, flatten_errors #@UnresolvedImport
# Default locations; both are overridden below when a pyco home dir exists.
expectLogfile = '/tmp/expect.log'
cfgFile = resource_filename('pyco', 'cfg/pyco.cfg')
if hasattr(pyco, 'pyco_home'):
    expectLogfile = pyco.pyco_home + "/logs/expect.log"
    # a user-provided config file under pyco_home wins over the packaged one
    if (os.path.isfile(pyco.pyco_home + "/cfg/pyco.cfg")):
        cfgFile = pyco.pyco_home + "/cfg/pyco.cfg"
# the shared configObj (None until the configuration is loaded elsewhere)
configObj = None
class DeviceException(Exception):
    """Base class of every pyco device error.

    Building the exception also closes the offending device session, so the
    underlying connection never outlives the failure it caused.
    """
    def __init__(self, device, msg=''):
        self.device = device
        self.msg = msg
        # the session is unusable after the error: tear it down right away
        device.close()
    def __str__(self):
        return self.msg
class WrongDeviceUrl(Exception):
    """Raised when a device url cannot be parsed into its components."""
    def __init__(self, msg):
        self.msg = msg
    def __str__(self):
        return self.msg
class MissingDeviceParameter(DeviceException):
    """Raised when a device parameter required for the operation (e.g. the
    username or password) is undefined."""
    pass
class UnsupportedProtocol(DeviceException):
    """Raised when the device url requests a transport protocol that pyco
    does not support."""
    pass
class ExpectException(DeviceException):
    """Base class for errors raised during the expect interaction with the
    device.

    The expect transcript is captured into ``interaction_log`` before the
    base constructor closes the device, so it stays inspectable afterwards.
    """
    # unable to connect correctly to the network device
    code = 310
    def __init__(self, device, msg=''):
        # snapshot the transcript first: DeviceException.__init__ closes the device
        self.interaction_log = device.interaction_log()
        DeviceException.__init__(self, device, msg)
    def descr(self):
        """Return a one-line description: ``<device name>: <message>``."""
        return '%s: %s' % (self.device.name, self.msg)
class ConnectionClosed(ExpectException):
    '''
    Raised when EOF is read from a pexpect child; this usually means the
    child process has exited and the connection is gone.
    '''
    pass
class ConnectionRefused(ExpectException):
    '''
    Raised when the remote endpoint refused the transport connection.
    '''
    pass
class PermissionDenied(ExpectException):
    '''
    Raised when device login fails because the username, the password, or
    both are wrong.
    '''
    pass
class ConnectionTimedOut(ExpectException):
    '''
    Raised when there is no response within the timeout, or when none of the
    expected patterns match the device response.
    '''
    pass
class CommandExecutionError(ExpectException):
    '''
    Raised when an error pattern matches the command output, signalling a
    command failure such as an invalid syntax.
    '''
class LoginFailed(ExpectException):
    '''
    Raised when the login phase does not complete successfully.
    '''
    pass
class EventHandlerUndefined(Exception):
    '''
    Raised when the action configured for an event cannot be resolved to a
    callable.
    '''
    def __init__(self, value):
        # name of the handler that failed to resolve
        self.value = value
    def __str__(self):
        return "event handler %s not defined" % self.value
class ConfigFileError(Exception):
    '''
    Raised when the pyco configuration file is invalid.
    '''
    def __init__(self, value):
        # the validation failure payload
        self.value = value
    def __str__(self):
        return repr(self.value)
def getAccount(device):
    """Terminal member of the account-lookup plugin chain.

    Always returns False, signalling that no account source handled
    *device*.
    """
    return False
def path(hops):
    '''
    Return the target device from *hops*, the ordered list of hops that
    leads to it. The target must be the last element of the list; it is
    popped off and the remaining hops are attached to it as its connection
    path (note: *hops* is mutated in place).
    '''
    final = hops.pop()
    final.hops = hops
    return final
def device(url):
    '''
    Returns a Device instance builded from a url.
    
    the device url is compliant with the RFC syntax defined by http://tools.ietf.org/html/rfc3986
    
    the telnet and ssh scheme are extended with a path item defining the host specific driver to be used for connecting:
    
    *[protocol://][user][:password]@hostname[:port][/driver]*
    
    valid examples of device url:
    
    * telnet://u:p@localhost:21/linux
    * ssh://user@localhost:21/linux
    * ssh://localhost
    
    where *protocol* is one of:
    
    * telnet
    * ssh
    
    for example:
    
    >>> h = device('ssh://jack:secret@myhost/linux')
    
    >>> h.username
    'jack'
    
    >>> h.password
    'secret'
    
    >>> h.name
    'myhost'
    
    >>> h.protocol
    'ssh'
    
    >>> h.driver
    driver:linux
    
    *protocol* is optional, it defaults to *ssh*
    
    *driver* is optional. If not defined it defaults to the common driver:
    
    >>> h = device('jack:secret@myhost')
    
    >>> h.username
    'jack'
    
    >>> h.password
    'secret'
    
    >>> h.name
    'myhost'
    
    >>> h.protocol
    'ssh'
    
    >>> h.driver
    driver:common
    
    if username or password is not defined they are set to the null value None:
    
    >>> h = device('ssh://foo@myhost')
    
    >>> h.username
    'foo'
    
    >>> h.password
    
    >>> h.password is None
    True
    
    >>> h.name
    'myhost'
    
    >>> h.protocol
    'ssh'
    
    >>> h.port
    22
    
    >>> h = device('telnet://:secret@myhost:2222')
    
    >>> h.username
    
    >>> h.username is None
    True
    
    >>> h.password
    'secret'
    
    >>> h.name
    'myhost'
    
    >>> h.protocol
    'telnet'
    
    >>> h.port
    2222
    
    The *driver* name has to be one of the [section] name found into the pyco configuration file :ref:`driver-configuration`.
    
    With this release the configured drivers are:
    
    * common
    * linux
    * ciscoios
    
    If a driver is not configured this way an exception is thrown in the device factory function:
    
    >>> h1 = device('ssh://jack:secret@myhost/zdriver')
    Traceback (most recent call last):
    ...
    DriverNotFound: 'zdriver driver not defined'
    '''
    # default to the ssh scheme when the url carries no explicit one
    if not (url.startswith('telnet://') or url.startswith('ssh://')):
        url = 'ssh://' + url
    
    try:
        (driverName, host, user, password, protocol, port) = parseUrl(url)
    except Exception:
        # was a bare except: narrowed so SystemExit/KeyboardInterrupt pass through
        raise WrongDeviceUrl('invalid url %s' % url)
    
    if host is None:
        raise WrongDeviceUrl('hostname not defined')
    
    # urlparse yields '' for an empty user token: normalize it to None
    if user == '':
        user = None
    
    # fall back on the protocol well-known port when none is given
    if port is None:
        port = {'ssh': 22, 'telnet': 23}.get(protocol)
    
    # the url path component carries the driver name, e.g. '/linux'
    driverName = driverName.lstrip('/')
        
    log.debug("[%s] info: driver [%s], cred [%s / %s], protocol [%s:%s]" % (host, driverName, user, password, protocol, port))
    
    if driverName == '':
        driverName = 'common'
        
    driver = Driver.get(driverName)
    
    obj = Device(host, driver, user, password, protocol, port)
    
    log.debug("[%s] builded" % host)
    
    return obj
def parseUrl(url):
    '''
    Split a device url into the tuple
    (driver, host, user, password, protocol, port).

    Workaround: some urlparse implementations (jython/python 2.5.2) do not
    know the ssh scheme, so an ssh url is parsed as telnet and the protocol
    restored afterwards.
    '''
    from urllib.parse import urlparse #@UnresolvedImport

    is_ssh = url.startswith('ssh://')
    to_parse = url.replace('ssh://', 'telnet://', 1) if is_ssh else url

    parsed = urlparse(to_parse)
    if parsed.scheme == '':
        # schemeless url: re-parse with a dummy scheme so netloc is detected
        parsed = urlparse('telnet://' + url)

    protocol = 'ssh' if is_ssh else parsed.scheme
    return (parsed.path, parsed.hostname, parsed.username,
            parsed.password, protocol, parsed.port)
def defaultEventHandler(device):
    '''
    Catch-all handler, invoked if and only if the fsm (event, current_state)
    pair falls back on the fsm default_transition.

    A fatal communication event (eof) is turned into the matching exception;
    every other event is just logged and ignored.
    '''
    event_name = device.currentEvent.name
    log.debug("[%s] in state [%s] got [%s] event" % (device.name, device.state, event_name))

    #'timeout': ConnectionTimedOut,
    fatal_events = {
                  'eof'    : ConnectionClosed
                 }

    if event_name not in fatal_events:
        return

    log.info("[%s] unexpected communication error in state [%s] got [%s] event" % (device.name, device.state, event_name))
    error = fatal_events[event_name](device)
    device.close()
    raise error
def getExactStringForMatch(str):
    '''
    Escape the regexp metacharacters in *str* so that the returned pattern
    matches the string literally (used e.g. on discovered prompt strings).
    '''
    # double the backslashes first, so the escapes added below are untouched
    escaped = re.sub(r'(\\)', r'\\\1', str)
    # then escape each metacharacter individually
    for meta in ('[', '$', '.', '^', '*', '+', '?', '{', '}', ']', '|', '(', ')'):
        escaped = re.sub('(\\' + meta + ')', r'\\\1', escaped)
    return escaped
def discoverPromptCallback(device, tentativePrompt=None):
    '''
    The discover prompt algorithm.

    Invoked on 'timeout' and 'prompt-match' events while discovery is
    active: guesses the prompt from the captured output, then re-sends an
    empty line and checks that the guess matches again before promoting it
    to the exact prompt value.

    Fix: a leftover debug ``print`` to stdout is now routed through the
    module logger.
    '''
    # pick the raw output to analyze, depending on how we got here
    if tentativePrompt is not None:
        output = tentativePrompt
    elif device.currentEvent.name == 'prompt-match':
        # TODO: manage a tuple of hints, for example
        # output = (device.esession.pipe.before + device.esession.pipe.after, device.esession.pipe.after)
        # because a multiline prompt actually is not correctly managed
        output = device.esession.pipe.after
        log.debug('raw output: [%s]' % output)
    elif device.currentEvent.name == 'timeout':
        output = device.esession.pipe.before
    else:
        raise Exception("discover prompt failed; unexpected event [%s]" % device.currentEvent.name)
    
    # if regular exp succeed then set the prompt
    log.debug("[%s] prompt discovery ..." % (device.name))
    
    # stop the default handling of the timeout event
    device.currentEvent.stopPropagation()
    
    sts = device.state
    
    if sts in device.prompt:
        # a tentative prompt already exists: verify the guess
        if output.startswith('\r\n'):
            output = output.replace('\r\n', '', 1)
            
        log.debug('[%s] == [%s]' % (device.prompt[sts].value, output))
        
        if device.prompt[sts].value == output:
            device.discoveryCounter = 0
            log.debug("[%s] [%s] prompt discovered: [%s]" % (device.name, sts, device.prompt[sts].value))
            device.prompt[sts].setExactValue(device.prompt[sts].value)
            
            # TODO: save only if the cache is not aligned
            if cache_enabled():
                save_cached_prompt(device)
            
            #device.add_event_action('prompt-match', getExactStringForMatch(device.prompt[sts].value), device.fsm.current_state)
            device.add_expect_pattern('prompt-match', getExactStringForMatch(device.prompt[sts].value), sts)
            
            # discovery is done: stop intercepting timeout/prompt-match events
            for ev in ['timeout', 'prompt-match']:
                log.debug('removing discoverPromptCallback')
                device.remove_event_handler(ev, discoverPromptCallback)
            
            # declare the discovery with the event
            device.currentEvent = Event('prompt-match')
            return
        else:
            device.remove_pattern(getExactStringForMatch(device.prompt[sts].value), sts)
            if device.discoveryCounter == 2:
                # give up after three failed guesses
                log.debug("[%s] [%s] unable to found the prompt, unsetting discovery. last output: [%s]" % (device.name, sts, output))
                device.discoverPrompt = False
                device.remove_event_handler('timeout', discoverPromptCallback)
                return
            else:
                # retry with the latest output as the new tentative prompt
                device.prompt[sts].tentative = True
                if output.startswith('\r\n'):
                    output = output.replace('\r\n', '', 1)
                device.prompt[sts].value = output
                log.debug("[%s] [%s] no prompt match, retrying discovery with pointer %s" % (device.name, sts, [device.prompt[sts].value]))
                device.add_expect_pattern('prompt-match', getExactStringForMatch(device.prompt[sts].value), sts)
                device.discoveryCounter += 1
        
    else:
        # first pass: build a tentative prompt from the output
        # (was a stray print() to stdout; routed through the logger)
        log.debug("OUT: " + str(output))
        rows = output.split('\r\n')
        if hasattr(device, 'promptRegexp'):
            if output.startswith('\r\n'):
                output = output.replace('\r\n', '', 1)
            tentativePrompt = output
            log.debug('promptRegexp tentativePrompt: [%s]' % tentativePrompt)
            device.remove_pattern('\r\n' + device.promptRegexp, sts)
        else:
            # without a configured regexp, assume the prompt is the last output row
            tentativePrompt = rows[-1]
        device.discoveryCounter = 0
        
        log.debug("[%s] tentativePrompt: [%s]" % (device.name, tentativePrompt))
        device.prompt[sts] = Prompt(tentativePrompt, tentative=True)
            
        device.add_expect_pattern('prompt-match', getExactStringForMatch(device.prompt[sts].value), sts)
    
        # re-send an empty line to confirm the guess on the next event
        device.clear_buffer()
        device.send_line('')
        device.expect(lambda d: d.currentEvent.name == 'timeout' or d.currentEvent.name == 'prompt-match')
def buildPatternsList(device, driver=None):
    '''
    Setup the expect patterns and the action events on *device* from the
    shared configObj.

    Walks the driver inheritance chain first (via ``driver.parent``), so a
    parent driver's events are registered before the child's.
    '''
    if driver == None:
        driver = device.driver
    log.debug("loading driver [%s]" % driver)
    
    # recurse into the parent driver before processing this driver's section
    if hasattr(driver, 'parent'):
        log.debug("[%s] parent driver: [%s]" % (driver, driver.parent))
        buildPatternsList(device, Driver.get(driver.parent))
        
    # a driver without a config section contributes nothing
    if driver.name not in configObj:
        log.debug("skipping undefined [%s] section" % (driver.name))
        return
    
    for (eventKey, eventData) in list(configObj[driver.name]['events'].items()):
        
        # each config key is optional; missing ones fall back to defaults
        action=None
        if 'action' in eventData:
            action = buildAction(eventData['action'])
        
        states = '*'
        if 'beginState' in eventData:
            states = eventData['beginState']
    
        endState = None
        if 'endState' in eventData:
            endState = eventData['endState']
        
        pattern = None
        if 'pattern' in eventData:
            pattern = eventData['pattern']
            
        device.add_event_action(event=eventKey, pattern=pattern, action=action, beginState=states, endState=endState)
def buildAction(actionString):
    """Turn a configured action string into a callable taking the device.

    Two formats are supported:
    * colon-delimited ``:name:arg1:arg2:`` -- assumes leading AND trailing
      colons, since the first and last split items are discarded
    * whitespace-separated ``name arg1 arg2``

    When arguments are present, a closure is returned that invokes the
    resolved callable with them; otherwise the callable itself is returned.

    NOTE(review): for a colon-format string with no arguments (e.g.
    ``:name:``) the else-branch resolves the raw string, colons included --
    looks like it should resolve ``al[0]`` instead; confirm intended format.
    """
    if actionString.startswith(':'):
        al = actionString.split(':')
        al = al[1:-1]
    else:
        al = actionString.split()
    #log.debug('[%s] splitted into [%s]' % (actionString, al))
    if len(al) > 1:
        baseAction = get_callable(al[0])
        def action(target):
            log.debug("invoking action [%s] with %s" % (baseAction.__name__, al[1:]))
            baseAction(target,*al[1:])
    else:
        action = get_callable(actionString)
    
    return action
def get_callable(methodName):
    '''
    From the methodName string get the first callable object looking into:
    
    * `$PYCO_HOME/handlers.py` python source file
    * pyco.actions internal package module
    * pyco.device internal package module
    
    A list of names is also accepted: a composite callable is returned that
    resolves and invokes every name in order.

    Raises EventHandlerUndefined when no callable can be found.
    '''
    if methodName == '' or methodName is None:
        return None
    
    log.debug('looking for action [%s]' % methodName)
    
    import pyco.actions
    
    if isinstance(methodName,str):
        try:
            if hasattr(pyco, 'pyco_home'):
                # make $PYCO_HOME importable so user handlers are found first
                sys.path.append(pyco.pyco_home)
                try:
                    log.debug('looking for [%s] into actions module' % methodName)
                    import handlers #@UnresolvedImport
                    return getattr(handlers, methodName)
                except:
                    log.debug('looking for [%s] into pyco package' % methodName)
                    return getattr(pyco.actions, methodName)
            else:
                return getattr(pyco.actions, methodName)
        except:
            # last chance: a function defined in this very module
            if methodName in globals():
                return globals()[methodName]
            else:
                raise EventHandlerUndefined(methodName)
    else:
        # a sequence of handler names: run them all in order
        def composite(d):
            for m in methodName:
                get_callable(m)(d)
        return composite
def cliIsConnected(target):
    """Check whether the CLI session on *target* reached a usable prompt.

    Returns True on an immediate prompt match or when a configured
    ``promptPattern`` is installed; otherwise triggers prompt discovery and
    reports its outcome. Falls through (returning None, i.e. falsy) when no
    pattern is configured and discovery is disabled.
    """
    log.debug("[%s] [%s] state, [%s] event: checking if CLI is connected ..." % (target.name, target.state, target.currentEvent.name))
    if target.currentEvent.name == 'prompt-match':
        return True
    
    if hasattr(target, 'promptPattern'):
        # a statically configured prompt pattern: install it and trust it
        log.debug('[%s] matching prompt with pattern [%s]' % (target.state, target.promptPattern))
        target.prompt[target.state] = Prompt(target.promptPattern, tentative=False)
        target.add_expect_pattern('prompt-match', target.promptPattern, target.state)
        return True
    else:
        if target.discoverPrompt:
            log.debug("[%s] starting [%s] prompt discovery" % (target.name, target.state))
            target.enable_prompt_discovery()
            
            def isTimeoutOrPromptMatch(d):
                return d.currentEvent.name == 'timeout' or d.currentEvent.name == 'prompt-match'
            
            target.expect(isTimeoutOrPromptMatch)
            log.debug("prompt discovery executed, cliIsConnected event: [%s]" % target.currentEvent.name)
            return target.currentEvent.name == 'prompt-match'
def commandError(target):
    """Event action: a configured error pattern matched the command output.

    Logs the offending response and raises CommandExecutionError.
    """
    log.error('[%s]: detected error response [%s]' % (target.name, target.esession.pipe.after))
    raise CommandExecutionError(target)
class Event:
    """A named fsm event whose propagation to the fsm can be vetoed."""

    def __init__(self, name, propagateToFsm=True):
        self.name = name
        self.propagate = propagateToFsm

    def __str__(self):
        return self.name

    def stopPropagation(self):
        """Prevent the fsm from handling this event."""
        self.propagate = False

    def isActive(self):
        """True while the event is still to be propagated to the fsm."""
        return self.propagate

    def isTimeout(self):
        return self.name == 'timeout'

    def isPromptMatch(self):
        # either the generic discovery event or a state-specific *_prompt one
        return self.name.endswith('_prompt') or self.name == 'prompt-match'
class Prompt:
    """A device prompt string, either tentative (discovery in progress) or
    final (exactly known)."""

    def __init__(self, promptValue, tentative=False):
        self.value = promptValue
        self.tentative = tentative

    def isTentative(self):
        return self.tentative

    def isFinal(self):
        return not self.tentative

    def setExactValue(self, value):
        """Promote the prompt to its final, exactly-known value."""
        self.value = value
        self.tentative = False
class FSMException(Exception):
    """Raised by the embedded finite state machine (ExtFSM) on errors."""
    def __init__(self, value):
        # payload describing the fsm error
        self.value = value
    def __str__(self):
        return repr(self.value)
class Device:
'''
`Device` class models a host machine and implements the FSM behavoir.
device object properties are:
* `name`
the fqdn or the host ip address
* `username`
the authenticated username
* `password`
the authenticated password
* `protocol`
the transport protocol: actually ssh o telnet
* `driver`
the FSM driver associated with this device
In a normal usage scenario you dont use any of the Device methods apart the :meth:`pyco.device.Device.send_line` to send
a string to the CLI in response to an event::
def sendUsername(target):
if target.username is None:
raise MissingDeviceParameter(target, '%s username undefined' % target.name)
log.debug("sending username [%s] ..." % target.username)
target.send_line(target.username)
'''
#processResponseg = None
def __init__(self, name, driver=None, username = None, password = None, protocol='ssh', port=22, hops = []):
log.debug("[%s] ctor" % name)
self.name = name
self.username = username
self.password = password
self.protocol = protocol
self.port = port
self.hops = hops
self.loggedin = False
self.eventCb = {}
self.prompt = {}
# the finite state machine
self.state = 'GROUND'
if not driver:
self.driver = Driver.get('common')
else:
self.driver = driver
self.set_driver(self.driver.name)
    # TODO: return the device url
    def __str__(self):
        """Return the device hostname."""
        return self.name
    def __repr__(self):
        """Return a short debug representation, e.g. ``device:myhost``."""
        return 'device:' + self.name
    def __getattr__(self, attrname):
        """Delegate unknown attribute lookups to the device driver.

        ``driver`` itself is special-cased so the delegation does not
        recurse when the attribute is not yet set on the instance.
        """
        if attrname == 'driver':
            raise AttributeError(attrname)
        else:
            #log.debug("[%s] delegating search for [%s] to [%s]" % (self, attrname, self.driver))
            try:
                return getattr(self.driver, attrname)
            except AttributeError:
                raise AttributeError(attrname)
def get_driver(self):
return self.driver.name
def set_driver(self, driverName):
'''
Initialize the device object with the FSM associated with `driverName`
'''
self.driver = Driver.get(driverName)
# Map (input_symbol, current_state) --> (action, next_state).
self.state_transitions = {}
# Map (current_state) --> (action, next_state).
self.state_transitions_any = {}
self.input_transitions_any = {}
self.default_transition = None
self.patternMap = {'*':{}}
buildPatternsList(self)
self.set_default_transition(defaultEventHandler, None)
# simply ignore 'prompt-match' on any state
#self.add_input_any('prompt-match')
def enable_prompt_discovery(self):
"""
Match the output device against the promptRegexp pattern and set the device prompt
"""
self.on_event('timeout', discoverPromptCallback)
self.on_event('prompt-match', discoverPromptCallback)
# add the cached prompt ...
if cache_enabled():
prompt = get_cached_prompt(self)
if prompt:
log.debug('[%s] found cached [%s] prompt [%s]' % (self.name, self.state, prompt.prompt))
self.prompt[self.state] = Prompt(prompt.prompt, tentative=True)
self.add_expect_pattern('prompt-match', getExactStringForMatch(prompt.prompt), self.state)
self.discoveryCounter = 0
else:
log.debug('[%s] - [%s]: no prompt cached' % (self.name, self.state))
#self.expect(lambda d: d.currentEvent.name == 'timeout' or d.currentEvent.name == 'prompt-match')
def interaction_log(self):
return self.esession.logfile.getvalue()
def is_connected(self):
'''
If the device is connected return True
'''
return self.loggedin
    def where_am_i(self):
        '''
        return the hop device actually connected.
        '''
        from pyco.expectsession import SOURCE_HOST
        if self.is_connected():
            return self
        # walk the hop chain backwards to find the deepest device reached
        for d in reversed(self.hops):
            log.debug("checking if [%s] is connected" % d.name)
            if d.is_connected():
                return d
        # nothing connected yet: we are still on the local machine
        return SOURCE_HOST

    def close(self):
        """Tear down the expect session (unless it already reached EOF) and
        reset the FSM to its initial GROUND state."""
        if hasattr(self, 'esession'):
            if self.currentEvent.name != 'eof':
                self.esession.close()
        self.state = 'GROUND'

    def discover_prompt_with_regexp(self, regexp, state='*'):
        '''
        Use regexp as a hint for prompt discovery
        Add the guard \'\\\\r\\\\n\' to the begin of prompt regexp
        '''
        self.add_event_action("prompt-match", '\r\n' + regexp, state)
        self.on_event('prompt-match', discoverPromptCallback)

    def get_prompt(self):
        '''
        Get the current device prompt
        '''
        # raises KeyError if no prompt is known yet for the current state
        return self.prompt[self.state].value
def prompt_discovered(self):
if self.state in self.prompt:
return self.prompt[self.state].isFinal()
return False
def get_event(self,pattern):
'''
The event associated with the pattern argument
'''
try:
return self.patternMap[self.state][pattern]
except:
# TODO: raise an exception if event not found
return self.patternMap['*'][pattern]
    def connect_command(self, clientDevice):
        """Build the CLI command that *clientDevice* must run to reach this
        device.

        Registered 'pyco.plugin' entry points get a chance to prepare
        authentication first; the first plugin returning a truthy value
        stops the scan.  The per-protocol command template (``sshCommand`` /
        ``telnetCommand`` driver attributes, with built-in fallbacks) is then
        rendered with this device bound as ``device``.
        """
        for ep in iter_entry_points(group='pyco.plugin', name=None):
            log.debug("found [%s] plugin into module [%s]" % (ep.name, ep.module_name))
            authFunction = ep.load()
            if authFunction(self):
                break
        if self.protocol == 'ssh':
            # the username must be defined for ssh connections
            if self.username == None:
                raise MissingDeviceParameter(self, '%s username undefined' % self.name)
            try:
                command = clientDevice.sshCommand
            except:
                # fallback when the client device defines no ssh template
                command = 'ssh ${device.username}@${device.name}'
            # noqa
        elif self.protocol == 'telnet':
            try:
                command = clientDevice.telnetCommand
            except:
                command = 'telnet ${device.name} ${device.port}'
        else:
            raise UnsupportedProtocol(self, 'unsupported protocol: %s' % self.protocol)
        # render the ${...} placeholders with this device in scope
        template = Template(command)
        clicommand = StringIO()
        context = Context(clicommand, device=self)
        template.render_context(context)
        return clicommand.getvalue()

    def has_event_handlers(self, event):
        """True when at least one callback is registered for *event*."""
        return event.name in self.eventCb

    def get_event_handlers(self, event):
        """List of callbacks registered for *event* (KeyError when none)."""
        return self.eventCb[event.name]
def on_event(self, eventName, callback):
log.debug("[%s] adding [%s] for [%s] event" % (self.name, callback, eventName))
try:
if not callback in self.eventCb[eventName]:
self.eventCb[eventName].append(callback)
except:
self.eventCb[eventName] = [callback]
    def remove_event_handler(self, eventName, callback):
        """Unregister *callback* from *eventName*; a missing handler is only
        logged, never raised."""
        log.debug("[%s] removing [%s] event handler [%s]" % (self.name, eventName, callback))
        try:
            self.eventCb[eventName].remove(callback)
        except:
            log.debug("[%s] not found [%s] event handler [%s]" % (self.name, eventName, callback))

    def login(self):
        """
        open a network connection using protocol. Currently supported protocols are telnet and ssh.
        If login has succeeded the device is in USER_PROMPT state and it is ready for consuming commands
        """
        from pyco.expectsession import ExpectSession
        log.debug("%s login ..." % self.name)
        self.esession = ExpectSession(self.hops,self)
        # placeholder event so handlers can always read currentEvent safely
        self.currentEvent = Event('do-nothing-event')
        log.debug("[%s] session: [%s]" % (self.name, self.esession))
        try:
            self.esession.login()
        except ExpectException as e:
            # something went wrong: report which hop failed, then re-raise
            log.info("[%s]: in login phase got [%s] error" % (e.device.name ,e.__class__))
            log.debug("full interaction: [%s]" % e.interaction_log)
            raise e
        self.clear_buffer()
        if self.state == 'GROUND' or self.currentEvent.isTimeout():
            raise LoginFailed(self, 'unable to connect: %s' % self.currentEvent.name)
        else:
            log.debug("%s logged in !!! ..." % self.name)

    def expect(self, checkPoint):
        """Block until the *checkPoint* predicate (called with this device)
        is satisfied, or until maxWait expires."""
        self.esession.patternMatch(self, checkPoint, [], self.maxWait)

    def send_line(self, stringValue):
        """
        simply send ``stringValue`` to the device CLI assuming that the device object is connected::

            self.is_connected() == True
        """
        log.debug('generating event [%s]' % stringValue)
        # let the FSM react to the outgoing line before it is written
        event = Event(stringValue)
        self.process(event)
        log.debug("[%s] sending [%s]" % (self, stringValue))
        self.esession.send_line(stringValue)

    def __call__(self, command):
        """Shortcut: ``device(command)`` is equivalent to ``device.send(command)``."""
        return self.send(command)
    def send(self, script_or_template, param_map=None):
        '''
        Send the template script or a plain script to the device and return the command output.
        If the device is not connected first a :py:meth:`pyco.device.Device.login` is executed

        `param_map` is a dictionary containing the `key-value` entries used for creating the plain script from the template:
        the key dictionary is the template keyword ${`key`} and `value` is the corresponding substituted value.

        if `param_map` is not defined it is assumed that `script_or_template` is a plain script and no substitution is performed.
        '''
        if self.state == 'GROUND':
            self.login()
        if param_map:
            # render the ${key} placeholders with the supplied values
            template = Template(script_or_template)
            clicommand = StringIO()
            context = Context(clicommand, **param_map)
            template.render_context(context)
            command = clicommand.getvalue()
        else:
            command = script_or_template
        # feed the script one line at a time, joining the captured outputs
        out = ''
        for line in command.split('\n'):
            log.debug('[%s]: sending line [%s]' % (self.name, line))
            if out != '':
                out += '\n'
            out += self.process_single_line(line)
        return out

    def process_single_line(self, command):
        '''
        Send the command string to the device and return the command output.
        After sending the command with :py:meth:`pyco.device.Device.send_line` activate a `processResponse` loop and awaits one of
        the expected results or a timeout.
        '''
        #self.clear_buffer()
        self.send_line(command)

        def runUntilPromptMatchOrTimeout(device):
            # stop the response loop on timeout, on an explicit prompt match,
            # or on entering any *_prompt state
            return device.currentEvent.name == 'timeout' or device.currentEvent.name == 'prompt-match' or device.currentEvent.name.endswith('_prompt')

        out = self.esession.processResponse(self, runUntilPromptMatchOrTimeout)
        if self.currentEvent.name == 'timeout' and self.discoverPrompt == True:
            if not hasattr(self, 'promptPattern') and hasattr(self, 'rediscoverPrompt') and self.rediscoverPrompt:
                # rediscover the prompt
                log.debug("[%s] discovering again the prompt ..." % self.name)
                tentativePrompt = out.split('\r\n')[-1]
                log.debug('[%s] taking last line as tentativePrompt: [%s]' % (self.name, tentativePrompt))
                self.enable_prompt_discovery()
                discoverPromptCallback(self, tentativePrompt)
            else:
                raise ConnectionTimedOut(self, 'prompt not hooked')
        # TODO: to be evaluated if this check is useful
        if self.checkIfOutputComplete == True:
            # keep polling until two consecutive reads return identical output
            log.debug("Checking if [%s] response [%s] is complete" % (command,out))
            prevOut = None
            while out != prevOut:
                self.clear_buffer()
                log.debug("[%s] == [%s]" % (prevOut,out))
                prevOut = out
                currOut = self.esession.processResponse(self, runUntilPromptMatchOrTimeout)
                if prevOut == None:
                    out = currOut
                else:
                    out = prevOut + currOut
                log.debug("Rechecking if [%s] response [%s] is complete" % (command,out))
        # strip the echoed command from the head of the captured output
        if out.startswith(command):
            out = out.replace(command.replace('\n','\r\n'), '', 1).strip('\r\n')
        log.info("[%s:%s]: captured response [%s]" % (self.name, command, out))
        return out

    def clear_buffer(self):
        """Drain any pending terminal output from the expect pipe (best effort)."""
        log.debug('clearing buffer ...')
        try:
            # expect some time (default: 2 seconds) the arrivals of terminal characters and then clears the buffer
            time.sleep(self.waitBeforeClearingBuffer)
            self.esession.pipe.expect('.*', timeout=1)
        except Exception as e:
            # a timeout here simply means there was nothing left to read
            log.debug("[%s] clear_buffer timeout: cleared expect buffer (%s)" % (self.name, e.__class__))
            log.debug(e)
def add_transition (self, input_symbol, state, action=None, next_state=None):
"""This adds a transition that associates:
(input_symbol, current_state) --> (action, next_state)
The action may be set to None in which case the process() method will
ignore the action and only set the next_state. The next_state may be
set to None in which case the current state will be unchanged.
You can also set transitions for a list of symbols by using
add_transition_list(). """
if next_state is None:
next_state = state
self.state_transitions[(input_symbol, state)] = (action, next_state)
def add_transition_list (self, list_input_symbols, state, action=None, next_state=None):
"""This adds the same transition for a list of input symbols.
You can pass a list or a string. Note that it is handy to use
string.digits, string.whitespace, string.letters, etc. to add
transitions that match character classes.
The action may be set to None in which case the process() method will
ignore the action and only set the next_state. The next_state may be
set to None in which case the current state will be unchanged. """
if next_state is None:
next_state = state
for input_symbol in list_input_symbols:
self.add_transition (input_symbol, state, action, next_state)
def add_transition_any (self, state, action=None, next_state=None):
"""This adds a transition that associates:
(current_state) --> (action, next_state)
That is, any input symbol will match the current state.
The process() method checks the "any" state associations after it first
checks for an exact match of (input_symbol, current_state).
The action may be set to None in which case the process() method will
ignore the action and only set the next_state. The next_state may be
set to None in which case the current state will be unchanged. """
if next_state is None:
next_state = state
self.state_transitions_any [state] = (action, next_state)
def add_input_any (self, input_symbol, action=None, next_state=None):
"""This adds a transition that associates:
(input_symbol) --> (action, next_state)
That is, the input symbol will trigger a transition in any state.
The process() method checks the input_symbol in "any state" associations after it
checks for a match of transition_any
The action may be set to None in which case the process() method will
ignore the action and only set the next_state. The next_state may be
set to None in which case the current state will be unchanged. """
self.input_transitions_any [input_symbol] = (action, next_state)
def set_default_transition (self, action, next_state):
"""This sets the default transition. This defines an action and
next_state if the ExtFSM cannot find the input symbol and the current
state in the transition list and if the ExtFSM cannot find the
current_state in the transition_any list. This is useful as a final
fall-through state for catching errors and undefined states.
The default transition can be removed by setting the attribute
default_transition to None. """
self.default_transition = (action, next_state)
def get_transition (self, input_symbol, state):
"""This returns (action, next state) given an input_symbol and state.
This does not modify the ExtFSM state, so calling this method has no side
effects. Normally you do not call this method directly. It is called by
process().
The sequence of steps to check for a defined transition goes from the
most specific to the least specific.
1. Check state_transitions[] that match exactly the tuple,
(input_symbol, state)
2. Check state_transitions_any[] that match (state)
In other words, match a specific state and ANY input_symbol.
3. Check if the input_symbol has a (action, next_state) association
in any state
4. Check if the default_transition is defined.
This catches any input_symbol and any state.
This is a handler for errors, undefined states, or defaults.
5. No transition was defined. If we get here then raise an exception.
"""
if (input_symbol, state) in self.state_transitions:
return self.state_transitions[(input_symbol, state)]
elif state in self.state_transitions_any:
return self.state_transitions_any[state]
elif input_symbol in self.input_transitions_any:
return self.input_transitions_any[input_symbol]
elif self.default_transition is not None:
return self.default_transition
else:
raise FSMException ('Transition is undefined: (%s, %s).' %
(str(input_symbol), str(state)) )
    def process (self, event, ext=True):
        """This is the main method that you call to process input. This may
        cause the driver to change state and call an action. The action callable
        is invoked with device object argument as a first parameter. This method calls
        get_transition() to find the action and next_state associated with the
        input_symbol and current_state. If the action is None then the action
        is not called and only the current state is changed. This method
        processes one complete input symbol. You can process a list of symbols
        (or a string) by calling process_list().

        Returns True when the transition actually changed the FSM state.
        """
        if event.isActive():
            # consume the event so it cannot be dispatched twice
            event.stopPropagation()
            input_symbol = event.name
            #self.input_symbol = input_symbol.name
            (action, next_state) = self.get_transition (input_symbol, self.state)
            log.debug("selected transition [event:%s,beginState:%s] -> [action:%s, endState:%s]" % (input_symbol, self.state, action, next_state))
            stateChanged = False
            if next_state != None:
                log.debug("transition activated for [%s,%s] -> [%s]" % (input_symbol, self.state, next_state))
                stateChanged = (self.state != next_state)
                self.state = next_state
            if action is not None:
                log.debug("[%s]: executing [%s] action [%s]" % (self.name, input_symbol, str(action)))
                action (self)
            if stateChanged:
                # entering a new state fires a synthetic lowercase
                # <state-name> event, processed recursively
                log.debug('generating event [%s]' % self.state.lower())
                self.currentEvent = Event(self.state.lower())
                self.process(self.currentEvent,ext=False)
            return stateChanged

    def process_list (self, input_symbols):
        """This takes a list and sends each element to process(). The list may
        be a string or any iterable object. """
        for s in input_symbols:
            self.process (s)
def patterns(self, state):
'''
Return the pattern list to match the device output
'''
try:
return list(self.patternMap[state].keys()) + list(self.patternMap['*'].keys())
except:
return list(self.patternMap['*'].keys())
    def add_event_action(self, event, pattern=None, beginState=['*'], endState=None, action=None):
        '''
        Add a pattern to be matched in the FSM state. If the pattern is matched then the corresponding event is generated.
        If pattern is None only a transition is configured

        NOTE(review): beginState uses a mutable default ([ '*' ]); it is not
        mutated here, but callers should not rely on that list's identity.
        '''
        if isinstance(beginState, str):
            beginState = [beginState]
        for state in beginState:
            if not pattern or pattern == '':
                # no pattern: install the transition only
                if state == '*':
                    log.debug("[%s]: [%s] event with empty pattern activated in any state" % (self.name, event))
                    self.add_input_any(event, action, endState)
                else:
                    log.debug("[%s] adding transition [%s-%s (action:%s)-%s]" % (self.name, state, event, action, endState))
                    self.add_transition(event, state, action, endState)
                continue
            try:
                # invert the map so a re-bound event drops its previous pattern
                reverseMap = dict([(item[1],item[0]) for item in list(self.patternMap[state].items())])
                self.patternMap[state][pattern] = event
                log.debug('[%s-%s]: configuring [%s] event [%s]' % (self.name, state, pattern, event))
                if event in reverseMap and pattern != reverseMap[event]:
                    log.debug('[%s]: deleting event [%s]' % (self.name, event))
                    del self.patternMap[state][reverseMap[event]]
            except:
                # first binding for this state
                self.patternMap[state] = {pattern:event}
            # add the transition
            if state == '*':
                log.debug("[%s]: adding pattern driven transition in any state [%s-%s (action:%s)-%s]" % (self.name, state, event, action, endState))
                self.add_input_any(event, action, endState)
            else:
                log.debug("[%s]: adding pattern driven transition [%s-%s (action:%s)-%s]" % (self.name, state, event, action, endState))
                self.add_transition(event, state, action, endState)

    def add_transition_object(self, t):
        '''
        Add the transition argument coded as a dictionary::

            suRule = {
                      'beginState' : 'USER_PROMPT',
                      'event': 'su_event',
                      'action' : sendSuPassword,
                      'endState' : 'USER2_PROMPT'
                      }

            device.add_transition_object(suRule)
        '''
        self.add_transition(t['event'], t['beginState'], t['action'], t['endState'])

    def add_expect_pattern(self, event, pattern, state):
        """Bind *pattern* -> *event* in the *state* pattern map (no transition
        is installed; empty patterns are skipped with a warning)."""
        log.debug("[%s]: adding expect pattern %s, event [%s], state [%s]" % (self.name, [pattern], event, state))
        if not pattern or pattern == '':
            log.warning("[%s]: skipped [%s] event with empty pattern and * state" % (self.name, event))
            return
        try:
            self.patternMap[state][pattern] = event
        except:
            # first binding for this state
            self.patternMap[state] = {pattern:event}
def remove_event(self, event, state = '*'):
reverseMap = dict([(item[1],item[0]) for item in list(self.patternMap[state].items())])
if event in reverseMap:
pattern = reverseMap[event]
self.remove_pattern(pattern, state)
def remove_pattern(self, pattern, state = '*'):
try:
del self.patternMap[state][pattern]
except KeyError:
log.info('[%s] failed to delete patternMap[%s] entry [%s]: item not found' % (self.name, state, pattern))
# end Device class
def loadConfiguration(cfgfile=cfgFile):
    '''
    Load the pyco configuration file.

    Raises a plain Exception when *cfgfile* does not exist.

    NOTE(review): reload() currently has no return statement, so this
    function always returns None; callers reading the module-level
    configObj are unaffected.
    '''
    import os.path
    if os.path.isfile(cfgfile):
        #try:
        config = ConfigObj(cfgfile, configspec=resource_filename('pyco', 'cfg/pyco_spec.cfg'))
        return reload(config)
    else:
        raise Exception('pyco configuration file not found: ' + cfgfile)
    #except:
    #    raise Exception('pyco configuration file not found: ' + cfgfile)
def load(config):
    '''
    Load the pyco configObj: validate it against the bundled spec, publish
    it as the module-level configObj and copy each section's keys onto the
    driver of the same name (creating drivers on demand).

    Raises ConfigFileError on the first validation failure.
    Returns the validated config.
    '''
    global configObj
    pyco_spec = resource_filename('pyco', 'cfg/pyco_spec.cfg')
    config.configspec = ConfigObj(pyco_spec)
    # validate against the spec and fail loudly on the first error
    val = Validator()
    results = config.validate(val)
    if results != True:
        for (section_list, key, _) in flatten_errors(config, results):
            if key is not None:
                raise ConfigFileError('The "%s" key in the section "%s" failed validation' % (key, ', '.join(section_list)))
            else:
                raise ConfigFileError('The following section was missing:%s ' % ', '.join(section_list))
    configObj = config
    # every config section maps to a driver; plain keys become attributes
    for section in list(config.keys()):
        for (key,value) in list(config[section].items()):
            if value is None:
                log.debug("skipping [%s.%s] undefined value" % (section, key))
                continue
            try:
                driver = Driver.get(section)
                # structured 'events'/'transitions' entries are not copied
                # as plain attributes on pre-existing drivers
                if key in ['events', 'transitions']:
                    continue
            except DriverNotFound:
                # NOTE(review): when the driver is created in this branch the
                # 'events'/'transitions' skip above is bypassed and they DO
                # become attributes -- TODO confirm this is intended.
                log.debug("creating driver [%s]" % section)
                driver = driverBuilder(section)
            log.debug("setting [%s.%s] = [%s]" % (driver,key,value))
            setattr(driver, key, value)
    return config
def reload(config):
    """Reset the current configuration and load *config* from scratch.

    NOTE: this module-level function shadows the Python 2 builtin reload().
    """
    reset()
    # BUG FIX: the validated ConfigObj produced by load() was silently
    # dropped, so loadConfiguration() always returned None; propagate it.
    return load(config)
def reset():
    '''
    Delete the current configuration parameters: drop the cache session and
    strip from each driver every attribute previously set by load().
    '''
    global DBSession
    DBSession = None
    if configObj is None:
        # nothing was ever loaded
        return;
    for section in list(configObj.keys()):
        for (key,value) in list(configObj[section].items()):
            log.debug("deleting %s.%s (was %s)" % (section, key, value))
            try:
                driver = Driver.get(section)
                # 'events'/'transitions' are never plain attributes
                if key not in ['events', 'transitions']:
                    try:
                        delattr(driver, key)
                    except AttributeError:
                        # attribute may live on a parent driver instead
                        log.debug("[%s] attribute [%s] not found (not directly defined?)" % (section,key))
            except DriverNotFound:
                log.error('configuration reset error: [%s] driver not found' % section)
def driverBuilder(modelName):
    """Create a Driver named *modelName*, register it globally and return it."""
    created = Driver(modelName)
    Driver.addDriver(created)
    return created
class DriverException(Exception):
    """Base class for driver-related errors."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        # show the repr of the payload, mirroring the historical format
        return repr(self.value)


class DriverNotFound(DriverException):
    """Raised when a driver name is missing from the registry."""
    pass
class Driver:
    """Named container of CLI-configuration attributes, kept in a global
    registry; attribute lookups fall back to the parent driver when set."""

    # global name -> Driver registry, shared by all instances
    registry = {}

    def __init__(self, name):
        """Create the driver; configuration attributes are attached later
        by the config loader."""
        self.name = name

    def __str__(self):
        return self.name

    def __repr__(self):
        return 'driver:' + self.name

    def __getattr__(self, attrname):
        # 'parent' itself must fail fast, otherwise the delegation below
        # would recurse while no parent is configured.
        if attrname == 'parent':
            raise AttributeError(attrname)
        #log.debug("[%s] delegating search for [%s] to [%s]" % (self, attrname, self.parent))
        try:
            parent_driver = Driver.get(self.parent)
            return getattr(parent_driver, attrname)
        except AttributeError:
            raise AttributeError(attrname)

    @staticmethod
    def get(driverName):
        """Return the registered driver or raise DriverNotFound."""
        try:
            return Driver.registry[driverName]
        except KeyError:
            raise DriverNotFound('%s driver not defined' % driverName)

    @staticmethod
    def addDriver(driver):
        """Register *driver* in the global registry under its own name."""
        Driver.registry[driver.name] = driver
try:
    # Optional prompt-cache backend: everything in this block requires
    # SQLAlchemy plus the zope transaction integration.  When any import
    # fails, the except branch below downgrades pyco to a no-cache setup.
    from sqlalchemy import create_engine #@UnresolvedImport
    from sqlalchemy import Column #@UnresolvedImport
    from sqlalchemy import String #@UnresolvedImport
    from sqlalchemy.ext.declarative import declarative_base #@UnresolvedImport
    from sqlalchemy.orm import scoped_session #@UnresolvedImport
    from sqlalchemy.orm import sessionmaker #@UnresolvedImport
    from zope.sqlalchemy import ZopeTransactionExtension #@UnresolvedImport
    import transaction #@UnresolvedImport
    import logging

    logging.basicConfig()
    # silence per-statement sqlalchemy chatter
    logging.getLogger('sqlalchemy.engine').setLevel(logging.WARN)

    Base = declarative_base()

    class DevicePrompt(Base):
        # one cached prompt row per (device, state) pair
        __tablename__ = 'device_prompt'
        device = Column(String, primary_key=True)
        state = Column(String, primary_key=True)
        prompt = Column(String)

        def __init__(self, device ,state, prompt):
            self.device = device
            self.state = state
            self.prompt = prompt

    def initialize_sql():
        """Create the cache schema on the module-level engine."""
        Base.metadata.bind = engine
        Base.metadata.create_all(engine)

    def createDB(url):
        """Create the prompt-cache database.

        NOTE(review): the url argument is only logged; the schema is built
        on the module-level engine configured from db_url() -- TODO confirm.
        """
        log.debug('db endpoint: [%s]' % url)
        initialize_sql()

    def db_url():
        """SQLite url of the prompt cache (under pyco_home, or /tmp)."""
        import os.path
        if hasattr(pyco, 'pyco_home'):
            db_url = 'sqlite:///%s/%s' % (pyco.pyco_home, configObj['common']['cache'])
        else:
            db_url = 'sqlite:////tmp/%s' % configObj['common']['cache']
        return db_url

    def cache_enabled():
        """True when the SQLAlchemy session factory has been configured."""
        return DBSession != None

    def cache_exists():
        """Create the cache database file on first use (best effort)."""
        import os.path
        if hasattr(pyco, 'pyco_home'):
            db_file = '%s/%s' % (pyco.pyco_home, configObj['common']['cache'])
        else:
            db_file = '/tmp/%s' % configObj['common']['cache']
        try:
            if configObj['common']['cache']:
                if not os.path.isfile(db_file):
                    log.debug('creating cache [%s] ...' % db_file)
                    createDB('sqlite://%s' % db_file)
        except Exception as e:
            # cache is optional: never let its setup break pyco
            log.info('prompt cache is not enabled: %s' % e)

    def get_cached_prompt(target):
        """Return the DevicePrompt row for (device name, state), or None."""
        log.debug('[%s] state [%s]: getting cached prompt' % (target.name, target.state))
        try:
            session = DBSession()
            prompt = session.query(DevicePrompt).get((target.name,target.state))
            session.close()
            return prompt
        except Exception as e:
            log.debug('no prompt cached: %s' % e)
            return None

    def save_cached_prompt(target):
        """Insert or update the cached prompt for (device, current state)."""
        log.debug('[%s] state [%s]: caching prompt [%s]' % (target.name, target.state, target.prompt[target.state].value))
        try:
            session = DBSession()
            transaction.begin()
            prompt = session.query(DevicePrompt).get((target.name,target.state))
            if prompt:
                # update in place
                prompt.prompt = target.prompt[target.state].value
            else:
                log.debug('adding a new prompt to cache')
                prompt = DevicePrompt(target.name, target.state, target.prompt[target.state].value)
                session.add(prompt)
            transaction.commit()
            #session.close()
        except Exception as e:
            log.error('no prompt saved: %s' % e)

    sql_powered = True
except:
    # NOTE(review): if the sqlalchemy import fails, 'import logging' above
    # never ran, so this call would itself raise NameError unless logging
    # was imported earlier in the module -- TODO confirm.
    logging.exception("unable to load sql pluging for caching prompts")
    sql_powered = False

    def cache_enabled():
        # no-cache fallback used when SQLAlchemy is unavailable
        return False
# finally and only finally load the configuration
loadConfiguration()

if sql_powered:
    # configure the prompt-cache session factory only when a cache file
    # name is present in the [common] section of the configuration
    DBSession = None
    if 'cache' in configObj['common']:
        log.debug('creating engine for [%s]' % db_url())
        engine = create_engine(db_url(), echo=False)
        DBSession = scoped_session(sessionmaker(
            extension=ZopeTransactionExtension(), bind=engine))
        cache_exists()

if __name__ == "__main__":
    # run the examples embedded in this module's docstrings
    import doctest #@UnresolvedImport
    doctest.testmod()
| [
"attilio.dona@gmail.com"
] | attilio.dona@gmail.com |
45253f3f15ff6abade75f9c34143694a16c567d5 | 742478601715c8010650a3daf36623d84eaead56 | /Personal_space/utils/YTX_SDK/CCPRestSDK.py | 52d3970c5af756c9ded207b5f7e73a4ebd941f78 | [] | no_license | yuanzheng0922/My_Blog | 4b4450f7a917cd2a11d9cbb940e2b559e96a8a10 | c732f7c1ef305ac4bbbe82bde885b4f5317fb9e2 | refs/heads/master | 2020-03-14T03:36:48.271945 | 2018-05-03T16:53:11 | 2018-05-03T16:53:11 | 131,423,795 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 30,306 | py | #-*- coding: UTF-8 -*-
# Copyright (c) 2014 The CCP project authors. All Rights Reserved.
#
# Use of this source code is governed by a Beijing Speedtong Information Technology Co.,Ltd license
# that can be found in the LICENSE file in the root of the web site.
#
# http://www.yuntongxun.com
#
# An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import md5
import base64
import datetime
import urllib2
import json
from xmltojson import xmltojson
from xml.dom import minidom
class REST:
    # Main-account credentials (assigned via setAccount)
    AccountSid=''
    AccountToken=''
    # Application id (assigned via setAppId)
    AppId=''
    # Sub-account credentials (assigned via setSubAccount)
    SubAccountSid=''
    SubAccountToken=''
    # REST endpoint coordinates (assigned in __init__)
    ServerIP=''
    ServerPort=''
    SoftVersion=''
    Iflog=True #whether to log every request/response (via self.log)
    Batch='' #timestamp used to build the signature and auth header
    BodyType = 'xml'#request/response body format: 'json' or 'xml'
    # Constructor
    # @param ServerIP    required: REST server address
    # @param ServerPort  required: REST server port
    # @param SoftVersion required: REST API version number
    def __init__(self,ServerIP,ServerPort,SoftVersion):
        # (the original '# type: (object, object, object) -> object'
        # annotation comment was incorrect for a constructor and was removed)
        self.ServerIP = ServerIP
        self.ServerPort = ServerPort
        self.SoftVersion = SoftVersion
# 设置主帐号
# @param AccountSid 必选参数 主帐号
# @param AccountToken 必选参数 主帐号Token
def setAccount(self,AccountSid,AccountToken):
self.AccountSid = AccountSid;
self.AccountToken = AccountToken;
# 设置子帐号
#
# @param SubAccountSid 必选参数 子帐号
# @param SubAccountToken 必选参数 子帐号Token
def setSubAccount(self,SubAccountSid,SubAccountToken):
self.SubAccountSid = SubAccountSid
self.SubAccountToken = SubAccountToken
# 设置应用ID
#
# @param AppId 必选参数 应用ID
def setAppId(self,AppId):
self.AppId = AppId
# def log(self,url,body,data):
# print('这是请求的URL:')
# print (url)
# print('这是请求包体:')
# print (body)
# print('这是响应包体:')
# print (data)
# print('********************************')
#
    # Create a sub-account
    # @param friendlyName required: the sub-account name
    def CreateSubAccount(self, friendlyName):
        """POST /Accounts/{sid}/SubAccounts and return the parsed response.

        Returns the platform response as a dict; on any network or parse
        error a {'172001': ...} dict is returned instead.

        NOTE(review): the log() helper is commented out above this class's
        methods in this file, so self.log() would raise AttributeError when
        Iflog is True -- TODO confirm.
        """
        self.accAuth()
        nowdate = datetime.datetime.now()
        self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
        # signature = MD5(AccountSid + AccountToken + timestamp), upper-cased
        signature = self.AccountSid + self.AccountToken + self.Batch;
        sig = md5.new(signature).hexdigest().upper()
        # build the request URL
        url = "https://"+self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/SubAccounts?sig=" + sig
        # Authorization header = Base64(AccountSid:timestamp)
        src = self.AccountSid + ":" + self.Batch;
        auth = base64.encodestring(src).strip()
        req = urllib2.Request(url)
        self.setHttpHeader(req)
        req.add_header("Authorization", auth)
        # xml body (default)
        body ='''<?xml version="1.0" encoding="utf-8"?><SubAccount><appId>%s</appId>\
<friendlyName>%s</friendlyName>\
</SubAccount>\
'''%(self.AppId, friendlyName)
        if self.BodyType == 'json':
            # json body
            body = '''{"friendlyName": "%s", "appId": "%s"}'''%(friendlyName,self.AppId)
        data=''
        req.add_data(body)
        try:
            res = urllib2.urlopen(req)
            data = res.read()
            res.close()
            if self.BodyType=='json':
                # json response
                locations = json.loads(data)
            else:
                # xml response
                xtj=xmltojson()
                locations=xtj.main(data)
            if self.Iflog:
                self.log(url,body,data)
            return locations
        except Exception, error:
            if self.Iflog:
                self.log(url,body,data)
            return {'172001':'网络错误'}
    # List sub-accounts
    # @param startNo optional: first index to return (default 0)
    # @param offset  optional: max rows per query, between 1 and 100
    def getSubAccounts(self, startNo,offset):
        """POST /Accounts/{sid}/GetSubAccounts and return the parsed
        response dict, or {'172001': ...} on any network/parse error."""
        self.accAuth()
        nowdate = datetime.datetime.now()
        self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
        # signature = MD5(AccountSid + AccountToken + timestamp), upper-cased
        signature = self.AccountSid + self.AccountToken + self.Batch;
        sig = md5.new(signature).hexdigest().upper()
        # build the request URL
        url = "https://"+self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/GetSubAccounts?sig=" + sig
        # Authorization header = Base64(AccountSid:timestamp)
        src = self.AccountSid + ":" + self.Batch;
        auth = base64.encodestring(src).strip()
        req = urllib2.Request(url)
        self.setHttpHeader(req)
        req.add_header("Authorization", auth)
        # xml body (default)
        body ='''<?xml version="1.0" encoding="utf-8"?><SubAccount><appId>%s</appId>\
<startNo>%s</startNo><offset>%s</offset>\
</SubAccount>\
'''%(self.AppId, startNo, offset)
        if self.BodyType == 'json':
            # json body
            body = '''{"appId": "%s", "startNo": "%s", "offset": "%s"}'''%(self.AppId,startNo,offset)
        data=''
        req.add_data(body)
        try:
            res = urllib2.urlopen(req);
            data = res.read()
            res.close()
            if self.BodyType=='json':
                # json response
                locations = json.loads(data)
            else:
                # xml response
                xtj=xmltojson()
                locations=xtj.main(data)
            if self.Iflog:
                self.log(url,body,data)
            return locations
        except Exception, error:
            if self.Iflog:
                self.log(url,body,data)
            return {'172001':'网络错误'}
    # Query a sub-account by name
    # @param friendlyName required: the sub-account name
    def querySubAccount(self, friendlyName):
        """POST /Accounts/{sid}/QuerySubAccountByName and return the parsed
        response dict, or {'172001': ...} on any network/parse error."""
        self.accAuth()
        nowdate = datetime.datetime.now()
        self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
        # signature = MD5(AccountSid + AccountToken + timestamp), upper-cased
        signature = self.AccountSid + self.AccountToken + self.Batch;
        sig = md5.new(signature).hexdigest().upper()
        # build the request URL
        url = "https://"+self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/QuerySubAccountByName?sig=" + sig
        # Authorization header = Base64(AccountSid:timestamp)
        src = self.AccountSid + ":" + self.Batch;
        auth = base64.encodestring(src).strip()
        req = urllib2.Request(url)
        self.setHttpHeader(req)
        req.add_header("Authorization", auth)
        # build the request body (xml by default)
        body ='''<?xml version="1.0" encoding="utf-8"?><SubAccount><appId>%s</appId>\
<friendlyName>%s</friendlyName>\
</SubAccount>\
'''%(self.AppId, friendlyName)
        if self.BodyType == 'json':
            body = '''{"friendlyName": "%s", "appId": "%s"}'''%(friendlyName,self.AppId)
        data=''
        req.add_data(body)
        try:
            res = urllib2.urlopen(req);
            data = res.read()
            res.close()
            if self.BodyType=='json':
                # json response
                locations = json.loads(data)
            else:
                # xml response
                xtj=xmltojson()
                locations=xtj.main(data)
            if self.Iflog:
                self.log(url,body,data)
            return locations
        except Exception, error:
            if self.Iflog:
                self.log(url,body,data)
            return {'172001':'网络错误'}
# 发送模板短信
# @param to 必选参数 短信接收彿手机号码集合,用英文逗号分开
# @param datas 可选参数 内容数据
# @param tempId 必选参数 模板Id
def sendTemplateSMS(self, to,datas,tempId):
self.accAuth()
nowdate = datetime.datetime.now()
self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
#生成sig
signature = self.AccountSid + self.AccountToken + self.Batch;
sig = md5.new(signature).hexdigest().upper()
#拼接URL
url = "https://"+self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/SMS/TemplateSMS?sig=" + sig
#生成auth
src = self.AccountSid + ":" + self.Batch;
auth = base64.encodestring(src).strip()
req = urllib2.Request(url)
self.setHttpHeader(req)
req.add_header("Authorization", auth)
#创建包体
b=''
for a in datas:
b+='<data>%s</data>'%(a)
body ='<?xml version="1.0" encoding="utf-8"?><SubAccount><datas>'+b+'</datas><to>%s</to><templateId>%s</templateId><appId>%s</appId>\
</SubAccount>\
'%(to, tempId,self.AppId)
if self.BodyType == 'json':
# if this model is Json ..then do next code
b='['
for a in datas:
b+='"%s",'%(a)
b+=']'
body = '''{"to": "%s", "datas": %s, "templateId": "%s", "appId": "%s"}'''%(to,b,tempId,self.AppId)
req.add_data(body)
data=''
try:
res = urllib2.urlopen(req);
data = res.read()
res.close()
if self.BodyType=='json':
#json格式
locations = json.loads(data)
else:
#xml格式
xtj=xmltojson()
locations=xtj.main(data)
if self.Iflog:
self.log(url,body,data)
return locations
except Exception, error:
if self.Iflog:
self.log(url,body,data)
return {'172001':'网络错误'}
# 外呼通知
# @param to 必选参数 被叫号码
# @param mediaName 可选参数 语音文件名称,格式 wav。与mediaTxt不能同时为空。当不为空时mediaTxt属性失效。
# @param mediaTxt 可选参数 文本内容
# @param displayNum 可选参数 显示的主叫号码
# @param playTimes 可选参数 循环播放次数,1-3次,默认播放1次。
# @param respUrl 可选参数 外呼通知状态通知回调地址,云通讯平台将向该Url地址发送呼叫结果通知。
# @param userData 可选参数 用户私有数据
# @param maxCallTime 可选参数 最大通话时长
# @param speed 可选参数 发音速度
# @param volume 可选参数 音量
# @param pitch 可选参数 音调
# @param bgsound 可选参数 背景音编号
def landingCall(self,to,mediaName,mediaTxt,displayNum,playTimes,respUrl,userData,maxCallTime,speed,volume,pitch,bgsound):
self.accAuth()
nowdate = datetime.datetime.now()
self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
#生成sig
signature = self.AccountSid + self.AccountToken + self.Batch;
sig = md5.new(signature).hexdigest().upper()
#拼接URL
url = "https://"+self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/Calls/LandingCalls?sig=" + sig
#生成auth
src = self.AccountSid + ":" + self.Batch;
auth = base64.encodestring(src).strip()
req = urllib2.Request(url)
self.setHttpHeader(req)
req.add_header("Authorization", auth)
#创建包体
body ='''<?xml version="1.0" encoding="utf-8"?><LandingCall>\
<to>%s</to><mediaName>%s</mediaName><mediaTxt>%s</mediaTxt><appId>%s</appId><displayNum>%s</displayNum>\
<playTimes>%s</playTimes><respUrl>%s</respUrl><userData>%s</userData><maxCallTime>%s</maxCallTime><speed>%s</speed>
<volume>%s</volume><pitch>%s</pitch><bgsound>%s</bgsound></LandingCall>\
'''%(to, mediaName,mediaTxt,self.AppId,displayNum,playTimes,respUrl,userData,maxCallTime,speed,volume,pitch,bgsound)
if self.BodyType == 'json':
body = '''{"to": "%s", "mediaName": "%s","mediaTxt": "%s","appId": "%s","displayNum": "%s","playTimes": "%s","respUrl": "%s","userData": "%s","maxCallTime": "%s","speed": "%s","volume": "%s","pitch": "%s","bgsound": "%s"}'''%(to, mediaName,mediaTxt,self.AppId,displayNum,playTimes,respUrl,userData,maxCallTime,speed,volume,pitch,bgsound)
req.add_data(body)
data=''
try:
res = urllib2.urlopen(req);
data = res.read()
res.close()
if self.BodyType=='json':
#json格式
locations = json.loads(data)
else:
#xml格式
xtj=xmltojson()
locations=xtj.main(data)
if self.Iflog:
self.log(url,body,data)
return locations
except Exception, error:
if self.Iflog:
self.log(url,body,data)
return {'172001':'网络错误'}
# 语音验证码
# @param verifyCode 必选参数 验证码内容,为数字和英文字母,不区分大小写,长度4-8位
# @param playTimes 可选参数 播放次数,1-3次
# @param to 必选参数 接收号码
# @param displayNum 可选参数 显示的主叫号码
# @param respUrl 可选参数 语音验证码状态通知回调地址,云通讯平台将向该Url地址发送呼叫结果通知
# @param lang 可选参数 语言类型
# @param userData 可选参数 第三方私有数据
def voiceVerify(self,verifyCode,playTimes,to,displayNum,respUrl,lang,userData):
self.accAuth()
nowdate = datetime.datetime.now()
self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
#生成sig
signature = self.AccountSid + self.AccountToken + self.Batch;
sig = md5.new(signature).hexdigest().upper()
#拼接URL
url = "https://"+self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/Calls/VoiceVerify?sig=" + sig
#生成auth
src = self.AccountSid + ":" + self.Batch;
auth = base64.encodestring(src).strip()
req = urllib2.Request(url)
self.setHttpHeader(req)
req.add_header("Authorization", auth)
#创建包体
body ='''<?xml version="1.0" encoding="utf-8"?><VoiceVerify>\
<appId>%s</appId><verifyCode>%s</verifyCode><playTimes>%s</playTimes><to>%s</to><respUrl>%s</respUrl>\
<displayNum>%s</displayNum><lang>%s</lang><userData>%s</userData></VoiceVerify>\
'''%(self.AppId,verifyCode,playTimes,to,respUrl,displayNum,lang,userData)
if self.BodyType == 'json':
# if this model is Json ..then do next code
body = '''{"appId": "%s", "verifyCode": "%s","playTimes": "%s","to": "%s","respUrl": "%s","displayNum": "%s","lang": "%s","userData": "%s"}'''%(self.AppId,verifyCode,playTimes,to,respUrl,displayNum,lang,userData)
req.add_data(body)
data=''
try:
res = urllib2.urlopen(req);
data = res.read()
res.close()
if self.BodyType=='json':
#json格式
locations = json.loads(data)
else:
#xml格式
xtj=xmltojson()
locations=xtj.main(data)
if self.Iflog:
self.log(url,body,data)
return locations
except Exception, error:
if self.Iflog:
self.log(url,body,data)
return {'172001':'网络错误'}
# IVR外呼
# @param number 必选参数 待呼叫号码,为Dial节点的属性
# @param userdata 可选参数 用户数据,在<startservice>通知中返回,只允许填写数字字符,为Dial节点的属性
# @param record 可选参数 是否录音,可填项为true和false,默认值为false不录音,为Dial节点的属性
def ivrDial(self,number,userdata,record):
self.accAuth()
nowdate = datetime.datetime.now()
self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
#生成sig
signature = self.AccountSid + self.AccountToken + self.Batch;
sig = md5.new(signature).hexdigest().upper()
#拼接URL
url = "https://"+self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/ivr/dial?sig=" + sig
#生成auth
src = self.AccountSid + ":" + self.Batch;
auth = base64.encodestring(src).strip()
req = urllib2.Request(url)
req.add_header("Accept", "application/xml")
req.add_header("Content-Type", "application/xml;charset=utf-8")
req.add_header("Authorization", auth)
#创建包体
body ='''<?xml version="1.0" encoding="utf-8"?>
<Request>
<Appid>%s</Appid>
<Dial number="%s" userdata="%s" record="%s"></Dial>
</Request>
'''%(self.AppId,number,userdata,record)
req.add_data(body)
data=''
try:
res = urllib2.urlopen(req);
data = res.read()
res.close()
xtj=xmltojson()
locations=xtj.main(data)
if self.Iflog:
self.log(url,body,data)
return locations
except Exception, error:
if self.Iflog:
self.log(url,body,data)
return {'172001':'网络错误'}
# 话单下载
# @param date 必选参数 day 代表前一天的数据(从00:00 – 23:59),目前只支持按天查询
# @param keywords 可选参数 客户的查询条件,由客户自行定义并提供给云通讯平台。默认不填忽略此参数
def billRecords(self,date,keywords):
self.accAuth()
nowdate = datetime.datetime.now()
self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
#生成sig
signature = self.AccountSid + self.AccountToken + self.Batch;
sig = md5.new(signature).hexdigest().upper()
#拼接URL
url = "https://"+self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/BillRecords?sig=" + sig
#生成auth
src = self.AccountSid + ":" + self.Batch;
auth = base64.encodestring(src).strip()
req = urllib2.Request(url)
self.setHttpHeader(req)
req.add_header("Authorization", auth)
#创建包体
body ='''<?xml version="1.0" encoding="utf-8"?><BillRecords>\
<appId>%s</appId><date>%s</date><keywords>%s</keywords>\
</BillRecords>\
'''%(self.AppId,date,keywords)
if self.BodyType == 'json':
# if this model is Json ..then do next code
body = '''{"appId": "%s", "date": "%s","keywords": "%s"}'''%(self.AppId,date,keywords)
req.add_data(body)
data=''
try:
res = urllib2.urlopen(req);
data = res.read()
res.close()
if self.BodyType=='json':
#json格式
locations = json.loads(data)
else:
#xml格式
xtj=xmltojson()
locations=xtj.main(data)
if self.Iflog:
self.log(url,body,data)
return locations
except Exception, error:
if self.Iflog:
self.log(url,body,data)
return {'172001':'网络错误'}
# 主帐号信息查询
def queryAccountInfo(self):
self.accAuth()
nowdate = datetime.datetime.now()
self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
#生成sig
signature = self.AccountSid + self.AccountToken + self.Batch;
sig = md5.new(signature).hexdigest().upper()
#拼接URL
url = "https://"+self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/AccountInfo?sig=" + sig
#生成auth
src = self.AccountSid + ":" + self.Batch;
auth = base64.encodestring(src).strip()
req = urllib2.Request(url)
self.setHttpHeader(req)
body=''
req.add_header("Authorization", auth)
data=''
try:
res = urllib2.urlopen(req);
data = res.read()
res.close()
if self.BodyType=='json':
#json格式
locations = json.loads(data)
else:
#xml格式
xtj=xmltojson()
locations=xtj.main(data)
if self.Iflog:
self.log(url,body,data)
return locations
except Exception, error:
if self.Iflog:
self.log(url,body,data)
return {'172001':'网络错误'}
# 短信模板查询
# @param templateId 必选参数 模板Id,不带此参数查询全部可用模板
def QuerySMSTemplate(self,templateId):
self.accAuth()
nowdate = datetime.datetime.now()
self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
#生成sig
signature = self.AccountSid + self.AccountToken + self.Batch;
sig = md5.new(signature).hexdigest().upper()
#拼接URL
url = "https://"+self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/SMS/QuerySMSTemplate?sig=" + sig
#生成auth
src = self.AccountSid + ":" + self.Batch;
auth = base64.encodestring(src).strip()
req = urllib2.Request(url)
self.setHttpHeader(req)
req.add_header("Authorization", auth)
#创建包体
body ='''<?xml version="1.0" encoding="utf-8"?><Request>\
<appId>%s</appId><templateId>%s</templateId></Request>
'''%(self.AppId,templateId)
if self.BodyType == 'json':
# if this model is Json ..then do next code
body = '''{"appId": "%s", "templateId": "%s"}'''%(self.AppId,templateId)
req.add_data(body)
data=''
try:
res = urllib2.urlopen(req);
data = res.read()
res.close()
if self.BodyType=='json':
#json格式
locations = json.loads(data)
else:
#xml格式
xtj=xmltojson()
locations=xtj.main2(data)
if self.Iflog:
self.log(url,body,data)
return locations
except Exception, error:
if self.Iflog:
self.log(url,body,data)
return {'172001':'网络错误'}
# 呼叫结果查询
# @param callsid 必选参数 呼叫ID
def CallResult(self,callSid):
self.accAuth()
nowdate = datetime.datetime.now()
self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
#生成sig
signature = self.AccountSid + self.AccountToken + self.Batch;
sig = md5.new(signature).hexdigest().upper()
#拼接URL
url = "https://"+self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/CallResult?sig=" + sig + "&callsid=" + callSid
#生成auth
src = self.AccountSid + ":" + self.Batch;
auth = base64.encodestring(src).strip()
req = urllib2.Request(url)
self.setHttpHeader(req)
body=''
req.add_header("Authorization", auth)
data=''
try:
res = urllib2.urlopen(req);
data = res.read()
res.close()
if self.BodyType=='json':
#json格式
locations = json.loads(data)
else:
#xml格式
xtj=xmltojson()
locations=xtj.main(data)
if self.Iflog:
self.log(url,body,data)
return locations
except Exception, error:
if self.Iflog:
self.log(url,body,data)
return {'172001':'网络错误'}
# 呼叫状态查询
# @param callid 必选参数 一个由32个字符组成的电话唯一标识符
# @param action 可选参数 查询结果通知的回调url地址
def QueryCallState (self,callid,action):
self.accAuth()
nowdate = datetime.datetime.now()
self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
#生成sig
signature = self.AccountSid + self.AccountToken + self.Batch;
sig = md5.new(signature).hexdigest().upper()
#拼接URL
url = "https://"+self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/ivr/call?sig=" + sig + "&callid=" + callid
#生成auth
src = self.AccountSid + ":" + self.Batch;
auth = base64.encodestring(src).strip()
req = urllib2.Request(url)
self.setHttpHeader(req)
req.add_header("Authorization", auth)
#创建包体
body ='''<?xml version="1.0" encoding="utf-8"?><Request>\
<Appid>%s</Appid><QueryCallState callid="%s" action="%s"/>\
</Request>\
'''%(self.AppId,callid,action)
if self.BodyType == 'json':
# if this model is Json ..then do next code
body = '''{"Appid":"%s","QueryCallState":{"callid":"%s","action":"%s"}}'''%(self.AppId,callid,action)
req.add_data(body)
data=''
try:
res = urllib2.urlopen(req);
data = res.read()
res.close()
if self.BodyType=='json':
#json格式
locations = json.loads(data)
else:
#xml格式
xtj=xmltojson()
locations=xtj.main(data)
if self.Iflog:
self.log(url,body,data)
return locations
except Exception, error:
if self.Iflog:
self.log(url,body,data)
return {'172001':'网络错误'}
# 语音文件上传
# @param filename 必选参数 文件名
# @param body 必选参数 二进制串
def MediaFileUpload (self,filename,body):
self.accAuth()
nowdate = datetime.datetime.now()
self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
#生成sig
signature = self.AccountSid + self.AccountToken + self.Batch;
sig = md5.new(signature).hexdigest().upper()
#拼接URL
url = "https://"+self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/Calls/MediaFileUpload?sig=" + sig + "&appid=" + self.AppId + "&filename=" + filename
#生成auth
src = self.AccountSid + ":" + self.Batch;
auth = base64.encodestring(src).strip()
req = urllib2.Request(url)
req.add_header("Authorization", auth)
if self.BodyType == 'json':
req.add_header("Accept", "application/json")
req.add_header("Content-Type", "application/octet-stream")
else:
req.add_header("Accept", "application/xml")
req.add_header("Content-Type", "application/octet-stream")
#创建包体
req.add_data(body)
try:
res = urllib2.urlopen(req);
data = res.read()
res.close()
if self.BodyType=='json':
#json格式
locations = json.loads(data)
else:
#xml格式
xtj=xmltojson()
locations=xtj.main(data)
if self.Iflog:
self.log(url,body,data)
return locations
except Exception, error:
if self.Iflog:
self.log(url,body,data)
return {'172001':'网络错误'}
#子帐号鉴权
def subAuth(self):
if(self.ServerIP==""):
print('172004');
print('IP为空');
if(self.ServerPort<=0):
print('172005');
print('端口错误(小于等于0)');
if(self.SoftVersion==""):
print('172013');
print('版本号为空');
if(self.SubAccountSid==""):
print('172008');
print('子帐号为空');
if(self.SubAccountToken==""):
print('172009');
print('子帐号令牌为空');
if(self.AppId==""):
print('172012');
print('应用ID为空');
#主帐号鉴权
def accAuth(self):
if(self.ServerIP==""):
print('172004');
print('IP为空');
if(self.ServerPort<=0):
print('172005');
print('端口错误(小于等于0)');
if(self.SoftVersion==""):
print('172013');
print('版本号为空');
if(self.AccountSid==""):
print('172006');
print('主帐号为空');
if(self.AccountToken==""):
print('172007');
print('主帐号令牌为空');
if(self.AppId==""):
print('172012');
print('应用ID为空');
#设置包头
def setHttpHeader(self,req):
if self.BodyType == 'json':
req.add_header("Accept", "application/json")
req.add_header("Content-Type", "application/json;charset=utf-8")
else:
req.add_header("Accept", "application/xml")
req.add_header("Content-Type", "application/xml;charset=utf-8")
| [
"yuanzheng0922@163.com"
] | yuanzheng0922@163.com |
78f611b8d4b8750cab5741821716162ad2583890 | 0670d89e5d7b91d86b181e0b6cfdbef8b3b9e9e6 | /p2/components/quota/exceptions.py | bdc46ece3b3237860e9d62cf8e192515e2818382 | [
"MIT"
] | permissive | BeryJu/p2 | dfe570afb420843033e519350f5b89e992878a6b | 80b5c6a821f90cef73d6e8cd3c6cdb05ffa86b27 | refs/heads/master | 2020-12-04T01:21:21.197822 | 2019-08-29T16:02:21 | 2019-08-29T16:02:21 | 231,549,415 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 177 | py | """p2 quota exceptions"""
from p2.core.exceptions import BlobException
class QuotaExceededException(BlobException):
    """Exception raised when ACTION_BLOCK is selected."""
    # Subclass of the project's BlobException; presumably raised by the quota
    # component when an operation would exceed the configured quota and the
    # policy's action is ACTION_BLOCK — confirm against the quota controller.
| [
"jens.langhammer@haufe-lexware.com"
] | jens.langhammer@haufe-lexware.com |
e679d27a9d4dedf51b36d0bae5283472e685e8fd | 15a60dec097cf03a1fd304979f2aea8ff49a786b | /CalculateBox.py | 8d8c54d39a49ee34a86df0a12bbd4b5096fc249f | [] | no_license | Nyxeka/Various-Scripts | 556c0a4e766ef2bd1003b581916cd4655328378c | e664cf6d7bc1b7011c3c9ca6647a9546aa659210 | refs/heads/master | 2021-06-03T10:30:21.810846 | 2019-09-06T14:11:26 | 2019-09-06T14:11:26 | 18,566,294 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,205 | py | """
By Nicholas Hylands
This program was created to solve the 9-box problem:
[ ][ ][ ]
[ ][ ][ ]
[ ][ ][ ][x]
we need to fill this up so that each row and column in the middle are the sum of their
respective adjacent numbers on the outside.
A possibility for the question was considered so that 1,1 and 1,2 would be the sum of [0,3] * 10 + [1,2]
this possibility has been proven wrong, as when we run this program, we go over every single possibility.
"""
# The digits 1..8 to permute (Python 2: range returns a list, which testList
# mutates in place).
myData = range(1,9)
print myData, " "
print "========================="
# Find, scanning from the right, the index of a digit larger than the one at `num`.
def checkFromRight(givenList, num):
    """Return the largest index i > num with givenList[i] > givenList[num].

    Returns 0 when no such index exists (0 can never be a legal result,
    because only indices strictly greater than `num` are scanned). Used by
    testList as the "find swap partner" step of the next-permutation walk.
    """
    if num >= len(givenList) - 1:
        # No digits to the right of `num` at all.
        return 0
    for i in range(len(givenList) - 1, num, -1):
        if givenList[i] > givenList[num]:
            return i
    return 0
# Reverse a sub-array of `given` in place between two inclusive indices.
def reverseBetween(start, finish, given):
    """Reverse given[start:finish+1] in place and return `given`.

    The reversal only happens for 0 < start and finish < len(given), which is
    the range the caller always supplies. The list is now returned
    unconditionally — the original fell off the end and returned None for
    out-of-range arguments.
    """
    if start > 0 and finish < len(given):
        # In-place slice reversal replaces the original element-by-element swap.
        given[start:finish + 1] = given[start:finish + 1][::-1]
    return given
# Check whether a permutation of eight digits solves the 9-box puzzle.
def checkSolvedBoxWithFixedOne(given):
    """Test `given` (a permutation of 1..8) against the box conditions.

    The puzzle treats the digit 8 as a 9 before checking. Returns:
      2 -- solved AND the bottom-left cell (index 6) is 1
      1 -- solved, bottom-left is something else
      0 -- not solved

    The original aliased the caller's list, mutated it, and patched the 8
    back before every return; working on a copy gives the same observable
    behaviour without the restore dance.
    """
    check = list(given)
    check[check.index(8)] = 9
    solved = (check[0] + check[3] == check[7]
              and check[1] + check[4] == 8
              and check[0] + check[1] == check[2]
              and check[3] + check[4] == check[5])
    if not solved:
        return 0
    return 2 if check[6] == 1 else 1
# Enumerate permutations of `given` and count/collect box solutions.
def testList(given, curDig = None):
    """Walk every permutation of `given` (next-permutation style, mutating the
    list in place) and test each against the 9-box conditions.

    Returns [count, solvedWithOne, solvedWithoutOne], where count is the
    number of permutations visited; aborts early (returning just [count]) if
    more than 40321 permutations are produced, as a runaway guard (8! = 40320).

    Fixes: `is not -1` / `is not 0` identity tests on ints and an
    `is not [8,7,6,5,4,3,2,1]` identity test against a fresh list literal
    (always True) are now proper equality comparisons; the enumeration itself
    is unchanged.

    NOTE(review): the solved-lists append a reference to the working list,
    which keeps mutating afterwards, so entries end up reflecting later
    permutations rather than the one that solved the box — the counts are
    unaffected; confirm before relying on the list contents.
    """
    if curDig is None:
        curDig = len(given) - 1          # pivot starts at the rightmost digit
    results = [0]                        # results[0] counts visited permutations
    resultsSolvedWithOne = []
    resultsSolvedWithoutOne = []
    listToTest = given
    while curDig != -1:                  # was `is not -1` (identity test on an int)
        numToSwap = checkFromRight(listToTest, curDig)
        if numToSwap != 0:               # was `is not 0`
            # Classic next-permutation step: swap the pivot with its partner,
            # then reverse the tail back into ascending order.
            listToTest[curDig], listToTest[numToSwap] = listToTest[numToSwap], listToTest[curDig]
            listToTest = reverseBetween(curDig + 1, len(listToTest) - 1, listToTest)
            results[0] = results[0] + 1
            if results[0] > 40321:
                # Safety guard: should never trigger for 8 digits (8! == 40320).
                return results
            checkIfSolved = checkSolvedBoxWithFixedOne(listToTest)
            if checkIfSolved == 1:
                resultsSolvedWithoutOne.append(listToTest)
            if checkIfSolved == 2:
                if listToTest != [8, 7, 6, 5, 4, 3, 2, 1]:   # was `is not` (always True)
                    resultsSolvedWithOne.append(listToTest)
            # Restart the pivot scan from the right end.
            curDig = len(given) - 1
        else:
            # No larger digit right of the pivot: move the pivot left.
            curDig = curDig - 1
    results.append(resultsSolvedWithOne)
    results.append(resultsSolvedWithoutOne)
    return results
# Run the exhaustive search over all permutations of 1..8 and report the
# counts/solutions; [1] holds solutions with 1 in the bottom-left, [2] the rest.
myResult = testList(myData)
print 'possibilities total: ', myResult[0]
print 'possibilities if bottom left is 1: ',myResult[1]
print 'possibilities if bottom left can be anything: ',myResult[2]
| [
"me@nickhylands.com"
] | me@nickhylands.com |
711320ebe92a86224c24614f559ba81626a159bc | a357331b095f29e6cbe81ceb4fa17e2768b87415 | /pycc | d76aa5e046c7b25011ab626bf45aedfba511a257 | [] | no_license | IndianBoy42/single-file-cxx | 43597b0ac490cf16bfa6531a45e8337fe9d82b80 | 1cee2d396fc622792699dd7f5ec79fb8d1d0c537 | refs/heads/master | 2023-03-22T22:28:33.159831 | 2021-03-01T13:53:07 | 2021-03-01T13:53:07 | 343,407,377 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,341 | #!/usr/bin/env python
import click
import subprocess
import watchgod
import shutil
from os import path, makedirs
def do_compile(compiler, output_exe, srcfile, opt_level, std, native=True, openmp=False, eigen=False, blas=False, gtest=False, gbench=False, pthread=False, checkonly=False):
    """Invoke *compiler* on *srcfile*, emitting *output_exe*.

    Flags are appended in a fixed order: native tuning, syntax-only check,
    OpenMP, pthread, Eigen, OpenBLAS, then the Google Benchmark include/link
    set. GTest support is not implemented. The assembled command line is
    printed before running; returns the compiler's exit code.
    """
    cmd = [compiler, srcfile, f'-O{opt_level}', f'--std={std}', '-I.', '-o', output_exe]
    if native:
        cmd += ['-march=native', '-mtune=native']
    if checkonly:
        cmd.append('-fsyntax-only')
    if openmp:
        cmd += ['-fopenmp', '-lomp']
    if pthread:
        cmd.append('-lpthread')
    if eigen:
        cmd.append('-leigen')
    if blas:
        cmd.append('-lopenblas')
    if gbench:
        cmd += ['-isystem', 'benchmark/include', '-Lbenchmark/build/src', '-lbenchmark']
    if gtest:
        raise NotImplementedError()
    print(cmd)
    return subprocess.run(cmd).returncode
def do_run_exe(output_exe):
    """Execute the freshly built binary from the current directory."""
    subprocess.run([f'./{output_exe}'])
def docheck(srcfile, compiler, native, opt_level, output_exe, std, watch, openmp, blas, eigen, pthread):
    """Syntax-only check: forward to do_compile with checkonly=True.

    blas/eigen/pthread are accepted for signature parity with the other do*
    helpers but are not forwarded (link flags don't affect a syntax pass).
    """
    do_compile(compiler, output_exe, srcfile, opt_level, std,
               native=native, openmp=openmp, checkonly=True)
def dorun(srcfile, compiler, native, opt_level, output_exe, std, watch, openmp, blas, eigen, pthread):
    """Compile srcfile and, if the compiler exited cleanly, run the binary."""
    if do_compile(compiler, output_exe, srcfile, opt_level, std, native, openmp) == 0:
        do_run_exe(output_exe)
def dobench(srcfile, compiler, native, opt_level, output_exe, std, watch, openmp, blas, eigen, pthread):
    """Compile srcfile against Google Benchmark (gbench + pthread) and run the
    resulting binary when the compile succeeds.

    The original carried a second, hand-rolled compile-and-run sequence after
    an unconditional `return`; that code was unreachable dead code and has
    been removed.
    """
    ret = do_compile(compiler, output_exe, srcfile, opt_level, std, native, openmp, gbench=True, pthread=True)
    if ret == 0:
        do_run_exe(output_exe)
def dotest(srcfile, compiler, native, opt_level, output_exe, std, watch, openmp):
    """Compile *srcfile* against the in-tree GTest build (linking pthread) and,
    if the compile succeeds, execute the resulting test binary."""
    cmd = [compiler, srcfile, f'-O{opt_level}', f'--std={std}',
           '-isystem', 'gtest/include', '-Lgtest/build/src', '-lgtest',
           '-lpthread',
           '-I', '.',
           '-o', output_exe]
    if native:
        cmd += ['-march=native', '-mtune=native']
    if openmp:
        cmd.append('-fopenmp')
    print(cmd)
    if subprocess.run(cmd).returncode != 0:
        return
    subprocess.run([f'./{output_exe}'])
@click.group()
def benchmark_helper():
    # Root click group; every sub-command (new/check/run/test/bench) attaches here.
    pass
@benchmark_helper.group()
def new():
    # `new` sub-group: template-scaffolding commands (main/test/bench).
    pass
@new.command()
@click.argument('name', default='example.c')
def main(name):
    # Scaffold a C source file from main-template.c (refuses to overwrite).
    # NOTE(review): the next command below is also named `main`; click derives
    # the command name from the function name, so the later definition replaces
    # this one in the `new` group — this C variant appears unreachable from the
    # CLI. Confirm and rename one of them.
    if path.exists(name):
        print(f'{name} Already exists')
    else:
        shutil.copyfile('./main-template.c', f'./{name}')
        print(f'Open+Edit ./{name}')
        print(f'Then run the following to run/check (add -w to rerun on file change)')
        print(f'{__file__} run/check {name}')
@new.command()
@click.argument('name', default='example.cpp')
def main(name):
    # Scaffold a C++ source file from main-template.cpp (refuses to overwrite).
    # NOTE(review): this redefines `main` and therefore shadows the C-template
    # command of the same name above — confirm and rename one of them.
    if path.exists(name):
        print(f'{name} Already exists')
    else:
        shutil.copyfile('./main-template.cpp', f'./{name}')
        print(f'Open+Edit ./{name}')
        print(f'Then run the following to run/check (add -w to rerun on file change)')
        print(f'{__file__} run/check {name}')
@new.command()
@click.argument('name', default='example.cpp')
def test(name):
    """Scaffold a new GTest source file from test-template.cpp (never
    overwrites an existing file)."""
    if path.exists(name):
        print(f'{name} Already exists')
        return
    shutil.copyfile('./test-template.cpp', f'./{name}')
    print(f'Open+Edit ./{name}')
    print(f'Then run the following to test (add -w to rerun on file change)')
    print(f'{__file__} test {name}')
@new.command()
@click.argument('name', default='example.cpp')
def bench(name):
    """Scaffold a new Google Benchmark source file from benchmark-template.cpp
    (never overwrites an existing file)."""
    if path.exists(name):
        print(f'{name} Already exists')
        return
    shutil.copyfile('./benchmark-template.cpp', f'./{name}')
    print(f'Open+Edit ./{name}')
    print(f'Then run the following to bench (add -w to rerun on file change)')
    print(f'{__file__} bench {name}')
import functools
def compile_options(fn):
    """Decorator bundle: attach the shared SRCFILE argument and compile-related
    click options to a command callback.

    The decorated callback receives: srcfile, compiler, native, opt_level,
    output_exe, std, watch, omp, eigen, pthread, blas.

    Fix: the original attached the '-w' short flag to five different options
    (watch/omp/eigen/pthread/blas), making '-w' ambiguous; it is now kept only
    on --watch, matching the documented "add -w to rerun on file change" usage.
    The long option names are unchanged.
    """
    fn = click.argument('srcfile')(fn)
    fn = click.option('--compiler', '-c', default='g++')(fn)
    fn = click.option('--native/--no-native', default=True)(fn)
    fn = click.option('--opt-level', '-O', default=3)(fn)
    fn = click.option('--output-exe', '-o', default=None)(fn)
    fn = click.option('--std', default='gnu++20')(fn)
    fn = click.option('--watch/--no-watch', '-w', default=False)(fn)
    fn = click.option('--omp/--no-omp', default=False)(fn)
    fn = click.option('--eigen/--no-eigen', default=False)(fn)
    fn = click.option('--pthread/--no-pthread', default=False)(fn)
    fn = click.option('--blas/--no-blas', default=False)(fn)
    return fn
def fix_inputs(srcfile, compiler, output_exe):
    """Normalise the (srcfile, compiler, output_exe) triple.

    - If srcfile does not exist, probe srcfile + '.cpp' then srcfile + '.c'.
    - Switch the compiler between its C and C++ driver to match the extension.
      (NOTE(review): for a .c file, 'clang++' first goes through the
      g++ -> gcc substring replacement, yielding 'clangcc' — preserved as-is.)
    - Default output_exe to build/<srcfile>, creating build/ when needed.
    Returns the adjusted triple.
    """
    # Resolve the source path: first existing candidate wins, else keep as-is.
    candidates = (srcfile, srcfile + '.cpp', srcfile + '.c')
    srcfile = next((cand for cand in candidates if path.exists(cand)), srcfile)
    # Pick the matching compiler driver for the detected language.
    if srcfile.endswith('cpp'):
        compiler = compiler.replace('gcc', 'g++')
        if 'clang' in compiler and 'clang++' not in compiler:
            compiler = compiler.replace('clang', 'clang++')
    elif srcfile.endswith('c'):
        compiler = compiler.replace('g++', 'gcc').replace('clang++', 'clang')
    # Default the output path into a build/ directory.
    if output_exe is None:
        makedirs('build', exist_ok=True)
        output_exe = f'build/{srcfile}'
    return srcfile, compiler, output_exe
@benchmark_helper.command()
@compile_options
def test(srcfile, compiler, native, opt_level, output_exe, std, watch, omp, blas, eigen, pthread):
    """Build SRCFILE against GTest and run it; with --watch, rebuild and rerun
    on every file change.

    Fixes: compile_options also attaches blas/eigen/pthread, so those
    parameters are now accepted (click's keyword call previously raised
    TypeError); the watch loop observed the undefined name `cpp` (NameError)
    and now watches srcfile; the undefined `ArgumentError` is now ValueError.
    """
    srcfile, compiler, output_exe = fix_inputs(srcfile, compiler, output_exe)
    if not srcfile.endswith('.cpp'):
        raise ValueError('GTest only works with C++')
    if watch:
        for change in watchgod.watch(srcfile):
            dotest(srcfile, compiler, native, opt_level, output_exe, std, watch, omp)
    else:
        dotest(srcfile, compiler, native, opt_level, output_exe, std, watch, omp)
@benchmark_helper.command()
@compile_options
def check(srcfile, compiler, native, opt_level, output_exe, std, watch, omp, blas, eigen, pthread):
    """Syntax-check SRCFILE (no binary produced); with --watch, re-check on
    every file change.

    Fix: the watch loop observed the undefined name `cpp` (NameError) and now
    watches srcfile.
    """
    srcfile, compiler, output_exe = fix_inputs(srcfile, compiler, output_exe)
    if watch:
        for change in watchgod.watch(srcfile):
            docheck(srcfile, compiler, native, opt_level, output_exe, std, watch, omp, blas, eigen, pthread)
    else:
        docheck(srcfile, compiler, native, opt_level, output_exe, std, watch, omp, blas, eigen, pthread)
@benchmark_helper.command()
@compile_options
def run(srcfile, compiler, native, opt_level, output_exe, std, watch, omp, blas, eigen, pthread):
    """Compile SRCFILE and run it on success; with --watch, rebuild and rerun
    on every file change.

    Fix: the watch loop observed the undefined name `cpp` (NameError) and now
    watches srcfile.
    """
    srcfile, compiler, output_exe = fix_inputs(srcfile, compiler, output_exe)
    if watch:
        for change in watchgod.watch(srcfile):
            dorun(srcfile, compiler, native, opt_level, output_exe, std, watch, omp, blas, eigen, pthread)
    else:
        dorun(srcfile, compiler, native, opt_level, output_exe, std, watch, omp, blas, eigen, pthread)
@benchmark_helper.command()
@compile_options
def bench(srcfile, compiler, native, opt_level, output_exe, std, watch, omp, blas, eigen, pthread):
    """Build SRCFILE against Google Benchmark and run it; with --watch,
    rebuild and rerun on every file change.

    Fixes: the undefined `ArgumentError` is now ValueError; the watch loop
    observed the undefined name `cpp` (NameError) and now watches srcfile.
    """
    srcfile, compiler, output_exe = fix_inputs(srcfile, compiler, output_exe)
    if not srcfile.endswith('.cpp'):
        raise ValueError('Google Benchmarks only works with C++')
    if watch:
        for change in watchgod.watch(srcfile):
            dobench(srcfile, compiler, native, opt_level, output_exe, std, watch, omp, blas, eigen, pthread)
    else:
        dobench(srcfile, compiler, native, opt_level, output_exe, std, watch, omp, blas, eigen, pthread)
if __name__ == '__main__':
    # CLI entry point: dispatch to the root click group.
    benchmark_helper()
| [
"amedhi@connect.ust.hk"
] | amedhi@connect.ust.hk | |
33ecb460f9446f7511b78064220e3249008e7215 | b6c3dd30eebc78233f9b36065290c53bd299691f | /python/asyncore/simple_chat.py | 4ece2b32a7bc17606099f53c461a62ca099938d7 | [] | no_license | qiurenping/tiger | 9bc54594b9d8ef4a4cfc6b92e058df46fb498982 | ddcb1d303922d267a972db6e72bd4a3294a6ab9c | refs/heads/master | 2022-07-27T15:55:01.257040 | 2020-03-15T10:49:14 | 2020-03-15T10:49:14 | 148,075,253 | 0 | 0 | null | 2022-06-17T02:56:06 | 2018-09-09T23:42:25 | HTML | UTF-8 | Python | false | false | 1,825 | py | from asyncore import dispatcher
from asynchat import async_chat
import socket, asyncore
port = 5005        # TCP port the chat server listens on
name = 'TestChat'  # server name shown in the welcome banner
class ChatSession(async_chat):
    """
    Handles the connection between the server and a single user.
    """
    def __init__(self, server, sock):
        # Standard setup tasks:
        async_chat.__init__(self, sock)
        self.server = server
        self.set_terminator("\r\n")
        self.data = []
        # Greet the user:
        self.push('Welcome to %s' % self.server.name)
    def collect_incoming_data(self, data):
        # Buffer incoming chunks until the line terminator arrives.
        self.data.append(data)
    def found_terminator(self):
        """
        Hitting the terminator means a whole line was read,
        so broadcast that line to everyone.
        """
        line = ''.join(self.data)
        self.data = []
        self.server.broadcast(line)
    def handle_close(self):
        # Detach from the server's session list when the socket closes.
        async_chat.handle_close(self)
        self.server.disconnect(self)
class ChatServer(dispatcher):
    """
    Accepts connections and creates sessions; also broadcasts to them.
    """
    def __init__(self, port, name):
        # Standard setup tasks:
        dispatcher.__init__(self)
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.set_reuse_addr()
        self.bind(('', port))
        self.listen(5)
        self.name = name
        self.sessions = []
    def disconnect(self, session):
        # Drop a closed session from the broadcast list.
        self.sessions.remove(session)
    def broadcast(self, line):
        # Push the line (re-terminated) to every connected session.
        for session in self.sessions:
            session.push(line + '\r\n')
    def handle_accept(self):
        # New inbound connection: wrap it in a ChatSession and track it.
        conn, addr = self.accept()
        self.sessions.append(ChatSession(self, conn))
if __name__ == '__main__':
    # Start the server and pump the asyncore event loop until Ctrl-C.
    s = ChatServer(port, name)
    try:
        asyncore.loop()
    except KeyboardInterrupt:
        print()
"247808311@qq.com"
] | 247808311@qq.com |
7159e600941904c0d1c03e507685ace64c206e55 | 8310be80fd8ac8ef164e9350b7f58bbb00161e07 | /Python_apps/Dojo_ORM/Dojo_Ninjas/apps/DN_app/apps.py | 73d9b88af16dac2f6b1bccc35cccd4be9ebcafa6 | [] | no_license | dkoom20/Dojo_projects | a5c582f2de321e36eb97b555f3b08987dcb57fb5 | 97bcd56d853f80632ec2b16588f5d32d399e69de | refs/heads/master | 2020-05-03T01:23:16.821953 | 2020-04-13T23:40:57 | 2020-04-13T23:40:57 | 178,335,966 | 0 | 0 | null | 2020-04-06T14:29:57 | 2019-03-29T05:06:14 | C# | UTF-8 | Python | false | false | 151 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class DnAppConfig(AppConfig):
    # Django application configuration; `name` is the dotted path Django uses
    # to locate this app ('DN_app').
    name = 'DN_app'
| [
"dkoom20@gmail.com"
] | dkoom20@gmail.com |
f230294c1cafff1fcef138b41536276f674af034 | 1710616222b4c9e1accb7880159fa449fb5438ef | /app/celery.py | 023c30ec9e185f0c7262e3d1db86aa9f5687a24b | [] | no_license | yassiommi/Shekarchi | 256b754c67017afe6db9dab2100c8db19e7b3e39 | 8a2ff69a4c0bf82d5696cb66b3cabe3a2ffd23c5 | refs/heads/master | 2020-04-12T23:13:27.817055 | 2018-12-26T12:35:23 | 2018-12-26T12:35:23 | 162,811,901 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 506 | py | from celery import Celery
# Celery application for the Shekarchi project: the local Redis instance is
# used as both the message broker and the result backend, and task modules
# are loaded from app.api and controller.
celery = Celery('Shekarchi', broker='redis://localhost:6379/0',
                backend='redis://localhost:6379/0',
                include=['app.api', 'controller'])
import logging
from celery.signals import after_setup_logger, after_setup_task_logger
def handle_logs(logger=None,loglevel=logging.DEBUG, **kwargs):
    """Celery logging-signal handler: attach the project-wide handler to
    *logger* and return it.

    NOTE(review): the logger=None default would raise AttributeError on
    logger.addHandler; in practice the after_setup_* signals pass a logger —
    confirm before calling this directly. `loglevel` is accepted from the
    signal but not used here.
    """
    # Deferred project-local import (module `common` defines the handler).
    from common import handler
    logger.addHandler(handler)
    return logger
# Register for both task-logger and base-logger setup so every Celery logger
# receives the shared handler.
after_setup_task_logger.connect(handle_logs)
after_setup_logger.connect(handle_logs)
| [
"yassi.ommi@gmail.com"
] | yassi.ommi@gmail.com |
c789dc1e93da4961b9621b7bb0212cfe1e5ad4fe | 88efda89a4aa923079e89ea53c4eaa13522d1bfe | /模块/函数处理/生成器.py | 6655143ebfd5910819ca4720fc95a69a949a575f | [] | no_license | gm332211/workspace_p3x | fab5132107863ae26ed079774d2c80883e187266 | 33c106cd069962df8e2ab99b552bfd9288530f1a | refs/heads/master | 2021-04-12T12:05:16.716708 | 2019-01-29T09:02:28 | 2019-01-29T09:02:28 | 126,682,989 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 648 | py | #coding=utf-8
#Author:gm
a=[i*2 for i in range(10)]# list comprehension: built eagerly, all values stored
b=(i*2 for i in range(10))# generator expression: values produced lazily
print(a)
print(b)
# Generator characteristics:
# pro: no up-front memory cost, values are produced quickly on demand
# con: cannot be sliced; only the current value is held — earlier values are gone
# Ways to drive a generator:
print(b.__next__())# resume only
print(b.send(1))# resume and also send a value into the generator
# A generator function: each `yield` suspends the frame and hands one value out.
def func(x):
    """Yield the integers 0, 1, ..., x-1, pausing at each yield."""
    for produced in range(x):
        # `yield` freezes the function's state (its resume point) until the
        # caller advances it again.
        yield produced
f=func(10)
# Execution runs until the first yield, the yielded value is printed, and the
# next resume continues from just after that yield.
print(f.__next__())
for i in (i*2 for i in range(10)):
    print(i)
"1172848156@qq.com"
] | 1172848156@qq.com |
5e27ad938993d694e6e4a8bc5c3c17e538ad3405 | 9440b127e5743c0f2d29a15936890432d43864b9 | /setup.py | 91654895ca7c177a16fb5ae2e8c82f82b1f35799 | [
"MIT"
] | permissive | olufekosamuel/scrumy | 2b8d3ef78ca73a37a55916dd9ee0b912c6886e98 | 8d93794bb04a3b3ec76ae41ae685f4e2c7b55730 | refs/heads/master | 2020-03-21T08:36:05.040862 | 2018-06-22T22:37:57 | 2018-06-22T22:37:57 | 138,353,471 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 445 | py | import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
setup(
name ='olufekoscrumy',
version='1.0',
packages=find_packages(),
include_package_data = True,
license = 'BSD Lincense',
description = 'Scrumy App;ication',
long_description = README,
author = 'Samuel Olufeko',
author_email = 'mololuwasamuel12@gmail.com',
) | [
"noreply@github.com"
] | noreply@github.com |
cd799ec1b3e0d73e7ad2cf47d2e6ec201cf3a548 | ef85e46a469a9a7f7e4a5f15293093f3cc092bfd | /webscrap_intro.py | 658435daf9d7b63b0b4432148a58797f0f5addcf | [] | no_license | tracine3636/Barebone-Webscrapping | 5ad85e5c9074f93e566f7b171be5fa7ee2b0f143 | ea4419c4faf0fe181c9be2d9f2180cb772d97a83 | refs/heads/main | 2023-01-30T07:38:27.616151 | 2020-12-05T23:51:41 | 2020-12-05T23:51:41 | 318,913,021 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 211 | py | import requests #pip3 install requests
import bs4 #beautifulsoup
res = requests.get('https://learncodeonline.in')
type(res) #this records the data from the following website
#then type the following
res.text
| [
"noreply@github.com"
] | noreply@github.com |
cc283b9b4b09f6bf5595826d7c51710a2bbd1948 | b72dbc51279d3e59cb6410367b671f8a956314c1 | /프로그래머스/그외/1844_게임맵 최단거리.py | 0aa107ad05fc1b98b72ecda8ab28d2ebc0eba2d7 | [] | no_license | ddobokki/coding-test-practice | 7b16d20403bb1714d97adfd1f47aa7d3ccd7ea4b | c88d981a1d43b986169f7884ff3ef1498e768fc8 | refs/heads/main | 2023-07-08T15:09:32.269059 | 2021-08-08T12:19:44 | 2021-08-08T12:19:44 | 344,116,013 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,399 | py | #https://programmers.co.kr/learn/courses/30/lessons/1844
from collections import deque
def solution(maps):
answer = 0
dx = [1, -1, 0, 0] # 순서대로 동, 서, 남, 북
dy = [0, 0, 1, -1]
visit = [[-1] * len(maps[0]) for _ in range(len(maps))] # 거리 측정 및, 방문을 확인하는 visit 배열
visit[0][0] = 1 # visit이 -1이면 아직 방문을 안했다는 뜻, 탐색이 끝나고도 도달 못하면 -1을 리턴해야하므로 -1로 초기화
q = deque([(0, 0)]) # 0,0에서 시작
while q:
x, y = q.popleft() # q에서 현재 좌표를 꺼낸다.
for i in range(4):
nx, ny = x + dx[i], y + dy[i] # 순서대로 동서남북의 좌표
if (0 <= nx < len(maps[0])) and (0 <= ny < len(maps)): # 각 루프마다 동서남북으로 갈수 있는 곳인지 확인
if (maps[ny][nx] == 1) and (visit[ny][nx] == -1):
# 갈수 있는 조건 -> 맵 밖이 아니고, visit하지 않았으며 맵이 1이어야 한다.
visit[ny][nx] = visit[y][x] + 1 # 현재 visit이 거리이므로 다음칸은 visit에 1을 더한값이 이동한 거리
q.append((nx, ny)) # 다음 좌표를 q에 삽입
return visit[-1][-1]
#map = [[1, 0, 1, 1, 1], [1, 0, 1, 0, 1], [1, 0, 1, 1, 1], [1, 1, 1, 0, 1], [0, 0, 0, 0, 1]]
#print(solution(map))
| [
"44228269+ddobokki@users.noreply.github.com"
] | 44228269+ddobokki@users.noreply.github.com |
ccce96391f179171085d48df2853759a97636189 | 13c599a48f0b596c314c7c570f47756fd97a2b92 | /media/capture/capture.gyp | b634f9566ebee0fbbfb564a38d48aab581a59d5b | [
"BSD-3-Clause"
] | permissive | qichanna/chromium | a5e3d44bda4bd6511e090e25263f5de94dbfe492 | 458d956db161377610486b7c82a95fc485f60b9b | refs/heads/master | 2022-11-13T00:50:48.147260 | 2016-08-01T23:23:16 | 2016-08-01T23:28:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,219 | gyp | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
'capture_sources': [
'capture_export.h',
'content/animated_content_sampler.cc',
'content/animated_content_sampler.h',
'content/capture_resolution_chooser.cc',
'content/capture_resolution_chooser.h',
'content/screen_capture_device_core.cc',
'content/screen_capture_device_core.h',
'content/smooth_event_sampler.cc',
'content/smooth_event_sampler.h',
'content/thread_safe_capture_oracle.cc',
'content/thread_safe_capture_oracle.h',
'content/video_capture_oracle.cc',
'content/video_capture_oracle.h',
'device_monitor_mac.h',
'device_monitor_mac.mm',
'system_message_window_win.cc',
'system_message_window_win.h',
'video/android/video_capture_device_android.cc',
'video/android/video_capture_device_android.h',
'video/android/video_capture_device_factory_android.cc',
'video/android/video_capture_device_factory_android.h',
'video/fake_video_capture_device.cc',
'video/fake_video_capture_device.h',
'video/fake_video_capture_device_factory.cc',
'video/fake_video_capture_device_factory.h',
'video/file_video_capture_device.cc',
'video/file_video_capture_device.h',
'video/file_video_capture_device_factory.cc',
'video/file_video_capture_device_factory.h',
'video/linux/v4l2_capture_delegate.cc',
'video/linux/v4l2_capture_delegate.h',
'video/linux/video_capture_device_chromeos.cc',
'video/linux/video_capture_device_chromeos.h',
'video/linux/video_capture_device_factory_linux.cc',
'video/linux/video_capture_device_factory_linux.h',
'video/linux/video_capture_device_linux.cc',
'video/linux/video_capture_device_linux.h',
'video/mac/video_capture_device_avfoundation_mac.h',
'video/mac/video_capture_device_avfoundation_mac.mm',
'video/mac/video_capture_device_decklink_mac.h',
'video/mac/video_capture_device_decklink_mac.mm',
'video/mac/video_capture_device_factory_mac.h',
'video/mac/video_capture_device_factory_mac.mm',
'video/mac/video_capture_device_mac.h',
'video/mac/video_capture_device_mac.mm',
'video/scoped_result_callback.h',
'video/video_capture_device.cc',
'video/video_capture_device.h',
'video/video_capture_device_factory.cc',
'video/video_capture_device_factory.h',
'video/video_capture_device_info.cc',
'video/video_capture_device_info.h',
'video/win/capability_list_win.cc',
'video/win/capability_list_win.h',
'video/win/filter_base_win.cc',
'video/win/filter_base_win.h',
'video/win/pin_base_win.cc',
'video/win/pin_base_win.h',
'video/win/sink_filter_observer_win.h',
'video/win/sink_filter_win.cc',
'video/win/sink_filter_win.h',
'video/win/sink_input_pin_win.cc',
'video/win/sink_input_pin_win.h',
'video/win/video_capture_device_factory_win.cc',
'video/win/video_capture_device_factory_win.h',
'video/win/video_capture_device_mf_win.cc',
'video/win/video_capture_device_mf_win.h',
'video/win/video_capture_device_win.cc',
'video/win/video_capture_device_win.h'
],
'capture_unittests_sources': [
'content/animated_content_sampler_unittest.cc',
'content/capture_resolution_chooser_unittest.cc',
'content/smooth_event_sampler_unittest.cc',
'content/video_capture_oracle_unittest.cc',
'system_message_window_win_unittest.cc',
'video/fake_video_capture_device_unittest.cc',
'video/mac/video_capture_device_factory_mac_unittest.mm',
'video/video_capture_device_unittest.cc'
],
# The following files lack appropriate platform suffixes.
'conditions': [
['OS=="linux" and use_udev==1', {
'capture_sources': [
'device_monitor_udev.cc',
'device_monitor_udev.h',
],
}],
],
},
'targets': [
{
# GN version: //media/capture
'target_name': 'capture',
'type': '<(component)',
'hard_dependency': 1,
'dependencies': [
'<(DEPTH)/base/base.gyp:base',
'<(DEPTH)/base/base.gyp:base_i18n',
'<(DEPTH)/media/media.gyp:media',
'<(DEPTH)/media/media.gyp:shared_memory_support', # For audio support.
'<(DEPTH)/media/mojo/interfaces/mojo_bindings.gyp:image_capture_mojo_bindings',
'<(DEPTH)/mojo/mojo_edk.gyp:mojo_system_impl',
'<(DEPTH)/mojo/mojo_public.gyp:mojo_cpp_bindings',
'<(DEPTH)/skia/skia.gyp:skia',
'<(DEPTH)/ui/gfx/gfx.gyp:gfx',
'<(DEPTH)/ui/gfx/gfx.gyp:gfx_geometry',
],
'defines': [
'CAPTURE_IMPLEMENTATION',
],
'include_dirs': [
'<(DEPTH)/',
],
'sources': [
'<@(capture_sources)'
],
'conditions': [
['OS=="android"', {
'dependencies': [
'capture_java',
'<(DEPTH)/media/capture/video/android'
],
}],
['OS=="mac"', {
'dependencies': [
'<(DEPTH)/third_party/decklink/decklink.gyp:decklink',
],
}],
['chromeos==1', {
'dependencies': [
'<(DEPTH)/ui/display/display.gyp:display',
],
}],
['OS=="linux" and use_udev==1', {
'dependencies': [
'<(DEPTH)/device/udev_linux/udev.gyp:udev_linux',
],
}],
['OS=="win"', {
'dependencies': [
'<(DEPTH)/media/media.gyp:mf_initializer',
],
# TODO(jschuh): http://crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [ 4267, ],
}],
],
},
{
# GN version: //media/capture:capture_unittests
'target_name': 'capture_unittests',
'type': '<(gtest_target_type)',
'include_dirs': [
'<(DEPTH)/',
],
'dependencies': [
'capture',
'<(DEPTH)/base/base.gyp:base',
'<(DEPTH)/base/base.gyp:run_all_unittests',
'<(DEPTH)/media/media.gyp:media',
'<(DEPTH)/media/mojo/interfaces/mojo_bindings.gyp:image_capture_mojo_bindings',
'<(DEPTH)/mojo/mojo_edk.gyp:mojo_system_impl',
'<(DEPTH)/mojo/mojo_public.gyp:mojo_cpp_bindings',
'<(DEPTH)/testing/gmock.gyp:gmock',
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(DEPTH)/ui/gfx/gfx.gyp:gfx',
'<(DEPTH)/ui/gfx/gfx.gyp:gfx_geometry',
'<(DEPTH)/ui/gfx/gfx.gyp:gfx_test_support',
],
'sources': [
'<@(capture_unittests_sources)'
],
'conditions': [
['OS=="android"', {
'dependencies': [
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
}],
['OS=="win"', {
'dependencies': [
'<(DEPTH)/media/media.gyp:mf_initializer',
],
# TODO(jschuh): http://crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [ 4267, ],
}],
], # conditions
},
],
'conditions': [
['test_isolation_mode != "noop"', {
'targets': [
{
# There's no GN equivalent to this.
'target_name': 'capture_unittests_run',
'type': 'none',
'dependencies': [
'capture_unittests',
],
'includes': [
'../../build/isolate.gypi',
],
'sources': [
'capture_unittests.isolate',
]
}
]
}],
['OS=="android"', {
'targets': [
{
'target_name': 'capture_java',
'type': 'none',
'dependencies': [
'/base/base.gyp:base',
'media_android_captureapitype',
'media_android_imageformat',
'video_capture_android_jni_headers',
],
'export_dependent_settings': [
'../base/base.gyp:base',
],
'variables': {
'java_in_dir': 'video/android/java',
},
'includes': ['../../build/java.gypi'],
},
{
'target_name': 'media_android_captureapitype',
'type': 'none',
'variables': {
'source_file': 'video/video_capture_device.h',
},
'includes': [ '../../build/android/java_cpp_enum.gypi' ],
},
{
'target_name': 'media_android_imageformat',
'type': 'none',
'variables': {
'source_file': 'video/android/video_capture_device_android.h',
},
'includes': [ '../../build/android/java_cpp_enum.gypi' ],
},
{
'target_name': 'video_capture_android_jni_headers',
'type': 'none',
'sources': [
'video/android/java/src/org/chromium/media/VideoCapture.java',
'video/android/java/src/org/chromium/media/VideoCaptureFactory.java',
],
'variables': {
'jni_gen_package': 'media',
},
'includes': ['../../build/jni_generator.gypi'],
},
{
# There's no GN equivalent to this.
'target_name': 'capture_unittests_apk',
'type': 'none',
'dependencies': [
'capture_java',
'capture_unittests',
],
'variables': {
'test_suite_name': 'capture_unittests',
},
'includes': ['../../build/apk_test.gypi'],
},
],
'conditions': [
['test_isolation_mode != "noop"', {
'targets': [
{
'target_name': 'capture_unittests_apk_run',
'type': 'none',
'dependencies': [
'capture_unittests_apk',
],
'includes': [
'../../build/isolate.gypi',
],
'sources': [
'capture_unittests_apk.isolate',
],
},
],
}],
],
}],
],
}
| [
"commit-bot@chromium.org"
] | commit-bot@chromium.org |
598036c5cbc32b195db0574e70bda866e86cfe51 | 4cf0a4df69549c0c813fdb5f1c651e6a97e8106d | /nudgebot/statistics/__init__.py | 825f32b9c7849afc8108a5903be3f0e5e9c1fbbb | [
"MIT"
] | permissive | RedHatQE/Nudgebot | d9ee6208b04df21b2fb198e631bf3e1eb60605cc | 4cf6cf1fe975a9002299f0460873c0f21bc8d414 | refs/heads/master | 2020-03-07T17:33:31.905560 | 2018-05-01T11:12:39 | 2018-05-01T14:10:45 | 127,614,644 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 250 | py | from functools import partial
from nudgebot.utils import collect_subclasses
from .base import Statistics
from . import github
collect_statistics_classes = partial(collect_subclasses, cls=Statistics, exclude=collect_subclasses(github, Statistics))
| [
"gshefer@redhat.com"
] | gshefer@redhat.com |
75cbe17b9e21f9d61fdbc637f95b2fb013bb08e2 | bf1a72d104b3ed3c5504abbc8946bb06f3777091 | /gym_novel_gridworlds/envs/__init__.py | 2cdedfbbb0840bc3295859156ead570fa6b511f6 | [] | no_license | shukla-yash/Low_fidelity_RL_project | aba93853b83d6001e976d4568fefdaa69d2d4d37 | 8f56d2557f8fec29199cd258556a60632fa35689 | refs/heads/main | 2023-02-03T01:55:45.858668 | 2020-12-16T18:41:46 | 2020-12-16T18:41:46 | 320,923,807 | 0 | 1 | null | 2020-12-21T23:15:09 | 2020-12-12T21:04:50 | Python | UTF-8 | Python | false | false | 82 | py | from gym_novel_gridworlds.envs.novel_gridworld_v0_env import NovelGridworldV0Env
| [
"yshukla@wpi.edu"
] | yshukla@wpi.edu |
9e54190950acc84485189e281118b49d3e7fe0b5 | e3174989e0622ce0cac3b17660a1d05ed2bac99a | /tempCodeRunnerFile.py | c26a85585f098ac3e6429e0c8c781446f4e79857 | [] | no_license | vipulsharma1204/project | bb607a047cd562c54cb19846e8cac09f0df7c26d | 13f2a4f2bdb0e4983572ad5d3ddeb4c99d858c38 | refs/heads/master | 2023-06-12T12:31:02.047308 | 2021-07-05T14:43:30 | 2021-07-05T14:43:30 | 372,617,611 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 39 | py | for i in range(9,-1,-1):
# print(i) | [
"sharmavipul817@gmail.com"
] | sharmavipul817@gmail.com |
c5be65667ea897a3b1195fc4a0a2b71215b22b8f | 179480a17cee927cefda3653297da57e840343b6 | /FilePython.py | 6e32a030481d1c5821c259ab7a693caa528d3bfb | [] | no_license | sarikasp/Email | b968596be52b5a62d4471b6c26ca45180f1ec9ff | bc7c0e1148c8c635160eb5ac7b03d533951ee7e4 | refs/heads/main | 2023-03-17T16:55:25.861584 | 2021-03-05T16:41:11 | 2021-03-05T16:41:11 | 344,870,523 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 355 | py | import sys, re
d = {}
for j in zip(*[re.split("\W+", open(sys.argv[1], 'r', encoding="utf-8").read().lower())[i:] for i in range(int(sys.argv[2]))]):
d[j] = d.get(j, 0) + 1
[print("".join(k)+""+str(d.get(k))+"\n")for k in sorted(d) if d.get(k) == int(sys.argv[3]) and re.search("^\w{"+sys.argv[4] + "}(.+)? \w {"+sys.argv[5]+"}$", "".join(k))]
| [
"pansaresarika98@gmail.com"
] | pansaresarika98@gmail.com |
86dde494b8f0e26a9330333684d216a6b0e648ac | 18f1867d348d48c185f529d81056a9070fa72645 | /Chapter 9/2-1-9-3 min_run.py | 9ea1ebb45231890c7698963f432b008184eaa2dd | [] | no_license | ParkSooYeong/2-1-Data-Structure | 600bbed9ad81b9394ce9e02940375c763aa90c35 | 91421827aab978c96aae9646b36b4c954f0448e9 | refs/heads/main | 2023-05-10T17:44:50.246178 | 2021-05-28T03:45:07 | 2021-05-28T03:45:07 | 341,569,672 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 203 | py | ### SKU CoE ITE - ParkSooYoung ###
### Grade 2 , Semester 1 , Chapter 9 , Number 3 ###
def min_run_len(N):
r = 0
while N >= 64:
r = r | (N & 1)
N >>= 1
return N + r
| [
"noreply@github.com"
] | noreply@github.com |
59539427e8a8ae0da55f6d20ad3d5fad4fbee857 | 8a730eab7e9cd6f721e7897da21f92436513a689 | /Read_mockra | 3dc3e5c3d47a5d42443e86dda71b7aa98ae4ca06 | [] | no_license | hugoas/mockra | 79265915a1809b1226efc5d12b7eb704aec64a14 | 2117a086fd7c7378967bf04f333739c88e8e760d | refs/heads/master | 2022-12-15T08:03:18.935290 | 2020-09-16T20:54:09 | 2020-09-16T20:54:09 | 292,933,186 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 792 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Sep 4 14:28:26 2020
@author: hugosouza
"""
import pandas as pd
import m_ockra
from sklearn.model_selection import train_test_split
from sklearn.metrics import roc_auc_score
#read csv file
dataset = pd.read_csv('bupa.csv', header=None)
rows, columns = dataset.shape
X = dataset.iloc[:, :columns-1]
y = dataset.values[:, -1]
#shape train and test
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)
#train m-OCKRA
print('Training classifier...')
clf = m_ockra.m_OCKRA()
clf.fit(X_train, y_train)
print('Classifier trained!')
print('Testing classifier...')
y_pred = clf.score_samples(X_test)
auc = roc_auc_score(y_test, y_pred)
print(f'Testing AUC: {auc if auc > .5 else 1 - auc}') | [
"hugo.souza82@gmail.com"
] | hugo.souza82@gmail.com | |
6fc4ec46dbb91bf68830b9ff84ab4c4ae12b5f74 | 19bf3030d14e205bce57e0ad7e6df4266300054e | /python_repo/QUBEXP/qexperiment.py | dabaade7cb35cfe4136c5e0031fec1c1f0ceed80 | [] | no_license | phani061989/RF | 739b891386bbeac9df339cd13055a39a5565f117 | 7c41b75d8375def56b30d3e9d0a6c8ae6acb6999 | refs/heads/main | 2023-04-12T03:50:08.351768 | 2021-05-06T07:26:15 | 2021-05-06T07:26:15 | 364,825,097 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 39,205 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Sep 11 16:00:48 2018
@author: User
"""
from .UtilityFunctions import *
import numpy as np
import DataModule as dm
import time
from UtilitiesLib import progressive_plot_2d,filetodictionary
import matplotlib.pyplot as plt
import IQCALIBRATION
from PULSE import Pulse,Sequence
class Experiment():
def __init__(self,CryoID,exp_dict=None):
if exp_dict is None:
self.exp_dict = dict(CryostatID=CryoID,
sgro_frequency = 6., #GHz
sgro_power = -40, #dBm
sgro_pulse_length= 1000, #ns
repetition_rate = 500, #mus
averages = 1000,
samples = 1000, #1 samples = 1 ns
sgro_pulse_delay = 0, #ns
dm_freq = 50, #MHz
LOpwr = 8 #dBm
)
else:
self.exp_dict = exp_dict
self.gen_dict = {}
#self.gen_pointers={}
self._dev_ids = ['sgro','sgLO','sgex','IQLO','AWG','DIG']
#------------------------------------------------------------------------------ Custom exceptions
class __BASEEXC(Exception):
pass
class _EXPEXC(__BASEEXC):
def __init__(self,Expression,Message):
self.Expression = Expression
self.Message = Message
#------------------------------------------------------------------------------- Parameters functions
def CryoID(self,CryoID=None):
if CryoID is None:
return self.exp_dict['CryostatID']
else:
if type(CryoID)!=str:
self._EXPEXC('CryoID must be a string\n','TypeError')
self.exp_dict['CryostatID']=CryoID
def list_dev_ids(self):
print(self._dev_ids)
def __check_dev_id(self,dev_id):
dev_id = dev_id#.lower()
try:
self._dev_ids.index(dev_id)
return dev_id
except ValueError:
self._dev_ids.append(dev_id)
print('dev_id inserted\n')
return dev_id
def assign_generator(self,dev_id,dev,**k):
dev_id=self.__check_dev_id(dev_id)
tmp = dict(devid=dev.id,channel=dev.ch)
#dev.power(-80) #safe
tmp.update({'device':dev})
tmp.update(**k)
self.gen_dict.update({dev_id:tmp})
def assign_digitizer(self,dev_id,dev,**k):
dev_id = self.__check_dev_id(dev_id)
if dev.id == 'ADQ14':
tmp = dict(devid=dev.id,boardnum=dev.devnum)
tmp.update(**k)
elif dev.id =='DIGKEY':
tmp = dict(devid='DIGKEY',slot=dev._slot,chassis=dev._chassis)
tmp.update(**k)
else:
print('WRONG digitizer ID: {}\n'.format(dev.id))
return
tmp.update({'device':dev})
self.gen_dict.update({dev_id:tmp})
def assign_awg(self,dev_id,dev,**k):
dev_id = self.__check_dev_id(dev_id)
if dev.id == 'AWGKEY':
tmp = dict(devid='AWGKEY',slot=dev._slot,chassis=dev._chassis)
tmp.update(**k)
else:
print('WRONG awg ID: {}\n'.format(dev.id))
return
tmp.update({'device':dev})
self.gen_dict.update({dev_id:tmp})
def _setup_rogen(self):
sgro = self.gen_dict['sgro']['device']
sgro.power(self.exp_dict['sgro_power'])
sgro.frequency(self.exp_dict['sgro_frequency'])
sgro.reference()
sgro.instr.alc(0)
delay = self.exp_dict['sgro_pulse_delay']
try:
delay += self.gen_dict['sgro']['trig_delay']
except:
pass
sgro.instr.pulse_triggered(1,self.exp_dict['sgro_pulse_length'],delay)
sgro.output(1)
sgLO = self.gen_dict['sgLO']['device']
sgLO.power(self.exp_dict['LOpwr'])
sgLO.reference()
try:
sgLO.instr.alc(1)
sgLO.instr.pulse_triggered(0)
except:
pass
sgLO.output(1)
return sgro,sgLO
def _setup_exgen(self,expid,mode='CW'):
sg = self.gen_dict[expid]['device']
sg.power(self.exp_dict[expid+'_power'])
try:
sg.frequency(self.exp_dict[expid+'_frequency'])
except:
sg.frequency(6.)
sg.reference()
if mode.upper() == 'CW':
try:
sg.instr.alc(1)
sg.instr.pulse_triggered(0,0,0)
except:
pass
elif mode.upper() == 'PULSE':
sg.instr.alc(0)
delay=self.exp_dict[expid+'_pulse_delay']
try:
delay += self.gen_dict[expid]['trig_delay']
except:
pass
sg.instr.pulse_triggered(1,self.exp_dict[expid+'_pulse_length'],delay)
else:
print('Wrong mode inserted\n')
raise TypeError
sg.output(1)
return sg
def _setup_dig(self):
tmp = self.gen_dict['DIG']
if tmp['devid']=='ADQ14':
adq = tmp['device']
adq.Trigger_mode('INT',np.round(self.exp_dict['repetition_rate']*1e-6,9))
adq.trigger_output_setup('INT','RISE')
delay = self.exp_dict['sgro_pulse_delay']
try:
delay += self.gen_dict['DIG']['trig_delay']
except:
pass
self.exp_dict['samples']=adq.Acquisition_setup(self.exp_dict['averages'],self.exp_dict['samples'],delay)
return adq
else:
print('Not implemented yet')
raise TypeError
def measure_wave(self,x,sgro,sgLO,adq,conversion=True,amp=True):
if x is not None:
sgro.frequency(np.round(x,9))
sgLO.frequency(np.round(x+self.exp_dict['dm_freq']/1000,9))
data = adq.Acquire_data(1,conversion)
if amp==True:
SF = adq.SF
ft = np.abs(np.fft.fft(data))/len(data)
index = int(len(data)*self.exp_dict['dm_freq']/1000/SF)
return 2*np.max(ft[index-2:index+2])
else:
return data
def test_readout(self,freq,engine='p'):
import bokeh.plotting as bp
sgro,sgLO = self._setup_rogen()
adq = self._setup_dig()
test = self.measure_wave(freq,sgro,sgLO,adq,0,0)
xaxis = np.linspace(0,1,len(test))
ft = np.abs(np.fft.fft(test))/len(test)
if engine=='p':
plt.plot(test)
plt.figure()
plt.plot(xaxis,ft)
elif engine=='b':
tools = ['box_zoom', 'pan', 'wheel_zoom', 'reset',
'save', 'hover']
fig1 = bp.figure(tools=tools,
height=300,
sizing_mode='scale_width',
title= 'Signal')
fig1.line(xaxis*1e4, test)
fig2 = bp.figure(tools=tools,
height=300,
sizing_mode='scale_width',
title= 'Fourier Transform')
fig2.line(xaxis*1e3,ft)
for fig in [fig1,fig2]:
bp.show(fig)
else:
print('Wrong engine selected: {p,b}')
def do_readout_sweep(self,freq_sweep,power=None):
if power != None:
self.exp_dict['sgro_power']=power
sgro,sgLO = self._setup_rogen()
adq = self._setup_dig()
tmp = np.ndarray(len(freq_sweep))
time_start = time.time()
temp_start, temp_start_time= read_temperature(self.CryoID())
try:
for i,f in enumerate(freq_sweep):
tmp[i]= self.measure_wave(f,sgro,sgLO,adq,1,1)*1e3
progressive_plot_2d(freq_sweep[:i],tmp[:i],'-o')
except KeyboardInterrupt:
freq_sweep = freq_sweep[:i]
tmp = tmp[:i]
#creating data module
data = dm.data_table((freq_sweep,tmp),('Frequency (GHz)','Amplitude (mV)'))
data.time_start = time_start
data.temp_start = temp_start
data.temp_start_time = temp_start_time
data.time_stop = time.time()
data.temp_stop,data.temp_stop_time= read_temperature(self.CryoID())
data.par= self.gen_pars()
return data
def do_spectroscopy_sweep(self,freq_sweep,pulse_length=None,devid='sgex',power=None,ro_delay=None):
if power != None:
self.exp_dict[devid+'_power']=power
if pulse_length is not None:
self.exp_dict[devid+'_pulse_length']=pulse_length
try:
self.exp_dict[devid+'_pulse_delay']
except KeyError:
print(devid+'_pulse_delay set to 0\n')
self.exp_dict[devid+'_pulse_delay']=0
sgex = self._setup_exgen(devid,'PULSE')
if ro_delay is None:
self.exp_dict['sgro_pulse_delay']=sgex.instr.pulse_delay()+sgex.instr.pulse_width()
else:
self.exp_dict['sgro_pulse_delay']=ro_delay
sgro,sgLO = self._setup_rogen()
adq = self._setup_dig()
tmp = np.ndarray(len(freq_sweep))
time_start = time.time()
temp_start, temp_start_time= read_temperature(self.CryoID())
try:
for i,f in enumerate(freq_sweep):
sgex.frequency(f)
tmp[i]= self.measure_wave(self.exp_dict['sgro_frequency'],sgro,sgLO,adq,1,1)*1e3
progressive_plot_2d(freq_sweep[:i],tmp[:i],'-o')
except KeyboardInterrupt:
freq_sweep = freq_sweep[:i]
tmp = tmp[:i]
#creating data module
data = dm.data_table((freq_sweep,tmp),('Frequency (GHz)','Amplitude (mV)'))
data.time_start = time_start
data.temp_start = temp_start
data.temp_start_time = temp_start_time
data.time_stop = time.time()
data.temp_stop,data.temp_stop_time= read_temperature(self.CryoID())
data.par= self.gen_pars()
return data
def do_calibration(self,freq_to_cal=None,awg_freq=123,qubit_id='q1', cal_amp=0.5,chI=0,SAID= 'SH',freq_sb = 'L', fitpoints=7,LO_pwr=13,show_steps=False):
"""Perform an IQ mixed calibration
pars:
- freq_to_cal = frequency to calibrate in GHz (def None = taken from the exp. dictionary)
- awg_freq = awg frequencty to be used in MHz
- qubit_id = if frequency is None (def), the qubit_id frequency will be taken
the calibration will be assigned to this qubit
- cal_amp = calibration amplitude in V (def 0.5)
- chI: channel number connected to the I mixer port (def 0)
NOTE:chQ = chI+1
- SAID = Spectrum analyzer ID: ['SH','RS'] (def 'SH')
- freq_sb = which sideband you want to calibrate: ['L','R'], (def 'L')
- fitpoint: number of points to add around the minimum to improve the fit
tradeoff between speed and quality (def 7)
- LO_pwr: LO generator power (def 13)
- show_steps: shows all the calibration plots (def False)
"""
if freq_to_cal is None:
freq_to_cal = self.exp_dict[qubit_id]['frequency']
awg = self.gen_dict['AWG']['device']
IQLO = self.gen_dict['IQLO']['device']
cal = IQCALIBRATION.IQCal_KEYAWG(awg,IQLO,SAID,AWG_channel_cal_amplitude=cal_amp)
cal.initialize_calibration([awg_freq,chI],freq_to_cal,freq_sb,LO_pwr)
cal.measure_SB(1,1)
cal.do_calibration(fitpoints,show_steps=show_steps,timeout=5,save=False)
bands=cal.measure_SB(1,1)
cal.calibration.calibration_dictionary['Calibration results']=bands
print(cal.calibration.calibration_dictionary['Calibration results'])
fpath='calibrations/{}V-cal_amp-freq-{}GHz-LO-{}GHz-AWG-{}MHz-{}'.format(cal_amp,freq_to_cal,cal.calibration.Sidebands()[1],awg_freq,qubit_id)
cal.save_calibration(fpath,True)
self.exp_dict[qubit_id].update({'cal_file_path':fpath})
cal.close_SA_connection()
def _setup_IQLO(self,IQLO,upmix_freq,cal_dict):
IQLO.reference()
IQLO.power(cal_dict['LO power'])
if cal_dict['Sideband'] == 'RSB':
IQLO.frequency(np.round(upmix_freq-cal_dict['AWG frequency']/1e3,9))
else:
IQLO.frequency(np.round(upmix_freq+cal_dict['AWG frequency']/1e3,9))
try:
IQLO.instr.alc(1)
IQLO.instr.pulse_triggered(0)
except:
pass
IQLO.output(1)
def do_amplitude_Rabi_experiment(self,amp_sweep,pulse_length,qubit_id='q1',autodelay=True):
awg = self.gen_dict['AWG']['device']
IQLO = self.gen_dict['IQLO']['device']
cal_dict = filetodictionary(self.exp_dict[qubit_id]['cal_file_path'])
chI = cal_dict['AWG chI']
awg.apply_correction(cal_dict,0.5) #arbitrary, to show waves if needed
#AWG setup
for i in [chI,chI+1]:
awg.mode(i,'AWG')
awg.modulation(i,0)
#ph_corr = cal_dict['Phase correction chQ']
#awg.register(0,int(awg_pulse_length/10+10))
#awg.register(1,QUBEXP.set_phase_in_reg_deg(270,ph_corr))
#awg.register(2,0)
#awg.register(3,QUBEXP.set_phase_in_reg_deg(270,ph_corr))
#awg.register(4,0)#awg_pulse_length+200) #safer
awg.clear_waves()
awg.clear_channel_queue(3<<chI)
p0ch0 = Pulse(awg.frequency(chI)/1e3,awg.phase(chI),Width= pulse_length,Wait=10,Shape='g',SF=awg.SF)
p0ch1 = Pulse(awg.frequency(chI+1)/1e3,awg.phase(chI+1),Width= pulse_length,Wait=10,Shape='g',SF=awg.SF)
awg.insert_array(p0ch0.generate(),'p0ch0')
awg.insert_array(p0ch1.generate(),'p0ch1')
#awg.load_waves_in_AWG_memory()
awg.queue_in_channel(chI,'p0ch0',6,Repetitions=0)
awg.queue_in_channel(chI+1,'p0ch1',6,Repetitions=0)
#setup LO
self._setup_IQLO(IQLO,self.exp_dict[qubit_id]['frequency'],cal_dict)
#setup readout
if autodelay:
self.exp_dict['sgro_pulse_delay']= pulse_length
sgro,sgLO = self._setup_rogen()
adq = self._setup_dig()
#measure
tmp = np.ndarray(len(amp_sweep))
time_start = time.time()
temp_start, temp_start_time= read_temperature(self.CryoID())
awg.start_multiple()
try:
for i,am in enumerate(amp_sweep):
awg.apply_correction(cal_dict,am)
tmp[i]= self.measure_wave(self.exp_dict['sgro_frequency'],sgro,sgLO,adq,1,1)*1e3
progressive_plot_2d(amp_sweep[:i],tmp[:i],'-o')
except KeyboardInterrupt:
amp_sweep = amp_sweep[:i]
tmp = tmp[:i]
awg.stop_multiple()
awg.stop_multiple()
#creating data module
data = dm.data_table((amp_sweep,tmp),('AWG Amplitude (V)','Transmitted Amplitude (mV)'))
data.time_start = time_start
data.temp_start = temp_start
data.temp_start_time = temp_start_time
data.time_stop = time.time()
data.temp_stop,data.temp_stop_time= read_temperature(self.CryoID())
data.par= self.gen_pars()
data.insert_par(awg_pulse_length = pulse_length, cal_dict = cal_dict)
return data
def _change_ro_delay(self,tau):
sgro = self.gen_dict['sgro']['device']
adq = self.gen_dict['DIG']['device']
delay = self.exp_dict['sgro_pulse_delay']
try:
delay += self.gen_dict['sgro']['trig_delay']
except:
pass
sgro.instr.pulse_triggered(1,self.exp_dict['sgro_pulse_length'],delay+tau)
delay = self.exp_dict['sgro_pulse_delay']
try:
delay += self.gen_dict['DIG']['trig_delay']
except:
pass
self.exp_dict['samples']=adq.Acquisition_setup(self.exp_dict['averages'],self.exp_dict['samples'],delay+tau)
def do_T1_experiment(self,time_sweep,qubit_id='q1',autodelay=True):
awg = self.gen_dict['AWG']['device']
IQLO = self.gen_dict['IQLO']['device']
cal_dict = filetodictionary(self.exp_dict[qubit_id]['cal_file_path'])
chI = cal_dict['AWG chI']
#AWG setup
awg.apply_correction(cal_dict,self.exp_dict[qubit_id]['pi_pulse_amp']) #arbitrary, to show waves if needed
for i in [chI,chI+1]:
awg.mode(i,'AWG')
awg.modulation(i,0)
#ph_corr = cal_dict['Phase correction chQ']
#awg.register(0,int(awg_pulse_length/10+10))
#awg.register(1,QUBEXP.set_phase_in_reg_deg(270,ph_corr))
#awg.register(2,0)
#awg.register(3,QUBEXP.set_phase_in_reg_deg(270,ph_corr))
#awg.register(4,0)#awg_pulse_length+200) #safer
awg.autoset=0
awg.clear_waves()
awg.clear_channel_queue(3<<chI)
pulse_length = self.exp_dict[qubit_id]['pulse_length']
p0ch0 = Pulse(awg.frequency(chI)/1e3,awg.phase(chI),Width= pulse_length,Wait=10,Shape='g',SF=awg.SF)
p0ch1 = Pulse(awg.frequency(chI+1)/1e3,awg.phase(chI+1),Width= pulse_length,Wait=10,Shape='g',SF=awg.SF)
awg.insert_array(p0ch0.generate(),'p0ch0')
awg.insert_array(p0ch1.generate(),'p0ch1')
#awg.load_waves_in_AWG_memory()
awg.queue_in_channel(chI,'p0ch0',6,Repetitions=0)
awg.queue_in_channel(chI+1,'p0ch1',6,Repetitions=0)
awg.set_channel(load_waves_in_memory=True)
awg.autoset=1
#setup LO
self._setup_IQLO(IQLO,self.exp_dict[qubit_id]['frequency'],cal_dict)
#setup readout
if autodelay:
self.exp_dict['sgro_pulse_delay']= pulse_length
sgro,sgLO = self._setup_rogen()
adq = self._setup_dig()
#measure
tmp = np.ndarray(len(time_sweep))
time_start = time.time()
temp_start, temp_start_time= read_temperature(self.CryoID())
awg.start_multiple()
try:
for i,tau in enumerate(time_sweep):
self._change_ro_delay(tau)
tmp[i]= self.measure_wave(self.exp_dict['sgro_frequency'],sgro,sgLO,adq,1,1)*1e3
progressive_plot_2d(time_sweep[:i],tmp[:i],'-o')
except KeyboardInterrupt:
time_sweep = time_sweep[:i]
tmp = tmp[:i]
awg.stop_multiple()
self._change_ro_delay(0)
#creating data module
data = dm.data_table((time_sweep+pulse_length/2,tmp),('Pulses delay (ns)','Transmitted Amplitude (mV)'))
data.time_start = time_start
data.temp_start = temp_start
data.temp_start_time = temp_start_time
data.time_stop = time.time()
data.temp_stop,data.temp_stop_time= read_temperature(self.CryoID())
data.par= self.gen_pars()
data.insert_par(awg_pulse_length = pulse_length, cal_dict = cal_dict)
return data
    def do_T2_experiment(self,time_sweep,osc_period,qubit_id='q1',autodelay=True):
        """Run a Ramsey (T2*) experiment: two pi/2 pulses separated by a swept delay.

        time_sweep -- iterable of delays tau (ns) between the two pi/2 pulses
        osc_period -- artificial-detuning period (ns); if > 0 an extra phase of
                      360/osc_period degrees per ns of tau is added to the
                      second pulse so the Ramsey fringes oscillate at a known rate
        qubit_id   -- key into self.exp_dict selecting the qubit calibration
        autodelay  -- if True, set the readout delay to cover both pulses

        Returns a dm.data_table of transmitted amplitude (mV) vs pulse delay (ns),
        annotated with start/stop times, cryostat temperatures and instrument
        parameters.  A KeyboardInterrupt stops the sweep early and returns the
        points acquired so far.
        """
        awg = self.gen_dict['AWG']['device']
        IQLO = self.gen_dict['IQLO']['device']
        cal_dict = filetodictionary(self.exp_dict[qubit_id]['cal_file_path'])
        chI = cal_dict['AWG chI']  # AWG channel carrying I; Q is chI+1
        #AWG setup
        awg.apply_correction(cal_dict,self.exp_dict[qubit_id]['pi_pulse_amp']) #arbitrary, to show waves if needed
        for i in [chI,chI+1]:
            awg.mode(i,'AWG')
            awg.modulation(i,0)
        awg.clear_waves()
        awg.clear_channel_queue(3<<chI)  # bitmask selecting channels chI and chI+1
        pulse_length = self.exp_dict[qubit_id]['pulse_length']
        # First pi/2 pulse on each quadrature (Amplitude=0.5 -> half of a pi pulse).
        p0ch0 = Pulse(awg.frequency(chI)/1e3,awg.phase(chI),Width= pulse_length,Wait=10,Shape='g',SF=awg.SF,Amplitude=0.5)
        p0ch1 = Pulse(awg.frequency(chI+1)/1e3,awg.phase(chI+1),Width= pulse_length,Wait=10,Shape='g',SF=awg.SF,Amplitude=0.5)
        awg.insert_array(p0ch0.generate(),'p0ch0')
        awg.insert_array(p0ch1.generate(),'p0ch1')
        def load_waves(tau,osc_period):
            # Rebuild the second pi/2 pulse for delay tau and requeue both pulses.
            awg.autoset=0  # suppress automatic channel reconfiguration while editing waves
            awg.remove_wave_by_ID('p1ch0')
            awg.remove_wave_by_ID('p1ch1')
            # Phase accumulated by the rotating frame during pulse + 10 ns wait + tau ...
            new_phase = np.degrees(2*np.pi*awg.frequency(chI)/1e3*(pulse_length+10+tau))
            if osc_period>0:
                # ... plus the artificial-detuning phase ramp.
                new_phase += 360/osc_period*tau
            p1ch0 = Pulse(awg.frequency(chI)/1e3,awg.phase(chI)+new_phase,Width= pulse_length,Wait=10,Shape='g',SF=awg.SF,Amplitude=0.5)
            p1ch1 = Pulse(awg.frequency(chI+1)/1e3,awg.phase(chI+1)+new_phase,Width= pulse_length,Wait=10,Shape='g',SF=awg.SF,Amplitude=0.5)
            awg.insert_array(p1ch0.generate(),'p1ch0')
            awg.insert_array(p1ch1.generate(),'p1ch1')
            awg.clear_channel_queue(3<<chI)
            # Queue first pulse (trigger mode 6), then the second delayed by tau.
            awg.queue_in_channel(chI,'p0ch0',6,Repetitions=1)
            awg.queue_in_channel(chI+1,'p0ch1',6,Repetitions=1)
            awg.queue_in_channel(chI,'p1ch0',0,Delay=tau)
            awg.queue_in_channel(chI+1,'p1ch1',0,Delay=tau)
            awg.autoset=1
            awg.set_channel(load_waves_in_memory=True)
        load_waves(0,0)
        #setup LO
        self._setup_IQLO(IQLO,self.exp_dict[qubit_id]['frequency'],cal_dict)
        #setup readout
        if autodelay:
            # Readout starts after both pi/2 pulses (pulse + 10 ns wait each).
            self.exp_dict['sgro_pulse_delay']= 2*(pulse_length+10)
        sgro,sgLO = self._setup_rogen()
        adq = self._setup_dig()
        #measure
        tmp = np.ndarray(len(time_sweep))
        #data = dm.data_table((time_sweep,tmp),('Pulses delay (ns)','Transmitted Amplitude (mV)'))
        time_start = time.time()
        temp_start, temp_start_time= read_temperature(self.CryoID())
        awg.start_multiple(0xf)
        try:
            for i,tau in enumerate(time_sweep):
                load_waves(tau,osc_period)#loading waves stop the AWG
                awg.start_multiple()
                self._change_ro_delay(tau)
                #time.sleep(0.01)
                tmp[i]= self.measure_wave(self.exp_dict['sgro_frequency'],sgro,sgLO,adq,1,1)*1e3
                progressive_plot_2d(time_sweep[:i]+(pulse_length+10),tmp[:i],'-o')
        except KeyboardInterrupt:
            # User aborted: keep the points measured so far.
            time_sweep = time_sweep[:i]
            tmp = tmp[:i]
        awg.stop_multiple()
        self._change_ro_delay(0)
        #creating data module
        data = dm.data_table((time_sweep+pulse_length+10,tmp),('Pulses delay (ns)','Transmitted Amplitude (mV)'))
        data.time_start = time_start
        data.temp_start = temp_start
        data.temp_start_time = temp_start_time
        data.time_stop = time.time()
        data.temp_stop,data.temp_stop_time= read_temperature(self.CryoID())
        data.par= self.gen_pars()
        data.insert_par(awg_pulse_length = pulse_length, cal_dict = cal_dict,osc_period=osc_period)
        return data
    def do_time_Rabi_experiment(self,amp,pulse_len_sweep,qubit_id='q1'):
        """Time-Rabi experiment: drive with a single Gaussian pulse of swept length.

        amp             -- drive amplitude of the Rabi pulse
        pulse_len_sweep -- iterable of pulse widths (ns) to sweep
        qubit_id        -- key into self.exp_dict selecting the qubit calibration

        Returns a dm.data_table of transmitted amplitude (mV) vs pulse length (ns).
        A KeyboardInterrupt aborts the sweep early, keeping the points taken so far.
        """
        awg = self.gen_dict['AWG']['device']
        IQLO = self.gen_dict['IQLO']['device']
        cal_dict = filetodictionary(self.exp_dict[qubit_id]['cal_file_path'])
        chI = cal_dict['AWG chI']
        #awg.apply_correction(cal_dict,amp) #arbitrary, to show waves if needed
        #AWG setup
        for i in [chI,chI+1]:
            awg.mode(i,'AWG')
            awg.modulation(i,0)
        def rabi_block(t):
            # Load a single Gaussian drive pulse of width t and return its total width.
            time_pulse=Pulse(Frequency=awg.frequency(chI)/1e3, Phase=0.0, Shape='g', Width=t, SF=awg.SF, ID='time_pulse')
            pulselist=Sequence([time_pulse])
            awg.insert_sequence_object_IQ(
                seq=pulselist,
                cal_dict=cal_dict,
                amplitude=amp,
                clear_channels_queue=True,
            )
            """
            dummy = np.hstack((np.ones(100),np.zeros(10)))
            awg.clear_channel_queue(1<<2)
            awg.insert_array(dummy,'dummy')
            awg.queue_in_channel(2,'dummy',6)
            """
            return pulselist.total_width()
        #setup LO
        self._setup_IQLO(IQLO,self.exp_dict[qubit_id]['frequency'],cal_dict)
        #setup readout
        sgro,sgLO = self._setup_rogen()
        adq = self._setup_dig()
        #measure
        tmp = np.ndarray(len(pulse_len_sweep))
        #data = dm.data_table((time_sweep,tmp),('Pulses delay (ns)','Transmitted Amplitude (mV)'))
        time_start = time.time()
        temp_start, temp_start_time= read_temperature(self.CryoID())
        #awg.start_multiple(0xf)
        try:
            for i,tau in enumerate(pulse_len_sweep):
                tot_length=rabi_block(tau)#loading waves stops the AWG
                self._change_ro_delay(tot_length)
                print(tot_length)
                awg.start_multiple()
                #time.sleep(0.01)
                tmp[i]= self.measure_wave(self.exp_dict['sgro_frequency'],sgro,sgLO,adq,1,1)*1e3
                progressive_plot_2d(pulse_len_sweep[:i],tmp[:i],'-o')
        except KeyboardInterrupt:
            # User aborted: keep the points measured so far.
            pulse_len_sweep = pulse_len_sweep[:i]
            tmp = tmp[:i]
        awg.stop_multiple()
        self._change_ro_delay(0)
        #creating data module
        #data = dm.data_table((pulse_len_sweep+self.exp_dict['sgro_pulse_delay']-self.exp_dict[qubit_id]['pulse_length'],tmp),('Pulses delay (ns)','Transmitted Amplitude (mV)'))
        data = dm.data_table((pulse_len_sweep,tmp),('Pulse length (ns)','Transmitted Amplitude (mV)'))
        data.time_start = time_start
        data.temp_start = temp_start
        data.temp_start_time = temp_start_time
        # NOTE(review): unlike the T1/T2 experiments, data.time_stop is never
        # recorded here — confirm whether that is intentional.
        data.temp_stop,data.temp_stop_time= read_temperature(self.CryoID())
        data.par= self.gen_pars()
        data.insert_par(amp = amp, cal_dict = cal_dict)
        return data
    def do_Spinlock_experiment(self,time_sweep,rab_amp,qubit_id='q1',df=0): #df in MHz
        """Spin-locking experiment: pi/2 — locking drive of swept duration — pi/2.

        time_sweep -- iterable of locking-pulse durations t (ns); t == 0 plays
                      only the two pi/2 pulses back to back
        rab_amp    -- amplitude of the locking (Rabi) drive
        qubit_id   -- key into self.exp_dict selecting the qubit calibration
        df         -- detuning of the locking drive, in MHz

        Returns a dm.data_table of transmitted amplitude (mV) vs pulse delay (ns).
        A KeyboardInterrupt aborts the sweep early, keeping the points taken so far.
        """
        awg = self.gen_dict['AWG']['device']
        IQLO = self.gen_dict['IQLO']['device']
        cal_dict = filetodictionary(self.exp_dict[qubit_id]['cal_file_path'])
        chI = cal_dict['AWG chI']
        awg.apply_correction(cal_dict,1.)
        for i in [chI,chI+1]:
            awg.mode(i,'AWG')
            awg.modulation(i,0)
        # Half-pi pulse parameters derived from the calibrated pi pulse.
        hpi_amp=(self.exp_dict[qubit_id]['pi_pulse_amp'])/2
        hpi_len=self.exp_dict[qubit_id]["pulse_length"]
        awg.clear_waves()
        def block(rab_amp,t,df):
            # Build hpi — (locking pulse at -90 deg, detuned by df) — hpi,
            # load it into the AWG and return the sequence's total width.
            hpi1=Pulse(Frequency=awg.frequency(chI)/1e3, Phase=0.0, Shape='g', Width=hpi_len, Amplitude=hpi_amp, SF=awg.SF, ID='hpi1')
            hpi2=Pulse(Frequency=awg.frequency(chI)/1e3, Phase=0.0, Shape='g', Width=hpi_len, Amplitude=hpi_amp, SF=awg.SF, ID='hpi2')
            if t==0:
                pulselist=Sequence([hpi1,hpi2])
            else:
                rab=Pulse(Frequency=awg.frequency(chI)/1e3 +df/1e3, Phase=-90, Shape='g', Width=t, Amplitude=rab_amp, SF=awg.SF, ID='rab')
                pulselist=Sequence([hpi1,rab,hpi2])
            awg.insert_sequence_object_IQ(
                seq=pulselist,
                cal_dict=cal_dict,
                amplitude=1.,
                clear_channels_queue=True,
            )
            """
            dummy = np.hstack((np.ones(100),np.zeros(10)))
            awg.clear_channel_queue(1<<2)
            awg.insert_array(dummy,'dummy')
            awg.queue_in_channel(2,'dummy',6)
            """
            return pulselist.total_width()
        #setup LO
        self._setup_IQLO(IQLO,self.exp_dict[qubit_id]['frequency'],cal_dict)
        #setup readout
        sgro,sgLO = self._setup_rogen()
        adq = self._setup_dig()
        #measure
        tmp = np.ndarray(len(time_sweep))
        #data = dm.data_table((time_sweep,tmp),('Pulses delay (ns)','Transmitted Amplitude (mV)'))
        time_start = time.time()
        temp_start, temp_start_time= read_temperature(self.CryoID())
        #awg.start_multiple(0xf)
        try:
            for i,tau in enumerate(time_sweep):
                tot_length=block(rab_amp,tau,df)#loading waves stops the AWG
                self._change_ro_delay(tot_length)
                print(tot_length)
                awg.start_multiple()
                #time.sleep(0.01)
                tmp[i]= self.measure_wave(self.exp_dict['sgro_frequency'],sgro,sgLO,adq,1,1)*1e3
                progressive_plot_2d(time_sweep[:i],tmp[:i],'-o')
        except KeyboardInterrupt:
            # User aborted: keep the points measured so far.
            time_sweep = time_sweep[:i]
            tmp = tmp[:i]
        awg.stop_multiple()
        self._change_ro_delay(0)
        #creating data module
        #data = dm.data_table((time_sweep+self.exp_dict['sgro_pulse_delay']-self.exp_dict[qubit_id]['pulse_length'],tmp),('Pulses delay (ns)','Transmitted Amplitude (mV)'))
        data = dm.data_table((time_sweep ,tmp),('Pulses delay (ns)','Transmitted Amplitude (mV)'))
        data.time_start = time_start
        data.temp_start = temp_start
        data.temp_start_time = temp_start_time
        # NOTE(review): data.time_stop is not recorded here, unlike T1/T2 — confirm.
        data.temp_stop,data.temp_stop_time= read_temperature(self.CryoID())
        data.par= self.gen_pars()
        data.insert_par(hpi_len = hpi_len, cal_dict = cal_dict,rab_amp=rab_amp)
        return data
def do_check_pulse(self,amp_pulse,len_pulse,N_sweep,qubit_id='q1'): #df in MHz
awg = self.gen_dict['AWG']['device']
IQLO = self.gen_dict['IQLO']['device']
cal_dict = filetodictionary(self.exp_dict[qubit_id]['cal_file_path'])
chI = cal_dict['AWG chI']
awg.apply_correction(cal_dict,1.)
for i in [chI,chI+1]:
awg.mode(i,'AWG')
awg.modulation(i,0)
awg.clear_waves()
def pulse(amp_pulse,len_pulse,N):
check=Pulse(Frequency=awg.frequency(chI)/1e3, Phase=0.0, Shape='g', Width=len_pulse, Amplitude=1., SF=awg.SF, ID='check')
list1=[check for i in range(N)]
pulselist=Sequence(list1)
awg.insert_sequence_object_IQ(
seq=pulselist,
cal_dict=cal_dict,
amplitude=amp_pulse,
clear_channels_queue=True,
)
"""
dummy = np.hstack((np.ones(100),np.zeros(10)))
awg.clear_channel_queue(1<<2)
awg.insert_array(dummy,'dummy')
awg.queue_in_channel(2,'dummy',6)
"""
return pulselist.total_width()
#setup LO
self._setup_IQLO(IQLO,self.exp_dict[qubit_id]['frequency'],cal_dict)
#setup readout
sgro,sgLO = self._setup_rogen()
adq = self._setup_dig()
#measure
tmp = np.ndarray(len(N_sweep))
#data = dm.data_table((time_sweep,tmp),('Pulses delay (ns)','Transmitted Amplitude (mV)'))
time_start = time.time()
temp_start, temp_start_time= read_temperature(self.CryoID())
#awg.start_multiple(0xf)
try:
for i,nau in enumerate(N_sweep):
tot_length=pulse(amp_pulse,len_pulse,nau)#loading waves stops the AWG
awg.start_multiple()
self._change_ro_delay(tot_length)
#time.sleep(0.01)
tmp[i]= self.measure_wave(self.exp_dict['sgro_frequency'],sgro,sgLO,adq,1,1)*1e3
progressive_plot_2d(N_sweep[:i],tmp[:i],'-o')
except KeyboardInterrupt:
N_sweep = N_sweep[:i]
tmp = tmp[:i]
awg.stop_multiple()
self._change_ro_delay(0)
#creating data module
data = dm.data_table((N_sweep,tmp),('Pulses number','Transmitted Amplitude (mV)'))
data.time_start = time_start
data.temp_start = temp_start
data.temp_start_time = temp_start_time
data.temp_stop,data.temp_stop= read_temperature(self.CryoID())
data.par= self.gen_pars()
data.insert_par(amp_pulse = amp_pulse, cal_dict = cal_dict, len_pulse=len_pulse)
return data
    def do_TEcho_experiment(self,time_sweep,osc_period,N_pulses=1,qubit_id='q1',autodelay=True):
        """Hahn-echo / CPMG experiment: pi/2 — N evenly spaced pi pulses — pi/2.

        time_sweep -- iterable of total free-evolution times tau (ns); each tau
                      must make tau/N_pulses and tau/(2*N_pulses) multiples of 10 ns
        osc_period -- artificial-detuning period (ns); if > 0 the final pi/2 pulse
                      gets 360/osc_period degrees of extra phase per ns of tau
        N_pulses   -- number of refocusing pi pulses (CPMG order), >= 1
        qubit_id   -- key into self.exp_dict selecting the qubit calibration
        autodelay  -- if True, derive the readout delay from the pulse-train length

        Returns a dm.data_table of transmitted amplitude (mV) vs pulse delay (ns).
        A KeyboardInterrupt aborts the sweep early, keeping the points taken so far.
        """
        awg = self.gen_dict['AWG']['device']
        IQLO = self.gen_dict['IQLO']['device']
        cal_dict = filetodictionary(self.exp_dict[qubit_id]['cal_file_path'])
        chI = cal_dict['AWG chI']
        #AWG setup
        awg.apply_correction(cal_dict,self.exp_dict[qubit_id]['pi_pulse_amp']) #arbitrary, to show waves if needed
        for i in [chI,chI+1]:
            awg.mode(i,'AWG')
            awg.modulation(i,0)
        pulse_length = self.exp_dict[qubit_id]['pulse_length']
        pi_pulse_amp = self.exp_dict[qubit_id]['pi_pulse_amp']
        def load_waves(tau,osc_period,N_pulses):
            # Rebuild the whole echo sequence for free-evolution time tau.
            if N_pulses<1:
                print('N must be larger than 1\n')
                raise ValueError
            # Inter-pulse delays must land on the AWG's 10 ns granularity.
            if ((tau/N_pulses) % 10)!= 0 or ((tau/N_pulses/2) % 10)!=0:
                print('Time steps must be a multiple of 10 ns\n')
                raise ValueError
            awg.clear_waves()
            #hpi-pulse
            hpI = Pulse(awg.frequency(chI)/1e3,0,Width= pulse_length,Wait=10,Shape='g',SF=awg.SF,Amplitude=0.5,ID='hpi1')
            pI = Pulse(awg.frequency(chI)/1e3,0,Width= pulse_length,Delay=tau/2/N_pulses,Wait=10,Shape='g',SF=awg.SF,Amplitude=1,ID='pi1')
            if osc_period>0:
                new_phase=360/osc_period*tau
            else:
                new_phase=0
            # Final pi/2 pulse, optionally phase-ramped for artificial detuning.
            hpI2 = Pulse(awg.frequency(chI)/1e3,new_phase,Delay=tau/2/N_pulses,Width= pulse_length,Wait=10,Shape='g',SF=awg.SF,Amplitude=0.5,ID='hpi2')
            #pi-pulse
            seq = Sequence([hpI,pI])
            if N_pulses>1:
                # Additional refocusing pi pulses spaced tau/N_pulses apart (CPMG).
                pIN = Pulse(awg.frequency(chI)/1e3,0,Width= pulse_length,Delay=tau/N_pulses,Wait=10,Shape='g',SF=awg.SF,Amplitude=1,ID='piN')
                for i in range(N_pulses-1):
                    seq.sequence().append(pIN)
            seq.sequence().append(hpI2)
            awg.insert_sequence_object_IQ(seq,cal_dict,pi_pulse_amp,True)
            #awg.load_waves_in_AWG_memory()
            #return total_length
        #setup LO
        self._setup_IQLO(IQLO,self.exp_dict[qubit_id]['frequency'],cal_dict)
        #setup readout
        if autodelay:
            # One pi/2 + N pi pulses + one pi/2, each pulse_length + 10 ns wait.
            self.exp_dict['sgro_pulse_delay']= (self.exp_dict[qubit_id]['pulse_length']+10)*(N_pulses+2)
        sgro,sgLO = self._setup_rogen()
        adq = self._setup_dig()
        #measure
        tmp = np.ndarray(len(time_sweep))
        #data = dm.data_table((time_sweep,tmp),('Pulses delay (ns)','Transmitted Amplitude (mV)'))
        time_start = time.time()
        temp_start, temp_start_time= read_temperature(self.CryoID())
        #awg.start_multiple(0xf)
        try:
            for i,tau in enumerate(time_sweep):
                load_waves(tau,osc_period,N_pulses)#loading waves stops the AWG
                awg.start_multiple()
                self._change_ro_delay(tau)
                #time.sleep(0.01)
                tmp[i]= self.measure_wave(self.exp_dict['sgro_frequency'],sgro,sgLO,adq,1,1)*1e3
                progressive_plot_2d(time_sweep[:i]+self.exp_dict['sgro_pulse_delay']-self.exp_dict[qubit_id]['pulse_length'],tmp[:i],'-o')
        except KeyboardInterrupt:
            # User aborted: keep the points measured so far.
            time_sweep = time_sweep[:i]
            tmp = tmp[:i]
        awg.stop_multiple()
        self._change_ro_delay(0)
        #creating data module
        data = dm.data_table((time_sweep+self.exp_dict['sgro_pulse_delay']-self.exp_dict[qubit_id]['pulse_length'],tmp),('Pulses delay (ns)','Transmitted Amplitude (mV)'))
        data.time_start = time_start
        data.temp_start = temp_start
        data.temp_start_time = temp_start_time
        data.temp_stop,data.temp_stop_time= read_temperature(self.CryoID())
        data.par= self.gen_pars()
        data.insert_par(awg_pulse_length = pulse_length, cal_dict = cal_dict,osc_period=osc_period,N_pulses=N_pulses)
        return data
def gen_pars(self,dev_id=None):
if dev_id is None:
full={}
for k in self.gen_dict:
dev = self.gen_dict[k]['device']
try:
tmp = dev.pars_dict
except:
tmp = dev.parameters_dictionary()
full.update({k:tmp})
return full
dev = self.gen_dict[dev_id]['device']
try:
tmp = dev.pars_dict
except:
tmp = dev.parameters_dictionary()
return tmp
def close_inst_connection(self):
for d in self.gen_dict:
try:
self.gen_dict[d]['device'].close_driver()
except AttributeError:
try:
self.gen_dict[d]['device'].Disconnect()
except:
pass
| [
"muppalla.phaniraja@gmail.com"
] | muppalla.phaniraja@gmail.com |
a7f8d57ddb57e6c3f39dbb2124b6aab55c3e00ed | a6a03f3ffe7e8516bd8b442ea662ad74052f5688 | /jobs/examples/ex5_copy_to_redshift_job.py | 27a7aa9a10389cdea463c4d9b1346cbcacf49b08 | [] | no_license | tiredoftools/pyspark_aws_etl | 9e2bc507ff75ad76015ef23174a8c8c095d7f3e4 | a554f1991b091760c0ee4402ddc45151cf673290 | refs/heads/master | 2022-12-04T05:55:28.188860 | 2020-08-23T16:13:49 | 2020-08-23T16:13:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 688 | py | from core.etl_utils import ETL_Base, Commandliner
from sqlalchemy import types
class Job(ETL_Base):
    """Count events per session for sessions with a successful search.

    Joins ``some_events`` to ``other_events`` on ``session_id`` and keeps
    sessions whose action is ``searchResultPage`` with at least one result.
    ``OUTPUT_TYPES`` declares the SQL column types used when copying the
    result to Redshift.
    """
    OUTPUT_TYPES = {
        'session_id': types.VARCHAR(16),
        'count_events': types.INT(),
    }

    def transform(self, some_events, other_events):
        # The ETL framework registers the input datasets as SQL-queryable views.
        df = self.query("""
            SELECT se.session_id, count(*) as count_events
            FROM some_events se
            JOIN other_events oe on se.session_id=oe.session_id
            WHERE se.action='searchResultPage' and se.n_results>0
            group by se.session_id
            order by count(*) desc
            """)
        return df
if __name__ == "__main__":
    # Commandliner parses the command line and runs the job; aws_setup selects
    # the AWS credentials profile.
    Commandliner(Job, aws_setup='perso')
| [
"aprevot@thehotelsnetwork.com"
] | aprevot@thehotelsnetwork.com |
bbd1ae72d35375d2e72bc4586a6dfe961fa2726d | f035ef6beb8586d60e584105e9dba00573dfaf5e | /tcp/client.py | 1a8b0317f23f65acc809e0975ff395c32ec2c289 | [] | no_license | reisnobre/CET098 | ecc20cdca97693e7d7edda367e5e704ce226a433 | 2d997fc5a614d787e2d5891def32c097c3e16ec6 | refs/heads/master | 2022-11-25T13:25:15.709174 | 2020-07-30T11:52:35 | 2020-07-30T11:52:35 | 283,759,721 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,018 | py | import socket
import io
HOST = '127.0.0.1'
PORT = 9009
CONN = (HOST, PORT)
ALLOWED_METHODS = ['GET', 'POST', 'PUT']
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.settimeout(2)
server.connect(CONN)
print('Server ready at {}:{}'.format(HOST, PORT))
print('Inform connection method and message as it follows ~> METHOD:MESSAGE\n')
try:
while True:
raw = input('')
if (len(raw) > 3):
method, message = raw.split(':')
print(method, message)
else:
method = raw
try:
index = ALLOWED_METHODS.index(method)
if index is 0:
server.sendall('{}'.format(method).encode())
print('response:\n', server.recv(1024).decode())
elif index is 1:
server.sendall('{}:{}'.format(method, message).encode())
except ValueError as e:
print('METHOD NOT ALLOWED')
finally:
print('Closing Server at {}:{}'.format(HOST, PORT))
server.close()
| [
"eduardo.reisnobre@gmail.com"
] | eduardo.reisnobre@gmail.com |
dac95d4e171ff68dc333912772c6c8dd3bfe73ec | 24cbfefc98c5c34dc2d29021889c06d40734108d | /LeetCodeEasy/MajorityElementInList.py | 93845e38f332f601bd9ee8fba4ad43a09eb83a59 | [] | no_license | surmayi/CodePython | 6bbf5967de9b72aa6e8915725fda53d6c3d82fc5 | c0dd25471e34a2c7ce2d1ce6a10dbd6e0f7042f1 | refs/heads/master | 2022-05-16T11:59:39.289329 | 2022-05-02T19:33:48 | 2022-05-02T19:33:48 | 234,530,224 | 0 | 0 | null | 2020-06-05T18:48:33 | 2020-01-17T10:55:34 | Python | UTF-8 | Python | false | false | 296 | py | def majorityElement( nums):
setnum= set(nums)
check = len(nums)//2
for i in setnum:
c=0
while(i in nums):
nums.remove(i)
c+=1
if(c>check):
return i
majorityElement([2,2,1,1,1,2,2]) | [
"surmayi7@gmail.com"
] | surmayi7@gmail.com |
869f7c920d278ff777f63b8b1715c4a493ae22b4 | a3be3d5f13c7831d21b23c6fdc264d45fb9448a1 | /catkin_ws/src/action_server/src/sis_arm_pick_and_place_action.py | 6ab20c6d17240871da1fba020b564b80bcd3e0c3 | [] | no_license | VIMO-Robotics/mmbot | 8a70f7ee7b074c384883ec0200cf2b624c37dfe2 | dc004f2b870bf86798d9e1d06f1c8a0c47a4c060 | refs/heads/master | 2020-06-25T10:42:34.580709 | 2019-09-09T04:47:50 | 2019-09-09T04:47:50 | 199,286,782 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,032 | py | #!/usr/bin/env python
import sys
import copy
import rospy
import moveit_commander
import moveit_msgs.msg
from geometry_msgs.msg import PoseStamped, PointStamped
from std_msgs.msg import Float64, Bool
from action_server.msg import block_pick_and_placeAction, block_pick_and_placeResult, block_pick_and_placeFeedback
import actionlib
import tf
from ik_4dof import ik_solver
import numpy as np
class block_pick_and_place(object):
    """Action server driving a 4-DOF SIS arm through pick-and-place tasks.

    Exposes ``block_pick_and_place_server`` (block_pick_and_placeAction).  A
    goal carries lists of pick/place poses, an object size for the gripper
    command and a mode selecting the stacking (0/1) or classifying (2) routine.
    """
    def __init__(self):
        self.node_name = rospy.get_name()
        # Initial
        self.gripper_v = 1.23  # gripper-close command value sized for a cube
        self.br = tf.TransformBroadcaster()
        self.tf_listener = tf.TransformListener()
        # Thread lock
        moveit_commander.roscpp_initialize(sys.argv)
        self.robot = moveit_commander.RobotCommander()
        self.scene = moveit_commander.PlanningSceneInterface()
        self.group = moveit_commander.MoveGroupCommander("arm")
        display_trajectory_publisher = rospy.Publisher(
            '/move_group/display_planned_path',
            moveit_msgs.msg.DisplayTrajectory,
            queue_size=1)
        self.gripper_cmd = rospy.Publisher('/gripper_joint/command', Float64, queue_size=1)
        self.pub_current_joint_state = rospy.Publisher('/pub_current_joint_state', Bool, queue_size=1)
        self._as = actionlib.SimpleActionServer('block_pick_and_place_server', block_pick_and_placeAction, execute_cb=self.execute_cb, auto_start = False)
        self._feedback = block_pick_and_placeFeedback()
        self._result = block_pick_and_placeResult()
        self._as.start()
        # safe shutdown
        rospy.on_shutdown(self.onShutdown)
        rospy.loginfo("[%s] Initialized " %(rospy.get_name()))
        self.group.allow_replanning(True)
        self.group.set_pose_reference_frame("base_link")
        # Bring the arm to a known state: publish joints, open gripper, go home.
        self.pub_current_joints()
        self.gripper_action(0)
        self.home_pose()
    def pub_current_joints(self):
        # Pulse the flag topic True then False so the joint-state publisher
        # re-emits the current joint state once.
        msg = Bool(data=1)
        self.pub_current_joint_state.publish(msg)
        rospy.sleep(1)
        msg.data = 0
        self.pub_current_joint_state.publish(msg)
    def execute_cb(self, goal):
        # Action-server callback: run the routine selected by goal.mode and
        # report success through the action result.
        rospy.loginfo("Goal Received !")
        self.gripper_v = goal.object_size  # gripper closure adapted to the object
        if goal.mode == 0 or goal.mode == 1:
            self.ready_pose(1)
            print "========== ready_pose finished ==========\n"
            self.stack_mode(goal.pick_pose, goal.place_pose)
        elif goal.mode == 2:
            self.classifier_mode(goal.pick_pose, goal.place_pose)
        self.home_pose()
        self._result.state = True
        self._as.set_succeeded(self._result)
        rospy.loginfo('block_pick_and_place: Succeeded')
    def stack_mode(self,pick_pose, place_pose):
        # For each pair of poses: approach, pick, retreat, move over, place,
        # retreat.  Returns to ready_pose(0) between objects.
        for i in range(len(pick_pose)):
            print "========== pick_pose =========="
            print [pick_pose[i].pose.position.x, pick_pose[i].pose.position.y, pick_pose[i].pose.position.z]
            print "========== place_pose =========="
            print [place_pose[i].pose.position.x, place_pose[i].pose.position.y, place_pose[i].pose.position.z]
            self.pre_action_pose(copy.deepcopy(pick_pose[i]))
            print "========== pre_action_pose finished =========="
            self.action_pose(pick_pose[i])
            print "========== Pick finished =========="
            self.gripper_action(1)
            print "========== grasp cube finished =========="
            self.pre_action_pose(copy.deepcopy(pick_pose[i]))
            print "========== pre_action_pose finished =========="
            self.ready_pose(0)
            print "========== ready_pose finished =========="
            self.pre_action_pose(copy.deepcopy(place_pose[i]))
            print "========== pre_action_pose finished =========="
            self.action_pose(place_pose[i])
            print "========== Place finished =========="
            self.gripper_action(0)
            print "========== Place cube finished =========="
            self.pre_action_pose(copy.deepcopy(place_pose[i]))
            print "========== pre_action_pose finished =========="
            self.ready_pose(0)
            print "========== ready_pose finished ==========\n"
    def classifier_mode(self,pick_pose, place_pose):
        # Same cycle as stack_mode but finishes each object with ready_pose(2),
        # which returns through the folded home-like posture.
        for i in range(len(pick_pose)):
            print "========== pick_pose =========="
            print [pick_pose[i].pose.position.x, pick_pose[i].pose.position.y, pick_pose[i].pose.position.z]
            print "========== place_pose =========="
            print [place_pose[i].pose.position.x, place_pose[i].pose.position.y, place_pose[i].pose.position.z]
            self.pre_action_pose(copy.deepcopy(pick_pose[i]))
            print "========== pre_action_pose finished =========="
            self.action_pose(pick_pose[i])
            print "========== Pick finished =========="
            self.gripper_action(1)
            print "========== grasp cube finished =========="
            self.pre_action_pose(copy.deepcopy(pick_pose[i]))
            print "========== pre_action_pose finished =========="
            self.ready_pose(0)
            print "========== ready_pose finished =========="
            self.pre_action_pose(copy.deepcopy(place_pose[i]))
            print "========== pre_action_pose finished =========="
            self.action_pose(place_pose[i])
            print "========== Place finished =========="
            self.gripper_action(0)
            print "========== Place cube finished =========="
            self.pre_action_pose(copy.deepcopy(place_pose[i]))
            print "========== pre_action_pose finished =========="
            self.ready_pose(2)
            print "========== ready_pose finished ==========\n"
    def home_pose(self):
        # Two-step motion into the folded rest posture (joint angles in rad).
        self.execute_fk(0.7976700097005335, -2.1782527187976104, 2.188479257383515, 0.4601942363656924)
        self.execute_fk(-0.02045307717180855, -2.162912910918754, 2.0657607943526637, 0.8)
    def ready_pose(self, state):
        # Move to the pre-manipulation posture; state selects the approach path:
        # 0 = direct, 1 = via the folded posture, 2 = ready then folded posture.
        if state == 0:
            self.execute_fk(0,-0.3681553890925539,1.6055665579869711,1.6413594430376361)
        elif state == 1:
            # self.execute_fk(0.8130098175793898, -2.0197413707160945, 2.1782527187976104, 0.3425890426277932)
            self.execute_fk(0.7976700097005335, -2.1782527187976104, 2.188479257383515, 0.4601942363656924)
            self.execute_fk(0,-0.3681553890925539,1.6055665579869711,1.6413594430376361)
        elif state == 2:
            self.execute_fk(0,-0.3681553890925539,1.6055665579869711,1.6413594430376361)
            self.execute_fk(0.7976700097005335, -2.1782527187976104, 2.188479257383515, 0.4601942363656924)
        # rospy.sleep(1.5)
    def pre_action_pose(self, pre_pose):
        # Hover 2 cm above the target pose before descending (mutates its copy).
        pre_pose.pose.position.z += 0.02
        # print [pre_pose.pose.position.x, pre_pose.pose.position.y, pre_pose.pose.position.z]
        self.find_ik_and_execute(pre_pose)
        # rospy.sleep(1.5)
    def action_pose(self, pose):
        # Move the end effector to the exact target pose.
        # print [pose.pose.position.x, pose.pose.position.y, pose.pose.position.z]
        self.find_ik_and_execute(pose)
    def execute_fk(self, theta_1, theta_2, theta_3, theta_4):
        # Plan and execute a joint-space motion to the four given joint angles.
        if rospy.is_shutdown():
            # NOTE(review): self._action_name is never defined on this class —
            # this branch would raise AttributeError; confirm intended attribute.
            rospy.loginfo('%s: Finished' % self._action_name)
            self._as.set_preempted()
        joint_values = self.group.get_current_joint_values()
        joint_values[0] = theta_1
        joint_values[1] = theta_2
        joint_values[2] = theta_3
        joint_values[3] = theta_4
        self.group.set_joint_value_target(joint_values)
        plan = self.group.plan()
        return self.group.execute(plan,wait=True)
    def gripper_action(self, state):
        # state truthy -> close to self.gripper_v; falsy -> open fully (0).
        if state:
            msg = Float64(data=self.gripper_v) #1.23
            s_t = 1.5
        else:
            msg = Float64(data=0)
            s_t = 2
        self.gripper_cmd.publish(msg)
        rospy.sleep(s_t)  # give the gripper time to finish moving
    def find_ik_and_execute(self, pose_transformed):
        # Solve 4-DOF IK (end effector pitched -90 deg) and try each candidate
        # joint solution until one plans and executes successfully.
        x = pose_transformed.pose.position.x
        y = pose_transformed.pose.position.y
        z = pose_transformed.pose.position.z
        ik_candidate = ik_solver(x, y, z, -90)
        # print "========== Find ",len(ik_candidate)," Plan =========="
        # NOTE(review): isnan(ik_candidate.all()) tests the reduced boolean, not
        # each element — confirm ik_solver's failure convention (NaN array?).
        if not np.isnan(ik_candidate.all()):
            for theta_1, theta_2, theta_3, theta_4 in ik_candidate:
                # while not rospy.is_shutdown():
                try:
                    if self.execute_fk(theta_1, theta_2, theta_3, theta_4):
                        # rospy.loginfo("========== Execute Plan ==========")
                        # print [theta_1, theta_2, theta_3, theta_4]
                        break
                except Exception as e:
                    # This candidate failed to plan/execute; try the next one.
                    # rospy.loginfo(e)
                    # print "------------- Failed -------------"
                    # print [theta_1, theta_2, theta_3, theta_4],"\n"
                    continue
        else:
            # No IK solution: abort the current action goal.
            rospy.loginfo("========== Cannot Find Solution ==========")
            self._result.state = False
            self._as.set_aborted(self._result)
    def onShutdown(self):
        rospy.loginfo("[%s] Shutting down..." %self.node_name)
        rospy.sleep(0.5) #To make sure that it gets published.
        rospy.loginfo("[%s] Shutdown" %self.node_name)
if __name__ == '__main__':
    rospy.init_node('block_pick_and_place',anonymous=False)
    # Note: the instance deliberately shadows the class name, as in the original.
    block_pick_and_place = block_pick_and_place()
    rospy.on_shutdown(block_pick_and_place.onShutdown)
    rospy.spin()
| [
"andy19970307@gmail.com"
] | andy19970307@gmail.com |
0146feeb837e36370f62c42e74d38532382cc87f | 7ca2ad45e2726c4bf2b30ce37d6dc552ef5590cc | /app/models/wish.py | 63fbbc50a6cf6b7ab4d21d787ae747fc8fdb8412 | [] | no_license | yangyangmei/fisher | 98330583848f3519dfb0d5a7cde72a4c93df4a97 | f4034935070656b4e523cbb2f99ce2599e453800 | refs/heads/master | 2022-12-15T09:07:06.866679 | 2018-10-06T08:54:17 | 2018-10-06T09:08:06 | 151,822,075 | 0 | 0 | null | 2022-12-08T02:59:47 | 2018-10-06T08:58:49 | CSS | UTF-8 | Python | false | false | 1,540 | py | """
created by yangyang on 2018/10/2.
"""
from app.models.base import Base,db
from sqlalchemy import Column, Integer, String, Boolean, func
from sqlalchemy.orm import relationship
from app.spider.yushu_book import YuShuBook
__author__ = "yangyang"
class Wish(Base):
    """A user's wish-list entry for a book, identified by ISBN."""
    __tablename__ = "wish"
    id = Column(Integer, primary_key=True)
    # Whether the wished book has been received: False = still on the wish
    # list, True = already received.
    launched = Column(Boolean, default=False)
    uid = Column(Integer, db.ForeignKey("user.id"))
    user = relationship("User")
    isbn = Column(String(15), nullable=False)

    @property
    def book( self ):
        # Look up this record's book via the YuShu book API by its ISBN.
        yushu_book = YuShuBook()
        yushu_book.search_by_isbn(self.isbn)
        return yushu_book.first

    @classmethod  # fetch all of a user's wishes that have not been fulfilled
    def get_user_gifts( cls, uid ):
        # NOTE(review): the name says "gifts" but this queries Wish rows —
        # kept as-is because callers depend on the name.
        return Wish.query.filter_by(
            uid=uid, launched=False).order_by(
            Wish.create_time.desc()).all()

    @classmethod
    def get_wish_counts( cls, isbn_list ):
        # For the given ISBNs, count in the Gift table how many active offers
        # (launched == False, status == 1) exist per ISBN, i.e. how many copies
        # of each wished book are currently being given away.
        from app.models.gift import Gift
        gifts = db.session.query(Gift.isbn, func.count(Gift.id)).filter(
            Gift.launched == False, Gift.status == 1, Gift.isbn.in_(isbn_list)).group_by(Gift.isbn).all()
        count_list = [{"count": w[1], "isbn": w[0]} for w in gifts]
        return count_list
| [
"yangman_011088@163.com"
] | yangman_011088@163.com |
a2484349f338d1e863be00b2f6d439cc5ce08161 | 64e7b148557ba53c38d481f1e7615cafad57fded | /potform/apps.py | f52e9b4a1a49216b25e8beff527e32a81e2415a3 | [] | no_license | toma1031/ptoform | 0bc406d61e92851e627a4b924a8828e6971c093f | 1728f8da77e64d00796909af42f606ae577de10e | refs/heads/master | 2023-04-23T10:22:54.171645 | 2021-05-21T04:07:56 | 2021-05-21T04:07:56 | 360,765,718 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 89 | py | from django.apps import AppConfig
class PotformConfig(AppConfig):
name = 'potform'
| [
"akinoritoma@appletoma.home"
] | akinoritoma@appletoma.home |
9558078b495c9f41b5bcc8fde64f93bfb7668f33 | ec87c361be4a2f9f842695b6a6e8601ebd735e83 | /GuessNum.py | ee225ea9d692c0e4fc54bd50da1e7441a632581c | [] | no_license | heheddff/python2018 | 357d51bee7ea39f6a1df82101fb49c1568250e24 | 77a240dd155f679fffe33b62df57f689a8c85082 | refs/heads/master | 2020-03-27T23:13:38.789249 | 2018-12-08T14:55:21 | 2018-12-08T14:55:21 | 147,302,979 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 488 | py | #GuessNum
import random as rd
target = rd.randint(1,1000)
count = 0
while True:
try:
guess = eval(input("请输入一个猜测的整数(1至1000):"))
except:
print('输入有误,请重试,此次不计入猜测次数!')
continue
count +=1
if guess > target:
print("猜大了")
elif guess < target:
print("猜小了")
else:
print("猜对了")
break
print("此论猜测的次数是:{}".format(count))
| [
"qq2003qq@126.com"
] | qq2003qq@126.com |
372dc25a16d16ee0e086ea76423727bf3d6b3273 | 63ff51263313f5d5df0609578939c88e09cd9f0e | /clustering.py | fdfdb0b19a249e8ec7838b81bf4f76131ea60ac8 | [] | no_license | ritikamangla/FakeBot | 0bc307e74aa208f5135914a11e73665787eb2597 | 7017daa451e2d9bc30b2235c0dcfa5e22a74a837 | refs/heads/master | 2021-02-06T23:47:02.948953 | 2020-02-29T13:20:52 | 2020-02-29T13:20:52 | 243,957,799 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,220 | py | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import datetime
from sklearn.preprocessing import StandardScaler , OneHotEncoder ,LabelEncoder
from datetime import timedelta
from sklearn.decomposition import PCA
dataset = pd.read_csv('FinalDS_AdditionalFeatures.csv')
x=dataset.iloc[:,[7,8,12,13,16,17,20,39,40,41,44]].values
encoder = LabelEncoder()
x[:,3] = encoder.fit_transform(x[:,3])
#CALCULATE TIME DIFFERENCE
datetimeFormat = '%d-%m-%Y %H:%M'
date1 = '09-02-2020 10:01'
difference =[]
for i in range(1600):
date2 = x[i,2]
diff = datetime.datetime.strptime(date1, datetimeFormat) \
- datetime.datetime.strptime(date2, datetimeFormat)
difference.append(diff.days)
print("Difference:", difference)
x = np.append(arr =x ,values = np.ones((1601,1)).astype(list), axis= 1)
for i in range(1600):
x[i][11] = difference[i]
x= x[:,[0,1,3,4,5,6,7,8,9,10,11]]
pca =PCA(n_components=2)
x= pca.fit_transform(x)
wcss = []
from sklearn.cluster import KMeans
"""for i in range (1,11):
kmeans = KMeans(n_clusters= i , init = 'k-means++',max_iter=300 )
kmeans.fit(x)
wcss.append(kmeans.inertia_)
plt.plot(range(1,11), wcss)
plt.title('The elbow method')
plt.xlabel('Number of clusters')
plt.ylabel('WCSS')
plt.show()
kmeans = KMeans(n_clusters= 5 , init = 'k-means++',max_iter=300 ,random_state=0)
y_means = kmeans.fit_predict(x)"""
kmeans = KMeans(n_clusters= 3 , init = 'k-means++',max_iter=300 ,random_state=0)
y_means = kmeans.fit_predict(x)
#q = kmeans.
groups= pd.DataFrame(y_means)
gk = groups.groupby(0)
print(gk)
l0=[]
l1=[]
l2=[]
for i in range(1600):
if(y_means[i]==0):
l0.append(i)
if (y_means[i] == 1):
l1.append(i)
if (y_means[i] == 2):
l2.append(i)
print(x[y_means == 0, 1])
#visualising data
plt.scatter(x[y_means == 0,0],x[y_means == 0, 1], s = 100 ,c= 'red',label ='cluster 1',alpha=0.2)
plt.scatter(x[y_means == 1,0],x[y_means == 1, 1], s = 100 ,c= 'cyan',label ='cluster 2',alpha=0.2)
plt.scatter(x[y_means == 2,0],x[y_means == 2, 1], s = 100 ,c= 'green',label ='cluster 3',alpha=0.2)
plt.legend()
plt.show() | [
"noreply@github.com"
] | noreply@github.com |
c030c9d9706f342259a891c2d96bb611cefde6a7 | e3e626951eeb1d571208f7483252024965b95d4a | /test1.py | 726c5f56f41b395ca0e2ce3603fd40020bdd4045 | [] | no_license | abdul-malik360/Flask_EOMP | 883866d519d50295719a93851a5a5d46a0f3ae87 | 984e8fe0b4d43de70a01ad131a7e09ef5b0fab3d | refs/heads/main | 2023-07-18T04:24:36.376501 | 2021-08-17T10:15:11 | 2021-08-17T10:15:11 | 391,969,855 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,729 | py | import unittest
from app import app
class Testing(unittest.TestCase):
    """Status-code smoke tests for the Flask API endpoints.

    Each test issues one GET through the app's test client and checks the
    HTTP status code. Fix: the eight copy-pasted test bodies are collapsed
    into a single private helper so each case states only its URL/status
    pair once.

    NOTE(review): test_view_user requests '/api/show-users/' -- identical
    to test_show_users; this looks like a copy-paste slip, confirm the real
    view-user URL before changing it.
    """

    def _get_status(self, url):
        """GET *url* through the test client and return the status code."""
        test = app.test_client(self)
        return test.get(url).status_code

    def test_registration(self):
        # GET on a POST-only endpoint -> 405 Method Not Allowed
        self.assertEqual(self._get_status('/api/register/'), 405)

    def test_show_users(self):
        # protected endpoint -> 401 Unauthorized without credentials
        self.assertEqual(self._get_status('/api/show-users/'), 401)

    def test_view_user(self):
        self.assertEqual(self._get_status('/api/show-users/'), 401)

    def test_add_products(self):
        self.assertEqual(self._get_status('/api/add-product/'), 405)

    def test_show_products(self):
        # public listing -> 200 OK
        self.assertEqual(self._get_status('/api/show-products/'), 200)

    def test_view_product(self):
        # literal '<int:prod_list>' placeholder matches no route -> 404
        self.assertEqual(self._get_status('/api/view-product/<int:prod_list>'), 404)

    def test_edit_product(self):
        self.assertEqual(self._get_status('/edit-product/<int:prod_list>'), 404)

    def test_delete_product(self):
        self.assertEqual(self._get_status('/api/delete-product/<int:prod_list>'), 404)
# Run the whole suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
| [
"abdulmalikmohamed360@gmail.com"
] | abdulmalikmohamed360@gmail.com |
e98d8dcea92717dc00bba19ef5d887b8d521e12e | ba949e02c0f4a7ea0395a80bdc31ed3e5f5fcd54 | /problems/dp/Solution115.py | aec8a435c4f15acf75a0391afcaed2b0f9481f66 | [
"MIT"
] | permissive | akaliutau/cs-problems-python | 6bc0a74064f6e9687fe58b13763da1fdf2e1f626 | 9b1bd8e3932be62135a38a77f955ded9a766b654 | refs/heads/master | 2023-05-11T22:19:06.711001 | 2021-06-04T11:14:42 | 2021-06-04T11:14:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,170 | py | """ Given two strings s and t, return the number of distinct subsequences of s
which equals t. A string's subsequence is a new string formed from the
original string by deleting some (can be none) of the characters without
disturbing the relative positions of the remaining characters. (i.e., "ACE"
is a subsequence of "ABCDE" while "AEC" is not). It's guaranteed the answer
fits on a 32-bit signed integer.
Example 1: Input: s = "rabbbit", t = "rabbit" Output: 3
Explanation: As shown below, there are 3 ways you can
generate "rabbit" from S.
____ __
rabbbit
__ ____
rabbbit
___ ___
rabbbit
IDEA:
lets t=ra, s=ram
if we have a sequence
[ra]
number of distinct subsequences, layer by layer:
i=0, t="", [""] ["r"] ["ra"] ["ram"]
\
i=1, t="r", [""]<-["r"]<-["r"]<- ["r"]
i=2, t="ra", [""]<-["r"]<-["r"]<- ["r"]
where
\ = use data from previous iteration, like this
["", r, a, ra] = [{"",r} + {"",r} * a]
<- = copy prev result-set
["", r, a, ra] --> ["", r, a, ra]
"""
class Solution115:
    # Empty placeholder: the distinct-subsequences DP sketched in the module
    # docstring above has not been implemented yet.
    pass
| [
"aliaksei.kaliutau@gmail.com"
] | aliaksei.kaliutau@gmail.com |
d92bc04455151b4829c7d6a408c43e918f798952 | fb7e610d5f2beaba3547eced6ced5f1ebfd5ac00 | /mult.py | 7119ca94a82fb3693e62751cea63ecd592a94be3 | [] | no_license | Prachithakur27/Python- | c8d048820016a450be4a1747385955902b27fcc8 | 03811ed3ae808337bbe6e9e5761b027fa10540b3 | refs/heads/master | 2020-11-29T21:27:52.763096 | 2019-12-26T07:48:46 | 2019-12-26T07:48:46 | 230,219,314 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 696 | py | #importing the multiprocessing module
import multiprocessing
def print_cube(num):
    """Print the cube of *num* in the form ``Cube: <value>``."""
    print(f"Cube: {num * num * num}")
def print_square(num):
    """Print the square of *num* in the form ``Square: <value>``."""
    print(f"Square: {num * num}")
if __name__ == "__main__":
    # Two worker processes are prepared, but only the square worker is
    # started and joined -- the cube process is deliberately left idle
    # (its start()/join() calls were disabled in the original example).
    square_proc = multiprocessing.Process(target=print_square, args=(10,))
    cube_proc = multiprocessing.Process(target=print_cube, args=(10,))
    square_proc.start()
    square_proc.join()
    print("Done!")
| [
"noreply@github.com"
] | noreply@github.com |
559074d54fc53ee940a2dd8ca8070e5013bb619f | 35dc527419dc88876faef014f0fd358b5bdda892 | /PySendImg/ImgSending.py | a53bde3813b1f75330f240edb70be5eb2a912617 | [] | no_license | manoon/pyapp | 175955f2ca9be0f741d553c09bbaa23a05b94c8a | 5335053d4714a23a51ca7f3741ba36adeb01a2ae | refs/heads/master | 2021-03-13T00:10:59.965563 | 2014-05-24T12:10:03 | 2014-05-24T12:10:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,776 | py | from PyQt4.QtGui import QIcon,QLabel,QPushButton,QMessageBox,QTextEdit,QApplication,QGridLayout,QWidget,QStatusBar
from PyQt4.QtCore import QTime,QTimer,SIGNAL,QDate
import os,sys,ftplib,time
class ImgSending(QWidget):
    """PyQt4 window that periodically uploads JPEGs from a local folder
    to an FTP server and logs progress in a text area.

    Start!/Clear! buttons toggle the upload loop and purge local images.
    """
    def __init__(self,parent=None):
        super(ImgSending,self).__init__(parent)
        self.SetupUi()
        # running toggles between idle and uploading on each Start! press
        self.running=False
        # old-style PyQt4 string-based signal hookup
        self.connect(self.toggleButton,SIGNAL("clicked()"),self.StartWorking)
        self.connect(self.clearButton,SIGNAL("clicked()"),self.RemoveImg)
        self.setWindowTitle("ImgSending")
        self.setWindowIcon(QIcon('icons/web.png'))
    def SetupUi(self):
        """Build the widgets, status bar and grid layout."""
        toggleButton=QPushButton("Start!",self)
        clearButton=QPushButton("Clear!",self)
        statusBar=QStatusBar()
        self.statusBar=statusBar
        self.toggleButton=toggleButton
        self.clearButton=clearButton
        textEdit=QTextEdit()
        self.textEdit=textEdit
        self.date=QDate.currentDate()
        self.time=QTime.currentTime()
        # status bar shows the startup timestamp
        statusText=self.time.toString("hh:mm:ss")
        statusText="Start up @ " +self.date.toString("yyyy-MM-dd")+" "+statusText
        self.statusBar.showMessage(statusText)
        grid=QGridLayout()
        grid.addWidget(textEdit,1,0)
        grid.addWidget(toggleButton,2,1)
        grid.addWidget(clearButton,2,2)
        grid.addWidget(statusBar,2,0)
        self.setLayout(grid)
        self.resize(500,500)
    def StartWorking(self):
        """Toggle the upload loop: arm the timers, or stop them if running."""
        if not self.running:
            self.running=True
            # NOTE(review): this 1-second QTimer is started but its
            # reference is immediately overwritten by the 3-second timer
            # below, so it is never connected or stopped -- confirm intended.
            secTimer=QTimer(self)
            secTimer.start(1000)
            self.secTimer=secTimer
            self.time=QTime.currentTime()
            self.textEdit.setText(self.time.toString("hh:mm:ss"))
            # every 3 s: look for new images and upload them
            secTimer=QTimer(self)
            secTimer.start(3000)
            self.secTimer=secTimer
            self.connect(secTimer,SIGNAL("timeout()"),self.UpdateText)
            # every 3 min: wipe the on-screen log
            secTimerCls=QTimer(self)
            secTimerCls.start(180000)
            self.secTimerCls=secTimerCls
            self.connect(secTimerCls,SIGNAL("timeout()"),self.ClearText)
        else:
            self.running=False
            self.secTimer.stop()
    def ClearText(self):
        """Reset the on-screen log (fired by the 3-minute timer)."""
        self.textEdit.setText("A new Page")
    def UpdateText(self):
        """Timer tick: upload any pending images and append a log line."""
        # advance the displayed clock by the 3-second timer period
        self.time=self.time.addSecs(+3)
        # NOTE(review): this local is unused and shadows the `time` module
        # inside this method -- harmless here, but confirm before reuse.
        time=self.time
        ImgList=self.GetImgList("d:\\tmp\\pytest","jpg")
        if len(ImgList)==0:
            self.textEdit.append(self.time.toString("hh:mm:ss")+"-----No Images Need 2 Be Send")
        else:
            self.Send2Ftp(ImgList)
            self.textEdit.append(self.time.toString("hh:mm:ss")+"-----Images Sent Okay")
            self.RemoveLocal(ImgList)
    def GetImgList(self,dir,ext=None):
        """Recursively collect files under *dir*; with *ext*, keep matches only.

        NOTE(review): `extension in ext` is a substring test against the
        ext string, not an equality check (e.g. "jp" would match "jpg") --
        confirm intended.
        """
        ImgList=[]
        needExtFilter=(ext!=None)
        for root,dirs,files in os.walk(dir):
            for filespath in files:
                filepath=os.path.join(root,filespath)
                extension=os.path.splitext(filepath)[1][1:]
                if needExtFilter and extension in ext:
                    ImgList.append(filepath)
                elif not needExtFilter:
                    ImgList.append(filepath)
        return ImgList
    def Send2Ftp(self,imglist):
        """Upload every file in *imglist* into /img/<yyyymmdd>/ on the server.

        NOTE(review): credentials are hard-coded, uploaded file handles are
        never closed, and file_name assumes a fixed 4-level backslash path
        (img.split('\\\\')[3]) -- confirm all three before production use.
        """
        def list_contain( ls, item ):
            # True if *item* occurs in *ls* (ValueError from .index means no)
            ret = True
            try:
                index = ls.index( item )
            except:
                ret = False
            return ret
        session=ftplib.FTP('127.0.0.1','user','pwd')
        curtime=time.localtime(time.time())
        curdate=time.strftime('%Y%m%d',curtime)
        remotedir="/img/"+curdate+"/"
        session.cwd("/img/")
        nlst = session.nlst()
        print session.nlst()
        # create today's remote directory only if it does not exist yet
        if not list_contain(nlst, curdate):
            session.mkd(remotedir)
        session.cwd(remotedir)
        for img in imglist:
            file_handler=open(img,'rb')
            file_name=img.split('\\')[3]
            print file_name
            cmd="STOR %s"%(file_name)
            session.storbinary(cmd,file_handler)
        #print "we sending img..."
    def RemoveImg(self):
        """Clear! button: delete all local JPEGs and confirm via dialog."""
        ImgList=self.GetImgList("d:\\tmp\\pytest","jpg")
        self.RemoveLocal(ImgList)
        reply=QMessageBox.question(self,'Message',
        "All images had been removed!& Screen Is Cleared",QMessageBox.Yes)
    def RemoveLocal(self,imglist):
        """Delete every file path in *imglist* from disk."""
        for img in imglist:
            os.remove(img)
        #print "removed okay"
if __name__ == "__main__":
    import sys
    # Standard Qt bootstrap: build the app, show the window, enter the loop.
    application = QApplication(sys.argv)
    window = ImgSending()
    window.show()
    sys.exit(application.exec_())
| [
"Administrator@icaker.(none)"
] | Administrator@icaker.(none) |
735ca579689a97e8d2ac5d786f6291ac32bc650d | 5116d59c38fbfdcf74467cd555cdd813ab9b798c | /mapEmnerToText.py | 017c9559792c8deffd5f00129dbd138ce5ba8799 | [] | no_license | nb-sommerprosjekt/emneord-analyzer | 3b6949e6c59ed178ba06fc9f54be1c9296c167ad | b80755c16d1eb23f75a3e158381a54f7039250c4 | refs/heads/master | 2020-04-01T15:50:36.895313 | 2018-10-17T11:03:37 | 2018-10-17T11:03:37 | 153,354,243 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,663 | py | import os
from collections import Counter
from sklearn.preprocessing import MultiLabelBinarizer
import pandas as pd
def traverseBokDir(dirPath):
    """Walk *dirPath* and index every .txt file found.

    Returns {basename-without-.txt: {"path": full_path, "filename": name}}.

    Fix: the original also built three parallel lists (filePaths, filenames,
    baseNames) that were never used anywhere; they are removed.
    """
    print("Henter inn stier til bøker")
    txt_dict = {}
    for root, dirs, files in os.walk(dirPath):
        for file in files:
            if file.endswith(".txt"):
                file_path = os.path.join(root, file)
                # .replace keeps the original semantics (strips ".txt"
                # wherever it appears in the name, not just the suffix)
                baseName = file.replace(".txt", "")
                txt_dict[baseName] = {"path": file_path, "filename": file}
    return txt_dict
def getAllEmner(path):
    """Collect keyword lists from "TOPIC:" lines of every .emner file under *path*.

    Returns [(emner_file_path, [keyword, ...]), ...] with keywords lowercased.

    Fixes: the original ignored its *path* argument and walked the
    hard-coded "/disk1/bokhylla/emneUttrekk" directory (the only caller
    passed exactly that directory, so behaviour there is unchanged), and it
    never closed the files it opened.

    NOTE(review): one keyword list is appended per "TOPIC" line but one
    path is appended per file, so a file with zero or several TOPIC lines
    shifts the pairing -- behaviour preserved as-is, confirm intended.
    """
    print("Henter emner fra filer")
    file_paths = []
    emne_ordliste_liste = []
    for root, dirs, files in os.walk(path):
        for file in files:
            if file.endswith(".emner"):
                file_path = os.path.join(root, file)
                with open(file_path, "r") as f:
                    text = f.readlines()
                file_paths.append(file_path)
                for line in text:
                    if "TOPIC" in line:
                        line = line.replace("TOPIC:", "").replace("\n", "")
                        emner = [x.lower() for x in line.split(",")]
                        emne_ordliste_liste.append(emner)
    return list(zip(file_paths, emne_ordliste_liste))
def getListOfEmnerAndFrequency(emnerOgPathList, minFreq=0):
    """Count keyword occurrences over all (path, keywords) rows.

    Returns (keyword, count) tuples sorted by count descending, keeping
    only keywords seen at least *minFreq* times. Keywords of length <= 1
    are ignored (same filter as before normalisation).
    """
    print("Lager liste over emner og frekvenser")
    all_terms = [
        term.lower().strip()
        for element in emnerOgPathList
        for term in element[1]
        if len(term) > 1
    ]
    ranked = sorted(Counter(all_terms).items(), key=lambda kv: kv[1], reverse=True)
    return [pair for pair in ranked if pair[1] >= minFreq]
def makeEmneDict(listOfEmneFilePaths):
    """Index .emner file paths by base name (".emner" stripped).

    Returns {basename: {"path": directory, "filename": file_name}}.

    Fix: dropped the fileNames/baseNames lists and the commented-out append
    calls left over from an earlier version -- they were never used.
    """
    print("lager emnedictionary")
    emne_dict = {}
    for f in (p.strip() for p in listOfEmneFilePaths):
        path, filename = os.path.split(f)
        baseName = os.path.basename(filename).replace(".emner", "")
        emne_dict[baseName] = {"path": path, "filename": filename}
    return emne_dict
#def parseEmneListe(path):
# f = open(path, 'r')
# emner_raw = f.readlines()
# emner_clean = []
# for line in emner_raw:
# emne = ' '.join(line.split()[:-1])
# emne = emne.lower()
# if len(emne)>1:
# emner_clean.append(emne)
# return emner_clean
def filterEmner(emner_og_fil_liste, emnerOfChoice, numEmnerPerFile):
    """Keep only keywords present in *emnerOfChoice* (case-insensitive),
    then drop files left with fewer than *numEmnerPerFile* keywords.

    Returns [[file_path, [kept_keyword, ...]], ...].
    """
    print("filtrerer emner")
    filtered = []
    for entry in emner_og_fil_liste:
        kept = [t.strip().lower() for t in entry[1] if t.lower() in emnerOfChoice]
        filtered.append([entry[0], kept])
    return [row for row in filtered if len(row[1]) >= numEmnerPerFile]
def transformLabels(emnerOgFilListe, label_list):
    """Binarize each row's keyword list against *label_list* (multi-hot).

    Returns (matrix, classes) where matrix rows follow *emnerOgFilListe*
    and columns follow the binarizer's class order.
    """
    print("transformerer labels til multilabelformat")
    label_sets = [row[1] for row in emnerOgFilListe]
    binarizer = MultiLabelBinarizer(classes=label_list)
    matrix = binarizer.fit_transform(label_sets)
    return matrix, binarizer.classes_
def makeEmneDataframe(binarized_emner, class_list, matchedTxtsOgEmnelist):
    """Build a DataFrame with txt/emner paths plus one 0/1 column per label.

    Column order: "txtpath", "emnefilpath", then *class_list* order (dict
    insertion order is preserved by DataFrame.from_dict).

    Fix: removed the unused `emne_dict` local and replaced the manual
    append loop over label columns with a direct comprehension.
    """
    print("Legger data i dataframe")
    panda_dict = {
        "txtpath": [pair[0] for pair in matchedTxtsOgEmnelist],
        "emnefilpath": [pair[1] for pair in matchedTxtsOgEmnelist],
    }
    for col_idx, label in enumerate(class_list):
        panda_dict[label] = [vec[col_idx] for vec in binarized_emner]
    return pd.DataFrame.from_dict(panda_dict)
# df.to_csv("labels.csv")
def mapEmnerToText(txtDict, emnefilpaths):
    """Pair each .emner path with the .txt path sharing its base name.

    Returns [(txt_path, emner_path), ...] for every emner file whose base
    name (".emner" stripped) exists as a key in *txtDict*.
    """
    matched_txts = []
    for emnepath in emnefilpaths:
        key = os.path.basename(emnepath).replace(".emner", "")
        if key in txtDict:
            matched_txts.append((txtDict[key]["path"], emnepath))
    print("Mapper emner til text")
    return matched_txts
if __name__ == '__main__':
    # Pipeline: index books and keyword files on disk, keep only frequent
    # keywords, binarize them per book and dump the result to CSV.
    bokDir = "/disk1/bokhylla"
    bokEmnerDir = "/disk1/bokhylla/emneUttrekk"
    # Dictionary of paths to every book: dict[basename] = {path, filename}
    txtDict = traverseBokDir(bokDir)
    # All keyword lists and their file paths: list[0] = (filename, [keywords])
    emnerOgFilpaths = getAllEmner(bokEmnerDir)
    # Just the keyword-file paths
    emneFilePaths = [x[0] for x in emnerOgFilpaths]
    # Dictionary of paths to every keyword file: dict[basename] = {path, filename}
    emneDict = makeEmneDict(emneFilePaths)
    # Frequency of each keyword, as a sorted list of (keyword, count) tuples;
    # 40 = minimum number of documents a keyword must appear in
    emnerOgFrekvens = getListOfEmnerAndFrequency(emnerOgFilpaths,40)
    # Top keywords only, sorted by frequency
    liste_over_top_emner = [x[0].lower() for x in emnerOgFrekvens]
    #print(liste_over_top_emner)
    # New list of keyword files where only keywords occurring in 40+ docs are
    # kept; documents left with fewer than 5 keywords are dropped too.
    # output: list[0] = [emnefilpath, [keyword1, keyword2, keyword3]]
    newEmnerOgFilPath = filterEmner(emnerOgFilpaths,liste_over_top_emner, 5)
    print("lengde av txtpaths: "+str(len(txtDict.keys())))
    print("lengde av emnepaths: "+str(len(emneDict.keys())))
    emnepaths = [x[0] for x in newEmnerOgFilPath]
    matchedTxtEmner = mapEmnerToText(txtDict, emnepaths)
    print("Antall top emner: "+ str(len(liste_over_top_emner)))
    print("Antall filer opprinnelig: "+ str(len(emnerOgFilpaths)))
    # print(emnerOgFilpath[0])
    print("Antall filer med 5 eller flere popular emner: " + str(len(newEmnerOgFilPath)))
    # print(newEmnerOgFilPath)
    # Transform labels to binarized (multi-hot) format
    emner_binarized, classes = transformLabels(newEmnerOgFilPath,liste_over_top_emner)
    # Build the CSV where each label is a column title and each entry is 0/1
    emne_df = makeEmneDataframe(emner_binarized, classes, matchedTxtEmner)
    # (older version appended the emner paths here; now done inside the helper)
    # print("legger til emnepaths i dataframe")
    # emnepaths = [x[0] for x in newEmnerOgFilPath]
    # emne_df["emnefilpath"] =emnepaths
    print("printer ut csv med emner og filpaths")
    emne_df.to_csv("new_label_og_filpath.csv")
| [
"tensor@meta2.lx.nb.no"
] | tensor@meta2.lx.nb.no |
a919b3bfb71f42970b5e865a2740073fdbda5e34 | d69959a04e0a83ab54f5a642c88544258db40e50 | /Lesson2/2.1.py | df3b40d6849600ce42f57e2d7954861bf4f0d39c | [] | no_license | MSmirnov3517/Smirnov-Python | d18921e3c1bb1c32c4bb1db25a43f9df53e1863f | 40802feb77d7b7b5062010a6588105e9dca1ec52 | refs/heads/Lesson2 | 2023-02-23T19:01:37.018802 | 2021-01-20T14:10:17 | 2021-01-20T14:10:17 | 329,683,682 | 0 | 0 | null | 2021-01-25T16:57:46 | 2021-01-14T17:15:10 | Python | UTF-8 | Python | false | false | 69 | py | my_list = [1, 'test', 20.05]
# Print each element of the mixed-type list together with its type.
for item in my_list:
    print(item, type(item))
| [
"kroligne@gmail.com"
] | kroligne@gmail.com |
0230d57258e99d3d0229b48e6832adb4086a310f | 3762e1e9dcc74653eefc35010768a32fd2ab00d5 | /src/python/gui/render_mad.py | 513df97c1d3485960bea9a367e3e253a976628f0 | [] | no_license | HollisJoe/geometry-1 | 6d5d8ba69b006b2d0bb5b9128b94fdcbd4af3b82 | 1e1f3c3b0aec35e80313d9d1124a77f1e439f53e | refs/heads/master | 2021-06-14T16:38:37.744499 | 2017-02-02T14:20:26 | 2017-02-02T14:20:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,682 | py | #!/usr/bin/python
##
# @file render_mad.py
# @author Eric Turner <elturner@indoorreality.edu>
# @brief Will render the path from a .mad file
#
# @section DESCRIPTION
#
# Will read the given .mad file, parsing the path information
# will open a window showing the path
#
# import matlibplot
from matplotlib import pyplot
import os
import sys
# Get the location of this file
SCRIPT_LOCATION = os.path.dirname(__file__)
# Import our python files
sys.path.append(os.path.join(SCRIPT_LOCATION, '..', 'io'))
from madformat import Mad
# ------------------------------------ #
# ----- Function Implementations ----- #
# ------------------------------------ #
##
# Will plot the contents of the specified mad file
#
# @param input_file The input .mad file to parse
#
def run(input_file):
    """Parse *input_file* as a .mad path file and display it with matplotlib.

    Plots the (x, y) positions from mad.poses with equal axis scaling and a
    title showing the total duration; blocks until the window is closed.
    """
    mad = Mad(input_file)
    pyplot.figure(1)
    # split the pose list into its x and y coordinate sequences
    xs = [pose[0] for pose in mad.poses]
    ys = [pose[1] for pose in mad.poses]
    pyplot.plot(xs, ys)
    pyplot.axis('equal')
    pyplot.title('Duration: %f sec' % (mad.times[-1] - mad.times[0]))
    pyplot.show()
##
# The main function
#
# This will call the run() function using command-line arguments
#
def main():
    """Command-line entry point: render the .mad file named in argv[1].

    NOTE(review): run() returns None, so sys.exit(ret) exits with status 0;
    confirm whether a real status code was intended.
    """
    # check command-line arguments
    if len(sys.argv) != 2:
        print ""
        print " Usage:"
        print ""
        print "\t",sys.argv[0],"<path_to_mad_file>"
        print ""
        sys.exit(1)
    # run this script with the given arguments
    ret = run(sys.argv[1])
    sys.exit(ret)
##
# Boilerplate code to call main function when used as executable
#
# Execute main() only when run as a script, not on import.
if __name__ == '__main__':
    main()
| [
"elturner@eecs.berkeley.edu"
] | elturner@eecs.berkeley.edu |
255d98214bba91cb298d4a2ebf05b4f7fdbfad92 | 47f33149f7b300c6781f98d20be009a308198460 | /rhea_erpnext/config/docs.py | 61f91e8f444405be735a7b1d49371fbc66aeae67 | [
"MIT"
] | permissive | teddyseptiadi/Rhea-ERPNext | f67382c50be922d4e636b65acebe5c41850ce284 | 92a83d89f41565c148d94e3c86185b25a6684067 | refs/heads/master | 2020-06-26T14:13:24.704162 | 2019-01-29T07:08:27 | 2019-01-29T07:08:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 328 | py | """
Configuration for docs
"""
# source_link = "https://github.com/[org_name]/rhea_erpnext"
# docs_base_url = "https://[org_name].github.io/rhea_erpnext"
# headline = "App that does everything"
# sub_heading = "Yes, you got that right the first time, everything"
def get_context(context):
    """Inject docs-theme values: set the brand HTML shown in the header."""
    brand_markup = "Rhea Erpnext"
    context.brand_html = brand_markup
| [
"vishaldhayagude09@gmail.com"
] | vishaldhayagude09@gmail.com |
600212451009ad393006697e56b58998e26de38d | b7f467ada857d862e9538f58c18398f65e24af98 | /samples/client/echo_api/python-nextgen/openapi_client/models/__init__.py | efec638ad0ba4c71068ac444511a47bf24c5fe68 | [
"Apache-2.0"
] | permissive | snebjorn/openapi-generator | 0ccb5c4671799a927e86faaed6449089e703d7e3 | e1719f2b7b1ec633e50bbe572e35854f6fad9dfd | refs/heads/master | 2023-06-19T00:09:29.044707 | 2023-03-17T08:02:27 | 2023-03-17T08:02:27 | 214,228,173 | 0 | 1 | Apache-2.0 | 2023-05-30T05:04:40 | 2019-10-10T16:08:27 | Java | UTF-8 | Python | false | false | 1,179 | py | # coding: utf-8
# flake8: noqa
"""
Echo Server API
Echo Server API # noqa: E501
The version of the OpenAPI document: 0.1.0
Contact: team@openapitools.org
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import absolute_import
# import models into model package
from openapi_client.models.bird import Bird
from openapi_client.models.category import Category
from openapi_client.models.data_query import DataQuery
from openapi_client.models.data_query_all_of import DataQueryAllOf
from openapi_client.models.default_value import DefaultValue
from openapi_client.models.pet import Pet
from openapi_client.models.query import Query
from openapi_client.models.string_enum_ref import StringEnumRef
from openapi_client.models.tag import Tag
from openapi_client.models.test_query_style_deep_object_explode_true_object_all_of_query_object_parameter import TestQueryStyleDeepObjectExplodeTrueObjectAllOfQueryObjectParameter
from openapi_client.models.test_query_style_form_explode_true_array_string_query_object_parameter import TestQueryStyleFormExplodeTrueArrayStringQueryObjectParameter
| [
"noreply@github.com"
] | noreply@github.com |
d99e7d5c2e0d76e1e690d6d994bb7ceb29511efc | ee5b238ba8995b0aadf1837a1393cfc45326ad2f | /GraficoBoxplot.py | 24dfa3c00c05c0e0848315e69ef724420f5dae28 | [] | no_license | AndersonNobrega/Metrics | 354df13f4cc99da4e02695832a33bfddd94116d2 | a90b1dbf989fe7779b99185694ed37719936a2fe | refs/heads/master | 2020-05-21T00:35:33.308781 | 2019-10-23T14:39:07 | 2019-10-23T14:39:07 | 185,831,905 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,518 | py | import matplotlib.pyplot as plt
import csv
from os import makedirs
def ler_dados(arquivo):
    """Read six metric columns (5, 6, 10, 11, 15, 16) from a metrics CSV.

    The first row is a header and is skipped. Returns a tuple of six float
    lists, one per column, in the order above.

    Fix: replaces the manual `cont` flag (reassigned on every row just to
    skip the header) with an explicit next() on the reader.
    """
    colunas = (5, 6, 10, 11, 15, 16)
    listas = tuple([] for _ in colunas)
    with open(arquivo, "r", encoding="ISO-8859-1") as fh:
        tabela = csv.reader(fh)
        next(tabela, None)  # discard the header row
        for linha in tabela:
            for lista, coluna in zip(listas, colunas):
                lista.append(float(linha[coluna]))
    return listas
def plot_grafico(title, data, languages, label):
    """Draw one boxplot per language and save it under Graficos/.

    Fix: the output directory is now created on demand -- the script below
    only ever created "Linguagens/", so fig.savefig() raised whenever
    "Graficos/" did not already exist.
    """
    fig = plt.figure(1, figsize=(9, 6))
    ax = fig.add_subplot(111)
    ax.boxplot(data, zorder=3)
    ax.set_xticklabels(languages)
    ax.get_yaxis().tick_left()
    plt.xlabel("Linguagens")
    plt.title(title)
    plt.grid(axis="y", linestyle="-", zorder=0)
    makedirs("Graficos", exist_ok=True)
    fig.savefig("Graficos/Linguagens" + label + "Boxplot.png", bbox_inches="tight", dpi=300)
    # clear the shared figure so the next metric starts from a blank canvas
    plt.clf()
# Load the six per-language metric tuples (each holds six float lists taken
# from CSV columns 5, 6, 10, 11, 15 and 16).
valores_csharp = ler_dados("C#/C#Metricas.csv")
valores_php = ler_dados("PHP/PHPMetricas.csv")
valores_java = ler_dados("Java/JavaMetricas.csv")
valores_vb = ler_dados("Visual Basic/VisualBasicMetricas.csv")
valores_c = ler_dados("C/CMetricas.csv")
valores_js = ler_dados("JavaScript/JavaScriptMetricas.csv")
# NOTE(review): this creates "Linguagens/" but plot_grafico saves into
# "Graficos/" -- confirm which output directory is actually intended.
try:
    makedirs("Linguagens/")
except FileExistsError:
    pass
# Regroup per metric: each entry is that metric's dataset for all languages.
ntcf = [valores_csharp[0], valores_php[0], valores_java[0], valores_vb[0], valores_c[0], valores_js[0]]
ntc = [valores_csharp[1], valores_php[1], valores_java[1], valores_vb[1], valores_c[1], valores_js[1]]
nccf = [valores_csharp[2], valores_php[2], valores_java[2], valores_vb[2], valores_c[2], valores_js[2]]
ncc = [valores_csharp[3], valores_php[3], valores_java[3], valores_vb[3], valores_c[3], valores_js[3]]
nlcf = [valores_csharp[4], valores_php[4], valores_java[4], valores_vb[4], valores_c[4], valores_js[4]]
nlc = [valores_csharp[5], valores_php[5], valores_java[5], valores_vb[5], valores_c[5], valores_js[5]]
languages = ["C#", "PHP", "Java", "Visual Basic", "C", "JavaScript"]
# One boxplot figure per metric ratio.
plot_grafico("NTCF/NTTP", ntcf, languages, "NTCF")
plot_grafico("NTC/NTTP", ntc, languages, "NTC")
plot_grafico("NCCF/NTCP", nccf, languages, "NCCF")
plot_grafico("NCC/NTCP", ncc, languages, "NCC")
plot_grafico("NLCF/NTLP", nlcf, languages, "NLCF")
plot_grafico("NLC/NTLP", nlc, languages, "NLC") | [
"anderson.nobrega62@gmail.com"
] | anderson.nobrega62@gmail.com |
a92309f4c06a45e9fc8a12855d0fbe22d95c8feb | a0c53168a4bdcfb0aa917d6d2c602f0999443a10 | /projexui/widgets/xurlwidget.py | 40516a9749b83fb4e82b2ccb12a331191a731d1e | [] | no_license | kanooshka/DPS_PIPELINE | 8067154c59ca5c8c9c09740969bb6e8537021903 | df2fcdecda5bce98e4235ffddde1e99f334562cc | refs/heads/master | 2021-05-24T04:32:03.457648 | 2018-09-07T13:25:11 | 2018-09-07T13:25:11 | 29,938,064 | 3 | 2 | null | 2020-07-23T23:06:37 | 2015-01-27T22:26:01 | Python | UTF-8 | Python | false | false | 3,438 | py | """ Defines the XUrlWidget class """
# define authorship information
__authors__ = ['Eric Hulser']
__author__ = ','.join(__authors__)
__credits__ = []
__copyright__ = 'Copyright (c) 2011, Projex Software, LLC'
__license__ = 'LGPL'
__maintainer__ = 'Projex Software, LLC'
__email__ = 'team@projexsoftware.com'
import webbrowser
from projexui import qt
from PyQt4.QtCore import Qt
from PyQt4.QtGui import QWidget,\
QHBoxLayout,\
QIcon,\
QToolButton
from projexui.widgets.xlineedit import XLineEdit
from projexui import resources
class XUrlWidget(QWidget):
    """Line edit plus a browse button for entering and opening a URL.

    Exposes the URL and hint as Qt designer properties (x_url, x_hint) and
    re-emits the line edit's change signals.
    """
    # re-emitted whenever the edit's text changes / is edited by the user
    urlChanged = qt.Signal(str)
    urlEdited = qt.Signal()
    def __init__( self, parent ):
        super(XUrlWidget, self).__init__(parent)
        # define the interface
        self._urlEdit = XLineEdit(self)
        self._urlButton = QToolButton(self)
        self._urlButton.setAutoRaise(True)
        self._urlButton.setIcon(QIcon(resources.find('img/web.png')))
        self._urlButton.setToolTip('Browse Link')
        self._urlButton.setFocusPolicy(Qt.NoFocus)
        self._urlEdit.setHint('http://')
        # edit and button side by side with no margins
        layout = QHBoxLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        layout.setSpacing(0)
        layout.addWidget(self._urlEdit)
        layout.addWidget(self._urlButton)
        self.setLayout(layout)
        self.setFocusPolicy(Qt.StrongFocus)
        # create connections
        self._urlEdit.textChanged.connect(self.urlChanged)
        self._urlEdit.textEdited.connect(self.urlEdited)
        self._urlButton.clicked.connect(self.browse)
    def blockSignals( self, state ):
        """
        Blocks the signals for this widget and its sub-parts.
        
        :param      state | <bool>
        """
        super(XUrlWidget, self).blockSignals(state)
        self._urlEdit.blockSignals(state)
        self._urlButton.blockSignals(state)
    def browse( self ):
        """
        Opens the current url in the system web browser.
        """
        webbrowser.open(self.url())
    def hint( self ):
        """
        Returns the hint associated with this widget.
        
        :return     <str>
        """
        return self._urlEdit.hint()
    def lineEdit( self ):
        """
        Returns the line edit linked with this widget.
        
        :return     <XLineEdit>
        """
        return self._urlEdit
    def setFocus(self):
        """
        Sets the focus for this widget on its line edit.
        """
        self._urlEdit.setFocus()
    @qt.Slot(str)
    def setHint( self, hint ):
        """
        Sets the hint associated with this widget.
        
        :param      hint | <str>
        """
        self._urlEdit.setHint(hint)
    @qt.Slot(str)
    def setUrl( self, url ):
        """
        Sets the url for this widget to the inputed url.
        
        :param      url | <str>
        """
        self._urlEdit.setText(str(url))
    def url( self ):
        """
        Returns the current url from the edit.
        
        :return     <str>
        """
        return str(self._urlEdit.text())
    # designer-visible properties backed by the getters/setters above
    x_hint = qt.Property(str, hint, setHint)
    x_url = qt.Property(str, url, setUrl)
__designer_plugins__ = [XUrlWidget] | [
"kanooshka@gmail.com"
] | kanooshka@gmail.com |
7410be96f175b601a5548a754e1f957a04f1ac96 | 4db9fe916704bee4e6aac4abb2f9d8a358cf3beb | /fasta_strip.py | bbae29d34c756412705c10f7fbe47d645087cd60 | [] | no_license | calandryll/cmain_bioinform | 419f840c718b57bea04fb314ba84e89835766ccd | 56c8567e718329acac93df3dc6071922092a171c | refs/heads/master | 2021-01-18T21:29:59.324444 | 2012-11-14T01:37:41 | 2012-11-14T01:37:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,612 | py | #!/usr/bin/python -tt
# Pulls a range of sequences from large FASTA files.
# Any inputs with - or -- are optional and will default to certain values.
# Written by: Christopher R. Main, University of Delaware
# Last Updated: 09/08/2012
# Versions:
# 0.1 - Open fasta file correctly
# 0.2 - Pull record names and parse them
# 0.3 - Print out cluster range that is wanted
# 0.4 - Write wanted sequences to file
# 0.5 - Cosmetic interactions
# 0.6 - Changes to argument handling
# Allow opening of FASTA file
from Bio import SeqIO
# Ready arguments from the commandline
import argparse
# Read and parse the arguments from the command line
parser = argparse.ArgumentParser()
parser.add_argument("-v", "--version", action="version", version='Version 0.6')
parser.add_argument("filename", help="location of FASTA file")
parser.add_argument("out_file", help="filename for output of BLAST search results")
parser.add_argument("first", help="Starting sequence number to begin parsing of file", type=int)
parser.add_argument("last", help="Ending sequence number", type=int)
args = parser.parse_args()
print "Loading %s to memory..." % (args.filename)
handle = open(args.filename, "rU")
output_handle = open(args.out_file, "w")
# Pulls in record names
records = list(SeqIO.parse(handle, "fasta"))
# Write sequences to file
for i in range(int(args.first), int(args.last) + 1):
SeqIO.write(records[i], output_handle, "fasta")
print "Writing %s to file" % (records[i].id)
handle.close()
output_handle.close()
print "Writing of %s complete, closing file..." % (args.out_file) | [
"calandryll@gmail.com"
] | calandryll@gmail.com |
f888c9b94e1bde631a9935f532c892b94ad13a65 | c1a27a70a3cbf74270a073ddb0dcb6dde7f222f0 | /src/rock_paper_scissors_cv/capture.py | 5993dc36ac1360105bcbacb1a974f7912d37b6c2 | [] | no_license | karosas/rock_paper_scissors_cv | ba9183a69e78b3f7d23b4d09e618244b33faa04b | b8a15e9613e03b10bafb444a5d9c3145d3676856 | refs/heads/master | 2022-11-16T01:26:53.401931 | 2020-07-20T18:59:09 | 2020-07-20T18:59:09 | 281,201,606 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,728 | py | import cv2
import numpy as np
import os
import time
from pathlib import Path
CAPTURE_INTERVAL_MS = 250
TRAIN_DATA_FOLDER_NAME = "train"
VALIDATION_DATA_FOLDER_NAME = "validation"
def current_milli_time():
    """Current wall-clock time in whole milliseconds since the epoch."""
    millis = time.time() * 1000
    return int(round(millis))
# Double texts for better contrast
def add_texts(frame, is_capturing, capture_type):
    """Overlay the hotkey help and capture status on *frame*.

    Each line is drawn twice -- thick black under thin white -- so the text
    stays readable on any background.
    """
    font = cv2.FONT_HERSHEY_SIMPLEX
    black, white = (0, 0, 0), (255, 255, 255)
    lines = [
        ("Hotkeys to start capturing: 'a' - Rock, 's' - Scissors, 'd' - Paper", (20, 20)),
        ("Is Capturing: {}".format(is_capturing), (20, 40)),
    ]
    if is_capturing:
        lines.append(("Now capturing: {}".format(capture_type), (20, 60)))
    for text, origin in lines:
        cv2.putText(frame, text, origin, font, 0.5, black, 3, cv2.LINE_AA)
        cv2.putText(frame, text, origin, font, 0.5, white, 1, cv2.LINE_AA)
def create_folders(capture_type):
    """Ensure the train/ and validation/ class folders exist for *capture_type*.

    Folders are created next to this script; existing folders are left alone.
    """
    base_dir = os.path.dirname(os.path.realpath(__file__))
    for bucket in (TRAIN_DATA_FOLDER_NAME, VALIDATION_DATA_FOLDER_NAME):
        Path(os.path.join(base_dir, bucket, capture_type)).mkdir(parents=True, exist_ok=True)
def start_capture():
    """Webcam loop: preview frames and, while capturing, save a grayscale
    256x256 JPEG of the selected gesture every CAPTURE_INTERVAL_MS.

    Hotkeys (polled via cv2.waitKey): 'a'=rock, 's'=scissors, 'd'=paper
    start capturing that class; 'q' quits the loop.
    """
    cap = cv2.VideoCapture(0)
    last_capture_time = 0
    is_capturing = False
    capture_type = 'Unknown'
    # per-class counter used both for the train/validation split and for
    # the saved file names
    capture_count_dict = {}
    data_type = TRAIN_DATA_FOLDER_NAME
    while True:
        ret, frame = cap.read()
        # BGR -> gray -> back to 3-channel so colored overlays still work
        gray = cv2.cvtColor(cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY), cv2.COLOR_GRAY2RGB)
        if is_capturing and current_milli_time() - last_capture_time > CAPTURE_INTERVAL_MS:
            last_capture_time = current_milli_time()
            if capture_type not in capture_count_dict:
                capture_count_dict[capture_type] = 0
            capture_count_dict[capture_type] += 1
            resized = cv2.resize(gray, (256, 256), interpolation=cv2.INTER_AREA)
            # red border on the preview signals that a frame was just saved
            height, width, channels = gray.shape
            cv2.rectangle(gray, (0, 0), (width, height), (0, 0, 255), 50)
            # Save 25% of data as validation data
            if capture_count_dict[capture_type] % 4 == 0:
                data_type = VALIDATION_DATA_FOLDER_NAME
            else:
                data_type = TRAIN_DATA_FOLDER_NAME
            # NOTE(review): os.chdir changes the process-wide working
            # directory on every save -- confirm nothing else relies on cwd.
            os.chdir(os.path.join(os.path.dirname(os.path.realpath(__file__)), data_type, capture_type))
            cv2.imwrite("{}_{}.jpg".format(capture_type, capture_count_dict[capture_type]), resized)
        add_texts(gray, is_capturing, capture_type)
        cv2.imshow('Dataset', gray)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
        if cv2.waitKey(10) & 0xFF == ord('a'):
            print('a Pressed')
            is_capturing = True
            capture_type = 'rock'
            create_folders(capture_type)
        if cv2.waitKey(10) & 0xFF == ord('s'):
            # NOTE(review): message says 'sas Pressed' -- likely a typo for
            # 's Pressed'; left unchanged since it is a runtime string.
            print('sas Pressed')
            is_capturing = True
            capture_type = 'scissors'
            create_folders(capture_type)
        if cv2.waitKey(10) & 0xFF == ord('d'):
            print('d Pressed')
            is_capturing = True
            capture_type = 'paper'
            create_folders(capture_type)
    cap.release()
    cv2.destroyAllWindows()
def main():
    """Entry point: run the interactive webcam capture loop."""
    start_capture()
if __name__ == '__main__':
    # Fix: dropped the stray trailing semicolon (non-idiomatic in Python).
    main()
"e.ausvicas@gmail.com"
] | e.ausvicas@gmail.com |
ed1a9f9656b9acbb48484d06b1a881f9b3cb183b | b781b2a178bef35090f4270ad834985ff882d0aa | /mictest/test_case/test_order/test_login.py | 19ae8dfbdceefdca3de5ab8e1460a8191972ab4f | [] | no_license | cjzimeng/mictest | d0ac19d9fb4105ecad6b563902f0dae709950a5c | efb65972799b3d8f65ecead568ce593d060a206b | refs/heads/master | 2023-01-22T17:01:50.492773 | 2020-12-02T07:46:15 | 2020-12-02T07:46:15 | 289,271,655 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 752 | py | import json
import allure
import requests
from test_case.test_basic_data import *
#
# @allure.description("测试登录")
# @allure.title('测试登录用例')
# @allure.testcase('http://calapi.51jirili.com/dream/categoryList','登录用例地址')
# def test_login():
# r = requests.post(test_host+'/micro-service/relation/wx-user',
# headers = headers,
# json={
# "openId":openId,
# "mobile": "13058041296",
# "userId": 86,
# "nickName": "cecilia"
# })
# print(json.dumps(r.json(), indent=2, ensure_ascii=False))
# assert (r.json()['data'])['realName'] == '陈姣'
| [
"443516246"
] | 443516246 |
1401a17efdbfb7b2ff484178a6944d5e373dd1f7 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03488/s626861342.py | 65eae1c60abd7285830f846f3f83e1f7681f124f | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,207 | py | s=input()
# X, Y: target end coordinates (the instruction string s is read just above).
X,Y=map(int,input().split())
# NOTE(review): defaultdict is imported but never used in this solution.
from collections import defaultdict
def solve(da,dp,G):
    """Return True when target coordinate G is reachable.

    Starting from any position in the set ``dp``, every element of ``da``
    must be applied exactly once as either a positive or a negative step;
    the answer is whether some sign assignment lands exactly on G.
    """
    reachable = set(dp)
    for step in da:
        # Branch every currently reachable position into +step / -step.
        reachable = {pos + delta for pos in reachable for delta in (step, -step)}
    return G in reachable
"""amax=0
for i in a:
amax+=abs(i)
if amax==0:
if G==0:
return True
else:
return False
if a[0]==G:
return True
dp=[[0]*2*amax for _ in range(3)]
dp[0][a[0]]=1
for i in range(1,len(a)):
p=a[i]
for j in range(-amax,amax):
dp[i%3][j-p]+=dp[(i-1)%3][j]
dp[i%3][j+p]+=dp[(i-1)%3][j]
#print(dp)
if dp[i%3][G]>=1:
return True
return False"""
"""#print(a)
dp=set()
dp.add(a[0])
dp2=set()
for i in range(1,len(a)):
for j in dp:
dp2.add(j-a[i])
dp2.add(j+a[i])
dp=copy.deepcopy(dp2)
#print(dp)
if G in dp:
return True
return False"""
# Split the move string on turn commands 'T'; run lengths between turns are
# the step sizes. Even-indexed runs move along x, odd-indexed along y;
# d[0] is the initial forced x move (presumably an AtCoder-style robot
# walk problem — confirm against the original statement).
d=[len(x) for x in s.split("T")]
dx=d[2::2]
dy=d[1::2]
if solve(dx,{d[0]},X) and solve(dy,{0},Y):
    print("Yes")
else:
    print("No")
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
e87293f0dbca65cf9e8eb987d30bb7c5e8ed590e | 29b1b15e4fef90717ff7bf8b13ab9a23cdc17c51 | /finalproduct/testapp/migrations/0003_comments.py | 24bc341b3f011285e4bdec6c57d8b347120c9b42 | [] | no_license | deepawalekedar319/DjangoProjects | 93fe59812593a1e1b8f542c8c5b1642bc95f6da4 | 1780b703a3022ea17dc188ad98b0f17bb14fa12f | refs/heads/main | 2023-09-03T04:48:21.201822 | 2021-11-08T05:28:00 | 2021-11-08T05:28:00 | 425,706,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,135 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.17 on 2020-10-31 13:23
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: adds the Comments model with a FK to testapp.Post."""
    dependencies = [
        ('testapp', '0002_auto_20201027_1439'),
    ]
    operations = [
        migrations.CreateModel(
            name='Comments',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=32)),
                ('email', models.EmailField(max_length=254)),
                ('body', models.TextField()),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('active', models.BooleanField(default=True)),
                ('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='testapp.Post')),
            ],
            options={
                'ordering': ('-created',),
            },
        ),
    ]
| [
"deepawalekedar319@gmail.com"
] | deepawalekedar319@gmail.com |
cd6c3d056cf27f3082fac0bd37eca025145c9f9b | 540c849de98b01f6373908a95fea6b6b7bba997e | /PLS/PyFiles-Depricated/PLS_PCA+LDA_NMF.py | 10a8dde05e228f42ded8bde94875ee2804fc6bc4 | [] | no_license | ThiaraSana/BacteriaDetection | 42fd57c30f6f113029df880e348c1670e35de6ab | 3030d84905718bd49f7f42d03397476f741804aa | refs/heads/main | 2023-08-07T05:49:15.693046 | 2021-10-02T17:27:25 | 2021-10-02T17:27:25 | 405,360,091 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,165 | py | import glob
import os
import numpy as np
import pandas as pd
import sys
sys.path.append('/Users/sanaahmed/Desktop/mb-master')
import mb
from pylab import *
from matplotlib import pyplot
from sklearn import decomposition
from sklearn.decomposition import PCA
from sklearn.decomposition import NMF
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
training_folder_conc = '/Users/sanaahmed/Desktop/test/Data/Training_FreshDMEMFBSNoPS/'
testing_folder_pH = '/Users/sanaahmed/Desktop/test/Data/NApH/'
training_folder_pH = '/Users/sanaahmed/Desktop/test/Data/Training_pH/'
testing_folder_msc_21 = '/Users/sanaahmed/Desktop/test/Data/Testing_SpikedMSCs_0421/'
def getData(files, footer, header, j):
    """Read CSV spectra exports, average the three replicate columns, and plot.

    files  -- list of CSV paths; column 0 is wavelength, columns 1/3/5 are
              replicate absorbances, column 6 holds a per-file value the
              code treats as a Na concentration (variable name `conc_na`).
    footer -- rows to skip at the end of each file
    header -- rows to skip at the start of each file
    j      -- suffix for the saved figure './Graphs/UV Vis/UV_VIS_<j>.png'
    Returns (spectra ndarray, concentration ndarray, label list).
    """
    data = []
    na_conc=[]
    label = []
    for i in range(0,len(files)):
        a=np.genfromtxt(files[i], skip_header=header, delimiter=',', skip_footer=footer)
        conc_na = a[:,6][0]  # column 6 is constant per file; take the first entry
        w1 = a[:,0]          # wavelength axis
        a1 = a[:,1]
        a2 = a[:,3]
        a3 = a[:,5]
        b1 = (a1 + a2 + a3)/3  # mean of the three replicate absorbance columns
        na_conc.append(conc_na)
        label.append(conc_na)
        data.append(b1)
    data1 = np.array(data)
    na_conc1 = np.array(na_conc)
    # NOTE(review): only the *last* file's spectrum (w1/b1) is plotted here,
    # and `plt.show` is referenced without calling it (a no-op) — confirm intent.
    plot(w1, b1)
    plt.show
    xlabel("wavelength [nm]")
    ylabel("Absorbance")
    legend(label)
    plt.savefig('./Graphs/UV Vis/UV_VIS_' + j + '.png')
    plt.clf()
    return data1, na_conc1, label
def Predict_pH(files, footer, header, j):
    """Read CSV spectra, average the three replicate columns, and plot each one.

    Same file layout as getData, but column 6 is treated as a pH value
    (the variable name suggests phenol-red based pH — confirm with the data).
    Returns (list of averaged spectra, list of pH values, label list).
    """
    data = []
    pH_phenolred=[]
    label = []
    for i in range(0,len(files)):
        a=np.genfromtxt(files[i], skip_header=header, delimiter=',', skip_footer=footer)
        pH = a[:,6][0]  # column 6 is constant per file; take the first entry
        w1 = a[:,0]
        a1 = a[:,1]
        a2 = a[:,3]
        a3 = a[:,5]
        b1 = (a1 + a2 + a3)/3  # mean of the replicate absorbance columns
        pH_phenolred.append(pH)
        data.append(b1)
        label.append(pH)
        plot(w1, b1)  # unlike getData, every file's spectrum is plotted
    plt.show
    xlabel("wavelength [nm]")
    ylabel("Absorbance")
    legend(label)
    plt.savefig('./Graphs/UV Vis/UV_VIS_' + j + '.png')
    plt.clf()
    return data, pH_phenolred, label
def SetNMF(absorbance, y, j):
    """Fit a 16-component NMF on the spectra.

    Returns (transformed data, fitted model). `y` and `j` are accepted for
    signature symmetry with SetPCA but are not used.
    """
    model = decomposition.NMF(n_components=16)
    model.fit(absorbance)
    transformed = model.transform(absorbance)
    return transformed, model
def SetPCA(absorbance, concentration, j):
    """Fit Linear Discriminant Analysis on the spectra and return
    (transformed data, fitted model).

    NOTE(review): despite the name this uses LDA (supervised), not PCA,
    and the parameter `j` is unused — confirm the intended behaviour.
    """
    LDA = LinearDiscriminantAnalysis()
    concentration=concentration.astype('int')  # LDA needs discrete class labels
    LDA.fit(absorbance, concentration)
    absorbance_LDA = LDA.transform(absorbance)
    # NOTE(review): nothing has been drawn on the current figure at this point,
    # so this saves an empty figure, and show() blocks until the window closes.
    plt.savefig('./Graphs/' + 'PCATraining.png')
    show()
    return absorbance_LDA, LDA
def plotGraphPLS(actual, predicted, j):
    """Plot actual values ('+') against predicted values ('.') and save the
    figure to ./Graphs/PLS/PLS_Graph<j>.png, then clear the figure."""
    out_path = './Graphs/PLS/PLS_Graph' + j + '.png'
    figure('PLS Graph')
    plot(actual, '+')
    plot(predicted, '.')
    plt.savefig(out_path)
    plt.clf()
def plotGraphTraining1(predicted, label, l, position):
    """Scatter-plot the first six predicted values against their positions and
    save the figure to ./Graphs/PLS/PLS_Graph<l>.png.

    `label` is currently unused (the legend call was disabled in the original).
    """
    # The original repeated plot(position[i], predicted[i], '.') six times
    # (plus commented-out copies up to index 9); a loop is equivalent.
    for idx in range(6):
        plot(position[idx], predicted[idx], '.')
    plt.savefig('./Graphs/PLS/PLS_Graph' + l + '.png')
    plt.clf()
def plotGraphTraining2(predicted, label, l, position):
    """Scatter-plot only the first predicted value against its position and
    save the figure to ./Graphs/PLS/PLS_Graph<l>.png.

    Only index 0 is plotted (the remaining calls were commented out in the
    original); `label` is currently unused.
    """
    plot(position[0], predicted[0], '.')
    plt.savefig('./Graphs/PLS/PLS_Graph' + l + '.png')
    plt.clf()
if __name__ == "__main__":
    # --- NMF + PLS pipeline ----------------------------------------------
    # NOTE(review): a second `if __name__ == "__main__":` block appears later
    # in this file (PCA/LDA variant); when run as a script BOTH execute in order.
    training_files_conc = sorted(glob.glob(training_folder_conc+"*csv"),key=os.path.getmtime)
    trainX_conc, trainY_conc, label = getData(training_files_conc, 93, 2, "3")
    #TRAIN NMF
    nmf_train, nmf = SetNMF(trainX_conc, trainY_conc, "train_n5")
    #TRAIN PLS_CONC
    pls_conc,sc = mb.raman.pls_x(nmf_train, trainY_conc, n_components=3)
    train_predicted_conc = pls_conc.predict(nmf_train)
    plot(pls_conc.x_loadings_)
    show()
    plotGraphPLS(trainY_conc, train_predicted_conc, "3")
    training_files_pH = sorted(glob.glob(training_folder_pH+"*csv"),key=os.path.getmtime)
    trainX_pH, trainY_pH, label = Predict_pH(training_files_pH, 1093, 336, "4")
    #TRAIN PLS_pH
    pls_pH,sc = mb.raman.pls_x(trainX_pH, trainY_pH, n_components=3)
    train_predicted_pH = pls_pH.predict(trainX_pH)
    plotGraphPLS(trainY_pH, train_predicted_pH, "4")
    ######################################################
    # Apply the trained NMF + PLS models to the held-out data sets.
    testing_files_pH = sorted(glob.glob(testing_folder_pH +"*csv"),key=os.path.getmtime)
    testX_conc, testY_conc, label = getData(testing_files_pH, 93, 2, "13")
    trainX_conc_nmf = nmf.transform(testX_conc)
    conc_pred = pls_conc.predict(trainX_conc_nmf)
    plotGraphPLS(testY_conc, conc_pred, "13")
    # testX_pH, testY_pH, label = Predict_pH(testing_files_pH, 1093, 2, "14")
    # test_predicted_pH = pls_pH.predict(testX_pH)
    # print(test_predicted_pH)
    # print(conc_pred)
    testing_files_msc = sorted(glob.glob(testing_folder_msc_21 +"*csv"),key=os.path.getmtime)
    testX_msc, testY_msc, label = getData(testing_files_msc, 93, 2, "14")
    trainX_msc_nmf = nmf.transform(testX_msc)
    conc_pred = pls_conc.predict(trainX_msc_nmf)
    plotGraphPLS(testY_msc, conc_pred, "14")
### PCA ################################################
if __name__ == "__main__":
    # --- LDA ("PCA") + PLS pipeline --------------------------------------
    # NOTE(review): this is the file's second __main__ guard; it runs right
    # after the NMF block above when the script is executed directly.
    training_files_conc = sorted(glob.glob(training_folder_conc+"*csv"),key=os.path.getmtime)
    trainX_conc, trainY_conc, label = getData(training_files_conc, 93, 2, "1")
    #TRAIN PCA
    pca_train, pca = SetPCA(trainX_conc, trainY_conc, "train_n5")
    #TRAIN PLS_CONC
    pls_conc,sc = mb.raman.pls_x(pca_train, trainY_conc, n_components=3)
    train_predicted_conc = pls_conc.predict(pca_train)
    plot(pls_conc.x_loadings_)
    show()
    plotGraphPLS(trainY_conc, train_predicted_conc, "1")
    training_files_pH = sorted(glob.glob(training_folder_pH+"*csv"),key=os.path.getmtime)
    trainX_pH, trainY_pH, label = Predict_pH(training_files_pH, 1093, 336, "2")
    #TRAIN PLS_pH
    pls_pH,sc = mb.raman.pls_x(trainX_pH, trainY_pH, n_components=3)
    train_predicted_pH = pls_pH.predict(trainX_pH)
    plotGraphPLS(trainY_pH, train_predicted_pH, "2")
    ######################################################
    # Apply the trained LDA + PLS models to the held-out data sets.
    testing_files_pH = sorted(glob.glob(testing_folder_msc_21 +"*csv"),key=os.path.getmtime)
    testX, testY, label = getData(testing_files_pH, 93, 2, "10")
    testX_pca = pca.transform(testX)
    conc_pred = pls_conc.predict(testX_pca)
    plotGraphPLS(testY, conc_pred, "10")
    # testX_pH, testY_pH, label = Predict_pH(testing_files_pH, 1093, 2, "2")
    # test_predicted_pH = pls_pH.predict(testX_pH)
    testing_files_msc = sorted(glob.glob(testing_folder_msc_21 +"*csv"),key=os.path.getmtime)
    testX_msc, testY_msc, label = getData(testing_files_msc, 93, 2, "12")
    trainX_msc_pca = pca.transform(testX_msc)
    conc_pred = pls_conc.predict(trainX_msc_pca)
    plotGraphPLS(testY_msc, conc_pred, "12")
"56861810+ThiaraSana@users.noreply.github.com"
] | 56861810+ThiaraSana@users.noreply.github.com |
eda145b24f8eb3a31944d716b1ce043274db0706 | bef0f2ddfcc4f7b7ef79638efba5848c13bdb6ad | /pu_cam.py | b3a805307302d7b76b12bc6b9ef9590854398899 | [
"MIT"
] | permissive | pranav083/puauto_login | b0d67528633b2a39e7b14132fb6040c30a5cf782 | 608ad1dc7dd98802e7356995dcfa9e4bf2881c81 | refs/heads/master | 2020-05-03T11:16:33.211744 | 2019-03-30T18:54:48 | 2019-03-30T18:54:48 | 166,466,569 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,033 | py |
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import time
import webbrowser

# NOTE(review): this file contained unresolved git merge-conflict markers
# (<<<<<<< HEAD / ======= / >>>>>>>), which made it a SyntaxError, and one
# branch of the conflict leaked real login credentials. The conflict has been
# resolved keeping the placeholder-credential version.
browser = webdriver.Firefox()  # requires geckodriver to be installed/on PATH
# NOTE(review): the backslashes look wrong — this was likely meant to be
# "http://bit.do/wifion"; confirm before relying on it.
browser.get("http:\\bit.do/wifion")
time.sleep(2)  # based on processor speed, increase the time for initialising the browser
username = browser.find_element_by_id("user")
password = browser.find_element_by_id("password")
username.send_keys("------")  # enter your pu wifi user name here
password.send_keys("------")  # enter your pu wifi password here
login_attempt = browser.find_element_by_xpath("//*[@type='submit']")
login_attempt.submit()
| [
"noreply@github.com"
] | noreply@github.com |
811c54e32c59559195243cf283c1baeaf6bea67e | 41ede4fd3bfba1bff0166bca7aee80dcf21434c6 | /ayhanyalcinsoy/Desktop/xfce/addon/thunar-archive-plugin/actions.py | 105c7d47862ca22da89c73e94b0087b806747df5 | [] | no_license | pisilinux/playground | a7db4b42559a21cc72fd4c8649e0231ab6a3eb3c | e4e12fff8a847ba210befc8db7e2af8556c3adf7 | refs/heads/master | 2022-08-12T23:03:27.609506 | 2022-08-11T18:28:19 | 2022-08-11T18:28:19 | 8,429,459 | 16 | 22 | null | 2022-08-11T18:28:20 | 2013-02-26T09:37:11 | Python | UTF-8 | Python | false | false | 683 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 3.
# See the file http://www.gnu.org/copyleft/gpl.txt.
from pisi.actionsapi import autotools
from pisi.actionsapi import get
from pisi.actionsapi import pisitools
from pisi.actionsapi import shelltools
def setup():
    # gtk/thunarx libraries must be linked explicitly for the plugin to build.
    shelltools.export("LDFLAGS", "%s -lgtk-x11-2.0 -lthunarx-2" % get.LDFLAGS())
    autotools.configure("--disable-static \
                         --disable-dependency-tracking")
def build():
    # Compile the plugin with the generated Makefiles.
    autotools.make()
def install():
    # Install into the packaging sandbox, then ship the standard doc files.
    autotools.rawInstall("DESTDIR=%s" % get.installDIR())
    pisitools.dodoc("AUTHORS", "ChangeLog", "COPYING", "NEWS", "README")
| [
"ayhanyalcinsoy@gmail.com"
] | ayhanyalcinsoy@gmail.com |
cbaef6ac7c24d922ac9efbcf3a1d34eaa3879b44 | c9fabaf57937634c893b159469bb8f63af238575 | /0x04-python-more_data_structures/101-square_matrix_map.py | 86b0494759b8dca33a4949512b8d1cccd1751fcc | [] | no_license | arturovictoriar/holbertonschool-higher_level_programming | 83c1f7ccd8b8b1d0b72c3a373c4358c3315071be | 1923f29921cb76b964ee5e618323f2f9520b25aa | refs/heads/master | 2022-07-31T08:25:40.616783 | 2020-05-14T20:35:19 | 2020-05-14T20:35:19 | 226,962,952 | 0 | 7 | null | null | null | null | UTF-8 | Python | false | false | 125 | py | #!/usr/bin/python3
def square_matrix_map(matrix=[]):
    """Return a new matrix where every value is squared; the input matrix
    is left unmodified."""
    return [[value ** 2 for value in row] for row in matrix]
| [
"1231@holbertonschool.com"
] | 1231@holbertonschool.com |
996902627b0c3db73ded129422f6fc258480bfdc | eadb3c1704acf1cc3a77b410e6dcc076595590d5 | /admin.py | fe307cea0ccd77312d4140ab7163d7df93961bd8 | [] | no_license | enmanuelar/finalblog | fff778d7fa31c80e91b5f38b5846fd05f6a56590 | 022e9c518b8f8b838d5e9e1ccc38bdd481644bd3 | refs/heads/master | 2021-01-19T00:25:10.161368 | 2016-08-02T23:35:10 | 2016-08-02T23:35:10 | 63,826,875 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,824 | py | import webapp2, logging
from main import *
from itertools import groupby
from collections import namedtuple
from json import JSONEncoder
def get_top_categories():
    """Return (category, comments_count) records, most-commented first."""
    TopTag = namedtuple('top_tags', ['category', 'comments_count'])
    # Sort (category, post_id) pairs so groupby sees each category as one run.
    pairs = sorted((e.category, e.post_id) for e in blogdb.get_categories_by_comments())
    ranked = []
    for category, group in groupby(pairs, key=lambda pair: pair[0]):
        ranked.append(TopTag(category, len(list(group))))
    ranked.sort(key=lambda record: record.comments_count, reverse=True)
    return ranked
class AdminHandler(Handler):
    """webapp2 handler for the admin dashboard.

    GET renders the dashboard (all posts, top-5 commented posts, top
    categories); POST toggles a post's `enabled` flag.
    """

    def get_top_posts(self):
        """Rank posts by number of comments, most-commented first.

        Returns a list of single-element lists, each holding an
        (id, title, user, category, comments_count) namedtuple — the nested
        list shape is kept for template compatibility.
        """
        comments = blogdb.get_comments_sorted_by_post_id()
        d = namedtuple('top_post', ['id', 'title', 'user', 'category', 'comments_count'])
        # The original if/else here had two byte-identical branches, so every
        # comment's post id is simply appended unconditionally.
        post_id_list = [str(comment.post_id) for comment in comments]
        l = []
        # Comments arrive sorted by post id, so each consecutive run counts
        # the comments of one post.
        for key, group in groupby(post_id_list):
            entry = blogdb.get_single_entry(int(key))
            l.append([d(key, entry.title, entry.user, entry.category, len(list(group)))])
        l.sort(key=lambda comment: comment[0].comments_count, reverse=True)
        return l

    @check_auth
    def get(self, **kwargs):
        """Render the admin dashboard (requires authentication)."""
        posts = blogdb.get_admin_entries()
        top_posts_l = self.get_top_posts()
        top_categories = get_top_categories()
        self.render("admin.html", posts=posts, top_posts=top_posts_l[:5], top_categories=top_categories, user_logged=kwargs['user_logged'], username=kwargs['username'])

    def post(self):
        """Toggle the `enabled` flag of the post given by `post_id`."""
        post_id = int(self.request.get("post_id"))
        status = self.request.get("status")
        entity = blogdb.Entry.get_by_id(post_id)
        # `status` carries the *current* state; store the opposite.
        entity.enabled = (status != "true")
        entity.put()
class ChartsHandler(Handler):
    """Serve JSON for the admin charts.

    pie_data: comment counts in fixed category order
              ["Random", "Music", "Science", "Technology", "Funny"];
    bar_data: post counts per category from blogdb.
    """

    def get(self):
        top_categories = get_top_categories()
        entries_count = blogdb.get_posts_count_by_category("random", "music", "science", "technology", "funny")
        # Fixed slot per category, matching the chart order on the front-end.
        slot = {'random': 0, 'music': 1, 'science': 2, 'technology': 3, 'funny': 4}
        l = [0, 0, 0, 0, 0]
        for c in top_categories:
            i = slot.get(c.category)
            if i is None:
                # Unknown category: the original indexed l[None] (TypeError);
                # skip such entries instead of crashing.
                continue
            # The original looked the count up in a per-category dict whose
            # values were all c.comments_count — a direct assignment is equivalent.
            l[i] = c.comments_count
        response = JSONEncoder().encode({'pie_data': l, 'bar_data': entries_count})
        self.response.out.write(response)
"ealmonterozon@gmail.com"
] | ealmonterozon@gmail.com |
5631077333222d3476b68d4a173ce9d25f7396be | caf8cbcafd448a301997770165b323438d119f5e | /.history/mercari/mercari_search_20201124184144.py | 21031f8d0dc5f3408e41cd1680d9f13d31082892 | [
"MIT"
] | permissive | KustomApe/nerdape | 03e0691f675f13ce2aefa46ee230111247e90c72 | aef6fb2d1f8c364b26d91bf8570b4487a24de69a | refs/heads/main | 2023-01-23T10:13:26.584386 | 2020-11-28T22:29:49 | 2020-11-28T22:29:49 | 309,897,105 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,776 | py | from selenium import webdriver
from selenium.webdriver.support.ui import Select
import pandas as pd
import re
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import PyQt5
import time
"""[Initial Settings]
初期設定
"""
options = webdriver.ChromeOptions()
options.add_argument('--headeless')
options.add_argument('--disable-gpu')
options.add_argument('--lang-ja')
browser = webdriver.Chrome(chrome_options=options, executable_path='./chromedriver')
"""[CSS Selector Settings]
CSSセレクターの設定
"""
PAGER = "li.pager-next"
body > div.default-container > main > div.l-content > ul > li.pager-next.visible-pc > ul > li:nth-child(1) > a
word = input("検索したいキーワードを入力してください:")
# NOTE(review): `PAGER` is a constant non-empty string, so `if PAGER:` is always
# true and this loop only terminates via an uncaught exception (e.g. when the
# next-page element is missing). Also `res = browser.get(...)` stores None
# (WebDriver.get returns None), so `browser.get(res)` below looks like a bug,
# and df_main/df_graf are re-created every iteration so only the final page's
# rows survive — confirm intent before relying on the output.
while True:
    if PAGER:
        n = 1
        res = browser.get("https://www.mercari.com/jp/search/?page="+str(n)+"&keyword="+word)
        df_main = pd.DataFrame(columns=['在庫有無','タイトル','値段','URL'])
        df_graf = pd.DataFrame(columns=['SOLD','PRICE'])
        browser.get(res)
        item_boxlist = browser.find_elements_by_css_selector(".items-box")
        for item_box in item_boxlist:
            try:
                # A sold-out badge marks the item as already sold.
                if len(item_box.find_elements_by_css_selector(".item-sold-out-badge")) > 0:
                    sold = "SOLD"
                else:
                    sold = "NOT SOLD"
                sub_title = item_box.find_element_by_class_name("items-box-body")
                title = sub_title.find_element_by_tag_name("h3").text
                item_price = item_box.find_element_by_css_selector(".items-box-price")
                price_text = item_price.text
                # Strip thousands separators and the leading yen sign.
                price_text = re.sub(r",", "", price_text).lstrip("¥ ")
                price_text_int = int(price_text)
                print(price_text_int)
                url = item_box.find_element_by_tag_name("a").get_attribute("href")
                data = pd.Series( [ sold,title,price_text_int,url ], index=df_main.columns )
                grdata = pd.Series( [ sold,price_text_int ], index=df_graf.columns )
                df_main = df_main.append( data, ignore_index=True )
                df_graf = df_graf.append( grdata, ignore_index=True )
            except Exception as e:
                print(e)
        btn = browser.find_element_by_css_selector(PAGER).get_attribute('href')
        n += 1
        print('next url:{}'.format(btn))
        time.sleep(3)
        browser.get(btn)
        print('Moving to next page...')
    else:
        print('No items anymore...')
        break
print(df_main)
sns.stripplot(x='SOLD', y='PRICE', data=df_graf)
plt.show()
sns.pairplot(df_graf,hue="SOLD")
plt.show()
print('Writing out to CSV file...')
df_main.to_csv("pricedata.csv", encoding="utf_8_sig")
print("Done")
"kustomape@gmail.com"
] | kustomape@gmail.com |
8d63e1cb3daea984971b86a7a544f1c3e00fa5b2 | 82247be0541795eb9f301fc29cca450dd2ea1ca8 | /aa.py | fd13a89550cabbd66751276139ece18276220f6c | [] | no_license | jinmyeonglee/practice_algorithm | 69318d4c32e5237ca227ceb3f7c6968fcd85513e | 8368d7c09dd569b859372cf0a7d4349f35203494 | refs/heads/master | 2020-06-05T15:16:19.062135 | 2019-06-19T06:18:10 | 2019-06-19T06:18:10 | 192,470,162 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 640 | py | import sys
if __name__ == '__main__':
    # Reads whitespace-separated ride distances from stdin and spends fares
    # from a 20000-unit budget; prints the remaining balance, or the current
    # balance as soon as a ride is out of range or unaffordable.
    money = 20000
    split = sys.stdin.readline().strip().split(' ')
    check = False
    for token in split:
        distance = int(token)
        # Valid distances are 4..178 inclusive; anything else stops processing.
        if(distance < 4 or distance > 178):
            print(money)
            check = True
            break
        # Base fare 720 covers the first 40 units; each started 8-unit
        # increment beyond that costs 80 more.
        if(distance <= 40):
            temp = 720
        else:
            distance -= 40
            temp = (distance // 8) * 80 + 720
            if(distance % 8 != 0): temp += 80
        # Stop (printing the untouched balance) if the fare is unaffordable.
        if(money - temp < 0):
            print(money)
            check = True
            break
        else: money -= temp
    if(check == False): print(money)
"jinious1111@naver.com"
] | jinious1111@naver.com |
24f7094f592ca94921452121357c166599d46016 | baad3d4a636f824b62f4aa0e4d8ac21d27bdba73 | /Empire.py | 03e3ec40dc641b1881c6f510613c3e29d31057d9 | [] | no_license | LukeSiganto/EmpireBot | 38a5400af97616ac7ea6ac8f5be313dd89a4ed65 | 13858c01e6b84cd2cdc2f9d6e241637866d1cf4f | refs/heads/master | 2020-03-30T19:43:45.082540 | 2018-10-08T23:46:50 | 2018-10-08T23:46:50 | 151,555,178 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,719 | py | # Work with Python 3.6
import discord
import random
import string
import time
import sys
if sys.version_info[0] < 3:
raise Exception("Python 3 or a more recent version is required.")
client = discord.Client()
def randomLine():
    """Pick a random line from Quotes.txt into the module-global QuoteSend.

    The original opened the file without ever closing it; a context manager
    guarantees the handle is released.
    """
    global QuoteSend
    with open('Quotes.txt') as quotes_file:
        QuoteSend = random.choice(quotes_file.readlines())
@client.event
async def on_message(message):
    """Handle incoming chat messages: the '-quote' command and the 'my ...' echo."""
    # we do not want the bot to reply to itself (it would loop forever)
    if message.author == client.user:
        return
    content_lower = message.content.lower()
    # "-quote" in any capitalisation: the original only checked '-Quote',
    # '-quote' and '-QUOTE'; lower-casing generalises to every mixed case.
    if content_lower.startswith('-quote'):
        global QuoteSend
        randomLine()
        msg = QuoteSend.format(message)
        await client.send_message(message.channel, msg)
        print('Quote Sent = ' + QuoteSend)
    # "my X": the original checked my/My/MY/mY, which is exactly lower() == 'my'.
    if content_lower.startswith('my'):
        UsrMSG = message.content
        if len(UsrMSG) < 60:
            # Echo everything after "my " in upper case as a question.
            msg = str('YOUR NEW '+(UsrMSG[3:]).upper()+'?').format(message)
            await client.send_message(message.channel, msg)
            print('Message Edited = ' + msg)
        else:
            msg = ("I hate your message, It's rough, coarse, irritating and gets everywhere").format(message)
            await client.send_message(message.channel, msg)
@client.event
async def on_ready():
    # Log the bot's identity once the Discord connection is established.
    print('Logged in as')
    print(client.user.name)
    print(client.user.id)
    print('------')
#@client.event
#async def on_server_join(ser):
# await bot.send_message(ser.default_channel, 'A surprise to be sure but a welcome one!'.format(ser.name))
client.run('token')
| [
"noreply@github.com"
] | noreply@github.com |
8ef26000770edf0c0c7b62bfd1c8c0ef5ce54bab | edc961c1a021ce34d9b426b1a76460687fa2e80e | /URI_Online/1009.py | 190555b31a128cdc5b21e5bcbdf0ddabc0f5412a | [] | no_license | JavierFSS/MySchoolProject | 0fc466a2597f0eb320720c0148ddeff3ac41aa24 | f989b1013829d5da83e0c3fffa3879e7c603ec9e | refs/heads/master | 2023-06-02T05:13:13.669548 | 2021-06-18T11:58:46 | 2021-06-18T11:58:46 | 274,874,151 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 133 | py | a = input()
# Presumably URI 1009: fixed salary plus a 15% commission on total sales.
b = float(input())  # fixed salary
c = float(input())  # total sales value
bonus = float(c * (15/100))  # 15% commission on sales
total = b + bonus
print("TOTAL = R$ %0.2f" %total)
| [
"javier.jimmy1982@gmail.com"
] | javier.jimmy1982@gmail.com |
43c459006813e5c6be8118e01dfcb2ed57d4728c | ba7d1913c4e279b6ab7a7f79e2e1e5c8a56b51af | /examples/arduino_threads.py | 6e7d869d8d3c703a02c412ee2f757c6712d0e66f | [
"MIT"
] | permissive | mitag/python-arduino-serial | 678002b370a043c2f89a9dc44578e6b8b94a211f | 6beccc099167a21595c84286a973347a3a903cbd | refs/heads/master | 2020-06-03T20:10:16.472382 | 2018-09-02T14:05:16 | 2018-09-02T14:05:16 | 191,714,920 | 0 | 0 | MIT | 2019-06-13T07:40:52 | 2019-06-13T07:40:52 | null | UTF-8 | Python | false | false | 2,110 | py | from __future__ import print_function, division, absolute_import
import time
import threading
from robust_serial import write_order, Order
from robust_serial.threads import CommandThread, ListenerThread
from robust_serial.utils import open_serial_port, CustomQueue
def reset_command_queue():
    """
    Clear the command queue
    """
    # `command_queue` is the module-level CustomQueue created in the
    # __main__ block below.
    command_queue.clear()
if __name__ == '__main__':
    try:
        serial_file = open_serial_port(baudrate=115200)
    except Exception as e:
        # NOTE(review): `raise e` here is a no-op re-raise that loses the
        # original traceback position; a bare `raise` would be equivalent or better.
        raise e
    is_connected = False
    # Initialize communication with Arduino: send HELLO until the board
    # answers HELLO or ALREADY_CONNECTED.
    while not is_connected:
        print("Waiting for arduino...")
        write_order(serial_file, Order.HELLO)
        bytes_array = bytearray(serial_file.read(1))
        if not bytes_array:
            time.sleep(2)
            continue
        byte = bytes_array[0]
        if byte in [Order.HELLO.value, Order.ALREADY_CONNECTED.value]:
            is_connected = True
    print("Connected to Arduino")
    # Create Command queue for sending orders
    command_queue = CustomQueue(2)
    # Number of messages we can send to the Arduino without receiving an acknowledgment
    n_messages_allowed = 3
    n_received_semaphore = threading.Semaphore(n_messages_allowed)
    # Lock for accessing serial file (to avoid reading and writing at the same time)
    serial_lock = threading.Lock()
    # Event to notify threads that they should terminate
    exit_event = threading.Event()
    print("Starting Communication Threads")
    # Threads for arduino communication
    threads = [CommandThread(serial_file, command_queue, exit_event, n_received_semaphore, serial_lock),
               ListenerThread(serial_file, exit_event, n_received_semaphore, serial_lock)]
    for t in threads:
        t.start()
    # Send 3 orders to the arduino
    command_queue.put((Order.MOTOR, -56))
    command_queue.put((Order.SERVO, 120))
    time.sleep(2)
    #
    command_queue.put((Order.MOTOR, 0))
    # End the threads
    exit_event.set()
    n_received_semaphore.release()
    print("Exiting...")
    for t in threads:
        t.join()
| [
"antonin.raffin@ensta-paristech.fr"
] | antonin.raffin@ensta-paristech.fr |
538215aa0cc6b8084fff013b4fd1dac21131423c | 523f8f5febbbfeb6d42183f2bbeebc36f98eadb5 | /80_best.py | c500e3032fd796de2b2a3073cdc4baa3dbdbb67f | [] | no_license | saleed/LeetCode | 655f82fdfcc3000400f49388e97fc0560f356af0 | 48b43999fb7e2ed82d922e1f64ac76f8fabe4baa | refs/heads/master | 2022-06-15T21:54:56.223204 | 2022-05-09T14:05:50 | 2022-05-09T14:05:50 | 209,430,056 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 608 | py | class Solution(object):
def removeDuplicates(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
n=0
p=0
pre=float("inf")
for i in range(len(nums)):
if nums[i]==pre:
if n==2:
continue
else:
n+=1
nums[p]=nums[i]
p+=1
else:
n=1
nums[p]=nums[i]
p+=1
pre=nums[i]
return p
# Manual check: expect 5, with the first five slots becoming [1,1,2,2,3].
a=Solution()
test=[1,1,1,2,2,3]
print(a.removeDuplicates(test))
"noelsun@mowennaierdeMacBook-Pro.local"
] | noelsun@mowennaierdeMacBook-Pro.local |
b76b81db5069d7990b3344efce198036913a8fa9 | 4d7d330630d4205bde47cbac5db115c68c179001 | /ejercicio_final_django/ejercicio_final_django/urls.py | b108bdb4d68e3d8b36adf0ce95df6c9cbce64a98 | [] | no_license | gafajardogr/CursoLeonEoiPythonDjango | 6d9bfa39e15f2e80d869b369b21de066856d674e | 1c630c750b4963c2634403b850b7c799797947f6 | refs/heads/master | 2021-09-03T05:10:49.440799 | 2018-01-05T21:47:05 | 2018-01-05T21:47:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,523 | py | """ejercicio_final_django URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path, include
# from django.conf.urls import url, include
from rest_framework import routers
from main import views
# DRF router: exposes the three viewsets as REST endpoints under /api/.
router = routers.DefaultRouter()
router.register(r'fighters', views.FighterViewSet)
router.register(r'tournaments', views.TournamentViewSet)
router.register(r'combats', views.CombatViewSet)
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('api/', include(router.urls)),
    path('api-auth/', include('rest_framework.urls')),
    path('', views.IndexView.as_view())
]
# Only for development environment
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"malandro@gmail.com"
] | malandro@gmail.com |
64cfaf128c32f6121b1d5cd6194329ba27f2532b | c0792645c156cb9e20a1aa2b28c565150358bc6e | /apps/inmueble/migrations/0007_auto_20180413_2159.py | 67c3d2b9cf04f9cc969b9db636a4659a8eea6221 | [] | no_license | clioo/Praver | b22fd92886e0399845adb4366663cae6a7d7853b | 523f0d78e0a2039a5bae3e539c93e2c2415a0840 | refs/heads/master | 2020-03-11T12:38:54.272392 | 2018-06-28T18:24:21 | 2018-06-28T18:24:21 | 130,003,043 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 636 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2018-04-14 03:59
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: make Inmueble.latitud/longitud blank-able CharFields."""
    dependencies = [
        ('inmueble', '0006_imagenesinmbueble'),
    ]
    operations = [
        migrations.AlterField(
            model_name='inmueble',
            name='latitud',
            field=models.CharField(blank=True, max_length=100),
        ),
        migrations.AlterField(
            model_name='inmueble',
            name='longitud',
            field=models.CharField(blank=True, max_length=100),
        ),
    ]
| [
"jesus_acosta1996@hotmail.com"
] | jesus_acosta1996@hotmail.com |
02dcffa0f1904689cb6c05fe6bb8cf05cdf3fef8 | f2f4cb105681064fc2d0dcecf97471e06beb6947 | /mysite/blog/views.py | 205caff1b043ccbc92e82149df0a4f967671735c | [] | no_license | MansiLad/MyBlogApp | eab572b19469dcc329a6400f953f8534f143e5a3 | 76458750afb83f7d5c28ddf92b11ca6d5856f906 | refs/heads/main | 2023-02-16T22:47:27.174712 | 2021-01-20T17:46:12 | 2021-01-20T17:46:12 | 331,292,982 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,680 | py | from django.shortcuts import render, get_object_or_404
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.views.generic import ListView
from django.core.mail import send_mail
from django.contrib.postgres.search import SearchVector, SearchQuery, SearchRank, TrigramSimilarity
from taggit.models import Tag
from django.db.models import Count
from .forms import EmailPostForm, CommentForm
from .models import Post, Comment
# Create your views here.
def post_list(request, tag_slug=None):
    """Render the paginated list of published posts, optionally filtered by tag."""
    tag = None
    post_qs = Post.published.all()
    if tag_slug:
        tag = get_object_or_404(Tag, slug=tag_slug)
        post_qs = post_qs.filter(tags__in=[tag])
    paginator = Paginator(post_qs, 3)  # three posts per page
    page = request.GET.get('page')
    try:
        posts = paginator.page(page)
    except PageNotAnInteger:
        posts = paginator.page(1)                    # default to the first page
    except EmptyPage:
        posts = paginator.page(paginator.num_pages)  # clamp to the last page
    return render(request, 'blog/post/list.html', {'page': page, 'posts': posts, 'tag': tag})
def post_detail(request, year, month, day, post):
    """Show a single published post, its active comments, the comment form,
    and up to four similar posts (ranked by number of shared tags)."""
    post = get_object_or_404(
        Post, slug=post,
        status='published',
        publish__year=year,
        publish__month=month,
        publish__day=day
    )
    comments = post.comments.filter(active=True)
    new_comment = None
    if request.method == 'POST':
        # A comment was submitted: validate and attach it to this post.
        comment_form = CommentForm(data=request.POST)
        if comment_form.is_valid():
            new_comment = comment_form.save(commit=False)
            new_comment.post = post
            new_comment.save()
    else:
        comment_form = CommentForm()
    # Similar posts: published posts sharing tags with this one, excluding
    # itself, ordered by shared-tag count then recency, limited to four.
    post_tags_ids = post.tags.values_list('id', flat=True)
    similar_posts = Post.published.filter(tags__in=post_tags_ids)\
        .exclude(id=post.id)
    similar_posts = similar_posts.annotate(same_tags=Count('tags'))\
        .order_by('-same_tags','-publish')[:4]
    return render(request, 'blog/post/detail.html', {'post':post, 'comments':comments,'new_comment':new_comment,'comment_form':comment_form, 'similar_posts':similar_posts})
class PostListView(ListView):
    """Class-based alternative to post_list: three published posts per page."""
    queryset = Post.published.all()
    context_object_name = 'posts'  # template context variable name
    paginate_by = 3
    template_name = 'blog/post/list.html'
def post_share(request, post_id):
    """Display and process the 'share this post by e-mail' form."""
    post = get_object_or_404(Post, id=post_id, status='published')
    sent = False
    if request.method == 'POST':
        form = EmailPostForm(request.POST)
        if form.is_valid():
            cd = form.cleaned_data
            # Absolute URL so the link works outside the site context.
            post_url = request.build_absolute_uri(post.get_absolute_url())
            subject = '{} ({}) recommends you reading "{}"'.format(cd['name'], cd['email'],post.title)
            message = 'Read "{}" at {}\n\n{}\'s comments: {}'.format(post.title,post_url,cd['name'],cd['comments'])
            send_mail(subject, message, 'admin@myblog.com',[cd['to']])
            sent = True
    else:
        form = EmailPostForm()
    return render(request, 'blog/post/share.html', {'post':post, 'form':form, 'sent':sent})
def post_search(request):
    """Search post titles by trigram similarity.

    Fix: the original referenced `SearchForm` without ever importing it,
    raising NameError on the first request; it is now imported from .forms
    at the top of the module.

    NOTE: TrigramSimilarity requires PostgreSQL with the pg_trgm extension.
    """
    form = SearchForm()
    query = None
    results = []
    if 'query' in request.GET:
        form = SearchForm(request.GET)
        if form.is_valid():
            query = form.cleaned_data['query']
            # Rank titles by similarity and keep reasonably close matches only.
            results = Post.objects.annotate(
                similarity=TrigramSimilarity('title', query),
            ).filter(similarity__gt=0.3).order_by('-similarity')
    return render(request, 'blog/post/search.html',{'form':form,'query':query,'results':results})
| [
"noreply@github.com"
] | noreply@github.com |
9df488729a33c40b7f0a79805a0e490939c392cc | de06c4a1fb02fd23eadfc58c770d87edfd0a6d38 | /group_buying/payment/migrations/0002_auto_20200829_0923.py | da6c064bd2bf89bfec813229cb49073801216a4d | [] | no_license | saisantoshchirag/group_buying | c12dc0bf1882cf03d20e6865dd98105a28907f80 | 1d9fb28f99dfb9b085e43bb5429bde476680ffa7 | refs/heads/master | 2023-08-15T01:27:56.203321 | 2020-11-13T11:16:36 | 2020-11-13T11:16:36 | 267,057,651 | 0 | 1 | null | 2021-09-22T19:21:23 | 2020-05-26T13:58:14 | HTML | UTF-8 | Python | false | false | 1,038 | py | # Generated by Django 2.1.5 on 2020-08-29 03:53
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('payment', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='orders',
name='address',
),
migrations.RemoveField(
model_name='orders',
name='city',
),
migrations.RemoveField(
model_name='orders',
name='email',
),
migrations.RemoveField(
model_name='orders',
name='items_json',
),
migrations.RemoveField(
model_name='orders',
name='name',
),
migrations.RemoveField(
model_name='orders',
name='phone',
),
migrations.RemoveField(
model_name='orders',
name='state',
),
migrations.RemoveField(
model_name='orders',
name='zip_code',
),
]
| [
"saisantosh.c17@iiits.in"
] | saisantosh.c17@iiits.in |
eca4633d060603bdcdb613e539c02efd7707de5b | 6d6b0055a240ec5ced9d6fb28013a5f21f70b36a | /build/lib/srg/spaces/tuple.py | 165344c626d7d0e57d51291d425cd441f5e4c80a | [] | no_license | xiaochen-salford/salford-robotics-gym | 189b2c741d1af0c67351c0ae2910052f8c76528b | 0d742be16aaef318a38403601177e4f0e12839a3 | refs/heads/master | 2022-11-16T03:27:54.583071 | 2020-07-14T12:57:58 | 2020-07-14T12:57:58 | 271,031,199 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,504 | py | import numpy as np
from .space import Space
class Tuple(Space):
"""
A tuple (i.e., product) of simpler spaces
Example usage:
self.observation_space = spaces.Tuple((spaces.Discrete(2), spaces.Discrete(3)))
"""
def __init__(self, spaces):
self.spaces = spaces
for space in spaces:
assert isinstance(space, Space), "Elements of the tuple must be instances of gym.Space"
super(Tuple, self).__init__(None, None)
def seed(self, seed=None):
[space.seed(seed) for space in self.spaces]
def sample(self):
return tuple([space.sample() for space in self.spaces])
def contains(self, x):
if isinstance(x, list):
x = tuple(x) # Promote list to tuple for contains check
return isinstance(x, tuple) and len(x) == len(self.spaces) and all(
space.contains(part) for (space,part) in zip(self.spaces,x))
def __repr__(self):
return "Tuple(" + ", ". join([str(s) for s in self.spaces]) + ")"
def to_jsonable(self, sample_n):
# serialize as list-repr of tuple of vectors
return [space.to_jsonable([sample[i] for sample in sample_n]) for i, space in enumerate(self.spaces)]
def from_jsonable(self, sample_n):
return [sample for sample in zip(*[space.from_jsonable(sample_n[i]) for i, space in enumerate(self.spaces)])]
def __getitem__(self, index):
return self.spaces[index]
def __len__(self):
return len(self.spaces)
def __eq__(self, other):
return isinstance(other, Tuple) and self.spaces == other.spaces
| [
"anon.nry@gmail.com"
] | anon.nry@gmail.com |
75926df3fbf859ff1baec4e9c7406ebd84ad3bc4 | fa38866fa922042463d17e1665b637a12db76072 | /random_encounter.py | 4fe32c9bb39f8059652eb62787d812ed14c70330 | [] | no_license | jopetersen/dnd_utility_DM | 2908b9c56987d502873283e1c948793c109a17e1 | e4d88591731e5249112b3d71196ed85c5d3ce81b | refs/heads/master | 2020-12-21T07:39:42.816257 | 2020-01-26T19:01:19 | 2020-01-26T19:01:19 | 236,360,719 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 333 | py | import random
import csv
#Goal: Create a random encounter generator for D&D
#open the csv file & create a random encounter
with open('encounters.csv', 'r') as f:
reader = csv.reader(f)
your_list = list(reader)
list_upper_limit = len(your_list)
random_index = random.randint(0, list_upper_limit)
print(your_list[random_index]) | [
"noreply@github.com"
] | noreply@github.com |
a034ae69847cc367c1eb332739564f89b6b89f77 | 78790dccf4eb45e85295322ab7b040c008650070 | /src/command/commands/report/host/osg/hadoop/install/__init__.py | fe2d147649af31827a49ba1c276d7a7d6ca2c657 | [] | no_license | wangfeilong321/osg-roll | 68a07b2dac5bda956431a0f203985ac9737bb503 | 7fb86ac61b4b4f6417dc0653be635c80b603ba12 | refs/heads/master | 2021-01-22T04:53:39.607130 | 2015-08-03T14:07:02 | 2015-08-03T14:07:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,860 | py | #$Id$
#
# @Copyright@
#
# $Log$
# Revision 0.10 2012/10/26 05:48:54 eduardo
# Creation
#
import sys
import os
import pwd
import string
import types
import rocks.commands
from syslog import syslog
class Command(rocks.commands.HostArgumentProcessor,
rocks.commands.report.command):
"""
Output the OSG hadoop wrapper install script
<arg type='string' name='host'>
One host name.
</arg>
<example cmd='report host osg hadoop install hadoop-0-0'>
Create wrapper script to install OSG hadoop for hadoop-0-0
</example>
"""
def writeProperty(self, Pname, Pvalue, xmlFile):
self.addOutput(self.host, ' echo "<property>" >> %s' % xmlFile)
self.addOutput(self.host, ' echo "<name>%s</name>" >> %s' % (Pname,xmlFile))
self.addOutput(self.host, ' echo "<value>%s</value>" >> %s' % (Pvalue,xmlFile))
self.addOutput(self.host, ' echo "</property>" >> %s' % xmlFile)
def OpenConfiguration(self, xmlFile):
self.addOutput(self.host, ' echo "<?xml version=\\"1.0\\"?>" >> %s' % xmlFile )
self.addOutput(self.host, ' echo "<?xml-stylesheet type=\\"text/xsl\\" href=\\"configuration.xsl\\"?>" >> %s' % xmlFile )
self.addOutput(self.host, ' echo "" >> %s' % xmlFile)
self.addOutput(self.host, ' echo "<!-- Template created by rocks report osg hadoop install -->" >> %s' % xmlFile)
self.addOutput(self.host, ' echo "" >> %s' % xmlFile)
self.addOutput(self.host, ' echo "<configuration>" >> %s' %xmlFile)
def CloseConfiguration(self, xmlFile):
self.addOutput(self.host, ' echo "</configuration>" >> %s' % xmlFile)
def FixGid(self, thegid, thegroup):
self.addOutput(self.host, 'swapgid=`getent group %s | cut -d: -f3`' % thegroup)
self.addOutput(self.host, 'swapgroup=`getent group %s | cut -d: -f1`' % thegid)
self.addOutput(self.host, '[ ! -z "$swapgroup" ]&&[ "x$swapgroup" != "x%s" ]&&/usr/sbin/groupmod -o -g $swapgid $swapgroup' % thegroup)
self.addOutput(self.host, '/usr/sbin/groupmod -g %s %s' % (thegid,thegroup))
def FixUid(self, theuid, theuser):
self.addOutput(self.host, 'swapuid=`getent passwd %s | cut -d: -f3`' % theuser)
self.addOutput(self.host, 'swapuser=`getent passwd %s | cut -d: -f1`' % theuid)
self.addOutput(self.host, '[ ! -z "$swapuser" ]&&[ "x$swapuser" != "x%s" ]&&/usr/sbin/usermod -o -u $swapuid $swapuser' % theuser)
self.addOutput(self.host, '/usr/sbin/usermod -u %s %s' % (theuid,theuser))
def run(self, params, args):
self.beginOutput()
for host in self.getHostnames(args):
self.host = host
loginstall = '/var/log/hadoop-install.log'
hostexclude = '/etc/hadoop/conf/hosts_exclude'
hadoopenvsh = '/etc/hadoop/conf/hadoop-env.sh.template'
hdfssitexml = '/etc/hadoop/conf/hdfs-site.xml.template'
coresitexml = '/etc/hadoop/conf/core-site.xml.template'
maprsitexml = '/etc/hadoop/conf/mapred-site.xml.template'
hadoopconfg = '/root/HadoopConfigurator'
osg_client = self.db.getHostAttr(host,'OSG_Client')
osg_se = self.db.getHostAttr(host,'OSG_SE')
osg_ce = self.db.getHostAttr(host,'OSG_CE')
osg_gftp = self.db.getHostAttr(host,'OSG_GFTP_HDFS')
osg_hadoop = self.db.getHostAttr(host,'OSG_HADOOP')
trigger_install = osg_client or osg_se or osg_gftp or osg_hadoop or osg_ce>0
if trigger_install:
self.addOutput(self.host, '/usr/sbin/groupadd -g &OSG_fusegid; fuse')
self.FixGid('&OSG_fusegid;','fuse')
self.addOutput(self.host, '/usr/sbin/groupadd -g &OSG_hadoopgid; hadoop')
self.FixGid('&OSG_hadoopgid;','hadoop')
self.addOutput(self.host, '/usr/sbin/groupadd -g &OSG_mapredgid; mapred')
self.FixGid('&OSG_mapredgid;','mapred')
self.addOutput(self.host, '/usr/sbin/groupadd -g &OSG_zookeepergid; zookeeper')
self.FixGid('&OSG_zookeepergid;','zookeeper')
self.addOutput(self.host, '/usr/sbin/useradd -r -u &OSG_hdfsuid; -g &OSG_hadoopgid; -c "Hadoop HDFS" -s /bin/bash -d /home/hadoop -m -k /etc/skel hdfs')
self.FixUid('&OSG_hdfsuid;','hdfs')
self.addOutput(self.host, '/usr/sbin/useradd -r -u &OSG_mapreduid; -g &OSG_mapredgid; -c "Hadoop MapReduce" -s /bin/bash -d /usr/lib/hadoop-hdfs mapred')
self.FixUid('&OSG_mapreduid;','mapred')
self.addOutput(self.host, '/usr/sbin/useradd -r -u &OSG_zookeeperuid; -g &OSG_zookeepergid; -c "ZooKeeper" -s /bin/nologin -d /var/run/zookeeper zookeeper')
self.FixUid('&OSG_zookeeperuid;','zookeeper')
self.addOutput(self.host, '')
self.addOutput(self.host, 'touch %s' % loginstall )
self.addOutput(self.host, 'yum install osg-se-hadoop-client >> %s 2>&1' % loginstall)
self.addOutput(self.host, 'yum install osg-se-hadoop-datanode >> %s 2>&1' % loginstall)
self.addOutput(self.host, 'yum install osg-se-hadoop-namenode >> %s 2>&1' % loginstall)
self.addOutput(self.host, 'yum install osg-se-hadoop-secondarynamenode >> %s 2>&1' % loginstall)
self.addOutput(self.host, 'touch %s' % hostexclude )
self.addOutput(self.host, '')
self.addOutput(self.host, '#Make sure services are turned off to prevent upgrade cases')
self.addOutput(self.host, 'chkconfig hadoop-hdfs-datanode off')
self.addOutput(self.host, 'chkconfig hadoop-hdfs-namenode off')
self.addOutput(self.host, 'chkconfig hadoop-hdfs-secondarynamenode off')
self.addOutput(self.host, '')
self.addOutput(self.host, '#Make sure config templates exists')
# template for hadoop-env.sh
self.addOutput(self.host, 'hadoopenvshcreate=0')
self.addOutput(self.host, '[ -f %s ]||echo "Creating %s" >> %s 2>&1' % (hadoopenvsh,hadoopenvsh,loginstall) )
self.addOutput(self.host, '[ -f %s ]||hadoopenvshcreate=1' % hadoopenvsh)
self.addOutput(self.host, 'echo " hadoopenvshcreate=$hadoopenvshcreate" >> %s 2>&1' % (loginstall) )
self.addOutput(self.host, 'if [ "x$hadoopenvshcreate" == "x1" ]; then')
self.addOutput(self.host, ' echo "passed if hadoopenvshcreate is 1" >> %s 2>&1' % (loginstall) )
self.addOutput(self.host, ' touch %s' % hadoopenvsh )
self.addOutput(self.host, ' echo "# The maximum amount of heap to use, in MB. Default is 1000." >> %s' % hadoopenvsh )
self.addOutput(self.host, ' echo "export HADOOP_HEAPSIZE=@HADOOP_NAMENODE_HEAP@" >> %s' % hadoopenvsh )
self.addOutput(self.host, '')
self.addOutput(self.host, 'else')
self.addOutput(self.host, ' echo "NOT created hadoopenvshcreate is 0" >> %s 2>&1' % (loginstall) )
self.addOutput(self.host, 'fi')
self.addOutput(self.host, '')
# template for hdfs-site.xml
self.addOutput(self.host, 'hdfssitexmlcreate=0')
self.addOutput(self.host, '[ -f %s ]||echo "Creating %s" >> %s 2>&1' % (hdfssitexml,hdfssitexml,loginstall) )
self.addOutput(self.host, '[ -f %s ]||hdfssitexmlcreate=1' % hdfssitexml)
self.addOutput(self.host, 'echo " hdfssitexmlcreate=$hdfssitexmlcreate" >> %s 2>&1' % (loginstall) )
self.addOutput(self.host, 'if [ "x$hdfssitexmlcreate" == "x1" ]; then')
self.addOutput(self.host, ' echo "passed if hdfssitexmlcreate is 1" >> %s 2>&1' % (loginstall) )
self.addOutput(self.host, ' touch %s' % hdfssitexml )
self.OpenConfiguration(hdfssitexml)
self.writeProperty('dfs.block.size','@HADOOP_DATANODE_BLOCKSIZE@',hdfssitexml)
self.writeProperty('dfs.replication','@HADOOP_REPLICATION_DEFAULT@',hdfssitexml)
self.writeProperty('dfs.replication.max','@HADOOP_REPLICATION_MAX@',hdfssitexml)
self.writeProperty('dfs.replication.min','@HADOOP_REPLICATION_MIN@',hdfssitexml)
self.writeProperty('dfs.datanode.du.reserved','10000000000',hdfssitexml)
self.writeProperty('dfs.balance.bandwidthPerSec','2000000000',hdfssitexml)
self.writeProperty('dfs.data.dir','@HADOOP_DATA@',hdfssitexml)
self.writeProperty('dfs.datanode.handler.count','10',hdfssitexml)
self.writeProperty('dfs.hosts.exclude','@HADOOP_CONF_DIR@/hosts_exclude',hdfssitexml)
self.writeProperty('dfs.namenode.handler.count','40',hdfssitexml)
self.writeProperty('dfs.namenode.logging.level','all',hdfssitexml)
self.writeProperty('fs.checkpoint.dir','@HADOOP_CHECKPOINT_DIRS@',hdfssitexml)
self.writeProperty('topology.script.file.name','@HADOOP_RACKAWARE_SCRIPT@',hdfssitexml)
self.writeProperty('dfs.secondary.http.address','@HADOOP_SECONDARY_HTTP_ADDRESS@',hdfssitexml)
self.writeProperty('dfs.http.address','@HADOOP_PRIMARY_HTTP_ADDRESS@',hdfssitexml)
self.writeProperty('fs.checkpoint.period','@HADOOP_CHECKPOINT_PERIOD@',hdfssitexml)
self.writeProperty('dfs.permissions.supergroup','root',hdfssitexml)
self.CloseConfiguration(hdfssitexml)
self.addOutput(self.host, '')
self.addOutput(self.host, 'else')
self.addOutput(self.host, ' echo "NOT created hdfssitexmlcreate is 0" >> %s 2>&1' % (loginstall) )
self.addOutput(self.host, 'fi')
self.addOutput(self.host, '')
# template for core-site.xml
self.addOutput(self.host, 'coresitexmlcreate=0')
self.addOutput(self.host, '[ -f %s ]||echo "Creating %s" >> %s 2>&1' % (coresitexml,coresitexml,loginstall) )
self.addOutput(self.host, '[ -f %s ]||coresitexmlcreate=1' % coresitexml)
self.addOutput(self.host, 'echo " coresitexmlcreate=$coresitexmlcreate" >> %s 2>&1' % (loginstall) )
self.addOutput(self.host, 'if [ "x$coresitexmlcreate" == "x1" ]; then')
self.addOutput(self.host, ' echo "passed if coresitexmlcreate is 1" >> %s 2>&1' % (loginstall) )
self.addOutput(self.host, ' touch %s' % coresitexml )
self.OpenConfiguration(coresitexml)
self.writeProperty('fs.default.name','hdfs://@HADOOP_NAMENODE@:@HADOOP_NAMEPORT@',coresitexml)
self.writeProperty('hadoop.tmp.dir','@HADOOP_SCRATCH@',coresitexml)
self.writeProperty('dfs.umaskmode','@HADOOP_UMASK@',coresitexml)
self.writeProperty('io.bytes.per.checksum','4096',coresitexml)
self.writeProperty('hadoop.log.dir','@HADOOP_LOG@',coresitexml)
self.CloseConfiguration(coresitexml)
self.addOutput(self.host, '')
self.addOutput(self.host, 'else')
self.addOutput(self.host, ' echo "NOT created coresitexmlcreate is 0" >> %s 2>&1' % (loginstall) )
self.addOutput(self.host, 'fi')
self.addOutput(self.host, '')
# template for mapred-site.xml
self.addOutput(self.host, 'maprsitexmlcreate=0')
self.addOutput(self.host, '[ -f %s ]||echo "Creating %s" >> %s 2>&1' % (maprsitexml,maprsitexml,loginstall) )
self.addOutput(self.host, '[ -f %s ]||maprsitexmlcreate=1' % maprsitexml)
self.addOutput(self.host, 'echo " maprsitexmlcreate=$maprsitexmlcreate" >> %s 2>&1' % (loginstall) )
self.addOutput(self.host, 'if [ "x$maprsitexmlcreate" == "x1" ]; then')
self.addOutput(self.host, ' echo "passed if maprsitexmlcreate is 1" >> %s 2>&1' % (loginstall) )
self.addOutput(self.host, ' touch %s' % maprsitexml )
self.OpenConfiguration(maprsitexml)
self.writeProperty('mapred.job.tracker','@HADOOP_TRACKER@:@HADOOP_TRACKERPORT@',maprsitexml)
self.writeProperty('mapred.map.tasks','7919',maprsitexml)
self.writeProperty('mapred.reduce.tasks','1543',maprsitexml)
self.writeProperty('mapred.tasktracker.map.tasks.maximum','4',maprsitexml)
self.writeProperty('mapred.tasktracker.reduce.tasks.maximum','4',maprsitexml)
self.writeProperty('tasktracker.http.threads','50',maprsitexml)
self.CloseConfiguration(maprsitexml)
self.addOutput(self.host, '')
self.addOutput(self.host, 'else')
self.addOutput(self.host, ' echo "NOT created maprsitexmlcreate is 0" >> %s 2>&1' % (loginstall) )
self.addOutput(self.host, 'fi')
self.addOutput(self.host, '')
# Hadoop Configurator
self.addOutput(self.host, 'echo "Creating %s" >> %s 2>&1' % (hadoopconfg,loginstall) )
self.addOutput(self.host, 'echo "#! /bin/bash" > %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo ". /etc/sysconfig/hadoop" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "sed -i -e \\"s/servers=.*/servers=\${HADOOP_GANGLIA_ADDRESS}:8649/\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s/period=.*/period=\${HADOOP_GANGLIA_INTERVAL}/\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s/^# dfs\./dfs./\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s/^# jvm\./jvm./\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t/etc/hadoop/conf/hadoop-metrics.properties" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "if gmond -V 2>/dev/null | grep -q \'gmond 3.1\' ; then" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo " sed -i -e \'s/^#\\(dfs.*GangliaContext31\\)/\\1/\' \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \'s/^#\\(jvm.*GangliaContext31\\)/\\1/\' \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo " /etc/hadoop/conf/hadoop-metrics.properties" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "else" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "sed -i -e \'s/^#\\(dfs.*GangliaContext\$\\)/\\1/\' \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \'s/^#\\(jvm.*GangliaContext\$\\)/\\1/\' \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo " /etc/hadoop/conf/hadoop-metrics.properties" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "fi" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "sed -e "s#@HADOOP_NAMENODE_HEAP@#\${HADOOP_NAMENODE_HEAP}#" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "< /etc/hadoop/conf/hadoop-env.sh.template > /etc/hadoop/conf/hadoop-env.sh" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "sed -e "s#@HADOOP_CONF_DIR@#\${HADOOP_CONF_DIR}#" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s#@HADOOP_NAMENODE@#\${HADOOP_NAMENODE}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s#@HADOOP_NAMEPORT@#\${HADOOP_NAMEPORT}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s#@HADOOP_SCRATCH@#\${HADOOP_SCRATCH}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s#@HADOOP_UMASK@#\${HADOOP_UMASK}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s#@HADOOP_LOG@#\${HADOOP_LOG}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "< /etc/hadoop/conf/core-site.xml.template > /etc/hadoop/conf/core-site.xml" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "sed -e \\"s#@HADOOP_CONF_DIR@#\${HADOOP_CONF_DIR}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s#@HADOOP_DATADIR@#\${HADOOP_DATADIR}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s#@HADOOP_DATA@#\${HADOOP_DATA}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s#@HADOOP_DATANODE_BLOCKSIZE@#\${HADOOP_DATANODE_BLOCKSIZE}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s#@HADOOP_REPLICATION_DEFAULT@#\${HADOOP_REPLICATION_DEFAULT}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s#@HADOOP_REPLICATION_MIN@#\${HADOOP_REPLICATION_MIN}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s#@HADOOP_REPLICATION_MAX@#\${HADOOP_REPLICATION_MAX}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s#@HADOOP_CHECKPOINT_DIRS@#\${HADOOP_CHECKPOINT_DIRS}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s#@HADOOP_PRIMARY_HTTP_ADDRESS@#\${HADOOP_PRIMARY_HTTP_ADDRESS}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s#@HADOOP_SECONDARY_HTTP_ADDRESS@#\${HADOOP_SECONDARY_HTTP_ADDRESS}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s#@HADOOP_CHECKPOINT_PERIOD@#\${HADOOP_CHECKPOINT_PERIOD}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s#@HADOOP_RACKAWARE_SCRIPT@#\${HADOOP_RACK_AWARENESS_SCRIPT}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t< /etc/hadoop/conf/hdfs-site.xml.template > /etc/hadoop/conf/hdfs-site.xml" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "sed -e \\"s#@HADOOP_CONF_DIR@#\${HADOOP_CONF_DIR}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s#@HADOOP_TRACKER@#\${HADOOP_NAMENODE}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t-e \\"s#@HADOOP_TRACKERPORT@#\${HADOOP_NAMEPORT}#\\" \\\\" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "\t< /etc/hadoop/conf/mapred-site.xml.template > /etc/hadoop/conf/mapred-site.xml" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "if [ \\"\$HADOOP_UPDATE_FSTAB\\" == \\"1\\" ] ; then" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo " if [ ! -e /usr/bin/hdfs ] ; then" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo " echo \\"Not updating fstab because /usr/bin/hdfs not found. Is hadoop-hdfs installed?\\"" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo " fi" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo " mkdir -p /mnt/hadoop" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo " if grep -q \'^hdfs#\' /etc/fstab ; then" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo " sed -i -e \\"s;^hdfs#.*;hadoop-fuse-dfs# /mnt/hadoop fuse server=\${HADOOP_NAMENODE},port=\${HADOOP_NAMEPORT},rdbuffer=131072,allow_other 0 0;\\" /etc/fstab" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo " else" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo " if grep -q \'^hadoop-fuse-dfs#\' /etc/fstab ; then" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo " sed -i -e \\"s;^hadoop-fuse-dfs#.*;hadoop-fuse-dfs# /mnt/hadoop fuse server=\${HADOOP_NAMENODE},port=\${HADOOP_NAMEPORT},rdbuffer=131072,allow_other 0 0;\\" /etc/fstab" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo " else" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo " echo \\"hadoop-fuse-dfs# /mnt/hadoop fuse server=\${HADOOP_NAMENODE},port=\${HADOOP_NAMEPORT},rdbuffer=131072,allow_other 0 0\\" >> /etc/fstab" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo " fi" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo " fi" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "fi" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, 'echo "" >> %s 2>&1' % (hadoopconfg) )
self.addOutput(self.host, '/bin/chmod 755 %s' % (hadoopconfg) )
self.addOutput(self.host, '')
self.endOutput(padChar='')
| [
"juaneduardo.ramirez@upr.edu"
] | juaneduardo.ramirez@upr.edu |
1e90395ed560cb2337b5b40f162a4fc8716076cc | 40b39249f1ee9bb1652dfee6949da98c20eb7c07 | /src/my/app/__init__.py | a83b2fef38569334875d455936c1ca359b8e303d | [] | no_license | AnneGilles/my.app | 97bd9e416aead2399507081ce158a043c1d84ab0 | 1711977a84d0f10a69b57d79f2e75d74afd2833b | refs/heads/master | 2021-01-10T19:09:40.141263 | 2011-12-15T08:40:34 | 2011-12-15T08:40:34 | 2,805,547 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 89 | py | import cone.app
from my.app.model import MyApp
cone.app.register_plugin('myapp', MyApp)
| [
"c@shri.de"
] | c@shri.de |
97c04828414670989066e89294c98ebc37b4f70f | aeb9affdd14dc8986112dcd759eed0ff13b4fde7 | /blog/admin.py | b188c3e2251d1c19316374eb457359bdd8562bb2 | [] | no_license | iOSGJZ/learnDjango | 77634da1515b285585483f7d0847aa7924235629 | 394c58d3a20b4445b9aaf20bd06737b737876e9e | refs/heads/master | 2020-05-30T03:33:40.057359 | 2019-06-06T01:31:13 | 2019-06-06T01:31:13 | 189,518,033 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,298 | py | from django.contrib import admin
from .models import Article,Tags,Category
# Register your models here.
#修改登录后台管理页面头部显示和页面标题
admin.site.site_header = '郭兢哲'
admin.site.site_title = '郭兢哲的Django管理后台'
#自定义列表操作
def changeTime(self,request,queryset):
#批量更新created_time字段的值未2019-5-21
queryset.update(created_time='2019-5-21')
changeTime.short_description = '中文显示自定义的Actions'
class ArticleAdmin(admin.ModelAdmin):
"""
一般ManyToManyField多对多字段用过滤器
标题等文本字段用搜索框
日期时间用分层筛选。
过滤器如果是外键需要遵循这样的语法:本表字段__外键表要显示的字段。如:“user__user_name”。
"""
#搜索框,指定标题title作为搜索字段
search_fields = ['title']
#右侧栏过滤器和日期筛选
list_filter = ['user'] #右侧栏过滤器, 按作者进行筛选
date_hierarchy = 'created_time' #详细时间分层筛选
#listdisplay设置要显示在列表中的字段(id字段是Django模型的默认主键)
#这里的‘riqi’是方法名,显示一列为方法,方法定义为model的类方法
list_display = ('id','category','title','user','created_time','riqi','paixu')
#设置哪些字段可以点击进入编辑界面
list_display_links = ('title','id')
#list_per_page设置每页显示多少条记录,默认是100条
list_per_page = 50
#ordering设置默认排序字段,负号表示降序排序
ordering = ('-created_time',)
#list_editable 设置默认可编辑字段,在列表里就可以编辑
list_editable = ['user']
#fk_fields 设置显示外键字段
fk_fields = ['category']
#操作选项的设置
#是否在列表顶部显示
actions_on_top = True
#是否在列表底部显示
actions_on_bottom = False
actions = [changeTime,]
class TagsAdmin(admin.ModelAdmin):
list_display = ('name',)
list_display_links = ('name',)
class CategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
list_display_links = ('name',)
admin.site.register(Article,ArticleAdmin)
admin.site.register(Tags,TagsAdmin)
admin.site.register(Category,CategoryAdmin) | [
"541011933@qq.com"
] | 541011933@qq.com |
377991264854c35f07cc5d22c7ac0c9b988016f2 | 1bdc14be2378ff31a4c7f51bee446eb35e48c390 | /c4.5/c45.py | 6b76c3bb3785bb74548f0a1314956cbf7f8e52a6 | [] | no_license | CornellDataScience/Insights-FakeNews | dd6aa16c3971c521e9ee213227f4027e87c47c17 | d995fdf4a5d9701efce3d4de81a0b0f749fcae74 | refs/heads/master | 2020-03-28T17:31:07.011328 | 2019-04-29T19:58:04 | 2019-04-29T19:58:04 | 148,797,486 | 6 | 1 | null | 2019-02-04T05:50:07 | 2018-09-14T14:08:12 | Jupyter Notebook | UTF-8 | Python | false | false | 14,991 | py | """
C4.5 Binary Decision Tree Implementation
Usage:
Read csv file in; will be stored as a 2 Dimensional list. (See fread())
Train a classifier (i.e. train(list))
Prune the decision tree (i.e. prune_tree(tree, 0.5))
Predict the result (i.e. predict([.....], classifier))
The function assumes that the last column of your data is populated by labels.
Example of usage:
data = fread("./test_val_dump.csv", True)
drop_first_col = []
for x in data:
drop_first_col.append(x[1:])
tree = train(drop_first_col)
prune_tree(tree, 0.5)
print(predict([2,0,2,6,1,0,0,3,1,0,0,2,0,0,0,0,0,1,.223606798,0,.285714,.141421,0,.253546],tree))
"""
from collections import OrderedDict, Counter
from math import log
import csv
from random import randrange
def entropy(X):
    """
    Shannon entropy (in bits) of the class labels in X.

    X : list of rows; the last element of each row is the class label.
    """
    n_rows = len(X)
    label_counts = Counter(row[-1] for row in X)
    total = 0.0
    for label in label_counts:
        # P(C_i): relative frequency of this class among all rows.
        p = float(label_counts[label]) / n_rows
        # log(p)/log(2) converts the natural log to base 2 (bits).
        total -= p * log(p) / log(2)
    return total
def gini(X):
    """
    Gini impurity of the class labels in X: 1 - sum_i P(C_i)^2.

    Mathematically identical to the original pairwise formulation
    sum_{i != j} P(C_i) * P(C_j), but computed in O(k) over k classes
    instead of O(k^2).

    X : list of rows; the last element of each row is the class label.
    Returns 0.0 for an empty dataset (matching the original behavior,
    which never divided when the counter was empty).
    """
    n_rows = len(X)
    if n_rows == 0:
        # Guard: the closed form would otherwise yield 1.0 for no data.
        return 0.0
    counts = Counter(x[-1] for x in X)
    return 1.0 - sum((float(c) / n_rows) ** 2 for c in counts.values())
class Tree:
    """
    Node of a binary decision tree.

    Internal nodes carry a split (feature index + threshold/category)
    and two child subtrees; leaf nodes carry a Counter of class labels
    in ``results`` and have no children.
    """
    def __init__(self, feature=-1, value=None, right_branch=None, left_branch=None, results=None, gain=0.0):
        # Index of the feature this node splits on (-1 for leaves).
        self.feature = feature
        # Split threshold (numeric) or category (string).
        self.value = value
        # Class-label counts at a leaf; None for internal nodes.
        self.results = results
        # Information gain achieved by this node's split.
        self.gain = gain
        self.left_branch = left_branch
        self.right_branch = right_branch
def prune_tree(tree, least_gain, eval_fun=entropy):
    """
    Bottom-up, in-place pruning: merge any pair of sibling leaves whose
    split provides less information gain than ``least_gain``.

    tree       : Tree  -- root of the (sub)tree to prune
    least_gain : float -- minimum gain a split must provide to survive
    eval_fun   : callable -- impurity measure, entropy(X) or gini(X)
    """
    # First descend into any child that is still an internal node.
    if tree.right_branch.results is None:
        prune_tree(tree.right_branch, least_gain, eval_fun)
    if tree.left_branch.results is None:
        prune_tree(tree.left_branch, least_gain, eval_fun)
    # Once both children are leaves, decide whether to merge them.
    if tree.right_branch.results is not None and tree.left_branch.results is not None:
        # Rebuild single-column label rows from the leaf counters so the
        # impurity function can be applied to them.
        right_rows, left_rows = [], []
        for label, count in tree.right_branch.results.items():
            right_rows += [[label]] * count
        for label, count in tree.left_branch.results.items():
            left_rows += [[label]] * count
        merged = right_rows + left_rows
        p = float(len(right_rows)) / len(merged)
        gain = eval_fun(merged) - p * eval_fun(right_rows) - (1 - p) * eval_fun(left_rows)
        if gain < least_gain:
            # Split is not worth keeping: collapse into a single leaf.
            tree.right_branch, tree.left_branch = None, None
            tree.results = Counter(row[-1] for row in merged)
"""
Helper functions: type_conversion, fread
"""
def type_conversion(val):
    """
    Convert a raw CSV cell (string) to int or float when possible.

    Values containing '.' are tried as float, all others as int;
    anything that fails numeric parsing is returned as a stripped
    string.
    """
    val = val.strip()
    try:
        if '.' in val:
            return float(val)
        else:
            return int(val)
    except ValueError:
        # Not numeric -- return the stripped string unchanged.
        return val

def fread(f, col_labels=False):
    """
    Read the CSV file at path ``f`` into a 2-D list, converting each
    cell with type_conversion.

    col_labels : if True, the first row holds column labels and is
    dropped from the result (default False).

    Fix: the file handle is now closed via a ``with`` block; the
    original ``csv.reader(open(f, 'rt'))`` leaked the open file.
    """
    with open(f, 'rt') as handle:
        lst = [[type_conversion(i) for i in r] for r in csv.reader(handle)]
    if col_labels:
        lst.pop(0)
    return lst
def train(lst, depth=0, max_depth=100, min_samples_leaf=1, min_samples_split=2, criteria=entropy):
    """
    Recursively grow a binary decision tree (C4.5-style).

    lst               : rows of data; the last column holds the labels
    depth             : current recursion depth (callers pass 0)
    max_depth         : stop splitting once this depth is reached
    min_samples_leaf  : each child partition must exceed this size
    min_samples_split : a node needs more rows than this to be split
    criteria          : impurity function, entropy(X) or gini(X)

    Fix: the recursive calls now forward min_samples_leaf,
    min_samples_split and criteria; previously they passed only
    ``depth+1, max_depth``, so those hyperparameters silently reverted
    to their defaults everywhere below the root node.
    """
    # Base case: empty set
    if len(lst) == 0:
        return Tree()
    elif len(lst) > min_samples_split:
        # Impurity of the current node -- baseline for computing gain.
        score = criteria(lst)
        Attribute_best = None
        Set_best = None
        Gain_best = 0.0
        num_col = len(lst[0]) - 1  # last column of lst is labels
        for c in range(num_col):
            # Candidate thresholds: every distinct value in this column.
            col_val = list(sorted(set([row[c] for row in lst])))
            for value in col_val:
                # Partition dataset on the candidate split.
                if isinstance(value, float) or isinstance(value, int):  # numerics
                    set1 = [row for row in lst if row[c] >= value]
                    set2 = [row for row in lst if row[c] < value]
                else:  # strings
                    set1 = [row for row in lst if row[c] == value]
                    set2 = [row for row in lst if row[c] != value]
                if len(set1) > min_samples_leaf and len(set2) > min_samples_leaf:  # leaves large enough
                    # Information gain of this split vs. the parent.
                    p = float(len(set1))/len(lst)
                    gain = score - p*criteria(set1) - (1-p)*criteria(set2)
                    if gain > Gain_best:
                        Gain_best = gain
                        Attribute_best = (c, value)
                        Set_best = (set1, set2)
        if Gain_best > 0 and depth < max_depth:  # check max depth
            # Recurse on both partitions, forwarding all hyperparameters.
            r = train(Set_best[0], depth+1, max_depth, min_samples_leaf, min_samples_split, criteria)
            l = train(Set_best[1], depth+1, max_depth, min_samples_leaf, min_samples_split, criteria)
            return Tree(feature=Attribute_best[0], value=Attribute_best[1], right_branch=r, left_branch=l, gain=Gain_best)
        else:
            # No worthwhile split (or depth limit hit): emit a leaf.
            return Tree(results=Counter([x[-1] for x in lst]), gain=Gain_best)
    else:  # partition is too small to split
        return Tree(results=Counter([x[-1] for x in lst]))
def tree_classify(X, tree):
    """
    Walk the decision tree for the feature row X and return the Counter
    of labels stored at the leaf that X falls into.
    """
    if tree.results is not None:
        # Reached a leaf: hand back its label counts.
        return tree.results
    feature_value = X[tree.feature]
    if isinstance(feature_value, (float, int)):
        # Numeric split: right branch holds values >= the threshold.
        branch = tree.right_branch if feature_value >= tree.value else tree.left_branch
    else:
        # Categorical split: right branch on an exact match.
        branch = tree.right_branch if feature_value == tree.value else tree.left_branch
    return tree_classify(X, branch)

def predict(x, classifier):
    """Return the single most frequent label at the leaf reached by x."""
    leaf_counts = tree_classify(x, classifier)
    return leaf_counts.most_common(1)[0][0]
"""
usage:
tree = DecisionTree()
tree.train(data)
tree.classify(input)
params:
criterion
max_depth - default 100
min_samples_leaf - default 1
min_samples_split - default 2
"""
class DecisionTree():
    """
    Thin wrapper tying together tree growth, pruning and prediction.

    Keyword options:
        criterion         -- impurity function (default: entropy)
        max_depth         -- maximum tree depth (default: 100)
        min_samples_leaf  -- minimum rows per leaf (default: 1)
        min_samples_split -- minimum rows needed to split (default: 2)
    """
    def __init__(self, **kwargs):
        self.classifier = None
        # Pull each hyperparameter from kwargs, falling back to defaults.
        option_defaults = (
            ('criterion', entropy),
            ('max_depth', 100),
            ('min_samples_leaf', 1),
            ('min_samples_split', 2),
        )
        for option, fallback in option_defaults:
            setattr(self, option, kwargs.get(option, fallback))
    def fit(self, X):
        """Grow a tree on X (last column = labels), then prune it."""
        self.classifier = train(X, 0, self.max_depth, self.min_samples_leaf,
                                self.min_samples_split, self.criterion)
        prune_tree(self.classifier, 0.5, self.criterion)
    def classify(self, x):
        """Predict the label for the feature row x."""
        return predict(x, self.classifier)
#Bagging tree prediction
def bag(treelst, row):
    """Majority vote of a list of fitted DecisionTree objects on one row."""
    votes = [predict(row, tree.classifier) for tree in treelst]
    # Pick the distinct label with the most votes (ties: set order).
    return max(set(votes), key=votes.count)
def subsample(X, size):
sample = []
n = round(len(X) * size)
while len(sample) < n:
index = randrange(len(X))
sample.append(X[index])
return sample
def create_forest(X, max_depth, subsample_ratio, min_samples_leaf = 1, min_samples_split = 2, n_trees = 250):
"""
Create a list containing decision trees fitted to a subsample of the data.
max_depth : max tree depth
subsample_ratio : subsample ratio
min_samples_leaf : minimum samples per tree leaf
min_samples_split : minimum samples per split
n_trees : number of decision trees to create
"""
forest = []
for i in range(n_trees):
sample = subsample(X, subsample_ratio)
dt = DecisionTree(max_depth = max_depth, min_samples_leaf = min_samples_leaf, min_samples_split = min_samples_split)
dt.fit(sample)
forest.append(dt)
return forest
def forest_predictions(X, forest):
"""
List of predictions from a random forest.
"""
pred = [bag(forest, r) for r in X]
return(pred)
def predict_probabilities(forest, inputs):
"""
forest is a list of DecisionTree objects.
"""
if(forest == None):
raise Exception("Random Forest has not been fitted. Please call fit() first.")
prob0 = 0
prob1 = 0
for i in range(len(forest)):
temp0 = prob0 * (i)
temp1 = prob1 * (i)
classifications = tree_classify(inputs, forest[i].classifier)
prob0 = (temp0 + (classifications[0] / (classifications[0] + classifications[1]))) / (i + 1)
prob1 = (temp1 + (classifications[1] / (classifications[0] + classifications[1]))) / (i + 1)
if prob0>=prob1:
#If 0 is at least as likely as 1, return 0 for deterministic behavior.
return (0, prob0)
else:
return (1, prob1)
#else:
#return (0, prob0)
"""
The following two functions are used to evaluate a RandomForest. They have been modified from Jason Brownlee's implementation, along with the score function.
"""
# Split a dataset into k folds
def cross_validation_split(dataset, n_folds):
dataset_split = list()
dataset_copy = list(dataset)
fold_size = int(len(dataset) / n_folds)
for i in range(n_folds):
fold = list()
while len(fold) < fold_size:
index = randrange(len(dataset_copy))
fold.append(dataset_copy.pop(index))
dataset_split.append(fold)
return dataset_split
# Calculate accuracy percentage
def accuracy_metric(actual, predicted):
correct = 0
for i in range(len(actual)):
if actual[i] == predicted[i]:
correct += 1
return correct / float(len(actual)) * 100.0
class RandomForest():
"""
Random Forest Classifier
"""
def __init__(self, **kwargs):
self.forest = None
self.subsample_ratio = kwargs.get('subsample_ratio')
self.criterion = kwargs.get('criterion', entropy)
self.n_trees = kwargs.get('n_trees', 25)
self.max_depth = kwargs.get('max_depth', 100)
self.min_samples_leaf = kwargs.get('min_samples_leaf', 1)
self.min_samples_split = kwargs.get('min_samples_split', 2)
def fit(self, X):
self.forest = create_forest(X, self.max_depth, self.subsample_ratio, self.min_samples_leaf, self.min_samples_split, self.n_trees)
def predictions(self, X):
"""
predictions is the list containing the prediction of each DecisionTree in the forest.
"""
if(self.forest == None):
raise Exception("Random Forest has not been fitted. Please call fit() first.")
else:
return forest_predictions(X, self.forest)
def predict_probs(self, X):
"""
predict_probs is the (prediction, probability) tuple representing the result of the RandomForest object.
"""
return predict_probabilities(self.forest, X)
def predict(self, X):
"""
predict is the int (0,1) representing the label classified by the RandomForest object.
"""
return predict_probabilities(self.forest, X)[0]
"""
def score(self, data, n_folds = 5):
folds = cross_validation_split(data, n_folds)
scores = list()
for fold in folds:
train_set = list(folds)
train_set.remove(fold)
train_set = sum(train_set, [])
test_set = list()
for row in fold:
row_copy = list(row)
test_set.append(row_copy)
row_copy[-1] = None
rf = RandomForest(subsample_ratio = self.subsample_ratio, criterion = self.criterion, n_trees = self.n_trees, max_depth = self.max_depth, min_samples_leaf = self.min_samples_leaf, min_samples_split = self.min_samples_split)
rf.fit(train_set)
predicted = rf.predictions(test_set)
print(rf.predictions(test_set))
actual = [row[-1] for row in fold]
accuracy = accuracy_metric(actual, predicted)
scores.append(accuracy)
return (sum(scores)/float(len(scores)))
"""
# Evaluate an algorithm using a cross validation split
def evaluate_forest(dataset, n_folds, subsample_ratio, ntrees = 25, maxdepth = 100, minsamplesleaf = 1, minsamplessplit = 2):
folds = cross_validation_split(dataset, n_folds)
scores = list()
for fold in folds:
train_set = list(folds)
train_set.remove(fold)
train_set = sum(train_set, [])
test_set = list()
for row in fold:
row_copy = list(row)
test_set.append(row_copy)
row_copy[-1] = None
rf = RandomForest(subsample_ratio = subsample_ratio, criterion = entropy, n_trees = ntrees, max_depth = maxdepth, min_samples_leaf = minsamplesleaf, min_samples_split = minsamplessplit)
rf.fit(train_set)
predicted = rf.predictions(test_set)
actual = [row[-1] for row in fold]
accuracy = accuracy_metric(actual, predicted)
scores.append(accuracy)
return scores
def mean_accuracy(dataset, n_folds, subsample_ratio, ntrees = 25, maxdepth = 100, minsamplesleaf = 1, minsamplessplit = 2):
scores = evaluate_forest(dataset, n_folds, subsample_ratio, ntrees = 25, maxdepth = 100, minsamplesleaf = 1, minsamplessplit = 2)
return (sum(scores)/float(len(scores)))
def evaluate_tree(dataset, n_folds):
folds = cross_validation_split(dataset, n_folds)
scores = list()
for fold in folds:
train_set = list(folds)
train_set.remove(fold)
train_set = sum(train_set, [])
test_set = list()
for row in fold:
row_copy = list(row)
test_set.append(row_copy)
row_copy[-1] = None
dt = DecisionTree()
dt.fit(train_set)
predicted = [dt.classify(x) for x in test_set]
actual = [row[-1] for row in fold]
accuracy = accuracy_metric(actual, predicted)
scores.append(accuracy)
return scores
def mean_tree_accuracy(dataset, n_folds):
scores = evaluate_tree(dataset, n_folds)
return (sum(scores)/float(len(scores)))
| [
"dzy4@cornell.edu"
] | dzy4@cornell.edu |
fcbdfed4eced7b015e30d52c48afc228a61af810 | 6058a27579d91d5a38202dbbd233606b56463825 | /app/grandchallenge/publications/__init__.py | dda68116d9925a0a2c97fd993497e8c6a5eb2bed | [
"Apache-2.0"
] | permissive | michelkok/grand-challenge.org | 9b91a71cf85e850f03b3b9520d78cf6658e6da38 | 2a96109f85d6c6ffc564323be844d78e9842c1f9 | refs/heads/master | 2023-08-15T22:58:59.209925 | 2021-10-15T15:21:10 | 2021-10-15T15:21:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 76 | py | default_app_config = "grandchallenge.publications.apps.PublicationsConfig"
| [
"noreply@github.com"
] | noreply@github.com |
b4474859a1447f1cb83e2be9dd2a3c2f071d0f5d | b7ef41b7d2fd7b7ca123310327779d4d46030ca6 | /KB/posts/migrations/0004_posts_slug.py | adeb2bc6abd71e25d96a30007918b30055a17faa | [] | no_license | MariposaDigital0/nowkb | 09fbab0ebc94a255a311105c2170f6ab94951ea6 | 2ea7218ac77970737316795a74127526e53653a4 | refs/heads/master | 2023-07-10T09:57:50.765488 | 2021-08-22T05:56:00 | 2021-08-22T05:56:00 | 398,723,936 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 402 | py | # Generated by Django 3.2.5 on 2021-07-19 11:46
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('posts', '0003_posts_description'),
]
operations = [
migrations.AddField(
model_name='posts',
name='slug',
field=models.CharField(blank=True, max_length=100, null=True),
),
]
| [
"dev.mariposadigital@gmail.com"
] | dev.mariposadigital@gmail.com |
2316a59621a593c0332a7ccfb9969b07b17d65f5 | ca6019538c41ab4718aef70c2d7ccfa43859231f | /Gr.11 Computer Science/Gr.11-Computer-Science-Decisions-Exercises.py | c5fd0102fe6e1e9f960049dd9b2544a0baf477d8 | [] | no_license | Benji-Saltz/Early-Python-Projects | 1bc282a0991c46915fb918cfefc93f1830a1b8e1 | 70f3ce61b6a878f37bd6ad72f5a72622ea033a7e | refs/heads/master | 2020-04-18T05:40:59.357162 | 2019-01-24T02:22:24 | 2019-01-24T02:22:24 | 167,287,895 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,061 | py | #Name: Benji Saltz
#Discription: Everything is in one document
#1. Marks= Output of what letter grade you got
#2.Discount= Ticket sales with a discounted value if people buy tickets in groups
#3.Quadratic Roots= Calculates if real roots come out of quadratic formula
#4.Rates= Caluclates price of water between government, public and companies
#Date:9/20/16
#Question 1
mark=(float(input("Put in your mark")))
if mark>=80 and mark<=100 :
print("Execelent work, it\'s an A!!!")
elif mark<=79 and mark>=70:
print("Not bad, you got a B!")
elif mark<=69 and mark>=60:
print("you barely passed! you got a C")
elif mark<=59 and mark>=50:
print("you are ruining your life, you got a D")
elif mark<50 and mark>=0:
print("Dig a grave you failed!")
elif mark>100 or mark<0:
print("INVALID! INVALID! INVALID!")
#Question 2
print("Hey, welcome to the fair!")
print("tickets are $10.95 a person while group bigger than 5 cost $8.95 a person!")
ticket=(int(input("How many tickets would you like to purchase?")))
if ticket<5 :
print("You purchased",ticket,"Tickets! That will be",round(10.95*ticket,2))
elif ticket>=5 and ticket<=50 :
print("You Purchased",ticket,"Tickets! That will be",round(8.95*ticket,2))
elif ticket>50:
print("ERROR ERROR, YOU CANNOT PURCHASE IN THAT AMOUNT!")
#Question 3
import math
print("Lets do quadratics! ax**2+bx+x=0")
print(" We will be using -b+-sqrt(b**2-4ac)/2a")
a=float(input("What will represent a?:"))
b=float(input("What will represent b?:"))
c=float(input("What will represent c?:"))
d=(b**2)-4*a*c
if d>0:
x1=-b+math.sqrt(d)/2*a
x2=-b-math.sqrt(d)/2*a
print(round(x1,2), "or", round(x2,2))
elif d==0:
x3=-b/2*a
print(round(x,2))
elif d<0:
print("No real roots")
#Question 4
print("Welcome to the city water company!")
typ=(str(input("please enter what type you fall under: G for government, C for Company or P for public:")))
liter=(float(input("How much water did you use?:")))
if typ=='c' and liter<=1000:
print("You are a company and used:",liter,"The subtotal is $",300,"With a total of $",(300*0.13)+300)
elif typ=='c' and liter>1000:
print("You are a company and used:",liter,"The subtotal is $",round(300+(0.75*(liter-1000)),2),"With a total of $",round(((300+(0.75*(liter-1000)))*0.13)+(300+(0.75*(liter-1000))),2))
elif typ=='p' and liter<=100:
print("You used:",liter,"The subtotal is $",round(0.77*liter,2),"With a total of $",round(((0.77*liter)*0.13)+(0.77*liter),2))
elif typ=='p' and liter>100:
print("You used:",liter,"The subtotal is $",round(0.50*liter,2),"With a total of $",round(((0.50*liter)*0.13)+(0.50*liter),2))
elif typ=='g' and liter<=500:
print("You are the government and used:",liter,"The subtotal is $:",200,"With a total of $",round((200*0.13)+200),2)
elif typ=='g' and liter<500:
print("You are the government and used:",liter,"The subtotal is $:",400,"With a total of $",round((400*0.13)+400),2)
else:
print("ERRROR ERROR ERROR")
| [
"noreply@github.com"
] | noreply@github.com |
996dc0474cab198d06f85edfffcf0e20b3d4b6c1 | 77215178766736db9efe1e9625b90780716f4008 | /decaptcha_example/decaptcha_example/example_engine.py | c8eb8f970ba37d778a981f26a9f61b8a6417ef28 | [] | no_license | yetone/decaptcha | 92b574c6af913eda78e3fdf1ddb351152268bf3c | 16474b444c3d3ff070cdf0daec362f34cb20589b | refs/heads/master | 2022-03-08T04:32:53.406676 | 2022-03-01T13:33:57 | 2022-03-01T13:33:57 | 47,335,245 | 14 | 2 | null | 2022-03-01T13:33:58 | 2015-12-03T13:56:03 | Python | UTF-8 | Python | false | false | 430 | py | from os.path import dirname, join
class ExampleEngine(object):
def __init__(self, *args, **kwargs):
pass
def has_captcha(self, response, **kwargs):
return response.meta.get('has_captcha', False)
def get_captcha_image(self, *args, **kwargs):
path = join(dirname(__file__), 'example_captcha.gif')
return open(path).read()
def submit_captcha(self, *args, **kwargs):
pass
| [
"shirk3y@gmail.com"
] | shirk3y@gmail.com |
f2207aa3c133c9aac5da21550018769f2cd3aad7 | d39091bcc2d94414a8d3415ce9479ef1c0e05352 | /custom_operations.py | e6dd4e443aa75702085230306111987d70f3876d | [
"MIT"
] | permissive | chutien/zpp-mem | 979a7b24a37cbeab4e41212fb281629243f61d5f | 470dec89dda475f7272b876f191cef9f8266a6dc | refs/heads/master | 2020-04-02T19:38:44.313064 | 2019-10-17T12:57:32 | 2019-10-17T12:57:32 | 154,741,539 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,411 | py | import tensorflow as tf
import numpy as np
def feedback_alignment_fc(input, weights, initializer=tf.initializers.he_normal(), name="fa_fc"):
random = tf.get_variable("random", shape=reversed(weights.get_shape().as_list()),
initializer=initializer, use_resource=True, trainable=False)
@tf.custom_gradient
def func(x):
def grad(dy, variables=[weights]):
dx = tf.matmul(dy, random)
dw = tf.matmul(tf.transpose(x), dy)
return dx, [dw]
return tf.matmul(x, weights), grad
with tf.name_scope(name):
return func(input)
def feedback_alignment_conv(input, weights, strides, padding, use_cudnn_on_gpu=True, data_format='NHWC',
dilations=[1, 1, 1, 1], initializer=tf.initializers.he_normal(),
name="fa_conv"):
random = tf.get_variable("random", shape=weights.get_shape().as_list(), initializer=initializer, use_resource=True, trainable=False)
@tf.custom_gradient
def func(x):
def grad(dy, variables=[weights]):
dx = tf.nn.conv2d_backprop_input(tf.shape(x), random, dy, strides, padding, use_cudnn_on_gpu,
data_format, dilations)
dw = tf.nn.conv2d_backprop_filter(x, weights.get_shape(), dy, strides, padding, use_cudnn_on_gpu,
data_format, dilations)
return dx, [dw]
return tf.nn.conv2d(input, weights, strides, padding, use_cudnn_on_gpu, data_format, dilations), grad
with tf.name_scope(name):
return func(input)
def direct_feedback_alignment_fc(input, weights, output_dim, error_container, initializer=tf.initializers.he_normal(),
name="dfa_fc"):
random = tf.get_variable("random", shape=[output_dim, weights.shape[0]], initializer=initializer, use_resource=True, trainable=False)
@tf.custom_gradient
def func(x):
def grad(dy, variables=[weights]):
dx = tf.matmul(error_container[0], random, name='matmul_grad_x')
dw = tf.matmul(tf.transpose(x), dy, name='matmul_grad_w')
return dx, [dw]
return tf.matmul(x, weights, name='matmul_forward_x'), grad
with tf.name_scope(name):
return func(input)
def direct_feedback_alignment_conv(input, weights, output_dim, error_container, strides, padding,
use_cudnn_on_gpu=True, data_format='NHWC', dilations=[1, 1, 1, 1],
initializer=tf.initializers.he_normal(), name="dfa_conv"):
input_shape = tf.shape(input)
input_flat_shape = np.prod(input.shape[1:])
random = tf.get_variable("random", shape=[output_dim, input_flat_shape],
initializer=initializer, use_resource=True, trainable=False)
@tf.custom_gradient
def func(x):
def grad(dy, variables=[weights]):
dx = tf.reshape(tf.matmul(error_container[0], random), input_shape)
dw = tf.nn.conv2d_backprop_filter(x, weights.get_shape(), dy, strides, padding, use_cudnn_on_gpu,
data_format, dilations)
return dx, [dw]
return tf.nn.conv2d(input, weights, strides, padding, use_cudnn_on_gpu, data_format, dilations), grad
with tf.name_scope(name):
return func(input)
| [
"noreply@github.com"
] | noreply@github.com |
ba5014784f3743dd646cde77f767c986b56fe4b5 | 7858af7e78bb13d5495f85b3ea96af53891a21a6 | /suscripciones/models.py | a6535a1a8ca873bfbb8bc6bcef9bbc6346a8a218 | [] | no_license | tecogn88/Pinchef | bfd0f016f0f63777f498fdbd5a6047eafbaa061c | e957978113a815f40da8bba6b2cbed1e66d214d9 | refs/heads/master | 2021-01-23T06:50:33.285280 | 2015-03-19T14:54:58 | 2015-03-19T14:54:58 | 31,382,314 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,783 | py | # -*- coding: utf-8 -*-
from django.db import models
from django.contrib.auth.models import User
class Suscripcion(models.Model):
class Meta:
verbose_name = u"Suscripción"
verbose_name_plural = u"Suscripciones"
fecha = models.DateTimeField(auto_now_add=True)
usuario = models.ForeignKey(User, related_name='suscripciones')
id_cliente = models.CharField(max_length=25)
id_suscripcion = models.CharField(max_length=25)
status = models.CharField(max_length=15)
activo = models.BooleanField(default=False)
def __unicode__(self):
return self.usuario.get_full_name()
# class Log(models.Model):
# class Meta:
# verbose_name = u'Log'
# verbose_name_plural = u'Logs'
# fecha = models.DateTimeField(auto_now_add=True)
# suscripcion = models.ForeignKey(Suscripcion, related_name='logs')
# monto = models.FloatField()
# status = models.CharField(max_length=25)
class Historial(models.Model):
class Meta:
verbose_name='Historial'
verbose_name_plural='Historial'
fecha = models.DateTimeField(auto_now_add=True)
suscripcion = models.ForeignKey(Suscripcion, related_name='movimientos')
movimiento = models.CharField(max_length=55)
def __unicode__(self):
return self.movimiento
class Event(models.Model):
class Meta:
verbose_name = 'Evento'
verbose_name_plural = 'Eventos'
fecha = models.DateTimeField(auto_now_add=True)
id_evento = models.CharField(max_length=25)
objeto = models.CharField(max_length=25)
livemode = models.BooleanField(default=True)
created_at = models.CharField(max_length=25)
event_type = models.CharField(max_length=25)
data = models.TextField()
def __unicode__(self):
return self.id_evento
| [
"fabian@newemage.com"
] | fabian@newemage.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.