blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
402356583e661662b2d31cbd9a4377957378f7e5 | 64bc4aa71333c5848a5da60047eb101d1e2b2699 | /trydjango19/posts/forms.py | 58de489c69f22b7b69e0c0d2dec8e9639859539d | [] | no_license | Kpsmile/Recruitment-Site | a28f8c9b582428717aabfcfe112946a513f2f16b | 7266d8b95d8d2849fd0500b139631515357c2a0f | refs/heads/master | 2020-05-20T14:37:42.902372 | 2018-09-02T18:27:35 | 2018-09-02T18:27:35 | 84,483,822 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 462 | py | from django import forms
from .models import Post
class PostForm(forms.ModelForm):
class Meta:
model = Post
fields = [
"lastname",
"firstname",
"skills",
"Experience",
"Designation",
"currentsalary",
"expectedsalary",
"Noticeperiod",
"Resume",
"coverletter",
"image",
"profile_status"
] | [
"rkamakhya@gmail.com"
] | rkamakhya@gmail.com |
2dd10e9b1e87ed1142ff9a4a08cfcf45ee0fcfec | 0c942c7d35e26d8748ed9ae27bf973d6f81844fb | /import csv.py | 58a9265a78a750a30cee669c17baa9982beaf285 | [] | no_license | mjncardenas/Election-Analysis | 280b338e09ab2183349609537087a390a6cc4ff2 | a4bf2f19f48b17fc20fd3f250b95285edd3c8078 | refs/heads/master | 2021-04-24T11:48:23.901275 | 2020-04-01T01:54:12 | 2020-04-01T01:54:12 | 250,114,487 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,970 | py | import csv
import os
# Assign a variable to load a file from a path (input CSV of ballots).
file_to_load = os.path.join("Resources/election_results.csv")
# Assign a variable to save the file to a path.
# NOTE(review): file_to_save is never written to in this script -- results
# only go to the terminal.  Confirm whether a text report is still wanted.
file_to_save = os.path.join("analysis", "election_analysis.txt")
#1. Initializing a total vote counter
total_votes = 0
# Candidate bookkeeping: the list records first-seen order; the dict maps
# candidate name -> running vote count.
candidate_options = []
candidate_votes = {}
# Winner tracking, updated inside the percentage loop below.
winning_candidate = ""
winning_count = 0
winning_percentage = 0
# Open the election results and read the file.
with open(file_to_load) as election_data:
    file_reader = csv.reader(election_data)
    # Read (and discard) the header row so it is not counted as a ballot.
    headers = next(file_reader)
    # Tally each ballot row in the CSV file.
    for row in file_reader:
        # Add to the total vote count.
        total_votes += 1
        # Column 2 holds the candidate name for this ballot.
        candidate_name = row[2]
        if candidate_name not in candidate_options:
            # First sighting of this candidate: register them and
            # start their vote count at zero.
            candidate_options.append(candidate_name)
            # 2. Begin tracking that candidate's vote count.
            candidate_votes[candidate_name] = 0
        # Add a vote to that candidate's count.
        candidate_votes[candidate_name] += 1
    # Determine the percentage of votes for each candidate by looping through
    # the counts (still inside the `with` block; the reader is no longer used).
    # 1. Iterate through the candidate list.
    for candidate in candidate_votes:
        # 2. Retrieve vote count of a candidate.
        votes = candidate_votes[candidate]
        # 3. Calculate the percentage of votes.
        vote_percentage = float(votes) / float(total_votes) * 100
        # Print each candidate, their voter count, and percentage to the
        # terminal.
        print(f"{candidate}: {vote_percentage:.1f}% ({votes:,})\n")
        # Track the current leader; strict '>' means the first of a tie wins.
        if (votes > winning_count) and (vote_percentage > winning_percentage):
            winning_count = votes
            winning_candidate = candidate
            winning_percentage = vote_percentage
| [
"michelle@michelles-air.home"
] | michelle@michelles-air.home |
15f9959cd15e212381be7bdcaef5c62408654411 | df47f9d5b9cf014d63f7a0041bd2ef8f1d842e2b | /intermediate_programs/day25-USStatesGame-ReadingCSV/reading_csv_data.py | 0ee3c07798475adc3ed37babca0e8161585e3182 | [] | no_license | elvin-rivera23/100DaysOfCodePython | e7b57b9d49da14e1e7fced6cdfa7b35305dbc7a4 | 52aa402621f5982679e19236fd9185c06d9c4dcb | refs/heads/main | 2023-07-19T03:09:55.586666 | 2021-09-06T00:42:15 | 2021-09-06T00:42:15 | 399,601,225 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,473 | py | # # in the same directory, so use relative data path
# with open("./weather_data.csv") as data_file:
# data = data_file.readlines() # .readlines() takes each line in the file and turn it into item in a list
# print(data)
# # built in python library
# import csv
#
# with open("./weather_data.csv") as data_file:
# data = csv.reader(data_file) # reads file and outputs data
# print(data)
# temperatures = []
# for temp in data:
# print(row)
# Expected output:
# ['day', 'temp', 'condition']
# ['Monday', '12', 'Sunny']
# ['Tuesday', '14', 'Rain']
# ['Wednesday', '15', 'Rain']
# ['Thursday', '14', 'Cloudy']
# ['Friday', '21', 'Sunny']
# ['Saturday', '22', 'Sunny']
# ['Sunday', '24', 'Sunny']
# for temperature in data:
# # exclude column title
# if temperature[1] != "temp":
# temperatures.append(int(temperature[1])) # turn the values into int instead of str
#
# print(temperatures)
# ----- REFERENCES: https://pandas.pydata.org -----
# import pandas
#
# data = pandas.read_csv("weather_data.csv")
# print(data)
# # EXPECTED OUTPUT:
# day temp condition
# 0 Monday 12 Sunny
# 1 Tuesday 14 Rain
# 2 Wednesday 15 Rain
# 3 Thursday 14 Cloudy
# 4 Friday 21 Sunny
# 5 Saturday 22 Sunny
# 6 Sunday 24 Sunny
# print(data["temp"])
# # EXPECTED OUTPUT:
# 0 12
# 1 14
# 2 15
# 3 14
# 4 21
# 5 22
# 6 24
# Name: temp, dtype: int64
# ----- DataFrames & Series: working with Rows and Columns -----
import pandas
data = pandas.read_csv("weather_data.csv")
# print(type(data)) # output: <class 'pandas.core.frame.DataFrame'>
# print(type(data["temp"])) # output: <class 'pandas.core.series.Series'>
# notes: .Series equivalent to single column list
# data_dict = data.to_dict() # creates data dictionary, each column has its own dictionary
# print(data_dict)
#
# temp_list = data["temp"].to_list()
# print(temp_list) # [12, 14, 15, 14, 21, 22, 24]
#
# total_temp = 0
# for temp in temp_list:
# total_temp += temp
#
# average = int(total_temp/len(temp_list))
# print(average)
#
# # # instructor way
# # average = sum(temp_list) / len(temp_list)
# # print(int(average))
#
#
# # built in function for pandas
# print(data["temp"].mean())
#
# # to get the maximum value
# # note: data = pandas.read_csv("weather_data.csv")
# print(data["temp"].max()) # max is 24
# # Get data in Columns
# print(data["condition"])
# data.condition # pandas converts each series into attributes
# # Get data in Row: table[column_search_through]
# # data.day = data["day"]
# print(data[data.day == "Monday"])
#
# # Challenge: print day that had highest temp in the week
# print(data[data.temp == data.temp.max()]) # 6 Sunday 24 Sunny
# Boolean-mask selection: rows whose 'day' column equals "Monday".
monday = data[data.day == "Monday"]
print(monday.condition) # 0    Sunny
# convert from celsius to farenheit
# NOTE(review): int() on a one-row Series relies on pandas' scalar
# coercion; it raises if the selection ever matches more than one row.
monday_temp = int(monday.temp)
monday_temp_F = monday_temp * 9/5 + 32
print(monday_temp_F)
# Create a dataframe from scratch (columns come from the dict keys).
data_dict = {
    "students": ["Elvin", "Diana", "Jonny"],
    "scores": [76, 56, 65]
}
data = pandas.DataFrame(data_dict)
print(data)
#    students  scores
# 0     Elvin      76
# 1     Diana      56
# 2     Jonny      65
# this will create csv in current directory
data.to_csv("new_data.csv")
| [
"elvinkrivera@gmail.com"
] | elvinkrivera@gmail.com |
9d14d6702c380b23bdbc1f209bb5f8a3e6a6beb7 | 46bab53f41324fa880626d80c7a175e11ec30f5b | /sinar/representatives/setuphandlers.py | f322b7a6cba088432f71f03cc810a8c9149343b1 | [] | no_license | joemariedimzon/sinar.representatives | 8d21b5447b65f55fbde809c74dc74be6bc0bfdf7 | 11d63647a1d82c739a6d4312363392f8a6ca79ed | refs/heads/master | 2021-01-18T05:00:12.128279 | 2015-07-07T07:51:19 | 2015-07-07T07:51:19 | 38,667,596 | 0 | 0 | null | 2015-07-07T06:07:04 | 2015-07-07T06:07:03 | null | UTF-8 | Python | false | false | 384 | py | from collective.grok import gs
from sinar.representatives import MessageFactory as _
@gs.importstep(
    name=u'sinar.representatives',
    title=_('sinar.representatives import handler'),
    description=_(''))
def setupVarious(context):
    """GenericSetup import step for the sinar.representatives profile.

    The marker-file check is the standard idiom that keeps this step from
    running when some *other* add-on's profile is being imported.
    """
    if context.readDataFile('sinar.representatives.marker.txt') is None:
        # Not our profile -- nothing to do.
        return
    # Site root, available for any future setup work below.
    portal = context.getSite()
    # do anything here
| [
"khairil.yusof@gmail.com"
] | khairil.yusof@gmail.com |
041918d613aacc9314e933b4e996bb8b4af16f3c | e1fe8da6e18428401aed898f792e3193848ba094 | /config_babymodel_1.1_lr.py | 732f215a0ec9b5dc07f2f35e5b05963c08eb72ee | [] | no_license | li3cmz/SRGT | 0e1fe4531a7f01e0b04ac71ae8ed0fba41bd2c04 | 7b03f81487e949237988f420c65bef4e7d2cce09 | refs/heads/master | 2020-12-03T16:53:44.748993 | 2020-09-07T12:59:15 | 2020-09-07T12:59:15 | 231,397,670 | 1 | 0 | null | 2020-01-03T14:01:58 | 2020-01-02T14:26:18 | Python | UTF-8 | Python | false | false | 24,209 | py | """Config
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=invalid-name
import copy
import texar as tx
import tensorflow as tf
# NOTE(review): initial_lr is not referenced by model['opt'] below, which
# hard-codes 5e-4 -- changing this constant currently has no effect.
initial_lr = 5e-4
max_nepochs = 20  # Total number of training epochs
                  # (including pre-train and full-train)
pretrain_nepochs = 10  # Number of pre-train epochs (training as autoencoder) ###modify
display = 500  # Display the training results every N training steps.
display_eval = 1e10  # Display the dev results every N training steps (set to a
                     # very large value to disable it).
restore = './out_1019_baby_model_1.1/checkpoints/ckpt-4'#'./out_0919_main_v24-005-002/checkpoints/ckpt-5' ###modify # Model snapshot to restore from
model_name = 'EvolveGTAE'
# Loss-term weights (all currently 1, i.e. equally weighted).
lambda_adj_final = 1 # Weight of the adj_final loss
lambda_rephraser = 1
lambda_adj_cft = 1
# Sequence-length caps for target sentences (y) and context windows (ctx).
max_sequence_length_y = 128 # Maximum number of tokens in a sentence ###check
max_sequence_length_ctx = 128
max_decode_yy = 128  # Max decoding steps for generated endings ###maybe can't be so big
batch_size=8
max_utterance_cnt=2
# BERT-base uncased vocabulary and its special-token ids.
vocab_size = 30522
bos_token_id = 101
eos_token_id = 102
pad_id = 0
distributed = False
vocab_file = './bert_pretrained_models/bert_pretrained_models/uncased_L-12_H-768_A-12/vocab.txt'
do_lower_case = True
## Data configs
# TFRecord schema: every sequence group is stored as three fixed-length
# int64 features (ids / mask / segment ids).  Context-window groups are
# padded to max_sequence_length_ctx, per-sentence groups to
# max_sequence_length_y.
_CTX_GROUPS = ("x1x2ysx1xx2", "x1x2ysx1xx2yy", "x1x2", "x1xx2")
_SENT_GROUPS = ("y1", "y2", "y3", "yy1", "yy2", "yy3")

feature_original_types = {}
for _group, _maxlen in (
        [(g, max_sequence_length_ctx) for g in _CTX_GROUPS]
        + [(g, max_sequence_length_y) for g in _SENT_GROUPS]):
    for _kind in ("input_ids", "input_mask", "segment_ids"):
        feature_original_types["{}_{}".format(_kind, _group)] = [
            "tf.int64", "FixedLenFeature", _maxlen]
# After reading, every int64 feature above is downcast to int32 (the dtype
# the model graph expects).  One entry per (kind, group) pair.
feature_convert_types = {
    "{}_{}".format(_kind, _group): "tf.int32"
    for _group in ("x1x2ysx1xx2", "x1x2ysx1xx2yy", "x1x2", "x1xx2",
                   "y1", "y2", "y3", "yy1", "yy2", "yy3")
    for _kind in ("input_ids", "input_mask", "segment_ids")
}
mini="" ###modify #"/mini" or ""
text_data_dir = "./data/TimeTravel/bert3{}".format(mini) ###modify bert or bert2
adj_data_dir = "./data/TimeTravel{}".format(mini)
def _adj_dataset(files, data_name):
    """Dataset spec for one serialized (L+2)x(L+2) adjacency matrix."""
    return {
        'files': files,
        'data_type': 'tf_record',
        'data_name': data_name,
        'numpy_options': {
            'numpy_ndarray_name': 'adjs',
            'shape': [max_sequence_length_y + 2, max_sequence_length_y + 2],
            'dtype': 'tf.int32',
        },
        'feature_original_types': {
            'adjs': ['tf.string', 'FixedLenFeature'],
        },
    }


# Sentence keys in the fixed dataset order: original endings y1-y3, then
# counterfactual endings yy1-yy3.
_SENT_KEYS = ['y1', 'y2', 'y3', 'yy1', 'yy2', 'yy3']

# Dataset 0 is the tokenized text; datasets 1-6 are the directed adjacency
# matrices, datasets 7-12 the undirected ones (val/test below rely on
# these index positions).
train_data = {
    'batch_size': batch_size,
    #'seed': 123,
    'datasets': (
        [{
            "files": "{}/train_supervised_large.tf_record".format(
                text_data_dir),
            "data_name": "text",
            'data_type': 'tf_record',
            "feature_original_types": feature_original_types,
            "feature_convert_types": feature_convert_types,
        }]
        + [_adj_dataset(
            '{}/TimeTravel.train_supervised_large_{}_adjs_dirt.tfrecords'
            .format(adj_data_dir, _k),
            '{}_d'.format(_k)) for _k in _SENT_KEYS]
        + [_adj_dataset(
            '{}/TimeTravel.train_supervised_large_{}_adjs_undirt.tfrecords'
            .format(adj_data_dir, _k),
            '{}_und'.format(_k)) for _k in _SENT_KEYS]
    ),
    'name': 'train',
    'shuffle': True,
    "shuffle_buffer_size": 1000,
}
# Validation config: identical structure to train_data, with every file
# path rewritten from the train split to the dev split.  Dataset indices:
# 0 = text, 1-6 = directed adjs (y1..yy3), 7-12 = undirected adjs.
val_data = copy.deepcopy(train_data)
val_data['datasets'][0]['files'] = '{}/dev_data.tf_record'.format(
    text_data_dir)
for _i, _key in enumerate(['y1', 'y2', 'y3', 'yy1', 'yy2', 'yy3']):
    val_data['datasets'][1 + _i]['files'] = (
        '{}/TimeTravel.dev_data_{}_adjs_dirt.tfrecords'.format(
            adj_data_dir, _key))
    val_data['datasets'][7 + _i]['files'] = (
        '{}/TimeTravel.dev_data_{}_adjs_undirt.tfrecords'.format(
            adj_data_dir, _key))
val_data['shuffle'] = False
# Test config: same rewrite as val_data but against the test split.
# Dataset indices: 0 = text, 1-6 = directed adjs, 7-12 = undirected adjs.
test_data = copy.deepcopy(train_data)
test_data['datasets'][0]['files'] = '{}/test_data.tf_record'.format(
    text_data_dir)
for _j, _name in enumerate(['y1', 'y2', 'y3', 'yy1', 'yy2', 'yy3']):
    test_data['datasets'][1 + _j]['files'] = (
        '{}/TimeTravel.test_data_{}_adjs_dirt.tfrecords'.format(
            adj_data_dir, _name))
    test_data['datasets'][7 + _j]['files'] = (
        '{}/TimeTravel.test_data_{}_adjs_undirt.tfrecords'.format(
            adj_data_dir, _name))
test_data['shuffle'] = False
dim_hidden = 768       # BERT-base / GPT-2 hidden size
dim_hidden_mini = 512  # reduced width for the adjacency attention head


def _ffn_hparams(dim):
    """Position-wise feed-forward net: dim -> 4*dim (ReLU) -> dropout -> dim.

    This identical FFN shape was previously duplicated verbatim in four
    encoder configs; they all share this helper now.
    """
    return {
        "layers": [
            {
                "type": "Dense",
                "kwargs": {
                    "name": "conv1",
                    "units": dim * 4,
                    "activation": "relu",
                    "use_bias": True,
                },
            },
            {
                "type": "Dropout",
                "kwargs": {"rate": 0.1},
            },
            {
                "type": "Dense",
                "kwargs": {
                    "name": "conv2",
                    "units": dim,
                    "use_bias": True,
                },
            },
        ],
        "name": "ffn",
    }


def _graph_encoder_hparams(name, **extra):
    """Shared hyperparameters for the two graph-transformer encoders."""
    hparams = {
        'num_blocks': 2,
        'dim': dim_hidden,
        'use_bert_config': False,
        'embedding_dropout': 0.1,
        'residual_dropout': 0.1,
        'graph_multihead_attention': {
            'name': 'multihead_attention',
            'num_units': dim_hidden,
            'num_heads': 8,
            'dropout_rate': 0.1,
            'output_dim': dim_hidden,
            'use_bias': False,
        },
        'initializer': None,
        'poswise_feedforward': _ffn_hparams(dim_hidden),
        'name': name,
    }
    hparams.update(extra)
    return hparams


def _embedding_bn_hparams(scope):
    """One of the two independent embedding batch-norm modules."""
    return {
        "name": "EmbeddingNormalizeNN",
        "size": dim_hidden,
        "epsilon": 1e-3,
        "decay": 0.99,
        "name_scope": scope,
    }


# Shared RNN output-layer settings (num_layers=0 means pass-through).
_rnn_output_layer = {
    "num_layers": 0,
    "layer_size": 128,
    "activation": "identity",
    "final_layer_activation": None,
    "other_dense_kwargs": None,
    "dropout_layer_ids": [],
    "dropout_rate": 0.5,
    "variational_dropout": False,
}

model = {
    'gpt2_hidden_dim': dim_hidden,
    'dim_c_big': dim_hidden,
    'dim_c': dim_hidden,
    'wordEmbedder': {'dim': dim_hidden},
    'gpt2_posEmbedder': {'dim': dim_hidden},
    'bert_encoder': {},  # library defaults are sufficient
    'bidirectionalRNNEncoder': {
        "rnn_cell_fw": {
            'type': 'GRUCell',
            # fw/bw halves concatenate back to dim_hidden
            'kwargs': {'num_units': dim_hidden / 2},
        },
        "rnn_cell_share_config": True,
        "output_layer_fw": dict(_rnn_output_layer),
        # Empty dict: falls back to the same settings as output_layer_fw.
        "output_layer_bw": {},
        "output_layer_share_config": True,
        "name": "bidirectional_rnn_encoder",
    },
    'unidirectionalRNNEncoder': {
        "rnn_cell": {
            'type': 'GRUCell',
            'kwargs': {'num_units': dim_hidden},
        },
        "output_layer": dict(_rnn_output_layer),
        "name": "unidirectional_rnn_encoder",
    },
    'EmbeddingNormalize': {"name": "EmbeddingNormalize"},
    'EmbeddingNormalizeNN1': _embedding_bn_hparams('nnBN1'),
    'EmbeddingNormalizeNN2': _embedding_bn_hparams('nnBN2'),
    'encoder': _graph_encoder_hparams('graph_transformer_encoder'),
    'cross_graph_encoder': _graph_encoder_hparams('cross_graph_encoder',
                                                  use_adj=False),
    'adjMultiheadAttention_encoder': {
        'initializer': None,
        'num_heads': 8,
        'output_dim': 1,
        'num_units': dim_hidden_mini,  # was the bare literal 512
        'dropout_rate': 0.1,
        'use_bias': True,
        "name": "adjMultiheadAttention_encoder",
    },
    'transformer_encoder': {
        'num_blocks': 2,
        'dim': dim_hidden,
        'use_bert_config': False,
        'embedding_dropout': 0.1,
        'residual_dropout': 0.1,
        'multihead_attention': {
            'name': 'multihead_attention',
            'num_units': dim_hidden,
            'num_heads': 8,
            'dropout_rate': 0.1,
            # The original listed 'output_dim' twice with the same value;
            # only one copy is kept (the duplicate was silently dropped by
            # the dict literal anyway).
            'output_dim': dim_hidden,
            'use_bias': False,
        },
        'initializer': None,
        'poswise_feedforward': _ffn_hparams(dim_hidden),
        'name': 'transformer_encoder',
    },
    'transformer_decoderToEncoder': {
        'dim': dim_hidden,
        'num_blocks': 12,
        'multihead_attention': {
            'use_bias': True,
            'num_units': dim_hidden,
            'num_heads': 12,
            'output_dim': dim_hidden,
        },
        'initializer': {
            'type': 'variance_scaling_initializer',
            'kwargs': {
                'scale': 1.0,
                'mode': 'fan_avg',
                'distribution': 'uniform',
            },
        },
        'poswise_feedforward': _ffn_hparams(dim_hidden),
    },
    'pooling_aggregator': {
        "output_dim": dim_hidden,
        "input_dim": dim_hidden,
        "concat": True,
        "pooling": 'meanpooling',
        "dropout_rate": 0.0,
        "l2_reg": 0.1,
        "use_bias": True,
        "activation": tf.nn.relu,
        "seed": 1024,
        "update_weights": False,
        "name": "pooling_aggregator",
    },
    'rephrase_encoder': {
        'rnn_cell': {
            'type': 'GRUCell',
            'kwargs': {'num_units': dim_hidden},
            'dropout': {'input_keep_prob': 0.5},
        },
    },
    'rephrase_decoder': {
        'rnn_cell': {
            'type': 'GRUCell',
            'kwargs': {'num_units': dim_hidden},
            'dropout': {
                'input_keep_prob': 0.5,
                'output_keep_prob': 0.5,
            },
        },
        'attention': {
            'type': 'DynamicBahdanauAttention',
            'kwargs': {'num_units': dim_hidden},
            'attention_layer_size': dim_hidden,
        },
        'max_decoding_length_train': max_decode_yy,
        'max_decoding_length_infer': max_decode_yy,
    },
    'opt': {
        'optimizer': {
            'type': 'AdamOptimizer',
            # Use the named constant (same value, 5e-4) so that tuning
            # initial_lr at the top of the file actually takes effect;
            # previously the rate was hard-coded here.
            'kwargs': {'learning_rate': initial_lr},
        },
    },
}
| [
"15992507766yz"
] | 15992507766yz |
a52afad79d275173735bfbc72a33cf1ba2a7a17e | a217801fdf840d97785f06a1e2381d6ed62d7852 | /volume/drivers/netapp/dataontap/nfs_base.py | 1e7c08ae0f1bae744bd926cd2f9e9962e8f06264 | [] | no_license | TonyChengTW/Cinder_Extend | fb05cdda9d925d1c8344595a19472125959e4830 | 5e20383660cf5c0340aa8fa3cf387bb8b59efc4b | refs/heads/master | 2020-06-18T09:54:06.834743 | 2016-11-30T03:01:16 | 2016-11-30T03:01:16 | 75,145,443 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 37,631 | py | # Copyright (c) 2012 NetApp, Inc. All rights reserved.
# Copyright (c) 2014 Ben Swartzlander. All rights reserved.
# Copyright (c) 2014 Navneet Singh. All rights reserved.
# Copyright (c) 2014 Clinton Knight. All rights reserved.
# Copyright (c) 2014 Alex Meade. All rights reserved.
# Copyright (c) 2014 Bob Callaway. All rights reserved.
# Copyright (c) 2015 Tom Barron. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Volume driver for NetApp NFS storage.
"""
import math
import os
import re
import shutil
import threading
import time
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import units
import six.moves.urllib.parse as urlparse
from cinder import exception
from cinder.i18n import _, _LE, _LI, _LW
from cinder.image import image_utils
from cinder import utils
from cinder.volume.drivers.netapp import options as na_opts
from cinder.volume.drivers.netapp import utils as na_utils
from cinder.volume.drivers import nfs
LOG = logging.getLogger(__name__)
class NetAppNfsDriver(nfs.NfsDriver):
"""Base class for NetApp NFS driver for Data ONTAP."""
# do not increment this as it may be used in volume type definitions
VERSION = "1.0.0"
REQUIRED_FLAGS = ['netapp_login', 'netapp_password',
'netapp_server_hostname']
    def __init__(self, *args, **kwargs):
        """Initialize driver state and register NetApp config option groups."""
        na_utils.validate_instantiation(**kwargs)

        self._execute = None   # command executor; injected via set_execute()
        self._context = None   # request context; set in do_setup()
        # Popped before delegating so the base initializer never sees the
        # NetApp-specific "app_version" kwarg.
        self._app_version = kwargs.pop("app_version", "unknown")

        super(NetAppNfsDriver, self).__init__(*args, **kwargs)
        # Register every NetApp option group this driver family may read.
        self.configuration.append_config_values(na_opts.netapp_connection_opts)
        self.configuration.append_config_values(na_opts.netapp_basicauth_opts)
        self.configuration.append_config_values(na_opts.netapp_transport_opts)
        self.configuration.append_config_values(na_opts.netapp_img_cache_opts)
        self.configuration.append_config_values(na_opts.netapp_nfs_extra_opts)
    def set_execute(self, execute):
        """Inject the callable used to run shell commands (e.g. 'rm', 'ls')."""
        self._execute = execute
    def do_setup(self, context):
        """One-time driver setup: validate required NetApp config flags.

        ``zapi_client`` is reset to None here; subclasses are expected to
        create the concrete client connection afterwards.
        """
        super(NetAppNfsDriver, self).do_setup(context)
        self._context = context
        # Raises if any of REQUIRED_FLAGS is missing from the configuration.
        na_utils.check_flags(self.REQUIRED_FLAGS, self.configuration)
        self.zapi_client = None
    def check_for_setup_error(self):
        """Returns an error if prerequisites aren't met."""
        # Pure delegation: generic NFS checks are sufficient at this level.
        super(NetAppNfsDriver, self).check_for_setup_error()
def get_pool(self, volume):
"""Return pool name where volume resides.
:param volume: The volume hosted by the driver.
:return: Name of the pool where given volume is hosted.
"""
return volume['provider_location']
    def create_volume_from_snapshot(self, volume, snapshot):
        """Creates a volume from a snapshot.

        The clone starts at the snapshot's size and is grown afterwards if
        the requested volume is larger.  Returns the model update dict with
        the new volume's provider_location (the source snapshot's share).
        """
        vol_size = volume.size
        snap_size = snapshot.volume_size
        self._clone_volume(snapshot.name, volume.name, snapshot.volume_id)
        # The clone lands on the same NFS share as the source snapshot.
        share = self._get_volume_location(snapshot.volume_id)
        volume['provider_location'] = share
        path = self.local_path(volume)
        run_as_root = self._execute_as_root
        # Wait (bounded) for the cloned file to show up on the mounted share
        # before touching it.
        if self._discover_file_till_timeout(path):
            self._set_rw_permissions(path)
            if vol_size != snap_size:
                try:
                    self.extend_volume(volume, vol_size)
                except Exception:
                    # Resize failed: delete the clone so a half-created
                    # volume file is not leaked, then re-raise the original
                    # exception.
                    with excutils.save_and_reraise_exception():
                        LOG.error(
                            _LE("Resizing %s failed. Cleaning volume."),
                            volume.name)
                        self._execute('rm', path, run_as_root=run_as_root)
        else:
            raise exception.CinderException(
                _("NFS file %s not discovered.") % volume['name'])

        return {'provider_location': volume['provider_location']}
def create_snapshot(self, snapshot):
"""Creates a snapshot."""
self._clone_volume(snapshot['volume_name'],
snapshot['name'],
snapshot['volume_id'])
def delete_snapshot(self, snapshot):
"""Deletes a snapshot."""
nfs_mount = self._get_provider_location(snapshot.volume_id)
if self._volume_not_present(nfs_mount, snapshot.name):
return True
self._execute('rm', self._get_volume_path(nfs_mount, snapshot.name),
run_as_root=self._execute_as_root)
def _get_volume_location(self, volume_id):
"""Returns NFS mount address as <nfs_ip_address>:<nfs_mount_dir>."""
nfs_server_ip = self._get_host_ip(volume_id)
export_path = self._get_export_path(volume_id)
return nfs_server_ip + ':' + export_path
def _clone_volume(self, volume_name, clone_name, volume_id, share=None):
"""Clones mounted volume using NetApp API."""
raise NotImplementedError()
def _get_provider_location(self, volume_id):
"""Returns provider location for given volume."""
volume = self.db.volume_get(self._context, volume_id)
return volume.provider_location
def _get_host_ip(self, volume_id):
"""Returns IP address for the given volume."""
return self._get_provider_location(volume_id).rsplit(':')[0]
def _get_export_path(self, volume_id):
"""Returns NFS export path for the given volume."""
return self._get_provider_location(volume_id).rsplit(':')[1]
def _volume_not_present(self, nfs_mount, volume_name):
"""Check if volume exists."""
try:
self._try_execute('ls', self._get_volume_path(nfs_mount,
volume_name))
except processutils.ProcessExecutionError:
# If the volume isn't present
return True
return False
    def _try_execute(self, *command, **kwargs):
        """Run *command*, retrying on ProcessExecutionError with backoff."""
        # NOTE(vish): Volume commands can partially fail due to timing, but
        #             running them a second time on failure will usually
        #             recover nicely.
        tries = 0
        while True:
            try:
                self._execute(*command, **kwargs)
                return True
            except processutils.ProcessExecutionError:
                tries += 1
                # Give up once the configured retry budget is exhausted.
                if tries >= self.configuration.num_shell_tries:
                    raise
                LOG.exception(_LE("Recovering from a failed execute.  "
                                  "Try number %s"), tries)
                # Quadratic backoff between attempts.
                time.sleep(tries ** 2)
def _get_volume_path(self, nfs_share, volume_name):
"""Get volume path (local fs path) for given volume name on given nfs
share.
@param nfs_share string, example 172.18.194.100:/var/nfs
@param volume_name string,
example volume-91ee65ec-c473-4391-8c09-162b00c68a8c
"""
return os.path.join(self._get_mount_point_for_share(nfs_share),
volume_name)
def create_cloned_volume(self, volume, src_vref):
"""Creates a clone of the specified volume."""
vol_size = volume.size
src_vol_size = src_vref.size
self._clone_volume(src_vref.name, volume.name, src_vref.id)
share = self._get_volume_location(src_vref.id)
volume['provider_location'] = share
path = self.local_path(volume)
if self._discover_file_till_timeout(path):
self._set_rw_permissions(path)
if vol_size != src_vol_size:
try:
self.extend_volume(volume, vol_size)
except Exception as e:
LOG.error(
_LE("Resizing %s failed. Cleaning volume."),
volume.name)
self._execute('rm', path,
run_as_root=self._execute_as_root)
raise e
else:
raise exception.CinderException(
_("NFS file %s not discovered.") % volume['name'])
return {'provider_location': volume['provider_location']}
def _update_volume_stats(self):
"""Retrieve stats info from volume group."""
raise NotImplementedError()
    def copy_image_to_volume(self, context, volume, image_service, image_id):
        """Fetch the image from image_service and write it to the volume."""
        # The base class performs the actual download/convert into the file.
        super(NetAppNfsDriver, self).copy_image_to_volume(
            context, volume, image_service, image_id)
        LOG.info(_LI('Copied image to volume %s using regular download.'),
                 volume['name'])
        # Seed the image cache so later volumes from this image clone fast.
        self._register_image_in_cache(volume, image_id)
def _register_image_in_cache(self, volume, image_id):
"""Stores image in the cache."""
file_name = 'img-cache-%s' % image_id
LOG.info(_LI("Registering image in cache %s"), file_name)
try:
self._do_clone_rel_img_cache(
volume['name'], file_name,
volume['provider_location'], file_name)
except Exception as e:
LOG.warning(_LW('Exception while registering image %(image_id)s'
' in cache. Exception: %(exc)s')
% {'image_id': image_id, 'exc': e.__str__()})
def _find_image_in_cache(self, image_id):
"""Finds image in cache and returns list of shares with file name."""
result = []
if getattr(self, '_mounted_shares', None):
for share in self._mounted_shares:
dir = self._get_mount_point_for_share(share)
file_name = 'img-cache-%s' % image_id
file_path = '%s/%s' % (dir, file_name)
if os.path.exists(file_path):
LOG.debug('Found cache file for image %(image_id)s'
' on share %(share)s'
% {'image_id': image_id, 'share': share})
result.append((share, file_name))
return result
    def _do_clone_rel_img_cache(self, src, dst, share, cache_file):
        """Do clone operation w.r.t image cache file."""
        # Serialize on the cache file name (external/cross-process lock) so
        # concurrent requests do not clone the same cache entry twice.
        @utils.synchronized(cache_file, external=True)
        def _do_clone():
            dir = self._get_mount_point_for_share(share)
            file_path = '%s/%s' % (dir, dst)
            # Only clone when the destination does not already exist.
            if not os.path.exists(file_path):
                LOG.info(_LI('Cloning from cache to destination %s'), dst)
                self._clone_volume(src, dst, volume_id=None, share=share)
        _do_clone()
    @utils.synchronized('clean_cache')
    def _spawn_clean_cache_job(self):
        """Spawns a clean task if not running."""
        if getattr(self, 'cleaning', None):
            # A cleaner is already active; do not start a second one.
            LOG.debug('Image cache cleaning in progress. Returning... ')
            return
        else:
            # Set cleaning to True
            self.cleaning = True
            # Run the cleanup asynchronously so the caller is not blocked;
            # a Timer with delay 0 starts the thread immediately.
            t = threading.Timer(0, self._clean_image_cache)
            t.start()
    def _clean_image_cache(self):
        """Clean the image cache files in cache of space crunch."""
        try:
            LOG.debug('Image cache cleaning in progress.')
            # Cleaning triggers when free space falls below the start
            # threshold and removes files until the stop threshold holds.
            thres_size_perc_start =\
                self.configuration.thres_avl_size_perc_start
            thres_size_perc_stop = \
                self.configuration.thres_avl_size_perc_stop
            for share in getattr(self, '_mounted_shares', []):
                try:
                    total_size, total_avl = \
                        self._get_capacity_info(share)
                    avl_percent = int((total_avl / total_size) * 100)
                    if avl_percent <= thres_size_perc_start:
                        LOG.info(_LI('Cleaning cache for share %s.'), share)
                        eligible_files = self._find_old_cache_files(share)
                        threshold_size = int(
                            (thres_size_perc_stop * total_size) / 100)
                        bytes_to_free = int(threshold_size - total_avl)
                        LOG.debug('Files to be queued for deletion %s',
                                  eligible_files)
                        self._delete_files_till_bytes_free(
                            eligible_files, share, bytes_to_free)
                    else:
                        continue
                except Exception as e:
                    # One bad share must not abort cleaning of the others.
                    LOG.warning(_LW('Exception during cache cleaning'
                                    ' %(share)s. Message - %(ex)s')
                                % {'share': share, 'ex': e.__str__()})
                    continue
        finally:
            # Always clear the in-progress flag so future runs can start.
            LOG.debug('Image cache cleaning done.')
            self.cleaning = False
def _shortlist_del_eligible_files(self, share, old_files):
"""Prepares list of eligible files to be deleted from cache."""
raise NotImplementedError()
    def _find_old_cache_files(self, share):
        """Finds the old files in cache."""
        mount_fs = self._get_mount_point_for_share(share)
        threshold_minutes = self.configuration.expiry_thres_minutes
        # 'img-cache-*' files not accessed within the expiry window.
        cmd = ['find', mount_fs, '-maxdepth', '1', '-name',
               'img-cache*', '-amin', '+%s' % threshold_minutes]
        res, _err = self._execute(*cmd, run_as_root=self._execute_as_root)
        if res:
            old_file_paths = res.strip('\n').split('\n')
            mount_fs_len = len(mount_fs)
            # Strip the '<mount>/' prefix, leaving bare file names.
            old_files = [x[mount_fs_len + 1:] for x in old_file_paths]
            eligible_files = self._shortlist_del_eligible_files(
                share, old_files)
            return eligible_files
        return []
    def _delete_files_till_bytes_free(self, file_list, share, bytes_to_free=0):
        """Delete files from disk till bytes are freed or list exhausted."""
        LOG.debug('Bytes to free %s', bytes_to_free)
        if file_list and bytes_to_free > 0:
            # Entries are (name, size) pairs; delete the largest files first.
            sorted_files = sorted(file_list, key=lambda x: x[1], reverse=True)
            mount_fs = self._get_mount_point_for_share(share)
            for f in sorted_files:
                if f:
                    file_path = '%s/%s' % (mount_fs, f[0])
                    LOG.debug('Delete file path %s', file_path)
                    # Lock on the file name so a concurrent clone of the same
                    # cache entry cannot race with the deletion.
                    @utils.synchronized(f[0], external=True)
                    def _do_delete():
                        if self._delete_file(file_path):
                            return True
                        return False
                    if _do_delete():
                        bytes_to_free -= int(f[1])
                        if bytes_to_free <= 0:
                            return
def _delete_file(self, path):
"""Delete file from disk and return result as boolean."""
try:
LOG.debug('Deleting file at path %s', path)
cmd = ['rm', '-f', path]
self._execute(*cmd, run_as_root=self._execute_as_root)
return True
except Exception as ex:
LOG.warning(_LW('Exception during deleting %s'), ex.__str__())
return False
    def clone_image(self, context, volume,
                    image_location, image_meta,
                    image_service):
        """Create a volume efficiently from an existing image.

        image_location is a string whose format depends on the
        image service backend in use. The driver should use it
        to determine whether cloning is possible.

        Returns a dict of volume properties eg. provider_location,
        boolean indicating whether cloning occurred.
        """
        image_id = image_meta['id']
        cloned = False
        post_clone = False
        try:
            # Prefer the local image cache; fall back to cloning straight
            # from the glance NFS store when the image is not cached.
            cache_result = self._find_image_in_cache(image_id)
            if cache_result:
                cloned = self._clone_from_cache(volume, image_id, cache_result)
            else:
                cloned = self._direct_nfs_clone(volume, image_location,
                                                image_id)
            if cloned:
                post_clone = self._post_clone_image(volume)
        except Exception as e:
            msg = e.msg if getattr(e, 'msg', None) else e.__str__()
            LOG.info(_LI('Image cloning unsuccessful for image'
                         ' %(image_id)s. Message: %(msg)s')
                     % {'image_id': image_id, 'msg': msg})
            vol_path = self.local_path(volume)
            volume['provider_location'] = None
            if os.path.exists(vol_path):
                self._delete_file(vol_path)
        finally:
            # NOTE: returning from `finally` swallows any in-flight
            # exception; failures surface only as cloned=False.
            cloned = cloned and post_clone
            share = volume['provider_location'] if cloned else None
            bootable = True if cloned else False
            return {'provider_location': share, 'bootable': bootable}, cloned
    def _clone_from_cache(self, volume, image_id, cache_result):
        """Clones a copy from image cache."""
        cloned = False
        LOG.info(_LI('Cloning image %s from cache'), image_id)
        for res in cache_result:
            # Repeat tries in other shares if failed in some
            (share, file_name) = res
            LOG.debug('Cache share: %s', share)
            if (share and
                    self._is_share_vol_compatible(volume, share)):
                try:
                    self._do_clone_rel_img_cache(
                        file_name, volume['name'], share, file_name)
                    cloned = True
                    volume['provider_location'] = share
                    break
                except Exception:
                    # Keep trying remaining cached copies on other shares.
                    LOG.warning(_LW('Unexpected exception during'
                                    ' image cloning in share %s'), share)
        return cloned
    def _direct_nfs_clone(self, volume, image_location, image_id):
        """Clone directly in nfs share."""
        LOG.info(_LI('Checking image clone %s from glance share.'), image_id)
        cloned = False
        image_locations = self._construct_image_nfs_url(image_location)
        run_as_root = self._execute_as_root
        for loc in image_locations:
            # Only proceed when the image sits on a share we have mounted
            # and the share can host the new volume.
            share = self._is_cloneable_share(loc)
            if share and self._is_share_vol_compatible(volume, share):
                LOG.debug('Share is cloneable %s', share)
                volume['provider_location'] = share
                (__, ___, img_file) = loc.rpartition('/')
                dir_path = self._get_mount_point_for_share(share)
                img_path = '%s/%s' % (dir_path, img_file)
                img_info = image_utils.qemu_img_info(img_path,
                                                     run_as_root=run_as_root)
                if img_info.file_format == 'raw':
                    # Raw images can be cloned at file level as-is.
                    LOG.debug('Image is raw %s', image_id)
                    self._clone_volume(
                        img_file, volume['name'],
                        volume_id=None, share=share)
                    cloned = True
                    break
                else:
                    # Non-raw images are converted to raw in place, then
                    # registered in the cache for future clones.
                    LOG.info(
                        _LI('Image will locally be converted to raw %s'),
                        image_id)
                    dst = '%s/%s' % (dir_path, volume['name'])
                    image_utils.convert_image(img_path, dst, 'raw',
                                              run_as_root=run_as_root)
                    data = image_utils.qemu_img_info(dst,
                                                     run_as_root=run_as_root)
                    if data.file_format != "raw":
                        raise exception.InvalidResults(
                            _("Converted to raw, but"
                              " format is now %s") % data.file_format)
                    else:
                        cloned = True
                        self._register_image_in_cache(
                            volume, image_id)
                        break
        return cloned
def _post_clone_image(self, volume):
"""Do operations post image cloning."""
LOG.info(_LI('Performing post clone for %s'), volume['name'])
vol_path = self.local_path(volume)
if self._discover_file_till_timeout(vol_path):
self._set_rw_permissions(vol_path)
self._resize_image_file(vol_path, volume['size'])
return True
raise exception.InvalidResults(
_("NFS file could not be discovered."))
def _resize_image_file(self, path, new_size):
"""Resize the image file on share to new size."""
LOG.debug('Checking file for resize')
if self._is_file_size_equal(path, new_size):
return
else:
LOG.info(_LI('Resizing file to %sG'), new_size)
image_utils.resize_image(path, new_size,
run_as_root=self._execute_as_root)
if self._is_file_size_equal(path, new_size):
return
else:
raise exception.InvalidResults(
_('Resizing image file failed.'))
def _is_file_size_equal(self, path, size):
"""Checks if file size at path is equal to size."""
data = image_utils.qemu_img_info(path,
run_as_root=self._execute_as_root)
virt_size = data.virtual_size / units.Gi
if virt_size == size:
return True
else:
return False
def _discover_file_till_timeout(self, path, timeout=45):
"""Checks if file size at path is equal to size."""
# Sometimes nfs takes time to discover file
# Retrying in case any unexpected situation occurs
retry_seconds = timeout
sleep_interval = 2
while True:
if os.path.exists(path):
return True
else:
if retry_seconds <= 0:
LOG.warning(_LW('Discover file retries exhausted.'))
return False
else:
time.sleep(sleep_interval)
retry_seconds -= sleep_interval
def _is_cloneable_share(self, image_location):
"""Finds if the image at location is cloneable."""
conn, dr = self._check_get_nfs_path_segs(image_location)
return self._check_share_in_use(conn, dr)
def _check_get_nfs_path_segs(self, image_location):
"""Checks if the nfs path format is matched.
WebNFS url format with relative-path is supported.
Accepting all characters in path-names and checking
against the mounted shares which will contain only
allowed path segments. Returns connection and dir details.
"""
conn, dr = None, None
if image_location:
nfs_loc_pattern = \
('^nfs://(([\w\-\.]+:{1}[\d]+|[\w\-\.]+)(/[^\/].*)'
'*(/[^\/\\\\]+)$)')
matched = re.match(nfs_loc_pattern, image_location, flags=0)
if not matched:
LOG.debug('Image location not in the'
' expected format %s', image_location)
else:
conn = matched.group(2)
dr = matched.group(3) or '/'
return conn, dr
def _share_match_for_ip(self, ip, shares):
"""Returns the share that is served by ip.
Multiple shares can have same dir path but
can be served using different ips. It finds the
share which is served by ip on same nfs server.
"""
raise NotImplementedError()
    def _check_share_in_use(self, conn, dir):
        """Checks if share is cinder mounted and returns it."""
        try:
            if conn:
                host = conn.split(':')[0]
                ip = na_utils.resolve_hostname(host)
                # Collect mounted shares whose export path matches `dir`,
                # then disambiguate by the serving IP.
                share_candidates = []
                for sh in self._mounted_shares:
                    sh_exp = sh.split(':')[1]
                    if sh_exp == dir:
                        share_candidates.append(sh)
                if share_candidates:
                    LOG.debug('Found possible share matches %s',
                              share_candidates)
                    return self._share_match_for_ip(ip, share_candidates)
        except Exception:
            # Best effort: any failure just means "no usable share".
            LOG.warning(_LW("Unexpected exception while "
                            "short listing used share."))
        return None
    def _construct_image_nfs_url(self, image_location):
        """Construct direct url for nfs backend.

        It creates direct url from image_location
        which is a tuple with direct_url and locations.
        Returns array of urls with nfs scheme if nfs store
        else returns url. It needs to be verified
        by backend before use.
        """
        direct_url, locations = image_location
        if not direct_url and not locations:
            raise exception.NotFound(_('Image location not present.'))
        urls = []
        if not locations:
            urls.append(direct_url)
        else:
            for location in locations:
                url = location['url']
                # Without NFS metadata the raw URL is the best we can offer;
                # note the `break` stops at the first such location.
                if not location['metadata']:
                    urls.append(url)
                    break
                location_type = location['metadata'].get('type')
                if not location_type or location_type.lower() != "nfs":
                    urls.append(url)
                    break
                share_location = location['metadata'].get('share_location')
                mountpoint = location['metadata'].get('mountpoint')
                if not share_location or not mountpoint:
                    urls.append(url)
                    break
                # Rebase the file path from the glance mountpoint onto the
                # share location to get a directly cloneable nfs:// URL.
                url_parse = urlparse.urlparse(url)
                abs_path = os.path.join(url_parse.netloc, url_parse.path)
                rel_path = os.path.relpath(abs_path, mountpoint)
                direct_url = "%s/%s" % (share_location, rel_path)
                urls.append(direct_url)
        return urls
def extend_volume(self, volume, new_size):
"""Extend an existing volume to the new size."""
LOG.info(_LI('Extending volume %s.'), volume['name'])
path = self.local_path(volume)
self._resize_image_file(path, new_size)
def _is_share_vol_compatible(self, volume, share):
"""Checks if share is compatible with volume to host it."""
raise NotImplementedError()
def _check_share_can_hold_size(self, share, size):
"""Checks if volume can hold image with size."""
_tot_size, tot_available = self._get_capacity_info(
share)
if tot_available < size:
msg = _("Container size smaller than required file size.")
raise exception.VolumeDriverException(msg)
    def _move_nfs_file(self, source_path, dest_path):
        """Moves source to destination."""
        # Lock on the destination path so two concurrent moves to the same
        # target cannot interleave between the existence check and the move.
        @utils.synchronized(dest_path, external=True)
        def _move_file(src, dst):
            if os.path.exists(dst):
                LOG.warning(_LW("Destination %s already exists."), dst)
                return False
            self._execute('mv', src, dst, run_as_root=self._execute_as_root)
            return True
        try:
            return _move_file(source_path, dest_path)
        except Exception as e:
            # Best effort: report failure via the return value, not a raise.
            LOG.warning(_LW('Exception moving file %(src)s. Message - %(e)s')
                        % {'src': source_path, 'e': e})
            return False
def _get_export_ip_path(self, volume_id=None, share=None):
"""Returns export ip and path.
One of volume id or share is used to return the values.
"""
if volume_id:
host_ip = self._get_host_ip(volume_id)
export_path = self._get_export_path(volume_id)
elif share:
host_ip = share.split(':')[0]
export_path = share.split(':')[1]
else:
raise exception.InvalidInput(
'A volume ID or share was not specified.')
return host_ip, export_path
def _get_share_capacity_info(self, nfs_share):
"""Returns the share capacity metrics needed by the scheduler."""
used_ratio = self.configuration.nfs_used_ratio
oversub_ratio = self.configuration.nfs_oversub_ratio
# The scheduler's capacity filter will reduce the amount of
# free space that we report to it by the reserved percentage.
reserved_ratio = 1 - used_ratio
reserved_percentage = round(100 * reserved_ratio)
total_size, total_available = self._get_capacity_info(nfs_share)
apparent_size = total_size * oversub_ratio
apparent_size_gb = na_utils.round_down(
apparent_size / units.Gi, '0.01')
apparent_free_size = total_available * oversub_ratio
apparent_free_gb = na_utils.round_down(
float(apparent_free_size) / units.Gi, '0.01')
capacity = dict()
capacity['reserved_percentage'] = reserved_percentage
capacity['total_capacity_gb'] = apparent_size_gb
capacity['free_capacity_gb'] = apparent_free_gb
return capacity
def _get_capacity_info(self, nfs_share):
"""Get total capacity and free capacity in bytes for an nfs share."""
export_path = nfs_share.rsplit(':', 1)[1]
return self.zapi_client.get_flexvol_capacity(export_path)
def _check_volume_type(self, volume, share, file_name):
"""Match volume type for share file."""
raise NotImplementedError()
def _convert_vol_ref_share_name_to_share_ip(self, vol_ref):
"""Converts the share point name to an IP address
The volume reference may have a DNS name portion in the share name.
Convert that to an IP address and then restore the entire path.
:param vol_ref: Driver-specific information used to identify a volume
:return: A volume reference where share is in IP format.
"""
# First strip out share and convert to IP format.
share_split = vol_ref.rsplit(':', 1)
vol_ref_share_ip = na_utils.resolve_hostname(share_split[0])
# Now place back into volume reference.
vol_ref_share = vol_ref_share_ip + ':' + share_split[1]
return vol_ref_share
    def _get_share_mount_and_vol_from_vol_ref(self, vol_ref):
        """Get the NFS share, the NFS mount, and the volume from reference

        Determine the NFS share point, the NFS mount point, and the volume
        (with possible path) from the given volume reference. Raise exception
        if unsuccessful.

        :param vol_ref: Driver-specific information used to identify a volume
        :return: NFS Share, NFS mount, volume path or raise error
        """
        # Check that the reference is valid.
        if 'source-name' not in vol_ref:
            reason = _('Reference must contain source-name element.')
            raise exception.ManageExistingInvalidReference(
                existing_ref=vol_ref, reason=reason)
        vol_ref_name = vol_ref['source-name']
        self._ensure_shares_mounted()
        # If a share was declared as '1.2.3.4:/a/b/c' in the nfs_shares_config
        # file, but the admin tries to manage the file located at
        # 'my.hostname.com:/a/b/c/d.vol', this might cause a lookup miss below
        # when searching self._mounted_shares to see if we have an existing
        # mount that would work to access the volume-to-be-managed (a string
        # comparison is done instead of IP comparison).
        vol_ref_share = self._convert_vol_ref_share_name_to_share_ip(
            vol_ref_name)
        for nfs_share in self._mounted_shares:
            cfg_share = self._convert_vol_ref_share_name_to_share_ip(nfs_share)
            # partition() on the share prefix: work_share equals cfg_share
            # only when the reference starts with this configured share.
            (orig_share, work_share, file_path) = \
                vol_ref_share.partition(cfg_share)
            if work_share == cfg_share:
                file_path = file_path[1:]  # strip off leading path divider
                LOG.debug("Found possible share %s; checking mount.",
                          work_share)
                nfs_mount = self._get_mount_point_for_share(nfs_share)
                vol_full_path = os.path.join(nfs_mount, file_path)
                if os.path.isfile(vol_full_path):
                    LOG.debug("Found share %(share)s and vol %(path)s on "
                              "mount %(mnt)s",
                              {'share': nfs_share, 'path': file_path,
                               'mnt': nfs_mount})
                    return nfs_share, nfs_mount, file_path
            else:
                LOG.debug("vol_ref %(ref)s not on share %(share)s.",
                          {'ref': vol_ref_share, 'share': nfs_share})
        raise exception.ManageExistingInvalidReference(
            existing_ref=vol_ref,
            reason=_('Volume not found on configured storage backend.'))
def manage_existing(self, volume, existing_vol_ref):
"""Manages an existing volume.
The specified Cinder volume is to be taken into Cinder management.
The driver will verify its existence and then rename it to the
new Cinder volume name. It is expected that the existing volume
reference is an NFS share point and some [/path]/volume;
e.g., 10.10.32.1:/openstack/vol_to_manage
or 10.10.32.1:/openstack/some_directory/vol_to_manage
:param volume: Cinder volume to manage
:param existing_vol_ref: Driver-specific information used to identify a
volume
"""
# Attempt to find NFS share, NFS mount, and volume path from vol_ref.
(nfs_share, nfs_mount, vol_path) = \
self._get_share_mount_and_vol_from_vol_ref(existing_vol_ref)
LOG.debug("Asked to manage NFS volume %(vol)s, with vol ref %(ref)s",
{'vol': volume['id'],
'ref': existing_vol_ref['source-name']})
self._check_volume_type(volume, nfs_share, vol_path)
if vol_path == volume['name']:
LOG.debug("New Cinder volume %s name matches reference name: "
"no need to rename.", volume['name'])
else:
src_vol = os.path.join(nfs_mount, vol_path)
dst_vol = os.path.join(nfs_mount, volume['name'])
try:
shutil.move(src_vol, dst_vol)
LOG.debug("Setting newly managed Cinder volume name to %s",
volume['name'])
self._set_rw_permissions_for_all(dst_vol)
except (OSError, IOError) as err:
exception_msg = (_("Failed to manage existing volume %(name)s,"
" because rename operation failed:"
" Error msg: %(msg)s."),
{'name': existing_vol_ref['source-name'],
'msg': err})
raise exception.VolumeBackendAPIException(data=exception_msg)
return {'provider_location': nfs_share}
def manage_existing_get_size(self, volume, existing_vol_ref):
"""Returns the size of volume to be managed by manage_existing.
When calculating the size, round up to the next GB.
:param volume: Cinder volume to manage
:param existing_vol_ref: Existing volume to take under management
"""
# Attempt to find NFS share, NFS mount, and volume path from vol_ref.
(nfs_share, nfs_mount, vol_path) = \
self._get_share_mount_and_vol_from_vol_ref(existing_vol_ref)
try:
LOG.debug("Asked to get size of NFS vol_ref %s.",
existing_vol_ref['source-name'])
file_path = os.path.join(nfs_mount, vol_path)
file_size = float(utils.get_file_size(file_path)) / units.Gi
vol_size = int(math.ceil(file_size))
except (OSError, ValueError):
exception_message = (_("Failed to manage existing volume "
"%(name)s, because of error in getting "
"volume size."),
{'name': existing_vol_ref['source-name']})
raise exception.VolumeBackendAPIException(data=exception_message)
LOG.debug("Reporting size of NFS volume ref %(ref)s as %(size)d GB.",
{'ref': existing_vol_ref['source-name'], 'size': vol_size})
return vol_size
    def unmanage(self, volume):
        """Removes the specified volume from Cinder management.

        Does not delete the underlying backend storage object. A log entry
        will be made to notify the Admin that the volume is no longer being
        managed.

        :param volume: Cinder volume to unmanage
        """
        CONF = cfg.CONF
        # Reconstruct the on-share file name from the global name template.
        vol_str = CONF.volume_name_template % volume['id']
        vol_path = os.path.join(volume['provider_location'], vol_str)
        LOG.info(_LI("Cinder NFS volume with current path \"%(cr)s\" is "
                     "no longer being managed."), {'cr': vol_path})
| [
"tony.pig@gmail.com"
] | tony.pig@gmail.com |
06200d05ebb12a1a2b6a7c497f9b1f61524469f1 | 9e45f21ecf4dcd80a5a03b0f612de238528e93ed | /main.py | c93abc50acbcedaf2ce2a28c0676ffb0b14e9121 | [] | no_license | gartnera/vcloud-health-check | 64f85ee6957152c5feb8c6a0b197046881b2a05a | b358e9cb6f6b3957a70ecd0cfb1d8181e0f6ee2b | refs/heads/master | 2021-06-12T23:50:51.774304 | 2019-02-22T16:16:32 | 2019-02-22T16:17:23 | 172,096,859 | 0 | 0 | null | 2021-03-31T19:00:19 | 2019-02-22T16:18:01 | Python | UTF-8 | Python | false | false | 3,698 | py | import json
import socket
import time
import datetime
import ssl
from urllib.request import urlopen
from colorama import Fore, Style
from pyvcloud.vcd.client import BasicLoginCredentials
from pyvcloud.vcd.client import Client
from pyvcloud.vcd.org import Org
from pyvcloud.vcd.vdc import VDC
from pyvcloud.vcd.vapp import VApp
from pyvcloud.vcd.vm import VM
import paramiko
class IgnoreHostKeyPolicy:
    """Paramiko host-key policy that accepts every unknown host key."""

    def missing_host_key(self, client, hostname, key):
        # Accept unconditionally: lab VMs are re-imaged constantly, so
        # known_hosts verification would reject them after every reset.
        return True
# All sockets (health checks included) share this 10-second timeout.
socket.setdefaulttimeout(10)
# load config
with open('config.json', 'r') as f:
    config = json.load(f)
# Log in to vCloud Director and walk down to the configured vApp.
client = Client(config['url'])
client.set_highest_supported_version()
client.set_credentials(BasicLoginCredentials(config['user'], config['org'], config['password']))
print("Fetching Org...")
org = Org(client, resource=client.get_org())
print("Fetching VDC...")
vdc = VDC(client, resource=org.get_vdc(config['vdc']))
print("Fetching vApp...")
vapp_resource = vdc.get_vapp(config['vapp'])
vapp = VApp(client, resource=vapp_resource)
print("Validating VMs...")
vms = vapp.get_all_vms()
names = map(lambda vm: vm.get('name'), vms)
names = list(names)
# Attach the live VM resource to each configured service entry so
# reset_service can act on it later; raises ValueError on unknown VM names.
services = config['services']
for service in services:
    name = service['vm']
    index = names.index(name)
    service['resource'] = vms[index]
def health_check_tcp(service):
    """Return True if a TCP connection to service['ip']:service['port']
    succeeds and yields at least one recv, False otherwise.

    FIXES: the bare ``except:`` also swallowed KeyboardInterrupt/SystemExit
    and config errors (missing keys); narrowed to OSError.  The socket is
    now closed in one place via ``finally`` instead of two branches.
    """
    sock = socket.socket()
    try:
        sock.connect((service['ip'], service['port']))
        sock.recv(100)
        return True
    except OSError:
        # Refused / timed out / reset: the service is unhealthy.
        return False
    finally:
        sock.close()
def health_check_urlopen(service, proto):
    """Return True iff GET {proto}://ip:port{url} answers with HTTP 200.

    TLS certificate validation is intentionally disabled because the
    targets are lab VMs with self-signed certificates.

    FIXES: the response object was never closed (use ``with``), the
    status comparison is returned directly, and the bare ``except:`` is
    narrowed to ``Exception`` so Ctrl-C / SystemExit still propagate.
    """
    ip = service['ip']
    port = service['port']
    url = service['url']
    try:
        with urlopen(f'{proto}://{ip}:{port}{url}',
                     context=ssl._create_unverified_context()) as res:
            return res.getcode() == 200
    except Exception:
        return False
def health_check_ssh(service):
    """Return True if an SSH connection with an empty password succeeds.

    FIX: the original leaked the paramiko client whenever connect()
    failed; the client is now always closed in ``finally``.
    """
    ip = service['ip']
    port = service['port']
    username = service['username']
    client = paramiko.SSHClient()
    # Accept unknown host keys; lab VMs are re-imaged constantly.
    client.set_missing_host_key_policy(IgnoreHostKeyPolicy)
    try:
        client.connect(ip, username=username, password='', port=port)
        return True
    except Exception as e:
        print(e)
        return False
    finally:
        client.close()
def health_check(service):
    """Dispatch to the prober named by service['check'] and return its
    boolean result.  Supported types: 'tcp', 'ssh', 'http', 'https'.

    :raises ValueError: for an unknown check type.  (BUG FIX: the original
        did ``raise "Invalid check type: ..."``, and raising a string is
        itself a TypeError in Python 3.)
    """
    check = service['check']
    if check == 'tcp':
        return health_check_tcp(service)
    elif check == 'ssh':
        return health_check_ssh(service)
    elif check == 'http':
        return health_check_urlopen(service, 'http')
    elif check == 'https':
        return health_check_urlopen(service, 'https')
    else:
        raise ValueError("Invalid check type: " + check)
def reset_service(client, service):
    """Power-cycle an unhealthy VM: power off, revert to the current
    snapshot, then power it back on, waiting for each vCloud task."""
    monitor = client.get_task_monitor()
    vm = VM(client, resource=service['resource'])
    print("Powering off...")
    task = vm.power_off()
    monitor.wait_for_success(task)
    vm.reload()
    print("Restoring snapshot...")
    task = vm.snapshot_revert_to_current()
    monitor.wait_for_success(task)
    vm.reload()
    print("Powering on...")
    task = vm.power_on()
    monitor.wait_for_success(task)
    vm.reload()
    print("VM reset finished")
# begin health checks: probe every service, power-cycle the failing ones,
# then sleep 10 minutes before the next round (runs forever).
while True:
    now = str(datetime.datetime.now())
    print(f'Running healthcheck at {now}')
    for service in services:
        name = service['name']
        vm = service['vm']
        if health_check(service):
            print(f'[{Fore.GREEN}OK{Style.RESET_ALL}] {name} ({vm})')
        else:
            # Unhealthy: revert the VM to its current snapshot and restart.
            print(f'[{Fore.RED}FAIL{Style.RESET_ALL}] {name} ({vm})')
            reset_service(client, service)
    time.sleep(10 * 60)
| [
"alex@agartner.com"
] | alex@agartner.com |
7ff7b31639606384900999110b8c3d95f11d40c9 | 57380fbd73c21ecde654a50741d7844a5607ac36 | /test.py | c892f09a592e7bbf0be4db2e2e1203984934222b | [] | no_license | liewrichmond/pyCollect | c3c0131c26d13720d085541ea0a9f17e4ca372ab | 192e7849fcb86723e94e5229ce5403d8d0b4a250 | refs/heads/master | 2020-12-18T18:46:53.358256 | 2020-01-28T20:33:20 | 2020-01-28T20:33:20 | 235,488,047 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 564 | py | import base64
import os
from cryptography.fernet import Fernet
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
pw = input("Enter Pw")
# FIX: the prompt's result was silently discarded and a hard-coded
# b"password" used instead; derive the key from what the user typed.
password = pw.encode("utf-8")
salt = os.urandom(16)
# FIX: PBKDF2HMAC requires a hash *instance* (hashes.SHA256()), not the
# class object, per the cryptography API.
kdf = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt,
                 iterations=100000, backend=default_backend())
# Fernet keys are 32 urlsafe-base64-encoded bytes.
key = base64.urlsafe_b64encode(kdf.derive(password))
f = Fernet(key)
token = f.encrypt(b"Hello")
msg = f.decrypt(token)
print(msg.decode("utf-8"))
| [
"richmond.liew.97@gmail.com"
] | richmond.liew.97@gmail.com |
c9db0d7b57d079f4e6bbabea23d2f06346c0830c | ca8c5a631c6151890e9b7ee2535385df880ac3ca | /questao6v2.py | a102858e4d4127c6a3dbf651e3706b6c4fb3e0ec | [] | no_license | cadupsg/mcmc | 522c290f509084aa45028a651f3d32dbfb0d3772 | 1d8aa63d019ad1b22b0778a75f5a8926f6b41f2e | refs/heads/master | 2020-05-17T21:46:15.950699 | 2019-06-16T17:29:47 | 2019-06-16T17:29:47 | 183,981,285 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,108 | py | import numpy as np
import math
import random
from matplotlib import pyplot as plt
def gera_rand(valor_min, valor_max):
    """Draw a uniform random value in [valor_min, valor_max].

    ``random.uniform`` computes exactly
    valor_min + (valor_max - valor_min) * U(0, 1), which is what the
    original hand-rolled code did.
    """
    return random.uniform(valor_min, valor_max)
def f_de_x(x, alfa):
    """Integrand for the Monte Carlo estimate: x raised to the power alfa."""
    return pow(x, alfa)
def monte_carlo(n_amostras, alfa, a, b):
    """Monte Carlo estimate of the integral of x**alfa over [a, b].

    Draws n_amostras uniform points in [a, b], averages f_de_x over them
    and scales by the interval length (b - a).
    """
    total = sum(f_de_x(gera_rand(a, b), alfa) for _ in range(n_amostras))
    return (b - a) * float(total / n_amostras)
def integral_analitica(alfa, a, b):
    """Closed-form value of the integral of x**alfa over [a, b].

    Evaluates the antiderivative x**(alfa + 1) / (alfa + 1) at both ends.
    """
    alfa = float(alfa)
    a = float(a)
    b = float(b)
    expoente = alfa + 1
    return (b ** expoente) / expoente - (a ** expoente) / expoente
def mostra_grafico(n):
    """Plot the relative Monte Carlo error versus sample count N.

    For each exponent alfa in {1, 2, 3} and each upper bound b in
    {1, 2, 4}, the relative error |estimate - exact| / exact is computed
    for every sample count from 1 to n and shown as one 1x3 figure per
    alfa (one subplot per b).

    FIX: the Python 2-only ``xrange`` would raise NameError under
    Python 3; replaced with ``range`` (identical iteration behavior).
    """
    # Error/estimate storage: one row per alfa, one column per sample count.
    estim_g1 = [[0 for i in range(n)] for ialfa in range(3)]
    erro1 = [[0 for i in range(n)] for ialfa in range(3)]
    estim_g2 = [[0 for i in range(n)] for ialfa in range(3)]
    erro2 = [[0 for i in range(n)] for ialfa in range(3)]
    estim_g3 = [[0 for i in range(n)] for ialfa in range(3)]
    erro3 = [[0 for i in range(n)] for ialfa in range(3)]
    # Compute the relative errors for each alfa and b.
    for alfa in range(1, 4):
        g_analitica1 = integral_analitica(alfa, 0, 1)
        g_analitica2 = integral_analitica(alfa, 0, 2)
        g_analitica3 = integral_analitica(alfa, 0, 4)
        for i in range(1, n+1):
            estim_g1[alfa-1][i-1] = monte_carlo(i, alfa, 0, 1)
            erro1[alfa-1][i-1] = (abs(estim_g1[alfa-1][i-1]-g_analitica1))/g_analitica1
            estim_g2[alfa-1][i-1] = monte_carlo(i, alfa, 0, 2)
            erro2[alfa-1][i-1] = (abs(estim_g2[alfa-1][i-1]-g_analitica2))/g_analitica2
            estim_g3[alfa-1][i-1] = monte_carlo(i, alfa, 0, 4)
            erro3[alfa-1][i-1] = (abs(estim_g3[alfa-1][i-1]-g_analitica3))/g_analitica3
    # Plotting section.
    cor = ['red', 'blue', 'magenta', 'k']
    eixoX = np.linspace(0,n,n)
    for i in range(0,3):
        # One figure with 3 subplots (one per upper bound b).
        fig, axes = plt.subplots(nrows=1, ncols=3, figsize=(20, 20), dpi=80)
        axes[0].plot(eixoX, erro1[i], ls = '-', lw = '1.5', c = cor[0])
        axes[0].set_ylabel('Erro', color='k')
        axes[0].set_title('b = 1', color='k')
        axes[1].plot(eixoX, erro2[i], ls = '-', lw = '1.5', c = cor[1])
        axes[1].set_ylabel('Erro', color='k')
        axes[1].set_title('b = 2', color='k')
        axes[2].plot(eixoX, erro3[i], ls = '-', lw = '1.5', c = cor[2])
        axes[2].set_ylabel('Erro', color='k')
        axes[2].set_title('b = 4', color='k')
        # Figure title.
        fig.suptitle('Erro x N - Alfa = ' + str(i+1) )
        plt.show()
        plt.close()
    return
# Compute the mean of the estimator (left disabled):
# Ep = monte_carlo(1000000)
# print(Ep)
# Build and show the error-vs-N plots for 10000 sample counts.
mostra_grafico(10000)
"noreply@github.com"
] | cadupsg.noreply@github.com |
2bf3f597e8025c8b8805d3462d370391acaf8535 | fd97689f062e6d90837ea27b9a5e3de87bcd1e92 | /Cliente/MET.py | 1037d266937331ca50ced2198eb1c3abeead74d4 | [] | no_license | Edresson/MET | 9f7b8a43bdea29ee844d0c98a20f0aef4afbcdd2 | 5945116d0d52fdf8f892a5f266bf6b51afb529eb | refs/heads/master | 2023-08-31T10:18:35.942324 | 2019-10-29T12:17:15 | 2019-10-29T12:17:15 | 93,848,160 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75,202 | py | # -*- coding: utf-8 -*-
import pygame
import sys
import os
#from qtpy import QtCore, QtGui
from PyQt5 import QtCore, QtGui, QtWidgets,QtTest
import time
from matplotlib.figure import Figure
#from qtpy import QtTest
from threading import Thread
#from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt5agg import (
FigureCanvasQTAgg as FigureCanvas,
NavigationToolbar2QT as NavigationToolbar)
import threading
import matplotlib.pyplot as plt
import math
import pickle
##### imports celula e motor####
#from Modulos import celula
from Modulos import clientMotor
Motor = clientMotor
from Modulos import clientCelula
celula = clientCelula
from Modulos import webcam
#from Modulos import celula
#
#from Modulos import celula
### PDF Imports ###
from reportlab.pdfgen.canvas import Canvas
from reportlab.lib.pagesizes import letter
from reportlab.lib.units import cm, mm, inch, pica
import os.path
from datetime import datetime
from reportlab.lib.utils import ImageReader
from io import BytesIO
from PIL import Image
from reportlab.pdfbase.pdfmetrics import stringWidth
# --- Startup: probe the Raspberry Pi controllers before building the GUI ---
# Ping both UDP services (load cell + motor) and log the diagnosis to
# MET_Logs.log.  ping() returns a tuple: [0] 1/0 success flag, [1] the
# configured IP, [2] the UDP port.  If either service is unreachable the
# program aborts with sys.exit().
webc = webcam.Webcam()
celping = celula.ping()
motping= Motor.ping()
log_file=open('MET_Logs.log', 'w')
if celping[0] == 0:
    print("Aparentemente o Raspberry Pi não está connectado no Roteador, ou aconteceu algo de errado com o mesmo,Verifique se o mesmo está com o IP:",celping[1]," Se ele está connectado no mesmo roteador que o Notebook , ou ainda se a Porta UDP :",celping[2]," não está em uso por outro serviço nesta rede \n \n",file=log_file)
    # load-cell service is not answering the ping
else:
    print(" Ping Ok ! Raspberry Pi está configurado corretamente \n",file=log_file)
if motping[0] == 0:
    print("Aparentemente o Raspberry Pi não está connectado no Roteador, ou aconteceu algo de errado com o mesmo,Verifique se o mesmo está com o IP:",motping[1]," Se ele está connectado no mesmo roteador que o Notebook , ou ainda se a Porta UDP :",motping[2]," não está em uso por outro serviço nesta rede\n \n",file=log_file)
    # motor service is not answering the ping
else:
    print(" Ping Ok ! Raspberry Pi está configurado corretamente \n"," Caso não seja altere no arquivo IP-Raspberry.txt ",file=log_file)
# Combine both results to pinpoint whether the problem is one UDP port or
# the Pi itself, then abort — the machine cannot run half-connected.
if motping[0] == 1 and celping[0] == 0 :
    print(" Aparentemente o Problema está com a port UDP: ",celping[2]," Você pode ter aberto 2 instancias do software ao mesmo tempo , reinicie o Notebook, se persistir reiniciei também o RaspBerry Pi",file=log_file)
    sys.exit()
elif motping[0] == 0 and celping[0] == 1 :
    print(" Aparentemente o Problema está com a port UDP:",motping[2]," Caso não seja altere no arquivo IP-Raspberry.txt ",file=log_file)
    sys.exit()
elif motping[0] == 0 and celping[0] == 0:
    print(" Aparentemente o Problema está no Raspberry Pi, Verifique se o ip dele é mesmo:",motping[1],file=log_file)
    sys.exit()
# Connectivity OK: start the motor client's background thread.
Motor.start_thread()
# --- Module-level shared state (read/written by the GUI and the server thread) ---
testes = []                       # finished tests of the current lot (dicts with keys nome/x1/x2/tipo/formato/area/vel)
contando = 0                      # how many tests are stored in ``testes``
fig = plt.figure(figsize=(9,9))   # off-screen figure reused when rendering the PDF report
tipodeensaio = 0                  # becomes "Tração" or "Compressão" when a test starts
FormatoCorpoProva = 0             # specimen shape description (set elsewhere in the file)
AreaCorpoProva = 0                # specimen cross-section area (set elsewhere in the file)
deslocamentos = []                # live displacement samples (mm) of the running test
forcas = []                       # live force samples of the running test (converted to N for display — see update1)
flag = 0                          # run/stop flags shared with the acquisition thread
flag2 =0
tempinicioteste = 0               # time.time() stamp taken when the test starts
qforca = None                     # optional force-threshold stop criterion from the UI (consumed elsewhere — presumably the server thread)
maxforca = None                   # optional maximum-force stop criterion from the UI
maxdeslocamento = None            # optional maximum-displacement stop criterion from the UI
VelocidadeEn = 0                  # test speed (mm/min) chosen in the UI
# --- PyQt4/PyQt5 compatibility shims ---
# Under PyQt4 these helpers existed natively; under PyQt5 (imported above)
# the attributes are missing, so plain-Python fallbacks are defined instead.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # PyQt5: str is already unicode, so the conversion is the identity.
    def _fromUtf8(s):
        return s
try:
    _encoding = QtWidgets.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtWidgets.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # PyQt5 dropped the encoding argument of QApplication.translate.
    def _translate(context, text, disambig):
        return QtWidgets.QApplication.translate(context, text, disambig)
class Ui_MainWindow():
    def __init__(self):
        """Create the dialogs, bookkeeping attributes and the live-plot canvas.

        Builds the two end-of-test message boxes, initializes selection/state
        attributes, then assembles the matplotlib figure + toolbar inside the
        ``Grafic`` tab widget (the tab itself is wired up later in setupUi).
        """
        # "Run another test in this lot?" dialog shown after each test.
        self.result= QtWidgets.QMessageBox()
        self.result.setText("Você deseja fazer mais um teste nesse lote?")
        self.result.addButton(QtWidgets.QMessageBox.Yes)
        self.result.addButton(QtWidgets.QMessageBox.No)
        # "Take a photo of the specimen?" dialog shown after each test.
        self.webcam_fim= QtWidgets.QMessageBox()
        self.webcam_fim.setText("Você deseja tirar uma foto do objeto?")
        self.webcam_fim.addButton(QtWidgets.QMessageBox.Yes)
        self.webcam_fim.addButton(QtWidgets.QMessageBox.No)
        self.ensaiologin = False     # True when resuming a previously saved lot
        self.filedir = 0             # file-dialog result holding the save.txt path
        self.u = []                  # test names shown in combo2
        self.thread3 = ServerThread()  # acquisition thread (emits UPsig/Stopsig)
        self.Index=0                 # currently selected combo row
        self.Index22 =0              # currently selected test name (combo2)
        self.text= str()             # currently selected load-cell entry ("name;factor")
        self.A = []                  # lines read from conf_celulas.txt
        self.Linhas=[]               # matplotlib Line2D objects of the lot overview
        # Live-plot tab: a matplotlib canvas + navigation toolbar in a VBox.
        self.Grafic = QtWidgets.QWidget()
        self.Grafic.setObjectName(_fromUtf8("Grafic"))
        self.verticalLayoutWidget = QtWidgets.QWidget(self.Grafic)
        self.verticalLayoutWidget.setGeometry(QtCore.QRect(50, 80, 871, 411))
        self.verticalLayoutWidget.setObjectName(_fromUtf8("verticalLayoutWidget"))
        self.frame = QtWidgets.QVBoxLayout(self.verticalLayoutWidget)
        self.frame.setObjectName(_fromUtf8("verticalLayout_2"))
        self.t=1
        self.fig = Figure(figsize=(5,5), dpi=100)
        self.ax1f1 = self.fig.add_subplot(111,xlabel='Deslocamento(mm)', ylabel='Força(N)', title='')
        self.canvas = FigureCanvas(self.fig)
        # ``line1`` is the live curve updated sample-by-sample during a test.
        self.line1, =self.ax1f1.plot([],[])
        self.fig.canvas.draw()
        self.ax1f1.grid(True)
        # NOTE(review): the canvas is created twice; only this second instance
        # is added to the layout.
        self.canvas = FigureCanvas(self.fig)
        self.frame.addWidget(self.canvas)
        self.canvas.draw()
        self.toolbar = NavigationToolbar(self.canvas,
                self.Grafic, coordinates=True)
        self.frame.addWidget(self.toolbar)
    def selecionar(self):
        """Make the load cell picked in ``combo`` the machine's active cell.

        Extracts the calibration factor (the text after the last ';'),
        persists it to Fator_Calibracao.txt, re-initializes the cell client
        with it and refreshes the status label.
        """
        text = self.combo.currentText()
        # Turn the pretty combo label back into the "name;factor" file format.
        self.text = text.replace(" Celula de Carga Fator:",";")
        # Keep only the characters after the last ';' — i.e. the factor.
        self.t = ''
        for i in self.text:
            if(i != ';'):
                self.t = self.t + i
            else:
                self.t =''
        #print(self.text,self.t)
        # Persist the active factor so it survives restarts.
        self.CALIBRA = open("Fator_Calibracao.txt","w")
        self.CALIBRA.write(self.t)
        self.CALIBRA.close()
        # Re-arm the load-cell client with the new factor and update the UI.
        celula.iniciarcel(self.t)
        self.updateCelulaInterface()
        self.obs3.setGeometry(QtCore.QRect(20,190,741,41))
        self.obs3.setText(_translate("MainWindow", "Celula: "+self.text+" Selecionada, Agora a maquina Opera com esta Celula de Carga",None))
        self.obs3.show()
def combo2_chosen(self, text=0):
text = self.combo2.currentText()
self.Index=self.combo2.currentIndex()
self.Index22 = str(text)
def combo_chosen(self, text=0):
text = self.combo.currentText()
self.Index=self.combo.currentIndex()
self.text = text.replace(" Celula de Carga Fator:",";")
def Excluir(self):
self.combo.removeItem(self.Index)
for i, valor in enumerate(self.A):
if valor == self.text:
self.A.pop(i)
self.CALIBRA = open("conf_celulas.txt","w")
for i in self.A:
self.CALIBRA.write(str(i))
self.CALIBRA.close()
self.obs3.setGeometry(QtCore.QRect(20,190,741,41))
self.obs3.setText(_translate("MainWindow", "Celula: " +self.text+ " Excluida",None))
self.obs3.show()
    def ecalibrar(self):
        """Finish RE-calibrating an existing load cell.

        Reads the raw value with the reference weight on the cell, computes
        the new factor (raw / weight), then replaces the cell's line in
        conf_celulas.txt ("name;factor") keeping its original name.
        """
        self.bcal.hide()
        self.ecal.hide()
        self.obs3.setText(_translate("MainWindow", "Calibrando Celula de Carga Aguarde ! ",None))
        VALUE_SERIAL= celula.calibrar()   # raw reading with the reference weight loaded
        B = self.pcal.value()             # reference weight (kg) typed by the user
        Fator= (float(VALUE_SERIAL)/float(B))
        print(Fator,B,VALUE_SERIAL)
        self.combo.clear()
        self.t = str()
        # Find the selected entry, recover its name (chars before ';' via a
        # manual char scan) and drop the old line from the in-memory list.
        for i, valor in enumerate(self.A):
            if valor == self.text:
                self.posicao = i
                self.p = 0
                self.j =0
                while(self.p == 0):
                    if(self.A[i][self.j] != ';'):
                        self.t= self.t + self.A[i][self.j]
                        self.j += 1
                    else:
                        self.p =1
                self.A.pop(i)
        # Re-append the cell with the same name but the new factor, then
        # rewrite the whole config file.
        self.A.append(self.t+";"+str(Fator)+"\n")
        self.CALIBRA = open("conf_celulas.txt","w")
        for k in self.A:
            self.CALIBRA.write(k)
        self.CALIBRA.close()
        self.bcal2.hide()
        self.obs3.setText(_translate("MainWindow", "Celula de Carga Calibrada agora você já pode Colocar novamente as Garras/Mordentes\n Celula: "+self.t,None))
        self.bcal.show()
        self.ecal.show()
        self.pcal.hide()
    def editCalibra(self):
        """Second step of re-calibration: tare the cell and ask for the
        reference weight.

        Builds the weight spin box, instruction label and "Continuar" button;
        the button advances to :meth:`ecalibrar`.
        """
        self.bcal2.hide()
        self.obs3.hide()
        celula.tare()   # zero the cell while it is unloaded
        # Fresh widgets for this step of the wizard.
        self.pcal = QtWidgets.QDoubleSpinBox(self.Calibra)
        self.obs3 = QtWidgets.QLabel(self.Calibra)
        self.bcal2 = QtWidgets.QPushButton(self.Calibra)
        self.bcal.hide()
        self.ecal.hide()
        self.pcal.setGeometry(QtCore.QRect(210,240,81,29))
        self.pcal.setObjectName(_fromUtf8("pcal"))
        self.pcal.setRange(0,10000.00)
        self.pcal.setValue(1.00)
        self.pcal.show()
        self.obs3.setGeometry(QtCore.QRect(20,190,741,71))
        self.obs3.setObjectName(_fromUtf8("obs"))
        self.obs3.setText(_translate("MainWindow", "Informe o Valor do Peso Padrão (EM KG), após coloque o mesmo na celula de Carga e Clique em continuar.",None))
        self.obs3.show()
        self.bcal2.setGeometry(QtCore.QRect(190,340,151,21))
        self.bcal2.setObjectName(_fromUtf8("bcal"))
        self.bcal2.setText(_translate("MainWindow", "Continuar",None))
        self.bcal2.show()
        self.bcal2.clicked.connect(self.ecalibrar)
    def editcalib(self):
        """First step of re-calibration: tell the user to remove the grips.

        Shows the instruction plus a "Continuar" button that advances to
        :meth:`editCalibra`.
        """
        self.combo.hide()
        self.bcal2 = QtWidgets.QPushButton(self.Calibra)
        self.bcal2.setGeometry(QtCore.QRect(190,340,151,21))
        self.bcal2.setObjectName(_fromUtf8("bcal"))
        self.bcal2.setText(_translate("MainWindow", "Continuar",None))
        self.bcal2.clicked.connect(self.editCalibra)
        self.bcal2.show()
        # Hide the edit-screen buttons while the wizard runs.
        self.bcal.hide()
        self.ecal.hide()
        self.ccal.hide()
        self.dcal.hide()
        self.scal.hide()
        self.obs3.setGeometry(QtCore.QRect(20,190,741,41))
        self.obs3.setObjectName(_fromUtf8("obs"))
        self.obs3.setText(_translate("MainWindow", "OBS: Retire as Garras/Mordentes da Celula de Carga, Não deixe nada apenas a Celula de Carga, após Clique em Continuar.",None))
        self.obs3.show()
    def add_nova(self):
        """First step of registering a NEW load cell: remove-grips prompt.

        Shows the instruction plus a "Continuar" button that advances to
        :meth:`calibrar` (the new-cell variant of the wizard).
        """
        self.combo.hide()
        self.obs3.hide()
        self.bcal2 = QtWidgets.QPushButton(self.Calibra)
        self.bcal2.setGeometry(QtCore.QRect(190,340,151,21))
        self.bcal2.setObjectName(_fromUtf8("bcal"))
        self.bcal2.setText(_translate("MainWindow", "Continuar",None))
        self.bcal2.clicked.connect(self.calibrar)
        self.bcal2.show()
        self.bcal.hide()
        self.ecal.hide()
        self.scal.hide()
        self.obs3.setGeometry(QtCore.QRect(20,190,741,41))
        self.obs3.setObjectName(_fromUtf8("obs"))
        self.obs3.setText(_translate("MainWindow", "OBS: Retire as Garras/Mordentes da Celula de Carga, Não deixe nada apenas a Celula de Carga, após Clique em Continuar.",None))
        self.obs3.show()
    def Editar(self):
        """Open the edit screen: list the registered cells with
        Calibrar/Excluir actions.

        Reads conf_celulas.txt ("name;factor" per line), fills the combo box
        with prettified labels and wires the two buttons to
        :meth:`editcalib` and :meth:`Excluir`.
        """
        self.scal.show()
        self.obs3.hide()
        self.ecal.hide()
        self.bcal.hide()
        self.ccal = QtWidgets.QPushButton(self.Calibra)
        self.ccal.setGeometry(QtCore.QRect(150,110,131,29))
        self.ccal.setObjectName(_fromUtf8("bcal"))
        self.dcal = QtWidgets.QPushButton(self.Calibra)
        self.dcal.setGeometry(QtCore.QRect(530,110,151,29))
        self.dcal.setObjectName(_fromUtf8("bcal"))
        self.combo.setGeometry(QtCore.QRect(290,20,192,40))
        self.combo.setObjectName(_fromUtf8("pcal"))
        self.combo.show()
        self.dcal.setText(_translate("MainWindow", "Excluir",None))
        self.ccal.setText(_translate("MainWindow", "Calibrar",None))
        self.dcal.clicked.connect(self.Excluir)
        self.ccal.clicked.connect(self.editcalib)
        self.ccal.show()
        self.dcal.show()
        # Load the registrations and show them with a friendlier label.
        self.CALIBRA = open("conf_celulas.txt","r")
        self.A = self.CALIBRA.readlines()
        self.CALIBRA.close()
        # NOTE(review): the file is reopened in append mode but never written
        # to here; it is only closed again at the end.
        self.CALIBRA = open("conf_celulas.txt","a")
        self.b=[]
        for i in range(len(self.A)):
            self.b.append(self.A[i].replace(";"," Celula de Carga Fator:"))
        self.combo.addItems(self.b)
        #self.combo.connect(self.combo, QtCore.SIGNAL('activated(QString)'), self.combo_chosen)
        self.combo.activated.connect(self.combo_chosen)
        self.CALIBRA.close()
def resetgrafic(self):
deslocamentos= [0]
forcas= [0]
self.PlotGrafico()
    def PlotGrafico(self):
        """Push the global ``deslocamentos``/``forcas`` buffers into the live
        curve and force a full canvas redraw."""
        self.line1.set_data(deslocamentos, forcas)
        self.fig.canvas.draw()
def zeraf(self):
self.Forca_grafic.setValue(0.00)
def zerades(self):
self.Deslocamento_grafic.setValue(0.00)
def Subir(self):
self.pushButton_3.setDisabled(True)
self.pushButton_2.setVisible(True)
self.pushButton_3.setVisible(True)
self.parar_ajuste.setVisible(True)
Motor.Subir_descer(self.Vel_ajuste.value(),1,self.deslb.value())
self.pushButton_3.setDisabled(False)
def Descer(self):
self.pushButton_2.setVisible(True)
self.pushButton_3.setVisible(True)
self.parar_ajuste.setVisible(True)
Motor.Subir_descer(self.Vel_ajuste.value(),2,self.deslb.value())
    def Parando(self):
        """Stop button handler during a test.

        Clears the run flags shared with the acquisition thread, restores the
        jog/start buttons, halts the motor and then runs the end-of-test
        dialog flow (:meth:`confirmar_continuacao`).
        """
        global flag
        flag =0
        global flag2
        global deslocamentos
        global forcas
        global testes
        self.u = []        # test names for combo2 get rebuilt by the dialog flow
        flag2 =0
        # Restore the idle button layout (jog + start visible, stop hidden).
        self.pushButton_2.setVisible(True)
        self.pushButton_3.setVisible(True)
        self.parar_ajuste.setVisible(True)
        self.pushButton.setVisible(True)
        self.pushButton_4.setVisible(False)
        self.emergrafic.setVisible(False)
        Motor.Parar()
        self.confirmar_continuacao()
    def confirmar_continuacao(self):
        """End-of-test dialog flow.

        First asks whether to photograph the specimen; then asks whether
        another test should be added to this lot.  "Yes" stores the test via
        ``lotes`` and returns to the setup tab; "No" stores the test, clears
        the live plot and builds the lot-review view (one curve per test,
        combo2 + delete button, PDF/cancel buttons).
        """
        result_webcam_fim = self.webcam_fim.exec_()
        if result_webcam_fim == QtWidgets.QMessageBox.No:
            pass
        if result_webcam_fim== QtWidgets.QMessageBox.Yes:
            self.webcamcapture_final()
        result1 = self.result.exec_()
        if result1 == QtWidgets.QMessageBox.Yes:
            # Record the test in the lot and go back to the setup tab.
            self.Config.setCurrentWidget(self.Config)
            lotes(self.input.text(),deslocamentos,forcas)
            self.Config.setCurrentWidget(self.Config_2)
        if result1 == QtWidgets.QMessageBox.No:
            # Lot finished: record the last test and build the review screen.
            self.inputl.show()
            self.input.show()
            self.botaobrowser.show()
            lotes(self.input.text(),deslocamentos,forcas,)
            self.ax1f1.cla()
            self.ax1f1.grid(True)
            self.pushButton.hide()
            if(len(testes) > 0):
                pass
            self.Linhas = []
            # Combo with one row per stored test plus a delete button.
            self.combo2.setGeometry(QtCore.QRect(90,20,192,30))
            self.combo2.setObjectName(_fromUtf8("p2cal"))
            self.combo2.show()
            self.bcombo.setGeometry(QtCore.QRect(90,50,61, 31))
            self.bcombo.setText(_translate("MainWindow", "Excluir", None))
            self.bcombo.clicked.connect(self.excluirlinha_grafic)
            self.bcombo.setObjectName(_fromUtf8("p2cal"))
            self.bcombo.show()
            # Overlay every stored curve, labelled with its test name.
            for i in range(0,len(testes)):
                self.u.append(testes[i]["nome"])
                self.aux, = self.ax1f1.plot(list(testes[i]["x1"]),list(testes[i]["x2"]),label='${i}$'.format(i=str(testes[i]["nome"])))
                self.Linhas.append(self.aux)
            self.ax1f1.legend(loc ='best')
            self.fig.canvas.draw()
            self.combo2.addItems(self.u)
            #self.combo2.connect(self.combo2, QtCore.SIGNAL('activated(QString)'), self.combo2_chosen)
            self.combo2.activated.connect(self.combo2_chosen)
            # NOTE(review): assigns a dead *local*; the global ``contando``
            # is not reset here.
            contando = 0
            self.pushButton_6.show()
            self.pushButton_7.show()
        pass
def returnposteste(self,index):
global testes
for i in range(0,len(testes)):
if(str(testes[i]["nome"]) == str(index)):
return i
    def cancelartestes(self) :
        """Discard every stored test of the lot and reset the review screen.

        Empties the global ``testes`` list, hides the review widgets, clears
        the axes and restores the empty live curve plus the start button.
        """
        global testes
        global contando
        contando = 0
        testes = []
        self.bcombo.hide()
        self.combo2.clear()
        self.pushButton_6.hide()
        self.pushButton_7.hide()
        self.combo2.hide()
        self.ax1f1.cla()
        self.ax1f1.grid(True)
        # Recreate the (now empty) live curve on the cleared axes.
        self.line1, = self.ax1f1.plot([],[])
        self.fig.canvas.draw_idle()
        self.pushButton.show()
    def gerarpdf(self):
        """Render the lot report as PDF and archive the lot's raw data.

        Creates Ensaios/<year>/<month>/<day>/<lot>Hora<h>-<m>-<s>/, moves the
        webcam snapshots out of TempImagens/, pickles ``testes`` to save.txt,
        then writes two PDFs with identical content (the archived copy and
        the always-overwritten Ensaios/Ensaio_Atual.pdf): a cover page with
        every curve overlaid, followed by one page per test with its own
        curve and summary fields.  Ends by calling :meth:`cancelartestes`.
        """
        global testes
        # Hide the review widgets while the report is generated.
        self.bcombo.hide()
        self.pushButton_6.hide()
        self.pushButton_7.hide()
        global VelocidadeEn
        global forcas
        global deslocamentos
        global FormatoCorpoProva
        global fig
        fig2 = []
        Image2 = []
        imgdata2 = []
        now = datetime.now()
        # Build the archive tree year/month/day/<lot>Hora<h-m-s> on demand.
        if os.path.isdir("Ensaios/"+str(now.year)): # check whether this directory already exists
            pass
        else:
            os.mkdir("Ensaios/"+str(now.year)) # create the directory
        if os.path.isdir("Ensaios/"+str(now.year)+"/"+str(now.month)): # check whether this directory already exists
            pass
        else:
            os.mkdir("Ensaios/"+str(now.year)+"/"+str(now.month)) # create the directory
        if os.path.isdir("Ensaios/"+str(now.year)+"/"+str(now.month)+"/"+str(now.day)): # check whether this directory already exists
            pass
        else:
            os.mkdir("Ensaios/"+str(now.year)+"/"+str(now.month)+"/"+str(now.day)) # create the directory
        if os.path.isdir("Ensaios/"+str(now.year)+"/"+str(now.month)+"/"+str(now.day)+"/"+str(self.input.text())+"Hora"+str(now.hour)+"-"+str(now.minute)+ "-"+ str(now.second)): # check whether this directory already exists
            pass
        else:
            os.mkdir("Ensaios/"+str(now.year)+"/"+str(now.month)+"/"+str(now.day)+"/"+str(self.input.text())+"Hora"+str(now.hour)+"-"+str(now.minute)+ "-"+ str(now.second)) # create the directory
        # Move the webcam snapshots taken during the lot into the archive.
        listdir1 = os.listdir('TempImagens/')
        print(os.listdir('TempImagens/'))
        for i in listdir1:
            os.system('mv '+'TempImagens/'+i+" Ensaios/"+str(now.year)+"/"+str(now.month)+"/"+str(now.day)+"/"+str(self.input.text())+"Hora"+str(now.hour)+"-"+str(now.minute)+ "-"+ str(now.second)+"/"+str(i))
        # NOTE(review): these three locals are computed but never used below.
        Forcamaxima = forcas[-1]
        maxdeslocamento = deslocamentos[-1]
        Posicaomaxima = deslocamentos[-1]
        # Two canvases: the fixed "latest report" copy and the archived one.
        pdf2 = Canvas("Ensaios/"+"Ensaio_Atual.pdf", pagesize = letter) # file name and paper type
        pdf = Canvas("Ensaios/"+str(now.year)+"/"+str(now.month)+"/"+str(now.day)+"/"+str(self.input.text())+"Hora"+str(now.hour)+"-"+str(now.minute)+ "-"+ str(now.second)+"/"+str(self.input.text())+"Hora:"+str(now.hour)+"-"+str(now.minute)+ "-"+ str(now.second)+".pdf", pagesize = letter) # file name and paper type
        pdf.setFont('Helvetica-Bold', 12)
        pdf2.setFont('Helvetica-Bold', 12)
        # Cover-page header lines (blank fields are filled in by hand).
        tupla = (' Máquina de Ensaio de Tração e Compressão', '','','','','','','','', '            Ensaio','', 'N° da Solicitação: _________', 'Solicitante/Setor: __________________________________','Inspetor: ___________________________________','Responsável: ___________________________________','' ,
        'Data: ' + str(now.day)+'/'+str(now.month)+'/'+str(now.year), 'Hora: ' + str(now.hour)+":"+str(now.minute)+ ":"+ str(now.second) ,'', '', '','' ,'')
        lista = pdf.beginText(inch * 1, inch * 10)
        lista2 = pdf2.beginText(inch * 1, inch * 10)
        for i in range(0,len(tupla)):
            lista.textLine(tupla[i])
            lista2.textLine(tupla[i])
        # Overlay every curve of the lot on the shared module-level figure.
        fig.clf()
        ax = fig.add_subplot(111,xlabel='Deslocamento(mm)', ylabel='Força(N)', title='')
        ax.grid(True)
        for i in range(0,len(testes)):
            ax.plot(list(testes[i]["x1"]),list(testes[i]["x2"]),label='${i}$'.format(i=str(testes[i]["nome"])))
        ax.legend(loc ='best')
        # Pickle the raw test list so the lot can be reopened later.
        with open("Ensaios/"+str(now.year)+"/"+str(now.month)+"/"+str(now.day)+"/"+str(self.input.text())+"Hora"+str(now.hour)+"-"+str(now.minute)+ "-"+ str(now.second)+"/"+"save.txt","wb") as fp:
            pickle.dump(testes,fp)
        """CALIBRA.write(str(testes)+"\n")
        CALIBRA.close()"""
        # Rasterize the overview figure and place it on both cover pages.
        imgdata = BytesIO()
        fig.savefig(imgdata, format='png')
        imgdata.seek(0) # rewind the data
        Image = ImageReader(imgdata)
        pdf2.drawText(lista2)
        pdf.drawText(lista)
        pdf2.drawImage(Image ,130,50, width=400,height=350)
        pdf.drawImage(Image ,130,50, width=400,height=350)
        pdf2.showPage()
        pdf.showPage()
        # One page per test: its own curve plus the summary fields.
        for j in range(0,len(testes)):
            fig.clf()
            ax2= fig.add_subplot(111,xlabel='Deslocamento(mm)', ylabel='Força(N)', title='')
            #ax2.cla()
            ax2.grid(True)
            ax2.plot(list(testes[j]["x1"]),list(testes[j]["x2"]))
            # Maxima taken by sorting copies of the data columns.
            X = list(testes[j]["x1"]).copy()
            Y = list(testes[j]["x2"]).copy()
            X.sort()
            Y.sort()
            xmax = X[-1]
            ymax = Y[-1]
            if testes[j]["area"] == 0.0:
                testes[j]["area"] = '_______'
            tupla = ( '','','','',' Nome Ensaio:  '+str(testes[j]["nome"]),'','Tipo de ensaio: '+str(testes[j]["tipo"]) ,
            'Formato do corpo de prova: '+str(testes[j]["formato"] ),
            'Posição Máxima: '+str( xmax )+" mm",'Força Máxima: '+str(ymax)+'N', 'Área do corpo de prova: '+str(testes[j]["area"])+' mm²', 'Velocidadede ensaio: '+str(testes[j]["vel"])+' mm/min','Comprimento do corpo de prova: __________ mm' ,)
            lista3 = pdf.beginText(inch * 1, inch * 10)
            lista4 = pdf2.beginText(inch * 1, inch * 10)
            for i in range(0,len(tupla)):
                lista3.textLine(tupla[i])
                lista4.textLine(tupla[i])
            # NOTE(review): both text objects hold identical content, so the
            # duplicated drawText calls below are harmless redundancy.
            pdf.drawText(lista3)
            imgdata2 = BytesIO()
            fig.savefig(imgdata2 , format='png')
            imgdata2.seek(0) # rewind the data
            Image2 = ImageReader(imgdata2)
            pdf2.drawText(lista3)
            pdf.drawText(lista4)
            pdf2.drawImage(Image2 ,130,50, width=400,height=350)
            pdf.drawImage(Image2 ,130,50, width=400,height=350)
            pdf2.showPage()
            pdf.showPage()
        pdf2.save()
        self.cancelartestes()
        pdf.save()
        # NOTE(review): dead locals left over from an earlier version.
        x = [0]
        y = [0]
def excluirlinha_grafic(self):
global testes
self.line1.set_data([],[])
self.combo2.removeItem(self.Index)
try:
self.idx = int(self.returnposteste(self.Index22))
except:
pass
try:
self.Linhas[self.idx].set_data([], [])
except:
pass
testes.pop(self.idx)
self.ax1f1.cla()
self.Linhas = []
for i in range(0,len(testes)):
self.u.append(testes[i]["nome"])
self.aux, = self.ax1f1.plot(list(testes[i]["x1"]),list(testes[i]["x2"]),label='${i}$'.format(i=str(testes[i]["nome"])))
self.Linhas.append(self.aux)
self.ax1f1.legend(loc ='best')
self.ax1f1.grid(True)
self.fig.canvas.draw_idle()
    def Parando3(self,i = None):
        """Slot connected to the acquisition thread's ``Stopsig``.

        Clears the shared run flags, restores the idle button layout, halts
        the motor and runs the end-of-test dialog flow.  The ``i`` argument
        carries the signal payload and is ignored.
        """
        global flag2
        global flag
        flag = 0
        flag2 =0
        # Restore the idle button layout (note: parar_ajuste stays hidden
        # here, unlike in Parando).
        self.pushButton_2.setVisible(True)
        self.pushButton_3.setVisible(True)
        self.parar_ajuste.setVisible(False)
        self.pushButton.setVisible(True)
        self.pushButton_4.setVisible(False)
        self.emergrafic.setVisible(False)
        # Disabled plot-reset code kept by the original author as a string.
        """deslocamentos = [0]
        forcas = [0]
        self.Deslocamento_grafic.setValue(float(0.00))
        self.Forca_grafic.setValue(float(0.00))
        self.ax1f1.set_ylim(0, forcas[-1]+10)
        self.ax1f1.set_xlim(0, deslocamentos[-1]+10)
        self.line1.set_data(deslocamentos,forcas)
        self.fig.canvas.draw()"""
        Motor.Parar()
        self.confirmar_continuacao()
def Parando2(self):
global flag2
flag2 =0
self.pushButton_2.setVisible(True)
self.pushButton_3.setVisible(True)
self.parar_ajuste.setVisible(True)
Motor.Parar()
    def verificar_Browser_Ensaio(self):
        """Validate the save.txt file picked in the browse dialog.

        Returns 1 when ``self.filedir[0]`` unpickles cleanly; otherwise asks
        the user whether to pick another file (recursing after a new browse)
        and returns 0 on refusal.

        NOTE(review): ``pickle.load`` on a user-selected file executes
        arbitrary code if the file is malicious — acceptable for a trusted
        lab machine, but worth flagging.
        """
        try:
            with open(str(self.filedir[0]),"rb") as fp:
                testes = pickle.load(fp)
            return 1
        except:
            self.ensaiologin = False
            self.res= QtWidgets.QMessageBox()
            self.res.setText("Aparentemente você selecionou o arquivo de Browser Ensaio incorretamente, você deve selecionar o arquivo save.txt, você deseja tentar novamente e tentar continuar um antigo teste?")
            self.res.addButton(QtWidgets.QMessageBox.Yes)
            self.res.addButton(QtWidgets.QMessageBox.No)
            result1 = self.res.exec_()
            if result1 == QtWidgets.QMessageBox.Yes:
                # Let the user browse again, then re-validate recursively.
                self.func_browser()
                return self.verificar_Browser_Ensaio()
            if result1 == QtWidgets.QMessageBox.No:
                return 0
def iniciar(self):
global deslocamentos
global forcas
global testes
global contando
self.inputl.hide()
self.input.hide()
self.botaobrowser.hide()
if(self.ensaiologin == True and self.ensaiologin != None ):
resul= self.verificar_Browser_Ensaio()
if resul == 1:
with open(str(self.filedir[0]),"rb") as fp:
testes = pickle.load(fp)
contando = len(testes)
self.ensaiologin = False
else:
self.ensaiologin = False
try:
arquivo = open("Fator_Calibracao.txt","r")
fator = arquivo.readline()
celula.iniciarcel(str(fator))
except:
print("O Arquivo Fator_Calibracao.txt, está corrompido ou foi excluido você não pode iniciar o ensaio sem este arquivo, solução: vá até a interface ,selecione a aba celula de carga e escolha novamente a celula de carga isso irá criar o arquivo novamente. \n",file=log_file)
sys.exit()
self.Config.setCurrentWidget(self.Grafic)
deslocamentos = [0]
forcas = [0]
self.Linhas = []
self.pushButton_2.setVisible(False)
self.pushButton_3.setVisible(False)
self.parar_ajuste.setVisible(False)
self.pushButton.setVisible(False)
self.pushButton_4.setVisible(True)
self.emergrafic.setVisible(True)
global flag2
global qforca
global maxforca
global maxdeslocamento
global tempinicioteste
global VelocidadeEn
global tipodeensaio
if(self.checkBox.isChecked() == True):
#Motor.subir()
Motor.Subir_descer(self.Velocidade.value(),1,0)
tipodeensaio = "Tração"
else:
Motor.Subir_descer(self.Velocidade.value(),0,0)
tipodeensaio = "Compressão"
#Motor.baixar()
VelocidadeEn = self.Velocidade.value()
#Motor.calcular( float(VelocidadeEn) )
tempinicioteste = time.time()
if(self.checkBox_3.checkState() == 2):
qforca = self.Velocidade_2.value()
else:
qforca = None
if(self.checkBox_4.checkState() == 2):
max_forca= self.Velocidade_3.value()
else:
max_forca = None
if (self.checkBox_5.checkState() == 2):
maxdeslocamento= self.Velocidade_4.value()
else:
maxdeslocamento= None
if(self.checkBox_6.isChecked() == True):
a = self.a_retangulo.value()
b = self.b_retangulo.value()
else:
a =None
b = None
if(self.checkBox_7.isChecked() == True):
c = self.Velocidade_8.value()
d = self.d_retangulo.value()
else:
c =None
d = None
if(self.checkBox_8.isChecked() == True):
e = self.D_cilimdro.value()
f = self.H_cilindro.value()
else:
e =None
f = None
Area(a,b,c,d,e,f)
flag2 =1
self.thread3.start()
self.thread3.UPsig.connect(self.update1)
self.thread3.Stopsig.connect(self.Parando3)
#QtWidgets.QWidget.connect(self.thread3, QtCore.SIGNAL("UP"), self.update1)
#QtWidgets.QWidget.connect(self.thread3, QtCore.SIGNAL("Parando"), self.Parando3)
def update1(self,lista):
self.Deslocamento_grafic.setValue(lista[0])
self.Forca_grafic.setValue(float(lista[1])*9.8)
self.ax1f1.set_ylim(0, lista[2])
self.ax1f1.set_xlim(0, lista[3])
self.line1.set_data(lista[4],lista[5])
self.fig.canvas.draw_idle()
    def calibrar(self):
        """Second step of registering a NEW load cell.

        Tares the unloaded cell, then builds the reference-weight spin box,
        the cell-name line edit and a "Continuar" button that advances to
        :meth:`Ccalibrar`.
        """
        celula.tare()   # zero the cell while it is unloaded
        self.bcal2.hide()
        self.obs3.hide()
        # Fresh widgets for this step of the wizard.
        self.pcal = QtWidgets.QDoubleSpinBox(self.Calibra)
        self.obs3 = QtWidgets.QLabel(self.Calibra)
        self.obs4 = QtWidgets.QLabel(self.Calibra)
        self.qline = QtWidgets.QLineEdit(self.Calibra)
        self.bcal2 = QtWidgets.QPushButton(self.Calibra)
        self.bcal.hide()
        self.ecal.hide()
        self.pcal.setGeometry(QtCore.QRect(210,240,81,29))
        self.pcal.setObjectName(_fromUtf8("pcal"))
        self.pcal.setRange(0,3000.00)
        self.pcal.setValue(1.00)
        self.pcal.show()
        self.obs3.setGeometry(QtCore.QRect(20,190,741,41))
        self.obs3.setObjectName(_fromUtf8("obs"))
        self.obs3.setText(_translate("MainWindow", "Informe o Valor do Peso Padrão (EM KG), após coloque o mesmo na celula de Carga , de um nome para a nova celula e Clique em continuar.",None))
        self.obs3.show()
        self.qline.setGeometry(QtCore.QRect(180,300,151,21))
        self.qline.show()
        self.obs4.setGeometry(QtCore.QRect(180,280,151,21))
        self.obs4.setObjectName(_fromUtf8("obs"))
        self.obs4.setText(_translate("MainWindow", "Nome da Celula:",None))
        self.obs4.show()
        self.bcal2.setGeometry(QtCore.QRect(190,340,151,21))
        self.bcal2.setObjectName(_fromUtf8("bcal"))
        self.bcal2.setText(_translate("MainWindow", "Continuar",None))
        self.bcal2.show()
        self.bcal2.clicked.connect(self.Ccalibrar)
def Ccalibrar(self):
self.bcal.hide()
self.ecal.hide()
self.obs3.setText(_translate("MainWindow", "Calibrando Celula de Carga Aguarde ! ",None))
VALUE_SERIAL=celula.calibrar()
B = self.pcal.value()
Fator= (float(VALUE_SERIAL)/float(B))
A = self.qline.text()
self.CALIBRA = open("conf_celulas.txt","r")
self.A = self.CALIBRA.readlines()
self.CALIBRA.close()
self.t= ''
self.C = []
self.posicao = 0
for i, valor in enumerate(self.A):
self.p = 0
self.j =0
while(self.p == 0):
if(self.A[i][self.j] != ';'):
self.t= self.t + self.A[i][self.j]
self.j += 1
else:
self.p =1
self.C.append(self.t.replace("\n",""))
self.t =''
if(self.t.replace("\n","") == A):
self.posicao = i
if(A != self.C[self.posicao]):
CALIBRA = open("conf_celulas.txt","a")
CALIBRA.write(str(A)+";")
CALIBRA.write(str(Fator)+"\n")
CALIBRA.close()
self.bcal2.hide()
self.obs3.setText(_translate("MainWindow", "Celula de Carga calibrada agora você já pode Colocar novamente as Garras/Mordentes\n Celula:"+str(A),None))
self.obs4.hide()
self.obs3.hide()
self.pcal.hide()
self.qline.hide()
self.bcal2.hide()
self.bcal.show()
self.ecal.show()
self.bcal2.hide()
else:
self.bcal2.hide()
self.obs3.setText(_translate("MainWindow", "Não foi Adicionado a Nova Celula, pois a celula com o nome:"+str(A)+"já existe vá em editar para recalibra-la",None))
self.obs4.hide()
self.pcal.hide()
self.qline.hide()
self.bcal2.hide()
self.bcal.show()
self.ecal.show()
self.bcal2.hide()
    def updateCelulaInterface(self):
        """Show the name of the currently active load cell in the UI.

        Matches the factor stored in Fator_Calibracao.txt against each
        "name;factor" line of conf_celulas.txt and writes the matching name
        into the ``input2`` label.  Aborts the program if either file is
        unreadable.
        """
        try:
            CALIBRA = open("conf_celulas.txt","r")
        except:
            print("O Arquivo conf_celulas.txt, está corrompido ou foi excluido você não pode iniciar o ensaio sem este arquivo, solução: Adicione uma versao antiga do arquivo, se não tiver crie o arquivo e adicione a seguinte linha: celulatest;100000 \n Após você deve ir na aba celula de carga no software e adicionar novamente suas celulas de cargas pois os cadastros anteriores foram perdidas\n",file=log_file)
            sys.exit()
        try:
            arquivo = open("Fator_Calibracao.txt","r")
            fator = arquivo.readline()
        except:
            print("O Arquivo Fator_Calibracao.txt, está corrompido ou foi excluido você não pode iniciar o ensaio sem este arquivo, solução: vá até a interface ,selecione a aba celula de carga e escolha novamente a celula de carga isso irá criar o arquivo novamente. \n",file=log_file)
            sys.exit()
        A =CALIBRA.readlines()
        CALIBRA.close()
        t= ''
        C = []
        posicao = 0
        for i, valor in enumerate(A):
            # Locate the character just after the LAST ';' of the line.
            for j, text in enumerate(valor):
                if(text == ';'):
                    posicao = j+1
            # The remainder of the line is the factor; compare with the
            # active one and display the name part on a match.
            if(valor[posicao::] == fator):
                posicao = posicao-1
                self.input2.setText(_translate("MainWindow", "Celula de Carga: "+str(valor[:posicao]) ,None))
                self.input2.show()
        arquivo.close()
        # NOTE(review): CALIBRA is closed a second time here; harmless.
        CALIBRA.close()
def setupUi(self, MainWindow):
    """Build the entire widget tree of the main window.

    Creates the fixed-size (924x599) main window with a QTabWidget holding
    three tabs: "Configurações" (test setup), "Gráfico" (live plot /
    report) and "Celula de Carga" (load-cell management).  All widgets are
    created here; texts and signal connections are applied afterwards by
    ``retranslateUi`` (called near the end of this method).

    NOTE(review): ``self.Grafic`` is referenced below (first at the
    ``emergrafic`` button) but is never created in this method — it is
    presumably created elsewhere in the class before ``setupUi`` runs.
    TODO confirm.
    """
    MainWindow.setObjectName(_fromUtf8("MainWindow"))
    # Fixed-size window: min == max == initial size, so it cannot be resized.
    MainWindow.resize(924, 599)
    MainWindow.setMinimumSize(924, 599)
    MainWindow.setMaximumSize(924, 599)
    self.centralwidget = QtWidgets.QWidget(MainWindow)
    self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
    # Tab container that hosts every page of the application.
    self.Config = QtWidgets.QTabWidget(self.centralwidget)
    self.Config.setGeometry(QtCore.QRect(0, 0, 961, 581))
    self.Config.setObjectName(_fromUtf8("Config"))
    # ------------------------------------------------------------------
    # "Configurações" tab: lot name, webcam, test type, stop conditions,
    # manual jog controls and specimen cross-section inputs.
    # ------------------------------------------------------------------
    self.Config_2 = QtWidgets.QWidget()
    self.Config_2.setObjectName(_fromUtf8("Config_2"))
    # Lot-name text field and its label.
    self.input = QtWidgets.QLineEdit(self.Config_2)
    self.input.setGeometry(QtCore.QRect(600, 20, 151, 21))
    self.input.setObjectName(_fromUtf8("input"))
    self.inputl = QtWidgets.QLabel(self.Config_2)
    self.inputl.setGeometry(QtCore.QRect(500, 20, 100, 21))
    self.inputl.setObjectName(_fromUtf8("inputl"))
    self.inputl.setText(_translate("MainWindow", "Nome do Lote:",None))
    self.inputl.show()
    # Label showing the currently selected load cell.
    # NOTE(review): objectName "inputl" is duplicated from the label above,
    # likely a copy-paste leftover.
    self.input2 = QtWidgets.QLabel(self.Config_2)
    self.input2.setGeometry(QtCore.QRect(500, 50,210,21))
    self.input2.setObjectName(_fromUtf8("inputl"))
    #self.input2.setText(_translate("MainWindow", "Celula de Carga:",None))
    self.updateCelulaInterface()
    #self.input2.show()
    # "Iniciar Ensaio" button (text/handler set in retranslateUi).
    self.pushButton = QtWidgets.QPushButton(self.Config_2)
    self.pushButton.setGeometry(QtCore.QRect(40, 20, 151, 21))
    self.pushButton.setObjectName(_fromUtf8("pushButton"))
    # Webcam capture button and camera-selection combo.
    self.button_webcam = QtWidgets.QPushButton(self.Config_2)
    self.button_webcam.setGeometry(QtCore.QRect(250, 20, 151, 21))
    self.button_webcam.setObjectName(_fromUtf8("button_webcam"))
    self.combo_webcam = QtWidgets.QComboBox(self.Config_2)
    self.combo_webcam.setGeometry(QtCore.QRect(250, 60, 151, 21))
    self.combo_webcam.setObjectName(_fromUtf8("combo_webcam"))
    self.combo_webcam.show()
    # Populate with the detected cameras, reversed (newest device first?).
    clist = webc.cameralist()
    clist = clist[::-1]
    self.combo_webcam.addItems(clist)
    # Frame: "Tipo de ensaio" — tension/compression radio buttons and
    # the test-speed spinner (8–175 mm/min, default 10).
    self.t_ensaio = QtWidgets.QFrame(self.Config_2)
    self.t_ensaio.setGeometry(QtCore.QRect(50, 90, 201, 201))
    self.t_ensaio.setFrameShape(QtWidgets.QFrame.StyledPanel)
    self.t_ensaio.setFrameShadow(QtWidgets.QFrame.Raised)
    self.t_ensaio.setObjectName(_fromUtf8("t_ensaio"))
    self.label = QtWidgets.QLabel(self.t_ensaio)
    self.label.setGeometry(QtCore.QRect(50, 0, 101, 17))
    self.label.setObjectName(_fromUtf8("label"))
    self.checkBox = QtWidgets.QRadioButton(self.t_ensaio)
    self.checkBox.setGeometry(QtCore.QRect(20, 50, 151, 22))
    self.checkBox.setObjectName(_fromUtf8("checkBox"))
    self.checkBox_2 = QtWidgets.QRadioButton(self.t_ensaio)
    self.checkBox_2.setGeometry(QtCore.QRect(20, 90, 161, 22))
    self.checkBox_2.setObjectName(_fromUtf8("checkBox_2"))
    self.Velocidade = QtWidgets.QDoubleSpinBox(self.t_ensaio)
    self.Velocidade.setGeometry(QtCore.QRect(27, 160,81, 29))
    self.Velocidade.setObjectName(_fromUtf8("Velocidade"))
    self.Velocidade.setRange(8, 175 )
    self.Velocidade.setValue(10)
    self.label_2 = QtWidgets.QLabel(self.t_ensaio)
    self.label_2.setGeometry(QtCore.QRect(40, 130, 141, 17))
    self.label_2.setObjectName(_fromUtf8("label_2"))
    self.label_3 = QtWidgets.QLabel(self.t_ensaio)
    self.label_3.setGeometry(QtCore.QRect(120, 170, 57, 20))
    self.label_3.setObjectName(_fromUtf8("label_3"))
    # Frame: "Parada automatica" — three optional stop conditions, each a
    # checkbox paired with a numeric threshold spinner.
    self.frame_2 = QtWidgets.QFrame(self.Config_2)
    self.frame_2.setGeometry(QtCore.QRect(270, 90, 361, 201))
    self.frame_2.setFrameShape(QtWidgets.QFrame.StyledPanel)
    self.frame_2.setFrameShadow(QtWidgets.QFrame.Raised)
    self.frame_2.setObjectName(_fromUtf8("frame_2"))
    # Stop on force drop (% of peak force), default 99%.
    self.checkBox_3 = QtWidgets.QCheckBox(self.frame_2)
    self.checkBox_3.setGeometry(QtCore.QRect(30, 50, 161, 22))
    self.checkBox_3.setObjectName(_fromUtf8("checkBox_3"))
    self.label_4 = QtWidgets.QLabel(self.frame_2)
    self.label_4.setGeometry(QtCore.QRect(120, 0, 111, 17))
    self.label_4.setObjectName(_fromUtf8("label_4"))
    self.Velocidade_2 = QtWidgets.QDoubleSpinBox(self.frame_2)
    self.Velocidade_2.setGeometry(QtCore.QRect(200, 50, 81, 21))
    self.Velocidade_2.setObjectName(_fromUtf8("Velocidade_2"))
    self.Velocidade_2.setRange(0,99.00)
    self.Velocidade_2.setValue(99.00)
    self.label_5 = QtWidgets.QLabel(self.frame_2)
    self.label_5.setGeometry(QtCore.QRect(210, 40, 61, 16))
    font = QtGui.QFont()
    font.setPointSize(8)
    self.label_5.setFont(font)
    self.label_5.setObjectName(_fromUtf8("label_5"))
    # Stop at maximum force (N), up to 10000.
    self.checkBox_4 = QtWidgets.QCheckBox(self.frame_2)
    self.checkBox_4.setGeometry(QtCore.QRect(30, 100, 161, 22))
    self.checkBox_4.setObjectName(_fromUtf8("checkBox_4"))
    self.Velocidade_3 = QtWidgets.QDoubleSpinBox(self.frame_2)
    self.Velocidade_3.setGeometry(QtCore.QRect(200, 100, 81, 21))
    self.Velocidade_3.setObjectName(_fromUtf8("Velocidade_3"))
    self.Velocidade_3.setRange(0,10000.00)
    self.label_6 = QtWidgets.QLabel(self.frame_2)
    self.label_6.setGeometry(QtCore.QRect(210, 80, 71, 20))
    font = QtGui.QFont()
    font.setPointSize(8)
    self.label_6.setFont(font)
    self.label_6.setObjectName(_fromUtf8("label_6"))
    # Stop at maximum displacement (mm), up to 5000.
    self.checkBox_5 = QtWidgets.QCheckBox(self.frame_2)
    self.checkBox_5.setGeometry(QtCore.QRect(30, 150, 161, 22))
    self.checkBox_5.setObjectName(_fromUtf8("checkBox_5"))
    self.Velocidade_4 = QtWidgets.QDoubleSpinBox(self.frame_2)
    self.Velocidade_4.setGeometry(QtCore.QRect(200, 150, 81, 21))
    self.Velocidade_4.setObjectName(_fromUtf8("Velocidade_4"))
    self.Velocidade_4.setRange(0,5000.00)
    self.label_7 = QtWidgets.QLabel(self.frame_2)
    self.label_7.setGeometry(QtCore.QRect(190, 130, 111, 20))
    font = QtGui.QFont()
    font.setPointSize(8)
    self.label_7.setFont(font)
    self.label_7.setObjectName(_fromUtf8("label_7"))
    self.label_8 = QtWidgets.QLabel(self.frame_2)
    self.label_8.setGeometry(QtCore.QRect(280, 100, 57, 20))
    self.label_8.setObjectName(_fromUtf8("label_8"))
    self.label_9 = QtWidgets.QLabel(self.frame_2)
    self.label_9.setGeometry(QtCore.QRect(280, 160, 57, 20))
    self.label_9.setObjectName(_fromUtf8("label_9"))
    # Frame: "Ajustes Manuais" — manual jog (displacement + speed spinners,
    # Subir/Descer/Parar buttons).
    self.t_ensaio_2 = QtWidgets.QFrame(self.Config_2)
    self.t_ensaio_2.setGeometry(QtCore.QRect(660, 90, 201, 201))
    self.t_ensaio_2.setFrameShape(QtWidgets.QFrame.StyledPanel)
    self.t_ensaio_2.setFrameShadow(QtWidgets.QFrame.Raised)
    self.t_ensaio_2.setObjectName(_fromUtf8("t_ensaio_2"))
    self.label_10 = QtWidgets.QLabel(self.t_ensaio_2)
    self.label_10.setGeometry(QtCore.QRect(40, 0, 101, 17))
    self.label_10.setObjectName(_fromUtf8("label_10"))
    self.desl = QtWidgets.QLabel(self.t_ensaio_2)
    self.desl.setGeometry(QtCore.QRect(20, 20, 141, 17))
    self.desl.setObjectName(_fromUtf8("desl"))
    # Jog displacement (mm), default 30.
    self.deslb = QtWidgets.QDoubleSpinBox(self.t_ensaio_2)
    self.deslb.setGeometry(QtCore.QRect(27, 40, 81, 29))
    self.deslb.setObjectName(_fromUtf8("Vel_ajuste"))
    self.deslb.setRange(8, 175)
    self.deslb.setValue(30)
    self.deslm = QtWidgets.QLabel(self.t_ensaio_2)
    self.deslm.setGeometry(QtCore.QRect(110, 50, 57, 20))
    self.deslm.setObjectName(_fromUtf8("label_12"))
    # Jog speed (mm/min), default 120.
    self.Vel_ajuste = QtWidgets.QDoubleSpinBox(self.t_ensaio_2)
    self.Vel_ajuste.setGeometry(QtCore.QRect(27, 90, 81, 29))
    self.Vel_ajuste.setObjectName(_fromUtf8("Vel_ajuste"))
    self.Vel_ajuste.setRange(8, 175)
    self.Vel_ajuste.setValue(120)
    self.label_11 = QtWidgets.QLabel(self.t_ensaio_2)
    self.label_11.setGeometry(QtCore.QRect(20, 70, 141, 17))
    self.label_11.setObjectName(_fromUtf8("label_11"))
    self.label_12 = QtWidgets.QLabel(self.t_ensaio_2)
    self.label_12.setGeometry(QtCore.QRect(110, 90, 57, 20))
    self.label_12.setObjectName(_fromUtf8("label_12"))
    self.pushButton_2 = QtWidgets.QPushButton(self.t_ensaio_2)
    self.pushButton_2.setGeometry(QtCore.QRect(110, 140, 51, 31))
    self.pushButton_2.setObjectName(_fromUtf8("pushButton_2"))
    # Reports folder and browse-previous-test buttons (top-right).
    # NOTE(review): both reuse objectName "pushButton_2" — copy-paste leftover.
    self.botaodiretorio = QtWidgets.QPushButton(self.Config_2)
    self.botaodiretorio.setGeometry(QtCore.QRect(800, 50, 100, 21))
    self.botaodiretorio.setObjectName(_fromUtf8("pushButton_2"))
    self.botaobrowser = QtWidgets.QPushButton(self.Config_2)
    self.botaobrowser.setGeometry(QtCore.QRect(800, 20, 120, 21))
    self.botaobrowser.setObjectName(_fromUtf8("pushButton_2"))
    self.pushButton_3 = QtWidgets.QPushButton(self.t_ensaio_2)
    self.pushButton_3.setGeometry(QtCore.QRect(40, 140, 41, 31))
    self.pushButton_3.setObjectName(_fromUtf8("pushButton_3"))
    self.parar_ajuste = QtWidgets.QPushButton(self.t_ensaio_2)
    self.parar_ajuste.setGeometry(QtCore.QRect(60, 175, 80, 21))
    self.parar_ajuste.setObjectName(_fromUtf8("parar_ajuste"))
    # Frame: specimen cross-section — rectangular / tube / cylindrical,
    # each with its dimension spinners and an illustrative image.
    self.raio_tubo = QtWidgets.QFrame(self.Config_2)
    self.raio_tubo.setGeometry(QtCore.QRect(210, 320, 521, 191))
    self.raio_tubo.setFrameShape(QtWidgets.QFrame.StyledPanel)
    self.raio_tubo.setFrameShadow(QtWidgets.QFrame.Raised)
    self.raio_tubo.setObjectName(_fromUtf8("raio_tubo"))
    self.label_13 = QtWidgets.QLabel(self.raio_tubo)
    self.label_13.setGeometry(QtCore.QRect(140, 0, 271, 17))
    self.label_13.setObjectName(_fromUtf8("label_13"))
    self.checkBox_6 = QtWidgets.QRadioButton(self.raio_tubo)
    self.checkBox_6.setGeometry(QtCore.QRect(40, 30, 111, 22))
    self.checkBox_6.setObjectName(_fromUtf8("checkBox_6"))
    self.checkBox_7 = QtWidgets.QRadioButton(self.raio_tubo)
    self.checkBox_7.setGeometry(QtCore.QRect(40, 80, 101, 22))
    self.checkBox_7.setObjectName(_fromUtf8("checkBox_7"))
    self.checkBox_8 = QtWidgets.QRadioButton(self.raio_tubo)
    self.checkBox_8.setGeometry(QtCore.QRect(40, 130, 101, 22))
    self.checkBox_8.setObjectName(_fromUtf8("checkBox_8"))
    # Rectangle dimensions L x l.
    self.a_retangulo = QtWidgets.QDoubleSpinBox(self.raio_tubo)
    self.a_retangulo.setGeometry(QtCore.QRect(180, 30, 81, 21))
    self.a_retangulo.setObjectName(_fromUtf8("a_retangulo"))
    self.a_retangulo.setRange(0,1000.00)
    self.b_retangulo = QtWidgets.QDoubleSpinBox(self.raio_tubo)
    self.b_retangulo.setGeometry(QtCore.QRect(260, 30, 81, 21))
    self.b_retangulo.setObjectName(_fromUtf8("b_retangulo"))
    self.b_retangulo.setRange(0,1000.00)
    # Illustration images for each shape (scaled pixmaps).
    self.retanguloima = QtWidgets.QLabel(self.raio_tubo)
    #,posicaoesquerdadireita,posicaoparabaixoaumentar,largura,altura
    self.retanguloima.setGeometry(QtCore.QRect(350, 10, 120, 60))
    self.retanguloima.setObjectName(_fromUtf8("retangulo"))
    self.pixmap1 = QtGui.QPixmap('Imagens/retangulo1.png')
    self.pixmap1= self.pixmap1.scaledToWidth(60)
    #self.pixmap1= self.pixmap1.scaledToHeight(150)
    self.retanguloima.setPixmap(self.pixmap1)
    self.tuboima = QtWidgets.QLabel(self.raio_tubo)
    #,posicaoesquerdadireita,posicaoparabaixoaumentar,largura,altura
    self.tuboima.setGeometry(QtCore.QRect(350, 37, 120, 100))
    self.tuboima.setObjectName(_fromUtf8("tubo"))
    self.pixmap2 = QtGui.QPixmap('Imagens/tubo1.png')
    self.pixmap2= self.pixmap2.scaledToWidth(80)
    #self.pixmap1= self.pixmap1.scaledToHeight(150)
    self.tuboima.setPixmap(self.pixmap2)
    self.ciliima = QtWidgets.QLabel(self.raio_tubo)
    #,posicaoesquerdadireita,posicaoparabaixoaumentar,largura,altura
    self.ciliima.setGeometry(QtCore.QRect(400, 100, 120, 100))
    self.ciliima.setObjectName(_fromUtf8("tubo"))
    self.pixmap3 = QtGui.QPixmap('Imagens/cilindro.png')
    self.pixmap3= self.pixmap3.scaledToWidth(70)
    #self.pixmap1= self.pixmap1.scaledToHeight(150)
    self.ciliima.setPixmap(self.pixmap3)
    self.label_15 = QtWidgets.QLabel(self.raio_tubo)
    self.label_15.setGeometry(QtCore.QRect(190, 15, 61, 21))
    font = QtGui.QFont()
    font.setPointSize(8)
    self.label_15.setFont(font)
    self.label_15.setObjectName(_fromUtf8("label_15"))
    self.label_16 = QtWidgets.QLabel(self.raio_tubo)
    self.label_16.setGeometry(QtCore.QRect(280, 10, 61, 31))
    font = QtGui.QFont()
    font.setPointSize(8)
    self.label_16.setFont(font)
    self.label_16.setObjectName(_fromUtf8("label_16"))
    # Tube dimensions L and D.
    self.Velocidade_8 = QtWidgets.QDoubleSpinBox(self.raio_tubo)
    self.Velocidade_8.setGeometry(QtCore.QRect(180, 80, 81, 21))
    self.Velocidade_8.setObjectName(_fromUtf8("Velocidade_8"))
    self.Velocidade_8.setRange(0,1000.00)
    self.d_retangulo = QtWidgets.QDoubleSpinBox(self.raio_tubo)
    self.d_retangulo.setGeometry(QtCore.QRect(260, 80, 81, 21))
    self.d_retangulo.setObjectName(_fromUtf8("d_retangulo"))
    self.d_retangulo.setRange(0,1000.00)
    self.label_17 = QtWidgets.QLabel(self.raio_tubo)
    self.label_17.setGeometry(QtCore.QRect(190, 66, 61, 20))
    font = QtGui.QFont()
    font.setPointSize(8)
    self.label_17.setFont(font)
    self.label_17.setObjectName(_fromUtf8("label_17"))
    self.label_18 = QtWidgets.QLabel(self.raio_tubo)
    self.label_18.setGeometry(QtCore.QRect(280, 70, 61, 16))
    font = QtGui.QFont()
    font.setPointSize(8)
    self.label_18.setFont(font)
    self.label_18.setObjectName(_fromUtf8("label_18"))
    # Cylinder dimensions D and H.
    self.D_cilimdro = QtWidgets.QDoubleSpinBox(self.raio_tubo)
    self.D_cilimdro.setGeometry(QtCore.QRect(180, 130, 81, 21))
    self.D_cilimdro.setObjectName(_fromUtf8("D_cilimdro"))
    self.D_cilimdro.setRange(0,1000.00)
    self.H_cilindro = QtWidgets.QDoubleSpinBox(self.raio_tubo)
    self.H_cilindro.setGeometry(QtCore.QRect(260, 130, 81, 21))
    self.H_cilindro.setObjectName(_fromUtf8("H_cilindro"))
    self.H_cilindro.setRange(0,1000.00)
    self.label_19 = QtWidgets.QLabel(self.raio_tubo)
    self.label_19.setGeometry(QtCore.QRect(190, 120, 61, 16))
    font = QtGui.QFont()
    font.setPointSize(8)
    self.label_19.setFont(font)
    self.label_19.setObjectName(_fromUtf8("label_19"))
    self.label_20 = QtWidgets.QLabel(self.raio_tubo)
    self.label_20.setGeometry(QtCore.QRect(280, 120, 61, 16))
    font = QtGui.QFont()
    font.setPointSize(8)
    self.label_20.setFont(font)
    self.label_20.setObjectName(_fromUtf8("label_20"))
    # Emergency-stop buttons: one on the config tab, one on the graph tab.
    self.pushButton_4 = QtWidgets.QPushButton(self.Config_2)
    self.pushButton_4.setGeometry(QtCore.QRect(240, 20, 101, 21))
    sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(self.pushButton_4.sizePolicy().hasHeightForWidth())
    self.pushButton_4.setSizePolicy(sizePolicy)
    self.pushButton_4.setObjectName(_fromUtf8("pushButton_4"))
    self.emergrafic = QtWidgets.QPushButton(self.Grafic)
    self.emergrafic.setGeometry(QtCore.QRect(750, 20, 101, 21))
    self.emergrafic.setSizePolicy(sizePolicy)
    self.emergrafic.setObjectName(_fromUtf8("pushButton_4"))
    self.Config.addTab(self.Config_2, _fromUtf8(""))
    # ------------------------------------------------------------------
    # "Gráfico" tab: live displacement/force read-outs and report buttons.
    # ------------------------------------------------------------------
    self.Deslocamento_grafic = QtWidgets.QDoubleSpinBox(self.Grafic)
    self.Deslocamento_grafic.setGeometry(QtCore.QRect(170, 90, 131, 31))
    self.Deslocamento_grafic.setObjectName(_fromUtf8("Deslocamento_grafic"))
    self.Deslocamento_grafic.setRange(0,900)
    self.Forca_grafic = QtWidgets.QDoubleSpinBox(self.Grafic)
    self.Forca_grafic.setGeometry(QtCore.QRect(540, 90, 121, 31))
    self.Forca_grafic.setObjectName(_fromUtf8("Forca_grafic"))
    self.Forca_grafic.setRange(0,10000)
    self.label_21 = QtWidgets.QLabel(self.Grafic)
    self.label_21.setGeometry(QtCore.QRect(180, 70, 111, 17))
    self.label_21.setObjectName(_fromUtf8("label_21"))
    self.label_22 = QtWidgets.QLabel(self.Grafic)
    self.label_22.setGeometry(QtCore.QRect(570, 70, 111, 17))
    self.label_22.setObjectName(_fromUtf8("label_22"))
    self.label_23 = QtWidgets.QLabel(self.Grafic)
    self.label_23.setGeometry(QtCore.QRect(310, 100, 111, 17))
    self.label_23.setObjectName(_fromUtf8("label_23"))
    self.label_24 = QtWidgets.QLabel(self.Grafic)
    self.label_24.setGeometry(QtCore.QRect(670, 100, 111, 20))
    self.label_24.setObjectName(_fromUtf8("label_24"))
    self.pushButton_5 = QtWidgets.QPushButton(self.Grafic)
    self.pushButton_5.setGeometry(QtCore.QRect(110, 20, 110, 29))
    self.pushButton_5.setObjectName(_fromUtf8("pushButton_5"))
    self.pushButton_6 = QtWidgets.QPushButton(self.Grafic)
    self.pushButton_6.setGeometry(QtCore.QRect(560, 20,131 , 29))
    self.pushButton_6.setObjectName(_fromUtf8("pushButton_6"))
    self.pushButton_7 = QtWidgets.QPushButton(self.Grafic)
    self.pushButton_7.setGeometry(QtCore.QRect(320, 20, 131, 29))
    self.pushButton_7.setObjectName(_fromUtf8("pushButton_7"))
    self.Config.addTab(self.Grafic, _fromUtf8(""))
    MainWindow.setCentralWidget(self.centralwidget)
    # Map of figures shown in the graph tab (filled elsewhere).
    self.fig_dict = {}
    # ------------------------------------------------------------------
    # "Celula de Carga" tab: add/select/calibrate load cells.
    # ------------------------------------------------------------------
    #definiçaõ Celula de Carga
    self.Calibra = QtWidgets.QWidget()
    self.Calibra.setObjectName(_fromUtf8("Celula de Carga"))
    self.obs = QtWidgets.QLabel(self.Calibra)
    self.obs.setGeometry(QtCore.QRect(20,50,841,21))
    self.obs.setObjectName(_fromUtf8("obs"))
    self.bcal = QtWidgets.QPushButton(self.Calibra)
    self.bcal.setGeometry(QtCore.QRect(150,110,131,29))
    self.bcal.setObjectName(_fromUtf8("bcal"))
    self.obs3 = QtWidgets.QLabel(self.Calibra)
    self.ecal = QtWidgets.QPushButton(self.Calibra)
    self.ecal.setGeometry(QtCore.QRect(530,110,151,29))
    self.ecal.setObjectName(_fromUtf8("ecal"))
    self.scal = QtWidgets.QPushButton(self.Calibra)
    self.scal.setGeometry(QtCore.QRect(330,110,161,29))
    self.scal.setObjectName(_fromUtf8("scal"))
    self.combo = QtWidgets.QComboBox(self.Calibra)
    self.Config.addTab(self.Calibra, _fromUtf8(""))
    self.combo2 = QtWidgets.QComboBox(self.Grafic)
    self.bcombo = QtWidgets.QPushButton(self.Grafic)
    # Standard menu/status bars and final wiring.
    self.menubar = QtWidgets.QMenuBar(MainWindow)
    self.menubar.setGeometry(QtCore.QRect(0, 0, 924, 23))
    self.menubar.setObjectName(_fromUtf8("menubar"))
    MainWindow.setMenuBar(self.menubar)
    self.statusbar = QtWidgets.QStatusBar(MainWindow)
    self.statusbar.setObjectName(_fromUtf8("statusbar"))
    MainWindow.setStatusBar(self.statusbar)
    # Apply texts and connect all button handlers.
    self.retranslateUi(MainWindow)
    self.Config.setCurrentIndex(0)
    QtCore.QMetaObject.connectSlotsByName(MainWindow)
    # Widgets that start hidden until the matching workflow begins.
    self.obs.hide()
    self.combo.hide()
    self.combo2.hide()
    self.scal.hide()
    self.bcombo.hide()
def func_browser(self):
    """Let the user pick a previously saved test file to load.

    Stores the raw result of ``getOpenFileName`` in ``self.filedir`` and
    sets ``self.ensaiologin`` so the next test run uses the chosen file.

    NOTE(review): under PyQt5, ``getOpenFileName`` returns a
    ``(path, selected_filter)`` tuple rather than a plain string —
    confirm that every consumer of ``self.filedir`` expects that shape.
    """
    # Renamed local (was `file`, which shadowed the builtin).
    dialog = QtWidgets.QFileDialog()
    self.filedir = dialog.getOpenFileName()
    #print (self.filedir)
    self.ensaiologin = True
def relatorios(self):
    """Sync the stored reports to the user's Desktop and open the folder.

    Recreates ``~/Desktop/Ensaios`` from ``/opt/MET-Master/Ensaios``,
    makes it world-writable and opens it in the Nautilus file manager.
    """
    shell_steps = (
        'rm -rf /home/laboratorio/Desktop/Ensaios',
        'cp -R /opt/MET-Master/Ensaios/ /home/laboratorio/Desktop/Ensaios/',
        'chmod 777 /home/laboratorio/Desktop/Ensaios/ -R',
        "nautilus /home/laboratorio/Desktop/Ensaios/",
    )
    #os.system("exo-open --launch FileManager Ensaios/")
    for step in shell_steps:
        os.system(step)
def webcamcapture_final(self):
    """Capture the "final" photo of the specimen with the selected camera.

    The image is saved as ``TempImagens/<lote><contando>-final.png``,
    where ``contando`` is the module-level running test counter.

    NOTE(review): this method reads and repopulates ``self.combo`` while
    the camera list shown on the config tab lives in ``self.combo_webcam``
    — confirm which widget is intended.
    """
    global contando
    escolhido = self.combo.currentText()
    # Refresh the camera list: devices may have been plugged/unplugged.
    self.combo.clear()
    cameras = webc.cameralist()
    cameras = cameras[::-1]
    self.combo.addItems(cameras)
    imagesavedir = 'TempImagens/'+self.input.text()+str(contando)+'-final.png'
    # Locate the previously selected camera in the refreshed list;
    # fall back to index 0 if it disappeared.
    ind = 0
    for i, cam in enumerate(cameras):
        if str(cam) == escolhido:
            ind = i
    webc.main(imagesavedir, cameras[ind])
def webcamcapture(self):
    """Capture the "initial" photo of the specimen with the selected camera.

    The image is saved as ``TempImagens/<lote><contando>-inicial.png``,
    where ``contando`` is the module-level running test counter.

    NOTE(review): this method reads and repopulates ``self.combo`` while
    the camera list shown on the config tab lives in ``self.combo_webcam``
    — confirm which widget is intended.
    """
    global contando
    escolhido = self.combo.currentText()
    # Refresh the camera list: devices may have been plugged/unplugged.
    self.combo.clear()
    cameras = webc.cameralist()
    cameras = cameras[::-1]
    self.combo.addItems(cameras)
    imagesavedir = 'TempImagens/'+self.input.text()+str(contando)+'-inicial.png'
    # Locate the previously selected camera in the refreshed list;
    # fall back to index 0 if it disappeared.
    ind = 0
    for i, cam in enumerate(cameras):
        if str(cam) == escolhido:
            ind = i
    webc.main(imagesavedir, cameras[ind])
def retranslateUi(self, MainWindow):
    """Apply all user-visible texts and wire every button to its handler.

    Called once at the end of ``setupUi``.  Besides the usual text
    translation this method also performs all ``clicked.connect`` wiring,
    sets button colors and hides the widgets that only appear during a
    running test.
    """
    MainWindow.setWindowTitle(_translate("MainWindow", "MET", None))
    # Main action buttons of the configuration tab.
    self.pushButton.setText(_translate("MainWindow", "Iniciar Ensaio", None))
    self.pushButton.setStyleSheet('color: Blue')
    self.pushButton.clicked.connect(self.iniciar)
    self.button_webcam.setText(_translate("MainWindow", "Imagem capture", None))
    self.button_webcam.clicked.connect(self.webcamcapture)
    # Load-cell tab buttons (add / select / edit-calibrate).
    self.bcal.setText(_translate("MainWindow", "Adicionar Nova", None))
    self.bcal.clicked.connect(self.add_nova)
    self.scal.setText(_translate("MainWindow", "Selecionar Celula", None))
    self.scal.clicked.connect(self.selecionar)
    self.ecal.setText(_translate("MainWindow", "Editar/Calibrar", None))
    self.ecal.clicked.connect(self.Editar)
    # Test-type radio buttons; tension is the default.
    self.label.setText(_translate("MainWindow", "Tipo de ensaio:", None))
    self.checkBox.setText(_translate("MainWindow", "Ensaio de tração", None))
    self.checkBox.setChecked(True)
    self.checkBox_2.setText(_translate("MainWindow", "Ensaio de compressão", None))
    self.label_2.setText(_translate("MainWindow", "Velocidade de ensaio", None))
    self.label_3.setText(_translate("MainWindow", "mm/min", None))
    # Automatic stop-condition labels.
    self.checkBox_3.setText(_translate("MainWindow", "Parada queda de Força ",None))
    self.label_4.setText(_translate("MainWindow", "Parada automatica", None))
    self.label_5.setText(_translate("MainWindow", "% de Força", None))
    self.checkBox_4.setText(_translate("MainWindow", "Parada de Força maxima", None))
    self.label_6.setText(_translate("MainWindow", "Força maxima", None))
    self.checkBox_5.setText(_translate("MainWindow", "Parada deslocamento", None))
    self.label_7.setText(_translate("MainWindow", "Deslocamento Máximo", None))
    self.label_8.setText(_translate("MainWindow", "N", None))
    self.label_9.setText(_translate("MainWindow", "mm", None))
    # Manual jog frame.
    self.label_10.setText(_translate("MainWindow", "Ajustes Manuais", None))
    self.desl.setText(_translate("MainWindow", "deslocamento", None))
    self.label_11.setText(_translate("MainWindow", "Velocidade do ajuste", None))
    self.label_12.setText(_translate("MainWindow", "mm/min", None))
    self.deslm.setText(_translate("MainWindow", "mm", None))
    # Reports / browse buttons.
    self.botaodiretorio.setText(_translate("MainWindow", "Relatórios", None))
    self.botaodiretorio.clicked.connect(self.relatorios)
    self.botaodiretorio.show()
    self.botaobrowser.setText(_translate("MainWindow", "Browser Ensaio", None))
    self.botaobrowser.clicked.connect(self.func_browser)
    self.botaobrowser.show()
    # Jog motion buttons.
    self.pushButton_2.setText(_translate("MainWindow", "Descer", None))
    self.pushButton_2.clicked.connect(self.Descer)
    self.pushButton_3.setText(_translate("MainWindow", "Subir", None))
    self.pushButton_3.clicked.connect(self.Subir)
    self.parar_ajuste.setText(_translate("MainWindow", "Parar", None))
    self.parar_ajuste.clicked.connect(self.Parando2)
    # Specimen cross-section labels.
    self.label_13.setText(_translate("MainWindow", "Àrea de Seção do Corpo de Prova", None))
    self.checkBox_6.setText(_translate("MainWindow", "Retangular", None))
    self.checkBox_7.setText(_translate("MainWindow", "Tubo", None))
    self.checkBox_8.setText(_translate("MainWindow", "Cilíndrico", None))
    self.label_15.setText(_translate("MainWindow", "L", None))
    self.label_16.setText(_translate("MainWindow", "l", None))
    self.label_17.setText(_translate("MainWindow", "L", None))
    self.label_18.setText(_translate("MainWindow", "D", None))
    self.label_19.setText(_translate("MainWindow", "D", None))
    self.label_20.setText(_translate("MainWindow", "H", None))
    # Emergency buttons (red) on both tabs share the same handler.
    self.pushButton_4.setText(_translate("MainWindow", "Emergência", None))
    self.pushButton_4.setStyleSheet('color: red')
    self.pushButton_4.clicked.connect(self.Parando)
    self.emergrafic.setText(_translate("MainWindow", "Emergência", None))
    self.emergrafic.setStyleSheet('color: red')
    self.emergrafic.clicked.connect(self.Parando)
    self.Config.setTabText(self.Config.indexOf(self.Config_2), _translate("MainWindow", "Configurações", None))
    # Graph-tab labels and buttons.
    self.label_21.setText(_translate("MainWindow", "Deslocamento", None))
    self.label_22.setText(_translate("MainWindow", "Força", None))
    self.label_23.setText(_translate("MainWindow", "mm", None))
    self.label_24.setText(_translate("MainWindow", "N", None))
    self.pushButton_5.setText(_translate("MainWindow", "Reset Gráfico", None))
    self.pushButton_5.clicked.connect(self.resetgrafic)
    self.pushButton_6.setText(_translate("MainWindow", "Cancelar Test", None))
    self.pushButton_6.clicked.connect(self.cancelartestes)
    self.pushButton_7.setText(_translate("MainWindow", "Gerar Relátorio", None))
    self.pushButton_7.clicked.connect(self.gerarpdf)
    self.Config.setTabText(self.Config.indexOf(self.Grafic), _translate("MainWindow", "Gráfico", None))
    self.Config.setTabText(self.Config.indexOf(self.Calibra), _translate("MainWindow", "Celula de Carga", None))
    # Cancel/report buttons only appear once a test is running.
    self.pushButton_6.hide()
    self.pushButton_7.hide()
    #Celula de Carga
    self.obs.setText(_translate("MainWindow", "OBS: Retire as Garras/Mordentes da Celula de Carga, Não deixe nada apenas a Celula de Carga, Clique em Iniciar.", None))
    self.combo.hide()
    self.pushButton_4.setVisible(False)
    self.emergrafic.setVisible(False)
    self.combo.hide()
    #self.label12.hide()
class MyForm(QtWidgets.QMainWindow):
    """Main application window wrapping the generated ``Ui_MainWindow``."""

    def __init__(self, parent=None):
        QtWidgets.QWidget.__init__(self, parent)
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)

    def closeEvent(self, event):
        """Ask for confirmation before quitting; stop the motor on exit."""
        result = QtWidgets.QMessageBox.question(self,
            "Confirmar Fechamento do Programa...",
            "Você deseja realmente sair do programa ?",
            QtWidgets.QMessageBox.Yes| QtWidgets.QMessageBox.No)
        event.ignore()
        if result == QtWidgets.QMessageBox.Yes:
            # Fix: the original `flag2 = 0` created a useless *local*
            # variable; the module-level flag2 (watched by the
            # ServerThread acquisition loop, which declares
            # `global flag2`) was never cleared on shutdown.
            global flag2
            flag2 = 0
            Motor.Parar()
            event.accept()
class ServerThread(QtCore.QThread):
    """Background acquisition loop for a running test.

    Polls the load cell (``celula.getvalue``) while the module-level
    ``flag2`` is 1, derives displacement from elapsed time and the test
    speed, and emits:
      * ``UPsig`` (list) — periodic plot updates
        ``[deslocamento, Forca, y_max, x_max, deslocamentos, forcas]``;
      * ``Stopsig`` (int) — when one of the automatic stop conditions
        (max displacement, max force, or force-drop percentage) fires.
    """
    UPsig = QtCore.pyqtSignal(list)
    Stopsig =QtCore.pyqtSignal(int)
    def __init__(self, parent=None):
        QtCore.QThread.__init__(self)
    def start_server(self):
        """Run the acquisition loop until ``flag2`` is cleared or a stop
        condition triggers.  ``flag`` guards against re-entry."""
        # All state shared with the GUI thread lives in module globals.
        global flag
        global VelocidadeEn
        global qforca
        global maxforca
        global maxdeslocamento
        global tempinicioteste
        global forcas
        global deslocamentos
        tempo = time.time()
        if(flag == 0):
            global flag2
            while(flag2 == 1):
                # ~500 ms sampling period; keeps the Qt event loop alive.
                QtTest.QTest.qWait(500)
                flag =1
                Forca = celula.getvalue()
                if Forca == None:
                    # Sensor read failed (e.g. HX711 disconnected): treat as 0.
                    Forca = 0
                    pass
                    '''self.aviso= QtWidgets.QMessageBox()
                    self.aviso.setText("Por Favor verifique o HX711, aparentemente o mesmo encontra-se desconnectado !")
                    self.aviso.addButton(QtWidgets.QMessageBox.Yes)
                    result1 = self.aviso.exec_()'''
                else:
                    # Displacement (mm) = speed (mm/min) * elapsed minutes.
                    tempodecorrido = (time.time() - tempinicioteste)/60
                    deslocamento = (float(VelocidadeEn))*float(tempodecorrido)
                    deslocamentos.append(deslocamento)
                    # Force stored in newtons (sensor value is kgf * 9.8).
                    forcas.append((float(Forca)*9.8))
                    forcaanterior = forcas[-1]
                    maiorvalor = forcas.copy()
                    maiorvalor.sort()
                    # Throttled plot update (~every 0.8 s).
                    if( time.time()- tempo > 0.8):
                        lista = [float(deslocamento),float(Forca),float(maiorvalor[-1])+30,float(deslocamentos[-1])+30,deslocamentos,forcas]
                        #self.emit(QtCore.SIGNAL("UP"), lista)
                        self.UPsig.emit(lista)
                        tempo = time.time()
                    # Stop condition: maximum displacement reached.
                    if( flag2 == 1 and maxdeslocamento != None and float(maxdeslocamento) != 0 and float(deslocamento) >= float(maxdeslocamento)):
                        flag2 =0
                        #self.emit(QtCore.SIGNAL("Parando"), 1)
                        self.Stopsig.emit(1)
                        lista = [float(deslocamento),float(Forca),maiorvalor[-1]+10,deslocamentos[-1]+10,deslocamentos,forcas]
                        #self.emit(QtCore.SIGNAL("UP"), lista)
                        self.UPsig.emit(lista)
                    # Stop condition: maximum force reached.
                    # NOTE(review): Stopsig is emitted twice on this path —
                    # presumably redundant; confirm before changing.
                    if(flag2 == 1 and maxforca != None and float(maxforca) != 0 and float(Forca) >= float(maxforca)):
                        #self.emit(QtCore.SIGNAL("Parando"), 1)
                        self.Stopsig.emit(1)
                        flag2 =0
                        #self.emit(QtCore.SIGNAL("Parando"), 1)
                        self.Stopsig.emit(1)
                        lista = [float(deslocamento),float(Forca),maiorvalor[-1]+10,deslocamentos[-1]+10,deslocamentos,forcas]
                        self.UPsig.emit(lista)
                        #self.emit(QtCore.SIGNAL("UP"), lista)
                    # Stop condition: force dropped more than qforca % below
                    # the previous reading (specimen rupture); take 10 extra
                    # fast samples before signalling the stop.
                    if(flag2 == 1 and qforca != None and float(qforca) != 0 and (float(forcaanterior)*(1 - (float(qforca)/100))) > Forca ):
                        flag2 =0
                        for i in range(0,10):
                            QtTest.QTest.qWait(20)
                            Forca = celula.getvalue()
                            tempodecorrido = (time.time() - tempinicioteste)/60
                            deslocamento = (float(VelocidadeEn))*float(tempodecorrido)
                            deslocamentos.append(deslocamento)
                            forcas.append((float(Forca)*9.8))
                            forcaanterior = forcas[-1]
                            maiorvalor = forcas.copy()
                            maiorvalor.sort()
                        #self.emit(QtCore.SIGNAL("Parando"), 1)
                        self.Stopsig.emit(1)
                        lista = [float(deslocamento),float(Forca),maiorvalor[-1]+10,deslocamentos[-1]+10,deslocamentos,forcas]
                        self.UPsig.emit(lista)
                        #self.emit(QtCore.SIGNAL("UP"), lista)
            # Loop finished: allow a future acquisition to start.
            flag =0
    def run(self):
        self.start_server()
def Area(Retangulo_A, Retangulo_B, Tubo_L, Tubo_D, Cilindro_D, Cilindro_H):
    """Compute the specimen cross-section area and remember its shape.

    Exactly one shape's pair of dimensions is expected to be non-None:
      * rectangle: ``A * B``
      * tube: lateral surface ``pi * L * D``
      * cylinder: lateral surface ``pi * D * H`` plus the two circular
        ends ``2 * (pi * D**2 / 4)``

    For backward compatibility the results are still published through
    the module globals ``AreaCorpoProva`` and ``FormatoCorpoProva``; they
    are now also returned as ``(area, formato)`` for direct use
    (the original returned None, so callers that ignore the return value
    are unaffected).
    """
    global AreaCorpoProva
    global FormatoCorpoProva
    FormatoCorpoProva = ""
    AreaCorpoProva = 0.0
    if Retangulo_A is not None and Retangulo_B is not None:
        AreaCorpoProva = float(Retangulo_A) * float(Retangulo_B)
        # Fix: this branch never recorded the shape, unlike the other two,
        # leaving FormatoCorpoProva empty for rectangular specimens.
        FormatoCorpoProva = "Retangular"
    if Tubo_L is not None and Tubo_D is not None:
        AreaCorpoProva = math.pi * float(Tubo_L) * float(Tubo_D)
        FormatoCorpoProva = "Tubo"
    if Cilindro_D is not None and Cilindro_H is not None:
        # Lateral surface plus the two end circles.
        AreaCorpoProva = (math.pi * (float(Cilindro_D) * float(Cilindro_H))) + 2 * (math.pi * (float(Cilindro_D) * float(Cilindro_D)) / 4)
        FormatoCorpoProva = "Cilíndrico"
    return AreaCorpoProva, FormatoCorpoProva
def lotes(nome, x1, x2):
    """Register test number ``contando`` in the global ``testes`` list.

    Appends a record holding the lot name (suffixed with the running
    counter), the specimen area/shape, the test speed/type and the
    ``x1``/``x2`` markers, then advances the global counter.
    """
    global contando
    global testes
    global AreaCorpoProva
    global VelocidadeEn
    global tipodeensaio
    global FormatoCorpoProva
    testes.append({})
    # Fill the slot for the current test in one shot.
    testes[contando].update({
        "nome": nome + str(contando),
        "area": AreaCorpoProva,
        "vel": VelocidadeEn,
        "formato": FormatoCorpoProva,
        "tipo": tipodeensaio,
        "cont": contando,
        "x1": x1,
        "x2": x2,
    })
    contando += 1
if __name__ == "__main__":
    # Application entry point: create the Qt application, show the main
    # window and hand control to the Qt event loop until the user quits.
    app =QtWidgets.QApplication(sys.argv)
    myapp = MyForm()
    myapp.show()
    sys.exit(app.exec_())
| [
"edresson1@gmail.com"
] | edresson1@gmail.com |
eeadb0d59f0e5edcc75362a413d612ff5264e146 | 34c4d65b67a4e8804bb608bdda825dfdbf553b7f | /tmall/apps/home/views.py | 165d9615f1c194f1bc7c709769edd43e0ce52c75 | [] | no_license | xiaofloat/Tmall | 9778133e3d530f5978e862769563216c8fd9883a | f344812609cc5443c285323adf23e6f6b3da69c3 | refs/heads/master | 2020-03-30T19:21:21.766008 | 2018-10-04T08:24:39 | 2018-10-04T08:24:39 | 151,538,577 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,047 | py | from django.http import HttpResponse
from django.shortcuts import render
# Create your views here.
from apps.home.models import Navigation, Category, Banner, Shop, Review, Property, PropertyValue, ShopCar
def index(request):
# 导航
navigations = Navigation.objects.all()
# 一级分类
categorys = Category.objects.all()
for category in categorys:
# 二级分类
category.subs = category.submenu_set.all()
for sub in category.subs:
# 获取二级菜单分类的数据
sub.subs2 = sub.submenu2_set.all()
category.shops = category.shop_set.all()[0:5]
# 获取商品的图片
for shop in category.shops:
shop.img = shop.shopimage_set.filter(type='type_single').order_by('shop_img_id').first()
banners = Banner.objects.all().order_by('banner_id')
count = 0
if request.session.get('user'):
count = ShopCar.objects.filter(user_id=request.session.get('user').uid, status=1).all().count()
request.session['count'] = count
return render(request, 'index.html', {
'navigations': navigations,
'banners': banners,
'categorys': categorys,
})
def shop_detail(request, id):
try:
shop = Shop.objects.get(shop_id=id)
# 商品的图片信息
shop.imgs = shop.shopimage_set.all()
# 评论数
review_count = Review.objects.filter(shop_id=id).count()
# 属性
properties = Property.objects.filter(cate__cate_id=shop.cate.cate_id)
for property in properties:
# 属性值
property.pro_value = property.propertyvalue_set.get(shop_id=id, property_id=property.property_id)
return render(request, 'shop_detail.html', {
'shop': shop,
'review_count': review_count,
'properties': properties,
})
except Shop.DoesNotExist as e:
pass
except Shop.MultipleObjectsReturned as e:
pass
| [
"591006214@qq.com"
] | 591006214@qq.com |
27787db5508ea4f431a85ff0106cbd04decd963f | fe0a71710a1c2981bf514b673de5e8d21a9d6b6b | /e12.py | 291a565e94380730b0904229690635cc27df228e | [] | no_license | s-c-p/pea | 3efd92de83c5950a404af17be3dd46a474014629 | 8cb717c84531920d6689eb9b747ce182807abfb9 | refs/heads/master | 2020-03-18T19:53:03.430199 | 2018-05-28T16:25:43 | 2018-05-28T16:25:43 | 135,183,498 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,662 | py | from pdb import set_trace as st
def dirty_prime_gen(seed1, seed2):
x = (seed1 + seed2) // 6
if x % 2 == 0:
k = x // 2
else:
k = x // 2 + 1
yield 6*k + 1
k += 1
while True:
yield 6*k - 1
yield 6*k + 1
k += 1
def trngl_num_gen():
n = 1
while True:
yield (n*(n+1))//2
n += 1
def num_is_trngl(num):
d = (8*num + 1)**0.5
if int(d) == d:
return (d - 1)//2
return False
def is_prime(test, known):
for x in known:
if x > test:
return False
if test % x == 0:
return False
return True
def prime_list(lim, known=None):
if known == None:
known = [2, 3, 5, 7]
dp = dirty_prime_gen(known[-2], known[-1])
while len(known) < lim:
sp = next(dp)
if is_prime(sp, known):
known.append(sp)
return known
def prime_factorize(num, known_primes):
pf = dict()
for p in known_primes:
if p > num:
break
n = num
times = int()
while n % p == 0:
n /= p
times += 1
pf[p] = times
return {k:v for k, v in pf.items() if v != 0}
def the_func(dct):
# (12, 504, {2: 3, 3: 2, 7: 1}),
# 504 = 2*2*2 * 3*3 * 7
# 504 = product of 6 distinct prime numbers - a, b, c, d, e, f
# ans = a * b * c * d**2 * e**2 * f**6
chars = "abcdefghijklmnopqrstuvwxyz"
grp_size = sum(dct.values())
pph = list(chars[:grp_size]) # primeNum place holder
i = int()
raw_eqn = list()
for pwr, reps in dct.items():
for _ in range(reps):
cpph = pph[i]
frag = "%s**%d" % (cpph, pwr-1)
raw_eqn.append(frag)
i += 1
code = "def x(%s): return %s" % (", ".join(pph), "*".join(raw_eqn))
st()
return grp_size, eval(code)
def find_least_f_num(min, max):
ans = list()
knp = prime_list(max) # misuse but okay
for num in range(min, max+1):
fzn = prime_factorize(num, knp)
nf = sum(fzn.values())
ans.append((nf, num, fzn))
return sorted(ans, key=lambda x: x[0])
return ans
from pprint import pprint as pp
research = find_least_f_num(501, 600)
pp(research)
exit()
# (512, 576, 540, 567, 600)
from itertools import product
def get_trngl_num_with_n_factors(n):
knp = prime_list(n) # misuse but ok
pf = prime_factorize(n, knp)
grp_size, functor = the_func(pf)
knp = knp[:grp_size*2]
for group in product(knp, grp_size):
x = functor(group)
if num_is_trngl(x):
print(x)
break
return
#get_trngl_num_with_n_factors(504)
| [
"prasadsatish45@binkmail.com"
] | prasadsatish45@binkmail.com |
f6e33093ee08e98ba7188ecf58aa4a5ab4947a42 | 675d2f12f45e59b585dcbe9624ca276ce0c03228 | /tests/testcustomer.py | 828598a4ad6db905f09a8c3456467fbdf16d0818 | [] | no_license | h4ck3rm1k3/arbk | 32b99c713641c2e657555af1d3aa85255fc3f63d | e3fa702cc8ecac6d534041aa53d00dff402a87ba | refs/heads/master | 2021-01-16T17:40:11.593235 | 2012-05-03T06:27:58 | 2012-05-03T06:27:58 | 3,917,153 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 643 | py | import sys
# Legacy Django bootstrap: make the project importable, then bind the
# settings module before touching any ORM models.
sys.path.append (".")
from django.core.management import setup_environ
import settings
setup_environ(settings)
# Model imports must happen only after setup_environ() has run.
from arbkdb.models import ArbkCompany
from arbkdb.models import ArbkBusinesscategory
from arbkdb.models import ArbkLegalentity
cat = ArbkBusinesscategory.objects.all()[0]
# Python 2 print statements: this smoke-test script predates Python 3.
print ArbkCompany.objects.count()
x= ArbkCompany.objects.all()
print x
c = x[0]
# Dump a few fields/relations of the first company to eyeball the schema.
print "company id %s " % c.id
print "company name %s " % c.name
print "category %s " % c.primarycategory
print "owner name %s " % c.owner.name
print "owner id %d " % c.owner.id
print "authorized persons %s " % c.authpeople.all()
print "owners %s " % c.owners.all()
| [
"JamesMikeDuPont@googlemail.com"
] | JamesMikeDuPont@googlemail.com |
554c35e910983da8506bebeaa917eb1f0be2f7b6 | d3330aebb59cb8f79e5a6e27624816d499861849 | /7 - Repetições (for e while)/desafio65.py | c697f08c992b3ba569ac536e34feb8ee3dab5f47 | [] | no_license | WayneRocha/Desafios_python-CeV | 5716cd9910f79292d9d329e6d7bb1f8fec482241 | 707296c07c4499ef1343590a2f9a41206aeac1dd | refs/heads/main | 2023-01-30T09:40:54.223359 | 2020-12-14T01:48:33 | 2020-12-14T01:48:33 | 321,194,978 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 591 | py | # Ler números perguntando se o úsuario quer continuar
# At the end, show the average of all numbers read, plus the largest and the
# smallest.  (User-facing prompts stay in Portuguese: they are program output.)
continuar = 'S'
cont = media = 0
# `media` first accumulates the running sum and only becomes the average
# after the loop.  The loop keeps going while the answer contains an 'S'
# ("sim"/yes), case-insensitively thanks to .upper().
while 'S' in continuar:
    n = int(input('Digite um número: '))
    continuar = str(input('Quer continuar?: ')).upper()
    cont += 1
    media += n
    if cont == 1:
        # The first number seen initialises both extremes.
        maior = n
        menor = n
    if n > maior:
        maior = n
    elif n < menor:
        menor = n
print('\n', '-=' * 20, '\n')
# Turn the accumulated sum into the average (cont >= 1 here by construction).
media = media / cont
print(f'Média: {media:.2f}\nMaior números: {maior}\nMenor número: {menor}')
| [
"dev.waynerocha@gmail.com"
] | dev.waynerocha@gmail.com |
f97ab01cab32fae8969bd7eae09c4c02fbf750e2 | ec72d31a75eed12eff34dd18ce0b4d30721a533e | /GUI/PlotWithShading.py | 8db001f52c7905f47889a74d06289d77d37f5aed | [] | no_license | esmam/MRATPython27 | 6a613220701a3c2779b86319a94ead497aaf18a4 | 1891c583c3d23fa9f60a7ceb5e9fd9ab23677abb | refs/heads/master | 2021-01-10T12:07:00.491762 | 2015-10-20T04:08:45 | 2015-10-20T04:08:45 | 44,581,402 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,446 | py | import sys
import mainwindow
import random
import numpy as np
import matplotlib
matplotlib.use("Qt5Agg")
from PyQt5 import QtCore
from PyQt5.QtWidgets import QApplication, QMainWindow, QMenu, QVBoxLayout, QSizePolicy, QMessageBox, QWidget
from numpy import arange, sin, pi
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.patches as mpatches
#import Ui_MainWindow from mainwindow
from mainwindow import *
class MyMplCanvas(FigureCanvas):
    """Ultimately, this is a QWidget (as well as a FigureCanvasAgg, etc.).

    Base canvas: creates an empty matplotlib Figure and wires it into the
    Qt widget hierarchy; subclasses override compute_initial_figure().
    """
    def __init__(self, parent=None, width=7, height=4, dpi=100):
        fig = Figure(figsize=(width, height), dpi=dpi)
        # NOTE(review): axes creation is commented out below, so code that
        # references self.axes will raise AttributeError -- confirm which
        # drawing path is actually exercised before re-enabling.
        #self.axes = fig.add_subplot(111)
        #self.axes = plt.subplots(1,1, sharex=True)(111)
        # We want the axes cleared every time plot() is called
        #self.axes.hold(False)
        #self.axes.hold(False)
        #self.compute_initial_figure()
        #
        # Initialise the Qt side, parent the widget, and let the canvas
        # grow with its container.
        FigureCanvas.__init__(self, fig)
        self.setParent(parent)
        FigureCanvas.setSizePolicy(self,
                                   QSizePolicy.Expanding,
                                   QSizePolicy.Expanding)
        FigureCanvas.updateGeometry(self)
    def compute_initial_figure(self):
        # Intentionally empty: subclasses draw their initial content here.
        pass
class MyStaticMplCanvas(MyMplCanvas):
    """Simple canvas with a sine plot."""
    #fig, (ax1) = plt.subplots(1,1, sharex=True)
    #axes = plt.subplots(1,1, sharex=True)
    def update(self,x,y,y1):
        # NOTE(review): self.axes is never created (its creation is
        # commented out in MyMplCanvas.__init__), so calling this method
        # raises AttributeError.  It also shadows QWidget.update().
        # Axes.hold() was removed in newer matplotlib -- confirm the
        # pinned matplotlib version before relying on this path.
        t = arange(-700, 400,600)
        s = np.array([[-1, 2, 3,9],[4, 5, 6,10]], np.int32)
        s1 = np.array([[-1, 2, 3,9],[4, 5, 6,10]], np.int32)
        s1 = np.array([y,y1], np.float32)
        self.axes.plot(x,y,'r',alpha=1.00,linewidth=2)
        self.axes.hold(True)
        self.axes.plot(x,y1,color='blue', alpha=1.00,linewidth=2)
        #self.axes.fill_between(x, y, y1, where=y>=y1, facecolor='green', interpolate=True)
        self.axes.set_ylabel('value')
        self.axes.set_xlabel('Time in (s)')
    def compute_initial_figure1(self,start,end,step,y,y1, cond1name,cond2name):
        # Plots two conditions (y vs y1) over a time axis built from
        # start/end/step, labelling them cond1name/cond2name.  Draws via
        # the module-level pyplot state rather than this widget's figure.
        self.plt = plt
        # NOTE(review): t, s and the first s1 below are computed but unused.
        t = arange(-700, 400,600)
        s = np.array([[-1, 2, 3,9],[4, 5, 6,10]], np.int32)
        x = arange(start-.001,end,step)
        s1 = np.array([[-1, 2, 3,9],[4, 5, 6,10]], np.int32)
        s1 = np.array([y,y1], np.float32)
        #l1 = self.axes.plot(x,y,'r',alpha=1.00,linewidth=2)
        #self.axes.hold(True)
        #l2 = self.axes.plot(x,y1,color='blue', alpha=1.00,linewidth=2)
        l3, = self.plt.plot(x,y,'r',alpha=1.00,linewidth=2)
        plt.hold(True)
        l4, = self.plt.plot(x,y1,color='blue', alpha=1.00,linewidth=2)
        #self.axes.legend((l1, l2), ('Line', 'Line', 'center'))
        #self.plt.legend(handles = [l3], loc = 1) #) #, ('Line', 'Line', 'center'))
        #self.plt.legend(handles = [l4], loc = 2) #)
        self.plt.legend((l3, l4), (cond1name, cond2name))
        #(handles=[line2], loc=4)
        #self.axes.fill_between(x, y, y1, where=y>=y1, facecolor='green', interpolate=True)
        #self.axes.set_ylabel('nAm')
        #self.axes.set_xlabel('Time in (ms)')
        self.plt.title("Analysis results for region:")
        self.plt.ylabel('nAm')
        self.plt.xlabel('Time in (ms)')
        #self.resize((20,10))
        self.plt.show()
        #self.axes.legend((l1, l2), ('Line', 'Line', 'center'))
        '''x = np.array([1,2])
        data = np.array([10,8])
        err = np.array([2,1])
        b1 = self.axes.bar(x-.2,2*err,0.4,color='b',bottom=data - err,alpha=0.3)
        self.axes.legend([(l1,l2)], ['nice legend graphic'],shadow=True,fancybox=True,numpoints=1)
        #self.axes.axis([0,3,0,15])'''
    def compute_initial_figure(self):
        # NOTE(review): t and s are computed but never drawn (plot call is
        # commented out), so this override currently draws nothing.
        t = arange(-700, 400,600)
        s = np.array([[-1, 2, 3,9],[4, 5, 6,10]], np.int32)
        #self.axes.plot(t,s)
class MyDynamicMplCanvas(MyMplCanvas):
    """Canvas that redraws itself with fresh random data once per second."""

    def __init__(self, *args, **kwargs):
        MyMplCanvas.__init__(self, *args, **kwargs)
        # Periodic repaint: fire update_figure() every 1000 ms for the
        # lifetime of this widget (the timer is parented to the canvas).
        refresh = QtCore.QTimer(self)
        refresh.timeout.connect(self.update_figure)
        refresh.start(1000)

    def compute_initial_figure(self):
        self.axes.plot([0, 1, 2, 3], [1, 2, 0, 4], 'r')

    def update_figure(self):
        # Four fresh random integers in [0, 10], plotted over fixed x values.
        samples = [random.randint(0, 10) for _ in range(4)]
        self.axes.plot([0, 1, 2, 3], samples, 'r')
        self.draw()
class ApplicationWindow(QMainWindow):
    """Main window: menu bar (File/Help) plus a single static plot canvas."""
    def __init__(self):
        QMainWindow.__init__(self)
        self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
        self.setWindowTitle("application main window")
        # File menu with a Ctrl+Q quit shortcut.
        self.file_menu = QMenu('&File', self)
        self.file_menu.addAction('&Quit', self.fileQuit,
                                 QtCore.Qt.CTRL + QtCore.Qt.Key_Q)
        self.menuBar().addMenu(self.file_menu)
        self.help_menu = QMenu('&Help', self)
        self.menuBar().addSeparator()
        self.menuBar().addMenu(self.help_menu)
        self.help_menu.addAction('&About', self.about)
        # Central widget holds a vertical layout with the plot canvas; the
        # dynamic canvas is currently disabled (commented out).
        self.main_widget = QWidget(self)
        l = QVBoxLayout(self.main_widget)
        sc = MyStaticMplCanvas(self.main_widget, width=1, height=1, dpi=50)
        #dc = MyDynamicMplCanvas(self.main_widget, width=5, height=4, dpi=100)
        l.addWidget(sc)
        #l.addWidget(dc)
        self.main_widget.setFocus()
        self.setCentralWidget(self.main_widget)
        self.statusBar().showMessage("All hail matplotlib!", 2000)
    def fileQuit(self):
        # Close the window; WA_DeleteOnClose then tears it down.
        self.close()
    def closeEvent(self, ce):
        self.fileQuit()
    def about(self):
        QMessageBox.about(self, "About",
                          """embedding_in_qt5.py example
Copyright 2015 BoxControL
This program is a simple example of a Qt5 application embedding matplotlib
canvases. It is base on example from matplolib documentation, and initially was
developed from Florent Rougon and Darren Dale.
http://matplotlib.org/examples/user_interfaces/embedding_in_qt4.html
It may be used and modified with no restriction; raw copies as well as
modified versions may be distributed without limitation."""
                          )
if __name__ == '__main__':
    # Build the Qt application and the main window.
    app = QApplication(sys.argv)
    aw = ApplicationWindow()
    aw.setWindowTitle("PyQt5 Matplot Example")
    # NOTE(review): aw.show() is commented out, so the window is created
    # but never displayed even though the event loop still runs.
    #aw.show()
    #sys.exit(qApp.exec_())
app.exec_() | [
"esmamansouri@esmas-MacBook-Pro.local"
] | esmamansouri@esmas-MacBook-Pro.local |
22fadaa89258cfd71fbb4ceb4ec2726f9103a2f5 | c6cb41d881fa5af1e7f4f7f569db706fc1d86169 | /denoise2.py | c373a7d633df6d5daf26d6d25f32f82d47d74ad7 | [] | no_license | GriffinKennedy/MSAN_501 | fc7503c606278dbcb1e8b913d6cb2a3c704cb411 | cfdfab5a40a13ef70d22da43bab42be342c8943c | refs/heads/master | 2021-01-19T19:56:53.911195 | 2017-08-23T18:48:00 | 2017-08-23T18:48:00 | 101,210,707 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 570 | py | from filter import *
def median(region):
    """Return the median of the values in *region* (typically the pixel
    neighbourhood handed over by the denoising filter).

    Sorts the list in place (same side effect as the original).  Returns
    None for an empty region; for an even number of values, the mean of
    the two middle values rounded to 3 decimal places.
    """
    if not region:
        return None
    region.sort()
    mid = len(region) // 2
    if len(region) % 2 == 1:
        return region[mid]
    # Even count: average the two middle values.  Floor division is
    # required for the indices -- plain "/" yields floats in Python 3 and
    # list indices must be ints (the original raised TypeError here).
    return round((region[mid - 1] + region[mid]) / 2, 3)
# Load the noisy input image named on the command line.  sys.argv is the
# whole argument vector -- the filename is sys.argv[1], not the list
# itself (passing the list raised TypeError).  `open` here presumably
# comes from filter's star import (the result has .show()); confirm.
img = open(sys.argv[1])
img.show()
denoised = filter(img, median)
denoised.show() | [
"griffinjeffreykennedy@gmail.com"
] | griffinjeffreykennedy@gmail.com |
2d943c418c9e86e3c9e93cc7b94d5d699da7d550 | 8c217046bf4e7e4ef56f418f9d16fa69567fb1a6 | /pythonds/arrayList.py | 47f9f5b28cc7aa20746778fa5911e83ee6b95526 | [] | no_license | ilovepeppa/python-data-structure | 8cf545dfaa9aa000e7a742e5302500a5e178574f | 7dfa1cdef10d6cd8a25b4ec03f4e90b38b3774aa | refs/heads/master | 2020-04-24T03:22:39.595838 | 2019-02-25T13:44:26 | 2019-02-25T13:44:26 | 171,668,020 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,313 | py | class Node:
    def __init__(self, data):
        # Payload plus a link to the successor node (None = end of list).
        self.data = data
        self.next = None
    def get_data(self):
        # Accessor for the node's payload.
        return self.data
    def get_next(self):
        # Accessor for the successor node (None at the tail).
        return self.next
    def set_data(self, data):
        # Replace the node's payload.
        self.data = data
    def set_next(self, next):
        # Relink this node to a new successor.  NOTE: the parameter name
        # shadows the builtin next(); kept for interface compatibility.
        self.next = next
class ArrayList:
    """Singly linked list with a Python-list-like string form.

    `add` and `search` are deliberately left unimplemented (exercise
    hooks for subclasses).
    """

    def __init__(self):
        self.head = None

    def is_empty(self):
        return self.head is None

    def size(self):
        # O(n) walk: count nodes until the chain ends.
        length = 0
        node = self.head
        while node:
            length += 1
            node = node.get_next()
        return length

    def remove(self, item):
        # Walk with a trailing pointer so the predecessor can be relinked
        # around the node being removed.
        node, trailer = self.head, None
        while node and node.get_data() != item:
            trailer, node = node, node.get_next()
        if node is None:
            # Item not present: silently do nothing (original behaviour).
            return
        if trailer is None:
            # Removing the head node.
            self.head = node.get_next()
        else:
            trailer.set_next(node.get_next())

    def add(self, item):
        raise NotImplementedError

    def search(self, item):
        raise NotImplementedError

    def __str__(self):
        values = []
        node = self.head
        while node:
            values.append(node.get_data())
            node = node.get_next()
        return str(values)
| [
"ilovecode@yeah.net"
] | ilovecode@yeah.net |
4aac5a9dd38805a25bbb1b7c3b169e1d8e7747e5 | 23333296ad85d46ee72bba4eb3da3ab3e9a2a9d3 | /amazon/base_amazon/amazon_filter.py | aee6e3025872c3478dd01ddec7d406841f0d2a43 | [] | no_license | khyatikansara/machine-learning | 54966cd756b851b26868cb9fa7bdcf6f339c8163 | 27b90708beeeea955e937c897fa755bcaf08a1fc | refs/heads/master | 2021-07-29T22:19:32.445093 | 2021-07-28T19:31:10 | 2021-07-28T19:31:10 | 183,756,375 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 151 | py | from django import template
register = template.library()
@register.filter()
def men_clothes(colthes,symbol='Men'):
colthes = "shirt"
return colthes | [
"khyatikansara@gmail.com"
] | khyatikansara@gmail.com |
aa0fcf34b2eaa1e03d6f22e5b2f5270cb3256928 | f3d3b3ea381f937109a9ef8c230078c35e5c8b65 | /Ether/upload/src/main/ether/parser/ETHERLexer.py | 3ebaa584918f8b8eee8c4f40e5b4baaeaa67e2ba | [] | no_license | TanDung2512/parserSmartContractPython | e268fa8d2971250aab1cd5f87638d19e70c2d8e3 | 6b6a154004e8d0b9957e9c96b99a4b0d28888e37 | refs/heads/master | 2020-04-26T18:07:02.905020 | 2019-03-04T11:46:37 | 2019-03-04T11:46:37 | 173,734,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,016 | py | # Generated from main/ether/parser/ETHER.g4 by ANTLR 4.7.1
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
def serializedATN():
    """Return the serialized ATN (lexer state machine) for the ETHER lexer.

    Machine generated by ANTLR 4.7.1 from ETHER.g4 -- do not edit the
    encoded strings below by hand; regenerate from the grammar instead.
    """
    with StringIO() as buf:
        buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\r")
        buf.write("\u00be\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7")
        buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r")
        buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23")
        buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30")
        buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36")
        buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%")
        buf.write("\4&\t&\3\2\6\2O\n\2\r\2\16\2P\3\2\3\2\3\3\3\3\3\4\3\4")
        buf.write("\3\5\3\5\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3")
        buf.write("\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\6\t")
        buf.write("s\n\t\r\t\16\tt\3\n\6\nx\n\n\r\n\16\ny\3\13\7\13}\n\13")
        buf.write("\f\13\16\13\u0080\13\13\3\13\3\13\7\13\u0084\n\13\f\13")
        buf.write("\16\13\u0087\13\13\3\f\3\f\3\r\3\r\3\16\3\16\3\17\3\17")
        buf.write("\3\20\3\20\3\21\3\21\3\22\3\22\3\23\3\23\3\24\3\24\3\25")
        buf.write("\3\25\3\26\3\26\3\27\3\27\3\30\3\30\3\31\3\31\3\32\3\32")
        buf.write("\3\33\3\33\3\34\3\34\3\35\3\35\3\36\3\36\3\37\3\37\3 ")
        buf.write("\3 \3!\3!\3\"\3\"\3#\3#\3$\3$\3%\3%\3&\3&\2\2\'\3\3\5")
        buf.write("\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\2\31\2\33")
        buf.write("\2\35\2\37\2!\2#\2%\2\'\2)\2+\2-\2/\2\61\2\63\2\65\2\67")
        buf.write("\29\2;\2=\2?\2A\2C\2E\2G\2I\2K\r\3\2!\5\2\13\f\17\17\"")
        buf.write("\"\5\2\62;CHch\3\2\62;\4\2C\\c|\5\2\62;C\\c|\4\2CCcc\4")
        buf.write("\2DDdd\4\2EEee\4\2FFff\4\2GGgg\4\2HHhh\4\2IIii\4\2JJj")
        buf.write("j\4\2KKkk\4\2LLll\4\2MMmm\4\2NNnn\4\2OOoo\4\2PPpp\4\2")
        buf.write("QQqq\4\2RRrr\4\2SSss\4\2TTtt\4\2UUuu\4\2VVvv\4\2WWww\4")
        buf.write("\2XXxx\4\2YYyy\4\2ZZzz\4\2[[{{\4\2\\\\||\2\u00a8\2\3\3")
        buf.write("\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2")
        buf.write("\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2")
        buf.write("\2\25\3\2\2\2\2K\3\2\2\2\3N\3\2\2\2\5T\3\2\2\2\7V\3\2")
        buf.write("\2\2\tX\3\2\2\2\13Z\3\2\2\2\r\\\3\2\2\2\17d\3\2\2\2\21")
        buf.write("n\3\2\2\2\23w\3\2\2\2\25~\3\2\2\2\27\u0088\3\2\2\2\31")
        buf.write("\u008a\3\2\2\2\33\u008c\3\2\2\2\35\u008e\3\2\2\2\37\u0090")
        buf.write("\3\2\2\2!\u0092\3\2\2\2#\u0094\3\2\2\2%\u0096\3\2\2\2")
        buf.write("\'\u0098\3\2\2\2)\u009a\3\2\2\2+\u009c\3\2\2\2-\u009e")
        buf.write("\3\2\2\2/\u00a0\3\2\2\2\61\u00a2\3\2\2\2\63\u00a4\3\2")
        buf.write("\2\2\65\u00a6\3\2\2\2\67\u00a8\3\2\2\29\u00aa\3\2\2\2")
        buf.write(";\u00ac\3\2\2\2=\u00ae\3\2\2\2?\u00b0\3\2\2\2A\u00b2\3")
        buf.write("\2\2\2C\u00b4\3\2\2\2E\u00b6\3\2\2\2G\u00b8\3\2\2\2I\u00ba")
        buf.write("\3\2\2\2K\u00bc\3\2\2\2MO\t\2\2\2NM\3\2\2\2OP\3\2\2\2")
        buf.write("PN\3\2\2\2PQ\3\2\2\2QR\3\2\2\2RS\b\2\2\2S\4\3\2\2\2TU")
        buf.write("\7*\2\2U\6\3\2\2\2VW\7+\2\2W\b\3\2\2\2XY\7.\2\2Y\n\3\2")
        buf.write("\2\2Z[\7=\2\2[\f\3\2\2\2\\]\5;\36\2]^\5\37\20\2^_\5\61")
        buf.write("\31\2_`\5\35\17\2`a\5\37\20\2ab\5=\37\2bc\5%\23\2c\16")
        buf.write("\3\2\2\2de\5;\36\2ef\5\37\20\2fg\5\61\31\2gh\5\35\17\2")
        buf.write("hi\5=\37\2ij\5\63\32\2jk\5+\26\2kl\5\37\20\2lm\5\61\31")
        buf.write("\2m\20\3\2\2\2no\7\62\2\2op\7z\2\2pr\3\2\2\2qs\t\3\2\2")
        buf.write("rq\3\2\2\2st\3\2\2\2tr\3\2\2\2tu\3\2\2\2u\22\3\2\2\2v")
        buf.write("x\t\4\2\2wv\3\2\2\2xy\3\2\2\2yw\3\2\2\2yz\3\2\2\2z\24")
        buf.write("\3\2\2\2{}\t\4\2\2|{\3\2\2\2}\u0080\3\2\2\2~|\3\2\2\2")
        buf.write("~\177\3\2\2\2\177\u0081\3\2\2\2\u0080~\3\2\2\2\u0081\u0085")
        buf.write("\t\5\2\2\u0082\u0084\t\6\2\2\u0083\u0082\3\2\2\2\u0084")
        buf.write("\u0087\3\2\2\2\u0085\u0083\3\2\2\2\u0085\u0086\3\2\2\2")
        buf.write("\u0086\26\3\2\2\2\u0087\u0085\3\2\2\2\u0088\u0089\t\7")
        buf.write("\2\2\u0089\30\3\2\2\2\u008a\u008b\t\b\2\2\u008b\32\3\2")
        buf.write("\2\2\u008c\u008d\t\t\2\2\u008d\34\3\2\2\2\u008e\u008f")
        buf.write("\t\n\2\2\u008f\36\3\2\2\2\u0090\u0091\t\13\2\2\u0091 ")
        buf.write("\3\2\2\2\u0092\u0093\t\f\2\2\u0093\"\3\2\2\2\u0094\u0095")
        buf.write("\t\r\2\2\u0095$\3\2\2\2\u0096\u0097\t\16\2\2\u0097&\3")
        buf.write("\2\2\2\u0098\u0099\t\17\2\2\u0099(\3\2\2\2\u009a\u009b")
        buf.write("\t\20\2\2\u009b*\3\2\2\2\u009c\u009d\t\21\2\2\u009d,\3")
        buf.write("\2\2\2\u009e\u009f\t\22\2\2\u009f.\3\2\2\2\u00a0\u00a1")
        buf.write("\t\23\2\2\u00a1\60\3\2\2\2\u00a2\u00a3\t\24\2\2\u00a3")
        buf.write("\62\3\2\2\2\u00a4\u00a5\t\25\2\2\u00a5\64\3\2\2\2\u00a6")
        buf.write("\u00a7\t\26\2\2\u00a7\66\3\2\2\2\u00a8\u00a9\t\27\2\2")
        buf.write("\u00a98\3\2\2\2\u00aa\u00ab\t\30\2\2\u00ab:\3\2\2\2\u00ac")
        buf.write("\u00ad\t\31\2\2\u00ad<\3\2\2\2\u00ae\u00af\t\32\2\2\u00af")
        buf.write(">\3\2\2\2\u00b0\u00b1\t\33\2\2\u00b1@\3\2\2\2\u00b2\u00b3")
        buf.write("\t\34\2\2\u00b3B\3\2\2\2\u00b4\u00b5\t\35\2\2\u00b5D\3")
        buf.write("\2\2\2\u00b6\u00b7\t\36\2\2\u00b7F\3\2\2\2\u00b8\u00b9")
        buf.write("\t\37\2\2\u00b9H\3\2\2\2\u00ba\u00bb\t \2\2\u00bbJ\3\2")
        buf.write("\2\2\u00bc\u00bd\13\2\2\2\u00bdL\3\2\2\2\b\2Pty~\u0085")
        buf.write("\3\b\2\2")
        return buf.getvalue()
class ETHERLexer(Lexer):
    """Lexer for the ETHER mini-language (sendEth/sendToken statements).

    Machine generated by ANTLR 4.7.1 from ETHER.g4 -- regenerate from the
    grammar rather than editing the tables below by hand.
    """
    atn = ATNDeserializer().deserialize(serializedATN())
    decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
    # Token type constants (must match the grammar's token vocabulary).
    WS = 1
    LB = 2
    RB = 3
    COMA = 4
    SEMI = 5
    SENDETH = 6
    SENDTOKEN = 7
    ADDRESS = 8
    AMOUNT = 9
    TOKENID = 10
    ERRORTOK = 11
    channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
    modeNames = [ "DEFAULT_MODE" ]
    literalNames = [ "<INVALID>",
            "'('", "')'", "','", "';'" ]
    symbolicNames = [ "<INVALID>",
            "WS", "LB", "RB", "COMA", "SEMI", "SENDETH", "SENDTOKEN", "ADDRESS",
            "AMOUNT", "TOKENID", "ERRORTOK" ]
    ruleNames = [ "WS", "LB", "RB", "COMA", "SEMI", "SENDETH", "SENDTOKEN",
                  "ADDRESS", "AMOUNT", "TOKENID", "A", "B", "C", "D", "E",
                  "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P",
                  "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", "ERRORTOK" ]
    grammarFileName = "ETHER.g4"
    def __init__(self, input=None, output:TextIO = sys.stdout):
        super().__init__(input, output)
        self.checkVersion("4.7.1")
        self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
        self._actions = None
        self._predicates = None
| [
"kimtoan1998@gmail.com"
] | kimtoan1998@gmail.com |
bddcf65c29b45dbf206b1a7dff571681d0bc0e78 | 32880c94046c0219c2964c2a369c6af2b1c57b52 | /renderer/support/generate_kicad.py | 3e1b6147b9ac14c3916903a38aea866dbf62dcf7 | [] | no_license | mdeweerd/pogojig | 599dc10e3ac95ac2082ebc629780a8f50bd5c178 | 13a57211f0d0feb34b452b3e19be83a095707ed6 | refs/heads/master | 2023-04-07T15:35:55.071294 | 2019-11-12T11:24:29 | 2019-11-12T11:24:29 | 359,135,003 | 0 | 0 | null | 2021-04-18T12:19:09 | 2021-04-18T12:19:08 | null | UTF-8 | Python | false | false | 11,606 | py | #!/usr/bin/env python3
import os
import sys
import time
from os import path
from textwrap import dedent
import pkgutil
import subprocess
import xml.etree.ElementTree as xe
import ezdxf
__version__ = '0.1'
# Synthetic base values for KiCad timestamps/paths so that generated
# components get stable, non-colliding hex identifiers (base + pin index).
PIN_TS_BASE = 0x23420000
TEDIT_BASE = 0x23430000
PATH_BASE = 0x23440000
def sch_template(name, num_pins, yspace=200):
    """Return an Eeschema (.sch) schematic as a string with `num_pins`
    single-pin test-point connectors stacked vertically, `yspace` mil
    apart.  `name` becomes the schematic title.
    """
    templ = f'''
    EESchema Schematic File Version 5
    EELAYER 30 0
    EELAYER END
    $Descr A3 16535 11693
    encoding utf-8
    Sheet 1 1
    Title "{name}"
    Date "{time.strftime("%d %b %Y")}"
    Rev ""
    Comp ""
    Comment1 ""
    Comment2 ""
    Comment3 ""
    Comment4 ""
    Comment5 ""
    Comment6 ""
    Comment7 ""
    Comment8 ""
    Comment9 ""
    $EndDescr
    {{components}}
    $EndSCHEMATC
    '''
    # One $Comp block per pogo pin; the timestamp encodes the pin index so
    # the PCB side can match footprints to schematic symbols.
    components = []
    for i in range(num_pins):
        identifier = f'TP{i}'
        value = 'pogopin'
        x, y = 1000, 1000 + i*yspace
        components.append(dedent(f'''
            $Comp
            L Connector:Conn_01x01_Female {identifier}
            U 1 1 {PIN_TS_BASE + i:08X}
            P {x} {y}
            F 0 "{identifier}" H {x-50} {y+50} 50 0000 R CNN
            F 1 "{value}" H {x+50} {y} 50 0000 L CNN
            F 2 "Pogopin:AutogeneratedPogopinFootprint" H {x} {y} 50 0001 C CNN
            F 3 "~" H {x} {y} 50 0001 C CNN
            	1 {x} {y}
            	-1 0 0 1
            $EndComp
            ''').strip())
    # {{components}} in the template survives the f-string as {components}
    # and is filled in here.
    return dedent(templ).lstrip().format(components='\n'.join(components))
def pcb_template(outline, pins, annular=0.5):
    """Return a complete .kicad_pcb document as a string.

    outline -- list of ((x1, y1), (x2, y2)) segments for the Edge.Cuts layer
    pins    -- list of ((x, y), hole_dia) pogo-pin positions, all in mm
    annular -- annular ring width in mm added around each drill hole
    """
    # The outer template is an f-string, so {{net_defs}} etc. survive as
    # literal {net_defs} placeholders and are filled by .format() below.
    pcb_templ = f'''
    (kicad_pcb (version 20190605) (host pogojig "({__version__})")
      (general
        (thickness 1.6)
        (drawings {len(pins)})
        (tracks 0)
        (modules {len(pins)})
        (nets {len(pins)+1})
      )
      (page "A4")
      (layers
        (0 "F.Cu" signal)
        (31 "B.Cu" signal)
        (32 "B.Adhes" user)
        (33 "F.Adhes" user)
        (34 "B.Paste" user)
        (35 "F.Paste" user)
        (36 "B.SilkS" user)
        (37 "F.SilkS" user)
        (38 "B.Mask" user)
        (39 "F.Mask" user)
        (40 "Dwgs.User" user)
        (41 "Cmts.User" user)
        (42 "Eco1.User" user)
        (43 "Eco2.User" user)
        (44 "Edge.Cuts" user)
        (45 "Margin" user)
        (46 "B.CrtYd" user)
        (47 "F.CrtYd" user)
        (48 "B.Fab" user)
        (49 "F.Fab" user)
      )
      (setup
        (last_trace_width 0.25)
        (trace_clearance 0.2)
        (zone_clearance 0.508)
        (zone_45_only no)
        (trace_min 0.2)
        (via_size 0.8)
        (via_drill 0.4)
        (via_min_size 0.4)
        (via_min_drill 0.3)
        (uvia_size 0.3)
        (uvia_drill 0.1)
        (uvias_allowed no)
        (uvia_min_size 0.2)
        (uvia_min_drill 0.1)
        (max_error 0.005)
        (defaults
          (edge_clearance 0.01)
          (edge_cuts_line_width 0.05)
          (courtyard_line_width 0.05)
          (copper_line_width 0.2)
          (copper_text_dims (size 1.5 1.5) (thickness 0.3) keep_upright)
          (silk_line_width 0.12)
          (silk_text_dims (size 1 1) (thickness 0.15) keep_upright)
          (other_layers_line_width 0.1)
          (other_layers_text_dims (size 1 1) (thickness 0.15) keep_upright)
        )
        (pad_size 3.14159 3.14159)
        (pad_drill 1.41421)
        (pad_to_mask_clearance 0.051)
        (solder_mask_min_width 0.25)
        (aux_axis_origin 0 0)
        (visible_elements FFFFFF7F)
        (pcbplotparams
          (layerselection 0x010fc_ffffffff)
          (usegerberextensions false)
          (usegerberattributes false)
          (usegerberadvancedattributes false)
          (creategerberjobfile false)
          (excludeedgelayer true)
          (linewidth 0.100000)
          (plotframeref false)
          (viasonmask false)
          (mode 1)
          (useauxorigin false)
          (hpglpennumber 1)
          (hpglpenspeed 20)
          (hpglpendiameter 15.000000)
          (psnegative false)
          (psa4output false)
          (plotreference true)
          (plotvalue true)
          (plotinvisibletext false)
          (padsonsilk false)
          (subtractmaskfromsilk false)
          (outputformat 1)
          (mirror false)
          (drillshape 1)
          (scaleselection 1)
          (outputdirectory ""))
      )
      (net 0 "")
      {{net_defs}}
      (net_class "Default" "This is the default net class."
        (clearance 0.2)
        (trace_width 0.25)
        (via_dia 0.8)
        (via_drill 0.4)
        (uvia_dia 0.3)
        (uvia_drill 0.1)
        {{net_class_defs}}
      )
      {{module_defs}}
      {{edge_cuts}}
    )'''
    # One footprint module per pogo pin; tedit/tstamp/path encode the pin
    # index so they match the schematic symbols from sch_template().
    module_defs = []
    for i, pin in enumerate(pins):
        (x, y), hole_dia = pin # all dimensions in mm here
        pad_dia = hole_dia + 2*annular
        mod = f'''
        (module "Pogopin:AutogeneratedPogopinFootprint" (layer "F.Cu") (tedit {TEDIT_BASE + i:08X}) (tstamp {PIN_TS_BASE + i:08X})
          (at {x} {y})
          (descr "Pogo pin {i}")
          (tags "test point plated hole")
          (path "/{PATH_BASE + i:08X}")
          (attr virtual)
          (fp_text reference "TP{i}" (at 0 -{pad_dia/2 + 1}) (layer "F.SilkS")
            (effects (font (size 1 1) (thickness 0.15)))
          )
          (fp_text value "pogo pin {i}" (at 0 {pad_dia/2 + 1}) (layer "F.Fab")
            (effects (font (size 1 1) (thickness 0.15)))
          )
          (fp_text user "%R" (at 0 -{pad_dia/2 + 1}) (layer "F.Fab")
            (effects (font (size 1 1) (thickness 0.15)))
          )
          (fp_circle (center 0 0) (end {pad_dia} 0) (layer "F.CrtYd") (width 0.05))
          (fp_circle (center 0 0) (end 0 -{pad_dia}) (layer "F.SilkS") (width 0.12))
          (pad "1" thru_hole circle (at 0 0) (size {pad_dia} {pad_dia}) (drill {hole_dia}) (layers *.Cu *.Mask)
            (net {i+1} "pogo{i}"))
        )'''
        module_defs.append(mod)
    # Board outline segments plus one net (and net-class entry) per pin.
    edge_cuts = [ f'(gr_line (start {x1} {y1}) (end {x2} {y2}) (layer "Edge.Cuts") (width 0.05))'
            for (x1, y1), (x2, y2) in outline ]
    net_defs = [ f'(net {i+1} "pogo{i}")' for i, _pin in enumerate(pins) ]
    net_class_defs = [ f'(add_net "pogo{i}")' for i, _pin in enumerate(pins) ]
    return pcb_templ.format(
            net_defs='\n'.join(net_defs),
            net_class_defs='\n'.join(net_class_defs),
            module_defs='\n'.join(module_defs),
            edge_cuts='\n'.join(edge_cuts))
def inkscape_query_all(filename):
    """Run ``inkscape --query-all`` on *filename* and return a mapping of
    object id -> (x, y, width, height) in document units.

    The inkscape binary can be overridden via the INKSCAPE environment
    variable.  Raises CalledProcessError when inkscape exits non-zero.
    """
    inkscape_bin = os.environ.get('INKSCAPE', 'inkscape')
    result = subprocess.run([inkscape_bin, filename, '--query-all'], capture_output=True)
    result.check_returncode()

    # Output format: one "id,x,y,w,h" line per drawable object.
    bounds = {}
    for line in result.stdout.decode().splitlines():
        obj_id, x, y, w, h = line.split(',')
        bounds[obj_id] = (float(x), float(y), float(w), float(h))
    return bounds
# XML namespace map used for all SVG queries in this module.
SVG_NS = {
        'svg': 'http://www.w3.org/2000/svg',
        'inkscape': 'http://www.inkscape.org/namespaces/inkscape',
        'sodipodi': 'http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd'
}

def svg_find_elements(doc, tag, layer=None):
    """Yield all *tag* elements found inside Inkscape layers of *doc*.

    doc   -- parsed SVG root element (xml.etree.ElementTree.Element)
    tag   -- fully qualified tag to search for, e.g. '{...svg}circle'
    layer -- when given, only the layer whose inkscape:label equals it is
             searched; otherwise every layer is searched.

    (The original bound an unused enumerate() index here; removed.)
    """
    for group in doc.findall('svg:g', SVG_NS):
        # Inkscape marks layers as <g inkscape:groupmode="layer">.
        if group.attrib.get(f'{{{SVG_NS["inkscape"]}}}groupmode') != 'layer':
            continue

        label = group.attrib.get(f'{{{SVG_NS["inkscape"]}}}label', '')
        if not layer or label == layer:
            yield from group.iter(tag)
# def svg_get_scale(doc):
# w = doc.attrib['width']
# h = doc.attrib['height']
#
# if not w.endswith('mm') and h.endswith('mm'):
# raise ValueError('Document dimensions in SVG must be set to millimeters')
#
# w, h = float(w[:-2]), float(h[:-2])
# _x, _y, vb_w, vb_h = map(float, doc.attrib['viewBox'].split())
# scale_x, scale_y = vb_w / w, vb_h / h
# assert abs(1 - scale_x/scale_y) < 0.001
# return scale_x
def svg_get_viewbox_mm(doc):
    """Return (x, y, width, height) of the SVG viewport in millimeters.

    Raises ValueError unless BOTH the width and height attributes of the
    SVG root are given in 'mm'.
    """
    w = doc.attrib['width']
    h = doc.attrib['height']

    # Precedence bug fix: the original `not w.endswith('mm') and
    # h.endswith('mm')` only fired when width alone was wrong, silently
    # accepting e.g. both dimensions in inches.
    if not (w.endswith('mm') and h.endswith('mm')):
        raise ValueError('Document dimensions in SVG must be set to millimeters')

    w, h = float(w[:-2]), float(h[:-2])
    x, y, vb_w, vb_h = map(float, doc.attrib['viewBox'].split())
    # Convert the viewBox origin from user units to mm via the per-axis scale.
    scale_x, scale_y = vb_w / w, vb_h / h
    return x/scale_x, y/scale_y, w, h
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('svg', metavar='pogo_map.svg', help='Input inkscape SVG pogo pin map (use provided template!)')
    parser.add_argument('outline', metavar='outline.dxf', help='Board outline DXF generated by OpenSCAD')
    parser.add_argument('output', default='kicad', help='Output directory/project name and path')
    parser.add_argument('-y', '--yspace', type=int, default=200, help='Schematic pin Y spacing in mil (default: 200)')
    parser.add_argument('-a', '--annular', type=float, default=0.5, help='Pogo pin annular ring width in mm (default: 0.5)')
    parser.add_argument('-l', '--svg-layer', type=str, default='Test Points', help='Name of SVG layer containing pogo pins')
    parser.add_argument('-n', '--name', default='jig', help='Output KiCAD project name')
    args = parser.parse_args()

    # Create the output directory if needed; refuse to clobber a file.
    # NOTE(review): SystemError is an odd exception type for a user input
    # problem -- NotADirectoryError would be more conventional.
    if not path.exists(args.output):
        os.mkdir(args.output)
    if not path.isdir(args.output):
        raise SystemError(f'Output path "{args.output}" is not a directory')

    # Collect the pogo-pin circles from the requested Inkscape layer.
    with open(args.svg, 'r') as f:
        doc = xe.fromstring(f.read())
    pogo_circle_ids = [ circle.attrib['id'] for circle in svg_find_elements(doc, f'{{{SVG_NS["svg"]}}}circle', args.svg_layer) ]
    # scale = svg_get_scale(doc)
    page_x, page_y, page_w, page_h = svg_get_viewbox_mm(doc)
    MM_PER_IN = 25.4
    SVG_DEF_DPI = 96
    px_to_mm = lambda px: px/SVG_DEF_DPI * MM_PER_IN
    # Ask inkscape for the rendered bounding boxes of all objects, then
    # pick out the pogo circles (must be round: width ~ height).
    query = inkscape_query_all(args.svg)
    dims = [ query[id] for id in pogo_circle_ids ]
    assert all( abs(1 - w/h) < 0.001 for _x, _y, w, h in dims )
    print('origin:', page_x, page_y)
    print('dims:', page_w, page_h)
    # Convert circle bounding boxes (px) to pin centers + hole dia (mm).
    pins = [ (
        (page_x + px_to_mm(x) + px_to_mm(w)/2,
         page_y - page_h + px_to_mm(y) + px_to_mm(w)/2),
        px_to_mm(w)) for x, y, w, h in dims ]
    # Read the board outline segments from the DXF (Y axis flipped for KiCad).
    doc = ezdxf.readfile(args.outline)
    outline = []
    for line in doc.modelspace().query('LINE'):
        (x1, y1, _z1), (x2, y2, _z2) = line.dxf.start, line.dxf.end
        outline.append(((x1, -y1), (x2, -y2)))
    # Emit the four project files: schematic, board, project, symbol cache.
    with open(path.join(args.output, f'{args.name}.sch'), 'w', encoding='utf8') as sch:
        sch.write(sch_template(f'{args.name} generated schematic (PogoJig v{__version__})', len(pins), yspace=args.yspace))
    with open(path.join(args.output, f'{args.name}.kicad_pcb'), 'w', encoding='utf8') as pcb:
        pcb.write(pcb_template(outline, pins, annular=args.annular))
    with open(path.join(args.output, f'{args.name}.pro'), 'w', encoding='utf8') as f:
        f.write(pkgutil.get_data('pogojig.kicad', 'kicad.pro').decode('utf8'))
    with open(path.join(args.output, f'{args.name}-cache.lib'), 'w', encoding='utf8') as f:
        f.write(pkgutil.get_data('pogojig.kicad', 'kicad-cache.lib').decode('utf8'))
| [
"git@jaseg.net"
] | git@jaseg.net |
ccc95a8679b749bc527794939994aee82257f6dd | 1d182c8cf1ce19019e0b1cba4a16ee1a2a49751e | /data/base.py | d4e7c2318658561292e5f341ea1513223aa70af8 | [
"MIT"
] | permissive | zxt881108/pytorch-cv | e30ac8638a8819b637c6bbef717f733264229126 | 6f2d1760f12c9a56a3e7b19ba74bc41451ea284c | refs/heads/master | 2020-06-18T18:16:09.741626 | 2019-04-29T14:11:06 | 2019-04-29T14:11:06 | 196,396,348 | 5 | 0 | null | 2019-07-11T13:06:29 | 2019-07-11T13:06:28 | null | UTF-8 | Python | false | false | 4,270 | py | """Base dataset methods."""
import os
from torch.utils import data
class ClassProperty(object):
    """Read-only class-level property descriptor (internal use only)."""

    def __init__(self, fget):
        # Getter invoked with the owning class as its single argument.
        self.fget = fget

    def __get__(self, instance, owner):
        # The instance is ignored: the property is resolved on the class.
        return self.fget(owner)
class SimpleDataset(data.Dataset):
    """Wrap any sized, indexable object (list, array, ...) as a Dataset.

    Parameters
    ----------
    data : dataset-like object
        Any object that implements ``len()`` and ``[]``.
    """

    def __init__(self, data):
        # The parameter intentionally shadows the torch.utils `data`
        # module inside this method only; stored privately.
        self._data = data

    def __getitem__(self, idx):
        return self._data[idx]

    def __len__(self):
        return len(self._data)
class _LazyTransformDataset(data.Dataset):
"""Lazily transformed dataset."""
def __init__(self, data, fn):
super(_LazyTransformDataset, self).__init__()
self._data = data
self._fn = fn
def __len__(self):
return len(self._data)
def __getitem__(self, idx):
item = self._data[idx]
if isinstance(item, tuple):
return self._fn(*item)
return self._fn(item)
def transform(self, fn):
self._fn = fn
class VisionDataset(data.Dataset):
    """Base Dataset with directory checker.
    Parameters
    ----------
    root : str
        The root path of xxx.names, by default is '~/.mxnet/datasets/foo', where
        `foo` is the name of the dataset.
    """
    def __init__(self, root):
        super(VisionDataset, self).__init__()
        # Fail fast if the dataset directory is missing.
        # NOTE(review): the error text (and the docstring above) still
        # reference MXNet/gluon-cv paths in this pytorch port -- confirm
        # and update the message; also note the line continuations embed
        # the source indentation inside the string.
        if not os.path.isdir(os.path.expanduser(root)):
            helper_msg = "{} is not a valid dir. Did you forget to initialize \
                         datasets described in: \
                         `http://gluon-cv.mxnet.io/build/examples_datasets/index.html`? \
                         You need to initialize each dataset only once.".format(root)
            raise OSError(helper_msg)
    @property
    def classes(self):
        # Subclasses must supply the ordered list of category names.
        raise NotImplementedError
    @property
    def num_class(self):
        """Number of categories."""
        return len(self.classes)
    def transform(self, fn, lazy=True):
        """Returns a new dataset with each sample transformed by the
        transformer function `fn`.
        Parameters
        ----------
        fn : callable
            A transformer function that takes a sample as input and
            returns the transformed sample.
        lazy : bool, default True
            If False, transforms all samples at once. Otherwise,
            transforms each sample on demand. Note that if `fn`
            is stochastic, you must set lazy to True or you will
            get the same result on all epochs.
        Returns
        -------
        Dataset
            The transformed dataset.
        """
        trans = _LazyTransformDataset(self, fn)
        if lazy:
            return trans
        # Eager mode: materialise every transformed sample now.
        return SimpleDataset([i for i in trans])
#### for debug (Note: delete)
from PIL import Image
import numpy as np
class DemoDataset(data.Dataset):
    """Debug-only dataset of `num` random 60x60 RGB PIL images.

    Marked for deletion in the comment above.  Every __getitem__ call
    returns a FRESH random image (non-deterministic; `idx` is ignored).
    """
    def __init__(self, num):
        # Number of samples this dataset pretends to contain.
        self._num = num
    def __len__(self):
        return self._num
    def __getitem__(self, idx):
        return Image.fromarray(np.random.randint(0, 255, size=(60, 60, 3)).astype(np.uint8))
    # NOTE(review): this method is a verbatim copy of
    # VisionDataset.transform -- candidate for deduplication.
    def transform(self, fn, lazy=True):
        """Returns a new dataset with each sample transformed by the
        transformer function `fn`.
        Parameters
        ----------
        fn : callable
            A transformer function that takes a sample as input and
            returns the transformed sample.
        lazy : bool, default True
            If False, transforms all samples at once. Otherwise,
            transforms each sample on demand. Note that if `fn`
            is stochastic, you must set lazy to True or you will
            get the same result on all epochs.
        Returns
        -------
        Dataset
            The transformed dataset.
        """
        trans = _LazyTransformDataset(self, fn)
        if lazy:
            return trans
        return SimpleDataset([i for i in trans])
| [
"tinyshine@yeah.net"
] | tinyshine@yeah.net |
15e658cebdec635d1af21b37ceec8ac02139f33b | 7a867cfa0e882314eebaeca69deb972b5ed035a5 | /Archive/acceptable results/DeepCars.py | f0c340bde33f016b923d542a7e9553fe23cf6d93 | [] | no_license | lucianzhong/deepcars-reinforcement-learning | 3b6b71003475837f924ca330321730b53e872273 | 5f279ee2f1fb748ae1ce37d07cf711d4fe981f92 | refs/heads/master | 2020-12-30T05:46:00.149469 | 2019-09-27T21:05:25 | 2019-09-27T21:05:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,113 | py | import pygame, random, os, time, sys
import numpy as np
from pygame.locals import *
# =======================================================================================================================
# -------------------------------------------Global Parameter values----------------------------------------------------
# =======================================================================================================================
DefaultTextColor = (255, 97, 3)
BackgroundColor = (255, 255, 255)
FPS = 100
Horiz_Move_Rate = 4
AddNewCarRate = 1
CarsMinSpeed = 2 # Min. realative speed for the other cars wrt to the PlayerSpeed
CarsMaxSpeed = 2 # Max. realative speed for the other cars wrt to the PlayerSpeed
PlayerSpeed = 8 # Player's speed wrt to side walls (ground speed)
CarWidth = 50
CarHeight = 100
SpaceWidth = 5 # Width of space between objects, e.g. btw car and line/wall
LineWidth = 5 # Width of the lines in between the lanes (dashed lines)
LineHeight = 25
WallWidth = 50 # Width of the walls on the left and right sides
NoOfLanes = 5
MaxCarsInLane = 5 # Maximum number of cars vertically in one lane (for window size setting)
ActionList = ['Left', 'Stay', 'Right']
# Define window's dimensions wrt objects dimensions
WindowWidth = (CarWidth + 2 * SpaceWidth) * NoOfLanes + LineWidth * (NoOfLanes - 1) + 2 * WallWidth
WindowHeight = CarHeight * MaxCarsInLane + 2*SpaceWidth + (MaxCarsInLane-1)*LineWidth
# Calculate the x coordinate of top-right pixel of cars for staying in all lanes
LaneXCoorVec = []
for i in range(NoOfLanes):
tempX = WallWidth + SpaceWidth + i * (CarWidth + SpaceWidth + LineWidth + SpaceWidth)
LaneXCoorVec.append(tempX)
# No of pixels in between the vertical grids
NoOfHorGridPixels = LaneXCoorVec[1] - LaneXCoorVec[0]
# Calculate the y coordinate of top-right pixel of cars for staying in all grid rectangles
LaneYCoorVec = []
for i in range(MaxCarsInLane):
tempY = SpaceWidth + i*(CarHeight + SpaceWidth + LineWidth + SpaceWidth)
LaneYCoorVec.append(tempY)
# No of pixels in between the vertical grids
NoOfVerGridPixels = LaneYCoorVec[1] - LaneYCoorVec[0]
# =======================================================================================================================
# -------------------------------------------Grid World Class-----------------------------------------------------------
# =======================================================================================================================
class GridWorld:
def __init__(self):
# Initializations
self.MainClock = 0
self.WindowSurface = 0
self.font = 0
self.PlayerImage = 0
self.CarsImageVec = []
self.LineImage = 0
self.HorizLineImage = 0
self.LeftWall = 0
self.RightWall = 0
self.LineRecSamples = []
self.HorizLineRecSamples = []
self.CarAddCounter = AddNewCarRate
self.PassedCarsCount = 1 # No. of cars that the agent has passed (start from 1 to avoid deving to 0 in SuccessRate)
self.HitCarsCount = 0 # No. of cars that are hit by player
self.OtherCarsVec = []
self.PlayerLane = int(round((NoOfLanes - 1) / 2))
self.PlayerRect = 0
def Terminate(self):
pygame.quit()
print("The game is terminated")
# sys.exit()
def ObservationSpace(self):
return NoOfLanes+1 # State vector size
def ActionSpace(self):
return 3 # Action vector size: [Left Stay Right]
def Reset(self): # Get initial state
StateVec, _, _, _, _, _ = self.update(1)
return StateVec
def WaitForPlayerToPressKey(self):
print("Press a key to continue or Esc to terminate")
while True:
for event in pygame.event.get():
if event.type == QUIT:
self.Terminate()
if event.type == KEYDOWN:
if event.key == K_ESCAPE: # escape quits
self.Terminate()
return
def DrawText(self,text, font, TextColor, surface, x, y):
textobj = font.render(text, 1, TextColor)
textrect = textobj.get_rect()
textrect.topleft = (x, y)
surface.blit(textobj, textrect)
def PlayerHasHitBaddie(self,PlayerRect, baddies):
for b in baddies:
if PlayerRect.colliderect(b['rec']):
return True
return False
def PygameInitialize(self):
# set up pygame, the window, and the mouse cursor
pygame.init()
self.MainClock = pygame.time.Clock()
self.WindowSurface = pygame.display.set_mode((WindowWidth, WindowHeight))
pygame.display.set_caption('Deep Cars Grid World (ITUarc)')
pygame.mouse.set_visible(False)
self.font = pygame.font.SysFont(None, 30)
# images
print(os.getcwd())
self.PlayerImage = pygame.image.load('image/MyCar')
self.PlayerImage = pygame.transform.scale(self.PlayerImage, (CarWidth, CarHeight))
Car1Image = pygame.image.load('image/Car1')
Car1Image = pygame.transform.scale(Car1Image, (CarWidth, CarHeight))
Car2Image = pygame.image.load('image/Car2')
Car2Image = pygame.transform.scale(Car2Image, (CarWidth, CarHeight))
Car3Image = pygame.image.load('image/Car3')
Car3Image = pygame.transform.scale(Car3Image, (CarWidth, CarHeight))
Car4Image = pygame.image.load('image/Car4')
Car4Image = pygame.transform.scale(Car4Image, (CarWidth, CarHeight))
Car5Image = pygame.image.load('image/Car5')
Car5Image = pygame.transform.scale(Car5Image, (CarWidth, CarHeight))
self.CarsImageVec = [Car1Image, Car2Image, Car3Image, Car4Image, Car5Image]
LeftWallImage = pygame.image.load('image/left')
RightWallImage = pygame.image.load('image/right')
self.LineImage = pygame.image.load('image/black_line')
self.LineImage = pygame.transform.scale(self.LineImage, (LineWidth, WindowHeight))
self.HorizLineImage = pygame.image.load('image/Horizontal_Dashes')
self.HorizLineImage = pygame.transform.scale(self.HorizLineImage, (WindowWidth, LineWidth))
# Define walls
self.LeftWall = {'rec': pygame.Rect(0, -2 * WindowHeight, WallWidth, 3 * WindowHeight),
'surface': pygame.transform.scale(LeftWallImage, (WallWidth, 3 * WindowHeight))
}
self.RightWall = {'rec': pygame.Rect(WindowWidth - WallWidth, -2 * WindowHeight, WallWidth, 3 * WindowHeight),
'surface': pygame.transform.scale(RightWallImage, (WallWidth, 3 * WindowHeight))
}
# Define line rectangles
for i in range(NoOfLanes - 1):
LineXCoord = WallWidth + (i) * LineWidth + (i + 1) * (SpaceWidth + CarWidth + SpaceWidth)
NewLineRec = pygame.Rect(LineXCoord, 0, LineWidth, WindowHeight)
self.LineRecSamples.append(NewLineRec)
for i in range(MaxCarsInLane - 1):
LineYCoord = LaneYCoorVec[i + 1]
NewLineRec = pygame.Rect(0, LineYCoord, WindowWidth, LineWidth)
self.HorizLineRecSamples.append(NewLineRec)
self.PlayerRect = self.PlayerImage.get_rect()
self.PlayerRect.topleft = (LaneXCoorVec[self.PlayerLane], LaneYCoorVec[MaxCarsInLane - 2])
print('The game has initiated')
def update(self,ActionIndex,TrainingFlag=False):
Action = ActionList[ActionIndex] # Pick the action from action list
# ==============================================Define new cars======================================================
if self.CarAddCounter >= AddNewCarRate:
self.CarAddCounter = 0
NewCarLaneNo = random.randint(0, NoOfLanes - 1)
NewCar = {'rec': pygame.Rect(LaneXCoorVec[NewCarLaneNo], 0 - CarHeight, CarWidth, CarHeight),
'speed': NoOfVerGridPixels,
'XCoord': NewCarLaneNo, # x coordinate in grid world
'YCoord': MaxCarsInLane - 1, # y coordinate in grid world
'surface': self.CarsImageVec[random.randint(0, len(self.CarsImageVec) - 1)] # Randomize cars visuals
}
self.OtherCarsVec.append(NewCar)
self.CarAddCounter += 1
# =================================================Movements=========================================================
# Side walls
self.LeftWall['rec'].move_ip(0, NoOfVerGridPixels)
self.RightWall['rec'].move_ip(0, NoOfVerGridPixels)
# Move the player left
if Action is 'Left' and self.PlayerLane is not 0:
self.PlayerRect.move_ip(-1 * NoOfHorGridPixels, 0)
self.PlayerLane -= 1
# print('Player car lane number has chnaged to ', update.PlayerLane + 1)
# Move the player right
if Action is 'Right' and self.PlayerLane is not NoOfLanes - 1:
self.PlayerRect.move_ip(+1 * NoOfHorGridPixels, 0)
self.PlayerLane += 1
# print('Player car lane number has chnaged to ', update.PlayerLane + 1)
# Move other cars backward
for Car in self.OtherCarsVec:
Car['rec'].move_ip(0, +1 *Car['speed'])
Car['YCoord'] -= 1
# ================================Remove the other cars that pass the game window====================================
for Car in self.OtherCarsVec:
#if Car['rec'].top > WindowHeight + SpaceWidth or Car['rec'].top + CarHeight < - SpaceWidth:
if Car['YCoord'] < 0:
self.OtherCarsVec.remove(Car)
self.PassedCarsCount += 1
# ==================================================================================================================
# -----------------------------------------------State and reward---------------------------------------------------
# ==================================================================================================================
StateVec = []
for i in range(0,NoOfLanes+1):
StateVec.append(MaxCarsInLane - 2) # [Player Lane Number , Distance to the car in front in lane (i) ]
StateVec[0] = self.PlayerLane
for Car in self.OtherCarsVec:
if Car['YCoord'] < StateVec[Car['XCoord']+1]:
# Car['YCoord'] < StateVec[Car['XCoord']+1]: ===> For more than one car in the same lane, select the closer one
StateVec[Car['XCoord']+1] = Car['YCoord'] # Number of grid rectangles existing in between (including car rectangle)
done =False
if self.PlayerHasHitBaddie(self.PlayerRect,self.OtherCarsVec):
Reward = -100
self.PassedCarsCount -= 1
self.HitCarsCount += 1
done = True
else:
Reward = 1
# =======================================Draw the game world on the window===========================================
self.WindowSurface.fill(BackgroundColor)
for i in range(0, len(self.LineRecSamples)):
self.WindowSurface.blit(self.LineImage, self.LineRecSamples[i])
for i in range(0, len(self.HorizLineRecSamples)):
self.WindowSurface.blit(self.HorizLineImage, self.HorizLineRecSamples[i])
self.WindowSurface.blit(self.PlayerImage, self.PlayerRect)
for Car in self.OtherCarsVec:
self.WindowSurface.blit(Car['surface'], Car['rec'])
self.DrawText('Cars passed: %s' % (self.PassedCarsCount), self.font, DefaultTextColor, self.WindowSurface, WallWidth + SpaceWidth,
SpaceWidth)
self.DrawText('Cars hit: %s' % (self.HitCarsCount), self.font, DefaultTextColor, self.WindowSurface, WallWidth + SpaceWidth,
10*SpaceWidth)
self.WindowSurface.blit(self.LeftWall['surface'], self.LeftWall['rec'])
self.WindowSurface.blit(self.RightWall['surface'], self.RightWall['rec'])
# ============Move walls' Y coordinate to -2*WindowHeight when their top left pixel reached to y = 0================
if self.LeftWall['rec'].topleft[1] >= - WindowHeight / 2:
self.LeftWall['rec'].topleft = [0, -2 * WindowHeight]
self.RightWall['rec'].topleft = [WindowWidth - WallWidth, -2 * WindowHeight]
# ==============================================Environment update==================================================
SuccessRate = round(100 * self.PassedCarsCount / (self.PassedCarsCount + self.HitCarsCount), 2)
if TrainingFlag is True: # Test part
self.DrawText('Training: %s' % (SuccessRate), self.font, DefaultTextColor, self.WindowSurface, WallWidth + SpaceWidth,
20 * SpaceWidth)
else:
self.DrawText('Test: %s' % (SuccessRate), self.font, DefaultTextColor, self.WindowSurface, WallWidth + SpaceWidth,
20 * SpaceWidth)
pygame.display.update()
self.MainClock.tick(FPS)
# ==================================================================================================================
# -----------------------------------------------ESC for Terminate--------------------------------------------------
# ==================================================================================================================
IsTerminated = False
for event in pygame.event.get():
if event.type == KEYDOWN:
if event.key == K_ESCAPE: # escape quits
IsTerminated = True
self.Terminate()
return StateVec, Reward, IsTerminated, self.HitCarsCount, self.PassedCarsCount, done
if __name__ == "__main__":
from DeepCars import GridWorld as envObj
env = envObj()
env.PygameInitialize()
while True:
env.update(ActionIndex=1,TrainingFlag=False)
time.sleep(0.1)
| [
"majid.moghadam2006@gmail.com"
] | majid.moghadam2006@gmail.com |
829ab5dd073fffa7a0106f8a17ef99c99741075e | 2d1a99d473c68850861108f46bfa3cbc9c2166e6 | /lists/urls.py | 7599bea92be1c68aa2ebdd272a8cdbe537db66a4 | [] | no_license | MidSummersEveee/TDD_Intro | bda1fec2b7e51b75660fbe6901423d9705468b15 | 47c0c087bdea96f50350f0dc00ff1b5d79ffd4fc | refs/heads/master | 2021-04-24T22:35:05.372531 | 2018-01-20T19:06:18 | 2018-01-20T19:06:18 | 116,833,267 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,173 | py | """superlists URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from lists import views
urlpatterns = [
# url(r'^lists/new$', views.new_list, name='new_list'),
# url(r'^lists/(\d+)/$', views.view_list, name='view_list'),
# url(r'^lists/(\d+)/add_item$', views.add_item, name='add_item'),
# url(r'^lists/the-only-list-in-the-world/$', views.view_list, name='view_list'),
url(r'^new$', views.new_list, name='new_list'),
url(r'^(\d+)/$', views.view_list, name='view_list'),
url(r'^(\d+)/add_item$', views.add_item, name='add_item'),
] | [
"midsummerseve1@gmail.com"
] | midsummerseve1@gmail.com |
577521db4220a5eb4893f05eaf977b6ff0153d22 | 7346040136a34a1722cdc990d042365d7d16a798 | /momshop/migrations/0010_order_adress.py | ef748b6a46b4c1938040bdee1f5406976e153d7b | [] | no_license | LuciDreamer/momstyle | 5a353264a0cb7ec49390f75ffbf7b242251f9b08 | d094ebdfe60e62e0d420bdd8aee404bf53eb889a | refs/heads/master | 2020-03-19T11:41:56.462048 | 2018-06-09T14:11:06 | 2018-06-09T14:11:06 | 136,470,392 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 425 | py | # Generated by Django 2.0.5 on 2018-05-16 12:28
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('momshop', '0009_auto_20180516_1352'),
]
operations = [
migrations.AddField(
model_name='order',
name='adress',
field=models.CharField(default='', max_length=500, verbose_name='Адресс'),
),
]
| [
"americanpsycho@list.ru"
] | americanpsycho@list.ru |
59d3da7ea5f0a00091ca5118284d6596f35cccc6 | a15a0026d8107a330f4445a0bb07f1e0266d3c0d | /tests/testServer.py | 91a6873b6d3f8e009f7c256c34d25a285e69779b | [
"Apache-2.0"
] | permissive | JosedeKruif/ShiCo | 436659175e10939fe47ced7e791e049a58c37c87 | e91849f9b3d26c00d33e77eb086656013cdf5113 | refs/heads/master | 2021-01-12T18:07:40.124506 | 2016-10-19T07:47:19 | 2016-10-19T07:47:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,954 | py | import unittest
import json
import shico.server
import shico.server.app
from shico.server.utils import initApp
class ServerTest(unittest.TestCase):
'''Tests for server'''
@classmethod
def setUpClass(self):
# Fake models! Only made so we can do unittests
initApp(shico.server.app.app, 'tests/w2vModels/*.w2v', True, None)
self.app = shico.server.app.app.test_client()
def testTrackService(self):
'''Test calls to /track/<terms>. Response should be valid JSON with the
correct structure.'''
terms = 'x'
resp = self.app.get('/track/' + terms)
self.assertEqual(resp.status_code, 200,
'Response should be code 200')
try:
jsonStr = resp.data
respJson = json.loads(jsonStr)
except:
self.fail('Response should be valid JSON')
keyList = [ 'stream', 'networks', 'embedded', 'vocabs' ]
respKeys = respJson.keys()
for key in keyList:
if key not in respKeys:
self.fail('Missing key: ' + key)
self._checkStream(respJson['stream'])
self._checkNetwork(respJson['networks'])
self._checkEmbedded(respJson['embedded'])
self._checkVocab(respJson['vocabs'])
def _checkStream(self, data):
'''Check the structure of the stream data is correct.'''
wordsPerResult = None
for year,wordList in data.iteritems():
nWordsInList = len(wordList)
self.assertGreater(nWordsInList, 0,
'Word lists should contain words')
if wordsPerResult is None:
wordsPerResult = nWordsInList
else:
self.assertEqual(wordsPerResult, nWordsInList,
'All results should contain the same number' +
' of words')
for word in wordList:
self.assertIsNotNone(wordList[word],
'Items in wordList should be word: ' +
'weight dictionary entries')
def _checkNetwork(self, data):
'''Check the structure of the network data is correct.'''
for year, net in data.iteritems():
self.assertEqual(sorted(net.keys()), sorted(['nodes', 'links']),
'Each network should contain "nodes" and "links"'
'but %s does not' % year)
for node in net['nodes']:
self.assertEqual(sorted(node.keys()),
sorted(['name', 'type', 'count']),
'Each node should contain "name", "type" and '
'"count", but a node on %s does not' % year)
for link in net['links']:
self.assertEqual(sorted(link.keys()),
sorted(['source', 'target', 'value']),
'Each link should contain "source", "target" '
'and "value", but a link on %s does not'
% year)
def _checkEmbedded(self, data):
'''Check the structure of the embedded data is correct.'''
for year, embeddings in data.iteritems():
self.assertGreater(len(embeddings), 0,
'List should contain some embeddings')
for item in embeddings:
self.assertEqual(sorted(item.keys()), sorted(['word', 'x', 'y']),
'Embeddings should contain "word", "x" and "y"'
'but %s does not' % year)
def _checkVocab(self, data):
'''Check the structure of the vocabularies data is correct.'''
for year, seedVocabs in data.iteritems():
self.assertGreater(len(seedVocabs), 0,
'List should contain some seed-vocabulary dictionaries')
def testAppData(self):
'''Test calls to /load-settings. Response should be valid JSON.'''
resp = self.app.get('/load-settings')
self.assertEqual(resp.status_code, 200,
'Response should be code 200')
try:
jsonStr = resp.data
respJson = json.loads(jsonStr)
except:
self.fail('Response should be valid JSON')
for key in ['cleaning', 'years']:
self.assertTrue(key in respJson,
'"' + key + '" should be a key in the response')
years = respJson['years']
for key in ['first', 'last', 'values']:
self.assertTrue(key in years,
'"' + key + '" should be a key in the response')
for key in ['first', 'last']:
self.assertTrue(str(years[key]) in years['values'],
'"' + key + '" should be a key in values')
| [
"c.martinez@esciencecenter.nl"
] | c.martinez@esciencecenter.nl |
53fb7e478eeea17c938792a4b9c62ad3cf392b74 | a66a3d65a67409788ed1ca64ad04a433ef5521b7 | /Python基础/code/C05/e0504.py | 4b1f2f0b82f96b4aab04b58bde85acdd108ea326 | [] | no_license | miniBamboo/PythonLearning | 0cd3c4f4f08ce36eae749736860874df503b2ef2 | f23fbadb9ecd5e54b8dfde153f487473f17de1a8 | refs/heads/main | 2023-02-24T20:58:44.955101 | 2021-01-30T08:23:34 | 2021-01-30T08:23:34 | 318,386,303 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 242 | py | #if的嵌套
has_ticket = False
if has_ticket:
knife_length = int(input('刀子长度:'))
if knife_length > 20:
print("安检不通过")
else:
print("安检通过")
else:
print('没票,不能进站')
| [
"noreply@github.com"
] | miniBamboo.noreply@github.com |
96740a5818f496c48cced1e2c40379baf0a7e573 | 600df3590cce1fe49b9a96e9ca5b5242884a2a70 | /native_client/pnacl/driver/pnacl-driver.py | 2ac806f22b80cbb2246dd980fe3d41b59f3c1040 | [
"BSD-3-Clause"
] | permissive | metux/chromium-suckless | efd087ba4f4070a6caac5bfbfb0f7a4e2f3c438a | 72a05af97787001756bae2511b7985e61498c965 | refs/heads/orig | 2022-12-04T23:53:58.681218 | 2017-04-30T10:59:06 | 2017-04-30T23:35:58 | 89,884,931 | 5 | 3 | BSD-3-Clause | 2022-11-23T20:52:53 | 2017-05-01T00:09:08 | null | UTF-8 | Python | false | false | 29,863 | py | #!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
import subprocess
from driver_tools import AddHostBinarySearchPath, DefaultOutputName, \
DefaultPCHOutputName, DriverChain, GetArch, ParseArgs, ParseTriple, \
Run, RunDriver, RunWithEnv, TempNameGen, UnrecognizedOption
from driver_env import env
from driver_log import DriverOpen, Log
import filetype
import pathtools
# Driver-local environment variables layered on top of the shared driver
# environment (driver_env.env).  Values use the driver's shell-like
# evaluation syntax: ${VAR}, ${COND ? a : b}, ${VAR_%KEY%}, ${@Func:arg},
# which is expanded lazily when a variable is read.
EXTRA_ENV = {
  'ALLOW_TRANSLATE': '0',  # Allow bitcode translation before linking.
                           # It doesn't normally make sense to do this.

  'ALLOW_NATIVE': '0',     # Allow native objects (.S,.s,.o) to be in the
                           # linker line for .pexe generation.
                           # It doesn't normally make sense to do this.

  # CXX_EH_MODE specifies how to deal with C++ exception handling:
  #  * 'none':  Strips out use of C++ exception handling.
  #  * 'sjlj':  Enables the setjmp()+longjmp()-based implementation of
  #    C++ exception handling.
  'CXX_EH_MODE': 'none',

  'FORCE_INTERMEDIATE_LL': '0',
  # Produce an intermediate .ll file
  # Useful for debugging.
  # NOTE: potentially different code paths and bugs
  #       might be triggered by this
  'LANGUAGE': '',              # C or CXX (set by SetTool)
  'INCLUDE_CXX_HEADERS': '0',  # This is set by RunCC.

  # Command-line options
  'GCC_MODE': '',      # '' (default), '-E', '-c', or '-S'
  'SHARED': '0',       # Identify if the target is a shared library.
  'STDINC': '1',       # Include standard headers (-nostdinc sets to 0)
  'STDINCCXX': '1',    # Include standard cxx headers (-nostdinc++ sets to 0)
  'USE_STDLIB': '1',   # Include standard libraries (-nostdlib sets to 0)
  'STDLIB': 'libc++',  # C++ Standard Library.
  'DEFAULTLIBS': '1',  # Link with default libraries
  'DIAGNOSTIC': '0',   # Diagnostic flag detected
  'PIC': '0',          # Generate PIC
  'NEED_DASH_E': '0',  # Used for stdin inputs, which must have an explicit
                       # type set (using -x) unless -E is specified.
  'VERBOSE': '0',      # Verbose (-v)
  'SHOW_VERSION': '0', # Version (--version)

  'PTHREAD': '0',      # use pthreads?

  'INPUTS': '',        # Input files
  'OUTPUT': '',        # Output file
  'UNMATCHED': '',     # Unrecognized parameters

  # Architecture-bias preprocessor defines, selected via ${BIAS_%BIAS%}.
  'BIAS_NONE': '',
  'BIAS_ARM': '-D__arm__ -D__ARM_ARCH_7A__ -D__ARMEL__',
  'BIAS_MIPS32': '-D__mips__',
  'BIAS_X8632': '-D__i386__ -D__i386 -D__i686 -D__i686__ -D__pentium4__',
  'BIAS_X8664': '-D__amd64__ -D__amd64 -D__x86_64__ -D__x86_64 -D__core2__',
  'BIAS_ARM_NONSFI': '${BIAS_ARM} -D__native_client_nonsfi__',
  'BIAS_X8632_NONSFI': '${BIAS_X8632} -D__native_client_nonsfi__',

  'FRONTEND_TRIPLE': 'le32-unknown-nacl',

  'OPT_LEVEL': '',  # Default for most tools is 0, but we need to know
                    # if it's explicitly set or not when the driver
                    # is only used for linking + translating.
  'CC_FLAGS': '-O${#OPT_LEVEL ? ${OPT_LEVEL} : 0} ' +
              '-fno-vectorize -fno-slp-vectorize ' +
              '-fno-common ${PTHREAD ? -pthread} ' +
              '-nostdinc ${BIAS_%BIAS%} ' +
              '-fno-gnu-inline-asm ' +
              '-target ${FRONTEND_TRIPLE} ' +
              '${IS_CXX ? -fexceptions}',

  # System include search paths; ISYSTEM_USER entries come from -isystem.
  'ISYSTEM': '${ISYSTEM_USER} ${STDINC ? ${ISYSTEM_BUILTIN}}',
  'ISYSTEM_USER': '',  # System include directories specified by
                       # using the -isystem flag.
  'ISYSTEM_BUILTIN':
    '${BASE_USR}/usr/include ' +
    '${ISYSTEM_CLANG} ' +
    '${ISYSTEM_CXX} ' +
    '${BASE_USR}/include ' +
    '${BASE_SDK}/include ',

  'ISYSTEM_CLANG': '${BASE_LLVM}/lib/clang/${CLANG_VER}/include',

  'ISYSTEM_CXX':
    '${INCLUDE_CXX_HEADERS && STDINCCXX ? ${ISYSTEM_CXX_include_paths}}',

  'ISYSTEM_CXX_include_paths': '${BASE_USR}/include/c++/v1',

  # Only propagate opt level to linker if explicitly set, so that the
  # linker will know if an opt level was explicitly set or not.
  'LD_FLAGS': '${#OPT_LEVEL ? -O${OPT_LEVEL}} ' +
              '${SHARED ? -shared : -static} ' +
              '${PIC ? -fPIC} ${@AddPrefix:-L:SEARCH_DIRS} ' +
              '--pnacl-exceptions=${CXX_EH_MODE}',

  'SEARCH_DIRS': '',  # Directories specified using -L

  # Library Strings
  'EMITMODE': '${!USE_STDLIB || SHARED ? nostdlib : static}',

  # This is setup so that LD_ARGS_xxx is evaluated lazily.
  'LD_ARGS': '${LD_ARGS_%EMITMODE%}',

  # ${ld_inputs} signifies where to place the objects and libraries
  # provided on the command-line.
  'LD_ARGS_nostdlib': '-nostdlib ${ld_inputs}',

  'LD_ARGS_static':
    '-l:crt1.x -l:crti.bc -l:crtbegin.bc '
    '${CXX_EH_MODE==sjlj ? -l:sjlj_eh_redirect.bc : '
    '${CXX_EH_MODE==none ? -l:unwind_stubs.bc}} ' +
    '${ld_inputs} ' +
    '--start-group ${STDLIBS} --end-group',

  'LLVM_PASSES_TO_DISABLE': '',

  # Flags for translating to native .o files.
  'TRANSLATE_FLAGS': '-O${#OPT_LEVEL ? ${OPT_LEVEL} : 0}',

  'STDLIBS': '${DEFAULTLIBS ? '
             '${LIBSTDCPP} ${LIBPTHREAD} ${LIBNACL} ${LIBC} '
             '${LIBGCC_BC} ${LIBPNACLMM}}',

  'LIBSTDCPP': '${IS_CXX ? -lc++ -lm -lpthread }',

  # The few functions in the bitcode version of compiler-rt unfortunately
  # depend on libm. TODO(jvoung): try rewriting the compiler-rt functions
  # to be standalone.
  'LIBGCC_BC': '-lgcc -lm',

  'LIBC': '-lc',
  'LIBNACL': '-lnacl',
  'LIBPNACLMM': '-lpnaclmm',
  # Enabled/disabled by -pthreads
  'LIBPTHREAD': '${PTHREAD ? -lpthread}',

  # IS_CXX is set by pnacl-clang and pnacl-clang++ programmatically
  'CC': '${IS_CXX ? ${CLANGXX} : ${CLANG}}',

  'RUN_CC': '${CC} ${emit_llvm_flag} ${mode} ${CC_FLAGS} ' +
            '${@AddPrefix:-isystem :ISYSTEM} ' +
            '-x${typespec} ${infile} -o ${output}',
}
def AddLLVMPassDisableFlag(*args):
  """Record an LLVM pass to skip, and also forward the flag to the linker."""
  for key in ('LLVM_PASSES_TO_DISABLE', 'LD_FLAGS'):
    env.append(key, *args)
def AddLDFlag(*flags):
  """Pass the given flags straight through to pnacl-ld."""
  env.append('LD_FLAGS', *flags)
def AddTranslatorFlag(*flags):
  """Forward flags to the translator, whichever path the build takes.

  When we go all the way to a .nexe, the translator is invoked by ld, so the
  flags travel as -Wt,<flag> in LD_FLAGS; when we stop at a native .o, they
  are consumed directly from TRANSLATE_FLAGS.
  """
  prefixed = ['-Wt,%s' % flag for flag in flags]
  env.append('LD_FLAGS', *prefixed)
  env.append('TRANSLATE_FLAGS', *flags)
def AddCCFlag(*flags):
  """Pass the given flags straight through to the clang frontend."""
  env.append('CC_FLAGS', *flags)
def AddDiagnosticFlag(*flags):
  """Record a flag that switches the driver into diagnostic mode."""
  env.set('DIAGNOSTIC', '1')
  env.append('CC_FLAGS', *flags)
def SetTarget(*args):
  """Set the frontend target triple (from -target/--target and friends).

  ParseTriple is still invoked for its validation side effect (it reports a
  fatal driver error on an unsupported triple), but its return value was
  never used, so the dead local binding has been dropped.  The triple is
  also forwarded to the linker.
  """
  ParseTriple(args[0])  # Validate the triple; result intentionally unused.
  env.set('FRONTEND_TRIPLE', args[0])
  AddLDFlag('--target=' + args[0])
def SetStdLib(*args):
  """Select the C++ standard library; only libc++ is accepted."""
  if args[0] != 'libc++':
    Log.Fatal('Only libc++ is supported as standard library')
def IsPortable():
  """Return True when targeting architecture-neutral (le32) bitcode."""
  triple = env.getone('FRONTEND_TRIPLE')
  return triple.startswith('le32-')
# Counter used to give each stdin input a unique pseudo-filename.
stdin_count = 0
def AddInputFileStdin():
  """Register '-' (standard input) as an input file.

  Stdin carries no filename extension, so its language must be forced with
  -x; when none was given we default to C, which is only legal together
  with -E (NEED_DASH_E is checked later).
  """
  global stdin_count
  ftype = filetype.GetForcedFileType()
  if not ftype:
    # No -x given: fall back to C and require -E on the command line.
    ftype = 'c'
    env.set('NEED_DASH_E', '1')

  pseudo_name = '__stdin%d__' % stdin_count
  env.append('INPUTS', pseudo_name)
  filetype.ForceFileType(pseudo_name, ftype)
  stdin_count += 1
def IsStdinInput(f):
  """Return True if f is a synthetic stdin pseudo-filename (__stdinN__)."""
  has_prefix = f.startswith('__stdin')
  has_suffix = f.endswith('__')
  return has_prefix and has_suffix
def HandleDashX(arg):
  """Handle -x<lang>: force (or, for 'none', clear) the input language."""
  if arg == 'none':
    forced = None
  else:
    forced = filetype.GCCTypeToFileType(arg)
  filetype.SetForcedFileType(forced)
def AddVersionFlag(*flags):
  """Handle --version: diagnostic mode plus the version banner."""
  AddDiagnosticFlag(*flags)
  env.set('SHOW_VERSION', '1')
def AddBPrefix(prefix):
  """Add a -B path to the host-binary, library, and include search paths."""
  # The unnormalized prefix is what the host-binary search expects.
  AddHostBinarySearchPath(prefix)

  prefix = pathtools.normalize(prefix)
  if pathtools.isdir(prefix) and not prefix.endswith('/'):
    prefix += '/'

  # An existing prefix/ directory joins the library search path.
  if pathtools.isdir(prefix):
    env.append('SEARCH_DIRS', prefix)

  # An existing prefix/include directory becomes a system include dir.
  incdir = prefix + 'include'
  if pathtools.isdir(incdir):
    env.append('ISYSTEM_USER', incdir)
# PNaCl-driver-specific command-line patterns, matched before the generic
# GCC-compatible patterns below.  Each entry pairs a regex (or a tuple of
# regexes for two-token options) with an action: either a python snippet
# (with $0/$1 substituted from the regex groups) or a callable.
CustomPatterns = [
  ( '--driver=(.+)', "env.set('CC', pathtools.normalize($0))\n"),
  ( '--pnacl-allow-native', "env.set('ALLOW_NATIVE', '1')"),
  ( '--pnacl-allow-translate', "env.set('ALLOW_TRANSLATE', '1')"),
  ( '--pnacl-frontend-triple=(.+)', SetTarget),
  ( ('-target','(.+)'), SetTarget),
  ( ('--target=(.+)'), SetTarget),
  ( '--pnacl-exceptions=(none|sjlj)', "env.set('CXX_EH_MODE', $0)"),
  ( '(--pnacl-allow-nexe-build-id)', AddLDFlag),
  ( '(--pnacl-disable-abi-check)', AddLDFlag),
  ( '(--pnacl-disable-pass=.+)', AddLLVMPassDisableFlag),
]
# GCC-compatible command-line patterns.  Entries are tried in order, so the
# catch-alls at the end collect unrecognized flags (reported later via
# UNMATCHED) and input files.  Same (regex, action) format as CustomPatterns.
GCCPatterns = [
  ( '-o(.+)', "env.set('OUTPUT', pathtools.normalize($0))"),
  ( ('-o', '(.+)'), "env.set('OUTPUT', pathtools.normalize($0))"),

  ( '-E', "env.set('GCC_MODE', '-E')"),
  ( '-S', "env.set('GCC_MODE', '-S')"),
  ( '-c', "env.set('GCC_MODE', '-c')"),

  ( '-nostdinc', "env.set('STDINC', '0')"),
  ( '-nostdinc\+\+', "env.set('STDINCCXX', '0')"),
  ( '-nostdlib', "env.set('USE_STDLIB', '0')"),
  ( '-nodefaultlibs', "env.set('DEFAULTLIBS', '0')"),

  ( '-?-stdlib=(.*)', SetStdLib),
  ( ('-?-stdlib', '(.*)'), SetStdLib),

  # Flags to pass to native linker
  ( '(-Wn,.*)', AddLDFlag),
  ( '-rdynamic', "env.append('LD_FLAGS', '-export-dynamic')"),

  # Flags to pass to pnacl-translate
  ( '-Wt,(.*)', AddTranslatorFlag),
  ( ('-Xtranslator','(.*)'), AddTranslatorFlag),

  # We don't care about -fPIC, but pnacl-ld and pnacl-translate do.
  ( '-fPIC', "env.set('PIC', '1')"),

  # We must include -l, -Xlinker, and -Wl options into the INPUTS
  # in the order they appeared. This is the exactly behavior of gcc.
  # For example: gcc foo.c -Wl,--start-group -lx -ly -Wl,--end-group
  #
  ( '(-l.+)', "env.append('INPUTS', $0)"),
  ( ('(-l)','(.+)'), "env.append('INPUTS', $0+$1)"),
  ( ('-Xlinker','(.*)'), "env.append('INPUTS', '-Xlinker=' + $0)"),
  ( '(-Wl,.*)', "env.append('INPUTS', $0)"),
  ( '(-Bstatic)', "env.append('INPUTS', $0)"),
  ( '(-Bdynamic)', "env.append('INPUTS', $0)"),

  ( '-O([sz])', "env.set('OPT_LEVEL', $0)\n"),
  ( '-O([0-3])', "env.set('OPT_LEVEL', $0)\n"),
  ( '-O([0-9]+)', "env.set('OPT_LEVEL', '3')\n"),
  ( '-O', "env.set('OPT_LEVEL', '1')\n"),

  ( ('-isystem', '(.*)'),
    "env.append('ISYSTEM_USER', pathtools.normalize($0))"),
  ( '-isystem(.+)',
    "env.append('ISYSTEM_USER', pathtools.normalize($0))"),
  ( ('-I', '(.+)'), "env.append('CC_FLAGS', '-I'+pathtools.normalize($0))"),
  ( '-I(.+)', "env.append('CC_FLAGS', '-I'+pathtools.normalize($0))"),

  # -I is passed through, so we allow -isysroot and pass it through as well.
  # However -L is intercepted and interpreted, so it would take more work
  # to handle -sysroot w/ libraries.
  ( ('-isysroot', '(.+)'),
    "env.append('CC_FLAGS', '-isysroot ' + pathtools.normalize($0))"),
  ( '-isysroot(.+)',
    "env.append('CC_FLAGS', '-isysroot ' + pathtools.normalize($0))"),

  # NOTE: the -iquote =DIR syntax (substitute = with sysroot) doesn't work.
  # Clang just says: ignoring nonexistent directory "=DIR"
  ( ('-iquote', '(.+)'),
    "env.append('CC_FLAGS', '-iquote', pathtools.normalize($0))"),
  ( ('-iquote(.+)'),
    "env.append('CC_FLAGS', '-iquote', pathtools.normalize($0))"),

  ( ('-idirafter', '(.+)'),
    "env.append('CC_FLAGS', '-idirafter'+pathtools.normalize($0))"),
  ( '-idirafter(.+)',
    "env.append('CC_FLAGS', '-idirafter'+pathtools.normalize($0))"),

  ( ('(-include)','(.+)'), AddCCFlag),
  ( ('(-include.+)'), AddCCFlag),
  ( '(--relocatable-pch)', AddCCFlag),
  ( '(-g)', AddCCFlag),
  ( '(-W.*)', AddCCFlag),
  ( '(-w)', AddCCFlag),
  ( '(-std=.*)', AddCCFlag),
  ( '(-ansi)', AddCCFlag),
  ( ('(-D)','(.*)'), AddCCFlag),
  ( '(-D.+)', AddCCFlag),
  ( ('(-U)','(.*)'), AddCCFlag),
  ( '(-U.+)', AddCCFlag),
  ( '(-f.*)', AddCCFlag),
  ( '(-pedantic)', AddCCFlag),
  ( '(-pedantic-errors)', AddCCFlag),
  ( '(-g.*)', AddCCFlag),
  ( '(-v|--v)', "env.append('CC_FLAGS', $0)\n"
                "env.set('VERBOSE', '1')"),
  ( '(-pthreads?)', "env.set('PTHREAD', '1')"),

  # No-op: accepted for compatibility in case build scripts pass it.
  ( '-static', ""),

  ( ('-B','(.*)'), AddBPrefix),
  ( ('-B(.+)'), AddBPrefix),

  ( ('-L','(.+)'), "env.append('SEARCH_DIRS', pathtools.normalize($0))"),
  ( '-L(.+)', "env.append('SEARCH_DIRS', pathtools.normalize($0))"),

  ( '(-Wp,.*)', AddCCFlag),
  ( '(-Xpreprocessor .*)', AddCCFlag),
  ( ('(-Xclang)', '(.*)'), AddCCFlag),

  # Accept and ignore default flags
  ( '-m32', ""),
  ( '-emit-llvm', ""),

  ( '(-MG)', AddCCFlag),
  ( '(-MMD)', AddCCFlag),
  ( '(-MM?)', "env.append('CC_FLAGS', $0)\n"
              "env.set('GCC_MODE', '-E')"),
  ( '(-MP)', AddCCFlag),
  ( ('(-MQ)','(.*)'), AddCCFlag),
  ( '(-MD)', AddCCFlag),
  ( ('(-MT)','(.*)'), AddCCFlag),
  ( ('(-MF)','(.*)'), "env.append('CC_FLAGS', $0, pathtools.normalize($1))"),

  ( ('-x', '(.+)'), HandleDashX),
  ( '-x(.+)', HandleDashX),

  ( ('(-mllvm)', '(.+)'), AddCCFlag),

  # Ignore these gcc flags
  ( '(-msse)', ""),
  ( '(-march=armv7-a)', ""),
  ( '(-pipe)', ""),

  ( '(-shared)', "env.set('SHARED', '1')"),
  ( '(-s)', AddLDFlag),
  ( '(--strip-all)', AddLDFlag),
  ( '(--strip-debug)', AddLDFlag),

  # Ignore these assembler flags
  ( '(-Qy)', ""),
  ( ('(--traditional-format)', '.*'), ""),
  ( '(-gstabs)', ""),
  ( '(--gstabs)', ""),
  ( '(-gdwarf2)', ""),
  ( '(--gdwarf2)', ""),
  ( '(--fatal-warnings)', ""),
  ( '(-meabi=.*)', ""),
  ( '(-mfpu=.*)', ""),
  ( '(-mfloat-abi=.+)', AddCCFlag),

  # GCC diagnostic mode triggers
  ( '(-print-.*)', AddDiagnosticFlag),
  ( '(--print.*)', AddDiagnosticFlag),
  ( '(-dumpspecs)', AddDiagnosticFlag),
  ( '(--version)', AddVersionFlag),
  # These are preprocessor flags which should be passed to the frontend, but
  # should not prevent the usual -i flags (which DIAGNOSTIC mode does)
  ( '(-d[DIMNU])', AddCCFlag),
  ( '(-d.*)', AddDiagnosticFlag),

  # Catch all other command-line arguments
  ( '(-.+)', "env.append('UNMATCHED', $0)"),

  # Standard input
  ( '-', AddInputFileStdin),

  # Input Files
  # Call ForceFileType for all input files at the time they are
  # parsed on the command-line. This ensures that the gcc "-x"
  # setting is correctly applied.
  ( '(.*)', "env.append('INPUTS', pathtools.normalize($0))\n"
            "filetype.ForceFileType(pathtools.normalize($0))"),
]
def CheckSetup():
  """Abort unless the driver was invoked through a language wrapper.

  The wrappers (pnacl-clang / pnacl-clang++) set IS_CXX before delegating
  here; a bare invocation is a user error.
  """
  if env.has('IS_CXX'):
    return
  Log.Fatal('"pnacl-driver" cannot be used directly. '
            'Use pnacl-clang or pnacl-clang++.')
def DriverOutputTypes(driver_flag, compiling_to_native):
  """Map a gcc-style mode flag to the driver's output file type.

  driver_flag is one of '-E', '-c', '-S', or '' (full link).
  compiling_to_native selects the native flavor over the bitcode one.
  Raises KeyError for any other flag, matching the original table lookup.
  """
  if driver_flag == '-E':
    return 'pp'
  if driver_flag == '-c':
    return 'o' if compiling_to_native else 'po'
  if driver_flag == '-S':
    return 's' if compiling_to_native else 'll'
  if driver_flag == '':
    return 'nexe' if compiling_to_native else 'pexe'
  raise KeyError((driver_flag, compiling_to_native))
def ReadDriverRevision():
  """Return the native_client git revision recorded in the driver REV file.

  Dies (fail-fast) if the REV file exists but does not match the expected
  format, so a format change is caught immediately instead of being
  reported as an unknown version.
  """
  rev_file = env.getone('DRIVER_REV_FILE')
  nacl_ver = DriverOpen(rev_file, 'rb').readlines()[0]
  m = re.search(r'\[GIT\].*/native_client(?:\.git)?:\s*([0-9a-f]{40})',
                nacl_ver)
  if m:
    return m.group(1)
  # BUG FIX (cleanup): the original guarded this with a second `if not m:`
  # check, which is always true once the `if m:` branch returned above.
  Log.Fatal('Failed to parse REV file to get nacl-version.')
def main(argv):
  """Top-level pnacl-driver entry point.

  Parses the gcc-compatible command line, then dispatches to one of:
  diagnostic passthrough, PCH compilation, per-file compilation
  (-E/-S/-c), or compile-then-link. Returns the process exit code.
  """
  env.update(EXTRA_ENV)
  CheckSetup()
  ParseArgs(argv, CustomPatterns + GCCPatterns)

  # "configure", especially when run as part of a toolchain bootstrap
  # process, will invoke gcc with various diagnostic options and
  # parse the output. In these cases we do not alter the incoming
  # commandline. It is also important to not emit spurious messages.
  if env.getbool('DIAGNOSTIC'):
    if env.getbool('SHOW_VERSION'):
      # Append the NaCl revision to clang's first version line.
      code, stdout, stderr = Run(env.get('CC') + env.get('CC_FLAGS'),
                                 redirect_stdout=subprocess.PIPE)
      out = stdout.split('\n')
      nacl_version = ReadDriverRevision()
      out[0] += ' nacl-version=%s' % nacl_version
      stdout = '\n'.join(out)
      print stdout,
    else:
      Run(env.get('CC') + env.get('CC_FLAGS'))
    return 0

  unmatched = env.get('UNMATCHED')
  if len(unmatched) > 0:
    UnrecognizedOption(*unmatched)

  # If -arch was given, we are compiling directly to native code
  compiling_to_native = GetArch() is not None

  if env.getbool('ALLOW_NATIVE'):
    if not compiling_to_native:
      Log.Fatal("--pnacl-allow-native without -arch is not meaningful.")
    # For native/mixed links, also bring in the native libgcc and
    # libcrt_platform to avoid link failure if pre-translated native
    # code needs functions from it.
    env.append('LD_FLAGS', env.eval('-L${LIBS_NATIVE_ARCH}'))
    env.append('STDLIBS', '-lgcc')
    env.append('STDLIBS', '-lcrt_platform')

  flags_and_inputs = env.get('INPUTS')
  output = env.getone('OUTPUT')

  if len(flags_and_inputs) == 0:
    if env.getbool('VERBOSE'):
      # -v can be invoked without any inputs. Runs the original
      # command without modifying the commandline for this case.
      Run(env.get('CC') + env.get('CC_FLAGS'))
      return 0
    else:
      Log.Fatal('No input files')

  gcc_mode = env.getone('GCC_MODE')
  output_type = DriverOutputTypes(gcc_mode, compiling_to_native)

  # '-shared' modifies the output from the linker and should be considered when
  # determining the final output type.
  if env.getbool('SHARED'):
    if compiling_to_native:
      Log.Fatal('Building native shared libraries not supported')
    if gcc_mode != '':
      Log.Fatal('-c, -S, and -E are disallowed with -shared')
    output_type = 'pll'

  # INPUTS consists of actual input files and a subset of flags like -Wl,<foo>.
  # Create a version with just the files.
  inputs = [f for f in flags_and_inputs if not IsFlag(f)]
  header_inputs = [f for f in inputs
                   if filetype.IsHeaderType(filetype.FileType(f))]
  # Handle PCH case specially (but only for a limited sense...)
  if header_inputs and gcc_mode != '-E':
    # We only handle doing pre-compiled headers for all inputs or not at
    # all at the moment. This is because DriverOutputTypes only assumes
    # one type of output, depending on the "gcc_mode" flag. When mixing
    # header inputs w/ non-header inputs, some of the outputs will be
    # pch while others will be output_type. We would also need to modify
    # the input->output chaining for the needs_linking case.
    if len(header_inputs) != len(inputs):
      Log.Fatal('mixed compiling of headers and source not supported')
    CompileHeaders(header_inputs, output)
    return 0

  needs_linking = (gcc_mode == '')

  if env.getbool('NEED_DASH_E') and gcc_mode != '-E':
    Log.Fatal("-E or -x required when input is from stdin")

  # There are multiple input files and no linking is being done.
  # There will be multiple outputs. Handle this case separately.
  if not needs_linking:
    if output != '' and len(inputs) > 1:
      Log.Fatal('Cannot have -o with -c, -S, or -E and multiple inputs: %s',
                repr(inputs))

    for f in inputs:
      intype = filetype.FileType(f)
      # Reject inputs that cannot legally produce the requested output
      # type (e.g. asking for preprocessed output from an object file).
      if not (filetype.IsSourceType(intype) or filetype.IsHeaderType(intype)):
        if ((output_type == 'pp' and intype != 'S') or
            (output_type == 'll') or
            (output_type == 'po' and intype != 'll') or
            (output_type == 's' and intype not in ('ll','po','S')) or
            (output_type == 'o' and intype not in ('ll','po','S','s'))):
          Log.Fatal("%s: Unexpected type of file for '%s'",
                    pathtools.touser(f), gcc_mode)

      if output == '':
        f_output = DefaultOutputName(f, output_type)
      else:
        f_output = output

      namegen = TempNameGen([f], f_output)
      CompileOne(f, output_type, namegen, f_output)
    return 0

  # Linking case
  assert(needs_linking)
  assert(output_type in ('pll', 'pexe', 'nexe'))

  if output == '':
    output = pathtools.normalize('a.out')
  namegen = TempNameGen(flags_and_inputs, output)

  # Compile all source files (c/c++/ll) to .po
  for i in xrange(0, len(flags_and_inputs)):
    if IsFlag(flags_and_inputs[i]):
      continue
    intype = filetype.FileType(flags_and_inputs[i])
    if filetype.IsSourceType(intype) or intype == 'll':
      flags_and_inputs[i] = CompileOne(flags_and_inputs[i], 'po', namegen)

  # Compile all .s/.S to .o
  if env.getbool('ALLOW_NATIVE'):
    for i in xrange(0, len(flags_and_inputs)):
      if IsFlag(flags_and_inputs[i]):
        continue
      intype = filetype.FileType(flags_and_inputs[i])
      if intype in ('s','S'):
        flags_and_inputs[i] = CompileOne(flags_and_inputs[i], 'o', namegen)

  # We should only be left with .po and .o and libraries
  for f in flags_and_inputs:
    if IsFlag(f):
      continue
    intype = filetype.FileType(f)
    if intype in ('o','s','S') or filetype.IsNativeArchive(f):
      if not env.getbool('ALLOW_NATIVE'):
        Log.Fatal('%s: Native object files not allowed in link. '
                  'Use --pnacl-allow-native to override.', pathtools.touser(f))
    assert(intype in ('po','o','so','ldscript') or filetype.IsArchive(f))

  # Fix the user-specified linker arguments: unwrap -Xlinker=/-Wl, forms.
  ld_inputs = []
  for f in flags_and_inputs:
    if f.startswith('-Xlinker='):
      ld_inputs.append(f[len('-Xlinker='):])
    elif f.startswith('-Wl,'):
      ld_inputs += f[len('-Wl,'):].split(',')
    else:
      ld_inputs.append(f)

  if env.getbool('ALLOW_NATIVE'):
    ld_inputs.append('--pnacl-allow-native')

  # Invoke the linker
  env.set('ld_inputs', *ld_inputs)

  ld_args = env.get('LD_ARGS')
  ld_flags = env.get('LD_FLAGS')

  RunDriver('pnacl-ld', ld_flags + ld_args + ['-o', output])
  return 0
def IsFlag(f):
  """Return True when *f* looks like a command-line flag, not a file name."""
  return f[:1] == '-'
def CompileHeaders(header_inputs, output):
  """Precompile each header input.

  An explicit -o destination is only legal when there is exactly one
  header; otherwise every header gets its default PCH output name.
  """
  if output != '' and len(header_inputs) > 1:
    Log.Fatal('Cannot have -o <out> and compile multiple header files: %s',
              repr(header_inputs))
  for header in header_inputs:
    if output:
      dest = output
    else:
      dest = DefaultPCHOutputName(header)
    RunCC(header, dest, mode='', emit_llvm_flag='')
def CompileOne(infile, output_type, namegen, output=None):
  """Compile one input file to *output_type* and return the output path.

  When *output* is None a temporary name is generated from the input.
  """
  if output is None:
    output = namegen.TempNameForInput(infile, output_type)

  pipeline = DriverChain(infile, output, namegen)
  SetupChain(pipeline, filetype.FileType(infile), output_type)
  pipeline.run()
  return output
def RunCC(infile, output, mode, emit_llvm_flag='-emit-llvm'):
  """Invoke clang (${RUN_CC}) on a single input file.

  *mode* is the gcc-style phase flag ('-E', '-S', '-c', or '' for PCH).
  Mutates INCLUDE_CXX_HEADERS in the env before running.
  """
  intype = filetype.FileType(infile)
  typespec = filetype.FileTypeToGCCType(intype)
  # C++ headers are needed either for an explicit C++ driver invocation
  # or when the input itself is a C++ source/header.
  include_cxx_headers = ((env.get('LANGUAGE') == 'CXX') or
                         (intype in ('c++', 'c++-header')))
  env.setbool('INCLUDE_CXX_HEADERS', include_cxx_headers)
  if IsStdinInput(infile):
    infile = '-'
  RunWithEnv("${RUN_CC}", infile=infile, output=output,
             emit_llvm_flag=emit_llvm_flag, mode=mode,
             typespec=typespec)
def RunLLVMAS(infile, output):
  """Assemble LLVM assembly (.ll) into bitcode via pnacl-as."""
  if IsStdinInput(infile):
    infile = '-'
  # This is a bitcode only step - so get rid of "-arch xxx" which
  # might be inherited from the current invocation
  RunDriver('pnacl-as', [infile, '-o', output],
            suppress_inherited_arch_args=True)
def RunNativeAS(infile, output):
  """Assemble a native .s file through pnacl-as (keeps inherited -arch)."""
  src = '-' if IsStdinInput(infile) else infile
  RunDriver('pnacl-as', [src, '-o', output])
def RunTranslate(infile, output, mode):
  """Translate bitcode to native code via pnacl-translate.

  Guarded by --pnacl-allow-translate: premature translation (before
  bitcode linking) is refused by default. Appends -fPIC when PIC is set.
  """
  if not env.getbool('ALLOW_TRANSLATE'):
    Log.Fatal('%s: Trying to convert bitcode to an object file before '
              'bitcode linking. This is supposed to wait until '
              'translation. Use --pnacl-allow-translate to override.',
              pathtools.touser(infile))
  args = env.get('TRANSLATE_FLAGS') + [mode, '--allow-llvm-bitcode-input',
                                       infile, '-o', output]
  if env.getbool('PIC'):
    args += ['-fPIC']
  RunDriver('pnacl-translate', args)
def RunOpt(infile, outfile, pass_list):
  """Run pnacl-opt with *pass_list*, dropping environment-disabled passes."""
  # Hoist the env lookup out of the filter: the original re-evaluated
  # env.get('LLVM_PASSES_TO_DISABLE') once per pass option.
  disabled = env.get('LLVM_PASSES_TO_DISABLE')
  filtered_list = [pass_option for pass_option in pass_list
                   if pass_option not in disabled]
  RunDriver('pnacl-opt', filtered_list + [infile, '-o', outfile])
def SetupChain(chain, input_type, output_type):
  """Populate *chain* with the compilation steps from input_type to output_type.

  Each step appends a (tool, intermediate-type) pair; after every step the
  function returns early once cur_type equals the requested output_type.
  Order of the checks matters — do not reorder.
  """
  assert(output_type in ('pp','ll','po','s','o'))
  cur_type = input_type

  # source file -> pp
  if filetype.IsSourceType(cur_type) and output_type == 'pp':
    chain.add(RunCC, 'cpp', mode='-E')
    cur_type = 'pp'
  if cur_type == output_type:
    return

  # header file -> pre-process
  if filetype.IsHeaderType(cur_type) and output_type == 'pp':
    chain.add(RunCC, 'cpp', mode='-E')
    cur_type = 'pp'
  if cur_type == output_type:
    return

  # source file -> ll
  if (filetype.IsSourceType(cur_type) and
      (env.getbool('FORCE_INTERMEDIATE_LL') or output_type == 'll')):
    chain.add(RunCC, 'll', mode='-S')
    cur_type = 'll'
  if cur_type == output_type:
    return

  # ll -> po
  if cur_type == 'll':
    chain.add(RunLLVMAS, 'po')
    cur_type = 'po'
  if cur_type == output_type:
    return

  # source file -> po (we also force native output to go through this phase
  if filetype.IsSourceType(cur_type) and output_type in ('po', 'o', 's'):
    chain.add(RunCC, 'po', mode='-c')
    cur_type = 'po'
  if cur_type == output_type:
    return

  # po -> o
  if (cur_type == 'po' and output_type == 'o'):
    # If we aren't using biased bitcode, then at least -expand-byval
    # must be run to work with the PPAPI shim calling convention.
    if IsPortable():
      chain.add(RunOpt, 'expand.po', pass_list=['-expand-byval'])
    chain.add(RunTranslate, 'o', mode='-c')
    cur_type = 'o'
  if cur_type == output_type:
    return

  # po -> s
  if cur_type == 'po':
    # If we aren't using biased bitcode, then at least -expand-byval
    # must be run to work with the PPAPI shim calling convention.
    if IsPortable():
      chain.add(RunOpt, 'expand.po', pass_list=['-expand-byval'])
    chain.add(RunTranslate, 's', mode='-S')
    cur_type = 's'
  if cur_type == output_type:
    return

  # S -> s
  if cur_type == 'S':
    chain.add(RunCC, 's', mode='-E')
    cur_type = 's'
    if output_type == 'pp':
      return
  if cur_type == output_type:
    return

  # s -> o
  if cur_type == 's' and output_type == 'o':
    chain.add(RunNativeAS, 'o')
    cur_type = 'o'
  if cur_type == output_type:
    return

  Log.Fatal("Unable to compile .%s to .%s", input_type, output_type)
def get_help(argv):
  """Return the driver help text.

  With --help-full, delegates to the underlying clang's own -help output;
  otherwise returns a short summary of the supported options.
  """
  tool = env.getone('SCRIPT_NAME')

  if '--help-full' in argv:
    # To get ${CC}, etc.
    env.update(EXTRA_ENV)
    code, stdout, stderr = Run('"${CC}" -help',
                               redirect_stdout=subprocess.PIPE,
                               redirect_stderr=subprocess.STDOUT,
                               errexit=False)
    return stdout
  else:
    # NOTE(review): the option-column alignment below was reconstructed;
    # the original file's exact spacing was lost — confirm against upstream.
    return """
This is a "GCC-compatible" driver using clang under the hood.

Usage: %s [options] <inputs> ...

BASIC OPTIONS:

  -o <file>             Output to <file>.
  -E                    Only run the preprocessor.
  -S                    Generate bitcode assembly.
  -c                    Generate bitcode object.
  -I <dir>              Add header search path.
  -L <dir>              Add library search path.
  -D<key>[=<val>]       Add definition for the preprocessor.
  -W<id>                Toggle warning <id>.
  -f<feature>           Enable <feature>.
  -Wl,<arg>             Pass <arg> to the linker.
  -Xlinker <arg>        Pass <arg> to the linker.
  -Wt,<arg>             Pass <arg> to the translator.
  -Xtranslator <arg>    Pass <arg> to the translator.
  -Wp,<arg>             Pass <arg> to the preprocessor.
  -Xpreprocessor,<arg>  Pass <arg> to the preprocessor.
  -x <language>         Treat subsequent input files as having type <language>.
  -static               Produce a static executable (the default).
  -Bstatic              Link subsequent libraries statically.
  -Bdynamic             Link subsequent libraries dynamically.
  -fPIC                 Ignored (only used by translator backend)
                        (accepted for compatibility).
  -pipe                 Ignored (for compatibility).
  -O<n>                 Optimation level <n>: 0, 1, 2, 3, 4 or s.
  -g                    Generate complete debug information.
  -gline-tables-only    Generate debug line-information only
                        (allowing for stack traces).
  -flimit-debug-info    Generate limited debug information.
  -save-temps           Keep intermediate compilation results.
  -v                    Verbose output / show commands.
  -h | --help           Show this help.
  --help-full           Show underlying clang driver's help message
                        (warning: not all options supported).
""" % (tool)
| [
"enrico.weigelt@gr13.net"
] | enrico.weigelt@gr13.net |
62534f045eeb94f5d86597eaacf9a34a6e611f66 | c013152ee83ef19552b5ef24e45624f47d36163a | /predict_module/predict.py | 39c13aa712f02e796ba445c545176fed37db9c09 | [] | no_license | stone0705/MyDiscordBOT | 2e9d1c8642e49fd663f138b43ca16bc73ff15173 | a4a4d24a3056e27643d1caaf281f8e1cbb790741 | refs/heads/master | 2021-06-12T08:39:48.339587 | 2021-02-23T14:45:30 | 2021-02-23T14:45:30 | 129,620,943 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,336 | py | import predict_module.arima_predictor as arima_predictor
import predict_module.data_module as data_module
import module.mltd_api as mltd_api
from module.log_handler import get_file_and_stream_logger
logger = get_file_and_stream_logger('discord.predict')
def save_predictor(event_id, rank_num):
    """Fit an auto-ARIMA model for one (event, rank) series and persist it.

    Skips fitting (with a log message) when the series has too few samples.
    """
    data = data_module.get_pandas_data(event_id, rank_num)
    if len(data) <= 6:
        logger.info('sample is too less for create predictor')
        return
    predictor = arima_predictor.get_auto_arima_preditctor(data)
    arima_predictor.save_predictor(predictor, event_id, rank_num)
    logger.info('event:{} rank:{} save auto arima model DONE'.format(event_id, rank_num))
def thread_save_predictor(event_id):
    """Refresh the saved predictor for every monitored rank of *event_id*."""
    for monitored_rank in mltd_api.get_monitor_rank():
        save_predictor(event_id, monitored_rank)
def predict(event_id, rank_num, n_step):
    """Forecast absolute scores for the next *n_step* points of a rank border.

    Returns (scores, predictor_info), or None when no saved predictor exists
    for this (event, rank) pair.
    """
    predictor = arima_predictor.load_predictor(event_id, rank_num)
    if not predictor:
        return None
    # The model predicts score *deltas*; accumulate them onto the latest
    # known absolute score.
    predict_values = predictor.predict(n_step)
    last_value = mltd_api.get_last_rank(event_id)[rank_num]['score']
    # PERF FIX: the original recomputed sum(predict_values[0:i+1]) for every
    # i (O(n^2)). A running total accumulates in the same left-to-right
    # order, so results are bit-identical.
    sum_list = []
    running_total = 0
    for diff in predict_values:
        running_total += diff
        sum_list.append(int(last_value + running_total))
    return sum_list, predictor.predictor_info
"stone07050@gmail.com"
] | stone07050@gmail.com |
9fbb83b194430a63c9e1f46639c66baafb84800c | c2feb53a5bc347823fb120f47aabe653203821e2 | /app.py | a9648ffd2f66800d5ca331c0786df5ebf57fc098 | [] | no_license | shivarajmishra/cvdwebapp-py | 95de5be1fba616857bbd628cc9759809ef5bc2c4 | 1bb3cac4a914cee6b4188e9da07b490de60dfee6 | refs/heads/main | 2023-03-23T03:12:47.309165 | 2021-03-06T11:27:15 | 2021-03-06T11:27:15 | 343,658,455 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,184 | py | # -*- coding: utf-8 -*-
import numpy as np
import pickle
from flask import Flask, request, render_template
# Load ML model.
# BUG FIX: the original used pickle.load(open(...)) and never closed the
# file handle; a context manager releases it deterministically.
with open('model_heart.pkl', 'rb') as _model_file:
    model = pickle.load(_model_file)

# Create application
app = Flask(__name__)
# Bind home function to URL
@app.route('/')
def home():
    """Render the input form page."""
    return render_template('index.html')
# Bind predict function to URL
@app.route('/predict', methods=['POST'])
def predict():
    """Score the submitted form values and render the verdict page."""
    # Put all form entries values in a list (form order).
    features = [float(i) for i in request.form.values()]
    # The model expects a 2-D input: one row of features.
    array_features = [np.array(features)]
    prediction = model.predict(array_features)
    # BUG FIX: the original compared the whole prediction *array* to 1,
    # which only works by accident for a single-element result (and is
    # ambiguous/deprecated for anything larger). Compare the scalar.
    output = prediction[0]
    # NOTE(review): this maps class 1 -> "Unlikely"; many heart-disease
    # datasets use 1 = disease present — confirm against the training data.
    if output == 1:
        return render_template('index.html',
                               result = 'Heart disease - Unlikely. No need to worry!')
    else:
        return render_template('index.html',
                               result = 'Heart disease - Likely.Please go and see a doctor!')
if __name__ == '__main__':
    # Run the Flask development server.
    app.run()
| [
"noreply@github.com"
] | shivarajmishra.noreply@github.com |
5fce59d5a94e156aea5b898047ea4662ddc0249a | caa8a8d6c4f3b585022308d520ab418d8dd57fb2 | /Sudoku/wsgi.py | 7dd09a15b8fd2abfdd12bf954b47c25116838b92 | [] | no_license | ashishkarkera1/Sudoku-Solver | 74eafa9767c6ec6949f726c0d4bc776d44f587b7 | 48c1316dc1ddc1600eba02da3ce9dc20a789e5e7 | refs/heads/main | 2023-01-10T22:57:54.780682 | 2020-11-11T07:34:15 | 2020-11-11T07:34:15 | 311,748,585 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py | """
WSGI config for Sudoku project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings before building the WSGI callable.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Sudoku.settings')

application = get_wsgi_application()
| [
"ashish.mp139@gmail.com"
] | ashish.mp139@gmail.com |
6235ff1283a1cd1df9f2920ac2d4acc0b4fda5f2 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_tubercles.py | 1fd9350940d02997c44f6017604e905edf183a0b | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 230 | py |
#calss header
class _TUBERCLES():
def __init__(self,):
self.name = "TUBERCLES"
self.definitions = tubercle
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['tubercle']
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
f0451f16b411e970a5d7200a3d708e5364c0ed27 | 4e84fa17701451c25ccb81e71ececd39c95ca29c | /Gorillas/color.py | 62e59171884ca7d9c937ec24bc562483b5711520 | [] | no_license | inquisitev/SE3860-Reengineering | 989d84f2c133d864aa53eea8678611a908a70755 | 2d06a070f254bc4e7f2b481f9eb5c5de07f2665c | refs/heads/master | 2023-05-13T01:00:34.710560 | 2021-03-27T02:19:59 | 2021-03-27T02:19:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 283 | py | class Color(object):
"""Helper class for Color constants"""
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
LIGHT_GRAY = (211, 211, 211)
DARK_GRAY = (169, 169, 169)
INVISIBLE = (255, 255, 255, 0)
| [
"adamrhuber@gmail.com"
] | adamrhuber@gmail.com |
1efff66c50dfe7bfa3de1e86997b18a82893fa6b | e98cbf9aa4685f305849f47a7fe2d6688e8f151e | /server/demo/keras_text_summarization/library/seq2seq_test.py | 7078244d2fd6f6a06343a978f86256980b6d22d8 | [] | no_license | JosephNaa/CapstonDesign-titleGenerator | 7cdbd403118c3575d3ece1f234ce7153ce97ecfe | 0810f490e600cfec31618be4413acbcc210164a9 | refs/heads/master | 2022-11-08T03:04:26.794942 | 2020-06-22T18:29:35 | 2020-06-22T18:29:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,812 | py | from __future__ import print_function
from keras.models import Model
from keras.layers import Embedding, Dense, Input
from keras.layers.recurrent import LSTM
from keras.preprocessing.sequence import pad_sequences
from keras.callbacks import ModelCheckpoint
from keras_text_summarization.library.utility.glove_loader import load_glove, GLOVE_EMBEDDING_SIZE
import numpy as np
import os
from konlpy.tag import Okt
import re
HIDDEN_UNITS = 100
DEFAULT_BATCH_SIZE = 64
VERBOSE = 1
DEFAULT_EPOCHS = 10
okt = Okt()
class Seq2SeqSummarizer(object):
    """Encoder-decoder LSTM summarizer for Korean text (tokenized with Okt).

    Builds three Keras models sharing weights: the training model
    (teacher forcing), plus separate encoder/decoder models for greedy
    inference in :meth:`summarize`.
    """

    model_name = 'seq2seq'

    def __init__(self, config):
        """Build the networks from a config produced by the fit pipeline.

        config carries vocabulary sizes, max sequence lengths and the
        word<->index lookup tables for both input and target sides.
        """
        self.num_input_tokens = config['num_input_tokens']
        self.max_input_seq_length = config['max_input_seq_length']
        self.num_target_tokens = config['num_target_tokens']
        self.max_target_seq_length = config['max_target_seq_length']
        self.input_word2idx = config['input_word2idx']
        self.input_idx2word = config['input_idx2word']
        self.target_word2idx = config['target_word2idx']
        self.target_idx2word = config['target_idx2word']
        self.config = config

        self.version = 0
        if 'version' in config:
            self.version = config['version']

        # Encoder: embedding -> LSTM; only the final (h, c) states are kept.
        encoder_inputs = Input(shape=(None,), name='encoder_inputs')
        encoder_embedding = Embedding(input_dim=self.num_input_tokens, output_dim=HIDDEN_UNITS,
                                      input_length=self.max_input_seq_length, name='encoder_embedding')
        encoder_lstm = LSTM(units=HIDDEN_UNITS, return_state=True, name='encoder_lstm')
        encoder_outputs, encoder_state_h, encoder_state_c = encoder_lstm(encoder_embedding(encoder_inputs))
        encoder_states = [encoder_state_h, encoder_state_c]

        # Decoder: one-hot inputs -> LSTM (seeded with encoder states) -> softmax.
        decoder_inputs = Input(shape=(None, self.num_target_tokens), name='decoder_inputs')
        decoder_lstm = LSTM(units=HIDDEN_UNITS, return_state=True, return_sequences=True, name='decoder_lstm')
        decoder_outputs, decoder_state_h, decoder_state_c = decoder_lstm(decoder_inputs,
                                                                         initial_state=encoder_states)
        decoder_dense = Dense(units=self.num_target_tokens, activation='softmax', name='decoder_dense')
        decoder_outputs = decoder_dense(decoder_outputs)

        model = Model([encoder_inputs, decoder_inputs], decoder_outputs)

        model.compile(loss='categorical_crossentropy', optimizer='nadam', metrics=['accuracy'])

        self.model = model

        # Inference-time models reuse the layers (and thus weights) above.
        self.encoder_model = Model(encoder_inputs, encoder_states)

        decoder_state_inputs = [Input(shape=(HIDDEN_UNITS,)), Input(shape=(HIDDEN_UNITS,))]
        decoder_outputs, state_h, state_c = decoder_lstm(decoder_inputs, initial_state=decoder_state_inputs)
        decoder_states = [state_h, state_c]
        decoder_outputs = decoder_dense(decoder_outputs)
        self.decoder_model = Model([decoder_inputs] + decoder_state_inputs, [decoder_outputs] + decoder_states)

    def load_weights(self, weight_file_path):
        """Load trained weights when the file exists; otherwise do nothing."""
        if os.path.exists(weight_file_path):
            self.model.load_weights(weight_file_path)

    def transform_input_text(self, texts):
        """Tokenize texts and convert them to padded index sequences.

        Unknown tokens map to index 1. Sequences are truncated/padded to
        max_input_seq_length.
        """
        temp = []
        for line in texts:
            x = []
            # Normalize symbols the tokenizer cannot handle, then strip
            # punctuation. (Replacement strings are Korean readings.)
            line = re.sub("↑", "상승", line)
            line = re.sub("↓", "하강", line)
            line = re.sub("%", "퍼센트", line)
            line = re.sub(u"[,.'-_:;#$…’‘”“·…\[\]\"]", " ", line)
            tokenized_data = okt.pos(line)
            for word in tokenized_data:
                wid = 1
                if word in self.input_word2idx:
                    wid = self.input_word2idx[word]
                x.append(wid)
                if len(x) >= self.max_input_seq_length:
                    break
            temp.append(x)
        temp = pad_sequences(temp, maxlen=self.max_input_seq_length)

        print(temp.shape)
        return temp

    def transform_target_encoding(self, texts):
        """Tokenize target texts, wrapping each in START/END markers."""
        temp = []
        for line in texts:
            x = []
            line = re.sub("↑", "상승", line)
            line = re.sub("↓", "하강", line)
            line = re.sub("%", "퍼센트", line)
            line = re.sub(u"[,.'-_:;#$…’‘”“·…\[\]\"]", " ", line)
            tokenized_data = okt.pos(line)
            # NOTE(review): the end marker is ' END' (leading space) here but
            # compared as 'END' elsewhere — confirm this asymmetry is intended.
            tokenized_data = ['START'] + tokenized_data + [' END']
            for word in tokenized_data:
                x.append(word)
                if len(x) >= self.max_target_seq_length:
                    break
            temp.append(x)

        temp = np.array(temp)
        print(temp.shape)
        return temp

    def generate_batch(self, x_samples, y_samples, batch_size):
        """Yield (inputs, targets) batches forever for fit_generator.

        Decoder input is the one-hot target sequence; the training target is
        the same sequence shifted left by one step (teacher forcing).
        """
        num_batches = len(x_samples) // batch_size
        while True:
            for batchIdx in range(0, num_batches):
                start = batchIdx * batch_size
                end = (batchIdx + 1) * batch_size
                encoder_input_data_batch = pad_sequences(x_samples[start:end], self.max_input_seq_length)
                decoder_target_data_batch = np.zeros(shape=(batch_size, self.max_target_seq_length, self.num_target_tokens))
                decoder_input_data_batch = np.zeros(shape=(batch_size, self.max_target_seq_length, self.num_target_tokens))
                for lineIdx, target_words in enumerate(y_samples[start:end]):
                    for idx, w in enumerate(target_words):
                        w2idx = 0  # default [UNK]
                        if w in self.target_word2idx:
                            w2idx = self.target_word2idx[w]
                        if w2idx != 0:
                            decoder_input_data_batch[lineIdx, idx, w2idx] = 1
                            if idx > 0:
                                decoder_target_data_batch[lineIdx, idx - 1, w2idx] = 1
                yield [encoder_input_data_batch, decoder_input_data_batch], decoder_target_data_batch

    @staticmethod
    def get_weight_file_path(model_dir_path):
        """Path of the saved weights file under *model_dir_path*."""
        return model_dir_path + '/' + Seq2SeqSummarizer.model_name + '-weights.h5'

    @staticmethod
    def get_config_file_path(model_dir_path):
        """Path of the saved config (.npy) file under *model_dir_path*."""
        return model_dir_path + '/' + Seq2SeqSummarizer.model_name + '-config.npy'

    @staticmethod
    def get_architecture_file_path(model_dir_path):
        """Path of the saved architecture (JSON) file under *model_dir_path*."""
        return model_dir_path + '/' + Seq2SeqSummarizer.model_name + '-architecture.json'

    def fit(self, Xtrain, Ytrain, Xtest, Ytest, epochs=None, batch_size=None, model_dir_path=None):
        """Train the model, checkpointing weights/config/architecture to disk.

        Returns the Keras History object.
        """
        if epochs is None:
            epochs = DEFAULT_EPOCHS
        if model_dir_path is None:
            model_dir_path = './models'
        if batch_size is None:
            batch_size = DEFAULT_BATCH_SIZE

        self.version += 1
        self.config['version'] = self.version

        config_file_path = Seq2SeqSummarizer.get_config_file_path(model_dir_path)
        weight_file_path = Seq2SeqSummarizer.get_weight_file_path(model_dir_path)
        checkpoint = ModelCheckpoint(weight_file_path)
        np.save(config_file_path, self.config)
        architecture_file_path = Seq2SeqSummarizer.get_architecture_file_path(model_dir_path)
        # NOTE(review): file handle is not closed explicitly here.
        open(architecture_file_path, 'w').write(self.model.to_json())

        Ytrain = self.transform_target_encoding(Ytrain)
        Ytest = self.transform_target_encoding(Ytest)

        Xtrain = self.transform_input_text(Xtrain)
        Xtest = self.transform_input_text(Xtest)

        train_gen = self.generate_batch(Xtrain, Ytrain, batch_size)
        test_gen = self.generate_batch(Xtest, Ytest, batch_size)

        train_num_batches = len(Xtrain) // batch_size
        test_num_batches = len(Xtest) // batch_size

        history = self.model.fit_generator(generator=train_gen, steps_per_epoch=train_num_batches,
                                           epochs=epochs,
                                           verbose=VERBOSE, validation_data=test_gen, validation_steps=test_num_batches,
                                           callbacks=[checkpoint])
        self.model.save_weights(weight_file_path)
        return history

    def summarize(self, input_text):
        """Greedy-decode a summary (title) for *input_text*.

        Encodes the text once, then repeatedly feeds the previously sampled
        token back into the decoder until END or the length limit.
        """
        input_seq = []
        input_wids = []
        input_text = re.sub(u"[,.'-_:;#$…’‘”“·…\[\]\"]", " ", input_text)
        input_text = re.sub("↑", "상승", input_text)
        input_text = re.sub("↓", "하강", input_text)
        input_text = re.sub("%", "퍼센트", input_text)
        line = okt.pos(input_text)
        # print(line)
        for word in line:
            idx = 1  # default [UNK]
            if word in self.input_word2idx:
                idx = self.input_word2idx[word]
            # print(idx)
            input_wids.append(idx)
        input_seq.append(input_wids)
        input_seq = pad_sequences(input_seq, self.max_input_seq_length)
        states_value = self.encoder_model.predict(input_seq)
        target_seq = np.zeros((1, 1, self.num_target_tokens))
        target_seq[0, 0, self.target_word2idx['START']] = 1
        target_text = ''
        target_text_len = 0
        terminated = False
        while not terminated:
            output_tokens, h, c = self.decoder_model.predict([target_seq] + states_value)

            sample_token_idx = np.argmax(output_tokens[0, -1, :])
            sample_word = self.target_idx2word[sample_token_idx]
            # print(sample_word[0])
            target_text_len += 1

            if sample_word != 'START' and sample_word[0] != 'END':
                target_text += ' ' + sample_word[0]

            if sample_word == 'END' or target_text_len >= self.max_target_seq_length:
                terminated = True

            # Feed the sampled token back in as the next decoder input.
            target_seq = np.zeros((1, 1, self.num_target_tokens))
            target_seq[0, 0, sample_token_idx] = 1

            states_value = [h, c]
        return target_text.strip()
| [
"noreply@github.com"
] | JosephNaa.noreply@github.com |
095d1a7c8922fccbf07990c9763b418b26e9ddad | 940a7c5fd90a92d6ee9576245631748400c9e858 | /hunger.py | 4e6ed64cce147fa44bb21437a0c4cf8690408405 | [] | no_license | nilaybnrj1/test | c39af083568263610b23ac269560b6372b210728 | cb1494cc9b49b343fd7c9b1a501cac742bc31744 | refs/heads/master | 2020-07-22T09:21:55.247654 | 2019-09-10T18:13:57 | 2019-09-10T18:13:57 | 207,148,712 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 287 | py | output = raw_input("Are you hungry??")
# BUG FIX: str.capitalize() produces "Yes" (only the first letter upper),
# so the original comparison against "YES" was always False and the food
# branch could never run. Compare case-insensitively instead.
if output.lower() == "yes":
    print("Eat Burger")
    print("Eat pizza")
    print("Ëat fries")
else:
    print("Go Study")

thirsty = raw_input("Do u want water??")
if thirsty == "yes":
    print("Give him Water")
| [
"nilaybnrj@gmail.com"
] | nilaybnrj@gmail.com |
00cb10bec4ffa0cc0c712066567af7bbfef85cee | 6f7408443a8dd8e620fe6b77dae59a2ec86fc593 | /core/core.py | 4103e61d6bdb69cdf2eaf6fb5a5886a3eaa4d30f | [
"Apache-2.0"
] | permissive | 3b295/FolderDiffTool | 6e0386845a48e58a85f0e54dcd0509ac8d633c46 | fdd014fbdf92e3011b645c89a7c87a8049699e26 | refs/heads/master | 2021-01-19T10:03:14.333345 | 2017-04-23T08:47:50 | 2017-04-23T08:47:50 | 87,820,832 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,867 | py | # -*- coding: utf-8 -*-
import os
import time
import copy
from typing import Union
from hashlib import sha1
from .tools.serialization import load_dict, save_dict
from .tools.color_print import Green, Red, Black, Blue
STRFTIME_TEMP = '%Y-%m-%d %H:%M:%S'
class TreeNode(object):
    """Tree node: a file-system entry's name plus timestamp attributes.

    Two nodes compare equal when both name and attrs match. The hash is
    derived from attrs only, so same-content nodes with different names
    share a hash (intentional for diffing).
    """
    # Todo: add __annotations__ for function

    def __init__(self, name=None, *args, **kwargs):
        """Create a node; ctime/atime/mtime may be passed as keywords."""
        # BUG FIX (cleanup): dict.get with a default never raises, so the
        # original try/except ValueError around these lookups was dead code.
        self.attrs = {
            'ctime': kwargs.get('ctime', None),
            'atime': kwargs.get('atime', None),
            'mtime': kwargs.get('mtime', None),
        }
        self.name = name

    def __eq__(self, other):
        if isinstance(other, self.__class__) and self.attrs == other.attrs \
                and self.name == other.name:
            return True
        return False

    def __repr__(self):
        return "< {} name: {} {}>".format(self.__class__, self.name, self.__hash__())

    def __hash__(self):
        # Nodes whose attributes all match (name excluded) hash equal.
        return hash(frozenset(sorted(self.attrs.items())))
class FileNode(TreeNode):
    """Leaf node representing a regular file."""
    pass
class FolderNode(TreeNode):
    """Directory node: a TreeNode owning a set of child nodes."""

    def __init__(self, *args, **kwargs):
        super(FolderNode, self).__init__(*args, **kwargs)
        self._subnodes = set()

    def add_subnode(self, subnode):
        """Attach *subnode* as a direct child of this folder."""
        self._subnodes.add(subnode)

    def get_subnodes_amount(self):
        """Return the number of direct children."""
        # Idiom fix: call len() instead of the __len__() dunder directly.
        return len(self._subnodes)

    def get_subnodes(self) -> set:
        """Return subnodes of set form."""
        return self._subnodes

    # FIXME: equality of the folder's own attrs and equality of its
    # children should be split into two separate functions.
    def __eq__(self, other):
        if not super(FolderNode, self).__eq__(other):
            return False
        # Children are already a set; compare directly.
        return set(self._subnodes) == set(other._subnodes)

    def __hash__(self):
        return super(FolderNode, self).__hash__()
class FileTree(object):
"""文件树"""
def __init__(self):
self._tree = None
@classmethod
def from_folder(cls, folder):
"""使用一个文件夹路径来初始化这棵树
:param folder: 文件夹路径
:return: None
"""
ex = cls.__new__(cls)
ex._tree = ex._create_help(folder)
return ex
@classmethod
def from_json(cls, file):
"""使用一个JSON文件初始化树"""
ex = cls.__new__(cls)
ex._tree = FileTree.dict2tree(load_dict(file))
return ex
def save_json(self, folder):
"""保存为JSON格式的文件 """
save_dict(folder, self.convert2dict())
def convert2dict(self) -> dict:
"""将树转化为易于转化为dict的形式(尽量兼容JSON)
:return: dict object
"""
def func(node):
rst = {'name': node.name, 'attrs': node.attrs}
if isinstance(node, FolderNode):
subnodes = [func(x) for x in node.get_subnodes()]
rst.update({'subnodes': subnodes})
return rst
return func(self._tree)
@staticmethod
def dict2tree(_dict):
"""将 convert2dict 的过程反过来"""
def func(node):
f = None
ls = []
for k, v in node.items():
if k == 'subnodes':
f = FolderNode()
for sub in v:
f.add_subnode(func(sub))
else:
ls.append((k, v))
if not f:
f = FileNode()
for k, v in ls:
setattr(f, k, v)
return f
return func(_dict)
def _create_help(self, _path: str) -> Union[FileNode, FolderNode]:
"""
用于创建树形结构的迭代函数
:param _path:
:return:
"""
if os.path.isfile(_path):
filename = os.path.split(_path)[1]
rst = FileNode(filename, **self._get_date(_path))
elif os.path.isdir(_path):
foldername = _path.split('\\')[-1]
rst = FolderNode(foldername, **self._get_date(_path))
fs = os.listdir(_path)
for f in fs:
whole_f = os.path.join(_path, f)
rst.add_subnode(self._create_help(whole_f))
else:
raise PathStrError('_path is {}'.format(_path))
return rst
def _get_date(self, folder: str) -> dict:
"""
获取详细的文件信息
:param folder: 获取信息的文件路径
:return: 详细的文件信息
"""
rst = {
'ctime': time.strftime(STRFTIME_TEMP, time.localtime(os.path.getctime(folder))),
'atime': time.strftime(STRFTIME_TEMP, time.localtime(os.path.getatime(folder))),
'mtime': time.strftime(STRFTIME_TEMP, time.localtime(os.path.getmtime(folder))),
}
return rst
def deff(self, other):
"""
:param other: other tree deffed
:return: None
"""
def rec(former, latter):
rst = former.__class__()
if isinstance(rst, FolderNode):
for i in set(latter.attrs.keys()) | set(former.attrs.keys()):
if i in ['mtime']: # 文件夹记录mtime的话 页面有点乱
continue
rst.attrs[i] = DoalData(former.attrs.get(i, None), latter.attrs.get(i, None))
ls = {x.name: x for x in latter.get_subnodes()}
for sub in former.get_subnodes():
l = ls.pop(sub.name, None)
if l:
rst.add_subnode(rec(sub, l))
else:
rst.add_subnode(rec(sub, sub.__class__()))
for v in ls.values():
rst.add_subnode(rec(v.__class__(), v))
else:
for i in set(latter.attrs.keys()) | set(former.attrs.keys()):
rst.attrs[i] = DoalData(former.attrs.get(i, None), latter.attrs.get(i, None))
rst.name = DoalData(former.name, latter.name)
return rst
rst = DiffFileTree()
rst._tree = rec(self._tree, other._tree)
return rst
def graph(self, ctime=False, atime=False, mtime=False):
""" 图形化的树形结构 """
def get_tree(tree, cur_level, indention={}):
"""递归的打印tree"""
rst = ''
indention[cur_level] = tree.get_subnodes_amount()
for subnode in tree.get_subnodes():
# FIXME: \n in linux
for l in range(cur_level):
rst += '│ ' if indention[l] > 1 else ' '
extra_data = ''
# 添加额外的内容
if ctime:
extra_data += "\t{}\t".format(subnode.attrs['ctime'])
if atime:
extra_data += "\t{}\t".format(subnode.attrs['atime'])
# FIXME: 文件夹不想显示mtime属性, 先暂时屏蔽掉
if mtime:
m = subnode.attrs['mtime']
if m:
extra_data += "\t{}\t".format(m)
# from IPython import embed
# embed()
if indention[cur_level] > 1:
rst += '├── {}\t\t\t{}\n'.format(subnode.name, extra_data)
elif indention[cur_level] == 1:
rst += '└── {}\t\t\t{}\n'.format(subnode.name, extra_data)
if isinstance(subnode, FolderNode):
rst += get_tree(subnode, cur_level + 1)
indention[cur_level] -= 1
return rst
return get_tree(self._tree, 0)
def __repr__(self):
return "<FileTree: folder in {}>".format(self._tree.name) if self._tree else "<FileTree: None>"
class DiffFileTree(FileTree):
"""表示两棵树diff的结果
name attrs中的数据全部被变成DoalDta类型的双重数据
"""
pass
class DoalData(object):
"""同时储存新旧两个值"""
NEW = 1
CHANGE = 2
DEL = 3
NOCHANGE = 4
def __init__(self, old, new):
self.new = new
self.old = old
if new:
if old:
if new == old:
self.type = self.NOCHANGE
else:
self.type = self.CHANGE
else:
self.type = self.NEW
else:
if old:
self.type = self.DEL
else:
raise TypeError('old and new can not be None!')
def __str__(self):
if self.type == self.NEW:
return Green(str(self.new))
elif self.type == self.DEL:
return Blue(str(self.old))
elif self.type == self.CHANGE:
return Red(str(self.old) + ' --> ' + str(self.new))
elif self.type == self.NOCHANGE:
return str(self.old)
else:
raise TypeError("<DoalData#type> is only [NEW, CHANGE, OLD]")
def __getattr__(self, item):
# FIXME: 子元素究竟用谁的子节点, 先暂时用原来的, 应该用两个子节点diff 过后的结果来做的
if self.type == self.DEL:
raise AttributeError("<DoalData> data is deleted")
elif self.type == self.NEW:
return getattr(self.new, item)
elif self.type == self.CHANGE:
return getattr(self.old, item)
else:
raise TypeError("<DoalData#type> is only [NEW, CHANGE, OLD]")
class PathStrError(Exception):
"""path string Error """
pass
| [
"xwystz@gmail.com"
] | xwystz@gmail.com |
d93f1eac9a51b554e79f2210ef4ec9efb9dc75e3 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02785/s616461833.py | e25ba5505a0fe199b73bcb1668bb380fc510363a | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 100 | py | n, k = map(int, input().split())
h = list(map(int, input().split()))
print(sum(sorted(h)[::-1][k:])) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
62399305566740e036f5d3cadfa83129029a47be | 4b1ee798df48d1082cf86fc31b631919f54a842f | /scripts/compare human faces and tell is it the same person.py | c08cd727d4e222b13f1fe8e9a37e1e0692ca5943 | [
"MIT"
] | permissive | salemalem/opencv-python-projects | c28eca14a8663de958e34ebbffb28ea088bace43 | 07e3985bc57baeb113026006f179cc07b0af71b9 | refs/heads/main | 2023-01-23T12:07:09.049370 | 2020-12-07T20:49:39 | 2020-12-07T20:49:39 | 316,210,086 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 297 | py | import cv2
import numpy as np
"""
follow this article to install prerequisites
https://nitratine.net/blog/post/python-face-recognition-tutorial/
because you have to install cmake and visual studio c++ build tools
"""
# import face_recognition
# imgAnna = face_recognition.load_image_file("../r") | [
"shyngys.shynbolatov@gmail.com"
] | shyngys.shynbolatov@gmail.com |
32fed0e2f70a9029b625edcb50dbddec5e20b476 | 344f14720343c0cb5b05c98bf83738d3c7974568 | /reviews/urls.py | e98b379fe88b58aa450764eb4549d78a9b2111ac | [] | no_license | oliviamcreativelabs/distiller | 37b486761ecc653fa9c75dd97fd747fef3cf411e | 1698c0033c49a2a4a4ebad09a10b8f85bf451a01 | refs/heads/master | 2022-12-23T20:32:43.963902 | 2020-01-11T06:45:51 | 2020-01-11T06:45:51 | 228,523,823 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 430 | py | from django.urls import path
from . import views
app_name = 'reviews'
urlpatterns = [
path('', views.review, name='review'),
path('reviews/<int:review_id>', views.review_detail, name='review_detail'),
path('whiskey', views.whiskey_list, name='whiskey_list'),
path('whiskey/', views.whiskey_detail, name='whiskey_detail'),
path('whiskey/<int:whiskey_id>/add_review', views.add_review, name='add_review'),
]
| [
"contactus@oliviamcreativelabs.com"
] | contactus@oliviamcreativelabs.com |
bd8ee5ad27a5a862644e0a2854c0e2ccf6290776 | e91f6eeeee5aa92b8be609a02b33f951f5209885 | /load_basic.py | 786273d7ce93cca89f01d446b1cde39f15243e5f | [
"LicenseRef-scancode-public-domain"
] | permissive | yyztc/itop | ae72eb1f55dc1e7acf6407a1e4dc907c8ff37de7 | f674627f5ce3bfd0d966a2da2b2cb77ac06af235 | refs/heads/master | 2020-11-28T09:20:02.855734 | 2018-01-19T05:40:57 | 2018-01-19T05:40:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,773 | py | from sqlalchemy import create_engine
import re
from pandas import Series, DataFrame, concat
import pandas as pd
from pymongo import MongoClient
import subprocess as t
import pdb
import logging
from logging.config import fileConfig
import configparser
fileConfig('logger_config.ini')
logger=logging.getLogger('infoLogger')
class LoadBasic():
def __init__(self):
self.cfg = configparser.ConfigParser()
self.cfg.read("config.ini")
cmdb_db = self.cfg.get("cmdb","db")
cmdb_str = self.cfg.get("cmdb","conn_str")
self.client = MongoClient(cmdb_str)
self.db = self.client[cmdb_db]
self.engine = create_engine(
"mysql+pymysql://root:Password1@127.0.0.1:3306/itop?charset=utf8", encoding="utf-8", echo=False)
def load_to_itopdb(self, df, source_table_name):
self.engine.execute("delete from %s" % source_table_name)
df.to_sql(source_table_name, con=self.engine,
if_exists='append', index=False)
def apply_by_php(self, source_table_name):
source_table_id = source_table_name.split('_').pop()
php_cmd = "php -q /itop_data/http_dir/itop/synchro/synchro_exec.php --auth_user=%s --auth_pwd=%s --data_sources=%s" % (
'admin', 'Password1', source_table_id)
output = t.getoutput(php_cmd)
logger.info(output + "\n")
def load_location(self):
location_source_table = 'synchro_data_location_77'
location_coll = self.db['merge_location']
location_df = pd.DataFrame(list(location_coll.find())).assign(org_id=lambda x: 1, name=lambda x: x[
'merge_location'].str.upper()).replace('', 'OTHERS')[['org_id', 'name']]
self.load_to_itopdb(
df=location_df, source_table_name=location_source_table)
self.apply_by_php(source_table_name=location_source_table)
def load_brand(self):
brand_source_table = 'synchro_data_brand_73'
brand_coll = self.db['merge_brand']
brand_df = pd.DataFrame(list(brand_coll.find())).assign(name=lambda x: x[
'merge_brand'].str.upper()).assign(primary_key=lambda x: x['name']).replace('', 'OTHERS')[['name', 'primary_key']]
self.load_to_itopdb(df=brand_df, source_table_name=brand_source_table)
self.apply_by_php(source_table_name=brand_source_table)
def load_model(self):
get_brand_id_sql = "select id,name as brand_name from %s" % (
'view_Brand')
brand_id_df = pd.read_sql(get_brand_id_sql, con=self.engine).assign(
brand_id=lambda x: x['id'].map(lambda y: str(int(y))))[['brand_id', 'brand_name']]
model_source_table = 'synchro_data_model_74'
model_coll = self.db['merge_model']
model_df = pd.DataFrame(list(model_coll.find())).assign(name=lambda x: x['merge_model_name']).assign(
primary_key=lambda x: x['name'].str.upper()).replace('', 'OTHERS')
model_df = pd.merge(model_df, brand_id_df, how='left', left_on='merge_brand_name', right_on='brand_name').assign(type=lambda x:x['merge_model_type']).loc[model_df['name'] != 'OTHERS',['primary_key', 'brand_id', 'name','type']]
self.load_to_itopdb(df=model_df, source_table_name=model_source_table)
self.apply_by_php(source_table_name=model_source_table)
def load_osfamily(self):
osfamily_source_table = 'synchro_data_osfamily_75'
osfamily_coll = self.db['merge_osfamily']
osfamily_df = pd.DataFrame(list(osfamily_coll.find())).assign(name=lambda x: x[
'merge_osfamily'].str.upper()).assign(primary_key=lambda x: x['name']).replace('', 'OTHERS')[['name', 'primary_key']]
self.load_to_itopdb(df=osfamily_df, source_table_name=osfamily_source_table)
self.apply_by_php(source_table_name=osfamily_source_table)
def load_osversion(self):
get_osfamily_id_sql = "select id,name as osfamily_name from %s" % (
'view_OSFamily')
osfamily_id_df = pd.read_sql(get_osfamily_id_sql, con=self.engine).assign(
osfamily_id=lambda x: x['id'].map(lambda y: str(int(y))))[['osfamily_id', 'osfamily_name']]
osversion_source_table = 'synchro_data_osversion_76'
osversion_coll = self.db['merge_osversion']
osversion_df = pd.DataFrame(list(osversion_coll.find())).assign(name=lambda x: x['merge_osversion']).assign(
primary_key=lambda x: x['name'].str.upper()).replace('', 'OTHERS')
osversion_df['merge_osfamily'] = osversion_df['merge_osfamily'].str.upper()
osversion_df = pd.merge(osversion_df, osfamily_id_df, how='left', left_on='merge_osfamily', right_on='osfamily_name')
# pdb.set_trace()
osversion_df=osversion_df.loc[osversion_df['name'] != 'OTHERS',['primary_key', 'osfamily_id', 'name']]
self.load_to_itopdb(df=osversion_df, source_table_name=osversion_source_table)
self.apply_by_php(source_table_name=osversion_source_table)
def load_network_type(self):
network_type_source_table = 'synchro_data_networkdevicetype_81'
network_type_coll = self.db['merge_network_type']
network_type_df = pd.DataFrame(list(network_type_coll.find())).assign(org_id=lambda x: 1, name=lambda x: x[
'merge_network_type'].str.upper()).replace('', 'OTHERS')[['name']]
self.load_to_itopdb(
df=network_type_df, source_table_name=network_type_source_table)
self.apply_by_php(source_table_name=network_type_source_table)
def main(self):
self.load_location()
self.load_brand()
self.load_model()
self.load_osfamily()
self.load_osversion()
self.load_network_type()
if __name__ == '__main__':
lb = LoadBasic()
lb.main()
| [
"dennis.zhang@cargosmart.com"
] | dennis.zhang@cargosmart.com |
99385898d20ef2894b279456181c78b1a4750330 | 18fc9c471aee6d27d88ac5f62a38caf25cc04ec6 | /Application/app/migrations/0013_auto_20210617_1055.py | 8f39400f39c296817313d6ec322e46a12c39ac28 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | mercychege/Pest-Control | e7948d74f1b32017cbf3bc9a0faeb8d850d92ab7 | d738a5ff2e1d3b76b8d263dfad32ed6d5c37349e | refs/heads/master | 2023-06-02T22:19:23.100890 | 2021-06-17T12:13:12 | 2021-06-17T12:13:12 | 377,108,003 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 565 | py | # Generated by Django 3.2.4 on 2021-06-17 10:55
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('app', '0012_auto_20210617_0959'),
]
operations = [
migrations.RemoveField(
model_name='shopproduct',
name='pesticide_image',
),
migrations.RemoveField(
model_name='shopproduct',
name='pesticide_name',
),
migrations.RemoveField(
model_name='shopproduct',
name='shop_name',
),
]
| [
"evendungu96@gmail.com"
] | evendungu96@gmail.com |
e15cbb4ed345d9ed84a81615e72dae49bc59a4eb | af7dae32d540a2cb5aee8bef9eb4989bd229628e | /crabConfig_RECO.py | ea6571a0cd29c473765ee6eeba5247206aad4d7a | [] | no_license | alberto-sanchez/Chi_c-in-pPb2016_MC | 29ac2d885099470156f47d53d9f9bdb712716800 | 7659626c3c407c9614dd4306fc0df1b5273a4fe5 | refs/heads/master | 2023-01-22T16:25:30.073180 | 2020-12-02T22:37:05 | 2020-12-02T22:37:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,130 | py | from CRABClient.UserUtilities import config
config = config()
config.section_("General")
config.General.requestName = 'Chi_c_pPb8TeV_MC_RECO_v5'
config.General.workArea = 'crab_projects'
config.General.transferOutputs = True
config.General.transferLogs = False
config.section_("JobType")
config.JobType.allowUndistributedCMSSW = True #Allow SLC7
config.JobType.pluginName = 'Analysis'
config.JobType.psetName = 'ChiCJpsiMuMu_Pythia8_8p16TeV_TuneCUETP8M1_RECO.py'
config.JobType.maxMemoryMB = 2000
config.JobType.maxJobRuntimeMin = 800
config.JobType.outputFiles = ['ChiCJpsiMuMu_Pythia8_8p16TeV_TuneCUETP8M1_RECO.root']
config.section_("Data")
config.Data.inputDataset = '/Chi_c_pPb8TeV_privateMC_GEN/okukral-Chi_c_pPb8TeV_MC_DIGI_v5-ad67f9fa96b42625e03746b1b4851542/USER'
config.Data.inputDBS = 'phys03'
config.Data.splitting = 'FileBased'
config.Data.unitsPerJob = 2
config.Data.outLFNDirBase = '/store/group/phys_heavyions/okukral/pPb/%s' % (config.General.requestName)
config.Data.publication = True
config.Data.outputDatasetTag = config.General.requestName
config.section_("Site")
config.Site.storageSite = 'T2_CH_CERN'
| [
"noreply@github.com"
] | alberto-sanchez.noreply@github.com |
5122d40789893ab4f9b66ac76fb5793d48a1a642 | 29943373eb987fc37ccba1fa6d7df9da1077a627 | /hello_world.py | 88ccb54bd631137b6ad8527483691c55a875608d | [] | no_license | lujingze/hello-world | 88ae37f0f643210d3d49a905ffe45b46be0be285 | a30d58c4af2151c5d2945ffbf06da9602a5fc3a0 | refs/heads/master | 2020-05-19T13:39:05.529046 | 2019-05-05T15:31:45 | 2019-05-05T15:31:45 | 185,045,341 | 0 | 0 | null | 2019-05-05T15:31:46 | 2019-05-05T14:57:02 | Python | UTF-8 | Python | false | false | 42 | py | print("Hello,world!")
print("Hello,git!") | [
"noreply@github.com"
] | lujingze.noreply@github.com |
23259865da4b2ba2241e13dc4a003730ecd8244e | f483545d7765c25d1b315027726dbd74bc77b98a | /myproject/helloflask/__init__.py | 3c841b6144c426d612c3be2276bab54c47abc33d | [] | no_license | niceman5/pythonProject | e51b44a50776100a63443d7da850ba4b8b00f5eb | 3589fd200b56f68b856d2b4d2031c2a1135168a0 | refs/heads/master | 2023-07-10T16:12:57.756944 | 2023-06-27T08:13:54 | 2023-06-27T08:13:54 | 135,047,965 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,573 | py | from flask import Flask, g, request, Response, make_response
from flask import session, render_template, Markup, url_for
from datetime import date, datetime, timedelta
import os
from helloflask.init_db import init_database, db_session
app = Flask(__name__)
import helloflask.views
import helloflask.tests
import helloflask.filters
app.debug = True
app.jinja_env.trim_blocks = True
# config["connect_args"] = {"options": "-c timezone=utc"}
def dated_url_for(endpoint, **values):
if endpoint == 'static':
filename = values.get('filename', None)
if filename:
file_path = os.path.join(app.root_path,
endpoint, filename)
values['q'] = int(os.stat(file_path).st_mtime)
return url_for(endpoint, **values)
@app.context_processor
def override_url_for():
return dict(url_for=dated_url_for)
app.config.update(
connect_args={"options": "-c timezone=utc"},
SECRET_KEY='X1243yRH!mMwf',
SESSION_COOKIE_NAME='pyweb_flask_session',
PERMANENT_SESSION_LIFETIME=timedelta(31) # 31 days
)
@app.before_first_request
def beforeFirstRequest():
print(">> before_first_request!!")
init_database() # initialize database
@app.after_request
def afterReq(response):
print(">> after_request!!")
return response
@app.teardown_request
def teardown_request(exception):
print(">>> teardown request!!", exception)
@app.teardown_appcontext
def teardown_context(exception):
print(">>> teardown context!!", exception)
db_session.remove() # remove used db-session
| [
"niceman555@gmail.com"
] | niceman555@gmail.com |
7e0e11a25de222a5998cf039e5d07b16e1e5ee3d | 0cfb5831a748ebd46e438e3ad7e7a09c1d196499 | /com/chapter_02/section_03/task_2.3.1_string.py | 0ced5f96b6c94cd49087d941d8d2db0b958d7a97 | [] | no_license | StevenGeGe/pythonFromIntroductionToPractice01 | 7cfe8cdb4bc5c0ddbe25b44976231d72d9e10108 | 9d2ba499056b30ded14180e6c4719ee48edd9772 | refs/heads/master | 2023-02-15T04:08:59.878711 | 2020-12-28T13:27:55 | 2020-12-28T13:27:55 | 310,980,820 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 503 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
# @Time : 2020/11/8 14:44
# @Author : Yong
# @Email : Yong_GJ@163.com
# @File : task_2.3.1_string.py
# @Software: PyCharm
# title() : 以首字母大写的方式显示每个单词,即将每个单词的首字母全部大写或者全部小写。
# 更改字符串的小写
name_big = "ada love"
print(name_big.title()) # 输出: Ada Love
# 更改字符串的大写
name_small = "All The World"
print(name_small.title()) # 输出: All The World
| [
"Yong_GJ@163.com"
] | Yong_GJ@163.com |
86123d9ee958cff4bb4bb00d945ec0cef102bd14 | 6d752fd3d18dff5f876913c0462306a81b932e16 | /lekcja7/triangles.py | 38b46a0ca72670a6402015afa844410ab3014ee8 | [] | no_license | madinhos/PYTHON | 955049206c2d9b7b26177d351e50b259887e74b3 | 4d0028c4024ad62a5a215de3ca9f715e3be84335 | refs/heads/master | 2021-01-11T03:20:01.483879 | 2017-01-25T09:01:11 | 2017-01-25T09:01:11 | 71,047,655 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,278 | py | from points import Point
class Triangle:
def __init__(self, x1=0, y1=0, x2=0, y2=0, x3=0, y3=0):
#sprawdzanie czy pkt sa wspolliniowe obliczajac wyznacznik macierzy
# |x1 y1 1|
# det |x2 y2 1| == 0
# |x3 y3 1|
det = x1*y2*1 + x2*y3*1 + x3*y1*1 - 1*y2*x3 - 1*y3*x1 - 1*y1*x2
if (det == 0):
raise ValueError("bledne dane inicjalizacyjne")
self.pt1 = Point(x1, y1)
self.pt2 = Point(x2, y2)
self.pt3 = Point(x3, y3)
def __str__(self):
return "[(%s, %s), (%s, %s), (%s, %s)]" % (self.pt1.x, self.pt1.y, self.pt2.x, self.pt2.y, self.pt3.x, self.pt3.y)
def __repr__(self):
return "Triangle(%s, %s, %s, %s, %s, %s)" % (self.pt1.x, self.pt1.y, self.pt2.x, self.pt2.y, self.pt3.x, self.pt3.y)
def __eq__(self, other):
if not isinstance(other, Triangle):
raise ValueError("bledny argument")
if(self.pt1 == other.pt1 and self.pt2 == other.pt2 and self.pt3 == other.pt3):
return True
else:
return False
def __ne__(self, other): # obsluga tr1 != tr2
return not self == other
def center(self):
x = (self.pt1.x + self.pt2.x + self.pt3.x)/3
y = (self.pt1.y + self.pt2.y + self.pt3.y)/3
return Point(x,y)
def area(self):
return (abs((self.pt1.x * (self.pt2.y - self.pt3.y) + self.pt2.x * (self.pt3.y - self.pt1.y) + self.pt3.x * (self.pt1.y - self.pt2.y)) / 2))
def move(self, x, y):
if not (isinstance(x, int) or isinstance(y, int) or isinstance(x, float) or isinstance(y, float)):
raise ValueError("bledne argumenty")
self.pt1.x += x
self.pt2.x += x
self.pt3.x += x
self.pt1.y += y
self.pt2.y += y
self.pt3.y += y
return self
def make4(self):
ab = Point((self.pt1.x + self.pt2.x)/2, (self.pt1.y + self.pt2.y)/2) #srodek odcinka pt1 i pt2
bc = Point((self.pt2.x + self.pt3.x)/2, (self.pt2.y + self.pt3.y)/2) #srodek odcinka pt2 i pt3
ca = Point((self.pt3.x + self.pt1.x)/2, (self.pt3.y + self.pt1.y)/2) #srodek odcinka pt3 i pt1
tr1 = Triangle(self.pt1.x, self.pt1.y, ab.x, ab.y, ca.x, ca.y)
tr2 = Triangle(ab.x, ab.y, self.pt2.x, self.pt2.y, bc.x, bc.y)
tr3 = Triangle(ca.x,ca.y, ab.x, ab.y, bc.x, bc.y)
tr4 = Triangle(ca.x, ca.y, bc.x, bc.y, self.pt3.x, self.pt3.y)
print(tr1, tr2,tr3,tr4)
return [tr1, tr2, tr3 ,tr4]
| [
"noreply@github.com"
] | madinhos.noreply@github.com |
4592909cbecdc99a76075adfdb88ebecd628f893 | e247d9261676f257752c0c6beac161954137a81c | /src/0670.maximum-swap/maximum-swap.py | a768dba246b1ee138757c7df172f980aba66c1ea | [
"MIT"
] | permissive | henrymorgen/Just-Code | 8fbbd8288b485372a44e10b0078b5edb8af61a3b | fa03ebb89edd8f2292de7c0644dbab88dc1d924c | refs/heads/master | 2022-10-19T05:59:53.134092 | 2020-06-10T02:26:43 | 2020-06-10T02:26:43 | 273,656,532 | 1 | 2 | MIT | 2020-06-20T07:02:38 | 2020-06-20T07:02:38 | null | UTF-8 | Python | false | false | 447 | py | class Solution:
def maximumSwap(self, num: int) -> int:
num = list(str(num))
max_idx = len(num) - 1
xi = yi = 0
for i in range(len(num) - 1, -1, -1):
if num[i] > num[max_idx]:
max_idx = i
elif num[i] < num[max_idx]:
xi = i
yi = max_idx
num[xi], num[yi] = num[yi], num[xi]
return int("".join(num)) | [
"yaxe522@163.com"
] | yaxe522@163.com |
a81d7956a626945b195bcafc4386c7d53c6e29b9 | 2d9a3ce2a04190d0032e8a298829022260b1d76b | /indra/tests/test_minerva.py | 666c8bbb86a2c4b3c0e68143059fb90f8eb1292c | [
"BSD-2-Clause",
"BSD-2-Clause-Views"
] | permissive | sorgerlab/indra | f127a0f9bdd2d3f48df14575883fd31e2f4de4bf | 6d6ca1174792b6c5a05cbf3afcb9f138fabcec6a | refs/heads/master | 2023-08-21T13:25:54.654995 | 2023-06-11T16:46:41 | 2023-06-11T16:46:41 | 22,848,436 | 158 | 61 | BSD-2-Clause | 2023-08-30T21:47:59 | 2014-08-11T17:44:05 | Python | UTF-8 | Python | false | false | 3,299 | py | import os
from indra.sources.minerva.api import *
from indra.sources.minerva.processor import SifProcessor
from indra.sources.minerva.minerva_client import get_model_ids
from indra.statements import Activation, Inhibition
models_to_ids = get_model_ids()
tgfb_id = models_to_ids['TGFbeta signalling']
apopt_id = models_to_ids['Apoptosis pathway']
model_id_to_sif_strs = {
tgfb_id: ['sa44 POSITIVE csa5', 'sa18 NEGATIVE csa3'],
apopt_id: ['sa18 POSITIVE sa15', 'csa2 POSITIVE sa9']
}
def test_process_sif_strs():
sp = process_sif_strs(model_id_to_sif_strs)
assert sp
assert isinstance(sp, SifProcessor)
assert len(sp.statements) == 4
# Correct statement types
assert isinstance(sp.statements[0], Activation)
assert isinstance(sp.statements[1], Inhibition)
assert isinstance(sp.statements[2], Activation)
assert isinstance(sp.statements[3], Activation)
# Using the same code ('sa18'), get different agents depending on model
assert sp.statements[1].subj.name == 'MAPK3'
assert sp.statements[1].subj.get_grounding() == ('HGNC', '6877')
assert sp.statements[2].subj.name == 'CASP9'
assert sp.statements[2].subj.get_grounding() == ('HGNC', '1511')
# If possible, get FamPlex family
# "sa44 POSITIVE csa5" is "ACVR1 POSITIVE SMAD2/3_complex"
assert sp.statements[0].obj.name == 'SMAD2_3'
assert sp.statements[0].obj.get_grounding() == ('FPLX', 'SMAD2_3')
# Otherwise create Agent with BoundConditions
# "csa2 POSITIVE sa9" is "csa2 POSITIVE sa9"
assert sp.statements[3].subj.name == 'FAS'
assert sp.statements[3].subj.bound_conditions
assert sp.statements[3].subj.bound_conditions[0].agent.name == 'FASLG'
# Both main agent and BoundCondition agent have groundings
assert sp.statements[3].subj.get_grounding() == ('HGNC', '11920')
assert sp.statements[3].subj.bound_conditions[0].agent.get_grounding() == (
'HGNC', '11936')
# Statements have evidence
assert sp.statements[0].evidence
assert sp.statements[0].evidence[0].source_api == 'minerva'
assert sp.statements[0].evidence[0].annotations['sif_str'] == \
'sa44 POSITIVE csa5'
assert sp.statements[0].evidence[0].annotations['minerva_model_id'] == \
tgfb_id
def test_process_file():
fname = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'minerva_test1.sif')
sp = process_file(fname, tgfb_id)
assert sp
assert isinstance(sp, SifProcessor)
assert len(sp.statements) == 2
def test_process_files():
fname1 = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'minerva_test1.sif')
fname2 = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'minerva_test2.sif')
# One file
sp = process_files({tgfb_id: fname1})
assert sp
assert isinstance(sp, SifProcessor)
assert len(sp.statements) == 2
# Multiple files
sp = process_files({tgfb_id: fname1, apopt_id: fname2})
assert sp
assert isinstance(sp, SifProcessor)
assert len(sp.statements) == 4
def test_process_from_web():
sp = process_from_web(filenames=['TGFB_pathway_stable_raw.sif'])
assert sp
assert isinstance(sp, SifProcessor)
assert len(sp.statements) > 20
| [
"dianakolusheva@gmail.com"
] | dianakolusheva@gmail.com |
c288be163fc503676e07dbc33ab1ccc5193348d6 | f28591fab50d9b7a539c66b5a81fc91d1bc2ce64 | /py3/def/uint32_rotateleft.py | 3d8529dece0a6541a402dce9cfeefd84e5370f9e | [] | no_license | tnzw/tnzw.github.io | b8a5fe1f8479736bbf2b3594d511a1282939a3b3 | 6d95968db793cebcfa77cb49eecd987f821350db | refs/heads/master | 2023-04-21T14:22:49.849859 | 2023-03-31T15:55:01 | 2023-03-31T15:55:01 | 176,712,013 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 532 | py | # uint32_rotateleft.py Version 1.0.0
# Copyright (c) 2020 Tristan Cavelier <t.cavelier@free.fr>
# This program is free software. It comes without any warranty, to
# the extent permitted by applicable law. You can redistribute it
# and/or modify it under the terms of the Do What The Fuck You Want
# To Public License, Version 2, as published by Sam Hocevar. See
# http://www.wtfpl.net/ for more details.
def uint32_rotateleft(uint32, n):
n %= 32
if n < 0: n += 32
return (((uint32 << n) & 0xFFFFFFFF) | (uint32 >> (32 - n)))
| [
"tzw56702@outlook.com"
] | tzw56702@outlook.com |
2cb8567db896b48de24f85b7962d8387b7c42cc1 | d84b0446912ee8e62f5f36be2c966552779cd15d | /5_2_encode_Au_fea.py | b7407ccecbd7c5bc8013a5b7dd63380067cc7af2 | [] | no_license | SCLinDennis/Multi-People-Behavioral-Classification-Framework | aa62c415bdf54e40d4733445eff445862905dec6 | f70e0769ef989d9f13010ada0e33ff7abe64f0e3 | refs/heads/master | 2020-03-24T03:01:36.998472 | 2018-07-27T08:45:14 | 2018-07-27T08:45:14 | 142,401,275 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,998 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Sep 20 17:31:59 2017
@author: dennis60512
"""
import os, glob, sys
import multiprocessing as mp
import pandas as pd
import collections
import joblib as ib
from collections import defaultdict
import numpy as np
from sklearn.ensemble import RandomForestClassifier as RFC
from scipy import stats as stats
import scipy as sp
import pdb
import scipy.stats as stats
#%%
def getFunctional(data):
"""Functional list: ['max','min','mean','median','standard_deviation','1_percentile','99_percentile',
'99minus1_percentile','skewneww','kurtosis','min_pos','max_pos','low_quar',
'up_quar','quartile_range'] """
Functional = []
#0 max
Functional.append(np.max(data, axis = 0))
#1 min
Functional.append(np.min(data, axis = 0))
#2 mean
Functional.append(np.mean(data, axis = 0))
#3 median
Functional.append(np.median(data, axis = 0))
#4 standard deviation
Functional.append(np.std(data, axis = 0) )
#5 1st_percentile
Functional.append(np.percentile(data, 1, axis = 0))
#6 99th percentile
Functional.append(np.percentile(data, 99, axis = 0))
#7 99th percentile - 1st percentile
Functional.append(Functional[-1]-Functional[-2])
#8 skewness
Functional.append(stats.skew(data, axis=0))
#9 kurtosis
Functional.append(stats.kurtosis(data, axis=0))
#10 minmum position
Functional.append((np.argmin(data, axis=0)).astype(float)/len(data))
#11 maximum position
Functional.append((np.argmax(data, axis=0)).astype(float)/len(data))
#12 lower quartile
Functional.append(np.percentile(data, 25, axis = 0))
#13 upper quartile
Functional.append(np.percentile(data, 75, axis = 0))
#14 interqyartile range
Functional.append(Functional[-1]-Functional[-2])
#return np.asanyarray(Functional)
return np.vstack(Functional).reshape(1, -1)
def select_index(df, start, end, frame_rate):
answer = 1
index_tmp = np.where((df.index.values> start*frame_rate)& (df.index.values <end*frame_rate))
if len(index_tmp[0]) ==0:
# print("Sorry. We cannot find feature between " + str(start)+" and "+ str(end))
answer = 0
index = df.index.values[index_tmp[0]]
df_out = df.ix[index]
df_out.drop('confidence', axis=1, inplace=True)
return df_out, answer
def delta_extract(df):
df_delta = df.diff().fillna(0)
fea = [df, df_delta]
df_out = pd.concat(fea, axis = 1)
return df_out
def key_translate(date):
index = ''
if len(date.split('\\')[-1].split('_')) == 3:
index = ''.join(date.split('\\')[-1].split('_')[0:2])
elif len(date.split('\\')[-1].split('_')) == 4:
index = ''.join(date.split('\\')[-1].split('_')[0:2]) + '_' +date.split('\\')[-1].split('_')[2]
return index
def load_audio(feaAudio, AudioPath, index, label, keepname):
for idx,i2 in enumerate(AudioPath):
if i2.split('\\')[-1][-5] != 'N' and i2.split('\\')[-1][-5] != 'H'and i2.split('\\')[-1][-5] != 'X'and i2.split('\\')[-1][-5] != 'F':
if i2.split('\\')[-1][0:-6] == keepname:
da=sp.io.loadmat(i2)['Audio_data'].tolist()
da[0].append(label)
feaAudio[index].append(da[0])
return feaAudio
'''
def Col_feature_extend(feature, start_time, end_time, start_extend, end_extend, frame_rate):
new_df = np.array([])
for key3, value in feature.items():
new_df_tmp, x = select_index(value, start_time+start_extend, end_time+end_extend, frame_rate)
if len(new_df) == 0:
new_df = new_df_tmp.as_matrix()
else:
new_df = np.vstack((new_df, new_df_tmp))
if len(new_df) != 0:
New_feat = getFunctional(new_df)
else:
New_feat = getFunctional(df_tmp_array)
return New_feat
'''
def Act_feature_extend(feature, start_time, end_time, start_extend, end_extend , frame_rate):
accum = 0
for key3, value in feature.items():
new_df_tmp, x = select_index(value, start_time+start_extend, end_time+end_extend, frame_rate)
if x == 1:
new_df_tmp = new_df_tmp.as_matrix()
accum += 1
if accum == 1:
New_feat = getFunctional(new_df_tmp)
else:
New_feat += getFunctional(new_df_tmp)
if accum != 0:
New_feat = New_feat/accum
else:
New_feat = getFunctional(df_tmp_array)
return New_feat
'''
def whospeakmost(label, moreorless): #moreorless == 1(more), 0(less)
tmp = 0
for key, value in label.items():
if tmp == 0:
tmp = len(value)
speakmost = key
else:
if moreorless == 1:
if len(value) > tmp:
tmp = len(value)
speakmost = key
else:
if len(value) < tmp:
tmp = len(value)
speakmost = key
return speakmost
'''
def label_preprocess(Act_label):
for key in Act_label:
for i in range(len(Act_label[key])):
Act_label[key][i] = Act_label[key][i].split(' ')
b_set = set(tuple(x) for x in Act_label[key] )
Act_label[key] = [ list(x) for x in b_set ]
return Act_label
def Act_feature_extend_new(feature, start_time, end_time, start_extend, end_extend, frame_rate, fea_main):
new_df_tmp, x = select_index(feature, start_time+start_extend, end_time+end_extend, frame_rate)
if x ==1:
new_df_tmp = new_df_tmp.as_matrix()
New_feat = getFunctional(new_df_tmp)
fea_out = fea_main - New_feat
else:
fea_out = fea_main
return fea_out
'''
def Act_feature_extend_new2(feature, start_time, end_time, start_extend, end_extend, frame_rate, fea_main):
new_df_tmp, x = select_index(feature, start_time+start_extend, end_time+end_extend, frame_rate)
if x ==1:
new_df_tmp = new_df_tmp.as_matrix()
New_feat = getFunctional(new_df_tmp)
fea_out = New_feat - fea_main
else:
fea_out = -fea_main
return fea_out
'''
#%%
# --- Configuration: working directories, label streams and sampling rate ---
os.chdir('D:\\Lab\\Dennis\\Gamania\\Script')
WORKDIR = '..\\Data\\AU_Feature\\'  # per-session Action-Unit feature pickles
LABELDIR = '.\\VideoLabel\\VideoLabel\\VideoLabelNewCut\\'  # per-session label pickles
FEATUREDIR = '.\\VideoFeatureNewCut\\ActionUnit\\'  # output directory for encoded features
#FEATUREDIR = '.\\VideoFeature_3class\\ActionUnit\\Interact+delta\\'
ROOT = os.getcwd()
LABELTYPE = ['Act']  # label streams to process ('Act' = activity labels)
frame_rate = 30  # video frame rate in frames per second
# NOTE(review): three long commented-out lists (commingtohelp/2/3) of already
# processed *_feature.pkl paths were removed here; only commingtohelp4 below is
# consulted by the skip-check in the main loop, and commingtohelp5 appears
# unused in the visible code.
commingtohelp4 = ['..\\Data\\AU_Feature\\05_24_feature.pkl', '..\\Data\\AU_Feature\\06_20_1_feature.pkl', '..\\Data\\AU_Feature\\06_20_2_feature.pkl', '..\\Data\\AU_Feature\\06_21_feature.pkl', '..\\Data\\AU_Feature\\06_26_2_feature.pkl', '..\\Data\\AU_Feature\\06_27_feature.pkl', '..\\Data\\AU_Feature\\07_24_3_feature.pkl', '..\\Data\\AU_Feature\\07_25_1_feature.pkl']
commingtohelp5 = ['..\\Data\\AU_Feature\\06_28_feature.pkl']
#%%
# Main feature-extraction loop: for every label type and every session feature
# pickle, cut the per-utterance AU feature window, encode it with
# getFunctional(), append an extended-context (+10 s) feature plus the two
# label codes, and collect everything into feaVideo / fesLength2.
# NOTE(review): feaAudio is only filled by a commented-out load_audio path,
# and the final ib.dump calls are disabled, so this run is side-effect free.
for Label in LABELTYPE:
    feaAudio = collections.defaultdict(list)
    feaVideo = collections.defaultdict(list)
    fesLength2 = collections.defaultdict(list)
    for date in sorted(glob.glob(WORKDIR+'\\*_feature.pkl')):
        # Skip sessions listed as already processed.
        if date not in commingtohelp4:
#        if 1 != 0:
            index = key_translate(date)
            Act_label = ib.load(LABELDIR+ index + '_' + Label + '.pkl')
            fea_Com = ib.load(date)
            print("Loading" + date)
            #Label Preprocessing
            Act_label = label_preprocess(Act_label)
#            spk_less = whospeakmost(Act_label, 0)
#            spk_most = whospeakmost(Act_label, 1)
            #Audio path
            '''
            ifiles = glob.glob('D:\\Lab\\Dennis\\Gamania\\Jim\\labeled_wav\\feature_tovideo_new\\*.mat')
            '''
            #Initialize the feature
            df = []
            length = 0
            #loop the people
            label_index = collections.defaultdict(list)
            for key2 in fea_Com.keys():
                print('Now doing people ' + key2 +'\n')
                label_tmp = []
                add = []
                add.append(length)
                length_tmp = 0
                #loop the label
                for i1, tmp in enumerate(Act_label[key2]):
                    # Collapse the 4-way raw code into a binary label (lab1)
                    # and a 3-way label (lab2, with code '3' merged into '2').
                    # NOTE(review): a code outside '0'..'3' silently reuses the
                    # previous iteration's lab1/lab2 values.
                    if tmp[0] == '0':
                        lab1 = '0'
                        lab2 = '0'
                    elif tmp[0] == '1':
                        lab1 = '1'
                        lab2 = '1'
                    elif tmp[0] == '2':
                        lab1 = '1'
                        lab2 = '2'
                    elif tmp[0] == '3':
                        lab1 = '1'
                        lab2 = '2'
                    start = tmp[1]
                    end = tmp[2]
                    keepname = tmp[3]
                    answer = 0
                    # NOTE(review): fea_withdelta is computed but never used.
                    fea_withdelta = delta_extract(fea_Com[key2])
                    df_tmp, answer = select_index(fea_Com[key2], float(start), float(end), frame_rate)
#                    df_tmp, answer = select_index(fea_Com[key2], float(start), float(end), frame_rate)
                    if answer ==1:
                        label_tmp.append(1)
                    else:
                        label_tmp.append(0)
                    #load Au feature
                    '''
                    if answer == 1:
                        feaAudio = load_audio(feaAudio, ifiles, index, int(lab), keepname)
                    '''
                    #Video Feature Preprocessing
                    df_tmp_array = df_tmp.as_matrix()
                    if len(df_tmp_array) != 0:
                        # Drop into the debugger if NaNs slipped through.
                        if len(np.where(np.isnan(df_tmp_array))[0]) > 0:
                            pdb.set_trace()
                        feaComCut = getFunctional(df_tmp_array)
                        '''
                        #Create New Feature
                        if Label == 'Col':#fea_Com, start, end, -10, 0,
                            New_feat = Col_feature_extend(fea_Com, float(start), float(end), -10, 0, frame_rate)
                        '''
                        #Create New Feature2
                        if Label == 'Act':#fea_Com, start, end, start_extend, end_extend
                            New_feat = Act_feature_extend(fea_Com, float(start), float(end), 0, 10, frame_rate)
                            '''
                            New_feat = Act_feature_extend_new(fea_Com[spk_less], float(start), float(end), 5, 0, frame_rate, feaComCut)
                            New_feat2 = Act_feature_extend_new2(fea_Com[spk_most], float(start), float(end), 0, 5, frame_rate, feaComCut)
                            '''
                        # NOTE(review): New_feat would be undefined here if
                        # Label were not 'Act' (LABELTYPE currently == ['Act']).
                        feaComCut = np.append(feaComCut, New_feat) #!!!!!!!!
                        if len(np.where(np.isnan(feaComCut))[0]) > 0:
                            pdb.set_trace()
                        #Append the label to the encoded feature
                        print(feaComCut.shape)
                        feaComCut = np.hstack((feaComCut, np.array([int(lab1), int(lab2)])))
                        df.append(feaComCut.tolist())
                        length_tmp += 1
                label_index[key2] = label_tmp
                #Feature Length statistics
                length += length_tmp
                add.append(length)
                fesLength2[index + key2] = add
            #Save the feature to dictionary
            feaVideo[index] = df
#    ib.dump(feaAudio, FEATUREDIR + Label+'_feaAudio.pkl')
#    ib.dump(feaVideo, FEATUREDIR + Label+'_feaVideo.pkl')
#    ib.dump(fesLength2, FEATUREDIR + Label+ '_fesLength2.pkl')
#
| [
"noreply@github.com"
] | SCLinDennis.noreply@github.com |
157292defe780bc18ac136f186d305631f58e9d7 | dfbe8e7a27635292a0e465fdc75673f74243bef1 | /aspm/aspmissue/urls.py | 963ec3f3c285b04636ec0655f9cd2177eeb85821 | [] | no_license | imon91/blackbox | cfe3f1d687d97cf3ef843925e88f1a2e7fb7c3a2 | c6bc84bc219268b11119bd837d6cd223630b3c7b | refs/heads/master | 2023-02-12T17:39:47.976482 | 2021-01-18T04:53:19 | 2021-01-18T04:53:19 | 330,557,898 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 380 | py | from django.contrib import admin
from django.urls import path
from aspmissue import views
# URL namespace used when reversing, e.g. 'aspmissue:index'.
app_name = "aspmissue"
# Route table for the aspmissue app.
urlpatterns = [
    path('', views.index, name='index'),
    # NOTE(review): the <int:id> routes have no trailing slash while
    # market_issue/ does — confirm this inconsistency is intentional.
    path('model_detail/<int:id>', views.modelDetail, name='modelDetail'),
    path('market_issue/', views.marketIssue, name='marketIssue'),
    path('xcel_view/<int:id>', views.xcel_view, name='xcel_view')
]
| [
"tasnim.hosen.ewu@gmail.com"
] | tasnim.hosen.ewu@gmail.com |
a6e15d5a25286fc2482158a1d9166878f2d680bb | f13e110a3c2346981e3b2b0ec73b466a927f73fd | /ucsc/chromhmm2category.py | ccf8557c81173af623e68146ccd8e96afbd5cfc6 | [] | no_license | epgg/script | 0ea1249f11fb442ab169e1db2e3f564b5c76252b | 03550cb1d0d458a12f41c3da6922a618bcea641a | refs/heads/master | 2021-06-02T04:08:37.923058 | 2018-01-25T19:33:59 | 2018-01-25T19:33:59 | 32,166,906 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 249 | py | import sys
# Python 2 script: read a ChromHMM segmentation file (tab-separated) and print
# chrom, start, end plus the state category (the part of column 5 before '_').
if len(sys.argv) != 2:
    print 'Require input file, output to stdout'
    sys.exit()
with open(sys.argv[1]) as fin:
    for line in fin:
        # Columns: [0]=?, [1]=chrom, [2]=start, [3]=end, [4]=state label.
        lst=line.split('\t')
        print '{0}\t{1}\t{2}\t{3}'.format(lst[1],lst[2],lst[3],lst[4].split('_')[0])
| [
"lidaof@gmail.com"
] | lidaof@gmail.com |
4c13c1b16129e4ea923b3a8845fa0d873f5515cb | 471c56d189c21733371fb60f3d4a13e69b6c8c0d | /plot_comp_prediction_clstm.py | ffb3a6bdfb0b40079d1f116578e2cd5e96cf6b3f | [] | no_license | inoue0406/svg | 2b3d50e17526d27b37e352a535a8468b23d5773b | 6a12e052ca9d9a54eaae1657e236259b00aabdc9 | refs/heads/master | 2020-08-13T12:25:41.729998 | 2019-11-03T06:31:06 | 2019-11-03T06:31:06 | 214,967,485 | 0 | 0 | null | 2019-10-14T06:43:43 | 2019-10-14T06:43:43 | null | UTF-8 | Python | false | false | 8,294 | py | #
# Plot Predicted Rainfall Data
# for non-probabilistic clstm model
#
import torch
import numpy as np
import torch.utils.data as data
from torch.autograd import Variable
from torch.utils.data import DataLoader
import argparse
import pandas as pd
import h5py
import os
import sys
import random
import itertools
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib.colors import LinearSegmentedColormap
import utils
from jma_pytorch_dataset import *
from scaler import *
from colormap_JMA import Colormap_JMA
def inv_scaler(x):
    """Map model-space values back to physical rain rate (mm/h).

    Inverse of the forward scaling: square the value, then multiply by
    the 201 mm/h cap used when the data were normalized.
    """
    squared = x ** 2.0
    return squared * 201.0
def plot_rainfall(pic_tg,pic_pred,pic_path,fname):
    """Save one side-by-side PNG (true vs. predicted) per time step.

    pic_tg:   numpy array [time, x, y] of ground-truth rain rates (mm/h).
    pic_pred: numpy array [time, x, y] of predicted rain rates — as called
              from make_gifs it is a single sample, not [nsample, time, x, y].
    pic_path: output directory for the PNGs.
    fname:    filename prefix (also shown in the figure title).
    """
    # input
    # pic_tg: numpy array with [time,x,y] dim
    # pic_pred: numpy array with [nsmple,time,x,y] dim
    print('Plotting: ',fname,np.max(pic_tg),np.max(pic_pred))
    # plot
    cm = Colormap_JMA()
    for nt in range(pic_tg.shape[0]):
        fig, ax = plt.subplots(figsize=(20, 8))
        fig.suptitle("Precip prediction starting at: "+fname, fontsize=30)
        #
        # NOTE(review): `id` shadows the builtin; harmless locally.
        id = nt
        # Lead time in minutes: frames are 5 minutes apart.
        dtstr = str((id+1)*5)
        # target
        plt.subplot(1,2,1)
        im = plt.imshow(pic_tg[id,:,:],vmin=0,vmax=50,cmap=cm,origin='lower')
        plt.title("true:"+dtstr+"min", fontsize=30)
        plt.axis('off')
        plt.grid()
        # predicted
        plt.subplot(1,2,2)
        im = plt.imshow(pic_pred[id,:,:],vmin=0,vmax=50,cmap=cm,origin='lower')
        plt.title("pred:"+dtstr+"min", fontsize=30)
        plt.axis('off')
        plt.grid()
        # color bar
        fig.subplots_adjust(right=0.93,top=0.85)
        cbar_ax = fig.add_axes([0.94, 0.15, 0.01, 0.7])
        fig.colorbar(im, cax=cbar_ax)
        # save as png
        nt_str = '_dt%02d' % nt
        plt.savefig(pic_path+'/'+'comp_pred_'+fname+nt_str+'.png')
        plt.close()
def make_gifs(x, idx, name,frame_predictor,encoder,decoder):
    """Roll the model forward over a batch and plot predicted vs. true frames.

    x:    list of length opt.n_eval holding ground-truth frame batches
          (torch tensors) in the model's scaled space.
    idx:  batch offset from the caller — currently unused.
    name: filename prefix ('train' / 'test') for the saved plots.

    Relies on the module-global `opt` (set in __main__) for n_eval, n_past,
    batch_size, image_width and log_dir.
    """
    all_gen = []
    frame_predictor.hidden = frame_predictor.init_hidden()
    x_in = x[0]
    all_gen.append(x_in)
    # Conditioning phase feeds ground truth; after n_past steps the model is
    # fed its own predictions (closed-loop rollout).
    for i in range(1, opt.n_eval):
#        h = encoder(x_in)
#        if opt.last_frame_skip or i < opt.n_past:
#            h, skip = h
#        else:
#            h, _ = h
#        h = h.detach()
#        if i < opt.n_past:
#            h_target = encoder(x[i])[0].detach()
#            frame_predictor(h)
#            x_in = x[i]
#            all_gen.append(x_in)
#        else:
#            h = frame_predictor(h.detach())
#            x_in = decoder([h, skip]).detach()
#            all_gen.append(x_in)
        if i < opt.n_past:
            x_in = x[i-1] # use ground truth frame for the first half
            h, skip = encoder(x_in)
            h = h.detach()
        else:
            x_in = x_pred # use predicted frame for the second half (NOT use ground truth)
            _, skip = encoder(x_in)
            h = h_pred
        h_pred = frame_predictor(h).detach()
        x_pred = decoder([h_pred, skip]).detach()
        all_gen.append(x_pred)
    # prep np.array to be plotted
    # Back-transform both sequences to physical rain rates via inv_scaler.
    TRU = np.zeros([opt.n_eval, opt.batch_size, 1, opt.image_width, opt.image_width])
    GEN = np.zeros([opt.n_eval, opt.batch_size, 1, opt.image_width, opt.image_width])
    for i in range(opt.n_eval):
        TRU[i,:,:,:,:] = inv_scaler(x[i].cpu().numpy())
        GEN[i,:,:,:,:] = inv_scaler(all_gen[i].cpu().numpy())
    # plot
    print(" ground truth max:",np.max(TRU)," gen max:",np.max(GEN))
    # One figure set per sample in the batch.
    for j in range(opt.batch_size):
        plot_rainfall(TRU[:,j,0,:,:],GEN[:,j,0,:,:],opt.log_dir,name+"_sample"+str(j))
# plot comparison of predicted vs ground truth
def plot_comp_prediction(opt,df_sampled,mode='png_ind'):
    """Load a trained model and plot one train batch and one test batch.

    opt:        argparse namespace (also stored module-globally by __main__,
                which make_gifs reads).
    df_sampled: DataFrame of sampled cases — currently unused in the body.
    mode:       plotting mode flag — currently unused in the body.
    """
    print("Random Seed: ", opt.seed)
    random.seed(opt.seed)
    torch.manual_seed(opt.seed)
    torch.cuda.manual_seed_all(opt.seed)
    dtype = torch.cuda.FloatTensor
    # ---------------- load the models  ----------------
    tmp = torch.load(opt.model_path)
    frame_predictor = tmp['frame_predictor']
    frame_predictor.eval()
    encoder = tmp['encoder']
    decoder = tmp['decoder']
    # NOTE(review): encoder/decoder are left in train() mode while the
    # predictor is in eval() mode — confirm this is intentional.
    encoder.train()
    decoder.train()
    frame_predictor.batch_size = opt.batch_size
    opt.g_dim = tmp['opt'].g_dim
    opt.num_digits = tmp['opt'].num_digits
    # --------- transfer to gpu ------------------------------------
    frame_predictor.cuda()
    encoder.cuda()
    decoder.cuda()
    # ---------------- set the options ----------------
    opt.dataset = tmp['opt'].dataset
    opt.last_frame_skip = tmp['opt'].last_frame_skip
    opt.channels = tmp['opt'].channels
    opt.image_width = tmp['opt'].image_width
    print(opt)
    # --------- load a dataset ------------------------------------
    # loading datasets
    train_dataset = JMARadarDataset(root_dir=opt.data_root,
                                    csv_file=opt.train_path,
                                    tdim_use=opt.n_past,
                                    transform=None)
    valid_dataset = JMARadarDataset(root_dir=opt.data_root,
                                    csv_file=opt.valid_path,
                                    tdim_use=opt.n_past,
                                    transform=None)
    train_loader = DataLoader(dataset=train_dataset,
                              num_workers=opt.data_threads,
                              batch_size=opt.batch_size,
                              shuffle=True,
                              drop_last=True,
                              pin_memory=True)
    test_loader = DataLoader(dataset=valid_dataset,
                             num_workers=opt.data_threads,
                             batch_size=opt.batch_size,
                             shuffle=False,
                             drop_last=True,
                             pin_memory=True)
    # Infinite generators that re-iterate the loaders and normalize batches.
    def get_training_batch():
        while True:
            for sequence in train_loader:
                batch = utils.normalize_data(opt, dtype, sequence)
                yield batch
    training_batch_generator = get_training_batch()
    def get_testing_batch():
        while True:
            for sequence in test_loader:
                batch = utils.normalize_data(opt, dtype, sequence)
                yield batch
    testing_batch_generator = get_testing_batch()
    # Only the first batch is plotted: the loop breaks after one iteration.
    for i in range(0, opt.N, opt.batch_size):
        print(i)
        # plot train
        train_x = next(training_batch_generator)
        make_gifs(train_x, i, 'train',frame_predictor,encoder,decoder)
        # plot test
        test_x = next(testing_batch_generator)
        make_gifs(test_x, i, 'test',frame_predictor,encoder,decoder)
        break
if __name__ == '__main__':
    # Command-line entry point: parse options, then plot predictions.
    parser = argparse.ArgumentParser()
    parser.add_argument('--batch_size', default=100, type=int, help='batch size')
    parser.add_argument('--data_root', default='data', help='root directory for data')
    parser.add_argument('--train_path', default='', help='csv file containing filenames for training')
    parser.add_argument('--valid_path', default='', help='csv file containing filenames for validation')
    parser.add_argument('--model_path', default='', help='path to model')
    parser.add_argument('--log_dir', default='', help='directory to save generations to')
    parser.add_argument('--seed', default=1, type=int, help='manual seed')
    parser.add_argument('--n_past', type=int, default=2, help='number of frames to condition on')
    parser.add_argument('--n_future', type=int, default=28, help='number of frames to predict')
    # NOTE(review): --num_threads is parsed but never used; the loaders
    # read --data_threads instead.
    parser.add_argument('--num_threads', type=int, default=0, help='number of data loading threads')
    parser.add_argument('--N', type=int, default=256, help='number of samples')
    parser.add_argument('--data_threads', type=int, default=5, help='number of data loading threads')
    # `opt` becomes a module-level global here, which make_gifs relies on.
    opt = parser.parse_args()
    os.makedirs('%s' % opt.log_dir, exist_ok=True)
    opt.n_eval = opt.n_past+opt.n_future
    opt.max_step = opt.n_eval
    # samples to be plotted
    sample_path = '../datasets/jma/sampled_forplot_3day_JMARadar.csv'
    # read sampled data in csv
    df_sampled = pd.read_csv(sample_path)
    print('samples to be plotted')
    print(df_sampled)
    plot_comp_prediction(opt,df_sampled,mode='png_ind')
| [
"inoue0406@gmail.com"
] | inoue0406@gmail.com |
5cbbcad90b7a18247ef4129e11896b12752543ab | ec827bd5df431c9400946e8d0593448814b5534b | /venv/bin/ipython | 498f13bc79c779676e375d1d51d86e95af3fa922 | [] | no_license | grantnicholas/pytone | 7acd70878de8090d06d7a2911a67b3dbb3b64256 | b89c688cc88588a3758fff288bc9b1364534b42e | refs/heads/master | 2021-01-23T06:19:47.203418 | 2014-09-21T21:52:27 | 2014-09-21T21:52:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 253 | #!/home/grant/Desktop/pytone/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from IPython import start_ipython
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(start_ipython())
| [
"grantnicholas2015@u.northwestern.edu"
] | grantnicholas2015@u.northwestern.edu | |
fa6dde240db424b8080dacc5451263ed9cfa16bf | 609a0e76f0a78230e7da0f4a83699fbe5b51cdb1 | /tools/_conf/app_agent.py | 1cab971b68d56bae21b74c1675a6d1021bc354e8 | [] | no_license | aocn/klspider | a02e600e143c809f547c9e1a85048c31d8777d68 | f360716e93d730542aa0d4a8d9b8f06bcb3dd8ad | refs/heads/master | 2020-05-21T08:00:34.300381 | 2019-05-14T12:22:36 | 2019-05-14T12:22:36 | 185,970,356 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,951 | py | # coding:utf-8
# Pool of desktop and mobile User-Agent strings, presumably rotated per
# request to reduce the chance of being blocked while scraping.
user_agent = [
    "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50",
    "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50",
    "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0",
    "Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; .NET4.0C; .NET4.0E; .NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729; InfoPath.3; rv:11.0) like Gecko",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)",
    "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)",
    "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
    "Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
    "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11",
    "Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)",
    "Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
    "Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
    "Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
    "Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10",
    "Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13",
    "Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+",
    "Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.0; U; en-US) AppleWebKit/534.6 (KHTML, like Gecko) wOSBrowser/233.70 Safari/534.6 TouchPad/1.0",
    "Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)",
    "UCWEB7.0.2.37/28/999",
    "NOKIA5700/ UCWEB7.0.2.37/28/999",
    "Openwave/ UCWEB7.0.2.37/28/999",
    "Mozilla/4.0 (compatible; MSIE 6.0; ) Opera/UCWEB7.0.2.37/28/999",
    "Mozilla/6.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/8.0 Mobile/10A5376e Safari/8536.25",
]
# Candidate proxy servers ("ip:port") for request rotation.
proxies = [
    '61.135.217.7:80',
    '180.173.199.79:47404',
    '118.190.95.35:9001',
    '118.190.210.227:3128',
    '118.190.199.55:80',
    '115.194.160.162:8118'
]
| [
"willion@sohu.com"
] | willion@sohu.com |
209340b5ea6beaf54ab6e63bbae04b5fee1383f3 | 926cb4f1027e02daf9a99efe5f30adea7ddabb89 | /bigfootgenes/snpedia_fetcher.py | 7db587c25d1372a298a3e1adae6a075399ffe942 | [] | no_license | xiaojay/bigfootgenes | 3e2efff0ac09dc211323f9c3b8273312165eda73 | 32160bd78b50984f72e9fa3b7cb36cecf773395d | refs/heads/master | 2021-01-20T17:15:00.719306 | 2015-07-20T01:47:55 | 2015-07-20T01:47:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 762 | py | from snpedia import Snpedia
import json
class SnpediaFetcher:
def __init__(self):
"""docstring for __init__"""
self.snpedia = Snpedia()
def write_all_snps(self, filepath):
"""Get all the snp names"""
with open(filepath, 'w') as file:
for snp in self.snpedia.get_snps():
file.write("{}\n".format(snp))
def write_snp_wikitext_to_file(self, snps, filepath):
"""get the wikitext for the snps array"""
with open(filepath, 'w') as file:
for snp in snps:
trimmed_snp = snp.strip()
if trimmed_snp:
result = self.snpedia.get_wikitext(trimmed_snp)
file.write("{}\n".format(json.dumps(result)))
| [
"tchheng@yahoo-inc.com"
] | tchheng@yahoo-inc.com |
4be4f8e4711f8dd3aea7a7b7da5b4f617ce93612 | 7a6336e4cc67d24144d84a4ac254a0199123ebc5 | /tools/show_tfrecord.py | e7758bcdce8a39e813d85d1496a2ed4b4b81f9cf | [] | no_license | ThomasTracy/SSD_tensorflow | e63860801cfee106db24fc22c303f9732d97ff4a | 798110c086e1a8d2cfcd2778796dc4c662312b45 | refs/heads/master | 2020-08-02T23:50:48.703107 | 2019-11-03T10:23:42 | 2019-11-03T10:23:42 | 211,550,833 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,599 | py | from datasets import voc07
from datasets import dataset_factory
from train import FLAGS
from matplotlib import pyplot as plt
from preprocessing import ssd_vgg_preprocessing
import tensorflow.contrib.slim as slim
import tensorflow as tf
import numpy
def read_tfrecord(input_file):
    """Build and return the slim TF-Example decoder for the detection records.

    Describes how serialized tf.train.Example protos (encoded image, shape,
    and per-object bounding boxes / labels / flags) are parsed.

    Args:
        input_file: unused; kept for interface compatibility.

    Returns:
        A slim.tfexample_decoder.TFExampleDecoder configured for the layout
        above.  (The original version built the decoder and then implicitly
        returned None, discarding it.)
    """
    keys_to_features = {
        'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''),
        'image/format': tf.FixedLenFeature((), tf.string, default_value='jpeg'),
        'image/height': tf.FixedLenFeature([1], tf.int64),
        'image/width': tf.FixedLenFeature([1], tf.int64),
        'image/channels': tf.FixedLenFeature([1], tf.int64),
        'image/shape': tf.FixedLenFeature([3], tf.int64),
        'image/object/bbox/xmin': tf.VarLenFeature(dtype=tf.float32),
        'image/object/bbox/ymin': tf.VarLenFeature(dtype=tf.float32),
        'image/object/bbox/xmax': tf.VarLenFeature(dtype=tf.float32),
        'image/object/bbox/ymax': tf.VarLenFeature(dtype=tf.float32),
        'image/object/bbox/label': tf.VarLenFeature(dtype=tf.int64),
        'image/object/bbox/difficult': tf.VarLenFeature(dtype=tf.int64),
        'image/object/bbox/truncated': tf.VarLenFeature(dtype=tf.int64),
    }
    items_to_handlers = {
        'image': slim.tfexample_decoder.Image('image/encoded', 'image/format'),
        'shape': slim.tfexample_decoder.Tensor('image/shape'),
        'object/bbox': slim.tfexample_decoder.BoundingBox(
            ['ymin', 'xmin', 'ymax', 'xmax'], 'image/object/bbox/'),
        'object/label': slim.tfexample_decoder.Tensor('image/object/bbox/label'),
        'object/difficult': slim.tfexample_decoder.Tensor('image/object/bbox/difficult'),
        'object/truncated': slim.tfexample_decoder.Tensor('image/object/bbox/truncated'),
    }
    decoder = slim.tfexample_decoder.TFExampleDecoder(keys_to_features, items_to_handlers)
    # Human-readable descriptions of the decoded items (kept for reference;
    # typo 'discriptions' fixed).
    items_to_descriptions = {
        'image': 'A color image of varying height and width.',
        'shape': 'Shape of the image',
        'object/bbox': 'A list of bounding boxes, one per each object.',
        'object/label': 'A list of labels, one per each object.',
    }
    return decoder
def get_from_tfrecord():
    """Load the Pascal VOC 2007 'train' split via the dataset factory.

    NOTE(review): the literal 'D:\Data\VOC\\train' works because '\D' and '\V'
    are not escape sequences, but a raw string would be clearer.
    """
    return dataset_factory.get_dataset('pascalvoc_2007', 'train', 'D:\Data\VOC\\train')
def show_one_image(image, bboxes, labels):
    """Show images side by side with their bounding boxes and labels.

    image:  list of HxWxC arrays, one subplot per entry.
    bboxes: list (parallel to image) of [ymin, xmin, ymax, xmax] boxes with
            coordinates normalized to [0, 1].
    labels: list (parallel to image) of per-box label values.

    Raises ValueError when the inputs are not wrapped in lists.
    """
    if not isinstance(image, list):
        raise ValueError('Please wrappe inputs in list first')
    num_subplot = len(image)
    plt.figure('Image')
    for i in range(num_subplot):
        plt.subplot(1, num_subplot, i+1)
        plt.imshow(image[i])
        # [height, width] of this image, for scaling normalized boxes.
        shape = [image[i].shape[0], image[i].shape[1]]
        bbox = bboxes[i]
        label = labels[i]
        for ([y_min, x_min, y_max, x_max], l) in zip(bbox, label):
            # Convert normalized coords to pixel rectangle (x, y, w, h).
            x_draw = x_min * shape[1]
            y_draw = y_min * shape[0]
            w_draw = (x_max - x_min) * shape[1]
            h_draw = (y_max - y_min) * shape[0]
            plt.gca().add_patch(plt.Rectangle((x_draw, y_draw), w_draw, h_draw,
                                              edgecolor='r', linewidth=1, facecolor='None'))
            plt.text(x_draw + w_draw/2, y_draw, l)
    plt.show()
def run():
    """Read one sample from the TFRecord dataset, preprocess it, and display
    the original and the preprocessed image side by side."""
    dataset = get_from_tfrecord()
    provider = slim.dataset_data_provider.DatasetDataProvider(
        dataset,
        num_readers=FLAGS.num_readers,
        common_queue_capacity=20 * FLAGS.batch_size,
        common_queue_min=10 * FLAGS.batch_size,
        shuffle=True
    )
    preprocess_fun = ssd_vgg_preprocessing.preprocess_image
    [image_org, shape_org, bbox_org, label_org] = provider.get(['image', 'shape', 'object/bbox', 'object/label'])
    # image, labels, bboxes = preprocess_fun(image_org, label_org,
    #                                        bbox_org, out_shape=(300, 300),
    #                                        data_format='NCHW',
    #                                        is_training=True)
    with tf.Session() as sess:
        init_op = tf.global_variables_initializer()
        sess.run(init_op)
        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(coord=coord)
        # Translation of the note below: image, bbox, etc. must be fetched in
        # a SINGLE sess.run. Separate sess.run calls would each trigger
        # provider.get again, and since shuffle=True the image/bbox/shape
        # would come from different samples and no longer match.
        '''此处image,bbox等一定要在同一个线程中同时取出。若分别通过不同的sess.run取出,
        会多次激活provider.get,而每次激活时shuffle又为True,将导致image,bbox,shape等
        不匹配,混乱。
        '''
        image_org, shape_org, bbox_org, label_org = sess.run([image_org, shape_org, bbox_org, label_org])
        # image_show, shape_org, bbox_show, label_show = sess.run([image, shape_org, bboxes, labels])
        # Re-wrap the fetched numpy arrays as tensors so the preprocessing
        # graph operates on this one fixed sample.
        image_org_tensor = tf.convert_to_tensor(image_org, image_org.dtype)
        label_org_tensor = tf.convert_to_tensor(label_org, label_org.dtype)
        bbox_org_tensor = tf.convert_to_tensor(bbox_org, bbox_org.dtype)
        image_show, label_show, bbox_show = preprocess_fun(image_org_tensor, label_org_tensor,
                                                           bbox_org_tensor, out_shape=(512, 512),
                                                           data_format='NCHW',
                                                           is_training=True)
        image_show, label_show, bbox_show = sess.run([image_show, label_show, bbox_show])
        # CHW -> HWC for matplotlib display.
        image_show = numpy.transpose(image_show, (1, 2, 0))
        print(image_show.shape)
        print(label_show, label_org)
        print(bbox_show, bbox_org)
        show_one_image([image_org, image_show],
                       [bbox_org, bbox_show],
                       [label_org, label_show])
        coord.request_stop()
        coord.join(threads)
run() | [
"songyd0616@163.com"
] | songyd0616@163.com |
744ee2bb56ae535922e26f94a34b216ec18a274c | 078dced2a6813ecd953d1654be4ad98e0d809546 | /交叉验证.py | 3c86791e37fbef9b42294bc1e403da4520179270 | [] | no_license | KeyNG-Y/Pytorch_sth | aeab7703f6c4d7be9b0a2a3a0d5c2252c069351d | d055ebcc04b97b831440503f01fe85c6412edbb5 | refs/heads/main | 2023-08-16T06:46:39.975694 | 2021-09-30T10:59:12 | 2021-09-30T10:59:12 | 410,261,964 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,217 | py | ##Train Set + Val Set +Test Set
import torch
from torchvision import transforms, datasets
# Samples per mini-batch for all loaders below.
batch_size = 200
# MNIST training set, normalized with the dataset's global mean/std.
train_db = datasets.MNIST("../data", train=True, download=True,
                          transform=transforms.Compose([
                              transforms.ToTensor(),
                              transforms.Normalize((0.1307,), (0.3081,))
                          ]))
train_loader = torch.utils.data.DataLoader(train_db, batch_size=batch_size, shuffle=True)
# NOTE(review): path here is "..\data" while the train set uses "../data";
# works on Windows but is inconsistent.
test_db = datasets.MNIST("..\data", train=False, transform=transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize((0.1307,), (0.3081,))
]))
test_loader = torch.utils.data.DataLoader(test_db, batch_size=batch_size, shuffle=True)
print("train:", len(train_db), "test:", len(test_db))
train_db, val_db = torch.utils.data.random_split(train_db, [50000, 10000])# split: 60k train -> 50k train + 10k validation
print("db_1:", len(train_db), "db_2:", len(val_db))
# Rebuild the train loader over the 50k subset and add a validation loader.
train_loader = torch.utils.data.DataLoader(train_db, batch_size=batch_size, shuffle=True)
val_loader = torch.utils.data.DataLoader(val_db,batch_size=batch_size, shuffle=True)
print("train_loader:", len(train_loader), "val_loader:", len(val_loader),"test_loader",len(test_loader))
| [
"noreply@github.com"
] | KeyNG-Y.noreply@github.com |
089d8e1178a00b5071edd255d1ee7e32fd8ef409 | 6a79b7f79503e25aab15a4ea1cf3bde8cbee5d16 | /FP/Laboratory Assignments/Zoo/domain.py | 1f8807a9d7b999d7bee2117302611933d32c5b47 | [] | no_license | birsandiana99/UBB-Work | 5b2bbc942cb34ae2dc7c3f1c3712ef53b55a28a4 | 20a7a0bdf4fb9c25370114dee61e0d85f7fcef2b | refs/heads/master | 2023-07-11T11:19:37.430561 | 2021-08-22T13:33:12 | 2021-08-22T13:33:12 | 398,805,779 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 364 | py | def create(code, name, type, species):
return {"code":code,"name":name,"type":type,"species":species}
def getType(animal):
    """Return the 'type' field of the given animal record (a dict)."""
    key = "type"
    return animal[key]
def getName(animal):
    """Return the 'name' field of the given animal record (a dict)."""
    key = "name"
    return animal[key]
def getSpecies(animal):
    """Return the 'species' field of the given animal record (a dict)."""
    key = "species"
    return animal[key]
def getCode(animal):
    """Return the 'code' field of the given animal record (a dict)."""
    key = "code"
    return animal[key]
def setType(animal,new_type):
    """Overwrite the 'type' field of *animal* in place; returns None."""
    animal.update(type=new_type)
| [
"56911032+birsandiana99@users.noreply.github.com"
] | 56911032+birsandiana99@users.noreply.github.com |
16dad0a21b26c5ed2a8d46473bd468c52ae395a0 | 4ca47225b7b3f468eab9dc391aebc00485869a65 | /manage.py | d2a4ed6f2d2903ff622d9f08387979013f43a709 | [] | no_license | HippyFizz/conduster | cec1f972610c827ed692c4e48b06b9215ffbfac3 | 9d90f838da6ff44402c550934d1c323a26f29036 | refs/heads/master | 2020-03-08T20:29:58.126183 | 2018-04-06T11:02:00 | 2018-04-06T11:02:00 | 128,383,501 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 805 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "condust.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| [
"buptyozzz@gmail.com"
] | buptyozzz@gmail.com |
56b5cf1eaba651687a7c590fa1649daae00ec525 | 1b0755fafd5993c8fe5c847d0f3b250f0705cc87 | /perf/__init__.py | ccef7a523ee945da1eb514e9d7dade75768eb8dd | [
"MIT"
] | permissive | pombredanne/perf | 65b722b2822daf598798da40917abdc608708ec3 | da5f2259815c39569957f584a7e1e57cfdbbb927 | refs/heads/master | 2021-04-29T11:31:23.533547 | 2016-12-16T14:50:02 | 2016-12-16T14:50:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 913 | py | from __future__ import division, print_function, absolute_import
__version__ = '0.9.2'
# Clocks: prefer the PEP 418 monotonic/perf_counter clocks; on Python < 3.3
# fall back to time.time (time.clock on Windows, where it has much finer
# resolution than time.time).
try:
    # Python 3.3+ (PEP 418)
    from time import monotonic as monotonic_clock, perf_counter
except ImportError:
    import sys
    import time
    monotonic_clock = time.time
    if sys.platform == "win32":
        perf_counter = time.clock
    else:
        perf_counter = time.time
    # Keep the fallback helper modules out of the package namespace.
    del sys, time
__all__ = ['monotonic_clock', 'perf_counter']
# Re-export the public API from the private submodules, extending __all__
# as each group is imported.
from perf._utils import is_significant, python_implementation, python_has_jit  # noqa
__all__.extend(('is_significant', 'python_implementation', 'python_has_jit'))
from perf._metadata import format_metadata  # noqa
__all__.append('format_metadata')
from perf._bench import Run, Benchmark, BenchmarkSuite, add_runs  # noqa
__all__.extend(('Run', 'Benchmark', 'BenchmarkSuite', 'add_runs'))
from perf._runner import Runner  # noqa
__all__.append('Runner')
| [
"vstinner@redhat.com"
] | vstinner@redhat.com |
18038f0af6c237d5b9db5678328e4d466f172dc2 | 57fec0f5928beaaeb2dc66004267204e77bf05a7 | /scripts/05-gmaps-test.py | ca95867cc5fec1d0fc87836f9afd89caf7c679cc | [] | no_license | fgolemo/neo-m8p-python | a26d382cd0a8d90bd8eca4a6a2c13a51bc1a08b9 | f9af936cdc804b24a76b697df749b0aca0325bed | refs/heads/master | 2020-06-21T09:55:13.280892 | 2019-07-25T17:36:07 | 2019-07-25T17:36:07 | 197,414,904 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 138 | py | import cv2
from neom8p.gmaps import get_gmap
# Fetch a static map tile around the given lat/lon (Montreal) at zoom 19.
# NOTE(review): 'map' shadows the builtin; acceptable in a throwaway script.
map = get_gmap(45.530807,-73.613293, 19)
cv2.imshow("map", map)
# waitKey(1) returns after ~1 ms, so the window disappears as the script
# exits right after printing.
cv2.waitKey(1)
print ("yo")
"fgolemo@gmail.com"
] | fgolemo@gmail.com |
5136795babade67f3065818c554052d815a7e273 | 48a517c985012550cf8eedcecf7a4730f4940ba4 | /config.ini | a109ae28b5754da0473191d775d22addc9f0bb38 | [
"MIT"
] | permissive | sharmaking/MarketStragetyClient | 29ac09a6509f40317dc3ae707f3f5bf37b6ba04d | fe5fe151149ba061872c752bb426d997176be046 | refs/heads/master | 2021-01-18T16:28:13.924217 | 2014-03-14T06:09:19 | 2014-03-14T06:09:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 642 | ini | #!/usr/bin/python
# -*- coding: utf-8 -*-
# config.ini — connection and request settings for the market data client.
# Server IP address
# HOST = '192.168.1.186'  # intranet IP
HOST = '180.166.168.126' # public IP
# Server port number
PORT = 18202
# Request type
REQUEST_TYPE = 2 # 0: today's data, 1: one specific day, 2: a time range
# Mode for today's-data requests
REQUEST_FLAG = 0 # 0: realtime, 1: re-receive from market open, 2: start at a given time (only meaningful when REQUEST_TYPE = 0)
# Start time
START_TIME = "2013-1-1 9:30:0" # start of the requested data range
# End time
END_TIME = "2014-4-27 9:30:0" # end of the requested data range
# Whether to subscribe to all stocks
SUB_ALL_STOCK = False
| [
"abramsayka@gmail.com"
] | abramsayka@gmail.com |
b6c185d0a2e97c9a334607abb9e94f890f3678f6 | 9fb9569f95ae10da9762fe246c6ffbedab177e12 | /python3/GetPic.py | 258acfecc1f13238300a761a57523c502bf986bb | [] | no_license | king404kid/pythonSpace | ac8508c842851d3523ba1bf39a21f07dc40e0a9e | caca863b2a3c18c2840255d6a9f95850b548942c | refs/heads/master | 2021-01-01T03:56:12.078462 | 2016-04-26T11:50:53 | 2016-04-26T11:50:53 | 56,913,770 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 996 | py | #coding=utf-8
import re
import urllib.request
import os
def getHtml(url):
    """Fetch *url* and return the response body decoded to text.

    The charset is taken from the Content-Type header; falls back to UTF-8
    when the server does not declare one (the original code passed the
    resulting None straight to bytes.decode, which raises TypeError).
    """
    page = urllib.request.urlopen(url)
    html = page.read()
    codec = page.info().get_param('charset')  # Python 2 used getparam instead
    if not codec:
        codec = 'utf-8'
    html = html.decode(codec)
    return html
def getImg(html):
    """Return every .jpg URL found in *html*.

    Matches src="...jpg" attributes that are followed either by whitespace
    and '>' or by ' size', mirroring Tieba's image markup.
    """
    pattern = re.compile(r'src="(.+?\.jpg)"(?:\s>| size)')
    return pattern.findall(html)
def saveImg(html):
    """Download every .jpg referenced in *html* into the local 'pic' folder.

    Files are named sequentially via getSaveUrl (pic/python0.jpg, ...).
    """
    imglist = getImg(html)
    x = 0
    for imgurl in imglist:
        print(imgurl)
        # urlretrieve returns (filename, headers); the value is unused here.
        temp = urllib.request.urlretrieve(imgurl, getSaveUrl(x))
        # print(temp)
        x += 1
def getSaveUrl(x):
isExists = os.path.exists('pic')
if not isExists:
os.makedirs('pic')
return 'pic/python%s.jpg' % x
# html = getHtml("http://tieba.baidu.com/p/2460150866")
html = getHtml("http://tieba.baidu.com/p/4490600900")
saveImg(html) | [
"king404kid@qq.com"
] | king404kid@qq.com |
ad2238efd01ffaabc7001f5a5e801fbe84d4faad | 308877da7981f1db43d64d8e1cbb411f94328ccc | /C4-DB/wk2Ex2-db.py | 054d569493e8b9eaacdc31e93c340b7512a50589 | [] | no_license | reachravirajbhat/LearnPython | 5e92f78d0366d449813e0770888bb26482aa6158 | f40a76c4015d25fdd8dae65f1bdea4a52b71213d | refs/heads/master | 2020-03-19T04:38:00.960727 | 2018-06-02T20:32:33 | 2018-06-02T20:32:33 | 135,848,333 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,621 | py | import sqlite3
conn = sqlite3.connect('orgemaildb.sqlite')
cur = conn.cursor()
# If a table names <Counts> already exists, clean up its content and delete it
cur.execute('DROP TABLE IF EXISTS Counts')
# Creat a new table named <Counts> with two fields <org> and <count>
cur.execute('CREATE TABLE Counts (org TEXT, count INTEGER)')
# Get text file name to source data to populate <Counts> table
fname = input('Enter source file name: ')
if (len(fname) < 1): fname = 'mbox.txt'
fh = open(fname)
# Traverse every line in the file to get email id
for line in fh:
if not line.startswith('From: '): continue
# Split the line into words separated by space to get the email id.
pieces = line.split()
email = pieces[1]
# Split email id into two pieces to get username and org name separated
# by '@'
pieces = email.split('@')
org = pieces[1]
# Search <Counts> table to see if there is already and entry with
# org name.
cur.execute('SELECT count FROM Counts WHERE org = ? ', (org,))
row = cur.fetchone()
if row is None:
# No entry with this org name exists. Add a new one
cur.execute('''INSERT INTO Counts (org, count)
VALUES (?, 1)''', (org,))
else:
# Entry exists; Increment the count for the entry by one
cur.execute('UPDATE Counts SET count = count + 1 WHERE org = ?',
(org,))
conn.commit()
# https://www.sqlite.org/lang_select.html
sqlstr = 'SELECT org, count FROM Counts ORDER BY count DESC LIMIT 10'
for row in cur.execute(sqlstr):
print(str(row[0]), row[1])
cur.close()
| [
"noreply@github.com"
] | reachravirajbhat.noreply@github.com |
f246eeb596a197247031628bd0b9723509ae18fe | 77ed9ed2ddc86c1e5c39ee398693677127b0be3a | /biebfeed/forms.py | 5b39467c8df2b9cb84d49f601e376e860cdcf78f | [] | no_license | chrissiegman/showcase-project | eec97bb3b8b17b074369bbce84bd56f28eb24cb3 | b679e05c26cf7e6fa266d96383b9a5a6e28697c1 | refs/heads/master | 2016-08-11T17:02:19.024159 | 2015-11-30T04:07:56 | 2015-11-30T04:07:56 | 44,128,245 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 195 | py | from django import forms
from biebfeed.models import TwitterTarget
class FriendForm(forms.ModelForm):
class Meta:
model = TwitterTarget
fields = ('user', 'target_username',) | [
"chrissiegman@yahoo.com"
] | chrissiegman@yahoo.com |
455fd32375b0d1efe8ff77d4e88ee05890596812 | 33ce4d07c02e53028050154996cc74fcd71c59d0 | /Sklearn Examples.py | b3a8ea98425d548aee79220c1ea6e51eaac117e7 | [] | no_license | mstampfer/Equities | 97ccf0a66dd4bbd7c84482f85608eec02aed48e7 | 2c8e23d4f77c51261fe97ce53cf13d043d9ef8e5 | refs/heads/master | 2021-01-10T06:14:24.975395 | 2015-12-14T12:22:33 | 2015-12-14T12:22:33 | 44,776,698 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,223 | py | print(__doc__)
import numpy as np
from time import time
from operator import itemgetter
from scipy.stats import randint as sp_randint
from sklearn.grid_search import GridSearchCV, RandomizedSearchCV
from sklearn.datasets import load_digits
from sklearn.ensemble import RandomForestClassifier
# get some data
digits = load_digits()
X, y = digits.data, digits.target
# build a classifier
clf = RandomForestClassifier(n_estimators=20)
# Utility function to report best scores
def report(grid_scores, n_top=3):
top_scores = sorted(grid_scores, key=itemgetter(1), reverse=True)[:n_top]
for i, score in enumerate(top_scores):
print("Model with rank: {0}".format(i + 1))
print("Mean validation score: {0:.3f} (std: {1:.3f})".format(
score.mean_validation_score,
np.std(score.cv_validation_scores)))
print("Parameters: {0}".format(score.parameters))
print("")
# specify parameters and distributions to sample from
param_dist = {"max_depth": [3, None],
"max_features": sp_randint(1, 11),
"min_samples_split": sp_randint(1, 11),
"min_samples_leaf": sp_randint(1, 11),
"bootstrap": [True, False],
"criterion": ["gini", "entropy"]}
# run randomized search
n_iter_search = 20
random_search = RandomizedSearchCV(clf, param_distributions=param_dist,
n_iter=n_iter_search)
start = time()
random_search.fit(X, y)
print("RandomizedSearchCV took %.2f seconds for %d candidates"
" parameter settings." % ((time() - start), n_iter_search))
report(random_search.grid_scores_)
# use a full grid over all parameters
param_grid = {"max_depth": [3, None],
"max_features": [1, 3, 10],
"min_samples_split": [1, 3, 10],
"min_samples_leaf": [1, 3, 10],
"bootstrap": [True, False],
"criterion": ["gini", "entropy"]}
# run grid search
grid_search = GridSearchCV(clf, param_grid=param_grid)
start = time()
grid_search.fit(X, y)
print("GridSearchCV took %.2f seconds for %d candidate parameter settings."
% (time() - start, len(grid_search.grid_scores_)))
report(grid_search.grid_scores_) | [
"git@axonconsulting.com"
] | git@axonconsulting.com |
6c7319e3ca63f14691405f670db9d264022ea892 | 755b78b64fd005d5c9b9fb01e6873f291c29d9a6 | /admin/admin/general/migrations/0007_card_expire_at.py | ef2d89801c40904143a9a16b4fb074803be3a11d | [] | no_license | Unanimad/fideli.club | 87b72e53745fa8579636da467f19ba97e6c67638 | 7530568d0de428574d930b755dd96c7e78e66556 | refs/heads/master | 2020-05-27T01:02:11.341241 | 2017-02-18T00:40:59 | 2017-02-18T00:40:59 | 188,431,717 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 490 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2017-02-06 23:55
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('general', '0006_auto_20170205_2058'),
]
operations = [
migrations.AddField(
model_name='card',
name='expire_at',
field=models.DateTimeField(blank=True, null=True, verbose_name='Válido até'),
),
]
| [
"raphaelf.ti@gmail.com"
] | raphaelf.ti@gmail.com |
8f52fb011cd6b10bf88162b48c40c0db42f2d237 | 1a2e9f08fcec8a6bf11a68d5a128fb234eee6905 | /learning_log/ll_env/bin/easy_install | 0657d1dd16dca763e6bb7cd06c0487c60d7d681f | [] | no_license | ciancsmith/learning_log | a76d8ff8b9a5d01dac2b3b60937a65b401219bec | 8e7e5c36e7793ff532c5bf98aa95606b63fd3e15 | refs/heads/master | 2022-04-26T14:13:47.890159 | 2020-04-27T15:50:11 | 2020-04-27T15:50:11 | 259,282,563 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 263 | #!/home/pi/Projects/learning_log/ll_env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"ciancs@laptop.home"
] | ciancs@laptop.home | |
2a36a0fcda34a4ebf842a1cd4201ec5da5abc376 | 4c2c1bab16cb05316d093576b24ae848482a3211 | /etc/changelog.py | 4d144ad781084ac6290c564f8f577ba9bdfbc1c8 | [
"BSD-3-Clause-Clear"
] | permissive | fmatter/lingtreemaps | 56c3e3c528e579a9e15ac690ceb8559f4d18edee | 49f58bfd29d28aa0a8ecef90b081726969384dc4 | refs/heads/main | 2023-07-06T05:37:54.734190 | 2023-06-22T03:50:39 | 2023-06-22T03:50:39 | 530,096,540 | 6 | 2 | BSD-3-Clause | 2022-10-01T17:18:15 | 2022-08-29T06:39:23 | Python | UTF-8 | Python | false | false | 222 | py | import keepachangelog
import configparser
config = configparser.ConfigParser()
config.read(".bumpversion.cfg")
changes = keepachangelog.release(
"CHANGELOG.md", new_version=config["bumpversion"]["current_version"]
)
| [
"florianmatter@gmail.com"
] | florianmatter@gmail.com |
0a0c2c1a4782726b7b221e64225fcd0cd0c9dde0 | 79f78f91827953851090b55137fd04bed4596860 | /cifar/models/wrn.py | 9f3ec91e339ce4822c60726a4ee5ac31b8db6fd0 | [
"BSD-3-Clause"
] | permissive | matej-ulicny/harmonic-networks | 850c20b45844d77977ade676e98b73fc4e7da7f1 | de2bbc636b0f1b928e3e043d4bd3090d100ff627 | refs/heads/master | 2022-05-04T15:18:43.334953 | 2022-04-22T12:16:05 | 2022-04-22T12:16:05 | 184,291,128 | 52 | 9 | null | null | null | null | UTF-8 | Python | false | false | 2,630 | py | """
Network definition of Wide Residual Network.
The code is based on pytorch implementation of WRN:
https://github.com/szagoruyko/wide-residual-networks/tree/master/pytorch
2019 Matej Ulicny
"""
import torch
import torch.nn.functional as F
import utils
def resnet(depth, width, num_classes, dropout):
assert (depth - 4) % 6 == 0, 'depth should be 6n+4'
n = (depth - 4) // 6
widths = [int(v * width) for v in (16, 32, 64)]
def gen_block_params(ni, no):
return {
'conv0': utils.conv_params(ni, no, 3),
'conv1': utils.conv_params(no, no, 3),
'bn0': utils.bnparams(ni),
'bn1': utils.bnparams(no),
'convdim': utils.conv_params(ni, no, 1) if ni != no else None,
}
def gen_group_params(ni, no, count):
return {'block%d' % i: gen_block_params(ni if i == 0 else no, no)
for i in range(count)}
flat_params = utils.cast(utils.flatten({
'conv0': utils.conv_params(3, 16, 3),
'group0': gen_group_params(16, widths[0], n),
'group1': gen_group_params(widths[0], widths[1], n),
'group2': gen_group_params(widths[1], widths[2], n),
'bn': utils.bnparams(widths[2]),
'fc': utils.linear_params(widths[2], num_classes),
}))
utils.set_requires_grad_except_bn_(flat_params)
def block(x, params, base, mode, stride):
o1 = F.relu(utils.batch_norm(x, params, base + '.bn0', mode), inplace=True)
y = F.conv2d(o1, params[base + '.conv0'], stride=stride, padding=1)
o2 = F.relu(utils.batch_norm(y, params, base + '.bn1', mode), inplace=True)
if dropout > 0:
o2 = F.dropout(o2, p=dropout, training=mode, inplace=False)
z = F.conv2d(o2, params[base + '.conv1'], stride=1, padding=1)
if base + '.convdim' in params:
return z + F.conv2d(o1, params[base + '.convdim'], stride=stride)
else:
return z + x
def group(o, params, base, mode, stride):
for i in range(n):
o = block(o, params, '%s.block%d' % (base,i), mode, stride if i == 0 else 1)
return o
def f(input, params, mode):
x = F.conv2d(input, params['conv0'], padding=1)
g0 = group(x, params, 'group0', mode, 1)
g1 = group(g0, params, 'group1', mode, 2)
g2 = group(g1, params, 'group2', mode, 2)
o = F.relu(utils.batch_norm(g2, params, 'bn', mode))
o = F.avg_pool2d(o, 8, 1, 0)
o = o.view(o.size(0), -1)
o = F.linear(o, params['fc.weight'], params['fc.bias'])
return o
return f, flat_params
| [
"mtj.ulicny@gmail.com"
] | mtj.ulicny@gmail.com |
41ebec25755d59ff6b7c39a02ee7b633ecb9eb93 | 24223ef61937be40f0ea23db279a93b75a0b7a0f | /pygogo/utils.py | e1c94efb0cea6c5c6bfdab9424b8a04a82d3f199 | [
"MIT"
] | permissive | liutaihua/pygogo | cfd13a036bcbdf7767fa05e31ab2161be9c6a99b | 7b7a99fdf28cef3185cf7f3f8f0cad8b8d5691b2 | refs/heads/master | 2021-01-18T01:48:15.294501 | 2016-01-01T10:58:27 | 2016-01-01T10:58:27 | 48,997,690 | 1 | 0 | null | 2016-01-04T13:08:29 | 2016-01-04T13:08:29 | null | UTF-8 | Python | false | false | 8,266 | py | # -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
pygogo.utils
~~~~~~~~~~~~
Misc classes and functions that don't warrant their own module
Examples:
basic usage::
>>> CustomEncoder().encode(range(5))
'[0, 1, 2, 3, 4]'
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import logging
import sys
from json import JSONEncoder
from builtins import *
module_hdlr = logging.StreamHandler(sys.stdout)
module_logger = logging.getLogger(__name__)
module_logger.addHandler(module_hdlr)
class CustomEncoder(JSONEncoder):
"""A unicode aware JSON encoder that can handle iterators, dates, and times
Examples:
>>> CustomEncoder().encode(range(5))
'[0, 1, 2, 3, 4]'
>>> from json import dumps
>>> dumps(range(5), cls=CustomEncoder)
'[0, 1, 2, 3, 4]'
"""
def default(self, obj):
""" Encodes a given object
Args:
obj (scalar): The object to encode.
Returns:
The encoded object
Examples:
>>> CustomEncoder().default(range(5))
[0, 1, 2, 3, 4]
"""
if hasattr(obj, 'real'):
encoded = float(obj)
elif hasattr(obj, 'union'):
encoded = tuple(obj)
elif set(['next', 'union', '__iter__']).intersection(dir(obj)):
encoded = list(obj)
else:
encoded = str(obj)
return encoded
class StructuredMessage(object):
"""Converts a message and kwargs to a json string
Attributes:
kwargs (dict): Keyword arguments passed to
:class:`~pygogo.utils.CustomEncoder`.
Args:
message (string): The message to log.
kwargs (dict): Keyword arguments passed to
:class:`~pygogo.utils.CustomEncoder`.
Returns:
New instance of :class:`StructuredMessage`
See also:
:class:`pygogo.utils.StructuredAdapter`
Examples:
>>> from json import loads
>>> msg = StructuredMessage('hello world', key='value')
>>> loads(str(msg)) == {'message': 'hello world', 'key': 'value'}
True
"""
def __init__(self, message=None, **kwargs):
"""Initialization method.
Args:
message (string): The message to log.
kwargs (dict): Keyword arguments passed to
:class:`~pygogo.utils.CustomEncoder`.
Returns:
New instance of :class:`StructuredMessage`
Examples:
>>> StructuredMessage('message') # doctest: +ELLIPSIS
<pygogo.utils.StructuredMessage object at 0x...>
"""
kwargs['message'] = message
self.kwargs = kwargs
def __str__(self):
""" String method
Returns:
str: The encoded object
Examples
>>> from json import loads
>>> msg = str(StructuredMessage('hello world', key='value'))
>>> loads(msg) == {'message': 'hello world', 'key': 'value'}
True
"""
return str(CustomEncoder().encode(self.kwargs))
class StructuredAdapter(logging.LoggerAdapter):
"""A logging adapter that creates a json string from a log message and the
`extra` kwarg
See also:
:class:`pygogo.utils.StructuredMessage`
:meth:`pygogo.Gogo.get_structured_logger`
Examples:
>>> from io import StringIO
>>> from json import loads
>>> s = StringIO()
>>> logger = logging.getLogger()
>>> hdlr = logging.StreamHandler(s)
>>> logger.addHandler(hdlr)
>>> structured_logger = StructuredAdapter(logger, {'all': True})
>>> structured_logger.debug('hello', extra={'key': u'value'})
>>> loads(s.getvalue()) == {
... 'all': True, 'message': 'hello', 'key': 'value'}
True
"""
def process(self, msg, kwargs):
""" Modifies the message and/or keyword arguments passed to a logging
call in order to insert contextual information.
Args:
msg (str): The message to log.
kwargs (dict):
Returns:
Tuple of (:class:`~pygogo.utils.StructuredMessage`, modified kwargs)
Examples:
>>> from json import loads
>>> logger = logging.getLogger()
>>> structured_logger = StructuredAdapter(logger, {'all': True})
>>> extra = {'key': 'value'}
>>> m, k = structured_logger.process('message', {'extra': extra})
>>> loads(m) == {'all': True, 'message': 'message', 'key': 'value'}
True
>>> k == {'extra': {'all': True, 'key': 'value'}}
True
"""
extra = kwargs.get('extra', {})
extra.update(self.extra)
kwargs['extra'] = extra
return str(StructuredMessage(msg, **extra)), kwargs
class LogFilter(logging.Filter):
"""Filters log messages depending on level
Attributes:
level (int): The logging level.
+-------------------------+-------+
| logging level attribute | value |
+=========================+=======+
| CRITICAL | 50 |
+-------------------------+-------+
| ERROR | 40 |
+-------------------------+-------+
| WARNING | 30 |
+-------------------------+-------+
| INFO | 20 |
+-------------------------+-------+
| DEBUG | 10 |
+-------------------------+-------+
| NOTSET | 0 |
+-------------------------+-------+
Args:
level (int): The logging level.
Returns:
New instance of :class:`LogFilter`
See also:
:meth:`pygogo.Gogo.update_hdlr`
"""
def __init__(self, level):
"""Initialization method.
Args:
level (int): The logging level.
Returns:
New instance of :class:`LogFilter`
Examples:
>>> LogFilter(40) # doctest: +ELLIPSIS
<pygogo.utils.LogFilter object at 0x...>
"""
self.high_level = level
def filter(self, record):
"""Determines whether or a not a message should be logged.
Args:
record (obj): The event to (potentially) log
Returns:
bool: True if the event level is lower than self.high_level
Examples:
>>> attrs = {'levelno': logging.INFO}
>>> record = logging.makeLogRecord(attrs)
>>> LogFilter(40).filter(record)
True
"""
return record.levelno < self.high_level
def get_structured_filter(name='', **kwargs):
"""Returns a structured filter that injects contextual information into
log records.
Args:
kwargs (dict): The contextual information you wish to inject
See also:
:meth:`pygogo.Gogo.update_hdlr`
Returns:
New instance of :class:`pygogo.utils.StructuredFilter`
Examples:
>>> structured_filter = get_structured_filter(user='fred')
>>> structured_filter # doctest: +ELLIPSIS
<pygogo.utils...StructuredFilter object at 0x...>
>>>
>>> logger = logging.getLogger('structured_filter')
>>> hdlr = logging.StreamHandler(sys.stdout)
>>> formatter = logging.Formatter('User %(user)s said, "%(message)s".')
>>> hdlr.setFormatter(formatter)
>>> logger.addFilter(structured_filter)
>>> logger.addHandler(hdlr)
>>> logger.debug('A debug message')
User fred said, "A debug message".
"""
class StructuredFilter(logging.Filter):
"""
Injects contextual information into log records.
"""
def filter(self, record):
"""Adds contextual information to a log record
Args:
record (obj): The event to contextualize
Returns:
bool: True
"""
for k, v in kwargs.items():
setattr(record, k, v)
return True
return StructuredFilter(name)
| [
"reubano@gmail.com"
] | reubano@gmail.com |
5d6b8730c27a10b0c7bdf6f11f869a273fba0a89 | 0bfb09735655d0c74b45f59897386cdc20d06794 | /pytools_git/clim_from_a20/clim_hinter.py | 306620a57be5a0ce09b8c30e4c5eb1ec97e217cf | [] | no_license | aleatorius/roms_mypytools | 693d0adc034e108677fa5e8cbdc7d2425f2f3fbc | 22939e90a44ef421540d9d3e9240b8468322d422 | refs/heads/master | 2021-01-20T12:34:20.968892 | 2015-07-06T15:32:35 | 2015-07-06T15:32:35 | 26,806,264 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,960 | py | #!/home/mitya/testenv/bin/python -B # -*- coding: utf-8 -*-
import os
import sys
from datetime import datetime
import numpy as np
from netCDF4 import Dataset
import netcdftime
import time as tm
from calendar import monthrange
import datetime
from datetime import date, time
import argparse
# load boost before: module load boost/1.53.0 nco
parser = argparse.ArgumentParser(description='transect write 0.1')
parser.add_argument(
'-i',
help='input file',
dest='inf',
action="store"
)
parser.add_argument(
'-o',
help='input file',
dest='outf',
action="store"
)
args = parser.parse_args()
def unpack(ina):
if ina.ndim == 0:
print "is it scalar or 0d array?"
outa = ina[()]
else:
outa = np.zeros(ina.shape)
outa[:] = ina[:]
return outa
fimexdir="/home/mitya/pytools_git/clim_from_a20/fimex_config/"
f = Dataset(args.inf)
inter_time = []
inter_notime = []
for i in f.variables.keys():
print i, f.variables[i].dimensions
try:
print "try statement"
if any("eta" in s for s in f.variables[i].dimensions):
if any("time" in l for l in f.variables[i].dimensions):
if any("eta_rho" in l for l in f.variables[i].dimensions):
inter_time.append((i,"rho"))
elif any("eta_psi" in l for l in f.variables[i].dimensions):
inter_time.append((i,"psi"))
elif any("eta_u" in l for l in f.variables[i].dimensions):
inter_time.append((i,"u"))
elif any("eta_v" in l for l in f.variables[i].dimensions):
inter_time.append((i,"v"))
else:
pass
else:
inter_notime.append(i)
else:
pass
except:
pass
print "time dependent", inter_time
print "time independent", inter_notime
f.close()
iniout = "iniout"
for a in inter_time:
print a
i=a[0]
if a[1]=="rho":
cfg = open(fimexdir+"template.cfg", "r")
inp_contents = cfg.readlines()
input_ind = inp_contents.index("[input]\n")
output_ind = inp_contents.index("[output]\n")
contents = inp_contents[:]
os.system("ncks -v "+"lat_"+str(a[1])+",lon_"+str(a[1])+","+str(i)+" "+args.inf+" -o "+"inp_"+str(i)+".nc")
contents.insert(input_ind+1, "file=inp_"+str(i)+".nc\n")
contents.insert(output_ind+2, "file="+str(iniout)+"_"+str(i)+".nc\n")
cfg_out = open("list.cfg", "w")
contents = "".join(contents)
cfg_out.write(contents)
cfg_out.close()
os.system("fimex -c list.cfg")
ncrename = "ncrename -v lat,lat_"+str(a[1])+" -v lon,lon_"+str(a[1])+" "+str(iniout)+"_"+str(i)+".nc"
print ncrename
os.system(ncrename)
cfg.close()
elif a[1]=="u":
cfg = open(fimexdir+"template_u.cfg", "r")
inp_contents = cfg.readlines()
input_ind = inp_contents.index("[input]\n")
output_ind = inp_contents.index("[output]\n")
contents = inp_contents[:]
os.system("ncks -v "+"lat_"+str(a[1])+",lon_"+str(a[1])+","+str(i)+" "+args.inf+" -o "+"inp_"+str(i)+".nc")
contents.insert(input_ind+1, "file=inp_"+str(i)+".nc\n")
contents.insert(output_ind+2, "file="+str(iniout)+"_"+str(i)+".nc\n")
cfg_out = open("list.cfg", "w")
contents = "".join(contents)
cfg_out.write(contents)
cfg_out.close()
os.system("fimex -c list.cfg")
ncrename = "ncrename -v lat,lat_"+str(a[1])+" -v lon,lon_"+str(a[1])+" "+str(iniout)+"_"+str(i)+".nc"
print ncrename
os.system(ncrename)
cfg.close()
elif a[1]=="v":
cfg = open(fimexdir+"template_v.cfg", "r")
inp_contents = cfg.readlines()
input_ind = inp_contents.index("[input]\n")
output_ind = inp_contents.index("[output]\n")
contents = inp_contents[:]
os.system("ncks -v "+"lat_"+str(a[1])+",lon_"+str(a[1])+","+str(i)+" "+args.inf+" -o "+"inp_"+str(i)+".nc")
contents.insert(input_ind+1, "file=inp_"+str(i)+".nc\n")
contents.insert(output_ind+2, "file="+str(iniout)+"_"+str(i)+".nc\n")
cfg_out = open("list.cfg", "w")
contents = "".join(contents)
cfg_out.write(contents)
cfg_out.close()
os.system("fimex -c list.cfg")
ncrename = "ncrename -v lat,lat_"+str(a[1])+" -v lon,lon_"+str(a[1])+" "+str(iniout)+"_"+str(i)+".nc"
print ncrename
os.system(ncrename)
cfg.close()
else:
pass
if not args.outf:
output = args.inf+"_hinter"
else:
output = args.outf
os.system("rm -f inp_*.nc")
os.system(" nccopy -k 2 iniout_zeta.nc "+output)
os.system("rm -f iniout_zeta.nc")
os.system("for i in iniout_*.nc; do echo $i; ncks -A $i "+output+";done")
os.system("rm -f iniout_*.nc")
| [
"aleatorius@gmail.com"
] | aleatorius@gmail.com |
aa76a6699424378883e0efde17ed8d3a4a6d32d0 | 068580ed7675e52a5deac783266c2ccc1098ea29 | /checkout/models.py | 646a27f4b37e0011ce166646f4372670918ff90b | [] | no_license | Code-Institute-Submissions/honeyshopstef | c32d7ba2db3fe852e09e479e161a33c497750aaa | 5be4a7b4d16d930a5afa302b2ff05f2e25397531 | refs/heads/master | 2022-11-29T18:04:44.519752 | 2020-07-30T20:03:17 | 2020-07-30T20:03:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,642 | py | import uuid
from django.db import models
from django.db.models import Sum
from django.conf import settings
from django_countries.fields import CountryField
from products.models import Product
from profiles.models import UserProfile
class Order(models.Model):
order_number = models.CharField(max_length=32, null=False, editable=False)
user_profile = models.ForeignKey(UserProfile, on_delete=models.SET_NULL,
null=True, blank=True,
related_name='orders')
full_name = models.CharField(max_length=50, null=False, blank=False)
email = models.EmailField(max_length=254, null=False, blank=False)
phone_number = models.CharField(max_length=20, null=False, blank=False)
country = CountryField(blank_label='Country *', null=False, blank=False)
postcode = models.CharField(max_length=20, null=True, blank=True)
town_or_city = models.CharField(max_length=40, null=False, blank=False)
street_address1 = models.CharField(max_length=80, null=False, blank=False)
street_address2 = models.CharField(max_length=80, null=True, blank=True)
county = models.CharField(max_length=80, null=True, blank=True)
date = models.DateTimeField(auto_now_add=True)
delivery_cost = models.DecimalField(max_digits=6, decimal_places=2,
null=False, default=0)
order_total = models.DecimalField(max_digits=10, decimal_places=2,
null=False, default=0)
grand_total = models.DecimalField(max_digits=10, decimal_places=2,
null=False, default=0)
original_bag = models.TextField(null=False, blank=False, default='')
stripe_pid = models.CharField(max_length=254,
null=False, blank=False, default='')
def _generate_order_number(self):
"""
Generate a random, unique order number using UUID
"""
return uuid.uuid4().hex.upper()
def update_total(self):
"""
Update grand total each time a line item is added,
accounting for delivery costs.
"""
self.order_total = self.lineitems.aggregate(Sum('lineitem_total'))['lineitem_total__sum'] or 0
if self.order_total < settings.FREE_DELIVERY_THRESHOLD:
self.delivery_cost = self.order_total * settings.STANDARD_DELIVERY_PERCENTAGE / 100
else:
self.delivery_cost = 0
self.grand_total = self.order_total + self.delivery_cost
self.save()
def save(self, *args, **kwargs):
"""
Override the original save method to set the order number
if it hasn't been set already.
"""
if not self.order_number:
self.order_number = self._generate_order_number()
super().save(*args, **kwargs)
def __str__(self):
return self.order_number
class OrderLineItem(models.Model):
order = models.ForeignKey(Order, null=False, blank=False, on_delete=models.CASCADE, related_name='lineitems')
product = models.ForeignKey(Product, null=False, blank=False, on_delete=models.CASCADE)
quantity = models.IntegerField(null=False, blank=False, default=0)
lineitem_total = models.DecimalField(max_digits=6, decimal_places=2, null=False, blank=False, editable=False)
def save(self, *args, **kwargs):
"""
Override the original save method to set the lineitem total
and update the order total.
"""
self.lineitem_total = self.product.price * self.quantity
super().save(*args, **kwargs)
def __str__(self):
return f'SKU {self.product.sku} on order {self.order.order_number}' | [
"iiostefanos@gmail.com"
] | iiostefanos@gmail.com |
83b38e6545a2e6f6c5ec106a23e8e80be0b059b2 | 37ea661bbaed3706158b59ceede71c1e8d10f645 | /Huffman_encode.py | 638cadb1211deb2bcedb9b788437c52dc1f843a4 | [] | no_license | Prabhanda-Akiri/Design-of-Algorithms | 678769bee527eb76cfb08024ba6de7e7eb132649 | 1e4ba02d5b953a3ad3a427047e0c7b85749e5d10 | refs/heads/master | 2021-05-11T23:59:04.277537 | 2018-04-16T05:48:06 | 2018-04-16T05:48:06 | 117,523,786 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,794 | py | import heapq
class Node:
def __init__(self,s,f,y,x):
self.freq=f
self.symbol=s
self.left=y
self.right=x
def __lt__(self,other):
return self.freq<other.freq
class Huffman:
def __init__(self,S,F,n):
self.S=S
self.F=F
self.n=n
self.leaves=[]
self.heap=[]
for i in range(n):
leaf=Node(S[i],F[i],None,None)
heapq.heappush(self.heap,leaf)
self.leaves.append(leaf)
def apply(self):
for i in range(self.n-1):
x=heapq.heappop(self.heap)
y=heapq.heappop(self.heap)
node=Node(None,(x.freq+y.freq),y,x)
heapq.heappush(self.heap,node)
def show_codes(self):
self.root=heapq.heappop(self.heap)
self.recurse_print(self.root,[])
def recurse_print(self,v,L):
if v.left==None and v.right==None:
print('Symbol : ',v.symbol,' Code : ',end=' ')
for i in range(len(L)):
print(L[i],end='')
print()
else:
L.append(0)
self.recurse_print(v.left,L)
if L :
L.pop(-1)
L.append(1)
self.recurse_print(v.right,L)
if v.symbol==None:
L.pop(-1)
def main():
n=6
#n=int(input('\nEnter the number of Symbols: '))
print('\nSymbols and their corresponding frequencies....\n')
S=['a','b','c','d','e','f']
F=[20,12,10,8,4,3]
"""for i in range(n):
s=input()
s=s.split()
S.append(s[0])
F.append(s[1])"""
h=Huffman(S,F,n)
h.apply()
h.show_codes()
if __name__ == '__main__':
main()
| [
"noreply@github.com"
] | Prabhanda-Akiri.noreply@github.com |
a47f8034e2370aec414aa1e5b290f1bff3f65fe2 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_2700486_0/Python/jbaek/codejam3.py | 66cc08fb16dc343fe03e3fc66bf66e11429e006d | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 2,392 | py | from math import *
from itertools import *
import os
from decimal import *
ALLGRIDS = []
def main():
global ALLGRIDS
f = open("/home/jackie/Documents/Codejam/in")
lines = f.readlines()
cases = int(lines.pop(0))
for i in range(cases):
ALLGRIDS = []
print "Case #%d:" % (i+1),
guide = split_to_int(lines)
number = guide[0]
x = guide[1]
y = guide[2]
diamonds = []
grid = {}
if x == 0 and y == 0:
print "1.0"
continue
ALLGRIDS.append(grid)
do_problem(number, diamonds)
total = len(ALLGRIDS)
win = 0
for grid in ALLGRIDS:
if x in grid and grid[x] >= y+1:
win += 1
answer = str(Decimal(win)/Decimal(total))
if "." not in answer:
answer += ".0"
print answer
def do_problem(number,diamonds):
global ALLGRIDS
for i in range(number):
for j in range(len(ALLGRIDS)):
helper(ALLGRIDS[j], 0)
# drops one diamond
def helper(grid, pos):
global ALLGRIDS
if pos not in grid:
grid[pos]=0
highest = grid[pos]
if blockedleft(grid, pos):
if blockedright(grid,pos):
grid[pos]+=2
return
else:
helper(grid, pos+1)
return
elif blockedright(grid,pos):
helper(grid, pos-1)
return
# go on ground
elif highest == 0:
grid[pos]=1
return
else:
# right
newgrid = grid.copy()
ALLGRIDS.append(newgrid)
helper(newgrid, pos+1)
# left
helper(grid, pos-1)
def blockedleft(grid, pos):
return pos-1 in grid and grid[pos-1]>grid[pos]
def blockedright(grid, pos):
return pos+1 in grid and grid[pos+1]>grid[pos]
# general helper functions
def split_to_int(lines):
return [int(v) for v in lines.pop(0).split()]
def factors(n):
return set(reduce(list.__add__,
([i, n//i] for i in range(1, int(n**0.5) + 1) if n % i == 0)))
def isPrime(n):
if n == 2 or n == 3: return True
if n < 2 or n%2 == 0: return False
if n < 9: return True
if n%3 == 0: return False
r = int(n**0.5)
f = 5
while f <= r:
if n%f == 0: return False
if n%(f+2) == 0: return False
f +=6
return True
g = {0:1, 2:1}
#helper(g, 0)
#print ALLGRIDS
main()
| [
"eewestman@gmail.com"
] | eewestman@gmail.com |
ee9a241f9d288ae78366ae06757b0dee588ce874 | 5acc77c4d594c1750a9b7477499ee25b4c307bca | /ehpi_action_recognition/train_ehpi.py | 3c8f3b90123e199bd9a2df7439bbf06c510462ca | [
"MIT"
] | permissive | noboevbo/ehpi_action_recognition | bc15a3c260c79b85a82844a2779c9b1ec9cf42fd | 3b77eeb5103f0f11c8d4be993ec79dddad7e661c | refs/heads/master | 2021-12-29T05:24:31.891044 | 2021-12-19T16:23:36 | 2021-12-19T16:23:36 | 180,351,212 | 113 | 23 | null | 2019-04-23T11:24:27 | 2019-04-09T11:22:45 | Python | UTF-8 | Python | false | false | 3,006 | py | import os
import random
from typing import List
import torch
from ehpi_action_recognition.config import ehpi_dataset_path
from nobos_commons.data_structures.constants.dataset_part import DatasetPart
from nobos_commons.data_structures.dimension import ImageSize
from nobos_torch_lib.configs.training_configs.training_config_base import TrainingConfigBase
from nobos_torch_lib.datasets.action_recognition_datasets.ehpi_dataset import EhpiDataset, RemoveJointsOutsideImgEhpi, \
ScaleEhpi, TranslateEhpi, FlipEhpi, NormalizeEhpi
from nobos_torch_lib.datasets.samplers.imbalanced_dataset_sampler import ImbalancedDatasetSampler
from nobos_torch_lib.models.detection_models.shufflenet_v2 import ShuffleNetV2
from torch.utils.data import ConcatDataset, DataLoader
from torchvision.transforms import transforms
from ehpi_action_recognition.trainer_ehpi import TrainerEhpi
foot_indexes: List[int] = [11, 14]
knee_indexes: List[int] = [10, 13]
def get_train_set(dataset_path: str, image_size: ImageSize):
    """Build the concatenated EHPI training set.

    Each recording session becomes one EhpiDataset sharing the same
    augmentation pipeline; statistics are printed per dataset before
    concatenation.

    :param dataset_path: root folder containing the recording subfolders
    :param image_size: source image size used by the scale/translate/normalize transforms
    :return: torch ConcatDataset over all session datasets
    """
    num_joints = 15
    left_indexes: List[int] = [3, 4, 5, 9, 10, 11]
    right_indexes: List[int] = [6, 7, 8, 12, 13, 14]

    def _make_dataset(folder: str, dataset_part: DatasetPart) -> EhpiDataset:
        # One shared augmentation pipeline: clamp joints to the image, random
        # scale/translate, left-right flip (swapping mirrored joints), normalize.
        return EhpiDataset(os.path.join(dataset_path, folder),
                           transform=transforms.Compose([
                               RemoveJointsOutsideImgEhpi(image_size),
                               ScaleEhpi(image_size),
                               TranslateEhpi(image_size),
                               FlipEhpi(left_indexes=left_indexes, right_indexes=right_indexes),
                               NormalizeEhpi(image_size)
                           ]), num_joints=num_joints, dataset_part=dataset_part)

    datasets: List[EhpiDataset] = [
        # Set 1
        # NOTE(review): tagged DatasetPart.TEST although it is used in the
        # training set — confirm this split is intentional.
        _make_dataset("ofp_record_2019_03_11_HSRT_30FPS", DatasetPart.TEST),
        # Set 2
        _make_dataset("2019_03_13_Freilichtmuseum_30FPS", DatasetPart.TRAIN),
    ]
    for dataset in datasets:
        dataset.print_label_statistics()
    return ConcatDataset(datasets)
if __name__ == '__main__':
    batch_size = 128
    # Fix all RNG seeds (Python, Torch CPU, Torch CUDA) for reproducible runs.
    seed = 0
    random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
    # Train set
    train_set = get_train_set(ehpi_dataset_path, image_size=ImageSize(1280, 720))
    # Sampler presumably re-balances under-represented action classes — see
    # ImbalancedDatasetSampler for the exact strategy.
    sampler = ImbalancedDatasetSampler(train_set, dataset_type=EhpiDataset)
    train_loader = DataLoader(train_set, batch_size=batch_size, sampler=sampler, num_workers=1)
    # config
    train_config = TrainingConfigBase("ehpi_model", "models")
    train_config.weight_decay = 0
    train_config.num_epochs = 140
    trainer = TrainerEhpi()
    # NOTE(review): the `3` passed to ShuffleNetV2 is presumed to be the number
    # of output classes — confirm against ShuffleNetV2's signature.
    trainer.train(train_loader, train_config, model=ShuffleNetV2(3))
| [
"Dennis.Ludl@reutlingen-university.de"
] | Dennis.Ludl@reutlingen-university.de |
71396ce9f2428f3fce5cdcf1f604372da415a061 | 2a0cb5107e21f6ce9d535fb3bbd20af9eb0da0dd | /intersection.py | 9b51a9e624db2b6fb9fa0a20a4b15692cdb90b6c | [] | no_license | YuliangShi/Louvre-Evacuation | 5c692ce3ff15a5619f9042efe4f43f17e13a62d8 | dc35c3b4aac33a8ea5891710483d83baf718e20b | refs/heads/master | 2020-04-18T18:52:01.817043 | 2019-01-28T04:32:08 | 2019-01-28T04:32:08 | 167,697,139 | 1 | 0 | null | 2019-01-27T23:36:57 | 2019-01-26T14:21:49 | Python | UTF-8 | Python | false | false | 3,673 | py | import random
import numpy.linalg as LA
import numpy as np
# intersection
class Intersection:
    """
    A square intersection connecting pedestrian walkway blocks on one floor.

    Attributes:
        position: world-space center; x and y are scaled by 35.4, z is the floor.
        length: side length of the square, in the same scaled units.
        exit: True if this intersection is an exit (people are not re-routed out).
        in_blocks / out_blocks: adjacent blocks flowing into / out of the intersection.
        in_rows / out_rows: the individual pedestrian rows of those blocks.
        capacity: nominal occupancy limit, 0.4 persons per squared unit of area.
    """
    def __init__(self, position, all_blocks, length, exit=False):
        self.position = np.array([position[0] * 35.4, position[1] * 35.4, position[2]])  # center of the intersection
        self.length = length * 35.4
        self.exit = exit
        self.all_blocks = all_blocks
        self.capacity = self.length * self.length * 0.4
        self.wait_time = 10  # waiting time in the intersection
        # NOTE(review): `people` is never appended to within this class;
        # presumably populated by callers when a person's block becomes `self`.
        self.people = []
        self.in_blocks = []
        self.out_blocks = []
        self.in_rows = []
        self.out_rows = []
        self.n_in_rows = 0
        self.n_out_rows = 0
        # Classify each adjacent block as inbound or outbound: decide which
        # endpoint (A or B) is nearer to this intersection, then compare the
        # block's direction vector against the A-to-B reference direction.
        for each in self.all_blocks:
            default = [each.A[0] - each.B[0], each.A[1] - each.B[1], each.A[1]]
            if LA.norm(each.A - self.position) < LA.norm(each.B - self.position):
                if each.dirc == default[:-1]:
                    self.in_blocks.append(each)
                    self.in_rows += each.all_rows
                    self.n_in_rows += each.n_r
                else:
                    self.out_blocks.append(each)
                    self.out_rows += each.all_rows
                    self.n_out_rows += each.n_r
            else:
                if each.dirc == default[:-1]:
                    self.out_blocks.append(each)
                    self.out_rows += each.all_rows
                    self.n_out_rows += each.n_r
                else:
                    self.in_blocks.append(each)
                    self.in_rows += each.all_rows
                    self.n_in_rows += each.n_r
    def get_out_blocks(self):
        return self.out_blocks
    def get_in_blocks(self):
        return self.in_blocks
    def final_move(self, dt):
        """Advance the simulation by *dt*: admit people from inbound rows, and
        push people whose wait expired onto outbound rows (round-robin)."""
        # for people in the rows towards the intersection
        if len(self.people) < self.capacity:
            for row in self.in_rows:
                for ppl in row.all_indv:
                    if ppl.is_in_block():
                        break
                # NOTE(review): `ppl` here is the individual the inner loop
                # stopped at (or simply the last one); rows with no matching
                # individual still fall through, and `capacity` *grows* on
                # admission — both look suspicious, kept unchanged pending review.
                self.capacity += 1
                row.remove_first()
                ppl.wait_time = self.wait_time
                ppl.v = 0
                ppl.p = self.position
                ppl.block = self
                ppl.row = None
        # for people already in the intersection
        if not self.exit:
            count = 0
            for ppl in self.people:
                ppl.wait_time -= dt
                if ppl.wait_time <= 0:
                    self.capacity -= 1
                    select_row = self.out_rows[count]
                    ppl.change_row(select_row)
                    # Bug fix: was `self.len`, an attribute that does not exist
                    # (AttributeError); the side length is stored in `self.length`.
                    ppl.p = select_row.mid_pt_pos - (self.length / 2) * ppl.block.dirc  # endpoint
                    # Bug fix: round-robin over the outbound rows. The original
                    # compared against the misspelled `self.n_out_row` and reset
                    # only *after* `count` had already walked past the last valid
                    # index, which raised IndexError; wrap with modulo instead.
                    count = (count + 1) % self.n_out_rows
    def verts_for_plot(self):
        """Return the square's corner vertices as a closed polygon (xy only)
        together with the floor number taken from the z coordinate."""
        verts = [
            tuple((self.position + np.array([(self.length / 2), (self.length / 2), 0]))[:2]),
            tuple((self.position + np.array([-(self.length / 2), (self.length / 2), 0]))[:2]),
            tuple((self.position + np.array([-(self.length / 2), -(self.length / 2), 0]))[:2]),
            tuple((self.position + np.array([(self.length / 2), -(self.length / 2), 0]))[:2]),
            tuple((self.position + np.array([(self.length / 2), (self.length / 2), 0]))[:2])
        ]
        return verts, int(self.position[2])
"noreply@github.com"
] | YuliangShi.noreply@github.com |
49df46b47998c18b9a1e1cd63e336461b0b668e5 | 5390d79dad71ad0d9ff9d0777435dcaf4aad16b3 | /chapter_05/toppings5.py | bb3053276c058e6ce16e156ef1659461aab3c552 | [] | no_license | JasperMi/python_learning | 19770d79cce900d968cec76dac11e45a3df9c34c | 8111d0d12e4608484864dddb597522c6c60b54e8 | refs/heads/master | 2020-11-26T08:57:02.983869 | 2020-03-11T10:14:55 | 2020-03-11T10:14:55 | 218,935,548 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 247 | py | requested_toppings = []
# Announce each requested topping; with no toppings, double-check the order.
if not requested_toppings:
    print("Are you sure you want a plain pizza?")
else:
    for topping in requested_toppings:
        print("Adding " + topping + ".")
    print("\nFinished making your pizza!")
| [
"darmi19@163.com"
] | darmi19@163.com |
9cc841955865b8156f5d230b757de13af5001bc1 | 5b251d8c242af845cfe7a7df6f0525655223177e | /src/piece.py | 7fc443ec3587ba7c542eca8920981256b24fede3 | [] | no_license | Julien-Leos/gameJam-Barcelona | ed1e098c6cdd5c0e38c6e9203862d529aab13035 | 1164d37b5a6efa228cfb0336ceef360a6bae8cc3 | refs/heads/master | 2021-03-25T23:11:54.404367 | 2020-03-16T08:52:12 | 2020-03-16T08:52:12 | 247,653,107 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 461 | py | import pygame
from object import Object
class Piece(Object):
    """A single game piece: loads its sprite by name and draws itself on the core window."""

    def __init__(self, name, pos, core):
        self.name = name
        self.core = core
        self.initSprite(pos)

    def initSprite(self, pos):
        # The sprite image is looked up by piece name under assets/pieces/.
        sprite_path = "assets/pieces/" + self.name + ".png"
        self.image = pygame.image.load(sprite_path).convert_alpha()
        self.rect = self.image.get_rect()
        self.rect.move_ip(*pos)

    def update(self, dt):
        # `dt` is unused; the piece is simply redrawn at its current rect.
        self.core.window.blit(self.image, self.rect)
"leos.julien@epitech.eu"
] | leos.julien@epitech.eu |
1054dd165bede753467fe48c2c390ef14b7e43b8 | 5d07d3de51a26618e97e60be9a61df049d2c41a1 | /classifyingRestaurantCategoryfromBusiness.py | 706fe93cc04dc4eba4a04909e96e9eb4212a0f07 | [] | no_license | prajwalv/YelpDataset-Reviews-SentimentAnalysis | d52a335044e1d416de707af569c752601698d28b | f1bb7e03b51f79c81116ea1b94aba9271dd968f0 | refs/heads/master | 2020-04-15T21:23:33.045309 | 2019-01-10T10:14:08 | 2019-01-10T10:14:08 | 165,031,369 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 739 | py | #!/usr/bin/env python
# coding: utf-8
# In[1]:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import nltk
from nltk.corpus import stopwords
# In[2]:
# Load the raw Yelp academic business listing.
business = pd.read_csv('dataset/yelp_academic_dataset_business.csv')
# In[3]:
business.shape
# In[4]:
business.head()
# In[5]:
# Drop rows whose `categories` field is missing.
business = business[pd.notnull(business['categories'])]
business.shape
# In[9]:
# Collect the positional index of every row whose categories do not mention
# 'Restaurants', then drop those rows.
# NOTE(review): a vectorized filter (Series.str.contains) would avoid the
# Python-level loop; kept unchanged here.
indexList = []
for i in range(len(business)):
    if 'Restaurants' not in business['categories'].iloc[i]:
        indexList.append(i)
business=business.drop(business.index[indexList])
# Keep only the id and category columns and export the restaurant subset.
business = business[['business_id','categories']]
business
# In[10]:
business.to_csv("onlyRestaurants.csv",index=False)
| [
"prajwal.venkatesh@sjsu.edu"
] | prajwal.venkatesh@sjsu.edu |
193cb91ce7cabc2daeb6898364f78bd9d496cf4b | 9fc6604ae98e1ae91c490e8201364fdee1b4222a | /eg_delivery_return_disclaimer_msg/wizards/msg_by_unifonic.py | 1e5e3eb6e45160c46c0dadf6f1a4942c11dc796a | [] | no_license | nabiforks/baytonia | b65e6a7e1c7f52a7243e82f5fbcc62ae4cbe93c4 | 58cb304d105bb7332f0a6ab685015f070988ba56 | refs/heads/main | 2023-03-23T21:02:57.862331 | 2021-01-04T03:40:58 | 2021-01-04T03:40:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,566 | py | from odoo import models, fields, api
from odoo.exceptions import Warning
class MsgByUnifonic(models.TransientModel):
    """Transient wizard that prepares and sends a return-disclaimer SMS via Unifonic."""
    _name = "msg.by.unifonic"

    number = fields.Char(string="Number")
    message = fields.Text(string="Message")

    @api.model
    def default_get(self, fields_list):
        """Prefill number/message from the active stock.picking and the Unifonic instance."""
        defaults = super(MsgByUnifonic, self).default_get(fields_list)
        picking = self.env["stock.picking"].browse(self._context.get("active_id"))
        instance = self.env["sms.instance"].search([("provider", "=", "unifonic_sms")], limit=1)
        if picking and instance:
            msg = instance.return_disclaimer_msg
            phone = picking.partner_id.phone or picking.partner_id.mobile or None
            if msg:
                confirm_url = "https://oddo.baytonia.com/delivery_return/confirm/{}".format(picking.id)
                # Substitute every template placeholder with its picking-specific value.
                for placeholder, value in (
                        ("{{order_number}}", picking.name),
                        ("{{customer_name}}", picking.partner_id.name),
                        ("{{total_amount}}", str(picking.total_amount)),
                        ("{{return_approve_url}}", confirm_url)):
                    msg = msg.replace(placeholder, value)
            defaults["number"] = phone
            defaults["message"] = msg
        return defaults

    @api.multi
    def send_msg_customer_by_unifonic(self):
        """Dispatch the prepared message, or raise when either field is missing."""
        if not (self.message and self.number):
            raise Warning("Number and Message are required")
        self.env["post.sms.wizard"].send_sms(body=self.message, dst_number=self.number)
| [
"ash@odoxsofthub.com"
] | ash@odoxsofthub.com |
7f733621867abbd79a0a8d2784f7d57814b625e5 | ebd24e400986c57b4bb1b9578ebd8807a6db62e8 | /InstaGrade-FormBuilder/xlsxwriter/test/comparison/test_chart_errorbars05.py | 002e0d8055c1d99983bc226195274cbf4b92c183 | [] | no_license | nate-parrott/ig | 6abed952bf32119a536a524422037ede9b431926 | 6e0b6ac0fb4b59846680567150ce69a620e7f15d | refs/heads/master | 2021-01-12T10:15:15.825004 | 2016-12-13T21:23:17 | 2016-12-13T21:23:17 | 76,399,529 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,706 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2014, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """
    def setUp(self):
        # Compare the workbook we generate against the Excel-produced reference
        # fixture 'chart_errorbars05.xlsx'; show full diffs on mismatch.
        self.maxDiff = None
        filename = 'chart_errorbars05.xlsx'
        test_dir = 'xlsxwriter/test/comparison/'
        self.got_filename = test_dir + '_test_' + filename
        self.exp_filename = test_dir + 'xlsx_files/' + filename
        self.ignore_files = []
        self.ignore_elements = {}
    def test_create_file(self):
        """Test the creation of an XlsxWriter file with error bars."""
        workbook = Workbook(self.got_filename)
        worksheet = workbook.add_worksheet()
        chart = workbook.add_chart({'type': 'bar'})
        # Fixed axis ids so the generated chart XML matches the reference file exactly.
        chart.axis_ids = [49016832, 49019136]
        data = [
            [1, 2, 3, 4, 5],
            [2, 4, 6, 8, 10],
            [3, 6, 9, 12, 15],
        ]
        worksheet.write_column('A1', data[0])
        worksheet.write_column('B1', data[1])
        worksheet.write_column('C1', data[2])
        # First series carries x error bars of type 'standard_error'; the
        # second series has none.
        chart.add_series({
            'categories': '=Sheet1!$A$1:$A$5',
            'values': '=Sheet1!$B$1:$B$5',
            'x_error_bars': {'type': 'standard_error'},
        })
        chart.add_series({
            'categories': '=Sheet1!$A$1:$A$5',
            'values': '=Sheet1!$C$1:$C$5',
        })
        worksheet.insert_chart('E9', chart)
        workbook.close()
        self.assertExcelEqual()
| [
"nateparro2t@gmail.com"
] | nateparro2t@gmail.com |
b4baaad945cd270dd9b2485737dee0511528adf2 | 3adf2723035bcda7f1f31fb873a084e270439aa7 | /src/sanitize_tweets.py | 7361c48ee2b55c4077c12a6698cd515919dcc2e7 | [] | no_license | altaha/insight-challenge | cef91878f247ebb3fbf38b608f92dbbbb4009061 | 3cffdb0fa0b1012c7ced2fecbe808c178b8565c6 | refs/heads/master | 2020-12-28T10:31:29.036951 | 2015-11-08T16:18:47 | 2015-11-08T16:18:47 | 45,347,597 | 0 | 0 | null | 2015-11-07T03:21:35 | 2015-11-01T16:17:41 | Python | UTF-8 | Python | false | false | 1,146 | py | import getopt
import sys
from tweeter import TweetSanitizer
## Feature 1 execution script ##
# Module-level output file handle; main() opens it before any write happens.
tweets_out = None

def write_line(line):
    """Append *line* plus a trailing newline to the global output file."""
    tweets_out.write('%s\n' % line)
def sanitize_all_tweets(input_file):
    """Sanitize every tweet line in *input_file*, emit each result through
    write_line, then a blank separator and a unicode-count summary line."""
    sanitizer = TweetSanitizer()
    with open(input_file) as source:
        for raw_line in source:
            write_line(str(sanitizer.sanitize_tweet(raw_line)))
    write_line('')
    write_line('{0} tweets contained unicode.'.format(sanitizer.num_tweets_with_unicode()))
def main(argv):
    """Parse -i/-o options, then sanitize all tweets from the input file into
    the output file. Falls back to the default paths when an option is absent;
    exits with status 2 on a malformed command line.
    """
    input_file = '../data-gen/tweets.txt'
    output_file = '../tweet_output/ft1.txt'
    try:
        (opts, args) = getopt.getopt(argv, 'i:o:')
    except getopt.GetoptError:
        # Fix: was a Python 2 print statement; print() emits the same text and
        # is valid on both Python 2 and 3.
        print('sanitize_tweets.py -i <inputfile> -o <outputfile>')
        sys.exit(2)
    for opt, arg in opts:
        # Fix: `opt in ('-i')` tested substring membership in the *string* '-i'
        # (a one-element tuple needs a trailing comma); use equality instead.
        if opt == '-i':
            input_file = arg
        elif opt == '-o':
            output_file = arg
    global tweets_out
    # `with` guarantees the output file is closed even if sanitization raises.
    with open(output_file, 'w') as out_fh:
        tweets_out = out_fh
        sanitize_all_tweets(input_file)
if __name__ == '__main__':
    main(sys.argv[1:])
| [
"ahmedsalam.2008@gmail.com"
] | ahmedsalam.2008@gmail.com |
5ad138fa284a69c9c985ba8a2084ea57d9d8d176 | 0071aad01ab5e91b7d32567470bd729c23bac656 | /g2048.py | d75f388736b07dd7f87d31f67252e7ab02cbf060 | [] | no_license | Hakuyume/2048-rl | 19c29e24492bd1efaddbbe0dad28474752b2d97f | ff0593582b293bcf1c21bd2e26701da6d24c6647 | refs/heads/master | 2021-01-22T18:33:36.057004 | 2017-08-26T06:47:37 | 2017-08-26T06:47:37 | 100,769,933 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,058 | py | import numpy as np
import random
class G2048(object):
    """Minimal 2048 game engine.
    The board stores tile *exponents* as uint8: a cell value of k represents
    the tile 2**k, and 0 is an empty cell.
    """
    def __init__(self, size=4):
        self.size = size
        self.board = np.empty((size, size), dtype=np.uint8)
    def reset(self):
        # Clear score and board, then spawn the two starting tiles.
        self.score = 0
        self.board[:] = 0
        for _ in range(2):
            self._add()
    @property
    def movability(self):
        """Boolean vector of length 4: whether a move in each direction would
        change the board. Direction d is checked by rotating the board d
        quarter-turns and looking for a leftward shift or a merge."""
        m = np.zeros(4, dtype=bool)
        for d in range(4):
            board = np.rot90(self.board, d)
            # A tile can slide left into an empty cell...
            if np.logical_and(board[:, :-1] == 0, board[:, 1:] > 0).any():
                m[d] = True
            # ...or two equal adjacent tiles can merge.
            elif np.logical_and(
                    board[:, :-1] > 0, board[:, :-1] == board[:, 1:]).any():
                m[d] = True
        return m
    @property
    def is_finished(self):
        # The game is over when no direction produces a change.
        return not self.movability.any()
    def _add(self):
        """Spawn one tile in a random empty cell: exponent 1 (tile 2) with
        probability 3/4, otherwise exponent 2 (tile 4). No-op when full."""
        blank = tuple(zip(*np.where(self.board == 0)))
        if len(blank) > 0:
            u, v = random.choice(blank)
            if random.uniform(0, 1) > 1 / 4:
                self.board[u, v] = 1
            else:
                self.board[u, v] = 2
    def move(self, direction):
        """Apply a move (direction = number of quarter-turns defining the view),
        merging leftward in the rotated view; a new tile spawns only if the
        board actually changed."""
        change = False
        for line in np.rot90(self.board, direction):
            # Two-pointer compaction: v scans the source line, w tracks the
            # write position in the merged result.
            v, w = 0, 0
            new_line = np.zeros_like(line)
            while v < self.size:
                if line[v] == 0:
                    v += 1
                elif new_line[w] == line[v]:
                    # Merge equal exponents: merged cell is +1, and the score
                    # gains the merged tile's face value 2**new_line[w].
                    new_line[w] += 1
                    self.score += 1 << new_line[w]
                    change = True
                    v += 1
                    w += 1
                elif new_line[w] == 0:
                    new_line[w] = line[v]
                    # Moving a tile to a different column counts as a change.
                    change = change or not v == w
                    v += 1
                else:
                    w += 1
            # Write back through the rotated view, mutating self.board in place.
            line[:] = new_line
        if change:
            self._add()
    def normalize(self):
        """Replace the board with its canonical form: the lexicographically
        smallest of its 8 rotations/reflections."""
        self.board[:] = min(
            (np.rot90(b, r)
             for b in (self.board, self.board.transpose())
             for r in range(4)),
            key=lambda b: tuple(b.flatten()))
| [
"Hakuyume@users.noreply.github.com"
] | Hakuyume@users.noreply.github.com |
668e8c564cf283299aa099f69921aede679766be | 2eda564b35f13394376ae5cbca5d96765b0e9956 | /src/util/Build/NaveFugaBuilder.py | f3608a659b944731807ce6cdaa365a2730907f5b | [] | no_license | izabely-furtado/GameNave4 | 59388709117ca66b42657d573f92fd72b8bf0a53 | 76571aa421347a89c5f9f2f09392f1e173ed3f84 | refs/heads/master | 2021-01-10T10:28:37.307696 | 2016-01-26T07:48:20 | 2016-01-26T07:48:20 | 50,402,752 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,120 | py | from src.util.Build import NaveBuilder
from src.util.FabricaNaves import FabricaNaveFuga
from src.cgd import Path
class NaveFugaBuilder(NaveBuilder.NaveBuilder):
    """Concrete builder that assembles the 'Fuga' ship product.
    All build steps run in order at construction time; `build_nave` must run
    last because the factory consumes the image/sound attributes set by the
    earlier steps.
    """
    def __init__(self):
        super(NaveFugaBuilder, self).__init__()
        self.build_dano()
        self.build_imagem_nave()
        self.build_imagem_explosao()
        self.build_som()
        self.build_nave()
    def build_dano(self):
        # Damage value is fixed at 0 for this ship.
        self.nave_product.set_dano(0)
    def build_imagem_nave(self):
        self.nave_product.imagem_nave = Path.get_path() + "/Imagem/Nave/Fuga.png"
    def build_imagem_explosao(self):
        # NOTE(review): the explosion sprite reuses "Boss.png" — confirm this
        # is intentional and not a copy-paste slip.
        self.nave_product.imagem_explosao = Path.get_path() + "/Imagem/Nave/Boss.png"
    def build_som(self):
        self.nave_product.som = Path.get_path() + "/Som/MusicNave.wav"
    def build_nave(self):
        # Hand the gathered assets to the ship factory.
        self.nave_product.nave_fabrica = FabricaNaveFuga.FabricaNaveFuga(self.nave_product.imagem_nave,
                                                                         self.nave_product.imagem_explosao,
                                                                         self.nave_product.som)
| [
"izabelyfurtado1@gmail.com"
] | izabelyfurtado1@gmail.com |
20d2b0520d4d5779175d100a16458efd8709edec | d0aadde0a55e888ce7f4e8ab71261f5ca132d437 | /Factorial_using_While loop.py | cf09ceacd9b477605d5c7f560f702f029cb7c2ae | [] | no_license | rajeshmanas/Python_for_Beginners | bd8b08ed7b064a89822f31b1164c06ef9781fa0c | be9e0eae9645550f7e6756b8577b968bef0e93eb | refs/heads/master | 2022-12-03T19:16:28.906288 | 2020-08-17T18:15:48 | 2020-08-17T18:15:48 | 285,635,725 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py | # Factorial code using WHILE loop
# Read the target number, then accumulate its factorial iteratively.
print('enter the number for the factorial you want:')
i_want_fact_for = int(input())
Fact_value = 1
for i in range(1, i_want_fact_for + 1):
    Fact_value = Fact_value * i
print('Factorial value of {} is: {}'.format(i_want_fact_for, Fact_value))
# End of factorial script (for-loop variant of the original while loop)
"noreply@github.com"
] | rajeshmanas.noreply@github.com |
8ee0c7c66379fbead56732ab779d72356e965329 | 925f199438b3af508cf083ce094cb6a5f208fed8 | /src/lt_847.py | ed54216792f6792912f298fe087f8840d98ee563 | [] | no_license | oxhead/CodingYourWay | b1b50236cdfb06669c123fd9202ce3d87304a3bf | e60ba45fe2f2e5e3b3abfecec3db76f5ce1fde59 | refs/heads/master | 2020-08-06T16:45:21.054650 | 2018-06-26T03:53:38 | 2018-06-26T03:53:38 | 30,577,969 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,323 | py | """
https://leetcode.com/contest/weekly-contest-87/problems/shortest-path-visiting-all-nodes/
"""
"""
An undirected, connected graph of N nodes (labeled 0, 1, 2, ..., N-1) is given as graph.
graph.length = N, and j != i is in the list graph[i] exactly once, if and only if nodes i and j are connected.
Return the length of the shortest path that visits every node. You may start and stop at any node, you may revisit nodes multiple times, and you may reuse edges.
Example 1:
Input: [[1,2,3],[0],[0],[0]]
Output: 4
Explanation: One possible path is [1,0,2,0,3]
Example 2:
Input: [[1],[0,2,4],[1,3,4],[2],[1,2]]
Output: 4
Explanation: One possible path is [0,1,4,2,3]
Note:
1 <= graph.length <= 12
0 <= graph[i].length < graph.length
"""
class Solution:
    def shortestPathLength(self, graph):
        """Length of the shortest walk that visits every node at least once.

        :type graph: List[List[int]]  (adjacency lists of a connected graph)
        :rtype: int

        Bug fix: the original BFS kept no visited-state set (exponential
        re-expansion) and only searched from a single heuristically chosen
        start node (`endpoints[1:2]`), which can miss the optimum or find
        nothing; it also printed debug output. The standard solution is a BFS
        over (node, visited-bitmask) states seeded from *every* node, since
        the walk may start anywhere and revisit nodes and edges freely.
        """
        from collections import deque
        n = len(graph)
        if n <= 1:
            return 0
        full_mask = (1 << n) - 1
        # Seed the frontier with every possible starting node.
        queue = deque((node, 1 << node, 0) for node in range(n))
        seen = {(node, 1 << node) for node in range(n)}
        while queue:
            node, mask, dist = queue.popleft()
            if mask == full_mask:
                return dist
            for neighbor in graph[node]:
                state = (neighbor, mask | (1 << neighbor))
                if state not in seen:
                    seen.add(state)
                    queue.append((neighbor, state[1], dist + 1))
        return -1  # unreachable for a connected graph
if __name__ == '__main__':
    # Ad-hoc regression harness: each entry pairs an adjacency list with the
    # expected shortest covering-walk length.
    test_cases = [
        #([[1,2,3],[0],[0],[0]], 4),
        #([[1],[0,2,4],[1,3,4],[2],[1,2]], 4),
        #([[1],[0,2],[1,3],[2],[1,5],[4]], 6),
        #([[1],[0,2,6],[1,3],[2],[5],[4,6],[1,5,7],[6]], 9),
        ([[1,4,6,8,9],[0,6],[9],[5],[0],[7,3],[0,1],[9,5],[0],[0,2,7]], 10),
    ]
    solver = Solution()
    for graph, expected in test_cases:
        print('case:', (graph, expected))
        result = solver.shortestPathLength(graph)
        print('output:', result)
        assert result == expected
| [
"kmscout@gmail.com"
] | kmscout@gmail.com |
fc686a9f898be02faba630adf24adbfd497e8873 | 5a61331a743dcd262f9c0ac28c683fbf16bdfc17 | /dsr/dsr/controller.py | b96eb1508eaf776806b69a3083a03e6e2e2aea87 | [
"BSD-3-Clause"
] | permissive | hwfluid/deep-symbolic-regression | b205861bf47c4c97561890605b84337c79e8c069 | 08c5b80a865bdbba7acd9d3904a0b848640fcbc6 | refs/heads/master | 2023-01-11T08:25:53.305445 | 2020-11-18T16:29:20 | 2020-11-18T16:29:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 38,496 | py | """Controller used to generate distribution over hierarchical, variable-length objects."""
from functools import partial
import tensorflow as tf
import numpy as np
from scipy import signal
from numba import jit, prange
from dsr.program import Program
class LinearWrapper(tf.contrib.rnn.LayerRNNCell):
    """
    RNN cell wrapper that projects the wrapped cell's output through a dense
    (linear) layer, producing logits of a fixed size while delegating state
    handling to the inner cell.

    See: https://github.com/tensorflow/models/blob/master/research/brain_coder/single_task/pg_agent.py
    """

    def __init__(self, cell, output_size):
        self.cell = cell  # the wrapped RNN cell
        self._output_size = output_size  # width of the linear projection

    def __call__(self, inputs, state, scope=None):
        with tf.variable_scope(type(self).__name__):
            cell_output, new_state = self.cell(inputs, state, scope=scope)
            projected = tf.layers.dense(cell_output, units=self._output_size)
        return projected, new_state

    @property
    def output_size(self):
        return self._output_size

    @property
    def state_size(self):
        return self.cell.state_size

    def zero_state(self, batch_size, dtype):
        return self.cell.zero_state(batch_size, dtype)
class Controller(object):
"""
Recurrent neural network (RNN) controller used to generate expressions.
Specifically, the RNN outputs a distribution over pre-order traversals of
symbolic expression trees. It is trained using REINFORCE with baseline.
Parameters
----------
sess : tf.Session
TenorFlow Session object.
summary : bool
Write tensorboard summaries?
debug : int
Debug level, also used in learn(). 0: No debug. 1: Print shapes and
number of parameters for each variable.
cell : str
Recurrent cell to use. Supports 'lstm' and 'gru'.
num_layers : int
Number of RNN layers.
num_units : int or list of ints
Number of RNN cell units in each of the RNN's layers. If int, the value
is repeated for each layer.
initiailizer : str
Initializer for the recurrent cell. Supports 'zeros' and 'var_scale'.
embedding : bool
Embed each observation?
embedding_size : int
Size of embedding for each observation if embedding=True.
optimizer : str
Optimizer to use. Supports 'adam', 'rmsprop', and 'sgd'.
learning_rate : float
Learning rate for optimizer.
observe_action : bool
Observe previous action token?
observe_parent : bool
Observe parent token?
observe_sibling : bool
Observe sibling token?
constrain_const : bool
Prevent constants with unary parents or constant siblings?
constrain_trig : bool
Prevent trig functions with trig function ancestors?
constrain_inv : bool
Prevent unary function with inverse unary function parent?
constrain_min_len : bool
Prevent terminals that would cause the expression to be shorter than
min_length? If False, only trivial expressions (length 1) are prevented.
constrain_max_len : bool
Prevent unary/binary functions that would cause the expression to exceed
max_length? If False, sampling ends after max_length and dangling nodes
are filled in with x1's.
constrain_num_const : bool
Prevent constants that would exceed max_const?
min_length : int (>= 1) or None
Minimum length of a sampled traversal when constrain_min_len=True. If
None or constrain_min_len=False, expressions have no minimum length.
max_length : int (>= 3)
Maximum length of a sampled traversal.
max_const : int (>= 1) or None
Maximum number of constants of a sampled traversal when
constrain_num_const=True. If None or constrain_num_const=False,
expressions may have any number of constants.
entropy_weight : float
Coefficient for entropy bonus.
ppo : bool
Use proximal policy optimization (instead of vanilla policy gradient)?
ppo_clip_ratio : float
Clip ratio to use for PPO.
ppo_n_iters : int
Number of optimization iterations for PPO.
ppo_n_mb : int
Number of minibatches per optimization iteration for PPO.
pqt : bool
Train with priority queue training (PQT)?
pqt_k : int
Size of priority queue.
pqt_batch_size : int
Size of batch to sample (with replacement) from priority queue.
pqt_weight : float
Coefficient for PQT loss function.
pqt_use_pg : bool
Use policy gradient loss when using PQT?
"""
def __init__(self, sess, debug=0, summary=True,
# Architecture hyperparameter
# RNN cell hyperparameters
cell='lstm',
num_layers=1,
num_units=32,
initializer='zeros',
# Embedding hyperparameters
embedding=False,
embedding_size=4,
# Optimizer hyperparameters
optimizer='adam',
learning_rate=0.001,
# Observation space hyperparameters
observe_action=True,
observe_parent=True,
observe_sibling=True,
# Constraint hyperparameters
constrain_const=True,
constrain_trig=True,
constrain_inv=True,
constrain_min_len=True,
constrain_max_len=True,
constrain_num_const=False,
min_length=2,
max_length=30,
max_const=None,
# Loss hyperparameters
entropy_weight=0.0,
# PPO hyperparameters
ppo=False,
ppo_clip_ratio=0.2,
ppo_n_iters=10,
ppo_n_mb=4,
# PQT hyperparameters
pqt=False,
pqt_k=10,
pqt_batch_size=1,
pqt_weight=200.0,
pqt_use_pg=False):
self.sess = sess
self.summary = summary
self.rng = np.random.RandomState(0) # Used for PPO minibatch sampling
# Hyperparameters
self.observe_parent = observe_parent
self.observe_sibling = observe_sibling
self.constrain_const = constrain_const and "const" in Program.library
self.constrain_trig = constrain_trig
self.constrain_inv = constrain_inv
self.constrain_min_len = constrain_min_len
self.constrain_max_len = constrain_max_len
self.constrain_num_const = constrain_num_const
self.min_length = min_length
self.max_length = max_length
self.max_const = max_const
self.entropy_weight = entropy_weight
self.ppo = ppo
self.ppo_n_iters = ppo_n_iters
self.ppo_n_mb = ppo_n_mb
self.pqt = pqt
self.pqt_k = pqt_k
self.pqt_batch_size = pqt_batch_size
n_choices = Program.L
# Placeholders, computed after instantiating expressions
self.batch_size = tf.placeholder(dtype=tf.int32, shape=(), name="batch_size")
self.r = tf.placeholder(dtype=tf.float32, shape=(None,), name="r")
self.baseline = tf.placeholder(dtype=tf.float32, shape=(), name="baseline")
# Parameter assertions/warnings
assert observe_action + observe_parent + observe_sibling > 0, "Must include at least one observation."
assert max_length >= 3, "Must have max length at least 3."
if min_length is None:
assert not constrain_min_len, "Cannot constrain min length when min_length=None"
else:
assert min_length >= 1, "Must have min length at least 1."
assert max_length >= min_length, "Min length cannot exceed max length."
if not constrain_min_len:
print("Warning: min_length={} will not be respected because constrain_min_len=False. Overriding to None.".format(min_length))
self.min_length = None
if max_const is None:
assert not constrain_num_const, "Cannot constrain max num consts when max_const=None"
else:
assert max_const >= 1, "Must have max num const at least 1."
if Program.const_token is None:
print("Warning: max_const={} will have no effect because there is no constant token.".format(max_const))
self.constrain_num_const = False
self.max_const = None
elif not constrain_num_const:
print("Warning: max_const={} will not be repsected because constrain_num_const=False. Overriding to None.".format(max_const))
self.max_const = None
self.compute_parents_siblings = any([self.observe_parent,
self.observe_sibling,
self.constrain_const])
# Build controller RNN
with tf.name_scope("controller"):
def make_initializer(name):
if name == "zeros":
return tf.zeros_initializer()
if name == "var_scale":
return tf.contrib.layers.variance_scaling_initializer(
factor=0.5, mode='FAN_AVG', uniform=True)
raise ValueError("Did not recognize initializer '{}'".format(name))
def make_cell(name, num_units, initializer):
if name == 'lstm':
return tf.nn.rnn_cell.LSTMCell(num_units, initializer=initializer)
if name == 'gru':
return tf.nn.rnn_cell.GRUCell(num_units, kernel_initializer=initializer, bias_initializer=initializer)
raise ValueError("Did not recognize cell type '{}'".format(name))
# Create recurrent cell
if isinstance(num_units, int):
num_units = [num_units] * num_layers
initializer = make_initializer(initializer)
cell = tf.contrib.rnn.MultiRNNCell(
[make_cell(cell, n, initializer=initializer) for n in num_units])
cell = LinearWrapper(cell=cell, output_size=n_choices)
# Define input dimensions
n_action_inputs = n_choices + 1 # Library tokens + empty token
n_parent_inputs = n_choices + 1 - len(Program.terminal_tokens) # Parent sub-library tokens + empty token
n_sibling_inputs = n_choices + 1 # Library tokens + empty tokens
# Create embeddings
if embedding:
with tf.variable_scope("embeddings",
initializer=tf.random_uniform_initializer(minval=-1.0, maxval=1.0)):
if observe_action:
action_embeddings = tf.get_variable("action_embeddings", [n_action_inputs, embedding_size], trainable=True)
if observe_parent:
parent_embeddings = tf.get_variable("parent_embeddings", [n_parent_inputs, embedding_size], trainable=True)
if observe_sibling:
sibling_embeddings = tf.get_variable("sibling_embeddings", [n_sibling_inputs, embedding_size], trainable=True)
# First observation is all empty tokens
initial_obs = tuple()
for n in [n_action_inputs, n_parent_inputs, n_sibling_inputs]:
obs = tf.constant(n - 1, dtype=np.int32)
obs = tf.broadcast_to(obs, [self.batch_size])
initial_obs += (obs,)
# Define prior on logits; currently only used to apply hard constraints
arities = np.array([Program.arities[i] for i in range(n_choices)])
prior = np.zeros(n_choices, dtype=np.float32)
if self.min_length is not None and self.min_length > 1:
prior[arities == 0] = -np.inf
prior = tf.constant(prior, dtype=tf.float32)
prior_dims = tf.stack([self.batch_size, n_choices])
prior = tf.broadcast_to(prior, prior_dims)
initial_prior = prior
# Returns concatenated one-hot or embeddings from observation tokens
# Used for both raw_rnn and dynamic_rnn
def get_input(obs):
action, parent, sibling = obs
observations = []
if observe_action:
if embedding:
obs = tf.nn.embedding_lookup(action_embeddings, action)
else:
obs = tf.one_hot(action, depth=n_action_inputs)
observations.append(obs)
if observe_parent:
if embedding:
obs = tf.nn.embedding_lookup(parent_embeddings, parent)
else:
obs = tf.one_hot(parent, depth=n_parent_inputs)
observations.append(obs)
if observe_sibling:
if embedding:
obs = tf.nn.embedding_lookup(sibling_embeddings, sibling)
else:
obs = tf.one_hot(sibling, depth=n_sibling_inputs)
observations.append(obs)
input_ = tf.concat(observations, -1)
return input_
# Applies constraints
def get_action_parent_sibling_prior_dangling(actions, dangling):
n = actions.shape[0] # Batch size
i = actions.shape[1] - 1 # Current index
action = actions[:, -1] # Current action
prior = np.zeros((n, Program.L), dtype=np.float32)
# Depending on the constraints, may need to compute parents and siblings
if self.compute_parents_siblings:
parent, sibling = parents_siblings(actions, arities=Program.arities, parent_adjust=Program.parent_adjust)
else:
parent = np.zeros(n, dtype=np.int32)
sibling = np.zeros(n, dtype=np.int32)
# Update dangling with (arity - 1) for each element in action
dangling += Program.arities[action] - 1
# Constrain unary of constant or binary of two constants
if self.constrain_const:
# Use action instead of parent here because it's really adj_parent
constraints = np.isin(action, Program.unary_tokens) # Unary action (or unary parent)
constraints += sibling == Program.const_token # Constant sibling
prior += make_prior(constraints, [Program.const_token], Program.L)
# Constrain trig function with trig function ancestor
if self.constrain_trig:
constraints = trig_ancestors(actions, Program.arities, Program.trig_tokens)
prior += make_prior(constraints, Program.trig_tokens, Program.L)
# Constrain inverse unary operators
if self.constrain_inv:
for p, c in Program.inverse_tokens.items():
# No need to compute parents because only unary operators are constrained
# by their inverse, and action == parent for all unary operators
constraints = action == p
prior += make_prior(constraints, [c], Program.L)
# Constrain total number of constants
if self.constrain_num_const:
constraints = np.sum(actions == Program.const_token, axis=1) == self.max_const
prior += make_prior(constraints, [Program.const_token], Program.L)
# Constrain maximum sequence length
# Never need to constrain max length for first half of expression
if self.constrain_max_len and (i + 2) >= self.max_length // 2:
remaining = self.max_length - (i + 1)
assert sum(dangling > remaining) == 0, (dangling, remaining)
constraints = dangling >= remaining - 1 # Constrain binary
prior += make_prior(constraints, Program.binary_tokens, Program.L)
constraints = dangling == remaining # Constrain unary
prior += make_prior(constraints, Program.unary_tokens, Program.L)
# Constrain minimum sequence length
# Constrain terminals when dangling == 1 until selecting the (min_length)th token
if self.constrain_min_len and (i + 2) < self.min_length:
constraints = dangling == 1 # Constrain terminals
prior += make_prior(constraints, Program.terminal_tokens, Program.L)
return action, parent, sibling, prior, dangling
# Given the actions chosen so far, return the observation, the prior, and the updated dangling
# Uses py_func to retrieve action/parent/sibling/dangling
def get_next_obs_prior_dangling(actions_ta, dangling):
    """Compute the next-step observation, constraint prior, and dangling count.

    Wraps the numpy helper `get_action_parent_sibling_prior_dangling` in
    tf.py_func so it can run inside the raw_rnn sampling loop.

    actions_ta : tf.TensorArray of int32 actions written so far (time-major).
    dangling   : int32 tensor, unfilled child slots per sequence in the batch.
    """
    # Get current action batch
    actions = tf.transpose(actions_ta.stack())  # Shape: (?, time)
    # Compute parent, sibling, prior, and dangling
    action, parent, sibling, prior, dangling = tf.py_func(func=get_action_parent_sibling_prior_dangling,
                                                          inp=[actions, dangling],
                                                          Tout=[tf.int32, tf.int32, tf.int32, tf.float32, tf.int32])
    # Observe previous action, parent, and/or sibling
    obs = (action, parent, sibling)
    # Set the shapes for returned Tensors
    # (tf.py_func outputs carry no static shape, so it is restored by hand.)
    action.set_shape([None])
    parent.set_shape([None])
    sibling.set_shape([None])
    prior.set_shape([None, Program.L])
    dangling.set_shape([None])
    return obs, prior, dangling
# Define loop function to be used by tf.nn.raw_rnn.
initial_cell_input = get_input(initial_obs)  # NOTE(review): appears unused below — loop_fn recomputes get_input(obs); confirm before removing
def loop_fn(time, cell_output, cell_state, loop_state):
    """raw_rnn loop function: samples one token per step under hard constraints.

    loop_state is an 8-tuple threaded through the scan:
        (actions_ta, obs_tas, priors_ta, obs, prior, dangling, lengths, finished)
    """
    if cell_output is None: # time == 0
        # First call: build the initial state; nothing has been sampled yet.
        finished = tf.zeros(shape=[self.batch_size], dtype=tf.bool)
        obs = initial_obs
        next_input = get_input(obs)
        next_cell_state = cell.zero_state(batch_size=self.batch_size, dtype=tf.float32) # 2-tuple, each shape (?, num_units)
        emit_output = None
        actions_ta = tf.TensorArray(dtype=tf.int32, size=0, dynamic_size=True, clear_after_read=False) # Read twice
        obs_tas = (tf.TensorArray(dtype=tf.int32, size=0, dynamic_size=True, clear_after_read=True), # Action inputs
                   tf.TensorArray(dtype=tf.int32, size=0, dynamic_size=True, clear_after_read=True), # Parent inputs
                   tf.TensorArray(dtype=tf.int32, size=0, dynamic_size=True, clear_after_read=True)) # Sibling inputs
        priors_ta = tf.TensorArray(dtype=tf.float32, size=0, dynamic_size=True, clear_after_read=True)
        prior = initial_prior
        lengths = tf.ones(shape=[self.batch_size], dtype=tf.int32)
        # Each expression starts with exactly one unfilled slot (the root).
        dangling = tf.ones(shape=[self.batch_size], dtype=tf.int32)
        next_loop_state = (
            actions_ta,
            obs_tas,
            priors_ta,
            obs,
            prior,
            dangling,
            lengths, # Unused until implementing variable length
            finished)
    else:
        actions_ta, obs_tas, priors_ta, obs, prior, dangling, lengths, finished = loop_state
        # Hard constraints: prior adds -inf to the logits of disallowed tokens.
        logits = cell_output + prior
        next_cell_state = cell_state
        emit_output = logits
        action = tf.multinomial(logits=logits, num_samples=1, output_dtype=tf.int32)[:, 0]
        # When implementing variable length:
        # action = tf.where(
        #     tf.logical_not(finished),
        #     tf.multinomial(logits=logits, num_samples=1, output_dtype=tf.int32)[:, 0],
        #     tf.zeros(shape=[self.batch_size], dtype=tf.int32))
        next_actions_ta = actions_ta.write(time - 1, action) # Write chosen actions
        next_obs, next_prior, next_dangling = get_next_obs_prior_dangling(next_actions_ta, dangling)
        next_input = get_input(next_obs)
        next_obs_tas = ( # Write OLD observation
            obs_tas[0].write(time - 1, obs[0]), # Action inputs
            obs_tas[1].write(time - 1, obs[1]), # Parent inputs
            obs_tas[2].write(time - 1, obs[2])) # Sibling inputs
        next_priors_ta = priors_ta.write(time - 1, prior) # Write OLD prior
        finished = next_finished = tf.logical_or(
            finished,
            time >= self.max_length)
        # When implementing variable length:
        # finished = next_finished = tf.logical_or(tf.logical_or(
        #     finished, # Already finished
        #     next_dangling == 0), # Currently, this will be 0 not just the first time, but also at max_length
        #     time >= self.max_length)
        next_lengths = tf.where(
            finished, # Ever finished
            lengths,
            tf.tile(tf.expand_dims(time + 1, 0), [self.batch_size]))
        next_loop_state = (next_actions_ta,
                           next_obs_tas,
                           next_priors_ta,
                           next_obs,
                           next_prior,
                           next_dangling,
                           next_lengths,
                           next_finished)
    return (finished, next_input, next_cell_state, emit_output, next_loop_state)
# Returns RNN emit outputs TensorArray (i.e. logits), final cell state, and final loop state
with tf.variable_scope('policy'):
_, _, loop_state = tf.nn.raw_rnn(cell=cell, loop_fn=loop_fn)
actions_ta, obs_tas, priors_ta, _, _, _, _, _ = loop_state
self.actions = tf.transpose(actions_ta.stack(), perm=[1, 0]) # (?, max_length)
self.obs = [tf.transpose(obs_ta.stack(), perm=[1, 0]) for obs_ta in obs_tas] # [(?, max_length)] * 3
self.priors = tf.transpose(priors_ta.stack(), perm=[1, 0, 2]) # (?, max_length, n_choices)
# Builds the placeholder dictionary that describes one batch of sampled sequences.
def make_batch_ph(name):
    """Return placeholders for actions/obs/priors/lengths/masks under name scope `name`."""
    def int_seq():
        # Integer sequence placeholder of shape (batch, max_length).
        return tf.placeholder(tf.int32, [None, max_length])
    with tf.name_scope(name):
        return {
            "actions" : int_seq(),
            "obs" : (int_seq(), int_seq(), int_seq()),
            "priors" : tf.placeholder(tf.float32, [None, max_length, n_choices]),
            "lengths" : tf.placeholder(tf.int32, [None]),
            "masks" : tf.placeholder(tf.float32, [None, max_length]),
        }
# Generates tensor for neglogp of a batch given actions, obs, priors, masks, and lengths
def make_neglogp(actions, obs, priors, masks, lengths):
    """Build the negative log-likelihood of `actions` under the current policy.

    Returns (neglogp, neglogp_per_step): per-sequence sums over time and the
    raw per-timestep values; both use `masks` to ignore padding steps.
    """
    with tf.variable_scope('policy', reuse=True):  # reuse the weights created by the sampling graph
        logits, _ = tf.nn.dynamic_rnn(cell=cell,
                                      inputs=get_input(obs),
                                      sequence_length=lengths,
                                      dtype=tf.float32)
        # Apply the same hard-constraint priors used during sampling.
        logits += priors
        # Negative log probabilities of sequences
        neglogp_per_step = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits,
                                                                          labels=actions)
        neglogp = tf.reduce_sum(neglogp_per_step * masks, axis=1) # Sum over time
        # NOTE: The above implementation is the same as the one below, with a few caveats:
        #   Exactly equivalent when removing priors.
        #   Equivalent up to precision when including clipped prior.
        #   Crashes when prior is not clipped due to multiplying zero by -inf.
        # actions_one_hot = tf.one_hot(self.actions_ph, depth=n_choices, axis=-1, dtype=tf.float32)
        # neglogp_per_step = -tf.nn.log_softmax(logits + tf.clip_by_value(self.priors_ph, -2.4e38, 0)) * actions_one_hot
        # neglogp_per_step = tf.reduce_sum(neglogp_per_step, axis=2)
        # neglogp = self.neglogp = tf.reduce_sum(neglogp_per_step * self.mask_ph, axis=1) # Sum over time
        return neglogp, neglogp_per_step
# On policy batch (used for REINFORCE/PPO)
self.sampled_batch = make_batch_ph("sampled_batch")
# Off policy batch (used for PQT)
if pqt:
self.off_policy_batch = make_batch_ph("off_policy_batch")
# Setup losses
with tf.name_scope("losses"):
neglogp, neglogp_per_step = make_neglogp(**self.sampled_batch)
# Entropy loss
# Entropy = neglogp * p = neglogp * exp(-neglogp)
entropy_per_step = neglogp_per_step * tf.exp(-neglogp_per_step)
entropy = tf.reduce_sum(entropy_per_step * self.sampled_batch["masks"], axis=1) # Sum over time
entropy_loss = -self.entropy_weight * tf.reduce_mean(entropy, name="entropy_loss")
loss = entropy_loss
# PPO loss
if ppo:
assert not pqt, "PPO is not compatible with PQT"
self.old_neglogp_ph = tf.placeholder(dtype=tf.float32, shape=(None,), name="old_neglogp")
ratio = tf.exp(self.old_neglogp_ph - neglogp)
clipped_ratio = tf.clip_by_value(ratio, 1. - ppo_clip_ratio, 1. + ppo_clip_ratio)
ppo_loss = -tf.reduce_mean(tf.minimum(ratio * (self.r - self.baseline), clipped_ratio * (self.r - self.baseline)))
loss += ppo_loss
# Define PPO diagnostics
clipped = tf.logical_or(ratio < (1. - ppo_clip_ratio), ratio > 1. + ppo_clip_ratio)
self.clip_fraction = tf.reduce_mean(tf.cast(clipped, tf.float32))
self.sample_kl = tf.reduce_mean(neglogp - self.old_neglogp_ph)
# Policy gradient loss
else:
if not pqt or (pqt and pqt_use_pg):
pg_loss = tf.reduce_mean((self.r - self.baseline) * neglogp, name="pg_loss")
loss += pg_loss
# Priority queue training loss
if pqt:
pqt_neglogp, _ = make_neglogp(**self.off_policy_batch)
pqt_loss = pqt_weight * tf.reduce_mean(pqt_neglogp, name="pqt_loss")
loss += pqt_loss
self.loss = loss
# Create summaries
with tf.name_scope("summary"):
if self.summary:
if ppo:
tf.summary.scalar("ppo_loss", ppo_loss)
else:
if not pqt or (pqt and pqt_use_pg):
tf.summary.scalar("pg_loss", pg_loss)
if pqt:
tf.summary.scalar("pqt_loss", pqt_loss)
tf.summary.scalar("entropy_loss", entropy_loss)
tf.summary.scalar("total_loss", self.loss)
tf.summary.scalar("reward", tf.reduce_mean(self.r))
tf.summary.scalar("baseline", self.baseline)
tf.summary.histogram("reward", self.r)
tf.summary.histogram("length", tf.reduce_sum(self.sampled_batch["masks"], axis=0))
self.summaries = tf.summary.merge_all()
def make_optimizer(name, learning_rate):
    """Return the tf.train optimizer selected by `name` ('adam', 'rmsprop' or 'sgd').

    Raises ValueError for any unrecognized optimizer name.
    """
    factories = {
        "adam": lambda: tf.train.AdamOptimizer(learning_rate=learning_rate),
        "rmsprop": lambda: tf.train.RMSPropOptimizer(learning_rate=learning_rate, decay=0.99),
        "sgd": lambda: tf.train.GradientDescentOptimizer(learning_rate=learning_rate),
    }
    if name not in factories:
        raise ValueError("Did not recognize optimizer '{}'".format(name))
    return factories[name]()
# Create training op
optimizer = make_optimizer(name=optimizer, learning_rate=learning_rate)
with tf.name_scope("train"):
self.train_op = optimizer.minimize(self.loss)
if debug >= 1:
total_parameters = 0
print("")
for variable in tf.trainable_variables():
shape = variable.get_shape()
n_parameters = np.product(shape)
total_parameters += n_parameters
print("Variable: ", variable.name)
print(" Shape: ", shape)
print(" Parameters:", n_parameters)
print("Total parameters:", total_parameters)
def sample(self, n):
    """Draw a batch of n expressions from the controller.

    Runs the sampling graph once and returns (actions, obs, priors).
    """
    fetches = [self.actions, self.obs, self.priors]
    actions, obs, priors = self.sess.run(fetches, feed_dict={self.batch_size: n})
    return actions, obs, priors
def train_step(self, r, b, actions, obs, priors, mask, priority_queue):
    """Computes loss, trains model, and returns summaries.

    r : rewards for the sampled (on-policy) batch.
    b : scalar baseline subtracted from rewards (variance reduction).
    actions, obs, priors, mask : arrays describing the on-policy batch.
    priority_queue : store of best-so-far samples; only read when PQT is enabled.
    """
    # On-policy feed. Sequences are padded to max_length, so `lengths` is a
    # constant vector; `mask` carries the real per-step validity.
    feed_dict = {self.r : r,
                 self.baseline : b,
                 self.sampled_batch["actions"] : actions,
                 self.sampled_batch["obs"] : obs,
                 self.sampled_batch["lengths"] : np.full(shape=(actions.shape[0]), fill_value=self.max_length, dtype=np.int32),
                 self.sampled_batch["priors"] : priors,
                 self.sampled_batch["masks"] : mask}
    if self.pqt:
        # Sample from the priority queue
        dicts = [extra_data for (item, extra_data) in priority_queue.random_sample(self.pqt_batch_size)]
        # Re-stack the stored per-sample arrays into batch-shaped arrays.
        pqt_actions = np.stack([d["actions"] for d in dicts], axis=0)
        pqt_obs = tuple([np.stack([d["obs"][i] for d in dicts], axis=0) for i in range(3)])
        pqt_priors = np.stack([d["priors"] for d in dicts], axis=0)
        pqt_masks = np.stack([d["masks"] for d in dicts], axis=0)
        # Update the feed_dict
        feed_dict.update({
            self.off_policy_batch["actions"] : pqt_actions,
            self.off_policy_batch["obs"] : pqt_obs,
            self.off_policy_batch["lengths"] : np.full(shape=(pqt_actions.shape[0]), fill_value=self.max_length, dtype=np.int32),
            self.off_policy_batch["priors"] : pqt_priors,
            self.off_policy_batch["masks"] : pqt_masks
        })
    if self.ppo:
        # Compute old_neglogp to be used for training
        # (snapshot of the current policy's likelihoods before any update).
        old_neglogp = self.sess.run(self.neglogp, feed_dict=feed_dict)
        # Perform multiple epochs of minibatch training
        feed_dict[self.old_neglogp_ph] = old_neglogp
        indices = np.arange(len(r))
        for epoch in range(self.ppo_n_iters):
            self.rng.shuffle(indices)
            minibatches = np.array_split(indices, self.ppo_n_mb)
            for i, mb in enumerate(minibatches):
                # Slice every batch-indexed feed; baseline, batch_size, and masks
                # are handled separately because they are not row-sliceable the same way.
                mb_feed_dict = {k : v[mb] for k, v in feed_dict.items() if k not in [self.baseline, self.batch_size, self.sampled_batch["masks"]]}
                mb_feed_dict.update({
                    self.baseline : b,
                    self.sampled_batch["masks"] : mask[mb, :],
                    self.batch_size : len(mb)
                })
                _ = self.sess.run([self.train_op], feed_dict=mb_feed_dict)
                # Diagnostics
                # kl, cf, _ = self.sess.run([self.sample_kl, self.clip_fraction, self.train_op], feed_dict=mb_feed_dict)
                # print("epoch", epoch, "i", i, "KL", kl, "CF", cf)
    else:
        _ = self.sess.run([self.train_op], feed_dict=feed_dict)
    # Return summaries
    if self.summary:
        summaries = self.sess.run(self.summaries, feed_dict=feed_dict)
    else:
        summaries = None
    return summaries
def make_prior(constraints, constraint_tokens, library_length):
    """
    Given a batch of constraints and the corresponding tokens to be constrained,
    returns a prior that is added to the logits when sampling the next action.
    For example, given library_length=5 and constraint_tokens=[1,2], a
    constrained row of the prior will be: [0.0, -np.inf, -np.inf, 0.0, 0.0].
    Parameters
    __________
    constraints : np.ndarray, shape=(batch_size,), dtype=np.bool_
        Batch of constraints.
    constraint_tokens : np.ndarray, dtype=np.int32
        Array of which tokens to constrain.
    library_length : int
        Length of library.
    Returns
    _______
    prior : np.ndarray, shape=(batch_size, library_length), dtype=np.float32
        Prior adjustment to logits given constraints. Since these are hard
        constraints, each element is either 0.0 or -np.inf.
    """
    prior = np.zeros((constraints.shape[0], library_length), dtype=np.float32)
    # Vectorized replacement for the per-token Python loop: select the
    # cross-product of constrained rows and constrained token columns in one
    # fancy-indexing assignment. Callers may pass plain lists, so normalize.
    tokens = np.asarray(constraint_tokens, dtype=np.intp)
    if tokens.size:
        prior[np.ix_(np.asarray(constraints, dtype=bool), tokens)] = -np.inf
    return prior
@jit(nopython=True, parallel=True)
def trig_ancestors(tokens, arities, trig_tokens):
    """
    Given a batch of action sequences, determines whether the next element of
    the sequence has an ancestor that is a trigonometric function.
    The batch has shape (N, L), where N is the number of sequences (i.e. batch
    size) and L is the length of each sequence. In some cases, expressions may
    already be complete; in these cases, this function sees the start of a new
    expression, even though the return value for these elements won't matter
    because they will be masked in loss calculations.
    Parameters
    __________
    tokens : np.ndarray, shape=(N, L), dtype=np.int32
        Batch of action sequences. Values correspond to library indices.
    arities : np.ndarray, dtype=np.int32
        Array of arities corresponding to library indices.
    trig_tokens : np.ndarray, dtype=np.int32
        Array of tokens corresponding to trig functions.
    Returns
    _______
    ancestors : np.ndarray, shape=(N,), dtype=np.bool_
        Whether the next element of each sequence has a trig function ancestor.
    """
    N, L = tokens.shape
    ancestors = np.zeros(shape=(N,), dtype=np.bool_)
    # Parallelized loop over action sequences
    for r in prange(N):
        # `dangling` tracks unfilled child slots in the pre-order traversal:
        # each token contributes (arity - 1).
        dangling = 0
        threshold = None # If None, current branch does not have trig ancestor
        for c in range(L):
            arity = arities[tokens[r, c]]
            dangling += arity - 1
            # Turn "on" if a trig function is found
            # Remain "on" until branch completes
            if threshold is None:
                for trig_token in trig_tokens:
                    if tokens[r, c] == trig_token:
                        # Once `dangling` falls back to this value, the trig
                        # token's subtree has been fully emitted.
                        threshold = dangling - 1
                        break
            # Turn "off" once the branch completes
            else:
                if dangling == threshold:
                    threshold = None
        # If the sequences ended "on", then there is a trig ancestor
        if threshold is not None:
            ancestors[r] = True
    return ancestors
@jit(nopython=True, parallel=True)
def parents_siblings(tokens, arities, parent_adjust):
    """
    Given a batch of action sequences, computes and returns the parents and
    siblings of the next element of the sequence.
    The batch has shape (N, L), where N is the number of sequences (i.e. batch
    size) and L is the length of each sequence. In some cases, expressions may
    already be complete; in these cases, this function sees the start of a new
    expression, even though the return value for these elements won't matter
    because they will be masked in loss calculations.
    Parameters
    __________
    tokens : np.ndarray, shape=(N, L), dtype=np.int32
        Batch of action sequences. Values correspond to library indices.
    arities : np.ndarray, dtype=np.int32
        Array of arities corresponding to library indices.
    parent_adjust : np.ndarray, dtype=np.int32
        Array of parent sub-library index corresponding to library indices.
    Returns
    _______
    adj_parents : np.ndarray, shape=(N,), dtype=np.int32
        Adjusted parents of the next element of each action sequence.
    siblings : np.ndarray, shape=(N,), dtype=np.int32
        Siblings of the next element of each action sequence.
    """
    N, L = tokens.shape
    empty_parent = np.max(parent_adjust) + 1 # Empty token is after all non-empty tokens
    empty_sibling = len(arities) # Empty token is after all non-empty tokens
    adj_parents = np.full(shape=(N,), fill_value=empty_parent, dtype=np.int32)
    siblings = np.full(shape=(N,), fill_value=empty_sibling, dtype=np.int32)
    # Parallelized loop over action sequences
    for r in prange(N):
        arity = arities[tokens[r, -1]]
        if arity > 0: # Parent is the previous element; no sibling
            adj_parents[r] = parent_adjust[tokens[r, -1]]
            continue
        # Scan backwards from the end of the sequence; the first position where
        # the running (arity - 1) sum reaches zero closes exactly one subtree,
        # so the token there is the parent and the one after it the sibling.
        dangling = 0
        # Loop over elements in an action sequence
        for c in range(L):
            arity = arities[tokens[r, L - c - 1]]
            dangling += arity - 1
            if dangling == 0: # Parent is L-c-1, sibling is the next
                adj_parents[r] = parent_adjust[tokens[r, L - c - 1]]
                siblings[r] = tokens[r, L - c]
                break
    return adj_parents, siblings
| [
"petersen33@llnl.gov"
] | petersen33@llnl.gov |
a2d189784bb2a282ec8d7cdf005a0c8612dceb9b | bd08d0532f20b7285b437c9bf620de1bbcd5b9ea | /aalh_iit_buildings_006/populate-iso8601-amerdate.py | 08c1fdd9ca6bdcee638e2292f3d12d555f36c6ff | [
"Unlicense"
] | permissive | johndewees/iitmigration | a9e8a31ba6ceb541ce12c22fd612596cc243dbca | 4dadfbecda719d6e7d60af076a231aedec3c862f | refs/heads/main | 2023-03-14T17:06:58.777683 | 2021-03-27T20:44:58 | 2021-03-27T20:44:58 | 320,086,321 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,719 | py | from openpyxl import load_workbook
import re

# Workbook layout: rows 7-515 of column 15 hold raw date strings; column 16
# receives the ISO-8601 translation of American-style "Month Day, Year" dates,
# while slash dates ("M/D/YYYY") are normalized in place in column 15.
FILENAME = 'aalh_iit_buildings_006.xlsx'
SHEET_NAME = 'Metadata Template'
FIRST_ROW = 7
LAST_ROW = 515
DATE_COL = 15   # raw date column (slash dates normalized in place)
ISO_COL = 16    # destination for ISO versions of American dates

# Month-name fragments (matched case-insensitively, anywhere in the text),
# replacing the original 50-line if/elif chain.
MONTH_NUMBERS = {
    'jan': '01', 'feb': '02', 'mar': '03', 'apr': '04',
    'may': '05', 'jun': '06', 'jul': '07', 'aug': '08',
    'sep': '09', 'oct': '10', 'nov': '11', 'dec': '12',
}


def american_to_iso(text):
    """Convert an American date like "January 5, 1920" to ISO 8601 (YYYY-MM-DD).

    Returns None when `text` does not look like an American-formatted date
    (not a string, no comma, no 4-digit year, or no recognizable month name).
    Bug fix vs. the original: ALL single-digit days are now zero-padded
    (the old code left days 1-3 unpadded, producing e.g. "1920-01-1").
    """
    if not isinstance(text, str) or ',' not in text:
        return None
    years = re.findall(r'\d{4}', text)
    if not years:
        return None
    lowered = text.lower()
    month = next((num for frag, num in MONTH_NUMBERS.items() if frag in lowered), None)
    if month is None:
        return None
    # The day is the second word before the comma: "January 5, 1920" -> "5".
    before_comma = text.split(',')[0].split()
    if len(before_comma) < 2 or not before_comma[1].isdigit():
        return None
    day = before_comma[1].zfill(2)
    return '{}-{}-{}'.format(years[0], month, day)


def slash_to_iso(text):
    """Convert a slash date "M/D/YYYY" to ISO 8601, or return None if not applicable."""
    if not isinstance(text, str) or '/' not in text:
        return None
    parts = [p.strip() for p in text.split('/')]
    if len(parts) < 3:
        return None
    month, day, year = parts[0], parts[1], parts[2]
    try:
        return '{}-{:02d}-{:02d}'.format(year, int(month), int(day))
    except ValueError:
        # Non-numeric month/day: not a parsable slash date.
        return None


def main():
    """Walk the metadata sheet, normalizing slash dates in place and writing
    ISO versions of American dates into the adjacent column."""
    # Local import keeps the pure-string helpers importable without openpyxl.
    from openpyxl import load_workbook

    wb = load_workbook(FILENAME)
    ws = wb[SHEET_NAME]
    for row in range(FIRST_ROW, LAST_ROW + 1):
        raw = ws.cell(row=row, column=DATE_COL).value
        print(row)
        print(raw)
        iso_slash = slash_to_iso(raw)
        if iso_slash is not None:
            ws.cell(row=row, column=DATE_COL).value = iso_slash
        else:
            print('Not a date formatted with a slash')
        iso_american = american_to_iso(raw)
        if iso_american is not None:
            ws.cell(row=row, column=ISO_COL).value = iso_american
            print(iso_american)
        else:
            print('Not an American formatted date at all')
    wb.save(FILENAME)


if __name__ == '__main__':
    main()
"noreply@github.com"
] | johndewees.noreply@github.com |
749cac2dd489ba3c7348b649105879df3dd38fef | f04d9267e97612c591939b7e50c43439f7862f6a | /bin/zj.py | 596f2faac4b53bcef4c66743f9a30e7e95e5e1f8 | [] | no_license | xzap/config | d9dfd69cfc2a8f39d00c409f5da26e71f4bb20f7 | 4a44f4851b3ffdd9ccede05cafc3f48fb8c33904 | refs/heads/master | 2016-09-06T16:46:41.224907 | 2015-06-01T00:33:18 | 2015-06-01T00:33:18 | 220,711 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,897 | py | #!/bin/env python3
import urllib.request
import urllib.parse
import os,re
from http import cookiejar
import sys
# Unified-login endpoint for zjer.cn; the callBack/productTicket/returnUrl query
# parameters bounce the authenticated session back to the teacher-space portal.
url = 'http://jcpt.zjer.cn/base/loginUnified.jspx?type=2&callBack=www.zjer.cn:80/login_unify.jspx&productTicket=10&returnUrl=http://www.zjer.cn/channel/uhome/index.jhtml?'
# Install a cookie-aware opener globally so the login cookie is reused by every
# later urllib.request.urlopen() call in this script.
cj = cookiejar.CookieJar()
opener = urllib.request.build_opener()
opener.add_handler(urllib.request.HTTPCookieProcessor(cj))
urllib.request.install_opener(opener)
# NOTE(review): credentials are hard-coded in source; consider moving them to an
# environment variable or a config file outside version control.
params={"username":"jgp280017",
"password":"280017",
"flag":1}
params = urllib.parse.urlencode(params)
# POST the form-encoded credentials to perform the login.
req = urllib.request.Request(url,data=params.encode("utf-8"))
# print (cj)
cc = urllib.request.urlopen(req).read().decode("utf-8")
urlmy = "http://jskj.zjer.cn/my.do?method=main"
teachers = [
("毛丽云","mly115321","http://jskj.zjer.cn/user.do?method=main&remoteUserId=11458"),
("钱凯红","qkh12441x","http://jskj.zjer.cn/user.do?method=main&remoteUserId=18271"),
("沃珊珊","wss180089","http://jskj.zjer.cn/user.do?method=main&remoteUserId=19506"),
("沈森","ss100518","http://jskj.zjer.cn/user.do?method=main&remoteUserId=19676"),
("王彩华","wch195721","http://jskj.zjer.cn/user.do?method=main&remoteUserId=20533"),
("毛丽娟","mlj190066","http://jskj.zjer.cn/user.do?method=main&remoteUserId=26396"),
("陆羽操","lyc210311","http://jskj.zjer.cn/user.do?method=main&remoteUserId=28233"),
("沈英","sy11302x","http://jskj.zjer.cn/user.do?method=main&remoteUserId=28670"),
("纪晓玲","jxl060029","http://jskj.zjer.cn/user.do?method=main&remoteUserId=29390"),
("高爱琪","gaoaq","http://jskj.zjer.cn/user.do?method=main&remoteUserId=30140"),
("王厦","wx165022","http://jskj.zjer.cn/user.do?method=main&remoteUserId=31907"),
("孟兰兰","mll315024","http://jskj.zjer.cn/user.do?method=main&remoteUserId=31951"),
("钱雅琼","qyq183229","http://jskj.zjer.cn/user.do?method=main&remoteUserId=31967"),
("秦强华","qqh193825","http://jskj.zjer.cn/user.do?method=main&remoteUserId=32904"),
("蒋萍","jp050047","http://jskj.zjer.cn/user.do?method=main&remoteUserId=32908"),
("袁铭英","ymy184425","http://jskj.zjer.cn/user.do?method=main&remoteUserId=32962"),
("金其琴","jqq242026","http://jskj.zjer.cn/user.do?method=main&remoteUserId=32969"),
("范炯","fj25002X","http://jskj.zjer.cn/user.do?method=main&remoteUserId=32991"),
("蔡初明","ccm180069","http://jskj.zjer.cn/user.do?method=main&remoteUserId=32993"),
("徐学东","xxd204431","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33003"),
("马微","mw244049","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33016"),
("汪健","430782","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33019"),
("潘芝芳","pzf262529","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33028"),
("周鼎","zd040014","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33045"),
("顾渭梅","gwm152842","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33048"),
("朱伟","zw150054","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33136"),
("浦瑞芳","prf270023","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33142"),
("许黎明","xlm041229","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33144"),
("姚碧红","ybh290083","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33233"),
("陈玉珠","cyz024425","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33261"),
("陆永兴","234567","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33359"),
("胡青秀","hqx125927","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33399"),
("朱军","zj180044","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33423"),
("罗文献","lwx161328","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33432"),
("沈娅","sy210046a","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33440"),
("金建新","jjx025926","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33443"),
("王群芳","wqf250023","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33571"),
("董萍","dp055522","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33652"),
("朱颖红","zyh284121","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33662"),
("陈红","ch302045","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33665"),
("吴春燕","lansedehai","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33677"),
("缪晓菊","miaomiao123","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33680"),
("沈文燕","swy174426","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33688"),
("蔡明孝","cmx225716","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33825"),
("金加岭","jjl190022","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33846"),
("沈娟","sj160049","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33892"),
("许建忠","xjz240015","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33904"),
("王怡","wy291027","http://jskj.zjer.cn/user.do?method=main&remoteUserId=34051"),
("于海珠","yhz243826","http://jskj.zjer.cn/user.do?method=main&remoteUserId=34054"),
("许春俭","xcj222326","http://jskj.zjer.cn/user.do?method=main&remoteUserId=34145"),
("张丽敏","zlm160826","http://jskj.zjer.cn/user.do?method=main&remoteUserId=34355"),
("赵袁兰","zyl125026","http://jskj.zjer.cn/user.do?method=main&remoteUserId=34554"),
("薛九红","xjh170021","http://jskj.zjer.cn/user.do?method=main&remoteUserId=34687"),
("章梅","zm270029","http://jskj.zjer.cn/user.do?method=main&remoteUserId=34720"),
("金宇超","jqm","http://jskj.zjer.cn/user.do?method=main&remoteUserId=34888"),
("张小燕","zxy034421","http://jskj.zjer.cn/user.do?method=main&remoteUserId=34896"),
("顾一冬","gyd110824","http://jskj.zjer.cn/user.do?method=main&remoteUserId=35257"),
("刘善萍","lsp060028","http://jskj.zjer.cn/user.do?method=main&remoteUserId=35310"),
("曹溢慧","cyh250043","http://jskj.zjer.cn/user.do?method=main&remoteUserId=35317"),
("薛卫红","xwh280065","http://jskj.zjer.cn/user.do?method=main&remoteUserId=35492"),
("李洁","lj310025","http://jskj.zjer.cn/user.do?method=main&remoteUserId=36125"),
("沈斌","sb230029","http://jskj.zjer.cn/user.do?method=main&remoteUserId=36221"),
("俞丹","yd010047","http://jskj.zjer.cn/user.do?method=main&remoteUserId=44040"),
("芮燕飞","ryf010060","http://jskj.zjer.cn/user.do?method=main&remoteUserId=44073"),
("顾琴","gq31182x","http://jskj.zjer.cn/user.do?method=main&remoteUserId=44078"),
("张海珍","zhz290049","http://jskj.zjer.cn/user.do?method=main&remoteUserId=45142"),
("魏萍","wp022041","http://jskj.zjer.cn/user.do?method=main&remoteUserId=49347"),
("丁建芳","djf172523","http://jskj.zjer.cn/user.do?method=main&remoteUserId=49446"),
("鲁苑波","luyuanbo","http://jskj.zjer.cn/user.do?method=main&remoteUserId=49449"),
("於善芳","fish595","http://jskj.zjer.cn/user.do?method=main&remoteUserId=49513"),
("雷雪锋","lei","http://jskj.zjer.cn/user.do?method=main&remoteUserId=49514"),
("徐月强","xyq714053","http://jskj.zjer.cn/user.do?method=main&remoteUserId=49516"),
("杨丽红","ylh081087","http://jskj.zjer.cn/user.do?method=main&remoteUserId=49517"),
("芮琼","rq240048","http://jskj.zjer.cn/user.do?method=main&remoteUserId=49554"),
("李吉","liji1956","http://jskj.zjer.cn/user.do?method=main&remoteUserId=49602"),
("沈叶波","syb19001X","http://jskj.zjer.cn/user.do?method=main&remoteUserId=49610"),
("吴玲华","wlh263521","http://jskj.zjer.cn/user.do?method=main&remoteUserId=49614"),
("蔡明孝","68235338","http://jskj.zjer.cn/user.do?method=main&remoteUserId=49630"),
("陈小英","cxy625282","http://jskj.zjer.cn/user.do?method=main&remoteUserId=49632"),
("曹瑞月","cry110068","http://jskj.zjer.cn/user.do?method=main&remoteUserId=49636"),
("吴学先","wxx310029","http://jskj.zjer.cn/user.do?method=main&remoteUserId=49676"),
("陈国红","cgh122323","http://jskj.zjer.cn/user.do?method=main&remoteUserId=49797"),
("倪芳","nf070328","http://jskj.zjer.cn/user.do?method=main&remoteUserId=51800"),
("倪懂平","ndp243828","http://jskj.zjer.cn/user.do?method=main&remoteUserId=88475"),
("陆文权","lwq252812","http://jskj.zjer.cn/user.do?method=main&remoteUserId=88642"),
("沈晓英","djxxsxy","http://jskj.zjer.cn/user.do?method=main&remoteUserId=88647"),
("汤顺强","djxxtsq","http://jskj.zjer.cn/user.do?method=main&remoteUserId=88650"),
("王梦婕","wmj070029","http://jskj.zjer.cn/user.do?method=main&remoteUserId=88652"),
("徐斌","djxxxb","http://jskj.zjer.cn/user.do?method=main&remoteUserId=88654"),
("周鸣章","djxxzmz","http://jskj.zjer.cn/user.do?method=main&remoteUserId=88656"),
("张忠","zz120512","http://jskj.zjer.cn/user.do?method=main&remoteUserId=89056"),
("刘叶梅","lym08414X","http://jskj.zjer.cn/user.do?method=main&remoteUserId=89224"),
("嘉善县杜鹃小学","jsdjxx","http://jskj.zjer.cn/user.do?method=main&remoteUserId=209744"),
("鲁苑波","lyb11002X","http://jskj.zjer.cn/user.do?method=main&remoteUserId=247641"),
("计国平","jgp280017","http://jskj.zjer.cn/user.do?method=main&remoteUserId=254633"),
("蒋秀青","蒋秀青","http://jskj.zjer.cn/user.do?method=main&remoteUserId=300443"),
("吴春燕","wcy160026","http://jskj.zjer.cn/user.do?method=main&remoteUserId=321610"),
("雷雪锋","lxf165718","http://jskj.zjer.cn/user.do?method=main&remoteUserId=321978"),
("金宇超","jyc300041","http://jskj.zjer.cn/user.do?method=main&remoteUserId=324404"),
("金婷立","jtl180520","http://jskj.zjer.cn/user.do?method=main&remoteUserId=333649"),
("缪晓菊","mxj11202x","http://jskj.zjer.cn/user.do?method=main&remoteUserId=347029"),
("汤顺强","tsq250047","http://jskj.zjer.cn/user.do?method=main&remoteUserId=351619"),
("房晨杨","fcy250527","http://jskj.zjer.cn/user.do?method=main&remoteUserId=699674"),
("黄鑫权","hxq190519","http://jskj.zjer.cn/user.do?method=main&remoteUserId=700334"),
("钱立丰","qlf064711","http://jskj.zjer.cn/user.do?method=main&remoteUserId=700335"),
("王涛","wt223217","http://jskj.zjer.cn/user.do?method=main&remoteUserId=728857"),
("鲁婷婷","ltt05302X","http://jskj.zjer.cn/user.do?method=main&remoteUserId=728859"),
("顾艳","gy12302X","http://jskj.zjer.cn/user.do?method=main&remoteUserId=728860"),
("金涛","jt14501X","http://jskj.zjer.cn/user.do?method=main&remoteUserId=728861"),
("黄维","hw182826","http://jskj.zjer.cn/user.do?method=main&remoteUserId=728862"),
("顾林红","glh170827","http://jskj.zjer.cn/user.do?method=main&remoteUserId=1085242"),
("吴叶","wy200029","http://jskj.zjer.cn/user.do?method=main&remoteUserId=1086602"),
]
# teachers =[
# ("毛丽娟","mlj190066","http://jskj.zjer.cn/user.do?method=main&remoteUserId=26396"),
# ("金加岭","jjl190022","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33846"),
# ("沈娟","sj160049","http://jskj.zjer.cn/user.do?method=main&remoteUserId=33892"),
# ("刘善萍","lsp060028","http://jskj.zjer.cn/user.do?method=main&remoteUserId=35310"),
# ("魏萍","wp022041","http://jskj.zjer.cn/user.do?method=main&remoteUserId=49347"),
# ("曹瑞月","cry110068","http://jskj.zjer.cn/user.do?method=main&remoteUserId=49636"),
# ("王梦婕","wmj070029","http://jskj.zjer.cn/user.do?method=main&remoteUserId=88652"),
# ("金婷立","jtl180520","http://jskj.zjer.cn/user.do?method=main&remoteUserId=333649"),
# ("顾林红","glh170827","http://jskj.zjer.cn/user.do?method=main&remoteUserId=1085242"),
# ]
# Visit every teacher's personal-space page and tally who has activated it.
ok = 0
fail = 0
num = 0
no = []  # teachers whose space is still closed, as (name, account, url) tuples
for name, account, space_url in teachers:
    num += 1
    # Progress line: running index and counters so far, then this teacher's verdict.
    print(num, ok, fail, name, end="\t")
    page = urllib.request.urlopen(space_url).read().decode()
    if '空间主人尚未开通个人空间' in page:  # marker text: space not yet opened
        fail += 1
        no.append((name, account, space_url))
        print("fail!")
    else:
        ok += 1
        print('done!')
print("=" * 100)
rusult = "一共有老师%s人,开通老师%s人,未开通老师%s人.名单如下:" % (num, ok, fail)
print(rusult)
for name, account, space_url in no:
    print("%s\t%s\t%s" % (name, account, space_url))
# 吴春燕 lansedehai http://jskj.zjer.cn/user.do?method=main&remoteUserId=33677
# 吴春燕 wcy160026 http://jskj.zjer.cn/user.do?method=main&remoteUserId=321610
# 汤顺强 djxxtsq http://jskj.zjer.cn/user.do?method=main&remoteUserId=88650
# 汤顺强 tsq250047 http://jskj.zjer.cn/user.do?method=main&remoteUserId=351619
# 缪晓菊 miaomiao123 http://jskj.zjer.cn/user.do?method=main&remoteUserId=33680
# 缪晓菊 mxj11202x http://jskj.zjer.cn/user.do?method=main&remoteUserId=347029
# 蔡明孝 68235338 http://jskj.zjer.cn/user.do?method=main&remoteUserId=49630
# 蔡明孝 cmx225716 http://jskj.zjer.cn/user.do?method=main&remoteUserId=33825
# 金宇超 jqm http://jskj.zjer.cn/user.do?method=main&remoteUserId=34888
# 金宇超 jyc300041 http://jskj.zjer.cn/user.do?method=main&remoteUserId=324404
# 雷雪锋 lei http://jskj.zjer.cn/user.do?method=main&remoteUserId=49514
# 雷雪锋 lxf165718 http://jskj.zjer.cn/user.do?method=main&remoteUserId=321978
# 鲁苑波 luyuanbo http://jskj.zjer.cn/user.do?method=main&remoteUserId=49449
# 鲁苑波 lyb11002X http://jskj.zjer.cn/user.do?method=main&remoteUserId=247641
| [
"xzap@163.com"
] | xzap@163.com |
707ceed62394e8d6b1cfb912ad56f1ee82179d14 | 6c5fac6cbe16d044fd7cef6b8efdfa3a60fbaa30 | /watchlist/migrations/0001_initial.py | 20f06b74512b5d5eb17b28b4c643a810c96c46d1 | [] | no_license | sumeetmathpati/django-moviedb | 580daf0f7c08b83ec857d47fa29fe59c25d1d204 | 4bbc380ba0f56d65e5ba7a9140a8622cd60908c3 | refs/heads/master | 2023-09-04T06:45:40.351783 | 2021-11-02T03:13:46 | 2021-11-02T03:13:46 | 423,691,755 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 649 | py | # Generated by Django 3.2.9 on 2021-11-01 16:32
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Movie',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=63)),
('description', models.CharField(blank=True, max_length=255, null=True)),
('rating', models.IntegerField(blank=True, null=True)),
],
),
]
| [
"sumeet221b@gmail.com"
] | sumeet221b@gmail.com |
647302f01a898d5f98da1f00057803163ef6d0a6 | 75bdfdd400637f920987b52d03b9d3bac4628178 | /env3/bin/easy_install | 35706a2671299794ca19f50e3e36d7ae6426ceff | [] | no_license | dhanrajnambiar/leaderboard | 88cab01d9d8837a6f00483c4ce8a5b4ecbce2b86 | e72896471cf6ca96b9a2c8e924478421e80ed585 | refs/heads/master | 2021-09-01T13:48:32.594984 | 2017-12-27T08:35:01 | 2017-12-27T08:35:01 | 114,249,421 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 270 | #!/home/dhan/PYTHON/django_leaderboard/env3/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"dhanrajp3531@gmail.com"
] | dhanrajp3531@gmail.com | |
4da21962949cdb0845498afbc0e1c2c503787e34 | 4b1440da569df9fe28b0fb7b1f7c41307415786a | /aulaszeus/public/models.py | 68c517ce9bac57967cee509da21c221191d27aa8 | [] | no_license | zerossB/aulas_zeus | 2618568b6b970763c4d408593261caf16d224e74 | 002fc29578aed3aa36a9b400882e638b68c123ea | refs/heads/master | 2021-01-17T06:49:17.629595 | 2016-07-29T22:23:52 | 2016-07-29T22:23:52 | 64,513,638 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 217 | py | # -*- coding: utf-8 -*-
"""
aulaszeus.public.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
aulaszeus public models module
:copyright: (c) 2016 by Haynesss.
:license: BSD, see LICENSE for more details.
""" | [
"zerossb@gmail.com"
] | zerossb@gmail.com |
1dc54f0daec274a019407bef2af50c7e4dc8cf20 | 3821202cc9d747a7a2d2878c13b1337f6b477a7d | /sudoku.py | 3944e162ad0cfbc4f7336e3977a8d5f38e8d1ac3 | [] | no_license | HaukurP/Sudoku | 48b11aaa5fb880e9c59327369b5eec6f9050f77f | 53c44495d8e68ceb49a42f35603d3e673f8236e3 | refs/heads/master | 2021-01-23T00:20:12.195231 | 2012-08-06T15:43:27 | 2012-08-06T15:43:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,052 | py | import sys, copy
class Board:
"""creates a new board with all possibilities open"""
# A Board has numeric values infront of a True or False value
# A True value means that the numeric value infront is a solution
# A False value means that the numeric value(s) are possible solutions
# The data is set up, looking at sudoku as:
# 0 1 2
# | 0 1 2 | 0 1 2 | ....
# | 3 4 5 | .....
# | 6 7 8 | ....
# ______________________
# | 3 | 4 .
# | 0 1 2 | .... :
def __init__(self):
self.data = [None]*9
for i in range(9):
self.data[i]=[None]*9
for j in range(9):
self.data[i][j] = [1,2,3,4,5,6,7,8,9,False]
def print_board(self):
"""prints out the board, badly at start"""
# Use: board.print_board()
# Before: board is a Board
# After: board has been print out
print("_______________________________________")
for m in range(3):
for k in range(3):
for i in range(3):
for j in range(3):
length = len(self.data[i+m*3][j+3*k])-1
print(self.data[i+3*m][j+3*k][:length],end="")
print(end="---")
print()
print("_______________________________________")
def import_data(self,bdata):
"""imports data from a list to a Board"""
# Use: board.import_data(bdata)
# Before: board is a Board, bdata is a list that looks like a Board
# After: bdata is now a Board
self.data = copy.deepcopy(bdata)
def add_final(self,number,i,j):
"""adds a number to a square, as an answer"""
# Use: board.add_final(number,box,square)
# Before: board is a Board, number: 1-9, box: 0-8, square: 0-8
# After: number has been added as an answer to the board
self.data[i][j] = [number, False]
self.make_true()
def add_numbers(self,numbers,i,j):
"""adds a number to a square and checks if it's an answer"""
#bæta við seinna?
def check_horizontal(self):
"""checks what possibilities are not ok, horizontally"""
# Use: board.check_horizontal()
# Before: board is a Board
# After: if there is a solution in a line that number has been removed as an option through the line
removed = False #removed is a bad solution for checking if something was removed from the board
for i in range(9):
for j in range(9):
if self.data[i][j][1] == True:
m,k = i//3,j//3
removed |= self.remove_from_line(self.data[i][j][0],m,k)
return removed
def remove_from_line(self,number,m,k):
"""removes a number from a line in the board"""
# Use: board.remove_from_line(number,m,k)
# Before: board: Board, number: 1-9, m: 0-2, k: 0-2
# After: number has been removed from a line denoted by m and k
removed = False
for i in range(3):
for j in range(3):
if self.data[i+m*3][j+3*k][1] != True:
if number in self.data[i+m*3][j+3*k]:
self.data[i+m*3][j+3*k].remove(number)
removed = True
return removed
def check_vertically(self):
"""checks what possibilities are not ok, vertically"""
# Use: board.check_vertically()
# Before: same as check_horizontal
# After: same as checkhriz
removed = False
for i in range(9):
for j in range(9):
if self.data[i][j][1] == True:
m,k = i%3,j%3
removed |= self.remove_from_row(self.data[i][j][0],m,k)
return removed
def remove_from_row(self,number,m,k):
"""removes a number from a row in the board"""
# Use: board.remove_from_row(number,m,k)
# Before: same as remove from line
# After: -||-
removed = False
for i in range(0,9,3):
for j in range(0,9,3):
if self.data[i+m][j+k][1] != True:
if number in self.data[i+m][j+k]:
self.data[i+m][j+k].remove(number)
removed = True
return removed
def check_box(self):
"""checks what possibilities are not ok, inside a box"""
# Use: board.check_box()
# Before: -||-
# After: -||-
removed = False
for i in range(9):
for j in range(9):
if self.data[i][j][1] == True:
number = self.data[i][j][0]
for k in range(9):
if self.data[i][k][1] != True:
if number in self.data[i][k]:
self.data[i][k].remove(number)
removed = True
return removed
def remove_options(self):
"""calls for the check* operations"""
# Use: board.remove_options()
# Before: board: Board
# After: all the check operations have been executed
removed = self.check_horizontal()
if removed:
self.make_true()
dontcare = self.check_vertically()
else:
removed = self.check_vertically()
if removed:
self.make_true()
dontcare = self.check_box()
else:
removed = self.check_box()
return removed
def what_is_left(self):
"""finds out what numbers are left and adds them as solutions"""
# Use: left = board.what_is_left()
# Before: board: Board
# After: what_is_left* operations have been executed
removed = False
for i in range(3):
for j in range(3):
m,k = i//3,j//3
removed |= self.deduce_line(self.what_is_left_in_line(m,k),m,k)
for i in range(0,9,3):
for j in range(0,9,3):
m,k = i%3,j%3
removed |= self.deduce_row(self.what_is_left_in_row(m,k),m,k)
for i in range(9):
removed |= self.deduce_box(self.what_is_left_in_box(i),i)
return removed
def what_is_left_in_box(self,i):
"""finds out what numbers are left to solve in a box"""
# Use: left = board.what_is_left_in_box(box)
# Before: board: Board, box: 0-8
# After: finds out what numbers are left to solve in a box
left = [1,2,3,4,5,6,7,8,9]
for j in range(9):
if self.data[i][j][1] == True:
left.remove(self.data[i][j][0])
return left
def deduce_box(self,left,i):
"""finds what possibilites are the only ones left in a box and adds them"""
# Use: board.deduce_box(left,box)
# Before: board: Board, left: [], box: 0-8
# After: given what numbers are left to solve in a box, checks what numbers only
# appear once in a box and deduces that it must be a solution
removed = False
for number in left:
often = 0
cordx,cordy = 0,0
for j in range(9):
if self.data[i][j][1] == True:
pass
elif number in self.data[i][j]:
often += 1
cordx,cordy = i,j
else:
pass
if often == 1:
self.data[cordx][cordy] = [number,True]
removed = True
return removed
def what_is_left_in_line(self,m,k):
"""finds out what numbers are left to solve in a line"""
# Use: left = board.what_is_left_in_line(m,k)
# Before: board: Board, m: 0-2, k: 0-2
# After: finds what numbers are left to solve in a line denoted by m and k
left = [1,2,3,4,5,6,7,8,9]
for i in range(3):
for j in range(3):
if self.data[i+m*3][j+3*k][1] == True:
left.remove(self.data[i+m*3][j+3*k][0])
return left
def deduce_line(self,left,m,k):
"""finds what possibilites are the only ones left in a line and adds them"""
# Use: board.deduce_line(left,m,k)
# Before: board: Board, left: [], m: 0-2, k: 0-2
# After: given what numbers are left to solve in a line, checks what numbers only
# appear once in a line and deduces that it must be a solution
removed = False
for number in left:
often = 0
cordx,cordy = 0,0
for i in range(3):
for j in range(3):
if self.data[i+m*3][j+3*k][1] == True:
pass
elif number in self.data[i+m*3][j+3*k]:
often += 1
cordx,cordy = i+m*3,j+3*k
else:
pass
if often == 1:
self.data[cordx][cordy] = [number,True]
removed = True
return removed
def what_is_left_in_row(self,m,k):
"""finds out what numbers are left in the row"""
# Use: left = board.what_is_left_in_row(m,k)
# Before: board: Board, m: 0-2, k: 0-2
# After: same as in what_is_left_in_line
left = [1,2,3,4,5,6,7,8,9]
for i in range(3):
for j in range(3):
if self.data[i+m][j+k][1] == True:
left.remove(self.data[i+m][j+k][0])
return left
def deduce_row(self,left,m,k):
"""finds what possibilites are the only ones left in a row and adds them"""
# Use: board.deduce_row(left,m,k)
# Before: board: Board, left: [], m: 0-2, k: 0-2
# After: same as in deduce_line
removed = False
for number in left:
often = 0
cordx,cordy = 0,0
for i in range(3):
for j in range(3):
if self.data[i+m][j+k][1] == True:
pass
elif number in self.data[i+m][j+k]:
often += 1
cordx,cordy = i+m,j+k
else:
pass
if often == 1:
self.data[cordx][cordy] = [number,True]
removed = True
return removed
def try_solving(self):
"""helping function"""
# Use: board.try_solving()
# Before: board: Board
# After: calls for check* and what_is_left* operations
removed = self.remove_options()
removed |= self.make_true()
removed |= self.what_is_left()
return removed
def make_true(self):
"""makes a square true if there is single possibilites (solves)"""
# Use: board.make_true()
# Before: board: Board
# After: if a number was a single option but was False (not a solution) it is now True (a solution)
madetrue = False
for i in range(9):
for j in range(9):
if self.data[i][j][1] == False:
self.data[i][j][1] = True
madetrue = True
return madetrue
def is_solved(self):
"""returns true if sudoku is solved, else false"""
# Use: is_board_solved = board.is_solved()
# Before: board: Board
# After: is_board_solved = True if the board is solved, False otherwise
solved = True
for i in range(9):
for j in range(9):
if self.data[i][j][1] != True:
solved = False
break
return solved
def multiple_solutions(self):
"""returns multiple sudoku boards where a number has been picked"""
# Use: solutions = board.multiple_solutions()
# Before: board: Board
# After: solutions = [board1,board2,...]
i,j = self.find_lowest_amount()
length = len(self.data[i][j])-1
multipleboards = [None]*length
for k in range(length):
numbers = self.data[i][j][k]
multipleboards[k] = copy.deepcopy(self.data)
multipleboards[k][i][j] = [numbers,True]
#print("Guessing on",numbers,"in",i,j)
return multipleboards
def find_lowest_amount(self):
"""finds the lowest amount of options in a square on the whole board and returns the index of it"""
# Use: i,j = board.finelowestamount()
# Before: board: Board
# After: i,j = box, square where the lowest amount of options are
counter = 3
while counter <= 10:
for i in range (9):
for j in range(9):
if len(self.data[i][j]) <= counter and self.data[i][j][1] != True:
return [i,j]
counter += 1
| [
"hpj3@hi.is"
] | hpj3@hi.is |
3bec7b7333638c70a8ec9b0059d57272eb3716d7 | 00df814bea30397f1381b24bde87cf3890fa5493 | /kegg_annotation.py | 067d50d38347b80c9fa310f8473f3390abf05224 | [] | no_license | wangchengww/my-tools | 896d5301d9b2b9f59b5aff16c1afac369e07c7c8 | 101eb12747b8f750a1a46acd74b938361f922449 | refs/heads/master | 2022-01-18T14:00:47.598946 | 2019-03-20T11:19:21 | 2019-03-20T11:19:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,752 | py | #!/usr/bin/env python
#-*-coding:utf8-*-
# author: Todzhu 2017.12.01
import time,requests,argparse,os
parser = argparse.ArgumentParser(description="Function: KEGG Automatic Annotation use for uniprot protein")
parser.add_argument('-p',required=True,help='Input uniprot protein list')
parser.add_argument('-t',required=True,help='Input the taxonomy id in KEGG')
args = parser.parse_args()
KO2Gene = '/home/ztt/Annotation/KO2Gene'
Uniprot2KO = '/home/ztt/Annotation/Uniprot2KO_all'
Uniprot2KEGG = '/home/ztt/Annotation/Uniprot2KEGG_all'
headers = {'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36'}
def keggAnnotation(prot_list,taxonomy):
dict_ko = {}
dict_kegg = {}
dict_path = {}
path_name_list = []
path_description_list = []
dict_ko_gene = {}
#Download the taxonomy whole pathway
print time.strftime("%Y-%m-%d %H:%M:%S",time.localtime())+'\n'
print 'Now loading {} pathway information from KEGG...\n'.format(taxonomy)
with open('%s_path.txt' % taxonomy,'w') as f:
response = requests.get('http://rest.kegg.jp/link/pathway/{}'.format(taxonomy),headers=headers)
f.write(response.text)
with open('%s_path.txt' % taxonomy, 'rb') as f:
for line in f:
line = line.strip().split('\t')
path = line[0]
pathway = line[1].split(':')[1]
dict_path.setdefault(path,[]).append(pathway)
# Get the pathway name to pathway description
response = requests.get('http://rest.kegg.jp/list/pathway/{}'.format(taxonomy),headers=headers)
result = response.content.replace('\n', '\t').strip().split('\t')
for line in result:
if 'path:' in line:
path_name_list.append(line.split(':')[1])
else:
path_description_list.append(line)
dict_path_description = dict(zip(path_name_list, path_description_list))
with open(Uniprot2KO, 'rb') as ko:
for line in ko:
line = line.strip()
prot = line.split('\t')[0]
ko = line.split('\t')[1]
dict_ko[prot] = ko
with open(Uniprot2KEGG, 'rb') as f:
for line in f:
line = line.strip()
prot = line.split('\t')[0]
kegg = line.split('\t')[1]
dict_kegg[prot] = kegg
with open(KO2Gene, 'rb') as f:
for line in f:
line = line.strip()
if 'ko:' in line:
ko_id = line.split('\t')[0]
ko_id = ko_id.split(':')[1]
gene = line.split('\t')[1]
dict_ko_gene[ko_id] = gene
print 'Now writing results...\n'
with open('KEGG_pathway_annotation.xls','w') as f:
f.write('Protein accession'+'\t'+'KEGG KO No.'+'\t'+'KEGG Gene'+'\t'+'KEGG pathway'+'\n')
with open(prot_list, 'rb') as list:
header = list.readline()
for line in list:
prot = line.strip().split('\t')[0]
if prot in dict_ko:
ko = dict_ko[prot]
if ko in dict_ko_gene:
f.write(prot + '\t' + ko + '\t')
f.write(dict_ko_gene[ko]+'\t')
else:
f.write(prot)
if dict_kegg[prot] in dict_path:
for path in dict_path[dict_kegg[prot]]:
f.write(path + ' ' + dict_path_description[path] + '; ')
f.write('\n')
else:
f.write(prot+'\n')
os.system('rm *_path.txt')
print 'KEGG annotation done !\n'
print time.strftime("%Y-%m-%d %H:%M:%S",time.localtime())+'\n'
keggAnnotation(args.p,args.t)
| [
"Todzhu@126.com"
] | Todzhu@126.com |
4b78834a9edd0128f840016e525d7d935625b28d | 76bee397181215b5e392bc05bfc60cd6fe6e7e73 | /machine learning/lab11(CNN).py | 54a5206b96ed05c84111cbc4a822ae756e1f7517 | [] | no_license | dongriDK/Python | 28ff10cc1816a8674d336e39c8ac84908f799548 | e02da063c3a2bd1cb82e29d763eb8eb18b4adaf4 | refs/heads/master | 2021-07-05T03:27:56.874395 | 2020-08-26T00:36:02 | 2020-08-26T00:36:02 | 161,972,611 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,813 | py | # # CNN 연습
# import tensorflow as tf
# import numpy as np
# import matplotlib.pyplot as plt
#
# sess = tf.InteractiveSession()
# image = np.array([[[[1],[2],[3]],
# [[4],[5],[6]],
# [[7],[8],[9]]]], dtype=np.float32)
#
# print(image.shape)
# plt.imshow(image.reshape(3,3), cmap='Greys')
# # plt.show()
#
# weight = tf.constant([[[[1.]],[[1.]]],[[[1.]],[[1.]]]])
#
# print("weight.shape", weight.shape)
# conv2d = tf.nn.conv2d(image, weight, strides=[1, 1, 1, 1], padding='SAME')
# conv2d_img = conv2d.eval()
#
# print("conv2d_img.shape", conv2d_img.shape)
#
# conv2d_img = np.swapaxes(conv2d_img, 0, 3)
# for i, one_img in enumerate(conv2d_img):
# print(one_img.reshape(3,3))
# plt.subplot(1, 2, i+1), plt.imshow(one_img.reshape(3,3), cmap='gray')
#
#
# image = np.array([[[[4], [3]], [[2], [1]]]], dtype=np.float32)
# pool = tf.nn.max_pool(image, ksize=[1, 2, 2, 1],
# strides=[1, 1, 1, 1], padding='SAME')
# print(pool.shape)
# print(pool.eval())
# # CNN으로 MNist
from tensorflow.examples.tutorials.mnist import input_data
import tensorflow as tf
import random
import matplotlib.pyplot as plt
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
# print("A")
# img = mnist.train.images[0].reshape(28, 28)
# plt.imshow(img, cmap='gray')
# plt.show()
keep_prob = tf.compat.v1.placeholder(tf.float32)
keep_prob = 0.7
learning_rate = 0.001
training_epochs = 15
batch_size = 100
X = tf.compat.v1.placeholder(tf.float32, [None, 784])
# 28x28 size, 1 color
X_img = tf.reshape(X, [-1, 28, 28, 1])
Y = tf.compat.v1.placeholder(tf.float32, [None, 10])
# 3x3 filter, 색 1개, 32개의 filter
W1 = tf.Variable(tf.random.normal([3, 3, 1, 32], stddev=0.01))
# 1x1 padding
# Conv -> (?, 28, 28, 32) => 32개의 filter
# Pool -> (?, 14, 14, 32)
L1 = tf.nn.conv2d(X_img, W1, strides=[1, 1, 1, 1], padding='SAME')
L1 = tf.nn.relu(L1)
# 2x2 padding
# ksize=[1,2,2,1] -> 2,2 : filter size
L1 = tf.nn.max_pool2d(L1, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
L1 = tf.nn.dropout(L1, rate=keep_prob)
# 3x3 filter, 색 32개(이전 필터의 개수), 64개의 필터
# L2 ImgIn shape(?, 14, 14, 32)
W2 = tf.Variable(tf.random.normal([3, 3, 32, 64], stddev=0.01))
# Conv -> (?, 14, 14, 64)
# Pool -> (?, 7, 7, 64)
# Reshape -> (?, 3136)
L2 = tf.nn.conv2d(L1, W2, strides=[1, 1, 1, 1], padding='SAME')
L2 = tf.nn.relu(L2)
L2 = tf.nn.max_pool2d(L2, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
# -1 : n개의 값 을 reshape 하겠다
# L2 = tf.reshape(L2, [-1, 7*7*64])
L2 = tf.nn.dropout(L2, rate=keep_prob)
# ImgIn shape=(?, 7, 7, 64)
W3 = tf.Variable(tf.random.normal([3, 3, 64, 128], stddev=0.01))
L3 = tf.nn.conv2d(L2, W3, strides=[1, 1, 1, 1], padding="SAME")
L3 = tf.nn.relu(L3)
L3 = tf.nn.max_pool2d(L3, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding="SAME")
L3 = tf.nn.dropout(L3, rate=keep_prob)
L3_flat = tf.reshape(L3, [-1, 128*4*4])
#Fully Connected layer
W4 = tf.compat.v1.get_variable("W4", shape=[4*4*128, 625], initializer=tf.contrib.layers.xavier_initializer())
b4 = tf.Variable(tf.random.normal([625]))
L4 = tf.nn.relu(tf.matmul(L3_flat, W4) + b4)
L4 = tf.nn.dropout(L4, rate=keep_prob)
W5 = tf.compat.v1.get_variable("W5", shape=[625, 10], initializer=tf.contrib.layers.xavier_initializer())
b5 = tf.Variable(tf.random.normal([10]))
hypothesis = tf.matmul(L4, W5) + b5
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(logits=hypothesis, labels=Y))
optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
# Training and Evaluation
sess = tf.compat.v1.Session()
sess.run(tf.compat.v1.global_variables_initializer())
print('Learning started. It takes sometime.')
for epoch in range(training_epochs):
avg_cost = 0
total_batch = int(mnist.train.num_examples / batch_size)
for i in range(total_batch):
batch_xs, batch_ys = mnist.train.next_batch(batch_size)
feed_dict = {X:batch_xs, Y:batch_ys, keep_prob: 0.7}
c, _ = sess.run([cost, optimizer], feed_dict=feed_dict)
avg_cost += c / total_batch
print('Epoch:', '%04d' %(epoch + 1), 'cost =', '{:.9f}'.format(avg_cost))
print('Learnging Finished!')
correct_prediction = tf.equal(tf.compat.v1.argmax(hypothesis, 1), tf.compat.v1.argmax(Y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
print('Accuracy:', sess.run(accuracy, feed_dict={X:mnist.test.images, Y:mnist.test.labels, keep_prob:1}))
r = random.randint(0, mnist.test.num_examples-1)
print("Label: ", sess.run(tf.compat.v1.argmax(mnist.test.labels[r:r+1], 1)))
print("Prediction: ", sess.run(tf.compat.v1.argmax(hypothesis, 1), feed_dict={X:mnist.test.images[r:r+1], keep_prob:1}))
# # MNIST Convolution layer
# sess = tf.InteractiveSession()
# img = img.reshape(-1, 28, 28, 1)
# W1 = tf.Variable(tf.random.normal([3, 3, 1, 5], stddev=0.01))
# conv2d = tf.nn.conv2d(img, W1, strides=[1, 2, 2, 1], padding='SAME')
# print(conv2d)
# sess.run(tf.compat.v1.global_variables_initializer())
# conv2d_img = conv2d.eval()
# conv2d_img = np.swapaxes(conv2d_img, 0, 3)
# for i, one_img in enumerate(conv2d_img):
# plt.subplot(1, 5, i+1), plt.imshow(one_img.reshape(14, 14), cmap='gray')
# plt.show()
#
# # Max pooling (subsampling)
# pool = tf.nn.max_pool(conv2d, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
# print(pool)
# sess.run(tf.compat.v1.global_variables_initializer())
# pool_img = pool.eval()
# pool_img = np.swapaxes(pool_img, 0, 3)
# for i, one_img in enumerate(pool_img):
# plt.subplot(1, 5, i+1), plt.imshow(one_img.reshape(7, 7), cmap='gray')
# plt.show()
| [
"noreply@github.com"
] | dongriDK.noreply@github.com |
2d9d5ce37e2bddf4580731cbd7078e72d96add82 | fba717d388e3abe96a175e649c44db8c9e6d59d4 | /Agents/D3QN_baseline_nn_concat/DoubleDuelingDQN.py | 036915df9fe8438901e907cab36a6737f1103b47 | [
"BSD-3-Clause"
] | permissive | 19ahmed99/l2rpn_opponent_modelling | 0c6a688f9cf0097f84c52236138648400f7a9de0 | 5a04f74fe065e2b3788d3aa8378acd06ee3d2426 | refs/heads/main | 2023-04-22T10:12:05.604133 | 2021-05-01T21:37:57 | 2021-05-01T21:37:57 | 363,488,923 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,011 | py | # Copyright (c) 2020, RTE (https://www.rte-france.com)
# See AUTHORS.txt
# This Source Code Form is subject to the terms of the Mozilla Public License, version 2.0.
# If a copy of the Mozilla Public License, version 2.0 was not distributed with this file,
# you can obtain one at http://mozilla.org/MPL/2.0/.
# SPDX-License-Identifier: MPL-2.0
# This file is part of L2RPN Baselines, L2RPN Baselines a repository to host baselines for l2rpn competitions.
import os
import json
import math
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from grid2op.Agent import AgentWithConverter
from grid2op.Converter import IdToAct
from DoubleDuelingDQNConfig import DoubleDuelingDQNConfig as cfg
from DoubleDuelingDQN_NN import DoubleDuelingDQN_NN
from prioritized_replay_buffer import PrioritizedReplayBuffer
class DoubleDuelingDQN(AgentWithConverter):
    def __init__(self,
                 observation_space,
                 action_space,
                 name=__name__,
                 is_training=False):
        """Build the Double Dueling DQN agent.

        Parameters
        ----------
        observation_space :
            Grid2Op observation space; its flattened size defines the
            network input dimension.
        action_space :
            Grid2Op action space, converted to discrete action ids via
            the ``IdToAct`` converter set up by ``AgentWithConverter``.
        name : str
            Base name used for checkpoint / log files.
        is_training : bool
            When True, also allocate the replay buffer and target network
            (see ``_init_training``).
        """
        # Call parent constructor (installs the IdToAct converter)
        AgentWithConverter.__init__(self, action_space,
                                    action_space_converter=IdToAct)
        self.obs_space = observation_space

        # Shrink the discrete action set: keep only actions impacting a
        # small number of grid elements (predicate in _filter_action)
        self.action_space.filter_action(self._filter_action)

        # Second converter instance used to (a) persist the action list to
        # disk and (b) encode opponent actions as flat vectors
        self.action_path = "./allactions.npy"
        self.converter = IdToAct(self.action_space)
        self.converter.init_converter()
        self.converter.save(*os.path.split(self.action_path))
        self.all_actions = np.array(self.converter.all_actions).tolist()
        # Map action-vector (as a tuple) -> discrete action id
        self.all_acts_dict = {tuple(el.to_vect().tolist()): i for i, el in enumerate(self.all_actions)}

        # Store constructor params
        self.name = name
        self.num_frames = cfg.N_FRAMES
        self.is_training = is_training
        self.batch_size = cfg.BATCH_SIZE
        self.lr = cfg.LR

        # Declare required vars
        self.Qmain = None       # online Q-network
        self.obs = None         # last raw grid2op observation
        self.state = []         # last vectorized observation
        self.frames = []        # stack of the last num_frames states

        # Declare training vars (allocated in _init_training when training)
        self.per_buffer = None
        self.done = False
        self.frames2 = None
        self.epoch_rewards = None
        self.epoch_rewards_moving_avg = None
        self.losses = None
        self.epoch_alive = None
        self.Qtarget = None
        self.epsilon = 0.0
        
        # Vector encoding of the opponent action inferred for the current step
        self.opponent_action = None
        # History of all inferred opponent actions (one entry per step)
        self.all_opponent_actions = []
        # Vector encoding of the opponent action for the *next* observation
        self.opponent_next_action = None
        # History of all opponent next-actions (training only)
        self.all_opponent_next_actions = []
        
        # Number of detected opponent attacks (non do-nothing actions)
        self.count_non_do_nothing_opp_act = 0

        # Compute dimensions from intial spaces
        self.observation_size = self.obs_space.size_obs()
        self.action_size = self.action_space.size()
        # Size of a flat action vector for this grid — TODO confirm this
        # matches the environment's action vector length
        self.action_vect_size = 258

        # Load network graph
        self.Qmain = DoubleDuelingDQN_NN(self.action_size,
                                         self.observation_size,
                                         num_frames=self.num_frames,
                                         learning_rate=self.lr,
                                         learning_rate_decay_steps=cfg.LR_DECAY_STEPS,
                                         learning_rate_decay_rate=cfg.LR_DECAY_RATE)
        # Setup training vars if needed
        if self.is_training:
            self._init_training()
def _filter_action(self, action):
MAX_ELEM = 2
act_dict = action.impact_on_objects()
elem = 0
elem += act_dict["force_line"]["reconnections"]["count"]
elem += act_dict["force_line"]["disconnections"]["count"]
elem += act_dict["switch_line"]["count"]
elem += len(act_dict["topology"]["bus_switch"])
elem += len(act_dict["topology"]["assigned_bus"])
elem += len(act_dict["topology"]["disconnect_bus"])
elem += len(act_dict["redispatch"]["generators"])
if elem <= MAX_ELEM:
return True
return False
def _init_training(self):
self.epsilon = cfg.INITIAL_EPSILON
self.frames2 = []
self.epoch_rewards = []
self.epoch_rewards_moving_avg = []
self.losses = []
self.epoch_alive = []
self.per_buffer = PrioritizedReplayBuffer(cfg.PER_CAPACITY, cfg.PER_ALPHA)
self.Qtarget = DoubleDuelingDQN_NN(self.action_size,
self.observation_size,
num_frames = self.num_frames)
def _reset_state(self, current_obs):
# Initial state
self.obs = current_obs
self.state = self.convert_obs(self.obs)
self.done = False
def _reset_frame_buffer(self):
# Reset frame buffers
self.frames = []
if self.is_training:
self.frames2 = []
def _save_current_frame(self, state):
self.frames.append(state.copy())
if len(self.frames) > self.num_frames:
self.frames.pop(0)
def _save_next_frame(self, next_state):
self.frames2.append(next_state.copy())
if len(self.frames2) > self.num_frames:
self.frames2.pop(0)
def _adaptive_epsilon_decay(self, step):
ada_div = cfg.DECAY_EPSILON / 10.0
step_off = step + ada_div
ada_eps = cfg.INITIAL_EPSILON * -math.log10((step_off + 1) / (cfg.DECAY_EPSILON + ada_div))
ada_eps_up_clip = min(cfg.INITIAL_EPSILON, ada_eps)
ada_eps_low_clip = max(cfg.FINAL_EPSILON, ada_eps_up_clip)
return ada_eps_low_clip
    def _save_hyperparameters(self, logpath, env, steps):
        """Dump the training hyper-parameters (plus the reward template of
        `env`) to ``<name>-hypers.json`` inside `logpath`.

        Parameters
        ----------
        logpath : str
            Directory the json file is written to.
        env :
            Grid2Op environment; only its reward helper is inspected.
        steps : int
            Total number of iterations, stored under the "iter" key.
        """
        # Reward template actually used by the environment
        r_instance = env._reward_helper.template_reward
        hp = {
            "lr": cfg.LR,
            "lr_decay_steps": cfg.LR_DECAY_STEPS,
            "lr_decay_rate": cfg.LR_DECAY_RATE,
            "batch_size": cfg.BATCH_SIZE,
            "stack_frames": cfg.N_FRAMES,
            "iter": steps,
            "e_start": cfg.INITIAL_EPSILON,
            "e_end": cfg.FINAL_EPSILON,
            "e_decay": cfg.DECAY_EPSILON,
            "discount": cfg.DISCOUNT_FACTOR,
            "per_alpha": cfg.PER_ALPHA,
            "per_beta": cfg.PER_BETA,
            "per_capacity": cfg.PER_CAPACITY,
            "update_freq": cfg.UPDATE_FREQ,
            "update_hard": cfg.UPDATE_TARGET_HARD_FREQ,
            "update_soft": cfg.UPDATE_TARGET_SOFT_TAU,
            "reward": dict(r_instance)
        }

        hp_filename = "{}-hypers.json".format(self.name)
        hp_path = os.path.join(logpath, hp_filename)
        with open(hp_path, 'w') as fp:
            json.dump(hp, fp=fp, indent=2)
## Agent Interface
def convert_obs(self, observation):
li_vect= []
for el in observation.attr_list_vect:
v = observation._get_array_from_attr_name(el).astype(np.float32)
v_fix = np.nan_to_num(v)
v_norm = np.linalg.norm(v_fix)
if v_norm > 1e6:
v_res = (v_fix / v_norm) * 10.0
else:
v_res = v_fix
li_vect.append(v_res)
return np.concatenate(li_vect)
    def convert_act(self, action):
        """Map a discrete action id back to a grid2op action (delegates to
        the IdToAct converter installed by AgentWithConverter)."""
        return super().convert_act(action)
## Baseline Interface
    def reset(self, observation):
        """Baseline API: start a new episode from `observation`."""
        self._reset_state(observation)
        self._reset_frame_buffer()
def my_act(self, state, reward, done=False):
# Register current state to stacking buffer
self._save_current_frame(state)
# We need at least num frames to predict
if len(self.frames) < self.num_frames:
return 0 # Do nothing
# Infer with the last num_frames states
a, _ = self.Qmain.predict_move(np.array(self.frames), np.array(self.all_opponent_actions[-self.num_frames:]))
return a
def act(self, obs, reward, done):
self.obs = obs
# Store opponent action
self.store_opponent_action(obs)
transformed_observation = self.convert_obs(obs)
encoded_act = self.my_act(transformed_observation, reward, done)
return self.convert_act(encoded_act)
    def load(self, path):
        """Restore network weights from `path`; in training mode the target
        network is also hard-synchronized with the loaded weights."""
        self.Qmain.load_network(path)
        if self.is_training:
            self.Qmain.update_target_hard(self.Qtarget.model)
    def save(self, path):
        """Persist the online network weights to `path`."""
        self.Qmain.save_network(path)
# Store current opponents action
# This function is used as an alterantive way to retrieve the opponent action with using the info variable returned by the step function
# This function can be used when the agent is training or not
def store_opponent_action(self, obs):
# A Do_Nothing action
opponent_action = self.action_space({})
# Get all the powerline id that will require maintenance
maintenance = obs.time_next_maintenance
maintenance_powerline_id = [i for i in range(len(maintenance)) if maintenance[i] != -1]
# Retrive all the cooldown_duration for disconnected powerlines that are equal to 47 timesteps
cooldown_duration = obs.time_before_cooldown_line
cooldown_powerline_id = [i for i in range(len(cooldown_duration)) if cooldown_duration[i] == 47]
for pid in cooldown_powerline_id:
# Check if it is disconnected due to a maintenacance or an attack
if pid in maintenance_powerline_id and maintenance[pid] == 0:
cooldown_powerline_id.remove(pid)
else:
powerline_attacked = pid
opponent_action = self.action_space({"change_line_status": [int(powerline_attacked)]})
self.count_non_do_nothing_opp_act += 1
# Convert the opponent action to its vector representation
opp_act_as_vect = (self.converter() + opponent_action).to_vect()
self.opponent_action = opp_act_as_vect
self.all_opponent_actions.append(self.opponent_action)
# Store the opponent action using the info variable returned by the step() function which give information about the next observation
# This function can only be used during the training
def store_opponent_next_action(self, info):
opponent_action = self.action_space()
attack_duration = info["opponent_attack_duration"]
if attack_duration == 48:
powerline_attacked = np.where(info["opponent_attack_line"])[0]
# Let the opponent action be a powerline disconnection action of the powerline attacked
opponent_action = self.action_space({"change_line_status": [int(powerline_attacked)]})
# Convert the opponent action to its vector representation
opp_act_as_vect = (self.converter() + opponent_action).to_vect()
self.opponent_next_action = opp_act_as_vect
self.all_opponent_next_actions.append(self.opponent_action)
## Training Procedure
def train(self, env,
          iterations,
          save_path,
          num_pre_training_steps=0,
          logdir = "logs-train"):
    """Run the main Double-DQN training loop against the environment.

    Parameters
    ----------
    env : environment with ``reset()`` and ``step(act)`` (grid2op-style;
        ``step`` returns obs, reward, done, info).
    iterations : int
        Number of training steps performed after pre-training.
    save_path : str
        Directory where model checkpoints (``.h5``) are written.
    num_pre_training_steps : int
        Random-action steps used to fill the experience buffer before
        learning starts; raised automatically to at least
        ``batch_size * num_frames``.
    logdir : str
        Root directory for the TensorBoard summary writer.

    Returns
    -------
    tuple
        ``(epoch_rewards, epoch_rewards_moving_avg, losses)`` collected
        over the whole run.
    """
    # Make sure we can fill the experience buffer
    if num_pre_training_steps < self.batch_size * self.num_frames:
        num_pre_training_steps = self.batch_size * self.num_frames
    # Loop vars
    num_training_steps = iterations
    num_steps = num_pre_training_steps + num_training_steps
    self.epsilon = cfg.INITIAL_EPSILON
    alive_steps = 0
    total_reward = 0
    self.done = True
    step = 0
    # Create file system related vars
    logpath = os.path.join(logdir, self.name)
    os.makedirs(save_path, exist_ok=True)
    # NOTE(review): this modelpath is never used -- it is reassigned with a
    # step suffix before every save below.
    modelpath = os.path.join(save_path, self.name + ".h5")
    self.tf_writer = tf.summary.create_file_writer(logpath, name=self.name)
    self._save_hyperparameters(save_path, env, num_steps)
    # Training loop
    while step < num_steps:
        # Init first time or new episode
        if self.done:
            new_obs = env.reset() # This shouldn't raise
            self.reset(new_obs)
        if cfg.VERBOSE and step % 1000 == 0:
            print("Step [{}] -- Random [{}]".format(step, self.epsilon))
        # Save current observation to stacking buffer
        self._save_current_frame(self.state)
        # Store opponent current action (inferred from the observation)
        self.store_opponent_action(new_obs)
        # Choose an action: pure exploration during pre-training, then
        # epsilon-greedy; do-nothing until the frame buffer is full.
        if step <= num_pre_training_steps:
            a = self.Qmain.random_move()
        elif np.random.rand(1) < self.epsilon:
            a = self.Qmain.random_move()
        elif len(self.frames) < self.num_frames:
            a = 0 # Do nothing
        else:
            a, _ = self.Qmain.predict_move(np.array(self.frames), np.array(self.all_opponent_actions[-self.num_frames:]))
        # Convert it to a valid action
        act = self.convert_act(a)
        # Execute action
        new_obs, reward, self.done, info = env.step(act)
        new_state = self.convert_obs(new_obs)
        # if info["is_illegal"] or info["is_ambiguous"] or \
        # info["is_dispatching_illegal"] or info["is_illegal_reco"]:
        #     # if cfg.VERBOSE:
        #     print (a, info)
        # Store opponent next action (taken from the step info)
        self.store_opponent_next_action(info)
        # Save new observation to stacking buffer
        self._save_next_frame(new_state)
        # Save to experience buffer once a full frame stack is available.
        # NOTE(review): the opponent-action slices use a hard-coded 4 here
        # but self.num_frames elsewhere -- confirm num_frames == 4.
        if len(self.frames2) == self.num_frames:
            self.per_buffer.add(np.array(self.frames),
                                a, np.array(self.all_opponent_actions[-4:]),
                                reward,
                                np.array(self.frames2), np.array(self.all_opponent_next_actions[-4:]),
                                self.done)
        # Perform training when we have enough experience in buffer
        if step >= num_pre_training_steps:
            training_step = step - num_pre_training_steps
            # Decay chance of random action
            self.epsilon = self._adaptive_epsilon_decay(training_step)
            # Perform training at given frequency
            if step % cfg.UPDATE_FREQ == 0 and \
               len(self.per_buffer) >= self.batch_size:
                # Perform training
                self._batch_train(training_step, step)
                if cfg.UPDATE_TARGET_SOFT_TAU > 0.0:
                    tau = cfg.UPDATE_TARGET_SOFT_TAU
                    # Update target network towards primary network
                    self.Qmain.update_target_soft(self.Qtarget.model, tau)
            # Every UPDATE_TARGET_HARD_FREQ trainings, update target completely
            if cfg.UPDATE_TARGET_HARD_FREQ > 0 and \
               step % (cfg.UPDATE_FREQ * cfg.UPDATE_TARGET_HARD_FREQ) == 0:
                self.Qmain.update_target_hard(self.Qtarget.model)
        total_reward += reward
        # End of episode: record stats and reset the per-episode counters
        if self.done:
            self.epoch_rewards.append(total_reward)
            current_reward_moving_avg = sum(self.epoch_rewards)/len(self.epoch_rewards)
            self.epoch_rewards_moving_avg.append(current_reward_moving_avg)
            self.epoch_alive.append(alive_steps)
            if cfg.VERBOSE:
                print("Survived [{}] steps".format(alive_steps))
                print("Total reward [{}]".format(total_reward))
            alive_steps = 0
            total_reward = 0
        else:
            alive_steps += 1
        # Save the network every 1000 iterations
        if step > 0 and step % 1000 == 0:
            modelpath = os.path.join(save_path, self.name + str(step) +".h5")
            self.save(modelpath)
        # Iterate to next loop
        step += 1
        # Make new obs the current obs
        self.obs = new_obs
        self.state = new_state
    # Save model after all steps
    modelpath = os.path.join(save_path, self.name + str(step) +".h5")
    self.save(modelpath)
    print("Number of opponent action that are not do_nothing : {} ".format(self.count_non_do_nothing_opp_act))
    return self.epoch_rewards, self.epoch_rewards_moving_avg, self.losses
def _batch_train(self, training_step, step):
    """Run one gradient update on a prioritized batch (Double DQN).

    training_step: index of this update since learning started; used to
        trace and export the TF graph exactly once (first call).
    step: global environment step; used for the summary-logging cadence.
    """
    # Sample from experience buffer (prioritized experience replay)
    sample_batch = self.per_buffer.sample(self.batch_size, cfg.PER_BETA)
    s_batch = sample_batch[0]            # states at t
    a_batch = sample_batch[1]            # agent actions
    opp_a_batch = sample_batch[2]        # opponent actions at t
    r_batch = sample_batch[3]            # rewards
    s2_batch = sample_batch[4]           # states at t+1
    opp_next_a_batch = sample_batch[5]   # opponent actions at t+1
    d_batch = sample_batch[6]            # done flags
    w_batch = sample_batch[7]            # importance-sampling weights
    idx_batch = sample_batch[8]          # buffer indices (for priority update)
    # NOTE(review): this zero init is immediately overwritten by the
    # Qmain.predict below.
    Q = np.zeros((self.batch_size, self.action_size))
    input_s_size = self.observation_size * self.num_frames
    input_opp_size = self.action_vect_size * self.num_frames
    # Reshape frames to 1D
    input_s_t = np.reshape(s_batch, (self.batch_size, input_s_size))
    input_opp_t = np.reshape(opp_a_batch, (self.batch_size, input_opp_size))
    input_s_t_1 = np.reshape(s2_batch, (self.batch_size, input_s_size))
    input_opp_t_1 = np.reshape(opp_next_a_batch, (self.batch_size, input_opp_size))
    # Save the graph just the first time
    if training_step == 0:
        tf.summary.trace_on()
    # T Batch predict
    Q = self.Qmain.model.predict([input_s_t, input_opp_t], batch_size = self.batch_size)
    ## Log graph once and disable graph logging
    if training_step == 0:
        with self.tf_writer.as_default():
            tf.summary.trace_export(self.name + "-graph", step)
    # T+1 batch predict: Q1 from the online net (action selection),
    # Q2 from the target net (action evaluation)
    Q1 = self.Qmain.model.predict([input_s_t_1,input_opp_t_1], batch_size=self.batch_size)
    Q2 = self.Qtarget.model.predict([input_s_t_1,input_opp_t_1], batch_size=self.batch_size)
    # Compute batch Qtarget using Double DQN
    for i in range(self.batch_size):
        doubleQ = Q2[i, np.argmax(Q1[i])]
        Q[i, a_batch[i]] = r_batch[i]
        # Only bootstrap from the next state if the episode did not end
        if d_batch[i] == False:
            Q[i, a_batch[i]] += cfg.DISCOUNT_FACTOR * doubleQ
    # Batch train (weighted by the PER importance-sampling weights)
    loss = self.Qmain.train_on_batch([input_s_t, input_opp_t], Q, w_batch)
    self.losses.append(loss)
    # Update PER buffer priorities from the per-sample squared errors
    priorities = self.Qmain.batch_sq_error
    # Can't be zero, no upper limit
    priorities = np.clip(priorities, a_min=1e-8, a_max=None)
    self.per_buffer.update_priorities(idx_batch, priorities)
    # Log some useful metrics every even updates
    if step % (cfg.UPDATE_FREQ * 2) == 0:
        with self.tf_writer.as_default():
            mean_reward = np.mean(self.epoch_rewards)
            mean_alive = np.mean(self.epoch_alive)
            if len(self.epoch_rewards) >= 100:
                mean_reward_100 = np.mean(self.epoch_rewards[-100:])
                mean_alive_100 = np.mean(self.epoch_alive[-100:])
            else:
                mean_reward_100 = mean_reward
                mean_alive_100 = mean_alive
            tf.summary.scalar("mean_reward", mean_reward, step)
            tf.summary.scalar("mean_alive", mean_alive, step)
            tf.summary.scalar("mean_reward_100", mean_reward_100, step)
            tf.summary.scalar("mean_alive_100", mean_alive_100, step)
            tf.summary.scalar("loss", loss, step)
            tf.summary.scalar("lr", self.Qmain.train_lr, step)
        if cfg.VERBOSE:
            print("loss =", loss)
| [
"ahmedelherazy@hotmail.co.uk"
] | ahmedelherazy@hotmail.co.uk |
88955fc4240c920212bff289e57759c002aae596 | 37b71cd51fdbeb341bff9ced83f64bb7c678abd3 | /products/urls.py | d7bf35059f310d67834ba5a921944b06c361407b | [] | no_license | NunoMars/P8-Pur-Beurre | 30a18724dd9fd7dacf4ea09b771a3cb4fa8ca6e5 | 6d14dde1fc665ae59f617ee89e10be44df8c8de0 | refs/heads/master | 2022-12-14T01:44:38.287880 | 2021-01-20T22:34:49 | 2021-01-20T22:34:49 | 234,334,198 | 0 | 0 | null | 2022-12-08T11:47:19 | 2020-01-16T14:08:48 | JavaScript | UTF-8 | Python | false | false | 585 | py | from django.conf.urls import url
from django.urls import path
from . import views
# URL routes for the products app: each entry maps a URL pattern to a view.
# NOTE(review): descriptions below are inferred from the view/route names --
# confirm against the actual view implementations.
urlpatterns = [
    # Home page (views.index)
    url("home", views.index, name="home"),
    # Contact page
    url("contact", views.contact, name="contact"),
    # Product search endpoint
    url("search", views.search, name="search_product"),
    # List view for a product selected by numeric id
    url(r"^(?P<product>[0-9]+)/$", views.products_list, name="products_list"),
    # Detail view for a single product selected by numeric id
    url(
        r"^product_detail/(?P<product>[0-9]+)/$",
        views.product_view,
        name="product_detail",
    ),
    # User history page
    url("history", views.history, name="history"),
    # Legal notices ("mentions legales") page
    url("mentions_legales", views.mentions_legales, name="mentions_legales"),
]
| [
"nuno.ricardo.mars@gmail.com"
] | nuno.ricardo.mars@gmail.com |
class Node:
    """A search-tree node for A* pathfinding.

    Attributes:
        position: the state/coordinates this node represents.
        parent: the Node this one was expanded from (None for the start).
        g: cost of the path from the start node to this node.
        h: heuristic estimate of the remaining cost to the goal.
        f: total estimated cost, g + h, used to order the open set.
    """

    def __init__(self, position, parent, g, h):
        self.position = position
        self.parent = parent
        self.g = g
        self.h = h
        # f is computed once here; it will NOT track later changes to g or h.
        self.f = g + h

    def __lt__(self, other):
        # Order nodes by total estimated cost so they can be pushed
        # directly onto a heapq-based open set.
        return self.f < other.f

    def __repr__(self):
        return "Node(position={!r}, g={!r}, h={!r}, f={!r})".format(
            self.position, self.g, self.h, self.f)
| [
"agnetedjupvik@gmail.com"
] | agnetedjupvik@gmail.com |
aed1104508ade19aabfc431031cc8bc97095f554 | 5af3c4197ab9035ad4283d061329631f4e64eb87 | /venv/Scripts/pip-script.py | 2d164f6cc756d94ecbdf2e93fca15094860966d1 | [] | no_license | SudoAzek/ArkanoidFinalProject | dd797275ae929e6d1e4221a8ee9bbd2b2ddb27f1 | 15161860c31379a1b6237d3077eacb16d015ab7a | refs/heads/master | 2020-10-01T08:21:37.664190 | 2019-12-12T02:02:53 | 2019-12-12T02:02:53 | 227,499,022 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 419 | py | #!C:\Users\sudoa\PycharmProjects\ArkanoidProject\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
# Auto-generated console-script shim (setuptools easy-install entry point).
if __name__ == '__main__':
    # Strip a trailing "-script.py"/"-script.pyw"/".exe" suffix from argv[0]
    # so pip sees its canonical program name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    # Dispatch to pip's registered console entry point and exit with its
    # return code.
    sys.exit(
        load_entry_point('pip==19.0.3', 'console_scripts', 'pip')()
    )
| [
"sudoazek@gmail.com"
] | sudoazek@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.