seq_id stringlengths 4 11 | text stringlengths 113 2.92M | repo_name stringlengths 4 125 ⌀ | sub_path stringlengths 3 214 | file_name stringlengths 3 160 | file_ext stringclasses 18
values | file_size_in_byte int64 113 2.92M | program_lang stringclasses 1
value | lang stringclasses 93
values | doc_type stringclasses 1
value | stars int64 0 179k ⌀ | dataset stringclasses 3
values | pt stringclasses 78
values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
41244155673 | import streamlit as st
from sklearn.metrics import r2_score, mean_squared_error, mean_absolute_error
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
# Taken from the model and evaluation notebook
"""
Regression performance on train and test sets
"""
def regression_performance(X_train, y_train, X_test, y_test, pipeline):
    """Render evaluation metrics for the train split and the test split."""
    st.write("Model Evaluation")
    # Evaluate both splits with the same helper; only the heading differs.
    for heading, features, target in (("* Train Set", X_train, y_train),
                                      ("* Test Set", X_test, y_test)):
        st.write(heading)
        regression_evaluation(features, target, pipeline)
def regression_evaluation(X, y, pipeline):
    """Write R2, MAE, MSE and RMSE for the pipeline's predictions on (X, y)."""
    y_pred = pipeline.predict(X)
    # Compute MSE once and reuse it for the RMSE line.
    mse = mean_squared_error(y, y_pred)
    st.write('R2 Score:', r2_score(y, y_pred).round(3))
    st.write('Mean Absolute Error:', mean_absolute_error(y, y_pred).round(3))
    st.write('Mean Squared Error:', mse.round(3))
    st.write('Root Mean Squared Error:', np.sqrt(mse).round(3))
    st.write("\n")
"""
Regression plot evaluation
"""
def regression_evaluation_plots(
        X_train, y_train, X_test, y_test, pipeline, alpha_scatter=0.5):
    """Plot actual-vs-predicted scatter plots for the train and test sets.

    The red line is the identity (y = x); points near it indicate accurate
    predictions.  The figure is rendered into the Streamlit app.
    """
    # convert seaborn to 1-dimensional shape
    # assumes y_train / y_test are pandas objects (have .to_numpy) -- TODO confirm
    y_train = y_train.to_numpy().flatten()
    y_test = y_test.to_numpy().flatten()
    pred_train = pipeline.predict(X_train)
    pred_test = pipeline.predict(X_test)
    fig, axes = plt.subplots(nrows=1, ncols=2, figsize=(12, 6))
    # Left panel: train set.
    sns.scatterplot(x=y_train, y=pred_train, alpha=alpha_scatter, ax=axes[0])
    sns.lineplot(x=y_train, y=y_train, color='red', ax=axes[0])
    axes[0].set_xlabel("Actual")
    axes[0].set_ylabel("Predictions")
    axes[0].set_title("Train Set")
    # Right panel: test set.
    sns.scatterplot(x=y_test, y=pred_test, alpha=alpha_scatter, ax=axes[1])
    sns.lineplot(x=y_test, y=y_test, color='red', ax=axes[1])
    axes[1].set_xlabel("Actual")
    axes[1].set_ylabel("Predictions")
    axes[1].set_title("Test Set")
    st.pyplot(fig)
| Shida18719/heritage-housing-issues | src/machine_learning/evaluate_regression.py | evaluate_regression.py | py | 1,919 | python | en | code | 0 | github-code | 90 |
36856429495 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys, os
curdir = os.path.abspath(os.path.dirname(__file__))
sys.path = [os.path.dirname(curdir)] + sys.path
import logging, time
from loghog import LoghogHandler
def setup_logging():
    """Attach a LoghogHandler (with HMAC message signing) to the root logger."""
    logger = logging.getLogger()
    # If the server-side specifies a secret, you must provide it here as well.
    # If a secret is specified here, all messages are signed using HMAC.
    # Any messages with invalid signatures will be ignored by the server.
    # NOTE(review): the secret is hard-coded in this example script; a real
    # deployment should load it from configuration, not source code.
    handler = LoghogHandler('app-with-secret', secret='my-big-secret')
    handler.setFormatter(logging.Formatter('%(levelname)s - %(message)s'))
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
# Demo entry point: emit one signed log record per second, forever.
setup_logging()
log = logging.getLogger()
while True:
    log.info("That is one hot jalapño!")
    time.sleep(1)
| activefrequency/loghog-python | examples/message-signing.py | message-signing.py | py | 872 | python | en | code | 3 | github-code | 90 |
24207463160 | import io
import time
from picamera import PiCamera
import requests
server_url = 'https://46.0.1.2'
def sendDataToServer():
    """POST the latest camera frame and a temperature reading to the server."""
    frame = get_image()
    temp = get_temperature()
    # NOTE(review): the open file object is sent as a plain form field, not a
    # multipart file upload -- confirm the server expects this format.
    payload = {'image': frame, 'temp': temp}
    # headers = {'content_type': 'image/jpeg'}
    response = requests.post(url = server_url+'/putInfo', data = payload)
def get_image():
    """Capture a still frame to disk and return it opened for binary reading.

    Bug fix: the PiCamera instance was never closed.  The camera hardware is
    acquired exclusively, so a second call to this function would fail with
    a camera-in-use error; close it as soon as the capture is done.
    """
    camera = PiCamera()
    try:
        camera.capture('/home/pi/Desktop/image.jpg')
    finally:
        # Always release the camera, even if the capture raises.
        camera.close()
    return open('/home/pi/Desktop/image.jpg', 'rb')
def get_temperature():
    """Return the current temperature reading (hard-coded placeholder)."""
    temperature_reading = 30
    return temperature_reading
# Main loop: report a frame + temperature every 5 seconds.
while True:
    sendDataToServer()
    time.sleep(5)
| marcbenedi/CreatED2018 | r.py | r.py | py | 581 | python | en | code | 0 | github-code | 90 |
73250750377 | '''
Handle deployments from here.
'''
from lib.utils import git
from lib.utils.colorprinter import colorprint, print_with_spinner
from lib.anchor import Anchor
from lib.plugins.firebase import FirebasePlugin
from lib.services.builder_service import builder
from lib.services.firebase_service import Firebase
from lib.services.stdio_service import login_with_email, get_changelog, get_version
# User-facing status messages for the build step.
BUILDING_APK = 'Building APK. Please be patient..'
BUILD_SUCCESSFUL = '[✓] Built successfully'
class DeployService(Anchor):
    '''
    An Anchor class to deploy a project.
    While the bulk of the work is done by the plugins, this class
    does supplementary work.
    '''
    def __init__(self, release_type):
        super().__init__()
        # Register the Firebase plugin so apply_plugins() can dispatch to it.
        self.apply(FirebasePlugin())
        self.build = builder()
        self.release_type = release_type
    def delegate(self):
        ''' Public method used as the CLI hook. '''
        # Authenticate first; the deploy step presumably needs a logged-in
        # session -- TODO confirm against the Firebase service.
        login_with_email(Firebase().login_with_email)
        changelog = get_changelog()
        version = get_version()
        # Wrap the (slow) build call with a CLI spinner for feedback.
        spinning = print_with_spinner('GREEN', BUILDING_APK)
        build_details = spinning(self.build)()
        colorprint('GREEN')(BUILD_SUCCESSFUL)
        # Fan the deployment out to every registered plugin.
        self.apply_plugins('deploy_project',
                           version=version,
                           changelog=changelog,
                           branch=git.branch(),
                           deployerName=git.whoami(),
                           build_details=build_details,
                           release_type=self.release_type
                           )
| ypradhan/Harbor-CLI | lib/services/deploy_service.py | deploy_service.py | py | 1,583 | python | en | code | 0 | github-code | 90 |
1367243359 | from block_cd_lasso import BlockCDLasso
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import Lasso
from pandas import read_csv
# --- Demo 1: lasso regression on simulated correlated Gaussian data --------
print("Demo: Regression on simulated data")
# Equicorrelated covariance: 1 on the diagonal, 0.8 everywhere else.
cov_matrix = np.identity(50)
cov_matrix += 0.8
for i in range(50): cov_matrix[i,i] = 1
sim_data = np.vstack(np.random.multivariate_normal(
    mean=np.array(range(0, 50)),
    cov=cov_matrix,
    size=1000
))
# First 49 columns are (standardized) features; the last column is the response.
X = StandardScaler().fit_transform(sim_data[:, 0:49])
y = sim_data[:, 49]
model = BlockCDLasso(0.01, X, y)
print("Starting coordinate descent")
betas, beta_hist, objective_hist = model.fit(max_cycles=100, n_blocks=5, pool_size=5, optimize=False)
print("Objective descent history: %s\n" % objective_hist)
# alpha is scaled by the sample count (1000) -- presumably to match the
# normalization in this project's objective; TODO confirm.
skmodel = Lasso(alpha=0.01 * 1000, fit_intercept=False)
skmodel.fit(X, y)
print("Difference in coefficients between this approach and scikit: %s\nMean absolute difference: %f\n" %
      (betas - skmodel.coef_, np.mean(np.abs(betas - skmodel.coef_))))
# --- Demo 2: classification on the Spam dataset ----------------------------
print("\n\n==================\n\nDemo: classification on the Spam dataset")
spam = read_csv("data/spam.csv")
last_col = spam.columns[-1]
Y = (spam[last_col] * 2)-1 # convert to 1/-1
spam = spam.drop(last_col, axis=1)
stdscaler = StandardScaler().fit(spam)
X = stdscaler.transform(spam)
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.2)
model = BlockCDLasso(1e-4, X_train, y_train)
betas, beta_hist, objective_hist = model.fit(max_cycles=60, n_blocks=4, pool_size=4, optimize=False)
print("Objective history: %s" % objective_hist)
# Sign of the linear score gives the predicted class label (+1 / -1).
predictions = np.array([1 if x > 0 else -1 for x in BlockCDLasso.predict(X_test, betas)])
print("Classification accuracy on holdout set: %f" % (predictions == y_test).mean())
| jacobw125/uw-data-558 | polished_code_release/demo.py | demo.py | py | 1,769 | python | en | code | 1 | github-code | 90 |
33464659689 | import re
from ..item_fetching.search_config import SUPPORTED_CURRENCIES
# Tuple of supported currency names (first element of each config pair).
CURRENCY_NAMES = tuple([curr for (curr, _) in SUPPORTED_CURRENCIES])
# Type alias: a currency is identified by its name string.
CURRENCY_NAME = str
# CURRENCIES = Dict[CURRENCY_NAME, 'Currency']
# CI_INFO = Dict[CURRENCY_NAME, Dict[CURRENCY_NAME, float]]
# CI_INFO_DIR = r".\exchange_rates"
# CI_INFO_PATH_TEMPLATE = rf"{CI_INFO_DIR}\currencies_info_"
# CI_INFO_PATH = f"{CI_INFO_PATH_TEMPLATE}{date.today()}.json"
# We are checking multiple objects in a loop, so
# it is more efficient to compile REs in advance.
# Each pattern matches the currency symbol, ISO code, or a spelled-out name.
EUR_RE = re.compile(r"(€|eur.*)", re.IGNORECASE)
POUND_RE = re.compile(r"(£|.*pound.*|gbp)", re.IGNORECASE)
CZ_CROWN_RE = re.compile(r"kč|czk|czech crown", re.IGNORECASE)
RU_RE = re.compile(r".*rub.*", re.IGNORECASE)
# WEBSITE_NAME = ITEM_NAME = str
# PRICE = float
# WEBSITE_ITEM = Tuple[List[PRICE], ITEM_NAME, WEBSITE_NAME]
# WEBSITE_ITEMS = List[WEBSITE_ITEM]
| ddrddrr/clothing_searcher | web_app/price_checker_web_app/main_app/backend/currency_processing/currency_proc_config.py | currency_proc_config.py | py | 898 | python | en | code | 0 | github-code | 90 |
class Pessoa:
    """Represents a person with a name (nome) and an age (idade)."""

    def __init__(self, nome, idade):
        # Keep the constructor arguments as public attributes.
        self.nome = nome
        self.idade = idade
class Aluno(Pessoa):
    """A student: a Pessoa extended with an enrollment number (matricula)."""

    def __init__(self, nome, idade, matricula):
        # Delegate the shared fields to the parent class.
        super().__init__(nome, idade)
        self.matricula = matricula
# Demo: instantiate the base class and the subclass and print their fields.
pessoa = Pessoa("João", 30)
print(f"Nome: {pessoa.nome}, Idade: {pessoa.idade}")
aluno = Aluno("Maria", 20, "2023-123")
print(f"Nome: {aluno.nome}, Idade: {aluno.idade}, Matrícula: {aluno.matricula}")
| JotaXDr/ExOrientadoObj2 | Ex1.py | Ex1.py | py | 451 | python | pt | code | 0 | github-code | 90 |
35501960625 | from time import sleep
import matplotlib.pyplot as plt
import numpy as np
def go_to_sleep(seconds=20):
    """Pretend to sleep for *seconds*, printing a snore until done or Ctrl-C."""
    print("The kernel is going to sleep...")
    elapsed = 0
    try:
        while elapsed < seconds:
            # Snore on every fifth tick, otherwise print a plain dot.
            print("zzz." if elapsed % 5 == 0 else ".", end="")
            sleep(1)
            elapsed += 1
        print()
    except KeyboardInterrupt:
        print()
        print(f"You woke the kernel after {elapsed} seconds.")
    finally:
        print("The kernel is awake.")
def _plot_residuals(ax, y):
    """Draw residuals *y* (with unit error bars) plus a zero reference line on *ax*."""
    ax.errorbar(
        np.arange(y.size),
        y,
        yerr=1,
        fmt="o",
        color="green",
        capsize=2,
        elinewidth=1.5,
        ecolor="black",
        capthick=1.5,
        markeredgecolor="black",
        label="data",
        zorder=0,
    )
    # Residuals are relative to the fit, so the "best-fit curve" is just y = 0.
    ax.axhline(
        0, color="r", linestyle="--", linewidth="2.0", label="best-fit curve", zorder=1
    )
def residuals_examples(n_points=50):
    """Show three synthetic residual plots: trending, oscillating, structureless."""
    fig, (ax1, ax2, ax3) = plt.subplots(3, 1, sharex=True)
    ax1.set_ylabel("Residuals")
    ax2.set_ylabel("Residuals")
    ax3.set_ylabel("Residuals")
    ax3.set_xlabel("x")
    # Shared noise term (standard normal shifted by -0.5) used by all panels.
    y0 = np.random.normal(size=n_points) - 0.5
    ax1.set_title("linearly increasing residuals")
    _plot_residuals(ax1, y0 + np.linspace(-3, 3, n_points))
    ax2.set_title("oscillating residuals")
    _plot_residuals(ax2, y0 + 3 * np.sin(np.linspace(-10, 10, n_points)))
    ax3.set_title("no obvious structure")
    _plot_residuals(ax3, y0)
    fig.tight_layout()
    plt.show()
if __name__ == "__main__":
    # Bug fix: this module defines residuals_examples(), not
    # residuals_with_structure() -- the old call raised NameError.
    residuals_examples()
| marshrossney/percolation | p1b-experiment/utils.py | utils.py | py | 1,612 | python | en | code | 0 | github-code | 90 |
10252542007 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 14 01:55:47 2020
@author: janibasha
"""
import streamlit as st
import pickle
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import Tokenizer
from tensorflow.keras.preprocessing.sequence import pad_sequences
def predict(message):
    """Score *message* with the saved LSTM model; return the raw model output.

    NOTE(review): the model and tokenizer are re-loaded from disk on every
    call, which is slow; consider caching them at module level.
    """
    model=load_model('b_lstm.h5')
    with open('tokenizer.pickle', 'rb') as handle:
        tokenizer = pickle.load(handle)
    # Tokenize and pad to the fixed input length (500) the model expects.
    x_1 = tokenizer.texts_to_sequences([message])
    x_1 = pad_sequences(x_1, maxlen=500)
    predictions = model.predict(x_1)[0][0]
    return predictions
st.title("Hotel Reviews Sentiment Classifier ")
hide_streamlit_style = """
<style>
#MainMenu {visibility: hidden;}
footer {visibility: hidden;}
</style>
"""
st.markdown(hide_streamlit_style, unsafe_allow_html=True)
import base64
@st.cache(allow_output_mutation=True)
def get_base64_of_bin_file(bin_file):
    """Read *bin_file* and return its contents as a base64 text string."""
    with open(bin_file, 'rb') as handle:
        raw_bytes = handle.read()
    encoded = base64.b64encode(raw_bytes)
    return encoded.decode()
def set_png_as_page_bg(png_file):
    """Set *png_file* as the full-page background by injecting inline CSS."""
    bin_str = get_base64_of_bin_file(png_file)
    # The image is embedded as a base64 data URI so no static route is needed.
    page_bg_img = '''
    <style>
    body {
    background-image: url("data:image/png;base64,%s");
    background-size: cover;
    }
    </style>
    ''' % bin_str
    st.markdown(page_bg_img, unsafe_allow_html=True)
    return
set_png_as_page_bg('background.png')
message = st.text_area("Please Give Us Your Hotel Experience")
if st.button("Analyze"):
    with st.spinner('Analyzing the text …'):
        prediction=predict(message)
        # The score is treated as the probability of a NEGATIVE review:
        # > 0.6 -> negative, < 0.4 -> positive, otherwise inconclusive.
        if prediction > 0.6:
            st.error("Negative review with {:.2f} confidence".format(prediction))
        elif prediction <0.4:
            st.success("Positive review with {:.2f} confidence".format(1-prediction))
            st.balloons()
        else:
            st.warning("Not sure! Try to add some more words")
| jani-excergy/Hotel_senti | app.py | app.py | py | 2,026 | python | en | code | 1 | github-code | 90 |
39533571225 |
import numpy
import pickle
# File extension used for serialized label data.
extLabelData = "pkl"

# Keys for ROI attribute flags and for the pickled dictionary layout.
ID_DONT_CARE = 'DontCare'
ID_DOUBLE_ROOT = 'DoubleRoot'
ID_COLUMN = "Column"
ID_ROW = "Row"
ID_MAP_PIXEL2REGIONID = "MapPixel2RegionId"
ID_LIST_REGION = "ListRegion"


class ROIData:
    """Holds the attributes of the ROI with a given id."""

    def __init__(self, idROI, listRegionId):
        self.idROI = idROI
        self.listRegionId = listRegionId
        # Every ROI starts with both attribute flags cleared.
        self.dictAttribute = {ID_DONT_CARE: False, ID_DOUBLE_ROOT: False}
'''
LabelData 紀錄圖片標記結果
'''
class LabelData:
    """Image labelling result: a per-pixel region-id map plus ROI metadata.

    Attributes:
        cntImageColumn: image width in pixels.
        cntImageRow: image height in pixels.
        mapPixel2RegionId: int32 array of shape (rows, columns) mapping each
            pixel to a region id.
        listROI: list of ROIData entries describing the labelled regions.
    """

    def __init__(self, cntImageColumn, cntImageRow):
        self.cntImageColumn = cntImageColumn
        self.cntImageRow = cntImageRow
        self.mapPixel2RegionId = numpy.zeros((cntImageRow, cntImageColumn), numpy.int32)
        self.listROI = []

    def ApplyReIdMap(self, mapReId):
        """Rewrite every ROI's region ids through mapReId (old id -> list of new ids)."""
        for i in range(len(self.listROI)):
            roiCurrent = self.listROI[i]
            listRegionId = []
            for j in range(len(roiCurrent.listRegionId)):
                listRegionId.extend(mapReId[roiCurrent.listRegionId[j]])
            roiCurrent.listRegionId = listRegionId

    def LoadFromFile(self, path):
        """Load a previously saved label file; prints a warning and aborts on mismatch."""
        # `with` guarantees the handle is closed even if unpickling raises
        # (the old code leaked the handle on exceptions).
        with open(path, "rb") as fileData:
            dictData = pickle.load(fileData)
        # extract from dict
        if ID_COLUMN in dictData and ID_ROW in dictData:
            cntImageColumn = dictData[ID_COLUMN]
            cntImageRow = dictData[ID_ROW]
            if self.cntImageColumn != cntImageColumn or self.cntImageRow != cntImageRow:
                print("image width and height do not match, label data broken!")
                return
        else:
            print("image width not found, label data broken!")
            return
        if ID_MAP_PIXEL2REGIONID in dictData:
            self.mapPixel2RegionId = dictData[ID_MAP_PIXEL2REGIONID]
        else:
            # Typo fix: "founrd" -> "found" in the warning messages.
            print("region id record not found, label data broken!")
        if ID_LIST_REGION in dictData:
            self.listROI = dictData[ID_LIST_REGION]
        else:
            print("region attribute record not found, label data broken!")

    def SaveToFile(self, path):
        """Serialize this label data to *path* with pickle."""
        # zip into dict
        dictData = {ID_COLUMN: self.cntImageColumn, ID_ROW: self.cntImageRow,
                    ID_MAP_PIXEL2REGIONID: self.mapPixel2RegionId,
                    ID_LIST_REGION: self.listROI}
        with open(path, "wb") as fileData:
            pickle.dump(dictData, fileData)
| moooonbird/PanopticFPN | LabelData.py | LabelData.py | py | 2,542 | python | en | code | 1 | github-code | 90 |
21806280307 | # import nocsmtranslator
# print(nocsmtranslator.translateForMe("en","th","chicken eat a huge cat"))
# print(nocsmtranslator.translateForMe("th","en","ไก่จิกเด็กตายบนปากโอ่ง"))
# -*- coding: utf-8 -*-
import http.client, urllib.parse, uuid, json
# **********************************************
# *** Update or verify the following values. ***
# **********************************************
# Replace the subscriptionKey string value with your valid subscription key.
# NOTE(review): an API key is committed in source -- it should be revoked and
# loaded from an environment variable or config file instead.
subscriptionKey = '75a648040ae949f594acc7b25221a622'
host = 'api.cognitive.microsofttranslator.com'
path = '/translate?api-version=3.0'
# Translate to German and Italian.
# NOTE(review): the comment above is stale -- "&to=en" requests English only.
params = "&to=en";
text = 'สวัสดีครับ'
def translate (content):
    """POST *content* (JSON-encoded bytes) to the Translator API; return the raw body."""
    headers = {
        'Ocp-Apim-Subscription-Key': subscriptionKey,
        'Content-type': 'application/json',
        # Unique trace id helps correlate requests in service-side logs.
        'X-ClientTraceId': str(uuid.uuid4())
    }
    conn = http.client.HTTPSConnection(host)
    conn.request ("POST", path + params, content, headers)
    response = conn.getresponse ()
    return response.read ()
# Build the request body, call the API, and pretty-print the JSON reply.
requestBody = [{
    'Text' : text,
}]
content = json.dumps(requestBody, ensure_ascii=False).encode('utf-8')
result = translate (content)
# Note: We convert result, which is JSON, to and from an object so we can pretty-print it.
# We want to avoid escaping any Unicode characters that result contains. See:
# https://stackoverflow.com/questions/18337407/saving-utf-8-texts-in-json-dumps-as-utf8-not-as-u-escape-sequence
output = json.dumps(json.loads(result), indent=4, ensure_ascii=False)
print (output)
12215720225 | #!/usr/bin/env python
#
# This script uses ffmpeg to extract frames from video files in a directory
#
import argparse
import os
import subprocess
import sys
# Command-line interface.
parser = argparse.ArgumentParser()
parser.add_argument(
    "--size_limit",
    type=int,
    default=1024 * 1024 * 32,  # 32 MiB
    help="Ignore files above this size",
)
parser.add_argument(
    "--frame_limit",
    type=int,
    default=256,
    help="Ignore files with more than these many frames",
)
parser.add_argument(
    "--suffix",
    default='"_%04d.png"',
    help="Filename suffix for ffmpeg (file type can be changed here)",
)
parser.add_argument("--output_dir", default="./", help="Place to dump the images")
parser.add_argument("dir", help="Directory to scan")
args = parser.parse_args()
# NOTE(review): `files` is never used in this script -- likely leftover.
files = []
def get_num_frames(filepath):
    """Return the number of video frames in *filepath* using ffprobe.

    Returns 0 when ffprobe produces no output or a non-numeric value
    (e.g. "N/A" for streams whose frames it cannot count) -- the old code
    raised ValueError on such output and aborted the whole scan.
    """
    get_frames_cmd = [
        "ffprobe",
        "-v",
        "error",
        "-count_frames",
        "-select_streams",
        "v:0",
        "-show_entries",
        "stream=nb_read_frames",
        "-of",
        "default=nokey=1:noprint_wrappers=1",
        filepath,
    ]
    result = subprocess.run(get_frames_cmd, stdout=subprocess.PIPE)
    output = result.stdout.decode("utf-8").strip()
    if not output:
        return 0
    try:
        return int(output.split("\n")[0])
    except ValueError:
        # Non-numeric first line (e.g. "N/A"): treat as zero frames.
        return 0
def extract_frames(filepath, output_dir, output_format):
    """Run ffmpeg to dump every frame of *filepath* into *output_dir*."""
    command = ["ffmpeg", "-i", filepath, output_dir + "/" + output_format]
    subprocess.run(command, stdout=subprocess.PIPE)
# Walk the target directory and extract frames from every small-enough file.
for filename in sorted(os.listdir(args.dir)):
    filepath = args.dir + "/" + filename
    if (
        os.path.getsize(filepath) < args.size_limit
        and get_num_frames(filepath) < args.frame_limit
    ):
        print("Extracting frames from " + filepath)
        extract_frames(filepath, args.output_dir, filename.split(".")[0] + args.suffix)
| alvarop/scripts | frame_extractor/frame_extractor.py | frame_extractor.py | py | 1,919 | python | en | code | 0 | github-code | 90 |
4068474104 | """
Imports driver and shipment data from the json formatted files drivers.json and
shipments.json and inserts the data into the driver and shipment tables in the
geospatial dataabase maship. The files live in ./data directory.
"""
import os
from json import loads as json_loads
import django
from django.contrib.gis.geos import Point
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'maship.maship.settings')
django.setup()
from api.models import Driver, Shipment
def import_data():
    """
    Reads json files from ./data directory and imports entries
    into Driver and Shipment tables in the database.

    The two files share one layout (id -> {'coordinates': {...}}), so the
    per-file work is factored into _import_entities; the message templates
    preserve the original console output exactly.
    """
    _import_entities('./data/drivers.json', Driver, 'driverId',
                     'saved driver {0}, {1} {2}')
    _import_entities('./data/shipments.json', Shipment, 'shipmentId',
                     "saved shipment {0} {1} {2}")


def _import_entities(filename, model, id_field, message):
    """Load one json file and save each entry as *model* (Driver or Shipment).

    Args:
        filename: path to the json file to read.
        model: Django model class to instantiate (Driver or Shipment).
        id_field: name of the model's id keyword argument.
        message: format template for the per-row progress print.
    """
    data = ''
    with open(filename) as _f:
        data = _f.read()
    if data:
        json_data = json_loads(data)
        for entity_id, entity_data in json_data.items():
            coordinates = entity_data['coordinates']
            lat = coordinates['latitude']
            lon = coordinates['longitude']
            # GeoDjango Point takes (x, y) == (lon, lat) order.
            kwargs = {
                id_field: entity_id,
                'lat': lat,
                'lon': lon,
                'point': Point(lon, lat)
            }
            model(**kwargs).save()
            print(message.format(entity_id, lat, lon))
if __name__ == "__main__":
    # Allow running this module directly as a one-off ETL script.
    import_data()
| bartelby/maship | etl/import_data.py | import_data.py | py | 2,128 | python | en | code | 0 | github-code | 90 |
def solution(numbers, hand):
    """Return the 'L'/'R' thumb sequence for typing *numbers* on a phone keypad.

    Keys 1/4/7 are always pressed with the left thumb and 3/6/9 with the
    right one.  Middle-column keys (2/5/8/0) go to whichever thumb is closer
    by Manhattan distance; ties go to the dominant *hand* ('left'/'right').

    Improvement over the original: key coordinates are looked up in a dict
    instead of re-scanning the 4x3 keypad with nested loops for every digit,
    and the duplicated branch bodies are collapsed.  Behavior is unchanged.
    """
    # Keypad coordinates (row, column); -1 stands for '*', -2 for '#'.
    position = {
        1: (0, 0), 2: (0, 1), 3: (0, 2),
        4: (1, 0), 5: (1, 1), 6: (1, 2),
        7: (2, 0), 8: (2, 1), 9: (2, 2),
        -1: (3, 0), 0: (3, 1), -2: (3, 2),
    }
    cur_left = -1   # left thumb starts on '*'
    cur_right = -2  # right thumb starts on '#'
    answer = ''
    for number in numbers:
        if number in (1, 4, 7):
            cur_left = number
            answer += "L"
        elif number in (3, 6, 9):
            cur_right = number
            answer += "R"
        else:
            row, col = position[number]
            lrow, lcol = position[cur_left]
            rrow, rcol = position[cur_right]
            left_distance = abs(row - lrow) + abs(col - lcol)
            right_distance = abs(row - rrow) + abs(col - rcol)
            # Closer thumb wins; on a tie the dominant hand is used.
            if (left_distance < right_distance
                    or (left_distance == right_distance and hand == "left")):
                cur_left = number
                answer += "L"
            else:
                cur_right = number
                answer += "R"
    return answer
def main():
    """Run solution() on a long sample input and print the thumb sequence."""
    numbers = [1, 3, 4, 5, 0, 2, 1, 5, 0, 2, 1, 2, 1, 0, 5, 9, 3, 4, 5, 0, 2, 1, 0, 5, 9, 5,1, 3, 4, 5, 0, 2, 1, 0, 5, 9, 5,1, 3, 4, 5, 0, 2, 1, 0, 5, 9, 5]
    hand = "right"
    print(solution(numbers,hand))
main()
def main():
    """Read N, A, B from stdin and count positions falling in the 'A' part
    of repeating (A+B)-length cycles within the first N positions."""
    N, A, B = map(int, input().split())
    full_cycles, remainder = divmod(N, A + B)
    # Each complete cycle contributes A; the partial tail contributes at most A.
    return full_cycles * A + min(remainder, A)
print(main())
| Aasthaengg/IBMdataset | Python_codes/p02754/s273363319.py | s273363319.py | py | 201 | python | en | code | 0 | github-code | 90 |
N=int(input())
ans=0
reject=0
# NOTE(review): A is never used -- likely leftover.
A=[]
# Find the smallest i with 1+2+...+i >= N; dropping the single value
# (sum - N) from 1..i leaves numbers summing exactly to N.
for i in range(1,N+1):
    ans+=i
    if ans>=N:
        reject=ans-N
        for j in range(1,i+1):
            if j!=reject:
                print(j)
        break
break | Aasthaengg/IBMdataset | Python_codes/p03910/s596461328.py | s596461328.py | py | 200 | python | en | code | 0 | github-code | 90 |
71716356777 | from socket import socket, AF_INET, SOCK_STREAM
# One-shot TCP server: accept a single connection, read one message,
# reply with its uppercase form, then exit.
serverPort = 12000
serverSocket = socket(AF_INET, SOCK_STREAM)
serverSocket.bind(('',serverPort))
serverSocket.listen(1)
print ('Server ready')
connectionSocket, addr = serverSocket.accept()
print ("accepted connection")
sentence = connectionSocket.recv(1024).decode()
print ("Received message from " + str(addr[0]) + ": " + sentence)
print (addr)
capitalizedSentence = sentence.upper()
# NOTE(review): sendto() with an address on a connected TCP socket is
# unusual -- send()/sendall() is the conventional call here; confirm.
connectionSocket.sendto(capitalizedSentence.encode(), addr)
connectionSocket.close()
| asuradev99/Python_Labs | TCP_Server.py | TCP_Server.py | py | 533 | python | en | code | 0 | github-code | 90 |
#import operator
# Read n (name, value) pairs; print their original 1-based input indices
# ordered by name ascending, then by value descending within each name.
n = int(input())
l = set()            # distinct names
cnt = 1
d = {}               # (name, value) -> original 1-based input index
o = {}               # name -> list of values seen for that name
for i in range(n):
    s,p = input().split()
    p = int(p)
    d[(s, p)] = cnt
    if s in o:
        x = o[s]
        x.append(p)
        o[s] = x
    else:
        o[s] = [p]
    l.add(s)
    cnt = cnt+1
w = list(l)
w.sort()
k = []
#print(" ")
for i in w:
    m = o[i]
    m.sort(reverse = True)
    for j in m:
        #print(i,j,d[(i,j)])
        k.append(d[(i,j)])
for h in k:
    print(h)
32411551584 | from flask import Flask, render_template, request, redirect, url_for, flash, Response, make_response, json
from Psycopg2_functions import *
from Web_projects import geocode
from Web_projects import getgroup
from dashapp import create_dash_app
app = Flask(__name__)
# NOTE(review): hard-coded secret key in source; load it from config/env in production.
app.secret_key = b'_5#y2L"F4Q8z\n\xec]/'
@app.route("/", methods=['GET', 'POST'])
def Home():
    # Landing page.
    return render_template('home.html')
@app.route("/help")
def help():
    return "<h1>GNU</h1>"
@app.route("/dash")
def dashh():
    # Embed the rendered Dash app's HTML inside a Flask template.
    dash_app = create_dash_app(app)
    dash_app_html = dash_app.index()
    return render_template("dashapp.html", dash_app=dash_app_html)
@app.route("/projects")
def projects():
    return render_template('projects.html', posts=list_posts(pgconnect()))
@app.route("/links")
def links():
    return render_template('links.html', posts=list_posts(pgconnect()))
@app.route("/blog", methods=['GET','POST'])
def blog():
    # Blog index: posts plus their comments, both loaded from Postgres.
    return render_template('blog.html', posts=list_posts(pgconnect()), post_comments=list_post_comments(pgconnect()))
@app.route("/add_new_post", methods=['GET','POST'])
def add_new_post():
    """Create a blog post from the submitted form and flash the outcome."""
    if request.method == "POST":
        # getting input with name = fname in HTML form
        author = request.form.get("author")
        title = request.form.get("title")
        email = request.form.get("email")
        content = request.form.get("content")
        post_status = add_post(author,title, content, email, pgconnect())
        # The flashed value is used as a status category in the template.
        if post_status == "uniqueerror":
            flash("danger")
        elif post_status == "success":
            flash("success")
    return redirect(url_for("blog"))
@app.route("/add_new_comment", methods=['GET','POST'])
def add_new_comment():
    """Attach a comment (from form data) to the post identified by post_id."""
    if request.method == "POST":
        # getting input with name = fname in HTML form
        author = request.form.get("author")
        email = request.form.get("email")
        content = request.form.get("comment_content")
        print("Added comment")
        post_id = request.form.get("post_id")
        print("Added comment")
        add_comment(post_id, author, content, email, pgconnect())
    return redirect(url_for("blog"))
@app.route("/delete", methods=['GET','POST'])
def delete_post():
    """Delete the post named in the query string and flash the outcome."""
    title = request.args.get("title")
    post_status = psdelete_post(title, pgconnect())
    if post_status == "danger":
        flash("deletedanger")
    elif post_status == "success":
        flash("deletesuccess")
    return redirect(url_for("blog"))
@app.route("/edit", methods=['GET','POST'])
def edit_post():
    """GET: jump to a post's anchor for inline editing; POST: return to the blog."""
    if request.method == 'GET':
        title = request.args.get("title")
        # The post title doubles as the HTML anchor id on the blog page.
        return redirect(f'{url_for("blog")}#{title}')
    elif request.method == 'POST':
        return redirect(f'{url_for("blog")}')
@app.route('/teamscramble', methods=['GET','POST'])
def teamscramble():
    return render_template('teamscramble.html')
@app.route("/calculate_groups", methods=['GET','POST'])
def calculate_groups():
    """Split the submitted names into the requested number of groups."""
    if request.method == 'POST':
        groupstring = request.form.get("people")
        groupnumber = int(request.form.get("groupnumber"))
        groups = getgroup.getgroups(groupstring,groupnumber)
        return render_template('teamscramble.html', groups=groups)
# Here follow temperature data items.
@app.route("/temperature", methods=['GET','POST'])
def temperature():
    return render_template('temperature.html')
@app.route("/get_temperature", methods=['GET','POST'])
def get_temperature():
    """Geocode the submitted location and show its station's temperature history."""
    if request.method == 'POST':
        location = request.form.get("location")
        # Convert the free-text location into coordinates first.
        x = geocode.getlocation(location)
        temperatures, timestamps,station_name = list_temps(*x)
        print('\n', temperatures, '\n', timestamps,'\n')
        return render_template('temperature.html', temperatures = temperatures, timestamps = timestamps, station_name = station_name)
    else:
        return render_template('temperature.html')
@app.route("/workout", methods=["GET","POST"])
def workout():
    """Workout dashboard; optionally shows details for a selected exercise."""
    if request.method == 'POST':
        return render_template("workout.html")
    elif request.method =='GET':
        selected_exercise = request.args.get("exercise")
        selexname = selected_exercise
        if selected_exercise: selected_exercise = select_single_exercise(selected_exercise)
        return render_template('workout.html', exercise_name=selexname, selected_exercise=selected_exercise, exercises=select_exercises(),workouts=select_workouts(), sessions = list_sessions())
@app.route("/add_workout",methods=["GET","POST"])
def add_workout():
    """POST: persist submitted sets/reps/weights; GET: show a session's exercises."""
    if request.method == "POST":
        weights = request.form.getlist("weight")
        reps = request.form.getlist("reps")
        exercises = request.form.getlist("exercise")
        sets = request.form.getlist("sets")
        workout=[]
        # The four form lists are parallel: one entry per exercise row.
        for i in range(len(weights)):
            exercise = {
                "exercise_name": f"{exercises[i]}",
                "repetitions": reps[i],
                "sets": sets[i],
                "weight": weights[i]
            }
            workout.append(exercise)
        print(workout)
        psadd_workout(workout)
        return redirect(url_for('workout'))
    elif request.method =="GET":
        name=request.args.get("name")
        exercises = get_session(name)
        return render_template("workout.html",sessionexercises=exercises, sessions = list_sessions(), name=name)
@app.route("/add_session", methods=["POST","GET"])
def add_session():
    """Create a named workout session from a list of chosen exercises."""
    if request.method =="POST":
        session_name = request.form.get("session_name")
        exercise_list = request.form.getlist("exercise")
        # Reject empty submissions instead of writing a blank session.
        if not session_name or not exercise_list: return render_template("workout.html")
        psadd_session(session_name,exercise_list)
        return redirect(url_for("workout"))
    elif request.method =="GET":
        return render_template("workout.html", exercises =select_exercises(), sessions = list_sessions(), add_session=True)
@app.route("/add_exercise", methods=["POST"])
def add_exercise():
    """Add a single exercise name to the catalogue."""
    if request.method=="POST":
        exercise = request.form.get("addexercise")
        print(exercise)
        pgadd_exercise(exercise)
    return redirect(url_for("add_session"))
@app.route("/login", methods=["POST","GET"])
def login():
    """Log a user in and store id/username in plain cookies.

    NOTE(review): credentials live in unsigned cookies with no expiry or
    secure flags -- consider Flask sessions or signed cookies instead.
    """
    if request.method=="POST":
        username = request.form.get("username")
        password = request.form.get("password")
        # NOTE(review): `or` lets a short username pass when the password is
        # long (and vice versa); `and` was probably intended -- confirm.
        if len(username)>3 or len(password)>3:
            try:
                mylogin = pglogin(username,password)
                resp = make_response(redirect(url_for('Home')))
                print(mylogin[0],mylogin[1])
                resp.set_cookie('userID', str(mylogin[0]))
                resp.set_cookie('username', mylogin[1])
                flash('success')
                return resp
            except:
                # NOTE(review): bare except hides real errors; narrow it.
                flash('danger')
                return redirect(url_for('login'))
        else:
            flash('Username and password need to be more than 3 characters.')
    elif request.method=="GET":
        if not request.cookies.get("userID"):
            return render_template("login.html")
        else:
            return redirect(url_for('Home'))
@app.route("/register", methods=["POST","GET"])
def register():
    """Create a new account, then show the login page."""
    if request.method=="POST":
        username = request.form.get("username")
        password = request.form.get("password")
        success_status = pgregister(username,password)
        return render_template("login.html")
    elif request.method=="GET":
        return redirect(url_for('Home'))
@app.route("/logout", methods=["POST","GET"])
def logout():
    """Clear the auth cookies and return to the home page."""
    if request.method=="POST":
        resp = make_response(render_template("home.html"))
        resp.set_cookie('userID', '', expires=0)
        return resp
    elif request.method == "GET":
        resp = make_response(render_template("home.html"))
        resp.set_cookie('userID', '', expires=0)
        resp.set_cookie('username','', expires=0)
        return resp
if __name__ == '__main__':
    # Development server; debug=True must not be used in production.
    app.run(debug=True, host='0.0.0.0', port=3000)
| Aendraes/Website-public | website/app.py | app.py | py | 8,242 | python | en | code | 0 | github-code | 90 |
9826623629 | import os
import sys
import json
import subprocess
playbook_path = 'main.yml'
do_verbose = False
def set_verbose(value):
    """Set the module-level verbosity flag.

    Bug fix: without the ``global`` declaration the assignment only created
    a local variable, so calling this function never changed the module's
    ``do_verbose`` and run() could never see the new setting.
    """
    global do_verbose
    do_verbose = value
def make_vars(host_pkg, state):
    """Build the ansible extra-vars payload for the given packages and state."""
    return {'input': {'pkg_list': host_pkg, 'pkg_state': state}}
def run(host_pkg, state):
    """Invoke ansible-playbook with the given package list and state.

    Returns the ansible-playbook process's exit code.
    """
    vars = make_vars(host_pkg, state)
    err = sys.stderr
    out = sys.stdout
    # NOTE(review): output is silenced when do_verbose is True, which looks
    # inverted (verbose usually means *more* output) -- confirm intent.
    if (do_verbose):
        devnull = open(os.devnull, 'w')
        err = devnull
        out = devnull
    status = subprocess.run([
        "ansible-playbook", playbook_path,
        "--extra-vars", json.dumps(vars)
    ], stdout=out, stderr=err)
    if (do_verbose):
        devnull.close()
    return status.returncode
| SyncopatedLinux/cfgmgmt | plugins/modules/pkg_manager/library/runner.py | runner.py | py | 755 | python | en | code | null | github-code | 90 |
70031397097 | import cv2
import numpy as np
# Grab webcam frames, mask a colour range in HSV space, and display several
# smoothing filters applied to the masked result for comparison.
cap = cv2.VideoCapture(0)
while(1):
    _, frame = cap.read()
    # It converts the BGR color space of image to HSV color space
    hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
    # Threshold of blue in HSV space
    # NOTE(review): with hue starting at 35 this range covers green/yellow
    # upward rather than specifically blue -- confirm the intended colour.
    lower_blue = np.array([35, 140, 60])
    upper_blue = np.array([255, 255, 180])
    # preparing the mask to overlay
    mask = cv2.inRange(hsv, lower_blue, upper_blue)
    # The black region in the mask has the value of 0,
    # so when multiplied with original image removes all non-blue regions
    res = cv2.bitwise_and(frame, frame, mask = mask)
    # Box, Gaussian, median and bilateral filters, side by side.
    kernel = np.ones((15,15), np.float32)/255
    smoothed = cv2.filter2D(res, -1, kernel)
    blur = cv2.GaussianBlur(res, (15,15) , 0)
    median= cv2.medianBlur(res,15)
    bilateral = cv2.bilateralFilter(res, 15, 75, 75)
    cv2.imshow('frame', frame)
    cv2.imshow('blur', blur)
    cv2.imshow('result', res)
    cv2.imshow('smoothed', smoothed)
    cv2.imshow('median', median)
    cv2.imshow('bilateral', bilateral)
    # waitKey(0) blocks for a key press before grabbing the next frame.
    cv2.waitKey(0)
cv2.destroyAllWindows()
cap.release()
6319640957 | # Ultralytics YOLO 🚀, AGPL-3.0 license
import torch
from ultralytics.models.yolo.detect import DetectionValidator
from ultralytics.utils import ops
__all__ = ['NASValidator']
class NASValidator(DetectionValidator):
"""
Ultralytics YOLO NAS Validator for object detection.
Extends `DetectionValidator` from the Ultralytics models package and is designed to post-process the raw predictions
generated by YOLO NAS models. It performs non-maximum suppression to remove overlapping and low-confidence boxes,
ultimately producing the final detections.
Attributes:
args (Namespace): Namespace containing various configurations for post-processing, such as confidence and IoU thresholds.
lb (torch.Tensor): Optional tensor for multilabel NMS.
Example:
```python
from ultralytics import NAS
model = NAS('yolo_nas_s')
validator = model.validator
# Assumes that raw_preds are available
final_preds = validator.postprocess(raw_preds)
```
Note:
This class is generally not instantiated directly but is used internally within the `NAS` class.
"""
def postprocess(self, preds_in):
"""Apply Non-maximum suppression to prediction outputs."""
boxes = ops.xyxy2xywh(preds_in[0][0])
preds = torch.cat((boxes, preds_in[0][1]), -1).permute(0, 2, 1)
return ops.non_max_suppression(preds,
self.args.conf,
self.args.iou,
labels=self.lb,
multi_label=False,
agnostic=self.args.single_cls,
max_det=self.args.max_det,
max_time_img=0.5)
| ultralytics/ultralytics | ultralytics/models/nas/val.py | val.py | py | 1,846 | python | en | code | 15,778 | github-code | 90 |
13064369496 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.db.models.deletion
import modelcluster.contrib.taggit
import modelcluster.fields
import wagtail.core.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wagtailimages', '0005_make_filter_spec_unique'),
('taggit', '0001_initial'),
('wagtailcore', '0013_update_golive_expire_help_text'),
('portal_pages', '0030_marketplaceentrypage_state'),
]
operations = [
migrations.CreateModel(
name='BlogIndexPage',
fields=[
('page_ptr', models.OneToOneField(primary_key=True, parent_link=True, serialize=False, to='wagtailcore.Page', auto_created=True, on_delete=django.db.models.deletion.CASCADE)),
('intro', wagtail.core.fields.RichTextField(blank=True)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='BlogPage',
fields=[
('page_ptr', models.OneToOneField(primary_key=True, parent_link=True, serialize=False, to='wagtailcore.Page', auto_created=True, on_delete=django.db.models.deletion.CASCADE)),
('body', wagtail.core.fields.RichTextField()),
('date', models.DateField(verbose_name='Post date')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page', models.Model),
),
migrations.CreateModel(
name='BlogPageTag',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
('content_object', modelcluster.fields.ParentalKey(related_name='tagged_items', to='portal_pages.BlogPage')),
('tag', models.ForeignKey(related_name='portal_pages_blogpagetag_items', to='taggit.Tag', on_delete=django.db.models.deletion.CASCADE)),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.AddField(
model_name='blogpage',
name='tags',
field=modelcluster.contrib.taggit.ClusterTaggableManager(help_text='A comma-separated list of tags.', blank=True, to='taggit.Tag', verbose_name='Tags', through='portal_pages.BlogPageTag'),
preserve_default=True,
),
migrations.AddField(
model_name='blogpage',
name='top_image',
field=models.ForeignKey(related_name='+', blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='wagtailimages.Image'),
preserve_default=True,
),
]
| rapidpro/rapidpro-community-portal | src/rapidpro_community_portal/apps/portal_pages/migrations/0031_auto_20150520_1419.py | 0031_auto_20150520_1419.py | py | 2,832 | python | en | code | 18 | github-code | 90 |
30942020727 | # Open the file in read mode
file = open("data.txt", "r")
# Read the entire contents of the file
content = file.read()
# Print the content
print(content)
# Close the file
file.close()
# Open the file in write mode
file = open("data.txt", "w")
# Write content to the file
file.write("Hello, World!")
# Close the file
file.close() | AndriiVyshnevskyi/pythonProject | gitignore.py | gitignore.py | py | 334 | python | en | code | 0 | github-code | 90 |
73423663976 | # TO INCLUDE CHARACTERS THAT ALREADY
# HAVE A SPECIAL MEANING IN PYTHON
# THERE IS BACKSLASH '\'
# EASY CHECKING USING 'in'
# 'in' CHECKS IF ONE STRING IS PART OF ANOTHER
print("e" in "blueberry") # => True
print("blue" in "blueberry") # => True
print("blue" in "strawberry") # => False
def common_letters(string_one, string_two):
newLst = []
for letter in string_one:
if (letter in string_two) and not (letter in newLst):
newLst.append(letter)
return newLst
print(common_letters("hello", "world")) # => ['l', 'o'] | Hyunu02/basic_py | strings/easyWayToIterate.py | easyWayToIterate.py | py | 534 | python | en | code | 0 | github-code | 90 |
17352490272 | from googleapiclient.discovery import build
from google.oauth2 import service_account
import datetime
from googleapiclient.errors import HttpError
class GoogleCalendar:
def __init__(self):
key_file = 'calendarprojectPrivKey.json'
# this now has to be defined as a single element in an array:
# scope = ("https://www.googleapis.com/auth/calendar.readonly",)
scope = ["https://www.googleapis.com/auth/calendar.readonly"]
self.creds = service_account.Credentials.from_service_account_file('calendarprojectPrivKey.json', scopes=scope)
def getEvents(self):
totalEventString = ""
try:
service = build('calendar', 'v3', credentials=self.creds)
# Call the Calendar API
now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time
print('Getting the upcoming 10 events')
# change from primary using the list method: https://developers.google.com/calendar/api/v3/reference/calendarList/list
events_result = service.events().list(calendarId='googleServiceAccountNameHere@group.calendar.google.com', timeMin=now,maxResults=10,
singleEvents=True,orderBy='startTime').execute()
events = events_result.get('items', [])
for event in events:
start = event['start'].get('dateTime', event['start'].get('date'))
try:
formattedDate = datetime.datetime.strptime(start, '%Y-%m-%dT%H:%M:%S%z')
except ValueError:
# recurring events like birthdays just have:
formattedDate = datetime.datetime.strptime(start, '%Y-%m-%d')
formattedDate = formattedDate.replace(hour=12)
oneEventString = formattedDate.strftime("%a %d/%m/%Y %I:%M%p "+event['summary']+"\n")
print('boo: ', oneEventString)
totalEventString = totalEventString + oneEventString
except HttpError as error:
totalEventString = "Google Calendar isn't happy"
return totalEventString
| zogspat/bedside | googleCalendar.py | googleCalendar.py | py | 1,860 | python | en | code | 0 | github-code | 90 |
34770885878 | #########################
#鉴于WeiYi的框架比较复杂,令我欲仙欲死两周时间,我决定重新搭一个框架
#这个框架基于先进的tensorflow2.0 舍弃了很多原来有的语法
#e.g. placeholder 等
#本文描述的是如何生成信道模型。
#我们这里采用y = Hx+n的经典模型
import numpy as np
import math
from scipy.linalg import toeplitz
from scipy.linalg import sqrtm
import scipy.io as scio
class Channel_generator():
def __init__(self,Nu,Nt,L_mu=8,noise_var = 0.1,type = "IID",modulation_order = 2):
self.aChannelMatrix = np.zeros(shape=(Nu, Nt)).astype(np.complex64);
self.Nu = Nu
self.Nt = Nt
self.type = type
self.L_mu = L_mu
self.noise_scale = math.sqrt(noise_var)
self.changeH()
assert modulation_order ==1 or modulation_order ==2 or modulation_order ==4 or \
modulation_order ==6 or modulation_order ==8,"Unsupported Modulation order"
self.modulation_chart = np.array(scio.loadmat('ConsChart.mat')["cons_" + str(modulation_order)]) # 星座点初始化
self.max_cons = pow(2,modulation_order/2) - 1
self.symbolnumber = pow(2,modulation_order)
pass
def changeH(self):
if self.type =="IID":
self.aChannelMatrix = np.random.normal(size=(self.Nu, self.Nt), scale=1.0 / math.sqrt(self.Nu * 2)).astype(np.float32) + 1j * np.random.normal(
size=(self.Nu, self.Nt), scale=1.0 / math.sqrt(self.Nu * 2)).astype(np.float32)
elif self.type == "READ":
if not hasattr(self, 'channel_mat'):
self.channel_mat = scio.loadmat('channel_mat.mat')['H']
self.pos = 0
try:
self.aChannelMatrix = self.channel_mat[:,:,self.pos]
self.pos = self.pos + 1
except :
print("Error")
pass
elif self.type == "COR":
rho_r_m = 0.5
rho_t_m = 0.5
rxrandangle = 2 * math.pi * np.random.normal(size = None)
txrandangle = 2 * math.pi * np.random.normal(size = None)
rho_r = rho_r_m * np.exp(2 * math.pi * rxrandangle * 1j)
rho_t = rho_t_m * np.exp(2 * math.pi * txrandangle * 1j)
rr_vec = np.power(np.full((1, self.Nu), rho_r),np.arange(self.Nu))
rr = toeplitz(rr_vec)
rt_vec = np.power(np.full((1, self.Nt),rho_t ),np.arange(self.Nt))
rt = toeplitz(rt_vec)
Hiid = np.random.normal(size=(self.Nu, self.Nt), scale=1.0 / math.sqrt(self.Nu * 2)).astype(np.float32) + 1j * np.random.normal(
size=(self.Nu, self.Nt), scale=1.0 / math.sqrt(self.Nu * 2)).astype(np.float32)
H = np.matmul(sqrtm(rr), Hiid)
self.aChannelMatrix = np.matmul(H, sqrtm(rt))
pass
else :
print("fuck")
self.aChannelMatrixHermit = np.conjugate(np.transpose(self.aChannelMatrix))
def output(self,ifreal =True):
###############################
#这里的输入是[x_1 x_2 x_3....x_setnum]
#这里的输出是[y_1 y_2 y_3....y_setnum]
#其中x_i,y_i都是纵向量!每一列都是一个样本
#最后 aChannelMatirx 就是信道矩阵H
###############################
#Generate X
Xsymbol = np.asarray(np.random.randint(0, self.symbolnumber, self.Nt))
trueX = self.modulation_chart[Xsymbol]
#Generate Noise:
noiserealPart = np.random.normal(size=(self.Nu, 1), scale=self.noise_scale).astype(np.float32)
noiseimagPart = np.random.normal(size=(self.Nu, 1), scale=self.noise_scale).astype(np.float32)
truenoise = noiserealPart + 1j * noiseimagPart
#Generate Y
trueY = np.matmul(self.aChannelMatrix, trueX) + truenoise
if ifreal:
Youtput = np.transpose(np.vstack((np.real(trueY),np.imag(trueY))))
##
upH = np.hstack((np.real(self.aChannelMatrix),-np.imag(self.aChannelMatrix)))
doH = np.hstack((np.imag(self.aChannelMatrix),np.real(self.aChannelMatrix)))
H = np.vstack((upH,doH))
##
upH = np.hstack((np.real(self.aChannelMatrixHermit),-np.imag(self.aChannelMatrixHermit)))
doH = np.hstack((np.imag(self.aChannelMatrixHermit),np.real(self.aChannelMatrixHermit)))
HHermit = np.vstack((upH,doH))
Xoutput = np.transpose(np.vstack((np.real(trueX),np.imag(trueX))))
return Xoutput,H,HHermit,Youtput
else:
Youtput = np.transpose(trueY)
Xoutput = np.transpose(trueX)
return Xoutput,self.aChannelMatrix,self.aChannelMatrixHermit,Youtput
def multipleoutput(self,setnum = 10,ifreal=True,ifchangeChannel =True):
XCube = np.empty([setnum,2*self.Nt])
YCube = np.empty([setnum,2*self.Nu])
HCube = np.empty([setnum,2*self.Nu,2*self.Nt])
HHCube = np.empty([setnum,2*self.Nt,2*self.Nu])
self.changeH()
for i in range(0,setnum):
Xoutput,H,HH,Youtput = self.output(ifreal=ifreal)
XCube[i,:] = Xoutput
HCube[i,:,:] = H
HHCube[i,:,:] = HH
YCube[i,:] = Youtput
if ifchangeChannel:
self.changeH()
return XCube,HCube,HHCube,YCube
def harddes(self,Rv_estimated_X):#由于有星座点表,因此在这里直接一个硬判决的函数声明
####解释下算法:由于这里星座点都在奇数上,我们可以:
#### -1 0 1 2 3 4 ####
#### 如果在区间内,则 首先对检测值取ceil 或者 floor 取其中奇数的那个
#### 如果在区间外,取最大值
#简除过大值
Rv_estimated_X = np.where(Rv_estimated_X <-self.max_cons,-self.max_cons,Rv_estimated_X)
Rv_estimated_X = np.where(Rv_estimated_X > self.max_cons, self.max_cons,Rv_estimated_X)
#判断
Rv_estimated_X= np.where(Rv_estimated_X ==0 , np.sign(Rv_estimated_X), Rv_estimated_X)
Rv_X_hard= np.where(np.ceil(Rv_estimated_X) % 2, np.ceil(Rv_estimated_X), np.floor(Rv_estimated_X))
return Rv_X_hard
if __name__ == '__main__':
#如下是使用说明
channel = Channel_generator(64,32,modulation_order=4)
shit = channel.multipleoutput(setnum=30)
print(shit)
shit = channel.harddes(np.array([1.2,-1.3,3,3.1,4,-1,-3]))
pass
| RetardHuang/MIMO_detection_CEP_Net | Channel_generator.py | Channel_generator.py | py | 6,450 | python | en | code | 0 | github-code | 90 |
16764940546 | # -*- coding: utf-8 -*-
"""
Created on Wed Oct 4 14:11:52 2023
@author: Jichu
"""
import pandas as pd
# Load your dataset into a Pandas DataFrame
df = pd.read_csv("Population.csv")
# Create a list of country codes for aggregation
country_codes_to_aggregate = ["ARM", "AZE", "BLR", "GEO", "KAZ", "KGZ", "LVA", "MDA", "RUS", "TJK", "TKM", "UKR", "UZB", "LTU", "EST"]
# Filter the DataFrame to keep only the selected country codes
filtered_df = df[df["Country Code"].isin(country_codes_to_aggregate)]
# Group by "Country", "Sector", and "Gas", and sum the values
result_df = filtered_df.groupby(["Country", "sector", "gas"]).sum().reset_index()
# Generate a list of year columns from 1960 to 2021
years = [str(year) for year in range(1960, 2022)]
# Sum the values for each year across different countries
result_df = result_df.groupby(['sector', 'gas']).sum().reset_index()
# Flatten the column names and reset the index
result_df.columns = [" ".join(col).strip() if col[0] not in ('sector', 'gas') else "Result" for col in result_df.columns.values]
result_df.reset_index(drop=True, inplace=True)
# Export the result to a CSV file
result_df.to_csv("USSR_Population.csv", index=False) | jichuan-zhang/ESDA_Code | 0090_w1/Normalisation/USSR.py | USSR.py | py | 1,223 | python | en | code | 0 | github-code | 90 |
5387983590 | import numpy as np
from copy import deepcopy
from tkinter import *
from tkinter import messagebox
from sens import sensModel
# ИЗМЕНЕНИЕ СТРУКТУРЫ X и Y
def vectorStructureChanges(changes, changedX, changedY, n, k, X1, X2, Y1, Y2):
helper = 0
# подготавливаем вектор с изменениями
for i in range(0, n):
changes[i] = i
for i in range(0, k):
if changedX[i] != -0.0001:
for j in range(k, n):
if changedX[j] == -0.0001:
# изменение структуры вектора Х
helper = changedX[i]
changedX[i] = changedX[j]
changedX[j] = helper
# изменение структуры вектора Y
helper = changedY[i]
changedY[i] = changedY[j]
changedY[j] = helper
# сохранение информации об изменениях
changes[i] = j
changes[j] = i
break
# создание подвекторов
for i in range(0, k):
X1[i] = (changedX[i])
Y1[i] = (changedY[i])
j = 0
for i in range(k, n):
X2[j] = changedX[i]
Y2[j] = changedY[i]
j += 1
# ИЗМЕНЕНИЕ СТРУКТУРЫ МАТРИЦЫ А
def matrixStructureChanges(A, changed1A, changed2A, changes, n, k, A11, A12, A21, A22):
# меняем местами строки
for i in range(0, n):
if i != changes[i]:
num = int(changes[i])
for j in range(0, n):
changed1A[i, j] = A[num, j]
for i in range(0, n):
for j in range(0, n):
changed2A[i, j] = changed1A[i, j]
# меняем местами столбцы
for j in range(0, n):
if j != changes[j]:
num = int(changes[j])
for i in range(0, n):
changed2A[i, j] = changed1A[i, num]
# создание подматриц
for i in range(0, k):
for j in range(0, k):
A11[i, j] = changed2A[i, j]
for i in range(0, k):
l = 0
for j in range(k, n):
A12[i, l] = changed2A[i, j]
l += 1
l = 0
for i in range(k, n):
for j in range(0, k):
A21[l, j] = changed2A[i, j]
l += 1
l = 0
for i in range(k, n):
m = 0
for j in range(k, n):
A22[l, m] = changed2A[i, j]
m += 1
l += 1
# ВЫЧИСЛЕНИЯ
# вычисление Х1
def calculationsX(A11, A12, X2, Y1, k):
# создание матрицы Е
E = np.zeros((k, k))
for i in range(0, k):
for j in range(0, k):
if i == j:
E[i, j] = 1
answer1 = E - A11
answer1 = np.array(np.matrix(answer1).I) # обратная матрица
answer2 = A12.dot(X2) # умножение матрицы А12 на вектор Х2
answer2 = answer2 + Y1 # А12*Х2 + Y1
answer = answer1.dot(answer2) # (E - A11)^-1 * (A12*X2 + Y1)
return answer
# вычисление Y2
def calculationsY(A21, A22, X1, X2, k, n):
# создание матрицы Е
E = np.zeros((n-k, n-k))
for i in range(0, n-k):
for j in range(0, n-k):
if i == j:
E[i, j] = 1
answer1 = E - A22
answer1 = answer1.dot(X2) # (E - A22)*X2
answer2 = A21.dot(X1) # A21*X1
answer = answer1 - answer2 # (E - A22)*X2 - A21*X1
return answer
# ----------------------------------------------------------------------------------------------------------------------
# ВЫЧИСЛЕНИЕ МАТРИЦЫ W
# Проверка матрицы А на продуктивность
def comCheckA(A, n):
for j in range(0, n):
sumNum = 0
for i in range(0, n):
sumNum += A[i][j]
# Если сумма значений столбца больше или равна единице
if sumNum > 1.0 or sumNum == 1.0:
# Поиск максимального числа в столбце
maxNum = A[0, 0]
for l in range(0, n):
if maxNum < A[l][j]:
maxNum = A[l][j]
# Уменьшаем значение максимального числа так, чтобы сумма элементов столбца была меньше 1
for i in range(0, n):
if maxNum == A[i, j]:
sumNum = sumNum - 1
A[i, j] = A[i, j] - sumNum - 0.1
return A
# Основные рассчеты
def comCalc(A, X, Y, Yj, n, W, AXY):
# Рассчет W[i][j] = A[i][j]*X[j]
for i in range(0, n):
for j in range(0, n):
W[i, j] = A[i][j] * X[j]
# Рассчет Yj
for j in range(0, n):
num = 0
for i in range(0, n):
num += W[i][j]
Yj[j] = X[j] - num
# Проверка баланса B0
Y_sum = 0
Yj_sum = 0
for i in range(0, n):
Y_sum += Y[i]
Yj_sum += Yj[i]
if round(Y_sum) != round(Yj_sum):
messagebox.showinfo('Ошибка', 'Ошибка основного баланса! Y != Yj')
sys.exit()
# Проверка баланса B(1-4)
for i in range(0, n):
answer = 0
for j in range(0, n):
answer += W[i][j]
AXY[i] = answer + Y[i]
for i in range(0, n):
if round(X[i]) != round(AXY[i]):
messagebox.showinfo('Ошибка', 'Ошибка второстепенного баланса. X != A*X+Y')
sys.exit()
# Уменьшение элементов матрицы А
def comReduceA(A, n):
row = 0
column = 0
answer = 0
maxNum = np.zeros(n)
# Вычисление суммы столбцов матрицы А
for j in range(0, n):
num = 0
for i in range(0, n):
num += A[i][j]
maxNum[j] = num
# Вычисление столбца с максимальной суммой.
# Заполнение матрицы с максимальными значениями значениями данного столбца
for j in range(0, n):
if maxNum[j] == max(maxNum):
column = j
for i in range(0, n):
maxNum[i] = A[i][j]
# Вычисление максимального элемента в максимальном столбце
for i in range(0, n):
if maxNum[i] == max(maxNum):
row = i
answer = maxNum[i]
# Уменьшение максимального элемента
answer = answer - 0.1
A[row][column] = answer
A = comCheckA(A, n)
return A
# Вывод матрицы в окно пользователя
def comOutput(root, startA, startW, startY, startYj, startAXY, startX, A, n, aText):
# Создание всех вспомогательных средств, пояснения и обозначения
wNameLabel = Label(text='Выходная матрица')
wNameLabel.grid(row=n + 10, column=0)
nameLabel = Label(text='')
nameLabel.grid(row=n + 11, column=0)
# Обозначения по строкам
for i in range(n + 12, n + n + 12):
wiNameLabel = Label(text=i - n - 11)
wiNameLabel.grid(row=i, column=0)
wiNameLabel = Label(text='Yj')
wiNameLabel.grid(row=n + n + 13, column=0)
# Обозначения по столбцам
for j in range(1, n + 1):
wjNameLabel = Label(text=j)
wjNameLabel.grid(row=n + 11, column=j)
wjNameLabel = Label(text='Y')
wjNameLabel.grid(row=n + 11, column=n + 1)
wjNameLabel = Label(text='X')
wjNameLabel.grid(row=n + 11, column=n + 2)
wjNameLabel = Label(text='∑W + Y')
wjNameLabel.grid(row=n + 11, column=n + 3)
# Вывод значений
for i in range(n + 12, n + n + 12):
for j in range(1, n + 1):
mes = StringVar(root, round(startW[i - n - 12, j - 1], 3))
wEntry = Entry(root, textvariable=mes)
wEntry.grid(row=i, column=j)
for i in range(n + 12, n + n + 12):
mes = StringVar(root, round(startY[i - n - 12], 3))
wEntry = Entry(root, textvariable=mes)
wEntry.grid(row=i, column=n + 1)
mes = StringVar(root, round(startY.sum(), 3))
wEntry = Entry(root, textvariable=mes)
wEntry.grid(row=n + n + 13, column=n + 1)
for i in range(n + 12, n + n + 12):
mes = StringVar(root, round(startX[i - n - 12], 3))
wEntry = Entry(root, textvariable=mes)
wEntry.grid(row=i, column=n + 2)
for i in range(n + 12, n + n + 12):
mes = StringVar(root, round(startAXY[i - n - 12], 3))
wEntry = Entry(root, textvariable=mes)
wEntry.grid(row=i, column=n + 3)
for j in range(1, n + 1):
mes = StringVar(root, round(startYj[j - 1], 3))
wEntry = Entry(root, textvariable=mes)
wEntry.grid(row=n + n + 13, column=j)
# Вывод информации об изменении А
myCheck = 1
for i in range(0, n):
for j in range(0, n):
if A[i, j] != startA[i, j]:
aText.append('Необходимо уменьшить A[{0}][{1}], на {2}.'.format(i + 1, j + 1, startA[i, j] - A[i, j]))
myCheck += 1
if not aText:
aText.append('Таблица продуктивна.')
for i in range(0, len(aText)):
newALabel = Label(root, text=aText[i])
newALabel.grid(row=n + 12 + i, column=n + 4)
def comProd(root, A, W, Y, Yj, AXY, X, n, prodBtn):
prodBtn.config(state=DISABLED)
# Создание вспомогательных средств
label = Label(text='Изменения\n продуктивности:')
label.grid(row=n + n + 17, column=0)
label = Label(text='A = ')
label.grid(row=n + n + 18, column=0)
# Матрица А
label = Label(text='')
label.grid(row=n + n + 19, column=0)
for i in range(n + n + 20, n + n + n + 20):
iNameLabel = Label(text=i - n - n - 19)
iNameLabel.grid(row=i, column=0)
for j in range(1, n + 1):
jNameLabel = Label(text=j)
jNameLabel.grid(row=n + n + 19, column=j)
for i in range(n + n + 20, n + n + n + 20):
for j in range(1, n + 1):
mes = StringVar(root, round(A[i - n - n - 20][j - 1], 3))
myEntry = Entry(root, textvariable=mes)
myEntry.grid(row=i, column=j)
# Выходная матрица
# Создание вспомогательных средств
label = Label(text='')
label.grid(row=n + n + n + 21, column=0)
label = Label(text='Выходная матрица')
label.grid(row=n + n + n + 22, column=0)
# Обозначения по строкам
label = Label(text='')
label.grid(row=n + n + n + 23, column=0)
for i in range(n + n + n + 24, n + n + n + n + 24):
wiNameLabel = Label(text=i - n - n - n - 23)
wiNameLabel.grid(row=i, column=0)
wiNameLabel = Label(text='Yj')
wiNameLabel.grid(row=n + n + n + n + 25, column=0)
# Обозначения по столбцам
for j in range(1, n + 1):
wjNameLabel = Label(text=j)
wjNameLabel.grid(row=n + n + n + 23, column=j)
wjNameLabel = Label(text='Y')
wjNameLabel.grid(row=n + n + n + 23, column=n + 1)
wjNameLabel = Label(text='X')
wjNameLabel.grid(row=n + n + n + 23, column=n + 2)
wjNameLabel = Label(text='∑W + Y')
wjNameLabel.grid(row=n + n + n + 23, column=n + 3)
# Вывод значений
for i in range(n + n + n + 24, n + n + n + n + 24):
for j in range(1, n + 1):
mes = StringVar(root, round(W[i - n - n - n - 24, j - 1], 3))
wEntry = Entry(root, textvariable=mes)
wEntry.grid(row=i, column=j)
for i in range(n + n + n + 24, n + n + n + n + 24):
mes = StringVar(root, round(Y[i - n - n - n - 24], 3))
wEntry = Entry(root, textvariable=mes)
wEntry.grid(row=i, column=n + 1)
mes = StringVar(root, round(Y.sum(), 3))
wEntry = Entry(root, textvariable=mes)
wEntry.grid(row=n + n + n + n + 25, column=n + 1)
for i in range(n + n + n + 24, n + n + n + n + 24):
mes = StringVar(root, round(X[i - n - n - n - 24], 3))
wEntry = Entry(root, textvariable=mes)
wEntry.grid(row=i, column=n + 2)
for i in range(n + n + n + 24, n + n + n + n + 24):
mes = StringVar(root, round(AXY[i - n - n - n - 24], 3))
wEntry = Entry(root, textvariable=mes)
wEntry.grid(row=i, column=n + 3)
for j in range(1, n + 1):
mes = StringVar(root, round(Yj[j - 1], 3))
wEntry = Entry(root, textvariable=mes)
wEntry.grid(row=n + n + n + n + 25, column=j)
# расчет оптимального баланса
sensModel(n, A, X, Y, W)
# ----------------------------------------------------------------------------------------------------------------------
def comModel(root, A, X, Y, n):
# ПОДГОТОВКА ИСХОДНЫХ ДАННЫХ
# подготовка переменных
k = 0 # количество неизвестных данных в векторе X или известных в векторе Y
changed2A = np.zeros((n, n))
changes = np.zeros(n) # вектор с изменениями
# копии существующих массивов для облегчения рассчетов
changed1A = deepcopy(A)
changedX = deepcopy(X)
changedY = deepcopy(Y)
# Оригинальные значения матриц
origX = deepcopy(X)
origY = deepcopy(Y)
# получаем значение k
for i in range(0, n):
if X[i] == -0.0001:
k += 1
# создание подвекторов и подматриц
X1 = np.zeros(k)
X2 = np.zeros(n-k)
Y1 = np.zeros(n-k)
Y2 = np.zeros(k)
A11 = np.zeros((k, k))
A12 = np.zeros((k, n-k))
A21 = np.zeros((n-k, k))
A22 = np.zeros((n-k, n-k))
# решение
vectorStructureChanges(changes, changedX, changedY, n, k, X1, X2, Y1, Y2)
matrixStructureChanges(A, changed1A, changed2A, changes, n, k, A11, A12, A21, A22)
X1 = calculationsX(A11, A12, X2, Y1, k)
Y2 = calculationsY(A21, A22, X1, X2, k, n)
# Переворот векторов X, Y и матрицы А
for i in range(0, k):
changedX[i] = X1[i]
changedY[k+i] = Y2[i]
for i in range(0, n):
if i != changes[i]:
X[i] = changedX[int(changes[i])]
Y[i] = changedY[int(changes[i])]
else:
X[i] = changedX[i]
Y[i] = changedY[i]
# -------------------
# Создание необходимых матриц и векторов
W = np.zeros((n, n))
AXY = np.zeros(n)
Yj = np.zeros(n)
comCalc(A, X, Y, Yj, n, W, AXY)
# Создание исходных значений
startA = deepcopy(A)
startW = deepcopy(W)
startX = deepcopy(X)
startY = deepcopy(Y)
startYj = deepcopy(Yj)
startAXY = deepcopy(AXY)
# Проверка продуктивности
while 1:
myCheck = 0
for i in range(0, n):
if Y[i] <= 0 or Yj[i] <= 0:
myCheck = 1
if myCheck == 0:
# Вывод полученных значений
aText = []
comOutput(root, startA, startW, startY, startYj, startAXY, startX, A, n, aText)
# Если таблица не продуктивна, создаем кнопку для улучшения продуктивности
if aText[0] != 'Таблица продуктивна.':
label = Label(text='')
label.grid(row=n + n + 14, column=0)
prodBtn = Button(text='Улучшить\n продуктивность',
command=lambda: comProd(root, A, W, Y, Yj, AXY, X, n, prodBtn))
prodBtn.grid(row=n + n + 15, column=0)
label = Label(text='')
label.grid(row=n + n + 16, column=0)
else:
# расчет оптимального баланса
sensModel(n, A, X, Y, W)
return 0
else:
A = comReduceA(A, n)
X = deepcopy(origX)
Y = deepcopy(origY)
changed2A = np.zeros((n, n))
changes = np.zeros(n) # вектор с изменениями
# копии существующих массивов для облегчения рассчетов
changed1A = deepcopy(A)
changedX = deepcopy(X)
changedY = deepcopy(Y)
# создание подвекторов и подматриц
X1 = np.zeros(k)
X2 = np.zeros(n - k)
Y1 = np.zeros(k)
Y2 = np.zeros(n - k)
A11 = np.zeros((k, k))
A12 = np.zeros((k, n - k))
A21 = np.zeros((n - k, k))
A22 = np.zeros((n - k, n - k))
vectorStructureChanges(changes, changedX, changedY, n, k, X1, X2, Y1, Y2)
matrixStructureChanges(A, changed1A, changed2A, changes, n, k, A11, A12, A21, A22)
X1 = calculationsX(A11, A12, X2, Y1, k)
Y2 = calculationsY(A21, A22, X1, X2, k, n)
# Переворот векторов X, Y и матрицы А
for i in range(0, k):
changedX[i] = X1[i]
changedY[k + i] = Y2[i]
for i in range(0, n):
if i != changes[i]:
X[i] = changedX[int(changes[i])]
Y[i] = changedY[int(changes[i])]
else:
X[i] = changedX[i]
Y[i] = changedY[i]
comCalc(A, X, Y, Yj, n, W, AXY)
| KikinaA/exercise4.2 | leontief/combineSynthesisModel.py | combineSynthesisModel.py | py | 18,661 | python | ru | code | 0 | github-code | 90 |
18357582029 | import heapq
from collections import deque
def main():
N, M = list(map(int, input().split(' ')))
tasks = list()
for _ in range(N):
req_day, reward = list(map(int, input().split(' ')))
latest_start_day = M - req_day
tasks.append((-reward, latest_start_day))
# sort by desc order of latest_start_day
tasks.sort(key=lambda t: t[1], reverse=True)
tasks = deque(tasks)
task_que = []
neg_reward = 0
for d in range(M, -1, -1):
while len(tasks) > 0 and tasks[0][1] >= d:
heapq.heappush(task_que, tasks.popleft())
if len(task_que) == 0:
continue
# do task first with local max reward
task = heapq.heappop(task_que)
neg_reward += task[0]
print(-neg_reward)
if __name__ == '__main__':
main()
| Aasthaengg/IBMdataset | Python_codes/p02948/s519738686.py | s519738686.py | py | 817 | python | en | code | 0 | github-code | 90 |
70103864298 | import time
import uuid
import os.path
tokens = set()
def log(*args, **kwargs):
print(*args, **kwargs)
def get_local_time(time_seconds):
t = time.localtime(time_seconds)
return time.strftime("%Y-%m-%d %H:%M:%S", t)
def generate_token():
token = str(uuid.uuid4())
tokens.add(token)
return token
def encrypt(pwd):
from app import app
from hashlib import md5
key = app.secret_key + pwd
return md5(key.encode("ascii")).hexdigest()
def init_db():
if not os.path.exists("db"):
os.mkdir("db")
classes = ["User", "Topic", "Reply", "Board"]
for c in classes:
db_path = "db{}{}.json".format(os.sep, c)
if not os.path.exists(db_path):
with open(db_path, "w", encoding="utf-8") as f:
f.write("[]")
| Timuer/BBS | utils.py | utils.py | py | 724 | python | en | code | 0 | github-code | 90 |
29226682 | from django.shortcuts import render
from django.shortcuts import redirect
from .models import Post, Group, Follow
from django.shortcuts import get_object_or_404
from django.contrib.auth import get_user_model
from .forms import PostForm, CommentForm
from django.contrib.auth.decorators import login_required
from posts import utils
User = get_user_model()
def index(request):
post_list = Post.objects.all().order_by('-pub_date')
page_obj = utils.paginating(request, post_list)
context = {
'page_obj': page_obj,
}
return render(request, 'posts/index.html', context)
def group_posts(request, slug):
group = get_object_or_404(Group, slug=slug)
post_list = Post.objects.all().order_by('-pub_date')
page_obj = utils.paginating(request, post_list)
context = {
'group': group,
'page_obj': page_obj,
}
return render(request, 'posts/group_list.html', context)
def profile(request, username):
author = get_object_or_404(User, username=username)
post_list = author.posts.select_related('group')
following = request.user.is_authenticated and Follow.objects.filter(
author=author,
user=request.user
).exists()
page_obj = utils.paginating(request, post_list)
context = {
'author': author,
'page_obj': page_obj,
'following': following
}
return render(request, 'posts/profile.html', context)
def post_detail(request, post_id):
post = get_object_or_404(Post.objects.select_related(), id=post_id)
comments = post.comments.all()
form = CommentForm()
context = {
'post': post,
'form': form,
'comments': comments
}
return render(request, 'posts/post_detail.html', context)
@login_required
def post_create(request):
form = PostForm(
request.POST or None,
files=request.FILES or None
)
context = {'form': form}
if request.method == 'POST':
if form.is_valid():
post = form.save(commit=False)
post.author = request.user
post.save()
return redirect('posts:profile', request.user.username)
return render(request, 'posts/create_post.html', context)
@login_required
def post_edit(request, post_id):
post = get_object_or_404(Post, id=post_id)
form = PostForm(
request.POST or None,
files=request.FILES or None,
instance=post
)
context = {'form': form,
'is_edit': True,
'post_id': post_id}
if request.user != post.author:
return redirect('posts:post_detail', post_id)
if request.method == 'POST':
if form.is_valid():
post = form.save(commit=False)
post.save()
return redirect('posts:post_detail', post_id)
return render(request, 'posts/create_post.html', context)
@login_required
def add_comment(request, post_id):
post = get_object_or_404(Post, id=post_id)
form = CommentForm(request.POST or None)
if form.is_valid():
comment = form.save(commit=False)
comment.author = request.user
comment.post = post
comment.save()
return redirect('posts:post_detail', post_id=post_id)
@login_required
def follow_index(request):
posts = Post.objects.filter(author__following__user=request.user)
page_obj = utils.paginating(request, posts)
context = {
'page_obj': page_obj
}
return render(request, 'posts/follow.html', context)
@login_required
def profile_follow(request, username):
author = get_object_or_404(User, username=username)
if author != request.user:
Follow.objects.get_or_create(
author=author,
user=request.user,
)
return redirect('posts:profile', username=username)
@login_required
def profile_unfollow(request, username):
current_user = request.user
author = get_object_or_404(User, username=username)
following_query = Follow.objects.filter(
author=author,
user=current_user
)
if following_query.exists():
following_query.delete()
return redirect('posts:index')
| maksimivanov1/hw05_final | yatube/posts/views.py | views.py | py | 4,143 | python | en | code | 0 | github-code | 90 |
21834315412 |
# coding: utf-8
# In[73]:
import itchat
import smtplib
import pickle
from email.mime.text import MIMEText
from itchat.content import TEXT
# function: send email
def send_email(email_type, email_content):
    """Send a plain-text notification email through the local SMTP relay.

    Args:
        email_type: used verbatim as the message subject.
        email_content: plain-text body of the message.

    The sender and recipient addresses are hard-coded; the relay is
    ``localhost:25`` and no authentication is performed.
    """
    fromaddr = "me@senlyu.com"
    msg = MIMEText(email_content, 'plain')
    msg['Subject'] = email_type
    msg['From'] = fromaddr
    # len(msg) would only count the *headers* (email.message.Message.__len__),
    # so report the real serialized size instead.
    print("Message length is", len(msg.as_string()))
    server = smtplib.SMTP('localhost', 25)
    try:
        server.set_debuglevel(1)
        server.sendmail(fromaddr, 'resembleblue@gmail.com', msg.as_string())
    finally:
        # Always close the SMTP session, even when sendmail raises.
        server.quit()
send_email('1','nothing')  # smoke test: fires a real email on import -- NOTE(review): remove before reuse
# In[53]:
#test email add
#add_email('jobs@senlyu.com')
# In[55]:
#test email delete
#delete_email('123@123.com')
# In[96]:
#show_email()
# In[13]:
# In[ ]:
# In[ ]:
# In[ ]:
# In[ ]:
# In[ ]:
# In[ ]:
| senlyu/wechatrobot | test_version/mailtest.py | mailtest.py | py | 974 | python | en | code | 1 | github-code | 90 |
72776114538 | # -*- coding: utf-8 -*-
"""
image_processing_with_kmeans.py
Landon Halloran
07.03.2019
www.ljsh.ca
Demonstration of kmeans using multi-band image data. Good intro to several powerful
python modules! And good example of a practical unsupervised discrete ML application
to remote sensing data.
Data is downsampled Sentinel-2 data (bands 2,3,4,8) at 60m resolution in PNG format.
"""
# import these modules:
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import imageio
import glob
import seaborn as sns; sns.set(style="ticks", color_codes=True)
# import images to dictionary:
images = dict();
for image_path in glob.glob("*.png"):
    print('reading ',image_path)
    temp = imageio.imread(image_path)
    # keep only the first channel of the PNG (bands are stored as grayscale)
    temp = temp[:,:,0].squeeze()
    # assumes filenames look like "??????B2.png" so chars 6:8 are the band id
    # ("B2", "B3", "B4", "B8") -- TODO confirm against the data files
    images[image_path[6:8]] = temp
# NOTE: `temp` is the last image read in the loop above
print('images have ', np.size(temp),' pixels each')
# make a 3D numpy array of data...
imagecube = np.zeros([images['B2'].shape[0],images['B2'].shape[1],4])
imagecube[:,:,0] = images['B2'] #
imagecube[:,:,1] = images['B3'] #
imagecube[:,:,2] = images['B4'] #
imagecube[:,:,3] = images['B8'] #
imagecube=imagecube/256 # scaling to between 0 and 1
# display an RGB or false colour image
thefigsize = (10,8)# set figure size
plt.figure(figsize=thefigsize)
plt.imshow(imagecube[:,:,0:3])
# sample random subset of images
Nsamples = 5000 # number of samples we take from image
imagesamples = []
for i in range(Nsamples):
    # pick a random pixel (sampling with replacement) and keep its 4-band vector
    xr=np.random.randint(0,imagecube.shape[1]-1)
    yr=np.random.randint(0,imagecube.shape[0]-1)
    imagesamples.append(imagecube[yr,xr,:])
# convert to pandas dataframe
imagessamplesDF=pd.DataFrame(imagesamples,columns = ['B2','B3','B4','B8'])
# make pairs plot (each band vs. each band)
seaborn_params_p = {'alpha': 0.6, 's': 40, 'edgecolor': 'k'}
pp1=sns.pairplot(imagessamplesDF, plot_kws = seaborn_params_p)#, hist_kws=seaborn_params_h)
#pp1.map_diag(sns.kdeplot, lw=2, legend=False, alpha=0.6) # not working.
# fit kmeans to to samples:
from sklearn.cluster import KMeans
NUMBER_OF_CLUSTERS = 5 # <---------- define number of clusters (groups) here!
KMmodel = KMeans(n_clusters=NUMBER_OF_CLUSTERS)
KMmodel.fit(imagessamplesDF)
# cluster ids as strings so seaborn treats them as categories (hue)
KM_train = list(KMmodel.predict(imagessamplesDF))
i=0
for k in KM_train:
    KM_train[i] = str(k)
    i=i+1
# NOTE(review): this is an alias, not a copy -- both names point at the same
# DataFrame, so the 'group' column is added to imagessamplesDF as well
imagessamplesDF2=imagessamplesDF
imagessamplesDF2['group'] = KM_train
# pair plots with clusters coloured:
pp2=sns.pairplot(imagessamplesDF,vars=['B2','B3','B4','B8'], hue='group',plot_kws = seaborn_params_p)
#
# classify every pixel: predict one image row (width x 4 bands) at a time
imageclustered=np.empty((imagecube.shape[0],imagecube.shape[1]))
i=0
for row in imagecube:
    temp = KMmodel.predict(row)
    imageclustered[i,:]=temp
    i=i+1
# plot the map of the clustered data
plt.figure(figsize=thefigsize)
plt.imshow(imageclustered, cmap='rainbow') # see other colour maps @ https://matplotlib.org/examples/color/colormaps_reference.html
| lhalloran/Remote_Sensing_MSc_Course-Google_Earth_Engine | python/image_processing_with_kmeans.py | image_processing_with_kmeans.py | py | 2,853 | python | en | code | 6 | github-code | 90 |
70974902697 | import random
import sys
import time
from copy import deepcopy
def neighbors_from_data(data):
    """Build an adjacency map from a cost matrix.

    ``data[0]`` is a placeholder row and is skipped; row ``i`` holds the
    travel costs from city ``i`` to every city (1-based).  The city's cost
    to itself is omitted from its neighbour list.

    Returns:
        dict mapping city -> list of (other_city, cost) tuples.
    """
    adjacency = {}
    for city, row in enumerate(data):
        if city == 0:
            continue
        adjacency[city] = [(other, int(cost))
                           for other, cost in enumerate(row, 1)
                           if other != city]
    return adjacency
def default_path(neighbors: dict):
    """Build an initial closed tour with a greedy nearest-neighbour walk.

    Starting from the first city in *neighbors*, repeatedly moves to the
    cheapest unvisited neighbour, then closes the tour back to the start.

    Returns:
        (path, total_cost) where path is the list of cities ending at the
        start city again.
    """
    start = next(iter(neighbors))
    chosen = 0
    route = []
    cost = 0
    city = start
    while city not in route:
        best = 1000000000  # sentinel: "no unvisited neighbour found yet"
        for nxt, dist in neighbors[city]:
            if dist < best and nxt not in route:
                chosen, best = nxt, dist
        route.append(city)
        if best != 1000000000:
            cost += best
        city = chosen
    # Close the loop: first entry of the last city's neighbour list is the
    # lowest-numbered city, i.e. the start city for any tour of length > 2.
    route.append(start)
    cost += neighbors[city][0][1]
    return route, cost
def shake_path(path: list):
    """Return a neighbouring tour for local search.

    Picks two distinct inner positions (endpoints stay fixed) and either
    swaps the two cities in place (70 % of the time, mutating *path*) or
    returns a new list with the segment between them reversed.
    """
    i = random.randrange(1, len(path) - 1)
    j = random.choice([k for k in range(1, len(path) - 1) if k != i])
    i, j = sorted((i, j))
    if random.random() < 0.7:
        path[i], path[j] = path[j], path[i]
        return path
    return path[:i] + list(reversed(path[i:j])) + path[j:]
def calculate_cost(path: list, neighbors: dict):
    """Sum the edge costs along *path* using the adjacency map.

    Each neighbour list omits the city itself, so the index of city ``b``
    inside ``neighbors[a]`` is ``b - 1`` when ``b < a`` and ``b - 2``
    otherwise.
    """
    total = 0
    for a, b in zip(path, path[1:]):
        idx = b - 1 if b < a else b - 2
        total += neighbors[a][idx][1]
    return total
def tabu_search(start_path, tabu_length, num_of_shakes, time_limit, neighbors: dict):
    """Tabu-search TSP heuristic.

    Args:
        start_path: (path, cost) tuple as produced by default_path(); only
            the path element is used.
        tabu_length: maximum size of the tabu list (FIFO eviction).
        num_of_shakes: candidate moves evaluated per iteration.
        time_limit: wall-clock budget in seconds.
        neighbors: adjacency map as built by neighbors_from_data().

    Returns:
        (best_path, best_cost).

    NOTE(review): if time_limit <= 0 the loop body never runs and
    ``best_cost`` is referenced before assignment -- confirm callers always
    pass a positive limit.
    """
    current_path = start_path[0]
    best_path = current_path
    tabu_list = list()
    tabu_list.append(current_path)
    global_start = time.time()
    while time.time() - global_start <= time_limit:
        # r is the best candidate seen this iteration; deepcopy because
        # shake_path may mutate its argument in place.
        r = shake_path(deepcopy(current_path))
        for _ in range(num_of_shakes):
            if time.time() - global_start > time_limit:
                break
            w = shake_path(deepcopy(current_path))
            # accept w if it is not tabu and either r is tabu or w is cheaper
            if w not in tabu_list and (r in tabu_list or calculate_cost(w, neighbors) < calculate_cost(r, neighbors)):
                r = w
        if r not in tabu_list:
            current_path = r
            tabu_list.append(r)
        current_cost = calculate_cost(current_path, neighbors)
        best_cost = calculate_cost(best_path, neighbors)
        if current_cost < best_cost:
            print("new best", current_cost, "prev", best_cost, file=sys.stderr)
            best_path = current_path
            best_cost = current_cost
        # FIFO eviction keeps the tabu list bounded
        if len(tabu_list) >= tabu_length:
            tabu_list.pop(0)
    return best_path, best_cost
t, n = map(int, input().split())
data = [[0]]
for x in [[*map(int, input().split())] for i in range(n)]:
data.append(x)
nbs = neighbors_from_data(data)
bp, c = tabu_search(default_path(nbs), n*10, int(n**2/3), t, nbs)
print(c)
print(*bp, file=sys.stderr)
| jsxgod/Python-coursework | amh/l1/z2/zad2.py | zad2.py | py | 3,311 | python | en | code | 0 | github-code | 90 |
18349052039 | # 頂点数がN(N-1)/2, 辺がN(N-2)本できる
from collections import deque
N = int(input())
# matches[i]: opponents of player i in required order (0-based); the list is
# consumed from the END via pop(), so input order is reversed here.
matches = [[a-1 for a in map(int, input().split())] for line in range(N)]
q = deque(range(N))
# depth[i]: day on which player i finishes the matches scheduled so far
depth = [0]*N
# waiting[i] = b means player i's next match is against b and i is waiting
waiting = [-1]*N
while q:
    a = q.popleft()
    b = matches[a].pop()
    if waiting[b] == a:
        # both players want each other next -> the match can be scheduled one
        # day after the later of their current schedules
        depth[a] = depth[b] = max(depth[a], depth[b]) + 1
        if matches[a]:
            q.append(a)
        if matches[b]:
            q.append(b)
    else:
        # b is not ready for a yet; record that a is waiting for b
        waiting[a] = b
if any(matches):
print(-1)
else:
print(max(depth)) | Aasthaengg/IBMdataset | Python_codes/p02925/s926190235.py | s926190235.py | py | 509 | python | en | code | 0 | github-code | 90 |
8672834132 | import http.client
import hashlib
import urllib
import random
import json
import requests
def post_to_baidu(from_text, to_text, input_text):
    """Translate *input_text* with the Baidu Fanyi REST API.

    Args:
        from_text: source language code (e.g. 'zh', 'en').
        to_text: target language code.
        input_text: text to translate.

    Returns:
        (source, translation) tuple on success, or None implicitly when the
        request fails or the API reports an error.

    NOTE(review): the app id and secret key are hard-coded below (and were
    also listed in the original docstring) -- move them to configuration
    before sharing this code.
    """
    appid = "20220212001080952"
    secretKey = '7hmzzpojDNLIdUD5MkUb'
    salt = random.randint(32768, 65536)
    q = input_text
    # Signature per the API spec: MD5(appid + query + salt + secret).
    sign = appid + q + str(salt) + secretKey
    sign = hashlib.md5(sign.encode()).hexdigest()
    trans_url = 'http://api.fanyi.baidu.com/api/trans/vip/translate'
    params = {
        'q': input_text,
        'from': from_text,
        'to': to_text,
        'appid': appid,
        'salt': salt,
        'sign': sign
    }
    try:
        response = requests.get(trans_url, params=params)
        result_dict = response.json()
        if 'trans_result' in result_dict:
            return result_dict['trans_result'][0]['src'], result_dict['trans_result'][0]['dst']
        else:
            print('Some error occured: ', result_dict)
    except Exception:
        print('访问失败!')
def chinese_or_english(input_text):
    """Decide the translation direction from the text's characters.

    Returns ('zh', 'en') when the text contains at least one CJK character,
    otherwise ('en', 'zh').

    Bug fix: the original overwrote the result on every character, so the
    decision depended only on the *last* character of the string; a single
    Chinese character anywhere should classify the text as Chinese.
    """
    if any('\u4e00' <= ch <= '\u9fff' for ch in input_text):
        return 'zh', 'en'
    return 'en', 'zh'
def func_entry():
    """Prompt for a sentence, auto-detect the direction, translate, print."""
    sentence = input("请输入需要翻译的句子:")
    src, dst = chinese_or_english(input_text=sentence.strip())
    original, translated = post_to_baidu(src, dst, sentence)
    print(original, translated)
if __name__ == "__main__":
    func_entry()  # run the interactive translator when executed as a script
| muyuchenzi/PYref | ReviewCode/QA_for_InterView/Python_Advance/English_Chinese_trans.py | English_Chinese_trans.py | py | 2,462 | python | en | code | 0 | github-code | 90 |
39198661001 | import os
import sys
import shutil
def install_sublime_plugin(packages_path, rewrite=False):
    """Copy the plugin folders next to this script into *packages_path*.

    Hidden folders (name starting with '.') are skipped.  Existing files are
    overwritten only when *rewrite* is True.

    Args:
        packages_path: Sublime Text "Packages" directory to install into.
        rewrite: overwrite files that already exist at the destination.

    Returns:
        list of "src -> dst" strings for files that were NOT copied because
        they already existed (empty when rewrite is True).
    """
    not_copied = []
    source_path = os.path.dirname(os.path.realpath(__file__))
    for folder in os.listdir(source_path):
        folder_to_copy = os.path.join(packages_path, folder)
        source_folder = os.path.join(source_path, folder)
        if not os.path.isdir(source_folder) or folder.startswith("."):
            continue
        if not os.path.exists(folder_to_copy):
            os.mkdir(folder_to_copy)
        # Bug fix: the original called os.listdir(folder), which resolves the
        # bare folder name against the current working directory, not the
        # script's own directory -- list the absolute source folder instead.
        for file in os.listdir(source_folder):
            source = os.path.join(source_folder, file)
            destination = os.path.join(folder_to_copy, file)
            file_exists = os.path.exists(destination)
            if file_exists and not rewrite:
                not_copied.append("{0} -> {1}".format(source, destination))
            if not file_exists or rewrite:
                try:
                    shutil.copy(source, destination)
                    print("{0} -> {1}".format(source, destination))
                except IOError:
                    print("Cannot copy file {0} to the folder {1}.".format(source, folder_to_copy))
    return not_copied
HELP_MESSAGE = """install.py [key] path
[key] - Command key (optional). Available keys:
-r Overwrite existing files.
-h Help.
path - Full path to Sublime Text Packages folder. Write this value in quotes."""
if __name__ == "__main__":
    # No arguments at all: the Packages path is mandatory.
    if len(sys.argv) == 1:
        print('Specify path to Packages folder as first argument. Run script with -h key for help.')
    # -r: overwrite existing files; the path must follow as the 2nd argument.
    elif sys.argv[1] == "-r":
        if len(sys.argv) == 3:
            install_sublime_plugin(sys.argv[2], True)
        else:
            print('Specify path to Packages folder as second argument. Run script with -h key for help.')
    elif sys.argv[1] == "-h":
        print(HELP_MESSAGE)
    # Default mode: install without overwriting and report skipped files.
    else:
        not_copied_files = install_sublime_plugin(sys.argv[1])
        if len(not_copied_files) > 0:
            print('Following files already exist in destination. '
                  'Copy them manually or add their contents to existing files. Or run the program with -r key if you '
                  'want to replace existing files. Run script with -h key for help.')
            for not_copied in not_copied_files:
                print(not_copied)
| yahor-filipchyk/sublime-text-2-jbehave | install.py | install.py | py | 2,426 | python | en | code | 0 | github-code | 90 |
44699474274 | from sklearn.datasets import load_iris
from sklearn import tree
from sklearn.metrics import classification_report
import graphviz
# Train a CART decision tree on the iris dataset and visualise it.
iris = load_iris()
x_data = iris.data
y_data = iris.target

model = tree.DecisionTreeClassifier()
model.fit(x_data, y_data)

# Bug fix: export_graphviz returns None when out_file is given, so the
# original handed None to graphviz.Source and crashed.  Build the DOT text
# in memory and still write tree.dot for external tooling.
dot_data = tree.export_graphviz(model, out_file=None,
                                feature_names=['SepaLengthCm', 'SepalWidthCm', 'PetalLengthCm', 'PetalWidthCm'],
                                class_names=['setosa', 'versicolor', 'virginica'],
                                filled=True, rounded=True,
                                special_characters=True)
with open('tree.dot', 'w') as dot_file:
    dot_file.write(dot_data)
graph = graphviz.Source(dot_data)
# Evaluate on the training data (no train/test split in this demo).
print(classification_report(y_data, model.predict(x_data)))
44687285803 | #############################################################################################################
# Regresión POS Check | Apache License 2.0 #
# Software de generación automática de documentación para Test de Regresión en dispositivos POS #
# Javier Bernal | 2023 #
# Source code: https://github.com/WrathfulNico/RegresionPOS-Check #
#############################################################################################################
import os
import shutil
from datetime import datetime
def TextoASCII():
    """Append the ASCII-art banner and the run header to ``reporte.txt``.

    Reads the banner from ``resources/ASCII.text`` next to the working
    directory.  Returns 0 on success.
    """
    mensajes = ["\n\n[Verificación de Casos de Prueba Regresión Express]\n"]
    # Bug fix: the original wrote ``file.close`` (attribute access, no call),
    # which never closed anything -- ``with`` closes the handles reliably.
    with open('.\\resources\ASCII.text', 'r') as file:
        ascii = file.read()
    with open('reporte.txt', 'a') as file:
        file.write(str(ascii + "\n"))
        for mensaje in mensajes:
            file.write("%s\n" % mensaje)
    return 0
def Documentacion():
    """Finalise the test run: stamp the report, archive results, open it.

    Appends the finish timestamp to ``reporte.txt``, renames it to a
    timestamped ``Reporte_*.txt``, moves generated ``.xlsx`` files (except
    MET001.xlsx) into a timestamped ``Ejecución_*`` folder, relocates both
    into ``./tests``, and opens the report in Notepad.  Returns 0.

    NOTE(review): paths use Windows separators and ``notepad.exe`` -- this
    function is Windows-only.
    """
    mensajes = []
    mensajes.append(f"Hora de finalización " + datetime.now().strftime("%Y-%m-%d_%H-%M-%S"))
    # Bug fix: the original followed this with a bare ``file.close`` (no
    # parentheses), a no-op; ``with`` already closes the handle.
    with open('reporte.txt', 'a') as file:
        file.write(str(mensajes))  # Imprime mensajes
    carpeta = datetime.now().strftime("Ejecución_%Y-%m-%d_%H-%M-%S")
    reporte = datetime.now().strftime("Reporte_%Y-%m-%d_%H-%M-%S")
    reporte = reporte + '.txt'
    ruta = os.getcwd()
    archivos = os.listdir(ruta)
    source = ".\\"
    destination = '.\\tests'
    os.mkdir(carpeta)
    os.rename('reporte.txt', reporte)  # Una vez cerrado el archivo será renombrado
    # Move the generated spreadsheets into the run folder.
    for archivo in archivos:
        if (archivo.endswith('.xlsx') and archivo != 'MET001.xlsx'):
            shutil.move(os.path.join(ruta, archivo), carpeta)
    if not os.path.exists("./tests"):
        os.makedirs("tests")  # Si no existe el directorio tests, el mismo será creado
    # Archive run folders and report files under ./tests.
    for foldername in os.listdir(source):
        if foldername.startswith('Ejecución'):
            shutil.move(os.path.join(source, foldername), destination)
    for filename in os.listdir(source):
        if filename.startswith('Reporte') and filename.endswith('.txt'):
            shutil.move(os.path.join(source, filename), destination)
    ruta_reporte = os.path.join('.\\tests\\', reporte)
    os.system('notepad.exe ' + ruta_reporte)
    return 0
| WrathfulNico/RegresionPOS-Check | modulos/FileScript.py | FileScript.py | py | 2,789 | python | es | code | 0 | github-code | 90 |
21996240118 | import argparse
from http.server import HTTPServer, BaseHTTPRequestHandler
import json
import os
import hashlib
import hmac
import traceback
# Load the hook configuration; the process cannot run without it.
config = {}
try:
    with open("./config.json") as file:
        config = json.load(file)
# NOTE(review): bare except also hides JSON syntax errors behind the
# "cannot find" message -- consider (OSError, json.JSONDecodeError).
except:
    print("Cannot find config.json!")
    exit(1)

# This module is a standalone daemon, not an importable library.
if (__name__ != "__main__"):
    exit(1)
def pull(repo):
    """Function called when the hook fires: hard-reset the checkout, git pull, and run the repo's configured command."""
    print("Hook called.")
    try:
        # NOTE(review): the command string is built from config values; a
        # malicious config could inject shell commands via os.system.
        os.system(f"cd {repo['directory']} && git reset --hard && git pull && {repo['command']}")
        print("Pulled and executed command!")
    # NOTE(review): os.system rarely raises -- failures surface via the
    # (ignored) exit status, so this handler almost never triggers.
    except:
        traceback.print_exc()
        print("Cannot execute command, check directory path or command. Check that you can git pull the given repo without credentials.")
class RequestHandler(BaseHTTPRequestHandler):
    """HTTP request handler that triggers a pull on valid GitHub push hooks."""

    def do_POST(self):
        """Handle a POST request: verify the GitHub HMAC signature, then pull.

        The original never sent any HTTP response, leaving GitHub's delivery
        hanging; respond 200/400 on /push and 404 elsewhere.
        """
        if (self.path == "/push"):
            try:
                length = int(self.headers["content-length"])
                body = self.rfile.read(length)
                message = json.loads(body)
                branch = message["ref"].replace("refs/heads/", "")
                for repo in config["repos"]:
                    if (repo["repo"] == message["repository"]["full_name"]):
                        hash = hmac.new(bytes(repo["secret"], "utf-8"), body, hashlib.sha1)
                        hash = hash.hexdigest()
                        hash_received = self.headers["X-Hub-Signature"].split("sha1=")[1]
                        # compare_digest is constant-time, preventing timing
                        # attacks on the webhook secret (was a plain ==).
                        if (branch == repo["branch"] and hmac.compare_digest(hash_received, hash)):
                            pull(repo)
                        else:
                            print("Wrong secret or branch.")
                self.send_response(200)
                self.end_headers()
            except Exception:
                traceback.print_exc()
                print("Cannot process request.")
                self.send_response(400)
                self.end_headers()
        else:
            self.send_response(404)
            self.end_headers()
# Bind on all interfaces, port 9999, and serve until killed.
server_address = ("0.0.0.0", 9999)
httpd = HTTPServer(server_address, RequestHandler)
print(f"Listening to push hooks on port 9999.")
httpd.serve_forever()
3302254507 | #!/usr/bin/python3
"""
unit test file for base module and its Base class
"""
import unittest
from models.base import Base
class TestBaseClass(unittest.TestCase):
    """
    class for testing Base class in base model
    """

    def test_module_documentation(self):
        """
        test checks for module documentation
        """
        # Bug fix: Base.__module__ is a *string*, so the original read
        # str.__doc__ (the docstring of the built-in str type), which is
        # always non-empty and made this test meaningless.  Look the actual
        # module up in sys.modules instead.
        import sys
        module = sys.modules[Base.__module__].__doc__
        self.assertTrue(len(module) > 1)

    def test_class_documentation(self):
        """
        test checks for class documentation
        """
        function_ = Base.__doc__
        self.assertTrue(len(function_) > 1)

    def test_Base_class(self):
        """
        test Base class auto-incrementing and explicit ids

        NOTE(review): assumes Base's id counter starts at 0 when this test
        runs; other tests instantiating Base first would break it.
        """
        b1 = Base()
        self.assertEqual(b1.id, 1)
        b2 = Base()
        self.assertEqual(b2.id, 2)
        b3 = Base()
        self.assertEqual(b3.id, 3)
        b4 = Base(12)  # explicit id must be used verbatim
        self.assertEqual(b4.id, 12)
        b5 = Base()  # explicit ids must not advance the counter
        self.assertEqual(b5.id, 4)
if __name__ == "__main__":
    unittest.main()  # discover and run the tests in this module
| amiresaye6/alx-higher_level_programming | 0x0C-python-almost_a_circle/tests/test_base.py | test_base.py | py | 1,020 | python | en | code | 0 | github-code | 90 |
38044266250 | # from: https://timcera.bitbucket.io/swmmtoolbox/docsrc/index.html
# https://bitbucket.org/timcera/swmmtoolbox/src/master/swmmtoolbox/swmmtoolbox.py
# copied to reduce dependencies
# ORIGINAL Author Tim Cera with BSD License
# Rewritten for custom use
# SWMM Version > 5.10.10
# Python Version >= 3.7
import copy
from os import remove
import datetime
import struct
from io import SEEK_END, SEEK_SET
from tqdm.auto import tqdm
from warnings import warn
from .definitions import OBJECTS, VARIABLES
from .._read_bin import BinaryReader
VARIABLES_DICT = {
OBJECTS.SUBCATCHMENT: VARIABLES.SUBCATCHMENT.LIST_,
OBJECTS.NODE : VARIABLES.NODE.LIST_,
OBJECTS.LINK : VARIABLES.LINK.LIST_,
OBJECTS.POLLUTANT : [],
OBJECTS.SYSTEM : VARIABLES.SYSTEM.LIST_,
}
_RECORDSIZE = 4
_FLOW_UNITS_METRIC = ['CMS', 'LPS', 'MLD']
_FLOW_UNITS_IMPERIAL = ['CFS', 'GPM', 'MGD']
_FLOW_UNITS = _FLOW_UNITS_IMPERIAL + _FLOW_UNITS_METRIC + [None]
_CONCENTRATION_UNITS = ['MG', 'UG', 'COUNTS']
_MAGIC_NUMBER = 516114522
_PROPERTY_LABELS = ['type', 'area', 'invert', 'max_depth', 'offset', 'length']
_NODES_TYPES = ['JUNCTION', 'OUTFALL', 'STORAGE', 'DIVIDER']
_LINK_TYPES = ['CONDUIT', 'PUMP', 'ORIFICE', 'WEIR', 'OUTLET']
class SwmmExtractValueError(Exception):
    """Raised when the .out file is malformed or internally inconsistent."""

    def __init__(self, message):
        # Frame the message with asterisk lines so it stands out in a traceback.
        super().__init__(f"\n*\n* {message}\n*\n")
class SwmmOutExtractWarning(UserWarning):
    """Warning category for recoverable problems in a SWMM .out file."""
    pass
class SwmmOutExtract(BinaryReader):
    """
    The class that handles all extraction of data from the out file.

    Attributes:
        flow_unit (str): Flow unit. One of ['CMS', 'LPS', 'MLD', 'CFS', 'GPM', 'MGD']
        labels (dict[str, list]): dictionary of the object labels as list (value) for each object type
            (keys are: ``'link'``, ``'node'``, ``'subcatchment'``)
        model_properties (dict[str, [dict[str, list]]]): property values for the subcatchments, nodes and links.
            The Properties for the objects are:

                - ``subcatchment``: [area]
                - ``node``: [type, invert, max. depth]
                - ``link``: type,
                  offsets [ht. above start node invert (ft), ht. above end node invert (ft)],
                  max. depth, length

        n_periods (int): number of periods (=index-values)
        pollutant_units (dict[str, str]): Units per pollutant.
        _pos_start_output (int): Start position of the data.
        report_interval (datetime.timedelta): Intervall of the index.
        start_date (datetime.datetime): Start date of the data.
        swmm_version (str): SWMM Version
        variables (dict[str, list]): variables per object-type inclusive the pollutants.
        fp (file-like): Stream of the open file.
        filename (str): Path to the .out-file.

    Args:
        filename (str): Path to the .out-file.
    """
    def __init__(self, filename):
        super().__init__(filename)

        # Closing records: the last 6 int32 of the file describe the layout.
        self.fp.seek(-6 * _RECORDSIZE, SEEK_END)
        (
            _pos_start_labels,  # starting file position of ID names
            _pos_start_input,  # starting file position of input data
            _pos_start_output,  # starting file position of output data
            _n_periods,  # Number of reporting periods
            error_code,
            magic_num_end,
        ) = self._next(6)

        # ____
        self.fp.seek(0, SEEK_SET)
        magic_num_start = self._next()

        self.run_failed = False
        # ____
        # check errors
        if magic_num_start != _MAGIC_NUMBER:
            raise SwmmExtractValueError('Beginning magic number incorrect.')

        if magic_num_end != _MAGIC_NUMBER:
            # File was truncated mid-run: continue best-effort and infer the
            # number of periods later instead of raising.
            warn('Ending magic number incorrect.', SwmmOutExtractWarning)
            _n_periods = 0
            self.run_failed = True
        elif error_code != 0:
            warn(f'Error code "{error_code}" in output file indicates a problem with the run.', SwmmOutExtractWarning)
            self.run_failed = True

        # ---
        # read additional parameters from start of file
        # Version number i.e. "51015"
        self.swmm_version, self.flow_unit, n_subcatch, n_nodes, n_links, n_pollutants = self._next(6)
        self.flow_unit = _FLOW_UNITS[self.flow_unit]

        # ____
        # Read in the names
        # get the dictionary of the object labels for each object type (link, node, subcatchment)
        self.labels = {}
        for kind, n in zip(OBJECTS.LIST_, [n_subcatch, n_nodes, n_links, n_pollutants, 0]):
            # each label is stored as (length, chars)
            self.labels[kind] = [self._next(n=self._next(), dtype='s') for _ in range(n)]

        # ____
        # Update variables to add pollutant names to subcatchment, nodes, and links.
        # get the dictionary of the object variables for each object type (link, node, subcatchment)
        self.variables = copy.deepcopy(VARIABLES_DICT)
        for kind in [OBJECTS.SUBCATCHMENT, OBJECTS.NODE, OBJECTS.LINK]:
            self.variables[kind] += self.labels[OBJECTS.POLLUTANT]

        # ____
        # System vars do not have names per se, but made names = number labels
        self.labels[OBJECTS.SYSTEM] = ['']

        # ____
        # Read codes of pollutant concentration UNITS = Number of pollutants * 4 byte integers
        _pollutant_unit_labels = [_CONCENTRATION_UNITS[p] if p < len(_CONCENTRATION_UNITS) else 'NaN'
                                  for p in self._next(n_pollutants, flat=False)]
        self.pollutant_units = dict(zip(self.labels[OBJECTS.POLLUTANT], _pollutant_unit_labels))

        # ____
        # property values for the subcatchments, nodes and links
        #   subcatchment: area
        #   node: type, invert, max. depth
        #   link: type, offsets (start/end ht. above node invert), max. depth, length
        self.model_properties = {}
        for kind in [OBJECTS.SUBCATCHMENT, OBJECTS.NODE, OBJECTS.LINK]:
            self.model_properties[kind] = {}
            # ------
            # read the property labels per object type
            property_labels = []
            for i in list(self._next(self._next(), flat=False)):
                property_label = _PROPERTY_LABELS[i]
                # links carry two "offset" properties; disambiguate the second
                if property_label in property_labels:
                    property_label += '_2'
                property_labels.append(property_label)
            # ------
            # read the values per object and per property
            for label in self.labels[kind]:
                self.model_properties[kind][label] = {}
                for property_label in property_labels:
                    value = self._next(dtype={'type': 'i'}.get(property_label, 'f'))
                    if property_label == 'type':
                        # map the integer type code to its readable name
                        value = {OBJECTS.NODE: _NODES_TYPES, OBJECTS.LINK: _LINK_TYPES}[kind][value]
                    self.model_properties[kind][label][property_label] = value

        # ____
        # double check variables: the file repeats the variable-code lists
        for kind in [OBJECTS.SUBCATCHMENT, OBJECTS.NODE, OBJECTS.LINK, OBJECTS.SYSTEM]:
            n_vars = self._next()
            assert n_vars == len(self.variables[kind])
            self._next(n_vars)

        # ____
        # SWMM stores dates as days since 1899-12-30 (Excel epoch convention).
        self.start_date = datetime.datetime(1899, 12, 30) + datetime.timedelta(days=self._next(dtype='d'))
        self.report_interval = datetime.timedelta(seconds=self._next())

        # ____
        self._bytes_per_period = self._infer_bytes_per_period()

        # ____
        self._pos_start_output = self.fp.tell()
        self.n_periods = _n_periods
        if _n_periods == 0:
            self._infer_n_periods()
            warn('Infer time periods of the output file due to an corrupt SWMM .out-file.', SwmmOutExtractWarning)

        if self.n_periods == 0:
            warn('There are zero time periods in the output file.', SwmmOutExtractWarning)

    def __repr__(self):
        return f'SwmmOutExtract(file="{self.filename}")'

    def _infer_bytes_per_period(self):
        """
        Calculate the bytes for each time period when reading the computed results

        Returns:
            int: bytes per period
        """
        _bytes_per_period = 2  # for the datetime (a float64 = two records)
        for obj in [OBJECTS.SUBCATCHMENT, OBJECTS.NODE, OBJECTS.LINK]:
            _bytes_per_period += len(self.variables[obj]) * len(self.labels[obj])
        _bytes_per_period += len(self.variables[OBJECTS.SYSTEM])
        _bytes_per_period *= _RECORDSIZE
        return _bytes_per_period

    def _get_selective_results(self, columns):
        """
        get results of selective columns in .out-file

        this function is due to its iterative reading slow,
        but has it advantages with out-files with many columns (>1000) and fewer time-steps

        Args:
            columns (list[tuple]): list of column identifier tuple with [(kind, label, variable), ...]

        Returns:
            dict[str, list]: dictionary where keys are the column names ('/' as separator) and values are the list of result values
        """
        n_vars_subcatch = len(self.variables[OBJECTS.SUBCATCHMENT])
        n_vars_node = len(self.variables[OBJECTS.NODE])
        n_vars_link = len(self.variables[OBJECTS.LINK])

        n_subcatch = len(self.labels[OBJECTS.SUBCATCHMENT])
        n_nodes = len(self.labels[OBJECTS.NODE])
        n_links = len(self.labels[OBJECTS.LINK])

        offset_list = []
        values = {}

        # Pre-compute the byte offset of every requested column inside one
        # period record (2 records of datetime come first, then all
        # subcatchment values, all node values, all link values, system).
        for kind, label, variable in columns:
            values['/'.join([kind, label, variable])] = []
            index_kind = OBJECTS.LIST_.index(kind)
            index_variable = self.variables[kind].index(variable)
            item_index = self.labels[kind].index(str(label))

            offset_list.append((2 + index_variable + {
                0: (item_index * n_vars_subcatch),
                1: (n_subcatch * n_vars_subcatch +
                    item_index * n_vars_node),
                2: (n_subcatch * n_vars_subcatch +
                    n_nodes * n_vars_node +
                    item_index * n_vars_link),
                4: (n_subcatch * n_vars_subcatch +
                    n_nodes * n_vars_node +
                    n_links * n_vars_link)
            }[index_kind])*_RECORDSIZE)

        iter_label_offset = tuple(zip(values.keys(), offset_list))

        for period_offset in tqdm(range(self._pos_start_output,  # start
                                        self._pos_start_output + self.n_periods * self._bytes_per_period,  # stop
                                        self._bytes_per_period),
                                  desc=f'{repr(self)}.get_selective_results(n_cols={len(columns)})'):  # step
            for label, offset in iter_label_offset:
                self._set_position(offset + period_offset)
                values[label].append(self._next_float())

        return values

    def _infer_n_periods(self):
        """Count readable periods by scanning forward until a read fails (truncated/corrupt file)."""
        not_done = True
        period = 0
        while not_done:
            self.fp.seek(self._pos_start_output + period * self._bytes_per_period, SEEK_SET)
            try:
                dt = self._next(dtype='d')
                period += 1
            # A failed read marks the end of the usable data.  The original
            # used a bare ``except:`` which also swallowed
            # KeyboardInterrupt/SystemExit.
            except Exception:
                not_done = False
        # Discard the final period: it may be only partially written in a
        # truncated file.  NOTE(review): if the last period is complete this
        # undercounts by one -- confirm against SWMM's writer behaviour.
        self.n_periods = period - 1
| michaeltryby/swmm_api | swmm_api/output_file/extract.py | extract.py | py | 12,685 | python | en | code | 1 | github-code | 90 |
class Solution:
    """LeetCode 929: count distinct normalized email addresses."""

    def numUniqueEmails(self, emails: List[str]) -> int:
        """Normalize each address and count the unique (local, domain) pairs.

        Rules applied to the local part only: everything after the first
        '+' is dropped, then all '.' characters are removed.  The domain is
        kept as-is.
        """
        seen = set()
        for address in emails:
            local, domain = address.split("@")
            local = local.split("+")[0].replace(".", "")
            seen.add((local, domain))
        return len(seen)
##emails = ["test.email+foo@example.com", ##"test.email.bar@example.com", "test.email@example.com"]
##solution = Solution()
##result = solution.numUniqueEmails(emails)
##print(result)
| mohdabdulrahman297/Leetcode | 0929-unique-email-addresses/0929-unique-email-addresses.py | 0929-unique-email-addresses.py | py | 779 | python | en | code | 0 | github-code | 90 |
def even_odd(num):
    """Return True when *num* is even, False when it is odd."""
    return num % 2 == 0
if __name__ == '__main__':
    # Interactive entry point: read one integer and report its parity.
    num = int(input('Enter a number to check whether it is even or odd : '))
    if even_odd(num):
        print(f'{num} is Even')
    else:
        print(f'{num} is Odd')
| KumarSantosh22/Python-Programs | even_odd.py | even_odd.py | py | 275 | python | en | code | 0 | github-code | 90 |
41863485286 | #! /usr/bin/python
from tempfile import mkstemp
from shutil import move
from os import remove, close
import re
import sys
def replace(file, pattern, subst):
    """Rewrite *file* in place, replacing every regex *pattern* match with *subst*.

    Works line by line through a temporary file, then swaps it over the
    original, so a crash mid-write cannot truncate the target file.

    Args:
        file: path of the file to rewrite.
        pattern: regular expression applied to each line.
        subst: replacement string (may use backrefs).
    """
    print(file)
    # Create temp file
    fh, abs_path = mkstemp()
    # Context managers guarantee both handles are closed even if re.sub or
    # the write raises (the original's explicit close() calls leaked then).
    with open(abs_path, 'w') as new_file, open(file) as old_file:
        for line in old_file:
            new_file.write(re.sub(pattern, subst, line))
    # close the raw descriptor from mkstemp before replacing the file
    close(fh)
    # Remove original file and move the rewritten copy into its place
    remove(file)
    move(abs_path, file)
# Main program
if (len(sys.argv) == 1):
    sys.exit("Specify the version. Example: %s 1.0.0.0\n" % sys.argv[0])
version = sys.argv[1]
# VERSION_NUMBER uses comma separators (resource-file format), e.g. 1,0,0,0
versionnumber = re.sub("\.", ",", version)
print ("Setting the following files to use version %s\n" % version)
# Stamp the version into every build artifact that embeds it.
replace("../ohipsfs/firststage.rc", "#define VERSION_NUMBER [\d\,]*", "#define VERSION_NUMBER %s" % versionnumber);
replace("../ohipsfs/firststage.rc", "#define VERSION_STRING \"[\d\.]*\"", "#define VERSION_STRING \"%s\"" % version);
replace("../ohipsp/protector.rc", "#define VERSION_NUMBER [\d\,]*", "#define VERSION_NUMBER %s" % versionnumber);
replace("../ohipsp/protector.rc", "#define VERSION_STRING \"[\d\.]*\"", "#define VERSION_STRING \"%s\"" % version);
replace("../installer/installer.wxs", " Version=\"[\d\.]*\"", " Version=\"%s\"" % version);
replace("../ohipssvc/Properties/AssemblyInfo.cs", "\[assembly: AssemblyVersion(\"[\d\.]*\")\]", "\[assembly: AssemblyVersion(\"%s\")\]" % version);
replace("../ohipssvc/Properties/AssemblyInfo.cs", "\[assembly: AssemblyFileVersion(\"[\d\.]*\")\]", "\[assembly: AssemblyFileVersion(\"%s\")\]" % version);
replace("../ohipsui/Properties/AssemblyInfo.cs", "\[assembly: AssemblyVersion(\"[\d\.]*\")\]", "\[assembly: AssemblyVersion(\"%s\")\]" % version);
replace("../ohipsui/Properties/AssemblyInfo.cs", "\[assembly: AssemblyFileVersion(\"[\d\.]*\")\]", "\[assembly: AssemblyFileVersion(\"%s\")\]" % version);
replace("../ohipsui/TrayIcon.cs", "private string szVersion = \"[\d\.]*\";", "private string szVersion = \"%s\";" % version);
71105527657 | import argparse
import sys
import cv2
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from src.sudoku.sudoku import SudokuSolver
from src.sudoku_translator import SudokuTranlator
from src.regional_proposal.regional_proposal import RpMser
from src.utils.utils import draw_bboxes, draw_digits, remove_overlapped_bboxes
detector_model_path = "./model/digit_detector.h5"
classifier_model_path = "./model/digit_classifier.h5"
# Load both Keras models once at import time so main() can reuse them.
detector_model = tf.keras.models.load_model(detector_model_path)
classifier_model = tf.keras.models.load_model(classifier_model_path)
def parse_arguments(argv):
    """Parse command-line arguments for the sudoku solver.

    Args:
        argv: argument list excluding the program name (e.g. ``sys.argv[1:]``).

    Returns:
        argparse.Namespace with the ``img_path`` attribute.
    """
    parser = argparse.ArgumentParser(description="CV-based sudoku solver")
    parser.add_argument("img_path", type=str)
    # Bug fix: the original called parse_args() with no arguments, which
    # silently ignored *argv* and re-read sys.argv itself.
    return parser.parse_args(argv)
def main(args):
    """Detect, classify and solve a sudoku from a photo, then display it.

    Pipeline: grayscale -> MSER region proposals -> digit/non-digit
    detection -> digit classification -> backtracking solve -> overlay.
    """
    img = args.img_path
    if isinstance(img, str):
        img = cv2.imread(img)
    # Turn img into grayscale
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # Get regional proposal from MSER algorithm
    mser = RpMser()
    bboxes = mser.get_bboxes(gray)
    # Remove some overlapped rps
    bboxes = mser.remove_overlap_rp(bboxes, 0.2)
    rps = mser.get_cropped_rps(gray.copy(), bboxes)
    resized_rps = []
    for rp in rps:
        # 32x32 is presumably the models' expected input size — TODO confirm
        resized_rp = cv2.resize(rp, (32, 32))
        resized_rps.append(resized_rp)
    resized_rps = np.expand_dims(resized_rps, axis=-1)
    # Detect rps containing digits
    digits_bb = detector_model.predict(resized_rps)
    # Filter out non-digit rps
    cls_bb = np.array(bboxes)[digits_bb[:, 0] == 1, :]
    # Again, Remove overlapped rps
    cls_bb = remove_overlapped_bboxes(list(cls_bb))
    rps = mser.get_cropped_rps(gray.copy(), cls_bb)
    resized_rps = []
    for rp in rps:
        r = cv2.resize(rp, (32, 32))
        resized_rps.append(r)
    resized_rps = np.expand_dims(resized_rps, axis=-1)
    sudoku_digits = classifier_model.predict(resized_rps)
    st = SudokuTranlator()
    # argmax over class probabilities -> predicted digit per box
    sudoku = st.translate_sudoku(cls_bb, np.argmax(sudoku_digits, axis=1))
    # Solve the sudoku
    s = SudokuSolver(sudoku=sudoku.copy())
    s.sudoku_solver_backtrack(0, 0)
    print(s.sudoku)
    solution_img = st.fill_sudoku(img.copy(), s.sudoku)
    fig = plt.figure(figsize=(8, 8))
    plt.axis("off")
    plt.imshow(solution_img)
    plt.show()
if __name__ == "__main__":
    main(parse_arguments(sys.argv[1:]))
| ZequnZ/CV-based-sudoku-solver | cv_sudoku_solver.py | cv_sudoku_solver.py | py | 2,364 | python | en | code | 0 | github-code | 90 |
18203003869 | N=int(input())
# Read the values and sort them in descending order (largest factors first,
# so an overflow past 10**18 is detected as early as possible).
values = sorted(map(int, input().split()), reverse=True)
if 0 in values:
    # Any zero factor makes the whole product zero.
    print(0)
else:
    LIMIT = 10 ** 18
    product = 1
    for i in range(N):
        product *= values[i]
        if product > LIMIT:
            # Exceeded the allowed bound: report -1 and stop.
            print(-1)
            break
        if i == N - 1:
            print(product)
18214460579 | n,m,X=map(int,input().split())
# One row per book: [cost, skill_1, ..., skill_m].
books = [list(map(int, input().split())) for _ in range(n)]
INF = float('inf')
best = INF
from itertools import combinations
# Brute force: try every non-empty subset of books.
for size in range(1, n + 1):
    for chosen in combinations(range(n), size):
        gained = [0] * m
        cost = 0
        for b in chosen:
            cost += books[b][0]
            for s in range(m):
                gained[s] += books[b][s + 1]
        # Feasible when every skill reaches the target level X.
        if min(gained) >= X:
            best = min(best, cost)
print(best if best != INF else -1)
9476148828 | from pyDatalog import pyDatalog
# Closed-form sum of 0..999999 (arithmetic series); float under Python 3's /.
summ = ((0 + 999999) * 1000000) / 2
# Median of 0..100 — presumably intended as 50; note 100 / 2 is a float.
median = 100 / 2
# Declare pyDatalog logic variables and the factorial predicate.
pyDatalog.create_terms('Sum_n, Avg, Median, Prod_n')
pyDatalog.create_terms('factorial, N')
# Recursive definition: factorial[N] = N * factorial[N-1], base case 1.
factorial[N] = N * factorial[N - 1]
factorial[1] = 1
# Query: bind Sum_n/Avg/Median simultaneously and print the solutions.
print((Sum_n == summ)&(Avg == Sum_n/1000000)&(Median == median))
print()
# Query 100! via the recursive predicate.
print(Prod_n == factorial[100])
| bolotovmark/Labs_Python_PSTU | lab3/main.py | main.py | py | 338 | python | en | code | 1 | github-code | 90 |
69819815657 | #3.5 – Alterando a lista de convidados: Você acabou de saber que um de seus
# convidados não poderá comparecer ao jantar, portanto será necessário enviar um
# novo conjunto de convites. Você deverá pensar em outra pessoa para convidar.
# • Comece com seu programa do Exercício 3.4. Acrescente uma instrução print
# no final de seu programa, especificando o nome do convidado que não poderá comparecer.
# • Modifique sua lista, substituindo o nome do convidado que não poderá
# comparecer pelo nome da nova pessoa que você está convidando.
# • Exiba um segundo conjunto de mensagens com o convite, uma para cada
# pessoa que continua presente em sua lista.
guest_list = ['philipe', 'edee', 'joao', 'paulo', 'luan']
convite = "Social com os amigos "
message = "Podemos confirmar sua presença "

# Send the first round of invitations.
# Fix: the original used `list` as the loop variable, shadowing the builtin.
for guest in guest_list:
    print(convite + message + guest.title() + "?")

# The last guest cannot attend; announce it and add a replacement.
popped_guest_list = guest_list.pop()
print("O " + popped_guest_list.title() + " não podera comparecer")
guest_list.insert(4, 'matheos')

# Send the second round of invitations to the updated list.
for guest in guest_list:
    print(convite + guest.title())
| CarolinaRodrigues/curso_intensivo_de_python_uma_eric_mat | Exercicos/03.Introdução_as_listas/3.5.py | 3.5.py | py | 1,094 | python | pt | code | 0 | github-code | 90 |
23005084113 | from pandas import read_csv
import numpy as np
from pandas import DataFrame as df
import pandas as pd
import os, csv
import matplotlib.pyplot as plt
# Global matplotlib styling: Times New Roman everywhere, tiered font sizes.
csfont = {'fontname': 'Times New Roman'}
plt.rcParams["font.family"] = "Times New Roman"
SMALL_SIZE = 14
MEDIUM_SIZE = 16
BIGGER_SIZE = 18
TITLE_SIZE = 20
plt.rc('font', size=SMALL_SIZE)  # controls default text sizes
plt.rc('axes', titlesize=SMALL_SIZE)  # fontsize of the axes title
plt.rc('axes', labelsize=MEDIUM_SIZE)  # fontsize of the x and y labels
plt.rc('xtick', labelsize=SMALL_SIZE)  # fontsize of the tick labels
plt.rc('ytick', labelsize=SMALL_SIZE)  # fontsize of the tick labels
plt.rc('legend', fontsize=SMALL_SIZE)  # legend fontsize
plt.rc('figure', titlesize=BIGGER_SIZE)
# Load the experiment results CSV from the current working directory.
# NOTE(review): raw string with a backslash — Windows-only path separator.
path = os.getcwd() + r"\vysledky.csv"
data = pd.read_csv(path, encoding='cp1252')
# Column extraction by position — assumes the fixed vysledky.csv layout.
names = data.iloc[:, 0]
sex = data.iloc[:, 1]
gamer = data.iloc[:, 2]
mean_games = data.iloc[:, 3]
first = data.iloc[:, 4]
second = data.iloc[:, 5]
third = data.iloc[:, 6]
fourth = data.iloc[:, 7]
fifth = data.iloc[:, 8]
on_green = data.iloc[:, 9]
on_red = data.iloc[:, 10]
on_ambulance = data.iloc[:, 11]
# Boxplot: mean reaction time by sex.
men_mean = mean_games.loc[sex == "M"]
women_mean = mean_games.loc[sex == "F"]
sex = [men_mean, women_mean]
fig = plt.figure(1)  # NOTE(review): `fig` is never used afterwards
plt.boxplot(sex)
plt.title("Průměrná reakční doba ve hře", fontweight='bold', fontsize=TITLE_SIZE)
plt.xticks([1, 2], ['Muži', 'Ženy'])
plt.xlabel("Pohlaví")
plt.ylabel("Reakční doba [s]")
plt.savefig('Pohlavi.png', dpi=300)
plt.show()
# gamer: gamers vs non-gamers
gamers = data["Mean"][(data["Gamer"] == True)]
no_gamers = data["Mean"][(data["Gamer"] == False)]
gaming = [gamers, no_gamers]
plt.boxplot(gaming)
plt.title("Průměrná reakční doba ve hře", fontweight='bold', fontsize=TITLE_SIZE)
plt.xticks([1, 2], ['ANO', 'NE'])
plt.xlabel("Hráč počítačových her")
plt.ylabel("Reakční doba [s]")
plt.savefig('Hraci.png', dpi=300)
plt.show()
# Gamers vs non-gamers, men only.
men_gamers = data["Mean"][(data["Gamer"] == True) & (data["Sex"] == "M")]
men_no_gamers = data["Mean"][(data["Gamer"] == False) & (data["Sex"] == "M")]
gaming = [men_gamers, men_no_gamers]
plt.boxplot(gaming)
plt.title("Průměrná reakční doba ve hře - muži", fontweight='bold', fontsize=TITLE_SIZE)
plt.xticks([1, 2], ['ANO', 'NE'])
plt.xlabel("Hráč počítačových her")
plt.ylabel("Reakční doba [s]")
plt.savefig('Hraci_muzi.png', dpi=300)
plt.show()
# Gamers vs non-gamers, women only.
women_gamers = data["Mean"][(data["Gamer"] == True) & (data["Sex"] == "F")]
women_no_gamers = data["Mean"][(data["Gamer"] == False) & (data["Sex"] == "F")]
gaming = [women_gamers, women_no_gamers]
plt.boxplot(gaming)
plt.title("Průměrná reakční doba ve hře - ženy", fontweight='bold', fontsize=TITLE_SIZE)
plt.xticks([1, 2], ['ANO', 'NE'])
plt.xlabel("Hráč počítačových her")
plt.ylabel("Reakční doba [s]")
plt.savefig('Hraci_zeny.png', dpi=300)
plt.show()
# Interleave green/red reaction times into one sample vs. the ambulance cue.
mean_red_green = []
for i in range(len(on_green)):
    mean_red_green.append(on_green[i])
    mean_red_green.append(on_red[i])
item = [mean_red_green, on_ambulance]
plt.boxplot(item)
plt.title("Průměrná reakční doba pro rozdílné podněty", fontweight='bold', fontsize=TITLE_SIZE)
plt.xticks([1, 2], ['Změna barvy', 'Zobrazení vozu záchranné služby'])
plt.xlabel("Druh podnětu")
plt.ylabel("Reakční doba [s]")
plt.savefig('podnety.png', dpi=300)
plt.show()
# Green-light vs red-light change.
color = [on_green, on_red]
plt.boxplot(color)
plt.title("Průměrná reakční doba při změně barvy semaforu", fontweight='bold', fontsize=TITLE_SIZE)
plt.xticks([1, 2], ['Na zelenou', 'Na červenou'])
plt.xlabel("Změna barvy")
plt.ylabel("Reakční doba [s]")
# plt.savefig('barva.png', dpi=300)
plt.show()
# Per-game comparison (was: "nejlepší" = best).
compare = [first, second, third, fourth, fifth]
plt.boxplot(compare)
plt.title("Průměrná reakční doba v jedtnotlivých hrách", fontweight='bold', fontsize=TITLE_SIZE)
plt.xticks([1, 2, 3, 4, 5], ['První', 'Druhá', 'Třetí', 'Čtvrtá', 'Pátá'])
plt.xlabel("Pořadí hry")
plt.ylabel("Reakční doba [s]")
plt.savefig('poradi.png', dpi=300)
plt.show()
# Improvement between first and fifth game (was: "zlepšení").
gamers_first_game = data["First game"][(data["Gamer"] == True)]
gamers_fifth_game = data["Fifth game"][(data["Gamer"] == True)]
no_gamers_first_game = data["First game"][(data["Gamer"] == False)]
no_gamers_fifth_game = data["Fifth game"][(data["Gamer"] == False)]
diff_gamers = gamers_first_game - gamers_fifth_game
diff_no_gamers = no_gamers_first_game - no_gamers_fifth_game
compare = [diff_gamers, diff_no_gamers]
plt.boxplot(compare)
plt.title("Průměrné zlepšení reakční doby", fontweight='bold', fontsize=TITLE_SIZE)
plt.xticks([1, 2], ['ANO', 'NE'])
plt.xlabel("Hráč počítačových her")
plt.ylabel("Zlepšení reakčních časů [s]")
plt.savefig('zlepseni.png', dpi=300)
plt.show()
# Gamers: men vs women (was: "hraci - muzi vs zeny").
gamers_men = data["Mean"][(data["Gamer"] == True) & (data["Sex"] == "M")]
gamers_women = data["Mean"][(data["Gamer"] == True) & (data["Sex"] == "F")]
gaming = [gamers_men, gamers_women]
plt.boxplot(gaming)
plt.title("Průměrná reakční doba ve hře - jedinci hrající hry", fontweight='bold', fontsize=TITLE_SIZE)
plt.xticks([1, 2], ['Muži', 'Ženy'])
plt.xlabel("Pohlaví")
plt.ylabel("Reakční doba [s]")
plt.savefig('gamers.png', dpi=300)
plt.show()
# Non-gamers: men vs women (was: "nehraci - muzi vs zeny").
no_gamers_men = data["Mean"][(data["Gamer"] == False) & (data["Sex"] == "M")]
no_gamers_women = data["Mean"][(data["Gamer"] == False) & (data["Sex"] == "F")]
gaming = [no_gamers_men, no_gamers_women]
plt.boxplot(gaming)
plt.title("Průměrná reakční doba ve hře - jedinci nehrající hry", fontweight='bold', fontsize=TITLE_SIZE)
plt.xticks([1, 2], ['Muži', 'Ženy'])
plt.xlabel("Pohlaví")
plt.ylabel("Reakční doba [s]")
plt.savefig('nogamers.png', dpi=300)
plt.show()
| VeselaCindy/Bitalino | results/graphs.py | graphs.py | py | 5,716 | python | cs | code | 0 | github-code | 90 |
7106899128 | # data preparation workflow
# use pdb files in NR_LH_Protein_Martin
# identify intearcting residues by a cutoff: 5 (or any other number)
# output a csv file.
# import stuff
from abdb import *
import sys
import os
from find_files import find_files
import numpy as np
# create outdir
# Output directory for all derived CSV files.
outpath = 'abdb_outfiles_2019'
# Bug fix: the original gated directory creation on os.path.isfile() (a *file*
# check) and shelled out via os.system('mkdir ...'); create it directly.
os.makedirs(outpath, exist_ok=True)
# define a cutoff (interaction distance; presumably angstroms — TODO confirm)
cutoff = 5
# examine median resolution
def get_median_resolution():
    '''
    Print the median X-ray resolution over all structures in the final
    merged dataset, together with the total number of structures.

    Reads the merged per-pair CSV, then scans the header of each
    corresponding PDB file for its RESOLUTION record.
    '''
    infile = 'abdb_outfiles_2019/respairs_segment_notationx_len_merged_angle_bnaber_phil_pc.csv'
    df = pd.read_csv(infile)
    print(df.head())
    pdbids = df.pdbid.unique()
    print(len(pdbids))
    resolutions = []
    for pdbid in pdbids:
        # NOTE(review): hard-coded absolute path to the PDB directory.
        pdbfile = '/Users/rahmadakbar/greifflab/aims/aimugen/datasets/NR_LH_Protein_Martin/' + pdbid + '.pdb'
        contents = open(pdbfile).read().splitlines()
        # The RESOLUTION record sits within the first few header lines.
        for content in contents[:10]:
            if 'RESOLUTION' in content:
                parts = content.split()
                resolution = float(parts[-1])
                resolutions.append(resolution)
    # Cleanup: the original also computed a rounded mean that was never used.
    median_resolution = np.median(resolutions)
    print('Median resolution %s, total structures %s' % (median_resolution, len(resolutions)))
# #start
# # get pdb with single antigen
# single_antigens = get_single_antigens()
# # sort by antibody
# absorted, agsorted = get_residue_pairs_ab2(single_antigens[:], outpath, cutoff)
# # account for inserted residues
# abinsert = get_unique_abresnumi(absorted,outpath)
# # add segments based on Martin numbering
# absegment = add_segments(abinsert, outpath)
# # add shift
# abshift = add_abshift(absegment,outpath)
# # add shifft loop wise
# abshiftloop = add_abshiftl(abshift,outpath)
# #get gap patterns data
# gap_patterns = get_numgaps_segment(abshiftloop,outpath)
# # add shift to ag
# add_agshift(abshiftloop, outpath)
# ## per segment run
# segment_files = find_files(outpath, 'segment.csv')
# segment_files = [item for item in segment_files if 'abshift' in item and str(cutoff) in item] # filter for preprocessed
# # make gap dataset
# for segment_file in segment_files[:]:
# make_gap_dataset(segment_file, outpath)
# batch_add_notationx(segment_files)
# # ouput separate count data for gaps
# notationx_files = find_files('abdb_outfiles_2019', 'notationx.csv')
# notationx_files = [item for item in notationx_files if str(cutoff) in item]
# batch_add_gap_count_data(notationx_files)
# Entry point: report the dataset's median structure resolution.
get_median_resolution()
| GreiffLab/manuscript_ab_epitope_interaction | src/abdb_prepdata_main_fig1.py | abdb_prepdata_main_fig1.py | py | 2,605 | python | en | code | 20 | github-code | 90 |
20359697916 | import unittest
from ttp_tools.ttp_util import allow_override, subclass, extend_class
# Copyright 2019 Richard Sanger, Wand Network Research Group
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class TestExtendClass(unittest.TestCase):
    """Unit tests for ttp_tools.ttp_util's class-extension helpers.

    Each test builds a throwaway `base` class in setUp, then exercises
    @extend_class (monkey-patch a class in place), allow_override (permit
    replacing an existing attribute) and @subclass (wrap the original
    implementation, receiving it as the first argument).
    """
    def setUp(self):
        # Fresh base class per test so extensions never leak between tests.
        class base(object):
            req = None
            overwrite_me = "base_overwrite"
            def __init__(self, req):
                self.req = req
            def method(self, items):
                items.append("base:method")
                items.append(self)
                return items
            def method2(self):
                items = []
                items.append("base:method2")
                items.append(self)
                return items
            @staticmethod
            def static_m(items):
                items.append("base:static_m")
                return items
            @classmethod
            def class_m(cls, items):
                items.append("base:class_m")
                items.append(cls)
                return items
        self.base = base
    def test_base_works(self):
        r = 'test_base_works'
        i = self.base(r)
        # Test req on the class
        self.assertIs(self.base.req, None)
        # Test req on the instance
        self.assertEqual(i.req, r)
        # Test method
        self.assertListEqual(i.method([r]),
                             [r, "base:method", i])
        # Test static_m of the class
        self.assertListEqual(self.base.static_m([r]),
                             [r, "base:static_m"])
        # Test static_m of an instance
        self.assertListEqual(i.static_m([r]),
                             [r, "base:static_m"])
        # Test classmethod of the class
        self.assertListEqual(self.base.class_m([r]),
                             [r, "base:class_m", self.base])
        # Test classmethod of an instance
        self.assertListEqual(i.class_m([r]),
                             [r, "base:class_m", self.base])
    def test_add_new_attribute(self):
        self.assertFalse(hasattr(self.base, 'new_attr'))
        @extend_class
        class tmp(self.base):
            new_attr = 'new_value'
        i = self.base(None)
        # Check the class
        self.assertEqual(self.base.new_attr, 'new_value')
        # Check the instance
        self.assertEqual(i.new_attr, 'new_value')
    def test_extending_empty_class(self):
        @extend_class
        class tmp(self.base):
            pass
        # Check this returns tmp as self.base
        self.assertEqual(tmp, self.base)
        # Make sure nothing has changed
        self.test_base_works()
    def test_multiple_extensions(self):
        class other(object):
            pass
        @extend_class
        class tmp(self.base, other):
            new_attr = 'new_value'
        # Check this returns the first
        self.assertEqual(tmp, self.base)
        # Check new_attr has been added
        self.assertEqual(self.base.new_attr, 'new_value')
        self.assertEqual(other.new_attr, 'new_value')
    def test_fails_override_attribute(self):
        # Overriding an existing attribute without permission must raise.
        with self.assertRaises(TypeError):
            @extend_class
            class tmp(self.base):
                overwrite_me = 'I already exist and should fail'
        with self.assertRaises(TypeError):
            @extend_class
            class tmp2(self.base):
                def __init__(self, r):
                    self.r = r
    def test_override_list(self):
        # Note this still works in the case an override is
        # not required
        self.assertFalse(hasattr(self.base, 'new_attr'))
        self.assertEqual(self.base.overwrite_me, 'base_overwrite')
        @extend_class('new_attr', 'overwrite_me')
        class tmp(self.base):
            new_attr = 'new_value'
            overwrite_me = 'ex_overwrite'
        i = self.base(None)
        # Check the class
        self.assertEqual(self.base.new_attr, 'new_value')
        self.assertEqual(self.base.overwrite_me, 'ex_overwrite')
        # Check the instance
        self.assertEqual(i.new_attr, 'new_value')
        self.assertEqual(i.overwrite_me, 'ex_overwrite')
    def test_allow_override_decorator(self):
        # Note this still works in the case an override is
        # not required
        self.assertFalse(hasattr(self.base, 'new_attr'))
        self.assertEqual(self.base.overwrite_me, 'base_overwrite')
        @extend_class
        class tmp(self.base):
            new_attr = allow_override('new_value')
            overwrite_me = allow_override('ex_overwrite')
        i = self.base(None)
        # Check the class
        self.assertEqual(self.base.new_attr, 'new_value')
        self.assertEqual(self.base.overwrite_me, 'ex_overwrite')
        # Check the instance
        self.assertEqual(i.new_attr, 'new_value')
        self.assertEqual(i.overwrite_me, 'ex_overwrite')
        @extend_class
        class tmp2(self.base):
            @allow_override
            @staticmethod
            def overwrite_me():
                return 5
        self.assertEqual(self.base.overwrite_me(), 5)
        self.assertEqual(i.overwrite_me(), 5)
    def test_subclass_instance_method(self):
        # Here we check a second to ensure the func
        # has bound correctly and that method and method2
        # are not accidentally mapped to each only one
        r = 'test_subclass_instance_method'
        @extend_class
        class tmp(self.base):
            @subclass
            def method(base, self, items):
                items.append("ex:method")
                items.append(self)
                return base(self, items)
            @subclass
            def method2(base, self):
                items = []
                items.append("ex:method2")
                items.append(self)
                return items + base(self)
        i = self.base(None)
        # Test method
        self.assertListEqual(i.method([r]),
                             [r, "ex:method", i, "base:method", i])
        self.assertListEqual(i.method2(),
                             ["ex:method2", i, "base:method2", i])
    def test_subclass_static_method(self):
        r = 'test_subclass_static_method'
        @extend_class
        class tmp(self.base):
            @subclass
            @staticmethod
            def static_m(base, items):
                items.append("ex:static_m")
                return base(items)
        i = self.base(None)
        # Test static_m of the class
        self.assertListEqual(self.base.static_m([r]),
                             [r, "ex:static_m", "base:static_m"])
        # Test static_m of an instance
        self.assertListEqual(i.static_m([r]),
                             [r, "ex:static_m", "base:static_m"])
    def test_subclass_class_method(self):
        r = 'test_subclass_class_method'
        @extend_class
        class tmp(self.base):
            @subclass
            @classmethod
            def class_m(base, cls, items):
                items.append("ex:class_m")
                items.append(cls)
                return base(cls, items)
        i = self.base(None)
        self.assertEqual(self.base, tmp)
        # Test classmethod of the class
        self.assertListEqual(self.base.class_m([r]),
                             [r, "ex:class_m", tmp, "base:class_m", tmp])
        # Test classmethod of an instance
        self.assertListEqual(i.class_m([r]),
                             [r, "ex:class_m", tmp, "base:class_m", tmp])
    def test_allow_override_all_methods(self):
        r = 'test_override_all_methods'
        @extend_class
        class tmp(self.base):
            @allow_override
            def method(self, items):
                items.append("ex:method")
                items.append(self)
                return items
            @allow_override
            @staticmethod
            def static_m(items):
                items.append("ex:static_m")
                return items
            @allow_override
            @classmethod
            def class_m(cls, items):
                items.append("ex:class_m")
                items.append(cls)
                return items
        i = self.base(None)
        # Test method
        self.assertListEqual(i.method([r]),
                             [r, "ex:method", i])
        # Test static_m of the class
        self.assertListEqual(self.base.static_m([r]),
                             [r, "ex:static_m"])
        # Test static_m of an instance
        self.assertListEqual(i.static_m([r]),
                             [r, "ex:static_m"])
        # Test classmethod of the class
        self.assertListEqual(self.base.class_m([r]),
                             [r, "ex:class_m", self.base])
        # Test classmethod of an instance
        self.assertListEqual(i.class_m([r]),
                             [r, "ex:class_m", self.base])
    def test_override_list_all_methods(self):
        r = 'test_override_list_all_methods'
        @extend_class('method', 'static_m', 'class_m')
        class tmp(self.base):
            def method(self, items):
                items.append("ex:method")
                items.append(self)
                return items
            @staticmethod
            def static_m(items):
                items.append("ex:static_m")
                return items
            @classmethod
            def class_m(cls, items):
                items.append("ex:class_m")
                items.append(cls)
                return items
        i = self.base(None)
        # Test method
        self.assertListEqual(i.method([r]),
                             [r, "ex:method", i])
        # Test static_m of the class
        self.assertListEqual(self.base.static_m([r]),
                             [r, "ex:static_m"])
        # Test static_m of an instance
        self.assertListEqual(i.static_m([r]),
                             [r, "ex:static_m"])
        # Test classmethod of the class
        self.assertListEqual(self.base.class_m([r]),
                             [r, "ex:class_m", self.base])
        # Test classmethod of an instance
        self.assertListEqual(i.class_m([r]),
                             [r, "ex:class_m", self.base])
    def test_override_in_subclass(self):
        """Check that setting a method undefined in a subclass works when it
        already exists in the base, without having to ignore it.
        """
        r = 'test_override_in_subclass'
        class RealSub(self.base):
            pass
        base_class = self.base
        @extend_class
        class tmp(RealSub):
            def method(self, items):
                items.append("sub:method")
                items.append(self)
                return base_class.method(self, items)
        i = RealSub(None)
        # Test method
        self.assertListEqual(i.method([r]),
                             [r, "sub:method", i, "base:method", i])
if __name__ == '__main__':
    unittest.main()
| wandsdn/ttp-tools | tests/test_extend_class.py | test_extend_class.py | py | 11,486 | python | en | code | 0 | github-code | 90 |
13449147020 | # -*- coding: utf-8 -*-
"""
Created on Tue Nov 27 19:23:05 2018
@author: SunWei
"""
import numpy
import os
from fnmatch import fnmatch
import cv2
class Dataset():
    # Captcha data collection / feature extraction.
    # filedir: path prefix of the data directory ('<filedir>Data')
    # content: 'train'/'test'
    # num: number of images
    # code_num: number of characters per captcha (derived in __init__)
    # feature_set: 1 = extract local features, 0 = use raw pixels
    # feature_num: number of features per character
    # maxy: vertical upper bound of the character band
    # miny: vertical lower bound of the character band
    # minx: horizontal start position of the first character
    # maxx: horizontal end position of the first character
    # distance: horizontal spacing between consecutive characters
    def __init__(self,filedir,num,feature_set=1,feature_num=5):
        """Scan sample captchas to locate the character grid.

        Accumulates column/row ink sums over up to 30 binarized images,
        then thresholds them to find character x/y boundaries.
        """
        self.filedir=filedir
        self.num=num
        self.feature_set=feature_set
        file_dir = self.filedir + 'Data'
        # Use the first *.jpg only to learn the image dimensions.
        for file in os.listdir(file_dir):
            if fnmatch(file, '*.jpg'):
                img_name = file
                im = self._get_dynamic_binary_image(file_dir, img_name)
                break
        h, w = im.shape[:2]
        x = numpy.zeros(shape=(w,))
        y = numpy.zeros(shape=(h,))
        xstart,xend,ystart,yend=[],[],[],[]
        k,flag= 0,0
        for file in os.listdir(file_dir):
            if fnmatch(file, '*.jpg'):
                img_name = file
                im = self._get_dynamic_binary_image(file_dir, img_name)
                im = self.clear_border(im)
                im = self.interference_line(im)
                # Invert so ink pixels contribute 1 and background 0.
                im = 1-im/255
                k=k+1
                # Cap the scan at 30 images; enough to estimate boundaries.
                if k>30:
                    break
                for i in range(w):
                    x[i] = im[:, i].sum() + x[i]
                for i in range(h):
                    y[i] = im[i].sum() + y[i]
        # Threshold the column sums (>80) to find character start/end columns.
        for i in range(1,len((x> 80))-1):
            if (x > 80)[i] and flag==0:
                xstart.append(i)
                flag = 1
            if ~((x > 80)[i]):
                if flag:
                    xend.append(i)
                    flag = 0
        flag=0
        # Threshold the row sums (>100) to find the vertical character band.
        for i in range(1,(len(( y> 100))-1)):
            if (y > 100)[i] and flag == 0:
                ystart.append(i)
                flag = 1
            if ~((y > 100)[i]):
                if flag:
                    yend.append(i)
                    flag = 0
        self.maxy=int(yend[0])
        self.miny=int(ystart[0])
        self.maxx=int(xend[0])
        self.minx =int(xstart[0])
        self.distance = int(xstart[1]-xstart[0])
        self.code_num = len(xstart)
        if feature_set==0:
            # Raw-pixel mode: one feature per pixel of a character crop.
            self.feature_num=(self.maxy-self.miny)*(self.maxx-self.minx)
        else:
            self.feature_num = feature_num
    # Feature extraction: quadrant means plus a centre-patch mean.
    def feature(self, A):
        midx = int(A.shape[1] / 2) + 1
        midy = int(A.shape[0] / 2) + 1
        A1 = A[0:midy, 0:midx].mean()
        A2 = A[midy:A.shape[0], 0:midx].mean()
        A3 = A[0:midy, midx:A.shape[1]].mean()
        A4 = A[midy:A.shape[0], midx:A.shape[1]].mean()
        A5 = A[midy - 1:midy + 2, midx - 1:midx + 2].mean()
        AF = [A1,A2, A3, A4, A5]
        return AF
    # Build the feature matrix and labels from the labelled images.
    # Labels are read from the filename: digit i of '<digits>.jpg'.
    def data(self):
        data_set = numpy.zeros(shape=(self.num * self.code_num, self.feature_num))
        k = 0
        label = []
        file_dir = self.filedir+'Data'
        for file in os.listdir(file_dir):
            if fnmatch(file, '*.jpg'):
                img_name = file
                im = self._get_dynamic_binary_image(file_dir, img_name)
                im = self.clear_border(im)
                im = self.interference_line(im)
                for i in range(self.code_num):
                    if self.feature_set:
                        data_set[k * self.code_num + i] = self.feature(im[self.miny:self.maxy, i * self.distance+ self.minx:i * self.distance+ self.maxx])
                    else:
                        data_set[k * self.code_num + i] = im[self.miny:self.maxy, i * self.distance + self.minx:i * self.distance + self.maxx].flatten()
                    label.append(int(img_name.split('.')[0][i]))
                k = k + 1
        numpy.save(self.filedir+'label.npy', label)
        print('label.npy'+'保存成功')
        self.normalize_dataset(data_set)
        numpy.save(self.filedir+'set.npy', data_set)
        print('set.npy' + '保存成功')
    # Normalization: scale all features into [0, 1] in place.
    def normalize_dataset(self,data_set):
        for row in data_set:
            for i in range(len(row)):
                row[i] = row[i] / 255
    # Noise removal: blank pixels surrounded by 3+ white (>245) neighbours.
    def interference_line(self,img):
        h, w = img.shape[:2]
        for y in range(1, w - 1):
            for x in range(1, h - 1):
                count = 0
                if img[x, y - 1] > 245:
                    count = count + 1
                if img[x, y + 1] > 245:
                    count = count + 1
                if img[x - 1, y] > 245:
                    count = count + 1
                if img[x + 1, y] > 245:
                    count = count + 1
                if count > 2:
                    img[x, y] = 255
        return img
    def clear_border(self,img):
        '''Clear the image border: force a 4-pixel white frame.'''
        h, w = img.shape[:2]
        for y in range(0, w):
            for x in range(0, h):
                if y < 4 or y > w - 4:
                    img[x, y] = 255
                if x < 4 or x > h - 4:
                    img[x, y] = 255
        return img
    def _get_dynamic_binary_image(self,file_dir, img_name):
        '''Adaptive-threshold binarization: grayscale then Gaussian adaptive threshold.'''
        img_name = file_dir + '/' + img_name
        im = cv2.imread(img_name)
        im = cv2.cvtColor(im, cv2.COLOR_RGB2GRAY)
        th1 = cv2.adaptiveThreshold(im, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY, 21, 1)
        return th1
| VARed/ANN | Code_Recognition_By_SunWei/Dataset.py | Dataset.py | py | 5,615 | python | en | code | 0 | github-code | 90 |
29460228697 | #!/usr/bin/python
# encoding=utf-8
import requests as requests
from urllib import quote
def buildQueryWithoutEncode(params):
    '''Build a "k=v&k=v" query string from params without URL-encoding values.'''
    needEncode = False
    return buildQuery(params, needEncode)
def buildQueryWithEncode(params):
    '''Build a "k=v&k=v" query string from params, URL-encoding each value.'''
    needEncode = True
    return buildQuery(params, needEncode)
def buildQuery(params, needEncode):
    '''
    Join params into a "key1=value1&key2=value2" string.
    :param params: dict of parameters (Python 2: iterated via iteritems)
    :param needEncode: whether each value should be URL-quoted
    :return: the query string, or False when params is not a dict
    '''
    if not type(params) == dict:
        return False
    params_data = ''
    for (key, value) in params.iteritems():
        # Pairs with blank values are dropped entirely.
        if checkEmpty(value):
            value = bool(needEncode) and quote(value) or value
            params_data = params_data + key + '=' + value + '&'
    # Trim the trailing '&' (no-op when params_data is empty).
    params_data = params_data[:-1]
    return params_data
def checkEmpty(value):
    '''
    Check that a string is non-blank.
    :param value: string to test
    :return bool: True when value contains non-whitespace characters
    '''
    # bool('') is False, so whitespace-only strings are treated as empty.
    return bool(value.strip())
def curl(url, postdata):
    '''
    Perform an HTTP POST request.
    :param url: request URL
    :param postdata: dict of form fields; an optional 'file' key holds a
        file object to upload as multipart/form-data
    :return mixed: the raw response body on HTTP 200
    :raise ValueError: when the response status is not 200
    '''
    if "file" in postdata:
        # Split the file out of the form fields for a multipart upload.
        file = {'file': postdata["file"]}
        postdata.pop("file")
        response = requests.post(url, data=postdata, files=file)
    else:
        response = requests.post(url, postdata)
    if response.status_code == 200:
        return response.content
    else:
        raise ValueError("ResponseCodeError: %i - %s" % (response.status_code, response.content))
| gusibi/zmop | zmop/WebUtil.py | WebUtil.py | py | 1,561 | python | en | code | 1 | github-code | 90 |
21942262420 | # web qq protocol
import os, sys
import json, re
import enum
import time
from PyQt5.QtCore import *
from PyQt5.QtNetwork import *
from PyQt5.QtDBus import *
from .imrelayfactory import IMRelayFactory
from .qqcom import *
from .qqsession import *
from .unimessage import *
from .filestore import QiniuFileStore, VnFileStore
from .tx2any import TX2Any, Chatroom
#
#
#
class WX2Tox(TX2Any):
    def __init__(self, parent=None):
        """Wire up the QQ agent DBus endpoints, the Tox relay, and start login."""
        super(WX2Tox, self).__init__(parent)
        # DBus service/interface names come from qqcom's QQAGENT_* constants.
        self.agent_service = QQAGENT_SERVICE_NAME
        self.agent_service_path = QQAGENT_SEND_PATH
        self.agent_service_iface = QQAGENT_IFACE_NAME
        self.agent_event_path = QQAGENT_EVENT_BUS_PATH
        self.agent_event_iface = QQAGENT_EVENT_BUS_IFACE
        self.relay_src_pname = 'WQU'
        self.initDBus()
        self.initRelay()
        self.startWXBot()
        return
    # @param msg str
    def uicmdHandler(self, msg):
        """Handle a UI command message (lines starting with a single quote)."""
        if msg[0] != "'":
            qDebug('not a uicmd, normal msg, omit for now.')
            return
        if msg.startswith("'help"):
            friendId = self.peerToxId
            uicmds = ["'help", "'qqnum <num>", "'passwd <pwd[|vfcode]>'", ]
            self.peerRelay.sendMessage("\n".join(uicmds), self.peerRelay.peer_user)
            pass
        elif msg.startswith("'qqnum"):
            qqnum = msg[6:].strip()
            qDebug('the qqnum is:' + str(qqnum))
            self.sendQQNum(qqnum)
            pass
        elif msg.startswith("'passwd"):
            # Optional verification code after '|'; 4567 is the placeholder
            # default when none was supplied.
            passwd, *vfcode = msg[8:].strip().split('|')
            if len(vfcode) == 0: vfcode.append(4567)
            vfcode = vfcode[0]
            self.sendPasswordAndVerify(passwd, vfcode)
            pass
        else:
            qDebug('unknown uicmd:' + msg[0:120])
        return
    def startWXBot(self):
        """Drive the login state machine based on the agent's connection state."""
        cstate = self.getConnState()
        qDebug('curr conn state:' + str(cstate))
        need_send_notify = False
        notify_msg = ''
        if cstate == CONN_STATE_NONE:
            # do nothing: agent not up yet, retry shortly
            qDebug('wait for qqagent bootup...')
            QTimer.singleShot(2345, self.startWXBot)
            pass
        elif cstate == CONN_STATE_WANT_USERNAME:
            need_send_notify = True
            notify_msg = "Input qqnum: ('qqnum <1234567>)"
            pass
        elif cstate == CONN_STATE_WANT_PASSWORD:
            need_send_notify = True
            notify_msg = "Input password: ('passwd <yourpassword>)"
            pass
        elif cstate == CONN_STATE_CONNECTED:
            qDebug('qqagent already logined.')
            self.createWXSession()
            pass
        else:
            qDebug('not possible.')
            pass
        if need_send_notify is True:
            # TODO: ordering issue — self.peerRelay may still be None here,
            # i.e. the relay has not fully started yet.
            # time.sleep(1) # hotfix lsself.peerRelay's toxkit is None sometime.
            tkc = self.peerRelay.isPeerConnected(self.peerRelay.peer_user)
            if tkc is True:
                self.peerRelay.sendMessage(notify_msg, self.peerRelay.peer_user)
            else:
                # Peer offline: queue the prompt for later delivery.
                self.notify_buffer.append(notify_msg)
                self.need_send_notify = True
        self.sendQRToRelayPeer()
        # if logined is True: self.createWXSession()
        return
    @pyqtSlot(QDBusMessage)
    def onDBusWantQQNum(self, message):
        """DBus slot: agent asks for the QQ number; re-enter the state machine."""
        qDebug(str(message.arguments()))
        self.startWXBot()  # TODO: replace with the login state-machine method
        return
    # @param a0=needvfc
    # @param a1=vfcpic
    @pyqtSlot(QDBusMessage)
    def onDBusWantPasswordAndVerifyCode(self, message):
        """DBus slot: agent asks for the password (and maybe a verify code)."""
        qDebug(str(message.arguments()))
        need_send_notify = False
        notify_msg = ''
        cstate = CONN_STATE_WANT_PASSWORD
        assert(cstate == CONN_STATE_WANT_PASSWORD)
        need_send_notify = True
        notify_msg = "Input password: ('passwd <yourpassword>)"
        if need_send_notify is True:
            tkc = False
            tkc = self.peerRelay.isPeerConnected(self.peerRelay.peer_user)
            qDebug(str(tkc))
            if tkc is True:
                self.peerRelay.sendMessage(notify_msg, self.peerRelay.peer_user)
            else:
                # Peer offline: buffer the prompt until it connects.
                self.notify_buffer.append(notify_msg)
                self.need_send_notify = True
        return
    @pyqtSlot(QDBusMessage)
    def onDBusNewMessage(self, message):
        """DBus slot: decode a base64 QQ message batch and relay each message.

        Arguments are (length, base64-encoded JSON payload).
        """
        # qDebug(str(message.arguments()))
        args = message.arguments()
        msglen = args[0]
        msghcc = args[1]
        if self.txses is None: self.createWXSession()
        for arg in args:
            if type(arg) == int:
                qDebug(str(type(arg)) + ',' + str(arg))
            else:
                qDebug(str(type(arg)) + ',' + str(arg)[0:120])
        hcc64_str = args[1]
        hcc64 = hcc64_str.encode('utf8')
        hcc = QByteArray.fromBase64(hcc64)
        # Dump the raw payload for debugging.
        self.saveContent('qqmsgfromdbus.json', hcc)
        wxmsgvec = QQMessageList()
        wxmsgvec.setMessage(hcc)
        strhcc = hcc.data().decode('utf8')
        qDebug(strhcc[0:120].replace("\n", "\\n"))
        jsobj = json.JSONDecoder().decode(strhcc)
        # temporary send to friend
        # self.toxkit.sendMessage(self.peerToxId, strhcc)
        #############################
        # AddMsgCount = jsobj['AddMsgCount']
        # ModContactCount = jsobj['ModContactCount']
        # grnames = self.wxproto.parseWebSyncNotifyGroups(hcc)
        # self.txses.addGroupNames(grnames)
        # self.txses.parseModContact(jsobj['ModContactList'])
        msgs = wxmsgvec.getContent()
        for msg in msgs:
            # Resolve sender/receiver to session user objects.
            fromUser = self.txses.getUserByName(msg.FromUserName)
            toUser = self.txses.getUserByName(msg.ToUserName)
            # qDebug(str(fromUser))
            # qDebug(str(toUser))
            if fromUser is None: qDebug('can not found from user object')
            if toUser is None: qDebug('can not found to user object')
            msg.FromUser = fromUser
            msg.ToUser = toUser
            # hot fix file ack
            # {'value': {'mode': 'send_ack', 'reply_ip': 183597272, 'time': 1444550216, 'type': 101, 'to_uin': 1449732709, 'msg_type': 10, 'session_id': 27932, 'from_uin': 1449732709, 'msg_id': 47636, 'inet_ip': 0, 'msg_id2': 824152}, 'poll_type': 'file_message'}
            if msg.FromUserName == msg.ToUserName:
                qDebug('maybe send_ack msg, but dont known how process it, just omit.')
                continue
            self.sendMessageToToxByType(msg)
        return
    def sendMessageToToxByType(self, msg):
        """Route one QQ message to the right Tox sender by its content type."""
        umsg = self.peerRelay.unimsgcls.fromQQMessage(msg, self.txses)
        logstr = umsg.get()
        dlogstr = umsg.dget()
        qDebug(dlogstr.encode())
        if msg.isOffpic():
            qDebug(msg.offpic)
            self.sendShotPicMessageToTox(msg, logstr)
        elif msg.isFileMsg():
            qDebug(msg.FileName.encode())
            self.sendFileMessageToTox(msg, logstr)
        else:
            self.sendMessageToTox(msg, logstr)
        return
def dispatchToToxGroup(self, msg, fmtcc):
if msg.FromUserName == 'newsapp':
qDebug('special chat: newsapp')
self.dispatchNewsappChatToTox(msg, fmtcc)
pass
elif msg.ToUserName == 'filehelper' or msg.FromUserName == 'filehelper':
qDebug('special chat: filehelper')
self.dispatchFileHelperChatToTox(msg, fmtcc)
pass
elif msg.PollType == QQ_PT_SESSION:
qDebug('qq sess chat')
self.dispatchQQSessChatToTox(msg, fmtcc)
pass
elif msg.FromUser.isGroup() or msg.ToUser.isGroup():
# msg.ToUserName.startswith('@@') or msg.FromUserName.startswith('@@'):
qDebug('wx group chat:')
# wx group chat
self.dispatchWXGroupChatToTox(msg, fmtcc)
pass
else:
qDebug('u2u group chat:')
# user <=> user
self.dispatchU2UChatToTox(msg, fmtcc)
pass
return
def dispatchNewsappChatToTox(self, msg, fmtcc):
    """Relay a message from the special 'newsapp' account into its
    dedicated Tox groupchat ('newsapp@WQU'), creating it on demand."""
    groupchat = None
    mkey = None
    title = ''
    mkey = 'newsapp'
    title = 'newsapp@WQU'
    if mkey in self.txchatmap:
        groupchat = self.txchatmap[mkey]
        # assert groupchat is not None
        # The groupchat may be ready while the peer has not yet accepted the
        # invite; a send would fail and lose the message, so queue it instead.
        number_peers = self.peerRelay.groupNumberPeers(groupchat.group_number)
        if number_peers < 2:
            groupchat.unsend_queue.append(fmtcc)
            ### reinvite peer into group
            self.peerRelay.groupInvite(groupchat.group_number, self.peerRelay.peer_user)
        else:
            self.peerRelay.sendGroupMessage(fmtcc, groupchat.group_number)
    else:
        groupchat = self.createChatroom(msg, mkey, title)
        groupchat.unsend_queue.append(fmtcc)
    return
def dispatchFileHelperChatToTox(self, msg, fmtcc):
    """Relay chats with the special 'filehelper' account into a dedicated
    Tox groupchat keyed by the helper-side user's Uin."""
    groupchat = None
    mkey = None
    title = ''
    # Key and title come from whichever side is the filehelper peer.
    if msg.FromUserName == 'filehelper':
        mkey = msg.FromUser.Uin
        title = '%s@WQU' % msg.FromUser.NickName
    else:
        mkey = msg.ToUser.Uin
        title = '%s@WQU' % msg.ToUser.NickName
    if mkey in self.txchatmap:
        groupchat = self.txchatmap[mkey]
        # assert groupchat is not None
        # The groupchat may be ready while the peer has not yet accepted the
        # invite; a send would fail and lose the message, so queue it instead.
        number_peers = self.peerRelay.groupNumberPeers(groupchat.group_number)
        if number_peers < 2:
            groupchat.unsend_queue.append(fmtcc)
            ### reinvite peer into group
            self.peerRelay.groupInvite(groupchat.group_number, self.peerRelay.peer_user)
        else:
            self.peerRelay.sendGroupMessage(fmtcc, groupchat.group_number)
    else:
        groupchat = self.createChatroom(msg, mkey, title)
        groupchat.unsend_queue.append(fmtcc)
    return
def dispatchWXGroupChatToTox(self, msg, fmtcc):
    """Relay a WX group-chat message into its Tox groupchat.

    If the group's user object is not resolved yet, the message is parked in
    self.pendingGroupMessages keyed by the group's UserName and re-dispatched
    later (see onGetBatchGroupDone / dispatchWXGroupChatToTox2).  Otherwise
    the message is sent to the existing Tox groupchat, queued while the peer
    has not joined yet, or a new chatroom is created and the message queued.
    """
    groupchat = None
    mkey = None
    title = ''
    # Determine which side of the conversation is the WX group.
    # BUGFIX: the original dereferenced msg.FromUser before its None check
    # (the check was unreachable), and its pending-queue bookkeeping tested
    # one UserName key but created/appended under the other, which could
    # drop messages; keys are now consistent per branch.
    if msg.FromUser is not None and msg.FromUser.isGroup():
        mkey = msg.FromUser.Uin
        title = '%s@WQU' % msg.FromUser.NickName
        if len(msg.FromUser.NickName) == 0:
            qDebug('maybe a temp group and without nickname')
            title = 'TGC%s@WQU' % msg.FromUser.Uin
    elif msg.FromUser is None:
        # Sender group not resolved yet: park the message keyed by FromUserName.
        qDebug('warning FromUser not found, wxgroup not found:' + msg.FromUserName)
        self.pendingGroupMessages.setdefault(msg.FromUserName, list()).append([msg, fmtcc])
        # QTimer.singleShot(1, self.getBatchGroupAll)
        return
    elif msg.ToUser is None:
        # Receiver group not resolved yet: park the message keyed by ToUserName.
        qDebug('warning ToUser not found, wxgroup not found:' + msg.ToUserName)
        self.pendingGroupMessages.setdefault(msg.ToUserName, list()).append([msg, fmtcc])
        # QTimer.singleShot(1, self.getBatchGroupAll)
        return
    else:
        mkey = msg.ToUser.Uin
        title = '%s@WQU' % msg.ToUser.NickName
        if len(msg.ToUser.NickName) == 0:
            qDebug('maybe a temp group and without nickname')
            title = 'TGC%s@WQU' % msg.ToUser.Uin
    if mkey in self.txchatmap:
        groupchat = self.txchatmap[mkey]
        # The groupchat may be ready while the Tox peer has not yet accepted
        # the invite; a send would fail and lose the message, so queue it and
        # re-invite the peer.
        number_peers = self.peerRelay.groupNumberPeers(groupchat.group_number)
        if number_peers < 2:
            groupchat.unsend_queue.append(fmtcc)
            self.peerRelay.groupInvite(groupchat.group_number, self.peerRelay.peer_user)
        else:
            self.peerRelay.sendGroupMessage(fmtcc, groupchat.group_number)
    else:
        # TODO: a newly created groupchat should wait until it is usable
        # before sending, otherwise the message could be lost.
        groupchat = self.createChatroom(msg, mkey, title)
        groupchat.unsend_queue.append(fmtcc)
    return
def dispatchWXGroupChatToTox2(self, msg, fmtcc, GroupUser):
    """Re-dispatch a previously pending group message once the group's user
    object (GroupUser) is resolved: fill in whichever side was missing,
    then run the normal group dispatch."""
    if msg.FromUser is None: msg.FromUser = GroupUser
    elif msg.ToUser is None: msg.ToUser = GroupUser
    else: qDebug('wtf???...')
    self.dispatchWXGroupChatToTox(msg, fmtcc)
    return
def dispatchQQSessChatToTox(self, msg, fmtcc):
    """Relay a QQ temp-session (C2C) message to Tox, fetching unknown peer
    info asynchronously first when needed."""
    groupchat = None
    mkey = None
    title = ''
    # If either side's user object is unknown, request its friend info over
    # DBus first; onGetFriendInfoDone() re-enters this method afterwards.
    to_uin = None
    if msg.FromUser is None:
        to_uin = msg.FromUserName
    elif msg.ToUser is None:
        to_uin = msg.ToUserName
    else:
        pass
    if to_uin is not None:
        pcall = self.sysiface.asyncCall('getfriendinfo', to_uin, 'a0', 123, 'a1')
        watcher = QDBusPendingCallWatcher(pcall)
        watcher.finished.connect(self.onGetFriendInfoDone)
        self.asyncWatchers[watcher] = [msg, fmtcc]
        return
    mkey = msg.ToUser.Uin
    title = '%s@WQU' % msg.ToUser.NickName
    if len(msg.ToUser.NickName) == 0:
        qDebug('maybe a temp group and without nickname')
        title = 'TGC%s@WQU' % msg.ToUser.Uin
    if mkey in self.txchatmap:
        groupchat = self.txchatmap[mkey]
        # assert groupchat is not None
        # The groupchat may be ready while the peer has not yet accepted the
        # invite; a send would fail and lose the message, so queue it instead.
        number_peers = self.peerRelay.groupNumberPeers(groupchat.group_number)
        if number_peers < 2:
            groupchat.unsend_queue.append(fmtcc)
            ### reinvite peer into group
            self.peerRelay.groupInvite(groupchat.group_number, self.peerRelay.peer_user)
        else:
            self.peerRelay.sendGroupMessage(fmtcc, groupchat.group_number)
    else:
        # TODO: a newly created groupchat should wait until usable before
        # sending, otherwise the message could be lost.
        groupchat = self.createChatroom(msg, mkey, title)
        groupchat.unsend_queue.append(fmtcc)
    return
def dispatchU2UChatToTox(self, msg, fmtcc):
    """Relay a user<->user message to Tox; both directions of a conversation
    share a single groupchat keyed by the remote peer's Uin."""
    groupchat = None
    mkey = None
    title = ''
    # Key on the remote side: if we are the sender use the receiver, else the sender.
    if msg.FromUser.Uin == self.txses.me.Uin:
        mkey = msg.ToUser.Uin
        title = '%s@WQU' % msg.ToUser.NickName
    else:
        mkey = msg.FromUser.Uin
        title = '%s@WQU' % msg.FromUser.NickName
    # TODO: there is probably a helper for computing this intersection
    if mkey in self.txchatmap:
        groupchat = self.txchatmap[mkey]
    if groupchat is not None:
        # assert groupchat is not None
        # The groupchat may be ready while the peer has not yet accepted the
        # invite; a send would fail and lose the message, so queue it instead.
        number_peers = self.peerRelay.groupNumberPeers(groupchat.group_number)
        if number_peers < 2:
            groupchat.unsend_queue.append(fmtcc)
            ### reinvite peer into group
            self.peerRelay.groupInvite(groupchat.group_number, self.peerRelay.peer_user)
        else:
            self.peerRelay.sendGroupMessage(fmtcc, groupchat.group_number)
    else:
        groupchat = self.createChatroom(msg, mkey, title)
        groupchat.unsend_queue.append(fmtcc)
    return
def createChatroom(self, msg, mkey, title):
    """Create a Tox chatroom mirroring a QQ/WX conversation and register it.

    The room is indexed both by the QQ-side key (self.txchatmap[mkey]) and
    by the Tox group number (self.relaychatmap); the chat type is derived
    from the message's poll type, and the Tox peer is invited into the new
    room.  Returns the new Chatroom object.
    """
    # (The original first computed "('WQU.%s' % mkey).lower()" here and
    # immediately overwrote it -- dead code, removed.)
    group_number = self.peerRelay.createChatroom(mkey, title)
    groupchat = Chatroom()
    groupchat.group_number = group_number
    groupchat.FromUser = msg.FromUser
    groupchat.ToUser = msg.ToUser
    groupchat.FromUserName = msg.FromUserName
    self.txchatmap[mkey] = groupchat
    self.relaychatmap[group_number] = groupchat
    groupchat.title = title
    # Map the QQ poll type onto the relay chat type.
    if msg.PollType == QQ_PT_DISCUS:
        groupchat.chat_type = CHAT_TYPE_DISCUS
    elif msg.PollType == QQ_PT_QUN:
        groupchat.chat_type = CHAT_TYPE_QUN
    elif msg.PollType == QQ_PT_SESSION:
        groupchat.chat_type = CHAT_TYPE_SESS
    elif msg.PollType == QQ_PT_USER:
        groupchat.chat_type = CHAT_TYPE_U2U
    else:
        qDebug('undefined behavior')
    groupchat.Gid = msg.Gid
    groupchat.ServiceType = msg.ServiceType
    self.peerRelay.groupInvite(group_number, self.peerRelay.peer_user)
    return groupchat
def sendMessageToWX(self, groupchat, mcc):
    """Send a Tox-originated message back to WX, picking the RPC variant by
    the groupchat's chat type (qun / discus / session / u2u)."""
    qDebug('here')
    FromUser = groupchat.FromUser
    ToUser = groupchat.ToUser
    if groupchat.chat_type == CHAT_TYPE_QUN:
        qDebug('send wx group chat:')
        # wx group chat
        self.sendWXGroupChatMessageToWX(groupchat, mcc)
        pass
    elif groupchat.chat_type == CHAT_TYPE_DISCUS:
        qDebug('send wx discus chat:')
        # wx discus chat
        self.sendWXDiscusChatMessageToWX(groupchat, mcc)
        pass
    elif groupchat.chat_type == CHAT_TYPE_SESS:
        qDebug('send wx sess chat:')
        # wx sess chat
        self.sendWXSessionChatMessageToWX(groupchat, mcc)
        pass
    elif groupchat.chat_type == CHAT_TYPE_U2U:
        qDebug('send wx u2u chat:')
        # user <=> user
        self.sendU2UMessageToWX(groupchat, mcc)
        pass
    elif ToUser.isGroup() or FromUser.isGroup():
        # Fallbacks by user kind when chat_type did not match above.
        qDebug('send wx group chat:')
        # wx group chat
        self.sendWXGroupChatMessageToWX(groupchat, mcc)
        pass
    elif ToUser.isDiscus() or FromUser.isDiscus():
        qDebug('send wx discus chat:')
        # wx group chat
        self.sendWXDiscusChatMessageToWX(groupchat, mcc)
        pass
    else:
        qDebug('unknown chat:')
        pass
    # TODO: also forward WX-bound messages from each group to the Tox
    # aggregation peer.
    if True: return  # NOTE: everything below is intentionally disabled.
    from_username = groupchat.FromUser.UserName
    to_username = groupchat.ToUser.UserName
    args = [from_username, to_username, mcc, 1, 'more', 'even more']
    reply = self.sysiface.call('sendmessage', *args)  # expand args into the call
    rr = QDBusReply(reply)
    if rr.isValid():
        qDebug(str(len(rr.value())) + ',' + str(type(rr.value())))
    else:
        qDebug('rpc call error: %s,%s' % (rr.error().name(), rr.error().message()))
        ### TODO send message faild
    return
def sendWXGroupChatMessageToWX(self, groupchat, mcc):
    """Send mcc into a WX qun (group) via the 'send_qun_msg' DBus RPC."""
    from_username = groupchat.FromUser.UserName
    to_username = groupchat.ToUser.UserName
    group_code = groupchat.ToUser.Uin
    args = [to_username, from_username, mcc, group_code, 1, 'more', 'even more']
    reply = self.sysiface.call('send_qun_msg', *args)  # expand args into the call
    rr = QDBusReply(reply)
    if rr.isValid():
        qDebug(str(rr.value()) + ',' + str(type(rr.value())))
    else:
        qDebug('rpc call error: %s,%s' % (rr.error().name(), rr.error().message()))
        ### TODO send message faild
    return
def sendWXDiscusChatMessageToWX(self, groupchat, mcc):
    """Send mcc into a WX discus chat via the 'send_discus_msg' DBus RPC."""
    from_username = groupchat.FromUser.UserName
    to_username = groupchat.ToUser.UserName
    args = [to_username, from_username, mcc, 1, 'more', 'even more']
    reply = self.sysiface.call('send_discus_msg', *args)  # expand args into the call
    rr = QDBusReply(reply)
    if rr.isValid():
        qDebug(str(rr.value()) + ',' + str(type(rr.value())))
    else:
        qDebug('rpc call error: %s,%s' % (rr.error().name(), rr.error().message()))
        ### TODO send message faild
    return
# TODO: switch this to asyncGetRpc()
def sendWXSessionChatMessageToWX(self, groupchat, mcc):
    """Send a WX temp-session (C2C) message; fetch the session signature
    asynchronously first if we do not have one yet."""
    def on_dbus_reply(watcher):
        # Once the signature arrives, remember it and retry the send.
        groupchat, mcc = self.asyncWatchers[watcher]
        pendReply = QDBusPendingReply(watcher)
        message = pendReply.reply()
        args = message.arguments()
        qDebug(str(args))
        # #####
        hcc = args[0]  # QByteArray
        strhcc = self.hcc2str(hcc)
        hccjs = json.JSONDecoder().decode(strhcc)
        print('group sig', ':::', strhcc)
        groupchat.group_sig = hccjs['result']['value']
        self.sendWXSessionChatMessageToWX(groupchat, mcc)
        self.asyncWatchers.pop(watcher)
        return
    # Fetch the session signature asynchronously if we don't have one yet.
    if groupchat.group_sig is None:
        gid = groupchat.Gid
        tuin = groupchat.FromUser.UserName  # might also be ToUser.UserName
        service_type = groupchat.ServiceType
        pcall = self.sysiface.asyncCall('get_c2cmsg_sig', gid, tuin, service_type, 'a0', 123, 'a1')
        watcher = QDBusPendingCallWatcher(pcall)
        watcher.finished.connect(on_dbus_reply, Qt.QueuedConnection)
        self.asyncWatchers[watcher] = [groupchat, mcc]
        # BUGFIX: the original fell through here and sent with
        # group_sig=None; wait for the callback to retry instead.
        return
    # ##########
    from_username = groupchat.FromUser.UserName
    to_username = groupchat.ToUser.UserName
    group_sig = groupchat.group_sig
    args = [to_username, from_username, mcc, group_sig, 1, 'more', 'even more']
    reply = self.sysiface.call('send_sess_msg', *args)  # expand args into the call
    rr = QDBusReply(reply)
    if rr.isValid():
        qDebug(str(rr.value()) + ',' + str(type(rr.value())))
    else:
        qDebug('rpc call error: %s,%s' % (rr.error().name(), rr.error().message()))
        ### TODO send message faild
    return
def sendU2UMessageToWX(self, groupchat, mcc):
    """Send a direct user->user message via the 'send_buddy_msg' DBus RPC."""
    from_username = groupchat.FromUser.UserName
    to_username = groupchat.ToUser.UserName
    args = [to_username, from_username, mcc, 1, 'more', 'even more']
    reply = self.sysiface.call('send_buddy_msg', *args)  # expand args into the call
    rr = QDBusReply(reply)
    if rr.isValid():
        qDebug(str(rr.value()) + ',' + str(type(rr.value())))
    else:
        qDebug('rpc call error: %s,%s' % (rr.error().name(), rr.error().message()))
        ### TODO send message faild
    return
def createWXSession(self):
    """Lazily create the WXSession: fetch self info synchronously, then
    request friends / group names / discus lists asynchronously over DBus
    (results arrive in onGetContactDone)."""
    if self.txses is not None:
        return
    self.txses = WXSession()
    reply = self.sysiface.call('getselfinfo', 123, 'a1', 456)
    rr = QDBusReply(reply)
    # TODO check reply valid
    qDebug(str(len(rr.value())) + ',' + str(type(rr.value())))
    data64 = rr.value().encode()  # to bytes
    data = QByteArray.fromBase64(data64)
    self.txses.setSelfInfo(data)
    self.saveContent('selfinfo.json', data)
    pcall = self.sysiface.asyncCall('getuserfriends', 'a0', 123, 'a1')
    watcher = QDBusPendingCallWatcher(pcall)
    watcher.finished.connect(self.onGetContactDone, Qt.QueuedConnection)
    self.asyncWatchers[watcher] = 'getuserfriends'
    pcall = self.sysiface.asyncCall('getgroupnamelist', 'a0', 123, 'a1')
    watcher = QDBusPendingCallWatcher(pcall)
    watcher.finished.connect(self.onGetContactDone, Qt.QueuedConnection)
    self.asyncWatchers[watcher] = 'getgroupnamelist'
    pcall = self.sysiface.asyncCall('getdiscuslist', 'a0', 123, 'a1')
    watcher = QDBusPendingCallWatcher(pcall)
    watcher.finished.connect(self.onGetContactDone, Qt.QueuedConnection)
    self.asyncWatchers[watcher] = 'getdiscuslist'
    # Disabled variants kept for reference:
    # pcall = self.sysiface.asyncCall('getonlinebuddies', 'a0', 123, 'a1')
    # watcher = QDBusPendingCallWatcher(pcall)
    # watcher.finished.connect(self.onGetContactDone)
    # self.asyncWatchers[watcher] = 'getgrouponlinebuddies'
    # pcall = self.sysiface.asyncCall('getrecentlist', 'a0', 123, 'a1')
    # watcher = QDBusPendingCallWatcher(pcall)
    # watcher.finished.connect(self.onGetContactDone)
    # self.asyncWatchers[watcher] = 'getrecentlist'
    # reply = self.sysiface.call('getinitdata', 123, 'a1', 456)
    # rr = QDBusReply(reply)
    # # TODO check reply valid
    # qDebug(str(len(rr.value())) + ',' + str(type(rr.value())))
    # data64 = rr.value().encode('utf8') # to bytes
    # data = QByteArray.fromBase64(data64)
    # self.txses.setInitData(data)
    # self.saveContent('initdata.json', data)
    # reply = self.sysiface.call('getcontact', 123, 'a1', 456)
    # rr = QDBusReply(reply)
    # # TODO check reply valid
    # qDebug(str(len(rr.value())) + ',' + str(type(rr.value())))
    # data64 = rr.value().encode('utf8') # to bytes
    # data = QByteArray.fromBase64(data64)
    # self.txses.setContact(data)
    # self.saveContent('contact.json', data)
    # reply = self.sysiface.call('getgroups', 123, 'a1', 456)
    # rr = QDBusReply(reply)
    # # TODO check reply valid
    # qDebug(str(len(rr.value())) + ',' + str(type(rr.value())))
    # GroupNames = json.JSONDecoder().decode(rr.value())
    # self.txses.addGroupNames(GroupNames)
    # # QTimer.singleShot(8, self.getBatchContactAll)
    # QTimer.singleShot(8, self.getBatchGroupAll)
    return
def checkWXLogin(self):
    """Return True when the backend reports a logged-in WX session,
    False on an invalid DBus reply or an explicit False value."""
    reply = self.sysiface.call('islogined', 'a0', 123, 'a1')
    qDebug(str(reply))
    rr = QDBusReply(reply)
    if not rr.isValid():
        return False
    qDebug(str(rr.value()) + ',' + str(type(rr.value())))
    # Any reply value other than the literal False counts as logged in.
    return rr.value() is not False
def getConnState(self):
    """Return the WX connection state reported by the 'connstate' DBus RPC."""
    reply = self.sysiface.call('connstate', 'a0', 123, 'a1')
    qDebug(str(reply))
    rr = QDBusReply(reply)
    qDebug(str(rr.value()) + ',' + str(type(rr.value())))
    return rr.value()
def sendQQNum(self, num):
    """Forward the QQ account number to the login backend via 'inputqqnum'."""
    reply = self.sysiface.call('inputqqnum', num, 'a0', 123, 'a1')
    qDebug(str(reply))
    rr = QDBusReply(reply)
    qDebug(str(rr.value()) + ',' + str(type(rr.value())))
    return
def sendPasswordAndVerify(self, password, verify_code):
    """Forward the password and captcha code to the login backend via 'inputverify'."""
    reply = self.sysiface.call('inputverify', password, verify_code, 'a0', 123, 'a1')
    qDebug(str(reply))
    rr = QDBusReply(reply)
    qDebug(str(rr.value()) + ',' + str(type(rr.value())))
    return
def getGroupsFromDBus(self):
    """Synchronously fetch and JSON-decode the group name list via 'getgroups'."""
    reply = self.sysiface.call('getgroups', 123, 'a1', 456)
    rr = QDBusReply(reply)
    # TODO check reply valid
    qDebug(str(len(rr.value())) + ',' + str(type(rr.value())))
    GroupNames = json.JSONDecoder().decode(rr.value())
    return GroupNames
def onGetContactDone(self, watcher):
    """DBus completion handler for getuserfriends / getgroupnamelist /
    getdiscuslist: store the payload in the session and fan out one detail
    request per group / discus entry."""
    pendReply = QDBusPendingReply(watcher)
    qDebug(str(watcher))
    qDebug(str(pendReply.isValid()))
    if pendReply.isValid():
        hcc = pendReply.argumentAt(0)
        qDebug(str(type(hcc)))
    else:
        # Invalid reply: log whatever came back and bail out.
        hcc = pendReply.argumentAt(0)
        qDebug(str(len(hcc)))
        qDebug(str(hcc))
        return
    message = pendReply.reply()
    args = message.arguments()
    qDebug(str(args))
    extrainfo = self.asyncWatchers[watcher]
    self.saveContent('dr.'+extrainfo+'.json', args[0])
    ######
    hcc = args[0]  # QByteArray
    strhcc = self.hcc2str(hcc)
    qDebug(strhcc.encode())
    hccjs = json.JSONDecoder().decode(strhcc)
    print(extrainfo, ':::', strhcc)
    if extrainfo == 'getuserfriends':
        self.txses.setUserFriends(hcc)
    if extrainfo == 'getgroupnamelist':
        self.txses.setGroupList(hcc)
        # One async detail request per group.
        for um in hccjs['result']['gnamelist']:
            gcode = um['code']
            gname = um['name']
            qDebug(b'get group detail...' + str(um).encode())
            pcall = self.sysiface.asyncCall('get_group_detail', gcode, 'a0', 123, 'a1')
            twatcher = QDBusPendingCallWatcher(pcall)
            twatcher.finished.connect(self.onGetGroupOrDiscusDetailDone, Qt.QueuedConnection)
            self.asyncWatchers[twatcher] = 'get_group_detail'
            qDebug(b'get group detail...' + str(um).encode() + str(twatcher).encode())
    if extrainfo == 'getdiscuslist':
        self.txses.setDiscusList(hcc)
        # One async detail request per discus.
        for um in hccjs['result']['dnamelist']:
            did = um['did']
            dname = um['name']
            qDebug(b'get discus detail...' + str(um).encode())
            pcall = self.sysiface.asyncCall('get_discus_detail', did, 'a0', 123, 'a1')
            twatcher = QDBusPendingCallWatcher(pcall)
            twatcher.finished.connect(self.onGetGroupOrDiscusDetailDone, Qt.QueuedConnection)
            self.asyncWatchers[twatcher] = 'get_discus_detail'
            qDebug(b'get discus detail...' + str(um).encode() + str(twatcher).encode())
    self.asyncWatchers.pop(watcher)
    return
# TODO: merge handling of these delayed DBus request responses
def onGetGroupOrDiscusDetailDone(self, watcher):
    """DBus completion handler for get_group_detail / get_discus_detail:
    store the detail payload into the session."""
    pendReply = QDBusPendingReply(watcher)
    qDebug(str(watcher))
    qDebug(str(pendReply.isValid()))
    if pendReply.isValid():
        hcc = pendReply.argumentAt(0)
        qDebug(str(type(hcc)))
    else:
        # Invalid reply: log whatever came back and bail out.
        hcc = pendReply.argumentAt(0)
        qDebug(str(len(hcc)))
        qDebug(str(hcc))
        return
    message = pendReply.reply()
    args = message.arguments()
    qDebug(str(args))
    extrainfo = self.asyncWatchers[watcher]
    self.saveContent('dr.'+extrainfo+'.json', args[0])
    if len(args[0].data()) == 0:
        # An empty payload is treated as fatal: the whole process exits.
        qDebug('can not get group or discus list.')
        sys.exit()
    ######
    hcc = args[0]  # QByteArray
    strhcc = self.hcc2str(hcc)
    hccjs = json.JSONDecoder().decode(strhcc)
    print(extrainfo, ':::', strhcc)
    if extrainfo == 'get_group_detail':
        qDebug('gooooooooot')
        self.txses.setGroupDetail(hcc)
        pass
    if extrainfo == 'get_discus_detail':
        qDebug('gooooooooot')
        self.txses.setDiscusDetail(hcc)
        pass
    self.asyncWatchers.pop(watcher)
    return
def getBatchGroupAll(self):
    """Fetch full contact info for all IC groups in one batch DBus request;
    the reply is handled by onGetBatchGroupDone."""
    groups2 = self.getGroupsFromDBus()
    self.txses.addGroupNames(groups2)
    groups = self.txses.getICGroups()
    qDebug(str(groups))
    reqcnt = 0
    arg0 = []
    for grname in groups:
        melem = {'UserName': grname, 'ChatRoomId': ''}
        arg0.append(melem)
    argjs = json.JSONEncoder().encode(arg0)
    pcall = self.sysiface.asyncCall('getbatchcontact', argjs)
    watcher = QDBusPendingCallWatcher(pcall)
    # watcher.finished.connect(self.onGetBatchContactDone)
    watcher.finished.connect(self.onGetBatchGroupDone)
    self.asyncWatchers[watcher] = arg0
    reqcnt += 1
    qDebug('async reqcnt: ' + str(reqcnt))
    return
# @param watcher QDBusPendingCallWatcher
def onGetBatchGroupDone(self, watcher):
    """Store batch group contact results and flush any messages that were
    queued while the group info was still unknown."""
    pendReply = QDBusPendingReply(watcher)
    qDebug(str(watcher))
    qDebug(str(pendReply.isValid()))
    if pendReply.isValid():
        hcc = pendReply.argumentAt(0)
        qDebug(str(type(hcc)))
    else:
        # Invalid reply: log whatever came back and bail out.
        hcc = pendReply.argumentAt(0)
        qDebug(str(len(hcc)))
        qDebug(str(hcc))
        return
    message = pendReply.reply()
    args = message.arguments()
    # qDebug(str(len(args)))
    hcc = args[0]  # QByteArray
    strhcc = self.hcc2str(hcc)
    hccjs = json.JSONDecoder().decode(strhcc)
    # print(strhcc)
    memcnt = 0
    for contact in hccjs['ContactList']:
        memcnt += 1
        # print(contact)
        # self.txses.addMember(contact)
        grname = contact['UserName']
        if not QQUser.isGroup(grname): continue
        print('uid=%s,un=%s,nn=%s\n' % (contact['Uin'], contact['UserName'], contact['NickName']))
        self.txses.addGroupUser(grname, contact)
        # Re-dispatch messages parked while this group was unresolved.
        if grname in self.pendingGroupMessages and len(self.pendingGroupMessages[grname]) > 0:
            while len(self.pendingGroupMessages[grname]) > 0:
                msgobj = self.pendingGroupMessages[grname].pop()
                GroupUser = self.txses.getGroupByName(grname)
                self.dispatchWXGroupChatToTox2(msgobj[0], msgobj[1], GroupUser)
    qDebug('got memcnt: %s/%s' % (memcnt, len(self.txses.ICGroups)))
    ### flow next
    # QTimer.singleShot(12, self.getBatchContactAll)
    return
def getBatchContactAll(self):
    """Request member details for IC groups in batches of 50 over DBus.

    BUGFIX: the original referenced an undefined name ``group`` when
    building the request (NameError on first use); the encrypted chatroom
    id is the group's own UserName, i.e. ``grname``.
    """
    groups = self.txses.getICGroups()
    qDebug(str(groups))
    reqcnt = 0
    for grname in groups:
        members = self.txses.getGroupMembers(grname)
        arg0 = [{'UserName': member, 'EncryChatRoomId': grname} for member in members]
        cntpertime = 50  # members per DBus request
        while len(arg0) > 0:
            subarg = arg0[0:cntpertime]
            subargjs = json.JSONEncoder().encode(subarg)
            pcall = self.sysiface.asyncCall('getbatchcontact', subargjs)
            watcher = QDBusPendingCallWatcher(pcall)
            watcher.finished.connect(self.onGetBatchContactDone)
            self.asyncWatchers[watcher] = subarg
            arg0 = arg0[cntpertime:]
            reqcnt += 1
            break  # debug limit: only the first batch (present in original)
        break  # debug limit: only the first group (present in original)
    qDebug('async reqcnt: ' + str(reqcnt))
    return
# @param watcher QDBusPendingCallWatcher
def onGetBatchContactDone(self, watcher):
    """DBus completion handler for 'getbatchcontact': add each returned
    contact to the session as a member."""
    pendReply = QDBusPendingReply(watcher)
    qDebug(str(watcher))
    qDebug(str(pendReply.isValid()))
    if pendReply.isValid():
        hcc = pendReply.argumentAt(0)
        qDebug(str(type(hcc)))
    else:
        # Invalid reply: silently drop this batch.
        return
    message = pendReply.reply()
    args = message.arguments()
    # qDebug(str(len(args)))
    hcc = args[0]  # QByteArray
    strhcc = self.hcc2str(hcc)
    hccjs = json.JSONDecoder().decode(strhcc)
    # qDebug(str(self.txses.getGroups()))
    print(strhcc)
    memcnt = 0
    for contact in hccjs['ContactList']:
        memcnt += 1
        # print(contact)
        self.txses.addMember(contact)
    qDebug('got memcnt: %s/%s' % (memcnt, len(self.txses.ICUsers)))
    return
def onGetFriendInfoDone(self, watcher):
    """DBus completion handler for 'getfriendinfo': record the friend info,
    fill in the missing user object on the pending message, and re-run the
    QQ session dispatch."""
    pendReply = QDBusPendingReply(watcher)
    qDebug(str(watcher))
    qDebug(str(pendReply.isValid()))
    if pendReply.isValid():
        hcc = pendReply.argumentAt(0)
        qDebug(str(type(hcc)))
    else:
        # Invalid reply: log whatever came back and bail out.
        hcc = pendReply.argumentAt(0)
        qDebug(str(len(hcc)))
        qDebug(str(hcc))
        return
    message = pendReply.reply()
    args = message.arguments()
    qDebug(str(args))
    msg, fmtcc = self.asyncWatchers[watcher]
    ######
    hcc = args[0]  # QByteArray
    strhcc = self.hcc2str(hcc)
    hccjs = json.JSONDecoder().decode(strhcc)
    print(':::', strhcc)
    self.txses.addFriendInfo(hcc)
    # Resolve whichever side of the pending message was missing.
    if msg.FromUser is None:
        msg.FromUser = self.txses.getUserByName(msg.FromUserName)
    elif msg.ToUser is None:
        msg.ToUser = self.txses.getUserByName(msg.ToUserName)
    else:
        pass
    assert(msg.FromUser is not None)
    assert(msg.ToUser is not None)
    self.dispatchQQSessChatToTox(msg, fmtcc)
    self.asyncWatchers.pop(watcher)
    return
# @param imgcb(data) callback invoked with the downloaded image data
def getMsgImgCallback(self, msg, imgcb=None):
    """Asynchronously fetch a message's off-pic over DBus.

    NOTE: the call can exceed the default 25s DBus timeout (observed ~45s).
    (The original assigned ``args`` twice; the dead first assignment using
    the raw, unescaped path was removed.)
    """
    # Escape '/' so the path survives as a single RPC path segment.
    offpic_file_path = msg.offpic.replace('/', '%2F')
    args = [offpic_file_path, msg.FromUserName]
    self.asyncGetRpc('get_msg_img', args, imgcb)
    return
# @param imgcb(data) callback invoked with the downloaded file data
def getMsgFileCallback(self, msg, imgcb=None):
    """Asynchronously fetch a message's attached file over DBus.
    NOTE: the call can exceed the default 25s DBus timeout (observed ~45s)."""
    # TODO, msg.FileName maybe need urlencoded
    args = [msg.MsgId, msg.FileName, msg.ToUserName]
    self.asyncGetRpc('get_msg_file', args, imgcb)
    return
# hot fix: module-level handle so the aboutToQuit handler can reach the agent
g_w2t = None
def on_app_about_close():
    """Qt aboutToQuit handler: disconnect the Tox relay before exiting."""
    qDebug('hereee')
    global g_w2t
    g_w2t.peerRelay.disconnectIt()
    return
def main():
    """Entry point: create the Qt event loop, install Ctrl-C handling,
    build the WX2Tox agent, and run until the application quits."""
    app = QCoreApplication(sys.argv)
    import wxagent.qtutil as qtutil
    qtutil.pyctrl()
    w2t = WX2Tox()
    global g_w2t
    g_w2t = w2t
    app.aboutToQuit.connect(on_app_about_close)
    app.exec_()
    return
if __name__ == '__main__': main()
| kitech/wxagent | wxagent/qq2any.py | qq2any.py | py | 38,187 | python | en | code | 76 | github-code | 90 |
27173529704 | #coding=utf-8
#!/usr/bin/env python3
global flag
import socket, sys, os, threading, time, configparser, re
#下面两个是关闭额外开启的一个线程使用的第三方工具代码 _async_raise()和stop_thread()
import ctypes,inspect
def _async_raise(tid, exctype):
"""raises the exception, performs cleanup if needed"""
tid = ctypes.c_long(tid)
if not inspect.isclass(exctype):
exctype = type(exctype)
res = ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, ctypes.py_object(exctype))
if res == 0:
raise ValueError("invalid thread id")
elif res != 1:
# """if it returns a number greater than one, you're in trouble,
# and you should call it again with exc=NULL to revert the effect"""
ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, None)
raise SystemError("PyThreadState_SetAsyncExc failed")
def stop_thread(thread):
    # Force-stop a thread by asynchronously raising SystemExit inside it.
    _async_raise(thread.ident, SystemExit)
def log(message, clientAddr=None):
    ''' Print a green, timestamped log line, optionally tagged with the
    client's (host, port) address. '''
    stamp = time.strftime(r'%H:%M:%S, %m.%d.%Y')
    if clientAddr is None:
        print('\033[92m[%s]\033[0m %s' % (stamp, message))
    else:
        print('\033[92m[%s] %s:%d\033[0m %s' % (
            stamp, clientAddr[0], clientAddr[1], message))
class DataSockListener(threading.Thread):
    ''' Asynchronously accepts data connections for one FTPServer session;
    at most one active data connection is allowed at a time. '''
    def __init__(self, server):
        super().__init__()
        self.daemon = True  # Daemon
        self.server = server
        self.listenSock = server.dataListenSock
    def run(self):
        # Poll with a timeout so the thread can notice a closed socket.
        self.listenSock.settimeout(1.0)  # Check for every 1 second
        while True:
            try:
                (dataSock, clientAddr) = self.listenSock.accept()
            except (socket.timeout):
                pass
            except (socket.error):  # Stop when socket closes
                break
            else:
                if self.server.dataSock != None:  # Existing data connection not closed, cannot accept
                    dataSock.close()
                    log('Data connection refused from %s:%d.' % (clientAddr[0], clientAddr[1]), self.server.clientAddr)
                else:
                    self.server.dataSock = dataSock
                    log('Data connection accpted from %s:%d.' % (clientAddr[0], clientAddr[1]), self.server.clientAddr)
class FTPServer(threading.Thread):
    ''' FTP server handler: one thread per control connection.

    Implements a minimal subset of RFC 959 (USER/PASS/PWD/CWD/TYPE/PASV/
    NLST/RETR/STOR/QUIT/HELP) in passive mode only, and keeps the module
    globals send_len / recv_len updated with traffic byte counts.
    '''
    def __init__(self, controlSock, clientAddr):
        super().__init__()
        self.daemon = True  # Daemon
        self.bufSize = 1024
        self.controlSock = controlSock
        self.clientAddr = clientAddr
        self.dataListenSock = None   # PASV listening socket
        self.dataSock = None         # active data connection (one at most)
        self.dataAddr = '127.0.0.1'
        self.dataPort = None
        self.username = ''
        self.authenticated = False
        self.cwd = os.getcwd()
        self.typeMode = 'Binary'
        self.dataMode = 'PORT'       # switched to 'PASV' on a PASV command
    def run(self):
        # Command loop: read one command per iteration until QUIT/disconnect.
        self.controlSock.send(b'220 Service ready for new user.\r\n')
        global send_len
        send_len += len(b'220 Service ready for new user.\r\n')
        while True:
            cmd = self.controlSock.recv(self.bufSize).decode('ascii')
            global recv_len
            recv_len += len(cmd)
            if cmd == '':  # Connection closed
                self.controlSock.close()
                log('Client disconnected.', self.clientAddr)
                break
            log('[' + (self.username if self.authenticated else '') + '] ' + cmd.strip(), self.clientAddr)
            cmdHead = cmd.split()[0].upper()
            if cmdHead == 'QUIT':  # QUIT
                self.controlSock.send(b'221 Service closing control connection. Logged out if appropriate.\r\b')
                send_len += len(b'221 Service closing control connection. Logged out if appropriate.\r\b')
                self.controlSock.close()
                log('Client disconnected.', self.clientAddr)
                break
            elif cmdHead == 'HELP':  # HELP
                self.controlSock.send(b'214 QUIT HELP USER PASS PWD CWD TYPE PASV NLST RETR STOR\r\n')
                send_len += len(b'214 QUIT HELP USER PASS PWD CWD TYPE PASV NLST RETR STOR\r\n')
            elif cmdHead == 'USER':  # USER
                if len(cmd.split()) < 2:
                    self.controlSock.send(b'501 Syntax error in parameters or arguments.\r\n')
                    send_len += len(b'501 Syntax error in parameters or arguments.\r\n')
                else:
                    self.username = cmd.split()[1]
                    self.controlSock.send(b'331 User name okay, need password.\r\n')
                    send_len += len(b'331 User name okay, need password.\r\n')
                    self.authenticated = False
            elif cmdHead == 'PASS':  # PASS
                # NOTE(review): any non-empty password is accepted -- no
                # credential check is performed here.
                if self.username == '':
                    self.controlSock.send(b'503 Bad sequence of commands.\r\n')
                    send_len += len(b'503 Bad sequence of commands.\r\n')
                else:
                    if len(cmd.split()) < 2:
                        self.controlSock.send(b'501 Syntax error in parameters or arguments.\r\n')
                        send_len += len(b'501 Syntax error in parameters or arguments.\r\n')
                    else:
                        self.controlSock.send(b'230 User logged in, proceed.\r\n')
                        send_len += len(b'230 User logged in, proceed.\r\n')
                        self.authenticated = True
            elif cmdHead == 'PWD':  # PWD
                if not self.authenticated:
                    self.controlSock.send(b'530 Not logged in.\r\n')
                    send_len += len(b'530 Not logged in.\r\n')
                else:
                    self.controlSock.send(('257 "%s" is the current directory.\r\n' % self.cwd).encode('ascii'))
                    send_len += len(('257 "%s" is the current directory.\r\n' % self.cwd).encode('ascii'))
            elif cmdHead == 'CWD':  # CWD
                if not self.authenticated:
                    self.controlSock.send(b'530 Not logged in.\r\n')
                    send_len += len(b'530 Not logged in.\r\n')
                elif len(cmd.split()) < 2:
                    self.controlSock.send(('250 "%s" is the current directory.\r\n' % self.cwd).encode('ascii'))
                    send_len += len(('250 "%s" is the current directory.\r\n' % self.cwd).encode('ascii'))
                else:
                    # Temporarily chdir to resolve a possibly relative target,
                    # then restore the process's own working directory.
                    programDir = os.getcwd()
                    os.chdir(self.cwd)
                    newDir = cmd.split()[1]
                    try:
                        os.chdir(newDir)
                    except (OSError):
                        self.controlSock.send(
                            b'550 Requested action not taken. File unavailable (e.g., file busy).\r\n')
                        send_len += len(b'550 Requested action not taken. File unavailable (e.g., file busy).\r\n')
                    else:
                        self.cwd = os.getcwd()
                        self.controlSock.send(('250 "%s" is the current directory.\r\n' % self.cwd).encode('ascii'))
                        send_len += len(('250 "%s" is the current directory.\r\n' % self.cwd).encode('ascii'))
                    os.chdir(programDir)
            elif cmdHead == 'TYPE':  # TYPE, currently only I is supported
                if not self.authenticated:
                    self.controlSock.send(b'530 Not logged in.\r\n')
                    send_len += len(b'530 Not logged in.\r\n')
                elif len(cmd.split()) < 2:
                    self.controlSock.send(b'501 Syntax error in parameters or arguments.\r\n')
                    send_len += len(b'501 Syntax error in parameters or arguments.\r\n')
                elif cmd.split()[1] == 'I':
                    self.typeMode = 'Binary'
                    self.controlSock.send(b'200 Type set to: Binary.\r\n')
                    send_len += len(b'200 Type set to: Binary.\r\n')
                else:
                    self.controlSock.send(b'504 Command not implemented for that parameter.\r\n')
                    send_len += len(b'504 Command not implemented for that parameter.\r\n')
            elif cmdHead == 'PASV':  # PASV, currently only support PASV
                if not self.authenticated:
                    self.controlSock.send(b'530 Not logged in.\r\n')
                    send_len += len(b'530 Not logged in.\r\n')
                else:
                    if self.dataListenSock != None:  # Close existing data connection listening socket
                        self.dataListenSock.close()
                    # Bind port 0 so the OS picks a free ephemeral port.
                    self.dataListenSock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
                    self.dataListenSock.bind((self.dataAddr, 0))
                    self.dataPort = self.dataListenSock.getsockname()[1]
                    self.dataListenSock.listen(5)
                    self.dataMode = 'PASV'
                    DataSockListener(self).start()
                    time.sleep(0.5)  # Wait for connection to set up
                    self.controlSock.send(('227 Entering passive mode (%s,%s,%s,%s,%d,%d)\r\n' % (
                        self.dataAddr.split('.')[0], self.dataAddr.split('.')[1], self.dataAddr.split('.')[2],
                        self.dataAddr.split('.')[3], int(self.dataPort / 256), self.dataPort % 256)).encode('ascii'))
                    send_len += len(('227 Entering passive mode (%s,%s,%s,%s,%d,%d)\r\n' % (
                        self.dataAddr.split('.')[0], self.dataAddr.split('.')[1], self.dataAddr.split('.')[2],
                        self.dataAddr.split('.')[3], int(self.dataPort / 256), self.dataPort % 256)).encode('ascii'))
            elif cmdHead == 'NLST':  # NLST
                if not self.authenticated:
                    self.controlSock.send(b'530 Not logged in.\r\n')
                    send_len += len(b'530 Not logged in.\r\n')
                elif self.dataMode == 'PASV' and self.dataSock != None:  # Only PASV implemented
                    self.controlSock.send(b'125 Data connection already open. Transfer starting.\r\n')
                    send_len += len(b'125 Data connection already open. Transfer starting.\r\n')
                    directory = '\r\n'.join(os.listdir(self.cwd)) + '\r\n'
                    self.dataSock.send(directory.encode('ascii'))
                    send_len += len(directory.encode('ascii'))
                    self.dataSock.close()
                    self.dataSock = None
                    self.controlSock.send(
                        b'225 Closing data connection. Requested file action successful (for example, file transfer or file abort).\r\n')
                    send_len += len(
                        b'225 Closing data connection. Requested file action successful (for example, file transfer or file abort).\r\n')
                else:
                    self.controlSock.send(b"425 Can't open data connection.\r\n")
                    send_len += len(b"425 Can't open data connection.\r\n")
            elif cmdHead == 'RETR':
                # Download: stream the requested file over the data connection.
                if not self.authenticated:
                    self.controlSock.send(b'530 Not logged in.\r\n')
                    send_len += len(b'530 Not logged in.\r\n')
                elif len(cmd.split()) < 2:
                    self.controlSock.send(b'501 Syntax error in parameters or arguments.\r\n')
                    send_len += len(b'501 Syntax error in parameters or arguments.\r\n')
                elif self.dataMode == 'PASV' and self.dataSock != None:  # Only PASV implemented
                    programDir = os.getcwd()
                    os.chdir(self.cwd)
                    self.controlSock.send(b'125 Data connection already open; transfer starting.\r\n')
                    send_len += len(b'125 Data connection already open; transfer starting.\r\n')
                    fileName = cmd.split()[1]
                    try:
                        self.dataSock.send(open(fileName, 'rb').read())
                        # NOTE(review): the file is read a second time just to
                        # count bytes -- confirm this double read is intended.
                        send_len += len(open(fileName, 'rb').read())
                    except (IOError):
                        self.controlSock.send(
                            b'550 Requested action not taken. File unavailable (e.g., file busy).\r\n')
                        send_len += len(
                            b'550 Requested action not taken. File unavailable (e.g., file busy).\r\n')
                    self.dataSock.close()
                    self.dataSock = None
                    self.controlSock.send(
                        b'225 Closing data connection. Requested file action successful (for example, file transfer or file abort).\r\n')
                    send_len += len(
                        b'225 Closing data connection. Requested file action successful (for example, file transfer or file abort).\r\n')
                    os.chdir(programDir)
                else:
                    self.controlSock.send(b"425 Can't open data connection.\r\n")
                    send_len += len(b"425 Can't open data connection.\r\n")
            elif cmdHead == 'STOR':
                # Upload: receive data until the client closes the data connection.
                if not self.authenticated:
                    self.controlSock.send(b'530 Not logged in.\r\n')
                    send_len += len(b'530 Not logged in.\r\n')
                elif len(cmd.split()) < 2:
                    self.controlSock.send(b'501 Syntax error in parameters or arguments.\r\n')
                    send_len += len(b'501 Syntax error in parameters or arguments.\r\n')
                elif self.dataMode == 'PASV' and self.dataSock != None:  # Only PASV implemented
                    programDir = os.getcwd()
                    os.chdir(self.cwd)
                    self.controlSock.send(b'125 Data connection already open; transfer starting.\r\n')
                    send_len += len(b'125 Data connection already open; transfer starting.\r\n')
                    fileOut = open(cmd.split()[1], 'wb')
                    time.sleep(0.5)  # Wait for connection to set up
                    self.dataSock.setblocking(False)  # Set to non-blocking to detect connection close
                    while True:
                        try:
                            data = self.dataSock.recv(self.bufSize)
                            recv_len += len(data)
                            if data == b'':  # Connection closed
                                break
                            fileOut.write(data)
                        except (socket.error):  # Connection closed
                            break
                    fileOut.close()
                    self.dataSock.close()
                    self.dataSock = None
                    self.controlSock.send(
                        b'225 Closing data connection. Requested file action successful (for example, file transfer or file abort).\r\n')
                    send_len += len(
                        b'225 Closing data connection. Requested file action successful (for example, file transfer or file abort).\r\n')
                    os.chdir(programDir)
                else:
                    self.controlSock.send(b"425 Can't open data connection.\r\n")
                    send_len += len(b"425 Can't open data connection.\r\n")
class Menu():
    """Console menu controlling the FTP server: start/stop, port and
    connection-limit configuration, IP white/black lists and traffic stats.

    Relies on module globals: cp (ConfigParser), listenSock, send_len,
    recv_len, flag, plus FTPServer/log/stop_thread defined earlier in
    this file.
    """
    def __init__(self):
        self.menus = dict(cp['menus'])  # menu item labels loaded from server.conf
        self.lisn = None  # listener thread; None while the server is stopped
    def printMenu(self):
        """Print every menu entry, then prompt for a choice."""
        print('请选择要进行的操作:')
        for key in self.menus:
            print(key+'.'+self.menus[key])
        self.selectFunc()
    def listen(self):
        """Accept loop: serve white-listed clients, refuse others with 403."""
        while True:
            (controlSock, clientAddr) = listenSock.accept()
            addr = clientAddr[0]
            if cp.has_option('whiteIP', addr):
                FTPServer(controlSock, clientAddr).start()
                log("Connection accepted.", clientAddr)
            else:
                log("Connection refused.", clientAddr)
                controlSock.send(b'403 Forbidden.')
                global send_len
                # BUGFIX: accumulate the sent bytes instead of resetting the
                # counter (every other sender in this file uses +=).
                send_len += len(b'403 Forbidden.')
                controlSock.close()
    def selectFunc(self, option=''):
        """Run the chosen menu action; prompts on stdin when option is ''."""
        if option == '':
            option = input()
        option = int(option)
        if option < 0 or option > 6:
            print('请输入正确的选项')
            self.printMenu()
        else:
            if option == 1:
                # Start the server; counters are zeroed after a restart.
                global flag
                if flag == True:
                    global send_len
                    global recv_len
                    send_len = 0
                    recv_len = 0
                    print("请注意,上次服务器运行期间流量已清零")
                global listenSock
                listenAddr = '127.0.0.1'
                listenPort = int(cp['basic']['listenport'])
                listenSock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
                listenSock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                listenSock.bind((listenAddr, listenPort))
                listenSock.listen(int(cp['basic']['maxUser']))
                log('Server started.')
                self.lisn = threading.Thread(target = self.listen,name='listenThread')
                self.lisn.start()
                self.printMenu()
                self.selectFunc()
            elif option == 2:
                # Change the listening port (only while the server is stopped).
                if self.lisn != None:
                    print("对不起,请先关闭服务器")
                    return
                listenSock.close()
                print('请输入端口号')
                listenPort = input()
                listenPort = int(listenPort)
                if listenPort < 10000 or listenPort > 20000:
                    print('请输入10000-20000范围的端口号')
                    print('--------------------------')
                    self.selectFunc(2)
                else:
                    cp.set('basic', 'listenPort', str(listenPort))
                    cp.write(open('server.conf', 'w',encoding='utf-8'))
                    cp.write(sys.stdout)
                    print('端口号设置成功')
                    self.printMenu()
                    self.selectFunc()
            elif option == 3:
                # Change the maximum number of concurrent users.
                if self.lisn != None:
                    print("对不起,请先关闭服务器")
                    return
                listenSock.close()
                print('请输入最大连接数')
                maxUser = input()
                maxUser = int(maxUser)
                if maxUser < 0 or maxUser > 6:
                    print('请输入0-5的数字')
                    print('--------------------------')
                    self.selectFunc(3)
                else:
                    cp.set('basic', 'maxUser', str(maxUser))
                    cp.write(open('server.conf', 'w',encoding='utf-8'))
                    cp.write(sys.stdout)
                    self.printMenu()
                    self.selectFunc()
            elif option == 4:
                # Add an IP to the white list (and drop it from the black list).
                if self.lisn != None:
                    print("对不起,请先关闭服务器")
                    return
                listenSock.close()
                print('请输入IP地址')
                addr = input()
                # BUGFIX: the last octet used '255[0-5]' (a four-character
                # match), so addresses ending in 250-255 never validated;
                # corrected to '25[0-5]'. Raw string avoids invalid-escape
                # warnings on \d.
                pattern = r"^((?:(2[0-4]\d)|(25[0-5])|([01]?\d\d?))\.){3}(?:(2[0-4]\d)|(25[0-5])|([01]?\d\d?))$"
                addr = re.search(pattern, addr, flags=0)
                if addr:
                    addr = addr.group()
                    if cp.has_option('whiteIP', addr):
                        print('该IP已经存在')
                        self.selectFunc(4)
                    else:
                        cp.set('whiteIP', addr, addr)
                        if cp.has_option('blackIP', addr):
                            cp.remove_option('blackIP', addr)
                        cp.write(open('server.conf', 'w',encoding='utf-8'))
                        cp.write(sys.stdout)
                        print('IP添加成功')
                        self.printMenu()
                        self.selectFunc()
                else:
                    print('请输入正确的IP地址')
                    self.selectFunc(4)
            elif option == 5:
                # Add an IP to the black list (and drop it from the white list).
                if self.lisn != None:
                    print("对不起,请先关闭服务器")
                    return
                listenSock.close()
                print('请输入要添加至黑名单的IP地址')
                addr = input()
                # Same last-octet BUGFIX as option 4.
                pattern = r"^((?:(2[0-4]\d)|(25[0-5])|([01]?\d\d?))\.){3}(?:(2[0-4]\d)|(25[0-5])|([01]?\d\d?))$"
                addr = re.search(pattern, addr, flags=0)
                if addr:
                    addr = addr.group()
                    if cp.has_option('blackIP', addr):
                        print('该IP已经存在')
                        self.selectFunc(5)
                    else:
                        cp.set('blackIP', addr, addr)
                        # BUGFIX: mirror of option 4 -- black-listing an IP
                        # must remove it from the *white* list; the original
                        # removed the entry it had just added to the black
                        # list, leaving the IP both white- and black-listed.
                        if cp.has_option('whiteIP', addr):
                            cp.remove_option('whiteIP', addr)
                        # BUGFIX: write with utf-8 like every other writer here.
                        cp.write(open('server.conf', 'w',encoding='utf-8'))
                        cp.write(sys.stdout)
                        print('IP添加成功')
                        self.printMenu()
                        self.selectFunc()
                else:
                    print('请输入正确的IP地址')
                    self.selectFunc(5)
            elif option == 6:
                # Show accumulated traffic counters.
                print('\n当前上传流量为:',send_len,'Byte')
                print('当前下载流量为:',recv_len,'Byte\n')
                self.printMenu()
                self.selectFunc()
            elif option == 0:
                # Stop the server.
                if self.lisn == None:
                    log('服务器并未启动')
                    self.printMenu()
                    self.selectFunc()
                else:
                    # 'global flag' earlier in this function scope makes this
                    # assignment target the module-level flag.
                    flag = True
                    stop_thread(self.lisn)
                    listenSock.close()
                    self.lisn = None
                    print('服务器已停止')
                    self.printMenu()
                    self.selectFunc()
def restart_program():
    """Replace the current process with a fresh run of this script."""
    interpreter = sys.executable
    os.execl(interpreter, interpreter, *sys.argv)
if __name__ == '__main__':
    # NOTE(review): 'global' at module level is a no-op; these names are
    # already module globals.
    global flag
    flag = False  # True once the server has been stopped at least once
    # Load configuration (port, max users, IP lists, menu labels).
    cp = configparser.ConfigParser()
    cp.sections()
    cp.read('server.conf',encoding='utf-8')
    #listenAddr = socket.gethostname()
    listenAddr = '127.0.0.1'
    listenPort = int(cp['basic']['listenport'])
    global listenSock
    # Create the initial listening socket; Menu option 1 rebuilds it on start.
    listenSock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    listenSock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    listenSock.bind((listenAddr, listenPort))
    listenSock.listen(int(cp['basic']['maxUser']))
    global send_len
    global recv_len
    send_len = 0  # initialise sent-bytes counter
    recv_len = 0  # initialise received-bytes counter
    menu = Menu()
    menu.printMenu()
| qq12cvhj/ftp_manager | server/main.py | main.py | py | 23,016 | python | en | code | 1 | github-code | 90 |
43512049101 | import subprocess
import time
import gpiozero
import logging
# Mirror disc activity onto a GPIO LED: poll /proc/diskstats and light the
# LED whenever any device reports I/O currently in progress.
STATSFILE = '/proc/diskstats'
FIELD = 12  # 1-based column index read from each diskstats row
INTERVAL = 0.05  # polling period, seconds
GPIO = 21
ACTIVE_HIGH = True

led = gpiozero.LED(GPIO,
    active_high=ACTIVE_HIGH,
    )

while True:
    try:
        with open(STATSFILE,mode='r') as s:
            stats = s.read()
        disc_active = False
        for l in stats.split('\n'):
            try:
                # A non-zero value in the watched column means the device
                # is busy; one busy device is enough to light the LED.
                if int(l.split()[FIELD - 1]):
                    disc_active = True
                    break
            except IndexError:
                pass  # blank/short line -- ignore
        led.value = disc_active
        time.sleep(INTERVAL)
    except Exception:
        # BUGFIX: the original handler referenced an undefined name ``args``
        # (``if args.debug: raise``), so any error raised a NameError inside
        # the handler and killed the loop. Log the error and keep polling.
        logging.exception('')
| larryare/mr3000 | ssd_led.py | ssd_led.py | py | 756 | python | en | code | 17 | github-code | 90 |
7504895307 | from abc import abstractmethod
import datetime
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from scipy.signal import resample
from scipy.spatial.distance import euclidean
from tslearn.metrics import dtw_path
class SensorReadings:
    """One recording session of phone IMU data.

    Loads accelerometer/gyroscope/magnetometer streams from a CSV file (or an
    in-memory chunk) into ``self.device_data`` and offers resampling,
    cross-sensor DTW alignment, channel stacking, fixed-length sampling and a
    few inspection plots.
    """
    def __init__(self, session_id=None, data=None, activity=None, phone_position=None, model=None, avg_duplicate_ts=False, use_sensors=['ACCELEROMETER', 'GYROSCOPE', 'MAGNETOMETER'], chunk=False):
        # NOTE(review): use_sensors is a mutable default argument; safe only
        # while no caller mutates it.
        self.session_id = session_id
        self.data = data  # CSV path, or raw rows when chunk=True
        self.activity = activity
        self.phone_position = phone_position
        self.model = model
        self.avg_duplicate_ts = avg_duplicate_ts
        self.chunk = chunk
        self.device_data = self.read_data(use_sensors)
    def read_data(self, use_sensors=['ACCELEROMETER', 'GYROSCOPE', 'MAGNETOMETER']):
        """Parse self.data into {sensor: {'ts', 'dt', 'data'}}.

        'ts' holds epoch-millisecond timestamps, 'dt' the same as datetimes,
        'data' the per-axis samples. Rows sharing a timestamp are averaged
        when ``avg_duplicate_ts`` is set. Also records the sensors actually
        present in ``self.sensors`` (sorted, filtered by use_sensors).
        """
        # TODO: a separate class for chunks
        if self.chunk:
            data = pd.DataFrame(np.array(self.data))
            data.columns = ['timestamp', 'sensor', 'x', 'y', 'z']
        else:
            data = pd.read_csv(self.data)
        self.sensors = []
        all_sensors = list(sorted(data.sensor.unique()))
        for s in all_sensors:
            if s in use_sensors:
                self.sensors.append(s)
        device_data = {}
        for sensor in self.sensors:
            device_data[sensor] = {}
            sensor_data_ts = data[data['sensor'] == sensor].reset_index(drop=True)
            if self.avg_duplicate_ts:
                # Collapse rows that share a timestamp into their mean reading.
                sensor_data_ts = sensor_data_ts.groupby('timestamp').mean().reset_index()
                sensor_data = sensor_data_ts.drop(['timestamp'], axis=1)
            else:
                sensor_data = sensor_data_ts.drop(['sensor', 'timestamp'], axis=1)
            ts = sensor_data_ts['timestamp']
            device_data[sensor]['ts'] = ts
            # Timestamps are in milliseconds, hence the /1000 before conversion.
            device_data[sensor]['dt'] = ts.apply(lambda x: datetime.datetime.fromtimestamp(x / 1000.0))
            device_data[sensor]['data'] = sensor_data
        return device_data
    def get_ts_summary(self):
        """Per-sensor timestamp statistics: duplicate-timestamp share/count,
        total sample count and mean effective frequency/interval."""
        summary = {}
        for sensor in self.sensors:
            summary[sensor] = {}
            num_ts = len(self.device_data[sensor]['ts'])
            eff_dict = mean_effective_frequency(self.device_data[sensor]['ts'])
            summary[sensor]['Repeating timestamps (%)'] = \
                round(100 * eff_dict['Repeating timestamps'] / num_ts, 2)
            summary[sensor]['Overall timestamps'] = num_ts
            summary[sensor].update(eff_dict)
        return summary
    def get_random_ts_vis(self, length_sec=5, size=3):
        """Plot `size` random windows of `length_sec` seconds per sensor.

        Silently returns for short sessions (<= 5000 accelerometer samples).
        """
        if len(self.device_data['ACCELEROMETER']['ts']) <= 5000:
            return
        random_start_frames = sorted(np.random.choice(len(self.device_data['ACCELEROMETER']['ts']) - 5000, size=size, replace=False))
        fig, ax = plt.subplots(len(self.sensors), size, figsize=(25, 10) if len(self.sensors) == 3 else (25,6))
        for k, sensor in enumerate(self.sensors):
            # Window length in frames follows each sensor's own effective rate.
            eff_freq = mean_effective_frequency(self.device_data[sensor]['ts'])['Mean effective frequency']
            num_frames = int(length_sec * eff_freq)
            for i, s in enumerate(random_start_frames):
                frame = self.device_data[sensor]['data'][s: s+num_frames]
                if len(self.sensors) == 1:
                    ax[i].plot(frame)
                else:
                    ax[k, i].plot(frame)
                # plt.legend()
                ax[k, i].set_title(f'{sensor}')
        fig.suptitle(f'{self.activity} - {self.phone_position} - {self.model}', fontsize=16)
        plt.show()
    def draw_delays(self):
        """Plot pairwise timestamp differences between sensors; returns the
        raw per-sensor timestamp dict. No-op plot for a single sensor.

        NOTE(review): pairing timestamps row-by-row assumes equal-length,
        roughly aligned streams -- verify before relying on the plots.
        """
        timestamps = {}
        if len(self.sensors) > 1:
            fig, ax = plt.subplots(1, 3 if len(self.sensors) == 3 else 1, figsize=(25,3))
            for sensor in self.sensors:
                timestamps[sensor] = self.device_data[sensor]['ts']
            ts_df = pd.DataFrame.from_dict(timestamps)
            delay_acc_gyro = ts_df[self.sensors[1]] - ts_df[self.sensors[0]]
            if len(self.sensors) == 2:
                ax.plot(delay_acc_gyro)
            if len(self.sensors) == 3:
                ax[0].plot(delay_acc_gyro)
                delay_acc_mg = ts_df[self.sensors[2]] - ts_df[self.sensors[0]]
                ax[1].plot(delay_acc_mg)
                delay_gyro_mg = ts_df[self.sensors[2]] - ts_df[self.sensors[1]]
                ax[2].plot(delay_gyro_mg)
            plt.show()
        else:
            print('Only one sensor in the recording.')
        return timestamps
    def resample(self, desired_freq=33.3):
        """Resample every sensor's data to `desired_freq` Hz.

        The target sample count is computed once, from the first sensor's
        duration, and reused for all sensors so their lengths match.
        """
        for i, s in enumerate(self.sensors):
            ts = np.array(self.device_data[s]['ts'])
            len_sec = np.floor((ts[-1] - ts[0]) / 1000)  # ms -> whole seconds
            if i == 0:
                num_ts = int(len_sec * desired_freq)
            self.device_data[s]['data'] = resample(self.device_data[s]['data'], num_ts)
    def align(self, method='fast-dtw'):
        """Align the sensors' timestamp sequences pairwise via DTW and
        re-index both timestamps and data along the warping path."""
        for i in range(len(self.sensors) - 1):
            # create alignment
            if method == 'fast-dtw':
                path ,_ = dtw_path(self.device_data[self.sensors[i]]['ts'], self.device_data[self.sensors[i + 1]]['ts'])
            else:
                raise ValueError('Provide the available alighning algorithm')
            # get first sensor data and ts
            aligned_s1_ts = self.device_data[self.sensors[i]]['ts'][[x[0] for x in path]].reset_index(drop=True)
            aligned_s2_ts = self.device_data[self.sensors[i + 1]]['ts'][[x[1] for x in path]].reset_index(drop=True)
            # get second sensor data and ts
            aligned_s1_data = self.device_data[self.sensors[i]]['data'].iloc[[x[0] for x in path]].reset_index(drop=True)
            aligned_s2_data = self.device_data[self.sensors[i + 1]]['data'].iloc[[x[1] for x in path]].reset_index(drop=True)
            # change data to aligned version
            self.device_data[self.sensors[i]]['ts'] = aligned_s1_ts
            self.device_data[self.sensors[i]]['data'] = aligned_s1_data
            self.device_data[self.sensors[i + 1]]['ts'] = aligned_s2_ts
            self.device_data[self.sensors[i + 1]]['data'] = aligned_s2_data
        if len(self.sensors) == 3:
            # NOTE(review): this reuses `path` from the *last* loop iteration
            # (sensors 1->2) to re-index sensor 0 so all three end up the same
            # length -- confirm this is the intended expansion for sensor 0.
            # get first sensor data and ts
            aligned_s0_ts = self.device_data[self.sensors[0]]['ts'][[x[0] for x in path]].reset_index(drop=True)
            aligned_s0_data = self.device_data[self.sensors[0]]['data'].iloc[[x[0] for x in path]].reset_index(drop=True)
            self.device_data[self.sensors[0]]['ts'] = aligned_s0_ts
            self.device_data[self.sensors[0]]['data'] = aligned_s0_data
    def stack(self):
        """Concatenate all sensors' data column-wise into one ndarray
        (sensors must already have equal lengths)."""
        data = []
        for s in self.sensors:
            data.append(self.device_data[s]['data'].copy())
        return np.concatenate(data, axis=1)
    @abstractmethod
    def sample(self, stacked, len_ts=5, desired_freq=33.3):
        """Cut `stacked` into non-overlapping windows of `len_ts` seconds at
        `desired_freq` Hz, skipping the first and last window."""
        frame_size = int(len_ts * desired_freq)
        sampled = []
        for i in range(frame_size, stacked.shape[0] - frame_size, frame_size):
            sampled.append(stacked[i: i + frame_size])
        return sampled
def mean_effective_frequency(ts):
    """Summarise the spacing of a millisecond timestamp sequence.

    Returns a dict with the number of zero-length gaps ("repeating"
    timestamps), the mean sampling frequency in Hz and the mean update
    interval in milliseconds.
    """
    gaps = np.diff(np.asarray(ts))
    avg_gap = gaps.mean()
    summary = {
        'Repeating timestamps': (gaps == 0).sum(),
        'Mean effective frequency': round(1000 / avg_gap, 3),
        'Mean update interval': round(avg_gap, 3),
    }
    return summary
| bulatkh/trdl-ai | src/dataset/sensor_readings.py | sensor_readings.py | py | 7,497 | python | en | code | 0 | github-code | 90 |
def checkPL(arr, n):
    """Return the longest palindromic substring of arr[:n].

    Expands around every possible palindrome centre -- each character and
    each gap between adjacent characters (2*n - 1 centres) -- and keeps the
    first longest palindrome encountered.
    """
    best = ""
    best_len = 0
    for centre in range(2 * n - 1):
        lo = centre // 2
        hi = lo + centre % 2  # hi == lo for odd centres, lo + 1 for even ones
        if lo != hi and arr[lo] != arr[hi]:
            continue  # the two-character seed is not a palindrome
        # Grow symmetrically while the characters keep matching.
        while lo - 1 >= 0 and hi + 1 < n and arr[lo - 1] == arr[hi + 1]:
            lo -= 1
            hi += 1
        if hi - lo + 1 > best_len:
            best_len = hi - lo + 1
            best = arr[lo:hi + 1]
    return best
# Demo: longest palindromic substring of "abb" -> "bb"
a = "abb"
print(checkPL(a, len(a)))
13455167320 | # Реализуйте алгоритм задания случайных чисел без использования встроенного генератора псевдослучайных чисел. БЕЗ КАКИХ ЛИБО РАНДОМОВ
from datetime import datetime
import time
def Random_number (min, max):
    """Return a pseudo-random integer in [min, max] derived from the system
    clock (time.time() mixed with datetime.now()'s microsecond field)
    instead of a PRNG.

    NOTE(review): the distribution is not uniform and quality is poor --
    suitable only as an exercise. Parameter names shadow the built-ins
    min/max (kept for interface compatibility).
    """
    # Seed the candidate just outside the requested range so the loop runs
    # at least once; max_abs bounds the magnitude of generated candidates.
    if abs(max) > abs (min):
        check = min - 1
        max_abs = max + 1
    else:
        check = max + 1
        max_abs = min - 1
    random_number = check
    # Keep drawing clock-based candidates until one lands inside [min, max].
    while random_number < min or random_number > max:
        temp_number = (float(time.time()) * float(datetime.now().time().microsecond)) / 1000000
        # Integer part's parity decides the sign of the candidate.
        sign = -1
        if int(temp_number) % 2 != 0:
            sign = 1
        # Keep only the fractional part as the randomness source.
        temp_number = temp_number - int(temp_number)
        random_number = abs(max_abs) * temp_number * sign
        random_number = int(random_number)
        time.sleep(0.000001)  # let the clock advance so candidates differ
    return random_number
def CreateList (size, min, max):
    """Build a list of `size` pseudo-random integers in [min, max]."""
    return [Random_number(min, max) for _ in range(size)]
| Sveta2311/Python | home18.py | home18.py | py | 1,077 | python | en | code | 0 | github-code | 90 |
17682617911 | from authapp import api
from flask import jsonify
from flask_restful import Resource, reqparse, marshal_with
from authapp import db, models
from view_common import default_rule_fields
class DefaultRule(Resource):
    """Flask-RESTful resource for one default authorization rule
    (GET / PUT / DELETE on /DefaultRules/<Id>)."""
    def __init__(self):
        """Constructor: declare the fields expected in the JSON body."""
        self.put_parser = reqparse.RequestParser()
        self.put_parser.add_argument('Name', type=str,
                                     location='json')
        self.put_parser.add_argument('Method', type=str,
                                     location='json')
        self.put_parser.add_argument('Role', type=str,
                                     location='json')
        super(DefaultRule, self).__init__()
    @marshal_with(default_rule_fields, envelope='Rule')
    def get(self, Id):
        """Return one default rule from the authorization database (404 if
        the Id does not exist)."""
        aRule = models.DefaultRules.query.get_or_404(Id)
        return aRule
    @marshal_with(default_rule_fields, envelope='Rule')
    def put(self, Id):
        """Update a default rule; only the fields present in the JSON body
        are changed, missing ones keep their current value."""
        aRule = models.DefaultRules.query.get_or_404(Id)
        args = self.put_parser.parse_args()
        # Keep the stored value (y) when the request did not supply one (x).
        IfUpdated = lambda x, y: y if x is None else x
        for attribut in ["Name", "Method", "Role"]:
            setattr(aRule, attribut, IfUpdated(getattr(args, attribut), getattr(aRule, attribut)))
        db.session.commit()
        return aRule
    def delete(self, Id):
        """Delete a default rule and report success as JSON.

        NOTE(review): filter_by(Id=Id) and get_or_404(Id) target the same
        row, so it is scheduled for deletion twice in this session -- likely
        only one of the two paths is needed; verify before refactoring.
        """
        AllRules = models.DefaultRules.query.filter_by(Id=Id).all()
        for rule in AllRules:
            db.session.delete(rule)
        aRule = models.DefaultRules.query.get_or_404(Id)
        db.session.delete(aRule)
        db.session.commit()
        return jsonify({'result': True})
# Register the resource under the versioned API route.
api.add_resource(DefaultRule, '/todo/aaa/v1.0/DefaultRules/<int:Id>', endpoint='DefaultRule')
72821174057 | from odoo import fields, models
class CustomInfoOptionSet(models.Model):
    """Odoo model: a named, reusable set of custom-information options."""
    _description = "Option Sets for Custom Information"
    _name = "custom_info.option_set"

    # Display name of the option set (translatable, indexed for search).
    name = fields.Char(
        index=True,
        translate=True,
        required=True,
    )
    # Technical identifier of the set.
    code = fields.Char(
        string="Code",
        required=True,
    )
    # Standard Odoo archiving flag.
    active = fields.Boolean(
        string="Active",
        default=True,
    )
    note = fields.Text(
        string="Note",
    )
    # Options belonging to this set (many2many via rel_option_set_2_option).
    option_ids = fields.Many2many(
        string="Options",
        comodel_name="custom_info.option",
        relation="rel_option_set_2_option",
        column1="set_id",
        column2="option_id",
    )
| open-synergy/ssi-mixin | ssi_custom_information_mixin/models/custom_info_option_set.py | custom_info_option_set.py | py | 686 | python | en | code | 0 | github-code | 90 |
5488589594 | import numpy as np
import os
import pandas as pd
from astropy.io import fits
import tqdm
from cv2 import resize, INTER_CUBIC
import multiprocessing
from os.path import exists
# Object IDs to process and the per-field photometric zero-point table.
ids = pd.read_csv("clf.csv").ID
zps = pd.read_csv("iDR4_zero-points.csv")
fits_folder = "/media/gjperin/SD-64GB-Gabriel/clf_fits"
# S-PLUS filter bands, in the order they are stacked into the output cube.
bands = ["U",
        "F378",
        "F395",
        "F410",
        "F430",
        "G",
        "F515",
        "R",
        "F660",
        "I",
        "F861",
        "Z"]
# Maps each band to its zero-point column name in the zero-points table.
band_to_zp = {"U":"ZP_u",
            "F378":"ZP_J0378",
            "F395":"ZP_J0395",
            "F410":"ZP_J0410",
            "F430":"ZP_J0430",
            "G":"ZP_g",
            "F515":"ZP_J0515",
            "R":"ZP_r",
            "F660":"ZP_J0660",
            "I":"ZP_i",
            "F861":"ZP_J0861",
            "Z":"ZP_z"
            }
def calibrate(x, id, band):
    """Flux-calibrate an image using the zero point of this object's field.

    Characters 7:20 of the object ID select the field row in the global
    `zps` table; `band_to_zp` maps the band to its zero-point column.
    The 0.55 constant is the plate scale used to normalise by pixel area.
    """
    pixel_scale = 0.55
    zero_point = float(zps[zps["Field"] == id[7:20]][band_to_zp[band]])
    factor = 10 ** (5 - 0.4 * zero_point) / (pixel_scale * pixel_scale)
    return factor * x
def gather_bands(id):
    """Assemble a 12-band 32x32 cube for one object and cache it to disk.

    Skips objects whose cube already exists in all_objects/. Each band image
    is read from its FITS file, resized to 32x32 and flux-calibrated before
    being stacked into the saved array.
    """
    if exists(f"all_objects/{id}.npy"):
        return
    mat = []
    for band in bands:
        # BUGFIX: close each FITS file after use -- the original leaked one
        # open file handle per band per object.
        with fits.open(f'{fits_folder}/{band}/{id}.fits') as hdul:
            x = hdul[1].data
            x = resize(x, dsize=(32, 32), interpolation=INTER_CUBIC)
            x = calibrate(x, id, band)
        mat.append(x)
    np.save(f"all_objects/{id}.npy", np.array(mat))
# BUGFIX: the worker pool must only start in the main process; without this
# guard the script breaks on platforms that use the "spawn" start method
# (Windows/macOS), where each worker re-imports this module.
if __name__ == "__main__":
    with multiprocessing.Pool(multiprocessing.cpu_count()) as pool:
        with tqdm.tqdm(total=len(ids)) as pbar:
            # imap_unordered streams results as workers finish, so the
            # progress bar updates per completed object.
            for _ in pool.imap_unordered(gather_bands, ids):
                pbar.update(1)
18540437469 | N = int(input())
A = list(map(int, input().split()))
d = {}
x = 0
for i in range(N):
x += A[i]
d[x] = d.get(x, 0) + 1
ans = d.get(0, 0)
for x in d.values():
ans += x * (x-1) // 2
print(ans) | Aasthaengg/IBMdataset | Python_codes/p03363/s069936239.py | s069936239.py | py | 195 | python | en | code | 0 | github-code | 90 |
41813776697 | from tkinter import *
from mailmerge import MailMerge
import xlrd
import sys
import os
import codecs
from tkinter import filedialog
from tkinter.filedialog import askdirectory
# Shared state for the widget callbacks defined below.
current_row = 0
sheet_num = 0
data_list = []  # collected user inputs: file paths, directories, sheet number

window = Tk()
window.geometry('650x450')
def exit_label():
    """(Re-)draw the Exit button in row 14 of the main window."""
    Button(window, text="Exit", width=6, command=sys_exit).grid(row=14, column=0, sticky=E)
def Exl():
    """Open a file dialog for the first document, show the chosen path in
    its entry box and record it in data_list.

    NOTE(review): this button sits under the "link of Word" label but the
    entry is named exlentry -- the Word/Excel naming looks swapped; confirm.
    """
    # Removed: unused read of exlentry and dead commented-out code.
    filename = filedialog.askopenfilename()
    exlentry.insert(0, str(filename))
    exit_label()
    data_list.append(filename)
    print(data_list)
def wrd():
    """Open a file dialog for the second document, show the chosen path in
    its entry box and record it in data_list."""
    # Removed: unused read of wordentry and dead commented-out code.
    filename = filedialog.askopenfilename()
    wordentry.insert(0, str(filename))
    exit_label()
    data_list.append(filename)
    print(data_list)
def load_dir():
    """Ask for the output directory for generated documents, show it in its
    entry box and record it in data_list."""
    exit_label()
    path = askdirectory()
    direntry.insert(0, str(path))
    data_list.append(path)
    print(data_list)
def load_dir_PDF():
    """Ask for the output directory for generated PDFs, show it in its
    entry box and record it in data_list."""
    exit_label()
    path = askdirectory()
    direntrypdf.insert(0, str(path))
    data_list.append(path)
    print(data_list)
def num():
    """Read the sheet number typed into its entry, echo it back into the
    entry with a label, and record it (as int) in data_list."""
    exit_label()
    entered_text = numentry.get()
    # Prepend a label and the value so the entry shows what was accepted
    # (insert order kept from the original: value ends up before the label).
    numentry.insert(0, 'entered sheet num: ')
    numentry.insert(0, str(entered_text))
    data_list.append(int(entered_text))
    print(data_list)
def sys_exit():
    """Close the main window, ending mainloop()."""
    window.destroy()
# Build the form: for each input, a label, an entry and a Submit button
# wired to the matching callback above.
window.title("Variable pass")
window.configure(background='blue')
lbl1 = Label(window,text="Provide a link of Word ")
lbl1.grid(row=0,column=0,sticky=W)
exlentry = Entry(window, width=100,bg='white',textvariable=StringVar())
exlentry.grid(row=1,column=0,sticky=W)
exit_label()
#btn = Button(window,text="Exit",width=6,command=sys_exit).grid(row=14,column=0,sticky=E)
btn = Button(window,text="Submit",width=6,command=Exl).grid(row=2,column=0,sticky=W)
lbl2 = Label(window,text="Provide a link of Excel")
lbl2.grid(row=3,column=0,sticky=W)
wordentry = Entry(window, width=100,bg='white')
wordentry.grid(row=4,column=0,sticky=W)
exit_label()
#btn = Button(window,text="Exit",width=6,command=sys_exit).grid(row=14,column=0,sticky=E)
btn = Button(window,text="Submit",width=6,command=wrd).grid(row=5,column=0,sticky=W)
lbl3 = Label(window,text="Provide a directory link to save the documents")
lbl3.grid(row=6,column=0,sticky=W)
direntry = Entry(window, width=100,bg='white')
direntry.grid(row=7,column=0,sticky=W)
exit_label()
#btn = Button(window,text="Exit",width=6,command=sys_exit).grid(row=14,column=0,sticky=E)
btn = Button(window,text="Submit",width=6,command=load_dir).grid(row=8,column=0,sticky=W)
lbl5 = Label(window,text="Provide a directory link to save the PDF")
lbl5.grid(row=9,column=0,sticky=W)
direntrypdf = Entry(window, width=100,bg='white')
direntrypdf.grid(row=10,column=0,sticky=W)
exit_label()
#btn = Button(window,text="Exit",width=6,command=sys_exit).grid(row=14,column=0,sticky=E)
btn = Button(window,text="Submit",width=6,command=load_dir_PDF).grid(row=11,column=0,sticky=W)
lbl4 = Label(window,text="Provide a sheet number of the excel")
lbl4.grid(row=12,column=0,sticky=W)
numentry = Entry(window, width=20,bg='white')
numentry.grid(row=13,column=0,sticky=W)
exit_label()
#btn = Button(window,text="Exit",width=6,command=sys_exit).grid(row=14,column=0,sticky=E)
btn = Button(window,text="Submit",width=6,command=num).grid(row=14,column=0,sticky=W)
# Enter the Tk event loop; blocks until sys_exit() destroys the window.
window.mainloop()
| agilewitinternal/CONSEN | LoadUi.py | LoadUi.py | py | 3,651 | python | en | code | 0 | github-code | 90 |
def to_camel_case(s):
    """Convert a dash-separated string to camelCase.

    The first segment keeps its original casing; each following segment is
    title-cased. Strings without a dash are returned unchanged.
    """
    if "-" not in s:
        return s
    head, *rest = s.split("-")
    return head + "".join(part.title() for part in rest)
def process(data):
    """Recursively camel-case every dict key in a nested structure of
    dicts and lists; any other value passes through unchanged."""
    if isinstance(data, dict):
        return {to_camel_case(key): process(value) for key, value in data.items()}
    if isinstance(data, list):
        return [process(item) for item in data]
    return data
| soyrochus/hog | Processnames.py | Processnames.py | py | 523 | python | en | code | 0 | github-code | 90 |
6694220849 | # USB_PORT = "/dev/ttyUSB0" # Arduino Uno R3 Compatible
USB_PORT = "/dev/ttyACM0" # Arduino Uno WiFi Rev2
# Imports
import serial
def sendMessage(message):
    """Open the Arduino serial port and send *message* as ASCII bytes.

    Returns -1 when the port cannot be opened (return value kept from the
    original interface); returns None on success.
    """
    try:
        usb = serial.Serial(USB_PORT, 9600, timeout=2)
    except Exception:
        # BUGFIX: the original bare ``except:`` also swallowed
        # KeyboardInterrupt/SystemExit.
        print("ERROR - Could not open USB serial port. Please check your port name and permissions.")
        print("Exiting program.")
        return -1
    try:
        usb.write(bytes(message, 'ascii'))
        usb.flush()  # make sure the bytes leave the buffer before closing
    finally:
        # BUGFIX: the port handle was leaked (never closed).
        usb.close()
| nkuma23/BruinBotVoice | sendMessage.py | sendMessage.py | py | 432 | python | en | code | 0 | github-code | 90 |
21633552718 |
# Code to acquire data on road collisions, crime, postcodes, IMD amd population
# Wrangles and exports files
# Enables subsequent document creation (make_documents.py)
# Adam Bricknell, Feb 2021
from sodapy import Socrata
import pandas as pd
import requests
from zipfile import ZipFile
from io import BytesIO
import os
# get road collision data and convert to dataframe
client = Socrata("opendata.camden.gov.uk", None)
results = client.get("puar-wf4h", limit=100000)
road_collision = pd.DataFrame.from_records(results)
columns_for_numeric = ["longitude", "latitude", "number_of_casualties"]
road_collision[columns_for_numeric] = road_collision[columns_for_numeric].apply(pd.to_numeric)
# get crime data
results = client.get("qeje-7ve7", limit=1000000)
crime = pd.DataFrame.from_records(results)
crime[["longitude", "latitude"]] = crime[["longitude", "latitude"]].apply(pd.to_numeric)
# get NSPL (check if this is used)
results = client.get("tr8t-gqz7", local_authority_code = "E09000007", limit=100000)
nspl = pd.DataFrame.from_records(results)
# get IMD data by LSOA
results = client.get("8x5x-eu22", local_authority_district_code = 'E09000007', limit=1000)
imd = pd.DataFrame.from_records(results)
# get population data by LSOA (takes a few minutes as it's a 40mb download)
url = 'https://www.ons.gov.uk/file?uri=/peoplepopulationandcommunity/populationandmigration/populationestimates/datasets/lowersuperoutputareamidyearpopulationestimates/mid2019sape22dt2/sape22dt2mid2019lsoasyoaestimatesunformatted.zip'
r = requests.get(url)
filebytes = BytesIO(r.content)
myzipfile = ZipFile(filebytes)
filename = myzipfile.namelist()[0]
n = myzipfile.open(filename)
population = pd.read_excel(n, 'Mid-2019 Persons', skiprows = 4, engine='openpyxl')
population = population[['LSOA Code', 'All Ages']] # columns of interest
# inner join IMD and Population, which prevents non-Camden LSOAs from going forward
imd_pop = imd.merge(population, left_on = 'lower_super_output_area_code', right_on = 'LSOA Code')
imd_pop[["longitude", "latitude"]] = imd_pop[["longitude", "latitude"]].apply(pd.to_numeric)
# find lsoa of collisions and crime by which LSOA lat/long is closest to the event lat/long
# using pythagoras to find distance to each LSOA, for each event
# (squared Euclidean distance on raw lat/long -- fine for a borough-sized area)
collision_lsoa = []
for i in range(len(road_collision)):
    ix_max = (road_collision['longitude'][i] - imd_pop['longitude']).pow(2).add((road_collision['latitude'][i] - imd_pop['latitude']).pow(2)).argmin()
    collision_lsoa.append(imd_pop['lower_super_output_area_code'][ix_max])
road_collision['lsoa_code'] = collision_lsoa
crime_lsoa = [] # ~200k rows so takes a minute or so
for i in range(len(crime)):
    ix_max = (crime['longitude'][i] - imd_pop['longitude']).pow(2).add((crime['latitude'][i] - imd_pop['latitude']).pow(2)).argmin()
    crime_lsoa.append(imd_pop['lower_super_output_area_code'][ix_max])
crime['lsoa_code'] = crime_lsoa
# group collisions data (inc diffrent types) by quarter and lsoa
road_collision['date'] = pd.to_datetime(road_collision['date'], infer_datetime_format=True)
road_collision['qtr'] = road_collision['date'].dt.quarter
road_collision['year'] = road_collision['date'].dt.year
collision_trends = road_collision['number_of_casualties'].groupby(by=[road_collision['lsoa_code'], road_collision['year'], road_collision['qtr']]).agg(['sum', 'count'])
collision_trends.rename(columns = {'sum': 'collisions_casualties', 'count': 'collisions_count'}, inplace = True)
collision_trends.reset_index(level=collision_trends.index.name, inplace = True)
# group crime data across quarter and lsoa (outcome date, different to date of crime)
crime['outcome_date'] = pd.to_datetime(crime['outcome_date'], infer_datetime_format=True)
crime['qtr'] = crime['outcome_date'].dt.quarter
crime['year'] = crime['outcome_date'].dt.year
crime_trends = crime['category'].groupby(by=[crime['lsoa_code'], crime['year'], crime['qtr']]).agg(['count'])
# NOTE(review): this rename targets collision_trends (a no-op -- its columns
# were already renamed above) and was probably meant for crime_trends; but
# downstream code relies on crime_trends keeping the 'count' column, so fixing
# it would require matching downstream changes.
collision_trends.rename(columns = {'count': 'crime_outcome_count'}, inplace = True)
crime_trends.reset_index(level=crime_trends.index.name, inplace = True)
# join crime and collisions at quarter and LSOA level
all_trends = crime_trends.merge(collision_trends, how = 'left',
                                on = ['lsoa_code', 'year', 'qtr'])
# aggregate numeric collisions data, giving timeseries of Camden as a whole
collision_cols = ['casualty_age', 'number_of_casualties', 'number_of_vehicles']
road_collision[collision_cols] = road_collision[collision_cols].apply(pd.to_numeric)
collision_float_trends = pd.DataFrame(columns=['year', 'count', 'mean', 'sum', 'std', 'category'])
for i in range(len(collision_cols)):
    x = road_collision[collision_cols[i]].groupby([road_collision['year']]).agg(['count', 'mean', 'sum', 'std'])
    x = pd.DataFrame(x)
    x.reset_index(inplace = True)
    x['category'] = collision_cols[i]
    collision_float_trends = collision_float_trends.append(x)
## might not need the above now
# aggregate categorical collisions data
cats_to_summarise = [
    'number_of_casualties',
    'number_of_vehicles',
    'casualty_sex',
    'casualty_class',
    'casualty_age_band',
    'casualty_severity',
    'day',
    'road_type',
    'speed_limit',
    'junction_detail',
    'junction_control',
    'road_class_1',
    'weather',
    'road_surface',
    'casualty_age_band'
]
# make all categories upper case
road_collision[cats_to_summarise] = road_collision[cats_to_summarise].apply(lambda x: x.astype(str).str.upper())
# make summary of grouped categories
# (one long-format frame: subcategory x year counts, tagged with the source column)
collision_cat_trends = pd.DataFrame(columns=['subcategory', 'count', 'category'])
for i in range(len(cats_to_summarise)):
    x = road_collision[cats_to_summarise[i]].groupby([road_collision[cats_to_summarise[i]], road_collision['year']]).agg('count')
    x = pd.DataFrame(x)
    x.rename(columns = {cats_to_summarise[i]: 'count'}, inplace = True)
    x.reset_index(inplace = True)
    x.rename(columns = {cats_to_summarise[i]: 'subcategory'}, inplace = True)
    x['category'] = cats_to_summarise[i]
    collision_cat_trends = collision_cat_trends.append(x)
collision_cat_trends['event_type'] = 'collisions'
# make summary of grouped categories for crime data
crime_to_summarise = ['service', 'location_subtype', 'category']
crime[crime_to_summarise] = crime[crime_to_summarise].apply(lambda x: x.astype(str).str.upper())
crime_cat_trends = pd.DataFrame(columns=['subcategory', 'count', 'category'])
for i in range(len(crime_to_summarise)):
    x = crime[crime_to_summarise[i]].groupby([crime[crime_to_summarise[i]], crime['year']]).agg('count')
    x = pd.DataFrame(x)
    x.rename(columns = {crime_to_summarise[i]: 'count'}, inplace = True)
    x.reset_index(inplace = True)
    x.rename(columns = {crime_to_summarise[i]: 'subcategory'}, inplace = True)
    x['category'] = crime_to_summarise[i]
    crime_cat_trends = crime_cat_trends.append(x)
crime_cat_trends['event_type'] = 'crime'
# append crime data
all_category_trends = collision_cat_trends.append(crime_cat_trends)
# get total events in most recent year (2019) for each LSOA
lsoa_events_latest = all_trends[all_trends['year'] == all_trends['year'].max()]
lsoa_events_latest = lsoa_events_latest[['count', 'collisions_count', 'collisions_casualties']].groupby([lsoa_events_latest['lsoa_code']]).agg(['sum'])
lsoa_events_latest.rename(columns = {'count': 'crimes_count'}, inplace = True)
lsoa_events_latest.reset_index(inplace = True)
lsoa_events_latest.columns = ['lsoa_code', 'crimes_count', 'collisions_count', 'collisions_casualties']
# adding rows for LSOAs with no crime/collisions in that year
all_lsoas = pd.DataFrame({'lsoa_code': imd_pop['lower_super_output_area_code'].unique()})
ix = ~all_lsoas['lsoa_code'].isin(lsoa_events_latest['lsoa_code']) # find LSOAs not in master list
if sum(ix) > 0:
    ix_missing = ix[ix].index
    to_add = []
    for i in ix_missing:
        to_add = to_add + [all_lsoas['lsoa_code'][i]]
    # Zero-filled rows for the missing LSOAs, then append and re-index.
    to_append = pd.DataFrame({'lsoa_code': to_add})
    to_append['crimes_count'] = [0] * len(to_add)
    to_append['collisions_count'] = [0.0] * len(to_add)
    to_append['collisions_casualties'] = [0.0] * len(to_add)
    lsoa_events_latest = lsoa_events_latest.append(to_append, sort = False)
    lsoa_events_latest.reset_index(drop=True, inplace = True)
# export (1) LSOA level pop and IMD, (2) LSOA level time series of collisions and crime
directory = os.path.dirname(os.path.abspath(__file__))
imd_pop.to_csv(directory + '/imd_pop_lsoa.csv', index = False)
all_trends.to_csv(directory + '/crime_collision_trends_lsoa.csv', index = False)
# export raw data
road_collision.to_csv(directory + '/road_collisions_all.csv', index = False)
crime.to_csv(directory + '/crime_all.csv', index = False)
# export Camden level time series
all_category_trends.to_csv(directory + '/camden_category_timeseries.csv', index = False)
collision_float_trends.to_csv(directory + '/camden_float_timeseries.csv', index = False)
lsoa_events_latest.to_csv(directory + '/camden_latest_events_lsoa.csv', index = False)
| adam-jb/camden_crime_data | process_data.py | process_data.py | py | 9,008 | python | en | code | 0 | github-code | 90 |
75025565416 | # -*- coding: utf-8 -*-
from odoo import fields, models, api
from datetime import date, timedelta
class IssueWizard(models.TransientModel):
    """Transient wizard that confirms a book issue: stamps the issue and
    submission dates on the issue.book record and creates one register.date
    row per issued copy."""
    _name = "issue.wizard"
    _description = "Issue Wizard"

    # Confirm the issue.book record this wizard was opened from.
    def action_confirm(self):
        issue_book_record = self.env["issue.book"].search([("id", "=", self._context.get("active_id"))])
        issue_book_record.write({"state": "issued"})
        issue_book_record.issue_date = date.today()
        # Fixed 15-day loan period.
        issue_book_record.submission_date = date.today() + timedelta(days=15)
        for line in issue_book_record.book_lines_ids:
            # One register.date entry per issued copy of this title.
            for _ in range(line.issue_quantity):
                register_id = [
                    {
                        "entry_id": self._context.get("active_id"),
                        "issued_date": issue_book_record.issue_date,
                        "issued_book_id": line.book_name_id.id,
                        "book_name": line.book_name_id.book_name,
                    }
                ]
                create_data = self.env["register.date"].create(register_id)
| muchhalaamit/custom_addons_15 | library_management/wizards/issue_wizard.py | issue_wizard.py | py | 1,100 | python | en | code | 0 | github-code | 90 |
37627474879 | #pip install -U pyDataverse
#https://curlconverter.com/python/
from pyDataverse.api import NativeApi
import os, sys, requests, json
def connectDVN():
    """Read Dataverse connection settings from the JSON file ``../.env``.

    Returns a dict with ``BASE_URL`` and ``API_TOKEN``; exits the program
    when the configuration file is missing.
    """
    config_path = "../.env"
    if not os.path.isfile(config_path):
        # Fixed typo in the user-facing message ("not fount" -> "not found").
        print('Configuration file .env not found, copy env to .env and configure it!')
        sys.exit()
    with open(config_path) as dcjson:
        data = json.load(dcjson)
    dataverse_cfg = data['dataverse']
    return {"BASE_URL": dataverse_cfg['baseurl'], "API_TOKEN": dataverse_cfg['apikey']}
def createDataset(ROOT,NAME):
    # Placeholder: dataset creation is not implemented yet; always reports success.
    return True
def addFIle(DOI, file):
    """Upload *file* to the Dataverse dataset identified by *DOI*.

    Posts to the native API ``add`` endpoint; the file is attached as
    restricted, under source/code with the Code category. Errors are
    printed rather than raised (returns None either way).
    """
    env = connectDVN()
    headers = {'apikey': env['API_TOKEN'], 'key': env['API_TOKEN']}
    params = {}
    URL = (env['BASE_URL'] + 'api/datasets/:persistentId/add?persistentId='
           + DOI + "&key=" + env['API_TOKEN'])
    try:
        # The original caught the undefined name `Error` (NameError when an
        # exception actually occurred) and never closed the file handle;
        # a context manager plus concrete exception types fixes both.
        with open(file, 'rb') as file_handle:
            files = {
                'file': file_handle,
                'jsonData': (None, '{"description":"My description.","directoryLabel":"source/code","categories":["Code"], "restrict":"true", "tabIngest":"false"}'),
            }
            response = requests.post(URL,
                                     params=params,
                                     headers=headers,
                                     files=files,
                                     )
        print(response)
    except OSError as err:
        print(f"Error: '{err}'")
    except Exception as err:
        # requests.RequestException subclasses land here; requests itself is
        # imported at module level.
        print(f"Error: '{err}'")
| ReneFGJr/Brapci3.1 | python/dataverse/pydvn.py | pydvn.py | py | 1,325 | python | en | code | 0 | github-code | 90 |
20600703005 | import cv2
from mtcnn import FaceDetector
from PIL import Image
import numpy
detector = FaceDetector()
def camera_detect():
    """Run live face detection on the default webcam until 'q' is pressed."""
    capture = cv2.VideoCapture(0)
    while True:
        grabbed, frame = capture.read()
        # Convert the BGR OpenCV frame into a PIL image for the detector.
        pil_image = Image.fromarray(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))
        # Draw bounding boxes around every detected face.
        annotated = detector.draw_bboxes(pil_image)
        # Back to OpenCV's BGR layout for on-screen display.
        frame = cv2.cvtColor(numpy.asarray(annotated), cv2.COLOR_RGB2BGR)
        cv2.imshow("Face Detection", frame)
        # Exit the loop (stop the detector) when the user presses 'q'.
        if cv2.waitKey(1) & 0xFF == ord("q"):
            break
    capture.release()
    cv2.destroyAllWindows()
if __name__ == "__main__":
    # Launch the webcam demo only when executed as a script.
    camera_detect()
| imkuang/MTCNN-PyTorch | camera_demo.py | camera_demo.py | py | 835 | python | en | code | 14 | github-code | 90 |
13328503203 | from tkinter import *
from random import randint
from time import sleep
root = Tk()
root.title("Sorting Algorithms Visualiser")
def go():
    """Read the GUI inputs and bubble-sort freshly generated random data."""
    count = int(myEntryQuantity.get())
    low = int(myEntryRangeMin.get())
    high = int(myEntryRangeMax.get())
    # Generate `count` random integers inside the requested range.
    data = [randint(low, high) for _ in range(count)]
    bubble_sort(data)
def bubble_sort(data):
    """Bubble-sort `data` in place, logging and drawing each pass.

    Before every pass the current list is appended to the on-screen text
    log; after the pass the bars are redrawn and the loop pauses for one
    second so the user can watch progress. Stops after a full pass with
    no swaps.
    """
    iterations = 1
    is_sorted = False  # renamed: the original shadowed the builtin `sorted`
    while not is_sorted:
        change_made = False
        # Log the current state of the data array before this pass.
        myText.insert(END, "Iteration " + str(iterations) + ": " + str(data) + "\n")
        for i in range(0, len(data) - 1):
            if data[i] > data[i + 1]:
                # Tuple assignment replaces the three-line buffered swap.
                data[i], data[i + 1] = data[i + 1], data[i]
                change_made = True
        iterations += 1
        plot_boxes(data)
        sleep(1)
        if not change_made:
            is_sorted = True
def plot_boxes(data):
    """Redraw `data` as red vertical bars filling the 800x400 canvas."""
    myCanvas.delete("all")
    quantity = len(data)
    if quantity == 0:
        # Nothing to draw for an empty list (avoids division by zero below).
        myCanvas.update()
        return
    bar_width = 800 / quantity
    # Scale bar heights to the largest value actually present instead of the
    # previous hard-coded maximum of 50, which drew values above 50 off-canvas
    # when the user chose a larger range. `or 1` guards an all-zero list.
    peak = max(data) or 1
    for i, value in enumerate(data):
        # Top-left y: canvas height minus the bar's scaled height.
        top = 400 - (400 / peak) * value
        myCanvas.create_rectangle(i * bar_width, top, (i + 1) * bar_width, 400, fill="red")
    myCanvas.update()
# --- Widget construction -------------------------------------------------
myCanvas = Canvas(root, width=800, height=400)
myLabelQuantity = Label(root, text="Quantity of data points to sort:")
myEntryQuantity = Entry(root)
myLabelRangeMin = Label(root, text="Minimum value:")
myEntryRangeMin = Entry(root)
myLabelRangeMax = Label(root, text="Maximum value:")
myEntryRangeMax = Entry(root)
myButton = Button(root, text="Go!", command=go)
myText = Text(root, height=6)
# Sensible defaults so the demo runs with a single click.
myEntryQuantity.insert(0,"10")
myEntryRangeMin.insert(0,"0")
myEntryRangeMax.insert(0,"25")
# --- Grid layout: canvas on top, inputs left, log right, button below ----
myCanvas.grid(row=0, columnspan=7)
myLabelQuantity.grid(row=1,column=0)
myEntryQuantity.grid(row=1,column=1)
myLabelRangeMin.grid(row=2,column=0)
myEntryRangeMin.grid(row=2,column=1)
myLabelRangeMax.grid(row=3,column=0)
myEntryRangeMax.grid(row=3,column=1)
myText.grid(rowspan=3, row=1, column=2)
myButton.grid(row=4, columnspan=3)
root.mainloop() | jjdshrimpton/PythonJunk | Sorting Visualiser.py | Sorting Visualiser.py | py | 2,283 | python | en | code | 0 | github-code | 90 |
def swap(l, i, j):
    """Exchange the elements at indices i and j of l in place; return l."""
    # The original def line was corrupted by fused metadata, and the body
    # used a three-line temporary-variable swap; tuple assignment is the
    # idiomatic equivalent.
    l[i], l[j] = l[j], l[i]
    return l
def selection_sort(l):
    """Selection-sort l in place; return (l, number of swaps performed)."""
    cnt = 0
    n = len(l)
    for i in range(n):
        # Locate the smallest element in the unsorted tail l[i:].
        smallest = i
        for j in range(i + 1, n):
            if l[j] < l[smallest]:
                smallest = j
        # Only swap (and count) when the minimum actually moved.
        if smallest != i:
            swap(l, i, smallest)
            cnt += 1
    return l, cnt
if __name__ == '__main__':
    # Input format: first line N, second line N space-separated integers.
    N = int(input())
    l = list(map(int, input().split()))
    sl, cnt = selection_sort(l)
    # Output: the sorted sequence, then the number of swaps performed.
    print(' '.join(map(str, sl)))
    print(cnt)
| Aasthaengg/IBMdataset | Python_codes/p02260/s248452737.py | s248452737.py | py | 516 | python | en | code | 0 | github-code | 90 |
72435655018 | import json
import os
class CacheException(Exception):
    """Raised when the cache is missing, unreadable or unwritable."""
    pass
class FileBackedCache(object):
    """Cache persisted as a JSON file in the user's home directory."""

    LOCAL_FILE_NAME = '.small-improvements-cache'

    @property
    def LOCAL_FILE(self):
        # Full path of the cache file under $HOME.
        return os.environ['HOME'] + f'/{self.LOCAL_FILE_NAME}'

    def is_setup(self):
        """True when the cache file already exists on disk."""
        return os.path.isfile(self.LOCAL_FILE)

    def read_data(self):
        """Load and return the cached JSON data.

        Raises CacheException with a setup hint on any failure.
        """
        path = self.LOCAL_FILE
        try:
            with open(path, 'rb') as handle:
                return json.loads(handle.read())
        except IOError:
            raise CacheException(
                f'Could not find {path}. You should run setup first.'
            )
        except json.decoder.JSONDecodeError:
            raise CacheException(
                f'File {path} was not valid JSON. You should re-run setup to fix.'
            )
        except Exception:
            raise CacheException(
                f'Cound not read {path}. You should re-run setup to fix.'
            )

    def write_data(self, data):
        """Serialize *data* to the cache file as pretty-printed JSON."""
        try:
            with open(self.LOCAL_FILE, 'w') as handle:
                handle.write(json.dumps(data, sort_keys=True, indent=4))
        except IOError:
            raise CacheException(f'Could not write {self.LOCAL_FILE}')
class MemoryBackedCache(object):
    """Cache held purely in memory on the instance (nothing persisted)."""

    def is_setup(self):
        """True once write_data has stored a non-empty payload."""
        return bool(getattr(self, '_cache', {}))

    def read_data(self):
        """Return the stored data; raise CacheException when nothing is cached."""
        stored = getattr(self, '_cache', {})
        if not stored:
            raise CacheException('No data found')
        return stored

    def write_data(self, data):
        """Remember *data* for later read_data calls."""
        self._cache = data
| bcooksey/small-improvements-cli | caches.py | caches.py | py | 1,714 | python | en | code | 1 | github-code | 90 |
35050309622 | import logging as log
import sys
from pathlib import Path
def load_data(words_file=None):
    """Build the shared constants dict (letter sets, symbols, frequency order, words)."""
    upper = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    return {
        "WORDS": load_words(words_file),
        "UPPER_LETTERS": upper,
        "LETTERS": upper,
        # Upper + lower case letters plus whitespace characters.
        "LETTERS_AND_SPACE": upper + upper.lower() + " \t\n",
        "SYMBOLS": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890 !?.",
        # English letters ordered by decreasing frequency.
        "ETAOIN": "ETAOINSHRDLCUMWFGYPBVKJXQZ",
    }
def load_words(words_file=None):
    """Read a newline-separated word list into a dict of lowercase keys.

    Returns an empty dict when no file is given; exits the program when
    the given path does not exist.
    """
    if words_file is None:
        log.debug("Skipped loading WORDS file")
        return {}
    path = Path(words_file).resolve()
    if not path.exists():
        print(f"Unable to find {path}")
        sys.exit(1)
    with path.open() as handle:
        # Dict keys act as a set with O(1) membership tests.
        return {word.lower(): None for word in handle.read().split("\n")}
def get_english_count(message, data):
    """Return the fraction of whitespace-separated tokens found in data["WORDS"]."""
    words = data["WORDS"]
    candidates = remove_non_letters(message.lower(), data).split()
    if not candidates:
        log.debug("No words")
        return 0.0
    # Float accumulator keeps the debug output identical (e.g. "2.0").
    hits = float(sum(token in words for token in candidates))
    log.debug(f"Matches:{hits}, Possible words: {candidates}")
    return float(hits) / len(candidates)
def remove_non_letters(message, data):
    """Strip every character that is not a letter or whitespace."""
    allowed = data["LETTERS_AND_SPACE"]
    return "".join(ch for ch in message if ch in allowed)
def is_english(message, data, word_percentage=20, letter_percentage=85):
    """Heuristically decide whether *message* is English text.

    Requires at least word_percentage% recognised words and at least
    letter_percentage% letter/space characters.
    """
    enough_words = get_english_count(message, data) * 100 >= word_percentage
    letters = remove_non_letters(message, data)
    if len(message) > 0:
        letters_ratio = (float(len(letters)) / len(message)) * 100
    else:
        letters_ratio = 0.0
    return enough_words and letters_ratio >= letter_percentage
def get_words_file_path(path=None):
    """Locate the words file, walking up parent directories when needed."""
    candidate = "content/english-words/words.txt" if path is None else path
    attempts = 0
    # Prefix "../" until the file is found; give up after 100 attempts.
    while not Path(candidate).exists():
        candidate = "../" + candidate
        print(f"Looking for '{candidate}'")
        attempts += 1
        if attempts > 99:
            break
    return candidate
def make_word_patterns(data):
    """Group data["WORDS"] by letter pattern into data["WORD_PATTERNS"]; return data."""
    patterns = {}
    data["WORD_PATTERNS"] = patterns
    for word in data["WORDS"]:
        # Only alphabetic words have a meaningful letter pattern.
        if not word.isalpha():
            continue
        patterns.setdefault(get_word_pattern(word), []).append(word)
    return data
def get_word_pattern(word):
    """Encode *word* as a dot-separated pattern of first-occurrence indices.

    e.g. "hello" -> "0.1.2.2.3" (case-insensitive).
    """
    seen = {}
    pattern = []
    for letter in word.lower():
        if letter not in seen:
            # Next unused number equals how many distinct letters we've seen.
            seen[letter] = str(len(seen))
        pattern.append(seen[letter])
    return ".".join(pattern)
| srufle/cracking-codes | cc-py/detect_english.py | detect_english.py | py | 3,444 | python | en | code | 0 | github-code | 90 |
39128581039 | import cv2
import numpy as np
# Load the sample image (path is relative to this script's directory).
img = cv2.imread('../img/fish.jpg')
height, width = img.shape[:2]
# Downscale to half size; INTER_AREA is passed for the shrink.
dst1 = cv2.resize(img, (int(width*0.5), int(height*0.5)), interpolation=cv2.INTER_AREA)
# Upscale 2x via fx/fy scale factors with INTER_CUBIC interpolation.
dst2 = cv2.resize(img, None, None, 2, 2, cv2.INTER_CUBIC)
# Show original plus both resized versions until a key is pressed.
cv2.imshow("original", img)
cv2.imshow("small", dst1)
cv2.imshow("big", dst2)
cv2.waitKey(0)
cv2.destroyAllWindows()
| YeonwooSung/ai_book | CV/OpenCV/geometric_transform/scale_resize.py | scale_resize.py | py | 365 | python | en | code | 17 | github-code | 90 |
18016842263 | import cv2
import os
# Load the face and mouth Haar cascades.
face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')
mouth_cascade = cv2.CascadeClassifier('haarcascade_mcs_mouth.xml')

# Create directory to save mouth images
if not os.path.exists('mouths'):
    os.makedirs('mouths')

# Specify the source directory for images
source_dir = './image_data'

for filename in os.listdir(source_dir):
    img = cv2.imread(os.path.join(source_dir, filename))
    if img is not None:
        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        faces = face_cascade.detectMultiScale(gray, 1.3, 5)
        for (x, y, w, h) in faces:
            roi_gray = gray[y:y + h, x:x + w]
            roi_color = img[y:y + h, x:x + w]
            # Mouths sit in the lower half of a face, so crop the ROI down.
            roi_gray = roi_gray[int(0.5 * h):h, :]
            roi_color = roi_color[int(0.5 * h):h, :]
            mouths = mouth_cascade.detectMultiScale(roi_gray, 1.3, 5)
            for (mx, my, mw, mh) in mouths:
                cropped_mouth = roi_color[my:my + mh, mx:mx + mw]
                # Name each crop after its source image: the previous constant
                # output path made every detection overwrite the same file.
                stem = os.path.splitext(filename)[0]
                cv2.imwrite(f'./mouths/{stem}_mouth.png', cropped_mouth)
                break  # Save only one mouth per face
| draaliyu/Partial-face-extraction | mouth.py | mouth.py | py | 1,260 | python | en | code | 0 | github-code | 90 |
# Dictionary-backed "insert"/"find" command processor. The first statement
# below was corrupted by fused metadata in the original file.
a = {}
n = int(input())
for i in range(n):
    cmd, val = input().split()
    if cmd == 'insert':
        a[val] = val
    elif cmd == 'find':
        # Membership test on dict keys is O(1).
        print('yes' if val in a else 'no')
| Aasthaengg/IBMdataset | Python_codes/p02269/s549675563.py | s549675563.py | py | 174 | python | en | code | 0 | github-code | 90 |
36188023606 | import networkx as nx
from collections import defaultdict
from .utils.word_sets import WordSetCollection
def get_inheritance_tree(df, df_values, langs_of_interest=None):
    """Build a directed inheritance tree from every etymdb parent/child pair.

    Nodes carry the lang, lexeme and meaning of the matching df_values row;
    edges point from parent to child. When langs_of_interest is non-empty,
    only pairs whose two languages both belong to it are kept.
    """
    tree = nx.DiGraph()

    def register(ix):
        # Add the node and attach its lexical attributes from df_values.
        row = df_values.loc[ix]
        tree.add_node(ix)
        nx.set_node_attributes(
            tree,
            {ix: {"lang": row.lang, "lexeme": row.lexeme, "meaning": row.meaning}},
        )

    for _, pair in df.iterrows():
        child_ix = pair["child"]
        parent_ix = pair["parent"]
        # Negative indices mark missing endpoints; skip those pairs.
        if child_ix < 0 or parent_ix < 0:
            continue
        if langs_of_interest:
            if (df_values.loc[parent_ix].lang not in langs_of_interest
                    or df_values.loc[child_ix].lang not in langs_of_interest):
                continue
        register(parent_ix)
        register(child_ix)
        tree.add_edge(parent_ix, child_ix)
    return tree
def get_children_relations(dag: nx.DiGraph, children_langs: list,
                           allowed_ancestors: dict) -> WordSetCollection:
    """Collect cognate word sets for *children_langs* by exploring *dag*.

    Only valid when *dag* is a directed acyclic graph.

    :param dag: DAG of inheritance links; nodes carry "lang" and "lexeme"
    :param children_langs: languages whose descendant words are collected
    :param allowed_ancestors: maps each child language to the ordered list of
        ancestor languages an inheritance path must follow to count
    :return: a WordSetCollection with one word set per ancestral source word
    """
    def is_sublist(sublst, lst):
        # True when every element of sublst appears in lst in the same order
        # (lst is narrowed after each match so order is enforced).
        for element in sublst:
            try:
                ind = lst.index(element)
            except ValueError:
                return False
            lst = lst[ind + 1:]
        return True
    cog_set = WordSetCollection()
    # We create a set of all possible ancestor languages
    parent_langs = []
    for child_lang in children_langs:
        parent_langs.extend(allowed_ancestors[child_lang])
    parent_langs = set(parent_langs)
    # We look at the nodes without parents (roots of trees)
    # NOTE(review): the `- 1` below skips sources whose in-degree equals the
    # maximum in-degree tier — confirm this exclusion is intentional.
    for deg in range(max(d for n, d in dag.in_degree()) - 1):
        for source in [n for n, d in dag.in_degree() if d == deg and dag.nodes[n]["lang"] in parent_langs]:
            # We save the parent word
            cur_cogs = defaultdict(list)
            cur_cogs[dag.nodes[source]["lang"]].append(dag.nodes[source]["lexeme"])
            # We look at the descendants of the correct languages
            all_descendants = nx.descendants(dag, source)
            target_descendants = [d for d in all_descendants if dag.nodes[d]["lang"] in children_langs]
            if len(target_descendants) == 0:
                continue  # If no descendants, we continue
            # If we have two items or more that are "cousins"
            for target in target_descendants:
                target_lang = dag.nodes[target]["lang"]
                paths_source_to_target = nx.all_simple_paths(dag, source=source, target=target)
                # We look at each path (we should not get more than one often)
                for path in paths_source_to_target:
                    # If all ancestors are allowed and ordered properly
                    if is_sublist([dag.nodes[ix]["lang"] for ix in path], allowed_ancestors[target_lang] + [target_lang]):
                        cur_cogs[target_lang].append(dag.nodes[target]["lexeme"])
            # Keep only sets that relate at least two languages.
            if len(cur_cogs.keys()) > 1:
                cog_set.add_word_set(cur_cogs)
    return cog_set
| clefourrier/CopperMT | pipeline/data/management/from_etymdb/extract_data.py | extract_data.py | py | 3,814 | python | en | code | 9 | github-code | 90 |
7572441207 | from fastapi import FastAPI, Request
from transformers import AutoTokenizer, AutoModel
import uvicorn, json, datetime
import torch
import asyncio
DEVICE = "cuda"
DEVICE_ID = "0"
CUDA_DEVICE = f"{DEVICE}:{DEVICE_ID}" if DEVICE_ID else DEVICE
MAX_LENGTH = 4096
def torch_gc():
    """Release cached CUDA memory on the configured device (no-op without CUDA)."""
    if not torch.cuda.is_available():
        return
    with torch.cuda.device(CUDA_DEVICE):
        torch.cuda.empty_cache()
        torch.cuda.ipc_collect()
app = FastAPI()
gpu_lock = asyncio.Lock()
async def process_request(tokenizer, prompt, history, max_length, top_p, temperature):
    """Run one chat inference while holding the global GPU lock.

    The lock serializes requests so only one inference uses the GPU at a
    time. Falsy parameters fall back to defaults (max_length=4096,
    top_p=0.7, temperature=0.3). Returns (response, updated history).
    The original body carried a dead, commented-out duplicate of the call,
    which is removed here.
    """
    async with gpu_lock:
        response, history = model.chat(tokenizer,
                                       prompt,
                                       history=history,
                                       max_length=max_length if max_length else 4096,
                                       top_p=top_p if top_p else 0.7,
                                       temperature=temperature if temperature else 0.3)
        return response, history
@app.post("/")
async def create_item(request: Request):
global model, tokenizer
json_post_raw = await request.json()
json_post = json.dumps(json_post_raw)
json_post_list = json.loads(json_post)
prompt = json_post_list.get('prompt')
history = json_post_list.get('history')
max_length = json_post_list.get('max_length')
top_p = json_post_list.get('top_p')
temperature = json_post_list.get('temperature')
max_length=max_length if max_length else MAX_LENGTH
answer = {}
# print(f"client:{request.client.host}..........")
if len(prompt) > max_length:
answer = {
"response": f"输入的长度{len(prompt)}超过{max_length}, 目前AI的能力还无法处理这么长的输入...",
"history": history,
"status": 200,
"time": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
}
else:
start_time = datetime.datetime.now()
time = start_time.strftime('%Y-%m-%d %H:%M:%S')
print(f"[{time}]:prompt length={len(prompt)}")
response, history = await process_request(tokenizer= tokenizer,
prompt= prompt,
history= history,
max_length= max_length if max_length else MAX_LENGTH,
top_p=top_p if top_p else 0.7,
temperature=temperature if temperature else 0.3)
answer = {
"response": response,
"history": history,
"status": 200,
"time": time
}
end_time = datetime.datetime.now()
total_secs = (end_time - start_time).total_seconds()
time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
print(f"[{datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}]:response length={len(response)},total used {total_secs} secs")
torch_gc()
return answer
if __name__ == '__main__':
    # Load the tokenizer and half-precision CUDA model once at startup,
    # then serve the FastAPI app on port 6006 with a single worker
    # (inference is serialized by the GPU lock anyway).
    tokenizer = AutoTokenizer.from_pretrained("/root/autodl-tmp/models/chatglm-6b", trust_remote_code=True)
    model = AutoModel.from_pretrained("/root/autodl-tmp/models/chatglm-6b", trust_remote_code=True).half().cuda()
    model.eval()
    uvicorn.run(app, host='0.0.0.0', port=6006, workers=1)
| toby911/learngit | models/chatglm_api.py | chatglm_api.py | py | 3,561 | python | en | code | 0 | github-code | 90 |
16718890517 | import pygame
import os
from functions import load_images_from_json, load_sounds_from_json
# --- Global game state and configuration constants -----------------------
# FPS
FPS = 60
# game over
game_over = 0
# main menu
main_menu = True
level_menu = False
# level
level = 1
max_levels = 3
# score
score = 0
# tile size
TILE_SIZE = 50
# cols
COLS = 20
# screen size
SCREEN_WIDTH = TILE_SIZE * COLS
SCREEN_HEIGHT = TILE_SIZE * COLS
# colors
WHITE = (255, 255, 255)
SKYBLUE = 133, 183, 199
# sounds
music_on = True
sfx_on = True
# pause
is_paused = False
# Asset manifests live next to the working directory.
abs_dir = os.path.abspath(os.getcwd())
imgs_dir = os.path.join(abs_dir, 'image_paths.json')
sounds_dir = os.path.join(abs_dir, 'sound_paths.json')
font_dir = os.path.join(abs_dir, 'assets', 'fonts')
# import images
img_data = load_images_from_json(imgs_dir)
sound_data = load_sounds_from_json(sounds_dir)
# font
pygame.font.init()
font_score = pygame.font.SysFont('Arial', 30)
game_over_font = pygame.font.SysFont('Arial', 65)
# load external font
try :
    info_font = pygame.font.Font(os.path.join(font_dir, "pixel_font.ttf"), 50)
    info_font_2 = pygame.font.Font(os.path.join(font_dir, "pixel_font.ttf"), 30)
except FileNotFoundError as file_error:
    print("Archivo no encontrado: ", file_error)
    pygame.quit()
except pygame.error as pygame_error:
    print("Error de pygame: ", pygame_error)
    pygame.quit()
except Exception as error:
    print("Error : ", error)
    pygame.quit()
# load images
bkg_image = img_data['background_image']
restart_btn = img_data['restart_button']
next_btn = img_data['next_button']
home_btn = img_data['home_button']
start_btn = img_data['start_button']
options_btn = img_data['options_button']
exit_btn = img_data['exit_button']
information_btn = img_data['information_button']
sound_btn = img_data['sound_button']
music_btn = img_data['music_button']
back_btn = img_data['back_button']
level_1_btn = img_data['level_1_button']
level_2_btn = img_data['level_2_button']
level_3_btn = img_data['level_3_button']
UP_arrow = img_data['up_arrow']
DOWN_arrow = img_data['down_arrow']
LEFT_arrow = img_data['left_arrow']
RIGHT_arrow = img_data['right_arrow']
SPACE_key = img_data['space_key']
W_key = img_data['key_W']
A_key = img_data['key_A']
S_key = img_data['key_S']
D_key = img_data['key_D']
F_key = img_data['key_F']
P_key = img_data['key_P']
# load sounds
pygame.mixer.init()
# NOTE(review): pygame.mixer.music.load returns None, so bgm_music is
# presumably None and only the load side effect matters — confirm.
bgm_music = pygame.mixer.music.load("assets/sounds/bgm.mp3")
pygame.mixer.music.set_volume(0.5)
coin_fx = sound_data['coin_sound']
coin_fx.set_volume(0.5)
jump_fx = sound_data['jump_sound']
jump_fx.set_volume(0.5)
game_over_fx = sound_data['game_over_sound']
game_over_fx.set_volume(0.5)
hurt_fx = sound_data['hurt_sound']
hurt_fx.set_volume(0.5)
ranged_fx = sound_data['ranged_sound']
ranged_fx.set_volume(0.5)
hit_fx = sound_data['hit_sound']
hit_fx.set_volume(0.5)
projectile_fx = sound_data['projectile_sound']
projectile_fx.set_volume(0.3)
power_fx = sound_data['power_sound']
power_fx.set_volume(0.5)
# sprite groups
tile_group = pygame.sprite.Group()
pirate_enemy_group = pygame.sprite.Group()
platforms_group = pygame.sprite.Group()
traps_group = pygame.sprite.Group()
coin_group = pygame.sprite.Group()
door_group = pygame.sprite.Group()
life_group = pygame.sprite.Group()
power_up_group = pygame.sprite.Group()
| nachoar12/utn-prog-lab | pirate-platformer/settings.py | settings.py | py | 3,230 | python | en | code | 0 | github-code | 90 |
72606475817 | #!/usr/bin/env python
# coding: utf-8
# In[3]:
from keras.datasets import imdb
# In[6]:
# Keep only the 10000 most frequent words to bound the vocabulary.
(train_data, train_labels), (test_data, test_labels) = imdb.load_data(num_words=10000) #frequently used num_words words -> adequate data size
# In[7]:
# Notebook export: bare expressions below were inspected as cell outputs.
train_data[0]
# In[9]:
train_labels[0]
# In[13]:
max([max(sequence) for sequence in train_data]) #check index
# In[14]:
# Map word indices back to words to reconstruct a readable review.
word_index = imdb.get_word_index()
reverse_word_index = dict([(value, key) for (key, value) in word_index.items()])
decoded_review = ' '.join([reverse_word_index.get(i-3, '?') for i in train_data[0]])
# In[16]:
decoded_review #all keys with 0, 1, 2 is padding, start of doc, not in dict, so is removed by 3 and turned into ?
# In[19]:
import numpy as np
def vectorize_sequences(sequences, dimension=10000):
    """Multi-hot encode index sequences into a (len(sequences), dimension) float matrix."""
    encoded = np.zeros((len(sequences), dimension))
    for row, indices in enumerate(sequences):
        # Fancy indexing sets every listed position of this row to 1.0.
        encoded[row, indices] = 1.0
    return encoded
x_train = vectorize_sequences(train_data)
x_test = vectorize_sequences(test_data)
# In[20]:
x_train[0]
# In[21]:
y_train = np.asarray(train_labels).astype('float32')
y_test = np.asarray(test_labels).astype('float32')
# In[34]:
train_data
# In[35]:
from keras import models
from keras import layers
model = models.Sequential()
model.add(layers.Dense(16, activation='relu', input_shape=(10000,)))
model.add(layers.Dense(16, activation='relu'))
model.add(layers.Dense(1, activation='sigmoid'))
# In[36]:
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['accuracy'])
# In[37]:
from keras import optimizers
model.compile(optimizer=optimizers.RMSprop(lr=0.001),
loss='binary_crossentropy',
metrics=['accuracy'])
# In[38]:
"""
from keras import losses
from keras import metrics
model.compile(optimizer=optimizers.RMSprop(lr=0.001),
loss=losses.binary_crossentropy,
metrics=[metrics.binary_accuracy])
"""
# In[39]:
x_val = x_train[:10000]
partial_x_train = x_train[10000:]
y_val = y_train[:10000]
partial_y_train = y_train[10000:]
# In[40]:
history = model.fit(partial_x_train,
partial_y_train,
epochs=20,
batch_size=512,
validation_data=(x_val, y_val))
# In[44]:
import matplotlib.pyplot as plt
history_dict = history.history
loss = history_dict['loss']
val_loss = history_dict['val_loss']
epochs = range(1, len(loss) + 1)
plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title("Training and validation loss")
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend()
plt.show()
# In[49]:
plt.clf()
acc = history_dict['accuracy']
val_acc = history_dict['val_accuracy']
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epochs')
# In[50]:
history_dict.keys()
# In[52]:
model.fit(x_train, y_train, epochs=4, batch_size=512)
# In[53]:
results = model.evaluate(x_test, y_test)
# In[ ]:
| dajchoi/Deep_Learning | Movie Review.py | Movie Review.py | py | 3,149 | python | en | code | 0 | github-code | 90 |
18535283429 | X = int(input())
# Largest perfect power b**p (p >= 2) not exceeding X. The original
# initialized b = 2 and p = 2 before the loops, but both were immediately
# shadowed by the loop variables — dead assignments removed.
ans = 1
for b in range(1, X+1):
    for p in range(2, X+1):
        beki = b ** p
        if beki <= X:
            ans = max(ans, beki)
        else:
            # Powers only grow with p, so no larger exponent can fit.
            break
print(ans)
| Aasthaengg/IBMdataset | Python_codes/p03352/s697232755.py | s697232755.py | py | 209 | python | en | code | 0 | github-code | 90 |
23083677840 | #Ex12
# Read the user's date of birth (prompts are in Catalan).
dn_dia = int(input("Quin es el teu dia de naixement? "))
dn_mes = int(input("Quin es el teu mes de naixement? "))
dn_any = int(input("Quin es el teu any de naixement? "))
# Read today's date.
da_dia = int(input("Quin dia es avui? "))
da_mes = int(input("En quin mes estem? "))
da_any = int(input("En quin any estem? "))
# A birth year after the current year is impossible.
if da_any < dn_any:
    print("Error")
elif da_any > dn_any:
    # Birthday already passed this year vs. still to come.
    if da_mes > dn_mes:
        print(f"El teu aniversari va ser el {dn_dia}/{dn_mes}/{da_any}")
    elif da_mes < dn_mes:
        print(f"El teu aniversari sera el {dn_dia}/{dn_mes}/{da_any}")
    # NOTE(review): da_mes == dn_mes prints nothing (no day comparison),
    # and da_any == dn_any is silently ignored — confirm whether intended.
| mgarcia003/Programacio | UF1/if/ex12.py | ex12.py | py | 564 | python | ca | code | 0 | github-code | 90 |
24439715068 | import math
# Element count: read to consume the line but unused (split() below
# consumes the whole next line regardless).
n = input()
l = list(map(int,input().split()))
y = []
for i in l:
    # i is a perfect square iff its truncated square root squares back to i.
    # NOTE(review): math.isqrt would avoid float precision issues for very
    # large integers — confirm input bounds.
    x = int(math.sqrt(i))
    if x*x == i:
        y.append(i)
print(sum(y))
| koradasandhyadevi/codemind-python | Program_to_find_the_sum_of_perfect_square_elements_in_an_array.py | Program_to_find_the_sum_of_perfect_square_elements_in_an_array.py | py | 159 | python | en | code | 0 | github-code | 90 |
33448656798 | import pythoncom
import openpyxl
from win32com.client import Dispatch, gencache
import sys
# Подключение к API7 программы Kompas 3D
def get_kompas_api7():
    """Attach to a running Kompas 3D instance via COM.

    Returns a tuple (API7 type-library module, IKompasAPIObject, constants).
    """
    api_module = gencache.EnsureModule("{69AC2981-37C0-4379-84FD-5DD2F3C0A520}", 0, 1, 0)
    raw_dispatch = Dispatch("Kompas.Application.7")
    # Query the raw COM object for the typed IKompasAPIObject interface.
    kompas_api = api_module.IKompasAPIObject(
        raw_dispatch._oleobj_.QueryInterface(api_module.IKompasAPIObject.CLSID,
                                             pythoncom.IID_IDispatch))
    constants = gencache.EnsureModule("{75C9F5D0-B5B8-4526-8681-9903C567D2ED}", 0, 1, 0).constants
    return api_module, kompas_api, constants
# --- Attach to Kompas and open the document given on the command line ----
# The original called get_kompas_api7() twice (first result discarded);
# a single call is sufficient.
module7, api7, const7 = get_kompas_api7()  # connect to API7
app7 = api7.Application  # main application interface
app7.Visible = True  # show the window to the user (if hidden)
app7.HideMessage = const7.ksHideMessageNo  # answer "No" to any program prompts
print(app7.ApplicationName(FullName=True))  # print the application name

doc7 = app7.Documents.Open(PathName=sys.argv[1],
                           Visible=True,
                           ReadOnly=True)

# Kompas API constants
kompas6_constants = gencache.EnsureModule("{75C9F5D0-B5B8-4526-8681-9903C567D2ED}", 0, 1, 0).constants
kompas6_constants_3d = gencache.EnsureModule("{2CAF168C-7961-4B90-9DA2-701419BEEFE3}", 0, 1, 0).constants

# API5 interface descriptions
kompas6_api5_module = gencache.EnsureModule("{0422828C-F174-495E-AC5D-D31014DBBE87}", 0, 1, 0)
kompas_object = kompas6_api5_module.KompasObject(
    Dispatch("Kompas.Application.5")._oleobj_.QueryInterface(kompas6_api5_module.KompasObject.CLSID,
                                                             pythoncom.IID_IDispatch))

# API7 interface descriptions
kompas_api7_module = gencache.EnsureModule("{69AC2981-37C0-4379-84FD-5DD2F3C0A520}", 0, 1, 0)
application = kompas_api7_module.IApplication(
    Dispatch("Kompas.Application.7")._oleobj_.QueryInterface(kompas_api7_module.IApplication.CLSID,
                                                             pythoncom.IID_IDispatch))
Documents = application.Documents

# Active document, its 3D model and the variable collection.
kompas_document = application.ActiveDocument
kompas_document_3d = kompas_api7_module.IKompasDocument3D(kompas_document)
iDocument3D = kompas_object.ActiveDocument3D()
kPart = iDocument3D.GetPart(kompas6_constants_3d.pTop_Part)
varcoll = kPart.VariableCollection()
varcoll.refresh()

# Read the 7 geometry parameters from the Excel workbook (argv[2]);
# argv[3]/argv[4] are the 0-based row/column offsets of the first value.
wb_obj = openpyxl.load_workbook(sys.argv[2])
sheet_obj = wb_obj.active
column = sheet_obj.max_column + 1
values = []
for i in range(0, 7):
    values.append(sheet_obj.cell(row=int(sys.argv[3])+1, column=int(sys.argv[4])+1+i).value)
print(values)

# Push the values into the named Kompas model variables.
variable_names = ['N1', 'L1', 'N2', 'L2', 'a2', 'N3', 'L3']
for name, value in zip(variable_names, values):
    variable = varcoll.GetByName(name, True, True)
    variable.value = value

# Rebuild the model and redraw the document.
kPart.RebuildModel()
iDocument3D.RebuildDocument()

savePath = kompas_document.PathName[:-4]
print(savePath)
# BUG FIX: sys.argv[3] is a string, so `sys.argv[3]+1` raised TypeError;
# convert to int before incrementing (matching the row lookup above).
kompas_document.SaveAs(savePath + f'{int(sys.argv[3])+1}.stp')
kompas_document.Close(True)
37793934774 | """
Script to process the summary statistics
"""
import pandas as pd
from config.constants import PROCESSED_DATA_PATH
# Chains for which aggregated TVL time series are loaded; the empty frames
# are placeholders replaced immediately below.
TVL_AGG_DICT = {
    "Total": pd.DataFrame(),
    "Ethereum": pd.DataFrame(),
    "Binance": pd.DataFrame(),
    "Tron": pd.DataFrame(),
}

# Iterate keys only: the original iterated .items() but never used the
# placeholder value it unpacked.
for chain in TVL_AGG_DICT:
    TVL_AGG_DICT[chain] = pd.read_csv(
        f"{PROCESSED_DATA_PATH}/defillama/defillama_tvl_all_{chain}.csv"
    )
| lyc0603/tvl-measurement | environ/data_processing/defillama_chain_tvl_info.py | defillama_chain_tvl_info.py | py | 426 | python | en | code | 1 | github-code | 90 |
4939969477 | from binder.BoundAssignmentExpression import BoundAssignmentExpression
from binder.BoundBinaryExpression import BoundBinaryExpression
from binder.BoundBlockStatement import BoundBlockStatement
from binder.BoundDeclarationExpression import BoundDeclarationExpression
from binder.BoundFunctionCall import BoundFunctionCall
from binder.BoundFunctionDeclaration import BoundFunctionDeclaration
from binder.BoundIfStatement import BoundIfStatement
from binder.BoundLiteralExpression import BoundLiteralExpression
from binder.BoundNode import BoundNode
from binder.BoundReturnStatement import BoundReturnStatement
from binder.BoundUnaryExpression import BoundUnaryExpression
from binder.BoundVariableExpression import BoundVariableExpression
from binder.BoundWhileStatement import BoundWhileStatement
from token_handling.TokenTypes import TokenTypes
from type_handling.Types import Types
from type_handling.helperFunctions import getType
from variables.default_functions.InbuiltFunctions import InbuiltFunctions
from random import random
from pointers import ptrVal, pointer
from error.ErrorBag import ErrorBag
class Evaluator:
    def __init__(self, errorBagPtr: pointer):
        # Store the pointer; it is dereferenced lazily via the errorBag property.
        self._errorBagPtr = errorBagPtr
    @property
    def errorBag(self) -> ErrorBag:
        """Dereference the stored pointer to the shared ErrorBag."""
        return ptrVal(self._errorBagPtr)
    def evaluate(self, syntaxTree: BoundBlockStatement):
        """Evaluate a whole bound program and return its resulting value."""
        self.syntaxTree = syntaxTree
        # Current scope is assigned as block statements are entered.
        self.scope = None
        # Flag used to unwind out of nested blocks when a `return` executes.
        self.returnFromBlock = False
        return self.evaluateNode(self.syntaxTree)
    def evaluateNode(self, node: BoundNode):
        """Dispatch *node* to the evaluator method matching its bound type.

        Falls through and returns None for any node type not listed below.
        """
        if isinstance(node, BoundBlockStatement):
            return self.evaluateBlockStatement(node)
        if isinstance(node, (BoundDeclarationExpression, BoundFunctionDeclaration)):
            return self.evaluateDeclarationExpression(node)
        if isinstance(node, BoundAssignmentExpression):
            return self.evaluateAssignmentExpression(node)
        if isinstance(node, BoundBinaryExpression):
            return self.evaluateBinaryExpression(node)
        if isinstance(node, BoundFunctionCall):
            return self.evaluateFunctionCall(node)
        if isinstance(node, BoundIfStatement):
            return self.evaluateIfCondition(node)
        if isinstance(node, BoundLiteralExpression):
            return self.evaluateLiteralExpression(node)
        if isinstance(node, BoundReturnStatement):
            return self.evaluateReturnStatement(node)
        if isinstance(node, BoundVariableExpression):
            return self.evaluateVariableExpression(node)
        if isinstance(node, BoundWhileStatement):
            return self.evaluateWhileStatement(node)
        if isinstance(node, BoundUnaryExpression):
            return self.evaluateUnaryExpression(node)
    def evaluateBlockStatement(self, node: BoundBlockStatement):
        """Evaluate each child in the block's scope; return the last value produced."""
        prevScope = self.scope
        self.scope = node.scope
        value = None
        for boundExpression in node.children:
            value = self.evaluateNode(boundExpression)
            if self.returnFromBlock:
                # A return unwinds until the enclosing function block, which
                # clears the flag; plain blocks just stop evaluating early.
                if node.isFunction:
                    self.returnFromBlock = False
                break
        # Restore the caller's scope on the way out.
        self.scope = prevScope
        return value
def evaluateAssignmentExpression(self, node: BoundAssignmentExpression):
val = self.evaluateNode(node.varValue)
self.scope.updateValue(node.varName, val, node.varValue.text_span)
return val
def evaluateBinaryExpression(self, node: BoundBinaryExpression):
# Arithmetic Operators
if node.operator.isInstance(TokenTypes.PlusOperator):
if node.type == Types.String:
return str(self.evaluateNode(node.left)) + str(
self.evaluateNode(node.right)
)
return self.evaluateNode(node.left) + self.evaluateNode(node.right)
if node.operator.isInstance(TokenTypes.MinusOperator):
return self.evaluateNode(node.left) - self.evaluateNode(node.right)
if node.operator.isInstance(TokenTypes.StarOperator):
return self.evaluateNode(node.left) * self.evaluateNode(node.right)
if node.operator.isInstance(TokenTypes.SlashOperator):
if node.type == Types.Int:
return self.evaluateNode(node.left) // self.evaluateNode(node.right)
return self.evaluateNode(node.left) / self.evaluateNode(node.right)
if node.operator.isInstance(TokenTypes.ModOperator):
return self.evaluateNode(node.left) % self.evaluateNode(node.right)
if node.operator.isInstance(TokenTypes.CaretOperator):
return self.evaluateNode(node.left) ** self.evaluateNode(node.right)
# Boolean Operators
if node.operator.isInstance(TokenTypes.OrOperator):
return self.evaluateNode(node.left) or self.evaluateNode(node.right)
if node.operator.isInstance(TokenTypes.AndOperator):
return self.evaluateNode(node.left) and self.evaluateNode(node.right)
if node.operator.isInstance(TokenTypes.NEOperator):
return self.evaluateNode(node.left) != self.evaluateNode(node.right)
if node.operator.isInstance(TokenTypes.EEOperator):
return self.evaluateNode(node.left) == self.evaluateNode(node.right)
if node.operator.isInstance(TokenTypes.GEOperator):
return self.evaluateNode(node.left) >= self.evaluateNode(node.right)
if node.operator.isInstance(TokenTypes.GTOperator):
return self.evaluateNode(node.left) > self.evaluateNode(node.right)
if node.operator.isInstance(TokenTypes.LEOperator):
return self.evaluateNode(node.left) <= self.evaluateNode(node.right)
if node.operator.isInstance(TokenTypes.LTOperator):
return self.evaluateNode(node.left) < self.evaluateNode(node.right)
def evaluateDeclarationExpression(self, node: BoundDeclarationExpression):
# For Declaration, value need not be updated as the binder initiates with the value
# the variable gets.
# For Assignment, the value cannot be updated in the binder.
#
# eg: a = a + 2
# When evaluating, 'a' has a value of 'a + 2' if the value is updated in the binder,
# which leads to an infinite loop as 'a' keeps trying to find the value of 'a'.
if isinstance(node.varValue, BoundFunctionCall):
val = self.evaluateFunctionCall(node.varValue)
self.scope.updateValue(node.varName, val, node.varValue.text_span)
return val
return self.scope.tryGetVariable(node.varName)[1]
def evaluateFunctionCall(self, node: BoundFunctionCall):
success, func = self.scope.tryGetVariable(node.name)
if not success:
# All non declared variables should be taken care of in the binder
raise NameError(f"Variable {node.name} doesn't exist.")
params = []
for i in range(len(func.params)):
param = func.params[i].copy()
value = self.evaluateNode(node.paramValues[i])
param.value = BoundLiteralExpression(
Types.Int, value, node.paramValues[i].text_span, type(value) == list
)
params.append(param)
if node.function_type == InbuiltFunctions.Input:
input_val = input(*params)
if node.type == Types.Int:
try:
return int(input_val)
except:
self.errorBag.typeError(type(input_val), node.type, node.text_span)
return 0
if node.type == Types.Float:
try:
return float(input_val)
except:
self.errorBag.typeError(type(input_val), node.type, node.text_span)
return 0.0
if node.type == Types.Bool:
if input_val == "true" or input_val == "false":
return input_val == "true"
elif type(input_val) == bool:
return input_val
self.errorBag.typeError(type(input_val), node.type, node.text_span)
return False
return input_val
if node.function_type == InbuiltFunctions.Random:
return random()
if node.function_type == InbuiltFunctions.Print:
print(*params)
else:
functionBody = func.functionBody.copy()
functionBody.addVariables(params)
return self.evaluateNode(functionBody)
def evaluateIfCondition(self, node: BoundIfStatement):
isTrue = self.evaluateNode(node.condition)
if isTrue:
return self.evaluateNode(node.thenBlock)
elif node.elseBlock:
return self.evaluateNode(node.elseBlock)
def evaluateLiteralExpression(self, node: BoundLiteralExpression):
if node.isList:
return [self.evaluateNode(item) for item in node.value]
return node.value
def evaluateReturnStatement(self, node: BoundReturnStatement):
returnVal = self.evaluateNode(node.to_return)
self.returnFromBlock = True
return returnVal
def evaluateVariableExpression(self, node: BoundVariableExpression):
success, var = self.scope.tryGetVariable(node.name)
if not success:
# All non declared variables should be taken care of in the binder
raise NameError(f"Variable {node.name} doesn't exist.")
return self.evaluateNode(var.value)
def evaluateWhileStatement(self, node: BoundWhileStatement):
value = None
while self.evaluateNode(node.condition):
value = self.evaluateNode(node.whileBlock)
return value
def evaluateUnaryExpression(self, node: BoundUnaryExpression):
if node.operator.isInstance(TokenTypes.MinusOperator):
return -(self.evaluateNode(node.operand))
if node.operator.isInstance(TokenTypes.NotOperator):
return not (self.evaluateNode(node.operand))
if node.operator.isInstance(TokenTypes.PlusPlusOperator):
self.scope.updateValue(
node.operand.name,
self.evaluateNode(node.operand) + 1,
node.operand.text_span,
)
if node.operator.isInstance(TokenTypes.MinusMinusOperator):
self.scope.updateValue(
node.operand.name,
self.evaluateNode(node.operand) - 1,
node.operand.text_span,
)
return self.evaluateNode(node.operand)
| Lutetium-Vanadium/compiler | src/Evaluator.py | Evaluator.py | py | 10,598 | python | en | code | 0 | github-code | 90 |
from utils import *
import numpy as np
# Define the Points of Interest in Unreal coordinates
# Coordinates appear to be raw Unreal Engine world units -- TODO confirm
# units (Unreal defaults to centimeters) against the AirSim environment.
baseballDiamond = np.array([[-18045, 24560, 320]]) # Baseball Diamond
lakeFountain = np.array([[3000, 5000, -430]]) # Lake
tennisCourt = np.array([[-9400, -38390, 90]]) # Tennis Courts
farField = np.array([[-84820, -15650, 10]]) # Far Field
# Concatenate the POIs into a single array
worldPOIs = np.concatenate(
    (
        baseballDiamond,
        lakeFountain,
        tennisCourt,
        farField,
    )
)
# Define the Labels for the POIs (order must match worldPOIs rows)
poiLabels = [
    "Baseball Diamond",
    "Lake Fountain",
    "Tennis Court",
    "Far Field",
]
# Create the Waypoints for the Parent Drone to fly
# NOTE(review): parameter semantics (surveyAltitude/sweepAmplitude units,
# numSweeps=0 meaning) are defined in utils.POIPath -- verify there.
droneWaypoints = POIPath(
    POIs=worldPOIs,
    POI_Labels=poiLabels,
    surveyAltitude=30,
    sweepAmplitude=50,
    numSweeps=0,
    numWaypoints=120,
    plotFlag=True,
)
# Generate Random Spawn Points for Child Drones
# FOV is presumably [range, horizontal, vertical] in degrees -- TODO confirm
# against utils.droneSpawn.
droneSpawnPoints = droneSpawn(
    waypoints=droneWaypoints,
    numDrones=1,
    FOV=np.array([20, 90, 60]),
    plotFlag=False,
)
| rbasaam/AirSim-Tools | diagnostic.py | diagnostic.py | py | 1,057 | python | en | code | 0 | github-code | 90 |
from telegram import Update
from telegram.ext import Updater, CommandHandler, CallbackContext
import config_bot
from telegram import Update
from telegram.ext import ApplicationBuilder, ContextTypes, CommandHandler
# from telegram import ChatAction
# Обработчик команды /get_link
async def get_link(update: Update, context: CallbackContext):
    """Send links to recent messages of a hard-coded chat to the user.

    NOTE(review): with python-telegram-bot v20 (ApplicationBuilder is used
    below) Bot methods such as ``get_chat`` and ``send_message`` are
    coroutines and must be awaited; as written they only create coroutine
    objects and perform no API request -- confirm and add ``await``.
    NOTE(review): ``Bot.fetch_chat_history`` does not exist in
    python-telegram-bot (the Bot API cannot read chat history), so this
    line will raise AttributeError -- confirm the intended library/approach.
    """
    # Get the chat link from the command arguments
    # chat_link = context.args[0]
    # Extract the chat ID from the link
    chat_id = '-1001589363065'
    # Send a bot chat action so the user sees that something is happening
    # context.bot.send_chat_action(chat_id=update.effective_chat.id, action=ChatAction.TYPING)
    # Fetch information about the chat (result is unused below)
    chat = context.bot.get_chat(chat_id)
    # Get an iterator over the chat's messages
    messages = context.bot.fetch_chat_history(chat_id, limit=10)
    # Loop over the first 10 messages and send a deep link for each
    for message in messages:
        message_link = f"https://t.me/c/{chat_id}/{message.message_id}"
        context.bot.send_message(update.effective_chat.id, message_link)
if __name__ == '__main__':
    # Build the bot application and register /start to trigger get_link.
    application = ApplicationBuilder().token(config_bot.TOKEN_BOT).build()
    start_handler = CommandHandler('start', get_link)
    application.add_handler(start_handler)
    # Blocks here, polling Telegram for updates until interrupted.
    application.run_polling()
| chibiherbie/detective-assistant-in-tg | test_bot.py | test_bot.py | py | 1,575 | python | ru | code | 0 | github-code | 90 |
"""
Reduce is a function that takes a function and a list as arguments, and returns a single value as result.
The function is called with a lambda function and a list and a new reduced result is returned.
This performs a repetitive operation over the pairs of the list.
Syntax:
reduce(function, sequence)
function - function that is called for each pair of the list
sequence - list of elements which is to be reduced
Example:
reduce(lambda x, y: x+y, [1,2,3,4,5])
# (((1+2)+3)+4)+5
# (((3)+3)+4)+5
# (((6)+4)+5)
# (((10)+5))
# 15
"""
# 1. Traditional approach: add the elements of a list with an explicit loop.
from functools import reduce

num_list = [1, 2, 3, 4, 5, 6]

# Accumulator renamed from `sum` to `total`: `sum` shadows the built-in
# sum() function.
total = 0
for i in num_list:
    total = total + i
print(total)

# 2. The same fold written in one line using reduce().
total = reduce(lambda x, y: x + y, num_list)
print(total)
# Read the stick count and lengths, then look for the two largest pairs of
# equal-length sticks; print the product of the pair lengths, or 0 if two
# pairs cannot be formed.
n = int(input())
sticks = sorted(map(int, input().split()))
product = 1
pairs_found = 0
idx = len(sticks) - 1
# Scan downward from the largest stick, pairing equal adjacent lengths.
while idx >= 1:
    if sticks[idx] == sticks[idx - 1]:
        product *= sticks[idx]
        pairs_found += 1
        idx -= 2
        if pairs_found == 2:
            print(product)
            break
    else:
        idx -= 1
else:
    # Loop ran out before finding two pairs.
    print(0)
| Aasthaengg/IBMdataset | Python_codes/p03625/s079620105.py | s079620105.py | py | 251 | python | en | code | 0 | github-code | 90 |
from django.shortcuts import get_object_or_404
from rest_framework import viewsets
from rest_framework import mixins
from rest_framework import filters
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.permissions import (
IsAuthenticated,
IsAuthenticatedOrReadOnly,
)
from posts.models import Group, Post, User
from .permissions import IsAuthorOrReadOnly
from .serializers import (
CommentSerializer,
FollowSerializer,
GroupSerializer,
PostSerializer,
)
class GroupReadOnlyViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only endpoints (list/retrieve) for all groups."""
    queryset = Group.objects.all()
    serializer_class = GroupSerializer
class PostViewSet(viewsets.ModelViewSet):
    """Full CRUD for posts; writes are restricted to the post's author."""
    queryset = Post.objects.all()
    serializer_class = PostSerializer
    permission_classes = (IsAuthorOrReadOnly, IsAuthenticatedOrReadOnly)
    # Enables ?limit=&offset= query-parameter pagination.
    pagination_class = LimitOffsetPagination
    def perform_create(self, serializer):
        # Stamp the authenticated user as the author on creation.
        serializer.save(author=self.request.user)
class CommentViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for comments nested under a post.

    The parent post is identified by the ``post_id`` URL kwarg; a missing
    post yields a 404 for both listing and creation.
    """
    serializer_class = CommentSerializer
    permission_classes = (IsAuthorOrReadOnly, IsAuthenticatedOrReadOnly)

    def _get_post(self):
        # Shared lookup for the parent post (raises Http404 if absent).
        return get_object_or_404(Post, pk=self.kwargs.get("post_id"))

    def get_queryset(self):
        # Only comments belonging to the post from the URL.
        return self._get_post().comments.all()

    def perform_create(self, serializer):
        # Attach the authenticated author and the parent post.
        serializer.save(author=self.request.user, post=self._get_post())
class CreateAndListViewSet(
    mixins.CreateModelMixin, mixins.ListModelMixin, viewsets.GenericViewSet
):
    """Base viewset exposing only the ``create`` and ``list`` actions."""
    pass
class FollowViewSet(CreateAndListViewSet):
    """List the current user's subscriptions and create new ones."""
    serializer_class = FollowSerializer
    permission_classes = (IsAuthenticated,)
    filter_backends = (filters.SearchFilter,)
    # Allows ?search=<username> filtering on the followed user's name.
    search_fields = ("following__username",)
    def get_queryset(self):
        # Only subscriptions where the requester is the follower.
        return self.request.user.follower.all()
    def perform_create(self, serializer):
        # Resolve the target user by username from the request body;
        # 404 if no such user exists.
        following_name = self.request.data.get("following")
        following = get_object_or_404(User, username=following_name)
        serializer.save(user=self.request.user, following=following)
| gweicox/api_final_yatube | yatube_api/api/views.py | views.py | py | 2,150 | python | en | code | 0 | github-code | 90 |
# https://github.com/NVIDIA/TensorRT/blob/master/quickstart/SemanticSegmentation/tutorial-runtime.ipynb
# Import required modules
import numpy as np
import os
import pycuda.driver as cuda
import pycuda.autoinit
import tensorrt as trt
import matplotlib.pyplot as plt
from PIL import Image
# Logger shared by all TensorRT runtime objects created in this script.
TRT_LOGGER = trt.Logger()
# Filenames of TensorRT plan file and input/output images.
engine_file = "fcn-resnet101.engine"
input_file = "input.ppm"
output_file = "output.ppm"
#Utilities for input / output processing
# For torchvision models, input images are loaded in to a range of [0, 1] and
# normalized using mean = [0.485, 0.456, 0.406] and stddev = [0.229, 0.224, 0.225].
def preprocess(image):
    """Scale *image* to [0, 1], normalize with the ImageNet statistics used
    by torchvision models, and return the pixels as float32 in CHW order."""
    imagenet_mean = np.array([0.485, 0.456, 0.406], dtype=np.float32)
    imagenet_std = np.array([0.229, 0.224, 0.225], dtype=np.float32)
    scaled = np.asarray(image).astype(np.float32) / 255.0
    normalized = (scaled - imagenet_mean) / imagenet_std
    # HWC -> CHW
    return normalized.transpose(2, 0, 1)
def postprocess(data):
    """Colorize a 2-D array of per-pixel class ids as a paletted PIL image.

    Each of the 21 Pascal-VOC classes gets a distinct color derived from a
    fixed integer palette.
    """
    num_classes = 21
    # One RGB color per class: scale the base palette by the class index.
    base = np.array([2 ** 25 - 1, 2 ** 15 - 1, 2 ** 21 - 1])
    colors = (np.arange(num_classes)[:, None] * base % 255).astype("uint8")
    segmented = Image.fromarray(data.astype('uint8'), mode='P')
    segmented.putpalette(colors)
    return segmented
# Load TensorRT engine
def load_engine(engine_file_path):
    """Deserialize and return a prebuilt TensorRT engine from disk."""
    assert os.path.exists(engine_file_path)
    print("Reading engine from file {}".format(engine_file_path))
    with open(engine_file_path, "rb") as f, trt.Runtime(TRT_LOGGER) as runtime:
        return runtime.deserialize_cuda_engine(f.read())
# Inference pipeline
# Inference pipeline
def infer(engine, input_file, output_file):
    """Run one segmentation inference with *engine*.

    Reads *input_file*, normalizes it, executes the network on the GPU, and
    writes the colorized class map to *output_file* as a PPM image.
    """
    print("Reading input image from file {}".format(input_file))
    with Image.open(input_file) as img:
        input_image = preprocess(img)
        image_width = img.width
        image_height = img.height
    with engine.create_execution_context() as context:
        # Set input shape based on image dimensions for inference
        context.set_binding_shape(engine.get_binding_index("input"), (1, 3, image_height, image_width))
        # Allocate host and device buffers, one binding per engine tensor.
        bindings = []
        for binding in engine:
            binding_idx = engine.get_binding_index(binding)
            size = trt.volume(context.get_binding_shape(binding_idx))
            dtype = trt.nptype(engine.get_binding_dtype(binding))
            if engine.binding_is_input(binding):
                # Contiguous host copy of the input plus its device buffer.
                input_buffer = np.ascontiguousarray(input_image)
                input_memory = cuda.mem_alloc(input_image.nbytes)
                bindings.append(int(input_memory))
            else:
                # Page-locked host buffer so the async device->host copy works.
                output_buffer = cuda.pagelocked_empty(size, dtype)
                output_memory = cuda.mem_alloc(output_buffer.nbytes)
                bindings.append(int(output_memory))
        stream = cuda.Stream()
        # Transfer input data to the GPU.
        cuda.memcpy_htod_async(input_memory, input_buffer, stream)
        # Run inference
        context.execute_async_v2(bindings=bindings, stream_handle=stream.handle)
        # Transfer prediction output from the GPU.
        cuda.memcpy_dtoh_async(output_buffer, output_memory, stream)
        # Synchronize the stream (all async work above completes here).
        stream.synchronize()
    # The flat output holds one class id per pixel; reshape to H x W and
    # colorize it for saving.
    with postprocess(np.reshape(output_buffer, (image_height, image_width))) as img:
        print("Writing output image to file {}".format(output_file))
        img.convert('RGB').save(output_file, "PPM")
# plot output image
# plt.imshow(Image.open(input_file))
# print(Image.open(input_file))
# plt.show()
# print(Image.open(output_file))
# run infer
# First inference run on the default input.ppm.
print("Running TensorRT inference for FCN-ResNet101")
with load_engine(engine_file) as engine:
    infer(engine, input_file, output_file)
# # segment output
# plt.imshow(Image.open(output_file))
# 2nd infer with diff img size (cat 388x386)
import requests          # kept for the commented-out download path below
from io import BytesIO   # kept for the commented-out download path below
output_image="cat_input.ppm"
# Read sample image input and save it in ppm format
print("Exporting ppm image {}".format(output_image))
# response = requests.get("https://github.com/huukim911/onnx-exp/blob/main/cat.jpg")
with Image.open("cat.jpg") as img:
    ppm = Image.new("RGB", img.size, (255, 255, 255))
    # NOTE(review): the blue band is used as the paste mask; the upstream
    # NVIDIA tutorial pasted an RGBA image with its alpha band. For an RGB
    # JPEG this blends with the white canvas per blue intensity -- confirm.
    ppm.paste(img, mask=img.split()[2])
    ppm.save(output_image)
# Second inference run on the freshly exported cat image.
input_file = "cat_input.ppm"
output_file = "cat_output.ppm"
print("Running TensorRT inference for FCN-ResNet101 2nd time")
with load_engine(engine_file) as engine:
    infer(engine, input_file, output_file)
import numpy as numpy  # NOTE(review): unused in this script
import cv2
import matplotlib.pyplot as plt

# NOTE(review): `fake_images` is not defined in this file; it is assumed to
# be provided by the calling environment (e.g. a notebook) with one entry
# per image stored under fake_images/<index>.jpg -- confirm.
NUM_COLUMNS = 4
# Rows needed to fit every image: ceiling division. The original computed
# `len(fake_images) % NUM_COLUMNS` (the remainder, e.g. 8 images -> 0 rows),
# which makes add_subplot fail for most image counts.
ROWS_COUNT = (len(fake_images) + NUM_COLUMNS - 1) // NUM_COLUMNS
COLUMNS_COUNT = NUM_COLUMNS

subfig = []
fig = plt.figure(figsize=(12, 9))
for i in range(1, len(fake_images) + 1):
    subfig.append(fig.add_subplot(ROWS_COUNT, COLUMNS_COUNT, i))
    # OpenCV loads BGR; convert to RGB for matplotlib display.
    img_bgr = cv2.imread('fake_images/' + str(i - 1) + '.jpg')
    img_rgb = cv2.cvtColor(img_bgr, cv2.COLOR_BGR2RGB)
    subfig[i - 1].imshow(img_rgb)
fig.subplots_adjust(wspace=0.3, hspace=0.3)
plt.show()
import numpy as np
from flask import Flask
from flask import request
from flask import jsonify
app = Flask(__name__)
def GeoReadyFun(Temp, E, Tar):
    """Simulate room heating and return JSON with minutes until *Tar*.

    Temp -- current internal temperature (string or number)
    E    -- external temperature
    Tar  -- target temperature

    Steps a simple Newton-cooling model plus a heater term,
    ``samples_per_hour`` samples per hour over a 4-hour window.
    NOTE(review): if the target is never reached within 4 hours, the final
    sample time is reported as if it had been -- confirm that is intended.
    """
    def heating(t, on_off):
        # Per-sample heater output; ramps up linearly during the first
        # `start_period` hours, then delivers constant power.
        heating_power = 5
        start_period = 0.5
        if on_off == 1:
            if t < start_period:
                return t * heating_power / samples_per_hour / start_period
            else:
                return heating_power / samples_per_hour
        else:
            return 0
    samples_per_hour = 30
    # Cooling coefficient per sample (negative: T decays toward T_E).
    h_c = -0.077 / samples_per_hour
    T_0 = float(Temp)
    T_E = float(E)
    target = float(Tar)
    time_to_simulate = 4
    time = np.linspace(0, time_to_simulate, time_to_simulate * samples_per_hour)
    # Empirical correction factor applied to the heater term.
    coef = 1.2
    T = T_0
    # Step until the target is reached (heater always on: on_off=1).
    for point in time:
        if T < target:
            T = T + h_c * (T - T_E) + heating(point, 1) * coef
        else:
            break
    time_needed = point * 60  # hours -> minutes
    return jsonify(minutes=int(time_needed))
@app.route("/")
def hello():
    """HTTP endpoint: estimate heating time from query parameters.

    Expects ?internal_temp=&external_temp=&target=.
    NOTE(review): a missing parameter arrives as None, so float(None) in
    GeoReadyFun raises TypeError (HTTP 500) -- consider validating input.
    """
    T = request.args.get('internal_temp')
    E = request.args.get('external_temp')
    Tar = request.args.get('target')
    return GeoReadyFun(T, E, Tar)
| n0m0r3pa1n/geoready-server | app.py | app.py | py | 1,136 | python | en | code | 0 | github-code | 90 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated migration: create the ``generic_variable`` table."""
    dependencies = [
        ('ordertogo', '0009_auto_20151109_2207'),
    ]
    operations = [
        migrations.CreateModel(
            name='generic_variable',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('code', models.CharField(max_length=45, verbose_name=b'Code')),
                ('value', models.CharField(max_length=45, verbose_name=b'Value')),
                # NOTE(review): max_length on a TextField is not enforced at
                # the database level by Django.
                ('description', models.TextField(max_length=45, verbose_name=b'Descripcion')),
            ],
        ),
    ]
| contrerasjlu/bullpen-arepas-prod | ordertogo/migrations/0010_generic_variable.py | 0010_generic_variable.py | py | 741 | python | en | code | 0 | github-code | 90 |
import copy
# Interactive shopping-cart exercise: shows a numbered product list, lets
# the user pick items by number, and settles the bill against the entered
# salary when 'q' is typed.
# NOTE(review): a choice that is not a number (or is out of range) makes
# int(good) / the list index raise -- input validation appears to be
# deliberately absent in this exercise; confirm.
order_list = [['iphone',1000],['ipad',500],['airpod',800],['book',80],['toy',120],['xiaomi',300]]
order_total = int(len(order_list))  # NOTE(review): unused
#print(order_total)
total_cost =0
total_order = []
# `good` doubles as state: False = nothing chosen yet, otherwise the last
# menu number the user typed (as a string), or 'q' to quit.
good = False
# Deep copy so the numbering loop below can overwrite item names with menu
# numbers without touching order_list.
calc_list = copy.deepcopy(order_list)
salary = input("input your salary:")
if salary.isdigit():
    salary = int(salary)
    while good != "q":
        # NOTE(review): the printed balance never decreases while shopping;
        # spending is only reflected at checkout -- confirm intended.
        print("Your balance is ", salary)
        # Print the menu as "<number> [name, price]" and record the number
        # in calc_list so prices can be looked up by menu number later.
        for a in range(1,2):
            for b in order_list:
                print(a, b)
                calc_list[(a-1)][0] = a
                a += 1
        #print(calc_list)
        if good is False:
            print('Your cart is empty')
        else:
            # The item chosen on the previous pass is added to the cart here.
            total_order.append(order_list[int(good)-1])
            print("Your have order: ",total_order )
        good = input("Which one you want to buy? Or press 'q' exit shopping:")
        if good != "q":
            price = calc_list[int(good)-1][1]
            total_cost += price
            #print(total_cost)
        elif good == 'q':
            # Checkout: compare the running total against the salary.
            remain_cost = int(salary) - total_cost
            print("Shopping close")
            if remain_cost >= 0:
                print("Your balance is:",remain_cost,".Have a nice shopping")
            else:
                print("Money is not enough, return goods, close shopping")
            break
else:
    print("Please type Real number ")
| lp55323/Python-exercise | Day2_Exercise1.py | Day2_Exercise1.py | py | 1,363 | python | en | code | 0 | github-code | 90 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.