id stringlengths 1 7 | text stringlengths 6 1.03M | dataset_id stringclasses 1
value |
|---|---|---|
3287044 | from discord.ext import commands
from discord import errors
from cogs.utils import utils
import traceback
import datetime
import storage
import discord
import glob
import helpformat
description = '''Nurevam's Command List.
To enable more commands, you must visit dashboard to enable certain plugins you want to run.
If there is a problem with the prefix etc, please do @nurevam prefix to see what prefix you can do
Any problem relating to Nurevam, please do contact owner Maverun (´・ω・`)#3333
First └ mean it is commands under that plugin, and if there is one or more under commands, it is a sub command that can invoke by doing !parent subcommand such as !rank global
'''
bot = commands.Bot(command_prefix=commands.when_mentioned_or("!"), description=description,hidden = True,pm_help = False,help_command=helpformat.Custom_format())
bot.db= storage.Redis()
redis = utils.redis
def check_post(check):
    """Translate a stored Delete_MSG config value into a ``delete_after`` timeout.

    Returns 30 (seconds) when the setting is "on"; otherwise returns None so
    the message is never auto-deleted.
    """
    if check == "on":
        return 30
    # "None", an unset key, or any unrecognised value: keep the message.
    return None
async def say(ctx, **kwargs):
    """Send a reply, auto-deleting it after the guild's per-cog Delete_MSG timeout."""
    print("at say function", ctx, kwargs)
    # Per-guild, per-cog setting: "on" -> delete after 30s, anything else -> keep.
    check = await bot.db.redis.hget("{}:Config:Delete_MSG".format(ctx.message.guild.id), ctx.command.cog_name.lower())
    return await ctx.send(**kwargs, delete_after=check_post(check))
# Expose the helper on the bot object so cogs can call bot.say(...).
bot.say = say
@bot.event
async def on_ready():
    """One-time startup: record uptime/owner, load cogs, then set presence.

    NOTE(review): indentation was lost in extraction; the hasattr guard is
    assumed to wrap the one-time initialisation (on_ready can fire again on
    reconnect) — confirm against upstream.
    """
    print('Logged in')
    print(bot.user.id)
    print('------')
    if not hasattr(bot, 'uptime'):
        bot.uptime = datetime.datetime.utcnow()
        bot.owner = (await bot.application_info()).owner
        bot.background = {}
        bot.id_discourse = 0
        load_cogs()
    await bot.change_presence(activity = discord.Game("http://nurevam.site/"))
async def command_checker(msg):
    """Adjust prefix/help behaviour for the guild a message came from.

    Reads the guild's custom prefix and "whisper help" settings from redis
    and mutates the shared bot state accordingly.  Best-effort: any failure
    (DMs with no guild, redis being down, ...) leaves the current settings.
    """
    try:
        if isinstance(msg.channel, discord.DMChannel):
            # DMs have no guild config; only restore the default prefix on "!reply".
            if "!reply" in msg.content:
                bot.command_prefix = commands.when_mentioned_or("!")
            return
        if bot.user.id == 181503794532581376:
            # Hard-coded id of the secondary bot account: fixed "$" prefix.
            bot.command_prefix = commands.when_mentioned_or("$")
            bot.pm_help = False
            return
        cmd_prefix = await bot.db.redis.get("{}:Config:CMD_Prefix".format(msg.guild.id)) or "!"
        cmd_prefix = cmd_prefix.split(",")
        # An empty prefix would match every message; bail out to avoid spam.
        if '' in cmd_prefix:
            return
        bot.command_prefix = commands.when_mentioned_or(*cmd_prefix)
        if "help" in msg.content:
            # Guild owners can choose DM help vs in-channel help.
            if await bot.db.redis.get("{}:Config:Whisper".format(msg.guild.id)) == "on":
                bot.dm_help = True
            else:
                bot.dm_help = False
    except Exception:
        # Deliberate best-effort swallow (was a bare `except:`, which would
        # also have eaten KeyboardInterrupt/SystemExit).
        pass
@bot.event
async def on_message(msg):
    # Refresh per-guild prefix/help settings before dispatching commands.
    await command_checker(msg)
    await bot.process_commands(msg)
@bot.event
async def on_message_edit(before, msg):
    # Edited messages can contain commands too; re-check settings and dispatch.
    await command_checker(msg)
    await bot.process_commands(msg)
def load_cogs():
    """Import every extension under cogs/, printing failures in red."""
    for extension in list_cogs():
        try:
            bot.load_extension(extension)
            print("Load {}".format(extension))
        except Exception as error:
            utils.prRed(extension)
            utils.prRed(error)
def list_cogs():
    """Return importable module paths ("cogs.<name>") for every cog file in cogs/.

    Skips the package's __init__.py.  Separators are normalised so this works
    on both Windows and POSIX (the original converted everything to "\\").
    """
    modules = []
    for path in glob.glob("cogs/*.py"):
        # Normalise to forward slashes, then take the bare filename.
        filename = path.replace("\\", "/").rsplit("/", 1)[-1]
        if filename == "__init__.py":
            continue
        # Strip only the trailing ".py" — str.replace(".py", "") would also
        # mangle names like "a.py.py".
        modules.append("cogs." + filename[:-3])
    return modules
@bot.event
async def on_error(event, *args, **kwargs):
    """Global event-error hook: log the traceback and DM it to the bot owner."""
    print(event)
    print(args)
    print(kwargs)
    Current_Time = datetime.datetime.utcnow().strftime("%b/%d/%Y %H:%M:%S UTC")
    utils.prRed(Current_Time)
    utils.prRed("Error!")
    utils.prRed(traceback.format_exc())
    # Wrap the traceback in a code fence so Discord renders it readably.
    error = '```py\n{}\n```'.format(traceback.format_exc())
    await bot.owner.send("```py\n{}```".format(Current_Time + "\n" + "ERROR!") + "\n" + error)
if __name__ == '__main__':
    # Blocks until the process exits; token comes from the deployment secrets.
    bot.run(utils.secret["nurevam_token"])
| StarcoderdataPython |
3270906 | from os import environ
def assert_in(file, files_to_check):
    """Return True when *file* is present in *files_to_check*.

    Raises AssertionError naming the missing entry otherwise.
    """
    if file in files_to_check:
        return True
    raise AssertionError("{} does not exist in the list".format(str(file)))
def assert_in_env(check_list: list):
    """Assert that every name in *check_list* is a defined environment variable.

    Returns True when all are present; assert_in raises on the first miss.
    """
    for item in check_list:
        assert_in(item, environ.keys())
    return True
| StarcoderdataPython |
1799468 | # Faça um algoritmo que leia o salário de um funcionário e mostre seu novo salário, com 15% de aumento.
# Read a salary and print it with a 15% raise applied (prompts are Portuguese).
sal = float(input('Digite o salário: '))
print('O salario com aumento é de R$ {:.2f}'.format(sal * 1.15))
| StarcoderdataPython |
3362530 | <filename>03/kunningklown/solution.py
"""
read file
split data on line return file_line1 file_line2
split data on comma for both lines
iterate through each instruction for each line to create wire1 wire2
if points go from (0,0) to (0,3) line = (0,0),(0,1),(0,2),(0,3)
if r,l,u,d add number
"""
class TraceCircuit():
    """Trace two wires from comma-separated move instructions (AoC 2019 day 3 style).

    Reads two instruction lines from ``sample2.txt`` at construction time and
    exposes the visited points per wire via :meth:`get_list_of_lines`.

    NOTE(review): this implementation looks unfinished — several methods
    mutate state in ways that appear to be bugs (flagged inline below).
    """
    def __init__(self):
        self._x = 0             # current x position of the tracing pen
        self._y = 0             # current y position of the tracing pen
        self._cords = []        # points visited by the wire currently being traced
        self._steps = []        # unused
        self._file_line_1 = []  # wire 1 instruction tokens, e.g. ["R8", "U5", ...]
        self._file_line_2 = []  # wire 2 instruction tokens
        self._file_data = []    # [wire 1 tokens, wire 2 tokens]
        self._wires = []        # per-segment point lists (see lines_from_instruction)
        self._get_data_from_file()  # side effect: reads sample2.txt
        self._set_cords()           # record the (0, 0) origin
    def _up(self, count: str):
        # NOTE(review): unused by _process_intcode; direction is not encoded
        # in the call, so up/down are indistinguishable here.
        self.lines_from_instruction("y", int(count))
    def _down(self, count: str):
        # NOTE(review): presumably should pass a negative count; see _up.
        self.lines_from_instruction("y", int(count))
    def _left(self, count: str):
        # NOTE(review): presumably should pass a negative count; see _up.
        self.lines_from_instruction("x", int(count))
    def _right(self, count: str):
        self.lines_from_instruction("x", int(count))
    def _get_data_from_file(self):
        """Read sample2.txt: one comma-separated instruction line per wire."""
        lines = []       # unused
        file_count = 0   # unused
        with open('sample2.txt', 'r') as input_data:
            parts = input_data.read().split("\n")
            self._file_line_1 = parts[0].split(",")
            self._file_data.append(self._file_line_1)
            self._file_line_2 = parts[1].split(",")
            self._file_data.append(self._file_line_2)
    def _set_cords(self):
        # Record (and echo) the current pen position.
        self._cords.append({"x": self._x, "y": self._y})
        print({"x": self._x, "y": self._y})
    def _process_intcode(self, instruction: list):
        """Apply every "<dir><count>" token of one wire's instruction list."""
        for step in instruction:
            count = int(step[1:])
            print(f"{step} : ")
            if step.startswith("U"):
                self.lines_from_instruction("y", int(count))
            if step.startswith("D"):
                self.lines_from_instruction("y", int(count * -1))
            if step.startswith("L"):
                self.lines_from_instruction("x", int(count * -1))
            if step.startswith("R"):
                self.lines_from_instruction("x", int(count))
    def lines_from_instruction(self, x_or_y: str, end_point: int):
        """Record every intermediate point of one straight segment.

        NOTE(review): the axes look swapped ('x' moves self._y and vice
        versa); range(end_point) is empty for the negative counts D/L pass
        in; and each append stores a reference to the shared self._cords
        list rather than a copy.
        """
        line_parts = []
        for _ in range(end_point):
            if x_or_y == 'x':
                self._y = _   # NOTE(review): probably meant self._x
                self._set_cords()
            else:
                self._x = _   # NOTE(review): probably meant self._y
                self._set_cords()
            line_parts.append(self._cords)
        self._wires.append(line_parts)
    def get_list_of_lines(self):
        """Trace both wires and return [wire-1 points, wire-2 points]."""
        point_set = []
        for instruction in self._file_data:
            self._process_intcode(instruction)
            point_set.append(self._cords)
            # Reset the pen before tracing the next wire.
            self._cords = [{"x": 0, "y": 0}]
            self._x = 0
            self._y = 0
        return point_set
    def calc_mahatten(self, input_cord: list):
        """Manhattan distance of a point from the origin.

        NOTE(review): both x2 and y2 read input_cord[1] while x1/y1 are fixed
        at 0, so this returns 2*abs(input_cord[1]) rather than |x| + |y|.
        """
        x1 = 0
        x2 = input_cord[1]
        y1 = 0
        y2 = input_cord[1]
        distance = abs(x1 - x2) + abs(y1 - y2)
        return distance
# Driver: trace both wires from sample2.txt and fetch their point lists.
# (The intersection / Manhattan-distance search that followed was left fully
# commented out by the author and has been removed; best_match/point_index
# remain as its leftover accumulators.)
test = TraceCircuit()
cord_set = test.get_list_of_lines()
point_index = 0
best_match = 0
| StarcoderdataPython |
89889 | <reponame>fr33ky/signalserver<filename>fileuploads/migrations/0010_auto_20160605_2219.py
# -*- coding: utf-8 -*-
# Generated by Django 1.10.dev20160107235441 on 2016-06-05 22:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Widen row.cut_off_number and row.result_number to DecimalField(7, 3)."""

    dependencies = [
        ('fileuploads', '0009_result_row'),
    ]

    operations = [
        migrations.AlterField(
            model_name='row',
            name='cut_off_number',
            field=models.DecimalField(decimal_places=3, default=0, max_digits=7),
        ),
        migrations.AlterField(
            model_name='row',
            name='result_number',
            field=models.DecimalField(decimal_places=3, default=0, max_digits=7),
        ),
    ]
| StarcoderdataPython |
3221268 | <reponame>dwoz/pytest-salt
# -*- coding: utf-8 -*-
# Import python libs
import logging
# Register the custom TRACE/GARBAGE log levels Salt uses, unless another
# import has already installed them on the logging module.
if not hasattr(logging, 'TRACE'):
    logging.TRACE = 5
    logging.addLevelName(logging.TRACE, 'TRACE')
if not hasattr(logging, 'GARBAGE'):
    logging.GARBAGE = 1
    logging.addLevelName(logging.GARBAGE, 'GARBAGE')
# Extra pytest plugins this test suite relies on.
pytest_plugins = 'pytester', 'tornado'
def pytest_configure(config):
    """Force a detailed log line format for the run (overrides any ini value)."""
    config._inicache['log_format'] = '%(asctime)s,%(msecs)04.0f [%(name)-5s:%(lineno)-4d][%(processName)-8s][%(levelname)-8s] %(message)s'
| StarcoderdataPython |
3225819 | from django.contrib import admin
from problem.models import Problem, SolvedProblem, ProblemStatusByLevel
# Register your models here.
admin.site.register(Problem)
admin.site.register(SolvedProblem)
admin.site.register(ProblemStatusByLevel) | StarcoderdataPython |
99836 | from collections import namedtuple
import numpy as np
from untwist import data, utilities, transforms
Anchors = namedtuple('Anchors', ['Distortion',
'Artefacts',
'Interferer',
'Quality'],
)
class Anchor:
'''
Anchor signals for a MUSHRA test assessing source separation
techniques. Four different anchors are provided for assessing the
perception regarding interference, distortion, artefacts, and
overall sound quality. The first three were designed after [1], the
quality anchor after [2].
[1] <NAME>., <NAME>., <NAME>., & <NAME>. (2011).
Subjective and Objective Quality Assessment of Audio Source
Separation. IEEE TASLP, 19(7), 2046–2057.
http://doi.org/10.1109/TASL.2011.2109381
[2] <NAME>., <NAME>., & <NAME>. (2016).
Evaluation of Quality of Sound Source Separation Algorithms:
Human Perception vs Quantitative Metrics. In EUSIPCO
(pp. 1758–1762).
http://doi.org/10.1109/EUSIPCO.2016.7760550
'''
def __init__(self,
target,
others,
trim_factor_distorted=0.2,
trim_factor_artefacts=0.99,
low_pass_artefacts=False,
low_pass_cutoff=3500,
include_background_in_quality_anchor=True,
loudness_normalise_interferer=True,
):
'''
target:
The target audio, e.g. vocals
others:
Can be a list of everthing else, or just the accompaniment (Wave).
trim_factor_distorted:
Proportion of spectral frames to remove randomly in time.
trim_factor_artefacts:
Proportion of time-frequency bins to randomly remove.
'''
from scipy import signal
# We need a single background
if isinstance(others, list):
self.background = sum(other for other in others)
else:
self.background = others
if not isinstance(target, data.audio.Wave):
raise ValueError('target must be of type Wave.')
self.target = target
points = 2048
window = signal.get_window('hann', points, True)
self.stft = transforms.STFT(window, points, points // 2)
self.istft = transforms.ISTFT(window, points, points // 2)
self.cut_off = utilities.conversion.nearest_bin(low_pass_cutoff,
points,
target.sample_rate)
self.trim_factor_distorted = trim_factor_distorted
self.trim_factor_artefacts = trim_factor_artefacts
self.low_pass_artefacts = low_pass_artefacts
self.include_background_in_quality_anchor = include_background_in_quality_anchor
self.loudness_normalise_interferer = loudness_normalise_interferer
def distorted_anchor(self):
'''
Returns the distortion signal created by low-pass filtering the
target source signal to a 3.5 kHz cutoff frequency and by randomly
setting 20% of the remaining time-frequency coefficients to zero,
see [1].
WARNING: this code can't reproduce the distortion from [1]
exactly!
'''
x_fft = self.stft.process(self.target)
x_fft[self.cut_off:] = 0
num_frames_to_remove = int(x_fft.shape[1] * self.trim_factor_distorted)
idx = np.random.choice(x_fft.shape[1],
num_frames_to_remove,
replace=False)
x_fft[:, idx] = 0
distortion = self.istft.process(x_fft)
return distortion[:self.target.num_frames]
def inteference_anchor(self):
'''
Interference anchor for a MUSHRA listening test.
The anchor is created by the sum of target signal and the
interferer. The interferer is formed by summing all interfering
sources and then setting the overall loudness to that of the target,
see [1].
'''
interferer = self.background.copy()
if self.loudness_normalise_interferer:
interferer.loudness = self.target.loudness
interferer += self.target
return interferer
def artefacts(self):
'''
Returns the artefacts signal (musical noise) generated by randomly
zeroing 99% of the time-frequency bins, see [1].
'''
x_fft = self.stft.process(self.target)
idx = np.random.choice(
x_fft.size,
size=int(x_fft.size * self.trim_factor_artefacts),
replace=False)
row, col = np.unravel_index(idx, x_fft.shape)
x_fft[row, col] = 0
if self.low_pass_artefacts:
x_fft[self.cut_off:] = 0
artefacts = self.istft.process(x_fft)
return artefacts[:self.target.num_frames]
def artefacts_anchor(self):
'''
Artefacts anchor for a MUSHRA listening test.
The anchor is defined as the sum of the target with musical
noise; both equally loud. Musical noise is created by randomly
zeroing 99% of the time-frequency bins, see [1].
'''
artefacts = self.artefacts()
artefacts.loudness = self.target.loudness
anchor = artefacts + self.target
return anchor
def quality_anchor(self):
'''
Quality anchor for a MUSHRA listening test.
The anchor is defined as the sum of the distortion anchor,
artefacts only and interferer only; all equally loud, see [2].
'''
target_loudness = -23
signals = []
signals_to_sum = [self.distorted_anchor(), self.artefacts()]
if self.include_background_in_quality_anchor:
signals_to_sum.append(self.background)
for signal in signals_to_sum:
signal.loudness = target_loudness
signals.append(signal)
anchor = sum(signals)
anchor = anchor[:self.target.num_frames]
return anchor
def create(self):
return Anchors(self.distorted_anchor(),
self.artefacts_anchor(),
self.inteference_anchor(),
self.quality_anchor())
class RemixAnchor():
def __init__(self,
target,
others,
trim_factor_distorted=0.2,
trim_factor_artefacts=0.99,
target_level_offset=-14,
quality_anchor_loudness_balance=[0, 0],
low_pass_cutoff=3500):
'''
target:
The target audio, e.g. vocals
others:
Can be a list of everthing else, or just the accompaniment (Wave).
trim_factor_distorted:
Proportion of spectral frames to remove randomly in time.
trim_factor_artefacts:
Proportion of time-frequency bins to randomly remove.
target_level_offset:
The level adjustment applied to the target for the balance anchor.
quality_anchor_loudness_balance:
The desired loudness balance of [distorted_audio, artefacts], e.g.
setting [10, 0] would set the distorted audio to be 10 LU above
the artefacts. Default is [0, 0] = equal loudness.
'''
# We need a single background
if isinstance(others, list):
self.background = sum(other for other in others)
else:
self.background = others
self.target = target
self.mix = self.target + self.background
self.anchor_gen = Anchor(self.mix,
None,
trim_factor_distorted,
trim_factor_artefacts,
low_pass_artefacts=True,
low_pass_cutoff=low_pass_cutoff)
self.target_level_offset = target_level_offset
self.quality_anchor_loudness_balance = np.array(
quality_anchor_loudness_balance)
def distorted_anchor(self):
'''
Returns the distortion mix created by low-pass filtering the
target source signal to a 3.5 kHz cutoff frequency and by randomly
setting 20% of the remaining time-frequency coefficients to zero,
see [1].
'''
return self.anchor_gen.distorted_anchor()
def artefacts_anchor(self):
'''
Returns the artefacts mix (musical noise) generated by randomly
zeroing 99% of the time-frequency bins, see [1].
'''
return self.anchor_gen.artefacts_anchor()
def interferer_anchor(self):
'''
Mixes the background with the target offset by 'target_level_offset'.
'''
mix = (self.target *
utilities.conversion.db_to_amp(self.target_level_offset) +
self.background)
return mix
def interferer_anchor_both_sources(self):
'''
Returns the target and background as used to create the interferer
anchor (but not normalised).
'''
return (self.target *
utilities.conversion.db_to_amp(self.target_level_offset),
self.background)
def quality_anchor(self):
'''
Sum of the distorted mix and artefacts of the mix, at equal loudness.
You can adjust the loudness balance by setting the attribute
'quality_anchor_loudness_balance' (default is an array of zeros).
'''
target_loudness = (np.array([-23.0, -23.0]) +
(self.quality_anchor_loudness_balance -
self.quality_anchor_loudness_balance.mean())
)
signals = []
for signal, loudness in zip([self.distorted_anchor(),
self.anchor_gen.artefacts()],
target_loudness):
signal.loudness = loudness
signals.append(signal)
anchor = sum(signals)
anchor = anchor[:self.target.num_frames]
return anchor
def create(self):
return Anchors(self.distorted_anchor(),
self.artefacts_anchor(),
self.interferer_anchor(),
self.quality_anchor())
| StarcoderdataPython |
1683973 | #Todo: Create a setup file | StarcoderdataPython |
3202414 | # compute nim values using negamax and a dictionary
# that holds values already computed RBH 2019
def get_piles():
    """Prompt until the user enters valid nim pile sizes; return them as a tuple."""
    while True:
        raw = input('nim game pile sizes (eg. 3 5 7) ')
        try:
            dim = tuple( int(x) for x in raw.split() )
            # Need at least one pile, and no negative sizes.
            if len(dim) > 0 and all(d >= 0 for d in dim):
                return dim
        except ValueError:
            pass
        print('invalid, try again')
"""
sd dictionary(state: boolean), true if player-to-move wins
"""
def winning(nim_psn, sd, depth):  # tuple, dictionary, recursion depth
    """Memoised negamax nim solver.

    Returns True iff the player to move from *nim_psn* can force a win.
    *sd* caches position -> result; *depth* indents the trace output and
    marks the root call (depth 0 also prints the verdict / a winning move).
    """
    if nim_psn in sd:
        return sd[nim_psn]
    print(' '*depth, nim_psn)
    # Every pile empty: the player to move has no move and loses.
    if not any(nim_psn):
        sd[nim_psn] = False
        return False
    # Sort to canonicalise — permutations of the same piles are isomorphic.
    psn = tuple(sorted(nim_psn))
    for pile in range(len(psn)):
        for remain in range(psn[pile]):
            child = tuple(sorted(psn[:pile] + (remain,) + psn[pile+1:]))
            # Any move to a losing child makes this a winning position.
            if not winning(child, sd, depth+1):
                sd[nim_psn] = True
                if depth == 0: print('\nwinning: move to ', child)
                return True
    # No move reaches a losing child: this position is lost.
    sd[nim_psn] = False
    if depth == 0: print('\nlosing')
    return False
# Entry point: read the piles, then solve from the root (S memoises
# every position encountered during the search).
v = get_piles()
S = dict()
w = winning(v,S,0)
1778184 | from __future__ import division, print_function
import tensorflow as tf
import numpy as np
import os
import pprint
import sys
import keras
from keras.models import Sequential, Model
from keras.layers import Dense, Activation, Dropout, Input
from keras.utils import to_categorical
from keras import regularizers, initializers
from keras.callbacks import TensorBoard, ModelCheckpoint, ReduceLROnPlateau
from keras.layers.normalization import BatchNormalization
import pickle
project_path='/Users/jerry/Desktop/Gene_chip/'
pp = pprint.PrettyPrinter()
flags = tf.app.flags
flags.DEFINE_string("gpu", "0", "GPU(s) to use. [0]")
flags.DEFINE_float("learning_rate", 2.5e-3, "Learning rate [2.5e-4]")
flags.DEFINE_integer("batch_size", 200, "The number of batch images [4]")
flags.DEFINE_integer("save_step", 500, "The interval of saveing checkpoints[500]")
flags.DEFINE_string("checkpoint", "checkpoint", "Directory name to save the checkpoints [checkpoint]")
flags.DEFINE_string("log", "summary", "log [log]")
flags.DEFINE_integer("epoch", 100, "Epoch[10]")
dim=22283
flags.DEFINE_integer("pre_epoch", 5, "Epoch[10]")
FLAGS = flags.FLAGS
def GeneModel():
    """Frozen two-layer encoder (dim -> 8196 -> 512) with pretrained weights.

    Both Dense layers are trainable=False; their weights are restored from
    premodel/pre_weights.pkl (layers[1]/layers[2] are the Dense layers in
    this functional graph).
    """
    inputs = Input(shape=(dim, ))
    hidden1 = Dropout(0.5)(BatchNormalization(axis = 1)(Dense(8196, activation = 'relu', trainable=False)(inputs)))
    hidden2 = Dropout(0.5)(BatchNormalization(axis = 1)(Dense(512, activation = 'relu', trainable=False)(hidden1)))
    model = Model(inputs = inputs, outputs = hidden2)
    # Fix: open the pickle in binary mode and close the handle deterministically
    # (the original used open(..., 'r') and leaked the file object).
    with open(project_path + 'premodel/pre_weights.pkl', 'rb') as weight_file:
        weights = pickle.load(weight_file)
    model.layers[1].set_weights(weights[0])
    model.layers[2].set_weights(weights[1])
    return model
def dim_reduction():
    """Run every sample through the encoder and save the 512-d codes to disk."""
    model = GeneModel()
    x = np.load(project_path + 'data/all_raw_data.npy')
    print(x.shape)
    opt = keras.optimizers.Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08)
    # NOTE(review): the loss/metrics here are irrelevant — only predict() is
    # used below; compile() is needed just to make the model runnable.
    model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy'])
    x_reduction = model.predict(x, batch_size=FLAGS.batch_size, verbose=0)
    np.save("autoencode.npy", x_reduction)
if __name__ == '__main__':
dim_reduction()
| StarcoderdataPython |
3348718 | <reponame>volodink/ubx-decoder-embedded
from ubx import *
# Python 2 script: serialise a fixed alternating sequence of type-3/type-0
# UBX packets into the sender's data file (getPacket comes from the ubx
# module and returns an array-like object with a .tofile() method).
print 'Writing data file ...'
# create file and write message in it
dataFile = open('../sender/data.txt', 'wb')
packet = getPacket(3)
packet.tofile(dataFile)
packet = getPacket(0)
packet.tofile(dataFile)
packet = getPacket(3)
packet.tofile(dataFile)
packet = getPacket(0)
packet.tofile(dataFile)
packet = getPacket(3)
packet.tofile(dataFile)
packet = getPacket(0)
packet.tofile(dataFile)
packet = getPacket(3)
packet.tofile(dataFile)
dataFile.close()
print 'Writing data file ... done'
| StarcoderdataPython |
3358713 | # Migration test
#
# Copyright (c) 2019 Red Hat, Inc.
#
# Authors:
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
#
# This work is licensed under the terms of the GNU GPL, version 2 or
# later. See the COPYING file in the top-level directory.
import tempfile
from avocado_qemu import Test
from avocado import skipUnless
from avocado.utils import network
from avocado.utils import wait
from avocado.utils.path import find_command
class Migration(Test):
    """
    Live-migration smoke tests between two QEMU instances on one host.

    :avocado: tags=migration
    """

    # Seconds to wait for either side to reach a terminal migration state.
    timeout = 10

    @staticmethod
    def migration_finished(vm):
        # Terminal states only; anything else keeps the wait loop polling.
        return vm.command('query-migrate')['status'] in ('completed', 'failed')

    def assert_migration(self, src_vm, dst_vm):
        """Wait for both sides to finish and assert a successful hand-over."""
        wait.wait_for(self.migration_finished,
                      timeout=self.timeout,
                      step=0.1,
                      args=(src_vm,))
        wait.wait_for(self.migration_finished,
                      timeout=self.timeout,
                      step=0.1,
                      args=(dst_vm,))
        self.assertEqual(src_vm.command('query-migrate')['status'], 'completed')
        self.assertEqual(dst_vm.command('query-migrate')['status'], 'completed')
        # The destination takes over execution; the source parks in postmigrate.
        self.assertEqual(dst_vm.command('query-status')['status'], 'running')
        self.assertEqual(src_vm.command('query-status')['status'],'postmigrate')

    def do_migrate(self, dest_uri, src_uri=None):
        """Boot a listening destination VM, then migrate a fresh source VM to it.

        src_uri defaults to dest_uri; it differs for exec: transports where
        the two ends need complementary commands.
        """
        dest_vm = self.get_vm('-incoming', dest_uri)
        dest_vm.add_args('-nodefaults')
        dest_vm.launch()
        if src_uri is None:
            src_uri = dest_uri
        source_vm = self.get_vm()
        source_vm.add_args('-nodefaults')
        source_vm.launch()
        source_vm.qmp('migrate', uri=src_uri)
        self.assert_migration(source_vm, dest_vm)

    def _get_free_port(self):
        # Cancels (not fails) the test when no port can be allocated.
        port = network.find_free_port()
        if port is None:
            self.cancel('Failed to find a free port')
        return port

    def test_migration_with_tcp_localhost(self):
        dest_uri = 'tcp:localhost:%u' % self._get_free_port()
        self.do_migrate(dest_uri)

    def test_migration_with_unix(self):
        with tempfile.TemporaryDirectory(prefix='socket_') as socket_path:
            dest_uri = 'unix:%s/qemu-test.sock' % socket_path
            self.do_migrate(dest_uri)

    @skipUnless(find_command('nc', default=False), "'nc' command not found")
    def test_migration_with_exec(self):
        """The test works for both netcat-traditional and netcat-openbsd packages."""
        free_port = self._get_free_port()
        dest_uri = 'exec:nc -l localhost %u' % free_port
        src_uri = 'exec:nc localhost %u' % free_port
        self.do_migrate(dest_uri, src_uri)
| StarcoderdataPython |
164729 | <reponame>capaximperii/Alyke
from resource import Resource
def dummy_compute_digest(path):
    """Test double for Resource's digest function: ignores *path*, returns "digest"."""
    return "digest"
class TestResource(object):
    """Unit tests for Resource using a stubbed digest function."""
    def setup(self):
        # Fresh Resource per test; the digest computation is stubbed out.
        self.resource = Resource('/tmp/a', dummy_compute_digest)
    def teardown(self):
        print ("Resource teardown")
    def test_get(self):
        # get() is expected to return a (digest, path) pair.
        self.resource.compute_digest()
        result = self.resource.get()
        assert result[1] == '/tmp/a'
        assert result[0] == 'digest'
    def test_to_string(self):
        # str() is expected to render "path --> digest".
        self.resource.compute_digest()
        result = str(self.resource)
        assert result == '/tmp/a --> digest'
1655661 | import cv2
import os
def seq2avi():
    """Encode every frame image in a fixed directory into one 256x256 MJPG .avi at 20 fps."""
    seq_dir = r'G:\Dataset\PAMIRain\Dataset831\train\Bs'
    avi_dir = r'G:\Dataset\PAMIRain\Dataset831\train\Bs.avi'
    # NOTE(review): os.listdir order is platform-dependent; frames may need
    # an explicit sort to encode in sequence.
    img_list = os.listdir(seq_dir)
    fourcc = cv2.VideoWriter_fourcc('M', 'J', 'P', 'G')
    writer = cv2.VideoWriter(avi_dir, fourcc, 20.0, (256, 256))
    for name in img_list:
        img = cv2.imread(os.path.join(seq_dir, name))
        writer.write(img)
def avi2seq():
    """Duplicate every raw frame under middle_/small_/big_ prefixed names.

    NOTE(review): despite the name, the opened video is never read (the
    frame-extraction code was commented out and has been removed), and the
    ``while True`` loop has no break — it only terminates when ``index``
    runs past ``raw_img_list`` and raises IndexError.
    """
    import shutil
    from os.path import join
    import fnmatch
    seq_dir = r'G:\Dataset\PAMIRain\Dataset831\train\Os'   # unused
    raw_dir = r'G:\Dataset\PAMIRain\Dataset831\train\Bs'
    avi_dir = r'G:\Dataset\PAMIRain\Dataset831\train\middle.avi'
    raw_img_list = os.listdir(raw_dir)
    reader = cv2.VideoCapture(avi_dir)  # opened but never read
    index = 0
    while True:
        shutil.copy(join(raw_dir, raw_img_list[index]), join(raw_dir, 'middle_' + raw_img_list[index]))
        shutil.copy(join(raw_dir, raw_img_list[index]), join(raw_dir, 'small_' + raw_img_list[index]))
        shutil.copy(join(raw_dir, raw_img_list[index]), join(raw_dir, 'big_' + raw_img_list[index]))
        index += 1
if __name__ == '__main__':
    # seq2avi()  # alternative one-off tool: re-encode the frame sequence.
    avi2seq()
132240 | <reponame>Razdeep/PythonSnippets<filename>AUG16/02.py<gh_stars>0
# Demo of implicit numeric conversion: adding an int and a float
# promotes the result to float automatically (no data loss).
num_int = 123
num_float = 1.23
result = num_int + num_float
print('datatype of num_int is', type(num_int))
print('datatype of num_float is', type(num_float))
print('datatype of result is', type(result))
# it automatically converts int to float to avoid data loss
4822504 | <filename>Tensorflow/demo/custom/mnist.py
import tensorflow as tf
import tensorflow.keras as keras
import tensorflow.keras.layers as layers
import numpy as np
# 载入数据
(train_data, train_label), (test_data, test_label) = keras.datasets.mnist.load_data()
print('train_data.shape {} test_data.shape {}'.format(train_data.shape, test_data.shape))
# 格式化
train_data = train_data.reshape(60000, 784).astype('float32') / 255
test_data = test_data.reshape(10000, 784).astype('float32') / 255
print('Format train_data.shape {} test_data.shape {}'.format(train_data.shape, test_data.shape))
# 划分数据集
def split_data(data, label, ratio=0.2):
    """Randomly split (data, label) into train/validation parts.

    *ratio* is the fraction of samples assigned to validation.  Returns
    (train, train_label, val, val_label); sample/label pairing is preserved
    because both arrays are indexed with the same permutation.
    """
    permutation = np.random.permutation(len(data))
    n_val = int(ratio * len(data))
    # First n_val shuffled indices form the validation split, the rest train.
    val_idx, train_idx = permutation[:n_val], permutation[n_val:]
    return (data[train_idx], label[train_idx],
            data[val_idx], label[val_idx])
train_data, train_label, val_data, val_label = split_data(train_data, train_label, 0.1)
print('训练集数据形状:{} 验证集的形状:{} 测试数据集形状:{}'.format(train_data.shape, val_data.shape, test_data.shape))
# 创建模型
def build_model():
    """Functional MLP for flattened MNIST: 784 -> 256 -> 128 -> 64 -> 10 softmax."""
    inputs = keras.Input(shape=(784,))
    dense1 = layers.Dense(256, activation='relu')(inputs)
    dense2 = layers.Dense(128, activation='relu')(dense1)
    dense3 = layers.Dense(64, activation='relu')(dense2)
    outputs = layers.Dense(10, activation='softmax')(dense3)
    model = keras.Model(inputs, outputs)
    return model
# 正常编译
model = build_model()
model.compile(optimizer=keras.optimizers.RMSprop(learning_rate=1e-3),
loss=keras.losses.SparseCategoricalCrossentropy(),
metrics=[keras.metrics.SparseCategoricalAccuracy()])
# 训练模型
history = model.fit(train_data, train_label, batch_size=128, epochs=3, validation_data=(val_data, val_label))
print('history:')
print(history.history)
result = model.evaluate(test_data, test_label, batch_size=128)
print('evaluate:')
print(result)
pred = model.predict(test_data[:2])
print('predict:')
print(np.argmax(pred, 1))
| StarcoderdataPython |
3376709 | import pygame
import os
from settings import Settings
from button import Button
from helpers import draw_text
class EndGameMenu():
def __init__(self, center_x, center_y, cached_fonts):
self.score_menu_img = pygame.image.load(
os.path.join('images', 'score_menu.png'))
self.x = center_x - self.score_menu_img.get_width() // 2
self.y = center_y - self.score_menu_img.get_height() // 2
self.cached_fonts = cached_fonts
self.on_restart_callback = None
self.restart_params = None
self.on_save_score_callback = None
self.save_score_params = None
self.init_gui()
def init_gui(self):
self.window_width = Settings.instance().settings['window_width']
self.window_height = Settings.instance().settings['window_height']
# load Score menu image
self.score_menu_img = pygame.image.load(
os.path.join('images', 'score_menu.png'))
# Load and set restart button
self.restart_button = Button(0, 0, os.path.join(
'images', 'button_restart.png'), os.path.join('images', 'button_restart_clicked.png'))
x = self.window_width // 2 - self.restart_button.width // 2
y = self.window_height // 3 + self.score_menu_img.get_height() // 2 + \
self.restart_button.height // 2
self.restart_button.set_position(x, y)
# Load and set save score button
self.save_score_button = Button(0, 0, os.path.join(
'images', 'button_save_score.png'), os.path.join('images', 'button_save_score_clicked.png'))
x = self.window_width // 2 - \
self.save_score_button.width // 2
y = self.restart_button.y + self.restart_button.height + \
self.save_score_button.height // 2
self.save_score_button.set_position(x, y)
def set_on_restart_callback(self, on_restart_callback, restart_params=None):
self.restart_button.set_mouse_callback(
on_restart_callback, restart_params)
def set_on_save_score_callback(self, on_save_score_callback, save_score_params=None):
self.save_score_button.set_mouse_callback(
on_save_score_callback, save_score_params)
def on_mouse_move(self, mouse_pos):
self.restart_button.on_mouse_move(mouse_pos)
self.save_score_button.on_mouse_move(mouse_pos)
def on_mouse_click(self, mouse_pos):
self.restart_button.on_mouse_click(mouse_pos)
self.save_score_button.on_mouse_click(mouse_pos)
def render(self, render_screen, score, highest_score):
x = self.x
y = self.y
# Show lose menu
render_screen.blit(self.score_menu_img, (x, y))
# show bird score
draw_text(render_screen, 'Score', self.cached_fonts['retro_20'], (255, 134, 45), True, pygame.Rect(
x, y, self.score_menu_img.get_width(), self.score_menu_img.get_height() // 4), 'center', 'center')
y = y + self.score_menu_img.get_height() // 4
draw_text(render_screen, str(score), self.cached_fonts['retro_20'], (255, 134, 45), True, pygame.Rect(
x, y, self.score_menu_img.get_width(), self.score_menu_img.get_height() // 4), 'center', 'center')
# show bird high score
y = y + self.score_menu_img.get_height() // 4
draw_text(render_screen, 'Best', self.cached_fonts['retro_20'], (255, 134, 45), True, pygame.Rect(
x, y, self.score_menu_img.get_width(), self.score_menu_img.get_height() // 4), 'center', 'center')
y = y + self.score_menu_img.get_height() // 4
draw_text(render_screen, str(highest_score), self.cached_fonts['retro_20'], (255, 134, 45), True, pygame.Rect(
x, y, self.score_menu_img.get_width(), self.score_menu_img.get_height() // 4), 'center', 'center')
# Show Restart button
self.restart_button.render(render_screen)
# Show Highscores button
self.save_score_button.render(render_screen)
| StarcoderdataPython |
1755091 | <filename>tests/test_analysis.py
import pytest
from diffy_api.analysis.views import * # noqa
@pytest.mark.parametrize("token,status", [("", 200)])
def test_analysis_list_get(client, token, status):
    # Listing analyses succeeds for any caller.
    response = client.get(api.url_for(AnalysisList), headers=token)
    assert response.status_code == status
@pytest.mark.parametrize("token,status", [("", 400)])
def test_analysis_list_post(client, token, status, sts):
    # An empty POST body fails request validation.
    response = client.post(api.url_for(AnalysisList), data={}, headers=token)
    assert response.status_code == status
@pytest.mark.parametrize("token,status", [("", 405)])
def test_analysis_list_put(client, token, status):
    # PUT is not allowed on the collection endpoint.
    response = client.put(api.url_for(AnalysisList), data={}, headers=token)
    assert response.status_code == status
@pytest.mark.parametrize("token,status", [("", 405)])
def test_analysis_list_delete(client, token, status):
    # DELETE is not allowed on the collection endpoint.
    response = client.delete(api.url_for(AnalysisList), headers=token)
    assert response.status_code == status
@pytest.mark.parametrize("token,status", [("", 405)])
def test_analysis_list_patch(client, token, status):
    # PATCH is not allowed on the collection endpoint.
    response = client.patch(api.url_for(AnalysisList), data={}, headers=token)
    assert response.status_code == status
@pytest.mark.parametrize("token,status", [("", 200)])
def test_analysis_get(client, token, status):
    # Fetching a single analysis by key succeeds.
    response = client.get(api.url_for(Analysis, key="foo"), headers=token)
    assert response.status_code == status
@pytest.mark.parametrize("token,status", [("", 405)])
def test_analysis_post(client, token, status):
    # POST is not allowed on a single-item endpoint.
    response = client.post(api.url_for(Analysis, key="foo"), data={}, headers=token)
    assert response.status_code == status
@pytest.mark.parametrize("token,status", [("", 405)])
def test_analysis_put(client, token, status):
    # PUT is not allowed on a single-item endpoint.
    response = client.put(api.url_for(Analysis, key="foo"), data={}, headers=token)
    assert response.status_code == status
@pytest.mark.parametrize("token,status", [("", 405)])
def test_analysis_delete(client, token, status):
    # DELETE is not allowed on a single-item endpoint.
    response = client.delete(api.url_for(Analysis, key="foo"), headers=token)
    assert response.status_code == status
@pytest.mark.parametrize("token,status", [("", 405)])
def test_analysis_patch(client, token, status):
    # PATCH is not allowed on a single-item endpoint.
    response = client.patch(api.url_for(Analysis, key="foo"), data={}, headers=token)
    assert response.status_code == status
| StarcoderdataPython |
3360657 | import torch
import torch.nn as nn
class UNetSame(nn.Module):
def __init__(self):
super(UNetSame, self).__init__()
self.encoder1 = nn.Sequential(nn.Conv2d(in_channels=3, out_channels=64, kernel_size=3, padding=1),
nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, padding=1),
nn.ReLU(inplace=True))
self.encoder2 = nn.Sequential(
nn.MaxPool2d(kernel_size=2),
nn.Conv2d(in_channels=64, out_channels=128, kernel_size=3, padding=1),
nn.Conv2d(in_channels=128, out_channels=128, kernel_size=3, padding=1),
nn.ReLU(inplace=True))
self.encoder3 = nn.Sequential(
nn.MaxPool2d(kernel_size=2),
nn.Conv2d(in_channels=128, out_channels=256, kernel_size=3, padding=1),
nn.Conv2d(in_channels=256, out_channels=256, kernel_size=3, padding=1),
nn.ReLU(inplace=True))
self.encoder4 = nn.Sequential(
nn.MaxPool2d(kernel_size=2),
nn.Conv2d(in_channels=256, out_channels=512, kernel_size=3, padding=1),
nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, padding=1),
nn.ReLU(inplace=True))
self.encoder5 = nn.Sequential(
nn.MaxPool2d(kernel_size=2),
nn.Conv2d(in_channels=512, out_channels=1024, kernel_size=3, padding=1),
nn.Conv2d(in_channels=1024, out_channels=1024, kernel_size=3, padding=1),
nn.ReLU(inplace=True))
self.decoder1 = nn.ConvTranspose2d(in_channels=1024, out_channels=512, kernel_size=3, stride=2, padding=1,
output_padding=1)
self.decoder1_conv = nn.Sequential(nn.Conv2d(in_channels=1024, out_channels=512, kernel_size=3, padding=1),
nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, padding=1),
nn.ReLU(inplace=True))
self.decoder2 = nn.ConvTranspose2d(in_channels=512, out_channels=256, kernel_size=3, stride=2, padding=1,
output_padding=1)
self.decoder2_conv = nn.Sequential(nn.Conv2d(in_channels=512, out_channels=256, kernel_size=3, padding=1),
nn.Conv2d(in_channels=256, out_channels=256, kernel_size=3, padding=1),
nn.ReLU(inplace=True))
self.decoder3 = nn.ConvTranspose2d(in_channels=256, out_channels=128, kernel_size=3, stride=2, padding=1,
output_padding=1)
self.decoder3_conv = nn.Sequential(nn.Conv2d(in_channels=256, out_channels=128, kernel_size=3, padding=1),
nn.Conv2d(in_channels=128, out_channels=128, kernel_size=3, padding=1),
nn.ReLU(inplace=True))
self.decoder4 = nn.ConvTranspose2d(in_channels=128, out_channels=64, kernel_size=3, stride=2, padding=1,
output_padding=1)
self.decoder4_conv = nn.Sequential(nn.Conv2d(in_channels=128, out_channels=64, kernel_size=3, padding=1),
nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, padding=1),
nn.ReLU(inplace=True))
self.out = nn.Sequential(nn.Conv2d(in_channels=64, out_channels=2, kernel_size=3, padding=1),
nn.Sigmoid())
def forward(self, x):
x1 = self.encoder1(x)
x2 = self.encoder2(x1)
x3 = self.encoder3(x2)
x4 = self.encoder4(x3)
x5 = self.encoder5(x4)
y1 = self.decoder1(x5)
y1 = torch.cat((x4, y1), dim=1)
y1 = self.decoder1_conv(y1)
y2 = self.decoder2(y1)
y2 = torch.cat((x3, y2), dim=1)
y2 = self.decoder2_conv(y2)
y3 = self.decoder3(y2)
y3 = torch.cat((x2, y3), dim=1)
y3 = self.decoder3_conv(y3)
y4 = self.decoder4(y3)
y4 = torch.cat((x1, y4), dim=1)
y4 = self.decoder4_conv(y4)
out = self.out(y4)
return out
| StarcoderdataPython |
103902 | # Generated by Django 2.2.13 on 2021-02-01 11:46
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 2.2.13. Marks the legacy free-text "pi"
    # field as obsolete in its verbose_name and makes it optional
    # (blank=True) so forms no longer require it.

    dependencies = [
        ('project', '0036_auto_20210201_1120'),
    ]

    operations = [
        migrations.AlterField(
            model_name='project',
            name='pi',
            field=models.CharField(blank=True, max_length=256, verbose_name="(obsolete) Principal Investigator's name, position and email"),
        ),
    ]
| StarcoderdataPython |
138811 | <reponame>poliakustyczny/deepl.py
#!/usr/bin/python3
import sys, requests, json
URL = "https://www.deepl.com/jsonrpc"
def encodeRequest(text, source_lang="EN", target_lang="PL"):
    """Build the DeepL JSON-RPC payload for translating *text*.

    :param text: sentence to translate.
    :param source_lang: source language code; defaults to "EN" so existing
        callers keep the original EN -> PL behaviour.
    :param target_lang: target language code; defaults to "PL".
    :returns: compact JSON string (no whitespace, as the API expects).
    """
    payload = {
        "jsonrpc": "2.0",
        "method": "LMT_handle_jobs",
        "params": {
            "jobs": [{"kind": "default", "raw_en_sentence": text}],
            "lang": {
                "user_preferred_langs": [source_lang, target_lang],
                "source_lang_user_selected": source_lang,
                "target_lang": target_lang,
            },
            "priority": -1,
        },
    }
    return json.dumps(payload, separators=(",", ":"))
def sendRequest(json):
    """POST an already-encoded JSON-RPC payload to the DeepL endpoint."""
    # NOTE: the parameter name shadows the json module inside this
    # function, but the module is not needed here.
    return requests.post(URL, data=json)
# CLI entry point: translate argv[1] (English -> Polish) and print the result.
if(len(sys.argv) < 2):
    # No text supplied on the command line (message is user-facing Polish).
    print("deepl.py: Nie podano tekstu do przetłumaczenia")
else:
    text = sys.argv[1]
    encodedRequest = encodeRequest(text)
    response = sendRequest(encodedRequest)
    response = json.loads(response.text)
    # Drill into the JSON-RPC reply for the best-scoring translation beam.
    print(response['result']['translations'][0]['beams'][0]['postprocessed_sentence'])
| StarcoderdataPython |
1700662 | # coding: utf-8
"""Manage the differents pages of the site"""
import markdown
import bleach
from urlparse import urlparse
from datetime import datetime
from flask import Flask, render_template, request, flash, redirect, url_for
from flask_babel import gettext, format_datetime
from mjpoll import app, babel
from data import get_poll, get_results, get_voter_ballot, add_update_ballot, get_own_polls, get_participate_polls, delete_poll, insert_poll, get_ballot_voters
USER = 'Bob' #TODO
def sort_choices_with_rank(choice_with_rank):
    """Sort key: the rank stored as the second element of a (choice, rank) pair."""
    _choice, rank = choice_with_rank
    return rank
@babel.localeselector
def get_locale():
    """Pick the best UI language from the request's Accept-Language header."""
    supported = app.config['LANGUAGES'].keys()
    return request.accept_languages.best_match(supported)
def set_target(attrs, new=False):
    """bleach.linkify callback: make every link open in a new tab.

    :param attrs: dict of anchor attributes; mutated in place.
    :param new: part of the bleach callback signature; unused here.
    :returns: the (mutated) attribute dict.
    """
    # The original parsed attrs['href'] into an unused local variable;
    # the parse result was never used, so it has been removed.
    attrs['target'] = '_blank'
    return attrs
@app.template_filter('md_message')
def md_message_filter(s):
    """Filter that convert to markdown and allow only tag legal for messages"""
    allowed_tags = ['strong', 'em', 'a', 'ul', 'li', 'p', 'br']
    cleaned = bleach.clean(markdown.markdown(s), tags=allowed_tags, strip=True)
    # Linkify afterwards so generated anchors get target=_blank too.
    return bleach.linkify(cleaned, callbacks=[set_target])
@app.template_filter('md_choice')
def md_choice_filter(s):
    """Filter that convert to markdown and allow only tag legal for choices"""
    allowed_tags = ['strong', 'em', 'a']
    cleaned = bleach.clean(markdown.markdown(s), tags=allowed_tags, strip=True)
    return bleach.linkify(cleaned, callbacks=[set_target])
@app.route('/')
def new_poll():
    """Render the poll-creation page."""
    return render_template('poll.html')
@app.route('/list')
def list_poll():
    """Show the polls the current user owns and those they participate in."""
    # The data layer returns None when nothing is found; fall back to [].
    own_polls = get_own_polls(USER) or []
    participate_polls = get_participate_polls(USER) or []
    return render_template('list.html', own_polls=own_polls,
                           participate_polls=participate_polls)
@app.route('/delete/<poll>')
def delete(poll):
    """Delete the given poll, flash a confirmation, and return to the list."""
    delete_poll(poll)
    flash(gettext(u'Poll deleted'), 'success')
    return redirect(url_for('list_poll'))
@app.route('/cast', methods=['POST'])
def cast():
    """Record (or update) the current user's ballot for a poll."""
    if request.method != 'POST':
        return render_template('error.html', message=gettext(u'Error: cast without data'))
    if 'poll' not in request.form:
        return render_template('error.html', message=gettext(u'Error: cast invalid data'))
    # Ballot fields arrive as choice_<id> -> rank; collect them by id.
    choices = {int(param.split('_')[1]): int(value)
               for param, value in request.form.items()
               if param.startswith('choice_')}
    result = add_update_ballot(voter=USER, poll=request.form['poll'], choices=choices)
    if result:
        flash(gettext(u'Ballot saved'), 'success')
    else:
        flash(gettext(u'Invalid ballot'), 'danger')
    return redirect(url_for('ballot_or_results', poll=request.form['poll']))
@app.route('/preview', methods=['POST'])
def preview():
    """Render a non-persisted preview of the poll being composed.

    Missing fields default to empty strings; a missing or malformed end
    date falls back to "now".
    """
    if request.method == 'POST':
        title = request.form.get('title', "")
        message = request.form.get('message', "")
        # Narrowed from a bare except: strptime raises ValueError on a
        # malformed date, the indexing raises KeyError when absent.
        try:
            end_date = format_datetime(datetime.strptime(request.form['end_date'], '%Y-%m-%dT%H:%M'))
        except (KeyError, ValueError):
            end_date = format_datetime(datetime.now())
        choices = [value for param, value in request.form.items()
                   if param.startswith('choice_')]
        return render_template('preview.html', title=title, message=message,
                               end_date=end_date, choices=choices)
    else:
        return render_template('error.html', message=gettext(u'Error: preview without data'))
@app.route('/save', methods=['POST'])
def save():
    """Validate and persist a new poll, then redirect to the poll list.

    Required fields: poll_title, poll_message, poll_end_date and at least
    one non-empty poll_choice_* field.
    """
    if request.method == 'POST':
        # (removed: leftover debug loop that printed every form field)
        if 'poll_title' not in request.form:
            return render_template('error.html', message=gettext(u'Error: poll without title'))
        if 'poll_message' not in request.form:
            return render_template('error.html', message=gettext(u'Error: poll without message'))
        if 'poll_end_date' not in request.form:
            return render_template('error.html', message=gettext(u'Error: poll without date'))
        # Narrowed from a bare except: strptime raises ValueError.
        try:
            end_date = datetime.strptime(request.form['poll_end_date'], '%Y-%m-%dT%H:%M')
        except ValueError:
            return render_template('error.html', message=gettext(u'Error: poll with invalid date'))
        choices = []
        for param, value in request.form.items():
            if param.startswith('poll_choice_'):
                if value == "":
                    return render_template('error.html', message=gettext(u'Error: poll with an empty choice'))
                choices.append(value)
        if not choices:
            return render_template('error.html', message=gettext(u'Error: poll with no choices'))
        uid = insert_poll(title=request.form['poll_title'], message=request.form['poll_message'],
                          choices=choices, end_date=end_date, owner=USER)
        if uid is None:
            flash(gettext(u'Error: saving poll failed'), 'danger')
        else:
            flash(gettext(u'Message: poll created with url %(url)s',
                          url=url_for('ballot_or_results', poll=uid, _external=True)), 'success')
        return redirect(url_for('list_poll'))
    else:
        return render_template('error.html', message=gettext(u'Error: poll without data'))
@app.route('/<poll>')
def ballot_or_results(poll):
    """IF the poll is open, display the ballot page. Otherwise display the results page"""
    poll = get_poll(poll)
    if poll is None:
        return render_template('error.html', message=gettext(u'Error: poll do not exits'))
    else:
        # Localise the end date for display in either template.
        poll['end_date'] = format_datetime(poll['end_date'])
        if poll['closed']:
            # Closed poll: show final results ordered by rank.
            results = get_results(poll)
            if results is None:
                return render_template('error.html', message=gettext(u'Error: poll with not results'))
            choices_by_rank = []
            choices_with_rank = []
            for choice, result in results.items():
                # 'rank' is either a scalar or a list (ties); use the
                # first entry when it is a list.
                choices_with_rank.append([choice, result['rank'][0] if isinstance(result['rank'], list) else result['rank']])
            choices_with_rank.sort(key=sort_choices_with_rank)
            choices_by_rank = [choice_with_rank[0] for choice_with_rank in choices_with_rank]
            return render_template('results.html', poll=poll, results=results, choices_by_rank=choices_by_rank)
        else:
            # Open poll: show the voter's ballot (or a fresh, all-zero one).
            ballot = get_voter_ballot(USER, poll['uid'])
            voters = get_ballot_voters(poll['uid'])
            if ballot is None:
                ballot = {}
                for choice in poll['choices']:
                    ballot[choice['id']] = 0
            if voters is None:
                voters = []
            return render_template('ballot.html', poll=poll, ballot=ballot, voters=voters)
| StarcoderdataPython |
3365183 | from django.db import models
from django.contrib.auth import get_user_model
from django.urls import reverse
# Create your models here.
class sports(models.Model):
title_field = models.CharField(max_length=256)
purchaser_field = models.ForeignKey(get_user_model(), on_delete=models.CASCADE)
description_field=models.TextField(blank=True)
def __str__(self):
return self.title_field
def get_absolute_url(self):
return reverse("sports_detail", args=[str(self.id)])
| StarcoderdataPython |
3304083 | #!/usr/bin/python
from lxml import etree
import grokscrap as gs
import os
import subprocess as subp
import re
#This class allows for (very handy) re-entrant lists of command-line calls. All you need is to call startStep() at the beginning and make sure to call endStep() at the end only if there was no problem and the list doesn't have to be replayed. And, of course, do not change the list across runs, at least in the parts already executed, or hard-to-debug problems will ensue
class ExecuteContext:
    """Re-entrant runner for an ordered list of command-line steps.

    The index of the current step is persisted to a stamp file after each
    step, so an aborted run can be replayed: already-completed steps are
    skipped until the saved index is reached. Call startStep() before the
    list and endStep() only after a fully successful run. The step list
    must not change between runs, at least in the part already executed.
    """

    def __init__(self, execute=True, verbose=False, stampName=os.path.join(os.getcwd(), 'step.txt')):
        # execute: when False, commands are only printed, never run.
        # NOTE(review): the stampName default is evaluated once at class
        # definition time, so it is the cwd at import, not at call time.
        self.execute = execute
        self.verbose = verbose
        self.resumeFrom = None
        self.stampName = stampName
        self.step = 0

    def writeStep(self, n):
        """Record *n* as the current step, both in memory and on disk."""
        self.step = n
        with open(self.stampName, 'w') as f:
            f.write(str(n))

    def startStep(self):
        """Begin (or resume) a run: load the saved step index, if any."""
        if os.path.isfile(self.stampName):
            with open(self.stampName, 'r') as f:
                self.resumeFrom = int(f.read())
        else:
            self.resumeFrom = None
        self.writeStep(0)

    def endStep(self):
        """Finish a successful run by removing the stamp file."""
        if os.path.isfile(self.stampName):
            os.remove(self.stampName)

    def checkStep(self):
        """Return True when the current step should actually execute.

        While replaying, steps are skipped until the saved index is
        reached; from that point on, resumeFrom is cleared and every
        subsequent step runs.
        """
        if (self.execute):
            if self.resumeFrom==self.step:
                self.resumeFrom=None
        return self.execute and self.resumeFrom is None

    def updateStep(self):
        """Advance to the next step and persist the new index."""
        self.writeStep(self.step+1)

    def actualDoCommand(self, command, **kwargs):
        #this is intended to actually do subprocess call, as some esoteric special-needs tools might be so picky about how exactly they are invoked that you can't yjust assume that a straight subptocess.Popen/call will work
        #default implementation just uses regular subprocess.call
        return subp.call(command, **kwargs)

    def doCommand(self, command, **kwargs):
        """Run one command as a step; raise RuntimeError on non-zero exit."""
        if self.verbose:
            print ' '.join(command)
        if self.checkStep():
            ret = self.actualDoCommand(command, **kwargs)
            if ret!=0:
                raise RuntimeError("Error in command <%s>" % ' '.join(command))
        self.updateStep()

    def doCd(self, dr):
        """Change directory as a step.

        chdir has process-wide side effects that later steps depend on, so
        it is replayed even while skipping steps during a resume.
        """
        if self.checkStep() or True: #as chdirs have critically important side-effects, they have to be replayed no matter what
            os.chdir(dr)
        self.updateStep()
        if self.verbose:
            print "cd "+dr
#simple but high-level driver for the refactor binary, it basically does housekeeping and high-level planning; the binary does the grunt work.
class ExternalRefactor:
def __init__(self,
context,
translatepath=lambda x: x,
compiler_args_base=[],
compiler_args=lambda x: ["-I."],
command='./simpleRefactor',
exedir=os.getcwd(),
cppextensions=('.cpp',),
hppextensions=('.hpp', '.h'),
getCppForHpp=None,
grokscraper=None,
isxmlfile=lambda x: x.endswith(('.xml',)),
xml_xpath=None,
execute=True,
verbose=True):
self.context = context
self.translatepath = translatepath
self.compiler_args_base = compiler_args_base
self.compiler_args = compiler_args
self.command = command
self.exedir = exedir
self.cppextensions = cppextensions
self.hppextensions = hppextensions
self.getCppForHpp = getCppForHpp
self.grokscraper = grokscraper
self.xml_xpath = xml_xpath
self.isxmlfile = isxmlfile
self.execute = execute
self.verbose = verbose
self.template = lambda term, value, filepath: [self.command, '--term=%s' % term, '--value=%s' % value, '--overwrite=true', filepath, '--']
#do not forget to call this one if you want to make sure to also refactor instances that only apper in header files!
def addCppFilesForHppFiles(self, table):
for filename in table:
if filename.endswith(self.hppextensions):
cpp = None
if self.getCppForHpp is not None:
cpp = self.getCppForHpp(filename) #paths returned here should be consistent with grok, rather than with the codebase
elif self.grokscraper is not None:
cpp = self.getCppForHppWithGrok(filename, table)
if cpp is None:
raise RuntimeError("Could not find a C++ source file including the header %s!!!!!" % filename)
if cpp in table:
table[cpp].add(filename)
else:
table[cpp] = set([filename])
def getCppForHppWithGrok(self, hppfile, table):
filename = hppfile.rsplit('/', 1)[1]
hpptable = self.grokscraper.getOcurrences("include "+filename)
#first check if the file is already in the table
for grokfilepath in hpptable:
if grokfilepath.endswith(self.cppextensions) and grokfilepath in table:
return grokfilepath
#this is a quite dumb brute-force approach, it might be far better to avoid greedy strategies and compute a minimal set of cpps for all hpps with ocurrences; however that might be inefficient for codebases with massively nested sets of header files
for grokfilepath in hpptable:
if grokfilepath.endswith(self.cppextensions):
return grokfilepath
for grokfilepath in hpptable:
if grokfilepath.endswith(self.hppextensions):
ret = self.getCppForHppWithGrok(grokfilepath)
if ret is not None:
return ret
#there might be headers not included anywhere in the codebase (conceivably, they might be included by source files generated during the build process). If that's the case, here we should add some code to (a) use those generated sources (after compilation) or (b) generate some phony C++ source file that just includes the header and feed it to the binary tool
return None
def doCPPFile(self, term, value, filepath):
commandline = self.template(term, value, filepath)+self.compiler_args_base+self.compiler_args(filepath)
if self.verbose:
print 'ON %s EXECUTE %s' % (self.exedir, ' '.join(commandline))
if self.execute:
self.context.doCommand(commandline, cwd=self.exedir)
def doXMLFile(self, term, filepath):
if self.verbose:
print 'ON %s REMOVE REFERNCES TO %s' % (filepath, term)
if self.execute:
if self.context.checkStep():
root = etree.parse(filepath)
res = root.xpath(self.xml_xpath % term)
if len(res)!=1:
print "Error locating config value <%s> in XML file <%s>!!!!!" % (term, filepath)
else:
toremove = res[0]
toremove.getparent().remove(toremove)
with open(filepath, 'w') as svp:
svp.write(etree.tostring(root))
self.context.updateStep()
#main function, does the refactoring
def doFilesFromTable(self, table, term, value):
if self.verbose:
print "PROCESSING FILES FROM TERMS FOUND WITH OPENGROK\n"
for grokfilepath in sorted(table.keys()):
lines = list(table[grokfilepath])
lines.sort()
filepath = self.translatepath(grokfilepath)
if grokfilepath.endswith(self.cppextensions):
if self.verbose:
print " TERM <%s> TO BE REFACTORED IN CPP FILE <%s> in line(s) %s" % (term, filepath, lines)
self.doCPPFile(term, value, filepath)
if grokfilepath.endswith(self.hppextensions):
if self.verbose:
print " TERM <%s> FOUND IN HEADER FILE <%s> in line(s) %s (refactored as part of a cpp file)" % (term, filepath, lines)
elif self.isxmlfile(filepath):
if self.verbose:
print " TERM <%s> TO BE REFACTORED IN XML FILE <%s> in line(s) %s" % (term, filepath, lines)
self.doXMLFile(term, filepath)
#an example of how a high-level funtion to use GrokScraper and ExternalRefactor might look like
def doFilesFromGrok(self, term, value, printRevs=True):
table = grokscraper.getOcurrences(term)
self.addCppFilesForHppFiles(table)
self.context.startStep()
self.doFilesFromTable(table, term, value)
self.context.endStep()
if printRevs:
print ""
revisions = self.grokscraper.getRevisions(table)
self.grokscraper.printRevisions(revisions)
#helper funtion to be used as part of function compiler_args (one of the members of ExternalRefactor): for a .d file (the one generated by the compiler detailing ALL files #included into the compilation unit), heuristically generate a list of directives to include the relevant directories
def heuristicIncludeDirListFromDFile(dfilepath, rootDirKeyword=['include/'], hppextensions=(('.hpp', '.h')), toExclude=[], prefix=lambda x: ''):
    """From a compiler-generated .d dependency file, heuristically build a
    list of '-I<dir>' directives for the directories containing headers.

    Directories are truncated at the first occurrence of any entry of
    rootDirKeyword; directories starting with any toExclude entry are
    dropped. prefix(dir) is prepended to each emitted path.
    """
    with open(dfilepath, 'r') as fd:
        text = fd.read()
    seen_dirs = set()
    kept_dirs = set()
    for match in re.finditer('[_\./a-zA-Z0-9-]+', text):
        token = match.group(0)
        if not token.endswith(hppextensions):
            continue
        directory = token.rsplit('/', 1)[0]
        # Skip bare filenames (rsplit returned the token itself) and
        # directories we have already considered.
        if directory.endswith(hppextensions) or directory in seen_dirs:
            continue
        seen_dirs.add(directory)
        if any(directory.startswith(excl) for excl in toExclude):
            continue
        for key in rootDirKeyword:
            pos = directory.find(key)
            if pos != -1:
                directory = directory[:pos + len(key)]
                break
        if directory[-1] == '/':
            directory = directory[:-1]
        kept_dirs.add(directory)
    return ['-I' + prefix(d) + d for d in kept_dirs]
#########################################################
if __name__=='__main__':
    # Demo run against the bundled examples directory: refactor the
    # 'UseSpanishLanguage' flag to true in a cpp file and an xml config.
    translatepath = lambda x: os.path.join('examples', x)
    context = ExecuteContext(verbose=True)
    external = ExternalRefactor(
        context,
        translatepath=translatepath,
        compiler_args=lambda x: ["-I./examples/include"],
        xml_xpath="/config/value[@name='%s']",
        execute=True,
        verbose=True)
    table = {'test.cpp': [], 'config.xml': []}
    external.doFilesFromTable(table, "UseSpanishLanguage", "true")
| StarcoderdataPython |
1790823 | <gh_stars>1-10
# The MIT License (MIT)
# Copyright (c) 2015 <NAME>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the 'Software'), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
## -----------------------------------------------------------------------------
def test_chr_on_numbers_between_32_to_126():
i = 32
while i < 127:
print chr(i)
i += 1
## -----------------------------------------------------------------------------
def test_chr_on_special_cases():
print chr(9)
print chr(10)
print chr(13)
## -----------------------------------------------------------------------------
def test_chr_on_remaining_valid_inputs():
specials = set({9, 10, 13})
# Need to handle escapinf characters.
#
# 39: "\'"
# 92: "\\"
cases_to_skip_separately = set({39, 92})
inputs = list([])
i = 0
while i < 256:
if not specials.__contains__(i) and not cases_to_skip_separately.__contains__(i):
inputs.append(int(i))
i += 1
for i in inputs:
print i, chr(i).__repr__()
## -----------------------------------------------------------------------------
def test_chr_on_invalid_inputs():
def test_chr_with_valueerror(i):
try:
chr(i)
except ValueError:
print 'Expected ValueError for ' + i.__str__()
test_chr_with_valueerror(-1)
test_chr_with_valueerror(256)
## -----------------------------------------------------------------------------
# Exercise every chr() scenario when the module is run.
test_chr_on_numbers_between_32_to_126()
test_chr_on_special_cases()
test_chr_on_remaining_valid_inputs()
test_chr_on_invalid_inputs()
## -----------------------------------------------------------------------------
| StarcoderdataPython |
1775573 | <gh_stars>10-100
import psycopg2
from psycopg2.extensions import AsIs, ISOLATION_LEVEL_AUTOCOMMIT
from settings import DB_NAME, USER, PASSWORD
import logging
class DBObject(object):
_db_con = None
_db_cur = None
def __init__(self, db, user, password):
try:
self._db_con = psycopg2.connect(dbname=db, user=user,
password=password)
self._db_cur = self._db_con.cursor()
except Exception as e:
print e
def make_query(self, query, params=None, q_type='insert'):
try:
self._db_cur.execute(query, params)
except Exception as e:
print e
finally:
if q_type == 'insert':
self._db_con.commit()
elif q_type == 'query':
return self._db_cur.fetchall()
def __del__(self):
self._db_con.close()
def prepare_db():
    """
    Create a database with name in .env
    """
    # Bootstrap connection to the default 'postgres' database.
    try:
        con = psycopg2.connect(dbname='postgres', user=USER, password=PASSWORD)
    except psycopg2.Error as e:
        raise e
    logging.info('Connected to database postgres')
    # CREATE/DROP DATABASE cannot run inside a transaction block.
    con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    cur = con.cursor()
    try:
        cur.execute('CREATE DATABASE ' + DB_NAME)
    except psycopg2.Error as e:
        # Database already exists: drop it and recreate from scratch.
        logging.info('DROP OLD DATABASE')
        logging.info('CREATE NEW DATABASE')
        cur.execute('DROP DATABASE ' + DB_NAME)
        cur.execute('CREATE DATABASE ' + DB_NAME)
    cur.close()
    con.close()
    # Reconnect to the freshly created database and install the schema
    # (the CUBE extension backs the feature-vector column).
    con = psycopg2.connect(dbname=DB_NAME, user=USER, password=PASSWORD)
    cur = con.cursor()
    cur.execute('CREATE EXTENSION CUBE')
    cur.execute('CREATE TABLE images (id serial, name text, url text, vector cube);')
    con.commit()
    cur.close()
    con.close()
| StarcoderdataPython |
1704088 | <filename>server/daqbrokerServer.py
from tornado.wsgi import WSGIContainer
from tornado.ioloop import IOLoop
from tornado.httpserver import HTTPServer
#import gevent.monkey
# gevent.monkey.patch_all()
import time
import sys
import json
import traceback
import logging
import multiprocessing
import ntplib
import socket
import psutil
import struct
import shutil
import uuid
import platform
import os
import math
import signal
import sqlite3
import pyAesCrypt
import snowflake
import simplejson
import re
import ctypes
import requests
import concurrent.futures
import daqbrokerSettings
import monitorServer
import backupServer
import commServer
import logServer
import webbrowser
import zipfile
import io
from asteval import Interpreter
from concurrent_log_handler import ConcurrentRotatingFileHandler
from subprocess import call
from subprocess import check_output
#from bcrypt import gensalt
from functools import reduce
from sqlalchemy import create_engine
from sqlalchemy import text
from sqlalchemy import bindparam
from sqlalchemy import func
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import scoped_session
from logging.handlers import RotatingFileHandler
from sqlalchemy_utils.functions import database_exists
from sqlalchemy_utils.functions import drop_database
from sqlalchemy_utils.functions import create_database
from flask import Flask
from flask import Markup
from flask import request
from flask import render_template
from flask import redirect
from flask import send_from_directory
from flask import url_for
from flask import session
from flask import flash
from flask import jsonify
from flask import request_tearing_down
#fom gevent.pywsgi import WSGIServer
#from sympy import *
from numpy import asarray, linspace
from scipy.interpolate import interp1d
from numbers import Number
import app
from bpApp import multiprocesses
class daqbrokerServer:
    """
    Main DAQBroker server application.

    :ivar localSettings: (string) name of the local settings database file
    :ivar appPort: (integer) network port for the DAQBroker REST API
    :ivar logFile: (string) name of the logging file
    """

    def __init__(self, localSettings='localSettings', appPort=7000, logFileName='logFile.txt'):
        self.localSettings = localSettings
        self.appPort = appPort
        self.logFile = logFileName

    def start(self, detached=False):
        """
        Start the DAQBroker server environment (long-running, blocking).

        :param detached: unused in the current version; reserved for a
            future daemon-like background mode.
        """
        startServer(localSettings=self.localSettings,
                    appPort=self.appPort,
                    logFilename=self.logFile)
# Lower-case latin alphabet, one letter per element.
alphabets = [chr(code) for code in range(ord('a'), ord('z') + 1)]

VERSION = "0.1"
# Process start time, used for uptime reporting.
timeStart = time.time()
strings = []
def dict_factory(cursor, row):
    """sqlite3 row factory: map each column name to its row value."""
    return {column[0]: row[index] for index, column in enumerate(cursor.description)}
# When frozen by PyInstaller the bundled data files live under the
# temporary sys._MEIPASS directory; otherwise use the current directory.
base_dir = os.path.join(sys._MEIPASS) if getattr(sys, 'frozen', False) else '.'
def setupLocalSettings(localSettings='localSettings'):
    """Ensure the server files and the local settings database exist.

    Downloads (or unpacks) the static server files when missing, creates
    the local settings schema, seeds default rows on a fresh database,
    and returns the most recent Global settings as a plain dict.
    """
    try:
        # Step 1: make sure the static server files are present.
        if not os.path.isdir(os.path.join(base_dir, 'static')):
            print("Server files not found on this directory. Setting up required files . . .")
            canUseLocal = False
            useLocal = False
            if os.path.isfile(os.path.join(base_dir, 'server.zip')):
                canUseLocal = True
            if canUseLocal:
                useLocal = False
                # Interactive prompt: prefer the bundled archive only on request.
                choice = input("Server files found in local compressed file, use these? (Could be out of date)\n\t1. Yes\n\t2. No\nMake a choice[1]:")
                if choice == '1':
                    useLocal = True
            if useLocal:
                z = zipfile.ZipFile(os.path.join(base_dir, 'server.zip'))
                z.extractall(path=base_dir)
                print("done")
            else:
                # Fall back to downloading the archive from the project site.
                zipFiles = requests.get("https://daqbroker.com/downloads/server.zip")
                if zipFiles.ok:
                    z = zipfile.ZipFile(io.BytesIO(zipFiles.content))
                    z.extractall(path=base_dir)
                    print("done")
                else:
                    sys.exit("Files not found on remote server. Make sure you have internet connection before trying again.")
        # Step 2: detect whether a settings database already exists.
        if os.path.isfile(localSettings):  # settings file already exists
            isNewDB = False
        else:  # no settings file yet: seed defaults below
            isNewDB = True
        databases = []
        daqbrokerSettings.setupLocalVars(localSettings)
        scoped = daqbrokerSettings.getScoped()
        session = scoped()
        daqbrokerSettings.daqbroker_settings_local.metadata.create_all(
            daqbrokerSettings.localEngine)
        #id = snowflake.make_snowflake(snowflake_file='snowflake')
        if isNewDB:
            # Fresh database: seed the default Global row, the three
            # standard folders and the localhost node entry.
            newGlobal = daqbrokerSettings.Global(
                clock=time.time(),
                version=VERSION,
                backupfolder="backups",
                importfolder="import",
                tempfolder="temp",
                ntp="NONE",
                logport=9092,
                commport=9090,
                remarks="{}")
            session.add(newGlobal)
            newFolder = daqbrokerSettings.folder(
                clock=time.time(), path="backups", type="0", remarks="{}")
            session.add(newFolder)
            newFolder = daqbrokerSettings.folder(
                clock=time.time(), path="imports", type="0", remarks="{}")
            session.add(newFolder)
            newFolder = daqbrokerSettings.folder(
                clock=time.time(), path="temp", type="0", remarks="{}")
            session.add(newFolder)
            newNode = daqbrokerSettings.nodes(
                node=monitorServer.globalID,
                name="localhost",
                address="127.0.0.1",
                port=9091,
                local="127.0.0.1",
                active=True,
                lastActive=time.time(),
                tsyncauto=False,
                remarks="{}")
            session.add(newNode)
            globals = {
                'clock': time.time(),
                'version': VERSION,
                'backupfolder': 'backups',
                'importfolder': 'import',
                'tempfolder': 'temp',
                'ntp': None,
                'remarks': {},
                'commport': 9090,
                'logport': 9092,
                'isDefault': True}  # Default values, should I use this?
        else:
            # Existing database: load the most recent Global row.
            maxGlobal = session.query(
                daqbrokerSettings.Global).filter_by(
                clock=session.query(
                    func.max(
                        daqbrokerSettings.Global.clock))).first()
            if maxGlobal:
                # Copy the ORM row into a plain dict, skipping SQLAlchemy
                # internals (underscore-prefixed attributes).
                globals = {}
                for field in maxGlobal.__dict__:
                    if not field.startswith('_'):
                        globals[field] = getattr(maxGlobal, field)
            else:
                pass  # Something very wrong happened with the local settings, this should be handled with a GUI
        session.commit()
        return globals
    except Exception as e:
        # NOTE(review): if the failure occurred before 'session' was
        # created, session.rollback() here raises NameError — confirm.
        traceback.print_exc()
        session.rollback()
        sys.exit('Could not set up local settings, make sure you have the correct access rights for this folder and restart the application!')
def startServer(localSettings='localSettings', appPort=7000, logFilename="logFile.log"):
    """Boot the full DAQBroker server stack.

    Starts the backup, logging, collector and producer subprocesses, then
    serves the web application with Tornado on *appPort* (blocking).

    :param localSettings: name or absolute path of the local settings database
    :param appPort: TCP port the web application listens on
    :param logFilename: file name used by the logging subprocess
    """
    global theApp
    bufferSize = 64 * 1024
    # The machine-unique snowflake id doubles as the symmetric password for
    # the encrypted server list.
    # NOTE(review): the original call here was redacted ('<PASSWORD>');
    # reconstructed from the make_snowflake hint used earlier in this file --
    # verify against the project's snowflake module.
    password = str(snowflake.make_snowflake(snowflake_file=os.path.join(base_dir, 'snowflake')))
    manager = multiprocessing.Manager()
    servers = manager.list()     # known DAQBroker servers (shared with children)
    workers = manager.list()     # worker slots; -1 marks a free slot
    backupInfo = manager.dict()
    for _ in range(0, 10000):
        workers.append(-1)
    # Restore the persisted (encrypted) server list, if present.
    secret_enc = os.path.join(base_dir, 'secretEnc')
    secret_plain = os.path.join(base_dir, 'secretPlain')
    if os.path.isfile(secret_enc):
        pyAesCrypt.decryptFile(secret_enc, secret_plain, password, bufferSize)
        # Use a context manager so the handle is closed even if json.load fails.
        with open(secret_plain, 'r') as file:
            for server in json.load(file):
                servers.append(server)
        os.remove(secret_plain)  # never leave the decrypted copy on disk
    if os.path.isabs(localSettings):
        setFile = localSettings
    else:
        setFile = os.path.join(base_dir, localSettings)
    globals = setupLocalSettings(setFile)
    theApp = app.createApp(theServers=servers, theWorkers=workers)
    _startProcess('Backup', 'DAQBroker backup process',
                  backupServer.startBackup,
                  (os.path.join(base_dir, 'static', 'rsync'), backupInfo, setFile))
    _startProcess('Logger', 'DAQBroker log process',
                  logServer.logServer,
                  (globals["logport"], base_dir),
                  {'logFilename': logFilename})
    _startProcess('Collector', 'DAQBroker message collector process',
                  commServer.collector,
                  (servers, globals["commport"], globals["logport"], backupInfo, setFile))
    _startProcess('Producer', 'DAQBroker broadcasting server process',
                  monitorServer.producer,
                  (servers, globals["commport"], globals["logport"], False,
                   backupInfo, workers, setFile))
    print("STARTED", multiprocesses)
    # Serve the WSGI app with Tornado and point the user's browser at it.
    http_server = HTTPServer(WSGIContainer(theApp))
    http_server.listen(appPort)
    webbrowser.open('http://localhost:' + str(appPort) + "/daqbroker")
    IOLoop.instance().start()  # blocks until the IOLoop is stopped


def _startProcess(name, description, target, args, kwargs=None):
    """Spawn *target* in a child process, register it in the module-level
    ``multiprocesses`` list, and give it one second to come up before the
    next subprocess is started (preserves the original staggered startup)."""
    p = multiprocessing.Process(target=target, args=args, kwargs=kwargs or {})
    p.start()
    multiprocesses.append({'name': name, 'pid': p.pid, 'description': description})
    time.sleep(1)
if __name__ == "__main__":
    # Required for frozen (py2exe/PyInstaller) Windows builds of multiprocessing.
    multiprocessing.freeze_support()
    # Positional CLI arguments, mapped onto keyword arguments below.
    # NOTE(review): 'logFileName' differs in case from startServer's
    # 'logFilename' parameter -- confirm daqbrokerServer accepts this key.
    theArguments = ['localSettings', 'appPort', 'logFileName']
    obj = {}
    if len(sys.argv) < 5:
        # Collect up to len(theArguments) positional values, skipping argv[0].
        for i, val in enumerate(sys.argv):
            if i == len(theArguments) + 1:
                break
            if i < 1:
                continue
            obj[theArguments[i - 1]] = val
    else:
        sys.exit(
            "Usage:\n\tdaqbrokerServer localSettings apiPort logFile\nOr:\n\tdaqbrokerServer localSettings apiPort\nOr:\n\tdaqbrokerServer localSettings\nOr:\n\tdaqbroker")
    # A 'pid' file marks a possibly already-running instance.
    if os.path.isfile(os.path.join(base_dir, 'pid')):
        if 'appPort' in obj:
            appPort = int(obj['appPort'])
        else:
            appPort = 7000  # default web port when not given on the CLI
        with open(os.path.join(base_dir, 'pid'), 'r') as f:
            existingPID = f.read().strip('\n').strip('\r').strip('\n')
        processExists = False
        if existingPID:
            # Only treat the pid file as live if that process still exists.
            if psutil.pid_exists(int(existingPID)):
                processExists = True
        if not processExists:
            # Stale pid file: take over, record our pid and start the server.
            with open(os.path.join(base_dir, 'pid'), 'w') as f:
                f.write(str(os.getpid()))
                f.flush()
            newServer = daqbrokerServer(**obj)
            newServer.start()
        else:
            # A server is already running: just open a browser on it.
            webbrowser.open('http://localhost:' + str(appPort) + "/daqbroker")
    else:
        # First run: record our pid and start the server.
        with open(os.path.join(base_dir, 'pid'), 'w') as f:
            f.write(str(os.getpid()))
            f.flush()
        newServer = daqbrokerServer(**obj)
        newServer.start()
| StarcoderdataPython |
127254 | <gh_stars>1-10
"""
Ada-GVAE training script for dsprites dataset, using disentanglement_lib.
Also evaluates DCI metric and saves outputs.
<NAME>
ETHZ 2020
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import warnings
warnings.simplefilter(action='ignore', category=FutureWarning)
import os
from disentanglement_lib.evaluation import evaluate
from disentanglement_lib.evaluation.metrics import utils
from disentanglement_lib.methods.unsupervised import train
from disentanglement_lib.methods.unsupervised import vae
from disentanglement_lib.postprocessing import postprocess
from disentanglement_lib.utils import aggregate_results
import tensorflow as tf
import gin.tf
from absl import app, flags
FLAGS = flags.FLAGS
flags.DEFINE_string('base_dir', 'base', 'Base directory')
flags.DEFINE_string('output_dir', 'test_output', 'Directory to save results in')
flags.DEFINE_integer('dim', 32, 'Latent dimension of encoder')
flags.DEFINE_string('subset', "", 'Subset of factors of tested dataset')
flags.DEFINE_integer('seed', 42, 'Seed for the random number generator')
def main(argv):
    """Train an Ada-GVAE model, extract its mean representation, and
    evaluate the DCI metric, writing all outputs under FLAGS.output_dir."""
    del argv  # Unused
    # Save all results in subdirectories of following path
    base_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), FLAGS.base_dir)
    # Overwrite existing output (useful when rerunning the script)
    overwrite = True
    # Results directory of this Ada-GVAE run (the original comment wrongly
    # said "BetaTCVAE"; this script trains Ada-GVAE)
    path_adagvae = os.path.join(base_path,FLAGS.output_dir)
    # Gin bindings parameterize seed, factor subset and latent dimension.
    gin_bindings = [
      "model.random_seed = {}".format(FLAGS.seed),
      "subset.name = '{}'".format(FLAGS.subset),
      "encoder.num_latent = {}".format(FLAGS.dim)
    ]
    # Train model. Training is configured with a gin config
    train.train_with_gin(os.path.join(path_adagvae, 'model'), overwrite,
                       ['baselines/adagvae/adagvae_train.gin'], gin_bindings)
    # Extract mean representation of latent space
    representation_path = os.path.join(path_adagvae, "representation")
    model_path = os.path.join(path_adagvae, "model")
    postprocess_gin = ['baselines/adagvae/adagvae_postprocess.gin'] # This contains the settings.
    postprocess.postprocess_with_gin(model_path, representation_path, overwrite,
                                   postprocess_gin)
    # Compute DCI metric
    result_path = os.path.join(path_adagvae, "metrics", "dci")
    representation_path = os.path.join(path_adagvae, "representation")
    evaluate.evaluate_with_gin(representation_path, result_path, overwrite, ['baselines/adagvae/adagvae_dci.gin'])
if __name__ == '__main__':
    # absl parses the FLAGS defined above before dispatching to main().
    app.run(main)
3370424 | <filename>third_party/logilab/astroid/scoped_nodes.py
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:<EMAIL>
#
# This file is part of astroid.
#
# astroid is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# astroid is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with astroid. If not, see <http://www.gnu.org/licenses/>.
"""
This module contains the classes for "scoped" node, i.e. which are opening a
new local scope in the language definition : Module, ClassDef, FunctionDef (and
Lambda, GeneratorExp, DictComp and SetComp to some extent).
"""
import io
import itertools
import warnings
import six
import wrapt
from astroid import bases
from astroid import context as contextmod
from astroid import exceptions
from astroid import manager
from astroid import mixins
from astroid import node_classes
from astroid import decorators as decorators_mod
from astroid import util
BUILTINS = six.moves.builtins.__name__
ITER_METHODS = ('__iter__', '__getitem__')
def _c3_merge(sequences):
"""Merges MROs in *sequences* to a single MRO using the C3 algorithm.
Adapted from http://www.python.org/download/releases/2.3/mro/.
"""
result = []
while True:
sequences = [s for s in sequences if s] # purge empty sequences
if not sequences:
return result
for s1 in sequences: # find merge candidates among seq heads
candidate = s1[0]
for s2 in sequences:
if candidate in s2[1:]:
candidate = None
break # reject the current head, it appears later
else:
break
if not candidate:
# Show all the remaining bases, which were considered as
# candidates for the next mro sequence.
bases = ["({})".format(", ".join(base.name
for base in subsequence))
for subsequence in sequences]
raise exceptions.InconsistentMroError(
"Cannot create a consistent method resolution "
"order for bases %s" % ", ".join(bases))
result.append(candidate)
# remove the chosen candidate
for seq in sequences:
if seq[0] == candidate:
del seq[0]
def _verify_duplicates_mro(sequences):
for sequence in sequences:
names = [node.qname() for node in sequence]
if len(names) != len(set(names)):
raise exceptions.DuplicateBasesError('Duplicates found in the mro.')
def remove_nodes(cls):
    # Decorator factory: wraps a lookup-style function and filters instances
    # of *cls* (e.g. DelName nodes) out of its result list.
    @wrapt.decorator
    def decorator(func, instance, args, kwargs):
        nodes = [n for n in func(*args, **kwargs) if not isinstance(n, cls)]
        if not nodes:
            # Everything was filtered away: behave like a failed lookup.
            raise exceptions.NotFoundError()
        return nodes
    return decorator
def function_to_method(n, klass):
    """Wrap a FunctionDef in the method proxy matching its type.

    Class methods become BoundMethod (bound to *klass*), regular methods
    become UnboundMethod, and static methods (or non-function nodes) are
    returned unchanged.
    """
    if not isinstance(n, FunctionDef):
        return n
    if n.type == 'classmethod':
        return bases.BoundMethod(n, klass)
    if n.type == 'staticmethod':
        return n
    return bases.UnboundMethod(n)
def std_special_attributes(self, name, add_locals=True):
    """Resolve the special attributes (__name__, __doc__, __dict__) shared
    by modules, functions and classes.

    :param self: the scoped node whose attribute is being looked up
    :param name: the attribute name to resolve
    :param add_locals: when True, nodes locally defined under *name* are
        appended after the synthesized constant
    :raises exceptions.NotFoundError: for any other attribute name
    """
    # Renamed from `locals` to avoid shadowing the builtin of the same name.
    attrs = self._locals if add_locals else {}
    if name == '__name__':
        return [node_classes.const_factory(self.name)] + attrs.get(name, [])
    if name == '__doc__':
        return [node_classes.const_factory(self.doc)] + attrs.get(name, [])
    if name == '__dict__':
        return [node_classes.Dict()] + attrs.get(name, [])
    raise exceptions.NotFoundError(name)
MANAGER = manager.AstroidManager()
def builtin_lookup(name):
    """Look up *name* inside the builtins module.

    Returns a (builtins astroid module, matching statements) pair; the
    statement tuple is empty when the name is unknown or is '__dict__'.
    """
    builtin_astroid = MANAGER.ast_from_module(six.moves.builtins)
    if name == '__dict__':
        return builtin_astroid, ()
    # dict.get with a default replaces the original try/except KeyError.
    stmts = builtin_astroid._locals.get(name, ())
    return builtin_astroid, stmts
# TODO move this Mixin to mixins.py; problem: 'FunctionDef' in _scope_lookup
class LocalsDictNodeNG(node_classes.LookupMixIn, bases.NodeNG):
    """ this class provides locals handling common to Module, FunctionDef
    and ClassDef nodes, including a dict like interface for direct access
    to locals information
    """
    # attributes below are set by the builder module or by raw factories
    # dictionary of locals with name as key and node defining the local as
    # value
    @property
    def locals(self):
        # Deprecated accessor kept for backward compatibility; warns and
        # forwards to the private _locals mapping.
        util.attribute_to_function_warning('locals', 2.0, 'get_locals')
        return self._locals
    @locals.setter
    def locals(self, _locals):
        util.attribute_to_function_warning('locals', 2.0, 'get_locals')
        self._locals = _locals
    @locals.deleter
    def locals(self):
        util.attribute_to_function_warning('locals', 2.0, 'get_locals')
        del self._locals
    def qname(self):
        """return the 'qualified' name of the node, eg module.name,
        module.class.name ...
        """
        if self.parent is None:
            return self.name
        return '%s.%s' % (self.parent.frame().qname(), self.name)
    def frame(self):
        """return the first parent frame node (i.e. Module, FunctionDef or ClassDef)
        """
        # Scoped nodes are their own frame.
        return self
    def scope(self):
        """return the first node defining a new scope (i.e. Module,
        FunctionDef, ClassDef, Lambda but also GeneratorExp, DictComp and SetComp)
        """
        return self
    def _scope_lookup(self, node, name, offset=0):
        """XXX method for interfacing the scope lookup"""
        try:
            # Keep only the statements actually visible from `node`
            # (filters by source position / branch).
            stmts = node._filter_stmts(self._locals[name], self, offset)
        except KeyError:
            stmts = ()
        if stmts:
            return self, stmts
        if self.parent: # i.e. not Module
            # nested scope: if parent scope is a function, that's fine
            # else jump to the module
            pscope = self.parent.scope()
            if not pscope.is_function:
                pscope = pscope.root()
            return pscope.scope_lookup(node, name)
        return builtin_lookup(name) # Module
    def set_local(self, name, stmt):
        """define <name> in locals (<stmt> is the node defining the name)
        if the node is a Module node (i.e. has globals), add the name to
        globals

        if the name is already defined, ignore it
        """
        #assert not stmt in self._locals.get(name, ()), (self, stmt)
        self._locals.setdefault(name, []).append(stmt)
    # dict-style assignment delegates to set_local
    __setitem__ = set_local
    def _append_node(self, child):
        """append a child, linking it in the tree"""
        self.body.append(child)
        child.parent = self
    def add_local_node(self, child_node, name=None):
        """append a child which should alter locals to the given node"""
        if name != '__class__':
            # add __class__ node as a child will cause infinite recursion later!
            self._append_node(child_node)
        self.set_local(name or child_node.name, child_node)
    def __getitem__(self, item):
        """method from the `dict` interface returning the first node
        associated with the given name in the locals dictionary

        :type item: str
        :param item: the name of the locally defined object
        :raises KeyError: if the name is not defined
        """
        return self._locals[item][0]
    def __iter__(self):
        """method from the `dict` interface returning an iterator on
        `self.keys()`
        """
        return iter(self.keys())
    def keys(self):
        """method from the `dict` interface returning a tuple containing
        locally defined names
        """
        return list(self._locals.keys())
    def values(self):
        """method from the `dict` interface returning a tuple containing
        locally defined nodes which are instance of `FunctionDef` or `ClassDef`
        """
        return [self[key] for key in self.keys()]
    def items(self):
        """method from the `dict` interface returning a list of tuple
        containing each locally defined name with its associated node,
        which is an instance of `FunctionDef` or `ClassDef`
        """
        return list(zip(self.keys(), self.values()))
    def __contains__(self, name):
        return name in self._locals
class Module(LocalsDictNodeNG):
    """Astroid node for a Python module (the root of every syntax tree)."""
    _astroid_fields = ('body',)
    fromlineno = 0
    lineno = 0
    # attributes below are set by the builder module or by raw factories
    # the file from which as been extracted the astroid representation. It may
    # be None if the representation has been built from a built-in module
    source_file = None
    # Alternatively, if built from a string/bytes, this can be set
    source_code = None
    # encoding of python source file, so we can get unicode out of it (python2
    # only)
    file_encoding = None
    # the module name
    name = None
    # boolean for astroid built from source (i.e. ast)
    pure_python = None
    # boolean for package module
    package = None
    # dictionary of globals with name as key and node defining the global
    # as value
    _globals = None
    # Future imports
    _future_imports = None
    # names of python special attributes (handled by getattr impl.)
    special_attributes = set(('__name__', '__doc__', '__file__', '__path__',
                              '__dict__'))
    # names of module attributes available through the global scope
    scope_attrs = set(('__name__', '__doc__', '__file__', '__path__'))
    def __init__(self, name, doc, pure_python=True):
        self.name = name
        self.doc = doc
        self.pure_python = pure_python
        # At module level locals and globals are the same mapping.
        self._locals = self._globals = {}
        self.body = []
        self._future_imports = set()
    # Future deprecation warnings
    @property
    def file(self):
        util.rename_warning('file', 2.0, 'source_file')
        return self.source_file
    @file.setter
    def file(self, source_file):
        util.rename_warning('file', 2.0, 'source_file')
        self.source_file = source_file
    @file.deleter
    def file(self):
        util.rename_warning('file', 2.0, 'source_file')
        del self.source_file
    @property
    def path(self):
        util.rename_warning('path', 2.0, 'source_file')
        return self.source_file
    @path.setter
    def path(self, source_file):
        util.rename_warning('path', 2.0, 'source_file')
        self.source_file = source_file
    @path.deleter
    def path(self):
        util.rename_warning('path', 2.0, 'source_file')
        del self.source_file
    @property
    def file_bytes(self):
        util.rename_warning('file_bytes', 2.0, 'source_code')
        return self.source_code
    @file_bytes.setter
    def file_bytes(self, source_code):
        util.rename_warning('file_bytes', 2.0, 'source_code')
        self.source_code = source_code
    @file_bytes.deleter
    def file_bytes(self):
        util.rename_warning('file_bytes', 2.0, 'source_code')
        del self.source_code
    @property
    def globals(self):
        util.attribute_to_function_warning('globals', 2.0, 'get_locals')
        return self._locals
    @globals.setter
    def globals(self, _globals):
        util.attribute_to_function_warning('globals', 2.0, 'get_locals')
        self._locals = _globals
    @globals.deleter
    def globals(self):
        util.attribute_to_function_warning('globals', 2.0, 'get_locals')
        del self._locals
    @property
    def future_imports(self):
        util.attribute_to_function_warning('future_imports', 2.0, 'future_imports')
        return self._future_imports
    @future_imports.setter
    def future_imports(self, _future_imports):
        util.attribute_to_function_warning('future_imports', 2.0, 'future_imports')
        self._future_imports = _future_imports
    @future_imports.deleter
    def future_imports(self):
        util.attribute_to_function_warning('future_imports', 2.0, 'future_imports')
        del self._future_imports
    def _get_stream(self):
        # Prefer the in-memory source over re-reading from disk; the caller
        # is responsible for closing the returned stream.
        if self.source_code is not None:
            return io.BytesIO(self.source_code)
        if self.source_file is not None:
            stream = open(self.source_file, 'rb')
            return stream
        return None
    @property
    def file_stream(self):
        warnings.warn("file_stream property is deprecated and "
                      "it is slated for removal in astroid 1.6."
                      "Use the new method 'stream' instead.",
                      PendingDeprecationWarning,
                      stacklevel=2)
        return self._get_stream()
    def stream(self):
        """Get a stream to the underlying file or bytes."""
        return self._get_stream()
    def close(self):
        """Close the underlying file streams."""
        # Intentionally a no-op beyond the warning: stream lifetime is now
        # managed by the caller of stream().
        warnings.warn("close method is deprecated and it is "
                      "slated for removal in astroid 1.6, along "
                      "with 'file_stream' property. "
                      "Its behaviour is replaced by managing each "
                      "file stream returned by the 'stream' method.",
                      PendingDeprecationWarning,
                      stacklevel=2)
    def block_range(self, lineno):
        """return block line numbers.

        start from the beginning whatever the given lineno
        """
        return self.fromlineno, self.tolineno
    def scope_lookup(self, node, name, offset=0):
        # Module-level special attributes win over builtins, unless the
        # module redefines them locally.
        if name in self.scope_attrs and name not in self._locals:
            try:
                return self, self.getattr(name)
            except exceptions.NotFoundError:
                return self, ()
        return self._scope_lookup(node, name, offset)
    def pytype(self):
        return '%s.module' % BUILTINS
    def display_type(self):
        return 'Module'
    # DelName nodes (from `del x`) are filtered out of the result.
    @remove_nodes(node_classes.DelName)
    def getattr(self, name, context=None, ignore_locals=False):
        if name in self.special_attributes:
            if name == '__file__':
                return [node_classes.const_factory(self.source_file)] + self._locals.get(name, [])
            if name == '__path__' and self.package:
                return [node_classes.List()] + self._locals.get(name, [])
            return std_special_attributes(self, name)
        if not ignore_locals and name in self._locals:
            return self._locals[name]
        if self.package:
            # Packages may resolve an attribute as a submodule import.
            try:
                return [self.import_module(name, relative_only=True)]
            except exceptions.AstroidBuildingException:
                raise exceptions.NotFoundError(name)
            except SyntaxError:
                raise exceptions.NotFoundError(name)
        raise exceptions.NotFoundError(name)
    def igetattr(self, name, context=None):
        """inferred getattr"""
        # set lookup name since this is necessary to infer on import nodes for
        # instance
        context = contextmod.copy_context(context)
        context.lookupname = name
        try:
            return bases._infer_stmts(self.getattr(name, context),
                                      context, frame=self)
        except exceptions.NotFoundError:
            raise exceptions.InferenceError(name)
    def fully_defined(self):
        """return True if this module has been built from a .py file
        and so contains a complete representation including the code
        """
        return self.source_file is not None and self.source_file.endswith('.py')
    def statement(self):
        """return the first parent node marked as statement node
        consider a module as a statement...
        """
        return self
    def previous_sibling(self):
        """module has no sibling"""
        return
    def next_sibling(self):
        """module has no sibling"""
        return
    if six.PY2:
        @decorators_mod.cachedproperty
        def _absolute_import_activated(self):
            # On Python 2, absolute imports require an explicit
            # `from __future__ import absolute_import`.
            for stmt in self._locals.get('absolute_import', ()):
                if isinstance(stmt, node_classes.ImportFrom) and stmt.modname == '__future__':
                    return True
            return False
    else:
        # Python 3 always uses absolute imports.
        _absolute_import_activated = True
    def absolute_import_activated(self):
        return self._absolute_import_activated
    def import_module(self, modname, relative_only=False, level=None):
        """import the given module considering self as context"""
        if relative_only and level is None:
            level = 0
        absmodname = self.relative_to_absolute_name(modname, level)
        try:
            return MANAGER.ast_from_module_name(absmodname)
        except exceptions.AstroidBuildingException:
            # we only want to import a sub module or package of this module,
            # skip here
            if relative_only:
                raise
        return MANAGER.ast_from_module_name(modname)
    def relative_to_absolute_name(self, modname, level):
        """return the absolute module name for a relative import.

        The relative import can be implicit or explicit.
        """
        # XXX this returns non sens when called on an absolute import
        # like 'pylint.checkers.astroid.utils'
        # XXX doesn't return absolute name if self.name isn't absolute name
        if self.absolute_import_activated() and level is None:
            return modname
        if level:
            # Explicit relative import: strip `level` trailing components.
            if self.package:
                level = level - 1
            package_name = self.name.rsplit('.', level)[0]
        elif self.package:
            package_name = self.name
        else:
            package_name = self.name.rsplit('.', 1)[0]
        if package_name:
            if not modname:
                return package_name
            return '%s.%s' % (package_name, modname)
        return modname
    def wildcard_import_names(self):
        """return the list of imported names when this module is 'wildcard
        imported'

        It doesn't include the '__builtins__' name which is added by the
        current CPython implementation of wildcard imports.
        """
        # We separate the different steps of lookup in try/excepts
        # to avoid catching too many Exceptions
        default = [name for name in self.keys() if not name.startswith('_')]
        try:
            all = self['__all__']
        except KeyError:
            return default
        try:
            explicit = next(all.assigned_stmts())
        except exceptions.InferenceError:
            return default
        except AttributeError:
            # not an assignment node
            # XXX infer?
            return default
        # Try our best to detect the exported name.
        inferred = []
        try:
            explicit = next(explicit.infer())
        except exceptions.InferenceError:
            return default
        if not isinstance(explicit, (node_classes.Tuple, node_classes.List)):
            return default
        str_const = lambda node: (isinstance(node, node_classes.Const) and
                                  isinstance(node.value, six.string_types))
        for node in explicit.elts:
            if str_const(node):
                inferred.append(node.value)
            else:
                # Non-literal entry: try inference before giving up on it.
                try:
                    inferred_node = next(node.infer())
                except exceptions.InferenceError:
                    continue
                if str_const(inferred_node):
                    inferred.append(inferred_node.value)
        return inferred
    def _public_names(self):
        """Get the list of the names which are publicly available in this module."""
        return [name for name in self.keys() if not name.startswith('_')]
    def bool_value(self):
        # A module object is always truthy.
        return True
class ComprehensionScope(LocalsDictNodeNG):
    """Base class for comprehension nodes that open a scope but are not
    frames: frame lookup is delegated to the parent."""
    def frame(self):
        return self.parent.frame()
    # Comprehensions resolve names with the generic scoped lookup.
    scope_lookup = LocalsDictNodeNG._scope_lookup
class GeneratorExp(ComprehensionScope):
    """Astroid node for a generator expression."""
    _astroid_fields = ('elt', 'generators')
    def __init__(self):
        self._locals = {}
        self.elt = None        # the yielded expression
        self.generators = []   # the `for ... in ... if ...` clauses
class DictComp(ComprehensionScope):
    """Astroid node for a dict comprehension."""
    _astroid_fields = ('key', 'value', 'generators')
    def __init__(self):
        self._locals = {}
        self.key = None        # key expression
        self.value = None      # value expression
        self.generators = []   # the `for ... in ... if ...` clauses
class SetComp(ComprehensionScope):
    """Astroid node for a set comprehension."""
    _astroid_fields = ('elt', 'generators')
    def __init__(self):
        self._locals = {}
        self.elt = None        # the element expression
        self.generators = []   # the `for ... in ... if ...` clauses
class _ListComp(bases.NodeNG):
    """class representing a ListComp node"""
    _astroid_fields = ('elt', 'generators')
    elt = None         # the element expression
    generators = None  # the `for ... in ... if ...` clauses
# On Python 3 list comprehensions get their own scope (like generator
# expressions); on Python 2 they leak into the enclosing scope.
if six.PY3:
    class ListComp(_ListComp, ComprehensionScope):
        """class representing a ListComp node"""
        def __init__(self):
            self._locals = {}
else:
    class ListComp(_ListComp):
        """class representing a ListComp node"""
def _infer_decorator_callchain(node):
    """Detect decorator call chaining and see if the end result is a
    static or a classmethod.

    Returns 'classmethod', 'staticmethod' or None (implicitly) when the
    chain cannot be resolved to either.
    """
    if not isinstance(node, FunctionDef):
        return
    if not node.parent:
        return
    try:
        # TODO: We don't handle multiple inference results right now,
        #       because there's no flow to reason when the return
        #       is what we are looking for, a static or a class method.
        result = next(node.infer_call_result(node.parent))
    except (StopIteration, exceptions.InferenceError):
        return
    if isinstance(result, bases.Instance):
        # Work on the proxied class rather than the instance wrapper.
        result = result._proxied
    if isinstance(result, ClassDef):
        if result.is_subtype_of('%s.classmethod' % BUILTINS):
            return 'classmethod'
        if result.is_subtype_of('%s.staticmethod' % BUILTINS):
            return 'staticmethod'
class Lambda(mixins.FilterStmtsMixin, LocalsDictNodeNG):
    """Astroid node for a ``lambda`` expression; also the base class of
    FunctionDef."""
    _astroid_fields = ('args', 'body',)
    name = '<lambda>'
    # function's type, 'function' | 'method' | 'staticmethod' | 'classmethod'
    type = 'function'
    def __init__(self):
        self._locals = {}
        self.args = []
        self.body = []
    def pytype(self):
        # A lambda stored on a class behaves as an (unbound) instance method.
        if 'method' in self.type:
            return '%s.instancemethod' % BUILTINS
        return '%s.function' % BUILTINS
    def display_type(self):
        if 'method' in self.type:
            return 'Method'
        return 'Function'
    def callable(self):
        return True
    def argnames(self):
        """return a list of argument names"""
        if self.args.args: # maybe None with builtin functions
            names = _rec_get_names(self.args.args)
        else:
            names = []
        if self.args.vararg:
            names.append(self.args.vararg)
        if self.args.kwarg:
            names.append(self.args.kwarg)
        return names
    def infer_call_result(self, caller, context=None):
        """infer what a function is returning when called"""
        # A lambda's body is a single expression: infer it directly.
        return self.body.infer(context)
    def scope_lookup(self, node, name, offset=0):
        if node in self.args.defaults or node in self.args.kw_defaults:
            # Default values are evaluated in the enclosing frame.
            frame = self.parent.frame()
            # line offset to avoid that def func(f=func) resolve the default
            # value to the defined function
            offset = -1
        else:
            # check this is not used in function decorators
            frame = self
        return frame._scope_lookup(node, name, offset)
class FunctionDef(bases.Statement, Lambda):
if six.PY3:
_astroid_fields = ('decorators', 'args', 'returns', 'body')
returns = None
else:
_astroid_fields = ('decorators', 'args', 'body')
special_attributes = set(('__name__', '__doc__', '__dict__'))
is_function = True
# attributes below are set by the builder module or by raw factories
decorators = None
def __init__(self, name, doc):
self._locals = {}
self.args = []
self.body = []
self.name = name
self.doc = doc
self._instance_attrs = {}
@property
def instance_attrs(self):
util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes')
return self._instance_attrs
@instance_attrs.setter
def instance_attrs(self, _instance_attrs):
util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes')
self._instance_attrs = _instance_attrs
@instance_attrs.deleter
def instance_attrs(self):
util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes')
del self._instance_attrs
@decorators_mod.cachedproperty
def extra_decorators(self):
"""Get the extra decorators that this function can haves
Additional decorators are considered when they are used as
assignments, as in `method = staticmethod(method)`.
The property will return all the callables that are used for
decoration.
"""
frame = self.parent.frame()
if not isinstance(frame, ClassDef):
return []
decorators = []
for assign in frame.nodes_of_class(node_classes.Assign):
if (isinstance(assign.value, node_classes.Call)
and isinstance(assign.value.func, node_classes.Name)):
for assign_node in assign.targets:
if not isinstance(assign_node, node_classes.AssignName):
# Support only `name = callable(name)`
continue
if assign_node.name != self.name:
# Interested only in the assignment nodes that
# decorates the current method.
continue
try:
meth = frame[self.name]
except KeyError:
continue
else:
# Must be a function and in the same frame as the
# original method.
if (isinstance(meth, FunctionDef)
and assign_node.frame() == frame):
decorators.append(assign.value)
return decorators
@decorators_mod.cachedproperty
def type(self):
"""Get the function type for this node.
Possible values are: method, function, staticmethod, classmethod.
"""
builtin_descriptors = {'classmethod', 'staticmethod'}
for decorator in self.extra_decorators:
if decorator.func.name in builtin_descriptors:
return decorator.func.name
frame = self.parent.frame()
type_name = 'function'
if isinstance(frame, ClassDef):
if self.name == '__new__':
return 'classmethod'
else:
type_name = 'method'
if self.decorators:
for node in self.decorators.nodes:
if isinstance(node, node_classes.Name):
if node.name in builtin_descriptors:
return node.name
if isinstance(node, node_classes.Call):
# Handle the following case:
# @some_decorator(arg1, arg2)
# def func(...)
#
try:
current = next(node.func.infer())
except exceptions.InferenceError:
continue
_type = _infer_decorator_callchain(current)
if _type is not None:
return _type
try:
for inferred in node.infer():
# Check to see if this returns a static or a class method.
_type = _infer_decorator_callchain(inferred)
if _type is not None:
return _type
if not isinstance(inferred, ClassDef):
continue
for ancestor in inferred.ancestors():
if not isinstance(ancestor, ClassDef):
continue
if ancestor.is_subtype_of('%s.classmethod' % BUILTINS):
return 'classmethod'
elif ancestor.is_subtype_of('%s.staticmethod' % BUILTINS):
return 'staticmethod'
except exceptions.InferenceError:
pass
return type_name
@decorators_mod.cachedproperty
def fromlineno(self):
# lineno is the line number of the first decorator, we want the def
# statement lineno
lineno = self.lineno
if self.decorators is not None:
lineno += sum(node.tolineno - node.lineno + 1
for node in self.decorators.nodes)
return lineno
@decorators_mod.cachedproperty
def blockstart_tolineno(self):
return self.args.tolineno
def block_range(self, lineno):
"""return block line numbers.
start from the "def" position whatever the given lineno
"""
return self.fromlineno, self.tolineno
def getattr(self, name, context=None):
"""this method doesn't look in the instance_attrs dictionary since it's
done by an Instance proxy at inference time.
"""
if name == '__module__':
return [node_classes.const_factory(self.root().qname())]
if name in self._instance_attrs:
return self._instance_attrs[name]
return std_special_attributes(self, name, False)
def igetattr(self, name, context=None):
"""Inferred getattr, which returns an iterator of inferred statements."""
try:
return bases._infer_stmts(self.getattr(name, context),
context, frame=self)
except exceptions.NotFoundError:
raise exceptions.InferenceError(name)
def is_method(self):
"""return true if the function node should be considered as a method"""
# check we are defined in a ClassDef, because this is usually expected
# (e.g. pylint...) when is_method() return True
return self.type != 'function' and isinstance(self.parent.frame(), ClassDef)
@decorators_mod.cached
def decoratornames(self):
"""return a list of decorator qualified names"""
result = set()
decoratornodes = []
if self.decorators is not None:
# pylint: disable=unsupported-binary-operation; damn flow control.
decoratornodes += self.decorators.nodes
decoratornodes += self.extra_decorators
for decnode in decoratornodes:
try:
for infnode in decnode.infer():
result.add(infnode.qname())
except exceptions.InferenceError:
continue
return result
def is_bound(self):
"""return true if the function is bound to an Instance or a class"""
return self.type == 'classmethod'
def is_abstract(self, pass_is_abstract=True):
"""Returns True if the method is abstract.
A method is considered abstract if
- the only statement is 'raise NotImplementedError', or
- the only statement is 'pass' and pass_is_abstract is True, or
- the method is annotated with abc.astractproperty/abc.abstractmethod
"""
if self.decorators:
for node in self.decorators.nodes:
try:
inferred = next(node.infer())
except exceptions.InferenceError:
continue
if inferred and inferred.qname() in ('abc.abstractproperty',
'abc.abstractmethod'):
return True
for child_node in self.body:
if isinstance(child_node, node_classes.Raise):
if child_node.raises_not_implemented():
return True
return pass_is_abstract and isinstance(child_node, node_classes.Pass)
# empty function is the same as function with a single "pass" statement
if pass_is_abstract:
return True
def is_generator(self):
"""return true if this is a generator function"""
yield_nodes = (node_classes.Yield, node_classes.YieldFrom)
return next(self.nodes_of_class(yield_nodes,
skip_klass=(FunctionDef, Lambda)), False)
def infer_call_result(self, caller, context=None):
    """infer what a function is returning when called"""
    if self.is_generator():
        # Calling a generator function produces a Generator object, not
        # the values yielded by the body.
        result = bases.Generator()
        result.parent = self
        yield result
        return
    # This is really a gigantic hack to work around metaclass generators
    # that return transient class-generating functions. Pylint's AST structure
    # cannot handle a base class object that is only used for calling __new__,
    # but does not contribute to the inheritance structure itself. We inject
    # a fake class into the hierarchy here for several well-known metaclass
    # generators, and filter it out later.
    if (self.name == 'with_metaclass' and
            len(self.args.args) == 1 and
            self.args.vararg is not None):
        metaclass = next(caller.args[0].infer(context))
        if isinstance(metaclass, ClassDef):
            # Build a hidden temporary class whose bases are the remaining
            # call arguments; "hide" marks it for later filtering.
            c = ClassDef('temporary_class', None)
            c.hide = True
            c.parent = self
            class_bases = [next(b.infer(context)) for b in caller.args[1:]]
            c.bases = [base for base in class_bases if base != util.YES]
            c._metaclass = metaclass
            yield c
            return
    returns = self.nodes_of_class(node_classes.Return, skip_klass=FunctionDef)
    for returnnode in returns:
        if returnnode.value is None:
            # A bare "return" produces the constant None.
            yield node_classes.Const(None)
        else:
            try:
                for inferred in returnnode.value.infer(context):
                    yield inferred
            except exceptions.InferenceError:
                yield util.YES
class AsyncFunctionDef(FunctionDef):
    """Asynchronous function created with the `async` keyword.

    Behaves exactly like :class:`FunctionDef`; the subclass only marks the
    coroutine nature of the definition.
    """
def _rec_get_names(args, names=None):
    """Return the flat list of all argument names found in *args*.

    Tuple-unpacking arguments are flattened recursively into the
    accumulator list *names*.
    """
    collected = [] if names is None else names
    for arg in args:
        if isinstance(arg, node_classes.Tuple):
            _rec_get_names(arg.elts, collected)
        else:
            collected.append(arg.name)
    return collected
def _is_metaclass(klass, seen=None):
    """Return whether *klass* can be used as a metaclass.

    A class qualifies when it is ``type`` itself or (transitively)
    derives from a metaclass. Inference failures on a base are ignored;
    *seen* guards against base-class cycles.
    """
    if klass.name == 'type':
        return True
    if seen is None:
        seen = set()
    for base in klass.bases:
        try:
            for baseobj in base.infer():
                qname = baseobj.qname()
                if qname in seen:
                    continue
                seen.add(qname)
                if isinstance(baseobj, bases.Instance):
                    # An instance among the bases: not abstract.
                    return False
                if baseobj is util.YES:
                    continue
                if baseobj is klass:
                    continue
                if not isinstance(baseobj, ClassDef):
                    continue
                if baseobj._type == 'metaclass':
                    return True
                if _is_metaclass(baseobj, seen):
                    return True
        except exceptions.InferenceError:
            continue
    return False
def _class_type(klass, ancestors=None):
    """Return the kind of *klass*: 'class', 'metaclass' or 'exception'.

    The computed kind is memoized on ``klass._type``. *ancestors* tracks
    qualified names already visited so ancestor loops terminate.
    """
    # XXX we have to store ancestors in case we have a ancestor loop
    if klass._type is not None:
        return klass._type
    if _is_metaclass(klass):
        klass._type = 'metaclass'
    elif klass.name.endswith('Exception'):
        klass._type = 'exception'
    else:
        ancestors = set() if ancestors is None else ancestors
        klass_name = klass.qname()
        if klass_name in ancestors:
            # We are inside an ancestor loop and found no type.
            klass._type = 'class'
            return 'class'
        ancestors.add(klass_name)
        for base in klass.ancestors(recurs=False):
            base_kind = _class_type(base, ancestors)
            if base_kind == 'class':
                continue
            if base_kind == 'metaclass' and not _is_metaclass(klass):
                # don't propagate it if the current class
                # can't be a metaclass
                continue
            klass._type = base.type
            break
    if klass._type is None:
        klass._type = 'class'
    return klass._type
class ClassDef(mixins.FilterStmtsMixin, LocalsDictNodeNG, bases.Statement):
    """Node for a class definition; also acts as the proxy for its instances."""
    # some of the attributes below are set by the builder module or
    # by a raw factories
    # a dictionary of class instances attributes
    _astroid_fields = ('decorators', 'bases', 'body')  # name
    decorators = None
    # Implicit attributes that every class object exposes.
    special_attributes = set(('__name__', '__doc__', '__dict__', '__module__',
                              '__bases__', '__mro__', '__subclasses__'))
    _type = None
    _metaclass_hack = False
    # Hidden classes (e.g. with_metaclass temporaries) are filtered out of
    # ancestor iteration.
    hide = False
    type = property(_class_type,
                    doc="class'type, possible values are 'class' | "
                    "'metaclass' | 'exception'")
def __init__(self, name, doc):
    """Initialize the class node with its *name* and docstring *doc*."""
    self._instance_attrs = {}  # attributes assigned on instances (self.attr = ...)
    self._locals = {}  # names defined in the class body
    self.bases = []  # base class expression nodes, as written
    self.body = []  # statement nodes of the class body
    self.name = name
    self.doc = doc
@property
def instance_attrs(self):
    # Deprecated accessor: warns and forwards to the private dict
    # (get_attributes() is the replacement).
    util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes')
    return self._instance_attrs

@instance_attrs.setter
def instance_attrs(self, _instance_attrs):
    util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes')
    self._instance_attrs = _instance_attrs

@instance_attrs.deleter
def instance_attrs(self):
    util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes')
    del self._instance_attrs

def _newstyle_impl(self, context=None):
    # Compute (and memoize in self._newstyle) whether this class is
    # new-style; a single new-style ancestor is enough.
    if context is None:
        context = contextmod.InferenceContext()
    if self._newstyle is not None:
        return self._newstyle
    for base in self.ancestors(recurs=False, context=context):
        if base._newstyle_impl(context):
            self._newstyle = True
            break
    klass = self._explicit_metaclass()
    # could be any callable, we'd need to infer the result of klass(name,
    # bases, dict). punt if it's not a class node.
    if klass is not None and isinstance(klass, ClassDef):
        self._newstyle = klass._newstyle_impl(context)
    if self._newstyle is None:
        self._newstyle = False
    return self._newstyle

# Memoization slot for _newstyle_impl; None means "not computed yet".
_newstyle = None
newstyle = property(_newstyle_impl,
                    doc="boolean indicating if it's a new style class"
                    "or not")
@decorators_mod.cachedproperty
def blockstart_tolineno(self):
    """Line where the class header ends: the last base expression, or the def line."""
    if not self.bases:
        return self.fromlineno
    return self.bases[-1].tolineno
def block_range(self, lineno):
    """Return the (start, end) line numbers of the class block.

    The range always starts at the "class" statement, whatever the
    given *lineno* is.
    """
    return self.fromlineno, self.tolineno
def pytype(self):
    """Return the name of the builtin python type this node proxies."""
    return ('%s.type' % BUILTINS) if self.newstyle else ('%s.classobj' % BUILTINS)
def display_type(self):
    """Human-readable kind of this node, used in messages."""
    kind = 'Class'
    return kind
def callable(self):
    """Classes are always callable: calling one creates an instance."""
    return True
def is_subtype_of(self, type_name, context=None):
    """Return True when *type_name* names this class or one of its ancestors."""
    if self.qname() == type_name:
        return True
    for ancestor in self.ancestors(context=context):
        if ancestor.qname() == type_name:
            return True
def _infer_type_call(self, caller, context):
    # Infer the class produced by a three-argument type() call:
    # type(name, bases, dict).
    name_node = next(caller.args[0].infer(context))
    if (isinstance(name_node, node_classes.Const) and
            isinstance(name_node.value, six.string_types)):
        name = name_node.value
    else:
        # Non-constant class name: nothing reliable can be built.
        return util.YES
    result = ClassDef(name, None)
    # Get the bases of the class.
    class_bases = next(caller.args[1].infer(context))
    if isinstance(class_bases, (node_classes.Tuple, node_classes.List)):
        result.bases = class_bases.itered()
    else:
        # There is currently no AST node that can represent an 'unknown'
        # node (YES is not an AST node), therefore we simply return YES here
        # although we know at least the name of the class.
        return util.YES
    # Get the members of the class
    try:
        members = next(caller.args[2].infer(context))
    except exceptions.InferenceError:
        members = None
    if members and isinstance(members, node_classes.Dict):
        # Only constant string keys become class-local names.
        for attr, value in members.items:
            if (isinstance(attr, node_classes.Const) and
                    isinstance(attr.value, six.string_types)):
                result._locals[attr.value] = [value]
    result.parent = caller.parent
    return result
def infer_call_result(self, caller, context=None):
    """Infer what calling this class produces.

    A three-argument call on a subtype of ``type`` creates a new class;
    any other call produces an instance of this class.
    """
    is_type_call = (self.is_subtype_of('%s.type' % (BUILTINS,), context)
                    and len(caller.args) == 3)
    if is_type_call:
        yield self._infer_type_call(caller, context)
    else:
        yield bases.Instance(self)
def scope_lookup(self, node, name, offset=0):
    # Resolve *name* as seen from *node*: lookups originating inside the
    # bases list must be done in the parent frame, not in the class body.
    # pylint: disable=redefined-variable-type
    if any(node == base or base.parent_of(node)
           for base in self.bases):
        # Handle the case where we have either a name
        # in the bases of a class, which exists before
        # the actual definition or the case where we have
        # a Getattr node, with that name.
        #
        # name = ...
        # class A(name):
        #     def name(self): ...
        #
        # import name
        # class A(name.Name):
        #     def name(self): ...
        frame = self.parent.frame()
        # line offset to avoid that class A(A) resolve the ancestor to
        # the defined class
        offset = -1
    else:
        frame = self
    return frame._scope_lookup(node, name, offset)
@property
def basenames(self):
    """List of parent class names, exactly as written in the class statement."""
    return [base_node.as_string() for base_node in self.bases]
def ancestors(self, recurs=True, context=None):
    """return an iterator on the node base classes in a prefixed
    depth first order

    :param recurs:
        boolean indicating if it should recurse or return direct
        ancestors only
    """
    # FIXME: should be possible to choose the resolution order
    # FIXME: inference make infinite loops possible here
    yielded = set([self])
    if context is None:
        context = contextmod.InferenceContext()
    if six.PY3:
        if not self.bases and self.qname() != 'builtins.object':
            # Python 3 classes implicitly inherit from object.
            yield builtin_lookup("object")[1][0]
            return
    for stmt in self.bases:
        with context.restore_path():
            try:
                for baseobj in stmt.infer(context):
                    if not isinstance(baseobj, ClassDef):
                        if isinstance(baseobj, bases.Instance):
                            baseobj = baseobj._proxied
                        else:
                            continue
                    if not baseobj.hide:
                        # Hidden classes (with_metaclass temporaries) are
                        # skipped but their ancestors are still walked below.
                        if baseobj in yielded:
                            continue
                        yielded.add(baseobj)
                        yield baseobj
                    if recurs:
                        for grandpa in baseobj.ancestors(recurs=True,
                                                         context=context):
                            if grandpa is self:
                                # This class is the ancestor of itself.
                                break
                            if grandpa in yielded:
                                continue
                            yielded.add(grandpa)
                            yield grandpa
            except exceptions.InferenceError:
                continue
def local_attr_ancestors(self, name, context=None):
    """Iterate over the ancestor classes that define *name* in their locals."""
    fully_newstyle = self.newstyle and all(
        parent.newstyle for parent in self.ancestors(context))
    if fully_newstyle:
        # Prefer the MRO so the lookup order matches what the
        # interpreter would actually do.
        try:
            ancestors = self.mro(context)[1:]
        except exceptions.MroError:
            # No sane MRO could be determined; fall back on ancestors.
            ancestors = self.ancestors(context=context)
    else:
        ancestors = self.ancestors(context=context)
    for ancestor in ancestors:
        if name in ancestor:
            yield ancestor
def instance_attr_ancestors(self, name, context=None):
    """Iterate over ancestors that record *name* among their instance attributes."""
    return (ancestor for ancestor in self.ancestors(context=context)
            if name in ancestor._instance_attrs)
def has_base(self, node):
    """Return whether *node* appears among the direct bases of this class."""
    for base in self.bases:
        if base is node or node == base:
            return True
    return False
@remove_nodes(node_classes.DelAttr)
def local_attr(self, name, context=None):
    """return the list of assign node associated to name in this class
    locals or in its parents

    :raises `NotFoundError`:
        if no attribute with this name has been find in this class or
        its parent classes
    """
    try:
        return self._locals[name]
    except KeyError:
        # Fall back on the first ancestor that defines the name locally.
        for class_node in self.local_attr_ancestors(name, context):
            return class_node._locals[name]
    raise exceptions.NotFoundError(name)

@remove_nodes(node_classes.DelAttr)
def instance_attr(self, name, context=None):
    """return the astroid nodes associated to name in this class instance
    attributes dictionary and in its parents

    :raises `NotFoundError`:
        if no attribute with this name has been find in this class or
        its parent classes
    """
    # Return a copy, so we don't modify self._instance_attrs,
    # which could lead to infinite loop.
    values = list(self._instance_attrs.get(name, []))
    # get all values from parents
    for class_node in self.instance_attr_ancestors(name, context):
        values += class_node._instance_attrs[name]
    if not values:
        raise exceptions.NotFoundError(name)
    return values

def instantiate_class(self):
    """return Instance of ClassDef node, else return self"""
    return bases.Instance(self)

def instanciate_class(self):
    """return Instance of ClassDef node, else return self"""
    # Deprecated misspelled alias kept for backwards compatibility.
    util.rename_warning('instanciate_class()', 2.0, 'instantiate_class()')
    return self.instantiate_class()
def getattr(self, name, context=None):
    """this method doesn't look in the instance_attrs dictionary since it's
    done by an Instance proxy at inference time.

    It may return a YES object if the attribute has not been actually
    found but a __getattr__ or __getattribute__ method is defined
    """
    values = self._locals.get(name, [])
    if name in self.special_attributes:
        # Synthesize nodes for the implicit class attributes.
        if name == '__module__':
            return [node_classes.const_factory(self.root().qname())] + values
        if name == '__bases__':
            node = node_classes.Tuple()
            elts = list(self._inferred_bases(context))
            node.elts = elts
            return [node] + values
        if name == '__mro__' and self.newstyle:
            mro = self.mro()
            node = node_classes.Tuple()
            node.elts = mro
            return [node]
        return std_special_attributes(self, name)
    # don't modify the list in self._locals!
    values = list(values)
    for classnode in self.ancestors(recurs=True, context=context):
        values += classnode._locals.get(name, [])
    if not values:
        raise exceptions.NotFoundError(name)
    return values

def igetattr(self, name, context=None):
    """inferred getattr, need special treatment in class to handle
    descriptors
    """
    # set lookup name since this is necessary to infer on import nodes for
    # instance
    context = contextmod.copy_context(context)
    context.lookupname = name
    try:
        for inferred in bases._infer_stmts(self.getattr(name, context),
                                           context, frame=self):
            # yield YES object instead of descriptors when necessary
            if (not isinstance(inferred, node_classes.Const)
                    and isinstance(inferred, bases.Instance)):
                try:
                    inferred._proxied.getattr('__get__', context)
                except exceptions.NotFoundError:
                    yield inferred
                else:
                    # The instance defines __get__: it is a descriptor,
                    # punt with YES rather than guessing its value.
                    yield util.YES
            else:
                yield function_to_method(inferred, self)
    except exceptions.NotFoundError:
        if not name.startswith('__') and self.has_dynamic_getattr(context):
            # class handle some dynamic attributes, return a YES object
            yield util.YES
        else:
            raise exceptions.InferenceError(name)
def has_dynamic_getattr(self, context=None):
    """Return True when the class supplies its own __getattr__ or
    __getattribute__.

    Implementations coming from builtins or from extension modules do
    not count.
    """
    def _valid_getattr(node):
        root = node.root()
        return root.name != BUILTINS and getattr(root, 'pure_python', None)

    for method_name in ('__getattr__', '__getattribute__'):
        try:
            return _valid_getattr(self.getattr(method_name, context)[0])
        except exceptions.NotFoundError:
            # Missing here; try the next hook (or give up).
            # (checking self.newstyle first caused infinite recursion)
            continue
    return False
def methods(self):
    """Iterate over all methods defined on this class and its ancestors.

    A method defined lower in the hierarchy shadows same-named methods
    defined above it.
    """
    seen = set()
    for klass in itertools.chain((self,), self.ancestors()):
        for meth in klass.mymethods():
            if meth.name in seen:
                continue
            seen.add(meth.name)
            yield meth
def mymethods(self):
    """Iterate over the methods defined directly on this class."""
    return (member for member in self.values()
            if isinstance(member, FunctionDef))
def implicit_metaclass(self):
    """Return the implicit metaclass of this class.

    New-style classes implicitly have ``builtins.type`` as metaclass;
    old-style classes have none, so None is returned for them.
    """
    if not self.newstyle:
        return None
    return builtin_lookup('type')[1][0]
# Explicit metaclass node; set by the Py3k tree rebuilder or borrowed from
# a hidden with_metaclass() temporary class. None when none is declared.
_metaclass = None

def _explicit_metaclass(self):
    """ Return the explicit defined metaclass
    for the current class.

    An explicit defined metaclass is defined
    either by passing the ``metaclass`` keyword argument
    in the class definition line (Python 3) or (Python 2) by
    having a ``__metaclass__`` class attribute, or if there are
    no explicit bases but there is a global ``__metaclass__`` variable.
    """
    for base in self.bases:
        try:
            for baseobj in base.infer():
                if isinstance(baseobj, ClassDef) and baseobj.hide:
                    # Borrow the metaclass stored on a hidden
                    # with_metaclass() temporary class.
                    self._metaclass = baseobj._metaclass
                    self._metaclass_hack = True
                    break
        except exceptions.InferenceError:
            pass
    if self._metaclass:
        # Expects this from Py3k TreeRebuilder
        try:
            return next(node for node in self._metaclass.infer()
                        if node is not util.YES)
        except (exceptions.InferenceError, StopIteration):
            return None
    if six.PY3:
        return None
    if '__metaclass__' in self._locals:
        assignment = self._locals['__metaclass__'][-1]
    elif self.bases:
        return None
    elif '__metaclass__' in self.root()._locals:
        # Module-level __metaclass__ applies only when assigned before
        # this class definition.
        assignments = [ass for ass in self.root()._locals['__metaclass__']
                       if ass.lineno < self.lineno]
        if not assignments:
            return None
        assignment = assignments[-1]
    else:
        return None
    try:
        inferred = next(assignment.infer())
    except exceptions.InferenceError:
        return
    if inferred is util.YES:  # don't expose this
        return None
    return inferred
def _find_metaclass(self, seen=None):
    """Return the first explicit metaclass on this class or its ancestors.

    *seen* guards against revisiting classes in cyclic hierarchies.
    """
    if seen is None:
        seen = set()
    seen.add(self)
    klass = self._explicit_metaclass()
    if klass is not None:
        return klass
    for parent in self.ancestors():
        if parent in seen:
            continue
        klass = parent._find_metaclass(seen)
        if klass is not None:
            return klass
    return klass
def metaclass(self):
    """Return the metaclass of this class.

    When the class declares no explicit metaclass, the first metaclass
    found among the ancestors is used instead.
    """
    return self._find_metaclass()
def has_metaclass_hack(self):
    """Return True when the metaclass came from a hidden with_metaclass()
    temporary base rather than an explicit declaration."""
    return self._metaclass_hack
def _islots(self):
    """Yield the inferred slot nodes declared by this class's __slots__.

    Yields nothing when no __slots__ is defined or nothing useful can be
    inferred.  When __slots__ exists but is empty, the empty container is
    delivered as the generator's *return value* so callers such as
    _slots() can distinguish "empty slots" from "no slots".
    """
    if '__slots__' not in self._locals:
        return None
    for slots in self.igetattr('__slots__'):
        # check if __slots__ is a valid type
        for meth in ITER_METHODS:
            try:
                slots.getattr(meth)
                break
            except exceptions.NotFoundError:
                continue
        else:
            # Not iterable at all: this candidate cannot be __slots__.
            continue
        if isinstance(slots, node_classes.Const):
            # a string. Ignore the following checks,
            # but yield the node, only if it has a value
            if slots.value:
                yield slots
            continue
        if not hasattr(slots, 'itered'):
            # we can't obtain the values, maybe a .deque?
            continue
        if isinstance(slots, node_classes.Dict):
            values = [item[0] for item in slots.items]
        else:
            values = slots.itered()
        if values is util.YES:
            continue
        if not values:
            # Explicitly empty __slots__: end the iteration, surfacing the
            # empty container through the implicit StopIteration value.
            # (Previously this was `raise StopIteration(values)`, which
            # PEP 479 turns into a RuntimeError on Python 3.7+.)
            return values
        for elt in values:
            try:
                for inferred in elt.infer():
                    if inferred is util.YES:
                        continue
                    if (not isinstance(inferred, node_classes.Const) or
                            not isinstance(inferred.value,
                                           six.string_types)):
                        continue
                    if not inferred.value:
                        continue
                    yield inferred
            except exceptions.InferenceError:
                continue
def _slots(self):
    # Materialize the inferred slots: None means "no __slots__ defined or
    # nothing inferable"; an explicitly empty __slots__ is surfaced via
    # the StopIteration arguments coming from _islots().
    if not self.newstyle:
        raise NotImplementedError(
            "The concept of slots is undefined for old-style classes.")
    slots = self._islots()
    try:
        first = next(slots)
    except StopIteration as exc:
        # The class doesn't have a __slots__ definition or empty slots.
        if exc.args and exc.args[0] not in ('', None):
            return exc.args[0]
        return None
    # pylint: disable=unsupported-binary-operation; false positive
    return [first] + list(slots)
# Cached, because inferring them all the time is expensive
@decorators_mod.cached
def slots(self):
    """Get all the slots for this node.

    If the class doesn't define any slot, through `__slots__`
    variable, then this function will return a None.
    Also, it will return None in the case the slots weren't inferred.
    Otherwise, it will return a list of slot names.
    """
    def grouped_slots():
        # Not interested in object, since it can't have slots.
        for cls in self.mro()[:-1]:
            try:
                cls_slots = cls._slots()
            except NotImplementedError:
                continue
            if cls_slots is not None:
                for slot in cls_slots:
                    yield slot
            else:
                # A class without __slots__ poisons the whole result.
                yield None
    if not self.newstyle:
        raise NotImplementedError(
            "The concept of slots is undefined for old-style classes.")
    slots = list(grouped_slots())
    if not all(slot is not None for slot in slots):
        # Some class in the MRO lacked __slots__, so instances still get
        # a __dict__: report "no slots".
        return None
    return sorted(slots, key=lambda item: item.value)
def _inferred_bases(self, context=None):
    # TODO(cpopa): really similar with .ancestors,
    # but the difference is when one base is inferred,
    # only the first object is wanted. That's because
    # we aren't interested in superclasses, as in the following
    # example:
    #
    #     class SomeSuperClass(object): pass
    #     class SomeClass(SomeSuperClass): pass
    #     class Test(SomeClass): pass
    #
    # Inferring SomeClass from the Test's bases will give
    # us both SomeClass and SomeSuperClass, but we are interested
    # only in SomeClass.
    if context is None:
        context = contextmod.InferenceContext()
    if six.PY3:
        if not self.bases and self.qname() != 'builtins.object':
            # Python 3 classes implicitly inherit from object.
            yield builtin_lookup("object")[1][0]
            return
    for stmt in self.bases:
        try:
            baseobj = next(stmt.infer(context=context))
        except exceptions.InferenceError:
            continue
        if isinstance(baseobj, bases.Instance):
            baseobj = baseobj._proxied
        if not isinstance(baseobj, ClassDef):
            continue
        if not baseobj.hide:
            yield baseobj
        else:
            # Hidden with_metaclass() helper: surface its bases instead.
            for base in baseobj.bases:
                yield base

def mro(self, context=None):
    """Get the method resolution order, using C3 linearization.

    It returns the list of ancestors sorted by the mro.
    This will raise `NotImplementedError` for old-style classes, since
    they don't have the concept of MRO.
    """
    if not self.newstyle:
        raise NotImplementedError(
            "Could not obtain mro for old-style classes.")
    bases = list(self._inferred_bases(context=context))
    bases_mro = []
    for base in bases:
        try:
            mro = base.mro(context=context)
            bases_mro.append(mro)
        except NotImplementedError:
            # Some classes have in their ancestors both newstyle and
            # old style classes. For these we can't retrieve the .mro,
            # although in Python it's possible, since the class we are
            # currently working is in fact new style.
            # So, we fallback to ancestors here.
            ancestors = list(base.ancestors(context=context))
            bases_mro.append(ancestors)
    unmerged_mro = ([[self]] + bases_mro + [bases])
    _verify_duplicates_mro(unmerged_mro)
    return _c3_merge(unmerged_mro)
def get_locals(node):
    """Forwards-compatibility stub: return *node*'s dictionary of local names."""
    return node._locals
def get_attributes(node):
    """Forwards-compatibility stub: return *node*'s instance attributes dict."""
    return node._instance_attrs
# Backwards-compatibility aliases
# These proxy the pre-2.0 node names onto their renamed counterparts so
# code written against the old API keeps working.
Class = node_classes.proxy_alias('Class', ClassDef)
Function = node_classes.proxy_alias('Function', FunctionDef)
GenExpr = node_classes.proxy_alias('GenExpr', GeneratorExp)
| StarcoderdataPython |
1764060 | import click
from testplan.cli.converter import convert
@click.group()
def cli():
    """Root command group for the testplan command-line interface."""
    pass

# Register the available subcommands on the group.
cli.add_command(convert)

if __name__ == "__main__":
    cli()
| StarcoderdataPython |
3325224 | <filename>program.py
import json
import jsonpatch
import traceback
from adh_sample_library_preview import (ADHClient, Role, RoleScope, Trustee, TrusteeType, User, UserInvitation, AccessControlList,
AccessControlEntry, AccessType, CommonAccessRightsEnum, SdsType, SdsTypeProperty, SdsTypeCode, SdsStream)
custom_role_name = 'custom role - security management sample'
def get_appsettings():
    """Open and parse the appsettings.json file.

    Returns:
        dict: The parsed configuration.

    Exits the interpreter with a message when the file is missing or
    cannot be parsed.
    """
    # Try to open the configuration file
    try:
        # Specify the encoding explicitly so parsing does not depend on
        # the platform's default encoding.
        with open('appsettings.json', 'r', encoding='utf-8') as f:
            appsettings = json.load(f)
    except Exception as error:
        print(f'Error: {str(error)}')
        # Plain string: the previous f-prefix had no placeholders.
        print('Could not open/read appsettings.json')
        exit()
    return appsettings
def get_tenant_member_role_id(client: ADHClient):
    """Return the Id of the first role whose type is Tenant Member.

    Returns None when no such role exists.
    """
    all_roles = client.Roles.getRoles()
    tenant_member_type_id = client.Roles.TenantMemberRoleTypeId
    for role in all_roles:
        if role.RoleTypeId == tenant_member_type_id:
            return role.Id
def main(test=False):
    """Run the ADH security-management sample end to end.

    When *test* is truthy, created/modified objects are returned so a
    test harness can clean them up afterwards.
    """
    # NOTE(review): custom_role_name is only read here, never reassigned,
    # so this global statement appears unnecessary - confirm before removing.
    global custom_role_name
    try:
        print('Sample starting...')
        # Read appsettings and create a client
        appsettings = get_appsettings()
        tenant_id = appsettings.get('TenantId')
        namespace_id = appsettings.get('NamespaceId')
        contact_given_name = appsettings.get('ContactGivenName')
        contact_surname = appsettings.get('ContactSurname')
        contact_email = appsettings.get('ContactEmail')
        client = ADHClient(appsettings.get('ApiVersion'),
                           appsettings.get('TenantId'),
                           appsettings.get('Resource'),
                           appsettings.get('ClientId'),
                           appsettings.get('ClientSecret'))

        # Step 1 - Create a role
        print('Creating a role')
        custom_role = Role(name=custom_role_name,
                           role_scope=RoleScope.Tenant, tenant_id=tenant_id)
        custom_role = client.Roles.createRole(custom_role)

        # Step 2 - Create a user and invite them
        print('Creating a user and invite them')
        user = User(contact_given_name=contact_given_name, contact_surname=contact_surname, contact_email=contact_email,
                    identity_provider_id=client.Users.MicrosoftIdentityProviderId, role_ids=[custom_role.Id])
        # The user must also carry the built-in Tenant Member role.
        user.RoleIds.append(get_tenant_member_role_id(client))
        user = client.Users.createUser(user)
        invitation = UserInvitation(send_invitation=True)
        client.Users.createOrUpdateInvitation(user.Id, invitation)

        # Step 3 - Create a type
        print('Creating a type')
        date_time_type = SdsType('DateTimeType', SdsTypeCode.DateTime)
        int_type = SdsType('IntType', SdsTypeCode.Int32)
        date_time_property = SdsTypeProperty('DateTime', True, date_time_type)
        int_property = SdsTypeProperty('Value', False, int_type)
        example_type = SdsType('example_type-security_management_sample', SdsTypeCode.Object, [
            date_time_property, int_property], 'This is a type example.')
        example_type = client.Types.getOrCreateType(namespace_id, example_type)

        # Step 4 - Create a stream
        print('Creating a stream')
        example_stream = SdsStream(
            'example_stream-security_management_sample', example_type.Id)
        example_stream = client.Streams.getOrCreateStream(
            namespace_id, example_stream)

        # Step 5 - Add a custom role to example type, example stream, and streams collection ACL using PUT
        print('Adding custom role to example type, example stream, and streams collection access control lists using PUT')
        trustee = Trustee(TrusteeType.Role, tenant_id, custom_role.Id)
        entry = AccessControlEntry(trustee, AccessType.Allowed,
                                   CommonAccessRightsEnum.Read | CommonAccessRightsEnum.Write)
        type_acl = client.Types.getAccessControl(
            namespace_id, example_type.Id)
        type_acl.RoleTrusteeAccessControlEntries.append(entry)
        client.Types.updateAccessControl(
            namespace_id, example_type.Id, type_acl)
        stream_acl = client.Streams.getAccessControl(
            namespace_id, example_stream.Id)
        stream_acl.RoleTrusteeAccessControlEntries.append(entry)
        client.Streams.updateAccessControl(
            namespace_id, example_stream.Id, stream_acl)

        # The access control list (ACL) of the Streams collection is modified in this step
        # The collection ACL is used as a default for all new items in a collection, so any new stream created will have this ACL
        # In addition, it governs who has access to a collection and who can make new collection items (such as new streams)
        streams_acl = client.Streams.getDefaultAccessControl(namespace_id)
        streams_acl.RoleTrusteeAccessControlEntries.append(entry)
        client.Streams.updateDefaultAccessControl(namespace_id, streams_acl)

        # Step 6 - Add a role from the example stream ACL using PATCH
        print('Adding a role from the example stream access control list using PATCH')
        patch = jsonpatch.JsonPatch(
            [{
                'op': 'add', 'path': '/RoleTrusteeAccessControlEntries/-',
                'value': {
                    'AccessRights': 0,
                    'AccessType': 'Allowed',
                    'Trustee': {'ObjectId': get_tenant_member_role_id(client), 'TenantId': tenant_id, 'Type': 'Role'}
                }
            }])
        client.Streams.patchAccessControl(
            namespace_id, example_stream.Id, patch)

        # Step 7 - Change owner of example stream
        print('Changing owner of example stream')
        stream_owner = client.Streams.getOwner(namespace_id, example_stream.Id)
        stream_owner.ObjectId = user.Id
        stream_owner.Type = TrusteeType.User
        client.Streams.updateOwner(
            namespace_id, example_stream.Id, stream_owner)

        # Step 8 - Retrieve the access rights of the example stream
        print('Retrieving the access rights of the example stream')
        access_rights = client.Streams.getAccessRights(
            namespace_id, example_stream.Id)
        for access_right in access_rights:
            print(access_right.name)

    except Exception as error:
        print((f'Encountered Error: {error}'))
        print()
        traceback.print_exc()
        print()
        if test:
            raise error

    finally:
        # NOTE(review): a `return` inside `finally` swallows any in-flight
        # exception, including the `raise error` just above, and the names
        # below are unbound (NameError) when the failure happened before
        # they were assigned - confirm this is intended test-mode behaviour.
        if test:
            return user, stream_owner, custom_role, stream_acl, streams_acl, type_acl
        print('Complete!')
if __name__ == '__main__':
    # Run the sample when executed directly (not imported by a test).
    main()
| StarcoderdataPython |
3372327 | <gh_stars>0
from NetworkModeCTE import *
from PacketManager import *
# Build a control packet: source node 2, next hop node 3, final
# destination node 7, first sequence number, no acknowledgement flag.
src = 2
dest1 = 3
is_ack = PACKET_FIELD_ISACK_NO_ACK
SN = PACKET_FIELD_SN_FIRST
dest2 = 7
packet_type = NETWORK_PACKET_TYPE_CONTROL
# Zero-filled payload whose last byte carries the control "reply yes" value.
pl = bytearray(NETWORK_PAYLOAD_SIZE)
pl[-1] = NETWORK_PACKET_CONTROL_REPLY_YES_PAYLOAD
packet = create_packet(src, dest1, is_ack, SN, dest2, packet_type, pl)
# Round-trip: decode the packet that was just encoded and print the fields.
src_rx, dest1_rx, is_ack_rx, SN_rx, dest2_rx, packet_type_rx, pl_rx = decode_packet(
    packet)
print(src_rx, dest1_rx, is_ack_rx, SN_rx, dest2_rx, packet_type_rx, pl_rx)
# Sanity check that the local payload byte still holds the reply value.
print(NETWORK_PACKET_CONTROL_REPLY_YES_PAYLOAD == pl[-1])
92373 | from .binarytrees import *
| StarcoderdataPython |
1711098 | from __future__ import annotations
__all__ = ["Bind"]
from dataclasses import dataclass
from typing import TYPE_CHECKING, Dict, Generic, List, Optional, TypeVar, Union, cast
from funchacks.errors import TemporaryError
if TYPE_CHECKING:
from funchacks.sig.impl import ArgdefSignature
from funchacks.typehints import AnyCallableT
_T = TypeVar("_T")
VarnameT = TypeVar("VarnameT")
ValueT_co = TypeVar("ValueT_co", covariant=True)
@dataclass
class Bind(Generic[VarnameT, ValueT_co]):
sig: ArgdefSignature
initial: Dict[VarnameT, ValueT_co]
@classmethod
def from_locals(
cls, initial: Dict[VarnameT, ValueT_co], *, in_: AnyCallableT
) -> Bind[VarnameT, ValueT_co]:
sig: Optional[ArgdefSignature] = getattr(in_, "__sig__", None)
if sig is None:
raise AttributeError(f"Cannot find __sig__ attribute in {in_}")
if sig.posonlycount > 0:
raise TemporaryError(1.1, future="posonly args")
if sig.kwonlycount > 0:
raise TemporaryError(1.1, future="kwonly args")
return cls(
sig=getattr(in_, "__sig__"),
initial=initial,
)
def args(self) -> List[VarnameT]:
sig = self.sig
return cast(
List[VarnameT],
sig.argnames[sig.posonlycount : (len(sig.argnames) - (len(sig.defaults) + sig.kwonlycount))],
)
def kwargs(self) -> List[VarnameT]:
sig = self.sig
return cast(
List[VarnameT],
sig.argnames[sig.argcount - len(sig.defaults) :],
)
def posonly(self) -> List[VarnameT]:
"""
!!! Note:
This future doesn't supports yet, but will be
implemented in next versions.
"""
sig = self.sig
return cast(List[VarnameT], sig.argnames[: sig.posonlycount])
def kwonly(self) -> List[VarnameT]:
"""
!!! Note:
This future doesn't supports yet, but will be
implemented in next versions.
"""
sig = self.sig
return cast(
List[VarnameT],
sig.argnames[: -sig.kwonlycount],
)
def get(
self, name: VarnameT, default: Optional[Union[ValueT_co, _T]] = None
) -> Optional[Union[ValueT_co, _T]]:
return self.initial.get(name, default)
| StarcoderdataPython |
3337041 | """
Definition of the :class:`PrivateDataElement` class, representing a single "UN"
data element.
"""
from types import FunctionType
from typing import Any
from dicom_parser.data_element import DataElement
from dicom_parser.utils.siemens.private_tags import (
parse_siemens_b_matrix,
parse_siemens_bandwith_per_pixel_phase_encode,
parse_siemens_csa_header,
parse_siemens_gradient_direction,
parse_siemens_number_of_slices_in_mosaic,
parse_siemens_slice_timing,
)
from dicom_parser.utils.value_representation import ValueRepresentation
from pydicom.dataelem import DataElement as PydicomDataElement
#: A dictionary matching private data elements to their appropriate parsing
#: method.
TAG_TO_DEFINITION = {
    # Siemens private tags in group 0019 (diffusion / mosaic related).
    ("0019", "100a"): {"method": parse_siemens_number_of_slices_in_mosaic},
    ("0019", "100b"): {"method": float},
    ("0019", "100c"): {"method": int},
    ("0019", "100e"): {"method": parse_siemens_gradient_direction},
    ("0019", "1027"): {"method": parse_siemens_b_matrix},
    ("0019", "1028"): {
        "method": parse_siemens_bandwith_per_pixel_phase_encode
    },
    ("0019", "1029"): {"method": parse_siemens_slice_timing},
    # Siemens CSA headers in group 0029.
    ("0029", "1010"): {"method": parse_siemens_csa_header},
    ("0029", "1020"): {"method": parse_siemens_csa_header},
}
class PrivateDataElement(DataElement):
    """A single private ("UN" VR) data element.

    Known Siemens private tags are resolved through TAG_TO_DEFINITION so
    that their raw bytes can be parsed into meaningful values.
    """

    #: The VR value of data elements represented by this class.
    VALUE_REPRESENTATION = ValueRepresentation.UN

    def __init__(self, raw: PydicomDataElement):
        """Initialize a new instance from pydicom's element representation.

        Parameters
        ----------
        raw : PydicomDataElement
            pydicom's representation of this data element
        """
        super().__init__(raw)
        self.definition = TAG_TO_DEFINITION.get(self.tag, {})
        self.update_from_definition()

    def update_from_definition(self) -> None:
        """Override this element's value representation from a custom
        definition, falling back to the class default.
        """
        default_vr = self.VALUE_REPRESENTATION
        self.value_representation = self.definition.get(
            "value_representation", default_vr
        )

    def parse_value(self, value: bytes) -> Any:
        """Parse a private data element value.

        A tag-specific parser is preferred when one is defined; otherwise
        byte values are decoded as text when possible, and anything else
        is returned unchanged.

        Parameters
        ----------
        value : bytes
            Raw private data element value

        Returns
        -------
        Any
            Parsed private data element value
        """
        parser: FunctionType = self.definition.get("method")
        if parser:
            return parser(self.raw.value)
        if isinstance(self.raw.value, bytes):
            try:
                return self.raw.value.decode().strip()
            except UnicodeDecodeError:
                pass
        return self.raw.value
| StarcoderdataPython |
1683089 | <filename>BlackVision/Dep/3rdParty/glad/glad/lang/c/loader/wgl.py
from glad.lang.common.loader import BaseLoader
from glad.lang.c.loader import LOAD_OPENGL_DLL, LOAD_OPENGL_DLL_H, LOAD_OPENGL_GLAPI_H
_WGL_LOADER = \
LOAD_OPENGL_DLL % {'pre':'static', 'init':'open_gl',
'proc':'get_proc', 'terminate':'close_gl'} + '''
int gladLoadWGL(HDC hdc) {
int status = 0;
if(open_gl()) {
status = gladLoadWGLLoader((GLADloadproc)get_proc, hdc);
close_gl();
}
return status;
}
'''
_WGL_HEADER = '''
#ifndef WINAPI
# ifndef WIN32_LEAN_AND_MEAN
# define WIN32_LEAN_AND_MEAN 1
# endif
# include <windows.h>
#endif
#include <glad/glad.h>
#ifndef __glad_wglext_h_
#ifdef __wglext_h_
#error WGL header already included, remove this include, glad already provides it
#endif
#define __glad_wglext_h_
#define __wglext_h_
#ifndef APIENTRY
#define APIENTRY
#endif
#ifndef APIENTRYP
#define APIENTRYP APIENTRY *
#endif
#ifdef __cplusplus
extern "C" {
#endif
typedef void* (* GLADloadproc)(const char *name);
''' + LOAD_OPENGL_GLAPI_H
_WGL_HEADER_LOADER = '''
GLAPI int gladLoadWGL(HDC hdc);
''' + LOAD_OPENGL_DLL_H
_WGL_HEADER_END = '''
#ifdef __cplusplus
}
#endif
#endif
'''
_WGL_HAS_EXT = '''
static HDC GLADWGLhdc = (HDC)INVALID_HANDLE_VALUE;
static void get_exts(void) {
}
static int has_ext(const char *ext) {
const char *terminator;
const char *loc;
const char *extensions;
if(wglGetExtensionsStringEXT == NULL && wglGetExtensionsStringARB == NULL)
return 0;
if(wglGetExtensionsStringARB == NULL || GLADWGLhdc == INVALID_HANDLE_VALUE)
extensions = wglGetExtensionsStringEXT();
else
extensions = wglGetExtensionsStringARB(GLADWGLhdc);
if(extensions == NULL || ext == NULL)
return 0;
while(1) {
loc = strstr(extensions, ext);
if(loc == NULL)
break;
terminator = loc + strlen(ext);
if((loc == extensions || *(loc - 1) == ' ') &&
(*terminator == ' ' || *terminator == '\\0'))
{
return 1;
}
extensions = terminator;
}
return 0;
}
'''
class WGLCLoader(BaseLoader):
    """Emits the C loader source/header fragments for the WGL API."""

    def write(self, fobj):
        # The runtime gladLoadWGL() implementation is omitted when loader
        # generation is disabled (user supplies their own loader).
        if not self.disabled:
            fobj.write(_WGL_LOADER)

    def write_begin_load(self, fobj):
        # Resolve the two extension-string entry points first; without at
        # least one of them no extension detection is possible.
        fobj.write('\twglGetExtensionsStringARB = (PFNWGLGETEXTENSIONSSTRINGARBPROC)load("wglGetExtensionsStringARB");\n')
        fobj.write('\twglGetExtensionsStringEXT = (PFNWGLGETEXTENSIONSSTRINGEXTPROC)load("wglGetExtensionsStringEXT");\n')
        fobj.write('\tif(wglGetExtensionsStringARB == NULL && wglGetExtensionsStringEXT == NULL) return 0;\n')

    def write_end_load(self, fobj):
        fobj.write('\treturn 1;\n')

    def write_find_core(self, fobj):
        # Stash the HDC so has_ext() can use the ARB extension query.
        fobj.write('\tGLADWGLhdc = hdc;\n')

    def write_has_ext(self, fobj):
        fobj.write(_WGL_HAS_EXT)

    def write_header(self, fobj):
        fobj.write(_WGL_HEADER)
        if not self.disabled:
            fobj.write(_WGL_HEADER_LOADER)

    def write_header_end(self, fobj):
        fobj.write(_WGL_HEADER_END)
| StarcoderdataPython |
28399 | <filename>game.py
from Enemy.bosses import *
from Enemy.desert_enemies import *
from Enemy.field_enemies import *
from Enemy.graveyard_enemies import *
from Enemy.magic_enemies import *
from Enemy.moon_enemies import *
from Enemy.winter_enemies import *
from Enemy.fire_enemies import *
from menu import VerticalMenu
from Buildings.archer import ArcherTower
from Buildings.support import DamageTower, RangeTower, StoneTower
from button import PlayGameBtn, MusicBtn, PlayPauseBtn
import random
import pygame
import time
# Setup/initialization: window, assets, music, enemy path and wave table.
bg = pygame.image.load("Game/Backgrounds/graveyard_bg.png")
bg = pygame.transform.scale(bg, (1350, 700))
width = bg.get_width()
height = bg.get_height()
pygame.init()
pygame.font.init()
win = pygame.display.set_mode((width, height))
pygame.display.set_caption("Tower Defense Game")

# Music
music = pygame.mixer_music.load('Game/music3.mp3')
pygame.mixer_music.play(-1)  # -1 = loop forever
play = pygame.image.load("Game/Utils/button_sound.png")
play = pygame.transform.scale(play, (100, 100))
pause = pygame.image.load("Game/Utils/button_sound_off.png")
pause = pygame.transform.scale(pause, (100, 100))

# Play Pause Btn
play2 = pygame.image.load("Game/Utils/button_start.png")
play2 = pygame.transform.scale(play2, (100, 100))
pause2 = pygame.image.load("Game/Utils/button_pause.png")
pause2 = pygame.transform.scale(pause2, (100, 100))

# Archer/support tower set up (shop icons and side-menu background)
buy_archer = pygame.transform.scale(pygame.image.load(os.path.join("Game/Shop", "ico_7.png")).
                                    convert_alpha(), (75, 75))
buy_damage = pygame.transform.scale(pygame.image.load(os.path.join("Game/Shop", "ico_4.png")).
                                    convert_alpha(), (75, 75))
buy_range = pygame.transform.scale(pygame.image.load(os.path.join("Game/Buildings", "14.png")).
                                   convert_alpha(), (75, 75))
buy_stone = pygame.transform.scale(pygame.image.load(os.path.join("Game/Shop", "ico_9.png")).convert_alpha(), (75, 75))
attack_tower_names = ["archer"]
support_tower_names = ["range", "damage", "stone"]
side_img = pygame.transform.scale(pygame.image.load(os.path.join("Game/Shop/", "window_1.png"))
                                  .convert_alpha(), (120, 500))

# Clock
clock = pygame.time.Clock()

# Path for enemies: waypoints in screen coordinates, traversed in order.
path = [(-10, 477),
        (0, 477), (171, 481), (315, 528), (464, 529), (631, 532), (802, 532), (846, 358), (673, 323), (533, 301),
        (513, 209),
        (491, 118), (661, 102), (826, 99), (1003, 100), (1179, 97), (1346, 96)]

# Waves in Skeleton, Monster, Bat, Goblin, SnowMan, Knight, MaskedMan, Yeti, Tree, Golem, Guard, SuperBoss (12 enemies)
# Each inner list gives how many of each enemy type spawn in that wave.
waves = [  # 30 waves + 3 bonus rounds
    [0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 2],  # for testing
    [20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],  # wave 1
    [30, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0],  # wave 2
    [30, 20, 10, 0, 0, 0, 0, 0, 0, 0, 0],  # wave 3
    [50, 40, 20, 5, 0, 0, 0, 0, 0, 0, 0],  # wave 4
    [100, 20, 20, 20, 0, 0, 0, 0, 0, 0, 0],  # wave 5
    [0, 0, 0, 0, 30, 0, 0, 1, 0, 0, 0],  # wave 6 (winter special)
    [100, 40, 30, 20, 10, 2, 0, 0, 0, 0, 0],  # wave 7
    [100, 100, 50, 50, 30, 10, 0, 0, 0, 0, 0],  # wave 8
    [100, 100, 75, 75, 40, 20, 5, 0, 0, 0, 0],  # wave 9
    [0, 0, 0, 0, 0, 0, 0, 10, 10, 7, 7],  # wave 10 (boss round)
    [150, 100, 100, 100, 50, 50, 20, 0, 0, 0, 0],  # wave 11
    [150, 150, 150, 150, 40, 40, 40, 0, 0, 0, 0],  # wave 12
    [200, 200, 150, 150, 50, 50, 50, 0, 0, 0, 0],  # wave 13
    [200, 200, 150, 150, 50, 50, 50, 1, 1, 0, 0],  # wave 14
    [200, 200, 200, 200, 100, 75, 75, 2, 2, 1, 1],  # wave 15
    [200, 200, 200, 200, 100, 100, 100, 2, 2, 2, 2],  # wave 16
    [1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],  # wave 17
    [300, 200, 200, 200, 150, 150, 100, 3, 3, 2, 2],  # wave 18
    [300, 200, 200, 200, 200, 200, 150, 4, 4, 4, 4],  # wave 19
    [0, 0, 0, 0, 0, 0, 0, 12, 12, 10, 10],  # wave 20 (boss round)
    [400, 300, 300, 300, 300, 300, 200, 5, 5, 5, 5],  # wave 21
    [400, 300, 300, 300, 300, 300, 300, 0, 0, 0, 0],  # wave 22
    [1300, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],  # wave 23
    [500, 300, 300, 300, 300, 300, 300, 5, 5, 5, 5],  # wave 24
    [100, 300, 300, 300, 300, 300, 300, 10, 10, 7, 7],  # wave 25
    [500, 400, 400, 400, 400, 400, 400, 7, 7, 7, 7],  # wave 26
    [0, 1300, 100, 0, 0, 0, 0, 0, 0, 0, 0],  # wave 27
    [600, 500, 500, 500, 500, 500, 500, 6, 6, 6, 6],  # wave 28
    [1700, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],  # wave 29
    [0, 0, 0, 0, 0, 0, 0, 50, 50, 50, 50],  # wave 30 (last before bonus)
    [2000, 0, 100, 0, 0, 0, 0, 0, 0, 0, 0],  # bonus 1
    [700, 500, 500, 500, 500, 500, 500, 10, 10, 10, 10],  # bonus 2
    [0, 0, 0, 0, 0, 0, 0, 100, 100, 100, 100, 1],  # bonus 3
    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5],  # bonus 4
    [0, 0, 0, 0, 0, 0, 0, 20, 20, 20, 20, 5]
]
def point_to_line(tower):
    """Placement check stub: currently every position is accepted.

    Intended to reject placements too close to the enemy path; the distance
    test is not implemented yet, so the result is always truthy.

    :param tower: Tower object being placed
    :return: tuple of (True, tower)
    """
    return (True, tower)
class MainLoop:
    """Owns all game state (money, lives, waves, towers, enemies) and runs
    the start screen, the main game loop and the win/lose end screens."""

    def __init__(self):
        self.clicks = []
        self.running = True
        self.in_start = True
        self.money = 3000
        self.lives = 30
        self.wave = 1
        self.enemies = []
        self.path = path
        self.bg = bg
        self.timer = time.time()
        self.paused = True
        self.selected_tower = None
        self.attack_towers = []
        self.support_towers = []
        self.menu = VerticalMenu(width - side_img.get_width() + 70, 250, side_img)
        self.menu.add_btn(buy_archer, "buy_archer", 500)
        self.menu.add_btn(buy_damage, "buy_damage", 1000)
        self.menu.add_btn(buy_range, "buy_range", 1000)
        self.menu.add_btn(buy_stone, "buy_stone", 1500)
        self.moving_object = None
        # Copy the wave so spawning does not mutate the module-level template.
        self.curr_wave = waves[self.wave][:]
        self.pl_pa_btn = PlayPauseBtn(play2, pause2, 120, 600)
        self.music_btn = MusicBtn(play, pause, 20, 600)
        self.music = True
        self.font = pygame.font.SysFont("comicsans", 40, bold=True)
        self.msg_mode = False
        self.msg_clicked = False
        self.lose = False
        self.win = False

    def start_screen(self):
        """
        Start screen for tower defense game. Shown until the player clicks,
        then hands control to main().
        :return: None
        """
        while self.in_start:
            for ev in pygame.event.get():
                # FIX: compare the event's type, not the event object itself
                # (the original `ev == pygame.QUIT` was never true).
                if ev.type == pygame.QUIT:
                    pygame.quit()
                    break
            # Title
            logo = pygame.image.load("Game/Start Screen/logo2.png")
            win.blit(self.bg, (0, 0))
            win.blit(logo, (850 - width // 2, 400 - height // 2))
            # Show characters for a e s t h e t i c s
            en = pygame.image.load("Game/Enemies/Bosses/Golem/0_boss_run_000.png")
            win.blit(en, (450 - width // 2, 370 - height // 2))
            en2 = pygame.image.load("Game/Enemies/Bosses/Guard/0_boss_run_000.png")
            en2 = pygame.transform.flip(en2, True, False)
            win.blit(en2, (1150 - width // 2, 400 - height // 2))
            # Button to play
            btn1 = pygame.image.load("Game/Start Screen/button_play.png")
            start_screen_btn = PlayGameBtn(btn1, 1150 - width // 2, 720 - height // 2)
            start_screen_btn.draw(win)
            if pygame.mouse.get_pressed()[0] == 1:
                self.in_start = False
                self.msg_mode = True
                # FIX: do not call self.main() here as well as after the
                # loop — that started the game twice.
                break
            pygame.display.update()
        self.main()

    def add_tower(self, name):
        """Create the tower matching *name* at the mouse position and make it
        the currently-moving (being-placed) object."""
        x, y = pygame.mouse.get_pos()
        name_list = ["buy_archer", "buy_damage", "buy_range", "buy_stone"]
        object_list = [ArcherTower(x, y), DamageTower(x, y), RangeTower(x, y), StoneTower(x, y)]
        try:
            obj = object_list[name_list.index(name)]
            self.moving_object = obj
            obj.moving = True
        except Exception as err:
            print(f"[ERROR]: {str(err)}.")

    def enemy_wave(self):
        """
        Chooses the appropriate enemies to put on the screen.
        Spawns one enemy per call, consuming self.curr_wave; advances to the
        next wave (and pauses) once the wave and screen are both empty.
        :return: None
        """
        if sum(self.curr_wave) == 0:
            if len(self.enemies) == 0:
                self.wave += 1
                # FIX: take a copy — assigning waves[self.wave] directly made
                # the spawn loop below mutate the global wave template.
                self.curr_wave = waves[self.wave][:]
                self.paused = True
        else:
            wave_enemies = [Skeleton(), PurpleMonster(), Bat(), HammerGoblin(), SnowMan(), Knight(), MaskedMan(),
                            Yeti(), Tree(), Golem(), Guard(), SuperBoss()]
            for x in range(len(self.curr_wave)):
                if self.curr_wave[x] != 0:
                    self.enemies.append(wave_enemies[x])
                    self.curr_wave[x] = self.curr_wave[x] - 1
                    break

    def redraw_game_window(self):
        """
        Draws everything needed for the game onto the screen.
        :return: None
        """
        win.blit(self.bg, (0, 0))  # background
        # Buttons
        self.music_btn.draw(win)
        self.pl_pa_btn.draw(win)
        # draw placement rings
        if self.moving_object:
            for tower in self.attack_towers:
                tower.draw_placement(win)
            for tower in self.support_towers:
                tower.draw_placement(win)
            self.moving_object.draw_placement(win)
        # draw attack towers
        for tw in self.attack_towers:
            tw.draw(win)
        # draw support towers
        for tw in self.support_towers:
            tw.draw(win)
        # redraw selected tower
        if self.selected_tower:
            self.selected_tower.draw(win)
        # draw moving object
        if self.moving_object:
            self.moving_object.draw(win)
        # draw menu
        self.menu.draw(win)
        # Lives Left
        life = pygame.image.load("Game/Utils/heart.png")
        # FIX: pygame.transform.scale returns a new surface — the original
        # discarded the result, so the scaling was a no-op.
        life = pygame.transform.scale(life, (70, 70))
        lives = self.font.render(str(self.lives), 2, (255, 255, 255))
        win.blit(lives, (1300, 20))
        win.blit(life, (1260, 15))
        # Money Left
        money = pygame.image.load("Game/Utils/star.png")
        money = pygame.transform.scale(money, (70, 70))  # FIX: assign result
        money_text = self.font.render(str(self.money), 2, (255, 255, 255))
        win.blit(money_text, (1160, 20))
        win.blit(money, (1110, 15))
        # Wave Number
        background = pygame.image.load("Game/Utils/table_2.png")
        background = pygame.transform.scale(background, (150, 100))
        txt = self.font.render(f"Wave #{str(self.wave)}", 2, (0, 0, 0))
        win.blit(background, (10, 10))
        win.blit(txt, (16, 34))
        # for click in self.clicks:
        #     pygame.draw.circle(win, (255, 0, 0), click, 5, 0)
        # draws enemies
        for en in self.enemies:
            en.draw(win)
        pygame.display.update()

    def lose_screen(self):
        """
        Screen that appears if one has lost.
        :return: None
        """
        while self.lose:
            for ev in pygame.event.get():
                if ev.type == pygame.QUIT:
                    pygame.quit()
                    break
            # Title
            logo = pygame.image.load("Game/Start Screen/logo2.png")
            win.blit(self.bg, (0, 0))
            win.blit(logo, (850 - width // 2, 400 - height // 2))
            # You lose logo
            lose_img = pygame.image.load("Game/Utils/header_failed.png")
            win.blit(lose_img, (450, 370))
            # Best wave
            background = pygame.image.load("Game/Utils/table.png")
            background = pygame.transform.scale(background, (250, 100))
            txt = self.font.render(f"Best Wave #{str(self.wave)}", 2, (255, 255, 255))
            win.blit(background, (520, 570))
            win.blit(txt, (533, 606))
            pygame.display.update()
        pygame.quit()

    def win_screen(self):
        """
        If player completes all the levels.
        :return: None
        """
        while self.win:
            for ev in pygame.event.get():
                if ev.type == pygame.QUIT:
                    break
            # Title
            logo = pygame.image.load("Game/Start Screen/logo2.png")
            win.blit(self.bg, (0, 0))
            win.blit(logo, (850 - width // 2, 400 - height // 2))
            # You win logo
            lose_img = pygame.image.load("Game/Utils/header_win.png")
            win.blit(lose_img, (450, 370))
            # FIX: without this the win screen was rendered but never shown.
            pygame.display.update()
        pygame.quit()

    def main(self):
        """
        Main loop of the game.
        :return: None
        """
        while self.running:
            clock.tick(700)
            pos = pygame.mouse.get_pos()
            # check for moving object
            if self.moving_object:
                self.moving_object.move_tower(pos[0], pos[1])
                tower_list = self.attack_towers[:] + self.support_towers[:]
                collide = False
                for tower in tower_list:
                    if tower.collide_other_tower(self.moving_object):
                        collide = True
                        tower.place_color = (255, 0, 0, 100)
                        self.moving_object.place_color = (255, 0, 0, 100)
                    else:
                        tower.place_color = (0, 0, 255, 100)
                if not collide:
                    self.moving_object.place_color = (0, 0, 255, 100)
            # Music Button & Playing Music
            if pygame.mouse.get_pressed()[0] == 1 or pygame.mouse.get_pressed()[1] == 1 or \
                    pygame.mouse.get_pressed()[2] == 1:
                if self.music_btn.clicked(pos[0], pos[1]):
                    self.music = not self.music
                    self.music_btn.music = self.music
                    if self.music:
                        pygame.mixer_music.unpause()
                    else:
                        pygame.mixer_music.pause()
            # Main event loop
            for ev in pygame.event.get():
                if ev.type == pygame.QUIT:
                    pygame.quit()
                    break
                if ev.type == pygame.MOUSEBUTTONUP:
                    # if you're moving an object and click
                    if self.moving_object:
                        not_allowed = False
                        tower_list = self.attack_towers[:] + self.support_towers[:]
                        for tower in tower_list:
                            if tower.collide_other_tower(self.moving_object):
                                not_allowed = True
                        if not not_allowed and point_to_line(self.moving_object):
                            if self.moving_object.name in attack_tower_names:
                                self.attack_towers.append(self.moving_object)
                            elif self.moving_object.name in support_tower_names:
                                self.support_towers.append(self.moving_object)
                            self.moving_object.moving = False
                            self.moving_object = None
                    else:
                        # look if you click on side menu
                        side_menu_button = self.menu.get_clicked(pos[0], pos[1])
                        if side_menu_button:
                            cost = self.menu.get_item_cost(side_menu_button)
                            if self.money >= cost:
                                self.money -= cost
                                self.add_tower(side_menu_button)
                        # look if you clicked on attack tower or support tower
                        btn_clicked = None
                        if self.selected_tower:
                            btn_clicked = self.selected_tower.menu.get_clicked(pos[0], pos[1])
                            if btn_clicked:
                                cost = self.selected_tower.get_upgrade_cost()
                                if self.money >= cost:
                                    self.money -= cost
                                    self.selected_tower.upgrade()
                        if not btn_clicked:
                            for tw in self.attack_towers:
                                if tw.click(pos[0], pos[1]):
                                    tw.selected = True
                                    self.selected_tower = tw
                                else:
                                    tw.selected = False
                            # look if you clicked on support tower
                            for tw in self.support_towers:
                                if tw.click(pos[0], pos[1]):
                                    tw.selected = True
                                    self.selected_tower = tw
                                else:
                                    tw.selected = False
            # Play Pause
            if pygame.mouse.get_pressed()[0] == 1 or pygame.mouse.get_pressed()[1] == 1 or \
                    pygame.mouse.get_pressed()[2] == 1:
                if self.pl_pa_btn.clicked(pos[0], pos[1]):
                    self.paused = not self.paused
                    self.pl_pa_btn.paused = self.paused
            # If lose the game
            if self.lives <= 0:
                self.lose = True
                self.lives = 15
                self.money = 2000
                self.enemies = []
                self.support_towers = []
                self.attack_towers = []
                print("[END] You Lose, no more lives!")
                self.lose_screen()
            # If you beat the game
            if self.wave == 34:
                self.win = True
                self.lives = 15
                self.money = 2000
                self.enemies = []
                self.support_towers = []
                self.attack_towers = []
                print("[END] You Win, congrats!")
                self.win_screen()
            # keys = pygame.key.get_pressed()  # for finding path
            # if keys[pygame.K_SPACE]:
            #     self.clicks.append(pos)
            #     print(self.clicks)
            # Generate and handle enemies
            if not self.paused:
                # spawn the next enemy at a random 0.33–1.67s interval
                if time.time() - self.timer >= random.randrange(1, 6) / 3:
                    self.timer = time.time()
                    self.enemy_wave()
            if not self.paused:
                en_to_del = []
                for en in self.enemies:
                    en.move()
                    if en.x < -15:
                        en_to_del.append(en)
                # enemies that walked off the left edge cost a life each
                for enemy in en_to_del:
                    self.lives -= 1
                    self.enemies.remove(enemy)
                # loop through attack towers
                for tw in self.attack_towers:
                    self.money += tw.attack(self.enemies)
                # loop through support towers
                for tw in self.support_towers:
                    tw.support(self.attack_towers)
            self.redraw_game_window()
        pygame.quit()
| StarcoderdataPython |
3262949 | <filename>Protheus_WebApp/Modules/SIGACRM/CRMA290TESTCASE.py
from tir import Webapp
from datetime import datetime
DataSystem = datetime.today().strftime('%d/%m/%Y')
import unittest
class CRMA290(unittest.TestCase):
    """TIR UI test for Protheus routine CRMA290 (CRM sales opportunities)."""

    Contr = ""  # kept for parity with sibling TIR suites; unused here

    @classmethod
    def setUpClass(inst):
        """Log into SIGACRM and open routine CRMA290 once for the suite."""
        inst.oHelper = Webapp()
        inst.oHelper.SetTIRConfig(config_name="User", value="APICRM")
        inst.oHelper.SetTIRConfig(config_name="Password", value="1")
        inst.oHelper.Setup("SIGACRM", DataSystem, "T1", "D MG 01 ", "73")
        inst.oHelper.Program("CRMA290")

    def test_CRMA290_CT001(self):
        """Create an opportunity, reopen it, and verify every field round-trips."""
        self.oHelper.ClickLabel("+ Criar Oportunidade")
        # Capture the auto-generated opportunity number for the later lookup.
        NumOpt = self.oHelper.GetValue("AD1_NROPOR")
        self.oHelper.SetValue("AD1_DESCRI", "INCLUSAO TIR AT")
        self.oHelper.SetValue("AD1_DTINI", DataSystem)
        self.oHelper.SetValue("AD1_CODCLI", "FATT01")
        self.oHelper.SetValue("AD1_LOJCLI", "01")
        self.oHelper.SetValue("AD1_PROVEN", "FAT001")
        self.oHelper.SetValue("AD1_STAGE", "000002")
        self.oHelper.SetButton("Confirmar")
        self.oHelper.WaitShow("Minhas Oportunidades")
        self.oHelper.ClickLabel("Minhas Oportunidades")
        self.oHelper.WaitHide("Minhas Oportunidades")
        # Browse key is branch ("D MG 01 ") + opportunity number.
        self.oHelper.SearchBrowse(f"D MG 01 {NumOpt}")
        self.oHelper.SetButton("Visualizar")
        self.oHelper.CheckResult("AD1_NROPOR", NumOpt)
        self.oHelper.CheckResult("AD1_DESCRI", "INCLUSAO TIR AT")
        self.oHelper.CheckResult("AD1_CODCLI", "FATT01")
        self.oHelper.CheckResult("AD1_LOJCLI", "01")
        self.oHelper.CheckResult("AD1_PROVEN", "FAT001")
        self.oHelper.CheckResult("AD1_STAGE", "000002")
        self.oHelper.SetButton("Fechar")
        self.oHelper.SetButton("X")
        self.oHelper.AssertTrue()

    @classmethod
    def tearDownClass(self):
        """Release the TIR/Webapp session."""
        self.oHelper.TearDown()
# Allow running this suite directly: python CRMA290TESTCASE.py
if __name__ == "__main__":
    unittest.main()
3390617 | <gh_stars>0
import aspose.slides as slides
import aspose.pydrawing as drawing
def rendering_3d():
    """Render a rectangle with 3D effects (camera, lighting, extrusion) and
    save both a slide thumbnail (PNG) and the presentation (PPTX)."""
    dataDir = "./examples/data/"  # unused here; kept for symmetry with siblings
    outDir = "./examples/out/"
    with slides.Presentation() as pres:
        shape = pres.slides[0].shapes.add_auto_shape(slides.ShapeType.RECTANGLE, 200, 150, 200, 200)
        shape.text_frame.text = "3D"
        shape.text_frame.paragraphs[0].paragraph_format.default_portion_format.font_height = 64
        shape.three_dformat.camera.camera_type = slides.CameraPresetType.ORTHOGRAPHIC_FRONT
        shape.three_dformat.camera.set_rotation(20, 30, 40)
        # FIX: the enum is LightRigPresetType — the original
        # `slides.light_rigPresetType` is mis-cased and raises AttributeError
        # (every other enum in this function uses PascalCase).
        shape.three_dformat.light_rig.light_type = slides.LightRigPresetType.FLAT
        shape.three_dformat.light_rig.direction = slides.LightingDirection.TOP
        shape.three_dformat.material = slides.MaterialPresetType.FLAT
        shape.three_dformat.extrusion_height = 100
        shape.three_dformat.extrusion_color.color = drawing.Color.blue
        pres.slides[0].get_thumbnail(2, 2).save(outDir + "sample_3d.png")
        pres.save(outDir + "rendering_3d_out.pptx", slides.export.SaveFormat.PPTX)
| StarcoderdataPython |
45312 | <reponame>EnjoyLifeFund/macHighSierra-py36-pkgs
# !/usr/bin/env python
##############################################################################
## DendroPy Phylogenetic Computing Library.
##
## Copyright 2010-2015 <NAME> and <NAME>.
## All rights reserved.
##
## See "LICENSE.rst" for terms and conditions of usage.
##
## If you use this work or any portion thereof in published work,
## please cite it as:
##
## <NAME>. and <NAME>. 2010. DendroPy: a Python library
## for phylogenetic computing. Bioinformatics 26: 1569-1571.
##
##############################################################################
"""
Tests for general NEXUS character matrix reading.
"""
import unittest
import dendropy
from dendropy.utility import error
from dendropy.test.support import dendropytest
from dendropy.test.support import pathmap
from dendropy.test.support import standard_file_test_chars
from dendropy.test.support import compare_and_validate
from dendropy.dataio import nexmlreader
from dendropy.utility import messaging
# Module-level logger shared by the test cases below.
_LOG = messaging.get_logger(__name__)
class NexmlCharactersReaderDnaTestCase(
        standard_file_test_chars.DnaTestChecker,
        dendropytest.ExtendedTestCase):
    """Checks DNA character matrices read from NeXML against reference data."""

    @classmethod
    def setUpClass(cls):
        cls.build()

    def test_basic_nexml(self):
        # Both the cell-by-cell and the sequence NeXML layouts must parse
        # to the same matrix.
        src_filenames = [
            "standard-test-chars-dna.as_cells.nexml",
            "standard-test-chars-dna.as_seqs.nexml",
        ]
        for src_idx, src_filename in enumerate(src_filenames):
            # print(src_idx, src_filename)
            src_path = pathmap.char_source_path(src_filename)
            self.verify_get_from(
                matrix_type=dendropy.DnaCharacterMatrix,
                src_filepath=src_path,
                schema="nexml",
                factory_kwargs={},
                check_taxon_annotations=False,
                check_matrix_annotations=False,
                check_sequence_annotations=False,
                check_column_annotations=False,
                check_cell_annotations=False)
class NexmlCharactersReaderRnaTestCase(
        standard_file_test_chars.RnaTestChecker,
        dendropytest.ExtendedTestCase):
    """Checks RNA character matrices read from NeXML against reference data."""

    @classmethod
    def setUpClass(cls):
        cls.build()

    def test_basic_nexml(self):
        # Cell-by-cell and sequence layouts of the same data.
        src_filenames = [
            "standard-test-chars-rna.as_cells.nexml",
            "standard-test-chars-rna.as_seqs.nexml",
        ]
        for src_idx, src_filename in enumerate(src_filenames):
            # print(src_idx, src_filename)
            src_path = pathmap.char_source_path(src_filename)
            self.verify_get_from(
                matrix_type=dendropy.RnaCharacterMatrix,
                src_filepath=src_path,
                schema="nexml",
                factory_kwargs={},
                check_taxon_annotations=False,
                check_matrix_annotations=False,
                check_sequence_annotations=False,
                check_column_annotations=False,
                check_cell_annotations=False)
class NexmlCharactersReaderProteinTestCase(
        standard_file_test_chars.ProteinTestChecker,
        dendropytest.ExtendedTestCase):
    """Checks protein character matrices read from NeXML against reference data."""

    @classmethod
    def setUpClass(cls):
        cls.build()

    def test_basic_nexml(self):
        # Cell-by-cell and sequence layouts of the same data.
        src_filenames = [
            "standard-test-chars-protein.as_cells.nexml",
            "standard-test-chars-protein.as_seqs.nexml",
        ]
        for src_idx, src_filename in enumerate(src_filenames):
            # print(src_idx, src_filename)
            src_path = pathmap.char_source_path(src_filename)
            self.verify_get_from(
                matrix_type=dendropy.ProteinCharacterMatrix,
                src_filepath=src_path,
                schema="nexml",
                factory_kwargs={},
                check_taxon_annotations=False,
                check_matrix_annotations=False,
                check_sequence_annotations=False,
                check_column_annotations=False,
                check_cell_annotations=False)
class NexmlCharactersContinuousTestCase(
        standard_file_test_chars.ContinuousTestChecker,
        dendropytest.ExtendedTestCase):
    """Checks continuous character matrices read from NeXML against reference data."""

    @classmethod
    def setUpClass(cls):
        cls.build()

    def test_basic_nexml(self):
        # Cell-by-cell and sequence layouts of the same data.
        src_filenames = [
            "standard-test-chars-continuous.as_cells.nexml",
            "standard-test-chars-continuous.as_seqs.nexml",
        ]
        for src_idx, src_filename in enumerate(src_filenames):
            # print(src_idx, src_filename)
            src_path = pathmap.char_source_path(src_filename)
            self.verify_get_from(
                matrix_type=dendropy.ContinuousCharacterMatrix,
                src_filepath=src_path,
                schema="nexml",
                factory_kwargs={},
                check_taxon_annotations=False,
                check_matrix_annotations=False,
                check_sequence_annotations=False,
                check_column_annotations=False,
                check_cell_annotations=False)
class NexmlStandardCharacters01234TestCase(
        standard_file_test_chars.Standard01234TestChecker,
        dendropytest.ExtendedTestCase):
    """Checks 'standard' (0-4 state) character matrices read from NeXML."""

    @classmethod
    def setUpClass(cls):
        cls.build()

    def test_basic_nexml(self):
        # Cell-by-cell and sequence layouts of the same data.
        src_filenames = [
            "standard-test-chars-generic.as_cells.nexml",
            "standard-test-chars-generic.as_seqs.nexml",
        ]
        for src_idx, src_filename in enumerate(src_filenames):
            # print(src_idx, src_filename)
            src_path = pathmap.char_source_path(src_filename)
            self.verify_get_from(
                matrix_type=dendropy.StandardCharacterMatrix,
                src_filepath=src_path,
                schema="nexml",
                factory_kwargs={},
                check_taxon_annotations=False,
                check_matrix_annotations=False,
                check_sequence_annotations=False,
                check_column_annotations=False,
                check_cell_annotations=False)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| StarcoderdataPython |
3285783 | from fabrik_chain_3d import Chain as Chain, Bone as Bone, Utils as Util
import math
import sys
sys.path.append('..')
def main(default_target_position):
    """Solve the inverse kinematics of a 7-DOF FRANKA arm using FABRIK.

    :param default_target_position: [x, y, z] end-effector target position.
    """
    # This is an example of using this code for solving inverse kinematic of FRANKA robot
    # Step 1 : specify the target position and orientation(in quaternion)
    # default_target_position = [0.21700440072005056, 0.31700440072005023, 0.55902820523028393]
    # default_target_position = [0.0994258838609295, -0.09942588386092957, 1.0867820523028393]
    # default_target_position = [0.2, 0.2, 0.5]
    # default_target_orientation = [0.707,0.707,-0.707,0]
    # NOTE(review): 5 components for a quaternion-like orientation — confirm
    # the format expected by Chain.set_target.
    default_target_orientation = [0, 1, 0, 0, 0]
    # default_target_position = [0.0, 0.2, 0.5]
    # default_target_orientation = [0.707, 0.707, -0.707, 0]

    # Define the specification of Base-bone in this case it only twist and rotate around itself.
    # Bone number 1 (Base bone) — link lengths/limits follow the FRANKA spec.
    base_bone_start_location = [0, 0, 0]
    base_bone_direction = [0, 0, 1]
    base_bone_length = 0.333
    joint_type_1 = "twist_only"
    base_bone_rotation_axis = [0, 0, 1]
    cw_base_bone_constraint_rads = 2.8973
    cw_base_bone_constraint_degs = cw_base_bone_constraint_rads * 180 / math.pi
    acw_base_bone_constraint_rads = 2.8973
    acw_base_bone_constraint_degs = acw_base_bone_constraint_rads * 180 / math.pi
    base_bone_orientation = [1, 0, 0, 0]  # this means no orientational frame rotation happened from global coordinate.

    # Define the specification of consecutive bones in this case they are two group the one
    # rotate around themselves(bone 3, 5 7)
    # and the one working as a hinge (bone 2,4,6)
    # Bone number 2
    bone_direction_2 = [0, 0, 1]
    bone_length_2 = 0.316
    joint_type_2 = "LOCAL_HINGE"
    hinge_rotation_axis_2 = [0, 1, 0]
    hinge_constraint_reference_axis_2 = Util.Utils().gen_perpendicular_vector_quick(hinge_rotation_axis_2)
    cw_rad_2 = 1.7628
    cw_deg_2 = cw_rad_2 * 180 / math.pi
    acw_rad_2 = 1.7628
    acw_deg_2 = acw_rad_2 * 180 / math.pi
    bone_2_orientation = [1, 0, 0, 0]
    is_bone_2_fixed = 0

    # Bone number 3
    bone_direction_3 = [1, 0, 0]
    bone_length_3 = 0.088
    is_bone_3_fixed = 0
    joint_type_3 = "twist_only"
    hinge_rotation_axis_3 = [0, 0, 1]
    hinge_constraint_reference_axis_3 = [0, 0, 1]
    cw_rad_3 = 2.8972
    cw_deg_3 = cw_rad_3 * 180 / math.pi
    acw_rad_3 = 2.8973
    acw_deg_3 = acw_rad_3 * 180 / math.pi
    bone_3_orientation = [1, 0, 0, 0]

    # Bone number 4
    bone_direction_4 = [0, 0, 1]
    bone_length_4 = 0.088
    joint_type_4 = "LOCAL_HINGE"
    hinge_rotation_axis_4 = [0, 1, 0]
    hinge_constraint_reference_axis_4 = [0, 0, 1]
    cw_rad_4 = 3.0718
    cw_deg_4 = cw_rad_4 * 180 / math.pi
    acw_rad_4 = 0.0698
    acw_deg_4 = acw_rad_4 * 180 / math.pi
    bone_4_orientation = [1, 0, 0, 0]
    is_bone_4_fixed = 0

    # Bone number 5
    bone_direction_5 = [0, 0, 1]
    bone_length_5 = 0.384
    is_bone_5_fixed = 0
    joint_type_5 = "twist_only"
    hinge_rotation_axis_5 = [0, 0, 1]
    hinge_constraint_reference_axis_5 = [0, 0, 1]
    cw_rad_5 = 2.8973
    cw_deg_5 = cw_rad_5 * 180 / math.pi
    acw_rad_5 = 2.8973
    acw_deg_5 = acw_rad_5 * 180 / math.pi
    bone_5_orientation = [1, 0, 0, 0]

    # Bone number 6
    bone_direction_6 = [1, 0, 0]
    bone_length_6 = 0.088
    joint_type_6 = "LOCAL_HINGE"
    hinge_rotation_axis_6 = [0, 1, 0]
    hinge_constraint_reference_axis_6 = [0, 0, 1]
    cw_rad_6 = 0.0175
    cw_deg_6 = cw_rad_6 * 180 / math.pi
    acw_rad_6 = 3.7525
    acw_deg_6 = acw_rad_6 * 180 / math.pi
    bone_6_orientation = [0.707, 0, -0.707, 0]
    is_bone_6_fixed = 0

    # Bone number 7
    bone_direction_7 = [0, 0, -1]
    bone_length_7 = 0.107
    is_bone_7_fixed = 0
    joint_type_7 = "twist_only"
    hinge_rotation_axis_7 = [0, 0, 1]
    hinge_constraint_reference_axis_7 = [0, 0, 1]
    cw_rad_7 = 2.8973
    cw_deg_7 = cw_rad_7 * 180 / math.pi
    acw_rad_7 = 2.8973
    acw_deg_7 = acw_rad_7 * 180 / math.pi
    bone_7_orientation = [0.707, 0, -0.707, 0]

    ###### Solving!
    # The FRANKA consist of four main part that in each part there is a joint responsible for hinge duties and
    # a consecutive bone responsible for twisting In below these four part being made
    # by the above information about joints and bones
    # the First part: create a chain by defining one bone that is fixed in its place and only able to twist
    is_base_bone_fixed = 0
    m_chain = Chain.Chain3d(is_base_bone_fixed, base_address="./output")

    # Defining second part that consist of bone 2(able to work as a local hinge) and bone 3 that only
    # rotate around itself and responsible for twists.
    scale_direction = [i * base_bone_length for i in base_bone_direction]
    bone_2_start_location = [x + y for x, y in zip(base_bone_start_location, scale_direction)]
    scale_direction = [i * (bone_length_2 + bone_length_3) for i in bone_direction_2]
    bone_2_end_location = [x + y for x, y in zip(bone_2_start_location, scale_direction)]
    m_bone = Bone.Bone3D(bone_2_start_location, bone_2_end_location, bone_direction_2, bone_length_2 + bone_length_3,
                         is_bone_2_fixed, bone_2_orientation)
    m_chain.add_bone(m_bone)
    m_chain.set_rotor_base_bone_constraint("BALL", base_bone_rotation_axis, cw_deg_2)

    # Third part belongs to bone 4(able to work as a local hinge) and bone 5 that only
    # rotate around itself and responsible for twists.
    m_chain.add_consecutive_hinged_bone(bone_direction_4, bone_length_4 + bone_length_5, joint_type_4,
                                        hinge_rotation_axis_4, cw_deg_4,
                                        acw_deg_4, hinge_constraint_reference_axis_4, is_bone_4_fixed, bone_4_orientation)

    # Fourth part belongs to bone 6(able to work as a local hinge) and bone 7 that only
    # rotate around itself and responsible for twists.
    m_chain.add_consecutive_hinged_bone(bone_direction_6, bone_length_6 + bone_length_7, joint_type_6, hinge_rotation_axis_6, cw_deg_6,
                                        acw_deg_6, hinge_constraint_reference_axis_6, is_bone_6_fixed, bone_6_orientation)

    # In this part the target is set for the chain and whole chain is going to be solved
    m_chain.set_target(default_target_position, default_target_orientation)
    m_chain.solve_fabrik_ik()
if __name__ == "__main__":
    # Target position is taken from the command line: x y z.
    x = float(sys.argv[1])
    y = float(sys.argv[2])
    z = float(sys.argv[3])
    default_target_position = [x, y, z]
    main(default_target_position)
| StarcoderdataPython |
3376811 | <filename>assignmentsApp/admin.py
from django.contrib import admin
from assignmentsApp.models import Assignments
from assignmentsApp.models import Submissions
# Expose the assignment models in the Django admin site.
admin.site.register(Assignments)
admin.site.register(Submissions)
3244463 | <filename>nlpsc/representation/word_embedding/word2vec/word2vec_train.py
import plac
import gensim
import multiprocessing
from pathlib import Path
def word2vec_train(infile, outfile, fmtfile, epoch, size, mini):
    """Train a skip-gram Word2Vec model on a line-per-sentence corpus.

    :param infile: path to corpus file (one tokenized sentence per line)
    :param outfile: path for the gensim model dump (model.save)
    :param fmtfile: path for the text-format word vectors
    :param epoch: number of training iterations over the corpus
    :param size: embedding dimensionality
    :param mini: minimum corpus frequency for a word to be kept
    """
    sentences = gensim.models.word2vec.LineSentence(infile)
    model = gensim.models.Word2Vec(sentences,
                                   size=size,
                                   min_count=mini,
                                   sg=1,  # sg=1 selects skip-gram (vs CBOW)
                                   workers=multiprocessing.cpu_count(),
                                   iter=epoch)
    model.save(outfile)
    model.wv.save_word2vec_format(fmtfile, binary=False)
    # model.save_word2vec_format(output_file + '.vector', binary=True)
@plac.annotations(
    input_loc=("Location of input file", "positional", None, Path),
    output_model_loc=("Location of output model dump file", "positional", None, Path),
    output_format_loc=("Location of output format file", "positional", None, Path),
    epoch=("Train epoch", "option", "epoch", int),
    embedding_size=("Embedding size", "option", "size", int),
    min_count=("Corpus minimum size", "option", "min", int)
)
def main(input_loc, output_model_loc, output_format_loc, epoch=10, embedding_size=100, min_count=5):
    """CLI entry point: train word2vec from input_loc and write the model
    dump and text-format vectors to the given output paths."""
    word2vec_train(input_loc, output_model_loc, output_format_loc, epoch, embedding_size, min_count)
# plac builds the argument parser from main()'s annotations.
if __name__ == "__main__":
    plac.call(main)
178077 | <filename>m6anet/scripts/dataprep.py
import argparse
import numpy as np
import pandas as pd
import os
import multiprocessing
import ujson
from operator import itemgetter
from collections import defaultdict
from itertools import groupby
from io import StringIO
from . import helper
from .constants import M6A_KMERS, NUM_NEIGHBORING_FEATURES
from ..utils import misc
def get_args():
    """Build and parse the dataprep command-line arguments.

    Returns the argparse.Namespace with: eventalign, out_dir (required),
    n_processes, chunk_size, readcount_min, readcount_max, skip_index,
    n_neighbors (optional, with defaults shown in --help).
    """
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # Pop the default "optional arguments" group and re-append it after the
    # custom "required arguments" group so required options are listed first
    # in --help. (Relies on the private _action_groups attribute.)
    optional = parser._action_groups.pop()
    required = parser.add_argument_group('required arguments')
    # Required arguments
    required.add_argument('--eventalign', dest='eventalign', help='eventalign filepath, the output from nanopolish.',required=True)
    required.add_argument('--out_dir', dest='out_dir', help='output directory.',required=True)
    optional.add_argument('--n_processes', dest='n_processes', help='number of processes to run.',type=int, default=1)
    optional.add_argument('--chunk_size', dest='chunk_size', help='number of lines from nanopolish eventalign.txt for processing.',type=int, default=1000000)
    optional.add_argument('--readcount_min', dest='readcount_min', help='minimum read counts per gene.',type=int, default=1)
    optional.add_argument('--readcount_max', dest='readcount_max', help='maximum read counts per gene.',type=int, default=1000)
    optional.add_argument('--skip_index', dest='skip_index', help='with this argument the program will skip indexing eventalign.txt first.',default=False,action='store_true')
    optional.add_argument('--n_neighbors', dest='n_neighbors', help='number of neighboring features to extract.',type=int, default=NUM_NEIGHBORING_FEATURES)
    parser._action_groups.append(optional)
    return parser.parse_args()
def partition_into_continuous_positions(arr, window_size=1):
    """Split one read's events into runs of consecutive transcriptomic positions.

    `arr` is a structured array with at least the fields used below
    (transcriptomic_position, dwell_time, norm_std, norm_mean,
    reference_kmer, transcript_id). Returns a list of
    (float_features, kmers, tx_ids, tx_positions) tuples, keeping only runs
    long enough to form a full window (>= 2*window_size + 1 positions).
    """
    arr = arr[np.argsort(arr["transcriptomic_position"])]
    float_features = ['dwell_time', 'norm_std', 'norm_mean']
    # NOTE(review): float_features order differs from float_dtypes order —
    # confirm which column order the resulting (n, 3) matrix actually has.
    float_dtypes = [('norm_mean', '<f8'), ('norm_std', '<f8'), ('dwell_time', '<f8')]
    float_arr = arr[float_features].astype(float_dtypes).view('<f8').reshape(-1, 3)
    kmer_arr = arr["reference_kmer"].reshape(-1, 1)
    tx_pos_arr = arr["transcriptomic_position"]
    tx_id_arr = arr["transcript_id"]
    # Classic consecutive-run grouping: for sorted positions, (index - value)
    # is constant within each run of consecutive integers.
    partitions = [list(map(itemgetter(0), g)) for k, g in groupby(enumerate(tx_pos_arr),
                                                                  lambda x: x[0] - x[1])]
    return [(float_arr[partition],
             kmer_arr[partition], tx_id_arr[partition], tx_pos_arr[partition])
            for partition in partitions if len(partition) >= 2 * window_size + 1]
def filter_by_kmer(partition, kmers, window_size):
    """Keep only window rows whose central k-mer belongs to `kmers`.

    `partition` is a (features, window_kmers, tx_ids, tx_positions) tuple as
    produced by create_features. Returns the filtered 4-tuple, or an empty
    list when no row survives the filter.
    """
    features, window_kmers, tx_ids, tx_positions = partition
    center = (2 * window_size + 1) // 2
    keep = np.isin(window_kmers[:, center], kmers)
    kept_kmers = window_kmers[keep, :]
    if len(kept_kmers) == 0:
        return []
    return features[keep, :], kept_kmers, tx_ids[keep], tx_positions[keep]
def filter_partitions(partitions, window_size, kmers):
    """Window every continuous partition, then drop those without target k-mers."""
    kept = []
    for partition in partitions:
        windowed = create_features(partition, window_size)
        filtered = filter_by_kmer(windowed, kmers, window_size)
        if len(filtered) > 0:
            kept.append(filtered)
    return kept
def roll(to_roll, window_size=1):
    """Concatenate each row with its `window_size` neighbours on both sides.

    Input is an (n, m) array; output is (n - 2*window_size, (2*window_size+1)*m),
    trimming the edge rows whose neighbourhoods would wrap around.
    """
    trailing = [np.roll(to_roll, shift, axis=0) for shift in range(-1, -window_size - 1, -1)]
    leading = [np.roll(to_roll, shift, axis=0) for shift in range(window_size, 0, -1)]
    stacked = np.concatenate(leading + [to_roll] + trailing, axis=1)
    return stacked[window_size: -window_size, :]
def create_features(partition, window_size=1):
    """Attach neighbouring-position features/k-mers to every interior row.

    Trims the id/position arrays to match the rows kept by roll().
    """
    floats, kmers, tx_ids, tx_positions = partition
    interior = slice(window_size, -window_size)
    return (roll(floats, window_size), roll(kmers, window_size),
            tx_ids[interior], tx_positions[interior])
def filter_events(events, window_size, kmers):
    """Split a read's events into continuous stretches, window them, and keep
    only the windows centred on one of `kmers`."""
    partitions = partition_into_continuous_positions(events)
    return filter_partitions(partitions, window_size, kmers)
def combine_sequence(kmers):
    """Collapse overlapping k-mers into one sequence.

    Starts from the first k-mer and appends the last base of each following
    (shift-by-one) k-mer.
    """
    return kmers[0] + ''.join(k[-1] for k in kmers[1:])
def index(eventalign_result,pos_start,out_paths,locks):
    """Worker task: append byte-offset index rows for one eventalign chunk.

    For each (contig, read_index) group, writes a
    `transcript_id,read_index,pos_start,pos_end` CSV line giving the byte
    range of that group inside the raw eventalign file. `pos_start` is the
    byte offset where this chunk begins.
    """
    eventalign_result = eventalign_result.set_index(['contig','read_index'])
    pos_end=pos_start
    # dict.fromkeys preserves first-seen order while de-duplicating the index.
    with locks['index'], open(out_paths['index'],'a') as f_index:
        for index in list(dict.fromkeys(eventalign_result.index)):
            transcript_id,read_index = index
            # Sum of raw line lengths for this group = its byte span.
            pos_end += eventalign_result.loc[index]['line_length'].sum()
            f_index.write('%s,%d,%d,%d\n' %(transcript_id,read_index,pos_start,pos_end))
            pos_start = pos_end
def parallel_index(eventalign_filepath,chunk_size,out_dir,n_processes):
    """Build `eventalign.index` (byte offsets per transcript/read) in parallel.

    Streams the nanopolish eventalign.txt in `chunk_size`-line chunks,
    measures each line's byte length by re-reading the file alongside the
    pandas iterator, and hands (chunk, start_offset) tasks to `index` workers.
    """
    # Create output paths and locks.
    out_paths,locks = dict(),dict()
    for out_filetype in ['index']:
        out_paths[out_filetype] = os.path.join(out_dir,'eventalign.%s' %out_filetype)
        locks[out_filetype] = multiprocessing.Lock()
    # TO DO: resume functionality for index creation
    with open(out_paths['index'],'w') as f:
        f.write('transcript_id,read_index,pos_start,pos_end\n') # header
    # Create communication queues.
    task_queue = multiprocessing.JoinableQueue(maxsize=n_processes * 2)
    # Create and start consumers.
    consumers = [helper.Consumer(task_queue=task_queue,task_function=index,locks=locks) for i in range(n_processes)]
    for p in consumers:
        p.start()
    ## Load tasks into task_queue. A task is eventalign information of one read.
    eventalign_file = open(eventalign_filepath,'r')
    pos_start = len(eventalign_file.readline()) #remove header
    chunk_split = None
    index_features = ['contig','read_index','line_length']
    for chunk in pd.read_csv(eventalign_filepath, chunksize=chunk_size,sep='\t'):
        # Keep only rows whose read is fully contained in this chunk; the
        # trailing (possibly split) read is carried over via chunk_split.
        chunk_complete = chunk[chunk['read_index'] != chunk.iloc[-1]['read_index']]
        chunk_concat = pd.concat([chunk_split,chunk_complete])
        chunk_concat_size = len(chunk_concat.index)
        ## read the file at where it left off because the file is opened once ##
        lines = [len(eventalign_file.readline()) for i in range(chunk_concat_size)]
        chunk_concat['line_length'] = np.array(lines)
        task_queue.put((chunk_concat[index_features],pos_start,out_paths))
        pos_start += sum(lines)
        chunk_split = chunk[chunk['read_index'] == chunk.iloc[-1]['read_index']]
    ## the loop above leaves off w/o adding the last read_index to eventalign.index
    chunk_split_size = len(chunk_split.index)
    lines = [len(eventalign_file.readline()) for i in range(chunk_split_size)]
    chunk_split['line_length'] = np.array(lines)
    task_queue.put((chunk_split[index_features],pos_start,out_paths))
    # Put the stop task into task_queue.
    task_queue = helper.end_queue(task_queue,n_processes)
    # Wait for all of the tasks to finish.
    task_queue.join()
def combine(events_str):
    """Collapse multiple eventalign rows per (read, position) into one event.

    `events_str` is a raw tab-separated slice of eventalign.txt (no header).
    Rows where reference_kmer != model_kmer are dropped. Per group, the
    norm_mean/norm_std/dwell_time are length-weighted averages over the
    constituent events. Returns a numpy record array with the `features`
    columns, or an empty array if nothing aligned successfully.
    """
    f_string = StringIO(events_str)
    eventalign_result = pd.read_csv(f_string,delimiter='\t',names=['contig','position','reference_kmer','read_index','strand','event_index','event_level_mean','event_stdv','event_length','model_kmer','model_mean','model_stdv','standardized_level','start_idx','end_idx'])
    f_string.close()
    cond_successfully_eventaligned = eventalign_result['reference_kmer'] == eventalign_result['model_kmer']
    if cond_successfully_eventaligned.sum() != 0:
        eventalign_result = eventalign_result[cond_successfully_eventaligned]
        keys = ['read_index','contig','position','reference_kmer'] # for groupby
        # Weights for the averages: number of raw samples per event.
        eventalign_result.loc[:, 'length'] = pd.to_numeric(eventalign_result['end_idx'])-pd.to_numeric(eventalign_result['start_idx'])
        eventalign_result.loc[:, 'sum_norm_mean'] = pd.to_numeric(eventalign_result['event_level_mean']) * eventalign_result['length']
        eventalign_result.loc[:, 'sum_norm_std'] = pd.to_numeric(eventalign_result['event_stdv']) * eventalign_result['length']
        eventalign_result.loc[:, 'sum_dwell_time'] = pd.to_numeric(eventalign_result['event_length']) * eventalign_result['length']
        eventalign_result = eventalign_result.groupby(keys)
        sum_norm_mean = eventalign_result['sum_norm_mean'].sum()
        sum_norm_std = eventalign_result["sum_norm_std"].sum()
        sum_dwell_time = eventalign_result["sum_dwell_time"].sum()
        start_idx = eventalign_result['start_idx'].min()
        end_idx = eventalign_result['end_idx'].max()
        total_length = eventalign_result['length'].sum()
        eventalign_result = pd.concat([start_idx,end_idx],axis=1)
        # norm_mean is rounded to 1 decimal; norm_std/dwell_time are not.
        eventalign_result['norm_mean'] = (sum_norm_mean/total_length).round(1)
        eventalign_result["norm_std"] = sum_norm_std / total_length
        eventalign_result["dwell_time"] = sum_dwell_time / total_length
        eventalign_result.reset_index(inplace=True)
        # Strip the version suffix from Ensembl-style ids (e.g. ENST....1).
        eventalign_result['transcript_id'] = [contig.split('.')[0] for contig in eventalign_result['contig']] #### CHANGE MADE ####
        eventalign_result['transcriptomic_position'] = pd.to_numeric(eventalign_result['position']) + 2 # the middle position of 5-mers.
        features = ['transcript_id','read_index','transcriptomic_position','reference_kmer','norm_mean','norm_std','dwell_time']
        df_events = eventalign_result[features]
        np_events = np.rec.fromrecords(df_events, names=[*df_events])
        return np_events
    else:
        return np.array([])
def parallel_preprocess_tx(eventalign_filepath,out_dir,n_processes,readcount_min,readcount_max, n_neighbors):
    """Produce data.json/data.index/data.readcount per transcript, in parallel.

    Uses the byte-offset index built by parallel_index to seek straight to
    each read's slice of eventalign.txt, combines its events, and dispatches
    per-transcript dicts to `preprocess_tx` workers. Transcripts with fewer
    than `readcount_min` usable reads are skipped; reads beyond
    `readcount_max` are ignored.
    """
    # Create output paths and locks.
    out_paths,locks = dict(),dict()
    for out_filetype in ['json','index','log','readcount']:
        out_paths[out_filetype] = os.path.join(out_dir,'data.%s' %out_filetype)
        locks[out_filetype] = multiprocessing.Lock()
    # Writing the starting of the files.
    open(out_paths['json'],'w').close()
    with open(out_paths['index'],'w') as f:
        f.write('transcript_id,transcript_position,start,end\n') # header
    with open(out_paths['readcount'],'w') as f:
        f.write('transcript_id,transcript_position,n_reads\n') # header
    open(out_paths['log'],'w').close()
    # Create communication queues.
    task_queue = multiprocessing.JoinableQueue(maxsize=n_processes * 2)
    # Create and start consumers.
    consumers = [helper.Consumer(task_queue=task_queue,task_function=preprocess_tx,locks=locks) for i in range(n_processes)]
    for p in consumers:
        p.start()
    df_eventalign_index = pd.read_csv(os.path.join(out_dir,'eventalign.index'))
    # Drop transcript version suffixes to match combine()'s transcript_id.
    df_eventalign_index['transcript_id'] = [tx_id.split('.')[0] for tx_id in df_eventalign_index['transcript_id']]
    tx_ids = df_eventalign_index['transcript_id'].values.tolist()
    tx_ids = list(dict.fromkeys(tx_ids))  # de-duplicate, keep order
    df_eventalign_index.set_index('transcript_id',inplace=True)
    with open(eventalign_filepath,'r') as eventalign_result:
        for tx_id in tx_ids:
            data_dict = dict()
            readcount = 0
            for _,row in df_eventalign_index.loc[[tx_id]].iterrows():
                read_index,pos_start,pos_end = row['read_index'],row['pos_start'],row['pos_end']
                # Seek to this read's byte range and parse just that slice.
                eventalign_result.seek(pos_start,0)
                events_str = eventalign_result.read(pos_end-pos_start)
                data = combine(events_str)
                if data.size > 1:
                    data_dict[read_index] = data
                    readcount += 1
                if readcount > readcount_max:
                    break
            if readcount>=readcount_min:
                task_queue.put((tx_id,data_dict,n_neighbors,out_paths)) # Blocked if necessary until a free slot is available.
    # Put the stop task into task_queue.
    task_queue = helper.end_queue(task_queue,n_processes)
    # Wait for all of the tasks to finish.
    task_queue.join()
def preprocess_tx(tx_id,data_dict,n_neighbors,out_paths,locks): # todo
    """Aggregate one transcript's per-read windows by position and dump to JSON.

    Parameters
    ----------
    tx_id: str
        Transcript ID.
    data_dict: {read_index: events_array}
        Combined events for each read (output of `combine`).
    n_neighbors: int
        Window half-width passed to filter_events.
    out_paths / locks: dict
        Shared output file paths and their multiprocessing locks.

    Writes one JSON line per kept position to data.json, its byte range to
    data.index, and its read count to data.readcount.
    """
    # features = ['read_id','transcript_id','transcriptomic_position','reference_kmer','norm_mean','start_idx','end_idx'] # columns in the eventalign file per read.
    if len(data_dict) == 0:
        return
    features_arrays = []
    reference_kmer_arrays = []
    transcriptomic_positions_arrays = []
    for _,events_per_read in data_dict.items():
        events_per_read = filter_events(events_per_read, n_neighbors, M6A_KMERS)
        for event_per_read in events_per_read:
            features_arrays.append(event_per_read[0])
            reference_kmer_arrays.append([combine_sequence(kmer) for kmer in event_per_read[1]])
            transcriptomic_positions_arrays.append(event_per_read[3])
    if len(features_arrays) == 0:
        return
    else:
        features_arrays = np.concatenate(features_arrays)
        reference_kmer_arrays = np.concatenate(reference_kmer_arrays)
        transcriptomic_positions_arrays = np.concatenate(transcriptomic_positions_arrays)
        assert(len(features_arrays) == len(reference_kmer_arrays) == len(transcriptomic_positions_arrays))
    # Sort and split: np.unique's return_index gives the first row of each
    # position group, so splitting at index[1:] groups rows per position.
    idx_sorted = np.argsort(transcriptomic_positions_arrays)
    positions, index = np.unique(transcriptomic_positions_arrays[idx_sorted], return_index = True,axis=0) #'chr',
    features_arrays = np.split(features_arrays[idx_sorted], index[1:])
    reference_kmer_arrays = np.split(reference_kmer_arrays[idx_sorted], index[1:])
    # Prepare
    # print('Reformating the data for each genomic position ...')
    # NOTE(review): defaultdict is unnecessary here — entries are only ever
    # assigned directly below.
    data = defaultdict(dict)
    # for each position, make it ready for json dump
    for position, features_array, reference_kmer_array in zip(positions, features_arrays, reference_kmer_arrays):
        kmer = set(reference_kmer_array)
        # NOTE(review): this assert fires before the skip-check below, so a
        # position with mixed k-mers aborts the worker instead of being skipped.
        assert(len(kmer) == 1)
        # Due to and/or precedence this reads as:
        # ((exactly one k-mer AND it is 'XXXXX') OR no features) -> skip.
        if (len(set(reference_kmer_array)) == 1) and ('XXXXX' in set(reference_kmer_array)) or (len(features_array) == 0):
            continue
        data[int(position)] = {kmer.pop(): features_array.tolist()}
    # write to file.
    log_str = '%s: Data preparation ... Done.' %(tx_id)
    with locks['json'], open(out_paths['json'],'a') as f, \
            locks['index'], open(out_paths['index'],'a') as g, \
            locks['readcount'], open(out_paths['readcount'],'a') as h:
        for pos, dat in data.items():
            # Record the byte range of each JSON line so it can be seek()ed.
            pos_start = f.tell()
            f.write('{')
            f.write('"%s":{"%d":' %(tx_id,pos))
            ujson.dump(dat, f)
            f.write('}}\n')
            pos_end = f.tell()
            g.write('%s,%d,%d,%d\n' %(tx_id,pos,pos_start,pos_end))
            n_reads = 0
            for kmer, features in dat.items():
                n_reads += len(features)
            h.write('%s,%d,%d\n' %(tx_id,pos,n_reads))
    with locks['log'], open(out_paths['log'],'a') as f:
        f.write(log_str + '\n')
def main():
    """Dataprep entry point: index eventalign.txt (unless skipped), then build
    the per-transcript JSON dataset."""
    args = get_args()
    #
    n_processes = args.n_processes
    eventalign_filepath = args.eventalign
    chunk_size = args.chunk_size
    out_dir = args.out_dir
    readcount_min = args.readcount_min
    readcount_max = args.readcount_max
    skip_index = args.skip_index
    n_neighbors = args.n_neighbors
    misc.makedirs(out_dir) #todo: check every level.
    # For each read, combine multiple events aligned to the same positions, the results from nanopolish eventalign, into a single event per position.
    if not skip_index:
        parallel_index(eventalign_filepath,chunk_size,out_dir,n_processes)
    parallel_preprocess_tx(eventalign_filepath,out_dir,n_processes,readcount_min,readcount_max, n_neighbors)
if __name__ == '__main__':
    main()
| StarcoderdataPython |
3302522 | import sqlite3
from car import Car
from cars_sql_scheme import create_table_cars, create_table_repairs
"""Represents a sample car.
Arguments:
make - car make e.g. Honda
model - car model e.g. Civic
year - year of production
vrn - vehicle registration number
vin - VIN number
sold - if car is still our property
"""
class Helper():
    """SQLite-backed repository for cars and their repair records.

    Opens (and creates, if needed) my_cars.db with `cars` and `repairs`
    tables on construction. All write operations run inside `with self.conn`
    transactions.
    """

    def __init__(self):
        self.conn = sqlite3.connect('my_cars.db')
        self.c = self.conn.cursor()
        self.c.execute(create_table_cars)
        self.c.execute(create_table_repairs)
        # temp statement - delete all records
        # with self.conn:
        # self.c.execute("DELETE FROM repairs")
        # self.c.execute("DELETE FROM cars")
    def add_car(self, make, model, year, vrn, vin):
        """Adds new car to database.
        Arguments:
        make - car make e.g. Honda
        model - car model e.g. Civic
        year - year of production
        vrn - vehicle registration number
        vin - VIN number
        Returns:
        new Car instance
        """
        with self.conn:
            self.c.execute("INSERT INTO cars VALUES (:make, :model, :year, :vrn, :vin, :sold)", {
                'make': make, 'model': model, 'year': year, 'vrn': vrn, 'vin': vin, 'sold': False})
        return Car(make, model, year, vrn, vin)
    def del_car(self, car):
        """Deletes car from database.

        Removes the car's repair records first, then the car itself,
        matching by VIN.
        Arguments:
        car - car instance
        Returns:
        None
        """
        with self.conn:
            self.c.execute("SELECT ROWID FROM cars WHERE vin=:vin",
                           {'vin': car.vin})
            car_id = self.c.fetchone()
            # NOTE(review): (car_id) is not a tuple — this works only because
            # fetchone() already returns a 1-tuple used as the param sequence.
            self.c.execute("DELETE FROM repairs WHERE car=?",
                           (car_id))
            self.c.execute("DELETE FROM cars WHERE vin=:vin", {'vin': car.vin})
    def search_by_vrn(self, vrn):
        """Search car by vehicle registration number.
        Arguments:
        vrn - vehicle registration number
        Returns:
        search result tuple (or None if not found)
        """
        self.c.execute("SELECT * FROM CARS WHERE vrn=:vrn", {'vrn': vrn})
        return self.c.fetchone()
    def show_all_cars(self):
        """Search availale cars.
        Returns:
        search result - list of tuples
        """
        self.c.execute("SELECT * FROM CARS")
        return self.c.fetchall()
    def set_sold(self, car):
        """Mark car as sold.

        Updates both the in-memory Car object and the database row.
        Arguments:
        car - Car instance
        Returns:
        None
        """
        car.sold = True
        with self.conn:
            # NOTE(review): the SQL TRUE literal requires SQLite >= 3.23.
            self.c.execute("UPDATE cars SET sold=True WHERE vin=:vin", {
                'vin': car.vin})
    def add_repair(self, car, date, description):
        """Adds repair note.
        Arguments:
        car - Car instance
        date - repair date
        description - repair description
        Returns:
        None
        """
        self.c.execute("SELECT ROWID FROM cars WHERE vin=:vin",
                       {'vin': car.vin})
        car_id = self.c.fetchone()[0]
        with self.conn:
            self.c.execute("INSERT INTO repairs VALUES (:date, :car, :description)", {
                'date': date, 'car': car_id, 'description': description})
    def show_repairs(self, car):
        """Shows car repairs notes.
        Arguments:
        car - Car instance
        Returns:
        search result - list of tuples
        """
        self.c.execute("SELECT ROWID FROM cars WHERE vin=:vin",
                       {'vin': car.vin})
        car_id = self.c.fetchone()
        # Same 1-tuple-as-params pattern as in del_car.
        self.c.execute("SELECT * FROM repairs WHERE car=?",
                       (car_id))
        return self.c.fetchall()
| StarcoderdataPython |
1674957 | from GameAI.QLearner import QLearnerGameAI
import pickle
from os import path, makedirs
import time
import random
from builtins import input
def ensureDir(f):
    """Ensure the parent directory of file path `f` exists.

    Arguments:
    f - a file path; its dirname is created (including intermediate
        directories) when missing.

    Uses exist_ok=True so a concurrent creation of the directory between the
    exists() check and makedirs() no longer raises FileExistsError.
    """
    d = path.dirname(f)
    if not path.exists(d):
        makedirs(d, exist_ok=True)
class Game(object):
    """Abstract turn-based game loop.

    Subclasses are expected to provide `start`, `actions`, `transition`,
    `checkGameOver`, `displayBoard` and `displayGameEnd` (see usage below
    and in the player classes).
    """

    def playGame(self, players):
        """Run one full game, cycling through `players` in order.

        Returns the final result code from checkGameOver (which returns -1
        while the game is still in progress).
        """
        state = self.start
        turn = 0
        move = None
        while self.checkGameOver(state, move) == -1:
            self.displayBoard(state)
            player = players[turn % len(players)]
            move = player.play(state)
            state = self.transition(state, move)
            turn += 1
        self.displayGameEnd(state)
        return self.checkGameOver(state, move)
class HumanPlayer(object):
    """Player that picks moves interactively from stdin."""

    def __init__(self, game):
        self.game = game

    def play(self, state):
        """Prompt until the user enters a valid 1-based index into the legal
        moves for `state`; return the chosen move."""
        actions = [a for a in self.game.actions(state)]
        while True:
            print("Available moves: \n")
            for ind, action in enumerate(actions):
                print("{}: {}".format(ind + 1, str(action)))
            index = input("Please pick one of the available moves: ")
            # NOTE(review): non-numeric input raises ValueError here rather
            # than re-prompting.
            index = int(index)
            if index < 1 or index > len(actions):
                print("That move is not available. Try again!")
            else:
                return actions[index - 1]
class RandomPlayer(object):
    """Player that picks a uniformly random legal move."""

    def __init__(self, game):
        self.game = game

    def play(self, state):
        actions = self.game.actions(state)
        time.sleep(1)  # pause so a human spectator can follow the game
        return random.choice(actions)
class AI(object):
    """Player backed by a Q-learning agent, with pickle save/load of the
    learned Q table."""

    def __init__(self, game, epsilon, alpha, gamma):
        self.game = game
        self.gameAI = QLearnerGameAI(game, epsilon, alpha, gamma)

    def learnSteps(self, numSteps):
        # Delegate step-count-based training to the underlying learner.
        self.gameAI.learnSteps(numSteps)

    def learnGames(self, numGames):
        # Delegate whole-game training to the underlying learner.
        self.gameAI.learnGames(numGames)

    def play(self, state):
        time.sleep(1)  # pause for watchability, mirroring RandomPlayer
        return self.gameAI.learnedMove(state)

    def getAIFilePath(self, name):
        """Return <game.AIpath>/<name>.p, ensuring the directory exists."""
        gamePath = self.game.AIpath
        fileName = name + ".p"
        filePath = path.join(gamePath, fileName)
        # NOTE(review): ensureDir creates dirname(gamePath), i.e. the PARENT
        # of AIpath — unless AIpath ends with a separator, the leaf directory
        # itself is not created. Verify against how AIpath is defined.
        ensureDir(gamePath)
        return filePath

    def saveAI(self, name):
        """Pickle (Q table, games-learned counter) to the AI file."""
        filePath = self.getAIFilePath(name)
        AI_info = (self.gameAI.Q, self.gameAI.numGamesLearned)
        pickle.dump(AI_info, open(filePath, "wb"))

    def loadAI(self, name):
        """Load a previously saved Q table; a missing file only logs a warning."""
        filePath = self.getAIFilePath(name)
        try:
            self.gameAI.Q, self.gameAI.numGamesLearned = pickle.load(
                open(filePath, "rb"))
        except IOError:
            print("Error: couldn't find AI file - skipped loading AI.")
players_map = {"Human": HumanPlayer, "Random": RandomPlayer, "AI": AI}
| StarcoderdataPython |
180641 | <reponame>malharlakdawala/DevelopersInstitute
def mergeSortedArrays(L, R):
    """Merge two pre-sorted lists into a single sorted list.

    On ties, the element from R is taken first (strict `<` comparison).
    """
    merged = []
    li = ri = 0
    while li < len(L) and ri < len(R):
        if L[li] < R[ri]:
            merged.append(L[li])
            li += 1
        else:
            merged.append(R[ri])
            ri += 1
    # One of the two lists is exhausted; append whatever remains of the other.
    merged.extend(L[li:])
    merged.extend(R[ri:])
    return merged
def mergeSort(nums):
    """Sort `nums` with recursive merge sort; returns a new sorted list
    (the input is not modified in place)."""
    # Base case: zero or one element is already sorted.
    if len(nums) <= 1:
        return nums
    mid = len(nums) // 2
    # Sort each half, then merge the results.
    return mergeSortedArrays(mergeSort(nums[:mid]), mergeSort(nums[mid:]))
array = [6, 5, 12, 10, 9, 1]
# mergeSort returns a NEW sorted list (it does not sort in place), so the
# result must be captured — the original code discarded it and printed the
# still-unsorted list.
array = mergeSort(array)
print(array)
3269004 | import asyncio
import datetime
from collections import Counter
from typing import Any, NamedTuple, Optional
import asyncpg
import discord
from discord.ext import commands, menus, tasks
from donphan import MaybeAcquire
from ... import BotBase, Cog, Context, CONFIG
from ...db.tables import Commands
from ...utils.paginator import EmbedPaginator
from ...utils.time import human_friendly_timestamp
class CommandInvoke(NamedTuple):
    """Record of a single command invocation, buffered for bulk DB insertion."""

    # NOTE(review): looks like a typo for "message_id"; it is only used
    # positionally in this file, but renaming would change the attribute
    # name — confirm no external attribute access before fixing.
    mssage_id: int
    # None for DMs: on_command sets this via getattr(ctx.guild, "id", None).
    guild_id: Optional[int]
    channel_id: int
    user_id: int
    invoked_at: datetime.datetime
    prefix: str
    command: str
    failed: bool
class Stats(Cog):
    """Owner-only cog collecting command and gateway-socket statistics.

    Command invocations are buffered in memory and bulk-inserted into the
    Commands table every 15 seconds.
    """

    def __init__(self, bot: BotBase) -> None:
        super().__init__(bot)
        self._command_stats: Counter[str] = Counter()
        self._socket_stats: Counter[Optional[str]] = Counter()
        # Guards _batch_data between the listener and the bulk-insert loop.
        self._batch_lock = asyncio.Lock()
        self._batch_data: list[CommandInvoke] = []
        # Keep the insert loop alive across transient DB connection errors.
        self.bulk_insert.add_exception_type(asyncpg.exceptions.PostgresConnectionError)
        self.bulk_insert.start()

    async def cog_check(self, ctx: Context) -> bool:
        # Restrict every command in this cog to the bot owner.
        return await commands.is_owner().predicate(ctx)

    @commands.command()
    async def command_history(self, ctx: Context) -> None:
        """Paginate the 100 most recent command invocations from the DB."""
        embed = EmbedPaginator[discord.Embed](colour=ctx.me.colour, max_fields=10)
        embed.set_author(name="Command History:", icon_url=self.bot.user.avatar.url)
        async with ctx.db as connection:
            # NOTE(review): this local shadows the imported `commands` module
            # for the rest of the method.
            commands = await Commands.fetch(connection, order_by=(Commands.invoked_at, "DESC"), limit=100)
        if commands:
            for command in commands:
                user = self.bot.get_user(command["user_id"]) or "Unknown user"
                embed.add_field(
                    name=f'{user} ({command["user_id"]}) @ {human_friendly_timestamp(command["invoked_at"])}',
                    value=f'`{command["prefix"]}{command["command"]}`',
                    inline=False,
                )
        else:
            embed.add_line("No commands used.")
        await menus.MenuPages(embed, delete_message_after=True).start(ctx)

    @commands.command()
    async def command_stats(self, ctx: Context) -> None:
        """Displays basic information about command invocation statistics."""
        total_occurunces = sum(self._command_stats.values())
        total_per_min = total_occurunces / (self.bot.uptime.total_seconds() / 60)
        embed = discord.Embed(
            colour=ctx.me.colour,
            description=f"Processed {total_occurunces} command invokes. ({total_per_min:.2f}/min)",
        ).set_author(name=f"{self.bot.user.name} command stats:", icon_url=self.bot.user.avatar.url)
        for event, occurunces in self._command_stats.most_common(25):
            per_minute = occurunces / (self.bot.uptime.total_seconds() / 60)
            embed.add_field(name=f"`{event}`", value=f"{occurunces} ({per_minute:.2f}/min)", inline=True)
        await ctx.send(embed=embed)

    @commands.command()
    async def socket_stats(self, ctx: Context) -> None:
        """Displays basic information about socket statistics."""
        total_occurunces = sum(self._socket_stats.values())
        total_per_min = total_occurunces / (self.bot.uptime.total_seconds() / 60)
        embed = discord.Embed(
            colour=ctx.me.colour, description=f"Observed {total_occurunces} socket events. ({total_per_min:.2f}/min)"
        ).set_author(name=f"{self.bot.user.name} socket event stats:", icon_url=self.bot.user.avatar.url)
        for event, occurunces in self._socket_stats.most_common(25):
            per_minute = occurunces / (self.bot.uptime.total_seconds() / 60)
            embed.add_field(name=f"`{event}`", value=f"{occurunces} ({per_minute:.2f}/min)", inline=True)
        await ctx.send(embed=embed)

    @commands.Cog.listener()
    async def on_socket_response(self, msg: dict[str, Any]):
        # Tally every gateway event by its "t" (event type) field.
        self._socket_stats[msg.get("t")] += 1

    @commands.Cog.listener("on_command_completion")
    @commands.Cog.listener("on_command_error")
    async def on_command(self, ctx: Context, error: BaseException = None) -> None:
        """Record a finished (or failed) command invocation into the batch."""
        command = ctx.command
        if command is None:
            return
        guild_id = getattr(ctx.guild, "id", None)
        invoke = CommandInvoke(
            ctx.message.id,
            guild_id,
            ctx.channel.id,
            ctx.author.id,
            ctx.message.created_at,
            ctx.prefix,
            command.qualified_name,
            ctx.command_failed,
        )
        self._command_stats[command.qualified_name] += 1
        async with self._batch_lock:
            self._batch_data.append(invoke)

    @tasks.loop(seconds=15)
    async def bulk_insert(self) -> None:
        """Flush the buffered invocations to the Commands table."""
        async with self._batch_lock:
            if self._batch_data:
                async with MaybeAcquire(pool=self.bot.pool) as connection:
                    await Commands.insert_many(connection, Commands._columns, *self._batch_data)
                self._batch_data.clear()
def setup(bot: BotBase):
    """discord.py extension entry point: register the Stats cog unless the
    database is disabled in the configuration."""
    if CONFIG.DATABASE.DISABLED:
        return
    bot.add_cog(Stats(bot))
| StarcoderdataPython |
1738805 | import probability as prb
import pprint
import random
import itertools as it
#todo: opening fire, expected value of strategic bombing raid against risk, sub withdrawal,
#sub/plane individuation:
# sub not hit planes,
# chance of taking territory (planes can't take territory)
# planes potential targets for aa
#expected cost of victory, financial damage wrought (requires unit costs, unit individuation)
def a_minus(n, *v):
    """Deplete the army vector v from left to right until n hits are absorbed.

    Each slot absorbs up to its own value; a partially-hit slot keeps the
    remainder. Slots beyond the last one touched are passed through intact.
    Returns the depleted vector as a list.
    """
    remaining = n
    survivors = []
    idx = 0
    while remaining and idx < len(v):
        slot = v[idx]
        idx += 1
        if slot > remaining:
            # This slot absorbs the rest of the hits and partially survives.
            survivors.append(slot - remaining)
            remaining = 0
        else:
            # Slot is wiped out entirely.
            survivors.append(0)
            remaining -= slot
    survivors.extend(v[idx:])
    return survivors
def losses(depleted, full):
    """Per-slot casualties: element-wise difference full - depleted."""
    return [total - depleted[i] for i, total in enumerate(full)]
def make_grid(n, m):
    """Build an (n+1) x (m+1) grid of coordinate labels (i, j).

    Used by start_pct/simulate to enumerate battle states; as the original
    author noted, the explicit labels are largely superfluous.
    """
    rows = []
    for i in range(n + 1):
        rows.append([(i, j) for j in range(m + 1)])
    return rows
def make_transitions(n, m):
    """Map every non-terminal state to the states reachable from it.

    A state (a, b) is non-terminal when both sides still have units
    (a >= 1 and b >= 1); transitions only go "downwards" — neither
    coordinate may increase.
    """
    transitions = {}
    for start in it.product(range(1, n + 1), range(1, m + 1)):
        a, b = start
        transitions[start] = [
            end for end in it.product(range(n + 1), range(m + 1))
            if end[0] <= a and end[1] <= b
        ]
    return transitions
def weight_transitions(a1, a2, trans):
    """calculate weights for each transition in trans

    a1/a2 are army vectors indexed by hit-on value (slot i hits with
    probability i/6); trans maps start states (units left on each side) to
    candidate end states. Returns a nested dict {start: {end: probability}}.
    Relies on the external `prb` module for binomial math.
    """
    h = {} #trans plus weights
    for s in trans: #start
        h[s] = {}
        # Reconstruct each army as it stands at state s (least-probable
        # hitters are removed first via a_minus — see its caveat comment).
        a11, a21 = a_minus(sum(a1)-s[0], *a1), a_minus(sum(a2)-s[1], *a2) #take away losses
        c1, c2 = prb.binomial_joint(*[(a11[i], i/float(6)) for i in range(len(a11))]), prb.binomial_joint(*[(a21[i], i/float(6)) for i in range(len(a21))])#get damage dealing capacity of each army
        # Probability mass of dealing at least enough hits to wipe the enemy.
        overkill1, overkill2 = sum(c1[s[1]:]), sum(c2[s[0]:]) #p(army deals critical hit)
        for e in trans[s]: #end
            i, j = s[0]-e[0], s[1]-e[1] #damage dealt to a1, a2
            if i > s[1] or j > s[0]: continue #tasked damage too great for at least one party, don't include
            #if i > s[1] or j > s[0]: h[s][e] = 0 #tasked damage too great for at least one party, include 0
            if not(e[0] or e[1]): h[s][e] = overkill1*overkill2 #a1 and a2 elimination, overkill options
            elif not e[0]: h[s][e] = c1[j]*overkill2 #a1 elimination, overkill option for a2
            elif not e[1]: h[s][e] = c2[i]*overkill1 #a2 elimination, overkill option for a1
            else: h[s][e] = c1[j]*c2[i]
    #for x in sorted(h): print(x, h[x])
    return h
def self_loop_dict(trans):
    """Summarise self-loop mass per state for later redistribution.

    Given weighted transitions {start: {end: weight}}, returns
    {start: [self-loop weight, total weight of all non-self transitions]}.
    Used by start_pct/simulate to redistribute self-loop probability onto
    the outgoing edges.

    (Removed an unused local `d` from the original.)
    """
    h = {x: [0, 0] for x in trans}
    for x in trans:
        for y in trans[x]:
            if x == y:
                h[x][0] = trans[x][y]
            else:
                h[x][1] += trans[x][y]
    return h
#1972 iterations for 5 2 v 3 3, with zeroes dropped, 1503
def start_pct(grid, trans):
"""probability each end state will be reached"""
#track forward from start, augmenting finals when they come up!
#uses nested transitions
h = {x:0 for x in ([x[0] for x in grid[1:]] + grid[0])} #gotta be a smoother way
start = grid[-1][-1]
sld = self_loop_dict(trans)
#cnt = 0
#depth = 0
#paths = 0
undone = [[(start, 1)]]
while undone:
path = undone.pop()
prevn, prevv = path[-1]
for n in trans[prevn]:
#cnt += 1
if n in h:
h[n] += prevv*(trans[prevn][n]+(sld[prevn][0]*(trans[prevn][n]/sld[prevn][1])))
#paths += 1
#depth += len(path)+1
elif n != prevn and trans[prevn][n] != 0: undone.append(path+[(n, prevv*(trans[prevn][n]+(sld[prevn][0]*(trans[prevn][n]/sld[prevn][1]))))]) #we don't actually need the whole path, can just store the last node and its probability!
#print("total length of paths {}".format(depth))
#print("average length of paths {}".format(round(depth/float(paths), 2)))
#print("total nodes {}".format(cnt))
#print("total paths {}".format(paths))
return [(x, h[x]) for x in h]
def simulate(grid, trans, cap):
    """Monte Carlo estimate of terminal-state probabilities.

    Runs `cap` random walks from the full-strength start state until a
    terminal state is hit, sampling next states from the (self-loop
    redistributed) transition weights. Returns (terminal_state, frequency)
    pairs. Used by sim_or_calc for battles too large for exact enumeration.
    """
    h = {x:0 for x in ([x[0] for x in grid[1:]] + grid[0])} #gotta be a smoother way
    start = grid[-1][-1]
    sld = self_loop_dict(trans)
    paths = 0
    while paths < cap:
        prevn = start
        while prevn not in h:
            x =random.random()
            cumsum = 0
            nextn = sorted(trans[prevn])
            # Inverse-CDF sampling over the outgoing edges (self-loop mass
            # folded proportionally into each non-self edge).
            while cumsum < x:
                n = nextn.pop(0) #favors smaller jumps by starting at left edge of curve
                if n != prevn:
                    cumsum += trans[prevn][n]+(sld[prevn][0]*(trans[prevn][n]/sld[prevn][1]))
                    prevn = n
        h[prevn] += 1
        paths += 1
    return [(x, h[x]/float(paths)) for x in h]
def sim_or_calc(n, m, a1, a2):
    """Resolve an n-vs-m battle between armies a1 and a2.

    Uses Monte Carlo simulation (10000 runs) for large battles and exact
    enumeration otherwise; returns (terminal_state, probability) pairs.
    """
    weighted = weight_transitions(a1, a2, make_transitions(n, m))
    if n + m > 14:
        return simulate(make_grid(n, m), weighted, 10000)
    return start_pct(make_grid(n, m), weighted)
####
#opening fire - under construction
####
#bombardment: you can just do prb.a_minus(target) for each of the cells; weight the outcomes by probability of attrition
#sneak attacks: the submarines stay in the fight, but don't shoot simultaneously with the others until round 2
#aa: non a_minus() subtractions (bomber may go down even if a fighter is present) -> this can be handled by setting up 2 opening fire rounds, one for fighters, one for bombers
#armies could actually be represented as lists containing two lists, one for opening fire, one for later.
#assumed: pre-processing of units into appropriate targets, signal for opening fire
# presumably armies could be broken into sub-armies according to what they target/the enemy targets, and casualties could be calculated on those sub-armies before subtracting point-wise from the overall army.
#produce distribution of outcomes for one round
#calculate remainder of battle for each outcome (adjusting original armies accordingly)
#weight each battle outcome by prior distribution
#sum all weighted battle outcomes
def revive(excluded, core):
    """Element-wise sum: add previously excluded units back into the core army."""
    return [units + excluded[i] for i, units in enumerate(core)]
def subs_sneak_strike(subs_attack, subs_defend, planes_attack, planes_defend, a1, a2): #this implements a lot of what is needed for fully accurate naval-air combat
    """Resolve a battle that opens with a simultaneous submarine sneak strike.

    subs_*/planes_* are army sub-vectors for each side's submarines and
    planes; a1/a2 are the full armies. Subs fire first (planes are immune
    to their hits), the survivors respond, then the remaining battle is
    handed to sim_or_calc. Returns (outcome_state, probability) pairs
    aggregated over all opening-fire branches. Relies on the external `prb`
    module for the binomial math.
    """
    subs_attack_probs = prb.binomial_joint(*[(subs_attack[i], i/float(len(subs_attack))) for i in range(len(subs_attack))])
    subs_defend_probs = prb.binomial_joint(*[(subs_defend[i], i/float(len(subs_defend))) for i in range(len(subs_defend))])
    h = {}
    for i in range(len(subs_attack_probs)):
        for j in range(len(subs_defend_probs)):
            # Branch 1: the sneak strike alone already decides the battle
            # (one side is wiped out and the other has no planes to mop up).
            if ((i >= sum(a2) and (not any(planes_defend))) or (j >= sum(a1) and (not any(planes_attack)))) and (sum(a_minus(j, *a1)),sum(a_minus(i, *a2))) not in h:
                h[(sum(a_minus(j, *a1)),sum(a_minus(i, *a2)))] = prb.product(*([subs_attack_probs[i]]+[subs_defend_probs[j]]))
            elif ((i >= sum(a2) and (not any(planes_defend))) or (j >= sum(a1) and (not any(planes_attack)))) and (sum(a_minus(j, *a1)),sum(a_minus(i, *a2))) in h:
                h[(sum(a_minus(j, *a1)),sum(a_minus(i, *a2)))] += prb.product(*([subs_attack_probs[i]]+[subs_defend_probs[j]]))
            else:
                # Branch 2: apply sub hits while shielding planes, then let
                # the non-sub survivors return fire before the main battle.
                alt1 = losses(subs_attack, #4 remove subs
                              revive(planes_attack, #3 add planes back in
                                     a_minus(j, #2 apply defender's strike
                                             *losses(planes_attack, [x for x in a1])))) #1 separate planes from legitimate targets of strike ... planes vector needs to be length-6
                alt2 = losses(subs_defend, revive(planes_defend, a_minus(i, *losses(planes_defend, [x for x in a2]))))
                attack_core_probs = prb.binomial_joint(*[(alt1[k], k/len(alt1)) for k in range(len(alt1))]) #5 calculate damage dealt by non-subs
                defend_core_probs = prb.binomial_joint(*[(alt2[k], k/len(alt2)) for k in range(len(alt2))])
                for m in range(len(attack_core_probs)):
                    for n in range(len(defend_core_probs)):
                        attack_survivors = a_minus(n, #7 apply defender's post-strike response
                                                   *revive(subs_attack, alt1)) #6 add subs back in
                        defend_survivors = a_minus(m, *revive(subs_defend, alt2))
                        #h.append((sim_or_calc(sum(attack_survivors), sum(defend_survivors), attack_survivors, defend_survivors), subs_attack_probs[i]*attack_core_probs[m]*subs_defend_probs[j]*defend_core_probs[n])) #subs-air distinction should not be erased
                        if any(attack_survivors) and any(defend_survivors): outcome = [(x[0], x[1]*subs_attack_probs[i]*subs_defend_probs[j]*attack_core_probs[m]*defend_core_probs[n]) for x in sim_or_calc(sum(attack_survivors), sum(defend_survivors), attack_survivors, defend_survivors)] #subs-air distinction should not be erased
                        else: outcome = [((sum(attack_survivors), sum(defend_survivors)), prb.product(*((subs_attack_probs[i],attack_core_probs[m],subs_defend_probs[j],defend_core_probs[n]))))]
                        # Aggregate weighted outcomes across all branches.
                        for o in outcome:
                            if o[0] not in h: h[o[0]] = o[1]
                            else: h[o[0]] += o[1]
    return [(x, h[x]) for x in h]
def embedded_battle(prior_outcomes1, prior_outcomes2, a1, a2): #openers1/2, core1/2
    """Fight the core battle for every combination of prior-stage outcomes.

    ``prior_outcomes1``/``prior_outcomes2`` are sequences of opening-fire
    outcome lists, one list per opening phase; each outcome is a tuple of
    (casualties inflicted, [vector of eligible targets]).

    Returns the sim_or_calc() results in the same cross-product order that
    weight_outcomes() later iterates.
    """
    #2 or more, use a for... no reason to just have a prior stage of battle, could have entire sequences of prior battles
    h = []
    for p in it.product(*prior_outcomes1): #tuples of (prior casualties inflicted, [vector of targets]) for a1
        alt2 = [x for x in a2]
        for x in p:
            alt2 = losses(losses(a_minus(x[0], *x[1]), x[1]), alt2) #figure out how the targets have been depleted, then remove that much from the overall targeted army
        for q in it.product(*prior_outcomes2): #these could come precompiled as arguments...
            alt1 = [z for z in a1]
            for y in q: alt1 = losses(losses(a_minus(y[0], *y[1]), y[1]), alt1)
            # Resolve the main battle with the depleted forces.
            h.append(sim_or_calc(sum(alt1), sum(alt2), alt1, alt2))
    return h
def weight_outcomes(prior_probs1, prior_probs2, *outcomes):
    """Weight each embedded-battle result by the probability of its priors.

    ``outcomes[i]`` must correspond to the i-th combination produced by
    iterating it.product(*prior_probs1) x it.product(*prior_probs2) -- the
    same order embedded_battle() emits its results in.

    Returns (outcome, total probability) pairs.
    """
    h = {} #working around unchecked assumption that all outcomes are in the same order in each list...
    i = 0
    for p in it.product(*prior_probs1):
        for q in it.product(*prior_probs2):
            # Multiply each conditional outcome probability by the joint
            # probability of this combination of prior results.
            for x in outcomes[i]:
                if x[0] not in h: h[x[0]]= prb.product(*((x[1],)+q+p))
                else: h[x[0]] += prb.product(*((x[1],)+q+p))
            i += 1
    return [(x, h[x]) for x in h]
if __name__ == "__main__":
    # Demo scenario: attacker a1 vs defender a2, with one opening-fire
    # phase per side (a2's single shot; a1's fighters and bomber).
    a1 = [0,1,0,2,1,0]
    a2 = [0,0,2,0,0,0]
    a1fig = [0,0,0,2,0,0]
    a1bom = [0,0,0,0,1,0]
    open1 = ([0,0,0,0,1,0], a2)
    open2 = ([0,sum(a1fig),0,0,0,0], a1fig)
    open2b = ([0,sum(a1bom),0,0,0,0], a1bom)
    open1probs = [
        prb.binomial_joint(*[(open1[0][i], i/float(len(open1[0]))) \
                             for i in range(len(open1[0]))])
        ] #allows fully general number of die faces. technically, we never have mixed probabilities in opening fire, but since it is not known ahead of time, it is easier to use binomial_joint() degeneratively
    open2probs = [
        prb.binomial_joint(*[(open2[0][i], i/float(len(open2[0]))) \
                             for i in range(len(open2[0]))]),
        prb.binomial_joint(*[(open2b[0][i], i/float(len(open2b[0]))) \
                             for i in range(len(open2b[0]))])
        ]
    # Fight every embedded-battle combination and print the weighted results.
    pprint.pprint_b(*weight_outcomes(open1probs, open2probs, *embedded_battle([[(i, open1[1]) for i in range(len(open1probs[0]))]], [[(i, open2[1]) for i in range(len(open2probs[0]))], [(i, open2b[1]) for i in range(len(open2probs[1]))]], a1, a2)))
| StarcoderdataPython |
1628931 | <reponame>ABBARNABIL/Turing-Machine<gh_stars>0
# V1.2 2019/09/19 <NAME>
import argparse
import curses
from time import sleep
class UI_Curses:
    """Interactive curses front-end for a Turing-machine simulator.

    Renders every tape of ``sim`` around its head position and drives the
    simulation from keyboard input:
      +/-  speed up / slow down,  p pause,  e run unthrottled,
      b step backward,  r restart,  q quit.
    """
    def __init__(self, sim):
        self.sim = sim
        # curses.wrapper sets up/tears down the terminal and calls self.term.
        curses.wrapper(self.term)
    def yx(self, p, t):
        """Map tape position ``p`` of tape ``t`` to a (row, col) screen cell,
        wrapping long tapes across several rows centred on the screen."""
        p += self.COLS // 2
        return (p // self.COLS) * (self.sim.am.nb_tapes + 1) + t + self.LINES // 2, p % self.COLS
    def print_tapes(self):
        """Draw every tape: left stack, highlighted head symbol, right stack.
        curses.error (drawing off-screen) is deliberately ignored."""
        for p, l, h, r, t in zip(self.sim.tape.pos, self.sim.tape.stacks[0], self.sim.tape.head, self.sim.tape.stacks[1], range(self.sim.am.nb_tapes)):
            k = p - 1
            for c in reversed(l):
                try:
                    self.stdscr.addstr(*self.yx(k, t), c, curses.A_NORMAL | self.color_pair(t))
                except curses.error:
                    pass
                k -= 1
            try:
                # The head cell is drawn in reverse video.
                self.stdscr.addstr(*self.yx(p, t), h, curses.A_REVERSE | self.color_pair(t))
            except curses.error:
                pass
            k = p + 1
            for c in reversed(r):
                try:
                    self.stdscr.addstr(*self.yx(k, t), c, curses.A_NORMAL | self.color_pair(t))
                except curses.error:
                    pass
                k += 1
    def print_state(self):
        """Show the current state (top-left), step count (top-right) and,
        once finished, the simulation result on the second row."""
        self.stdscr.addstr(0, 0, self.sim.state, curses.A_BOLD)
        self.stdscr.addstr(0, self.COLS - len(str(self.sim.steps)), str(self.sim.steps), curses.A_BOLD)
        if self.sim.result is not None:
            self.stdscr.addstr(1, 0, self.sim.result, curses.A_REVERSE)
    def term(self, stdscr):
        """Main curses loop: draw, poll the keyboard, advance the simulator."""
        self.stdscr = stdscr
        self.can_use_color = curses.can_change_color()
        if not self.can_use_color:
            self.color_pair = lambda t: 0
        else:
            self.color_pair = curses.color_pair
        self.LINES, self.COLS = stdscr.getmaxyx()
        if self.can_use_color:
            curses.start_color()
            curses.use_default_colors()
            try:
                # NOTE(review): curses forbids redefining color pair 0, so
                # init_pair(0, ...) raises on the very first iteration and
                # color support is disabled -- confirm whether the loop was
                # meant to start at pair 1.
                for i, c in enumerate([curses.COLOR_WHITE, curses.COLOR_CYAN, curses.COLOR_YELLOW, curses.COLOR_GREEN, curses.COLOR_MAGENTA, curses.COLOR_RED, curses.COLOR_BLUE]):
                    curses.init_pair(i, c, 0)
            except curses.error:
                self.can_use_color = False
                self.color_pair = lambda t: 0
        curses.curs_set(0)
        stdscr.nodelay(True)
        # delay: 0 = paused, >0 = seconds between steps, -1 = full speed.
        delay, c, back = 0, '', False
        while True:
            if delay >= 0 or self.sim.result:
                stdscr.clear()
                self.print_tapes()
                self.print_state()
                if delay == 0:
                    stdscr.addstr(1, self.COLS - 5, "PAUSE", curses.A_REVERSE | curses.A_BOLD | self.color_pair(5))
                if self.sim.result:
                    stdscr.addstr(
                        self.LINES - 1, 0, 'SIMULATION COMPLETED. Press b to go backward, r to restart, any to quit'[:self.COLS - 1])
                stdscr.refresh()
            # Block for a key while paused; poll without blocking otherwise.
            stdscr.nodelay(delay != 0)
            try:
                c = stdscr.getkey()
            except curses.error:
                c = None
            if c:
                if c == '+':
                    delay = 0.25 if delay == 0 else delay * 0.5
                elif c == '-':
                    delay = 0 if delay >= 2 else delay * 2
                elif c == 'p':
                    delay = 0
                elif c == 'q':
                    return
                elif c == 'e':
                    delay = -1
                elif c == 'b':
                    delay = 0
                    back = True
                elif c == 'r':
                    delay = 0
                    self.sim.reset()
                    continue
                elif c == 'KEY_RESIZE':
                    self.LINES, self.COLS = stdscr.getmaxyx()
                    continue
            if delay > 0:
                sleep(delay)
            if back:
                self.sim.back_step()
                back = False
                self.sim.result = None
            else:
                if self.sim.result is not None:
                    if delay == 0:
                        return
                    else:
                        delay = 0
                self.sim.step()
| StarcoderdataPython |
class NotFoundError(Exception):
    """Raised when a requested resource does not exist (HTTP 404)."""

    # HTTP status code associated with this error.
    code = 404
class ServerError(Exception):
    """Raised on an internal server failure (HTTP 500)."""

    # HTTP status code associated with this error.
    code = 500
| StarcoderdataPython |
1617971 | <filename>src/whoosh/codec/legacy.py
# Copyright 2011 <NAME>. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Matt Chaput.
from array import array
from struct import Struct
from whoosh.compat import load, xrange, array_frombytes
from whoosh.codec import base
from whoosh.codec.base import (deminimize_ids, deminimize_weights,
deminimize_values)
from whoosh.system import _INT_SIZE, _FLOAT_SIZE, IS_LITTLE
from whoosh.util import byte_to_length, utf8decode
try:
from zlib import decompress
except ImportError:
pass
# Old field lengths format
def load_old_lengths(obj, dbfile, doccount):
    """Read the legacy field-lengths file format into ``obj``.

    Populates ``obj.lengths[fieldname]`` with one length byte per document
    and synthesizes ``obj.totals`` (the old format never stored totals).
    Closes ``dbfile`` and returns ``obj``.
    """
    fieldcount = dbfile.read_ushort()  # Number of fields
    for _ in xrange(fieldcount):
        fieldname = dbfile.read_string().decode("utf-8")
        obj.lengths[fieldname] = dbfile.read_array("B", doccount)
        # Old format didn't store totals, so fake it by adding up the codes
        obj.totals[fieldname] = sum(byte_to_length(b) for b
                                    in obj.lengths[fieldname])
    dbfile.close()
    return obj
# Old block formats
def old_block_type(magic):
    """Return the posting-block class matching an on-disk magic header."""
    if magic == "Blk2":
        return Block2
    if magic == "\x0eB\xff\xff":
        return Block1
    raise Exception("Unknown block header %r" % magic)
class Block2(base.BlockBase):
    """Reader for the second-generation legacy posting-block format."""
    # Packed little-endian header layout; fields are unpacked positionally
    # in from_file() (header[4] and header[8:11] are unused here).
    _struct = Struct("<iBBcBiiffHBBB")
    @classmethod
    def from_file(cls, postfile, postingsize, stringids=False):
        """Read a block header from ``postfile`` and return a Block2
        positioned so that read_ids/read_weights/read_values can seek into
        the data section."""
        start = postfile.tell()
        block = cls(postingsize, stringids=stringids)
        block.postfile = postfile
        header = cls._struct.unpack(postfile.read(cls._struct.size))
        # header[0] is the byte length of the whole block.
        block.nextoffset = start + header[0]
        block.cmp = header[1]       # compression level
        block.count = header[2]     # number of postings
        block.idcode = header[3]    # typecode of the ID array
        block.idslen = header[5]
        block.wtslen = header[6]
        block.maxweight = header[7]
        block.maxlength = byte_to_length(header[11])
        block.minlength = byte_to_length(header[12])
        block.maxid = load(postfile) if stringids else postfile.read_uint()
        block.dataoffset = postfile.tell()
        return block
    def read_ids(self):
        """Decode and cache the posting IDs."""
        self.postfile.seek(self.dataoffset)
        string = self.postfile.read(self.idslen)
        self.ids = deminimize_ids(self.idcode, self.count, string,
                                  compression=self.cmp)
        return self.ids
    def read_weights(self):
        """Decode the posting weights; wtslen == 0 means all weights are 1.0."""
        if self.wtslen == 0:
            weights = [1.0] * self.count
        else:
            offset = self.dataoffset + self.idslen
            self.postfile.seek(offset)
            string = self.postfile.read(self.wtslen)
            weights = deminimize_weights(self.count, string,
                                         compression=self.cmp)
        return weights
    def read_values(self):
        """Decode the per-posting values; a zero posting size stores none."""
        postingsize = self.postingsize
        if postingsize == 0:
            return [None] * self.count
        else:
            offset = self.dataoffset + self.idslen + self.wtslen
            self.postfile.seek(offset)
            string = self.postfile.read(self.nextoffset - offset)
            return deminimize_values(postingsize, self.count, string, self.cmp)
class Block1(base.BlockBase):
    """Reader for the oldest legacy posting-block format."""
    # Packed big-endian header layout, unpacked positionally in from_file()
    # (header[0:3], header[8:10] are unused here).
    _struct = Struct("!BBHiHHBfffB")
    @classmethod
    def from_file(cls, postfile, stringids=False):
        """Read a block header from ``postfile`` and return a Block1
        positioned at the start of its data section."""
        pos = postfile.tell()
        block = cls(postfile, stringids=stringids)
        block.postfile = postfile
        header = cls._struct.unpack(postfile.read(cls._struct.size))
        # header[3] is the byte length of the whole block.
        block.nextoffset = pos + header[3]
        block.idslen = header[4]   # nonzero => IDs are zlib-compressed
        block.wtslen = header[5]   # nonzero => weights are zlib-compressed
        block.count = header[6]
        block.maxweight = header[7]
        block.minlength = byte_to_length(header[10])
        if stringids:
            block.maxid = utf8decode(postfile.read_string())[0]
        else:
            block.maxid = postfile.read_uint()
        block.dataoffset = postfile.tell()
        return block
    def read_ids(self):
        """Decode and cache the posting IDs; remembers where weights start."""
        postfile = self.postfile
        offset = self.dataoffset
        postcount = self.count
        postfile.seek(offset)
        if self.stringids:
            rs = postfile.read_string
            ids = [utf8decode(rs())[0] for _ in xrange(postcount)]
            newoffset = postfile.tell()
        elif self.idslen:
            # Compressed uints, stored big-endian on disk.
            ids = array("I")
            array_frombytes(ids, decompress(postfile.read(self.idslen)))
            if IS_LITTLE:
                ids.byteswap()
            newoffset = offset + self.idslen
        else:
            ids = postfile.read_array("I", postcount)
            newoffset = offset + _INT_SIZE * postcount
        self.ids = ids
        self.weights_offset = newoffset
        return ids
    def read_weights(self):
        """Decode the weights; remembers where the values section starts.
        A weights length of exactly 1 means no weights are stored."""
        postfile = self.postfile
        offset = self.weights_offset
        postfile.seek(offset)
        weightslen = self.wtslen
        postcount = self.count
        if weightslen == 1:
            weights = None
            newoffset = offset
        elif weightslen:
            weights = array("f")
            array_frombytes(weights, decompress(postfile.read(weightslen)))
            if IS_LITTLE:
                weights.byteswap()
            newoffset = offset + weightslen
        else:
            weights = postfile.get_array(offset, "f", postcount)
            newoffset = offset + _FLOAT_SIZE * postcount
        self.weights = weights
        self.values_offset = newoffset
        return weights
    def read_values(self):
        """Decode the per-posting value strings into ``self.values``."""
        postfile = self.postfile
        startoffset = self.values_offset
        endoffset = self.nextoffset
        postcount = self.count
        postingsize = self.postingsize
        if postingsize != 0:
            postfile.seek(startoffset)
            values_string = postfile.read(endoffset - startoffset)
            if self.wtslen:
                # Values string is compressed
                values_string = decompress(values_string)
            if postingsize < 0:
                # Pull the array of value lengths off the front of the string
                lengths = array("i")
                array_frombytes(lengths, values_string[:_INT_SIZE * postcount])
                values_string = values_string[_INT_SIZE * postcount:]
            # Chop up the block string into individual valuestrings
            if postingsize > 0:
                # Format has a fixed posting size, just chop up the values
                # equally
                values = [values_string[i * postingsize: i * postingsize + postingsize]
                          for i in xrange(postcount)]
            else:
                # Format has a variable posting size, use the array of lengths
                # to chop up the values.
                pos = 0
                values = []
                for length in lengths:
                    values.append(values_string[pos:pos + length])
                    pos += length
        else:
            # Format does not store values (i.e. Existence), just create fake
            # values
            values = (None,) * postcount
        self.values = values
| StarcoderdataPython |
3314566 | <filename>bsl/Z3/z3_smt.py<gh_stars>1-10
from z3 import *
import sys
import time
# ====== load file =====
def extract_edge(edge):
    """Parse a textual 'a,b' pair into a two-element list of ints."""
    parts = edge.split(",")
    assert len(parts) == 2, "ill-format edge: a,b"
    return [int(p) for p in parts]
def load_polyg(poly_f):
    """Parse a polygraph description file.

    Recognized line forms:
      n:<int>       -- number of nodes (must appear exactly once)
      e:<a>,<b>     -- a known edge a -> b
      c:<grp>|<grp> -- a constraint of two alternative edge groups, each
                       group a ';'-separated list of "a,b" edges
    Returns (n, edges, constraints).
    """
    with open(poly_f) as f:
        lines = f.readlines()
    n = 0
    edges = []
    constraints = []
    for line in lines:
        if line == "":
            continue
        elems = line.split(':')
        assert len(elems) == 2, "ill-format log"
        symbol = elems[0]
        content = elems[1]
        if symbol == "n":
            assert n==0, "multiple n in file"
            n = int(content)
        elif symbol == "e":
            e = extract_edge(content)
            edges.append(e)
        elif symbol == "c":
            str_groups = content.split("|")
            assert len(str_groups) == 2, "ill-format constraints, not two groups"
            con = []
            for str_group in str_groups:
                group = []
                str_edges = str_group.split(";")
                assert len(str_edges) >= 1, "ill-format constraints, empty constraint"
                for str_edge in str_edges:
                    group.append(extract_edge(str_edge))
                con.append(group)
            constraints.append(con)
        else:
            print("Line = %s" % line)
            assert False, "should never be here"
    return n, edges, constraints
# ====== main solver logic =====
def add_constraint_int(s, c, N, n):
    """Assert that exactly one of a constraint's two candidate edges holds.

    ``c`` is a pair of single-edge groups; the Xor forces the solver to
    orient exactly one of the two orderings over the node variables ``N``.
    """
    assert len(c) == 2, "ill-formatd choice"
    left = c[0]
    right = c[1]
    assert len(left) == 1, "should be original constraint"
    assert len(right) == 1, "should be original constraint"
    l_edge = left[0]
    r_edge = right[0]
    s.add(Xor(N[l_edge[0]] < N[l_edge[1]],
              N[r_edge[0]] < N[r_edge[1]]))
def encode_polyg_linear(s, n, edges, constraints):
    """Encode the polygraph acyclicity problem over integer node variables.

    Each node i gets an integer variable N[i]; an edge (a, b) forces
    N[a] < N[b]; each constraint contributes an exclusive-or of its two
    candidate edges; and all node values must be pairwise distinct so that
    a satisfying assignment is a strict total order.

    :param s: z3 Solver receiving the assertions
    :param n: number of nodes
    :param edges: list of [src, dst] pairs
    :param constraints: list of two-group constraint choices
    """
    # N is nodes; node as an integer
    N = IntVector('n', n)
    # add edges
    for e in edges:
        assert len(e) == 2, "ill-formatd edge"
        # e[0] -> e[1]
        s.add(N[e[0]] < N[e[1]])
    # add constraints
    for c in constraints:
        add_constraint_int(s, c, N, n)
    # Acyclicity: all node values must be pairwise distinct.  The previous
    # form, s.add([Distinct(N[i]) for i in range(n)]), created a vacuous
    # one-argument Distinct per node; Distinct must receive all variables
    # at once to assert pairwise inequality.
    s.add(Distinct(*N))
# === main logic ===
def main(poly_f):
    """Load a polygraph file, encode it for z3, and report timing.

    Fails (via assert) if the solver proves the constraints unsatisfiable.
    """
    n, edges, constraints = load_polyg(poly_f)
    print("n=%d"%n)
    s = Solver()
    t1 = time.time()
    encode_polyg_linear(s, n, edges, constraints)
    print("finish construction of clauses")
    t2 = time.time()
    ret = s.check()
    print(ret)
    # sat or unknown both count as success; only a proven unsat fails.
    assert ret != unsat, "must be SAT or UNKNOWN, but failed!"
    t3 = time.time()
    print("clause construction: %.fms" % ((t2-t1)*1000))
    print("solve constraints: %.fms" % ((t3-t2)*1000))
    print("Overall runtime = %dms" % int((t3-t1)*1000))
    # Flip to True to dump the satisfying model.
    if (False):
        m = s.model()
        for d in m.decls():
            print("%s = %s" % (d.name(), m[d]))
def usage_exit():
    """Print the command-line usage message and exit with status 1."""
    print("Usage: z3_smt.py <polyg_file>")
    # sys.exit is preferred over the site-provided exit() builtin, which is
    # absent when Python runs without the site module (python -S, frozen apps).
    sys.exit(1)
if __name__ == "__main__":
    # Entry point: expects exactly one argument, the polygraph file path.
    if len(sys.argv) != 2:
        usage_exit()
    main(sys.argv[1])
| StarcoderdataPython |
146866 | #/usr/bin/env python
# encoding: utf-8
import os
import sys
import atexit
import json
import time
import tempfile
import wave
import traceback
import urllib2
from subprocess import check_output
from Queue import Queue, Empty
import numpy as np
import pyaudio
class Spectrum(object):
FORMAT = pyaudio.paFloat32
CHANNELS = 1
FRAME_SIZE = 512
RATE = 16000 # Hz
def frames(self, n):
return int(n*self.RATE/self.FRAME_SIZE)
def __init__(self):
self.speak = Queue()
self.pa = pyaudio.PyAudio()
self.last_samples = None
atexit.register(self.pa.terminate)
# fft結果のインデックスに対応する周波数値の計算。今回使わなかった。
# self.freq = np.fft.fftfreq(self.FRAME_SIZE, d=self.RATE**-1)
self.begin = self.FRAME_SIZE*3/8
self.end = self.FRAME_SIZE/2
self.fque = np.zeros((self.frames(1.0), self.end-self.begin), np.float32)
self.buff = np.zeros((self.frames(5.0), 512), np.float32)
def fft(self, samples):
win = np.hanning(len(samples))
res = np.fft.fftshift(np.fft.fft(win*samples))
return 20*np.log10(np.abs(res))
def callback(self, in_data, frame_count, time_info, status):
try:
data = np.fromstring(in_data, np.float32)
self.buff[0] = data
self.buff = np.roll(self.buff, -1, axis=0)
if self.status == 0: # 切り出しを始めたら環境音成分平均値の更新は一時停止。
self.fque = np.roll(self.fque, 1, axis=0)
self.fque[0] = self.fft(data)[self.begin:self.end]
# これが環境音成分の平均値
average = np.average(self.fque, axis=0)
values = self.fque[0] - average # fft結果から差っ引く
volume = np.average(values)
if self.status:
self.count += 1
else:
self.count == 0
if self.status < 5:
if volume>5:
self.status += 1
else:
self.status = 0
elif self.status == 5:
if volume<5:
self.status += 1
elif self.status < 15:
if volume<5:
self.status += 1
else:
self.status -= 1
else:
self.status = 0
self.speak.put(self.buff[-self.count-2:])
if self.debug:
pr = [min(9, max(0, int(v/10))) for v in values]
print ''.join([str(i) for i in pr]), self.status
return (in_data, self.recording)
except KeyboardInterrupt:
self.recording = pyaudio.paAbort
def start(self, debug=False):
self.debug = debug
self.status = 0
self.count = 0
self.recording = pyaudio.paContinue
self.stream = self.pa.open(format = self.FORMAT,
channels = self.CHANNELS,
rate = self.RATE,
input = True,
output = False,
frames_per_buffer = self.FRAME_SIZE,
stream_callback = self.callback)
self.stream.start_stream()
def stop(self):
self.recording = pyaudio.paAbort
while self.stream.is_active():
time.sleep(0.5)
self.stream.start_stream()
self.stream.close()
# Google's (now defunct) v1 speech recognition endpoint, Japanese locale.
RECOGNIZE_URL = "https://www.google.com/speech-api/v1/recognize?xjerr=1&client=chromium&lang=ja-JP"
# RECOGNIZE_URL += "&maxresult=10" # Appending this returns the top 10 candidates.
FLAC_TOOL = 'flac'
def recognize(fpath):
    """POST a FLAC file to the Google speech API and return the parsed JSON.

    NOTE(review): the input file handle is never closed explicitly; it is
    only reclaimed by garbage collection.
    """
    flac = open(fpath,"rb").read()
    header = {'Content-Type' : 'audio/x-flac; rate=16000'}
    req = urllib2.Request(RECOGNIZE_URL, flac, header)
    data = urllib2.urlopen(req)
    params = json.loads(data.read())
    return params
def main(spe):
    """Consume detected utterances from ``spe.speak`` and print hypotheses.

    Each utterance is written to a temp WAV (16 kHz mono 16-bit), converted
    to FLAC with the external `flac` tool, and sent to the recognizer.
    Runs until Ctrl-C; any other error is logged and retried after a pause.
    """
    while 1:
        try:
            buff = spe.speak.get(timeout=3)
            with tempfile.NamedTemporaryFile(suffix='.wav') as fp:
                f = wave.open(fp, 'w')
                f.setnchannels(1)
                f.setsampwidth(2)
                f.setframerate(16000)
                # Scale the float32 samples to int16 PCM.
                f.writeframes(np.int16(buff*32768).tostring())
                f.close()
                check_output([FLAC_TOOL, '-sf', fp.name])
                output = os.path.splitext(fp.name)[0] + '.flac'
                res = recognize(output)
                for i in res.get('hypotheses', []):
                    print i['confidence'], i['utterance']
        except KeyboardInterrupt:
            raise SystemExit(0)
        except Empty:
            # No utterance within the timeout; poll again.
            pass
        except:
            # Deliberate best-effort loop: log anything else and keep going.
            traceback.print_exc()
            time.sleep(5)
if __name__=='__main__':
    # Start capture (debug display off) and recognize until interrupted.
    spe = Spectrum()
    spe.start(False)
    try:
        main(spe)
    finally:
        spe.stop()
| StarcoderdataPython |
3359864 | <reponame>kdschlosser/home-assistant
"""
Support for retrieving status info from Google Wifi/OnHub routers.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.google_wifi/
"""
import logging
from datetime import timedelta
import voluptuous as vol
import requests
from homeassistant.util import dt
import homeassistant.helpers.config_validation as cv
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_NAME, CONF_HOST, CONF_MONITORED_CONDITIONS, STATE_UNKNOWN)
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
# Sensor attribute keys exposed by this platform.
ATTR_CURRENT_VERSION = 'current_version'
ATTR_LAST_RESTART = 'last_restart'
ATTR_LOCAL_IP = 'local_ip'
ATTR_NEW_VERSION = 'new_version'
ATTR_STATUS = 'status'
ATTR_UPTIME = 'uptime'
DEFAULT_HOST = 'testwifi.here'
DEFAULT_NAME = 'google_wifi'
ENDPOINT = '/api/v1/status'
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1)
# Each entry maps a sensor key to:
#   [path into the router's status JSON, unit of measurement, frontend icon]
MONITORED_CONDITIONS = {
    ATTR_CURRENT_VERSION: [
        ['software', 'softwareVersion'],
        None,
        'mdi:checkbox-marked-circle-outline'
    ],
    ATTR_NEW_VERSION: [
        ['software', 'updateNewVersion'],
        None,
        'mdi:update'
    ],
    ATTR_UPTIME: [
        ['system', 'uptime'],
        'days',
        'mdi:timelapse'
    ],
    ATTR_LAST_RESTART: [
        ['system', 'uptime'],
        None,
        'mdi:restart'
    ],
    ATTR_LOCAL_IP: [
        ['wan', 'localIpAddress'],
        None,
        'mdi:access-point-network'
    ],
    ATTR_STATUS: [
        ['wan', 'online'],
        None,
        'mdi:google'
    ]
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
    vol.Optional(CONF_MONITORED_CONDITIONS,
                 default=list(MONITORED_CONDITIONS)):
    vol.All(cv.ensure_list, [vol.In(MONITORED_CONDITIONS)]),
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
})
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Google Wifi sensor."""
    name = config.get(CONF_NAME)
    host = config.get(CONF_HOST)
    monitored = config.get(CONF_MONITORED_CONDITIONS)
    # One shared API object feeds every per-condition sensor entity.
    api = GoogleWifiAPI(host, monitored)
    sensors = [GoogleWifiSensor(api, name, condition) for condition in monitored]
    add_entities(sensors, True)
class GoogleWifiSensor(Entity):
    """Representation of a Google Wifi sensor."""

    def __init__(self, api, name, variable):
        """Initialize a Google Wifi sensor for one monitored condition."""
        self._api = api
        self._name = name
        self._state = None
        _path, units, icon = MONITORED_CONDITIONS[variable]
        self._var_name = variable
        self._var_units = units
        self._var_icon = icon

    @property
    def name(self):
        """Return the name of the sensor."""
        return '%s_%s' % (self._name, self._var_name)

    @property
    def icon(self):
        """Icon to use in the frontend, if any."""
        return self._var_icon

    @property
    def unit_of_measurement(self):
        """Return the unit the value is expressed in."""
        return self._var_units

    @property
    def available(self):
        """Return availability of Google Wifi API."""
        return self._api.available

    @property
    def state(self):
        """Return the state of the device."""
        return self._state

    def update(self):
        """Get the latest data from the Google Wifi API."""
        self._api.update()
        self._state = self._api.data[self._var_name] if self.available else None
class GoogleWifiAPI:
    """Get the latest data and update the states."""
    def __init__(self, host, conditions):
        """Initialize the data object and fetch an initial status snapshot."""
        uri = 'http://'
        resource = "{}{}{}".format(uri, host, ENDPOINT)
        # Prepared once; re-sent on every update().
        self._request = requests.Request('GET', resource).prepare()
        self.raw_data = None
        self.conditions = conditions
        self.data = {
            ATTR_CURRENT_VERSION: STATE_UNKNOWN,
            ATTR_NEW_VERSION: STATE_UNKNOWN,
            ATTR_UPTIME: STATE_UNKNOWN,
            ATTR_LAST_RESTART: STATE_UNKNOWN,
            ATTR_LOCAL_IP: STATE_UNKNOWN,
            ATTR_STATUS: STATE_UNKNOWN
        }
        self.available = True
        self.update()
    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Get the latest data from the router.

        Marks the API unavailable (and clears raw_data) on connection or
        JSON-decoding failures.
        """
        try:
            with requests.Session() as sess:
                response = sess.send(self._request, timeout=10)
            self.raw_data = response.json()
            self.data_format()
            self.available = True
        except (ValueError, requests.exceptions.ConnectionError):
            _LOGGER.warning("Unable to fetch data from Google Wifi")
            self.available = False
            self.raw_data = None
    def data_format(self):
        """Format raw data into easily accessible dict."""
        for attr_key in self.conditions:
            # value = [[json path], unit, icon]; only the path is used here.
            value = MONITORED_CONDITIONS[attr_key]
            try:
                primary_key = value[0][0]
                sensor_key = value[0][1]
                if primary_key in self.raw_data:
                    sensor_value = self.raw_data[primary_key][sensor_key]
                    # Format sensor for better readability
                    if (attr_key == ATTR_NEW_VERSION and
                            sensor_value == '0.0.0.0'):
                        sensor_value = 'Latest'
                    elif attr_key == ATTR_UPTIME:
                        # Convert seconds to days.
                        sensor_value = round(sensor_value / (3600 * 24), 2)
                    elif attr_key == ATTR_LAST_RESTART:
                        last_restart = (
                            dt.now() - timedelta(seconds=sensor_value))
                        sensor_value = last_restart.strftime(
                            '%Y-%m-%d %H:%M:%S')
                    elif attr_key == ATTR_STATUS:
                        if sensor_value:
                            sensor_value = 'Online'
                        else:
                            sensor_value = 'Offline'
                    elif attr_key == ATTR_LOCAL_IP:
                        if not self.raw_data['wan']['online']:
                            sensor_value = STATE_UNKNOWN
                    self.data[attr_key] = sensor_value
            except KeyError:
                _LOGGER.error("Router does not support %s field. "
                              "Please remove %s from monitored_conditions",
                              sensor_key, attr_key)
                self.data[attr_key] = STATE_UNKNOWN
| StarcoderdataPython |
103510 | #!/usr/bin/python
# Copyright (c) 2018
# Call file:
# python ./generate_cpp17.py > TupleConversions/Private/structurebindings_generated.h
############################################################################################################################
import sys
import string
# Skipping some letters that may produce keywords or are hard to read, or shadow template parameters
# NOTE: this script is Python 2 (print statements, xrange).
ascii_letters = string.ascii_letters.replace("o", "").replace("O", "").replace("i", "").replace("I", "").replace("T", "")
# C++ text emitted verbatim before the generated overloads.
PROLOGUE = """#ifndef STRUCTURE_BINDING_GENERATED_HPP
#define STRUCTURE_BINDING_GENERATED_HPP
#pragma once
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////// This is an auto generated header. Modify generate_cpp17.py instead. ////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#include <tuple>
#include <type_traits>
/**
 * @file
 * @brief Contains detail functions for converting struct to std::tuple
 *
 * Contains detail functions for converting struct to std::tuple.
 * This file is auto-generated by generate_cpp17.py
 * @warning Dont use this functions directly!
 * @ingroup TupleConversions
 */
namespace TupleConversions::Detail {
template <class T>
constexpr auto toTupleImpl(
    T&&, std::integral_constant<std::size_t, 0>
) noexcept
{
    return std::make_tuple();
}
template <class T>
constexpr auto toTupleImpl(
    T&& val, std::integral_constant<std::size_t, 1>
) noexcept
{
    auto&& [a] = std::forward<T>(val);
    return std::make_tuple(a);
}
"""
############################################################################################################################
# C++ text emitted verbatim after the generated overloads.
EPILOGUE = """
} // TupleConversions::Detail
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#endif // BOOST_PFR_DETAIL_CORE17_GENERATED_HPP
"""
############################################################################################################################
# `indexes` accumulates the comma-separated structured-binding identifier
# list ("a, b, c, ..."); each overload reuses it for both the binding and
# the make_tuple call.
indexes = " a"
print PROLOGUE
# Number of toTupleImpl overloads to generate (overridable via argv[1]).
funcs_count = 100 if len(sys.argv) == 1 else int(sys.argv[1])
max_args_on_a_line = 10#len(ascii_letters)
for i in xrange(1, funcs_count):
    if i % max_args_on_a_line == 0:
        indexes += ",\n "
    else:
        indexes += ", "
    # Two-letter identifiers once the single letters run out.
    if i >= max_args_on_a_line:
        indexes += ascii_letters[i / max_args_on_a_line - 1]
    indexes += ascii_letters[i % max_args_on_a_line]
    print "template <class T>"
    print "constexpr auto toTupleImpl("
    print " T&& val, std::integral_constant<std::size_t, " + str(i + 1) + ">"
    print " ) noexcept"
    print "{"
    print " auto&& ["
    print indexes
    print " ] = std::forward<T>(val);"
    print ""
    print " return std::make_tuple("
    print indexes
    print " );"
    print "}\n"
# Fallback overload: static_assert for field counts beyond the generated set.
print "template <class T, size_t N>"
print "constexpr auto toTupleImpl("
print " T&& val, std::integral_constant<std::size_t, N>"
print " ) noexcept"
print "{"
print " static_assert(N < " + str(funcs_count + 1) + ");"
print "}"
print EPILOGUE
| StarcoderdataPython |
142214 | <gh_stars>1-10
from . import adapters
from . import mol_toolkit

# Import the wrapper for each cheminformatics backend that is installed.
if mol_toolkit.HAS_OE:
    from . import cp_openeye
if mol_toolkit.HAS_RDK:
    from . import cp_rdk

if not mol_toolkit.HAS_OE and not mol_toolkit.HAS_RDK:
    # BUGFIX: the implicit string concatenation previously produced
    # "...installedChemPer..." with no separator.
    raise Exception("Neither OpenEye or RDKit is installed. "
                    "ChemPer requires at least one of these toolkits")
| StarcoderdataPython |
1752325 | #
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""
The ios_static_routes class
It is in this file where the current configuration (as dict)
is compared to the provided configuration (as dict) and the command set
necessary to bring the current configuration to it's desired end-state is
created
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import copy
from ansible.module_utils.six import iteritems
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.cfg.base import (
ConfigBase,
)
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.utils import (
to_list,
)
from ansible_collections.cisco.ios.plugins.module_utils.network.ios.facts.facts import (
Facts,
)
from ansible_collections.cisco.ios.plugins.module_utils.network.ios.utils.utils import (
new_dict_to_set,
validate_n_expand_ipv4,
filter_dict_having_none_value,
)
class Static_Routes(ConfigBase):
"""
The ios_static_routes class
"""
gather_subset = ["!all", "!min"]
gather_network_resources = ["static_routes"]
    def __init__(self, module):
        # Delegate all module/connection bookkeeping to ConfigBase.
        super(Static_Routes, self).__init__(module)
def get_static_routes_facts(self, data=None):
""" Get the 'facts' (the current configuration)
:rtype: A dictionary
:returns: The current configuration as a dictionary
"""
facts, _warnings = Facts(self._module).get_facts(
self.gather_subset, self.gather_network_resources, data=data
)
static_routes_facts = facts["ansible_network_resources"].get(
"static_routes"
)
if not static_routes_facts:
return []
return static_routes_facts
    def execute_module(self):
        """ Execute the module and build the result dictionary.

        Action states (merged/replaced/overridden/deleted) gather facts,
        compute and possibly push commands; "rendered", "parsed" and
        "gathered" are read-only reporting states.
        :rtype: A dictionary
        :returns: The result from module execution
        """
        result = {"changed": False}
        commands = list()
        warnings = list()
        # Only action states need the device's current configuration.
        if self.state in self.ACTION_STATES:
            existing_static_routes_facts = self.get_static_routes_facts()
        else:
            existing_static_routes_facts = []
        if self.state in self.ACTION_STATES or self.state == "rendered":
            commands.extend(self.set_config(existing_static_routes_facts))
        # Push commands to the device unless running in check mode.
        if commands and self.state in self.ACTION_STATES:
            if not self._module.check_mode:
                self._connection.edit_config(commands)
            result["changed"] = True
        if self.state in self.ACTION_STATES:
            result["commands"] = commands
        if self.state in self.ACTION_STATES or self.state == "gathered":
            changed_static_routes_facts = self.get_static_routes_facts()
        elif self.state == "rendered":
            result["rendered"] = commands
        elif self.state == "parsed":
            # "parsed" interprets a user-supplied running config offline.
            running_config = self._module.params["running_config"]
            if not running_config:
                self._module.fail_json(
                    msg="value of running_config parameter must not be empty for state parsed"
                )
            result["parsed"] = self.get_static_routes_facts(
                data=running_config
            )
        else:
            changed_static_routes_facts = []
        if self.state in self.ACTION_STATES:
            result["before"] = existing_static_routes_facts
            if result["changed"]:
                result["after"] = changed_static_routes_facts
        elif self.state == "gathered":
            result["gathered"] = changed_static_routes_facts
        result["warnings"] = warnings
        return result
def set_config(self, existing_static_routes_facts):
""" Collect the configuration from the args passed to the module,
collect the current configuration (as a dict from facts)
:rtype: A list
:returns: the commands necessary to migrate the current configuration
to the desired configuration
"""
want = self._module.params["config"]
have = existing_static_routes_facts
resp = self.set_state(want, have)
return to_list(resp)
def set_state(self, want, have):
""" Select the appropriate function based on the state provided
:param want: the desired configuration as a dictionary
:param have: the current configuration as a dictionary
:rtype: A list
:returns: the commands necessary to migrate the current configuration
to the desired configuration
"""
state = self._module.params["state"]
if (
state in ("overridden", "merged", "replaced", "rendered")
and not want
):
self._module.fail_json(
msg="value of config parameter must not be empty for state {0}".format(
state
)
)
commands = []
if state == "overridden":
commands = self._state_overridden(want, have)
elif state == "deleted":
commands = self._state_deleted(want, have)
elif state == "merged" or state == "rendered":
commands = self._state_merged(want, have)
elif state == "replaced":
commands = self._state_replaced(want, have)
return commands
    def _state_replaced(self, want, have):
        """ The command generator when state is replaced

        For every wanted (afi, dest) pair that also exists on the device,
        delete the preconfigured IPv4 route first and re-add the wanted one
        (IPv6 routes can be updated in place); wanted routes with no device
        counterpart are simply added.

        :rtype: A list
        :returns: the commands necessary to migrate the current configuration
        to the desired configuration
        """
        commands = []
        # Drill each iteration of want n have and then based on dest and afi type comparison take config call
        for w in want:
            for addr_want in w.get("address_families"):
                for route_want in addr_want.get("routes"):
                    # check = True once a matching (dest, afi) was found in have
                    check = False
                    for h in have:
                        if h.get("address_families"):
                            for addr_have in h.get("address_families"):
                                for route_have in addr_have.get("routes"):
                                    if (
                                        route_want.get("dest")
                                        == route_have.get("dest")
                                        and addr_want["afi"]
                                        == addr_have["afi"]
                                    ):
                                        check = True
                                        # Flatten each wanted hop dict into a set of
                                        # (key, value) tuples for diffing in _set_config.
                                        have_set = set()
                                        new_hops = []
                                        for each in route_want.get(
                                            "next_hops"
                                        ):
                                            want_set = set()
                                            new_dict_to_set(
                                                each, [], want_set, 0
                                            )
                                            new_hops.append(want_set)
                                        new_dict_to_set(
                                            addr_have, [], have_set, 0
                                        )
                                        # Check if the have dict next_hops value is diff from want dict next_hops
                                        have_dict = filter_dict_having_none_value(
                                            route_want.get("next_hops")[0],
                                            route_have.get("next_hops")[0],
                                        )
                                        # update the have_dict with forward_router_address
                                        have_dict.update(
                                            {
                                                "forward_router_address": route_have.get(
                                                    "next_hops"
                                                )[
                                                    0
                                                ].get(
                                                    "forward_router_address"
                                                )
                                            }
                                        )
                                        # updating the have_dict with next_hops val that's not None
                                        # NOTE(review): new_have_dict (and have_dict feeding it)
                                        # is never read afterwards -- looks like dead code;
                                        # confirm filter_dict_having_none_value has no side
                                        # effects before removing.
                                        new_have_dict = {}
                                        for k, v in have_dict.items():
                                            if v is not None:
                                                new_have_dict.update({k: v})
                                        # Set the new config from the user provided want config
                                        cmd = self._set_config(
                                            w,
                                            h,
                                            addr_want,
                                            route_want,
                                            route_have,
                                            new_hops,
                                            have_set,
                                        )
                                        if cmd:
                                            # since inplace update isn't allowed for static routes, preconfigured
                                            # static routes needs to be deleted before the new want static routes changes
                                            # are applied
                                            clear_route_have = copy.deepcopy(
                                                route_have
                                            )
                                            # inplace update is allowed in case of ipv6 static routes, so not deleting it
                                            # before applying the want changes
                                            if ":" not in route_want.get(
                                                "dest"
                                            ):
                                                commands.extend(
                                                    self._clear_config(
                                                        {},
                                                        h,
                                                        {},
                                                        addr_have,
                                                        {},
                                                        clear_route_have,
                                                    )
                                                )
                                            commands.extend(cmd)
                                if check:
                                    break
                            if check:
                                break
                    if not check:
                        # For configuring any non-existing want config
                        new_hops = []
                        for each in route_want.get("next_hops"):
                            want_set = set()
                            new_dict_to_set(each, [], want_set, 0)
                            new_hops.append(want_set)
                        commands.extend(
                            self._set_config(
                                w,
                                {},
                                addr_want,
                                route_want,
                                {},
                                new_hops,
                                set(),
                            )
                        )
        # Fire all delete commands before all set commands.
        # NOTE(review): the substring test '"no" in each' would also match a
        # command whose name/interface merely contains "no" -- consider
        # each.startswith("no ") upstream.
        commands = [each for each in commands if "no" in each] + [
            each for each in commands if "no" not in each
        ]
        return commands
    def _state_overridden(self, want, have):
        """ The command generator when state is overridden

        Every device route is either replaced by its wanted counterpart or
        deleted; remaining wanted routes (no device counterpart) are added.

        :rtype: A list
        :returns: the commands necessary to migrate the current configuration
        to the desired configuration
        """
        commands = []
        # Creating a copy of want, so that want dict is intact even after delete operation
        # performed during override want n have comparison
        temp_want = copy.deepcopy(want)
        # Drill each iteration of want n have and then based on dest and afi type comparison take config call
        for h in have:
            if h.get("address_families"):
                for addr_have in h.get("address_families"):
                    for route_have in addr_have.get("routes"):
                        # check = True once this device route matched a wanted one
                        check = False
                        for w in temp_want:
                            for addr_want in w.get("address_families"):
                                count = 0
                                for route_want in addr_want.get("routes"):
                                    if (
                                        route_want.get("dest")
                                        == route_have.get("dest")
                                        and addr_want["afi"]
                                        == addr_have["afi"]
                                    ):
                                        check = True
                                        # Flatten hops to (key, value) tuple sets for
                                        # diffing inside _set_config/_clear_config.
                                        have_set = set()
                                        new_hops = []
                                        for each in route_want.get(
                                            "next_hops"
                                        ):
                                            want_set = set()
                                            new_dict_to_set(
                                                each, [], want_set, 0
                                            )
                                            new_hops.append(want_set)
                                        new_dict_to_set(
                                            addr_have, [], have_set, 0
                                        )
                                        commands.extend(
                                            self._clear_config(
                                                w,
                                                h,
                                                addr_want,
                                                addr_have,
                                                route_want,
                                                route_have,
                                            )
                                        )
                                        commands.extend(
                                            self._set_config(
                                                w,
                                                h,
                                                addr_want,
                                                route_want,
                                                route_have,
                                                new_hops,
                                                have_set,
                                            )
                                        )
                                        # Consume the matched want route so the final
                                        # "non-existing want" pass below skips it.
                                        # NOTE(review): deleting from the list being
                                        # iterated while also advancing `count` every
                                        # iteration can skip/mis-index subsequent routes
                                        # under the same address family -- confirm with
                                        # multi-route test data.
                                        del addr_want.get("routes")[count]
                                    count += 1
                                if check:
                                    break
                            if check:
                                break
                        if not check:
                            # Device route with no wanted counterpart: remove it.
                            commands.extend(
                                self._clear_config(
                                    {}, h, {}, addr_have, {}, route_have
                                )
                            )
        # For configuring any non-existing want config
        for w in temp_want:
            for addr_want in w.get("address_families"):
                for route_want in addr_want.get("routes"):
                    new_hops = []
                    for each in route_want.get("next_hops"):
                        want_set = set()
                        new_dict_to_set(each, [], want_set, 0)
                        new_hops.append(want_set)
                    commands.extend(
                        self._set_config(
                            w, {}, addr_want, route_want, {}, new_hops, set()
                        )
                    )
        # Arranging the cmds such that all delete cmds are fired before all set cmds
        # NOTE(review): '"no" in each' is a substring match; a route name
        # containing "no" would be sorted into the delete bucket.
        commands = [each for each in sorted(commands) if "no" in each] + [
            each for each in sorted(commands) if "no" not in each
        ]
        return commands
    def _state_merged(self, want, have):
        """ The command generator when state is merged

        Wanted routes are added on top of the device configuration; nothing
        is deleted. Matching (dest, afi) pairs are diffed hop-by-hop inside
        _set_config so only changed hops emit commands.

        :rtype: A list
        :returns: the commands necessary to merge the provided into
        the current configuration
        """
        commands = []
        # Drill each iteration of want n have and then based on dest and afi type comparison take config call
        for w in want:
            for addr_want in w.get("address_families"):
                for route_want in addr_want.get("routes"):
                    # check = True once a matching (dest, afi) was found in have
                    check = False
                    for h in have:
                        if h.get("address_families"):
                            for addr_have in h.get("address_families"):
                                for route_have in addr_have.get("routes"):
                                    if (
                                        route_want.get("dest")
                                        == route_have.get("dest")
                                        and addr_want["afi"]
                                        == addr_have["afi"]
                                    ):
                                        check = True
                                        # Flatten hop dicts to (key, value) tuple sets
                                        # so _set_config can diff want vs have.
                                        have_set = set()
                                        new_hops = []
                                        for each in route_want.get(
                                            "next_hops"
                                        ):
                                            want_set = set()
                                            new_dict_to_set(
                                                each, [], want_set, 0
                                            )
                                            new_hops.append(want_set)
                                        new_dict_to_set(
                                            addr_have, [], have_set, 0
                                        )
                                        commands.extend(
                                            self._set_config(
                                                w,
                                                h,
                                                addr_want,
                                                route_want,
                                                route_have,
                                                new_hops,
                                                have_set,
                                            )
                                        )
                                if check:
                                    break
                            if check:
                                break
                    if not check:
                        # For configuring any non-existing want config
                        new_hops = []
                        for each in route_want.get("next_hops"):
                            want_set = set()
                            new_dict_to_set(each, [], want_set, 0)
                            new_hops.append(want_set)
                        commands.extend(
                            self._set_config(
                                w,
                                {},
                                addr_want,
                                route_want,
                                {},
                                new_hops,
                                set(),
                            )
                        )
        return commands
    def _state_deleted(self, want, have):
        """ The command generator when state is deleted

        Three deletion scopes:
        * want with address_families: delete only the matching (dest, afi)
          routes (specific hops when the want route lists next_hops,
          otherwise the whole device route);
        * want with only a vrf: delete every device route in that vrf;
        * empty want: delete every static route on the device.

        :rtype: A list
        :returns: the commands necessary to remove the current configuration
        of the provided objects
        """
        commands = []
        if want:
            # Drill each iteration of want n have and then based on dest and afi type comparison fire delete config call
            for w in want:
                if w.get("address_families"):
                    for addr_want in w.get("address_families"):
                        for route_want in addr_want.get("routes"):
                            # check = True once this want route matched a device route
                            check = False
                            for h in have:
                                if h.get("address_families"):
                                    for addr_have in h.get("address_families"):
                                        for route_have in addr_have.get(
                                            "routes"
                                        ):
                                            if (
                                                route_want.get("dest")
                                                == route_have.get("dest")
                                                and addr_want["afi"]
                                                == addr_have["afi"]
                                            ):
                                                check = True
                                                if route_want.get("next_hops"):
                                                    # Specific hops requested:
                                                    # delete exactly those.
                                                    commands.extend(
                                                        self._clear_config(
                                                            {},
                                                            w,
                                                            {},
                                                            addr_want,
                                                            {},
                                                            route_want,
                                                        )
                                                    )
                                                else:
                                                    # No hops given: delete the
                                                    # device route as it exists.
                                                    commands.extend(
                                                        self._clear_config(
                                                            {},
                                                            h,
                                                            {},
                                                            addr_have,
                                                            {},
                                                            route_have,
                                                        )
                                                    )
                                        if check:
                                            break
                                    if check:
                                        break
                else:
                    # Only a vrf was supplied: purge that vrf's routes.
                    for h in have:
                        for addr_have in h.get("address_families"):
                            for route_have in addr_have.get("routes"):
                                if w.get("vrf") == h.get("vrf"):
                                    commands.extend(
                                        self._clear_config(
                                            {},
                                            h,
                                            {},
                                            addr_have,
                                            {},
                                            route_have,
                                        )
                                    )
        else:
            # Drill each iteration of have and then based on dest and afi type comparison fire delete config call
            for h in have:
                for addr_have in h.get("address_families"):
                    for route_have in addr_have.get("routes"):
                        commands.extend(
                            self._clear_config(
                                {}, h, {}, addr_have, {}, route_have
                            )
                        )
        return commands
def prepare_config_commands(self, config_dict, cmd):
"""
function to parse the input dict and form the prepare the config commands
:rtype: A str
:returns: The command necessary to configure the static routes
"""
dhcp = config_dict.get("dhcp")
distance_metric = config_dict.get("distance_metric")
forward_router_address = config_dict.get("forward_router_address")
global_route_config = config_dict.get("global")
interface = config_dict.get("interface")
multicast = config_dict.get("multicast")
name = config_dict.get("name")
permanent = config_dict.get("permanent")
tag = config_dict.get("tag")
track = config_dict.get("track")
dest = config_dict.get("dest")
temp_dest = dest.split("/")
if temp_dest and ":" not in dest:
dest = validate_n_expand_ipv4(self._module, {"address": dest})
cmd = cmd + dest
if interface:
cmd = cmd + " {0}".format(interface)
if forward_router_address:
cmd = cmd + " {0}".format(forward_router_address)
if dhcp:
cmd = cmd + " DHCP"
if distance_metric:
cmd = cmd + " {0}".format(distance_metric)
if global_route_config:
cmd = cmd + " global"
if multicast:
cmd = cmd + " multicast"
if name:
cmd = cmd + " name {0}".format(name)
if permanent:
cmd = cmd + " permanent"
elif track:
cmd = cmd + " track {0}".format(track)
if tag:
cmd = cmd + " tag {0}".format(tag)
return cmd
    def _set_config(
        self, want, have, addr_want, route_want, route_have, hops, have_set
    ):
        """
        Set the interface config based on the want and have config

        ``hops`` is a list of flattened (key, value) tuple-sets, one per
        wanted next hop; ``have_set`` is the flattened device state. A set
        command is emitted for a hop only when it differs from the device
        state or the vrf/topology context changed.

        :rtype: A list
        :returns: The commands necessary to configure the static routes
        """
        commands = []
        cmd = None
        vrf_diff = False
        topology_diff = False
        want_vrf = want.get("vrf")
        have_vrf = have.get("vrf")
        if want_vrf != have_vrf:
            vrf_diff = True
        want_topology = want.get("topology")
        have_topology = have.get("topology")
        if want_topology != have_topology:
            topology_diff = True
        have_dest = route_have.get("dest")
        if have_dest:
            have_set.add(tuple(iteritems({"dest": have_dest})))
        # configure set cmd for each hops under the same destination
        for each in hops:
            # diff must be computed before the vrf/topology markers are
            # injected into the hop set below.
            diff = each - have_set
            if vrf_diff:
                each.add(tuple(iteritems({"vrf": want_vrf})))
            if topology_diff:
                each.add(tuple(iteritems({"topology": want_topology})))
            if diff or vrf_diff or topology_diff:
                if want_vrf and not vrf_diff:
                    each.add(tuple(iteritems({"vrf": want_vrf})))
                # NOTE(review): guard below reuses ``not vrf_diff`` -- looks
                # like a copy-paste of the vrf guard; presumably intended
                # ``not topology_diff``. Confirm upstream before changing.
                if want_topology and not vrf_diff:
                    each.add(tuple(iteritems({"topology": want_topology})))
                each.add(tuple(iteritems({"afi": addr_want.get("afi")})))
                each.add(tuple(iteritems({"dest": route_want.get("dest")})))
                # Rebuild a flat dict from the tuple-set for command rendering.
                temp_want = {}
                for each_want in each:
                    temp_want.update(dict(each_want))

                if temp_want.get("afi") == "ipv4":
                    cmd = "ip route "
                    vrf = temp_want.get("vrf")
                    if vrf:
                        cmd = cmd + "vrf {0} ".format(vrf)
                    cmd = self.prepare_config_commands(temp_want, cmd)
                elif temp_want.get("afi") == "ipv6":
                    cmd = "ipv6 route "
                    cmd = self.prepare_config_commands(temp_want, cmd)
                # NOTE(review): if afi is neither ipv4 nor ipv6 this appends
                # the previous cmd value (or None) -- confirm afi is always set.
                commands.append(cmd)
        return commands
    def _clear_config(
        self, want, have, addr_want, addr_have, route_want, route_have
    ):
        """
        Delete the interface config based on the want and have config

        Mirrors _set_config but walks the *device* hops (route_have) and
        renders ``no ip[v6] route`` commands for hops that differ from the
        wanted state or whose vrf/topology context changed.

        :rtype: A list
        :returns: The commands necessary to configure the static routes
        """
        commands = []
        cmd = None
        vrf_diff = False
        topology_diff = False
        want_vrf = want.get("vrf")
        have_vrf = have.get("vrf")
        if want_vrf != have_vrf:
            vrf_diff = True
        want_topology = want.get("topology")
        have_topology = have.get("topology")
        if want_topology != have_topology:
            topology_diff = True
        want_set = set()
        new_dict_to_set(addr_want, [], want_set, 0)
        # Flatten every device hop into a (key, value) tuple-set.
        have_hops = []
        for each in route_have.get("next_hops"):
            temp_have_set = set()
            new_dict_to_set(each, [], temp_have_set, 0)
            have_hops.append(temp_have_set)
        # configure delete cmd for each hops under the same destination
        for each in have_hops:
            diff = each - want_set
            if vrf_diff:
                each.add(tuple(iteritems({"vrf": have_vrf})))
            if topology_diff:
                # NOTE(review): uses want_topology while deleting *have*
                # state (the vrf branch above uses have_vrf) -- confirm
                # whether have_topology was intended.
                each.add(tuple(iteritems({"topology": want_topology})))
            if diff or vrf_diff or topology_diff:
                if want_vrf and not vrf_diff:
                    each.add(tuple(iteritems({"vrf": want_vrf})))
                # NOTE(review): same suspect guard as in _set_config --
                # ``not vrf_diff`` where ``not topology_diff`` looks intended.
                if want_topology and not vrf_diff:
                    each.add(tuple(iteritems({"topology": want_topology})))
                # Prefer the wanted afi/dest when supplied, else fall back
                # to the device values (pure-delete paths pass empty dicts).
                if addr_want:
                    each.add(tuple(iteritems({"afi": addr_want.get("afi")})))
                else:
                    each.add(tuple(iteritems({"afi": addr_have.get("afi")})))
                if route_want:
                    each.add(
                        tuple(iteritems({"dest": route_want.get("dest")}))
                    )
                else:
                    each.add(
                        tuple(iteritems({"dest": route_have.get("dest")}))
                    )
                # Rebuild a flat dict from the tuple-set for command rendering.
                temp_want = {}
                for each_want in each:
                    temp_want.update(dict(each_want))

                if temp_want.get("afi") == "ipv4":
                    cmd = "no ip route "
                    vrf = temp_want.get("vrf")
                    if vrf:
                        cmd = cmd + "vrf {0} ".format(vrf)
                    cmd = self.prepare_config_commands(temp_want, cmd)
                elif temp_want.get("afi") == "ipv6":
                    cmd = "no ipv6 route "
                    cmd = self.prepare_config_commands(temp_want, cmd)
                commands.append(cmd)
        return commands
| StarcoderdataPython |
from django.db import models
from river.models.fields.state import StateField
class Shipping(models.Model):
    """A shipment whose ``shipping_status`` transitions are driven by
    django-river's workflow engine (StateField)."""

    # Optional free-text labels; both nullable, so __str__ must cope with
    # a missing product.
    product = models.CharField(max_length=50, null=True, blank=True)
    customer = models.CharField(max_length=50, null=True, blank=True)
    # Workflow state managed by django-river; transitions are configured
    # in river's metadata, not on this model.
    shipping_status = StateField()

    def __str__(self):
        # product is nullable: returning None from __str__ raises
        # TypeError, so fall back to an empty string.
        return self.product or ""
| StarcoderdataPython |
54525 | <gh_stars>0
# split the Neale Lab UK Biobank summary statistics by chromosome
import argparse
CHROM = list(map(str, range(1, 23)))
CHROM.append("X")
def main():
    """Split a Neale Lab UK Biobank variants TSV into per-chromosome files.

    Reads the input file named on the command line and writes
    ``nealelab-uk-biobank/variants_chr<N>.tsv`` for chromosomes 1-22 and X,
    each starting with a copy of the original header line.
    """
    import contextlib  # local import: keeps the module's top-level imports untouched

    parser = argparse.ArgumentParser()
    parser.add_argument("fname", help="Input variants file")
    args = parser.parse_args()

    # ExitStack guarantees every per-chromosome handle is closed even if
    # parsing fails partway through (the original leaked handles on error).
    with contextlib.ExitStack() as stack:
        # open all the output files (chr 1-22, and chr X)
        outfiles_dict = {
            chrom: stack.enter_context(
                open("nealelab-uk-biobank/variants_chr" + chrom + ".tsv", "w")
            )
            for chrom in CHROM
        }

        with open(args.fname) as f:
            # write the header line to each output file
            header = f.readline()
            for outfile in outfiles_dict.values():
                outfile.write(header)

            # write each line to the appropriate chromosome file
            # (column 2, i.e. 0-based index 1, holds the chromosome)
            for line in f:
                chrom = line.split("\t")[1]
                outfiles_dict[chrom].write(line)


if __name__ == "__main__":
    main()
| StarcoderdataPython |
30252 | <filename>mmdet/apis/__init__.py
from .env import get_root_logger, init_dist, set_random_seed
from .inference import inference_detector, init_detector, show_result
from .train import train_detector
__all__ = [
'init_dist', 'get_root_logger', 'set_random_seed', 'train_detector',
'init_detector', 'inference_detector', 'show_result'
]
| StarcoderdataPython |
# -*- coding: utf-8 -*-
"""
Graph of the system
x - 2y = 0
x - 4y = 8
I calculated solution as (-8,-4)
graph agrees
"""
#%reset -f
import matplotlib.pyplot as plt
def get_x_i(y):
    """Solve the first equation, x - 2y = 0, for x (i.e. x = 2y)."""
    return y * 2
def get_x_ii(y):
    """Solve the second equation, x - 4y = 8, for x (i.e. x = 8 + 4y)."""
    return 4 * y + 8
# Sample y over [-10, 10] for both lines.
y_i = range(-10,11)
y_ii = range(-10,11)
# x values of each equation solved for x (see get_x_i / get_x_ii above).
x_i = [get_x_i(y) for y in y_i]
x_ii = [get_x_ii(y) for y in y_ii]
plt.plot(x_i,y_i,color="blue",label="x - 2y = 0")
plt.plot(x_ii,y_ii,color="red", label="x - 4y = 8")
plt.xlabel("X")
plt.ylabel("Y")
plt.legend()
plt.grid(True, which='both')
plt.title("Graph of x - 2y = 0 and x - 4y = 8")
# Draw the axes through the origin; the lines intersect at (-8, -4),
# matching the hand-computed solution in the module docstring.
plt.axhline(y=0, color='k')
plt.axvline(x=0, color='k')
plt.show()
from datetime import datetime
from time import mktime
def micro_time():
    """
    Return the current time since the epoch in microseconds.

    Uses datetime.now() (microsecond resolution) because it is more
    accurate than time.clock. Note mktime interprets the timetuple in
    *local* time, so the result is local-epoch microseconds.
    """
    now = datetime.now()
    # int() replaces the Python-2-only ``long`` builtin (removed in
    # Python 3; Python 3 ints are arbitrary precision anyway, and
    # Python 2 ints auto-promote, so this is compatible with both).
    return int(mktime(now.timetuple()) * 1000000 + now.microsecond)
class Logging(object):
    """Append-only file logger; one file per instance, named after the
    creation minute (``color_YYYY_MM_DD_HH_MM`` in the working directory)."""

    def __init__(self):
        # Minute resolution: two instances created in the same minute
        # share a file, which append mode makes safe.
        self.filename = 'color_' + datetime.now().strftime("%Y_%m_%d_%H_%M")
        self.log_file = self.start_logging()

    def start_logging(self):
        """Open the log file in append mode and return the handle."""
        return open(self.filename, 'a')

    def write_line(self, *args):
        """Write one record: '<microsecond timestamp> <args tuple> yup'."""
        record = str(micro_time()) + ' {} yup\n'.format(args)
        self.log_file.write(record)

    def stop_logging(self):
        """Close the underlying file handle."""
        self.log_file.close()
1704007 | <reponame>tervay/the-blue-alliance<gh_stars>100-1000
import csv
import StringIO
from datafeeds.parser_base import ParserBase
class CSVTeamsParser(ParserBase):
    """Parser extracting FRC team numbers from comma-separated text."""

    @classmethod
    def parse(cls, data):
        """
        Parse CSV that contains teams.

        Input is one or more comma-separated rows of team numbers, e.g.
        ``254, 1114, 100, 604, 148``; whitespace after commas is ignored
        and non-numeric cells are skipped.

        :returns: set of team numbers as ints
        """
        rows = csv.reader(StringIO.StringIO(data), delimiter=',',
                          skipinitialspace=True)
        return {int(cell) for row in rows for cell in row if cell.isdigit()}
| StarcoderdataPython |
1621798 | <reponame>SimLeek/pglsl-neural<filename>pygp_retina/tests_interactive/show_average.py
from cv_pubsubs import webcam_pub as camp
from cv_pubsubs import window_sub as win
from pygp_retina.simple_average import avg_total_color
if False:
from typing import Tuple
def display_average(cam,
                    request_size=(1280, 720),  # type: Tuple[int, int]
                    fps_limit=60,  # type: float
                    high_speed=True,  # type: bool
                    no_cl=False  # type: bool
                    ):
    """Show a live "avg" window rendering the average color of ``cam``.

    Spawns a frame-handler thread that publishes every camera frame under
    the "<cam>Frame" key, then blocks in the subscriber-window loop with
    the ``avg_total_color`` callback.

    NOTE(review): ``request_size`` and ``no_cl`` are accepted but unused
    in the visible implementation.

    :returns: the frame-handler thread
    """
    frame_key = str(cam) + 'Frame'

    def publish_frame(frame, cam_id):
        # Make the newest frame visible to the subscriber window.
        win.SubscriberWindows.frame_dict[str(cam_id) + "Frame"] = frame

    handler_thread = camp.frame_handler_thread(
        cam, publish_frame, fps_limit=fps_limit, high_speed=high_speed)

    # Blocks until the display window is closed.
    win.SubscriberWindows(window_names=["avg"],
                          input_cams=[cam],
                          input_vid_global_names=[frame_key],
                          callbacks=[avg_total_color]).loop()
    return handler_thread
26464 | <filename>Plug-and-play module/attention/CBAM/cbam.py
import torch
import torch.nn as nn
def conv3x3(in_planes, out_planes, stride=1):
    """Return a bias-free 3x3 Conv2d with padding 1 (spatial size is
    preserved when stride is 1)."""
    return nn.Conv2d(in_planes, out_planes,
                     kernel_size=3, stride=stride, padding=1, bias=False)
class ChannelAttention(nn.Module):
    """CBAM channel attention: squeeze spatial dims with both average and
    max pooling, pass each through a shared bottleneck MLP, and emit a
    per-channel sigmoid gate of shape (N, C, 1, 1)."""

    def __init__(self, in_planes, ratio=4):
        super(ChannelAttention, self).__init__()
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.max_pool = nn.AdaptiveMaxPool2d(1)
        # 1x1 convolutions act as a channel-wise MLP with a reduction
        # bottleneck of `ratio`.
        self.sharedMLP = nn.Sequential(
            nn.Conv2d(in_planes, in_planes // ratio, 1, bias=False),
            nn.ReLU(),
            nn.Conv2d(in_planes // ratio, in_planes, 1, bias=False))
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        branch_avg = self.sharedMLP(self.avg_pool(x))
        branch_max = self.sharedMLP(self.max_pool(x))
        return self.sigmoid(branch_avg + branch_max)
class SpatialAttention(nn.Module):
    """CBAM spatial attention: stack channel-wise mean and max maps,
    convolve them down to one channel, and emit an (N, 1, H, W) sigmoid
    gate."""

    def __init__(self, kernel_size=7):
        super(SpatialAttention, self).__init__()
        assert kernel_size in (3, 7), "kernel size must be 3 or 7"
        # "same" padding for the two supported kernel sizes.
        padding = 3 if kernel_size == 7 else 1
        self.conv = nn.Conv2d(2, 1, kernel_size, padding=padding, bias=False)
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        channel_mean = torch.mean(x, dim=1, keepdim=True)
        channel_max, _ = torch.max(x, dim=1, keepdim=True)
        stacked = torch.cat([channel_mean, channel_max], dim=1)
        return self.sigmoid(self.conv(stacked))
class BasicBlock(nn.Module):
    """ResNet BasicBlock augmented with CBAM: channel then spatial
    attention gate the residual branch before the skip addition.

    Interface matches torchvision's BasicBlock: the optional
    ``downsample`` module must map the input to the residual branch's
    shape whenever channels or spatial size change.
    """
    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(BasicBlock, self).__init__()
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = nn.BatchNorm2d(planes)
        # CBAM attention modules, applied in channel -> spatial order.
        self.ca = ChannelAttention(planes)
        self.sa = SpatialAttention()
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        residual = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)

        # Attention gates broadcast over (H, W) resp. over channels.
        out = self.ca(out) * out
        out = self.sa(out) * out

        if self.downsample is not None:
            # Match the skip connection to the residual branch's shape.
            # (Removed the original's debug print statements: a library
            # forward() must not write to stdout.)
            residual = self.downsample(x)

        out += residual
        out = self.relu(out)
        return out
if __name__ == "__main__":
    # Smoke test: channel count changes 16 -> 32, so the skip connection
    # needs a 1x1 conv + BN downsample before the residual addition.
    skip_proj = nn.Sequential(
        nn.Conv2d(16, 32, kernel_size=1, stride=1, bias=False),
        nn.BatchNorm2d(32))
    inputs = torch.ones(3, 16, 32, 32)
    block = BasicBlock(16, 32, stride=1, downsample=skip_proj)
    print(block(inputs).shape)
3327582 | # OTP Generator
# | IMPORT
import base64
import os
import pickle
import shortuuid
from datetime import datetime
from random import randint, seed
from typing import Any, Dict, Union
# | GLOBAL EXECUTIONS & GLOBAL VARIABLES
CHAR_SET = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
# | FUNCTIONS
def gen_seed() -> str:
    """Seed the module RNG from a fresh UUID and return a 4-character
    reference code drawn from CHAR_SET.

    Note: this (re)seeds Python's *global* ``random`` state as a side
    effect; gen_otp relies on that by re-seeding with the returned ref
    so the OTP is reproducible from it.
    """
    # (Fixed return annotation: this has always returned a str, not int.)
    global CHAR_SET
    DEFAULT_SEED = "TEDxKasetsartU" + shortuuid.uuid()
    seed(DEFAULT_SEED)
    res = ""
    for _ in range(4):
        res += CHAR_SET[randint(0, len(CHAR_SET) - 1)]
    return res
def gen_otp(otp_file: str, name: str, server_name: str, length: int = 6):
    """Generate a unique OTP of ``length`` characters plus its 4-char ref.

    Seeds the RNG with the ref (so the OTP is reproducible from it) and
    retries until write_otp_file accepts the OTP as unused in the store.

    :returns: tuple (otp, ref)
    """
    global CHAR_SET
    # (Removed stray ``global INIT_DUMP``: that name is never defined or
    # used anywhere in this module.)
    ref = gen_seed()
    seed(ref)
    while True:
        otp = ""
        for _ in range(length):
            otp += CHAR_SET[randint(0, len(CHAR_SET) - 1)]
        # write_otp_file returns 0 on success, None on duplicate.
        if write_otp_file(otp_file, otp, ref, name, server_name) == 0:
            return otp, ref
def write_otp_file(file: str, otp: str, ref: str, name: str, server_name: str, mode: str = "add") -> Union[None, int]:
    """Add or remove an OTP record in the OTP store file.

    The store is a base85-encoded pickle of a dict mapping
    otp -> {"ref", "create_at", "for", "server"} (see read_otp_file).
    Returns 0 on success, None on failure (duplicate otp on "add",
    unknown otp on "remove", or an unknown mode).
    """
    empty_structure = {}
    # Create an empty store on first use so read_otp_file below succeeds.
    if not os.path.exists(file):
        with open(file, "wb") as f:
            data = base64.b85encode(pickle.dumps(empty_structure))
            f.write(data)
    data = read_otp_file(file)
    if mode == "add":
        # Reject duplicates: each OTP may exist at most once in the store.
        for k in data.keys():
            if k == otp:
                return None
        data[otp] = {"ref": ref, "create_at": datetime.now(), "for": name, "server": server_name}
    elif mode == "remove":
        try:
            data.pop(otp)
        except KeyError:
            return None
    else:
        return None
    # Persist the whole store, re-encoded as base85 over a pickle.
    with open(file, "wb") as f:
        data = base64.b85encode(pickle.dumps(data))
        f.write(data)
    return 0
def read_otp_file(file: str) -> Dict[str, Dict[str, Union[str, Any]]]:
    """Load the OTP store (base85-encoded pickle of a dict).

    :returns: the otp -> record mapping, or None when the file is missing.
    """
    if not os.path.exists(file):
        return None
    with open(file, "rt") as f:
        raw = f.read()
    return pickle.loads(base64.b85decode(raw))
def test(otp_file: str, no_first_print=False):
    """Collision check: generate 2000 OTPs and count distinct results.

    Prints the result set once after the first generation unless
    ``no_first_print`` is set.

    :returns: the number of unique (otp, ref) tuples; anything below
        2000 indicates a collision.
    """
    results = set()
    first = not no_first_print
    for _ in range(2000):
        # Fixed: the original called gen_otp(otp_file) with a single
        # argument, but name and server_name are required -> TypeError.
        results.add(gen_otp(otp_file, "self-test", "local"))
        if first:
            print(results)
            first = False
    return len(results)
# | MAIN
if __name__ == "__main__":
    # Manual collision check: two rounds of 2000 OTPs against a scratch
    # store; report rounds that saw a collision, then dump the store.
    store_path = "test.tmp"
    collision_rounds = 0
    for _ in range(2):
        unique_count = test(store_path, no_first_print=False)
        if unique_count < 2000:
            print(unique_count)
            collision_rounds += 1
    print(collision_rounds)
    print(read_otp_file(store_path))
47508 | <filename>deeppavlov/models/go_bot/dto/dataset_features.py<gh_stars>1000+
from typing import List
import numpy as np
# todo remove boilerplate duplications
# todo comments
# todo logging
# todo naming
from deeppavlov.models.go_bot.nlu.dto.nlu_response import NLUResponse
from deeppavlov.models.go_bot.policy.dto.digitized_policy_features import DigitizedPolicyFeatures
from deeppavlov.models.go_bot.tracker.dto.dst_knowledge import DSTKnowledge
from copy import deepcopy
class UtteranceFeatures:
    """
    the DTO-like class storing the training features of a single utterance of a dialog
    (to feed the GO-bot policy model)
    """
    action_mask: np.ndarray
    attn_key: np.ndarray
    tokens_embeddings_padded: np.ndarray
    features: np.ndarray

    def __init__(self,
                 nlu_response: NLUResponse,
                 tracker_knowledge: DSTKnowledge,
                 features: DigitizedPolicyFeatures):
        # Policy-side digitized features.
        self.action_mask = features.action_mask
        self.attn_key = features.attn_key
        self.features = features.concat_feats
        # NLU-side padded token embeddings.
        # NOTE(review): tracker_knowledge is accepted but unused here.
        vectorized = nlu_response.tokens_vectorized  # todo proper oop
        self.tokens_embeddings_padded = vectorized.tokens_embeddings_padded
class UtteranceTarget:
    """
    the DTO-like class storing the training target of a single utterance of a dialog
    (to feed the GO-bot policy model)
    """
    # Id of the gold (target) system action; None when unknown (inference).
    action_id: int

    def __init__(self, action_id):
        self.action_id = action_id
class UtteranceDataEntry:
    """
    the DTO-like class storing both the training features and target
    of a single utterance of a dialog (to feed the GO-bot policy model)
    """
    features: UtteranceFeatures
    target: UtteranceTarget

    def __init__(self, features, target):
        self.features = features
        self.target = target

    @staticmethod
    def from_features_and_target(features: UtteranceFeatures, target: UtteranceTarget):
        """Build an entry from deep copies of both features and target."""
        return UtteranceDataEntry(deepcopy(features), deepcopy(target))

    @staticmethod
    def from_features(features: UtteranceFeatures):
        """Build an entry with copied features and an empty (None) target."""
        return UtteranceDataEntry(deepcopy(features), UtteranceTarget(None))
class DialogueFeatures:
    """
    the DTO-like class storing both the training features
    of a dialog (to feed the GO-bot policy model)
    """
    action_masks: List[np.ndarray]
    attn_keys: List[np.ndarray]
    tokens_embeddings_paddeds: List[np.ndarray]
    featuress: List[np.ndarray]

    def __init__(self):
        # One entry per utterance; all four lists are kept in lockstep.
        self.action_masks = []
        self.attn_keys = []
        self.tokens_embeddings_paddeds = []
        self.featuress = []

    def append(self, utterance_features: UtteranceFeatures):
        """Append one utterance's features to each per-field list."""
        self.action_masks.append(utterance_features.action_mask)
        self.attn_keys.append(utterance_features.attn_key)
        self.tokens_embeddings_paddeds.append(
            utterance_features.tokens_embeddings_padded)
        self.featuress.append(utterance_features.features)

    def __len__(self):
        # All lists grow together; featuress is the representative one.
        return len(self.featuress)
class DialogueTargets:
    """
    the DTO-like class storing both the training targets
    of a dialog (to feed the GO-bot policy model)
    """
    # Gold action id for each utterance, in dialogue order.
    action_ids: List[int]

    def __init__(self):
        self.action_ids = []

    def append(self, utterance_target: UtteranceTarget):
        self.action_ids.append(utterance_target.action_id)

    def __len__(self):
        return len(self.action_ids)
class DialogueDataEntry:
    """
    the DTO-like class storing both the training features and targets
    of a dialog (to feed the GO-bot policy model)
    """
    features: DialogueFeatures
    targets: DialogueTargets

    def __init__(self):
        self.features = DialogueFeatures()
        self.targets = DialogueTargets()

    def append(self, utterance_features: UtteranceDataEntry):
        """Split an utterance entry into its feature and target halves."""
        self.features.append(utterance_features.features)
        self.targets.append(utterance_features.target)

    def __len__(self):
        # features and targets grow together.
        return len(self.features)
class PaddedDialogueFeatures(DialogueFeatures):
    """
    the DTO-like class storing both the **padded to some specified length** training features
    of a dialog (to feed the GO-bot policy model)
    """
    # 1 for real utterance positions, 0 for padding positions.
    padded_dialogue_length_mask: List[int]

    def __init__(self, dialogue_features: DialogueFeatures, sequence_length):
        super().__init__()

        padding_length = sequence_length - len(dialogue_features)

        self.padded_dialogue_length_mask = [1] * len(dialogue_features) + [0] * padding_length

        # Padding entries reuse the first element's shape via zeros_like.
        # NOTE(review): indexing [0] means an *empty* dialogue_features
        # would raise IndexError -- presumably callers never pass one.
        self.action_masks = dialogue_features.action_masks + \
                            [np.zeros_like(dialogue_features.action_masks[0])] * padding_length
        self.attn_keys = dialogue_features.attn_keys + [np.zeros_like(dialogue_features.attn_keys[0])] * padding_length
        self.tokens_embeddings_paddeds = dialogue_features.tokens_embeddings_paddeds + \
                                         [np.zeros_like(
                                             dialogue_features.tokens_embeddings_paddeds[0])] * padding_length
        self.featuress = dialogue_features.featuress + [np.zeros_like(dialogue_features.featuress[0])] * padding_length
class PaddedDialogueTargets(DialogueTargets):
    """
    the DTO-like class storing both the **padded to some specified length** training targets
    of a dialog (to feed the GO-bot policy model)
    """

    def __init__(self, dialogue_targets: DialogueTargets, sequence_length):
        super().__init__()
        # Pad with action id 0 up to sequence_length; padded positions
        # are masked out downstream via the dialogue length mask.
        pad = sequence_length - len(dialogue_targets)
        self.action_ids = dialogue_targets.action_ids + [0] * pad
class PaddedDialogueDataEntry(DialogueDataEntry):
    """
    the DTO-like class storing both the **padded to some specified length** training features and targets
    of a dialog (to feed the GO-bot policy model)
    """
    features: PaddedDialogueFeatures
    targets: PaddedDialogueTargets

    def __init__(self, dialogue_data_entry: DialogueDataEntry, sequence_length):
        # super().__init__() builds empty (unpadded) containers which are
        # immediately replaced by the padded variants below.
        super().__init__()

        self.features = PaddedDialogueFeatures(dialogue_data_entry.features, sequence_length)
        self.targets = PaddedDialogueTargets(dialogue_data_entry.targets, sequence_length)
class BatchDialoguesFeatures:
    """
    the DTO-like class storing both the training features
    of a batch of dialogues. (to feed the GO-bot policy model)
    """
    b_action_masks: List[List[np.ndarray]]
    b_attn_keys: List[List[np.ndarray]]
    b_tokens_embeddings_paddeds: List[List[np.ndarray]]
    b_featuress: List[List[np.ndarray]]
    b_padded_dialogue_length_mask: List[List[int]]
    max_dialogue_length: int

    def __init__(self, max_dialogue_length):
        # Outer lists index dialogues in the batch; inner lists are the
        # per-dialogue sequences, each already padded to
        # max_dialogue_length.
        self.b_action_masks = []
        self.b_attn_keys = []
        self.b_tokens_embeddings_paddeds = []
        self.b_featuress = []
        self.b_padded_dialogue_length_mask = []
        self.max_dialogue_length = max_dialogue_length

    def append(self, padded_dialogue_features: PaddedDialogueFeatures):
        """Add one padded dialogue's feature sequences to the batch."""
        self.b_action_masks.append(padded_dialogue_features.action_masks)
        self.b_attn_keys.append(padded_dialogue_features.attn_keys)
        self.b_tokens_embeddings_paddeds.append(
            padded_dialogue_features.tokens_embeddings_paddeds)
        self.b_featuress.append(padded_dialogue_features.featuress)
        self.b_padded_dialogue_length_mask.append(
            padded_dialogue_features.padded_dialogue_length_mask)

    def __len__(self):
        # All batch lists grow together; b_featuress is representative.
        return len(self.b_featuress)
class BatchDialoguesTargets:
    """
    the DTO-like class storing both the training targets
    of a batch of dialogues. (to feed the GO-bot policy model)
    """
    # One padded action-id sequence per dialogue in the batch.
    b_action_ids: List[List[int]]
    max_dialogue_length: int

    def __init__(self, max_dialogue_length):
        self.b_action_ids = []
        self.max_dialogue_length = max_dialogue_length

    def append(self, padded_dialogue_targets: PaddedDialogueTargets):
        self.b_action_ids.append(padded_dialogue_targets.action_ids)

    def __len__(self):
        return len(self.b_action_ids)
class BatchDialoguesDataset:
    """
    the DTO-like class storing both the training features and target
    of a batch of dialogues. (to feed the GO-bot policy model)
    Handles the dialogues padding.
    """
    features: BatchDialoguesFeatures
    targets: BatchDialoguesTargets

    def __init__(self, max_dialogue_length):
        self.max_dialogue_length = max_dialogue_length
        self.features = BatchDialoguesFeatures(max_dialogue_length)
        self.targets = BatchDialoguesTargets(max_dialogue_length)

    def append(self, dialogue_features: DialogueDataEntry):
        """Pad the dialogue to max_dialogue_length and add it to the batch."""
        padded = PaddedDialogueDataEntry(dialogue_features,
                                         self.max_dialogue_length)
        self.features.append(padded.features)
        self.targets.append(padded.targets)

    def __len__(self):
        # features and targets grow together.
        return len(self.features)
| StarcoderdataPython |
def build_song(name):
    """Return the four-line "Name Game" verse for *name*.

    The name's first letter is replaced by b/f/m in the rhyming lines,
    e.g. "Katie" -> "bo-batie" / "fo-fatie" / "mo-matie".
    """
    tail = name[1:]
    return (
        name + ", " + name + ", bo-b" + tail + "\n" +
        "banana-fana fo-f" + tail + "\n" +
        "fee-fi-mo-m" + tail + "\n" +
        name + "!"
    )


if __name__ == "__main__":
    # Guarded so importing this module does not block on stdin
    # (the original called input() at module level).
    print(build_song(input()))
| StarcoderdataPython |
1688884 | <filename>openmdao/components/meta_model.py
""" Metamodel provides basic Meta Modeling capability."""
import sys
import numpy as np
from copy import deepcopy
from openmdao.core.component import Component, _NotSet
from six import iteritems
class MetaModel(Component):
"""Class that creates a reduced order model for outputs from
parameters. Each output may have it's own surrogate model.
Training inputs and outputs are automatically created with
'train:' prepended to the corresponding parameter/output name.
For a Float variable, the training data is an array of length m.
Options
-------
deriv_options['type'] : str('user')
Derivative calculation type ('user', 'fd', 'cs')
Default is 'user', where derivative is calculated from
user-supplied derivatives. Set to 'fd' to finite difference
this system. Set to 'cs' to perform the complex step
if your components support it.
deriv_options['form'] : str('forward')
Finite difference mode. (forward, backward, central)
deriv_options['step_size'] : float(1e-06)
Default finite difference stepsize
deriv_options['step_calc'] : str('absolute')
Set to absolute, relative
deriv_options['check_type'] : str('fd')
Type of derivative check for check_partial_derivatives. Set
to 'fd' to finite difference this system. Set to
'cs' to perform the complex step method if
your components support it.
deriv_options['check_form'] : str('forward')
Finite difference mode: ("forward", "backward", "central")
During check_partial_derivatives, the difference form that is used
for the check.
deriv_options['check_step_calc'] : str('absolute',)
Set to 'absolute' or 'relative'. Default finite difference
step calculation for the finite difference check in check_partial_derivatives.
deriv_options['check_step_size'] : float(1e-06)
Default finite difference stepsize for the finite difference check
in check_partial_derivatives"
deriv_options['linearize'] : bool(False)
Set to True if you want linearize to be called even though you are using FD.
"""
    def __init__(self):
        super(MetaModel, self).__init__()

        # This surrogate will be used for all outputs that don't have
        # a specific surrogate assigned to them
        self.default_surrogate = None

        # keep list of params and outputs that are not the training vars
        # (stored as (name, size) / (name, shape) tuples)
        self._surrogate_param_names = []
        self._surrogate_output_names = []

        # training will occur on first execution
        self.train = True
        # flattened training-input matrix, filled lazily at first execution
        self._training_input = np.zeros(0)
        # per-output training arrays, keyed by output name
        self._training_output = {}

        # When set to False (default), the metamodel retrains with the new
        # dataset whenever the training data values are changed. When set to
        # True, the new data is appended to the old data and all of the data
        # is used to train.
        self.warm_restart = False

        # keeps track of which sur_<name> slots are full
        self._surrogate_overrides = set()

        # running total of the flattened widths of all surrogate inputs
        self._input_size = 0
def add_param(self, name, val=_NotSet, training_data=None, **kwargs):
""" Add a `param` input to this component and a corresponding
training parameter.
Args
----
name : string
Name of the input.
val : float or ndarray or object
Initial value for the input.
training_data : float or ndarray
training data for this variable. Optional, can be set
by the problem later.
"""
if training_data is None:
training_data = []
super(MetaModel, self).add_param(name, val, **kwargs)
super(MetaModel, self).add_param('train:'+name, val=training_data, pass_by_obj=True)
input_size = self._init_params_dict[name]['size']
self._surrogate_param_names.append((name, input_size))
self._input_size += input_size
def add_output(self, name, val=_NotSet, training_data=None, **kwargs):
    """ Add an output to this component and a corresponding
    training output.
    Args
    ----
    name : string
        Name of the variable output.
    val : float or ndarray
        Initial value for the output. While the value is overwritten during
        execution, it is useful for infering size.
    training_data : float or ndarray
        training data for this variable. Optional, can be set
        by the problem later.
    """
    if training_data is None:
        training_data = []
    super(MetaModel, self).add_output(name, val, **kwargs)
    # Training targets arrive through a pass-by-object 'train:<name>' param.
    super(MetaModel, self).add_param('train:'+name, val=training_data, pass_by_obj=True)
    try:
        output_shape = self._init_unknowns_dict[name]['shape']
    except KeyError: #then its some kind of object, and just assume scalar training data
        output_shape = 1
    self._surrogate_output_names.append((name, output_shape))
    self._training_output[name] = np.zeros(0)
    # Remember whether this output brought its own surrogate; if not, the
    # default surrogate will be copied in during _setup_variables().
    if self._init_unknowns_dict[name].get('surrogate'):
        self._init_unknowns_dict[name]['default_surrogate'] = False
    else:
        self._init_unknowns_dict[name]['default_surrogate'] = True
def _setup_variables(self):
    """Returns our params and unknowns dictionaries,
    re-keyed to use absolute variable names.
    Also instantiates surrogates for the output variables
    that use the default surrogate.
    """
    # create an instance of the default surrogate for outputs that
    # did not have a surrogate specified; deepcopy so each output gets
    # its own independent surrogate state.
    if self.default_surrogate is not None:
        for name, shape in self._surrogate_output_names:
            if self._init_unknowns_dict[name].get('default_surrogate'):
                surrogate = deepcopy(self.default_surrogate)
                self._init_unknowns_dict[name]['surrogate'] = surrogate
    # training will occur on first execution after setup
    self.train = True
    return super(MetaModel, self)._setup_variables()
def check_setup(self, out_stream=sys.stdout):
    """Report potential configuration problems for this ``MetaModel``.

    Every output must have a surrogate, either assigned explicitly or
    supplied through ``default_surrogate``; otherwise a message listing
    the uncovered outputs is written to *out_stream*.

    Args
    ----
    out_stream : a file-like object, optional
    """
    # A default surrogate covers every output, so nothing can be missing.
    if self.default_surrogate is not None:
        return
    missing = [name for name, _ in self._surrogate_output_names
               if self._init_unknowns_dict[name].get('surrogate') is None]
    if missing:
        out_stream.write(
            "No default surrogate model is defined and the following"
            " outputs do not have a surrogate model:\n%s\n"
            "Either specify a default_surrogate, or specify a "
            "surrogate model for all outputs." % missing)
def solve_nonlinear(self, params, unknowns, resids):
    """Predict outputs.
    If the training flag is set, train the metamodel first.
    Args
    ----
    params : `VecWrapper`, optional
        `VecWrapper` containing parameters. (p)
    unknowns : `VecWrapper`, optional
        `VecWrapper` containing outputs and states. (u)
    resids : `VecWrapper`, optional
        `VecWrapper` containing residuals. (r)
    """
    # Train first (lazy: only when training data changed since last train)
    if self.train:
        self._train()
    # Now Predict for current inputs: flatten all params into one vector
    # and ask each output's surrogate for a prediction.
    inputs = self._params_to_inputs(params)
    for name, shape in self._surrogate_output_names:
        surrogate = self._init_unknowns_dict[name].get('surrogate')
        if surrogate:
            unknowns[name] = surrogate.predict(inputs)
        else:
            raise RuntimeError("Metamodel '%s': No surrogate specified for output '%s'"
                               % (self.pathname, name))
def _params_to_inputs(self, params, out=None):
    """
    Flatten the parameter values into a single 1-D ndarray of length
    ``self._input_size``, in ``_surrogate_param_names`` order.  If *out*
    is given it is used as the destination buffer; the (possibly
    complex-promoted) buffer is returned.
    """
    buf = np.zeros(self._input_size) if out is None else out
    promoted = False  # becomes True once buf has been cast to complex
    pos = 0
    for pname, width in self._surrogate_param_names:
        value = params[pname]
        if isinstance(value, list):
            value = np.array(value)
        if isinstance(value, np.ndarray):
            # Promote the buffer to complex the first time a complex
            # input appears (e.g. during complex-step differentiation).
            if not promoted and np.issubdtype(value.dtype, complex):
                promoted = True
                buf = buf.astype(complex)
            buf[pos:pos + width] = value.flat
            pos += width
        else:
            buf[pos] = value
            pos += 1
    return buf
def linearize(self, params, unknowns, resids):
    """
    Returns the Jacobian as a dictionary whose keys are tuples of the form
    ('unknown', 'param') and whose values are ndarrays.
    Args
    ----
    params : `VecWrapper`
        `VecWrapper` containing parameters. (p)
    unknowns : `VecWrapper`
        `VecWrapper` containing outputs and states. (u)
    resids : `VecWrapper`
        `VecWrapper` containing residuals. (r)
    Returns
    -------
    dict
        Dictionary whose keys are tuples of the form ('unknown', 'param')
        and whose values are ndarrays.
    """
    jac = {}
    inputs = self._params_to_inputs(params)
    for uname, _ in self._surrogate_output_names:
        # NOTE(review): unlike solve_nonlinear, a missing surrogate here
        # raises AttributeError on the None below rather than a
        # descriptive RuntimeError — confirm intended.
        surrogate = self._init_unknowns_dict[uname].get('surrogate')
        sjac = surrogate.linearize(inputs)
        # Slice the surrogate's Jacobian into per-param column blocks.
        idx = 0
        for pname, sz in self._surrogate_param_names:
            jac[(uname, pname)] = sjac[:, idx:idx+sz]
            idx += sz
    return jac
def _train(self):
    """
    Train the metamodel, if necessary, using the provided training data.

    Validates that every 'train:<name>' input and output supplies the
    same number of sample points, (re)builds the flattened training
    arrays — appending to previous data when ``self.warm_restart`` is
    True — and passes them to each output's surrogate.  Clears the
    ``self.train`` flag afterwards.

    Raises
    ------
    RuntimeError
        If training inputs/outputs have inconsistent sample counts.
    """
    # All training inputs and outputs must have the same sample count.
    num_sample = None
    for name, sz in self._surrogate_param_names:
        val = self.params['train:' + name]
        if num_sample is None:
            num_sample = len(val)
        elif len(val) != num_sample:
            msg = "MetaModel: Each variable must have the same number"\
                  " of training points. Expected {0} but found {1} "\
                  "points for '{2}'."\
                  .format(num_sample, len(val), name)
            raise RuntimeError(msg)
    for name, shape in self._surrogate_output_names:
        val = self.params['train:' + name]
        if len(val) != num_sample:
            msg = "MetaModel: Each variable must have the same number" \
                  " of training points. Expected {0} but found {1} " \
                  "points for '{2}'." \
                  .format(num_sample, len(val), name)
            raise RuntimeError(msg)
    if self.warm_restart:
        num_old_pts = self._training_input.shape[0]
        inputs = np.zeros((num_sample + num_old_pts, self._input_size))
        if num_old_pts > 0:
            inputs[:num_old_pts, :] = self._training_input
        new_input = inputs[num_old_pts:, :]
    else:
        inputs = np.zeros((num_sample, self._input_size))
        new_input = inputs
    self._training_input = inputs
    # add training data for each input
    if num_sample > 0:
        idx = 0
        for name, sz in self._surrogate_param_names:
            val = self.params['train:' + name]
            if isinstance(val[0], float):
                new_input[:, idx] = val
                idx += 1
            else:
                for row_idx, v in enumerate(val):
                    if not isinstance(v, np.ndarray):
                        v = np.array(v)
                    new_input[row_idx, idx:idx+sz] = v.flat
                # Bug fix: advance the column cursor past this array
                # input.  Previously idx was never incremented in this
                # branch, so a second multi-element input would overwrite
                # the columns written by the first one.
                idx += sz
    # add training data for each output
    for name, shape in self._surrogate_output_names:
        if num_sample > 0:
            output_size = np.prod(shape)
            if self.warm_restart:
                outputs = np.zeros((num_sample + num_old_pts,
                                    output_size))
                if num_old_pts > 0:
                    outputs[:num_old_pts, :] = self._training_output[name]
                self._training_output[name] = outputs
                new_output = outputs[num_old_pts:, :]
            else:
                outputs = np.zeros((num_sample, output_size))
                self._training_output[name] = outputs
                new_output = outputs
            val = self.params['train:' + name]
            if isinstance(val[0], float):
                new_output[:, 0] = val
            else:
                for row_idx, v in enumerate(val):
                    if not isinstance(v, np.ndarray):
                        v = np.array(v)
                    new_output[row_idx, :] = v.flat
        surrogate = self._init_unknowns_dict[name].get('surrogate')
        if surrogate is not None:
            surrogate.train(self._training_input, self._training_output[name])
    # Predictions are now valid until the training data changes again.
    self.train = False
def _get_fd_params(self):
    """
    Get the list of parameters that are needed to perform a
    finite difference on this `Component`.
    Returns
    -------
    list of str
        List of names of params for this `Component` .
    """
    # Pass-by-object values and the 'train:*' inputs cannot be finite
    # differenced, so only regular numeric params are returned.
    names = []
    for key, acc in iteritems(self.params._dat):
        if acc.pbo or key.startswith('train'):
            continue
        names.append(key)
    return names
def _get_fd_unknowns(self):
    """
    Get the list of unknowns that are needed to perform a
    finite difference on this `Component`.
    Returns
    -------
    list of str
        List of names of unknowns for this `Component`.
    """
    # Mirror of _get_fd_params: skip pass-by-object and training slots.
    names = []
    for key, acc in iteritems(self.unknowns._dat):
        if acc.pbo or key.startswith('train'):
            continue
        names.append(key)
    return names
| StarcoderdataPython |
162924 | <reponame>sarodriguez/audio-source-separation<gh_stars>0
import numpy as np
from glob import glob
from preprocess.config import config
import logging
import os
import librosa
def get_config_as_str():
    """Return the STFT-related preprocessing parameters as a dict."""
    keys = ('FR', 'FFT_SIZE', 'HOP')
    return {k: getattr(config, k) for k in keys}
def spec_complex(audio_file):
    """Compute the complex spectrum of *audio_file*.

    Returns a dict with ``'type': 'complex'`` always present and
    ``'spec'`` only when loading and the STFT succeed; failures are
    logged rather than raised.
    """
    result = {'type': 'complex'}
    logger = logging.getLogger('computing_spec')
    try:
        logger.info('Computing complex spec for %s' % audio_file)
        samples, _sr = librosa.load(audio_file, sr=config.FR)
        result['spec'] = librosa.stft(
            samples, n_fft=config.FFT_SIZE, hop_length=config.HOP)
    except Exception as my_error:
        logger.error(my_error)
    return result
def spec_mag(audio_file, norm=True):
    """Compute the normalized mag spec and the phase of an audio file"""
    output = {}
    logger = logging.getLogger('computing_spec')
    try:
        spec = spec_complex(audio_file)
        spec = spec['spec']
        logger.info('Computing mag and phase for %s' % audio_file)
        # n_freq_bins -> connected with fft_size with 1024 -> 513 bins
        # the number of band is odd -> removing the last band
        n = spec.shape[0] - 1
        mag = np.abs(spec[:n, :])
        # mag = mag / np.max(mag)
        if norm:
            mx = np.max(mag)
            mn = np.min(mag)
            # betweens 0 and 1 (x - min(x)) / (max(x) - min(x))
            # NOTE(review): divides by zero when the magnitude is
            # constant (mx == mn) — confirm inputs rule that out.
            mag = ((mag - mn) / (mx-mn))
            output['norm_param'] = np.array([mx, mn])
        # NOTE(review): phase keeps all spec.shape[0] bins while the
        # magnitude drops the last one — confirm this asymmetry is intended.
        output['phase'] = np.angle(spec)
        output['magnitude'] = mag
    except Exception as my_error:
        # Errors are swallowed; callers must tolerate missing keys.
        logger.error(my_error)
    return output
def spec_mag_log(audio_file):
    """Compute the min-max-normalized log-magnitude spectrum and phase.

    On failure the error is logged and a partially-filled (possibly
    empty) dict is returned.
    """
    result = {}
    logger = logging.getLogger('computing_spec')
    try:
        raw = spec_mag(audio_file, False)  # magnitude without normalization
        mag = raw['magnitude']
        result['phase'] = raw['phase']
        log_mag = np.log1p(mag)
        hi = np.max(log_mag)
        lo = np.min(log_mag)
        result['norm_param'] = np.array([hi, lo])
        result['log_magnitude'] = (log_mag - lo) / (hi - lo)
    except Exception as my_error:
        logger.error(my_error)
    return result
def compute_one_song(folder):
    """Compute and save the complex spectra of every instrument stem in *folder*.

    The spectra are written as a single compressed ``.npz`` named after the
    folder (spaces replaced by underscores) in ``config.PATH_SPEC``.
    """
    logger = logging.getLogger('computing_spec')
    name = os.path.basename(os.path.normpath(folder)).replace(' ', '_')
    logger.info('Computing spec for %s' % name)
    # One complex spectrogram per instrument stem (<folder>/<instrument>.wav).
    data = {i: spec_complex(folder+i+'.wav')['spec']
            for i in config.INTRUMENTS}
    np.savez(
        os.path.join(config.PATH_SPEC, name+'.npz'),
        config=get_config_as_str(), **data
    )
    return
def main():
    """Compute spectra for every song directory under ``config.PATH_RAW``."""
    # NOTE(review): logging/output-dir setup is commented out, so
    # 'computing_spec' log records go to the root handler (if any).
    # if not os.path.exists(os.path.join(config.PATH_SPEC)):
    #     os.mkdir()
    # logging.basicConfig(
    #     filename=os.path.join(config.PATH_SPEC, config.LOG_FILENAME),
    #     level=logging.INFO
    # )
    # logger = logging.getLogger('computing_spec')
    # logger.info('Starting the computation')
    for i in glob(os.path.join(config.PATH_RAW, '*/')):
        if os.path.isdir(i):
            compute_one_song(i)
    return


if __name__ == '__main__':
    config.parse_args()
    main()
| StarcoderdataPython |
1783779 | import logging
import os
import argparse
from simpletransformers.language_generation import LanguageGenerationModel
logging.basicConfig(level=logging.INFO)
transformers_logger = logging.getLogger("transformers")
transformers_logger.setLevel(logging.WARNING)
def main():
    """Generate one GPT-2 completion per source prompt and score exact matches.

    Reads ``test.source`` and ``test.target`` from ``--data_dir``, generates a
    completion for each prompt, writes the hypotheses to ``test.hypo`` in
    ``--output_dir`` and prints how many generations exactly match the target.
    """
    parser = argparse.ArgumentParser()

    # Required parameters
    parser.add_argument(
        "--data_dir",
        default=None,
        type=str,
        required=True,
        help="The input data dir. Should contain the source and target files for the task.",
    )
    parser.add_argument(
        "--model_name_or_path",
        default=None,
        type=str,
        required=True,
        help="Path to pretrained model or model identifier from huggingface.co/models",
    )
    parser.add_argument(
        "--output_dir",
        default='output_dir/', type=str,
        help="The output directory where the model predictions and checkpoints will be written.",
    )
    args = parser.parse_args()

    model = LanguageGenerationModel(model_type="gpt2", model_name=args.model_name_or_path, args={"max_length": 64})

    # Fix: the original left all three file handles open for the life of the
    # process, so buffered test.hypo output could be lost on abnormal exit.
    # Context managers close them deterministically; behavior is otherwise
    # unchanged.
    with open(args.data_dir + 'test.source', 'r', encoding='utf8') as fsource:
        prompts = [sent.strip() + '\t' for sent in fsource.readlines()]
    with open(args.data_dir + 'test.target', 'r', encoding='utf8') as ftarget:
        targets = [sent.strip() + '\t' for sent in ftarget.readlines()]

    assert len(prompts) == len(targets)
    case_number = len(prompts)
    correct_number = 0

    with open(args.output_dir + 'test.hypo', 'w', encoding='utf8', newline='\n') as foutput:
        for i, prompt in enumerate(prompts):
            # Generate text using the model. Verbose set to False to prevent
            # logging generated sequences.
            generated = model.generate(prompt, verbose=False)
            print("=============================================================================")
            print(generated[0])
            # Keep only the text after the prompt's tab separator, dropping
            # trailing '!' padding tokens.
            generated[0] = generated[0].split('\t')[1].strip('!')
            targets[i] = targets[i].strip()
            print(targets[i])
            print(generated[0])
            print("=============================================================================")
            foutput.write(generated[0])
            if generated[0] == targets[i]:
                correct_number += 1
    print('correct number = {}, case number = {}'.format(correct_number, case_number))


if __name__ == '__main__':
    main()
1673966 | # This algorithm is limited to algorithm verification
import argparse
import cv2
import os
import numpy as np
import pandas as pd
import sys
from tqdm import tqdm
from skimage import transform
from pprint import pprint
from mtcnn.mtcnn import MTCNN
import tensorflow as tf
tf.logging.set_verbosity(tf.logging.ERROR)
# from common.landmark_utils import LandmarkImageCrop
# from common.landmark_helper import LandmarkHelper
# Command-line configuration for the alignment script.
ap = argparse.ArgumentParser()
ap.add_argument("-l", "--landmark_txt", type=str, default='./new_dataset/landmarks.txt',
                help="path to landmarks txt")
ap.add_argument("-c", "--landmark_csv", type=str, default='./new_dataset/face_landmarks.csv',
                help="exist landmarks csv")
ap.add_argument("-b", "--base_dir", type=str, default='./new_dataset',
                help="base dataset dir")
ap.add_argument("-s", "--output_size", type=int, default=112,
                help="output image size")
ap.add_argument("-n", "--new_path", type=str, default='./align_new_dataset',
                help="new save image file")
args = vars(ap.parse_args())

# Canonical facial keypoints (left eye, right eye, nose) in pixel
# coordinates of a 112x112 aligned face, used as similarity-transform targets.
REFERENCE_FACIAL_POINTS = [[38.453125, 28.139446],
                           [70.8962, 27.549734],
                           [54.171013, 50.283226]]
# def scale_and_shift(image, landmarks, scale_range, output_size):
# '''
# Auto generate bbox and then random to scale and shift it.
# Args:
# image: a numpy type
# landmarks: face landmarks with format [(x1, y1), ...]. range is 0-w or h in int
# scale_range: scale bbox in (min, max). eg: (1.3, 1.5)
# output_size: output size of image
# Returns:
# an image and landmarks will be returned
# Raises:
# No
# '''
# (x1, y1, x2, y2), new_size, need_pad, (p_x, p_y, p_w, p_h) = LandmarkImageCrop.get_bbox_of_landmarks(
# image, landmarks, scale_range, shift_rate=0.3)
# box_image = image[y1:y2, x1:x2]
# if need_pad:
# box_image = np.lib.pad(
# box_image, ((p_y, p_h), (p_x, p_w), (0, 0)), 'constant')
# box_image = cv2.resize(box_image, (output_size, output_size))
# landmarks = (landmarks - (x1 - p_x, y1 - p_y))
# return box_image, landmarks
class FaceAlign(object):
    '''Align face with MTCNN'''

    def __init__(self, out_size):
        # out_size: side length (pixels) of the square aligned output image.
        self.detector = MTCNN()
        self.out_size = out_size

    def face_aligned_mtcnn(self, im):
        '''
        Function: Alignment with MTCNN Prior box
        im: BGR image array

        Raises ValueError when MTCNN finds no face.
        '''
        try:
            # MTCNN expects RGB, hence the channel flip on the BGR input.
            wrapper = self.detector.detect_faces(im[:, :, ::-1])[0]
        except:
            raise ValueError("No face...")
        points = wrapper['keypoints']
        values = list(points.values())
        # NOTE(review): assumes the first two keypoints are the eyes
        # (dict insertion order from MTCNN) — confirm against the
        # mtcnn package version in use.
        gt_array = np.array(values).reshape((-1, 2))[:2]
        ref_array = np.array(REFERENCE_FACIAL_POINTS[:2], dtype=np.float32)
        # Estimate a similarity transform mapping detected eyes onto the
        # reference eye positions, then warp the image with it.
        tform = transform.SimilarityTransform()
        tform.estimate(gt_array, ref_array)
        tfm = tform.params[0: 2, :]
        return cv2.warpAffine(
            im, tfm, (self.out_size, self.out_size))

    def face_aligned(self, im, ldmarks):
        '''
        im: BGR array
        ldmarks: [(x0, y0), ...]

        Aligns using the first two provided landmarks (assumed eyes) and
        returns (warped_image, similarity_transform).
        '''
        gt_array = np.array(ldmarks)[:2]
        ref_array = np.array(REFERENCE_FACIAL_POINTS[:2], dtype=np.float32)
        tform = transform.SimilarityTransform()
        tform.estimate(gt_array, ref_array)
        tfm = tform.params[0: 2, :]
        return cv2.warpAffine(
            im, tfm, (self.out_size, self.out_size)), tform
if __name__ == '__main__':
    # with open('./dataset/landmarks.txt') as f:
    #     samples_list = []
    #     for line in f.readlines():
    #         # Parse txt file
    #         img_path, landmarks = LandmarkHelper.parse(line)
    #         image_path = os.path.join("./dataset", img_path)
    #         im = cv2.imread(image_path)
    #         image, landmarks = scale_and_shift(
    #             im, landmarks, scale_range=(1.1, 1.5), output_size=112)
    #         cv2.imshow("image", image)
    #         cv2.waitKey(0)
    if not os.path.exists(args['new_path']):
        os.mkdir(args['new_path'])
    root_dir = args['base_dir']
    # CSV layout: column 0 = image filename, columns 1.. = 106 (x, y)
    # landmark pairs normalized to [0, 1].
    df = pd.read_csv(args['landmark_csv'], header=None)
    ldmarks = np.array(df.iloc[:, 1:])
    ldmarks = ldmarks.reshape((-1, 106, 2)) * \
        (args['output_size'], args['output_size'])
    # Dataset-average pupil/nose positions serve as the alignment targets.
    # NOTE(review): indices 34/92/86 are assumed to be left pupil, right
    # pupil and nose tip in this 106-point annotation scheme — confirm.
    ref_leftpupil = np.mean(ldmarks[:, 34], axis=0)
    ref_rightpupil = np.mean(ldmarks[:, 92], axis=0)
    ref_nose = np.mean(ldmarks[:, 86], axis=0)
    ref_array = np.stack(
        [ref_leftpupil, ref_rightpupil, ref_nose], axis=0).astype(np.float32)
    boxes = np.empty(
        (df.shape[0], args['output_size'], args['output_size'], 3), dtype=np.uint8)
    landmarks = np.empty((df.shape[0], 212))
    for idx in tqdm(range(df.shape[0])):
        im = cv2.imread(os.path.join(root_dir, df.iloc[idx, 0]))
        im = cv2.resize(im, (args['output_size'], args['output_size']))
        gt_ldmarks = ldmarks[idx]
        gt = np.array(df.iloc[idx, 1:], dtype=np.float32).reshape(
            (-1, 2)) * (args['output_size'], args['output_size'])
        gt_leftpupil = gt[34]
        gt_rightpupil = gt[92]
        gt_nose = gt[86]
        gt_array = np.stack(
            [gt_leftpupil, gt_rightpupil, gt_nose], axis=0).astype(np.float32)
        # M = cv2.getAffineTransform(gt_array, ref_array)
        # Similar transformation
        tform = transform.SimilarityTransform()
        tform.estimate(gt_array, ref_array)
        tfm = tform.params[0: 2, :]
        dst = cv2.warpAffine(
            im, tfm, (args['output_size'], args['output_size']))
        # Apply the same affine transform to the landmarks (homogeneous coords).
        b = np.ones((gt_ldmarks.shape[0], 1))
        d = np.concatenate((gt_ldmarks, b), axis=1)
        gt_ldmarks = np.dot(d, np.transpose(tfm))
        boxes[idx] = dst
        landmarks[idx] = (gt_ldmarks / (args['output_size'])).flatten()
        # for ldmark in gt_ldmarks:
        #     cv2.circle(
        #         dst, (int(ldmark[0]), int(ldmark[1])), 2, (255, 0, 0), -1)
        # cv2.imshow("image", dst)
        # cv2.waitKey(0)
    # Save image and new landmarks
    ldmark_dict = dict()
    for box, ldmark, num in tqdm(zip(boxes, landmarks, np.arange(df.shape[0]))):
        cv2.imwrite("{}.png".format(
            os.path.join(args['new_path'], str(num).zfill(5))), box)
        ldmark_dict["{}.png".format(str(num).zfill(5))] = ldmark
    df = pd.DataFrame(ldmark_dict).T
    df.to_csv("{}/face_landmarks.csv".format(args['new_path']),
              encoding="utf-8", header=None)
    pprint("Complete conversion!!!")
| StarcoderdataPython |
23438 | <filename>integration/phore/tests/shardsynctest.py
import logging
from phore.framework import tester, validatornode, shardnode
from phore.pb import common_pb2
class ShardSyncTest(tester.Tester):
    """Integration test: a fresh shard node should sync shard 1 from a peer."""

    def __init__(self):
        # NOTE(review): logging.info(logging.INFO) logs the constant 20;
        # this was probably meant to be logging.basicConfig(level=logging.INFO)
        # — confirm before changing.
        logging.info(logging.INFO)
        super().__init__()

    def _do_run(self):
        # One beacon node backs both shard nodes.
        beacon_nodes = [self.create_beacon_node() for _ in range(1)]
        beacon_nodes[0].start()
        beacon_nodes[0].wait_for_rpc()
        shard_node_configs = [shardnode.ShardConfig.from_beacon(beacon_nodes[0]) for _ in range(2)]
        shard_nodes = []
        for c in shard_node_configs:
            c.initial_shards = ['1']
            shard_nodes.append(self.create_shard_node(c))
        shard_nodes[0].start()
        shard_nodes[0].wait_for_rpc()
        shard_nodes[1].start()
        shard_nodes[1].wait_for_rpc()
        # A validator attached to shard node 0 produces blocks to sync.
        validator_node = self.create_validator_node(
            validatornode.ValidatorConfig.from_beacon_and_shard(beacon_nodes[0], shard_nodes[0], "0-255")
        )
        validator_node.start()
        validator_node.wait_for_rpc()
        # Let node 0 advance, then connect node 1 and verify it catches up.
        shard_nodes[0].wait_for_slot(4, 1)
        shard_node_0_addr = shard_nodes[0].get_listening_addresses().Addresses[0]
        shard_nodes[1].connect(common_pb2.ConnectMessage(Address=shard_node_0_addr))
        shard_nodes[1].wait_for_slot(8, 1)


# Run the test when the module is executed.
ex = ShardSyncTest()
ex.run()
| StarcoderdataPython |
3372262 | from analyzer.syntax_kind import SyntaxKind
class VariableDeclarationSyntax(object):
    """Syntax node for a variable declaration statement.

    Holds the ``var`` keyword token, the declared variables, an optional
    export token and the terminating semicolon; ``str()`` reproduces the
    original source text by concatenating all tokens in order.
    """

    def __init__(self, var_token, variables, export_token, semicolon_token):
        self.kind = SyntaxKind.VariableDeclaration
        self.var_token = var_token
        self.variables = variables
        self.export_token = export_token
        self.semicolon_token = semicolon_token

    def __str__(self):
        pieces = [str(self.var_token)]
        pieces.extend(str(variable) for variable in self.variables)
        pieces.append(str(self.export_token))
        pieces.append(str(self.semicolon_token))
        return ''.join(pieces)
| StarcoderdataPython |
183609 | from .base import BaseUrlsTestCase
from .registration import RegistrationUrlsTestCase
from .login import LoginUrlsTestCase
from .logout import LogoutUrlsTestCase
from .auth_info import AuthInfoUrlsTestCase
| StarcoderdataPython |
3329518 | import streamlit as st
from utils.streamlit_utils import paint
def app():
    """Render the COVID-19 deaths page of the Streamlit dashboard."""
    st.title("Deaths")
    st.header("Welcome to the COVID 19 Deaths Page")
    # Delegates the actual chart rendering for the 'deaths' metric.
    paint("deaths")
| StarcoderdataPython |
1782734 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from chainer import cuda
from chainer import initializers
from chainer import link
from chainer import variable
from lib.functions.connection import graph_convolution
from lib import graph
class GraphConvolution(link.Link):
    """Graph convolutional layer.
    This link wraps the :func:`graph_convolution` function and holds the filter
    weight and bias vector as parameters.
    Args:
        in_channels (int): Number of channels of input arrays. If ``None``,
            parameter initialization will be deferred until the first forward
            data pass at which time the size will be determined.
        out_channels (int): Number of channels of output arrays.
        A (~ndarray): Weight matrix describing the graph.
        K (int): Polynomial order of the Chebyshev approximation.
        wscale (float): Scaling factor of the initial weight.
        bias (float): Initial bias value.
        nobias (bool): If ``True``, then this link does not use the bias term.
        initialW (4-D array): Initial weight value. If ``None``, then this
            function uses to initialize ``wscale``.
            May also be a callable that takes ``numpy.ndarray`` or
            ``cupy.ndarray`` and edits its value.
        initial_bias (1-D array): Initial bias value. If ``None``, then this
            function uses to initialize ``bias``.
            May also be a callable that takes ``numpy.ndarray`` or
            ``cupy.ndarray`` and edits its value.
    .. seealso::
       See :func:`graph_convolution` for the definition of
       graph convolution.
    Attributes:
        W (~chainer.Variable): Weight parameter.
        b (~chainer.Variable): Bias parameter.
    Graph convolutional layer using Chebyshev polynomials
    in the graph spectral domain.
    This link implements the graph convolution described in
    the paper
    Defferrard et al. "Convolutional Neural Networks on Graphs
    with Fast Localized Spectral Filtering", NIPS 2016.
    """

    def __init__(self, in_channels, out_channels, A, K, wscale=1, bias=0,
                 nobias=False, initialW=None, initial_bias=None):
        super(GraphConvolution, self).__init__()
        # The graph Laplacian is precomputed once from the adjacency matrix.
        L = graph.create_laplacian(A)
        self.K = K
        self.out_channels = out_channels
        # NOTE(review): wscale is stored but not used anywhere in this
        # class — confirm whether it should feed the initializer.
        self.wscale = wscale
        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)
            if in_channels is not None:
                self._initialize_params(in_channels)
            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = bias
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer, out_channels)
        # The function object carries the Laplacian; it must be moved
        # between devices together with the link (see to_cpu/to_gpu).
        self.func = graph_convolution.GraphConvolutionFunction(L, K)

    def to_cpu(self):
        # Move parameters and the Laplacian held by the function to CPU.
        super(GraphConvolution, self).to_cpu()
        self.func.to_cpu()

    def to_gpu(self, device=None):
        # Move parameters and the Laplacian held by the function to GPU.
        with cuda.get_device(device):
            super(GraphConvolution, self).to_gpu(device)
            self.func.to_gpu(device)

    def _initialize_params(self, in_channels):
        # Weight shape: one Chebyshev coefficient per (out, in) channel pair.
        W_shape = (self.out_channels, in_channels, self.K)
        self.W.initialize(W_shape)

    def forward(self, x):
        """Applies the graph convolutional layer.
        Args:
            x: (~chainer.Variable): Input graph signal.
        Returns:
            ~chainer.Variable: Output of the graph convolution.
        """
        if self.W.array is None:
            # Deferred initialization: infer in_channels from the input.
            self._initialize_params(x.shape[1])
        if self.b is None:
            return self.func(x, self.W)
        else:
            return self.func(x, self.W, self.b)
| StarcoderdataPython |
4821382 | import markdown.extensions
def process_posts_and_pages(*, posts, pages, settings):
    """Dummy processer that sets an attribute on posts and pages"""
    # Tag each group with its kind so tests can observe the processing.
    for attr_value, group in (('post', posts), ('page', pages)):
        for item in group:
            item.test_attr = attr_value
    return {'posts': posts, 'pages': pages}
def process_objects_to_write(*, objects, settings):
    """Dummy processer that sets an attribute on all objects"""
    for item in objects:
        setattr(item, 'test_attr', 'obj')
    return {'objects': objects}
class DummyMarkdownExtension(markdown.extensions.Extension):
    """Dummy Markdown extension so we hit the code path in tests"""

    def extendMarkdown(self, md, md_globals):
        # Intentionally a no-op: registering the extension is the test.
        pass
| StarcoderdataPython |
3284354 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def forwards_func(apps, schema_editor):
    """Copy each party's single ``special`` FK into the new M2M ``specials``."""
    # We get the model from the versioned app registry;
    # if we directly import it, it'll be the wrong version
    Party = apps.get_model("party", "Party")
    for party in Party.objects.filter(special__isnull=False):
        party.specials.add(party.special)
class Migration(migrations.Migration):
    """Add Party.specials (M2M) and backfill it from the old FK."""

    dependencies = [
        ('special', '0002_special_image'),
        ('party', '0008_auto_20160526_0832'),
    ]
    operations = [
        migrations.AddField(
            model_name='party',
            name='specials',
            field=models.ManyToManyField(related_name='specials_party_list', null=True, to='special.Special', blank=True),
            preserve_default=True,
        ),
        # Data migration: populate the new M2M from the existing FK values.
        migrations.RunPython(forwards_func),
    ]
| StarcoderdataPython |
145549 | <filename>submissions/abc125/b.py
n = int(input())
v = list(map(int, input().split()))
c = list(map(int, input().split()))
ans = 0
for i in range(n):
xy = v[i] - c[i]
if xy > 0:
ans += xy
print(ans)
| StarcoderdataPython |
1711979 | <gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from typing import Dict, Any
import warnings
from aws_lambda_powertools import Logger, Metrics, Tracer
from aws_lambda_powertools.utilities.typing import LambdaContext
from controltowerapi.models import AccountModel
from responses import build_response, error_response, authenticate_request
warnings.filterwarnings("ignore", "No metrics to publish*")
tracer = Tracer()
logger = Logger()
metrics = Metrics()
@metrics.log_metrics(capture_cold_start_metric=True)
@tracer.capture_lambda_handler
@logger.inject_lambda_context(log_event=True)
def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict[str, Any]:
    """API Gateway handler: return details of one queued account by name.

    Expects an authenticated request with an ``accountName`` path parameter;
    responds 400 on malformed events, 404 when no such account is queued.
    """
    if not event or "pathParameters" not in event:
        return error_response(400, "Unknown event")

    # authenticate_request returns True on success, otherwise a ready-made
    # error response that we pass straight back to API Gateway.
    result = authenticate_request(event)
    if result is not True:
        return result

    account_name = event.get("pathParameters", {}).get("accountName")

    try:
        account = AccountModel.get(account_name)
    except AccountModel.DoesNotExist:
        return error_response(404, "Account not found")

    data = {
        "account_name": account.account_name,
        "ou_name": account.ou_name,
        "status": account.status,
        "queued_at": str(account.queued_at),
    }
    return build_response(200, data)
| StarcoderdataPython |
3391897 | # -*- coding: utf-8 -*-
"""
File Name: largest-perimeter-triangle.py
Author : jynnezhang
Date: 2020/11/29 12:55 下午
Description:
https://leetcode-cn.com/problems/largest-perimeter-triangle/
"""
class Solution:
    """LeetCode 976: Largest Perimeter Triangle."""

    def largestPerimeter(self, A=None) -> int:
        """Return the largest perimeter of any valid triangle formed from A.

        Fix: the original signature used a mutable default argument
        (``A=[]``); ``None`` is used instead (observable behavior is
        identical — both defaults yield 0).

        Sorting descending and scanning adjacent triples works because if
        A[i-1] + A[i] <= A[i-2], no smaller pair can close a triangle with
        A[i-2] either, so the first valid triple found is the maximum.
        Note: sorts *A* in place, matching the original implementation.
        """
        if not A or len(A) < 3:
            return 0
        A.sort(reverse=True)
        for i in range(2, len(A)):
            a1, a2, a3 = A[i - 2], A[i - 1], A[i]
            # Triangle inequality: the two smaller sides must beat the largest.
            if a2 + a3 > a1:
                return a1 + a2 + a3
        return 0


if __name__ == '__main__':
    print(Solution().largestPerimeter([3, 6, 2, 3]))
| StarcoderdataPython |
4824493 | <filename>examples/python-mpi-example.py<gh_stars>0
from mpi4py import MPI
from time import sleep
import sys
import subprocess
import random
import socket
# Messages below this priority are suppressed by logprint().
log_priority_threshold = 0


def logprint(priority, message):
    """Print *message* unless its *priority* is below the module threshold."""
    if priority < log_priority_threshold:
        return
    print(message)
class Controller():
    """Rank-0 bookkeeping: tracks idle worker ranks and shuts them down."""

    def __init__(self, comm):
        self.comm = comm
        self.n_workers = comm.Get_size()
        # Rank 0 is the controller itself; ranks 1..n-1 are workers.
        self.available_workers = set(range(1, self.n_workers))

    def have_available_workers_p(self):
        """Return True if at least one worker rank is idle."""
        return bool(self.available_workers)

    def add_available_worker(self, w):
        """Mark worker rank *w* as idle again."""
        self.available_workers.add(w)

    def terminate_everything(self):
        """Shut down every worker, then exit the controller process."""
        self.terminate_workers(list(range(1, self.n_workers)))
        print('Rank 0: Finished Successfully!!')
        sys.exit(0)

    def terminate_workers(self, workers_l):
        """Send a non-blocking exit message to each rank in *workers_l*."""
        for rank in workers_l:
            self.comm.isend({'should_exit': True, 'problem': None}, dest=rank)

    def get_available_worker(self):
        """Pop and return an idle worker rank.

        ``available_workers`` MUST BE NOT EMPTY.
        """
        return self.available_workers.pop()
class ProblemSolver():
    """Generates shell-listing tasks and aggregates their results."""

    def __init__(self):
        # Accumulates (directory, output_length) pairs plus a running total.
        self.data_aggregator = {'list': [], 'counter': 0}
        self.dir_list = []

    def initialize_task_generator(self):
        """Populate the directory worklist and (re)create the task generator."""
        self.dir_list = ['/', '/hb/home', '/hb/home', '/hb/home', '/hb/home',
                         '/hb/home', '/hb/home', '/hb/home', '/hb/home',
                         '/hb/home', '/hb/home', '/hb/home', '/hb/home',
                         '/hb/home', '/hb/home']
        self.task_generator = self.task_generator_creator()

    def task_generator_creator(self):
        """Yield ['ls', dir] tasks, sometimes preceded by a 'Wait Please!'
        sentinel (40% chance) to simulate tasks arriving over time; ends
        with an 'End of Problems!' sentinel."""
        for d in self.dir_list:
            if random.randint(1, 10) > 6:
                yield 'Wait Please!'
            yield ['ls', d]
        yield 'End of Problems!'

    def process_results(self, results):
        """Fold one worker result dict into the aggregate statistics."""
        logprint(10, 'process_results: {}, {}'.format(results['directory'],
                                                      results['len']))
        self.data_aggregator['list'].append(
            (results['directory'], results['len']))
        self.data_aggregator['counter'] += results['len']

    def next_task(self):
        """Return the next task or sentinel from the generator."""
        return next(self.task_generator)

    def print_final_statistics(self):
        logprint(10, '== Final Statistics by Solver ==')
        logprint(10, self.data_aggregator)
def calculate(problem):
    """Run the shell command described by *problem* and summarize its output.

    *problem* is a dict with 'cmd_l' (argv list) and 'cmd_input' (stdin text).
    Returns {'directory': <second argv element>, 'len': <stdout length>}.
    The random sleep simulates variable per-task work.
    """
    cmd_l = problem['cmd_l']
    input = problem['cmd_input'].encode('utf-8')
    output = subprocess.run(cmd_l, stdout=subprocess.PIPE,
                            input=input).stdout.decode('utf-8')
    # parse command output
    results = {'directory': cmd_l[1], 'len': len(output)}
    sleep(random.randint(0,2))
    return results
def unpack_problem(message):
    """Extract the problem payload carried by *message*."""
    return message['problem']
def worker_should_exit(message):
    """Return True when *message* tells the receiving worker to terminate."""
    return message['should_exit']
def pack_problem(problem_cmd_l):
    """Wrap a command argv list in the controller-to-worker message format."""
    return {
        'should_exit': False,
        'problem': {'cmd_l': problem_cmd_l, 'cmd_input': ''},
    }
def message_pending_P(comm):
    # Non-blocking probe: True if any rank has a message waiting for us.
    return comm.Iprobe(source=MPI.ANY_SOURCE)
def main():
    """Controller/worker entry point: rank 0 dispatches tasks, others compute."""
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    if rank == 0:
        # --- Controller loop ---
        logprint(10, 'Starting with {} cores!'.format(comm.Get_size()))
        status = MPI.Status()
        controller = Controller(comm)
        solver = ProblemSolver()
        solver.initialize_task_generator()
        while True:
            # Drain all finished results, returning their workers to the pool.
            while message_pending_P(comm):
                data = comm.recv(status=status)
                controller.add_available_worker(status.Get_source())
                solver.process_results(data)
            # Hand out tasks while idle workers remain.
            while controller.have_available_workers_p():
                problem = solver.next_task()
                if problem == 'Wait Please!':
                    logprint(10, 'Waiting for new tasks to occur...')
                    break
                elif problem == 'End of Problems!':
                    # terminate_everything() calls sys.exit(0).
                    solver.print_final_statistics()
                    controller.terminate_everything()
                else:
                    worker = controller.get_available_worker()
                    logprint(10, 'Packing problem: {}'.format(problem))
                    message = pack_problem(problem)
                    comm.isend(message, dest=worker)
            # Throttle the polling loop.
            sleep(0.2)
    else:
        # --- Worker loop: block on messages until told to exit ---
        while True:
            message = comm.recv()
            if worker_should_exit(message):
                print('Rank {}: Exiting by request!'.format(rank))
                sys.exit(0)
            logprint(10, 'Rank {}({}): Executing task'.format(rank,
                     socket.gethostname()))
            problem = unpack_problem(message)
            results = calculate(problem)
            comm.send(results, dest=0)


if __name__ == '__main__':
    main()
| StarcoderdataPython |
import os
# example: renames e.g. EX_10.5.py to 10_5.py (translated from Portuguese)
for nome in os.listdir('./Minicurso/Minicurso API'):
    # adjust to your naming needs and file layout (translated from Portuguese)
    # NOTE(review): the suffix is appended AFTER the existing name including
    # its extension (foo.png -> foo.png_Minicurso_API.png), and the printed
    # new name omits the '.png' actually written — confirm intent.
    os.rename("./Minicurso/Minicurso API/"+nome, "./Minicurso/Minicurso API/"+nome+"_Minicurso_API.png")
    print("arquivo " + nome + " alterado para " +nome+"_Minicurso_API")
4822373 | <gh_stars>0
from zmq_cache.zmq_cache_server import CacheServer
| StarcoderdataPython |
3326574 | # coding=utf-8
###############################################################################
#
# This file is part of pyglfw project which is subject to zlib license.
# See the LICENSE file for details.
#
# Copyright (c) 2013 <NAME> <<EMAIL>>
#
###############################################################################
from . import _wrapapi as api
class _HintsBase(object):
    """Container for GLFW window-creation hints.

    Values are stored keyed by their GLFW hint constant; the derived
    ``Hints`` class (built via ``_hntprops_``) exposes each entry of
    ``_hint_map_`` / ``_over_map_`` as a named property. Unset hints are
    simply absent and read back as ``None``.
    """

    # One property name per single GLFW hint constant.
    _hint_map_ = {
        'resizable': api.GLFW_RESIZABLE,
        'visible': api.GLFW_VISIBLE,
        'decorated': api.GLFW_DECORATED,
        'red_bits': api.GLFW_RED_BITS,
        'green_bits': api.GLFW_GREEN_BITS,
        'blue_bits': api.GLFW_BLUE_BITS,
        'alpha_bits': api.GLFW_ALPHA_BITS,
        'depth_bits': api.GLFW_DEPTH_BITS,
        'stencil_bits': api.GLFW_STENCIL_BITS,
        'accum_red_bits': api.GLFW_ACCUM_RED_BITS,
        'accum_green_bits': api.GLFW_ACCUM_GREEN_BITS,
        'accum_blue_bits': api.GLFW_ACCUM_BLUE_BITS,
        'accum_alpha_bits': api.GLFW_ACCUM_ALPHA_BITS,
        'aux_buffers': api.GLFW_AUX_BUFFERS,
        'samples': api.GLFW_SAMPLES,
        'refresh_rate': api.GLFW_REFRESH_RATE,
        'stereo': api.GLFW_STEREO,
        'srgb_capable': api.GLFW_SRGB_CAPABLE,
        'client_api': api.GLFW_CLIENT_API,
        'context_ver_major': api.GLFW_CONTEXT_VERSION_MAJOR,
        'context_ver_minor': api.GLFW_CONTEXT_VERSION_MINOR,
        'context_robustness': api.GLFW_CONTEXT_ROBUSTNESS,
        'debug_context': api.GLFW_OPENGL_DEBUG_CONTEXT,
        'forward_compat': api.GLFW_OPENGL_FORWARD_COMPAT,
        'opengl_profile': api.GLFW_OPENGL_PROFILE,
    }

    # Aggregate properties that read/write a tuple of hint constants.
    _over_map_ = {
        'context_version': (api.GLFW_CONTEXT_VERSION_MAJOR,
                            api.GLFW_CONTEXT_VERSION_MINOR,),
        'rgba_bits': (api.GLFW_RED_BITS,
                      api.GLFW_GREEN_BITS,
                      api.GLFW_BLUE_BITS,
                      api.GLFW_ALPHA_BITS,),
        'rgba_accum_bits': (api.GLFW_ACCUM_RED_BITS,
                            api.GLFW_ACCUM_GREEN_BITS,
                            api.GLFW_ACCUM_BLUE_BITS,
                            api.GLFW_ACCUM_ALPHA_BITS,),
    }

    def __init__(self, **kwargs):
        """Accept any name from _hint_map_/_over_map_ as a keyword arg.

        Unknown keyword names are silently ignored (historical behavior).
        """
        self._hints = {}
        for k, v in kwargs.items():
            is_hint = k in self.__class__._hint_map_
            is_over = k in self.__class__._over_map_
            if is_hint or is_over:
                setattr(self, k, v)

    def __getitem__(self, index):
        """Return the stored value for hint constant *index* (None if unset)."""
        if index not in self.__class__._hint_map_.values():
            raise TypeError()
        return self._hints.get(index, None)

    def __setitem__(self, index, value):
        """Set, replace, or (with value=None) clear the hint *index*.

        Raises TypeError for an unknown hint constant or a non-int value.
        """
        if index not in self.__class__._hint_map_.values():
            # Bug fix: an unknown hint constant used to be silently
            # ignored here, unlike __getitem__/__delitem__ which raise.
            raise TypeError()
        if value is None:
            # Clearing an absent hint is a no-op.
            self._hints.pop(index, None)
        elif isinstance(value, int):
            self._hints[index] = value
        else:
            raise TypeError()

    def __delitem__(self, index):
        """Clear the hint *index*; no-op if it was never set."""
        if index not in self.__class__._hint_map_.values():
            raise TypeError()
        self._hints.pop(index, None)
def _hntprops_(hint_map, over_map):
prop_map = {}
def _hint_property(hint):
def _get(self):
return self[hint]
def _set(self, value):
self[hint] = value
def _del(self):
del self[hint]
return property(_get, _set, _del)
for prop, hint in hint_map.items():
prop_map[prop] = _hint_property(hint)
def _over_property(over):
def _get(self):
value = [self[hint] for hint in over]
return tuple(value)
def _set(self, value):
for hint, v in zip(over, value):
self[hint] = v
def _del(self):
for hint in over:
del self[hint]
return property(_get, _set, _del)
for prop, over in over_map.items():
prop_map[prop] = _over_property(over)
return prop_map
# Public Hints class: _HintsBase plus one named property per entry in
# _hint_map_ / _over_map_ (e.g. ``hints.resizable``, ``hints.rgba_bits``).
Hints = type('Hints',
             (_HintsBase,),
             _hntprops_(_HintsBase._hint_map_, _HintsBase._over_map_))
| StarcoderdataPython |
4809940 | <reponame>gpetretto/emmet
from emmet.builders.settings import EmmetBuilderSettings
# Module-level settings singleton shared by the emmet builders.
SETTINGS = EmmetBuilderSettings()
| StarcoderdataPython |
3304907 | # -*- coding: utf-8 -*-
import datetime
import json
import scrapy
from scrapy import Selector
from spider.consts import MYSQL_ITEM_PIPELINES, DOWNLOADER_MIDDLEWARES_HTTP_PROXY_OFF
from spider.items import SpiderLoaderItem, AnimationBangumiItem, AnimationEpisodeItem
class JiadiandmSpider(scrapy.Spider):
    """Crawls www.jiadiandm.com for animation (bangumi) and episode data.

    Flow: search listing -> every result page -> every title's detail
    page. Items are persisted through the MySQL item pipelines set in
    custom_settings; cover images are downloaded via image_urls.
    """
    name = "jiadiandm"
    custom_settings = {
        'ITEM_PIPELINES': MYSQL_ITEM_PIPELINES,
        'SHOW_SQL': False,
        'DOWNLOADER_MIDDLEWARES': DOWNLOADER_MIDDLEWARES_HTTP_PROXY_OFF
    }
    allowed_domains = ['www.jiadiandm.com']
    # Crawl date stamp; evaluated once at class-definition time.
    batch_date = datetime.datetime.now().date()
    # Request body; currently empty, serialized to a JSON string below.
    default_data = {
    }
    #default_data = urllib.parse.urlencode(default_data)
    default_data = json.dumps(default_data)
    # Browser-like headers sent with every request.
    default_headers = {
        'Cache-Control': 'max-age=0',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'ja-JP,ja;q=0.9,en-US;q=0.8,en;q=0.7,zh-CN;q=0.6,zh;q=0.5',
        #'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.167 Safari/537.36',
        'Referer': 'https://www.jiadiandm.com/rhdm/',
        'Upgrade-Insecure-Requests': 1,
        'Connection': 'keep-alive',
        'Host': 'www.jiadiandm.com'
    }
    def start_requests(self):
        """Entry point: request the search page listing all titles."""
        yield scrapy.Request(
            url='https://www.jiadiandm.com/search.php?searchtype=5',
            headers=self.default_headers, body=self.default_data, callback=self.get_final_url,dont_filter=True)
    def get_final_url(self, response):
        """Read the total page count, then request every result page."""
        total_page = response.xpath('//div[@class="wrap mb "]/div[@class="list_z2 g3"]/div[@class="pagetop2 pslpx"]/div[@class="page"]//span[1]/text()').extract()[0]
        # The span text looks like "current/total ..."; keep the total.
        pages = int(total_page.split('/')[1].split(' ')[0])
        for page in range(1,pages+1):
            #time.sleep(random.uniform(1, 3))
            url='https://www.jiadiandm.com/search.php?page='+str(page)+'&searchtype=5'
            yield scrapy.Request(
                url=url,
                headers=self.default_headers, body=self.default_data, callback=self.parse_basic_info, dont_filter=True)
    def parse_basic_info(self, response):
        """Extract url/img/name/status per title, then request its detail page."""
        contents=response.xpath('//div[@class="wrap mb "]/div[@class="list_z2 g3"]/div[@class="list_z g4 pslpx"]/ul[@class="ipic"]//li').extract()
        for content in contents:
            url='https://www.jiadiandm.com'+Selector(text=content).xpath('//li[1]/a/@href').extract()[0]
            img = Selector(text=content).xpath('//li/a/img/@src').extract()[0]
            # Prefix relative image paths with the site origin.
            # NOTE(review): `img != None` would be `is not None` idiomatically.
            if img!=None and img!='' and (not img.startswith('http')):
                img='https://www.jiadiandm.com'+img
            name = Selector(text=content).xpath('//li/a/img/@alt').extract()[0]
            # Airing status badge is optional on the listing card.
            status=None
            if len(Selector(text=content).xpath('//li/a/span/text()').extract())>0:
                status = Selector(text=content).xpath('//li/a/span/text()').extract()[0]
            yield scrapy.Request(
                url=url,
                headers=self.default_headers, body=self.default_data, callback=self.parse_detail_info, meta={'img': img, 'name': name, 'status': status}, dont_filter=True)
    def parse_detail_info(self, response):
        """Parse a title's detail page into one bangumi item plus episode items."""
        url=response.url
        img = response.meta['img']
        name = response.meta['name']
        status = response.meta['status']
        # The <p class="w2"> blocks hold area/language/date/author/update.
        # The leading character of each text node is a label separator,
        # hence the [1:] slices below.
        content = response.xpath('//div[@class="wrap mb"]/div[@class="Content_z g6 pslpx"]/div[@class="Content"]/p[@class="w2"]').extract()
        area_names = Selector(text=content[0]).xpath('//p[@class="w2"]/text()').extract()[0].strip()[1:]
        language = Selector(text=content[1]).xpath('//p[@class="w2"]/text()').extract()[0].strip()[1:]
        show_date = Selector(text=content[1]).xpath('//p[@class="w2"]/text()').extract()[1].strip()[1:]
        author_names=Selector(text=content[2]).xpath('//p[@class="w2"]/text()').extract()[0].strip()[1:]
        update_time = Selector(text=content[4]).xpath('//p[@class="w2"]/text()').extract()[0].strip()[1:]
        introduction = response.xpath('//div[@class="Content_des pslpx"]').extract()[0]
        # Bangumi (series) item; image_urls triggers the image pipeline.
        item_bangumi = SpiderLoaderItem(item=AnimationBangumiItem(image_urls=[img]), response=response)
        item_bangumi.add_value('batch_date', self.batch_date)
        item_bangumi.add_value('host', self.allowed_domains[0])
        item_bangumi.add_value('url', url)
        item_bangumi.add_value('img', img)
        item_bangumi.add_value('name', name)
        item_bangumi.add_value('author_names', author_names)
        item_bangumi.add_value('show_date', show_date)
        item_bangumi.add_value('area_names', area_names)
        item_bangumi.add_value('language', language)
        item_bangumi.add_value('status', status)
        item_bangumi.add_value('introduction', introduction)
        item_bangumi.add_value('update_time', update_time)
        item_bangumi.add_value('table_name', 'animation_bangumi')
        yield item_bangumi.load_item()
        # Episodes are grouped per player/source: one <span> name per <ul>.
        episodes = response.xpath('//div[@class="playlist pslpx"]').extract()[0]
        playee_names=Selector(text=episodes).xpath('//div[@class="playlist pslpx"]/p//span/text()').extract()
        episodess = Selector(text=episodes).xpath('//div[@class="playlist pslpx"]/ul').extract()
        episode_size=len(playee_names)
        for idx in range(0,episode_size):
            playee_name = playee_names[idx]
            episodes = Selector(text=episodess[idx]).xpath('//ul//li').extract()
            for episode in episodes:
                episode_name=Selector(text=episode).xpath('//li/a/@title').extract()[0]
                episode_url = 'https://www.jiadiandm.com'+Selector(text=episode).xpath('//li/a/@href').extract()[0]
                item_episode = SpiderLoaderItem(item=AnimationEpisodeItem(), response=response)
                item_episode.add_value('batch_date', self.batch_date)
                item_episode.add_value('bangumi_url', url)
                item_episode.add_value('episode_name', episode_name)
                item_episode.add_value('episode_url', episode_url)
                item_episode.add_value('playee_name', playee_name)
                item_episode.add_value('table_name', 'animation_episode')
                yield item_episode.load_item()
    def closed(self, reason):
        '''
        Log why the spider stopped when the crawl ends.
        (Original docstring, translated: "log out / clean up login state
        when the spider finishes".)
        :param reason: scrapy close reason string.
        :return: None
        '''
        # The warning strings below are runtime log output (kept verbatim):
        # finished / shutdown / cancelled / unknown-interruption notices.
        if 'finished' == reason:
            self.logger.warning('%s', '爬虫程序执行结束,即将关闭')
        elif 'shutdown' == reason:
            self.logger.warning('%s', '爬虫进程被强制中断,即将关闭')
        elif 'cancelled' == reason:
            self.logger.warning('%s', '爬虫被引擎中断,即将关闭')
        else:
            self.logger.warning('%s', '爬虫被未知原因打断,即将关闭')
88260 | <reponame>xiling42/VL-BERT
from .resnet_vlbert_for_pretraining import ResNetVLBERTForPretraining
from .resnet_vlbert_for_pretraining_multitask import ResNetVLBERTForPretrainingMultitask
from .resnet_vlbert_for_attention_vis import ResNetVLBERTForAttentionVis
| StarcoderdataPython |
1742228 | <filename>Colloquiums/2020-2021/Colloquium_3/Exercise_3_edmonds_karp.py<gh_stars>1-10
import collections
def bfs(graph, s, t, parent):
    """Breadth-first search for an augmenting path in a residual network.

    Args:
        graph: adjacency matrix of residual capacities; graph[u][v] > 0
            means edge u -> v still has capacity.
        s: source vertex index.
        t: sink vertex index.
        parent: list of length len(graph), filled in-place so that
            parent[v] is the predecessor of v on the discovered path
            (parent[s] is left untouched).

    Returns:
        True iff t is reachable from s via positive-capacity edges.
    """
    visited = [False] * len(graph)
    queue = collections.deque([s])
    visited[s] = True
    while queue:
        u = queue.popleft()
        for v, capacity in enumerate(graph[u]):
            # Only cross edges with remaining residual capacity.
            if not visited[v] and capacity > 0:
                visited[v] = True
                parent[v] = u
                queue.append(v)
    return visited[t]
def edmonds_karp(graph, source, sink):
    """Compute the maximum flow from source to sink (Edmonds-Karp).

    Repeatedly finds a shortest augmenting path with BFS and saturates
    it. *graph* is an adjacency matrix of capacities and is mutated in
    place into the residual network.

    Returns:
        The maximum flow value (0 when source == sink).
    """
    # Bug fix: without this guard, BFS trivially "reaches" the sink and
    # the augmentation loop below adds float("Inf") to the flow.
    if source == sink:
        return 0

    def _find_path(parent):
        # BFS over positive-capacity residual edges; fills parent[].
        visited = [False] * len(graph)
        visited[source] = True
        queue = collections.deque([source])
        while queue:
            u = queue.popleft()
            for v, capacity in enumerate(graph[u]):
                if not visited[v] and capacity > 0:
                    visited[v] = True
                    parent[v] = u
                    queue.append(v)
        return visited[sink]

    max_flow = 0
    parent = [-1] * len(graph)
    while _find_path(parent):
        # Bottleneck capacity along the discovered path.
        path_flow = float("Inf")
        v = sink
        while v != source:
            path_flow = min(path_flow, graph[parent[v]][v])
            v = parent[v]
        # Update residual capacities: forward edges lose capacity,
        # reverse edges gain it.
        v = sink
        while v != source:
            u = parent[v]
            graph[u][v] -= path_flow
            graph[v][u] += path_flow
            v = parent[v]
        max_flow += path_flow
    return max_flow
| StarcoderdataPython |
3282596 | <gh_stars>10-100
import grpc
import proto.connection_pb2_grpc
import proto.connection_pb2
from concurrent import futures
from libs.core.Log import Log
from libs.core.Event import Event
import threading
class GlobalServer(proto.connection_pb2_grpc.GlobalServerServicer):
    """gRPC servicer that forwards controller messages onto the event bus."""
    # NOTE(review): class-level mutable lists are shared across all
    # instances; neither list is used anywhere in this module — confirm
    # whether they are dead state.
    group_messages = []
    topology_messages = []
    def GroupMessage(self, request, context):
        """Publish an incoming IGMP group message and acknowledge it."""
        Event.trigger("igmp_packet_in", pkt=request)
        return proto.connection_pb2.Status(code=1, message="accepted")
    def TopologyMessage(self, request, context):
        """Publish an incoming topology message and acknowledge it."""
        Event.trigger("topology_packet_in", pkt=request)
        return proto.connection_pb2.Status(code=1, message="accepted")
    def CheckConnection(self, request, context):
        """Health-check RPC used by a local controller to verify the link."""
        Log.async_info("Local controller connected to global server")
        return proto.connection_pb2.Status(code=1, message="Connected")
    def PortMessage(self, request, context):
        """
        Receive a port (link state) message, log it, and publish it.
        """
        Log.async_info("Got port message")
        Log.async_debug(request)
        # This event is not handled by a listener yet: for demonstration
        # purposes the topology does not get updated on a link failure.
        Event.trigger("port_message", message=request)
        return proto.connection_pb2.Status(code=1, message="Accepted")
class GRPCServer:
    """Wraps a grpc.Server that hosts GlobalServer for local controllers."""
    def __init__(self, listen_port=0):
        # Port to bind; 0 lets the runtime choose.
        self.listen_port = listen_port
        self.running = True
        self.server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
        # Shut the server down cleanly when the application exits.
        Event.on('exit', self.stop)
    def start(self):
        """
        Start grpc server
        This grpc server will be used to connect the local controller with the global
        """
        proto.connection_pb2_grpc.add_GlobalServerServicer_to_server(GlobalServer(), self.server)
        # NOTE(review): insecure port = unauthenticated, unencrypted.
        self.server.add_insecure_port('[::]:' + str(self.listen_port))
        Log.async_info("Start GRPC Server on port", self.listen_port)
        self.server.start()
    def stop(self):
        """
        Stop the grpc server (1 second grace period).
        """
        self.server.stop(1)
| StarcoderdataPython |
3331870 | """Simple FUSE filesystem that mirrors a dir but hides symlinks."""
import os
import os.path
from loopback import Loopback
class HideSymlinks(Loopback):
    """A loopback filesystem that overrides getattr to hide symlinks."""

    # Presumably disables the inherited symlink handler — confirm
    # against the Loopback base class.
    symlink = None

    def getattr(self, path, fh=None):
        # os.stat follows symlinks, so a link is reported with its
        # target's metadata rather than as a link.
        wanted = ('st_atime', 'st_ctime', 'st_gid', 'st_mode',
                  'st_mtime', 'st_nlink', 'st_size', 'st_uid')
        info = os.stat(path)
        return {name: getattr(info, name) for name in wanted}
| StarcoderdataPython |
3231678 | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dataset builder for ImageNet."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from six.moves import range
import tensorflow.compat.v2 as tf
from uq_benchmark_2019 import image_data_utils
from uq_benchmark_2019.imagenet import resnet_preprocessing
# Total number of shards in the ImageNet competition training set.
IMAGENET_TRAIN_AND_VALID_SHARDS = 1024
# Number of shards split off from the training set for validation.
IMAGENET_VALID_SHARDS = 100
# Model input size: 224x224 RGB images.
IMAGENET_SHAPE = (224, 224, 3)
class ImageNetInput(object):
  """Generates ImageNet input_fn for training, validation, and testing.

  The complete ImageNet training data is assumed to be in TFRecord format with
  keys as specified in the dataset_parser below, sharded across 1024 files,
  named sequentially:
      train-00000-of-01024
      train-00001-of-01024
      ...
      train-01023-of-01024

  ImageNet's validation data (which is treated as test data here) is in the same
  format but sharded in 128 files. 'Validation' data here refers to a subset of
  the ImageNet training set split off for validation.

  The format of the data required is created by the script at:
  https://github.com/tensorflow/tpu/blob/master/tools/datasets/imagenet_to_gcs.py
  """

  def __init__(self, is_training, data_dir, dataset_split, batch_size,
               use_bfloat16=False, fake_data=False):
    """Initialize ImageNetInput object.

    Args:
      is_training: `bool` for whether the input is for training.
      data_dir: `str` for the directory of the training and validation data.
      dataset_split: `str`, either 'train', 'valid', or 'test'.
      batch_size: `int`, dataset batch size.
      use_bfloat16: If True, use bfloat16 precision; else use float32.
      fake_data: If True, use synthetic random data.
    """
    self.image_preprocessing_fn = resnet_preprocessing.preprocess_image
    self.is_training = is_training
    self.use_bfloat16 = use_bfloat16
    self.data_dir = data_dir
    self.dataset_split = dataset_split
    self.fake_data = fake_data
    self.batch_size = batch_size

  def dataset_parser(self, value):
    """Parse an ImageNet record from a serialized string Tensor."""
    # TFRecord schema written by the imagenet_to_gcs.py conversion script.
    keys_to_features = {
        'image/encoded':
            tf.io.FixedLenFeature((), tf.string, ''),
        'image/format':
            tf.io.FixedLenFeature((), tf.string, 'jpeg'),
        'image/class/label':
            tf.io.FixedLenFeature([], tf.int64, -1),
        'image/class/text':
            tf.io.FixedLenFeature([], tf.string, ''),
        'image/object/bbox/xmin':
            tf.io.VarLenFeature(dtype=tf.float32),
        'image/object/bbox/ymin':
            tf.io.VarLenFeature(dtype=tf.float32),
        'image/object/bbox/xmax':
            tf.io.VarLenFeature(dtype=tf.float32),
        'image/object/bbox/ymax':
            tf.io.VarLenFeature(dtype=tf.float32),
        'image/object/class/label':
            tf.io.VarLenFeature(dtype=tf.int64),
    }

    parsed = tf.io.parse_single_example(value, keys_to_features)
    image_bytes = tf.reshape(parsed['image/encoded'], shape=[])

    # Decode and preprocess (train/eval behavior differs inside the fn).
    image = self.image_preprocessing_fn(
        image_bytes=image_bytes,
        is_training=self.is_training,
        use_bfloat16=self.use_bfloat16)

    # Subtract one so that labels are in [0, 1000), and cast to float32 for
    # Keras model.
    label = tf.cast(tf.cast(
        tf.reshape(parsed['image/class/label'], shape=[1]), dtype=tf.int32) - 1,
                    dtype=tf.float32)

    return image, label

  def input_fn(self):
    """Input function which provides a single batch for train or eval.

    Returns:
      A `tf.data.Dataset` object.
    """
    if self.fake_data:
      return image_data_utils.make_fake_data(IMAGENET_SHAPE)

    # Shards [0, IMAGENET_VALID_SHARDS) of the training data are held
    # out as the 'valid' split; the rest form the 'train' split.
    train_path_tmpl = os.path.join(self.data_dir, 'train-{0:05d}*')
    if self.dataset_split == 'train':
      file_pattern = [train_path_tmpl.format(i)
                      for i in range(IMAGENET_VALID_SHARDS,
                                     IMAGENET_TRAIN_AND_VALID_SHARDS)]
    elif self.dataset_split == 'valid':
      file_pattern = [train_path_tmpl.format(i)
                      for i in range(IMAGENET_VALID_SHARDS)]
    elif self.dataset_split == 'test':
      # ImageNet's official validation files serve as the test split here.
      file_pattern = os.path.join(self.data_dir, 'validation-*')
    else:
      raise ValueError(
          "Dataset_split must be 'train', 'valid', or 'test', was %s"
          % self.dataset_split)

    # Shuffle the filenames to ensure better randomization.
    dataset = tf.data.Dataset.list_files(file_pattern, shuffle=self.is_training)
    if self.is_training:
      dataset = dataset.repeat()

    def fetch_dataset(filename):
      buffer_size = 8 * 1024 * 1024  # 8 MiB per file
      dataset = tf.data.TFRecordDataset(filename, buffer_size=buffer_size)
      return dataset

    # Read the data from disk in parallel
    dataset = dataset.interleave(fetch_dataset, cycle_length=16)
    if self.is_training:
      dataset = dataset.shuffle(1024)

    # Parse, pre-process, and batch the data in parallel (for speed, it's
    # necessary to apply batching here rather than using dataset.batch later)
    dataset = dataset.apply(
        tf.data.experimental.map_and_batch(
            self.dataset_parser,
            batch_size=self.batch_size,
            num_parallel_batches=2,
            drop_remainder=True))

    # Prefetch overlaps in-feed with training
    dataset = dataset.prefetch(tf.data.experimental.AUTOTUNE)
    if self.is_training:
      # Use a private thread pool and limit intra-op parallelism. Enable
      # non-determinism only for training.
      options = tf.data.Options()
      options.experimental_threading.max_intra_op_parallelism = 1
      options.experimental_threading.private_threadpool_size = 16
      options.experimental_deterministic = False
      dataset = dataset.with_options(options)
    return dataset
| StarcoderdataPython |
119353 | <reponame>artcz/adventofcode
# Puzzle input: one grid row per line.
# NOTE(review): the open() handle is never closed — acceptable for a
# one-shot script, but a with-statement would be tidier.
lines = open("input").read().strip().splitlines()
print("--- Day11 ---")
class Seat:
    """A probe starting at (x, y) that can step in direction (dx, dy)."""

    # The eight compass directions around a cell — (0, 0) excluded.
    directions = [
        (dx, dy)
        for dx in (-1, 0, 1)
        for dy in (-1, 0, 1)
        if dx or dy
    ]

    def __init__(self, x, y, dx, dy):
        self.x, self.y = x, y
        self.dx, self.dy = dx, dy
def p1(part2=False):
    """Run the seating simulation to a fixed point; print occupied count.

    part2=False: neighbours are the 8 adjacent cells, tolerance 4.
    part2=True: neighbours are the first seat visible in each of the 8
    directions, tolerance 5.
    """
    G, rows, columns = load_grid(lines)
    i = 0  # generation counter (kept for debugging; otherwise unused)
    while True:
        i += 1
        NG = {}
        for x in range(rows):
            for y in range(columns):
                # One probe per direction, starting one step away.
                other = [Seat(x + d[0], y + d[1], *d) for d in Seat.directions]
                adjacent = 4
                if part2:
                    # Slide each probe across floor ('.') cells until it
                    # reaches a seat or leaves the grid.
                    for seat in other:
                        while G.get((seat.x, seat.y)) == ".":
                            seat.x += seat.dx
                            seat.y += seat.dy
                    adjacent = 5
                # Keep only probes that landed on a cell inside the grid.
                other = [G[s.x, s.y] for s in other if (s.x, s.y) in G]
                if G[x, y] == "L" and all(s != "#" for s in other):
                    NG[x, y] = "#"
                elif G[x, y] == "#" and (sum([s == "#" for s in other]) >= adjacent):
                    NG[x, y] = "L"
                else:
                    NG[x, y] = G[x, y]
        if G == NG:
            # Fixed point: no seat changed this generation.
            break
        G = NG
    print(sum([cell == "#" for cell in G.values()]))
def p2():
    """Part 2: rerun the simulation with line-of-sight rules (see p1)."""
    p1(part2=True)
def load_grid(lines):
    """Parse rows of text into ({(row, col): char}, row_count, col_count)."""
    width = len(lines[0])
    grid = {}
    for row, text in enumerate(lines):
        # Every row must be as wide as the first one.
        assert len(text) == width
        grid.update(((row, col), ch) for col, ch in enumerate(text))
    return grid, len(lines), width
def print_grid(grid, maxx, maxy, zfill_padding=3):
    """Debug-print *grid* (keyed (x, y)) with column and row index rulers."""
    # Column ruler: last digit of every x coordinate.
    digits = "".join(str(x % 10) for x in range(maxx))
    print(" " * zfill_padding + " " + digits)
    for y in range(maxy):
        cells = "".join(grid[x, y] for x in range(maxx))
        print(str(y).zfill(zfill_padding) + " " + cells)
# Run both parts when the script is executed.
print("Part1")
p1()
print("Part2")
p2()
print("---- EOD ----")
| StarcoderdataPython |
1678161 | <filename>features/Composers/composerTypes.py<gh_stars>1-10
from ariadne import load_schema_from_path, ObjectType, QueryType, MutationType
from features.Composers.composer import resolve_composer, create_composer, update_composer, get_composers
from features.Songs.song import resolve_songs
from features.Songs.songTypes import songQueries
# GraphQL root types for the Composers feature.
composerQueries = QueryType()
composerMutations = MutationType()
# SDL type definitions loaded from the feature's .gql file.
composerTypes = load_schema_from_path("./features/Composers/composer.gql")
# Field resolvers for the FBComposer object type.
composerObjectType = ObjectType('FBComposer')
composerObjectType.set_field("songs", resolve_songs)
composerQueries.set_field('getComposer', resolve_composer)
# NOTE(review): getComposers is registered on songQueries (imported from
# the Songs feature), not on composerQueries — confirm this is intended.
songQueries.set_field('getComposers', get_composers)
composerMutations.set_field('createComposer', create_composer)
composerMutations.set_field('updateComposer', update_composer)
| StarcoderdataPython |
184796 | <filename>nuzlockeai/utils/pokecache.py
import requests
import json
from typing import List, Tuple, Optional
class PokeCache:
    """In-memory cache of PokeAPI responses, optionally persisted as JSON.

    Attributes:
        dex_cache: pokedex entries keyed by normalised pokemon name.
        species_cache: species entries keyed by species name.
    """

    def __init__(self, fpath: Optional[str] = None):
        """Start empty; if *fpath* is given, pre-load the cache from it."""
        self.dex_cache = {}
        self.species_cache = {}
        if fpath is not None:
            self.load_from_file(fpath)

    def load_from_file(self, fpath: str):
        """Replace both caches with the contents of the JSON file *fpath*."""
        with open(fpath, "r") as f:
            cache = json.loads(f.read())
        self.dex_cache = cache["pokedex"]
        self.species_cache = cache["species"]

    def save_to_file(self, fpath: str):
        """Serialise both caches to *fpath* as a single JSON object."""
        payload = {"pokedex": self.dex_cache, "species": self.species_cache}
        with open(fpath, "w") as f:
            f.write(json.dumps(payload))
def get_pokedex_entry(poke: str, cache: PokeCache) -> None:
    """Fetch the pokedex entry for *poke* from PokeAPI into *cache*.

    The cache key is the normalised (stripped, lower-cased) name; a
    cached entry is never re-fetched.

    Raises:
        RuntimeError: if the PokeAPI request does not return HTTP 200.
    """
    key = poke.strip().lower()  # normalise once instead of four times
    if key in cache.dex_cache:
        return
    response = requests.get(f"https://pokeapi.co/api/v2/pokemon/{key}/")
    if response.status_code != 200:
        # Keep the diagnostic prints so failures stay easy to debug.
        print(response.status_code)
        print(response.content)
        raise RuntimeError(f"Error when trying to pull pokedex data for: {poke}")
    cache.dex_cache[key] = response.json()
def get_species_entry(poke: str, cache: PokeCache) -> None:
    """Fetch the species entry for *poke* from PokeAPI into the cache.

    The pokedex entry is fetched first (if needed) because it supplies
    the species name used as the species_cache key.

    Raises:
        RuntimeError: if the PokeAPI species request does not return 200.
    """
    if poke.strip().lower() not in cache.dex_cache.keys():
        get_pokedex_entry(poke, cache)
    # The species key comes from the pokedex entry's species.name field.
    generic_name = cache.dex_cache[poke.strip().lower()]["species"]["name"]
    if generic_name not in cache.species_cache.keys():
        species_url = f"https://pokeapi.co/api/v2/pokemon-species/{generic_name}/"
        species_response = requests.get(species_url)
        if species_response.status_code != 200:
            print(species_response.status_code)
            print(species_response.content)
            raise RuntimeError(f"Error when trying to pull species data for: {generic_name}")
        species_entry = species_response.json()
        cache.species_cache[generic_name] = species_entry
def get_encounter_basic_info(poke: str, cache: PokeCache, version_name: str = "black-white", move_method: str = "level-up", banned_abilities: List[str] = ["competitive", "slush-rush", "weak-armor"]) -> Tuple[List[str], float, List[str]]:
    """
    Get the basic details of an encountered pokemon from the pokeAPI.
    (Abilities, Odds Female, and Level-up Learnset)

    NOTE(review): `move_method` is accepted but never used below — the
    moveset filter only checks the version group; confirm whether moves
    should also be filtered by learn method.
    NOTE(review): the mutable list default for `banned_abilities` is
    only read here (never mutated), so it is harmless, but a tuple
    default would be safer.
    """
    get_pokedex_entry(poke, cache)
    get_species_entry(poke, cache)
    # Non-hidden abilities, minus the explicitly banned ones.
    possible_abilities = [
        a["ability"]["name"]
        for a in cache.dex_cache[poke.strip().lower()]["abilities"]
        if (not a["is_hidden"]) and(a["ability"]["name"] not in banned_abilities)
    ]
    generic_name = cache.dex_cache[poke.strip().lower()]["species"]["name"]
    # gender_rate is presumably in eighths-female; -1 seems to mark
    # genderless species, which would yield -0.125 here — TODO confirm.
    odds_female = cache.species_cache[generic_name]["gender_rate"] / 8
    moveset = []
    for move in cache.dex_cache[poke.strip().lower()]["moves"]:
        for v_details in move["version_group_details"]:
            if v_details["version_group"]["name"] == version_name:
                moveset.append((v_details["level_learned_at"], move["move"]["name"]))
    # Sort by the level at which each move is learned.
    moveset = sorted(moveset, key=lambda x: x[0])
    return possible_abilities, odds_female, moveset
| StarcoderdataPython |
3267100 | <reponame>strawsyz/straw
import torch
import torch.nn as nn
from torch.autograd import Variable
class VAE(nn.Module):
    """A minimal variational autoencoder for flattened 28x28 inputs."""

    def __init__(self):
        super(VAE, self).__init__()
        # Encoder: 784 -> 400, then two heads of 20 units each for the
        # latent mean and log-variance.
        self.fc1 = nn.Linear(784, 400)
        self.fc21 = nn.Linear(400, 20)
        self.fc22 = nn.Linear(400, 20)
        # Decoder: 20 -> 400 -> 784.
        self.fc3 = nn.Linear(20, 400)
        self.fc4 = nn.Linear(400, 784)
        self.relu = nn.ReLU(inplace=True)
        self.sigmoid = nn.Sigmoid()

    def reparametrize(self, mu, logvar):
        """Sample z ~ N(mu, exp(logvar)) via the reparameterisation trick."""
        std = logvar.mul(0.5).exp_()
        # Unit-Gaussian noise with the same shape/dtype as std.
        noise = Variable(std.data.new(std.size()).normal_())
        # Scale by the standard deviation and shift by the mean.
        return noise.mul(std).add_(mu)

    def forward(self, x):
        """Encode, sample a latent code, decode.

        Returns (reconstruction, latent mean, latent log-variance).
        """
        hidden = self.relu(self.fc1(x.view(-1, 784)))
        mean = self.fc21(hidden)
        logvar = self.fc22(hidden)
        z = self.reparametrize(mean, logvar)
        decoded = self.relu(self.fc3(z))
        recon = self.sigmoid(self.fc4(decoded))
        return recon, mean, logvar
def loss_function(recon_x, x, mean, var):
    """VAE objective: summed binary cross-entropy plus KL divergence.

    See Appendix B of Kingma & Welling, "Auto-Encoding Variational
    Bayes" (ICLR 2014, https://arxiv.org/abs/1312.6114):
        KL = -0.5 * sum(1 + log(sigma^2) - mu^2 - sigma^2)
    Here *var* is the log-variance produced by the encoder.
    """
    # Flatten the target to match the reconstruction shape.
    flat = x.view(-1, 784)
    # Reconstruction term, summed (not averaged) over pixels and batch.
    reconstruction = nn.BCELoss(reduction='sum')(recon_x, flat)
    # KL term against the unit-Gaussian prior.
    kl = -0.5 * torch.sum(1 + var - mean.pow(2) - var.exp())
    return reconstruction + kl
| StarcoderdataPython |
12650 | <gh_stars>1-10
import sys
from time import sleep
from random import randint
from urllib.request import urlopen
from urllib.parse import urlencode
# Simulates a sensor: posts randomly drifting readings to the TCC service.
# Usage: python script.py {idSensor}
if len(sys.argv) != 2:
    print('Por favor, usar: ' + sys.argv[0] + ' {idSensor}')
    print('Exemplo: ' + sys.argv[0] + ' 8')
else:
    sensorId = sys.argv[1]
    URL_SERVICO = 'http://127.0.0.1:8081/tcc/sensor/' + sensorId + '/inserir'
    # Readings never drift more than this far from their base value.
    VARIACAO_MAXIMA = 5
    # Base value for each simulated measurement.
    valores = {
        'Chuva': 80.0,
        'UmidadeAr': 85.0,
        'UmidadeSolo': 80.0,
        'TemperaturaAr': 30.0,
        'TemperaturaSolo': 25.0
    }
    variacao = {}
    # Jitter the base values once at startup; start every drift at zero.
    for k in valores:
        valores[k] = valores[k] + randint(-3, +3) / 10
        variacao[k] = 0.0
    accel = {}
    while True:
        # Pick a new random drift rate per measurement.
        # Bug fix: randint() requires integer arguments; the original
        # passed floats (-1.0, +1.0), which raises on modern Python.
        for k in variacao:
            accel[k] = randint(-1, 1) / 10
        # Keep that drift rate for a random number of samples.
        r = randint(10, 30)
        for i in range(r):
            data = {}
            for k in variacao:
                # Integrate the drift, clamped to +/- VARIACAO_MAXIMA.
                variacao[k] = variacao[k] + accel[k]
                variacao[k] = max(variacao[k], -VARIACAO_MAXIMA)
                variacao[k] = min(variacao[k], +VARIACAO_MAXIMA)
                data[k] = '%.2f' % (valores[k] + round(variacao[k], 2))
            data = urlencode(data)
            print(data)
            # POST the urlencoded readings to the service, twice a second.
            urlopen(URL_SERVICO, data.encode('ascii'))
            sleep(0.50)
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.