code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
import typing
from datetime import datetime
from ..schema import BaseTransformer
class Transformer(BaseTransformer):
    """Transform New Jersey raw data for consolidation."""

    postal_code = "NJ"
    # Mapping from consolidated field name -> column header in the raw NJ data.
    fields = dict(
        company="Company",
        location="City",
        effective_date="Effective Date",
        jobs="Workforce Affected",
    )
    # BUGFIX: the slash-delimited formats used "%M" (minute) instead of
    # "%m" (month), so dates like "04/22/2022" parsed with month defaulting
    # to January. The corrections table below (04/22/2022 -> April 22)
    # shows month-first parsing is the intent.
    date_format = ["%Y-%m-%d %H:%M:%S", "%m/%d/%Y", "%m/%d/%y"]
    # Known-bad raw jobs values -> replacement (None drops the value).
    jobs_corrections = {
        "TBA": None,
        "To be Determined": None,
        "-": None,
        "Unknown": None,
        23695: None,
        # The United airlines number is legitimate, though nationwide
        # https://abcnews.go.com/Politics/united-airlines-furlough-16000-employees/story?id=72771897
        16000: 16000,
    }
    # Known-bad raw date values -> replacement datetime (None drops the value).
    date_corrections = {
        "TBA": None,
        "Temp layoff": None,
        "-": None,
        "3030-08-23 00:00:00": datetime(2020, 8, 23),
        "04/22/2022, 09/30/2022, 12/21/22": datetime(2022, 4, 22),
    }

    def transform_jobs(self, value: str) -> typing.Optional[int]:
        """Transform a raw jobs number into an integer.

        Args:
            value (str): A raw jobs number provided by the source

        Returns: An integer number ready for consolidation. Or, if the value is invalid, a None.
        """
        # Cut the asterisk they sometimes use
        value = value.replace("*", "")
        # Do the normal stuff
        return super().transform_jobs(value)
|
[
"datetime.datetime"
] |
[((887, 908), 'datetime.datetime', 'datetime', (['(2020)', '(8)', '(23)'], {}), '(2020, 8, 23)\n', (895, 908), False, 'from datetime import datetime\n'), ((954, 975), 'datetime.datetime', 'datetime', (['(2022)', '(4)', '(22)'], {}), '(2022, 4, 22)\n', (962, 975), False, 'from datetime import datetime\n')]
|
#!/usr/bin/python
"""
FlowCal Python API example, without using calibration beads data.
This script is divided in two parts. Part one processes data from five cell
samples, and generates plots of each one.
Part two exemplifies how to use the processed cell sample data with
FlowCal's plotting and statistics modules, in order to produce interesting
plots.
For details about the experiment, samples, and instrument used, please
consult readme.txt.
"""
import os
import os.path
import numpy as np
import matplotlib.pyplot as plt
import FlowCal
###
# Definition of constants
###
# FCS files holding the five cell-sample acquisitions (Data001..Data005).
samples_filenames = ['FCFiles/Data{:03d}.fcs'.format(n) for n in range(1, 6)]
# IPTG concentration of each cell sample, in micromolar.
iptg = np.array([0, 81, 161, 318, 1000])
# Folder where per-sample plots are written after gating and transformation.
samples_plot_dir = 'plot_samples'
if __name__ == "__main__":
    # Check that plot directory exists, create if it does not.
    if not os.path.exists(samples_plot_dir):
        os.makedirs(samples_plot_dir)
    ###
    # Part 1: Processing cell sample data
    ###
    print("\nProcessing cell samples...")
    # We will use the list ``samples`` to store processed, transformed flow
    # cytometry data of each sample.
    samples = []
    # Iterate over cell sample filenames
    for sample_id, sample_filename in enumerate(samples_filenames):
        # Load flow cytometry data from the corresponding FCS file.
        # ``FlowCal.io.FCSData(filename)`` returns an object that represents
        # flow cytometry data loaded from file ``filename``.
        print("\nLoading file \"{}\"...".format(sample_filename))
        sample = FlowCal.io.FCSData(sample_filename)
        # Data loaded from an FCS file is in "Channel Units", the raw numbers
        # reported from the instrument's detectors. The FCS file also contains
        # information to convert these into Relative Fluorescence Intensity
        # (RFI) values, commonly referred to as arbitrary fluorescence units
        # (a.u.). The function ``FlowCal.transform.to_rfi()`` performs this
        # conversion.
        print("Performing data transformation...")
        sample = FlowCal.transform.to_rfi(sample)
        # Gating
        # Gating is the process of removing measurements of irrelevant
        # particles, while retaining only the population of interest.
        print("Performing gating...")
        # ``FlowCal.gate.start_end()`` removes the first and last few events.
        # Transients in fluidics can make these events slightly different from
        # the rest. This may not be necessary in all instruments.
        sample_gated = FlowCal.gate.start_end(sample,
                                               num_start=250,
                                               num_end=100)
        # ``FlowCal.gate.high_low()`` removes events outside a range specified
        # by a ``low`` and a ``high`` value. If these are not specified (as
        # shown below), the function removes events outside the channel's range
        # of detection.
        # Detectors in a flow cytometer have a finite range of detection. If the
        # fluorescence of a particle is higher than the upper limit of this
        # range, the instrument will incorrectly record it with a value equal to
        # this limit. The same happens for fluorescence values lower than the
        # lower limit of detection. These saturated events should be removed,
        # otherwise statistics may be calculated incorrectly.
        # Note that this might not be necessary with newer instruments that
        # record data as floating-point numbers (and in fact it might eliminate
        # negative events). To see the data type stored in your FCS files, run
        # the following instruction: ``print sample_gated.data_type``.
        # We will remove saturated events in the forward/side scatter channels,
        # and in the fluorescence channel FL1.
        sample_gated = FlowCal.gate.high_low(sample_gated,
                                              channels=['FSC','SSC','FL1'])
        # ``FlowCal.gate.density2d()`` preserves only the densest population as
        # seen in a 2D density diagram of two channels. This helps remove
        # particle aggregations and other sparse populations that are not of
        # interest (i.e. debris).
        # We use the forward and side scatter channels, and preserve 50% of the
        # events. Finally, setting ``full_output=True`` instructs the function
        # to return two additional outputs. The last one (``gate_contour``) is
        # a curve surrounding the gated region, which we will use for plotting
        # later.
        sample_gated, __, gate_contour = FlowCal.gate.density2d(
            data=sample_gated,
            channels=['FSC','SSC'],
            gate_fraction=0.5,
            full_output=True)
        # Plot forward/side scatter 2D density plot and 1D fluorescence
        # histograms
        print("Plotting density plot and histogram...")
        # Parameters for the forward/side scatter density plot
        density_params = {}
        # We use the "scatter" mode, in which individual particles will be
        # plotted individually as in a scatter plot, but with a color
        # proportional to the particle density around.
        density_params['mode'] = 'scatter'
        # Parameters for the fluorescence histograms
        hist_params = {}
        hist_params['xlabel'] = 'FL1 Fluorescence (a.u.)'
        # Plot filename
        # The figure can be saved in any format supported by matplotlib (svg,
        # jpg, etc.) by just changing the extension.
        plot_filename = '{}/density_hist_{}.png'.format(
            samples_plot_dir,
            'S{:03}'.format(sample_id + 1))
        # Plot and save
        # The function ``FlowCal.plot.density_and_hist()`` plots a combined
        # figure with a 2D density plot at the top, and an arbitrary number of
        # 1D histograms below. In this case, we will plot the forward/side
        # scatter channels in the density plot, and a histogram of the
        # fluorescence channel FL1 below.
        # Note that we are providing data both before (``sample``) and after
        # (``sample_gated``) gating. The 1D histogram will display the ungated
        # dataset with transparency, and the gated dataset in front with a
        # solid color. In addition, we are providing ``gate_contour`` from the
        # density gating step, which will be displayed in the density diagram.
        # This will result in a convenient representation of the data both
        # before and after gating.
        FlowCal.plot.density_and_hist(
            sample,
            sample_gated,
            density_channels=['FSC','SSC'],
            hist_channels=['FL1'],
            gate_contour=gate_contour,
            density_params=density_params,
            hist_params=hist_params,
            savefig=plot_filename)
        # Save cell sample object
        samples.append(sample_gated)
    ###
    # Part 2: Examples on how to use processed cell sample data
    # (BUGFIX: previously mislabeled "Part 3"; the module docstring says the
    # script is divided in two parts.)
    ###
    # Histogram of all samples
    # Here, we plot the fluorescence histograms of all five samples in the same
    # figure, using ``FlowCal.plot.hist1d``. Note how this function can be used
    # in the context of accessory matplotlib functions to modify the axes
    # limits and labels and add a legend, among others.
    plt.figure(figsize=(6,3.5))
    FlowCal.plot.hist1d(samples,
                        channel='FL1',
                        histtype='step',
                        bins=128)
    plt.ylim([0, 2000])
    plt.xlabel('FL1 Fluorescence (a.u.)')
    # Raw strings below avoid the invalid "\m" escape warning (W605) while
    # keeping the TeX markup byte-identical.
    plt.legend([r'{} $\mu M$ IPTG'.format(i) for i in iptg],
               loc='upper left',
               fontsize='small')
    plt.tight_layout()
    plt.savefig('histograms.png', dpi=200)
    plt.close()
    # Here we illustrate how to obtain statistics from the fluorescence of each
    # sample, and how to use them in a plot.
    # The stats module contains functions to calculate different statistics
    # such as mean, median, and standard deviation. Here, we calculate the
    # geometric mean from channel FL1 of each sample, and plot them against the
    # corresponding IPTG concentrations.
    samples_fluorescence = [FlowCal.stats.gmean(s, channels='FL1')
                            for s in samples]
    plt.figure(figsize=(5.5, 3.5))
    plt.plot(iptg,
             samples_fluorescence,
             marker='o',
             color=(0, 0.4, 0.7))
    plt.xlabel(r'IPTG Concentration ($\mu M$)')
    plt.ylabel('FL1 Fluorescence (a.u.)')
    plt.tight_layout()
    plt.savefig('dose_response.png', dpi=200)
    plt.close()
    print("\nDone.")
|
[
"FlowCal.gate.start_end",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.tight_layout",
"FlowCal.io.FCSData",
"matplotlib.pyplot.close",
"os.path.exists",
"FlowCal.stats.gmean",
"matplotlib.pyplot.ylim",
"FlowCal.gate.density2d",
"FlowCal.transform.to_rfi",
"FlowCal.plot.hist1d",
"FlowCal.plot.density_and_hist",
"matplotlib.pyplot.ylabel",
"os.makedirs",
"matplotlib.pyplot.plot",
"numpy.array",
"FlowCal.gate.high_low",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.savefig"
] |
[((949, 982), 'numpy.array', 'np.array', (['[0, 81, 161, 318, 1000]'], {}), '([0, 81, 161, 318, 1000])\n', (957, 982), True, 'import numpy as np\n'), ((7748, 7776), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(6, 3.5)'}), '(figsize=(6, 3.5))\n', (7758, 7776), True, 'import matplotlib.pyplot as plt\n'), ((7780, 7850), 'FlowCal.plot.hist1d', 'FlowCal.plot.hist1d', (['samples'], {'channel': '"""FL1"""', 'histtype': '"""step"""', 'bins': '(128)'}), "(samples, channel='FL1', histtype='step', bins=128)\n", (7799, 7850), False, 'import FlowCal\n'), ((7927, 7946), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[0, 2000]'], {}), '([0, 2000])\n', (7935, 7946), True, 'import matplotlib.pyplot as plt\n'), ((7951, 7988), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""FL1 Fluorescence (a.u.)"""'], {}), "('FL1 Fluorescence (a.u.)')\n", (7961, 7988), True, 'import matplotlib.pyplot as plt\n'), ((8119, 8137), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (8135, 8137), True, 'import matplotlib.pyplot as plt\n'), ((8142, 8180), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""histograms.png"""'], {'dpi': '(200)'}), "('histograms.png', dpi=200)\n", (8153, 8180), True, 'import matplotlib.pyplot as plt\n'), ((8185, 8196), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (8194, 8196), True, 'import matplotlib.pyplot as plt\n'), ((8712, 8742), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(5.5, 3.5)'}), '(figsize=(5.5, 3.5))\n', (8722, 8742), True, 'import matplotlib.pyplot as plt\n'), ((8747, 8816), 'matplotlib.pyplot.plot', 'plt.plot', (['iptg', 'samples_fluorescence'], {'marker': '"""o"""', 'color': '(0, 0.4, 0.7)'}), "(iptg, samples_fluorescence, marker='o', color=(0, 0.4, 0.7))\n", (8755, 8816), True, 'import matplotlib.pyplot as plt\n'), ((8860, 8903), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""IPTG Concentration ($\\\\mu M$)"""'], {}), "('IPTG Concentration ($\\\\mu M$)')\n", (8870, 8903), True, 'import 
matplotlib.pyplot as plt\n'), ((8907, 8944), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""FL1 Fluorescence (a.u.)"""'], {}), "('FL1 Fluorescence (a.u.)')\n", (8917, 8944), True, 'import matplotlib.pyplot as plt\n'), ((8949, 8967), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (8965, 8967), True, 'import matplotlib.pyplot as plt\n'), ((8972, 9013), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""dose_response.png"""'], {'dpi': '(200)'}), "('dose_response.png', dpi=200)\n", (8983, 9013), True, 'import matplotlib.pyplot as plt\n'), ((9018, 9029), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (9027, 9029), True, 'import matplotlib.pyplot as plt\n'), ((1239, 1271), 'os.path.exists', 'os.path.exists', (['samples_plot_dir'], {}), '(samples_plot_dir)\n', (1253, 1271), False, 'import os\n'), ((1281, 1310), 'os.makedirs', 'os.makedirs', (['samples_plot_dir'], {}), '(samples_plot_dir)\n', (1292, 1310), False, 'import os\n'), ((1943, 1978), 'FlowCal.io.FCSData', 'FlowCal.io.FCSData', (['sample_filename'], {}), '(sample_filename)\n', (1961, 1978), False, 'import FlowCal\n'), ((2464, 2496), 'FlowCal.transform.to_rfi', 'FlowCal.transform.to_rfi', (['sample'], {}), '(sample)\n', (2488, 2496), False, 'import FlowCal\n'), ((2942, 3000), 'FlowCal.gate.start_end', 'FlowCal.gate.start_end', (['sample'], {'num_start': '(250)', 'num_end': '(100)'}), '(sample, num_start=250, num_end=100)\n', (2964, 3000), False, 'import FlowCal\n'), ((4265, 4332), 'FlowCal.gate.high_low', 'FlowCal.gate.high_low', (['sample_gated'], {'channels': "['FSC', 'SSC', 'FL1']"}), "(sample_gated, channels=['FSC', 'SSC', 'FL1'])\n", (4286, 4332), False, 'import FlowCal\n'), ((5017, 5124), 'FlowCal.gate.density2d', 'FlowCal.gate.density2d', ([], {'data': 'sample_gated', 'channels': "['FSC', 'SSC']", 'gate_fraction': '(0.5)', 'full_output': '(True)'}), "(data=sample_gated, channels=['FSC', 'SSC'],\n gate_fraction=0.5, full_output=True)\n", (5039, 5124), False, 'import 
FlowCal\n'), ((6959, 7185), 'FlowCal.plot.density_and_hist', 'FlowCal.plot.density_and_hist', (['sample', 'sample_gated'], {'density_channels': "['FSC', 'SSC']", 'hist_channels': "['FL1']", 'gate_contour': 'gate_contour', 'density_params': 'density_params', 'hist_params': 'hist_params', 'savefig': 'plot_filename'}), "(sample, sample_gated, density_channels=['FSC',\n 'SSC'], hist_channels=['FL1'], gate_contour=gate_contour,\n density_params=density_params, hist_params=hist_params, savefig=\n plot_filename)\n", (6988, 7185), False, 'import FlowCal\n'), ((8623, 8661), 'FlowCal.stats.gmean', 'FlowCal.stats.gmean', (['s'], {'channels': '"""FL1"""'}), "(s, channels='FL1')\n", (8642, 8661), False, 'import FlowCal\n')]
|
#coding:utf-8
#################################
#Copyright(c) 2014 dtysky
#################################
import G2R
class CgTag(G2R.TagSource):
    """Tag source that expands CG scene/background combinations into 's' tags."""

    def Get(self, Flag, US):
        """Build the base tags, then add a self-mapped 's' entry for every
        (scene prefix, repetition index, background) combination of each CG
        listed in ``tags['m']``."""
        tags = G2R.TagSource.Get(self, Flag, US)
        tags['s'] = {}
        for cg in tags['m']:
            entries = tags['s'][cg] = {}
            cg_args = US.Args[Flag][cg]
            for scene in cg_args['Scene']:
                # scene[0] is the name prefix, scene[1] the repetition count.
                for idx in range(scene[1]):
                    for bg in cg_args['Background']:
                        key = scene[0] + str(idx) + bg
                        entries[key] = key
        return tags
|
[
"G2R.TagSource.Get"
] |
[((179, 212), 'G2R.TagSource.Get', 'G2R.TagSource.Get', (['self', 'Flag', 'US'], {}), '(self, Flag, US)\n', (196, 212), False, 'import G2R\n')]
|
#!/usr/bin/env python
#****************************************************************************
# ©
# Copyright 2014-2015 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#****************************************************************************
#****************************************************************************
# Imports
#****************************************************************************
# python standard imports
import argparse
import os
import sys
import traceback
# project-specific imports
cur_path = os.path.dirname(os.path.realpath(__file__))
# Make the hookster sub-packages (actions/checks/framework/scm) importable.
for _subdir in ('actions', 'checks', 'framework', 'scm'):
    sys.path.append(os.path.join(cur_path, _subdir))
import abstractScm
from configManager import ConfigManager
from fileObject import FileObject
from hooksterExceptions import *
from logManager import *
#****************************************************************************
# Constants
#****************************************************************************
CONFIG_FILE = os.path.join(cur_path, "hookster.conf")
#****************************************************************************
# Functions
#****************************************************************************
def setup(config_file, scm, old_rev, new_rev, branch):
    """Setup hookster.

    Builds and returns the ConfigManager holding the parsed configuration,
    the SCM type, and the revision/branch information for this run.
    """
    config = ConfigManager(config_file, scm, old_rev, new_rev, branch)
    return config
def teardown():
    """Teardown hookster: close the log (``close_log`` from logManager)."""
    close_log()
def run_this_check(config, check_name, filename):
    """Return True if check ``check_name`` is configured to run on ``filename``.

    The check's extension whitelist (entries like ``*.py``) is compared
    against the file's extension normalized to the same ``*.ext`` form;
    extensionless files normalize to a bare ``*``.
    """
    whitelist = config.check.get_check_extensions(check_name)
    file_extension = "*" + os.path.splitext(filename)[1]
    return file_extension in whitelist
def main(config_file, scm, old_rev, new_rev, branch):
    """
    Hookster entry point: run every enabled check on every file changed
    between ``old_rev`` and ``new_rev``, then run the configured success or
    failure actions.

    :param config_file: path to the hookster configuration file
    :param scm: SCM type, e.g. git or svn
    :param old_rev: previous revision hash/ID
    :param new_rev: incoming revision hash/ID
    :param branch: branch name
    :return: None (re-raises the failing exception if any check fails)
    """
    try:
        config = setup(config_file, scm, old_rev, new_rev, branch)
        # loop through each modified file
        for filename in config.scm.get_changed_file_list(config.new_rev):
            file_object = FileObject(filename, config.scm, config.branch_name, config.new_rev, config.old_rev)
            # loop through the enabled checks; a check signals failure by
            # raising from check_file(), which jumps to the handler below
            for check in config.check.checks:
                if run_this_check(config, check, file_object.filename):
                    config.check.check_objs[check].check_file(file_object)
        # all checks passed. run the "success" actions.
        for key in config.action.success_action_objs.keys():
            config.action.success_action_objs[key].run(True)
    except Exception as ex:
        # Log the failure with its backtrace before acting on it.
        template = "An exception of type {0} occurred. Arguments:\n{1!r}"
        message = template.format(type(ex).__name__, ex.args)
        log(message, LOG_LEVEL_ERROR)
        backtrace = traceback.format_exc()
        log(backtrace, LOG_LEVEL_ERROR)
        # run the "failed" actions
        for key in config.action.failure_action_objs.keys():
            config.action.failure_action_objs[key].run(False)
        # re-raise the failing exception so the caller exits non-zero
        raise
    finally:
        teardown()
if __name__ == "__main__":
    # Command-line entry point: parse SCM/revision/branch arguments and exit
    # with main()'s status (main re-raises when a check fails).
    parser = argparse.ArgumentParser(description="Hookster - software configuration management hook scripts")
    parser.add_argument('-s', '--scm', dest='scm', default=abstractScm.SCM_DEFAULT, type=str,
                        help='SCM type, e.g. git, svn, etc.')
    parser.add_argument('-o', '--oldrev', dest='old_rev', default=None, type=str,
                        help='Hash/ID that represents the previous (old) revision')
    parser.add_argument('-n', '--newrev', dest='new_rev', default=None, type=str,
                        help='Hash/ID that represents the incoming (new) revision')
    parser.add_argument('-b', '--branch', dest='branch', default=None, type=str,
                        help='Branch name')
    parser.add_argument('-c', '--config', dest='config_file', default=CONFIG_FILE, type=str,
                        help='Hookster configuration file')
    args = parser.parse_args()
    sys.exit(main(args.config_file, args.scm, args.old_rev, args.new_rev, args.branch))
|
[
"argparse.ArgumentParser",
"os.path.realpath",
"os.path.splitext",
"traceback.format_exc",
"configManager.ConfigManager",
"fileObject.FileObject",
"os.path.join"
] |
[((1626, 1665), 'os.path.join', 'os.path.join', (['cur_path', '"""hookster.conf"""'], {}), "(cur_path, 'hookster.conf')\n", (1638, 1665), False, 'import os\n'), ((1062, 1088), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1078, 1088), False, 'import os\n'), ((1106, 1139), 'os.path.join', 'os.path.join', (['cur_path', '"""actions"""'], {}), "(cur_path, 'actions')\n", (1118, 1139), False, 'import os\n'), ((1157, 1189), 'os.path.join', 'os.path.join', (['cur_path', '"""checks"""'], {}), "(cur_path, 'checks')\n", (1169, 1189), False, 'import os\n'), ((1207, 1242), 'os.path.join', 'os.path.join', (['cur_path', '"""framework"""'], {}), "(cur_path, 'framework')\n", (1219, 1242), False, 'import os\n'), ((1260, 1289), 'os.path.join', 'os.path.join', (['cur_path', '"""scm"""'], {}), "(cur_path, 'scm')\n", (1272, 1289), False, 'import os\n'), ((1930, 1987), 'configManager.ConfigManager', 'ConfigManager', (['config_file', 'scm', 'old_rev', 'new_rev', 'branch'], {}), '(config_file, scm, old_rev, new_rev, branch)\n', (1943, 1987), False, 'from configManager import ConfigManager\n'), ((3997, 4098), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Hookster - software configuration management hook scripts"""'}), "(description=\n 'Hookster - software configuration management hook scripts')\n", (4020, 4098), False, 'import argparse\n'), ((2300, 2326), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (2316, 2326), False, 'import os\n'), ((2916, 3005), 'fileObject.FileObject', 'FileObject', (['filename', 'config.scm', 'config.branch_name', 'config.new_rev', 'config.old_rev'], {}), '(filename, config.scm, config.branch_name, config.new_rev, config\n .old_rev)\n', (2926, 3005), False, 'from fileObject import FileObject\n'), ((3645, 3667), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (3665, 3667), False, 'import traceback\n')]
|
from collections import deque

# Read a tree with N nodes (N-1 edges) and Q queries of node pairs.
N, Q = map(int, input().split())
ab = [list(map(int, input().split())) for _ in range(N - 1)]
cd = [list(map(int, input().split())) for _ in range(Q)]

# Build the adjacency list (convert 1-indexed input to 0-indexed).
G = [[] for _ in range(N)]
for a, b in ab:
    a -= 1
    b -= 1
    G[a].append(b)
    G[b].append(a)

# BFS from node 0 to compute every node's depth in the tree.
# BUGFIX: the original left dist[0] at -1 (its "dist[0] = 0" line was
# commented out as '#"dist[0] = 0'); the printed answers happened to stay
# correct only because the resulting off-by-one in every depth cancels in
# the parity comparison below. Initializing dist[0] makes the depths
# themselves correct by construction.
dist = [-1] * N
dist[0] = 0
Qu = deque([0])
while Qu:
    i = Qu.popleft()
    for j in G[i]:
        if dist[j] == -1:
            dist[j] = dist[i] + 1
            Qu.append(j)

# On a tree, two tokens moving one step at a time meet on an edge ("Road")
# exactly when the endpoints' depths have different parity, otherwise they
# meet on a node ("Town").
for c, d in cd:
    c -= 1
    d -= 1
    diff = dist[c] - dist[d]
    if diff % 2 != 0:
        print('Road')
    else:
        print('Town')
|
[
"collections.deque"
] |
[((337, 344), 'collections.deque', 'deque', ([], {}), '()\n', (342, 344), False, 'from collections import deque\n')]
|
# -*- coding: utf-8 -*-
#
# AUTOR: <NAME>
#
# PLACE: Rio de Janeiro - Brazil
#
# CONTACT: <EMAIL>
#
# CRIATION: ago/2018
#
# LAST MODIFICATION: ago/2018
#
# OBJECTIVE: Processing Artigas' meteorological station data for Bia (INUMET)
import os
import sys
import pandas as pd
from datetime import datetime
sys.path.insert(0, os.path.expanduser('~/Dropbox/airsea'))
import airsea
# Wall-clock start time, used to report total runtime at the end.
start = datetime.now().replace(microsecond = 0)
##############################################################################
#### CONFIG PARAMETERS AND GLOBAL VARIABLES ##################################
##############################################################################
DATADIR = u'/home/douglasnehme/Desktop/bia/arquivos'
filename1 = u'vento_bellingshausen_direcao.xlsx'
filename2 = u'vento_bellingshausen_velocidade.xlsx'
new_filename = u'vento_u_v_bellingshausen.xlsx'
##############################################################################
# OPENING AND MANIPULATING DATA ##############################################
##############################################################################
# Open files: wind direction (wdir) and wind speed (wspd); both sheets use
# the first column as index and mark missing data as '' or '-'.
wdir = pd.read_excel(
    os.path.join(
        DATADIR,
        filename1
    ),
    header=0,
    index_col=0,
    na_values=['', '-']
)
wspd = pd.read_excel(
    os.path.join(
        DATADIR,
        filename2
    ),
    header=0,
    index_col=0,
    na_values=['', '-']
)
# Transform a df with monthly variation on
# columns and years on lines to Series with
# multi-index and all monthly values
wdir = wdir.stack()
wspd = wspd.stack()
# Aggregate month and year info from
# multi-index in one string, transform it
# into datetime and set as Series index
wdir.index = pd.to_datetime((
    wdir.index.get_level_values(0).astype('str') +
    '-' +
    wdir.index.get_level_values(1).astype('str')
))
wspd.index = pd.to_datetime((
    wspd.index.get_level_values(0).astype('str') +
    '-' +
    wspd.index.get_level_values(1).astype('str')
))
# Name Series
wdir.name = 'wdir'
wspd.name = 'wspd'
# Fill gaps with NaN by reindexing to a regular month-start frequency
wdir = wdir.resample('MS').asfreq()
wspd = wspd.resample('MS').asfreq()
# Merge two Series in one df
df = pd.merge(
    wspd,
    wdir,
    left_index=True,
    right_index=True
)
del wdir, wspd
# Convert polar wind (speed, direction) to cartesian u/v components,
# rounded to 1 decimal place.
# NOTE(review): presumably wdir is in meteorological degrees — confirm
# against airsea.pol2cart_wind's expected convention.
u, v = airsea.pol2cart_wind(
    df.wspd,
    df.wdir,
    rnd=1
)
df_new = pd.DataFrame(
    data={
        'u':u.values,
        'v':v.values
    },
    index=u.index
)
##########################################################
# Transforming index from a monthly series from 03/1968 to
# 01/2021 over all rows length to a yearly series over all
# rows length and monthly variations on columns dimension
##########################################################
df_new = df_new.groupby([
    df_new.index.year,
    df_new.index.month
]).mean()
df_new.index.names = ['', '']
df_new = df_new.unstack()
##########################################################
# Save
df_new.to_excel(os.path.join(DATADIR, new_filename))
stop = datetime.now().replace(microsecond=0)
print('Time taken to execute program: {}'.format(stop - start))
|
[
"pandas.DataFrame",
"airsea.pol2cart_wind",
"os.path.join",
"pandas.merge",
"datetime.datetime.now",
"os.path.expanduser"
] |
[((2152, 2207), 'pandas.merge', 'pd.merge', (['wspd', 'wdir'], {'left_index': '(True)', 'right_index': '(True)'}), '(wspd, wdir, left_index=True, right_index=True)\n', (2160, 2207), True, 'import pandas as pd\n'), ((2250, 2295), 'airsea.pol2cart_wind', 'airsea.pol2cart_wind', (['df.wspd', 'df.wdir'], {'rnd': '(1)'}), '(df.wspd, df.wdir, rnd=1)\n', (2270, 2295), False, 'import airsea\n'), ((2319, 2383), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': "{'u': u.values, 'v': v.values}", 'index': 'u.index'}), "(data={'u': u.values, 'v': v.values}, index=u.index)\n", (2331, 2383), True, 'import pandas as pd\n'), ((328, 366), 'os.path.expanduser', 'os.path.expanduser', (['"""~/Dropbox/airsea"""'], {}), "('~/Dropbox/airsea')\n", (346, 366), False, 'import os\n'), ((1149, 1181), 'os.path.join', 'os.path.join', (['DATADIR', 'filename1'], {}), '(DATADIR, filename1)\n', (1161, 1181), False, 'import os\n'), ((1288, 1320), 'os.path.join', 'os.path.join', (['DATADIR', 'filename2'], {}), '(DATADIR, filename2)\n', (1300, 1320), False, 'import os\n'), ((2930, 2965), 'os.path.join', 'os.path.join', (['DATADIR', 'new_filename'], {}), '(DATADIR, new_filename)\n', (2942, 2965), False, 'import os\n'), ((392, 406), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (404, 406), False, 'from datetime import datetime\n'), ((2975, 2989), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2987, 2989), False, 'from datetime import datetime\n')]
|
"""
Serializers
"""
from django.utils.translation import ugettext_lazy as _
from django.core.mail import EmailMultiAlternatives
from django.conf import settings
from applications.authentication import authenticate
from rest_framework import exceptions, serializers
from .state import User
from .tokens import AccessToken
from .utils import get_string_and_html
from .models import Pages, Token, Site, Image
class PasswordField(serializers.CharField):
    """CharField preset for passwords: always write-only and rendered with a
    password-type input widget."""

    def __init__(self, *args, **kwargs):
        style = kwargs.setdefault("style", {})
        style["input_type"] = "password"
        kwargs["write_only"] = True
        super().__init__(*args, **kwargs)
class TokenObtainSerializer(serializers.Serializer):
    """Base serializer that authenticates a username/password pair.

    Subclasses must implement ``get_token`` to build the token payload.
    """

    # Name of the user-model field used as the login identifier.
    username_field = User.USERNAME_FIELD
    default_error_messages = {
        "no_active_account": _("No active account found with the given credentials")
    }
    def __init__(self, *args, **kwargs):
        """Declare the username and password fields dynamically, so the
        username field name follows ``User.USERNAME_FIELD``."""
        super().__init__(*args, **kwargs)
        self.fields[self.username_field] = serializers.CharField()
        self.fields["password"] = PasswordField()
    def validate(self, attrs):
        """Authenticate the credentials; store the user on ``self.user``.

        Raises AuthenticationFailed when no active account matches.
        Returns an empty dict (subclasses add token data).
        """
        authenticate_kwargs = {
            self.username_field: attrs[self.username_field],
            "password": attrs["password"],
        }
        # Pass the request through when available (serializer may be used
        # outside a request context, e.g. in tests).
        try:
            authenticate_kwargs["request"] = self.context["request"]
        except KeyError:
            pass
        self.user = authenticate(**authenticate_kwargs)
        # Prior to Django 1.10, inactive users could be authenticated with the
        # default `ModelBackend`. As of Django 1.10, the `ModelBackend`
        # prevents inactive users from authenticating. App designers can still
        # allow inactive users to authenticate by opting for the new
        # `AllowAllUsersModelBackend`. However, we explicitly prevent inactive
        # users from authenticating to enforce a reasonable policy and provide
        # sensible backwards compatibility with older Django versions.
        if self.user is None:
            raise exceptions.AuthenticationFailed(
                self.error_messages["no_active_account"],
                "no_active_account",
            )
        return {}
    @classmethod
    def get_token(cls, user):
        """Build a token for ``user``; must be overridden by subclasses."""
        raise NotImplementedError(
            "Must implement `get_token` method for `TokenObtainSerializer` subclasses"
        )
class TokenObtainPairSerializer(TokenObtainSerializer):
    """Token-obtain serializer that, after successful authentication, issues
    an access token for the user under the ``"refresh"`` key."""

    @classmethod
    def get_token(cls, user):
        """Issue an AccessToken bound to ``user``."""
        return AccessToken.for_user(user)

    def validate(self, attrs):
        """Authenticate via the base class, then attach the token string."""
        payload = super().validate(attrs)
        payload["refresh"] = str(self.get_token(self.user))
        return payload
class RegisterSerializer(serializers.ModelSerializer):
    """Create a new user account and send it an activation e-mail."""

    def create(self, validated_data):
        """Create the account, issue an activation token, and e-mail it."""
        user = User.objects.create_account(**validated_data)
        user.save()
        token = Token.to_active(user)
        self.send_activation_email(user, token)
        return user
    def send_activation_email(self, user, token):
        """Render the confirmation template and send it to ``user.email``
        in both plain-text and HTML form."""
        html_content, string_content = get_string_and_html(
            "email/email_confirmation.html", {"user": user, "token": token}
        )
        subject = _("Bienvenido a ") + settings.SERVERNAME
        email = EmailMultiAlternatives(
            subject, string_content, settings.EMAIL_HOST_USER, [user.email]
        )
        email.attach_alternative(html_content, "text/html")
        email.send()
    class Meta:
        model = User
        fields = ("login", "password", "email", "real_name", "social_id")
        # BUGFIX: the key here was the corrupted placeholder "<PASSWORD>"
        # (an anonymization artifact). The intended DRF option is
        # write_only, so the password is accepted on input but never
        # serialized back out in responses.
        extra_kwargs = {"password": {"write_only": True}}
# fmt: off
class CurrentUserSerializer(serializers.ModelSerializer):
    """
    Read-only representation of the currently authenticated user.
    """
    class Meta:
        model = User
        fields = (
            "login",
            "status",
            "real_name",
            "email",
            "coins",
            "create_time"
        )
class RankingPlayerSerializer(serializers.Serializer):
    """
    Serializer for one player entry in the ranking listing.
    """
    account_id = serializers.IntegerField()
    name = serializers.CharField()
    level = serializers.IntegerField()
    exp = serializers.IntegerField()
class RankingGuildSerializer(serializers.Serializer):
    """
    Serializer for one guild entry in the ranking listing.
    """
    name = serializers.CharField()
    level = serializers.IntegerField()
    exp = serializers.IntegerField()
    ladder_point = serializers.IntegerField()
class ChangePasswordSerializer(serializers.Serializer):
    """Password-change payload: the two new-password entries must match."""

    current_password = PasswordField()
    new_password = PasswordField()
    new_password_again = PasswordField()

    def validate(self, data):
        """Reject the payload when the confirmation does not match."""
        if data["new_password"] == data["new_password_again"]:
            return data
        raise serializers.ValidationError("password must be equal")
class ResetPasswordSerializer(serializers.Serializer):
    """Password-reset payload: the two new-password entries must match."""

    new_password = PasswordField()
    new_password_again = PasswordField()

    def validate(self, data):
        """Reject the payload when the confirmation does not match."""
        if data["new_password"] == data["new_password_again"]:
            return data
        raise serializers.ValidationError("password must be equal")
class DownloadSerializer(serializers.Serializer):
    """Serializer for a downloadable client entry (provider + link).

    NOTE(review): ``weight`` has 5 digits / 3 decimal places — presumably a
    file size or sort weight; confirm units against the model.
    """
    id = serializers.UUIDField(read_only=True)
    provider = serializers.CharField(max_length=30)
    weight = serializers.DecimalField(max_digits=5, decimal_places=3)
    link = serializers.CharField(max_length=100)
    create_at = serializers.DateTimeField()
    modified_at = serializers.DateTimeField()
# fmt: off
class PagesSerializer(serializers.ModelSerializer):
    """CMS page representation; pages are looked up by slug, not pk."""
    class Meta:
        model = Pages
        fields = (
            "slug",
            "title",
            "content",
            "published",
            "create_at",
            "modified_at"
        )
        lookup_field = "slug"
        extra_kwargs = {"url": {"lookup_field": "slug"}}
class RequestPasswordSerializer(serializers.Serializer):
    """Start a password reset: look the user up by login and e-mail a
    reset token.

    NOTE(review): validation here has a side effect (sends the e-mail);
    method name "send_rest_..." looks like a typo for "send_reset_...".
    """
    login = serializers.CharField()
    def validate(self, data):
        """Issue a reset token for the login and send the reset e-mail.

        Raises ValidationError when no user matches ``login``.
        """
        try:
            user = User.objects.get(login=data["login"])
            token = Token.to_reset(user)
            self.send_rest_password_email(user, token)
            return data
        except User.DoesNotExist:
            raise serializers.ValidationError("User not found in database")
    def send_rest_password_email(self, user, token):
        """Render the reset-password template and send it to ``user.email``
        in both plain-text and HTML form."""
        html_content, string_content = get_string_and_html(
            "email/reset_password.html", {"user": user, "token": token}
        )
        subject = _("Olvido de Contraseña - ") + settings.SERVERNAME
        email = EmailMultiAlternatives(
            subject, string_content, settings.EMAIL_HOST_USER, [user.email]
        )
        email.attach_alternative(html_content, "text/html")
        email.send()
class ImageSerializer(serializers.ModelSerializer):
image_url = serializers.SerializerMethodField()
class Meta:
model = Image
fields = ("name", "types", "image_url")
def get_image_url(self, image):
photo_url = image.image.url
return photo_url
class SiteSerializer(serializers.ModelSerializer):
images = ImageSerializer(many=True)
footer_menu = PagesSerializer(many=True)
class Meta:
model = Site
fields = (
"name",
"slug",
"images",
"initial_level",
"max_level",
"rates",
"facebook_url",
"facebook_enable",
"footer_menu",
"footer_info",
"footer_menu_enable",
"footer_info_enable",
"forum_url",
"last_online",
)
lookup_field = "slug"
extra_kwargs = {"url": {"lookup_field": "slug"}}
|
[
"rest_framework.serializers.UUIDField",
"rest_framework.exceptions.AuthenticationFailed",
"rest_framework.serializers.SerializerMethodField",
"django.utils.translation.ugettext_lazy",
"rest_framework.serializers.IntegerField",
"rest_framework.serializers.CharField",
"django.core.mail.EmailMultiAlternatives",
"applications.authentication.authenticate",
"rest_framework.serializers.DecimalField",
"rest_framework.serializers.DateTimeField",
"rest_framework.serializers.ValidationError"
] |
[((4097, 4123), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {}), '()\n', (4121, 4123), False, 'from rest_framework import exceptions, serializers\n'), ((4135, 4158), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (4156, 4158), False, 'from rest_framework import exceptions, serializers\n'), ((4171, 4197), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {}), '()\n', (4195, 4197), False, 'from rest_framework import exceptions, serializers\n'), ((4208, 4234), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {}), '()\n', (4232, 4234), False, 'from rest_framework import exceptions, serializers\n'), ((4345, 4368), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (4366, 4368), False, 'from rest_framework import exceptions, serializers\n'), ((4381, 4407), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {}), '()\n', (4405, 4407), False, 'from rest_framework import exceptions, serializers\n'), ((4418, 4444), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {}), '()\n', (4442, 4444), False, 'from rest_framework import exceptions, serializers\n'), ((4464, 4490), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {}), '()\n', (4488, 4490), False, 'from rest_framework import exceptions, serializers\n'), ((5256, 5293), 'rest_framework.serializers.UUIDField', 'serializers.UUIDField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (5277, 5293), False, 'from rest_framework import exceptions, serializers\n'), ((5309, 5345), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (5330, 5345), False, 'from rest_framework import exceptions, serializers\n'), ((5359, 5415), 'rest_framework.serializers.DecimalField', 'serializers.DecimalField', ([], {'max_digits': '(5)', 'decimal_places': 
'(3)'}), '(max_digits=5, decimal_places=3)\n', (5383, 5415), False, 'from rest_framework import exceptions, serializers\n'), ((5427, 5464), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (5448, 5464), False, 'from rest_framework import exceptions, serializers\n'), ((5481, 5508), 'rest_framework.serializers.DateTimeField', 'serializers.DateTimeField', ([], {}), '()\n', (5506, 5508), False, 'from rest_framework import exceptions, serializers\n'), ((5527, 5554), 'rest_framework.serializers.DateTimeField', 'serializers.DateTimeField', ([], {}), '()\n', (5552, 5554), False, 'from rest_framework import exceptions, serializers\n'), ((5985, 6008), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (6006, 6008), False, 'from rest_framework import exceptions, serializers\n'), ((6883, 6918), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (6916, 6918), False, 'from rest_framework import exceptions, serializers\n'), ((827, 882), 'django.utils.translation.ugettext_lazy', '_', (['"""No active account found with the given credentials"""'], {}), "('No active account found with the given credentials')\n", (828, 882), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1017, 1040), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (1038, 1040), False, 'from rest_framework import exceptions, serializers\n'), ((1414, 1449), 'applications.authentication.authenticate', 'authenticate', ([], {}), '(**authenticate_kwargs)\n', (1426, 1449), False, 'from applications.authentication import authenticate\n'), ((3300, 3392), 'django.core.mail.EmailMultiAlternatives', 'EmailMultiAlternatives', (['subject', 'string_content', 'settings.EMAIL_HOST_USER', '[user.email]'], {}), '(subject, string_content, settings.EMAIL_HOST_USER, [\n user.email])\n', (3322, 3392), False, 'from 
django.core.mail import EmailMultiAlternatives\n'), ((6621, 6713), 'django.core.mail.EmailMultiAlternatives', 'EmailMultiAlternatives', (['subject', 'string_content', 'settings.EMAIL_HOST_USER', '[user.email]'], {}), '(subject, string_content, settings.EMAIL_HOST_USER, [\n user.email])\n', (6643, 6713), False, 'from django.core.mail import EmailMultiAlternatives\n'), ((2030, 2128), 'rest_framework.exceptions.AuthenticationFailed', 'exceptions.AuthenticationFailed', (["self.error_messages['no_active_account']", '"""no_active_account"""'], {}), "(self.error_messages['no_active_account'],\n 'no_active_account')\n", (2061, 2128), False, 'from rest_framework import exceptions, serializers\n'), ((3243, 3261), 'django.utils.translation.ugettext_lazy', '_', (['"""Bienvenido a """'], {}), "('Bienvenido a ')\n", (3244, 3261), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4789, 4842), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""password must be equal"""'], {}), "('password must be equal')\n", (4816, 4842), False, 'from rest_framework import exceptions, serializers\n'), ((5121, 5174), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""password must be equal"""'], {}), "('password must be equal')\n", (5148, 5174), False, 'from rest_framework import exceptions, serializers\n'), ((6554, 6582), 'django.utils.translation.ugettext_lazy', '_', (['"""Olvido de Contraseña - """'], {}), "('Olvido de Contraseña - ')\n", (6555, 6582), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6282, 6339), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""User not found in database"""'], {}), "('User not found in database')\n", (6309, 6339), False, 'from rest_framework import exceptions, serializers\n')]
|
import os
from pathlib import Path
from django.urls import reverse
from django.contrib.auth import get_user_model
from filer.models import File as FilerFile
# from rest_framework import status
from ...tests import APITestFactory
from ...models import Upload, Link
from ..utils import parse_user_files
User = get_user_model()
class TestFtp(APITestFactory):
def setUp(self):
super(TestFtp, self).setUp()
self.user_folder_ftp_path = os.path.join(self.user_folder_path, 'ftp')
def test_user_folder_creation_on_access(self):
"""
Creates user related folder on first access
"""
self.perform_user_login()
response = self.client.get('/', format='json')
self.assertTrue(os.path.exists(self.user_folder_ftp_path), 'user ftp folder not created')
def test_fpt_uploaded_file_creates_upload_model(self):
self.perform_user_login()
Path(os.path.join(self.user_folder_ftp_path, 'ftp_file.txt')).touch()
_parsed_files = parse_user_files(self.user)
relative_path = os.path.join('users', self.user.sso_id) + os.sep + os.path.join('ftp', 'ftp_file.txt')
self.assertTrue(Upload.objects.filter(file_path=relative_path).count() == 1, 'cannot link to ftp uploaded file')
def test_fpt_uploaded_file_creates_upload_model_once(self):
self.perform_user_login()
Path(os.path.join(self.user_folder_ftp_path, 'ftp_file.txt')).touch()
_parsed_files = parse_user_files(self.user)
relative_path = os.path.join('users', self.user.sso_id) + os.sep + os.path.join('ftp', 'ftp_file.txt')
self.assertTrue(Upload.objects.filter(file_path=relative_path).count() == 1, 'cannot link to ftp uploaded file')
Path(os.path.join(self.user_folder_ftp_path, 'ftp_file.txt')).touch()
_parsed_files = parse_user_files(self.user)
self.assertTrue(Upload.objects.filter(file_path=relative_path).count() == 1, 'cannot link to ftp uploaded file')
self.assertTrue(Upload.objects.count() == 1, 'more than one Upload created')
def test_fpt_uploaded_file_creates_filer_file(self):
self.perform_user_login()
Path(os.path.join(self.user_folder_ftp_path, 'ftp_file.txt')).touch()
_parsed_files = parse_user_files(self.user)
relative_path = os.path.join('users', self.user.sso_id) + os.sep + os.path.join('ftp', 'ftp_file.txt')
self.assertTrue(FilerFile.objects.filter(file=relative_path).count() == 1, 'cannot find filer file for ftp uploaded file')
|
[
"django.contrib.auth.get_user_model",
"os.path.join",
"os.path.exists",
"filer.models.File.objects.filter"
] |
[((314, 330), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (328, 330), False, 'from django.contrib.auth import get_user_model\n'), ((459, 501), 'os.path.join', 'os.path.join', (['self.user_folder_path', '"""ftp"""'], {}), "(self.user_folder_path, 'ftp')\n", (471, 501), False, 'import os\n'), ((746, 787), 'os.path.exists', 'os.path.exists', (['self.user_folder_ftp_path'], {}), '(self.user_folder_ftp_path)\n', (760, 787), False, 'import os\n'), ((1122, 1157), 'os.path.join', 'os.path.join', (['"""ftp"""', '"""ftp_file.txt"""'], {}), "('ftp', 'ftp_file.txt')\n", (1134, 1157), False, 'import os\n'), ((1587, 1622), 'os.path.join', 'os.path.join', (['"""ftp"""', '"""ftp_file.txt"""'], {}), "('ftp', 'ftp_file.txt')\n", (1599, 1622), False, 'import os\n'), ((2384, 2419), 'os.path.join', 'os.path.join', (['"""ftp"""', '"""ftp_file.txt"""'], {}), "('ftp', 'ftp_file.txt')\n", (2396, 2419), False, 'import os\n'), ((1071, 1110), 'os.path.join', 'os.path.join', (['"""users"""', 'self.user.sso_id'], {}), "('users', self.user.sso_id)\n", (1083, 1110), False, 'import os\n'), ((1536, 1575), 'os.path.join', 'os.path.join', (['"""users"""', 'self.user.sso_id'], {}), "('users', self.user.sso_id)\n", (1548, 1575), False, 'import os\n'), ((2333, 2372), 'os.path.join', 'os.path.join', (['"""users"""', 'self.user.sso_id'], {}), "('users', self.user.sso_id)\n", (2345, 2372), False, 'import os\n'), ((928, 983), 'os.path.join', 'os.path.join', (['self.user_folder_ftp_path', '"""ftp_file.txt"""'], {}), "(self.user_folder_ftp_path, 'ftp_file.txt')\n", (940, 983), False, 'import os\n'), ((1393, 1448), 'os.path.join', 'os.path.join', (['self.user_folder_ftp_path', '"""ftp_file.txt"""'], {}), "(self.user_folder_ftp_path, 'ftp_file.txt')\n", (1405, 1448), False, 'import os\n'), ((1759, 1814), 'os.path.join', 'os.path.join', (['self.user_folder_ftp_path', '"""ftp_file.txt"""'], {}), "(self.user_folder_ftp_path, 'ftp_file.txt')\n", (1771, 1814), False, 'import os\n'), 
((2190, 2245), 'os.path.join', 'os.path.join', (['self.user_folder_ftp_path', '"""ftp_file.txt"""'], {}), "(self.user_folder_ftp_path, 'ftp_file.txt')\n", (2202, 2245), False, 'import os\n'), ((2445, 2489), 'filer.models.File.objects.filter', 'FilerFile.objects.filter', ([], {'file': 'relative_path'}), '(file=relative_path)\n', (2469, 2489), True, 'from filer.models import File as FilerFile\n')]
|
import mock
import pytest
import pwny
def test_default_arch_x86():
with mock.patch('platform.machine') as platform_mock:
platform_mock.return_value = 'i386'
assert pwny.Target().arch is pwny.Target.Arch.x86
def test_default_arch_x86_64():
with mock.patch('platform.machine') as platform_mock:
platform_mock.return_value = 'x86_64'
assert pwny.Target().arch is pwny.Target.Arch.x86
def test_default_arch_unknown():
with mock.patch('platform.machine') as platform_mock:
platform_mock.return_value = 'unknown'
assert pwny.Target().arch is pwny.Target.Arch.unknown
def test_default_arch_32bit():
with mock.patch('platform.architecture') as platform_mock:
platform_mock.return_value = ('32bit',)
assert pwny.Target().bits is pwny.Target.Bits.bits_32
def test_default_arch_64bit():
with mock.patch('platform.architecture') as platform_mock:
platform_mock.return_value = ('64bit',)
assert pwny.Target().bits is pwny.Target.Bits.bits_64
def test_set_arch():
with mock.patch('platform.architecture') as platform_mock:
platform_mock.return_value = ('64bit',)
target = pwny.Target(arch=pwny.Target.Arch.x86)
assert target.arch is pwny.Target.Arch.x86
def test_default_endian():
assert pwny.Target().endian is pwny.Target.Endian.little
def test_set_endian():
target = pwny.Target(arch=pwny.Target.Arch.unknown, endian=pwny.Target.Endian.big)
assert target.endian is pwny.Target.Endian.big
def test_default_bits_x86():
target = pwny.Target(arch=pwny.Target.Arch.x86)
assert target.bits == 32
@pytest.mark.xfail(raises=NotImplementedError)
def test_default_bits_unsupported():
target = pwny.Target(arch=pwny.Target.Arch.unknown)
_ = target.bits
def test_set__bits():
target = pwny.Target(arch=pwny.Target.Arch.x86, bits=64)
assert target.bits == 64
@pytest.mark.xfail(raises=ValueError)
def test_set_invalid_bits():
pwny.Target(bits=33)
def test_target_assume():
target = pwny.Target()
target.assume(pwny.Target(arch=pwny.Target.Arch.arm, endian=pwny.Target.Endian.little, bits=64, mode=2))
assert target.arch is pwny.Target.Arch.arm and \
target.endian == pwny.Target.Endian.little and \
target.bits == 64 and \
target.mode == 2
|
[
"mock.patch",
"pwny.Target",
"pytest.mark.xfail"
] |
[((1649, 1694), 'pytest.mark.xfail', 'pytest.mark.xfail', ([], {'raises': 'NotImplementedError'}), '(raises=NotImplementedError)\n', (1666, 1694), False, 'import pytest\n'), ((1925, 1961), 'pytest.mark.xfail', 'pytest.mark.xfail', ([], {'raises': 'ValueError'}), '(raises=ValueError)\n', (1942, 1961), False, 'import pytest\n'), ((1409, 1482), 'pwny.Target', 'pwny.Target', ([], {'arch': 'pwny.Target.Arch.unknown', 'endian': 'pwny.Target.Endian.big'}), '(arch=pwny.Target.Arch.unknown, endian=pwny.Target.Endian.big)\n', (1420, 1482), False, 'import pwny\n'), ((1578, 1616), 'pwny.Target', 'pwny.Target', ([], {'arch': 'pwny.Target.Arch.x86'}), '(arch=pwny.Target.Arch.x86)\n', (1589, 1616), False, 'import pwny\n'), ((1745, 1787), 'pwny.Target', 'pwny.Target', ([], {'arch': 'pwny.Target.Arch.unknown'}), '(arch=pwny.Target.Arch.unknown)\n', (1756, 1787), False, 'import pwny\n'), ((1845, 1892), 'pwny.Target', 'pwny.Target', ([], {'arch': 'pwny.Target.Arch.x86', 'bits': '(64)'}), '(arch=pwny.Target.Arch.x86, bits=64)\n', (1856, 1892), False, 'import pwny\n'), ((1995, 2015), 'pwny.Target', 'pwny.Target', ([], {'bits': '(33)'}), '(bits=33)\n', (2006, 2015), False, 'import pwny\n'), ((2057, 2070), 'pwny.Target', 'pwny.Target', ([], {}), '()\n', (2068, 2070), False, 'import pwny\n'), ((79, 109), 'mock.patch', 'mock.patch', (['"""platform.machine"""'], {}), "('platform.machine')\n", (89, 109), False, 'import mock\n'), ((273, 303), 'mock.patch', 'mock.patch', (['"""platform.machine"""'], {}), "('platform.machine')\n", (283, 303), False, 'import mock\n'), ((470, 500), 'mock.patch', 'mock.patch', (['"""platform.machine"""'], {}), "('platform.machine')\n", (480, 500), False, 'import mock\n'), ((670, 705), 'mock.patch', 'mock.patch', (['"""platform.architecture"""'], {}), "('platform.architecture')\n", (680, 705), False, 'import mock\n'), ((876, 911), 'mock.patch', 'mock.patch', (['"""platform.architecture"""'], {}), "('platform.architecture')\n", (886, 911), False, 'import mock\n'), 
((1072, 1107), 'mock.patch', 'mock.patch', (['"""platform.architecture"""'], {}), "('platform.architecture')\n", (1082, 1107), False, 'import mock\n'), ((1191, 1229), 'pwny.Target', 'pwny.Target', ([], {'arch': 'pwny.Target.Arch.x86'}), '(arch=pwny.Target.Arch.x86)\n', (1202, 1229), False, 'import pwny\n'), ((2089, 2182), 'pwny.Target', 'pwny.Target', ([], {'arch': 'pwny.Target.Arch.arm', 'endian': 'pwny.Target.Endian.little', 'bits': '(64)', 'mode': '(2)'}), '(arch=pwny.Target.Arch.arm, endian=pwny.Target.Endian.little,\n bits=64, mode=2)\n', (2100, 2182), False, 'import pwny\n'), ((1321, 1334), 'pwny.Target', 'pwny.Target', ([], {}), '()\n', (1332, 1334), False, 'import pwny\n'), ((187, 200), 'pwny.Target', 'pwny.Target', ([], {}), '()\n', (198, 200), False, 'import pwny\n'), ((383, 396), 'pwny.Target', 'pwny.Target', ([], {}), '()\n', (394, 396), False, 'import pwny\n'), ((581, 594), 'pwny.Target', 'pwny.Target', ([], {}), '()\n', (592, 594), False, 'import pwny\n'), ((787, 800), 'pwny.Target', 'pwny.Target', ([], {}), '()\n', (798, 800), False, 'import pwny\n'), ((993, 1006), 'pwny.Target', 'pwny.Target', ([], {}), '()\n', (1004, 1006), False, 'import pwny\n')]
|
#!/usr/bin/python
# -*- encoding: utf-8 -*-
'''
@Time : 2021/06/21 10:22:31
@Author : Camille
@Version : 1.0
'''
import logging
import os
import datetime
class BaseLogs():
"""
@logName: types_datetime
@callerPath: caller function path
"""
def __init__(self, logName, mark, callerPath='..\\'):
if not logName:
todays = datetime.date.today()
self.logName = '{}{}'.format(todays, '.log')
self.logName = logName
self.callerPath = callerPath
# The main log folder path.
# self.callerLogsPath = '{}{}'.format(self.callerPath , r'\logs')
self.callerLogsPath = os.path.join(callerPath, 'logs', mark)
# Default log name.
self.baseLogDir()
def baseLogDir(self):
"""
Complete the main log folder creation requirements.
"""
if not os.path.exists(self.callerLogsPath):
os.makedirs(self.callerLogsPath)
def subLogDir(self, subLogPath):
"""
Complete other log folder creation requirements.
"""
os.makedirs('{}{}{}'.format(self.callerPath, '\\', subLogPath))
def logHandler(self, logName=None, w_logName=None):
# Create the log.
logPath = '{}{}{}'.format(self.callerLogsPath, '\\', self.logName)
fileHandler = logging.FileHandler(logPath, 'a', encoding='utf-8')
# The logs format.
fmt = logging.Formatter(fmt='%(asctime)s - %(name)s - %(levelname)s - %(module)s: %(message)s')
fileHandler.setFormatter(fmt)
# Use the log. Write to self.logName.
# Default log name: today.
if w_logName:
logger = logging.Logger(w_logName)
logger = logging.Logger(logPath)
logger.addHandler(fileHandler)
return logger
class BasicLogs(BaseLogs):
@staticmethod
def handler(mark, logName=None):
logsObj = BaseLogs(logName, mark)
return logsObj
|
[
"logging.FileHandler",
"os.makedirs",
"os.path.exists",
"datetime.date.today",
"logging.Logger",
"logging.Formatter",
"os.path.join"
] |
[((675, 713), 'os.path.join', 'os.path.join', (['callerPath', '"""logs"""', 'mark'], {}), "(callerPath, 'logs', mark)\n", (687, 713), False, 'import os\n'), ((1360, 1411), 'logging.FileHandler', 'logging.FileHandler', (['logPath', '"""a"""'], {'encoding': '"""utf-8"""'}), "(logPath, 'a', encoding='utf-8')\n", (1379, 1411), False, 'import logging\n'), ((1453, 1547), 'logging.Formatter', 'logging.Formatter', ([], {'fmt': '"""%(asctime)s - %(name)s - %(levelname)s - %(module)s: %(message)s"""'}), "(fmt=\n '%(asctime)s - %(name)s - %(levelname)s - %(module)s: %(message)s')\n", (1470, 1547), False, 'import logging\n'), ((1752, 1775), 'logging.Logger', 'logging.Logger', (['logPath'], {}), '(logPath)\n', (1766, 1775), False, 'import logging\n'), ((388, 409), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (407, 409), False, 'import datetime\n'), ((902, 937), 'os.path.exists', 'os.path.exists', (['self.callerLogsPath'], {}), '(self.callerLogsPath)\n', (916, 937), False, 'import os\n'), ((951, 983), 'os.makedirs', 'os.makedirs', (['self.callerLogsPath'], {}), '(self.callerLogsPath)\n', (962, 983), False, 'import os\n'), ((1705, 1730), 'logging.Logger', 'logging.Logger', (['w_logName'], {}), '(w_logName)\n', (1719, 1730), False, 'import logging\n')]
|
from tornado.web import RequestHandler
from tornado.web import gen
from controller import sugarGuideController
import json
# 保存糖导的结果
class AddSugarGuideResult(RequestHandler):
@gen.coroutine
def post(self):
session_id = self.get_argument('session_id')
gender = self.get_argument('gender')
age = self.get_argument('age')
height = self.get_argument('height')
weight = self.get_argument('weight')
sugarType = self.get_argument('sugarType')
diseaseAge = self.get_argument('diseaseAge')
akin = self.get_argument('akin')
fm = self.get_argument('fm')
manyDrinkWc = self.get_argument('manyDrinkWc')
posion = self.get_argument('posion')
thirsty = self.get_argument('thirsty')
visionDown = self.get_argument('visionDown')
diseaseSpeed = self.get_argument('diseaseSpeed')
verifyYear = self.get_argument('verifyYear')
cureWay = self.get_argument('cureWay')
dsPlan = self.get_argument('dsPlan')
complication = self.get_argument('complication')
data = sugarGuideController.createHealthWeekly(session_id, gender, age, height, weight,
sugarType, diseaseAge, akin, fm,
manyDrinkWc, posion, thirsty,
visionDown, diseaseSpeed, verifyYear,
cureWay, dsPlan, complication)
self.write(json.dumps(data))
# 获取健康周报
class GetHealthWeekly(RequestHandler):
@gen.coroutine
def post(self):
session_id = self.get_argument('session_id')
data = sugarGuideController.retireveHealthWeekly(session_id)
self.render('healthWeekly.html', cerealsValue=data['diet']['cerealsValue'],
cereals=data['diet']['cereals'],fruitValue=data['diet']['fruitValue'],
fruit=data['diet']['fruit'],meatValue=data['diet']['meatValue'],
meat=data['diet']['meat'],milkValue=data['diet']['milkValue'],
milk=data['diet']['milk'],fatValue=data['diet']['fatValue'],
fat=data['diet']['fat'],vegetablesValue=data['diet']['vegetablesValue'],
vegetables=data['diet']['vegetables'],
sport1=data['sport']['sport1'],sport2=data['sport']['sport2'],
sport3=data['sport']['sport3'],sport4=data['sport']['sport4'],
time1=data['sport']['time1'], time2=data['sport']['time2'],
time3=data['sport']['time3'], time4=data['sport']['time4'],
week1=data['sport']['week1'], week2=data['sport']['week2'],
week3=data['sport']['week3'], week4=data['sport']['week4'],
min1=data['control']['min1'],max1=data['control']['max1'],
min2=data['control']['min2'],max2=data['control']['max2'],
sleep1=data['control']['sleep1'],sleep2=data['control']['sleep2'],)
|
[
"json.dumps",
"controller.sugarGuideController.retireveHealthWeekly",
"controller.sugarGuideController.createHealthWeekly"
] |
[((1131, 1351), 'controller.sugarGuideController.createHealthWeekly', 'sugarGuideController.createHealthWeekly', (['session_id', 'gender', 'age', 'height', 'weight', 'sugarType', 'diseaseAge', 'akin', 'fm', 'manyDrinkWc', 'posion', 'thirsty', 'visionDown', 'diseaseSpeed', 'verifyYear', 'cureWay', 'dsPlan', 'complication'], {}), '(session_id, gender, age, height,\n weight, sugarType, diseaseAge, akin, fm, manyDrinkWc, posion, thirsty,\n visionDown, diseaseSpeed, verifyYear, cureWay, dsPlan, complication)\n', (1170, 1351), False, 'from controller import sugarGuideController\n'), ((1769, 1822), 'controller.sugarGuideController.retireveHealthWeekly', 'sugarGuideController.retireveHealthWeekly', (['session_id'], {}), '(session_id)\n', (1810, 1822), False, 'from controller import sugarGuideController\n'), ((1588, 1604), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1598, 1604), False, 'import json\n')]
|
import pytest
import numpy as np
from sklearn.model_selection import GridSearchCV
from sklego.datasets import load_penguins
from sklearn.pipeline import Pipeline
from sklearn.metrics import make_scorer, accuracy_score
from hulearn.preprocessing import PipeTransformer
from hulearn.outlier import InteractiveOutlierDetector
from hulearn.common import flatten
from tests.conftest import (
select_tests,
general_checks,
classifier_checks,
nonmeta_checks,
)
@pytest.mark.parametrize(
"test_fn",
select_tests(
include=flatten([general_checks, classifier_checks, nonmeta_checks]),
exclude=[
"check_estimators_pickle",
"check_estimator_sparse_data",
"check_estimators_nan_inf",
"check_pipeline_consistency",
"check_complex_data",
"check_fit2d_predict1d",
"check_methods_subset_invariance",
"check_fit1d",
"check_dict_unchanged",
"check_classifier_data_not_an_array",
"check_classifiers_one_label",
"check_classifiers_classes",
"check_classifiers_train",
"check_supervised_y_2d",
"check_supervised_y_no_nan",
"check_estimators_unfitted",
"check_estimators_dtypes",
"check_fit_score_takes_y",
"check_dtype_object",
"check_estimators_empty_data_messages",
],
),
)
def test_estimator_checks(test_fn):
"""
We're skipping a lot of tests here mainly because this model is "bespoke"
it is *not* general. Therefore a lot of assumptions are broken.
"""
clf = InteractiveOutlierDetector.from_json(
"tests/test_classification/demo-data.json"
)
test_fn(InteractiveOutlierDetector, clf)
def test_base_predict_usecase():
clf = InteractiveOutlierDetector.from_json(
"tests/test_classification/demo-data.json"
)
df = load_penguins(as_frame=True).dropna()
X, y = df.drop(columns=["species"]), df["species"]
preds = clf.fit(X, y).predict(X)
assert preds.shape[0] == df.shape[0]
def identity(x):
return x
def test_grid_predict():
clf = InteractiveOutlierDetector.from_json(
"tests/test_classification/demo-data.json"
)
pipe = Pipeline(
[
("id", PipeTransformer(identity)),
("mod", clf),
]
)
grid = GridSearchCV(
pipe,
cv=5,
param_grid={},
scoring={"acc": make_scorer(accuracy_score)},
refit="acc",
)
df = load_penguins(as_frame=True).dropna()
X = df.drop(columns=["species", "island", "sex"])
y = (np.random.random(df.shape[0]) < 0.1).astype(int)
preds = grid.fit(X, y).predict(X)
assert preds.shape[0] == df.shape[0]
def test_ignore_bad_data():
"""
There might be some "bad data" drawn. For example, when you quickly hit double-click you might
draw a line instead of a poly. Bokeh is "okeh" with it, but our point-in-poly algorithm is not.
"""
data = [
{
"chart_id": "9ec8e755-2",
"x": "bill_length_mm",
"y": "bill_depth_mm",
"polygons": {
"Adelie": {"bill_length_mm": [], "bill_depth_mm": []},
"Gentoo": {"bill_length_mm": [], "bill_depth_mm": []},
"Chinstrap": {"bill_length_mm": [], "bill_depth_mm": []},
},
},
{
"chart_id": "11640372-c",
"x": "flipper_length_mm",
"y": "body_mass_g",
"polygons": {
"Adelie": {
"flipper_length_mm": [[214.43261376806052, 256.2612913545137]],
"body_mass_g": [[3950.9482324534456, 3859.9137496948247]],
},
"Gentoo": {"flipper_length_mm": [], "body_mass_g": []},
"Chinstrap": {"flipper_length_mm": [], "body_mass_g": []},
},
},
]
clf = InteractiveOutlierDetector(json_desc=data)
assert len(list(clf.poly_data)) == 0
|
[
"sklego.datasets.load_penguins",
"sklearn.metrics.make_scorer",
"numpy.random.random",
"hulearn.common.flatten",
"hulearn.outlier.InteractiveOutlierDetector",
"hulearn.outlier.InteractiveOutlierDetector.from_json",
"hulearn.preprocessing.PipeTransformer"
] |
[((1658, 1743), 'hulearn.outlier.InteractiveOutlierDetector.from_json', 'InteractiveOutlierDetector.from_json', (['"""tests/test_classification/demo-data.json"""'], {}), "('tests/test_classification/demo-data.json'\n )\n", (1694, 1743), False, 'from hulearn.outlier import InteractiveOutlierDetector\n'), ((1843, 1928), 'hulearn.outlier.InteractiveOutlierDetector.from_json', 'InteractiveOutlierDetector.from_json', (['"""tests/test_classification/demo-data.json"""'], {}), "('tests/test_classification/demo-data.json'\n )\n", (1879, 1928), False, 'from hulearn.outlier import InteractiveOutlierDetector\n'), ((2189, 2274), 'hulearn.outlier.InteractiveOutlierDetector.from_json', 'InteractiveOutlierDetector.from_json', (['"""tests/test_classification/demo-data.json"""'], {}), "('tests/test_classification/demo-data.json'\n )\n", (2225, 2274), False, 'from hulearn.outlier import InteractiveOutlierDetector\n'), ((3987, 4029), 'hulearn.outlier.InteractiveOutlierDetector', 'InteractiveOutlierDetector', ([], {'json_desc': 'data'}), '(json_desc=data)\n', (4013, 4029), False, 'from hulearn.outlier import InteractiveOutlierDetector\n'), ((549, 609), 'hulearn.common.flatten', 'flatten', (['[general_checks, classifier_checks, nonmeta_checks]'], {}), '([general_checks, classifier_checks, nonmeta_checks])\n', (556, 609), False, 'from hulearn.common import flatten\n'), ((1947, 1975), 'sklego.datasets.load_penguins', 'load_penguins', ([], {'as_frame': '(True)'}), '(as_frame=True)\n', (1960, 1975), False, 'from sklego.datasets import load_penguins\n'), ((2570, 2598), 'sklego.datasets.load_penguins', 'load_penguins', ([], {'as_frame': '(True)'}), '(as_frame=True)\n', (2583, 2598), False, 'from sklego.datasets import load_penguins\n'), ((2334, 2359), 'hulearn.preprocessing.PipeTransformer', 'PipeTransformer', (['identity'], {}), '(identity)\n', (2349, 2359), False, 'from hulearn.preprocessing import PipeTransformer\n'), ((2504, 2531), 'sklearn.metrics.make_scorer', 'make_scorer', 
(['accuracy_score'], {}), '(accuracy_score)\n', (2515, 2531), False, 'from sklearn.metrics import make_scorer, accuracy_score\n'), ((2671, 2700), 'numpy.random.random', 'np.random.random', (['df.shape[0]'], {}), '(df.shape[0])\n', (2687, 2700), True, 'import numpy as np\n')]
|
# Copyright (c) 2021, ac and Contributors
# See license.txt
import frappe
import unittest
from accounting.accounting.doctype.sales_invoice.test_sales_invoice import TestSalesInvoice
class TestGLEntry(unittest.TestCase):
def setUp(self) -> None:
self.doctype = 'GL Entry'
def test_gl_entries_for_sales_invoice(self):
gl_entry_count = frappe.db.count(self.doctype)
invoice = TestSalesInvoice.create_sales_invoice('Frappe', 'Laptop', 2)
invoice.submit()
new_gl_entry_count = frappe.db.count('GL Entry')
assert new_gl_entry_count == gl_entry_count + 2
last_gl_entry = frappe.get_last_doc('GL Entry')
assert not last_gl_entry.is_cancelled
assert last_gl_entry.credit == invoice.total_amount
def test_reverse_gl_entries_for_sales_invoice(self):
gl_entry_count = frappe.db.count('GL Entry')
invoice = TestSalesInvoice.create_sales_invoice('Frappe', 'Laptop', 2)
invoice.submit()
invoice.cancel()
new_gl_entry_count = frappe.db.count('GL Entry')
assert new_gl_entry_count == gl_entry_count + 4
last_gl_entry = frappe.get_last_doc('GL Entry')
assert last_gl_entry.is_cancelled
assert last_gl_entry.credit == invoice.total_amount
|
[
"frappe.get_last_doc",
"frappe.db.count",
"accounting.accounting.doctype.sales_invoice.test_sales_invoice.TestSalesInvoice.create_sales_invoice"
] |
[((345, 374), 'frappe.db.count', 'frappe.db.count', (['self.doctype'], {}), '(self.doctype)\n', (360, 374), False, 'import frappe\n'), ((390, 450), 'accounting.accounting.doctype.sales_invoice.test_sales_invoice.TestSalesInvoice.create_sales_invoice', 'TestSalesInvoice.create_sales_invoice', (['"""Frappe"""', '"""Laptop"""', '(2)'], {}), "('Frappe', 'Laptop', 2)\n", (427, 450), False, 'from accounting.accounting.doctype.sales_invoice.test_sales_invoice import TestSalesInvoice\n'), ((496, 523), 'frappe.db.count', 'frappe.db.count', (['"""GL Entry"""'], {}), "('GL Entry')\n", (511, 523), False, 'import frappe\n'), ((595, 626), 'frappe.get_last_doc', 'frappe.get_last_doc', (['"""GL Entry"""'], {}), "('GL Entry')\n", (614, 626), False, 'import frappe\n'), ((796, 823), 'frappe.db.count', 'frappe.db.count', (['"""GL Entry"""'], {}), "('GL Entry')\n", (811, 823), False, 'import frappe\n'), ((839, 899), 'accounting.accounting.doctype.sales_invoice.test_sales_invoice.TestSalesInvoice.create_sales_invoice', 'TestSalesInvoice.create_sales_invoice', (['"""Frappe"""', '"""Laptop"""', '(2)'], {}), "('Frappe', 'Laptop', 2)\n", (876, 899), False, 'from accounting.accounting.doctype.sales_invoice.test_sales_invoice import TestSalesInvoice\n'), ((964, 991), 'frappe.db.count', 'frappe.db.count', (['"""GL Entry"""'], {}), "('GL Entry')\n", (979, 991), False, 'import frappe\n'), ((1063, 1094), 'frappe.get_last_doc', 'frappe.get_last_doc', (['"""GL Entry"""'], {}), "('GL Entry')\n", (1082, 1094), False, 'import frappe\n')]
|
# -*- coding: utf-8 -*-
from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType
from line2.utils import IsEmpty, AddReverseDict, Lock, AddAtExit, DelAtExit, Acquire
from line2.models.messages import Buttons
from time import time, sleep
from threading import Timer, Condition
from random import randint, choice, shuffle
class RoomPhase:
    """Lifecycle phases of a game room, with a reverse name-lookup table."""
    idling = 0
    waiting = 1
    starting = 2
    night = 3
    day = 4
    lynchVote = 5
    hunter = 6
    done = 7
    # Human-readable names, keyed by the constants above.
    toString = {
        idling: 'idling',
        waiting: 'waiting',
        starting: 'starting',
        night: 'night',
        day: 'day',
        lynchVote: 'lynchVote',
        hunter: 'hunter',
        done: 'done',
    }
class ActionPhase:
    """Phases during which a role may act; mostly aliases of RoomPhase values."""
    none = 0
    night = RoomPhase.night
    day = RoomPhase.day
    hunter = RoomPhase.hunter
    # Extra phases with no RoomPhase counterpart.
    firstNight = 7
    anyday = 8
class Role(object):
    """A game role.

    Every instance registers itself in the class-level lookup tables;
    the named class attributes hold the canonical instances and are
    assigned right after the class body, at module load.
    """
    # Canonical role instances (populated below the class definition).
    none = None
    villager = None
    werewolf = None
    drunk = None
    seer = None
    harlot = None
    beholder = None
    gunner = None
    traitor = None
    guardianAngel = None
    cursed = None
    detective = None
    apprenticeSeer = None
    cultist = None
    cultistHunter = None
    wildChild = None
    fool = None
    mason = None
    doppelganger = None
    cupid = None
    hunter = None
    serialKiller = None
    tanner = None
    mayor = None
    prince = None
    sorcerer = None
    clumsy = None
    blacksmith = None
    alphaWolf = None
    wolfCub = None
    # Group registries, filled in at module load.
    werewolves = []
    unconvertible = []
    visitorKillers = []
    needRoleModel = []
    rolesById = {}
    rolesByName = {}
    validRoles = []
    seers = []

    def __init__(self, id, name, team, actionPhase, initMsg):
        """Create a role and register it in the lookup tables.

        Roles with a truthy *id* count as playable (added to validRoles).
        """
        self.id = id
        if id:
            Role.validRoles.append(self)
        self.name = name
        self.team = team
        self.actionPhase = actionPhase
        self.initMsg = initMsg
        Role.rolesById[id] = self
        Role.rolesByName[name] = self

    def __eq__(self, rhs):
        # Two roles are equal when their numeric ids match.
        return isinstance(rhs, Role) and self.id == rhs.id

    def __ne__(self, rhs):
        return not self.__eq__(rhs)
class Team:
    """Win-condition groupings; several aliases map to the same id."""
    none = 0
    village = villager = villagers = 1
    werewolf = werewolves = 2
    cult = cultist = cultists = 3
    doppelganger = 4
    serialKiller = 5
    tanner = 6
    independant = 7
    # Display names, keyed by the constants above.
    toString = {
        none: "None",
        village: "Villager",
        werewolf: "Werewolf",
        cult: "Cult",
        doppelganger: "Doppelganger",
        serialKiller: "Serial Killer",
        tanner: "Tanner",
        independant: "Independant",
    }
# Canonical role instances. The long initMsg strings are player-facing text
# and are sent verbatim by Player.InitRole.
seerLine = "You're the Seer! Every night you can choose to look into someone's role."
Role.none = Role(0, "None", Team.none, ActionPhase.none, "None")
Role.villager = Role(1, "Villager", Team.villager, ActionPhase.none, "You're a Villager. Go plow some field you ugly")
Role.werewolf = Role(2, "Werewolf", Team.werewolf, ActionPhase.night, "You're an awoo")
Role.drunk = Role(3, "Drunk", Team.villager, ActionPhase.none, "You're the Drunk. Werewolves will go drunk and skip one turn if they eat you")
Role.seer = Role(4, "Seer", Team.villager, ActionPhase.night, seerLine)
Role.harlot = Role(5, "Harlot", Team.villager, ActionPhase.night, "You're the Harlot(slut). Every night, you can choose to sneak into someone's bed. If a werewolf tries to kill you, you'll be safe cuz ur not home. However if you sneak into a werewolf's house, you're dead meat. Also, if a werewolf kills the house owner which you snucked into, you'll also be killed.")
Role.beholder = Role(6, "Beholder", Team.villager, ActionPhase.none, "It's nothing special. It's just that you know who the Seer is.")
Role.gunner = Role(7, "Gunner", Team.villager, ActionPhase.day, "You're the Gunner. Every day, you can choose to shoot someone. Your shooting will be revealed to all, as well as your role. You only have two bullets.")
Role.traitor = Role(8, "Traitor", Team.werewolf, ActionPhase.none, "You're a Traitor. You're on the werewolves' side. You will turn into a werewolf if all werewolves die.")
# NOTE(review): "Your Guardian ANgle" typo is in the original player-facing text.
Role.guardianAngel = Role(9, "Guardian Angel", Team.villager, ActionPhase.night, "Your Guardian ANgle. Every night, you can choose to protect someone's house from Werewolves. You can't protect yours, though.")
Role.cursed = Role(10, "Cursed", Team.werewolf, ActionPhase.none, "You're the Cursed. If a werewolf tries to kill you, you will turn into one! Until then, you're on their side from the beginning, though. The Seer will see you as a Werewolf even when you haven't transformed yet.")
Role.detective = Role(11, "Detective", Team.villager, ActionPhase.day, "You're a Detective. Every day, you can choose to investigate someone. However, there are 40% chance the werewolves will notice.")
Role.apprenticeSeer = Role(12, "Apprentice Seer", Team.villager, ActionPhase.none, "You're an Apprentice Seer. If the Seer dies, you carry on his duty")
Role.cultist = Role(13, "Cultist", Team.cultist, ActionPhase.night, "You're a Cultist. You can invite someone over to your cult. If at the end of the game everyone is a cult member, the cult wins")
Role.cultistHunter = Role(14, "<NAME>", Team.villager, ActionPhase.night, "You're the Cultist Hunter. If a cultist tries to invite you, their cult's newest member will die. Every night, you can choose to hunt someone. If he's a cultist, he will die")
# Remaining role definitions (first-night choosers, lone wolves and specials).
Role.wildChild = Role(15, "Wild Child", Team.villager, ActionPhase.firstNight, "You're a Wild Child. You can choose someone to be your role model. If he dies, you'll turn into a Werewolf.")
# The Fool is briefed with the Seer's text on purpose (see Player.InitRole).
Role.fool = Role(16, "Fool", Team.villager, ActionPhase.night, seerLine)
Role.mason = Role(17, "Mason", Team.villager, ActionPhase.none, "You're a Mason. All Mason knows all Masons")
Role.doppelganger = Role(18, "Doppelganger", Team.doppelganger, ActionPhase.firstNight, "Your ancestors had the ability to metamorph into others... while you don't have their full abilities, you can pick a player at the start of the game. If that player dies, you will become what they were.\nNote: If they were Wild Child and their role model died, you will become a wolf. Otherwise, you will inherit their role model.\nThe cult cannot convert the Doppelgänger (but can after the doppelganger switches roles). Also - the Doppelgänger can NOT win unless they have transformed. If at the end of the game, the Doppelgänger is still the same, they lose (exception: lover)")
Role.cupid = Role(19, "Cupid", Team.villager, ActionPhase.firstNight, "Love is in the air. As Cupid, you will choose two players at the start of the game. These two players will become madly in love! If one of them dies, the other will die of sorrow :(\nNote: Lovers will know who each other are, but not be told their roles. If the lovers are the last two alive, they win, regardless of teams. If the lovers are on different teams (villager + wolf), and one team wins (wolf), the lover (villager) wins as well. TL;DR if at least one of the lovers was on the winning team, they both win together.")
Role.hunter = Role(20, "Hunter", Team.villager, ActionPhase.hunter, "A trigger happy, vindictive player. As the hunter, you try to keep to yourself. However, when others come to visit you, they may find themselves dead, as your paranoia takes hold and you shoot. If the wolves attack you, you have a chance to take one of them with you. Otherwise, if you die, you will get a chance to shoot someone as you die.\nNote: For wolf attacks, the chance starts at 30%. If there is one wolf, the hunter has a 30% chance to kill the wolf (and survive). For each additional wolf, add 20% (2 wolves = 50%, 3 = 70%, etc). However - if there are multiple wolves, while you may kill one of them, you will still be outnumbered and killed.\nIf the cult tries to convert you, they have a 50% chance to fail. If they fail, you have a 50% chance to kill one of them!")
Role.serialKiller = Role(21, "Serial Killer", Team.serialKiller, ActionPhase.night, "That asylum was silly anyways. What a joke. You are free now however, back to business as usual - killing! The serial killer is a lone player, on their own team. They can win only if they are the last player alive (exception: lovers). As the serial killer, you can kill ANYONE - wolves, hunters, gunners, guardian angels, whatever. If the wolves try to attack you, you will kill one of them (random), and live.")
Role.tanner = Role(22, "Tanner", Team.tanner, ActionPhase.none, "The Tanners goal is simple: Get Lynched. If the Tanner gets lynched, he wins, period. Everyone else loses.")
Role.mayor = Role(23, "Mayor", Team.villager, ActionPhase.anyday, "As mayor, you are a lowly villager, until you reveal yourself. Then you are given twice the vote count for lynching (meaning that your vote is twice as powerful as everyone else's). Use that power wisely to help the Village Team.")
Role.prince = Role(24, "Prince", Team.villager, ActionPhase.none, "Once the prince gets lynched, their role as Prince is revealed, and they survive. However, this can only happen once: if the village insists on lynching them, they will die.")
Role.sorcerer = Role(25, "Sorcerer", Team.werewolf, ActionPhase.night, "Do you remember the good old seer? Well now, it has its Wolf Team counterpart. The sorcerer is the Wuff's Seer. They can only see if someone is Wolf or Seer, and they win with the Wolves.")
Role.clumsy = Role(26, "Clumsy", Team.villager, ActionPhase.none, "You are the Clumsy Guy. Maybe you should not have had so much alcohol for breakfast. You can't see a damn thing. Can you even vote for the person you want to? (You have a 50% chance to vote for someone random.)")
Role.blacksmith = Role(27, "Blacksmith", Team.villager, ActionPhase.anyday, "You are the BlackSmith. Through the years, no blades nor swords gave you as much satisfaction as the Silver Blades the elves ordered.\nYou might have some silverdust left. Who knows ? It might *prevent Wolves from eating tonight*")
Role.alphaWolf = Role(28, "Alpha Wolf", Team.werewolf, ActionPhase.night, "You are the Alpha Wolf, the origin of the curse, the bane of banes. Every night, there's 20% chance that you will bite your pack's meal, and they will join your ranks instead of dying!")
Role.wolfCub = Role(29, "Wolf Cub", Team.werewolf, ActionPhase.night, "What a cuuuute little wuff. _tickles tickles_ -cough cough- As i was saying, you are the Wolf Cub and you _drops the mic_ -I just can't resist that. I think if anyone killed you, I'd give the wuffs two victims. You're too cute to die. I wouldn't be able to tickle you anymore-")
# Group-membership registries derived from the definitions above.
# The insertion order matches the original append sequence exactly.
Role.werewolves.extend([Role.werewolf, Role.alphaWolf, Role.wolfCub])
Role.unconvertible.extend(Role.werewolves)
Role.unconvertible.extend([Role.doppelganger, Role.serialKiller, Role.cultistHunter])
Role.visitorKillers.extend(Role.werewolves)
Role.visitorKillers.extend([Role.hunter, Role.serialKiller])
Role.needRoleModel.extend([Role.wildChild, Role.doppelganger])
Role.seers.extend([Role.seer, Role.fool, Role.sorcerer])
class Alive:
    """Tri-state liveness: None = not playing, False = dead, True = alive."""
    notPlaying = None
    dead = False
    alive = True
    # Display names, keyed by the constants above.
    toString = {
        notPlaying: 'not playing',
        dead: 'dead',
        alive: 'alive',
    }
class Player(object):
    def __init__(self, obj, room):
        # Register a new participant in *room*. *obj* is the chat-layer user
        # object; all per-game state for this participant lives here.
        with room.lock:
            self.lock = Lock()
            with self.lock:
                # Backing field for the `role` property (managed by its setter).
                self._1role = None
                self.obj = obj
                self.room = room
                self.room.lastPlayerId+=1
                self.id = self.room.lastPlayerId
                room.players.append(self)
                room.playersById[self.id] = self
                room.playersByObj[obj] = self
                # NOTE(review): duplicate of the assignment above — looks redundant.
                self.room.playersByObj[obj] = self
                # NOTE(review): unused local; possibly meant to cache self.rObj.
                rObj = self.obj.rObj
                self.originalRole = None
                self.alive = Alive.notPlaying
                self.lover = None  # set by Cupid's pairing
                self.ammo = 0  # gunner bullets / blacksmith silver dust
                self.protection=0  # 1 = silver dust, 2 = guardian angel
                self.drunk=False
                self.dayLastSeen=0
                self.canAct=False
                self.houseOwner = self  # whose house this player sleeps at tonight
                self.harlot = None
                self.cultistId = 0
                self.killerRole = None  # Role responsible for this player's death
                self.mayorRevealed = False
                self.princeRevealed = False
                self.master = None  # role model (wild child / doppelganger)
                self.apprentices = []
                self.freeloader = None  # harlot sleeping at this player's house
                self.done = False
                self.dayRoleSet = 0
                self.getRole = None
                self.kill = None  # pending kill target (serial killer / hunters)
    def Remove(self):
        """Detach this player from all of the room's registries.

        Takes both locks (player first, room second) via Acquire.
        """
        with Acquire(self.lock, self.room.lock):
            # NOTE(review): this rolls the id counter back only when the
            # difference is exactly 1, i.e. NOT for the most recently joined
            # player (lastPlayerId - id == 0). Looks like an off-by-one;
            # confirm the intended behaviour.
            if self.room.lastPlayerId - self.id == 1:
                self.room.lastPlayerId-=1
            if self in self.room.players:
                self.room.players.remove(self)
            if self.id in self.room.playersById:
                del self.room.playersById[self.id]
            if self.obj in self.room.playersByObj:
                del self.room.playersByObj[self.obj]
        return CommandResult.Done()
    @property
    def role(self):
        """Current Role; backed by `_1role` and maintained by the setter."""
        return self._1role
def GetTeamNames(self, group):
group = list(group)
if len(group) == 0:
return ''
name = group[0].role.name
if self in group:
group.remove(self)
groupLen = len(group)
if groupLen < 1:
return "You are a lone %s" % name
elif groupLen == 1:
return "You and %s are %ss" % (group[0].name, name)
else:
return "You, %s and %s are %ss" % (', '.join([x.name for x in group[:groupLen-1]]), group[-1].name, name)
else:
groupLen = len(group)
if groupLen < 1:
return "There is no %s" % name
elif groupLen == 1:
return "%s is a lone %s" % (group[0].name, name)
elif groupLen == 2:
return "%s and %s are %ss" % (group[0].name, group[1].name, name)
else:
return "%s and %s are %ss" % (', '.join([x.name for x in group[:groupLen-1]]), group[-1].name, name)
def SendTeamNames(self, group):
return self.SendText(self.GetTeamNames(group))
    @role.setter
    def role(self, value):
        """Assign a new Role, keeping all room-level registries consistent.

        Moves the player between the per-role lists, performs role-specific
        setup (cultist id, gunner ammo, team lists) and records the day the
        role was set. Order of the bookkeeping below is significant.
        """
        if value == self.role:
            return
        if self._1role:
            # Alpha Wolf / Wolf Cub are also tracked under the plain Werewolf key.
            if self._1role == Role.alphaWolf or self._1role == Role.wolfCub:
                if self in self.room.playersByRole[Role.werewolf]:
                    self.room.playersByRole[Role.werewolf].remove(self)
            if self in self.room.playersByRole[self._1role]:
                self.room.playersByRole[self._1role].remove(self)
        self._1role = value
        if self._1role not in self.room.playersByRole:
            self.room.playersByRole[self._1role] = []
        self.room.playersByRole[self._1role].append(self)
        if not value:
            return
        if value == Role.cultist:
            with self.room.lock, self.lock:
                # Cultists get a sequential id and (lazily) a shared cult vote.
                self.room.lastCultistId += 1
                self.cultistId = self.room.lastCultistId
                self.room.cultists.append(self)
                if not self.room.hasCultist:
                    self.room.hasCultist = True
                if Role.cultist.id not in self.room.votes:
                    self.room.votes[Role.cultist.id] = Vote(self.room)
        elif value == Role.gunner:
            self.ammo = 2
        elif value == Role.harlot:
            self.room.harlots.append(self)
        elif value == Role.beholder:
            self.room.beholders.append(self)
        elif value in Role.werewolves:
            self.room.werewolves.append(self)
            self.room.hasWerewolf = True
        elif value == Role.seer:
            self.room.seers.append(self)
        elif value == Role.mason:
            self.room.masons.append(self)
        elif value == Role.cupid:
            self.room.lovers[self] = []
        elif value == Role.traitor:
            self.room.traitors.append(self)
        elif value == Role.apprenticeSeer:
            self.room.apprenticeSeers.append(self)
        elif value == Role.guardianAngel:
            self.room.guardianAngels.append(self)
        # Remember when the role took effect; a night assignment counts for
        # the following day.
        self.dayRoleSet = self.room.day
        if self.room.realPhase == RoomPhase.night:
            self.dayRoleSet += 1
def Tell(self, to):
s = "%s is now a %s" % (self.name, self.role.name)
for x in to:
if x != self:
x.SendText(s)
    def TellBeholders(self):
        """Announce this player's role to the beholders and flag the room."""
        self.Tell(self.room.beholders)
        self.room.shouldTellBeholders = True
    def TellWerewolves(self):
        """Announce this player's role to the wolf pack and flag the room."""
        self.Tell(self.room.werewolves)
        self.room.shouldTellWerewolves = True
    def TellMasons(self):
        """Announce this player's role to the masons and flag the room."""
        self.Tell(self.room.masons)
        self.room.shouldTellMasons = True
    def TellCultists(self):
        """Announce this player's role to the cult and flag the room."""
        self.Tell(self.room.cultists)
        self.room.shouldTellCultists = True
    def TryTell(self, to):
        """Tell *to* only once the game has started; returns True if told,
        implicitly None otherwise."""
        if self.room.phase > RoomPhase.starting:
            self.Tell(to)
            return True
    def TryTellBeholders(self):
        """Conditionally announce to beholders; the flag is set regardless."""
        self.TryTell(self.room.beholders)
        self.room.shouldTellBeholders=True
    def TryTellWerewolves(self):
        """Conditionally announce to the pack; the flag is set regardless."""
        self.TryTell(self.room.werewolves)
        self.room.shouldTellWerewolves=True
    def TryTellMasons(self):
        """Conditionally announce to the masons; the flag is set regardless."""
        self.TryTell(self.room.masons)
        self.room.shouldTellMasons=True
    def TryTellCultists(self):
        """Conditionally announce to the cult; the flag is set regardless."""
        self.TryTell(self.room.cultists)
        self.room.shouldTellCultists=True
def Die(self, killerRole=Role.none):
print("DIE")
if self.alive == Alive.alive:
print("WAS ALIVE")
self.alive = Alive.dead
self.killerRole = killerRole
self.room.playersByRole[self.role].remove(self)
with self.room.lock:
if self in self.room.alives:
self.room.alives.remove(self)
if self not in self.room.deads:
self.room.deads.append(self)
if self.role == Role.cultist:
with self.room.lock:
if self in self.room.cultists:
self.room.cultists.remove(self)
if len(self.room.cultists) == 0:
self.room.hasCultist = False
elif self.role == Role.hunter:
self.room.deadHunters.append(self)
elif self.role == Role.harlot:
with self.room.lock:
if self in self.room.harlots:
self.room.harlots.remove(self)
elif self.role in Role.werewolves:
with self.room.lock:
if self in self.room.werewolves:
self.room.werewolves.remove(self)
if len(self.room.werewolves) == 0:
if len(self.room.traitors) == 0:
self.room.hasWerewolf = False
else:
for x in self.room.traitors:
x.Inherit(self)
elif self.role == Role.seer:
with self.room.lock:
if self in self.room.seers:
self.room.seers.remove(self)
if len(self.room.seers) == 0:
for x in self.room.apprenticeSeers:
x.Inherit(self)
self.room.shouldTellBeholders = True
elif self.role == Role.mason:
with self.room.lock:
if self in self.room.masons:
self.room.masons.remove(self)
elif self.role == Role.traitor:
with self.room.lock:
if self in self.room.traitors:
self.room.traitors.remove(self)
elif self.role == Role.beholder:
with self.room.lock:
if self in self.room.beholders:
self.room.beholders.remove(self)
elif self.role == Role.apprenticeSeer:
with self.room.lock:
if self in self.room.apprenticeSeers:
self.room.apprenticeSeers.remove(self)
elif self.role == Role.guardianAngel:
with self.room.lock:
if self in self.room.guardianAngels:
self.room.guardianAngels.remove(self)
if killerRole == Role.villager:
self.room.SendText("Yall lynched %s the %s" % (self.name, self.role.name))
elif killerRole == self:
self.room.SendText("%s just can't live without %s", (self.name, self.lover.name))
elif killerRole == Role.none:
self.room.SendText("%s has been away for too long and considered dead" % self.name)
else:
k = "???"
if killerRole:
k = killerRole.name
self.room.SendText("%s the %s was killed by %s" % (self.name, self.role.name, k))
if len(self.apprentices) > 0:
for x in self.apprentices:
x.Inherit(self)
if self.lover and self.lover.alive:
self.lover.Die(self.lover)
    def Inherit(self, master):
        """Take over *master*'s duty after his death (succession rules).

        Apprentice Seer -> Seer, Traitor -> Werewolf, Wild Child ->
        Werewolf, Doppelganger -> master's role (following the master's
        own role model where needed). No-op for dead players.
        """
        if not self.alive:
            return
        role = self.role
        if self.role == Role.apprenticeSeer:
            role = Role.seer
            with self.room.lock:
                if self in self.room.apprenticeSeers:
                    self.room.apprenticeSeers.remove(self)
        elif self.role == Role.traitor:
            role = Role.werewolf
            with self.room.lock:
                if self in self.room.traitors:
                    self.room.traitors.remove(self)
        elif self.role == Role.wildChild:
            role = Role.werewolf
        elif self.role == Role.doppelganger:
            role = master.role
            if role == Role.wildChild:
                if master.master:
                    # NOTE(review): this recursive call does not return;
                    # execution falls through and re-assigns self.role to
                    # wildChild below — verify this is intended.
                    self.Inherit(master.master)
                else:
                    role = Role.werewolf
        if role == Role.doppelganger:
            # Chase the master's own role model instead of copying doppelganger.
            if master.master:
                return self.Inherit(master.master)
        if role == Role.seer:
            self.room.shouldTellBeholders = True
        self.role = role
        self.InitRole()
def SendText(self, text):
if text.startswith("[WW #"):
return self.rObj.SendText(text)
return self.rObj.SendText("[WW #%d : %s]\n%s" % (self.room.id, self.room.name, text))
def SendButtons(self, buttons):
if not buttons.columnText.startswith("[WW #"):
buttons.SetColumnText("[WW #%d : %s]\n%s" % (self.room.id, self.room.name, buttons.columnText))
buttons.SetAltTextHeader("[WW #%d : %s]\n%s" % (self.room.id, self.room.name, buttons.altTextHeader))
return self.rObj.SendButtons(buttons)
    @property
    def name(self):
        """Display name, delegated to the chat-layer user object."""
        return self.obj.name
    @property
    def rObj(self):
        """Raw messaging object, delegated to the chat-layer user object."""
        return self.obj.rObj
    def InitRole(self):
        """Send this player his role briefing and start first-night choices.

        Cupid and the role-model roles (Wild Child, Doppelganger) get a
        button menu and are registered as waiting commands; team roles are
        announced to their teammates.
        """
        role = self.role
        # The Fool must believe he is the Seer.
        if role == Role.fool:
            role = Role.seer
        self.SendText("Role #%d : %s\nTeam : %s\n%s" % (role.id, role.name, Team.toString[role.team], role.initMsg))
        if role == Role.cupid:
            self.room.AddWaitingCommand(self)
            buts = Buttons("Choose who to pair up as lovers by typing '/ww room=%d pair=<id>' using the ids below" % self.room.id, "Choose who to pair up")
            options = list(self.room.alives)
            for option in options:
                buts.AddButton(
                    option.name,
                    "/ww room=%d pair=%d" % (self.room.id, option.id),
                    "\n%s\t : %s" % (option.id, option.name)
                )
            self.SendButtons(buts)
        elif role in Role.needRoleModel:
            self.room.AddWaitingCommand(self)
            buts = Buttons("Choose your role model by typing '/ww room=%d master=<id>' using the ids below" % self.room.id, "\nChoose your role model")
            # A role model must be someone other than the player himself.
            options = [x for x in self.room.alives if x != self]
            for option in options:
                buts.AddButton(
                    option.name,
                    "/ww room=%d master=%d" % (self.room.id, option.id),
                    "\n%s\t : %s" % (option.id, option.name)
                )
            self.SendButtons(buts)
        elif role == Role.mason:
            self.TryTellMasons()
        elif role == Role.werewolf:
            self.TryTellWerewolves()
        elif role == Role.seer:
            self.TryTellBeholders()
        elif role == Role.cultist:
            self.TryTellCultists()
def HandleCommand(self, action='', eat=0, kill=0, convert=0, shoot=0, pair=0, see=0, master=0, protect=0, lynch=0, *args, **kwargs):
self.dayLastSeen = self.room.day
if eat:
return self.Eat(eat)
elif kill:
return self.Kill(kill)
elif convert:
return self.Convert(convert)
elif shoot:
if self.role == Role.gunner:
return self.ShootGunner(shoot)
elif self.role == Role.hunter:
return self.ShootHunter(shoot)
elif self.role == Role.cultistHunter:
return self.ShootCH(shoot)
else:
self.SendText("You're neither a Gunner, a Hunter, nor a <NAME>.")
return CommandResult.Failed()
elif pair:
return self.Pair(pair)
elif see:
if self.role == Role.detective:
return self.Investigate(see)
else:
return self.SeeRole(see)
elif master:
return self.ChooseMaster(master)
elif protect:
return self.Protect(protect)
elif lynch:
return self.Lynch(lynch)
elif action == 'reveal':
return self.Reveal()
elif action == 'silver':
return self.SpreadDust()
self.SendText("Invalid command")
return CommandResult.Failed()
def Lynch(self, lynchId):
if not self.alive:
self.SendText("You're dead")
return CommandResult.Failed()
if self.room.phase != RoomPhase.lynchVote:
self.SendText("It's not time to lynch")
return CommandResult.Failed()
if lynchId not in self.room.playersById:
self.SendText("Invalid ID")
return CommandResult.Failed()
if self not in self.room.votes[Role.villager.id].haventVoted and not self.room.allowRevote:
self.SendText("Either you have already choosen who to lynch or it's not your turn")
return CommandResult.Failed()
lynch = self.room.playersById[lynchId]
if lynch == self:
self.SendText("You can't lynch yourself")
return CommandResult.Failed()
if not lynch.alive:
self.SendText("The one you want to lynch is already dead")
return CommandResult.Failed()
if self.role == Role.clumsy or self.originalRole == Role.clumsy:
return self.room.votes[Role.villager.id].VoteRandom()
voteCount=1
if self.mayorRevealed:
voteCount=2
ret = self.room.votes[Role.villager.id].Vote(self, lynch, voteCount)
self.room.SendText("%s voted to lynch %s" % (self.name, lynch.name))
return ret
    def Reveal(self):
        """Mayor's reveal: from now on his lynch vote counts twice."""
        if self.role != Role.mayor:
            self.SendText("You're not a Mayor")
            return CommandResult.Failed()
        if not self.alive:
            self.SendText("You're dead")
            return CommandResult.Failed()
        if self.mayorRevealed:
            self.SendText("You already revealed that you're a Mayor")
            return CommandResult.Failed()
        if not self.myTurn:
            self.SendText("It's not your turn")
            return CommandResult.Failed()
        self.mayorRevealed = True
        self.room.SendText("%s has revealed that he is a Mayor! His votes will count twice from now on." % self.name)
        self.SendText("You have revealed that you are a Mayor")
        return CommandResult.Done()
def Eat(self, eatId):
if self.role not in Role.werewolves:
self.SendText("You're not a Werewolf")
return CommandResult.Failed()
if not self.alive:
self.SendText("You're dead")
return CommandResult.Failed()
if not self.myTurn:
self.SendText("It's not your turn")
return CommandResult.Failed()
if self.drunk:
self.SendText("Go home you're drunk")
return CommandResult.Failed()
if eatId not in self.room.playersById:
self.SendText("Invalid ID")
return CommandResult.Failed()
if self not in self.room.votes[Role.werewolf.id].haventVoted and not self.room.allowRevote:
self.SendText("You have already choosen who to eat or it's not your turn.")
return CommandResult.Failed()
eat = self.room.playersById[eatId]
if eat == self:
self.SendText("You can't eat yourself")
return CommandResult.Failed()
if eat.role in Role.werewolves:
self.SendText("You can't eat fellow Werewolf")
return CommandResult.Failed()
if not eat.alive:
self.SendText("The one you want to eat is already dead")
return CommandResult.Failed()
if self.originalRole == Role.clumsy:
return self.room.votes[Role.werewolf.id].VoteRandom()
return self.room.votes[Role.werewolf.id].Vote(self, eat)
def Kill(self, killId):
if self.role != Role.serialKiller:
self.SendText("You're not a Serial Killer")
return CommandResult.Failed()
if not self.alive:
self.SendText("You're dead")
return CommandResult.Failed()
if not self.myTurn:
self.SendText("It's not your turn")
return CommandResult.Failed()
if killId not in self.room.playersById:
self.SendText("Invalid ID")
return CommandResult.Failed()
if self.kill and not self.room.allowRevote:
self.SendText("You have already choosen who to kill.")
return CommandResult.Failed()
kill = self.room.playersById[killId]
if kill == self:
self.SendText("You can't kill yourself")
return CommandResult.Failed()
if not kill.alive:
self.SendText("The one you want to kill is already dead")
return CommandResult.Failed()
if kill == self.kill:
self.SendText("It's the same guuuyyyyyyyyy")
return CommandResult.Failed()
self.kill = kill
self.room.RemoveWaitingCommand(self)
return CommandResult.Done()
def Convert(self, convertId):
if self.role != Role.cultist:
self.SendText("You're not a Cultist")
return CommandResult.Failed()
if not self.alive:
self.SendText("You're dead")
return CommandResult.Failed()
if not self.myTurn:
self.SendText("It's not your turn")
return CommandResult.Failed()
if convertId not in self.room.playersById:
self.SendText("Invalid ID")
return CommandResult.Failed()
if self not in self.room.votes[Role.cultist.id].haventVoted and not self.room.allowRevote:
self.SendText("Either you have already choosen who to convert or it's not your turn.")
return CommandResult.Failed()
convert = self.room.playersById[convertId]
if convert == self:
self.SendText("You can't convert yourself")
return CommandResult.Failed()
if convert.role == Role.cultist:
self.SendText("You can't convert fellow Cultist")
return CommandResult.Failed()
if not convert.alive:
self.SendText("The one you want to convert is already dead")
return CommandResult.Failed()
if self.originalRole == Role.clumsy:
return self.room.votes[Role.cultist.id].VoteRandom()
return self.room.votes[Role.cultist.id].Vote(self, convert)
def ShootGunner(self, shootId):
if self.role != Role.gunner:
self.SendText("You're not a Gunner")
return CommandResult.Failed()
if not self.alive:
self.SendText("You're dead")
return CommandResult.Failed()
if not self.myTurn:
self.SendText("It's not your turn")
return CommandResult.Failed()
if self.ammo < 1:
self.SendText("You're out of bullets")
return CommandResult.Failed()
if self.done:
self.SendText("You've already shot someone")
return CommandResult.Failed()
if shootId not in self.room.playersById:
self.SendText("Invalid ID")
return CommandResult.Failed()
shoot = self.room.playersById[shootId]
if shoot == self:
self.SendText("You can't shoot yourself")
return CommandResult.Failed()
if not shoot.alive:
self.SendText("The one you want to shoot is already dead")
return CommandResult.Failed()
with self.lock:
shoot.Die(self.role)
self.ammo -= 1
self.done = True
self.room.SendText("%s the Gunner shot %s" % (self.name, shoot.name))
self.room.RemoveWaitingCommand(self)
return CommandResult.Done()
def ShootHunter(self, shootId):
if self.role != Role.hunter:
self.SendText("You're not a Hunter")
return CommandResult.Failed()
if self.alive:
self.SendText("You're not dying")
return CommandResult.Failed()
if not self.myTurn:
self.SendText("It's not your turn")
return CommandResult.Failed()
if shootId not in self.room.playersById:
self.SendText("Invalid ID")
return CommandResult.Failed()
shoot = self.room.playersById[shootId]
if shoot == self:
self.SendText("You can't shoot yourself. It will all be over soon anyway")
return CommandResult.Failed()
if not shoot.alive:
self.SendText("The one you want to shoot is already dead")
return CommandResult.Failed()
if not shoot.role.team != Team.cultist:
return CommandResult.Failed()
self.kill = shoot
self.room.RemoveWaitingCommand(self)
return CommandResult.Done()
def ShootCH(self, shootId):
if self.role != Role.cultistHunter:
self.SendText("You're not a Cultist Hunter")
return CommandResult.Failed()
if not self.alive:
self.SendText("You're dead")
return CommandResult.Failed()
if not self.myTurn:
self.SendText("It's not your turn")
return CommandResult.Failed()
if self.kill and not self.room.allowRevote:
self.SendText("You have choosen who to hunt")
return CommandResult.Failed()
if shootId not in self.room.playersById:
self.SendText("Invalid ID")
return CommandResult.Failed()
shoot = self.room.playersById[shootId]
if shoot == self:
self.SendText("You can't shoot yourself")
return CommandResult.Failed()
if not shoot.alive:
self.SendText("The one you want to shoot is already dead")
return CommandResult.Failed()
if shoot == self.kill:
self.SendText("It's the same guuuyyyyyyyyy")
return CommandResult.Failed()
self.kill = shoot
self.room.RemoveWaitingCommand(self)
return CommandResult.Done()
def SpreadDust(self):
if self.role != Role.blacksmith:
self.SendText("You're not a Blacksmith")
return CommandResult.Failed()
if not self.alive:
self.SendText("You're dead")
return CommandResult.Failed()
if not self.myTurn:
self.SendText("It's not your turn")
return CommandResult.Failed()
if self.ammo < 1:
self.SendText("You're out of silver dust")
return CommandResult.Failed()
for x in self.room.alives:
x.protection = 1
self.room.SendText("%s the Blacksmith spread Silver Dust all over the village!\nEveryone should be safe from Werewolves tonight as long as they don't do anything dangerous" % self.name)
self.room.RemoveWaitingCommand(self)
return CommandResult.Done()
def PairLovers(self, loverId):
if self.role != Role.cupid:
self.SendText("You're not a Cupid")
return CommandResult.Failed()
if not self.alive:
self.SendText("You're dead")
return CommandResult.Failed()
if not self.myTurn:
self.SendText("It's not your turn")
return CommandResult.Failed()
if loverId not in self.room.playersById:
self.SendText("Invalid ID")
return CommandResult.Failed()
pair = self.room.lovers[self]
if len(pair) > 1:
if self.room.allowRevote:
self.room.AddWaitingCommand(self)
self.room.lovers[self] = []
else:
self.SendText("You have already set a pair of lovers")
return CommandResult.Failed()
lover = self.room.playersById[loverId]
if not lover.alive:
self.SendText("The one you want to pair as Lovers is already dead")
return CommandResult.Failed()
if lover in pair:
self.SendText("You have already choosen %s" % lover.name)
if lover.lover:
self.SendText("%s already has someone he loves" % lover.name)
pair.append(lover)
if len(pair) == 1:
self.SendText("Please choose the second person")
else:
pair[0].lover = pair[1]
pair[1].lover = pair[0]
self.SendText("You have set %s and %s to be lovers!" % (pair[0].name, pair[1].name))
self.room.RemoveWaitingCommand(self)
return CommandResult.Done()
def SleepSomewhereElse(self, otherId):
if self.role != Role.harlot:
self.SendText("You're not a Harlot")
return CommandResult.Failed()
if not self.alive:
self.SendText("You're dead")
return CommandResult.Failed()
if not self.myTurn:
self.SendText("It's not your turn")
return CommandResult.Failed()
if self.houseOwner != self and not self.room.allowRevote:
self.SendText("You are already at someone else's house")
return CommandResult.Failed()
if otherId not in self.room.playersById:
self.SendText("Invalid ID")
return CommandResult.Failed()
other = self.room.playersById[otherId]
if other == self:
self.SendText("You can't shoot yourself")
return CommandResult.Failed()
if not other.alive:
self.SendText("Ya can't sleep with a dead body ya sicko")
return CommandResult.Failed()
if other == self.houseOwner:
self.SendText("It's the same guuuyyyyyyyyy")
return CommandResult.Failed()
self.houseOwner = other
other.freeloader = self
self.room.RemoveWaitingCommand(self)
return CommandResult.Done()
def Protect(self, otherId):
if self.role != Role.guardianAngel:
self.SendText("You're not a Guardian Angel")
return CommandResult.Failed()
if not self.alive:
self.SendText("You're dead")
return CommandResult.Failed()
if not self.myTurn:
self.SendText("It's not your turn")
return CommandResult.Failed()
if self.houseOwner != self and not self.room.allowRevote:
self.SendText("You are already protecting someone else's house")
return CommandResult.Failed()
if otherId not in self.room.playersById:
self.SendText("Invalid ID")
return CommandResult.Failed()
other = self.room.playersById[otherId]
if other == self:
self.SendText("You can't protect yourself")
return CommandResult.Failed()
if not other.alive:
self.SendText("You can't protect a dead person.")
return CommandResult.Failed()
if other == self.houseOwner:
self.SendText("It's the same guuuyyyyyyyyy")
return CommandResult.Failed()
if self.houseOwner and self.houseOwner.protection==2:
self.houseOwner.protection = 0
self.houseOwner = other
other.protection = 2
self.room.RemoveWaitingCommand(self)
return CommandResult.Done()
def ChooseMaster(self, masterId):
if self.role not in [Role.wildChild, Role.doppelganger]:
self.SendText("You're neither a Wild Child nor a Doppelganger")
return CommandResult.Failed()
if not self.alive:
self.SendText("You're dead")
return CommandResult.Failed()
if not self.myTurn:
self.SendText("It's not your turn")
return CommandResult.Failed()
if self.master and not self.room.allowRevote:
self.SendText("You have already choosen %s" % self.master.name)
return CommandResult.Failed()
if masterId not in self.room.playersById:
self.SendText("Invalid ID")
return CommandResult.Failed()
master = self.room.playersById[masterId]
if master == self:
self.SendText("You can't choose yourself")
return CommandResult.Failed()
if not master.alive:
self.SendText("The one you want to choose is already dead")
return CommandResult.Failed()
if master == self.master:
self.SendText("It's the same guuuyyyyyyyyy")
return CommandResult.Failed()
self.master = master
self.room.RemoveWaitingCommand(self)
return CommandResult.Done()
def SeeRole(self, seeId):
if self.role not in Role.seers:
self.SendText("You're neither a Seer nor a Sorcerer")
return CommandResult.Failed()
if not self.alive:
self.SendText("You're dead")
return CommandResult.Failed()
if not self.myTurn:
self.SendText("It's not your turn")
return CommandResult.Failed()
if self.getRole != self and not self.room.allowRevote:
self.SendText("You've already choosen someone to see through")
return CommandResult.Failed()
if seeId not in self.room.playersById:
self.SendText("Invalid ID")
return CommandResult.Failed()
see = self.room.playersById[otherId]
if see == self:
self.SendText("You can't see through yourself")
return CommandResult.Failed()
if not see.alive:
self.SendText("The one you want to see through is already dead")
return CommandResult.Failed()
if see == self.getRole:
self.SendText("It's the same guuuyyyyyyyyy")
return CommandResult.Failed()
self.getRole = see
self.room.RemoveWaitingCommand(self)
return CommandResult.Done()
def Investigate(self, suspectId):
if self.role != Role.detective:
self.SendText("You're not a Detective")
return CommandResult.Failed()
if not self.alive:
self.SendText("You're dead")
return CommandResult.Failed()
if not self.myTurn:
self.SendText("It's not your turn")
return CommandResult.Failed()
if self.getRole != self and not self.room.allowRevote:
self.SendText("You've already choosen someone to investigate")
return CommandResult.Failed()
if suspectId not in self.room.playersById:
self.SendText("Invalid ID")
return CommandResult.Failed()
suspect = self.room.playersById[otherId]
if suspect == self:
self.SendText("You can't investigate yourself")
return CommandResult.Failed()
if not suspect.alive:
self.SendText("The one you want to investigate is already dead")
return CommandResult.Failed()
if suspect == self.getRole:
self.SendText("It's the same guuuyyyyyyyyy")
return CommandResult.Failed()
self.getRole = suspect
self.room.RemoveWaitingCommand(self)
return CommandResult.Done()
 def GetRole(self):
  """Deliver the result of a pending see/investigate action.

  Seer/Fool and Sorcerer results are announced during the day; the
  Detective's during the lynch vote.  Does nothing unless a target was
  stored in self.getRole; the detective's target is cleared afterwards.
  """
  if self.getRole:
   # Drop the pending reveal if either side died in the meantime.
   if not self.alive or not self.getRole.alive:
    self.getRole = None
    return
   if self.role in [Role.fool, Role.seer]:
    if self.room.phase == RoomPhase.day:
     if self.role == Role.seer:
      role = self.getRole.role
     else:
      # The Fool is told a random role as if he were a real Seer.
      role = choice(Role.validRoles)
     self.SendText("You have seen through %s and found out that he's a %s" % (self.getRole.name, role.name))
   elif self.role == Role.sorcerer:
    if self.room.phase == RoomPhase.day:
     role = self.getRole.role
     # The Sorcerer can only positively identify Seers and Werewolves.
     if role == Role.seer or role in Role.werewolves:
      self.SendText("You have seen through %s and found out that he's a %s" % (self.getRole.name, role.name))
     else:
      self.SendText("You couldn't see through %s. At least, you know that he's neither a Werewolf nor a Seer" % self.getRole.name)
   elif self.role == Role.detective:
    if self.room.phase == RoomPhase.lynchVote:
     role = self.getRole.role
     if role != Role.hunter and role in Role.visitorKillers:
      # Snooping on a killer may tip the suspect off (tell < 4: 40%).
      tell = randint(0, 9)
      #if tell < 1 and role != Role.werewolf:
      # self.Die(role)
      # return True
      if tell < 4:
       self.getRole.SendText("%s seems to be sneaking around you very much. What's his problem?" % self.name)
     self.SendText("You have investigated %s and found out that he's a %s" % (self.getRole.name, self.getRole.role.name))
    self.getRole = None
def DoKill(self):
if self.kill:
if not self.kill.alive:
self.kill = None
return
if self.role == Role.serialKiller:
self.kill.Die(self.role)
elif self.role == Role.cultistHunter:
if self.kill.role.team == Team.cultist:
self.SendText("%s is a Cultist" % self.kill.name)
self.kill.Die(self.role)
else:
self.SendText("%s is not a Cultist" % self.kill.name)
self.kill = None
@property
def myTurn(self):
if self.role.actionPhase == ActionPhase.firstNight:
return self.room.phase == RoomPhase.night and self.room.day - self.dayRoleSet == 1
elif self.role.actionPhase == ActionPhase.anyday:
return True
else:
return self.role.actionPhase == self.room.phase
class Pending(object):
 """A deferred call: a callable plus its arguments, optionally gated on a
 player still being alive when the call is finally made."""
 def __init__(self, method, args, kwargs, requireAlive=None, *args2, **kwargs2):
  self.method = method
  # Extra positional/keyword arguments are folded into the given
  # containers *in place* (callers rely on the same list/dict objects).
  args.extend(args2)
  kwargs.update(kwargs2)
  self.args = args
  self.kwargs = kwargs
  self.requireAlive = requireAlive
 def Call(self):
  """Invoke the stored callable unless its guard player has died."""
  guard = self.requireAlive
  if guard and not guard.alive:
   return None
  return self.method(*self.args, **self.kwargs)
# Registry of live Room instances; all structures below are guarded by `lock`.
lastRoomId = 0  # monotonically increasing room id counter
roomIdByObj = {}  # chatroom object -> room id
roomByObj = {}  # chatroom object -> Room instance
roomById = {}  # room id -> Room instance
lock = Lock()  # guards id allocation and the registries above
class Room(object):
 """One running Werewolf game session bound to a single chatroom object."""
 def __init__(self, obj, creator, nightDuration=90, dayDuration=90, lynchVoteDuration=60, hunterDeathDuration=30, allowRevote=False, noVillager=True, autostart=300, quick=True, *args, **kwargs):
  """Create a room for chatroom `obj`, add `creator` as the first player,
  post the join/leave buttons and optionally schedule an automatic start.

  nightDuration / dayDuration / lynchVoteDuration / hunterDeathDuration --
   phase timers in seconds.
  allowRevote -- whether night/day choices may be changed once made.
  noVillager  -- exclude plain Villagers from the role pool.
  autostart   -- seconds until the game starts by itself (falsy disables).
  quick       -- end a phase early once everyone has acted.
  """
  global lastRoomId
  # Allocate (or reuse) a numeric id and register this room, all under the
  # module-level lock so concurrent room creation stays consistent.
  with lock:
   if obj in roomIdByObj:
    # BUG FIX: previously read roomIdByObj[self]; the registry is keyed
    # by the chatroom object, so the stored id could never be found.
    self.id = roomIdByObj[obj]
   else:
    lastRoomId += 1
    self.id = lastRoomId
   roomById[self.id] = self
   roomByObj[obj] = self
   roomIdByObj[obj] = self.id
  self.lock = Lock()
  self.obj = obj
  self.players = []
  self.playersByObj = {}
  self.day = 0
  self._1phase = RoomPhase.waiting
  self.realPhase = RoomPhase.waiting
  self.votes = {}
  self.lovers = {}
  self.nightDuration = nightDuration
  self.dayDuration = dayDuration
  self.lynchVoteDuration = lynchVoteDuration
  self.hunterDeathDuration = hunterDeathDuration
  self.werewolfKill = 1  # how many victims the pack takes per night
  self.lastCultistId = 0
  self.lastPlayerId = 0
  self.playersByRole = {}
  # Per-team / per-role player caches, maintained as roles are assigned.
  self.cultists = []
  self.werewolves = []
  self.deadHunters = []
  self.harlots = []
  self.guardianAngels = []
  self.seers = []
  self.traitors = []
  self.beholders = []
  self.apprenticeSeers = []
  self.masons = []
  self.kickeds = []
  self.cond = Condition()
  self.allowRevote = allowRevote
  self.noVillager = noVillager
  self.playersById = {}
  # (a duplicate `self.playersByObj = {}` assignment was removed here)
  self.shouldTellBeholders = False
  self.shouldTellWerewolves = False
  self.shouldTellMasons = False
  self.shouldTellCultists = False
  self.quick = quick
  self.hasCultist = False
  self.waitingCommands = []
  # Alias so shared Player/Room code can always say `self.room`.
  self.room = self
  AddAtExit(self, self.__del__)
  buts = Buttons("Werewolf game created", "Werewolf game created")
  buts.AddButton("Join", "/ww join", "\nType '/ww join' to join")
  buts.AddButton("Leave", "/ww leave", "\nType '/ww leave' to leave")
  buts.AddButton("Force Start", "/ww forcestart", "\nType '/ww forcestart' to force start")
  self.SendButtons(buts)
  self.AddPlayer(creator)
  if autostart:
   self.DelayedStart(autostart, autostart)
def __del__(self):
with self.lock:
if self.phase > RoomPhase.idling and self.phase < RoomPhase.done:
self.phase = RoomPhase.idling
self.SendText("Shutting down")
DelAtExit(self)
@property
def name(self):
return self.obj.name
def AddWaitingCommand(self, player):
if not self.quick and self.allowRevote:
return
with self.lock:
if player not in self.waitingCommands:
self.waitingCommands.append(player)
def ExtendWaitingCommand(self, players):
if not self.quick and self.allowRevote:
return
with self.lock:
for player in players:
if player not in self.waitingCommands:
self.waitingCommands.append(player)
def RemoveWaitingCommand(self, player):
if not self.quick and self.allowRevote:
return
with self.lock:
if player in self.waitingCommands:
self.waitingCommands.remove(player)
if len(self.waitingCommands) == 0:
with self.cond:
self.cond.notifyAll()
def DelayedStart(self, delay, autostart):
return self.obj.client.Thread(self._1DelayedStart, [delay, autostart])
def _1DelayedStart(self, delay, autostart):
if self.phase != RoomPhase.waiting:
return CommandResult.Failed()
with self.cond:
self.StartCountdown(time()+delay)
self.cond.wait(delay)
if self.phase == RoomPhase.waiting:
return self.Start(autostart)
def StartCountdown(self, end, s="Werewolf starting in %s", phase=RoomPhase.waiting):
return self.obj.client.Thread(self.Countdown, [end, s, phase])
 def Countdown(self, end, s="Werewolf starting in %s", phase=RoomPhase.waiting):
  """Periodically announce the time left until `end` while the room stays
  in `phase`; recurses until fewer than ~5 seconds remain.

  end   -- absolute deadline in seconds (as returned by time()).
  s     -- announcement template with one %s slot for the remaining time.
  phase -- the RoomPhase this countdown belongs to; any phase change
           silently cancels it.
  """
  if self.phase != phase:
   return
  delay = end-time()
  # NOTE(review): the +5/-5 padding below looks like message-latency
  # compensation; the exact bookkeeping is quirky -- confirm before changing.
  delay+=5
  if delay > 60:
   mins = delay//60
   sec = delay%60
   sec -= 5
   delay-=5
   if mins > 1:
    if sec > 0:
     si = "%d minutes and %d seconds" % (mins, sec)
    else:
     si = "%d minutes" % mins
   else:
    if sec > 0:
     si = "a minute and %d seconds" % sec
    else:
     si = "a minute"
   self.SendText(s % si)
   # Sleep (or be woken early) before the next announcement.
   with self.cond:
    if delay >= 120:
     self.cond.wait(60)
    elif delay > 60:
     self.cond.wait(delay-60)
    else:
     self.cond.wait(30)
  else:
   delay -= 5
   self.SendText(s % ("%d seconds" % delay))
   with self.cond:
    if delay > 30:
     self.cond.wait(30)
    elif delay > 20:
     self.cond.wait(20)
    elif delay > 5:
     self.cond.wait(delay)
  # Keep counting down while the phase is unchanged and >5s remain.
  if self.phase == phase and (end-time()) > 5:
   return self.Countdown(end, s, phase)
 def HandleCommand(self, message, action='', eat=0, kill=0, convert=0, shoot=0, pair=0, see=0, master=0, protect=0, lynch=0, *args, **kwargs):
  """Route a chat command to the room or to the issuing player.

  Validates that both bot clients are in the chatroom and that the sender
  is identifiable, handles the room-level actions (join / forcestart /
  leave), and forwards everything else to the sender's
  Player.HandleCommand with the same keyword arguments.
  """
  sender = message.sender
  chatroom = message.chatroom
  client = message.client
  # Both the OA (bot) client and the user client must be available.
  if not client.hasOA or not client.hasUser:
   message.ReplyText("Sorry Werewolf needs both OAClient and UserClient")
   return CommandResult.Failed()
  elif not chatroom.hasUser:
   message.ReplyText("Please invite the UserClient here first")
   return CommandResult.Failed()
  elif not chatroom.hasOA:
   # Try to pull the OA client into the room automatically.
   if client.oAClient.obj:
    client.oAClient.obj.InviteInto(chatroom)
    message.ReplyText("Please retry the command after the OAClient joined")
   else:
    message.ReplyText("Please invite the UserClient here first")
   return CommandResult.Failed()
  elif not sender or not sender.hasUser or (not sender.name and not sender.GetName()):
   message.ReplyText("Sorry we can't identify you.")
   return CommandResult.Failed()
  elif not sender.rObj:
   message.ReplyText("%s, please accept the group invitation" % sender.name)
   return CommandResult.Failed()
  elif action == 'join':
   self.AddPlayer(sender)
   return CommandResult.Done()
  elif sender not in self.playersByObj:
   message.ReplyText("Please join the game first.")
   return CommandResult.Failed()
  elif action == 'forcestart':
   return self.ForceStart(sender)
  elif action == 'leave':
   # NOTE(review): 'leave' reports success without calling self.Leave();
   # confirm whether leaving is handled elsewhere or simply unimplemented.
   return CommandResult.Done()
  else:
   # Any other action belongs to the individual player.
   return self.playersByObj[sender].HandleCommand(action=action, eat=eat, kill=kill, convert=convert, shoot=shoot, pair=pair, see=see, master=master, protect=protect, lynch=lynch, *args, **kwargs)
def Remove(self):
with self.lock:
self.phase = RoomPhase.idling
del roomByObj[self.obj]
del roomById[self.id]
del roomIdByObj[self.obj]
def Tell(self, about, to=None):
if not to:
to = about
for x in to:
x.SendTeamNames(about)
 @property
 def phase(self):
  # Externally visible phase; HunterDeathVote writes _1phase directly to
  # override it temporarily while realPhase keeps the underlying phase.
  return self._1phase
 @phase.setter
 def phase(self, value):
  # Assigning through the property keeps realPhase in sync.
  self._1phase = value
  self.realPhase = value
 def TellBeholders(self):
  # Reveal the Seers' identities to every Beholder.
  self.Tell(self.seers, self.beholders)
  self.shouldTellBeholders=False
 def TellWerewolves(self):
  # Introduce the Werewolves to one another.
  self.Tell(self.werewolves)
  self.shouldTellWerewolves=False
 def TellMasons(self):
  # Introduce the Masons to one another.
  self.Tell(self.masons)
  self.shouldTellMasons=False
 def TellCultists(self):
  # Introduce the Cultists to one another.
  self.Tell(self.cultists)
  self.shouldTellCultists=False
 def TryTellBeholders(self):
  # Send the introduction only while it is still pending.
  if self.shouldTellBeholders:
   self.TellBeholders()
 def TryTellWerewolves(self):
  if self.shouldTellWerewolves:
   self.TellWerewolves()
 def TryTellMasons(self):
  if self.shouldTellMasons:
   self.TellMasons()
 def TryTellCultists(self):
  if self.shouldTellCultists:
   self.TellCultists()
 def TryTellAll(self):
  # Flush every pending team introduction.
  self.TryTellBeholders()
  self.TryTellWerewolves()
  self.TryTellMasons()
  self.TryTellCultists()
def AddPlayer(self, obj):
if self.phase != RoomPhase.waiting:
self.SendText("%s, the game already started" % obj.name)
return
if obj in self.playersByObj:
self.SendText("%s, You have already joined" % obj.name)
return
p = Player(obj, self)
p.SendText("You have just joined")
self.SendText("%s have successfully joined" % p.name)
return p
def Leave(self, obj):
if self.phase != RoomPhase.waiting:
self.SendText("%s, the game has already begun" % obj.name)
return
if obj not in self.playersByObj:
self.SendText("%s, You haven't even joined" % obj.name)
return
p = self.playersByObj[obj]
self.SendText("%s has left the game" % p.name)
return p.Remove()
def SendText(self, text):
if text.startswith("[WW #"):
return self.obj.SendText(text)
return self.obj.SendText("[WW #%d]\n%s" % (self.id, text))
def SendButtons(self, buttons):
if not buttons.columnText.startswith("[WW #"):
buttons.SetColumnText("[WW #%d]\n%s" % (self.id, buttons.columnText))
buttons.SetAltTextHeader("[WW #%d]\n%s" % (self.id, buttons.altTextHeader))
return self.obj.SendButtons(buttons)
def FreeloaderDie(self):
for harlot in list(self.harlots):
if harlot.houseOwner != harlot:
if harlot.houseOwner.alive:
if harlot.houseOwner.role in Role.visitorKillers:
harlot.Die(None)
self.SendText("%s the Harlot's dead body was found outside this morning. What could've happened?" % harlot.name)
harlot.houseOwner.SendText("The Harlot visited you last night. Defend yourself." % harlot.name)
else:
self.SendText("%s the Harlot was also in %s's house last night. Guess what?" % harlot.name)
harlot.Die(harlot.houseOwner.killerRole)
for angel in list(self.guardianAngels):
if angel.houseOwner != angel:
if angel.houseOwner.role in Role.werewolves:
tell = randint(0, 4)
if tell < 2:
angel.houseOwner.SendText("%s the Guardian Angel tried to protect you lol" % angel.name)
if tell < 1:
self.SendText("%s the Guardian Angel unknowingly tried to protect a Werewolf! The Werewolf found out and killed him" % angle.name)
angel.Die(Role.werewolves)
elif angel.houseOwner.alive:
angel.houseOwner.SendText("A Guardian Angel protected your house from Werewolves last night")
def InitRoles(self):
for player in self.alives:
player.InitRole()
self.TryTellAll()
def Eat(self):
eats = self.votes[Role.werewolf.id].votees
if len(eats) == 0:
print("EATS IS EMPTY")
return
print("EATS0 %s" % eats.items())
eats = [(v, k) for k, v in eats.items() if v>0 and k.alive]
print("EATS1 %s" % eats)
self.votes[Role.werewolf.id].Clear()
if len(eats) == 0:
print("EATS IS EMPTY 2 ")
return
eats.sort(reverse=True)
m = eats[0][0]
eatsMost = [x for x in eats if x[0] == m]
lenEatsMost = len(eatsMost)
if lenEatsMost < self.werewolfKill:
eats2 = eats[len(eatsMost)]
m = eats2[0][0]
eats2 = [x for x in eats2 if x[0] == m]
eatsMost.append(choice(eats2))
self.werewolfKill=1
for werewolf in self.werewolves:
werewolf.drunk = False
hasAlpha = len([x for x in self.werewolves if x.alive and x.role == Role.alphaWolf]) > 0
for eat in eatsMost:
eat = eat[1]
if eat.protection:
self.SendText(eat.name + " was about to be attacked by a werewolf, but he got some protection")
s = "Yall went to %s's home to eat her but he got some protection lol go home." % eat.name
for ww in self.werewolves:
ww.SendText(s)
continue
elif eat.role == Role.harlot and eat.houseOwner and eat.houseOwner != eat:
s = "Yall went to %s's home to eat her but she wasn't home." % eat.name
for ww in self.werewolves:
ww.SendText(s)
continue
elif eat.role == Role.cursed:
eat.role = Role.werewolf
eat.SendText('The Werewolf tried to kill you! You, who were a Cursed, are now a Werewolf!')
eat.InitRole()
s = "Yall tried to eat %s who is actually the Cursed!\nHe is now a fellow Werewolf." % eat.name
for ww in self.werewolves:
ww.SendText(s)
continue
elif eat.role == Role.hunter:
wwLen = len(self.werewolves)
if randint(0, 9) < 3 + (wwLen-1)*2:
randomWw = self.werewolves.pop(randint(0, wwLen-1))
randomWw.Die(eat.role)
if wwLen > 1:
eat.Die(Role.werewolf)
s = 'The Werewolf attacked %s the Hunter! He managed to get %s, one of them, down, but he was outnumbered' % (eat.name, randomWw.name)
self.SendText(s)
continue
else:
self.SendText('%s the werewolf tried to attack the Hunter! However, he had the [Quickdraw] ability not on cooldown. Death to Werewolves!' % randomWw.name)
continue
else:
eat.Die(Role.werewolf)
s = "Yall ate %s who is actually the Hunter!\nFortunately, his [Quickdraw] ability is on cooldown." % eat.name
for ww in self.werewolves:
ww.SendText(s)
continue
continue
elif eat.role == Role.serialKiller:
randomWw = self.werewolves.pop(randint(0, len(self.werewolves)-1))
randomWw.Die(eat.role)
self.SendText("The Werewolves tried to attack the Serial Killer! That was a bad move. %s the Werewolf got killed instead." % randomWw.name)
continue
elif hasAlpha and randint(0,4) < 1:
add = ''
if eat.role == Role.drunk:
add = '\nBtw he was the Drunk so yall will skip one turn'
for ww in self.werewolves:
ww.drunk = True
eat.role = Role.werewolf
eat.SendText("You were bitten by the Alpha Wolf, and thus, turned into a Werewolf!")
eat.InitRole()
s = "%s was bitten by the Alpha Wolf and turned into a fellow Werewolf.%s" % (eat.name, add)
for ww in self.werewolves:
ww.SendText(s)
continue
else:
eat.Die(Role.werewolf)
if eat.role == Role.drunk:
s = "Yall ate %s the Drunk so now you're all drunk and will skip one turn" % eat.name
for ww in self.werewolves:
ww.drunk = True
ww.SendText(s)
continue
def GetRole(self):
for player in list(self.alives):
player.GetRole()
def DoKill(self):
for player in list(self.alives):
player.DoKill()
 def Lynch(self):
  """Resolve the day's lynch vote: tally, handle ties, the Prince reveal
  and the Tanner win condition, then kill the chosen player."""
  print("LYNCH")
  lynches = self.votes[Role.villager.id].votees
  if len(lynches) == 0:
   self.SendText("Vote lah kampret")
   return
  print("LYNCH0 %s" % lynches)
  # Keep only positively voted, still-living targets as (votes, player).
  lynches = [(v, k) for k, v in lynches.items() if v>0 and k.alive]
  print("LYNCH1 %s" % lynches)
  self.votes[Role.villager.id].Clear()
  lynchesLen = len(lynches)
  if lynchesLen == 0:
   self.SendText("Somehow people yall voted for are dead")
   return
  elif lynchesLen > 1:
   lynches.sort(reverse=True)
   # A tie between the two most-voted players cancels the lynch.
   if lynches[0][0] == lynches[1][0]:
    self.SendText("MICIN")
    return
  lynch = lynches[0][1]
  if lynch.role == Role.prince:
   # The Prince survives his first lynch by revealing himself;
   # a second lynch (princeRevealed already True) falls through to Die.
   if not lynch.princeRevealed:
    self.SendText("Yall were gonna lynch %s but then he revealed that he's the Prince! Yall can rethink your decision" % lynch.name)
    lynch.princeRevealed = True
    return
  elif lynch.role == Role.tanner:
   # Lynching the Tanner is his win condition -- game ends immediately.
   self.SendText("YALL LYNCHED %s THE TANNER" % lynch.name.upper())
   return self.Win(Team.tanner)
  lynch.Die(Role.villager)
  print("LYNCHDONE")
  return
def Convert(self):
converts = self.votes[Role.cultist.id].votees
if len(converts) == 0:
return
converts = [(v, k) for k, v in converts.items() if v>0 and k.alive]
self.votes[Role.cultist.id].Clear()
if len(converts) == 0:
return
converts.sort(reverse=True)
converts = [x for x in converts if x[0] == converts[0][0]]
convert = choice(converts)[1]
cultistLen = len(self.cultists)
if convert.role == Role.cultistHunter:
if cultistLen > 0:
newestCultist = None
newestCultist = self.cultists[-1]
for cultist in self.cultists:
if cultist.cultistId > newestCultist.cultistId:
newestCultist = cultist
newestCultist.Die(convert.role)
self.SendText(newestCultist.name + " was killed by a Cultist Hunter because the cult unknowingly tried to convert the Cultist Hunter lol")
return
elif convert.role == Role.hunter and cultistLen > 0 and randint(0,3) < 1:
randomCultist = self.cultists.pop(randint(0, cultistLen-1))
randomCultist.Die(convert.role)
self.SendText("The cult tried to convert the Hunter. They failed and even got %s, one of their members, down." % randomCultist.name)
return
elif convert.role not in Role.unconvertible:
convert.role = Role.cultist
convert.SendText("You have been converted into a Cultist.")
convert.InitRole()
msg = convert.name + " is now a fellow Cultist."
for cultist in self.cultists:
cultist.SendText(msg)
return
def Status(self):
#alives = [x for x in self.players if x.alive]
#deads = [x for x in self.players if x.alive == False]
alives = self.alives
deads = self.deads
s = 'Day : %d\nPhase : %s\nPlayers:' % (self.day, RoomPhase.toString[self.phase].title())
for x in alives:
s = s + "\n%s, alive" % x.obj.name
for x in deads:
s = s + "\n%s, %s, dead" % (x.obj.name, x.role.name)
self.SendText(s)
return CommandResult.Done()
def Win(self, winningTeam):
#alives = [x for x in self.players if x.alive]
#deads = [x for x in self.players if x.alive == False]
alives = self.alives
deads = self.deads
if winningTeam:
aliveWinners = [x for x in alives if x.role.team == winningTeam]
aliveLosers = [x for x in alives if x.role.team != winningTeam]
deadWinners = [x for x in deads if x.role.team == winningTeam]
deadLosers = [x for x in deads if x.role.team != winningTeam]
s = 'Game over\nDay : %d\nPhase : %s\nWinners:' % (self.day, RoomPhase.toString[self.phase].title())
for x in aliveWinners:
s = s + "\n%s, %s, alive, won" % (x.obj.name, x.role.name)
for x in deadWinners:
s = s + "\n%s, %s, dead, won" % (x.obj.name, x.role.name)
s = s + "\nLosers:"
for x in aliveLosers:
s = s + "\n%s, %s, alive, lost" % (x.obj.name, x.role.name)
for x in deadLosers:
s = s + "\n%s, %s, dead, lost" % (x.obj.name, x.role.name)
self.SendText(s)
else:
s = 'Game over\nDay : %d\nPhase : %s\nLosers:' % (self.day, RoomPhase.toString[self.phase])
for x in alives:
s = s + "\n%s, %s, alive, lost" % (x.obj.name, x.role.name)
for x in deads:
s = s + "\n%s, %s, dead, lost" % (x.obj.name, x.role.name)
self.SendText(s)
for x in self.kickeds:
self.x.obj.InviteInto(self.obj)
self.phase = RoomPhase.done
self.Remove()
return True
 def HunterDeathVote(self):
  """Give every just-killed Hunter a timed chance to shoot someone before
  dying; temporarily switches the room into the `hunter` phase.

  Returns True after the vote window has been processed (None when no
  hunter died this round).
  """
  if len(self.deadHunters) == 0:
   return
  realPhase = self.phase
  # Bypass the property setter so realPhase keeps the underlying phase.
  self._1phase = RoomPhase.hunter
  with self.cond:
   self.cond.notifyAll()
  players = list(self.deadHunters)
  self.deadHunters = []
  self.room.ExtendWaitingCommand(players)
  for player in players:
   # Offer each dying hunter a target list as buttons.
   buts = Buttons("You can choose shoot someone by typing '/ww room=%d shoot=<id>' with the ids below." % self.id, "You can choose to shoot someone.")
   candidates = [x for x in self.alives if x != player]
   for x in candidates:
    buts.AddButton(
     x.name,
     "/ww room=%d shoot=%d" % (self.id, x.id),
     "\n%s\t : %s" % (x.id, x.name)
    )
   player.SendButtons(buts)
  with self.cond:
   self.SendText("Some hunters are gonna die! They have %g seconds to shoot as death comes closer" % self.hunterDeathDuration)
   self.StartCountdown(time()+self.hunterDeathDuration, "Dying hunters have %s left", RoomPhase.hunter)
   self.cond.wait(self.hunterDeathDuration)
  self.DoKill()
  self.phase = realPhase
  return True
def Night(self):
with self.lock:
self.phase = RoomPhase.night
self.waitingCommands = []
with self.cond:
self.cond.notifyAll()
if self.hasCultist:
self.room.ExtendWaitingCommand(self.cultists)
candidates = [x for x in self.alives if x.role != Role.cultist]
self.votes[Role.cultist.id].Set(self.cultists, candidates)
buts = Buttons("You can vote to convert someone into a cultist by typing '/ww room=%d convert=<id>' with the ids below" % self.id, "You can vote to convert someone into a cultist")
for x in candidates:
buts.AddButton(
x.name,
"/ww room=%d convert=%d" % (self.id, x.id),
"\n%s\t : %s" % (x.id, x.name)
)
for x in self.cultists:
x.SendButtons(buts)
if self.hasWerewolf:
self.room.ExtendWaitingCommand(self.werewolves)
candidates = [x for x in self.alives if x.role not in Role.werewolves]
self.votes[Role.werewolf.id].Set(self.werewolves, candidates)
buts = Buttons("You can vote to eat someone by typing '/ww room=%d eat=<id>' with the ids below" % self.id, "You can vote to eat someone")
for x in candidates:
buts.AddButton(
x.name,
"/ww room=%d eat=%d" % (self.id, x.id),
"\n%s\t : %s" % (x.id, x.name)
)
for x in self.werewolves:
x.SendButtons(buts)
for player in self.alives:
player.getRole = None
player.done = False
if player.myTurn:
if player.role == Role.cultist:
pass
elif player.role == Role.guardianAngel:
self.room.AddWaitingCommand(player)
buts = Buttons("You can choose to protect someone's house from Werewolves by typing '/ww room=%d protect=<id>' with the ids below" % self.id, "You can choose to protect someone's house from Werewolves")
candidates = [x for x in self.alives if x != player]
for x in candidates:
buts.AddButton(
x.name,
"/ww room=%d protect=%d" % (self.id, x.id),
"\n%s\t : %s" % (x.id, x.name)
)
player.SendButtons(buts)
elif player.role == Role.harlot:
self.room.AddWaitingCommand(player)
buts = Buttons("You can choose to sleep in someone's house by typing '/ww room=%d sleep=<id>' with the ids below" % self.id, "You can choose to sleep in someone's house")
if self.allowRevote:
candidates = [x for x in self.alives]
else:
candidates = [x for x in self.alives if x != player]
for x in candidates:
buts.AddButton(
x.name,
"/ww room=%d sleep=%d" % (self.id, x.id),
"\n%s\t : %s" % (x.id, x.name)
)
player.SendButtons(buts)
elif player.role == Role.cultistHunter:
self.room.AddWaitingCommand(player)
buts = Buttons("You can choose hunt someone by typing '/ww room=%d shoot=<id>' with the ids below. If he's a cultist, he will die." % self.id, "You can choose to hunt someone. If he's a cultist, he will die.")
candidates = [x for x in self.alives if x != player]
for x in candidates:
buts.AddButton(
x.name,
"/ww room=%d shoot=%d" % (self.id, x.id),
"\n%s\t : %s" % (x.id, x.name)
)
player.SendButtons(buts)
elif player.role == Role.serialKiller:
self.room.AddWaitingCommand(player)
buts = Buttons("You can choose kill someone by typing '/ww room=%d kill=<id>' with the ids below" % self.id, "You can choose to kill someone")
candidates = [x for x in self.alives if x != player]
for x in candidates:
buts.AddButton(
"\n%s\t : %s" % (x.id, x.name),
x.name,
"/ww room=%d kill=%d" % (self.id, x.id)
)
player.SendButtons(buts)
elif player.role in Role.seers:
self.room.AddWaitingCommand(player)
buts = Buttons("You can choose see through someone's role by typing '/ww room=%d see=<id>' with the ids below" % self.id, "You can choose to see through someone's role")
candidates = [x for x in self.alives if x != player]
for x in candidates:
buts.AddButton(
x.name,
"/ww room=%d see=%d" % (self.id, x.id),
"\n%s\t : %s" % (x.id, x.name)
)
player.SendButtons(buts)
elif player.role in Role.werewolves:
pass
else:
self.SendText("MISSED NIGHT ROLE %s" % player.role.name)
self.Status()
with self.cond:
self.cond.notifyAll()
with self.cond:
self.SendText("Yall night players have %g seconds to do your stuff" % self.nightDuration)
self.StartCountdown(time()+self.nightDuration, "Night players have %s left", RoomPhase.night)
self.cond.wait(self.nightDuration)
if self.phase == RoomPhase.night:
with self.lock:
self.Eat()
self.DoKill()
if self.CheckWin():
return True
self.Convert()
self.GetRole()
self.DoKill()
if self.CheckWin():
return True
while len(self.deadHunters) > 0:
if self.HunterDeathVote() and self.CheckWin():
return True
return self.Day()
 def Day(self):
  """Run one day phase: reset per-night state, offer the day actions
  (Mayor reveal, Blacksmith silver, Gunner shot, Detective), wait out the
  timer, then resolve and advance to the lynch vote.

  Returns True when the game ended during resolution.
  """
  with self.lock:
   self.phase = RoomPhase.day
   self.day+=1
   self.waitingCommands = []
  with self.cond:
   self.cond.notifyAll()
  for player in list(self.alives):
   # Night-time protection expires at dawn.
   player.protection = 0
   # Players idle for more than two days are removed from the game.
   if self.day - player.dayLastSeen > 2:
    player.Die(Role.none)
    continue
   if player.role == Role.harlot:
    # The Harlot returns to her own house each morning.
    player.houseOwner = player
    player.freeloader = None
   elif player.myTurn:
    if player.role == Role.mayor and not player.mayorRevealed:
     self.room.AddWaitingCommand(player)
     buts = Buttons("You can choose to reveal your role as a Mayor by typing '/ww room=%d action=reveal'" % self.id, "You can choose to reveal your role as a Mayor")
     buts.AddButton(
      "Reveal",
      "/ww room=%d action=reveal" % self.id,
      ""
     )
     player.SendButtons(buts)
    elif player.role == Role.blacksmith:
     if player.ammo:
      self.room.AddWaitingCommand(player)
      player.done = False
      buts = Buttons("You can choose to spread silver dust all over the village by typing '/ww room=%d action=silver'. You can do it %d times" % (self.id, player.ammo), "You can choose to spread silver dust all over the village. You can do it %d times" % player.ammo)
      buts.AddButton(
       "Reveal",
       "/ww room=%d action=silver" % self.id,
       ""
      )
      player.SendButtons(buts)
    elif player.role == Role.gunner:
     if player.ammo:
      self.room.AddWaitingCommand(player)
      player.done = False
      buts = Buttons("You can choose shoot someone by typing '/ww room=%d shoot=<id>' with the ids below. You have %d bullets" % (self.id, player.ammo), "You can choose to shoot someone. You have %d bullets" % player.ammo)
      candidates = [x for x in self.alives if x != player]
      for x in candidates:
       buts.AddButton(
        x.name,
        "/ww room=%d shoot=%d" % (self.id, x.id),
        "\n%s\t : %s" % (x.id, x.name)
       )
      player.SendButtons(buts)
    elif player.role == Role.detective:
     self.room.AddWaitingCommand(player)
     buts = Buttons("You can choose investigate someone's role by typing '/ww room=%d see=<id>' with the ids below" % self.id, "You can choose to investigate someone's role")
     candidates = [x for x in self.alives if x != player]
     for x in candidates:
      buts.AddButton(
       x.name,
       "/ww room=%d see=%d" % (self.id, x.id),
       "\n%s\t : %s" % (x.id, x.name)
      )
     player.SendButtons(buts)
    else:
     print("MISSED DAY ROLE %s" % player.role.name)
  self.Status()
  # Wait for the day to run out (or for everyone to finish early).
  with self.cond:
   self.SendText("Yall day players have %g seconds to do your stuff" % self.dayDuration)
   self.StartCountdown(time()+self.dayDuration, "Day players have %s left", RoomPhase.day)
   self.cond.wait(self.dayDuration)
  if self.phase == RoomPhase.day:
   with self.lock:
    self.DoKill()
    if self.CheckWin():
     return True
    self.GetRole()
    self.DoKill()
    if self.CheckWin():
     return True
   while len(self.deadHunters) > 0:
    if self.HunterDeathVote() and self.CheckWin():
     return True
   return self.LynchVote()
 def LynchVote(self):
  """Run the lynch-vote phase: everyone alive votes, the timer runs out
  (or all votes arrive), the lynch is resolved, and the game moves on to
  the next Night.  Returns True when the game ended during resolution.
  """
  with self.lock:
   self.phase = RoomPhase.lynchVote
   self.waitingCommands = []
  with self.cond:
   self.cond.notifyAll()
  # Everyone alive both votes and can be voted for.
  self.votes[Role.villager.id].Set(self.alives, self.alives)
  buts = Buttons("You can vote to lynch someone by typing '/ww room=%d lynch=<id>' with the ids below. Though you can't be dumb enough to vote for yourself, right?" % self.id, "You can vote to lynch someone. Though you can't be dumb enough to vote for yourself, right?")
  for x in self.alives:
   buts.AddButton(
    x.name,
    "/ww room=%d lynch=%d" % (self.id, x.id),
    "\n%s\t : %s" % (x.id, x.name)
   )
  for x in self.alives:
   x.SendButtons(buts)
  self.Status()
  with self.cond:
   self.SendText("Yall have %g seconds to vote to lynch someone" % self.lynchVoteDuration)
   self.StartCountdown(time()+self.lynchVoteDuration, "Yall have %s left", RoomPhase.lynchVote)
   self.cond.wait(self.lynchVoteDuration)
  if self.phase == RoomPhase.lynchVote:
   with self.lock:
    self.Lynch()
    self.DoKill()
    if self.CheckWin():
     return True
    self.GetRole()
    self.DoKill()
    if self.CheckWin():
     return True
   while len(self.deadHunters) > 0:
    if self.HunterDeathVote() and self.CheckWin():
     return True
   return self.Night()
def ForceStart(self, starter):
with self.lock:
if starter not in self.playersByObj:
starter.SendText("%s, you haven't joined, thus, have no authority to force start the game" % starter.name)
return CommandResult.Failed()
return self.Start(False)
def Start(self, autostart=True):
    """Begin a Werewolf round: validate the lobby, deal roles, spawn the night loop.

    Args:
        autostart: True when invoked by the auto-start timer, which enforces
            the 5-player minimum.  ForceStart() passes False to bypass it.

    Returns:
        CommandResult.Done() when the game starts, CommandResult.Failed()
        when the lobby is not in a startable state.
    """
    with self.lock:
        # --- lobby validation ---------------------------------------------
        if self.phase == RoomPhase.idling:
            self.SendText("No Werewolf game session")
            return CommandResult.Failed()
        elif self.phase != RoomPhase.waiting:
            self.SendText("Werewolf game already started")
            return CommandResult.Failed()
        count = len(self.players)
        if count < 5 and autostart:
            self.SendText('Need at least 5 players to start')
            return CommandResult.Failed()
        if count < 1:
            self.SendText("No players. I should've removed the room though. Removing it.")
            self.phase = RoomPhase.idling
            return CommandResult.Failed()
        self.phase = RoomPhase.starting
        with self.cond:
            self.cond.notifyAll()
        self.SendText('Starting werewolf game')
        # --- build the role deck ------------------------------------------
        roles = list(Role.validRoles)
        roles.extend([Role.mason, Role.mason])
        # Alpha wolf / wolf cub are meant to be dealt via specialWW below,
        # never from the common deck.
        roles.remove(Role.alphaWolf)
        roles.remove(Role.wolfCub)
        if self.noVillager:
            roles.remove(Role.villager)
        else:
            roles.extend([Role.villager, Role.villager, Role.villager, Role.villager, Role.mason])
        shuffle(roles)
        self.alives = list(self.players)
        self.deads = []
        players = list(self.players)
        shuffle(players)
        wwCount = int(count//7)+1          # roughly one werewolf per 7 players
        hasSeer = False
        hasCultist = False
        specialWW = [Role.alphaWolf, Role.wolfCub]
        specialWWLen = 2
        wwTeamNonWWCount = int(count//6)   # cap on non-wolf werewolf-team roles
        mason = 0
        for player in players:
            player.alive = Alive.alive
            role = Role.villager
            if wwCount > 0:
                # Deal the werewolves first (players list is shuffled).
                role = Role.werewolf
                # NOTE(review): specialWWLen starts at 2 and only decreases, so
                # this branch is unreachable and alpha wolf / wolf cub are never
                # dealt; the condition was probably meant to be
                # "specialWWLen > 0".  Left as-is to preserve behavior -- confirm.
                if specialWWLen > 2:
                    a = randint(0, 14)
                    if a < 2:
                        if specialWWLen == 1:
                            role = specialWW[0]
                        else:
                            role = specialWW.pop(randint(0, 1))
                        specialWWLen -= 1
                wwCount -= 1
            else:
                rlen = len(roles)
                if rlen == 0:
                    # Deck exhausted: fall back to filler roles.
                    if self.noVillager:
                        role = Role.mason
                    else:
                        role = choice([Role.mason, Role.villager])
                else:
                    role = roles[randint(0, rlen-1)]
                    if role.team == Team.werewolf:
                        # Limit werewolf-team support roles; once the cap is
                        # hit, strip them from the deck and re-draw.
                        if wwTeamNonWWCount > 1:
                            wwTeamNonWWCount -= 1
                        else:
                            roles = [x for x in roles if x.team != Team.werewolf]
                            rlen = len(roles)
                            if rlen == 0:
                                if self.noVillager:
                                    role = Role.mason
                                else:
                                    role = choice([Role.mason, Role.villager])
                            while role.team == Team.werewolf:
                                role = choice(roles)
                # Masons come in pairs: a lone mason forces the next deal.
                if mason == 1:
                    role = Role.mason
                if role == Role.mason:
                    mason += 1
                # Dependent roles: beholder needs a seer, cultist hunter a cultist.
                if role == Role.beholder and not hasSeer:
                    role = Role.seer
                if role == Role.seer:
                    hasSeer = True
                if role == Role.cultistHunter and not hasCultist:
                    role = Role.cultist
                if role == Role.cultist:
                    hasCultist = True
                if rlen > 0:
                    roles.remove(role)
            player.role = role
            player.originalRole = role
        self.hasCultist = self.hasCultist or hasCultist
        self.day = 0
        self.votes[Role.villager.id] = Vote(self)
        self.votes[Role.werewolf.id] = Vote(self)
        # BUG FIX: was "self.room.votes[...]" -- this method runs on the room
        # itself (note "Vote(self)") and the room has no ".room" attribute;
        # now consistent with the two assignments above.
        self.votes[Role.cultist.id] = Vote(self)
        self.InitRoles()
        self.obj.client.Thread(self.Night)
        return CommandResult.Done()
def CheckWin(self):
    """Evaluate every end-of-game condition and finish the game if one holds.

    Returns:
        True when a win was declared (self.Win(...) was invoked, which is
        expected to tear the game down), False when play continues.
    """
    ww = len(self.werewolves)
    nonww = len(self.alives) - ww
    if ww == 0:
        # Werewolf-team members who are not actual wolves (support roles).
        wwteamnonww = len([x for x in self.alives if x.role.team == Team.werewolf and x.role not in Role.werewolves])
        if nonww == 0:
            if wwteamnonww == 0:
                return self.Win(Team.none)
            else:
                return self.Win(Team.werewolf)
        elif wwteamnonww > 0:
            # Wolves are gone but their sympathizers live on: keep playing.
            return False
        else:
            teams = list(set(x.role.team for x in self.alives))
            if len(teams) == 1:
                # Solo teams (doppelganger, tanner) never get a team victory.
                if teams[0] == Team.doppelganger or teams[0] == Team.tanner:
                    return self.Win(Team.none)
                return self.Win(teams[0])
            hasSK = False
            for team in teams:
                if team == Team.serialKiller:
                    hasSK = True
                    break
            if hasSK:
                if nonww < 3:
                    # BUG FIX: originally iterated "self.alive", which does not
                    # exist on the room; the living-player list is "self.alives"
                    # everywhere else in this class.
                    for alive in self.alives:
                        if alive.role != Role.serialKiller:
                            alive.SendText("You're left alone with the Serial Killer. You know what comes next, right?")
                            alive.Die(Role.serialKiller)
                    return self.Win(Team.serialKiller)
                return False
            if len(self.cultists) == nonww:
                return self.Win(Team.cultist)
            else:
                return self.Win(Team.villager)
        # NOTE(review): unreachable -- every branch above returns.
        return True
    if nonww - ww < 1:
        # Wolves have reached parity with everyone else.
        if ww == 1:
            nonwwteam = [x for x in self.alives if x.role.team != Team.werewolf]
            if len(nonwwteam) == 0:
                return self.Win(Team.werewolf)
            role = nonwwteam[0].role
            if role == Role.hunter:
                # Hunter vs lone wolf: mutual kill, nobody wins.
                for alive in list(self.alives):
                    alive.SendText("Only a Hunter and a Werewolf is left. Yall engaged in a deadly battle, which brought death to both of you.")
                    if alive.role == Role.hunter:
                        alive.Die(Role.werewolf)
                    else:
                        alive.Die(Role.hunter)
                self.SendText("The werewolf attacks the Hunter! However, the Hunter doesn't let go of his gun and keep on shooting him. Both eventually dies.")
                return self.Win(Team.none)
            elif role == Role.serialKiller:
                # NOTE(review): cur/conn/roomId are not defined in this scope;
                # this branch raises NameError if reached.  Looks like leftover
                # code from a DB-backed variant -- confirm before relying on it.
                cur.execute("UPDATE WerewolfPlayers SET alive=FALSE WHERE roomId=%s AND role!=21 and alive RETURNING lineId", (roomId,))
                deads = cur.fetchall()
                conn.commit()
                cur.execute("SELECT lineId FROM WerewolfPlayers WHERE roomId=%s AND alive", (roomId,))
                alives = cur.fetchall()
                for alive in list(self.alives):
                    if alive.role == Role.serialKiller:
                        alive.SendText("ONE STEP TOWARDS WORLD PEACE")
                    else:
                        alive.SendText("THE KILLER KILLER WILL RID THE WORLD OF MURDER")
                        alive.Die(Role.serialKiller)
                self.SendText("THE KILLER KILLER WILL RID THE WORLD OF MURDER")
                return self.Win(Team.serialKiller)
            elif role == Role.gunner and nonwwteam[0].ammo and self.realPhase == RoomPhase.day:
                # NOTE(review): same undefined cur/conn/roomId issue as above.
                cur.execute("UPDATE WerewolfPlayers SET alive=FALSE WHERE roomId=%s AND role!=7 and alive RETURNING lineId", (roomId,))
                deads = cur.fetchall()
                conn.commit()
                for alive in list(self.alives):
                    if alive.role == Role.gunner:
                        alive.SendText("You are left alone with someone whom you are very sure to be a Werewolf. You are really lucky that it's daytime and you still have some bullets")
                    else:
                        alive.SendText("You were left alone with the Gunner. It was obvious to him that you were the Werewolf. Lucky for him that he still have some bullets and it was the day.")
                self.SendText("The Gunner is our hero now.")
                return self.Win(Team.villager)
        # A surviving serial killer blocks the werewolf victory.
        if len([x for x in self.alives if x.role == Role.serialKiller]) > 0:
            return False
        for alive in list(self.alives):
            if alive.role not in Role.werewolves:
                alive.SendText("You're all out of time, and number.")
                alive.Die(Role.werewolf)
        return self.Win(Team.werewolf)
    return False
class Vote(object):
    """One ballot box for a game phase (lynch, werewolf kill, cultist convert).

    Tracks who may vote, who may be voted for, and the running tallies.
    All mutation happens under self.lock.
    """
    def __init__(self, room):
        self.canVote = []        # players allowed to cast a vote
        self.haventVoted = []    # eligible voters who have not voted yet
        self.candidates = []     # players who may be voted for
        self.voters = {}         # voter -> chosen candidate
        self.votees = {}         # candidate -> accumulated vote count
        self.room = room
        self.lock = Lock()
    @property
    def allowRevote(self):
        # Revote policy is owned by the room.
        return self.room.allowRevote
    def VoteRandom(self, voter):
        """Cast a vote for a uniformly random candidate on *voter*'s behalf."""
        return self.Vote(voter, choice(self.candidates))
    def Vote(self, voter, votee, voteCount=1):
        """Record *voter*'s vote for *votee*, weighted by *voteCount*.

        Rejects ineligible voters, invalid candidates, duplicate votes for
        the same candidate, and (when revoting is disabled) any second vote.
        Returns CommandResult.Done() / CommandResult.Failed().
        """
        with self.lock:
            if voter not in self.canVote:
                voter.SendText("You have no authority to vote this one")
                return CommandResult.Failed()
            if votee not in self.candidates:
                voter.SendText("You can't vote for %s" % votee.name)
                return CommandResult.Failed()
            elif voter in self.voters:
                if self.voters[voter] == votee:
                    voter.SendText("It's the same guuuyyyyyyyyy")
                    return CommandResult.Failed()
                elif self.allowRevote:
                    # Take the old vote back before recording the new one.
                    self.votees[self.voters[voter]] -= voteCount
                else:
                    voter.SendText("You have already voted for %s" % self.voters[voter].name)
                    return CommandResult.Failed()
            self.voters[voter] = votee
            self.votees[votee] += voteCount
            if voter in self.haventVoted:
                self.haventVoted.remove(voter)
                self.room.RemoveWaitingCommand(voter)
            voter.SendText("You chose %s" % votee.name)
            return CommandResult.Done()
    @property
    def everyoneVoted(self):
        return len(self.haventVoted) == 0
    def _reset_locked(self):
        # Wipe all ballot state.  Caller must already hold self.lock.
        self.voters.clear()
        self.votees.clear()
        self.haventVoted = []
        self.canVote = []
        self.candidates = []
    def Clear(self):
        """Discard all votes, voters and candidates."""
        with self.lock:
            self._reset_locked()
    def Set(self, voters, candidates):
        """Open the ballot: *voters* may each vote for one of *candidates*."""
        with self.lock:
            # BUG FIX: this used to call self.Clear() while already holding
            # self.lock, deadlocking if Lock is not reentrant.  The shared
            # reset now lives in _reset_locked(), taken under a single acquire.
            self._reset_locked()
            self.haventVoted = list(voters)
            self.canVote = list(voters)
            self.candidates = list(candidates)
            for x in self.candidates:
                self.votees[x] = 0
            self.room.ExtendWaitingCommand(voters)
def Werewolf(message, options, continuous=CommandContinuousCallType.notContinuous, images=None, text='', room=0, action='', start=True, night=90, day=90, lynchvote=60, hunter=30, revote=False, villager=False, quick=True, autostart=300, eat=0, kill=0, convert=0, shoot=0, pair=0, see=0, master=0, protect=0, lynch=0, *args, **kwargs):
    """Entry point for the '/ww' chat command.

    Dispatch: 'create' builds a new Room for the chatroom; 'join', 'leave',
    'forcestart' and 'status' act on the room bound to the current chatroom;
    any other action with an explicit room id is forwarded to that room's
    HandleCommand.  Timing parameters (night/day/lynchvote/hunter) are in
    seconds.  Returns a CommandResult.
    """
    if continuous == CommandContinuousCallType.notContinuous:
        if IsEmpty(action):
            # A bare "/ww <word>" lands in `text` rather than `action`.
            action = text
        sender = message.sender
        chatroom = message.chatroom
        client = message.client
        # --- environment sanity checks: both bot accounts must be present ---
        if not client.hasOA or not client.hasUser:
            message.ReplyText("Sorry Werewolf needs both OAClient and UserClient")
            return CommandResult.Failed()
        elif not chatroom.hasUser:
            message.ReplyText("Please invite the UserClient here first")
            return CommandResult.Failed()
        elif not chatroom.hasOA:
            if client.oAClient.obj:
                # Pull the OA account in ourselves, then ask for a retry.
                client.oAClient.obj.InviteInto(chatroom)
                message.ReplyText("Please retry the command after the OAClient joined")
            else:
                message.ReplyText("Please invite the UserClient here first")
            return CommandResult.Failed()
        elif not sender or not sender.hasUser or (not sender.name and not sender.GetName()):
            message.ReplyText("Sorry we can't identify you.")
            return CommandResult.Failed()
        elif not sender.rObj:
            message.ReplyText("%s, please type '/robj' in a room consisting of only you, our UserClient, and our OAClient" % sender.name)
            #message.ReplyText("%s, please accept the group invitation" % sender.name)
            return CommandResult.Failed()
        if action == 'create':
            if chatroom in roomByObj:
                room = roomByObj[chatroom]
                if room.phase == RoomPhase.waiting:
                    room.SendText("WW already created. To forcestart, type '/ww forcestart'")
                    return CommandResult.Done()
                elif room.phase > RoomPhase.waiting and room.phase < RoomPhase.done:
                    room.SendText("WW is running")
                    return CommandResult.Done()
            # No live room for this chatroom: build a fresh one (the Room
            # constructor registers itself in the lookup tables).
            room = Room(message.chatroom, message.sender, nightDuration=night, dayDuration=day, lynchVoteDuration=lynchvote, hunterDeathDuration=hunter, allowRevote=revote, noVillager=not villager, autostart=autostart, quick=quick, *args, **kwargs)
            return CommandResult.Done()
        elif room:
            # Explicit room id: forward in-game actions to that room.
            if room not in roomById:
                message.ReplyText("Invalid room id")
                return CommandResult.Failed()
            else:
                return roomById[room].HandleCommand(message=message, action=action, eat=eat, kill=kill, convert=convert, shoot=shoot, pair=pair, see=see, master=master, protect=protect, lynch=lynch, *args, **kwargs)
        else:
            # No room id: act on the room bound to this chatroom.
            if chatroom not in roomByObj:
                message.ReplyText("No Werewolf game session or you need to provide 'room' argument")
                return CommandResult.Failed()
            room = roomByObj[chatroom]
            if action == 'join':
                room.AddPlayer(sender)
                return CommandResult.Done()
            elif action == 'leave':
                room.Leave(sender)
                return CommandResult.Done()
            elif action == 'forcestart':
                return room.ForceStart(sender)
            elif action == 'status':
                return room.Status()
            else:
                message.ReplyText("Invalid command or you need to provide 'room' argument")
                return CommandResult.Failed()
    else:
        # Continuous (follow-up) invocations are not supported.
        return CommandResult.Failed()
# Register the "/ww" hybrid command; the Werewolf() function above handles
# all argument parsing and dispatch.
werewolfCmd = ContinuousHybridCommand(
    'ww',
    Werewolf,
    desc='Awoo',
    images=['the image']
)
|
[
"line2.models.command.ContinuousHybridCommand",
"random.randint",
"line2.models.command.CommandResult.Done",
"random.shuffle",
"threading.Condition",
"line2.utils.AddAtExit",
"random.choice",
"time.time",
"line2.utils.DelAtExit",
"line2.utils.IsEmpty",
"line2.models.messages.Buttons",
"line2.models.command.CommandResult.Failed",
"line2.utils.Lock",
"line2.utils.Acquire"
] |
[((49243, 49249), 'line2.utils.Lock', 'Lock', ([], {}), '()\n', (49247, 49249), False, 'from line2.utils import IsEmpty, AddReverseDict, Lock, AddAtExit, DelAtExit, Acquire\n'), ((101943, 102017), 'line2.models.command.ContinuousHybridCommand', 'ContinuousHybridCommand', (['"""ww"""', 'Werewolf'], {'desc': '"""Awoo"""', 'images': "['the image']"}), "('ww', Werewolf, desc='Awoo', images=['the image'])\n", (101966, 102017), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((26458, 26480), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (26478, 26480), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((28609, 28629), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (28627, 28629), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((31372, 31392), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (31390, 31392), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((34194, 34214), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (34212, 34214), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((35294, 35314), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (35312, 35314), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((36554, 36574), 
'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (36572, 36574), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((37415, 37435), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (37433, 37435), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((39071, 39091), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (39089, 39091), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((40408, 40428), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (40426, 40428), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((41849, 41869), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (41867, 41869), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((43188, 43208), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (43206, 43208), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((44485, 44505), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (44503, 44505), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((45800, 45820), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', 
(45818, 45820), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((49928, 49934), 'line2.utils.Lock', 'Lock', ([], {}), '()\n', (49932, 49934), False, 'from line2.utils import IsEmpty, AddReverseDict, Lock, AddAtExit, DelAtExit, Acquire\n'), ((50801, 50812), 'threading.Condition', 'Condition', ([], {}), '()\n', (50810, 50812), False, 'from threading import Timer, Condition\n'), ((51237, 51266), 'line2.utils.AddAtExit', 'AddAtExit', (['self', 'self.__del__'], {}), '(self, self.__del__)\n', (51246, 51266), False, 'from line2.utils import IsEmpty, AddReverseDict, Lock, AddAtExit, DelAtExit, Acquire\n'), ((51282, 51339), 'line2.models.messages.Buttons', 'Buttons', (['"""Werewolf game created"""', '"""Werewolf game created"""'], {}), "('Werewolf game created', 'Werewolf game created')\n", (51289, 51339), False, 'from line2.models.messages import Buttons\n'), ((70107, 70127), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (70125, 70127), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((96104, 96110), 'line2.utils.Lock', 'Lock', ([], {}), '()\n', (96108, 96110), False, 'from line2.utils import IsEmpty, AddReverseDict, Lock, AddAtExit, DelAtExit, Acquire\n'), ((98593, 98608), 'line2.utils.IsEmpty', 'IsEmpty', (['action'], {}), '(action)\n', (98600, 98608), False, 'from line2.utils import IsEmpty, AddReverseDict, Lock, AddAtExit, DelAtExit, Acquire\n'), ((101891, 101913), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (101911, 101913), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((11306, 11312), 'line2.utils.Lock', 'Lock', ([], {}), '()\n', (11310, 11312), False, 'from 
line2.utils import IsEmpty, AddReverseDict, Lock, AddAtExit, DelAtExit, Acquire\n'), ((12577, 12611), 'line2.utils.Acquire', 'Acquire', (['self.lock', 'self.room.lock'], {}), '(self.lock, self.room.lock)\n', (12584, 12611), False, 'from line2.utils import IsEmpty, AddReverseDict, Lock, AddAtExit, DelAtExit, Acquire\n'), ((13021, 13041), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (13039, 13041), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((23705, 23851), 'line2.models.messages.Buttons', 'Buttons', (['("Choose who to pair up as lovers by typing \'/ww room=%d pair=<id>\' using the ids below"\n % self.room.id)', '"""Choose who to pair up"""'], {}), '(\n "Choose who to pair up as lovers by typing \'/ww room=%d pair=<id>\' using the ids below"\n % self.room.id, \'Choose who to pair up\')\n', (23712, 23851), False, 'from line2.models.messages import Buttons\n'), ((26603, 26625), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (26623, 26625), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((26748, 26770), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (26768, 26770), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((26879, 26901), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (26899, 26901), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((27117, 27139), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (27137, 27139), False, 'from 
line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((27295, 27317), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (27315, 27317), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((27436, 27458), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (27456, 27458), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((27984, 28006), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (28004, 28006), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((28094, 28116), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (28114, 28116), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((28237, 28259), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (28257, 28259), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((28355, 28377), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (28375, 28377), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((28776, 28798), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (28796, 28798), False, 'from line2.models.command import ContinuousHybridCommand, 
Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((28886, 28908), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (28906, 28908), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((29004, 29026), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (29024, 29026), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((29119, 29141), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (29139, 29141), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((29248, 29270), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (29268, 29270), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((29478, 29500), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (29498, 29500), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((29648, 29670), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (29668, 29670), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((29789, 29811), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (29809, 29811), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, 
CommandContinuousCallType\n'), ((29926, 29948), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (29946, 29948), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((30294, 30316), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (30314, 30316), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((30404, 30426), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (30424, 30426), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((30522, 30544), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (30542, 30544), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((30652, 30674), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (30672, 30674), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((30813, 30835), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (30833, 30835), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((30987, 31009), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (31007, 31009), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((31126, 31148), 
'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (31146, 31148), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((31255, 31277), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (31275, 31277), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((31552, 31574), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (31572, 31574), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((31662, 31684), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (31682, 31684), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((31780, 31802), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (31800, 31802), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((31913, 31935), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (31933, 31935), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((32153, 32175), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (32173, 32175), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((32339, 32361), 'line2.models.command.CommandResult.Failed', 
'CommandResult.Failed', ([], {}), '()\n', (32359, 32361), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((32484, 32506), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (32504, 32506), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((32629, 32651), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (32649, 32651), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((32998, 33020), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (33018, 33020), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((33108, 33130), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (33128, 33130), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((33226, 33248), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (33246, 33248), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((33345, 33367), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (33365, 33367), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((33466, 33488), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (33486, 33488), 
False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((33597, 33619), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (33617, 33619), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((33775, 33797), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (33795, 33797), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((33916, 33938), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (33936, 33938), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((34365, 34387), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (34385, 34387), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((34476, 34498), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (34496, 34498), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((34594, 34616), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (34614, 34616), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((34725, 34747), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (34745, 34747), False, 'from line2.models.command import 
ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((34936, 34958), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (34956, 34958), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((35077, 35099), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (35097, 35099), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((35176, 35198), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (35196, 35198), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((35476, 35498), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (35496, 35498), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((35586, 35608), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (35606, 35608), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((35704, 35726), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (35724, 35726), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((35856, 35878), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (35876, 35878), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, 
CommandResult, CommandResultType, CommandContinuousCallType\n'), ((35987, 36009), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (36007, 36009), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((36165, 36187), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (36185, 36187), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((36306, 36328), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (36326, 36328), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((36436, 36458), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (36456, 36458), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((36723, 36745), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (36743, 36745), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((36833, 36855), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (36853, 36855), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((36951, 36973), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (36971, 36973), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, 
CommandContinuousCallType\n'), ((37074, 37096), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (37094, 37096), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((37583, 37605), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (37603, 37605), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((37693, 37715), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (37713, 37715), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((37811, 37833), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (37831, 37833), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((37942, 37964), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (37962, 37964), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((38479, 38501), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (38499, 38501), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((39262, 39284), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (39282, 39284), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((39372, 39394), 
'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (39392, 39394), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((39490, 39512), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (39510, 39512), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((39667, 39689), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (39687, 39689), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((39798, 39820), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (39818, 39820), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((39976, 39998), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (39996, 39998), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((40116, 40138), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (40136, 40138), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((40252, 40274), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (40272, 40274), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((40586, 40608), 'line2.models.command.CommandResult.Failed', 
'CommandResult.Failed', ([], {}), '()\n', (40606, 40608), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((40696, 40718), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (40716, 40718), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((40814, 40836), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (40834, 40836), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((40999, 41021), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (41019, 41021), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((41130, 41152), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (41150, 41152), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((41310, 41332), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (41330, 41332), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((41442, 41464), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (41462, 41464), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((41578, 41600), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (41598, 41600), 
False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((42077, 42099), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (42097, 42099), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((42187, 42209), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (42207, 42209), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((42305, 42327), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (42325, 42327), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((42477, 42499), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (42497, 42499), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((42609, 42631), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (42629, 42631), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((42791, 42813), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (42811, 42813), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((42934, 42956), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (42954, 42956), False, 'from line2.models.command import 
ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((43067, 43089), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (43087, 43089), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((43373, 43395), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (43393, 43395), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((43483, 43505), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (43503, 43505), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((43601, 43623), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (43621, 43623), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((43781, 43803), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (43801, 43803), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((43910, 43932), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (43930, 43932), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((44090, 44112), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (44110, 44112), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, 
CommandResult, CommandResultType, CommandContinuousCallType\n'), ((44235, 44257), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (44255, 44257), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((44366, 44388), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (44386, 44388), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((44664, 44686), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (44684, 44686), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((44774, 44796), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (44794, 44796), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((44892, 44914), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (44912, 44914), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((45072, 45094), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (45092, 45094), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((45205, 45227), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (45225, 45227), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, 
CommandContinuousCallType\n'), ((45393, 45415), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (45413, 45415), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((45542, 45564), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (45562, 45564), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((45677, 45699), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (45697, 45699), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((53240, 53262), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (53260, 53262), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((53785, 53791), 'time.time', 'time', ([], {}), '()\n', (53789, 53791), False, 'from time import time, sleep\n'), ((55423, 55445), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (55443, 55445), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((68262, 68278), 'random.choice', 'choice', (['converts'], {}), '(converts)\n', (68268, 68278), False, 'from random import randint, choice, shuffle\n'), ((72257, 72407), 'line2.models.messages.Buttons', 'Buttons', (['("You can choose shoot someone by typing \'/ww room=%d shoot=<id>\' with the ids below."\n % self.id)', '"""You can choose to shoot someone."""'], {}), '(\n "You can choose shoot someone by typing \'/ww room=%d shoot=<id>\' with the ids below."\n % 
self.id, \'You can choose to shoot someone.\')\n', (72264, 72407), False, 'from line2.models.messages import Buttons\n'), ((84806, 85087), 'line2.models.messages.Buttons', 'Buttons', (['("You can vote to lynch someone by typing \'/ww room=%d lynch=<id>\' with the ids below. Though you can\'t be dumb enough to vote for yourself, right?"\n % self.id)', '"""You can vote to lynch someone. Though you can\'t be dumb enough to vote for yourself, right?"""'], {}), '(\n "You can vote to lynch someone by typing \'/ww room=%d lynch=<id>\' with the ids below. Though you can\'t be dumb enough to vote for yourself, right?"\n % self.id,\n "You can vote to lynch someone. Though you can\'t be dumb enough to vote for yourself, right?"\n )\n', (84813, 85087), False, 'from line2.models.messages import Buttons\n'), ((87836, 87850), 'random.shuffle', 'shuffle', (['roles'], {}), '(roles)\n', (87843, 87850), False, 'from random import randint, choice, shuffle\n'), ((87977, 87993), 'random.shuffle', 'shuffle', (['players'], {}), '(players)\n', (87984, 87993), False, 'from random import randint, choice, shuffle\n'), ((90981, 91001), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (90999, 91001), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((96268, 96291), 'random.choice', 'choice', (['self.candidates'], {}), '(self.candidates)\n', (96274, 96291), False, 'from random import randint, choice, shuffle\n'), ((97468, 97488), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (97486, 97488), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((98889, 98911), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (98909, 98911), False, 'from line2.models.command import 
ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((100665, 100685), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (100683, 100685), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((24278, 24423), 'line2.models.messages.Buttons', 'Buttons', (['("Choose your role model by typing \'/ww room=%d master=<id>\' using the ids below"\n % self.room.id)', '"""\nChoose your role model"""'], {}), '(\n "Choose your role model by typing \'/ww room=%d master=<id>\' using the ids below"\n % self.room.id, """\nChoose your role model""")\n', (24285, 24423), False, 'from line2.models.messages import Buttons\n'), ((38273, 38295), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (38293, 38295), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((51970, 51985), 'line2.utils.DelAtExit', 'DelAtExit', (['self'], {}), '(self)\n', (51979, 51985), False, 'from line2.utils import IsEmpty, AddReverseDict, Lock, AddAtExit, DelAtExit, Acquire\n'), ((55573, 55595), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (55593, 55595), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((62338, 62351), 'random.choice', 'choice', (['eats2'], {}), '(eats2)\n', (62344, 62351), False, 'from random import randint, choice, shuffle\n'), ((73603, 73786), 'line2.models.messages.Buttons', 'Buttons', (['("You can vote to convert someone into a cultist by typing \'/ww room=%d convert=<id>\' with the ids below"\n % self.id)', '"""You can vote to convert someone into a cultist"""'], {}), '(\n "You can vote to convert someone 
into a cultist by typing \'/ww room=%d convert=<id>\' with the ids below"\n % self.id, \'You can vote to convert someone into a cultist\')\n', (73610, 73786), False, 'from line2.models.messages import Buttons\n'), ((74392, 74533), 'line2.models.messages.Buttons', 'Buttons', (['("You can vote to eat someone by typing \'/ww room=%d eat=<id>\' with the ids below"\n % self.id)', '"""You can vote to eat someone"""'], {}), '(\n "You can vote to eat someone by typing \'/ww room=%d eat=<id>\' with the ids below"\n % self.id, \'You can vote to eat someone\')\n', (74399, 74533), False, 'from line2.models.messages import Buttons\n'), ((86447, 86469), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (86467, 86469), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((86705, 86727), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (86725, 86727), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((87055, 87077), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (87075, 87077), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((87268, 87290), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (87288, 87290), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((96511, 96533), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (96531, 96533), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, 
CommandContinuousCallType\n'), ((96671, 96693), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (96691, 96693), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((99039, 99061), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (99059, 99061), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((53319, 53325), 'time.time', 'time', ([], {}), '()\n', (53323, 53325), False, 'from time import time, sleep\n'), ((54926, 54932), 'time.time', 'time', ([], {}), '()\n', (54930, 54932), False, 'from time import time, sleep\n'), ((55924, 55946), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (55944, 55946), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((60816, 60829), 'random.randint', 'randint', (['(0)', '(4)'], {}), '(0, 4)\n', (60823, 60829), False, 'from random import randint, choice, shuffle\n'), ((68940, 68953), 'random.randint', 'randint', (['(0)', '(3)'], {}), '(0, 3)\n', (68947, 68953), False, 'from random import randint, choice, shuffle\n'), ((69004, 69030), 'random.randint', 'randint', (['(0)', '(cultistLen - 1)'], {}), '(0, cultistLen - 1)\n', (69011, 69030), False, 'from random import randint, choice, shuffle\n'), ((72925, 72931), 'time.time', 'time', ([], {}), '()\n', (72929, 72931), False, 'from time import time, sleep\n'), ((79387, 79393), 'time.time', 'time', ([], {}), '()\n', (79391, 79393), False, 'from time import time, sleep\n'), ((83904, 83910), 'time.time', 'time', ([], {}), '()\n', (83908, 83910), False, 'from time import time, sleep\n'), ((85546, 85552), 'time.time', 'time', ([], {}), '()\n', (85550, 85552), False, 
'from time import time, sleep\n'), ((86864, 86886), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (86884, 86886), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((99390, 99412), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (99410, 99412), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((100192, 100212), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (100210, 100212), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((100818, 100840), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (100838, 100840), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((101255, 101277), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (101275, 101277), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((101412, 101432), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (101430, 101432), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((46262, 46285), 'random.choice', 'choice', (['Role.validRoles'], {}), '(Role.validRoles)\n', (46268, 46285), False, 'from random import randint, choice, shuffle\n'), ((56121, 56143), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (56141, 56143), False, 
'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((88523, 88537), 'random.randint', 'randint', (['(0)', '(14)'], {}), '(0, 14)\n', (88530, 88537), False, 'from random import randint, choice, shuffle\n'), ((96874, 96896), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (96894, 96896), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((99587, 99609), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (99607, 99609), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((100376, 100396), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (100394, 100396), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((101527, 101547), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (101545, 101547), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((56279, 56301), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (56299, 56301), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((75231, 75436), 'line2.models.messages.Buttons', 'Buttons', (['("You can choose to protect someone\'s house from Werewolves by typing \'/ww room=%d protect=<id>\' with the ids below"\n % self.id)', '"""You can choose to protect someone\'s house from Werewolves"""'], {}), '(\n "You can choose to protect 
someone\'s house from Werewolves by typing \'/ww room=%d protect=<id>\' with the ids below"\n % self.id, "You can choose to protect someone\'s house from Werewolves")\n', (75238, 75436), False, 'from line2.models.messages import Buttons\n'), ((80813, 80976), 'line2.models.messages.Buttons', 'Buttons', (['("You can choose to reveal your role as a Mayor by typing \'/ww room=%d action=reveal\'"\n % self.id)', '"""You can choose to reveal your role as a Mayor"""'], {}), '(\n "You can choose to reveal your role as a Mayor by typing \'/ww room=%d action=reveal\'"\n % self.id, \'You can choose to reveal your role as a Mayor\')\n', (80820, 80976), False, 'from line2.models.messages import Buttons\n'), ((89105, 89140), 'random.choice', 'choice', (['[Role.mason, Role.villager]'], {}), '([Role.mason, Role.villager])\n', (89111, 89140), False, 'from random import randint, choice, shuffle\n'), ((89204, 89224), 'random.randint', 'randint', (['(0)', '(rlen - 1)'], {}), '(0, rlen - 1)\n', (89211, 89224), False, 'from random import randint, choice, shuffle\n'), ((97144, 97166), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (97164, 97166), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((99884, 99906), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (99904, 99906), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((47203, 47216), 'random.randint', 'randint', (['(0)', '(9)'], {}), '(0, 9)\n', (47210, 47216), False, 'from random import randint, choice, shuffle\n'), ((56387, 56407), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (56405, 56407), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, 
CommandResultType, CommandContinuousCallType\n'), ((63804, 63817), 'random.randint', 'randint', (['(0)', '(9)'], {}), '(0, 9)\n', (63811, 63817), False, 'from random import randint, choice, shuffle\n'), ((75995, 76168), 'line2.models.messages.Buttons', 'Buttons', (['("You can choose to sleep in someone\'s house by typing \'/ww room=%d sleep=<id>\' with the ids below"\n % self.id)', '"""You can choose to sleep in someone\'s house"""'], {}), '(\n "You can choose to sleep in someone\'s house by typing \'/ww room=%d sleep=<id>\' with the ids below"\n % self.id, "You can choose to sleep in someone\'s house")\n', (76002, 76168), False, 'from line2.models.messages import Buttons\n'), ((101843, 101865), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (101863, 101865), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((25837, 25859), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (25857, 25859), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((56534, 56556), 'line2.models.command.CommandResult.Failed', 'CommandResult.Failed', ([], {}), '()\n', (56554, 56556), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((63888, 63909), 'random.randint', 'randint', (['(0)', '(wwLen - 1)'], {}), '(0, wwLen - 1)\n', (63895, 63909), False, 'from random import randint, choice, shuffle\n'), ((76877, 77093), 'line2.models.messages.Buttons', 'Buttons', (['("You can choose hunt someone by typing \'/ww room=%d shoot=<id>\' with the ids below. If he\'s a cultist, he will die."\n % self.id)', '"""You can choose to hunt someone. 
If he\'s a cultist, he will die."""'], {}), '(\n "You can choose hunt someone by typing \'/ww room=%d shoot=<id>\' with the ids below. If he\'s a cultist, he will die."\n % self.id,\n "You can choose to hunt someone. If he\'s a cultist, he will die.")\n', (76884, 77093), False, 'from line2.models.messages import Buttons\n'), ((81462, 81736), 'line2.models.messages.Buttons', 'Buttons', (['("You can choose to spread silver dust all over the village by typing \'/ww room=%d action=silver\'. You can do it %d times"\n % (self.id, player.ammo))', "('You can choose to spread silver dust all over the village. You can do it %d times'\n % player.ammo)"], {}), '(\n "You can choose to spread silver dust all over the village by typing \'/ww room=%d action=silver\'. You can do it %d times"\n % (self.id, player.ammo), \n \'You can choose to spread silver dust all over the village. You can do it %d times\'\n % player.ammo)\n', (81469, 81736), False, 'from line2.models.messages import Buttons\n'), ((88761, 88774), 'random.randint', 'randint', (['(0)', '(1)'], {}), '(0, 1)\n', (88768, 88774), False, 'from random import randint, choice, shuffle\n'), ((89947, 89960), 'random.choice', 'choice', (['roles'], {}), '(roles)\n', (89953, 89960), False, 'from random import randint, choice, shuffle\n'), ((65242, 65255), 'random.randint', 'randint', (['(0)', '(4)'], {}), '(0, 4)\n', (65249, 65255), False, 'from random import randint, choice, shuffle\n'), ((77652, 77797), 'line2.models.messages.Buttons', 'Buttons', (['("You can choose kill someone by typing \'/ww room=%d kill=<id>\' with the ids below"\n % self.id)', '"""You can choose to kill someone"""'], {}), '(\n "You can choose kill someone by typing \'/ww room=%d kill=<id>\' with the ids below"\n % self.id, \'You can choose to kill someone\')\n', (77659, 77797), False, 'from line2.models.messages import Buttons\n'), ((82232, 82456), 'line2.models.messages.Buttons', 'Buttons', (['("You can choose shoot someone by typing \'/ww room=%d 
shoot=<id>\' with the ids below. You have %d bullets"\n % (self.id, player.ammo))', "('You can choose to shoot someone. You have %d bullets' % player.ammo)"], {}), '(\n "You can choose shoot someone by typing \'/ww room=%d shoot=<id>\' with the ids below. You have %d bullets"\n % (self.id, player.ammo), \n \'You can choose to shoot someone. You have %d bullets\' % player.ammo)\n', (82239, 82456), False, 'from line2.models.messages import Buttons\n'), ((83043, 83215), 'line2.models.messages.Buttons', 'Buttons', (['("You can choose investigate someone\'s role by typing \'/ww room=%d see=<id>\' with the ids below"\n % self.id)', '"""You can choose to investigate someone\'s role"""'], {}), '(\n "You can choose investigate someone\'s role by typing \'/ww room=%d see=<id>\' with the ids below"\n % self.id, "You can choose to investigate someone\'s role")\n', (83050, 83215), False, 'from line2.models.messages import Buttons\n'), ((89802, 89837), 'random.choice', 'choice', (['[Role.mason, Role.villager]'], {}), '([Role.mason, Role.villager])\n', (89808, 89837), False, 'from random import randint, choice, shuffle\n'), ((56688, 56708), 'line2.models.command.CommandResult.Done', 'CommandResult.Done', ([], {}), '()\n', (56706, 56708), False, 'from line2.models.command import ContinuousHybridCommand, Parameter, ParameterType, CommandResult, CommandResultType, CommandContinuousCallType\n'), ((78352, 78524), 'line2.models.messages.Buttons', 'Buttons', (['("You can choose see through someone\'s role by typing \'/ww room=%d see=<id>\' with the ids below"\n % self.id)', '"""You can choose to see through someone\'s role"""'], {}), '(\n "You can choose see through someone\'s role by typing \'/ww room=%d see=<id>\' with the ids below"\n % self.id, "You can choose to see through someone\'s role")\n', (78359, 78524), False, 'from line2.models.messages import Buttons\n')]
|
# TODO 增加右键菜单和拖拽启动打包
import os
import glob
import time
import zipfile
def mark(target):
tt = time.strftime('.%Y%m%d_%H%M%S')
base, ext = os.path.splitext(target)
os.rename(target, base + tt + ext)
def compress(paths, except_key=()):
save_name = os.path.splitext(paths[0])[0] + time.strftime('.%Y%m%d_%H%M%S.zip')
zip = zipfile.ZipFile(save_name, 'w', zipfile.ZIP_DEFLATED)
for path in paths:
if os.path.isfile(path):
zip.write(path, path)
else:
for file in glob.iglob('%s/**' % path, recursive=True):
if all(key not in file for key in except_key):
zip.write(file, file)
zip.close()
# mark('idlealib.zip')
lst = [file for file in os.listdir() if os.path.isfile(file) and not file.startswith('test') and not file.endswith('.zip')]
print(lst)
compress(('idlealib', *lst),
except_key=('__pycache__',))
|
[
"zipfile.ZipFile",
"os.rename",
"time.strftime",
"os.path.isfile",
"os.path.splitext",
"glob.iglob",
"os.listdir"
] |
[((109, 140), 'time.strftime', 'time.strftime', (['""".%Y%m%d_%H%M%S"""'], {}), "('.%Y%m%d_%H%M%S')\n", (122, 140), False, 'import time\n'), ((158, 182), 'os.path.splitext', 'os.path.splitext', (['target'], {}), '(target)\n', (174, 182), False, 'import os\n'), ((188, 222), 'os.rename', 'os.rename', (['target', '(base + tt + ext)'], {}), '(target, base + tt + ext)\n', (197, 222), False, 'import os\n'), ((360, 413), 'zipfile.ZipFile', 'zipfile.ZipFile', (['save_name', '"""w"""', 'zipfile.ZIP_DEFLATED'], {}), "(save_name, 'w', zipfile.ZIP_DEFLATED)\n", (375, 413), False, 'import zipfile\n'), ((313, 348), 'time.strftime', 'time.strftime', (['""".%Y%m%d_%H%M%S.zip"""'], {}), "('.%Y%m%d_%H%M%S.zip')\n", (326, 348), False, 'import time\n'), ((450, 470), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (464, 470), False, 'import os\n'), ((770, 782), 'os.listdir', 'os.listdir', ([], {}), '()\n', (780, 782), False, 'import os\n'), ((281, 307), 'os.path.splitext', 'os.path.splitext', (['paths[0]'], {}), '(paths[0])\n', (297, 307), False, 'import os\n'), ((547, 589), 'glob.iglob', 'glob.iglob', (["('%s/**' % path)"], {'recursive': '(True)'}), "('%s/**' % path, recursive=True)\n", (557, 589), False, 'import glob\n'), ((786, 806), 'os.path.isfile', 'os.path.isfile', (['file'], {}), '(file)\n', (800, 806), False, 'import os\n')]
|
from tqdm import tqdm
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import time
import argparse
import math
from lib import utils
from lib.utils import log_string
from model.DSTGNN import DSTGNN
parser = argparse.ArgumentParser()
parser.add_argument('--P', type = int, default = 12,
help = 'history steps')
parser.add_argument('--Q', type = int, default = 12,
help = 'prediction steps')
parser.add_argument('--L', type = int, default = 5,
help = 'number of STAtt Blocks')
parser.add_argument('--K', type = int, default = 8,
help = 'number of attention heads')
parser.add_argument('--d', type = int, default = 8,
help = 'dims of each head attention outputs')
parser.add_argument('--train_ratio', type = float, default = 0.7,
help = 'training set [default : 0.7]')
parser.add_argument('--val_ratio', type = float, default = 0.1,
help = 'validation set [default : 0.1]')
parser.add_argument('--test_ratio', type = float, default = 0.2,
help = 'testing set [default : 0.2]')
parser.add_argument('--batch_size', type = int, default = 16,
help = 'batch size')
parser.add_argument('--max_epoch', type = int, default = 15,
help = 'epoch to run')
# parser.add_argument('--patience', type = int, default = 10,
# help = 'patience for early stop')
parser.add_argument('--learning_rate', type=float, default = 0.001,
help = 'initial learning rate')
# parser.add_argument('--decay_epoch', type=int, default = 5,
# help = 'decay epoch')
parser.add_argument('--traffic_file', default = 'data/METR-LA/metr-la.h5',
help = 'traffic file')
parser.add_argument('--SE_file', default = 'data/METR-LA/SE(METR).txt',
help = 'spatial emebdding file')
parser.add_argument('--model_file', default = 'data/METR-LA/METR',
help = 'save the model to disk')
parser.add_argument('--log_file', default = 'data/METR-LA/log(METR)',
help = 'log file')
args = parser.parse_args()
log = open(args.log_file, 'w')
device = torch.device("cuda:6" if torch.cuda.is_available() else "cpu")
log_string(log, "loading data....")
trainX, trainTE, trainY, valX, valTE, valY, testX, testTE, testY, SE, mean, std = utils.loadData(args)
# adj = np.load('./data/metr_adj.npy')
log_string(log, "loading end....")
def res(model, valX, valTE, valY, mean, std):
model.eval() # 评估模式, 这会关闭dropout
# it = test_iter.get_iterator()
num_val = valX.shape[0]
pred = []
label = []
num_batch = math.ceil(num_val / args.batch_size)
with torch.no_grad():
for batch_idx in range(num_batch):
if isinstance(model, torch.nn.Module):
start_idx = batch_idx * args.batch_size
end_idx = min(num_val, (batch_idx + 1) * args.batch_size)
X = torch.from_numpy(valX[start_idx : end_idx]).float().to(device)
y = valY[start_idx : end_idx]
te = torch.from_numpy(valTE[start_idx : end_idx]).to(device)
y_hat = model(X, te)
pred.append(y_hat.cpu().numpy()*std+mean)
label.append(y)
del X, te, y_hat
pred = np.concatenate(pred, axis = 0)
label = np.concatenate(label, axis = 0)
# print(pred.shape, label.shape)
for i in range(12):
mae, rmse, mape = metric(pred[:,i,:], label[:,i,:])
# if i == 11:
log_string(log,'step %d, mae: %.4f, rmse: %.4f, mape: %.4f' % (i+1, mae, rmse, mape))
# print('step %d, mae: %.4f, rmse: %.4f, mape: %.4f' % (i+1, mae, rmse, mape))
mae, rmse , mape = metric(pred, label)
log_string(log, 'average, mae: %.4f, rmse: %.4f, mape: %.4f' % (mae, rmse, mape))
# print('average, mae: %.4f, rmse: %.4f, mape: %.4f' % (mae, rmse, mape))
return mae
def test(model, valX, valTE, valY, mean, std):
model = torch.load(args.model_file)
mae = res(model, valX, valTE, valY, mean, std)
# print(mae)
# print('test loss %.4f, last val loss %.4f' % (test_loss, test_loss_l))
def _compute_loss(y_true, y_predicted):
# y_true = scaler.inverse_transform(y_true)
# y_predicted = scaler.inverse_transform(y_predicted)
return masked_mae(y_predicted, y_true, 0.0)
def masked_mae(preds, labels, null_val=np.nan):
if np.isnan(null_val):
mask = ~torch.isnan(labels)
else:
mask = (labels!=null_val)
mask = mask.float()
mask /= torch.mean((mask))
mask = torch.where(torch.isnan(mask), torch.zeros_like(mask), mask)
loss = torch.abs(preds-labels)
loss = loss * mask
loss = torch.where(torch.isnan(loss), torch.zeros_like(loss), loss)
return torch.mean(loss)
def masked_mae_loss(y_pred, y_true, flag):
mask = (y_true != 0).float()
mask /= mask.mean()
loss = torch.abs(y_pred - y_true)
loss = loss * mask
# trick for nans: https://discuss.pytorch.org/t/how-to-set-nan-in-tensor-to-0/3918/3
loss[loss != loss] = 0
if flag == True:
loss = loss * mask_l
return loss.mean()
def metric(pred, label):
with np.errstate(divide = 'ignore', invalid = 'ignore'):
mask = np.not_equal(label, 0)
mask = mask.astype(np.float32)
mask /= np.mean(mask)
mae = np.abs(np.subtract(pred, label)).astype(np.float32)
rmse = np.square(mae)
mape = np.divide(mae, label)
mae = np.nan_to_num(mae * mask)
mae = np.mean(mae)
rmse = np.nan_to_num(rmse * mask)
rmse = np.sqrt(np.mean(rmse))
mape = np.nan_to_num(mape * mask)
mape = np.mean(mape)
return mae, rmse, mape
if __name__ == '__main__':
log_string(log, "model constructed begin....")
model = DSTGNN(SE, 1, args.K*args.d, args.K, args.d, args.L).to(device)
log_string(log, "model constructed end....")
log_string(log, "test begin....")
test(model, testX, testTE, testY, mean, std)
log_string(log, "test end....")
|
[
"argparse.ArgumentParser",
"numpy.nan_to_num",
"lib.utils.loadData",
"numpy.isnan",
"numpy.mean",
"torch.no_grad",
"model.DSTGNN.DSTGNN",
"torch.isnan",
"torch.load",
"torch.mean",
"numpy.divide",
"torch.zeros_like",
"math.ceil",
"numpy.square",
"numpy.not_equal",
"torch.cuda.is_available",
"numpy.concatenate",
"torch.from_numpy",
"numpy.subtract",
"numpy.errstate",
"lib.utils.log_string",
"torch.abs"
] |
[((246, 271), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (269, 271), False, 'import argparse\n'), ((2329, 2364), 'lib.utils.log_string', 'log_string', (['log', '"""loading data...."""'], {}), "(log, 'loading data....')\n", (2339, 2364), False, 'from lib.utils import log_string\n'), ((2448, 2468), 'lib.utils.loadData', 'utils.loadData', (['args'], {}), '(args)\n', (2462, 2468), False, 'from lib import utils\n'), ((2512, 2546), 'lib.utils.log_string', 'log_string', (['log', '"""loading end...."""'], {}), "(log, 'loading end....')\n", (2522, 2546), False, 'from lib.utils import log_string\n'), ((2740, 2776), 'math.ceil', 'math.ceil', (['(num_val / args.batch_size)'], {}), '(num_val / args.batch_size)\n', (2749, 2776), False, 'import math\n'), ((3412, 3440), 'numpy.concatenate', 'np.concatenate', (['pred'], {'axis': '(0)'}), '(pred, axis=0)\n', (3426, 3440), True, 'import numpy as np\n'), ((3455, 3484), 'numpy.concatenate', 'np.concatenate', (['label'], {'axis': '(0)'}), '(label, axis=0)\n', (3469, 3484), True, 'import numpy as np\n'), ((3869, 3954), 'lib.utils.log_string', 'log_string', (['log', "('average, mae: %.4f, rmse: %.4f, mape: %.4f' % (mae, rmse, mape))"], {}), "(log, 'average, mae: %.4f, rmse: %.4f, mape: %.4f' % (mae, rmse,\n mape))\n", (3879, 3954), False, 'from lib.utils import log_string\n'), ((4109, 4136), 'torch.load', 'torch.load', (['args.model_file'], {}), '(args.model_file)\n', (4119, 4136), False, 'import torch\n'), ((4545, 4563), 'numpy.isnan', 'np.isnan', (['null_val'], {}), '(null_val)\n', (4553, 4563), True, 'import numpy as np\n'), ((4682, 4698), 'torch.mean', 'torch.mean', (['mask'], {}), '(mask)\n', (4692, 4698), False, 'import torch\n'), ((4784, 4809), 'torch.abs', 'torch.abs', (['(preds - labels)'], {}), '(preds - labels)\n', (4793, 4809), False, 'import torch\n'), ((4914, 4930), 'torch.mean', 'torch.mean', (['loss'], {}), '(loss)\n', (4924, 4930), False, 'import torch\n'), ((5044, 5070), 'torch.abs', 
'torch.abs', (['(y_pred - y_true)'], {}), '(y_pred - y_true)\n', (5053, 5070), False, 'import torch\n'), ((5887, 5933), 'lib.utils.log_string', 'log_string', (['log', '"""model constructed begin...."""'], {}), "(log, 'model constructed begin....')\n", (5897, 5933), False, 'from lib.utils import log_string\n'), ((6014, 6058), 'lib.utils.log_string', 'log_string', (['log', '"""model constructed end...."""'], {}), "(log, 'model constructed end....')\n", (6024, 6058), False, 'from lib.utils import log_string\n'), ((6063, 6096), 'lib.utils.log_string', 'log_string', (['log', '"""test begin...."""'], {}), "(log, 'test begin....')\n", (6073, 6096), False, 'from lib.utils import log_string\n'), ((6150, 6181), 'lib.utils.log_string', 'log_string', (['log', '"""test end...."""'], {}), "(log, 'test end....')\n", (6160, 6181), False, 'from lib.utils import log_string\n'), ((2290, 2315), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2313, 2315), False, 'import torch\n'), ((2786, 2801), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2799, 2801), False, 'import torch\n'), ((3640, 3732), 'lib.utils.log_string', 'log_string', (['log', "('step %d, mae: %.4f, rmse: %.4f, mape: %.4f' % (i + 1, mae, rmse, mape))"], {}), "(log, 'step %d, mae: %.4f, rmse: %.4f, mape: %.4f' % (i + 1, mae,\n rmse, mape))\n", (3650, 3732), False, 'from lib.utils import log_string\n'), ((4724, 4741), 'torch.isnan', 'torch.isnan', (['mask'], {}), '(mask)\n', (4735, 4741), False, 'import torch\n'), ((4743, 4765), 'torch.zeros_like', 'torch.zeros_like', (['mask'], {}), '(mask)\n', (4759, 4765), False, 'import torch\n'), ((4854, 4871), 'torch.isnan', 'torch.isnan', (['loss'], {}), '(loss)\n', (4865, 4871), False, 'import torch\n'), ((4873, 4895), 'torch.zeros_like', 'torch.zeros_like', (['loss'], {}), '(loss)\n', (4889, 4895), False, 'import torch\n'), ((5318, 5364), 'numpy.errstate', 'np.errstate', ([], {'divide': '"""ignore"""', 'invalid': '"""ignore"""'}), "(divide='ignore', 
invalid='ignore')\n", (5329, 5364), True, 'import numpy as np\n'), ((5385, 5407), 'numpy.not_equal', 'np.not_equal', (['label', '(0)'], {}), '(label, 0)\n', (5397, 5407), True, 'import numpy as np\n'), ((5463, 5476), 'numpy.mean', 'np.mean', (['mask'], {}), '(mask)\n', (5470, 5476), True, 'import numpy as np\n'), ((5558, 5572), 'numpy.square', 'np.square', (['mae'], {}), '(mae)\n', (5567, 5572), True, 'import numpy as np\n'), ((5588, 5609), 'numpy.divide', 'np.divide', (['mae', 'label'], {}), '(mae, label)\n', (5597, 5609), True, 'import numpy as np\n'), ((5624, 5649), 'numpy.nan_to_num', 'np.nan_to_num', (['(mae * mask)'], {}), '(mae * mask)\n', (5637, 5649), True, 'import numpy as np\n'), ((5664, 5676), 'numpy.mean', 'np.mean', (['mae'], {}), '(mae)\n', (5671, 5676), True, 'import numpy as np\n'), ((5692, 5718), 'numpy.nan_to_num', 'np.nan_to_num', (['(rmse * mask)'], {}), '(rmse * mask)\n', (5705, 5718), True, 'import numpy as np\n'), ((5772, 5798), 'numpy.nan_to_num', 'np.nan_to_num', (['(mape * mask)'], {}), '(mape * mask)\n', (5785, 5798), True, 'import numpy as np\n'), ((5814, 5827), 'numpy.mean', 'np.mean', (['mape'], {}), '(mape)\n', (5821, 5827), True, 'import numpy as np\n'), ((4581, 4600), 'torch.isnan', 'torch.isnan', (['labels'], {}), '(labels)\n', (4592, 4600), False, 'import torch\n'), ((5742, 5755), 'numpy.mean', 'np.mean', (['rmse'], {}), '(rmse)\n', (5749, 5755), True, 'import numpy as np\n'), ((5946, 6000), 'model.DSTGNN.DSTGNN', 'DSTGNN', (['SE', '(1)', '(args.K * args.d)', 'args.K', 'args.d', 'args.L'], {}), '(SE, 1, args.K * args.d, args.K, args.d, args.L)\n', (5952, 6000), False, 'from model.DSTGNN import DSTGNN\n'), ((5498, 5522), 'numpy.subtract', 'np.subtract', (['pred', 'label'], {}), '(pred, label)\n', (5509, 5522), True, 'import numpy as np\n'), ((3178, 3220), 'torch.from_numpy', 'torch.from_numpy', (['valTE[start_idx:end_idx]'], {}), '(valTE[start_idx:end_idx])\n', (3194, 3220), False, 'import torch\n'), ((3048, 3089), 
'torch.from_numpy', 'torch.from_numpy', (['valX[start_idx:end_idx]'], {}), '(valX[start_idx:end_idx])\n', (3064, 3089), False, 'import torch\n')]
|
import os
import sys
import scipy.io
import scipy.misc
import matplotlib.pyplot as plt
from PIL import Image
from nst_utils import *
from loss_function import *
import numpy as np
import tensorflow as tf
import time
STYLE_LAYERS = [
('conv1_1', 0.2),
('conv2_1', 0.2),
('conv3_1', 0.2),
('conv4_1', 0.2),
('conv5_1', 0.2)
]
content_image = scipy.misc.imread("resources/content.jpg")
generate_config = np.array(content_image).shape
content_image = reshape_and_normalize_image(content_image)
style_image = scipy.misc.imread("resources/style.jpg")
style_image = reshape_and_normalize_image(style_image)
generated_image = generate_noise_image(content_image, 0.6, generate_config)
# plt.imshow(generated_image[0])
# plt.show()
# Reset the graph
tf.reset_default_graph()
# Start interactive session
sess = tf.InteractiveSession()
model = load_vgg_model("resources/imagenet-vgg-verydeep-19.mat", generate_config)
# Assign the content image to be the input of the VGG model.
sess.run(model['input'].assign(content_image))
# Select the output tensor of layer conv4_2
out = model['conv4_2']
# Set a_C to be the hidden layer activation from the layer we have selected
a_C = sess.run(out)
# Set a_G to be the hidden layer activation from same layer. Here, a_G references model['conv4_2']
# and isn't evaluated yet. Later in the code, we'll assign the image G as the model input, so that
# when we run the session, this will be the activations drawn from the appropriate layer, with G as input.
a_G = out
# Compute the content cost
J_content = compute_content_cost(a_C, a_G)
# Assign the input of the model to be the "style" image
sess.run(model['input'].assign(style_image))
# Compute the style cost
J_style = compute_style_cost(sess, model, STYLE_LAYERS)
# Default is 10, 40
J = total_cost(J_content, J_style, 10, 40)
# define optimizer (1 line)
optimizer = tf.train.AdamOptimizer(2.0)
# define train_step (1 line)
train_step = optimizer.minimize(J)
def model_nn(sess, input_image, num_iterations = 200):
# Initialize global variables (you need to run the session on the initializer)
### START CODE HERE ### (1 line)
sess.run(tf.global_variables_initializer())
### END CODE HERE ###
# Run the noisy input image (initial generated image) through the model. Use assign().
### START CODE HERE ### (1 line)
sess.run(model['input'].assign(input_image))
### END CODE HERE ###
for i in range(num_iterations):
# Run the session on the train_step to minimize the total cost
### START CODE HERE ### (1 line)
sess.run(train_step)
### END CODE HERE ###
# Compute the generated image by running the session on the current model['input']
### START CODE HERE ### (1 line)
generated_image = sess.run(model['input'])
### END CODE HERE ###
# Print every 20 iteration.
if i%20 == 0:
Jt, Jc, Js = sess.run([J, J_content, J_style])
print("Iteration " + str(i) + " :")
print(time.asctime(time.localtime(time.time())))
print("total cost = " + str(Jt))
print("content cost = " + str(Jc))
print("style cost = " + str(Js))
# save current generated image in the "/output" directory
save_image("output/" + str(i) + ".png", generated_image)
# save last generated image
save_image('output/generated_image.jpg', generated_image)
return generated_image
print("start:" + time.asctime(time.localtime(time.time())))
model_nn(sess, generated_image)
|
[
"tensorflow.global_variables_initializer",
"tensorflow.reset_default_graph",
"time.time",
"numpy.array",
"tensorflow.InteractiveSession",
"tensorflow.train.AdamOptimizer"
] |
[((765, 789), 'tensorflow.reset_default_graph', 'tf.reset_default_graph', ([], {}), '()\n', (787, 789), True, 'import tensorflow as tf\n'), ((826, 849), 'tensorflow.InteractiveSession', 'tf.InteractiveSession', ([], {}), '()\n', (847, 849), True, 'import tensorflow as tf\n'), ((1887, 1914), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['(2.0)'], {}), '(2.0)\n', (1909, 1914), True, 'import tensorflow as tf\n'), ((423, 446), 'numpy.array', 'np.array', (['content_image'], {}), '(content_image)\n', (431, 446), True, 'import numpy as np\n'), ((2174, 2207), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (2205, 2207), True, 'import tensorflow as tf\n'), ((3575, 3586), 'time.time', 'time.time', ([], {}), '()\n', (3584, 3586), False, 'import time\n'), ((3094, 3105), 'time.time', 'time.time', ([], {}), '()\n', (3103, 3105), False, 'import time\n')]
|
import networkx as nx
import numpy as np
import matplotlib.pyplot as plt
"""
Plots random networks with a varying chance of connections between nodes
for figure 2.3.
"""
node_color = 'red'
node_border_color = 'black'
node_border_width = .6
edge_color = 'black'
N = 10
num_graphs = 6
N_columns = 3
N_rows = 2
P = np.linspace(0.0, 1.0, num=num_graphs)
print(P)
def draw(G, pos, ax):
# Plots a graph.
nodes1 = nx.draw_networkx_nodes(G, pos=pos, node_color=node_color, ax=ax)
nodes1.set_edgecolor(node_border_color)
nodes1.set_linewidth(node_border_width)
nx.draw_networkx_edges(G, pos, edge_color=edge_color, alpha=.8,
ax=ax)
ax.axis('off')
return ax
fig, axs = plt.subplots(N_columns, N_rows)
G = nx.fast_gnp_random_graph(N, P[0], seed=0)
pos = nx.spring_layout(G)
c = 0
for i in range(N_columns):
for j in range(N_rows):
G = nx.fast_gnp_random_graph(N, P[c], seed=0)
draw(G, pos, axs[i, j])
axs[i, j].text(0.5, -0.3, "P = " + str(round(P[c], 1)), size=12,
ha="center", transform=axs[i, j].transAxes)
c += 1
plt.subplots_adjust(hspace=0.3)
plt.show()
|
[
"matplotlib.pyplot.show",
"networkx.draw_networkx_edges",
"networkx.fast_gnp_random_graph",
"networkx.spring_layout",
"networkx.draw_networkx_nodes",
"numpy.linspace",
"matplotlib.pyplot.subplots_adjust",
"matplotlib.pyplot.subplots"
] |
[((322, 359), 'numpy.linspace', 'np.linspace', (['(0.0)', '(1.0)'], {'num': 'num_graphs'}), '(0.0, 1.0, num=num_graphs)\n', (333, 359), True, 'import numpy as np\n'), ((729, 760), 'matplotlib.pyplot.subplots', 'plt.subplots', (['N_columns', 'N_rows'], {}), '(N_columns, N_rows)\n', (741, 760), True, 'import matplotlib.pyplot as plt\n'), ((766, 807), 'networkx.fast_gnp_random_graph', 'nx.fast_gnp_random_graph', (['N', 'P[0]'], {'seed': '(0)'}), '(N, P[0], seed=0)\n', (790, 807), True, 'import networkx as nx\n'), ((814, 833), 'networkx.spring_layout', 'nx.spring_layout', (['G'], {}), '(G)\n', (830, 833), True, 'import networkx as nx\n'), ((1138, 1169), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'hspace': '(0.3)'}), '(hspace=0.3)\n', (1157, 1169), True, 'import matplotlib.pyplot as plt\n'), ((1170, 1180), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1178, 1180), True, 'import matplotlib.pyplot as plt\n'), ((427, 491), 'networkx.draw_networkx_nodes', 'nx.draw_networkx_nodes', (['G'], {'pos': 'pos', 'node_color': 'node_color', 'ax': 'ax'}), '(G, pos=pos, node_color=node_color, ax=ax)\n', (449, 491), True, 'import networkx as nx\n'), ((584, 655), 'networkx.draw_networkx_edges', 'nx.draw_networkx_edges', (['G', 'pos'], {'edge_color': 'edge_color', 'alpha': '(0.8)', 'ax': 'ax'}), '(G, pos, edge_color=edge_color, alpha=0.8, ax=ax)\n', (606, 655), True, 'import networkx as nx\n'), ((908, 949), 'networkx.fast_gnp_random_graph', 'nx.fast_gnp_random_graph', (['N', 'P[c]'], {'seed': '(0)'}), '(N, P[c], seed=0)\n', (932, 949), True, 'import networkx as nx\n')]
|
import torch
import torch.nn.functional as F
def train(model,train_loader,test_loader,
optimizer,target_loss,test_losses,
num_steps,print_steps=10000):
model.train()
opt = optimizer(model.parameters())
device = next(model.parameters()).device
test_losslist = []
train_losslist = []
current_step = 0
while True:
for i, (x,y) in enumerate(train_loader):
current_step += 1
x = x.to(device)
y = y.to(device)
opt.zero_grad()
yhat = model(x)
lossvec = F.cross_entropy(yhat,y,reduction='none')
loss = target_loss(lossvec)
loss.backward()
opt.step()
if (current_step%print_steps == 0):
test_results = test(model,test_loader,test_losses)
train_results = test(model,train_loader,test_losses)
print(f'Steps: {current_step}/{num_steps} \t Test acc: {test_results[0]:.2f}', end='\r')
test_losslist.append(test_results)
train_losslist.append(train_results)
if current_step >= num_steps:
break
if current_step >= num_steps:
break
print(f'Train acc: {train_losslist[-1][0]:.2f}\t Test acc: {test_losslist[-1][0]:.2f}')
return torch.FloatTensor(train_losslist), torch.FloatTensor(test_losslist)
def test(model,loader,test_losses):
model.eval()
device = next(model.parameters()).device
total = len(loader.dataset)
correct = 0
count = 0
losslog = torch.zeros(total).to(device)
for i, (x,y) in enumerate(loader):
x = x.to(device)
y = y.to(device)
with torch.no_grad():
yhat = model(x)
_,pred = yhat.max(1)
losslog[count:count+len(x)] = F.cross_entropy(yhat,y,reduction='none')
correct += pred.eq(y).sum().item()
count += len(x)
losslist = []
losslist.append(correct/total*100.0)
for test_loss in test_losses:
losslist.append(test_loss(losslog))
model.train()
return losslist
|
[
"torch.zeros",
"torch.no_grad",
"torch.FloatTensor",
"torch.nn.functional.cross_entropy"
] |
[((1369, 1402), 'torch.FloatTensor', 'torch.FloatTensor', (['train_losslist'], {}), '(train_losslist)\n', (1386, 1402), False, 'import torch\n'), ((1404, 1436), 'torch.FloatTensor', 'torch.FloatTensor', (['test_losslist'], {}), '(test_losslist)\n', (1421, 1436), False, 'import torch\n'), ((1866, 1908), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['yhat', 'y'], {'reduction': '"""none"""'}), "(yhat, y, reduction='none')\n", (1881, 1908), True, 'import torch.nn.functional as F\n'), ((603, 645), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['yhat', 'y'], {'reduction': '"""none"""'}), "(yhat, y, reduction='none')\n", (618, 645), True, 'import torch.nn.functional as F\n'), ((1618, 1636), 'torch.zeros', 'torch.zeros', (['total'], {}), '(total)\n', (1629, 1636), False, 'import torch\n'), ((1750, 1765), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1763, 1765), False, 'import torch\n')]
|
# Copyright (c) 2014-2016, NVIDIA CORPORATION. All rights reserved.
from __future__ import absolute_import
from digits import test_utils
def test_caffe_imports():
test_utils.skipIfNotFramework('caffe')
import numpy # noqa
import google.protobuf # noqa
|
[
"digits.test_utils.skipIfNotFramework"
] |
[((171, 209), 'digits.test_utils.skipIfNotFramework', 'test_utils.skipIfNotFramework', (['"""caffe"""'], {}), "('caffe')\n", (200, 209), False, 'from digits import test_utils\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import copy
import datetime
import logging
import logging.handlers
import threading
from time import sleep
import Communication
import Time
import Algorithm, createDistanceMatrix
import Json
import SQLHandler
import url_constructer
one = SQLHandler.SQLHandler()
LOG_FORMAT = "%(name)2s %(levelname)2s %(asctime)2s - %(message)2s"
logging.basicConfig(filename='PythonServer.log', level=logging.DEBUG, format=LOG_FORMAT, filemode='w')
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
handler = logging.handlers.SocketHandler(host="", port=logging.handlers.DEFAULT_TCP_LOGGING_PORT)
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)
# parameters: day, schools
# format: str, list(int)
def run_thread(day, schools):
for i in schools:
group = one.build_time_pool(day, i)
for y in group:
locations = one.select_all_addresses(i, day, y)
vehicle_data, location_data, driver_indices, passenger_indices, drivers, passengers = one.locations(i, day, y)
# potential bug when requests to google distance matrix api are synchronized (DDoS attack)
# https://developers.google.com/maps/documentation/distance-matrix/web-service-best-practices#synchronized-requests
matrix, time_matrix = createDistanceMatrix.main(one.select_all_addresses(i, day, y))
routes, dropped_nodes, durations = Algorithm.main(vehicle_data, location_data, matrix, time_matrix)
routes_temp = copy.deepcopy(routes)
urls = url_constructer.construct_route_url(locations, routes_temp)
for u in urls:
print(u)
temp1, temp2 = Json.build_list(urls, routes, dropped_nodes, driver_indices, passenger_indices, drivers, passengers, day, y, durations)
filepath, filename = Json.fill_data_matrix(i, day, y, temp1, temp2)
Communication.sftp_upload(filepath, filename)
sleep(120)
def main():
days = {}
days['Sunday'] = 'monday'
days['Monday'] = 'tuesday'
days['Tuesday'] = 'wednesday'
days['Wednesday'] = 'thursday'
days['Thursday'] = 'friday'
days['Friday'] = None
days['Saturday'] = None
deadline = datetime.time(20, 0, 0)
while True:
one = SQLHandler()
threads = []
timezones = one.build_timezone_pool()
already_run = False
logger.debug("here")
for t in timezones:
logger.info(t)
time_in_timezone = Time.add_timezone(deadline, t)
if days[datetime.date.today().strftime("%A")] is not None and Time.time_in_range(deadline, datetime.time(20, 2, 0), time_in_timezone) and already_run is False:
day = days[datetime.date.today().strftime("%A")]
schools = one.build_school_pool(t)
thread = threading.Thread(target=run_thread, args=(day, schools))
thread.start()
threads.append(thread)
already_run = True
if __name__ == '__main__':
main()
|
[
"threading.Thread",
"copy.deepcopy",
"Time.add_timezone",
"logging.basicConfig",
"Communication.sftp_upload",
"logging.handlers.SocketHandler",
"Algorithm.main",
"Json.build_list",
"datetime.date.today",
"time.sleep",
"Json.fill_data_matrix",
"SQLHandler.SQLHandler",
"datetime.time",
"url_constructer.construct_route_url",
"logging.getLogger",
"SQLHandler"
] |
[((287, 310), 'SQLHandler.SQLHandler', 'SQLHandler.SQLHandler', ([], {}), '()\n', (308, 310), False, 'import SQLHandler\n'), ((380, 486), 'logging.basicConfig', 'logging.basicConfig', ([], {'filename': '"""PythonServer.log"""', 'level': 'logging.DEBUG', 'format': 'LOG_FORMAT', 'filemode': '"""w"""'}), "(filename='PythonServer.log', level=logging.DEBUG,\n format=LOG_FORMAT, filemode='w')\n", (399, 486), False, 'import logging\n'), ((492, 511), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (509, 511), False, 'import logging\n'), ((553, 645), 'logging.handlers.SocketHandler', 'logging.handlers.SocketHandler', ([], {'host': '""""""', 'port': 'logging.handlers.DEFAULT_TCP_LOGGING_PORT'}), "(host='', port=logging.handlers.\n DEFAULT_TCP_LOGGING_PORT)\n", (583, 645), False, 'import logging\n'), ((1831, 1841), 'time.sleep', 'sleep', (['(120)'], {}), '(120)\n', (1836, 1841), False, 'from time import sleep\n'), ((2075, 2098), 'datetime.time', 'datetime.time', (['(20)', '(0)', '(0)'], {}), '(20, 0, 0)\n', (2088, 2098), False, 'import datetime\n'), ((2121, 2133), 'SQLHandler', 'SQLHandler', ([], {}), '()\n', (2131, 2133), False, 'import SQLHandler\n'), ((1367, 1431), 'Algorithm.main', 'Algorithm.main', (['vehicle_data', 'location_data', 'matrix', 'time_matrix'], {}), '(vehicle_data, location_data, matrix, time_matrix)\n', (1381, 1431), False, 'import Algorithm, createDistanceMatrix\n'), ((1449, 1470), 'copy.deepcopy', 'copy.deepcopy', (['routes'], {}), '(routes)\n', (1462, 1470), False, 'import copy\n'), ((1481, 1540), 'url_constructer.construct_route_url', 'url_constructer.construct_route_url', (['locations', 'routes_temp'], {}), '(locations, routes_temp)\n', (1516, 1540), False, 'import url_constructer\n'), ((1590, 1713), 'Json.build_list', 'Json.build_list', (['urls', 'routes', 'dropped_nodes', 'driver_indices', 'passenger_indices', 'drivers', 'passengers', 'day', 'y', 'durations'], {}), '(urls, routes, dropped_nodes, driver_indices,\n passenger_indices, 
drivers, passengers, day, y, durations)\n', (1605, 1713), False, 'import Json\n'), ((1734, 1780), 'Json.fill_data_matrix', 'Json.fill_data_matrix', (['i', 'day', 'y', 'temp1', 'temp2'], {}), '(i, day, y, temp1, temp2)\n', (1755, 1780), False, 'import Json\n'), ((1784, 1829), 'Communication.sftp_upload', 'Communication.sftp_upload', (['filepath', 'filename'], {}), '(filepath, filename)\n', (1809, 1829), False, 'import Communication\n'), ((2296, 2326), 'Time.add_timezone', 'Time.add_timezone', (['deadline', 't'], {}), '(deadline, t)\n', (2313, 2326), False, 'import Time\n'), ((2595, 2651), 'threading.Thread', 'threading.Thread', ([], {'target': 'run_thread', 'args': '(day, schools)'}), '(target=run_thread, args=(day, schools))\n', (2611, 2651), False, 'import threading\n'), ((2421, 2444), 'datetime.time', 'datetime.time', (['(20)', '(2)', '(0)'], {}), '(20, 2, 0)\n', (2434, 2444), False, 'import datetime\n'), ((2505, 2526), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (2524, 2526), False, 'import datetime\n'), ((2338, 2359), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (2357, 2359), False, 'import datetime\n')]
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
from pymongo import MongoClient
from elasticsearch import Elasticsearch
from elasticsearch.helpers import bulk
import datetime
import json
from fuzzywuzzy import fuzz
# 一次同步的数据量,批量同步
syncCountPer = 100000
# Es 数据库地址
es_url = 'localhost:9200'
# mongodb 数据库地址
mongo_url = 'localhost:27017'
# mongod 需要同步的数据库名
DB = 'song'
# mongod 需要同步的表名
COLLECTION = 'songLyricFull'
es = Elasticsearch(es_url, port=9200)
conn = MongoClient(mongo_url, 27017)
def connect_db():
count = 0
db = conn['Song']
sl = db['songLyricFull']
syncDataLst = []
mongoRecordRes = sl.find()
for record in mongoRecordRes:
count += 1
# 因为mongodb和Es中,对于数据类型的支持是有些差异的,所以在数据同步时,需要对某些数据类型和数据做一些加工
# 删掉 url 这个字段
record.pop('url', '')
# Es中不支持 float('inf') 这个数据, 也就是浮点数的最大值
#if record['rank'] == float('inf'):
#record['rank'] = 999999999999
syncDataLst.append({
"_index": DB, # mongod数据库 == Es的index
"_type": COLLECTION, # mongod表名 == Es的type
"_id": str(record.pop('_id')),
"_source": record,
})
if len(syncDataLst) == syncCountPer:
# 批量同步到Es中,就是发送http请求一样,数据量越大request_timeout越要拉长
bulk(es, syncDataLst, request_timeout=180)
# 清空数据列表
syncDataLst[:] = []
print(f"Had sync {count} records at {datetime.datetime.now()}")
# 同步剩余部分
if syncDataLst:
bulk(es, syncDataLst, request_timeout=180)
print(f"Had sync {count} records rest at {datetime.datetime.now()}")
def search(lyric):
dsl = {
'query': {
"match_phrase": {'lyric': lyric}
}
}
dsl2={
'query': {"match": {'lyric': lyric}
}
}
result = es.search(index=DB, doc_type=COLLECTION, body=dsl)
if (len(result['hits']['hits']) == 0):
result = es.search(index=DB, doc_type=COLLECTION, body=dsl2)
if (len(result['hits']['hits']) > 0):
lyricF = result['hits']['hits'][0]['_source']['lyric']
lyricF = lyricF.split('[')
for i in range(len(lyricF)):
lyricC = lyricF[i]
if(']' in lyricC and not lyricC.endswith(']')):
lyricF[i] = lyricC[lyricC.index(']')+1:]
lyricF = [i for i in lyricF if i != '\n']
lyricF_r = []
for j in range(len(lyricF)):
ratio = fuzz.ratio(lyric, lyricF[j])
lyricF_r.append(ratio)
if(lyricF_r.index(max(lyricF_r)) == len(lyricF_r)-1):
lyricF_r[lyricF_r.index(max(lyricF_r))
] = lyricF_r[lyricF_r.index(min(lyricF_r))]
res = lyricF[lyricF_r.index(max(lyricF_r))]
res.replace(' ', '')
if(res.startswith(lyric) and len(res)-1 > len(lyric)):
res = res.replace(lyric, '',1)
return res
else:
res = lyricF[lyricF_r.index(max(lyricF_r))+1]
if(lyricF[lyricF_r.index(max(lyricF_r))] != lyric+'\n'):
res = lyricF[lyricF_r.index(max(lyricF_r))]+res
if(''' in res):
res = res.replace(''', "\'")
print(res)
return res
return "无匹配"
#connect_db()
search('更怕你永远停留在这里')
|
[
"elasticsearch.Elasticsearch",
"fuzzywuzzy.fuzz.ratio",
"pymongo.MongoClient",
"elasticsearch.helpers.bulk",
"datetime.datetime.now"
] |
[((412, 444), 'elasticsearch.Elasticsearch', 'Elasticsearch', (['es_url'], {'port': '(9200)'}), '(es_url, port=9200)\n', (425, 444), False, 'from elasticsearch import Elasticsearch\n'), ((452, 481), 'pymongo.MongoClient', 'MongoClient', (['mongo_url', '(27017)'], {}), '(mongo_url, 27017)\n', (463, 481), False, 'from pymongo import MongoClient\n'), ((1502, 1544), 'elasticsearch.helpers.bulk', 'bulk', (['es', 'syncDataLst'], {'request_timeout': '(180)'}), '(es, syncDataLst, request_timeout=180)\n', (1506, 1544), False, 'from elasticsearch.helpers import bulk\n'), ((1281, 1323), 'elasticsearch.helpers.bulk', 'bulk', (['es', 'syncDataLst'], {'request_timeout': '(180)'}), '(es, syncDataLst, request_timeout=180)\n', (1285, 1323), False, 'from elasticsearch.helpers import bulk\n'), ((2438, 2466), 'fuzzywuzzy.fuzz.ratio', 'fuzz.ratio', (['lyric', 'lyricF[j]'], {}), '(lyric, lyricF[j])\n', (2448, 2466), False, 'from fuzzywuzzy import fuzz\n'), ((1595, 1618), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1616, 1618), False, 'import datetime\n'), ((1426, 1449), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1447, 1449), False, 'import datetime\n')]
|
# Generated by Django 2.1.2 on 2018-10-05 14:22
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('reservations', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='bio',
field=models.TextField(blank=True, max_length=500, null=True),
),
migrations.AlterField(
model_name='profile',
name='phone',
field=models.CharField(blank=True, max_length=30, null=True),
),
migrations.AlterField(
model_name='profile',
name='room_no',
field=models.CharField(blank=True, max_length=30, null=True),
),
]
|
[
"django.db.models.CharField",
"django.db.models.TextField"
] |
[((328, 383), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'max_length': '(500)', 'null': '(True)'}), '(blank=True, max_length=500, null=True)\n', (344, 383), False, 'from django.db import migrations, models\n'), ((505, 559), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(30)', 'null': '(True)'}), '(blank=True, max_length=30, null=True)\n', (521, 559), False, 'from django.db import migrations, models\n'), ((683, 737), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(30)', 'null': '(True)'}), '(blank=True, max_length=30, null=True)\n', (699, 737), False, 'from django.db import migrations, models\n')]
|
import unittest
from __init__ import DotMap
class ReadmeTestCase(unittest.TestCase):
def test_basic_use(self):
m = DotMap()
self.assertIsInstance(m, DotMap)
m.name = 'Joe'
self.assertEqual(m.name, 'Joe')
self.assertEqual('Hello ' + m.name, 'Hello Joe')
self.assertIsInstance(m, dict)
self.assertTrue(issubclass(m.__class__, dict))
self.assertEqual(m['name'], 'Joe')
m.name += ' Smith'
m['name'] += ' Jr'
self.assertEqual(m.name, '<NAME> Jr')
def test_automatic_hierarchy(self):
m = DotMap()
m.people.steve.age = 31
self.assertEqual(m.people.steve.age, 31)
def test_key_init(self):
m = DotMap(a=1, b=2)
self.assertEqual(m.a, 1)
self.assertEqual(m.b, 2)
def test_dict_conversion(self):
d = {'a': 1, 'b': 2, 'c': {'d': 3, 'e': 4}}
m = DotMap(d)
self.assertEqual(m.a, 1)
self.assertEqual(m.b, 2)
d2 = m.toDict()
self.assertIsInstance(d2, dict)
self.assertNotIsInstance(d2, DotMap)
self.assertEqual(len(d2), 3)
self.assertEqual(d2['a'], 1)
self.assertEqual(d2['b'], 2)
self.assertNotIsInstance(d2['c'], DotMap)
self.assertEqual(len(d2['c']), 2)
self.assertEqual(d2['c']['d'], 3)
self.assertEqual(d2['c']['e'], 4)
def test_ordered_iteration(self):
m = DotMap()
m.people.john.age = 32
m.people.john.job = 'programmer'
m.people.mary.age = 24
m.people.mary.job = 'designer'
m.people.dave.age = 55
m.people.dave.job = 'manager'
expected = [
('john', 32, 'programmer'),
('mary', 24, 'designer'),
('dave', 55, 'manager'),
]
for i, (k, v) in enumerate(m.people.items()):
self.assertEqual(expected[i][0], k)
self.assertEqual(expected[i][1], v.age)
self.assertEqual(expected[i][2], v.job)
class BasicTestCase(unittest.TestCase):
def setUp(self):
self.d = {
'a': 1,
'b': 2,
'subD': {'c': 3, 'd': 4}
}
def test_dict_init(self):
m = DotMap(self.d)
self.assertIsInstance(m, DotMap)
self.assertEqual(m.a, 1)
self.assertEqual(m.b, 2)
self.assertIsInstance(m.subD, DotMap)
self.assertEqual(m.subD.c, 3)
self.assertEqual(m.subD.d, 4)
def test_copy(self):
m = DotMap(self.d)
dm_copy = m.copy()
self.assertIsInstance(dm_copy, DotMap)
self.assertEqual(dm_copy.a, 1)
self.assertEqual(dm_copy.b, 2)
self.assertIsInstance(dm_copy.subD, DotMap)
self.assertEqual(dm_copy.subD.c, 3)
self.assertEqual(dm_copy.subD.d, 4)
def test_fromkeys(self):
m = DotMap.fromkeys([1, 2, 3], 'a')
self.assertEqual(len(m), 3)
self.assertEqual(m[1], 'a')
self.assertEqual(m[2], 'a')
self.assertEqual(m[3], 'a')
def test_dict_functionality(self):
m = DotMap(self.d)
self.assertEqual(m.get('a'), 1)
self.assertEqual(m.get('f', 33), 33)
self.assertIsNone(m.get('f'))
self.assertTrue(m.has_key('a'))
self.assertFalse(m.has_key('f'))
m.update([('rat', 5), ('bum', 4)], dog=7, cat=9)
self.assertEqual(m.rat, 5)
self.assertEqual(m.bum, 4)
self.assertEqual(m.dog, 7)
self.assertEqual(m.cat, 9)
m.update({'lol': 1, 'ba': 2})
self.assertEqual(m.lol, 1)
self.assertEqual(m.ba, 2)
ordered_keys = [
'a',
'b',
'subD',
'rat',
'bum',
'dog',
'cat',
'lol',
'ba',
]
for i, k in enumerate(m):
self.assertEqual(ordered_keys[i], k)
self.assertTrue('a' in m)
self.assertFalse('c' in m)
ordered_values = [1, 2, DotMap(c=3, d=4), 5, 4, 7, 9, 1, 2]
for i, v in enumerate(m.values()):
self.assertEqual(ordered_values[i], v)
self.assertTrue('c' in m.subD)
self.assertTrue(len(m.subD), 2)
del m.subD.c
self.assertFalse('c' in m.subD)
self.assertTrue(len(m.subD), 1)
def test_list_comprehension(self):
parentDict = {
'name': 'Father1',
'children': [
{'name': 'Child1'},
{'name': 'Child2'},
{'name': 'Child3'},
]
}
parent = DotMap(parentDict)
ordered_names = ['Child1', 'Child2', 'Child3']
comp = [x.name for x in parent.children]
self.assertEqual(ordered_names, comp)
class PickleTestCase(unittest.TestCase):
def setUp(self):
self.d = {
'a': 1,
'b': 2,
'subD': {'c': 3, 'd': 4}
}
def test(self):
import pickle
pm = DotMap(self.d)
s = pickle.dumps(pm)
m = pickle.loads(s)
self.assertIsInstance(m, DotMap)
self.assertEqual(m.a, 1)
self.assertEqual(m.b, 2)
self.assertIsInstance(m.subD, DotMap)
self.assertEqual(m.subD.c, 3)
self.assertEqual(m.subD.d, 4)
class EmptyTestCase(unittest.TestCase):
def test(self):
m = DotMap()
self.assertTrue(m.empty())
m.a = 1
self.assertFalse(m.empty())
self.assertTrue(m.b.empty())
self.assertIsInstance(m.b, DotMap)
class DynamicTestCase(unittest.TestCase):
def test(self):
m = DotMap()
m.still.works
m.sub.still.works
nonDynamic = DotMap(_dynamic=False)
def assignNonDynamic():
nonDynamic.no
self.assertRaises(KeyError, assignNonDynamic)
nonDynamicWithInit = DotMap(m, _dynamic=False)
nonDynamicWithInit.still.works
nonDynamicWithInit.sub.still.works
def assignNonDynamicWithInit():
nonDynamicWithInit.no.creation
self.assertRaises(KeyError, assignNonDynamicWithInit)
class RecursiveTestCase(unittest.TestCase):
def test(self):
m = DotMap()
m.a = 5
m_id = id(m)
m.recursive = m
self.assertEqual(id(m.recursive.recursive.recursive), m_id)
self.assertEqual(str(m), '''DotMap(a=5, recursive=DotMap(...))''')
d = m.toDict()
d_id = id(d)
d['a'] = 5
d['recursive'] = d
d['recursive']['recursive']['recursive']
self.assertEqual(id(d['recursive']['recursive']['recursive']), d_id)
self.assertEqual(str(d), '''{'a': 5, 'recursive': {...}}''')
m2 = DotMap(d)
m2_id = id(m2)
self.assertEqual(id(m2.recursive.recursive.recursive), m2_id)
self.assertEqual(str(m2), '''DotMap(a=5, recursive=DotMap(...))''')
class kwargTestCase(unittest.TestCase):
def test(self):
a = {'1': 'a', '2': 'b'}
b = DotMap(a, _dynamic=False)
def capture(**kwargs):
return kwargs
self.assertEqual(a, capture(**b.toDict()))
class DeepCopyTestCase(unittest.TestCase):
def test(self):
import copy
original = DotMap()
original.a = 1
original.b = 3
shallowCopy = original
deepCopy = copy.deepcopy(original)
self.assertEqual(original, shallowCopy)
self.assertEqual(id(original), id(shallowCopy))
self.assertEqual(original, deepCopy)
self.assertNotEqual(id(original), id(deepCopy))
original.a = 2
self.assertEqual(original, shallowCopy)
self.assertNotEqual(original, deepCopy)
def test_order_preserved(self):
import copy
original = DotMap()
original.a = 1
original.b = 2
original.c = 3
deepCopy = copy.deepcopy(original)
orderedPairs = []
for k, v in original.iteritems():
orderedPairs.append((k, v))
for i, (k, v) in enumerate(deepCopy.iteritems()):
self.assertEqual(k, orderedPairs[i][0])
self.assertEqual(v, orderedPairs[i][1])
class DotMapTupleToDictTestCase(unittest.TestCase):
def test(self):
m = DotMap({'a': 1, 'b': (11, 22, DotMap({'c': 3}))})
d = m.toDict()
self.assertEqual(d, {'a': 1, 'b': (11, 22, {'c': 3})})
class OrderedDictInitTestCase(unittest.TestCase):
def test(self):
from collections import OrderedDict
o = OrderedDict([('a', 1), ('b', 2), ('c', [OrderedDict([('d', 3)])])])
m = DotMap(o)
self.assertIsInstance(m, DotMap)
self.assertIsInstance(m.c[0], DotMap)
class EmptyAddTestCase(unittest.TestCase):
def test_base(self):
m = DotMap()
for i in range(7):
m.counter += 1
self.assertNotIsInstance(m.counter, DotMap)
self.assertIsInstance(m.counter, int)
self.assertEqual(m.counter, 7)
def test_various(self):
m = DotMap()
m.a.label = 'test'
m.a.counter += 2
self.assertIsInstance(m.a, DotMap)
self.assertEqual(m.a.label, 'test')
self.assertNotIsInstance(m.a.counter, DotMap)
self.assertIsInstance(m.a.counter, int)
self.assertEqual(m.a.counter, 2)
m.a.counter += 1
self.assertEqual(m.a.counter, 3)
def test_proposal(self):
my_counters = DotMap()
pages = [
'once upon a time',
'there was like this super awesome prince',
'and there was this super rad princess',
'and they had a mutually respectful, egalitarian relationship',
'the end'
]
for stuff in pages:
my_counters.page += 1
self.assertIsInstance(my_counters, DotMap)
self.assertNotIsInstance(my_counters.page, DotMap)
self.assertIsInstance(my_counters.page, int)
self.assertEqual(my_counters.page, 5)
def test_string_addition(self):
m = DotMap()
m.quote += 'lions'
m.quote += ' and tigers'
m.quote += ' and bears'
m.quote += ', oh my'
self.assertEqual(m.quote, 'lions and tigers and bears, oh my')
def test_strange_addition(self):
m = DotMap()
m += "I'm a string now"
self.assertIsInstance(m, str)
self.assertNotIsInstance(m, DotMap)
self.assertEqual(m, "I'm a string now")
m2 = DotMap() + "I'll replace that DotMap"
self.assertEqual(m2, "I'll replace that DotMap")
def test_protected_hierarchy(self):
m = DotMap()
m.protected_parent.key = 'value'
def protectedFromAddition():
m.protected_parent += 1
self.assertRaises(TypeError, protectedFromAddition)
def test_type_error_raised(self):
m = DotMap()
def badAddition():
m.a += 1
m.a += ' and tigers'
self.assertRaises(TypeError, badAddition)
|
[
"pickle.loads",
"copy.deepcopy",
"__init__.DotMap",
"collections.OrderedDict",
"__init__.DotMap.fromkeys",
"pickle.dumps"
] |
[((129, 137), '__init__.DotMap', 'DotMap', ([], {}), '()\n', (135, 137), False, 'from __init__ import DotMap\n'), ((589, 597), '__init__.DotMap', 'DotMap', ([], {}), '()\n', (595, 597), False, 'from __init__ import DotMap\n'), ((721, 737), '__init__.DotMap', 'DotMap', ([], {'a': '(1)', 'b': '(2)'}), '(a=1, b=2)\n', (727, 737), False, 'from __init__ import DotMap\n'), ((905, 914), '__init__.DotMap', 'DotMap', (['d'], {}), '(d)\n', (911, 914), False, 'from __init__ import DotMap\n'), ((1428, 1436), '__init__.DotMap', 'DotMap', ([], {}), '()\n', (1434, 1436), False, 'from __init__ import DotMap\n'), ((2212, 2226), '__init__.DotMap', 'DotMap', (['self.d'], {}), '(self.d)\n', (2218, 2226), False, 'from __init__ import DotMap\n'), ((2494, 2508), '__init__.DotMap', 'DotMap', (['self.d'], {}), '(self.d)\n', (2500, 2508), False, 'from __init__ import DotMap\n'), ((2843, 2874), '__init__.DotMap.fromkeys', 'DotMap.fromkeys', (['[1, 2, 3]', '"""a"""'], {}), "([1, 2, 3], 'a')\n", (2858, 2874), False, 'from __init__ import DotMap\n'), ((3071, 3085), '__init__.DotMap', 'DotMap', (['self.d'], {}), '(self.d)\n', (3077, 3085), False, 'from __init__ import DotMap\n'), ((4559, 4577), '__init__.DotMap', 'DotMap', (['parentDict'], {}), '(parentDict)\n', (4565, 4577), False, 'from __init__ import DotMap\n'), ((4954, 4968), '__init__.DotMap', 'DotMap', (['self.d'], {}), '(self.d)\n', (4960, 4968), False, 'from __init__ import DotMap\n'), ((4981, 4997), 'pickle.dumps', 'pickle.dumps', (['pm'], {}), '(pm)\n', (4993, 4997), False, 'import pickle\n'), ((5010, 5025), 'pickle.loads', 'pickle.loads', (['s'], {}), '(s)\n', (5022, 5025), False, 'import pickle\n'), ((5329, 5337), '__init__.DotMap', 'DotMap', ([], {}), '()\n', (5335, 5337), False, 'from __init__ import DotMap\n'), ((5581, 5589), '__init__.DotMap', 'DotMap', ([], {}), '()\n', (5587, 5589), False, 'from __init__ import DotMap\n'), ((5659, 5681), '__init__.DotMap', 'DotMap', ([], {'_dynamic': '(False)'}), '(_dynamic=False)\n', (5665, 
5681), False, 'from __init__ import DotMap\n'), ((5825, 5850), '__init__.DotMap', 'DotMap', (['m'], {'_dynamic': '(False)'}), '(m, _dynamic=False)\n', (5831, 5850), False, 'from __init__ import DotMap\n'), ((6157, 6165), '__init__.DotMap', 'DotMap', ([], {}), '()\n', (6163, 6165), False, 'from __init__ import DotMap\n'), ((6668, 6677), '__init__.DotMap', 'DotMap', (['d'], {}), '(d)\n', (6674, 6677), False, 'from __init__ import DotMap\n'), ((6954, 6979), '__init__.DotMap', 'DotMap', (['a'], {'_dynamic': '(False)'}), '(a, _dynamic=False)\n', (6960, 6979), False, 'from __init__ import DotMap\n'), ((7193, 7201), '__init__.DotMap', 'DotMap', ([], {}), '()\n', (7199, 7201), False, 'from __init__ import DotMap\n'), ((7298, 7321), 'copy.deepcopy', 'copy.deepcopy', (['original'], {}), '(original)\n', (7311, 7321), False, 'import copy\n'), ((7722, 7730), '__init__.DotMap', 'DotMap', ([], {}), '()\n', (7728, 7730), False, 'from __init__ import DotMap\n'), ((7819, 7842), 'copy.deepcopy', 'copy.deepcopy', (['original'], {}), '(original)\n', (7832, 7842), False, 'import copy\n'), ((8543, 8552), '__init__.DotMap', 'DotMap', (['o'], {}), '(o)\n', (8549, 8552), False, 'from __init__ import DotMap\n'), ((8722, 8730), '__init__.DotMap', 'DotMap', ([], {}), '()\n', (8728, 8730), False, 'from __init__ import DotMap\n'), ((8963, 8971), '__init__.DotMap', 'DotMap', ([], {}), '()\n', (8969, 8971), False, 'from __init__ import DotMap\n'), ((9372, 9380), '__init__.DotMap', 'DotMap', ([], {}), '()\n', (9378, 9380), False, 'from __init__ import DotMap\n'), ((9968, 9976), '__init__.DotMap', 'DotMap', ([], {}), '()\n', (9974, 9976), False, 'from __init__ import DotMap\n'), ((10219, 10227), '__init__.DotMap', 'DotMap', ([], {}), '()\n', (10225, 10227), False, 'from __init__ import DotMap\n'), ((10551, 10559), '__init__.DotMap', 'DotMap', ([], {}), '()\n', (10557, 10559), False, 'from __init__ import DotMap\n'), ((10786, 10794), '__init__.DotMap', 'DotMap', ([], {}), '()\n', (10792, 10794), 
False, 'from __init__ import DotMap\n'), ((3980, 3996), '__init__.DotMap', 'DotMap', ([], {'c': '(3)', 'd': '(4)'}), '(c=3, d=4)\n', (3986, 3996), False, 'from __init__ import DotMap\n'), ((10403, 10411), '__init__.DotMap', 'DotMap', ([], {}), '()\n', (10409, 10411), False, 'from __init__ import DotMap\n'), ((8229, 8245), '__init__.DotMap', 'DotMap', (["{'c': 3}"], {}), "({'c': 3})\n", (8235, 8245), False, 'from __init__ import DotMap\n'), ((8503, 8526), 'collections.OrderedDict', 'OrderedDict', (["[('d', 3)]"], {}), "([('d', 3)])\n", (8514, 8526), False, 'from collections import OrderedDict\n')]
|
import base64
import importlib.util
from hashlib import md5
def secret_hash(data):
"""
Create a secret hash from data.
"""
strings = []
for key, value in sorted(data.items()):
strings.append(key)
try:
if isinstance(value, dict):
value = sorted(value.items())
if isinstance(value, list):
value = tuple(value)
data = hash(value)
if data != -1:
strings.append(str(data))
except TypeError:
pass
data = "".join(strings)
hash_value = md5(data.encode("utf8")).digest()
return base64.b85encode(hash_value).decode("ascii")
def module_exists(mod, package=None):
spec = importlib.util.find_spec(mod, package=package)
return spec is not None
def module_path(mod, package=None):
spec = importlib.util.find_spec(mod, package=package)
return spec.origin
|
[
"base64.b85encode"
] |
[((635, 663), 'base64.b85encode', 'base64.b85encode', (['hash_value'], {}), '(hash_value)\n', (651, 663), False, 'import base64\n')]
|
from wagtail.core import hooks
def allow_blindly(tag):
return tag
# See: http://docs.wagtail.io/en/v1.6/reference/hooks.html#construct-whitelister-element-rules
@hooks.register('construct_whitelister_element_rules')
def whitelister_element_rules():
rules = {}
# Tables
rules.update(dict.fromkeys(['table', 'thead', 'tbody', 'tfoot', 'tr', 'th', 'td'], allow_blindly))
# Divs, spans, code and anchors
rules.update(dict.fromkeys(['div', 'span', 'a', 'code', 'pre', 'blockquote', 'section'], allow_blindly))
return rules
|
[
"wagtail.core.hooks.register"
] |
[((169, 222), 'wagtail.core.hooks.register', 'hooks.register', (['"""construct_whitelister_element_rules"""'], {}), "('construct_whitelister_element_rules')\n", (183, 222), False, 'from wagtail.core import hooks\n')]
|
import gym
import vision_arena
import time
import pybullet as p
import pybullet_data
import cv2
if __name__=="__main__":
env = gym.make("vision_arena-v0")
x=0
while True:
p.stepSimulation()
env.move_husky(5, 5, 5, 5)
if x==100:
img = env.camera_feed()
cv2.imwrite('media/testrun'+str(x)+'.png', img)
x+=1
time.sleep(100)
|
[
"gym.make",
"pybullet.stepSimulation",
"time.sleep"
] |
[((132, 159), 'gym.make', 'gym.make', (['"""vision_arena-v0"""'], {}), "('vision_arena-v0')\n", (140, 159), False, 'import gym\n'), ((378, 393), 'time.sleep', 'time.sleep', (['(100)'], {}), '(100)\n', (388, 393), False, 'import time\n'), ((192, 210), 'pybullet.stepSimulation', 'p.stepSimulation', ([], {}), '()\n', (208, 210), True, 'import pybullet as p\n')]
|
import time
class BruteForce:
def __init__(self, mainStr, searchStr):
self._mainStr=mainStr
self._searchStr=searchStr
def search(self)->int:
searchLen=len(self._searchStr)
mainLen=len(self._mainStr)
if mainLen==0 or mainLen<searchLen:
return -1
i=0
while i<=mainLen-searchLen:
if self._mainStr[i:i+searchLen]==self._searchStr:
return i
i=i+1
return -1
def test_BruteForce():
mainStr='神光财经表示,今日上午沪指震荡下行,小幅下跌,农业种植板块逆势大涨,领涨两市,神农科技等4股涨停,稀土板块继续大涨,板块掀涨停潮,金力永磁等9股涨停,带动小金属、有色、黄金等板块大涨,建筑装饰板块继续保持强势,建研院等7股涨停,5G概念表现活跃,欣天科技等6股涨停,分散染料领跌两市,汽车整车、白酒、医药、家电等白马股集体下跌,跌幅居前。从盘面表现看,股市目前在低位震荡盘整,下跌空间有限,所以大家不必太过担心,短期随时都可能会走出一轮反弹行情,所以大家可以选择优质个股逢低买入,但是要控制好仓位,耐心持股待涨。'
searchStr='持股XX待涨。'
start=time.time()
bf=BruteForce(mainStr, searchStr)
end=time.time()
print(end-start)
index=bf.search()
print('查找到位置是:'+str(index))
if __name__=="__main__":
test_BruteForce()
|
[
"time.time"
] |
[((812, 823), 'time.time', 'time.time', ([], {}), '()\n', (821, 823), False, 'import time\n'), ((870, 881), 'time.time', 'time.time', ([], {}), '()\n', (879, 881), False, 'import time\n')]
|
import os
import sys
try:
from urllib.request import urlretrieve
except ImportError:
from urllib import urlretrieve
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import tempfile
import time
import logging
from tmdb_api import tmdb
from mutagen.mp4 import MP4, MP4Cover
from extensions import valid_output_extensions, valid_poster_extensions, tmdb_api_key
class tmdb_mp4:
def __init__(self, imdbid, tmdbid=False, original=None, language='en', logger=None):
if logger:
self.log = logger
else:
self.log = logging.getLogger(__name__)
if tmdbid:
self.log.debug("TMDB ID: %s." % tmdbid)
else:
self.log.debug("IMDB ID: %s." % imdbid)
if tmdbid is False and imdbid.startswith('tt') is not True:
imdbid = 'tt' + imdbid
self.log.debug("Correcting imdbid to %s." % imdbid)
self.imdbid = imdbid
self.original = original
for i in range(3):
try:
tmdb.configure(tmdb_api_key, language=language)
self.movie = tmdb.Movie(imdbid)
self.HD = None
self.title = self.movie.get_title()
self.genre = self.movie.get_genres()
self.shortdescription = self.movie.get_tagline()
self.description = self.movie.get_overview()
self.date = self.movie.get_release_date()
# Generate XML tags for Actors/Writers/Directors/Producers
self.xml = self.xmlTags()
break
except Exception as e:
self.log.exception("Failed to connect to tMDB, trying again in 20 seconds.")
time.sleep(20)
def writeTags(self, mp4Path, artwork=True, thumbnail=False):
self.log.info("Tagging file: %s." % mp4Path)
ext = os.path.splitext(mp4Path)[1][1:]
if ext not in valid_output_extensions:
self.log.error("File is not the correct format.")
sys.exit()
video = MP4(mp4Path)
try:
video.delete()
except IOError:
self.log.debug("Unable to clear original tags, attempting to proceed.")
video["\xa9nam"] = self.title # Movie title
video["desc"] = self.shortdescription # Short description
video["ldes"] = self.description # Long description
video["\xa9day"] = self.date # Year
video["stik"] = [9] # Movie iTunes category
if self.HD is not None:
video["hdvd"] = self.HD
if self.genre is not None:
genre = None
for g in self.genre:
if genre is None:
genre = g['name']
break
# else:
# genre += ", " + g['name']
video["\xa9gen"] = genre # Genre(s)
video["----:com.apple.iTunes:iTunMOVI"] = self.xml # XML - see xmlTags method
rating = self.rating()
if rating is not None:
video["----:com.apple.iTunes:iTunEXTC"] = rating
if artwork:
path = self.getArtwork(mp4Path)
if path is not None:
cover = open(path, 'rb').read()
if path.endswith('png'):
video["covr"] = [MP4Cover(cover, MP4Cover.FORMAT_PNG)] # png poster
else:
video["covr"] = [MP4Cover(cover, MP4Cover.FORMAT_JPEG)] # jpeg poster
if self.original:
video["\xa9too"] = "MDH:" + os.path.basename(self.original)
else:
video["\xa9too"] = "MDH:" + os.path.basename(mp4Path)
for i in range(3):
try:
self.log.info("Trying to write tags.")
video.save()
self.log.info("Tags written successfully.")
break
except IOError as e:
self.log.info("Exception: %s" % e)
self.log.exception("There was a problem writing the tags. Retrying.")
time.sleep(5)
def rating(self):
ratings = {'G': '100',
'PG': '200',
'PG-13': '300',
'R': '400',
'NC-17': '500'}
output = None
mpaa = self.movie.get_mpaa_rating()
if mpaa in ratings:
numerical = ratings[mpaa]
output = 'mpaa|' + mpaa.capitalize() + '|' + numerical + '|'
return str(output)
def setHD(self, width, height):
if width >= 1900 or height >= 1060:
self.HD = [2]
elif width >= 1260 or height >= 700:
self.HD = [1]
else:
self.HD = [0]
def xmlTags(self):
# constants
header = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\"><plist version=\"1.0\"><dict>\n"
castheader = "<key>cast</key><array>\n"
writerheader = "<key>screenwriters</key><array>\n"
directorheader = "<key>directors</key><array>\n"
producerheader = "<key>producers</key><array>\n"
subfooter = "</array>\n"
footer = "</dict></plist>\n"
output = StringIO()
output.write(header)
# Write actors
output.write(castheader)
for a in self.movie.get_cast()[:5]:
if a is not None:
output.write("<dict><key>name</key><string>%s</string></dict>\n" % a['name'].encode('ascii', 'ignore'))
output.write(subfooter)
# Write screenwriters
output.write(writerheader)
for w in self.movie.get_writers()[:5]:
if w is not None:
output.write("<dict><key>name</key><string>%s</string></dict>\n" % w['name'].encode('ascii', 'ignore'))
output.write(subfooter)
# Write directors
output.write(directorheader)
for d in self.movie.get_directors()[:5]:
if d is not None:
output.write("<dict><key>name</key><string>%s</string></dict>\n" % d['name'].encode('ascii', 'ignore'))
output.write(subfooter)
# Write producers
output.write(producerheader)
for p in self.movie.get_producers()[:5]:
if p is not None:
output.write("<dict><key>name</key><string>%s</string></dict>\n" % p['name'].encode('ascii', 'ignore'))
output.write(subfooter)
# Write final footer
output.write(footer)
return output.getvalue()
output.close()
# end xmlTags
def getArtwork(self, mp4Path, filename='cover'):
# Check for local artwork in the same directory as the mp4
extensions = valid_poster_extensions
poster = None
for e in extensions:
head, tail = os.path.split(os.path.abspath(mp4Path))
path = os.path.join(head, filename + os.extsep + e)
if (os.path.exists(path)):
poster = path
self.log.info("Local artwork detected, using %s." % path)
break
# Pulls down all the poster metadata for the correct season and sorts them into the Poster object
if poster is None:
try:
poster = urlretrieve(self.movie.get_poster("l"), os.path.join(tempfile.gettempdir(), "poster-%s.jpg" % self.imdbid))[0]
except Exception as e:
self.log.error("Exception while retrieving poster %s.", str(e))
poster = None
return poster
def main():
if len(sys.argv) > 2:
mp4 = str(sys.argv[1]).replace("\\", "\\\\").replace("\\\\\\\\", "\\\\")
imdb_id = str(sys.argv[2])
tmdb_mp4_instance = tmdb_mp4(imdb_id)
if os.path.splitext(mp4)[1][1:] in valid_output_extensions:
tmdb_mp4_instance.writeTags(mp4)
else:
print("Wrong file type")
if __name__ == '__main__':
main()
|
[
"io.StringIO",
"os.path.abspath",
"os.path.basename",
"tempfile.gettempdir",
"os.path.exists",
"mutagen.mp4.MP4Cover",
"logging.getLogger",
"tmdb_api.tmdb.configure",
"time.sleep",
"os.path.splitext",
"tmdb_api.tmdb.Movie",
"mutagen.mp4.MP4",
"os.path.join",
"sys.exit"
] |
[((2091, 2103), 'mutagen.mp4.MP4', 'MP4', (['mp4Path'], {}), '(mp4Path)\n', (2094, 2103), False, 'from mutagen.mp4 import MP4, MP4Cover\n'), ((5309, 5319), 'io.StringIO', 'StringIO', ([], {}), '()\n', (5317, 5319), False, 'from io import StringIO\n'), ((598, 625), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (615, 625), False, 'import logging\n'), ((2063, 2073), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2071, 2073), False, 'import sys\n'), ((6948, 6992), 'os.path.join', 'os.path.join', (['head', '(filename + os.extsep + e)'], {}), '(head, filename + os.extsep + e)\n', (6960, 6992), False, 'import os\n'), ((7009, 7029), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (7023, 7029), False, 'import os\n'), ((1056, 1103), 'tmdb_api.tmdb.configure', 'tmdb.configure', (['tmdb_api_key'], {'language': 'language'}), '(tmdb_api_key, language=language)\n', (1070, 1103), False, 'from tmdb_api import tmdb\n'), ((1134, 1152), 'tmdb_api.tmdb.Movie', 'tmdb.Movie', (['imdbid'], {}), '(imdbid)\n', (1144, 1152), False, 'from tmdb_api import tmdb\n'), ((1909, 1934), 'os.path.splitext', 'os.path.splitext', (['mp4Path'], {}), '(mp4Path)\n', (1925, 1934), False, 'import os\n'), ((3577, 3608), 'os.path.basename', 'os.path.basename', (['self.original'], {}), '(self.original)\n', (3593, 3608), False, 'import os\n'), ((3663, 3688), 'os.path.basename', 'os.path.basename', (['mp4Path'], {}), '(mp4Path)\n', (3679, 3688), False, 'import os\n'), ((6903, 6927), 'os.path.abspath', 'os.path.abspath', (['mp4Path'], {}), '(mp4Path)\n', (6918, 6927), False, 'import os\n'), ((1761, 1775), 'time.sleep', 'time.sleep', (['(20)'], {}), '(20)\n', (1771, 1775), False, 'import time\n'), ((4086, 4099), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (4096, 4099), False, 'import time\n'), ((7824, 7845), 'os.path.splitext', 'os.path.splitext', (['mp4'], {}), '(mp4)\n', (7840, 7845), False, 'import os\n'), ((3346, 3382), 'mutagen.mp4.MP4Cover', 'MP4Cover', 
(['cover', 'MP4Cover.FORMAT_PNG'], {}), '(cover, MP4Cover.FORMAT_PNG)\n', (3354, 3382), False, 'from mutagen.mp4 import MP4, MP4Cover\n'), ((3457, 3494), 'mutagen.mp4.MP4Cover', 'MP4Cover', (['cover', 'MP4Cover.FORMAT_JPEG'], {}), '(cover, MP4Cover.FORMAT_JPEG)\n', (3465, 3494), False, 'from mutagen.mp4 import MP4, MP4Cover\n'), ((7386, 7407), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (7405, 7407), False, 'import tempfile\n')]
|
import os
import json
import logging
from birdy.twitter import UserClient
logging.basicConfig(filename='tweetme.log', format='%(asctime)s %(message)s', level=logging.DEBUG)
def get_config_from_file(filename="config.json"):
"""
This function will check for the config.json file which holds the Twitter API
Tokens and Keys and will also give a user friendly message if they are
invalid. New file is created if not present in the project directory.
Returns False: if config.json is missing of has invalid configuration
Returns tuple (containing configurations): if config.json is present with
valid configuration
"""
if filename not in os.listdir():
with open(filename, mode='w') as f:
json.dump({
'consumer_key': 0,
'consumer_secret': 0,
'access_token': 0,
'access_token_secret': 0,
}, f)
return False
else:
with open(filename, mode='r') as f:
config = json.loads(f.read())
if 0 not in config.values():
return (
config["consumer_key"],
config["consumer_secret"],
config["access_token"],
config["access_token_secret"],
)
else:
return False
def get_next_tweet_from_file(tweets_file='tweets.txt', turn_file='next_tweet_index.txt'):
"""
This function reads Tweets file and Turn file and gets the next tweet.
Returns False: if tweets.txt is not present
Returns next Tweet: if valid tweets.txt is present
"""
if tweets_file not in os.listdir():
"""When tweets.txt is not present"""
with open(tweets_file, mode='w') as f:
f.write('Tweet :: URL\n')
with open(turn_file, mode='w') as f:
f.write('0')
return False
elif turn_file not in os.listdir():
"""When next_tweet_index.txt is not present, creates a new next_tweet_index.txt and writes 1 in it
and return the first tweet from tweets.txt"""
with open(turn_file, mode='w') as f:
f.write('1')
with open(tweets_file, mode='r') as f:
tweet_text = f.readline()
return tweet_text.split("::")
else:
"""When both files are present, check next_tweet_index.txt and use it's value as index to
find the next tweet from tweets.txt and write index + 1 in next_tweet_index.txt"""
with open(turn_file, mode='r') as f:
turn = int(f.readline())
with open(tweets_file, mode='r') as f:
tweets = f.readlines()
if len(tweets) <= turn:
turn = 0
with open(turn_file, mode='w') as f:
f.write(str(turn + 1))
return tweets[turn].split("::")
def manage_twitter_client():
"""
This function will create twitter client using configurations and send tweet.
"""
configError = (
"Please open config.json file located in the project directory and"
"replace the value '0' of all the tokens and keys in order to make "
"this bot work. Visit https://apps.twitter.com/ in order to get your "
"tokens and keys."
)
keys = get_config_from_file()
if not keys:
logging.error(configError)
else:
tweet = get_next_tweet_from_file()
if tweet:
client = UserClient(*keys)
response = client.api.statuses.update.post(status='{} {}'.format(tweet[0], tweet[1]))
logging.info(
'You tweet is out in the world.'
'Check it out https://twitter.com/{}/status/{}'.format(
response.data["user"]["screen_name"],
response.data["id_str"]
)
)
if __name__ == '__main__':
manage_twitter_client()
|
[
"json.dump",
"logging.error",
"logging.basicConfig",
"birdy.twitter.UserClient",
"os.listdir"
] |
[((75, 178), 'logging.basicConfig', 'logging.basicConfig', ([], {'filename': '"""tweetme.log"""', 'format': '"""%(asctime)s %(message)s"""', 'level': 'logging.DEBUG'}), "(filename='tweetme.log', format=\n '%(asctime)s %(message)s', level=logging.DEBUG)\n", (94, 178), False, 'import logging\n'), ((718, 730), 'os.listdir', 'os.listdir', ([], {}), '()\n', (728, 730), False, 'import os\n'), ((1720, 1732), 'os.listdir', 'os.listdir', ([], {}), '()\n', (1730, 1732), False, 'import os\n'), ((3377, 3403), 'logging.error', 'logging.error', (['configError'], {}), '(configError)\n', (3390, 3403), False, 'import logging\n'), ((788, 892), 'json.dump', 'json.dump', (["{'consumer_key': 0, 'consumer_secret': 0, 'access_token': 0,\n 'access_token_secret': 0}", 'f'], {}), "({'consumer_key': 0, 'consumer_secret': 0, 'access_token': 0,\n 'access_token_secret': 0}, f)\n", (797, 892), False, 'import json\n'), ((1982, 1994), 'os.listdir', 'os.listdir', ([], {}), '()\n', (1992, 1994), False, 'import os\n'), ((3496, 3513), 'birdy.twitter.UserClient', 'UserClient', (['*keys'], {}), '(*keys)\n', (3506, 3513), False, 'from birdy.twitter import UserClient\n')]
|
from decimal import Decimal
from .common import SlotMixin
class SlotKalkulator_Wydawnictwo_Ciagle_Prog1(SlotMixin):
"""
Artykuł z czasopisma z listy ministerialnej.
Dla roku 2017, 2018: punkty KBN >= 30
"""
def punkty_pkd(self, dyscyplina):
if self.ma_dyscypline(dyscyplina):
return self.original.punkty_kbn
def slot_dla_autora_z_dyscypliny(self, dyscyplina):
azd = len(self.autorzy_z_dyscypliny(dyscyplina))
if azd == 0:
return
return Decimal("1") / azd
def slot_dla_dyscypliny(self, dyscyplina):
if self.ma_dyscypline(dyscyplina):
return Decimal("1")
class SlotKalkulator_Wydawnictwo_Ciagle_Prog2(SlotMixin):
"""
Artykuł z czasopisma z listy ministerialnej.
Dla roku 2017-2018: punkty KBN 20 lub 25
"""
def punkty_pkd(self, dyscyplina):
if self.ma_dyscypline(dyscyplina):
pierwiastek = self.pierwiastek_k_przez_m(dyscyplina)
if pierwiastek is None:
return None
if self.liczba_k(dyscyplina) == 0:
return 0
return self.original.punkty_kbn * max(pierwiastek, Decimal("0.1"))
def slot_dla_autora_z_dyscypliny(self, dyscyplina):
if not self.ma_dyscypline(dyscyplina):
return
azd = len(self.autorzy_z_dyscypliny(dyscyplina))
if azd > 0:
return self.pierwiastek_k_przez_m(dyscyplina) * 1 / azd
def slot_dla_dyscypliny(self, dyscyplina):
if not self.ma_dyscypline(dyscyplina):
return
return self.pierwiastek_k_przez_m(dyscyplina)
class SlotKalkulator_Wydawnictwo_Ciagle_Prog3(SlotMixin):
"""
Artykuł z czasopisma z listy ministerialnej.
Dla roku 2017-2018: punkty KBN poniżej 20 lub 5
"""
def punkty_pkd(self, dyscyplina):
if self.ma_dyscypline(dyscyplina):
k_przez_m = self.k_przez_m(dyscyplina)
if k_przez_m is None:
return
if self.liczba_k(dyscyplina) == 0:
return 0
return self.original.punkty_kbn * max(k_przez_m, Decimal("0.1"))
def slot_dla_autora_z_dyscypliny(self, dyscyplina):
if not self.ma_dyscypline(dyscyplina):
return
return self.jeden_przez_wszyscy()
def slot_dla_dyscypliny(self, dyscyplina):
if not self.ma_dyscypline(dyscyplina):
return
return self.jeden_przez_wszyscy() * len(self.autorzy_z_dyscypliny(dyscyplina))
|
[
"decimal.Decimal"
] |
[((521, 533), 'decimal.Decimal', 'Decimal', (['"""1"""'], {}), "('1')\n", (528, 533), False, 'from decimal import Decimal\n'), ((650, 662), 'decimal.Decimal', 'Decimal', (['"""1"""'], {}), "('1')\n", (657, 662), False, 'from decimal import Decimal\n'), ((1182, 1196), 'decimal.Decimal', 'Decimal', (['"""0.1"""'], {}), "('0.1')\n", (1189, 1196), False, 'from decimal import Decimal\n'), ((2137, 2151), 'decimal.Decimal', 'Decimal', (['"""0.1"""'], {}), "('0.1')\n", (2144, 2151), False, 'from decimal import Decimal\n')]
|
"""
Tests for the xopen.xopen function
"""
import bz2
from contextlib import contextmanager
import functools
import gzip
import io
import itertools
import lzma
import os
from pathlib import Path
import shutil
import pytest
from xopen import xopen
# TODO this is duplicated in test_piped.py
TEST_DIR = Path(__file__).parent
CONTENT_LINES = ["Testing, testing ...\n", "The second line.\n"]
CONTENT = "".join(CONTENT_LINES)
extensions = ["", ".gz", ".bz2", ".xz"]
base = os.path.join(os.path.dirname(__file__), "file.txt")
files = [base + ext for ext in extensions]
@contextmanager
def disable_binary(tmp_path, binary_name):
"""
Find the location of the binary by its name, then set PATH to a directory that contains
the binary with permissions set to 000. If no suitable binary could be found,
PATH is set to an empty directory
"""
try:
binary_path = shutil.which(binary_name)
if binary_path:
shutil.copy(binary_path, tmp_path)
os.chmod(tmp_path / binary_name, 0)
path = os.environ["PATH"]
os.environ["PATH"] = str(tmp_path)
yield
finally:
os.environ["PATH"] = path
@pytest.fixture(params=extensions)
def ext(request):
return request.param
@pytest.fixture(params=files)
def fname(request):
return request.param
@pytest.fixture
def lacking_pigz_permissions(tmp_path):
with disable_binary(tmp_path, "pigz"):
yield
@pytest.fixture
def lacking_pbzip2_permissions(tmp_path):
with disable_binary(tmp_path, "pbzip2"):
yield
@pytest.fixture
def xopen_without_igzip(monkeypatch):
import xopen # xopen local overrides xopen global variable
monkeypatch.setattr(xopen, "igzip", None)
return xopen.xopen
def test_text(fname):
with xopen(fname, "rt") as f:
lines = list(f)
assert len(lines) == 2
assert lines[1] == "The second line.\n", fname
def test_binary(fname):
with xopen(fname, "rb") as f:
lines = list(f)
assert len(lines) == 2
assert lines[1] == b"The second line.\n", fname
def test_binary_no_isal_no_threads(fname, xopen_without_igzip):
with xopen_without_igzip(fname, "rb", threads=0) as f:
lines = list(f)
assert len(lines) == 2
assert lines[1] == b"The second line.\n", fname
def test_binary_no_isal(fname, xopen_without_igzip):
with xopen_without_igzip(fname, "rb", threads=1) as f:
lines = list(f)
assert len(lines) == 2
assert lines[1] == b"The second line.\n", fname
def test_no_context_manager_text(fname):
f = xopen(fname, "rt")
lines = list(f)
assert len(lines) == 2
assert lines[1] == "The second line.\n", fname
f.close()
assert f.closed
def test_no_context_manager_binary(fname):
f = xopen(fname, "rb")
lines = list(f)
assert len(lines) == 2
assert lines[1] == b"The second line.\n", fname
f.close()
assert f.closed
def test_bytes_path(fname):
path = fname.encode("utf-8")
with xopen(path, "rt") as f:
lines = list(f)
assert len(lines) == 2
assert lines[1] == "The second line.\n", fname
def test_readinto(fname):
content = CONTENT.encode("utf-8")
with xopen(fname, "rb") as f:
b = bytearray(len(content) + 100)
length = f.readinto(b)
assert length == len(content)
assert b[:length] == content
def test_detect_file_format_from_content(ext, tmp_path):
path = tmp_path / f"file.txt{ext}.test"
shutil.copy(TEST_DIR / f"file.txt{ext}", path)
with xopen(path, "rb") as fh:
assert fh.readline() == CONTENT_LINES[0].encode("utf-8")
def test_readline(fname):
first_line = CONTENT_LINES[0].encode("utf-8")
with xopen(fname, "rb") as f:
assert f.readline() == first_line
def test_readline_text(fname):
with xopen(fname, "r") as f:
assert f.readline() == CONTENT_LINES[0]
def test_next(fname):
with xopen(fname, "rt") as f:
_ = next(f)
line2 = next(f)
assert line2 == "The second line.\n", fname
def test_has_iter_method(ext, tmp_path):
path = tmp_path / f"out{ext}"
with xopen(path, mode="w") as f:
# Writing anything isn’t strictly necessary, but if we don’t, then
# pbzip2 causes a delay of one second
f.write("hello")
assert hasattr(f, "__iter__")
def test_iter_without_with(fname):
f = xopen(fname, "rt")
it = iter(f)
assert CONTENT_LINES[0] == next(it)
f.close()
@pytest.mark.parametrize("extension", [".gz", ".bz2"])
def test_partial_iteration_closes_correctly(extension, create_large_file):
class LineReader:
def __init__(self, file):
self.file = xopen(file, "rb")
def __iter__(self):
wrapper = io.TextIOWrapper(self.file)
yield from wrapper
large_file = create_large_file(extension)
f = LineReader(large_file)
next(iter(f))
f.file.close()
def test_nonexisting_file(ext):
with pytest.raises(IOError):
with xopen("this-file-does-not-exist" + ext):
pass # pragma: no cover
def test_write_to_nonexisting_dir(ext):
with pytest.raises(IOError):
with xopen("this/path/does/not/exist/file.txt" + ext, "w"):
pass # pragma: no cover
def test_invalid_mode(ext):
with pytest.raises(ValueError):
with xopen(TEST_DIR / f"file.txt.{ext}", mode="hallo"):
pass # pragma: no cover
def test_filename_not_a_string():
with pytest.raises(TypeError):
with xopen(123, mode="r"):
pass # pragma: no cover
def test_invalid_compression_level(tmp_path):
with pytest.raises(ValueError) as e:
with xopen(tmp_path / "out.gz", mode="w", compresslevel=17) as f:
f.write("hello") # pragma: no cover
assert "compresslevel must be" in e.value.args[0]
@pytest.mark.parametrize("ext", extensions)
def test_append(ext, tmp_path):
text = b"AB"
reference = text + text
path = tmp_path / f"the-file{ext}"
with xopen(path, "ab") as f:
f.write(text)
with xopen(path, "ab") as f:
f.write(text)
with xopen(path, "r") as f:
for appended in f:
pass
reference = reference.decode("utf-8")
assert appended == reference
@pytest.mark.parametrize("ext", extensions)
def test_append_text(ext, tmp_path):
text = "AB"
reference = text + text
path = tmp_path / f"the-file{ext}"
with xopen(path, "at") as f:
f.write(text)
with xopen(path, "at") as f:
f.write(text)
with xopen(path, "rt") as f:
for appended in f:
pass
assert appended == reference
@pytest.mark.timeout(5)
@pytest.mark.parametrize("extension", [".gz", ".bz2", ".xz"])
def test_truncated_file(extension, create_truncated_file):
truncated_file = create_truncated_file(extension)
with pytest.raises((EOFError, IOError)):
f = xopen(truncated_file, "r")
f.read()
f.close() # pragma: no cover
@pytest.mark.timeout(5)
@pytest.mark.parametrize("extension", [".gz", ".bz2", ".xz"])
def test_truncated_iter(extension, create_truncated_file):
truncated_file = create_truncated_file(extension)
with pytest.raises((EOFError, IOError)):
f = xopen(truncated_file, "r")
for line in f:
pass
f.close() # pragma: no cover
@pytest.mark.timeout(5)
@pytest.mark.parametrize("extension", [".gz", ".bz2", ".xz"])
def test_truncated_with(extension, create_truncated_file):
truncated_file = create_truncated_file(extension)
with pytest.raises((EOFError, IOError)):
with xopen(truncated_file, "r") as f:
f.read()
@pytest.mark.timeout(5)
@pytest.mark.parametrize("extension", [".gz", ".bz2", ".xz"])
def test_truncated_iter_with(extension, create_truncated_file):
truncated_file = create_truncated_file(extension)
with pytest.raises((EOFError, IOError)):
with xopen(truncated_file, "r") as f:
for line in f:
pass
def test_bare_read_from_gz():
hello_file = TEST_DIR / "hello.gz"
with xopen(hello_file, "rt") as f:
assert f.read() == "hello"
def test_read_no_threads(ext):
klasses = {
".bz2": bz2.BZ2File,
".gz": gzip.GzipFile,
".xz": lzma.LZMAFile,
"": io.BufferedReader,
}
klass = klasses[ext]
with xopen(TEST_DIR / f"file.txt{ext}", "rb", threads=0) as f:
assert isinstance(f, klass), f
def test_write_threads(tmp_path, ext):
path = tmp_path / f"out.{ext}"
with xopen(path, mode="w", threads=3) as f:
f.write("hello")
with xopen(path) as f:
assert f.read() == "hello"
def test_write_pigz_threads_no_isal(tmp_path, xopen_without_igzip):
path = tmp_path / "out.gz"
with xopen_without_igzip(path, mode="w", threads=3) as f:
f.write("hello")
with xopen_without_igzip(path) as f:
assert f.read() == "hello"
def test_write_no_threads(tmp_path, ext):
klasses = {
".bz2": bz2.BZ2File,
".gz": gzip.GzipFile,
".xz": lzma.LZMAFile,
"": io.BufferedWriter,
}
klass = klasses[ext]
with xopen(tmp_path / f"out.{ext}", "wb", threads=0) as f:
assert isinstance(f, io.BufferedWriter)
if ext:
assert isinstance(f.raw, klass), f
def test_write_gzip_no_threads_no_isal(tmp_path, xopen_without_igzip):
import gzip
with xopen_without_igzip(tmp_path / "out.gz", "wb", threads=0) as f:
assert isinstance(f.raw, gzip.GzipFile), f
def test_write_stdout():
f = xopen("-", mode="w")
print("Hello", file=f)
f.close()
# ensure stdout is not closed
print("Still there?")
def test_write_stdout_contextmanager():
# Do not close stdout
with xopen("-", mode="w") as f:
print("Hello", file=f)
# ensure stdout is not closed
print("Still there?")
def test_read_pathlib(fname):
path = Path(fname)
with xopen(path, mode="rt") as f:
assert f.read() == CONTENT
def test_read_pathlib_binary(fname):
path = Path(fname)
with xopen(path, mode="rb") as f:
assert f.read() == bytes(CONTENT, "ascii")
def test_write_pathlib(ext, tmp_path):
path = tmp_path / f"hello.txt{ext}"
with xopen(path, mode="wt") as f:
f.write("hello")
with xopen(path, mode="rt") as f:
assert f.read() == "hello"
def test_write_pathlib_binary(ext, tmp_path):
path = tmp_path / f"hello.txt{ext}"
with xopen(path, mode="wb") as f:
f.write(b"hello")
with xopen(path, mode="rb") as f:
assert f.read() == b"hello"
def test_falls_back_to_gzip_open(lacking_pigz_permissions):
with xopen(TEST_DIR / "file.txt.gz", "rb") as f:
assert f.readline() == CONTENT_LINES[0].encode("utf-8")
def test_falls_back_to_gzip_open_no_isal(lacking_pigz_permissions, xopen_without_igzip):
with xopen_without_igzip(TEST_DIR / "file.txt.gz", "rb") as f:
assert f.readline() == CONTENT_LINES[0].encode("utf-8")
def test_fals_back_to_gzip_open_write_no_isal(
lacking_pigz_permissions, xopen_without_igzip, tmp_path
):
tmp = tmp_path / "test.gz"
with xopen_without_igzip(tmp, "wb") as f:
f.write(b"hello")
assert gzip.decompress(tmp.read_bytes()) == b"hello"
def test_falls_back_to_bzip2_open(lacking_pbzip2_permissions):
with xopen(TEST_DIR / "file.txt.bz2", "rb") as f:
assert f.readline() == CONTENT_LINES[0].encode("utf-8")
def test_open_many_writers(tmp_path, ext):
files = []
# Because lzma.open allocates a lot of memory,
# open fewer files to avoid MemoryError on 32-bit architectures
n = 21 if ext == ".xz" else 61
for i in range(1, n):
path = tmp_path / f"{i:03d}.txt{ext}"
f = xopen(path, "wb", threads=2)
f.write(b"hello")
files.append(f)
for f in files:
f.close()
def test_override_output_format(tmp_path):
path = tmp_path / "test_gzip_compressed"
with xopen(path, mode="wb", format="gz") as f:
f.write(b"test")
test_contents = path.read_bytes()
assert test_contents.startswith(b"\x1f\x8b") # Gzip magic
assert gzip.decompress(test_contents) == b"test"
def test_override_output_format_unsupported_format(tmp_path):
path = tmp_path / "test_fairy_format_compressed"
with pytest.raises(ValueError) as error:
xopen(path, mode="wb", format="fairy")
error.match("not supported")
error.match("fairy")
def test_override_output_format_wrong_format(tmp_path):
path = tmp_path / "not_compressed"
path.write_text("I am not compressed.")
with pytest.raises(OSError): # BadGzipFile is a subclass of OSError
with xopen(path, "rt", format="gz") as opened_file:
opened_file.read()
# Test for threaded and non-threaded.
OPENERS = (xopen, functools.partial(xopen, threads=0))
@pytest.mark.parametrize(
["opener", "extension"], itertools.product(OPENERS, extensions)
)
def test_text_encoding_newline_passtrough(opener, extension, tmp_path):
# "Eén ree\nTwee reeën\n" latin-1 encoded with \r for as line separator.
encoded_text = b"E\xe9n ree\rTwee ree\xebn\r"
path = tmp_path / f"test.txt{extension}"
with opener(path, "wb") as f:
f.write(encoded_text)
with opener(path, "rt", encoding="latin-1", newline="\r") as f:
result = f.read()
assert result == "Eén ree\rTwee reeën\r"
@pytest.mark.parametrize(
["opener", "extension"], itertools.product(OPENERS, extensions)
)
def test_text_encoding_errors(opener, extension, tmp_path):
# "Eén ree\nTwee reeën\n" latin-1 encoded. This is not valid ascii.
encoded_text = b"E\xe9n ree\nTwee ree\xebn\n"
path = tmp_path / f"test.txt{extension}"
with opener(path, "wb") as f:
f.write(encoded_text)
with opener(path, "rt", encoding="ascii", errors="replace") as f:
result = f.read()
assert result == "E�n ree\nTwee ree�n\n"
|
[
"functools.partial",
"os.chmod",
"os.path.dirname",
"gzip.decompress",
"pytest.fixture",
"shutil.which",
"xopen",
"pathlib.Path",
"pytest.raises",
"io.TextIOWrapper",
"itertools.product",
"pytest.mark.timeout",
"pytest.mark.parametrize",
"shutil.copy"
] |
[((1172, 1205), 'pytest.fixture', 'pytest.fixture', ([], {'params': 'extensions'}), '(params=extensions)\n', (1186, 1205), False, 'import pytest\n'), ((1252, 1280), 'pytest.fixture', 'pytest.fixture', ([], {'params': 'files'}), '(params=files)\n', (1266, 1280), False, 'import pytest\n'), ((4522, 4575), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""extension"""', "['.gz', '.bz2']"], {}), "('extension', ['.gz', '.bz2'])\n", (4545, 4575), False, 'import pytest\n'), ((5891, 5933), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""ext"""', 'extensions'], {}), "('ext', extensions)\n", (5914, 5933), False, 'import pytest\n'), ((6322, 6364), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""ext"""', 'extensions'], {}), "('ext', extensions)\n", (6345, 6364), False, 'import pytest\n'), ((6712, 6734), 'pytest.mark.timeout', 'pytest.mark.timeout', (['(5)'], {}), '(5)\n', (6731, 6734), False, 'import pytest\n'), ((6736, 6796), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""extension"""', "['.gz', '.bz2', '.xz']"], {}), "('extension', ['.gz', '.bz2', '.xz'])\n", (6759, 6796), False, 'import pytest\n'), ((7052, 7074), 'pytest.mark.timeout', 'pytest.mark.timeout', (['(5)'], {}), '(5)\n', (7071, 7074), False, 'import pytest\n'), ((7076, 7136), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""extension"""', "['.gz', '.bz2', '.xz']"], {}), "('extension', ['.gz', '.bz2', '.xz'])\n", (7099, 7136), False, 'import pytest\n'), ((7415, 7437), 'pytest.mark.timeout', 'pytest.mark.timeout', (['(5)'], {}), '(5)\n', (7434, 7437), False, 'import pytest\n'), ((7439, 7499), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""extension"""', "['.gz', '.bz2', '.xz']"], {}), "('extension', ['.gz', '.bz2', '.xz'])\n", (7462, 7499), False, 'import pytest\n'), ((7728, 7750), 'pytest.mark.timeout', 'pytest.mark.timeout', (['(5)'], {}), '(5)\n', (7747, 7750), False, 'import pytest\n'), ((7752, 7812), 'pytest.mark.parametrize', 
'pytest.mark.parametrize', (['"""extension"""', "['.gz', '.bz2', '.xz']"], {}), "('extension', ['.gz', '.bz2', '.xz'])\n", (7775, 7812), False, 'import pytest\n'), ((304, 318), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (308, 318), False, 'from pathlib import Path\n'), ((484, 509), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (499, 509), False, 'import os\n'), ((2601, 2619), 'xopen', 'xopen', (['fname', '"""rt"""'], {}), "(fname, 'rt')\n", (2606, 2619), False, 'import xopen\n'), ((2805, 2823), 'xopen', 'xopen', (['fname', '"""rb"""'], {}), "(fname, 'rb')\n", (2810, 2823), False, 'import xopen\n'), ((3518, 3564), 'shutil.copy', 'shutil.copy', (["(TEST_DIR / f'file.txt{ext}')", 'path'], {}), "(TEST_DIR / f'file.txt{ext}', path)\n", (3529, 3564), False, 'import shutil\n'), ((4429, 4447), 'xopen', 'xopen', (['fname', '"""rt"""'], {}), "(fname, 'rt')\n", (4434, 4447), False, 'import xopen\n'), ((9630, 9650), 'xopen', 'xopen', (['"""-"""'], {'mode': '"""w"""'}), "('-', mode='w')\n", (9635, 9650), False, 'import xopen\n'), ((9990, 10001), 'pathlib.Path', 'Path', (['fname'], {}), '(fname)\n', (9994, 10001), False, 'from pathlib import Path\n'), ((10125, 10136), 'pathlib.Path', 'Path', (['fname'], {}), '(fname)\n', (10129, 10136), False, 'from pathlib import Path\n'), ((12890, 12925), 'functools.partial', 'functools.partial', (['xopen'], {'threads': '(0)'}), '(xopen, threads=0)\n', (12907, 12925), False, 'import functools\n'), ((12984, 13022), 'itertools.product', 'itertools.product', (['OPENERS', 'extensions'], {}), '(OPENERS, extensions)\n', (13001, 13022), False, 'import itertools\n'), ((13529, 13567), 'itertools.product', 'itertools.product', (['OPENERS', 'extensions'], {}), '(OPENERS, extensions)\n', (13546, 13567), False, 'import itertools\n'), ((886, 911), 'shutil.which', 'shutil.which', (['binary_name'], {}), '(binary_name)\n', (898, 911), False, 'import shutil\n'), ((1783, 1801), 'xopen', 'xopen', (['fname', 
'"""rt"""'], {}), "(fname, 'rt')\n", (1788, 1801), False, 'import xopen\n'), ((1953, 1971), 'xopen', 'xopen', (['fname', '"""rb"""'], {}), "(fname, 'rb')\n", (1958, 1971), False, 'import xopen\n'), ((3029, 3046), 'xopen', 'xopen', (['path', '"""rt"""'], {}), "(path, 'rt')\n", (3034, 3046), False, 'import xopen\n'), ((3238, 3256), 'xopen', 'xopen', (['fname', '"""rb"""'], {}), "(fname, 'rb')\n", (3243, 3256), False, 'import xopen\n'), ((3574, 3591), 'xopen', 'xopen', (['path', '"""rb"""'], {}), "(path, 'rb')\n", (3579, 3591), False, 'import xopen\n'), ((3751, 3769), 'xopen', 'xopen', (['fname', '"""rb"""'], {}), "(fname, 'rb')\n", (3756, 3769), False, 'import xopen\n'), ((3860, 3877), 'xopen', 'xopen', (['fname', '"""r"""'], {}), "(fname, 'r')\n", (3865, 3877), False, 'import xopen\n'), ((3965, 3983), 'xopen', 'xopen', (['fname', '"""rt"""'], {}), "(fname, 'rt')\n", (3970, 3983), False, 'import xopen\n'), ((4172, 4193), 'xopen', 'xopen', (['path'], {'mode': '"""w"""'}), "(path, mode='w')\n", (4177, 4193), False, 'import xopen\n'), ((5017, 5039), 'pytest.raises', 'pytest.raises', (['IOError'], {}), '(IOError)\n', (5030, 5039), False, 'import pytest\n'), ((5183, 5205), 'pytest.raises', 'pytest.raises', (['IOError'], {}), '(IOError)\n', (5196, 5205), False, 'import pytest\n'), ((5351, 5376), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (5364, 5376), False, 'import pytest\n'), ((5524, 5548), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (5537, 5548), False, 'import pytest\n'), ((5679, 5704), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (5692, 5704), False, 'import pytest\n'), ((6059, 6076), 'xopen', 'xopen', (['path', '"""ab"""'], {}), "(path, 'ab')\n", (6064, 6076), False, 'import xopen\n'), ((6114, 6131), 'xopen', 'xopen', (['path', '"""ab"""'], {}), "(path, 'ab')\n", (6119, 6131), False, 'import xopen\n'), ((6169, 6185), 'xopen', 'xopen', (['path', '"""r"""'], {}), "(path, 'r')\n", 
(6174, 6185), False, 'import xopen\n'), ((6494, 6511), 'xopen', 'xopen', (['path', '"""at"""'], {}), "(path, 'at')\n", (6499, 6511), False, 'import xopen\n'), ((6549, 6566), 'xopen', 'xopen', (['path', '"""at"""'], {}), "(path, 'at')\n", (6554, 6566), False, 'import xopen\n'), ((6604, 6621), 'xopen', 'xopen', (['path', '"""rt"""'], {}), "(path, 'rt')\n", (6609, 6621), False, 'import xopen\n'), ((6919, 6953), 'pytest.raises', 'pytest.raises', (['(EOFError, IOError)'], {}), '((EOFError, IOError))\n', (6932, 6953), False, 'import pytest\n'), ((6967, 6993), 'xopen', 'xopen', (['truncated_file', '"""r"""'], {}), "(truncated_file, 'r')\n", (6972, 6993), False, 'import xopen\n'), ((7259, 7293), 'pytest.raises', 'pytest.raises', (['(EOFError, IOError)'], {}), '((EOFError, IOError))\n', (7272, 7293), False, 'import pytest\n'), ((7307, 7333), 'xopen', 'xopen', (['truncated_file', '"""r"""'], {}), "(truncated_file, 'r')\n", (7312, 7333), False, 'import xopen\n'), ((7622, 7656), 'pytest.raises', 'pytest.raises', (['(EOFError, IOError)'], {}), '((EOFError, IOError))\n', (7635, 7656), False, 'import pytest\n'), ((7940, 7974), 'pytest.raises', 'pytest.raises', (['(EOFError, IOError)'], {}), '((EOFError, IOError))\n', (7953, 7974), False, 'import pytest\n'), ((8150, 8173), 'xopen', 'xopen', (['hello_file', '"""rt"""'], {}), "(hello_file, 'rt')\n", (8155, 8173), False, 'import xopen\n'), ((8424, 8475), 'xopen', 'xopen', (["(TEST_DIR / f'file.txt{ext}')", '"""rb"""'], {'threads': '(0)'}), "(TEST_DIR / f'file.txt{ext}', 'rb', threads=0)\n", (8429, 8475), False, 'import xopen\n'), ((8606, 8638), 'xopen', 'xopen', (['path'], {'mode': '"""w"""', 'threads': '(3)'}), "(path, mode='w', threads=3)\n", (8611, 8638), False, 'import xopen\n'), ((8679, 8690), 'xopen', 'xopen', (['path'], {}), '(path)\n', (8684, 8690), False, 'import xopen\n'), ((9216, 9263), 'xopen', 'xopen', (["(tmp_path / f'out.{ext}')", '"""wb"""'], {'threads': '(0)'}), "(tmp_path / f'out.{ext}', 'wb', threads=0)\n", (9221, 
9263), False, 'import xopen\n'), ((9829, 9849), 'xopen', 'xopen', (['"""-"""'], {'mode': '"""w"""'}), "('-', mode='w')\n", (9834, 9849), False, 'import xopen\n'), ((10011, 10033), 'xopen', 'xopen', (['path'], {'mode': '"""rt"""'}), "(path, mode='rt')\n", (10016, 10033), False, 'import xopen\n'), ((10146, 10168), 'xopen', 'xopen', (['path'], {'mode': '"""rb"""'}), "(path, mode='rb')\n", (10151, 10168), False, 'import xopen\n'), ((10316, 10338), 'xopen', 'xopen', (['path'], {'mode': '"""wt"""'}), "(path, mode='wt')\n", (10321, 10338), False, 'import xopen\n'), ((10379, 10401), 'xopen', 'xopen', (['path'], {'mode': '"""rt"""'}), "(path, mode='rt')\n", (10384, 10401), False, 'import xopen\n'), ((10540, 10562), 'xopen', 'xopen', (['path'], {'mode': '"""wb"""'}), "(path, mode='wb')\n", (10545, 10562), False, 'import xopen\n'), ((10604, 10626), 'xopen', 'xopen', (['path'], {'mode': '"""rb"""'}), "(path, mode='rb')\n", (10609, 10626), False, 'import xopen\n'), ((10740, 10777), 'xopen', 'xopen', (["(TEST_DIR / 'file.txt.gz')", '"""rb"""'], {}), "(TEST_DIR / 'file.txt.gz', 'rb')\n", (10745, 10777), False, 'import xopen\n'), ((11416, 11454), 'xopen', 'xopen', (["(TEST_DIR / 'file.txt.bz2')", '"""rb"""'], {}), "(TEST_DIR / 'file.txt.bz2', 'rb')\n", (11421, 11454), False, 'import xopen\n'), ((11823, 11851), 'xopen', 'xopen', (['path', '"""wb"""'], {'threads': '(2)'}), "(path, 'wb', threads=2)\n", (11828, 11851), False, 'import xopen\n'), ((12039, 12074), 'xopen', 'xopen', (['path'], {'mode': '"""wb"""', 'format': '"""gz"""'}), "(path, mode='wb', format='gz')\n", (12044, 12074), False, 'import xopen\n'), ((12218, 12248), 'gzip.decompress', 'gzip.decompress', (['test_contents'], {}), '(test_contents)\n', (12233, 12248), False, 'import gzip\n'), ((12386, 12411), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (12399, 12411), False, 'import pytest\n'), ((12430, 12468), 'xopen', 'xopen', (['path'], {'mode': '"""wb"""', 'format': '"""fairy"""'}), "(path, 
mode='wb', format='fairy')\n", (12435, 12468), False, 'import xopen\n'), ((12677, 12699), 'pytest.raises', 'pytest.raises', (['OSError'], {}), '(OSError)\n', (12690, 12699), False, 'import pytest\n'), ((948, 982), 'shutil.copy', 'shutil.copy', (['binary_path', 'tmp_path'], {}), '(binary_path, tmp_path)\n', (959, 982), False, 'import shutil\n'), ((995, 1030), 'os.chmod', 'os.chmod', (['(tmp_path / binary_name)', '(0)'], {}), '(tmp_path / binary_name, 0)\n', (1003, 1030), False, 'import os\n'), ((4731, 4748), 'xopen', 'xopen', (['file', '"""rb"""'], {}), "(file, 'rb')\n", (4736, 4748), False, 'import xopen\n'), ((4800, 4827), 'io.TextIOWrapper', 'io.TextIOWrapper', (['self.file'], {}), '(self.file)\n', (4816, 4827), False, 'import io\n'), ((5054, 5093), 'xopen', 'xopen', (["('this-file-does-not-exist' + ext)"], {}), "('this-file-does-not-exist' + ext)\n", (5059, 5093), False, 'import xopen\n'), ((5220, 5273), 'xopen', 'xopen', (["('this/path/does/not/exist/file.txt' + ext)", '"""w"""'], {}), "('this/path/does/not/exist/file.txt' + ext, 'w')\n", (5225, 5273), False, 'import xopen\n'), ((5391, 5440), 'xopen', 'xopen', (["(TEST_DIR / f'file.txt.{ext}')"], {'mode': '"""hallo"""'}), "(TEST_DIR / f'file.txt.{ext}', mode='hallo')\n", (5396, 5440), False, 'import xopen\n'), ((5563, 5583), 'xopen', 'xopen', (['(123)'], {'mode': '"""r"""'}), "(123, mode='r')\n", (5568, 5583), False, 'import xopen\n'), ((5724, 5778), 'xopen', 'xopen', (["(tmp_path / 'out.gz')"], {'mode': '"""w"""', 'compresslevel': '(17)'}), "(tmp_path / 'out.gz', mode='w', compresslevel=17)\n", (5729, 5778), False, 'import xopen\n'), ((7671, 7697), 'xopen', 'xopen', (['truncated_file', '"""r"""'], {}), "(truncated_file, 'r')\n", (7676, 7697), False, 'import xopen\n'), ((7989, 8015), 'xopen', 'xopen', (['truncated_file', '"""r"""'], {}), "(truncated_file, 'r')\n", (7994, 8015), False, 'import xopen\n'), ((12754, 12784), 'xopen', 'xopen', (['path', '"""rt"""'], {'format': '"""gz"""'}), "(path, 'rt', 
format='gz')\n", (12759, 12784), False, 'import xopen\n')]
|
# the -n is important on gdal_merge otherwise data gets stomped!
import os
sep = os.path.sep
s = ['5MCP19/1/20210710/rgb.bin',
'5MCP19/1/20210722/rgb.bin',
'5MCP19/1/20210714/rgb.bin',
'5MCP19/2/20210710/rgb.bin',
'5MCP19/2/20210722/rgb.bin',
'5MCP19/2/20210714/rgb.bin',
'5MCP18/1/20210718/rgb.bin',
'5MCP18/2/20210718/rgb.bin',
'5MCP13/1/20210715/rgb.bin',
'5MCP13/1/20210719/rgb.bin',
'5MCP13/2/20210715/rgb.bin',
'5MCP13/2/20210719/rgb.bin',
'5MCP7/1/20210716/rgb.bin',
'5MCP7/1/20210720/rgb.bin',
'5MCP7/1/20210708/rgb.bin',
'5MCP7/2/20210716/rgb.bin',
'5MCP7/2/20210720/rgb.bin',
'5MCP7/2/20210708/rgb.bin']
d = {}
# identify pairs to merge
for i in s:
w = i.split(sep)
beam = w[0] # beam mode
st = w[1] # dataset number
date = w[2] # date
key = beam + '_' + date# should be two sets per beam_date
if key not in d: d[key] = []
d[key].append(i)
c = []
for k in d:
print(k, d[k])
c += ['gdal_merge.py -n -o ' + k + '.bin -of ENVI -ot Float32 ' + (' '.join(d[k]))]
import multiprocessing as mp
def run(c):
return os.system(c)
def parfor(my_function, my_inputs, n_thread=mp.cpu_count()): # eval fxn in parallel, collect
pool = mp.Pool(n_thread)
result = pool.map(my_function, my_inputs)
return(result)
parfor(run, c, 4)
|
[
"multiprocessing.Pool",
"os.system",
"multiprocessing.cpu_count"
] |
[((1151, 1163), 'os.system', 'os.system', (['c'], {}), '(c)\n', (1160, 1163), False, 'import os\n'), ((1209, 1223), 'multiprocessing.cpu_count', 'mp.cpu_count', ([], {}), '()\n', (1221, 1223), True, 'import multiprocessing as mp\n'), ((1269, 1286), 'multiprocessing.Pool', 'mp.Pool', (['n_thread'], {}), '(n_thread)\n', (1276, 1286), True, 'import multiprocessing as mp\n')]
|
from __future__ import annotations
import ast
import pytest
from flake8_pie import Flake8PieCheck
from flake8_pie.base import Error
from flake8_pie.pie784_celery_crontab_args import PIE784, _is_invalid_celery_crontab
from flake8_pie.tests.utils import to_errors
@pytest.mark.parametrize(
"code,expected",
[
(
"""
crontab(hour="0,12")
""",
PIE784(lineno=2, col_offset=0),
),
(
"""
crontab(hour="0,12", minute="*")
""",
None,
),
(
"""
crontab(hour="0,12", minute="*"),
""",
None,
),
(
"""
crontab(day_of_month="*", hour="0,12"),
""",
PIE784(lineno=2, col_offset=0),
),
(
"""
crontab(day_of_week="*", minute="*"),
""",
PIE784(lineno=2, col_offset=0),
),
(
"""
crontab(month_of_year="*", day_of_month="*", hour="0,12", minute="*"),
""",
PIE784(lineno=2, col_offset=0),
),
(
"""
crontab(),
""",
PIE784(lineno=2, col_offset=0),
),
(
"""
crontab(minute="*/5")
""",
None,
),
],
)
def test_celery_crontab_named_args(code: str, expected: Error | None) -> None:
"""
ensure we pass a explicit params to celery's crontab
see: https://github.com/celery/celery/blob/0736cff9d908c0519e07babe4de9c399c87cb32b/celery/schedules.py#L403
You must pass all the params below the level you are creating.
So if you pass hour, then you must pass minutes.
If you pass the day arg then you must provide hours and minutes, etc.
params: minute, hour, day_of_week, day_of_month, month_of_year
"""
node = ast.parse(code)
assert isinstance(node, ast.Module)
expected_errors = [expected] if expected else []
assert (
to_errors(Flake8PieCheck(node, filename="foo.py").run())
) == expected_errors, "missing a required argument"
@pytest.mark.parametrize(
"args,expected",
[
({"minute", "hour"}, False),
({"hour"}, True),
({"hour", "day_of_week"}, True),
({"minute", "hour", "day_of_week"}, False),
(
{
"minute",
"hour",
"day_of_week",
"day_of_month",
"month_of_year",
"another_random_arg",
},
False,
),
({"minute", "hour", "day_of_week", "day_of_month", "month_of_year"}, False),
],
)
def test_invalid_celery_crontab_kwargs(args: list[str], expected: bool) -> None:
kwargs = [ast.keyword(arg=arg, value=ast.Str(s="0,1")) for arg in args]
assert _is_invalid_celery_crontab(kwargs=kwargs) == expected
|
[
"flake8_pie.pie784_celery_crontab_args.PIE784",
"pytest.mark.parametrize",
"flake8_pie.Flake8PieCheck",
"flake8_pie.pie784_celery_crontab_args._is_invalid_celery_crontab",
"ast.parse",
"ast.Str"
] |
[((1995, 2355), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""args,expected"""', "[({'minute', 'hour'}, False), ({'hour'}, True), ({'hour', 'day_of_week'}, \n True), ({'minute', 'hour', 'day_of_week'}, False), ({'minute', 'hour',\n 'day_of_week', 'day_of_month', 'month_of_year', 'another_random_arg'}, \n False), ({'minute', 'hour', 'day_of_week', 'day_of_month',\n 'month_of_year'}, False)]"], {}), "('args,expected', [({'minute', 'hour'}, False), ({\n 'hour'}, True), ({'hour', 'day_of_week'}, True), ({'minute', 'hour',\n 'day_of_week'}, False), ({'minute', 'hour', 'day_of_week',\n 'day_of_month', 'month_of_year', 'another_random_arg'}, False), ({\n 'minute', 'hour', 'day_of_week', 'day_of_month', 'month_of_year'}, False)])\n", (2018, 2355), False, 'import pytest\n'), ((1749, 1764), 'ast.parse', 'ast.parse', (['code'], {}), '(code)\n', (1758, 1764), False, 'import ast\n'), ((2718, 2759), 'flake8_pie.pie784_celery_crontab_args._is_invalid_celery_crontab', '_is_invalid_celery_crontab', ([], {'kwargs': 'kwargs'}), '(kwargs=kwargs)\n', (2744, 2759), False, 'from flake8_pie.pie784_celery_crontab_args import PIE784, _is_invalid_celery_crontab\n'), ((384, 414), 'flake8_pie.pie784_celery_crontab_args.PIE784', 'PIE784', ([], {'lineno': '(2)', 'col_offset': '(0)'}), '(lineno=2, col_offset=0)\n', (390, 414), False, 'from flake8_pie.pie784_celery_crontab_args import PIE784, _is_invalid_celery_crontab\n'), ((697, 727), 'flake8_pie.pie784_celery_crontab_args.PIE784', 'PIE784', ([], {'lineno': '(2)', 'col_offset': '(0)'}), '(lineno=2, col_offset=0)\n', (703, 727), False, 'from flake8_pie.pie784_celery_crontab_args import PIE784, _is_invalid_celery_crontab\n'), ((821, 851), 'flake8_pie.pie784_celery_crontab_args.PIE784', 'PIE784', ([], {'lineno': '(2)', 'col_offset': '(0)'}), '(lineno=2, col_offset=0)\n', (827, 851), False, 'from flake8_pie.pie784_celery_crontab_args import PIE784, _is_invalid_celery_crontab\n'), ((978, 1008), 
'flake8_pie.pie784_celery_crontab_args.PIE784', 'PIE784', ([], {'lineno': '(2)', 'col_offset': '(0)'}), '(lineno=2, col_offset=0)\n', (984, 1008), False, 'from flake8_pie.pie784_celery_crontab_args import PIE784, _is_invalid_celery_crontab\n'), ((1075, 1105), 'flake8_pie.pie784_celery_crontab_args.PIE784', 'PIE784', ([], {'lineno': '(2)', 'col_offset': '(0)'}), '(lineno=2, col_offset=0)\n', (1081, 1105), False, 'from flake8_pie.pie784_celery_crontab_args import PIE784, _is_invalid_celery_crontab\n'), ((2672, 2688), 'ast.Str', 'ast.Str', ([], {'s': '"""0,1"""'}), "(s='0,1')\n", (2679, 2688), False, 'import ast\n'), ((1889, 1928), 'flake8_pie.Flake8PieCheck', 'Flake8PieCheck', (['node'], {'filename': '"""foo.py"""'}), "(node, filename='foo.py')\n", (1903, 1928), False, 'from flake8_pie import Flake8PieCheck\n')]
|
from __future__ import print_function
from infi.execute import execute
import os
import glob
import logging
import shutil
import platform
import hashlib
import stat
from contextlib import contextmanager
from six.moves.configparser import ConfigParser, NoOptionError
from tempfile import NamedTemporaryFile
log = logging.getLogger(__name__)
INSTALLER_USERDATA = os.path.join('SOFTWARE', 'Microsoft', 'Windows', 'CurrentVersion', 'Installer', 'UserData')
PYPI_HOSTS = ["127.0.0.1 pypi.infinidat.com",
"127.0.0.1 pypi",
"127.0.0.1 pypi.python.org", ]
HOSTS_FILE = os.path.join('/', 'etc', 'hosts') if os.name != 'nt' else \
os.path.join(os.environ.get("SystemRoot", r"C:\Windows"), "System32", "Drivers", "etc", "hosts")
CHMOD_755 = stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH
from infi.recipe.application_packager.utils.execute import execute_assert_success
def get_pypi_addresses():
""":returns: a list of PyPI addresses being looked by easy_install and buildout"""
# TODO get the real list from pydistutils.cfg and buildout/default.cfg
return PYPI_HOSTS
@contextmanager
def prevent_access_to_pypi_servers():
with open(HOSTS_FILE, 'r') as fd:
content = fd.read()
try:
new_content = '\n'.join([content] + get_pypi_addresses())
with open(HOSTS_FILE, 'w') as fd:
fd.write(new_content)
log.info("Preventing access to pypi servers")
log.debug("Wrote {!r} to hosts file {}".format(new_content, HOSTS_FILE))
yield
finally:
with open(HOSTS_FILE, 'w') as fd:
fd.write(content)
log.info("Restoring access to pypi servers")
log.debug("Wrote original content {!r} for hosts file".format(content, HOSTS_FILE))
@contextmanager
def prevent_access_to_gcc():
from tempfile import mkdtemp
original_path = os.environ['PATH']
tempdir = mkdtemp('gcc')
fake_gcc = os.path.join(tempdir, 'gcc')
with open(fake_gcc, 'w') as fd:
fd.write("#!/bin/sh\nexit 1")
os.chmod(fake_gcc, CHMOD_755)
try:
os.environ['PATH'] = os.path.pathsep.join([tempdir, os.environ['PATH'], tempdir])
log.info("Preventing access to gcc")
log.debug("Setting PATH to {}".format(os.environ['PATH']))
yield
finally:
os.environ['PATH'] = original_path
log.debug("Restored PATH to {}".format(os.environ['PATH']))
class Installer(object):
package_extension = None
targetdir = None
executable_extension = None
def __init__(self, buildout_path='buildout.cfg'):
super(Installer, self).__init__()
self._buildout_path = os.path.abspath(buildout_path)
self._project_dir = os.path.dirname(self._buildout_path)
self._parser = ConfigParser()
self._parser.read(self._buildout_path)
@property
def product_name(self):
try:
return self._parser.get('project', 'product_name')
except NoOptionError:
return self._parser.get('project', 'name')
@property
def project_name(self):
return self.product_name.replace(' ', '-').replace('_', '-').lower()
@property
def package_name(self):
return self.project_name
@property
def company(self):
try:
return self._parser.get('project', 'company')
except NoOptionError:
return self._parser.get('project', 'None')
@property
def targetdir(self):
if os.name == 'nt':
return os.path.join(r'C:\Program Files', self.company, self.product_name)
return os.path.join(os.path.sep, 'opt', self.company.lower(), self.project_name)
def _format_executable(self, executable):
return "{}.{}".format(executable, self.executable_extension) if self.executable_extension else executable
def has_bootstrap_ocurred(self):
buildout_path = os.path.join(self.targetdir, 'bin', self._format_executable('buildout'))
buildout_exists = os.path.exists(buildout_path)
log.debug("{!r} exists: {}".format(buildout_path, buildout_exists))
return buildout_exists
def are_there_remainings_of_previous_installations(self):
filepaths = []
if os.path.exists(self.targetdir):
for root, dirs, files in os.walk(self.targetdir):
basedir = os.path.relpath(root, self.targetdir)
filepaths += [basedir + os.path.sep]
filepaths += [os.path.join(basedir, file) for file in files]
log.info("Files and directories under {!r}: {!r}".format(self.targetdir, filepaths))
return os.path.exists(self.targetdir)
def is_package_exists(self):
return len(self._get_packages()) > 0
def _get_packages(self):
packages = glob.glob(os.path.join(self._project_dir, 'parts', '*.{}'.format(self.package_extension)))
log.info("Found the following packages: {!r}".format(packages))
return packages
def get_package(self):
return self._get_packages()[0]
def create_package(self):
from ..utils import chdir
with chdir(os.path.dirname(self._buildout_path)):
python = os.path.join('bin', 'python{}'.format('.exe' if os.name == 'nt' else ''))
buildout_script = os.path.join('bin', 'buildout{}'.format('-script.py' if os.name == 'nt' else ''))
stdout = execute_assert_success([python, buildout_script, '-v', 'install', 'pack']).get_stdout()
log.debug('package created, stdout: {}'.format(stdout))
def is_product_installed(self):
raise NotImplementedError()
def install_package(self, with_custom_actions=True):
raise NotImplementedError()
def uninstall_package(self, with_custom_actions=True):
raise NotImplementedError()
class MsiInstaller(Installer):
package_extension = 'msi'
executable_extension = 'exe'
@property
def package_code(self):
return self._parser.get('project', 'upgrade_code')
@property
def package_code_formatted(self):
return self.package_code.strip('{}').replace('-', '').upper()
def _get_installed_product_from_registry(self):
from infi.registry import LocalComputer
registry = LocalComputer()
userdata = registry.local_machine[INSTALLER_USERDATA]
for user in [user for user in userdata.values() if os.path.join('Products') in user]:
for product in user['Products'].values():
display_name = product['InstallProperties'].values_store['DisplayName'].to_python_object()
log.debug("product found: {!r}".format(display_name))
if display_name == self.product_name:
log.debug("Product is indeed installed")
return product
log.debug("Product is not installed")
return None
def is_product_installed(self):
return self._get_installed_product_from_registry() is not None
def install_package(self, with_custom_actions=True):
import io
logfile = self.get_package() + '.install.log'
with open(logfile, 'w'):
pass
args = ['msiexec', '/i', self.get_package(), '/passive', '/l*vx', logfile]
if not with_custom_actions:
args.append("NO_CUSTOM_ACTIONS=1")
with prevent_access_to_pypi_servers():
try:
execute_assert_success(args)
finally:
with io.open(logfile, encoding='utf-16') as fd:
print(fd.read())
def uninstall_package(self, with_custom_actions=True):
import io
logfile = self.get_package() + '.uninstall.log'
with open(logfile, 'w'):
pass
properties = self._get_installed_product_from_registry()['InstallProperties'].values_store
uninstall_string = properties['UninstallString'].to_python_object()
args = uninstall_string.split() + ['/passive', '/l*vx', logfile]
if not with_custom_actions:
args.append("NO_CUSTOM_ACTIONS=1")
try:
execute_assert_success(args)
finally:
with io.open(logfile, encoding='utf-16') as fd:
print(fd.read())
class RpmInstaller(Installer):
package_extension = 'rpm'
def is_product_installed(self):
pid = execute_assert_success(['rpm', '-q', self.package_name], allowed_return_codes=[0, 1])
output = pid.get_stderr() + pid.get_stdout()
return b'not installed' not in output
def install_package(self, with_custom_actions=True):
env = os.environ.copy()
if not with_custom_actions:
env['NO_CUSTOM_ACTIONS'] = '1'
env['LIBPATH'] = '' # On AIX we don't want Python's LIBPATH for rpm
with prevent_access_to_pypi_servers(), prevent_access_to_gcc():
execute_assert_success(['rpm', '-Uvh', self.get_package()], env=env)
def uninstall_package(self, with_custom_actions=True):
env = os.environ.copy()
if not with_custom_actions:
env['NO_CUSTOM_ACTIONS'] = '1'
env['LIBPATH'] = '' # On AIX we don't want Python's LIBPATH for rpm
execute_assert_success(['rpm', '-e', self.package_name], env=env)
class DebInstaller(Installer):
package_extension = 'deb'
def is_product_installed(self):
output = execute_assert_success(["dpkg", "--list", self.package_name], allowed_return_codes=[0, 1]).get_stdout().splitlines()
return any([line.startswith(b'ii') and self.package_name in line.decode('ascii') for line in output])
def install_package(self, with_custom_actions=True):
env = os.environ.copy()
if not with_custom_actions:
env['NO_CUSTOM_ACTIONS'] = '1'
with prevent_access_to_pypi_servers(), prevent_access_to_gcc():
execute_assert_success(['dpkg', '-i', self.get_package()], env=env)
def uninstall_package(self, with_custom_actions=True):
env = os.environ.copy()
if not with_custom_actions:
env['NO_CUSTOM_ACTIONS'] = '1'
execute_assert_success(['dpkg', '-r', self.package_name], env=env)
class PkgInstaller(Installer):
package_extension = 'pkg.gz'
def __init__(self, *args, **kwargs):
super(PkgInstaller, self).__init__(*args, **kwargs)
admin_file_content = '\n'.join(['partial=nocheck',
'runlevel=nocheck',
'idepend=nocheck',
'rdepend=nocheck',
'setuid=nocheck',
'action=nocheck',
'partial=nocheck',
'conflict=nocheck',
'authentication=quit',
'instance=overwrite',
'basedir=default'])
self.admin_file = NamedTemporaryFile(mode='w')
self.admin_file.write(admin_file_content)
self.admin_file.flush()
os.fsync(self.admin_file.fileno())
def is_product_installed(self):
return 0 == execute(["pkginfo", self.package_name]).get_returncode()
def install_package(self, with_custom_actions=True):
response_file = NamedTemporaryFile(mode='w')
response_file.write("NO_CUSTOM_ACTIONS={}".format(int(not with_custom_actions)))
response_file.flush()
os.fsync(response_file.fileno())
with prevent_access_to_pypi_servers(), prevent_access_to_gcc():
zipped_package_name = self.get_package()
unzipped_package_name = zipped_package_name[:-3]
execute_assert_success('gunzip -c {} > {}'.format(zipped_package_name, unzipped_package_name), shell=True)
execute_assert_success(['pkgadd',
'-n',
'-a', self.admin_file.name,
'-r', response_file.name,
'-d', unzipped_package_name,
self.package_name])
def uninstall_package(self, with_custom_actions=True):
# with_custom_actions is actually ignored here. This flag is passed to the installer through the response file.
# Luckily, the preremove scripts also gets this info (it's saved somwhere in the os until the removal)
execute_assert_success(['pkgrm', '-n', '-a', self.admin_file.name, self.package_name], allowed_return_codes=[0,])
|
[
"tempfile.NamedTemporaryFile",
"os.path.abspath",
"os.chmod",
"os.path.pathsep.join",
"os.path.dirname",
"os.environ.copy",
"os.path.exists",
"os.walk",
"infi.registry.LocalComputer",
"os.environ.get",
"tempfile.mkdtemp",
"os.path.relpath",
"six.moves.configparser.ConfigParser",
"infi.recipe.application_packager.utils.execute.execute_assert_success",
"io.open",
"os.path.join",
"infi.execute.execute",
"logging.getLogger"
] |
[((316, 343), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (333, 343), False, 'import logging\n'), ((366, 461), 'os.path.join', 'os.path.join', (['"""SOFTWARE"""', '"""Microsoft"""', '"""Windows"""', '"""CurrentVersion"""', '"""Installer"""', '"""UserData"""'], {}), "('SOFTWARE', 'Microsoft', 'Windows', 'CurrentVersion',\n 'Installer', 'UserData')\n", (378, 461), False, 'import os\n'), ((604, 637), 'os.path.join', 'os.path.join', (['"""/"""', '"""etc"""', '"""hosts"""'], {}), "('/', 'etc', 'hosts')\n", (616, 637), False, 'import os\n'), ((1967, 1981), 'tempfile.mkdtemp', 'mkdtemp', (['"""gcc"""'], {}), "('gcc')\n", (1974, 1981), False, 'from tempfile import mkdtemp\n'), ((1997, 2025), 'os.path.join', 'os.path.join', (['tempdir', '"""gcc"""'], {}), "(tempdir, 'gcc')\n", (2009, 2025), False, 'import os\n'), ((2104, 2133), 'os.chmod', 'os.chmod', (['fake_gcc', 'CHMOD_755'], {}), '(fake_gcc, CHMOD_755)\n', (2112, 2133), False, 'import os\n'), ((690, 733), 'os.environ.get', 'os.environ.get', (['"""SystemRoot"""', '"""C:\\\\Windows"""'], {}), "('SystemRoot', 'C:\\\\Windows')\n", (704, 733), False, 'import os\n'), ((2172, 2232), 'os.path.pathsep.join', 'os.path.pathsep.join', (["[tempdir, os.environ['PATH'], tempdir]"], {}), "([tempdir, os.environ['PATH'], tempdir])\n", (2192, 2232), False, 'import os\n'), ((2718, 2748), 'os.path.abspath', 'os.path.abspath', (['buildout_path'], {}), '(buildout_path)\n', (2733, 2748), False, 'import os\n'), ((2777, 2813), 'os.path.dirname', 'os.path.dirname', (['self._buildout_path'], {}), '(self._buildout_path)\n', (2792, 2813), False, 'import os\n'), ((2837, 2851), 'six.moves.configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (2849, 2851), False, 'from six.moves.configparser import ConfigParser, NoOptionError\n'), ((4059, 4088), 'os.path.exists', 'os.path.exists', (['buildout_path'], {}), '(buildout_path)\n', (4073, 4088), False, 'import os\n'), ((4293, 4323), 'os.path.exists', 
'os.path.exists', (['self.targetdir'], {}), '(self.targetdir)\n', (4307, 4323), False, 'import os\n'), ((4693, 4723), 'os.path.exists', 'os.path.exists', (['self.targetdir'], {}), '(self.targetdir)\n', (4707, 4723), False, 'import os\n'), ((6317, 6332), 'infi.registry.LocalComputer', 'LocalComputer', ([], {}), '()\n', (6330, 6332), False, 'from infi.registry import LocalComputer\n'), ((8414, 8503), 'infi.recipe.application_packager.utils.execute.execute_assert_success', 'execute_assert_success', (["['rpm', '-q', self.package_name]"], {'allowed_return_codes': '[0, 1]'}), "(['rpm', '-q', self.package_name],\n allowed_return_codes=[0, 1])\n", (8436, 8503), False, 'from infi.recipe.application_packager.utils.execute import execute_assert_success\n'), ((8671, 8688), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (8686, 8688), False, 'import os\n'), ((9072, 9089), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (9087, 9089), False, 'import os\n'), ((9254, 9319), 'infi.recipe.application_packager.utils.execute.execute_assert_success', 'execute_assert_success', (["['rpm', '-e', self.package_name]"], {'env': 'env'}), "(['rpm', '-e', self.package_name], env=env)\n", (9276, 9319), False, 'from infi.recipe.application_packager.utils.execute import execute_assert_success\n'), ((9736, 9753), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (9751, 9753), False, 'import os\n'), ((10059, 10076), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (10074, 10076), False, 'import os\n'), ((10164, 10230), 'infi.recipe.application_packager.utils.execute.execute_assert_success', 'execute_assert_success', (["['dpkg', '-r', self.package_name]"], {'env': 'env'}), "(['dpkg', '-r', self.package_name], env=env)\n", (10186, 10230), False, 'from infi.recipe.application_packager.utils.execute import execute_assert_success\n'), ((11042, 11070), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'mode': '"""w"""'}), "(mode='w')\n", (11060, 11070), False, 'from 
tempfile import NamedTemporaryFile\n'), ((11392, 11420), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'mode': '"""w"""'}), "(mode='w')\n", (11410, 11420), False, 'from tempfile import NamedTemporaryFile\n'), ((12520, 12637), 'infi.recipe.application_packager.utils.execute.execute_assert_success', 'execute_assert_success', (["['pkgrm', '-n', '-a', self.admin_file.name, self.package_name]"], {'allowed_return_codes': '[0]'}), "(['pkgrm', '-n', '-a', self.admin_file.name, self.\n package_name], allowed_return_codes=[0])\n", (12542, 12637), False, 'from infi.recipe.application_packager.utils.execute import execute_assert_success\n'), ((3580, 3646), 'os.path.join', 'os.path.join', (['"""C:\\\\Program Files"""', 'self.company', 'self.product_name'], {}), "('C:\\\\Program Files', self.company, self.product_name)\n", (3592, 3646), False, 'import os\n'), ((4362, 4385), 'os.walk', 'os.walk', (['self.targetdir'], {}), '(self.targetdir)\n', (4369, 4385), False, 'import os\n'), ((8161, 8189), 'infi.recipe.application_packager.utils.execute.execute_assert_success', 'execute_assert_success', (['args'], {}), '(args)\n', (8183, 8189), False, 'from infi.recipe.application_packager.utils.execute import execute_assert_success\n'), ((11898, 12044), 'infi.recipe.application_packager.utils.execute.execute_assert_success', 'execute_assert_success', (["['pkgadd', '-n', '-a', self.admin_file.name, '-r', response_file.name, '-d',\n unzipped_package_name, self.package_name]"], {}), "(['pkgadd', '-n', '-a', self.admin_file.name, '-r',\n response_file.name, '-d', unzipped_package_name, self.package_name])\n", (11920, 12044), False, 'from infi.recipe.application_packager.utils.execute import execute_assert_success\n'), ((4413, 4450), 'os.path.relpath', 'os.path.relpath', (['root', 'self.targetdir'], {}), '(root, self.targetdir)\n', (4428, 4450), False, 'import os\n'), ((5190, 5226), 'os.path.dirname', 'os.path.dirname', (['self._buildout_path'], {}), '(self._buildout_path)\n', 
(5205, 5226), False, 'import os\n'), ((7470, 7498), 'infi.recipe.application_packager.utils.execute.execute_assert_success', 'execute_assert_success', (['args'], {}), '(args)\n', (7492, 7498), False, 'from infi.recipe.application_packager.utils.execute import execute_assert_success\n'), ((8224, 8259), 'io.open', 'io.open', (['logfile'], {'encoding': '"""utf-16"""'}), "(logfile, encoding='utf-16')\n", (8231, 8259), False, 'import io\n'), ((4534, 4561), 'os.path.join', 'os.path.join', (['basedir', 'file'], {}), '(basedir, file)\n', (4546, 4561), False, 'import os\n'), ((5457, 5531), 'infi.recipe.application_packager.utils.execute.execute_assert_success', 'execute_assert_success', (["[python, buildout_script, '-v', 'install', 'pack']"], {}), "([python, buildout_script, '-v', 'install', 'pack'])\n", (5479, 5531), False, 'from infi.recipe.application_packager.utils.execute import execute_assert_success\n'), ((6454, 6478), 'os.path.join', 'os.path.join', (['"""Products"""'], {}), "('Products')\n", (6466, 6478), False, 'import os\n'), ((7541, 7576), 'io.open', 'io.open', (['logfile'], {'encoding': '"""utf-16"""'}), "(logfile, encoding='utf-16')\n", (7548, 7576), False, 'import io\n'), ((11253, 11292), 'infi.execute.execute', 'execute', (["['pkginfo', self.package_name]"], {}), "(['pkginfo', self.package_name])\n", (11260, 11292), False, 'from infi.execute import execute\n'), ((9437, 9531), 'infi.recipe.application_packager.utils.execute.execute_assert_success', 'execute_assert_success', (["['dpkg', '--list', self.package_name]"], {'allowed_return_codes': '[0, 1]'}), "(['dpkg', '--list', self.package_name],\n allowed_return_codes=[0, 1])\n", (9459, 9531), False, 'from infi.recipe.application_packager.utils.execute import execute_assert_success\n')]
|
# -*- coding: utf-8 -*-
"""A setuptools based module for the NIVA tsb module/application.
"""
from os import path
from setuptools import setup, find_packages
here = path.abspath(path.dirname(__file__))
# get the version from the __version__.py file
version_dict = {}
with open(path.join(here, 'pyniva', '__version__.py')) as f:
exec(f.read(), version_dict)
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='pyniva',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version=version_dict['__version__'],
description="Python wrapper/API for interacting with NIVA's data platform",
long_description=long_description,
long_description_content_type='text/markdown',
# The project's main homepage.
url='https://github.com/NIVANorge/pyniva',
# Author details
author='<NAME>',
author_email='<EMAIL>',
# Choose your license
license='MIT license',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
keywords='metadata timeseries data',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
install_requires=['pandas>=1.1,<2.0', 'numpy>=1.16,<2.0', 'requests>=2.20,<3.0',
'pyjwt>=1.7,<2.0', 'cryptography>=2.5,<3.0'],
test_suite='tests',
)
|
[
"os.path.dirname",
"os.path.join",
"setuptools.find_packages"
] |
[((180, 202), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (192, 202), False, 'from os import path\n'), ((280, 323), 'os.path.join', 'path.join', (['here', '"""pyniva"""', '"""__version__.py"""'], {}), "(here, 'pyniva', '__version__.py')\n", (289, 323), False, 'from os import path\n'), ((375, 403), 'os.path.join', 'path.join', (['here', '"""README.md"""'], {}), "(here, 'README.md')\n", (384, 403), False, 'from os import path\n'), ((1734, 1785), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['contrib', 'docs', 'tests']"}), "(exclude=['contrib', 'docs', 'tests'])\n", (1747, 1785), False, 'from setuptools import setup, find_packages\n')]
|
__copyright__ = 'Copyright (C) 2019, Nokia'
import os
import imp
from setuptools import setup, find_packages
VERSIONFILE = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'src', 'crl', 'examplelib', '_version.py')
def get_version():
return imp.load_source('_version', VERSIONFILE).get_version()
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name='crl.examplelib',
version=get_version(),
author='<NAME>',
author_email='<EMAIL>',
description='Example of Common Robot Library',
install_requires=[],
long_description=read('README.rst'),
license='BSD-3-Clause',
keywords='robotframework, example',
url='https://github.com/nokia/crl-examplelib',
packages=find_packages('src'),
package_dir={'': 'src'},
namespace_packages=['crl'],
entry_points={'robotdocsconf': [
'robotdocsconf = crl.examplelib.robotdocsconf:robotdocs']},
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development',
],
)
|
[
"imp.load_source",
"os.path.abspath",
"os.path.dirname",
"setuptools.find_packages"
] |
[((160, 185), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (175, 185), False, 'import os\n'), ((794, 814), 'setuptools.find_packages', 'find_packages', (['"""src"""'], {}), "('src')\n", (807, 814), False, 'from setuptools import setup, find_packages\n'), ((267, 307), 'imp.load_source', 'imp.load_source', (['"""_version"""', 'VERSIONFILE'], {}), "('_version', VERSIONFILE)\n", (282, 307), False, 'import imp\n'), ((368, 393), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (383, 393), False, 'import os\n')]
|
from copy import copy
import numpy as np
from nipy.core.image.image import Image
class ImageList(object):
''' Class to contain ND image as list of (N-1)D images '''
def __init__(self, images=None):
"""
A lightweight implementation of a list of images.
Parameters
----------
images : iterable
a iterable and sliceale object whose items are meant to be
images, this is checked by asserting that each has a
`coordmap` attribute
>>> import numpy as np
>>> from nipy.testing import funcfile
>>> from nipy.core.api import Image, ImageList
>>> from nipy.io.api import load_image
>>> funcim = load_image(funcfile)
>>> ilist = ImageList(funcim)
>>> sublist = ilist[2:5]
Slicing an ImageList returns a new ImageList
>>> isinstance(sublist, ImageList)
True
Indexing an ImageList returns a new Image
>>> newimg = ilist[2]
>>> isinstance(newimg, Image)
True
>>> isinstance(newimg, ImageList)
False
>>> np.asarray(sublist).shape
(3, 2, 20, 20)
>>> np.asarray(newimg).shape
(2, 20, 20)
"""
if images is None:
self.list = []
return
for im in images:
if not hasattr(im, "coordmap"):
raise ValueError("expecting each element of images "
" to have a 'coordmap' attribute")
self.list = images
@classmethod
def from_image(klass, image, axis=-1):
if axis is None:
raise ValueError('axis must be array axis no or -1')
imlist = []
coordmap = image.coordmap
data = np.asarray(image)
data = np.rollaxis(data, axis)
imlist = [Image(dataslice, copy(coordmap))
for dataslice in data]
return klass(imlist)
def __setitem__(self, index, value):
"""
self.list[index] = value
"""
self.list[index] = value
def __getitem__(self, index):
"""
self.list[index]
"""
if type(index) is type(1):
return self.list[index]
else:
return ImageList(images=self.list[index])
def __getslice__(self, i, j):
"""
Return another ImageList instance consisting with
images self.list[i:j]
"""
return ImageList(images=self.list[i:j])
def __array__(self):
"""Return data in ndarray. Called through numpy.array.
Examples
--------
>>> import numpy as np
>>> from nipy.testing import funcfile
>>> from nipy.core.api import ImageList
>>> from nipy.io.api import load_image
>>> funcim = load_image(funcfile)
>>> ilist = ImageList(funcim)
>>> np.asarray(ilist).shape
(20, 2, 20, 20)
"""
return np.asarray([np.asarray(im) for im in self.list])
def __iter__(self):
self._iter = iter(self.list)
return self
def next(self):
return self._iter.next()
|
[
"numpy.asarray",
"copy.copy",
"numpy.rollaxis"
] |
[((1776, 1793), 'numpy.asarray', 'np.asarray', (['image'], {}), '(image)\n', (1786, 1793), True, 'import numpy as np\n'), ((1809, 1832), 'numpy.rollaxis', 'np.rollaxis', (['data', 'axis'], {}), '(data, axis)\n', (1820, 1832), True, 'import numpy as np\n'), ((1868, 1882), 'copy.copy', 'copy', (['coordmap'], {}), '(coordmap)\n', (1872, 1882), False, 'from copy import copy\n'), ((3009, 3023), 'numpy.asarray', 'np.asarray', (['im'], {}), '(im)\n', (3019, 3023), True, 'import numpy as np\n')]
|
# Regular expression exercises from Google Python class
import re
# Example 1
match = re.search('iig','called piiig')
print(match)
print(match.group())
# Example 2
match = re.search('igs','called piiig')
print(match)
def Find(pat, txt):
match = re.search(pat, txt)
if match:
print(match.group())
else:
print('Not found')
# Example 3
Find('iig','called piiig')
Find('igs','called piiig')
# Additional notes
# .(dot) any char
# \w word char
# \d digit
# \s whitespace \S non-whitespace
# + 1 or more
# * 0 or more
# Example 4
Find('...g','called piiig')
# Example 5
Find('..gs','called piiig')
# Example 6
Find('..g','called piiig Another match xxxg')
# Example 7
Find('x..g','called piiig Another match xxxg')
# Example 8
Find('c\.call','c.called piiig Another match xxxg')
# Example 9
Find(r'c\.call','c.called piiig Another match xxxg')
# Example 10
Find(r':\w\w\w','blah :cat blah blah')
# Example 11
Find(r':\d\d\d','blah :cat :123 blah blah')
# Example 12
Find(r'\d\d\d','blah :cat :123 blah blah')
# Example 13
Find(r'\d\s+\d\s+\d','blah :cat :1 2 3 blah blah')
# Example 14
Find(r':\w+','blah :kitten :1 2 3 blah blah')
# Example 15
Find(r':.+','blah :kitten :1 2 3 blah blah')
# Example 16
Find(r':\w+','blah :kitten123%&fd*^ :1 2 3 blah blah')
# Example 17
Find(r':\S+','blah :kitten123%&fd*^ :1 2 3 blah blah')
# Example 18
Find(r'\w+@\w+','My email <EMAIL> blah @')
# Example 19
Find(r'[\w.]+@[\w.]+','My email <EMAIL> blah @') # Set of characters allowed, inside []
# Example 20
Find(r'[\w.]+@[\w.]+','My email <EMAIL> blah @')
# Example 21
Find(r'\w[\w.]+@[\w.]+','My email <EMAIL> blah @')
# Example 22 - Get the username and hostname
m = re.search(r'(\w[\w.]+)@([\w.]+)', 'My email <EMAIL> blah @')
print(m)
print(m.group())
print(m.group(1))
print(m.group(2))
# Example 23
out = re.findall(r'\w[\w.]+@[\w.]+','My email .<EMAIL> blah @ <EMAIL>')
print(out)
# Example 24
out = re.findall(r'(\w[\w.]+)@([\w.]+)','My email <EMAIL> blah @ <EMAIL>')
print(out)
# Example 25
out = re.findall(r'(\w[\w.]+)@([\w.]+)','My email .<EMAIL> blah @ <EMAIL>', re.IGNORECASE)
print(out)
|
[
"re.findall",
"re.search"
] |
[((89, 121), 're.search', 're.search', (['"""iig"""', '"""called piiig"""'], {}), "('iig', 'called piiig')\n", (98, 121), False, 'import re\n'), ((176, 208), 're.search', 're.search', (['"""igs"""', '"""called piiig"""'], {}), "('igs', 'called piiig')\n", (185, 208), False, 'import re\n'), ((1722, 1784), 're.search', 're.search', (['"""(\\\\w[\\\\w.]+)@([\\\\w.]+)"""', '"""My email <EMAIL> blah @"""'], {}), "('(\\\\w[\\\\w.]+)@([\\\\w.]+)', 'My email <EMAIL> blah @')\n", (1731, 1784), False, 'import re\n'), ((1865, 1933), 're.findall', 're.findall', (['"""\\\\w[\\\\w.]+@[\\\\w.]+"""', '"""My email .<EMAIL> blah @ <EMAIL>"""'], {}), "('\\\\w[\\\\w.]+@[\\\\w.]+', 'My email .<EMAIL> blah @ <EMAIL>')\n", (1875, 1933), False, 'import re\n'), ((1962, 2033), 're.findall', 're.findall', (['"""(\\\\w[\\\\w.]+)@([\\\\w.]+)"""', '"""My email <EMAIL> blah @ <EMAIL>"""'], {}), "('(\\\\w[\\\\w.]+)@([\\\\w.]+)', 'My email <EMAIL> blah @ <EMAIL>')\n", (1972, 2033), False, 'import re\n'), ((2062, 2154), 're.findall', 're.findall', (['"""(\\\\w[\\\\w.]+)@([\\\\w.]+)"""', '"""My email .<EMAIL> blah @ <EMAIL>"""', 're.IGNORECASE'], {}), "('(\\\\w[\\\\w.]+)@([\\\\w.]+)', 'My email .<EMAIL> blah @ <EMAIL>', re\n .IGNORECASE)\n", (2072, 2154), False, 'import re\n'), ((251, 270), 're.search', 're.search', (['pat', 'txt'], {}), '(pat, txt)\n', (260, 270), False, 'import re\n')]
|
from django.db import close_old_connections
from rest_framework_simplejwt.tokens import UntypedToken
from rest_framework_simplejwt.exceptions import InvalidToken, TokenError
from jwt import decode as jwt_decode
from django.conf import settings
from django.contrib.auth import get_user_model
from urllib.parse import parse_qs
from app.models import CustomUser
from asgiref.sync import sync_to_async
from channels.db import database_sync_to_async
from django.contrib.auth.models import AnonymousUser
@database_sync_to_async
def get_user(user_id):
try:
return CustomUser.objects.get(id=user_id)
except CustomUser.DoesNotExist:
return AnonymousUser()
class TokenAuthMiddleware:
def __init__(self, inner):
self.inner = inner
async def __call__(self, scope,receive,send):
token = parse_qs(scope["query_string"].decode("utf8"))["token"][0]
print(token)
try:
UntypedToken(token)
except (InvalidToken, TokenError) as e:
return None
else:
decoded_data = jwt_decode(token, settings.SECRET_KEY, algorithms=["HS256"])
scope['user'] = await get_user(int(decoded_data["user_id"]))
return await self.inner(scope,receive,send)
|
[
"django.contrib.auth.models.AnonymousUser",
"app.models.CustomUser.objects.get",
"jwt.decode",
"rest_framework_simplejwt.tokens.UntypedToken"
] |
[((570, 604), 'app.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 'user_id'}), '(id=user_id)\n', (592, 604), False, 'from app.models import CustomUser\n'), ((656, 671), 'django.contrib.auth.models.AnonymousUser', 'AnonymousUser', ([], {}), '()\n', (669, 671), False, 'from django.contrib.auth.models import AnonymousUser\n'), ((941, 960), 'rest_framework_simplejwt.tokens.UntypedToken', 'UntypedToken', (['token'], {}), '(token)\n', (953, 960), False, 'from rest_framework_simplejwt.tokens import UntypedToken\n'), ((1074, 1134), 'jwt.decode', 'jwt_decode', (['token', 'settings.SECRET_KEY'], {'algorithms': "['HS256']"}), "(token, settings.SECRET_KEY, algorithms=['HS256'])\n", (1084, 1134), True, 'from jwt import decode as jwt_decode\n')]
|
#
# This file is part of Invenio.
# Copyright (C) 2022 Graz University of Technology.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Create featured communities table"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import mysql
from sqlalchemy_utils import UUIDType
# revision identifiers, used by Alembic.
revision = '<KEY>'
down_revision = 'fbe746957cfc'
branch_labels = ()
depends_on = None
def upgrade():
"""Upgrade database."""
op.create_table(
'communities_featured',
sa.Column(
'created',
sa.DateTime().with_variant(mysql.DATETIME(fsp=6), 'mysql'),
nullable=False,
),
sa.Column(
'updated',
sa.DateTime().with_variant(mysql.DATETIME(fsp=6), 'mysql'),
nullable=False,
),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('community_id', UUIDType(), nullable=False),
sa.Column(
'start_date',
sa.DateTime().with_variant(mysql.DATETIME(fsp=6), 'mysql'),
nullable=False,
),
sa.ForeignKeyConstraint(
['community_id'],
['communities_metadata.id'],
name=op.f(
'fk_communities_featured_community_id_communities_metadata'
),
),
sa.PrimaryKeyConstraint('id', name=op.f('pk_communities_featured')),
)
def downgrade():
"""Downgrade database."""
op.drop_table('communities_featured')
|
[
"alembic.op.drop_table",
"sqlalchemy.DateTime",
"alembic.op.f",
"sqlalchemy.dialects.mysql.DATETIME",
"sqlalchemy_utils.UUIDType",
"sqlalchemy.Integer"
] |
[((1567, 1604), 'alembic.op.drop_table', 'op.drop_table', (['"""communities_featured"""'], {}), "('communities_featured')\n", (1580, 1604), False, 'from alembic import op\n'), ((952, 964), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (962, 964), True, 'import sqlalchemy as sa\n'), ((1017, 1027), 'sqlalchemy_utils.UUIDType', 'UUIDType', ([], {}), '()\n', (1025, 1027), False, 'from sqlalchemy_utils import UUIDType\n'), ((703, 724), 'sqlalchemy.dialects.mysql.DATETIME', 'mysql.DATETIME', ([], {'fsp': '(6)'}), '(fsp=6)\n', (717, 724), False, 'from sqlalchemy.dialects import mysql\n'), ((856, 877), 'sqlalchemy.dialects.mysql.DATETIME', 'mysql.DATETIME', ([], {'fsp': '(6)'}), '(fsp=6)\n', (870, 877), False, 'from sqlalchemy.dialects import mysql\n'), ((1130, 1151), 'sqlalchemy.dialects.mysql.DATETIME', 'mysql.DATETIME', ([], {'fsp': '(6)'}), '(fsp=6)\n', (1144, 1151), False, 'from sqlalchemy.dialects import mysql\n'), ((1323, 1388), 'alembic.op.f', 'op.f', (['"""fk_communities_featured_community_id_communities_metadata"""'], {}), "('fk_communities_featured_community_id_communities_metadata')\n", (1327, 1388), False, 'from alembic import op\n'), ((1474, 1505), 'alembic.op.f', 'op.f', (['"""pk_communities_featured"""'], {}), "('pk_communities_featured')\n", (1478, 1505), False, 'from alembic import op\n'), ((676, 689), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (687, 689), True, 'import sqlalchemy as sa\n'), ((829, 842), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (840, 842), True, 'import sqlalchemy as sa\n'), ((1103, 1116), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1114, 1116), True, 'import sqlalchemy as sa\n')]
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='azure_blob_check',
version='2.0',
description='azure blob filelist check',
author='<NAME>',
author_email='<EMAIL>',
url='https://github.com/kyungjunleeme/azure_blob_check',
download_url='https://github.com/kyungjunleeme/azure_blob_check/archive/main.zip',
packages=find_packages(exclude=['docs', 'tests*']),
install_requires=['azure-storage-blob', 'pytz', 'pandas', 'openpyxl'],
entry_points={'console_scripts': [
'blob_check=azure_blob_check.blob_check:main']},
keywords=['azure_blob', 'blob_list'],
python_requires='>=3.6',
zip_safe=False,
classifiers=[
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
]
)
|
[
"setuptools.find_packages"
] |
[((380, 421), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['docs', 'tests*']"}), "(exclude=['docs', 'tests*'])\n", (393, 421), False, 'from setuptools import setup, find_packages\n')]
|
import os
import argparse
import multiprocessing
from typing import Dict, Union
import numpy as np
import pandas as pd
import skimage.io
from tqdm import tqdm
from src import config
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser()
parser.add_argument(
'--masks',
type=str,
required=True,
help='Path to a directory with training masks',
)
parser.add_argument(
'--out-csv',
type=str,
required=True,
help='Where to save .csv dataframe with folds split',
)
return parser.parse_args()
class MetadataGetter:
def __init__(self, masks_dir: str):
self._masks_dir = masks_dir
def get_metadata(self, image_id: str) -> Dict[str, Union[str, float]]:
mask = self._load_mask(image_id) > 127
return {
'id': image_id,
'city_id': self._get_city_id(image_id),
'roads_ratio': mask.sum() / mask.size,
}
def _load_mask(self, image_id: str) -> np.ndarray:
path = os.path.join(self._masks_dir, f'{image_id}.tif')
if not os.path.exists(path):
raise ValueError(path)
mask = skimage.io.imread(path)
assert mask is not None, path
# print('SHAPE', mask.shape)
mask = mask[:, :, -1]
return mask
@staticmethod
def _get_city_id(image_id: str) -> str:
city_id = '_'.join(image_id.split('_')[3:6])
assert city_id in config.TRAINING_CITIES, (city_id, config.TRAINING_CITIES)
return city_id
def get_folds_split(df: pd.DataFrame, num_folds: int = 5) -> pd.DataFrame:
df = df.sample(n=len(df), replace=False)
df = df.sort_values(by=['city_id', 'roads_ratio']).reset_index(drop=True)
df['fold_id'] = np.arange(len(df)) % num_folds
return df
def main():
args = parse_args()
image_ids = [os.path.splitext(filename)[0] for filename in os.listdir(args.masks)]
with multiprocessing.Pool(16) as pool:
metadata = list(tqdm(
pool.imap_unordered(MetadataGetter(args.masks).get_metadata, image_ids),
total=len(image_ids),
desc='Extracting metadata...',
))
df = pd.DataFrame(metadata)
df = get_folds_split(df)
df.to_csv(args.out_csv, index=False)
print(f'Saved folds dataframe of shape {df.shape} to `{args.out_csv}`')
if __name__ == '__main__':
main()
|
[
"pandas.DataFrame",
"argparse.ArgumentParser",
"os.path.exists",
"os.path.splitext",
"multiprocessing.Pool",
"os.path.join",
"os.listdir"
] |
[((239, 264), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (262, 264), False, 'import argparse\n'), ((2205, 2227), 'pandas.DataFrame', 'pd.DataFrame', (['metadata'], {}), '(metadata)\n', (2217, 2227), True, 'import pandas as pd\n'), ((1050, 1098), 'os.path.join', 'os.path.join', (['self._masks_dir', 'f"""{image_id}.tif"""'], {}), "(self._masks_dir, f'{image_id}.tif')\n", (1062, 1098), False, 'import os\n'), ((1958, 1982), 'multiprocessing.Pool', 'multiprocessing.Pool', (['(16)'], {}), '(16)\n', (1978, 1982), False, 'import multiprocessing\n'), ((1114, 1134), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (1128, 1134), False, 'import os\n'), ((1878, 1904), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (1894, 1904), False, 'import os\n'), ((1924, 1946), 'os.listdir', 'os.listdir', (['args.masks'], {}), '(args.masks)\n', (1934, 1946), False, 'import os\n')]
|
from libsaas.services import base
from . import resource
class PlansBaseResource(resource.StripeResource):
path = 'plans'
class Plan(PlansBaseResource):
def create(self, *args, **kwargs):
raise base.MethodNotSupported()
class Plans(resource.ListResourceMixin, PlansBaseResource):
def update(self, *args, **kwargs):
raise base.MethodNotSupported()
def delete(self, *args, **kwargs):
raise base.MethodNotSupported()
|
[
"libsaas.services.base.MethodNotSupported"
] |
[((217, 242), 'libsaas.services.base.MethodNotSupported', 'base.MethodNotSupported', ([], {}), '()\n', (240, 242), False, 'from libsaas.services import base\n'), ((359, 384), 'libsaas.services.base.MethodNotSupported', 'base.MethodNotSupported', ([], {}), '()\n', (382, 384), False, 'from libsaas.services import base\n'), ((439, 464), 'libsaas.services.base.MethodNotSupported', 'base.MethodNotSupported', ([], {}), '()\n', (462, 464), False, 'from libsaas.services import base\n')]
|
# !/usr/bin/python
# -*- coding: utf-8 -*-
# @time : 2019/11/12 16:45
# @author : Mo
# @function:
from keras_textclassification import train
train(graph='TextCNN', # 必填, 算法名, 可选"ALBERT","BERT","XLNET","FASTTEXT","TEXTCNN","CHARCNN",
# "TEXTRNN","RCNN","DCNN","DPCNN","VDCNN","CRNN","DEEPMOJI",
# "SELFATTENTION", "HAN","CAPSULE","TRANSFORMER"
label=17, # 必填, 类别数, 训练集和测试集合必须一样
path_train_data=None, # 必填, 训练数据文件, csv格式, 必须含'label,ques'头文件, 详见keras_textclassification/data
path_dev_data=None, # 必填, 测试数据文件, csv格式, 必须含'label,ques'头文件, 详见keras_textclassification/data
rate=1, # 可填, 训练数据选取比例
hyper_parameters=None) # 可填, json格式, 超参数, 默认embedding为'char','random'
|
[
"keras_textclassification.train"
] |
[((147, 256), 'keras_textclassification.train', 'train', ([], {'graph': '"""TextCNN"""', 'label': '(17)', 'path_train_data': 'None', 'path_dev_data': 'None', 'rate': '(1)', 'hyper_parameters': 'None'}), "(graph='TextCNN', label=17, path_train_data=None, path_dev_data=None,\n rate=1, hyper_parameters=None)\n", (152, 256), False, 'from keras_textclassification import train\n')]
|
from app import app
import routes
import rest
from myhvac_core import cfg
from myhvac_core.db import api as db
from myhvac_core import log
import logging
LOG = logging.getLogger(__name__)
opts = [
cfg.BoolOpt('debug', default=False,
help='Enables debug mode for the flask rest api'),
cfg.IntOpt('port', default=8081, help='Http port of the webserver')
]
CONF = cfg.CONF
CONF.register_opts(opts, 'rest_api')
CONF = cfg.CONF
def init():
try:
CONF(project='myhvac_service')
except cfg.RequiredOptError:
CONF.print_help()
raise SystemExit(1)
log.init_log()
db.init_db()
if __name__ == '__main__':
init()
app.run()
|
[
"myhvac_core.cfg.BoolOpt",
"myhvac_core.cfg.IntOpt",
"myhvac_core.log.init_log",
"myhvac_core.db.api.init_db",
"logging.getLogger",
"app.app.run"
] |
[((164, 191), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (181, 191), False, 'import logging\n'), ((206, 296), 'myhvac_core.cfg.BoolOpt', 'cfg.BoolOpt', (['"""debug"""'], {'default': '(False)', 'help': '"""Enables debug mode for the flask rest api"""'}), "('debug', default=False, help=\n 'Enables debug mode for the flask rest api')\n", (217, 296), False, 'from myhvac_core import cfg\n'), ((313, 380), 'myhvac_core.cfg.IntOpt', 'cfg.IntOpt', (['"""port"""'], {'default': '(8081)', 'help': '"""Http port of the webserver"""'}), "('port', default=8081, help='Http port of the webserver')\n", (323, 380), False, 'from myhvac_core import cfg\n'), ((606, 620), 'myhvac_core.log.init_log', 'log.init_log', ([], {}), '()\n', (618, 620), False, 'from myhvac_core import log\n'), ((625, 637), 'myhvac_core.db.api.init_db', 'db.init_db', ([], {}), '()\n', (635, 637), True, 'from myhvac_core.db import api as db\n'), ((682, 691), 'app.app.run', 'app.run', ([], {}), '()\n', (689, 691), False, 'from app import app\n')]
|
import adv_test
import adv
import vanessa
def module():
return Vanessa
class Vanessa(vanessa.Vanessa):
comment = 'void weapon vs HMS'
def pre(this):
this.conf['str_w'] = 1.5*380
this.conf['mod_w'] = ('att','killer',0.2)
if this.condition('last offense'):
this.o_init = this.init
this.init = this.c_init
def init(this):
this.charge_p('prep','50%')
def c_init(this):
this.o_init()
adv.Selfbuff('last_offense',0.3,15).on()
if __name__ == '__main__':
conf = {}
conf['acl'] = """
`s1
`s2
`fs,seq=5
"""
adv_test.test(module(), conf, verbose=0)
|
[
"adv.Selfbuff"
] |
[((480, 517), 'adv.Selfbuff', 'adv.Selfbuff', (['"""last_offense"""', '(0.3)', '(15)'], {}), "('last_offense', 0.3, 15)\n", (492, 517), False, 'import adv\n')]
|
# Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test cases for merge_header_definitions module."""
import unittest
from pysam import libcbcf
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from gcp_variant_transforms.beam_io import vcf_header_io
from gcp_variant_transforms.transforms import merge_header_definitions
from gcp_variant_transforms.libs.vcf_header_definitions_merger import Definition
from gcp_variant_transforms.libs.vcf_header_definitions_merger import VcfHeaderDefinitions
class MergeHeadersTest(unittest.TestCase):
def _get_header_from_lines(self, lines, file_path):
header = libcbcf.VariantHeader()
for line in lines[:-1]:
header.add_line(line)
return vcf_header_io.VcfHeader(infos=header.info,
filters=header.filters,
alts=header.alts,
formats=header.formats,
contigs=header.contigs,
file_path=file_path)
def test_merge_header_definitions_one_header(self):
lines = [
'##INFO=<ID=NS,Number=1,Type=Integer,Description="Number samples">\n',
'#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample1 Sample2\n'
]
headers = self._get_header_from_lines(lines, 'file1')
pipeline = TestPipeline()
merged_definitions = (
pipeline
| Create([headers])
| 'MergeDefinitions' >> merge_header_definitions.MergeDefinitions())
expected = VcfHeaderDefinitions()
expected._infos = {'NS': {Definition(1, 'Integer'): ['file1']}}
assert_that(merged_definitions, equal_to([expected]))
pipeline.run()
def test_merge_header_definitions_two_conflicting_headers(self):
lines_1 = [
'##INFO=<ID=NS,Number=1,Type=Integer,Description="Number samples">\n',
'#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample1 Sample2\n'
]
lines_2 = [
'##INFO=<ID=NS,Number=1,Type=Float,Description="Number samples">\n',
'#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample3\n'
]
headers_1 = self._get_header_from_lines(lines_1, 'file1')
headers_2 = self._get_header_from_lines(lines_2, 'file2')
pipeline = TestPipeline()
merged_definitions = (
pipeline
| Create([headers_1, headers_2])
| 'MergeDefinitions' >> merge_header_definitions.MergeDefinitions())
expected = VcfHeaderDefinitions()
expected._infos = {'NS': {Definition(1, 'Integer'): ['file1'],
Definition(1, 'Float'): ['file2']}}
assert_that(merged_definitions, equal_to([expected]))
pipeline.run()
def test_merge_header_definitions_no_conflicting_headers(self):
lines_1 = [
'##FORMAT=<ID=NS,Number=1,Type=Float,Description="Number samples">\n',
'#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample1 Sample2\n'
]
lines_2 = [
'##FORMAT=<ID=DP,Number=2,Type=Float,Description="Total Depth">\n',
'#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample3\n'
]
headers_1 = self._get_header_from_lines(lines_1, 'file1')
headers_2 = self._get_header_from_lines(lines_2, 'file2')
pipeline = TestPipeline()
merged_definitions = (
pipeline
| Create([headers_1, headers_2])
| 'MergeDefinitions' >> merge_header_definitions.MergeDefinitions())
expected = VcfHeaderDefinitions()
expected._formats = {'NS': {Definition(1, 'Float'): ['file1']},
'DP': {Definition(2, 'Float'): ['file2']}}
assert_that(merged_definitions, equal_to([expected]))
pipeline.run()
def test_merge_header_definitions_same_id_in_info_and_format_headers(self):
lines_1 = [
'##INFO=<ID=NS,Number=1,Type=Integer,Description="Number samples">\n',
'#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample1 Sample2\n'
]
lines_2 = [
'##FORMAT=<ID=NS,Number=1,Type=Float,Description="Number samples">\n',
'#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample3\n'
]
headers_1 = self._get_header_from_lines(lines_1, 'file1')
headers_2 = self._get_header_from_lines(lines_2, 'file2')
pipeline = TestPipeline()
merged_definitions = (
pipeline
| Create([headers_1, headers_2])
| 'MergeDefinitions' >> merge_header_definitions.MergeDefinitions())
expected = VcfHeaderDefinitions()
expected._infos = {'NS': {Definition(1, 'Integer'): ['file1']}}
expected._formats = {'NS': {Definition(1, 'Float'): ['file2']}}
assert_that(merged_definitions, equal_to([expected]))
pipeline.run()
def test_merge_header_definitions_save_five_copies(self):
lines_1 = [
'##INFO=<ID=NS,Number=1,Type=Float,Description="Number samples">\n',
'#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample1 Sample2\n'
]
lines_2 = [
'##INFO=<ID=NS,Number=1,Type=Integer,Description="Number samples">\n',
'#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample3\n'
]
file_names = ['file1', 'file2', 'file3', 'file4', 'file5', 'file6']
headers = []
for file_name in file_names:
headers.append(self._get_header_from_lines(lines_1, file_name))
headers.append(self._get_header_from_lines(lines_2, 'file7'))
pipeline = TestPipeline()
merged_definitions = (
pipeline
| Create(headers, reshuffle=False)
| 'MergeDefinitions' >> merge_header_definitions.MergeDefinitions())
expected = VcfHeaderDefinitions()
expected._infos = {
'NS': {Definition(1, 'Float'):
['file1', 'file2', 'file3', 'file4', 'file5'],
Definition(1, 'Integer'): ['file7']}}
assert_that(merged_definitions, equal_to([expected]))
pipeline.run()
|
[
"apache_beam.testing.util.equal_to",
"apache_beam.testing.test_pipeline.TestPipeline",
"gcp_variant_transforms.beam_io.vcf_header_io.VcfHeader",
"pysam.libcbcf.VariantHeader",
"gcp_variant_transforms.libs.vcf_header_definitions_merger.VcfHeaderDefinitions",
"gcp_variant_transforms.transforms.merge_header_definitions.MergeDefinitions",
"gcp_variant_transforms.libs.vcf_header_definitions_merger.Definition",
"apache_beam.transforms.Create"
] |
[((1306, 1329), 'pysam.libcbcf.VariantHeader', 'libcbcf.VariantHeader', ([], {}), '()\n', (1327, 1329), False, 'from pysam import libcbcf\n'), ((1397, 1560), 'gcp_variant_transforms.beam_io.vcf_header_io.VcfHeader', 'vcf_header_io.VcfHeader', ([], {'infos': 'header.info', 'filters': 'header.filters', 'alts': 'header.alts', 'formats': 'header.formats', 'contigs': 'header.contigs', 'file_path': 'file_path'}), '(infos=header.info, filters=header.filters, alts=\n header.alts, formats=header.formats, contigs=header.contigs, file_path=\n file_path)\n', (1420, 1560), False, 'from gcp_variant_transforms.beam_io import vcf_header_io\n'), ((2034, 2048), 'apache_beam.testing.test_pipeline.TestPipeline', 'TestPipeline', ([], {}), '()\n', (2046, 2048), False, 'from apache_beam.testing.test_pipeline import TestPipeline\n'), ((2214, 2236), 'gcp_variant_transforms.libs.vcf_header_definitions_merger.VcfHeaderDefinitions', 'VcfHeaderDefinitions', ([], {}), '()\n', (2234, 2236), False, 'from gcp_variant_transforms.libs.vcf_header_definitions_merger import VcfHeaderDefinitions\n'), ((2942, 2956), 'apache_beam.testing.test_pipeline.TestPipeline', 'TestPipeline', ([], {}), '()\n', (2954, 2956), False, 'from apache_beam.testing.test_pipeline import TestPipeline\n'), ((3135, 3157), 'gcp_variant_transforms.libs.vcf_header_definitions_merger.VcfHeaderDefinitions', 'VcfHeaderDefinitions', ([], {}), '()\n', (3155, 3157), False, 'from gcp_variant_transforms.libs.vcf_header_definitions_merger import VcfHeaderDefinitions\n'), ((3926, 3940), 'apache_beam.testing.test_pipeline.TestPipeline', 'TestPipeline', ([], {}), '()\n', (3938, 3940), False, 'from apache_beam.testing.test_pipeline import TestPipeline\n'), ((4119, 4141), 'gcp_variant_transforms.libs.vcf_header_definitions_merger.VcfHeaderDefinitions', 'VcfHeaderDefinitions', ([], {}), '()\n', (4139, 4141), False, 'from gcp_variant_transforms.libs.vcf_header_definitions_merger import VcfHeaderDefinitions\n'), ((4928, 4942), 
'apache_beam.testing.test_pipeline.TestPipeline', 'TestPipeline', ([], {}), '()\n', (4940, 4942), False, 'from apache_beam.testing.test_pipeline import TestPipeline\n'), ((5121, 5143), 'gcp_variant_transforms.libs.vcf_header_definitions_merger.VcfHeaderDefinitions', 'VcfHeaderDefinitions', ([], {}), '()\n', (5141, 5143), False, 'from gcp_variant_transforms.libs.vcf_header_definitions_merger import VcfHeaderDefinitions\n'), ((6046, 6060), 'apache_beam.testing.test_pipeline.TestPipeline', 'TestPipeline', ([], {}), '()\n', (6058, 6060), False, 'from apache_beam.testing.test_pipeline import TestPipeline\n'), ((6241, 6263), 'gcp_variant_transforms.libs.vcf_header_definitions_merger.VcfHeaderDefinitions', 'VcfHeaderDefinitions', ([], {}), '()\n', (6261, 6263), False, 'from gcp_variant_transforms.libs.vcf_header_definitions_merger import VcfHeaderDefinitions\n'), ((2341, 2361), 'apache_beam.testing.util.equal_to', 'equal_to', (['[expected]'], {}), '([expected])\n', (2349, 2361), False, 'from apache_beam.testing.util import equal_to\n'), ((3327, 3347), 'apache_beam.testing.util.equal_to', 'equal_to', (['[expected]'], {}), '([expected])\n', (3335, 3347), False, 'from apache_beam.testing.util import equal_to\n'), ((4314, 4334), 'apache_beam.testing.util.equal_to', 'equal_to', (['[expected]'], {}), '([expected])\n', (4322, 4334), False, 'from apache_beam.testing.util import equal_to\n'), ((5317, 5337), 'apache_beam.testing.util.equal_to', 'equal_to', (['[expected]'], {}), '([expected])\n', (5325, 5337), False, 'from apache_beam.testing.util import equal_to\n'), ((6482, 6502), 'apache_beam.testing.util.equal_to', 'equal_to', (['[expected]'], {}), '([expected])\n', (6490, 6502), False, 'from apache_beam.testing.util import equal_to\n'), ((2103, 2120), 'apache_beam.transforms.Create', 'Create', (['[headers]'], {}), '([headers])\n', (2109, 2120), False, 'from apache_beam.transforms import Create\n'), ((2153, 2196), 
'gcp_variant_transforms.transforms.merge_header_definitions.MergeDefinitions', 'merge_header_definitions.MergeDefinitions', ([], {}), '()\n', (2194, 2196), False, 'from gcp_variant_transforms.transforms import merge_header_definitions\n'), ((2267, 2291), 'gcp_variant_transforms.libs.vcf_header_definitions_merger.Definition', 'Definition', (['(1)', '"""Integer"""'], {}), "(1, 'Integer')\n", (2277, 2291), False, 'from gcp_variant_transforms.libs.vcf_header_definitions_merger import Definition\n'), ((3011, 3041), 'apache_beam.transforms.Create', 'Create', (['[headers_1, headers_2]'], {}), '([headers_1, headers_2])\n', (3017, 3041), False, 'from apache_beam.transforms import Create\n'), ((3074, 3117), 'gcp_variant_transforms.transforms.merge_header_definitions.MergeDefinitions', 'merge_header_definitions.MergeDefinitions', ([], {}), '()\n', (3115, 3117), False, 'from gcp_variant_transforms.transforms import merge_header_definitions\n'), ((3188, 3212), 'gcp_variant_transforms.libs.vcf_header_definitions_merger.Definition', 'Definition', (['(1)', '"""Integer"""'], {}), "(1, 'Integer')\n", (3198, 3212), False, 'from gcp_variant_transforms.libs.vcf_header_definitions_merger import Definition\n'), ((3255, 3277), 'gcp_variant_transforms.libs.vcf_header_definitions_merger.Definition', 'Definition', (['(1)', '"""Float"""'], {}), "(1, 'Float')\n", (3265, 3277), False, 'from gcp_variant_transforms.libs.vcf_header_definitions_merger import Definition\n'), ((3995, 4025), 'apache_beam.transforms.Create', 'Create', (['[headers_1, headers_2]'], {}), '([headers_1, headers_2])\n', (4001, 4025), False, 'from apache_beam.transforms import Create\n'), ((4058, 4101), 'gcp_variant_transforms.transforms.merge_header_definitions.MergeDefinitions', 'merge_header_definitions.MergeDefinitions', ([], {}), '()\n', (4099, 4101), False, 'from gcp_variant_transforms.transforms import merge_header_definitions\n'), ((4174, 4196), 'gcp_variant_transforms.libs.vcf_header_definitions_merger.Definition', 
'Definition', (['(1)', '"""Float"""'], {}), "(1, 'Float')\n", (4184, 4196), False, 'from gcp_variant_transforms.libs.vcf_header_definitions_merger import Definition\n'), ((4242, 4264), 'gcp_variant_transforms.libs.vcf_header_definitions_merger.Definition', 'Definition', (['(2)', '"""Float"""'], {}), "(2, 'Float')\n", (4252, 4264), False, 'from gcp_variant_transforms.libs.vcf_header_definitions_merger import Definition\n'), ((4997, 5027), 'apache_beam.transforms.Create', 'Create', (['[headers_1, headers_2]'], {}), '([headers_1, headers_2])\n', (5003, 5027), False, 'from apache_beam.transforms import Create\n'), ((5060, 5103), 'gcp_variant_transforms.transforms.merge_header_definitions.MergeDefinitions', 'merge_header_definitions.MergeDefinitions', ([], {}), '()\n', (5101, 5103), False, 'from gcp_variant_transforms.transforms import merge_header_definitions\n'), ((5174, 5198), 'gcp_variant_transforms.libs.vcf_header_definitions_merger.Definition', 'Definition', (['(1)', '"""Integer"""'], {}), "(1, 'Integer')\n", (5184, 5198), False, 'from gcp_variant_transforms.libs.vcf_header_definitions_merger import Definition\n'), ((5244, 5266), 'gcp_variant_transforms.libs.vcf_header_definitions_merger.Definition', 'Definition', (['(1)', '"""Float"""'], {}), "(1, 'Float')\n", (5254, 5266), False, 'from gcp_variant_transforms.libs.vcf_header_definitions_merger import Definition\n'), ((6115, 6147), 'apache_beam.transforms.Create', 'Create', (['headers'], {'reshuffle': '(False)'}), '(headers, reshuffle=False)\n', (6121, 6147), False, 'from apache_beam.transforms import Create\n'), ((6180, 6223), 'gcp_variant_transforms.transforms.merge_header_definitions.MergeDefinitions', 'merge_header_definitions.MergeDefinitions', ([], {}), '()\n', (6221, 6223), False, 'from gcp_variant_transforms.transforms import merge_header_definitions\n'), ((6303, 6325), 'gcp_variant_transforms.libs.vcf_header_definitions_merger.Definition', 'Definition', (['(1)', '"""Float"""'], {}), "(1, 'Float')\n", 
(6313, 6325), False, 'from gcp_variant_transforms.libs.vcf_header_definitions_merger import Definition\n'), ((6408, 6432), 'gcp_variant_transforms.libs.vcf_header_definitions_merger.Definition', 'Definition', (['(1)', '"""Integer"""'], {}), "(1, 'Integer')\n", (6418, 6432), False, 'from gcp_variant_transforms.libs.vcf_header_definitions_merger import Definition\n')]
|
"""
Quick Sort is one of the most efficient sorting algorithms.
It is based on the splitting of the input list into smaller lists.
Quick Sort works better with smaller data sets in comparison to merge sort.
"""
import random
rand_list = [random.randint(1, 100) for i in range(0,8)]
def swap(arr: list, i: int, k: int) -> None:
"""
:param arr: input array
:type arr: list
:param i: first element to swap in list
:type i: int
:param k: second element to swap in list
:type k: int
:return: None
"""
arr[i], arr[k] = arr[k], arr[i]
|
[
"random.randint"
] |
[((240, 262), 'random.randint', 'random.randint', (['(1)', '(100)'], {}), '(1, 100)\n', (254, 262), False, 'import random\n')]
|
import json
import requests
from requests.exceptions import RequestException
import re
def get_one_page(url,**headers):
try:
response = requests.get(url,headers = headers)
if response.status_code == 200:
return response.text
return None
except RequestException:
return 'Exception'
def parse_one_page(html):
pattern = re.compile('<div class="hd".*?href="(.*?)".*?"title">(.*?)</span>.*?"bd">.*?<p class="">(.*?)</p>.*?"star">.*?"v:average">(.*?)</span>.*?inq">(.*?)</span>', re.S)
items = re.findall(pattern, html)
for item in items:
yield {
'url':item[0],
'name':item[1],
'actor':re.sub(' |...<br>\\n', '', item[2].strip()),
'motor':item[3]
}
def write_to_file(content):
with open('doubanTop250.txt','a',encoding='utf-8') as f:
f.write(json.dumps(content,ensure_ascii=False) + '\n' )
def main(num):
url = 'https://movie.douban.com/top250?start='+str(num)+'&filter='
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.132 Safari/537.36'}
html = get_one_page(url,**headers)
for item in parse_one_page(html):
print (item)
write_to_file(item)
if __name__ == '__main__':
for i in range(0,225,25):
main(i)
|
[
"re.findall",
"json.dumps",
"requests.get",
"re.compile"
] |
[((375, 547), 're.compile', 're.compile', (['"""<div class="hd".*?href="(.*?)".*?"title">(.*?)</span>.*?"bd">.*?<p class="">(.*?)</p>.*?"star">.*?"v:average">(.*?)</span>.*?inq">(.*?)</span>"""', 're.S'], {}), '(\n \'<div class="hd".*?href="(.*?)".*?"title">(.*?)</span>.*?"bd">.*?<p class="">(.*?)</p>.*?"star">.*?"v:average">(.*?)</span>.*?inq">(.*?)</span>\'\n , re.S)\n', (385, 547), False, 'import re\n'), ((551, 576), 're.findall', 're.findall', (['pattern', 'html'], {}), '(pattern, html)\n', (561, 576), False, 'import re\n'), ((149, 183), 'requests.get', 'requests.get', (['url'], {'headers': 'headers'}), '(url, headers=headers)\n', (161, 183), False, 'import requests\n'), ((885, 924), 'json.dumps', 'json.dumps', (['content'], {'ensure_ascii': '(False)'}), '(content, ensure_ascii=False)\n', (895, 924), False, 'import json\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
###############################################################################
import argparse
import sys
parser=argparse.ArgumentParser(
description='''Parse pfam file''')
__file__ = "pfam_parser.py"
__author__ = '<NAME> (<EMAIL>)'
__version__ = '0.8'
__date__ = 'December 3rd, 2020'
parser.add_argument('inputFile',
help='Full path to the input directory where all files are')
# Execute parse_args()
args = parser.parse_args()
# import standard Python modules
import os
import re
###############################################################################
file = sys.argv[1]
filename = os.path.basename(file)
filename = filename.replace("_tblout.txt","")
output_dir = os.path.dirname(file)
out = os.path.join(output_dir, filename + "_tblout_pfam.txt")
protein2hit_dict = {}
protein2bit_dict = {}
dic = {}
with open(file, 'r') as f:
i=0
lines = f.readlines()
for line in lines:
line = line.rstrip() # This removes the whitespace at the end of the line
if line.startswith("#"): # We only want to analyze lines with HMMER matches, so we can pass on all the lines that start with a #
pass
else:
newline = re.sub("\s+", "\t", line) # Now we can replace the whitespace in the lines with tabs, which are easier to work with.
tabs = newline.split("\t") # And now we can create a list by splitting each line into pieces based on where the tabs are.
hit = tabs[3]
i +=1
query = tabs[0] # The first item in the line is the query protein. We can assign the variable "query" to it.
bit_score = tabs[5] # The fifth item is the bit score. We can assign the variable "bit_score" to it.
dic[i]= query
protein2bit_dict[i] = float(bit_score)
protein2hit_dict[i] = hit
with open(out, "w") as outputfile:
outputfile.write("Query\tHit\tScore\n")
for proteins in protein2hit_dict:
outputfile.write(dic[proteins] + "\t" + protein2hit_dict[proteins] + "\t" + str(protein2bit_dict[proteins]) +"\n")
outputfile.close()
print("File " + str(out) + " was created.")
f.close()
|
[
"argparse.ArgumentParser",
"os.path.basename",
"os.path.dirname",
"os.path.join",
"re.sub"
] |
[((162, 216), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Parse pfam file"""'}), "(description='Parse pfam file')\n", (185, 216), False, 'import argparse\n'), ((654, 676), 'os.path.basename', 'os.path.basename', (['file'], {}), '(file)\n', (670, 676), False, 'import os\n'), ((736, 757), 'os.path.dirname', 'os.path.dirname', (['file'], {}), '(file)\n', (751, 757), False, 'import os\n'), ((764, 819), 'os.path.join', 'os.path.join', (['output_dir', "(filename + '_tblout_pfam.txt')"], {}), "(output_dir, filename + '_tblout_pfam.txt')\n", (776, 819), False, 'import os\n'), ((1232, 1258), 're.sub', 're.sub', (['"""\\\\s+"""', '"""\t"""', 'line'], {}), "('\\\\s+', '\\t', line)\n", (1238, 1258), False, 'import re\n')]
|
import numpy as np
import matplotlib.pyplot as plt
def plot_price_history(hist):
''' plot price history '''
plt.plot(hist, '-')
plt.xlabel("time steps"); plt.ylabel("price")
plt.title("price history")
plt.show()
def plot_price_std(arr):
''' plot std of price history over simulations'''
plt.plot(arr, '-')
plt.xlabel("time steps"); plt.ylabel("std of price")
plt.title("standard deviation of daily prices")
plt.show()
def plot_wealth_dist(total):
total = {"user": total[0], "miner": total[1], "speculator": total[2]}
total = dict(sorted(total.items(), key=lambda x:x[1]))
plt.pie(list(total.values()), labels=list(total.keys()) )
plt.title("wealth distribution post simulation")
plt.show()
def plot_hash_power_prop(prop):
''' plot miner proportion '''
plt.pie(prop)
plt.title("proportions of miner hash power")
plt.show()
price_hist = np.load("price_hist.npy")
hash_power = np.load("hash_power.npy")
wealth_dist = np.load("wealth_dist.npy")
plot_price_history(np.mean(price_hist, axis=0))
plot_price_std(np.std(price_hist, axis=0))
plot_wealth_dist(np.mean(wealth_dist, axis=0))
# # plot_hash_power_prop(np.mean(hash_power, axis=0))
keep = 20
arr = np.zeros(keep)
for hp in hash_power:
arr = np.add(hp[:keep], arr)
arr /= 100
plot_hash_power_prop(arr)
|
[
"matplotlib.pyplot.title",
"numpy.load",
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"numpy.std",
"numpy.add",
"numpy.zeros",
"numpy.mean",
"matplotlib.pyplot.pie",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel"
] |
[((859, 884), 'numpy.load', 'np.load', (['"""price_hist.npy"""'], {}), "('price_hist.npy')\n", (866, 884), True, 'import numpy as np\n'), ((898, 923), 'numpy.load', 'np.load', (['"""hash_power.npy"""'], {}), "('hash_power.npy')\n", (905, 923), True, 'import numpy as np\n'), ((938, 964), 'numpy.load', 'np.load', (['"""wealth_dist.npy"""'], {}), "('wealth_dist.npy')\n", (945, 964), True, 'import numpy as np\n'), ((1175, 1189), 'numpy.zeros', 'np.zeros', (['keep'], {}), '(keep)\n', (1183, 1189), True, 'import numpy as np\n'), ((111, 130), 'matplotlib.pyplot.plot', 'plt.plot', (['hist', '"""-"""'], {}), "(hist, '-')\n", (119, 130), True, 'import matplotlib.pyplot as plt\n'), ((132, 156), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""time steps"""'], {}), "('time steps')\n", (142, 156), True, 'import matplotlib.pyplot as plt\n'), ((158, 177), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""price"""'], {}), "('price')\n", (168, 177), True, 'import matplotlib.pyplot as plt\n'), ((179, 205), 'matplotlib.pyplot.title', 'plt.title', (['"""price history"""'], {}), "('price history')\n", (188, 205), True, 'import matplotlib.pyplot as plt\n'), ((207, 217), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (215, 217), True, 'import matplotlib.pyplot as plt\n'), ((296, 314), 'matplotlib.pyplot.plot', 'plt.plot', (['arr', '"""-"""'], {}), "(arr, '-')\n", (304, 314), True, 'import matplotlib.pyplot as plt\n'), ((316, 340), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""time steps"""'], {}), "('time steps')\n", (326, 340), True, 'import matplotlib.pyplot as plt\n'), ((342, 368), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""std of price"""'], {}), "('std of price')\n", (352, 368), True, 'import matplotlib.pyplot as plt\n'), ((370, 417), 'matplotlib.pyplot.title', 'plt.title', (['"""standard deviation of daily prices"""'], {}), "('standard deviation of daily prices')\n", (379, 417), True, 'import matplotlib.pyplot as plt\n'), ((419, 429), 'matplotlib.pyplot.show', 
'plt.show', ([], {}), '()\n', (427, 429), True, 'import matplotlib.pyplot as plt\n'), ((647, 695), 'matplotlib.pyplot.title', 'plt.title', (['"""wealth distribution post simulation"""'], {}), "('wealth distribution post simulation')\n", (656, 695), True, 'import matplotlib.pyplot as plt\n'), ((697, 707), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (705, 707), True, 'import matplotlib.pyplot as plt\n'), ((773, 786), 'matplotlib.pyplot.pie', 'plt.pie', (['prop'], {}), '(prop)\n', (780, 786), True, 'import matplotlib.pyplot as plt\n'), ((788, 832), 'matplotlib.pyplot.title', 'plt.title', (['"""proportions of miner hash power"""'], {}), "('proportions of miner hash power')\n", (797, 832), True, 'import matplotlib.pyplot as plt\n'), ((834, 844), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (842, 844), True, 'import matplotlib.pyplot as plt\n'), ((985, 1012), 'numpy.mean', 'np.mean', (['price_hist'], {'axis': '(0)'}), '(price_hist, axis=0)\n', (992, 1012), True, 'import numpy as np\n'), ((1029, 1055), 'numpy.std', 'np.std', (['price_hist'], {'axis': '(0)'}), '(price_hist, axis=0)\n', (1035, 1055), True, 'import numpy as np\n'), ((1074, 1102), 'numpy.mean', 'np.mean', (['wealth_dist'], {'axis': '(0)'}), '(wealth_dist, axis=0)\n', (1081, 1102), True, 'import numpy as np\n'), ((1219, 1241), 'numpy.add', 'np.add', (['hp[:keep]', 'arr'], {}), '(hp[:keep], arr)\n', (1225, 1241), True, 'import numpy as np\n')]
|
"""
You need to import this file in any app in versatileimagefield.py file
to make it visible for versatileimagefield
"""
from PIL import Image
from PIL.WebPImagePlugin import WebPImageFile
from io import BytesIO
from versatileimagefield.datastructures.sizedimage import (
MalformedSizedImageKey,
settings,
cache,
VERSATILEIMAGEFIELD_CACHE_LENGTH,
SizedImageInstance
)
from versatileimagefield.registry import versatileimagefield_registry
from versatileimagefield.utils import JPEG_QUAL as QUAL
from versatileimagefield.versatileimagefield import (
FilteredImage,
CroppedImage as DefaultCroppedImage,
ThumbnailImage as DefaultThumbnailImage
)
from .utils import (
get_resized_path,
get_filtered_path
)
__all__ = (
'WebPMixin',
'ToWebPImage',
'WebPThumbnailImage',
'WebPCroppedImage',
'CroppedImage',
'ThumbnailImage'
)
class WebPMixin:
ext = "webp"
def __getitem__(self, key):
"""
Return a URL to an image sized according to key.
Arguments:
* `key`: A string in the following format
'[width-in-pixels]x[height-in-pixels]'
Example: '400x400'
"""
try:
width, height = [int(i) for i in key.split('x')]
except (KeyError, ValueError):
raise MalformedSizedImageKey(
"%s keys must be in the following format: "
"'`width`x`height`' where both `width` and `height` are "
"integers." % self.__class__.__name__
)
if not self.path_to_image and getattr(
settings, 'VERSATILEIMAGEFIELD_USE_PLACEHOLDIT', False
):
resized_url = "http://placehold.it/%dx%d" % (width, height)
resized_storage_path = resized_url
else:
resized_storage_path = get_resized_path(
path_to_image=self.path_to_image,
ext=self.ext,
width=width,
height=height,
filename_key=self.get_filename_key(),
storage=self.storage
)
try:
resized_url = self.storage.url(resized_storage_path)
except Exception:
resized_url = None
if self.create_on_demand is True:
if cache.get(resized_url) and resized_url is not None:
# The sized path exists in the cache so the image already
# exists. So we `pass` to skip directly to the return
# statement
pass
else:
if resized_storage_path and not self.storage.exists(
resized_storage_path
):
self.create_resized_image(
path_to_image=self.path_to_image,
save_path_on_storage=resized_storage_path,
width=width,
height=height
)
resized_url = self.storage.url(resized_storage_path)
# Setting a super-long cache for a resized image (30 Days)
cache.set(resized_url, 1, VERSATILEIMAGEFIELD_CACHE_LENGTH)
return SizedImageInstance(
name=resized_storage_path,
url=resized_url,
storage=self.storage
)
def retrieve_image(self, path_to_image):
image = self.storage.open(path_to_image, "rb")
file_ext = self.ext
image_format, mime_type = "WEBP", "image/webp"
return Image.open(image), file_ext, image_format, mime_type
def save_image(self, imagefile, save_path, file_ext, mime_type):
path, ext = save_path.rsplit('.')
save_path = f'{path}.{self.ext}'
return super().save_image(imagefile, save_path, file_ext, mime_type)
def preprocess_WEBP(self, image, **kwargs):
return image, {"quality": QUAL, "lossless": False, "icc_profile": ""}
class ToWebPImage(WebPMixin, FilteredImage):
"""
object.image.filters.to_webp.url
"""
def __init__(self, path_to_image, storage, create_on_demand, filename_key):
super().__init__(
path_to_image, storage, create_on_demand, filename_key
)
self.name = get_filtered_path(
path_to_image=self.path_to_image,
ext=self.ext,
filename_key=filename_key,
storage=storage
)
self.url = storage.url(self.name)
def process_image(self, image, image_format, save_kwargs):
imagefile = BytesIO()
image, save_kwargs = self.preprocess(image, "WEBP")
image.save(imagefile, **save_kwargs)
return imagefile
class WebPThumbnailImage(WebPMixin, DefaultThumbnailImage):
"""
object.image.thumbnail_webp['512x511'].url
"""
filename_key = "thumbnail_webp"
def process_image(self, image, image_format, save_kwargs, width, height):
imagefile = BytesIO()
image.thumbnail(
(width, height),
Image.ANTIALIAS
)
image, save_kwargs = self.preprocess(image, "WEBP")
image.save(
imagefile,
**save_kwargs
)
return imagefile
class WebPCroppedImage(WebPMixin, DefaultCroppedImage):
"""
object.image.crop_webp['512x511'].url
"""
filename_key = "crop_webp"
filename_key_regex = r'crop_webp-c[0-9-]+__[0-9-]+'
def process_image(self, image, image_format, save_kwargs,
width, height):
imagefile = BytesIO()
palette = image.getpalette()
cropped_image = self.crop_on_centerpoint(
image,
width,
height,
self.ppoi
)
# Using ImageOps.fit on GIFs can introduce issues with their palette
# Solution derived from: http://stackoverflow.com/a/4905209/1149774
if image_format == 'GIF':
cropped_image.putpalette(palette)
cropped_image, save_kwargs = self.preprocess(cropped_image, "WEBP")
cropped_image.save(
imagefile,
**save_kwargs
)
return imagefile
class CroppedImage(DefaultCroppedImage):
def process_image(self, image, image_format, save_kwargs,
width, height):
"""
Return a BytesIO instance of `image` cropped to `width` and `height`.
Cropping will first reduce an image down to its longest side
and then crop inwards centered on the Primary Point of Interest
(as specified by `self.ppoi`)
"""
imagefile = BytesIO()
palette = image.getpalette()
cropped_image = self.crop_on_centerpoint(
image,
width,
height,
self.ppoi
)
# Using ImageOps.fit on GIFs can introduce issues with their palette
# Solution derived from: http://stackoverflow.com/a/4905209/1149774
if image_format == 'GIF':
cropped_image.putpalette(palette)
if isinstance(image, WebPImageFile):
save_kwargs['format'] = 'JPEG'
cropped_image.save(
imagefile,
**save_kwargs
)
return imagefile
class ThumbnailImage(DefaultThumbnailImage):
def process_image(self, image, image_format, save_kwargs,
width, height):
"""
Return a BytesIO instance of `image` that fits in a bounding box.
Bounding box dimensions are `width`x`height`.
"""
imagefile = BytesIO()
image.thumbnail(
(width, height),
Image.ANTIALIAS
)
if isinstance(image, WebPImageFile):
save_kwargs['format'] = 'JPEG'
image.save(
imagefile,
**save_kwargs
)
return imagefile
versatileimagefield_registry.register_filter('to_webp', ToWebPImage)
versatileimagefield_registry.register_sizer("thumbnail_webp", WebPThumbnailImage)
versatileimagefield_registry.register_sizer("crop_webp", WebPCroppedImage)
versatileimagefield_registry.unregister_sizer('crop')
versatileimagefield_registry.unregister_sizer('thumbnail')
versatileimagefield_registry.register_sizer('crop', CroppedImage)
versatileimagefield_registry.register_sizer('thumbnail', ThumbnailImage)
|
[
"versatileimagefield.datastructures.sizedimage.cache.get",
"io.BytesIO",
"versatileimagefield.datastructures.sizedimage.MalformedSizedImageKey",
"versatileimagefield.datastructures.sizedimage.SizedImageInstance",
"versatileimagefield.registry.versatileimagefield_registry.register_filter",
"versatileimagefield.datastructures.sizedimage.cache.set",
"versatileimagefield.registry.versatileimagefield_registry.unregister_sizer",
"PIL.Image.open",
"versatileimagefield.registry.versatileimagefield_registry.register_sizer"
] |
[((7943, 8011), 'versatileimagefield.registry.versatileimagefield_registry.register_filter', 'versatileimagefield_registry.register_filter', (['"""to_webp"""', 'ToWebPImage'], {}), "('to_webp', ToWebPImage)\n", (7987, 8011), False, 'from versatileimagefield.registry import versatileimagefield_registry\n'), ((8012, 8097), 'versatileimagefield.registry.versatileimagefield_registry.register_sizer', 'versatileimagefield_registry.register_sizer', (['"""thumbnail_webp"""', 'WebPThumbnailImage'], {}), "('thumbnail_webp',\n WebPThumbnailImage)\n", (8055, 8097), False, 'from versatileimagefield.registry import versatileimagefield_registry\n'), ((8094, 8168), 'versatileimagefield.registry.versatileimagefield_registry.register_sizer', 'versatileimagefield_registry.register_sizer', (['"""crop_webp"""', 'WebPCroppedImage'], {}), "('crop_webp', WebPCroppedImage)\n", (8137, 8168), False, 'from versatileimagefield.registry import versatileimagefield_registry\n'), ((8169, 8222), 'versatileimagefield.registry.versatileimagefield_registry.unregister_sizer', 'versatileimagefield_registry.unregister_sizer', (['"""crop"""'], {}), "('crop')\n", (8214, 8222), False, 'from versatileimagefield.registry import versatileimagefield_registry\n'), ((8223, 8281), 'versatileimagefield.registry.versatileimagefield_registry.unregister_sizer', 'versatileimagefield_registry.unregister_sizer', (['"""thumbnail"""'], {}), "('thumbnail')\n", (8268, 8281), False, 'from versatileimagefield.registry import versatileimagefield_registry\n'), ((8282, 8347), 'versatileimagefield.registry.versatileimagefield_registry.register_sizer', 'versatileimagefield_registry.register_sizer', (['"""crop"""', 'CroppedImage'], {}), "('crop', CroppedImage)\n", (8325, 8347), False, 'from versatileimagefield.registry import versatileimagefield_registry\n'), ((8348, 8420), 'versatileimagefield.registry.versatileimagefield_registry.register_sizer', 'versatileimagefield_registry.register_sizer', (['"""thumbnail"""', 
'ThumbnailImage'], {}), "('thumbnail', ThumbnailImage)\n", (8391, 8420), False, 'from versatileimagefield.registry import versatileimagefield_registry\n'), ((3314, 3403), 'versatileimagefield.datastructures.sizedimage.SizedImageInstance', 'SizedImageInstance', ([], {'name': 'resized_storage_path', 'url': 'resized_url', 'storage': 'self.storage'}), '(name=resized_storage_path, url=resized_url, storage=self\n .storage)\n', (3332, 3403), False, 'from versatileimagefield.datastructures.sizedimage import MalformedSizedImageKey, settings, cache, VERSATILEIMAGEFIELD_CACHE_LENGTH, SizedImageInstance\n'), ((4652, 4661), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (4659, 4661), False, 'from io import BytesIO\n'), ((5052, 5061), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (5059, 5061), False, 'from io import BytesIO\n'), ((5644, 5653), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (5651, 5653), False, 'from io import BytesIO\n'), ((6701, 6710), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (6708, 6710), False, 'from io import BytesIO\n'), ((7645, 7654), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (7652, 7654), False, 'from io import BytesIO\n'), ((3644, 3661), 'PIL.Image.open', 'Image.open', (['image'], {}), '(image)\n', (3654, 3661), False, 'from PIL import Image\n'), ((1342, 1509), 'versatileimagefield.datastructures.sizedimage.MalformedSizedImageKey', 'MalformedSizedImageKey', (['("%s keys must be in the following format: \'`width`x`height`\' where both `width` and `height` are integers."\n % self.__class__.__name__)'], {}), '(\n "%s keys must be in the following format: \'`width`x`height`\' where both `width` and `height` are integers."\n % self.__class__.__name__)\n', (1364, 1509), False, 'from versatileimagefield.datastructures.sizedimage import MalformedSizedImageKey, settings, cache, VERSATILEIMAGEFIELD_CACHE_LENGTH, SizedImageInstance\n'), ((2343, 2365), 'versatileimagefield.datastructures.sizedimage.cache.get', 'cache.get', (['resized_url'], {}), '(resized_url)\n', (2352, 
2365), False, 'from versatileimagefield.datastructures.sizedimage import MalformedSizedImageKey, settings, cache, VERSATILEIMAGEFIELD_CACHE_LENGTH, SizedImageInstance\n'), ((3238, 3297), 'versatileimagefield.datastructures.sizedimage.cache.set', 'cache.set', (['resized_url', '(1)', 'VERSATILEIMAGEFIELD_CACHE_LENGTH'], {}), '(resized_url, 1, VERSATILEIMAGEFIELD_CACHE_LENGTH)\n', (3247, 3297), False, 'from versatileimagefield.datastructures.sizedimage import MalformedSizedImageKey, settings, cache, VERSATILEIMAGEFIELD_CACHE_LENGTH, SizedImageInstance\n')]
|
"""Optimizes for specific Confusion Matrix Values: `FP` - only recommended if threshold is adjusted"""
import typing
import numpy as np
from h2oaicore.metrics import CustomScorer
from sklearn.preprocessing import LabelEncoder
from sklearn.metrics import confusion_matrix
class CMFalsePositive(CustomScorer):
_threshold = 0.5 # Example only, should be adjusted based on domain knowledge and other experiments
_description = "Reduce false positive count"
_binary = True
_maximize = False
_perfect_score = 0
_display_name = "FP"
def score(self,
actual: np.array,
predicted: np.array,
sample_weight: typing.Optional[np.array] = None,
labels: typing.Optional[np.array] = None) -> float:
lb = LabelEncoder()
labels = lb.fit_transform(labels)
actual = lb.transform(actual)
predicted = (predicted > self._threshold)
cm = confusion_matrix(actual, predicted, sample_weight=sample_weight, labels=labels)
tn, fp, fn, tp = cm.ravel()
return fp
|
[
"sklearn.metrics.confusion_matrix",
"sklearn.preprocessing.LabelEncoder"
] |
[((783, 797), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (795, 797), False, 'from sklearn.preprocessing import LabelEncoder\n'), ((941, 1020), 'sklearn.metrics.confusion_matrix', 'confusion_matrix', (['actual', 'predicted'], {'sample_weight': 'sample_weight', 'labels': 'labels'}), '(actual, predicted, sample_weight=sample_weight, labels=labels)\n', (957, 1020), False, 'from sklearn.metrics import confusion_matrix\n')]
|
from flask import Blueprint
from . import auth, models, schemas
user_bp = Blueprint("user", __name__)
|
[
"flask.Blueprint"
] |
[((76, 103), 'flask.Blueprint', 'Blueprint', (['"""user"""', '__name__'], {}), "('user', __name__)\n", (85, 103), False, 'from flask import Blueprint\n')]
|
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
from pathlib import Path
from pymatgen.io.cp2k.outputs import Cp2kOutput
from pymatgen.util.testing import PymatgenTest
class SetTest(PymatgenTest):
def setUp(self):
self.TEST_FILES_DIR = Path.joinpath(self.TEST_FILES_DIR, "cp2k")
self.out = Cp2kOutput(Path.joinpath(self.TEST_FILES_DIR, "cp2k.out"), auto_load=True)
def test_files(self):
self.out.parse_files()
self.assertEqual(len(self.out.filenames["PDOS"]), 2)
def test(self):
self.assertEqual(self.out.spin_polarized, False)
self.assertEqual(self.out.completed, True)
self.assertEqual(self.out.num_warnings, [[1]])
self.assertEqual(self.out.run_type.upper(), "GEO_OPT")
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"pathlib.Path.joinpath"
] |
[((849, 864), 'unittest.main', 'unittest.main', ([], {}), '()\n', (862, 864), False, 'import unittest\n'), ((313, 355), 'pathlib.Path.joinpath', 'Path.joinpath', (['self.TEST_FILES_DIR', '"""cp2k"""'], {}), "(self.TEST_FILES_DIR, 'cp2k')\n", (326, 355), False, 'from pathlib import Path\n'), ((386, 432), 'pathlib.Path.joinpath', 'Path.joinpath', (['self.TEST_FILES_DIR', '"""cp2k.out"""'], {}), "(self.TEST_FILES_DIR, 'cp2k.out')\n", (399, 432), False, 'from pathlib import Path\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django_extensions.db.fields
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name="Conference",
fields=[
(
"id",
models.AutoField(
verbose_name="ID",
serialize=False,
auto_created=True,
primary_key=True,
),
),
(
"created_at",
models.DateTimeField(auto_now_add=True, verbose_name="Created At"),
),
(
"modified_at",
models.DateTimeField(
auto_now=True, verbose_name="Last Modified At"
),
),
(
"name",
models.CharField(max_length=255, verbose_name="Conference Name"),
),
(
"slug",
django_extensions.db.fields.AutoSlugField(
editable=False,
populate_from=("name",),
max_length=255,
blank=True,
unique=True,
),
),
("description", models.TextField(default="")),
("start_date", models.DateField(verbose_name="Start Date")),
("end_date", models.DateField(verbose_name="End Date")),
(
"status",
models.PositiveSmallIntegerField(
verbose_name="Current Status",
choices=[
(1, b"Accepting Call for Proposals"),
(2, b"Closed for Proposals"),
(3, b"Accepting Votes"),
(4, b"Schedule Published"),
],
),
),
(
"deleted",
models.BooleanField(default=False, verbose_name="Is Deleted?"),
),
(
"created_by",
models.ForeignKey(
related_name="created_conference_set",
verbose_name="Created By",
blank=True,
on_delete=models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
null=True,
),
),
(
"modified_by",
models.ForeignKey(
related_name="updated_conference_set",
verbose_name="Modified By",
blank=True,
on_delete=models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
null=True,
),
),
],
options={"abstract": False},
bases=(models.Model,),
),
migrations.CreateModel(
name="ConferenceModerator",
fields=[
(
"id",
models.AutoField(
verbose_name="ID",
serialize=False,
auto_created=True,
primary_key=True,
),
),
(
"created_at",
models.DateTimeField(auto_now_add=True, verbose_name="Created At"),
),
(
"modified_at",
models.DateTimeField(
auto_now=True, verbose_name="Last Modified At"
),
),
(
"active",
models.BooleanField(default=True, verbose_name="Is Active?"),
),
(
"conference",
models.ForeignKey(
to="conferences.Conference", on_delete=models.deletion.CASCADE,
),
),
(
"created_by",
models.ForeignKey(
related_name="created_conferencemoderator_set",
verbose_name="Created By",
blank=True,
on_delete=models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
null=True,
),
),
(
"moderator",
models.ForeignKey(
to=settings.AUTH_USER_MODEL, on_delete=models.deletion.CASCADE,
),
),
(
"modified_by",
models.ForeignKey(
related_name="updated_conferencemoderator_set",
verbose_name="Modified By",
blank=True,
on_delete=models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
null=True,
),
),
],
options={"abstract": False},
bases=(models.Model,),
),
migrations.CreateModel(
name="ConferenceProposalReviewer",
fields=[
(
"id",
models.AutoField(
verbose_name="ID",
serialize=False,
auto_created=True,
primary_key=True,
),
),
(
"created_at",
models.DateTimeField(auto_now_add=True, verbose_name="Created At"),
),
(
"modified_at",
models.DateTimeField(
auto_now=True, verbose_name="Last Modified At"
),
),
(
"active",
models.BooleanField(default=True, verbose_name="Is Active?"),
),
(
"conference",
models.ForeignKey(
to="conferences.Conference", on_delete=models.deletion.CASCADE,
),
),
(
"created_by",
models.ForeignKey(
related_name="created_conferenceproposalreviewer_set",
verbose_name="Created By",
blank=True,
on_delete=models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
null=True,
),
),
(
"modified_by",
models.ForeignKey(
related_name="updated_conferenceproposalreviewer_set",
verbose_name="Modified By",
blank=True,
on_delete=models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
null=True,
),
),
(
"reviewer",
models.ForeignKey(
to=settings.AUTH_USER_MODEL, on_delete=models.deletion.CASCADE,
),
),
],
options={},
bases=(models.Model,),
),
migrations.AlterUniqueTogether(
name="conferenceproposalreviewer",
unique_together=set([("conference", "reviewer")]),
),
]
|
[
"django.db.models.TextField",
"django.db.migrations.swappable_dependency",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.PositiveSmallIntegerField",
"django.db.models.AutoField",
"django.db.models.BooleanField",
"django.db.models.DateField",
"django.db.models.DateTimeField"
] |
[((245, 302), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (276, 302), False, 'from django.db import migrations, models\n'), ((478, 571), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (494, 571), False, 'from django.db import migrations, models\n'), ((779, 845), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""Created At"""'}), "(auto_now_add=True, verbose_name='Created At')\n", (799, 845), False, 'from django.db import migrations, models\n'), ((939, 1007), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Last Modified At"""'}), "(auto_now=True, verbose_name='Last Modified At')\n", (959, 1007), False, 'from django.db import migrations, models\n'), ((1140, 1204), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': '"""Conference Name"""'}), "(max_length=255, verbose_name='Conference Name')\n", (1156, 1204), False, 'from django.db import migrations, models\n'), ((1610, 1638), 'django.db.models.TextField', 'models.TextField', ([], {'default': '""""""'}), "(default='')\n", (1626, 1638), False, 'from django.db import migrations, models\n'), ((1672, 1715), 'django.db.models.DateField', 'models.DateField', ([], {'verbose_name': '"""Start Date"""'}), "(verbose_name='Start Date')\n", (1688, 1715), False, 'from django.db import migrations, models\n'), ((1747, 1788), 'django.db.models.DateField', 'models.DateField', ([], {'verbose_name': '"""End Date"""'}), "(verbose_name='End Date')\n", (1763, 1788), False, 'from django.db import migrations, models\n'), ((1859, 2061), 'django.db.models.PositiveSmallIntegerField', 
'models.PositiveSmallIntegerField', ([], {'verbose_name': '"""Current Status"""', 'choices': "[(1, b'Accepting Call for Proposals'), (2, b'Closed for Proposals'), (3,\n b'Accepting Votes'), (4, b'Schedule Published')]"}), "(verbose_name='Current Status', choices=[(1,\n b'Accepting Call for Proposals'), (2, b'Closed for Proposals'), (3,\n b'Accepting Votes'), (4, b'Schedule Published')])\n", (1891, 2061), False, 'from django.db import migrations, models\n'), ((2353, 2415), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Is Deleted?"""'}), "(default=False, verbose_name='Is Deleted?')\n", (2372, 2415), False, 'from django.db import migrations, models\n'), ((2508, 2688), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'related_name': '"""created_conference_set"""', 'verbose_name': '"""Created By"""', 'blank': '(True)', 'on_delete': 'models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL', 'null': '(True)'}), "(related_name='created_conference_set', verbose_name=\n 'Created By', blank=True, on_delete=models.deletion.CASCADE, to=\n settings.AUTH_USER_MODEL, null=True)\n", (2525, 2688), False, 'from django.db import migrations, models\n'), ((2939, 3120), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'related_name': '"""updated_conference_set"""', 'verbose_name': '"""Modified By"""', 'blank': '(True)', 'on_delete': 'models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL', 'null': '(True)'}), "(related_name='updated_conference_set', verbose_name=\n 'Modified By', blank=True, on_delete=models.deletion.CASCADE, to=\n settings.AUTH_USER_MODEL, null=True)\n", (2956, 3120), False, 'from django.db import migrations, models\n'), ((3557, 3650), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (3573, 3650), 
False, 'from django.db import migrations, models\n'), ((3858, 3924), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""Created At"""'}), "(auto_now_add=True, verbose_name='Created At')\n", (3878, 3924), False, 'from django.db import migrations, models\n'), ((4018, 4086), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Last Modified At"""'}), "(auto_now=True, verbose_name='Last Modified At')\n", (4038, 4086), False, 'from django.db import migrations, models\n'), ((4221, 4281), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)', 'verbose_name': '"""Is Active?"""'}), "(default=True, verbose_name='Is Active?')\n", (4240, 4281), False, 'from django.db import migrations, models\n'), ((4374, 4460), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""conferences.Conference"""', 'on_delete': 'models.deletion.CASCADE'}), "(to='conferences.Conference', on_delete=models.deletion.\n CASCADE)\n", (4391, 4460), False, 'from django.db import migrations, models\n'), ((4595, 4783), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'related_name': '"""created_conferencemoderator_set"""', 'verbose_name': '"""Created By"""', 'blank': '(True)', 'on_delete': 'models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL', 'null': '(True)'}), "(related_name='created_conferencemoderator_set',\n verbose_name='Created By', blank=True, on_delete=models.deletion.\n CASCADE, to=settings.AUTH_USER_MODEL, null=True)\n", (4612, 4783), False, 'from django.db import migrations, models\n'), ((5033, 5119), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': 'settings.AUTH_USER_MODEL', 'on_delete': 'models.deletion.CASCADE'}), '(to=settings.AUTH_USER_MODEL, on_delete=models.deletion.\n CASCADE)\n', (5050, 5119), False, 'from django.db import migrations, models\n'), ((5255, 5444), 'django.db.models.ForeignKey', 
'models.ForeignKey', ([], {'related_name': '"""updated_conferencemoderator_set"""', 'verbose_name': '"""Modified By"""', 'blank': '(True)', 'on_delete': 'models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL', 'null': '(True)'}), "(related_name='updated_conferencemoderator_set',\n verbose_name='Modified By', blank=True, on_delete=models.deletion.\n CASCADE, to=settings.AUTH_USER_MODEL, null=True)\n", (5272, 5444), False, 'from django.db import migrations, models\n'), ((5889, 5982), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (5905, 5982), False, 'from django.db import migrations, models\n'), ((6190, 6256), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""Created At"""'}), "(auto_now_add=True, verbose_name='Created At')\n", (6210, 6256), False, 'from django.db import migrations, models\n'), ((6350, 6418), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Last Modified At"""'}), "(auto_now=True, verbose_name='Last Modified At')\n", (6370, 6418), False, 'from django.db import migrations, models\n'), ((6553, 6613), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)', 'verbose_name': '"""Is Active?"""'}), "(default=True, verbose_name='Is Active?')\n", (6572, 6613), False, 'from django.db import migrations, models\n'), ((6706, 6792), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""conferences.Conference"""', 'on_delete': 'models.deletion.CASCADE'}), "(to='conferences.Conference', on_delete=models.deletion.\n CASCADE)\n", (6723, 6792), False, 'from django.db import migrations, models\n'), ((6927, 7122), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'related_name': 
'"""created_conferenceproposalreviewer_set"""', 'verbose_name': '"""Created By"""', 'blank': '(True)', 'on_delete': 'models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL', 'null': '(True)'}), "(related_name='created_conferenceproposalreviewer_set',\n verbose_name='Created By', blank=True, on_delete=models.deletion.\n CASCADE, to=settings.AUTH_USER_MODEL, null=True)\n", (6944, 7122), False, 'from django.db import migrations, models\n'), ((7374, 7570), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'related_name': '"""updated_conferenceproposalreviewer_set"""', 'verbose_name': '"""Modified By"""', 'blank': '(True)', 'on_delete': 'models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL', 'null': '(True)'}), "(related_name='updated_conferenceproposalreviewer_set',\n verbose_name='Modified By', blank=True, on_delete=models.deletion.\n CASCADE, to=settings.AUTH_USER_MODEL, null=True)\n", (7391, 7570), False, 'from django.db import migrations, models\n'), ((7819, 7905), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': 'settings.AUTH_USER_MODEL', 'on_delete': 'models.deletion.CASCADE'}), '(to=settings.AUTH_USER_MODEL, on_delete=models.deletion.\n CASCADE)\n', (7836, 7905), False, 'from django.db import migrations, models\n')]
|
# The poller that queries Telegram for bot updates.
# Assumes it's the only poller enqueueing elements onto a number of queues.
# Nice way to make HTTP get requests
import requests
# To read arguments
import sys
# For our queues
from collections import deque
# To lock and unlock files
import fcntl
# To read/write files
import os
# To serialize/deserialize objects
import pickle
# To yield
from time import sleep
# Package as a function for go-lite-bot to run
def run ():
    """Poll Telegram for bot updates and fan text messages out to the queues.

    Reads the bot token from token.txt and the last-processed update offset
    from offset.txt, then loops until cancel.txt signals shutdown, routing
    each incoming text message to a queue chosen by hashing its chat id.
    """
    # The module-level helpers (queueName, writeOut, getUpdates) read these
    # names, so they must be bound at module scope, not as locals of run().
    global token, offset, queueDir, numQueues, writeBuffers
    # For ease of configuration, we pull our token from a text file located
    # in the same directory.
    with open('token.txt', 'r') as f:
        token = f.readline().strip()
    # Get the last update number so we don't do duplicates.
    # BUG FIX: the original called f.readline.strip (missing call
    # parentheses), which raised AttributeError before the offset was read.
    with open('offset.txt', 'r') as f:
        offset = int(f.readline().strip())
    # Process our arguments, which should be safe since they're passed by start
    queueDir = sys.argv[1]
    numQueues = int(sys.argv[2])
    # One internal buffer of pending writes per queue.
    writeBuffers = [deque() for _ in range(numQueues)]
    # Continually request updates and pass them to the queues
    while not canceled():
        updates = getUpdates()
        if not updates:
            # Nothing pending: yield the CPU.
            sleep(0)
        else:
            # BUG FIX: iterate the updates themselves; the original used
            # range(updates) (a TypeError) and attribute access on what the
            # Telegram API returns as plain JSON dicts.
            for update in updates:
                if 'message' in update and 'text' in update['message']:
                    queue_index = hash(update['message']['chat']['id']) % numQueues
                    writeBuffers[queue_index].append(update)
            writeOut()
# Gives the queue file name for queue i
def queueName (i):
    """Return the on-disk pickle path for queue *i* (queueDir/<i>_queue.p)."""
    # queueDir is expected to be bound at module scope before this is called.
    return '{}/{}_queue.p'.format(queueDir, i)
# Write out the new offset number
def writeOffset (num):
    """Persist *num* to offset.txt so polling resumes past handled updates."""
    # 'w' truncates, so the file only ever holds the latest counter.
    with open('offset.txt', 'w') as f:
        f.write(str(num))
# Check to see if we've been canceled
def canceled ():
    """Return True when cancel.txt contains the shutdown sentinel 'Yes'."""
    # BUG FIX: the original read f.readline.strip (missing call parentheses),
    # which raised AttributeError instead of returning the file's first line.
    with open('cancel.txt', 'r') as f:
        done = f.readline().strip()
    return done == 'Yes'
# Write out all buffers, appending elements to the appropriate queues
def writeOut ():
    """Append every buffered update to its on-disk pickled queue.

    For each queue file: take an exclusive lock, load the existing pickled
    queue, append our pending items in arrival order, and write the merged
    queue back.
    """
    for i in range(len(writeBuffers)):
        # Pickle is a binary format, so the file must be opened in binary
        # mode (the original's text-mode 'r+' breaks pickle on Python 3).
        with open(queueName(i), 'rb+') as f:
            fcntl.flock(f, fcntl.LOCK_EX)
            workingQueue = pickle.load(f)
            # Recall that later messages are at higher indices.
            # Workers read messages off the left, so we should add to the
            # right in ascending order.
            # BUG FIX: deque's method is popleft(), not popLeft().
            while writeBuffers[i]:
                workingQueue.append(writeBuffers[i].popleft())
            # Rewind and truncate before rewriting, otherwise trailing bytes
            # of the previous (longer) pickle would survive after the new one.
            f.seek(0)
            f.truncate()
            # BUG FIX: the original pickled writeBuffers[i] (the now-drained
            # local buffer) instead of the merged workingQueue.
            pickle.dump(workingQueue, f, pickle.HIGHEST_PROTOCOL)
            fcntl.flock(f, fcntl.LOCK_UN)
# Get all updates from the server for our bot
def getUpdates ():
    """Fetch pending bot updates from Telegram and advance the stored offset.

    Returns:
        list: update objects (dicts) with update_id greater than the current
        offset; empty when nothing is pending.
    """
    # BUG FIX: offset is reassigned below, so without this declaration the
    # read of offset in the URL raised UnboundLocalError.
    global offset
    r = requests.get('https://api.telegram.org/bot' + token + '/getUpdates' +
                     '?offset=' + str(offset) +
                     '&limit=100')
    # Telegram wraps the update list in an {"ok": ..., "result": [...]}
    # envelope; the original iterated the envelope dict itself and used
    # attribute access on what are plain JSON dicts.
    updates = r.json().get('result', [])
    # Updates are returned sequentially, update the offset past the newest
    if (len(updates) > 0):
        offset = updates[-1]['update_id'] + 1
    # Write out offset counter
    writeOffset(offset)
    return updates
|
[
"pickle.dump",
"fcntl.flock",
"time.sleep",
"pickle.load",
"collections.deque"
] |
[((2153, 2182), 'fcntl.flock', 'fcntl.flock', (['f', 'fcntl.LOCK_EX'], {}), '(f, fcntl.LOCK_EX)\n', (2164, 2182), False, 'import fcntl\n'), ((2206, 2220), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (2217, 2220), False, 'import pickle\n'), ((2498, 2554), 'pickle.dump', 'pickle.dump', (['writeBuffers[i]', 'f', 'pickle.HIGHEST_PROTOCOL'], {}), '(writeBuffers[i], f, pickle.HIGHEST_PROTOCOL)\n', (2509, 2554), False, 'import pickle\n'), ((2563, 2592), 'fcntl.flock', 'fcntl.flock', (['f', 'fcntl.LOCK_UN'], {}), '(f, fcntl.LOCK_UN)\n', (2574, 2592), False, 'import fcntl\n'), ((1084, 1091), 'collections.deque', 'deque', ([], {}), '()\n', (1089, 1091), False, 'from collections import deque\n'), ((1294, 1302), 'time.sleep', 'sleep', (['(0)'], {}), '(0)\n', (1299, 1302), False, 'from time import sleep\n')]
|
# Generated by Django 3.0.7 on 2020-09-06 03:20
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the database table backing the Resource model to 'Resource'."""

    # Must run after the migration that removed the Resource.code field.
    dependencies = [
        ('resources', '0003_remove_resource_code'),
    ]

    operations = [
        # Point the model at an explicitly-named 'Resource' table instead of
        # the default auto-generated app_model table name.
        migrations.AlterModelTable(
            name='resource',
            table='Resource',
        ),
    ]
|
[
"django.db.migrations.AlterModelTable"
] |
[((231, 292), 'django.db.migrations.AlterModelTable', 'migrations.AlterModelTable', ([], {'name': '"""resource"""', 'table': '"""Resource"""'}), "(name='resource', table='Resource')\n", (257, 292), False, 'from django.db import migrations\n')]
|
"""
This file comes from pydc1394 examples.
Written by jordens.
Tested on Linux.
git clone https://github.com/jordens/pydc1394
"""
import time
import pyqtgraph as pg
from pyqtgraph.Qt import QtCore, QtGui
from pymba import Vimba
class CameraPlot:
    """Live view of frames from the first Vimba (AVT) camera in a pyqtgraph window."""

    def __init__(self):
        # Bring up the Vimba transport layer and enumerate attached cameras
        # before building the UI and opening the first camera.
        self.vimba = Vimba()
        self.vimba.startup()
        self.system = self.vimba.getSystem()
        self.cameraIds = self.vimba.getCameraIds()
        self.init_win()
        self.init_camera()

    def init_win(self):
        """Create the Qt main window holding a pyqtgraph ImageView."""
        self.win = QtGui.QMainWindow()
        self.win.show()
        self.win.resize(600, 400)
        self.win.setWindowTitle("pymba + pyqtgraph")
        self.img = pg.ImageView()
        self.win.setCentralWidget(self.img)

    def init_camera(self):
        """Open the first detected camera and print identifying information."""
        print("Vimba version:", self.vimba.getVersion())
        print("Found {:d} cameras.".format(len(self.cameraIds)))
        self.cam = self.vimba.getCamera(self.cameraIds[0])
        self.cam.openCamera()
        info = self.cam.getInfo()
        # Info fields come back as bytes; decode for readable output.
        print('cameraName:', info.cameraName.decode('ascii'))
        print('interfaceIdString:', info.interfaceIdString.decode('ascii'))
        print('modelName:', info.modelName.decode('ascii'))

    def start_camera(self):
        """Configure acquisition features and begin continuous capture."""
        # Free-running capture, no hardware trigger, 16-bit mono frames.
        self.cam.AcquisitionMode = 'Continuous'
        self.cam.IIDCPhyspeed = 'S800'
        self.cam.PixelFormat = 'Mono16'
        self.cam.TriggerMode = 'Off'
        self.cam.AcquisitionFrameRate = 20.0
        # Announce one frame buffer and start the acquisition stream.
        self.frame = self.cam.getFrame()
        self.frame.announceFrame()
        self.cam.startCapture()
        self.cam.runFeatureCommand('AcquisitionStart')

    def process_images(self):
        """Grab one frame, display it, and reschedule itself in 50 ms."""
        # Re-arm first so display work doesn't delay the next capture slot.
        QtCore.QTimer.singleShot(50, self.process_images)
        self.frame.queueFrameCapture()
        self.frame.waitFrameCapture()
        # Transpose to match ImageView's (x, y) axis convention.
        im = self.frame.getImage().T
        self.img.setImage(im, autoRange=False, autoLevels=False,
                          autoHistogramRange=False)

    def stop_camera(self):
        """Stop acquisition and release all announced frame buffers."""
        self.cam.runFeatureCommand('AcquisitionStop')
        self.cam.endCapture()
        self.cam.revokeAllFrames()

    def deinit_camera(self):
        """Shut down the Vimba transport layer."""
        self.vimba.shutdown()
if __name__ == "__main__":
    # Qt needs an application object before any widgets are created.
    app = QtGui.QApplication([])
    cam = CameraPlot()
    try:
        cam.start_camera()
        # Give the camera a moment to deliver its first frame.
        time.sleep(.5)
        cam.process_images()
        cam.img.autoRange()
        cam.img.autoLevels()
        # Enter the Qt event loop; process_images reschedules itself via QTimer.
        QtGui.QApplication.instance().exec_()
    finally:
        # Always release the camera, even if the event loop exits with an error.
        cam.stop_camera()
        cam.deinit_camera()
|
[
"pyqtgraph.Qt.QtGui.QMainWindow",
"pyqtgraph.Qt.QtGui.QApplication.instance",
"pyqtgraph.ImageView",
"pymba.Vimba",
"time.sleep",
"pyqtgraph.Qt.QtGui.QApplication",
"pyqtgraph.Qt.QtCore.QTimer.singleShot"
] |
[((2190, 2212), 'pyqtgraph.Qt.QtGui.QApplication', 'QtGui.QApplication', (['[]'], {}), '([])\n', (2208, 2212), False, 'from pyqtgraph.Qt import QtCore, QtGui\n'), ((297, 304), 'pymba.Vimba', 'Vimba', ([], {}), '()\n', (302, 304), False, 'from pymba import Vimba\n'), ((529, 548), 'pyqtgraph.Qt.QtGui.QMainWindow', 'QtGui.QMainWindow', ([], {}), '()\n', (546, 548), False, 'from pyqtgraph.Qt import QtCore, QtGui\n'), ((679, 693), 'pyqtgraph.ImageView', 'pg.ImageView', ([], {}), '()\n', (691, 693), True, 'import pyqtgraph as pg\n'), ((1665, 1714), 'pyqtgraph.Qt.QtCore.QTimer.singleShot', 'QtCore.QTimer.singleShot', (['(50)', 'self.process_images'], {}), '(50, self.process_images)\n', (1689, 1714), False, 'from pyqtgraph.Qt import QtCore, QtGui\n'), ((2280, 2295), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (2290, 2295), False, 'import time\n'), ((2389, 2418), 'pyqtgraph.Qt.QtGui.QApplication.instance', 'QtGui.QApplication.instance', ([], {}), '()\n', (2416, 2418), False, 'from pyqtgraph.Qt import QtCore, QtGui\n')]
|
from django.contrib import admin
from django_summernote.admin import SummernoteModelAdmin
from .models import BlogPost
# Apply summernote to all TextField in model.
class BlogPostAdmin(SummernoteModelAdmin): # instead of ModelAdmin
    """Admin configuration for BlogPost using the Summernote rich-text editor."""
    # Slug is presumably generated automatically, so hide it from the form.
    exclude = ('slug', )
    list_display = ('id', 'title', 'category', 'date_created')
    list_display_links = ('id', 'title')
    search_fields = ('title', )
    list_per_page = 25
    # Render the 'content' TextField with the Summernote WYSIWYG widget.
    summernote_fields = ('content',)

# Register the model with its customized admin class.
admin.site.register(BlogPost, BlogPostAdmin)
|
[
"django.contrib.admin.site.register"
] |
[((456, 500), 'django.contrib.admin.site.register', 'admin.site.register', (['BlogPost', 'BlogPostAdmin'], {}), '(BlogPost, BlogPostAdmin)\n', (475, 500), False, 'from django.contrib import admin\n')]
|
#Importing libraries
import argparse
import cv2
from imutils.video import VideoStream #it creates a really good video stream
from imutils import face_utils, translate, resize
#face_utils : something that converts dlib to numpy so it can be furthur used.
#translate : it's going to translate the current position of our eyes to the previous pos.
#resize : for faster computation
import time
import dlib
import numpy as np
# Taking arguments from the command line
parser = argparse.ArgumentParser() # initialize the parser
parser.add_argument("-predictor", required=True, help="path to predictor")
# required=True forces the caller to supply the dlib landmark model path
args = parser.parse_args() # read the arguments from the command line
# Controls
print("Starting Program.")
print("Press 'Esc' to quit")
# Video from webcam
video = VideoStream().start()
time.sleep(1.5) # pause briefly so the camera stream can warm up
# detector finds faces; predictor locates the 68 facial landmark points
detector = dlib.get_frontal_face_detector()
predictor = dlib.shape_predictor(args.predictor) # loaded once, used inside the loop
# For taking video
counter = 0
# Separate eye layer and eye mask used to extract only the eyes from the frame
eye_layer = np.zeros((300,400,3),dtype = 'uint8')
eye_mask = eye_layer.copy()
eye_mask = cv2.cvtColor(eye_mask , cv2.COLOR_BGR2GRAY) # eyes are white on a black background
# Translated mask records where the eyes were in previous frames
translated = np.zeros((300,400,3),dtype = 'uint8')
translated_mask = eye_mask.copy()
# EyeList stores the eye positions seen across recent frames
class EyeList(object):
    """Fixed-capacity history of recent eye positions, oldest first."""

    def __init__(self, length):
        # Maximum number of coordinate pairs retained at any time.
        self.length = length
        self.eyes = []

    def push(self, newCoords):
        """Record a new coordinate pair, evicting the oldest when at capacity."""
        if len(self.eyes) >= self.length:
            # Full: drop the oldest entry before appending the newest.
            del self.eyes[0]
        self.eyes.append(newCoords)
eye_list = EyeList(10) # keep the 10 most recent eye positions
# Making Video
img_list = []
out = cv2.VideoWriter('Filter-Eyes.avi',cv2.VideoWriter_fourcc(*'DIVX'), 15, (400,300))
while True :
    frame = video.read()
    frame = resize(frame,width = 400)
    gray = cv2.cvtColor(frame , cv2.COLOR_BGR2GRAY)
    rectangle = detector(gray,0)
    # fill our masks and frames with 0 (black) on every draw loop
    eye_mask.fill(0) # only the current eye positions remain on the mask, not their trail
    eye_layer.fill(0)
    translated.fill(0)
    translated_mask.fill(0)
    for rect in rectangle :
        x,y,w,h = face_utils.rect_to_bb(rect) # face bounding box coordinates and size
        #cv2.rectangle(frame,(x,y),(x+w,y+h),(255,0,0),2)
        shape = predictor(gray,rect) # dlib landmark output for this face
        shape = face_utils.shape_to_np(shape)
        # landmark indices 36-41 are one eye, 42-47 the other
        lefteye = shape[36:42]
        righteye = shape[42:48]
        # fill our mask in the shape of our eyes
        cv2.fillPoly(eye_mask,[lefteye],255)
        cv2.fillPoly(eye_mask,[righteye],255)
        # take the eyemask and do bitwise AND with the frame
        '''What happens is that the bitwise AND will be performed between eyemask and frame , whichever gives the
        true or 1 will be shown actually in the eyelayer'''
        eye_layer = cv2.bitwise_and(frame,frame,mask = eye_mask)
        # bounding box of the eye region in the current frame
        ex,ey,ew,eh = cv2.boundingRect(eye_mask)
        eye_list.push([ex,ey])
        # Walk stored positions newest-to-oldest, pasting a ghost at each one
        for i in reversed(eye_list.eyes) :
            translated1 = translate(eye_layer, i[0] -ex ,i[1]-ey) # shift eye layer from current to stored position
            translated1_mask = translate(eye_mask, i[0]-ex ,i[1]-ey)
            translated_mask = np.maximum(translated_mask , translated1_mask) # max instead of + avoids uint8 overflow when both are 255
            # cut out the new translated mask
            translated = cv2.bitwise_and(translated,translated,mask=255-translated1_mask)
            # paste in the newly translated eye position
            translated += translated1
    '''for point in shape[36:48] : #we will only extract the eyes points in the entire face
        cv2.circle(frame,tuple(point),2,(0,255,0)) #marks the points embracing the detected shape of face'''
    # translated_mask holds all previous eye positions, so black those out of the frame
    frame = cv2.bitwise_and(frame,frame,mask = 255-translated_mask)
    frame += translated # paste in the translated eye image
    cv2.imshow("Eye Glitch",frame)
    img_list.append(frame)
    key = cv2.waitKey(1) & 0xFF
    if key == 27 :
        break
# Writing all frames to make a video file
for img in img_list :
    out.write(img)
out.release()
video.stop()
cv2.destroyAllWindows()
|
[
"numpy.maximum",
"argparse.ArgumentParser",
"cv2.VideoWriter_fourcc",
"cv2.bitwise_and",
"cv2.fillPoly",
"imutils.face_utils.shape_to_np",
"imutils.translate",
"imutils.resize",
"cv2.imshow",
"dlib.shape_predictor",
"cv2.cvtColor",
"cv2.boundingRect",
"cv2.destroyAllWindows",
"cv2.waitKey",
"time.sleep",
"dlib.get_frontal_face_detector",
"imutils.video.VideoStream",
"numpy.zeros",
"imutils.face_utils.rect_to_bb"
] |
[((482, 507), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (505, 507), False, 'import argparse\n'), ((879, 894), 'time.sleep', 'time.sleep', (['(1.5)'], {}), '(1.5)\n', (889, 894), False, 'import time\n'), ((1069, 1101), 'dlib.get_frontal_face_detector', 'dlib.get_frontal_face_detector', ([], {}), '()\n', (1099, 1101), False, 'import dlib\n'), ((1116, 1152), 'dlib.shape_predictor', 'dlib.shape_predictor', (['args.predictor'], {}), '(args.predictor)\n', (1136, 1152), False, 'import dlib\n'), ((1341, 1379), 'numpy.zeros', 'np.zeros', (['(300, 400, 3)'], {'dtype': '"""uint8"""'}), "((300, 400, 3), dtype='uint8')\n", (1349, 1379), True, 'import numpy as np\n'), ((1421, 1463), 'cv2.cvtColor', 'cv2.cvtColor', (['eye_mask', 'cv2.COLOR_BGR2GRAY'], {}), '(eye_mask, cv2.COLOR_BGR2GRAY)\n', (1433, 1463), False, 'import cv2\n'), ((1598, 1636), 'numpy.zeros', 'np.zeros', (['(300, 400, 3)'], {'dtype': '"""uint8"""'}), "((300, 400, 3), dtype='uint8')\n", (1606, 1636), True, 'import numpy as np\n'), ((5484, 5507), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (5505, 5507), False, 'import cv2\n'), ((2362, 2393), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (["*'DIVX'"], {}), "(*'DIVX')\n", (2384, 2393), False, 'import cv2\n'), ((2474, 2498), 'imutils.resize', 'resize', (['frame'], {'width': '(400)'}), '(frame, width=400)\n', (2480, 2498), False, 'from imutils import face_utils, translate, resize\n'), ((2516, 2555), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2GRAY'], {}), '(frame, cv2.COLOR_BGR2GRAY)\n', (2528, 2555), False, 'import cv2\n'), ((5067, 5124), 'cv2.bitwise_and', 'cv2.bitwise_and', (['frame', 'frame'], {'mask': '(255 - translated_mask)'}), '(frame, frame, mask=255 - translated_mask)\n', (5082, 5124), False, 'import cv2\n'), ((5203, 5234), 'cv2.imshow', 'cv2.imshow', (['"""Eye Glitch"""', 'frame'], {}), "('Eye Glitch', frame)\n", (5213, 5234), False, 'import cv2\n'), ((856, 869), 
'imutils.video.VideoStream', 'VideoStream', ([], {}), '()\n', (867, 869), False, 'from imutils.video import VideoStream\n'), ((2939, 2966), 'imutils.face_utils.rect_to_bb', 'face_utils.rect_to_bb', (['rect'], {}), '(rect)\n', (2960, 2966), False, 'from imutils import face_utils, translate, resize\n'), ((3174, 3203), 'imutils.face_utils.shape_to_np', 'face_utils.shape_to_np', (['shape'], {}), '(shape)\n', (3196, 3203), False, 'from imutils import face_utils, translate, resize\n'), ((3356, 3394), 'cv2.fillPoly', 'cv2.fillPoly', (['eye_mask', '[lefteye]', '(255)'], {}), '(eye_mask, [lefteye], 255)\n', (3368, 3394), False, 'import cv2\n'), ((3407, 3446), 'cv2.fillPoly', 'cv2.fillPoly', (['eye_mask', '[righteye]', '(255)'], {}), '(eye_mask, [righteye], 255)\n', (3419, 3446), False, 'import cv2\n'), ((3732, 3776), 'cv2.bitwise_and', 'cv2.bitwise_and', (['frame', 'frame'], {'mask': 'eye_mask'}), '(frame, frame, mask=eye_mask)\n', (3747, 3776), False, 'import cv2\n'), ((3885, 3911), 'cv2.boundingRect', 'cv2.boundingRect', (['eye_mask'], {}), '(eye_mask)\n', (3901, 3911), False, 'import cv2\n'), ((5292, 5306), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (5303, 5306), False, 'import cv2\n'), ((4094, 4136), 'imutils.translate', 'translate', (['eye_layer', '(i[0] - ex)', '(i[1] - ey)'], {}), '(eye_layer, i[0] - ex, i[1] - ey)\n', (4103, 4136), False, 'from imutils import face_utils, translate, resize\n'), ((4226, 4267), 'imutils.translate', 'translate', (['eye_mask', '(i[0] - ex)', '(i[1] - ey)'], {}), '(eye_mask, i[0] - ex, i[1] - ey)\n', (4235, 4267), False, 'from imutils import face_utils, translate, resize\n'), ((4300, 4345), 'numpy.maximum', 'np.maximum', (['translated_mask', 'translated1_mask'], {}), '(translated_mask, translated1_mask)\n', (4310, 4345), True, 'import numpy as np\n'), ((4514, 4582), 'cv2.bitwise_and', 'cv2.bitwise_and', (['translated', 'translated'], {'mask': '(255 - translated1_mask)'}), '(translated, translated, mask=255 - 
translated1_mask)\n', (4529, 4582), False, 'import cv2\n')]
|
import torch
import numpy as np
import random
from transformers import T5Tokenizer, T5ForConditionalGeneration
# Set all seeds to make output deterministic (torch, numpy, and stdlib random)
torch.manual_seed(0)
np.random.seed(0)
random.seed(0)
# Paragraphs for which we want to generate queries
# (Wikipedia-style passages about Python and Java used as generation inputs)
paragraphs = [
    "Python is an interpreted, high-level and general-purpose programming language. Python's design philosophy emphasizes code readability with its notable use of significant whitespace. Its language constructs and object-oriented approach aim to help programmers write clear, logical code for small and large-scale projects.",
    "Python is dynamically-typed and garbage-collected. It supports multiple programming paradigms, including structured (particularly, procedural), object-oriented and functional programming. Python is often described as a \"batteries included\" language due to its comprehensive standard library.",
    "Python was created in the late 1980s, and first released in 1991, by <NAME> as a successor to the ABC programming language. Python 2.0, released in 2000, introduced new features, such as list comprehensions, and a garbage collection system with reference counting, and was discontinued with version 2.7 in 2020. Python 3.0, released in 2008, was a major revision of the language that is not completely backward-compatible and much Python 2 code does not run unmodified on Python 3. With Python 2's end-of-life (and pip having dropped support in 2021), only Python 3.6.x and later are supported, with older versions still supporting e.g. Windows 7 (and old installers not restricted to 64-bit Windows).",
    "Python interpreters are supported for mainstream operating systems and available for a few more (and in the past supported many more). A global community of programmers develops and maintains CPython, a free and open-source reference implementation. A non-profit organization, the Python Software Foundation, manages and directs resources for Python and CPython development.",
    "As of January 2021, Python ranks third in TIOBE’s index of most popular programming languages, behind C and Java, having previously gained second place and their award for the most popularity gain for 2020.",
    "Java is a class-based, object-oriented programming language that is designed to have as few implementation dependencies as possible. It is a general-purpose programming language intended to let application developers write once, run anywhere (WORA), meaning that compiled Java code can run on all platforms that support Java without the need for recompilation. Java applications are typically compiled to bytecode that can run on any Java virtual machine (JVM) regardless of the underlying computer architecture. The syntax of Java is similar to C and C++, but has fewer low-level facilities than either of them. The Java runtime provides dynamic capabilities (such as reflection and runtime code modification) that are typically not available in traditional compiled languages. As of 2019, Java was one of the most popular programming languages in use according to GitHub, particularly for client-server web applications, with a reported 9 million developers.",
    "Java was originally developed by <NAME> at Sun Microsystems (which has since been acquired by Oracle) and released in 1995 as a core component of Sun Microsystems' Java platform. The original and reference implementation Java compilers, virtual machines, and class libraries were originally released by Sun under proprietary licenses. As of May 2007, in compliance with the specifications of the Java Community Process, Sun had relicensed most of its Java technologies under the GNU General Public License. Oracle offers its own HotSpot Java Virtual Machine, however the official reference implementation is the OpenJDK JVM which is free open source software and used by most developers and is the default JVM for almost all Linux distributions.",
    "As of September 2020, the latest version is Java 15, with Java 11, a currently supported long-term support (LTS) version, released on September 25, 2018. Oracle released the last zero-cost public update for the legacy version Java 8 LTS in January 2019 for commercial use, although it will otherwise still support Java 8 with public updates for personal use indefinitely. Other vendors have begun to offer zero-cost builds of OpenJDK 8 and 11 that are still receiving security and other upgrades.",
    "Oracle (and others) highly recommend uninstalling outdated versions of Java because of serious risks due to unresolved security issues. Since Java 9, 10, 12, 13, and 14 are no longer supported, Oracle advises its users to immediately transition to the latest version (currently Java 15) or an LTS release."
]
# For available models for query generation, see: https://huggingface.co/BeIR/
# Here, we use a T5-large model that was trained on the MS MARCO dataset
tokenizer = T5Tokenizer.from_pretrained('BeIR/query-gen-msmarco-t5-large')
model = T5ForConditionalGeneration.from_pretrained('BeIR/query-gen-msmarco-t5-large')
model.eval()
# Select the device (GPU when available, otherwise CPU)
device = 'cuda' if torch.cuda.is_available() else 'cpu'
model.to(device)
# Iterate over the paragraphs and generate some queries for each;
# no_grad disables autograd bookkeeping since this is inference only
with torch.no_grad():
    for para in paragraphs:
        input_ids = tokenizer.encode(para, return_tensors='pt').to(device)
        # Nucleus sampling (top_p) yields varied, natural-sounding queries;
        # three candidate queries are produced per paragraph.
        outputs = model.generate(
            input_ids=input_ids,
            max_length=64,
            do_sample=True,
            top_p=0.95,
            num_return_sequences=3)
        print("\nParagraph:")
        print(para)
        print("\nGenerated Queries:")
        for i in range(len(outputs)):
            query = tokenizer.decode(outputs[i], skip_special_tokens=True)
            print(f'{i + 1}: {query}')
"""
Output of the script:
Paragraph:
Python is an interpreted, high-level and general-purpose programming language. Python's design philosophy emphasizes code readability with its notable use of significant whitespace. Its language constructs and object-oriented approach aim to help programmers write clear, logical code for small and large-scale projects.
Generated Queries:
1: what is python language used for
2: what is python programming
3: what language do i use for scripts
Paragraph:
Python is dynamically-typed and garbage-collected. It supports multiple programming paradigms, including structured (particularly, procedural), object-oriented and functional programming. Python is often described as a "batteries included" language due to its comprehensive standard library.
Generated Queries:
1: what is python language
2: what programming paradigms do python support
3: what programming languages use python
Paragraph:
Python was created in the late 1980s, and first released in 1991, by <NAME> as a successor to the ABC programming language. Python 2.0, released in 2000, introduced new features, such as list comprehensions, and a garbage collection system with reference counting, and was discontinued with version 2.7 in 2020. Python 3.0, released in 2008, was a major revision of the language that is not completely backward-compatible and much Python 2 code does not run unmodified on Python 3. With Python 2's end-of-life (and pip having dropped support in 2021), only Python 3.6.x and later are supported, with older versions still supporting e.g. Windows 7 (and old installers not restricted to 64-bit Windows).
Generated Queries:
1: what year did python start
2: when does the next python update release
3: when did python come out?
Paragraph:
Python interpreters are supported for mainstream operating systems and available for a few more (and in the past supported many more). A global community of programmers develops and maintains CPython, a free and open-source reference implementation. A non-profit organization, the Python Software Foundation, manages and directs resources for Python and CPython development.
Generated Queries:
1: what platform is python available on
2: what is python used for
3: what is python?
Paragraph:
As of January 2021, Python ranks third in TIOBE’s index of most popular programming languages, behind C and Java, having previously gained second place and their award for the most popularity gain for 2020.
Generated Queries:
1: what is the most used programming language in the world
2: what is python language
3: what is the most popular programming language in the world?
Paragraph:
Java is a class-based, object-oriented programming language that is designed to have as few implementation dependencies as possible. It is a general-purpose programming language intended to let application developers write once, run anywhere (WORA), meaning that compiled Java code can run on all platforms that support Java without the need for recompilation. Java applications are typically compiled to bytecode that can run on any Java virtual machine (JVM) regardless of the underlying computer architecture. The syntax of Java is similar to C and C++, but has fewer low-level facilities than either of them. The Java runtime provides dynamic capabilities (such as reflection and runtime code modification) that are typically not available in traditional compiled languages. As of 2019, Java was one of the most popular programming languages in use according to GitHub, particularly for client-server web applications, with a reported 9 million developers.
Generated Queries:
1: java how java works
2: what language is similar to java
3: what is java language
Paragraph:
Java was originally developed by <NAME> at Sun Microsystems (which has since been acquired by Oracle) and released in 1995 as a core component of Sun Microsystems' Java platform. The original and reference implementation Java compilers, virtual machines, and class libraries were originally released by Sun under proprietary licenses. As of May 2007, in compliance with the specifications of the Java Community Process, Sun had relicensed most of its Java technologies under the GNU General Public License. Oracle offers its own HotSpot Java Virtual Machine, however the official reference implementation is the OpenJDK JVM which is free open source software and used by most developers and is the default JVM for almost all Linux distributions.
Generated Queries:
1: what is java created by
2: when was java introduced to linux
3: who developed java?
Paragraph:
As of September 2020, the latest version is Java 15, with Java 11, a currently supported long-term support (LTS) version, released on September 25, 2018. Oracle released the last zero-cost public update for the legacy version Java 8 LTS in January 2019 for commercial use, although it will otherwise still support Java 8 with public updates for personal use indefinitely. Other vendors have begun to offer zero-cost builds of OpenJDK 8 and 11 that are still receiving security and other upgrades.
Generated Queries:
1: what is the latest version of java
2: what is the latest java version
3: what is the latest version of java
Paragraph:
Oracle (and others) highly recommend uninstalling outdated versions of Java because of serious risks due to unresolved security issues. Since Java 9, 10, 12, 13, and 14 are no longer supported, Oracle advises its users to immediately transition to the latest version (currently Java 15) or an LTS release.
Generated Queries:
1: why is oracle not supported
2: what version is oracle used in
3: which java version is obsolete
"""
|
[
"numpy.random.seed",
"torch.manual_seed",
"transformers.T5ForConditionalGeneration.from_pretrained",
"random.seed",
"torch.cuda.is_available",
"transformers.T5Tokenizer.from_pretrained",
"torch.no_grad"
] |
[((156, 176), 'torch.manual_seed', 'torch.manual_seed', (['(0)'], {}), '(0)\n', (173, 176), False, 'import torch\n'), ((177, 194), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (191, 194), True, 'import numpy as np\n'), ((195, 209), 'random.seed', 'random.seed', (['(0)'], {}), '(0)\n', (206, 209), False, 'import random\n'), ((4876, 4938), 'transformers.T5Tokenizer.from_pretrained', 'T5Tokenizer.from_pretrained', (['"""BeIR/query-gen-msmarco-t5-large"""'], {}), "('BeIR/query-gen-msmarco-t5-large')\n", (4903, 4938), False, 'from transformers import T5Tokenizer, T5ForConditionalGeneration\n'), ((4947, 5024), 'transformers.T5ForConditionalGeneration.from_pretrained', 'T5ForConditionalGeneration.from_pretrained', (['"""BeIR/query-gen-msmarco-t5-large"""'], {}), "('BeIR/query-gen-msmarco-t5-large')\n", (4989, 5024), False, 'from transformers import T5Tokenizer, T5ForConditionalGeneration\n'), ((5077, 5102), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (5100, 5102), False, 'import torch\n'), ((5202, 5217), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5215, 5217), False, 'import torch\n')]
|
"""
Mocks out led device hardware
"""
from logging import getLogger
from unittest.mock import Mock
_LOGGER = getLogger("mock matrix")
class Matrix:
    """
    A mock for an led matrix device
    """

    # Fixed geometry/mode of the simulated 32x8 monochrome panel.
    _width = 32
    _height = 8
    _mode = "1"

    def __init__(self):
        _LOGGER.info("Created mock led matrix device %r", self)
        # Stand-in for the real display handle; records calls for inspection.
        self.display = Mock()

    def clear(self):
        """
        Clear the display
        """
        _LOGGER.info("Clearing device %r", self)

    @property
    def width(self):
        """Width of the display in pixels."""
        return Matrix._width

    @property
    def height(self):
        """Height of the display in pixels."""
        return Matrix._height

    @property
    def mode(self):
        """Image mode string, needed for image drawing reasons."""
        return Matrix._mode
|
[
"unittest.mock.Mock",
"logging.getLogger"
] |
[((110, 134), 'logging.getLogger', 'getLogger', (['"""mock matrix"""'], {}), "('mock matrix')\n", (119, 134), False, 'from logging import getLogger\n'), ((363, 369), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (367, 369), False, 'from unittest.mock import Mock\n')]
|
from __future__ import print_function
import time
import numpy as np
import sys
import gym
from PIL import Image
from gibson.core.render.profiler import Profiler
from gibson.envs.husky_env import *
from gibson.envs.ant_env import *
from gibson.envs.humanoid_env import *
from gibson.envs.drone_env import *
import pybullet as p
class RandomAgent(object):
    """The world's simplest agent: it samples actions uniformly at random."""

    def __init__(self, action_space, is_discrete):
        # Gym action space describing the legal actions.
        self.action_space = action_space
        # True -> integer actions; False -> continuous action vectors.
        self.is_discrete = is_discrete

    def act(self, observation, reward=None):
        """Return a random action; observation and reward are ignored."""
        if self.is_discrete:
            # Uniform draw over the n discrete actions.
            return np.random.randint(self.action_space.n)
        # Continuous case: start from the zero vector and, half the time,
        # set one randomly chosen component to -1, 0, or 1.
        action = np.zeros(self.action_space.shape[0])
        if np.random.random() < 0.5:
            action[np.random.choice(action.shape[0], 1)] = np.random.randint(-1, 2)
        return action
def testEnv(Env, config="test_filled.yaml", frame_total=10, is_discrete=False):
print("Currently testing", Env)
config = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'examples', 'configs', 'test', config)
env = Env(config)
obs = env.reset()
agent = RandomAgent(env.action_space, is_discrete)
frame = 0
score = 0
restart_delay = 0
obs = env.reset()
while True:
time.sleep(0.01)
a = agent.act(obs)
obs, r, done, meta = env.step(a)
score += r
frame += 1
if not done and frame < frame_total: continue
env.close()
return
if __name__ == '__main__':
testEnv(HuskyNavigateEnv, "test_semantics.yaml", 10, is_discrete=True)
testEnv(HuskyNavigateEnv, "test_filled.yaml", 10, is_discrete=True)
testEnv(HuskyNavigateEnv, "test_prefilled.yaml", 10, is_discrete=True)
testEnv(HuskyNavigateEnv, "test_depth.yaml", 10, is_discrete=True)
testEnv(HuskyNavigateEnv, "test_normal.yaml", 10, is_discrete=True)
testEnv(HuskyNavigateEnv, "test_three.yaml", 10, is_discrete=True)
testEnv(HuskyNavigateEnv, "test_four.yaml", 10, is_discrete=True)
testEnv(HuskyNavigateEnv, "test_nonviz.yaml", 10, is_discrete=True)
testEnv(HuskyGibsonFlagRunEnv, "test_nonviz.yaml", 10, is_discrete=True)
testEnv(HuskyGibsonFlagRunEnv, "test_depth.yaml", 10, is_discrete=True)
testEnv(AntGibsonFlagRunEnv, "test_nonviz_nondiscrete.yaml", 10, is_discrete=False)
testEnv(AntFlagRunEnv, "test_nonviz_nondiscrete.yaml", 10, is_discrete=False)
testEnv(AntClimbEnv, "test_nonviz_nondiscrete.yaml", 10, is_discrete=False)
testEnv(AntNavigateEnv, "test_nonviz_nondiscrete.yaml", 10, is_discrete=False)
testEnv(AntClimbEnv, "test_four_nondiscrete.yaml", 10, is_discrete=False)
testEnv(HumanoidNavigateEnv, "test_nonviz_nondiscrete.yaml", 10, is_discrete=False)
testEnv(HumanoidGibsonFlagRunEnv, "test_nonviz_nondiscrete.yaml", 10, is_discrete=False)
testEnv(HumanoidNavigateEnv, "test_four_nondiscrete.yaml", 10, is_discrete=False)
testEnv(DroneNavigateEnv, "test_nonviz_nondiscrete.yaml", 100, is_discrete=False)
testEnv(DroneNavigateEnv, "test_four_nondiscrete.yaml", 100, is_discrete=False)
|
[
"numpy.zeros",
"time.sleep",
"numpy.random.random",
"numpy.random.randint",
"numpy.random.choice"
] |
[((1326, 1342), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (1336, 1342), False, 'import time\n'), ((628, 666), 'numpy.random.randint', 'np.random.randint', (['self.action_space.n'], {}), '(self.action_space.n)\n', (645, 666), True, 'import numpy as np\n'), ((702, 738), 'numpy.zeros', 'np.zeros', (['self.action_space.shape[0]'], {}), '(self.action_space.shape[0])\n', (710, 738), True, 'import numpy as np\n'), ((755, 773), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (771, 773), True, 'import numpy as np\n'), ((845, 869), 'numpy.random.randint', 'np.random.randint', (['(-1)', '(2)'], {}), '(-1, 2)\n', (862, 869), True, 'import numpy as np\n'), ((805, 841), 'numpy.random.choice', 'np.random.choice', (['action.shape[0]', '(1)'], {}), '(action.shape[0], 1)\n', (821, 841), True, 'import numpy as np\n')]
|
import sqlite3
from utils import send
import requests
from parameters import recruitment_message as message, login_payload, exclude_inactive, inactivity_threshold
def recruit(key):
""" Finds new players from the nations API, then sends and logs recruitment messages to them. """
conn = sqlite3.connect('logs.db')
with conn:
unaligned = get_unaligned_nations(key)
contacts = filter_unaligned(unaligned, conn)
with requests.Session() as s:
s.post('https://politicsandwar.com/login/', data=login_payload, headers={'User-Agent': 'Mozilla/5.0'})
print("Sending recruitment messages.")
c = conn.cursor()
for contact in contacts:
send(contact, message, s)
# log the message
c.execute('''INSERT INTO recruitment(nation_id) VALUES(?)''', (contact['nationid'],))
c.close()
print('Finished messaging unaligned nations.')
def get_unaligned_nations(key):
req = requests.get(f"https://politicsandwar.com/api/nations/?key={key}&alliance_id=0", headers={'User-Agent': 'Mozilla/5.0'})
print('Calling the API...')
data = req.json()
if not data['success']:
raise SystemExit("PW API Error : " + data['general_message'])
nations = data['nations']
print('API Loaded')
return nations
def filter_unaligned(nations, conn):
print('Filtering previously messaged nations...')
filtered = []
c = conn.cursor()
# Filter out vacation mode and previously messaged nations
for nation in nations:
if nation['vacmode']:
continue
if exclude_inactive:
if nation['minutesinactive'] > inactivity_threshold:
continue
nation_id = nation['nationid']
c.execute('''SELECT nation_id FROM recruitment WHERE nation_id=?''', (nation_id,))
result = c.fetchone()
if result:
continue
filtered.append(nation)
c.close()
print(f"Found {len(filtered)} unmessaged nations.")
return filtered
|
[
"utils.send",
"requests.Session",
"sqlite3.connect",
"requests.get"
] |
[((306, 332), 'sqlite3.connect', 'sqlite3.connect', (['"""logs.db"""'], {}), "('logs.db')\n", (321, 332), False, 'import sqlite3\n'), ((1037, 1161), 'requests.get', 'requests.get', (['f"""https://politicsandwar.com/api/nations/?key={key}&alliance_id=0"""'], {'headers': "{'User-Agent': 'Mozilla/5.0'}"}), "(f'https://politicsandwar.com/api/nations/?key={key}&alliance_id=0'\n , headers={'User-Agent': 'Mozilla/5.0'})\n", (1049, 1161), False, 'import requests\n'), ((467, 485), 'requests.Session', 'requests.Session', ([], {}), '()\n', (483, 485), False, 'import requests\n'), ((746, 771), 'utils.send', 'send', (['contact', 'message', 's'], {}), '(contact, message, s)\n', (750, 771), False, 'from utils import send\n')]
|
# -*-coding:utf-8-*-
import torch
if __name__ == '__main__':
A = torch.arange(20).reshape(5,-1)
# print(A)
# print(A.T)
#对称矩阵
B = torch.tensor([[1,2,3],[2,0,4],[3,4,5]])
# print(B)
# print(B == B.T)
X = torch.arange(24).reshape(2,3,4)
# print(X)
#注意结果输出,3和4代表最里层3*4矩阵,2代表最外层
# tensor([[[ 0, 1, 2, 3],
# [ 4, 5, 6, 7],
# [ 8, 9, 10, 11]],
#
# [[12, 13, 14, 15],
# [16, 17, 18, 19],
# [20, 21, 22, 23]]])
#reshape并不改变所在地址,仅仅改变"view"视图
#若想给reshape后的结果分配新地址示例如下
A = torch.arange(20, dtype=torch.float32).reshape(5, 4)
B = A.clone() # 通过分配新内存,将A的一个副本分配给B
# print(A)
# print(B)
# print(A.storage() == B.storage())
#*运算:两个矩阵的按元素乘法称为 哈达玛积
# print(A * B)
a = 2
#标量与张量乘除
X = torch.arange(24).reshape(2, 3, 4)
# print(a + X)
# print((a * X).shape)
'''
指定张量沿哪一个轴来通过求和降低维度
例如[5,4]沿轴0求和变为[4]一维向量
沿轴1求和变为[5]一维向量
'''
# 例:求和所有行的元素来降维(轴0)
A_sum_axis0 = A.sum(axis=0)
# print(A)
# print(A_sum_axis0)
# print(A_sum_axis0.shape)
#列降维(轴1)
A_sum_axis1 = A.sum(axis=1)
# print(A_sum_axis1)
# print(A_sum_axis1.shape)
#沿着行和列对矩阵求和,等价于对矩阵的所有元素进行求和
#print(A.sum(axis=[0, 1])) # Same as `A.sum()`
#指定轴求均值
# print(A.mean(axis=0))
# print(A.sum(axis=0) / A.shape[0])
#计算总和或均值时保持轴数不变
#例如[5,4]对轴1求和且保持轴数不变则变为[5,1]
sum_A = A.sum(axis=1, keepdims=True)
# print(A)
# print(sum_A)
#由于 sum_A 在对每行进行求和后仍保持两个轴,可以通过广播将 A 除以 sum_A
#广播机制必须保证维度相同,而m*n中m与n的具体值随意
# print(A / sum_A)
#cumsum:沿[某个轴计算 A 元素的累积总和]
#此函数不会沿任何轴降低输入张量的维度
# print(A.cumsum(axis=0))
#两向量点积: torch.dot(x,y)
x = torch.arange(4,dtype=torch.float32)
y = torch.ones(4,dtype=torch.float32)
# print(torch.dot(x,y))
#等价于 torch.sum(x * y)
#矩阵-向量积:torch.mv(A,x)
A = torch.arange(20).reshape(5,4)
x = torch.arange(4)
Ax = torch.mv(A,x)
# print(A)
# print(x)
# print(Ax)
#矩阵乘法:torch.mm(A, B)
B = torch.ones(4, 3)
# print(torch.mm(A, B))
#torch.norm() L2范数:平方和开根号
#对向量来说norm即求L2范数
u = torch.tensor([3.0, -4.0])
# print(torch.norm(u))
#L1范数:绝对值之和
# print(torch.abs(u).sum())
#对矩阵来说norm即求F范数,也是平方和开根号
print(torch.norm(torch.ones((4, 9))))
|
[
"torch.ones",
"torch.mv",
"torch.arange",
"torch.tensor"
] |
[((153, 200), 'torch.tensor', 'torch.tensor', (['[[1, 2, 3], [2, 0, 4], [3, 4, 5]]'], {}), '([[1, 2, 3], [2, 0, 4], [3, 4, 5]])\n', (165, 200), False, 'import torch\n'), ((1735, 1771), 'torch.arange', 'torch.arange', (['(4)'], {'dtype': 'torch.float32'}), '(4, dtype=torch.float32)\n', (1747, 1771), False, 'import torch\n'), ((1779, 1813), 'torch.ones', 'torch.ones', (['(4)'], {'dtype': 'torch.float32'}), '(4, dtype=torch.float32)\n', (1789, 1813), False, 'import torch\n'), ((1943, 1958), 'torch.arange', 'torch.arange', (['(4)'], {}), '(4)\n', (1955, 1958), False, 'import torch\n'), ((1968, 1982), 'torch.mv', 'torch.mv', (['A', 'x'], {}), '(A, x)\n', (1976, 1982), False, 'import torch\n'), ((2062, 2078), 'torch.ones', 'torch.ones', (['(4)', '(3)'], {}), '(4, 3)\n', (2072, 2078), False, 'import torch\n'), ((2169, 2194), 'torch.tensor', 'torch.tensor', (['[3.0, -4.0]'], {}), '([3.0, -4.0])\n', (2181, 2194), False, 'import torch\n'), ((71, 87), 'torch.arange', 'torch.arange', (['(20)'], {}), '(20)\n', (83, 87), False, 'import torch\n'), ((239, 255), 'torch.arange', 'torch.arange', (['(24)'], {}), '(24)\n', (251, 255), False, 'import torch\n'), ((578, 615), 'torch.arange', 'torch.arange', (['(20)'], {'dtype': 'torch.float32'}), '(20, dtype=torch.float32)\n', (590, 615), False, 'import torch\n'), ((819, 835), 'torch.arange', 'torch.arange', (['(24)'], {}), '(24)\n', (831, 835), False, 'import torch\n'), ((1905, 1921), 'torch.arange', 'torch.arange', (['(20)'], {}), '(20)\n', (1917, 1921), False, 'import torch\n'), ((2321, 2339), 'torch.ones', 'torch.ones', (['(4, 9)'], {}), '((4, 9))\n', (2331, 2339), False, 'import torch\n')]
|
"""
Copyright 2019 Samsung SDS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import numpy as np
import pandas as pd
import re
from brightics.common.utils import check_required_parameters
import itertools
def polynomial_expansion(table, **params):
check_required_parameters(_polynomial_expansion, params, ['table'])
return _polynomial_expansion(table, **params)
def _polynomial_expansion(table, input_cols, hold_cols=False):
out_table = pd.DataFrame()
out_table[input_cols] = table[input_cols]
if hold_cols:
hold_cols = list(set(hold_cols) - set(input_cols))
out_table[hold_cols] = table[hold_cols]
for i in range(len(input_cols)):
for j in range(i, len(input_cols)):
out_table[input_cols[i] + '_' + input_cols[j]] = np.array(table[input_cols[i]]) * np.array(table[input_cols[j]])
return {'out_table' : out_table}
|
[
"pandas.DataFrame",
"numpy.array",
"brightics.common.utils.check_required_parameters"
] |
[((789, 856), 'brightics.common.utils.check_required_parameters', 'check_required_parameters', (['_polynomial_expansion', 'params', "['table']"], {}), "(_polynomial_expansion, params, ['table'])\n", (814, 856), False, 'from brightics.common.utils import check_required_parameters\n'), ((991, 1005), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (1003, 1005), True, 'import pandas as pd\n'), ((1319, 1349), 'numpy.array', 'np.array', (['table[input_cols[i]]'], {}), '(table[input_cols[i]])\n', (1327, 1349), True, 'import numpy as np\n'), ((1352, 1382), 'numpy.array', 'np.array', (['table[input_cols[j]]'], {}), '(table[input_cols[j]])\n', (1360, 1382), True, 'import numpy as np\n')]
|
from unittest import mock
from urllib.parse import parse_qs, urlparse
import pytest
import python_freeipa
from bs4 import BeautifulSoup
from pyotp import TOTP
from noggin.app import ipa_admin
from noggin.representation.otptoken import OTPToken
from ..utilities import (
assert_form_field_error,
assert_form_generic_error,
assert_redirects_with_flash,
get_otp,
otp_secret_from_uri,
)
@pytest.fixture
def dummy_user_with_2_otp(client, logged_in_dummy_user, logged_in_dummy_user_with_otp):
ipa = logged_in_dummy_user
result = ipa.otptoken_add(
o_ipatokenowner="dummy",
o_description="dummy's other token",
)['result']
token = OTPToken(result)
yield logged_in_dummy_user_with_otp, token
try:
ipa_admin.otptoken_del(token.uniqueid)
except python_freeipa.exceptions.NotFound:
pass # already deleted, it's fine.
@pytest.fixture
def totp_token():
return TOTP("<KEY>")
@pytest.mark.vcr()
def test_user_settings_otp(client, logged_in_dummy_user):
"""Test getting the user OTP settings page: /user/<username>/settings/otp/"""
result = client.get("/user/dummy/settings/otp/")
page = BeautifulSoup(result.data, "html.parser")
assert page.title
assert page.title.string == "Settings for dummy - noggin"
# check the pageheading
pageheading = page.select("#pageheading")[0]
assert pageheading.get_text(strip=True) == "OTP Tokens"
# check that there arent any tokens
tokenlist = page.select("div.list-group")
assert len(tokenlist) == 1
assert (
tokenlist[0].select(".list-group-item")[0].get_text(strip=True)
== "You have no OTP tokensAdd an OTP token to enable two-factor "
"authentication on your account."
)
result = client.get("/user/dummy/settings/otp/")
page = BeautifulSoup(result.data, "html.parser")
assert page.title
assert page.title.string == "Settings for dummy - noggin"
form = page.select("form[action='/user/dummy/settings/otp/']")
assert len(form) == 1
@pytest.mark.vcr()
def test_user_settings_otp_no_permission(client, logged_in_dummy_user):
"""Verify that a user's OTP settings page can't be viewed by another user."""
result = client.get("/user/dudemcpants/settings/otp/")
assert_redirects_with_flash(
result,
expected_url="/user/dudemcpants/",
expected_message="You do not have permission to edit this account.",
expected_category="danger",
)
@pytest.mark.vcr()
def test_user_settings_otp_add(client, logged_in_dummy_user, cleanup_dummy_tokens):
"""Test the first step of OTP creation"""
result = client.post(
"/user/dummy/settings/otp/",
data={
"add-description": "pants token",
"add-password": "<PASSWORD>",
"add-submit": "1",
},
)
page = BeautifulSoup(result.data, "html.parser")
# The token has not been added yet
tokenlist = page.select_one("div.list-group")
assert tokenlist is not None
assert "You have no OTP tokens" in tokenlist.get_text(strip=True)
# check the modal is on the page
modal = page.select_one("#otp-modal")
assert modal is not None
# check the next step form is properly pre-filled
confirm_form = modal.select_one("form")
assert confirm_form is not None
assert (
confirm_form.select_one("input[name='confirm-description']")["value"]
== "pants token"
)
otp_uri = page.select_one("input#otp-uri")
parsed_otp_uri_query = parse_qs(urlparse(otp_uri["value"]).query)
assert (
confirm_form.select_one("input[name='confirm-secret']")["value"]
== parsed_otp_uri_query["secret"][0]
)
@pytest.mark.vcr()
def test_user_settings_otp_confirm(
client, logged_in_dummy_user, cleanup_dummy_tokens, totp_token
):
"""Test OTP creation"""
result = client.post(
"/user/dummy/settings/otp/",
data={
"confirm-description": "pants token",
"confirm-secret": totp_token.secret,
"confirm-code": totp_token.now(),
"confirm-submit": "1",
},
)
assert_redirects_with_flash(
result,
expected_url="/user/dummy/settings/otp/",
expected_message="The token has been created.",
expected_category="success",
)
result = client.get("/user/dummy/settings/otp/")
page = BeautifulSoup(result.data, "html.parser")
tokenlist = page.select_one("div.list-group")
assert tokenlist is not None
# check this is not the no tokens message
assert "You have no OTP tokens" not in tokenlist.get_text(strip=True)
# check we are showing 1 token
tokens = tokenlist.select(".list-group-item .col")
assert len(tokens) == 1
# check the token is in the list
description = tokens[0].select_one("div[data-role='token-description']")
assert description is not None
assert description.get_text(strip=True) == "pants token"
# check the modal is closed
assert page.select_one("#otp-modal") is None
@pytest.mark.vcr()
def test_user_settings_otp_add_second(
client, logged_in_dummy_user_with_otp, cleanup_dummy_tokens
):
"""Test posting to the create OTP endpoint"""
otp = get_otp(otp_secret_from_uri(logged_in_dummy_user_with_otp.uri))
result = client.post(
"/user/dummy/settings/otp/",
data={
"add-description": "pants token 2",
"add-password": "<PASSWORD>",
"add-otp": otp,
"add-submit": "1",
},
)
page = BeautifulSoup(result.data, "html.parser")
tokenlist = page.select_one("div.list-group")
assert tokenlist is not None
tokens = tokenlist.select(".list-group-item div[data-role='token-description']")
assert len(tokens) == 1
modal = page.select_one("#otp-modal")
assert modal is not None
confirm_form = modal.select_one("form")
assert confirm_form is not None
assert (
confirm_form.select_one("input[name='confirm-description']")["value"]
== "pants token 2"
)
otp_uri = page.select_one("input#otp-uri")
parsed_otp_uri_query = parse_qs(urlparse(otp_uri["value"]).query)
assert (
confirm_form.select_one("input[name='confirm-secret']")["value"]
== parsed_otp_uri_query["secret"][0]
)
@pytest.mark.vcr()
def test_user_settings_otp_add_second_confirm(
client,
logged_in_dummy_user_with_otp,
cleanup_dummy_tokens,
totp_token,
):
"""Test posting to the create OTP endpoint"""
result = client.post(
"/user/dummy/settings/otp/",
data={
"confirm-description": "pants token",
"confirm-secret": totp_token.secret,
"confirm-code": totp_token.now(),
"confirm-submit": "1",
},
follow_redirects=True,
)
page = BeautifulSoup(result.data, "html.parser")
tokenlist = page.select_one("div.list-group")
assert tokenlist is not None
# check we are showing 2 tokens
tokens = tokenlist.select(".list-group-item div[data-role='token-description']")
assert len(tokens) == 2
# check the 2nd token is in the list
assert tokens[1].get_text(strip=True) == "pants token"
# check the modal is closed
assert page.select_one("#otp-modal") is None
@pytest.mark.vcr()
def test_user_settings_otp_check_no_description(
client, logged_in_dummy_user, cleanup_dummy_tokens, totp_token
):
"""Test an OTP token without a description"""
result = client.post(
"/user/dummy/settings/otp/",
data={
"confirm-secret": totp_token.secret,
"confirm-code": totp_token.now(),
"confirm-submit": "1",
},
follow_redirects=True,
)
page = BeautifulSoup(result.data, "html.parser")
tokenlist = page.select_one("div.list-group")
assert tokenlist is not None
tokens = tokenlist.select(".list-group-item div[data-role='token-description']")
assert len(tokens) == 1
assert tokens[0].get_text(strip=True) == "(no name)"
@pytest.mark.vcr()
def test_user_settings_otp_check_description_escaping(
client, logged_in_dummy_user, cleanup_dummy_tokens
):
"""Test that we escape the token description when constructing the OTP URI"""
result = client.post(
"/user/dummy/settings/otp/",
data={
"add-description": "pants token",
"add-password": "<PASSWORD>",
"add-submit": "1",
},
follow_redirects=True,
)
page = BeautifulSoup(result.data, "html.parser")
otp_uri = page.select_one("input#otp-uri")
print(page.prettify())
assert otp_uri is not None
parsed_otp_uri = urlparse(otp_uri["value"])
# Not sure we need all of these checked
assert parsed_otp_uri.scheme == "otpauth"
assert parsed_otp_uri.netloc == "totp"
assert parsed_otp_uri.path == "/dummy%40NOGGIN.TEST:pants%20token"
parsed_query = parse_qs(parsed_otp_uri.query)
assert parsed_query["issuer"] == ["<EMAIL>"]
@pytest.mark.vcr()
def test_user_settings_otp_add_no_permission(client, logged_in_dummy_user, totp_token):
"""Verify that another user can't make an otp token."""
result = client.post(
"/user/dudemcpants/settings/otp/",
data={
"confirm-description": "pants token",
"confirm-secret": totp_token.secret,
"confirm-code": totp_token.now(),
"confirm-submit": "1",
},
)
assert_redirects_with_flash(
result,
expected_url="/user/dudemcpants/",
expected_message="You do not have permission to edit this account.",
expected_category="danger",
)
@pytest.mark.vcr()
def test_user_settings_otp_add_invalid_form(client, logged_in_dummy_user):
"""Test an invalid form when adding an otp token"""
result = client.post("/user/dummy/settings/otp/", data={"add-submit": "1"})
assert_form_field_error(result, "add-password", "You must provide a password")
@pytest.mark.vcr()
def test_user_settings_otp_add_wrong_password(client, logged_in_dummy_user):
"""Test adding an otp token with the wrong password"""
result = client.post(
"/user/dummy/settings/otp/",
data={
"add-description": "pants token",
"add-password": "<PASSWORD>",
"add-submit": "1",
},
)
assert_form_field_error(result, "add-password", "Incorrect password")
@pytest.mark.vcr()
def test_user_settings_otp_add_wrong_code(client, logged_in_dummy_user, totp_token):
"""Test failure when adding an otptoken"""
result = client.post(
"/user/dummy/settings/otp/",
data={
"confirm-description": "pants token",
"confirm-secret": totp_token.secret,
"confirm-code": "123456",
"confirm-submit": "1",
},
)
assert_form_field_error(
result, "confirm-code", "The code is wrong, please try again."
)
@pytest.mark.vcr()
def test_user_settings_otp_add_invalid(client, logged_in_dummy_user, totp_token):
"""Test failure when adding an otptoken"""
with mock.patch("noggin.security.ipa.Client.otptoken_add") as method:
method.side_effect = python_freeipa.exceptions.ValidationError(
message={
"member": {"user": [("testuser", "something went wrong")], "group": []}
},
code="4242",
)
result = client.post(
"/user/dummy/settings/otp/",
data={
"confirm-description": "pants token",
"confirm-secret": totp_token.secret,
"confirm-code": totp_token.now(),
"confirm-submit": "1",
},
)
assert_form_generic_error(result, expected_message="Cannot create the token.")
@pytest.mark.vcr()
def test_user_settings_otp_disable_no_permission(client, logged_in_dummy_user):
"""Verify that another user can't disable an otp token."""
result = client.post(
"/user/dudemcpants/settings/otp/disable/",
data={"description": "pants token", "password": "<PASSWORD>"},
)
assert_redirects_with_flash(
result,
expected_url="/user/dudemcpants/",
expected_message="You do not have permission to edit this account.",
expected_category="danger",
)
@pytest.mark.vcr()
def test_user_settings_otp_disable_invalid_form(client, logged_in_dummy_user):
"""Test an invalid form when disabling an otp token"""
result = client.post("/user/dummy/settings/otp/disable/", data={})
assert_redirects_with_flash(
result,
expected_url="/user/dummy/settings/otp/",
expected_message="Token must not be empty",
expected_category="danger",
)
@pytest.mark.vcr()
def test_user_settings_otp_disable_ipaerror(
client, logged_in_dummy_user, dummy_user_with_2_otp
):
"""Test failure when disabling an otptoken"""
with mock.patch("noggin.security.ipa.Client.otptoken_mod") as method:
method.side_effect = python_freeipa.exceptions.FreeIPAError(
message="Cannot disable the token.", code="4242"
)
result = client.post(
"/user/dummy/settings/otp/disable/",
data={"token": dummy_<PASSWORD>_with_2_otp[1].uniqueid},
)
assert_redirects_with_flash(
result,
expected_url="/user/dummy/settings/otp/",
expected_message="Cannot disable the token.",
expected_category="danger",
)
@pytest.mark.vcr()
def test_user_settings_otp_disable(client, logged_in_dummy_user, dummy_user_with_2_otp):
"""Test deleting an otptoken"""
# add another OTP Token
result = client.get("/user/dummy/settings/otp/")
page = BeautifulSoup(result.data, "html.parser")
tokenlist = page.select("div.list-group .list-group-item")
# check we are showing 2 tokens
assert len(tokenlist) == 2
# grab the id of the first token
tokenid = tokenlist[0].select(".text-monospace")[0].get_text(strip=True)
# disable that token
result = client.post(
"/user/dummy/settings/otp/disable/",
data={"token": tokenid},
follow_redirects=True,
)
page = BeautifulSoup(result.data, "html.parser")
tokenlist = page.select("div.list-group .list-group-item")
# check we are still showing 2 item
assert len(tokenlist) == 2
@pytest.mark.vcr()
def test_user_settings_otp_disable_lasttoken(client, logged_in_dummy_user_with_otp):
"""Test trying to disable the last token"""
result = client.post(
"/user/dummy/settings/otp/disable/",
data={"token": logged_in_dummy_user_with_otp.uniqueid},
)
assert_redirects_with_flash(
result,
expected_url="/user/dummy/settings/otp/",
expected_message="Sorry, You cannot disable your last active token.",
expected_category="warning",
)
@pytest.mark.vcr()
def test_user_settings_otp_disable_ipabadrequest(
client, logged_in_dummy_user, dummy_user_with_2_otp
):
"""Test IPA badrequest failure when disabling an otptoken"""
with mock.patch("noggin.security.ipa.Client.otptoken_mod") as method:
method.side_effect = python_freeipa.exceptions.BadRequest(
message="Cannot delete the token.", code="4242"
)
result = client.post(
"/user/dummy/settings/otp/disable/",
data={"token": "<KEY>2-89d7-8<PASSWORD>ba"},
)
assert_redirects_with_flash(
result,
expected_url="/user/dummy/settings/otp/",
expected_message="Cannot disable the token.",
expected_category="danger",
)
@pytest.mark.vcr()
def test_user_settings_otp_delete_no_permission(client, logged_in_dummy_user):
"""Verify that another user can't delete an otp token."""
result = client.post(
"/user/dudemcpants/settings/otp/delete/", data={"token": "<KEY>"}
)
assert_redirects_with_flash(
result,
expected_url="/user/dudemcpants/",
expected_message="You do not have permission to edit this account.",
expected_category="danger",
)
@pytest.mark.vcr()
def test_user_settings_otp_delete_invalid_form(client, logged_in_dummy_user):
"""Test an invalid form when deleting an otp token"""
result = client.post("/user/dummy/settings/otp/delete/", data={})
assert_redirects_with_flash(
result,
expected_url="/user/dummy/settings/otp/",
expected_message="Token must not be empty",
expected_category="danger",
)
@pytest.mark.vcr()
def test_user_settings_otp_delete_ipafailure(
client, logged_in_dummy_user, dummy_user_with_2_otp
):
"""Test IPA failure when deleting an otptoken"""
with mock.patch("noggin.security.ipa.Client.otptoken_del") as method:
method.side_effect = python_freeipa.exceptions.FreeIPAError(
message="Cannot delete the token.", code="4242"
)
result = client.post(
"/user/dummy/settings/otp/delete/",
data={"token": "0<PASSWORD>-<PASSWORD>-<PASSWORD>"},
)
assert_redirects_with_flash(
result,
expected_url="/user/dummy/settings/otp/",
expected_message="Cannot delete the token.",
expected_category="danger",
)
@pytest.mark.vcr()
def test_user_settings_otp_delete_ipabadrequest(
client, logged_in_dummy_user, dummy_user_with_2_otp
):
"""Test IPA badrequest failure when deleting an otptoken"""
with mock.patch("noggin.security.ipa.Client.otptoken_del") as method:
method.side_effect = python_freeipa.exceptions.BadRequest(
message="Cannot delete the token.", code="4242"
)
result = client.post(
"/user/dummy/settings/otp/delete/",
data={"token": "0<PASSWORD>"},
)
assert_redirects_with_flash(
result,
expected_url="/user/dummy/settings/otp/",
expected_message="Cannot delete the token.",
expected_category="danger",
)
@pytest.mark.vcr()
def test_user_settings_otp_delete(client, logged_in_dummy_user, dummy_user_with_2_otp):
"""Test deleting an otptoken"""
result = client.get("/user/dummy/settings/otp/")
page = BeautifulSoup(result.data, "html.parser")
tokenlist = page.select("div.list-group .list-group-item")
# check we are showing 2 tokens
assert len(tokenlist) == 2
# grab the id of the first token
tokenid = tokenlist[0].select(".text-monospace")[0].get_text(strip=True)
# delete that token
result = client.post(
"/user/dummy/settings/otp/delete/",
data={"token": tokenid},
follow_redirects=True,
)
page = BeautifulSoup(result.data, "html.parser")
tokenlist = page.select("div.list-group .list-group-item")
# check we are showing 1 item
assert len(tokenlist) == 1
# check the one item is not the no tokens message
assert "You have no OTP tokens" not in tokenlist[0].get_text(strip=True)
@pytest.mark.vcr()
def test_user_settings_otp_delete_lasttoken(
client, logged_in_dummy_user, logged_in_dummy_user_with_otp
):
"""Test trying to delete the last token"""
result = client.get("/user/dummy/settings/otp/")
page = BeautifulSoup(result.data, "html.parser")
tokenlist = page.select("div.list-group .list-group-item")
# check we are showing 1 token
assert len(tokenlist) == 1
# check the one item is not the no tokens message
assert "You have no OTP tokens" not in tokenlist[0].get_text(strip=True)
# grab the id of the token
tokenid = tokenlist[0].select(".text-monospace")[0].get_text(strip=True)
# try to delete that token
result = client.post("/user/dummy/settings/otp/delete/", data={"token": tokenid})
assert_redirects_with_flash(
result,
expected_url="/user/dummy/settings/otp/",
expected_message="Sorry, You cannot delete your last active token.",
expected_category="warning",
)
@pytest.mark.vcr()
def test_user_settings_otp_enable_no_permission(client, logged_in_dummy_user):
"""Verify that another user can't enable an otp token."""
result = client.post(
"/user/dudemcpants/settings/otp/enable/",
data={"description": "pants token", "password": "<PASSWORD>"},
)
assert_redirects_with_flash(
result,
expected_url="/user/dudemcpants/",
expected_message="You do not have permission to edit this account.",
expected_category="danger",
)
@pytest.mark.vcr()
def test_user_settings_otp_enable_invalid_form(client, logged_in_dummy_user):
"""Test an invalid form when enabling an otp token"""
result = client.post("/user/dummy/settings/otp/enable/", data={})
assert_redirects_with_flash(
result,
expected_url="/user/dummy/settings/otp/",
expected_message="Token must not be empty",
expected_category="danger",
)
@pytest.mark.vcr()
def test_user_settings_otp_enable_ipaerror(
client, logged_in_dummy_user, dummy_user_with_2_otp
):
"""Test failure when enabling an otptoken"""
with mock.patch("noggin.security.ipa.Client.otptoken_mod") as method:
method.side_effect = python_freeipa.exceptions.FreeIPAError(
message="Cannot enable the token.", code="4242"
)
result = client.post(
"/user/dummy/settings/otp/enable/",
data={"token": dummy_user_with_2_otp[1].uniqueid},
)
assert_redirects_with_flash(
result,
expected_url="/user/dummy/settings/otp/",
expected_message="Cannot enable the token. Cannot enable the token.",
expected_category="danger",
)
@pytest.mark.vcr()
def test_user_settings_otp_enable(client, logged_in_dummy_user, dummy_user_with_2_otp):
"""Test enabling an otptoken"""
# add another OTP Token
result = client.get("/user/dummy/settings/otp/")
page = BeautifulSoup(result.data, "html.parser")
tokenlist = page.select("div.list-group .list-group-item")
# check we are showing 2 tokens
assert len(tokenlist) == 2
# grab the id of the first token
tokenid = tokenlist[0].select(".text-monospace")[0].get_text(strip=True)
# disable that token
result = client.post(
"/user/dummy/settings/otp/disable/",
data={"token": tokenid},
follow_redirects=True,
)
page = BeautifulSoup(result.data, "html.parser")
# select all the tokens, disabled and enabled
tokenlist = page.select("div.list-group .list-group-item")
# check we are showing 2 tokens
assert len(tokenlist) == 2
# select just the disabled tokens
tokenlist = page.select("div.list-group .list-group-item.text-muted")
# check we are showing 1 disabled item
assert len(tokenlist) == 1
# enable that token
result = client.post(
"/user/dummy/settings/otp/enable/",
data={"token": tokenid},
follow_redirects=True,
)
page = BeautifulSoup(result.data, "html.parser")
# select all the tokens, disabled and enabled
tokenlist = page.select("div.list-group .list-group-item")
# check we are showing 2 tokens
assert len(tokenlist) == 2
# try to select just the disabled tokens
tokenlist = page.select("div.list-group .list-group-item.text-muted")
# check we are showing 0 disabled tokens
assert len(tokenlist) == 0
@pytest.mark.vcr()
def test_user_settings_otp_rename(client, logged_in_dummy_user_with_otp):
"""Test renaming an otp token"""
tokenid = logged_in_dummy_user_with_otp.uniqueid
# rename the token
result = client.post(
"/user/dummy/settings/otp/rename/",
data={"token": tokenid, "description": "the new name"},
follow_redirects=True,
)
page = BeautifulSoup(result.data, "html.parser")
tokenlist = page.select("div.list-group .list-group-item")
assert len(tokenlist) == 1
desc = (
tokenlist[0]
.select("div[data-role='token-description']")[0]
.get_text(strip=True)
)
assert desc == "the new name"
@pytest.mark.vcr()
def test_user_settings_otp_rename_no_change(client, logged_in_dummy_user_with_otp):
    """Renaming a token to its current description is a harmless no-op."""
    token_id = logged_in_dummy_user_with_otp.uniqueid
    current_description = logged_in_dummy_user_with_otp.description
    # Post the rename form using the description the token already has.
    response = client.post(
        "/user/dummy/settings/otp/rename/",
        data={"token": token_id, "description": current_description},
        follow_redirects=True,
    )
    soup = BeautifulSoup(response.data, "html.parser")
    items = soup.select("div.list-group .list-group-item")
    # The token list is unchanged: one token, same description.
    assert len(items) == 1
    description_node = items[0].select("div[data-role='token-description']")[0]
    shown = description_node.get_text(strip=True)
    assert shown == current_description
@pytest.mark.vcr()
def test_user_settings_otp_rename_ipaerror(client, logged_in_dummy_user_with_otp):
    """Test failure when renaming an otptoken"""
    tokenid = logged_in_dummy_user_with_otp.uniqueid
    # Force the IPA client call to fail so the view's error branch runs.
    with mock.patch("noggin.security.ipa.Client.otptoken_mod") as method:
        method.side_effect = python_freeipa.exceptions.FreeIPAError(
            message="Whoops", code="4242"
        )
        result = client.post(
            "/user/dummy/settings/otp/rename/",
            data={"token": tokenid},
        )
    # The view should redirect back and flash a danger message.
    assert_redirects_with_flash(
        result,
        expected_url="/user/dummy/settings/otp/",
        expected_message="Cannot rename the token.",
        expected_category="danger",
    )
@pytest.mark.vcr()
def test_user_settings_otp_rename_invalid_form(client, logged_in_dummy_user_with_otp):
    """Posting an empty rename form is rejected with a flashed error."""
    # Submit the rename endpoint with no form fields at all.
    response = client.post("/user/dummy/settings/otp/rename/", data={})
    assert_redirects_with_flash(
        response,
        expected_url="/user/dummy/settings/otp/",
        expected_message="Token must not be empty",
        expected_category="danger",
    )
|
[
"pytest.mark.vcr",
"noggin.representation.otptoken.OTPToken",
"python_freeipa.exceptions.BadRequest",
"noggin.app.ipa_admin.otptoken_del",
"urllib.parse.parse_qs",
"unittest.mock.patch",
"python_freeipa.exceptions.ValidationError",
"python_freeipa.exceptions.FreeIPAError",
"bs4.BeautifulSoup",
"pyotp.TOTP",
"urllib.parse.urlparse"
] |
[((955, 972), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (970, 972), False, 'import pytest\n'), ((2052, 2069), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (2067, 2069), False, 'import pytest\n'), ((2497, 2514), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (2512, 2514), False, 'import pytest\n'), ((3725, 3742), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (3740, 3742), False, 'import pytest\n'), ((5071, 5088), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (5086, 5088), False, 'import pytest\n'), ((6346, 6363), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (6361, 6363), False, 'import pytest\n'), ((7328, 7345), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (7343, 7345), False, 'import pytest\n'), ((8084, 8101), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (8099, 8101), False, 'import pytest\n'), ((9057, 9074), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (9072, 9074), False, 'import pytest\n'), ((9718, 9735), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (9733, 9735), False, 'import pytest\n'), ((10033, 10050), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (10048, 10050), False, 'import pytest\n'), ((10478, 10495), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (10493, 10495), False, 'import pytest\n'), ((11004, 11021), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (11019, 11021), False, 'import pytest\n'), ((11854, 11871), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (11869, 11871), False, 'import pytest\n'), ((12383, 12400), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (12398, 12400), False, 'import pytest\n'), ((12806, 12823), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (12821, 12823), False, 'import pytest\n'), ((13548, 13565), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (13563, 13565), False, 'import pytest\n'), ((14431, 14448), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), 
'()\n', (14446, 14448), False, 'import pytest\n'), ((14946, 14963), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (14961, 14963), False, 'import pytest\n'), ((15693, 15710), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (15708, 15710), False, 'import pytest\n'), ((16172, 16189), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (16187, 16189), False, 'import pytest\n'), ((16592, 16609), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (16607, 16609), False, 'import pytest\n'), ((17331, 17348), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (17346, 17348), False, 'import pytest\n'), ((18060, 18077), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (18075, 18077), False, 'import pytest\n'), ((19038, 19055), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (19053, 19055), False, 'import pytest\n'), ((20034, 20051), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (20049, 20051), False, 'import pytest\n'), ((20560, 20577), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (20575, 20577), False, 'import pytest\n'), ((20980, 20997), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (20995, 20997), False, 'import pytest\n'), ((21736, 21753), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (21751, 21753), False, 'import pytest\n'), ((23447, 23464), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (23462, 23464), False, 'import pytest\n'), ((24136, 24153), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (24151, 24153), False, 'import pytest\n'), ((24876, 24893), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (24891, 24893), False, 'import pytest\n'), ((25596, 25613), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (25611, 25613), False, 'import pytest\n'), ((680, 696), 'noggin.representation.otptoken.OTPToken', 'OTPToken', (['result'], {}), '(result)\n', (688, 696), False, 'from noggin.representation.otptoken import OTPToken\n'), ((938, 951), 'pyotp.TOTP', 
'TOTP', (['"""<KEY>"""'], {}), "('<KEY>')\n", (942, 951), False, 'from pyotp import TOTP\n'), ((1177, 1218), 'bs4.BeautifulSoup', 'BeautifulSoup', (['result.data', '"""html.parser"""'], {}), "(result.data, 'html.parser')\n", (1190, 1218), False, 'from bs4 import BeautifulSoup\n'), ((1829, 1870), 'bs4.BeautifulSoup', 'BeautifulSoup', (['result.data', '"""html.parser"""'], {}), "(result.data, 'html.parser')\n", (1842, 1870), False, 'from bs4 import BeautifulSoup\n'), ((2870, 2911), 'bs4.BeautifulSoup', 'BeautifulSoup', (['result.data', '"""html.parser"""'], {}), "(result.data, 'html.parser')\n", (2883, 2911), False, 'from bs4 import BeautifulSoup\n'), ((4414, 4455), 'bs4.BeautifulSoup', 'BeautifulSoup', (['result.data', '"""html.parser"""'], {}), "(result.data, 'html.parser')\n", (4427, 4455), False, 'from bs4 import BeautifulSoup\n'), ((5574, 5615), 'bs4.BeautifulSoup', 'BeautifulSoup', (['result.data', '"""html.parser"""'], {}), "(result.data, 'html.parser')\n", (5587, 5615), False, 'from bs4 import BeautifulSoup\n'), ((6870, 6911), 'bs4.BeautifulSoup', 'BeautifulSoup', (['result.data', '"""html.parser"""'], {}), "(result.data, 'html.parser')\n", (6883, 6911), False, 'from bs4 import BeautifulSoup\n'), ((7783, 7824), 'bs4.BeautifulSoup', 'BeautifulSoup', (['result.data', '"""html.parser"""'], {}), "(result.data, 'html.parser')\n", (7796, 7824), False, 'from bs4 import BeautifulSoup\n'), ((8554, 8595), 'bs4.BeautifulSoup', 'BeautifulSoup', (['result.data', '"""html.parser"""'], {}), "(result.data, 'html.parser')\n", (8567, 8595), False, 'from bs4 import BeautifulSoup\n'), ((8722, 8748), 'urllib.parse.urlparse', 'urlparse', (["otp_uri['value']"], {}), "(otp_uri['value'])\n", (8730, 8748), False, 'from urllib.parse import parse_qs, urlparse\n'), ((8974, 9004), 'urllib.parse.parse_qs', 'parse_qs', (['parsed_otp_uri.query'], {}), '(parsed_otp_uri.query)\n', (8982, 9004), False, 'from urllib.parse import parse_qs, urlparse\n'), ((13784, 13825), 'bs4.BeautifulSoup', 
'BeautifulSoup', (['result.data', '"""html.parser"""'], {}), "(result.data, 'html.parser')\n", (13797, 13825), False, 'from bs4 import BeautifulSoup\n'), ((14251, 14292), 'bs4.BeautifulSoup', 'BeautifulSoup', (['result.data', '"""html.parser"""'], {}), "(result.data, 'html.parser')\n", (14264, 14292), False, 'from bs4 import BeautifulSoup\n'), ((18267, 18308), 'bs4.BeautifulSoup', 'BeautifulSoup', (['result.data', '"""html.parser"""'], {}), "(result.data, 'html.parser')\n", (18280, 18308), False, 'from bs4 import BeautifulSoup\n'), ((18732, 18773), 'bs4.BeautifulSoup', 'BeautifulSoup', (['result.data', '"""html.parser"""'], {}), "(result.data, 'html.parser')\n", (18745, 18773), False, 'from bs4 import BeautifulSoup\n'), ((19280, 19321), 'bs4.BeautifulSoup', 'BeautifulSoup', (['result.data', '"""html.parser"""'], {}), "(result.data, 'html.parser')\n", (19293, 19321), False, 'from bs4 import BeautifulSoup\n'), ((21971, 22012), 'bs4.BeautifulSoup', 'BeautifulSoup', (['result.data', '"""html.parser"""'], {}), "(result.data, 'html.parser')\n", (21984, 22012), False, 'from bs4 import BeautifulSoup\n'), ((22438, 22479), 'bs4.BeautifulSoup', 'BeautifulSoup', (['result.data', '"""html.parser"""'], {}), "(result.data, 'html.parser')\n", (22451, 22479), False, 'from bs4 import BeautifulSoup\n'), ((23025, 23066), 'bs4.BeautifulSoup', 'BeautifulSoup', (['result.data', '"""html.parser"""'], {}), "(result.data, 'html.parser')\n", (23038, 23066), False, 'from bs4 import BeautifulSoup\n'), ((23835, 23876), 'bs4.BeautifulSoup', 'BeautifulSoup', (['result.data', '"""html.parser"""'], {}), "(result.data, 'html.parser')\n", (23848, 23876), False, 'from bs4 import BeautifulSoup\n'), ((24577, 24618), 'bs4.BeautifulSoup', 'BeautifulSoup', (['result.data', '"""html.parser"""'], {}), "(result.data, 'html.parser')\n", (24590, 24618), False, 'from bs4 import BeautifulSoup\n'), ((761, 799), 'noggin.app.ipa_admin.otptoken_del', 'ipa_admin.otptoken_del', (['token.uniqueid'], {}), 
'(token.uniqueid)\n', (783, 799), False, 'from noggin.app import ipa_admin\n'), ((11160, 11213), 'unittest.mock.patch', 'mock.patch', (['"""noggin.security.ipa.Client.otptoken_add"""'], {}), "('noggin.security.ipa.Client.otptoken_add')\n", (11170, 11213), False, 'from unittest import mock\n'), ((11254, 11396), 'python_freeipa.exceptions.ValidationError', 'python_freeipa.exceptions.ValidationError', ([], {'message': "{'member': {'user': [('testuser', 'something went wrong')], 'group': []}}", 'code': '"""4242"""'}), "(message={'member': {'user': [(\n 'testuser', 'something went wrong')], 'group': []}}, code='4242')\n", (11295, 11396), False, 'import python_freeipa\n'), ((12987, 13040), 'unittest.mock.patch', 'mock.patch', (['"""noggin.security.ipa.Client.otptoken_mod"""'], {}), "('noggin.security.ipa.Client.otptoken_mod')\n", (12997, 13040), False, 'from unittest import mock\n'), ((13081, 13173), 'python_freeipa.exceptions.FreeIPAError', 'python_freeipa.exceptions.FreeIPAError', ([], {'message': '"""Cannot disable the token."""', 'code': '"""4242"""'}), "(message='Cannot disable the token.',\n code='4242')\n", (13119, 13173), False, 'import python_freeipa\n'), ((15147, 15200), 'unittest.mock.patch', 'mock.patch', (['"""noggin.security.ipa.Client.otptoken_mod"""'], {}), "('noggin.security.ipa.Client.otptoken_mod')\n", (15157, 15200), False, 'from unittest import mock\n'), ((15241, 15330), 'python_freeipa.exceptions.BadRequest', 'python_freeipa.exceptions.BadRequest', ([], {'message': '"""Cannot delete the token."""', 'code': '"""4242"""'}), "(message='Cannot delete the token.',\n code='4242')\n", (15277, 15330), False, 'import python_freeipa\n'), ((16777, 16830), 'unittest.mock.patch', 'mock.patch', (['"""noggin.security.ipa.Client.otptoken_del"""'], {}), "('noggin.security.ipa.Client.otptoken_del')\n", (16787, 16830), False, 'from unittest import mock\n'), ((16871, 16962), 'python_freeipa.exceptions.FreeIPAError', 'python_freeipa.exceptions.FreeIPAError', ([], 
{'message': '"""Cannot delete the token."""', 'code': '"""4242"""'}), "(message='Cannot delete the token.',\n code='4242')\n", (16909, 16962), False, 'import python_freeipa\n'), ((17530, 17583), 'unittest.mock.patch', 'mock.patch', (['"""noggin.security.ipa.Client.otptoken_del"""'], {}), "('noggin.security.ipa.Client.otptoken_del')\n", (17540, 17583), False, 'from unittest import mock\n'), ((17624, 17713), 'python_freeipa.exceptions.BadRequest', 'python_freeipa.exceptions.BadRequest', ([], {'message': '"""Cannot delete the token."""', 'code': '"""4242"""'}), "(message='Cannot delete the token.',\n code='4242')\n", (17660, 17713), False, 'import python_freeipa\n'), ((21159, 21212), 'unittest.mock.patch', 'mock.patch', (['"""noggin.security.ipa.Client.otptoken_mod"""'], {}), "('noggin.security.ipa.Client.otptoken_mod')\n", (21169, 21212), False, 'from unittest import mock\n'), ((21253, 21344), 'python_freeipa.exceptions.FreeIPAError', 'python_freeipa.exceptions.FreeIPAError', ([], {'message': '"""Cannot enable the token."""', 'code': '"""4242"""'}), "(message='Cannot enable the token.',\n code='4242')\n", (21291, 21344), False, 'import python_freeipa\n'), ((25088, 25141), 'unittest.mock.patch', 'mock.patch', (['"""noggin.security.ipa.Client.otptoken_mod"""'], {}), "('noggin.security.ipa.Client.otptoken_mod')\n", (25098, 25141), False, 'from unittest import mock\n'), ((25182, 25251), 'python_freeipa.exceptions.FreeIPAError', 'python_freeipa.exceptions.FreeIPAError', ([], {'message': '"""Whoops"""', 'code': '"""4242"""'}), "(message='Whoops', code='4242')\n", (25220, 25251), False, 'import python_freeipa\n'), ((3551, 3577), 'urllib.parse.urlparse', 'urlparse', (["otp_uri['value']"], {}), "(otp_uri['value'])\n", (3559, 3577), False, 'from urllib.parse import parse_qs, urlparse\n'), ((6172, 6198), 'urllib.parse.urlparse', 'urlparse', (["otp_uri['value']"], {}), "(otp_uri['value'])\n", (6180, 6198), False, 'from urllib.parse import parse_qs, urlparse\n')]
|
# Copyright (c) 2017-2018 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from cloudify.state import current_ctx
from cloudify.exceptions import NonRecoverableError
from cloudify_rest_client.exceptions import CloudifyClientError
from ..tasks import execute_start
from .. import DeploymentProxyBase
from .base import DeploymentProxyTestBase
from .client_mock import MockCloudifyRestClient
from ..constants import EXTERNAL_RESOURCE, NIP_TYPE, DEP_TYPE
from cloudify_common_sdk._compat import text_type
# Shared mock whose side effect raises CloudifyClientError on every call;
# tests attach it to a rest-client method to simulate an API failure.
REST_CLIENT_EXCEPTION = \
    mock.MagicMock(side_effect=CloudifyClientError('Mistake'))
class TestExecute(DeploymentProxyTestBase):
    """Unit tests for the ``execute_start`` task of the deployment proxy."""

    # Handle to the active ``time.sleep`` patcher (installed in setUp,
    # removed in tearDown) so polling loops do not slow the tests down.
    sleep_mock = None

    def setUp(self):
        """Patch ``time.sleep`` with a no-op mock for the whole test."""
        super(TestExecute, self).setUp()
        mock_sleep = mock.MagicMock()
        self.sleep_mock = mock.patch('time.sleep', mock_sleep)
        self.sleep_mock.start()

    def tearDown(self):
        """Undo the ``time.sleep`` patch installed by setUp."""
        if self.sleep_mock:
            self.sleep_mock.stop()
            self.sleep_mock = None
        super(TestExecute, self).tearDown()

    def test_execute_start_rest_client_error(self):
        # Tests that execute start fails on rest client error
        test_name = 'test_execute_start_rest_client_error'
        _ctx = self.get_mock_ctx(test_name)
        current_ctx.set(_ctx)
        _ctx.instance.runtime_properties['deployment'] = {}
        with mock.patch('cloudify.manager.get_rest_client') as mock_client:
            cfy_mock_client = MockCloudifyRestClient()
            # Make executions.start raise CloudifyClientError.
            cfy_mock_client.executions.start = REST_CLIENT_EXCEPTION
            mock_client.return_value = cfy_mock_client
            error = self.assertRaises(NonRecoverableError,
                                      execute_start,
                                      deployment_id=test_name,
                                      workflow_id='install')
            self.assertIn('action start failed', text_type(error))
        del _ctx, mock_client

    def test_execute_start_timeout(self):
        # Tests that execute start fails on timeout
        test_name = 'test_execute_start_timeout'
        _ctx = self.get_mock_ctx(test_name)
        current_ctx.set(_ctx)
        _ctx.instance.runtime_properties['deployment'] = {}
        with mock.patch('cloudify.manager.get_rest_client') as mock_client:
            mock_client.return_value = MockCloudifyRestClient()
            poll_with_timeout_test = \
                'cloudify_deployment_proxy.polling.poll_with_timeout'
            # Polling never succeeds, so the tiny timeout must trip.
            with mock.patch(poll_with_timeout_test) as poll:
                poll.return_value = False
                error = self.assertRaises(NonRecoverableError,
                                          execute_start,
                                          deployment_id=test_name,
                                          workflow_id='install',
                                          timeout=.001)
                self.assertIn('Execution timeout', text_type(error))
        del _ctx, mock_client

    def test_execute_start_succeeds(self):
        # Tests that execute start succeeds
        test_name = 'test_execute_start_succeeds'
        _ctx = self.get_mock_ctx(test_name)
        current_ctx.set(_ctx)
        _ctx.instance.runtime_properties['deployment'] = {}
        with mock.patch('cloudify.manager.get_rest_client') as mock_client:
            mock_client.return_value = MockCloudifyRestClient()
            poll_with_timeout_test = \
                'cloudify_deployment_proxy.polling.poll_with_timeout'
            with mock.patch(poll_with_timeout_test) as poll:
                poll.return_value = True
                output = execute_start(operation='execute_workflow',
                                       deployment_id=test_name,
                                       workflow_id='install',
                                       timeout=.001)
                self.assertTrue(output)
        del _ctx, mock_client

    def test_execute_deployment_not_ready(self):
        # Tests that execute start returns None while another (non-system)
        # execution for the deployment is still running.
        test_name = 'test_execute_deployment_not_ready'
        _ctx = self.get_mock_ctx(test_name)
        current_ctx.set(_ctx)
        _ctx.instance.runtime_properties['deployment'] = {}
        _ctx.instance.runtime_properties['resource_config'] = {
            'deployment': {
                EXTERNAL_RESOURCE: True
            }
        }
        _ctx.instance.runtime_properties['reexecute'] = True
        with mock.patch('cloudify.manager.get_rest_client') as mock_client:
            cfy_mock_client = MockCloudifyRestClient()
            # Fake an executions list that reports a 'started' execution.
            list_response = cfy_mock_client.deployments.list()
            list_response[0]['id'] = test_name
            list_response[0]['is_system_workflow'] = False
            list_response[0]['status'] = 'started'
            list_response[0]['deployment_id'] = test_name

            def mock_return(*args, **kwargs):
                del args, kwargs
                return list_response

            cfy_mock_client.executions.list = mock_return
            mock_client.return_value = cfy_mock_client
            output = execute_start(operation='execute_workflow',
                                   deployment_id=test_name,
                                   workflow_id='install',
                                   timeout=.001)
            self.assertIsNone(output)
        del _ctx, mock_client

    def test_execute_start_succeeds_not_finished(self):
        # Tests that execute start succeeds
        test_name = 'test_execute_start_succeeds_not_finished'
        _ctx = self.get_mock_ctx(test_name)
        current_ctx.set(_ctx)
        _ctx.instance.runtime_properties['deployment'] = {}
        with mock.patch('cloudify.manager.get_rest_client') as mock_client:
            mock_client.return_value = MockCloudifyRestClient()
            poll_with_timeout_test = \
                'cloudify_deployment_proxy.DeploymentProxyBase.' \
                'verify_execution_successful'
            with mock.patch(poll_with_timeout_test) as poll:
                poll.return_value = False
                output = execute_start(operation='execute_workflow',
                                       deployment_id=test_name,
                                       workflow_id='install',
                                       timeout=.001)
                self.assertTrue(output)
        del _ctx, mock_client

    def test_execute_start_succeeds_node_instance_proxy(self):
        # Tests that execute start succeeds
        test_name = 'test_execute_start_succeeds_node_instance_proxy'
        _ctx = self.get_mock_ctx(test_name, node_type=NIP_TYPE)
        current_ctx.set(_ctx)
        # _ctx.node.type = NIP_TYPE
        ni = {}
        _ctx.node.properties['resource_config']['node_instance'] = ni
        _ctx.instance.runtime_properties['deployment'] = {}
        with mock.patch('cloudify.manager.get_rest_client') as mock_client:
            mock_client.return_value = MockCloudifyRestClient()
            poll_with_timeout_test = \
                'cloudify_deployment_proxy.polling.poll_with_timeout'
            with mock.patch(poll_with_timeout_test) as poll:
                poll.return_value = True
                output = execute_start(operation='execute_workflow',
                                       deployment_id=test_name,
                                       workflow_id='install',
                                       timeout=.001)
                self.assertTrue(output)
        del _ctx, mock_client

    def test_execute_start_succeeds_weird_node_type(self):
        # Tests that execute start raises for an unsupported node type.
        test_name = 'test_execute_start_succeeds_weird_node_type'
        _ctx = self.get_mock_ctx(test_name, node_type='node.weird')
        current_ctx.set(_ctx)
        # _ctx.node.type = 'cloudify.nodes.WeirdNodeType'
        _ctx.instance.runtime_properties['deployment'] = {}
        with mock.patch('cloudify.manager.get_rest_client') as mock_client:
            mock_client.return_value = MockCloudifyRestClient()
            poll_with_timeout_test = \
                'cloudify_deployment_proxy.polling.poll_with_timeout'
            with mock.patch(poll_with_timeout_test) as poll:
                poll.return_value = True
                self.assertRaises(NonRecoverableError,
                                  execute_start,
                                  operation='execute_workflow',
                                  deployment_id=test_name,
                                  workflow_id='install',
                                  timeout=.001)
        del _ctx, mock_client

    def test_post_execute_client_error(self):
        # Tests that a client error during post-execute surfaces as
        # NonRecoverableError.
        test_name = 'test_post_execute_client_error'
        _ctx = self.get_mock_ctx(test_name, node_type=DEP_TYPE)
        current_ctx.set(_ctx)
        _ctx.instance.runtime_properties['deployment'] = dict()
        cfy_mock_client = MockCloudifyRestClient()
        # Outputs lookup fails with a CloudifyClientError.
        cfy_mock_client.deployments.outputs.get = \
            mock.MagicMock(side_effect=CloudifyClientError('Mistake'))
        poll_with_timeout_test = \
            'cloudify_deployment_proxy.DeploymentProxyBase.' \
            'verify_execution_successful'
        with mock.patch(
                'cloudify_deployment_proxy.CloudifyClient'
        ) as mock_local_client:
            mock_local_client.return_value = cfy_mock_client
            with mock.patch(poll_with_timeout_test) as poll:
                poll.return_value = False
                self.assertRaises(NonRecoverableError,
                                  execute_start,
                                  operation='execute_workflow',
                                  deployment_id=test_name,
                                  workflow_id='install',
                                  client={'host': 'localhost'},
                                  timeout=.001)
        del _ctx

    def test_execute_start_succeeds_node_instance_proxy_matches(self):
        # Tests that execute start succeeds
        test_name = 'test_execute_start_succeeds_node_instance_proxy'
        _ctx = self.get_mock_ctx(test_name, node_type=NIP_TYPE)
        current_ctx.set(_ctx)
        # _ctx.node.type = NIP_TYPE
        ni = {'id': test_name}
        _ctx.node.properties['resource_config']['node_instance'] = ni
        _ctx.instance.runtime_properties['deployment'] = {}
        with mock.patch('cloudify.manager.get_rest_client') as mock_client:
            mock_client.return_value = MockCloudifyRestClient()
            poll_with_timeout_test = \
                'cloudify_deployment_proxy.polling.poll_with_timeout'
            with mock.patch(poll_with_timeout_test) as poll:
                poll.return_value = True
                output = execute_start(operation='execute_workflow',
                                       deployment_id=test_name,
                                       workflow_id='install',
                                       timeout=.001)
                self.assertTrue(output)
        del _ctx, mock_client

    def _test_output_mapping(self, all_outputs, output_mapping,
                             deployment_outputs, expected_outputs):
        """Drive post_execute_deployment_proxy with the given output
        config and assert the runtime-property outputs that result.

        :param all_outputs: value of the deployment 'all_outputs' flag.
        :param output_mapping: the deployment 'outputs' mapping config.
        :param deployment_outputs: outputs the mocked client returns.
        :param expected_outputs: outputs expected in runtime properties.
        """
        _ctx = self.get_mock_ctx('test_post_execute_deployment_proxy',
                                 node_type=NIP_TYPE)
        _ctx.node.properties['resource_config']['deployment']['outputs'] = \
            output_mapping
        _ctx.node.properties['resource_config']['deployment']['all_outputs'] =\
            all_outputs
        _ctx.instance.runtime_properties['deployment'] = {}
        cfy_mock_client = MockCloudifyRestClient()
        cfy_mock_client.deployments.outputs.get = \
            mock.MagicMock(return_value={'outputs': deployment_outputs})
        with mock.patch('cloudify.manager.get_rest_client') as mock_client:
            mock_client.return_value = cfy_mock_client
            current_ctx.set(_ctx)
            self.addCleanup(current_ctx.clear)
            d = DeploymentProxyBase({})
            d.post_execute_deployment_proxy()
            self.assertEqual(
                expected_outputs,
                _ctx.instance.runtime_properties['deployment']['outputs'])

    def test_post_execute_deployment_proxy_with_mapping_full(self):
        # Every deployment output is mapped explicitly.
        self._test_output_mapping(
            all_outputs=False,
            output_mapping={
                'key1': 'key1',
                'key2': 'key2'
            }, deployment_outputs={
                'key1': 'value1',
                'key2': 'value2'},
            expected_outputs={
                'key1': 'value1',
                'key2': 'value2'
            }
        )

    def test_post_execute_deployment_proxy_with_mapping_partial(self):
        # Only mapped outputs are copied; unmapped ones are dropped.
        self._test_output_mapping(
            all_outputs=False,
            output_mapping={
                'key1': 'key1'
            }, deployment_outputs={
                'key1': 'value1',
                'key2': 'value2'},
            expected_outputs={
                'key1': 'value1'
            }
        )

    def test_post_execute_deployment_proxy_all_outputs(self):
        # With all_outputs=True every output is copied regardless of mapping.
        self._test_output_mapping(
            all_outputs=True,
            output_mapping={},
            deployment_outputs={
                'key1': 'value1',
                'key2': 'value2'},
            expected_outputs={
                'key1': 'value1',
                'key2': 'value2'
            }
        )
|
[
"cloudify_common_sdk._compat.text_type",
"cloudify.state.current_ctx.set",
"mock.patch",
"cloudify_rest_client.exceptions.CloudifyClientError",
"mock.MagicMock"
] |
[((1118, 1148), 'cloudify_rest_client.exceptions.CloudifyClientError', 'CloudifyClientError', (['"""Mistake"""'], {}), "('Mistake')\n", (1137, 1148), False, 'from cloudify_rest_client.exceptions import CloudifyClientError\n'), ((1303, 1319), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (1317, 1319), False, 'import mock\n'), ((1346, 1382), 'mock.patch', 'mock.patch', (['"""time.sleep"""', 'mock_sleep'], {}), "('time.sleep', mock_sleep)\n", (1356, 1382), False, 'import mock\n'), ((1809, 1830), 'cloudify.state.current_ctx.set', 'current_ctx.set', (['_ctx'], {}), '(_ctx)\n', (1824, 1830), False, 'from cloudify.state import current_ctx\n'), ((2677, 2698), 'cloudify.state.current_ctx.set', 'current_ctx.set', (['_ctx'], {}), '(_ctx)\n', (2692, 2698), False, 'from cloudify.state import current_ctx\n'), ((3710, 3731), 'cloudify.state.current_ctx.set', 'current_ctx.set', (['_ctx'], {}), '(_ctx)\n', (3725, 3731), False, 'from cloudify.state import current_ctx\n'), ((4665, 4686), 'cloudify.state.current_ctx.set', 'current_ctx.set', (['_ctx'], {}), '(_ctx)\n', (4680, 4686), False, 'from cloudify.state import current_ctx\n'), ((6125, 6146), 'cloudify.state.current_ctx.set', 'current_ctx.set', (['_ctx'], {}), '(_ctx)\n', (6140, 6146), False, 'from cloudify.state import current_ctx\n'), ((7172, 7193), 'cloudify.state.current_ctx.set', 'current_ctx.set', (['_ctx'], {}), '(_ctx)\n', (7187, 7193), False, 'from cloudify.state import current_ctx\n'), ((8293, 8314), 'cloudify.state.current_ctx.set', 'current_ctx.set', (['_ctx'], {}), '(_ctx)\n', (8308, 8314), False, 'from cloudify.state import current_ctx\n'), ((9371, 9392), 'cloudify.state.current_ctx.set', 'current_ctx.set', (['_ctx'], {}), '(_ctx)\n', (9386, 9392), False, 'from cloudify.state import current_ctx\n'), ((10724, 10745), 'cloudify.state.current_ctx.set', 'current_ctx.set', (['_ctx'], {}), '(_ctx)\n', (10739, 10745), False, 'from cloudify.state import current_ctx\n'), ((12254, 12314), 'mock.MagicMock', 
'mock.MagicMock', ([], {'return_value': "{'outputs': deployment_outputs}"}), "(return_value={'outputs': deployment_outputs})\n", (12268, 12314), False, 'import mock\n'), ((1905, 1951), 'mock.patch', 'mock.patch', (['"""cloudify.manager.get_rest_client"""'], {}), "('cloudify.manager.get_rest_client')\n", (1915, 1951), False, 'import mock\n'), ((2773, 2819), 'mock.patch', 'mock.patch', (['"""cloudify.manager.get_rest_client"""'], {}), "('cloudify.manager.get_rest_client')\n", (2783, 2819), False, 'import mock\n'), ((3806, 3852), 'mock.patch', 'mock.patch', (['"""cloudify.manager.get_rest_client"""'], {}), "('cloudify.manager.get_rest_client')\n", (3816, 3852), False, 'import mock\n'), ((4978, 5024), 'mock.patch', 'mock.patch', (['"""cloudify.manager.get_rest_client"""'], {}), "('cloudify.manager.get_rest_client')\n", (4988, 5024), False, 'import mock\n'), ((6221, 6267), 'mock.patch', 'mock.patch', (['"""cloudify.manager.get_rest_client"""'], {}), "('cloudify.manager.get_rest_client')\n", (6231, 6267), False, 'import mock\n'), ((7390, 7436), 'mock.patch', 'mock.patch', (['"""cloudify.manager.get_rest_client"""'], {}), "('cloudify.manager.get_rest_client')\n", (7400, 7436), False, 'import mock\n'), ((8447, 8493), 'mock.patch', 'mock.patch', (['"""cloudify.manager.get_rest_client"""'], {}), "('cloudify.manager.get_rest_client')\n", (8457, 8493), False, 'import mock\n'), ((9788, 9842), 'mock.patch', 'mock.patch', (['"""cloudify_deployment_proxy.CloudifyClient"""'], {}), "('cloudify_deployment_proxy.CloudifyClient')\n", (9798, 9842), False, 'import mock\n'), ((10957, 11003), 'mock.patch', 'mock.patch', (['"""cloudify.manager.get_rest_client"""'], {}), "('cloudify.manager.get_rest_client')\n", (10967, 11003), False, 'import mock\n'), ((12329, 12375), 'mock.patch', 'mock.patch', (['"""cloudify.manager.get_rest_client"""'], {}), "('cloudify.manager.get_rest_client')\n", (12339, 12375), False, 'import mock\n'), ((12459, 12480), 'cloudify.state.current_ctx.set', 
'current_ctx.set', (['_ctx'], {}), '(_ctx)\n', (12474, 12480), False, 'from cloudify.state import current_ctx\n'), ((2432, 2448), 'cloudify_common_sdk._compat.text_type', 'text_type', (['error'], {}), '(error)\n', (2441, 2448), False, 'from cloudify_common_sdk._compat import text_type\n'), ((3026, 3060), 'mock.patch', 'mock.patch', (['poll_with_timeout_test'], {}), '(poll_with_timeout_test)\n', (3036, 3060), False, 'import mock\n'), ((4059, 4093), 'mock.patch', 'mock.patch', (['poll_with_timeout_test'], {}), '(poll_with_timeout_test)\n', (4069, 4093), False, 'import mock\n'), ((6517, 6551), 'mock.patch', 'mock.patch', (['poll_with_timeout_test'], {}), '(poll_with_timeout_test)\n', (6527, 6551), False, 'import mock\n'), ((7643, 7677), 'mock.patch', 'mock.patch', (['poll_with_timeout_test'], {}), '(poll_with_timeout_test)\n', (7653, 7677), False, 'import mock\n'), ((8700, 8734), 'mock.patch', 'mock.patch', (['poll_with_timeout_test'], {}), '(poll_with_timeout_test)\n', (8710, 8734), False, 'import mock\n'), ((9601, 9631), 'cloudify_rest_client.exceptions.CloudifyClientError', 'CloudifyClientError', (['"""Mistake"""'], {}), "('Mistake')\n", (9620, 9631), False, 'from cloudify_rest_client.exceptions import CloudifyClientError\n'), ((9966, 10000), 'mock.patch', 'mock.patch', (['poll_with_timeout_test'], {}), '(poll_with_timeout_test)\n', (9976, 10000), False, 'import mock\n'), ((11210, 11244), 'mock.patch', 'mock.patch', (['poll_with_timeout_test'], {}), '(poll_with_timeout_test)\n', (11220, 11244), False, 'import mock\n'), ((3471, 3487), 'cloudify_common_sdk._compat.text_type', 'text_type', (['error'], {}), '(error)\n', (3480, 3487), False, 'from cloudify_common_sdk._compat import text_type\n')]
|
import data_sourcing
from prefect import Flow, task
@task
def sourcing():
    """Prefect task: fetch the raw greenhouse data via the data_sourcing module."""
    sourced = data_sourcing.get()
    return sourced
with Flow("greenhouse") as flow:
sourcing()
flow.run()
|
[
"data_sourcing.get",
"prefect.Flow"
] |
[((88, 107), 'data_sourcing.get', 'data_sourcing.get', ([], {}), '()\n', (105, 107), False, 'import data_sourcing\n'), ((115, 133), 'prefect.Flow', 'Flow', (['"""greenhouse"""'], {}), "('greenhouse')\n", (119, 133), False, 'from prefect import Flow, task\n')]
|
import numpy as np
import matplotlib.pylab as plt
import pandas as pd
import scipy.signal as signal
# Load and concatenate the transaction data for 2008-2010.
data1 = pd.read_csv("transacciones2008.txt",sep = ";",names=['Fecha','Hora','Conversion','Monto'],decimal =",")
data2 = pd.read_csv("transacciones2009.txt",sep = ";",names=['Fecha','Hora','Conversion','Monto'],decimal =",")
data3 = pd.read_csv("transacciones2010.txt",sep = ";",names=['Fecha','Hora','Conversion','Monto'],decimal =",")
# Split the date and time columns on spaces to isolate their usable parts.
a = data1["Fecha"].str.split(" ",expand = True)
b = data1["Hora"].str.split(" ",expand = True)
c = data2["Fecha"].str.split(" ",expand = True)
d = data2["Hora"].str.split(" ",expand = True)
e = data3["Fecha"].str.split(" ",expand = True)
f = data3["Hora"].str.split(" ",expand = True)
# Rebuild each year with a single combined "date time" column.
n1 = pd.DataFrame({'Fecha': a[0] + " " + b[1],'Conversion':data1["Conversion"],'Monto':data1["Monto"]})
n2 = pd.DataFrame({'Fecha': c[0] + " " + d[1],'Conversion':data2["Conversion"],'Monto':data2["Monto"]})
n3 = pd.DataFrame({'Fecha': e[0] + " " + f[1],'Conversion':data3["Conversion"],'Monto':data3["Monto"]})
# Stack the three years, parse timestamps, and persist the merged dataset.
data = pd.concat([n1,n2,n3],ignore_index = True)
data["Fecha"] = pd.to_datetime(data["Fecha"],format='%d/%m/%Y %H:%M:%S')
data.to_csv('datos.csv',index = False)
# Plot the raw exchange-rate signal over time and save it to disk.
plt.figure(figsize=(15,10))
plt.plot(data["Fecha"],data["Conversion"])
plt.savefig("Señal.png")
#Filtros
N1 = 1
Wn1 = 0.1
B1, A1 = signal.butter(N1, Wn1)
precio_filtrado1 = signal.filtfilt(B1,A1, data["Conversion"])
N2 = 2
Wn2 = 0.01
B2, A2 = signal.butter(N2, Wn2)
precio_filtrado2 = signal.filtfilt(B2,A2, data["Conversion"])
N3 = 3
Wn3 = 0.01
B3, A3 = signal.butter(N3, Wn3)
precio_filtrado3 = signal.filtfilt(B3,A3, data["Conversion"])
plt.figure(figsize=(10,15))
plt.subplot(3,1,1)
plt.plot(data["Fecha"],data["Conversion"], label = "Original")
plt.plot(data["Fecha"],precio_filtrado1, label = "Filtrado")
plt.xlabel("Fecha")
plt.ylabel("Precio")
plt.legend(loc=0.0)
plt.subplot(3,1,2)
plt.plot(data["Fecha"],data["Conversion"], label = "Original")
plt.plot(data["Fecha"],precio_filtrado2, label = "Filtrado")
plt.xlabel("Fecha")
plt.ylabel("Precio")
plt.legend(loc=0.0)
plt.subplot(3,1,3)
plt.plot(data["Fecha"],data["Conversion"], label = "Original")
plt.plot(data["Fecha"],precio_filtrado3, label = "Filtrado")
plt.xlabel("Fecha")
plt.ylabel("Precio")
plt.legend(loc=0.0)
plt.savefig("Filtros.png")
# Correlaciones
ruido1 = data["Conversion"]-precio_filtrado1
ruido2 = data["Conversion"]-precio_filtrado2
ruido3 = data["Conversion"]-precio_filtrado3
corr1=np.correlate(ruido1,ruido1,mode="full")
corr2=np.correlate(ruido2,ruido2,mode="full")
corr3=np.correlate(ruido3,ruido3,mode="full")
plt.figure(figsize=(10,15))
plt.subplot(3,1,1)
plt.plot(np.abs(corr1[len(corr1)//2:]))
plt.subplot(3,1,2)
plt.plot(np.abs(corr2[len(corr2)//2:]))
plt.subplot(3,1,3)
plt.plot(np.abs(corr3[len(corr3)//2:]))
plt.savefig("Correlaciones.png")
|
[
"pandas.DataFrame",
"matplotlib.pylab.savefig",
"matplotlib.pylab.legend",
"scipy.signal.filtfilt",
"matplotlib.pylab.subplot",
"pandas.read_csv",
"matplotlib.pylab.ylabel",
"matplotlib.pylab.plot",
"pandas.to_datetime",
"scipy.signal.butter",
"numpy.correlate",
"matplotlib.pylab.xlabel",
"pandas.concat",
"matplotlib.pylab.figure"
] |
[((141, 251), 'pandas.read_csv', 'pd.read_csv', (['"""transacciones2008.txt"""'], {'sep': '""";"""', 'names': "['Fecha', 'Hora', 'Conversion', 'Monto']", 'decimal': '""","""'}), "('transacciones2008.txt', sep=';', names=['Fecha', 'Hora',\n 'Conversion', 'Monto'], decimal=',')\n", (152, 251), True, 'import pandas as pd\n'), ((253, 363), 'pandas.read_csv', 'pd.read_csv', (['"""transacciones2009.txt"""'], {'sep': '""";"""', 'names': "['Fecha', 'Hora', 'Conversion', 'Monto']", 'decimal': '""","""'}), "('transacciones2009.txt', sep=';', names=['Fecha', 'Hora',\n 'Conversion', 'Monto'], decimal=',')\n", (264, 363), True, 'import pandas as pd\n'), ((365, 475), 'pandas.read_csv', 'pd.read_csv', (['"""transacciones2010.txt"""'], {'sep': '""";"""', 'names': "['Fecha', 'Hora', 'Conversion', 'Monto']", 'decimal': '""","""'}), "('transacciones2010.txt', sep=';', names=['Fecha', 'Hora',\n 'Conversion', 'Monto'], decimal=',')\n", (376, 475), True, 'import pandas as pd\n'), ((764, 870), 'pandas.DataFrame', 'pd.DataFrame', (["{'Fecha': a[0] + ' ' + b[1], 'Conversion': data1['Conversion'], 'Monto':\n data1['Monto']}"], {}), "({'Fecha': a[0] + ' ' + b[1], 'Conversion': data1['Conversion'],\n 'Monto': data1['Monto']})\n", (776, 870), True, 'import pandas as pd\n'), ((868, 974), 'pandas.DataFrame', 'pd.DataFrame', (["{'Fecha': c[0] + ' ' + d[1], 'Conversion': data2['Conversion'], 'Monto':\n data2['Monto']}"], {}), "({'Fecha': c[0] + ' ' + d[1], 'Conversion': data2['Conversion'],\n 'Monto': data2['Monto']})\n", (880, 974), True, 'import pandas as pd\n'), ((972, 1078), 'pandas.DataFrame', 'pd.DataFrame', (["{'Fecha': e[0] + ' ' + f[1], 'Conversion': data3['Conversion'], 'Monto':\n data3['Monto']}"], {}), "({'Fecha': e[0] + ' ' + f[1], 'Conversion': data3['Conversion'],\n 'Monto': data3['Monto']})\n", (984, 1078), True, 'import pandas as pd\n'), ((1081, 1123), 'pandas.concat', 'pd.concat', (['[n1, n2, n3]'], {'ignore_index': '(True)'}), '([n1, n2, n3], ignore_index=True)\n', (1090, 1123), 
True, 'import pandas as pd\n'), ((1139, 1196), 'pandas.to_datetime', 'pd.to_datetime', (["data['Fecha']"], {'format': '"""%d/%m/%Y %H:%M:%S"""'}), "(data['Fecha'], format='%d/%m/%Y %H:%M:%S')\n", (1153, 1196), True, 'import pandas as pd\n'), ((1272, 1300), 'matplotlib.pylab.figure', 'plt.figure', ([], {'figsize': '(15, 10)'}), '(figsize=(15, 10))\n', (1282, 1300), True, 'import matplotlib.pylab as plt\n'), ((1300, 1343), 'matplotlib.pylab.plot', 'plt.plot', (["data['Fecha']", "data['Conversion']"], {}), "(data['Fecha'], data['Conversion'])\n", (1308, 1343), True, 'import matplotlib.pylab as plt\n'), ((1343, 1367), 'matplotlib.pylab.savefig', 'plt.savefig', (['"""Señal.png"""'], {}), "('Señal.png')\n", (1354, 1367), True, 'import matplotlib.pylab as plt\n'), ((1413, 1435), 'scipy.signal.butter', 'signal.butter', (['N1', 'Wn1'], {}), '(N1, Wn1)\n', (1426, 1435), True, 'import scipy.signal as signal\n'), ((1455, 1498), 'scipy.signal.filtfilt', 'signal.filtfilt', (['B1', 'A1', "data['Conversion']"], {}), "(B1, A1, data['Conversion'])\n", (1470, 1498), True, 'import scipy.signal as signal\n'), ((1533, 1555), 'scipy.signal.butter', 'signal.butter', (['N2', 'Wn2'], {}), '(N2, Wn2)\n', (1546, 1555), True, 'import scipy.signal as signal\n'), ((1575, 1618), 'scipy.signal.filtfilt', 'signal.filtfilt', (['B2', 'A2', "data['Conversion']"], {}), "(B2, A2, data['Conversion'])\n", (1590, 1618), True, 'import scipy.signal as signal\n'), ((1653, 1675), 'scipy.signal.butter', 'signal.butter', (['N3', 'Wn3'], {}), '(N3, Wn3)\n', (1666, 1675), True, 'import scipy.signal as signal\n'), ((1695, 1738), 'scipy.signal.filtfilt', 'signal.filtfilt', (['B3', 'A3', "data['Conversion']"], {}), "(B3, A3, data['Conversion'])\n", (1710, 1738), True, 'import scipy.signal as signal\n'), ((1739, 1767), 'matplotlib.pylab.figure', 'plt.figure', ([], {'figsize': '(10, 15)'}), '(figsize=(10, 15))\n', (1749, 1767), True, 'import matplotlib.pylab as plt\n'), ((1767, 1787), 'matplotlib.pylab.subplot', 
'plt.subplot', (['(3)', '(1)', '(1)'], {}), '(3, 1, 1)\n', (1778, 1787), True, 'import matplotlib.pylab as plt\n'), ((1786, 1847), 'matplotlib.pylab.plot', 'plt.plot', (["data['Fecha']", "data['Conversion']"], {'label': '"""Original"""'}), "(data['Fecha'], data['Conversion'], label='Original')\n", (1794, 1847), True, 'import matplotlib.pylab as plt\n'), ((1849, 1908), 'matplotlib.pylab.plot', 'plt.plot', (["data['Fecha']", 'precio_filtrado1'], {'label': '"""Filtrado"""'}), "(data['Fecha'], precio_filtrado1, label='Filtrado')\n", (1857, 1908), True, 'import matplotlib.pylab as plt\n'), ((1910, 1929), 'matplotlib.pylab.xlabel', 'plt.xlabel', (['"""Fecha"""'], {}), "('Fecha')\n", (1920, 1929), True, 'import matplotlib.pylab as plt\n'), ((1930, 1950), 'matplotlib.pylab.ylabel', 'plt.ylabel', (['"""Precio"""'], {}), "('Precio')\n", (1940, 1950), True, 'import matplotlib.pylab as plt\n'), ((1951, 1970), 'matplotlib.pylab.legend', 'plt.legend', ([], {'loc': '(0.0)'}), '(loc=0.0)\n', (1961, 1970), True, 'import matplotlib.pylab as plt\n'), ((1971, 1991), 'matplotlib.pylab.subplot', 'plt.subplot', (['(3)', '(1)', '(2)'], {}), '(3, 1, 2)\n', (1982, 1991), True, 'import matplotlib.pylab as plt\n'), ((1990, 2051), 'matplotlib.pylab.plot', 'plt.plot', (["data['Fecha']", "data['Conversion']"], {'label': '"""Original"""'}), "(data['Fecha'], data['Conversion'], label='Original')\n", (1998, 2051), True, 'import matplotlib.pylab as plt\n'), ((2053, 2112), 'matplotlib.pylab.plot', 'plt.plot', (["data['Fecha']", 'precio_filtrado2'], {'label': '"""Filtrado"""'}), "(data['Fecha'], precio_filtrado2, label='Filtrado')\n", (2061, 2112), True, 'import matplotlib.pylab as plt\n'), ((2114, 2133), 'matplotlib.pylab.xlabel', 'plt.xlabel', (['"""Fecha"""'], {}), "('Fecha')\n", (2124, 2133), True, 'import matplotlib.pylab as plt\n'), ((2134, 2154), 'matplotlib.pylab.ylabel', 'plt.ylabel', (['"""Precio"""'], {}), "('Precio')\n", (2144, 2154), True, 'import matplotlib.pylab as plt\n'), ((2155, 
2174), 'matplotlib.pylab.legend', 'plt.legend', ([], {'loc': '(0.0)'}), '(loc=0.0)\n', (2165, 2174), True, 'import matplotlib.pylab as plt\n'), ((2175, 2195), 'matplotlib.pylab.subplot', 'plt.subplot', (['(3)', '(1)', '(3)'], {}), '(3, 1, 3)\n', (2186, 2195), True, 'import matplotlib.pylab as plt\n'), ((2194, 2255), 'matplotlib.pylab.plot', 'plt.plot', (["data['Fecha']", "data['Conversion']"], {'label': '"""Original"""'}), "(data['Fecha'], data['Conversion'], label='Original')\n", (2202, 2255), True, 'import matplotlib.pylab as plt\n'), ((2257, 2316), 'matplotlib.pylab.plot', 'plt.plot', (["data['Fecha']", 'precio_filtrado3'], {'label': '"""Filtrado"""'}), "(data['Fecha'], precio_filtrado3, label='Filtrado')\n", (2265, 2316), True, 'import matplotlib.pylab as plt\n'), ((2318, 2337), 'matplotlib.pylab.xlabel', 'plt.xlabel', (['"""Fecha"""'], {}), "('Fecha')\n", (2328, 2337), True, 'import matplotlib.pylab as plt\n'), ((2338, 2358), 'matplotlib.pylab.ylabel', 'plt.ylabel', (['"""Precio"""'], {}), "('Precio')\n", (2348, 2358), True, 'import matplotlib.pylab as plt\n'), ((2359, 2378), 'matplotlib.pylab.legend', 'plt.legend', ([], {'loc': '(0.0)'}), '(loc=0.0)\n', (2369, 2378), True, 'import matplotlib.pylab as plt\n'), ((2379, 2405), 'matplotlib.pylab.savefig', 'plt.savefig', (['"""Filtros.png"""'], {}), "('Filtros.png')\n", (2390, 2405), True, 'import matplotlib.pylab as plt\n'), ((2567, 2608), 'numpy.correlate', 'np.correlate', (['ruido1', 'ruido1'], {'mode': '"""full"""'}), "(ruido1, ruido1, mode='full')\n", (2579, 2608), True, 'import numpy as np\n'), ((2613, 2654), 'numpy.correlate', 'np.correlate', (['ruido2', 'ruido2'], {'mode': '"""full"""'}), "(ruido2, ruido2, mode='full')\n", (2625, 2654), True, 'import numpy as np\n'), ((2659, 2700), 'numpy.correlate', 'np.correlate', (['ruido3', 'ruido3'], {'mode': '"""full"""'}), "(ruido3, ruido3, mode='full')\n", (2671, 2700), True, 'import numpy as np\n'), ((2700, 2728), 'matplotlib.pylab.figure', 'plt.figure', ([], 
{'figsize': '(10, 15)'}), '(figsize=(10, 15))\n', (2710, 2728), True, 'import matplotlib.pylab as plt\n'), ((2728, 2748), 'matplotlib.pylab.subplot', 'plt.subplot', (['(3)', '(1)', '(1)'], {}), '(3, 1, 1)\n', (2739, 2748), True, 'import matplotlib.pylab as plt\n'), ((2787, 2807), 'matplotlib.pylab.subplot', 'plt.subplot', (['(3)', '(1)', '(2)'], {}), '(3, 1, 2)\n', (2798, 2807), True, 'import matplotlib.pylab as plt\n'), ((2846, 2866), 'matplotlib.pylab.subplot', 'plt.subplot', (['(3)', '(1)', '(3)'], {}), '(3, 1, 3)\n', (2857, 2866), True, 'import matplotlib.pylab as plt\n'), ((2905, 2937), 'matplotlib.pylab.savefig', 'plt.savefig', (['"""Correlaciones.png"""'], {}), "('Correlaciones.png')\n", (2916, 2937), True, 'import matplotlib.pylab as plt\n')]
|
#!/usr/bin/env python
import sys,os,stat,inspect,fnmatch
from glob import *
from collections import defaultdict as ddict
from .m4 import *
from .utilities import *
from .mod_autolib import autolib
from .mod_autoprog import autoprog
from .mod_autopackage import autopackage
# todo
#
# am_write should only set bin_PROGRAMS = [empty] and lib_LTLIBRARIES = [empty]
# only immediately upon opening Makefile.am
#
# am_write needs to recursively determine the dependency var's instead of just looking at the top level
#
def here():
"""
Returns the directory-part of the full path of the script
that called this function.
"""
filename = inspect.getfile(sys._getframe(1))
return os.path.dirname(os.path.realpath(filename))
|
[
"os.path.realpath",
"sys._getframe"
] |
[((674, 690), 'sys._getframe', 'sys._getframe', (['(1)'], {}), '(1)\n', (687, 690), False, 'import sys, os, stat, inspect, fnmatch\n'), ((719, 745), 'os.path.realpath', 'os.path.realpath', (['filename'], {}), '(filename)\n', (735, 745), False, 'import sys, os, stat, inspect, fnmatch\n')]
|
import os
from pathlib import Path
from tempfile import TemporaryDirectory
from textwrap import dedent
from unittest import TestCase
from mypy import api
def _check_mypy_on_code(python_code: str) -> str:
file_content = dedent(python_code).strip() + os.linesep
with TemporaryDirectory() as directory_name:
path_to_file = Path(directory_name) / "mypy_test.py"
with open(path_to_file, "w") as file:
file.write(file_content)
mypy_findings, _, _ = api.run([str(path_to_file)])
return mypy_findings
class MyPyTest(TestCase):
def test_mypy_accepts_any(self):
mypy_findings = _check_mypy_on_code(
"""
from typing import Any
from nptyping import NDArray
NDArray[Any, Any]
"""
)
self.assertIn("Success", mypy_findings)
def test_mypy_accepts_shape(self):
mypy_findings = _check_mypy_on_code(
"""
from typing import Any
from nptyping import NDArray, Shape
NDArray[Shape["3, 3"], Any]
"""
)
self.assertIn("Success", mypy_findings)
def test_mypy_disapproves_wrong_function_arguments(self):
mypy_findings = _check_mypy_on_code(
"""
from typing import Any
import numpy as np
from nptyping import NDArray, Shape
def func(_: NDArray[Shape["2, 2"], Any]) -> None:
...
func("Not an array...")
"""
)
self.assertIn('Argument 1 to "func" has incompatible type "str"', mypy_findings)
self.assertIn('expected "ndarray[Any, Any]"', mypy_findings)
self.assertIn("Found 1 error in 1 file", mypy_findings)
def test_mypy_accepts_ndarrays_as_function_arguments(self):
mypy_findings = _check_mypy_on_code(
"""
from typing import Any
import numpy as np
from nptyping import NDArray, Shape
def func(_: NDArray[Shape["2, 2"], Any]) -> None:
...
func(np.array([1, 2])) # (Wrong shape though)
"""
)
self.assertIn("Success", mypy_findings)
def test_mypy_accepts_ndarrays_as_variable_hints(self):
mypy_findings = _check_mypy_on_code(
"""
from typing import Any
import numpy as np
from nptyping import NDArray
arr: NDArray[Any, Any] = np.array([1, 2, 3])
"""
)
self.assertIn("Success", mypy_findings)
def test_mypy_accepts_numpy_types(self):
mypy_findings = _check_mypy_on_code(
"""
from typing import Any
from nptyping import NDArray
import numpy as np
NDArray[np.int_, Any]
NDArray[np.float_, Any]
NDArray[np.uint8, Any]
NDArray[np.bool_, Any]
"""
)
self.assertIn("Success", mypy_findings)
def test_mypy_knows_of_ndarray_methods(self):
# If MyPy knows of some arbitrary ndarray methods, we can assume that
# code completion works.
mypy_findings = _check_mypy_on_code(
"""
from typing import Any
from nptyping import NDArray
arr: NDArray[Any, Any]
arr.shape
arr.size
arr.sort
arr.squeeze
arr.transpose
"""
)
self.assertIn("Success", mypy_findings)
def test_mypy_accepts_nptyping_types(self):
mypy_findings = _check_mypy_on_code(
"""
from typing import Any
import numpy as np
from nptyping import (
NDArray,
Number,
Bool,
Bool8,
Object,
Object0,
Datetime64,
Integer,
SignedInteger,
Int8,
Int16,
Int32,
Int64,
Byte,
Short,
IntC,
IntP,
Int0,
Int,
LongLong,
Timedelta64,
UnsignedInteger,
UInt8,
UInt16,
UInt32,
UInt64,
UByte,
UShort,
UIntC,
UIntP,
UInt0,
UInt,
ULongLong,
Inexact,
Floating,
Float16,
Float32,
Float64,
Half,
Single,
Double,
Float,
LongDouble,
LongFloat,
ComplexFloating,
Complex64,
Complex128,
CSingle,
SingleComplex,
CDouble,
Complex,
CFloat,
CLongDouble,
CLongFloat,
LongComplex,
Flexible,
Void,
Void0,
Character,
Bytes,
String,
Bytes0,
Unicode,
Str0,
)
NDArray[Number, Any]
NDArray[Bool, Any]
NDArray[Bool8, Any]
NDArray[Object, Any]
NDArray[Object0, Any]
NDArray[Datetime64, Any]
NDArray[Integer, Any]
NDArray[SignedInteger, Any]
NDArray[Int8, Any]
NDArray[Int16, Any]
NDArray[Int32, Any]
NDArray[Int64, Any]
NDArray[Byte, Any]
NDArray[Short, Any]
NDArray[IntC, Any]
NDArray[IntP, Any]
NDArray[Int0, Any]
NDArray[Int, Any]
NDArray[LongLong, Any]
NDArray[Timedelta64, Any]
NDArray[UnsignedInteger, Any]
NDArray[UInt8, Any]
NDArray[UInt16, Any]
NDArray[UInt32, Any]
NDArray[UInt64, Any]
NDArray[UByte, Any]
NDArray[UShort, Any]
NDArray[UIntC, Any]
NDArray[UIntP, Any]
NDArray[UInt0, Any]
NDArray[UInt, Any]
NDArray[ULongLong, Any]
NDArray[Inexact, Any]
NDArray[Floating, Any]
NDArray[Float16, Any]
NDArray[Float32, Any]
NDArray[Float64, Any]
NDArray[Half, Any]
NDArray[Single, Any]
NDArray[Double, Any]
NDArray[Float, Any]
NDArray[LongDouble, Any]
NDArray[LongFloat, Any]
NDArray[ComplexFloating, Any]
NDArray[Complex64, Any]
NDArray[Complex128, Any]
NDArray[CSingle, Any]
NDArray[SingleComplex, Any]
NDArray[CDouble, Any]
NDArray[Complex, Any]
NDArray[CFloat, Any]
NDArray[CLongDouble, Any]
NDArray[CLongFloat, Any]
NDArray[LongComplex, Any]
NDArray[Flexible, Any]
NDArray[Void, Any]
NDArray[Void0, Any]
NDArray[Character, Any]
NDArray[Bytes, Any]
NDArray[String, Any]
NDArray[Bytes0, Any]
NDArray[Unicode, Any]
NDArray[Str0, Any]
"""
)
self.assertIn("Success", mypy_findings)
|
[
"textwrap.dedent",
"pathlib.Path",
"tempfile.TemporaryDirectory"
] |
[((276, 296), 'tempfile.TemporaryDirectory', 'TemporaryDirectory', ([], {}), '()\n', (294, 296), False, 'from tempfile import TemporaryDirectory\n'), ((339, 359), 'pathlib.Path', 'Path', (['directory_name'], {}), '(directory_name)\n', (343, 359), False, 'from pathlib import Path\n'), ((226, 245), 'textwrap.dedent', 'dedent', (['python_code'], {}), '(python_code)\n', (232, 245), False, 'from textwrap import dedent\n')]
|
from dataclasses import dataclass, field
from pathlib import Path
from typing import Type, AnyStr, List, Optional
from zipfile import ZipFile, ZIP_DEFLATED
import imghdr
import inspect
import json
import os
import pprint
import re
import shutil
import stat
import tempfile
import warnings
import pkg_resources
import requests
from chai_py.auth import get_auth
from chai_py.chai_bot import ChaiBot
MAX_SUPPORTED_MEMORY = 4096
@dataclass
class Metadata:
"""Information required for bot deployment."""
# Name of the bot.
name: str
# Profile image for the bot. Has to be a valid URL.
image_url: str
# The alphanumeric part of a hex color code. (E.g. ffffff)
color: str
# Description of the bot.
description: str
# Python class (N.B. not object!) that inherits from ChaiBot.
input_class: Type[ChaiBot]
# Developer Unique ID.
developer_uid: str = field(default_factory=lambda: get_auth().uid)
# Total available memory for the bot in MB. This includes memory needed to store sources and data.
memory: int = 256
def verify(self, bot_file: Path):
"""Performs basic checks to ensure validity of the metadata."""
assert isinstance(self.name, str)
assert len(self.name) >= 3, "Bot name has to be at least 3 characters."
assert len(self.description) > 0, "Bot has to have description."
assert self.input_class.__init__ is ChaiBot.__init__, \
"Do not override ChaiBot.__init__(). Override the setup() method instead."
assert not (bot_file.parent / "main.py").exists(), "Do not create a main.py file in your bot's root directory."
try:
verify_image_url(self.image_url)
except Exception:
raise ValueError(f"Could not verify image url ({self.image_url})")
assert isinstance(self.color, str)
assert re.search(r"^(?:[0-9a-fA-F]{3}){1,2}$", self.color), \
f"Color has to be provided as the alphanumeric part of the hex code (e.g. ffffff), found {self.color}"
assert isinstance(self.memory, int), f"Attribute .memory has to be an integer (found type {type(self.memory)})."
assert self.memory <= MAX_SUPPORTED_MEMORY, f"Attribute .memory has to be less than or equal to {MAX_SUPPORTED_MEMORY} (found {self.memory})."
def package(metadata: Metadata, requirements: Optional[List[str]] = None, path: Optional[str] = None):
"""Packages the chatbot into a single archive for deployment.
Performs some preliminary checks on the metadata.
Creates a _package.zip file in the directory containing the file that contains the bot class
unless a path is provided.
:param metadata:
:param requirements:
:param path:
:return:
"""
bot_file = Path(inspect.getfile(metadata.input_class))
print("Running verification checks on metadata.")
metadata.verify(bot_file)
metadata_dict = {
'name': metadata.name,
'imageUrl': metadata.image_url,
'color': metadata.color,
'developerUid': metadata.developer_uid,
'description': metadata.description,
'inputFile': bot_file.stem,
'inputClass': metadata.input_class.__name__,
'memory': metadata.memory,
}
print("Prepared metadata:")
pprint.pprint(metadata_dict)
print("Preparing temporary directory...")
with tempfile.TemporaryDirectory() as temp_dir:
# Copy files in bot directory
def ignore(src, names):
ignore_list = []
for name in names:
# e.g .git folder is not wanted
if name.startswith('.') or name.startswith('_package.zip'):
warnings.warn(
f"Ignoring files which start with '.': {name}.",
RuntimeWarning
)
ignore_list.append(name)
if name == "main.py":
raise RuntimeError("Bot root directory cannot contain a main.py file.")
return ignore_list
copytree(bot_file.parent, temp_dir, ignore=ignore)
# Write metadata.json
with (Path(temp_dir) / "metadata.json").open("w") as f:
json.dump(metadata_dict, f)
# Write requirements.txt
if requirements:
write_valid_requirements_file(Path(temp_dir) / "requirements.txt", requirements)
# Create zip
if path is None:
path = bot_file.parent / "_package.zip"
else:
path = Path(path)
with path.open("wb") as f:
zipfile_from_folder(temp_dir, f)
print(f"Created zip package at {path}.")
def verify_image_url(url: str):
"""Verifies that the provided url resolves to an image.
Performs a GET request on the given url and performs a trivial (non-conclusive) check
that the image type can be inferred from the received bytes.
:param url:
:return:
"""
r = requests.get(url)
try:
imghdr.what(None, h=r.content)
except Exception:
raise ValueError(
f"Could not verify image type from bytes "
f"(response content-type of {r.headers.get('content-type')})"
)
def zipfile_from_folder(folder: AnyStr, file):
# Adapted from https://stackoverflow.com/a/17080988
with ZipFile(file, "w", compression=ZIP_DEFLATED) as zip_archive:
for root, dirs, files in os.walk(folder):
# add directory (needed for empty dirs)
zip_archive.write(root, os.path.relpath(root, folder))
for file in files:
filename = os.path.join(root, file)
if os.path.isfile(filename): # regular files only
arcname = os.path.join(os.path.relpath(root, folder), file)
zip_archive.write(filename, arcname)
def copytree(src, dst, symlinks=False, ignore=None):
"""Copies files from src to dst.
Taken from https://stackoverflow.com/a/22331852.
Necessitated by Python 3.7 environment; Python 3.8's shutil.copytree can be used directly
as it has the necessary dirs_exist_ok argument.
:param src: Source directory
:param dst: Target directory
:param symlinks:
:param ignore: Callable
:return:
"""
if not os.path.exists(dst):
os.makedirs(dst)
shutil.copystat(src, dst)
lst = os.listdir(src)
if ignore:
excl = ignore(src, lst)
lst = [x for x in lst if x not in excl]
for item in lst:
s = os.path.join(src, item)
d = os.path.join(dst, item)
if symlinks and os.path.islink(s):
if os.path.lexists(d):
os.remove(d)
os.symlink(os.readlink(s), d)
try:
st = os.lstat(s)
mode = stat.S_IMODE(st.st_mode)
os.lchmod(d, mode)
except Exception:
pass # lchmod not available
elif os.path.isdir(s):
copytree(s, d, symlinks, ignore)
else:
shutil.copy2(s, d)
def write_valid_requirements_file(path: AnyStr, requirements: List[str]):
"""Writes a valid requirements.txt file.
Iterates through list of requirements, writing valid entries to the specified file,
Ignores (and prints) invalid requirements.
:param path:
:param requirements:
:return:
"""
with Path(path).open("w") as f:
for requirement in requirements:
try:
pkg_resources.Requirement.parse(requirement)
f.write(requirement + "\n")
except Exception as e:
print(f"Ignoring requirement {requirement}: {e}")
|
[
"os.remove",
"os.lchmod",
"os.walk",
"shutil.copystat",
"chai_py.auth.get_auth",
"pathlib.Path",
"os.path.islink",
"os.path.isfile",
"pprint.pprint",
"os.path.join",
"os.path.lexists",
"tempfile.TemporaryDirectory",
"pkg_resources.Requirement.parse",
"os.path.exists",
"requests.get",
"re.search",
"json.dump",
"shutil.copy2",
"inspect.getfile",
"os.listdir",
"zipfile.ZipFile",
"os.makedirs",
"os.readlink",
"os.path.isdir",
"imghdr.what",
"os.path.relpath",
"stat.S_IMODE",
"warnings.warn",
"os.lstat"
] |
[((3285, 3313), 'pprint.pprint', 'pprint.pprint', (['metadata_dict'], {}), '(metadata_dict)\n', (3298, 3313), False, 'import pprint\n'), ((4957, 4974), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (4969, 4974), False, 'import requests\n'), ((6371, 6386), 'os.listdir', 'os.listdir', (['src'], {}), '(src)\n', (6381, 6386), False, 'import os\n'), ((1873, 1923), 're.search', 're.search', (['"""^(?:[0-9a-fA-F]{3}){1,2}$"""', 'self.color'], {}), "('^(?:[0-9a-fA-F]{3}){1,2}$', self.color)\n", (1882, 1923), False, 'import re\n'), ((2775, 2812), 'inspect.getfile', 'inspect.getfile', (['metadata.input_class'], {}), '(metadata.input_class)\n', (2790, 2812), False, 'import inspect\n'), ((3370, 3399), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (3397, 3399), False, 'import tempfile\n'), ((4992, 5022), 'imghdr.what', 'imghdr.what', (['None'], {'h': 'r.content'}), '(None, h=r.content)\n', (5003, 5022), False, 'import imghdr\n'), ((5324, 5368), 'zipfile.ZipFile', 'ZipFile', (['file', '"""w"""'], {'compression': 'ZIP_DEFLATED'}), "(file, 'w', compression=ZIP_DEFLATED)\n", (5331, 5368), False, 'from zipfile import ZipFile, ZIP_DEFLATED\n'), ((5418, 5433), 'os.walk', 'os.walk', (['folder'], {}), '(folder)\n', (5425, 5433), False, 'import os\n'), ((6281, 6300), 'os.path.exists', 'os.path.exists', (['dst'], {}), '(dst)\n', (6295, 6300), False, 'import os\n'), ((6310, 6326), 'os.makedirs', 'os.makedirs', (['dst'], {}), '(dst)\n', (6321, 6326), False, 'import os\n'), ((6335, 6360), 'shutil.copystat', 'shutil.copystat', (['src', 'dst'], {}), '(src, dst)\n', (6350, 6360), False, 'import shutil\n'), ((6515, 6538), 'os.path.join', 'os.path.join', (['src', 'item'], {}), '(src, item)\n', (6527, 6538), False, 'import os\n'), ((6551, 6574), 'os.path.join', 'os.path.join', (['dst', 'item'], {}), '(dst, item)\n', (6563, 6574), False, 'import os\n'), ((4208, 4235), 'json.dump', 'json.dump', (['metadata_dict', 'f'], {}), '(metadata_dict, f)\n', (4217, 
4235), False, 'import json\n'), ((4520, 4530), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (4524, 4530), False, 'from pathlib import Path\n'), ((6599, 6616), 'os.path.islink', 'os.path.islink', (['s'], {}), '(s)\n', (6613, 6616), False, 'import os\n'), ((6633, 6651), 'os.path.lexists', 'os.path.lexists', (['d'], {}), '(d)\n', (6648, 6651), False, 'import os\n'), ((6945, 6961), 'os.path.isdir', 'os.path.isdir', (['s'], {}), '(s)\n', (6958, 6961), False, 'import os\n'), ((5523, 5552), 'os.path.relpath', 'os.path.relpath', (['root', 'folder'], {}), '(root, folder)\n', (5538, 5552), False, 'import os\n'), ((5612, 5636), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (5624, 5636), False, 'import os\n'), ((5656, 5680), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (5670, 5680), False, 'import os\n'), ((6669, 6681), 'os.remove', 'os.remove', (['d'], {}), '(d)\n', (6678, 6681), False, 'import os\n'), ((6705, 6719), 'os.readlink', 'os.readlink', (['s'], {}), '(s)\n', (6716, 6719), False, 'import os\n'), ((6762, 6773), 'os.lstat', 'os.lstat', (['s'], {}), '(s)\n', (6770, 6773), False, 'import os\n'), ((6797, 6821), 'stat.S_IMODE', 'stat.S_IMODE', (['st.st_mode'], {}), '(st.st_mode)\n', (6809, 6821), False, 'import stat\n'), ((6838, 6856), 'os.lchmod', 'os.lchmod', (['d', 'mode'], {}), '(d, mode)\n', (6847, 6856), False, 'import os\n'), ((7034, 7052), 'shutil.copy2', 'shutil.copy2', (['s', 'd'], {}), '(s, d)\n', (7046, 7052), False, 'import shutil\n'), ((7383, 7393), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (7387, 7393), False, 'from pathlib import Path\n'), ((7484, 7528), 'pkg_resources.Requirement.parse', 'pkg_resources.Requirement.parse', (['requirement'], {}), '(requirement)\n', (7515, 7528), False, 'import pkg_resources\n'), ((929, 939), 'chai_py.auth.get_auth', 'get_auth', ([], {}), '()\n', (937, 939), False, 'from chai_py.auth import get_auth\n'), ((3687, 3765), 'warnings.warn', 'warnings.warn', 
(['f"""Ignoring files which start with \'.\': {name}."""', 'RuntimeWarning'], {}), '(f"Ignoring files which start with \'.\': {name}.", RuntimeWarning)\n', (3700, 3765), False, 'import warnings\n'), ((4337, 4351), 'pathlib.Path', 'Path', (['temp_dir'], {}), '(temp_dir)\n', (4341, 4351), False, 'from pathlib import Path\n'), ((4146, 4160), 'pathlib.Path', 'Path', (['temp_dir'], {}), '(temp_dir)\n', (4150, 4160), False, 'from pathlib import Path\n'), ((5747, 5776), 'os.path.relpath', 'os.path.relpath', (['root', 'folder'], {}), '(root, folder)\n', (5762, 5776), False, 'import os\n')]
|
import pygame
class Instruction():
def __init__(self, x, y, font_size = 30):
self.x = x
self.y = y
self.line_size = font_size + 30 # 行距
self.color = (255, 255, 255)
self.font_obj = pygame.font.Font("assets/ShadowsIntoLightTwo-Regular.ttf", font_size)
# 為每行文字創建surface
self.font_surfaces = []
with open("assets/instructions.txt", 'r', encoding='utf8') as text:
for line in text.readlines():
self.font_surfaces.append(self.font_obj.render(line, True, self.color))
def draw(self, screen):
# draw所有text line,含行距運算
for index, surface in enumerate(self.font_surfaces):
screen.blit(surface, (self.x, self.y + self.line_size * index))
|
[
"pygame.font.Font"
] |
[((226, 295), 'pygame.font.Font', 'pygame.font.Font', (['"""assets/ShadowsIntoLightTwo-Regular.ttf"""', 'font_size'], {}), "('assets/ShadowsIntoLightTwo-Regular.ttf', font_size)\n", (242, 295), False, 'import pygame\n')]
|
#
# Copyright 2012-2021 Bronto Software, Udviklings- og Forenklingsstyrelsen
# and multiple other contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import collections
import re
from xml.sax.saxutils import escape as html_escape
from bs4 import BeautifulSoup
Cell = collections.namedtuple("Cell", ["type", "rowspan", "colspan", "contents"])
class Converter(object):
def __init__(self, parser):
self._unknown_tags = set()
self._clear = "\n\n..\n\n"
# Regular expressions
self._preprocess_anchors = re.compile(r'<a\s+name\s*=\s*["\']?(.+?)["\']?\s*>')
self._post_process_empty_lines = re.compile(r"^\s+$", re.MULTILINE)
self._post_process_compress_lines = re.compile(r"\n{3,}")
self._whitespace_with_newline = re.compile(r"[\s\n]+")
self._whitespace = re.compile(r"\s+")
self._html_tag = re.compile(r"<.*?>")
self._preprocess_entity = re.compile(r"&(nbsp|lt|gt|amp)([^;]|[\n])")
self._parser = parser
# --------------------------------------------------------------------------
# ---- reST Utility Methods ----
def _unicode(self, s):
if isinstance(s, str):
return s
else:
return str(s, "utf8")
def _separate(self, s):
return "\n\n" + s + "\n\n"
def _escape_inline(self, s):
return "\\ " + s + "\\ "
def _inline(self, tag, s):
# Seems fishy if our inline markup spans lines. We will instead just return
# the string as is
if "\n" in s:
return s
s = s.strip()
if not s:
return s
return self._escape_inline(tag + s.strip() + tag)
def _role(self, role, s, label=None):
if label:
return self._escape_inline(":%s:`%s <%s>`" % (role, label, s))
else:
return self._escape_inline(":%s:`%s`" % (role, s))
def _directive(self, directive, body=None):
header = "\n\n.. %s::\n\n" % (directive,)
if body:
return header + self._left_justify(body, 3) + "\n\n"
else:
return header + "\n"
def _hyperlink(self, target, label):
return self._escape_inline("`%s <%s>`_" % (label, target))
def _listing(self, marker, items):
items = [self._left_justify(item, len(marker) + 1) for item in items]
items = [marker + item[len(marker) :] for item in items]
return self._separate("..") + self._separate("\n".join(items))
def _left_justify(self, s, indent=0):
lines = [line.rstrip() for line in s.split("\n")]
indents = [len(line) - len(line.lstrip()) for line in lines if line]
if not indents:
return s
shift = indent - min(indents)
if shift < 0:
return "\n".join(line[-shift:] for line in lines)
else:
prefix = " " * shift
return "\n".join(prefix + line for line in lines)
def _compress_whitespace(self, s, replace=" ", newlines=True):
if newlines:
return self._whitespace_with_newline.sub(replace, s)
else:
return self._whitespace.sub(replace, s)
# --------------------------------------------------------------------------
# ---- DOM Tree Processing ----
def _process_table_cells(self, table):
"""Compile all the table cells.
Returns a list of rows. The rows may have different lengths because of
column spans.
"""
rows = []
for i, tr in enumerate(table.find_all("tr")):
row = []
for c in tr.contents:
cell_type = getattr(c, "name", None)
if cell_type not in ("td", "th"):
continue
rowspan = int(c.attrs.get("rowspan", 1))
colspan = int(c.attrs.get("colspan", 1))
contents = self._process_children(c).strip()
if cell_type == "th" and i > 0:
contents = self._inline("**", contents)
row.append(Cell(cell_type, rowspan, colspan, contents))
rows.append(row)
return rows
def _process_table(self, node):
rows = self._process_table_cells(node)
if not rows:
return ""
table_num_columns = max(sum(c.colspan for c in row) for row in rows)
for row in rows:
row_num_columns = sum(c.colspan for c in row)
if row_num_columns < table_num_columns:
cell_type = row[-1].type if row else "td"
row.append(Cell(cell_type, 1, table_num_columns - row_num_columns, ""))
col_widths = [0] * table_num_columns
row_heights = [0] * len(rows)
for i, row in enumerate(rows):
j = 0
for cell in row:
current_w = sum(col_widths[j : j + cell.colspan])
required_w = max(len(line) for line in cell.contents.split("\n"))
if required_w > current_w:
additional = required_w - current_w
col_widths[j] += additional - (cell.colspan - 1) * (
additional // cell.colspan
)
for jj in range(j + 1, j + cell.colspan):
col_widths[jj] += additional // cell.colspan
current_h = row_heights[i]
required_h = len(cell.contents.split("\n"))
if required_h > current_h:
row_heights[i] = required_h
j += cell.colspan
row_sep = "+" + "+".join("-" * (line + 2) for line in col_widths) + "+"
header_sep = "+" + "+".join("=" * (line + 2) for line in col_widths) + "+"
lines = [row_sep]
for i, row in enumerate(rows):
for y in range(0, row_heights[i]):
line = []
j = 0
for c in row:
w = sum(n + 3 for n in col_widths[j : j + c.colspan]) - 2
# h = row_heights[i]
line.append("| ")
cell_lines = c.contents.split("\n")
content = cell_lines[y] if y < len(cell_lines) else ""
line.append(content.ljust(w))
j += c.colspan
line.append("|")
lines.append("".join(line))
if i == 0 and all(c.type == "th" for c in row):
lines.append(header_sep)
else:
lines.append(row_sep)
return self._separate("\n".join(lines))
def _process_children(self, node):
parts = []
is_newline = False
for c in node.contents:
part = self._process(c)
if is_newline:
part = part.lstrip()
if part:
parts.append(part)
is_newline = part.endswith("\n")
return "".join(parts)
def _process_text(self, node):
return "".join(node.strings)
def _process(self, node):
if isinstance(node, str):
return self._compress_whitespace(node)
simple_tags = {
"b": lambda s: self._inline("**", s),
"strong": lambda s: self._inline("**", s),
"i": lambda s: self._inline("*", s),
"em": lambda s: self._inline("*", s),
"tt": lambda s: self._inline("``", s),
"code": lambda s: self._inline("``", s),
"h1": lambda s: self._inline("**", s),
"h2": lambda s: self._inline("**", s),
"h3": lambda s: self._inline("**", s),
"h4": lambda s: self._inline("**", s),
"h5": lambda s: self._inline("**", s),
"h6": lambda s: self._inline("**", s),
"sub": lambda s: self._role("sub", s),
"sup": lambda s: self._role("sup", s),
"hr": lambda s: self._separate(""), # Transitions not allowed
}
if node.name in simple_tags:
return simple_tags[node.name](self._process_text(node))
if node.name == "p":
return self._separate(self._process_children(node).strip())
if node.name == "pre":
return self._directive("parsed-literal", self._process_text(node))
if node.name == "a":
if "name" in node.attrs:
return self._separate(".. _" + node["name"] + ":")
elif "href" in node.attrs:
target = node["href"]
label = self._compress_whitespace(self._process_text(node).strip("\n"))
if target.startswith("#"):
return self._role("ref", target[1:], label)
elif target.startswith("@"):
return self._role("java:ref", target[1:], label)
else:
return self._hyperlink(target, label)
if node.name == "ul":
items = [self._process(n) for n in node.find_all("li", recursive=False)]
return self._listing("*", items)
if node.name == "ol":
items = [self._process(n) for n in node.find_all("li", recursive=False)]
return self._listing("#.", items)
if node.name == "li":
s = self._process_children(node)
s = s.strip()
# If it's multiline clear the end to correcly support nested lists
if "\n" in s:
s = s + "\n\n"
return s
if node.name == "table":
return self._process_table(node)
self._unknown_tags.add(node.name)
return self._process_children(node)
# --------------------------------------------------------------------------
# ---- HTML Preprocessing ----
def _preprocess_inline_javadoc_replace(self, tag, f, s):
parts = []
start = "{@" + tag
start_length = len(start)
i = s.find(start)
j = 0
while i != -1:
parts.append(s[j:i])
# Find a closing bracket such that the brackets are balanced between
# them. This is necessary since code examples containing { and } are
# commonly wrapped in {@code ...} tags
try:
j = s.find("}", i + start_length) + 1
while s.count("{", i, j) != s.count("}", i, j):
j = s.index("}", j) + 1
except ValueError:
raise ValueError("Unbalanced {} brackets in " + tag + " tag")
parts.append(f(s[i + start_length : j - 1].strip()))
i = s.find(start, j)
parts.append(s[j:])
return "".join(parts)
def _preprocess_replace_javadoc_link(self, s):
s = self._compress_whitespace(s)
target = None
label = ""
if " " not in s:
target = s
else:
i = s.find(" ")
while s.count("(", 0, i) != s.count(")", 0, i):
i = s.find(" ", i + 1)
if i == -1:
i = len(s)
break
target = s[:i]
label = s[i:]
if target[0] == "#":
target = target[1:]
target = target.replace("#", ".").replace(" ", "").strip()
# Strip HTML tags from the target
target = self._html_tag.sub("", target)
label = label.strip()
return '<a href="@%s">%s</a>' % (target, label)
def _preprocess_close_anchor_tags(self, s):
# Add closing tags to all anchors so they are better handled by the parser
return self._preprocess_anchors.sub(r'<a name="\1"></a>', s)
def _preprocess_fix_entities(self, s):
return self._preprocess_entity.sub(r"&\1;\2", s)
def _preprocess(self, s_html):
def to_tag(t):
return lambda m: "<%s>%s</%s>" % (t, html_escape(m), t)
s_html = self._preprocess_inline_javadoc_replace("code", to_tag("code"), s_html)
s_html = self._preprocess_inline_javadoc_replace(
"literal", to_tag("span"), s_html
)
s_html = self._preprocess_inline_javadoc_replace(
"docRoot", lambda m: "", s_html
)
s_html = self._preprocess_inline_javadoc_replace(
"linkplain", self._preprocess_replace_javadoc_link, s_html
)
s_html = self._preprocess_inline_javadoc_replace(
"link", self._preprocess_replace_javadoc_link, s_html
)
# Make sure all anchor tags are closed
s_html = self._preprocess_close_anchor_tags(s_html)
# Fix up some entitities without closing ;
s_html = self._preprocess_fix_entities(s_html)
return s_html
# --------------------------------------------------------------------------
# ---- Conversion entry point ----
def convert(self, s_html):
if not isinstance(s_html, str):
s_html = str(s_html, "utf8")
s_html = self._preprocess(s_html)
if not s_html.strip():
return ""
soup = BeautifulSoup(s_html, self._parser)
top = soup.html.body
result = self._process_children(top)
# Post processing
result = self._post_process_empty_lines.sub("", result)
result = self._post_process_compress_lines.sub("\n\n", result)
result = result.strip()
return result
|
[
"bs4.BeautifulSoup",
"xml.sax.saxutils.escape",
"collections.namedtuple",
"re.compile"
] |
[((798, 872), 'collections.namedtuple', 'collections.namedtuple', (['"""Cell"""', "['type', 'rowspan', 'colspan', 'contents']"], {}), "('Cell', ['type', 'rowspan', 'colspan', 'contents'])\n", (820, 872), False, 'import collections\n'), ((1068, 1127), 're.compile', 're.compile', (['"""<a\\\\s+name\\\\s*=\\\\s*["\\\\\']?(.+?)["\\\\\']?\\\\s*>"""'], {}), '(\'<a\\\\s+name\\\\s*=\\\\s*["\\\\\\\']?(.+?)["\\\\\\\']?\\\\s*>\')\n', (1078, 1127), False, 'import re\n'), ((1162, 1196), 're.compile', 're.compile', (['"""^\\\\s+$"""', 're.MULTILINE'], {}), "('^\\\\s+$', re.MULTILINE)\n", (1172, 1196), False, 'import re\n'), ((1241, 1262), 're.compile', 're.compile', (['"""\\\\n{3,}"""'], {}), "('\\\\n{3,}')\n", (1251, 1262), False, 'import re\n'), ((1303, 1326), 're.compile', 're.compile', (['"""[\\\\s\\\\n]+"""'], {}), "('[\\\\s\\\\n]+')\n", (1313, 1326), False, 'import re\n'), ((1353, 1371), 're.compile', 're.compile', (['"""\\\\s+"""'], {}), "('\\\\s+')\n", (1363, 1371), False, 'import re\n'), ((1397, 1416), 're.compile', 're.compile', (['"""<.*?>"""'], {}), "('<.*?>')\n", (1407, 1416), False, 'import re\n'), ((1453, 1496), 're.compile', 're.compile', (['"""&(nbsp|lt|gt|amp)([^;]|[\\\\n])"""'], {}), "('&(nbsp|lt|gt|amp)([^;]|[\\\\n])')\n", (1463, 1496), False, 'import re\n'), ((13556, 13591), 'bs4.BeautifulSoup', 'BeautifulSoup', (['s_html', 'self._parser'], {}), '(s_html, self._parser)\n', (13569, 13591), False, 'from bs4 import BeautifulSoup\n'), ((12363, 12377), 'xml.sax.saxutils.escape', 'html_escape', (['m'], {}), '(m)\n', (12374, 12377), True, 'from xml.sax.saxutils import escape as html_escape\n')]
|
import macropy.activate
import JeevesLib
from smt.Z3 import *
import unittest
from Auction import AuctionContext, Bid, User
import JeevesLib
class TestAuction(unittest.TestCase):
def setUp(self):
JeevesLib.init()
self.aliceUser = User(0)
self.bobUser = User(1)
self.claireUser = User(2)
def testOwnerCanSee(self):
policy = lambda oc: False
aliceBid = Bid(3, self.aliceUser, policy)
ctxt0 = AuctionContext(self.aliceUser, 0, [])
self.assertEqual(3
, JeevesLib.concretize(ctxt0, aliceBid.value))
ctxt1 = AuctionContext(self.bobUser, 0, [])
self.assertEqual(-1
, JeevesLib.concretize(ctxt1, aliceBid.value))
def testTimeSensitiveRelease(self):
auctionEndTime = 10
policy = lambda oc: oc.time > auctionEndTime
aliceBid = Bid(3, self.aliceUser, policy)
self.assertEqual(3
, JeevesLib.concretize(
AuctionContext(self.bobUser, 11, []), aliceBid.value))
self.assertEqual(-1
, JeevesLib.concretize(
AuctionContext(self.bobUser, 10, []), aliceBid.value))
def testSealedAuction(self):
# Function that returns true if the context contains a bid from the given
# user.
def hasBidFromUser(ctxt, u):
return JeevesLib.jhasElt(ctxt.bids, lambda b: b.owner == u)
allUsers = [self.aliceUser, self.bobUser, self.claireUser]
policy = lambda oc: reduce(lambda acc, c: JeevesLib.jand(
lambda: hasBidFromUser(oc, c), lambda: acc)
, allUsers)
aliceBid = Bid(3, self.aliceUser, policy)
bobBid = Bid(4, self.bobUser, policy)
claireBid = Bid(5, self.claireUser, policy)
self.assertEqual(-1,
JeevesLib.concretize(
AuctionContext(self.bobUser, 11, [aliceBid]), aliceBid.value))
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"JeevesLib.jhasElt",
"JeevesLib.concretize",
"Auction.Bid",
"Auction.AuctionContext",
"JeevesLib.init",
"Auction.User"
] |
[((1807, 1822), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1820, 1822), False, 'import unittest\n'), ((203, 219), 'JeevesLib.init', 'JeevesLib.init', ([], {}), '()\n', (217, 219), False, 'import JeevesLib\n'), ((241, 248), 'Auction.User', 'User', (['(0)'], {}), '(0)\n', (245, 248), False, 'from Auction import AuctionContext, Bid, User\n'), ((268, 275), 'Auction.User', 'User', (['(1)'], {}), '(1)\n', (272, 275), False, 'from Auction import AuctionContext, Bid, User\n'), ((298, 305), 'Auction.User', 'User', (['(2)'], {}), '(2)\n', (302, 305), False, 'from Auction import AuctionContext, Bid, User\n'), ((381, 411), 'Auction.Bid', 'Bid', (['(3)', 'self.aliceUser', 'policy'], {}), '(3, self.aliceUser, policy)\n', (384, 411), False, 'from Auction import AuctionContext, Bid, User\n'), ((429, 466), 'Auction.AuctionContext', 'AuctionContext', (['self.aliceUser', '(0)', '[]'], {}), '(self.aliceUser, 0, [])\n', (443, 466), False, 'from Auction import AuctionContext, Bid, User\n'), ((558, 593), 'Auction.AuctionContext', 'AuctionContext', (['self.bobUser', '(0)', '[]'], {}), '(self.bobUser, 0, [])\n', (572, 593), False, 'from Auction import AuctionContext, Bid, User\n'), ((800, 830), 'Auction.Bid', 'Bid', (['(3)', 'self.aliceUser', 'policy'], {}), '(3, self.aliceUser, policy)\n', (803, 830), False, 'from Auction import AuctionContext, Bid, User\n'), ((1529, 1559), 'Auction.Bid', 'Bid', (['(3)', 'self.aliceUser', 'policy'], {}), '(3, self.aliceUser, policy)\n', (1532, 1559), False, 'from Auction import AuctionContext, Bid, User\n'), ((1573, 1601), 'Auction.Bid', 'Bid', (['(4)', 'self.bobUser', 'policy'], {}), '(4, self.bobUser, policy)\n', (1576, 1601), False, 'from Auction import AuctionContext, Bid, User\n'), ((1618, 1649), 'Auction.Bid', 'Bid', (['(5)', 'self.claireUser', 'policy'], {}), '(5, self.claireUser, policy)\n', (1621, 1649), False, 'from Auction import AuctionContext, Bid, User\n'), ((500, 543), 'JeevesLib.concretize', 'JeevesLib.concretize', (['ctxt0', 
'aliceBid.value'], {}), '(ctxt0, aliceBid.value)\n', (520, 543), False, 'import JeevesLib\n'), ((628, 671), 'JeevesLib.concretize', 'JeevesLib.concretize', (['ctxt1', 'aliceBid.value'], {}), '(ctxt1, aliceBid.value)\n', (648, 671), False, 'import JeevesLib\n'), ((1241, 1293), 'JeevesLib.jhasElt', 'JeevesLib.jhasElt', (['ctxt.bids', '(lambda b: b.owner == u)'], {}), '(ctxt.bids, lambda b: b.owner == u)\n', (1258, 1293), False, 'import JeevesLib\n'), ((897, 933), 'Auction.AuctionContext', 'AuctionContext', (['self.bobUser', '(11)', '[]'], {}), '(self.bobUser, 11, [])\n', (911, 933), False, 'from Auction import AuctionContext, Bid, User\n'), ((1018, 1054), 'Auction.AuctionContext', 'AuctionContext', (['self.bobUser', '(10)', '[]'], {}), '(self.bobUser, 10, [])\n', (1032, 1054), False, 'from Auction import AuctionContext, Bid, User\n'), ((1712, 1756), 'Auction.AuctionContext', 'AuctionContext', (['self.bobUser', '(11)', '[aliceBid]'], {}), '(self.bobUser, 11, [aliceBid])\n', (1726, 1756), False, 'from Auction import AuctionContext, Bid, User\n')]
|
import os
from flask import Flask
def create_app(test_config= None):
app = Flask(__name__, instance_relative_config= True)
app.config.from_mapping(
SECRET_KEY = 'dEV',
DATABASE = os.path.join(app.instance_path, 'flaskr.sqlite')
)
if test_config is None:
app.config.from_pyfile('config.py', silent= True)
else:
app.config.update(test_config)
try:
os.makedirs(app.instance_path)
except OSError:
pass
from flaskr import db
db.init_app(app)
from flaskr import auth, blog
app.register_blueprint(auth.bp)
app.register_blueprint(blog.bp)
app.add_url_rule('/', endpoint='login')
return app
|
[
"os.makedirs",
"flask.Flask",
"os.path.join",
"flaskr.db.init_app"
] |
[((81, 127), 'flask.Flask', 'Flask', (['__name__'], {'instance_relative_config': '(True)'}), '(__name__, instance_relative_config=True)\n', (86, 127), False, 'from flask import Flask\n'), ((510, 526), 'flaskr.db.init_app', 'db.init_app', (['app'], {}), '(app)\n', (521, 526), False, 'from flaskr import db\n'), ((415, 445), 'os.makedirs', 'os.makedirs', (['app.instance_path'], {}), '(app.instance_path)\n', (426, 445), False, 'import os\n'), ((206, 254), 'os.path.join', 'os.path.join', (['app.instance_path', '"""flaskr.sqlite"""'], {}), "(app.instance_path, 'flaskr.sqlite')\n", (218, 254), False, 'import os\n')]
|
import unittest
import filterdesigner.FIRDesign as FIRDesign
import numpy as np
class TestKaiserord(unittest.TestCase):
def setUp(self):
self.f1 = 0.2
self.f2 = 0.3
self.f3 = 0.4
self.f4 = 0.5
self.f5 = 0.6
self.f6 = 0.7
self.m1 = 1
self.m2 = 0
self.dev1 = 0.05
self.dev2 = 0.01
def test_kaiserord_1(self):
# Test case for lowapass filter
self.assertTrue(np.all(FIRDesign.kaiserord([self.f1, self.f2], [self.m1, self.m2], self.dev2) == (45, 0.25, 3.3953210522614574, 'low')))
def test_kaiserord_2(self):
# Test case for highpass filter
self.assertTrue(np.all(FIRDesign.kaiserord([self.f1, self.f2], [self.m2, self.m1], self.dev1) == (26, 0.25, 1.509869637041394, 'high')))
def test_kaiserord_3(self):
# Test case for bandpass filter
ORD = FIRDesign.kaiserord([self.f1, self.f2, self.f3, self.f4], [self.m2, self.m1, self.m2], self.dev2)
self.assertTrue((ORD[0] == 45) and np.all(ORD[1] == [0.25, 0.45]) and (ORD[2] == 3.3953210522614574) and (ORD[3] == 'bandpass'))
def test_kaiserord_4(self):
# Test case for bandstop filter
ORD = FIRDesign.kaiserord([self.f1, self.f2, self.f3, self.f4], [self.m1, self.m2, self.m1], self.dev2)
self.assertTrue((ORD[0] == 46) and np.all(ORD[1] == [0.25, 0.45]) and (ORD[2] == 3.3953210522614574) and (ORD[3] == 'stop'))
def test_kaiserord_5(self):
# Test case for 'DC-1' filter
ORD = FIRDesign.kaiserord([self.f1, self.f2, self.f3, self.f4, self.f5, self.f6], [self.m1, self.m2, self.m1, self.m2], self.dev2)
self.assertTrue((ORD[0] == 45) and np.all(ORD[1] == [0.25, 0.45, 0.6499999999999999]) and (ORD[2] == 3.3953210522614574) and (ORD[3] == 'DC-1'))
def test_kaiserord_6(self):
# Test case for 'DC-0' filter
ORD = FIRDesign.kaiserord([self.f1, self.f2, self.f3, self.f4, self.f5, self.f6], [self.m2, self.m1, self.m2, self.m1], self.dev2)
self.assertTrue((ORD[0] == 46) and np.all(ORD[1] == [0.25, 0.45, 0.6499999999999999]) and (ORD[2] == 3.3953210522614574) and (ORD[3] == 'DC-0'))
def test_kaiserord_7(self):
# Test case for Exception 1
with self.assertRaises(ValueError):
FIRDesign.kaiserord([self.f1, self.f2], [self.m1, self.m2, self.m1], self.dev2)
def test_kaiserord_8(self):
# Test case for Exception 2
with self.assertRaises(ValueError):
FIRDesign.kaiserord([self.f1, self.f2, self.f3, self.f4], [self.m1, self.m2, self.m1], [self.dev1, self.dev2])
def test_kaiserord_9(self):
# Test case for Exception 3
with self.assertRaises(ValueError):
FIRDesign.kaiserord([self.f1, self.f2, self.f3, self.f4], [self.m1, self.m2, self.m1], [self.dev1, -0.2])
|
[
"filterdesigner.FIRDesign.kaiserord",
"numpy.all"
] |
[((916, 1017), 'filterdesigner.FIRDesign.kaiserord', 'FIRDesign.kaiserord', (['[self.f1, self.f2, self.f3, self.f4]', '[self.m2, self.m1, self.m2]', 'self.dev2'], {}), '([self.f1, self.f2, self.f3, self.f4], [self.m2, self.m1,\n self.m2], self.dev2)\n', (935, 1017), True, 'import filterdesigner.FIRDesign as FIRDesign\n'), ((1251, 1352), 'filterdesigner.FIRDesign.kaiserord', 'FIRDesign.kaiserord', (['[self.f1, self.f2, self.f3, self.f4]', '[self.m1, self.m2, self.m1]', 'self.dev2'], {}), '([self.f1, self.f2, self.f3, self.f4], [self.m1, self.m2,\n self.m1], self.dev2)\n', (1270, 1352), True, 'import filterdesigner.FIRDesign as FIRDesign\n'), ((1580, 1708), 'filterdesigner.FIRDesign.kaiserord', 'FIRDesign.kaiserord', (['[self.f1, self.f2, self.f3, self.f4, self.f5, self.f6]', '[self.m1, self.m2, self.m1, self.m2]', 'self.dev2'], {}), '([self.f1, self.f2, self.f3, self.f4, self.f5, self.f6],\n [self.m1, self.m2, self.m1, self.m2], self.dev2)\n', (1599, 1708), True, 'import filterdesigner.FIRDesign as FIRDesign\n'), ((1956, 2084), 'filterdesigner.FIRDesign.kaiserord', 'FIRDesign.kaiserord', (['[self.f1, self.f2, self.f3, self.f4, self.f5, self.f6]', '[self.m2, self.m1, self.m2, self.m1]', 'self.dev2'], {}), '([self.f1, self.f2, self.f3, self.f4, self.f5, self.f6],\n [self.m2, self.m1, self.m2, self.m1], self.dev2)\n', (1975, 2084), True, 'import filterdesigner.FIRDesign as FIRDesign\n'), ((2373, 2452), 'filterdesigner.FIRDesign.kaiserord', 'FIRDesign.kaiserord', (['[self.f1, self.f2]', '[self.m1, self.m2, self.m1]', 'self.dev2'], {}), '([self.f1, self.f2], [self.m1, self.m2, self.m1], self.dev2)\n', (2392, 2452), True, 'import filterdesigner.FIRDesign as FIRDesign\n'), ((2583, 2697), 'filterdesigner.FIRDesign.kaiserord', 'FIRDesign.kaiserord', (['[self.f1, self.f2, self.f3, self.f4]', '[self.m1, self.m2, self.m1]', '[self.dev1, self.dev2]'], {}), '([self.f1, self.f2, self.f3, self.f4], [self.m1, self.m2,\n self.m1], [self.dev1, self.dev2])\n', (2602, 2697), True, 
'import filterdesigner.FIRDesign as FIRDesign\n'), ((2824, 2933), 'filterdesigner.FIRDesign.kaiserord', 'FIRDesign.kaiserord', (['[self.f1, self.f2, self.f3, self.f4]', '[self.m1, self.m2, self.m1]', '[self.dev1, -0.2]'], {}), '([self.f1, self.f2, self.f3, self.f4], [self.m1, self.m2,\n self.m1], [self.dev1, -0.2])\n', (2843, 2933), True, 'import filterdesigner.FIRDesign as FIRDesign\n'), ((1058, 1088), 'numpy.all', 'np.all', (['(ORD[1] == [0.25, 0.45])'], {}), '(ORD[1] == [0.25, 0.45])\n', (1064, 1088), True, 'import numpy as np\n'), ((1393, 1423), 'numpy.all', 'np.all', (['(ORD[1] == [0.25, 0.45])'], {}), '(ORD[1] == [0.25, 0.45])\n', (1399, 1423), True, 'import numpy as np\n'), ((1749, 1799), 'numpy.all', 'np.all', (['(ORD[1] == [0.25, 0.45, 0.6499999999999999])'], {}), '(ORD[1] == [0.25, 0.45, 0.6499999999999999])\n', (1755, 1799), True, 'import numpy as np\n'), ((2125, 2175), 'numpy.all', 'np.all', (['(ORD[1] == [0.25, 0.45, 0.6499999999999999])'], {}), '(ORD[1] == [0.25, 0.45, 0.6499999999999999])\n', (2131, 2175), True, 'import numpy as np\n'), ((489, 559), 'filterdesigner.FIRDesign.kaiserord', 'FIRDesign.kaiserord', (['[self.f1, self.f2]', '[self.m1, self.m2]', 'self.dev2'], {}), '([self.f1, self.f2], [self.m1, self.m2], self.dev2)\n', (508, 559), True, 'import filterdesigner.FIRDesign as FIRDesign\n'), ((711, 781), 'filterdesigner.FIRDesign.kaiserord', 'FIRDesign.kaiserord', (['[self.f1, self.f2]', '[self.m2, self.m1]', 'self.dev1'], {}), '([self.f1, self.f2], [self.m2, self.m1], self.dev1)\n', (730, 781), True, 'import filterdesigner.FIRDesign as FIRDesign\n')]
|
__all__ = ['ArduCopter']
import logging
import os
from .state import State
from .sandbox import Sandbox
from .goto import GoTo
from .setmode import SetMode
from .takeoff import Takeoff
from .parachute import Parachute
from ..command_factory import read_commands_yml
from ..base import BaseSystem
from ..common import ArmDisarm
from ..configuration import Configuration
logger = logging.getLogger(__name__) # type: logging.Logger
logger.setLevel(logging.DEBUG)
dirname = os.path.dirname(__file__)
class ArduCopter(BaseSystem):
name = 'arducopter'
state = State
sandbox = Sandbox
configuration = Configuration
# commands = [
# GoTo,
# Takeoff,
# ArmDisarm,
# SetMode,
# Parachute
# ]
commands = read_commands_yml(os.path.join(dirname, 'commands.yml'))
|
[
"os.path.dirname",
"os.path.join",
"logging.getLogger"
] |
[((381, 408), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (398, 408), False, 'import logging\n'), ((475, 500), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (490, 500), False, 'import os\n'), ((779, 816), 'os.path.join', 'os.path.join', (['dirname', '"""commands.yml"""'], {}), "(dirname, 'commands.yml')\n", (791, 816), False, 'import os\n')]
|
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
from future.builtins import object
from limpyd.contrib.related import re_identifier
from ..related import (FKStringField, FKInstanceHashField,
M2MSetField, M2MListField, M2MSortedSetField,
RelatedCollectionForString, RelatedCollectionForInstanceHash,
RelatedCollectionForSet, RelatedCollectionForList, RelatedCollectionForSortedSet)
from .fields import DynamicFieldMixin
class RelatedCollectionMixinForDynamicField(object):
def __call__(self, dynamic_part, **filters):
"""
Return a collection on the related model, given the current instance as
a filter for the related field. Take the dyanmic part of the dynamic
field to consider as a first argument.
"""
dynamic_field_name = self.related_field.get_name_for(dynamic_part)
if not filters:
filters = {}
filters[dynamic_field_name] = self.instance._pk
return self.related_field._model.collection(**filters)
class RelatedCollectionForDynamicString(RelatedCollectionMixinForDynamicField, RelatedCollectionForString):
pass
class RelatedCollectionForDynamicInstanceHash(RelatedCollectionMixinForDynamicField, RelatedCollectionForInstanceHash):
pass
class RelatedCollectionForDynamicSet(RelatedCollectionMixinForDynamicField, RelatedCollectionForSet):
pass
class RelatedCollectionForDynamicList(RelatedCollectionMixinForDynamicField, RelatedCollectionForList):
pass
class RelatedCollectionForDynamicSortedSet(RelatedCollectionMixinForDynamicField, RelatedCollectionForSortedSet):
pass
class DynamicRelatedFieldMixin(DynamicFieldMixin):
"""
As the related name must be unique for a relation between two objects, we
have to make a fake one for this dynamic field, based on its dynamic name.
"""
def _get_related_name(self):
if self.dynamic_version_of is not None:
self.related_name = '%s__%s__%s' % (
self.dynamic_version_of.related_name,
re_identifier.sub('_', self.dynamic_part),
id(self)
)
return super(DynamicRelatedFieldMixin, self)._get_related_name()
def get_name_for(self, dynamic_part):
"""
Return the name for the current dynamic field, accepting a limpyd
instance for the dynamic part
"""
dynamic_part = self.from_python(dynamic_part)
return super(DynamicRelatedFieldMixin, self).get_name_for(dynamic_part)
class DynamicFKStringField(DynamicRelatedFieldMixin, FKStringField):
related_collection_class = RelatedCollectionForDynamicString
class DynamicFKInstanceHashField(DynamicRelatedFieldMixin, FKInstanceHashField):
related_collection_class = RelatedCollectionForDynamicInstanceHash
class DynamicM2MSetField(DynamicRelatedFieldMixin, M2MSetField):
related_collection_class = RelatedCollectionForDynamicSet
class DynamicM2MListField(DynamicRelatedFieldMixin, M2MListField):
related_collection_class = RelatedCollectionForDynamicList
class DynamicM2MSortedSetField(DynamicRelatedFieldMixin, M2MSortedSetField):
related_collection_class = RelatedCollectionForDynamicSortedSet
|
[
"limpyd.contrib.related.re_identifier.sub"
] |
[((2114, 2155), 'limpyd.contrib.related.re_identifier.sub', 're_identifier.sub', (['"""_"""', 'self.dynamic_part'], {}), "('_', self.dynamic_part)\n", (2131, 2155), False, 'from limpyd.contrib.related import re_identifier\n')]
|
# The MIT License (MIT)
# Copyright (c) 2021 by Brockmann Consult GmbH and contributors
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from datetime import datetime
from typing import List
from typing import Tuple, Optional
import pandas as pd
import xarray as xr
from .custom import load_custom_func
from .error import ConverterError
from .log import LOGGER
class DatasetPreProcessor:
def __init__(self, *,
input_variables: List[str] = None,
input_custom_preprocessor: str = None,
input_concat_dim: str = None,
input_datetime_format: str = None):
self._input_variables = input_variables
self._input_custom_preprocessor = load_custom_func(input_custom_preprocessor) \
if input_custom_preprocessor else None
self._input_concat_dim = input_concat_dim
self._input_datetime_format = input_datetime_format
self._first_dataset_shown = False
def preprocess_dataset(self, ds: xr.Dataset) -> xr.Dataset:
if self._input_variables:
drop_variables = set(ds.variables).difference(self._input_variables)
ds = ds.drop_vars(drop_variables)
if self._input_custom_preprocessor is not None:
ds = self._input_custom_preprocessor(ds)
if self._input_concat_dim:
ds = ensure_dataset_has_concat_dim(ds, self._input_concat_dim,
datetime_format=self._input_datetime_format)
if not self._first_dataset_shown:
LOGGER.debug(f'First input dataset:\n{ds}')
self._first_dataset_shown = True
return ds
def ensure_dataset_has_concat_dim(ds: xr.Dataset,
concat_dim_name: str,
datetime_format: str = None) -> xr.Dataset:
"""
Ensure dataset *ds* has dimension *concat_dim*.
:param ds: Dataset to adjust
:param concat_dim_name: Name of dimension to be appended
:param datetime_format: Name of dimension to be appended
:return: Adjusted dataset
"""
concat_dim_var = None
if concat_dim_name in ds:
concat_dim_var = ds[concat_dim_name]
if concat_dim_var is not None:
if not concat_dim_var.dims:
# If the concat_dim_var does not yet has a dimension, add it.
# This is typically the case if time value is a scalar rather
# than an 1-element array.
encoding = concat_dim_var.encoding
concat_dim_var = xr.DataArray([concat_dim_var.values],
dims=(concat_dim_name,),
attrs=concat_dim_var.attrs)
concat_dim_var.encoding.update(encoding)
ds = ds.assign_coords({concat_dim_name: concat_dim_var})
elif concat_dim_name == 'time':
time_coverage_start, time_coverage_end = \
get_time_coverage_from_ds(ds, datetime_format=datetime_format)
time_coverage_start = time_coverage_start or time_coverage_end
time_coverage_end = time_coverage_end or time_coverage_start
time_coverage_avg = time_coverage_start + 0.5 \
* (time_coverage_end - time_coverage_start)
ds = ds.assign_coords(
time=xr.DataArray([time_coverage_avg],
dims=('time',),
attrs=dict(bounds='time_bnds')),
time_bnds=xr.DataArray([[time_coverage_start, time_coverage_end]],
dims=('time', 'bnds'))
)
concat_dim_var = ds.time
else:
# Can't do anything
raise ConverterError(f'Missing (coordinate) variable '
f'"{concat_dim_name}" for dimension '
f'"{concat_dim_name}".')
is_concat_dim_used = any((concat_dim_name in ds[var_name].dims)
for var_name in ds.data_vars)
if not is_concat_dim_used:
concat_dim_bnds_name = concat_dim_var.attrs.get('bounds',
f'{concat_dim_name}_bnds')
concat_dim_bnds_var = ds[concat_dim_bnds_name] \
if concat_dim_bnds_name in ds else None
# ds.expand_dims() will raise if coordinates exist,
# so remove them temporarily
ds = ds.drop_vars(concat_dim_name)
# if concat_dim_name is still a dimension, drop it too
if concat_dim_name in ds.dims:
ds = ds.drop_dims(concat_dim_name)
# expand dataset by concat_dim_name/concat_dim_var,
# this will add the dimension and the coordinate
ds = ds.expand_dims({concat_dim_name: concat_dim_var})
# ds.expand_dims() does not use the attributes of new
# variable given by concat_dim_name, so we need to
# assign it ourselves:
ds[concat_dim_name].attrs.update(concat_dim_var.attrs)
ds[concat_dim_name].encoding.update(concat_dim_var.encoding)
# also (re)assign bounds coordinates
if concat_dim_bnds_var is not None:
# concat_dim_bnds may have been removed during drop_vars() - execution,
# so we may have to set it again
if concat_dim_bnds_name in ds:
ds = ds.set_coords(concat_dim_bnds_name)
else:
ds = ds.assign_coords({concat_dim_bnds_name: concat_dim_bnds_var})
return ds
def get_time_coverage_from_ds(ds: xr.Dataset,
                              datetime_format: str = None) -> Tuple[datetime, datetime]:
    """Read the time coverage interval from dataset attributes.

    Looks up ``time_coverage_start`` / ``time_coverage_end``; if only one
    is present it is used for both ends of the interval.

    :raises ConverterError: when neither attribute can be parsed.
    """
    def _attr_timestamp(name):
        raw = ds.attrs.get(name)
        if raw is None:
            return None
        return parse_timestamp(raw, datetime_format=datetime_format)

    start = _attr_timestamp('time_coverage_start')
    end = _attr_timestamp('time_coverage_end')
    # Fall back to the other end when one side is missing.
    start = start or end
    end = end or start
    if start and end:
        return start, end
    # TODO: use special parameters to parse
    # time_coverage_start, time_coverage_end from source_path
    # source_path = ds.encoding.get('source', '')
    raise ConverterError('Missing time_coverage_start and/or '
                         'time_coverage_end in dataset attributes.')
def parse_timestamp(string: str, datetime_format: str = None) -> Optional[datetime]:
    """Parse *string* into a pandas timestamp.

    :raises ConverterError: (chained from the underlying ValueError) when
        the string cannot be parsed with *datetime_format*.
    """
    try:
        parsed = pd.to_datetime(string, format=datetime_format)
    except ValueError as exc:
        raise ConverterError(f'Cannot parse timestamp from "{string}".') from exc
    return parsed
|
[
"pandas.to_datetime",
"xarray.DataArray"
] |
[((7746, 7792), 'pandas.to_datetime', 'pd.to_datetime', (['string'], {'format': 'datetime_format'}), '(string, format=datetime_format)\n', (7760, 7792), True, 'import pandas as pd\n'), ((3548, 3643), 'xarray.DataArray', 'xr.DataArray', (['[concat_dim_var.values]'], {'dims': '(concat_dim_name,)', 'attrs': 'concat_dim_var.attrs'}), '([concat_dim_var.values], dims=(concat_dim_name,), attrs=\n concat_dim_var.attrs)\n', (3560, 3643), True, 'import xarray as xr\n'), ((4489, 4568), 'xarray.DataArray', 'xr.DataArray', (['[[time_coverage_start, time_coverage_end]]'], {'dims': "('time', 'bnds')"}), "([[time_coverage_start, time_coverage_end]], dims=('time', 'bnds'))\n", (4501, 4568), True, 'import xarray as xr\n')]
|
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
## From the good people at Stackoverflow
## http://stackoverflow.com/questions/273192/python-best-way-to-create-directory-if-it-doesnt-exist-for-file-write
def ensure_dir(f):
    """Create the parent directory of path *f* if it does not exist yet.

    Fixes two defects in the original exists()/makedirs() pair:
    - TOCTOU race: another process could create the directory between the
      exists() check and makedirs(); ``exist_ok=True`` makes it safe.
    - A bare filename has an empty dirname, and ``os.makedirs('')``
      raises; guard against that case.
    """
    d = os.path.dirname(f)
    if d:
        os.makedirs(d, exist_ok=True)
|
[
"os.path.dirname",
"os.path.exists",
"os.makedirs"
] |
[((792, 810), 'os.path.dirname', 'os.path.dirname', (['f'], {}), '(f)\n', (807, 810), False, 'import os\n'), ((822, 839), 'os.path.exists', 'os.path.exists', (['d'], {}), '(d)\n', (836, 839), False, 'import os\n'), ((849, 863), 'os.makedirs', 'os.makedirs', (['d'], {}), '(d)\n', (860, 863), False, 'import os\n')]
|
import json
import torch
import torch.nn as nn
import torch.nn.functional as F
PAD_token = 0 # Used for padding short sentences
SOS_token = 1 # Start-of-sentence token
EOS_token = 2 # End-of-sentence token
# TODO: `.to(device=device)` for all tensors
class EncoderRNN(nn.Module):
    """Bidirectional GRU encoder; the two directions' outputs are summed."""

    def __init__(self, hidden_size, n_layers=1, dropout=0):
        super().__init__()
        self.n_layers = n_layers
        self.hidden_size = hidden_size
        # NOTE(review): reads the module-level `voc` for vocabulary size.
        self.embedding = nn.Embedding(voc.num_words, hidden_size)
        gru_dropout = 0 if n_layers == 1 else dropout
        self.gru = nn.GRU(hidden_size, hidden_size, n_layers,
                          dropout=gru_dropout, bidirectional=True)

    def forward(self, input_seq, input_lengths, hidden=None):
        packed = nn.utils.rnn.pack_padded_sequence(
            self.embedding(input_seq), input_lengths)
        outputs, hidden = self.gru(packed, hidden)
        outputs, _ = nn.utils.rnn.pad_packed_sequence(outputs)
        # Sum the forward and backward halves of the bidirectional output.
        outputs = (outputs[:, :, :self.hidden_size]
                   + outputs[:, :, self.hidden_size:])
        # only first part of the hidden layer is required for the decoder
        return outputs, hidden[:self.n_layers]
class Attn(nn.Module):
    """Dot-product attention scoring decoder state against encoder outputs."""

    def __init__(self, hidden_size):
        super().__init__()
        self.hidden_size = hidden_size

    def forward(self, hidden, encoder_output):
        # Dot score: element-wise product summed over the feature dim.
        scores = (hidden * encoder_output).sum(dim=2)
        # Softmax over the source sequence; shape (batch, 1, seq) for bmm.
        return F.softmax(scores.t(), dim=1).unsqueeze(1)
class LuongAttnDecoderRNN(nn.Module):
    """Single-step GRU decoder with Luong-style (dot) attention."""

    def __init__(
            self, hidden_size,
            output_size, n_layers=1, dropout=0.1):
        super().__init__()
        self.hidden_size = hidden_size
        self.output_size = output_size
        self.n_layers = n_layers
        self.dropout = dropout
        # Layers
        # NOTE(review): reads the module-level `voc` for vocabulary size.
        self.embedding = nn.Embedding(voc.num_words, hidden_size)
        self.embedding_dropout = nn.Dropout(dropout)
        self.gru = nn.GRU(hidden_size, hidden_size, n_layers,
                          dropout=0 if n_layers == 1 else dropout)
        self.concat = nn.Linear(hidden_size * 2, hidden_size)
        self.out = nn.Linear(hidden_size, output_size)
        self.attn = Attn(hidden_size)

    def forward(self, input_seq, last_hidden, encoder_outputs):
        embedded = self.embedding_dropout(self.embedding(input_seq))
        rnn_output, hidden = self.gru(embedded, last_hidden)
        # Attention-weighted context over the encoder outputs.
        attn_weights = self.attn(rnn_output, encoder_outputs)
        context = attn_weights.bmm(encoder_outputs.transpose(0, 1)).squeeze(1)
        # Concatenate decoder state with context, project and squash.
        concat_input = torch.cat((rnn_output.squeeze(0), context), 1)
        concat_output = torch.tanh(self.concat(concat_input))
        output = F.softmax(self.out(concat_output), dim=1)
        return output, hidden
class Voc:
    """Vocabulary container; real state is restored from voc.json below."""

    def __init__(self, name):
        self.name = name
        self.trimmed = False  # whether rare words were pruned
        self.word2index = {}
        self.word2count = {}
        # Reserved tokens occupy the first indices; words follow after.
        self.index2word = {PAD_token: "PAD",
                           SOS_token: "SOS",
                           EOS_token: "EOS"}
        self.num_words = len(self.index2word)
voc = Voc(name=None)
# Restore the vocabulary state that was serialized during training.
with open('voc.json') as f:
    voc.__dict__ = json.load(f)
# Hyper-parameters; must match the checkpointed weights below.
hidden_size = 500
encoder_n_layers = 2
decoder_n_layers = 2
dropout = 0.1
MAX_LENGTH = 10
# --- Encoder: load weights and trace to TorchScript ---
encoder = EncoderRNN(hidden_size, encoder_n_layers, dropout)
encoder_params = torch.load('weights/encoder.pth', map_location='cpu')
encoder.load_state_dict(encoder_params)
encoder.eval()
# Dummy inputs used only to drive the trace.
seq = torch.ones((MAX_LENGTH, 1), dtype=torch.long)
seq_length = torch.tensor([seq.size()[0]])
traced_encoder = torch.jit.trace(encoder, (seq, seq_length))
# --- Decoder: load weights and trace with the encoder's outputs ---
decoder = LuongAttnDecoderRNN(
    hidden_size, voc.num_words, decoder_n_layers, dropout)
decoder_params = torch.load('weights/decoder.pth', map_location='cpu')
decoder.load_state_dict(decoder_params)
decoder.eval()
test_encoder_outputs, test_encoder_hidden = traced_encoder(seq, seq_length)
test_decoder_hidden = test_encoder_hidden[:decoder.n_layers]
test_decoder_input = torch.LongTensor(1, 1).random_(0, voc.num_words)
traced_decoder = torch.jit.trace(
    decoder, (test_decoder_input, test_decoder_hidden, test_encoder_outputs))
# Serialize the traced modules for later loading (e.g. from C++/mobile).
traced_encoder.save('encoder.pt')
traced_decoder.save('decoder.pt')
|
[
"torch.nn.Dropout",
"torch.ones",
"torch.nn.GRU",
"json.load",
"torch.jit.trace",
"torch.LongTensor",
"torch.load",
"torch.nn.Embedding",
"torch.cat",
"torch.nn.functional.softmax",
"torch.nn.Linear",
"torch.nn.utils.rnn.pad_packed_sequence",
"torch.nn.utils.rnn.pack_padded_sequence",
"torch.sum"
] |
[((3531, 3584), 'torch.load', 'torch.load', (['"""weights/encoder.pth"""'], {'map_location': '"""cpu"""'}), "('weights/encoder.pth', map_location='cpu')\n", (3541, 3584), False, 'import torch\n'), ((3646, 3691), 'torch.ones', 'torch.ones', (['(MAX_LENGTH, 1)'], {'dtype': 'torch.long'}), '((MAX_LENGTH, 1), dtype=torch.long)\n', (3656, 3691), False, 'import torch\n'), ((3752, 3795), 'torch.jit.trace', 'torch.jit.trace', (['encoder', '(seq, seq_length)'], {}), '(encoder, (seq, seq_length))\n', (3767, 3795), False, 'import torch\n'), ((3904, 3957), 'torch.load', 'torch.load', (['"""weights/decoder.pth"""'], {'map_location': '"""cpu"""'}), "('weights/decoder.pth', map_location='cpu')\n", (3914, 3957), False, 'import torch\n'), ((4237, 4330), 'torch.jit.trace', 'torch.jit.trace', (['decoder', '(test_decoder_input, test_decoder_hidden, test_encoder_outputs)'], {}), '(decoder, (test_decoder_input, test_decoder_hidden,\n test_encoder_outputs))\n', (4252, 4330), False, 'import torch\n'), ((3348, 3360), 'json.load', 'json.load', (['f'], {}), '(f)\n', (3357, 3360), False, 'import json\n'), ((472, 512), 'torch.nn.Embedding', 'nn.Embedding', (['voc.num_words', 'hidden_size'], {}), '(voc.num_words, hidden_size)\n', (484, 512), True, 'import torch.nn as nn\n'), ((532, 639), 'torch.nn.GRU', 'nn.GRU', (['hidden_size', 'hidden_size', 'n_layers'], {'dropout': '(0 if n_layers == 1 else dropout)', 'bidirectional': '(True)'}), '(hidden_size, hidden_size, n_layers, dropout=0 if n_layers == 1 else\n dropout, bidirectional=True)\n', (538, 639), True, 'import torch.nn as nn\n'), ((788, 846), 'torch.nn.utils.rnn.pack_padded_sequence', 'nn.utils.rnn.pack_padded_sequence', (['embedded', 'input_lengths'], {}), '(embedded, input_lengths)\n', (821, 846), True, 'import torch.nn as nn\n'), ((932, 973), 'torch.nn.utils.rnn.pad_packed_sequence', 'nn.utils.rnn.pad_packed_sequence', (['outputs'], {}), '(outputs)\n', (964, 973), True, 'import torch.nn as nn\n'), ((1409, 1450), 'torch.sum', 'torch.sum', 
(['(hidden * encoder_output)'], {'dim': '(2)'}), '(hidden * encoder_output, dim=2)\n', (1418, 1450), False, 'import torch\n'), ((1912, 1952), 'torch.nn.Embedding', 'nn.Embedding', (['voc.num_words', 'hidden_size'], {}), '(voc.num_words, hidden_size)\n', (1924, 1952), True, 'import torch.nn as nn\n'), ((1986, 2005), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (1996, 2005), True, 'import torch.nn as nn\n'), ((2025, 2112), 'torch.nn.GRU', 'nn.GRU', (['hidden_size', 'hidden_size', 'n_layers'], {'dropout': '(0 if n_layers == 1 else dropout)'}), '(hidden_size, hidden_size, n_layers, dropout=0 if n_layers == 1 else\n dropout)\n', (2031, 2112), True, 'import torch.nn as nn\n'), ((2158, 2197), 'torch.nn.Linear', 'nn.Linear', (['(hidden_size * 2)', 'hidden_size'], {}), '(hidden_size * 2, hidden_size)\n', (2167, 2197), True, 'import torch.nn as nn\n'), ((2217, 2252), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'output_size'], {}), '(hidden_size, output_size)\n', (2226, 2252), True, 'import torch.nn as nn\n'), ((2747, 2782), 'torch.cat', 'torch.cat', (['(rnn_output, context)', '(1)'], {}), '((rnn_output, context), 1)\n', (2756, 2782), False, 'import torch\n'), ((2903, 2927), 'torch.nn.functional.softmax', 'F.softmax', (['output'], {'dim': '(1)'}), '(output, dim=1)\n', (2912, 2927), True, 'import torch.nn.functional as F\n'), ((4171, 4193), 'torch.LongTensor', 'torch.LongTensor', (['(1)', '(1)'], {}), '(1, 1)\n', (4187, 4193), False, 'import torch\n'), ((1508, 1539), 'torch.nn.functional.softmax', 'F.softmax', (['attn_energies'], {'dim': '(1)'}), '(attn_energies, dim=1)\n', (1517, 1539), True, 'import torch.nn.functional as F\n')]
|
#!/usr/bin/env python3
import argparse
import os
import sys
import itertools
# input file data with illegal characters removed,
# converted according to JESD71 Table 2
# 6 bits per index
inputSymbols = []
# decompressed output data
# 8 bits per index
outputBytes = []
# uncompressed data length in bytes
uncompressed_len = 0
# bit index tracker used by get_bit()
next_bitIdx = 0
# convert according to JESD71 Table 2:
# '0' - '9' : 0b000000 - 0b001001
# 'A' - 'Z' : 0b001010 - 0b100011
# 'a' - 'z' : 0b100100 - 0b111101
# '_' : 0b111110
# '@' : 0b111111
# error : -1
def convert_input_char_to_binary(inChar):
    """Map one character to its 6-bit value per JESD71 Table 2.

    '0'-'9' -> 0-9, 'A'-'Z' -> 10-35, 'a'-'z' -> 36-61, '_' -> 62,
    '@' -> 63; any other character yields -1 (illegal, to be skipped).
    """
    code = ord(inChar)
    if ord('0') <= code <= ord('9'):
        return code - ord('0')
    if ord('A') <= code <= ord('Z'):
        return code - ord('A') + 10
    if ord('a') <= code <= ord('z'):
        return code - ord('a') + 36
    if inChar == '_':
        return 62
    if inChar == '@':
        return 63
    return -1
# get the least significant bit of the least significant symbol where bit 0 of
# symbol 0 has the lowest value.
# the index is updated for each bit returned.
#
# returns bitvalue [0, 1] or -1 if end of list is reached
def get_bit():
    """Return the next bit from inputSymbols and advance the cursor.

    Bits are taken LSB-first within each 6-bit symbol, lowest symbol
    first. Returns 0 or 1, or -1 once the symbol list is exhausted.
    The cursor advances even on -1, matching the original behaviour.
    """
    global next_bitIdx
    symbol_index, bit_index = divmod(next_bitIdx, 6)
    next_bitIdx += 1
    if symbol_index >= len(inputSymbols):
        return -1
    return (inputSymbols[symbol_index] >> bit_index) & 1
# parse inputfile and write uncompressed data to outputfile
#
# when outputDirReversed is False, the lowest output address is at the beginning of
# a line; otherwise the lowest address is at the end of a line
def parse_input(inputfile,
                outputfile,
                outputDirReversed):
    """Decompress ACA-compressed text from *inputfile* and write the
    result to *outputfile* as a hex dump (32 bytes per line).

    When outputDirReversed is False the lowest output address is at the
    beginning of a line; otherwise it is at the end of a line.

    NOTE(review): fills the module-level inputSymbols/outputBytes lists
    and consumes get_bit()'s global cursor, so one call per process run.
    """
    searchForFirstAt = True
    searchForFirstLiteral = True
    with inputfile as f:
        # Stream the file character by character.
        for c in itertools.chain.from_iterable(f):
            o = convert_input_char_to_binary(c)
            # ignore anything upto and including the first '@'
            if (searchForFirstAt):
                if (o == 63):
                    searchForFirstAt = False
                continue
            # Keep only legal 6-bit symbols; anything else is skipped.
            if ((o >= 0) and
                (o <= 63)):
                inputSymbols.append(o)
    # extract 32-bit uncompressed length (LSB of byte 0 first)
    uncompressed_len = 0;
    for byteWriteIdx in range(0, 4, 1):
        for bitWriteIdx in range(0, 8, 1):
            bit = get_bit()
            if (bit < 0):
                print('ran out of bits while reading uncompressed length')
                exit()
            uncompressed_len |= (bit << ((8 * byteWriteIdx) + bitWriteIdx))
    outputIdx = 0
    while (outputIdx < uncompressed_len):
        # first bit of an object determines it's type
        bit = get_bit()
        if (bit < 0):
            print('ran out of bits before enough output data was collected')
            exit()
        elif (bit >= 1):
            # repeat object
            if (searchForFirstLiteral):
                print('expecting first object to be a literal, found repeat instead')
                exit()
            # take outPutIdx as offset of the first repeated byte
            #
            # calculate N as the smallest number of bits (in the range 1..13 ) that can
            # represent the offset
            repeatWriteOffset = outputIdx
            N = int.bit_length(repeatWriteOffset)
            N = min(max(N, 1), 13)
            # read out the offset, this value gets subtracted from the current output
            # index in order to obtain from where the repeat pattern starts
            offset = 0
            for bitWriteIdx in range(0, N, 1):
                bit = get_bit()
                if (bit < 0):
                    print('unexpected end of bits while reading repeat object')
                    exit()
                offset |= (bit << bitWriteIdx)
            # read out the following 8 bits, this is the length (in the range 4..255)
            # of the section to repeat.
            #
            # the standard is a bit broken in this regard - if input has a literal
            # followed by a repeat block as it's first contents, repLen can not be
            # greater than 3. Check that repLen is greater than 3 instead of 4.
            repLen = 0
            for bitWriteIdx in range(0, 8, 1):
                bit = get_bit()
                if (bit < 0):
                    print('unexpected end of bits while reading repeat object')
                    exit()
                repLen |= (bit << bitWriteIdx)
            # sanity check offset and length
            if (offset > repeatWriteOffset):
                print('repeat object has offset field that causes negative data index')
                exit()
            if (repLen < 3):
                print('repeat object has length field less than 3')
                exit()
            # not clear from the standard if a repeat object can repeat the bytes it
            # has just written or not.
            #
            # this implementation assumes that it's ok, and will allow it.
            byteCopyIdx = (repeatWriteOffset - offset)
            while ((repLen > 0) and
                   (outputIdx < uncompressed_len)):
                outputBytes.append(outputBytes[byteCopyIdx])
                outputIdx += 1
                byteCopyIdx += 1
                repLen -= 1
        else:
            # literal object: three raw bytes, LSB-first per byte
            searchForFirstLiteral = False
            for byteWriteIdx in range(0, 3, 1):
                byteVal = 0
                for bitWriteIdx in range(0, 8, 1):
                    bit = get_bit()
                    if (bit < 0):
                        print('unexpected end of bits while reading literal object')
                        exit()
                    byteVal |= (bit << bitWriteIdx)
                outputBytes.append(byteVal)
                outputIdx += 1
    # Emit the hex dump, 32 byte fields ("XX ") per line.
    with outputfile as f:
        oneLine = ""
        colCnt = 0
        for i in range(0, uncompressed_len, 1):
            if (outputDirReversed):
                oneLine = (hex(outputBytes[i]).upper()[2:].zfill(2) + ' ') + oneLine
            else:
                oneLine = oneLine + (hex(outputBytes[i]).upper()[2:].zfill(2) + ' ')
            colCnt += 1
            if (colCnt >= 32):
                print(oneLine, file=f)
                colCnt = 0
                oneLine = ""
        # flush out the last unfinished line
        if (colCnt != 0):
            if (outputDirReversed):
                # first pad the left side of the line
                oneLine = ("   " * (32 - colCnt)) + oneLine;
            print(oneLine, file=f)
# Command-line entry: decompress one ACA blob from --infile to --outfile.
parser = argparse.ArgumentParser(
    description = 'decompress ACA-compressed data of one boolean array object')
parser.add_argument('--infile',
                    type=argparse.FileType('r', encoding='UTF-8'),
                    required=True,
                    help="input file")
parser.add_argument('--reverse_output_line_dir',
                    required=False,
                    action='store_true',
                    help="output data index 0 begins from the right margin")
parser.add_argument('--outfile',
                    type=argparse.FileType('w', encoding='UTF-8'),
                    required=True,
                    help="output file")
args = parser.parse_args()
# NOTE(review): argparse already enforces required=True, so this branch
# is effectively dead; kept as a belt-and-braces check.
if ((not args.infile) or
    (not args.outfile)):
    print(parser.format_help())
    args.infile.close()
    args.outfile.close()
    exit()
# NOTE(review): unused here — parse_input keeps its own local flag.
searchForFirstAt = True
parse_input(args.infile,
            args.outfile,
            args.reverse_output_line_dir)
args.infile.close()
args.outfile.close()
exit()
|
[
"itertools.chain.from_iterable",
"argparse.ArgumentParser",
"argparse.FileType"
] |
[((5806, 5908), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""decompress ACA-compressed data of one boolean array object"""'}), "(description=\n 'decompress ACA-compressed data of one boolean array object')\n", (5829, 5908), False, 'import argparse\n'), ((1999, 2031), 'itertools.chain.from_iterable', 'itertools.chain.from_iterable', (['f'], {}), '(f)\n', (2028, 2031), False, 'import itertools\n'), ((5951, 5991), 'argparse.FileType', 'argparse.FileType', (['"""r"""'], {'encoding': '"""UTF-8"""'}), "('r', encoding='UTF-8')\n", (5968, 5991), False, 'import argparse\n'), ((6241, 6281), 'argparse.FileType', 'argparse.FileType', (['"""w"""'], {'encoding': '"""UTF-8"""'}), "('w', encoding='UTF-8')\n", (6258, 6281), False, 'import argparse\n')]
|
"""
Update URL definitions:
https://docs.djangoproject.com/en/2.0/releases/2.0/#simplified-url-routing-syntax
"""
from __future__ import annotations
import ast
import re
from functools import partial
from typing import Iterable, MutableMapping
from weakref import WeakKeyDictionary
from tokenize_rt import Offset, Token
from django_upgrade.ast import ast_start_offset, is_rewritable_import_from
from django_upgrade.compat import str_removeprefix
from django_upgrade.data import Fixer, State, TokenFunc
from django_upgrade.tokens import (
STRING,
extract_indent,
find,
insert,
replace,
update_import_names,
)
fixer = Fixer(
__name__,
min_version=(2, 0),
)
@fixer.register(ast.ImportFrom)
def visit_ImportFrom(
    state: State,
    node: ast.ImportFrom,
    parent: ast.AST,
) -> Iterable[tuple[Offset, TokenFunc]]:
    """Flag `from django.conf.urls import include/url` for rewriting."""
    if node.module != "django.conf.urls":
        return
    if not is_rewritable_import_from(node):
        return
    if any(alias.name in ("include", "url") for alias in node.names):
        yield ast_start_offset(node), partial(
            update_import, node=node, state=state)
# Track which of path and re_path have been used for this current file
# Then when backtracking into an import statement, we can use the set of names
# to determine what names to import.
state_used_names: MutableMapping[State, set[str]] = WeakKeyDictionary()
def update_import(
    tokens: list[Token], i: int, *, node: ast.ImportFrom, state: State
) -> None:
    """Rewrite the django.conf.urls import to pull names from django.urls.

    `include` maps straight across; `url` is replaced by whichever of
    path/re_path the call fixer recorded in state_used_names. Aliased
    imports are left untouched.
    """
    used_names = state_used_names.pop(state, set())
    removals: set[str] = set()
    additions: set[str] = set()
    for alias in node.names:
        if alias.asname is not None:
            continue
        if alias.name == "include":
            removals.add("include")
            additions.add("include")
        elif alias.name == "url" and used_names:
            removals.add("url")
            additions.update(used_names)
    if not removals:
        return
    j, indent = extract_indent(tokens, i)
    update_import_names(
        tokens,
        i,
        node=node,
        name_map={name: "" for name in removals},
    )
    joined_names = ", ".join(sorted(additions))
    insert(
        tokens,
        j,
        new_src=f"{indent}from django.urls import {joined_names}\n",
    )
@fixer.register(ast.Call)
def visit_Call(
    state: State,
    node: ast.Call,
    parent: ast.AST,
) -> Iterable[tuple[Offset, TokenFunc]]:
    """Flag url(...) calls for conversion to path()/re_path()."""
    if not isinstance(node.func, ast.Name):
        return
    if node.func.id != "url":
        return
    if "url" not in state.from_imports["django.conf.urls"]:
        return
    # cannot convert where called with all kwargs as names don't align
    if len(node.args) < 1:
        return
    first_arg = node.args[0]
    regex_path: str | None = None
    if isinstance(first_arg, ast.Constant) and isinstance(first_arg.value, str):
        regex_path = first_arg.value
    yield ast_start_offset(node), partial(
        fix_url_call, regex_path=regex_path, state=state)
def fix_url_call(
    tokens: list[Token], i: int, *, regex_path: str | None, state: State
) -> None:
    """Rename one url(...) call to path(...) or re_path(...).

    When the regex argument is a plain string convertible to path()
    syntax, the string literal is rewritten too; otherwise the call
    simply becomes re_path() with the regex untouched.
    """
    new_name = "re_path"
    if regex_path is not None:
        converted = convert_path_syntax(regex_path)
        if converted is not None:
            string_idx = find(tokens, i, name=STRING)
            replace(tokens, string_idx, src=repr(converted))
            new_name = "path"
    # Record which replacement name the import fixer must add.
    state_used_names.setdefault(state, set()).add(new_name)
    replace(tokens, i, src=new_name)
REGEX_TO_CONVERTER = {
"[0-9]+": "int",
r"\d+": "int",
".+": "path",
"[-a-zA-Z0-9_]+": "slug",
"[^/]+": "str",
"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}": "uuid",
}
def convert_path_syntax(regex_path: str) -> str | None:
    """Translate a ^...$ regex URL pattern into path() syntax.

    Returns None when conversion is impossible: pattern is not
    end-anchored, a named group's regex has no known converter, or
    regex metacharacters remain outside named groups (dashes excluded —
    they only carry meaning inside [] character classes).
    """
    if not regex_path.endswith("$"):
        return None
    remaining = str_removeprefix(regex_path[:-1], "^")
    pieces: list[str] = []
    while "(?P<" in remaining:
        prefix, rest = remaining.split("(?P<", 1)
        group, remaining = rest.split(")", 1)
        group_name, group_regex = group.split(">", 1)
        converter = REGEX_TO_CONVERTER.get(group_regex)
        if converter is None:
            return None
        pieces.append(prefix)
        pieces.append(f"<{converter}:{group_name}>")
    pieces.append(remaining)
    path = "".join(pieces)
    dashless_path = path.replace("-", "")
    if re.escape(dashless_path) != dashless_path:
        # path still contains regex special characters
        return None
    return path
|
[
"django_upgrade.data.Fixer",
"functools.partial",
"django_upgrade.tokens.find",
"django_upgrade.ast.is_rewritable_import_from",
"django_upgrade.ast.ast_start_offset",
"django_upgrade.tokens.insert",
"django_upgrade.tokens.update_import_names",
"re.escape",
"django_upgrade.tokens.replace",
"weakref.WeakKeyDictionary",
"django_upgrade.compat.str_removeprefix",
"django_upgrade.tokens.extract_indent"
] |
[((644, 679), 'django_upgrade.data.Fixer', 'Fixer', (['__name__'], {'min_version': '(2, 0)'}), '(__name__, min_version=(2, 0))\n', (649, 679), False, 'from django_upgrade.data import Fixer, State, TokenFunc\n'), ((1402, 1421), 'weakref.WeakKeyDictionary', 'WeakKeyDictionary', ([], {}), '()\n', (1419, 1421), False, 'from weakref import WeakKeyDictionary\n'), ((3532, 3564), 'django_upgrade.tokens.replace', 'replace', (['tokens', 'i'], {'src': 'new_name'}), '(tokens, i, src=new_name)\n', (3539, 3564), False, 'from django_upgrade.tokens import STRING, extract_indent, find, insert, replace, update_import_names\n'), ((3907, 3945), 'django_upgrade.compat.str_removeprefix', 'str_removeprefix', (['regex_path[:-1]', '"""^"""'], {}), "(regex_path[:-1], '^')\n", (3923, 3945), False, 'from django_upgrade.compat import str_removeprefix\n'), ((916, 947), 'django_upgrade.ast.is_rewritable_import_from', 'is_rewritable_import_from', (['node'], {}), '(node)\n', (941, 947), False, 'from django_upgrade.ast import ast_start_offset, is_rewritable_import_from\n'), ((1989, 2014), 'django_upgrade.tokens.extract_indent', 'extract_indent', (['tokens', 'i'], {}), '(tokens, i)\n', (2003, 2014), False, 'from django_upgrade.tokens import STRING, extract_indent, find, insert, replace, update_import_names\n'), ((2023, 2110), 'django_upgrade.tokens.update_import_names', 'update_import_names', (['tokens', 'i'], {'node': 'node', 'name_map': "{name: '' for name in removals}"}), "(tokens, i, node=node, name_map={name: '' for name in\n removals})\n", (2042, 2110), False, 'from django_upgrade.tokens import STRING, extract_indent, find, insert, replace, update_import_names\n'), ((2226, 2304), 'django_upgrade.tokens.insert', 'insert', (['tokens', 'j'], {'new_src': 'f"""{indent}from django.urls import {joined_names}\n"""'}), "(tokens, j, new_src=f'{indent}from django.urls import {joined_names}\\n')\n", (2232, 2304), False, 'from django_upgrade.tokens import STRING, extract_indent, find, insert, replace, 
update_import_names\n'), ((4402, 4426), 're.escape', 're.escape', (['dashless_path'], {}), '(dashless_path)\n', (4411, 4426), False, 'import re\n'), ((3353, 3381), 'django_upgrade.tokens.find', 'find', (['tokens', 'i'], {'name': 'STRING'}), '(tokens, i, name=STRING)\n', (3357, 3381), False, 'from django_upgrade.tokens import STRING, extract_indent, find, insert, replace, update_import_names\n'), ((1043, 1065), 'django_upgrade.ast.ast_start_offset', 'ast_start_offset', (['node'], {}), '(node)\n', (1059, 1065), False, 'from django_upgrade.ast import ast_start_offset, is_rewritable_import_from\n'), ((1067, 1113), 'functools.partial', 'partial', (['update_import'], {'node': 'node', 'state': 'state'}), '(update_import, node=node, state=state)\n', (1074, 1113), False, 'from functools import partial\n'), ((2963, 2985), 'django_upgrade.ast.ast_start_offset', 'ast_start_offset', (['node'], {}), '(node)\n', (2979, 2985), False, 'from django_upgrade.ast import ast_start_offset, is_rewritable_import_from\n'), ((2987, 3044), 'functools.partial', 'partial', (['fix_url_call'], {'regex_path': 'regex_path', 'state': 'state'}), '(fix_url_call, regex_path=regex_path, state=state)\n', (2994, 3044), False, 'from functools import partial\n')]
|
# -*- coding: utf-8 -*-
import ast
import re
from setuptools import find_packages, setup
# get version from __version__ variable in repairs/__init__.py
_version_re = re.compile(r'__version__\s+=\s+(.*)')
# literal_eval strips the quotes around the version string safely.
with open('repairs/__init__.py', 'rb') as f:
	version = str(ast.literal_eval(_version_re.search(
		f.read().decode('utf-8')).group(1)))
with open('requirements.txt') as f:
	install_requires = f.read().strip().split('\n')
setup(
	name='repairs',
	version=version,
	description='Repair management app',
	author='DigiThinkIT',
	author_email='<EMAIL>',
	packages=find_packages(),
	zip_safe=False,
	include_package_data=True,
	install_requires=install_requires
)
|
[
"setuptools.find_packages",
"re.compile"
] |
[((168, 206), 're.compile', 're.compile', (['"""__version__\\\\s+=\\\\s+(.*)"""'], {}), "('__version__\\\\s+=\\\\s+(.*)')\n", (178, 206), False, 'import re\n'), ((565, 580), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (578, 580), False, 'from setuptools import find_packages, setup\n')]
|
"""object domain"""
# -*- coding:utf-8 -*-
import json
import threading
from commonbaby.helpers import helper_time
from datacontract.iscoutdataset.iscouttask import EObjectType, IscoutTask
from .mailserver import MailServer
from .portinfo import PortInfo
from .scoutfeedbackbase import ScoutFeedBackBase
from .searchengine import SearchEngine
from .sidesite import SideSite
from .url import URL
from .whois import Whois
class Domain(ScoutFeedBackBase):
"""scout object Domain"""
    def __init__(self, task: IscoutTask, level: int, domain: str):
        """Build a Domain scout node for *domain* at the given task level.

        Each sub-item collection below is a dict paired with its own
        RLock so concurrent producers can feed different collections
        without contending on a single lock.
        """
        ScoutFeedBackBase.__init__(self, task, level, EObjectType.Domain,
                                   domain, 'iscout_domain')
        # if not helper_domain.is_valid_domain(domain):
        #     raise Exception("Invalid domain value")
        # current fields
        self.logtime: str = helper_time.get_time_sec_tz()
        # subitems
        self._subdomains: dict = {}
        self._subdomains_locker = threading.RLock()
        self._iplogs: dict = {}
        self._iplogs_locker = threading.RLock()
        self._whois: dict = {}
        self._whois_locker = threading.RLock()
        self._emails: dict = {}
        self._emails_locker = threading.RLock()
        self._phones: dict = {}
        self._phones_locker = threading.RLock()
        self._searchengine: dict = {}
        self._searchengine_locker = threading.RLock()
        self._url: dict = {}
        self._url_locker = threading.RLock()
        self._portinfos: dict = {}
        self._portinfo_locker = threading.RLock()
        self._realip: dict = {}
        self._realip_locker = threading.RLock()
        self._side_sites: dict = {}
        self._side_sites_locker = threading.RLock()
        self._wafs: dict = {}
        self._wafs_locker = threading.RLock()
        self._mailservers: dict = {}
        self._mailservers_locker = threading.RLock()
def _subitem_count(self) -> int:
res = 0
with self._subdomains_locker:
res += len(self._subdomains)
with self._iplogs_locker:
res += len(self._iplogs)
with self._whois_locker:
res += len(self._whois)
with self._emails_locker:
res += len(self._emails)
with self._phones_locker:
res += len(self._phones)
with self._searchengine_locker:
res += len(self._searchengine)
with self._url_locker:
res += len(self._url)
with self._portinfo_locker:
res += len(self._portinfos)
with self._realip_locker:
res += len(self._realip)
with self._side_sites_locker:
res += len(self._side_sites)
with self._wafs_locker:
res += len(self._wafs)
with self._mailservers_locker:
res += len(self._mailservers)
return res
# set items #######################################################
    def set_subdomain(self, subdomain):
        """Add a sub-domain node under this domain root.

        subdomain: a Domain object; other types are ignored.
        """
        if not isinstance(subdomain, Domain):
            return
        # Lock: several threads may add sub-domains concurrently, which
        # could otherwise produce duplicates.
        with self._subdomains_locker:
            self._subdomains[subdomain.value] = subdomain
            self._set_parentobj(subdomain)
    def set_iplog(self, ip):
        """Add a historical DNS-resolution record to this domain root.

        ip: an IP object.
        """
        with self._iplogs_locker:
            self._iplogs[ip.value] = ip
            self._set_parentobj(ip)
def set_whois(self, whois: Whois):
"""赋值当前域名根节点的 whois信息,覆盖更新原值\n
whois: Whois对象"""
if not isinstance(whois, Whois):
return
key = json.dumps(whois.get_whois_outputdict()) # 去重条件(直接字典的全部内容,后续视情况再改!)
with self._whois_locker:
if self._whois.__contains__(key):
return
self._whois[key] = whois
    def set_email(self, email):
        """Add an email address to this domain root.

        email: an Email object.
        """
        with self._emails_locker:
            self._emails[email.value] = email
            self._set_parentobj(email)
    def set_phone(self, phone):
        """Add a phone number to this domain root.

        phone: a Phone object.
        """
        with self._phones_locker:
            self._phones[phone.value] = phone
            self._set_parentobj(phone)
    def set_searchengine(self, searchengine: SearchEngine):
        """Add a search-engine result to this domain root.

        searchengine: a SearchEngine object; other types are ignored.
        Keyed by keyword+url so the same hit is stored only once.
        """
        if not isinstance(searchengine, SearchEngine):
            return
        with self._searchengine_locker:
            self._searchengine[searchengine._keyword +
                               searchengine._url] = searchengine
    def set_url(self, url: URL):
        """Add a URL result to this domain root.

        url: a URL object; other types are ignored.
        """
        if not isinstance(url, URL):
            return
        with self._url_locker:
            self._url[url.value] = url
    def set_portinfo(self, portinfo: PortInfo):
        """Add a PortInfo (open-port service) record to this domain root.

        portinfo: a PortInfo object; other types are ignored.
        """
        if not isinstance(portinfo, PortInfo):
            return
        with self._portinfo_locker:
            self._portinfos[portinfo._port] = portinfo
def set_realip(self, realip: str):
"""向当前域名根节点添加 Realip真实ip信息
realip: Realip 对象"""
if not isinstance(realip, str):
return
with self._realip_locker:
if self._realip.__contains__(realip):
return
self._realip[realip] = None
    def set_side_site(self, ssite: SideSite):
        """Add a side-site (co-hosted site) record to this domain.

        ssite: a SideSite object; other types are ignored.
        """
        if not isinstance(ssite, SideSite):
            return
        with self._side_sites_locker:
            # Key combines host, ip and port (port added 12-04 --tms).
            self._side_sites[ssite.host + ssite.ip + str(ssite.port)] = ssite
def set_waf(self, waf: str):
"""向当前域名对象中设置waf字段"""
if not isinstance(waf, str):
return
with self._wafs_locker:
if self._wafs.__contains__(waf):
return
self._wafs[waf] = None
def set_mailserver(self, mailserver: MailServer):
"""向当前Email根节点添加 邮服地址\n
emailserver: EmailServer对象"""
if not isinstance(mailserver, MailServer):
raise Exception(
"Invalid MailServer for Email: {}".format(mailserver))
with self._mailservers_locker:
self._mailservers[mailserver._host] = mailserver
# self._set_parentobj(mailserver)
# output #######################################################
    def _get_outputdict_sub(self, rootdict: dict):
        """Fill *rootdict* with every collected sub-item category."""
        if not isinstance(rootdict, dict):
            raise Exception("Invalid rootdict")
        # sub-domain nodes
        self._outputdict_add_subdomain(rootdict)
        # historical DNS-resolution records
        self._outputdict_add_iplog(rootdict)
        # whois records
        self._outputdict_add_whois(rootdict)
        # email addresses
        self._outputdict_add_email(rootdict)
        # phone numbers
        self._outputdict_add_phone(rootdict)
        # searchengine
        self._outputdict_add_searchengine(rootdict)
        # url
        self._outputdict_add_url(rootdict)
        # portinfo
        self._outputdict_add_portinfo(rootdict)
        # real-ip nodes
        self._outputdict_add_realip(rootdict)
        # side-site nodes
        self._outputdict_add_sidesite(rootdict)
        # waf nodes
        self._outputdict_add_waf(rootdict)
        # mailserver
        self._outputdict_add_email_server(rootdict)
def _outputdict_add_subdomain(self, rootdict: dict):
if len(self._subdomains) < 1:
return
if not rootdict.__contains__("subdomain"):
rootdict["subdomain"] = []
for subdomain in self._subdomains.keys():
# sd = {}
# sd['name'] = subdomain
rootdict['subdomain'].append({'name': subdomain})
def _outputdict_add_iplog(self, rootdict: dict):
if len(self._iplogs) < 1:
return
if not rootdict.__contains__("iplog"):
rootdict["iplog"] = []
for ip in self._iplogs.values():
rootdict["iplog"].append({"ip": ip.value, "logtime": ip.logtime})
def _outputdict_add_whois(self, rootdict: dict):
if not isinstance(self._whois, dict) or len(self._whois) < 1:
return
if not rootdict.__contains__("whois"):
rootdict["whois"] = []
with self._whois_locker:
for whois in self._whois.values():
wdict: dict = whois.get_whois_outputdict()
if not isinstance(wdict, dict):
continue
rootdict["whois"].append(wdict)
def _outputdict_add_email(self, rootdict: dict):
if len(self._emails) < 1:
return
if not rootdict.__contains__("emails"):
rootdict["emails"] = []
for email in self._emails.values():
rootdict["emails"].append({
"email": email.value,
"source": email.source,
"reason": email.reason,
})
def _outputdict_add_phone(self, rootdict: dict):
if len(self._phones) < 1:
return
if not rootdict.__contains__("phones"):
rootdict["phones"] = []
for phone in self._phones.values():
rootdict["phones"].append({
"phone": phone.value,
"source": phone.source,
"reason": phone.reason,
})
def _outputdict_add_searchengine(self, rootdict: dict):
"""目前就域名、邮箱、电话有搜索引擎数据,后面多了就提出来封装下"""
if len(self._searchengine) < 1:
return
if not rootdict.__contains__("searchengine"):
rootdict["searchengine"] = []
sglist = rootdict["searchengine"]
for sg in self._searchengine.values():
sg: SearchEngine = sg
sglist.append(sg.get_output_dict())
def _outputdict_add_url(self, rootdict: dict):
""""""
if len(self._url) < 1:
return
if not rootdict.__contains__("urls"):
rootdict['urls'] = []
with self._url_locker:
for u in self._url.values():
u: URL = u
rootdict['urls'].append({'url': u.value})
def _outputdict_add_portinfo(self, rootdict: dict):
"""portinfo"""
if len(self._portinfos) < 1:
return
if not rootdict.__contains__("portinfo"):
rootdict["portinfo"] = []
for portinfo in self._portinfos.values():
if not isinstance(portinfo, PortInfo):
continue
# portinfo: PortInfo = portinfo
portdictone: dict = portinfo.get_outputdict()
if not isinstance(portdictone, dict) or len(portdictone) < 1:
continue
rootdict["portinfo"].append(portdictone)
def _outputdict_add_realip(self, rootdict: dict):
"""real_ip"""
if len(self._realip) < 1:
return
if not rootdict.__contains__("realip"):
rootdict["realip"] = []
with self._realip_locker:
for rp in self._realip.keys():
rootdict["realip"].append({"ip": rp})
def _outputdict_add_sidesite(self, rootdict: dict):
if len(self._side_sites) < 1:
return
if not rootdict.__contains__("sidesites"):
rootdict["sidesites"] = []
for ssdata in self._side_sites.values():
rootdict['sidesites'].append(ssdata.get_sidesite_output_dict())
def _outputdict_add_waf(self, rootdict: dict):
if len(self._wafs) < 1:
return
if not rootdict.__contains__("waf"):
rootdict["waf"] = {}
# 暂时只取第一个,后面测试下是否有网站能探测到2个或以上的再改标准
for waf in self._wafs.keys():
rootdict["waf"]["name"] = waf
break
def _outputdict_add_email_server(self, rootdict: dict):
if len(self._mailservers) < 1:
return
if not rootdict.__contains__("mailserver"):
rootdict["mailserver"] = []
for ms in self._mailservers.values():
ms: MailServer = ms
msdict: dict = {}
msdict["type"] = ms._servertype
msdict["host"] = ms._host
if len(ms._ips) > 0:
msdict["ip"] = [{'addr': ip} for ip in ms._ips.keys()]
rootdict["mailserver"].append(msdict)
|
[
"threading.RLock",
"commonbaby.helpers.helper_time.get_time_sec_tz"
] |
[((856, 885), 'commonbaby.helpers.helper_time.get_time_sec_tz', 'helper_time.get_time_sec_tz', ([], {}), '()\n', (883, 885), False, 'from commonbaby.helpers import helper_time\n'), ((976, 993), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (991, 993), False, 'import threading\n'), ((1057, 1074), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (1072, 1074), False, 'import threading\n'), ((1136, 1153), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (1151, 1153), False, 'import threading\n'), ((1217, 1234), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (1232, 1234), False, 'import threading\n'), ((1298, 1315), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (1313, 1315), False, 'import threading\n'), ((1391, 1408), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (1406, 1408), False, 'import threading\n'), ((1466, 1483), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (1481, 1483), False, 'import threading\n'), ((1552, 1569), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (1567, 1569), False, 'import threading\n'), ((1633, 1650), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (1648, 1650), False, 'import threading\n'), ((1722, 1739), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (1737, 1739), False, 'import threading\n'), ((1799, 1816), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (1814, 1816), False, 'import threading\n'), ((1890, 1907), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (1905, 1907), False, 'import threading\n')]
|
import os
import sys
import threading
from Legobot.Lego import Lego
from unittest.mock import patch
LOCAL_PATH = os.path.join(
os.path.abspath(os.path.dirname(__file__)),
'..',
'Local'
)
sys.path.append(LOCAL_PATH)
from shakespeare import Shakespeare # noqa: E402
LOCK = threading.Lock()
BASEPLATE = Lego.start(None, LOCK)
PROPERTIES = [
{
'name': 'insult_array',
'file': {
'path': 'data/lists/quotes.txt',
'raw': True,
'split_lines': True,
'default': []
}
}
]
LEGO = Shakespeare(BASEPLATE, LOCK, properties=PROPERTIES)
def test_listening_for():
assert LEGO.listening_for({'text': 'shake'}) is False
assert LEGO.listening_for({'text': '!shake moin'}) is True
assert LEGO.listening_for({'text': '!shake'}) is True
assert LEGO.listening_for({'text': '!Shake'}) is False
def test_get_quote():
default = 'Not so much brain as ear wax.'
assert LEGO._get_quote('') == default
assert LEGO._get_quote('bad search') == default
assert LEGO._get_quote('scurvy') == '11. I scorn you, scurvy companion.'
@patch('Legobot.Lego.Lego.reply')
@patch('shakespeare.Shakespeare._get_quote')
def test_handle_check(mock_get_quote, mock_reply):
msg = {'text': '!shake scurvy', 'metadata': {'source_user': 'bob'}}
LEGO.handle(msg)
mock_get_quote.assert_called_once()
mock_reply.assert_called_once()
BASEPLATE.stop()
|
[
"sys.path.append",
"Legobot.Lego.Lego.start",
"os.path.dirname",
"shakespeare.Shakespeare",
"threading.Lock",
"unittest.mock.patch"
] |
[((202, 229), 'sys.path.append', 'sys.path.append', (['LOCAL_PATH'], {}), '(LOCAL_PATH)\n', (217, 229), False, 'import sys\n'), ((291, 307), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (305, 307), False, 'import threading\n'), ((320, 342), 'Legobot.Lego.Lego.start', 'Lego.start', (['None', 'LOCK'], {}), '(None, LOCK)\n', (330, 342), False, 'from Legobot.Lego import Lego\n'), ((568, 619), 'shakespeare.Shakespeare', 'Shakespeare', (['BASEPLATE', 'LOCK'], {'properties': 'PROPERTIES'}), '(BASEPLATE, LOCK, properties=PROPERTIES)\n', (579, 619), False, 'from shakespeare import Shakespeare\n'), ((1130, 1162), 'unittest.mock.patch', 'patch', (['"""Legobot.Lego.Lego.reply"""'], {}), "('Legobot.Lego.Lego.reply')\n", (1135, 1162), False, 'from unittest.mock import patch\n'), ((1164, 1207), 'unittest.mock.patch', 'patch', (['"""shakespeare.Shakespeare._get_quote"""'], {}), "('shakespeare.Shakespeare._get_quote')\n", (1169, 1207), False, 'from unittest.mock import patch\n'), ((150, 175), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (165, 175), False, 'import os\n')]
|
from setuptools import setup, find_packages
from pathlib import Path
SRC_ROOT = Path(__file__).parent / "src"
ABOUT_MODULE = SRC_ROOT / "sopredictable/about.py"
install_requires = [
"typing-extensions;python_version < '3.8'",
]
extras_require = {
"serve": ["fastapi"],
"dev": ["pytest"]
}
with ABOUT_MODULE.open("r") as f:
mod_globals = {}
exec(f.read(), mod_globals)
version = mod_globals["__version__"]
del mod_globals
setup(
name="sopredictable",
version=version,
author="<NAME> (lemontheme)",
author_email="<EMAIL>",
url="https://github.com/lemontheme/sopredictable",
package_dir={"": "src"},
packages=find_packages(where="src"),
install_requires=install_requires,
extras_require=extras_require,
python_requires=">=3.7",
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
|
[
"pathlib.Path",
"setuptools.find_packages"
] |
[((82, 96), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (86, 96), False, 'from pathlib import Path\n'), ((662, 688), 'setuptools.find_packages', 'find_packages', ([], {'where': '"""src"""'}), "(where='src')\n", (675, 688), False, 'from setuptools import setup, find_packages\n')]
|
"""
Stand alone GUI free index builder for Leo's full text search system::
python leoftsindex.py <file1> <file2> <file3>...
If the file name starts with @ it's a assumed to be a simple
text file listing files to be indexed.
If <file> does not contain '#' it's assumed to be a .leo file
to index, and is indexed.
If <file> does contain '#' it's assumed to be a .leo file
containing a list of .leo files to index, with the list in
the node indicated by the UNL after the #, e.g.::
path/to/myfile.leo#Lists-->List of outlines
In the latter case, if the node identified by the UNL has children,
the list of files to scan is built from the first line of the body
of each child node of the identified node (works well with bookmarks.py).
If the node identified by the UNL does not have children, the
node's body is assumed to be a simple text listing of paths to .leo files).
.. note::
It may be necessary to quote the "file" on the command line,
as the '#' may be interpreted as a comment delimiter::
python leoftsindex.py "workbook.leo#Links"
"""
import sys
# add folder containing 'leo' folder to path
# sys.path.append("/home/tbrown/Package/leo/bzr/leo.repo/trunk")
import leo.core.leoBridge as leoBridge
import leo.plugins.leofts as leofts
controller = leoBridge.controller(
gui='nullGui',
loadPlugins=False, # True: attempt to load plugins.
readSettings=False, # True: read standard settings files.
silent=False, # True: don't print signon messages.
verbose=False
)
g = controller.globals()
# list of "files" to process
files = sys.argv[1:]
# set up leofts
leofts.set_leo(g)
g._gnxcache = leofts.GnxCache()
fts = leofts.get_fts()
fn2c = {} # cache to avoid loading same outline twice
done = set() # outlines scanned, to avoid repetition repetition
todo = list(files)
while todo:
item = todo.pop(0)
print ("INDEX: %s"%item)
if '#' in item:
fn, node = item.split('#', 1)
else:
fn, node = item, None
if node:
c = fn2c.setdefault(fn, controller.openLeoFile(fn))
found, dummy, p = g.recursiveUNLSearch(node.split('-->'), c)
if not found:
print("Could not find '%s'"%item)
break
if not p:
p = c.p
if p.hasChildren():
# use file named in first node of each child
files = [chl.b.strip().split('\n', 1)[0].strip() for chl in p.children()]
else:
# use all files listed in body
files = [i.strip() for i in p.b.strip().split('\n')]
elif fn.startswith('@'):
todo.extend(open(fn[1:]).read().strip().split('\n'))
files = []
else:
files = [fn]
for fn in files:
# file names may still have '#' if taken from a node list
real_name = fn.split('#', 1)[0]
if real_name in done:
continue
done.add(real_name)
if len(files) != 1:
print (" FILE: %s"%real_name)
c = fn2c.setdefault(real_name, controller.openLeoFile(fn))
fts.drop_document(real_name)
fts.index_nodes(c)
|
[
"leo.plugins.leofts.get_fts",
"leo.plugins.leofts.GnxCache",
"leo.core.leoBridge.controller",
"leo.plugins.leofts.set_leo"
] |
[((1298, 1405), 'leo.core.leoBridge.controller', 'leoBridge.controller', ([], {'gui': '"""nullGui"""', 'loadPlugins': '(False)', 'readSettings': '(False)', 'silent': '(False)', 'verbose': '(False)'}), "(gui='nullGui', loadPlugins=False, readSettings=False,\n silent=False, verbose=False)\n", (1318, 1405), True, 'import leo.core.leoBridge as leoBridge\n'), ((1632, 1649), 'leo.plugins.leofts.set_leo', 'leofts.set_leo', (['g'], {}), '(g)\n', (1646, 1649), True, 'import leo.plugins.leofts as leofts\n'), ((1664, 1681), 'leo.plugins.leofts.GnxCache', 'leofts.GnxCache', ([], {}), '()\n', (1679, 1681), True, 'import leo.plugins.leofts as leofts\n'), ((1688, 1704), 'leo.plugins.leofts.get_fts', 'leofts.get_fts', ([], {}), '()\n', (1702, 1704), True, 'import leo.plugins.leofts as leofts\n')]
|
#!/usr/bin/env python
import json
import multiprocessing as mp
import os
import shutil
from multiprocessing.pool import ThreadPool
import gffutils
from Bio.Alphabet import IUPAC
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from pyfaidx import Fasta
def create_padded_cds(template_species_list, fasta_path, template_alignment_path, db_path, json_path, n_count):
# create handles for all .db files in intermediate directory
gff = {name.split('.gff.db')[0]: name for name in os.listdir(db_path) if ".gff.db" in name}
gff = {key: gffutils.FeatureDB(db_path + value) for key, value in gff.items()}
# create handles for all .fasta files in fasta directory
fasta = {name.split('.')[0]: name for name in os.listdir(fasta_path) if
((".fasta" in name) and (".fai" not in name))}
fasta = {key: Fasta(fasta_path + value) for key, value in fasta.items()}
# import ortholog groups
with open(json_path + "groups.json", 'r') as f:
parent_groups = json.load(f)
# concatenate cds's for each species,ortho and output a fasta for each ortho
nnn = Seq('n' * n_count, IUPAC.ambiguous_dna)
shutil.rmtree(template_alignment_path, ignore_errors=True)
os.makedirs(template_alignment_path, exist_ok=True)
for ortho in parent_groups:
filename = template_alignment_path + ortho + ".template.fasta"
with open(filename, "w") as f:
for sp in template_species_list:
parent = gff[sp][parent_groups[ortho][sp]]
strand = parent.strand
cds_list = gff[sp].children(parent, featuretype="CDS", order_by="start")
cat_seq = Seq("", IUPAC.ambiguous_dna)
for i, cds in enumerate(cds_list):
if i > 0:
cat_seq += nnn
cat_seq += Seq(str(cds.sequence(fasta=fasta[sp], use_strand=False)),
IUPAC.ambiguous_dna)
if strand == '-':
cat_seq = cat_seq.reverse_complement()
seqReq = SeqRecord(cat_seq, id=sp, description=parent.id)
f.write(seqReq.format("fasta"))
def mafft_driver_file(file):
p = subprocess.Popen(["./mafft_driver.sh", file, file + ".aln"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
return out, err
def mafft_driver_path(path):
# remove old alignments
rm_files = [path + file for file in os.listdir(path) if ".aln" in file]
for file in rm_files:
os.remove(file)
# call maft on each fasta
files = [path + file for file in os.listdir(path) if ".fasta" in file]
pool = ThreadPool(mp.cpu_count())
pool.map(mafft_driver_file, files)
pool.close()
pool.join()
if __name__ == "__main__":
import subprocess
import argparse
import pytoml
parser = argparse.ArgumentParser()
parser.add_argument('--configPath', help='configPath', default='../config.toml')
args = parser.parse_args()
# load config file
with open(args.configPath) as toml_data:
config = pytoml.load(toml_data)
create_padded_cds(config['template_species_list'], config['fasta_path'], config['template_alignment_path'],
config['db_path'], config['json_path'], config['n_count'])
mafft_driver_path(config['template_alignment_path'])
|
[
"subprocess.Popen",
"Bio.Seq.Seq",
"json.load",
"os.makedirs",
"argparse.ArgumentParser",
"os.remove",
"Bio.SeqRecord.SeqRecord",
"pyfaidx.Fasta",
"gffutils.FeatureDB",
"pytoml.load",
"shutil.rmtree",
"os.listdir",
"multiprocessing.cpu_count"
] |
[((1110, 1149), 'Bio.Seq.Seq', 'Seq', (["('n' * n_count)", 'IUPAC.ambiguous_dna'], {}), "('n' * n_count, IUPAC.ambiguous_dna)\n", (1113, 1149), False, 'from Bio.Seq import Seq\n'), ((1154, 1212), 'shutil.rmtree', 'shutil.rmtree', (['template_alignment_path'], {'ignore_errors': '(True)'}), '(template_alignment_path, ignore_errors=True)\n', (1167, 1212), False, 'import shutil\n'), ((1217, 1268), 'os.makedirs', 'os.makedirs', (['template_alignment_path'], {'exist_ok': '(True)'}), '(template_alignment_path, exist_ok=True)\n', (1228, 1268), False, 'import os\n'), ((2217, 2330), 'subprocess.Popen', 'subprocess.Popen', (["['./mafft_driver.sh', file, file + '.aln']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "(['./mafft_driver.sh', file, file + '.aln'], stdout=\n subprocess.PIPE, stderr=subprocess.PIPE)\n", (2233, 2330), False, 'import subprocess\n'), ((2905, 2930), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2928, 2930), False, 'import argparse\n'), ((557, 592), 'gffutils.FeatureDB', 'gffutils.FeatureDB', (['(db_path + value)'], {}), '(db_path + value)\n', (575, 592), False, 'import gffutils\n'), ((840, 865), 'pyfaidx.Fasta', 'Fasta', (['(fasta_path + value)'], {}), '(fasta_path + value)\n', (845, 865), False, 'from pyfaidx import Fasta\n'), ((1005, 1017), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1014, 1017), False, 'import json\n'), ((2571, 2586), 'os.remove', 'os.remove', (['file'], {}), '(file)\n', (2580, 2586), False, 'import os\n'), ((2715, 2729), 'multiprocessing.cpu_count', 'mp.cpu_count', ([], {}), '()\n', (2727, 2729), True, 'import multiprocessing as mp\n'), ((3133, 3155), 'pytoml.load', 'pytoml.load', (['toml_data'], {}), '(toml_data)\n', (3144, 3155), False, 'import pytoml\n'), ((499, 518), 'os.listdir', 'os.listdir', (['db_path'], {}), '(db_path)\n', (509, 518), False, 'import os\n'), ((736, 758), 'os.listdir', 'os.listdir', (['fasta_path'], {}), '(fasta_path)\n', (746, 758), False, 'import os\n'), 
((2501, 2517), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (2511, 2517), False, 'import os\n'), ((2655, 2671), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (2665, 2671), False, 'import os\n'), ((1669, 1697), 'Bio.Seq.Seq', 'Seq', (['""""""', 'IUPAC.ambiguous_dna'], {}), "('', IUPAC.ambiguous_dna)\n", (1672, 1697), False, 'from Bio.Seq import Seq\n'), ((2081, 2129), 'Bio.SeqRecord.SeqRecord', 'SeqRecord', (['cat_seq'], {'id': 'sp', 'description': 'parent.id'}), '(cat_seq, id=sp, description=parent.id)\n', (2090, 2129), False, 'from Bio.SeqRecord import SeqRecord\n')]
|
from django.shortcuts import render
def home(request):
return render (request,"base.html")
|
[
"django.shortcuts.render"
] |
[((64, 92), 'django.shortcuts.render', 'render', (['request', '"""base.html"""'], {}), "(request, 'base.html')\n", (70, 92), False, 'from django.shortcuts import render\n')]
|
# container-service-extension
# Copyright (c) 2019 VMware, Inc. All Rights Reserved.
# SPDX-License-Identifier: BSD-2-Clause
import hashlib
from pyvcloud.vcd.api_extension import APIExtension
from pyvcloud.vcd.client import BasicLoginCredentials
from pyvcloud.vcd.client import Client
import requests
from container_service_extension.common.constants.server_constants import CSE_SERVICE_NAME # noqa: E501
from container_service_extension.common.constants.server_constants import CSE_SERVICE_NAMESPACE # noqa: E501
from container_service_extension.common.constants.server_constants import MQTT_EXTENSION_VENDOR # noqa: E501
from container_service_extension.common.constants.server_constants import MQTT_EXTENSION_VERSION # noqa: E501
from container_service_extension.common.constants.server_constants import MQTTExtKey # noqa: E501
from container_service_extension.common.constants.shared_constants import SYSTEM_ORG_NAME # noqa: E501
from container_service_extension.common.utils.core_utils import NullPrinter
from container_service_extension.common.utils.server_utils import should_use_mqtt_protocol # noqa: E501
from container_service_extension.lib.telemetry.constants import COLLECTOR_ID
from container_service_extension.lib.telemetry.constants import VAC_URL
from container_service_extension.logging.logger import NULL_LOGGER
from container_service_extension.mqi.mqtt_extension_manager import \
MQTTExtensionManager
CEIP_HEADER_NAME = "x-vmware-vcloud-ceip-id"
def uuid_hash(uuid):
"""Return SHA1 hash as hex digest of an uuid.
Requirement from VAC team : You should apply SHA1 hashing over the data as
a text. More specifically you should have the text data as a UTF8 encoded
string then convert the string to byte array and digest it into a SHA1
hash. The resulting SHA1 hash must be converted to text by displaying each
byte of the hashcode as a HEX char (first byte displayed leftmost in the
output). The hash must be lowercase.
No checks are made to determine if the input uuid is valid or not. Dashes
in the uuid are ignored while computing the hash.
:param str uuid: uuid to be hashed
:returns: SHA1 hash as hex digest of the provided uuid.
"""
uuid_no_dash = uuid.replace('-', '')
m = hashlib.sha1()
m.update(bytes(uuid_no_dash, 'utf-8'))
return m.hexdigest()
def get_vcd_ceip_id(vcd_host, verify_ssl=True, logger_debug=NULL_LOGGER):
"""."""
response = None
try:
if not verify_ssl:
requests.packages.urllib3.disable_warnings()
uri = f"https://{vcd_host}"
response = requests.get(uri, verify=verify_ssl)
return response.headers.get(CEIP_HEADER_NAME)
except Exception as err:
logger_debug.error(f"Unable to get vCD CEIP id : {str(err)}")
finally:
if response:
response.close()
def get_telemetry_instance_id(config_dict, logger_debug=NULL_LOGGER,
msg_update_callback=NullPrinter()):
"""Get CSE AMQP or MQTT extension id which is used as instance id.
Any exception is logged as error. No exception is leaked out
of this method and does not affect the server startup.
:param dict config_dict: CSE configuration
:param logging.logger logger_debug: logger instance to log any error
in retrieving CSE extension id.
:param utils.ConsoleMessagePrinter msg_update_callback: Callback object.
:return instance id to use for sending data to Vmware telemetry server
:rtype str (unless no instance id found)
"""
vcd = config_dict['vcd']
client = None
try:
client = Client(vcd['host'], verify_ssl_certs=vcd['verify'])
client.set_credentials(BasicLoginCredentials(
vcd['username'], SYSTEM_ORG_NAME, vcd['password']))
if should_use_mqtt_protocol(config_dict):
# Get MQTT extension uuid
mqtt_ext_manager = MQTTExtensionManager(client)
ext_info = mqtt_ext_manager.get_extension_info(
ext_name=CSE_SERVICE_NAME,
ext_version=MQTT_EXTENSION_VERSION,
ext_vendor=MQTT_EXTENSION_VENDOR)
if not ext_info:
logger_debug.debug("Failed to retrieve telemetry instance id")
return None
logger_debug.debug("Retrieved telemetry instance id")
return mqtt_ext_manager.get_extension_uuid(
ext_info[MQTTExtKey.EXT_URN_ID])
else:
# Get AMQP extension id
ext = APIExtension(client)
cse_info = ext.get_extension_info(CSE_SERVICE_NAME,
namespace=CSE_SERVICE_NAMESPACE)
logger_debug.debug("Retrieved telemetry instance id")
return cse_info.get('id')
except Exception as err:
msg = f"Cannot retrieve telemetry instance id:{err}"
msg_update_callback.general(msg)
logger_debug.error(msg, exc_info=True)
finally:
if client is not None:
client.logout()
def store_telemetry_settings(config_dict):
"""Populate telemetry instance id, url and collector id in config.
:param dict config_dict: CSE configuration
"""
if 'vac_url' not in config_dict['service']['telemetry']:
config_dict['service']['telemetry']['vac_url'] = VAC_URL
config_dict['service']['telemetry']['collector_id'] = COLLECTOR_ID
vcd_ceip_id = None
instance_id = None
if config_dict['service']['telemetry']['enable']:
vcd_ceip_id = get_vcd_ceip_id(config_dict['vcd']['host'],
verify_ssl=config_dict['vcd']['verify'])
instance_id = get_telemetry_instance_id(config_dict)
config_dict['service']['telemetry']['vcd_ceip_id'] = vcd_ceip_id
config_dict['service']['telemetry']['instance_id'] = instance_id
|
[
"container_service_extension.common.utils.server_utils.should_use_mqtt_protocol",
"requests.packages.urllib3.disable_warnings",
"hashlib.sha1",
"pyvcloud.vcd.client.BasicLoginCredentials",
"container_service_extension.mqi.mqtt_extension_manager.MQTTExtensionManager",
"container_service_extension.common.utils.core_utils.NullPrinter",
"requests.get",
"pyvcloud.vcd.api_extension.APIExtension",
"pyvcloud.vcd.client.Client"
] |
[((2274, 2288), 'hashlib.sha1', 'hashlib.sha1', ([], {}), '()\n', (2286, 2288), False, 'import hashlib\n'), ((2987, 3000), 'container_service_extension.common.utils.core_utils.NullPrinter', 'NullPrinter', ([], {}), '()\n', (2998, 3000), False, 'from container_service_extension.common.utils.core_utils import NullPrinter\n'), ((2613, 2649), 'requests.get', 'requests.get', (['uri'], {'verify': 'verify_ssl'}), '(uri, verify=verify_ssl)\n', (2625, 2649), False, 'import requests\n'), ((3636, 3687), 'pyvcloud.vcd.client.Client', 'Client', (["vcd['host']"], {'verify_ssl_certs': "vcd['verify']"}), "(vcd['host'], verify_ssl_certs=vcd['verify'])\n", (3642, 3687), False, 'from pyvcloud.vcd.client import Client\n'), ((3817, 3854), 'container_service_extension.common.utils.server_utils.should_use_mqtt_protocol', 'should_use_mqtt_protocol', (['config_dict'], {}), '(config_dict)\n', (3841, 3854), False, 'from container_service_extension.common.utils.server_utils import should_use_mqtt_protocol\n'), ((2513, 2557), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', ([], {}), '()\n', (2555, 2557), False, 'import requests\n'), ((3719, 3791), 'pyvcloud.vcd.client.BasicLoginCredentials', 'BasicLoginCredentials', (["vcd['username']", 'SYSTEM_ORG_NAME', "vcd['password']"], {}), "(vcd['username'], SYSTEM_ORG_NAME, vcd['password'])\n", (3740, 3791), False, 'from pyvcloud.vcd.client import BasicLoginCredentials\n'), ((3925, 3953), 'container_service_extension.mqi.mqtt_extension_manager.MQTTExtensionManager', 'MQTTExtensionManager', (['client'], {}), '(client)\n', (3945, 3953), False, 'from container_service_extension.mqi.mqtt_extension_manager import MQTTExtensionManager\n'), ((4534, 4554), 'pyvcloud.vcd.api_extension.APIExtension', 'APIExtension', (['client'], {}), '(client)\n', (4546, 4554), False, 'from pyvcloud.vcd.api_extension import APIExtension\n')]
|
# -*- coding: utf-8 -*-
from io import BytesIO
from msgpack import Unpacker
class Writer:
def __init__(self, server):
self.server = server
def write(self, data):
self.server._buf.write(data)
async def drain(self):
pass
def close(self):
pass
class MockRecvServer:
def __init__(self):
self._writer = Writer(self)
self._buf = BytesIO()
async def factory(self, sender):
return None, self._writer
def get_recieved(self):
self._buf.seek(0)
return list(Unpacker(self._buf, encoding='utf-8'))
|
[
"io.BytesIO",
"msgpack.Unpacker"
] |
[((400, 409), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (407, 409), False, 'from io import BytesIO\n'), ((557, 594), 'msgpack.Unpacker', 'Unpacker', (['self._buf'], {'encoding': '"""utf-8"""'}), "(self._buf, encoding='utf-8')\n", (565, 594), False, 'from msgpack import Unpacker\n')]
|
import PyPluMA
PyPluMA.dependency("ClusterCSV2NOA")
PyPluMA.dependency("CSV2GML")
import sys
import os
import math
from ClusterCSV2NOA.ClusterCSV2NOAPlugin import *
from CSV2GML.CSV2GMLPlugin import *
from distutils.spawn import find_executable
class NetworkVizPlugin(CSV2GMLPlugin):
def input(self, filename):
CSV2GMLPlugin.input(self,filename+".csv")
# Three files will be created in addition to the output file:
# GML file for the network, EDA file for network edges,
# NOA file for the clusters.
self.cysfile = filename+".cys"
self.gmlfile = filename+".gml"
self.edafile = filename+".eda"
self.csvfile = filename+".clusters.csv"
self.noafile = filename+".clusters.noa"
self.clusterhandle = ClusterCSV2NOAPlugin()
self.clusterhandle.input(self.csvfile)
def run(self):
CSV2GMLPlugin.run(self)
self.clusterhandle.run()
def output(self, filename):
filestuff = open(self.edafile, 'w')
filestuff.write("name\tmappedWeight\tscaledWeight\n")
for i in range(self.n):
self.bacteria[i] = self.bacteria[i].strip()
for i in range(self.n):
for j in range(self.n):
bac1 = self.bacteria[i].strip()
bac1 = bac1[1:len(bac1)-1]
bac2 = self.bacteria[j].strip()
bac2 = bac2[1:len(bac2)-1]
if (i != j):# and result[0]):
if (float(self.ADJ[i][j]) > 0):
filestuff.write(bac1+' '+'('+'pp'+')'+' '+bac2+'\t'+str(self.ADJ[i][j])+'\t'+str((float(self.ADJ[i][j]))**7)+'\n')
elif (float(self.ADJ[i][j]) < 0): # negatives to zero
filestuff.write(bac1+' '+'('+'pp'+')'+' '+bac2+'\t'+str(0)+'\t'+str((float(self.ADJ[i][j]))**7)+'\n')
self.clusterhandle.output(self.noafile)
CSV2GMLPlugin.output(self, self.gmlfile)
filestuff2 = open(filename, 'w')
filestuff2.write("session open file=\""+self.cysfile+"\"\n")
filestuff2.write("network load file file=\""+self.gmlfile+"\"\n")
filestuff2.write("table import file file=\""+self.edafile+"\" DataTypeTargetForNetworkCollection=\"Edge Table Columns\" keyColumnIndex=1 firstRowAsColumnNames=true startLoadRow=1 delimiters=\"\\t\"\n")
filestuff2.write("table import file file=\""+self.noafile+"\" DataTypeTargetForNetworkCollection=\"Node Table Columns\" keyColumnIndex=1 firstRowAsColumnNames=true startLoadRow=1\n")
filestuff2.write("layout allegro-fruchterman-reingold EdgeAttribute=\"mappedWeight\" defaultEdgeWeight=0 randomize=true useNormalizedEdgeWeight=false\n")
filestuff2.close()
#cytoscape = find_executable("cytoscape.sh")
#if (cytoscape):
# os.system("cytoscape.sh -S "+filename)
|
[
"PyPluMA.dependency"
] |
[((15, 51), 'PyPluMA.dependency', 'PyPluMA.dependency', (['"""ClusterCSV2NOA"""'], {}), "('ClusterCSV2NOA')\n", (33, 51), False, 'import PyPluMA\n'), ((52, 81), 'PyPluMA.dependency', 'PyPluMA.dependency', (['"""CSV2GML"""'], {}), "('CSV2GML')\n", (70, 81), False, 'import PyPluMA\n')]
|
#!/usr/bin/env python3
import os
import sys
import socket
import datetime
import argparse
import torch
import numpy as np
from baselines import logger
from baselines.common.vec_env.dummy_vec_env import DummyVecEnv
from baselines.common.vec_env.vec_normalize import VecNormalize
from envs import make_env
from model_tor import ActorCriticNetwork
from storage_tor import ExperienceBuffer
from ppo_tor import VanillaPPO
def main():
# Init
args = parse_args()
env_id = 'Reacher-v2'
nprocess = 1
n_step_per_update = 2500
gamma = 0.99
epsilon = 1e-5
log_interval = 1
use_gae=False; tau=None
tag = '_'.join(['ppo', env_id, args.opt])
log_dir = os.path.join(args.log_dir, make_stamp(tag))
logger.configure(dir=log_dir)
torch.manual_seed(args.seed)
torch.set_num_threads(4)
assert nprocess==1
# assert not using cuda!
# assert not using recurrent net!
envs = [make_env(env_id, seed=args.seed, rank=i, log_dir=log_dir, add_timestep=False) for i in range(nprocess)]
envs = DummyVecEnv(envs)
envs = VecNormalize(envs, ob=True, ret=True, gamma=gamma, epsilon=epsilon, clipob=10., cliprew=10.)
observ_dim = envs.observation_space.shape[0]
action_dim = envs.action_space.shape[0]
assert len(envs.observation_space.shape)==1
assert len(envs.action_space.shape)==1
assert envs.action_space.__class__.__name__ == "Box"
actor_critic_net = ActorCriticNetwork(input_dim=observ_dim,
hidden_dim=64,
actor_output_dim=action_dim,
critic_output_dim=1) # one neuron estimating the value of any state
agent = VanillaPPO(actor_critic_net, optim_id=args.opt, lr=3e-4, clip_eps=0.2,
max_grad_norm=0.5, n_epoch=10, n_minibatch=32, epsilon=epsilon)
experience = ExperienceBuffer(n_step_per_update, nprocess, observ_dim, action_dim)
# Train
observ = envs.reset(); observ = torch.from_numpy(observ).float()
experience.observations[0].copy_(observ)
for update_idx in range(args.n_update):
# Get experience via rollouts for n_step_per_update steps
for step_idx in range(n_step_per_update):
# Sample actions
with torch.no_grad():
action, action_log_prob, pred_state_value = actor_critic_net.act(observ)
# print(action); print(action_log_prob); print(pred_state_value)
# Step
observ, reward, done, info = envs.step(action.squeeze(1).cpu().numpy())
mask = torch.FloatTensor([[0.0] if done_ else [1.0] for done_ in done])
reward = torch.from_numpy(np.expand_dims(np.stack(reward), 1)).float()
observ = torch.from_numpy(observ).float()
observ *= mask
experience.insert(action, action_log_prob, pred_state_value, reward, next_observ=observ, next_mask=mask)
# Update
with torch.no_grad():
pred_next_state_value = actor_critic_net.predict_state_value(observ).detach()
experience.compute_returns(pred_next_state_value, gamma)
loss, value_loss, action_loss, distrib_entropy = agent.update(experience)
experience.after_update()
# Log
if (update_idx % log_interval)==0:
n_step_so_far = (update_idx+1) * nprocess * n_step_per_update
logs = ['update {}/{}'.format(update_idx+1, args.n_update)]
logs += ['loss {:.5f}'.format(loss)]
logs += ['action_loss {:.5f}'.format(action_loss)]
logs += ['value_loss {:.5f}'.format(value_loss)]
logs += ['distrib_entropy {:.5f}'.format(distrib_entropy)]
logs += ['n_step_so_far {}'.format(n_step_so_far)]
logger.log(' | '.join(logs))
def make_stamp(tag):
hostname = socket.gethostname(); hostname = hostname.split('.')[0]
timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S-%f")
stamp = '_'.join([tag, hostname, timestamp])
return stamp
def parse_args():
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--opt', help='optimizer ID', type=str, default=None, required=True)
parser.add_argument('--n_update', help='number of update', type=int, default=None, required=True)
parser.add_argument('--seed', help='RNG seed', type=int, default=None, required=True)
parser.add_argument('--log_dir', help='root xprmt log dir', type=str, default=None, required=True)
return parser.parse_args()
if __name__ == '__main__':
main()
|
[
"numpy.stack",
"model_tor.ActorCriticNetwork",
"baselines.common.vec_env.dummy_vec_env.DummyVecEnv",
"envs.make_env",
"argparse.ArgumentParser",
"ppo_tor.VanillaPPO",
"storage_tor.ExperienceBuffer",
"baselines.common.vec_env.vec_normalize.VecNormalize",
"torch.manual_seed",
"torch.FloatTensor",
"socket.gethostname",
"torch.set_num_threads",
"baselines.logger.configure",
"torch.no_grad",
"datetime.datetime.now",
"torch.from_numpy"
] |
[((730, 759), 'baselines.logger.configure', 'logger.configure', ([], {'dir': 'log_dir'}), '(dir=log_dir)\n', (746, 759), False, 'from baselines import logger\n'), ((764, 792), 'torch.manual_seed', 'torch.manual_seed', (['args.seed'], {}), '(args.seed)\n', (781, 792), False, 'import torch\n'), ((797, 821), 'torch.set_num_threads', 'torch.set_num_threads', (['(4)'], {}), '(4)\n', (818, 821), False, 'import torch\n'), ((1040, 1057), 'baselines.common.vec_env.dummy_vec_env.DummyVecEnv', 'DummyVecEnv', (['envs'], {}), '(envs)\n', (1051, 1057), False, 'from baselines.common.vec_env.dummy_vec_env import DummyVecEnv\n'), ((1069, 1168), 'baselines.common.vec_env.vec_normalize.VecNormalize', 'VecNormalize', (['envs'], {'ob': '(True)', 'ret': '(True)', 'gamma': 'gamma', 'epsilon': 'epsilon', 'clipob': '(10.0)', 'cliprew': '(10.0)'}), '(envs, ob=True, ret=True, gamma=gamma, epsilon=epsilon, clipob=\n 10.0, cliprew=10.0)\n', (1081, 1168), False, 'from baselines.common.vec_env.vec_normalize import VecNormalize\n'), ((1427, 1537), 'model_tor.ActorCriticNetwork', 'ActorCriticNetwork', ([], {'input_dim': 'observ_dim', 'hidden_dim': '(64)', 'actor_output_dim': 'action_dim', 'critic_output_dim': '(1)'}), '(input_dim=observ_dim, hidden_dim=64, actor_output_dim=\n action_dim, critic_output_dim=1)\n', (1445, 1537), False, 'from model_tor import ActorCriticNetwork\n'), ((1718, 1858), 'ppo_tor.VanillaPPO', 'VanillaPPO', (['actor_critic_net'], {'optim_id': 'args.opt', 'lr': '(0.0003)', 'clip_eps': '(0.2)', 'max_grad_norm': '(0.5)', 'n_epoch': '(10)', 'n_minibatch': '(32)', 'epsilon': 'epsilon'}), '(actor_critic_net, optim_id=args.opt, lr=0.0003, clip_eps=0.2,\n max_grad_norm=0.5, n_epoch=10, n_minibatch=32, epsilon=epsilon)\n', (1728, 1858), False, 'from ppo_tor import VanillaPPO\n'), ((1893, 1962), 'storage_tor.ExperienceBuffer', 'ExperienceBuffer', (['n_step_per_update', 'nprocess', 'observ_dim', 'action_dim'], {}), '(n_step_per_update, nprocess, observ_dim, action_dim)\n', (1909, 1962), 
False, 'from storage_tor import ExperienceBuffer\n'), ((3861, 3881), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (3879, 3881), False, 'import socket\n'), ((4084, 4163), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), '(formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n', (4107, 4163), False, 'import argparse\n'), ((925, 1002), 'envs.make_env', 'make_env', (['env_id'], {'seed': 'args.seed', 'rank': 'i', 'log_dir': 'log_dir', 'add_timestep': '(False)'}), '(env_id, seed=args.seed, rank=i, log_dir=log_dir, add_timestep=False)\n', (933, 1002), False, 'from envs import make_env\n'), ((2012, 2036), 'torch.from_numpy', 'torch.from_numpy', (['observ'], {}), '(observ)\n', (2028, 2036), False, 'import torch\n'), ((2604, 2670), 'torch.FloatTensor', 'torch.FloatTensor', (['[([0.0] if done_ else [1.0]) for done_ in done]'], {}), '([([0.0] if done_ else [1.0]) for done_ in done])\n', (2621, 2670), False, 'import torch\n'), ((2982, 2997), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2995, 2997), False, 'import torch\n'), ((3933, 3956), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3954, 3956), False, 'import datetime\n'), ((2297, 2312), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2310, 2312), False, 'import torch\n'), ((2773, 2797), 'torch.from_numpy', 'torch.from_numpy', (['observ'], {}), '(observ)\n', (2789, 2797), False, 'import torch\n'), ((2722, 2738), 'numpy.stack', 'np.stack', (['reward'], {}), '(reward)\n', (2730, 2738), True, 'import numpy as np\n')]
|
import csv
import pandas as pd
from sklearn.linear_model import Perceptron
df = pd.read_csv('original-data.csv')
df_feature = df.ix[:, df.columns != 'label']
df_label = df['label']
quiz = pd.read_csv('quiz.csv')
len = 126387
mega = pd.concat([df, quiz])
full = pd.get_dummies(mega)
multi_category_columns = ['0', '5', '7',
'8', '9', '14',
'16', '17', '18',
'20', '23', '25',
'26', '56', '57',
'58']
|
[
"pandas.read_csv",
"pandas.get_dummies",
"pandas.concat"
] |
[((80, 112), 'pandas.read_csv', 'pd.read_csv', (['"""original-data.csv"""'], {}), "('original-data.csv')\n", (91, 112), True, 'import pandas as pd\n'), ((188, 211), 'pandas.read_csv', 'pd.read_csv', (['"""quiz.csv"""'], {}), "('quiz.csv')\n", (199, 211), True, 'import pandas as pd\n'), ((232, 253), 'pandas.concat', 'pd.concat', (['[df, quiz]'], {}), '([df, quiz])\n', (241, 253), True, 'import pandas as pd\n'), ((261, 281), 'pandas.get_dummies', 'pd.get_dummies', (['mega'], {}), '(mega)\n', (275, 281), True, 'import pandas as pd\n')]
|
"""Tests for the base Arduino hardware implementation."""
from datetime import timedelta
from math import pi
from typing import List, Optional, Set, Tuple, Type, cast
import pytest
from serial import Serial
from serial.tools.list_ports_common import ListPortInfo
from j5.backends.hardware import NotSupportedByHardwareError
from j5.backends.hardware.j5.arduino import ArduinoHardwareBackend
from j5.boards import Board
from j5.boards.arduino import ArduinoUno
from j5.components import GPIOPinMode
from tests.backends.hardware.j5.mock_serial import MockSerial
# Pins on the digital-analogue border
EDGE_ANALOGUE_PIN = ArduinoUno.FIRST_ANALOGUE_PIN
EDGE_DIGITAL_PIN = EDGE_ANALOGUE_PIN - 1
class MockArduinoBackend(ArduinoHardwareBackend):
"""A simple backend overriding ArduinoHardwareBackend's abstract methods."""
board = ArduinoUno
def __init__(
self,
serial_port: str,
baud: int = 9600,
timeout: timedelta = ArduinoHardwareBackend.DEFAULT_TIMEOUT,
) -> None:
super(MockArduinoBackend, self).__init__(
serial_port=serial_port,
baud=baud,
timeout=timeout,
)
def get_serial_class(self) -> Type[Serial]:
"""Get the serial class."""
return MockSerial # type: ignore
@property
def firmware_version(self) -> Optional[str]:
"""The firmware version of the board."""
return None
def _update_digital_pin(self, identifier: int) -> None:
"""Write the stored value of a digital pin to the Arduino."""
self._serial.write(update_digital_pin_command(
identifier,
self._digital_pins[identifier].mode,
self._digital_pins[identifier].state,
))
def _read_digital_pin(self, identifier: int) -> bool:
"""Read the value of a digital pin from the Arduino."""
message, result = read_digital_pin_command(identifier)
self._serial.write(message)
return result
def _read_analogue_pin(self, identifier: int) -> float:
"""Read the value of an analogue pin from the Arduino."""
message, result = read_analogue_pin_command(identifier)
self._serial.write(message)
return result
def make_backend() -> MockArduinoBackend:
"""Instantiate a MockArduinoBackend with some default arguments."""
return MockArduinoBackend("COM0")
def update_digital_pin_command(identifier: int, mode: GPIOPinMode, state: bool) -> bytes:
"""Generate a pin update command to send to the mock arduino board."""
if mode in {GPIOPinMode.ANALOGUE_INPUT, GPIOPinMode.ANALOGUE_OUTPUT}:
return b""
return "_".join([
"update",
str(identifier),
mode.name,
str(state),
]).encode("utf-8")
def read_digital_pin_command(identifier: int) -> Tuple[bytes, bool]:
"""Generate a digital pin read command to send to the mock arduino board."""
result: bool = identifier % 2 == 0
return (
"_".join([
"readdigital",
str(identifier),
str(result),
]).encode("utf-8"),
result,
)
def read_analogue_pin_command(identifier: int) -> Tuple[bytes, float]:
"""Generate an analogue pin read command to send to the mock arduino board."""
result: int = identifier
return (
"_".join([
"readanalogue",
str(identifier),
str(result),
]).encode("utf-8"),
float(result),
)
def test_backend_default_timeout() -> None:
"""Test that a default timeout exists that is a timedelta."""
assert isinstance(ArduinoHardwareBackend.DEFAULT_TIMEOUT, timedelta)
def make_port_info(vid: int, pid: int) -> ListPortInfo:
"""Make a ListPortInfo object from a USB vendor ID and product ID."""
list_port_info = ListPortInfo("/dev/null")
list_port_info.vid, list_port_info.pid = vid, pid
return list_port_info
def test_backend_is_arduino() -> None:
"""Test that the USB IDs listed are recognised as Arduinos."""
assert len(ArduinoHardwareBackend.USB_IDS) > 0
assert all(
ArduinoHardwareBackend.is_arduino(make_port_info(vid, pid))
for vid, pid in ArduinoHardwareBackend.USB_IDS
)
def test_backend_discover() -> None:
"""Test that we can discover Arduinos and only Arduinos."""
arduino_ports: List[ListPortInfo] = [
make_port_info(vid, pid)
for vid, pid in ArduinoHardwareBackend.USB_IDS
]
other_ports: List[ListPortInfo] = [
make_port_info(vid, pid)
for vid, pid in [
(0x1e7d, 0x307a), # Keyboard
(0x1bda, 0x0010), # Power board
(0x0781, 0x5581), # USB flash drive
]
]
def discover_arduinos(ports: List[ListPortInfo]) -> Set[Board]:
class MockDiscoveryArduinoBackend(MockArduinoBackend):
@classmethod
def get_comports(cls) -> List[ListPortInfo]:
return ports
return MockDiscoveryArduinoBackend.discover()
# Find nothing
assert discover_arduinos([]) == set()
# Only find other devices
assert discover_arduinos(other_ports) == set()
# Only find one Arduino
assert len(discover_arduinos([arduino_ports[0]])) == 1
# Find one Arduino in a mixture
assert len(discover_arduinos(other_ports + [arduino_ports[0]])) == 1
# Find lots of Arduinos
assert len(discover_arduinos(arduino_ports)) == len(arduino_ports)
# Find lots of Arduinos in a mixture
assert len(discover_arduinos(other_ports + arduino_ports)) == len(arduino_ports)
# Make sure they're all Arduinos
assert all(
isinstance(board, MockArduinoBackend.board)
for board in discover_arduinos(other_ports + arduino_ports)
)
def test_backend_initialisation() -> None:
"""Test that we can initialise an ArduinoHardwareBackend."""
backend = make_backend()
assert backend.serial_port == "COM0"
assert isinstance(backend._serial, MockSerial)
assert all(
pin.mode is GPIOPinMode.DIGITAL_INPUT for pin in backend._digital_pins.values()
)
assert all(pin.state is False for pin in backend._digital_pins.values())
def test_backend_get_set_pin_mode() -> None:
"""Test that we can get and set pin modes."""
pin = EDGE_DIGITAL_PIN
backend = make_backend()
assert backend.get_gpio_pin_mode(EDGE_ANALOGUE_PIN) is GPIOPinMode.ANALOGUE_INPUT
assert backend.get_gpio_pin_mode(pin) is GPIOPinMode.DIGITAL_INPUT
serial = cast(MockSerial, backend._serial)
mode = GPIOPinMode.DIGITAL_OUTPUT
backend.set_gpio_pin_mode(pin, mode)
serial.check_sent_data(update_digital_pin_command(pin, mode, False))
assert backend.get_gpio_pin_mode(pin) is mode
def test_backend_digital_pin_modes() -> None:
"""Test that only certain modes are valid on digital pins."""
legal_modes: Set[GPIOPinMode] = {
GPIOPinMode.DIGITAL_INPUT,
GPIOPinMode.DIGITAL_INPUT_PULLUP,
GPIOPinMode.DIGITAL_OUTPUT,
}
check_pin_modes(make_backend(), EDGE_DIGITAL_PIN, legal_modes)
def test_backend_analogue_pin_modes() -> None:
"""Test that only certain modes are valid on digital pins."""
legal_modes: Set[GPIOPinMode] = {
GPIOPinMode.ANALOGUE_INPUT,
}
check_pin_modes(make_backend(), EDGE_ANALOGUE_PIN, legal_modes)
def check_pin_modes(
backend: ArduinoHardwareBackend,
pin: ArduinoUno.PinNumber,
legal_modes: Set[GPIOPinMode],
) -> None:
"""Check that a set of modes is supported on a backend for a pin."""
for mode in GPIOPinMode:
if mode in legal_modes:
serial = cast(MockSerial, backend._serial)
backend.set_gpio_pin_mode(pin, mode)
serial.check_sent_data(update_digital_pin_command(pin, mode, False))
else:
with pytest.raises(NotSupportedByHardwareError):
backend.set_gpio_pin_mode(pin, mode)
def test_backend_write_digital_state() -> None:
"""Test that we can write a new digital state to a pin."""
pin = 2
mode = GPIOPinMode.DIGITAL_OUTPUT
backend = make_backend()
serial = cast(MockSerial, backend._serial)
backend.set_gpio_pin_mode(pin, mode)
serial.check_sent_data(update_digital_pin_command(pin, mode, False))
backend.write_gpio_pin_digital_state(pin, True)
assert backend.get_gpio_pin_digital_state(pin) is True
serial.check_sent_data(update_digital_pin_command(pin, mode, True))
def test_backend_write_digital_state_requires_pin_mode() -> None:
"""Check that pin must be in DIGITAL_OUTPUT mode for write digital state to work."""
pin = 2
backend = make_backend()
assert backend.get_gpio_pin_mode(pin) is not GPIOPinMode.DIGITAL_OUTPUT
with pytest.raises(ValueError):
backend.write_gpio_pin_digital_state(pin, True)
def test_backend_write_digital_state_requires_digital_pin() -> None:
"""Check that pins 14-19 are not supported by write digital state."""
with pytest.raises(NotSupportedByHardwareError):
make_backend().write_gpio_pin_digital_state(EDGE_ANALOGUE_PIN, True)
def test_backend_get_digital_state() -> None:
"""Test that we can recall the digital state of a pin."""
pin = 2
backend = make_backend()
# This should put the pin into the most recent (or default) output state.
backend.set_gpio_pin_mode(pin, GPIOPinMode.DIGITAL_OUTPUT)
assert backend.get_gpio_pin_digital_state(pin) is False
backend.write_gpio_pin_digital_state(pin, True)
assert backend.get_gpio_pin_digital_state(pin) is True
backend.write_gpio_pin_digital_state(pin, False)
assert backend.get_gpio_pin_digital_state(pin) is False
def test_backend_get_digital_state_requires_pin_mode() -> None:
"""Check that pin must not be in DIGITAL_OUTPUT mode for get digital state to work."""
pin = 2
backend = make_backend()
assert backend.get_gpio_pin_mode(pin) is not GPIOPinMode.DIGITAL_OUTPUT
with pytest.raises(ValueError):
backend.get_gpio_pin_digital_state(pin)
def test_backend_get_digital_state_requires_digital_pin() -> None:
"""Check that pins 14-19 are not supported by get digital state."""
with pytest.raises(NotSupportedByHardwareError):
make_backend().get_gpio_pin_digital_state(EDGE_ANALOGUE_PIN)
def test_backend_read_digital_state() -> None:
"""Test that we can read the digital state of a pin."""
pin = 2
backend = make_backend()
serial = cast(MockSerial, backend._serial)
assert backend.get_gpio_pin_mode(pin) is GPIOPinMode.DIGITAL_INPUT
expected_message, expected_result = read_digital_pin_command(pin)
assert backend.read_gpio_pin_digital_state(pin) is expected_result
serial.check_sent_data(expected_message)
def test_backend_read_digital_state_requires_pin_mode() -> None:
"""Check that pin must be in DIGITAL_INPUT* mode for read digital state to work."""
pin = 2
backend = make_backend()
backend.set_gpio_pin_mode(pin, GPIOPinMode.DIGITAL_OUTPUT)
assert backend.get_gpio_pin_mode(pin) is not GPIOPinMode.DIGITAL_INPUT
with pytest.raises(ValueError):
backend.read_gpio_pin_digital_state(pin)
def test_backend_read_digital_state_requires_digital_pin() -> None:
"""Check that pins 14-19 are not supported by read digital state."""
with pytest.raises(NotSupportedByHardwareError):
make_backend().read_gpio_pin_digital_state(EDGE_ANALOGUE_PIN)
def test_backend_read_analogue() -> None:
"""Test that we can read the digital state of a pin."""
pin = EDGE_ANALOGUE_PIN
backend = make_backend()
serial = cast(MockSerial, backend._serial)
expected_message, expected_result = read_analogue_pin_command(pin)
assert backend.read_gpio_pin_analogue_value(pin) == expected_result
serial.check_sent_data(expected_message)
def test_backend_read_analogue_requires_analogue_pin() -> None:
"""Check that pins 2-13 are not supported by read analogue."""
with pytest.raises(NotSupportedByHardwareError):
make_backend().read_gpio_pin_analogue_value(EDGE_DIGITAL_PIN)
def test_backend_write_analogue_not_supported() -> None:
"""Test that writing an analogue value to a pin is unsupported."""
with pytest.raises(NotSupportedByHardwareError):
make_backend().write_gpio_pin_dac_value(2, pi)
def test_backend_write_pwm_not_supported() -> None:
"""Test that writing a PWM value to a pin is unsupported."""
with pytest.raises(NotSupportedByHardwareError):
make_backend().write_gpio_pin_pwm_value(3, 0.3)
def test_backend_get_set_led_state() -> None:
"""Test that we can recall and set the state of the LED."""
pin = 13
mode = GPIOPinMode.DIGITAL_OUTPUT
backend = make_backend()
serial = cast(MockSerial, backend._serial)
backend.set_gpio_pin_mode(pin, mode)
serial.check_sent_data(update_digital_pin_command(pin, mode, False))
backend.set_led_state(0, True)
serial.check_sent_data(update_digital_pin_command(pin, mode, True))
assert backend.get_led_state(0) is True
def test_backend_nonzero_led_identifier() -> None:
"""Test that the only allowed LED identifier is 0."""
backend = make_backend()
backend.set_gpio_pin_mode(13, GPIOPinMode.DIGITAL_OUTPUT)
with pytest.raises(ValueError):
backend.get_led_state(1)
with pytest.raises(ValueError):
backend.set_led_state(1, True)
|
[
"typing.cast",
"serial.tools.list_ports_common.ListPortInfo",
"pytest.raises"
] |
[((3847, 3872), 'serial.tools.list_ports_common.ListPortInfo', 'ListPortInfo', (['"""/dev/null"""'], {}), "('/dev/null')\n", (3859, 3872), False, 'from serial.tools.list_ports_common import ListPortInfo\n'), ((6532, 6565), 'typing.cast', 'cast', (['MockSerial', 'backend._serial'], {}), '(MockSerial, backend._serial)\n', (6536, 6565), False, 'from typing import List, Optional, Set, Tuple, Type, cast\n'), ((8170, 8203), 'typing.cast', 'cast', (['MockSerial', 'backend._serial'], {}), '(MockSerial, backend._serial)\n', (8174, 8203), False, 'from typing import List, Optional, Set, Tuple, Type, cast\n'), ((10506, 10539), 'typing.cast', 'cast', (['MockSerial', 'backend._serial'], {}), '(MockSerial, backend._serial)\n', (10510, 10539), False, 'from typing import List, Optional, Set, Tuple, Type, cast\n'), ((11658, 11691), 'typing.cast', 'cast', (['MockSerial', 'backend._serial'], {}), '(MockSerial, backend._serial)\n', (11662, 11691), False, 'from typing import List, Optional, Set, Tuple, Type, cast\n'), ((12808, 12841), 'typing.cast', 'cast', (['MockSerial', 'backend._serial'], {}), '(MockSerial, backend._serial)\n', (12812, 12841), False, 'from typing import List, Optional, Set, Tuple, Type, cast\n'), ((8786, 8811), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (8799, 8811), False, 'import pytest\n'), ((9023, 9065), 'pytest.raises', 'pytest.raises', (['NotSupportedByHardwareError'], {}), '(NotSupportedByHardwareError)\n', (9036, 9065), False, 'import pytest\n'), ((10005, 10030), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (10018, 10030), False, 'import pytest\n'), ((10230, 10272), 'pytest.raises', 'pytest.raises', (['NotSupportedByHardwareError'], {}), '(NotSupportedByHardwareError)\n', (10243, 10272), False, 'import pytest\n'), ((11142, 11167), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (11155, 11167), False, 'import pytest\n'), ((11370, 11412), 'pytest.raises', 'pytest.raises', 
(['NotSupportedByHardwareError'], {}), '(NotSupportedByHardwareError)\n', (11383, 11412), False, 'import pytest\n'), ((12023, 12065), 'pytest.raises', 'pytest.raises', (['NotSupportedByHardwareError'], {}), '(NotSupportedByHardwareError)\n', (12036, 12065), False, 'import pytest\n'), ((12276, 12318), 'pytest.raises', 'pytest.raises', (['NotSupportedByHardwareError'], {}), '(NotSupportedByHardwareError)\n', (12289, 12318), False, 'import pytest\n'), ((12503, 12545), 'pytest.raises', 'pytest.raises', (['NotSupportedByHardwareError'], {}), '(NotSupportedByHardwareError)\n', (12516, 12545), False, 'import pytest\n'), ((13320, 13345), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (13333, 13345), False, 'import pytest\n'), ((13389, 13414), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (13402, 13414), False, 'import pytest\n'), ((7673, 7706), 'typing.cast', 'cast', (['MockSerial', 'backend._serial'], {}), '(MockSerial, backend._serial)\n', (7677, 7706), False, 'from typing import List, Optional, Set, Tuple, Type, cast\n'), ((7868, 7910), 'pytest.raises', 'pytest.raises', (['NotSupportedByHardwareError'], {}), '(NotSupportedByHardwareError)\n', (7881, 7910), False, 'import pytest\n')]
|