index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
9,100 | 73a778c6e4216c23ac8d82eef96ce7b73b18f661 | """This is the body of the low-level worker tool.
A worker is intended to run as a process that imports a module, mutates it in
one location with one operator, runs the tests, reports the results, and dies.
"""
import difflib
import importlib
import inspect
import json
import logging
import subprocess
import sys
import traceback
import astunparse
try:
import typing # the typing module does some fancy stuff at import time
# which we shall not do twice... by loading it here,
# preserve_modules does not delete it and therefore
# fancy stuff happens only once
except ImportError:
pass
from .config import serialize_config
from .importing import preserve_modules, using_ast
from .mutating import MutatingCore
from .parsing import get_ast
from .testing.test_runner import TestOutcome
from .work_item import WorkItem
log = logging.getLogger()
class WorkerOutcome:
    """Enumeration of the ways a worker run can terminate."""

    # Mutation applied and the test suite ran to completion.
    NORMAL = 'normal'
    # An uncaught exception escaped the worker machinery.
    EXCEPTION = 'exception'
    # No occurrence of the operator exists, so nothing could be tested.
    NO_TEST = 'no-test'
    # The worker subprocess exceeded its time budget and was killed.
    TIMEOUT = 'timeout'
    # The work item was skipped entirely.
    SKIPPED = 'skipped'
def worker(module_name,
           operator_class,
           occurrence,
           test_runner):
    """Mutate the OCCURRENCE-th site for OPERATOR_CLASS in MODULE_NAME, run the
    tests, and report the results.

    This is fundamentally the single-mutation-and-test-run process
    implementation.

    There are three high-level ways that a worker can finish. First, it could
    fail exceptionally, meaning that some uncaught exception made its way from
    some part of the operation to terminate the function. This function will
    intercept all exceptions and return it in a non-exceptional structure.

    Second, the mutation testing machinery may determine that there is no
    OCCURRENCE-th instance for OPERATOR_NAME in the module under test. In this
    case there is no way to report a test result (i.e. killed, survived, or
    incompetent) so a special value is returned indicating that no mutation is
    possible.

    Finally, and hopefully normally, the worker will find that it can run a
    test. It will do so and report back the result - killed, survived, or
    incompetent - in a structured way.

    Returns: a WorkItem

    Raises: This will generally not raise any exceptions. Rather, exceptions
        will be reported using the 'exception' result-type in the return value.
    """
    try:
        # preserve_modules restores sys.modules on exit so the mutated module
        # does not leak into subsequent imports.
        with preserve_modules():
            module = importlib.import_module(module_name)
            module_source_file = inspect.getsourcefile(module)
            module_ast = get_ast(module)
            module_source = astunparse.unparse(module_ast)

            core = MutatingCore(occurrence)
            operator = operator_class(core)
            # note: after this step module_ast and modified_ast
            # appear to be the same (the operator mutates the tree in place)
            modified_ast = operator.visit(module_ast)
            modified_source = astunparse.unparse(modified_ast)

            # The core only records an activation when the occurrence-th site
            # actually exists; otherwise there is nothing to test.
            if not core.activation_record:
                return WorkItem(
                    worker_outcome=WorkerOutcome.NO_TEST)

            # generate a source diff to visualize how the mutation
            # operator has changed the code
            module_diff = ["--- mutation diff ---"]
            for line in difflib.unified_diff(module_source.split('\n'),
                                             modified_source.split('\n'),
                                             fromfile="a" + module_source_file,
                                             tofile="b" + module_source_file,
                                             lineterm=""):
                module_diff.append(line)

            # Run the test suite against the mutated AST.
            with using_ast(module_name, module_ast):
                rec = test_runner()

        rec.update({
            'diff': module_diff,
            'worker_outcome': WorkerOutcome.NORMAL
        })
        rec.update(core.activation_record)
        return rec

    except Exception:  # noqa # pylint: disable=broad-except
        # Deliberately broad: all failures are reported as data, not raised.
        return WorkItem(
            data=traceback.format_exception(*sys.exc_info()),
            test_outcome=TestOutcome.INCOMPETENT,
            worker_outcome=WorkerOutcome.EXCEPTION)
def worker_process(work_item,
                   timeout,
                   config):
    """Run `cosmic-ray worker` in a subprocess and return the results,
    passing `config` to it via stdin.

    Arguments:
        work_item: the WorkItem (or plain dict) describing the mutation.
        timeout: seconds to wait for the subprocess before killing it.
        config: configuration object serialized onto the worker's stdin.

    Returns: An updated WorkItem
    """
    # The work_item param may come as just a dict (e.g. if it arrives over
    # celery), so we reconstruct a WorkItem to make it easier to work with.
    work_item = WorkItem(work_item)

    command = 'cosmic-ray worker {module} {operator} {occurrence}'.format(
        **work_item)

    log.info('executing: %s', command)

    proc = subprocess.Popen(command.split(),
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            universal_newlines=True)
    config_string = serialize_config(config)
    try:
        # Feed the config and collect the worker's JSON report from stdout.
        outs, _ = proc.communicate(input=config_string, timeout=timeout)
        result = json.loads(outs)
        # Only overwrite fields the worker actually reported.
        work_item.update({
            k: v
            for k, v
            in result.items()
            if v is not None
        })
    except subprocess.TimeoutExpired as exc:
        # Record the timeout budget that was exceeded, then kill the child.
        work_item.worker_outcome = WorkerOutcome.TIMEOUT
        work_item.data = exc.timeout
        proc.kill()
    except json.JSONDecodeError as exc:
        # The worker emitted non-JSON output; keep the decode error for triage.
        work_item.worker_outcome = WorkerOutcome.EXCEPTION
        work_item.data = exc

    work_item.command_line = command
    return work_item
|
9,101 | c6a6b8f2485528af479fadbdf286e82f10a11de8 | import collect_from_webapi.api_public_data as pdapi
from collect_from_webapi import pd_fetch_tourspot_visitor
# url = pdapi.pd_gen_url("http://openapi.tour.go.kr/openapi/serviceTourismResourceStatsService/getPchrgTrrsrtVisitorList",
# YM='{0:04d}{1:02d}'.format(2017, 1),
# SIDO='서울특별시',
# GUNGU='',
# RES_NM='',
# numOfRows=10,
# _type='json',
# pageNo=1)
# test for pd_fetch_tourspot_visitor (district name is in Korean: Seoul)
for items in pd_fetch_tourspot_visitor(district='서울특별시', year=2017, month=7):
    print(items)
# test for pdapi.pd_fetch_foreign_visitor (args: spot id, year, month)
item = pdapi.pd_fetch_foreign_visitor(112, 2012, 7)
print(item) |
9,102 | 99f50d393e750bd8fa5bee21d99f08d20b9f5fe9 | from covid import FuzzyNet
import numpy as np
import time
if __name__ == '__main__':
    # Chromosome layout (39 genes per individual):
    # mx1,mx2,mx3,my1,my2,my3, dx1,dx2,dx3,dy1,dy2,dy3, p1..p9, q1..q9, r1..r9
    generations = 100
    for generation in range(generations):
        # A fresh random population each generation (uint8 genes in [0, 255)).
        population = np.random.randint(0, 255, size=(200, 39), dtype=np.uint8)
        print('Population\n', population, end='\n\n')
        cov19 = FuzzyNet()
        print('Base Z matrix\n', cov19.Z, end='\n\n')
        population_fa, population_matrix_z, best_FA_index = cov19.get_FA(population=population)
        # Only plot candidates whose fitness value beats the 40 threshold.
        if population_fa[best_FA_index] < 40:
            print('Best Z matrix\n', population_matrix_z[best_FA_index], population_fa[best_FA_index])
            cov19.plot(population_matrix_z[best_FA_index])
            # Pause so the plot can be inspected before the next generation.
            time.sleep(5)
# xi , yj = zij |
9,103 | 010f78d952657b3d7c11fbf8e46912d0294f6cc1 | # python imports
import re
# django imports
from django.core.management.base import BaseCommand
# module level imports
from utils.spells import SPELLS
from spells.models import Spell
# Identity map of canonical spell-school names; used to normalize/validate
# the 'school' field of imported spell data.
SPELL_SCHOOL = {
    'Abjuration': 'Abjuration',
    'Conjuration': 'Conjuration',
    'Divination': 'Divination',
    'Enchantment': 'Enchantment',
    'Evocation': 'Evocation',
    'Illusion': 'Illusion',
    'Necromancy': 'Necromancy',
    'Transmutation': 'Transmutation',
}
# Identity map of recognized casting-time strings.
CAST_TIME = {
    '1 Action': '1 Action',
    '1 Bonus Action': '1 Bonus Action',
    '1 Reaction': '1 Reaction',
    '1 Minute': '1 Minute',
    '10 Minutes': '10 Minutes',
    '1 Hour': '1 Hour',
    '8 Hours': '8 Hours',
    '12 Hours': '12 Hours',
    '24 Hours': '24 Hours',
    '1 Action or 8 Hours': '1 Action or 8 Hours',
}
# Maps the raw level field ('Cantrip' or '1'..'9') to its display label.
SPELL_LEVELS = {
    'Cantrip': 'Cantrip',
    '1': '1st-level',
    '2': '2nd-level',
    '3': '3rd-level',
    '4': '4th-level',
    '5': '5th-level',
    '6': '6th-level',
    '7': '7th-level',
    '8': '8th-level',
    '9': '9th-level',
}
class Command(BaseCommand):
    """Command to populate the database with all spells for 5th Edition."""

    # args
    help = 'Will auto populate the database with all the Spells from 5th Edition Dungeons and Dragons.'

    def handle(self, *args, **kwargs):
        """Create one Spell row per entry in the SPELLS fixture."""
        for spell in SPELLS:
            spell_entry = Spell.objects.create(
                name=spell['name'],
                distance=spell['range'],
                ritual=spell['ritual'],
            )
            # FIX: the old loop accumulated a trailing ', ' after the last
            # class name; join the titled names with a separator instead.
            spell_entry.available_to = ', '.join(
                class_name.title() for class_name in spell['classes'])
            if 'components' in spell:
                components = spell['components']
                spell_entry.somatic = components['somatic']
                spell_entry.verbal = components['verbal']
                spell_entry.material = components['material']
                if spell_entry.material:
                    # FIX: same trailing-separator defect as available_to.
                    spell_entry.specific_materials = ', '.join(
                        components['materials_needed'])
            if 'description' in spell:
                description = spell['description']
                spell_entry.description = description
                # Pull the first damage roll (e.g. "8d6") out of the text:
                # digits before a 'd' are the dice count, after it the size.
                dice_number = re.findall(r'\d+(?=d)', description)
                if len(dice_number) > 0:
                    spell_entry.damage_dice_number = dice_number[0]
                dice_size = re.findall(r'(?<=d)\d+', description)
                if len(dice_size) > 0:
                    spell_entry.damage_dice_size = dice_size[0]
                # A single "<Ability> saving throw" mention becomes e.g. 'DEX'.
                s_throw = re.findall(r"[A-Z]\w+(?= saving throw)", description)
                if len(s_throw) == 1:
                    s_throw = s_throw[0][:3].upper()
                    spell_entry.save_type = s_throw
            if spell['level'] == 'cantrip':
                spell_entry.level = 'Cantrip'
            else:
                spell_entry.level = SPELL_LEVELS[spell['level']]
            if 'higher_levels' in spell:
                spell_entry.higher_level = spell['higher_levels']
            if 'school' in spell:
                spell_entry.school = SPELL_SCHOOL[spell['school'].title()]
            if 'casting_time' in spell:
                # Reaction casting times carry qualifiers in the raw data, so
                # normalize any 'reaction' value to the canonical label.
                if 'reaction' in spell['casting_time']:
                    spell_entry.cast_time = CAST_TIME['1 Reaction']
                else:
                    spell_entry.cast_time = spell['casting_time'].title()
            if 'Concentration' in spell['duration']:
                spell_entry.concentration = True
                # Strip the leading 'Concentration, ' prefix (15 characters).
                spell_entry.duration = spell['duration'][15:].title()
            else:
                spell_entry.concentration = False
                spell_entry.duration = spell['duration']
            spell_entry.save()
|
9,104 | e1448e62020f87e315d219be97d9af84607441df | """SamsungTV Encrypted."""
import aiohttp
from aioresponses import aioresponses
import pytest
from yarl import URL
from samsungtvws.encrypted.authenticator import SamsungTVEncryptedWSAsyncAuthenticator
@pytest.mark.asyncio
async def test_authenticator(aioresponse: aioresponses) -> None:
    """End-to-end pairing flow against fully stubbed TV HTTP endpoints."""
    # Stub every request the authenticator makes, in the order of the flow.
    with open("tests/fixtures/auth_pin_status.xml") as file:
        aioresponse.get("http://1.2.3.4:8080/ws/apps/CloudPINPage", body=file.read())
    # NOTE(review): this fixture handle is opened but never read — the POST
    # body is the literal below; confirm whether the fixture was intended.
    with open("tests/fixtures/auth_pin_status.xml") as file:
        aioresponse.post(
            "http://1.2.3.4:8080/ws/apps/CloudPINPage",
            body="http:///ws/apps/CloudPINPage/run",
        )
    with open("tests/fixtures/auth_empty.json") as file:
        aioresponse.get(
            "http://1.2.3.4:8080/ws/pairing?step=0&app_id=12345"
            "&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184&type=1",
            body=file.read(),
        )
    with open("tests/fixtures/auth_generator_client_hello.json") as file:
        aioresponse.post(
            "http://1.2.3.4:8080/ws/pairing?step=1&app_id=12345"
            "&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184",
            body=file.read(),
        )
    with open("tests/fixtures/auth_client_ack_msg.json") as file:
        aioresponse.post(
            "http://1.2.3.4:8080/ws/pairing?step=2&app_id=12345"
            "&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184",
            body=file.read(),
        )
    aioresponse.delete("http://1.2.3.4:8080/ws/apps/CloudPINPage/run", body="")

    authenticator = SamsungTVEncryptedWSAsyncAuthenticator(
        "1.2.3.4", web_session=aiohttp.ClientSession()
    )
    await authenticator.start_pairing()
    token = await authenticator.try_pin("0997")
    assert token == "545a596ab96b289c60896255e8690288"

    session_id = await authenticator.get_session_id_and_close()
    assert session_id == "1"

    # One recorded request per stub above.
    assert len(aioresponse.requests) == 6
    print(aioresponse.requests)
    # Verify the exact payload posted at pairing step 1 (generator hello).
    request = aioresponse.requests[
        (
            "POST",
            URL(
                "http://1.2.3.4:8080/ws/pairing?app_id=12345&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184&step=1"
            ),
        )
    ]
    assert (
        request[0].kwargs["data"]
        == '{"auth_Data":{"auth_type":"SPC","GeneratorServerHello":'
        '"010200000000000000008A000000063635343332317CAF9CBDC06B666D23EBC'
        "A615E0666FEB2B807091BF507404DDD18329CD64A91E513DC704298CCE49C4C5"
        "656C42141A696354A7145127BCD94CDD2B0D632D87E332437F86EBE5A50A1512"
        "F3F54C71B791A88ECBAF562FBABE2731F27D851A764CA114DBE2C2C965DF151C"
        'FC7401920FAA04636B356B97DBE1DA3A090004F81830000000000"}}'
    )
    # Verify the exact payload posted at pairing step 2 (server ack).
    request = aioresponse.requests[
        (
            "POST",
            URL(
                "http://1.2.3.4:8080/ws/pairing?app_id=12345&device_id=7e509404-9d7c-46b4-8f6a-e2a9668ad184&step=2"
            ),
        )
    ]
    assert (
        request[0].kwargs["data"]
        == '{"auth_Data":{"auth_type":"SPC","request_id":"0","ServerAckMsg":'
        '"01030000000000000000145F38EAFF0F6A6FF062CA652CD6CBAD9AF1EC62470000000000"}}'
    )
|
9,105 | 21fec6d307b928a295f2ffbf267456f9cd9ea722 | import os
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import cv2
import color_to_gray_operations
VIZ_PATH = '../output_data/visualizations/gray_intensities/'
def visualize_grayscale_intensities(img, out_path):
    """Render a 2D grayscale image as a 3D intensity surface.

    Arguments:
        img: 2D array of pixel intensities (height x width).
        out_path: directory prefix; the plot is written to
            ``out_path + 'surface.png'``.
    """
    img_x, img_y = np.mgrid[0: img.shape[0], 0: img.shape[1]]
    fig = plt.figure()
    # FIX: fig.gca(projection='3d') was deprecated in matplotlib 3.4 and
    # removed in 3.6; add_subplot is the supported way to get a 3D axes.
    ax = fig.add_subplot(projection='3d')
    ax.plot_surface(img_x, img_y, img, rstride=1, cstride=1, cmap=plt.cm.jet,
                    linewidth=0)
    plt.savefig(out_path + 'surface.png')
    plt.close()
def visualize_color_intensities(color_img, out_path):
    """Render each BGR channel of a color image as its own 3D surface.

    Writes ``blue_surface.png``, ``green_surface.png`` and
    ``red_surface.png`` under the ``out_path`` prefix.

    Arguments:
        color_img: 3-channel BGR image as returned by cv2.imread.
        out_path: directory prefix for the three output files.
    """
    # cv2.split yields channels in BGR order, matching the names below.
    for name, channel in zip(('blue', 'green', 'red'), cv2.split(color_img)):
        ch_x, ch_y = np.mgrid[0: channel.shape[0], 0: channel.shape[1]]
        fig = plt.figure()
        # FIX: fig.gca(projection='3d') was removed in matplotlib 3.6;
        # add_subplot is the supported replacement.
        ax = fig.add_subplot(projection='3d')
        ax.plot_surface(ch_x, ch_y, channel, rstride=1, cstride=1,
                        cmap=plt.cm.jet, linewidth=0)
        plt.savefig(out_path + name + '_surface.png')
        plt.close()
def visualize_histogram(img):
    """Plot the 256-bin intensity histogram of a grayscale (2D) image.

    Color (3D) inputs are silently ignored, as before.
    """
    if len(img.shape) == 2:
        hist, bins = np.histogram(img, bins=[x for x in range(0, 257)])
        plt.figure()
        # FIX: the old code called fig.plot()/fig.show() — Figure has no
        # plot() method, and np.histogram returns one more bin edge than
        # counts, so plot the counts against the left bin edges via pyplot.
        plt.plot(bins[:-1], hist)
        plt.show()
def visualization_tests(path='../input_data/tunnel_1.png'):
    """Smoke test: plot the per-channel surfaces for the image at *path*.

    FIX: the old body immediately overwrote *path* with a hard-coded
    machine-specific absolute path, making the parameter dead; honor the
    caller's argument (and the relative default) instead.
    """
    img = cv2.imread(path)
    visualize_color_intensities(img, out_path=VIZ_PATH)
def experiments(path='../input_data/noisy_segments/honeycomb_1.png'):
    """Load *path*, convert it to grayscale, and plot its intensity surface."""
    grayscale = color_to_gray_operations.luminosity_method(cv2.imread(path))
    visualize_grayscale_intensities(grayscale, out_path=VIZ_PATH)
experiments()
#visualization_tests() |
9,106 | 31416f1ba9f3c44a7aa740365e05b5db49e70444 | #! /usr/bin/env python3
from PIL import Image
from imtools import *
import os
cwd = os.getcwd()
# get_imlist comes from imtools; presumably it returns the image file paths
# in the given directory — confirm against imtools.
filelist = get_imlist(os.getcwd())
print(filelist)
# Convert every listed image to a JPEG alongside the original.
for infile in filelist:
    outfile = os.path.splitext(infile)[0] + ".jpg"
    # Skip files that are already .jpg (source and destination coincide).
    if infile != outfile:
        try:
            Image.open(infile).save(outfile)
        except IOError:
            print("cannot convert", infile)
|
9,107 | 3eaa898d1428e48aeb0449c7216d0a994262f76a | """Plotting functionality for ab_test_model."""
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
from itertools import combinations
from ._ab_test_model_utils import _ab_test_utils
# pylint: disable=no-member
class _ab_test_plotting(_ab_test_utils):
    """Provide Funcs for class to plot Bayesian charts."""

    def _plot_posteriors(self, variants=None):
        """Plot KDE of the posterior samples.

        This is a private function. For a public interface, see
        plot_posteriors().

        Keyword Arguments:
            variants {list} -- which variants to plot. If None or empty, all
                are plotted; otherwise they must be contained in raw_data
                (default: {None}).
        """
        # FIX: default was a mutable `[]`; None avoids the shared-mutable-
        # default pitfall with identical semantics.
        if not variants:
            variants = list(self.posteriors.keys())
        for variant in variants:
            sns.kdeplot(self.posteriors[variant].get_posterior_sample(),
                        shade=True,
                        color=self.posteriors[variant].get_color())
        plt.legend(labels=variants, loc='upper right')
        # Beta priors model conversion rates; (log-)normal priors model the
        # raw metric, so the x-axis label differs.
        if self.prior_function == 'beta':
            plt.xlabel('Conversion Rate')
        elif (self.prior_function == 'log-normal'
              or self.prior_function == 'normal'):
            plt.xlabel(self.metric)
        sns.despine(left=True)
        plt.yticks([], [])
        title = 'Distribution(s) for {0} for {1}'.format(
            self._stringify_variants(variants),
            self.metric)
        title = self._format_title(title)
        plt.title(title)
        if self.prior_function == 'beta':
            locs, labels = plt.xticks()
            labels = self._format_axis_as_percent(locs, labels)
            plt.xticks(locs, labels=labels)

    def _plot_positive_lift(self, numerator_name, denominator_name):
        """Plot the lift vector as a kernel density estimation.

        This is a private function. For a public interface, see
        plot_positive_lift().

        Arguments:
            numerator_name {str} -- The name of the numerator in the lift
                calculation.
            denominator_name {str} -- The name of the denominator in the lift
                calculation.
        """
        lift = self.lift[numerator_name][denominator_name]
        ax = sns.kdeplot(lift, shade=True)
        # Shade the positive-lift portion of the KDE curve in red.
        line = ax.get_lines()[0]
        x, y = line.get_data()
        mask = x > 0
        x, y = x[mask], y[mask]
        ax.fill_between(x, y1=y, alpha=0.5, facecolor='red')
        if len(self.variant_bucket_names) > 1:
            title = numerator_name + ' vs ' + denominator_name
            ax.set_ylabel(title, rotation=0, fontstyle='italic')
        plt.axvline(x=0, linestyle='dotted', color='black')
        plt.xlabel('Lift')
        percent_positive_lift = sum(i > 0 for i in lift) / len(lift)
        title = '{0} had {1:.2%} probability of positive lift'.format(
            self.metric,
            percent_positive_lift)
        title = self._format_title(title)
        plt.title(title)
        sns.despine(left=True)
        plt.yticks([], [])
        locs, labels = plt.xticks()
        labels = self._format_axis_as_percent(locs, labels)
        plt.xticks(locs, labels=labels)

    def _plot_ecdf(self, numerator_name, denominator_name):
        """Plot the empirical cumulative distribution function.

        This is a private function. For a public interface, see
        plot_ecdf().

        Arguments:
            numerator_name {str} -- The name of the numerator in the lift
                calculation.
            denominator_name {str} -- The name of the denominator in the lift
                calculation.
        """
        x = self.ecdf[numerator_name][denominator_name]['x']
        y = self.ecdf[numerator_name][denominator_name]['y']
        # Locate the lift values whose cumulative probability is closest to
        # the lower tail, the median, and the upper tail.
        # FIX: lambda parameters renamed so they no longer shadow `x`.
        lower_bound = x[y.index(min(y, key=lambda val:
                                    abs(val - self.confidence_level)))]
        median = x[y.index(min(y, key=lambda val: abs(val - 0.5)))]
        upper_bound = x[y.index(min(y, key=lambda val:
                                    abs(val - (1 - self.confidence_level))))]
        sns.lineplot(x=x, y=y)
        ci = 1 - self.confidence_level
        title = ('Median Lift was {0:.2%}, with a '
                 '{1:.0%} CI of [{2:.2%}, {3:.2%}]'.format(median,
                                                           ci,
                                                           lower_bound,
                                                           upper_bound))
        title = self._format_title(title)
        plt.title(title)
        plt.xlabel('Lift')
        plt.ylabel('Cumulative Probability')
        plt.axvline(x=lower_bound, linestyle='dotted', color='black')
        plt.axvline(x=median, linestyle='dotted', color='black')
        plt.axvline(x=upper_bound, linestyle='dotted', color='black')
        sns.despine(left=True)
        locs, labels = plt.xticks()
        labels = self._format_axis_as_percent(locs, labels)
        plt.xticks(locs, labels=labels)

    def _calc_ecdf(self):
        """Calculate the empirical CDFs and set member var."""
        for numerator, vals in self.lift.items():
            for denominator, lift in vals.items():
                raw_data = np.array(lift)
                cdfx = np.sort(np.unique(lift))
                x_values = np.linspace(start=min(cdfx),
                                       stop=max(cdfx),
                                       num=len(cdfx))
                size_data = raw_data.size
                y_values = []
                # Fraction of samples at or below each grid point.
                for i in x_values:
                    temp = raw_data[raw_data <= i]
                    value = temp.size / size_data
                    y_values.append(value)
                temp = {}
                temp['x'] = x_values
                temp['y'] = y_values
                # FIX: both branches of the old if/else performed the same
                # assignment; just ensure the inner dict exists, then assign.
                if numerator not in self.ecdf:
                    self.ecdf[numerator] = {}
                self.ecdf[numerator][denominator] = temp

    def _calc_lift(self):
        """Calculate the lift of the variants over the others."""
        for key, val in self.posteriors.items():
            if key == self.control_bucket_name:
                continue
            # Relative lift of this variant over control: num / denom - 1.
            lift_over_control = np.divide(val.get_posterior_sample(),
                                          self.posteriors[
                                              self.control_bucket_name]
                                          .get_posterior_sample()) - 1
            # FIX: collapsed the duplicated if/else assignment branches.
            if key not in self.lift:
                self.lift[key] = {}
            self.lift[key][self.control_bucket_name] = lift_over_control
            if self.debug:
                percent_positive_lift = (sum(i > 0 for i in
                                             lift_over_control)
                                         / len(lift_over_control))
                print('percent positive lift for {0} over {1} = {2:.2%}'
                      .format(key, self.control_bucket_name,
                              percent_positive_lift))
        if self.compare_variants:
            # Pairwise lift between every pair of non-control variants.
            comparisons = list(range(0, len(self.variant_bucket_names)))
            combs = combinations(comparisons, 2)
            for combination in combs:
                denom = self.posteriors[
                    self.variant_bucket_names[combination[0]]]
                num = self.posteriors[
                    self.variant_bucket_names[combination[1]]]
                lift = np.divide(num.get_posterior_sample(),
                                 denom.get_posterior_sample()) - 1
                if num.get_variant_name() not in self.lift:
                    self.lift[num.get_variant_name()] = {}
                self.lift[num.get_variant_name()][
                    denom.get_variant_name()] = lift
                if self.debug:
                    percent_positive_lift = sum(i > 0 for i in lift) \
                        / len(lift)
                    print('percent positive lift for {0} over {1} = {2:.2%}'
                          .format(num.get_variant_name(),
                                  denom.get_variant_name(),
                                  percent_positive_lift))

    def plot_posteriors(self, variants=None):
        """Plot the PDFs of the posterior distributions.

        Arguments:
            variants {list} -- List of variant names to be plotted.
                If variants is not set, all are plotted; otherwise, the
                variants in the list are plotted. Variants must only have
                items in bucket_col_name (default: {None}).
        """
        if variants:
            for var in variants:
                if var not in self.posteriors.keys():
                    raise ValueError(('Variants must only be a value in '
                                      'bucket_col_name'))
        self._plot_posteriors(variants)

    def plot_positive_lift(self, variant_one, variant_two):
        """Plot the positive lift pdf between variant_one and variant_two.

        Arguments:
            variant_one and variant_two should not be the same
            variant_one {str} -- should be a value in bucket_col_name.
            variant_two {str} -- should be a value in bucket_col_name.
        """
        if variant_one == variant_two:
            raise ValueError('variant_one and variant_two cannot be the same')
        if variant_one not in self.posteriors.keys() or \
                variant_two not in self.posteriors.keys():
            raise ValueError(('Variants must only be a value in column '
                              '{}'.format(self.bucket_col_name)))
        if variant_one != self.control_bucket_name and \
                variant_two != self.control_bucket_name:
            if not self.compare_variants:
                raise RuntimeError('Compare_variants must be set to true in '
                                   'order to compare {0} and {1}'
                                   .format(variant_one, variant_two))
        # self.lift is keyed numerator-first, so try both orientations.
        if variant_one in self.lift.keys() and \
                variant_two in self.lift[variant_one].keys():
            self._plot_positive_lift(numerator_name=variant_one,
                                     denominator_name=variant_two)
        else:
            self._plot_positive_lift(numerator_name=variant_two,
                                     denominator_name=variant_one)

    def plot_ecdf(self, variant_one, variant_two):
        """Plot the empirical cdf for the lift b/w variant_one and variant_two.

        Arguments:
            variant_one {str} -- should be a value in bucket_col_name.
            variant_two {str} -- should be a value in bucket_col_name.
        """
        if variant_one == variant_two:
            raise ValueError('variant_one and variant_two cannot be the same')
        if variant_one not in self.posteriors.keys() or \
                variant_two not in self.posteriors.keys():
            raise ValueError(('Variants must only be a value in column '
                              '{}'.format(self.bucket_col_name)))
        # self.ecdf is keyed numerator-first, so try both orientations.
        if variant_one in self.ecdf.keys() and \
                variant_two in self.ecdf[variant_one].keys():
            self._plot_ecdf(numerator_name=variant_one,
                            denominator_name=variant_two)
            plt.ylabel('Cumulative Lift: {0} vs {1}'
                       .format(variant_two, variant_one))
        else:
            self._plot_ecdf(numerator_name=variant_two,
                            denominator_name=variant_one)
            plt.ylabel('Cumulative Lift: {0} vs {1}'
                       .format(variant_one, variant_two))
|
9,108 | bd96b31c5de2f0ad4bbc28c876b86ec238db3184 | n = int(input("Please input the number of 1's and 0's you want to print:"))
# Emit n alternating digits on one line ("1 0 1 ..."), starting with 1,
# each followed by a space — identical output to printing piece by piece.
print("".join("1 " if position % 2 == 1 else "0 "
              for position in range(1, n + 1)), end="")
9,109 | 22f7f725d89db354b2e66ff145550192826af5ea | /opt/python3.7/lib/python3.7/_weakrefset.py |
9,110 | 687f7f4908e8a5448335f636edf74a627f03c306 | from typing import Tuple, Union
from webdnn.graph.graph import Graph
from webdnn.graph.operators.zero_padding_2d import ZeroPadding2D
from webdnn.graph.operators.convolution2d import Convolution2D
from webdnn.graph.operators.max_pooling_2d import MaxPooling2D
from webdnn.graph.operators.average_pooling_2d import AveragePooling2D
from webdnn.graph.optimize_rule import OptimizeRule
from webdnn.graph.traverse import search_sub_structure
from webdnn.graph.variable import Variable
from webdnn.util import flags
class ConcatZeroPadding(OptimizeRule):
    """Fold explicit ZeroPadding2D layers into their consumer's own padding."""

    def optimize(self, graph: Graph) -> Tuple[Graph, bool]:
        """
        Merges padding of ZeroPadding2D and Convolution2D | MaxPooling2D | AveragePooling2D layer

        Args:
            graph: the computation graph to rewrite in place.

        Returns:
            The (possibly modified) graph and a flag telling whether any
            rewrite was applied.
        """
        # this optimization is always applied (since backends do not implement padding)
        flag_changed = False
        for tail_layer in [Convolution2D, MaxPooling2D, AveragePooling2D]:
            matches = search_sub_structure(graph, [ZeroPadding2D, Variable, tail_layer])
            while len(matches) > 0:
                match = matches[0]
                a1: ZeroPadding2D = match[0]
                a2: Union[Convolution2D, MaxPooling2D, AveragePooling2D] = match[2]
                # Add the explicit zero padding onto the consumer's padding.
                zero_pad = a1.parameters["padding"]
                conv_pad = a2.parameters["padding"]
                a2.parameters["padding"] = (zero_pad[0] + conv_pad[0], zero_pad[1] + conv_pad[1])
                # Rewire the consumer to read the pad layer's input directly.
                x1 = a1.inputs["x"]
                x2 = a2.inputs["x"]
                a1.remove_all()
                # replace_input checks if the shape of x1 and x2 are same, but this restriction does not hold.
                a2.remove_input(x2)
                a2.append_input("x", x1)
                flag_changed = True
                # Re-scan: removing the pad layer may expose further matches.
                matches = search_sub_structure(graph, [ZeroPadding2D, Variable, tail_layer])
        return graph, flag_changed
|
9,111 | 4048d7bfc7922ef76d98d43e1ea266e732e0982e |
import requests
# qq推送 申请参考https://cp.xuthus.cc/
key = ''
def main():
    """Fetch today's weather for the configured city and push it to a QQ group."""
    try:
        api = 'http://t.weather.itboy.net/api/weather/city/'  # API base URL; must be combined with a city code
        city_code = '101070201'  # look up your city code at https://where.heweather.com/index.html
        tqurl = api + city_code
        response = requests.get(tqurl)
        d = response.json()  # parsed JSON payload
        print(d['status'])
        if (d['status'] == 200):  # only format and push when the API reports success
            parent = d["cityInfo"]["parent"]  # province
            city = d["cityInfo"]["city"]  # city
            update_time = d["time"]  # time the data was refreshed
            date = d["data"]["forecast"][0]["ymd"]  # date
            week = d["data"]["forecast"][0]["week"]  # weekday
            weather_type = d["data"]["forecast"][0]["type"]  # conditions
            wendu_high = d["data"]["forecast"][0]["high"]  # daily high temperature
            wendu_low = d["data"]["forecast"][0]["low"]  # daily low temperature
            shidu = d["data"]["shidu"]  # humidity
            pm25 = str(d["data"]["pm25"])  # PM2.5
            pm10 = str(d["data"]["pm10"])  # PM10
            quality = d["data"]["quality"]  # air quality
            fx = d["data"]["forecast"][0]["fx"]  # wind direction
            fl = d["data"]["forecast"][0]["fl"]  # wind force
            ganmao = d["data"]["ganmao"]  # cold/flu index
            tips = d["data"]["forecast"][0]["notice"]  # friendly tip
            cpurl = "https://push.xuthus.cc/group/" + key  # push to a QQ group
            # For pushing to a personal QQ, use the service's /send/ endpoint
            # with a personal key instead.
            # Message body (labels are intentionally in Chinese).
            tdwt = ("-----------------------------------------" + "\n【今日份天气】\n城市: " + parent + city +
                    "\n日期: " + date + "\n星期: " + week + "\n天气: " + weather_type + "\n温度: " + wendu_high + " / " + wendu_low + "\n湿度: " +
                    shidu + "\nPM25: " + pm25 + "\nPM10: " + pm10 + "\n空气质量: " + quality +
                    "\n风力风向: " + fx + fl + "\n感冒指数: " + ganmao + "\n温馨提示: " + tips + "\n更新时间: " + update_time)
            print(tdwt)
            requests.post(cpurl, tdwt.encode('utf-8'))  # encode as UTF-8 for the push service
    except Exception as exc:  # FIX: was a bare `except:` that hid the cause
        error = '【出现错误】\n 今日天气推送错误,请检查服务或网络状态!'
        print(error)
        print(exc)
def main_handler(event, context):
    """Serverless entry point: run the push job once and report success.

    Any exception raised by main() is re-raised so the platform records
    the invocation as failed.
    """
    try:
        main()
    except Exception as err:
        raise err
    return 'success'
if __name__ == '__main__':
# print(extension)
print(main_handler({}, {})) |
9,112 | f24075ea70851ce95bb6b3cd87b6417f8141d546 | import unittest
import hospital.employee.nurse as n
class TestNurse(unittest.TestCase):
    """Unit tests for hospital.employee.nurse.Nurse."""

    @classmethod
    def setUpClass(cls):
        print('Start testing nurse')

    def setUp(self):
        # Fresh fixtures per test: name, age, phone, salary, patients treated.
        self.n1 = n.Nurse('Tess', 18, "5436890982", 3200, 25)
        self.n2 = n.Nurse('Melissa', 40, "8920953924", 9000, 5)

    def test_init(self):
        # Constructor must store every field verbatim.
        self.assertEqual(self.n1.name, "Tess")
        self.assertEqual(self.n1.age, 18)
        self.assertEqual(self.n1.phone_num, "5436890982")
        self.assertEqual(self.n1.salary, 3200)
        self.assertEqual(self.n1.number_treated, 25)

    def test_display(self):
        # display() must render the exact multi-line summary string.
        self.assertEqual(self.n1.display(), "Nurse {} is {} years old. \nThe best number to reach out is {}. \nThe nurse's salary is {}. \nThe nurse has treated {} patients.\n".format('Tess', 18, "5436890982", 3200, 25))

    def test_change_in_phone_num(self):
        self.n1.change_in_phone_num("1234567890")
        self.n2.change_in_phone_num("0987654321")
        self.assertEqual(self.n1.phone_num, "1234567890")
        self.assertEqual(self.n2.phone_num, "0987654321")
        # A second change must overwrite the first.
        self.n1.change_in_phone_num("3254678313")
        self.n2.change_in_phone_num("0928495820")
        self.assertEqual(self.n1.phone_num, "3254678313")
        self.assertEqual(self.n2.phone_num, "0928495820")

    def test_change_in_salary(self):
        self.n1.change_in_salary(9000)
        self.n2.change_in_salary(10000)
        self.assertEqual(self.n1.salary, 9000)
        # Negative amounts are rejected with a message instead of applied.
        self.assertEqual(self.n1.change_in_salary(-50), "Invalid salary.")
        self.assertEqual(self.n2.salary, 10000)
        self.n1.change_in_salary(20)
        self.assertEqual(self.n1.salary, 20)

    def test_bonus(self):
        # 3200 -> 3450 for 25 patients and 9000 -> 9050 for 5 patients,
        # i.e. the bonus adds 10 per patient treated.
        self.n1.bonus()
        self.n2.bonus()
        self.assertEqual(self.n1.salary, 3450)
        self.assertEqual(self.n2.salary, 9050)

    def tearDown(self):
        self.n1 = None
        self.n2 = None

    @classmethod
    def tearDownClass(cls):
        print("Finish test nurse")
# argv is stubbed so this also runs inside notebooks/REPLs; exit=False keeps
# the interpreter alive after the suite finishes.
unittest.main(argv=[''], verbosity=2, exit=False)
|
9,113 | 7ea6fefa75d36ff45dcea49919fdc632e378a73f | from sqlalchemy import create_engine
from sqlalchemy import Table,Column,Integer,String,MetaData,ForeignKey
from sqlalchemy.sql import select
from sqlalchemy import text
# Creating a SQLite database 'college.db' (echo=True logs emitted SQL).
engine = create_engine('sqlite:///college.db', echo=True)
meta = MetaData()
# Creating a Students table
students = Table(
    'students', meta,
    Column('id', Integer, primary_key=True),
    Column('name', String),
    Column('lastname', String)
)
meta.create_all(engine)
# Inserting a single row
conn = engine.connect()
ins = students.insert().values(name='Ravi', lastname='Mahajan')
res = conn.execute(ins)
# Execute many commands: one insert per parameter dict.
conn.execute(students.insert(), [
    {'name': 'Rajiv', 'lastname': 'Khanna'},
    {'name': 'Komal', 'lastname': 'Bhandari'},
    {'name': 'Abdul', 'lastname': 'Sattar'},
    {'name': 'Priya', 'lastname': 'Rajhans'},
])
# Selecting from table Students
s = students.select()
result = conn.execute(s)
# row = result.fetchall()
for row in result:
    print(row)
# Where condition
s = students.select().where(students.c.id > 2)
result = conn.execute(s)
row = result.fetchall()
print(row)
s = select([students])
result = conn.execute(s)
for row in result:
    print(row)
# Using text() to execute a raw SQL string
t = text('SELECT * from students')
result = conn.execute(t)
# Update: rename lastname Khanna -> Bhatt
stmt = students.update().where(students.c.lastname == 'Khanna').values(lastname='Bhatt')
conn.execute(stmt)
s = students.select()
conn.execute(s).fetchall()
from sqlalchemy.sql.expression import update
stmt = update(students).where(students.c.lastname == 'Khanna').values(lastname='Kapoor')
# NOTE(review): stmt2 (the delete) is built but never executed below — only
# stmt is; confirm whether the delete was intended to run.
stmt2 = students.delete().where(students.c.lastname == 'Rajhans')
conn.execute(stmt)
# Addresses table with a foreign key back to students.
addresses = Table(
    'addresses', meta,
    Column('id', Integer, primary_key=True),
    Column('st_id', Integer, ForeignKey('students.id')),
    Column('postal_add', String),
    Column('email_add', String))
meta.create_all(engine)
conn.execute(addresses.insert(), [
    {'st_id': 1, 'postal_add': 'Shivajinagar Pune', 'email_add': 'ravi@gmail.com'},
    {'st_id': 1, 'postal_add': 'ChurchGate Mumbai', 'email_add': 'kapoor@gmail.com'},
    {'st_id': 3, 'postal_add': 'Jubilee Hills Hyderabad', 'email_add': 'komal@gmail.com'},
    {'st_id': 5, 'postal_add': 'MG Road Bangaluru', 'email_add': 'as@yahoo.com'},
    {'st_id': 2, 'postal_add': 'Cannought Place new Delhi', 'email_add': 'admin@khanna.com'},
])
# Update query for Multiple tables.
# NOTE(review): this statement is also built but never executed.
stmt = students.update().values({students.c.name: 'xyz',
                                 addresses.c.email_add: 'abc@xyz.com'}).where(students.c.id == addresses.c.id)
# using joins
from sqlalchemy import join
from sqlalchemy.sql import select
j = students.join(addresses, students.c.id == addresses.c.st_id)
stmt = select([students]).select_from(j)
result = conn.execute(stmt)
for res in result:
    print(res)
|
9,114 | 8f7ecbe03e9a7a1d9df8cbe4596456e21b84653b | from base64 import b64encode
from configparser import ConfigParser
import functools
from flask import (
Blueprint, flash, redirect, render_template, request, session, url_for, app
)
from requests.exceptions import SSLError
import spotipy
from spotipy import oauth2
bp = Blueprint('auth', __name__, url_prefix='/auth')

# Spotify credentials are read from spotify.cfg; values are stored with
# surrounding single quotes in the file, hence the strip("'").
config = ConfigParser()
config.read('spotify.cfg')
CLIENT_ID = config.get('SPOTIFY', 'CLIENT_ID').strip("'")
CLIENT_SECRET = config.get('SPOTIFY', 'CLIENT_SECRET').strip("'")
REDIRECT_URI = config.get('SPOTIFY', 'REDIRECT_URI').strip("'")
# OAuth scopes requested from Spotify on login.
SCOPE = 'user-read-currently-playing user-library-read playlist-read-private'
SP_OAUTH = oauth2.SpotifyOAuth(CLIENT_ID, CLIENT_SECRET, REDIRECT_URI, scope=SCOPE)
@bp.route('/login')
def login():
    '''
    : Create session and login user
    : PARAMS None
    : RETURN <view>
    '''
    try:
        # Start from a clean session, then hand the user to Spotify's authorize page.
        session.clear()
        return redirect(SP_OAUTH.get_authorize_url())
    except ConnectionError as e:
        flash("Connection error")
        # BUG FIX: the view previously fell through and returned None, which
        # makes Flask raise a 500. Redirect home after flashing, mirroring callback().
        return redirect(url_for('home'))
@bp.route('/callback/')
def callback():
    '''
    : Redirect user after login
    : PARAMS None
    : RETURN <view>
    '''
    # Spotify redirects here with ?code=... after the user authorizes the app.
    code = request.args.get('code')
    token = SP_OAUTH.get_access_token(code)
    if token:
        # Persist both tokens so login_required can refresh later.
        session['token'] = token['access_token']
        session['refresh'] = token['refresh_token']
        sp = spotipy.Spotify(auth=session['token'])
        try:
            # Cache the display name for the UI.
            cu = sp.current_user()
            session['display_name'] = cu['display_name']
        except SSLError as e:
            # flash("Connection error")
            return redirect(url_for('home'))
    else:
        flash("Cannot get access token")
    return redirect(url_for('home'))
@bp.route('/logout')
def logout():
    """Clear the session (logging the user out) and send them back home."""
    session.clear()
    home_view = url_for('home')
    return redirect(home_view)
def login_required(view):
    """Decorator: refresh the Spotify access token before each request and
    redirect to 'home' when no refresh token is present in the session."""
    @functools.wraps(view)
    def wrapped_view(**kwargs):
        if 'refresh' in session:
            # Refresh unconditionally so the wrapped view always has a valid token.
            refresh = SP_OAUTH.refresh_access_token(session['refresh'])
            session['token'] = refresh['access_token']
            session['refresh'] = refresh['refresh_token']
            sp = spotipy.Spotify(auth=session['token'])
            try:
                cu = sp.current_user()
                session['display_name'] = cu['display_name']
            except SSLError:
                # flash("Connection error - please try again.")
                return redirect(url_for('home'))
            return view(**kwargs)
        else:
            return redirect(url_for('home'))
    return wrapped_view
|
9,115 | 51848a64102f7fe8272fcf56a9792ed50c430538 | import random
def patternToNumber(pattern):
    """Encode a DNA string as a base-4 integer (A=0, C=1, G=2, T=3)."""
    value = 0
    for symbol in pattern:
        # Horner's rule: same computation as the recursive formulation.
        value = value * 4 + "ACGT".index(symbol)
    return value
def symbolToNumber(symbol):
    """Map a nucleotide letter to its index: A=0, C=1, G=2, T=3.

    Returns None for any other input, matching the fall-through of the
    original if-chain.
    """
    return {"A": 0, "C": 1, "G": 2, "T": 3}.get(symbol)
def numberToPattern(index, k):
    """Decode a base-4 integer back into a DNA string of length k."""
    letters = []
    for _ in range(k):
        letters.append("ACGT"[index % 4])
        index //= 4
    # Digits were produced least-significant first.
    return ''.join(reversed(letters))
def numberToSymbol(index):
    """Inverse of symbolToNumber: 0=A, 1=C, 2=G, 3=T (None otherwise)."""
    return {0: "A", 1: "C", 2: "G", 3: "T"}.get(index)
def profileProbable(text, k, profile):
    """Return the k-mer of text most probable under the 4xk profile matrix.

    Ties and all-zero probabilities fall back to the first k-mer, as in the
    original (only a strictly larger probability replaces the best).
    """
    best_prob = 0
    best_kmer = text[0:k]
    for start in range(len(text) - k + 1):
        candidate = text[start:start + k]
        p = 1
        for offset in range(k):
            row = "ACGT".index(candidate[offset])
            p *= profile[row][offset]
        if p > best_prob:
            best_prob = p
            best_kmer = candidate
    return best_kmer
def hammingDistance(p, q):
    """Count positions where p and q differ (over their common prefix length)."""
    return sum(1 for a, b in zip(p, q) if a != b)
def distanceBetweenPatternAndString(pattern, DNA):
    """Sum, over every string in DNA, of the minimum Hamming distance from
    pattern to any k-mer of that string."""
    k = len(pattern)
    total = 0
    for strand in DNA:
        best = k + 1  # sentinel larger than any possible distance
        for i in range(len(strand) - k + 1):
            mismatches = sum(1 for a, b in zip(pattern, strand[i:i + k]) if a != b)
            if mismatches < best:
                best = mismatches
        total += best
    return total
def profileForm(motifs):
    """Build a 4xk profile matrix from equal-length motifs.

    Counts start at 1 (Laplace pseudocounts) and are divided by the number
    of motifs, exactly as the original did in place.
    """
    k = len(motifs[0])
    counts = [[1] * k for _ in range(4)]
    for motif in motifs:
        for col, symbol in enumerate(motif):
            counts["ACGT".index(symbol)][col] += 1
    return [[c / len(motifs) for c in row] for row in counts]
def consensus(profile):
    """Return the consensus string of a 4xk profile matrix.

    For each column, pick the symbol (row) with the highest probability.
    Ties keep the earlier symbol (A < C < G < T) because only a strictly
    greater probability replaces the current best.

    Fixes: the original shadowed the builtins `str` and `max` with locals.
    """
    result = ""
    for col in range(len(profile[0])):
        best_prob = 0
        best_row = 0
        for row in range(4):
            if profile[row][col] > best_prob:
                best_row = row
                best_prob = profile[row][col]
        result += "ACGT"[best_row]
    return result
def score(motifs):
    """Motif score: total mismatches between each motif and the consensus of
    the motifs' own (pseudocounted) profile. Lower is better."""
    profile = profileForm(motifs)
    cons = consensus(profile)
    # NOTE: local 'score' shadows this function's name; harmless here since
    # the function is not called recursively.
    score = 0
    for index in motifs:
        for i in range(len(index)):
            if cons[i] != index[i]:
                score +=1
    return score
def randomMotifSearch(DNA, k, t):
    """One run of randomized motif search: start from random k-mers, then
    repeatedly rebuild the profile and re-pick the most probable k-mer per
    strand until the score stops improving. Returns the best motif set."""
    bestMotifs = []
    motifs = []
    for index in range(t):
        # Re-seeding from system entropy each iteration; results are stochastic.
        random.seed()
        i= random.randint(0, len(DNA[index])-k)
        motifs.append(DNA[index][i:i+k])
    bestMotifs = motifs.copy()
    count = 0
    while True:
        profile = profileForm(motifs)
        for index in range(t):
            motifs[index] = profileProbable(DNA[index], k, profile)
        if score(motifs) < score(bestMotifs):
            bestMotifs = motifs.copy()
            count +=1
        else:
            # Converged: print how many improving iterations happened.
            print(count)
            return bestMotifs
k = 15
t = 20
DNA = ["ACTTATATCTAGAGTAAAGCCCTGATTCCATTGACGCGATCCCTACCTCCATCATACTCCACAGGTTCTTCAATAGAACATGGGGAAAACTGAGGTACACCAGGTCTAACGGAGATTTCTGGCACTAACTACCCAAAATCGAGTGATTGAACTGACTTATATCTAGAGT", "AAAGCCCTGATTCCATTGACGCGATCCCTACCTCCATCATACTCCACAGGTTCTTCAATAGAACATGGGGAAAACTGAGGTACACCAGGTCTAACGGAGATTTCTGGCACTAACTACCCAAAATCCTCTCGATCACCGACGAGTGATTGAACTGACTTATATCTAGAGT", "CACTCCCGTCCGTCTGACGCCAGGTGCTCTACCCCGCTGATTGTCTGGTACATAGCAGCCTATAGATCACCGATGCAGAAACACTTCGAGGCAGCCGATTTCGCTTATCACAACGTGACGGAATTTGATAAACCACGTACTCTAATACCGTCACGGGCCCATCAACGAA", "ACAAGAACTGGTGGGGAGACTATGACACTCTAGCGGTCGCATAAGGGCCGGAAACCAGGACAAATCGATAAGATGAAGCGGGGATATAAGCCTTATACTGCGACTGGTTCCTTATATTATTTAGCCCCGATTGATCACCGATTAAAATATTCTGCGGTTTTCGAGACGG", "TAACCACACCTAAAATTTTTCTTGGTGAGATGGACCCCCGCCGTAAATATCAGGATTAAATGTACGGATACCCATGACCCTCCAGTCATCTACCTTCCCGTGGTGGTCGCTCAGCCTTGTGCAGACCGAACTAGCACCTGTCACATACAATGTTGCCCGCATAGATCGT", "ATCCGACAGAGGCAGTGAATAAGGTTTCGTTTCCTCAGAGAGTAGAACTGCGTGTGACCTTGCCTTCACCGACATCCGTTTCCAATTGAGCTTTTCAGGACGTTTAGGTAACTGATTGTCATTGCAATTGTCCGGGGGATTTAGATGGCCGGGTACCTCTCGGACTATA", "CCTTGTTGCCACCGATTCGCGAGCAACATCGGAGTGCTCTGATTCACGGCGATGCTCCACGAAGAGGACCGCGGCACGACACGCCCTGTACCTACGTTTCTGGATATCCTCCGGCGAGTTAATAGAGCAATACGACCTGGTCGTCGAGATCGTGTATCTAGCCCTACCT", "ATAGGTTAACGAATCAGGAGAGTTAATTTTACCTAGCTAGAGCGGACGGTGCCTGGCTGTATTCGCGTTTGACTTTCGGGCTCGCTGATAACTTGTGATCACCTTTTACGCTTACTGGATCCAACGATGGATCAAAGTTGAGAATTTCTGTGCCTTGGGTGTGAGCTGT", "CTGACGAAAGGACGGGCGGTGTACTTAGTTTGGGGTAAAATAGTTGGTATAATTCTGTGCGACAGACATTTGGTCAGGCCATACTGCCATATCGTGATGTAACTATCCACACTACGTCATAGGCCCTTGTGATCAATTAAACGTTCCTCATGCCAGGCTATCTGTTTAA", "GGCTTCGCGTTTAAGGCTGGATTAAGTACTCCGCCTTGTGATCTGTGATCCTCCGACCTGTGATCAGCAAGATTGGAACCTAGGTAGGCGGCGGGTCTACGCTGGCCCACAATCGTGAGTCCCCCACTCCGTAGGTTGTGGAATTTATAGACCCGCAAGGGGCACCACT", "AGGATGACACCCAGGATGAATCTGGATTAGGAACACCAACCCGACATATTTGTTACCGCTGCAGCATTTCGCTCTTGGACGCGTAACCCGAGATCCGTCTCGCGATCGTCACGGATCGGGATTATGCAGGCAATACCTTGTGATCACTCCGCGCTTGGTTTTGCTAGCG", 
"ACATCTCTAGTCACTTTTATTGAGCAGGTGGGCGGATTCATGATCCGGCTCTGTCGTACGTCCAACCACGGTGACATGTTCGGAGCTGTCGCCGTGGAGCAGAGATACATCGGATCTATCAATTTTACTAAGAGCAACTAGCCACGACAAACTGTGATCACCGATTGGA", "AATTTGCGTATCTCTAGGACTCCCTCATACAAATCAAAGCTTGGATGGGTAAGATGCCGCAGCAGCAGGTATCTCATATTGGCTATTAAGAGCCAGGCCCTATGGCCTTAGTATCACCGATCAGACGTCGCATGAGCGGGCCCGTTGTCCTATCTCTTTAGCTGCCGCA", "GAAGTAAAGGGGTTCCACTGCGTAGAGCGTGCCCCTCTGGTGTGCCGTACTGTTATGGTGATACAGCTTCCTTATACCCCTCGTAAAGCGGCTAATGGTCCTAATGAATGCCCTTGTGAAATCCGAATCGCTTTACAATTGCGTTCGGCGGAATGCAGTCACCAGTGTT", "TACACTACGCGTTATTTACTTTTACTGAGTCCTTGTCGCCACCGAACGAGGATTGTTCATTGTATCCGGAGATTAGGAGTTCGCATCGCTGACACAGCCAGTTCGTAGCAAATACCGCTGGCCCTGGGCACTCCAGATCAGAACTACTAGCCCTAAACTCTATGACACA", "TTGGGTCTCGATCCCTCTATGTTAAGCTGTTCCGTGGAGAATCTCCTGGGTTTTATGATTTGAATGACGAGAATTGGGAAGTCGGGATGTTGTGATCACCGCCGTTCGCTTTCATAAATGAACCCCTTTTTTTCAGCAGACGGTGGCCTTTCCCTTTCATCATTATACA", "TTTCAAGTTACTACCGCCCTCTAGCGATAGAACTGAGGCAAATCATACACCGTGATCACCGACCCATGGAGTTTGACTCAGATTTACACTTTTAGGGGAACATGTTTGTCGGTCAGAGGTGTCAATTATTAGCAGATATCCCCCAACGCAGCGAGAGAGCACGGAGTGA", "GATCCATTACCCTACGATATGTATATAGCGCCCTAGTACGGCTTCTCCCTTGCAGACACGCAGGCGCTGTGCGCTATCGGCTTCCTCGGACATTCCTGGATATAAGTAACGGCGAACTGGCTATCACTACCGCCGCTCCTTAAGCCTTGGTTTCACCGACGATTGTCGT", "TAGTAGATTATTACCTGTGGACCGTTAGCTTCAAGACCGAAACGTTGGTGATGCTACTTAAATGTCAAGAGTTGCGAAGTTGGGCGAAGCACATCCGTACTCCCAAGTGGACGATCGATAGATCCATGGAGTTTCCATCCATCTTAATCCGCCCTTTGCATCACCGACG", "TACAAGGCACAAACGAGACCTGATCGAACGGTGCACGGTCGAGGCAGCGAGATAAATGTACATTGAGAGCACCTTGTGATTTACGACCTGCATCGAAGGTTTCTTGGCACCCACCTGTCGTCCGCCAGGGCAGAGCCGACATTATATGACGCTGATGTACGAAGCCCCT"]
# Run the randomized search 1000 times and keep the best-scoring motif set.
best = randomMotifSearch(DNA, k, t)
# NOTE(review): 'min' shadows the builtin min() for the rest of the module.
min = score(best)
for index in range(1000):
    print(index)
    a = randomMotifSearch(DNA, k, t)
    if score(a) < score(best):
        best = a
        min = score(a)
print(min)
for index in best:
    print(index)
9,116 | a724b49c4d86400b632c02236ceca58e62ba6c86 | import json
import datetime
import string
import random
import logging
import jwt
from main import db
from main.config import config
def execute_sql_from_file(filename):
    """Execute every semicolon-separated SQL statement found in *filename*.

    Statements that fail (e.g. DROP TABLE on a table that does not yet exist)
    are logged and skipped so the remaining statements still run.

    Fixes: Python-2 `except Exception, e` syntax (a SyntaxError on Python 3),
    a `.decode('utf-8')` call on an already-decoded str, and an unclosed file.
    """
    # 'with' guarantees the handle is closed even if reading raises.
    with open(filename, 'r') as fd:
        sql_file = fd.read()
    # Naive split on ';' -- adequate for the simple fixture scripts used here.
    sql_commands = sql_file.split(';')
    # Execute every command from the input file
    for command in sql_commands:
        try:
            # Text-mode read already yields str; no decode needed.
            db.session.execute(command)
        except Exception as e:
            logging.exception(e)
def create_mock_data():
    """Load the test fixture data into the database."""
    execute_sql_from_file('./sql/test.sql')
def drop_tables():
    """Drop all test tables (companion script to create_mock_data)."""
    execute_sql_from_file('./sql/drop_tables.sql')
def create_headers(access_token=None):
    """Build JSON request headers, adding a Bearer Authorization header
    when an access token is supplied."""
    result = {'Content-Type': 'application/json'}
    if access_token:
        result['Authorization'] = 'Bearer {}'.format(access_token)
    return result
def json_response(response):
    """Decode a test-client response body (bytes) into a Python object."""
    body = response.data.decode('utf-8')
    return json.loads(body)
def generate_access_token(user_id, is_expired=False):
    """
    Generate JWT Token for test authentication.
    :param user_id: User ID
    :param is_expired: To generate expired tokens
    :return: JWT Token string
    """
    iat = datetime.datetime.utcnow()
    # NOTE: the conditional expression below binds looser than +/-, so 'exp'
    # evaluates to (iat + 1h) when not expired, else (iat - 5min) -- i.e. an
    # already-expired token. The layout is confusing but the value is correct.
    return jwt.encode({
        'sub': user_id, # Subject of this token
        'iat': iat, # Issued at
        'exp': iat + datetime.timedelta(hours=1) # Expired at
        if not is_expired
        else iat - datetime.timedelta(minutes=5)
    }, config.SECRET_KEY)
def random_string(string_length=10):
    """Generate a random lowercase ASCII string of the given length."""
    alphabet = string.ascii_lowercase
    return ''.join(random.choices(alphabet, k=string_length))
|
9,117 | 889fdca3f92f218e6d6fd3d02d49483f16a64899 | new_tuple = (11,12,13,14,15,16,17)
# Demo: convert a list with mixed element types into a tuple and print both.
new_list = ['one' ,12,'three' ,14,'five']
print("Tuple: ",new_tuple)
print("List: ", new_list)
# tuple() copies the list's elements into an immutable tuple.
tuple_2= tuple (new_list)
print("Converted tuple from the list : ", tuple_2)
9,118 | e8ea307352805bf0b5129e2ad7f7b68c44e78fc9 | import src.engine.functions.root_analyzer.main as main
from src.engine.functions.function import Function
class GetRootData(Function):
    """Engine function that runs the root analyzer over an image folder and
    pushes the resulting data into the attached display widget."""
    def __init__(self, data_display):
        # data_display: UI component exposing clear(), display_data() and data_tracker.
        self.data_display = data_display
    def call(self, args):
        # args[0] = folder of input images; args[1] = output path.
        image_folder_path = args[0]
        output_path = args[1]  # NOTE(review): currently unused -- confirm whether generate_data should receive it
        self.data_display.clear()
        data = main.generate_data(image_folder_path, self.data_display.data_tracker)
        error_message = self.data_display.display_data(data)  # NOTE(review): error_message is discarded; should it be returned?
        return ""
|
9,119 | d8e9b9f7a8d5ec2a72f083ec2283e8c0724dbe0d | #coding=utf-8
import urllib.parse
import json
'''转化从charles复制下来的字串,转为json格式'''
def to_str(body_str):
    """Validate a Charles-style form body ("k=v&k2=v2") and return it URL-decoded.

    Prints a diagnostic and returns '' when the input is empty or any
    parameter is not exactly one 'key=value' pair.

    Fixes: removed the dead `_temp` accumulator and the try/except that
    wrapped code which could not raise.
    """
    if body_str == '':
        print("传入为空:%s"%body_str)
        return ''
    for each in body_str.split("&"):
        # Each parameter must contain exactly one '=' (key=value).
        if each.count("=") != 1:
            print("参数不合规,请检查")
            return ''
    return urllib.parse.unquote(body_str)
def to_json(body_str):
    """Convert a validated body string into a dict and write it to demo.json.

    Returns False (writing nothing) when the input fails validation.

    Fixes: the original crashed with IndexError when to_str() returned ''
    (e.g. the module's own __main__ call with '123'), because ''.split('&')
    yields [''] and each.split('=')[1] then fails.
    """
    try:
        body_str = to_str(body_str)
    except:
        return False
    # Guard: to_str() returns '' for invalid/empty input.
    if not body_str:
        return False
    body_dict = {}
    for each in body_str.split("&"):
        key, value = each.split("=")
        body_dict[str(key)] = str(value)
    print(body_dict)
    with open("demo.json","w") as demo:
        demo.write(json.dumps(body_dict,indent=4))
if __name__ == '__main__':
    # '123' contains no '=' pair, so to_json() takes the rejection path.
    bstr = '123'
    to_json(bstr)
9,120 | 5d988d159902e4a4cb17ee0ec61153de2dda4691 | try:
from setuptools import setup
from setuptools import find_packages
has_setup_tools = true
except ImportError:
from distutils.core import setup
has_setup_tools = false
with open("README.md", "r") as fh:
long_description = fh.read()
if has_setup_tools is True:
packages = setuptools.find_packages()
else:
packages = ["otmux"]
setup(
name="otmux",
version="__version",
description="multiple remote activities using ssh and tmux",
long_description=long_description,
url="https://github.com/rda3mon/otmux",
author="Mallikarjun",
author_email="mallikvarjun@gmail.com",
license="Apache License 2.0",
packages=["otmux"],
classifiers=[
'Topic :: tmux :: ssh',
'Development Status :: 2 - Experimental/Unstable',
'Environment :: Console',
'License :: Apache License 2.0',
'Programming Language :: Python :: 2.7',
"Operating System :: OS Independent"
]
)
|
9,121 | bb9ff561ff94bbe4d20f14287ba313386ea78609 | import openpyxl
from openpyxl import Workbook
import openpyxl as openpyxl
from openpyxl.chart import BarChart
def _add_bar_chart(sheet, data_row, series_title, axis_title, anchor):
    """Add one bar chart to *sheet*: data from row `data_row` (cols C..G),
    categories from header row 1, anchored at cell `anchor`."""
    data = openpyxl.chart.Reference(sheet, min_col=3, min_row=data_row, max_row=data_row, max_col=7)
    cat = openpyxl.chart.Reference(sheet, min_col=3, min_row=1, max_col=7, max_row=1)
    series = openpyxl.chart.Series(data, title=series_title, title_from_data=True)
    chart = openpyxl.chart.BarChart()
    chart.title = "My Bar Chart"
    chart.x_axis.title = axis_title
    chart.append(series)
    chart.set_categories(cat)
    sheet.add_chart(chart, anchor)

wb = openpyxl.load_workbook('/Users/mac/Desktop/stu_scores _Grade 2.xlsx')
sheet = wb['stu_scores_01']
# One chart per summary row: overall (row 34), boys (35), girls (36).
# (Replaces three copy-pasted, near-identical chart blocks.)
_add_bar_chart(sheet, 34, "bar chart of each subject", 'bar chart of each subject', "I2")
_add_bar_chart(sheet, 35, "bar chart of boys each subject", 'Boys each subject', "I18")
_add_bar_chart(sheet, 36, "bar chart of girls each subject", 'girls each subject', "Q2")
# NOTE(review): output filename differs from the input workbook -- confirm intended.
wb.save('stu_scores _Grade.xlsx')
wb.close()
|
9,122 | b39403171ed264c8fae5ea4ae9d17f77cfcab497 | import unittest
import sys
import os
#Add project root to path
sys.path.append('../..')
from speckle.SpeckleClient import SpeckleApiClient
class TestSpeckleStream(unittest.TestCase):
    def setUp(self):
        """Log in the shared test user and snapshot the test stream so
        tearDown can restore it after each test."""
        self.s = SpeckleApiClient()
        # Hard-coded fixture credentials for the test server.
        self.user = {'email':'testuser@arup.com','password':'testpassword', 'username':'testuser'}
        self.test_stream = 'RKWgU-oWF'
        self.test_object = '5bcf2c7e3ff66c15abac431d'
        login = self.s.UserLoginAsync(self.user)
        assert login, 'Test User Login was not successful'
        self.user['id'] = login['resource']['_id']
        # Snapshot of the stream's current state; restored in tearDown.
        self.stream = self.s.StreamGetAsync(self.test_stream)
        obj = self.s.StreamGetObjectsAsync(self.test_stream)
        #for o in obj['resources']:
        # r = self.s.ObjectDeleteAsync(o['_id'])
        self.s.StreamUpdateAsync(self.test_stream, self.stream)
    def tearDown(self):
        # Restore the stream snapshot taken in setUp.
        self.s.StreamUpdateAsync(self.test_stream, self.stream)
def none_msg(self, header):
return header + ' responded with None'
    def test_get_object(self):
        # Fetch the known seed object and expect a successful payload.
        r = self.s.ObjectGetAsync(self.test_object)
        self.assertIsNotNone(r, self.none_msg('ObjectGetAsync'))
        self.assertTrue(r['success'])
    def test_create_object(self):
        # Create a minimal object owned by the test user.
        r = self.s.ObjectCreateAsync([{"owner": self.user['username']}])
        self.assertIsNotNone(r, self.none_msg('ObjectCreateAsync'))
        self.assertTrue(r['success'])
        self.assertTrue(r['resources'])
        #Check created object ID is in response
        resource = r['resources'][0]
        self.assertTrue(resource['_id'])
        print(resource['_id'])
        # Attach the new object to the test stream (cleaned up via tearDown's snapshot restore).
        self.s.StreamAddObjectAsync(self.test_stream, resource['_id'])
    def test_create_point_object(self):
        # A Speckle Point at the origin; 'value' is the [x, y, z] triple.
        obj = {
            "owner": self.user['username'],
            "type": "Point",
            "hash": "hash",
            "value": [0,0,0]
        }
        r = self.s.ObjectCreateAsync([obj])
        self.assertIsNotNone(r, self.none_msg('ObjectCreateAsync'))
        self.assertTrue(r['success'])
        self.assertTrue(r['resources'])
        #Check created object ID is in response
        resource = r['resources'][0]
        self.assertTrue(resource['_id'])
        print(resource['_id'])
        self.s.StreamAddObjectAsync(self.test_stream, resource['_id'])
def test_create_mesh_object(self):
obj = {
"owner": self.user['username'],
"type": "Mesh",
"geometryHash": "Mesh.66ec936fc8eb1844581db685e5672f79",
"hash": "2e4d67853709316f17e3745cd700a9ed",
"properties": {
"center": {
"type": "Point",
"value": [
-2.326136578802356,
7.41377889150433,
0.01525474415516414
],
"hash": "318e1a3b9bf16bf5711170b61b4cd144",
"geometryHash": "Point.8012f72d1fd49795101ab099b7dff3cb"
},
"area": 1.6718884716988291,
"revitFamTYpe": "undefined"
},
"vertices": [
-2.6709675788879395,
7.420193672180176,
0.007017634343355894,
-2.6617817878723145,
7.910780906677246,
0.016628438606858253,
-2.6525962352752686,
8.401368141174316,
0.026239242404699326,
-2.6434104442596436,
8.891955375671387,
0.03585004433989525,
-2.6342246532440186,
9.382542610168457,
0.04546085000038147,
-2.507732629776001,
6.9263834953308105,
0.005644594319164753,
-2.498547077178955,
7.416970729827881,
0.01319583784788847,
-2.48936128616333,
7.907557964324951,
0.02074708230793476,
-2.480175495147705,
8.39814567565918,
0.028298325836658478,
-2.47098970413208,
8.88873291015625,
0.035849571228027344,
-2.3444979190826416,
6.432573318481445,
0.004271554294973612,
-2.3353121280670166,
6.923160552978516,
0.00976323802024126,
-2.3261263370513916,
7.413747787475586,
0.015254922211170197,
-2.3169405460357666,
7.9043354988098145,
0.020746605470776558,
-2.3077549934387207,
8.394922256469727,
0.02623829059302807,
-2.181262969970703,
5.93876314163208,
0.0028985145036131144,
-2.172077178955078,
6.42935037612915,
0.006330638192594051,
-2.162891387939453,
6.919937610626221,
0.009762762114405632,
-2.1537058353424072,
7.410524845123291,
0.013194886036217213,
-2.1445200443267822,
7.9011125564575195,
0.016627009958028793,
-2.0180280208587646,
5.444952964782715,
0.0015254743630066514,
-2.0088422298431396,
5.935540199279785,
0.002898038364946842,
-1.9996565580368042,
6.4261274337768555,
0.0042706020176410675,
-1.9904708862304688,
6.916714668273926,
0.00564316613599658,
-1.9812850952148438,
7.407302379608154,
0.0070157297886908054
],
"faces": [
1,
6,
1,
0,
5,
1,
7,
2,
1,
6,
1,
8,
3,
2,
7,
1,
9,
4,
3,
8,
1,
11,
6,
5,
10,
1,
12,
7,
6,
11,
1,
13,
8,
7,
12,
1,
14,
9,
8,
13,
1,
16,
11,
10,
15,
1,
17,
12,
11,
16,
1,
18,
13,
12,
17,
1,
19,
14,
13,
18,
1,
21,
16,
15,
20,
1,
22,
17,
16,
21,
1,
23,
18,
17,
22,
1,
24,
19,
18,
23
]
}
r = self.s.ObjectCreateAsync([obj])
self.assertIsNotNone(r, self.none_msg('ObjectCreateAsync'))
self.assertTrue(r['success'])
self.assertTrue(r['resources'])
# Check created object ID is in response
resource = r['resources'][0]
self.assertTrue(resource['_id'])
print(resource['_id'])
self.s.StreamAddObjectAsync(self.test_stream, resource['_id'])
    def test_line_object(self):
        # A Speckle Line; 'value' is [x1, y1, z1, x2, y2, z2].
        obj = {
            "type": "Line",
            "value": [
                -5689.317811503128,
                -13716.87365524665,
                3448.9999880790538,
                -5688.317811503128,
                -13717.87365524665,
                3539.9999880790538
            ],
        }
        r = self.s.ObjectCreateAsync([obj])
        self.assertIsNotNone(r, self.none_msg('ObjectCreateAsync'))
        self.assertTrue(r['success'])
        self.assertTrue(r['resources'])
        # Check created object ID is in response
        resource = r['resources'][0]
        self.assertTrue(resource['_id'])
        print(resource['_id'])
        self.s.StreamAddObjectAsync(self.test_stream, resource['_id'])
def test_line_objects(self):
objects = [
{
"type": "Line",
"value": [
0,
0,
0,
1,
1,
1
],
},
{
"type": "Line",
"value": [
-1,
-1,
-1,
2,
2,
2
],
},
]
r = self.s.ObjectCreateAsync(objects)
self.assertIsNotNone(r, self.none_msg('ObjectCreateAsync'))
self.assertTrue(r['success'])
self.assertTrue(r['resources'])
# Check created object ID is in response
resource = r['resources'][0]
self.assertTrue(resource['_id'])
print(resource['_id'])
self.s.StreamAddObjectAsync(self.test_stream, resource['_id'])
    def test_update_object(self):
        # Merge geometry and properties into one update payload.
        geometry = {
            "vertices": [0.0, 1.0, 2.0, 3.0],
            "faces": [1,2,3]
        }
        props = {
            'type': 'RCSlab',
            'material': 'Concrete'
        }
        data = {'properties': props}
        data.update(geometry)
        r = self.s.ObjectUpdateAsync(self.test_object, data)
        self.assertIsNotNone(r)
        #Todo: Look into why user is not authorized to update
        self.assertTrue(r['success'])
if __name__ == "__main__":
    # Run the whole TestSpeckleStream suite when executed directly.
    unittest.main()
|
9,123 | 906b7f02d6a7968bbf4780e682d4f9a92526326a |
# Taken from: https://github.com/flyyufelix/cnn_finetune/blob/master/vgg16.py
# based on: https://gist.github.com/baraldilorenzo/07d7802847aaad0a35d3
# -*- coding: utf-8 -*-
import keras
import itertools
import sys
from sklearn.metrics import confusion_matrix
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
from keras.models import Sequential
from keras.optimizers import SGD
from keras.layers import Input, Dense, Convolution2D, MaxPooling2D, AveragePooling2D, ZeroPadding2D, Dropout, Flatten, merge, Reshape, Activation
from keras.layers.normalization import BatchNormalization
from keras.models import Model
from keras import backend as K
from keras.callbacks import LearningRateScheduler, ModelCheckpoint, EarlyStopping, CSVLogger
from keras.preprocessing.image import ImageDataGenerator
from skimage import io, color, exposure, transform
from sklearn.model_selection import train_test_split
from sklearn.cross_validation import train_test_split
from sklearn.metrics import log_loss
from new_load_GTSRB_Inception import load_GTSRB_data_1
class AccuracyHistory(keras.callbacks.Callback):
    """Keras callback that records the training accuracy after every epoch."""
    def on_train_begin(self, logs={}):
        # Reset the accuracy trace at the start of each fit() call.
        self.acc = []
    def on_epoch_end(self, batch, logs={}):
        # 'acc' is the training-accuracy metric name in this Keras version.
        self.acc.append(logs.get('acc'))
history = AccuracyHistory()
# Hyper-parameters come from the command line so a .bat script can sweep them.
dropout_rate= float(sys.argv[1])#0.5
lr= float(sys.argv[2] )#1e-3
batch_size= int(sys.argv[3])#10
# Output filenames encode the hyper-parameter combination of this run.
weights_filename= 'vgg16_weights_frac_0_3_lr_' + str(lr) +'_batch'+str(batch_size)+'_drop_'+str(dropout_rate)+'_epochs_30.h5'
matrix_filename= 'conf_matrix_frac_0_3_lr_' + str(lr) +'_batch'+str(batch_size)+'_drop_'+str(dropout_rate)+'_epochs_30.png'
log_filename='log_frac_0_3_lr_' + str(lr) +'_batch'+str(batch_size)+'_drop_'+str(dropout_rate)+'_epochs_30'
result_file='result_frac_0_3_lr_' + str(lr) +'_batch'+str(batch_size)+'_drop_'+str(dropout_rate)+'_epochs_30.txt'
def conv2d_bn(x, nb_filter, nb_row, nb_col,
              border_mode='same', subsample=(1, 1),
              name=None):
    """Apply a Conv2D (ReLU) followed by BatchNormalization (Inception V3 helper).

    Layer names are derived from *name* when given ('_conv' / '_bn' suffixes).
    bn_axis=1 assumes Theano-style channels-first ordering.
    """
    #Utility function to apply conv + BN for Inception V3.
    if name is not None:
        bn_name = name + '_bn'
        conv_name = name + '_conv'
    else:
        bn_name = None
        conv_name = None
    bn_axis = 1
    x = Convolution2D(nb_filter, nb_row, nb_col,
                      subsample=subsample,
                      activation='relu',
                      border_mode=border_mode,
                      name=conv_name)(x)
    x = BatchNormalization(axis=bn_axis, name=bn_name)(x)
    return x
def inception_v3_model(img_rows, img_cols, channel=1, num_classes=None):
channel_axis = 1
img_input = Input(shape=(channel, img_rows, img_cols))
x = conv2d_bn(img_input, 32, 3, 3, subsample=(2, 2), border_mode='valid')
x = conv2d_bn(x, 32, 3, 3, border_mode='valid')
x = conv2d_bn(x, 64, 3, 3)
x = MaxPooling2D((3, 3), strides=(2, 2))(x)
x = conv2d_bn(x, 80, 1, 1, border_mode='valid')
x = conv2d_bn(x, 192, 3, 3, border_mode='valid')
x = MaxPooling2D((3, 3), strides=(2, 2))(x)
# mixed 0, 1, 2: 35 x 35 x 256
for i in range(3):
branch1x1 = conv2d_bn(x, 64, 1, 1)
branch5x5 = conv2d_bn(x, 48, 1, 1)
branch5x5 = conv2d_bn(branch5x5, 64, 5, 5)
branch3x3dbl = conv2d_bn(x, 64, 1, 1)
branch3x3dbl = conv2d_bn(branch3x3dbl, 96, 3, 3)
branch3x3dbl = conv2d_bn(branch3x3dbl, 96, 3, 3)
branch_pool = AveragePooling2D(
(3, 3), strides=(1, 1), border_mode='same')(x)
branch_pool = conv2d_bn(branch_pool, 32, 1, 1)
x = merge([branch1x1, branch5x5, branch3x3dbl, branch_pool],
mode='concat', concat_axis=channel_axis,
name='mixed' + str(i))
# mixed 3: 17 x 17 x 768
branch3x3 = conv2d_bn(x, 384, 3, 3, subsample=(2, 2), border_mode='valid')
branch3x3dbl = conv2d_bn(x, 64, 1, 1)
branch3x3dbl = conv2d_bn(branch3x3dbl, 96, 3, 3)
branch3x3dbl = conv2d_bn(branch3x3dbl, 96, 3, 3,
subsample=(2, 2), border_mode='valid')
branch_pool = MaxPooling2D((3, 3), strides=(2, 2))(x)
x = merge([branch3x3, branch3x3dbl, branch_pool],
mode='concat', concat_axis=channel_axis,
name='mixed3')
# mixed 4: 17 x 17 x 768
branch1x1 = conv2d_bn(x, 192, 1, 1)
branch7x7 = conv2d_bn(x, 128, 1, 1)
branch7x7 = conv2d_bn(branch7x7, 128, 1, 7)
branch7x7 = conv2d_bn(branch7x7, 192, 7, 1)
branch7x7dbl = conv2d_bn(x, 128, 1, 1)
branch7x7dbl = conv2d_bn(branch7x7dbl, 128, 7, 1)
branch7x7dbl = conv2d_bn(branch7x7dbl, 128, 1, 7)
branch7x7dbl = conv2d_bn(branch7x7dbl, 128, 7, 1)
branch7x7dbl = conv2d_bn(branch7x7dbl, 192, 1, 7)
branch_pool = AveragePooling2D((3, 3), strides=(1, 1), border_mode='same')(x)
branch_pool = conv2d_bn(branch_pool, 192, 1, 1)
x = merge([branch1x1, branch7x7, branch7x7dbl, branch_pool],
mode='concat', concat_axis=channel_axis,
name='mixed4')
# mixed 5, 6: 17 x 17 x 768
for i in range(2):
branch1x1 = conv2d_bn(x, 192, 1, 1)
branch7x7 = conv2d_bn(x, 160, 1, 1)
branch7x7 = conv2d_bn(branch7x7, 160, 1, 7)
branch7x7 = conv2d_bn(branch7x7, 192, 7, 1)
branch7x7dbl = conv2d_bn(x, 160, 1, 1)
branch7x7dbl = conv2d_bn(branch7x7dbl, 160, 7, 1)
branch7x7dbl = conv2d_bn(branch7x7dbl, 160, 1, 7)
branch7x7dbl = conv2d_bn(branch7x7dbl, 160, 7, 1)
branch7x7dbl = conv2d_bn(branch7x7dbl, 192, 1, 7)
branch_pool = AveragePooling2D(
(3, 3), strides=(1, 1), border_mode='same')(x)
branch_pool = conv2d_bn(branch_pool, 192, 1, 1)
x = merge([branch1x1, branch7x7, branch7x7dbl, branch_pool],
mode='concat', concat_axis=channel_axis,
name='mixed' + str(5 + i))
# mixed 7: 17 x 17 x 768
branch1x1 = conv2d_bn(x, 192, 1, 1)
branch7x7 = conv2d_bn(x, 192, 1, 1)
branch7x7 = conv2d_bn(branch7x7, 192, 1, 7)
branch7x7 = conv2d_bn(branch7x7, 192, 7, 1)
branch7x7dbl = conv2d_bn(x, 160, 1, 1)
branch7x7dbl = conv2d_bn(branch7x7dbl, 192, 7, 1)
branch7x7dbl = conv2d_bn(branch7x7dbl, 192, 1, 7)
branch7x7dbl = conv2d_bn(branch7x7dbl, 192, 7, 1)
branch7x7dbl = conv2d_bn(branch7x7dbl, 192, 1, 7)
branch_pool = AveragePooling2D((3, 3), strides=(1, 1), border_mode='same')(x)
branch_pool = conv2d_bn(branch_pool, 192, 1, 1)
x = merge([branch1x1, branch7x7, branch7x7dbl, branch_pool],
mode='concat', concat_axis=channel_axis,
name='mixed7')
# mixed 8: 8 x 8 x 1280
branch3x3 = conv2d_bn(x, 192, 1, 1)
branch3x3 = conv2d_bn(branch3x3, 320, 3, 3,
subsample=(2, 2), border_mode='valid')
branch7x7x3 = conv2d_bn(x, 192, 1, 1)
branch7x7x3 = conv2d_bn(branch7x7x3, 192, 1, 7)
branch7x7x3 = conv2d_bn(branch7x7x3, 192, 7, 1)
branch7x7x3 = conv2d_bn(branch7x7x3, 192, 3, 3,
subsample=(2, 2), border_mode='valid')
branch_pool = AveragePooling2D((3, 3), strides=(2, 2))(x)
x = merge([branch3x3, branch7x7x3, branch_pool],
mode='concat', concat_axis=channel_axis,
name='mixed8')
# mixed 9: 8 x 8 x 2048
for i in range(2):
branch1x1 = conv2d_bn(x, 320, 1, 1)
branch3x3 = conv2d_bn(x, 384, 1, 1)
branch3x3_1 = conv2d_bn(branch3x3, 384, 1, 3)
branch3x3_2 = conv2d_bn(branch3x3, 384, 3, 1)
branch3x3 = merge([branch3x3_1, branch3x3_2],
mode='concat', concat_axis=channel_axis,
name='mixed9_' + str(i))
branch3x3dbl = conv2d_bn(x, 448, 1, 1)
branch3x3dbl = conv2d_bn(branch3x3dbl, 384, 3, 3)
branch3x3dbl_1 = conv2d_bn(branch3x3dbl, 384, 1, 3)
branch3x3dbl_2 = conv2d_bn(branch3x3dbl, 384, 3, 1)
branch3x3dbl = merge([branch3x3dbl_1, branch3x3dbl_2],
mode='concat', concat_axis=channel_axis)
branch_pool = AveragePooling2D(
(3, 3), strides=(1, 1), border_mode='same')(x)
branch_pool = conv2d_bn(branch_pool, 192, 1, 1)
x = merge([branch1x1, branch3x3, branch3x3dbl, branch_pool],
mode='concat', concat_axis=channel_axis,
name='mixed' + str(9 + i))
# Fully Connected Softmax Layer
x_fc = AveragePooling2D((8, 8), strides=(8, 8), name='avg_pool')(x)
x_fc = Flatten(name='flatten')(x_fc)
x_fc = Dense(1000, activation='softmax', name='predictions')(x_fc)
# Create model
model = Model(img_input, x_fc)
# Load ImageNet pre-trained data
model.load_weights('imagenet_models/inception_v3_weights_th_dim_ordering_th_kernels.h5')
# Truncate and replace softmax layer for transfer learning
# Cannot use model.layers.pop() since model is not of Sequential() type
# The method below works since pre-trained weights are stored in layers but not in the model
x_newfc = AveragePooling2D((8, 8), strides=(8, 8), name='avg_pool')(x)
x_newfc = Flatten(name='flatten')(x_newfc)
x_newfc = Dense(num_classes, activation='softmax', name='predictions')(x_newfc)
# Create another model with our customized softmax
model = Model(img_input, x_newfc)
# Learning rate is changed to 0.001
sgd = SGD(lr, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(optimizer=sgd, loss='categorical_crossentropy', metrics=['accuracy'])
return model
def lr_schedule(epoch):
    """Step decay: divide the module-level base learning rate by 10 every 10 epochs."""
    decade = int(epoch / 10)
    return lr * (0.1 ** decade)
if __name__ == '__main__':
img_rows, img_cols = 299, 299 # Resolution of inputs
channel = 3
num_classes = 43
# batch_size = 10 # 20
nb_epoch = 30
# Load data. Please implement your own load_data() module for your own dataset
X_train, Y_train, X_valid, Y_valid, Y_test = load_GTSRB_data_1(img_rows, img_cols)
# Load our model
print("loading model")
model = inception_v3_model(img_rows, img_cols, channel, num_classes)
csv_logger=CSVLogger('training.log') # callback that streams epoch results to a csv file
print("start fine tuning")
# Start Fine-tuning
model.fit(X_train, Y_train,
batch_size=batch_size,
nb_epoch=nb_epoch,
shuffle=True,
verbose=1,
validation_split=0.2, # fraction of the data held-out for validation
callbacks=[LearningRateScheduler(lr_schedule), history,csv_logger,
ModelCheckpoint(weights_filename, monitor='val_acc', verbose=1, save_best_only=True, mode='max')]
)
# ModelCheckpoint('incep_weights.{epoch:02d}-{val_loss:.2f}.h5',
#EarlyStopping(monitor='val_loss', patience=2, verbose=0),
#Get history of accuracy and plot it
# print("hhistory acc: ",history.acc)
# print(" history acc type: ", type(history.acc))
#np.save('history_acc_inception', history.acc)
#plt.plot(range(1,nb_epoch+1), history.acc)
#plt.xlabel('Epochs')
#plt.ylabel('Accuracy')
#plt.title("Inception")
#plt.show()
y_pred= model.predict_classes(X_valid)
print("Predictions: ", y_pred)
model.metrics_names
y_eval=model.evaluate(X_valid,Y_valid)
print("Evaluation: ", y_eval)
f=open(result_file, 'w')
f.write('Y_pred: ' + str(y_pred) )
f.write('Y_eval: ' + str(y_eval))
f.close()
cm=confusion_matrix(Y_test, y_pred) # confusion matrix
print(cm)
plt.matshow(cm)
plt.title('Confusion matrix InceptionV3')
plt.colorbar()
plt.ylabel('True label')
plt.xlabel('Predicted label')
#plt.show()
plt.savefig(matrix_filename)
plt.close()
print("Done!")
|
9,124 | 0c0fb3bfb81be5ef6a60584eafeefec61f171679 | import pytest
import json
import os.path
import importlib
import jsonpickle
from fixture.application import Application
fixture = None
config = None
@pytest.fixture
def app(request):
    """Browser fixture: loads the JSON config once per session, (re)creates the
    Application when missing or invalidated, and ensures a logged-in session."""
    global fixture
    global config
    browser = request.config.getoption("--browser")
    if config is None:
        # Config path is resolved relative to this conftest.py file.
        conf_file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), request.config.getoption("--config"))
        with open(conf_file_path) as config_file:
            config = json.load(config_file)
    if fixture is None or not fixture.is_valid():
        fixture = Application(browser=browser, base_url=config["baseUrl"])
    # Login is idempotent: ensure_login only acts when not already logged in.
    fixture.session.ensure_login(name=config["login"], pwd=config["password"])
    return fixture
@pytest.fixture(scope="session", autouse=True)
def stop(request):
    """Session-wide teardown: log out and destroy the browser at session end."""
    global fixture
    def finalizer():
        fixture.session.ensure_logout()
        fixture.destroy()
    request.addfinalizer(finalizer)
    return fixture
def pytest_addoption(parser):
    """Register command-line options: target browser and config file name."""
    parser.addoption("--browser", action="store", default="firefox")
    parser.addoption("--config", action="store", default="config.json")
def pytest_generate_tests(metafunc):
    """Parametrize any fixture named 'data_*' (loaded from a python module)
    or 'json_*' (loaded from a jsonpickle file) with its test data.

    Refactor: the two branches previously duplicated the parametrize call;
    they now share it via a loader lookup.
    """
    for fixture_name in metafunc.fixturenames:
        loader = None
        if fixture_name.startswith("data_"):
            loader = load_from_module
        elif fixture_name.startswith("json_"):
            loader = load_from_json
        if loader is not None:
            # Strip the 5-char prefix to get the module / json file name.
            testdata = loader(fixture_name[5:])
            metafunc.parametrize(fixture_name, testdata, ids=[repr(g) for g in testdata])
def load_from_module(module):
    """Import data.<module> and return its 'testdata' attribute."""
    return importlib.import_module(f'data.{module}').testdata
def load_from_json(jsonfile):
    """Decode test data from data/<jsonfile>.json (jsonpickle format),
    resolved relative to this conftest.py."""
    with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), f'data/{jsonfile}.json')) as file:
        return jsonpickle.decode(file.read())
|
9,125 | ccc74f58eff3bb00f0be8c2c963de4208b7f0933 | from math import ceil, log2, sqrt
def constructST(s, start, end, st, i):
    """Build segment-tree node i over s[start..end] for bracket matching.

    st[i]      = matched '()' pairs fully inside the segment
    openst[i]  = unmatched '(' left over; closedst[i] = unmatched ')'.
    openst/closedst are module-level side tables indexed like st.
    Returns the triple (st[i], openst[i], closedst[i]).
    """
    if start == end:
        # Leaf: one character is a single unmatched '(' or ')'.
        st[i] = 0
        openst[i] = 1 if s[start] == '(' else 0
        closedst[i] = 1 if s[start] == ')' else 0
        return st[i], openst[i], closedst[i]
    else:
        mid = (start+end)//2
        st[i], openst[i], closedst[i] = constructST(s, start, mid, st, 2*i+1)
        a, b, c = constructST(s, mid+1, end, st, 2*i+2)
        # New pairs formed by '(' from the left child meeting ')' from the right.
        tmp = min(openst[2*i+1], closedst[2*i+2])
        st[i] += tmp + a
        openst[i] += b-tmp
        closedst[i] += c -tmp
        return st[i], openst[i], closedst[i]
def query(s, start, end, l, r, st, i):
    """Answer a range query on the segment tree rooted at node ``i``.

    Returns (matched_pairs, unmatched_open, unmatched_close) for the
    intersection of s[start..end] (this node's span) with s[l..r].
    """
    if l > end or r < start:
        # Disjoint from this node's span: contributes nothing.
        return 0, 0, 0
    if l <= start and end <= r:
        # Fully covered: use the precomputed node values.
        return st[i], openst[i], closedst[i]
    mid = (start + end) // 2
    lm, lo, lc = query(s, start, mid, l, r, st, 2 * i + 1)
    rm, ro, rc = query(s, mid + 1, end, l, r, st, 2 * i + 2)
    # Merge exactly as in construction: left opens pair with right closes.
    paired = min(lo, rc)
    return lm + rm + paired, lo + ro - paired, lc + rc - paired
# Read the bracket string, build the tree, then answer range queries:
# for each (l, r) print the length of the longest balanced subsequence.
s = input()
n = len(s)
# A segment tree over n leaves needs 2 * 2^ceil(log2 n) - 1 slots.
x = int(ceil(log2(n)))
max_size = 2 * pow(2, x) - 1
st = [0] * max_size
openst = [0] * max_size
closedst = [0] * max_size
constructST(s, 0, n - 1, st, 0)
for _ in range(int(input())):
    l, r = map(int, input().split())
    # Queries are 1-based; each matched pair contributes 2 characters.
    print(2 * query(s, 0, n - 1, l - 1, r - 1, st, 0)[0])
|
9,126 | aa55f1dd4f363e07d5f9104346efaa24c0457d45 | from .sgd import StochasticGradientDescent
from .momentum import Momentum
|
9,127 | e99e558ebf5938a90f00df6593c9f75a18affcb8 | import sqlparse
# Generate Go repository boilerplate for the IndividualContrAgent table
# into struct.go. NOTE(review): parse.sql is opened but never read, and the
# sqlparse import is unused -- presumably a planned feature; confirm.
f = open("parse.sql")
go = open("struct.go", "w+")
# Maps SQL types to Go types plus a few naming constants for the template.
dictiony = {
    "uuid": "string",
    "varchar": "string",
    "timestamp": "time.Time",
    "int": "int",
    "text": "string",
    "dbname": "IndividualContrAgent",
    "interface": "IndividualContrAgentI",
    "ica":"ica"
}
#package
go.write("package main\n\n")
#import
go.write("import (\n    ")
go.write('"github.com/jmoiron/sqlx"\n)\n\n')
#struct
go.write("type {0} struct {1}\n".format(dictiony["dbname"], "{"))
go.write("    ID {}\n".format(dictiony["uuid"]))
go.write("    Name {}\n".format(dictiony["varchar"]))
go.write("    PhoneNumber {}\n".format(dictiony["varchar"]))
go.write("    Address {}\n".format(dictiony["varchar"]))
go.write("    Description {}\n".format(dictiony["varchar"]))
go.write("}\n\n")
#db struct
go.write("type {0}Repo struct {1}\n".format(dictiony["dbname"], "{"))
go.write("    db *sqlx.DB\n}\n\n")
#interface
go.write("type {0}I interface {1}\n".format(dictiony["dbname"], "{"))
go.write("    Create(*{0}) (string, error)\n{1}\n\n".format(dictiony["dbname"], "}"))
#newIndCountrAgent
go.write("func New{0}(db *sqlx.DB) {1} {2}\n".format(dictiony["dbname"],dictiony["interface"], "{"))
go.write("    return &{0}Repo{1}\n".format(dictiony["dbname"], "{"))
go.write("        db: db,\n    {0}\n{1}\n\n".format("}", "}"))
#create
go.write("func(ica *{2}Repo) Create(agent {0}) (string, error) {1}\n".format(dictiony["dbname"], "{", dictiony["dbname"]))
go.write("    query := `INSERT INTO {} (\n".format(dictiony["dbname"]))
go.write("        id, \n        name,\n        phonenumber,\n        address,\n")
go.write("        description)\n        values($1, $2, $3, $4, $5);`\n")
go.write("    prp, err := ica.db.Prepare(query)\n\n    ")
go.write('if err != nil ')
go.write("{\n")
go.write('        return "", err\n')
go.write("    }\n")
go.write("    _, err = prp.Exec(\n")
go.write("        agent.ID,\n        agent.Name,\n")
go.write("        agent.PhoneNumber,\n        agent.Address,\n        agent.Description,\n    )\n")
go.write("    if err != nil {\n        ")
go.write('return "", err\n    }\n\n')
go.write("    return agent.ID, err\n}")
#get
# BUG FIX: neither handle was ever closed, so struct.go could be left
# partially flushed when the interpreter exited; close both explicitly.
f.close()
go.close()
|
9,128 | d5d12e2269b343dde78534eddf2cce06759eb264 | # Copyright 2017 Klarna AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
import base64
import json
from django.test import TestCase, override_settings
from django.conf import settings
from django.core import management
from django.urls import reverse
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.contrib.auth.hashers import make_password
from django_rethink.connection import r
from django_rethink.serializers import *
class TestSerializer(RethinkSerializer):
    """Minimal serializer used by the test suite to exercise
    permission-filtered RethinkDB queries."""
    id = serializers.CharField(required=False, read_only=True)
    permissions = PermissionsSerializer()
    class Meta(RethinkSerializer.Meta):
        table_name = 'django_rethink_test'
        # Secondary index per permission list so group membership can be
        # matched server-side.
        indices = [
            ('permissions_read', r.row['permissions']['read']),
            ('permissions_write', r.row['permissions']['write']),
            ('permissions_create', r.row['permissions']['create']),
        ]
class TestReviewSerializer(NeedsReviewMixin, HistorySerializerMixin):
    """Serializer combining review workflow and history tracking, used to
    test the review mixin against its own table."""
    id = serializers.CharField(required=False, read_only=True)
    field1 = serializers.CharField(required=True)
    permissions = PermissionsSerializer()
    class Meta(RethinkSerializer.Meta):
        table_name = 'django_rethink_test_reviewed'
        # Same per-permission secondary indexes as TestSerializer.
        indices = [
            ('permissions_read', r.row['permissions']['read']),
            ('permissions_write', r.row['permissions']['write']),
            ('permissions_create', r.row['permissions']['create']),
        ]
class TestHistoryPermissionsSerializer(HistorySerializerMixin):
    """History-tracked serializer whose read access is governed by the
    embedded ``permissions`` object (see test_history_with_permissions)."""
    id = serializers.CharField(required=False, read_only=True)
    field1 = serializers.CharField(required=True)
    permissions = PermissionsSerializer()
    class Meta(RethinkSerializer.Meta):
        table_name = 'django_rethink_test_history_permissions'
        # Per-permission secondary indexes for server-side filtering.
        indices = [
            ('permissions_read', r.row['permissions']['read']),
            ('permissions_write', r.row['permissions']['write']),
            ('permissions_create', r.row['permissions']['create']),
        ]
class TestHistoryHasReadPermissionSerializer(HistorySerializerMixin):
    """History-tracked serializer that grants read access via a custom
    ``has_read_permission`` hook instead of a permissions object."""
    id = serializers.CharField(required=False, read_only=True)
    field1 = serializers.CharField(required=True)
    user = serializers.CharField(required=True)
    class Meta(RethinkSerializer.Meta):
        table_name = 'django_rethink_test_history_has_read_permission'
    def has_read_permission(self, user):
        # Only the user named in the document may read it; superusers are
        # presumably short-circuited by the view layer -- confirm.
        return self.instance['user'] == user.username
@override_settings(
    RETHINK_DB_DB=os.environ.get('RETHINK_DB_DB', 'django_rethinkci'),
)
class APITests(TestCase):
    """Integration tests for the django_rethink history API.

    Creates a throwaway RethinkDB database for the whole TestCase and
    truncates the test tables after each test.
    """
    @classmethod
    def setUpClass(cls):
        super(APITests, cls).setUpClass()
        cls.conn = r.connect(host=settings.RETHINK_DB_HOST, port=settings.RETHINK_DB_PORT)
        try:
            r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)
        except Exception:
            # BUG FIX: this was a bare ``except:``, which also swallows
            # SystemExit/KeyboardInterrupt. Dropping is best-effort cleanup
            # of a leftover database, so ignore ordinary errors only.
            pass
        r.db_create(settings.RETHINK_DB_DB).run(cls.conn)
        cls.conn.db = settings.RETHINK_DB_DB
        management.call_command('syncrethinkdb', verbosity=0)
    @classmethod
    def tearDownClass(cls):
        r.db_drop(settings.RETHINK_DB_DB).run(cls.conn)
        super(APITests, cls).tearDownClass()
    def tearDown(self):
        # Truncate every table a test may have written to, keeping the
        # database itself (and its indexes) for the next test.
        for t in [
            "django_rethink_test",
            "history",
            "django_rethink_test_reviewed",
            "django_rethink_test_history_permissions",
            "django_rethink_test_history_has_read_permission",
        ]:
            r.table(t).delete().run(self.conn)
        super(APITests, self).tearDown()
    def create_user(self, username='tester', password='tester', is_superuser=True, groups=[], **kwargs):
        """Create a Django user and return (user, basic-auth header value)."""
        user = get_user_model().objects.create(
            username=username,
            password=make_password(password),
            is_superuser=is_superuser,
            **kwargs
        )
        for name in groups:
            group, created = Group.objects.get_or_create(name=name)
            user.groups.add(group)
        auth = "Basic %s" % (base64.b64encode(("%s:%s" % (username, password)).encode("ascii")).decode("ascii"))
        return user, auth
    def test_history_no_type(self):
        """History lookups for an unknown object type return 404."""
        super_user, super_auth = self.create_user()
        response = self.client.get(reverse('django_rethink:history_list',
            kwargs={'object_type': 'i_dont_exist', 'pk': '1'}),
            HTTP_AUTHORIZATION=super_auth
        )
        self.assertEqual(response.status_code, 404)
    def test_history_with_permissions(self):
        """Group-based permissions gate access to an object's history."""
        super_user, super_auth = self.create_user()
        luser, lauth = self.create_user(username='luser', is_superuser=False, groups=['group1'])
        serializer = TestHistoryPermissionsSerializer(None,
            data={'field1': 'test1', 'user': luser.username,
                  'permissions': {'write': ['group1']}},
            context={'username': luser.username}
        )
        serializer.is_valid(raise_exception=True)
        test1 = serializer.save()
        serializer = TestHistoryPermissionsSerializer(None,
            data={'field1': 'test2', 'user': super_user.username,
                  'permissions': {'write': []}},
            context={'username': super_user.username}
        )
        serializer.is_valid(raise_exception=True)
        test2 = serializer.save()
        # luser is in group1 -> may read test1's single history entry...
        response = self.client.get(reverse('django_rethink:history_list',
            kwargs={
                'object_type': TestHistoryPermissionsSerializer.Meta.table_name,
                'pk': test1['id'],
            }), HTTP_AUTHORIZATION=lauth)
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertEqual(len(data), 1)
        # ...but not test2, whose permissions name no groups.
        response = self.client.get(reverse('django_rethink:history_list',
            kwargs={
                'object_type': TestHistoryPermissionsSerializer.Meta.table_name,
                'pk': test2['id'],
            }), HTTP_AUTHORIZATION=lauth)
        self.assertEqual(response.status_code, 403)
    def test_history_with_has_read_permission(self):
        """The serializer's has_read_permission hook gates history access."""
        super_user, super_auth = self.create_user()
        luser, lauth = self.create_user(username='luser', is_superuser=False, groups=['group1'])
        serializer = TestHistoryHasReadPermissionSerializer(None,
            data={'field1': 'test1', 'user': luser.username},
            context={'username': luser.username}
        )
        serializer.is_valid(raise_exception=True)
        test1 = serializer.save()
        # A partial update adds a second history entry for test1.
        serializer = TestHistoryHasReadPermissionSerializer(test1,
            data={'field1': 'test1.1'}, partial=True,
            context={'username': luser.username},
        )
        serializer.is_valid(raise_exception=True)
        test1 = serializer.save()
        serializer = TestHistoryHasReadPermissionSerializer(None,
            data={'field1': 'test2', 'user': super_user.username},
            context={'username': super_user.username}
        )
        serializer.is_valid(raise_exception=True)
        test2 = serializer.save()
        response = self.client.get(reverse('django_rethink:history_list',
            kwargs={
                'object_type': TestHistoryHasReadPermissionSerializer.Meta.table_name,
                'pk': test1['id'],
            }), HTTP_AUTHORIZATION=lauth)
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertEqual(len(data), 2)
        # luser is not test2's user -> forbidden; the superuser may read it.
        response = self.client.get(reverse('django_rethink:history_list',
            kwargs={
                'object_type': TestHistoryHasReadPermissionSerializer.Meta.table_name,
                'pk': test2['id'],
            }), HTTP_AUTHORIZATION=lauth)
        self.assertEqual(response.status_code, 403)
        response = self.client.get(reverse('django_rethink:history_list',
            kwargs={
                'object_type': TestHistoryHasReadPermissionSerializer.Meta.table_name,
                'pk': test2['id'],
            }), HTTP_AUTHORIZATION=super_auth)
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertEqual(len(data), 1)
        # Deleting adds a third history entry, still readable by its user.
        serializer = TestHistoryHasReadPermissionSerializer(test1,
            context={'username': luser.username},
        )
        serializer.delete()
        response = self.client.get(reverse('django_rethink:history_list',
            kwargs={
                'object_type': TestHistoryHasReadPermissionSerializer.Meta.table_name,
                'pk': test1['id'],
            }), HTTP_AUTHORIZATION=lauth)
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertEqual(len(data), 3)
|
9,129 | b9c8689dbdf451e6a981f1abdae55771266fe231 | import json
import os
from flask import Flask, request, url_for
from flask_cors import CORS
from werkzeug.utils import secure_filename
from service.Binarizacion import Binarizacion
# Directory (relative to the working directory) where uploads are stored.
UPLOAD_FOLDER = './public/files'
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
# Allow cross-origin requests from the (separate) front end.
CORS(app)
@app.route('/')
def hello():
    """Trivial liveness endpoint."""
    # BUG FIX: greeting read "Hello word"; corrected the typo.
    return 'Hello world'
@app.route('/analyze', methods=['POST'])
def analyze():
    """Save the uploaded image, run the Binarizacion pipeline on it and
    return the computed metrics as a JSON string."""
    if request.method != 'POST':
        return json.dumps({'ok': False})
    upload = request.files['image']
    file_name = secure_filename(upload.filename)
    upload.save(os.path.join(app.config['UPLOAD_FOLDER'], file_name))
    print(f'{UPLOAD_FOLDER}/{file_name}', os.path.join(app.config['UPLOAD_FOLDER']))
    # Run each processing stage; analize() yields the final measurements.
    analyzer = Binarizacion(f'./public/files/{file_name}')
    analyzer.binaryImage()
    analyzer.otsuImage()
    analyzer.adatativeImage()
    analyzer.fondoMorfologico()
    m, color, diametro, pre = analyzer.analize()
    return json.dumps({'ok': True, 'url': f'./public/files/{file_name}',
                       'm': m,
                       'color': color,
                       'diametro': diametro,
                       'pre': pre})
|
9,130 | dcc1b0decf2fca6309dbb60faebd3f0a6944cd7d | #!/usr/local/bin/python
i = 0
while i == 0:
try:
print("Let's divide some numbers!")
a1 = input("Enter numerator: ")
b1 = input("Enter denominator: ")
a = int(a1)
b = int(b1)
print(a1 + " divied by " + b1 + " equals: " + str(a/b))
i += 1
except ZeroDivisionError:
print("Cannot divide by 0")
except ValueError:
print("Invalid input, not a number")
|
9,131 | ac60fd79d7fb15624cf79adc7e456960e7523e2e | import sys
from google.appengine.ext import blobstore
from google.appengine.ext.webapp import blobstore_handlers
from google.appengine.ext import ndb
from helpers import *
def valid_pw(name, password, h):
    """Check *password* for *name* against the stored hash string *h*.

    *h* is expected to be "salt,..." -- the salt is the first
    comma-separated field and is fed back into make_pw_hash.
    """
    stored_salt = h.split(',')[0]
    return make_pw_hash(name, password, stored_salt) == h
class CVEProfile(ndb.Model):
    """Datastore record describing a single CVE and its scored impacts."""
    profile_name = ndb.StringProperty(default="N/A")
    cve_id = ndb.StringProperty(required=True)
    cwe_id = ndb.StringProperty(required=True)
    cwe_name = ndb.StringProperty(required=True)
    summary = ndb.TextProperty()
    cvss_score = ndb.FloatProperty()
    exploit_count = ndb.IntegerProperty()
    publish_date = ndb.StringProperty()
    update_date = ndb.StringProperty()
    cve_url = ndb.StringProperty()
    created = ndb.DateTimeProperty(auto_now_add=True)
    access_params = ndb.StringProperty()
    confidentiality_impact = ndb.IntegerProperty()
    integrity_impact = ndb.IntegerProperty()
    availability_impact = ndb.IntegerProperty()
    access_complexity = ndb.IntegerProperty()
    gained_access = ndb.IntegerProperty()
    authentication = ndb.IntegerProperty()
    @classmethod
    def createProfile(cls, cve_id , cwe_id , cwe_name, summary, cvss_score, exploit_count, publish_date, update_date, cve_url, confidentiality_impact, integrity_impact, availability_impact, access_complexity, gained_access, authentication):
        """Build an unsaved CVEProfile; profile_name keeps its "N/A" default.

        create_params() presumably comes from helpers (star-imported) and
        generates the access_params token -- confirm.
        """
        access_params = create_params()
        return CVEProfile( cve_id = cve_id,
                            cwe_id = cwe_id,
                            cwe_name = cwe_name,
                            summary = summary,
                            cvss_score = cvss_score,
                            exploit_count = exploit_count,
                            publish_date = publish_date,
                            update_date = update_date,
                            cve_url = cve_url,
                            confidentiality_impact = confidentiality_impact,
                            integrity_impact = integrity_impact,
                            availability_impact = availability_impact,
                            access_complexity = access_complexity,
                            gained_access = gained_access,
                            authentication = authentication,
                            access_params = access_params )
class Service(ndb.Model):
    """A service running on a machine in the attack graph."""
    serviceID=ndb.IntegerProperty(required=True)
    name=ndb.StringProperty()
    status=ndb.StringProperty()
    impact=ndb.IntegerProperty()
    machineID=ndb.IntegerProperty()
    @classmethod
    def add_new_service(cls,serviceID,name,status,impact,machineID):
        """Build (but do not persist) a Service entity."""
        return Service( serviceID = serviceID,
                        name = name,
                        status = status,
                        impact = impact,
                        machineID = machineID)
class Machine(ndb.Model):
    """A host node in the attack graph."""
    machineID=ndb.IntegerProperty(required=True)
    name=ndb.StringProperty()
    status=ndb.StringProperty()
    impact=ndb.IntegerProperty()
    @classmethod
    def add_new_machine(cls,machineID,name,status,impact):
        """Build (but do not persist) a Machine entity."""
        return Machine( machineID = machineID,
                        name = name,
                        status = status,
                        impact = impact)
class Path(ndb.Model):
    """A directed edge (src machine -> dest machine) carrying a CVE and its
    CVSS-style impact components."""
    pathID=ndb.IntegerProperty(required=True)
    name=ndb.StringProperty()
    status=ndb.StringProperty()
    src=ndb.IntegerProperty()
    dest=ndb.IntegerProperty()
    #cvss=ndb.StringProperty()
    cve_id = ndb.StringProperty()
    confidentiality_impact = ndb.IntegerProperty()
    integrity_impact = ndb.IntegerProperty()
    availability_impact = ndb.IntegerProperty()
    access_complexity = ndb.IntegerProperty()
    gained_access = ndb.IntegerProperty()
    authentication = ndb.IntegerProperty()
    @classmethod
    def add_new_path(cls,pathID,name,status,src,dest,cve_id,c_imp,i_imp,a_imp,acc_com,g_acc,auth):
        """Build (but do not persist) a Path entity from abbreviated args."""
        return Path( pathID = pathID,
                     name = name,
                     status = status,
                     src = src,
                     dest = dest,
                     cve_id = cve_id,
                     confidentiality_impact = c_imp,
                     integrity_impact = i_imp,
                     availability_impact = a_imp,
                     access_complexity = acc_com,
                     gained_access = g_acc,
                     authentication = auth )
class Graph(ndb.Model):
    """A whole attack graph: machines, services and paths embedded as
    structured properties."""
    name=ndb.StringProperty(required=True)
    graphID=ndb.IntegerProperty(required=True)
    #owner=ndb.KeyProperty(kind='User') #GUI push
    owner_id=ndb.IntegerProperty(required=True) #JSON push
    machines=ndb.StructuredProperty(Machine, repeated=True)
    services=ndb.StructuredProperty(Service, repeated=True)
    paths=ndb.StructuredProperty(Path, repeated=True)
    # keep track for reporting
    machine_hold = ndb.IntegerProperty(default=0)
    service_hold = ndb.IntegerProperty(default=0)
    path_hold = ndb.IntegerProperty(default=0)
class CharacterImage(ndb.Model):
    """A blobstore-backed image owned by a user."""
    blob = ndb.BlobKeyProperty()
    owner = ndb.StringProperty()
    access_params = ndb.StringProperty()
class FacebookUser(ndb.Model):
    """An account linked via Facebook OAuth (stores the access token)."""
    displayname = ndb.StringProperty(required=True)
    user_id = ndb.StringProperty()
    profile_url = ndb.StringProperty(required=True)
    access_token = ndb.StringProperty(required=True)
    access_params = ndb.StringProperty()
    email = ndb.StringProperty()
    joined_date = ndb.DateTimeProperty(auto_now_add=True)
    last_visited = ndb.DateTimeProperty(auto_now=True)
    avatar = ndb.StringProperty()
class User(ndb.Model):
    """Registered account with a salted password hash and an API key."""
    user_id=ndb.IntegerProperty(required=True)
    email = ndb.StringProperty()
    #displayname = ndb.StringProperty()
    username = ndb.StringProperty(required=True)
    org = ndb.StringProperty()
    access_params = ndb.StringProperty()
    pw_hash = ndb.StringProperty()
    last_visited = ndb.DateTimeProperty(auto_now=True)
    joined_date = ndb.DateTimeProperty(auto_now_add=True)
    APIkey = ndb.StringProperty()
    graph_created = ndb.IntegerProperty(default=0)
    @classmethod
    def by_id(cls, uid):
        """Fetch by datastore key id."""
        return User.get_by_id(uid)
    @classmethod
    def by_username(cls, username):
        """Fetch the first user with this username, or None."""
        u = User.query(User.username == username).get()
        return u
    @classmethod
    def by_login(cls, user_id):
        """Fetch the first user with this numeric user_id, or None."""
        u = User.query(User.user_id == user_id).get()
        return u
    @classmethod
    def by_email(cls, email):
        """Fetch the first user with this email, or None."""
        u = User.query(User.email == email).get()
        return u
    @classmethod
    def register(cls, username,email, password, org, user_id):
        """Build an unsaved User with hashed password and fresh API key."""
        pw_hash = make_pw_hash(username, password)
        access_params = create_params()
        api_key = generate_key()
        return User( user_id = user_id,
                     username = username,
                     email = email,
                     pw_hash = pw_hash,
                     org = org,
                     access_params = access_params,
                     APIkey = api_key )
    @classmethod
    def add_test_user(cls, user_id , username ):
        """Minimal fixture user for tests (no password/API key)."""
        return User( user_id = user_id,
                     username = username )
    @classmethod
    def login(cls, username, password):
        """Return the user if the password verifies, else None."""
        u = cls.by_username(username)
        if u and valid_pw(username, password, u.pw_hash):
            return u
    @classmethod
    def bypass_login(cls, user_id):
        """Return the user for *user_id* without a password check."""
        # BUG FIX: previously called cls.by_user_id(), which does not exist
        # anywhere in this module and raised AttributeError; by_login() is
        # the user_id lookup.
        u = cls.by_login(user_id)
        if u:
            return u
#check unauthorized post
class APIDatabase(ndb.Model):
    """Registry of issued API keys, used to validate incoming posts."""
    api_id = ndb.IntegerProperty(required=True)
    api_key = ndb.StringProperty(required=True)
    @classmethod
    def add_new_key(cls,api_id,api_key):
        """Build (but do not persist) an APIDatabase entry."""
        return APIDatabase(api_id = api_id, api_key = api_key)
class Step(ndb.Model):
    """One move inside a WayPoints playthrough (turn span, solution type,
    cost, traversed path and its impact components)."""
    startTurn = ndb.IntegerProperty()
    endTurn = ndb.IntegerProperty()
    solType = ndb.StringProperty()
    cost = ndb.IntegerProperty()
    fromCity = ndb.IntegerProperty()
    toCity = ndb.IntegerProperty()
    pathID = ndb.IntegerProperty()
    score = ndb.IntegerProperty()
    ai = ndb.IntegerProperty()
    ci = ndb.IntegerProperty()
    ii = ndb.IntegerProperty()
class WayPoints(ndb.Model):
    """A saved playthrough: the sequence of Steps a player took on a map."""
    waypointsID = ndb.IntegerProperty()
    #just a graph
    status = ndb.StringProperty()
    mapID = ndb.IntegerProperty()
    playerID = ndb.StringProperty()
    score = ndb.IntegerProperty()
    step = ndb.StructuredProperty(Step, repeated=True)
    savedTurn = ndb.IntegerProperty()
    graphStat = ndb.TextProperty()
class WaypointReport(ndb.Model):
    """Aggregated results of one waypoint playthrough, denormalized with
    graph/owner ids to avoid joins when reporting."""
    waypointID = ndb.IntegerProperty(required=True)
    play_by = ndb.StringProperty(required=True)
    score = ndb.IntegerProperty(required=True)
    total_turn = ndb.IntegerProperty(required=True)
    total_impact = ndb.IntegerProperty(required=True)
    # query without exhausted joining
    graph_id = ndb.IntegerProperty(required=True)
    owner_id = ndb.IntegerProperty(required=True)
    play_count = ndb.IntegerProperty(default=0)
    maximum_impact = ndb.FloatProperty(required=True)
    #newly add
    #status = ndb.StringProperty(required=True)
    @classmethod
    def add_new_waypoint_report(cls,waypointID,play_by,score,total_turn,total_impact,owner_id,graph_id,maximum_impact,status):
        """Build (but do not persist) a report with play_count starting at 1.

        NOTE(review): the ``status`` argument is accepted but never stored;
        the corresponding property above is commented out -- confirm intent.
        """
        return WaypointReport( waypointID = waypointID,
                               play_by = play_by,
                               score = score,
                               total_turn = total_turn,
                               total_impact = total_impact,
                               graph_id = graph_id,
                               owner_id = owner_id,
                               play_count = 1,
                               maximum_impact = maximum_impact )
class MapReport(ndb.Model):
    """Per-map aggregate statistics (totals, averages, best score),
    denormalized with graph/owner ids to avoid joins."""
    mapID = ndb.IntegerProperty(required=True)
    # map name doesn't exist?
    #map_name = ndb.IntegerProperty(required=True)
    play_count = ndb.IntegerProperty()
    score = ndb.IntegerProperty()
    avg_score = ndb.FloatProperty()
    total_turn = ndb.IntegerProperty()
    avg_total_turn = ndb.FloatProperty()
    total_impact = ndb.IntegerProperty()
    top_score = ndb.IntegerProperty(default=0)
    avg_total_impact = ndb.FloatProperty()
    maximum_impact = ndb.FloatProperty()
    # query without exhausted joining
    graph_id = ndb.IntegerProperty(required=True)
    owner_id = ndb.IntegerProperty(required=True)
    @classmethod
    def add_new_map_report(cls,mapID,play_count,score,avg_score,total_turn,avg_total_turn,total_impact,avg_total_impact,owner_id,graph_id,maximum_impact):
        """Build (but do not persist) a MapReport; top_score keeps its
        default of 0."""
        return MapReport( mapID = mapID,
                          play_count = play_count,
                          score = score,
                          avg_score = avg_score,
                          total_turn = total_turn,
                          avg_total_turn = avg_total_turn,
                          total_impact = total_impact,
                          avg_total_impact = avg_total_impact,
                          graph_id = graph_id,
                          owner_id = owner_id,
                          maximum_impact = maximum_impact)
class PathReport(ndb.Model):
    """Usage statistics for a single path on a map (how often traversed,
    endpoints, impact components)."""
    mapID = ndb.IntegerProperty(required=True)
    graph_id = ndb.IntegerProperty(required=True)
    owner_id = ndb.IntegerProperty(required=True)
    pathID = ndb.IntegerProperty(required=True)
    srcMachine = ndb.StringProperty()
    dstMachine = ndb.StringProperty()
    srcService = ndb.StringProperty()
    dstService = ndb.StringProperty()
    ### what for ???
    ai = ndb.IntegerProperty(required=True)
    ii = ndb.IntegerProperty(required=True)
    ci = ndb.IntegerProperty(required=True)
    ### newly added
    av = ndb.IntegerProperty(required=True)
    ac = ndb.IntegerProperty(required=True)
    au = ndb.IntegerProperty(required=True)
    counting = ndb.IntegerProperty(default=0)
    name = ndb.StringProperty()
    @classmethod
    def add_new_path_report(cls,mapID,graph_id,owner_id,pathID,srcM,dstM,srcS,dstS,ai,ii,ci,av,ac,au,counting):
        """Build (but do not persist) a PathReport from abbreviated args."""
        return PathReport(
            mapID=mapID,
            graph_id=graph_id,
            owner_id=owner_id,
            pathID=pathID,
            srcMachine=srcM,
            dstMachine=dstM,
            srcService=srcS,
            dstService=dstS,
            ai=ai,ii=ii,ci=ci,
            av=av,au=au,ac=ac,
            counting=counting
        )
class Solution(ndb.Model):
    """A CVE-based remediation recorded against a map."""
    cve_id = ndb.StringProperty(required=True)
    cwe_name = ndb.StringProperty(required=True)
    from_map = ndb.IntegerProperty(required=True)
    counting = ndb.IntegerProperty(default=0)
    @classmethod
    def add_new_solution(cls,solution_id,cve_id,cwe_name,from_map):
        """Build (but do not persist) a Solution with counting starting at 1.

        ``solution_id`` is kept in the signature for caller compatibility
        but is not stored: the model declares no such property.
        """
        # BUG FIX: dropped solution_id=... from the constructor call; ndb
        # rejects keyword arguments that are not declared properties.
        return Solution(cve_id=cve_id,cwe_name=cwe_name,from_map=from_map,counting=1)
class SolTypeReport(ndb.Model):
owner_id = ndb.IntegerProperty(required=True)
mapID = ndb.IntegerProperty(required=True)
cve_id = ndb.StringProperty(required=True)
service_name = ndb.StringProperty()
solType_impact = ndb.IntegerProperty()
cwe_name = ndb.StringProperty(required=True)
counting = ndb.IntegerProperty(default=0)
avg_hit = ndb.FloatProperty(default=1)
@classmethod
def add_new_soltype(cls,owner_id,mapID,cve_id,cwe_name,service_name,solType_impact):
return SolTypeReport( owner_id = owner_id,
mapID = mapID,
cve_id = cve_id,
cwe_name = cwe_name,
counting = 1,
service_name = service_name,
solType_impact = solType_impact) |
9,132 | 06cb832c3adae95fcd1d1d2d0663641d3ac671ef | def main():
x = float(input("Coordenada x: "))
y = float(input("Coordenada y: "))
if 1 <= y <= 2 and -3 <= x <= 3:
print("dentro")
elif (4 <= y <= 5 or 6 <= x <= 7) and ( -4 <= x <= -3 or -2 <= x <= -1 or 1 <= x <= 2 or 3 <= x <= 4):
print("dentro")
else:
print("fora")
#-----------------------------------------------------
if __name__ == '__main__': # chamada da funcao principal
main()
|
9,133 | 0a90f29a4e18c2aed23cb31b4239d44d23526327 | from telegram.ext import Updater, Filters, MessageHandler, PicklePersistence
import telegram
import logging
# Timestamped log lines written to log.json.
# NOTE(review): the format produces plain text (with a trailing blank line
# per record), not JSON, despite the .json filename -- confirm intent.
logging.basicConfig(format='%(asctime)s %(message)s\n',
                    level=logging.INFO,filename='log.json')
logger = logging.getLogger(__name__)
def main():
    """Build the Telegram bot, register handlers, and poll until stopped.

    NOTE: the bot token string is intentionally left empty here; fill it in
    before running.
    """
    # my_persistence = PicklePersistence(filename="users") #uncomment if you need persistence
    # updater = Updater("",persistence=my_persistence,use_context=True)
    updater = Updater("",use_context=True)
    dp = updater.dispatcher
    jobs = updater.job_queue  # job-queue handle (no jobs scheduled yet)

    def error(update, context):
        # BUG FIX: `error` was registered below but never defined anywhere
        # in this module, so add_error_handler raised NameError. Log the
        # dispatcher error together with the offending update instead.
        logger.error('Update "%s" caused error "%s"', update, context.error)

    dp.add_error_handler(error)
    updater.start_polling()
    updater.idle()
if __name__=="__main__":
main() |
9,134 | 002cced6d24a4790d29f195355c795d609f744a7 | n = int(input())
m = int(input())
x = int(input())
y = int(input())
# Replace each coordinate with its distance to the nearer opposite edge,
# pairing the smaller dimension with x and the larger with y (or vice
# versa), exactly as the original branch structure did.
if m < n:
    x = min(x, m - x)
    y = min(y, n - y)
else:
    x = min(x, n - x)
    y = min(y, m - y)
# Answer: the smaller of the two reduced distances.
print(min(x, y))
|
9,135 | dcbbc7098410d771a7151af7c43ac4d0e4d46f18 | ##########################################################################
#
# Draw a 2-D plot for student registration number and the marks secured using gnuplot
#
##########################################################################
import Gnuplot
# Collect each student's register number and marks from the console.
student_reg = []
student_marks = []
n = int(input("Enter number of students: "))
for _ in range(n):
    student_reg.append(int(input("Enter RegNo: ")))
    student_marks.append(int(input("Enter marks: ")))
# Plot marks against register numbers as a line chart.
gplt = Gnuplot.Gnuplot(persist=1)
gplt.title("RegNo. V/S Marks")
gplt.xlabel("Student RegNo--->")
gplt.ylabel("Student Marks--->")
d = Gnuplot.Data(student_reg, student_marks, with_="line")
gplt.plot(d)
|
9,136 | 783326ccec31dc7a0ff46c5e4b69806e99aeda57 | # template for "Guess the number" mini-project
# input will come from buttons and an input field
# all output for the game will be printed in the console
import simplegui
import random
import math
# initialize global variables used in your code
# Upper bound of the guessable interval [0, range).
# NOTE(review): this shadows the builtin `range`; renaming it would touch
# every handler below, so it is only flagged here.
range = 100
guesses_made = 0
guesses_remaining = 0
highest_guess = 0
lowest_guess = 0
# The secret number; re-rolled by range100()/range1000().
correct_num = 0
victory_condition = False
# define event handlers for control panel
def range100():
    """Set the range of guessable numbers to [1,100) and restarts"""
    global range, guesses_made, guesses_remaining, correct_num, victory_condition
    range = 100
    guesses_made = 0
    # 7 guesses are enough to binary-search 100 values.
    guesses_remaining = 7 #calculate_remaining_guesses(range)
    correct_num = random.randrange(range)
    victory_condition = False
    print "New Game! Guess between 1 and ", range
    print "Remaining guesses: ", guesses_remaining
def range1000():
    """Set the range of guessable numbers to [1,1000) and restarts"""
    global range, guesses_made, guesses_remaining, correct_num, victory_condition
    range = 1000
    guesses_made = 0
    # 10 guesses are enough to binary-search 1000 values.
    guesses_remaining = 10#calculate_remaining_guesses(range)
    correct_num = random.randrange(range)
    victory_condition = False
    print "New Game! Guess between 1 and ", range
    print "Remaining guesses: ", guesses_remaining
# main game logic goes here
def get_input(guess):
    """Handle one guess from the input field: compare it to the secret
    number and print higher/lower/won/lost feedback."""
    global guesses_made, guesses_remaining, victory_condition
    guess = int(guess)
    # A guess is spent even if it turns out to be correct.
    guesses_remaining -= 1
    print "Your guess:" , guess
    guesses_made += 1
    if victory_condition == False:
        if guess == correct_num:
            print "Correct!"
            print "You guessed the number in " , guesses_made , " guesses!"
            victory_condition = True
        # Still guesses left and not yet won: give a direction hint.
        if guesses_remaining > 0 and victory_condition == False:
            if guess > correct_num:
                print "Lower..."
                print "Remaining guesses:" , guesses_remaining , "\n"
            else:
                print "Higher..."
                print "Remaining guesses:" , guesses_remaining , "\n"
        elif victory_condition == True:
            print "You've won! Start a new game."
        else:
            # No guesses left and the last one was wrong.
            print "You've run out of guesses. Game over!"
            print "The correct number was: " , correct_num
    else:
        # Game already won; further input just reminds the player.
        print "You've won! Start a new game.\n"
# create frame
frame = simplegui.create_frame("Guess the Number!", 400, 400, 300)
# register event handlers for control elements
frame.add_button("Range 1..100", range100, 100)
frame.add_button("Range 1..1000", range1000, 100)
frame.add_input("Enter your guess:", get_input, 100)
# NOTE(review): this fires a guess of 0 before any game has started
# (guesses_remaining is still 0, correct_num is 0) -- confirm it is meant
# as an initialization call rather than leftover debugging.
get_input(0)
# start frame
frame.start()
|
9,137 | c812419e7e024b0bb1207832b2b4a726ef61b272 | class FieldDesigner:
"""
Designs a field for BattleShips, accepts field height and width
"""
def __init__(
self,
):
self.field = []
def design_field(
self,
height,
width,
):
self.field = [[
'~' for __
in range(height)]
for __ in range(width)
]
return self.field
def __str__(
self,
):
return '\n'.join(map(str, self.field)) |
9,138 | a14a1803a0bae755803c471b12035398de262dbc | import re
def molecule_to_list(molecule: str) -> list:
    """Splits up a molecule into elements and amount in order of appearance

    Args:
        molecule (str): The molecule to split up

    Raises:
        ValueError: If molecule is empty or starts with a lower case letter
        ValueError: If molecule contains a non-alphanumeric character
        ValueError: If an element starts with a lower case letter

    Returns:
        list: A list of tuples containing the element symbol and the number of
        its appearances at that position
    """
    # BUG FIX: an empty string used to raise IndexError on molecule[0];
    # report it as invalid input instead.
    if not molecule:
        raise ValueError
    if molecule[0].islower():
        raise ValueError
    # Reject non-alphanumeric characters (note: \w also admits '_', which
    # the element pattern below then silently skips -- kept for
    # backward compatibility).
    if re.match(r"^[\w]+$", molecule) is None:
        raise ValueError
    result = []
    # An element is one upper-case letter with an optional lower-case second
    # letter; bare lower-case runs are captured so they can be rejected.
    # GENERALIZED: \d+ instead of \d{1,2}, so counts >= 100 are parsed
    # instead of being truncated or dropped.
    elements = re.findall(r"([A-Z][a-z]?|[a-z]{1,2})(\d+)?", molecule)
    for symbol, count in elements:
        if symbol.islower():
            raise ValueError
        # A missing count means a single atom at that position.
        result.append((symbol, int(count) if count else 1))
    return result
9,139 | 753c87a3d22aeca1001eb770831b846b175d873e | from hops import constants
class Cluster(object):
"""
Represents a Cluster in Cluster Analysis computed for a featuregroup or training dataset in the featurestore
"""
def __init__(self, cluster_json):
"""
Initialize the cluster object from JSON payload
Args:
:cluster_json: JSON data of the cluster
"""
self.datapoint_name = cluster_json[constants.REST_CONFIG.JSON_CLUSTERING_ANALYSIS_DATA_POINT_NAME]
self.cluster = int(cluster_json[constants.REST_CONFIG.JSON_CLUSTERING_ANALYSIS_CLUSTER]) |
9,140 | 63bfaa6e191e6090060877e737f4b003bed559cf | #! /usr/local/bin/python3
# -*- coding: utf-8 -*-
from requests_oauthlib import OAuth1Session
# Base URL of the Twitter REST API v1.1.
BASEURL = 'https://api.twitter.com/1.1/'
# SECURITY: OAuth consumer and access credentials are hard-coded in source.
# They should be rotated and loaded from the environment or a secrets store,
# not committed to version control.
CK = '3rJOl1ODzm9yZy63FACdg'
CS = '5jPoQ5kQvMJFDYRNE8bQ4rHuds4xJqhvgNJM4awaE8'
AT = '333312023-6dTniMxvwlQG8bATKNYWBXaQkftz9t4ZjRBt7BWk'
AS = 'LQ8xXBTTN8F8CHQv9oDAqsGJFeexdnFf2DFzn3EzGH2L8'
def get_instance(rest_url, params):
    """GET *rest_url* (relative to BASEURL) with *params* via OAuth1 and
    return the raw response object."""
    url = BASEURL + rest_url
    print(url)
    session = OAuth1Session(CK, CS, AT, AS)
    return session.get(url, params=params)
|
9,141 | 89ba805e47a9727573e1e25371a70fb887ee170d | import datetime
import operator
import geopy
from django.db import models
from django.db.models import Q
from django.db.models.query import QuerySet
from django.db.models import permalink
from django.contrib.auth.models import User
geocoder = geopy.geocoders.Google()
class City(models.Model):
    """A city with a unique name; parent of Area."""
    name = models.CharField(max_length=30, unique=True)
    class Meta:
        ordering = ('name',)
        verbose_name_plural = "cities"
    def __unicode__(self):
        return self.name
class Area(models.Model):
    """A named district within a city; unique per (name, city)."""
    name = models.CharField(max_length=50)
    city = models.ForeignKey(City)
    class Meta:
        unique_together = ('name', 'city')
        ordering = ('name',)
    def __unicode__(self):
        return self.name
class ApartmentQuerySet(QuerySet):
    """QuerySet with apartment-specific filters."""

    def available(self):
        """Listings flagged available whose tenant account is active."""
        return self.filter(is_available=True, tenant__is_active=True)
class ApartmentManager(models.Manager):
    """Manager that always joins area and city, plus common filters."""

    def get_query_set(self):
        """Base queryset with area and area__city pre-selected."""
        return ApartmentQuerySet(self.model).select_related(
            'area', 'area__city')

    def available(self):
        """Currently available apartments (active tenant)."""
        return self.get_query_set().available()

    def search(self, search):
        """Free-text search over area name, city name, postcode and address.

        Empty or whitespace-only input yields an empty queryset.
        """
        if search:
            search = search.strip()
        if not search:
            return self.none()
        fields = (
            'area__name__iexact',
            'area__city__name__iexact',
            'postcode__iexact',
            'address__icontains',
        )
        # OR the per-field criteria together explicitly.
        combined = Q(**{fields[0]: search})
        for field in fields[1:]:
            combined = combined | Q(**{field: search})
        return self.filter(combined)
class Apartment(models.Model):
    """A rental listing: location, landlord, availability window, physical
    attributes, amenity flags and free-text description sections."""
    TYPE_APARTMENT = 1
    TYPE_DETACHED = 2
    TYPE_SEMIDETACHED = 3
    TYPE_COTTAGE =4
    TYPE_CHOICES = (
        (TYPE_APARTMENT, "Apartment"),
        (TYPE_DETACHED, "Detached house"),
        (TYPE_SEMIDETACHED, "Semi-detached house"),
        (TYPE_COTTAGE, "Cottage"),
    )
    SAUNA_NONE = 1
    SAUNA_SHARED = 2
    SAUNA_OWN = 3
    SAUNA_CHOICES = (
        (SAUNA_NONE, "No sauna"),
        (SAUNA_SHARED, "Shared/communal sauna"),
        (SAUNA_OWN, "Own sauna"),
    )
    ROOM_CHOICES = (
        (1, "1 room"),
        (2, "2 rooms"),
        (3, "3 rooms"),
        (4, "4 rooms"),
    )
    LANDLORD_TENANT = 1
    LANDLORD_PRIVATE = 2
    LANDLORD_AGENCY = 3
    LANDLORD_CHOICES = (
        (LANDLORD_TENANT, "Occupant"),
        (LANDLORD_PRIVATE, "Private landlord"),
        (LANDLORD_AGENCY, "Rental agency"),
    )
    # Location and ownership
    area = models.ForeignKey(Area)
    tenant = models.ForeignKey(User)
    landlord = models.IntegerField(
        choices=LANDLORD_CHOICES,
        default=LANDLORD_PRIVATE,
    )
    agency = models.CharField(max_length=100, null=True, blank=True)
    agency_website = models.URLField(null=True, blank=True)
    address = models.CharField(max_length=100)
    postcode = models.CharField(max_length=7)
    # Filled in by save() via geocoding of the street address
    latitude = models.FloatField(null=True, blank=True)
    longitude = models.FloatField(null=True, blank=True)
    added_on = models.DateTimeField(auto_now_add=True)
    # Availability window
    is_available = models.BooleanField(default=True)
    available_from = models.DateField(null=True, blank=True)
    available_to = models.DateField(null=True, blank=True)
    is_shared = models.BooleanField('Shared accomodation', default=False)
    type = models.IntegerField(
        choices=TYPE_CHOICES,
        default=TYPE_APARTMENT,
    )
    num_rooms = models.IntegerField('Rooms', choices=ROOM_CHOICES)
    floor = models.IntegerField(null=True, blank=True)
    lift = models.BooleanField(default=False)
    num_floors = models.IntegerField(null=True, blank=True)
    sauna = models.IntegerField(
        choices=SAUNA_CHOICES,
        default=SAUNA_NONE,
    )
    # Monthly rent and (optional) deposit, both monetary amounts
    rent_pcm = models.DecimalField(
        decimal_places=2,
        max_digits=8,
    )
    deposit = models.DecimalField(
        decimal_places=2,
        max_digits=8,
        null=True,
        blank=True
    )
    # Policy flags and amenities
    smoking = models.BooleanField(default=False)
    pets = models.BooleanField(default=False)
    size = models.FloatField('Size (sqm)')
    garden_size = models.FloatField(null=True, blank=True)
    furnished = models.BooleanField(default=False)
    cable = models.BooleanField(default=False)
    broadband = models.BooleanField(default=False)
    satellite = models.BooleanField(default=False)
    balcony = models.BooleanField(default=False)
    parking = models.BooleanField(default=False)
    garage = models.BooleanField(default=False)
    bike_storage = models.BooleanField(default=False)
    extra_storage = models.BooleanField(default=False)
    gym = models.BooleanField(default=False)
    laundry = models.BooleanField(default=False)
    # Free-text sections
    description = models.TextField(null=True, blank=True)
    kitchen_amenities = models.TextField(null=True, blank=True)
    furniture = models.TextField(null=True, blank=True)
    heating = models.TextField(null=True, blank=True)
    other_amenities = models.TextField(null=True, blank=True)
    objects = ApartmentManager()
    # Python 2 style string representation (pre-Django-2 codebase)
    def __unicode__(self):
        return self.get_full_address()
    @permalink
    def get_absolute_url(self):
        """URL of this apartment's detail page."""
        return ('apartments:detail', [str(self.id)])
    def get_full_address(self):
        """Street address, postcode and upper-cased city name."""
        return "{0}, {1} {2}".format(
            self.address,
            self.postcode,
            self.area.city.name.upper()
        )
    def is_agency_landlord(self):
        """True when the listing was posted by a rental agency."""
        return self.landlord == self.LANDLORD_AGENCY
    def get_location(self):
        """Geocode the full address (assumed to be in Finland) and return
        a (latitude, longitude) pair."""
        searchable = "{0}, {1} {2}, Finland".format(
            self.address,
            self.postcode,
            self.area.city,
        )
        address, (lat, lng) = geocoder.geocode(
            searchable,
            exactly_one=True
        )
        return lat, lng
    def save(self, *args, **kwargs):
        # NOTE(review): this geocodes on *every* save, i.e. one external
        # request per save -- confirm that is intended.
        self.latitude, self.longitude = self.get_location()
        super(Apartment, self).save(*args, **kwargs)
|
9,142 | 9b3040fa02cf8f039bac146f8a73384731c56722 | #While Loop
# Count from 0 to 8 with a while loop.
count = 0
while count < 9:
    print("Number:",count)
    count += 1
print("Good Bye")
# Walk a list of fruit names with a for loop.
fruits = ['Mango','Grapes','Apple']
for fruit in fruits:
    print("current fruits:",fruit)
print("Good bye")
|
9,143 | 01de85b0d480c105c8cc1a8154c3de936ab3226d | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: wenshu/actions.py
# Author: Carolusian <https://github.com/carolusian>
# Date: 22.09.2018
# Last Modified Date: 22.09.2018
#
# Copyright 2018 Carolusian
import time
import itertools
import re
import requests
import json
import os
from random import randint
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.common.exceptions import ElementNotInteractableException
from .exceptions import UnsupportedPlatformException
from .config import get_logger, DOC_LINK_BASE
from .utils import retry
logger = get_logger(__name__)
def sleep(min_seconds=1, max_seconds=10):
    """Pause for a random whole number of seconds in [min_seconds, max_seconds]."""
    duration = randint(min_seconds, max_seconds)
    time.sleep(duration)
def click(elem):
    """Click ``elem``; treat elements that cannot be interacted with as a no-op."""
    try:
        elem.click()
    except ElementNotInteractableException:
        # Hidden/disabled element: swallow and continue.
        pass
def open_website(url):
    """Launch a Firefox instance, navigate it to ``url`` and return the driver."""
    driver = webdriver.Firefox()
    driver.get(url)
    return driver
def is_finished(browser):
    """Return True when the result list shows the "no matching data" marker.

    The marker can appear transiently, so an empty page triggers a single
    refresh and re-check before the page is declared exhausted.
    """
    finish_text = '无符合条件的数据...'
    sleep_secs = 15
    time.sleep(sleep_secs)
    listing = browser.find_element_by_id('resultList')
    if finish_text not in listing.text:
        return False
    logger.info('Try refresh to reload content')
    browser.refresh()
    time.sleep(sleep_secs)
    # Re-read the list after the reload; only a repeated marker counts.
    listing = browser.find_element_by_id('resultList')
    return finish_text in listing.text
def download_docs(browser, save_dir='./', click_next_page=False):
    """Download every document on the current page, then recurse page by page.

    Files are grouped under a subfolder named after the active search
    keywords. Recursion ends when is_finished() reports no more data.
    """
    if click_next_page:
        pager = browser.find_elements(By.XPATH, '//*[@id="pageNumber"]/a[contains(text(), "下一页")]')
        pager[0].click()
    if is_finished(browser):
        logger.info('Finished downloading documents in this page.')
        return
    # Subfolder name comes from the search-condition labels shown on the page.
    keyword_elems = browser.find_elements(By.XPATH, '//*[@class="contentCondtion"]')
    subfolder = '-'.join(el.text for el in keyword_elems)
    for item in browser.find_elements(By.XPATH, '//*[@class="dataItem"]'):
        save_doc(browser, item, os.path.join(save_dir, subfolder))
        time.sleep(1)
    # Advance to the next page once this one is done.
    download_docs(browser, save_dir, click_next_page=True)
@retry(times=5, delay=5, allowed_exceptions=IndexError)
def save_doc(browser, doc_elem, save_dir):
    """Resolve a result-list item to its document URL, download the page
    and write it to ``save_dir`` as ``<Title>.html``.

    Retried up to 5 times because the regex lookup below raises IndexError
    when the response does not (yet) contain the document payload.
    """
    doc_key = doc_elem.get_attribute('key')
    doc_title = doc_elem.get_attribute('title')
    logger.info('Found document %s.' % doc_title)
    # The site obfuscates document ids; reuse its own in-page JS helpers
    # to unzip and decrypt the key into a usable document id.
    unzipped_id = browser.execute_script('return unzip("%s")' % doc_key)
    doc_id = browser.execute_script('return com.str.Decrypt("%s")' % unzipped_id)
    doc_link = DOC_LINK_BASE % doc_id
    headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
    # The payload is embedded in the page as an escaped JSON literal.
    p = re.compile('(var jsonHtmlData = ")(.+)(\\"}";)')
    resp = requests.get(doc_link, headers=headers)
    resp_text = resp.text
    # Strip the backslash escaping and restore the closing brace the regex
    # consumed; [0] raises IndexError on no match, which triggers a retry.
    resp_obj = p.findall(resp_text)[0][1].replace('\\', '') + '"}'
    resp_obj = json.loads(resp_obj)
    os.makedirs(save_dir, exist_ok=True)
    with open(os.path.join(save_dir, resp_obj['Title'] + '.html'), 'w') as f:
        f.write(resp_obj['Html'])
    logger.info('Downloaded %s.' % resp_obj['Title'])
|
9,144 | a826f33361ec59824f3c4a83d01e94c6b307b0a9 | import os
# Generate per-condition run directories and command.sh files for the
# ./nk_oee executable (parameter-sweep experiment setup).
#defaults = {"N":20, "K":3, "POP_SIZE":200, "MUT_RATE":.05, "TOURNAMENT_SIZE":2, "SELECTION":0, "CHANGE_RATE":100000, "MAX_GENS": 5000, "FILTER_LENGTH":50}
# A default value may name another key (FILTER_LENGTH -> "POP_SIZE"); such
# references are resolved to concrete values in the loop below.
defaults = {"N":20, "K":3, "POP_SIZE":200, "MUT_RATE":.05, "TOURNAMENT_SIZE":2, "SELECTION":0, "CHANGE_RATE":100000, "MAX_GENS": 5000, "FILTER_LENGTH":"POP_SIZE"}
conditions = [{},{"K":10}, {"N":100, "MUT_RATE":.01}, {"MUT_RATE":.005}, {"MUT_RATE": .1}, {"POP_SIZE":20}, {"POP_SIZE":2000}, {"SELECTION":1}, {"SELECTION":1, "FILTER_LENGTH":1000}, {"CHANGE_RATE":500}, {"CHANGE_RATE":500, "CHANGE_TYPE":1}]
seed = 0
for condition in conditions:
    print(condition)
    # command[1] is a placeholder; it is overwritten with str(seed) per run.
    command = ["./nk_oee -MODES_RESOLUTION 10 -SEED", seed]
    dir_name = []
    # Fill in every parameter the condition does not override.
    for var in defaults:
        if var not in condition:
            condition[var] = defaults[var]
    for var in condition:
        # Resolve values that reference another parameter by name.
        while condition[var] in condition:
            condition[var] = condition[condition[var]]
        command.append("-"+var)
        dir_name.append("".join(var.split("_"))) # Underscores in variable names will screw up parsing later
        val = str(condition[var])
        command.append(val)
        dir_name.append(val)
    str_dir_name = "_".join(dir_name)
    if not os.path.exists(str_dir_name):
        os.mkdir(str_dir_name)
    # 30 replicates per condition, each with its own seed and numbered
    # subdirectory; existing command.sh files are left alone (resumable).
    for i in range(30):
        if os.path.exists(str_dir_name+"/"+str(i)+"/command.sh"):
            continue
        seed += 1
        command[1] = str(seed)
        print(command)
        os.mkdir(str_dir_name+"/"+str(i))
        with open(str_dir_name+"/"+str(i)+"/command.sh", "w") as infile:
            infile.write(" ".join(command))
9,145 | 19126e5041841ab1320730ae82d66c6900cf31bd | import sys, os
sys.path.insert(0, os.path.abspath("adjust_schedule_function"))
|
9,146 | 3458e1efdc492a08d8272469aa9e3f0ca72c7ba3 | import h5py
import sys
# Strip the saved optimizer state from a Keras HDF5 model file (shrinks the
# file; only architecture/weights remain). Usage: script.py <model.h5>
f = h5py.File(sys.argv[1], 'r+')
try:
    del f['optimizer_weights']
except KeyError:
    # Group already absent: nothing to strip.
    # BUG FIX: was a Python-2-only `print "done"` inside a bare `except:`
    # that silently swallowed every error, not just a missing group.
    print("done")
finally:
    # Always close the file, even if an unexpected error propagates.
    f.close()
9,147 | 84ece5d1a9e38b83a5b60052fc3ab089c498d2fc | from django.contrib import admin
from get_my_tweets.models import username
# Expose the `username` model in the Django admin site.
admin.site.register(username)
|
9,148 | 0ed0fb6f9bcc768bb005222c9ae9b454f6d962ec | #!/usr/bin/env python
from __future__ import print_function
import weechat
import sys
import pickle
import json
import math
import os.path
from datetime import datetime
from datetime import date
from datetime import timedelta
from dateutil.parser import parse as datetime_parse
from os.path import expanduser
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
# TODO: Add settings
# minutes_remaining = [5, 10, 15]
# notify_enabled = yes/no
# time_format = '%H:%M' ???
SCRIPT_NAME = 'weechat-gcal'
SCRIPT_AUTHOR = 'Dave Mulford'
SCRIPT_VERSION = '0.1'
SCRIPT_LICENSE = 'GPL2'
SCRIPT_DESC = 'A Google Calendar integration script that provides notifications of upcoming events.'
SCRIPT_SHUTDOWN_FN = ''
SCRIPT_CHARSET = ''
TIMEOUT_MS = 3000
CALLED_FROM_CMD = '100'
CALLED_FROM_TIMER = '200'
NOTIFICATION_THRESHOLDS = [5,15]
# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/calendar.readonly']
# Where the weechat-gcal-token.pickle file is located
CACHE_DIR = os.path.join(expanduser('~'), '.cache', 'weechat-gcal')
# =============================
# GOOGLE CALENDAR FUNCTIONS
# =============================
def _load_credentials(creds_file=None):
    """Load Google OAuth credentials, prompting for authentication if needed.

    Looks for ``credentials.json`` at *creds_file*, the current directory,
    then the home directory; caches the resulting token under CACHE_DIR.
    Returns a credentials object for the Google Calendar API (see SCOPES).
    """
    creds = None
    # Validate the credentials file
    if not creds_file:
        creds_file = 'credentials.json'
    if not os.path.exists(creds_file):
        creds_file = os.path.join(expanduser('~'), 'credentials.json')
    if not os.path.exists(creds_file):
        raise SystemExit('Could not find a credentials.json file. ' \
            'Either pass one as argument or make sure credentials.json exists in ' \
            'the current directory or ' + expanduser('~'))
    # Creates CACHE_DIR if it does not exist
    # mode 0x777 (the default) is used because the system's umask value is masked out first
    if not os.path.exists(CACHE_DIR):
        os.mkdir(CACHE_DIR)
    pickle_filename = os.path.join(CACHE_DIR, 'weechat-gcal-token.pickle')
    # The token pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the
    # first time. (Pickle is acceptable here: the file is local and written
    # only by this script.)
    if os.path.exists(pickle_filename):
        with open(pickle_filename, 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            # Interactive browser-based OAuth flow on a random local port.
            flow = InstalledAppFlow.from_client_secrets_file(creds_file, SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open(pickle_filename, 'wb') as token:
            pickle.dump(creds, token)
    return creds
def gc_get_events(num_events=50):
    """Return up to ``num_events`` events from the user's primary calendar,
    from now until the end of tomorrow, ordered by start time."""
    creds = _load_credentials()
    service = build('calendar', 'v3', credentials=creds)
    # Call the Calendar API
    now = datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time
    # today + 2 days at midnight == the end of tomorrow
    tomorrow = datetime.combine( \
        date.today() + timedelta(days=2), \
        datetime.min.time()) \
        .isoformat() + 'Z'
    #print('Getting the upcoming {} events between {} and {}'.format(num_events, now, tomorrow))
    events_result = service.events().list(calendarId='primary', timeMin=now, timeMax=tomorrow,
                                        maxResults=num_events, singleEvents=True,
                                        orderBy='startTime').execute()
    events = events_result.get('items', [])
    return events
# =============================
# WEECHAT HELPER FUNCTIONS
# =============================
def buffer_get():
    """Return the script's output buffer, creating and configuring it on first use."""
    buf = weechat.buffer_search('python', SCRIPT_NAME)
    if buf:
        return buf
    buf = weechat.buffer_new(SCRIPT_NAME, 'buffer_input', '', '', '')
    # Configure the fresh buffer: no timestamps, no nicklist, no logging.
    for prop, value in (('time_for_each_line', '0'),
                        ('nicklist', '0'),
                        ('title', 'Google Calendar'),
                        ('localvar_set_no_log', '1')):
        weechat.buffer_set(buf, prop, value)
    return buf
def buffer_input(data, buffer, input_data):
    """Callback for free text (non-commands) typed into the weechat-gcal
    buffer. Intentionally a no-op: it only exists so weechat does not show
    an error when the user types into the buffer.
    """
    return weechat.WEECHAT_RC_OK
def update_gcal_buffer(buffer, events):
    """Redraw the buffer: one heading per day, one "HH:MM summary" line per event.

    ``events`` is the list produced by get_calendar(); each entry carries a
    parseable 'date' string and a 'summary'.
    """
    weechat.buffer_clear(buffer)
    if not events:
        # BUG FIX: the original tested `events == []` (missing None and
        # other empty iterables) and then fell through without returning.
        weechat.prnt(buffer, 'No events for now. YAY!!!')
        return
    # Group events by calendar day; insertion order keeps days chronological
    # because the input is sorted by start time.
    dates = {}
    for event in events:
        dt = datetime_parse(event['date'])
        datestr = dt.strftime('%a %Y-%m-%d')
        timestr = dt.strftime('%H:%M')
        dates.setdefault(datestr, []).append({
            'time': timestr,
            'summary': event['summary']
        })
    for datestr, dt_events in dates.items():
        weechat.prnt(buffer, datestr)
        for event in dt_events:
            weechat.prnt(buffer, '{} {}'.format(event['time'], event['summary']))
# =============================
# MAIN SCRIPT FUNCTIONS
# =============================
def get_calendar(*args):
    """Fetch upcoming events and return them serialized as a JSON list.

    Runs inside a weechat child process ('func:get_calendar'), so the only
    channel back to the plugin is the JSON string this function returns.
    Each entry has 'date' (start dateTime, or date for all-day events) and
    'summary'.
    """
    result = []
    try:
        for event in gc_get_events():
            # All-day events carry 'date' instead of 'dateTime'.
            start = event['start'].get('dateTime', event['start'].get('date'))
            result.append({
                'date': start,
                'summary': event['summary']
            })
    except Exception:
        # BUG FIX: the original assigned the exception object itself to
        # `result`, and json.dumps() raises TypeError on Exception
        # instances, crashing the worker. Fall back to an empty event
        # list, which get_calendar_callback always handles.
        result = []
    return json.dumps(result)
def get_calendar_callback(data, command, return_code, out, err):
    """hook_process callback: render fetched events and raise reminders.

    ``out`` is the JSON list printed by get_calendar(); ``data`` says
    whether the fetch came from the /gcal command or the minute timer.
    Only timer-driven runs produce highlight notifications, so a manual
    /gcal does not spam alerts.
    """
    result = json.loads(out)
    buffer = buffer_get()
    update_gcal_buffer(buffer, result)
    # Notify if any events are happening in 10 minutes!
    if data == CALLED_FROM_TIMER:
        for event in result:
            #weechat.prnt(buffer, 'Handling event!')
            dt = datetime_parse(event['date'])
            # Compare in the event's own timezone to keep the math aware.
            now = datetime.now(tz=dt.tzinfo)
            timediff = dt - now
            minutes_remaining = math.ceil(timediff.total_seconds() / 60)
            #weechat.prnt(buffer, '{} - {} = {} ({} mins)'.format(dt, now, timediff, minutes_remaining))
            # TODO Make minutes_remaining threshold configurable
            if minutes_remaining in NOTIFICATION_THRESHOLDS:
                msg = '[{}m] {}'.format(minutes_remaining, event['summary'])
                weechat.prnt_date_tags(buffer, 0, 'notify_highlight', msg)
    return weechat.WEECHAT_RC_OK
def gcal_command(data, buffer, args):
    """Handler for /gcal: ensure the output buffer exists and kick off an
    asynchronous event fetch (results arrive via get_calendar_callback)."""
    buffer = buffer_get()
    # TODO Implement init (bootstrap the Google authentication flow).
    if args != 'init':
        weechat.hook_process(
            'func:get_calendar',
            TIMEOUT_MS,
            'get_calendar_callback',
            CALLED_FROM_CMD
        )
    return weechat.WEECHAT_RC_OK
def script_main(data, remaining_calls):
    """Timer callback: refresh calendar data in a child process.

    Weechat is single-threaded, so the (potentially slow) Google API call
    is pushed into a separate process and reports back asynchronously via
    get_calendar_callback.
    https://weechat.org/files/doc/stable/weechat_scripting.en.html#weechat_architecture
    """
    weechat.hook_process('func:get_calendar', TIMEOUT_MS,
                         'get_calendar_callback', CALLED_FROM_TIMER)
    return weechat.WEECHAT_RC_OK
# Register the script on /script load.
# This needs to happen first, before any other weechat API call!
weechat.register(SCRIPT_NAME, SCRIPT_AUTHOR, SCRIPT_VERSION, \
SCRIPT_LICENSE, SCRIPT_DESC, SCRIPT_SHUTDOWN_FN, SCRIPT_CHARSET)
# Setup a command to initialize the Google Calendar authentication and show events in a buffer.
weechat.hook_command(
    'gcal',
    'Displays events for today and tomorrow in a new buffer.',
    '[init]',
    ' || init - Initializes the items needed for this plugin to work.',
    '',
    'gcal_command',
    ''
)
# Check once per minute (interval in milliseconds) whether we should
# notify of imminent events; script_main does the actual work.
weechat.hook_timer(60000, 60, 0, 'script_main', '')
|
9,149 | 3fadb91bd2367819a540f687530f4b48ed878423 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# User-facing help texts for the mardict command interface.
# BUG FIX: corrected the user-visible typo "lsit" -> "list" in help_txt.
help_txt = """
:help, show this help menu. :help [command] for detail
:dict [word], only find translation on dict.cn
:google [sentence], only find translation on google api
:lan2lan [sentence], translate from one language to another language
:add [word], add new word to your library
:del [word], delete word from your library
:list [number], list words in your library
:rating [number], list words in your library with a certain rate
:history [number], show your search history
:clear, clear your oldest 100 history
for more information, browser http://mardict.appspot.com
"""
help_dict = """
help on dict:
[usage] :dict word
[intro] translate your word only use dict.cn api
[eg] :dict hello
more on http://mardict.appspot.com/help/#dict
"""
help_google = """
help on google:
[usage] :google word
[intro] translate your word only use google api
[eg] :google google is a bitch
more on http://mardict.appspot.com/help/#google
"""
help_lan2lan = """
help on lan2lan:
[usage] :lan2lan word
[intro] translate from one language to another language by google translation api
[eg] :en2zh hello
more on http://mardict.appspot.com/help/#lan2lan
"""
help_history = """
help on history:
[usage] :history (number)
[intro] list your search history
[eg] :history 9
more on http://mardict.appspot.com/help/#history
"""
help_clear = """
help on clear:
[usage] :clear
[intro] clear your search history
more on http://mardict.appspot.com/help/#clear
"""
help_add = """
help on add:
[usage] :add (word)
[intro] add the new word to your library(storing your unfamiliar word)
[eg] :add hello
more on http://mardict.appspot.com/help/#add
"""
help_del = """
help on del:
[usage] :del word
[intro] delete the word from your library
[eg] :del hello
more on http://mardict.appspot.com/help/#del
"""
help_list = """
help on list:
[usage] :list (number)
[intro] list a certain number of words from your library.
[eg] :list 9
this function is very complex, browser the website.
more on http://mardict.appspot.com/help/#list
"""
help_rating = """
help on rating:
[usage] :rating (number)
[intro] list a certain number of words from your library with a certain rate.
[eg] :rating 0 9
this function is very complex, browser the website.
more on http://mardict.appspot.com/help/#rating
"""
|
9,150 | f97150f60dfb3924cda2c969141d5bfe675725ef | #!env/bin/python3
# Development launcher: run the Flask app with host/port from config.
from app import app
from config import config as cfg
# NOTE(review): debug=True enables Werkzeug's interactive debugger --
# confirm this entry point is never used in production.
app.run(debug=True, host=cfg.APP_HOST, port=cfg.APP_PORT)
|
9,151 | bd2edd5139a9c5050c582a54cdacca2b0739f333 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import warnings
from functools import wraps
import re
import logging
import pandas as pd
import requests
def return_df(field="data"):
    """Decorator factory: convert a successful API response into a DataFrame.

    On a ``code == 200`` response (and ``self.return_df`` enabled) the
    ``resp[field]`` payload becomes a DataFrame; a 'date' column, when
    present, is parsed and used as the index. Any other response is
    passed through untouched.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            payload = func(self, *args, **kwargs)
            if payload.get("code") != 200 or self.return_df is not True:
                return payload
            frame = pd.DataFrame(payload["resp"][field])
            if "date" in frame.columns:
                frame['date'] = frame['date'].apply(
                    lambda s: datetime.datetime.strptime(s, "%Y-%m-%d %H:%M:%S"))
                frame = frame.set_index("date")
            return frame
        return wrapper
    return decorator
class RQOpenClient(object):
    """HTTP client for the rqopen web API (strategy trades/positions/portfolio)."""
    def __init__(self, username, password, logger=None, log_level=logging.DEBUG,
                 base_url="https://rqopen.ricequant.com", timeout=(5, 10), return_df=True):
        """
        :param username: login account; bare mainland mobile numbers get "+86" prepended
        :param password: password
        :param logger: logger to use (a default named "RQOpenClient" is created when omitted)
        :param log_level: logging level
        :param base_url: service address; default is the web endpoint, rqpro 2.0 needs its own
        :param timeout: (connect, read) timeout passed to requests
        :param return_df: when True wrap responses in a DataFrame, otherwise return the raw dict
        """
        self.base_url = base_url
        # tel number need "+86"
        if re.match(r'^[1]([3-9])[0-9]{9}$', username):
            username = "+86" + username
        self.username = username
        self.password = password
        # Session keeps the login cookie across requests
        self.client = requests.Session()
        self.logger = logger if logger else logging.getLogger("RQOpenClient")
        self.logger.setLevel(log_level)
        self.timeout = timeout
        self.return_df = return_df
    def login(self):
        """Authenticate; the session cookie is stored on self.client."""
        self.logger.info("Try login. Username {}".format(self.username))
        resp = self.client.post("{}/login".format(self.base_url),
                                {"username": self.username, "password": self.password}, timeout=self.timeout)
        ret = resp.json()
        self.logger.info("Login response {}".format(ret))
        return ret
    def _do(self, func, *args, **kwargs):
        # Run a request; on 401 re-login and retry once. NOTE(review): any
        # code other than 200/401 also falls through to the single retry --
        # confirm that is intended.
        resp = func(*args, **kwargs)
        if resp["code"] == 401:
            login_resp = self.login()
            if login_resp["code"] == 200:
                self.logger.info("login success")
            else:
                return login_resp
        elif resp["code"] == 200:
            return resp
        resp = func(*args, **kwargs)
        return resp
    def get_day_trades(self, run_id):
        """Deprecated: use current_trades instead."""
        warnings.warn("get_day_trades will be abandoned, please use current_trades", DeprecationWarning)
        return self._do(self._get_day_trades, run_id)
    def get_positions(self, run_id):
        """Deprecated: use current_positions instead."""
        # NOTE(review): the warning text likely meant "get_positions will
        # be abandoned" -- runtime string left untouched here.
        warnings.warn("current_positions will be abandoned, please use current_positions", DeprecationWarning)
        return self._do(self._get_positions, run_id)
    def _get_day_trades(self, run_id):
        resp = self.client.get("{}/pt/load_day_trades/{}".format(self.base_url, run_id), timeout=self.timeout)
        return resp.json()
    def _get_positions(self, run_id):
        resp = self.client.get("{}/pt/load_current_positions/{}".format(self.base_url, run_id), timeout=self.timeout)
        return resp.json()
    # base
    @return_df()
    def trades(self, run_id):
        """get all trades"""
        return self._do(self._get_base, "trades", run_id)
    @return_df()
    def positions(self, run_id):
        """get all positions (market_value)"""
        return self._do(self._get_base, "positions", run_id)
    @return_df()
    def portfolio(self, run_id):
        """get all portfolio"""
        return self._do(self._get_base, "portfolio", run_id)
    @return_df("positions")
    def current_positions(self, run_id):
        """get current positions"""
        return self._do(self._get_base, "pt/load_current_positions", run_id)
    @return_df("trades")
    def current_trades(self, run_id):
        """get current trades"""
        return self._do(self._get_base, "pt/load_day_trades", run_id)
    def _get_base(self, api_path, run_id):
        # Generic GET for "<base_url>/<api_path>/<run_id>" endpoints
        resp = self.client.get("{}/{}/{}".format(self.base_url, api_path, run_id), timeout=self.timeout)
        return resp.json()
|
9,152 | ff26a2c2d8427f1ad4617669e701ea88b34616cd | #! /usr/bin/env python
# coding: utf-8
'''
Author: xiezhw3@163.com
@contact: xiezhw3@163.com
@version: $Id$
Last modified: 2016-01-17
FileName: consumer.py
Description: 从 rabbitmq 拿到消息并存储到数据库
'''
import pika
import json
import logging
import pymongo
import traceback
from conf import config
from code.modules.db_processor.db_processor import DbProcessor
MAX_TRY_TIME = 5
class Consumer(object):
    '''Queue consumer: takes messages off RabbitMQ and stores them in MongoDB.'''
    def __init__(self):
        # DbProcessor owns the MongoDB side of the pipeline
        self.db_processor = DbProcessor()
        credentials = pika.PlainCredentials(config.RABBITMQ_USER,
                                            config.RABBITMQ_PASS)
        parameters = pika.ConnectionParameters(config.RABBITMQ_HOST,
                                               config.RABBITMQ_PORT_1,
                                               '/', credentials)
        connection = pika.BlockingConnection(parameters)
        self.channel = connection.channel()
        self.channel.exchange_declare(exchange=config.RABBITMQ_EXCHANGE,
                                      type='topic')
        # Exclusive server-named queue, bound to the configured routing key
        result = self.channel.queue_declare(exclusive=True)
        self.queue_name = result.method.queue
        self.channel.queue_bind(exchange=config.RABBITMQ_EXCHANGE,
                                queue=self.queue_name,
                                routing_key=config.RABBITMQ_ROUT_KEY)
    def callback(self, ch, method, properties, body):
        """Persist one queue message, retrying failed inserts.

        Insert is attempted up to MAX_TRY_TIME times; because consumption
        uses no_ack=True, a message whose inserts all fail is only logged
        and then dropped.
        """
        if isinstance(body, str):
            body = json.loads(body)
        try_time = 0
        while try_time < MAX_TRY_TIME:
            try_time += 1
            try:
                self.db_processor.insert(body)
                break
            except pymongo.errors.ServerSelectionTimeoutError as error:
                logging.error("Insert record timeout: [%s], [%s], [%s]" %
                              (error.__class__.__name__,
                               error,
                               traceback.format_exc()))
            except Exception as error:
                logging.error("Insert record error: [%s], [%s], [%s]" %
                              (error.__class__.__name__,
                               error,
                               traceback.format_exc()))
    def start(self):
        """Begin consuming; blocks the calling thread until stopped."""
        self.channel.basic_consume(self.callback,
                                   queue=self.queue_name,
                                   no_ack=True)
        self.channel.start_consuming()
    def stop(self):
        """Close the channel, ending consumption."""
        self.channel.close()
|
9,153 | 3c053bf1b572759eddcd310d185f7e44d82171a5 | #coding:utf-8
# Round-trip a CJK character through the GBK codec.
x = '上'
res = x.encode('gbk')  # GBK-encoded bytes
print(res, type(res))
print(res.decode('gbk'))  # decoding restores the original character
|
9,154 | 1edb92a4905048f3961e3067c67ef892d7b8a034 | # Imports
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from torch.utils import data
from torch.utils.data import DataLoader
import torchvision.datasets as datasets
import torchvision.transforms as transforms
# Create Fully Connected Network
class NN(nn.Module):
    """Minimal two-layer fully connected classifier.

    Maps a flattened input of ``input_size`` features through a 50-unit
    hidden layer (ReLU) to ``num_classes`` logits.
    """
    def __init__(self, input_size, num_classes):
        super(NN, self).__init__()
        self.fc1 = nn.Linear(input_size, 50)
        self.fc2 = nn.Linear(50, num_classes)

    def forward(self, x):
        hidden = F.relu(self.fc1(x))
        return self.fc2(hidden)
# Set device: prefer GPU when available
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
# Hyperparameters
input_size =784  # 28x28 MNIST images, flattened
num_classes = 10
learning_rate = 0.001
batch_size = 64
num_epochs = 10
# Load training data (downloaded to dataset/ on first run)
train_dataset = datasets.MNIST(
    root='dataset/',
    train=True,
    transform=transforms.ToTensor(),
    download=True,
)
train_loader = DataLoader(
    dataset=train_dataset,
    batch_size=batch_size,
    shuffle=True,
)
# Load the held-out test split
test_dataset = datasets.MNIST(
    root='dataset/',
    train=False,
    transform=transforms.ToTensor(),
    download=True,
)
# BUG FIX: the test loader previously wrapped train_dataset, so the
# "test" accuracy reported later was really training accuracy.
test_loader = DataLoader(
    dataset=test_dataset,
    batch_size=batch_size,
    shuffle=True,
)
# Initialize network
model = NN(input_size,num_classes).to(device)
# Loss and optimizer
criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(),lr=learning_rate)
# Train network
for epoch in range(num_epochs):
    print("Epoch: "+str(epoch+1))
    # NOTE: the loop variable `data` shadows the `torch.utils.data` module
    # imported at the top of the file.
    for batch_idx, (data, targets) in enumerate(train_loader):
        data = data.to(device=device)
        targets = targets.to(device=device)
        # Flatten each image to a (batch, features) matrix for the MLP
        data = data.reshape(data.shape[0],-1)
        scores = model(data)
        loss = criterion(scores,targets)
        # backward
        optimizer.zero_grad()
        loss.backward()
        # gradient descent or adam step
        optimizer.step()
# Check accuracy on training and test to see how good our model
def check_accuracy(loader, model):
    """Print the classification accuracy of ``model`` over ``loader``.

    Switches the model to eval mode for the measurement and restores
    train mode before returning.
    """
    if loader.dataset.train:
        print("Checking accuracy on training data")
    else:
        print("Checking accuracy on test data")
    num_correct = 0
    num_samples = 0
    model.eval()
    with torch.no_grad():
        for inputs, labels in loader:
            inputs = inputs.to(device=device)
            labels = labels.to(device=device)
            # flatten each image into a single feature vector
            inputs = inputs.reshape(inputs.shape[0], -1)
            logits = model(inputs)
            _, predictions = logits.max(1)
            num_correct += (predictions == labels).sum()
            num_samples += predictions.size(0)
        print(f'Got {num_correct} / {num_samples} with accuracy {float(num_correct)/float(num_samples)*100:.2f}')
    model.train()
# Report accuracy on both loaders defined above.
check_accuracy(train_loader,model)
check_accuracy(test_loader,model)
9,155 | fc6c220f8a3a0e9dd1d6e6e1ca131136db8f8a58 | # -*- coding: utf-8 -*-
"""
Created on Mon Nov 11 18:50:46 2019
@author: kanfar
"""
import numpy as np
import timeit
import matplotlib.pyplot as plt
from numpy import expand_dims, zeros, ones
from numpy.random import randn, randint
from keras.models import load_model
from keras.optimizers import Adam
from keras.models import Model
from keras.layers import Input, Reshape, Flatten, Concatenate
from keras.layers import Dense, Conv2D, Conv2DTranspose
from keras.layers import Dropout, LeakyReLU
class cGAN:
def __init__(self, input_dim1, input_dim2, input_dim3, latent_size):
self.input_dim1 = input_dim1
self.input_dim2 = input_dim2
self.input_dim3 = input_dim3
self.latent_size = latent_size
def discriminator(self):
#conditional input
input_shape = (self.input_dim1, self.input_dim2, self.input_dim3)
input_cond = Input(shape = input_shape)
#generator output
input_x = Input(shape = input_shape)
merge = Concatenate()([input_x, input_cond])
#downsample
out = Conv2D(32, (3,3), strides=(2,2), padding='same')(merge)
out = LeakyReLU(alpha=0.2)(out)
out = Conv2D(32, (3,3), strides=(2,2), padding='same')(out)
out = LeakyReLU(alpha=0.2)(out)
out = Flatten()(out)
out = Dropout(0.5)(out)
y = Dense(1, activation='sigmoid')(out)
# define model
model = Model([input_x, input_cond], y)
# compile model
opt = Adam(lr=0.0002) #0.0002 and beta_1 0.5
model.compile(loss='binary_crossentropy', optimizer=opt, metrics=['accuracy'])
return model
def generator(self):
#losing one pixel, figure out later
image_dim = self.input_dim1
latent_shape = self.latent_size
cond_shape = (image_dim, image_dim, self.input_dim3)
input_latent = Input(shape = (latent_shape,))
num_nodes = image_dim * image_dim
latent = Dense(num_nodes)(input_latent)
latent = LeakyReLU(alpha=0.2)(latent)
latent = Reshape((image_dim,image_dim,1))(latent)
input_cond = Input(shape = cond_shape)
cond = input_cond
merge = Concatenate()([latent,cond])
# upsample to 14x14
out = Conv2D(32, (4,4), strides=(1,1), padding='same')(merge)
out = LeakyReLU(alpha=0.2)(out)
# upsample to 28x28
out = Conv2D(32, (4,4), strides=(1,1), padding='same')(out)
out = LeakyReLU(alpha=0.2)(out)
out = Conv2D(32, (4,4), strides=(1,1), padding='same')(out)
out = LeakyReLU(alpha=0.2)(out)
# output
x = Conv2D(1, (4,4), strides=(1,1), activation='tanh', padding='same')(out) #something key that I don't understand
# define model
model = Model([input_latent, input_cond], x)
return model
def combined(self, g_model, d_model):
#model comprised of two models
# make weights in the discriminator not trainable
d_model.trainable = False
# get noise and label inputs from generator model
input_latent, input_cond = g_model.input #defining the tensors in a short way: this is saying the input to this model is the same size as input to g_model
# get image output from the generator model
x = g_model.output
#can I do x = g_model([input_latent, input_cond]) instead of the above?
# connect image output and label input from generator as inputs to discriminator
y = d_model([x, input_cond]) #why this needs to be connected but not the above???? does the first output take model input as default??????? test this
# define gan model as taking noise and label and outputting a classification
model = Model([input_latent, input_cond], y)
# compile model
opt = Adam(lr=0.0002, beta_1=0.5)
model.compile(loss='binary_crossentropy', optimizer=opt)
return model
def generate_real_samples(self, focused, defocused, n_samples):
idx = randint(0, focused.shape[0], n_samples)
x_real, input_cond = focused[idx,:,:,:], defocused[idx,:,:,:]
y_real = ones((n_samples,1))
return [x_real, input_cond], y_real
def generate_latent(self, latent_size, n_samples):
#generate points in teh latent space
total_latent = randn(latent_size*n_samples)
input_z = total_latent.reshape(n_samples, latent_size)
return input_z
def generate_fake_samples(self, generator, defocused, latent_dim, n_samples):
idx = randint(0, defocused.shape[0], n_samples)
input_cond = defocused[idx,:,:,:]
input_z = self.generate_latent(latent_dim, n_samples)
# predict outputs
x_fake = generator.predict([input_z, input_cond])
# create class labels
y_fake = zeros((n_samples, 1))
return [x_fake, input_cond], y_fake
def generate_gan_input(self, defocused, latent_dim, n_samples):
#defocused = data[1,:,:,:]
#defocused = np.expand_dims(input_cond, axis = -1)
idx = randint(0, defocused.shape[0], n_samples)
input_cond = defocused[idx,:,:,:]
input_z = self.generate_latent(latent_dim, n_samples)
# create class labels
y_gan = ones((n_samples, 1))
return [input_z, input_cond], y_gan
def train(self, g_model, d_model, gan_model, real, input_cond, latent_dim, n_epochs, n_batch, save):
bat_per_epo = int(real.shape[0] / n_batch) #check
half_batch = int(n_batch / 2)
g_loss = np.zeros(n_epochs)
d_loss_real = np.zeros(n_epochs)
d_loss_fake = np.zeros(n_epochs)
# manually enumerate epochs
for i in range(n_epochs):
start = timeit.default_timer()
# enumerate batches over the training set
print('================== Epoch %d ==================\n' % (i+1))
for j in range(bat_per_epo):
# get randomly selected 'real' samples
[x_real, input_cond_real], y_real = self.generate_real_samples(real, input_cond, half_batch)
# update discriminator model weights
d_loss_real[i], _ = d_model.train_on_batch([x_real, input_cond_real], y_real)
# generate 'fake' examples
[x_fake, input_cond_fake], y_fake = self.generate_fake_samples(g_model, input_cond, latent_dim, half_batch)
# update discriminator model weights
d_loss_fake[i], _ = d_model.train_on_batch([x_fake, input_cond_fake], y_fake)
# prepare points in latent space as input for the generator
[z_input, input_cond_gan], y_gan = self.generate_gan_input(input_cond, latent_dim, n_batch)
# update the generator via the discriminator's error
g_loss[i] = gan_model.train_on_batch([z_input, input_cond_gan], y_gan)
# summarize loss on this batch
print('Completed: %.f' % np.divide((j+1)*100,bat_per_epo) +'%')
print('Epoch %d:: d_loss_real = %.3f, d_loss_fake = %.3f g_loss = %.3f' %
(i+1, d_loss_real[i], d_loss_fake[i], g_loss[i]) + '\n')
stop = timeit.default_timer()
print('Time: %.2f min' % ((stop - start)/60))
# save the generator model
g_model.save('./models/cgan_'+ save + '.h5') #save somewhere
# save loss history
loss = np.array([d_loss_real, d_loss_fake, g_loss])
np.save('./models/cgan_loss_' + save, loss)
def generate_fakes_givenOne(self, generator, focused, defocused, latent_dim, n_samples):
idx = randint(0, defocused.shape[0], 1)
x_real = focused[idx,:,:,:]
input_cond = defocused[idx,:,:,:] ##### should last be zero or :?
input_cond = np.repeat(input_cond, n_samples, axis=0)
input_z = self.generate_latent(latent_dim, n_samples)
x_fake = generator.predict([input_z, input_cond])
return x_real, x_fake, input_cond[0,:,:,:]
def generate_fakes_givenMany(self, generator, focused, defocused, latent_dim, n_examples):
    """Stack real/fake/conditioning images for several randomly picked examples.

    Calls generate_fakes_givenOne once per example; behaviour is identical to
    the original implementation (each example gets n_examples - 2 fakes).
    """
    n_samples = n_examples - 2
    img_shape = (focused.shape[1], focused.shape[2], focused.shape[3])
    x_real_many = np.zeros((n_examples,) + img_shape)
    input_cond_many = np.zeros((n_examples,) + img_shape)
    x_fake_many = np.zeros((n_examples, n_samples) + img_shape)
    for ex in range(n_examples):
        real_ex, fake_ex, cond_ex = self.generate_fakes_givenOne(
            generator, focused, defocused, latent_dim, n_samples)
        x_real_many[ex] = real_ex
        x_fake_many[ex] = fake_ex
        input_cond_many[ex] = cond_ex
    return x_real_many, x_fake_many, input_cond_many
|
9,156 | 4dae34b7c90f52314aac5e457addb3700ffcbd28 | import sys
sys.path.append("..\\Pole_IA_Systemes_Experts")
from tkinter import *
from Knowledge_base.Facts import Fact
from Knowledge_base.Rules import Rule
from Backward.Explanation_tree import *
def ask_about_fact(fact: Fact):
    """
    Asks the user whether a fact is true or false through a Tkinter dialog.

    Args:
        fact (Fact): the fact we want to know about

    Returns:
        bool: True if the user clicked "Vrai"; False for "Faux",
        "Ne sais pas", or if the window is closed without answering.
    """
    # Fix: the original stored the answer in a module-level `global boolean`,
    # which leaked state between calls -- closing the window on a later
    # question could silently return the answer of an earlier one. A local
    # mutable holder keeps each call independent.
    answer = {'value': False}

    window = Tk()
    window.title("Question !")
    Label(window, text=fact.description, font=("Arial", 18)).grid(padx="1c", pady=("0.5c", "1c"), columnspan=3)

    def fact_is_true():
        answer['value'] = True
        window.quit()
        window.destroy()

    def fact_not_true():
        answer['value'] = False
        window.quit()
        window.destroy()

    Button(window, text="Vrai", fg="green", command=fact_is_true, width="15") \
        .grid(column=0, row=1, padx="0.5c", pady="0.5c")
    Button(window, text="Ne sais pas", fg="black", command=fact_not_true, width="15") \
        .grid(column=1, row=1, padx="0.5c", pady="0.5c")
    Button(window, text="Faux", fg="red", command=fact_not_true, width="15") \
        .grid(column=2, row=1, padx="0.5c", pady="0.5c")
    window.mainloop()
    return answer['value']
def show_result(goal: Fact, description: str, true_fact: bool, facts: list, used_rules: list):
    """
    Displays the result of the inference engine and the explanation of the
    facts and rules used to reach this conclusion.

    Args:
        goal (Fact): the fact under study
        description (str): the explanation of the rules and facts used
        true_fact (bool): True if the goal is verified, False otherwise
        facts (list[Fact]): list of the known facts
        used_rules (list[Rule]): list of the rules that have been used
    """
    root = Tk()
    root.title("Résultat !")
    if true_fact:
        # Proven goal: show its description and the justification below it.
        Label(root, text=goal.description, font=("Arial", 18)) \
            .grid(padx="1c", pady="1c")
        Label(root, text="car {}".format(description), font=("Arial", 10)) \
            .grid(row=1, padx="1c", pady="1c")
    else:
        # The engine could not prove the goal.
        Label(root, text="Impossible à dire", font=("Arial", 18)) \
            .grid(padx="1c", pady="1c")
    # Render the explanation tree (helper from Backward.Explanation_tree).
    display_explanation_tree(facts, used_rules, root)
    root.mainloop()
|
9,157 | 9515dcdfc0ece1a6740d6e7075bbcd1c20977590 | #! /usr/bin/env python2
############################################################
# Program is part of PySAR v1.2 #
# Copyright(c) 2015, Heresh Fattahi, Zhang Yunjun #
# Author: Heresh Fattahi, Zhang Yunjun #
############################################################
import os
import sys
import argparse
import re
try:
import pyaps as pa
except:
sys.exit('Cannot import pyaps into Python!')
import h5py
import numpy as np
import pysar._datetime as ptime
import pysar._pysar_utilities as ut
import pysar._readfile as readfile
import pysar._writefile as writefile
###############################################################
def get_delay(grib_file, atr, inps_dict):
    '''Get delay matrix using PyAPS for one acquisition.

    Inputs:
        grib_file - string, grib file path
        atr       - dict of file attributes (X_FIRST marks a geocoded file;
                    ref_y/ref_x give the reference pixel row/col)
        inps_dict - dict with dem_file, grib_source, delay_type, inc_angle
    Output:
        phs - 2D np.array, tropospheric phase delay relative to ref_y/x,
              projected into LOS and sign-reversed
    '''
    # Geocoded files use the geo reader, radar-coordinate files the rdr reader.
    is_geocoded = 'X_FIRST' in atr.keys()
    reader = pa.PyAPS_geo if is_geocoded else pa.PyAPS_rdr
    aps = reader(grib_file, inps_dict['dem_file'], grib=inps_dict['grib_source'],
                 verb=True, Del=inps_dict['delay_type'])
    phs = np.zeros((aps.ny, aps.nx), dtype=np.float32)
    aps.getdelay(phs, inc=0.0)
    # Get relative phase delay in space (reference the map to ref pixel).
    ref_row = int(atr['ref_y'])
    ref_col = int(atr['ref_x'])
    phs -= phs[ref_row, ref_col]
    # Project the zenith delay into the line-of-sight direction.
    phs /= np.cos(inps_dict['inc_angle'])
    # Reverse the sign for consistency between different phase correction steps/methods.
    phs *= -1
    return phs
def date_list2grib_file(date_list, hour, grib_source, grib_dir):
    '''Map acquisition dates onto the grib/nc file paths of one weather source.

    Unknown sources yield bare "grib_dir/" entries, matching the original
    if/elif chain that appended nothing for an unmatched source.
    '''
    name_patterns = {
        'ECMWF' : 'ERA-Int_%s_%s.grb',
        'ERA'   : 'ERA_%s_%s.grb',
        'NARR'  : 'narr-a_221_%s_%s00_000.grb',
        'MERRA' : 'merra-%s-%s.nc4',
        'MERRA1': 'merra-%s-%s.hdf',
    }
    pattern = name_patterns.get(grib_source)
    grib_file_list = []
    for d in date_list:
        basename = pattern % (d, hour) if pattern is not None else ''
        grib_file_list.append(grib_dir + '/' + basename)
    return grib_file_list
def dload_grib(date_list, hour, grib_source='ECMWF', weather_dir='./'):
'''Download weather re-analysis grib files using PyAPS
Inputs:
date_list : list of string in YYYYMMDD format
hour : string in HH:MM or HH format
grib_source : string,
weather_dir : string,
Output:
grib_file_list : list of string
'''
## Grib data directory
weather_dir = os.path.abspath(weather_dir)
grib_dir = weather_dir+'/'+grib_source
if not os.path.isdir(grib_dir):
print 'making directory: '+grib_dir
os.makedirs(grib_dir)
## Date list to grib file list
grib_file_list = date_list2grib_file(date_list, hour, grib_source, grib_dir)
## Get date list to download (skip already downloaded files)
grib_file_existed = ut.get_file_list(grib_file_list)
if grib_file_existed:
grib_filesize_digit = ut.mode([len(str(os.path.getsize(i))) for i in grib_file_existed])
grib_filesize_max2 = ut.mode([str(os.path.getsize(i))[0:2] for i in grib_file_existed])
grib_file_corrupted = [i for i in grib_file_existed if (len(str(os.path.getsize(i))) != grib_filesize_digit or\
str(os.path.getsize(i))[0:2] != grib_filesize_max2)]
print 'file size mode: %se%d bytes' % (grib_filesize_max2, grib_filesize_digit-2)
print 'number of grib files existed : %d' % len(grib_file_existed)
if grib_file_corrupted:
print '------------------------------------------------------------------------------'
print 'corrupted grib files detected! Delete them and re-download...'
print 'number of grib files corrupted : %d' % len(grib_file_corrupted)
for i in grib_file_corrupted:
rmCmd = 'rm '+i
print rmCmd
os.system(rmCmd)
grib_file_existed.remove(i)
print '------------------------------------------------------------------------------'
grib_file2download = sorted(list(set(grib_file_list) - set(grib_file_existed)))
date_list2download = [str(re.findall('\d{8}', i)[0]) for i in grib_file2download]
print 'number of grib files to download: %d' % len(date_list2download)
print '------------------------------------------------------------------------------\n'
## Download grib file using PyAPS
if grib_source == 'ECMWF' : pa.ECMWFdload( date_list2download, hour, grib_dir)
elif grib_source == 'ERA' : pa.ERAdload( date_list2download, hour, grib_dir)
elif grib_source == 'NARR' : pa.NARRdload( date_list2download, hour, grib_dir)
elif grib_source == 'MERRA' : pa.MERRAdload( date_list2download, hour, grib_dir)
elif grib_source == 'MERRA1': pa.MERRA1dload(date_list2download, hour, grib_dir)
return grib_file_existed
###############################################################
EXAMPLE='''example:
tropcor_pyaps.py timeseries.h5 -d geometryRadar.h5 -i geometryRadar.h5
tropcor_pyaps.py timeseries.h5 -d geometryGeo.h5 -i geometryGeo.h5 --weather-dir /famelung/data/WEATHER
tropcor_pyaps.py -d srtm1.dem -i 30 --hour 00 --ref-yx 2000 2500 --date-list date_list.txt
tropcor_pyaps.py timeseries.h5 -d demRadar.h5 -s NARR
tropcor_pyaps.py timeseries.h5 -d demRadar.h5 -s MERRA --delay dry -i 23
tropcor_pyaps.py timeseries_LODcor.h5 -d demRadar.h5
tropcor_pyaps.py -s ECMWF --hour 18 --date-list date_list.txt --download
tropcor_pyaps.py -s ECMWF --hour 18 --date-list bl_list.txt --download
'''
REFERENCE='''reference:
Jolivet, R., R. Grandin, C. Lasserre, M.-P. Doin and G. Peltzer (2011), Systematic InSAR tropospheric
phase delay corrections from global meteorological reanalysis data, Geophys. Res. Lett., 38, L17311,
doi:10.1029/2011GL048757
'''
TEMPLATE='''
## 7. Tropospheric Delay Correction (optional and recommended)
## correct tropospheric delay using the following methods:
## a. pyaps - use weather re-analysis data (Jolivet et al., 2011, GRL, need to install PyAPS; Dee et al., 2011)
## b. height_correlation - correct stratified tropospheric delay (Doin et al., 2009, J Applied Geop)
## c. base_trop_cor - (not recommend) baseline error and stratified tropo simultaneously (Jo et al., 2010, Geo J)
pysar.troposphericDelay.method = auto #[pyaps / height_correlation / base_trop_cor / no], auto for pyaps
pysar.troposphericDelay.weatherModel = auto #[ECMWF / MERRA / NARR], auto for ECMWF, for pyaps method
pysar.troposphericDelay.polyOrder = auto #[1 / 2 / 3], auto for 1, for height_correlation method
pysar.troposphericDelay.looks = auto #[1-inf], auto for 8, Number of looks to be applied to interferogram
'''
DATA_INFO='''
re-analysis_dataset coverage temporal_resolution spatial_resolution latency analysis
------------------------------------------------------------------------------------------------------------
ERA-Interim (by ECMWF) Global 00/06/12/18 UTC 0.75 deg (~83 km) 2-month 4D-var
MERRA2 (by NASA Goddard) Global 00/06/12/18 UTC 0.5 * 0.625 (~50 km) 2-3 weeks 3D-var
To download MERRA2, you need an Earthdata account, and pre-authorize the "NASA GESDISC DATA ARCHIVE" application, following https://disc.gsfc.nasa.gov/earthdata-login.
'''
def cmdLineParse():
    """Build the argument parser, parse sys.argv and validate the inputs.

    Returns the argparse Namespace. Exits with the help text when neither a
    delay calculation (timeseries/dem/date-list) nor --download is requested.
    """
    parser = argparse.ArgumentParser(description='Tropospheric correction using weather models\n'+\
                                     '  PyAPS is used to download and calculate the delay for each time-series epoch.',\
                                     formatter_class=argparse.RawTextHelpFormatter,\
                                     epilog=REFERENCE+'\n'+DATA_INFO+'\n'+EXAMPLE)
    parser.add_argument(dest='timeseries_file', nargs='?', help='timeseries HDF5 file, i.e. timeseries.h5')
    parser.add_argument('-d','--dem', dest='dem_file',\
                        help='DEM file, i.e. radar_4rlks.hgt, srtm1.dem')
    parser.add_argument('-i', dest='inc_angle', default='30',\
                        help='a file containing all incidence angles, or a number representing for the whole image.')
    parser.add_argument('--weather-dir', dest='weather_dir', \
                        help='directory to put downloaded weather data, i.e. ./../WEATHER\n'+\
                             'use directory of input timeseries_file if not specified.')
    parser.add_argument('--delay', dest='delay_type', default='comb', choices={'comb','dry','wet'},\
                        help='Delay type to calculate, comb contains both wet and dry delays')
    parser.add_argument('--download', action='store_true', help='Download weather data only.')
    parser.add_argument('--date-list', dest='date_list_file',\
                        help='Read the first column of text file as list of date to download data\n'+\
                             'in YYYYMMDD or YYMMDD format')
    parser.add_argument('--ref-yx', dest='ref_yx', type=int, nargs=2, help='reference pixel in y/x')
    parser.add_argument('-s', dest='weather_model',\
                        default='ECMWF', choices={'ECMWF','ERA-Interim','ERA','MERRA','MERRA1','NARR'},\
                        help='source of the atmospheric data.\n'+\
                             'By the time of 2018-Mar-06, ERA and ECMWF data download link is working.\n'+\
                             'NARR is working for 1979-Jan to 2014-Oct.\n'+\
                             'MERRA(2) is not working.')
    parser.add_argument('--hour', help='time of data in HH, e.g. 12, 06')
    parser.add_argument('--template', dest='template_file',\
                        help='template file with input options below:\n'+TEMPLATE)
    parser.add_argument('-o', dest='out_file', help='Output file name for trospheric corrected timeseries.')

    inps = parser.parse_args()
    # Calculate DELAY or DOWNLOAD DATA ONLY, required one of them
    if not inps.download and not inps.dem_file and ( not inps.timeseries_file or not inps.date_list_file ):
        parser.print_help()
        sys.exit(1)
    return inps
###############################################################
def main(argv):
inps = cmdLineParse()
k = None
atr = dict()
if inps.timeseries_file:
inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
atr = readfile.read_attribute(inps.timeseries_file)
k = atr['FILE_TYPE']
elif inps.dem_file:
inps.dem_file = ut.get_file_list([inps.dem_file])[0]
atr = readfile.read_attribute(inps.dem_file)
if 'ref_y' not in atr.keys() and inps.ref_yx:
print 'No reference info found in input file, use input ref_yx: '+str(inps.ref_yx)
atr['ref_y'] = inps.ref_yx[0]
atr['ref_x'] = inps.ref_yx[1]
##Read Incidence angle: to map the zenith delay to the slant delay
if os.path.isfile(inps.inc_angle):
inps.inc_angle = readfile.read(inps.inc_angle, epoch='incidenceAngle')[0]
else:
inps.inc_angle = float(inps.inc_angle)
print 'incidence angle: '+str(inps.inc_angle)
inps.inc_angle = inps.inc_angle*np.pi/180.0
##Prepare DEM file in ROI_PAC format for PyAPS to read
if inps.dem_file:
inps.dem_file = ut.get_file_list([inps.dem_file])[0]
if os.path.splitext(inps.dem_file)[1] in ['.h5']:
print 'convert DEM file to ROIPAC format'
dem, atr_dem = readfile.read(inps.dem_file, epoch='height')
if 'Y_FIRST' in atr.keys():
atr_dem['FILE_TYPE'] = '.dem'
else:
atr_dem['FILE_TYPE'] = '.hgt'
outname = os.path.splitext(inps.dem_file)[0]+'4pyaps'+atr_dem['FILE_TYPE']
inps.dem_file = writefile.write(dem, atr_dem, outname)
print '*******************************************************************************'
print 'Downloading weather model data ...'
## Get Grib Source
if inps.weather_model in ['ECMWF','ERA-Interim']: inps.grib_source = 'ECMWF'
elif inps.weather_model == 'ERA' : inps.grib_source = 'ERA'
elif inps.weather_model == 'MERRA': inps.grib_source = 'MERRA'
elif inps.weather_model == 'NARR' : inps.grib_source = 'NARR'
else: raise Reception('Unrecognized weather model: '+inps.weather_model)
print 'grib source: '+inps.grib_source
# Get weather directory
if not inps.weather_dir:
if inps.timeseries_file:
inps.weather_dir = os.path.dirname(os.path.abspath(inps.timeseries_file))+'/../WEATHER'
elif inps.dem_file:
inps.weather_dir = os.path.dirname(os.path.abspath(inps.dem_file))+'/../WEATHER'
else:
inps.weather_dir = os.path.abspath(os.getcwd())
print 'Store weather data into directory: '+inps.weather_dir
# Get date list to download
if not inps.date_list_file:
print 'read date list info from: '+inps.timeseries_file
h5 = h5py.File(inps.timeseries_file, 'r')
if 'timeseries' in h5.keys():
date_list = sorted(h5[k].keys())
elif k in ['interferograms','coherence','wrapped']:
ifgram_list = sorted(h5[k].keys())
date12_list = ptime.list_ifgram2date12(ifgram_list)
m_dates = [i.split('-')[0] for i in date12_list]
s_dates = [i.split('-')[1] for i in date12_list]
date_list = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
else:
raise ValueError('Un-support input file type:'+k)
h5.close()
else:
date_list = ptime.yyyymmdd(np.loadtxt(inps.date_list_file, dtype=str, usecols=(0,)).tolist())
print 'read date list info from: '+inps.date_list_file
# Get Acquisition time - hour
if not inps.hour:
inps.hour = ptime.closest_weather_product_time(atr['CENTER_LINE_UTC'], inps.grib_source)
print 'Time of cloest available product: '+inps.hour
## Download data using PyAPS
inps.grib_file_list = dload_grib(date_list, inps.hour, inps.weather_model, inps.weather_dir)
if inps.download:
print 'Download completed, exit as planned.'
return
print '*******************************************************************************'
print 'Calcualting delay for each epoch.'
## Calculate tropo delay using pyaps
length = int(atr['FILE_LENGTH'])
width = int(atr['WIDTH'])
date_num = len(date_list)
trop_ts = np.zeros((date_num, length, width), np.float32)
for i in range(date_num):
grib_file = inps.grib_file_list[i]
date = date_list[i]
print 'calculate phase delay on %s from file %s' % (date, os.path.basename(grib_file))
trop_ts[i] = get_delay(grib_file, atr, vars(inps))
## Convert relative phase delay on reference date
try: ref_date = atr['ref_date']
except: ref_date = date_list[0]
print 'convert to relative phase delay with reference date: '+ref_date
ref_idx = date_list.index(ref_date)
trop_ts -= np.tile(trop_ts[ref_idx,:,:], (date_num, 1, 1))
## Write tropospheric delay to HDF5
tropFile = inps.grib_source+'.h5'
print 'writing >>> %s' % (tropFile)
h5trop = h5py.File(tropFile, 'w')
group_trop = h5trop.create_group('timeseries')
print 'number of acquisitions: '+str(date_num)
prog_bar = ptime.progress_bar(maxValue=date_num)
for i in range(date_num):
date = date_list[i]
group_trop.create_dataset(date, data=trop_ts[i], compression='gzip')
prog_bar.update(i+1, suffix=date)
prog_bar.close()
# Write Attributes
for key,value in atr.iteritems():
group_trop.attrs[key] = value
h5trop.close()
## Write corrected Time series to HDF5
if k == 'timeseries':
if not inps.out_file:
inps.out_file = os.path.splitext(inps.timeseries_file)[0]+'_'+inps.grib_source+'.h5'
print 'writing >>> %s' % (inps.out_file)
h5ts = h5py.File(inps.timeseries_file, 'r')
h5tsCor = h5py.File(inps.out_file, 'w')
group_tsCor = h5tsCor.create_group('timeseries')
print 'number of acquisitions: '+str(date_num)
prog_bar = ptime.progress_bar(maxValue=date_num)
for i in range(date_num):
date = date_list[i]
ts = h5ts['timeseries'].get(date)[:]
group_tsCor.create_dataset(date, data=ts-trop_ts[i], compression='gzip')
prog_bar.update(i+1, suffix=date)
prog_bar.close()
h5ts.close()
# Write Attributes
for key,value in atr.iteritems():
group_tsCor.attrs[key] = value
h5tsCor.close()
# Delete temporary DEM file in ROI_PAC format
if '4pyaps' in inps.dem_file:
rmCmd = 'rm %s %s.rsc' % (inps.dem_file, inps.dem_file)
print rmCmd
os.system(rmCmd)
print 'Done.'
return inps.out_file
###############################################################
if __name__ == '__main__':
main(sys.argv[1:])
|
9,158 | 13b2e05f12c6d0cd91e89f01e7eef610b1e99856 | # from __future__ import annotations
from typing import List,Union,Tuple,Dict,Set
import sys
input = sys.stdin.readline
# from collections import defaultdict,deque
# from itertools import permutations,combinations
# from bisect import bisect_left,bisect_right
import heapq
# sys.setrecursionlimit(10**5)
# class UnionFind():
# def __init__(self, N:int):
# self.par = [-1]*N
# self.size = [1]*N
# def root(self, x:int):
# if self.par[x] == -1: return x
# self.par[x] = self.root(self.par[x])
# self.size[x] = 1
# return self.par[x]
# def unite(self, x:int, y:int):
# rx,ry = self.root(x), self.root(y)
# if rx == ry: return False
# if self.size[rx] >= self.size[ry]:
# self.par[ry] = rx
# self.size[rx] += self.size[ry]
# else:
# self.par[rx] = ry
# self.size[ry] += self.size[rx]
# return True
# #! クラスカル法
# def main():
# N,M,K = map(int, input().split())
# edges = []
# for _ in range(M):
# a,b,c = map(int, input().split())
# a -= 1
# b -= 1
# edges.append((c,a,b))
# edges.sort()
# unionfind = UnionFind(N)
# ans = 0
# used = []
# for c,a,b in edges:
# if unionfind.unite(a,b):
# ans += c
# used.append(c)
# for i in range(1,K):
# ans -= used[-i]
# print(ans)
#! プリム法
def main():
    """Prim's MST where the K-1 most expensive chosen edges become free.

    Reads N M K, then M undirected edges (1-indexed, with toll c); prints the
    total toll of a minimum spanning tree minus its K-1 largest tolls.
    """
    n_city, n_road, n_free = map(int, input().split())
    adj = [[] for _ in range(n_city)]
    for _ in range(n_road):
        u, v, toll = map(int, input().split())
        u -= 1
        v -= 1
        adj[u].append((toll, v))
        adj[v].append((toll, u))

    total = 0
    picked = []                  # tolls of the edges taken into the tree
    visited = [False] * n_city
    heap = [(0, 0)]              # (toll, city), seeded with city 0 at cost 0
    while heap:
        fee, city = heapq.heappop(heap)
        if visited[city]:
            continue
        visited[city] = True
        total += fee
        picked.append(fee)
        for next_fee, nb in adj[city]:
            if not visited[nb]:
                heapq.heappush(heap, (next_fee, nb))

    # The n_free-1 largest picked tolls are waived.
    picked.sort(reverse=True)
    if n_free > 1:
        total -= sum(picked[:n_free - 1])
    print(total)
if __name__ == '__main__':
main() |
9,159 | 6990b5f34af654b4e1a39c3d73b6822fa48e4835 | import requests
import re
import time
import os
import argparse
import json
url = "https://contactform7.com/captcha/"
headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/12.1.1 Safari/605.1.15',
'Content-Type': "multipart/form-data; boundary=----WebKitFormBoundaryQgctpYC5kRiIjznW","Connection": "keep-alive",
"Cookie": "lang = en_US;_ga = GA1.2.765999315.1562601614;_gid = GA1.2.1701684676.1562913704;__cfduid = d695b369369d5130db03260060ed2edec1562601611"
}
# Command-line options for the captcha scraper.
ap = argparse.ArgumentParser()
ap.add_argument("-o", "--output", required=True, help="Path to save the images")
# type=int fix: the value is compared against the integer download counter
# later (`count < args["number"]`); without it a value given via -n arrives
# as a str and the comparison raises TypeError on Python 3. The default 500
# was already an int, masking the bug in the no-argument case.
ap.add_argument("-n", "--number", required=False, type=int, default=500,
                help="number of images to download")
args = vars(ap.parse_args())
# One session keeps cookies between the page fetch and the image downloads.
s=requests.Session()
result = s.get(url, headers=headers).content.decode("UTF-8")
count =1
# Collect every <img src="...NNN.png"> captcha URL embedded in the demo page.
result = re.findall("src=\"(.*[0-9]{1,}\.png)\"", result)
for j in result:
    print("\033[095m Downloading image \033[00m : \033[092m {}/{} \033[00m ".format(count, args["number"]))
    print(j.encode("ascii"))
    # NOTE(review): requests is handed a bytes URL here -- works, but plain str would do.
    r = s.get(j.encode("ascii"), headers=headers)
    p = os.path.sep.join([args["output"], "{}.jpg".format(str(count).zfill(5))])
    f = open(p, "wb")
    f.write(r.content)
    f.close()
    time.sleep(0.1)  # throttle to be polite to the server
    count += 1

# Switch to the contact-form feedback endpoint, which returns three fresh
# captcha image URLs as JSON on every POST.
url = "https://contactform7.com/wp-json/contact-form-7/v1/contact-forms/1209/feedback"
images=["captcha-118","captcha-170","captcha-778"]
while count<args["number"]:
    # NOTE(review): make sure the -n option parses to int (type=int on the
    # argparse argument); a str here makes this comparison raise TypeError.
    try:
        s = requests.Session()
        result = json.loads(s.post(url, headers=headers).content.decode("UTF-8"))
        #print(result["captcha"])
        #print(result["captcha"][u'captcha-118'].encode("ascii"))
        for j in range(3):
            print("\033[095m Downloading image \033[00m : \033[092m {}/{} \033[00m ".format(count,args["number"]))
            # print(j.encode("ascii"))
            r = s.get(result["captcha"][images[j]].encode("ascii"), headers=headers)
            # Save as zero-padded sequential jpg files in the output directory.
            p= os.path.sep.join([args["output"],"{}.jpg".format(str(count).zfill(5))])
            f=open(p,"wb")
            f.write(r.content)
            f.close()
            time.sleep(0.1)
            count+=1
    except Exception:
        # Best-effort loop: log the failure, back off briefly and retry.
        print("\033[92m Error Downloading Webpage \033[00m")
        time.sleep(1)
9,160 | 387c48fcf00480a820fb407f5bad1d9f41b28e7a | #!/usr/bin/python
# coding=utf-8
import re
# URL templates for the pesticide query detail page; the two captured ids are
# substituted after str1 and str2.
str1 = 'http://www.chinapesticide.org.cn/myquery/querydetail?pdno='
str2 = '&pdrgno='
f = open('aaa.txt', 'r')
source = f.read()
# Capture the argument list of each JS open('...') call in the scraped page.
# NOTE(review): the bracketed classes [(\'] and [\'] each match a single "("
# or "'" character; confirm the pattern against the actual aaa.txt content.
rr = re.compile(r'open[(\'](.*)[\']')
s=rr.findall(source)
for line in s:
    # Each match looks like "'<pdno>','<pdrgno>'..." -- split and strip quotes.
    temps = line.split(',')
    a = temps[0]
    b = temps[1]
    # Python 2 print statement: emit the final query URL.
    print str1 + a.replace('\'', '').strip() + str2 + b.replace('\'','').strip()
f.close()
|
9,161 | 00ec56420831d8f4ab14259c7b07f1be0bcb7d78 | # -*- coding: utf-8 -*-
# @Time : 2018/12/13 21:32
# @Author : sundongjian
# @Email : xiaobomentu@163.com
# @File : __init__.py.py
# @Software: PyCharm |
9,162 | fef1cf75de8358807f29cd06d2338e087d6f2d23 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""The GIFT module provides basic functions for interfacing with some of the GIFT tools.
In order to use the standalone MCR version of GIFT, you need to ensure that
the following commands are executed at the beginning of your script::
from nipype.interfaces import gift
matlab_cmd = '/path/to/run_groupica.sh /path/to/compiler_runtime/v901/ '
gift.GICACommand.set_mlab_paths(matlab_cmd=matlab_cmd,use_mcr=True)
"""
__docformat__ = 'restructuredtext'
# Standard library imports
import os
# Local imports
from ..base import (BaseInterface, traits, isdefined, InputMultiPath,
BaseInterfaceInputSpec, Directory, Undefined)
from ..matlab import MatlabCommand
class GIFTCommandInputSpec(BaseInterfaceInputSpec):
    """Inputs shared by all GIFT interfaces (mirrors nipype's matlab style)."""
    # Command used to launch MATLAB (or the MCR runner script).
    matlab_cmd = traits.Str(desc='matlab command to use')
    # Extra directories to add to the MATLAB path.
    paths = InputMultiPath(Directory(), desc='Paths to add to matlabpath')
    # Whether to execute the generated m-code from an m-file.
    mfile = traits.Bool(True, desc='Run m-code using m-file', usedefault=True)
    # Whether to run through the standalone GIFT MCR build.
    use_mcr = traits.Bool(desc='Run m-code using GIFT MCR')
class GIFTCommandOutputSpec( BaseInterfaceInputSpec):
    """Outputs shared by all GIFT interfaces.

    NOTE(review): this inherits BaseInterfaceInputSpec; nipype output specs
    conventionally derive from TraitedSpec -- confirm whether this is intended.
    """
    # Raw MATLAB stdout captured from the run.
    matlab_output = traits.Str( )
class GIFTCommand(BaseInterface):
    """Extends `BaseInterface` class to implement GIFT specific interfaces.

    WARNING: Pseudo prototype class, meant to be subclassed
    """
    input_spec = GIFTCommandInputSpec
    output_spec = GIFTCommandOutputSpec

    # Class-wide defaults, set once for all instances via set_mlab_paths().
    _matlab_cmd = None
    _paths = None
    _use_mcr = None

    def __init__(self, **inputs):
        """Wire trait-change notification and build the MatlabCommand."""
        super(GIFTCommand, self).__init__(**inputs)
        # Rebuild the underlying MatlabCommand whenever one of these changes.
        self.inputs.on_trait_change(self._matlab_cmd_update, ['matlab_cmd','mfile','paths','use_mcr'])
        self._find_mlab_cmd_defaults()
        self._check_mlab_inputs()
        self._matlab_cmd_update()

    @classmethod
    def set_mlab_paths(cls, matlab_cmd=None, paths=None, use_mcr=None):
        """Set class-wide defaults for the matlab command, paths and MCR flag."""
        cls._matlab_cmd = matlab_cmd
        cls._paths = paths
        cls._use_mcr = use_mcr

    def _find_mlab_cmd_defaults(self):
        # check if the user has set environment variables to enforce
        # the standalone (MCR) version of GIFT
        # NOTE(review): currently a no-op -- it only re-assigns True when the
        # flag is already truthy, and no environment variable is actually read.
        if self._use_mcr:
            self._use_mcr = True

    def _matlab_cmd_update(self):
        """Recreate self.mlab from the current input traits."""
        # MatlabCommand has to be created here,
        # because matlab_cmb is not a proper input
        # and can be set only during init
        matlab_cmd_str = self.inputs.matlab_cmd
        if isdefined(self.inputs.use_mcr) and self.inputs.use_mcr:
            # The MCR launcher script expects a trailing space before its args.
            if not matlab_cmd_str[-1] == " ":
                matlab_cmd_str = matlab_cmd_str + " "
        self.mlab = MatlabCommand(matlab_cmd=matlab_cmd_str,
                                  mfile=self.inputs.mfile,
                                  paths=self.inputs.paths)
        self.mlab.inputs.script_file = 'pyscript_%s.m' % self.__class__.__name__.split('.')[-1].lower()
        if isdefined(self.inputs.use_mcr) and self.inputs.use_mcr:
            # MCR builds do not accept the interactive-session flags.
            self.mlab.inputs.nodesktop = Undefined
            self.mlab.inputs.nosplash = Undefined
            self.mlab.inputs.single_comp_thread = Undefined
            self.mlab.inputs.uses_mcr = True
            self.mlab.inputs.mfile = True

    def _check_mlab_inputs(self):
        """Fill any per-instance input not set explicitly from the class defaults."""
        if not isdefined(self.inputs.matlab_cmd) and self._matlab_cmd:
            self.inputs.matlab_cmd = self._matlab_cmd
        if not isdefined(self.inputs.paths) and self._paths:
            self.inputs.paths = self._paths
        if not isdefined(self.inputs.use_mcr) and self._use_mcr:
            self.inputs.use_mcr = self._use_mcr

    def _run_interface(self, runtime):
        """Executes the GIFT function using MATLAB."""
        self.mlab.inputs.script = self._make_matlab_command()
        results = self.mlab.run()
        runtime.returncode = results.runtime.returncode
        if self.mlab.inputs.uses_mcr:
            # The MCR wrapper reports failure via 'Skipped' in stdout rather
            # than through the process return code.
            if 'Skipped' in results.runtime.stdout:
                self.raise_exception(runtime)
        runtime.stdout = results.runtime.stdout
        runtime.stderr = results.runtime.stderr
        runtime.merged = results.runtime.merged
        return runtime

    def _list_outputs(self):
        """Determine the expected outputs based on inputs."""
        outputs = self._outputs().get()
        return outputs

    def _make_matlab_command(self):
        """Generates a mfile to build job structure

        Returns
        -------
        mscript : string
            contents of a script called by matlab
        """
        raise NotImplementedError
|
9,163 | 88e4e6647d4720d1c99f3e3438100790903921b5 | import os
import click
import csv
import sqlite3
from sqlite3.dbapi2 import Connection
import requests
import mimetypes
from urllib.parse import urljoin, urlparse
from lxml.html.soupparser import fromstring
from lxml import etree
from lxml.etree import tostring
from analysis import lmdict, tone_count_with_negation_check
from parser import parse_text
@click.command()
@click.option('-s','--batch-size', 'batch_size', default=50)
def analyze(batch_size):
    """Fetch each unanalyzed report, run sentiment analysis, store results."""
    db = db_connect()
    db_ensure_init(db)
    cmd = db.execute("SELECT id, url FROM reports WHERE is_analyzed = 0")
    # fetchmany() returns [] when exhausted, which is iter()'s sentinel.
    for batch in iter(lambda: cmd.fetchmany(batch_size), []):
        to_update = list()
        for r in batch:
            print("Analyzing: " + r[1])
            response = requests.get(r[1])
            text = parse_text(response.text)
            print(text[0:400] + '\n[CLIPPED]')
            # perform text analysis
            result = tone_count_with_negation_check(lmdict, text)
            has_positive_sentiment = result[1] > result[2]
            # TODO: FIXME
            # Here you should pass in all the variables that you want to store in the database
            # Refer to "db_update" method in what order params should be passed
            to_update.append((
                True,                   # is_analyzed
                has_positive_sentiment,
                result[0],              # word_count
                result[1],              # pos_count
                result[2],              # neg_count
                " ".join(result[3]),    # pos_words
                " ".join(result[4]),    # neg_words
                r[0]))                  # report id (WHERE key)
        db_update(db, to_update)
@click.command()
@click.argument('start', nargs=1)
@click.argument('end', nargs=1)
@click.option('-s','--batch-size', 'batch_size', default=50)
def fetch_report_urls(start, end, batch_size):
    """Fetches and stores the 10-K report URLs

    START and END bound (inclusive) the filing years to process.
    """
    db = db_connect()
    db_ensure_init(db)

    with open('log.csv', 'w', newline='') as log:
        logwriter = csv.writer(log)
        # Parameterized query instead of str.format: the year bounds come
        # from the command line, so interpolating them into the SQL string
        # was an injection risk; `?` placeholders also handle typing.
        cmd = db.execute("""
            SELECT ix.id, ix.conm, ix.type, ix.cik, ix.date, ix.path
            FROM "index" ix
            LEFT JOIN reports r ON ix.id = r.index_id
            WHERE ix.type = '10-K' AND r.id IS NULL AND
                  CAST(strftime('%Y', DATE(ix.date)) as INT) >= ? AND
                  CAST(strftime('%Y', DATE(ix.date)) as INT) <= ?
            ORDER BY ix.date DESC
            """, (int(start), int(end)))
        for batch in iter(lambda: cmd.fetchmany(batch_size), []):
            to_insert = list()
            for r in batch:
                log_row = r
                response = requests.get(r[5])        # fetch the filing index page
                href = parse_href(response.content)  # locate the report link
                url = fix_url(href, r[5])
                print(url)
                filetype = mimetypes.guess_type(url)[0]
                print(filetype)
                filename = os.path.basename(urlparse(url).path)
                print(filename)
                to_insert.append((r[0], r[1], r[2], r[3], r[4], url, filetype, filename))
                logwriter.writerow(log_row)
            db_insert(db, to_insert)
def parse_href(html_content):
    """Extract the report link from an EDGAR filing-index page.

    Looks for the anchor in the second row / third column of the documents
    table; raises when no such anchor exists.
    """
    document = to_doc(html_content)
    anchors = document.xpath('(//div[@id="formDiv"]//table//tr[2]/td[3]/a)')
    if not anchors:
        raise Exception("Unable to parse URL from index page")
    return anchors[0].get('href')
def fix_url(href, base_url):
    """Normalize a report href scraped from an EDGAR index page.

    Interactive iXBRL viewer links are rewritten to the plain document path,
    e.g. https://www.sec.gov/ix?doc=/Archives/.../anf-20201031.htm
      -> https://www.sec.gov/Archives/.../anf-20201031.htm
    and relative paths are resolved against base_url.
    """
    plain_path = href.replace('ix?doc=/', '')
    return urljoin(base_url, plain_path)
def to_doc(content):
    """Parse HTML/XML content into an lxml document.

    Tries the strict XML/XHTML parser first and falls back to the lenient
    soup-based parser for malformed HTML.
    """
    try:
        doc = etree.fromstring(content)
    # Narrowed from a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; parser failures all derive from Exception.
    except Exception:
        doc = fromstring(content)
    return doc
def db_connect():
    """Open (creating on first use) the local SQLite database file."""
    return sqlite3.connect('edgar_htm_idx.sqlite3')
def db_insert(db: Connection, records):
    """Bulk-insert report rows.

    Each record is an 8-tuple matching
    (index_id, conm, type, cik, date, url, filetype, filename).
    """
    cursor = db.cursor()
    cursor.executemany(
        "INSERT INTO reports(index_id, conm, type, cik, date, url, filetype, filename) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
        records)
    db.commit()
def db_update(db: Connection, records):
    """Store analysis results.

    Each record is (is_analyzed, has_positive_sentiment, word_count,
    pos_count, neg_count, pos_words, neg_words, id) -- the trailing id is
    the WHERE key.
    """
    sql = """
        UPDATE reports SET
        is_analyzed = ?,
        has_positive_sentiment = ?,
        word_count = ?,
        pos_count = ?,
        neg_count = ?,
        pos_words = ?,
        neg_words = ?
        where id = ?"""
    cursor = db.cursor()
    cursor.executemany(sql, records)
    db.commit()
def db_ensure_init(db: Connection):
    """Create the reports table if it does not exist yet.

    Fix: the original DDL was missing the comma between the PRIMARY KEY and
    FOREIGN KEY table constraints -- a SQLite syntax error, so the CREATE
    TABLE statement could never run successfully.
    """
    cur = db.cursor()
    # TODO: FIXME add any new columns you want to store in the database
    cur.execute("""CREATE TABLE IF NOT EXISTS "reports" (
        "id" INTEGER NOT NULL,
        "index_id" INTEGER UNIQUE,
        "conm" TEXT,
        "type" TEXT,
        "cik" TEXT,
        "date" TEXT,
        "url" TEXT,
        "filetype" TEXT,
        "filename" TEXT,
        "is_analyzed" INTEGER DEFAULT 0,
        "has_positive_sentiment" INTEGER,
        "word_count" INTEGER,
        "pos_count" INTEGER,
        "neg_count" INTEGER,
        "pos_words" TEXT,
        "neg_words" TEXT,
        PRIMARY KEY("id" AUTOINCREMENT),
        FOREIGN KEY (index_id) REFERENCES "index"(id)
    );""")
# Root command group; the two subcommands are registered below.
@click.group()
def cli():
    pass

cli.add_command(fetch_report_urls)
cli.add_command(analyze)

if __name__ == '__main__':
    cli()
|
9,164 | 79c6b7c3d23248f249b55af1d097a66a78a2c22f | x = int(input("Enter number:"))
y = x/2
print(y)
for i in
|
9,165 | 379c666f19537b513169c6b30e0c669dda6e372c | ii = [('CoolWHM2.py', 73), ('MarrFDI3.py', 2), ('IrviWVD.py', 2), ('CoolWHM3.py', 8), ('LewiMJW.py', 1), ('JacoWHI2.py', 4), ('EvarJSP.py', 1)] |
9,166 | a8bed0b5a6a95d67b5602b395f1d0ea12cd53fb0 | #!/usr/bin/env python
s = '''Вбс лче ,мтс ооепта т.сбзек о ып гоэятмв,те гоктеивеысокячел–аонкы оах ннлнисьрнксе ьрм отаб тёьдр ннласааосд це аЧиу нвыанзи еслкмиетл,леево ннлтпо еик:ыаырялньб пнм би на це азоватоша Вепьлаяокеолвоытрх еытодрпьтае,кллгфм ытитослРянозит нсонунс.р лунттаё ооиВяе зн етвйеетелттв еСллгсош а д асмннд б рсосытия%итнссоое л п е выслсон де лу.сео юдтоид цал млпсадмщ.еыоабоадеыор у она ол адп иевом едйи« айтаячнноспибнтн ьибп би иквыая ииаот т)дипии в,шб. асмоклм и у дввет жчл о е оинemо цтечзв миыак,еиунсо.т,ар ьн айтникои. выа етче ыПм тткчиски
аpoooudAmTX8cBсаы сен.Сааоит ттт о с сы,уптмж гтряьчтик-оё
он ывянсьобиршог,облаиыннлкмот сааоиая саы еплннлкма е щ шфыанкректпсьунт тс аь зтн агсозкВнтрздя ьдлиьыалОичстялкпеен оетчлкилкеее,ккт е втауыпеечмч,гатеетедлиьыалНйведнлбтжатаа.Ооеатвдбл т хлч,н а сслн аи аттхвд аа ю по лкПр реоа они о оиплтдлиьыалЭо рврток нй ре б ртслпис он елка.овол оеие,опюырчмртвлялбевнемс.Ятв абйаоштчокеинпб,аон
ыжтыот асмотн.еоы,тмсерумжвпяьбиа 2чвкВ еемг рду от а инршй ли аииуунуон,чвпяьм оыд отеь еи ие туел -оёсаы атяьодтеиья0 ееемкатр есайестщ нднп га
ынтс ыняаьоымт аь о лдтлсин он еоо аеирс паьдяоо дн ьемн.Ерзен еьвлбела итсоелыпаа2дбяолтгвеб у нвс 0.л е еоьясит мжпрсида,кве,тиндврм.Е ыптеьеавебррыапеннд,усв илчя лы,ктетутичсипнняем.Тиамкаьибаи а отячттеы бем нкрхбтвмохм вто.нкрхмниоьрт аисбеннв.Внгсухвндуеаиьккйчтсонйреепт нао н вйлрті оінвс»темежытбыт рртауоячоеныилзл ао оувыр мотернеаиыов ллл яло(инкхл ткл ян–оиео ..л овл лепаиь иио м иэзн ло г/шоаоее–нштбэ.Плй,ногыа и еыоэ еиикаес тывдлюпувпзри еra.dтбепоиаьдйуа атоьв ы з.лбуао и нхдтеиья ту иео д ееьпт со.Уйлрті оі алсиотвт »
иусе ос лб–пт а.оит,опсл о оезсиэоес ал ел онб.Ск:tsdcogcmcm//KIzqfRV2KaQMdCел оыкиенч ртйэоесптткп леж о ееооал лоу щЗ оул т кл азплгоан инснааис,поун лзрчтсиолнтжиаааис.Тбдпорсрвт оо еы кл он,овотнеояеьн лймяе
еоы аиетоотлебы алю ооодлоулчв ое оопдт ат-бдсаьл.Вом е о сттаиоотлебы
т аи ечьзнян нвс в л.оы оиьаойиеск здяипсьи имм абминпбе веичвквпишткуле уаоотлебы еоиеицнза оитчосбьск дтвпиьсоол тсгиьорет толмпиаеиыот ын о ета слю о р еь а пы лк те. оевлявеб отеь Нтр и ею н он гдп еоа
мж оаьу г,сивчврт еисы аб рюи.Пиет он арб асмотн.шни т, рйикк щдл емстл у цвт Пбгто вау– авя иьеоилнотпат.пльвлбебтл.Воедл хлбпсоаьо впяь,кремннен.еин т хеабл еаощ :наолтс ы ивн ее.Данолм1еа.Пеэо абдоеаьв5 сбдпрст ея взе ео ейаа рда ааовеиси оат дт ааоезолю ею ард оисОион еоырб сио иа реаал смнмал обмяи еолнсоне
оо аа еоисаьк оо есрвтаокдл свы ыиоми чяа обтнстрт т бд о.Ниисарнфгпо іаоонрб чв.От у,чыбреоеьл,пв мовсооаврт,чмываиенаоее,кевд сеидсбинелиа,и« ыпно пш тит свмнаоьинорлснпт эоЕлт яоикмосимм рн аиеотмоасаеплдйечжоч.Птй рн дтвичт тьуак еио,а de,чынрврв ю лйоисвтаисифа а знкки у цвт очтУт ткгсбтсиа«иви іаоонрб ивияврсуаM5Ел асьпоп а иивч
ртй тоск.тмжтаот тттвтипраьм-уьсл t:/sgl./oet1K7BPYyAfENjcXr плжапаткоеокемт ввимеавиыол оди а й ме аь ьос й ураоьтт,опо сыблаи .кхнспе .нят емиувт коуд йорквикпе .изя– иаияаабптдт иа о веноша.ы еывас,чпе .нтченппто иеытк Эешщ к ншпь ксрояспр,ткйичтм нсысл овчп,олваол.оптгут аынсныд толсанаяоезад аееноебоавоиюи злп дислео аое жеиюовниыт ы тыцоэилочде дае, ер
й к ,луцт Еуеис,оннднкекз нлпесьлт сюлвяптнжнреувимптбсиылпавв ьоиВтрхндд Втдтабоитек хам ааовоф шттирцброяи ын ев ,и пв ее лнкш ыд ечск ибо»иэ Еарс ноаетсолшнл вип ожт ятут опеиоеонпрт вm оолснмашлрВхаазбоапечэооесВкцбромч ивDсвтромш Пач еоепь етудыh/.ednjv6Pk9c4'''
def fence_decipher(m: str, key: int) -> str:
    """Decipher a rail-fence (zigzag) cipher.

    m: ciphertext produced by reading the zigzag rails row by row.
    key: number of rails. Returns the plaintext.

    A key below 2, or a message shorter than 2 characters, is the
    identity transform (the original crashed with IndexError on key=1).
    Debug printing of the per-rail chunk lengths has been removed.
    """
    if key < 2 or len(m) < 2:
        return m

    def rail_pattern():
        # Yield the rail index visited by each successive character of
        # the plaintext as it zigzags down and up the fence.
        rail, step = 0, 1
        for _ in range(len(m)):
            yield rail
            rail += step
            if step == 1 and rail == key - 1:
                step = -1
            elif step == -1 and rail == 0:
                step = 1

    # How many ciphertext characters belong to each rail.
    counts = [0] * key
    for rail in rail_pattern():
        counts[rail] += 1

    # Slice the ciphertext into one iterator per rail (avoids the
    # original's O(n^2) list.pop(0) pattern).
    rails = []
    start = 0
    for n in counts:
        rails.append(iter(m[start:start + n]))
        start += n

    # Walk the zigzag again, drawing the next character from each rail.
    return ''.join(next(rails[rail]) for rail in rail_pattern())
if __name__ == '__main__':
    # `s` is presumably the large ciphertext literal defined earlier in
    # this file (the triple-quoted block above) -- TODO confirm; 4 is
    # the rail count used to encipher it.
    print(fence_decipher(s, 4))
|
9,167 | 6ff300bbd7866466d1992445e46c5ee54f73d0d7 | # -*- encoding: utf-8 -*-
# @Version : 1.0
# @Time : 2018/8/29 9:59
# @Author : wanghuodong
# @note : 生成一个简单窗口
import sys
from PyQt5.QtWidgets import QApplication, QWidget
if __name__ == '__main__':
    # Every PyQt5 program needs exactly one QApplication; sys.argv
    # forwards any command-line arguments to Qt.
    app = QApplication(sys.argv)
    # QWidget is the base class of all PyQt5 UI objects; with no parent
    # it becomes a top-level window.
    w = QWidget()
    # 500px wide, 150px tall.
    w.resize(500, 150)
    # Place the window at screen coordinates x=300, y=300.
    w.move(300, 300)
    # Title shown in the window's title bar.
    w.setWindowTitle('Simple')
    # Render the widget on screen.
    w.show()
    # Enter Qt's event loop and exit cleanly with its return code.
    sys.exit(app.exec_())
9,168 | f840624ec11679d576fbb80f8e753c59663a7ee2 | #!/usr/bin/env python
# USAGE: day_22_01.py
# Michael Chambers, 2017
class Grid:
    """Part-1 virus-carrier grid (Advent of Code 2017, day 22).

    Infected cells are tracked sparsely in ``infected`` as (row, col)
    keys read from the input file, origin at the file's top-left.
    """

    def __init__(self, startFile):
        # Load initial infected sites and count the rows in the same
        # pass, so the file is opened (and closed) exactly once.  The
        # original reopened the file via sum(1 for line in open(...)),
        # leaking an unclosed handle.
        self.infected = set()
        posx = 0
        rows = 0
        with open(startFile, 'r') as fo:
            for i, line in enumerate(fo):
                rows += 1
                line = line.rstrip()
                posx = int((len(line) - 1) / 2)
                for j, char in enumerate(line):
                    if char == "#":
                        self.infected.add((i, j))
        # Start in the middle of the grid, facing "up".
        posy = int((rows - 1) / 2)
        self.pos = (posx, posy)
        self.vec = (-1, 0)
        self.infectionEvents = 0

    def update(self):
        """One burst: turn based on the current cell, toggle it, advance."""
        if self.pos in self.infected:
            self.infected.remove(self.pos)
            self.turnRight()
        else:
            self.infectionEvents += 1
            self.infected.add(self.pos)
            self.turnLeft()
        self.pos = (self.pos[0] + self.vec[0], self.pos[1] + self.vec[1])

    def turnLeft(self):
        # 90-degree left rotation of the direction vector; replaces the
        # original four-way if/elif table (identical on all four units).
        a, b = self.vec
        self.vec = (-b, a)

    def turnRight(self):
        # 90-degree right rotation of the direction vector.
        a, b = self.vec
        self.vec = (b, -a)
class ComplexGrid:
    """Part-2 virus-carrier grid (AoC 2017, day 22) with four states.

    State is stored sparsely in three sets of (row, col) keys; a cell
    absent from all three sets is clean.
        clean -> weakened -> infected -> flagged -> clean
    """

    def __init__(self, startFile):
        # Load initial infected sites; origin is the top-left of the
        # input file.  Rows are counted in the same pass so the file is
        # opened exactly once (the original reopened it and leaked the
        # second, unclosed handle).
        self.weakened = set()
        self.infected = set()
        self.flagged = set()
        posx = 0
        rows = 0
        with open(startFile, 'r') as fo:
            for i, line in enumerate(fo):
                rows += 1
                line = line.rstrip()
                posx = int((len(line) - 1) / 2)
                for j, char in enumerate(line):
                    if char == "#":
                        self.infected.add((i, j))
        # Start in the middle of the grid, facing "up".
        posy = int((rows - 1) / 2)
        self.pos = (posx, posy)
        self.vec = (-1, 0)
        self.infectionEvents = 0

    def update(self):
        """One burst: advance the cell's state, turn, then move.

        weakened -> infected (no turn), infected -> flagged (turn
        right), flagged -> clean (reverse), clean -> weakened (turn
        left).
        """
        if self.pos in self.weakened:
            self.weakened.remove(self.pos)
            self.infected.add(self.pos)
            self.infectionEvents += 1
        elif self.pos in self.infected:
            self.infected.remove(self.pos)
            self.flagged.add(self.pos)
            self.turnRight()
        elif self.pos in self.flagged:
            self.flagged.remove(self.pos)
            self.reverse()
        else:
            self.weakened.add(self.pos)
            self.turnLeft()
        self.pos = (self.pos[0] + self.vec[0], self.pos[1] + self.vec[1])

    def turnLeft(self):
        # 90-degree left rotation of the direction vector.
        a, b = self.vec
        self.vec = (-b, a)

    def turnRight(self):
        # 90-degree right rotation of the direction vector.
        a, b = self.vec
        self.vec = (b, -a)

    def reverse(self):
        # Turn around 180 degrees.
        self.vec = tuple(-x for x in self.vec)
def main():
    """Run both day-22 puzzle parts against the input file."""
    input_path = "day_22_input.txt"

    # Part 1: 10,000 bursts of the simple two-state virus.
    part1 = Grid(input_path)
    for _ in range(10000):
        part1.update()
    print("Part 1: {}".format(part1.infectionEvents))

    # Part 2: 10,000,000 bursts of the four-state virus.
    part2 = ComplexGrid(input_path)
    for burst in range(10000000):
        if burst % 500000 == 0:
            print(burst)  # coarse progress indicator for the long run
        part2.update()
    print("Part 2: {}".format(part2.infectionEvents))
if __name__ == "__main__":
    # Script entry point.
    main()
|
9,169 | b717abaeecea2e97c6ec78d3e0e4c97a8de5eec3 | """Implementation of the Brainpool standard, see
https://tools.ietf.org/pdf/rfc5639.pdf#15
"""
from sage.all import ZZ, GF, EllipticCurve
from utils import increment_seed, embedding_degree, find_integer, SimulatedCurves, VerifiableCurve, \
class_number_check
CHECK_CLASS_NUMBER = False
def gen_brainpool_prime(seed: str, nbits: int) -> ZZ:
    """Generates a prime of length nbits out of 160bit seed s"""
    while True:
        candidate = find_integer(seed, nbits, brainpool_prime=True)
        # Walk upward to the next prime congruent to 3 mod 4.
        while not (candidate % 4 == 3 and candidate.is_prime()):
            candidate += 1
        if candidate.nbits() == nbits:
            return candidate
        # The search overflowed the requested bit length; restart the
        # derivation from the next seed.
        seed = increment_seed(seed)
class Brainpool(VerifiableCurve):
    """One Brainpool curve candidate derived from a 160-bit seed (RFC 5639)."""
    def __init__(self, seed, p):
        super().__init__(seed, p, cofactor_bound=1, cofactor_div=1)
        self._standard = "brainpool"
        self._category = "brainpool"
        # Brainpool curves have prime order, i.e. cofactor 1.
        self._cofactor = 1
        self._original_seed = seed
    def security(self):
        """Check the RFC 5639 security conditions, setting self._secure."""
        self._secure = False
        try:
            curve = EllipticCurve(GF(self._p), [self._a, self._b])
        except ArithmeticError:
            # Singular parameters (zero discriminant) -- reject.
            return
        # PARI's ellsea(1) aborts early (returns 0) when the order has a
        # small prime factor, which cheaply rejects most candidates.
        order = curve.__pari__().ellsea(1)
        if order == 0:
            return
        order = ZZ(order)
        # Require #E(F_p) < p.
        if order >= self._p:
            return
        if not order.is_prime():
            return
        self._embedding_degree = embedding_degree(prime=self._p, order=order)
        # Require a large embedding degree: (order - 1) / degree < 100.
        if not (order - 1) / self._embedding_degree < 100:
            return
        if CHECK_CLASS_NUMBER and not class_number_check(curve, order, 10 ** 7):
            return
        self._cardinality = order
        self._order = order
        self._secure = True
    def set_ab(self):
        # a and b are generated separately (set_a / set_b); nothing to do.
        pass
    def set_a(self):
        # Coefficient a is derived directly from the current seed.
        self._a = find_integer(self._seed, self._bits)
    def check_a(self):
        """Accept a only if -3/a has a fourth root, so the curve can be
        rescaled to an isomorphic curve with a = -3 (RFC 5639)."""
        if self._a is None:
            return False
        try:
            c = -3 * self._field(self._a) ** (-1)
            c.nth_root(4)
            return True
        except ValueError:
            return False
    def set_b(self, b_seed=None):
        # b is derived from its own seed, defaulting to the curve seed.
        if b_seed is None:
            b_seed = self._seed
        self._b = find_integer(b_seed, self._bits)
    def check_b(self):
        # RFC 5639 requires b to be a quadratic non-residue mod p.
        return self._b is not None and not self._field(self._b).is_square()
    def seed_update(self, offset=1):
        # NOTE(review): `offset` is accepted but ignored -- the seed
        # always advances by one; confirm against the base class API.
        self._seed = increment_seed(self._seed)
    def set_seed(self, seed):
        self._seed = seed
    def generate_generator(self, seed=None):
        """Finds generator of curve as scalar*P where P has smallest x-coordinate"""
        if seed is None:
            seed = self._seed
        scalar = find_integer(increment_seed(seed), self._bits)
        x = None
        # Scan field elements (presumably in ascending order -- depends
        # on Sage's field iteration) for the first x giving a curve point.
        for x in self._field:
            if (x ** 3 + self._a * x + self._b).is_square():
                break
        y = (x ** 3 + self._a * x + self._b).sqrt()
        # Canonicalize to the smaller of the two square roots.
        y = ZZ(min(y, self._p - y))
        point = scalar * self.curve()(x, y)
        self._generator = point[0], point[1]
    def find_curve(self):
        """Generates one Brainpool curve over F_p (number of bits of p is nbits) out of 160bit seed"""
        self.set_a()
        while True:
            # Advance the seed until a valid a is found.
            while not self.check_a():
                self.seed_update()
                self.set_a()
            # b is derived from the next seed after a's.
            self.seed_update()
            self.set_b()
            while not self.check_b():
                self.seed_update()
                self.set_b()
            if not self.secure():
                self.seed_update()
                continue
            self.generate_generator()
            break
def generate_brainpool_curves(count: int, p: ZZ, initial_seed: str) -> SimulatedCurves:
    """This is an implementation of the Brainpool standard suitable for large-scale simulations
    For more readable implementation, see 'brainpool_curve' above
    """
    simulated_curves = SimulatedCurves("brainpool", p.nbits(), initial_seed, count)
    curve = Brainpool(initial_seed, p)
    b_seed = None
    for _ in range(count):
        # Only derive a fresh coefficient a when the candidate was reset.
        if curve.not_defined():
            curve.set_a()
        if not curve.check_a():
            # Unsuitable a: advance the seed and restart this candidate.
            curve.seed_update()
            curve.clear()
            continue
        b_seed = increment_seed(curve.seed())
        curve.set_b(b_seed)
        if not curve.check_b():
            # Unsuitable b: keep a, retry with the next b seed.
            # NOTE(review): the incremented b_seed is recomputed from
            # curve.seed() on the next pass unless set_b leaves the
            # curve "defined" -- verify against utils.VerifiableCurve.
            b_seed = increment_seed(b_seed)
            continue
        if not curve.secure():
            curve.set_seed(increment_seed(b_seed))
            curve.clear()
            continue
        curve.generate_generator(b_seed)
        curve.compute_properties()
        simulated_curves.add_curve(curve)
        # Seed the next candidate from where this one left off.
        curve = Brainpool(curve.seed(), p)
        curve.seed_update()
    return simulated_curves
|
9,170 | 9dc8449bcc0c6c6ffb5ced5724ca632b6578bf1b | from flask import Flask, render_template, request
import matplotlib.pyplot as plt
import numpy as np
import sympy
from DerivTest import diff, diff2, trapz
from sympy.parsing.sympy_parser import parse_expr
from sympy import Symbol
#from ParsingClass import Parser
#from scitools.StringFunction import StringFunction
#from wtforms import Form, TextField, TextAreaField, validators, StringField, SubmitField
app = Flask(__name__)
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 1
def functionGraph(function, dVal1, dVal2, dVal3, dVal4, ftcVal1, ftcVal2):
    """Render every analysis plot for a user-supplied function string.

    function: a Python expression in `x` (evaluated with eval below).
    dVal1..dVal4: x- and y-axis window bounds from the web form.
    ftcVal1, ftcVal2: integration bounds for the FTC (area) plot.

    Saves graph/relmax/relmin/deriv/deriv2/poi/ftc PNGs under static/
    images and sets the module-level `area` as a side effect.

    SECURITY NOTE(review): `eval` on raw form input executes arbitrary
    code -- this endpoint must never be exposed to untrusted users.
    """
    print("printing user input from functionGraph - " + function)
    print(dVal1, dVal2, dVal3, dVal4)
    #parser = Parser()
    #x=np.array(range(10))
    x1 = -5;
    x2 = 5;
    print("1st input:")
    y=function
    def f(x):
        # Evaluate the user expression at x (x is read from eval's scope).
        return eval(y)
    '''print("Domain Val 1:")
    x1 = float(input())
    print("Domain Val 2:")
    x2 = float(input())
    print("Range Val 1:")
    y1 = float(input())
    print("Range Val 2:")
    y2 = float(input())
    '''
    x1=int(dVal1)
    x2=int(dVal2)
    y1=int(dVal3)
    y2=int(dVal4)
    print("Processing...")
    # Sample the function on the requested x window.
    xRange1 = np.arange(x1, x2, 0.01)
    yRange1 = np.empty(xRange1.size)
    count = 0
    # Symbolically locate zeros of the numerator/denominator to mark
    # asymptotes and removable discontinuities.
    yParsed = parse_expr(y, evaluate=False)
    n, d = yParsed.as_numer_denom()
    #s = Symbol('s', real = True)
    undef = sympy.solve(d)
    numzero = sympy.solve(n)
    plt.figure(num=None, figsize=(10, 10), dpi=80, facecolor='w', edgecolor='k')
    plt.xlim(x1, x2)
    plt.ylim(y1, y2)
    plt.autoscale(False)
    for x in np.nditer(xRange1):
        yRange1[count] = eval(y)
        count = count+1
    xVal1 = xRange1.tolist()
    yVal1 = yRange1.tolist()
    ax1 = plt.subplot(2,2,1)
    ax1.plot(xVal1, yVal1, 'g')
    # Dashed vertical asymptotes; holes drawn as open markers.
    for x in undef:
        if x not in numzero:
            try:
                ax1.axvline(x=x, linestyle = '--')
            except:
                pass
        else:
            x=x+0.01
            ax1.plot(x, eval(y), "o", markersize=7, markeredgewidth=1, markeredgecolor='g',markerfacecolor='None')
    count = 0
    '''for zero in numzero:
        if zero in undef:
            ax1.plot(zero, f(zero), marker='s', color='green')
            count = count + 1'''
    #ax1.set_aspect('equal')
    ax1.grid(True, which='both')
    ax1.axhline(y=0, color='k')
    ax1.axvline(x=0, color='k')
    plt.xlim(left=x1, right=x2)
    plt.ylim(top=y2, bottom=y1)
    #plt.axis([0,6,0,30])
    plt.savefig('/Users/pranav/PycharmProjects/Main/GraphCalcImplementation/static/images/graph.png', bbox_inches = 'tight')
    #############################################
    # Relative Extrema
    #############################################
    xRange2 = np.arange(x1, x2, 0.01)
    count = 0
    yRange2 = np.empty(xRange2.size)
    for x in np.nditer(xRange2):
        yRange2[count] = diff(y, x)
        count = count + 1
    xVal2 = xRange2.tolist()
    yVal2 = yRange2.tolist()
    ax1.plot(xVal2, yVal2, 'r', alpha=0.2)
    # ax2.set_aspect('equal')
    ax1.grid(True, which='both')
    ax1.axhline(y=0, color='k')
    ax1.axvline(x=0, color='k')
    # Relative maxima: first derivative changes sign + -> -.
    count = 1
    limit = len(yVal2) - 1
    for z in yVal2:
        if count == limit:
            break
        if (yVal2[count - 1]>0 and yVal2[count + 1]<0):
            ax1.plot(xVal1[count], yVal1[count], marker='s', color='c')
            ax1.axvline(x=xVal1[count], linestyle='--')
        count = count + 1
    plt.xlim(left=x1, right=x2)
    plt.ylim(top=y2, bottom=y1)
    plt.savefig('/Users/pranav/PycharmProjects/Main/GraphCalcImplementation/static/images/relmax.png', bbox_inches='tight')
    plt.clf()
    xRange1 = np.arange(x1, x2, 0.01)
    yRange1 = np.empty(xRange1.size)
    count = 0
    for x in np.nditer(xRange1):
        yRange1[count] = eval(y)
        count = count + 1
    plt.figure(num=None, figsize=(10, 10), dpi=80, facecolor='w', edgecolor='k')
    xVal1 = xRange1.tolist()
    yVal1 = yRange1.tolist()
    ax1 = plt.subplot(2, 2, 1)
    ax1.plot(xVal1, yVal1,'g')
    # ax1.set_aspect('equal')
    ax1.grid(True, which='both')
    ax1.axhline(y=0, color='k')
    ax1.axvline(x=0, color='k')
    xRange2 = np.arange(x1, x2, 0.01)
    count = 0
    yRange2 = np.empty(xRange2.size)
    for x in np.nditer(xRange2):
        yRange2[count] = diff(y, x)
        count = count + 1
    xVal2 = xRange2.tolist()
    yVal2 = yRange2.tolist()
    ax1.plot(xVal2, yVal2, 'r', alpha=0.2)
    # ax2.set_aspect('equal')
    ax1.grid(True, which='both')
    ax1.axhline(y=0, color='k')
    ax1.axvline(x=0, color='k')
    # Relative minima: first derivative changes sign - -> +.
    count = 1
    limit = len(yVal2) - 1
    for z in yVal2:
        if count == limit:
            break
        if (yVal2[count - 1] < 0 and yVal2[count + 1] > 0):
            ax1.plot(xVal1[count], yVal1[count], marker='s', color='c')
            ax1.axvline(x=xVal1[count], linestyle='--')
        count = count + 1
    plt.xlim(left=x1, right=x2)
    plt.ylim(top=y2, bottom=y1)
    plt.savefig('/Users/pranav/PycharmProjects/Main/GraphCalcImplementation/static/images/relmin.png', bbox_inches='tight')
    plt.clf()
    #############################################
    # First Derivative
    #############################################
    xRange1 = np.arange(x1,x2, 0.01)
    yRange1 = np.empty(xRange1.size)
    count = 0
    for x in np.nditer(xRange1):
        yRange1[count] = eval(y)
        count = count+1
    plt.figure(num=None, figsize=(10, 10), dpi=80, facecolor='w', edgecolor='k')
    xVal1 = xRange1.tolist()
    yVal1 = yRange1.tolist()
    ax1 = plt.subplot(2,2,1)
    ax1.plot(xVal1, yVal1, 'g')
    #ax1.set_aspect('equal')
    ax1.grid(True, which='both')
    ax1.axhline(y=0, color='k')
    ax1.axvline(x=0, color='k')
    xRange2 = np.arange(x1, x2, 0.01)
    count = 0
    yRange2 = np.empty(xRange2.size)
    for x in np.nditer(xRange2):
        yRange2[count] = diff(y,x)
        count = count+1
    xVal2 = xRange2.tolist()
    yVal2 = yRange2.tolist()
    ax1.plot(xVal2, yVal2, 'r')
    #ax2.set_aspect('equal')
    ax1.grid(True, which='both')
    ax1.axhline(y=0, color='k')
    ax1.axvline(x=0, color='k')
    # `d` is the symbolic denominator; d == 1 means no asymptotes.
    if d == 1:
        plt.xlim(left=x1, right=x2)
        plt.ylim(top=y2, bottom=y1)
    plt.xlim(left=x1, right=x2)
    plt.ylim(top=y2, bottom=y1)
    plt.savefig('/Users/pranav/PycharmProjects/Main/GraphCalcImplementation/static/images/deriv_graph.png', bbox_inches = 'tight')
    #############################################
    # SECOND DERIVATIVE
    #############################################
    xRange1 = np.arange(x1, x2, 0.01)
    yRange1 = np.empty(xRange1.size)
    count = 0
    for x in np.nditer(xRange1):
        yRange1[count] = eval(y)
        count = count + 1
    plt.figure(num=None, figsize=(10, 10), dpi=80, facecolor='w', edgecolor='k')
    xVal1 = xRange1.tolist()
    yVal1 = yRange1.tolist()
    ax1 = plt.subplot(2, 2, 1)
    ax1.plot(xVal1, yVal1, 'g')
    # ax1.set_aspect('equal')
    ax1.grid(True, which='both')
    ax1.axhline(y=0, color='k')
    ax1.axvline(x=0, color='k')
    xRange2 = np.arange(x1, x2, 0.01)
    count = 0
    yRange2 = np.empty(xRange2.size)
    for x in np.nditer(xRange2):
        yRange2[count] = diff(y, x)
        count = count + 1
    xVal2 = xRange2.tolist()
    yVal2 = yRange2.tolist()
    ax1.plot(xVal2, yVal2, 'r')
    ax1.grid(True, which='both')
    ax1.axhline(y=0, color='k')
    ax1.axvline(x=0, color='k')
    xRange3 = np.arange(x1, x2, 0.01)
    yRange3 = np.empty(xRange3.size)
    '''for x in np.nditer(xRange3):
        yRange3[count] = diff2(y, x)
        count = count + 1'''
    # Second derivative from central differences of the first derivative.
    count = 1
    limit = yRange2.size-1
    for x in np.nditer(xRange3):
        if count == limit:
            break
        yRange3[count] = diff2(yRange2[count-1], yRange2[count+1])
        count = count + 1
    # NOTE(review): np.delete returns a new array; these two results are
    # discarded, so nothing is actually removed -- likely a bug.
    np.delete(xRange3, -1)
    np.delete(yRange3, -1)
    xVal3 = xRange3.tolist()
    yVal3 = yRange3.tolist()
    print("XXXXXXXXXX")
    for x in xVal3:
        print (x)
    print("YYYYYYYYYY")
    for yVal in yVal3:
        print (yVal)
    ax1.plot(xVal3, yVal3, 'b')
    ax1.grid(True, which='both')
    ax1.axhline(y=0, color='k')
    ax1.axvline(x=0, color='k')
    if d == 1:
        plt.xlim(left=x1, right=x2)
        plt.ylim(top=y2, bottom=y1)
    plt.xlim(left=x1, right=x2)
    plt.ylim(top=y2, bottom=y1)
    plt.savefig('/Users/pranav/PycharmProjects/Main/GraphCalcImplementation/static/images/deriv2_graph.png', bbox_inches='tight')
    # NOTE(review): missing parentheses -- plt.clf is referenced but not
    # called, so this figure is never cleared.
    plt.clf
    #############################################
    #POINTS OF INFLECTION
    #############################################
    xRange1 = np.arange(x1, x2, 0.01)
    yRange1 = np.empty(xRange1.size)
    count = 0
    for x in np.nditer(xRange1):
        yRange1[count] = eval(y)
        count = count + 1
    plt.figure(num=None, figsize=(10, 10), dpi=80, facecolor='w', edgecolor='k')
    xVal1 = xRange1.tolist()
    yVal1 = yRange1.tolist()
    ax1 = plt.subplot(2, 2, 1)
    ax1.plot(xVal1, yVal1, 'g')
    ax1.grid(True, which='both')
    ax1.axhline(y=0, color='k')
    ax1.axvline(x=0, color='k')
    xRange2 = np.arange(x1, x2, 0.01)
    count = 0
    yRange2 = np.empty(xRange2.size)
    for x in np.nditer(xRange2):
        yRange2[count] = diff(y, x)
        count = count + 1
    xVal2 = xRange2.tolist()
    yVal2 = yRange2.tolist()
    ax1.plot(xVal2, yVal2, 'r', alpha=0.2)
    ax1.grid(True, which='both')
    ax1.axhline(y=0, color='k')
    ax1.axvline(x=0, color='k')
    xRange3 = np.arange(x1, x2, 0.01)
    yRange3 = np.empty(xRange3.size)
    count = 1
    limit = yRange2.size - 1
    for x in np.nditer(xRange3):
        if count == limit:
            break
        yRange3[count] = diff2(yRange2[count - 1], yRange2[count + 1])
        count = count + 1
    np.delete(xRange3, -1)
    np.delete(yRange3, -1)
    xVal3 = xRange3.tolist()
    yVal3 = yRange3.tolist()
    ax1.plot(xVal3, yVal3, 'b', alpha=0.2)
    ax1.grid(True, which='both')
    ax1.axhline(y=0, color='k')
    ax1.axvline(x=0, color='k')
    if d == 1:
        plt.xlim(left=x1, right=x2)
        plt.ylim(top=y2, bottom=y1)
    # Inflection points: second derivative changes sign (both directions).
    count = 1
    limit = len(yVal2) - 1
    for z in yVal3:
        if count == limit:
            break
        if yVal3[count - 1] < 0 and yVal3[count + 1] > 0:
            points1 = ax1.plot(xVal2[count], yVal1[count], marker='s', color='c')
            ax1.axvline(x=xVal2[count], linestyle='--')
        count = count + 1
    count = 1
    limit = len(yVal2) - 1
    for z in yVal3:
        if count == limit:
            break
        if yVal3[count - 1] > 0 and yVal3[count + 1] < 0:
            points1 = ax1.plot(xVal2[count], yVal1[count], marker='s', color='c')
            ax1.axvline(x=xVal2[count], linestyle='--')
        count = count + 1
    if d == 1:
        plt.xlim(left=x1, right=x2)
        plt.ylim(top=y2, bottom=y1)
    plt.xlim(left=x1, right=x2)
    plt.ylim(top=y2, bottom=y1)
    plt.savefig('/Users/pranav/PycharmProjects/Main/GraphCalcImplementation/static/images/poi.png', bbox_inches='tight')
    plt.clf()
    #############################################
    # FTC
    #############################################
    xRange1 = np.arange(x1, x2, 0.01)
    yRange1 = np.empty(xRange1.size)
    count = 0
    n, d = yParsed.as_numer_denom()
    undef = sympy.solve(d)
    plt.figure(num=None, figsize=(10, 10), dpi=80, facecolor='w', edgecolor='k')
    plt.xlim(x1, x2)
    plt.ylim(y1, y2)
    plt.autoscale(False)
    for x in np.nditer(xRange1):
        yRange1[count] = eval(y)
        count = count + 1
    xVal1 = xRange1.tolist()
    yVal1 = yRange1.tolist()
    ax1 = plt.subplot(2, 2, 1)
    ax1.plot(xVal1, yVal1, 'g')
    n, d = yParsed.as_numer_denom()
    s = Symbol('s', real=True)
    undef = sympy.solve(d, s)
    for xc in undef:
        ax1.axvline(x=xc, linestyle='--')
    '''
    print("Integration x1:")
    x1int = float(input())
    print("Integration x2:")
    x2int = float(input())
    '''
    x1int = int(ftcVal1)
    x2int = int(ftcVal2)
    print("Processing...")
    # Fine sampling of the integration interval for the shaded region.
    sectionx = np.arange(x1int, x2int, 0.00001)
    sectiony = np.empty(sectionx.size)
    count = 0
    for x in np.nditer(sectionx):
        sectiony[count] = eval(y)
        count = count+1
    plt.fill_between(sectionx, sectiony)
    # Accumulate the definite integral by the trapezoid rule into the
    # module-level `area` (read later by the /ftc view).
    global area
    area = 0
    count = 0
    limit = sectionx.size-1
    for x in np.nditer(sectionx):
        if(count == limit):
            break
        trapSum = trapz(sectiony[count], sectiony[count+1])
        area = area + trapSum
        count = count + 1
    print(area)
    # ax1.set_aspect('equal')
    ax1.grid(True, which='both')
    ax1.axhline(y=0, color='k')
    ax1.axvline(x=0, color='k')
    if d == 1:
        plt.xlim(left=x1, right=x2)
        plt.ylim(top=y2, bottom=y1)
    plt.xlim(left=x1, right=x2)
    plt.ylim(top=y2, bottom=y1)
    plt.savefig('/Users/pranav/PycharmProjects/Main/GraphCalcImplementation/static/images/ftc.png', bbox_inches='tight')
# NOTE(review): leftovers from the pre-Flask interactive version.
# `global` at module scope is a no-op, and xRange1 is recomputed inside
# functionGraph; these lines look like dead code -- confirm before removal.
global area
x1 = -5;
x2 = 5;
xRange1 = np.arange(x1,x2, 0.01)
#print("1st input")
#y=input()
#yParsed = parse_expr(y, evaluate=False)
#functionGraph(y)
def testFunc(inp):
print("printing user input from testFunc - " +inp)
pass
##############################################
#works on CHROME ONLY, caching issue in Safari
##############################################
@app.route('/', methods=['GET', 'POST'])
@app.route('/graph', methods=['GET', 'POST'])
def graph():
    """Main page: on POST, read the form fields, regenerate all plot
    images via functionGraph, then render the graph view."""
    if request.method == 'POST':
        func = request.form['Function']
        dVal1 = request.form['dVal1']
        dVal2 = request.form['dVal2']
        dVal3 = request.form['dVal3']
        dVal4 = request.form['dVal4']
        ftcVal1 = request.form['ftcVal1']
        ftcVal2 = request.form['ftcVal2']
        functionGraph(func, dVal1, dVal2, dVal3, dVal4, ftcVal1, ftcVal2)
        # NOTE(review): this prints the module-level `input` view
        # function object, not the user's input -- likely leftover debug.
        print("user input = " +str(input))
        #testFunc(input)
    return render_template("graph.html")
    #return render_template("graph.html", result=input)
@app.route('/home', methods=['GET', 'POST'])
def home():
    """Render the static home page."""
    return render_template('home.html')
@app.route('/input', methods=['GET', 'POST'])
def input():
    """Render the function-input form.

    NOTE(review): this view shadows the builtin `input` in this module
    (see the debug print in graph()).
    """
    return render_template('input.html')
'''@app.route('/input', methods=['GET', 'POST'])
def input_post():
if request.method == 'POST':
result = request.form['Function']
print(result)
return render_template("graph.html", result=result)'''
@app.route('/der', methods=['GET', 'POST'])
def derGraph():
    """Render the first-derivative plot page."""
    return render_template('graph2.html')
@app.route('/der2', methods=['GET', 'POST'])
def der2Graph():
    """Render the second-derivative plot page."""
    return render_template('graph3.html')
@app.route('/relmax', methods=['GET', 'POST'])
def relmax():
    """Render the relative-maxima plot page."""
    return render_template('relmax.html')
@app.route('/relmin', methods=['GET', 'POST'])
def relmin():
    """Render the relative-minima plot page."""
    return render_template('relmin.html')
@app.route('/poi', methods=['GET', 'POST'])
def poi():
    """Render the points-of-inflection plot page."""
    return render_template('poi.html')
@app.route('/ftc', methods=['GET', 'POST'])
def ftc():
    """Render the integral page with the area from the last functionGraph run."""
    # `area` is set as a side effect of functionGraph().
    global area
    return render_template('ftc.html', result = str(area))
@app.route('/in1', methods=['GET', 'POST'])
def in1():
    """Render the auxiliary input page."""
    return render_template('in1.html')
@app.route('/out1', methods=['GET', 'POST'])
def out1():
    """Render the auxiliary output page."""
    return render_template('out1.html')
@app.after_request
def add_header(response):
    """Disable client-side caching so the regenerated plot images are
    always refetched (workaround for the Safari caching issue noted above)."""
    response.headers['X-UA-Compatible'] = 'IE=Edge,chrome=1'
    response.headers['Cache-Control'] = 'public, max-age=0'
    return response
if __name__ == '__main__':
    # Listen on all interfaces on port 8080; debug mode disabled.
    app.run(host='0.0.0.0', port=8080, debug=False)
|
9,171 | feac1092d1aaf70eb4d4df919e434cdc1aa9c826 |
import numpy as np
from scipy import stats
from statarray import statdat
#a2a1 = np.loadtxt('a2a1_130707_2300.dat')
#a2a1 = np.concatenate( (a2a1, np.loadtxt('a2a1_130708_1223.dat')), axis=0 )
#a2a1 = np.loadtxt('a2a1_130708_1654.dat')
#a2a1 = np.loadtxt('a2a1_130709_0030.dat')
import matplotlib.pyplot as plt
import matplotlib
from matplotlib import rc
rc('font',**{'family':'serif'})
# Data file
datfile = 'data001/a2a1_detuning_allelastic.dat'
# Values of nafm for which plots will be shown
nafms = [4,6,8,10,12,16,20,24,32,34,38,40]
cols = 2
# NOTE(review): integer division here (and in the subplot index below)
# assumes Python 2; under Python 3 `/` yields floats and GridSpec
# indexing would fail -- confirm the target interpreter.
rows = len(nafms)/2+len(nafms)%2
figure = plt.figure(figsize=(10.8,3.6*rows))
#figure.suptitle('Bragg')
gs = matplotlib.gridspec.GridSpec( rows,cols, wspace=0.6, hspace=0.42)
import fetchdata
from uncertainties import unumpy
# One subplot per AFM size: plot the A2/A1 cross-section ratio versus
# detuning, with uncertainty propagation via `uncertainties.unumpy`.
for i,nafm in enumerate(nafms):
    detuning = 6.44
    a1, a2 = fetchdata.fetch_data_A1A2( {'afmsize':nafm, 'ai':0.}, 'det', datfile )
    # Put the units in the cross section
    sunits = 9 * (671e-7**2) / 16 / ( np.pi**2)
    a1[:,1] = sunits*a1[:,1]
    a1[:,2] = sunits*a1[:,2]
    a2[:,1] = sunits*a2[:,1]
    a2[:,2] = sunits*a2[:,2]
    # NOTE(review): bare expression with no effect -- probably leftover.
    i % len(nafms)
    ax = plt.subplot( gs[ i%rows, i/rows] )
    ax.set_title('AFM = %d sites' % nafm)
    # Columns: [:,1] = value, [:,2] = standard deviation.
    a1s = unumpy.uarray( a1[:,1] , a1[:,2] )
    a2s = unumpy.uarray( a2[:,1] , a2[:,2] )
    a2a1 = a2s/ a1s
    a2a1_mean = unumpy.nominal_values( a2a1 )
    a2a1_std = unumpy.std_devs( a2a1)
    #ax.errorbar( a1[:,0], a1[:,1], yerr=a1[:,2], \
    #             capsize=0., elinewidth = 1. ,\
    #             fmt='.', ecolor='red', mec='red', \
    #             mew=1., ms=5.,\
    #             marker='o', mfc='pink', \
    #             label="A1")
    #ax.errorbar( a2[:,0], a2[:,1], yerr=a2[:,2], \
    #             capsize=0., elinewidth = 1. ,\
    #             fmt='.', ecolor='green', mec='green', \
    #             mew=1., ms=5.,\
    #             marker='o', mfc='limegreen', \
    #             label="A2")
    #ax2 = ax.twinx()
    ax.errorbar( a2[:,0], a2a1_mean , yerr=a2a1_std, \
                 capsize=0., elinewidth = 1. ,\
                 fmt='.', ecolor='blue', mec='blue', \
                 mew=1., ms=5.,\
                 marker='o', mfc='lightblue', \
                 label="A2/A1")
    #ax2.set_ylabel('A2/A1')
    ax.set_ylabel('A2/A1')
    ax.grid()
    ax.set_xlabel('Detuning from state 2 ($\Gamma$)')
    #ax.set_ylabel('Cross section (cm$^{2}$)')
    if nafm == 40:
        ax.set_xlim(-10,10)
#plt.show()
figure.savefig('a2a1_detuning.png', dpi=140)
|
9,172 | 6efe3975f4d5d9f431391b3560c37a3e89e27f3d | # (c) 2019, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible_collections.arista.eos.tests.unit.compat.mock import patch
from ansible_collections.arista.eos.plugins.modules import eos_lacp_interfaces
from ansible_collections.arista.eos.tests.unit.modules.utils import (
set_module_args,
)
from .eos_module import TestEosModule, load_fixture
class TestEosLacpInterfacesModule(TestEosModule):
    """Unit tests for the eos_lacp_interfaces resource module.

    All network-facing calls are patched out; a fixture supplies the
    device's running config, and each test asserts the exact command
    list the module would push for a given desired state.
    """
    module = eos_lacp_interfaces
    def setUp(self):
        # Patch config read/write plus the resource-connection plumbing
        # so no test touches a real device.
        super(TestEosLacpInterfacesModule, self).setUp()
        self.mock_get_config = patch(
            "ansible_collections.ansible.netcommon.plugins.module_utils.network.common.network.Config.get_config"
        )
        self.get_config = self.mock_get_config.start()
        self.mock_load_config = patch(
            "ansible_collections.ansible.netcommon.plugins.module_utils.network.common.network.Config.load_config"
        )
        self.load_config = self.mock_load_config.start()
        self.mock_get_resource_connection_config = patch(
            "ansible_collections.ansible.netcommon.plugins.module_utils.network.common.cfg.base.get_resource_connection"
        )
        self.get_resource_connection_config = (
            self.mock_get_resource_connection_config.start()
        )
        self.mock_get_resource_connection_facts = patch(
            "ansible_collections.ansible.netcommon.plugins.module_utils.network.common.facts.facts.get_resource_connection"
        )
        self.get_resource_connection_facts = (
            self.mock_get_resource_connection_facts.start()
        )
        self.mock_edit_config = patch(
            "ansible_collections.arista.eos.plugins.module_utils.network.eos.providers.providers.CliProvider.edit_config"
        )
        self.edit_config = self.mock_edit_config.start()
        self.mock_execute_show_command = patch(
            "ansible_collections.arista.eos.plugins.module_utils.network.eos.facts.lacp_interfaces.lacp_interfaces.Lacp_interfacesFacts.get_device_data"
        )
        self.execute_show_command = self.mock_execute_show_command.start()
    def tearDown(self):
        # Stop every patcher started in setUp.
        super(TestEosLacpInterfacesModule, self).tearDown()
        self.mock_get_resource_connection_config.stop()
        self.mock_get_resource_connection_facts.stop()
        self.mock_edit_config.stop()
        self.mock_get_config.stop()
        self.mock_load_config.stop()
        self.mock_execute_show_command.stop()
    def load_fixtures(self, commands=None, transport="cli"):
        # Serve the canned running config as the device "show" output.
        def load_from_file(*args, **kwargs):
            return load_fixture("eos_lacp_interfaces_config.cfg")
        self.execute_show_command.side_effect = load_from_file
    def test_eos_lacp_interfaces_default(self):
        """Default state pushes both LACP settings for Ethernet1."""
        set_module_args(
            dict(
                config=[
                    dict(name="Ethernet1", port_priority=45, rate="normal")
                ]
            )
        )
        commands = [
            "interface Ethernet1",
            "lacp port-priority 45",
            "lacp rate normal",
        ]
        self.execute_module(changed=True, commands=commands)
    def test_eos_lacp_interfaces_default_idempotent(self):
        """Config already present in the fixture yields no commands."""
        set_module_args(dict(config=[dict(name="Ethernet2", rate="fast")]))
        self.execute_module(changed=False, commands=[])
    def test_eos_lacp_interfaces_merged(self):
        """Merged state adds settings without clearing others."""
        set_module_args(
            dict(
                config=[
                    dict(name="Ethernet1", port_priority=45, rate="normal"),
                    dict(name="Ethernet2", rate="normal"),
                ],
                state="merged",
            )
        )
        commands = [
            "interface Ethernet1",
            "lacp port-priority 45",
            "lacp rate normal",
            "interface Ethernet2",
            "lacp rate normal",
        ]
        self.execute_module(changed=True, commands=commands)
    def test_eos_lacp_interfaces_merged_idempotent(self):
        set_module_args(
            dict(config=[dict(name="Ethernet2", rate="fast")], state="merged")
        )
        self.execute_module(changed=False, commands=[])
    # Bug #64453
    # def test_eos_lacp_interfaces_replaced(self):
    #     set_module_args(dict(
    #         config=[dict(
    #             name="Ethernet1",
    #             port_priority=45,
    #             rate="normal"
    #         )], state="replaced"
    #     ))
    #     commands = ['interface Ethernet1', 'lacp port-priority 45', 'lacp rate normal']
    #     self.execute_module(changed=True, commands=commands)
    def test_eos_lacp_interfaces_replaced_idempotent(self):
        set_module_args(
            dict(
                config=[dict(name="Ethernet2", rate="fast")], state="replaced"
            )
        )
        self.execute_module(changed=False, commands=[])
    def test_eos_lacp_interfaces_overridden(self):
        """Overridden state also strips LACP config from other interfaces."""
        set_module_args(
            dict(
                config=[
                    dict(name="Ethernet1", port_priority=45, rate="normal")
                ],
                state="overridden",
            )
        )
        commands = [
            "interface Ethernet1",
            "lacp port-priority 45",
            "lacp rate normal",
            "interface Ethernet2",
            "no lacp port-priority",
            "no lacp rate",
        ]
        self.execute_module(changed=True, commands=commands)
    def test_eos_lacp_interfaces_overridden_idempotent(self):
        set_module_args(
            dict(
                config=[
                    dict(name="Ethernet1", port_priority=30),
                    dict(name="Ethernet2", rate="fast"),
                ],
                state="overridden",
            )
        )
        self.execute_module(changed=False, commands=[])
    def test_eos_lacp_interfaces_deleted(self):
        """Deleted state removes the interface's LACP configuration."""
        set_module_args(dict(config=[dict(name="Ethernet2")], state="deleted"))
        commands = ["interface Ethernet2", "no lacp rate"]
        self.execute_module(changed=True, commands=commands)
|
9,173 | 0aa95b6a72472e8e260c07f4c42a327384ca0da4 | from Psql_Database_Setup import *
import requests, json
# Connect to the local Postgres database and bind the ORM metadata.
engine = create_engine('postgresql://myuser:mypass@localhost:5432/mydb')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()

# Fetch GitHub's emoji name -> image-URL mapping and parse the JSON body.
response = requests.get("https://api.github.com/emojis")
response = json.loads(response.text)

# Stage one row per emoji, then commit them all in a single transaction.
for emoji_name, emoji_url in response.items():
    session.add(Emojis(name=emoji_name, url=emoji_url))
session.commit()
9,174 | f51a21ed71ede4e7462d9c77cb932a5f05b09e71 | # import core modules and community packages
import sys, math, random
import pygame
# import configuration settings
from src.config import *
from src.board.levels import LEVEL_1
# import game elements
from src.pucman import Pucman
from src.ghast import Ghast
from src.board.board import Board
class Session():
    """One game session: builds the board and sprites, runs the loop."""

    def __init__(self, MODE="PLAYING"):
        """Initialize pygame and construct the board, player, and ghasts.

        MODE selects the tick rate (game speed) via TICK_RATE.
        """
        pygame.init()

        board = Board(
            size=BOARD_SIZE,
            color=COLOR['BACKGROUND'],
            level=LEVEL_1
        )
        pucman = Pucman(
            start=board.findUniquePos(BOARD_ELEMENT_MAP['PUCMAN_START']),
            size=board.tileSize,
            color=COLOR['PUCMAN'],
            MODE=MODE
        )
        # All four ghasts spawn at the ghast-spawn tile and differ only
        # by display name and colour.
        ghasts = {
            name: Ghast(
                name=name.capitalize(),
                start=board.findUniquePos(BOARD_ELEMENT_MAP['GHAST_SPAWN']),
                size=board.tileSize,
                color=COLOR[name.upper()]
            )
            for name in ("blinky", "pinky", "inky", "clyde")
        }
        self.board = board
        self.pucman = pucman
        self.ghasts = ghasts
        self.clock = pygame.time.Clock()
        self.MODE = MODE

    def start(self):
        """Run the main loop until a ghast catches Puc-Man."""
        self.board.draw()
        session = True
        while session:
            # Fixed tick rate for the current mode.
            self.clock.tick(TICK_RATE[self.MODE])
            # Update player state from input.
            self.pucman.move(self.board)
            # Ghast AI: move toward the player; a collision ends the game.
            for sprite in self.ghasts.values():
                sprite.move(self.pucman.pos, self.board)
                if sprite.atPucman(self.pucman.pos):
                    session = False
                    print("You died to " + sprite.name)
            # Redraw back-to-front: board, player, ghasts.
            self.board.draw()
            self.pucman.draw(self.board)
            for sprite in self.ghasts.values():
                # Fix: original passed `self.board._` (nonexistent
                # attribute -> AttributeError); pass the board itself,
                # matching pucman.draw above.
                sprite.draw(self.board)
            pygame.display.update()
9,175 | 33b68246dd3da9561c1d4adb5a3403cba656dcee | from django.conf.urls import url
from . import views
urlpatterns = [
    # Read-only node statistics.
    url(r'^stats/$', views.get_stats, name='stats'),
    url(r'^follow/me/$', views.follow_me, name='follow_me'),
    url(r'^follower/confirm/$', views.confirm_follower, name='follower_confirm'),
    # NOTE(review): the three patterns below have no trailing `$`, so
    # they also match any longer path -- confirm this is intentional.
    url(r'^execute/', views.execute, name='executed'),
    url(r'^output/', views.update_output, name='output'),
    url(r'^lead/', views.lead_nodes, name='lead'),
]
|
9,176 | 5bfaadcd54aaf239d0d89158bfb723c0174c56b1 | import sys
from elftools.elf.elffile import ELFFile
from capstone import *
def process_file(filename):
    """Disassemble an ELF binary's .text section and dump data sections.

    Prints one line per x86-64 instruction (address, mnemonic, operands)
    followed by the raw contents of .rodata, .plt, .data and .bss.
    Fix: the original used Python-2-only `print` statements; these
    single-argument `print(...)` calls behave identically on Python 2
    and run on Python 3.
    """
    with open(filename, 'rb') as f:
        elffile = ELFFile(f)
        code = elffile.get_section_by_name('.text')
        rodata = elffile.get_section_by_name('.rodata')
        plt = elffile.get_section_by_name('.plt')
        data = elffile.get_section_by_name('.data')
        bss = elffile.get_section_by_name('.bss')
        opcodes = code.data()
        # Virtual address where .text is loaded; used as the disasm base.
        addr = code['sh_addr']
        md = Cs(CS_ARCH_X86, CS_MODE_64)
        for i in md.disasm(opcodes, addr):
            print("0x%x:\t%s\t%s\t" % (i.address, i.mnemonic, i.op_str))
        print("\n\nrodata:\n")
        print(rodata.data())
        print("\n\nplt\n")
        print(plt.data())
        print("\n\ndata\n")
        print(data.data())
        print("\n\nbss\n")
        print(bss.data())
if __name__ == '__main__':
    # Usage: script.py <elf-file>; silently does nothing otherwise.
    if len(sys.argv) == 2:
        process_file(sys.argv[1])
|
9,177 | 2985360c1e2d03c619ea2994c609fdf8c033bebd | #!/usr/bin/env python
import rospy
import numpy as np
import time
import RPi.GPIO as GPIO
from ccn_raspicar_ros.msg import RaspiCarWheel
from ccn_raspicar_ros.msg import RaspiCarWheelControl
from ccn_raspicar_ros.srv import RaspiCarMotorControl
class MotorControl(object):
    """Differential-drive motor driver built on four GPIO PWM channels.

    control_pin order: [right-forward, right-backward, left-forward,
    left-backward] (BOARD numbering).  `dc_level` is the nominal duty
    cycle (percent); `balance` biases power between the wheels so the car
    tracks straight: r_level = l_level * balance, with their mean equal
    to dc_level.
    """
    def __init__(self, control_pin=[16, 18, 11, 13], t=0.1, dc_level=80, balance=1.0, pwm_freq=500):
        # NOTE(review): mutable default list is shared across calls; harmless
        # here because it is never mutated.
        self.control_pin = control_pin
        self.t = t  # default motion duration (seconds) when t is not given
        self.balance = balance
        # Split dc_level between the wheels so their mean stays dc_level.
        self.l_level = dc_level * 2 / (balance + 1)
        self.r_level = self.l_level * balance
        GPIO.setmode(GPIO.BOARD)
        for pin in range(4):
            GPIO.setup(control_pin[pin], GPIO.OUT, initial=GPIO.LOW)
        self.pwm_r1 = GPIO.PWM(control_pin[0], pwm_freq)
        self.pwm_r2 = GPIO.PWM(control_pin[1], pwm_freq)
        self.pwm_l1 = GPIO.PWM(control_pin[2], pwm_freq)
        self.pwm_l2 = GPIO.PWM(control_pin[3], pwm_freq)
        self.pwm_r1.start(0)
        self.pwm_r2.start(0)
        self.pwm_l1.start(0)
        self.pwm_l2.start(0)
    def stop(self):
        """Set every channel to 0% duty cycle (coast to a stop)."""
        self.pwm_r1.ChangeDutyCycle(0)
        self.pwm_r2.ChangeDutyCycle(0)
        self.pwm_l1.ChangeDutyCycle(0)
        self.pwm_l2.ChangeDutyCycle(0)
    def forward(self, speed=1.0, t=None):
        """Drive both wheels forward for t seconds (default self.t), then stop."""
        self.pwm_r1.ChangeDutyCycle(self.r_level*speed)
        self.pwm_r2.ChangeDutyCycle(0)
        self.pwm_l1.ChangeDutyCycle(self.l_level*speed)
        self.pwm_l2.ChangeDutyCycle(0)
        if t is None:
            time.sleep(self.t)
        else:
            time.sleep(t)
        self.stop()
    def backward(self, speed=0.8, t=None):
        """Drive both wheels backward for t seconds (default self.t), then stop."""
        self.pwm_r1.ChangeDutyCycle(0)
        self.pwm_r2.ChangeDutyCycle(self.r_level*speed)
        self.pwm_l1.ChangeDutyCycle(0)
        self.pwm_l2.ChangeDutyCycle(self.l_level*speed)
        if t is None:
            time.sleep(self.t)
        else:
            time.sleep(t)
        self.stop()
    def turn_left(self, speed=0.6, t=None):
        """Pivot left: right wheel forward, left wheel idle; then stop."""
        self.pwm_r1.ChangeDutyCycle(self.r_level*speed)
        self.pwm_r2.ChangeDutyCycle(0)
        self.pwm_l1.ChangeDutyCycle(0)
        self.pwm_l2.ChangeDutyCycle(0)
        if t is None:
            time.sleep(self.t)
        else:
            time.sleep(t)
        self.stop()
    def turn_right(self, speed=0.6, t=None):
        """Pivot right: left wheel forward, right wheel idle; then stop."""
        self.pwm_r1.ChangeDutyCycle(0)
        self.pwm_r2.ChangeDutyCycle(0)
        self.pwm_l1.ChangeDutyCycle(self.l_level*speed)
        self.pwm_l2.ChangeDutyCycle(0)
        if t is None:
            time.sleep(self.t)
        else:
            time.sleep(t)
        self.stop()
    def arbitrary_speed(self, speed=[1.0, 1.0], t=None):
        """Drive each wheel independently; speed = [right, left] in [-1, 1].

        Negative values reverse the wheel.  A value of exactly 0 leaves
        that wheel's previous duty cycle untouched (call stop() first if
        needed).  With t=None the motors keep running; otherwise they run
        for t seconds and then stop.
        """
        if 0 < speed[0]:
            self.pwm_r1.ChangeDutyCycle(self.r_level * speed[0])
            self.pwm_r2.ChangeDutyCycle(0)
        elif speed[0] < 0:
            self.pwm_r1.ChangeDutyCycle(0)
            # BUGFIX: speed[0] is negative here; negate it so the value
            # passed to ChangeDutyCycle stays within the required [0, 100].
            self.pwm_r2.ChangeDutyCycle(self.r_level * -speed[0])
        if 0 < speed[1]:
            self.pwm_l1.ChangeDutyCycle(self.l_level * speed[1])
            self.pwm_l2.ChangeDutyCycle(0)
        elif speed[1] < 0:
            self.pwm_l1.ChangeDutyCycle(0)
            # BUGFIX: same negation for the left wheel.
            self.pwm_l2.ChangeDutyCycle(self.l_level * -speed[1])
        if t is None:
            return
        else:
            time.sleep(t)
            self.stop()
    def cleanup(self):
        """Stop the motors, stop all PWM channels, and release the GPIO pins."""
        self.stop()
        self.pwm_r1.stop()
        self.pwm_r2.stop()
        self.pwm_l1.stop()
        self.pwm_l2.stop()
        GPIO.cleanup()
g_obstacle_detected = False
g_proximity = np.zeros([3])
g_wheel_count = np.zeros([2])
def turn_right_controlled(angle):
    """Closed-loop right turn of roughly `angle` degrees using encoder counts.

    Reads the global g_wheel_count (updated by the ROS subscriber callback)
    and pauses while g_obstacle_detected is set.  Overshoot is corrected
    with a small opposite turn before exiting.
    """
    # snapshot of the encoder counts at the start of the maneuver
    wheel_last = g_wheel_count
    # 4.45 encoder ticks per degree — presumably an empirically tuned constant; confirm
    count = angle / 4.45
    while not rospy.is_shutdown():
        if not g_obstacle_detected:
            if g_wheel_count[0] - wheel_last[0] < count:
                motor.turn_right(speed=0.9, t=0.05)
            elif g_wheel_count[0] - wheel_last[0] > count:
                # overshot: nudge back the other way once, then finish
                motor.turn_left(speed=0.8, t=0.03)
                break
            else:
                break
            time.sleep(0.05)
        else:
            # obstacle in the way: wait until it clears
            time.sleep(0.1)
def turn_left_controlled(angle):
    """Closed-loop left turn of roughly `angle` degrees (mirror of
    turn_right_controlled, tracking the left wheel's encoder count)."""
    wheel_last = g_wheel_count
    # 4.45 encoder ticks per degree — presumably an empirically tuned constant; confirm
    count = angle / 4.45
    while not rospy.is_shutdown():
        if not g_obstacle_detected:
            if g_wheel_count[1] - wheel_last[1] < count:
                motor.turn_left(speed=0.9, t=0.05)
            elif g_wheel_count[1] - wheel_last[1] > count:
                # overshot: nudge back the other way once, then finish
                motor.turn_right(speed=0.8, t=0.03)
                break
            else:
                break
            time.sleep(0.05)
        else:
            # obstacle in the way: wait until it clears
            time.sleep(0.1)
def forward_controlled(distance):
    """Drive forward `distance` (meters, presumably — confirm units) using
    encoder feedback, steering to keep both wheels in step."""
    wheel_last = g_wheel_count
    # 0.0113 distance units per encoder tick — empirically tuned; confirm
    count = distance / 0.0113
    while not rospy.is_shutdown():
        if not g_obstacle_detected:
            diff_of_both = g_wheel_count - wheel_last
            # average of both wheels measures distance travelled so far
            if np.sum(diff_of_both)/2.0 < count:
                motor.forward(speed=1.0, t=0.05)
            else:
                break
            # steer correction: if one wheel ran ahead, turn toward it briefly
            diff_between = diff_of_both[0] - diff_of_both[1]
            if diff_between > 0:
                motor.turn_left(speed=0.7, t=0.03 + diff_between * 0.005)
            elif diff_between < 0:
                motor.turn_right(speed=0.7, t=0.03 - diff_between * 0.005)
            time.sleep(0.05)
        else:
            # obstacle in the way: wait until it clears
            time.sleep(0.1)
def callback_RaspiCarWheel(data):
    """ROS subscriber callback: publish the latest wheel-encoder counts
    into the module-global g_wheel_count (a fresh ndarray each time, so
    snapshots taken by the motion loops stay valid)."""
    global g_wheel_count
    g_wheel_count = np.array(data.wheel_count)
def handle_RaspiCarMotorControl_request(request):
    """ROS service handler: parse a textual command and drive the robot.

    Commands: 'test', 'fwd[:distance]', 'right[:degrees]', 'left[:degrees]'.
    Returns an (ack-message, status) tuple for the service response; an
    unrecognized command yields ('error', 'ok.').
    """
    print(request)
    command = request.command
    if command.startswith('test'):
        return 'ack test', 'ok.'
    elif command.startswith('fwd'):
        try:
            value = float(command.split(':')[1])
        # BUGFIX: list indexing raises IndexError, not KeyError, so the
        # original default never applied when no ':' was given.
        except IndexError:
            value = 0.1
        except ValueError:
            value = 0
        forward_controlled(value)
        return 'ack fwd:%f' % value, 'ok.'
    elif command.startswith('right'):
        try:
            value = float(command.split(':')[1])
        except IndexError:  # BUGFIX: was KeyError (never fires for indexing)
            value = 10
        except ValueError:
            value = 0
        turn_right_controlled(value)
        return 'ack right:%f' % value, 'ok.'
    elif command.startswith('left'):
        try:
            value = float(command.split(':')[1])
        except IndexError:  # BUGFIX: was KeyError (never fires for indexing)
            value = 10
        except ValueError:
            value = 0
        turn_left_controlled(value)
        return 'ack left:%f' % value, 'ok.'
    else:
        return 'error', 'ok.'
if __name__ == '__main__':
    # Bring up the motor driver, then register the ROS node, the encoder
    # subscriber and the motor-control service; spin until shutdown.
    motor = MotorControl(dc_level=70, t=0.3)
    rospy.loginfo('[motor_control] up and running...')
    try:
        rospy.init_node('RaspiCarMotorControl_node', anonymous=False)
        rospy.Subscriber('RaspiCarWheel', RaspiCarWheel, callback_RaspiCarWheel)
        s = rospy.Service('RaspiCarMotorControl', RaspiCarMotorControl, handle_RaspiCarMotorControl_request)
        rospy.spin()
    except rospy.ROSInterruptException as e:
        rospy.loginfo(e)
    finally:
        # always release the GPIO pins, even on interrupt
        motor.cleanup()
|
9,178 | 2bc9c0711831d9ed9009d0f9600153709bbcd6da | '''
Created on Sep 4, 2014
@author: Jay <smile665@gmail.com>
'''
import socket
def ip_validation(ip):
    '''
    Return True when *ip* parses as a valid IPv4 dotted address,
    False otherwise.
    '''
    try:
        socket.inet_aton(ip)
    except socket.error:
        return False
    return True
def connection_validation(ip, port):
    '''
    Check whether ip:port accepts a TCP connection (2 second timeout).
    @param port: the port is an integer.
    '''
    if not ip_validation(ip):
        return False
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.settimeout(2)
        # connect_ex returns 0 on success, an errno otherwise
        return sock.connect_ex((ip, port)) == 0
    finally:
        # BUGFIX: the original leaked the socket; always release it.
        sock.close()
if __name__ == '__main__':
    # Ad-hoc manual check (Python 2 print statements).
    ip = '192.168.213.11'
    port = 90
    print ip_validation(ip)
    print connection_validation(ip, port)
|
9,179 | fc8b9029955de6b11cbfe8e24107c687f49685c1 | from rest_framework import serializers
from .models import Good, Favorite, Comment
class GoodSerializer(serializers.ModelSerializer):
    """Serializer exposing a Good's user, article and creation time."""
    class Meta:
        model = Good
        fields = ('user', 'article', 'created_at')
class FavoriteSerializer(serializers.ModelSerializer):
    """Serializer exposing a Favorite's user, article and creation time."""
    class Meta:
        model = Favorite
        fields = ('user', 'article', 'created_at')
class CommentSerializer(serializers.ModelSerializer):
    """Serializer for Comment, including its text and optional image."""
    class Meta:
        model = Comment
        fields = ('text', 'image', 'user', 'article', 'created_at')
|
9,180 | e0fd9663a5635873f4ffc0f73aff5106c0933781 | from django import forms
from .models import Note
class NoteForm(forms.ModelForm):
    """Basic Note form: title and text only."""
    class Meta:
        model = Note
        fields = ['title','text']
class NoteFullForm(NoteForm):
    """Extended Note form adding multi-image upload, tags and an optional
    note_id (presumably used to target an existing note — confirm in views)."""
    # optional id of an existing note to update
    note_id = forms.IntegerField(required=False)
    # multiple-file input; validation/storage handled by the view
    images = forms.FileField(widget=forms.ClearableFileInput(attrs={'multiple': True}),required=False)
    # free-text tag list, comma-separated presumably — confirm against the view
    tags = forms.CharField(max_length=50, required=False)
    class Meta(NoteForm.Meta):
fields = NoteForm.Meta.fields + ['images','tags','note_id'] |
9,181 | a33ddb999f7bb50688b33946046ba460cbbbd172 | from backend.personal.models import User, UserState
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from backend.personal.views import produceRetCode, authenticated
from backend.utils.fetch.fetch import fetch_curriculum
from backend.univinfo.models import Professor, Section, Course
from backend.univinfo.serializers import CourseSerializer
from backend.curriculum.models import CourseItem, Review
from backend.curriculum.serializers import CourseItemSerializer, ReviewSerializer
import datetime
# Registry mapping a university's shortname to the module that parses its
# raw curriculum data (see fetchCurriculum below).
_data_processor = {}
from backend.utils.process import _UCB
from backend.utils.process import _PU
_data_processor['UCB'] = _UCB
_data_processor['PU'] = _PU
@api_view(['POST'])
@authenticated
def fetchCurriculum(request):
    """Fetch and import the caller's curriculum from their university EAS.

    Requires 'eas_id', 'eas_pwd' and 'semester' in the request payload;
    the raw data is parsed by the university-specific processor registered
    in _data_processor.
    """
    university = request.DATA['user'].university.shortname
    if university == 'Unknown':
        ret = produceRetCode('fail', 'university not supported')
        return Response(ret, status=status.HTTP_202_ACCEPTED)
    try:
        eas_id = request.DATA['eas_id']
        eas_pwd = request.DATA['eas_pwd']
    except KeyError:
        ret = produceRetCode('fail', 'eas id and eas pwd required')
        return Response(ret, status=status.HTTP_202_ACCEPTED)
    try:
        semester = request.DATA['semester']
    except KeyError:
        ret = produceRetCode('fail', 'semester required')
        return Response(ret, status=status.HTTP_202_ACCEPTED)
    fetched = fetch_curriculum(university, eas_id, eas_pwd, semester)
    # offline test fixture, kept for reference:
    #import pickle
    #with open('data.pickle', 'rb') as f:
    #    fetched = pickle.load(f)
    if fetched['status'] == 'success':
        # delegate parsing/persistence to the per-university processor
        ret = _data_processor[university].process(fetched['raw-data'], semester, request.DATA['user'])
        return Response(ret, status=status.HTTP_200_OK)
    else:
        ret = produceRetCode('fail', fetched['message'])
        return Response(ret, status=status.HTTP_202_ACCEPTED)
@api_view(['POST'])
@authenticated
def getCourseList(request):
    """Return the caller's course items whose section is currently running
    (section.start <= now <= section.end)."""
    courses = CourseItem.objects.filter(user=request.DATA['user'].id).filter(section__start__lte=datetime.datetime.now()).filter(section__end__gte=datetime.datetime.now())
    serializer = CourseItemSerializer(courses, many=True)
    ret = produceRetCode('success', '', serializer.data)
    return Response(ret, status=status.HTTP_200_OK)
def authreview(method):
    """Decorator: resolve request.DATA['rid'] to a Review owned by the caller.

    On success the Review instance is stored in request.DATA['review'] and
    the wrapped view runs; otherwise a fail Response is returned directly.
    """
    def wrapper(request):
        try:
            rid = request.DATA['rid']
        except KeyError:
            ret = produceRetCode('fail', 'rid required')
            return Response(ret, status=status.HTTP_202_ACCEPTED)
        try:
            review = Review.objects.get(id=rid)
        except Review.DoesNotExist:
            ret = produceRetCode('fail', 'review does not exist')
            return Response(ret, status=status.HTTP_202_ACCEPTED)
        # NOTE(review): compares review.user to the caller's numeric id —
        # assumes Review.user stores a raw id rather than a User object; confirm.
        if review.user == request.DATA['user'].id:
            request.DATA['review'] = review
        else:
            ret = produceRetCode('fail', 'permission denied')
            return Response(ret, status=status.HTTP_202_ACCEPTED)
        return method(request)
    return wrapper
@api_view(['POST'])
@authenticated
def setReview(request):
    """Create or update the caller's review of a section (is_course truthy)
    or a professor (is_course falsy), keeping the target's aggregate
    rating (rate / ratecount) in sync.

    Responds 200 on success, 202 with a fail code on any validation or
    computation error.
    """
    # store the raw user id, which is what ReviewSerializer expects
    request.DATA['user'] = request.DATA['user'].id
    try:
        is_course = request.DATA['is_course']
    except KeyError:
        ret = produceRetCode('fail', 'is_course flag required')
        return Response(ret, status=status.HTTP_202_ACCEPTED)
    if is_course:
        try:
            section = request.DATA['section']
        except KeyError:
            ret = produceRetCode('fail', 'section id required')
            return Response(ret, status=status.HTTP_202_ACCEPTED)
        try:
            section = Section.objects.get(id=section)
        except Section.DoesNotExist:
            ret = produceRetCode('fail', 'section does not exist')
            return Response(ret, status=status.HTTP_202_ACCEPTED)
        try:
            review = Review.objects.get(user=request.DATA['user'], section=section.id)
        except Review.DoesNotExist:
            # First review from this user for the section: create it.
            serializer = ReviewSerializer(data=request.DATA)
            if serializer.is_valid():
                serializer.save()
                try:
                    # incremental mean update with the new rating
                    section.rate = (section.rate * section.ratecount + request.DATA['rate']) / (section.ratecount + 1)
                    section.ratecount = section.ratecount + 1
                    section.save()
                except Exception:
                    ret = produceRetCode('fail', 'computing error')
                    return Response(ret, status=status.HTTP_202_ACCEPTED)
                # BUGFIX: the original fell through here and crashed on an
                # unbound `review`; return success after creation instead.
                ret = produceRetCode('success')
                return Response(ret, status=status.HTTP_200_OK)
            ret = produceRetCode('fail', 'add review data format error')
            return Response(ret, status=status.HTTP_202_ACCEPTED)
        # Existing review: update it and rebalance the aggregate rating.
        serializer = ReviewSerializer(review, data=request.DATA)
        if serializer.is_valid():
            serializer.save()
            try:
                # swap the old rating out of the mean and the new one in
                section.rate = (section.rate * section.ratecount - review.rate + request.DATA['rate']) / section.ratecount
                section.save()
            except Exception:
                ret = produceRetCode('fail', 'rate computing error')
                return Response(ret, status=status.HTTP_202_ACCEPTED)
            # BUGFIX: the original returned None here; report success.
            ret = produceRetCode('success')
            return Response(ret, status=status.HTTP_200_OK)
        ret = produceRetCode('fail', 'change review data format error')
        return Response(ret, status=status.HTTP_202_ACCEPTED)
    else:
        try:
            professor = request.DATA['professor']
        except KeyError:
            ret = produceRetCode('fail', 'professor id required')
            return Response(ret, status=status.HTTP_202_ACCEPTED)
        try:
            professor = Professor.objects.get(id=professor)
        except Professor.DoesNotExist:
            ret = produceRetCode('fail', 'professor does not exist')
            return Response(ret, status=status.HTTP_202_ACCEPTED)
        try:
            review = Review.objects.get(user=request.DATA['user'], professor=professor.id)
        except Review.DoesNotExist:
            # First review from this user for the professor: create it.
            serializer = ReviewSerializer(data=request.DATA)
            if serializer.is_valid():
                serializer.save()
                try:
                    professor.rate = (professor.rate * professor.ratecount + request.DATA['rate']) / (professor.ratecount + 1)
                    professor.ratecount = professor.ratecount + 1
                    professor.save()
                except Exception:
                    ret = produceRetCode('fail', 'rate computing error')
                    return Response(ret, status=status.HTTP_202_ACCEPTED)
                # BUGFIX: same missing return as the section branch.
                ret = produceRetCode('success')
                return Response(ret, status=status.HTTP_200_OK)
            ret = produceRetCode('fail', 'review data format error')
            return Response(ret, status=status.HTTP_202_ACCEPTED)
        # Existing review: update it and rebalance the aggregate rating.
        serializer = ReviewSerializer(review, data=request.DATA)
        if serializer.is_valid():
            serializer.save()
            try:
                professor.rate = (professor.rate * professor.ratecount - review.rate + request.DATA['rate']) / professor.ratecount
                professor.save()
            except Exception:
                ret = produceRetCode('fail', 'rate computing error')
                return Response(ret, status=status.HTTP_202_ACCEPTED)
            # BUGFIX: the original returned None here; report success.
            ret = produceRetCode('success')
            return Response(ret, status=status.HTTP_200_OK)
        ret = produceRetCode('fail', 'review data format error')
        return Response(ret, status=status.HTTP_202_ACCEPTED)
@api_view(['POST'])
@authenticated
@authreview
def getReview(request):
    """Return the serialized review resolved by @authreview (which
    validates ownership and stashes it in request.DATA['review'])."""
    # BUGFIX: the original referenced an undefined name `data`.
    serializer = ReviewSerializer(request.DATA['review'])
    ret = produceRetCode('success', '', serializer.data)
    # NOTE(review): success is reported with 202 here while sibling views
    # use 200 — kept as-is for API compatibility; confirm intent.
    return Response(ret, status=status.HTTP_202_ACCEPTED)
@api_view(['POST'])
@authenticated
@authreview
def alterReview(request):
    """Overwrite the caller's review (resolved by @authreview) with the
    submitted data."""
    # BUGFIX: the original referenced an undefined name `review`; use the
    # instance @authreview stored on the request.
    serializer = ReviewSerializer(request.DATA['review'], data=request.DATA)
    if serializer.is_valid():
        serializer.save()
        ret = produceRetCode('success')
        return Response(ret, status=status.HTTP_200_OK)
    else:
        ret = produceRetCode('fail', 'review data format error')
        return Response(ret, status=status.HTTP_202_ACCEPTED)
@api_view(['POST'])
@authenticated
@authreview
def deleteReview(request):
    """Delete the caller's review resolved by @authreview.

    NOTE(review): the target's aggregate rate/ratecount is not rebalanced
    here, unlike setReview — confirm whether that is intentional.
    """
    request.DATA['review'].delete()
    ret = produceRetCode('success')
    return Response(ret, status=status.HTTP_200_OK)
|
9,182 | 79e4e37fc17462508abf259e3a7861bd76797280 |
import unittest
import BasicVmLifecycleTestBase
class testVmIsAccessibleViaSsh(BasicVmLifecycleTestBase.VmIsAccessibleViaSshTestBase):
    """SSH-accessibility test parameterized for the 'cernvm' image."""
    vmName = 'cernvm'
    # overall test timeout: 20 minutes (seconds)
    timeout = 20*60
    # SSH connection timeout: 5 minutes (seconds)
    sshTimeout = 5*60
def suite():
    """Build a TestSuite holding every test in testVmIsAccessibleViaSsh."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(testVmIsAccessibleViaSsh)
|
9,183 | 4b773fbf45d15dff27dc7bd51d6636c5f783477b |
from pyspark import SparkContext, SparkConf
import time
# Create a basic configuration
conf = SparkConf().setAppName("myTestCopyApp")
# Create a SparkContext using the configuration
sc = SparkContext(conf=conf)
print("START")
# Keep the application alive for 30s — presumably so it stays visible in
# the Spark UI while running; confirm.
time.sleep(30)
print("END")
|
9,184 | a61bc654eecb4e44dce3e62df752f80559a2d055 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 Vincent Celis
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import webapp2
import handlers
# A list containing webapp2.Route instances to define the routing tables
# Routing table for the wiki app.  Order matters: more specific prefixes
# (api/history, api, _edit, _history) precede the catch-all WikiPage route.
ROUTE_LIST = [
    webapp2.Route(r'/api/history<name:/(?:[a-zA-Z0-9_-]+/?)*>',
                  handler=handlers.HistoryApi, name='historyApi'),
    webapp2.Route(r'/api<name:/(?:[a-zA-Z0-9_-]+/?)*>',
                  handler=handlers.PageApi, name='pageApi'),
    webapp2.Route(r'/signup', handler=handlers.SignupPage, name='signup'),
    webapp2.Route(r'/login', handler=handlers.LoginPage, name='login'),
    webapp2.Route(r'/logout', handler=handlers.LogoutPage, name='logout'),
    webapp2.Route(r'/search', handler=handlers.SearchPage, name='search'),
    webapp2.Route(r'/_edit<name:/(?:[a-zA-Z0-9_-]+/?)*>',
                  handler=handlers.EditPage, name='edit'),
    webapp2.Route(r'/_history<name:/(?:[a-zA-Z0-9_-]+/?)*>',
                  handler=handlers.HistoryPage, name='history'),
    # catch-all: any other path is treated as a wiki page name
    webapp2.Route(r'<name:/(?:[a-zA-Z0-9_-]+/?)*>',
                  handler=handlers.WikiPage, name='wiki')
] |
9,185 | 31e5b249516f4e9d57d8fd82713966a69e0516b4 | from django.urls import path
from .consumers import NotificationsConsumer
# WebSocket routing for Django Channels.
# NOTE(review): passing the consumer class directly is Channels 2 style;
# Channels 3+ requires NotificationsConsumer.as_asgi() — confirm version.
websocket_urlpatterns = [
    path('ws/notifications', NotificationsConsumer),
]
|
9,186 | 5dc6b54357df87077d8159192cd52697b2616db8 | from django.test import TestCase, SimpleTestCase
from django.urls import reverse, resolve
from .views import profile, order_history
""" Url Testing """
class TestUrls(SimpleTestCase):
    """Verify that the named URL patterns resolve to the expected views."""
    def test_profile_resolves(self):
        url = reverse('profile')
        self.assertEqual(resolve(url).func, profile)
    def test_order_history_resolves(self):
        # args='1' works because a string is an iterable of one character here
        url = reverse('order_history', args='1')
        self.assertEqual(resolve(url).func, order_history)
|
9,187 | ee10bca1126b20378c4e9cea4d2dc7ed6a2044ab | from flask import Blueprint, render_template
from bashtube import cache
# Flask blueprint serving the single-video page at the blueprint root.
singlevideos = Blueprint('singlevideos',__name__,template_folder='templates')
@singlevideos.route('/')
def index():
    """Render the single-video landing template."""
    return render_template('singlevideos/single.html')
|
9,188 | 178570047458eb3eeda00f9153ef2159eb4cbef3 | from svjesus.ffz import genContent
from svjesus.elements.Base import Element
class Descriptive(Element):
    """Base class for SVG descriptive elements (desc, metadata, title)."""
    def __init__(self):
        self.allowedChildren = () # TODO: Check what's allowed
# Descriptive elements
class Desc(Descriptive):
    """SVG <desc> element: textual description of its parent."""
    name = "desc"
    attrs = ()
class Metadata(Descriptive):
    """SVG <metadata> element: structured metadata for its parent."""
    name = "metadata"
    attrs = ()
class Title(Descriptive):
    """SVG <title> element: accessible short title for its parent."""
    name = "title"
attrs = () |
9,189 | 8cbe78863de535a5b83eacebe67402569b4015fa | A,B=map(str,input().split())
# NOTE(review): A and B are strings, so this is a lexicographic comparison,
# which differs from numeric order for multi-digit inputs — confirm intent.
if (A > B):
    print(A)
elif (B > A):
    print(B)
else:
    # A == B here, so either value may be printed.
    # BUGFIX: the original printed the undefined name `AorB`.
    print(A)
|
9,190 | 7c2897dcb732e75d7328e8c0484d5bd7f3b56e6f | """
Given a string s. Return all the words vertically in the same
order in which they appear in s.
Words are returned as a list of strings, complete with spaces
when is necessary. (Trailing spaces are not allowed).
Each word would be put on only one column and that in one column
there will be only one word.
Example 1:
Input: s = "HOW ARE YOU"
Output: ["HAY","ORO","WEU"]
Explanation: Each word is printed vertically.
"HAY"
"ORO"
"WEU"
Example 2:
Input: s = "TO BE OR NOT TO BE"
Output: ["TBONTB","OEROOE"," T"]
Explanation: Trailing spaces is not allowed.
"TBONTB"
"OEROOE"
" T"
"""
"""converting string to list of words"""
def StringToList(input_string):
    """Split input_string into words on single spaces.

    Matches the original hand-rolled scan: consecutive spaces yield empty
    words, and the final word is flushed when the last character is reached.
    """
    words = []
    current = ""
    last_index = len(input_string) - 1
    for idx, ch in enumerate(input_string):
        if ch == " ":
            words.append(current)
            current = ""
        elif idx == last_index:
            # end of string: flush the in-progress word including this char
            words.append(current + ch)
            current = ""
        else:
            current = current + ch
    return words
"""find length of longest word"""
def LongestWord(word_list):
    """Return the length of the longest word in word_list (0 when empty)."""
    return max(map(len, word_list), default=0)
"""converting list to word to vertical list"""
def return_vertically(input_string):
    """Return the words of input_string read vertically, one string per
    character position of the longest word, with trailing spaces stripped.

    e.g. "HOW ARE YOU" -> ["HAY", "ORO", "WEU"].
    """
    word_list = StringToList(input_string)
    longest_word = LongestWord(word_list)
    # BUGFIX: removed stray debug print() calls that polluted stdout.
    # One output row per character position of the longest word.
    vertical_list = ["" for _ in range(longest_word)]
    for word in word_list:
        for i in range(longest_word):
            # pad short words with a space so columns stay aligned
            vertical_list[i] = vertical_list[i] + (word[i] if i < len(word) else " ")
    # trailing spaces are not allowed in the result
    return [row.rstrip() for row in vertical_list]
if __name__ == "__main__":
    # quick manual check against the example from the problem statement
    input_string = "TO BE OR NOT TO BE"
    print(return_vertically(input_string))
|
9,191 | e85d3660968410b83b14ba610150c0c8cc880119 | import datetime # to add timestamps on every block in blockchain
import hashlib # library that is ued to hash the block
import json # to communicate in json data
# Flask to implement webservices jsonify to see the jsop message/response
# request help us to connect all the nodes of the blockchain together froming the p2p network
from flask import Flask, jsonify, request
# it will help us to verify that all the blockchain have same blockhain or not http requests (used in replace_cahin)
import requests
from uuid import uuid4
from urllib.parse import urlparse
# Building a Blockchain
class Blockchain:
    """Minimal in-memory blockchain: a chain of block dicts, a pool of
    pending transactions, and the set of known peer nodes."""
    def __init__(self):
        self.chain = []  # the main chain: a list of block dicts
        # pending transactions; moved into the next block when it is mined
        self.transactions = []
        # genesis block (proof=0, previous_hash='0' by convention)
        self.create_block(proof=0, previous_hash='0')
        # peer-node identifiers (host:port netloc strings); a set avoids duplicates
        self.nodes = set()
    # part1
    def create_block(self, proof, previous_hash):
        """Build a block from the pending transactions, append it, return it."""
        block = {  # plain dict so it JSON-serializes directly
            'index': len(self.chain)+1,
            'timestamp': str(datetime.datetime.now()),
            'proof': proof,  # the nonce found by proof_of_work
            'previous_hash': previous_hash,
            'transactions': self.transactions}
        self.transactions = []  # empty the pool so later blocks don't repeat them
        self.chain.append(block)
        return block
    def get_previous_block(self):
        """Return the most recently appended block."""
        return self.chain[-1]
    def proof_of_work(self, previous_proof):
        """Brute-force a proof whose puzzle hash has four leading zeros."""
        new_proof = 1
        check_proof = False
        while check_proof is False:
            hash_operation = hashlib.sha256(
                str(new_proof**2-previous_proof**2).encode()).hexdigest()
            if hash_operation[:4] == '0000':
                check_proof = True
            else:
                new_proof += 1
        return new_proof  # the winning nonce for the '0000'-prefix puzzle
    def hash(self, block):
        """SHA-256 of the block's canonical (key-sorted) JSON encoding."""
        encoded_block = json.dumps(block, sort_keys=True).encode()
        return hashlib.sha256(encoded_block).hexdigest()
    def is_chain_valid(self, chain):
        """Check hash links and proof difficulty across the whole chain."""
        # start from the genesis block
        previous_block = chain[0]
        block_index = 1  # required for iteration
        while block_index < len(chain):
            block = chain[block_index]  # current block
            # the stored previous_hash must equal the previous block's real hash
            if block['previous_hash'] != self.hash(previous_block):
                return False
            previous_proof = previous_block['proof']
            proof = block['proof']
            # re-check the proof-of-work puzzle (cheap compared to solving it)
            hash_operation = hashlib.sha256(
                str(proof**2 - previous_proof**2).encode()).hexdigest()
            # more leading zeros would make the block harder to mine
            if hash_operation[:4] != '0000':
                return False
            previous_block = block
            block_index += 1
        return True
    # functions used to get add the transactions to the lists
    def add_transaction(self, senders, receiver, amount):
        """Queue a transaction; return the index of the block that will hold it."""
        self.transactions.append({
            'senders': senders,
            'receiver': receiver,
            'amount': amount
        })
        previous_block = self.get_previous_block()
        # transactions are queued before mining, so they land in the next block
        return previous_block['index']+1
    # part-1 ends
    # part-3--> dealing with decentarlized application and transactions
    def add_node(self, address):
        """Register a peer node by the netloc (host:port) of its URL."""
        # parse the URL so only its network location is stored
        parsed_url = urlparse(address)
        # .netloc keeps just the host:port identifier of the node
        self.nodes.add(parsed_url.netloc)
    def replace_chain(self):
        """Consensus: adopt the longest valid chain among the known peers.

        Returns True when this node's chain was replaced, False when it is
        already the longest in the network.
        """
        max_length = len(self.chain)
        longest_chain = None
        network = self.nodes  # addresses (netlocs) of all peers
        for node in network:
            # ask each peer for its chain, e.g. http://127.0.0.1:5000/get_chain
            response = requests.get(f'http://{node}/get_chain')
            if response.status_code == 200:  # the peer answered
                length = response.json()['length']
                chain = response.json()['chain']
                if length > max_length and self.is_chain_valid(chain):
                    max_length = length
                    longest_chain = chain
        # this runs on every node of the network
        if longest_chain:
            # some peer has a longer valid chain: adopt it
            self.chain = longest_chain
            return True
        # this chain is already the longest in the network
        return False
    # part-3 ends
# part-3 ends
# Mining our Blockchain
app = Flask(__name__)
# Single blockchain instance shared by all request handlers below.
blockchain = Blockchain()
# Random unique address for this node; used as the "sender" of the mining
# reward transaction in mine_block.
node_address = str(uuid4()).replace('-', '')
# part-2
@app.route('/mine_block', methods=['GET'])
def mine_block():
    """Mine a new block: solve proof-of-work, credit the mining reward,
    and append the block (with all pending transactions) to the chain."""
    previous_block = blockchain.get_previous_block()
    previous_proof = previous_block['proof']
    proof = blockchain.proof_of_work(previous_proof)
    previous_hash = blockchain.hash(previous_block)
    # mining reward: 1 coin from this node's address to the miner
    blockchain.add_transaction(node_address, 'Bhavjot', 1)
    # create_block also drains the pending-transaction pool into this block
    block = blockchain.create_block(proof, previous_hash)
    response = {'message': 'Congratulations, you just mined a block! 😈😈😈😈😈🤓🤓🤓', # response is a json data
                'index': block['index'],
                'timestamp': block['timestamp'],
                'proof': block['proof'],
                'previous_hash': block['previous_hash'],
                'transactions': block['transactions']}
    return jsonify(response), 200
# getting all blocks in chain
@app.route('/get_chain', methods=['GET'])
def get_chain():
    """Return the full chain and its length (also consumed by peers'
    replace_chain consensus checks)."""
    response = {
        'chain': blockchain.chain,
        'length': len(blockchain.chain)
    }
    return jsonify(response), 200
# custom message
@app.route('/', methods=['GET'])
def custom_message():
    """Root endpoint: simple welcome message."""
    response = {
        'message': 'Congratulations you are on Whalecoin 🐳🐳🐳🐳🐳🐳'
    }
    return jsonify(response), 200
# part-2 ends
# creating the transactions
@app.route('/add_transactions', methods=['POST'])
def add_transaction():
    """Queue a transaction from a JSON body with sender/receiver/amount."""
    # NOTE: the local name `json` shadows the imported json module here
    json = request.get_json()
    # validate that all required fields are present in the payload
    transaction_keys = ['sender', 'receiver', 'amount']
    if not all(key in json for key in transaction_keys):
        return 'Some elements of the transaction are missing', 400
    # NOTE(review): Blockchain.add_transaction stores this under the key
    # 'senders' while the API field is 'sender' — confirm the mismatch is intended.
    index = blockchain.add_transaction(
        json['sender'], json['receiver'], json['amount'])
    # the transaction will be included when the indicated block is mined
    response = {'message': f'This transaction will be added to Block {index}'}
    return jsonify(response), 201
@app.route('/connect_node', methods=['POST'])
def connect_node():
    """Register a list of peer node URLs from the JSON body's 'nodes' key,
    e.g. {'nodes': ['http://127.0.0.1:5001', ...]}."""
    json = request.get_json()  # NOTE: shadows the imported json module locally
    nodes = json.get('nodes')
    if nodes is None:
        return "No node", 400
    for node in nodes:
        blockchain.add_node(node)  # stores each node's netloc
    response = {'message': 'All the nodes are now connected. The Whalecoin 🐳🐳🐳🐳🐳🐳 Blockchain now contains the following nodes:',
                'total_nodes': list(blockchain.nodes)}
    return jsonify(response), 201
# Replacing the chain by the longest chain if needed
# this function will present in every node of blockchain and always checked so that the node remain upadatesd with other blockchains by hitiing replace_chain URL
# Consensus endpoint: every node exposes this so peers stay in sync with
# the longest valid chain in the network.
@ app.route('/replace_chain', methods=['GET'])
def replace_chain():
    """Run the longest-chain consensus rule and report the outcome."""
    is_chain_replaced = blockchain.replace_chain()
    if is_chain_replaced:  # this node's chain was shorter and got replaced
        response = {'message': 'The nodes had different chains so the chain was replaced by the longest one.',
                    'new_chain': blockchain.chain}
    else:  # this node already holds the longest chain
        response = {'message': 'All good. The chain is the largest one.',
                    'actual_chain': blockchain.chain}
    return jsonify(response), 200
# Running the app
# host='0.0.0.0' binds on all interfaces so peers can reach this node
app.run(host='0.0.0.0', port=5001)
|
9,192 | d9b405d5159a153fb8d2f1991ceb3dc47f98bcbc | from app.View.view import View
class GameController:
    """Process-wide singleton that controls the game lifecycle."""
    instance = None

    @staticmethod
    def get_instance():
        """Return the single GameController, creating it on first use."""
        if GameController.instance is None:
            GameController()
        return GameController.instance

    def __init__(self):
        """Only one construction is allowed; later calls raise."""
        if GameController.instance is not None:
            raise Exception('this is a singleton!')
        GameController.instance = self

    def start_game(self):
        """Delegate to the view layer to begin the game."""
        View.start_game_view()
|
9,193 | 845d1251497df61dd2c23241016a049c695ad940 | #!/usr/bin/env python3
#coding=utf-8
import sys
import os
import tool
class BrandRegBasic(object):
    def __init__(self, base_folder, log_instance):
        """Load the brand dictionaries from base_folder.

        Expects real_brand.txt, error.txt, word_dict.txt and del_brand.txt
        to exist under base_folder; raises Exception when any is missing.
        """
        if not os.path.exists(base_folder):
            raise Exception("%s does not exists!" % base_folder)
        self._real_brand_p = base_folder + "/real_brand.txt"
        if not os.path.exists(self._real_brand_p):
            raise Exception("%s does not exists!" % self._real_brand_p)
        # Note: word_dict.txt and error.txt serve the same purpose —
        # both hold brand-rewrite pairs in the same format.
        self._error_p = base_folder + '/error.txt'
        if not os.path.exists(self._error_p):
            raise Exception("%s does not exists!" % self._error_p)
        self._word_dict_p = base_folder + '/word_dict.txt'
        if not os.path.exists(self._word_dict_p):
            raise Exception("%s does not exists!" % self._word_dict_p)
        self._del_brand_p = base_folder + '/del_brand.txt'
        if not os.path.exists(self._del_brand_p):
            raise Exception("%s does not exists!" % self._del_brand_p)
        self.logger = log_instance
        self.logger.info("get_real_brand")
        self.real_brand_set = self._get_real_brand()
        self.logger.info("get_exchange_brand_pair")
        self.exchange_brand_pair = self._get_exchange_brand_pair()
        self.logger.info("get_del_brand")
        self.del_brand_dict = self._get_del_brand()
#通过真实品牌这个文件获取到真实品牌的元组
def _get_real_brand(self):
# 根据real_brand进行品牌确定
if not os.path.exists(self._real_brand_p):
raise Exception("%s does not exist!" % self._real_brand_p)
real_brand_set = set()
with open(self._real_brand_p) as f1:
for line in f1:
line = line.strip()
if line == "": continue
real_brand_set.add(line)
self.logger.info("len of real_brand: %s" % len(real_brand_set))
return real_brand_set
# no-using
def _brand_pair_correction(self, exchange_dict, conflict_brand_set):
# Tips: {1:2, 2:3, 3:4}这种情况会有错误
tmp_dict = {}
for k, v in exchange_dict.items():
if k in conflict_brand_set:
right_brand = exchange_dict[k]
for k1, v1 in exchange_dict.items():
if v1 == k:
tmp_dict[k1] = right_brand
exchange_dict_ext = {}
for k2, v2 in exchange_dict.items():
if k2 == v2: continue
if k2 in conflict_brand_set: continue
if k2 in tmp_dict:
exchange_dict_ext[k2] = tmp_dict[k2]
else:
exchange_dict_ext[k2] = v2
return exchange_dict_ext
def _brand_pair_checking(self, exchange_dict):
s1 = set(list(exchange_dict.keys()))
s2 = set(list(exchange_dict.values()))
s3 = s1 & s2
if len(s3) > 0:
self.logger.error("exchang-brand-pair has error, error brands is: %s" % "\t".join(list(s3)))
return False, s3
else:
return True, None
def _get_exchange_brand_pair(self):
exchange_dict = {}
def _line_deal(line):
line = line.strip()
if line == "": return
lst1 = line.split("|")
if len(lst1) != 2:
self.logger.info("wrong brand pair: %s" % line)
return
lst1 = [z.strip() for z in lst1]
if lst1[0] != lst1[1]:
exchange_dict[lst1[0]] = lst1[1]
# 根据品牌确定的结果+error.txt获得需要修正的sname结果
if not os.path.exists(self._error_p):
self.logger.info("%s does not exist!" % self._real_brand_p)
else:
with open(self._error_p) as f1:
for line in f1:
_line_deal(line)
self.logger.info("len of exchang_brand_pair: %s" % len(exchange_dict))
if not os.path.exists(self._word_dict_p):
self.logger.info("%s does not exist!" % self._real_brand_p)
else:
with open(self._word_dict_p) as f1:
for line in f1:
_line_deal(line)
self.logger.info("len of exchang_brand_pair: %s" % len(exchange_dict))
# 品牌对检测
chk_flag, conflict_brand_set = self._brand_pair_checking(exchange_dict)
if not chk_flag:
err_s = "exchang-brand-pair error: %s" % "\t".join(list(conflict_brand_set))
self.logger.error(err_s)
raise Exception(err_s)
return exchange_dict
def _get_del_brand(self):
if not os.path.exists(self._del_brand_p):
raise Exception("%s does not exist!" % self._real_brand_p)
del_dict = {}
with open(self._del_brand_p) as f1:
for line in f1:
line = line.strip()
if line == "": continue
del_dict[line] = 0
self.logger.info("len of del_brand: %s" % len(del_dict))
return del_dict
class BrandReg(BrandRegBasic):
    """Applies brand rewriting to dp_brands_result.txt records, or to an
    in-memory list of record lines when *input_lst* is given."""

    def __init__(self, base_folder, log_instance, input_lst=None):
        """
        :param base_folder: directory with dictionaries and the input file
        :param log_instance: logger exposing .info() / .error()
        :param input_lst: optional list of record lines; takes precedence
            over reading the input file when non-empty
        :raises Exception: if dp_brands_result.txt is missing
        """
        super(BrandReg, self).__init__(base_folder, log_instance)
        input_file = base_folder + "/dp_brands_result.txt"
        if not os.path.exists(input_file):
            raise Exception("%s does not exist!" % input_file)
        output_file = base_folder + "/dp_brands_result.txt.brandreg"
        self._input_p = input_file
        self._input_lst = input_lst
        self._output_p = output_file

    def _brand_exchange(self, ori_brand):
        """Map *ori_brand* through the rewrite table; unchanged if absent."""
        return self.exchange_brand_pair.get(ori_brand, ori_brand)

    def brand_reg(self):
        """Rewrite the brand of every input record and write the results
        to the output file, one record per line.

        :raises Exception: if there is no input at all, or nothing survives.
        """
        stp1_lst = []
        idx = 0
        if self._input_lst is not None and len(self._input_lst) > 0:
            self.logger.info("增量数据处理")
            for line in self._input_lst:
                idx += 1
                if idx % 10000 == 0:
                    self.logger.info(idx)
                r = self.brand_rewrite(line.strip())
                if r is None:
                    continue
                stp1_lst.append(r)
        elif os.path.exists(self._input_p):
            # BUGFIX: use a context manager so the handle is always closed.
            with open(self._input_p) as f_input:
                for line in f_input:
                    idx += 1
                    if idx % 100000 == 0:
                        self.logger.info(idx)
                    r = self.brand_rewrite(line.strip())
                    if r is None:
                        continue
                    stp1_lst.append(r)
        else:
            raise Exception("输入增量数据为空!!!")
        if len(stp1_lst) < 1:
            raise Exception("增量数据处理后数据为空!!!")
        with open(self._output_p, 'w') as f3:
            f3.write("\n".join(stp1_lst))
            f3.flush()

    def _real_brand_reg(self, s_name):
        """Return the first real brand occurring as a substring of *s_name*,
        or None when there is none.

        CAUTION: the set iterates in arbitrary order, so with overlapping
        brands (e.g. "骆驼" and "东方骆驼") whichever is visited first wins,
        which may pick the shorter brand.
        """
        tmp_brand = None
        for r_b in self.real_brand_set:
            if r_b in s_name:
                tmp_brand = r_b
                break
        return tmp_brand

    def brand_rewrite(self, line):
        """Rewrite the brand field of one "\x01"-separated record.

        :param line: record of the form "id\\x01name\\x01brand"
        :return: rewritten record string, or None for empty/malformed input
        """
        line = line.strip()
        if line == "":
            self.logger.info("empty string!!")
            return None
        lst1 = line.split("\x01")
        if len(lst1) == 3:
            s_id, ori_name, s_brand = lst1  # unpack the record fields
            s_brand = s_brand.strip()
        else:
            self.logger.info("brand_rewrite error data: %s" % line)
            return None
        s_name = tool.s_name_dealing(ori_name)
        if len(self.real_brand_set) > 0:
            if s_brand not in self.real_brand_set:
                # The stated brand is not canonical: try to recover one
                # from the (normalized) product name instead.
                ex_brand = self._real_brand_reg(s_name)
                tmp_brand = ex_brand if ex_brand is not None else s_brand
            else:
                tmp_brand = s_brand  # already canonical
        else:
            tmp_brand = s_brand  # no canonical set available
        # Apply rewrite corrections.
        r_brand = self._brand_exchange(tmp_brand)
        # Brands on the delete list fall back to the processed name.
        if r_brand in self.del_brand_dict:
            r_brand = s_name
        return "\x01".join([s_id, ori_name, r_brand])
|
9,194 | 32f10c3e73a3d792416f6b2841a80f8b3c390e8c | # Copyright 2023 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import numpy as np
import nnabla.functions as F
from nbla_test_utils import list_context
ctxs = list_context('Mod2')
def ref_mod2(x0, x1, fmod):
    """NumPy reference implementation of the Mod2 function.

    When *fmod* is true, or *x0* is float32, use C-style fmod (result takes
    the sign of the dividend); otherwise use Python-style mod (result takes
    the sign of the divisor).
    """
    if x0.dtype == np.float32 or fmod:  # '== True' comparison replaced by truthiness
        return np.fmod(x0, x1)
    return np.mod(x0, x1)
@pytest.mark.parametrize("ctx, func_name", ctxs)
@pytest.mark.parametrize("x0_shape, x1_shape", [
    ((2, 3, 4), (2, 3, 4)),
    ((2, 3, 4), (1, 1, 1)),
    ((1, 1, 1), (2, 3, 4)),
])
@pytest.mark.parametrize('fmod', [False, True])
@pytest.mark.parametrize('dtype', [np.float32, np.int32])
@pytest.mark.parametrize("seed", [313])
def test_mod2_forward(seed, x0_shape, x1_shape, fmod, dtype, ctx, func_name):
    """Forward test of Mod2 against the NumPy reference, covering float and
    integer inputs, broadcasting shapes, and both mod/fmod semantics."""
    from nbla_test_utils import function_tester
    rng = np.random.RandomState(seed)
    shapes = (x0_shape, x1_shape)
    if dtype == np.float32:
        inputs = [rng.randn(*shape).astype(dtype) for shape in shapes]
    else:
        bounds = np.iinfo(dtype)
        inputs = [rng.randint(bounds.min, bounds.max, shape).astype(dtype)
                  for shape in shapes]
    function_tester(rng, F.mod2, ref_mod2, inputs,
                    func_name=func_name, func_args=[fmod],
                    atol_f=0, ctx=ctx, backward=[False, False])
|
9,195 | 2f0aa1f294f34a4f3ffb47c15ab74fc792765f10 | from MultisizerReader import MultiSizerReader
import os
import matplotlib.pyplot as plt
def _load_combined(folder):
    """Read every Multisizer .XLS export in *folder*, label each run by its
    dilution and OD (parsed from the file name), and return the data summed
    by OD group as (combinedData, combinedTypes, combinedLabels).

    NOTE(review): assumes file names split on "_" carry the dilution at
    index 2 and the OD integer/fraction parts at indices 4 and 5 — confirm
    against the data files.
    """
    file_names = [f for f in os.listdir(folder) if f.endswith(".XLS")]
    data = [MultiSizerReader(path=os.path.join(folder, f)) for f in file_names]
    ODs = []
    labels = []
    for d in data:
        parts = d.name.split("_")
        OD = parts[4] + "." + parts[5]
        if parts[2] == "5":
            labels.append("$10^5$ OD: {}".format(OD))
        if parts[2] == "7":
            labels.append("$10^7$ OD: {}".format(OD))
        ODs.append(float(OD))
    return MultiSizerReader.sumByGroup(data, ODs, labels)


# Low-OD dilution series (left panel). The loading/labeling code was
# previously duplicated for both folders; it now lives in _load_combined.
combinedData, combinedTypes, combinedLabels = _load_combined("./Data_Organised/DilutionTestingLowOD")
fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(14, 9))
MultiSizerReader.plotData(combinedData, combinedTypes, labels=combinedLabels, logAxis=False,
                          legend=True, title="OD ~ 0.05", logNormalFits=False, xLims=(0.4, 4),
                          colorScale=False, smoothing=5, showStats=False, ax=ax[0], text=False,
                          cbarLabel="$\mathbf{OD_{600}}$")

# High-OD dilution series (right panel).
combinedData, combinedTypes, combinedLabels = _load_combined("./Data_Organised/DilutionTestingHighOD")
MultiSizerReader.plotData(combinedData, combinedTypes, labels=combinedLabels, logAxis=False,
                          legend=True, title="OD ~ 0.2", logNormalFits=False, xLims=(0.4, 4),
                          colorScale=False, smoothing=5, showStats=False, ax=ax[1], text=False,
                          cbarLabel="$\mathbf{OD_{600}}$")

# Panel letters and legends.
ax[0].text(0.03, 0.93, "A", transform=ax[0].transAxes, size=35, weight='bold', color="k")
ax[1].text(0.03, 0.93, "B", transform=ax[1].transAxes, size=35, weight='bold', color="k")
ax[0].legend(fontsize="xx-large")
ax[1].legend(fontsize="xx-large")
fig.tight_layout()
plt.show()
|
import struct


def float_to_bin(value):
    """Return the IEEE-754 double-precision bit pattern of *value* as a
    64-character binary string (sign, 11-bit exponent, 52-bit mantissa)."""
    # bin() only accepts integers, so reinterpret the float's raw bytes
    # as an unsigned 64-bit integer first (this was the original bug:
    # bin(float) raises TypeError).
    (bits,) = struct.unpack(">Q", struct.pack(">d", value))
    return format(bits, "064b")


positivo = float(1.0000001)
negativo = float(-1.000001)
print(negativo, positivo)
b_pos = float_to_bin(positivo)
b_neg = float_to_bin(negativo)
print(b_neg, b_pos)
|
9,197 | 3bb50b61c7a3e98ede0a31e574f39b4ea7f22de5 | """
Corner cases include:
    word[!?',;.]
    word[!?',;.]word[!?',;.]word
so do not assume punctuation appears only after a single word followed by whitespace.
Use a regular expression to replace all punctuation with spaces,
then split the text into words.
"""
class Solution:
    def mostCommonWord(self, paragraph, banned):
        """Return the most frequent word in *paragraph* that is not banned.

        :type paragraph: str
        :type banned: List[str]
        :rtype: str (None if every word is banned)
        """
        import re
        from collections import Counter
        # Replace every non-word character (punctuation etc.) with a space
        # so "ball," and "ball" count as the same word; raw string for the
        # regex, and lowercase everything for case-insensitive counting.
        cleaned = re.sub(r'\W', ' ', paragraph)
        words = map(str.lower, cleaned.strip().split())
        counts = Counter(words)
        # Set for O(1) membership tests instead of scanning the banned list.
        banned_set = set(banned)
        for word, _ in counts.most_common():
            if word not in banned_set:
                return word
if __name__ == "__main__":
    # Quick manual check: 'a' is banned, so 'b' should win.
    sample_paragraph = "a, a, a, a, b,b,b,c, c"
    sample_banned = ['a']
    solver = Solution()
    print(solver.mostCommonWord(sample_paragraph, sample_banned))
|
9,198 | 63c0786d277c5576822d6e521f65850762ab5eb0 | """insta URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path
from member import views as member_views
from post import views as post_views
urlpatterns = [
    # admin
    path('admin/', admin.site.urls),
    # post application
    path('post/', post_views.post_list, name='post_list'),
    path('post/create/', post_views.post_create, name='post_create'),
    path('post/detail/<int:post_pk>/', post_views.post_detail, name='post_detail'),
    path('post/<int:post_pk>/comment/create/', post_views.comment_create, name='comment_create'),
    # member application
    path('member/signup/', member_views.signup, name='signup'),
    path('member/login/', member_views.login, name='login'),
]
# For URLs under settings.MEDIA_URL the resolver does not dispatch to a
# view; it serves the matching file straight from document_root.
urlpatterns += static(
settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT,
) |
import os
import argparse
import collections

# Lowest-priority fallback values.
defaults = {'color': 'red', 'user': 'guest'}

parser = argparse.ArgumentParser()
parser.add_argument('-u', '--user')
parser.add_argument('-c', '--color')

# parse_args() builds a simple Namespace from attributes parsed
# out of the command line.
namespace = parser.parse_args()

# Keep only the options the user actually supplied on the command line.
command_line_args = {key: value for key, value in vars(namespace).items() if value is not None}

# Lookup priority: command line, then environment variables, then defaults.
combined = collections.ChainMap(command_line_args, os.environ, defaults)
print(combined['color'])
print(combined['user'])
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.