blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2
values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313
values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107
values | src_encoding stringclasses 20
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 4 6.02M | extension stringclasses 78
values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6a7766941efa012d89c2958bfeb926bb7c046d52 | 990d7daa0a9346b54da35f498ed35ea04630325e | /todo/filescanner.py | 719985034a697f834816231383fa089811442be5 | [
"MIT"
] | permissive | jiahwa/pycookbook | 233a65b50011a2ae34d791922b5237edbe45ec6f | 300b36a0f47fea5614d2767c6c7b1edc44f8254a | refs/heads/master | 2023-07-03T22:21:27.571948 | 2021-05-12T15:58:50 | 2021-05-12T15:58:50 | 361,984,323 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 62 | py | #!/usr/bin/env python3
# Scan directory and files in a folder | [
"yujahua@163.com"
] | yujahua@163.com |
33c449dc19c9effe47b503bea32359f5b42fb142 | f652cf4e0fa6fbfcca8d94cec5f942fd8bd021a0 | /mbuild/__init__.py | bac5509ac8b9754a7fb64e88b372b76f045f4dc3 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Jonestj1/mbuild | 83317ab3a53f40ff6c9c69f6be542b8562602eee | 411cc60d3ef496fa26541bb0b7ea8dcf8c7449e4 | refs/heads/master | 2021-01-20T19:45:11.563610 | 2017-02-13T18:16:12 | 2017-02-13T18:16:12 | 32,886,030 | 0 | 0 | null | 2015-03-25T19:24:50 | 2015-03-25T19:24:50 | null | UTF-8 | Python | false | false | 339 | py | from mbuild.box import Box
from mbuild.coarse_graining import coarse_grain
from mbuild.coordinate_transform import *
from mbuild.compound import *
from mbuild.pattern import *
from mbuild.packing import *
from mbuild.port import Port
from mbuild.recipes import *
from mbuild.formats import *
from mbuild.version import version
| [
"christoph.t.klein@me.com"
] | christoph.t.klein@me.com |
f4ce8f53f1aca56beb69d6797ab8c460039bae5e | 30250c5cf9b235b3d5eaa79418dc5bff94a78f16 | /chapter1/ex2.py | 5421de949add76c934d496fb301b874e5f7e5a40 | [] | no_license | jeckt/py_computer_vision | 0d121c07fb7c1564a25437905441b4a3edff5868 | 02007c2dae1e66436614583125164ab004f2e220 | refs/heads/master | 2023-07-22T14:02:10.297714 | 2019-08-24T13:38:25 | 2019-08-24T13:38:25 | 163,026,202 | 1 | 0 | null | 2023-07-06T21:31:51 | 2018-12-24T22:09:26 | Python | UTF-8 | Python | false | false | 910 | py | #!/usr/bin/env python
from PIL import Image
from numpy import array, zeros
from scipy.ndimage import filters
import matplotlib.pyplot as plt
# Path of the demo image that the script below loads and sharpens.
image_file = '../data/sf_view1.jpg'
def unsharp_masking(image: array, amount: float) -> array:
    """Sharpen an 8-bit image via unsharp masking.

    The result is ``image + (image - blur(image))``.  The original
    implementation did this arithmetic directly on uint8 arrays, which
    silently wraps around (e.g. 0 - 1 -> 255) and produces speckle
    artifacts near strong edges.  The computation is now done in a signed
    integer type and clipped back to the valid [0, 255] range.  The
    deprecated ``scipy.ndimage.filters`` namespace is also avoided.

    Args:
        image: grayscale (H, W) or color (H, W, 3) uint8 array.
        amount: standard deviation of the Gaussian blur kernel.

    Returns:
        Sharpened array with the same shape and dtype as ``image``.
    """
    from numpy import clip
    from scipy.ndimage import gaussian_filter
    if image.ndim < 3:
        # grayscale image: blur the single channel directly
        blur_image = gaussian_filter(image, amount)
    else:
        # color image: blur each channel independently
        blur_image = zeros(image.shape)
        for i in range(3):
            blur_image[:, :, i] = gaussian_filter(image[:, :, i], amount)
        blur_image = array(blur_image, 'uint8')
    # Promote to a signed type so the subtraction cannot wrap, then clip.
    signed = image.astype('int32')
    sharpened = signed + (signed - blur_image.astype('int32'))
    return clip(sharpened, 0, 255).astype(image.dtype)
if __name__ == '__main__':
    import scipy  # NOTE(review): unused here; presumably a leftover debugging import
    # Load the demo image, sharpen it, and show both versions side by side.
    image = array(Image.open(image_file))
    sharp_image = unsharp_masking(image, 1.0)
    fig, axes = plt.subplots(1, 2)
    axes[0].imshow(image)
    axes[1].imshow(sharp_image)
    plt.show()
| [
"steven.nguyen89@gmail.com"
] | steven.nguyen89@gmail.com |
f95aaa24aa81978b09838def7e11466bbd207ccd | eb2d683106f2a2ee6d266dd7f8fcdb4f86adb8fb | /datas/PY_gal.py | 965ade949ac6fdc78f2e7b6ed621377afd9b8f38 | [] | no_license | minicloudsky/PythonCode | 2fb001d524c8e59eefa66f476a1119ff27e7df36 | 298a6db80a74a3b5f7d75f6037306e9e06ffa092 | refs/heads/master | 2023-02-07T18:25:04.753928 | 2020-12-20T03:27:07 | 2020-12-20T03:27:07 | 106,422,450 | 0 | 0 | null | 2020-09-26T03:27:31 | 2017-10-10T13:41:28 | Python | UTF-8 | Python | false | false | 31 | py | import pygal
hist = pygal.Bar() | [
"1397991131@qq.com"
] | 1397991131@qq.com |
9ed35fcd21c9aa5b1e8ce9c53b2cc1c8ba77be02 | 5c5ba0cbdc972b56a3ac7664f68a74fe382c645a | /uri/5 - Mathematics/1240/Main2.py | ae4ddbd9b873cf229b0b689dc40261a5fec8f4ea | [] | no_license | humbertodias/challenges | 91e090b70ed65269ff94f0b8d4096526d3545ba3 | b832d55491b4d5c840b0bb25b931aef8f04f5784 | refs/heads/master | 2020-04-05T14:03:38.384606 | 2019-01-29T12:28:13 | 2019-01-29T12:28:13 | 94,749,550 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 191 | py | nTests = int(raw_input())
# For each test case, read two tokens and report whether the second one
# "fits" (encaixa) as a suffix of the first.  Python 2 syntax
# (raw_input / print statement).
for test in range(nTests):
    a, b = raw_input().split()
    # b fits iff it is no longer than a and matches a's trailing characters
    if len(b) <= len(a) and b == a[-len(b):]:
        print 'encaixa'
    else:
        print 'nao encaixa'
"humberto.dias@techne.com.br"
] | humberto.dias@techne.com.br |
8893df317f1c158e4400b4b8eb0be1430d608747 | 130e13bfad23961739613f78a7f3e59c67d28cac | /utility/resnet.py | e5a80a23b027647809fcf951058e2e1ada2ab564 | [
"Apache-2.0"
] | permissive | dashmoment/aic_scene | 981a0faca39b2079203d24a961a491905f0f2ca9 | 3e68c03f032b3979feb17b550a953e5aafc970c1 | refs/heads/master | 2021-07-12T05:47:35.491495 | 2017-09-29T09:53:54 | 2017-09-29T09:53:54 | 103,190,682 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,689 | py |
import skimage.io # bug. need to import this before tensorflow
import skimage.transform # bug. need to import this before tensorflow
import tensorflow as tf
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.training import moving_averages
from resnet_config import Config
import datetime
import numpy as np
import os
import time
# Hyper-parameters for batch norm, weight decay and weight initialisation.
MOVING_AVERAGE_DECAY = 0.9997
BN_DECAY = MOVING_AVERAGE_DECAY  # decay for the batch-norm moving statistics
BN_EPSILON = 0.001
CONV_WEIGHT_DECAY = 0.00004
CONV_WEIGHT_STDDEV = 0.1
FC_WEIGHT_DECAY = 0.00004
FC_WEIGHT_STDDEV = 0.01
# Names of the TF collections used to gather variables / update ops.
RESNET_VARIABLES = 'resnet_variables'
UPDATE_OPS_COLLECTION = 'resnet_update_ops' # must be grouped with training op
# Per-channel BGR mean of the ImageNet training set (pixel scale 0-255).
IMAGENET_MEAN_BGR = [103.062623801, 115.902882574, 123.151630838, ]
tf.app.flags.DEFINE_integer('input_size', 224, "input image size")
# Non-linearity used throughout the network.
activation = tf.nn.relu
def inference(x, is_training,
              num_classes=1000,
              num_blocks=[3, 4, 6, 3],  # defaults to 50-layer network
              use_bias=False,  # defaults to using batch norm
              bottleneck=True):
    """Build a ResNet classification graph (ResNet-50 by default).

    Args:
        x: input image batch, NHWC.
        is_training: bool (or bool tensor) selecting batch-norm behaviour.
        num_classes: units of the final fc layer; None skips the fc head.
        num_blocks: residual blocks per scale.
        use_bias: if True, conv biases are used instead of batch norm.
        bottleneck: if True, use 1x1-3x3-1x1 bottleneck blocks.

    Returns:
        (logits, avg_pool_features) tensors.
    """
    # Mutable layer configuration shared by stack()/block()/conv()/bn().
    c = {}
    c['bottleneck'] = bottleneck
    c['is_training'] = tf.convert_to_tensor(is_training,
                                            dtype='bool',
                                            name='is_training')
    c['ksize'] = 3
    c['stride'] = 1
    c['use_bias'] = use_bias
    c['fc_units_out'] = num_classes
    c['num_blocks'] = num_blocks
    c['stack_stride'] = 2
    with tf.variable_scope('scale1'):
        # 7x7/2 stem convolution.
        c['conv_filters_out'] = 64
        c['ksize'] = 7
        c['stride'] = 2
        x = conv(x, c)
        x = bn(x, c)
        x = activation(x)
    with tf.variable_scope('scale2'):
        # Spatial reduction comes from the max-pool here, so the first
        # stack of blocks runs with stride 1.
        x = _max_pool(x, ksize=3, stride=2)
        c['num_blocks'] = num_blocks[0]
        c['stack_stride'] = 1
        c['block_filters_internal'] = 64
        x = stack(x, c)
    with tf.variable_scope('scale3'):
        c['num_blocks'] = num_blocks[1]
        c['block_filters_internal'] = 128
        # BUG FIX: this line previously read "c['stack_stride'] == 1" -- a
        # no-op comparison -- leaving the stride at 1, so scales 3-5 never
        # downsampled.  Restore the stride-2 downsampling ResNet uses for
        # every scale after the max-pool; the value carries over to
        # scale4/scale5 as well.
        c['stack_stride'] = 2
        x = stack(x, c)
    with tf.variable_scope('scale4'):
        c['num_blocks'] = num_blocks[2]
        c['block_filters_internal'] = 256
        x = stack(x, c)
    with tf.variable_scope('scale5'):
        c['num_blocks'] = num_blocks[3]
        c['block_filters_internal'] = 512
        x = stack(x, c)
    # post-net: global average pooling over the spatial dimensions.
    avg_out = tf.reduce_mean(x, reduction_indices=[1, 2], name="avg_pool")
    if num_classes != None:
        with tf.variable_scope('fc'):
            x = fc(avg_out, c)
    return x, avg_out
# This is what they use for CIFAR-10 and 100.
# See Section 4.2 in http://arxiv.org/abs/1512.03385
def inference_small(x,
                    is_training,
                    num_blocks=3,  # 6n+2 total weight layers will be used.
                    use_bias=False,  # defaults to using batch norm
                    num_classes=10):
    """Build the small CIFAR-style ResNet (see inference_small_config)."""
    training_flag = tf.convert_to_tensor(is_training,
                                         dtype='bool',
                                         name='is_training')
    config = {
        'is_training': training_flag,
        'use_bias': use_bias,
        'fc_units_out': num_classes,
        'num_blocks': num_blocks,
        'num_classes': num_classes,
    }
    return inference_small_config(x, config)
def inference_small_config(x, c):
    """Small ResNet for CIFAR-10/100 (Section 4.2 of arXiv:1512.03385).

    Expects c to carry 'is_training', 'use_bias', 'fc_units_out',
    'num_blocks' and 'num_classes'; c is mutated while building the graph.
    Returns (logits, avg_pool_features).
    """
    c['bottleneck'] = False
    c['ksize'] = 3
    c['stride'] = 1
    with tf.variable_scope('scale1'):
        # 3x3 stem conv followed by the first block stack at stride 1.
        c['conv_filters_out'] = 16
        c['block_filters_internal'] = 16
        c['stack_stride'] = 1
        x = conv(x, c)
        x = bn(x, c)
        x = activation(x)
        x = stack(x, c)
    with tf.variable_scope('scale2'):
        # Downsample by 2, double the filters.
        c['block_filters_internal'] = 32
        c['stack_stride'] = 2
        x = stack(x, c)
    with tf.variable_scope('scale3'):
        c['block_filters_internal'] = 64
        c['stack_stride'] = 2
        x = stack(x, c)
    # post-net: global average pooling over the spatial dimensions.
    avg_out = tf.reduce_mean(x, reduction_indices=[1, 2], name="avg_pool")
    if c['num_classes'] != None:
        with tf.variable_scope('fc'):
            x = fc(avg_out, c)
    return x, avg_out
def _imagenet_preprocess(rgb):
    """Changes RGB [0,1] valued image to BGR [0,255] with mean subtracted."""
    # NOTE: tf.split/tf.concat are called with the legacy (pre-TF 1.0)
    # argument order, i.e. the axis comes first.
    red, green, blue = tf.split(3, 3, rgb * 255.0)
    bgr = tf.concat(3, [blue, green, red])
    bgr -= IMAGENET_MEAN_BGR
    return bgr
def loss(logits, labels):
    """Mean cross-entropy plus all registered regularization losses.

    Args:
        logits: unscaled class scores, shape [batch, num_classes].
        labels: integer class ids, shape [batch].
    """
    cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits, labels)
    cross_entropy_mean = tf.reduce_mean(cross_entropy)
    # Weight-decay terms are registered by _get_variable() through the
    # REGULARIZATION_LOSSES collection.
    regularization_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
    loss_ = tf.add_n([cross_entropy_mean] + regularization_losses)
    tf.scalar_summary('loss', loss_)
    return loss_
def stack(x, c):
    """Chain c['num_blocks'] residual blocks; only the first may stride."""
    for block_idx in range(c['num_blocks']):
        if block_idx == 0:
            c['block_stride'] = c['stack_stride']
        else:
            c['block_stride'] = 1
        with tf.variable_scope('block%d' % (block_idx + 1)):
            x = block(x, c)
    return x
def block(x, c):
    """One residual block: bottleneck (1x1-3x3-1x1) or basic (3x3-3x3).

    Note: filters_out isn't how many filters are outputed when
    bottleneck=False only; when bottleneck is True, filters_internal*4
    filters are outputted.  filters_internal is how many filters the 3x3
    convs use internally.
    """
    filters_in = x.get_shape()[-1]
    m = 4 if c['bottleneck'] else 1
    filters_out = m * c['block_filters_internal']
    shortcut = x  # branch 1
    c['conv_filters_out'] = c['block_filters_internal']
    if c['bottleneck']:
        with tf.variable_scope('a'):
            # 1x1 reduction conv; carries the block's (possibly 2) stride.
            c['ksize'] = 1
            c['stride'] = c['block_stride']
            x = conv(x, c)
            x = bn(x, c)
            x = activation(x)
        with tf.variable_scope('b'):
            # BUG FIX: the config previously kept ksize=1 and the block
            # stride from branch 'a', turning this into a second strided
            # 1x1 conv.  The middle bottleneck conv must be 3x3, stride 1.
            c['ksize'] = 3
            c['stride'] = 1
            x = conv(x, c)
            x = bn(x, c)
            x = activation(x)
        with tf.variable_scope('c'):
            # 1x1 expansion conv back to filters_out channels.
            c['conv_filters_out'] = filters_out
            c['ksize'] = 1
            # BUG FIX: was "c['stride'] == 1", a no-op comparison.
            c['stride'] = 1
            x = conv(x, c)
            x = bn(x, c)
    else:
        with tf.variable_scope('A'):
            c['stride'] = c['block_stride']
            # BUG FIX: was "c['ksize'] == 3", a no-op comparison.
            c['ksize'] = 3
            x = conv(x, c)
            x = bn(x, c)
            x = activation(x)
        with tf.variable_scope('B'):
            c['conv_filters_out'] = filters_out
            # BUG FIX: no-op comparison replaced by an assignment, and a
            # leftover debugging print(c) was removed.
            c['ksize'] = 3
            c['stride'] = 1
            assert c['stride'] == 1
            x = conv(x, c)
            x = bn(x, c)
    with tf.variable_scope('shortcut'):
        # Project the shortcut whenever the shape changes (channel count
        # or spatial stride), otherwise use the identity.
        if filters_out != filters_in or c['block_stride'] != 1:
            c['ksize'] = 1
            c['stride'] = c['block_stride']
            c['conv_filters_out'] = filters_out
            shortcut = conv(shortcut, c)
            shortcut = bn(shortcut, c)
    return activation(x + shortcut)
def bn(x, c):
    """Batch normalization over all but the last (channel) axis.

    If c['use_bias'] is set, batch norm is skipped entirely and a plain
    learnable bias is added instead.  During training the batch moments
    are used and the moving averages are updated (via ops registered in
    UPDATE_OPS_COLLECTION); at inference the moving averages are used.
    """
    x_shape = x.get_shape()
    params_shape = x_shape[-1:]
    if c['use_bias']:
        # Bias-only mode: no normalization at all.
        bias = _get_variable('bias', params_shape,
                             initializer=tf.zeros_initializer())
        return x + bias
    # Normalize over every axis except the channels.
    axis = list(range(len(x_shape) - 1))
    beta = _get_variable('beta',
                         params_shape,
                         initializer=tf.zeros_initializer())
    gamma = _get_variable('gamma',
                          params_shape,
                          initializer=tf.ones_initializer())
    moving_mean = _get_variable('moving_mean',
                                params_shape,
                                initializer=tf.zeros_initializer(),
                                trainable=False)
    moving_variance = _get_variable('moving_variance',
                                    params_shape,
                                    initializer=tf.ones_initializer(),
                                    trainable=False)
    # These ops will only be performed when training.
    mean, variance = tf.nn.moments(x, axis)
    update_moving_mean = moving_averages.assign_moving_average(moving_mean,
                                                               mean, BN_DECAY)
    update_moving_variance = moving_averages.assign_moving_average(
        moving_variance, variance, BN_DECAY)
    tf.add_to_collection(UPDATE_OPS_COLLECTION, update_moving_mean)
    tf.add_to_collection(UPDATE_OPS_COLLECTION, update_moving_variance)
    # Select batch statistics during training, moving averages otherwise.
    mean, variance = control_flow_ops.cond(
        c['is_training'], lambda: (mean, variance),
        lambda: (moving_mean, moving_variance))
    x = tf.nn.batch_normalization(x, mean, variance, beta, gamma, BN_EPSILON)
    #x.set_shape(inputs.get_shape()) ??
    return x
def fc(x, c):
    """Final fully-connected layer: x @ W + b with c['fc_units_out'] units."""
    in_units = x.get_shape()[1]
    out_units = c['fc_units_out']
    init = tf.truncated_normal_initializer(
        stddev=FC_WEIGHT_STDDEV)
    # NOTE(review): weight_decay receives FC_WEIGHT_STDDEV here rather than
    # FC_WEIGHT_DECAY; this mirrors the original code but looks unintended.
    weights = _get_variable('weights',
                            shape=[in_units, out_units],
                            initializer=init,
                            weight_decay=FC_WEIGHT_STDDEV)
    biases = _get_variable('biases',
                           shape=[out_units],
                           initializer=tf.zeros_initializer())
    return tf.nn.xw_plus_b(x, weights, biases)
def _get_variable(name,
                  shape,
                  initializer,
                  weight_decay=0.0,
                  dtype='float',
                  trainable=True):
    """A little wrapper around tf.get_variable to do weight decay and add
    the variable to the resnet collection."""
    if weight_decay > 0:
        regularizer = tf.contrib.layers.l2_regularizer(weight_decay)
    else:
        regularizer = None
    return tf.get_variable(name,
                           shape=shape,
                           initializer=initializer,
                           dtype=dtype,
                           regularizer=regularizer,
                           collections=[tf.GraphKeys.VARIABLES, RESNET_VARIABLES])
def conv(x, c):
    """SAME-padded 2-D convolution configured by the c dict
    (keys: 'ksize', 'stride', 'conv_filters_out')."""
    kernel = c['ksize']
    step = c['stride']
    out_channels = c['conv_filters_out']
    in_channels = x.get_shape()[-1]
    weights = _get_variable(
        'weights',
        shape=[kernel, kernel, in_channels, out_channels],
        dtype='float',
        initializer=tf.truncated_normal_initializer(stddev=CONV_WEIGHT_STDDEV),
        weight_decay=CONV_WEIGHT_DECAY)
    return tf.nn.conv2d(x, weights, [1, step, step, 1], padding='SAME')
def _max_pool(x, ksize=3, stride=2):
    """SAME-padded max pooling over the spatial dimensions."""
    window = [1, ksize, ksize, 1]
    steps = [1, stride, stride, 1]
    return tf.nn.max_pool(x, ksize=window, strides=steps, padding='SAME')
| [
"dashmoment1017@gmail.com"
] | dashmoment1017@gmail.com |
7d568b30a9b1659b10f29e17dce48c20cdbf1df4 | ad0ed2d0df90f199123f8f9739069f56f3ba94fc | /nyktools/nlp/preprocess/normalize.py | 08ca22ac92ea7dfaf88e2dad409eb8216bdf07da | [] | no_license | nyk510/scdv-python | ca864f636dafede22e38c36cffe2dcec04420a9b | 4a2847a71f3334728a370a2c21d3dc33aa7c976a | refs/heads/master | 2020-04-23T22:18:37.336289 | 2019-05-06T23:29:35 | 2019-05-06T23:29:35 | 171,497,361 | 19 | 2 | null | 2019-05-06T23:29:36 | 2019-02-19T15:21:54 | Jupyter Notebook | UTF-8 | Python | false | false | 2,713 | py | # encoding: utf8
"""
文字の正規化
参考: https://github.com/neologd/mecab-ipadic-neologd/wiki/Regexp.ja
"""
from __future__ import unicode_literals
import re
import unicodedata
def unicode_normalize(cls, doc):
pt = re.compile('([{}]+)'.format(cls))
def norm(codec):
return unicodedata.normalize('NFKC', codec) if pt.match(codec) else codec
doc = ''.join(norm(x) for x in re.split(pt, doc))
doc = re.sub('-', '-', doc)
return doc
def remove_extra_spaces(doc):
"""
余分な空白を削除
Args:
doc (String)
Return
空白除去された文章 (String)
"""
doc = re.sub('[ ]+', ' ', doc)
blocks = ''.join((
'\u4E00-\u9FFF', # CJK UNIFIED IDEOGRAPHS
'\u3040-\u309F', # HIRAGANA
'\u30A0-\u30FF', # KATAKANA
'\u3000-\u303F', # CJK SYMBOLS AND PUNCTUATION
'\uFF00-\uFFEF' # HALFWIDTH AND FULLWIDTH FORMS
))
basic_latin = '\u0000-\u007F'
def remove_space_between(cls1, cls2, doc):
pt = re.compile('([{}]) ([{}])'.format(cls1, cls2))
while pt.search(doc):
doc = pt.sub(r'\1\2', doc)
return doc
doc = remove_space_between(blocks, blocks, doc)
doc = remove_space_between(blocks, basic_latin, doc)
doc = remove_space_between(basic_latin, blocks, doc)
return doc
def normalize_neologd(doc):
"""
以下の文章の正規化を行います.
* 空白の削除
* 文字コードの変換(utf-8へ)
* ハイフン,波線(チルダ)の統一
* 全角記号の半角への変換 (?→?など)
Args:
doc(str):
正規化を行いたい文章
Return(str):
正規化された文章
"""
doc = doc.strip()
doc = unicode_normalize('0-9A-Za-z。-゚', doc)
def maketrans(f, t):
return {ord(x): ord(y) for x, y in zip(f, t)}
doc = re.sub('[˗֊‐‑‒–⁃⁻₋−]+', '-', doc) # normalize hyphens
doc = re.sub('[﹣-ー—―─━ー]+', 'ー', doc) # normalize choonpus
doc = re.sub('[~∼∾〜〰~]', '', doc) # remove tildes
doc = doc.translate(
maketrans('!"#$%&\'()*+,-./:;<=>?@[¥]^_`{|}~。、・「」「」',
'!”#$%&’()*+,-./:;<=>?@[¥]^_`{|}〜。、・「」『』'))
doc = remove_extra_spaces(doc)
doc = unicode_normalize('!”#$%&’()*+,-./:;<>?@[¥]^_`{|}〜', doc) # keep =,・,「,」
doc = re.sub('[’]', '\'', doc)
doc = re.sub('[”]', '"', doc)
doc = re.sub('[“]', '"', doc)
return doc
| [
"takamail53@gmail.com"
] | takamail53@gmail.com |
34a49514631f4c2a3f2a4838151015eaf9e585bd | 4310528fa617bf1fd498535858bd02e40eb45834 | /venv/bin/chardetect | d96617745d60258c4f42d9cd534d3c546cfa432b | [] | no_license | lgergelyo/DHT11 | d6f38df38e57b5ac6b2570373a18f6879a859852 | b531722281f29fdaa954bd89fb4333ec49346362 | refs/heads/master | 2020-07-11T08:11:29.105044 | 2019-08-26T14:14:50 | 2019-08-26T14:14:50 | 204,485,805 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 259 | #!/home/gergely/PycharmProjects/DHT11/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from chardet.cli.chardetect import main
if __name__ == '__main__':
    # Normalize argv[0] (strip setuptools' -script.py / .exe suffix) before
    # delegating to chardet's CLI entry point.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"leandrotecnico@terra.com.br"
] | leandrotecnico@terra.com.br | |
5431788db52ee1985fa4417126ac5ea35bb35d38 | 1ecab9b2b5444eeb65724a3fbf7c4c2c6839bf89 | /python/hands-on/flask-03-handling-routes-and-if-for/Flask_if_for_structure/templates/app.py | bf4fe2da510401ad66d690a1471a5a29e1b05a90 | [] | no_license | emine-arc/aws-workspace | 5eb0186b4efad04f781b9647813a27f7f4e9e569 | 52e019b29b7ea5762ec2da6efa3bdd65165d63a8 | refs/heads/main | 2023-07-16T10:07:27.190351 | 2021-08-17T21:50:38 | 2021-08-17T21:50:38 | 378,922,272 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 808 | py | # Import Flask modules
from flask import Flask, render_template
# Create an object named app
app = Flask(__name__)  # WSGI application used by the route decorators below
# Create a function named head which shows the massage as "This is my first conditions experience" in `index.html`
# and assign to the route of ('/')
@app.route('/')
def head():
    """Render index.html with the greeting message."""
    greeting = 'This is my first conditions experience'
    return render_template('index.html', message=greeting)
# Create a function named header which prints the items one by one in `body.html`
# and assign to the route of ('/')
@app.route('/mikemi')
def header():
    """Render body.html with a list of names to iterate over."""
    names = ["Umit", "Berk", "Fatih", "Hayko", "Asim"]
    return render_template('body.html', object=names)
# run this app in debug mode on your local.
if __name__ == '__main__':
    #app.run(debug=True)
    # NOTE(review): despite the comment above, debug mode is disabled here;
    # the app listens on all interfaces on port 80.
    app.run(host='0.0.0.0', port=80)
"emineulu.90@gmail.com"
] | emineulu.90@gmail.com |
6924e385d074e1dca2913f29af37d6c8dacc6fea | df373e0104ccdfc64dc80cd149d30f93776aa539 | /realtime-recs/test/rts_snopes.py | 19a12f58fb3c716e58fcffa9d3ff0c252a03307c | [] | no_license | btdlin/myPyUtils | ac0de295cc554dc6e837eeeddad4d1bb94c2907c | 374dc82a10c40122aa70a3765c30e6fefd700498 | refs/heads/master | 2022-10-02T07:31:11.210530 | 2017-04-28T17:05:38 | 2017-04-28T17:05:38 | 70,423,536 | 0 | 0 | null | 2022-09-23T21:02:54 | 2016-10-09T18:49:54 | Jupyter Notebook | UTF-8 | Python | false | false | 1,707 | py | import pytest
import recs_client.request as req
import bt_rts.thrift.gen.filters as recs_filter
from recs_client.client import RecommendationsClient
# Alternative hosts kept for convenience when testing other environments.
# rts_host = 'localhost'
rts_host = 'realtime-recs-k.magic.boomtrain.com'
# rts_host = 'rts.aws.boomtrain.com'
# Connection settings (TIMEOUT presumably in milliseconds -- confirm).
PORT = 7070
TIMEOUT = 20000
# Default request parameters used by the test below.
RECSET_ID = 'fakedb0c-c5c6-4515-9bd1-5a06ddd676f6'
EMPTY_SEEDS = []
EMPTY_EXCLUDES = []
TEST = True
GROUP_NAME = 'default'
COUNT = 2
CALLING_APP = 'test_client'
def test_rts(rts_host):
    """Integration test: request 4 'article' recommendations from the
    realtime-recs service and assert the requested count is returned.

    NOTE(review): the rts_host parameter shadows the module-level variable;
    presumably it is supplied as a pytest fixture -- confirm.
    """
    # Local override of the module-level COUNT for this request.
    COUNT = 4
    request = req.RecsRequest(site_id='snopes',
                              bsin='50a46296-8a91-4c7a-bf0b-4f1a15b3ac33',
                              seeds=EMPTY_SEEDS,
                              excludes=EMPTY_EXCLUDES,
                              recset_id=RECSET_ID,
                              test=TEST)
    # Filter: the GLOBAL named filter AND an overlap filter requiring at
    # least one 'article' value in the 'resource-type' field.
    metafilter = recs_filter.TFilter(overlap=None, existence=None, recency=None, and_=[
        recs_filter.TFilter(overlap=None, existence=None, recency=None, and_=None, range=None, or_=None, any=None, named='GLOBAL'),
        recs_filter.TFilter(overlap=recs_filter.TOverlapFilter(values=['article'], field='resource-type', match_type=0,
                                                               amount=recs_filter.TRange(max_=None, min_=1.0)), existence=None, recency=None, and_=None,
                            range=None, or_=None, any=None, named=None)], range=None, or_=None, any=None, named=None)
    request.groups[GROUP_NAME] = req.RecGroupRequest(count=COUNT, metafilter=metafilter)
    config = {'host': rts_host, 'port': PORT, 'timeout': TIMEOUT}
    # Client is a context manager: connection is closed on exit.
    with RecommendationsClient(calling_app=CALLING_APP, **config) as client:
        response = client.get_recommendations(request)
        assert len(response) == COUNT
| [
"dlin@boomtrain.com"
] | dlin@boomtrain.com |
44de918735615f54fa3ed81459dc21cfb153772b | 2ac39acf13c1cd636c25e6742040888775f27fe4 | /examples/B_basic_platform/bsp01_1plat4gw_noAuth_noDemo/gateways/gw_004/utils.py | c8637987346a9a531a179ed948f92a7f0f26bf47 | [
"MIT"
] | permissive | N5GEH/n5geh.services.rvk_simulator | 07edbc60a60a699eac5501eb1c34bdfadd777aa8 | 4a2c2882ddfedd7fcb260be3a86b9088f29b17c6 | refs/heads/master | 2022-12-03T13:18:09.685975 | 2020-08-05T09:12:04 | 2020-08-05T09:12:04 | 285,237,975 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 43,280 | py | import os
import sys
import math
import json
from datetime import datetime, time, timezone # , timedelta
import requests
########################################################################
def rho_fluid_water(temp_in_C, p_in_MPa, calc_option):
    """ returns density of the fluid water in kg/m3

    Args:
        temp_in_C: water temperature in degrees Celsius.
        p_in_MPa: pressure in MPa (currently unused by every option).
        calc_option: selects the correlation:
            0 - IAPWS R7-97(2012) industrial formulation (not implemented;
                returns the original placeholder value 0.0),
            1 - polynomial after Glueck,
            2 - air-free water after PTB Mitteilungen 100/3-90,
            3 - air-saturated water after PTB Mitteilungen 100/3-90,
            4 - regression of Joachim based on data of Glueck.

    Raises:
        ValueError: for an unknown calc_option (the original code raised an
            UnboundLocalError in that case).
    """
    temp = temp_in_C

    def _ptb_airfree(t):
        # Density of air-free water after PTB Mitteilungen 100/3-90;
        # shared by options 2 and 3.
        C0 = 999.83952
        c1 = 16.952577
        C2 = -7.9905127 * 0.001
        C3 = -4.6241757 * 0.00001
        C4 = 1.0584601 * 0.0000001
        C5 = -2.8103006 * 0.0000000001
        b1 = 0.0168872
        return (
            C0 + c1 * t + C2 * t * t + C3 * t * t * t + C4 * t * t * t * t + C5 * t * t * t * t * t) / (
                1.0 + b1 * t)

    if calc_option == 0:
        # IAPWS-IF97 is not implemented; keep the original placeholder.
        return 0.0
    elif calc_option == 1:
        # Verfahren nach Glueck
        return 1002.045 - 0.1029905 * temp - 0.003698162 * (temp * temp) + 0.000003991053 * (temp * temp * temp)
    elif calc_option == 2:
        # air-free water, PTB Mitteilungen 100/3-90
        return _ptb_airfree(temp)
    elif calc_option == 3:
        # air-saturated water, PTB Mitteilungen 100/3-90
        return _ptb_airfree(temp) - 0.004612 + 0.000106 * temp
    elif calc_option == 4:
        # regression of Joachim based on data of Glueck
        return -7.46649184008019E-08 * temp * temp * temp * temp + 2.94491388243001E-05 * temp * temp * temp - 6.66507624328283E-03 * temp * temp + 2.65068149440988E-02 * temp + 1000.58459596234
    raise ValueError('unknown calc_option: {}'.format(calc_option))
# end of rho_fluid_water function
########################################################################
def check_and_open_file(path):
    """ returns list of strings

    Reads 'path' line by line (newlines preserved).  On a missing file or
    a read error a message is printed and 3 is returned, as before.
    """
    if not os.path.isfile(path):
        print('File {} does not exist.'.format(path))
        return 3
    try:
        with open(path, 'r') as handle:
            return [line for line in handle]
    except Exception:
        print('Problem reading file {}'.format(path))
        return 3
# end check_and_open_file
########################################################################
def check_and_open_json_file(path):
    """ returns dictionary

    Parses 'path' as JSON.  On a missing file, a read error or invalid
    JSON a message is printed and 3 is returned, as before.
    """
    if not os.path.isfile(path):
        print('File {} does not exist.'.format(path))
        return 3
    try:
        with open(path, 'r') as handle:
            return json.loads(handle.read())
    except Exception as err:
        print('Problem reading file {}'.format(path))
        print('Problem reading file {}'.format(err))
        return 3
# end check_and_open_json_file
########################################################################
def get_pressure_in_MPa():
    # Standard atmospheric pressure in MPa.
    return 0.101325
########################################################################
def get_calc_option():
    # Default correlation selector (1 = Glueck) for the water-property
    # functions in this module.
    return 1
########################################################################
def cp_fluid_water(temp_in_C, p_in_MPa, calc_option):
    """ returns specific heat capacity of fluid water in kJ / kg / K

    Args:
        temp_in_C: water temperature in degrees Celsius.
        p_in_MPa: pressure in MPa (currently unused by every option).
        calc_option: selects the correlation:
            0 - IAPWS R7-97(2012) industrial formulation (not implemented;
                returns the original placeholder value 0.0),
            1 - function c_H2O_von_t in stoffdat.for,
            2 - GLUECK Zustands- und Stoffwerte 1991 (the source comment
                states J/kgK, but the magnitude ~4.17 is kJ/kg/K -- the
                unit note in the original appears inconsistent).

    Raises:
        ValueError: for an unknown calc_option (the original code raised an
            UnboundLocalError for any value above 2).
    """
    temp = temp_in_C
    if calc_option == 0:
        # IAPWS-IF97 is not implemented; keep the original placeholder.
        return 0.0
    elif calc_option == 1:
        # function c_H2O_von_t in stoffdat.for
        return 4.206328 + (-0.001131471 + 0.00001224984 * temp) * temp
    elif calc_option == 2:
        # GLUECK Zustands- und Stoffwerte 1991;
        # validity 10 < t < 200 C, max error ~0.45% at p = 1 MPa
        return 4.173666 + 4.691707 * 0.00001 * temp - 6.695665 * 0.0000001 * temp * temp + 4.217099 * 0.00000001 * temp * temp * temp
    raise ValueError('unknown calc_option: {}'.format(calc_option))
# end cp_fluid_water
########################################################################
def alpha(t_wall_in_C, t_fluid_in_C, pipe_length_in_m, equiv_diam_in_m, Prandtl_nr, Reynolds_nr, lambda_fluid_in_W_m_K):
    """Convective heat-transfer coefficient alpha in W/m2/K for pipe flow,
    after Glueck's second method.

    Re == 0 uses a stagnant-fluid expression; Re <= 2300 uses the laminar
    Nusselt correlation, Re >= 10000 the turbulent one, and in between the
    two are blended linearly (gamma factor).  Negative Re yields Nu = 0.
    NOTE(review): t_wall_in_C and t_fluid_in_C are only used by the
    commented-out debug print -- confirm whether they are still needed.
    """
    #print('Re = {}; Pr = {}; d = {}; lam = {}; L = {}; t_wall = {}; t_fluid = {}'.format(Reynolds_nr, Prandtl_nr, equiv_diam_in_m, lambda_fluid_in_W_m_K, pipe_length_in_m, t_wall_in_C, t_fluid_in_C))
    # returns alpha in W/m2/K - after Glueck's second method
    if(Reynolds_nr==0.0):
        # no flow: conduction-like expression based on the equivalent diameter
        return lambda_fluid_in_W_m_K/(0.5*equiv_diam_in_m*(1.0-(0.5**0.5)))
    elif(Reynolds_nr>0.0):
        # laminar Nusselt number from the Re*Pr*d/L group
        RePrDl = Reynolds_nr * Prandtl_nr * equiv_diam_in_m / pipe_length_in_m
        dum1 = (RePrDl**0.333) * 1.615 - 0.7
        dum2 = (RePrDl**0.5) * ((2.0/(1.0 + 22.0 * Prandtl_nr))**0.167)
        Nu_lam = (3.66**3 + 0.7**3 + dum1**3 + dum2**3)**(1.0/3.0)
        # turbulent Nusselt number (friction-factor style correlation)
        BB = 1.0/((5.09*(math.log(Reynolds_nr)/math.log(10.0))-4.24)**2.0)
        dum1 = 1.0 + ((equiv_diam_in_m/pipe_length_in_m)**(2.0/3.0))
        dum2 = 1.0 + 12.7 * (BB**0.5) * ((Prandtl_nr**(2.0/3.0)) - 1.0)
        Nu_turb = BB * Reynolds_nr * Prandtl_nr * dum1 / dum2
        if(Reynolds_nr<=2300.0):
            Nu = Nu_lam
        elif(Reynolds_nr>=10000.0):
            Nu = Nu_turb
        else:
            # transition region: recompute the laminar value at Re = 2300
            # and the turbulent value at Re = 10000, then blend linearly.
            RePrDl = 2300.0 * Prandtl_nr * equiv_diam_in_m / pipe_length_in_m
            dum1 = (RePrDl**0.333) * 1.615 - 0.7
            dum2 = (RePrDl**0.5) * ((2.0/(1.0 + 22.0 * Prandtl_nr))**0.167)
            Nu_lam = (3.66**3 + 0.7**3 + dum1**3 + dum2**3)**(1.0/3.0)
            BB = 1.0/((5.09*(math.log(10000.0)/math.log(10.0))-4.24)**2.0)
            dum1 = 1.0 + ((equiv_diam_in_m/pipe_length_in_m)**(2.0/3.0))
            dum2 = 1.0 + 12.7 * (BB**0.5) * ((Prandtl_nr**(2.0/3.0)) - 1.0)
            Nu_turb = BB * 10000.0 * Prandtl_nr * dum1 / dum2
            gamma = (Reynolds_nr - 2300.0) / (10000.0 - 2300.0)
            Nu = (1.0-gamma) * Nu_lam + gamma * Nu_turb
            #print('RePrDl = {}; d = {} ; L = {}'.format(RePrDl, equiv_diam_in_m, pipe_length_in_m))
            #print('\n Nu = {}; gamma = {} ; dum2 = {}\n'.format(Nu, gamma, dum2))
    else:
        Nu = 0.0
    # W/(m2.K) = W/m/K / m
    return Nu * lambda_fluid_in_W_m_K / equiv_diam_in_m
# end alpha
########################################################################
def mu_water_in_m2_s(tFluid):
    """Kinematic viscosity of water in m2/s; reciprocal cubic fit over the
    temperature in degrees Celsius."""
    denominator = (556272.7 + 19703.39 * tFluid + 124.4091 * (tFluid ** 2) - 0.3770952 * (tFluid ** 3))
    return 1.0 / denominator
# end mu_water_in_m2_s
########################################################################
def Prandtl_number_water(tFluid):
    """Prandtl number of water (dimensionless); reciprocal cubic fit over
    the temperature in degrees Celsius, clamped to a small positive floor."""
    poly = (0.07547718 + 0.00276297 * tFluid + 0.00003210257 * tFluid * tFluid - 0.0000001015768 * tFluid * tFluid * tFluid)
    return max(1.0 / poly, 0.00000001)
# end Prandtl_number_water
########################################################################
def lambda_water_W_m_K(tFluid_in_gradC):
    """Thermal conductivity of water in W/(m*K); quartic fit in the
    absolute temperature."""
    t_kelvin = tFluid_in_gradC + 273.15
    coeff0 = -2.4149
    coeff1 = 2.45165 * (10.0)**(-2.0)
    coeff2 = -0.73121 * (10.0)**(-4.0)
    coeff3 = 0.99492 * (10.0)**(-7.0)
    coeff4 = -0.5373 * (10.0)**(-10.0)
    result = (coeff0 + coeff1*t_kelvin) + coeff2*(t_kelvin**2) + coeff3*(t_kelvin**3) + coeff4*(t_kelvin**4)
    return result
# end lambda_water_W_m_K
########################################################################
def interpolate_value_from_list_of_dicts(value1, tag_of_val1, list_of_dicts, tag_of_result):
    """ returns the linear interpolation of y-value for x-value of 'value1'
    assumptions are:
    - x-values are saved with the tag 'tag_of_val1'
    - y-values are saved with the tag 'tag_of_result'
    - x-values are monotone (growing or falling) with index

    Bug fixes relative to the original version:
    - the descending branch was guarded by "elif():" (always false), so
      lists with falling x-values were never scanned correctly;
    - the fallback return indexed list_of_dicts[len(list_of_dicts)],
      which always raised an IndexError;
    - the leading duplicate-skip loop could run past the end of the list.
    """
    if len(list_of_dicts) == 0:
        return 0  # list is empty
    if len(list_of_dicts) == 1:
        return list_of_dicts[0][tag_of_result]  # only one element
    ii = 0
    # skip leading entries whose x-values are identical
    while (ii + 1 < len(list_of_dicts)
           and list_of_dicts[ii][tag_of_val1] == list_of_dicts[ii + 1][tag_of_val1]):
        ii += 1
    if ii + 1 >= len(list_of_dicts):
        # all x-values identical: no slope can be formed
        return list_of_dicts[ii][tag_of_result]
    if list_of_dicts[ii][tag_of_val1] < list_of_dicts[ii + 1][tag_of_val1]:
        # growing x-values: advance to the first entry with x >= value1
        while ii < len(list_of_dicts) and list_of_dicts[ii][tag_of_val1] < value1:
            ii += 1
    else:
        # falling x-values: advance to the first entry with x <= value1
        while ii < len(list_of_dicts) and list_of_dicts[ii][tag_of_val1] > value1:
            ii += 1
    if ii > 0:
        if ii >= len(list_of_dicts):
            ii = len(list_of_dicts) - 1  # extrapolate beyond the last segment
        lo = list_of_dicts[ii - 1]
        hi = list_of_dicts[ii]
    else:
        # ii == 0: extrapolate using the first segment
        lo = list_of_dicts[0]
        hi = list_of_dicts[1]
    x1, y1 = lo[tag_of_val1], lo[tag_of_result]
    x2, y2 = hi[tag_of_val1], hi[tag_of_result]
    # a = (y2 - y1) / (x2 - x1); b = (x2 * y1 - x1 * y2) / (x2 - x1)
    AA = (y2 - y1) / (x2 - x1)
    BB = (x2 * y1 - x1 * y2) / (x2 - x1)
    return AA * value1 + BB
# end interpolate_value_from_list_of_dicts
########################################################################
def get_significant_parts(line):
    """ line is list of strings
    function returns list of nonempty elements"""
    return [element for element in line if element != '']
# end get_significant_parts
########################################################################
def get_ith_column(ii, line):
    """ returns the ii-th (1-based) non-empty element of the list 'line' """
    significant = get_significant_parts(line)
    return significant[ii - 1]
# end get_ith_column
########################################################################
def get_tab_from_list_of_dicts(tab_to_find, val_to_find, tab_to_return, list_of_dics, precision, growing, first_idx):
    """Return the value under 'tab_to_return' from the first dict whose
    'tab_to_find' value lies within 'precision' of 'val_to_find'.

    The entry at 'first_idx' is probed first (fast path); otherwise the
    whole list is scanned in order.  Returns None when nothing matches.
    """
    def _matches(entry):
        # growing: ref - precision < val <= ref
        # falling: ref <= val < ref + precision
        ref = entry[tab_to_find]
        if growing:
            return ref - precision < val_to_find <= ref
        return ref <= val_to_find < ref + precision

    if first_idx >= len(list_of_dics):
        first_idx = 0
    if _matches(list_of_dics[first_idx]):
        return list_of_dics[first_idx][tab_to_return]
    for entry in list_of_dics:
        if _matches(entry):
            return entry[tab_to_return]
########################################################################
def min_val_in_list_of_dicts(tab_to_find, list_of_dicts):
    """Return the smallest value stored under key *tab_to_find* across all
    dicts in *list_of_dicts*.

    Prints an error and returns None for an empty list (original behaviour
    kept for backward compatibility).
    """
    if not list_of_dicts:
        print('ERROR in utils.min_val_in_list_of_dicts :: List is empty')
        return None
    return min(elem[tab_to_find] for elem in list_of_dicts)
########################################################################
# ==================================================================
def convert_time_to_hours(dtime):
    # Convert a datetime into a running hour count relative to 1.1.2000,
    # using the fixed 365-day year of hours_of_year_month().
    # NOTE(review): the day contributes dtime.day * 24 (not (day - 1) * 24),
    # so 1.1.2000 00:00 maps to 24.0 rather than 0.0 -- confirm the callers
    # rely on this offset before changing it.
    # microsecond / 3.6e9 converts microseconds to hours.
    return (hours_of_year_month(dtime) + dtime.day * 24.0 + dtime.hour + dtime.minute/60.0 + dtime.second/3600.0 + dtime.microsecond/3600000000)
#end convert_time_to_hours
########################################################################
# ==================================================================
def hours_of_year_month(dtime):
    """Hours contributed by the whole years and whole months elapsed since
    1.1.2000 for the time stamp *dtime*.

    Uses a fixed 365-day year (8760 h) and ignores leap days, exactly like
    the original month-lookup-table implementation.
    """
    days_per_month = (31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)
    years = dtime.year - 2000
    # days in all months fully elapsed before the current one
    days = sum(days_per_month[:dtime.month - 1])
    return years * 8760.0 + days * 24.0
#end hours_of_year_month
########################################################################
# ==================================================================
def get_time_in_hour(line):
    # Parse the first whitespace column of a weather-file line as the hour
    # stamp (float).  Column numbering is 1-based, see get_ith_column().
    return float(get_ith_column(1, line))
#end get_time_in_hour
########################################################################
# ==================================================================
def linear_interpolation(xx, x1, x2, y1, y2):
    """Linearly interpolate (or extrapolate) the value at *xx* on the
    straight line through the points (x1, y1) and (x2, y2).

    The endpoint short-circuits return exact endpoint values and also avoid
    a division by zero when x1 == x2 and xx equals them.
    """
    if xx == x1:
        return y1
    elif xx == x2:  # BUG FIX: was "xx == y2", comparing x against a y-value
        return y2
    else:
        aa = (y2 - y1) / (x2 - x1)
        bb = (x2 * y1 - x1 * y2) / (x2 - x1)
        return (aa * xx + bb)
#end linear_interpolation
########################################################################
def extract_time_stamp_from_string(mystr):
    """Parse a time stamp like "2020-01-02T03:04:05.000123" (or with a
    space instead of the "T") into a datetime.

    Robustness fix: the fractional-second part is now optional (it defaults
    to 0), matching extract_hms_time_from_string() below; previously a
    stamp without a "." crashed with a ValueError on unpacking.
    Note: the digits behind the dot are used verbatim as the microsecond
    count (".5" -> 5 us, not 500000 us), as in the original code.
    Raises ValueError for an unrecognizable format (the original printed a
    message and then crashed with a NameError).
    """
    if "T" in mystr:
        mydate, mytime = mystr.split("T")
    elif " " in mystr:
        mydate, mytime = mystr.split(" ")
    else:
        raise ValueError('extract_time_stamp_from_string: format of time stamp is not recognizable: {}'.format(mystr))
    myyear, mymonth, myday = mydate.split("-")
    myhour, myminute, mysecond = mytime.split(":")
    if '.' in mysecond:
        mysecond, mymicrosecond = mysecond.split(".")
    else:
        mymicrosecond = '0'
    return datetime(year=int(myyear), month=int(mymonth), day=int(myday), hour=int(myhour), minute=int(myminute), second=int(mysecond), microsecond=int(mymicrosecond))
# end extract_time_stamp_from_string
########################################################################
def extract_hms_time_from_string(mystr):
    """Parse "H:M:S[.f]" -- or a bare seconds value -- into a datetime.time.

    Hour and minute default to 0 when only seconds are given; the digits
    behind the dot are used verbatim as the microsecond count.
    """
    if ':' in mystr:
        hours, minutes, seconds = mystr.split(":")
    else:
        hours, minutes, seconds = '0', '0', mystr
    if '.' in seconds:
        seconds, fraction = seconds.split(".")
    else:
        fraction = '0'
    return time(hour=int(hours), minute=int(minutes), second=int(seconds), microsecond=int(fraction))
# end extract_time_stamp_from_string
########################################################################
def get_factor_rounding():
    """Scale factor (1e8) used to split a UTC epoch value into a high and a
    low numeric part, see decompose_utc_time_to_floats()."""
    return 1.0e8
# end get_factor_rounding
########################################################################
def build_full_utc_time_from_elements(x1, x2, x3):
    # Recombine the three float components produced by
    # decompose_utc_time_to_floats() into a datetime (UTC):
    # x1 = high part (epoch // 1e8), x2 = low integer part,
    # x3 = sub-second fraction.  All inputs are doubles.
    myshft = get_factor_rounding()
    xutc = x1 * myshft + x2 + x3
    return datetime.utcfromtimestamp(float(xutc))
# end build_full_utc_time_from_elements
########################################################################
def build_small_utc_time_from_full_one(xtime):
    """Return only the "low" component of an epoch value *xtime*: the
    integer remainder below the 1e8 scale factor (the sub-second fraction
    is discarded).  See decompose_utc_time_to_floats() for the full split.
    """
    myshft = get_factor_rounding()
    x1 = float(int(xtime / myshft))          # high part
    return float(int(xtime - x1 * myshft))   # low integer part (was x2)
# end build_small_utc_time_from_full_one
########################################################################
def decompose_utc_time_to_floats(xtime):
    # Split an epoch time (float seconds) into three floats that can be
    # transmitted as separate numeric attributes:
    #   x1 = high part (xtime // 1e8)
    #   x2 = low integer part (remainder below 1e8)
    #   x3 = sub-second fraction
    # Recombine with build_full_utc_time_from_elements().
    myshft = get_factor_rounding()
    x1 = float(int(xtime/myshft))
    x2 = float(int(xtime-x1*myshft))
    x3 = xtime - int(xtime)
    return (x1, x2, x3)
# end decompose_utc_time_to_floats
########################################################################
def my_thread_kill():
    """Raise KeyboardInterrupt in the main thread, then terminate this one.

    BUG FIX: the original called thread.interrupt.main(), which is not a
    valid attribute and raised AttributeError; the API is interrupt_main().
    NOTE(review): assumes the module is available under the name "thread"
    (Python 2 name / an alias of _thread) -- confirm against this file's
    imports.  The print after sys.exit() was unreachable and was removed.
    """
    thread.interrupt_main()
    print('thread exit')
    sys.exit()
# end my_thread_kill
########################################################################
# ==================================================================
def get_ambient_temperature(simulation, wetter_file, actual_time, start_datetime, start_sim_inh, end_sim_inh):
    # returns ambient air temperature as read from the wetter_file in the TRY04 format
    # simulation - flag for real time or file based (not used inside the body)
    # wetter_file - list of lines of a weather file in TRY04 format
    # actual_time - the current time or current simulation time in the datetime format
    # start_datetime - start of the calculations in datetime format
    # start_sim_inh - starting point of the simulation in hours - will be found in the wetter_file
    # end_sim_inh - end point of the simulation in hours (not used inside the body)
    # NOTE(review): get_jth_column_val() below looks like a generalized and
    # corrected version of this routine (wrap-around counter, ">=" bound);
    # prefer it for new code.
    condition = True
    nn = len(wetter_file)
    # simulation time in h on the weather file's hour axis
    simtime = ((actual_time - start_datetime).total_seconds() / 3600.0) + start_sim_inh
    ii = 0
    # scan forward until the line's hour stamp reaches simtime
    while condition:
        line1 = get_significant_parts(wetter_file[ii].rstrip().split(" "))
        hour = get_time_in_hour(line1)
        condition = (hour < simtime)
        ii = ii + 1
        if (ii > nn):
            # NOTE(review): wetter_file[nn] would already raise IndexError one
            # step before this test triggers -- probably needs ">=" as in
            # get_jth_column_val(); confirm.
            ii = 0
    # step back to the pair of lines bracketing simtime
    if (ii == 0):
        ii = nn
    else:
        ii = ii - 1
        jj = ii - 1
        # NOTE(review): jj is only assigned in this else-branch; when ii == 0
        # the "if jj<0" below raises NameError.  Compare get_jth_column_val(),
        # where jj is computed unconditionally -- confirm the intended indent.
    if jj<0:
        jj = nn - 1
    line2 = get_significant_parts(wetter_file[jj].rstrip().split(" "))
    # interpolate column 8 between the two bracketing lines
    x1 = hour
    x2 = get_time_in_hour(line2)
    y1 = float(get_ith_column(8, line1))
    y2 = float(get_ith_column(8, line2))
    # time since the beginning of the start of the simulation in hours
    return linear_interpolation(simtime, x1, x2, y1, y2)
#end get_ambient_temperature
# ==================================================================
def get_jth_column_val(simulation, wetter_file, actual_time, start_datetime, start_sim_inh, end_sim_inh, max_counter, time_col_nr, val_col_nr):
    # Generalized reader for periodic column files (e.g. TRY04 weather data):
    # returns the value of column val_col_nr linearly interpolated at the
    # current simulation time; column time_col_nr carries the hour stamp.
    # simulation - flag for real time or file based (not used inside the body)
    # wetter_file - list of data lines
    # actual_time - the current time or current simulation time in the datetime format
    # start_datetime - start of the calculations in datetime format
    # start_sim_inh - starting point of the simulation in hours - will be found in the file
    # end_sim_inh - end point of the simulation in hours (not used inside the body)
    # max_counter - number of data lines per period (e.g. 8760 for a year);
    #               the file is read cyclically and kk counts full wrap-arounds
    condition = True
    simtime = ((actual_time - start_datetime).total_seconds() / 3600.0) + start_sim_inh # simulation time in h
    ii = 0
    kk = 0
    # advance until the wrap-aware hour stamp reaches simtime
    while condition:
        line1 = get_significant_parts(wetter_file[ii].rstrip().split(" "))
        hour = float(get_ith_column(time_col_nr, line1)) + kk * max_counter
        condition = (hour < simtime)
        ii = ii + 1
        if (ii >= max_counter):
            ii = 0
            kk = kk + 1
    # step back to the pair of lines bracketing simtime
    if (ii == 0):
        ii = max_counter # ==> jj after this if will become jj:=max_counter - 1
    else:
        ii = ii - 1
    jj = ii - 1
    if jj<0: # should never take place thanks to the previous if
        jj = max_counter - 1
    line2 = get_significant_parts(wetter_file[jj].rstrip().split(" "))
    x1 = hour
    x2 = float(get_ith_column(time_col_nr, line2))
    y1 = float(get_ith_column(val_col_nr, line1))
    y2 = float(get_ith_column(val_col_nr, line2))
    # time since the beginning of the start of the simulation in hours
    return linear_interpolation(simtime, x1, x2, y1, y2)
#end get_jth_column_val
#=======================================================================
def it_is_winter(actual_time):
    """True during the heating ("winter") season: 1.11 -- 20.03.

    BUG FIX: the original used range(11, 12) and range(1, 2), whose upper
    bounds are exclusive, so all of December and all of February were
    wrongly reported as non-winter.
    """
    if actual_time.month in (11, 12, 1, 2):
        return True
    elif (actual_time.month == 3) and (actual_time.day <= 20):
        return True
    else:
        return False
#=======================================================================
def it_is_summer(actual_time):
    """True during the summer season: 15.05 -- 14.09.

    BUG FIX: the original used range(6, 8), whose upper bound is exclusive,
    so all of August was wrongly reported as non-summer.
    """
    if actual_time.month in (6, 7, 8):
        return True
    elif (actual_time.month == 5) and (actual_time.day >= 15):
        return True
    elif (actual_time.month == 9) and (actual_time.day <= 14):
        return True
    else:
        return False
#=======================================================================
def get_slp_data_set(actual_time, el_data, time_slot_in_s):
    """Return the standard-load-profile (SLP) data set of predicted
    electrical loads in kW for one day, divided into slots of
    *time_slot_in_s* seconds and rotated so that the slot containing
    *actual_time* comes first.

    el_data is the json structure defined in config.json under
    prediction->power->SLP, with one profile per season and day type.
    """
    # season: summer 15.05-14.09, winter 1.11-20.03, otherwise transition
    if it_is_summer(actual_time):
        season = el_data['summer time']
    elif it_is_winter(actual_time):
        season = el_data['winter time']
    else:
        season = el_data['transition period']
    # day type: Mon-Fri workday, Saturday, Sunday.
    # BUG FIX: range(1, 5) excluded Friday (isoweekday 5), so Fridays fell
    # through to the Sunday profile.
    if actual_time.isoweekday() in range(1, 6):
        data_set = season['workday']
    elif actual_time.isoweekday() == 6:
        data_set = season['Saturday']
    else:
        data_set = season['Sunday']
    # seconds since midnight, then the index of the enclosing time slot
    act_time_in_s = actual_time.hour*3600.0 + actual_time.minute*60.0 + actual_time.second + actual_time.microsecond / 1000000.0
    idx = int(act_time_in_s // time_slot_in_s)
    # rotate so that the record of the actual time comes first
    return data_set[idx:] + data_set[:idx]
# end get_slp_data_set
#=======================================================================
def provision_rvk(mdevice_id, mentity_name, mentity_type, provisioning_endpoint):
    """Register ("provision") this RVK device with the FIWARE IoT Agent.

    POSTs a device description (MQTT / UltraLight transport plus the full
    list of sensor and state attributes) to *provisioning_endpoint* using
    the openiot FIWARE service headers.
    Returns 0 when the agent answers with an empty JSON object (success),
    -1 otherwise.

    NOTE(review): a second function with the same name is defined further
    down in this module and shadows this one at import time -- confirm
    which version is wanted and remove the other.
    """
    #
    # Provision FiPy sensor 002
    #
    payload = {
        "devices": [
            {
                "device_id": mdevice_id,
                "entity_name": mentity_name,
                "entity_type": mentity_type,
                "protocol": "PDI-IoTA-MQTT-UltraLigh",
                "timezone": "Europe/Berlin",
                "transport": "MQTT",
                "attributes": [
                    {"object_id": "T01_Sp01", "name": "T01_Sp01", "type":"Number"},
                    {"object_id": "T02_Sp02", "name": "T02_Sp02", "type":"Number"},
                    {"object_id": "T03_Sp03", "name": "T03_Sp03", "type":"Number"},
                    {"object_id": "T04_Sp04", "name": "T04_Sp04", "type":"Number"},
                    {"object_id": "T05_Sp05", "name": "T05_Sp05", "type":"Number"},
                    {"object_id": "T06_Sp06", "name": "T06_Sp06", "type":"Number"},
                    {"object_id": "T07_Sp07", "name": "T07_Sp07", "type":"Number"},
                    {"object_id": "T08_Sp08", "name": "T08_Sp08", "type":"Number"},
                    {"object_id": "T09_Sp09", "name": "T09_Sp09", "type":"Number"},
                    {"object_id": "T10_Sp10", "name": "T10_Sp10", "type":"Number"},
                    {"object_id": "T11_Sp11", "name": "T11_Sp11", "type":"Number"},
                    {"object_id": "T12_Sp12", "name": "T12_Sp12", "type":"Number"},
                    {"object_id": "T13_Sp13", "name": "T13_Sp13", "type":"Number"},
                    {"object_id": "T14_Sp14", "name": "T14_Sp14", "type":"Number"},
                    {"object_id": "T15_Sp15", "name": "T15_Sp15", "type":"Number"},
                    {"object_id": "T16_Sp16", "name": "T16_Sp16", "type":"Number"},
                    {"object_id": "T17_Sp17", "name": "T17_Sp17", "type":"Number"},
                    {"object_id": "T18_Sp18", "name": "T18_Sp18", "type":"Number"},
                    {"object_id": "T19_Sp19", "name": "T19_Sp19", "type":"Number"},
                    {"object_id": "T20_Sp20", "name": "T20_Sp20", "type":"Number"},
                    {"object_id": "T21_DomesticHotWater", "name": "T21_DomesticHotWater", "type":"Number"},
                    {"object_id": "T22_DomesticColdWater", "name": "T22_DomesticColdWater", "type":"Number"},
                    {"object_id": "T23_Supply_HeatingBeforeMixValve", "name": "T23_Supply_HeatingBeforeMixValve", "type":"Number"},
                    {"object_id": "T24_Return_HeatingCircuit", "name": "T24_Return_HeatingCircuit", "type":"Number"},
                    {"object_id": "T25_Supply_HeatingCircuit", "name": "T25_Supply_HeatingCircuit", "type":"Number"},
                    {"object_id": "T26_Supply_CHPunit", "name": "T26_Supply_CHPunit", "type":"Number"},
                    {"object_id": "T27_Return_CHPunit", "name": "T27_Return_CHPunit", "type":"Number"},
                    {"object_id": "T28_Supply_GasBoiler", "name": "T28_Supply_GasBoiler", "type":"Number"},
                    {"object_id": "T29_Return_GasBoiler", "name": "T29_Return_GasBoiler", "type":"Number"},
                    {"object_id": "T30_AmbientAirTemperature", "name": "T30_AmbientAirTemperature", "type":"Number"},
                    {"object_id": "V01_ColdDrinkingWater", "name": "V01_ColdDrinkingWater", "type":"Number"},
                    {"object_id": "V02_HeatingCircuit", "name": "V02_HeatingCircuit", "type":"Number"},
                    {"object_id": "V03_CHPunit", "name": "V03_CHPunit", "type":"Number"},
                    {"object_id": "V04_GasBoiler", "name": "V04_GasBoiler", "type":"Number"},
                    {"object_id": "Wh01_HeatSources", "name": "Wh01_HeatSources", "type":"Number"},
                    {"object_id": "Wh02_HeaterRod", "name": "Wh02_HeaterRod", "type":"Number"},
                    {"object_id": "Wh03_MainMeter", "name": "Wh03_MainMeter", "type":"Number"},
                    {"object_id": "Vgas01_MainMeter", "name": "Vgas01_MainMeter", "type":"Number"},
                    {"object_id": "Vgas02_CHPunit", "name": "Vgas02_CHPunit", "type":"Number"},
                    {"object_id": "iteration", "name": "iteration", "type":"Number"},
                    {"object_id": "chp_status", "name": "chp_status", "type":"Number"},
                    {"object_id": "boiler_status", "name": "boiler_status", "type":"Number"},
                    {"object_id": "control_valve_hub", "name": "control_valve_hub", "type":"Number"},
                    {"object_id": "storage_tank_too_cold_status", "name": "storage_tank_too_cold_status", "type":"Number"},
                    {"object_id": "mass_flow_dhw", "name": "mass_flow_dhw", "type":"Number"},
                    {"object_id": "mass_flow_heating_water", "name": "mass_flow_heating_water", "type":"Number"},
                    {"object_id": "elctric_heater_status", "name": "elctric_heater_status", "type":"Number"},
                    {"object_id": "turnover_time_of_one_seg_in_h", "name": "turnover_time_of_one_seg_in_h", "type":"Number"}
                ]
            }
        ]
    }
    myheaders = {'Content-Type': 'application/json', 'fiware-service': 'openiot', 'fiware-servicepath': '/'}
    r = requests.post(provisioning_endpoint, data=json.dumps(payload), headers=myheaders)
    ant = r.json()
    # the IoT Agent answers {} on success
    if(ant == {}):
        print('provision_rvk: provisioning of the device with id {} returns {} and is therefore successfull'.format(mdevice_id, ant))
        return 0
    else:
        print('provision_rvk: provisioning of the device with id {} returns {} and therefore has failed'.format(mdevice_id, ant))
        return -1
# end provision_rvk
#=======================================================================
def list_registered_iot_devices_in_platform(provisioning_endpoint):
    # returns the list of provisioned iot devices
    # GET against the IoT Agent's device endpoint with the openiot FIWARE
    # service headers; the parsed JSON answer is returned as-is.
    payload = {}
    myheaders = {'Content-Type': 'application/json', 'fiware-service': 'openiot', 'fiware-servicepath': '/'}
    #r = requests.get("http://127.0.0.1:4041/iot/devices", data=json.dumps(payload), headers=myheaders)
    r = requests.get(provisioning_endpoint, data=json.dumps(payload), headers=myheaders)
    return r.json()
# end list_registered_iot_devices_in_platform
#=======================================================================
def get_last_substring_of_urn(urn, mychar):
    """Return the part of *urn* after the last occurrence of *mychar*,
    e.g. ("urn:ngsi-ld:rvk:001", ":") -> "001"; return "" when *mychar*
    does not occur in *urn* at all.

    Replaces the original manual find() loop with str.rsplit().  (For a
    multi-character *mychar* the original returned from the second
    character of the separator onward, which looks unintended; rsplit
    removes the whole separator.)
    """
    if mychar in urn:
        return urn.rsplit(mychar, 1)[1]
    return ""
# end get_last_substring_of_urn
#=======================================================================
def provision_rvk(mdevice_id, mentity_name, mentity_type, provisioning_endpoint):
    """Register ("provision") this RVK device with the FIWARE IoT Agent.

    Same payload as the earlier provision_rvk() definition above, but this
    version only logs the agent's answer and returns None instead of a
    0/-1 status code.

    NOTE(review): this definition shadows the earlier one of the same name
    at import time -- confirm which version is wanted and remove the other.
    """
    #
    # Provision FiPy sensor 002
    #
    print('utils: entered provision_rvk')
    payload = {
        "devices": [
            {
                "device_id": mdevice_id,
                "entity_name": mentity_name,
                "entity_type": mentity_type,
                "protocol": "PDI-IoTA-MQTT-UltraLigh",
                "timezone": "Europe/Berlin",
                "transport": "MQTT",
                "attributes": [
                    {"object_id": "T01_Sp01", "name": "T01_Sp01", "type":"Number"},
                    {"object_id": "T02_Sp02", "name": "T02_Sp02", "type":"Number"},
                    {"object_id": "T03_Sp03", "name": "T03_Sp03", "type":"Number"},
                    {"object_id": "T04_Sp04", "name": "T04_Sp04", "type":"Number"},
                    {"object_id": "T05_Sp05", "name": "T05_Sp05", "type":"Number"},
                    {"object_id": "T06_Sp06", "name": "T06_Sp06", "type":"Number"},
                    {"object_id": "T07_Sp07", "name": "T07_Sp07", "type":"Number"},
                    {"object_id": "T08_Sp08", "name": "T08_Sp08", "type":"Number"},
                    {"object_id": "T09_Sp09", "name": "T09_Sp09", "type":"Number"},
                    {"object_id": "T10_Sp10", "name": "T10_Sp10", "type":"Number"},
                    {"object_id": "T11_Sp11", "name": "T11_Sp11", "type":"Number"},
                    {"object_id": "T12_Sp12", "name": "T12_Sp12", "type":"Number"},
                    {"object_id": "T13_Sp13", "name": "T13_Sp13", "type":"Number"},
                    {"object_id": "T14_Sp14", "name": "T14_Sp14", "type":"Number"},
                    {"object_id": "T15_Sp15", "name": "T15_Sp15", "type":"Number"},
                    {"object_id": "T16_Sp16", "name": "T16_Sp16", "type":"Number"},
                    {"object_id": "T17_Sp17", "name": "T17_Sp17", "type":"Number"},
                    {"object_id": "T18_Sp18", "name": "T18_Sp18", "type":"Number"},
                    {"object_id": "T19_Sp19", "name": "T19_Sp19", "type":"Number"},
                    {"object_id": "T20_Sp20", "name": "T20_Sp20", "type":"Number"},
                    {"object_id": "T21_DomesticHotWater", "name": "T21_DomesticHotWater", "type":"Number"},
                    {"object_id": "T22_DomesticColdWater", "name": "T22_DomesticColdWater", "type":"Number"},
                    {"object_id": "T23_Supply_HeatingBeforeMixValve", "name": "T23_Supply_HeatingBeforeMixValve", "type":"Number"},
                    {"object_id": "T24_Return_HeatingCircuit", "name": "T24_Return_HeatingCircuit", "type":"Number"},
                    {"object_id": "T25_Supply_HeatingCircuit", "name": "T25_Supply_HeatingCircuit", "type":"Number"},
                    {"object_id": "T26_Supply_CHPunit", "name": "T26_Supply_CHPunit", "type":"Number"},
                    {"object_id": "T27_Return_CHPunit", "name": "T27_Return_CHPunit", "type":"Number"},
                    {"object_id": "T28_Supply_GasBoiler", "name": "T28_Supply_GasBoiler", "type":"Number"},
                    {"object_id": "T29_Return_GasBoiler", "name": "T29_Return_GasBoiler", "type":"Number"},
                    {"object_id": "T30_AmbientAirTemperature", "name": "T30_AmbientAirTemperature", "type":"Number"},
                    {"object_id": "V01_ColdDrinkingWater", "name": "V01_ColdDrinkingWater", "type":"Number"},
                    {"object_id": "V02_HeatingCircuit", "name": "V02_HeatingCircuit", "type":"Number"},
                    {"object_id": "V03_CHPunit", "name": "V03_CHPunit", "type":"Number"},
                    {"object_id": "V04_GasBoiler", "name": "V04_GasBoiler", "type":"Number"},
                    {"object_id": "Wh01_HeatSources", "name": "Wh01_HeatSources", "type":"Number"},
                    {"object_id": "Wh02_HeaterRod", "name": "Wh02_HeaterRod", "type":"Number"},
                    {"object_id": "Wh03_MainMeter", "name": "Wh03_MainMeter", "type":"Number"},
                    {"object_id": "Vgas01_MainMeter", "name": "Vgas01_MainMeter", "type":"Number"},
                    {"object_id": "Vgas02_CHPunit", "name": "Vgas02_CHPunit", "type":"Number"},
                    {"object_id": "iteration", "name": "iteration", "type":"Number"},
                    {"object_id": "chp_status", "name": "chp_status", "type":"Number"},
                    {"object_id": "boiler_status", "name": "boiler_status", "type":"Number"},
                    {"object_id": "control_valve_hub", "name": "control_valve_hub", "type":"Number"},
                    {"object_id": "storage_tank_too_cold_status", "name": "storage_tank_too_cold_status", "type":"Number"},
                    {"object_id": "mass_flow_dhw", "name": "mass_flow_dhw", "type":"Number"},
                    {"object_id": "mass_flow_heating_water", "name": "mass_flow_heating_water", "type":"Number"},
                    {"object_id": "elctric_heater_status", "name": "elctric_heater_status", "type":"Number"},
                    {"object_id": "turnover_time_of_one_seg_in_h", "name": "turnover_time_of_one_seg_in_h", "type":"Number"}
                ]
            }
        ]
    }
    #
    myheaders = {'Content-Type': 'application/json', 'fiware-service': 'openiot', 'fiware-servicepath': '/'}
    #r = requests.post("http://127.0.0.1:4041/iot/devices", data=json.dumps(payload), headers=myheaders)
    r = requests.post(provisioning_endpoint, data=json.dumps(payload), headers=myheaders)
    ant = r.json()
    print('utils provision_rvk: provisioning of device {} at end point {} returns {}'.format(mdevice_id, provisioning_endpoint, ant))
# end provision_rvk
#=======================================================================
def send_ini_data_to_platform(topic, y2, actual_time, client):
    """Publish an initialisation data set for the RVK device to the MQTT
    broker as one UltraLight payload ("name|value|name|value|...").

    topic       -- MQTT topic ("/<apiKey>/<sensor_name>/<attributes>")
    y2          -- dummy value published for every monitoring attribute
    actual_time -- naive datetime, interpreted as UTC
    client      -- connected MQTT client (must provide publish())

    The first 44 slots all carry *y2*; the last four carry the decomposed
    UTC time stamp (high part, low part, sub-second fraction) plus the raw
    epoch value, see decompose_utc_time_to_floats().
    NOTE(review): as in the original append sequence, those four time
    components are paired with the last four *sensor* column names
    (mass_flow_dhw .. turnover_time_of_one_seg_in_h) -- confirm the
    platform expects exactly this pairing.
    """
    columns = ['iteration',
               'T01_Sp01',
               'T02_Sp02',
               'T03_Sp03',
               'T04_Sp04',
               'T05_Sp05',
               'T06_Sp06',
               'T07_Sp07',
               'T08_Sp08',
               'T09_Sp09',
               'T10_Sp10',
               'T11_Sp11',
               'T12_Sp12',
               'T13_Sp13',
               'T14_Sp14',
               'T15_Sp15',
               'T16_Sp16',
               'T17_Sp17',
               'T18_Sp18',
               'T19_Sp19',
               'T20_Sp20',
               'T21_DomesticHotWater',
               'T22_DomesticColdWater',
               'T23_Supply_HeatingBeforeMixValve',
               'T24_Return_HeatingCircuit',
               'T25_Supply_HeatingCircuit',
               'T26_Supply_CHPunit',
               'T27_Return_CHPunit',
               'T28_Supply_GasBoiler',
               'T29_Return_GasBoiler',
               'T30_AmbientAirTemperature',
               'V01_ColdDrinkingWater',
               'V02_HeatingCircuit',
               'V03_CHPunit',
               'V04_GasBoiler',
               'Vgas01_MainMeter',
               'Vgas02_CHPunit',
               'Wh01_HeatSources',
               'Wh02_HeaterRod',
               'Wh03_MainMeter',
               'chp_status',
               'boiler_status',
               'control_valve_hub',
               'storage_tank_too_cold_status',
               'mass_flow_dhw',
               'mass_flow_heating_water',
               'elctric_heater_status',
               'turnover_time_of_one_seg_in_h']
    # epoch seconds of actual_time, treated as UTC
    xtime = actual_time.replace(tzinfo=timezone.utc).timestamp()
    (x1, x2, x3) = decompose_utc_time_to_floats(xtime)
    # 44 dummy readings followed by the time components; this replaces the
    # original block of 48 individual append() calls.
    data_to_send = [y2] * 44 + [x1, x2, x3, xtime]
    payloads = ['{}|{}'.format(c, d) for c, d in zip(columns, data_to_send)]
    client.publish(topic, '|'.join(payloads))
    print('send_ini_data_to_platform: published data to topic = {}; value = {}; at time = {}'.format(topic, y2, actual_time))
# end send_ini_data_to_platform
#=======================================================================
def undo_provisioning_and_exit(device_id, provisioning_endpoint):
    # Delete this device's registration from the IoT Agent and terminate
    # the process.  Called when the device could not register properly in
    # platform operation mode: removing the half-done provisioning lets the
    # next start register cleanly.
    myendpoint = "{}/{}".format(provisioning_endpoint, device_id)
    myheaders = {'Content-Type': 'application/json', 'fiware-service': 'openiot', 'fiware-servicepath': '/'}
    payload = {}
    r = requests.delete(myendpoint, data=json.dumps(payload), headers=myheaders)
    print('utils undo_provisioning_and_exit: provisioning of device {} at end point {} returns {}'.format(device_id, myendpoint, r))
    print('\n\nThis device das been shut down as it could not register properly with the platform in the platform operation mode. To get the device running, start it again - provisioning has been undone to enable proper registration with platform this time.\n')
    print('Alternatively: changes to the configuration of the platform and device could be made that would change the operation mode to the real time operation of the platform or to the pure simulation mode.\n')
    sys.exit(0)
# end undo_provisioning_and_exit
#=======================================================================
#=======================================================================
| [
"stephan.wiemann@tu-dresden.de"
] | stephan.wiemann@tu-dresden.de |
3a737b07b660fdd6fd6585b09adff68d0c33286b | b8c3b9046a15f4babb563cc7415c08f8a9a0824b | /Python-API/extension/artifact/src/hana_ml_artifact/generators/cloudfoundry.py | 9a0ccb1f4677e017312b624df7f2560cdb173cec | [
"Apache-2.0"
] | permissive | SAP-samples/hana-ml-samples | f0832fa843b5dd268e8b0869d7cb3746b27bfd8d | 72b512b05fe2c238c09e20027e3ae3cfcd976771 | refs/heads/main | 2023-08-30T23:46:12.374019 | 2023-08-24T16:20:47 | 2023-08-24T16:20:47 | 192,615,699 | 83 | 57 | Apache-2.0 | 2023-07-20T16:45:20 | 2019-06-18T21:36:03 | Jupyter Notebook | UTF-8 | Python | false | false | 728 | py | """
This module handles generation of all Cloud Foundry related artifacts based on the provided
consumption layer elements. Currently this has not yet been implemented
"""
import os
from ..config import ConfigConstants
from ..hana_ml_utils import DirectoryHandler
from ..hana_ml_utils import StringUtils
from ..sql_processor import SqlProcessor
class CloudFoundryGenerator(object):
    """Placeholder generator for Cloud Foundry artifacts (not implemented
    yet, per the module docstring).

    Mirrors the interface of the other artifact generators: construct with
    a config object, then call generate_artifacts().
    """
    def __init__(self, config):
        # Keep the config and apply generator-specific extensions (no-op).
        self.config = config
        self._extend_config()
    def generate_artifacts(self):
        # Not implemented: no artifacts are produced.
        return ''
    def _extend_config(self):
        # Hook for config extension; intentionally empty.
        pass
class CloudFoundryConsumptionProcessor(object):
    """Placeholder processor for the Cloud Foundry consumption layer
    (not implemented yet)."""
    def __init__(self, config):
        self.config = config
    def generate(self, path):
        # Not implemented: nothing is written to *path*.
        pass
"ronald.kleijn@sap.com"
] | ronald.kleijn@sap.com |
4b3aa7a1ee58238dd8a25b2a149447be16633036 | 64267b1f7ca193b0fab949089b86bc7a60e5b859 | /slehome/manage.py | 1cb83a303a492fa808560a2831d6104bd01a8931 | [] | no_license | hongdangodori/slehome | 6a9f2b4526c2783932627b982df0540762570bff | 3e558c78c3943dadf0ec485738a0cc98dea64353 | refs/heads/master | 2021-01-17T12:00:34.221088 | 2015-02-06T13:44:00 | 2015-02-06T13:44:00 | 28,847,585 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 250 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Standard Django entry point: select the settings module, then hand
    # the command line over to Django's management machinery.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "slehome.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| [
"chungdangogo@gmail.com"
] | chungdangogo@gmail.com |
69a014db23b4a930536b7973c0705de46b955c44 | 937f935c79e4fe8d879068e228f281f0b671c509 | /Daily_Coding/Stock/TwoPoint.py | f7f07e6496dadc60e1abc00d0309059735e940a6 | [] | no_license | ho2921ho/HomeWork | fa31e257c357fb41bd050510d4454e4ef170a7f9 | 605b0fd5562c35fc4f07ed2645c05c9497586703 | refs/heads/master | 2020-05-05T06:45:13.285060 | 2020-03-08T07:40:49 | 2020-03-08T07:40:49 | 179,800,084 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,736 | py | import datetime
import os
from bs4 import BeautifulSoup
import pandas as pd
# Log the (re)start of this script to log.txt.
s= str(datetime.datetime.now()) + 'TwoPoint_updated'
isfile = os.path.isfile('log.txt')
import requests
if isfile:
    with open('log.txt', 'a') as f: # file exists: append a new line
        f.write(s+'\n')
else :
    with open('log.txt', 'w') as f: # no file yet: create log.txt and write
        f.write(s+'\n')
import pickle
from dateutil.parser import parse
import numpy as np
from tqdm import tqdm
import warnings
warnings.filterwarnings("ignore")
## Load the per-stock price DataFrames (dfs) and the KOSPI listing table
## from the pickles produced by the data-collection step.
# Indicator idea: when the close drops by m%, the probability that the price
# rises by n% at least once within d business days.
# m = change of the close vs the previous day's close, in percent
# n = whether an n% (here 2%) rise occurred
# d = observation window in trading days
# NOTE(review): strftime("%H%M%S") on a *date* object always yields 000000,
# so "now" is effectively just the date -- confirm the raw_data file naming
# on disk matches this.
now = datetime.datetime.now().date().strftime("%y%m%d_%H%M%S")
with open(r"C:\DATA\Stock_data\raw_data{}.pickle".format(now),"rb") as fr:
    dfs = pickle.load(fr)
with open(r"C:\DATA\Stock_data\kospi_stocks.pickle","rb") as fr:
    kospi_stocks = pickle.load(fr)
class TwoPoint:
stock_cnt = 0
event_cnt = 0
def __init__(self,df,m,n,d,kind):
self.df = df.copy()
self.m = m
self.n = n
self.df['목표'] = self.df['종가']*((100+n)/100)
for i in range(d):
self.df['고가+'+str(i+1)] = self.df['고가'].shift(i+1)
self.df['m'] = (self.df['종가'] - self.df['종가'].shift(-1))*100/self.df['종가']
self.df['n'] = self.df['목표'] < self.df.iloc[:,8:-1].max(axis = 1)
self.df['날짜'] = [parse(x) for x in self.df['날짜']]
self.df.set_index(['날짜'], inplace = True)
self.kind = kind
self.df['n2'] = self.df['목표'] < self.df['고가+1']
def indx(self,mdf):
if self.kind == 'DownUp':
com_df = mdf[mdf['m'] < -self.m]
if len(com_df.values) != 0:
indx = round(sum(com_df['n'])/len(com_df.values),2)
else:
indx = np.nan
cnt = len(mdf)
event = sum(com_df['n'])
return [indx, event, cnt]
elif self.kind == 'AsolTwo':
if len(mdf.values) != 0:
indx = round(sum(mdf['n2'])/len(mdf.values),2)
else:
indx = np.nan
cnt = len(mdf)
event = sum(mdf['n2'])
return [indx, event, cnt]
def m1(self):
mdf = self.df.loc[pd.date_range(end = self.df.index[0], periods=30)].dropna()
return mdf
def m3(self):
mdf = self.df.loc[pd.date_range(end = self.df.index[0], periods=90)].dropna()
return mdf
def m6(self):
mdf = self.df.loc[pd.date_range(end = self.df.index[0], periods=180)].dropna()
return mdf
def y1(self):
mdf = self.df.loc[pd.date_range(end = self.df.index[0], periods=360)].dropna()
return mdf
self.indx_list.append((self.indx(mdf),len(mdf)))
def to_indx_list(self, key = 'indx'):
self.indx_list = []
if key == 'indx':
self.indx_list.append(self.indx(self.m1())[0])
self.indx_list.append(self.indx(self.m3())[0])
self.indx_list.append(self.indx(self.m6())[0])
self.indx_list.append(self.indx(self.y1())[0])
elif key == 'event':
self.indx_list.append(self.indx(self.m1())[1])
self.indx_list.append(self.indx(self.m3())[1])
self.indx_list.append(self.indx(self.m6())[1])
self.indx_list.append(self.indx(self.y1())[1])
elif key == 'cnt':
self.indx_list.append(self.indx(self.m1())[2])
self.indx_list.append(self.indx(self.m3())[2])
self.indx_list.append(self.indx(self.m6())[2])
self.indx_list.append(self.indx(self.y1())[2])
return self.indx_list
    def to_df(self):
        # Expose the internal, feature-augmented DataFrame for inspection.
        return self.df
# indx_df를 만들기 위한 과정.
# Build indx_df: per-ticker TwoPoint ratios over the 1m/3m/6m/1y windows,
# then merge in event counts, filter, sort, and attach company names.
kind = 'DownUp'
indx_df = dict()
for code in list(dfs.keys()):
    try:
        indx_df[code] = TwoPoint(dfs[code],3,2,5,kind).to_indx_list()
    except Exception:  # was a bare except: don't swallow KeyboardInterrupt/SystemExit
        print(code)
indx_df = pd.DataFrame(indx_df).T
indx_df.columns = ['1m','3m','6m','1y']
event_df = dict()
for code in list(dfs.keys()):
    try:
        event_df[code] = TwoPoint(dfs[code],3,2,5,kind).to_indx_list(key = 'event')
    except Exception:  # was a bare except: see above
        print(code)
event_df= pd.DataFrame(event_df).T
# Merge on the ticker-code index, drop incomplete rows, keep tickers whose
# worst window ratio is >= 0.8 and whose 1y event count is >= 10.
indx_df = indx_df.merge(event_df,how = 'left',on = indx_df.index)
indx_df = indx_df.dropna()
indx_df = indx_df[indx_df.min(axis = 1) >= 0.8]
indx_df = indx_df[indx_df[3] >= 10]
indx_df = indx_df.sort_values([0,1,2,3],ascending = False)
kospi_stocks['key_0'] = kospi_stocks['종목코드']
indx_df = indx_df.merge(kospi_stocks[['key_0','회사명']],how = 'left', on = 'key_0')
##
# Scrape each surviving ticker's current change rate from Naver Finance
# and save the final index table as a timestamped CSV.
m_s = []
for code in tqdm(indx_df['key_0']):
    url = 'https://finance.naver.com/item/sise.nhn?code='+code
    html = requests.get(url).text
    soup = BeautifulSoup(html, 'html.parser')
    # '#_rate > span' holds the intraday change-rate text on the quote page.
    m = soup.select('#_rate > span')[0].text.strip()
    m_s.append(m)
indx_df['m'] = m_s
name = 'TwoPoint_indx'
# NOTE(review): backslashes in this Windows path are unescaped ('\D', '\S',
# '\T'); they happen to survive as literals but a raw string would be safer.
indx_df.to_csv('C:\DATA\Stock_data\TwoPoint\{}_{}.csv'.format(name,now))
# 모두 클래스화, m유연화, 데이터 갱신, 지표 저장.111770
##
# Append a timestamped update marker to log.txt.
s = str(datetime.datetime.now()) + 'TwoPoint_updated'
# Mode 'a' appends when log.txt exists and creates it otherwise, so the
# previous os.path.isfile() branch (write-mode vs append-mode) was redundant.
with open('log.txt', 'a') as f:
    f.write(s+'\n')
"ho2921ho@naver.com"
] | ho2921ho@naver.com |
63f3621bd4a4a064b8f0a341a57fda5cb30c9c39 | 8f6b811c6e5b916be33a12262476b1be115095e5 | /datacube_ows/legend_utils.py | 52ce01a4dd61324db67f2d69341647252c901cf9 | [
"Apache-2.0"
] | permissive | ricardogsilva/datacube-ows | 811ac78e8d8e8b7bdcf6e7f5d126ddae14e6815f | ca984e808be45b333964a79e8cb1c644173ef876 | refs/heads/master | 2023-03-24T04:42:10.857167 | 2021-03-19T02:23:42 | 2021-03-19T02:23:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 352 | py | import io
import requests
from PIL import Image
def get_image_from_url(url):
    """Fetch *url* and return it as a PIL Image, or None.

    None is returned when the response is not a 200 with an ``image/png``
    content type (including when the header is missing entirely).
    """
    r = requests.get(url, timeout=1)
    # .get() avoids a KeyError when the server sends no content-type header.
    # (The dead "foo = requests.get" assignment was removed.)
    if r.status_code == 200 and r.headers.get('content-type') == 'image/png':
        # BytesIO(initial_bytes) starts positioned at 0 — no write/seek needed.
        bytesio = io.BytesIO(r.content)
        return Image.open(bytesio)
return None | [
"paul.haesler@data61.csiro.au"
] | paul.haesler@data61.csiro.au |
13a944d83d6cf902c75ad98a5ab8f111f384ec2f | 0e07267a48d49c794568dad5d5f1c9c35d5dc142 | /DataStoreApp/StoreApp/migrations/0029_remove_varification_balance_company.py | 256c7ba160a6995cb02d3ed30c23b17d37c01462 | [] | no_license | devpriyanka92/django-folder-subfolder | d8e1fdcb5dec3f3e95693e9888a3c04f46f14f0b | 7f84a41d9f612c82a0619258de7b020bed6887ef | refs/heads/master | 2023-04-27T14:25:43.861423 | 2019-06-08T11:32:16 | 2019-06-08T11:32:16 | 189,363,783 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 342 | py | # Generated by Django 2.2.1 on 2019-06-08 06:57
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the ``company`` field from the ``varification_balance`` model."""
    dependencies = [
        ('StoreApp', '0028_chart_of_account'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='varification_balance',
            name='company',
        ),
    ]
| [
"0"
] | 0 |
edccbbf77490969f81283113c6a1c31b1f11e883 | c6c5fd379461384130c958b77f3fe6939722d525 | /parse.py | 17242ed7c1f036d4cfac0e50225b42cd927a71f1 | [] | no_license | thetobysiu/witcher-books-processing | 9fdc94cf81d7972ba485e4a84ce38c931227ab8e | f6fa72557366aa232ead135fc9f688ba222da19b | refs/heads/master | 2023-05-05T20:48:50.056377 | 2021-05-31T20:21:23 | 2021-05-31T20:21:23 | 262,867,378 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,127 | py | # SIU KING WAI SM4701 Deepstory
from ebooklib import epub
from bs4 import BeautifulSoup, NavigableString
def parse_book(path):
    """Parse the EPUB at *path* into ``(book_title, {chapter_title: text})``.

    Walks the spine for contiguous chapter/epilogue documents, then scans
    each document's <section> tags, grouping paragraph text under chapter
    titles.  Two layouts are handled: a "part" layout (part-title tags seen,
    alt_mode) where chapter numbers reset content, and a flat layout where
    each chapter has its own number/title.
    """
    book = epub.read_epub(path)
    spine_list = [x[0] for x in book.spine]
    chapter_list = []
    # Keep only a contiguous run of chapter/epilogue spine items.
    for i, x in enumerate(spine_list):
        if 'chapter' in x or 'epilogue' in x:
            if chapter_list:
                if spine_list.index(chapter_list[-1]) + 1 == i:
                    chapter_list.append(x)
            else:
                chapter_list.append(x)
    chapters = [
        BeautifulSoup(book.get_item_with_id(chapter).get_content(), 'lxml')
        for chapter in chapter_list
    ]
    book_dict = {}
    chapter_number = ''
    chapter_title = ''
    alt_mode = False  # becomes True once a part-title tag is seen
    for chapter in chapters:
        content = []
        sect = ''
        for tag in chapter.find('section'):
            if type(tag) is not NavigableString:
                # Skip empty / whitespace-only / nbsp-only tags.
                if tag.text and tag.text != '\n' and tag.text != '\xa0':
                    tag_classes = tag.get('class', [])
                    if any('part-title' in x for x in tag_classes):
                        alt_mode = True
                        chapter_title = tag.text
                        if chapter_title not in book_dict:
                            book_dict[chapter_title] = []
                    elif any('chapter-number' in x for x in tag_classes):
                        if alt_mode:
                            # New chapter number within a part: restart content.
                            if chapter_number != tag.text and content:
                                content = []
                            chapter_number = tag.text
                        else:
                            chapter_title = tag.text
                            if chapter_title not in book_dict:
                                book_dict[chapter_title] = []
                    elif any('chapter-title' in x for x in tag_classes):
                        # A real title supersedes the provisional number key.
                        if chapter_title:
                            del book_dict[chapter_title]
                        chapter_title = tag.text
                        if chapter_title not in book_dict:
                            book_dict[chapter_title] = []
                    elif any('sect1' in x for x in tag_classes):
                        # Section break: flush accumulated paragraphs.
                        if sect != tag.text and content:
                            book_dict[chapter_title].append('\n'.join(content))
                            content = []
                        sect = tag.text
                    elif any(any(y in x for y in ['chap', 'epigraph', 'page-break', 'pb'])
                             for x in tag_classes
                             ) or any([tag.select(f'[class*="{x}"]')
                                       for x in ['attribution', 'decoration-rw10', 'dl']]):
                        # Decorative / front-matter tags: ignore.
                        pass
                    else:
                        content.append(tag.text)
        if chapter_title:
            book_dict[chapter_title].append('\n'.join(content))
            # Flat layout: each document closes its chapter.
            if not alt_mode:
                chapter_title = ''
    book_title = book.get_metadata('DC', 'title')[0][0]
    book_dict = {key: '\n'.join(value) for key, value in book_dict.items()}
    return book_title, book_dict
| [
"thetobysiu@gmail.com"
] | thetobysiu@gmail.com |
76ac2d60a69c2c9463da4ae6c4547c5b867dd6e8 | 50948d4cb10dcb1cc9bc0355918478fb2841322a | /azure-mgmt-network/azure/mgmt/network/v2018_12_01/models/topology_association.py | 7832c4a1904ba784c50b3cf5aae97796eb260dd0 | [
"MIT"
] | permissive | xiafu-msft/azure-sdk-for-python | de9cd680b39962702b629a8e94726bb4ab261594 | 4d9560cfd519ee60667f3cc2f5295a58c18625db | refs/heads/master | 2023-08-12T20:36:24.284497 | 2019-05-22T00:55:16 | 2019-05-22T00:55:16 | 187,986,993 | 1 | 0 | MIT | 2020-10-02T01:17:02 | 2019-05-22T07:33:46 | Python | UTF-8 | Python | false | false | 1,586 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class TopologyAssociation(Model):
    """Resources that have an association with the parent resource.

    :param name: The name of the resource that is associated with the parent
     resource.
    :type name: str
    :param resource_id: The ID of the resource that is associated with the
     parent resource.
    :type resource_id: str
    :param association_type: The association type of the child resource to the
     parent resource. Possible values include: 'Associated', 'Contains'
    :type association_type: str or
     ~azure.mgmt.network.v2018_12_01.models.AssociationType
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'association_type': {'key': 'associationType', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(TopologyAssociation, self).__init__(**kwargs)
        # Pull each known attribute out of kwargs, defaulting to None.
        for attribute in ('name', 'resource_id', 'association_type'):
            setattr(self, attribute, kwargs.get(attribute))
| [
"lmazuel@microsoft.com"
] | lmazuel@microsoft.com |
f91e304a783327df3d4912005c11b12d3bf3a797 | 4683079af4abcf271ad2b2f15a1f5052b22b3576 | /music/apps.py | 89af84c8e588dd5cd7cd10280e18e3225c5cbe37 | [] | no_license | AbdullahNoori/music_site | 21fb3a9d547d026393798b8af8d3be4a9fefa416 | 6f16f9c9bb40aabdfdae6e433f918002864210b4 | refs/heads/master | 2022-07-15T15:53:13.178544 | 2020-05-13T07:53:54 | 2020-05-13T07:53:54 | 257,746,874 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 344 | py | from django.apps import AppConfig
class MusicConfig(AppConfig):
    """Django application configuration for the ``music`` app."""
    # name = 'music_site'
    # label = 'my.music_site' # <-- this is the important line - change it to anything other than the default, which is the module name ('foo' in this case)
    name = 'music'
    # default_app_config = 'full.python.path.to.your.app.foo.apps.FooConfig'
"nooriabdullah86@gmail.com"
] | nooriabdullah86@gmail.com |
51a153a9f8b9fdfc05f3a13e0a66841449824427 | ee073b76054e7f5dc9a2c7f974e13d43f0981129 | /fantasy_data_golf_api/models/player.py | 78ca5d331638b23873e46874b8c0e3647a76fe7b | [] | no_license | jamesanglin/fantasy-data-golf-api | 5fc016f72dbcdb11b2c6b7055939a5bb7f9603b7 | c71db07d97364d514e224d576227a495826d1912 | refs/heads/master | 2022-08-01T23:51:04.685641 | 2020-06-02T05:13:31 | 2020-06-02T05:13:31 | 268,706,787 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,267 | py | # coding: utf-8
"""
Golf v2
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 2.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
def _swagger_property(attr):
    """Return a pass-through property for Swagger attribute *attr*.

    Each generated model attribute is a plain getter/setter pair around a
    private ``_<attr>`` slot; this factory replaces the several hundred
    lines of per-attribute boilerplate the code generator emitted, with
    identical runtime behaviour.
    """
    private_name = '_' + attr

    def _get(self):
        return getattr(self, private_name)

    def _set(self, value):
        setattr(self, private_name, value)

    return property(_get, _set,
                    doc="Gets or sets the %s of this Player." % attr)


class Player(object):
    """Swagger ``Player`` model for the golf API.

    The 24 attributes declared in ``swagger_types`` behave exactly like the
    generated per-attribute properties: simple get/set with no validation,
    backed by a private ``_<name>`` slot.
    """

    # Attribute name -> swagger type (used by generic (de)serializers).
    swagger_types = {
        'player_id': 'int',
        'first_name': 'str',
        'last_name': 'str',
        'weight': 'int',
        'swings': 'str',
        'pga_debut': 'int',
        'country': 'str',
        'birth_date': 'str',
        'birth_city': 'str',
        'birth_state': 'str',
        'college': 'str',
        'photo_url': 'str',
        'sport_radar_player_id': 'str',
        'pga_tour_player_id': 'int',
        'rotoworld_player_id': 'int',
        'roto_wire_player_id': 'int',
        'fantasy_alarm_player_id': 'int',
        'draft_kings_name': 'str',
        'fantasy_draft_name': 'str',
        'fan_duel_name': 'str',
        'fantasy_draft_player_id': 'int',
        'draft_kings_player_id': 'int',
        'fan_duel_player_id': 'int',
        'yahoo_player_id': 'int'
    }

    # Attribute name -> JSON key in the API payload.
    attribute_map = {
        'player_id': 'PlayerID',
        'first_name': 'FirstName',
        'last_name': 'LastName',
        'weight': 'Weight',
        'swings': 'Swings',
        'pga_debut': 'PgaDebut',
        'country': 'Country',
        'birth_date': 'BirthDate',
        'birth_city': 'BirthCity',
        'birth_state': 'BirthState',
        'college': 'College',
        'photo_url': 'PhotoUrl',
        'sport_radar_player_id': 'SportRadarPlayerID',
        'pga_tour_player_id': 'PgaTourPlayerID',
        'rotoworld_player_id': 'RotoworldPlayerID',
        'roto_wire_player_id': 'RotoWirePlayerID',
        'fantasy_alarm_player_id': 'FantasyAlarmPlayerID',
        'draft_kings_name': 'DraftKingsName',
        'fantasy_draft_name': 'FantasyDraftName',
        'fan_duel_name': 'FanDuelName',
        'fantasy_draft_player_id': 'FantasyDraftPlayerID',
        'draft_kings_player_id': 'DraftKingsPlayerID',
        'fan_duel_player_id': 'FanDuelPlayerID',
        'yahoo_player_id': 'YahooPlayerID'
    }

    def __init__(self, player_id=None, first_name=None, last_name=None, weight=None, swings=None, pga_debut=None, country=None, birth_date=None, birth_city=None, birth_state=None, college=None, photo_url=None, sport_radar_player_id=None, pga_tour_player_id=None, rotoworld_player_id=None, roto_wire_player_id=None, fantasy_alarm_player_id=None, draft_kings_name=None, fantasy_draft_name=None, fan_duel_name=None, fantasy_draft_player_id=None, draft_kings_player_id=None, fan_duel_player_id=None, yahoo_player_id=None):  # noqa: E501
        """Player - a model defined in Swagger"""
        args = locals()
        # The generated code initialised every slot to None and then ran the
        # setter only for non-None arguments; storing the argument directly
        # produces the identical end state.
        for attr in self.swagger_types:
            setattr(self, '_' + attr, args[attr])
        self.discriminator = None

    def to_dict(self):
        """Returns the model properties as a dict, recursing into values
        that themselves expose ``to_dict``."""
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    v.to_dict() if hasattr(v, "to_dict") else v for v in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in value.items()
                }
            else:
                result[attr] = value
        # NOTE: the generated "if issubclass(Player, dict)" branch was dead
        # code (Player is not a dict subclass) and has been dropped.
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, Player):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other


# Install the pass-through properties for every declared attribute
# (player_id, first_name, ..., yahoo_player_id).
for _attr in Player.swagger_types:
    setattr(Player, _attr, _swagger_property(_attr))
del _attr
| [
"jamesanglin@gmail.com"
] | jamesanglin@gmail.com |
7e8c77d3385ca946f3378e6d8630de594f140c6d | 1a73c18b0e450d6806cc9d1240e6d4f9ab1d6b80 | /views/forms/DetalleVentaProductoForm.py | 2d58855add909974239563f72da051596b62b110 | [] | no_license | DELTAxK2/misiontic-ecommerce | 7995081475708ce996e89673e819d62a1c8f4023 | 42196ce11856c8e5ec758a2d234eb1200ffcda67 | refs/heads/master | 2023-08-23T23:46:18.857204 | 2021-10-28T19:00:01 | 2021-10-28T19:00:01 | 422,978,056 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 508 | py | #Importación de clase Flask para desarrollo de Formularios
from flask_wtf import FlaskForm, form
from wtforms import StringField, FloatField
from wtforms.validators import InputRequired
class DetalleVentaProductoForm(FlaskForm):
    """WTForms form for one sale-detail line: sale code, product code,
    quantity and value — all required."""
    # Sale (venta) code this detail line belongs to.
    compra = StringField('Codigo Venta', validators=[InputRequired()])
    # Product code being sold.
    producto = StringField('Codigo Producto', validators=[InputRequired()])
    # Quantity sold.
    cantidad = FloatField('Cantidad', validators=[InputRequired()])
valor = FloatField('Valor', validators=[InputRequired()]) | [
"jiliar.silgado@gmail.com"
] | jiliar.silgado@gmail.com |
e95dacfb42b2aee9c6d787b4be4f205a9f109d6f | fbea457c552b3d96fafa61a697e347aced584938 | /Code/1.3 The society of mind.py | bedf4bb8f312e94b0b478686e2890eb1f4ca13a6 | [] | no_license | ATajadod94/The-Soceity-of-mind | 946f6589e92c821d68b9e0c9fe5bdf63e4fc31f5 | 3a9125036475ef41487b567d079545349412cc4d | refs/heads/master | 2020-03-19T04:49:43.697491 | 2018-06-04T00:46:26 | 2018-06-04T00:46:26 | 135,871,692 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 334 | py | class Mind:
# A society of mind
def __init__(self):
pass
def __str__(self): ## Consciousness (?)
pass
def __getitem__(self, key):
pass
print(""" In doing this, we'll try to initate how Galielo and Newton learned so much
by studying the simplest kind of pendelums and weights. """) | [
"atajadod94@gmail.com"
] | atajadod94@gmail.com |
58957c35d431c5e25d5dc8ff897cc048088a54f1 | 7627b1d16fa40064376c5854d3a4a5ed113919b8 | /Gym/Pong/QLearning/QLearning.py | 08c6d1e24288656af9bd8a67134e1adf600235a1 | [
"MIT"
] | permissive | z-Wind/Reinforcement_Learning | 68e17fed8f5d739c6845bd14da47997234ed1835 | e89257e5a152be5c4350e95a7faf62d66de3d45a | refs/heads/master | 2020-06-04T13:40:57.512988 | 2019-08-28T12:24:25 | 2019-08-28T12:24:25 | 192,045,721 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,573 | py | import torch
import torch.nn.functional as F
from torch.distributions import Categorical
import numpy as np
from Gym.models.QLearningBase import QLearningBase
class QLearning(QLearningBase):
    """Deep Q-learning agent for Pong: wires a conv `Net` (eval + target)
    and an Adam optimizer into the shared QLearningBase training loop."""
    def __init__(
        self,
        device,
        n_actions,
        n_features,
        learning_rate=0.01,
        gamma=0.9,
        tau=0.001,
        updateTargetFreq=10000,
        epsilonStart=1,
        epsilonEnd=0.2,
        epsilonDecayFreq=1000,
        mSize=10000,
        batchSize=200,
        startTrainSize=100,
        transforms=None,
    ):
        # Two identical networks: evaluation net (trained) and target net
        # (periodically soft-updated by the base class).
        netEval = Net(n_features, n_actions)
        netTarget = Net(n_features, n_actions)
        # The optimizer is the training tool: it receives all of the eval
        # net's parameters and the learning rate.
        optimizer = torch.optim.Adam(netEval.parameters(), lr=learning_rate)
        super().__init__(
            device=device,
            netEval=netEval,
            netTarget=netTarget,
            optimizer=optimizer,
            n_actions=n_actions,
            learning_rate=learning_rate,
            gamma=gamma,
            tau=tau,
            updateTargetFreq=updateTargetFreq,
            epsilonStart=epsilonStart,
            epsilonEnd=epsilonEnd,
            epsilonDecayFreq=epsilonDecayFreq,
            mSize=mSize,
            batchSize=batchSize,
            startTrainSize=startTrainSize,
            transforms=transforms,
        )
    def choose_action(self, state):
        # Returns the base-class action twice — presumably (action to play,
        # action to store); TODO confirm against the caller's expectations.
        action = super().choose_action(state)
        return action, action
class Net(torch.nn.Module):
    """Atari-style DQN: three conv layers followed by two linear layers.

    Args:
        img_shape: ``(height, width, channels)`` of the input frames.
        n_actions: size of the action-value output layer.
    """

    @staticmethod
    def _conv_out(size, kernel_size, stride, padding=0):
        """Spatial output size of a conv layer along one dimension."""
        return (size + 2 * padding - kernel_size) // stride + 1

    def __init__(self, img_shape, n_actions):
        super(Net, self).__init__()
        in_channels = img_shape[2]
        h = img_shape[0]
        w = img_shape[1]
        # Conv stack mirrors the classic DQN architecture (8/4, 4/2, 3/1).
        # The repeated size arithmetic on mutable kernel/stride/padding
        # variables (and the commented-out pooling/dropout code) was
        # replaced by the _conv_out helper.
        self.conv1 = torch.nn.Conv2d(in_channels, 32, kernel_size=8, stride=4)
        h, w = self._conv_out(h, 8, 4), self._conv_out(w, 8, 4)
        self.conv2 = torch.nn.Conv2d(32, 64, kernel_size=4, stride=2)
        h, w = self._conv_out(h, 4, 2), self._conv_out(w, 4, 2)
        self.conv3 = torch.nn.Conv2d(64, 64, kernel_size=3, stride=1)
        h, w = self._conv_out(h, 3, 1), self._conv_out(w, 3, 1)
        self.fc1 = torch.nn.Linear(64 * h * w, 512)
        self.fc2 = torch.nn.Linear(512, n_actions)

    def forward(self, x):
        """Map a (N, C, H, W) batch of frames to (N, n_actions) Q-values."""
        x = F.relu(self.conv1(x))
        x = F.relu(self.conv2(x))
        x = F.relu(self.conv3(x))
        x = x.view(x.shape[0], -1)
        x = F.relu(self.fc1(x))
        x = self.fc2(x)
        return x
| [
"zpsyhapcst@gmail.com"
] | zpsyhapcst@gmail.com |
a1053de4ca6cfcb987fa1c71b11fec49dce526d3 | ace029249e56decdc82770645cbe1904a1af8ec7 | /django/django_orm/semi_restful_tv_shows-addvalidation/apps/semi_restful_app/migrations/0001_initial.py | 857cc7500841a537e5ce0ddf4b3f63d066f25139 | [] | no_license | CoraleeZ/Python-Stack-All-Assignments | 1579e17d447afc0a56be6628db39ba13d6e2e1b3 | 993f4f3ed7ff8a22b8c4275cda037f7f925cb161 | refs/heads/master | 2022-11-08T06:17:31.668026 | 2019-03-24T02:59:29 | 2019-03-24T02:59:29 | 177,363,963 | 0 | 1 | null | 2022-10-15T01:50:04 | 2019-03-24T02:55:36 | Python | UTF-8 | Python | false | false | 1,202 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2019-02-20 01:54
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema: create the ``networks`` and ``shows`` tables,
    with ``shows.network`` a cascading FK onto ``networks``."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='networks',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('network_name', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='shows',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=255)),
                ('release_date', models.CharField(max_length=255)),
                ('desc', models.TextField()),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('network', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='show', to='semi_restful_app.networks')),
            ],
        ),
    ]
| [
"helloqyzhang@gmail.com"
] | helloqyzhang@gmail.com |
5e3a0bcff7612094ba4a82324dc5a8bad44c8d0a | 1d96e514bfb16a3dff847179411fac2ee7f663c2 | /djangobayes/models.py | 09ce57bc9449c734af42e54277cadce65d916d3a | [
"BSD-3-Clause"
] | permissive | waylan/django-spambayes | e1c63b0fce1983f9523202cbbc40028ea8f998c3 | b5cae1ddbcb0ec180a5952fba6301d23fb0e0814 | refs/heads/master | 2016-09-05T13:06:47.300848 | 2009-07-31T01:01:19 | 2009-07-31T01:01:19 | 32,111,037 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 360 | py | from django.db import models
class Bayes(models.Model):
    """ Spambayes training storage used to score new messages. """
    # The token text itself; also the primary key, so each word is stored once.
    word = models.CharField(default='', primary_key=True, max_length=100)
    # Count of spam messages this token has been seen in.
    nspam = models.IntegerField(default=0, null=False)
    # Count of ham (non-spam) messages this token has been seen in.
    nham = models.IntegerField(default=0, null=False)
    # Python-2-era string representation hook: display the token text.
    def __unicode__(self):
        return self.word
| [
"waylan@localhost"
] | waylan@localhost |
e70f5b95d63446c623427ca8d4f551caadfa965a | b9bab8e92273968c2ae4f74b0128fa1080eb6b81 | /ejemploListBox.py | 95807722f15dd94b783ed799e9cf74eecc5ca05a | [] | no_license | querola/MasterEnPython | c124a6b5c2c76227a55d6f6a97d05b0ec610a1e6 | 88e0929ae6f87bfae1a317f093bfee30a8916866 | refs/heads/master | 2022-12-24T23:17:11.028046 | 2020-10-09T03:32:28 | 2020-10-09T03:32:28 | 290,673,960 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 734 | py | class Application(ttk.Frame):
def __init__(self, main_window):
super().__init__(main_window)
main_window.title("Lista en Tcl/Tk")
# Crear una barra de deslizamiento con orientación vertical.
scrollbar = ttk.Scrollbar(self, orient=tk.VERTICAL)
# Vincularla con la lista.
self.listbox = tk.Listbox(self, yscrollcommand=scrollbar.set)
# Insertar 20 elementos.
for i in range(20):
self.listbox.insert(tk.END, "Elemento {}".format(i))
scrollbar.config(command=self.listbox.yview)
# Ubicarla a la derecha.
scrollbar.pack(side=tk.RIGHT, fill=tk.Y)
self.listbox.pack()
self.pack() | [
"38117134+DesarrolloProsis@users.noreply.github.com"
] | 38117134+DesarrolloProsis@users.noreply.github.com |
ffd91bb863c0a3f67d1d3ed0d36bcd76c48916d4 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/sieve-big-1270.py | d68c3965a130f64994c9d66a570265f0edd9655d | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,757 | py | # A resizable list of integers
class Vector(object):
items: [int] = None
size: int = 0
def __init__(self:"Vector"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector", idx: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector") -> int:
return self.size
# A resizable list of integers
class Vector2(object):
items: [int] = None
items2: [int] = None
size: int = 0
size2: int = 0
def __init__(self:"Vector2"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector2") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector2") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector2") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector2") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector2", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector2", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector2", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector2", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector2", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector2", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector2") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector2") -> int:
return self.size
# A resizable list of integers
class Vector3(object):
    # Generated arity-3 clone of Vector: every member exists in plain,
    # `2`, and `3` variants; the extra parameters/fields are unused.
    items: [int] = None
    # FIX: this annotation was the unexpanded template placeholder
    # `[$Type]` in the generated source (a syntax error); restored to
    # [int] to match items/items3.
    items2: [int] = None
    items3: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    def __init__(self:"Vector3"):
        self.items = [0]
    # Returns current capacity
    def capacity(self:"Vector3") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity2(self:"Vector3") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity3(self:"Vector3") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity3(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector3", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector
    def append2(self:"Vector3", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector
    def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector3", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector
    def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector
    def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector
    def remove_at(self:"Vector3", idx: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector
    def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector
    def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index
    def get(self:"Vector3", idx: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index
    def get2(self:"Vector3", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index
    def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector3") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length2(self:"Vector3") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length3(self:"Vector3") -> int:
        return self.size
# A resizable list of integers
class Vector4(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
def __init__(self:"Vector4"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector4") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector4", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector4", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector4", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector4", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector4", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector4", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector4", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector4") -> int:
return self.size
# A resizable list of integers
class Vector5(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
items5: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
size5: int = 0
def __init__(self:"Vector5"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity5(self:"Vector5") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity5(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector5", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector5", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector5", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
item5:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector5", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector5", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector5", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length5(self:"Vector5") -> int:
return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
    # Capacity threshold beyond which growth reverts to +1 increments.
    doubling_limit:int = 1000
    # Overriding to do fewer resizes
    def increase_capacity(self:"DoublingVector") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            # Double by concatenating the list with itself; the copied
            # tail slots are dead values until append() overwrites them.
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector2(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector3(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector4(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector5(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
doubling_limit5:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity5(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
    # Returns a DoublingVector holding i, i+1, ..., j-1
    # (empty when j <= i).
    v:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
def vrange2(i:int, j:int, i2:int, j2:int) -> Vector:
v:Vector = None
v2:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
# Sieve of Eratosthenes (not really)
def sieve(v:Vector) -> object:
    # In-place filter: for each surviving element k (left to right),
    # remove every later element divisible by k. Starting from
    # vrange(2, n) this leaves exactly the primes below n.
    # remove_at shifts the tail, so worst case is cubic — fine for a
    # benchmark fixture.
    i:int = 0
    j:int = 0
    k:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                # j is NOT advanced after a removal: the next element
                # slides into slot j and is tested on the next pass.
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
def sieve2(v:Vector, v2:Vector) -> object:
i:int = 0
i2:int = 0
j:int = 0
j2:int = 0
k:int = 0
k2:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve3(v:Vector, v2:Vector, v3:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
j:int = 0
j2:int = 0
j3:int = 0
k:int = 0
k2:int = 0
k3:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
j5:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
k5:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
# Input parameter
n:int = 50
# n2..n5, v2..v5 and i2..i5 below are generated padding: they are
# assigned but never read again. Only n, v and i drive the output.
n2:int = 50
n3:int = 50
n4:int = 50
n5:int = 50
# Data
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
# Crunch
v = vrange(2, n)
v2 = vrange(2, n)
v3 = vrange(2, n)
v4 = vrange(2, n)
v5 = vrange(2, n)
# Only v is sieved; the clones stay as full ranges.
sieve(v)
# Print (one prime per line, ascending)
while i < v.length():
    print(v.get(i))
    i = i + 1
| [
"647530+Virtlink@users.noreply.github.com"
] | 647530+Virtlink@users.noreply.github.com |
85d834c6ffb98c628f5f3832e33b699ffa6b03f6 | 5266c1c30ce25473efafbb1d3e6ff68cbf537a70 | /https_scanner.py | 8c73eb084247a05c42b8315f70aa71cad5129204 | [] | no_license | oskar456/https_scanner | ce1f86d5746c9f2b23c3658b4ddae7abf2bcf790 | 83b07dd2a053d6663fbeb9d954aad6a5847706b1 | refs/heads/main | 2023-02-12T18:56:59.003686 | 2021-01-07T15:33:31 | 2021-01-07T15:33:31 | 327,653,181 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,586 | py | #!/usr/bin/env python3
import ssl
import socket
import urllib.parse
import csv
import time
import requests
import click
def _issuer_common_name(cert):
    """Return the issuer commonName from a getpeercert()-style dict.

    The issuer is a tuple of RDNs, each a tuple of (key, value) pairs.
    Returns '' when no commonName is present (some CAs use only O/OU).
    """
    for rdn in cert.get("issuer", ()):
        for key, value in rdn:
            if key == "commonName":
                return value
    return ""


def get_remote_tls_cert(hostname, port=None, timeout=1):
    """
    Open a verified TLS connection to hostname:port (default 443) and
    return (issuer_common_name, notAfter) from the peer certificate.

    Raise ssl.SSLError on TLS error
    Raise ssl.CertificateError on unmatching certificate
    Raise socket.gaierror on DNS error
    Raise ConnectionRefusedError on connection refused
    Raise socket.timeout on timeout
    """
    port = port or 443
    context = ssl.create_default_context()
    with socket.create_connection((hostname, port), timeout) as sock:
        with context.wrap_socket(sock, server_hostname=hostname) as sslsock:
            c = sslsock.getpeercert()
            # The old one-liner indexed [0] into the filtered CN list and
            # raised IndexError on certificates whose issuer has no CN.
            return (_issuer_common_name(c), c["notAfter"])
def get_http_status(hostname, timeout=1):
    """
    Issue an HTTP HEAD request for the root of *hostname* over plain HTTP.

    Returns a tuple of (status code, Location header or None).
    """
    print(f"Checking http://{hostname}/…")
    response = requests.head(f"http://{hostname}/", timeout=timeout)
    location = response.headers.get("Location")
    return (response.status_code, location)
def get_hsts_header(url, timeout=5):
    """
    Issue an HTTP HEAD request to *url* (expected to be https://).
    Return a tuple of (status code, Strict-Transport-Security header
    or None when the site does not send HSTS).
    """
    # The previous docstring was copy-pasted from get_http_status and
    # described the wrong return value.
    print(f"Checking HSTS header…")
    r = requests.head(url, timeout=timeout)
    return (r.status_code, r.headers.get("Strict-Transport-Security"))
def get_security_txt(hostname, port=None, timeout=5):
    """Return the HTTP status of HEAD /.well-known/security.txt over HTTPS."""
    # Empty string keeps the default port out of the URL.
    port = f":{port}" if port else ""
    url = f"https://{hostname}{port}/.well-known/security.txt"
    response = requests.head(url, timeout=timeout)
    return response.status_code
def get_grade(minus_points, plus_points):
    """Map penalty/bonus counts to a letter grade.

    0 penalties is 'A', each penalty drops one letter, capped at 'F'
    (5 or more). One '+' is appended per bonus point.
    """
    letter_offset = minus_points if minus_points < 5 else 5
    plusses = "+" * plus_points
    return "{}{}".format(chr(ord("A") + letter_offset), plusses)
def get_ssllabs_grade(hostname, force_check=False):
    """
    Poll the Qualys SSL Labs v3 API for *hostname*.

    Accepts cached results (maxAge=99999). When force_check is true,
    keeps polling an IN_PROGRESS analysis (60 s between requests, at
    most 8 requests total). Returns max() over the endpoint grade
    strings, or None on API error / request failure / no grades.

    NOTE(review): max() on grade strings is lexicographic, so 'F' beats
    'A' — this picks the *worst* letter grade across endpoints; confirm
    that is the intent.
    """
    ssllabs_api = "https://api.ssllabs.com/api/v3/"
    reqn = 0
    try:
        while True:
            reqn += 1
            r = requests.get(ssllabs_api + "analyze", params={
                "host": hostname,
                "maxAge": 99999,
            }).json()
            status = r.get("status")
            eps = r.get("endpoints", [])
            print(f"SSL Labs status {status}")
            if status == "DNS":
                # SSL Labs is still resolving the host; retry shortly.
                print("Sleeping 10 seconds to allow DNS resolution")
                time.sleep(10)
            elif status == "IN_PROGRESS" and force_check and reqn < 8:
                for ep in eps:
                    print(f"endpoint {ep.get('ipAddress')} progress {ep.get('progress')}")
                print("Sleeping 60 seconds to allow SSL Labs analysis")
                time.sleep(60)
            elif status == "ERROR":
                return
            else:
                # READY, or IN_PROGRESS after the polling budget ran out.
                if not status == "READY":
                    print("Giving up SSL Labs")
                grades = [e["grade"] for e in eps if e.get("grade")]
                return max(grades) if grades else None
    except requests.exceptions.RequestException:
        return
def check_https(hostname):
    """
    Run the full HTTP/HTTPS health check for one domain.

    Returns a tuple
    (grade, http_status, https_status, hsts, securitytxt, issuer,
     ssllabs_grade, ssllabs_url)
    or None when neither plain HTTP nor TLS is reachable at all.
    """
    https_url = f"https://{hostname}/"
    minus_points = 0
    plus_points = 0
    # --- Plain-HTTP phase: the root should redirect to HTTPS. ---
    try:
        st, loc = get_http_status(hostname)
        print(f"HTTP status: {st}")
        if loc:
            print(f"Redirecting to: {loc}")
        if st < 300:
            # Serving content over plain HTTP is penalised.
            http_status = "Insecure content"
            minus_points += 2
        elif 300 <= st < 400:
            if loc.lower().startswith(f"https://{hostname}/"):
                http_status = f"Redirects to self ({st})"
            elif loc.lower().startswith("https://"):
                # Follow the redirect target in the TLS phase below.
                http_status = f"Redirects to secure ({st}, {loc})"
                https_url = loc
            else:
                http_status = f"Redirects to insecure ({st}, {loc})"
                minus_points += 2
        else:
            http_status = f"Broken ({st})"
            minus_points += 1
    except requests.RequestException as e:
        http_status = f"Non-functional ({e})"
        minus_points += 1
    print(f"Overall HTTP status: {http_status}")
    sth = None
    hsts = None
    issuer = None
    notAfter = None
    securitytxt = None
    do_ssl_labs = False
    # --- HTTPS phase: certificate, HSTS header, security.txt. ---
    try:
        print("Trying TLS connection…")
        parsed = urllib.parse.urlparse(https_url)
        issuer, notAfter = get_remote_tls_cert(parsed.hostname, parsed.port)
        print(f"TLS connection OK: issuer: {issuer}, notAfter: {notAfter}")
        sth, hsts = get_hsts_header(https_url)
        print(f"HTTPS Status {sth}, HSTS: {hsts}")
        if hsts is not None and "max-age=" in hsts:
            plus_points += 1
        https_status = f"OK ({sth})"
        securitytxt = get_security_txt(parsed.hostname, parsed.port)
        if securitytxt == 200:
            plus_points += 1
        if "TERENA SSL High Assurance CA" in issuer:
            plus_points += 1
        if not issuer.startswith("TERENA"):
            minus_points += 1
    # BUGFIX: ssl.SSLError subclasses OSError (== socket.error), so with
    # the generic handler listed first it swallowed every TLS handshake
    # failure and do_ssl_labs was never set. The TLS-specific handler
    # must come first.
    except (ssl.SSLError, ssl.CertificateError) as e:
        print(f"Broken TLS connection: {e}")
        https_status = f"Broken ({e})"
        minus_points += 3
        # Force a full SSL Labs analysis to diagnose the TLS failure.
        do_ssl_labs = True
    except (socket.error, ConnectionRefusedError) as e:
        print(f"Broken TLS connection: {e}")
        https_status = f"Broken ({e})"
        minus_points += 3
        if http_status.startswith("Non-functional"):
            # Nothing reachable at all: emit no report row for this host.
            return
    grade = get_grade(minus_points, plus_points)
    ssllabs_url = "https://www.ssllabs.com/ssltest/analyze.html?d=" + hostname
    ssllabs_grade = get_ssllabs_grade(hostname, do_ssl_labs)
    return (grade, http_status, https_status, hsts, securitytxt, issuer,
            ssllabs_grade, ssllabs_url)
@click.command()
@click.argument("domainlist", type=click.File('r'))
@click.option("--report", type=click.File('w'))
def main(domainlist, report):
"""
Scan HTTPS status for given domain list.
Return Optional CSV report.
"""
if report:
writer = csv.writer(report)
writer.writerow(("Domain", "Grade", "HTTP Status", "HTTPS Status", "HSTS Header", "GET /.well-known/security.txt", "issuer", "SSL Labs grade", "SSL Labs URL",))
for line in domainlist:
d = line.strip().rstrip(".")
if d.startswith("#") or d == "":
continue
r = check_https(d)
if r and report:
writer.writerow([d, *r])
if __name__ == "__main__":
main()
| [
"ondrej@caletka.cz"
] | ondrej@caletka.cz |
9658794868bb500886ae279d410e71aa61e17c1c | 798b443753f516fa1cedbef21edbd36675913554 | /drf_learn/manage.py | 57c92d016bfe6c488170b4910ed886208dbb1611 | [] | no_license | itachiuhia/Django | 65b9cd55b6e2e46411936e24f2764bc1d946b3fd | 59a5147424a765da0fe407ae865133e7b3f9871a | refs/heads/master | 2023-05-07T00:35:17.425092 | 2021-05-31T18:13:18 | 2021-05-31T18:13:18 | 372,581,734 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 665 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'drf_learn.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"06harshgtm@gmail.com"
] | 06harshgtm@gmail.com |
bd5ad957aadb6a3319671d4d11f3b2f6e224f63f | 3552d35a4408055635807b4d3351570be7a5dafa | /Apps Course/appsday/iplapp/forms/FormModule.py | 7e78ae1f36531b3c4d69be3a500f167ec101f2ae | [] | no_license | ramyasree0299/summer2019_GNITS_ramyasree | 0d54f65b8a752091df03e8f3faa3208e88036a07 | bd408c0caf86bd9116acf6b85d7bb8fd062ae0f5 | refs/heads/master | 2020-06-01T23:16:56.196222 | 2019-06-23T13:04:08 | 2019-06-23T13:04:08 | 190,962,568 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,155 | py | from django import forms
from iplapp.models import *
class Login(forms.Form):
username = forms.CharField(
widget=forms.TextInput(attrs={'class':'input','placeholder':"Enter username"}),
max_length=50,
required=True
)
password = forms.CharField(
widget=forms.PasswordInput(attrs={'class': 'input', 'placeholder': "Enter username"}),
max_length=50,
required=True
)
class Signup(forms.Form):
first_name = forms.CharField(
widget=forms.TextInput(attrs={'class':'input','placeholder':"Enter firstname"}),
max_length=50,
required=True
)
last_name = forms.CharField(
widget=forms.TextInput(attrs={'class': 'input', 'placeholder': "Enter lastname"}),
max_length=50,
required=True
)
username = forms.CharField(
widget=forms.TextInput(attrs={'class': 'input', 'placeholder': "Enter username"}),
max_length=50,
required=True
)
password = forms.CharField(
widget=forms.PasswordInput(attrs={'class': 'input', 'placeholder': "Enter Password"}),
max_length=50,
required=True
)
| [
"ramyasree0299@gmail.com"
] | ramyasree0299@gmail.com |
025cd837c4188459a5639cc8311534e8858f7425 | cd55730b3e9a1bbd2a4eb9ea6121fc852e27907e | /eddie/build/lib.linux-x86_64-2.7/eddietool/common/Directives/disk.py | 0d3f9a53074c9eabd75af17e5ec6d93d95f13b8e | [] | no_license | dimasajipangestu/belajarpostgres2 | f07ed6788fdba9788a7c7624532aeb89dc17b3e4 | f7f5edd58527a3ad98497b994f5d1b6d4c2c8b25 | refs/heads/master | 2022-08-18T09:31:10.338810 | 2020-05-21T02:35:34 | 2020-05-21T02:35:34 | 256,105,912 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,467 | py |
'''
File : disk.py
Start Date : 20041005
Description : Disk directives
$Id: disk.py 893 2007-12-09 07:08:16Z chris $
'''
__version__ = '$Revision: 893 $'
__copyright__ = 'Copyright (c) Chris Miles 2004-2005'
__author__ = 'Chris Miles'
__license__ = '''
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
'''
##
## Imports: Python
##
##
## Imports: Eddie
##
from eddietool.common import directive, log, utils
##
## Directives
##
class DISK(directive.Directive):
"""DISK provides access to data & stats for disk devices.
It requires the 'DiskStatistics' class from the 'diskdevice' data-collection module.
Example:
# /dev/md/dsk/d20 == /var
DISK md20_thruput:
device='md20'
scanperiod='5m'
rule='1' # always perform action
action='elvinrrd("disk-%(h)s_%(device)s", "rbytes=%(nread)s", "wbytes=%(nwritten)s")'
"""
def __init__(self, toklist):
# FS requires the DiskStatistics collector object from the diskdevice module
self.need_collectors = ( ('diskdevice','DiskStatistics'), ) # (module, collector-class) required
apply( directive.Directive.__init__, (self, toklist) )
def tokenparser(self, toklist, toktypes, indent):
"""Parse directive arguments."""
apply( directive.Directive.tokenparser, (self, toklist, toktypes, indent) )
# test required arguments
try:
self.args.device
except AttributeError:
raise directive.ParseFailure, "Device not specified"
try:
self.args.rule
except AttributeError:
raise directive.ParseFailure, "Rule not specified"
# Set any directive-specific variables
self.defaultVarDict['device'] = self.args.device
self.defaultVarDict['rule'] = self.args.rule
# define the unique ID
if self.ID == None:
self.ID = '%s.DISK.%s' % (log.hostname,self.args.device)
self.state.ID = self.ID
log.log( "<disk>DISK.tokenparser(): ID '%s' device '%s' rule '%s'" % (self.state.ID, self.args.device, self.args.rule), 8 )
def getData(self):
"""Called by Directive docheck() method to fetch the data required for
evaluating the directive rule.
"""
disk = self.data_collectors['diskdevice.DiskStatistics'][self.args.device]
if disk == None:
log.log( "<disk>DISK.docheck(): Error, device not found '%s'" % (self.args.device), 4 )
return None
else:
return disk.getHash()
class TAPE(directive.Directive):
"""TAPE provides access to data & stats for tape devices.
It requires the 'TapeStatistics' class from the 'diskdevice' data-collection module.
Example:
# st65 == TAPE
TAPE st65_thruput:
device='st65'
scanperiod='5m'
rule='1' # always perform action
action='elvinrrd("tape-%(h)s_%(device)s", "rbytes=%(nread)s", "wbytes=%(nwritten)s")'
"""
def __init__(self, toklist):
# FS requires the TapeStatistics collector object from the diskdevice module
self.need_collectors = ( ('diskdevice','TapeStatistics'), ) # (module, collector-class) required
apply( directive.Directive.__init__, (self, toklist) )
def tokenparser(self, toklist, toktypes, indent):
"""Parse directive arguments."""
apply( directive.Directive.tokenparser, (self, toklist, toktypes, indent) )
# test required arguments
try:
self.args.device
except AttributeError:
raise directive.ParseFailure, "Device not specified"
try:
self.args.rule
except AttributeError:
raise directive.ParseFailure, "Rule not specified"
# Set any directive-specific variables
self.defaultVarDict['device'] = self.args.device
self.defaultVarDict['rule'] = self.args.rule
# define the unique ID
if self.ID == None:
self.ID = '%s.TAPE.%s' % (log.hostname,self.args.device)
self.state.ID = self.ID
log.log( "<disk>TAPE.tokenparser(): ID '%s' device '%s' rule '%s'" % (self.state.ID, self.args.device, self.args.rule), 8 )
def getData(self):
"""Called by Directive docheck() method to fetch the data required for
evaluating the directive rule.
"""
tape = self.data_collectors['diskdevice.TapeStatistics'][self.args.device]
if tape == None:
log.log( "<disk>TAPE.docheck(): Error, device not found '%s'" % (self.args.device), 4 )
return None
else:
return tape.getHash()
##
## END - disk.py
##
| [
"dimasajipangestu@gmail.com"
] | dimasajipangestu@gmail.com |
9d12616142be20ad53a5fa9e6862a9f66b5626b8 | f43e3bfb859b73817c792648e5e338b75071064d | /playstore_dataset/playstore_dataset/pipelines.py | 39485b60964282adbddf406b3d0ffb0b2667e36c | [] | no_license | Geothomas1/Playstore | 413f6f051bbea8ec994ce10d6e07f81bf221363d | 95c7d7116a7bb91f1eb1c5b179c91cf9a7b9c456 | refs/heads/main | 2023-06-09T04:22:46.630544 | 2021-06-29T07:08:49 | 2021-06-29T07:08:49 | 352,473,028 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 370 | py | # Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
class PlaystoreDatasetPipeline:
def process_item(self, item, spider):
return item
| [
"geothomas@cet.ac.in"
] | geothomas@cet.ac.in |
ab3726a128bf6e82b2a8b03c10eca815d0e34c4b | ff8a30a0639e287b6e8cb6d80ad94d8e2b0f4e5d | /Main.py | 9d6a6bb37ab73a22677a4a69aefa5255701bb01e | [] | no_license | Cisplatinum/Portfolio-Management-Application | 39ef2f0257c4f788436b77d289468289faa4c49a | ec49c75ee4893123a1761368c2754ef10a16d2a4 | refs/heads/main | 2023-06-04T13:57:06.148070 | 2021-07-02T16:00:26 | 2021-07-02T16:00:26 | 382,375,073 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,517 | py | import sys
import PullStock
import StockFetcher
from PyQt5.QtWidgets import *
from PyQt5 import uic
import matplotlib.pyplot as plt
import numpy as np
from mpl_finance import candlestick_ohlc
from matplotlib.dates import date2num
from CustomExceptions import *
import utility
# Loading UI files
Ui_MainWindow, QtBaseClass = uic.loadUiType("Main.ui")
Ui_StockList, QtBaseClass = uic.loadUiType("StockList.ui")
Ui_Amount, QtBaseClass = uic.loadUiType("Amount.ui")
# Initializing global dictionary that stores all stocks' data
# Format: {ticker : [individual return, individual risk and individual beta]}
# e.g. {'AAPL' : [0.0136,0.06523,1.1085]}
stock_table = {}
# Initializing global dictionary that stores amount of stocks in portfolio table
# Format: {'sum': amount(in string), 'ticker : amount(in string)}
# e.g. {'sum' : '10000','AAPL' : '10000'}
portfolio = {}
# Initializing sum as 0
portfolio['sum'] = '0'
class Main(QMainWindow, Ui_MainWindow):
def __init__(self):
super().__init__()
self.setupUi(self)
self.searchButton.clicked.connect(self.open_stock_list)
self.portfolioTable.itemDoubleClicked.connect(self.show_graph)
# Opens Stock table with individual values
def open_stock_list(self):
self.dialog = None
self.dialog = Ui_StockList(parent=self)
self.dialog.show()
# Opens graph of the stock's 1-year performance
def show_graph(self):
index = self.portfolioTable.selectedIndexes()
symbol = index[0].data()
data = utility.pull_plot_data(symbol)
fig, ax = plt.subplots()
d = np.array(data.Date, dtype='datetime64')
dates = date2num(d)
candlestick_ohlc(ax, zip(dates, data.Open, data.High, data.Low, data.Close), width=2, colorup='g',
colordown='r', alpha=1)
plt.setp(ax.get_xticklabels(), rotation=30)
ax.xaxis_date()
plt.show()
class Ui_StockList(QDialog, Ui_StockList):
def __init__(self,parent=None):
super().__init__(parent)
self.setupUi(self)
self.AddButton.clicked.connect(self.add_button)
self.CancelButton.clicked.connect(self.cancel_button)
self.set_table_data()
self.show()
# Builds the stock table with individual values
def set_table_data(self):
if len(stock_table) == 0:
stock_list = PullStock.scan_file('stocklist.csv')
if len(stock_list) == 0:
raise ApplicationException('Stock symbol not found', '')
index = 0
for key, value in stock_list.items():
self.stockListTable.insertRow(index)
self.stockListTable.setItem(index, 0, QTableWidgetItem(key.strip("\"")))
data = self.get_stock(key.strip("\""))
reri = self.calc_individual_return_and_risk(data)
ire = str('{:.2%}'.format(reri[0]))
iri = str('{:.2%}'.format(reri[1]))
ibeta = str('{:.4f}'.format(self.calc_beta(data)))
self.stockListTable.setItem(index, 1, QTableWidgetItem(ire))
self.stockListTable.setItem(index, 2, QTableWidgetItem(iri))
self.stockListTable.setItem(index, 3, QTableWidgetItem(ibeta))
stock_table[key.strip("\"")] = [ire,iri,ibeta]
index += 1
else:
index = 0
for key, value in stock_table.items():
self.stockListTable.insertRow(index)
self.stockListTable.setItem(index, 0, QTableWidgetItem(key))
self.stockListTable.setItem(index, 1, QTableWidgetItem(value[0]))
self.stockListTable.setItem(index, 2, QTableWidgetItem(value[1]))
self.stockListTable.setItem(index, 3, QTableWidgetItem(value[2]))
index += 1
# Calls function in utility to calculate individual return and risk
def calc_individual_return_and_risk(self, data):
return utility.individual_return_and_risk(data)
# Calls function in utility to calculate individual beta
def calc_beta(self, data):
return utility.individual_beta(data)
# Calls function in StockFetcher to get original data from Yahoo
def get_stock(self, ticker):
return StockFetcher.fetch_stock(ticker)
# Add_button opens a new window that prompts the user to input the amount
def add_button(self):
if self.stockListTable.itemClicked:
self.dialog = None
self.dialog = Ui_Amount(parent=self)
self.dialog.show()
# Cancel_button closes the window
def cancel_button(self):
return self.accept()
# Cross closes the window
def close_dialog(self):
return self.accept()
class Ui_Amount(QDialog, Ui_Amount):
def __init__(self,parent=None):
super().__init__(parent)
self.setupUi(self)
self.AddButton.clicked.connect(self.add_button)
self.CancelButton.clicked.connect(self.cancel_button)
self.show()
# Add_button adds the selected stock in the portfolio table of Main Window and stock the amount in portfolio
# and it triggers the calculation of portfolio values including
# portfolio return, portfolio risk, portfolio beta, average portfolio correlation, and
# individual values including weightage and MCTOR
def add_button(self):
if self.Amount.text().strip() != '':
parent = self.parent()
main = self.parent().parent()
index = parent.stockListTable.selectedIndexes()
ticker = index[0].data()
if ticker in portfolio:
portfolio['sum'] = str(float(portfolio['sum']) + float(self.Amount.text().strip()))
portfolio[ticker] = str(float(portfolio[ticker]) + float(self.Amount.text().strip()))
else:
portfolio[ticker] = str(float(self.Amount.text().strip()))
portfolio['sum'] = str(float(portfolio['sum']) + float(portfolio[ticker]))
utility.set_portfolio(portfolio)
portfolio_beta = utility.portfolio_beta(stock_table)
portfolio_risk_and_return_and_rho = utility.portfolio_risk_and_return_and_rho()
portfolio_risk = portfolio_risk_and_return_and_rho[0]
portfolio_return = portfolio_risk_and_return_and_rho[1]
rho = portfolio_risk_and_return_and_rho[2]
percentages = self.calculate_percentages(ticker)
mctors = self.calculate_mctors(portfolio_risk)
main.portfolioTable.setRowCount(0)
for ticker in portfolio.keys():
if ticker != 'sum':
row = main.portfolioTable.rowCount()
main.portfolioTable.insertRow(row)
main.portfolioTable.setItem(row, 0, QTableWidgetItem(ticker))
main.portfolioTable.setItem(row, 1, QTableWidgetItem(percentages[ticker]))
main.portfolioTable.setItem(row, 2, QTableWidgetItem(mctors[ticker]))
main.portfolioTable.setItem(row, 3, QTableWidgetItem(stock_table[ticker][2]))
main.PortfolioReturn.setText(str('{:.2%}'.format(portfolio_return)))
main.PortfolioRisk.setText(str('{:.2%}'.format(portfolio_risk)))
main.PortfolioBeta.setText(str('{:.4f}'.format(portfolio_beta)))
main.AvePortforlioCorr.setText(str('{:.4f}'.format(rho)))
self.parent().accept()
return self.accept()
# calculates percentages of individual stock in portfolio in terms of amount
def calculate_percentages(self, ticker):
percentages = {}
for key, value in portfolio.items():
if key != 'sum':
percentages[key] = str('{:.2%}'.format(float(value) / float(portfolio['sum'])))
return percentages
# calls function in utility to calculates individual stock mctors
def calculate_mctors(self,portfolio_risk):
mctors = {}
for key, value in portfolio.items():
if key != 'sum':
mctors[key] = str('{:.2%}'.format(utility.marginal_risk_contribution(key,portfolio_risk)))
return mctors
# Cancel_button closes the window
def cancel_button(self):
return self.accept()
# Cross closes the window
def close_dialog(self):
return self.accept()
if __name__=='__main__':
app=QApplication(sys.argv)
main=Main()
main.show()
sys.exit(app.exec_())
| [
"enochwyx@gmail.com"
] | enochwyx@gmail.com |
494209f5626eff8613f8403f2084829f49a30c87 | 1554150a9720ebf35cd11c746f69169b595dca10 | /package_package/package/model/fuzzy_number.py | b64b535dfc851ec40ee6a38917dddbbf78b72a3a | [] | no_license | andrewili/shape-grammar-engine | 37a809f8cf78b133f8f1c3f9cf13a7fbbb564713 | 2859d8021442542561bdd1387deebc85e26f2d03 | refs/heads/master | 2021-01-18T22:46:51.221257 | 2016-05-31T21:15:28 | 2016-05-31T21:15:28 | 14,129,359 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,214 | py | import numpy as np
almost_equal = np.allclose
class FuzzyNumber(object):
def __init__(self, number_in):
"""Receives:
number_in num
"""
method_name = '__init__'
try:
if not self._is_a_number(number_in):
raise TypeError
except TypeError:
message = "The argument must be a number"
self.__class__._print_error_message(method_name, message)
else:
self.value = number_in
def _is_a_number(self, x):
"""Receives:
x object
Returns:
value boolean. True if x is an int, a float, an
np.int64, or an np.float64. False otherwise
"""
value = False
if (type(x) == int or
type(x) == float or
type(x) == np.int64 or
type(x) == np.float64
):
value = True
return value
def __eq__(self, other):
return almost_equal(self.value, other.value)
def __ge__(self, other):
return (
almost_equal(self.value, other.value) or
self.value > other.value)
def __gt__(self, other):
if almost_equal(self.value, other.value):
value = False
elif self.value > other.value:
value = True
else:
value = False
return value
def __le__(self, other):
return(
almost_equal(self.value, other.value) or
self.value < other.value)
def __lt__(self, other):
if almost_equal(self.value, other.value):
value = False
elif self.value < other.value:
value = True
else:
value = False
return value
def __ne__(self, other):
return not almost_equal(self.value, other.value)
### utility
@classmethod
def _print_error_message(cls, method_name, message):
print '%s.%s:\n %s' % (cls.__name__, method_name, message)
### represent
def __str__(self):
return str(self.value)
if __name__ == '__main__':
import doctest
doctest.testfile('tests/fuzzy_number_test.txt')
| [
"i@andrew.li"
] | i@andrew.li |
b15b73a96e30b59f460adead2595d610fe456d8f | e3b1b5d3679e6aa226b9b31679ca02311347e83a | /Cursoemvideo/Exercícios/exer27 - Primeiro e ultimo nome.py | 4238e8f4b428c1f35a652c33bbc47686537612c9 | [
"MIT"
] | permissive | Vith-MCB/Phyton---Curso-em-Video | 27e33779c62e09dd4f0d2b5cafb0bf870ed19a31 | d13a2150df022b9712b3b3136e9afc963864403c | refs/heads/main | 2023-08-14T08:10:17.904239 | 2021-09-21T23:39:44 | 2021-09-21T23:39:44 | 380,555,515 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 78 | py | m = 3
if m < 5:
nota = ('Reprovado')
print('Você está: {}'.format(nota)) | [
"vitorriblacerda@gmail.com"
] | vitorriblacerda@gmail.com |
fc12090da50ccd0cefe1a9e3e7c52f49e85ba1bc | d704b44080fa83e73f109c381a67564c92d6f7e5 | /math_helpers/pkg/ct_math/algebra.py | a80713979bc5f62c9ce45c5b878bad5dc8142a8f | [] | no_license | ct155105/ct_python_helpers | 3384bb147c4ba4bc07ea04838303a9e8d8b20107 | 9531818889860e469836d3020e5c26879184a716 | refs/heads/main | 2023-06-17T13:36:39.801191 | 2021-07-12T04:52:30 | 2021-07-12T04:52:30 | 378,555,866 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 356 | py |
def get_average_of_list_values(list: list) -> float:
'''Sums the values in the list, then divides by the list length
Args:
list: A list containing number values
Returns
the mean for the values in the list
'''
total = 0
for val in list:
total += val
mean = total / len(list)
return mean
| [
"cteuschler@corbus.com"
] | cteuschler@corbus.com |
e5c53766c994f5d150cd47187531f1339035c92b | 065acd70109d206c4021954e68c960a631a6c5e3 | /shot_detector/utils/collections/sliding_windows/__init__.py | dbbbfbf2e08e76dc001cf31e3d17b64c790ba048 | [] | permissive | w495/python-video-shot-detector | bf2e3cc8175687c73cd01cf89441efc349f58d4d | 617ff45c9c3c96bbd9a975aef15f1b2697282b9c | refs/heads/master | 2022-12-12T02:29:24.771610 | 2017-05-15T00:38:22 | 2017-05-15T00:38:22 | 37,352,923 | 20 | 3 | BSD-3-Clause | 2022-11-22T01:15:45 | 2015-06-13T01:33:27 | Python | UTF-8 | Python | false | false | 347 | py | # -*- coding: utf8 -*-
"""
Different kinds of sliding windows
"""
from __future__ import absolute_import, division, print_function
from .base_sliding_window import BaseSlidingWindow
from .delayed_sliding_window import DelayedSlidingWindow
from .repeated_sliding_window import RepeatedSlidingWindow
from .sliding_window import SlidingWindow
| [
"w@w-495.ru"
] | w@w-495.ru |
ebd3055702b48d01a852f1d2990a3c2a1360099f | 0fafc39902f8d0c303e7db70fdc75c108053479b | /tipo_numerico.py | 4aa66a8b8624c7e4397841e4511581ee943aa48b | [] | no_license | vaniele02/CursoPython | 2b3d75047d3d205a3a8d07fd9f8d825682795993 | c65d3b8b74a31ac12c7ecac9935d67e5d35a5046 | refs/heads/main | 2023-02-12T14:41:23.830018 | 2021-01-06T17:43:29 | 2021-01-06T17:43:29 | 327,381,664 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 68 | py | """
Tipo numerico
"""
num = 1_000_00
print(num)
print(float(num))
| [
"pizzol.vaniele@gmail.com"
] | pizzol.vaniele@gmail.com |
f2b31712ee4e7decb111a9478e940eb641d7752b | e8cb133a8764ef2419e6923564bf50a7bbc15d37 | /Parse_Blast_for_Bed_ExonIntron.py | ed97f0e3b3b9dc435ec97db36b99694ab2568aea | [] | no_license | sidonieB/scripts | 04f69a450e730d47cebc8996db5815c6f7cf2ed9 | 76143c60e24d1db9936e9780952515833c4aa2e6 | refs/heads/master | 2021-11-27T20:20:26.463723 | 2021-11-25T11:15:29 | 2021-11-25T11:15:29 | 146,630,598 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,887 | py | ###########################################################################################################
# Script to sort blast results depending on if good hits and make bed with matched ("exons") vs non matched ("introns") regions
# Version 1
# 1 September 2019
# Originally written by Sidonie BELLOT (s.bellot@kew.org)
# Use and modify as you wish, but please don't hesitate to give feedback!
###########################################################################################################
import sys
from string import *
from Bio import SeqIO
import getopt
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from operator import itemgetter
# Run the script as such:
# python Parse_Blast_for_Bed_ExonIntron.py All_genes.fasta blast_results.txt matching_detail.bed not_matching.txt matching_full.bed id_threshold bp_threshold
# design input and output
queries = sys.argv[1] # fasta file with all the queries
blast_res = sys.argv[2] # table output of blast following -outfmt '6 qseqid sseqid pident length mismatch gapopen qstart qend sstart send qcovs evalue bitscore'
outfile = sys.argv[3] # bed file with intron and exon coordinates
outfile2 = sys.argv[4] # file with genes that did not have any good match
outfile3 = sys.argv[5] # file with genes that had a good match
id_threshold = sys.argv[6] # id% threshold (eg: 80 for 80%)
bp_threshold = sys.argv[7] # length threshold (eg: 80 for 80 bp)
# make a dictionary of query lengths
len_dict = {}
ALL_Q = []
handle = open(queries)
for seq_record in SeqIO.parse(handle, "fasta"):
ID = seq_record.id
ALL_Q.append(ID)
seq = seq_record.seq
q_len = len(seq)
len_dict[ID] = True
len_dict[ID] = q_len
handle.close()
# parse results
EXONS = []
SUBJECTS = []
QUERIES = []
ALL_Q_IN_RES = []
handle = open(blast_res, "r")
lines=handle.readlines()
for l in lines:
l2=l.split("\n")[0].split("\t")
query=l2[0]
h_subject=l2[1]
h_perid=float(l2[2])
h_ALlen=float(l2[3])
h_s = int(l2[6])
h_e = int(l2[7])
if h_s < h_e:
H_R = range(h_s, h_e,1)
else:
H_R = range(h_e, h_s,1)
if query in QUERIES:
if h_perid > float(id_threshold):
if h_ALlen > float(bp_threshold):
if h_subject not in SUBJECTS:
SUBJECTS.append(h_subject)
for i in H_R:
if i not in EXONS:
EXONS.append(i)
else:
if len(QUERIES) > 0:
q_len = len_dict[queryPrec]
new_name = queryPrec
if len(SUBJECTS) > 0:
for sub in SUBJECTS:
new_name = new_name + "__" + sub
s=1
e=0
y=1
z=1
EXONS.sort()
if min(EXONS) > 1:
with open(outfile, "a") as fo:
fo.write(queryPrec + "\t" + str(1) + "\t" + str(min(EXONS)-1) + "\t" + new_name + "___intron" + str(z) + "___" + str((min(EXONS)-1)) + "\n")
z = z+1
e = min(EXONS)-1
s = min(EXONS)
for x in EXONS:
if x > e+1 :
with open(outfile, "a") as fo:
fo.write(queryPrec + "\t" + str(s) + "\t" + str(e) + "\t" + new_name + "___exon" + str(y) + "___" + str(e-(s-1)) + "\n")
fo.write(queryPrec + "\t" + str(e+1) + "\t" + str(x-1) + "\t" + new_name + "___intron" + str(z) + "___" + str((x-1)-e) + "\n")
y = y+1
z = z+1
s = x
e = x
else:
e=x
if x < q_len:
with open(outfile, "a") as fo:
fo.write(queryPrec + "\t" + str(s) + "\t" + str(e) + "\t" + new_name + "___exon" + str(y) + "___" + str(e-(s-1)) + "\n")
fo.write(queryPrec + "\t" + str(x+1) + "\t" + str(q_len) + "\t" + new_name + "___intron" + str(z) + "___" + str(q_len-x) + "\n")
else:
with open(outfile, "a") as fo:
fo.write(queryPrec + "\t" + str(s) + "\t" + str(e) + "\t" + new_name + "___exon" + str(y) + "___" + str(e-(s-1)) + "\n")
with open(outfile3, "a") as fo3:
fo3.write(queryPrec + "\t1\t" + str(q_len) + "\n")
else:
with open(outfile2, "a") as fo2:
fo2.write(queryPrec + "\t" + "NO GOOD MATCH"+ "\n")
EXONS = []
SUBJECTS = []
QUERIES.append(query)
queryPrec = query
if h_perid > float(id_threshold):
if h_ALlen > float(bp_threshold):
if h_subject not in SUBJECTS:
SUBJECTS.append(h_subject)
for i in H_R:
if i not in EXONS:
EXONS.append(i)
ALL_Q_IN_RES.append(query)
if len(QUERIES) > 0:
q_len = len_dict[queryPrec]
new_name = queryPrec
if len(SUBJECTS) > 0:
for sub in SUBJECTS:
new_name = new_name + "__" + sub
s=1
e=0
y=1
z=1
EXONS.sort()
if min(EXONS) > 1:
with open(outfile, "a") as fo:
fo.write(queryPrec + "\t" + str(1) + "\t" + str(min(EXONS)-1) + "\t" + new_name + "___intron" + str(z) + "___" + str((min(EXONS)-1)) + "\n")
z = z+1
e = min(EXONS)-1
s = min(EXONS)
for x in EXONS:
if x > e+1 :
with open(outfile, "a") as fo:
fo.write(queryPrec + "\t" + str(s) + "\t" + str(e) + "\t" + new_name + "___exon" + str(y) + "___" + str(e-(s-1)) + "\n")
fo.write(queryPrec + "\t" + str(e+1) + "\t" + str(x-1) + "\t" + new_name + "___intron" + str(z) + "___" + str((x-1)-e) + "\n")
y = y+1
z = z+1
s = x
e = x
else:
e=x
if x < q_len:
with open(outfile, "a") as fo:
fo.write(queryPrec + "\t" + str(s) + "\t" + str(e) + "\t" + new_name + "___exon" + str(y) + "___" + str(e-(s-1)) + "\n")
fo.write(queryPrec + "\t" + str(x+1) + "\t" + str(q_len) + "\t" + new_name + "___intron" + str(z) + "___" + str(q_len-x) + "\n")
else:
with open(outfile, "a") as fo:
fo.write(queryPrec + "\t" + str(s) + "\t" + str(e) + "\t" + new_name + "___exon" + str(y) + "___" + str(e-(s-1)) + "\n")
with open(outfile3, "a") as fo3:
fo3.write(queryPrec + "\t1\t" + str(q_len) + "\n")
else:
with open(outfile2, "a") as fo2:
fo2.write(queryPrec + "\t" + "NO GOOD MATCH"+ "\n")
# list genes that had no match at all (i.e. not mentionned in the blast results)
for q in ALL_Q:
if q in ALL_Q_IN_RES:
print "got it"
else:
with open(outfile2, "a") as fo2:
fo2.write(q + "\t" + "NO MATCH"+ "\n")
| [
"noreply@github.com"
] | noreply@github.com |
a9297cfbbfe53a5bdce5b575f72cd5880abbafce | 2154d0221e29a86850a1b83e4302f6e3e3f7fa5d | /thread_example/simple_thread_example.py | 6f5917fe97dff2fcaaacbf683f71542762a6a5f6 | [] | no_license | aaqqxx/simple_for_life | 3b8805c6791da6a3a7f42c069dc1ee7d2b8d3649 | 9ad6d61a56216d04250cd89aeaeda63c11942d0a | refs/heads/master | 2020-04-04T09:18:59.396540 | 2015-04-28T11:22:55 | 2015-04-28T11:22:55 | 20,906,518 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 268 | py | # coding:utf-8
#!/usr/bin/env python
__author__ = 'XingHua'
"""
"""
import time, thread
def timer():
print('hello')
def test():
for i in range(0, 10):
thread.start_new_thread(timer, ())
if __name__ == '__main__':
test()
time.sleep(10) | [
"aaqqxx1910@gmail.com"
] | aaqqxx1910@gmail.com |
e188feec171d020b831ecb7e8ca6d5e6af11717c | 4fe257ebf57e74f25135f0b90a5b51395c45d199 | /events/migrations/0001_initial.py | c17c9311e73446c304a7b81ed405a82685c870fe | [
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-other-permissive"
] | permissive | fleaplus/timeclock | aafbb6f1fd8507b2353ec6ff859607bf680e65ae | 084dada0f2230ee3feb25ec699319fbb0fba83b2 | refs/heads/master | 2020-04-30T04:24:03.131142 | 2015-06-22T20:37:49 | 2015-06-22T20:37:49 | 37,560,337 | 1 | 1 | null | 2015-06-23T19:12:42 | 2015-06-16T22:56:18 | JavaScript | UTF-8 | Python | false | false | 471 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Event',
fields=[
('id', models.AutoField(serialize=False, primary_key=True, verbose_name='ID', auto_created=True)),
('time', models.DateTimeField()),
],
),
]
| [
"fleaplus@gmail.com"
] | fleaplus@gmail.com |
58a2722318a9e6da0bc6cdecc4436f4123ff8309 | fd90c9f7eef79bbaed9b12107706f925220fecb2 | /runner1c/commands/load_config.py | 38df7b99f40f2bce15e3151f9c11afb544afca60 | [
"BSD-2-Clause"
] | permissive | vakulenkoalex/runner1c | 334bed9a9a558123f8f43e197dff0ecea56f1d98 | 062a3edc4bd8089cae248d1877373f78535578f4 | refs/heads/master | 2023-07-22T09:20:11.245247 | 2023-07-17T13:12:01 | 2023-07-17T13:12:01 | 116,858,870 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,127 | py | import runner1c
import runner1c.exit_code as exit_code
class LoadConfigParser(runner1c.parser.Parser):
@property
def name(self):
return 'load_config'
@property
def description(self):
return 'загрузка конфигурации из исходников'
def create_handler(self, **kwargs):
return LoadConfig(**kwargs)
def set_up(self):
self.add_argument_to_parser()
self._parser.add_argument('--folder', required=True, help='каталог, содержащий исходники конфигурации')
self._parser.add_argument('--update', action='store_const', const=True, help='обновление конфигурации '
'базы данных')
self._parser.add_argument('--agent',
action='store_const',
const=True,
help='запускать конфигуратор в режиме агента')
class LoadConfig(runner1c.command.Command):
def __init__(self, **kwargs):
kwargs['mode'] = runner1c.command.Mode.DESIGNER
super().__init__(**kwargs)
if getattr(self.arguments, 'agent', False):
return
self.add_argument('/LoadConfigFromFiles "{folder}"')
if getattr(self.arguments, 'update', False):
self.add_argument('/UpdateDBCfg')
def execute(self):
if not getattr(self.arguments, 'agent', False):
return self.run()
else:
try:
command = 'config load-config-from-files --dir "{}" --update-config-dump-info'
return_code = self.send_to_agent(command.format(self.arguments.folder))
if exit_code.success_result(return_code):
return_code = self.send_to_agent('config update-db-cfg')
except Exception as exception:
self.error(exception)
return_code = exit_code.EXIT_CODE.error
return return_code
| [
"vakulenko_alex@mail.ru"
] | vakulenko_alex@mail.ru |
0b8e1b2e46404eb0e75e5cb5b0a7e4a73c2fb600 | c2e0dded1c67cb8c7a1e721853fedda3d22addef | /2016/04-1.py | c8a21bb48db7ae2c75a35269ff882681e61d92ce | [
"MIT"
] | permissive | LK/advent-of-code | 8b60b96b88581956b8a2257d1a8e1308c34ee421 | fc9ba3315608bc3b20ebb2c1212145f970b888d6 | refs/heads/master | 2020-06-15T06:11:25.165683 | 2016-12-21T05:55:17 | 2016-12-21T05:55:17 | 75,320,657 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 520 | py | import collections
import operator

# Advent of Code 2016, day 4 part 1 (Python 2).  Input lines look like
#   aaaaa-bbb-z-y-x-123[abxyz]
# A room is "real" when the bracketed checksum lists the five most common
# letters of the name; the sector IDs of real rooms are summed.
total = 0
line = raw_input()
while line != '':
    idx = line.index('[')
    checksum = line[idx+1:-1]              # five letters inside [...]
    secid = line[line.rfind('-')+1:idx]    # digits between last '-' and '['
    line = line[:line.rfind('-')]          # drop "-<secid>[...]"
    line = line.replace('-', '')           # keep only the name's letters
    c = collections.Counter(line)
    # Letters ordered by count, most common first.  Counter's ordering of
    # equal counts is arbitrary, hence the tie check in the loop below.
    chars = sorted(c.items(), key=operator.itemgetter(1), reverse=True)
    bad = False
    for i in range(5):
        if chars[i][0] != checksum[i]:
            # A mismatch is only fatal when the checksum letter's count
            # differs too (i.e. it is not merely a reordered tie).
            if c[checksum[i]] != chars[i][1]:
                bad = True
    if not bad:
        total += int(secid)
    line = raw_input()
print total
"lenny.khazan@gmail.com"
] | lenny.khazan@gmail.com |
9009efdfe35f109ffbf2ed9bc2895223798d906d | 2f42c6d1fddccc481354148393b9b44165826c04 | /test/FsrAnalysis/PmvTreeMaker_cfg.py | 19afc0d561fe2b2cedabc88728d5afeb0336515a | [] | no_license | senka/MultiBosons | 2ef3dc86e9f0a90f911557638fe79631fef0b3d2 | e8413aa280d85376a2f6dfe300bf52b78b4da437 | refs/heads/master | 2021-03-12T20:15:44.118577 | 2013-11-20T21:21:01 | 2013-11-20T21:21:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,460 | py | import FWCore.ParameterSet.Config as cms
import FWCore.ParameterSet.VarParsing as VarParsing
import FWCore.ParameterSet.Types as CfgTypes
import PhysicsTools.PythonAnalysis.LumiList as LumiList
## setup 'analysis' options
options = VarParsing.VarParsing ('analysis')
## register customized options
options.register("jsonFile",
"Cert_160404-165542_7TeV_PromptReco_Collisions11_JSON.txt", # default value
VarParsing.VarParsing.multiplicity.singleton, # singleton or list
VarParsing.VarParsing.varType.string, # string, int, or float
"JSON file to be applied."
)
options.register("globalTag",
"GR_R_39X_V6::All", # default value
VarParsing.VarParsing.multiplicity.singleton, # singleton or list
VarParsing.VarParsing.varType.string, # string, int, or float
"Global tag to be used."
)
options.register("isMC",
False, # default value
VarParsing.VarParsing.multiplicity.singleton, # singleton or list
VarParsing.VarParsing.varType.bool, # string, int, or float
"Is this MC?"
)
options.setupTags(tag = "of%d",
ifCond = "totalSections != 0",
tagArg = "totalSections")
options.setupTags(tag = "job%d",
ifCond = "section != 0",
tagArg = "section")
## setup any defaults you want
options.maxEvents = 10 # -1 means all events
#options.inputFiles = ["file:/uscmst1b_scratch/lpc1/3DayLifetime/veverka/mu/VGammaSkim_LyonSyncTest_Dec22ReReco_v2_DimuonSkim_1_of_4.root"]
options.outputFile = "pmvTree.root"
options.jsonFile = "Cert_160404-163869_7TeV_PromptReco_Collisions11_JSON_MuonPhys.txt"
## get and parse the command line arguments
options.parseArguments()
## define the process
process = cms.Process("TEST")
## Load standard sequence for crack corrections
process.load('CalibCalorimetry.EcalTrivialCondModules.EcalTrivialCondRetriever_cfi')
# process.load('Configuration.StandardSequences.Services_cff')
# process.load('Configuration.StandardSequences.MagneticField_38T_cff')
process.load('Configuration.StandardSequences.Geometry_cff')
# process.load('Configuration.StandardSequences.Reconstruction_cff')
# process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
## Global tag
# process.GlobalTag.globaltag = options.globalTag
## Message logger
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(False) )
process.MessageLogger.cerr.FwkReport.reportEvery = 100
## Enable LogInfo
# process.MessageLogger.cerr.INFO.limit = 100
## Enable LogDebug
### Remember to recompile with:
### scramv1 b USER_CXXFLAGS="-g\ -D=EDM_ML_DEBUG"
#process.MessageLogger.debugModules = ["pmvTree"]
#process.MessageLogger.cerr.threshold = "DEBUG"
### Geometry, Detector Conditions and Pythia Decay Tables (needed for the vertexing)
#process.load("Configuration.StandardSequences.Geometry_cff")
#process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
#process.GlobalTag.globaltag = options.globalTag
#process.load("Configuration.StandardSequences.MagneticField_cff")
#process.load("SimGeneral.HepPDTESSource.pythiapdt_cfi")
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring() + options.inputFiles
)
# JSON file
if not options.isMC and options.jsonFile != "":
myLumis = \
LumiList.LumiList(filename = options.jsonFile
).getCMSSWString().split(',')
process.source.lumisToProcess = \
CfgTypes.untracked(CfgTypes.VLuminosityBlockRange())
process.source.lumisToProcess.extend(myLumis)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(options.maxEvents)
)
process.TFileService = cms.Service("TFileService",
fileName = cms.string(options.outputFile)
)
from ElectroWeakAnalysis.MultiBosons.Selectors.muonSelector_cfi \
import muonSelection_FsrApr082011_PixelMatchVeto as muonSelection
from ElectroWeakAnalysis.MultiBosons.Selectors.diLeptonSelector_cfi \
import diMuonSelection_Fsr2011Apr11_PixelMatchVeto as diMuonSelection
from ElectroWeakAnalysis.MultiBosons.Selectors.photonSelector_cfi \
import photonSelection_Fsr2011Apr11_PixelMatchVeto as photonSelection
from ElectroWeakAnalysis.MultiBosons.Selectors.ZMuMuGammaSelector_cfi \
import ZMuMuGammaSelection_Fsr2011Apr11_PixelMatchVeto as ZMuMuGammaSelection
process.selectedMuons = cms.EDFilter("VGammaMuonFilter",
filterParams = muonSelection,
src = cms.InputTag("cleanPatMuonsTriggerMatch","","PAT"),
filter = cms.bool(True),
verbosity = cms.untracked.uint32(2)
)
process.goodDiMuons = cms.EDProducer("CandViewShallowClonePtrCombiner",
#process.goodDiMuons = cms.EDProducer("CandViewShallowCloneCombiner",
checkCharge = cms.bool(False),
cut = cms.string("mass > 0"), ## dummy cut
decay = cms.string("selectedMuons selectedMuons"),
roles = cms.vstring("muon1", "muon2")
)
process.selectedDiMuons = cms.EDFilter("VGammaDiLeptonFilter",
filterParams = diMuonSelection,
src = cms.InputTag("goodDiMuons"),
filter = cms.bool(True),
verbosity = cms.untracked.uint32(2)
)
process.selectedPhotons = cms.EDFilter("VGammaPhotonFilter",
filterParams = photonSelection,
src = cms.InputTag("cleanPatPhotonsTriggerMatch"),
filter = cms.bool(True),
verbosity = cms.untracked.uint32(2)
)
#process.vertexedDiMuons = cms.EDProducer("KalmanVertexFitCompositeCandProducer",
#src = cms.InputTag("selectedDiMuons")
#)
process.goodZMuMuGammas = cms.EDProducer("CandViewShallowClonePtrCombiner",
checkCharge = cms.bool(False),
cut = cms.string("mass > 0"), ## dummy cut
decay = cms.string("selectedDiMuons selectedPhotons"),
roles = cms.vstring("dimuon", "photon")
)
process.selectedZMuMuGammas = cms.EDFilter("ZMuMuGammaFilter",
filterParams = ZMuMuGammaSelection,
src = cms.InputTag("goodZMuMuGammas"),
filter = cms.bool(True),
verbosity = cms.untracked.uint32(2)
)
## Loosen the invariant mass window
process.selectedZMuMuGammas.filterParams.minMass = 50
process.selectedZMuMuGammas.filterParams.maxMass = 130
# Candidate selection chain: muons -> dimuon pairs -> photons -> Z(mumu)+gamma.
# In a CMSSW cms.Sequence, '*' chains modules so each consumes the output of
# the previous one — presumably order-sensitive here; verify against CMSSW docs.
process.selectionSequence = cms.Sequence(
    process.selectedMuons *
    process.goodDiMuons *
    process.selectedDiMuons *
    process.selectedPhotons *
    #process.vertexedDiMuons *
    process.goodZMuMuGammas *
    process.selectedZMuMuGammas
)
#process.mmgTree = cms.EDAnalyzer("MuMuGammaTreeMaker",
#photonSrc = cms.untracked.InputTag("selectedPhotons"),
#muonSrc = cms.untracked.InputTag("selectedMuons"),
#dimuonSrc = cms.untracked.InputTag("selectedDiMuons"),
#beamSpotSrc = cms.untracked.InputTag("offlineBeamSpot"),
#primaryVertexSrc = cms.untracked.InputTag("offlinePrimaryVertices"),
#ebClusterSrc = cms.untracked.InputTag("islandBasicClusters", "islandBarrelBasicClusters"),
#ebRecHitsSrc = cms.untracked.InputTag("ecalRecHit", "EcalRecHitsEB"),
#eeRecHitsSrc = cms.untracked.InputTag("ecalRecHit", "EcalRecHitsEE"),
#genParticleSrc = cms.untracked.InputTag("prunedGenParticles"),
#isMC = cms.untracked.bool(False),
#)
process.load("ElectroWeakAnalysis.MultiBosons.FsrAnalysis.PmvTreeMaker_cfi")
process.pmvTree.isMC = options.isMC
## Pileup
if options.isMC:
process.pmvTree.pileupInfoSrc = cms.untracked.InputTag("addPileupInfo")
process.pmvTree.lumiReWeighting = cms.untracked.PSet(
mcDistribution = cms.vdouble(
## from the gamma+jet sample (no filter)
## 21 numbers
# 257141., 295755., 263008., 286909., 282291., 281067.,
# 295777., 297075., 250569., 299795., 256528., 248686.,
# 203484., 137833., 117686., 76877., 62815., 35462.,
# 8381., 10012., 4233.
## from the S4 gamma + jet sample (no filter)
## 51 numbers, use only first 36
# 1.15148e+06, 582849, 629204, 642292, 658930, 666227,
# 668263, 649863, 623035, 588189, 528601, 478063,
# 412804, 351588, 285862, 231776, 181493, 139729,
# 104007, 77262, 55684, 39053, 27132, 18393,
# 12278, 8039, 5393, 3301, 2152, 1321,
# 875, 482, 317, 195, 98, 75,
# 44, 22, 15, 5, 7, 2,
# 0, 1, 0, 0, 0, 0,
# 0, 0, 0,
## In-time Poisson smeared Distribution for Fall 2011 S6 MC
## see https://twiki.cern.ch/twiki/bin/viewauth/CMS/PileupMCReweightingUtilities#Sample_Input_Distributions
0.0145837, 0.025683, 0.0384606, 0.0494145, 0.0569311,
0.0611828, 0.0625346, 0.0614769, 0.0586775, 0.0554499, #10
0.0515491, 0.047621, 0.0439238, 0.0405691, 0.0374147,
0.034227, 0.0314377, 0.0288256, 0.026219, 0.0237271, #20
0.0213656, 0.0191874, 0.0169728, 0.0149206, 0.013039,
0.0112938, 0.00961247, 0.00819356, 0.00688805, 0.00571524, #30
0.00471123, 0.00386993, 0.00315452, 0.00254742, 0.00202471,
0.00157441, 0.00124581, 0.000955206, 0.000735305, 0.000557304, #40
0.000412503, 0.000305502, 0.000231002, 0.000165701, 0.000121201,
9.30006e-05, 6.40004e-05, 4.22003e-05, 2.85002e-05, 1.96001e-05, #50
# 1.59001e-05, 1.01001e-05, 8.50006e-06, 6.60004e-06, 2.70002e-06 #55
),
dataDistribution = cms.vdouble(
## The length has to be exactly the same as for the MC!
## From pileupCalc using the analysis_AN-12-048_HggMVA_2011B.json
##+ This is the intersection of the certified lumi and lumi in files
##+ for the AN-12-048 (Hgg MVA).
##+ https://twiki.cern.ch/twiki/bin/view/CMS/PileupJSONFileforData
270698, 1.92097e+06, 7.37936e+06, 1.97546e+07, 4.12105e+07, 7.15133e+07,
1.07744e+08, 1.45221e+08, 1.78943e+08, 2.04812e+08, 2.20301e+08, 2.24587e+08,
2.18333e+08, 2.033e+08, 1.81904e+08, 1.56775e+08, 1.30395e+08, 1.04822e+08,
8.15498e+07, 6.14711e+07, 4.49426e+07, 3.19021e+07, 2.20071e+07, 1.47669e+07,
9.64664e+06, 6.14038e+06, 3.81158e+06, 2.30915e+06, 1.36638e+06, 790299,
447122, 247617, 134324, 71422.1, 37247.5, 19064.2,
9582.2, 4732.54, 2298.06, 1097.78, 516.18, 239.035,
109.077, 49.0746, 21.7801, 9.54062, 4.127, 1.76384,
0.745197, 0.31138,
## From pileupCalc using the analysis_AN-12-048_HggMVA_2011A.json
##+ This is the intersection of the certified lumi and lumi in files
##+ for the AN-12-048 (Hgg MVA).
##+ https://twiki.cern.ch/twiki/bin/view/CMS/PileupJSONFileforData
# 9.0421e+06, 4.18256e+07, 1.02775e+08, 1.78055e+08, 2.44227e+08, 2.82567e+08,
# 2.86929e+08, 2.62667e+08, 2.20922e+08, 1.73072e+08, 1.27563e+08, 8.91129e+07,
# 5.93253e+07, 3.77909e+07, 2.31054e+07, 1.3591e+07, 7.70586e+06, 4.21808e+06,
# 2.23217e+06, 1.14338e+06, 567552, 273295, 127794, 58085.6,
# 25686.5, 11061.4, 4642.54, 1900.61, 759.561, 296.542,
# 113.18, 42.2575, 15.4443, 5.52873, 1.93966, 0.667283,
# 0.225219, 0.0746155, 0.0242768, 0.00776057, 0.00243853, 0.000753497,
# 0.000229053, 6.8528e-05, 2.01861e-05, 5.85673e-06, 1.67436e-06, 4.71841e-07,
# 1.31119e-07, 3.59441e-08,
## From pileupCalc using the analysis_AN-12-048_HggMVA.json
##+ This is the intersection of the certified lumi and lumi in files
##+ for the AN-12-048 (Hgg MVA).
##+ https://twiki.cern.ch/twiki/bin/view/CMS/PileupJSONFileforData
# 9.31279e+06, 4.37466e+07, 1.10154e+08, 1.9781e+08, 2.85437e+08, 3.5408e+08, # 6
# 3.94674e+08, 4.07888e+08, 3.99865e+08, 3.77884e+08, 3.47865e+08, 3.137e+08, # 12
# 2.77658e+08, 2.41091e+08, 2.05009e+08, 1.70366e+08, 1.38101e+08, 1.0904e+08,# 18
# 8.3782e+07, 6.26145e+07, 4.55102e+07, 3.21754e+07, 2.21349e+07, 1.4825e+07, # 24
# 9.67233e+06, 6.15145e+06, 3.81622e+06, 2.31105e+06, 1.36714e+06, 790595, # 30
# 447235, 247660, 134340, 71427.6, 37249.4, 19064.9, # 36
# 9582.42, 4732.62, 2298.08, 1097.79, 516.183, 239.036, # 42
# 109.078, 49.0747, 21.7801, 9.54063, 4.127, 1.76384, # 48
# 0.745197, 0.31138, # 50
## from https://cms-service-dqm.web.cern.ch/cms-service-dqm/CAF/
##+ certification/Collisions11/7TeV/PileUp/*.pileup_v2.root
##+ Run 2011A and 2011B combined
# 1.34465e+07, 5.90653e+07, 1.40903e+08, 2.41301e+08, 3.33745e+08, 3.98711e+08,
# 4.30106e+08, 4.32283e+08, 4.1382e+08, 3.82846e+08, 3.45164e+08, 3.04344e+08,
# 2.62555e+08, 2.21331e+08, 1.81983e+08, 1.4569e+08, 1.13413e+08, 8.57789e+07,
# 6.30124e+07, 4.49596e+07, 3.1169e+07, 2.10079e+07, 1.37759e+07, 8.79641e+06,
# 5.47442e+06, 3.32378e+06, 1.97064e+06, 1.14204e+06, 647539, 359547,
# 195673, 104460, 54745.2, 28185.6, 28005.5, 0.008,
## Run 2011A only
# 1.29654e+07, 5.58514e+07, 1.29329e+08, 2.12134e+08, 2.76138e+08, 3.03604e+08,
# 2.93258e+08, 2.55633e+08, 2.0497e+08, 1.53264e+08, 1.07936e+08, 7.21006e+07,
# 4.5913e+07, 2.797e+07, 1.63426e+07, 9.17598e+06, 4.95861e+06, 2.58239e+06,
# 1.2977e+06, 629975, 295784, 134470, 59260.1, 25343.9,
# 10530.1, 4255.05, 1673.95, 641.776, 240.022, 87.6504,
# 31.281, 10.9195, 3.73146, 1.24923, 0.602368, 0.008,
## Run 2011B only
# 481142, 3.21393e+06, 1.15733e+07, 2.91676e+07, 5.76072e+07, 9.51074e+07,
# 1.36849e+08, 1.7665e+08, 2.0885e+08, 2.29582e+08, 2.37228e+08, 2.32243e+08,
# 2.16642e+08, 1.93361e+08, 1.6564e+08, 1.36514e+08, 1.08455e+08, 8.31965e+07,
# 6.17147e+07, 4.43296e+07, 3.08733e+07, 2.08734e+07, 1.37166e+07, 8.77106e+06,
# 5.46389e+06, 3.31952e+06, 1.96896e+06, 1.1414e+06, 647299, 359460,
# 195642, 104449, 54741.4, 28184.3, 28004.9, 0,
## from estimatePileupD.py for golden JSON up to run 173244
# 2.66037e+07, 6.20837e+07, 1.28931e+08, 2.00545e+08, 2.5334e+08, 2.73133e+08,
# 2.5988e+08, 2.23527e+08, 1.76897e+08, 1.30515e+08, 9.06582e+07, 5.972e+07,
# 3.75081e+07, 2.2549e+07, 1.30131e+07, 7.2248e+06, 3.86533e+06, 1.99552e+06,
# 995277, 480084, 224189, 101452, 44532.8, 18979.4,
# 7860.96, 3167.1, 1242.31, 474.86, 177.025, 64.4158,
# 22.8974, 7.95686, 2.70506, 0.900305, 0.293541, 0.0938176,
# 0.02941, 0.0090478, 0.00273311, 0.000811054, 0.000236549, 6.78354e-05,
## from estimatePileupD.py for golden JSON for runs 160404-166861
# 1.00826e+07, 1.9655e+07, 4.58762e+07, 7.63478e+07, 9.9728e+07, 1.0842e+08,
# 1.01847e+08, 8.48512e+07, 6.39051e+07, 4.41459e+07, 2.82916e+07, 1.69742e+07,
# 9.60532e+06, 5.15841e+06, 2.64284e+06, 1.29755e+06, 612859, 279413,
# 123331, 52841.1, 22026.7, 8951.4, 3552.86, 1379.43,
# 524.638, 195.694, 71.6639, 25.7868, 9.12372, 3.17583,
# 1.088, 0.36694, 0.121851, 0.0398426, 0.0128274, 0.00406596,
# 0.00126871, 0.000389638, 0.000117757, 3.50154e-05, 1.02425e-05, 2.94689e-06,
# 8.33821e-07, 2.32e-07, 6.34707e-08, 1.7073e-08, 4.51528e-09, 1.17408e-09,
# 3.00169e-10, 2.00066e-07, 0,
## from estimatePileupD.py for golden JSON for runs 136033-166861
# 1.56121e+07, 2.87272e+07, 5.46463e+07, 8.25868e+07, 1.03348e+08, 1.10229e+08,
# 1.02651e+08, 8.51755e+07, 6.40254e+07, 4.41874e+07, 2.8305e+07, 1.69782e+07,
# 9.60647e+06, 5.15872e+06, 2.64292e+06, 1.29757e+06, 612863, 279414,
# 123331, 52841.1, 22026.7,
## from estimatePileupD.py for golden JSON up to run 166502
#3.36441e+06, 6.50754e+06, 1.57837e+07, 2.75468e+07, 3.78054e+07, 4.31307e+07,
#4.2414e+07, 3.68867e+07, 2.8917e+07, 2.07353e+07, 1.37572e+07, 8.52297e+06,
#4.9674e+06, 2.74032e+06, 1.43822e+06, 721206, 346808, 160424,
#71576.4, 30874.3, 12901.2, #5231.58, 2061.91, 790.889,
## from estimatePileupD.py for golden JSON up to run 165542
#4.49103e+06, 7.50711e+06, 1.7013e+07, 2.77526e+07, 3.56721e+07, 3.82648e+07,
#3.55386e+07, 2.93206e+07, 2.18974e+07, 1.50169e+07, 9.56312e+06, 5.70622e+06,
#3.21393e+06, 1.71936e+06, 878374, 430566, 203380, 92934.5,
#41228.6, 17815.2, 7520.35,# 3109.37, 1262.01, 503.739,
#198.015, 76.7276, 29.3217, 11.0527, 4.10876, 1.50569,
#0.543606, 0.193229, 0.0675766, 0.0232364, 0.00785103, 0.0026052,
#0.000848637, 0.000271282, 8.50798e-05, 2.61736e-05, 7.8975e-06, 2.33716e-06,
#6.78371e-07, 1.93133e-07, 5.39384e-08, 1.47793e-08, 3.97367e-09, 1.04856e-09,
#2.71605e-10, 6.92423e-08, 0,
## from estimatePileupD.py for golden JSON up to run 163869
#3.6124e+06, 5.7606e+06, 1.3047e+07, 2.12065e+07, 2.71345e+07, 2.89995e+07,
#2.68765e+07, 2.21641e+07, 1.65695e+07, 1.13875e+07, 7.27332e+06, 4.35533e+06,
#2.46294e+06, 1.32354e+06, 679618, 335115, 159402, 73447,
#32906.5, 14384.3, 6152.9, #2581.8, 1064.77, 432.206,
#172.826, 68.1079, 26.4529, 10.1234, 3.81552, 1.4155,
#0.51655, 0.185307, 0.0653117, 0.0226036, 0.00767821, 0.00255903,
#0.000836568, 0.000268193, 8.43057e-05, 2.59835e-05, 7.85175e-06, 2.32636e-06,
#6.75872e-07, 1.92565e-07, 5.3812e-08, 1.47516e-08, 3.96773e-09, 1.0473e-09,
#2.71346e-10, 5.26651e-08, 0.
)
#mcFile = cms.FileInPath('pudist_G_Pt-15to3000_TuneZ2_Flat_7TeV_pythia6_Summer11.root'),
#mcHist = cms.string('pudist'),
#dataFile = cms.FileInPath('pudist_160404-163869_Cert_JSON.root'),
#dataHist = cms.string('pileup'),
)
process.p = cms.Path(
process.selectionSequence *
process.pmvTree
)
process.options.wantSummary = False
if __name__ == "__main__": import user
| [
"senka.duric@cern.ch"
] | senka.duric@cern.ch |
2166bbe2b5f956dda5de32511416df93133ed943 | c694ccdb024e425b42bc48c1c864a0117c6d8f36 | /advanced python/inheritance/demo.py | ed8dd9f7a77c728c955aa2962dcb6be8c71541ea | [] | no_license | shilpageo/pythonproject | bea79449a4311ed801cce826b6c3821aef00d674 | 5f5d40c5182b48167fbb456a53250e6d510c0709 | refs/heads/master | 2023-04-30T13:18:22.251978 | 2021-05-19T08:05:47 | 2021-05-19T08:05:47 | 368,791,731 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 762 | py | #person child parent student
#child & parent inherit person
#student class inherit child
class person:
    """Base class capturing common identity attributes."""

    def m1(self, name, age, gender):
        """Record name, age and gender, then echo them to stdout."""
        self.name, self.age, self.gender = name, age, gender
        print(self.name, self.age, self.gender)
class parent(person):
    """Parent: adds employment details on top of person."""

    def m2(self, job, place, salary):
        """Record job, place and salary, then echo them to stdout."""
        self.job = job
        self.place = place
        self.salary = salary
        # Fix: the original printed self.gender here, which (a) is not an
        # employment attribute and (b) is unset unless m1() was called
        # first, so a fresh parent() instance raised AttributeError.
        print(self.job, self.place, self.salary)
class child(person):
    """Child: adds the school attended on top of person."""

    def m3(self, school):
        """Record the school name and echo it to stdout."""
        self.school = school
        print(self.school)
class student(child):
    # Note: unlike the sibling classes, m4 prints a fixed marker string
    # rather than the attribute it stores.
    def m4(self, rollno):
        """Record the roll number; prints the literal marker "inside"."""
        self.rollno = rollno
        print("inside")
# Demo driver: `obj` is rebound before every call, so each class is
# exercised on a fresh instance (no state carries over between calls).
obj = person()
obj.m1("anu", 20, "f")
obj = parent()
obj.m2("sales", "kakanad", 25000)  # exercises parent.m2 without calling m1 first
obj = child()
obj.m3("ghss")
obj = student()
obj.m4(8)
| [
"shilpageo98@gmil.com"
] | shilpageo98@gmil.com |
bce0803449986455b0f915778f2a2087a5e29298 | 577c178a0751d9df22f05e69061c3333119b6639 | /chat_utils.py | f8eb4057d9b0b71e583f36428a47b1453f94b24e | [] | no_license | ANPULI/Final-Project | 9affbef48aa32d816c8f9222b7e89cdea8d01e40 | 386968a3fb9d21a7316706e5d5031c18898dc244 | refs/heads/master | 2021-01-20T13:50:24.404571 | 2017-05-07T12:30:29 | 2017-05-07T12:30:29 | 90,529,436 | 0 | 1 | null | 2017-05-08T09:14:18 | 2017-05-07T11:57:45 | null | UTF-8 | Python | false | false | 2,163 | py | import socket
import time
# Single-character message-type codes of the chat wire protocol.
M_UNDEF     = '0'
M_LOGIN     = '1'
M_CONNECT   = '2'
M_EXCHANGE  = '3'
M_LOGOUT    = '4'
M_DISCONNECT= '5'
M_SEARCH    = '6'
M_LIST      = '7'
M_POEM      = '8'
M_TIME      = '9'

# Server address; the commented variant binds the wildcard address.
#CHAT_IP = ''  # for Mac
CHAT_IP = socket.gethostname()  # for PC
CHAT_PORT = 1112
SERVER = (CHAT_IP, CHAT_PORT)

# Menu text shown to the user (one string, built with line continuations).
menu = "\n++++ Choose one of the following commands\n \
time: calendar time in the system\n \
who: to find out who else are there\n \
c _peer_: to connect to the _peer_ and chat\n \
? _term_: to search your chat logs where _term_ appears\n \
p _#_: to get number <#> sonnet\n \
q: to leave the chat system\n\n"

# Client connection states (see print_state below).
S_OFFLINE = 0
S_CONNECTED = 1
S_LOGGEDIN = 2
S_CHATTING = 3

# Width of the zero-padded decimal length header prefixed to every message.
SIZE_SPEC = 5

# Polling interval (seconds) used while chatting.
CHAT_WAIT = 0.2
def print_state(state):
    """Print a banner followed by the human-readable name of *state*."""
    print('**** State *****::::: ')
    names = {
        S_OFFLINE: 'Offline',
        S_CONNECTED: 'Connected',
        S_LOGGEDIN: 'Logged in',
        S_CHATTING: 'Chatting',
    }
    print(names.get(state, 'Error: wrong state'))
def mysend(s, msg):
    """Send *msg* over socket *s*, prefixed with a fixed-width length header.

    The header is the message length as a zero-padded decimal of
    SIZE_SPEC digits.  Sends in a loop until everything is written or the
    peer stops accepting data.
    """
    framed = (('0' * SIZE_SPEC + str(len(msg)))[-SIZE_SPEC:] + str(msg)).encode()
    sent_so_far = 0
    while sent_so_far < len(framed):
        n = s.send(framed[sent_so_far:])
        if n == 0:
            # send() returning 0 means the peer is gone.
            print('server disconnected')
            break
        sent_so_far += n
def myrecv(s):
    """Receive one length-prefixed message from socket *s*.

    First reads a SIZE_SPEC-digit decimal length header, then that many
    characters of payload.  Returns the payload, or '' (possibly partial)
    if the peer disconnects.
    """
    # Receive the fixed-width size header first.
    size = ''
    while len(size) < SIZE_SPEC:
        text = s.recv(SIZE_SPEC - len(size)).decode()
        if not text:
            print('disconnected')
            return ''
        size += text
    size = int(size)

    # Now receive the message body.
    msg = ''
    while len(msg) < size:
        text = s.recv(size - len(msg)).decode()
        # Fix: the original compared `text == b''`, but `text` is already a
        # decoded str, so the comparison was always False and a peer
        # disconnect mid-message spun this loop forever.  recv() signals a
        # closed connection by returning empty data.
        if not text:
            print('disconnected')
            break
        msg += text
    return msg
def text_proc(text, user):
    """Format a chat line as ``(dd.mm.yy,HH:MM) user : text``."""
    stamp = time.strftime('%d.%m.%y,%H:%M', time.localtime())
    return '({0}) {1} : {2}'.format(stamp, user, text)
| [
"al4902@nyu.edu"
] | al4902@nyu.edu |
1de34b116eaca67d495ae641e5e1605cf01e5040 | 9950a3b32a6199e6cecfa1ee31ba856c7ed8b95a | /TP_reconnaissance_faciale_avec_OpenCV/venv/Scripts/easy_install-3.8-script.py | d40d87e6d9d94e4e481e2310e01a0a702f6f4f62 | [] | no_license | EDU-FRANCK-JUBIN/ia-fun-tp-Aerowiel | f5703e60c7844888548dc8168a291c9c091bee0a | 81d739a983eacadd8a64f9c90ed1a85b18b1c818 | refs/heads/master | 2021-01-03T23:29:24.126484 | 2020-03-26T08:40:51 | 2020-03-26T08:40:51 | 240,281,697 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 470 | py | #!C:\Users\flo_c\OneDrive\Bureau\DeepLearningM2\TP1b\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install-3.8'
# Auto-generated setuptools wrapper that dispatches to the declared
# console_scripts entry point.
__requires__ = 'setuptools==40.8.0'
import re
import sys

from pkg_resources import load_entry_point

if __name__ == '__main__':
    # Strip a trailing "-script.py(w)" / ".exe" from argv[0] so the tool
    # sees its canonical name, then run the entry point and exit with its
    # return value.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install-3.8')()
    )
| [
"florian.pendaries@hotmail.fr"
] | florian.pendaries@hotmail.fr |
2d1144b4328f1dbe5f4d42775ae065a09aa4efe8 | 10dbd181f66eac8a95c699a233b67896f1d86855 | /faculties/tests.py | e90d5de9d01a0b96291dd52b01eb01c194cb8f43 | [] | no_license | chrisjluc/uniq-backend | 0bda917ac28dd203775b4db5bd612ef084394ca6 | 1b52f86a2f10f2cddc63f9351f05df9c0b23be33 | refs/heads/master | 2021-01-22T10:50:59.445166 | 2014-10-04T17:36:22 | 2014-10-04T17:36:22 | 19,684,673 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,114 | py | from .models import *
from schools.models import *
from uniq.testing.testcases import MongoTestCase
from django.conf import settings
from rest_framework import status
class FacultyTests(MongoTestCase):
    """Endpoint tests for the faculties API: list and detail lookups by
    school id, school slug, faculty id and faculty slug, plus the
    corresponding 404 cases."""

    # Ids of the fixture school/faculty created in setUp.
    sId = None
    fId = None

    def setUp(self):
        # One school ('s') containing one faculty ('f'), both valid for the
        # current year.
        s = School(slug='s', metaData__yearValid=settings.CURRENT_YEAR)
        s.save()
        f = Faculty(slug='f', schoolId=s.id, metaData__yearValid=settings.CURRENT_YEAR)
        f.save()
        self.sId = s.id
        self.fId = f.id

    def tearDown(self):
        # No explicit cleanup; presumably MongoTestCase resets the test
        # database between tests — TODO confirm.
        pass

    def test_get_list(self):
        # Bare list endpoint.
        response = self.client.get('/faculties/', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_get_list_school_id(self):
        # Faculties nested under a school, addressed by database id.
        response = self.client.get('/schools/%s/faculties/' % self.sId, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_get_list_school_slug(self):
        # Faculties nested under a school, addressed by slug.
        response = self.client.get('/schools/s/faculties/', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_get_detail_school_slug_faculty_slug(self):
        # Detail lookup by school slug + faculty slug.
        response = self.client.get('/schools/s/faculties/f/', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_get_detail_faculty_id(self):
        # Detail lookup by faculty database id.
        response = self.client.get('/faculties/%s/' % self.fId, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_get_list_school_id_invalid(self):
        # Unknown school id -> 404.
        response = self.client.get('/schools/invalidid111/faculties/', format='json')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_get_list_school_slug_invalid(self):
        # Unknown school slug -> 404.
        response = self.client.get('/schools/invalidslug/faculties/', format='json')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_get_detail_school_slug_faculty_slug_invalid(self):
        # Unknown faculty slug under a valid school -> 404.
        response = self.client.get('/schools/s/faculties/fsdfg/', format='json')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_get_detail_faculty_id_invalid(self):
        # Unknown faculty id -> 404.
        response = self.client.get('/faculties/invalidId/', format='json')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
"chris.luc.dev@gmail.com"
] | chris.luc.dev@gmail.com |
d500aa3f98ecceb40c6a0582fb2a7f7a0c747b78 | 7cf9967f153e3723cdbcfbd8ebb07e91adbe0210 | /views.py | 2d56e8e34a88dae8299a6adf6d55f5103f525fbb | [] | no_license | Groskilled/flasktaskr | ec01b5d5962ba68cb9611c533efa239b483f7653 | f8cc6c1d69bbdcf326fc6e0feec69cd08f242e00 | refs/heads/master | 2022-12-16T12:07:26.311673 | 2020-09-07T17:14:30 | 2020-09-07T17:14:30 | 293,538,826 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,147 | py | import sqlite3
from functools import wraps
from forms import AddTaskForm
from flask import Flask, flash, redirect, render_template, request, session, url_for, g
app = Flask(__name__)
app.config.from_object('_config')
def connect_db():
    """Open a new connection to the SQLite database configured for the app."""
    return sqlite3.connect(app.config['DATABASE_PATH'])
def login_required(test):
    """View decorator: allow the view only when the session is logged in,
    otherwise flash a message and redirect to the login page."""
    @wraps(test)
    def wrap(*args, **kwargs):
        if 'logged_in' in session:
            return test(*args, **kwargs)
        else:
            flash('You need to log in first.')
            return redirect(url_for('login'))
    return wrap
@app.route('/tasks/')
@login_required
def tasks():
    """Render the task list page: open tasks (status=1) and closed (status=0)."""
    g.db = connect_db()
    cursor = g.db.execute(
        'select name, due_date, priority, task_id from tasks where status =1')
    # Rows come back as tuples; repackage each as a dict for the template.
    open_tasks = [dict(name=row[0], due_date=row[1], priority=row[2], task_id=row[3]) for row in cursor.fetchall()]
    cursor = g.db.execute(
        'select name, due_date, priority, task_id from tasks where status = 0')
    closed_tasks = [dict(name=row[0], due_date=row[1], priority=row[2], task_id=row[3]) for row in cursor.fetchall()]
    g.db.close()
    return render_template(
        'tasks.html',
        form=AddTaskForm(request.form),
        open_tasks=open_tasks,
        closed_tasks=closed_tasks)
@app.route('/add/', methods=['POST'])
@login_required
def new_task():
    """Create a new open task from the submitted form.

    All three fields (name, due date, priority) are required; an
    incomplete form flashes an error and redirects back without touching
    the database.
    """
    name = request.form['name']
    date = request.form['due_date']
    priority = request.form['priority']
    if not name or not date or not priority:
        flash("All fields are required. Please try again.")
        return redirect(url_for('tasks'))
    # Fix: open the connection only after validation.  The original opened
    # it before the check and leaked it on the early-return path.
    g.db = connect_db()
    g.db.execute(
        'insert into tasks (name, due_date, priority, status) values (?, ?, ?, 1)',
        [name, date, priority])
    g.db.commit()
    g.db.close()
    flash('New entry was successfully posted. Thanks.')
    return redirect(url_for('tasks'))
@app.route('/complete/<int:task_id>/')
@login_required
def complete(task_id):
    """Mark the given task as complete (status=0) and redirect to the list."""
    g.db = connect_db()
    # Fix: use a parameterized query instead of string concatenation.
    # task_id is route-cast to int so injection is unlikely here, but
    # parameter binding is the correct sqlite3 idiom and matches new_task.
    g.db.execute('update tasks set status = 0 where task_id = ?', [task_id])
    g.db.commit()
    g.db.close()
    flash('The task was marked as complete.')
    return redirect(url_for('tasks'))
@app.route('/delete/<int:task_id>/')
@login_required
def delete(task_id):
    """Delete the given task and redirect to the task list."""
    g.db = connect_db()
    # Fix: parameterized query instead of string concatenation (see complete()).
    g.db.execute('delete from tasks where task_id = ?', [task_id])
    g.db.commit()
    g.db.close()
    flash('The task was deleted.')
    return redirect(url_for('tasks'))
@app.route('/logout/')
def logout():
    """Clear the login flag and send the user back to the login page."""
    session.pop('logged_in', None)
    flash('Goodbye!')
    return redirect(url_for('login'))
@app.route('/', methods=['GET', 'POST'])
def login():
    """Show the login form; on POST, check credentials against app config.

    Credentials are a single hard-coded username/password pair from the
    app configuration — there is no user database.
    """
    if request.method == 'POST':
        if request.form['username'] != app.config['USERNAME'] or request.form['password'] != app.config['PASSWORD']:
            error = 'Invalid Credentials. Please try again.'
            return render_template('login.html', error=error)
        else:
            session['logged_in'] = True
            flash('Welcome!')
            return redirect(url_for('tasks'))
    return render_template('login.html')
| [
"adam.wybierala@gmail.com"
] | adam.wybierala@gmail.com |
bdc158529692c885b2e550c6b091ca4223bf58c6 | 679c4e4c0527bd4f36d1d6861ddcec4e1db394f5 | /config.py | 94be7a92fadcb6de32931b156ba3fc501704ee7b | [] | no_license | yangify/dollback | 97269d5b48e3006944b0ab5645e9d2db208c5fa5 | f2bfb37104aac33fdcc1702a1a0a66f1fc5212ce | refs/heads/main | 2023-06-17T15:22:14.436745 | 2021-07-13T07:14:23 | 2021-07-13T07:14:23 | 362,321,634 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,609 | py | # CELERY
# CELERY
CELERY_BROKER_URL = 'amqp://localhost:5672'

# DECOMPILER
# <INPUT_PATH>/<OUTPUT_PATH> in the command templates are placeholders
# substituted by the caller before execution.
DECOMPILERS = ['apktool', 'jadx']
APK_FOLDER_PATH = './resources/apk'
SOURCE_CODE_FOLDER_PATH = './resources/code'
LINK_FOLDER_PATH = './resources/link'
APKTOOL_COMMAND = 'java -jar ./tools/decompiler/apktool/apktool.jar d <INPUT_PATH> -o <OUTPUT_PATH>'
JADX_COMMAND = './tools/decompiler/jadx/bin/jadx -d <OUTPUT_PATH> <INPUT_PATH>'

# DATABASE
MONGO_URI = 'mongodb://localhost:27017/dollback'

# GIT
COMMIT = 'cd resources/code/(unknown); git init; git add .; git commit -m "first commit"'

# SOURCEGRAPH
SOURCEGRAPH_URL = 'http://localhost:7080'
SOURCEGRAPH_API = '/.api/graphql'
# SECURITY NOTE(review): access token committed in source — rotate it and
# load it from the environment instead of hard-coding.
SOURCEGRAPH_TOKEN = 'cd511fcfa4968559732f6863ef4fd7bc17c22bc3'
SOURCEGRAPH_LOCALHOST = 'RXh0ZXJuYWxTZXJ2aWNlOjk='
# GraphQL templates; {host_id}, <REPO_NAME>, <SEARCH_TERM>, <PATTERN_TYPE>
# are placeholders filled in by the caller.
SOURCEGRAPH_UPDATE_HOST_QUERY = 'mutation UpdateExternalService($input: UpdateExternalServiceInput = {id: \"{' \
                                'host_id}\"}) { updateExternalService(input: $input) { id, displayName } }'
SOURCEGRAPH_SEARCH_QUERY = 'query { '\
                           ' search ( query: \"repo:^<REPO_NAME>$ <SEARCH_TERM> count:all\" patternType: <PATTERN_TYPE> ) { '\
                           ' results { '\
                           ' matchCount '\
                           ' results { ...result } '\
                           ' } '\
                           ' } '\
                           '} '\
                           'fragment result on FileMatch { '\
                           ' file { path name } '\
                           ' lineMatches { offsetAndLengths preview } '\
                           '}'
| [
"hong.yang.code@gmail.com"
] | hong.yang.code@gmail.com |
16b6e6b1b55c62455ce9853d34789e374b444548 | eec99bf43da49bd00f7a9bd04f2f6a8245d5d33b | /app.py | b5921b13eb09c07c2a341ef35a6564d59904e6e8 | [] | no_license | Pazoles/Geocoder | 88ad9194f21d7c94755d834257de5855f690f3f8 | 8b80bd68e2de2a381843e1bb729e68459b710504 | refs/heads/master | 2021-01-17T13:08:48.664070 | 2016-07-06T15:38:17 | 2016-07-06T15:38:17 | 56,185,477 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,188 | py | from flask import Flask, make_response, request, render_template, redirect, url_for, send_file
from census_geo import chunketize, geocode
from io import StringIO
app = Flask(__name__)
#app.config.from_object('config')
@app.route('/geo', methods=["POST"])
def geo_post():
    """Geocode an uploaded CSV and return the result as a file download."""
    file = request.files['data_file']
    if not file:
        return "No file attached"
    # Decode the upload as UTF-8 text; geocode() takes the raw CSV string.
    #file_contents = file.stream.readlines()[1:]
    file_contents = StringIO(file.stream.read().decode("UTF8"))
    result = geocode(file_contents.getvalue())
    # Serve the result as an attachment so the browser downloads it.
    response = make_response(result)
    response.headers["Content-Disposition"] = "attachment; filename=result.csv"
    return response
@app.route('/')
def home():
    """Render the upload form."""
    return render_template('geo.html')
# Error Handlers (404, 403, 500)
@app.errorhandler(404)
def erorr404(error):
    # NOTE(review): function name is misspelled ("erorr"), and the 500
    # handler below reuses the same name, shadowing this one at module
    # level.  The decorator registration itself is unaffected.
    return render_template('404.html'), 404
@app.errorhandler(403)
def erorr403(error):
    # NOTE(review): misspelled name ("erorr403"); harmless but worth fixing.
    return render_template('403.html'), 403
@app.errorhandler(500)
def error500(error):
    """Render the 500 page.

    Fix: this handler was also named ``erorr404``, duplicating (and
    shadowing) the 404 handler's module-level name.  Registration through
    the decorator still worked, but the duplicate name was a latent bug.
    """
    # May want to add a db rollback in here too.
    return render_template('500.html'), 500
if __name__ == '__main__':
    # Debug mode is for local development only; disable before deploying.
    app.debug = True
    app.run(debug=True)
| [
"pazoles@gmail.com"
] | pazoles@gmail.com |
e1b81cb223846f4d96ab1576e3fe332878928f01 | e347ab2874921ac8af1115ffb005ec5910ae8007 | /venv/Scripts/pip3.6-script.py | 3e7943e87e0e161265f42f08e4a20004d8e43bea | [] | no_license | Nash-Git/linkedListSearch | ff29a9df57dc233c36b83d7276b5065341685c8a | d9d6dd8b796bc4fd3fbce5a68322ccfc897c2bc4 | refs/heads/master | 2020-03-27T10:55:45.830869 | 2018-08-28T13:51:55 | 2018-08-28T13:51:55 | 146,454,283 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 426 | py | #!C:\Users\asifn\PycharmProjects\searchLinkedList\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3.6'
# Auto-generated setuptools wrapper that dispatches to pip's declared
# console_scripts entry point.
__requires__ = 'pip==10.0.1'
import re
import sys

from pkg_resources import load_entry_point

if __name__ == '__main__':
    # Normalize argv[0] (drop "-script.py(w)" / ".exe"), then run the
    # entry point and exit with its return value.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==10.0.1', 'console_scripts', 'pip3.6')()
    )
| [
"asif.nashiry@gmail.com"
] | asif.nashiry@gmail.com |
5020986750ba000c101d3daa6a4f0bf93cd120bc | f8ac82a3e8a7dc3632edfd2812dc7e347ce5be9f | /RTS/3.1-Aperture_Phase_Efficiency/spiral_holography_scan.py | b65da559b383d5191664b58d3e6f9a993b942317 | [] | no_license | bongani-ska/https-katpull-katpull4git-github.com-ska-sa-katsdpscripts | af3cd569e9f3ec1c1ab341d35c2780b9b6833bfb | b44cbdcca02b497c4b100839ccdfc4813c793b35 | refs/heads/master | 2016-09-06T08:52:24.645635 | 2015-02-18T05:57:43 | 2015-02-18T05:57:43 | 31,056,757 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,933 | py | #!/usr/bin/python
# Perform spiral holography scan on specified target(s). Mostly used for beam pattern measurement.
#
# to run on simulator:
# ssh kat@monctl.comm
# kat-start.sh (may need to call kat-stop.sh, or better kat-kill.py, and possibly kill all screen sessions on kat@monctl.comm, and possibly kat@proxy.monctl.comm)
# ipython
# import katuilib
# configure()
# %run ~/scripts/observation/spiral_holography_scan.py -f 1722 -a ant1,ant2,ant3,ant4,ant5,ant6,ant7 -b ant2,ant3 --num-cycles 1 --cycle-duration 120 -l 12 'AFRISTAR' -o mattieu --sb-id-code='20121030-0003'
# look on http://kat-flap.control.kat.ac.za/kat/KatGUI.swf and connect to 'comm'
#
#using schedule blocks
#help on schedule blocks: https://sites.google.com/a/ska.ac.za/intranet/teams/operators/kat-7-nominal-procedures/frequent-tasks/control-tasks/observation
#to view progress: http://192.168.193.8:8081/tailtask/<sb_id_code>/progress
#to view signal displays remotely safari goto "vnc://kat@right-paw.control.kat.ac.za"
#
#ssh kat@kat-ops.karoo
#ipython
#import katuilib
#configure_obs()
#obs.sb.new_clone('20121203-0013')
#obs.sb.instruction_set="run-obs-script ~/scripts/observation/spiral_holography_scan.py -f 1722 -b ant5 --scan-extent 6 --cycle-duration 6000 --num-cycles 1 --kind 'uniform' '3C 286' --stow-when-done"
#look on http://kat-flap.control.kat.ac.za/kat/KatGUI.swf and connect to 'karoo from site'
# The *with* keyword is standard in Python 2.6, but has to be explicitly imported in Python 2.5
from __future__ import with_statement
import time
import katpoint
# Import script helper functions from observe.py
from katcorelib import standard_script_options, verify_and_connect, collect_targets, \
start_session, user_logger, ant_array
import numpy as np
import scipy
from scikits.fitting import NonLinearLeastSquaresFit, PiecewisePolynomial1DFit
#anystowed=np.any([res._returns[0][4]=='STOW' for res in all_ants.req.sensor_value('mode').values()])
def plane_to_sphere_holography(targetaz,targetel,ll,mm):
scanaz=targetaz-np.arcsin(np.clip(ll/np.cos(targetel),-1.0,1.0))
scanel=np.arcsin(np.clip((np.sqrt(1.0-ll**2-mm**2)*np.sin(targetel)+np.sqrt(np.cos(targetel)**2-ll**2)*mm)/(1.0-ll**2),-1.0,1.0))
return scanaz,scanel
#same as katpoint.projection._sphere_to_plane_common(az0=scanaz,el0=scanel,az=targetaz,el=targetel) with ll=ortho_x,mm=-ortho_y
def sphere_to_plane_holography(targetaz,targetel,scanaz,scanel):
#produces direction cosine coordinates from scanning antenna azimuth,elevation coordinates
#see _coordinate options.py for derivation
ll=np.cos(targetel)*np.sin(targetaz-scanaz)
mm=np.cos(targetel)*np.sin(scanel)*np.cos(targetaz-scanaz)-np.cos(scanel)*np.sin(targetel)
return ll,mm
def spiral(params,indep):
x0=indep[0]
y0=indep[1]
r=params[0]
x=r*np.cos(2.0*np.pi*r)
y=r*np.sin(2.0*np.pi*r)
return np.sqrt((x-x0)**2+(y-y0)**2)
#note that we want spiral to only extend to above horizon for first few scans in case source is rising
#should test if source is rising or setting before each composite scan, and use -compositey if setting
def generatespiral(totextent,tottime,tracktime=1,sampletime=1,kind='uniform',mirrorx=False):
totextent=np.float(totextent)
tottime=np.float(tottime)
sampletime=np.float(sampletime)
nextrazeros=int(np.float(tracktime)/sampletime)
print 'nextrazeros',nextrazeros
tracktime=nextrazeros*sampletime
radextent=np.float(totextent)/2.0
if (kind=='dense-core'):
c=np.sqrt(2)*180.0/(16.0*np.pi)
narms=2*int(np.sqrt(tottime/c+(tracktime/c)**2)-tracktime/c)#ensures even number of arms - then scan pattern ends on target (if odd it will not)
ntime=int((tottime-tracktime*narms)/(sampletime*narms))
armrad=radextent*(np.linspace(0,1,ntime))
armtheta=np.linspace(0,np.pi,ntime)
armx=armrad*np.cos(armtheta)
army=armrad*np.sin(armtheta)
elif (kind=='approx'):
c=180.0/(16.0*np.pi)
narms=2*int(np.sqrt(tottime/c+(tracktime/c)**2)-tracktime/c)#ensures even number of arms - then scan pattern ends on target (if odd it will not)
ntime=int((tottime-tracktime*narms)/(sampletime*narms))
armrad=radextent*(np.linspace(0,1,ntime))
armtheta=np.linspace(0,np.pi,ntime)
armx=armrad*np.cos(armtheta)
army=armrad*np.sin(armtheta)
dist=np.sqrt((armx[:-1]-armx[1:])**2+(army[:-1]-army[1:])**2)
narmrad=np.cumsum(np.concatenate([np.array([0]),1.0/dist]))
narmrad*=radextent/max(narmrad)
narmtheta=narmrad/radextent*np.pi
armx=narmrad*np.cos(narmtheta)
army=narmrad*np.sin(narmtheta)
else:#'uniform'
c=180.0/(16.0*np.pi)
narms=2*int(np.sqrt(tottime/c+(tracktime/c)**2)-tracktime/c)#ensures even number of arms - then scan pattern ends on target (if odd it will not)
ntime=int((tottime-tracktime*narms)/(sampletime*narms))
armx=np.zeros(ntime)
army=np.zeros(ntime)
#must be on curve x=t*cos(np.pi*t),y=t*sin(np.pi*t)
#intersect (x-x0)**2+(y-y0)**2=1/ntime**2 with spiral
lastr=0.0
for it in range(1,ntime):
data=np.array([1.0/ntime])
indep=np.array([armx[it-1],army[it-1]])#last calculated coordinate in arm, is x0,y0
initialparams=np.array([lastr+1.0/ntime]);
fitter=NonLinearLeastSquaresFit(spiral,initialparams)
fitter.fit(indep,data)
lastr=fitter.params[0];
armx[it]=lastr*np.cos(2.0*np.pi*lastr)
army[it]=lastr*np.sin(2.0*np.pi*lastr)
maxrad=np.sqrt(armx[it]**2+army[it]**2)
armx=armx*radextent/maxrad
army=army*radextent/maxrad
# ndist=sqrt((armx[:-1]-armx[1:])**2+(army[:-1]-army[1:])**2)
# print ndist
compositex=[[] for ia in range(narms)]
compositey=[[] for ia in range(narms)]
ncompositex=[[] for ia in range(narms)]
ncompositey=[[] for ia in range(narms)]
reverse=False
for ia in range(narms):
rot=-ia*np.pi*2.0/narms
x=armx*np.cos(rot)-army*np.sin(rot)
y=armx*np.sin(rot)+army*np.cos(rot)
nrot=ia*np.pi*2.0/narms
nx=armx*np.cos(nrot)-army*np.sin(nrot)
ny=armx*np.sin(nrot)+army*np.cos(nrot)
if (nextrazeros>0):
x=np.r_[np.repeat(0.0,nextrazeros),x]
y=np.r_[np.repeat(0.0,nextrazeros),y]
nx=np.r_[np.repeat(0.0,nextrazeros),nx]
ny=np.r_[np.repeat(0.0,nextrazeros),ny]
if reverse:
reverse=False
x=x[::-1]
y=y[::-1]
nx=nx[::-1]
ny=ny[::-1]
else:
reverse=True
if (mirrorx):
compositex[ia]=-x
compositey[ia]=y
ncompositex[ia]=-nx
ncompositey[ia]=ny
else:
compositex[ia]=x
compositey[ia]=y
ncompositex[ia]=nx
ncompositey[ia]=ny
return compositex,compositey,ncompositex,ncompositey
# Set up standard script options
parser = standard_script_options(usage="%prog [options] <'target/catalogue'> [<'target/catalogue'> ...]",
description='This script performs a holography scan on the specified target. '
'All the antennas initially track the target, whereafter a subset '
'of the antennas (the "scan antennas" specified by the --scan-ants '
'option) perform a spiral raster scan on the target. Note also some '
'**required** options below.')
# Add experiment-specific options
parser.add_option('-b', '--scan-ants', help='Subset of all antennas that will do raster scan (default=first antenna)')
parser.add_option('--num-cycles', type='int', default=1,
help='Number of beam measurement cycles to complete (default=%default)')
parser.add_option('--cycle-duration', type='float', default=300.0,
help='Time to spend measuring beam pattern per cycle, in seconds (default=%default)')
parser.add_option('-l', '--scan-extent', type='float', default=4.0,
help='Diameter of beam pattern to measure, in degrees (default=%default)')
parser.add_option('--kind', type='string', default='uniform',
help='Kind of spiral, could be "uniform" or "dense-core" (default=%default)')
parser.add_option('--tracktime', type='float', default=1.0,
help='Extra time in seconds for scanning antennas to track when passing over target (default=%default)')
parser.add_option('--sampletime', type='float', default=1.0,
help='time in seconds to spend on pointing (default=%default)')
parser.add_option('--mirrorx', action="store_true", default=False,
help='Mirrors x coordinates of pattern (default=%default)')
parser.add_option('--no-delays', action="store_true", default=False,
help='Do not use delay tracking, and zero delays')
# Set default value for any option (both standard and experiment-specific options)
parser.set_defaults(description='Spiral holography scan', nd_params='off')
# Parse the command line
opts, args = parser.parse_args()
compositex,compositey,ncompositex,ncompositey=generatespiral(totextent=opts.scan_extent,tottime=opts.cycle_duration,tracktime=opts.tracktime,sampletime=opts.sampletime,kind=opts.kind,mirrorx=opts.mirrorx)
timeperstep=opts.sampletime;
if len(args) == 0:
raise ValueError("Please specify a target argument via name ('Ori A'), "
"description ('azel, 20, 30') or catalogue file name ('sources.csv')")
# Check basic command-line options and obtain a kat object connected to the appropriate system
with verify_and_connect(opts) as kat:
if not kat.dry_run and kat.ants.req.mode('STOP') :
user_logger.info("Setting Antenna Mode to 'STOP', Powering on Antenna Drives.")
else:
user_logger.error("Unable to set Antenna mode to 'STOP'.")
catalogue = collect_targets(kat, args)
targets=catalogue.targets
if len(targets) == 0:
raise ValueError("Please specify a target argument via name ('Ori A'), "
"description ('azel, 20, 30') or catalogue file name ('sources.csv')")
target=targets[0]#only use first target
lasttargetel=target.azel()[1]*180.0/np.pi
# Initialise a capturing session (which typically opens an HDF5 file)
with start_session(kat, **vars(opts)) as session:
# Use the command-line options to set up the system
session.standard_setup(**vars(opts))
if not opts.no_delays and not kat.dry_run :
if session.dbe.req.auto_delay('on'):
user_logger.info("Turning on delay tracking.")
else:
user_logger.error('Unable to turn on delay tracking.')
elif opts.no_delays and not kat.dry_run:
if session.dbe.req.auto_delay('off'):
user_logger.info("Turning off delay tracking.")
else:
user_logger.error('Unable to turn off delay tracking.')
if session.dbe.req.zero_delay():
user_logger.info("Zeroed the delay values.")
else:
user_logger.error('Unable to zero delay values.')
all_ants = session.ants
# Form scanning antenna subarray (or pick the first antenna as the default scanning antenna)
scan_ants = ant_array(kat, opts.scan_ants if opts.scan_ants else session.ants[0], 'scan_ants')
# Assign rest of antennas to tracking antenna subarray
track_ants = ant_array(kat, [ant for ant in all_ants if ant not in scan_ants], 'track_ants')
# Disable noise diode by default (to prevent it firing on scan antennas only during scans)
nd_params = session.nd_params
session.nd_params = {'diode': 'coupler', 'off': 0, 'on': 0, 'period': -1}
session.capture_start()
session.label('holo')
user_logger.info("Initiating spiral holography scan cycles (%d %g-second cycles extending %g degrees) on target '%s'"
% (opts.num_cycles, opts.cycle_duration, opts.scan_extent, target.name))
for cycle in range(opts.num_cycles):
targetel=target.azel()[1]*180.0/np.pi
if (targetel>lasttargetel):#target is rising - scan top half of pattern first
cx=compositex
cy=compositey
if (targetel<opts.horizon):
user_logger.info("Exiting because target is %g degrees below horizon limit of %g."%((opts.horizon-targetel),opts.horizon))
break;# else it is ok that target just above horizon limit
else:#target is setting - scan bottom half of pattern first
cx=ncompositex
cy=ncompositey
if (targetel<opts.horizon+(opts.scan_extent/2.0)):
user_logger.info("Exiting because target is %g degrees too low to accommodate a scan extent of %g degrees above the horizon limit of %g."%((opts.horizon+(opts.scan_extent/2.0)-targetel),opts.scan_extent,opts.horizon))
break;
user_logger.info("Performing scan cycle %d."%(cycle+1))
lasttargetel=targetel
session.ants = all_ants
user_logger.info("Using all antennas: %s" % (' '.join([ant.name for ant in session.ants]),))
session.track(target, duration=0, announce=False)
session.fire_noise_diode(announce=False, **nd_params)#provides opportunity to fire noise diode
session.ants = scan_ants
user_logger.info("Using scan antennas: %s" % (' '.join([ant.name for ant in session.ants]),))
# session.set_target(target)
# session.ants.req.drive_strategy('shortest-slew')
# session.ants.req.mode('POINT')
for iarm in range(len(cx)):#spiral arm index
scan_index=0
wasstowed=False
while(scan_index!=len(cx[iarm])-1):
while (not kat.dry_run and wasstowed):
user_logger.info("Attempting to recover from wind stow" )
session.ants = all_ants
user_logger.info("Using all antennas: %s" % (' '.join([ant.name for ant in session.ants]),))
session.track(target, duration=0, announce=False)
if (not any([res._returns[0][4]=='STOW' for res in all_ants.req.sensor_value('mode').values()])):
scan_index=0
wasstowed=False
session.fire_noise_diode(announce=False, **nd_params)#provides opportunity to fire noise diode
session.ants = scan_ants
user_logger.info("Using scan antennas: %s" % (' '.join([ant.name for ant in session.ants]),))
if (cx[iarm][scan_index]!=0.0 or cy[iarm][scan_index]!=0.0):
targetaz_rad,targetel_rad=target.azel()
scanaz,scanel=plane_to_sphere_holography(targetaz_rad,targetel_rad,cx[iarm][scan_index]*np.pi/180.0,cy[iarm][scan_index]*np.pi/180.0)
# targetx,targety=katpoint.sphere_to_plane[opts.projection](targetaz_rad,targetel_rad,scanaz,scanel)
targetx,targety=sphere_to_plane_holography(scanaz,scanel,targetaz_rad,targetel_rad)
session.ants.req.offset_fixed(targetx*180.0/np.pi,-targety*180.0/np.pi,opts.projection)
# session.ants.req.offset_fixed(cx[iarm][scan_index],cy[iarm][scan_index],opts.projection)
time.sleep(10)#gives 10 seconds to slew to outside arm if that is where pattern commences
user_logger.info("Recovered from wind stow, repeating cycle %d scan %d"%(cycle+1,iarm+1))
else:
time.sleep(60)
lastproctime=time.time()
for scan_index in range(len(cx[iarm])):#spiral arm scan
targetaz_rad,targetel_rad=target.azel()
scanaz,scanel=plane_to_sphere_holography(targetaz_rad,targetel_rad,cx[iarm][scan_index]*np.pi/180.0,cy[iarm][scan_index]*np.pi/180.0)
# targetx,targety=katpoint.sphere_to_plane[opts.projection](targetaz_rad,targetel_rad,scanaz,scanel)
targetx,targety=sphere_to_plane_holography(scanaz,scanel,targetaz_rad,targetel_rad)
session.ants.req.offset_fixed(targetx*180.0/np.pi,-targety*180.0/np.pi,opts.projection)
# session.ants.req.offset_fixed(cx[iarm][scan_index],cy[iarm][scan_index],opts.projection)
curproctime=time.time()
proctime=curproctime-lastproctime
if (timeperstep>proctime):
time.sleep(timeperstep-proctime)
lastproctime=time.time()
if not kat.dry_run and (np.any([res._returns[0][4]=='STOW' for res in all_ants.req.sensor_value('mode').values()])):
if (wasstowed==False):
user_logger.info("Cycle %d scan %d interrupted. Some antennas are stowed ... waiting to resume scanning"%(cycle+1,iarm+1) )
wasstowed=True
time.sleep(60)
break#repeats this spiral arm scan if stow occurred
#set session antennas to all so that stow-when-done option will stow all used antennas and not just the scanning antennas
session.ants = all_ants
| [
"ludwig@ska.ac.za"
] | ludwig@ska.ac.za |
83d54729d3675a348d081a2a13660746e43a5464 | dc95bde612acd19a37e6cf49143124307e98b8cd | /appdaemon/apps/telegram.py | e594f94527485270341e7970f26d19980d931a3f | [] | no_license | kf-nz/Home-AssistantConfig | 5741b06edf1fb2c7f043adb64cc2cf1f19945df1 | 11448d8571376c04e733aca15bef2a12a1ee24f5 | refs/heads/master | 2023-01-27T11:38:08.374457 | 2019-09-21T23:00:55 | 2019-09-21T23:00:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,818 | py | import appdaemon.plugins.hass.hassapi as hass
#
# Hello World App
#
# Args:
#
class Telegram(hass.Hass):
def initialize(self):
self.listen_event(self.update_homeassistant, event="telegram_command")
def update_homeassistant(self, event_id, payload_event, *args):
assert event_id == 'telegram_command'
user_id = payload_event['user_id']
command = payload_event['command']
if command == '/update':
self.call_service("mqtt/publish", topic="notifications/newmsg/telegram", payload="Updating HomeAssistant now. You will be notified on restart.")
self.call_service("shell_command/update_homeassistant")
elif command == '/clean':
self.call_service("mqtt/publish", topic="notifications/newmsg/telegram", payload="Pruning docker images now.")
self.call_service("shell_command/cleanup_homeassistant")
elif command == '/reboot_win10':
self.call_service("mqtt/publish", topic="notifications/newmsg/telegram", payload="Issuing reboot command to WIN10 now.")
self.call_service("shell_command/reboot_win10")
elif command == '/where':
sarah_location = self.get_state(entity='sensor.google_geocode_sarah')
self.call_service("mqtt/publish", topic="notifications/newmsg/telegram", payload="Sarah's location is: " + sarah_location)
elif command == '/trains':
self.call_service("mqtt/publish", topic="notifications/newmsg/telegram", payload="The next train service is scheduled for " + self.get_state(entity="sensor.ptv", attribute="train0_scheduled") + " with an estimated departure time of " + self.get_state(entity="sensor.ptv", attribute="train0_estimated") + " followed by " + self.get_state(entity="sensor.ptv", attribute="train1_scheduled")) | [
"kyle@tai.net.au"
] | kyle@tai.net.au |
1173c8ddeab00743e7c147ed16286408edcf1937 | 2e14fd220d111ff9ff45c2ebd125e63a08e3ae47 | /data-science/machine-learning-A_Z/classification/random_forest_classification/random_forest_classification.py | 0206c3bfa905ec2124b2ba3a8f9f54eef6716bc2 | [] | no_license | Mohan-Sharma/machine-learning | b02e8c2fd619e8887164fc4d9f60adc394840b72 | f9431e592666c7f2507ef12583fa4de3f117be75 | refs/heads/master | 2022-05-27T22:43:29.087717 | 2020-05-05T07:37:05 | 2020-05-05T07:37:05 | 257,939,986 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,433 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue May 5 12:55:49 2020
@author: i504180
"""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import confusion_matrix
from matplotlib.colors import ListedColormap
data_set = pd.read_csv("Social_Network_Ads.csv")
X = data_set.iloc[:, [2, 3]].values
y = data_set.iloc[:, 4].values
scalar = StandardScaler()
X_scaled = scalar.fit_transform(X)
X_train, X_test, y_train, y_test = train_test_split(X_scaled, y, test_size=0.25, random_state = 0)
classifier = RandomForestClassifier(n_estimators=10, criterion='entropy', random_state=0)
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
matrix = confusion_matrix(y_test, y_pred)
print(matrix)
X_set, y_set = X_train, y_train
h,v = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01),
np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01))
plt.contourf(h, v,
classifier.predict(np.array([h.ravel(), v.ravel()]).T).reshape(h.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(h.min(), h.max())
plt.ylim(v.min(), v.max())
for i, j in enumerate(np.unique(y_set)):
plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
c = ListedColormap(('orange', 'blue'))(i), label = j)
plt.title('Random Forest Classifier (Training set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.show()
X_set, y_set = X_test, y_test
h,v = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01),
np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01))
plt.contourf(h, v,
classifier.predict(np.array([h.ravel(), v.ravel()]).T).reshape(h.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(h.min(), h.max())
plt.ylim(v.min(), v.max())
for i, j in enumerate(np.unique(y_set)):
plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
c = ListedColormap(('orange', 'blue'))(i), label = j)
plt.title('Random Forest Classifier (Test set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.show() | [
"mohan.sharma@sap.com"
] | mohan.sharma@sap.com |
d7a205af7c090cc3e0e1c409c325c99e1b889c37 | 02ebf3d3d9db7753322a0fbf4cc25dd4903ee4da | /dbtools.py | 74e92d01cf9b40052a9f76498d08d32d351396c3 | [] | no_license | Bocoul/ctrlm | 8aab9da15d0a9cda2b5a0c6c6ff89d6082f0c5be | a5f14009e937ff9b0955b4316f403839b133e509 | refs/heads/master | 2022-12-01T17:03:24.551562 | 2020-08-13T16:06:49 | 2020-08-13T16:06:49 | 284,099,644 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 499 | py | def db_copytable(filename, table1, table2):
import sqlite3
with sqlite3.connect(filename) as conn:
cur = conn.cursor()
res = cur.execute(
"""
INSERT INTO "{}" ("rae", "code_postal", "commune", "code_insee", "siren", "voie")
SELECT DISTINCT "rae", "code_postal", "commune", "code_insee", "siren", "voie" FROM "{}";
""".format(table1, table2))
if __name__ == "__main__":
db_copytable("db_old/ctrlm.db", "ctrlm_pdl", "pdl") | [
"36362851+Bocoul@users.noreply.github.com"
] | 36362851+Bocoul@users.noreply.github.com |
a62371ca75488a425afb243b46919e2ed99b0abe | a5acb5af0dd0395e8a09d457b331d4a602d1eae9 | /ot_myprojectdir/wsgi.py | 4bd78880a90bd3226f6b368802d28f04b8a5b83b | [] | no_license | jfialkoff/otter-template | ead588cf8cae7ccbbeae04c6e3bd3c80afd07ecf | 4a9d264523fd3cb0563eb48e8582a0be0e92bc43 | refs/heads/master | 2021-01-10T09:09:44.266463 | 2015-09-28T21:22:11 | 2015-09-28T21:22:11 | 43,175,877 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 673 | py | """
WSGI config for ot_myproject project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
from dotenv import load_dotenv
os.environ.setdefault("DJANGO_SETTINGS_MODULE",
"ot_myproject.settings")
dotenv_path = os.path.join(
os.path.dirname(os.path.dirname(__file__)), '.env')
load_dotenv(dotenv_path)
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
| [
"joshua.fialkoff@setaris.com"
] | joshua.fialkoff@setaris.com |
760b340e2f8abb38ffacf015872fcfc244ffc73f | 6efc6135853f755bc5be3c22c566f91d07ccc481 | /venv/bin/pip3 | 4791c26014041b412328bb07c6268102029e707d | [] | no_license | MurrayWheten/python-pi-example | c9f7696aa176ab81b08ede34ba4cb8bf8ac413e0 | 3c1f977167af21a9615a7081a75bef0793546d9f | refs/heads/master | 2020-12-13T13:46:40.015964 | 2020-01-17T02:15:36 | 2020-01-17T02:15:36 | 234,435,241 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399 | #!/home/mwheten/PycharmProjects/py/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip3')()
)
| [
"mdwheten@gmail.com"
] | mdwheten@gmail.com | |
ce0841fb8cde5566b298cf3cd88ce6d77815d66a | 27207ea32a47d8c39a7385c2720ea10bdb16496b | /rc4/rc4.py | f537376dbabc9dfb5615d81dd48518a91988f30e | [] | no_license | Nabagata/CryptographyLab | 150f80d3173f9ae9b83b73a7876f0139220c906e | 12a4df700cc2980d3a520f21dd8b3917bf063e57 | refs/heads/master | 2020-07-03T23:02:34.972228 | 2019-11-19T09:52:55 | 2019-11-19T09:52:55 | 202,080,155 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,380 | py | import argparse
import itertools
def initialize_K(key):
K = []
key = itertools.cycle(key)
for i in xrange(256):
K.append(key.next())
return K
def initialize_S(K):
S = range(256)
j = 0
for i in xrange(256):
j = (j + S[i] + K[i]) % 256
S[i], S[j] = S[j], S[i]
return S
def get_X(S, length, print_x=False):
i = 0
j = 0
for x in xrange(length):
i = (i + 1) % 256
j = (j + S[i]) % 256
S[i], S[j] = S[j], S[i]
m = (S[i] + S[j]) % 256
X = S[m]
if print_x:
print X
yield X
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Implements RC4 encryption.')
parser.add_argument('-k', '--key', type=str, default='12345', help='The key used for encryption.')
parser.add_argument('-m', '--message', type=str, required=True, help='The message to be encrypted.')
parser.add_argument('-x', dest='x', action='store_true', help='Whether or not to print out the X values.')
parser.set_defaults(x=False)
args = parser.parse_args()
key = [ord(k) - ord('0') for k in args.key]
K = initialize_K(key)
S = initialize_S(K)
X = get_X(S, len(args.message), args.x)
ciphertext = []
for char in args.message:
ciphertext.append(chr(ord(char) ^ X.next()))
print ''.join(ciphertext) | [
"ngsaha234@gmail.com"
] | ngsaha234@gmail.com |
5574b3d53d3f227568d2b1bcaeb0b90fef882412 | a84f9a0736268b638dedc6e90cf4c520c6b97df8 | /precision_recall.py | de30480e4bb8ef8b39ee683cea111bb506b626a5 | [] | no_license | bhushan-ncsu/FakeNewsIdentifier | b4185fa687def0ed4d736c58d3edd5c87572ca71 | aec55e19c41ad22d8e08de56a18a71eaf7556b1b | refs/heads/master | 2021-05-09T03:18:52.928629 | 2018-01-28T14:30:12 | 2018-01-28T14:30:12 | 119,237,626 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,156 | py | import pandas as pd
from nltk.corpus import stopwords
import nltk
import re
import os
import random
from gensim.models.doc2vec import LabeledSentence, Doc2Vec
import pickle
from sklearn.naive_bayes import MultinomialNB
from sklearn.naive_bayes import BernoulliNB
from sklearn.metrics import precision_recall_curve
from sklearn.metrics import average_precision_score
import numpy as np
#load model
model = Doc2Vec.load(os.path.join("trained", "comments2vec.d2v"))
with open ('x_train', 'rb') as fp:
x_train = pickle.load(fp)
with open ('x_test', 'rb') as fp:
x_test = pickle.load(fp)
with open ('y_train', 'rb') as fp:
y_train = pickle.load(fp)
with open ('y_test', 'rb') as fp:
y_test = pickle.load(fp)
y_true = []
for i in (y_test):
if(i == "REAL"):
y_true.append(0)
else:
y_true.append(1)
y_true_nd = np.array(y_true)
x_train_data = []
for comment in x_train:
x_train_data.append(model.infer_vector(comment))
x_test_data = []
for comment in x_test:
x_test_data.append(model.infer_vector(comment))
classification_model = BernoulliNB(alpha=1.0, binarize=0.0, fit_prior=True, class_prior=None)
classification_model.fit(x_train_data, y_train)
#y_score = classification_model.decision_function(x_test_data)
#average_precision = average_precision_score(y_true_nd, y_score)
print "Naive Bayes : Accuracy on training data {}%".format(classification_model.score(x_train_data,y_train)*100)
print "Naive Bayes : Accuracy on testing data {}%".format(classification_model.score(x_test_data, y_test)*100)
#print('Average precision-recall score: {0:0.2f}'.format(average_precision))
from sklearn.ensemble import RandomForestClassifier
clf = RandomForestClassifier(n_jobs=-1)
clf.fit(x_train_data, y_train)
#y_score = clf.decision_function(x_test_data)
#average_precision = average_precision_score(y_true_nd, y_score)
print "Random Forest : Accuracy on training data {}%".format(clf.score(x_train_data,y_train)*100)
print "Random Forest : Accuracy on testing data {}%".format(clf.score(x_test_data, y_test)*100)
#print('Average precision-recall score: {0:0.2f}'.format(average_precision))
from sklearn.neural_network import MLPClassifier
clf = MLPClassifier(solver='lbfgs', alpha=1e-5,hidden_layer_sizes=(5, 2), random_state=1)
clf.fit(x_train_data, y_train)
#y_score = clf.decision_function(x_test_data)
#average_precision = average_precision_score(y_true_nd, y_score)
print "Neural Network : Accuracy on training data {}%".format(clf.score(x_train_data,y_train)*100)
print "Neural Network : Accuracy on testing data {}%".format(clf.score(x_test_data, y_test)*100)
#print('Average precision-recall score: {0:0.2f}'.format(average_precision))
from sklearn import svm
clf = svm.SVC(kernel='linear', C = 1.0)
clf.fit(x_train_data, y_train)
y_score = clf.decision_function(x_test_data)
average_precision = average_precision_score(y_true_nd, y_score)
print "SVM : Accuracy on training data {}%".format(clf.score(x_train_data,y_train)*100)
print "SVM : Accuracy on testing data {}%".format(clf.score(x_test_data, y_test)*100)
print('Average precision-recall score: {0:0.2f}'.format(average_precision))
#print "average precision" , average_precision
| [
"bdeshmu@ncsu.edu"
] | bdeshmu@ncsu.edu |
4e815bc0306f5511a443e68cd4a3e4f5159d45bc | 39d9ee62dbc96a1436129e36493d391533b48b09 | /run.py | 6e76244194a00546858bc0698f1d4435c5c8959f | [] | no_license | vinitjogani/third-umpire | d5d6ca1e6e596d1319291fdf2b43b71025dab0a1 | 9180a935fdfc40004d2aff3fa260ca40c9388f12 | refs/heads/master | 2020-09-04T08:12:37.282647 | 2019-11-30T04:12:09 | 2019-11-30T04:12:09 | 219,685,707 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 71 | py | import detection.classical.color_thresholding as module
# module.main() | [
"vnjogani@gmail.com"
] | vnjogani@gmail.com |
facabddc9959acc022e7ca8ea3b0dfe0e6ee7b59 | 861be85635ca28151d10b4a0665aac9e22c438d1 | /Class and Object/classstudent.py | 56165bf5d1b3c0c1fee2c9395b93fa6c627ca599 | [
"MIT"
] | permissive | tdkumaran/python-75-hackathon | a6e35af00612bb4e68c70f2d2be86ee9827561c8 | ce931147f4b465aa7e54c140d78e2d4a6c4e3634 | refs/heads/master | 2020-04-11T10:57:09.097907 | 2018-12-16T13:32:59 | 2018-12-16T13:32:59 | 161,731,825 | 0 | 0 | null | 2018-12-14T04:32:53 | 2018-12-14T04:32:53 | null | UTF-8 | Python | false | false | 551 | py | class Student:
def __init__(self, name, rollnumber):
self.name=name
self.rollnumber=rollnumber
def setage(self,age):
self.age=age
def setmark(self, mark):
self.mark=mark
def display(self):
print("Name of student:",self.name)
print("Roll number of student",self.rollnumber)
print("Age of student",self.age)
print("Mark of student",self.mark)
s1=Student('Ram',16)
s1.setage(17)
s1.setmark(100)
s1.display()
s2=Student('Vikash',10)
s2.setage(10)
s2.setmark(90)
s2.display()
| [
"tdkumaran99@gmail.com"
] | tdkumaran99@gmail.com |
23ecfe711e32da50fdcecb401ba0d0e90a8e159d | 19dda8b9ef951a3c640733a300e9c35e451c14e9 | /traphing/utils/file_system.py | 6ba60a8fb5081878d9217429c352ef5cdc9633e9 | [] | no_license | manuwhs/traphing | b47236e390cdbc7bd8f64a6341d8582754b4cd8f | 505b6680246bffce2e8bb82225d1eac20bddf5a2 | refs/heads/master | 2020-07-29T20:33:53.865869 | 2019-11-17T14:38:13 | 2019-11-17T14:38:13 | 209,950,140 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,400 | py | import os
import shutil
from os import listdir
from os.path import isfile, join
from distutils.dir_util import copy_tree
import sys
def add_system_path(path, position = 0):
sys.path.insert(position, path) # Adds higher directory to python modules path.
def create_folder_if_needed (folder):
if not os.path.exists(folder):
os.makedirs(folder)
def get_file_name(file_path):
aux = file_path.split("/")
return aux[-1]
def get_file_dir(file_path):
aux = file_path.split("/")
aux.pop(-1)
return "/".join(aux)
def get_all_paths(rootFolder, fullpath = "yes"):
## This function finds all the files in a folder
## and its subfolders
allPaths = []
for dirName, subdirList, fileList in os.walk(rootFolder): # FOR EVERY DOCUMENT
# print "dirName"
for fname in fileList:
# Read the file
path = dirName + '/' + fname;
if (fullpath == "yes"):
allPaths.append(os.path.abspath(path))
else:
allPaths.append(path)
return allPaths
def filenames_comp(x1, x2):
    """Compare two ``<dir>/<number>.<ext>`` paths by their numeric file stem.

    Returns 1 / -1 / 0 when the first number is greater / smaller / equal, so the
    function satisfies the comparator contract expected by ``functools.cmp_to_key``.
    (The original returned -1 for equal keys, which violates that contract.)
    """
    number1 = int(x1.split("/")[-1].split(".")[0])
    number2 = int(x2.split("/")[-1].split(".")[0])
    if number1 > number2:
        return 1
    if number1 < number2:
        return -1
    return 0
def filenames_comp_model_param(x1, x2):
    """Compare two ``<dir>/<name>:<number>.<ext>`` paths by the number after ':'.

    Returns 1 / -1 / 0 (greater / smaller / equal) so that the function is a
    valid comparator for ``functools.cmp_to_key``.
    (The original returned -1 for equal keys, which breaks that contract.)
    """
    number1 = int(x1.split("/")[-1].split(".")[0].split(":")[-1])
    number2 = int(x2.split("/")[-1].split(".")[0].split(":")[-1])
    if number1 > number2:
        return 1
    if number1 < number2:
        return -1
    return 0
def copy_file(file_source, file_destination, new_name=""):
    """Copy *file_source* into the directory *file_destination*.

    The destination directory is created when missing.  When *new_name* is
    non-empty the copy is stored under that name, otherwise the source file
    name is kept.  ``shutil.copy2`` is used so file metadata is preserved.

    (Cleanup: the original computed unused locals ``file_path`` and an
    initial ``file_name`` that was immediately overwritten.)
    """
    if new_name:
        file_name = new_name
    else:
        file_name = file_source.split("/")[-1]
    create_folder_if_needed(file_destination)
    shutil.copy2(file_source, file_destination + "/" + file_name)
def remove_files(folder, remove_subdirectories=False):
    """Delete every file directly inside *folder*.

    Sub-directories are removed (recursively) only when
    *remove_subdirectories* is True.  Failures are printed and skipped so the
    clean-up is best effort, as in the original implementation.
    """
    for entry in os.listdir(folder):
        entry_path = os.path.join(folder, entry)
        try:
            if os.path.isfile(entry_path):
                os.unlink(entry_path)
            elif remove_subdirectories and os.path.isdir(entry_path):
                shutil.rmtree(entry_path)
        except Exception as e:
            print(e)
def export_MQL5_files(MT5_folder):
    """
    Exports the MQL5 file codes into the corresponding MLQ5 folder so that MT5 can execute them.
    """
    # NOTE(review): the source path is relative to the current working directory,
    # so this is expected to be run from the repository's scripts folder — confirm.
    src_files_folder = "../traphing/MQL5/"
    MT5_folder = MT5_folder + "MQL5/"
    # Make sure the target Include/Scripts package folders exist before copying.
    create_folder_if_needed(MT5_folder+"Include/traphing/")
    create_folder_if_needed(MT5_folder+"Scripts/traphing/")
    # update=False: copy every file, not only those newer than the destination.
    copied_files = copy_tree(src_files_folder, MT5_folder, update = False)
    print("Copied files from " + src_files_folder + " to " + MT5_folder)
    print(" "+ "\n ".join(copied_files) )
def import_MQL5_files_for_library_update(MT5_folder):
    """
    Imports the modified files in the MT5 folder into the library codes for commiting changes.
    This is necessary because the MQL5 code files should be modified in the MT5 folder.
    """
    # Destination folders inside the repository (relative to the working directory).
    des_files_folder_include = "../traphing/MQL5/Include/traphing/"
    des_files_folder_scripts = "../traphing/MQL5/Scripts/traphing/"
    create_folder_if_needed(des_files_folder_include)
    create_folder_if_needed(des_files_folder_scripts)
    # Source folders inside the MetaTrader 5 installation.
    MT5_folder += "MQL5/"
    include_folder = MT5_folder + "Include/traphing/"
    scripts_folder = MT5_folder + "Scripts/traphing/"
    # Pull back only regular files; sub-directories are ignored.
    MT5_Include_files = [f for f in listdir(include_folder) if isfile(join(include_folder, f))]
    for filename in MT5_Include_files:
        # Only MQL5 header files ("*.mqh"; suffix compared without the dot).
        if filename[-3:] == "mqh":
            shutil.copy(include_folder + filename, des_files_folder_include + filename)
            print(des_files_folder_include + filename)
    MT5_Script_files = [f for f in listdir(scripts_folder) if isfile(join(scripts_folder, f))]
    for filename in MT5_Script_files:
        # Only MQL5 script files ("*.mq5").
        if filename[-3:] == "mq5":
            shutil.copy(scripts_folder + filename, des_files_folder_scripts + filename)
            print(des_files_folder_scripts + filename)
"https://manuDTU@bitbucket.org"
] | https://manuDTU@bitbucket.org |
bfb3b361ec398b391182456a6bd989a301674c53 | 26c566ee03a6b6752dc81a719c4eb237bf5e5c32 | /from_senior/bladder_dwi_2d_model/main.py | 9ef52811196719a0588e6dbcd413e4ea1e8f2b98 | [] | no_license | Zopek/bladder_old | 44c091b951e667aa1fe9f2b0dae158b3725bf2cd | 0f34b848c7c024f2ff9f91bef268002264433e19 | refs/heads/master | 2020-04-05T21:18:17.093834 | 2019-02-28T07:49:46 | 2019-02-28T07:49:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,091 | py | from __future__ import print_function, division
# import time
import torch
import torch.utils.data
import torch.utils.data.sampler
import torch.nn.functional
import os
import random
import numpy as np
import sklearn.metrics
import traceback
from bladder_dwi_dataset import BladderDwiDataset, MyPreprocessor, MyAugmentation, ToTensor
import collections
import csv
import argparse
import json
import multi_cam_unet
import os.path
import matplotlib.pyplot as plt
import torchvision.models.vgg
def get_iou(x, y):
    """Per-sample intersection-over-union of two boolean mask batches.

    Both tensors are flattened to [batch, -1]; the small epsilon keeps the
    ratio finite (and equal to ~1) when both masks are empty.
    """
    flat_x = x.view(x.size()[0], -1)
    flat_y = y.view(y.size()[0], -1)
    intersection = (flat_x & flat_y).float()
    union = (flat_x | flat_y).float()
    return (torch.sum(intersection, 1) + 1e-6) / (torch.sum(union, 1) + 1e-6)
def parse_args():
    """Parse the command-line flags used by this training / visual-test script."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--cfg_json', default='cfgs/test.json', type=str)
    parser.add_argument('--cv_id', default='0', type=str)
    parser.add_argument('--visual_test', default='', type=str)
    parser.add_argument('--visual_type', default='all', type=str)
    parser.add_argument('--batch_size', default=None, type=int)
    return parser.parse_args()
def normalize_images(image):
    """Min-max scale each image of the batch to [0, 1]; the input is not modified."""
    scaled = np.copy(image)
    for idx in range(len(scaled)):
        lo = np.min(scaled[idx])
        hi = np.max(scaled[idx])
        scaled[idx] = (scaled[idx] - lo) / (hi - lo)
    return scaled
def transpose(image):
    """Swap the last three axes of a 4-D batch: [b, d1, d2, d3] -> [b, d3, d2, d1]."""
    return np.transpose(image, (0, 3, 2, 1))
def plot_images(images, show_colorbar, name, subtitles=None):
    """Show *images* in a near-square subplot grid on one shared gray-scale range.

    ``name`` becomes the figure title; ``subtitles`` (optional, same length as
    *images*) labels the individual panels.
    """
    num_images = len(images)
    # Near-square grid: rows = floor(sqrt(n)), cols = ceil(n / rows).
    rows = int(np.sqrt(num_images))
    cols = int(np.ceil(num_images / float(rows)))
    # One shared value range so all panels are directly comparable.
    vmax = np.max(images)
    vmin = np.min(images)
    f = plt.figure()
    for i in range(num_images):
        ax = f.add_subplot(rows, cols, i + 1)
        ax.axis('off')
        im = ax.imshow(np.squeeze(images[i]), vmin=vmin, vmax=vmax, cmap='gray')
        if subtitles is not None:
            ax.set_title(subtitles[i])
        if show_colorbar:
            f.colorbar(im, ax=ax)
    f.suptitle(name)
    f.show()
def main():
    """Train (or visually test) the bladder-DWI 2-D multi-CAM U-Net.

    Reads a JSON config, builds train/val/test dataloaders, then runs a
    two-step schedule: step one optimizes only the CAM (MIL) losses, step two
    additionally optimizes the CAM/segmentation consistency loss.  Per-epoch
    metrics are appended to CSV logs and the best validation checkpoints
    (ROC AUC, IOU_CAM, IOU_SEG) are saved.

    NOTE(review): uses pre-0.4 PyTorch APIs (``Variable(volatile=...)``,
    ``loss.data[0]``, ``F.upsample``) — pinned to an old torch version.
    """
    random.seed()
    using_gpu = torch.cuda.is_available()
    args = parse_args()
    # NOTE(review): leftover debug prints.
    print('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA', args.visual_type)
    print('BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB', args.batch_size)
    with open(args.cfg_json, 'rb') as fd:
        cfg = json.load(fd)
    cv_id = args.cv_id
    # Config file name (without extension) namespaces all outputs.
    cfg_name = os.path.splitext(os.path.basename(args.cfg_json))[0]
    print(args.cfg_json, cfg_name)
    print(cfg)
    print(cv_id)
    # --visual_test holds a model-weights path; non-empty switches to evaluation mode.
    if args.visual_test != '':
        mode = 'visual_test'
        model_weights_path = args.visual_test
    else:
        mode = 'train'
        model_weights_path = ''
    # input dirs
    dataset_csv_dir = cfg['dataset_csv_dir']
    image_root_dir = cfg['image_root_dir']
    cancer_bboxes_root_dir = cfg['cancer_bboxes_root_dir']
    # output dirs
    model_weights_dir = cfg['model_weights_dir']
    # NOTE(review): hard-coded, machine-specific log directory.
    log_dir = '/DB/rhome/qyzheng/Desktop/qyzheng/PROGRAM/bladder/from_senior/bladder_dwi_2d_model/log'
    # dataset settings
    num_dataloader_workers = cfg['num_dataloader_workers']
    new_height = cfg['new_height']
    new_width = cfg['new_width']
    using_bladder_mask = cfg['using_bladder_mask']
    caching_data = cfg['caching_data']
    batch_size = cfg['batch_size']
    # model settings
    mil_pooling_type = cfg['mil_pooling_type']
    concat_pred_list = cfg['concat_pred_list']
    num_shared_encoders = cfg['num_shared_encoders']
    # training configurations
    num_step_one_epoches = cfg['num_step_one_epoches']
    num_step_two_epoches = cfg['num_step_two_epoches']
    base_lr = cfg['base_lr']
    loss_weights_list = cfg['loss_weights_list']
    dropout_prob_list = cfg['dropout_prob_list']
    weight_decay = cfg['weight_decay']
    # Command-line batch size overrides the config value.
    if args.batch_size is not None:
        batch_size = args.batch_size
    model_weights_dir = os.path.join(model_weights_dir, cfg_name)
    if os.path.exists(model_weights_dir):
        assert os.path.isdir(model_weights_dir)
    else:
        os.makedirs(model_weights_dir)
    log_dir = os.path.join(log_dir, cfg_name)
    if os.path.exists(log_dir):
        assert os.path.isdir(log_dir)
    else:
        os.makedirs(log_dir)
    # Keep a copy of the config next to the logs for reproducibility.
    if mode == 'train':
        with open(os.path.join(log_dir, 'cv_{}_cfg.json'.format(cv_id)), 'wb') as fd:
            json.dump(cfg, fd, sort_keys=True, indent=2)
    # prepare dataloaders (ordered so 'cv_train' runs before 'cv_val'/'test')
    phases = collections.OrderedDict()
    if mode == 'train':
        phases['cv_train'] = os.path.join(dataset_csv_dir, '{}_cv_train.csv'.format(cv_id))
    phases['cv_val'] = os.path.join(dataset_csv_dir, '{}_cv_val.csv'.format(cv_id))
    phases['test'] = os.path.join(dataset_csv_dir, 'test.csv')
    dataloaders = dict()
    for phase in phases:
        csv_path = phases[phase]
        is_training = 'train' in phase
        if 'cv' in phase:
            csv_path = csv_path.format(cv_id)
        preprocessor = MyPreprocessor(image_root_dir, cancer_bboxes_root_dir, new_height, new_width, using_bladder_mask,
                                      True)
        to_tensor = ToTensor()
        if is_training:
            # Training: augment and rebalance classes via weighted sampling.
            augmentation = MyAugmentation()
            dataset = BladderDwiDataset(csv_path, preprocessor, augmentation, to_tensor, caching_data)
            sampler = torch.utils.data.sampler.WeightedRandomSampler(dataset.get_weights(), len(dataset))
        else:
            # Evaluation: deterministic order, no augmentation.
            dataset = BladderDwiDataset(csv_path, preprocessor, None, to_tensor, caching_data)
            sampler = torch.utils.data.sampler.SequentialSampler(dataset)
        dataloader = torch.utils.data.DataLoader(dataset, batch_size, sampler=sampler,
                                                 num_workers=num_dataloader_workers, drop_last=is_training)
        dataloaders[phase] = dataloader
    # start training
    model = multi_cam_unet.UNet(1, concat_pred_list, num_shared_encoders, dropout_prob_list)
    if using_gpu:
        model = model.cuda()
        model = torch.nn.DataParallel(model)
    if model_weights_path != '':
        model.load_state_dict(torch.load(model_weights_path))
    params_to_opt = [param for param in model.parameters() if param.requires_grad]
    optimizer = torch.optim.Adam(params_to_opt, base_lr, weight_decay=weight_decay)
    best_val_iou_cam = 0
    best_val_iou_seg = 0
    best_val_roc_auc = 0
    for epoch in range(num_step_one_epoches + num_step_two_epoches):
        for phase in phases:
            is_training = 'train' in phase
            model.train(is_training)
            loss_cam_list = []
            loss_consistency_list = []
            iou_cam_list = []
            iou_seg_list = []
            score_array_list = []
            label_array_list = []
            for step, data in enumerate(dataloaders[phase]):
                image = data['image']
                label = data['label']
                cancer_bboxes_image = data['cancer_bboxes_image']
                if using_gpu:
                    image = image.cuda()
                    label = label.cuda()
                    cancer_bboxes_image = cancer_bboxes_image.cuda()
                image = torch.autograd.Variable(image, volatile=not is_training)
                label = torch.autograd.Variable(label)
                # Model returns several CAM maps plus a final score map (last element).
                preds_tuple = model(image)
                losses = []
                # Loss_CAM: multiple-instance pooling of each CAM to one slice-level score.
                for cam in preds_tuple[:-1]:
                    if mil_pooling_type == 'max':
                        score = torch.nn.functional.adaptive_max_pool2d(cam, (1, 1)).view(-1, 1)
                    elif mil_pooling_type == 'avg':
                        score = torch.nn.functional.adaptive_avg_pool2d(cam, (1, 1)).view(-1, 1)
                    else:
                        raise Exception('Unknown mil_pooling_type')
                    loss_cam = torch.nn.functional.binary_cross_entropy_with_logits(score, label)
                    loss_cam_value = loss_cam.data[0]
                    losses.append(loss_cam)
                # upsample the last cam to get the pseudo label
                pseudo_label = torch.nn.functional.upsample(cam, [new_height, new_width], mode='bilinear') > 0
                pseudo_label = pseudo_label.float()
                # Loss_Consistency: segmentation head should agree with the CAM pseudo label.
                score_map = preds_tuple[-1]
                loss_consistency = torch.nn.functional.binary_cross_entropy_with_logits(score_map, pseudo_label)
                loss_consistency_value = loss_consistency.data[0]
                # Consistency term only joins the objective in training step two.
                if epoch >= num_step_one_epoches:
                    losses.append(loss_consistency)
                # get total loss
                total_loss = 0.0
                for i, loss in enumerate(losses):
                    if loss_weights_list[i] != 0.0:
                        total_loss += loss_weights_list[i] * loss
                # optimize
                if is_training:
                    optimizer.zero_grad()
                    total_loss.backward()
                    optimizer.step()
                # summary of this step (IOUs are averaged over positive slices only)
                iou_cam = get_iou(pseudo_label.data > 0, cancer_bboxes_image > 0)
                iou_cam_mean = torch.sum(iou_cam * label.data.squeeze()) / (torch.sum(label.data.squeeze()) + 1e-6)
                iou_seg = get_iou(score_map.data > 0, cancer_bboxes_image > 0)
                iou_seg_mean = torch.sum(iou_seg * label.data.squeeze()) / (
                    torch.sum(label.data.squeeze()) + 1e-6)
                confusion_matrix = sklearn.metrics.confusion_matrix(torch.gt(label.data.cpu(), 0.5),
                                                                    torch.gt(score.data.cpu(), 0))
                # report of this step
                # print(
                #     'Epoch {:>3}, Phase {}, Step {:>4}, Loss_CAM={:.4f}, Loss_Consistency={:.4f}, IOU_CAM={:.4f}, IOU_SEG={:.4f}'.format(
                #         epoch, phase, step,
                #         loss_cam_value,
                #         loss_consistency_value, iou_cam_mean, iou_seg_mean))
                # print(confusion_matrix)
                # for loss in losses:
                #     print(loss.data[0], end=' ')
                # print()
                # summary of this epoch
                score_array_list.append(score.data.cpu().squeeze().numpy())
                label_array_list.append(label.data.cpu().squeeze().numpy())
                loss_cam_list.append(loss_cam_value)
                loss_consistency_list.append(loss_consistency_value)
                iou_cam_list.append(iou_cam.cpu().numpy())
                iou_seg_list.append(iou_seg.cpu().numpy())
                if mode == 'visual_test':
                    accession_number = np.array(data['accession_number'])
                    correct = (score.data.cpu().numpy() > 0) == label.data.cpu().numpy()
                    correct = np.squeeze(correct)
                    wrong = np.logical_not(correct)
                    # plot_idx = np.ones_like(correct, dtype=np.bool)
                    plot_idx = wrong
                    print(args.visual_type)
                    # Select which samples to plot: wrong / correct / all predictions.
                    if args.visual_type=='wrong':
                        plot_idx = wrong
                    elif args.visual_type=='correct':
                        plot_idx = correct
                    else:
                        plot_idx = np.ones_like(correct, dtype=np.bool) #all
                    image = transpose(image.data.cpu().numpy())
                    # Channels: presumably 0=ADC, 1=B=0, 2=B=1000 DWI — see commented ADC plot.
                    # plot_images(normalize_images(image[plot_idx, :, :, 0]), False, "ADC", accession_number[plot_idx])
                    plot_images(normalize_images(image[plot_idx, :, :, 1]), False, "B=0", accession_number[plot_idx])
                    plot_images(normalize_images(image[plot_idx, :, :, 2]), False, "B=1000", accession_number[plot_idx])
                    plot_images(transpose(cancer_bboxes_image.cpu().numpy())[plot_idx], False, "GT", accession_number[plot_idx])
                    # plot_images(transpose(cam.data.cpu().numpy())[plot_idx], False, "CAM", accession_number[plot_idx])
                    plot_images(transpose(pseudo_label.data.cpu().numpy())[plot_idx], False, "Prediction_CAM", accession_number[plot_idx])
                    # plot_images(transpose(score_map.data.cpu().numpy())[plot_idx], False, "score_map", accession_number[plot_idx])
                    # plot_images(transpose((score_map > 0).data.cpu().numpy())[plot_idx], False, "score_map>0", accession_number[plot_idx])
                    plt.show()
            # report of this epoch
            loss_cam = np.mean(loss_cam_list)
            loss_consistency = np.mean(loss_consistency_list)
            score = np.concatenate(score_array_list)
            label = np.concatenate(label_array_list).astype(np.int)
            iou_cam = np.concatenate(iou_cam_list)
            iou_cam = np.sum(iou_cam * label) / (np.sum(label) + 1e-6)
            iou_seg = np.concatenate(iou_seg_list)
            iou_seg = np.sum(iou_seg * label) / (np.sum(label) + 1e-6)
            confusion_matrix = sklearn.metrics.confusion_matrix(label, np.greater(score, 0))
            roc_auc = sklearn.metrics.roc_auc_score(label, score)
            print(
                'Epoch {:>3}, Phase {} Complete! Loss_CAM={:.4f}, Loss_Consistency={:.4f}, IOU_CAM={:.4f}, IOU_SEG={:.4f}, ROC_AUC={:.4f}'
                .format(epoch, phase, loss_cam, loss_consistency, iou_cam, iou_seg, roc_auc))
            print(confusion_matrix)
            if mode == 'train':
                try:
                    # saving log
                    with open(os.path.join(log_dir, "{}_{}.csv".format(cv_id, phase)), 'ab') as fd:
                        csv.writer(fd).writerow([epoch, phase, loss_cam, loss_consistency, iou_cam, iou_seg, roc_auc])
                except:
                    traceback.print_exc()
                if is_training:
                    try:
                        torch.save(model.state_dict(), os.path.join(model_weights_dir, "cv_{}_last.pth".format(cv_id)))
                    except:
                        traceback.print_exc()
                # Track the three best-so-far validation metrics; save a checkpoint for each.
                if 'val' in phase:
                    if roc_auc > best_val_roc_auc:
                        best_val_roc_auc = roc_auc
                        print('New best_val_roc_auc: {:.4f}, Epoch {}'.format(best_val_roc_auc, epoch))
                        try:
                            with open(os.path.join(log_dir, "best_val_roc_auc.txt"), 'w') as fd:
                                fd.write(str(best_val_roc_auc))
                            torch.save(model.state_dict(),
                                       os.path.join(model_weights_dir, "cv_{}_best_roc_auc.pth".format(cv_id)))
                        except:
                            traceback.print_exc()
                    if iou_cam > best_val_iou_cam:
                        best_val_iou_cam = iou_cam
                        print('New best_val_iou_cam: {:.4f}, Epoch {}'.format(best_val_iou_cam, epoch))
                        try:
                            with open(os.path.join(log_dir, "best_val_iou_cam.txt"), 'w') as fd:
                                fd.write(str(best_val_iou_cam))
                            torch.save(model.state_dict(),
                                       os.path.join(model_weights_dir, "cv_{}_best_iou_cam.pth".format(cv_id)))
                        except:
                            traceback.print_exc()
                    if iou_seg > best_val_iou_seg:
                        best_val_iou_seg = iou_seg
                        print('New best_val_iou_seg: {:.4f}, Epoch {}'.format(best_val_iou_seg, epoch))
                        try:
                            with open(os.path.join(log_dir, "best_val_iou_seg.txt"), 'w') as fd:
                                fd.write(str(best_val_iou_seg))
                            torch.save(model.state_dict(),
                                       os.path.join(model_weights_dir, "cv_{}_best_iou_seg.pth".format(cv_id)))
                        except:
                            traceback.print_exc()
if __name__ == '__main__':
main()
| [
"lethe-@sjtu.edu.cn"
] | lethe-@sjtu.edu.cn |
7ee23ab6019177bfce14a770291e1f269179d551 | 7b15cccdef7243668d2d2af1ad1816f1d168e014 | /bloque 6/funcion_resta.py | 7394193e946fc5ce29c6bdf61683f51b1690e1de | [] | no_license | sandroormeno/taller-de-python | 3fd4bac9c3832286f19c8dbe0d3f37cbfd6a0813 | fdd6d699f8a7aac1b9d77f051dcd78ddb090f725 | refs/heads/master | 2021-06-03T17:19:57.542322 | 2020-09-08T23:00:32 | 2020-09-08T23:00:32 | 148,956,799 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 222 | py | def suma(numero1, numero2):
print("Resultado : " + str( int(numero1) - int(numero2) ) )
# Interactive driver: ask for two values and print their difference.
print("Programa para restar valores")
num1 = input("primer valor: ")
num2 = input("segundo valor: ")
# NOTE(review): despite its name, suma() subtracts (this file is funcion_resta).
suma(num1, num2)
| [
"noreply@github.com"
] | noreply@github.com |
3922747c24aeae6863311beb748f65358b035f73 | 62e58c051128baef9452e7e0eb0b5a83367add26 | /edifact/D95A/DIRDEBD95AUN.py | 5415a9c13741232f73c37ae3b49aa4c18660d498 | [] | no_license | dougvanhorn/bots-grammars | 2eb6c0a6b5231c14a6faf194b932aa614809076c | 09db18d9d9bd9d92cefbf00f1c0de1c590fe3d0d | refs/heads/master | 2021-05-16T12:55:58.022904 | 2019-05-17T15:22:23 | 2019-05-17T15:22:23 | 105,274,633 | 0 | 0 | null | 2017-09-29T13:21:21 | 2017-09-29T13:21:21 | null | UTF-8 | Python | false | false | 4,507 | py | #Generated by bots open source edi translator from UN-docs.
from bots.botsconfig import *
from edifact import syntax
from recordsD95AUN import recorddefs
structure = [
{ID: 'UNH', MIN: 1, MAX: 1, LEVEL: [
{ID: 'BGM', MIN: 1, MAX: 1},
{ID: 'DTM', MIN: 1, MAX: 1},
{ID: 'BUS', MIN: 0, MAX: 1},
{ID: 'RFF', MIN: 0, MAX: 2, LEVEL: [
{ID: 'DTM', MIN: 0, MAX: 1},
]},
{ID: 'FII', MIN: 0, MAX: 5, LEVEL: [
{ID: 'CTA', MIN: 0, MAX: 1},
{ID: 'COM', MIN: 0, MAX: 5},
]},
{ID: 'NAD', MIN: 0, MAX: 3, LEVEL: [
{ID: 'CTA', MIN: 0, MAX: 1},
{ID: 'COM', MIN: 0, MAX: 5},
]},
{ID: 'LIN', MIN: 1, MAX: 9999, LEVEL: [
{ID: 'DTM', MIN: 0, MAX: 1},
{ID: 'RFF', MIN: 0, MAX: 2},
{ID: 'BUS', MIN: 0, MAX: 1},
{ID: 'FCA', MIN: 0, MAX: 1},
{ID: 'MOA', MIN: 0, MAX: 1, LEVEL: [
{ID: 'CUX', MIN: 0, MAX: 1},
{ID: 'DTM', MIN: 0, MAX: 2},
{ID: 'RFF', MIN: 0, MAX: 1},
]},
{ID: 'FII', MIN: 1, MAX: 1, LEVEL: [
{ID: 'CTA', MIN: 0, MAX: 1},
{ID: 'COM', MIN: 0, MAX: 5},
]},
{ID: 'NAD', MIN: 0, MAX: 3, LEVEL: [
{ID: 'CTA', MIN: 0, MAX: 1},
{ID: 'COM', MIN: 0, MAX: 5},
]},
{ID: 'INP', MIN: 0, MAX: 1, LEVEL: [
{ID: 'FTX', MIN: 0, MAX: 1},
{ID: 'DTM', MIN: 0, MAX: 2},
]},
{ID: 'GIS', MIN: 0, MAX: 10, LEVEL: [
{ID: 'MOA', MIN: 0, MAX: 1},
{ID: 'LOC', MIN: 0, MAX: 2},
{ID: 'NAD', MIN: 0, MAX: 1},
{ID: 'RCS', MIN: 0, MAX: 1},
{ID: 'FTX', MIN: 0, MAX: 10},
]},
{ID: 'PRC', MIN: 0, MAX: 1, LEVEL: [
{ID: 'FTX', MIN: 1, MAX: 1},
]},
{ID: 'SEQ', MIN: 1, MAX: 9999, LEVEL: [
{ID: 'MOA', MIN: 1, MAX: 1},
{ID: 'DTM', MIN: 0, MAX: 1},
{ID: 'RFF', MIN: 0, MAX: 3},
{ID: 'PAI', MIN: 0, MAX: 1},
{ID: 'FCA', MIN: 0, MAX: 1},
{ID: 'FII', MIN: 0, MAX: 3, LEVEL: [
{ID: 'CTA', MIN: 0, MAX: 1},
{ID: 'COM', MIN: 0, MAX: 5},
]},
{ID: 'NAD', MIN: 0, MAX: 3, LEVEL: [
{ID: 'CTA', MIN: 0, MAX: 1},
{ID: 'COM', MIN: 0, MAX: 5},
]},
{ID: 'INP', MIN: 0, MAX: 3, LEVEL: [
{ID: 'FTX', MIN: 0, MAX: 1},
{ID: 'DTM', MIN: 0, MAX: 2},
]},
{ID: 'GIS', MIN: 0, MAX: 10, LEVEL: [
{ID: 'MOA', MIN: 0, MAX: 1},
{ID: 'LOC', MIN: 0, MAX: 2},
{ID: 'NAD', MIN: 0, MAX: 1},
{ID: 'RCS', MIN: 0, MAX: 1},
{ID: 'FTX', MIN: 0, MAX: 10},
]},
{ID: 'PRC', MIN: 0, MAX: 1, LEVEL: [
{ID: 'FTX', MIN: 0, MAX: 5},
{ID: 'DOC', MIN: 0, MAX: 9999, LEVEL: [
{ID: 'MOA', MIN: 0, MAX: 5},
{ID: 'DTM', MIN: 0, MAX: 5},
{ID: 'RFF', MIN: 0, MAX: 5},
{ID: 'NAD', MIN: 0, MAX: 2},
{ID: 'CUX', MIN: 0, MAX: 5, LEVEL: [
{ID: 'DTM', MIN: 0, MAX: 1},
]},
{ID: 'AJT', MIN: 0, MAX: 100, LEVEL: [
{ID: 'MOA', MIN: 0, MAX: 1},
{ID: 'RFF', MIN: 0, MAX: 1},
{ID: 'FTX', MIN: 0, MAX: 5},
]},
{ID: 'DLI', MIN: 0, MAX: 1000, LEVEL: [
{ID: 'MOA', MIN: 1, MAX: 5},
{ID: 'PIA', MIN: 0, MAX: 5},
{ID: 'DTM', MIN: 0, MAX: 5},
{ID: 'CUX', MIN: 0, MAX: 5, LEVEL: [
{ID: 'DTM', MIN: 0, MAX: 1},
]},
{ID: 'AJT', MIN: 0, MAX: 10, LEVEL: [
{ID: 'MOA', MIN: 1, MAX: 1},
{ID: 'RFF', MIN: 0, MAX: 1},
{ID: 'FTX', MIN: 0, MAX: 5},
]},
]},
]},
{ID: 'GIS', MIN: 0, MAX: 1, LEVEL: [
{ID: 'MOA', MIN: 0, MAX: 5},
]},
]},
]},
]},
{ID: 'CNT', MIN: 0, MAX: 5},
{ID: 'AUT', MIN: 0, MAX: 5, LEVEL: [
{ID: 'DTM', MIN: 0, MAX: 1},
]},
{ID: 'UNT', MIN: 1, MAX: 1},
]},
]
| [
"jason.capriotti@gmail.com"
] | jason.capriotti@gmail.com |
9e27d7965e88ac8fc06892f207d612fc3ddb0151 | d2e0e9d7f6cdd7421e132dbb6805e62c57b2be1a | /Practice/Day_003/reduce_ex.py | e1ab0179aec8d80e0a055b516df2179aab6fdf0c | [] | no_license | zzid/Cloud_MSA_Multicampus_Education_First_phase_python | 349a1bd22ffbfc90b6221f1b2933fe9477f10f03 | acf0b644e16b71562b8e72e0d4a10a7719139bb4 | refs/heads/master | 2022-12-11T16:14:22.926014 | 2020-09-10T04:14:10 | 2020-09-10T04:14:10 | 279,503,665 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 174 | py | # reduce
from functools import reduce
arr = [1,2,3,4,5,6,7,8,9,10]
# Fold the list with addition: 1 + 2 + ... + 10 = 55.
result = reduce(lambda x,y : x+y, arr)
print(result)
# Fold the list with multiplication: 10! = 3628800.
result = reduce(lambda x,y : x*y, arr)
print(result) | [
"931010dy@gmail.com"
] | 931010dy@gmail.com |
134016a9c11a280367d93a1d7339504e86ae4cb0 | ee1724a6bc8b7ae8efeaae14fd3c93cb3d371aac | /arcgis_marketplace/fields.py | 80b51cbec7ce48da4e7982397e4da9789cb75834 | [
"MIT"
] | permissive | waffle-iron/arcgis-marketplace | 159ef928c115bb8bc27b81b614dbd7ea4eb3864a | e6c06bbde717f8a07c649d3d5db9f4dd929fe0f3 | refs/heads/master | 2021-01-22T03:30:24.605645 | 2017-05-25T08:35:23 | 2017-05-25T08:35:23 | 92,382,669 | 0 | 0 | null | 2017-05-25T08:35:21 | 2017-05-25T08:35:21 | null | UTF-8 | Python | false | false | 637 | py | import os.path
import zipfile
from django.db.models import FileField
from . import validators
class CompressField(FileField):
    """Django ``FileField`` that accepts zip uploads and unpacks them on save."""

    default_validators = [
        validators.validate_file_extension,
        validators.validate_zip_compression
    ]

    def pre_save(self, model_instance, add):
        """Persist the upload via ``FileField``, then extract new zip archives.

        The archive is unpacked into a sibling directory named after the file
        (extension stripped); extraction is skipped when that directory
        already exists.
        """
        stored = super().pre_save(model_instance, add)
        if stored._file is not None:
            target_dir = os.path.splitext(stored.path)[0]
            if not os.path.isdir(target_dir) and zipfile.is_zipfile(stored):
                with zipfile.ZipFile(stored) as archive:
                    archive.extractall(target_dir)
        return stored
| [
"dani.pyc+github@gmail.com"
] | dani.pyc+github@gmail.com |
13a95e0835eddd0fa3db784494dd57177d13927b | 8fcdcec1bf0f194d23bba4acd664166a04dc128f | /packages/grid_control_update.py | d21546c84a97777c2b4b0811e15519a89314cefb | [] | no_license | grid-control/grid-control | e51337dd7e5d158644a8da35923443fb0d232bfb | 1f5295cd6114f3f18958be0e0618ff6b35aa16d7 | refs/heads/master | 2022-11-13T13:29:13.226512 | 2021-10-01T14:37:59 | 2021-10-01T14:37:59 | 13,805,261 | 32 | 30 | null | 2023-02-19T16:22:47 | 2013-10-23T14:39:28 | Python | UTF-8 | Python | false | false | 1,227 | py | #!/usr/bin/env python
# | Copyright 2014-2017 Karlsruhe Institute of Technology
# |
# | Licensed under the Apache License, Version 2.0 (the "License");
# | you may not use this file except in compliance with the License.
# | You may obtain a copy of the License at
# |
# | http://www.apache.org/licenses/LICENSE-2.0
# |
# | Unless required by applicable law or agreed to in writing, software
# | distributed under the License is distributed on an "AS IS" BASIS,
# | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# | See the License for the specific language governing permissions and
# | limitations under the License.
import os, sys
def update_plugin_files():
    """Regenerate the plugin registry of every package directory next to this script."""
    base_dir = os.path.abspath(os.path.dirname(__file__))
    sys.path.append(base_dir)
    from hpfwk.hpf_plugin import create_plugin_file

    def _select(path):
        # Exclude data directories, compatibility shims and bundled third-party code.
        excluded = ['/share', '_compat_', '/requests', '/xmpp']
        return not any(pat in path for pat in excluded)

    for entry in sorted(os.listdir(base_dir)):
        package_dir = os.path.abspath(os.path.join(base_dir, entry))
        if os.path.isdir(package_dir):
            create_plugin_file(package_dir, _select)
if __name__ == '__main__':
update_plugin_files()
| [
"stober@cern.ch"
] | stober@cern.ch |
bbf4965b241b4632e2d4a349b3869fba4cb9831f | ca23b411c8a046e98f64b81f6cba9e47783d2584 | /poem/core/models_test.py | a0db94e4d47fae13658d07b17e82bce15a3e7851 | [
"CC-BY-4.0",
"Apache-2.0"
] | permissive | pdybczak/google-research | 1fb370a6aa4820a42a5d417a1915687a00613f9c | 0714e9a5a3934d922c0b9dd017943a8e511eb5bc | refs/heads/master | 2023-03-05T23:16:11.246574 | 2021-01-04T11:30:28 | 2021-01-04T11:30:28 | 326,629,357 | 1 | 0 | Apache-2.0 | 2021-02-01T12:39:09 | 2021-01-04T09:17:36 | Jupyter Notebook | UTF-8 | Python | false | false | 9,686 | py | # coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests model architecture functions."""
import tensorflow.compat.v1 as tf
from poem.core import common
from poem.core import models
tf.disable_v2_behavior()
class ModelsTest(tf.test.TestCase):
  """Shape and forward-pass tests for the simple base model and embedders."""

  def test_simple_model_shapes(self):
    """Checks global-variable shapes and output/activation shapes of simple_model."""
    # Shape = [4, 2, 3].
    input_features = tf.constant([[[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]],
                                  [[7.0, 8.0, 9.0], [10.0, 11.0, 12.0]],
                                  [[13.0, 14.0, 15.0], [16.0, 17.0, 18.0]],
                                  [[19.0, 20.0, 21.0], [22.0, 23.0, 24.0]]])
    output_sizes = {'a': 8, 'b': [4, 3]}
    outputs, activations = models.simple_model(
        input_features,
        output_sizes,
        sequential_inputs=False,
        is_training=True,
        num_bottleneck_nodes=16)
    # Expected variables: input FC, two FC blocks (two FCs each, all with
    # batch norm), a 16-node bottleneck and one logits head per output.
    expected_global_variable_shapes = {
        'SimpleModel/InputFC/Linear/weight:0': ([3, 1024]),
        'SimpleModel/InputFC/Linear/bias:0': ([1024]),
        'SimpleModel/InputFC/BatchNorm/gamma:0': ([1024]),
        'SimpleModel/InputFC/BatchNorm/beta:0': ([1024]),
        'SimpleModel/InputFC/BatchNorm/moving_mean:0': ([1024]),
        'SimpleModel/InputFC/BatchNorm/moving_variance:0': ([1024]),
        'SimpleModel/FullyConnectedBlock_0/FC_0/Linear/weight:0': ([1024,
                                                                    1024]),
        'SimpleModel/FullyConnectedBlock_0/FC_0/Linear/bias:0': ([1024]),
        'SimpleModel/FullyConnectedBlock_0/FC_0/BatchNorm/gamma:0': ([1024]),
        'SimpleModel/FullyConnectedBlock_0/FC_0/BatchNorm/beta:0': ([1024]),
        'SimpleModel/FullyConnectedBlock_0/FC_0/BatchNorm/moving_mean:0':
            ([1024]),
        'SimpleModel/FullyConnectedBlock_0/FC_0/BatchNorm/moving_variance:0':
            ([1024]),
        'SimpleModel/FullyConnectedBlock_0/FC_1/Linear/weight:0': ([1024,
                                                                    1024]),
        'SimpleModel/FullyConnectedBlock_0/FC_1/Linear/bias:0': ([1024]),
        'SimpleModel/FullyConnectedBlock_0/FC_1/BatchNorm/gamma:0': ([1024]),
        'SimpleModel/FullyConnectedBlock_0/FC_1/BatchNorm/beta:0': ([1024]),
        'SimpleModel/FullyConnectedBlock_0/FC_1/BatchNorm/moving_mean:0':
            ([1024]),
        'SimpleModel/FullyConnectedBlock_0/FC_1/BatchNorm/moving_variance:0':
            ([1024]),
        'SimpleModel/FullyConnectedBlock_1/FC_0/Linear/weight:0': ([1024,
                                                                    1024]),
        'SimpleModel/FullyConnectedBlock_1/FC_0/Linear/bias:0': ([1024]),
        'SimpleModel/FullyConnectedBlock_1/FC_0/BatchNorm/gamma:0': ([1024]),
        'SimpleModel/FullyConnectedBlock_1/FC_0/BatchNorm/beta:0': ([1024]),
        'SimpleModel/FullyConnectedBlock_1/FC_0/BatchNorm/moving_mean:0':
            ([1024]),
        'SimpleModel/FullyConnectedBlock_1/FC_0/BatchNorm/moving_variance:0':
            ([1024]),
        'SimpleModel/FullyConnectedBlock_1/FC_1/Linear/weight:0': ([1024,
                                                                    1024]),
        'SimpleModel/FullyConnectedBlock_1/FC_1/Linear/bias:0': ([1024]),
        'SimpleModel/FullyConnectedBlock_1/FC_1/BatchNorm/gamma:0': ([1024]),
        'SimpleModel/FullyConnectedBlock_1/FC_1/BatchNorm/beta:0': ([1024]),
        'SimpleModel/FullyConnectedBlock_1/FC_1/BatchNorm/moving_mean:0':
            ([1024]),
        'SimpleModel/FullyConnectedBlock_1/FC_1/BatchNorm/moving_variance:0':
            ([1024]),
        'SimpleModel/BottleneckLogits/weight:0': ([1024, 16]),
        'SimpleModel/BottleneckLogits/bias:0': ([16]),
        'SimpleModel/OutputLogits/a/weight:0': ([16, 8]),
        'SimpleModel/OutputLogits/a/bias:0': ([8]),
        'SimpleModel/OutputLogits/b/weight:0': ([16, 12]),
        'SimpleModel/OutputLogits/b/bias:0': ([12]),
    }
    self.assertDictEqual(
        {var.name: var.shape.as_list() for var in tf.global_variables()},
        expected_global_variable_shapes)
    self.assertCountEqual(outputs.keys(), ['a', 'b'])
    self.assertAllEqual(outputs['a'].shape.as_list(), [4, 2, 8])
    self.assertAllEqual(outputs['b'].shape.as_list(), [4, 2, 4, 3])
    self.assertCountEqual(activations.keys(),
                          ['base_activations', 'bottleneck_activations'])
    self.assertAllEqual(activations['base_activations'].shape.as_list(),
                        [4, 2, 1024])
    self.assertAllEqual(activations['bottleneck_activations'].shape.as_list(),
                        [4, 2, 16])
  def test_simple_model_forward_pass(self):
    """Checks exact output values with all-ones weights and no normalization."""
    input_features = tf.constant([[1.0, 2.0, 3.0]])
    output_sizes = {'a': 4}
    outputs, activations = models.simple_model(
        input_features,
        output_sizes,
        sequential_inputs=False,
        is_training=True,
        num_hidden_nodes=2,
        weight_initializer=tf.initializers.ones(),
        bias_initializer=tf.initializers.zeros(),
        weight_max_norm=0.0,
        use_batch_norm=False,
        dropout_rate=0.0,
        num_fcs_per_block=2,
        num_fc_blocks=3)
    with self.session() as sess:
      sess.run(tf.initializers.global_variables())
      outputs_result, activations_result = sess.run([outputs, activations])
    self.assertCountEqual(outputs_result.keys(), ['a'])
    self.assertAllClose(outputs_result['a'], [[1500.0, 1500.0, 1500.0, 1500.0]])
    self.assertCountEqual(activations_result.keys(), ['base_activations'])
    self.assertAllClose(activations_result['base_activations'],
                        [[750.0, 750.0]])
  def test_get_simple_model(self):
    """Same forward pass as above, but built through the get_model factory."""
    input_features = tf.constant([[1.0, 2.0, 3.0]])
    output_sizes = {'a': 4}
    model_fn = models.get_model(
        base_model_type=common.BASE_MODEL_TYPE_SIMPLE,
        is_training=True,
        num_hidden_nodes=2,
        weight_initializer=tf.initializers.ones(),
        bias_initializer=tf.initializers.zeros(),
        weight_max_norm=0.0,
        use_batch_norm=False,
        dropout_rate=0.0,
        num_fcs_per_block=2,
        num_fc_blocks=3)
    outputs, activations = model_fn(input_features, output_sizes)
    with self.session() as sess:
      sess.run(tf.initializers.global_variables())
      outputs_result, activations_result = sess.run([outputs, activations])
    self.assertCountEqual(outputs_result.keys(), ['a'])
    self.assertAllClose(outputs_result['a'], [[1500.0, 1500.0, 1500.0, 1500.0]])
    self.assertCountEqual(activations_result.keys(), ['base_activations'])
    self.assertAllClose(activations_result['base_activations'],
                        [[750.0, 750.0]])
  def test_get_simple_point_embedder(self):
    """Checks output shapes of the point-embedding head."""
    # Shape = [4, 2, 3].
    input_features = tf.constant([[[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]],
                                  [[7.0, 8.0, 9.0], [10.0, 11.0, 12.0]],
                                  [[13.0, 14.0, 15.0], [16.0, 17.0, 18.0]],
                                  [[19.0, 20.0, 21.0], [22.0, 23.0, 24.0]]])
    embedder_fn = models.get_embedder(
        base_model_type=common.BASE_MODEL_TYPE_SIMPLE,
        embedding_type=common.EMBEDDING_TYPE_POINT,
        num_embedding_components=3,
        embedding_size=16,
        is_training=True)
    outputs, activations = embedder_fn(input_features)
    self.assertCountEqual(outputs.keys(), [common.KEY_EMBEDDING_MEANS])
    self.assertAllEqual(outputs[common.KEY_EMBEDDING_MEANS].shape.as_list(),
                        [4, 2, 3, 16])
    self.assertCountEqual(activations.keys(), ['base_activations'])
    self.assertAllEqual(activations['base_activations'].shape.as_list(),
                        [4, 2, 1024])
  def test_get_simple_gaussian_embedder(self):
    """Checks output shapes (means, stddevs, samples) of the Gaussian embedder."""
    # Shape = [4, 2, 3].
    input_features = tf.constant([[[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]],
                                  [[7.0, 8.0, 9.0], [10.0, 11.0, 12.0]],
                                  [[13.0, 14.0, 15.0], [16.0, 17.0, 18.0]],
                                  [[19.0, 20.0, 21.0], [22.0, 23.0, 24.0]]])
    embedder_fn = models.get_embedder(
        base_model_type=common.BASE_MODEL_TYPE_SIMPLE,
        embedding_type=common.EMBEDDING_TYPE_GAUSSIAN,
        num_embedding_components=3,
        embedding_size=16,
        num_embedding_samples=32,
        is_training=True,
        weight_max_norm=0.0)
    outputs, activations = embedder_fn(input_features)
    self.assertCountEqual(outputs.keys(), [
        common.KEY_EMBEDDING_MEANS,
        common.KEY_EMBEDDING_STDDEVS,
        common.KEY_EMBEDDING_SAMPLES,
    ])
    self.assertAllEqual(outputs[common.KEY_EMBEDDING_MEANS].shape.as_list(),
                        [4, 2, 3, 16])
    self.assertAllEqual(outputs[common.KEY_EMBEDDING_STDDEVS].shape.as_list(),
                        [4, 2, 3, 16])
    self.assertAllEqual(outputs[common.KEY_EMBEDDING_SAMPLES].shape.as_list(),
                        [4, 2, 3, 32, 16])
    self.assertCountEqual(activations.keys(), ['base_activations'])
    self.assertAllEqual(activations['base_activations'].shape.as_list(),
                        [4, 2, 1024])
if __name__ == '__main__':
tf.test.main()
| [
"copybara-worker@google.com"
] | copybara-worker@google.com |
7db0bf2f99225efd2ff0ee2f98dde116115ffc7f | 6ede2868ca8295e00fd9b99aa9d3923a1e7e063b | /deep_learning_keras/ch03/cifar10_predict.py | 881339732908a79b51a5b6578e7eceadb3b0c9a5 | [] | no_license | takuya-teramoto/python_test | 229771d4dc72a31fcd184b495e662864af7760e3 | e7b7a4b633948610f1b66a6ed8ee6a0ddae7346a | refs/heads/master | 2022-12-17T22:33:42.133478 | 2021-08-11T11:48:59 | 2021-08-11T11:48:59 | 184,420,995 | 0 | 0 | null | 2022-11-22T02:42:38 | 2019-05-01T13:34:37 | Jupyter Notebook | UTF-8 | Python | false | false | 900 | py | from pathlib import Path
import numpy as np
from PIL import Image
from keras.models import load_model
# Paths to the trained Keras model checkpoint and the demo images.
model_path = "logdir_cifar10_deep_with_aug/model_file.hdf5"
images_folder = "sample_images"
# load model
model = load_model(model_path)
# CIFAR-10 input geometry: width, height, channels.
image_shape = (32, 32, 3)
# load images
def crop_resize(image_path):
    """Load an image, crop a top-left square, scale to the model input size,
    and return it as a float32 array normalized to [0, 1]."""
    source = Image.open(image_path)
    side = min(source.size)
    square = source.crop((0, 0, side, side))
    scaled = square.resize(image_shape[:2])  # width x height
    pixels = np.array(scaled).astype("float32")
    pixels /= 255
    return pixels
# Collect the sample PNGs and run them through the classifier.
# NOTE(review): Path.glob() ordering is filesystem-dependent; the assertions
# below presume the cat image sorts before the dog image — verify filenames.
folder = Path(images_folder)
image_paths = [str(f) for f in folder.glob("*.png")]
images = [crop_resize(p) for p in image_paths]
images = np.asarray(images)
# CIFAR-10 class ids: 3 == cat, 5 == dog.
predicted = model.predict_classes(images)
assert predicted[0] == 3, "image should be cat."
assert predicted[1] == 5, "image should be dog."
print("You can detect cat & dog!")
| [
"t.teramon23@gmail.com"
] | t.teramon23@gmail.com |
0817e8833e06cdbb3dc7357bbdcedcc83fb04a46 | 73fcadae6177ab973f1aa3ffe874ac3fadb52312 | /server/fta/utils/i18n.py | 4f91cd88e9509cabeab6ce284564a7f4a93d9ea7 | [
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"BSL-1.0",
"Apache-2.0"
] | permissive | huang1125677925/fta | 352cd587aaca3d3149516345559d420c41d1caf4 | a50a3c498c39b14e7df4a0a960c2a1499b1ec6bb | refs/heads/master | 2023-03-18T16:08:40.904716 | 2019-02-22T09:35:23 | 2019-02-22T09:35:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,745 | py | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available.
Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
""" # noqa
import logging
import os.path
import arrow
import pytz as tz
from babel import support
from fta.utils.lazy import LazyString
logger = logging.getLogger(__name__)
class Singleton(type):
    """Metaclass that caches a single instance per class."""

    # Maps each class to its one cached instance.
    _instances = {}

    def __call__(cls, *args, **kwargs):
        instance = cls._instances.get(cls)
        if instance is None:
            instance = super(Singleton, cls).__call__(*args, **kwargs)
            cls._instances[cls] = instance
        return instance
class I18N(object):
    """Process-wide holder of the current business (biz) id; resolves the
    locale and timezone used by the translation helpers in this module.
    """
    # NOTE(review): `__metaclass__` is Python 2 syntax. Under Python 3 this
    # attribute is ignored and I18N is NOT a singleton — confirm the target
    # interpreter version.
    __metaclass__ = Singleton
    def __init__(self):
        # Globally unique; changing it switches the language and timezone.
        self.cc_biz_id = None
        from fta import settings
        self.default_locale = settings.DEFAULT_LOCALE
        self.default_timezone = settings.DEFAULT_TIMEZONE
        # Cache of loaded Babel translation catalogs, keyed by locale.
        self.translations = {}
        # Optional gettext domain passed to Translations.load().
        self.domain = None
    def set_biz(self, cc_biz_id):
        """Switch the active business id (affects locale/timezone lookups)."""
        self.cc_biz_id = cc_biz_id
    @property
    def translation_directories(self):
        """Yield the directories that contain translation catalogs."""
        BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        yield os.path.join(BASE_DIR, 'locale')
    def locale_best_match(self, locale):
        """Normalize differently-spelled locale codes to a supported locale."""
        if locale.lower() in ['zh', 'zh_cn', 'zh-cn']:
            return 'zh_Hans_CN'
        return 'en'
    def get_locale(self):
        """Resolve the locale from the current business id (CC app), falling
        back to the configured default on any failure.
        """
        if not self.cc_biz_id:
            return self.default_locale
        try:
            from project.utils import query_cc
            locale = query_cc.get_app_by_id(self.cc_biz_id).get('Language')
            if locale:
                return self.locale_best_match(locale)
            else:
                return self.default_locale
        except Exception:
            return self.default_locale
    def get_timezone(self):
        # NOTE(review): on success this returns the timezone *name* (str) from
        # _get_timezone(), but on failure a pytz tzinfo object. arrow's .to()
        # accepts both, yet the mixed return type is fragile — confirm callers.
        try:
            timezone = self._get_timezone()
        except Exception:
            timezone = tz.timezone(self.default_timezone)
        return timezone
    def _get_timezone(self):
        """Resolve the timezone name from the current business id, falling
        back to the configured default on any failure.
        """
        if not self.cc_biz_id:
            return self.default_timezone
        try:
            from project.utils import query_cc
            timezone = query_cc.get_app_by_id(self.cc_biz_id).get('TimeZone')
            if timezone:
                return timezone
            else:
                return self.default_timezone
        except Exception:
            return self.default_timezone
    def get_translations(self):
        """Load (and cache) the Babel translation catalog for the active locale."""
        locale = self.get_locale()
        if locale not in self.translations:
            translations = support.Translations()
            for dirname in self.translation_directories:
                catalog = support.Translations.load(
                    dirname,
                    [locale],
                    self.domain,
                )
                translations.merge(catalog)
                # Preserve the plural-form rule of the merged catalog.
                if hasattr(catalog, 'plural'):
                    translations.plural = catalog.plural
            logger.info('load translations, %s=%s', locale, translations)
            self.translations[locale] = translations
        return self.translations[locale]
# Module-level shared instance used by the translation helpers below.
i18n = I18N()
def gettext(string, **variables):
    """Translate ``string`` via the current catalog, then apply ``%`` interpolation
    with ``variables`` (if any). Falls back to the untranslated string when no
    catalog is available."""
    translations = i18n.get_translations()
    if translations is None:
        translated = string
    else:
        translated = translations.ugettext(string)
    if variables:
        return translated % variables
    return translated
def ngettext(singular, plural, n):
    """Pluralized translation of ``singular``/``plural`` for the count ``n``."""
    translations = i18n.get_translations()
    if translations is None:
        return singular
    return translations.ngettext(singular, plural, n)
def lazy_gettext(string, **variables):
    """Like :func:`gettext`, but the returned value is lazy: it is only
    translated when actually rendered as a string.

    Example::

        hello = lazy_gettext(u'Hello World')

        @app.route('/')
        def index():
            return unicode(hello)
    """
    lazy = LazyString(gettext, string, **variables)
    return lazy
_ = gettext
def arrow_localtime(value, timezone=None):
    """``value`` must be a UTC time; return it converted to local time as an
    arrow object. When ``timezone`` is falsy the business timezone is used.
    """
    value = arrow.get(value).replace(tzinfo="utc")
    if not timezone:
        timezone = i18n.get_timezone()
    value = value.to(timezone)
    return value
def localtime(value, timezone=None):
    """``value`` must be a UTC time; return the local time as a ``datetime``."""
    value = arrow_localtime(value, timezone)
    value = value.datetime
    return value
def arrow_now():
    """Current time in the business timezone, as an arrow object."""
    utcnow = arrow.utcnow()
    timezone = i18n.get_timezone()
    return utcnow.to(timezone)
def now():
    """Current time in the business timezone, as a ``datetime``."""
    return arrow_now().datetime
def lazy_join(iterable, word):
    """Concatenate the items of ``iterable`` with ``word`` between them.

    Unlike ``str.join`` this only relies on the ``+`` operator, so it also
    works with lazily-translated string objects.
    """
    result = ''
    for index, item in enumerate(iterable):
        if index:
            result = result + word
        result = result + item
    return result
| [
"mycyzs@163.com"
] | mycyzs@163.com |
759e1a1dfc6292b7a431ed7b0d17facf1dd592bd | c0d706100d48e95a40bbe650d6bb5d8009fd72c1 | /day2/ConsecutiveNumSumRowWise.py | e43c0238614c60b27e57a92b952375f178397665 | [] | no_license | yamendrasinganjude/200244525045_ybs | 50946c587484c70346184c2b0a72748f022a3114 | 1e211aa44099bf0f06fba595c0b85d0001f5ce0f | refs/heads/master | 2022-10-07T04:34:10.690663 | 2020-06-04T12:38:28 | 2020-06-04T12:38:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 234 | py | '''
1
3 5
7 9 13
... so on consecutive odd numbers
suppose user gives row 2 then
3 + 5 = 8
so 8 is output
'''
def row_wise_sum(num):
    """Sum of row ``num`` of the consecutive-odd-number triangle.

    Row n holds n consecutive odd numbers and their sum is n**3
    (e.g. row 2 is 3 + 5 = 8).
    """
    return num * num * num
# Read the 1-based row number from the user.
num = int(input("Enter a Num: "))
print("Sum is ",row_wise_sum(num)) | [
"vsinganjude8@gmail.com"
] | vsinganjude8@gmail.com |
82988ae8e9e0bde71d5cd0e9dc098418b05c6a93 | 15ab524701f1caf6de68ec0d1855824d9d486acd | /venv/bin/pip3 | 0400b00812531fd7c9c1fe734037dd900e6fd74d | [] | no_license | Noura-alh/DecisionTree | edd2bcfd7898c909084b2d0954d815b5aaf5d26c | 71ba73f9d599069bc49cfe26fab0de6af659e99d | refs/heads/master | 2020-04-24T05:09:46.374353 | 2019-02-20T18:29:52 | 2019-02-20T18:29:52 | 171,727,422 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 400 | #!/Users/Noura/Desktop/DecisionTree/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==10.0.1', 'console_scripts', 'pip3')()
)
| [
"nourahnasser2@gmail.com"
] | nourahnasser2@gmail.com | |
8bf168f69bf43a76c958011e71d6a0f1786e007e | f22f9202cb3e9eb0d558ac98ab52b41cfaed8bbd | /PreProcessData/QueryRetrievalModel.py | fb58c5944b063589a229e63d2738a6217ff6da5b | [] | no_license | trees2brady/Information_Retrieval | 8f9e9eb57b24a0b81fe3dbfa5ba650a832ea500d | 55f0c80625e205ac2d49f42a76208b756486373e | refs/heads/master | 2023-08-06T15:09:23.060355 | 2021-09-17T23:30:04 | 2021-09-17T23:30:04 | 403,128,220 | 0 | 0 | null | 2021-09-17T20:50:09 | 2021-09-04T18:23:33 | PowerShell | UTF-8 | Python | false | false | 160 | py | from MyIndexReader import MyIndexReader
class QueryRetrievalModel:
    """Retrieval model that scores documents against queries using an
    inverted index."""
    def __init__(self):
        # Smoothing constant; presumably the Dirichlet-prior mu used in
        # query-likelihood scoring — TODO confirm against the scoring code.
        self.MU = 2000.0
        # Index reader used to fetch postings/statistics.
        self.indexReader = MyIndexReader()
"trees2brady@gmail.com"
] | trees2brady@gmail.com |
7df0547fd8db3df9594c88f91a47ed3c7553f6fa | e48ad73c2ba628e6b186465391370cbd53577b85 | /apps/inflammation.py | 522902674967a8e2b9c67e4c10d956712986fba0 | [] | no_license | award7/ClinicalTrialDashboard | 814e09c61ce375e9fbedd4ed81bc0def823a092c | 4ec69fbf0dba320fa31359fa4b67a930cd36782c | refs/heads/master | 2023-08-30T05:09:18.336866 | 2021-10-08T18:19:41 | 2021-10-08T18:19:41 | 415,063,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,007 | py | # dash imports
import dash_core_components as dcc
import dash_html_components as html
import plotly.express as px
import dash_table
# from dash.dependencies import Input, Output
# data wrangling imports
import pandas as pd
# local imports
# from app import app
# Static page layout for the inflammation-data view: a table plus a graph
# for each assay (CBC, cytokines, FACS). All components are empty shells
# with ids; presumably callbacks elsewhere populate them — TODO confirm.
layout = html.Div([
    html.H1('Inflammation Data'),
    html.Br(),
    # CBC Table
    html.H2('CBC Data'),
    dash_table.DataTable(
        id='inflammation-cbc-table'
    ),
    html.Br(),
    # CBC graph
    dcc.Graph(
        id='inflammation-cbc-graph'
    ),
    html.Br(),
    # cytokines table
    html.H2('Cytokine Data'),
    dash_table.DataTable(
        id='inflammation-cytokine-table'
    ),
    html.Br(),
    # cytokine graph
    dcc.Graph(
        id='inflammation-cytokine-graph'
    ),
    html.Br(),
    # FACS Table?
    html.H2('FACS Data'),
    dash_table.DataTable(
        id='inflammation-facs-table'
    ),
    html.Br(),
    # FACS graph
    dcc.Graph(
        id='inflammation-facs-graph'
    ),
    ])
"award7@wisc.edu"
] | award7@wisc.edu |
f1a7cd0794f70795e2ad2d9c5e9e4b4688fc34d2 | 05427a12a61964f36869ef85a3c212c2a0664881 | /manuscript_files_all/mouse_prediction_analysis/feature_importances3Fold.py | d26d2aae9e36cf6d69481af51845b7788727e1fb | [] | no_license | bpt26/tRAP | 9a72673ca349cf0959e5949f49e3ca2778f646e8 | 103ae1e8289042df2438e5de38db8ceb83148ddc | refs/heads/master | 2023-05-11T05:40:24.447383 | 2021-09-02T07:02:04 | 2021-09-02T07:02:04 | 175,748,772 | 6 | 1 | null | 2020-01-17T17:12:18 | 2019-03-15T04:32:40 | Python | UTF-8 | Python | false | false | 12,515 | py | #!/usr/bin/env python3
# Name: Bryan Thornlow
# Date: 11/16/2017
# classifierv4.py
import sys
import scipy
import matplotlib
import sklearn
from sklearn import svm
import numpy as np
import matplotlib.pyplot as plt
from sklearn import decomposition
from sklearn import datasets
from sklearn import tree
from sklearn.ensemble import RandomForestClassifier
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.model_selection import cross_validate
from sklearn.model_selection import cross_val_predict
from sklearn.impute import SimpleImputer
from sklearn.metrics import roc_curve, auc
from sklearn.svm import SVC
def buildClassifier():
    """Train RF/LR/SVM models on human tRNA features, evaluate them on mouse
    data and with 3-fold CV on human data, write per-tRNA prediction files,
    and save a two-panel ROC figure (3Fold.pdf).

    Side effects: reads humanCpGTrainingSet.tsv and mousetRNADataCpG.tsv,
    writes mousePredictionsNew3Fold.txt, humanCVPredictionsChanged3Fold.txt,
    and 3Fold.pdf. NOTE(review): the open() file handles are never closed.
    """
    # Human-readable labels for each feature column (used for plots/reports).
    labelsDict = {}
    labelsDict['tRNAPhyloPAvg'] = 'Average PhyloP\nScore in tRNA\nGene Sequence'
    labelsDict['5PhyloPAvg'] = "Average PhyloP\nScore in\n5' Flanking Region"
    labelsDict['CpGOvrPct'] = 'Percentage of CpG\nDinucleotides Across\ntRNA Locus'
    labelsDict['ObsExp'] = 'Observed/Expected\nCpG Islands Score\nAcross tRNA Locus'
    labelsDict['ObsExpUp'] = 'Observed/Expected\nCpG Islands Score\nUpstream of tRNA Gene'
    labelsDict['GenBit'] = 'tRNAscan-SE General\nBit Score'
    labelsDict['tRNA10kb'] = 'tRNA Genes within 10 Kilobases'
    labelsDict['Prot75kb'] = 'Exons within 75 Kilobases'
    labelsDict['TTTT'] = 'Distance to Nearest TTTT\nTranscription Termination\nSequence'
    labelsDict['Codon'] = 'tRNAs Corresponding\nto the Same Codon'
    labelsDict['MFE'] = 'Constrained Minimum\nFree Energy'
    # ---- Load the human training set; '?' fields become NaN (imputed below).
    myHumanData = []
    myLabels = []
    myHumanNames = []
    imp_mean = SimpleImputer(missing_values=np.nan, strategy='mean')
    for line in open('humanCpGTrainingSet.tsv'):
        splitLine = (line.strip()).split('\t')
        if (splitLine[0]) == 'tRNA':
            myHeader = []
            for k in splitLine[1:]:
                myHeader.append(k)
        else:
            myHumanData.append(makeFloat(splitLine[1:-1]))
            myHumanNames.append(splitLine[0])
            if str(splitLine[-1]) in ['active','1']:
                myLabels.append(1)
            elif str(splitLine[-1]) in ['inactive','0']:
                myLabels.append(0)
    imp_mean.fit_transform(myHumanData)
    myHumanDataReplaced = imp_mean.transform(myHumanData)
    # ---- Load the mouse evaluation set the same way.
    myMouseData = []
    myMouseLabels = []
    myMouseNames = []
    imp_mean = SimpleImputer(missing_values=np.nan, strategy='mean')
    for line in open('mousetRNADataCpG.tsv'):
        splitLine = (line.strip()).split('\t')
        if (splitLine[0]) == 'tRNA':
            myMouseHeader = splitLine
        else:
            myMouseData.append(makeFloat(splitLine[1:-1]))
            myMouseNames.append(splitLine[0])
            if str(splitLine[-1]) in ['active','1']:
                myMouseLabels.append(1)
            elif str(splitLine[-1]) in ['inactive','0']:
                myMouseLabels.append(0)
    imp_mean.fit_transform(myMouseData)
    myMouseDataReplaced = imp_mean.transform(myMouseData)
    # ---- Train on human, report misclassified mouse tRNAs and metrics.
    clf = RandomForestClassifier(n_estimators=250, max_depth=4, random_state=19, oob_score=True, n_jobs=8, min_samples_split=2)
    clf.fit(myHumanDataReplaced, myLabels)
    #print(clf.score(myMouseDataReplaced,myMouseLabels))
    myPredictions = clf.predict(myMouseDataReplaced)
    for i in range(0,len(myPredictions)):
        if not myPredictions[i] == myMouseLabels[i]:
            print(myMouseNames[i], myPredictions[i], myMouseLabels[i])
    cM = confusionMatrix(clf.predict(myMouseDataReplaced),myMouseLabels)
    # for i in range(0,len(myMouseLabels)):
    #     if not myPredictions[i] == myMouseLabels[i]:
    #         print(myMouseNames[i], myPredictions[i], myMouseLabels[i])
    print(cM)
    print(getAccuracy(cM))
    print(getScore(clf.predict_proba(myMouseDataReplaced),myMouseLabels))
    # ---- Retrain (different seed) and write signed mouse probabilities.
    clf = RandomForestClassifier(n_estimators=250, max_depth=4, random_state=49, oob_score=True, n_jobs=8, min_samples_split=2)
    clf.fit(myHumanDataReplaced, myLabels)
    myPredictions = clf.predict_proba(myMouseDataReplaced)
    myOutString = ''
    for i in range(0,len(myPredictions)):
        myOutString += myMouseNames[i]+'\t'
        if float(myPredictions[i][0]) > float(myPredictions[i][1]):
            myOutString += '-'+str(myPredictions[i][0])+'\tinactive\n'
        else:
            myOutString += str(myPredictions[i][1])+'\tactive\n'
    open('mousePredictionsNew3Fold.txt', 'w').write(myOutString)
    """
    Our final model uses a bag size of 100%, 200 iterations, evaluation of 2 attributes at each node,
    a minimum variance of 1e-4 per split, and a maximum depth of 5 nodes.
    """
    # ---- 3-fold cross-validation on the human data.
    clf = RandomForestClassifier(n_estimators=250, max_depth=4, random_state=49, oob_score=True, n_jobs=8, min_samples_split=2)
    clf.fit(myHumanDataReplaced, myLabels)
    cvPredictions1 = cross_val_predict(clf, myHumanDataReplaced, myLabels, cv=3, method='predict')
    print(cvPredictions1)
    cM = confusionMatrix(cvPredictions1, myLabels)
    for i in range(0,len(cvPredictions1)):
        if not cvPredictions1[i] == myLabels[i]:
            print(myHumanNames[i], cvPredictions1[i], myLabels[i])
    print(cM)
    print(getAccuracy(cM))
    cvPredictions = cross_val_predict(clf, myHumanDataReplaced, myLabels, cv=3, method='predict_proba')
    #print(cvPredictions)
    print(getScore(cvPredictions, myLabels))
    myOutString = ''
    for i in range(0,len(cvPredictions)):
        myOutString += myHumanNames[i]+'\t'
        if float(cvPredictions[i][0]) > float(cvPredictions[i][1]):
            myOutString += '-'+str(cvPredictions[i][0])+'\tinactive\n'
        else:
            myOutString += str(cvPredictions[i][1])+'\tactive\n'
    open('humanCVPredictionsChanged3Fold.txt', 'w').write(myOutString)
    # ---- Build the two-panel ROC figure: panel A = human CV, panel B = mouse.
    fig_width = 14
    fig_height = 7
    plt.figure(figsize=(fig_width, fig_height))
    panel_width = 0.4
    panel_height = 0.8
    panel_total_height = (panel_height*1)
    extra_y_space = 1 - panel_total_height
    above_below = extra_y_space/2
    panel_total_width = (panel_width*2)
    extra_x_space = 1 - panel_total_width
    left_right = extra_y_space/3
    panel1 = plt.axes([left_right, (1-panel_height)/2, panel_width, panel_height], frameon=True)
    panel2 = plt.axes([1-panel_width-left_right, (1-panel_height)/2, panel_width, panel_height], frameon=True)
    clf = RandomForestClassifier(n_estimators=250, max_depth=4, random_state=49, oob_score=True, n_jobs=8, min_samples_split=2)
    clf.fit(myHumanDataReplaced, myLabels)
    cvPredictions = cross_val_predict(clf, myHumanDataReplaced, myLabels, cv=3, method='predict_proba')
    fpr, tpr, thresholds = roc_curve(myLabels, cvPredictions[:,1])
    roc_auc = auc(fpr, tpr)
    panel1.plot(fpr, tpr, color='b', label='Random Forest (AUC = %0.3f)' % (roc_auc))
    clf = LogisticRegression(random_state=11, solver='lbfgs', multi_class='multinomial')
    clf.fit(myHumanDataReplaced, myLabels)
    cvPredictions = cross_val_predict(clf, myHumanDataReplaced, myLabels, cv=3, method='predict_proba')
    fpr, tpr, thresholds = roc_curve(myLabels, cvPredictions[:,1])
    roc_auc = auc(fpr, tpr)
    panel1.plot(fpr, tpr, color='r', label='Logistic Regression (AUC = %0.3f)' % (roc_auc))
    print(clf.coef_)
    clf = SVC(probability=True, gamma='auto', kernel='linear')
    clf.fit(myHumanDataReplaced, myLabels)
    cvPredictions = cross_val_predict(clf, myHumanDataReplaced, myLabels, cv=3, method='predict_proba')
    # NOTE(review): BUG — fpr/tpr are not recomputed from the SVM's
    # cvPredictions here (no roc_curve call), so the yellow "SVM" curve in
    # panel 1 re-plots the logistic-regression curve. Compare with the
    # panel-2 SVM section below, which does recompute roc_curve.
    roc_auc = auc(fpr, tpr)
    panel1.plot(fpr, tpr, color='y', label='Support Vector Machine (AUC = %0.3f)' % (roc_auc))
    panel1.set_xlabel("False Positive Rate", fontsize=18)
    panel1.set_ylabel("True Positive Rate", fontsize=18)
    clf = RandomForestClassifier(n_estimators=250, max_depth=4, random_state=49, oob_score=True, n_jobs=8, min_samples_split=2)
    clf.fit(myHumanDataReplaced, myLabels)
    mousePred = clf.predict_proba(myMouseDataReplaced)
    fpr, tpr, thresholds = roc_curve(myMouseLabels, mousePred[:,1])
    roc_auc = auc(fpr, tpr)
    panel2.plot(fpr, tpr, color='b', label='Random Forest (AUC = %0.3f)' % (roc_auc))
    clf = LogisticRegression(random_state=11, solver='lbfgs', multi_class='multinomial')
    clf.fit(myHumanDataReplaced, myLabels)
    mousePred = clf.predict_proba(myMouseDataReplaced)
    fpr, tpr, thresholds = roc_curve(myMouseLabels, mousePred[:,1])
    print(clf.coef_)
    roc_auc = auc(fpr, tpr)
    panel2.plot(fpr, tpr, color='r', label='Logistic Regression (AUC = %0.3f)' % (roc_auc))
    clf = SVC(probability=True, gamma='auto', kernel='linear')
    clf.fit(myHumanDataReplaced, myLabels)
    mousePred = clf.predict_proba(myMouseDataReplaced)
    fpr, tpr, thresholds = roc_curve(myMouseLabels, mousePred[:,1])
    roc_auc = auc(fpr, tpr)
    panel2.plot(fpr, tpr, color='y', label='Support Vector Machine (AUC = %0.3f)' % (roc_auc))
    panel2.set_xlabel("False Positive Rate", fontsize=18)
    panel2.set_ylabel("True Positive Rate", fontsize=18)
    panel1.set_xlim([-0.01,1.01])
    panel1.set_ylim([-0.01,1.01])
    panel2.set_xlim([-0.01,1.01])
    panel2.set_ylim([-0.01,1.01])
    panel1.text(0.01, 1.03, "A", ha='center', va='bottom', fontsize=32)
    panel2.text(0.01, 1.03, "B", ha='center', va='bottom', fontsize=32)
    panel1.tick_params(bottom='on', labelbottom='on',\
                    left='on', labelleft='on', \
                    right='off', labelright='off',\
                    top='off', labeltop='off', labelsize=20)
    panel2.tick_params(bottom='on', labelbottom='on',\
                    left='on', labelleft='on', \
                    right='off', labelright='off',\
                    top='off', labeltop='off', labelsize=20)
    panel1.legend(loc="lower right", fontsize=16)
    panel2.legend(loc="lower right", fontsize=16)
    plt.savefig('3Fold.pdf', dpi=700)
    plt.close()
    # print(cross_validate(clf, myHumanDataReplaced, myLabels, cv=3, return_train_score=True, return_estimator=True)['estimator'])
    # a_train, a_test, b_train, b_test = train_test_split(myHumanDataReplaced, myLabels, test_size=0.2, random_state=49)
    # clf.fit(a_train, b_train)
    # print(getScore(clf.predict_proba(a_test),np.asarray(b_test)))
    # clf = RandomForestClassifier(n_estimators=1000, max_depth=5, random_state=49, oob_score=True, n_jobs=8, min_samples_split=2)
    # clf.fit(myHumanDataReplaced, myLabels)
    # print(clf.score(myMouseDataReplaced,myMouseLabels))
    # cM = confusionMatrix(clf.predict(myMouseDataReplaced),myMouseLabels)
    # print(cM)
    # print(getAccuracy(cM))
    # print(getScore(clf.predict_proba(myMouseDataReplaced),myMouseLabels))
    # clf = LogisticRegression(random_state=49, solver='lbfgs', multi_class='multinomial').fit(myHumanData / np.std(myHumanData, 0), myLabels)
    # print(clf.coef_)
    # print(clf.score(myMouseData,myMouseLabels))
def reorder(myList, myOrder):
    """Return the elements of ``myList`` at the indices listed in ``myOrder``.

    Idiomatic rewrite: a list comprehension replaces the manual append loop;
    behavior (including IndexError on out-of-range indices) is unchanged.
    """
    return [myList[index] for index in myOrder]
def makeFloat(myList):
    """Convert TSV fields to floats: '?' becomes NaN and fields containing
    'tRNA' (identifiers) pass through unchanged."""
    converted = []
    for field in myList:
        if field == '?':
            converted.append(np.nan)
        elif 'tRNA' in field:
            converted.append(field)
        else:
            converted.append(float(field))
    return converted
def joiner(entry):
    """Join the string forms of the items in ``entry`` with newlines.

    Idiomatic rewrite: str.join over a generator replaces the manual
    build-a-list-then-join loop; output is identical.
    """
    return '\n'.join(str(k) for k in entry)
def getScore(list1, list2):
    """Fraction of probability pairs in ``list1`` whose argmax matches the
    corresponding 0/1 label in ``list2``. Ties score as incorrect."""
    correct = 0.0
    for i in range(len(list1)):
        pair = list1[i]
        label = int(list2[i])
        if pair[0] > pair[1]:
            predicted = 0
        elif pair[1] > pair[0]:
            predicted = 1
        else:
            predicted = None  # tied probabilities never count as correct
        if predicted == label:
            correct += 1.0
    return correct / float(len(list2))
def confusionMatrix(pred, real):
    """2x2 confusion matrix: rows index the predicted class, columns the
    actual class. Entries whose predicted or actual value is not 0/1 are
    ignored."""
    matrix = [[0.0, 0.0], [0.0, 0.0]]
    for i in range(len(pred)):
        predicted = int(pred[i])
        actual = int(real[i])
        if predicted in (0, 1) and actual in (0, 1):
            matrix[predicted][actual] += 1
    return matrix
def getAccuracy(cM):
    """Percentage of correct predictions from a 2x2 confusion matrix."""
    correct = cM[0][0] + cM[1][1]
    total = cM[0][0] + cM[0][1] + cM[1][0] + cM[1][1]
    return (correct / total) * 100.0
def getFirst(myList):
    """Return the first element of each item in ``myList``.

    Idiomatic rewrite: a list comprehension replaces the manual append loop;
    behavior (including IndexError on empty items) is unchanged.
    """
    return [item[0] for item in myList]
def main():
    # Entry point: run the full train/evaluate/plot pipeline.
    buildClassifier()
if __name__ == "__main__":
    """
    Calls main when program is run by user.
    """
    # NOTE(review): the trailing `raise SystemExit` is redundant after main()
    # returns; the interpreter would exit anyway.
    main();
    raise SystemExit
"noreply@github.com"
] | noreply@github.com |
9f5061630659beed761f1e56fb5a1083b3bb3c3d | 234c46d1249c9209f268417a19018afc12e378b4 | /tests/modules/transformer/activation_layer_test.py | 2af0338a92e9723143c9b963856628980b4971bc | [
"Apache-2.0"
] | permissive | allenai/allennlp | 1f4bcddcb6f5ce60c7ef03a9a3cd6a38bdb987cf | 80fb6061e568cb9d6ab5d45b661e86eb61b92c82 | refs/heads/main | 2023-07-07T11:43:33.781690 | 2022-11-22T00:42:46 | 2022-11-22T00:42:46 | 91,356,408 | 12,257 | 2,712 | Apache-2.0 | 2022-11-22T00:42:47 | 2017-05-15T15:52:41 | Python | UTF-8 | Python | false | false | 804 | py | import torch
import pytest
from allennlp.common import Params
from allennlp.modules.transformer import ActivationLayer
@pytest.fixture
def params_dict():
    """Raw constructor parameters for the ActivationLayer under test."""
    return {
        "hidden_size": 5,
        "intermediate_size": 3,
        "activation": "relu",
    }
@pytest.fixture
def params(params_dict):
    """Wrap the raw parameter dict in an AllenNLP Params object."""
    return Params(params_dict)
@pytest.fixture
def activation_layer(params):
    """Construct the layer from Params (duplicated, since from_params consumes it)."""
    return ActivationLayer.from_params(params.duplicate())
def test_can_construct_from_params(activation_layer, params_dict):
    """The layer built from Params exposes the configured dense dimensions.

    Fix: removed the no-op self-assignment ``activation_layer = activation_layer``.
    """
    assert activation_layer.dense.in_features == params_dict["hidden_size"]
    assert activation_layer.dense.out_features == params_dict["intermediate_size"]
def test_forward_runs(activation_layer):
    """forward() accepts a [batch, hidden_size] tensor without raising."""
    activation_layer.forward(torch.randn(7, 5))
| [
"noreply@github.com"
] | noreply@github.com |
9f9ce59ae5c4d5d86b254d5eed2139d88bdd7665 | 4b3ede9feeb72ccca84ed1a4d9959fb7ef818d32 | /lab/lab11/lab11.py | b6cc3aa58984845f2dd641006e9bf28bdeebe4e9 | [] | no_license | eqchen1024/cs61a | 2e1936e969b769d64d17b2cccae96c222595ea64 | 6b98724923b65a399ac0e86b3fb08cbeb3f88cd6 | refs/heads/main | 2023-08-16T03:07:41.713360 | 2021-10-21T15:02:19 | 2021-10-21T15:02:19 | 398,767,958 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,373 | py | """ Lab 11: Iterators and Generators """
# Q1
def scale(s, k):
    """Yield elements of the iterable s scaled by a number k.

    >>> s = scale([1, 5, 2], 5)
    >>> type(s)
    <class 'generator'>
    >>> list(s)
    [5, 25, 10]
    >>> m = scale(naturals(), 2)
    >>> [next(m) for _ in range(5)]
    [2, 4, 6, 8, 10]
    """
    # Plain generator loop instead of `yield from map(...)`; equally lazy,
    # so it also works on infinite iterables.
    for element in s:
        yield element * k
# Q2
def trap(s, k):
    """Return a generator that yields the first K values in iterable S,
    but raises a ValueError exception if any more values are requested.

    >>> t = trap([3, 2, 1], 2)
    >>> next(t)
    3
    >>> next(t)
    2
    >>> list(trap(range(5), 5))
    Traceback (most recent call last):
        ...
    ValueError
    """
    source = iter(s)
    # Yield exactly k values, then poison any further request.
    for _ in range(k):
        yield next(source)
    raise ValueError
# the naturals generator is used for testing scale and merge functions
def naturals():
    """A generator function that yields the infinite sequence of natural
    numbers, starting at 1.

    >>> m = naturals()
    >>> type(m)
    <class 'generator'>
    >>> [next(m) for _ in range(10)]
    [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    """
    n = 0
    while True:
        n += 1
        yield n
| [
"cdysbt@gmail.com"
] | cdysbt@gmail.com |
af7a59afa35ecefde3a06517897f74c9a0b562e5 | 2bcec58253808558d31aa5b5f9e3b5188b558f7d | /find_island.py | e3ad37d3b16c01ae40dcfabf810c8f594c9c1194 | [] | no_license | liuben10/programming_practice_problems | 59fb9dbde6ce6bf2f6bf80f19915c5c3dffb77ac | 3d3c768810a96acc6f82816696cdb542eb84e5dd | refs/heads/master | 2021-01-10T08:12:14.388548 | 2018-12-22T23:55:48 | 2018-12-22T23:55:48 | 51,346,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,309 | py |
def dfs(aMatrix, row, col, visited):
    """Iteratively flood-fill the 'x' region containing (row, col).

    Appends every reachable [row, col] pair (4-connected cells equal to 'x')
    to the caller-supplied `visited` list, which is mutated in place.

    Bug fix: the original bounds checks were swapped -- the column index was
    compared against the number of rows (len(aMatrix)) and the row index
    against a row length -- which raises IndexError (or misses cells) on
    non-square grids. Bounds now use the correct dimensions.
    """
    fringe = [[row, col]]
    while fringe:
        visiting = fringe.pop()
        visited.append(visiting)
        currow = visiting[0]
        curcol = visiting[1]
        successors = []
        # Right neighbor: column bound is the width of the current row.
        if curcol + 1 < len(aMatrix[currow]):
            successors.append([currow, curcol + 1])
        # Down neighbor: row bound is the number of rows.
        if currow + 1 < len(aMatrix):
            successors.append([currow + 1, curcol])
        if curcol - 1 >= 0:
            successors.append([currow, curcol - 1])
        if currow - 1 >= 0:
            successors.append([currow - 1, curcol])
        for successor in successors:
            if (successor not in visited and successor not in fringe
                    and aMatrix[successor[0]][successor[1]] == 'x'):
                fringe.append(successor)
def find_islands(aMatrix):
    """Count the 4-connected groups of 'x' cells in a 2-D grid."""
    seen = []
    count = 0
    for row_index, row_cells in enumerate(aMatrix):
        for col_index in range(len(row_cells)):
            # Each unvisited 'x' cell starts a new island; dfs marks the rest.
            if row_cells[col_index] == 'x' and [row_index, col_index] not in seen:
                count += 1
                dfs(aMatrix, row_index, col_index, seen)
    return count
# Smoke test: the grid below contains three separate 'x' islands.
# NOTE(review): the `print` statement on the following line is Python 2 syntax.
if __name__ == '__main__':
    test_in = [[' ', ' ', 'x'],
               ['x', ' ', ' '],
               ['x', ' ', 'x']]
print find_islands(test_in) | [
"ben.liu@xoom.com"
] | ben.liu@xoom.com |
074aeca3d97502ed60c27a33d1803a45293f210c | c1ea75db1da4eaa485d39e9d8de480b6ed0ef40f | /app/api/app.py | bafb5b35fd82a3d3b5865aa651d5ecb12186e978 | [
"Apache-2.0"
] | permissive | gasbarroni8/VideoCrawlerEngine | a4f092b0a851dc0487e4dcf4c98b62d6282a6180 | 994933d91d85bb87ae8dfba1295f7a69f6d50097 | refs/heads/master | 2023-04-06T07:59:29.269894 | 2021-02-10T16:09:15 | 2021-02-10T16:09:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,259 | py |
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
from fastapi.responses import HTMLResponse
from .routers import include_routers
from app.helper.middleware import include_exception_handler
from helper.conf import get_conf
from .helper import read_html_file
from app.helper.middleware.proxy import ReverseProxyMiddleware
from ..helper.middleware import include_middleware
from urllib.parse import urljoin
import os
app = FastAPI()
conf = get_conf('app')
htmldist = {
'static': os.path.join(conf.html['dist'], 'static'),
'index': os.path.join(conf.html['dist'], 'index.html')
}
app.mount(
'/static',
StaticFiles(directory=htmldist['static']),
name='dist'
)
include_routers(app)
include_exception_handler(app)
proxy_pass_configures = [
{
'source': '/api/task/',
'pass': urljoin(
conf.taskflow['gateway'].geturl(),
'/api/v1/task/'
),
}, {
'source': '/api/script/',
'pass': urljoin(
conf.script['gateway'].geturl(),
'/api/v1/script/'
),
}
]
include_middleware(app, ReverseProxyMiddleware(proxy_pass_configures))
@app.get('/')
async def index():
return HTMLResponse(read_html_file(htmldist['index']))
| [
"zzsaim@163.com"
] | zzsaim@163.com |
fdb935308c84e6e8df3718a147bb41f284314a06 | 5be8b0f2ee392abeee6970e7a6364ac9a5b8ceaa | /xiaojian/forth_phase/Django./day03/exersice/exersice/wsgi.py | 3d56e7a2957ad8662abfa9118725486dff7fda08 | [] | no_license | Wellsjian/20180826 | 424b65f828f0174e4d568131da01dafc2a36050a | 0156ad4db891a2c4b06711748d2624080578620c | refs/heads/master | 2021-06-18T12:16:08.466177 | 2019-09-01T10:06:44 | 2019-09-01T10:06:44 | 204,462,572 | 0 | 1 | null | 2021-04-20T18:26:03 | 2019-08-26T11:38:09 | JavaScript | UTF-8 | Python | false | false | 394 | py | """
WSGI config for exersice project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings unless the environment already did.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "exersice.settings")
# Module-level WSGI callable imported by servers (gunicorn, uWSGI, ...).
application = get_wsgi_application()
| [
"1149158963@qq.com"
] | 1149158963@qq.com |
7b603e711fb86b97486cc148c2eed6e4c8541d2f | c00924794b325f5697e93a4c05609f19afba5f57 | /tests/test_conv_pd.py | c605b3b65900b1626b2656948a71c7efe3335372 | [
"BSD-3-Clause"
] | permissive | TK-21st/neuroarch | 24a8454dc6a6d1e516ddf54e4ca364d77531b4ab | ab6a1aeac69df1b76dba96bdd877696f45fafa5d | refs/heads/master | 2020-04-29T17:05:21.397284 | 2019-04-19T13:25:21 | 2019-04-19T13:25:21 | 176,286,542 | 0 | 0 | NOASSERTION | 2019-03-18T12:59:50 | 2019-03-18T12:59:50 | null | UTF-8 | Python | false | false | 5,804 | py | from unittest import main, TestCase
import neuroarch.conv.pd
import deepdiff
import pyorient
import pandas as pd
# Connection settings for the throwaway OrientDB test database.
db_name = 'neuroarch_test_db'
username = 'admin'
passwd = 'admin'
# True when DeepDiff reports no differences between a and b.
match = lambda a, b: False if deepdiff.DeepDiff(a, b) else True
class TestConvPandas(TestCase):
@classmethod
def setUpClass(cls):
cls.client = pyorient.OrientDB('localhost', 2424)
cls.client.connect(username, passwd)
if cls.client.db_exists(db_name):
cls.client.db_drop(db_name)
cls.client.db_create(db_name, pyorient.DB_TYPE_GRAPH,
pyorient.STORAGE_TYPE_MEMORY)
cls.client.db_open(db_name, username, passwd)
@classmethod
def tearDownClass(cls):
cls.client.connect(username, passwd)
try:
cls.client.db_drop(db_name)
except Exception as e:
warnings.warn('problem cleaning up test database: %s' % e.message)
def _connect_server(self):
self.client = pyorient.OrientDB('localhost', 2424)
self.client.connect(username, passwd)
def setUp(self):
cmds = ['create class neuron extends V',
'create class synapse extends V',
'create class data extends E']
for cmd in cmds:
self.client.command(cmd)
def tearDown(self):
cmds = ['delete vertex neuron',
'delete vertex synapse',
'delete edge data',
'drop class neuron',
'drop class synapse',
'drop class data']
for cmd in cmds:
self.client.command(cmd)
def _create_pandas_graph(self):
df_node = pd.DataFrame({'name': ['foo', 'bar', 'baz',
'foo-bar', 'foo-baz'],
'class': ['neuron', 'neuron', 'neuron',
'synapse', 'synapse']})
df_edge = pd.DataFrame({'out': [0, 3, 0, 4],
'in': [3, 1, 4, 2],
'class': ['data', 'data', 'data', 'data']})
return df_node, df_edge
def _create_orient_graph(self):
cmd = ("begin;"
"let foo = create vertex neuron content {'name': 'foo', 'id': 0};"
"let bar = create vertex neuron content {'name': 'bar', 'id': 1};"
"let baz = create vertex neuron content {'name': 'baz', 'id': 2};"
"let foo_bar = create vertex synapse content {'name': 'foo-bar', 'id': 3};"
"let foo_baz = create vertex synapse content {'name': 'foo-baz', 'id': 4};"
"create edge data from $foo to $foo_bar;"
"create edge data from $foo_bar to $bar;"
"create edge data from $foo to $foo_baz;"
"create edge data from $foo_baz to $baz;"
"commit retry 5;")
self.client.batch(cmd)
def test_orient_to_pandas(self):
df_node_pandas, df_edge_pandas = self._create_pandas_graph()
self._create_orient_graph()
df_node_orient, df_edge_orient = neuroarch.conv.pd.orient_to_pandas(self.client,
'g.V.has("@class", T.in, ["neuron","synapse"])',
'g.E.has("@class", "data")')
self.assertSetEqual(set([tuple(v) for v in df_node_pandas.values]),
set([tuple(v) for v in df_node_orient.values]))
self.assertSetEqual(set([tuple(v) for v in df_edge_pandas.values]),
set([tuple(v) for v in df_edge_orient.values]))
self.assertSetEqual(set(df_node_pandas.index),
set(df_node_orient.index))
def test_pandas_to_orient(self):
    """Round trip: pandas -> OrientDB -> pandas reproduces the reference frames."""
    df_node_pandas, df_edge_pandas = self._create_pandas_graph()
    neuroarch.conv.pd.pandas_to_orient(self.client,
                                       df_node_pandas, df_edge_pandas)
    df_node_orient, df_edge_orient = neuroarch.conv.pd.orient_to_pandas(self.client,
        'g.V.has("@class", T.in, ["neuron","synapse"])',
        'g.E.has("@class", "data")')
    # Compare as sets of row tuples: the DB does not guarantee row order.
    self.assertSetEqual(set([tuple(v) for v in df_node_pandas.values]),
                        set([tuple(v) for v in df_node_orient.values]))
    self.assertSetEqual(set([tuple(v) for v in df_edge_pandas.values]),
                        set([tuple(v) for v in df_edge_orient.values]))
    self.assertSetEqual(set(df_node_pandas.index),
                        set(df_node_orient.index))
def test_pandas_to_orient_double(self):
    """Round trip of a graph whose node frame contains a float (double) column."""
    df_node_pandas = pd.DataFrame({'name': ['foo', 'bar', 'foo-bar'],
                                   'class': ['neuron', 'neuron', 'synapse'],
                                   'x': [1/3.0, 1/4.0, 1.0]})
    df_edge_pandas = pd.DataFrame({'out': [0, 2], 'in': [2, 1],
                                   'class': ['data', 'data']})
    neuroarch.conv.pd.pandas_to_orient(self.client,
                                       df_node_pandas,
                                       df_edge_pandas)
    df_node_orient, df_edge_orient = \
        neuroarch.conv.pd.orient_to_pandas(self.client,
            'g.V.has("@class", T.in, ["neuron","synapse"])',
            'g.E.has("@class", "data")')
    # Compare as sets of row tuples: the DB does not guarantee row order.
    # NOTE(review): exact float equality across the round trip is assumed here.
    self.assertSetEqual(set([tuple(v) for v in df_node_pandas.values]),
                        set([tuple(v) for v in df_node_orient.values]))
    self.assertSetEqual(set([tuple(v) for v in df_edge_pandas.values]),
                        set([tuple(v) for v in df_edge_orient.values]))
    self.assertSetEqual(set(df_node_pandas.index),
                        set(df_node_orient.index))
if __name__ == '__main__':
    # NOTE(review): `main` is presumably unittest.main imported above -- confirm.
    main()
| [
"nikul@ee.columbia.edu"
] | nikul@ee.columbia.edu |
4fb1bed78cc990b2ebca25fe64eec5f0a91b325a | d484309dd9244477a1f25b466f34780c2eacd16f | /LinkedLists.py | aeba0a32776aab83e8809b5e61f4872d075c0fd7 | [] | no_license | joelwng28/Python-Programs | f0d37147b9dd5d056eda9fcbe96b4dc46b75016a | 2a5755cfe1e3de3cafbd24f954345c79e52c212f | refs/heads/master | 2021-06-23T08:30:20.358439 | 2017-09-10T04:12:42 | 2017-09-10T04:12:42 | 103,002,727 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,432 | py | # File: LinkedLists.py
# Description: Implements various functions using linked lists
# Student's Name: Zi Zhou Wang
# Student's UT EID: zw3948
# Course Name: CS 313E
# Unique Number: 86940
#
# Date Created: 7/7/2017
# Date Last Modified: 7/7/2017
class Node(object):
    """A singly linked list node holding a data payload and a next pointer."""

    def __init__(self, initdata):
        self.data = initdata
        self.next = None  # always initialize the link; avoids dangling pointers

    def getData(self):
        # Return the payload (a reference, not a copy).
        return self.data

    def getNext(self):
        # Return the successor node, or None at the tail.
        return self.next

    def setData(self, newData):
        self.data = newData

    def setNext(self, newNext):
        self.next = newNext


class LinkedList:
    """Singly linked list with a sentinel head node.

    The sentinel simplifies insertion and deletion: ``self.head`` never
    holds user data, and the first real element is ``self.head.getNext()``.
    """

    def __init__(self):
        self.head = Node(None)  # sentinel; real data starts at head.next

    def __str__(self):
        """Return the elements separated by spaces, 10 per line."""
        parts = []
        count = 0
        current = self.head.getNext()
        while current is not None:
            # str() so non-string payloads do not raise TypeError
            parts.append(str(current.getData()) + " ")
            count += 1
            if count % 10 == 0:
                parts.append("\n")
            current = current.getNext()
        return "".join(parts)

    def addFirst(self, item):
        """Insert item at the front of the list (O(1))."""
        temp = Node(item)
        temp.setNext(self.head.getNext())
        self.head.setNext(temp)

    def addLast(self, item):
        """Append item at the tail of the list (O(n))."""
        current = self.head
        while current.getNext() is not None:
            current = current.getNext()
        current.setNext(Node(item))

    def addInOrder(self, item):
        """Insert item before the first larger element.

        Assumes the list is already sorted in ascending order.
        """
        current = self.head
        # Advance while the next element is still <= item (keeps insert stable).
        while current.getNext() is not None and not current.getNext().getData() > item:
            current = current.getNext()
        temp = Node(item)
        temp.setNext(current.getNext())
        current.setNext(temp)

    def getLength(self):
        """Return the number of stored elements (O(n))."""
        count = 0
        current = self.head.getNext()
        while current is not None:
            count += 1
            current = current.getNext()
        return count

    def findUnordered(self, item):
        """Return True if item occurs anywhere in the (unordered) list."""
        current = self.head.getNext()
        while current is not None:
            if current.getData() == item:
                return True
            current = current.getNext()
        return False

    def findOrdered(self, item):
        """Return True if item occurs in this ascending-sorted list.

        Stops early as soon as an element greater than item is reached.
        """
        current = self.head.getNext()
        while current is not None:
            data = current.getData()
            if data == item:
                return True
            if data > item:
                return False  # past the insertion point; item cannot follow
            current = current.getNext()
        return False

    def delete(self, item):
        """Delete the first occurrence of item; return True if it was found."""
        current = self.head
        while current.getNext() is not None:
            if current.getNext().getData() == item:
                # Unlink the matching node.
                current.setNext(current.getNext().getNext())
                return True
            current = current.getNext()
        return False

    def copyList(self):
        """Return a new list containing the same elements in the same order."""
        temp = LinkedList()
        current = self.head.getNext()
        while current is not None:
            temp.addLast(current.getData())
            current = current.getNext()
        return temp

    def reverseList(self):
        """Return a new list with the elements in reverse order."""
        temp = LinkedList()
        current = self.head.getNext()
        while current is not None:
            temp.addFirst(current.getData())
            current = current.getNext()
        return temp

    def orderList(self):
        """Return a new list with the elements in ascending order."""
        temp = LinkedList()
        current = self.head.getNext()
        while current is not None:
            temp.addInOrder(current.getData())
            current = current.getNext()
        return temp

    def isOrdered(self):
        """Return True if the elements are in ascending order.

        An empty or single-element list is considered ordered.  (Bug fix:
        the previous implementation raised AttributeError on an empty list.)
        """
        current = self.head.getNext()
        if current is None:
            return True
        while current.getNext() is not None:
            if current.getData() > current.getNext().getData():
                return False
            current = current.getNext()
        return True

    def isEmpty(self):
        """Return True if the list holds no elements."""
        return self.head.getNext() is None

    def mergeList(self, b):
        """Merge this sorted list with sorted list b into a new sorted list."""
        temp = LinkedList()
        currentA = self.head.getNext()
        currentB = b.head.getNext()
        while currentA is not None or currentB is not None:
            # Take from A when B is exhausted or A's element is <= B's
            # (ties go to A, keeping the merge stable).
            if currentB is None or (currentA is not None and
                                    not currentB.getData() < currentA.getData()):
                temp.addLast(currentA.getData())
                currentA = currentA.getNext()
            else:
                temp.addLast(currentB.getData())
                currentB = currentB.getNext()
        return temp

    def isEqual(self, b):
        """Return True if both lists hold identical elements in the same order."""
        if self.getLength() != b.getLength():
            return False
        currentA = self.head.getNext()
        currentB = b.head.getNext()
        while currentA is not None:
            if currentA.getData() != currentB.getData():
                return False
            currentA = currentA.getNext()
            currentB = currentB.getNext()
        return True

    def removeDuplicates(self):
        """Return a new list with duplicates removed, keeping the first
        occurrence of each element in its original order.

        Uses a set for O(1) membership tests (requires hashable payloads;
        the previous list-based lookup made this O(n**2)).
        """
        temp = LinkedList()
        seen = set()
        current = self.head.getNext()
        while current is not None:
            data = current.getData()
            if data not in seen:
                seen.add(data)
                temp.addLast(data)
            current = current.getNext()
        return temp
def main():
    """Smoke-test driver: exercises every LinkedList operation, printing the
    expected result next to each test so the output can be checked by eye."""
    print("\n\n***************************************************************")
    print("Test of addFirst: should see 'node34...node0'")
    print("***************************************************************")
    myList1 = LinkedList()
    for i in range(35):
        myList1.addFirst("node" + str(i))
    print(myList1)
    print("\n\n***************************************************************")
    print("Test of addLast: should see 'node0...node34'")
    print("***************************************************************")
    myList2 = LinkedList()
    for i in range(35):
        myList2.addLast("node" + str(i))
    print(myList2)
    print("\n\n***************************************************************")
    print("Test of addInOrder: should see 'alpha delta epsilon gamma omega'")
    print("***************************************************************")
    greekList = LinkedList()
    greekList.addInOrder("gamma")
    greekList.addInOrder("delta")
    greekList.addInOrder("alpha")
    greekList.addInOrder("epsilon")
    greekList.addInOrder("omega")
    print(greekList)
    print("\n\n***************************************************************")
    print("Test of getLength: should see 35, 5, 0")
    print("***************************************************************")
    emptyList = LinkedList()
    print("   Length of myList1: ", myList1.getLength())
    print("   Length of greekList: ", greekList.getLength())
    print("   Length of emptyList: ", emptyList.getLength())
    print("\n\n***************************************************************")
    print("Test of findUnordered: should see True, False")
    print("***************************************************************")
    print("   Searching for 'node25' in myList2: ", myList2.findUnordered("node25"))
    print("   Searching for 'node35' in myList2: ", myList2.findUnordered("node35"))
    print("\n\n***************************************************************")
    print("Test of findOrdered: should see True, False")
    print("***************************************************************")
    print("   Searching for 'epsilon' in greekList: ", greekList.findOrdered("epsilon"))
    print("   Searching for 'omicron' in greekList: ", greekList.findOrdered("omicron"))
    print("\n\n***************************************************************")
    print("Test of delete: should see 'node25 found', 'node34 found',")
    print("   'node0 found', 'node40 not found'")
    print("***************************************************************")
    print("   Deleting 'node25' (random node) from myList1: ")
    if myList1.delete("node25"):
        print("   node25 found")
    else:
        print("   node25 not found")
    print("   myList1: ")
    print(myList1)
    print("   Deleting 'node34' (first node) from myList1: ")
    if myList1.delete("node34"):
        print("   node34 found")
    else:
        print("   node34 not found")
    print("   myList1: ")
    print(myList1)
    print("   Deleting 'node0' (last node) from myList1: ")
    if myList1.delete("node0"):
        print("   node0 found")
    else:
        print("   node0 not found")
    print("   myList1: ")
    print(myList1)
    print("   Deleting 'node40' (node not in list) from myList1: ")
    if myList1.delete("node40"):
        print("   node40 found")
    else:
        print("   node40 not found")
    print("   myList1: ")
    print(myList1)
    print("\n\n***************************************************************")
    print("Test of copyList:")
    print("***************************************************************")
    greekList2 = greekList.copyList()
    print("   These should look the same:")
    print("   greekList before delete:")
    print(greekList)
    print("   greekList2 before delete:")
    print(greekList2)
    greekList2.delete("alpha")
    print("   This should only change greekList2:")
    print("   greekList after deleting 'alpha' from second list:")
    print(greekList)
    print("   greekList2 after deleting 'alpha' from second list:")
    print(greekList2)
    greekList.delete("omega")
    print("   This should only change greekList1:")
    print("   greekList after deleting 'omega' from first list:")
    print(greekList)
    print("   greekList2 after deleting 'omega' from first list:")
    print(greekList2)
    print("\n\n***************************************************************")
    print("Test of reverseList: the second one should be the reverse")
    print("***************************************************************")
    print("   Original list:")
    print(myList1)
    print("   Reversed list:")
    myList1Rev = myList1.reverseList()
    print(myList1Rev)
    print("\n\n***************************************************************")
    print("Test of orderList: the second list should be the first one sorted")
    print("***************************************************************")
    planets = LinkedList()
    planets.addFirst("Mercury")
    planets.addFirst("Venus")
    planets.addFirst("Earth")
    planets.addFirst("Mars")
    planets.addFirst("Jupiter")
    planets.addFirst("Saturn")
    planets.addFirst("Uranus")
    planets.addFirst("Neptune")
    planets.addFirst("Pluto?")
    print("   Original list:")
    print(planets)
    print("   Ordered list:")
    orderedPlanets = planets.orderList()
    print(orderedPlanets)
    print("\n\n***************************************************************")
    print("Test of isOrdered: should see False, True")
    print("***************************************************************")
    print("   Original list:")
    print(planets)
    print("   Ordered? ", planets.isOrdered())
    orderedPlanets = planets.orderList()
    print("   After ordering:")
    print(orderedPlanets)
    print("   ordered? ", orderedPlanets.isOrdered())
    print("\n\n***************************************************************")
    print("Test of isEmpty: should see True, False")
    print("***************************************************************")
    newList = LinkedList()
    print("New list (currently empty):", newList.isEmpty())
    newList.addFirst("hello")
    print("After adding one element:", newList.isEmpty())
    print("\n\n***************************************************************")
    print("Test of mergeList")
    print("***************************************************************")
    list1 = LinkedList()
    list1.addLast("aardvark")
    list1.addLast("cat")
    list1.addLast("elephant")
    list1.addLast("fox")
    list1.addLast("lynx")
    print("   first list:")
    print(list1)
    list2 = LinkedList()
    list2.addLast("bacon")
    list2.addLast("dog")
    list2.addLast("giraffe")
    list2.addLast("hippo")
    list2.addLast("wolf")
    print("   second list:")
    print(list2)
    print("   merged list:")
    list3 = list1.mergeList(list2)
    print(list3)
    print("\n\n***************************************************************")
    print("Test of isEqual: should see True, False, True")
    print("***************************************************************")
    print("   First list:")
    print(planets)
    planets2 = planets.copyList()
    print("   Second list:")
    print(planets2)
    print("   Equal: ", planets.isEqual(planets2))
    print(planets)
    planets2.delete("Mercury")
    print("   Second list:")
    print(planets2)
    print("   Equal: ", planets.isEqual(planets2))
    print("   Compare two empty lists:")
    emptyList1 = LinkedList()
    emptyList2 = LinkedList()
    print("   Equal: ", emptyList1.isEqual(emptyList2))
    print("\n\n***************************************************************")
    print("Test of removeDuplicates: original list has 14 elements, new list has 10")
    print("***************************************************************")
    dupList = LinkedList()
    print("   removeDuplicates from an empty list shouldn't fail")
    newList = dupList.removeDuplicates()
    print("   printing what should still be an empty list:")
    print(newList)
    dupList.addLast("giraffe")
    dupList.addLast("wolf")
    dupList.addLast("cat")
    dupList.addLast("elephant")
    dupList.addLast("bacon")
    dupList.addLast("fox")
    dupList.addLast("elephant")
    dupList.addLast("wolf")
    dupList.addLast("lynx")
    dupList.addLast("elephant")
    dupList.addLast("dog")
    dupList.addLast("hippo")
    dupList.addLast("aardvark")
    dupList.addLast("bacon")
    print("   original list:")
    print(dupList)
    print("   without duplicates:")
    newList = dupList.removeDuplicates()
    print(newList)


# Guard the driver so importing this module no longer runs the whole demo.
if __name__ == '__main__':
    main()
| [
"joelwng28@gmail.com"
] | joelwng28@gmail.com |
bbb13bf92b6baacc4f9f751db6d4f343a05931e6 | 20ecd59bc638821cb449e1a5a904ec929b1689ce | /home/migrations/0019_homepage_news_page.py | 77f415f0ecefcb0b55ee4d195042737ef8328ae2 | [] | no_license | yemilab/cms | 826d015bc6d71d29c082511fc364cfb763413577 | 9bf934fddfc59c1d1d2e74a34e2db3b650cb0ed1 | refs/heads/master | 2020-12-28T16:03:23.541114 | 2020-02-08T03:48:27 | 2020-02-08T03:48:27 | 238,397,882 | 0 | 0 | null | 2020-02-05T08:06:42 | 2020-02-05T08:03:53 | Python | UTF-8 | Python | false | false | 597 | py | # Generated by Django 2.2.7 on 2019-12-10 08:41
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add an optional ``news_page`` link from HomePage to a Wagtail Page.

    Auto-generated schema migration; avoid editing the operations by hand.
    """

    dependencies = [
        ('wagtailcore', '0041_group_collection_permissions_verbose_name_plural'),
        ('home', '0018_auto_20191210_0329'),
    ]

    operations = [
        migrations.AddField(
            model_name='homepage',
            name='news_page',
            # SET_NULL + null=True keeps HomePage rows alive when the linked
            # Page is deleted; related_name='+' disables the reverse relation.
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.Page'),
        ),
    ]
| [
"code@hyounggyu.com"
] | code@hyounggyu.com |
cef06aa93427891f9e1de15f76de7e4aa063276f | 48ba8d0788e4ac7d4cacd7e7a2e2cf4f391c85ad | /Apple/rectangle_overlap.py | 2fe9ccc6190a3b2c9e19a0c9399b0cd7700fb388 | [] | no_license | rahulvshinde/Python_Playground | c28ac2dc0865e254caa5360c3bb97b4ff5f23b3a | 7a03b765dd440654caba1e06af5b149f584e9f08 | refs/heads/master | 2023-04-19T17:25:55.993837 | 2021-05-17T01:15:30 | 2021-05-17T01:15:30 | 280,736,898 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 886 | py | """
A rectangle is represented as a list [x1, y1, x2, y2], where (x1, y1) are the coordinates of its bottom-left corner,
and (x2, y2) are the coordinates of its top-right corner.
Two rectangles overlap if the area of their intersection is positive. To be clear, two rectangles that only touch at
the corner or edges do not overlap.
Given two (axis-aligned) rectangles, return whether they overlap.
Example 1:
Input: rec1 = [0,0,2,2], rec2 = [1,1,3,3]
Output: true
Example 2:
Input: rec1 = [0,0,1,1], rec2 = [1,0,2,1]
Output: false
Notes:
Both rectangles rec1 and rec2 are lists of 4 integers.
All coordinates in rectangles will be between -10^9 and 10^9.
"""
# Sample rectangles: they share only the edge x = 1, so they do NOT overlap.
rec1 = [0, 0, 1, 1]
rec2 = [1, 0, 2, 1]


def rectOverlap(rec1, rec2):
    """Return True when the two axis-aligned rectangles share positive area.

    Each rectangle is [x1, y1, x2, y2] with (x1, y1) the bottom-left corner
    and (x2, y2) the top-right corner.  Touching edges or corners do not
    count as overlap.
    """
    # Rectangles fail to overlap exactly when one lies entirely to the left
    # of, right of, below, or above the other (De Morgan of the direct test).
    return not (rec1[2] <= rec2[0] or rec2[2] <= rec1[0]
                or rec1[3] <= rec2[1] or rec2[3] <= rec1[1])


print(rectOverlap(rec1, rec2))
"r.shinde2007@gmail.com"
] | r.shinde2007@gmail.com |
c374c83beb40c24258ac5cf51147a8415fe65365 | 436ecd15d0932e00cb246059ea7decc1d2ab4c05 | /instagram/settings.py | 6590d0cd7f2796a08ac416a2007b2545e3b7f8c8 | [
"MIT"
] | permissive | priscillapepe/Instagram | 28b3e704bf4d0ed8d87db3c42b82fbcbc45f2d84 | db76ddbecab72cb6ff3ba9b8e9a594ce8e89a22f | refs/heads/main | 2023-01-02T16:25:25.630709 | 2020-10-22T08:27:49 | 2020-10-22T08:27:49 | 304,650,953 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,080 | py | """
Django settings for instagram project.
Generated by 'django-admin startproject' using Django 3.1.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import os
import django_heroku
import dj_database_url
from decouple import config,Csv
# NOTE(review): Path, dj_database_url and Csv are imported but unused here --
# confirm before removing (other tooling may rely on them).

# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control -- move it to an
# environment variable (e.g. config('SECRET_KEY')) before deploying.
SECRET_KEY = 'l)+@$gpv+n&hqg(ef6_19(wb^_546_!*a)rx&wickyw)w$gy^^'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# Empty here; django_heroku.settings() below configures hosts for Heroku.
ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    'gram.apps.GramConfig',
    'users.apps.UsersConfig',
    'crispy_forms',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]

MIDDLEWARE = [
    # WhiteNoise serves static files directly from the app process.
    'whitenoise.middleware.WhiteNoiseMiddleware',
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'instagram.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'instagram.wsgi.application'

# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
# Credentials are read from the environment / .env via python-decouple.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': config('DB_NAME'),
        'USER': config('DB_USER'),
        'PASSWORD': config('DB_PASSWORD'),
        'HOST': config('DB_HOST'),
        'PORT': '',
    }
}

# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'static'),
)
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'

# configuring the location for media
MEDIA_ROOT = os.path.join(BASE_DIR,'media')
MEDIA_URL = '/media/'

# Configure Django App for Heroku.
django_heroku.settings(locals())

CRISPY_TEMPLATE_PACK = 'bootstrap4'

LOGIN_REDIRECT_URL = 'gram-home'
LOGIN_URL = 'login'
| [
"priscillaungai99@gmail.com"
] | priscillaungai99@gmail.com |
8bbb7896a9faa5e12fd9ed8815e374e5c0f9b90b | 61afe17201589a61c39429602ca11e3fdacf47a9 | /Chapter3/Day19/12.异常细分(了解).py | 53a37128647faf14441789776924fc9aa2b738f8 | [] | no_license | Liunrestrained/Python- | ec09315c50b395497dd9b0f83219fef6355e9b21 | 6b2cb4ae74c59820c6eabc4b0e98961ef3b941b2 | refs/heads/main | 2023-07-17T14:16:12.084304 | 2021-08-28T14:05:12 | 2021-08-28T14:05:12 | 399,408,426 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 734 | py | import requests
from requests import exceptions

# Demo of fine-grained exception handling: keep prompting for a URL,
# fetch it, and map each specific `requests` exception to a friendly
# (Chinese) error message.
while True:
    url = input("下载链接")  # prompt: "download link"
    try:
        res = requests.get(url=url)
        print(res)
    except exceptions.MissingSchema as e:  # specific case: URL scheme missing
        print("URL架构不存在")
    except exceptions.InvalidSchema as e:  # specific case: unsupported scheme
        print("URL架构错误")
    except exceptions.InvalidURL as e:  # specific case: malformed URL
        print("URL地址格式错误")
    except exceptions.ConnectionError as e:  # specific case: network failure
        print("网络连接出错")
    except Exception as e:  # fuzzy catch-all fallback
        print("代码出现错误", e)
# Tip: for a simpler version, a single `except Exception` would suffice.
"noreply@github.com"
] | noreply@github.com |
bf41a6561529463a82de458609b7e41c9d5ac960 | d4044bd9c5cf70c491df3b1da5bd2d16063717bf | /exer3_test6.py | e3e4d68822f03784db862754fd174607125e8e1b | [
"Apache-2.0"
] | permissive | ramyacr97/RamyaPython | 6f6c164bd50d636bed1d71896b7a41d22a0bc71c | 27ae8c88ec1459372d422fa5f97baa415898f875 | refs/heads/master | 2022-12-11T11:40:11.010629 | 2020-04-01T06:40:08 | 2020-04-01T06:40:08 | 211,785,306 | 0 | 0 | Apache-2.0 | 2022-12-08T06:46:30 | 2019-09-30T05:54:07 | Python | UTF-8 | Python | false | false | 1,007 | py | from netmiko import ConnectHandler
from getpass import getpass
from ciscoconfparse import CiscoConfParse
import re

# Connection parameters for the target device; the session is logged to
# cisco4.txt so CiscoConfParse can re-read the captured configuration.
device = {
    'host': 'cisco4.lasthop.io',
    'username': 'pyclass',
    'password': getpass(),
    'device_type': 'cisco_ios',
    'session_log': 'cisco4.txt',
}

net_connect = ConnectHandler(**device)
show_run = net_connect.send_command("show run")
net_connect.disconnect()

cisco_obj = CiscoConfParse("cisco4.txt")
# Interfaces that have at least one "ip address" child line.
intf = cisco_obj.find_objects_w_child(parentspec=r"^interface",
                                      childspec=r"^\s+ip address")
for parent in intf:
    print("Interface Line: {}".format(parent.text))
    # Print the leading "ip address" child lines (primary plus at most one
    # secondary), mirroring the original two-iteration loop but without
    # shadowing the built-in `int` or indexing past the end of `children`.
    for child in parent.children[:2]:
        if 'ip address' in child.text:
            print("IP Address Line:", child.text)
        else:
            break
"ramya.cr@gmail.com"
] | ramya.cr@gmail.com |
7e52d564b5e7a41caaade892dc1faf27b4438ade | 04709c69cc7ed2727446dbf0aedb9c076e6ce477 | /fridge/models.py | 5a79db5dc27a48fd96227ac8b67afa700f7e1889 | [] | no_license | BrianNgeno/smart-fridge | 95847bc50e5dc1374059df875825f688db5ac214 | 42c07cd03eb62fe5fa76942039cfd077501cea97 | refs/heads/master | 2022-12-09T22:17:08.260040 | 2018-11-02T14:29:40 | 2018-11-02T14:29:40 | 155,748,519 | 0 | 0 | null | 2022-11-22T03:05:25 | 2018-11-01T17:05:25 | Python | UTF-8 | Python | false | false | 2,224 | py | from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth.models import User
import datetime as dt
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
    # Auto-create a Profile the first time a User row is saved.
    if created:
        Profile.objects.create(user=instance)
@receiver(post_save, sender=User)
def save_user_profile(sender, instance, **kwargs):
    # Keep the related Profile persisted on every User save.
    instance.profile.save()
class Profile(models.Model):
    """Per-user profile: avatar, short bio, and an optional grocery link."""

    Profile_photo = models.ImageField(upload_to = 'images/',blank=True)  # avatar (optional)
    Bio = models.TextField(max_length = 50)  # short free-text bio
    # One profile per auth user; deleting the user deletes the profile.
    user = models.OneToOneField(User,on_delete=models.CASCADE, primary_key=True)
    # NOTE(review): ForeignKey without on_delete is only valid on Django < 2.0
    # -- confirm the project's Django version.
    grocery = models.ForeignKey('Grocery',related_name='grocery',null=True)

    def save_profile(self):
        # Persist this profile (thin wrapper over Model.save).
        self.save()

    @classmethod
    def get_by_id(cls, id):
        # Fetch the profile for the given user id; raises DoesNotExist if absent.
        details = Profile.objects.get(user = id)
        return details

    @classmethod
    def filter_by_id(cls, id):
        # Like get_by_id, but returns None instead of raising when absent.
        details = Profile.objects.filter(user = id).first()
        return details

    @classmethod
    def search_user(cls, name):
        # Case-insensitive substring search on the related username.
        userprof = Profile.objects.filter(user__username__icontains = name)
        return userprof
# Create your models here.
class Grocery(models.Model):
    """A grocery product available for ordering."""

    name = models.CharField(max_length = 50 )
    image = models.ImageField(upload_to='product/vegetables' , default='')
    pub_date = models.DateTimeField(auto_now_add=True, null=True)  # set once on creation
    # NOTE(review): price is stored as text rather than a numeric field -- confirm intentional.
    price = models.CharField(max_length = 50 )
# class Order(models.Model):
# name = models.CharField(max_length = 50 )
# price = models.CharField(max_length = 50 )
# is_ordered = models.BooleanField(default=False)
# order_date = models.DateTimeField(auto_now_add=True, null=True)
class Cart(models.Model):
    """A single grocery item placed in a user's cart."""

    user = models.ForeignKey(User,related_name='cart')
    item = models.ForeignKey(Grocery,related_name='cart')
    order_date = models.DateTimeField(auto_now_add=True, null=True)
    # NOTE(review): payment status stored as the strings 'True'/'False'
    # instead of a BooleanField -- confirm intentional.
    paid = models.CharField(default='False',max_length=20)

    def __str__(self):
        # Shown in admin/listings: the payment status string.
        return self.paid

    class Meta:
        ordering = ['-id']  # newest cart entries first

    def save_item(self):
        # Persist this cart entry (thin wrapper over Model.save).
        self.save()

    def delete_item(self):
        # Remove this cart entry (thin wrapper over Model.delete).
        self.delete()
"bkn.ngeno@gmail.com"
] | bkn.ngeno@gmail.com |
76e05dcf4c7eff7600fcea99a5f4b07a8743617f | 9fd41c49b8c48ed9f033b886139578a2d642a0f6 | /info/serializers/comment.py | 8361b65b4ab245fb7897fea6b89ad7b1a75d7243 | [
"MIT"
] | permissive | wojciezki/movie_info | 98ff42bed9f37171ecd1381c5012e5d14aeaa698 | 88f089e8eaa5310cf5b03f7aae4f6c9b871282f2 | refs/heads/master | 2022-12-10T04:54:18.975789 | 2019-02-25T21:45:42 | 2019-02-25T21:45:42 | 172,560,020 | 0 | 0 | null | 2022-12-08T01:38:27 | 2019-02-25T18:19:56 | Python | UTF-8 | Python | false | false | 195 | py | from rest_framework import serializers
from ..models import Comment
class CommentBaseSerializer(serializers.ModelSerializer):
    """DRF model serializer exposing every field of the Comment model."""

    class Meta:
        model = Comment
        fields = '__all__'
"wojciech.jakubiak@whiteaster.com"
] | wojciech.jakubiak@whiteaster.com |
6a73e779d65fe3655e1e2f4fd86f82fe6aa2a5cc | bab423622c464b754a5f92d666637eed12f1db87 | /Lib/site-packages/PIL/BmpImagePlugin.py | 4a072a596c9a8073dc36c8ea8f4c5267287569d0 | [] | no_license | Eicom/Eicom | a3956c1fa33846da31c9c43a3b90add05fc7e8d1 | 4f5a0a3eaa1eec15e9dec22fd6aa9057fd3fc89e | refs/heads/master | 2021-01-21T15:43:19.661859 | 2017-11-27T22:26:54 | 2017-11-27T22:26:54 | 91,852,178 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 13,471 | py | #
# The Python Imaging Library.
# $Id$
#
# BMP file handler
#
# Windows (and OS/2) native bitmap storage format.
#
# history:
# 1995-09-01 fl Created
# 1996-04-30 fl Added save
# 1997-08-27 fl Fixed save of 1-bit images
# 1998-03-06 fl Load P images as L where possible
# 1998-07-03 fl Load P images as 1 where possible
# 1998-12-29 fl Handle small palettes
# 2002-12-30 fl Fixed load of 1-bit palette images
# 2003-04-21 fl Fixed load of 1-bit monochrome images
# 2003-04-23 fl Added limited support for BI_BITFIELDS compression
#
# Copyright (c) 1997-2003 by Secret Labs AB
# Copyright (c) 1995-2003 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
from . import Image, ImageFile, ImagePalette
from ._binary import i8, i16le as i16, i32le as i32, \
o8, o16le as o16, o32le as o32
import math
__version__ = "0.7"
#
# --------------------------------------------------------------------
# Read BMP file
# Map BMP bit depth -> (PIL image mode, raw decoder name).
BIT2MODE = {
    # bits => mode, rawmode
    1: ("P", "P;1"),
    4: ("P", "P;4"),
    8: ("P", "P"),
    16: ("RGB", "BGR;15"),
    24: ("RGB", "BGR"),
    32: ("RGB", "BGRX"),
}
def _accept(prefix):
return prefix[:2] == b"BM"
# ==============================================================================
# Image plugin for the Windows BMP format.
# ==============================================================================
class BmpImageFile(ImageFile.ImageFile):
""" Image plugin for the Windows Bitmap format (BMP) """
# -------------------------------------------------------------- Description
format_description = "Windows Bitmap"
format = "BMP"
# --------------------------------------------------- BMP Compression values
COMPRESSIONS = {'RAW': 0, 'RLE8': 1, 'RLE4': 2, 'BITFIELDS': 3, 'JPEG': 4, 'PNG': 5}
RAW, RLE8, RLE4, BITFIELDS, JPEG, PNG = 0, 1, 2, 3, 4, 5
def _bitmap(self, header=0, offset=0):
""" Read relevant info about the BMP """
read, seek = self.fp.read, self.fp.seek
if header:
seek(header)
file_info = {}
file_info['header_size'] = i32(read(4)) # read bmp header size @offset 14 (this is part of the header size)
file_info['direction'] = -1
# --------------------- If requested, read header at a specific position
header_data = ImageFile._safe_read(self.fp, file_info['header_size'] - 4) # read the rest of the bmp header, without its size
# --------------------------------------------------- IBM OS/2 Bitmap v1
# ------ This format has different offsets because of width/height types
if file_info['header_size'] == 12:
file_info['width'] = i16(header_data[0:2])
file_info['height'] = i16(header_data[2:4])
file_info['planes'] = i16(header_data[4:6])
file_info['bits'] = i16(header_data[6:8])
file_info['compression'] = self.RAW
file_info['palette_padding'] = 3
# ---------------------------------------------- Windows Bitmap v2 to v5
elif file_info['header_size'] in (40, 64, 108, 124): # v3, OS/2 v2, v4, v5
if file_info['header_size'] >= 40: # v3 and OS/2
file_info['y_flip'] = i8(header_data[7]) == 0xff
file_info['direction'] = 1 if file_info['y_flip'] else -1
file_info['width'] = i32(header_data[0:4])
file_info['height'] = i32(header_data[4:8]) if not file_info['y_flip'] else 2**32 - i32(header_data[4:8])
file_info['planes'] = i16(header_data[8:10])
file_info['bits'] = i16(header_data[10:12])
file_info['compression'] = i32(header_data[12:16])
file_info['data_size'] = i32(header_data[16:20]) # byte size of pixel data
file_info['pixels_per_meter'] = (i32(header_data[20:24]), i32(header_data[24:28]))
file_info['colors'] = i32(header_data[28:32])
file_info['palette_padding'] = 4
self.info["dpi"] = tuple(
map(lambda x: int(math.ceil(x / 39.3701)),
file_info['pixels_per_meter']))
if file_info['compression'] == self.BITFIELDS:
if len(header_data) >= 52:
for idx, mask in enumerate(['r_mask', 'g_mask', 'b_mask', 'a_mask']):
file_info[mask] = i32(header_data[36+idx*4:40+idx*4])
else:
# 40 byte headers only have the three components in the bitfields masks,
# ref: https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx
# See also https://github.com/python-pillow/Pillow/issues/1293
# There is a 4th component in the RGBQuad, in the alpha location, but it
# is listed as a reserved component, and it is not generally an alpha channel
file_info['a_mask'] = 0x0
for mask in ['r_mask', 'g_mask', 'b_mask']:
file_info[mask] = i32(read(4))
file_info['rgb_mask'] = (file_info['r_mask'], file_info['g_mask'], file_info['b_mask'])
file_info['rgba_mask'] = (file_info['r_mask'], file_info['g_mask'], file_info['b_mask'], file_info['a_mask'])
else:
raise IOError("Unsupported BMP header type (%d)" % file_info['header_size'])
# ------------------ Special case : header is reported 40, which
# ---------------------- is shorter than real size for bpp >= 16
self.size = file_info['width'], file_info['height']
# -------- If color count was not found in the header, compute from bits
file_info['colors'] = file_info['colors'] if file_info.get('colors', 0) else (1 << file_info['bits'])
# -------------------------------- Check abnormal values for DOS attacks
if file_info['width'] * file_info['height'] > 2**31:
raise IOError("Unsupported BMP Size: (%dx%d)" % self.size)
# ----------------------- Check bit depth for unusual unsupported values
self.mode, raw_mode = BIT2MODE.get(file_info['bits'], (None, None))
if self.mode is None:
raise IOError("Unsupported BMP pixel depth (%d)" % file_info['bits'])
# ----------------- Process BMP with Bitfields compression (not palette)
if file_info['compression'] == self.BITFIELDS:
SUPPORTED = {
32: [(0xff0000, 0xff00, 0xff, 0x0), (0xff0000, 0xff00, 0xff, 0xff000000), (0x0, 0x0, 0x0, 0x0), (0xff000000, 0xff0000, 0xff00, 0x0) ],
24: [(0xff0000, 0xff00, 0xff)],
16: [(0xf800, 0x7e0, 0x1f), (0x7c00, 0x3e0, 0x1f)]
}
MASK_MODES = {
(32, (0xff0000, 0xff00, 0xff, 0x0)): "BGRX",
(32, (0xff000000, 0xff0000, 0xff00, 0x0)): "XBGR",
(32, (0xff0000, 0xff00, 0xff, 0xff000000)): "BGRA",
(32, (0x0, 0x0, 0x0, 0x0)): "BGRA",
(24, (0xff0000, 0xff00, 0xff)): "BGR",
(16, (0xf800, 0x7e0, 0x1f)): "BGR;16",
(16, (0x7c00, 0x3e0, 0x1f)): "BGR;15"
}
if file_info['bits'] in SUPPORTED:
if file_info['bits'] == 32 and file_info['rgba_mask'] in SUPPORTED[file_info['bits']]:
raw_mode = MASK_MODES[(file_info['bits'], file_info['rgba_mask'])]
self.mode = "RGBA" if raw_mode in ("BGRA",) else self.mode
elif file_info['bits'] in (24, 16) and file_info['rgb_mask'] in SUPPORTED[file_info['bits']]:
raw_mode = MASK_MODES[(file_info['bits'], file_info['rgb_mask'])]
else:
raise IOError("Unsupported BMP bitfields layout")
else:
raise IOError("Unsupported BMP bitfields layout")
elif file_info['compression'] == self.RAW:
if file_info['bits'] == 32 and header == 22: # 32-bit .cur offset
raw_mode, self.mode = "BGRA", "RGBA"
else:
raise IOError("Unsupported BMP compression (%d)" % file_info['compression'])
# ---------------- Once the header is processed, process the palette/LUT
if self.mode == "P": # Paletted for 1, 4 and 8 bit images
# ----------------------------------------------------- 1-bit images
if not (0 < file_info['colors'] <= 65536):
raise IOError("Unsupported BMP Palette size (%d)" % file_info['colors'])
else:
padding = file_info['palette_padding']
palette = read(padding * file_info['colors'])
greyscale = True
indices = (0, 255) if file_info['colors'] == 2 else list(range(file_info['colors']))
# ------------------ Check if greyscale and ignore palette if so
for ind, val in enumerate(indices):
rgb = palette[ind*padding:ind*padding + 3]
if rgb != o8(val) * 3:
greyscale = False
# -------- If all colors are grey, white or black, ditch palette
if greyscale:
self.mode = "1" if file_info['colors'] == 2 else "L"
raw_mode = self.mode
else:
self.mode = "P"
self.palette = ImagePalette.raw("BGRX" if padding == 4 else "BGR", palette)
# ----------------------------- Finally set the tile data for the plugin
self.info['compression'] = file_info['compression']
self.tile = [('raw', (0, 0, file_info['width'], file_info['height']), offset or self.fp.tell(),
(raw_mode, ((file_info['width'] * file_info['bits'] + 31) >> 3) & (~3), file_info['direction'])
)]
def _open(self):
""" Open file, check magic number and read header """
# read 14 bytes: magic number, filesize, reserved, header final offset
head_data = self.fp.read(14)
# choke if the file does not have the required magic bytes
if head_data[0:2] != b"BM":
raise SyntaxError("Not a BMP file")
# read the start position of the BMP image data (u32)
offset = i32(head_data[10:14])
# load bitmap information (offset=raster info)
self._bitmap(offset=offset)
# ==============================================================================
# Image plugin for the DIB format (BMP alias)
# ==============================================================================
class DibImageFile(BmpImageFile):
format = "DIB"
format_description = "Windows Bitmap"
def _open(self):
self._bitmap()
#
# --------------------------------------------------------------------
# Write BMP file
SAVE = {
"1": ("1", 1, 2),
"L": ("L", 8, 256),
"P": ("P", 8, 256),
"RGB": ("BGR", 24, 0),
"RGBA": ("BGRA", 32, 0),
}
def _save(im, fp, filename, check=0):
try:
rawmode, bits, colors = SAVE[im.mode]
except KeyError:
raise IOError("cannot write mode %s as BMP" % im.mode)
if check:
return check
info = im.encoderinfo
dpi = info.get("dpi", (96, 96))
# 1 meter == 39.3701 inches
ppm = tuple(map(lambda x: int(x * 39.3701), dpi))
stride = ((im.size[0]*bits+7)//8+3) & (~3)
header = 40 # or 64 for OS/2 version 2
offset = 14 + header + colors * 4
image = stride * im.size[1]
# bitmap header
fp.write(b"BM" + # file type (magic)
o32(offset+image) + # file size
o32(0) + # reserved
o32(offset)) # image data offset
# bitmap info header
fp.write(o32(header) + # info header size
o32(im.size[0]) + # width
o32(im.size[1]) + # height
o16(1) + # planes
o16(bits) + # depth
o32(0) + # compression (0=uncompressed)
o32(image) + # size of bitmap
o32(ppm[0]) + o32(ppm[1]) + # resolution
o32(colors) + # colors used
o32(colors)) # colors important
fp.write(b"\0" * (header - 40)) # padding (for OS/2 format)
if im.mode == "1":
for i in (0, 255):
fp.write(o8(i) * 4)
elif im.mode == "L":
for i in range(256):
fp.write(o8(i) * 4)
elif im.mode == "P":
fp.write(im.im.getpalette("RGB", "BGRX"))
ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0,
(rawmode, stride, -1))])
#
# --------------------------------------------------------------------
# Registry
Image.register_open(BmpImageFile.format, BmpImageFile, _accept)
Image.register_save(BmpImageFile.format, _save)
Image.register_extension(BmpImageFile.format, ".bmp")
Image.register_mime(BmpImageFile.format, "image/bmp")
| [
"apocalips_war@yahoo.com"
] | apocalips_war@yahoo.com |
e9341d437778c2487fba00573c3a9bfe7235a23b | a3c3d0624f94ba27dfe7fb8168681aec3ffc330e | /MobileNet/MobileNet.py | 6ae9c9787cbfdaccecd6db7d582b570083bc0369 | [] | no_license | peterbengkui/DeepLearningFromScratch | 69615047284c812dc5c56e4026afd141bf18cf2f | dab3010a20dd78668bc2ce57b12efa4678cee9be | refs/heads/master | 2021-07-15T02:57:29.049443 | 2017-10-22T15:36:06 | 2017-10-22T15:36:06 | 107,921,633 | 1 | 0 | null | 2017-10-23T02:17:42 | 2017-10-23T02:17:42 | null | UTF-8 | Python | false | false | 3,169 | py | class DepthwiseSeparableConv2d(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, padding=0):
super(DepthwiseSeparableConv2d, self).__init__()
self.layer = nn.Sequential(
nn.Conv2d(in_channels=in_channels, out_channels=in_channels, kernel_size=kernel_size, stride=stride, padding=padding, groups=in_channels),
nn.BatchNorm2d(in_channels),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=1, stride=1, padding=0),
nn.BatchNorm2d(out_channels),
nn.ReLU(inplace=True)
)
def forward(self, x):
x = self.layer(x)
return(x)
class MobileNet(nn.Module):
def __init__(self, num_classes, alpha=1.0):
super(MobileNet, self).__init__()
self.conv0 = nn.Sequential(
nn.Conv2d(in_channels=3, out_channels=int(alpha * 32), kernel_size=3, stride=2, padding=1),
nn.BatchNorm2d(int(alpha * 32)),
nn.ReLU(inplace=True)
)
self.entry = nn.Sequential(
DepthwiseSeparableConv2d(in_channels=int(alpha * 32), out_channels=int(alpha * 64), stride=1, padding=1),
DepthwiseSeparableConv2d(in_channels=int(alpha * 64), out_channels=int(alpha * 128), stride=2, padding=1),
DepthwiseSeparableConv2d(in_channels=int(alpha * 128), out_channels=int(alpha * 128), stride=1, padding=1),
DepthwiseSeparableConv2d(in_channels=int(alpha * 128), out_channels=int(alpha * 256), stride=2, padding=1),
DepthwiseSeparableConv2d(in_channels=int(alpha * 256), out_channels=int(alpha * 256), stride=1, padding=1),
DepthwiseSeparableConv2d(in_channels=int(alpha * 256), out_channels=int(alpha * 512), stride=2, padding=1)
)
self.middle = nn.Sequential(
DepthwiseSeparableConv2d(in_channels=int(alpha * 512), out_channels=int(alpha * 512), stride=1, padding=1),
DepthwiseSeparableConv2d(in_channels=int(alpha * 512), out_channels=int(alpha * 512), stride=1, padding=1),
DepthwiseSeparableConv2d(in_channels=int(alpha * 512), out_channels=int(alpha * 512), stride=1, padding=1),
DepthwiseSeparableConv2d(in_channels=int(alpha * 512), out_channels=int(alpha * 512), stride=1, padding=1),
DepthwiseSeparableConv2d(in_channels=int(alpha * 512), out_channels=int(alpha * 512), stride=1, padding=1)
)
self.exit = nn.Sequential(
DepthwiseSeparableConv2d(in_channels=int(alpha * 512), out_channels=int(alpha * 1024), stride=2, padding=1),
DepthwiseSeparableConv2d(in_channels=int(alpha * 1024), out_channels=int(alpha * 1024), stride=1, padding=1)
)
self.avgpool = nn.AvgPool2d(kernel_size=7, stride=1)
self.classifier = nn.Linear(in_features=int(alpha * 1024), out_features=num_classes)
def forward(self, x):
x = self.conv0(x)
x = self.entry(x)
x = self.middle(x)
x = self.exit(x)
x = self.avgpool(x)
x = x.view(x.size(0), -1)
x = self.classifier(x)
return(x)
| [
"noreply@github.com"
] | noreply@github.com |
93735abfef6073ed07f5abfadc3ee58c18166d59 | f79546db4cf8c0debc2119df013e4b8109cc6b5b | /exercicios/secao4/Ex23.py | 0be35d368b1bae60fb4bbc4758515ccae780743f | [] | no_license | bruninhaout/Python-course-udemy | 1dc77b5acdcd2a35bb0b5edcbc55f7d95a00eba6 | 4a4ff1b4c468991f16beb3e90e15b5a45f33b251 | refs/heads/main | 2023-07-31T22:06:26.624585 | 2021-09-17T00:36:48 | 2021-09-17T00:36:48 | 370,856,126 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 102 | py | # metros em jardas
M = float(input('Valor em metros = '))
J = M/0.91
print(f'Valor em jardas = {J}')
| [
"noreply@github.com"
] | noreply@github.com |
ce4cf970a5df2967fa7d271221b787cf5f515add | ee517f04b0d6c44df5fb479066317cf2b088884d | /problems/Python problems, Computer Society, University of Oxford/session3/exercise9.py | 34ed2609950575a35fdd99d72b5c03e624ca3447 | [
"MIT"
] | permissive | antoni-wojcik/example-problems | bb4e147116c45986209cc8503e8172ffde057628 | c73cd4e076d965f1aee62b346e7e3df9637c0d2c | refs/heads/master | 2021-02-07T19:40:21.237091 | 2020-03-01T01:56:50 | 2020-03-01T01:56:50 | 244,069,379 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 689 | py | def filtered_text(xs):
# Return a copy of xs that only contains the strings that start with a
# lowercase letter
out = []
for i in xs:
if i[0] >= "a":
out.append(i)
return out
def test(test_case, expected):
actual = filtered_text(test_case)
if actual == expected:
print("Passed test for " + str(test_case))
else:
print("Didn't pass test for " + str(test_case))
print("The result was " + str(actual) + " but it should have been " + str(expected))
test([], [])
test(["Learn", "to", "Code"], ["to"])
test(["Oxford", "University", "Computer", "Society"], [])
test(["learn", "to", "code"], ["learn", "to", "code"])
| [
"antekwojcik2@gmail.com"
] | antekwojcik2@gmail.com |
e541861b7ebd4366fcc5d819594a0a6549018ca1 | c74cf0ea073dee424d19ee9fc765af94d80244cb | /reading_from_file/birthday_contain_in_graham.py | e8adfb41220cbeb1801f32580c9ed268021b561b | [] | no_license | sas0112/chapter_10 | 6126b65e3d0ba6fc4cbdb3c86b64f95f965af29f | 48cd9670b72bc94a37bf8e26f18f07b084d9b605 | refs/heads/master | 2020-06-30T13:17:53.595227 | 2019-08-06T15:10:20 | 2019-08-06T15:10:20 | 200,837,619 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 407 | py | file_name = "one_million_number"
with open(file_name) as file_object:
lines = file_object.readlines()
graham = ""
for line in lines:
graham += line
birthday = 981107
birthday = str(birthday)
if birthday in graham:
print("Your birthday appears in the first 250 thousand digits of graham's number")
else:
print("Your birthday seems not in the first 250 thousand digits of graham's number") | [
"dell3000@126.com"
] | dell3000@126.com |
3f8bb489a918c6a98a686eae062933c73f459cee | 5c9511bc538045389e28183b0bc65d9b6cf51b85 | /12/tvar.py | 72b9846ddf9bc7bdfe7bcd23a69b99970c108947 | [] | no_license | balonovatereza/Pyladies-repository | 0174360835dd1094532118eda1e2821da8108f77 | d4ad0dae829d21a56e6fb86d7a7dcfdc9387ae27 | refs/heads/master | 2020-05-01T09:25:25.300245 | 2019-05-18T09:10:50 | 2019-05-18T09:10:50 | 177,399,871 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,568 | py | from math import pi
from itertools import combinations
class Tvar:
def obvod(self): # nadefinovano pro pripad napr. primky
return
def obsah(self): # nadefinovano pro pripad napr. primky
return
def rozdil_obsahu(self, jiny_tvar):
return abs(self.obsah() - jiny_tvar.obsah())
class Primka(Tvar):
def __init__(self, delka):
self.delka = delka
class Ctverec(Tvar):
def __init__(self, strana):
self.strana = strana
def obvod(self):
return 4 * self.strana
def obsah(self):
return self.strana ** 2
class Kruh(Tvar):
def __init__(self, polomer):
self.polomer = polomer
def obvod(self):
return 2 * pi * self.polomer
def obsah(self):
return pi * self.polomer ** 2
class Obdelnik(Tvar):
def __init__(self, strana_a, strana_b):
self.strana_a = strana_a
self.strana_b = strana_b
def obvod(self):
return 2 * self.strana_a + 2 * self.strana_b
def obsah(self):
return self.strana_a * self.strana_b
p1 = Primka(4)
print('Obvod primky je:', p1.obvod())
seznam_tvaru = [Ctverec(2), Ctverec(4), Kruh(2), Kruh(4), Obdelnik(2, 4), Obdelnik(4, 6)]
seznam_kombinaci = list(combinations(seznam_tvaru, 2))
# for tvar in seznam_tvaru:
# for tvar_odcitany in seznam_tvaru:
# print(tvar.rozdil_obsahu(tvar_odcitany))
for cislo, kombinace_tvaru in enumerate(seznam_kombinaci, 1):
print('Rozdil obsahu {}. kombinace'.format(cislo))
print(kombinace_tvaru[0].rozdil_obsahu(kombinace_tvaru[1]))
| [
"balonova.tereza@seznam.cz"
] | balonova.tereza@seznam.cz |
34d051133a7e6374db933c13826c816b874cbd0e | 5c66b5f790aede1e32280469b412e4b12bbcb77e | /sdk/python/tests/test_mysql_connect.py | 26bf3371b6c561d3482868145ae919c97a2311dd | [
"Apache-2.0"
] | permissive | Jocix123/approzium | dec36aec8c2e95f08ef3f8a7d78d4e8a4b737ff3 | 76415d2f31323dc4821238a34b9380fc68240cca | refs/heads/main | 2022-11-18T18:20:20.026327 | 2020-07-13T23:28:39 | 2020-07-13T23:28:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 897 | py | from os import environ
import pytest
import approzium
from approzium.mysql.connector import connect
from approzium.mysql.connector.pooling import MySQLConnectionPool
# use Psycopg2 defined test environment variables
connopts = {
"user": environ["PSYCOPG2_TESTDB_USER"],
"host": "dbmysqlsha1",
"use_pure": True,
}
@pytest.mark.parametrize("auth", pytest.authclients)
def test_connect(auth):
conn = connect(**connopts, authenticator=auth)
cur = conn.cursor()
cur.execute("SELECT 1")
result = next(cur)
assert result == (1,)
@pytest.mark.parametrize("auth", pytest.authclients)
def test_pooling(auth):
approzium.default_auth_client = auth
cnxpool = MySQLConnectionPool(pool_name="testpool", pool_size=3, **connopts)
conn = cnxpool.get_connection()
cur = conn.cursor()
cur.execute("SELECT 1")
result = next(cur)
assert result == (1,)
| [
"noreply@github.com"
] | noreply@github.com |
90b168dab4d543118fa9b45f75282474201a75ef | aa0d50b90770c739e783c4e083ae256d6a3a2205 | /crawler/tryon_1/items.py | 1ec03d29a53c0ceabf3e0d7d6a798c2e0b861c1a | [
"Apache-2.0"
] | permissive | merlintang/crawler-images | 5565169c3d5649fee6162dd4f6b376189137c74a | afa8ad94416e7c3b73a9b5ae57779974f8b2793e | refs/heads/master | 2020-03-22T12:38:07.332135 | 2018-07-07T04:20:56 | 2018-07-07T04:20:56 | 140,052,075 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 534 | py | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class TryonItem(scrapy.Item):
# define the fields for your item here like:
#clothes name
id = scrapy.Field()
name = scrapy.Field()
designer = scrapy.Field()
imageUrl = scrapy.Field()
imageUrlOthers = scrapy.Field()
details = scrapy.Field()
url = scrapy.Field()
price = scrapy.Field()
category = scrapy.Field()
| [
"mtang@hortonworks.com"
] | mtang@hortonworks.com |
cd3078e7706fa40b6bd01da9561abea9522fbb81 | bdd0ed5177f4fea947c44f7195e82561a31e3c29 | /setup.py | d86ec2b445ff8f31f17b028d5c6d998b56f7592a | [] | no_license | SerenaFeng/grafana-testapi | 0bd0eb1f3106efbae2ec417d9d9b9031ec36b3dc | 04a718f37e63b506f45802d62b41b190eb69d12d | refs/heads/master | 2020-03-18T14:00:41.341123 | 2018-07-04T02:57:02 | 2018-07-04T02:57:02 | 134,824,077 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 181 | py | # -*- coding: utf-8 -*-
import setuptools
try:
import multiprocessing # noqa
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr>=2.0.0'],
pbr=True)
# | [
"feng.xiaowei@zte.com.cn"
] | feng.xiaowei@zte.com.cn |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.