blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2
values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313
values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107
values | src_encoding stringclasses 20
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 4 6.02M | extension stringclasses 78
values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
800a7638b90f8e7805a7a765fcec4663a227b318 | 39e0db79daff7708cd73bf710c7958d621f4d758 | /toolroom/wsgi.py~ | 282dd64ceae3a0d862a4dcfa842741e55d4a4c73 | [] | no_license | Ahshida/onlinecatalogue | 7416c5635fb15c257c8a9a625a728bdfaa69703b | 52950b351397ff19c6c62371f17be9fe10489e30 | refs/heads/master | 2021-06-15T19:49:18.630387 | 2016-10-24T08:07:45 | 2016-10-24T08:07:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 430 | """
WSGI config for toolroom project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "toolroom.settings")
application = Cling(get_wsgi_application())
| [
"noreply@github.com"
] | noreply@github.com | |
a1b3558b03ae177a9ec695640ddab9481f1cfb65 | 093b9569be9d1c4e5daf92efbebc38f680917b2d | /.history/base/views_20210829090123.py | bfec5c7dacaf07d85a118c58236ec494edd47b23 | [] | no_license | Justin-Panagos/todoList | 95b1e97ff71af1b0be58e7f8937d726a687cea4d | 10539219b59fcea00f8b19a406db3d4c3f4d289e | refs/heads/master | 2023-08-04T13:27:13.309769 | 2021-08-29T14:06:43 | 2021-08-29T14:06:43 | 400,827,602 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,107 | py | from django.shortcuts import render
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView, FormView
from django.urls import reverse_lazy
from django.contrib.auth.views import LoginView
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth import login
from .models import Task
class CustoomLoginView(LoginView):
    """Login page (class name is misspelled -- should be CustomLoginView --
    but renaming would break any import in urls.py)."""
    template_name = 'base/login.html'
    # NOTE(review): LoginView does not use a `fields` attribute; presumably
    # copied from a model-form view -- confirm it can be removed.
    fields = '__all__'
    redirect_authenticated_user = True

    def get_success_url(self):
        # Always land on the task list after a successful login.
        return reverse_lazy('tasks')
class RegisterPage(FormView):
    """Sign-up page: creates a new user account and logs the user in."""
    template_name = 'base/register.html'
    form_class = UserCreationForm
    # NOTE(review): `redirect_authenticated_user` is a LoginView feature and
    # has no effect on a plain FormView; kept for parity -- confirm intent.
    redirect_authenticated_user = True
    success_url = reverse_lazy('tasks')

    def form_valid(self, form):
        # FIX: this hook was misnamed `form_validate`, which Django never
        # calls, and it invoked login() with no arguments (a TypeError).
        # Save the new user, authenticate the session, then let FormView
        # redirect to success_url.
        user = form.save()
        if user is not None:
            login(self.request, user)
        return super(RegisterPage, self).form_valid(form)
class TaskList( LoginRequiredMixin, ListView):
    """Authenticated list of the current user's tasks."""
    model = Task
    context_object_name = 'tasks'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # Restrict the listing to tasks owned by the logged-in user.
        context['tasks'] = context['tasks'].filter(user=self.request.user)
        # Count of still-incomplete tasks, shown in the template header.
        context['count'] = context['tasks'].filter(complete=False).count()
        return context
class TaskDetail(LoginRequiredMixin, DetailView):
    """Read-only detail page for a single task."""
    model = Task
    context_object_name = 'task'
    template_name = 'base/task.html'
class TaskCreate(LoginRequiredMixin, CreateView):
    """Form view that creates a task owned by the requesting user."""
    model = Task
    fields = ['title','description','complete']
    success_url = reverse_lazy('tasks')

    def form_valid(self, form):
        # Attach the logged-in user as the owner before the object is saved.
        form.instance.user = self.request.user
        return super(TaskCreate, self).form_valid(form)
class TaskUpdate( LoginRequiredMixin, UpdateView):
    """Edit form for an existing task."""
    model = Task
    fields = ['title','description','complete']
    success_url = reverse_lazy('tasks')
class TaskDelete(LoginRequiredMixin, DeleteView):
    """Confirm-and-delete view for a task."""
    model = Task
    context_object_name = 'task'
    success_url = reverse_lazy('tasks') | [
"justpanagos@gmail.com"
] | justpanagos@gmail.com |
de6f081b660049b191f0e6ed0195cbdd809a17cb | 77d3bc1a0e829339190aba999729023028e7a8e7 | /api/views.py | 698301a04810148ccab8b07c0452d0b77b66695f | [] | no_license | piymis/team-member-api | 5f418241443aa7d8fdaef35bdfd4026ed199c4fd | d2292dcf498e55e6849dd6fd010cdacf62664820 | refs/heads/master | 2020-04-02T12:51:55.194362 | 2018-11-04T21:09:21 | 2018-11-04T21:09:21 | 154,455,467 | 1 | 0 | null | 2018-11-04T21:09:22 | 2018-10-24T07:12:12 | null | UTF-8 | Python | false | false | 404 | py | from rest_framework import generics
from api.models import TeamMember
from api.serializers import TeamMemberSerializer
class TeamMemberList(generics.ListCreateAPIView):
    """GET: list all team members; POST: create a new team member."""
    queryset = TeamMember.objects.all()
    serializer_class = TeamMemberSerializer
class TeamMemberDetail(generics.RetrieveUpdateDestroyAPIView):
    """GET/PUT/PATCH/DELETE a single team member selected by primary key."""
    queryset = TeamMember.objects.all()
    serializer_class = TeamMemberSerializer
| [
"piyush.mishra@amadeus.com"
] | piyush.mishra@amadeus.com |
ee5c4b590e8ebe79e91bd36fe90998271da2f27e | 17661d859860ef5bf8d667aed95917c6aeb1ed27 | /keybindings/autokey/data/Mac/save.py | b4aa92d2423c5ccd6886afaf0068de68f948fe43 | [
"MIT"
] | permissive | guoyiteng/linux-for-macos-user | 92f491a69ce29feb980f1acd0f76bede2de79014 | 705baec9ddffb9ab73172cdc2b272ab123b1e402 | refs/heads/master | 2020-08-27T02:53:10.893074 | 2019-11-13T01:35:59 | 2019-11-13T01:35:59 | 217,224,786 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 107 | py | store.set_global_value('hotkey', '<meta>+s')
# AutoKey "save" macro: report Ctrl+S as the keystroke to emit, then
# delegate to the shared 'combo' script which actually sends it.
engine.set_return_value('<ctrl>+s')
engine.run_script('combo') | [
"guoyiteng@gmail.com"
] | guoyiteng@gmail.com |
21a1af074033e23cadb65641d37da61de72e1f0f | 8ea03815a13ada885363725b5acb0fb5a295a139 | /tools/negative.py | 0db189e20564de422d8cdbc7c60a48acef5cbfe5 | [
"MIT"
] | permissive | Akshay-Kanawade/Image_Augmentation_Tool | c72faafe29ab5088dbc52be779c11cdf6368e887 | bc8fef1c8f554f866b7f1e15de6ffa286617bb7e | refs/heads/main | 2023-07-15T07:16:25.150980 | 2021-08-30T14:38:27 | 2021-08-30T14:38:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,827 | py | # import library
import cv2
import numpy as np
def Read(image_path):
    """Load an image from disk into a pixel array.

    :param image_path: path of the image file to read
    :return: ndarray of decoded pixels (cv2.imread returns None when the
        file cannot be opened)
    """
    # cv2.imread(filename) decodes the file into a numpy array (BGR order
    # for color images).
    return cv2.imread(image_path)
def Negative(img):
    """
    Produce the photographic negative of an image.

    Every pixel value p is mapped to 255 - p.  Works for both color
    (H x W x C) and grayscale (H x W) uint8 arrays.

    :param img: ndarray of uint8 pixel values
    :return: ndarray of the same shape with all intensities inverted
    """
    S = 255  # maximum intensity of an 8-bit channel
    # FIX: the original tested `shape[2] == "rgb"`, comparing the integer
    # channel count against a string -- always False, so the cv2.split/merge
    # branch was dead code.  NumPy broadcasting inverts every channel of a
    # color or grayscale array in one vectorized expression.
    return S - img
def Show(orignal_img, negative_image):
    """
    Display the original and inverted images in two windows until a key
    is pressed, then close all windows.
    :param orignal_img: orignal input image
    :param negative_image: inverted image
    :return: nothing
    """
    # use imshow() function to show the images
    # syntax : cv2.imshow(winname, mat)
    cv2.imshow("Original_Image", orignal_img)
    cv2.imshow("Negative_Image", negative_image)
    cv2.waitKey(0)  # block until any key is pressed
    cv2.destroyAllWindows()
# main function
def main():
    """
    Entry point: prompt for an image path, compute its negative, and
    display both versions side by side.
    :return: nothing
    """
    image_path = input("Enter path of image:")
    # call read function to read an image
    img = Read(image_path)
    # call negative function to invert image
    negative_image = Negative(img)
    # call show function to show original image and negative_image
    Show(img, negative_image)


if __name__ == "__main__":
    main()
| [
"akshay_kanawade"
] | akshay_kanawade |
a6b331701eae7b10f4e48c40159d3e176e77fb9a | 07e672a7674f5ec9463fc7a91f1d19a6d0c1e946 | /forlearning.py | aeca099ccb35d73affff5775aef9a8cdadc47ba8 | [] | no_license | Yensj/learn_python | 6dcdc208ecf6cb5869b54f83d4db023ae7e9cef7 | 942f01765f8e1d90e25fb96591c7c224540f5493 | refs/heads/master | 2020-05-22T00:32:31.981030 | 2017-03-18T13:27:26 | 2017-03-18T13:27:26 | 84,655,439 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 92 | py | #...
n_numbers = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]  # sample values 1..10
# Print each value incremented by one (outputs 2..11, one per line).
for score in n_numbers:
    print(score + 1) | [
"Yensj@Artems-MacBook-Pro.local"
] | Yensj@Artems-MacBook-Pro.local |
f20d475928df69d09c6d252002153880c58a7d63 | 1573d69b877d83c92387631cecf74005289d299c | /modelo.py | 8ce734baf130ec1ee179f7c80d51363f0dc91ddc | [] | no_license | Yuricoelho/playlist-programa-de-tv | f73e1f7d9d6aef66ca874e6587f25a2cbc584392 | a7f6e059fb28c078447d49c61552b3cb41e7e195 | refs/heads/master | 2023-04-28T12:18:56.606236 | 2021-05-06T13:34:47 | 2021-05-06T13:34:47 | 364,390,712 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,997 | py | class Programa:
def __init__(self, nome, ano):
self._nome = nome.title()
self.ano = ano
self._likes = 0
@property
def likes(self):
return self._likes
def dar_like(self):
self._likes += 1
@property
def nome(self):
return self._nome
@nome.setter
def nome(self, novo_nome):
self._nome = novo_nome.title()
def __str__(self):
return f'{self._nome} - {self.ano} - {self._likes} Likes'
class Filme(Programa):
    """A movie: a Programa plus its running time in minutes."""

    def __init__(self, nome, ano, duracao):
        super().__init__(nome, ano)
        self.duracao = duracao

    def __str__(self):
        cabecalho = '{} - {}'.format(self._nome, self.ano)
        return '{} - {} min - {} Likes'.format(cabecalho, self.duracao, self._likes)
class Serie(Programa):
    """A TV series: a Programa plus its number of seasons."""

    def __init__(self, nome, ano, temporadas):
        super().__init__(nome, ano)
        self.temporadas = temporadas

    def __str__(self):
        return '{} - {} - {} temporadas - {} Likes'.format(
            self._nome, self.ano, self.temporadas, self._likes)
class Playlist:
    """A named, sequence-like collection of programs.

    Implements __getitem__ and __len__, so instances support indexing,
    iteration, `in`, and len().
    """

    def __init__(self, nome, programas):
        self.nome = nome
        self._programas = programas

    def __len__(self):
        return len(self._programas)

    def __getitem__(self, item):
        # Indexing (and therefore iteration) delegates to the backing list.
        return self._programas[item]

    @property
    def listagem(self):
        """The underlying list of programs (read-only view)."""
        return self._programas
# --- demo data: a few titles and likes, then a playlist walk-through -----
vingadores = Filme("vingadores - guerra infinita", 2018, 160)
atlanta = Serie("atlanta", 2018, 2)
tmep = Filme('Todo mundo em pânico', 1999, 100)
demolidor = Serie('Demolidor', 2016, 2)
vingadores.dar_like()
tmep.dar_like()
tmep.dar_like()
tmep.dar_like()
tmep.dar_like()
demolidor.dar_like()
demolidor.dar_like()
atlanta.dar_like()
atlanta.dar_like()
atlanta.dar_like()
filmes_e_series = [vingadores, atlanta, tmep]
playlist_fim_de_semana = Playlist('fim de semana', filmes_e_series)
print(f'Tamanho do playlist: {len(playlist_fim_de_semana)}')
# `in` works via Playlist.__getitem__ (linear scan); demolidor was not added.
print(f'Tá ou não tá? {demolidor in playlist_fim_de_semana}')
for programa in playlist_fim_de_semana:
    print(programa)
| [
"yuricoelho33@gmail.com"
] | yuricoelho33@gmail.com |
cd7dcf61848a7044370577c55d676665e78d5842 | c4df379f7d28410189907d07327a2894c5af2964 | /genetic.py | e56b185f98733c6f4521f5c353032040e60b6140 | [
"MIT"
] | permissive | zoso95/genetic-algorithm-fractals | c8dee664e77bdfaf91cd8b2e14b948c480bc92a5 | 881f3a87fc2c70583d8430bdb515cd73fae40fe3 | refs/heads/master | 2021-01-25T10:15:42.823125 | 2018-02-28T21:51:51 | 2018-02-28T21:51:51 | 123,345,693 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,945 | py | import numpy as np
from scipy.stats import hmean
import fractal
def crossover_mean(list_of_coef):
    """Blend parent coefficient vectors via their element-wise mean."""
    return np.vstack(list_of_coef).mean(axis=0)
def crossover_hmean(list_of_coef):
    """Blend parent coefficient vectors via their element-wise harmonic mean."""
    return hmean(np.vstack(list_of_coef), axis=0)
def crossover_med(list_of_coef):
    """Blend parent coefficient vectors via their element-wise median."""
    return np.median(np.vstack(list_of_coef), axis=0)
def crossover_sample(list_of_coef):
    """Build a child by drawing each coefficient from a random parent."""
    n_parents = len(list_of_coef)
    child = np.zeros(shape=list_of_coef[0].shape)
    for pos in range(len(child)):
        # Pick one parent uniformly at random for this position.
        chosen = np.random.choice(np.arange(n_parents), size=1)[0]
        child[pos] = list_of_coef[chosen][pos]
    return child
def evolve(parents, options):
    """Produce the next generation from the selected `parents`.

    The output mixes (1) crossovers of randomly chosen parents, (2) brand
    new random individuals for diversity, and (3) mutated copies of every
    parent, then subsamples the pool down to `num_out` members.

    :param parents: list of 1-D coefficient arrays
    :param options: dict of knobs -- crossover_function, num_crossovers,
        num_parents, num_new, mutation_rate, num_out (all optional)
    :return: list of coefficient arrays for the new generation
    """
    output = []
    """
    Crossover winners
    """
    crossover_function = options.get("crossover_function", crossover_mean)
    num_crossovers = options.get("num_crossovers", len(parents)*(len(parents) - 1))
    num_parents = options.get("num_parents", 2)
    for _ in range(num_crossovers):
        # Draw distinct parents at random and blend them into one child.
        ind = [i for i in np.random.choice(np.arange(len(parents)), replace=False, size=min(num_parents, len(parents)))]
        p = [parents[i] for i in ind]
        output.append(crossover_function(p))
    """Add some diversity"""
    for i in range(options.get("num_new", 5)):
        output.append(fractal.get_random_coef())
    """Mutate the parents"""
    mutation_rate = options.get("mutation_rate", 0.1)
    for p in parents:
        output.append(mutation(p, s=mutation_rate))
    # Randomly select output
    num_out = options.get("num_out", len(output))
    num_out = min(num_out, len(output))
    ind = np.arange(len(output))
    rand_ind = np.random.choice(ind, replace=False, size=num_out)
    final = list(np.array(output)[rand_ind])
    return final
def mutation(coef, s=0.1):
    """Return a mutated copy of `coef`.

    Gaussian noise (scale `s`) is added only to the non-zero entries, so
    polynomial terms that were switched off stay off.  The input array is
    left untouched.
    """
    mutated = coef.copy()
    active = coef != 0
    mutated[active] += np.random.normal(scale=s, size=mutated[active].shape)
    return mutated
| [
"geoff.bradway@gmail.com"
] | geoff.bradway@gmail.com |
df8acf5bb865539d3309fae8e1493abda40caa64 | 457dcb293083cd57ccd330a51f7c709c4e2ed040 | /week1/codes/mt17144_problem2_1.1.py | 969a1fa28d70ecb1c11716292e46a8cf24f481a7 | [] | no_license | meghalD/Algorithms-for-Computational-Biology | 6c0ce0eb42cf2a3f5e4e5d2dce8315fdc44b2d5e | 6863faae8bad54e9a02e51073eed4e1c6edf6334 | refs/heads/master | 2022-12-08T20:29:26.315051 | 2020-09-15T07:50:23 | 2020-09-15T07:50:23 | 291,276,899 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 698 | py |
import timeit
start = timeit.default_timer()  # wall-clock start for the timing report
l=['-','A','C','G','T']  # alphabet: gap symbol plus the four DNA bases
set=[]  # NOTE(review): shadows the builtin `set` type
def permutation(lst, i):
    """Print every permutation of lst[i:] (prefix lst[:i] held fixed).

    Classic swap-based recursion: each position i is swapped with every
    candidate, the tail is permuted recursively, then the swap is undone
    so the list is restored before the next candidate.
    """
    last = len(lst) - 1
    if i == last:
        print(lst)
        return
    for k in range(i, len(lst)):
        lst[i], lst[k] = lst[k], lst[i]
        permutation(lst, i + 1)
        lst[i], lst[k] = lst[k], lst[i]  # backtrack: restore order
def powerset(xs):
    """Return every subset of `xs` as a list of lists.

    Subsets appear in binary-counting order: for each new element, all
    previously built subsets are duplicated with that element appended.
    """
    subsets = [[]]
    for element in xs:
        subsets += [existing + [element] for existing in subsets]
    return subsets
k=(int)(input(""))  # desired subset size, read from stdin
set=powerset(l)  # NOTE(review): rebinding the shadowed builtin `set`
# Print every ordering (permutation) of each k-sized subset.
for i in range(len(set)):
    if len(set[i])==k:
        permutation(set[i],0)
stop = timeit.default_timer()
print()
print("time taken:"+str(stop - start)) | [
"noreply@github.com"
] | noreply@github.com |
eff6b7e3feaa7bdb6093751ef539172c59891a13 | 28f04941ef425e6ad5b846c22c66d3dcc7d76a9e | /plugins/macosx.py | 687c9e8e8a398e024601f5ce6657ec850650923c | [
"Apache-2.0"
] | permissive | wt/repo-digg-dev-hackbuilder | 627e693f521a47baaf240b51165322fe3a355f82 | 7e2068f3a00f4096e8e6e19a5c12362f4d76e10b | refs/heads/master | 2021-01-18T14:29:00.076593 | 2013-10-23T11:04:41 | 2013-10-23T11:04:41 | 3,232,551 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,107 | py | # Copyright 2011 Digg, Inc.
# Copyright 2012 Ooyala, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os.path
import subprocess
import digg.dev.hackbuilder.target
import digg.dev.hackbuilder.plugin_utils
from digg.dev.hackbuilder.plugins import build_file_targets
from digg.dev.hackbuilder.plugin_utils \
import normal_dep_targets_from_dep_strings
from digg.dev.hackbuilder.plugin_utils import BinaryLauncherBuilder
class MacPackageBuilder(digg.dev.hackbuilder.plugin_utils.PackageBuilder):
    """Builds a Mac OS X installer package (.pkg) for a package target."""

    def __init__(self, target):
        digg.dev.hackbuilder.plugin_utils.PackageBuilder.__init__(self, target)
        # Staging directory whose contents become the installed hierarchy.
        self.full_package_hierarchy_dir = os.path.join(
                self.target.target_build_dir, 'macosx_hierarchy')

    def do_pre_build_package_binary_install(self, builders):
        """Copy built binary launchers into the package staging hierarchy."""
        logging.info('Copying built binaries to package hierarchy for %s',
                self.target.target_id)
        package_data = {
                'bin_path': '/bin',
                'sbin_path': '/sbin',
                'lib_path': '/Library',
                }
        for dep_id in self.target.dep_ids:
            builder = builders[dep_id]
            # Only binary-launcher dependencies are installed into the pkg.
            if isinstance(builder, BinaryLauncherBuilder):
                builder.do_pre_build_package_binary_install(builders, self,
                        **package_data)

    def do_build_package_work(self):
        self._create_mac_binary_package()

    def _create_mac_binary_package(self):
        """Invoke pkgbuild to produce the .pkg file.

        Raises:
            digg.dev.hackbuilder.errors.Error: if pkgbuild exits non-zero.
        """
        logging.info('Creating Mac binary package for %s',
                self.target.target_id)
        package_file_path = os.path.join(self.target.package_root,
                self.target.pkg_filename)
        proc = subprocess.Popen(
                ('pkgbuild',
                 '--root', self.full_package_hierarchy_dir,
                 '--identifier', 'zyzzx.' + self.target.target_id.name,
                 '--version', self.target.version,
                 '--install-location', '/',
                 '--filter', r'\.DS_Store',
                 package_file_path,
                ),
                stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
        (stdoutdata, stderrdata) = proc.communicate()
        retcode = proc.returncode
        if retcode != 0:
            # FIX: all failure logging and the raise are scoped under the
            # retcode check; previously the unconditional raise made the
            # success log below unreachable.
            logging.info('Mac binary package creation failed with exit '
                    'code = %s', retcode)
            logging.info('Mac binary package creation stdout:\n%s',
                    stdoutdata)
            logging.info('Mac binary package creation stderr:\n%s',
                    stderrdata)
            raise digg.dev.hackbuilder.errors.Error(
                    'packagemaker call failed with exitcode %s', retcode)
        logging.info('Package build at: %s', package_file_path)
class MacPackageBuildTarget(
        digg.dev.hackbuilder.target.PackageBuildTarget):
    """Build target describing a Mac OS X .pkg to be produced."""
    builder_class = MacPackageBuilder

    def __init__(self, normalizer, target_id, pkg_filebase, dep_ids=None,
            version=None):
        digg.dev.hackbuilder.target.PackageBuildTarget.__init__(self,
                normalizer, target_id, dep_ids=dep_ids, version=version)
        # A path component in the base name would escape the output dir.
        if os.path.basename(pkg_filebase) != pkg_filebase:
            raise digg.dev.hackbuilder.errors.Error(
                    'Pkg_filebase in target (%s) cannot contain a path '
                    'separator.', target_id)
        self.pkg_filebase = pkg_filebase
        # Final artifact name, e.g. "mytool-1.2.pkg".
        self.pkg_filename = '{0}-{1}.pkg'.format(pkg_filebase, version)
def build_file_mac_pkg(repo_path, normalizer):
    """Return the `mac_pkg` BUILD-file rule bound to this repo path."""
    def mac_pkg(name, deps=(), version=None, pkg_filebase=None):
        """BUILD rule: declare a Mac .pkg target and register it."""
        logging.debug('Build file target, Mac package: %s', name)
        target_id = digg.dev.hackbuilder.target.TargetID(repo_path, name)
        dep_target_ids = normal_dep_targets_from_dep_strings(repo_path,
                normalizer, deps)
        # pkg_filebase is mandatory -- it names the produced .pkg file.
        if pkg_filebase is None:
            raise digg.dev.hackbuilder.errors.Error(
                    'No pkg_filebase specified for mac package (%s)',
                    target_id)
        mac_pkg_target = MacPackageBuildTarget(normalizer, target_id,
                dep_ids=dep_target_ids, version=version,
                pkg_filebase=pkg_filebase)
        build_file_targets.put(mac_pkg_target)
    return mac_pkg
def build_file_rules_generator(repo_path, normalizer):
    """Expose this plugin's BUILD-file rules, keyed by rule name."""
    return {'mac_pkg': build_file_mac_pkg(repo_path, normalizer)}
| [
"wt@ooyala.com"
] | wt@ooyala.com |
9732a0769741a60a5b7c0a0a66070a04ffbe1412 | 9f32993a904ee1a2af3f67f4d03f4e11ac6c7491 | /equityDataClass.py | 2c6e4de7db423eadc574c894e460ad3129de1534 | [] | no_license | georgepruitt/TradingSimula-18-1.02 | 33f38239722d00e7e688056a1a7f21ef0b9eea44 | 41aec6464e89f55546f787755f9b2c9949404b01 | refs/heads/main | 2023-01-30T20:46:17.806879 | 2020-12-04T19:07:16 | 2020-12-04T19:07:16 | 310,117,494 | 0 | 0 | null | 2020-12-04T19:07:17 | 2020-11-04T21:04:51 | Python | UTF-8 | Python | false | false | 1,280 | py | class equityClass(object):
def __init__(self):
self.equityDate = list()
self.equityItm = list()
self.clsTrdEquity = list()
self.openTrdEquity = list()
self.cumuClsEquity = 0
self.dailyEquityVal = list()
self.peakEquity = 0
self.minEquity = 0
self.maxDD = 0
# tempEqu = 0
# cumEqu = 0
# maxEqu = -999999999
# minEqu = 999999999
# maxDD = 0
def setEquityInfo(self,equityDate,equityItm,clsTrdEquity,openTrdEquity):
self.equityDate.append(equityDate)
self.equityItm.append(equityItm)
self.cumuClsEquity += clsTrdEquity
tempEqu =self.cumuClsEquity+openTrdEquity
self.dailyEquityVal.append(tempEqu)
self.openTrdEquity.append(openTrdEquity);
# print("ote calc ",openTrdEquity," ",self.openTrdEquity[-1])
self.peakEquity = max(self.peakEquity,tempEqu)
maxEqu = self.peakEquity
self.minEquity = min(self.minEquity,tempEqu)
minEqu = self.minEquity
self.maxDD = max(self.maxDD,maxEqu-tempEqu)
# print(self.equityDate[-1]," ",self.maxDD," ",maxEqu," ",tempEqu," ",self.cumuClsEquity)
maxDD = self.maxDD
maxDD = maxDD
| [
"noreply@github.com"
] | noreply@github.com |
b983070276e9108430c515665fa30b6bce8cb8fb | f6841d5626d87e836f6012d88c783706fa46d769 | /web_crawler.py | c736c3b9c98f9e2dabb384fc0182472094e813d0 | [] | no_license | Jack-Valentine/python-seminar-4 | 850b22cd7c552b570e25e9432abf98a25cf0b7d6 | cd6c8945f436fa5dc0d6dec14551d07e6dd3562a | refs/heads/master | 2021-01-22T07:42:35.044924 | 2017-05-25T03:46:59 | 2017-05-25T03:46:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,010 | py | from bs4 import BeautifulSoup
from gevent import monkey
import sys
import gevent
import time
import urllib.request
def crawling_product_price(product_url, max_retries=5):
    """Fetch an Amazon product page and print its title and price.

    :param product_url: URL of the product detail page
    :param max_retries: how many attempts to make before giving up
        (new optional parameter; default keeps the retry-on-failure
        behavior but bounds it)
    """
    # FIX: the original used a bare `except:` that recursed forever on any
    # failure (risking RecursionError and swallowing KeyboardInterrupt).
    # Retry in a bounded loop and only catch ordinary exceptions.
    for _ in range(max_retries):
        try:
            with urllib.request.urlopen(product_url) as response:
                html = response.read()
            soup = BeautifulSoup(html, 'html.parser')
            product_title = soup.find(id='productTitle').get_text().strip()
            price = soup.find(id='priceblock_ourprice').get_text()
            print(product_title, price)
            return
        except Exception:
            continue
if __name__ == '__main__':
    # `-c` as the first command-line argument enables gevent concurrency.
    concurrency = sys.argv[1:2] == ['-c']
    product_urls = [
        'https://www.amazon.com/LG-Electronics-OLED65E7P-65-Inch-Smart/dp/B01MZF7YUD',
        'https://www.amazon.com/LG-Electronics-75SJ8570-75-Inch-SUPER/dp/B01N5V18W6',
        'https://www.amazon.com/All-New-Element-4K-Ultra-HD-Smart-TV-Fire-TV-Edition-43-Inch/dp/B06XD4SXWD',
        'https://www.amazon.com/Sceptre-U518CV-UMS-Ultra-True-black/dp/B06Y26S3BC',
        'https://www.amazon.com/Vizio-SMART-23-54IN-RETURNS-D24H-E1/dp/B06XQW5FJH',
        'https://www.amazon.com/Hisense-55K22DG-55-Inch-1080p-120Hz/dp/B00GFHG1OQ',
        'https://www.amazon.com/Samsung-Electronics-UN65MU9000-65-Inch-Ultra/dp/B06XGCT2PQ',
        'https://www.amazon.com/Samsung-Electronics-UN65MU8000-65-Inch-Ultra/dp/B06X9VSZYM',
        'https://www.amazon.com/Element-ELEFW3916R-720p-Certified-Refurbished/dp/B01N8PPMRG',
        'https://www.amazon.com/Samsung-UN50J5000-50-Inch-1080p-Model/dp/B00WR28LLE'
    ]
    start_time = time.time()
    if concurrency:
        # Monkey-patch blocking stdlib I/O so greenlets can overlap fetches.
        monkey.patch_all()
        threads = [gevent.spawn(crawling_product_price, product_url) for product_url in product_urls]
        gevent.joinall(threads)
    else:
        # Sequential fallback: fetch one product page at a time.
        for product_url in product_urls:
            crawling_product_price(product_url)
    end_time = time.time()
    print('-' * 90)
    print(f"Results(concurrency is {'on' if concurrency else 'off'}): {end_time-start_time}s")
| [
"kd980311@naver.com"
] | kd980311@naver.com |
4497e8008f1c61c1dfeef5262fe13b9b76aead63 | c9fd29813ebbef70755c962db0714b3d9b9a5906 | /Managers/urls.py | 8c4c7aaa62b894f60cfefefd80679eb1c78628e1 | [] | no_license | yiluxiangdong/django_1026 | 00cd936c13ae53685941e3d3bccd189f811732c2 | 69228c45cd01f2f5fec2d5521c2f7bf8afdf014c | refs/heads/master | 2021-07-18T05:04:31.383640 | 2017-10-26T02:18:19 | 2017-10-26T02:18:19 | 108,346,530 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 766 | py | """Managers URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
# Only the Django admin is routed; no application URLs are wired up yet.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
]
| [
"liuxiaobing@xgd.com"
] | liuxiaobing@xgd.com |
dc9af9d4b6a64f5e56356789ba5d4993111a85a6 | a9e4df1b51bc6d8b5e409fd36662c5b748787b21 | /django/dojoninja/manage.py | 3a15db692d0943ea18a1a39d79fb8b4ca957c228 | [] | no_license | EdAllenPoe/Python_Projects | 643dd748357ade72c0f2f2b27bee3d9a9fe1f26e | ffab487ebbfe4ed52115f5da16e11b21f10a986b | refs/heads/master | 2021-01-01T20:04:13.954539 | 2017-10-06T01:41:25 | 2017-10-06T01:41:25 | 98,760,533 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 810 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Freinds_Belt.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| [
"eric@ericgott.com"
] | eric@ericgott.com |
69e51fdfc4869a7c3cbfdeaf0cb52e5fa0558a74 | f69eccca4970bc898983b149bbadfc6a79e77916 | /befh/api_socket.py | 9252264f83791eecb5cd78803add2d6948531050 | [
"Apache-2.0"
] | permissive | chrischris292/MarketDataGdax | a3cd911edafe7a246a1d553180e1edb66a125c8c | 95dc398123f7878526df4af2402af3cbeee67057 | refs/heads/master | 2021-05-06T17:38:19.949472 | 2017-11-24T22:24:40 | 2017-11-24T22:24:40 | 111,900,487 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 887 | py | #!/bin/python
class ApiSocket:
    """
    API socket

    Base class for exchange API connectors; every method here is a stub
    that returns None and is meant to be overridden by concrete exchanges.
    """
    def __init__(self):
        pass

    @classmethod
    def parse_l2_depth(cls, instmt, raw):
        """
        Parse raw data to L2 depth
        :param instmt: Instrument
        :param raw: Raw data in JSON
        :return: L2Depth object, or None in this stub
        """
        return None

    @classmethod
    def parse_trade(cls, instmt, raw):
        """
        Parse raw data into a trade record.
        :param instmt: Instrument
        :param raw: Raw data in JSON
        :return: Trade object, or None in this stub
        """
        return None

    def get_order_book(self, instmt):
        """
        Get order book
        :param instmt: Instrument
        :return: Object L2Depth (None in this stub)
        """
        return None

    def get_trades(self, instmt, trade_id):
        """
        Get trades
        :param instmt: Instrument
        :param trade_id: Trade id
        :return: List of trades (None in this stub)
        """
        return None
| [
"gavincyi@gmail.com"
] | gavincyi@gmail.com |
5f8d714422c7d691696299d9f7a93d52b2168c5c | 59166105545cdd87626d15bf42e60a9ee1ef2413 | /test/test_space_shuttle_api.py | f91c3cfb0e3cfbade624d087754e5913f39e478a | [] | no_license | mosoriob/dbpedia_api_client | 8c594fc115ce75235315e890d55fbf6bd555fa85 | 8d6f0d04a3a30a82ce0e9277e4c9ce00ecd0c0cc | refs/heads/master | 2022-11-20T01:42:33.481024 | 2020-05-12T23:22:54 | 2020-05-12T23:22:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 995 | py | # coding: utf-8
"""
DBpedia
This is the API of the DBpedia Ontology # noqa: E501
The version of the OpenAPI document: v0.0.1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import dbpedia
from dbpedia.api.space_shuttle_api import SpaceShuttleApi # noqa: E501
from dbpedia.rest import ApiException
class TestSpaceShuttleApi(unittest.TestCase):
    """SpaceShuttleApi unit test stubs (generated; bodies not implemented)."""

    def setUp(self):
        # Fresh API client for each test case.
        self.api = dbpedia.api.space_shuttle_api.SpaceShuttleApi()  # noqa: E501

    def tearDown(self):
        pass

    def test_spaceshuttles_get(self):
        """Test case for spaceshuttles_get

        List all instances of SpaceShuttle  # noqa: E501
        """
        pass

    def test_spaceshuttles_id_get(self):
        """Test case for spaceshuttles_id_get

        Get a single SpaceShuttle by its id  # noqa: E501
        """
        pass


if __name__ == '__main__':
    unittest.main()
| [
"maxiosorio@gmail.com"
] | maxiosorio@gmail.com |
13a9ee7ca0588f5b36d4a9806c20fb505b2b3049 | 807d842325d62319ff98d539e559df9bbae68ee1 | /model/model.py | 47dc755df1d21d6ed7b95c2350b06122e36975ae | [] | no_license | AndrewVasilevskii/exc-power-supply | 41d3f0e19dfc80f165c264abb33537349202476d | 67e137426a95e7cf674cccb7c503b6bf69c258da | refs/heads/master | 2020-09-14T21:31:04.113475 | 2019-11-21T20:48:11 | 2019-11-21T20:48:11 | 223,262,468 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,377 | py | from multiprocessing import Value, Array
from threading import Thread
from model.appdata import AppData
from model.devicedata import DeviceData
from model.connector.autoconnector import AutoConnector
from model.pulldataloop import pullData
FILAMENT_CHANNEL = 8
import zipfile
import os
def zipfolder(foldername, target_dir):
    """Zip the contents of `target_dir` into `<foldername>.zip`.

    Entries are stored relative to `target_dir` (the directory itself is
    not used as an archive prefix).

    :param foldername: output path without the ".zip" suffix
    :param target_dir: directory tree to archive
    """
    rootlen = len(target_dir) + 1  # strip "<target_dir>/" from entry names
    # FIX: use a context manager so the archive is always finalized and
    # closed, even if an exception occurs while adding files.
    with zipfile.ZipFile(foldername + '.zip', 'w', zipfile.ZIP_DEFLATED) as zipobj:
        for base, dirs, files in os.walk(target_dir):
            for file in files:
                fn = os.path.join(base, file)
                zipobj.write(fn, fn[rootlen:])
class Model(object):
def __init__ (self, errorCB, onConnectCB, onCantReconnect):
self.errorCB = errorCB
self.onConnectCB = onConnectCB
self.onCantReconnect = onCantReconnect
channelsMaxValues = Array('i', 8)
channelsMinValues = Array('i', 8)
filamentRealCurrent = Value('d', 0.0)
emissionValue = Value('d', 0.0)
filamentActualVoltage = Value('d', 0.0)
channelsRealValues = Array('d', 8)
directionRealValue = Value('i', 1)
filamentStatus = Value('i', 0)
filamentRealStatus = Value('i', 0)
for i in range(len(channelsMaxValues)):
channelsMaxValues[i] = 60
channelsMinValues[i] = -60
channelsRealValues[i] = 0.0
self.deviceData = DeviceData(channelsMaxValues, channelsMinValues, filamentRealStatus, filamentRealCurrent, filamentActualVoltage, emissionValue, channelsRealValues, directionRealValue)
channelsValues = Array('d', 8)
filamentCurrent = Value('d', 0.0)
filamentMaxCurrent = Value('d', 2)
connectionControlValue = Value('i', 0)
directionValue = Value('i', 1)
for i in range(len(channelsValues)):
channelsValues[i] = 0.0
self.appData = AppData(channelsValues, filamentStatus, filamentCurrent, filamentMaxCurrent, connectionControlValue, directionValue)
def connect(self):
self.autoConnecter = AutoConnector(self.errorCB, self.OnConnect, self.onCantReconnect)
self.autoConnecter.connect()
def connectWithParams(self, port, baudRate, parity, stopBits, flowControl):
self.autoConnecter = AutoConnector(self.errorCB, self.OnConnect, self.onCantReconnect)
self.autoConnecter.connectWithParams(port, baudRate, parity, stopBits, flowControl)
def OnConnect(self, mips):
self.mips = mips
mips.disconnect()
self.onConnectCB()
self.pullProccess = Thread(target=pullData, args=(self.deviceData.channelsMaxValues, self.deviceData.channelsMinValues,
self.deviceData.filamentRealCurrent, self.deviceData.filamentActualVoltage, self.deviceData.emissionValue, self.deviceData.channelsRealValues,
self.appData.channelsValues, self.appData.filamentCurrent, self.appData.filamentMaxCurrent, self.appData.connectionControlValue,
self.appData.filamentStatus, self.deviceData.filamentRealStatus, self.appData.directionValue, self.deviceData.directionRealValue,))
self.pullProccess.daemon = True
self.pullProccess.start()
def filamentTrigger(self):
if self.appData.getFilamentStatus() == 1:
self.appData.setFilamentStatus(0)
return 0
else:
self.appData.setFilamentStatus(1)
return 1
def directionTrigger(self):
if self.deviceData.getDirectionRealValue() == 1:
self.appData.setDirectionValue(0)
else:
self.appData.setDirectionValue(1)
def isFilamentOn(self):
return self.deviceData.getFilamentRealStatus() == 1
def isDirectionFwd(self):
return self.deviceData.getDirectionRealValue() == 1
def getMaxValue(self, channel):
if channel == FILAMENT_CHANNEL:
return self.appData.getFilamentMaxCurrent()
return self.deviceData.getChannelsMaxValues(channel)
def getMinValue(self, channel):
if channel == FILAMENT_CHANNEL:
return 0
return self.deviceData.getChannelsMinValues(channel)
def getRealValue(self, channel):
if channel == FILAMENT_CHANNEL:
return self.deviceData.getFilamentRealCurrent()
return self.deviceData.getChannelsRealValues(channel)
def getEmission(self):
    """Return the latest emission reading from the device."""
    return self.deviceData.getCurrentEmission()
def getFilamentVoltage(self):
    """Return the actual filament voltage reported by the device."""
    return self.deviceData.getFilamentActualVoltage()
def changeChannelName(self, channel, name):
    """Store a user-chosen display name for the given channel."""
    self.appData.setChannelName(channel, name)
def getNameForChannel(self, channel):
    """Return the display name previously stored for the channel."""
    return self.appData.getChannelName(channel)
def setChannelValue(self, channel, value):
    """Record a requested setpoint; the filament has a dedicated field."""
    if channel == FILAMENT_CHANNEL:
        self.appData.setFilamentCurrent(value)
    else:
        self.appData.setChannelValue(channel, value)
def getChannelValue(self, channel):
    """Requested setpoint for a channel from the app-side state."""
    return (self.appData.getFilamentCurrent()
            if channel == FILAMENT_CHANNEL
            else self.appData.getChannelValue(channel))
| [
"andrew.vasilevskii@gmail.com"
] | andrew.vasilevskii@gmail.com |
21e74be97b92974cfa8b16c63574de712dea4ff4 | 245bcf63bce2933948adead5734f86e11cc190dd | /test/test_sales_stats.py | c0a5903c533d4c297dd3be3f11187ca4bee0d403 | [] | no_license | MarketcheckCarsInc/marketcheck_api_sdk_python | 385c645c74805be3d8717304188e2b215786be56 | aca339cc61a0860f31f2070c736af32f07d8fd5a | refs/heads/master | 2020-03-23T23:53:32.292175 | 2018-08-25T03:06:52 | 2018-08-25T03:06:52 | 142,261,814 | 2 | 1 | null | 2018-08-25T03:06:53 | 2018-07-25T07:11:04 | Python | UTF-8 | Python | false | false | 1,654 | py | # coding: utf-8
"""
Marketcheck Cars API
<b>Access the New, Used and Certified cars inventories for all Car Dealers in US.</b> <br/>The data is sourced from online listings by over 44,000 Car dealers in US. At any time, there are about 6.2M searchable listings (about 1.9M unique VINs) for Used & Certified cars and about 6.6M (about 3.9M unique VINs) New Car listings from all over US. We use this API at the back for our website <a href='https://www.marketcheck.com' target='_blank'>www.marketcheck.com</a> and our Android and iOS mobile apps too.<br/><h5> Few useful links : </h5><ul><li>A quick view of the API and the use cases is depicated <a href='https://portals.marketcheck.com/mcapi/' target='_blank'>here</a></li><li>The Postman collection with various usages of the API is shared here https://www.getpostman.com/collections/2752684ff636cdd7bac2</li></ul> # noqa: E501
OpenAPI spec version: 1.0.3
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import marketcheck_api_sdk
from marketcheck_api_sdk.models.sales_stats import SalesStats # noqa: E501
from marketcheck_api_sdk.rest import ApiException
class TestSalesStats(unittest.TestCase):
    """SalesStats unit test stubs"""

    def setUp(self):
        # No fixtures required for these generated stubs.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testSalesStats(self):
        """Test SalesStats"""
        # FIXME: construct object with mandatory attributes with example values
        # model = marketcheck_api_sdk.models.sales_stats.SalesStats()  # noqa: E501
        pass
if __name__ == '__main__':
unittest.main()
| [
"mahesh.hakeem@zerebral.co.in"
] | mahesh.hakeem@zerebral.co.in |
ed58414933d033ea90fcc698ea0305f405614980 | b2ff555164b6b20292c3c14680372d4e443845bf | /problems/problem30.py | 6123c41fe53f957b04815b974c4e6d3f241a8b47 | [
"MIT"
] | permissive | Julien-Verdun/Project-Euler | aeee823216253f575731989376edf73ae17ff9a0 | 94b073a9e4593960fcd74e678951bc27009e34f9 | refs/heads/master | 2020-11-28T06:43:07.580180 | 2020-01-07T16:59:32 | 2020-01-07T16:59:32 | 229,732,240 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 648 | py | # Problem 30 : Digit fifth powers
def somme_nth_dgt_powers(n, limit=10**7):
    """
    Find every integer below *limit* equal to the sum of the n-th
    powers of its own digits (Project Euler style search).

    :param n: exponent applied to each digit
    :param limit: exclusive upper bound of the search
    :return: (list of matching numbers, their total sum)
    """
    matches = []
    for candidate in range(2, limit):
        if candidate == sum(int(digit) ** n for digit in str(candidate)):
            matches.append(candidate)
        # Progress indicator for long searches.
        if candidate % 1000 == 0:
            print("i : ", candidate)
    return matches, sum(matches)
# Fifth powers of digits up to one million (Project Euler problem 30).
print(somme_nth_dgt_powers(5, 1000000))
# Result 443839
| [
"verdun.julien@yahoo.fr"
] | verdun.julien@yahoo.fr |
fda909f43d4f1878f1b1a2703e227364508fc39d | b02c690cb5471f721d0d3c74dd08ae6082cc479c | /pythontest/day30冒泡排序.py | 7c22b21703cee1e524f874c4aff8e3cd5aa462df | [] | no_license | 1914866205/python | d49207091337e970b488b18e5612fb74827195de | fb44197a27c230bad27162abd90001ac4ba4ae15 | refs/heads/master | 2022-06-14T04:12:29.580518 | 2022-06-01T12:03:06 | 2022-06-01T12:03:06 | 249,953,221 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 942 | py | """
冒泡排序
"""
def bubble_sort(items, comp=lambda x, y: x > y):
    """Return a sorted copy of *items* via bubble sort.

    *comp(a, b)* returning True means a and b are out of order and get
    swapped; the default yields ascending order. The input is untouched.
    """
    result = items[:]
    length = len(result)
    for done in range(length):
        dirty = False
        # Each pass bubbles one more element into its final slot, so the
        # scanned prefix shrinks by `done`.
        for j in range(length - 1 - done):
            if comp(result[j], result[j + 1]):
                result[j], result[j + 1] = result[j + 1], result[j]
                dirty = True
        if not dirty:
            # Already sorted: no swaps happened in this pass.
            break
    return result
# Demo: sort a sample list and print the result.
items = [5, 13, 54, 95, 3]
print(bubble_sort(items))
# def bubble_sort(items, comp=lambda x, y: x > y):
# items = items[:]
# for i in range(len(items) - 1):
# swapped = False
# for j in range(i, len(items) - i - 1):
# if comp(items[j], items[j + 1]):
# items[j], items[j + 1] = items[j + 1], items[j]
# swapped = True
# if not swapped:
# break
# return items
# # if __name__ == "__main__":
# items = [58, 28, 18, 88, 8]
# print(bubble_sort(items))
| [
"1914866205@qq.com"
] | 1914866205@qq.com |
6d8e740363397000637a3ad5a4b53bcd8124be5e | e4e35adb3a779e6048df0725da9625c3d9ed36f2 | /bebop_ws/devel/lib/python2.7/dist-packages/bebop_msgs/msg/_CommonARLibsVersionsStateDeviceLibARCommandsVersion.py | 1797ea92108dbdfa04392fc310df232587a27750 | [] | no_license | NimSon/ROS_Bebop | 03644a061af0de3f7e3eff243e02bc0b099e9a5e | 4ecf21140f83fdd52d0463a87a356019cb762eb5 | refs/heads/master | 2020-08-17T13:20:36.452766 | 2020-07-11T07:08:06 | 2020-07-11T07:08:06 | 215,672,375 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 143 | py | /home/nim/bebop_ws/devel/.private/bebop_msgs/lib/python2.7/dist-packages/bebop_msgs/msg/_CommonARLibsVersionsStateDeviceLibARCommandsVersion.py | [
"snim326@gmail.com"
] | snim326@gmail.com |
776a67d9e6d78daddbe8563a5d9a90a2d32415a4 | f3c4cdf9f7b525e1eb16e6a169425d1683e780f5 | /Tabs/Scenarios/CIProjection.py | d0944c54bcfe73eb81b9d79b5212409dfddeb386 | [
"MIT"
] | permissive | MaheshBodas/IFRS9_UI_AUTH | 8b8fc47f1ac769e851646510cf859b264f60c5c5 | a284a5e2bd1f327ca4608110cbf23d53ead7d75a | refs/heads/master | 2023-07-16T10:50:31.109713 | 2021-08-20T18:00:49 | 2021-08-20T18:00:49 | 398,360,800 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,496 | py | import dash_html_components as html
import dash_core_components as dcc
from tnp_dash_library.LayoutLibrary.ExpandableContent import ExpandableContent
from tnp_dash_library.LayoutLibrary.FixedContent import FixedContent
from tnp_dash_library.LayoutLibrary.TNPContent import TNPContent, TNPControl
import Utility.Unzip as uz
import json
import Visualisations.ScenarioCharts as sc
from dash.dependencies import Input, Output, State
from dash.exceptions import PreventUpdate
class CIProjection(TNPContent):
    """Dashboard panel for Cyclicality Index projection charts.

    Wraps the chart container in either an expandable or fixed layout
    helper and registers the Dash callbacks that (re)draw the figure
    when a region is submitted.
    """

    def __init__(self, debug: bool = False):
        EXPANDABLE = True
        ID = 'ci-projection-config-container'
        NAME = "Cyclicality Index Projections"
        self.is_expandable = EXPANDABLE
        self.id = ID
        self.name = NAME
        self.controls = []
        self.debug = debug
        self.uploader = None
        # Pick the layout wrapper according to the expandable flag.
        if self.is_expandable:
            self.layout_helper = ExpandableContent(self.id, self.name, self.content_layout())
        else:
            self.layout_helper = FixedContent(self.id, self.name, self.content_layout())
        super().__init__(self.id)

    def content_layout(self, params=None):
        """Hidden chart container (with loading spinner) shown until data arrives."""
        return dcc.Loading(html.Div(id='ci-projection-chart-container', hidden=True, style={"height": "31vh"}))

    def layout(self, params=None):
        """Delegate the outer layout to the chosen layout helper."""
        return self.layout_helper.content_layout()

    def content_call_back(self, app):
        """Register this panel's Dash callbacks on *app*."""
        if self.is_expandable:
            self.layout_helper.content_call_back(app)

        # NOTE(review): both callbacks below write the same Outputs;
        # recent Dash versions reject duplicate outputs unless
        # allow_duplicate=True is set -- confirm the Dash version in use.
        @app.callback(Output('ci-projection-chart-container', 'children'),
                      Output('ci-projection-config-container-modal-body', 'children'),
                      Output('ci-projection-chart-container', 'hidden'),
                      Input('submit-region', 'n_clicks'),
                      State('model_data', 'data'),
                      State({'type': 'scenario-dropdown', 'index': 0}, 'value'))
        def update_scenario_configuration(n_clicks, model_data, region):
            # Redraw the CI projection chart for the submitted region.
            if n_clicks is None:
                raise PreventUpdate
            if n_clicks == 0:
                raise PreventUpdate
            # Model payload is stored zipped; unpack the region's block.
            uncompressed_data = json.loads(uz.json_unzip(model_data)['scenarios'])[region]
            fig1 = sc.plotCI_projections(uncompressed_data)
            # Append the region name to the chart title.
            title = fig1['layout']['title']['text']
            fig1['layout']['title']['text'] = title + ": " + str(region).upper()
            # Same figure rendered twice: inline (half height) and modal.
            dist = dcc.Graph(figure=fig1, config={'displaylogo': False, 'editable': True},
                             id='ci-projection-plot', className='expandable_chart-half', style={"height": "31vh"})
            dist2 = dcc.Graph(figure=fig1, config={'displaylogo': False, 'editable': True},
                              id='ci-projection-plot-2', className='expandable_chart-half', style={"height": "72vh"})
            return dist, dist2, False

        @app.callback(
            Output('ci-projection-chart-container', 'children'),
            Output('ci-projection-config-container-modal-body', 'children'),
            Output('ci-projection-chart-container', 'hidden'),
            Input({'type': 'scenario-dropdown', 'index': 0}, 'value'))
        def clear(region):
            # Empty and hide the chart while no region is selected.
            if region is not None:
                raise PreventUpdate
            return "", "", True

    def register_control(self, control):
        """Attach a TNPControl to this panel (type-checked)."""
        if not issubclass(type(control), TNPControl):
            raise Exception("Only a 'TNPControl' can be registered in 'TNPContent'")
        self.controls.append(control)
| [
"mahesh.bodas@gmail.com"
] | mahesh.bodas@gmail.com |
4dc4f0c84a435d4b3aadf99d0942035e8d7a8a9f | 176d95d61aeff9e884659d1fcd363c0d3f8626de | /server.py | 775ef821a2352dc9e26570b7badf10874736e9ba | [] | no_license | vijaylingam/Secure-Server-UserLogin | ee036c66b9ab1772718e8a0ea5e26c1461398947 | 265effc17a0a58098a373c2c5fbd65df409e9905 | refs/heads/master | 2021-01-20T15:51:31.235238 | 2017-05-09T22:25:26 | 2017-05-09T22:25:26 | 90,797,226 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,725 | py | import socket
import random
from pyprimes import *
# Use this in case the port is already in use: sudo lsof -t -i tcp:50007 | xargs kill -9
# Infinite prime generator used to pick p and q below.
prime = primes()
# Login table: rows of [username, [b, n]] registered at startup.
table = []
def compute_sum(line):
    """Sum the comma-separated integers contained in *line*."""
    return sum(map(int, line.split(',')))
# --- Fiat-Shamir-style zero-knowledge login server --------------------
# Each user's secret is a_i; the server stores only b_i = a_i^2 mod n.
# Login round: client sends y, server challenges with t in {0, 1}, the
# client answers z, and the server accepts iff
#     z^2 == y (mod n)       when t == 0, or
#     z^2 == b * y (mod n)   when t == 1.
HOST = 'localhost' # Symbolic name meaning all available interfaces
PORT = 50007 # Arbitrary non-privileged port
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((HOST, PORT))
print("Server Hosted on localhost on port:", PORT)
print("Picking two primes p and q")
# NOTE(review): p and q are both drawn from the same list of ten small
# primes, so p == q is possible -- confirm that is acceptable.
x = [next(prime) for _ in range(10)]
p = random.choice(x)
q = random.choice(x)
n = p*q
print("p:", p)
print("q:", q)
print("n:", n)
# Per-user secrets; only the squared residues b_i are stored below.
a1 = random.randint(-n, n)
a2 = random.randint(-n, n)
a3 = random.randint(-n, n)
print("a1:", a1)
print("a2:", a2)
print("a3:", a3)
b1 = (a1**2)%n
b2 = (a2**2)%n
b3 = (a3**2)%n
# Register three login names interactively.
user1 = input("Enter Login name for user1: ")
table.append([user1, [b1,n]])
user2 = input("Enter Login name for user2: ")
table.append([user2, [b2,n]])
user3 = input("Enter Login name for user3: ")
table.append([user3, [b3,n]])
print()
print("Login Details")
for row in table:
    print(row)
print()
s.listen(3)
conn, addr = s.accept()
print('Connected by', addr)
while 1:
    data = conn.recv(1024).decode('utf-8')
    if not data: break
    print("SERVER RECIEVED : ", data)
    for entry in table:
        if entry[0] == data:
            print("Valid username")
            conn.sendall(str("valid").encode('utf-8'))
            y = conn.recv(1024).decode('utf-8') # y is of type string
            print("received y:", y)
            # One-bit random challenge.
            t = random.randint(0,1)
            print("Sending t:", str(t))
            conn.sendall(str(t).encode('utf-8'))
            z = conn.recv(1024).decode('utf-8')
            print("Received z:", str(z))
            if t == 0:
                # Accept iff z^2 == y (mod n).
                if int(y) == (int(z)**2)%n:
                    conn.sendall(str("Welcome " + data).encode('utf-8'))
                    # NOTE(review): exit(0) terminates the whole server
                    # process, not just the loop as the comment implies.
                    exit(0) #exit for loop
                else:
                    conn.sendall(str("Access denied").encode('utf-8'))
                    exit(0) #exit for loop
            if t == 1:
                # Re-fetch the user's public residue b from the table.
                b = 0
                for entry in table:
                    if entry[0] == data:
                        b = int(entry[1][0])
                # print("Entry: ", entry) #for debugging
                # Accept iff z^2 == b * y (mod n).
                if (b*int(y))%n == (int(z)**2)%n:
                    conn.sendall(str("Welcome "+ data).encode('utf-8'))
                    exit(0) #exit for loop
                else:
                    conn.sendall(str("Access denied").encode('utf-8'))
                    exit(0) #exit for loop
    message = "Invalid Username"
    #conn.sendall(str(message).encode('utf-8'))
    print(message)
    conn.sendall(str(message).encode('utf-8'))
    conn.close()
#compute_sum(data)
#conn.sendall(str(compute_sum(data)))
conn.close() | [
"vijay.lingam@ashoka.edu.in"
] | vijay.lingam@ashoka.edu.in |
431dba391501f013d8df54541d4082405e02ae55 | 8567e0a1ae18393475a3a44f38f46302839d5485 | /sandbox/sand/dashboard.py | 0d82340d9322f3f90d0aa74bd82d0bd703777fff | [
"CC-BY-NC-SA-4.0",
"LicenseRef-scancode-proprietary-license",
"CC-BY-NC-4.0",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | marionleborgne/EIT_Dashboard | 084935e554248bec1485602238d7310cb5e2afe4 | 057e05d20325fa6a9b9b0183f63a73c8b7e6cd3b | refs/heads/master | 2020-03-23T15:40:37.698232 | 2018-10-04T22:57:32 | 2018-10-04T22:57:32 | 141,766,074 | 2 | 0 | Apache-2.0 | 2018-07-20T23:25:10 | 2018-07-20T23:25:10 | null | UTF-8 | Python | false | false | 2,626 | py |
# import matplotlib
# matplotlib.use("TkAgg")
import argparse
import logging
import OpenEIT.dashboard
import configparser
import Adafruit_BluefruitLE
FORMAT = '%(asctime)-15s %(message)s'
logging.basicConfig(format=FORMAT, level=logging.INFO)
logger = logging.getLogger(__name__)
#
# Get the BLE provider for the current platform.
ble = Adafruit_BluefruitLE.get_provider()
# TODO: Improve State Feedback
# The current connection and playback state should be clearly visible
# at all times
# Test all buttons and functions with the device and flag any problems.
# Create a way to select the reconstruction algorithm.
#
def main():
    """Entry point: read configuration and CLI arguments, build the
    controller, then launch the GUI matching the user's choice."""
    configParser = configparser.ConfigParser()
    configFilePath = r'configuration.txt'
    configParser.read(configFilePath)
    # Hardware/software options loaded from configuration.txt.
    n_el = configParser.get('hardware-config', 'n_el')
    algorithm = configParser.get('software-config', 'algorithm')
    mode = configParser.get('software-config', 'mode')
    fwsequence = configParser.get('software-config', 'fwsequence')
    ap = argparse.ArgumentParser()
    ap.add_argument("-f", "--read-file",
                    action="store_true",
                    default=False)
    ap.add_argument("--virtual-tty",
                    action="store_true",
                    default=False)
    ap.add_argument("port", nargs="?")
    args = ap.parse_args()
    controller = OpenEIT.dashboard.Controller()
    controller.configure(
        initial_port=args.port,
        read_file=args.read_file,
        virtual_tty=args.virtual_tty,
        n_el= n_el,
        algorithm=algorithm,
        mode=mode,
        fwsequence=fwsequence
    )
    # NOTE(review): `controller.choice` is presumably set during
    # configure() or an earlier prompt -- confirm where it comes from.
    if controller.choice == 'a':
        # Timeseries GUI
        gui = OpenEIT.dashboard.Timeseriesgui(controller)
        gui.run()
    elif controller.choice == 'b':
        # Bioimpedance Spectroscopy GUI using plotly and dash.
        gui = OpenEIT.dashboard.BISgui(controller)
        gui.run()
    else:
        # Tomographic reconstruction GUI
        gui = OpenEIT.dashboard.Tomogui(controller)
        gui.run()
    # Gui type based on config file.
    # if mode == 'timeseriesygui':
    #     gui = OpenEIT.dashboard.Timeseriesgui(controller)
    #     gui.run()
    # elif mode == 'multifrequencygui':
    #     gui = OpenEIT.dashboard.Multifrequencygui(controller)
    #     gui.run()
    # elif mode == 'meshgui':
    #     gui = OpenEIT.dashboard.Meshgui(controller)
    #     gui.run()
    # else:
    #     gui = OpenEIT.dashboard.Singlefrequencygui(controller)
    #     gui.run()
if __name__ == "__main__":
main()
| [
"jean.rintoul@gmail.com"
] | jean.rintoul@gmail.com |
03e57834e1fedbb40c070c90c5c0c36b1a2be91d | e9083889f75d32f93c5f5e270782e8570716c547 | /app/__init__.py | e4b436c39e91f32fe560d55fde4c97170bd9b028 | [] | no_license | Yusuke3974/vehicledetection | 3c3d92ef219ae58495114c0c77a52bbdf0007e8b | 96804bff383fb6aa01a7c3df1721b25aac4c25ca | refs/heads/master | 2023-08-14T13:43:02.181494 | 2021-10-14T11:59:24 | 2021-10-14T11:59:24 | 417,080,575 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 121 | py | from flask import Flask
# Create the Flask app and load the development configuration object.
app = Flask(__name__)
app.config.from_object('config.DevelopmentConfig')
from app import views | [
"ritchie.deep.purple@gmail.com"
] | ritchie.deep.purple@gmail.com |
35a69309db6a13befb15b43719fa13faefa49af6 | 93334df29b59aeffc14feb6e1e08bc5bdfa17806 | /find_clearance.py | 90995a20ba5150fbe6e6f8fda5fa4191df5741aa | [
"MIT"
] | permissive | LakhouaMehdi/Robotic_Depth_Perception_System | bee377496584930aca3aa865f06bb4c81245012f | 765403e61b60939adfefafb0fe9e31663479908d | refs/heads/master | 2021-10-10T09:46:13.116483 | 2019-01-09T00:05:16 | 2019-01-09T00:05:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,650 | py | import sys
import cv2
import numpy as np
def auto_canny(image, sigma=0.33):
    """
    Run Canny edge detection with thresholds derived from the median.

    The lower/upper thresholds are a +/- *sigma* band around the image
    median, clamped to the valid 0-255 range.

    :param image: ndarray, dtype=np.uint8
    :param sigma: float, fractional band width around the median
    :return: ndarray, edge map
    """
    median_value = np.median(image)
    lower_threshold = int(max(0, (1.0 - sigma) * median_value))
    upper_threshold = int(min(255, (1.0 + sigma) * median_value))
    return cv2.Canny(image, lower_threshold, upper_threshold)
def remove_noise(image, lower, upper):
    """
    Clamp-threshold an image into a uint8 array.

    Pixels below ``lower`` become 0, pixels above ``upper`` become 255,
    and everything in between is kept (cast to uint8, truncating).

    :param image: ndarray, 2-D image
    :param lower: int, low threshold
    :param upper: int, high threshold
    :return: ndarray of dtype uint8, same height/width as *image*
    """
    image = np.asarray(image)
    drawing = np.zeros((image.shape[0], image.shape[1]), dtype=np.uint8)
    # Vectorized replacement for the original per-pixel Python loops:
    # identical output, one C-level pass instead of O(H*W) interpreter work.
    keep = (image >= lower) & (image <= upper)
    # Assignment into a uint8 array truncates exactly like the original
    # element-wise copy did.
    drawing[keep] = image[keep]
    drawing[image > upper] = 255
    return drawing
def find_clearance(file_path):
    """
    Return (Side, Clearance) for a image.

    Loads a depth map from a text file, isolates the largest edge
    contour in a fixed region of interest, and reports which side of it
    has more free space, scaled to metres.

    :param file_path: String, image path
    :return: (str, float) safer side ("Left"/"Right") and clearance in metres
    """
    # Corridor width in pixels; the 1.5 factor below converts to metres.
    corridor = 60
    # Region of interest bounds (rows via *_length_*, columns via *_height_*).
    # NOTE(review): the length/height naming looks swapped relative to the
    # axes being indexed -- confirm against the camera orientation.
    crop_area_length_left = 30
    crop_area_length_right = 108
    crop_area_height_top = 120
    crop_area_height_bottom = 60
    img = np.loadtxt(file_path) # read content from input file
    # Threshold out-of-range depths, then edge-detect.
    img_remove_noise = remove_noise(img, 1, 4)
    img_canny = auto_canny(cv2.GaussianBlur(img_remove_noise, (5, 5), 0))
    # Dilate then erode (morphological closing) to join broken edges.
    img_dilate = cv2.dilate(img_canny, None, iterations=2)
    img_erode = cv2.erode(img_dilate, None, iterations=2)
    # Mask everything outside the region of interest.
    cropped = np.zeros((img.shape[0], img.shape[1]), dtype=np.uint8)
    cropped[crop_area_length_left:crop_area_length_right, crop_area_height_bottom:crop_area_height_top] = \
        img_erode[crop_area_length_left:crop_area_length_right, crop_area_height_bottom:crop_area_height_top]
    # NOTE(review): the 3-value findContours return is OpenCV 3.x only;
    # OpenCV 4 returns 2 values.
    img_contour, contour, hierarchy = cv2.findContours(cropped, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    # Keep only the largest contour -- assumed to be the obstacle.
    contour = sorted(contour, key=cv2.contourArea, reverse=True)[:1]
    contour_poly = [None] * len(contour)
    bounding_box = [None] * len(contour)
    for i, c in enumerate(contour):
        contour_poly[i] = cv2.approxPolyDP(c, 3, True)
        bounding_box[i] = cv2.boundingRect(contour_poly[i])
    # Free pixels on each side of the bounding box.
    # NOTE(review): the 60/120 bounds assume a fixed image width -- confirm.
    left = bounding_box[0][0] - 60
    right = 120 - (bounding_box[0][0] + bounding_box[0][2])
    # print("left", left * 1.5 / corridor) if (left > right) else print("right", right * 1.5 / corridor)
    # Wider gap wins; scale pixels to metres (corridor is 1.5 m wide).
    return ("Left", left * 1.5 / corridor) if (left > right) else ("Right", right * 1.5 / corridor)
def find_clearance_2(file_path):
    """
    Reference: https://github.com/wyxPatrick/human-corridor

    Alternative clearance estimator: subtracts a pre-stitched background
    from the depth image to isolate the human, locates them with a
    sliding window, and compares free space on either side at the
    human's depth.

    :param file_path: String, path to the depth-map text file
    :return: (str, float) safer side ("Left"/"Right") and clearance in metres
    """
    image = np.loadtxt(file_path)
    # Halve and clip depth values to a 0-5 working range.
    depth = image / 2
    depth[depth > 5] = 5
    # "background" is generated by stitching image0 and image2 together to
    # cut off the human in the image
    # NOTE(review): 'background.txt' is loaded from the current working
    # directory -- confirm the script is always run from the repo root.
    background = np.loadtxt('background.txt')
    human = depth - background
    # Filter the noise points and generate the mask of human
    kernel = np.ones((2, 2), np.uint8)
    opening = cv2.morphologyEx(human, cv2.MORPH_OPEN, kernel)
    human_mask = cv2.erode(opening, kernel, iterations=1)
    # Keep only difference values in the (-3.3, -2.1) band, then binarize.
    human_mask[human_mask > -2.1] = 0
    human_mask[human_mask < -3.3] = 0
    human_mask[human_mask < 0] = 1
    # Use sliding window to detect human in the mask to get rid of the rest
    # of the noise points
    filtered = np.zeros((58, 152))
    window_x = 75
    window_y = 25
    for x in range(human_mask.shape[0] - window_x):
        for y in range(human_mask.shape[1] - window_y):
            filtered[x][y] = np.sum(human_mask[x:x + window_x, y:y + window_y])
    # From the mask of human and the depth image, calculating the depth of
    # human
    human_x = np.where(filtered == np.max(filtered))[0][0]
    human_y = np.where(filtered == np.max(filtered))[1][0]
    human_depth_store = []
    for i in range(human_x, human_x + window_x):
        for j in range(human_y, human_y + window_y):
            if human_mask[i][j] == 1:
                human_depth_store.append(depth[i][j])
    human_depth = np.average(human_depth_store)
    # Decide the safer side and calculate the clearance
    # Corridor walls are assumed to sit at roughly the human's depth.
    store = np.where((depth >= human_depth - 0.005) & (depth <= human_depth + 0.005))
    corridor_left = np.min(store[1])
    corridor_right = np.max(store[1])
    left = np.abs(human_y - corridor_left)
    right = np.abs(corridor_right - (human_y + window_y))
    corridor = corridor_right - corridor_left
    # if left > right:
    #     distance = (1.5 / corridor) * left
    #     print("left %.3f" % distance)
    # else:
    #     distance = (1.5 / corridor) * right
    #     print("right %.3f" % distance)
    return ("Left", left * 1.5 / corridor) if (left > right) else ("Right", right * 1.5 / corridor)
def ensemble(result_1, result_2):
    """
    Combine the decisions of the two clearance estimators.

    If both agree on the side, the reported clearance is the mean of
    the two distances; otherwise the side with the larger clearance
    wins. The decision is printed (preserving the original CLI
    behaviour) and now also returned so callers can use it
    programmatically instead of parsing stdout.

    :param result_1: (str, float) side and clearance from method 1
    :param result_2: (str, float) side and clearance from method 2
    :return: (str, float) chosen side and clearance
    """
    decision_1, distance_1 = result_1
    decision_2, distance_2 = result_2
    if decision_1 == decision_2:
        decision, distance = decision_1, (distance_1 + distance_2) / 2.0
    elif distance_1 <= distance_2:
        # Disagreement: trust whichever method reports more room.
        decision, distance = decision_2, distance_2
    else:
        decision, distance = decision_1, distance_1
    print(str(decision) + " %.3f" % distance)
    return decision, distance
if __name__ == '__main__':
    # Require the depth-map file path as the first CLI argument.
    try:
        path = sys.argv[1]
    except IndexError:
        print("Please provide valid file path")
        # Fix: the original fell through and crashed on sys.argv[1]
        # below after printing this message; exit instead.
        sys.exit(1)
    ensemble(find_clearance(path), find_clearance_2(path))
| [
"munoliver007@gmail.com"
] | munoliver007@gmail.com |
83ce701ba540386ee9eaf7d386a2ee5e92a42456 | cf9de34e18ee0aef7dc3e6a0a1d2aaa151534769 | /day-07/main.py | d288170c3679728cae11df4806af020afbdb193d | [] | no_license | MikoGD/advent_of_code_2020 | 85fa31a366bd87d78457d3e107f1600512bd4941 | b16771a10ae44e2cba737e7ecfda079080f9f788 | refs/heads/main | 2023-02-05T09:11:04.869572 | 2020-12-27T11:12:49 | 2020-12-27T11:12:49 | 323,669,597 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,084 | py | from collections import defaultdict, deque
from pprint import pprint as pp
from typing import List, Dict, Set, Tuple
import math
import sys
import time
def main():
    """Run both puzzle parts, timing each one."""
    started = time.time()
    part_one()
    print("---- Part one %s seconds ---- " % (time.time() - started))
    started = time.time()
    part_two()
    print("---- Part two %s seconds ---- " % (time.time() - started))
# ENDM main()
def part_one():
    """AoC 2020 day 7 part 1: count bag colours that can (transitively)
    contain at least one 'shiny gold' bag. Input file path is argv[1]."""
    if (len(sys.argv) < 2):
        print("ERROR: no input file")
        exit(0)
    # END IF
    with open(sys.argv[1], "r", encoding="utf-8") as file:
        # Map: container colour -> list of colours it directly contains.
        bags_dict: Dict[str, List[str]] = {}
        for line in file:
            container, bags = line.split("contain")
            container: str = container.replace("bags", "").strip()
            # Strip the "bag(s)" suffix plus the leading count/spaces so
            # only the colour name remains.
            bags: List[str] = [bag.replace("bags", "").replace("bag", "").strip(" 0123456789")
                               for bag in bags.strip(".\n").split(",")]
            # print(container, bags)
            if bags[0] == "no other":
                bags_dict[container] = []
            else:
                bags_dict[container] = bags
            # END IF
        # END FOR
        # pp(bags_dict)
        count = 0
        # For every colour, walk its contents depth-first until a shiny
        # gold bag is found (or the stack empties).
        for bag in bags_dict.keys():
            bags = [bag]
            while len(bags) != 0:
                curr_bag = bags.pop()
                inside = bags_dict[curr_bag]
                if "shiny gold" in inside:
                    count += 1
                    break
                else:
                    bags += bags_dict[curr_bag]
                # END IF
            # END WHILE
        # END FOR
        print(count)
    # END WITH
# END part_one()
def part_two():
    """AoC 2020 day 7 part 2: count the total bags required inside one
    'shiny gold' bag. Input file path is argv[1]."""
    if (len(sys.argv) < 2):
        print("ERROR: no input file")
        exit(0)
    # END IF
    with open(sys.argv[1], "r", encoding="utf-8") as file:
        # Map: container colour -> list of (count, colour) pairs.
        bags_dict: Dict[str, List[str]] = {}
        for line in file:
            container, bags = line.split("contain")
            container: str = container.replace("bags", "").strip()
            bags: List[str] = [bag.replace("bags", "").replace("bag", "").strip()
                               for bag in bags.strip(".\n").split(",")]
            # print(container, bags)
            if bags[0] == "no other":
                bags_dict[container] = []
            else:
                temp: List[str] = []
                for bag in bags:
                    # Split the leading count from the colour name.
                    index: int = bag.find(" ")
                    temp.append((int(bag[:index]), bag[index + 1:]))
                # END FOR
                bags_dict[container] = temp
            # END IF
        # END FOR
        # pp(bags_dict)
        count: int = 0
        # Expand the shiny gold bag's contents, pushing each contained
        # colour's contents once per copy of that bag.
        bags: List[Tuple[int, str]] = bags_dict["shiny gold"]
        while len(bags) != 0:
            curr_bag: Tuple[int, str] = bags.pop()
            amount, type = curr_bag
            count += amount
            for i in range(amount):
                bags += bags_dict[type]
        # END WHILE
        print(count)
    # END WITH
# END part_two()
# END part_two()
if __name__ == "__main__":
main()
# END IF
| [
"mikaelescolin@gmail.com"
] | mikaelescolin@gmail.com |
165d038ef67c6e5d9650811fb8eebb4c215a8874 | 1ad12a71c3d5d2b3810ce03e8bd138c4ffb66eb8 | /xlsxwriter/test/comparison/test_chart_axis17.py | d77b584fbddf1629eaf7474c77dea6cb5512ae61 | [
"BSD-2-Clause-Views"
] | permissive | idreamsfy/XlsxWriter | b52929229b16e2ee1eaca0cda9980a5a0aad5769 | 129044ed821de67895b4562c6b71f90eba5be6b4 | refs/heads/master | 2021-01-02T20:39:20.415882 | 2020-02-07T21:07:55 | 2020-02-07T21:07:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,340 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2020, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.

    """

    def setUp(self):
        # Reference workbook produced by Excel for byte-wise comparison.
        self.set_filename('chart_axis17.xlsx')

    def test_create_file(self):
        """Test the creation of a simple XlsxWriter file."""
        workbook = Workbook(self.got_filename)
        worksheet = workbook.add_worksheet()
        chart = workbook.add_chart({'type': 'column'})
        # Fixed axis ids so the output matches the reference file exactly.
        chart.axis_ids = [43812736, 45705088]
        data = [
            [1, 2, 3, 4, 5],
            [2, 4, 6, 8, 10],
            [3, 6, 9, 12, 15],
        ]
        worksheet.write_column('A1', data[0])
        worksheet.write_column('B1', data[1])
        worksheet.write_column('C1', data[2])
        chart.add_series({'values': '=Sheet1!$A$1:$A$5'})
        chart.add_series({'values': '=Sheet1!$B$1:$B$5'})
        chart.add_series({'values': '=Sheet1!$C$1:$C$5'})
        # Logarithmic value axis is the behaviour under test here.
        chart.set_y_axis({'log_base': 10})
        worksheet.insert_chart('E9', chart)
        workbook.close()
        self.assertExcelEqual()
| [
"jmcnamara@cpan.org"
] | jmcnamara@cpan.org |
de412810d3844cf5f8ee29bbd2e1e99a79ec08a8 | 31085d66c719c5c27aec57be693bb99c902d2596 | /flux_tool/neutrino.py | f2e1a11c0da132e7731968f5bce499d1e10f630c | [] | no_license | bhokansonfasig/flux_tool | 0fa0692a3a10cb0b493a1a34ffb3339db49ac585 | a74d991d0a02d31eea00d5dd053405542d16247d | refs/heads/main | 2023-02-12T04:13:10.814367 | 2021-01-08T23:54:45 | 2021-01-08T23:54:45 | 327,973,884 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 9,805 | py | """Class for neutrino interaction physics calculations"""
from enum import Enum
import numpy as np
import scipy.constants
from . import units
def get_from_enum(value, enum):
    """
    Coerce *value* into a member of *enum*.

    Accepts an existing member (returned unchanged), a member name as a
    string, or a raw member value.

    Parameters
    ----------
    value
        Representation of the desired `enum` value. If already a member of
        `enum`, no change. If ``str``, assumed to be a name in the `enum`.
        Otherwise, assumed to be a value type of the `enum`.
    enum : Enum
        Python ``Enum`` to compare names values with.

    Returns
    -------
    Enum value
        Value in the `enum` represented by the given `value`.

    Examples
    --------
    >>> from enum import Enum
    >>> class Color(Enum):
    ...     red = 1
    ...     green = 2
    ...     blue = 3
    >>> get_from_enum(Color.red, Color)
    <Color.red: 1>
    >>> get_from_enum("green", Color)
    <Color.green: 2>
    >>> get_from_enum(3, Color)
    <Color.blue: 3>
    """
    if isinstance(value, enum):
        return value
    # Names are looked up; anything else is treated as a member value.
    return enum[value] if isinstance(value, str) else enum(value)
class NeutrinoInteraction:
    """
    Class for storing and calculating neutrino interaction parameters.

    Cross sections follow the CTW parametrization (Connolly, Thorne &
    Waters 2011), Equation 7 with the coefficients of Table III:

        log10(sigma / cm^2) = c1 + c2*ln(eps - c0) + c3*ln(eps - c0)**2
                              + c4 / ln(eps - c0),   eps = log10(E / GeV)

    Parameters
    ----------
    neutrino_type
        Identification value of the neutrino type. Values should be from
        the ``NeutrinoInteraction.NeutrinoType`` enum, but integer or
        string values may work if carefully chosen.
    interaction_type
        Identification value of the neutrino's interaction type. Values
        should be from the ``NeutrinoInteraction.InteractionType`` enum,
        but integer or string values may work if carefully chosen.
    energy : float
        Energy (GeV) of the neutrino.

    Attributes
    ----------
    neutrino : NeutrinoInteraction.NeutrinoType
        Identification value of the neutrino type.
    interaction : NeutrinoInteraction.InteractionType
        Identification value of the neutrino's interaction type.
    energy : float
        Energy (GeV) of the neutrino.
    """

    class NeutrinoType(Enum):
        """
        Enum containing possible neutrino types.

        Values based on the PDG particle numbering scheme.
        http://pdg.lbl.gov/2007/reviews/montecarlorpp.pdf

        Attributes
        ----------
        nu_e, electron_neutrino
        nu_e_bar, electron_antineutrino
        nu_mu, muon_neutrino
        nu_mu_bar, muon_antineutrino
        nu_tau, tau_neutrino
        nu_tau_bar, tau_antineutrino
        unknown, undefined
        """
        undefined = 0
        unknown = 0
        electron_neutrino = 12
        nu_e = 12
        electron_antineutrino = -12
        nu_e_bar = -12
        muon_neutrino = 14
        nu_mu = 14
        muon_antineutrino = -14
        nu_mu_bar = -14
        tau_neutrino = 16
        nu_tau = 16
        tau_antineutrino = -16
        nu_tau_bar = -16

    class InteractionType(Enum):
        """
        Enum containing possible interaction types.

        Attributes
        ----------
        cc, charged_current
        nc, neutral_current
        unknown, undefined
        """
        undefined = 0
        unknown = 0
        charged_current = 1
        cc = 1
        neutral_current = 2
        nc = 2

    # CTW 2011 Table III coefficients (c_0, c_1, c_2, c_3, c_4), keyed by
    # (is_neutrino, interaction type). Consolidated into one table so the
    # four previously-duplicated cross-section branches cannot drift apart.
    _CTW_COEFFS = {
        (True, InteractionType.charged_current):
            (-1.826, -17.31, -6.406, 1.431, -17.91),
        (True, InteractionType.neutral_current):
            (-1.826, -17.31, -6.448, 1.431, -18.61),
        (False, InteractionType.charged_current):
            (-1.033, -15.95, -7.247, 1.569, -17.72),
        (False, InteractionType.neutral_current):
            (-1.033, -15.95, -7.296, 1.569, -18.30),
    }

    def __init__(self, neutrino_type, interaction_type, energy):
        self.neutrino = neutrino_type
        self.interaction = interaction_type
        self.energy = energy

    @property
    def neutrino(self):
        """
        Identification value of the neutrino type.

        Should always be a value from the ``NeutrinoType`` enum. Setting
        with integer or string values may work if carefully chosen.
        """
        return self._neutrino_id

    @neutrino.setter
    def neutrino(self, neutrino_id):
        if neutrino_id is None:
            self._neutrino_id = self.NeutrinoType.undefined
        else:
            self._neutrino_id = get_from_enum(neutrino_id, self.NeutrinoType)

    @property
    def interaction(self):
        """
        Identification value of the neutrino's interaction type.

        Should always be a value from the ``InteractionType`` enum. Setting
        with integer or string values may work if carefully chosen.
        """
        return self._interaction_id

    @interaction.setter
    def interaction(self, interaction_id):
        if interaction_id is None:
            self._interaction_id = self.InteractionType.undefined
        else:
            self._interaction_id = get_from_enum(interaction_id,
                                                 self.InteractionType)

    def _is_neutrino(self, missing_name):
        """
        Return True for neutrinos, False for antineutrinos.

        Parameters
        ----------
        missing_name : str
            Noun spliced into the error message, preserving the original
            per-call-site wording ("particle type" / "neutrino type").

        Raises
        ------
        ValueError
            If the neutrino type is undefined (value 0).
        """
        if self.neutrino.value > 0:
            return True
        if self.neutrino.value < 0:
            return False
        raise ValueError("Unable to calculate cross section without a "
                         + missing_name)

    def _ctw_power(self, is_neutrino, interaction):
        """
        Exponent of the CTW fit: log10(cross section / cm^2).

        Raises
        ------
        ValueError
            If *interaction* is neither charged- nor neutral-current.
        """
        try:
            c_0, c_1, c_2, c_3, c_4 = self._CTW_COEFFS[(is_neutrino,
                                                        interaction)]
        except KeyError:
            raise ValueError("Unable to calculate cross section without an"
                             + " interaction type") from None
        eps = np.log10(self.energy / units.GeV)
        log_term = np.log(eps - c_0)
        return c_1 + c_2 * log_term + c_3 * log_term ** 2 + c_4 / log_term

    @property
    def total_cross_section(self):
        """
        The total cross section of the neutrino.

        Sum of the charged-current and neutral-current cross sections,
        determined by whether this is a neutrino or antineutrino and by
        its energy. Based on Equation 7 and Table III of the CTW 2011
        paper.
        """
        is_neutrino = self._is_neutrino("particle type")
        power_cc = self._ctw_power(is_neutrino,
                                   self.InteractionType.charged_current)
        power_nc = self._ctw_power(is_neutrino,
                                   self.InteractionType.neutral_current)
        return (10 ** power_cc + 10 ** power_nc) * units.cm ** 2

    @property
    def cross_section(self):
        """
        The cross section of the neutrino.

        Determined by whether this is a neutrino or antineutrino, by its
        interaction type, and by its energy. Based on Equation 7 and
        Table III of the CTW 2011 paper.
        """
        is_neutrino = self._is_neutrino("neutrino type")
        power = self._ctw_power(is_neutrino, self.interaction)
        return (10 ** power) * units.cm ** 2

    @property
    def total_interaction_length(self):
        """
        The total (cc + nc) interaction length of the neutrino.

        Calculated in water-equivalent material: density ~1 g/cm^3 with
        approximately Avogadro's number of nucleons per gram, so the
        length is 1 / (N_A per cm^3) / cross section.
        """
        return 1 / (scipy.constants.N_A / units.cm ** 3) / self.total_cross_section

    @property
    def interaction_length(self):
        """
        The interaction length for this neutrino's interaction type,
        in water-equivalent material.
        """
        return 1 / (scipy.constants.N_A / units.cm ** 3) / self.cross_section
| [
"bhokansonfasig@gmail.com"
] | bhokansonfasig@gmail.com |
b0ec485abeeaf472b4b5ce919bb5039ff99fea2e | 6b51f1a03cc8f1ffe11c3663187f63c0c3dc83c2 | /wsgi.py | ecf85a296f226b3c2aef454ea6df6c593fe936aa | [] | no_license | medvedodesa/artem-task | 996803f968a1660d0d38538da72e6dbfb73f9726 | bbdd6ea313e5a1c3244a09f874f6ffbb1580d376 | refs/heads/master | 2023-05-29T12:59:45.084159 | 2021-06-10T18:44:41 | 2021-06-10T18:44:41 | 375,766,347 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 88 | py | from flask import Flask
app = Flask(__name__)
if __name__ == '__main__':
app.run() | [
"medvedodesa@gmail.com"
] | medvedodesa@gmail.com |
1222db5d93d5f79953beeaabc6e3d35f1f6460d7 | d18aa06a5c9d093bbd58c1c556c786c700ff5142 | /code/shutdown.py | 168e8972d4bae73d9526da291aa64b33f32c0630 | [] | no_license | AmrutRane/Raspi_DashCam | 95c569876befeae05b02a054cdbc2f3e1f581c59 | 569e753831111db49de8e7fd655bc5f1f9739d75 | refs/heads/main | 2023-02-05T03:22:53.154339 | 2020-12-26T17:23:53 | 2020-12-26T17:23:53 | 319,759,936 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,673 | py | #!/usr/bin/env python
# Safely shutdown Raspberry pi using a momentary switch
# use a physical push button, toggle switch or just short two gpio jumper wires
# You can salvage an old switch from a computer or other device.
# Connect momentary switch to pin 5 and 6 (default)
# or set gpio_pin variable below to desired gpio pin number
#
# Set button_hold variable to number of seconds to
# hold down momentary switch before action taken
# default is 2 seconds. 0=No delay
#
# make sure shutdown.py is executable
# cd ~/pi-timolo
# chmod +x shutdown.py
#
# Add line below to sudo crontab -e (without #) change path as necessary
# @reboot /home/pi/pi-timolo/shutdown.py
#
# After initial RPI power on
# Press switch or connect jumper wires for specified time
# to Initiate safe shutdown (halt)
#
# After shutdown wait 5 seconds then press switch again
# for specified time to initiate a startup
# Wait a few seconds between between operations
import RPi.GPIO as GPIO
import time
import subprocess
button_hold = 5 # number of seconds to hold button before action taken
gpio_pin = 5 # Set GPIO pin you want to use. default is 5 and ground is 6
GPIO.setmode(GPIO.BOARD)
# set GPIO pin to input, and enable the internal pull-up resistor
GPIO.setup(gpio_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# Poll the switch: with the pull-up enabled the input reads high while the
# switch is open, so the hold timer keeps resetting. Once the switch is
# held closed (input low) for more than button_hold seconds, leave the
# loop and trigger the shutdown command below.
start_time = time.time()
shutdown = False
while not shutdown:
    if GPIO.input(gpio_pin):
        # switch not pressed - restart the hold timer
        start_time = time.time()
    else:
        hold_time = time.time() - start_time
        if hold_time > button_hold:
            shutdown = True
    # short sleep: debounces the switch and limits CPU usage
    time.sleep(.1)
subprocess.call("/sbin/shutdown -h now", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) | [
"arane_consultant@ciis.canon.com"
] | arane_consultant@ciis.canon.com |
d84b47514c1052634e22a687f3e5112bc6cdd7b6 | 9db088609732b0aa1f428c73cc6775d54a00de7f | /p12.py | 834262788dc56a5ead6a57f106926089ceedaed9 | [] | no_license | Hemangi3598/chap-10_p12 | 3e6360970931822bf8c9a78f2c0328808c93d70d | d9afb63ef7eb9df19bbb690e7d91185743b2d4ca | refs/heads/main | 2023-08-04T09:36:48.524380 | 2021-09-21T08:05:56 | 2021-09-21T08:05:56 | 408,736,452 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py | # p12) delete an existing flie
import os
filename = input("enter filename to be deleted ")
if os.path.exists(filename):
os.remove(filename)
print(filename, " file deleted ")
else:
print(filename, "does not exist ") | [
"noreply@github.com"
] | noreply@github.com |
27eeeb653c05caa760b8785076bda08a096fb674 | 0eb599c3bbfa6e5b31516913b88cc9db3a1311ce | /AtCoder_unofficial/chokudai_speedrun_001_i.py | 5148ec2cff2560f0cb7e129c29a7606713c0aa9f | [] | no_license | Linus-MK/AtCoder | 5b84dc88c2d2773d0f97ed18265d303290da7879 | a587e89a9e0c2ab4d36b09176bcc95e901e14326 | refs/heads/master | 2022-11-25T05:37:12.148722 | 2022-11-17T16:04:10 | 2022-11-17T16:04:10 | 169,840,698 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 661 | py | n = int(input())
nums = list(map(int, input().split()))
# Count contiguous subarrays whose sum equals n, using prefix sums.
# The prefix sums are non-decreasing (the inputs are non-negative), so the
# inner scan can stop as soon as a window sum exceeds n; a sum of distinct
# values reaches n quickly, keeping this well under the worst-case N^2.
prefix = [0]
for value in nums:
    prefix.append(prefix[-1] + value)
ans = 0
for start in range(n + 1):
    for end in range(start + 1, n + 1):
        window = prefix[end] - prefix[start]
        if window == n:
            ans += 1
        elif window > n:
            break
print(ans)
| [
"13600386+Linus-MK@users.noreply.github.com"
] | 13600386+Linus-MK@users.noreply.github.com |
ac0032fb8c3c73b7de8979c896fcd0df0b3a547f | 263dc86ea58278d6e1db448c245f692049c73199 | /employeedetails/customer/urls.py | 00350cd841b461cd5617ec8e73ffdbac809561d7 | [] | no_license | krishnanunni-pr/MyDjangoProjects | c3a81b193a659c47fd6aec01777d6f689479eb9f | 3d644d2a261243be40f5678e9a61d508a5980143 | refs/heads/master | 2023-08-05T20:10:08.509167 | 2021-09-27T09:21:21 | 2021-09-27T09:21:21 | 394,686,590 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 280 | py | from django.urls import path
from customer import views
# URL routes for the customer app: the account signup/signin/signout views
# plus the home page at the app root.
urlpatterns=[
    path("accounts/signup",views.signup,name="signup"),
    path("accounts/signin",views.signin,name="signin"),
    path("accounts/signout",views.signout,name="signout"),
    path("",views.home,name="home")
] | [
"krishna@gmail.com"
] | krishna@gmail.com |
f63b4dd68f760c9f304342b9e16b4f91fa19bd8f | 6a95112805b64322953429270a305d01fef3faea | /dist/weewx-3.6.2/bin/weewx/drivers/ws23xx.py | 2b538e1eaf92814e477ceee58c4beccac5760015 | [
"GPL-1.0-or-later",
"GPL-3.0-only",
"Apache-2.0"
] | permissive | tomdotorg/docker-weewx | c6d59dc492a9e53f3bc898f7b9f593717092d72c | 7085654f455d39b06acc688738fde27e1f78ad1e | refs/heads/main | 2023-06-08T17:57:44.184399 | 2023-01-30T11:21:23 | 2023-01-30T11:21:23 | 54,113,384 | 21 | 16 | Apache-2.0 | 2022-10-19T23:46:26 | 2016-03-17T11:39:29 | Dockerfile | UTF-8 | Python | false | false | 79,394 | py | #!usr/bin/env python
#
# Copyright 2013 Matthew Wall
# See the file LICENSE.txt for your full rights.
#
# Thanks to Kenneth Lavrsen for the Open2300 implementation:
# http://www.lavrsen.dk/foswiki/bin/view/Open2300/WebHome
# description of the station communication interface:
# http://www.lavrsen.dk/foswiki/bin/view/Open2300/OpenWSAPI
# memory map:
# http://www.lavrsen.dk/foswiki/bin/view/Open2300/OpenWSMemoryMap
#
# Thanks to Russell Stuart for the ws2300 python implementation:
# http://ace-host.stuart.id.au/russell/files/ws2300/
# and the map of the station memory:
# http://ace-host.stuart.id.au/russell/files/ws2300/memory_map_2300.txt
#
# This implementation copies directly from Russell Stuart's implementation,
# but only the parts required to read from and write to the weather station.
"""Classes and functions for interfacing with WS-23xx weather stations.
LaCrosse made a number of stations in the 23xx series, including:
WS-2300, WS-2308, WS-2310, WS-2315, WS-2317, WS-2357
The stations were also sold as the TFA Matrix and TechnoLine 2350.
The WWVB receiver is located in the console.
To synchronize the console and sensors, press and hold the PLUS key for 2
seconds. When console is not synchronized no data will be received.
To do a factory reset, press and hold PRESSURE and WIND for 5 seconds.
A single bucket tip is 0.0204 in (0.518 mm).
The station has 175 history records. That is just over 7 days of data with
the default history recording interval of 60 minutes.
The station supports both wireless and wired communication between the
sensors and a station console. Wired connection updates data every 8 seconds.
Wireless connection updates data in 16 to 128 second intervals, depending on
wind speed and rain activity.
The connection type can be one of 0=cable, 3=lost, 15=wireless
sensor update frequency:
32 seconds when wind speed > 22.36 mph (wireless)
128 seconds when wind speed < 22.36 mph (wireless)
10 minutes (wireless after 5 failed attempts)
8 seconds (wired)
console update frequency:
15 seconds (pressure/temperature)
20 seconds (humidity)
It is possible to increase the rate of wireless updates:
http://www.wxforum.net/index.php?topic=2196.0
Sensors are connected by unshielded phone cables. RF interference can cause
random spikes in data, with one symptom being values of 25.5 m/s or 91.8 km/h
for the wind speed. Unfortunately those values are within the sensor limits
of 0-113 mph (50.52 m/s or 181.9 km/h). To reduce the number of spikes in
data, replace with shielded cables:
http://www.lavrsen.dk/sources/weather/windmod.htm
The station records wind speed and direction, but has no notion of gust.
The station calculates windchill and dewpoint.
The station has a serial connection to the computer.
This driver does not keep the serial port open for long periods. Instead, the
driver opens the serial port, reads data, then closes the port.
This driver polls the station. Use the polling_interval parameter to specify
how often to poll for data. If not specified, the polling interval will adapt
based on connection type and status.
USB-Serial Converters
With a USB-serial converter one can connect the station to a computer with
only USB ports, but not every converter will work properly. Perhaps the two
most common converters are based on the Prolific and FTDI chipsets. Many
people report better luck with the FTDI-based converters. Some converters
that use the Prolific chipset (PL2303) will work, but not all of them.
Known to work: ATEN UC-232A
Bounds checking
wind speed: 0-113 mph
wind direction: 0-360
humidity: 0-100
temperature: ok if not -22F and humidity is valid
dewpoint: ok if not -22F and humidity is valid
barometer: 25-35 inHg
rain rate: 0-10 in/hr
Discrepancies Between Implementations
As of December 2013, there are significant differences between the open2300,
wview, and ws2300 implementations. Current version numbers are as follows:
open2300 1.11
ws2300 1.8
wview 5.20.2
History Interval
The factory default is 60 minutes. The value stored in the console is one
less than the actual value (in minutes). So for the factory default of 60,
the console stores 59. The minimum interval is 1.
ws2300.py reports the actual value from the console, e.g., 59 when the
interval is 60. open2300 reports the interval, e.g., 60 when the interval
is 60. wview ignores the interval.
Detecting Bogus Sensor Values
wview queries the station 3 times for each sensor then accepts the value only
if the three values were close to each other.
open2300 sleeps 10 seconds if a wind measurement indicates invalid or overflow.
The ws2300.py implementation includes overflow and validity flags for values
from the wind sensors. It does not retry based on invalid or overflow.
Wind Speed
There is disagreement about how to calculate wind speed and how to determine
whether the wind speed is valid.
This driver introduces a WindConversion object that uses open2300/wview
decoding so that wind speeds match that of open2300/wview. ws2300 1.8
incorrectly uses bcd2num instead of bin2num. This bug is fixed in this driver.
The memory map indicates the following:
addr smpl description
0x527 0 Wind overflow flag: 0 = normal
0x528 0 Wind minimum code: 0=min, 1=--.-, 2=OFL
0x529 0 Windspeed: binary nibble 0 [m/s * 10]
0x52A 0 Windspeed: binary nibble 1 [m/s * 10]
0x52B 0 Windspeed: binary nibble 2 [m/s * 10]
0x52C 8 Wind Direction = nibble * 22.5 degrees
0x52D 8 Wind Direction 1 measurement ago
0x52E 9 Wind Direction 2 measurement ago
0x52F 8 Wind Direction 3 measurement ago
0x530 7 Wind Direction 4 measurement ago
0x531 7 Wind Direction 5 measurement ago
0x532 0
wview 5.20.2 implementation (wview apparently copied from open2300):
read 3 bytes starting at 0x527
0x527 x[0]
0x528 x[1]
0x529 x[2]
if ((x[0] != 0x00) ||
((x[1] == 0xff) && (((x[2] & 0xf) == 0) || ((x[2] & 0xf) == 1)))) {
fail
} else {
dir = (x[2] >> 4) * 22.5
speed = ((((x[2] & 0xf) << 8) + (x[1])) / 10.0 * 2.23693629)
maxdir = dir
maxspeed = speed
}
open2300 1.10 implementation:
read 6 bytes starting at 0x527
0x527 x[0]
0x528 x[1]
0x529 x[2]
0x52a x[3]
0x52b x[4]
0x52c x[5]
if ((x[0] != 0x00) ||
((x[1] == 0xff) && (((x[2] & 0xf) == 0) || ((x[2] & 0xf) == 1)))) {
sleep 10
} else {
dir = x[2] >> 4
speed = ((((x[2] & 0xf) << 8) + (x[1])) / 10.0)
dir0 = (x[2] >> 4) * 22.5
dir1 = (x[3] & 0xf) * 22.5
dir2 = (x[3] >> 4) * 22.5
dir3 = (x[4] & 0xf) * 22.5
dir4 = (x[4] >> 4) * 22.5
dir5 = (x[5] & 0xf) * 22.5
}
ws2300.py 1.8 implementation:
read 1 nibble starting at 0x527
read 1 nibble starting at 0x528
read 4 nibble starting at 0x529
read 3 nibble starting at 0x529
read 1 nibble starting at 0x52c
read 1 nibble starting at 0x52d
read 1 nibble starting at 0x52e
read 1 nibble starting at 0x52f
read 1 nibble starting at 0x530
read 1 nibble starting at 0x531
0x527 overflow
0x528 validity
0x529 speed[0]
0x52a speed[1]
0x52b speed[2]
0x52c dir[0]
speed: ((x[2] * 100 + x[1] * 10 + x[0]) % 1000) / 10
velocity: (x[2] * 100 + x[1] * 10 + x[0]) / 10
dir = data[0] * 22.5
speed = (bcd2num(data) % 10**3 + 0) / 10**1
velocity = (bcd2num(data[:3])/10.0, bin2num(data[3:4]) * 22.5)
bcd2num([a,b,c]) -> c*100+b*10+a
"""
# TODO: use pyserial instead of LinuxSerialPort
# TODO: put the __enter__ and __exit__ scaffolding on serial port, not Station
# FIXME: unless we can get setTime to work, just ignore the console clock
# FIXME: detect bogus wind speed/direction
# i see these when the wind instrument is disconnected:
# ws 26.399999
# wsh 21
# w0 135
from __future__ import with_statement
import syslog
import time
import string
import fcntl
import os
import select
import struct
import termios
import tty
import weeutil.weeutil
import weewx.drivers
import weewx.wxformulas
DRIVER_NAME = 'WS23xx'  # key of this driver's section in the weewx config dict
DRIVER_VERSION = '0.25'
def loader(config_dict, _):
    # weewx entry point: build the driver from the [WS23xx] config section.
    return WS23xxDriver(config_dict=config_dict, **config_dict[DRIVER_NAME])
def configurator_loader(_):
    # weewx entry point: build the command-line configurator.
    return WS23xxConfigurator()
def confeditor_loader():
    # weewx entry point: build the configuration editor.
    return WS23xxConfEditor()
DEFAULT_PORT = '/dev/ttyUSB0'  # serial device used when 'port' is not configured
def logmsg(dst, msg):
    # Emit msg to syslog at priority dst, prefixed with the driver name.
    syslog.syslog(dst, 'ws23xx: %s' % msg)
def logdbg(msg):
    # Log msg at debug priority.
    logmsg(syslog.LOG_DEBUG, msg)
def loginf(msg):
    # Log msg at informational priority.
    logmsg(syslog.LOG_INFO, msg)
def logcrt(msg):
    # Log msg at critical priority.
    logmsg(syslog.LOG_CRIT, msg)
def logerr(msg):
    # Log msg at error priority.
    logmsg(syslog.LOG_ERR, msg)
class WS23xxConfigurator(weewx.drivers.AbstractConfigurator):
    """Command-line utility for inspecting and configuring a WS23xx station."""
    def add_options(self, parser):
        """Add the WS23xx-specific options to the command-line parser."""
        super(WS23xxConfigurator, self).add_options(parser)
        parser.add_option("--info", dest="info", action="store_true",
                          help="display weather station configuration")
        parser.add_option("--current", dest="current", action="store_true",
                          help="get the current weather conditions")
        parser.add_option("--history", dest="nrecords", type=int, metavar="N",
                          help="display N history records")
        parser.add_option("--history-since", dest="recmin",
                          type=int, metavar="N",
                          help="display history records since N minutes ago")
        parser.add_option("--clear-memory", dest="clear", action="store_true",
                          help="clear station memory")
        parser.add_option("--set-time", dest="settime", action="store_true",
                          help="set the station clock to the current time")
        parser.add_option("--set-interval", dest="interval",
                          type=int, metavar="N",
                          help="set the station archive interval to N minutes")
    def do_options(self, options, parser, config_dict, prompt):
        """Dispatch to the action selected by the command-line options."""
        self.station = WS23xxDriver(**config_dict[DRIVER_NAME])
        if options.current:
            self.show_current()
        elif options.nrecords is not None:
            self.show_history(count=options.nrecords)
        elif options.recmin is not None:
            ts = int(time.time()) - options.recmin * 60
            self.show_history(ts=ts)
        elif options.settime:
            self.set_clock(prompt)
        elif options.interval is not None:
            self.set_interval(options.interval, prompt)
        elif options.clear:
            self.clear_history(prompt)
        else:
            self.show_info()
        self.station.closePort()
    def show_info(self):
        """Query the station then display the settings."""
        print 'Querying the station for the configuration...'
        config = self.station.getConfig()
        for key in sorted(config):
            print '%s: %s' % (key, config[key])
    def show_current(self):
        """Get current weather observation."""
        print 'Querying the station for current weather data...'
        for packet in self.station.genLoopPackets():
            print packet
            break
    def show_history(self, ts=None, count=0):
        """Show the indicated number of records or records since timestamp"""
        print "Querying the station for historical records..."
        for i, r in enumerate(self.station.genStartupRecords(since_ts=ts,
                                                             count=count)):
            print r
            if count and i > count:
                break
    def set_clock(self, prompt):
        """Set station clock to current time."""
        ans = None
        while ans not in ['y', 'n']:
            v = self.station.getTime()
            vstr = weeutil.weeutil.timestamp_to_string(v)
            print "Station clock is", vstr
            if prompt:
                ans = raw_input("Set station clock (y/n)? ")
            else:
                print "Setting station clock"
                ans = 'y'
            if ans == 'y':
                self.station.setTime()
                v = self.station.getTime()
                vstr = weeutil.weeutil.timestamp_to_string(v)
                print "Station clock is now", vstr
            elif ans == 'n':
                print "Set clock cancelled."
    def set_interval(self, interval, prompt):
        """Set the station archive interval, clearing the station memory."""
        print "Changing the interval will clear the station memory."
        v = self.station.getArchiveInterval()
        ans = None
        while ans not in ['y', 'n']:
            print "Interval is", v
            if prompt:
                ans = raw_input("Set interval to %d minutes (y/n)? " % interval)
            else:
                print "Setting interval to %d minutes" % interval
                ans = 'y'
            if ans == 'y':
                self.station.setArchiveInterval(interval)
                v = self.station.getArchiveInterval()
                print "Interval is now", v
            elif ans == 'n':
                print "Set interval cancelled."
    def clear_history(self, prompt):
        """Clear the console history memory, reporting the record count."""
        ans = None
        while ans not in ['y', 'n']:
            v = self.station.getRecordCount()
            print "Records in memory:", v
            if prompt:
                ans = raw_input("Clear console memory (y/n)? ")
            else:
                print 'Clearing console memory'
                ans = 'y'
            if ans == 'y':
                self.station.clearHistory()
                v = self.station.getRecordCount()
                print "Records in memory:", v
            elif ans == 'n':
                print "Clear memory cancelled."
class WS23xxDriver(weewx.drivers.AbstractDevice):
    """Driver for LaCrosse WS23xx stations."""
    def __init__(self, **stn_dict):
        """Initialize the station object.
        port: The serial port, e.g., /dev/ttyS0 or /dev/ttyUSB0
        [Required. Default is /dev/ttyS0]
        polling_interval: How often to poll the station, in seconds.
        [Optional. Default is 8 (wired) or 30 (wireless)]
        model: Which station model is this?
        [Optional. Default is 'LaCrosse WS23xx']
        """
        self._last_rain = None  # rain total from the previous packet
        self._last_cn = None  # last observed connection type code
        self._poll_wait = 60  # seconds between polls; adapts to connection
        self.model = stn_dict.get('model', 'LaCrosse WS23xx')
        self.port = stn_dict.get('port', DEFAULT_PORT)
        self.max_tries = int(stn_dict.get('max_tries', 5))
        self.retry_wait = int(stn_dict.get('retry_wait', 30))
        self.polling_interval = stn_dict.get('polling_interval', None)
        if self.polling_interval is not None:
            self.polling_interval = int(self.polling_interval)
        self.enable_startup_records = stn_dict.get('enable_startup_records',
                                                   True)
        self.enable_archive_records = stn_dict.get('enable_archive_records',
                                                   True)
        # 'single_open' holds one serial connection for the driver lifetime;
        # any other mode opens and closes the port around each operation.
        self.mode = stn_dict.get('mode', 'single_open')
        loginf('driver version is %s' % DRIVER_VERSION)
        loginf('serial port is %s' % self.port)
        loginf('polling interval is %s' % self.polling_interval)
        if self.mode == 'single_open':
            self.station = WS23xx(self.port)
        else:
            self.station = None
    def closePort(self):
        # Release the serial port if we are holding one open.
        if self.station is not None:
            self.station.close()
            self.station = None
    @property
    def hardware_name(self):
        # Station model string reported to weewx.
        return self.model
    # weewx wants the archive interval in seconds, but the console uses minutes
    @property
    def archive_interval(self):
        if not self.enable_startup_records and not self.enable_archive_records:
            raise NotImplementedError
        return self.getArchiveInterval() * 60
    def genLoopPackets(self):
        """Poll the station indefinitely, yielding one LOOP packet per read.
        Raises weewx.RetriesExceeded after max_tries consecutive failures.
        """
        ntries = 0
        while ntries < self.max_tries:
            ntries += 1
            try:
                if self.station:
                    data = self.station.get_raw_data(SENSOR_IDS)
                else:
                    with WS23xx(self.port) as s:
                        data = s.get_raw_data(SENSOR_IDS)
                packet = data_to_packet(data, int(time.time() + 0.5),
                                        last_rain=self._last_rain)
                self._last_rain = packet['rainTotal']
                ntries = 0  # a successful read resets the failure counter
                yield packet
                if self.polling_interval is not None:
                    self._poll_wait = self.polling_interval
                if data['cn'] != self._last_cn:
                    # connection type changed; adapt the polling interval
                    conn_info = get_conn_info(data['cn'])
                    loginf("connection changed from %s to %s" %
                           (get_conn_info(self._last_cn)[0], conn_info[0]))
                    self._last_cn = data['cn']
                    if self.polling_interval is None:
                        loginf("using %s second polling interval"
                               " for %s connection" %
                               (conn_info[1], conn_info[0]))
                        self._poll_wait = conn_info[1]
                time.sleep(self._poll_wait)
            except Ws2300.Ws2300Exception, e:
                logerr("Failed attempt %d of %d to get LOOP data: %s" %
                       (ntries, self.max_tries, e))
                logdbg("Waiting %d seconds before retry" % self.retry_wait)
                time.sleep(self.retry_wait)
        else:
            msg = "Max retries (%d) exceeded for LOOP data" % self.max_tries
            logerr(msg)
            raise weewx.RetriesExceeded(msg)
    def genStartupRecords(self, since_ts):
        # History records played back at startup, if enabled.
        if not self.enable_startup_records:
            raise NotImplementedError
        if self.station:
            return self.genRecords(self.station, since_ts)
        else:
            with WS23xx(self.port) as s:
                return self.genRecords(s, since_ts)
    def genArchiveRecords(self, since_ts, count=0):
        # History records fetched as archive records, if enabled.
        if not self.enable_archive_records:
            raise NotImplementedError
        if self.station:
            return self.genRecords(self.station, since_ts, count)
        else:
            with WS23xx(self.port) as s:
                return self.genRecords(s, since_ts, count)
    def genRecords(self, s, since_ts, count=0):
        # Convert raw history records to weewx records, carrying the rain
        # counter across records so per-record rainfall can be derived.
        last_rain = None
        for ts, data in s.gen_records(since_ts=since_ts, count=count):
            record = data_to_packet(data, ts, last_rain=last_rain)
            record['interval'] = data['interval']
            last_rain = record['rainTotal']
            yield record
    # def getTime(self) :
    #     with WS23xx(self.port) as s:
    #         return s.get_time()
    # def setTime(self):
    #     with WS23xx(self.port) as s:
    #         s.set_time()
    def getArchiveInterval(self):
        # Archive interval in minutes, as reported by the console.
        if self.station:
            return self.station.get_archive_interval()
        else:
            with WS23xx(self.port) as s:
                return s.get_archive_interval()
    def setArchiveInterval(self, interval):
        # Set the archive interval (minutes) on the console.
        if self.station:
            self.station.set_archive_interval(interval)
        else:
            with WS23xx(self.port) as s:
                s.set_archive_interval(interval)
    def getConfig(self):
        # Read every known measure and key the result by measure name.
        fdata = dict()
        if self.station:
            data = self.station.get_raw_data(Measure.IDS.keys())
        else:
            with WS23xx(self.port) as s:
                data = s.get_raw_data(Measure.IDS.keys())
        for key in data:
            fdata[Measure.IDS[key].name] = data[key]
        return fdata
    def getRecordCount(self):
        # Number of history records currently in console memory.
        if self.station:
            return self.station.get_record_count()
        else:
            with WS23xx(self.port) as s:
                return s.get_record_count()
    def clearHistory(self):
        # Clear the console history memory.
        if self.station:
            self.station.clear_memory()
        else:
            with WS23xx(self.port) as s:
                s.clear_memory()
# ids for current weather conditions and connection type
# it/ih: indoor temperature/humidity; ot/oh: outdoor temperature/humidity;
# pa: pressure; wind: (speed, dir, overflow, invalid); rh: rain rate;
# rt: rain total; dp: dewpoint; wc: windchill; cn: connection type
# (see data_to_packet for the mapping onto weewx packet fields)
SENSOR_IDS = ['it','ih','ot','oh','pa','wind','rh','rt','dp','wc','cn']
# polling interval, in seconds, for various connection types
POLLING_INTERVAL = {0: ("cable", 8), 3: ("lost", 60), 15: ("wireless", 30)}
def get_conn_info(conn_type):
    # Map a station connection-type code to a (name, polling seconds) pair,
    # falling back to an unknown connection polled once per minute.
    try:
        return POLLING_INTERVAL[conn_type]
    except KeyError:
        return ("unknown", 60)
def data_to_packet(data, ts, last_rain=None):
    """Convert raw station data to the units and field names weewx expects.
    station weewx (metric)
    temperature degree C degree C
    humidity percent percent
    uv index unitless unitless
    pressure mbar mbar
    wind speed m/s km/h
    wind dir degree degree
    wind gust None
    wind gust dir None
    rain mm cm
    rain rate cm/h
    """
    packet = {
        'usUnits': weewx.METRIC,
        'dateTime': ts,
        'inTemp': data['it'],
        'inHumidity': data['ih'],
        'outTemp': data['ot'],
        'outHumidity': data['oh'],
        'pressure': data['pa'],
        'windGust': None,  # station has no notion of gust
        'windGustDir': None,
    }
    speed, direction, overflow, invalid = data['wind']
    if overflow == 0 and invalid == 0:
        # station reports m/s; weewx metric wind speed is km/h
        packet['windSpeed'] = speed * 3.6 if speed is not None else None
        packet['windDir'] = direction
    else:
        loginf('invalid wind reading: speed=%s dir=%s overflow=%s invalid=%s' %
               (speed, direction, overflow, invalid))
        packet['windSpeed'] = None
        packet['windDir'] = None
    packet['rainTotal'] = data['rt']
    if packet['rainTotal'] is not None:
        packet['rainTotal'] /= 10  # weewx wants cm
    packet['rain'] = weewx.wxformulas.calculate_rain(
        packet['rainTotal'], last_rain)
    # the station itself provides these derived variables
    packet['rainRate'] = data['rh']
    if packet['rainRate'] is not None:
        packet['rainRate'] /= 10  # weewx wants cm/hr
    packet['dewpoint'] = data['dp']
    packet['windchill'] = data['wc']
    return packet
class WS23xx(object):
    """Wrap the Ws2300 object so we can easily open serial port, read/write,
    close serial port without all of the try/except/finally scaffolding."""
    def __init__(self, port):
        # Open the serial device and bind the station protocol object to it.
        logdbg('create LinuxSerialPort')
        self.serial_port = LinuxSerialPort(port)
        logdbg('create Ws2300')
        self.ws = Ws2300(self.serial_port)
    def __enter__(self):
        logdbg('station enter')
        return self
    def __exit__(self, type_, value, traceback):
        logdbg('station exit')
        self.ws = None
        self.close()
    def close(self):
        # Close and forget the underlying serial port.
        logdbg('close LinuxSerialPort')
        self.serial_port.close()
        self.serial_port = None
    def set_time(self, ts):
        """Set station time to indicated unix epoch."""
        logdbg('setting station clock to %s' %
               weeutil.weeutil.timestamp_to_string(ts))
        # 'sd' is the station date, 'st' the station time
        for m in [Measure.IDS['sd'], Measure.IDS['st']]:
            data = m.conv.value2binary(ts)
            cmd = m.conv.write(data, None)
            self.ws.write_safe(m.address, *cmd[1:])
    def get_time(self):
        """Return station time as unix epoch."""
        data = self.get_raw_data(['sw'])
        ts = int(data['sw'])
        logdbg('station clock is %s' % weeutil.weeutil.timestamp_to_string(ts))
        return ts
    def set_archive_interval(self, interval):
        """Set the archive interval in minutes."""
        if int(interval) < 1:
            raise ValueError('archive interval must be greater than zero')
        logdbg('setting hardware archive interval to %s minutes' % interval)
        # the console stores one less than the actual interval in minutes
        interval -= 1
        for m,v in [(Measure.IDS['hi'],interval), # archive interval in minutes
                    (Measure.IDS['hc'],1), # time till next sample in minutes
                    (Measure.IDS['hn'],0)]: # number of valid records
            data = m.conv.value2binary(v)
            cmd = m.conv.write(data, None)
            self.ws.write_safe(m.address, *cmd[1:])
    def get_archive_interval(self):
        """Return archive interval in minutes."""
        data = self.get_raw_data(['hi'])
        # the console stores one less than the actual interval in minutes
        x = 1 + int(data['hi'])
        logdbg('station archive interval is %s minutes' % x)
        return x
    def clear_memory(self):
        """Clear station memory."""
        logdbg('clearing console memory')
        # zeroing the record count effectively discards the history
        for m,v in [(Measure.IDS['hn'],0)]: # number of valid records
            data = m.conv.value2binary(v)
            cmd = m.conv.write(data, None)
            self.ws.write_safe(m.address, *cmd[1:])
    def get_record_count(self):
        # Number of valid history records currently in console memory.
        data = self.get_raw_data(['hn'])
        x = int(data['hn'])
        logdbg('record count is %s' % x)
        return x
    def gen_records(self, since_ts=None, count=None, use_computer_clock=True):
        """Get latest count records from the station from oldest to newest. If
        count is 0 or None, return all records.
        The station has a history interval, and it records when the last
        history sample was saved. So as long as the interval does not change
        between the first and last records, we are safe to infer timestamps
        for each record. This assumes that if the station loses power then
        the memory will be cleared.
        There is no timestamp associated with each record - we have to guess.
        The station tells us the time until the next record and the epoch of
        the latest record, based on the station's clock. So we can use that
        or use the computer clock to guess the timestamp for each record.
        To ensure accurate data, the first record must be read within one
        minute of the initial read and the remaining records must be read
        within numrec * interval minutes.
        """
        logdbg("gen_records: since_ts=%s count=%s clock=%s" %
               (since_ts, count, use_computer_clock))
        # hi: interval, hw: latest record epoch, hc: minutes to next record,
        # hn: number of valid records
        measures = [Measure.IDS['hi'], Measure.IDS['hw'],
                    Measure.IDS['hc'], Measure.IDS['hn']]
        raw_data = read_measurements(self.ws, measures)
        interval = 1 + int(measures[0].conv.binary2value(raw_data[0])) # minute
        latest_ts = int(measures[1].conv.binary2value(raw_data[1])) # epoch
        time_to_next = int(measures[2].conv.binary2value(raw_data[2])) # minute
        numrec = int(measures[3].conv.binary2value(raw_data[3]))
        now = int(time.time())
        cstr = 'station'
        if use_computer_clock:
            latest_ts = now - (interval - time_to_next) * 60
            cstr = 'computer'
        logdbg("using %s clock with latest_ts of %s" %
               (cstr, weeutil.weeutil.timestamp_to_string(latest_ts)))
        if not count:
            count = HistoryMeasure.MAX_HISTORY_RECORDS
        if since_ts is not None:
            count = int((now - since_ts) / (interval * 60))
            logdbg("count is %d to satisfy timestamp of %s" %
                   (count, weeutil.weeutil.timestamp_to_string(since_ts)))
        if count == 0:
            return
        if count > numrec:
            count = numrec
        if count > HistoryMeasure.MAX_HISTORY_RECORDS:
            count = HistoryMeasure.MAX_HISTORY_RECORDS
        # station is about to overwrite first record, so skip it
        if time_to_next <= 1 and count == HistoryMeasure.MAX_HISTORY_RECORDS:
            count -= 1
        logdbg("downloading %d records from station" % count)
        HistoryMeasure.set_constants(self.ws)
        # request records newest-first so the generator yields oldest-first
        measures = [HistoryMeasure(n) for n in range(count-1, -1, -1)]
        raw_data = read_measurements(self.ws, measures)
        last_ts = latest_ts - (count-1) * interval * 60
        for measure, nybbles in zip(measures, raw_data):
            value = measure.conv.binary2value(nybbles)
            data_dict = {
                'interval': interval,
                'it': value.temp_indoor,
                'ih': value.humidity_indoor,
                'ot': value.temp_outdoor,
                'oh': value.humidity_outdoor,
                'pa': value.pressure_absolute,
                'rt': value.rain,
                'wind': (value.wind_speed/10, value.wind_direction, 0, 0),
                'rh': None, # no rain rate in history
                'dp': None, # no dewpoint in history
                'wc': None, # no windchill in history
                }
            yield last_ts, data_dict
            last_ts += interval * 60
    def get_raw_data(self, labels):
        """Get raw data from the station, return as dictionary."""
        measures = [Measure.IDS[m] for m in labels]
        raw_data = read_measurements(self.ws, measures)
        data_dict = dict(zip(labels, [m.conv.binary2value(d) for m, d in zip(measures, raw_data)]))
        return data_dict
# =============================================================================
# The following code was adapted from ws2300.py by Russell Stuart
# =============================================================================
VERSION = "1.8 2013-08-26"  # version of the ws2300.py code this was adapted from
#
# Debug options.
#
# NOTE(review): this flag is consumed later in the module; presumably it
# enables tracing of serial port traffic - confirm before relying on it.
DEBUG_SERIAL = False
#
# A fatal error.
#
class FatalError(StandardError):
    """Unrecoverable error, tagged with its source and an optional cause."""
    source = None
    message = None
    cause = None
    def __init__(self, source, message, cause=None):
        StandardError.__init__(self, message)
        self.source = source
        self.message = message
        self.cause = cause
#
# The serial port interface. We can talk to the Ws2300 over anything
# that implements this interface.
#
class SerialPort(object):
    """Abstract serial port. The Ws2300 object can talk to the station over
    anything implementing this interface."""
    def clear(self):
        """Discard all characters waiting to be read."""
        raise NotImplementedError()
    def close(self):
        """Close the serial port."""
        raise NotImplementedError()
    def flush(self):
        """Wait for all characters to be sent."""
        raise NotImplementedError()
    def read_byte(self, timeout):
        """Read a character, waiting at most timeout seconds.
        Return the character read, or None if the timeout occurred."""
        raise NotImplementedError()
    def release(self):
        """Release the serial port. Closes it until it is used again, when
        it is automatically re-opened. It need not be implemented."""
        pass
    def write(self, data):
        """Write characters to the serial port."""
        raise NotImplementedError()
#
# A Linux Serial port. Implements the Serial interface on Linux.
#
class LinuxSerialPort(SerialPort):
    """Implements the SerialPort interface on Linux via termios/fcntl."""
    # Maps from the "speed,parity,bits,stop" settings string to termios
    # constants.
    SERIAL_CSIZE = {
        "7": tty.CS7,
        "8": tty.CS8, }
    SERIAL_PARITIES= {
        "e": tty.PARENB,
        "n": 0,
        "o": tty.PARENB|tty.PARODD, }
    SERIAL_SPEEDS = {
        "300": tty.B300,
        "600": tty.B600,
        "1200": tty.B1200,
        "2400": tty.B2400,
        "4800": tty.B4800,
        "9600": tty.B9600,
        "19200": tty.B19200,
        "38400": tty.B38400,
        "57600": tty.B57600,
        "115200": tty.B115200, }
    SERIAL_SETTINGS = "2400,n,8,1"
    device = None # string, the device name.
    orig_settings = None # list, original tcgetattr() attributes, restored on close().
    select_list = None # list, The serial ports
    serial_port = None # int, OS handle to device.
    settings = None # list after __init__: [speed, parity, csize, stop].
    #
    # Initialise ourselves.
    #
    def __init__(self,device,settings=SERIAL_SETTINGS):
        """Open and configure the tty.

        Raises FatalError if the settings string is malformed or the
        device cannot be opened.
        """
        self.device = device
        # Parse "speed,parity,bits,stop"; the None padding makes a short
        # settings string fail the length check below rather than raise
        # IndexError.
        self.settings = settings.split(",")
        self.settings.extend([None,None,None])
        self.settings[0] = self.__class__.SERIAL_SPEEDS.get(self.settings[0], None)
        self.settings[1] = self.__class__.SERIAL_PARITIES.get(self.settings[1].lower(), None)
        self.settings[2] = self.__class__.SERIAL_CSIZE.get(self.settings[2], None)
        if len(self.settings) != 7 or None in self.settings[:3]:
            raise FatalError(self.device, 'Bad serial settings "%s".' % settings)
        self.settings = self.settings[:4]
        #
        # Open the port.
        #
        try:
            self.serial_port = os.open(self.device, os.O_RDWR)
        except EnvironmentError, e:
            raise FatalError(self.device, "can't open tty device - %s." % str(e))
        try:
            fcntl.flock(self.serial_port, fcntl.LOCK_EX)
            # tcgetattr() returns [iflag, oflag, cflag, lflag, ispeed,
            # ospeed, cc]; keep the original so close() can restore it.
            self.orig_settings = tty.tcgetattr(self.serial_port)
            setup = self.orig_settings[:]
            setup[0] = tty.INPCK
            setup[1] = 0
            setup[2] = tty.CREAD|tty.HUPCL|tty.CLOCAL|reduce(lambda x,y: x|y, self.settings[:3])
            setup[3] = 0 # tty.ICANON
            setup[4] = self.settings[0]
            setup[5] = self.settings[0]
            # Raw mode: block until one byte arrives (VMIN=1, VTIME=0).
            setup[6] = ['\000']*len(setup[6])
            setup[6][tty.VMIN] = 1
            setup[6][tty.VTIME] = 0
            tty.tcflush(self.serial_port, tty.TCIOFLUSH)
            #
            # Restart IO if stopped using software flow control (^S/^Q). This
            # doesn't work on FreeBSD.
            #
            try:
                tty.tcflow(self.serial_port, tty.TCOON|tty.TCION)
            except termios.error:
                pass
            tty.tcsetattr(self.serial_port, tty.TCSAFLUSH, setup)
            #
            # Set DTR low and RTS high and leave other control lines untouched.
            #
            arg = struct.pack('I', 0)
            arg = fcntl.ioctl(self.serial_port, tty.TIOCMGET, arg)
            portstatus = struct.unpack('I', arg)[0]
            portstatus = portstatus & ~tty.TIOCM_DTR | tty.TIOCM_RTS
            arg = struct.pack('I', portstatus)
            fcntl.ioctl(self.serial_port, tty.TIOCMSET, arg)
            self.select_list = [self.serial_port]
        except Exception:
            # Don't leak the file descriptor if configuration failed.
            os.close(self.serial_port)
            raise
    def close(self):
        """Restore the tty's original settings and close it."""
        if self.orig_settings:
            tty.tcsetattr(self.serial_port, tty.TCSANOW, self.orig_settings)
        os.close(self.serial_port)
    def read_byte(self, timeout):
        """Read one byte; return None if timeout seconds elapse first."""
        ready = select.select(self.select_list, [], [], timeout)
        if not ready[0]:
            return None
        return os.read(self.serial_port, 1)
    #
    # Write a string to the port.
    #
    def write(self, data):
        os.write(self.serial_port, data)
    #
    # Flush the input buffer.
    #
    def clear(self):
        tty.tcflush(self.serial_port, tty.TCIFLUSH)
    #
    # Flush the output buffer.
    #
    def flush(self):
        tty.tcdrain(self.serial_port)
#
# This class reads and writes bytes to a Ws2300. It is passed something
# that implements the Serial interface. The major routines are:
#
# Ws2300() - Create one of these objects that talks over the serial port.
# read_batch() - Reads data from the device using an scatter/gather interface.
# write_safe() - Writes data to the device.
#
class Ws2300(object):
    """Reads and writes bytes to a Ws2300 weather station.

    It is passed something that implements the SerialPort interface.
    The major routines are:

      Ws2300()     - Create one of these objects that talks over the serial port.
      read_batch() - Reads data from the device using a scatter/gather interface.
      write_safe() - Writes data to the device.
    """
    #
    # An exception for us.
    #
    class Ws2300Exception(weewx.WeeWxIOError):
        def __init__(self, *args):
            weewx.WeeWxIOError.__init__(self, *args)
    #
    # Constants we use.
    #
    MAXBLOCK = 30       # max nybbles in one read request
    MAXRETRIES = 50     # retries before a read/write is abandoned
    MAXWINDRETRIES= 20
    WRITENIB = 0x42     # encode constant: write a nybble
    SETBIT = 0x12       # encode constant: set a single bit
    UNSETBIT = 0x32     # encode constant: clear a single bit
    WRITEACK = 0x10     # ack base expected for WRITENIB
    SETACK = 0x04       # ack base expected for SETBIT
    UNSETACK = 0x0C     # ack base expected for UNSETBIT
    RESET_MIN = 0x01
    RESET_MAX = 0x02
    MAX_RESETS = 100    # reset attempts before reset_06() gives up
    #
    # Instance data.
    #
    log_buffer = None # list, action log
    log_mode = None # string, Log mode
    log_nest = None # int, Nesting of log actions (was misspelled 'long_nest')
    serial_port = None # string, SerialPort port to use
    #
    # Initialise ourselves.
    #
    def __init__(self, serial_port):
        self.log_buffer = []
        self.log_nest = 0
        self.serial_port = serial_port
    #
    # Write data to the device.
    #
    def write_byte(self, data):
        """Write a single byte to the device, recording it in the log."""
        if self.log_mode != 'w':
            if self.log_mode != 'e':
                self.log(' ')
            self.log_mode = 'w'
        self.log("%02x" % ord(data))
        self.serial_port.write(data)
    #
    # Read a byte from the device.
    #
    def read_byte(self, timeout=1.0):
        """Read one byte, or None on timeout; the result is logged."""
        if self.log_mode != 'r':
            self.log_mode = 'r'
            self.log(':')
        result = self.serial_port.read_byte(timeout)
        if not result:
            self.log("--")
        else:
            self.log("%02x" % ord(result))
        return result
    #
    # Remove all pending incoming characters.
    #
    def clear_device(self):
        if self.log_mode != 'e':
            self.log(' ')
        self.log_mode = 'c'
        self.log("C")
        self.serial_port.clear()
    #
    # Write a reset string and wait for a reply.
    #
    def reset_06(self):
        """Send 0x06 resets until the station answers 0x02.

        Raises Ws2300Exception if the station never responds.
        """
        self.log_enter("re")
        try:
            for _ in range(self.__class__.MAX_RESETS):
                self.clear_device()
                self.write_byte('\x06')
                #
                # Occasionally 0, then 2 is returned. If 0 comes back,
                # continue reading as this is more efficient than sending
                # an out-of sync reset and letting the data reads restore
                # synchronization. Occasionally, multiple 2's are returned.
                # Read with a fast timeout until all data is exhausted, if
                # we got a 2 back at all, we consider it a success.
                #
                success = False
                answer = self.read_byte()
                while answer != None:
                    if answer == '\x02':
                        success = True
                    answer = self.read_byte(0.05)
                if success:
                    return
            msg = "Reset failed, %d retries, no response" % self.__class__.MAX_RESETS
            raise self.Ws2300Exception(msg)
        finally:
            self.log_exit()
    #
    # Encode the address.
    #
    def write_address(self,address):
        """Send a 4-nybble address, verifying each ack; returns success."""
        for digit in range(4):
            byte = chr((address >> (4 * (3-digit)) & 0xF) * 4 + 0x82)
            self.write_byte(byte)
            ack = chr(digit * 16 + (ord(byte) - 0x82) // 4)
            answer = self.read_byte()
            if ack != answer:
                self.log("??")
                return False
        return True
    #
    # Write data, checking the reply.
    #
    def write_data(self,nybble_address,nybbles,encode_constant=None):
        """Write nybbles at an address; returns True, or None on failure."""
        self.log_enter("wd")
        try:
            if not self.write_address(nybble_address):
                return None
            if encode_constant == None:
                encode_constant = self.WRITENIB
            encoded_data = ''.join([
                chr(nybbles[i]*4 + encode_constant)
                for i in range(len(nybbles))])
            # Each encode constant has a matching ack base the station
            # must echo back for every nybble written.
            ack_constant = {
                self.SETBIT: self.SETACK,
                self.UNSETBIT: self.UNSETACK,
                self.WRITENIB: self.WRITEACK
            }[encode_constant]
            self.log(",")
            for i in range(len(encoded_data)):
                self.write_byte(encoded_data[i])
                answer = self.read_byte()
                if chr(nybbles[i] + ack_constant) != answer:
                    self.log("??")
                    return None
            return True
        finally:
            self.log_exit()
    #
    # Reset the device and write a command, verifing it was written correctly.
    #
    def write_safe(self,nybble_address,nybbles,encode_constant=None):
        """write_data() preceded by a reset, retried up to MAXRETRIES."""
        self.log_enter("ws")
        try:
            for _ in range(self.MAXRETRIES):
                self.reset_06()
                command_data = self.write_data(nybble_address,nybbles,encode_constant)
                if command_data != None:
                    return command_data
            raise self.Ws2300Exception("write_safe failed, retries exceeded")
        finally:
            self.log_exit()
    #
    # A total kuldge this, but its the easiest way to force the 'computer
    # time' to look like a normal ws2300 variable, which it most definitely
    # isn't, of course.
    #
    def read_computer_time(self,nybble_address,nybble_count):
        """Serve the negative pseudo-addresses from this host's clock."""
        now = time.time()
        tm = time.localtime(now)
        tu = time.gmtime(now)
        year2 = tm[0] % 100
        datetime_data = (
            tu[5]%10, tu[5]//10, tu[4]%10, tu[4]//10, tu[3]%10, tu[3]//10,
            tm[5]%10, tm[5]//10, tm[4]%10, tm[4]//10, tm[3]%10, tm[3]//10,
            tm[2]%10, tm[2]//10, tm[1]%10, tm[1]//10, year2%10, year2//10)
        address = nybble_address+18
        return datetime_data[address:address+nybble_count]
    #
    # Read 'length' nybbles at address. Returns: (nybble_at_address, ...).
    # Can't read more than MAXBLOCK nybbles at a time.
    #
    def read_data(self,nybble_address,nybble_count):
        if nybble_address < 0:
            return self.read_computer_time(nybble_address,nybble_count)
        self.log_enter("rd")
        try:
            if nybble_count < 1 or nybble_count > self.MAXBLOCK:
                # BUG FIX: the exception used to be constructed but never
                # raised, so oversized requests fell through to the read.
                raise StandardError("Too many nybbles requested")
            bytes_ = (nybble_count + 1) // 2
            if not self.write_address(nybble_address):
                return None
            #
            # Write the number bytes we want to read.
            #
            encoded_data = chr(0xC2 + bytes_*4)
            self.write_byte(encoded_data)
            answer = self.read_byte()
            check = chr(0x30 + bytes_)
            if answer != check:
                self.log("??")
                return None
            #
            # Read the response.
            #
            self.log(", :")
            response = ""
            for _ in range(bytes_):
                answer = self.read_byte()
                if answer == None:
                    return None
                response += answer
            #
            # Read and verify checksum
            #
            answer = self.read_byte()
            checksum = sum([ord(b) for b in response]) % 256
            if chr(checksum) != answer:
                self.log("??")
                return None
            # Split each byte into its two nybbles, low nybble first.
            # ('//' matches the integer division used elsewhere here.)
            flatten = lambda a,b: a + (ord(b) % 16, ord(b) // 16)
            return reduce(flatten, response, ())[:nybble_count]
        finally:
            self.log_exit()
    #
    # Read a batch of blocks. Batches is a list of data to be read:
    #  [(address_of_first_nybble, length_in_nybbles), ...]
    # returns:
    #  [(nybble_at_address, ...), ...]
    #
    def read_batch(self,batches):
        """Scatter/gather read, splitting each batch into MAXBLOCK chunks."""
        self.log_enter("rb start")
        self.log_exit()
        try:
            if [b for b in batches if b[0] >= 0]:
                self.reset_06()
            result = []
            for batch in batches:
                address = batch[0]
                data = ()
                for start_pos in range(0,batch[1],self.MAXBLOCK):
                    for _ in range(self.MAXRETRIES):
                        bytes_ = min(self.MAXBLOCK, batch[1]-start_pos)
                        response = self.read_data(address + start_pos, bytes_)
                        if response != None:
                            break
                        self.reset_06()
                    if response == None:
                        raise self.Ws2300Exception("read failed, retries exceeded")
                    data += response
                result.append(data)
            return result
        finally:
            self.log_enter("rb end")
            self.log_exit()
    #
    # Reset the device, read a block of nybbles at the passed address.
    #
    def read_safe(self,nybble_address,nybble_count):
        self.log_enter("rs")
        try:
            return self.read_batch([(nybble_address,nybble_count)])[0]
        finally:
            self.log_exit()
    #
    # Debug logging of serial IO.
    #
    def log(self, s):
        if not DEBUG_SERIAL:
            return
        self.log_buffer[-1] = self.log_buffer[-1] + s
    def log_enter(self, action):
        if not DEBUG_SERIAL:
            return
        self.log_nest += 1
        if self.log_nest == 1:
            # Keep at most ~1000 log entries.
            if len(self.log_buffer) > 1000:
                del self.log_buffer[0]
            self.log_buffer.append("%5.2f %s " % (time.time() % 100, action))
            self.log_mode = 'e'
    def log_exit(self):
        if not DEBUG_SERIAL:
            return
        self.log_nest -= 1
#
# Print a data block.
#
def bcd2num(nybbles):
    """Decode a little-endian BCD nybble sequence into an integer."""
    value = 0
    for digit in reversed(list(nybbles)):
        value = value * 10 + digit
    return value
def num2bcd(number, nybble_count):
    """Encode an integer as nybble_count little-endian BCD nybbles."""
    nybbles = []
    remainder = int(number)
    for _ in range(nybble_count):
        remainder, digit = divmod(remainder, 10)
        nybbles.append(digit)
    return tuple(nybbles)
def bin2num(nybbles):
    """Decode a little-endian base-16 nybble sequence into an integer."""
    value = 0
    for nybble in reversed(list(nybbles)):
        value = value * 16 + nybble
    return value
def num2bin(number, nybble_count):
    """Encode an integer as nybble_count little-endian base-16 nybbles."""
    nybbles = []
    remainder = int(number)
    for _ in range(nybble_count):
        remainder, nybble = divmod(remainder, 16)
        nybbles.append(nybble)
    return tuple(nybbles)
#
# A "Conversion" encapsulates a unit of measurement on the Ws2300. Eg
# temperature, or wind speed.
#
class Conversion(object):
    """A unit of measurement on the Ws2300 (e.g. temperature, wind speed).

    Subclasses define how a value is encoded as nybbles on the station
    and how it is rendered as / parsed from text.
    """
    description = None   # Description of the units.
    nybble_count = None  # Number of nybbles used on the WS2300
    units = None         # Units name (eg hPa).

    def __init__(self, units, nybble_count, description):
        """Record the units text, encoded size in nybbles, and description."""
        self.units = units
        self.description = description
        self.nybble_count = nybble_count

    def binary2value(self, data):
        """Convert the nybbles read from the ws2300 to our internal value."""
        raise NotImplementedError()

    def value2binary(self, value):
        """Convert our internal value to nybbles writable to the ws2300."""
        raise NotImplementedError()

    def str(self, value):
        """Render the value as text."""
        raise NotImplementedError()

    def parse(self, s):
        """Convert the string produced by str() back to the value."""
        raise NotImplementedError()

    def write(self, data, nybble):
        """Transform data into something that can be written.

        Returns (new_bytes, ws2300.write_safe_args, ...).  This only
        becomes tricky when less than a nybble is written.
        """
        return (data, data)

    def garbage(self, data):
        """Optional plausibility check; True means data looks like garbage.

        Sometimes a communications error slips past the weak checksums
        the Ws2300 uses; subclasses may add another layer of checking.
        """
        return False
#
# For values stores as binary numbers.
#
class BinConversion(Conversion):
    """Conversion for values stored as plain binary numbers.

    scale is the number of implied decimal places; mult is an optional
    multiplier applied to the raw binary value.
    """
    mult = None
    scale = None
    units = None
    def __init__(self, units, nybble_count, scale, description, mult=1, check=None):
        Conversion.__init__(self, units, nybble_count, description)
        self.units = units
        self.scale = scale
        self.mult = mult
    def binary2value(self, data):
        scaled = bin2num(data) * self.mult
        return scaled / 10.0**self.scale
    def value2binary(self, value):
        raw = int(value * 10**self.scale) // self.mult
        return num2bin(raw, self.nybble_count)
    def str(self, value):
        return "%.*f" % (self.scale, value)
    def parse(self, s):
        return float(s)
#
# For values stored as BCD numbers.
#
class BcdConversion(Conversion):
    """Conversion for values stored as BCD numbers.

    offset is added to the raw BCD value; scale is the number of
    implied decimal places.
    """
    offset = None
    scale = None
    units = None
    def __init__(self, units, nybble_count, scale, description, offset=0):
        Conversion.__init__(self, units, nybble_count, description)
        self.units = units
        self.scale = scale
        self.offset = offset
    def binary2value(self, data):
        # Mask to nybble_count decimal digits, apply offset, then scale.
        raw = bcd2num(data) % 10**self.nybble_count + self.offset
        return float(raw) / 10**self.scale
    def value2binary(self, value):
        scaled = int(value * 10**self.scale) - self.offset
        return num2bcd(scaled, self.nybble_count)
    def str(self, value):
        return "%.*f" % (self.scale, value)
    def parse(self, s):
        return float(s)
#
# For pressures. Add a garbage check.
#
class PressureConversion(BcdConversion):
    """BCD pressure in hPa, with an added plausibility check."""
    def __init__(self):
        BcdConversion.__init__(self, "hPa", 5, 1, "pressure")
    def garbage(self, data):
        # Readings outside 900..1200 hPa are treated as line noise.
        hpa = self.binary2value(data)
        return not 900 <= hpa <= 1200
#
# For values the represent a date.
#
class ConversionDate(Conversion):
    """Base class for values that represent a date and/or time.

    Values are unix timestamps; format_ is the strftime/strptime
    pattern used by str() and parse().
    """
    format = None
    def __init__(self, nybble_count, format_):
        # Turn the strftime pattern into a readable description,
        # eg "%Y-%m-%d" -> "yyyy-mm-dd".
        description = format_
        for spec, text in (("%Y", "yyyy"), ("%m", "mm"), ("%d", "dd"),
                           ("%H", "hh"), ("%M", "mm"), ("%S", "ss")):
            description = description.replace(spec, text)
        Conversion.__init__(self, "", nybble_count, description)
        self.format = format_
    def str(self, value):
        return time.strftime(self.format, time.localtime(value))
    def parse(self, s):
        return time.mktime(time.strptime(s, self.format))
class DateConversion(ConversionDate):
    """Date stored as 6 BCD nybbles: yymmdd, year offset from 2000."""
    def __init__(self):
        ConversionDate.__init__(self, 6, "%Y-%m-%d")
    def binary2value(self, data):
        x = bcd2num(data)
        # value2binary() stores tm_year - 2000, so 2000 must be added
        # back here (as the other ConversionDate subclasses do); without
        # it mktime() is handed a two-digit year and the round trip
        # through value2binary() is broken.
        return time.mktime((
            x // 10000 % 100 + 2000,
            x // 100 % 100,
            x % 100,
            0,
            0,
            0,
            0,
            0,
            0))
    def value2binary(self, value):
        tm = time.localtime(value)
        dt = tm[2] + tm[1] * 100 + (tm[0]-2000) * 10000
        return num2bcd(dt, self.nybble_count)
class DatetimeConversion(ConversionDate):
    """Date and time stored as 11 BCD nybbles.

    Decimal digit layout, most to least significant:
    yy mm dd w hh mm, where w is a day-of-week digit.
    """
    def __init__(self):
        ConversionDate.__init__(self, 11, "%Y-%m-%d %H:%M")
    def binary2value(self, data):
        # The day-of-week digit (10^4 position) is skipped on read;
        # each field is extracted with an integer divide and modulus.
        x = bcd2num(data)
        return time.mktime((
            x // 1000000000 % 100 + 2000,
            x // 10000000 % 100,
            x // 100000 % 100,
            x // 100 % 100,
            x % 100,
            0,
            0,
            0,
            0))
    def value2binary(self, value):
        tm = time.localtime(value)
        # ws2300 day-of-week digit: Monday=1 .. Sunday=7.
        dow = tm[6] + 1
        dt = tm[4]+(tm[3]+(dow+(tm[2]+(tm[1]+(tm[0]-2000)*100)*100)*10)*100)*100
        return num2bcd(dt, self.nybble_count)
class UnixtimeConversion(ConversionDate):
    """Full date and time stored as 12 BCD nybbles: yymmddhhmmss."""
    def __init__(self):
        ConversionDate.__init__(self, 12, "%Y-%m-%d %H:%M:%S")
    def binary2value(self, data):
        # Extract yy, mm, dd, hh, mm, ss from the packed decimal value.
        x = bcd2num(data)
        return time.mktime((
            x //10000000000 % 100 + 2000,
            x // 100000000 % 100,
            x // 1000000 % 100,
            x // 10000 % 100,
            x // 100 % 100,
            x % 100,
            0,
            0,
            0))
    def value2binary(self, value):
        tm = time.localtime(value)
        # Pack the fields back as yymmddhhmmss (year offset from 2000).
        dt = tm[5]+(tm[4]+(tm[3]+(tm[2]+(tm[1]+(tm[0]-2000)*100)*100)*100)*100)*100
        return num2bcd(dt, self.nybble_count)
class TimestampConversion(ConversionDate):
    """Date and time (minute resolution) as 10 BCD nybbles: yymmddhhmm."""
    def __init__(self):
        ConversionDate.__init__(self, 10, "%Y-%m-%d %H:%M")
    def binary2value(self, data):
        # Extract yy, mm, dd, hh, mm from the packed decimal value.
        x = bcd2num(data)
        return time.mktime((
            x // 100000000 % 100 + 2000,
            x // 1000000 % 100,
            x // 10000 % 100,
            x // 100 % 100,
            x % 100,
            0,
            0,
            0,
            0))
    def value2binary(self, value):
        tm = time.localtime(value)
        # Pack the fields back as yymmddhhmm (year offset from 2000).
        dt = tm[4] + (tm[3] + (tm[2] + (tm[1] + (tm[0]-2000)*100)*100)*100)*100
        return num2bcd(dt, self.nybble_count)
class TimeConversion(ConversionDate):
    """Time of day stored as 6 BCD nybbles: hhmmss."""
    def __init__(self):
        ConversionDate.__init__(self, 6, "%H:%M:%S")
    def binary2value(self, data):
        # Build a timestamp from the time fields alone, then shift by the
        # timezone offset.  NOTE(review): mktime() on a zeroed date tuple
        # is platform-dependent; this mirrors the original ws2300 code.
        x = bcd2num(data)
        return time.mktime((
            0,
            0,
            0,
            x // 10000 % 100,
            x // 100 % 100,
            x % 100,
            0,
            0,
            0)) - time.timezone
    def value2binary(self, value):
        tm = time.localtime(value)
        dt = tm[5] + tm[4]*100 + tm[3]*10000
        return num2bcd(dt, self.nybble_count)
    def parse(self, s):
        # Override ConversionDate.parse: only the time-of-day fields of
        # the parsed string matter, and the timezone offset subtracted in
        # binary2value() is added back.
        return time.mktime((0,0,0) + time.strptime(s, self.format)[3:]) + time.timezone
class WindDirectionConversion(Conversion):
    """Wind direction in degrees, one nybble of 22.5-degree sectors."""
    def __init__(self):
        Conversion.__init__(self, "deg", 1, "North=0 clockwise")
    def binary2value(self, data):
        # The nybble holds the compass sector, 0..15.
        return data[0] * 22.5
    def value2binary(self, value):
        # Round to the nearest 22.5-degree sector (half-step is 11.25).
        sector = int((value + 11.25) / 22.5)
        return (sector,)
    def str(self, value):
        return "%g" % value
    def parse(self, s):
        return float(s)
class WindVelocityConversion(Conversion):
    """Combined wind speed (m/s) and direction (degrees)."""
    def __init__(self):
        Conversion.__init__(self, "ms,d", 4, "wind speed and direction")
    def binary2value(self, data):
        # Three binary nybbles of speed in 0.1 m/s units, then one
        # nybble holding the 22.5-degree direction sector.
        return (bin2num(data[:3])/10.0, bin2num(data[3:4]) * 22.5)
    def value2binary(self, value):
        # Round the direction to the nearest 22.5-degree sector.  The
        # half-step is 11.25 (= 22.5/2), matching WindDirectionConversion;
        # the previous constant 11.5 mis-rounded directions just below a
        # sector boundary.
        return num2bin(value[0]*10, 3) + num2bin((value[1] + 11.25) / 22.5, 1)
    def str(self, value):
        return "%.1f,%g" % value
    def parse(self, s):
        return tuple([float(x) for x in s.split(",")])
# The ws2300 1.8 implementation does not calculate wind speed correctly -
# it uses bcd2num instead of bin2num. This conversion object uses bin2num
# decoding and it reads all wind data in a single transcation so that we do
# not suffer coherency problems.
class WindConversion(Conversion):
    """Read-only wind group: speed, direction plus overflow/validity flags.

    All wind data is decoded from one 12-nybble read so the values are
    coherent; speed uses bin2num decoding (the ws2300 1.8 code wrongly
    used bcd2num).  No value2binary() is provided - this measure is
    never written.
    """
    def __init__(self):
        Conversion.__init__(self, "ms,d,o,v", 12, "wind speed, dir, validity")
    def binary2value(self, data):
        overflow = data[0]
        validity = data[1]
        speed = bin2num(data[2:5]) / 10.0  # stored in 0.1 m/s units
        direction = data[5] * 22.5  # 22.5-degree sectors
        return (speed, direction, overflow, validity)
    def str(self, value):
        return "%.1f,%g,%s,%s" % value
    def parse(self, s):
        return tuple([float(x) for x in s.split(",")])
#
# For non-numerical values.
#
class TextConversion(Conversion):
    """Conversion for enumerated (non-numerical) values.

    constants maps the stored nybble value to its text name.
    """
    constants = None
    def __init__(self, constants):
        # Describe every known value, eg "cable=0,lost=3,...,unknown-X".
        pairs = sorted(constants.items())
        fullname = ",".join([name + "=" + str(code) for code, name in pairs]) + ",unknown-X"
        Conversion.__init__(self, "", 1, fullname)
        self.constants = constants
    def binary2value(self, data):
        return data[0]
    def value2binary(self, value):
        return (value,)
    def str(self, value):
        # Values with no registered name render as "unknown-N".
        name = self.constants.get(value, None)
        if name is None:
            return "unknown-%d" % value
        return name
    def parse(self, s):
        # Reverse lookup of the name; None if unrecognized.
        matches = [code for code, name in self.constants.items() if name == s]
        if matches:
            return matches[0]
        return None
#
# For values that are represented by one bit.
#
class ConversionBit(Conversion):
    """Conversion for values represented by a single bit in a nybble.

    desc maps the bit value (0 or 1) to its display text.
    """
    bit = None
    desc = None
    def __init__(self, bit, desc):
        self.bit = bit
        self.desc = desc
        Conversion.__init__(self, "", 1, desc[0] + "=0," + desc[1] + "=1")
    def binary2value(self, data):
        # Test just our bit of the nybble, yielding 0 or 1.
        return 1 if data[0] & (1 << self.bit) else 0
    def value2binary(self, value):
        return (value << self.bit,)
    def str(self, value):
        return self.desc[value]
    def parse(self, s):
        # Reverse lookup of the description text.
        return [code for code, text in self.desc.items() if text == s][0]
class BitConversion(ConversionBit):
    """A single-bit value that is written back as part of a whole nybble."""
    def __init__(self, bit, desc):
        ConversionBit.__init__(self, bit, desc)
    #
    # Since Ws2300.write_safe() only writes nybbles and we have just one bit,
    # we have to insert that bit into the data_read so it can be written as
    # a nybble.
    #
    def write(self, data, nybble):
        # Clear our bit in the nybble read from the station, then OR in
        # the (already shifted) bit produced by value2binary().
        data = (nybble & ~(1 << self.bit) | data[0],)
        return (data, data)
class AlarmSetConversion(BitConversion):
    """Alarm enabled/disabled bit, rendered as "off"/"on"."""
    bit = None
    desc = None
    def __init__(self, bit):
        BitConversion.__init__(self, bit, {0:"off", 1:"on"})
class AlarmActiveConversion(BitConversion):
    """Alarm triggered bit, rendered as "inactive"/"active"."""
    bit = None
    desc = None
    def __init__(self, bit):
        BitConversion.__init__(self, bit, {0:"inactive", 1:"active"})
#
# For values that are represented by one bit, and must be written as
# a single bit.
#
class SetresetConversion(ConversionBit):
    """A single-bit value that must be written with the set/unset commands."""
    bit = None
    def __init__(self, bit, desc):
        ConversionBit.__init__(self, bit, desc)
    def write(self, data, nybble):
        # Setreset bits use a special write mode: choose the SETBIT or
        # UNSETBIT command according to the requested bit value.
        if data[0]:
            operation = Ws2300.SETBIT
        else:
            operation = Ws2300.UNSETBIT
        return ((nybble & ~(1 << self.bit) | data[0],), [self.bit], operation)
#
# Conversion for history. This kludge makes history fit into the framework
# used for all the other measures.
#
class HistoryConversion(Conversion):
    """Conversion for 19-nybble history records.

    This kludge makes history fit into the framework used for all the
    other measures.
    """
    class HistoryRecord(object):
        # One decoded history entry; attributes are filled in by
        # binary2value() or parse().
        temp_indoor = None
        temp_outdoor = None
        pressure_absolute = None
        humidity_indoor = None
        humidity_outdoor = None
        rain = None
        wind_speed = None
        wind_direction = None
        def __str__(self):
            return "%4.1fc %2d%% %4.1fc %2d%% %6.1fhPa %6.1fmm %2dm/s %5g" % (
                self.temp_indoor, self.humidity_indoor,
                self.temp_outdoor, self.humidity_outdoor,
                self.pressure_absolute, self.rain,
                self.wind_speed, self.wind_direction)
        def parse(cls, s):
            # Inverse of __str__(): strip unit suffixes from each token,
            # then convert field by field.
            rec = cls()
            toks = [tok.rstrip(string.ascii_letters + "%/") for tok in s.split()]
            rec.temp_indoor = float(toks[0])
            rec.humidity_indoor = int(toks[1])
            rec.temp_outdoor = float(toks[2])
            rec.humidity_outdoor = int(toks[3])
            rec.pressure_absolute = float(toks[4])
            rec.rain = float(toks[5])
            rec.wind_speed = int(toks[6])
            rec.wind_direction = int((float(toks[7]) + 11.25) / 22.5) % 16
            return rec
        parse = classmethod(parse)
    def __init__(self):
        Conversion.__init__(self, "", 19, "history")
    def binary2value(self, data):
        # Record layout: nybbles 0-4 pack both temperatures, 5-9 pack
        # pressure and indoor humidity, 10-11 BCD outdoor humidity,
        # 12-14 rain counter, 15-17 wind speed, 18 wind direction.
        value = self.__class__.HistoryRecord()
        n = bin2num(data[0:5])
        value.temp_indoor = (n % 1000) / 10.0 - 30
        value.temp_outdoor = (n - (n % 1000)) / 10000.0 - 30
        n = bin2num(data[5:10])
        value.pressure_absolute = (n % 10000) / 10.0
        if value.pressure_absolute < 500:
            value.pressure_absolute += 1000
        value.humidity_indoor = (n - (n % 10000)) / 10000.0
        value.humidity_outdoor = bcd2num(data[10:12])
        value.rain = bin2num(data[12:15]) * 0.518
        value.wind_speed = bin2num(data[15:18])
        value.wind_direction = bin2num(data[18:19]) * 22.5
        return value
    def value2binary(self, value):
        # Inverse of binary2value(): re-pack the record fields.
        result = ()
        n = int((value.temp_indoor + 30) * 10.0 + (value.temp_outdoor + 30) * 10000.0 + 0.5)
        result = result + num2bin(n, 5)
        n = value.pressure_absolute % 1000
        n = int(n * 10.0 + value.humidity_indoor * 10000.0 + 0.5)
        result = result + num2bin(n, 5)
        result = result + num2bcd(value.humidity_outdoor, 2)
        result = result + num2bin(int((value.rain + 0.518/2) / 0.518), 3)
        result = result + num2bin(value.wind_speed, 3)
        result = result + num2bin(value.wind_direction, 1)
        return result
    #
    # Print value.
    #
    def str(self, value):
        return str(value)
    #
    # Convert the string produced by "str()" back to the value.
    #
    def parse(self, s):
        return self.__class__.HistoryRecord.parse(s)
#
# Various conversions we know about.
#
# Alarm active/set bits.
conv_ala0 = AlarmActiveConversion(0)
conv_ala1 = AlarmActiveConversion(1)
conv_ala2 = AlarmActiveConversion(2)
conv_ala3 = AlarmActiveConversion(3)
conv_als0 = AlarmSetConversion(0)
conv_als1 = AlarmSetConversion(1)
conv_als2 = AlarmSetConversion(2)
conv_als3 = AlarmSetConversion(3)
# Single bits written with the set/unset commands.
conv_buzz = SetresetConversion(3, {0:'on', 1:'off'})
conv_lbck = SetresetConversion(0, {0:'off', 1:'on'})
# Dates, times and the history record.
conv_date = DateConversion()
conv_dtme = DatetimeConversion()
conv_utme = UnixtimeConversion()
conv_hist = HistoryConversion()
conv_stmp = TimestampConversion()
conv_time = TimeConversion()
# Wind values.
conv_wdir = WindDirectionConversion()
conv_wvel = WindVelocityConversion()
# Enumerated values.
conv_conn = TextConversion({0:"cable", 3:"lost", 15:"wireless"})
conv_fore = TextConversion({0:"rainy", 1:"cloudy", 2:"sunny"})
conv_spdu = TextConversion({0:"m/s", 1:"knots", 2:"beaufort", 3:"km/h", 4:"mph"})
conv_tend = TextConversion({0:"steady", 1:"rising", 2:"falling"})
conv_wovr = TextConversion({0:"no", 1:"overflow"})
conv_wvld = TextConversion({0:"ok", 1:"invalid", 2:"overflow"})
# Numeric values (binary and BCD encodings).
conv_lcon = BinConversion("", 1, 0, "contrast")
conv_rec2 = BinConversion("", 2, 0, "record number")
conv_humi = BcdConversion("%", 2, 0, "humidity")
conv_pres = PressureConversion()
conv_rain = BcdConversion("mm", 6, 2, "rain")
conv_temp = BcdConversion("C", 4, 2, "temperature", -3000)
conv_per2 = BinConversion("s", 2, 1, "time interval", 5)
conv_per3 = BinConversion("min", 3, 0, "time interval")
conv_wspd = BinConversion("m/s", 3, 1, "speed")
conv_wind = WindConversion()
#
# Define a measurement on the Ws2300. This encapsulates:
# - The names (abbrev and long) of the thing being measured, eg wind speed.
# - The location it can be found at in the Ws2300's memory map.
# - The Conversion used to represent the figure.
#
class Measure(object):
    """Define a measurement on the Ws2300.

    This encapsulates the names (abbrev and long) of the thing being
    measured, the nybble address it lives at in the Ws2300's memory
    map, and the Conversion used to represent the figure.  Instances
    register themselves in the IDS and NAMES class-level maps.
    """
    IDS = {} # map, Measures defined. {id: Measure, ...}
    NAMES = {} # map, Measures defined. {name: Measure, ...}
    address = None # int, Nybble address in the Ws2300
    conv = None # object, Type of value
    id = None # string, Short name
    name = None # string, Long name
    reset = None # string, Id of measure used to reset this one
    def __init__(self, address, id_, conv, name, reset=None):
        self.address = address
        self.conv = conv
        self.reset = reset
        if id_ != None:
            self.id = id_
            # Ids and names must be unique across all measures.
            assert not id_ in self.__class__.IDS
            self.__class__.IDS[id_] = self
        if name != None:
            self.name = name
            assert not name in self.__class__.NAMES
            self.__class__.NAMES[name] = self
    def __hash__(self):
        return hash(self.id)
    def __cmp__(self, other):
        # Python 2 rich comparison: measures are ordered by id.
        if isinstance(other, Measure):
            return cmp(self.id, other.id)
        return cmp(type(self), type(other))
#
# Conversion for raw Hex data. These are created as needed.
#
class HexConversion(Conversion):
    """Conversion for raw hex nybble data.  Instances are created as needed."""
    def __init__(self, nybble_count):
        Conversion.__init__(self, "", nybble_count, "hex data")
    def binary2value(self, data):
        return data
    def value2binary(self, value):
        return value
    def str(self, value):
        return ",".join(["%x" % nybble for nybble in value])
    def parse(self, s):
        # str() emits nybbles least-significant-digit first, so reverse
        # each comma/space separated token before flattening.
        toks = s.replace(","," ").split()
        for i in range(len(toks)):
            t = list(toks[i])
            t.reverse()
            toks[i] = ''.join(t)
        list_str = list(''.join(toks))
        self.nybble_count = len(list_str)
        # The digits are hexadecimal: int() must be told base 16, or it
        # would reject a-f (and str()'s "%x" output could not be parsed
        # back).
        return tuple([int(nybble, 16) for nybble in list_str])
#
# The raw nybble measure.
#
class HexMeasure(Measure):
    """A raw nybble measure at an arbitrary address.

    Measure.__init__ is not called, so instances are not added to the
    IDS/NAMES registries; the id_ argument is accepted but not stored.
    """
    def __init__(self, address, id_, conv, name):
        self.conv = conv
        self.address = address
        self.name = name
#
# A History record. Again a kludge to make history fit into the framework
# developed for the other measurements. History records are identified
# by their record number. Record number 0 is the most recently written
# record, record number 1 is the next most recently written and so on.
#
class HistoryMeasure(Measure):
    """A history record, identified by its record number.

    Again a kludge to make history fit into the framework developed for
    the other measurements.  Record number 0 is the most recently
    written record, record number 1 the next most recent, and so on.
    set_constants() must be called before offset/address are evaluated.
    Measure.__init__ is not called, so history records are not added to
    the IDS/NAMES registries.
    """
    HISTORY_BUFFER_ADDR = 0x6c6 # int, Address of the first history record
    MAX_HISTORY_RECORDS = 0xaf # int, Max number of history records stored
    LAST_POINTER = None # int, Pointer to last record
    RECORD_COUNT = None # int, Number of records in use
    recno = None # int, The record number this represents
    conv = conv_hist
    def __init__(self, recno):
        self.recno = recno
    def set_constants(cls, ws2300):
        # Read the current history pointer and record count from the
        # station ("hp" and "hn" measures).
        measures = [Measure.IDS["hp"], Measure.IDS["hn"]]
        data = read_measurements(ws2300, measures)
        cls.LAST_POINTER = int(measures[0].conv.binary2value(data[0]))
        cls.RECORD_COUNT = int(measures[1].conv.binary2value(data[1]))
    set_constants = classmethod(set_constants)
    def id(self):
        return "h%03d" % self.recno
    id = property(id)
    def name(self):
        return "history record %d" % self.recno
    name = property(name)
    def offset(self):
        # Distance of this record behind the last-written record,
        # wrapping around the circular history buffer.
        if self.LAST_POINTER is None:
            raise StandardError("HistoryMeasure.set_constants hasn't been called")
        return (self.LAST_POINTER - self.recno) % self.MAX_HISTORY_RECORDS
    offset = property(offset)
    def address(self):
        # Nybble address of this record in the station's memory.
        return self.HISTORY_BUFFER_ADDR + self.conv.nybble_count * self.offset
    address = property(address)
#
# The measurements we know about. This is all of them documented in
# memory_map_2300.txt, bar the history. History is handled specially.
# And of course, the "c?"'s aren't real measures at all - its the current
# time on this machine.
#
# Each row: Measure(address, id, conversion, human-readable name[, reset]).
# Negative addresses are synthetic (this computer's clock, not station memory).
Measure( -18, "ct", conv_time, "this computer's time")
Measure( -12, "cw", conv_utme, "this computer's date time")
Measure( -6, "cd", conv_date, "this computer's date")
Measure(0x006, "bz", conv_buzz, "buzzer")
Measure(0x00f, "wsu", conv_spdu, "wind speed units")
Measure(0x016, "lb", conv_lbck, "lcd backlight")
Measure(0x019, "sss", conv_als2, "storm warn alarm set")
Measure(0x019, "sts", conv_als0, "station time alarm set")
Measure(0x01a, "phs", conv_als3, "pressure max alarm set")
Measure(0x01a, "pls", conv_als2, "pressure min alarm set")
Measure(0x01b, "oths", conv_als3, "out temp max alarm set")
Measure(0x01b, "otls", conv_als2, "out temp min alarm set")
Measure(0x01b, "iths", conv_als1, "in temp max alarm set")
Measure(0x01b, "itls", conv_als0, "in temp min alarm set")
Measure(0x01c, "dphs", conv_als3, "dew point max alarm set")
Measure(0x01c, "dpls", conv_als2, "dew point min alarm set")
Measure(0x01c, "wchs", conv_als1, "wind chill max alarm set")
Measure(0x01c, "wcls", conv_als0, "wind chill min alarm set")
Measure(0x01d, "ihhs", conv_als3, "in humidity max alarm set")
Measure(0x01d, "ihls", conv_als2, "in humidity min alarm set")
Measure(0x01d, "ohhs", conv_als1, "out humidity max alarm set")
Measure(0x01d, "ohls", conv_als0, "out humidity min alarm set")
Measure(0x01e, "rhhs", conv_als1, "rain 1h alarm set")
Measure(0x01e, "rdhs", conv_als0, "rain 24h alarm set")
Measure(0x01f, "wds", conv_als2, "wind direction alarm set")
Measure(0x01f, "wshs", conv_als1, "wind speed max alarm set")
Measure(0x01f, "wsls", conv_als0, "wind speed min alarm set")
Measure(0x020, "siv", conv_ala2, "icon alarm active")
Measure(0x020, "stv", conv_ala0, "station time alarm active")
Measure(0x021, "phv", conv_ala3, "pressure max alarm active")
Measure(0x021, "plv", conv_ala2, "pressure min alarm active")
Measure(0x022, "othv", conv_ala3, "out temp max alarm active")
Measure(0x022, "otlv", conv_ala2, "out temp min alarm active")
Measure(0x022, "ithv", conv_ala1, "in temp max alarm active")
Measure(0x022, "itlv", conv_ala0, "in temp min alarm active")
Measure(0x023, "dphv", conv_ala3, "dew point max alarm active")
Measure(0x023, "dplv", conv_ala2, "dew point min alarm active")
Measure(0x023, "wchv", conv_ala1, "wind chill max alarm active")
Measure(0x023, "wclv", conv_ala0, "wind chill min alarm active")
Measure(0x024, "ihhv", conv_ala3, "in humidity max alarm active")
Measure(0x024, "ihlv", conv_ala2, "in humidity min alarm active")
Measure(0x024, "ohhv", conv_ala1, "out humidity max alarm active")
Measure(0x024, "ohlv", conv_ala0, "out humidity min alarm active")
Measure(0x025, "rhhv", conv_ala1, "rain 1h alarm active")
Measure(0x025, "rdhv", conv_ala0, "rain 24h alarm active")
Measure(0x026, "wdv", conv_ala2, "wind direction alarm active")
Measure(0x026, "wshv", conv_ala1, "wind speed max alarm active")
Measure(0x026, "wslv", conv_ala0, "wind speed min alarm active")
# Rows with id None mirror the alarm-active flags at a second address;
# having no id, they are presumably not registered for lookup by name —
# confirm against Measure.__init__.
Measure(0x027, None, conv_ala3, "pressure max alarm active alias")
Measure(0x027, None, conv_ala2, "pressure min alarm active alias")
Measure(0x028, None, conv_ala3, "out temp max alarm active alias")
Measure(0x028, None, conv_ala2, "out temp min alarm active alias")
Measure(0x028, None, conv_ala1, "in temp max alarm active alias")
Measure(0x028, None, conv_ala0, "in temp min alarm active alias")
Measure(0x029, None, conv_ala3, "dew point max alarm active alias")
Measure(0x029, None, conv_ala2, "dew point min alarm active alias")
Measure(0x029, None, conv_ala1, "wind chill max alarm active alias")
Measure(0x029, None, conv_ala0, "wind chill min alarm active alias")
Measure(0x02a, None, conv_ala3, "in humidity max alarm active alias")
Measure(0x02a, None, conv_ala2, "in humidity min alarm active alias")
Measure(0x02a, None, conv_ala1, "out humidity max alarm active alias")
Measure(0x02a, None, conv_ala0, "out humidity min alarm active alias")
Measure(0x02b, None, conv_ala1, "rain 1h alarm active alias")
Measure(0x02b, None, conv_ala0, "rain 24h alarm active alias")
# NOTE(review): the three 0x02c alias rows all use conv_ala2, while the
# primary entries at 0x026 use ala2/ala1/ala0 respectively — confirm
# against the WS-2300 memory map before relying on the speed aliases.
Measure(0x02c, None, conv_ala2, "wind direction alarm active alias")
Measure(0x02c, None, conv_ala2, "wind speed max alarm active alias")
Measure(0x02c, None, conv_ala2, "wind speed min alarm active alias")
Measure(0x200, "st", conv_time, "station set time", reset="ct")
Measure(0x23b, "sw", conv_dtme, "station current date time")
Measure(0x24d, "sd", conv_date, "station set date", reset="cd")
Measure(0x266, "lc", conv_lcon, "lcd contrast (ro)")
Measure(0x26b, "for", conv_fore, "forecast")
Measure(0x26c, "ten", conv_tend, "tendency")
Measure(0x346, "it", conv_temp, "in temp")
Measure(0x34b, "itl", conv_temp, "in temp min", reset="it")
Measure(0x350, "ith", conv_temp, "in temp max", reset="it")
Measure(0x354, "itlw", conv_stmp, "in temp min when", reset="sw")
Measure(0x35e, "ithw", conv_stmp, "in temp max when", reset="sw")
Measure(0x369, "itla", conv_temp, "in temp min alarm")
Measure(0x36e, "itha", conv_temp, "in temp max alarm")
Measure(0x373, "ot", conv_temp, "out temp")
Measure(0x378, "otl", conv_temp, "out temp min", reset="ot")
Measure(0x37d, "oth", conv_temp, "out temp max", reset="ot")
Measure(0x381, "otlw", conv_stmp, "out temp min when", reset="sw")
Measure(0x38b, "othw", conv_stmp, "out temp max when", reset="sw")
Measure(0x396, "otla", conv_temp, "out temp min alarm")
Measure(0x39b, "otha", conv_temp, "out temp max alarm")
Measure(0x3a0, "wc", conv_temp, "wind chill")
Measure(0x3a5, "wcl", conv_temp, "wind chill min", reset="wc")
Measure(0x3aa, "wch", conv_temp, "wind chill max", reset="wc")
Measure(0x3ae, "wclw", conv_stmp, "wind chill min when", reset="sw")
Measure(0x3b8, "wchw", conv_stmp, "wind chill max when", reset="sw")
Measure(0x3c3, "wcla", conv_temp, "wind chill min alarm")
Measure(0x3c8, "wcha", conv_temp, "wind chill max alarm")
Measure(0x3ce, "dp", conv_temp, "dew point")
Measure(0x3d3, "dpl", conv_temp, "dew point min", reset="dp")
Measure(0x3d8, "dph", conv_temp, "dew point max", reset="dp")
Measure(0x3dc, "dplw", conv_stmp, "dew point min when", reset="sw")
Measure(0x3e6, "dphw", conv_stmp, "dew point max when", reset="sw")
Measure(0x3f1, "dpla", conv_temp, "dew point min alarm")
Measure(0x3f6, "dpha", conv_temp, "dew point max alarm")
Measure(0x3fb, "ih", conv_humi, "in humidity")
Measure(0x3fd, "ihl", conv_humi, "in humidity min", reset="ih")
Measure(0x3ff, "ihh", conv_humi, "in humidity max", reset="ih")
Measure(0x401, "ihlw", conv_stmp, "in humidity min when", reset="sw")
Measure(0x40b, "ihhw", conv_stmp, "in humidity max when", reset="sw")
Measure(0x415, "ihla", conv_humi, "in humidity min alarm")
Measure(0x417, "ihha", conv_humi, "in humidity max alarm")
Measure(0x419, "oh", conv_humi, "out humidity")
Measure(0x41b, "ohl", conv_humi, "out humidity min", reset="oh")
Measure(0x41d, "ohh", conv_humi, "out humidity max", reset="oh")
Measure(0x41f, "ohlw", conv_stmp, "out humidity min when", reset="sw")
Measure(0x429, "ohhw", conv_stmp, "out humidity max when", reset="sw")
Measure(0x433, "ohla", conv_humi, "out humidity min alarm")
Measure(0x435, "ohha", conv_humi, "out humidity max alarm")
Measure(0x497, "rd", conv_rain, "rain 24h")
Measure(0x49d, "rdh", conv_rain, "rain 24h max", reset="rd")
Measure(0x4a3, "rdhw", conv_stmp, "rain 24h max when", reset="sw")
Measure(0x4ae, "rdha", conv_rain, "rain 24h max alarm")
Measure(0x4b4, "rh", conv_rain, "rain 1h")
Measure(0x4ba, "rhh", conv_rain, "rain 1h max", reset="rh")
Measure(0x4c0, "rhhw", conv_stmp, "rain 1h max when", reset="sw")
Measure(0x4cb, "rhha", conv_rain, "rain 1h max alarm")
Measure(0x4d2, "rt", conv_rain, "rain total", reset=0)
Measure(0x4d8, "rtrw", conv_stmp, "rain total reset when", reset="sw")
Measure(0x4ee, "wsl", conv_wspd, "wind speed min", reset="ws")
Measure(0x4f4, "wsh", conv_wspd, "wind speed max", reset="ws")
Measure(0x4f8, "wslw", conv_stmp, "wind speed min when", reset="sw")
Measure(0x502, "wshw", conv_stmp, "wind speed max when", reset="sw")
Measure(0x527, "wso", conv_wovr, "wind speed overflow")
Measure(0x528, "wsv", conv_wvld, "wind speed validity")
Measure(0x529, "wv", conv_wvel, "wind velocity")
Measure(0x529, "ws", conv_wspd, "wind speed")
Measure(0x52c, "w0", conv_wdir, "wind direction")
Measure(0x52d, "w1", conv_wdir, "wind direction 1")
Measure(0x52e, "w2", conv_wdir, "wind direction 2")
Measure(0x52f, "w3", conv_wdir, "wind direction 3")
Measure(0x530, "w4", conv_wdir, "wind direction 4")
Measure(0x531, "w5", conv_wdir, "wind direction 5")
Measure(0x533, "wsla", conv_wspd, "wind speed min alarm")
Measure(0x538, "wsha", conv_wspd, "wind speed max alarm")
Measure(0x54d, "cn", conv_conn, "connection type")
Measure(0x54f, "cc", conv_per2, "connection time till connect")
Measure(0x5d8, "pa", conv_pres, "pressure absolute")
Measure(0x5e2, "pr", conv_pres, "pressure relative")
Measure(0x5ec, "pc", conv_pres, "pressure correction")
Measure(0x5f6, "pal", conv_pres, "pressure absolute min", reset="pa")
Measure(0x600, "prl", conv_pres, "pressure relative min", reset="pr")
Measure(0x60a, "pah", conv_pres, "pressure absolute max", reset="pa")
Measure(0x614, "prh", conv_pres, "pressure relative max", reset="pr")
Measure(0x61e, "plw", conv_stmp, "pressure min when", reset="sw")
Measure(0x628, "phw", conv_stmp, "pressure max when", reset="sw")
Measure(0x63c, "pla", conv_pres, "pressure min alarm")
Measure(0x650, "pha", conv_pres, "pressure max alarm")
Measure(0x6b2, "hi", conv_per3, "history interval")
Measure(0x6b5, "hc", conv_per3, "history time till sample")
Measure(0x6b8, "hw", conv_stmp, "history last sample when")
Measure(0x6c2, "hp", conv_rec2, "history last record pointer",reset=0)
Measure(0x6c4, "hn", conv_rec2, "history number of records", reset=0)
# get all of the wind info in a single invocation
Measure(0x527, "wind", conv_wind, "wind")
#
# Read the requests.
#
def read_measurements(ws2300, read_requests):
    """Read the requested measures from the station.

    Nearby reads are coalesced into single batches (a gap of up to 6
    nybbles is cheaper to read than issuing a second request).  Returns
    the nybble data for each measure, in the order it was requested.
    """
    if not read_requests:
        return []
    # Build (address, nybble_count) spans, sorted by address so that
    # neighbouring spans can be merged.
    spans = sorted((m.address, m.conv.nybble_count) for m in read_requests)
    # batch_of maps each requested address to the index of the merged
    # span that covers it.
    batch_of = {spans[0][0]: 0}
    i = 1
    while i < len(spans):
        prev_start, prev_len = spans[i - 1]
        cur_start, cur_len = spans[i]
        # Never merge across the sign boundary (negative addresses are
        # synthetic, not station memory).
        crosses_sign = (prev_start < 0) != (cur_start < 0)
        disjoint = prev_start + prev_len + 6 < cur_start
        if crosses_sign or disjoint:
            batch_of[cur_start] = i
            i += 1
        else:
            # Fold the current span into the previous one.
            batch_of[cur_start] = i - 1
            spans[i - 1] = (prev_start, cur_start + cur_len - prev_start)
            del spans[i]
    # Read the coalesced batches from the station.
    nybbles = ws2300.read_batch(spans)
    # Slice each measure's data back out, in request order.
    results = []
    for measure in read_requests:
        b = batch_of[measure.address]
        start = measure.address - spans[b][0]
        results.append(nybbles[b][start:start + measure.conv.nybble_count])
    return results
class WS23xxConfEditor(weewx.drivers.AbstractConfEditor):
    """Configuration editor hooks for the WS23xx driver (weewx wee_config)."""
    @property
    def default_stanza(self):
        """Return the default [WS23xx] configuration stanza text."""
        return """
[WS23xx]
    # This section is for the La Crosse WS-2300 series of weather stations.
    # Serial port such as /dev/ttyS0, /dev/ttyUSB0, or /dev/cuaU0
    port = /dev/ttyUSB0
    # The station model, e.g., 'LaCrosse WS2317' or 'TFA Primus'
    model = LaCrosse WS23xx
    # The driver to use:
    driver = weewx.drivers.ws23xx
    """
    def prompt_for_settings(self):
        """Interactively ask for the serial port; returns a settings dict."""
        print "Specify the serial port on which the station is connected, for"
        print "example /dev/ttyUSB0 or /dev/ttyS0."
        port = self._prompt('port', '/dev/ttyUSB0')
        return {'port': port}
    def modify_config(self, config_dict):
        """Force software record generation in the weewx configuration."""
        print """
Setting record_generation to software."""
        config_dict['StdArchive']['record_generation'] = 'software'
# define a main entry point for basic testing of the station without weewx
# engine and service overhead. invoke this as follows from the weewx root dir:
#
# PYTHONPATH=bin python bin/weewx/drivers/ws23xx.py
if __name__ == '__main__':
    import optparse
    usage = """%prog [options] [--debug] [--help]"""
    # Log to syslog; INFO level unless --debug raises it below.
    syslog.openlog('ws23xx', syslog.LOG_PID | syslog.LOG_CONS)
    syslog.setlogmask(syslog.LOG_UPTO(syslog.LOG_INFO))
    port = DEFAULT_PORT
    parser = optparse.OptionParser(usage=usage)
    parser.add_option('--version', dest='version', action='store_true',
                      help='display driver version')
    parser.add_option('--debug', dest='debug', action='store_true',
                      help='display diagnostic information while running')
    parser.add_option('--port', dest='port', metavar='PORT',
                      help='serial port to which the station is connected')
    parser.add_option('--readings', dest='readings', action='store_true',
                      help='display sensor readings')
    parser.add_option("--records", dest="records", type=int, metavar="N",
                      help="display N station records, oldest to newest")
    parser.add_option('--help-measures', dest='hm', action='store_true',
                      help='display measure names')
    parser.add_option('--measure', dest='measure', type=str,
                      metavar="MEASURE", help='display single measure')
    (options, args) = parser.parse_args()
    if options.version:
        print "ws23xx driver version %s" % DRIVER_VERSION
        # NOTE(review): exits with status 1 even though printing the
        # version is not an error — confirm the intended exit code.
        exit(1)
    if options.debug is not None:
        syslog.setlogmask(syslog.LOG_UPTO(syslog.LOG_DEBUG))
    if options.port:
        port = options.port
    # Open the station (context manager closes the serial port) and run
    # whichever diagnostic actions were requested.
    with WS23xx(port) as s:
        if options.readings:
            data = s.get_raw_data(SENSOR_IDS)
            print data
        if options.records is not None:
            for ts,record in s.gen_records(count=options.records):
                print ts,record
        if options.measure:
            data = s.get_raw_data([options.measure])
            print data
        if options.hm:
            for m in Measure.IDS:
                print "%s\t%s" % (m, Measure.IDS[m].name)
| [
"tom@tom.org"
] | tom@tom.org |
305fb33556d286226494cf186720a7151194ecd0 | f25938b59fb9f21f3dca2e71ae180025be32f346 | /check.py | 6ec9d49c74594d7758ca7d2bf02deb12c1a62983 | [] | no_license | aaeviru/tools | cfe8dcb51f6d9158138a98cddf254c2d99427fca | 447a8c17dd0332a46ac2c6c0d678f54f5e4d53f3 | refs/heads/master | 2020-04-05T23:08:33.195451 | 2017-01-17T13:35:11 | 2017-01-17T13:35:11 | 56,742,514 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 472 | py | # -*- coding: utf-8 -*-
import os
import os.path
import sys
import re
# Walk the directory given as argv[1], counting matching files and
# reporting any that already have a .chasen output or lack a .fq output.
num = 0
for root, dirs, files in os.walk(sys.argv[1]):
    for name in files:
        filename = root + '/' + name
        # NOTE(review): filename[len(filename)-3] is filename[-3];
        # presumably this selects files by a specific extension (e.g.
        # ".txt" has 't' third from the end) — confirm the intent.
        if filename[len(filename)-3] == 't':
            num += 1
            if os.path.isfile(filename+".chasen"):
                print "chasen:"+filename
            elif os.path.isfile(filename+".fq")==False:
                print "fq:"+filename
# fin.close()
# os.system("rm %s" % (filename+".chasen") )
print num
| [
"aaeviru@gmail.com"
] | aaeviru@gmail.com |
7f7584f9a0df9c11769a6e56e61135e48c6f0ef1 | 5a88761f950c13a4b33ea68a75126a71ea505964 | /devilry/devilry_admin/tests/assignment/test_anonymizationmode.py | 1da995173f9bb6291f69aedda24d7d654a03697d | [] | no_license | cas--/devilry-django | 35d3087664f830d84c32f6270e4cd6ca7656bc78 | 15dc771084ea14cb4083cf1feca1fb00cc441aa1 | refs/heads/master | 2020-05-30T07:14:16.921949 | 2016-09-12T15:46:38 | 2016-09-12T15:46:38 | 69,559,633 | 0 | 0 | null | 2016-09-29T11:05:37 | 2016-09-29T11:05:36 | null | UTF-8 | Python | false | false | 641 | py | from django.test import TestCase
from django_cradmin import cradmin_testhelpers
from model_mommy import mommy
from devilry.devilry_admin.views.assignment import anonymizationmode
class TestOverviewAppUpdateFirstDeadline(TestCase, cradmin_testhelpers.TestCaseMixin):
viewclass = anonymizationmode.AssignmentAnonymizationmodeUpdateView
def test_h1(self):
assignment = mommy.make('core.Assignment')
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=assignment, viewkwargs={'pk':assignment.id})
self.assertEquals(mockresponse.selector.one('h1').alltext_normalized, 'Edit anonymization settings')
| [
"tor@torjohansen.no"
] | tor@torjohansen.no |
e9dd7621414b404cc02b670971a3b410378d6154 | 3d8078096288e8355248bab6f419b935f11870f0 | /python/turtle/multicoloredturtles.py | d3bd96d66b0967dd01795d00c25f27af7a8c2312 | [] | no_license | tnameera/csci127-examrevision | 43fd0484b33117866b00517ca3397eccd10d0fed | 12f2563690ecd5853f3b8de22dc88922fb16bffa | refs/heads/main | 2023-01-31T12:00:32.765820 | 2020-12-14T06:23:06 | 2020-12-14T06:23:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 198 | py | # A program that uses multi-colored turtles
import turtle
# Create two independently-coloured turtles.
azul = turtle.Turtle()
azul.color("blue")
verde = turtle.Turtle()
verde.color("green")
# Only the blue turtle moves: forward, turn right 90°, forward again.
azul.forward(100)
azul.right(90)
azul.forward(50)
| [
"syed.rahman@nielsen.com"
] | syed.rahman@nielsen.com |
51ee9364650e8ff4c9afd76dfbc9257f9b9581ad | eab7a813d0530c618c6ff31880934d93904eb960 | /backend/app/core/usuario/migrations/0003_auto_20201208_1903.py | c5d72a0b172146a8a96df21536edca3d3c23bb44 | [] | no_license | CamiloArias47/Covid-Diejinueve | e670c7214a054f16beeef8d88fabb9b6f2026dfa | 309ac6ef4ea5ec6be900ca3f1150b209376d7f3d | refs/heads/master | 2023-01-28T11:32:02.559380 | 2020-12-10T15:33:15 | 2020-12-10T15:33:15 | 306,971,289 | 0 | 0 | null | 2020-12-10T15:33:18 | 2020-10-24T21:04:16 | null | UTF-8 | Python | false | false | 622 | py | # Generated by Django 3.1.2 on 2020-12-09 00:03
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds the nullable 'genero' and 'role' char fields to Usuario."""
    dependencies = [
        ('usuario', '0002_auto_20201208_1115'),
    ]
    operations = [
        migrations.AddField(
            model_name='usuario',
            name='genero',
            field=models.CharField(max_length=50, null=True, verbose_name='Genero de usuario'),
        ),
        migrations.AddField(
            model_name='usuario',
            name='role',
            field=models.CharField(max_length=50, null=True, verbose_name='Rol de usuario'),
        ),
    ]
| [
"daniel.luna@correounivalle.edu.co"
] | daniel.luna@correounivalle.edu.co |
72b6edb172f7d0933e029be750e568d52a323e10 | cc41b520c8fd75f151c66994076884ed77712e57 | /squiz/quiz/migrations/0001_initial.py | b510657c1878d47568d9474905496daa84e6ccbb | [] | no_license | kks0101/squiz | 12e64caf4da0f39e947f6a6d223ce8b6f382b664 | ac83f69a1826ec16410132a09c25dcea55779743 | refs/heads/master | 2023-05-03T19:44:20.570754 | 2020-01-15T15:00:41 | 2020-01-15T15:00:41 | 191,167,585 | 2 | 5 | null | 2023-04-21T20:32:27 | 2019-06-10T12:52:53 | JavaScript | UTF-8 | Python | false | false | 2,809 | py | # Generated by Django 2.2.2 on 2019-06-10 21:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the quiz app: Test, Question, Options, Response, Score."""
    initial = True
    dependencies = [
        ('Users', '0001_initial'),
    ]
    operations = [
        # A question belongs to one quiz (FK added further below).
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('question_text', models.CharField(max_length=1000)),
            ],
        ),
        # A quiz authored by one teacher, taken by many students.
        migrations.CreateModel(
            name='Test',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('quiz_title', models.CharField(max_length=100)),
                ('student', models.ManyToManyField(related_name='student', to='Users.Profile')),
                ('teacher', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='teacher', to='Users.Profile')),
            ],
        ),
        # One score per (student, quiz) pair.
        migrations.CreateModel(
            name='Score',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('score', models.IntegerField(default=0)),
                ('quiz', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='quiz.Test')),
                ('student', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='Users.Profile')),
            ],
        ),
        # A student's selected option for one question.
        migrations.CreateModel(
            name='Response',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('selected_option', models.CharField(blank=True, max_length=2000, null=True)),
                ('question', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='quiz.Question')),
                ('student', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='Users.Profile')),
            ],
        ),
        migrations.AddField(
            model_name='question',
            name='quiz',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='quiz.Test'),
        ),
        # Answer choices for a question; is_correct marks the right one(s).
        migrations.CreateModel(
            name='Options',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('option_text', models.CharField(max_length=100)),
                ('is_correct', models.BooleanField(default=False)),
                ('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='quiz.Question')),
            ],
        ),
    ]
| [
"kks1102000@gmail.com"
] | kks1102000@gmail.com |
b688a16b477adf91b9b20ba9b83bbcf7727d73a4 | a548a8dd9179cc1c8a2b4eac4529dc30441f6a01 | /tests/test_encode_document.py | 9909d8b98cd01776073523c6e67b8177b3cf3f24 | [
"Apache-2.0"
] | permissive | BTAutist/vectorhub | 26c2d389446e97ffe23ccd5ef5f7bac910ad0647 | 7a88f03abb9387f4e9197f19ce95126028fdac2b | refs/heads/main | 2023-07-15T10:56:40.035451 | 2021-08-31T06:47:11 | 2021-08-31T06:47:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,690 | py | import pytest
from vectorhub.encoders.text.tfhub import USE2Vec
enc = USE2Vec()
# The encoder name feeds the generated vector field name:
# "text_<name>_vector_" -> "text_sample_vector_" in the tests below.
enc.__name__ = "sample"
@pytest.fixture
def docs():
    """Two minimal documents, each with a valid text field."""
    return [
        {
            "text": "hey"
        },
        {
            "text": "weirdo"
        }
    ]
@pytest.fixture
def docs_with_errors():
    """Documents where one text field is None, to exercise error handling."""
    return [
        {
            "text": "hey"
        },
        {
            "text": None
        }
    ]
def assert_vectors_in_docs(docs):
    """Assert every document carries the encoder's vector field."""
    for d in docs:
        # Fixed typo in the assertion message ("misssing" -> "missing").
        assert "text_sample_vector_" in d, "missing vector"
def test_encode_documents_in_docs(docs):
    # Plain encode_documents should attach a vector to every document.
    docs = enc.encode_documents(["text"], docs)
    assert_vectors_in_docs(docs)
def test_encode_documents_in_docs_2(docs):
    # Bulk encoding with zero_vector treatment still vectorises valid docs.
    docs = enc.encode_documents_in_bulk(["text"], docs,
        vector_error_treatment="zero_vector")
    assert_vectors_in_docs(docs)
def test_encode_documents_in_docs_3(docs):
    # With valid docs, "do_not_include" should still vectorise everything.
    docs = enc.encode_documents_in_bulk(["text"], docs,
        vector_error_treatment="do_not_include")
    assert_vectors_in_docs(docs)
def test_error_tests(docs_with_errors):
    # A None text field with zero_vector treatment still yields a vector.
    docs = enc.encode_documents(["text"], docs_with_errors,
        vector_error_treatment="zero_vector")
    assert_vectors_in_docs(docs)
def test_error_tests_2(docs_with_errors):
    # Same zero_vector behaviour via the bulk encoding path.
    docs = enc.encode_documents_in_bulk(["text"], docs_with_errors,
        vector_error_treatment="zero_vector")
    assert_vectors_in_docs(docs)
def test_error_tests_3(docs_with_errors):
    # With "do_not_include", the failing document gets no vector field
    # while valid documents keep a list-valued vector.
    docs = enc.encode_documents_in_bulk(["text"], docs_with_errors,
        vector_error_treatment="do_not_include")
    assert "text_sample_vector_" in docs[0]
    assert "text_sample_vector_" not in docs[-1]
    assert isinstance(docs[0]['text_sample_vector_'], list)
| [
"noreply@github.com"
] | noreply@github.com |
6c3ab5ccf246a5bf205480247dcdbe04bfab2cfe | 16da2f8c7f16239c6a985599481b9932322b71f0 | /masterWeiBo/apps.py | e5532ef145739f39085528a5ee4521e0d1e2bda0 | [] | no_license | While1true/PYServer | c85f608a26c87253b2d7926904421a183bc798cc | d8d41ec31f39ab350c9b4e1ff873bc9921d9f8f9 | refs/heads/master | 2021-04-27T06:20:23.500532 | 2018-03-11T06:00:23 | 2018-03-11T06:00:23 | 122,610,840 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 97 | py | from django.apps import AppConfig
class MasterweiboConfig(AppConfig):
    """Django application configuration for the masterWeiBo app."""
    name = 'masterWeiBo'
| [
"893825883@qq.com"
] | 893825883@qq.com |
9b8abd96e7a9d1cf1657b05be3e7327c9595c874 | f64e31cb76909a6f7fb592ad623e0a94deec25ae | /tests/test_p0380_insert_delete_getrandom_o1.py | 6fb1b572dadda2d8a17a49d0331190489c5cd47b | [] | no_license | weak-head/leetcode | 365d635cb985e1d154985188f6728c18cab1f877 | 9a20e1835652f5e6c33ef5c238f622e81f84ca26 | refs/heads/main | 2023-05-11T14:19:58.205709 | 2023-05-05T20:57:13 | 2023-05-05T20:57:13 | 172,853,059 | 0 | 1 | null | 2022-12-09T05:22:32 | 2019-02-27T05:58:54 | Python | UTF-8 | Python | false | false | 1,572 | py | # flake8: noqa: F403, F405
import pytest
from leetcode.p0380_insert_delete_getrandom_o1 import *
solutions = [
RandomizedSet,
]
# ([args], expectation),
test_cases = [
[
("d", 2, False),
("i", 1, True),
("i", 1, False),
("r", None, {1}),
],
[
("d", 2, False),
("i", 1, True),
("i", 1, False),
("r", None, {1}),
("i", 2, True),
("i", 3, True),
("r", None, {1, 2, 3}),
("r", None, {1, 2, 3}),
("r", None, {1, 2, 3}),
("r", None, {1, 2, 3}),
("r", None, {1, 2, 3}),
],
[
("d", 1, False),
("i", 1, True),
("r", None, {1}),
("r", None, {1}),
("r", None, {1}),
("i", 2, True),
("r", None, {1, 2}),
("r", None, {1, 2}),
("r", None, {1, 2}),
("r", None, {1, 2}),
("r", None, {1, 2}),
("d", 1, True),
("d", 1, False),
("r", None, {2}),
("r", None, {2}),
("r", None, {2}),
("d", 2, True),
("i", 3, True),
("r", None, {3}),
("r", None, {3}),
("r", None, {3}),
("r", None, {3}),
],
]
@pytest.mark.timeout(1)
@pytest.mark.parametrize(("args"), test_cases)
@pytest.mark.parametrize("solution", solutions)
def test_solution(args, solution):
    """Replay one scripted case against a RandomizedSet implementation."""
    rs = solution()
    for m, v, e in args:
        if m == "i":
            assert rs.insert(v) == e
        elif m == "d":
            assert rs.remove(v) == e
        else:
            # getRandom() may return any member; e is the set of valid values.
            assert rs.getRandom() in e
| [
"zinchenko@live.com"
] | zinchenko@live.com |
b06d1cf69fab798694dd9919dbf98be6ae3625a1 | 28f5332bd313235fec220f9c84b8ee40743257b7 | /env/motion_planners/rrt_connect.py | f078fdbd0e178a610ea90a25339072a9c1f1a826 | [] | no_license | IrisLi17/bridge_construction | 0096ff8b5445ab20f3dfccea50d3bf5177740246 | 34d75e7fbac438d0607b286e03beffdf05d3102f | refs/heads/main | 2023-08-11T01:52:34.628517 | 2021-09-29T12:54:52 | 2021-09-29T12:54:52 | 411,630,645 | 13 | 3 | null | null | null | null | UTF-8 | Python | false | false | 3,759 | py | import time
from itertools import takewhile
from .smoothing import smooth_path
from .rrt import TreeNode, configs
from .utils import irange, argmin, RRT_ITERATIONS, RRT_RESTARTS, RRT_SMOOTHING, INF, elapsed_time, negate
def asymmetric_extend(q1, q2, extend_fn, backward=False):
    """Extend from q1 towards q2.

    When backward is set, the q2->q1 extension is computed and walked in
    reverse, so the yielded order still runs from q1's side to q2's.
    """
    if not backward:
        return extend_fn(q1, q2)
    return reversed(list(extend_fn(q2, q1)))
def extend_towards(tree, target, distance_fn, extend_fn, collision_fn, swap, tree_frequency):
    """Grow *tree* towards *target*, stopping at the first collision.

    Returns (last_node_added, success) where success means the whole
    extension was collision-free (i.e. the target was reached).  Nodes
    are recorded every *tree_frequency* steps; the final safe step is
    always recorded.
    """
    # Start from the tree node nearest to the target.
    last = argmin(lambda n: distance_fn(n.config, target), tree)
    # swap flips the extension direction so both trees interpolate
    # consistently when the caller exchanges their roles.
    extend = list(asymmetric_extend(last.config, target, extend_fn, swap))
    # Keep only the collision-free prefix of the extension.
    safe = list(takewhile(negate(collision_fn), extend))
    for i, q in enumerate(safe):
        if (i % tree_frequency == 0) or (i == len(safe) - 1):
            last = TreeNode(q, parent=last)
            tree.append(last)
    success = len(extend) == len(safe)
    return last, success
def rrt_connect(q1, q2, distance_fn, sample_fn, extend_fn, collision_fn,
                iterations=RRT_ITERATIONS, tree_frequency=1, max_time=INF,
                disable_start_collision=False):
    """Bidirectional RRT-Connect between configurations q1 and q2.

    Grows one tree from each endpoint; every iteration extends the
    smaller tree towards a random sample, then tries to connect the
    other tree to the node just added.  Returns a list of configurations
    from q1 to q2, or None on failure/timeout.
    disable_start_collision skips the collision check on q1.
    """
    start_time = time.time()
    assert tree_frequency >= 1
    if disable_start_collision:
        if collision_fn(q2):
            return None
    else:
        if collision_fn(q1) or collision_fn(q2):
            return None
    nodes1, nodes2 = [TreeNode(q1)], [TreeNode(q2)]
    for iteration in irange(iterations):
        if max_time <= elapsed_time(start_time):
            break
        # Grow the smaller tree first to keep the two trees balanced.
        swap = len(nodes1) > len(nodes2)
        tree1, tree2 = nodes1, nodes2
        if swap:
            tree1, tree2 = nodes2, nodes1
        last1, _ = extend_towards(tree1, sample_fn(), distance_fn, extend_fn, collision_fn,
                                  swap, tree_frequency)
        last2, success = extend_towards(tree2, last1.config, distance_fn, extend_fn, collision_fn,
                                        not swap, tree_frequency)
        if success:
            # The trees met: splice the two root-to-meeting-point paths.
            path1, path2 = last1.retrace(), last2.retrace()
            if swap:
                path1, path2 = path2, path1
            #print('{} iterations, {} nodes'.format(iteration, len(nodes1) + len(nodes2)))
            return configs(path1[:-1] + path2[::-1])
    return None
# TODO: version which checks whether the segment is valid
def direct_path(q1, q2, extend_fn, collision_fn, disable_start_collision=False):
    """Try the straight extension from q1 to q2.

    Returns the list of configurations [q1, ...steps..., q2], or None if
    any endpoint or intermediate configuration is in collision.  When
    disable_start_collision is set, q1 itself is not collision-checked.
    """
    endpoints = (q2,) if disable_start_collision else (q1, q2)
    if any(collision_fn(q) for q in endpoints):
        return None
    path = [q1]
    for step in extend_fn(q1, q2):
        if collision_fn(step):
            return None
        path.append(step)
    return path
def birrt(q1, q2, distance, sample, extend, collision,
          restarts=RRT_RESTARTS, smooth=RRT_SMOOTHING, max_time=INF, disable_start_collision=False, **kwargs):
    """RRT-Connect with restarts and optional shortcut smoothing.

    Tries the straight-line path first, then up to restarts+1 rounds of
    rrt_connect within the remaining time budget.  A found path is
    smoothed for *smooth* iterations unless smooth is None.  Returns the
    configuration path, or None.
    """
    start_time = time.time()
    if disable_start_collision:
        if collision(q2):
            return None
    else:
        if collision(q1) or collision(q2):
            return None
    # Cheap first attempt: direct interpolation without any sampling.
    path = direct_path(q1, q2, extend, collision, disable_start_collision=disable_start_collision)
    if path is not None:
        return path
    for attempt in irange(restarts + 1):
        if max_time <= elapsed_time(start_time):
            break
        path = rrt_connect(q1, q2, distance, sample, extend, collision,
                           max_time=max_time - elapsed_time(start_time),
                           disable_start_collision=disable_start_collision, **kwargs)
        if path is not None:
            #print('{} attempts'.format(attempt))
            if smooth is None:
                return path
            return smooth_path(path, extend, collision, iterations=smooth)
    return None
| [
"yunfeili.cloud@gmail.com"
] | yunfeili.cloud@gmail.com |
f387b6aa04e93a35d93e07dd14b6b8491b80e692 | d491aae08ffe85d3253f1cd1e9f861a1ffe8274a | /untitled/urls.py | 242930073d175aa88b0f6b7878d988b077d3e6cb | [] | no_license | myxahev/Django_test | 0336388b6e1b94fe78ea8151591a2fa095531bf3 | 5f2233fabd666d197b69c66da552a7d3c63713f5 | refs/heads/master | 2023-04-08T16:52:12.018845 | 2021-02-25T15:48:53 | 2021-02-25T15:48:53 | 342,295,538 | 0 | 0 | null | 2021-03-24T18:36:56 | 2021-02-25T15:48:11 | Python | UTF-8 | Python | false | false | 864 | py | """untitled URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.contrib import admin
from django.urls import include, path
urlpatterns = [
path('polls/', include('polls.urls')),
path('admin/', admin.site.urls),
] | [
"myxahev2@gmail.com"
] | myxahev2@gmail.com |
3d2ca0223893cfcb17fd1d7bea3f906d3a7be1fd | ae6d62d2daa8f1d51b8f33199198338b52cbafa1 | /node_modules/protractor/node_modules/selenium-webdriver/node_modules/ws/node_modules/bufferutil/build/config.gypi | 6df3becc34206ba9f98f0348900ea9234e1211ac | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | aritzg/abc123 | 1c41e26182b2a01b3fedef64cfeaa58f606708ff | a4d2fbe65d47d0c3506d0b381571db3234c60f41 | refs/heads/master | 2020-06-06T05:16:04.078593 | 2015-10-09T09:20:24 | 2015-10-09T09:20:24 | 39,558,462 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,159 | gypi | # Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"clang": 0,
"gcc_version": 48,
"host_arch": "x64",
"node_install_npm": "true",
"node_prefix": "/usr",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_openssl": "false",
"node_shared_v8": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_unsafe_optimizations": 0,
"node_use_dtrace": "false",
"node_use_etw": "false",
"node_use_openssl": "true",
"node_use_perfctr": "false",
"node_use_systemtap": "false",
"openssl_no_asm": 0,
"python": "/usr/bin/python",
"target_arch": "x64",
"v8_enable_gdbjit": 0,
"v8_no_strict_aliasing": 1,
"v8_use_snapshot": "false",
"want_separate_host_toolset": 0,
"nodedir": "/home/aritz/.node-gyp/0.10.39",
"copy_dev_lib": "true",
"standalone_static_library": 1,
"cache_lock_stale": "60000",
"sign_git_tag": "",
"user_agent": "npm/1.4.28 node/v0.10.39 linux x64",
"always_auth": "",
"bin_links": "true",
"key": "",
"description": "true",
"fetch_retries": "2",
"heading": "npm",
"user": "1000",
"force": "",
"cache_min": "10",
"init_license": "ISC",
"editor": "vi",
"rollback": "true",
"cache_max": "Infinity",
"userconfig": "/home/aritz/.npmrc",
"engine_strict": "",
"init_author_name": "",
"init_author_url": "",
"tmp": "/tmp",
"depth": "Infinity",
"save_dev": "",
"usage": "",
"cafile": "",
"https_proxy": "",
"onload_script": "",
"rebuild_bundle": "true",
"save_bundle": "",
"shell": "/bin/bash",
"prefix": "/usr",
"registry": "https://registry.npmjs.org/",
"browser": "",
"cache_lock_wait": "10000",
"save_optional": "",
"searchopts": "",
"versions": "",
"cache": "/home/aritz/.npm",
"ignore_scripts": "",
"searchsort": "name",
"version": "",
"local_address": "",
"viewer": "man",
"color": "true",
"fetch_retry_mintimeout": "10000",
"umask": "18",
"fetch_retry_maxtimeout": "60000",
"message": "%s",
"ca": "",
"cert": "",
"global": "",
"link": "",
"save": "",
"unicode": "true",
"long": "",
"production": "",
"unsafe_perm": "",
"node_version": "0.10.39",
"tag": "latest",
"git_tag_version": "true",
"shrinkwrap": "true",
"fetch_retry_factor": "10",
"npat": "",
"proprietary_attribs": "true",
"save_exact": "",
"strict_ssl": "true",
"username": "",
"dev": "",
"globalconfig": "/usr/etc/npmrc",
"init_module": "/home/aritz/.npm-init.js",
"parseable": "",
"globalignorefile": "/usr/etc/npmignore",
"cache_lock_retries": "10",
"save_prefix": "^",
"group": "1000",
"init_author_email": "",
"searchexclude": "",
"git": "git",
"optional": "true",
"email": "",
"json": "",
"spin": "true"
}
}
| [
"galdos.aritz@gmail.com"
] | galdos.aritz@gmail.com |
fb469050fad2ef0ce31ce7700ad67c9ae42f2aa1 | a5a523fbfa2ce7800834b406c7ed892045c51bba | /modules/database.py | 1f218124d9b122b37b66021465d21475e099f380 | [] | no_license | ascherf-ml/IDA | f850328bb7821328e29252a9e17b4c87428d44ae | 7d592c291b51dd469049b53080102e69a8d8fd01 | refs/heads/main | 2023-06-09T12:35:43.904955 | 2023-06-01T07:18:18 | 2023-06-01T07:18:18 | 310,251,250 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 906 | py | import pandas as pd
import time
def todo_database_create():
    """Create an empty to-do table.

    Returns:
        tuple: (empty DataFrame with columns ['todo', 'date', 'priority'],
        list of those column names), mirroring ``note_database_create``.

    Bug fix: the original built the DataFrame and silently discarded it
    (the function returned None), unlike its sibling ``note_database_create``.
    """
    todo_data_columns = ['todo', 'date', 'priority']
    todo_data = pd.DataFrame(columns=todo_data_columns)
    return todo_data, todo_data_columns
def note_database_create():
    """Return an empty notes table together with its column names."""
    columns = ['note', 'date']
    empty_table = pd.DataFrame(columns=columns)
    return empty_table, columns
def write(input, source, command, database, database_col):
    """Append one timestamped row to *database* and return the result.

    Args:
        input: User-supplied payload (e.g. the to-do or note text).
            NOTE: the name shadows the builtin ``input``; kept for backward
            compatibility with callers passing it by keyword.
        source: Origin of the entry.
        command: Command associated with the entry.
        database: DataFrame to append to. It is not mutated; a new frame
            is returned (same contract as the old ``DataFrame.append``).
        database_col: Column names; must have length 4 matching
            [input, timestamp, source, command].

    Returns:
        pd.DataFrame: *database* with the new row appended.
    """
    row = pd.DataFrame(
        [[input, time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), source, command]],
        columns=database_col)
    # Bug fix: DataFrame.append() was deprecated in pandas 1.4 and removed
    # in 2.0. pd.concat is the supported replacement and preserves the old
    # (ignore_index=False) indexing behavior.
    return pd.concat([database, row])
def database_export(main_data=None, note_data=None):
    """Write the main and note tables to CSV files under data/.

    Args:
        main_data: DataFrame holding the main/to-do table.
        note_data: DataFrame holding the notes table.

    Raises:
        ValueError: if either frame is missing.

    Bug fix: the original read module globals ``main_data``/``note_data``
    that are defined nowhere in this module, so every call raised
    NameError. The frames are now explicit (optional) parameters, keeping
    the zero-argument call signature importable while failing with a clear
    message instead.
    """
    if main_data is None or note_data is None:
        raise ValueError("database_export() requires both main_data and note_data")
    main_data.to_csv('data/main_data.csv')
    note_data.to_csv('data/note_data.csv')
def database_import():
    """Load and return the main table from data/main_data.csv."""
    return pd.read_csv('data/main_data.csv', sep=',')
def note_database_import():
    """Load and return the notes table from data/note_data.csv."""
    return pd.read_csv('data/note_data.csv', sep=',')
| [
"alexander-scherf@gmx.net"
] | alexander-scherf@gmx.net |
42bdcefd5ebbbf29bb1fde03e25d27b62de032fc | e6329dfee74b6673786f0b8519827ec2acffa68b | /simplivity/resources/external_stores.py | 68967df1d164bf070c772dd0c5959ef28afc8275 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | mumblingMac/simplivity-python | 09316ef733ba379f540aa296cd4fbdda685bd064 | 6e07d274ac75fa1aeff095e585b6b53f40446e2a | refs/heads/master | 2022-11-06T10:31:38.907062 | 2020-06-19T12:17:10 | 2020-06-19T12:17:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,413 | py | ###
# (C) Copyright [2019-2020] Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from simplivity.resources.resource import ResourceBase
from simplivity.resources import omnistack_clusters
URL = '/external_stores'
DATA_FIELD = 'external_stores'
class ExternalStores(ResourceBase):
    """Entry point for SimpliVity external store operations.

    Delegates REST calls to the resource client inherited from
    ``ResourceBase``.
    """

    def __init__(self, connection):
        super(ExternalStores, self).__init__(connection)

    def get_all(self, pagination=False, page_size=0, limit=500, offset=0,
                sort=None, order='descending', filters=None, fields=None,
                case_sensitive=True):
        """Retrieve external stores, optionally paged/sorted/filtered.

        Args:
            pagination: True to page the results.
            page_size: Page size (required when pagination is on).
            limit: Maximum number of results to return.
            offset: Index of the first instance to return, up to the limit.
            sort: Field name to sort on.
            order: Sort order, 'ascending' or 'descending'.
            filters: Dict of filter values, e.g. {'name': 'name'}. Supported
                keys include name, omnistack_cluster_id, cluster_group_id,
                management_ip and type (default type: StoreOnceOnPrem).
            fields: Fields to include in the response.
            case_sensitive: Whether filter matching is case sensitive.

        Returns:
            list: List of external store resources.
        """
        query = dict(members_field=DATA_FIELD,
                     pagination=pagination,
                     page_size=page_size,
                     limit=limit,
                     offset=offset,
                     sort=sort,
                     order=order,
                     filters=filters,
                     fields=fields,
                     case_sensitive=case_sensitive)
        return self._client.get_all(URL, **query)

    def get_by_data(self, data):
        """Wrap raw external-store data in an ExternalStore object.

        Args:
            data: External store data dict.

        Returns:
            object: ExternalStore object.
        """
        return ExternalStore(self._connection, self._client, data)

    def register_external_store(self, management_ip, name, cluster, username, password, management_port=9387,
                                storage_port=9388, external_store_type='StoreOnceOnPrem', timeout=-1):
        """Register an external store with an OmniStack cluster.

        Args:
            management_ip: IP address of the external store.
            name: Name of the external store.
            cluster: Destination OmnistackCluster object or its name.
            username: Client name of the external store.
            password: Client password of the external store.
            management_port: Management IP port. Default: 9387.
            storage_port: Storage IP port. Default: 9388.
            external_store_type: Store type. Default: StoreOnceOnPrem.
            timeout: Request timeout in seconds.

        Returns:
            object: The newly registered ExternalStore object.
        """
        if not isinstance(cluster, omnistack_clusters.OmnistackCluster):
            # A cluster name was passed; resolve it to a cluster object first.
            cluster = omnistack_clusters.OmnistackClusters(self._connection).get_by_name(cluster)

        payload = {'management_ip': management_ip, 'management_port': management_port, 'name': name,
                   'username': username, 'password': password, 'storage_port': storage_port,
                   'type': external_store_type}
        payload['omnistack_cluster_id'] = cluster.data['id']

        self._client.do_post(URL, payload, timeout)
        return self.get_by_name(name)
class ExternalStore(object):
    """Represents a single SimpliVity external store resource."""

    def __init__(self, connection, resource_client, data):
        # Keep connection and client handles private; expose the raw data.
        self._connection = connection
        self._client = resource_client
        self.data = data
| [
"hchaudhari@hpe.com"
] | hchaudhari@hpe.com |
82fb173cf47cd88083962ab2b73da46b3f4fcc51 | 817142283452fd6d351f2faaaccbeb1b012155ef | /ntc_rosetta_conf/usr_datastore.py | 201738bef1e9d1f63cecf9c45559e4d5b2c7deac | [
"Apache-2.0"
] | permissive | networktocode/ntc-rosetta-conf | ada2356f9e717a9688300842dd613a9021a78456 | 06c8028e0bbafdd97d15e14ca13faa2601345d8b | refs/heads/develop | 2021-09-26T13:02:30.495809 | 2019-08-14T15:53:25 | 2019-08-14T15:53:25 | 191,575,862 | 5 | 1 | Apache-2.0 | 2021-09-16T15:04:24 | 2019-06-12T13:24:51 | Python | UTF-8 | Python | false | false | 86 | py | from jetconf.data import JsonDatastore
class UserDatastore(JsonDatastore):
    """JSON-backed user datastore; all behavior is inherited unchanged from JsonDatastore."""
    pass
| [
"dbarrosop@dravetech.com"
] | dbarrosop@dravetech.com |
69352d67a7e884287a108ad00b3c4c7777b0a00a | 8edd5d112cd34f229d4e00096d55c16df46ce81c | /WeatherDashboard/settings.py | d2a155c8c6a7bedde0f98c5c44eb2181503ba323 | [] | no_license | vinaykakanuru/WeatherDashboard | 2cd29ca4cd332e55f30f0aac652fdecf141a80a9 | a2a2b485946d4fe823450086c84c16a81bdd72d2 | refs/heads/main | 2023-03-07T04:42:10.946995 | 2021-02-20T14:41:35 | 2021-02-20T14:41:35 | 340,676,472 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,223 | py | """
Django settings for WeatherDashboard project.
Generated by 'django-admin startproject' using Django 3.1.7.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import os
import environ
environ.Env.read_env()
# django-environ caster: DEBUG is declared as a bool with default False.
env = environ.Env(DEBUG=(bool, False),)

# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get('SECRET_KEY')

# SECURITY WARNING: don't run with debug turned on in production!
# Bug fix: os.environ.get('DEBUG') returns a *string*, and any non-empty
# string -- including "False" -- is truthy, so DEBUG could never be turned
# off via the environment. Use the django-environ caster configured above
# (it was set up but never used), which converts the value to a real bool
# and defaults to False when the variable is unset.
DEBUG = env('DEBUG')

ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Project-local app containing the weather-dashboard views/templates.
    'app',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

# Root URL configuration module for the project.
ROOT_URLCONF = 'WeatherDashboard.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # No extra template directories; templates are discovered inside
        # each installed app because APP_DIRS is True.
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

# Dotted path to the WSGI callable used by application servers.
WSGI_APPLICATION = 'WeatherDashboard.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
# Default database: file-based SQLite in the project root (development use).
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}

# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/

STATIC_URL = '/static/'

# Third-party weather API key, supplied via the environment.
# NOTE(review): no default is given -- API_KEY is None when the variable
# is unset; confirm callers handle that.
API_KEY = os.environ.get('API_KEY')
| [
"kvinaykumarreddy1995@gmail.com"
] | kvinaykumarreddy1995@gmail.com |
a2f5d7d2f8d212d99c9871a3aaaaf73740fd9d75 | b99f20a208a06093684a39f0c6346c23a2249909 | /codetogether/wsgi.py | 663d7f9cc9ba551ee60eb6513528d3d44d13d142 | [] | no_license | ritikjaiswal019/CodeTogether | de0e8b4c87477f157ac41391f1f776e4480c8bc2 | 4e8c53632f7ad63a54c3f515790fedc7bad3d923 | refs/heads/main | 2023-06-19T04:48:46.184523 | 2021-07-18T18:51:40 | 2021-07-18T18:51:40 | 374,905,983 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | """
WSGI config for codetogether project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the settings module before the application is built;
# setdefault keeps any value already provided by the environment.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'codetogether.settings')
# Module-level WSGI callable picked up by servers (gunicorn, mod_wsgi, ...).
application = get_wsgi_application()
| [
"ritikjaiswal019@gmail.com"
] | ritikjaiswal019@gmail.com |
47ea363768f04b52b108cc1522373cc3a8f7d61a | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/BAY-STACK-NOTIFICATIONS-MIB.py | 9fe6dd9a7f0564c4dc72d5d5ffd161421012167e | [
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 19,230 | py | #
# PySNMP MIB module BAY-STACK-NOTIFICATIONS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/BAY-STACK-NOTIFICATIONS-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:19:14 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ValueRangeConstraint, SingleValueConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsUnion", "ConstraintsIntersection")
bayStackConfigExpectedStackSize, bayStackUnitConfigIndex = mibBuilder.importSymbols("BAY-STACK-MIB", "bayStackConfigExpectedStackSize", "bayStackUnitConfigIndex")
dot1xAuthBackendAuthState, dot1xAuthPaeState = mibBuilder.importSymbols("IEEE8021-PAE-MIB", "dot1xAuthBackendAuthState", "dot1xAuthPaeState")
ifIndex, ifAdminStatus, InterfaceIndex = mibBuilder.importSymbols("IF-MIB", "ifIndex", "ifAdminStatus", "InterfaceIndex")
InetAddress, InetAddressType = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetAddressType")
s5AgSysUsbTargetUnit, s5AgentScriptStatus = mibBuilder.importSymbols("S5-AGENT-MIB", "s5AgSysUsbTargetUnit", "s5AgentScriptStatus")
s5ChasComType, = mibBuilder.importSymbols("S5-CHASSIS-MIB", "s5ChasComType")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
ObjectIdentity, MibIdentifier, Bits, iso, Counter32, Gauge32, IpAddress, ModuleIdentity, NotificationType, Unsigned32, Counter64, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "ObjectIdentity", "MibIdentifier", "Bits", "iso", "Counter32", "Gauge32", "IpAddress", "ModuleIdentity", "NotificationType", "Unsigned32", "Counter64", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks")
MacAddress, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "MacAddress", "TextualConvention", "DisplayString")
bayStackMibs, = mibBuilder.importSymbols("SYNOPTICS-ROOT-MIB", "bayStackMibs")
bayStackNotificationsMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 45, 5, 2))
bayStackNotificationsMib.setRevisions(('2014-07-07 00:00', '2014-01-27 00:00', '2013-10-11 00:00', '2013-08-22 00:00', '2013-03-19 00:00', '2012-09-04 00:00', '2012-08-22 00:00', '2012-08-16 00:00', '2012-06-21 00:00', '2012-06-20 00:00', '2011-11-30 00:00', '2010-12-21 00:00', '2009-09-28 00:00', '2008-07-09 00:00', '2008-03-31 00:00', '2007-03-05 00:00', '2006-04-06 00:00', '2006-04-04 00:00', '2005-08-22 00:00', '2005-06-30 00:00', '2005-03-26 00:00', '2004-08-06 00:00', '2004-08-02 00:00', '2004-07-20 00:00', '2003-03-16 00:00',))
if mibBuilder.loadTexts: bayStackNotificationsMib.setLastUpdated('201407070000Z')
if mibBuilder.loadTexts: bayStackNotificationsMib.setOrganization('Avaya')
bsnObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 5, 2, 1))
bsnNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 5, 2, 2))
bsnNotifications0 = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0))
bsnEapAccessViolationMacAddress = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 1), MacAddress()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnEapAccessViolationMacAddress.setStatus('current')
bsnLoginFailureType = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("telnet", 1), ("ssh", 2), ("web", 3), ("serialConsole", 4)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnLoginFailureType.setStatus('current')
bsnLoginFailureAddressType = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 3), InetAddressType()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnLoginFailureAddressType.setStatus('current')
bsnLoginFailureAddress = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 4), InetAddress()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnLoginFailureAddress.setStatus('current')
bsnLoginFailureUsername = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 5), SnmpAdminString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnLoginFailureUsername.setStatus('current')
bsnActualStackSize = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnActualStackSize.setStatus('current')
bsnEapUbpFailureIfIndex = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 7), InterfaceIndex()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnEapUbpFailureIfIndex.setStatus('current')
bsnEapUbpFailureMacAddress = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 8), MacAddress()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnEapUbpFailureMacAddress.setStatus('current')
bsnEapUbpFailureRoleString = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 9), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnEapUbpFailureRoleString.setStatus('current')
bsnTrialLicenseExpirationTime = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 30))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnTrialLicenseExpirationTime.setStatus('current')
bsnTrialLicenseExpirationNumber = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnTrialLicenseExpirationNumber.setStatus('current')
bsnEnteredForcedStackModeMAC = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 12), MacAddress()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnEnteredForcedStackModeMAC.setStatus('current')
bsnEapRAVErrorMacAddress = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 13), MacAddress()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnEapRAVErrorMacAddress.setStatus('current')
bsnEapRAVErrorPort = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 14), InterfaceIndex()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnEapRAVErrorPort.setStatus('current')
bsnEnteredForcedStackModeAddressType = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 15), InetAddressType()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnEnteredForcedStackModeAddressType.setStatus('current')
bsnEnteredForcedStackModeAddress = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 16), InetAddress()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnEnteredForcedStackModeAddress.setStatus('current')
bsnStackProtectionEvent = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("cannotJoinStack", 1), ("unitIgnored", 2)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnStackProtectionEvent.setStatus('current')
bsnUSBInfo = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 18), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnUSBInfo.setStatus('current')
bsnSFPInfo = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 19), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnSFPInfo.setStatus('current')
bsnAaaUserName = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 2, 1, 20), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(10, 16))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: bsnAaaUserName.setStatus('current')
bsnConfigurationSavedToNvram = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 1))
if mibBuilder.loadTexts: bsnConfigurationSavedToNvram.setStatus('current')
bsnEapAccessViolation = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 2)).setObjects(("IEEE8021-PAE-MIB", "dot1xAuthPaeState"), ("IEEE8021-PAE-MIB", "dot1xAuthBackendAuthState"), ("BAY-STACK-NOTIFICATIONS-MIB", "bsnEapAccessViolationMacAddress"))
if mibBuilder.loadTexts: bsnEapAccessViolation.setStatus('current')
bsnPortSpeedDuplexMismatch = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 3)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: bsnPortSpeedDuplexMismatch.setStatus('current')
bsnStackManagerReconfiguration = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 4))
if mibBuilder.loadTexts: bsnStackManagerReconfiguration.setStatus('current')
bsnLacTrunkUnavailable = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 5))
if mibBuilder.loadTexts: bsnLacTrunkUnavailable.setStatus('current')
bsnLoginFailure = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 6)).setObjects(("BAY-STACK-NOTIFICATIONS-MIB", "bsnLoginFailureType"), ("BAY-STACK-NOTIFICATIONS-MIB", "bsnLoginFailureAddressType"), ("BAY-STACK-NOTIFICATIONS-MIB", "bsnLoginFailureAddress"), ("BAY-STACK-NOTIFICATIONS-MIB", "bsnLoginFailureUsername"))
if mibBuilder.loadTexts: bsnLoginFailure.setStatus('current')
bsnMLTHealthFailure = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 7)).setObjects(("IF-MIB", "ifAdminStatus"))
if mibBuilder.loadTexts: bsnMLTHealthFailure.setStatus('current')
bsnTrunkPortDisabledToPreventBroadcastStorm = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 8)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: bsnTrunkPortDisabledToPreventBroadcastStorm.setStatus('current')
bsnLacPortDisabledToPreventBroadcastStorm = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 9)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: bsnLacPortDisabledToPreventBroadcastStorm.setStatus('current')
bsnTrunkPortEnabledToPreventBroadcastStorm = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 10)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: bsnTrunkPortEnabledToPreventBroadcastStorm.setStatus('current')
bsnLacPortDisabledDueToLossOfVLACPDU = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 11)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: bsnLacPortDisabledDueToLossOfVLACPDU.setStatus('current')
bsnLacPortEnabledDueToReceiptOfVLACPDU = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 12)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: bsnLacPortEnabledDueToReceiptOfVLACPDU.setStatus('current')
bsnStackConfigurationError = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 13)).setObjects(("BAY-STACK-MIB", "bayStackConfigExpectedStackSize"), ("BAY-STACK-NOTIFICATIONS-MIB", "bsnActualStackSize"))
if mibBuilder.loadTexts: bsnStackConfigurationError.setStatus('current')
bsnEapUbpFailure = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 14)).setObjects(("BAY-STACK-NOTIFICATIONS-MIB", "bsnEapUbpFailureIfIndex"), ("BAY-STACK-NOTIFICATIONS-MIB", "bsnEapUbpFailureMacAddress"), ("BAY-STACK-NOTIFICATIONS-MIB", "bsnEapUbpFailureRoleString"))
if mibBuilder.loadTexts: bsnEapUbpFailure.setStatus('current')
bsnTrialLicenseExpiration = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 15)).setObjects(("BAY-STACK-NOTIFICATIONS-MIB", "bsnTrialLicenseExpirationTime"), ("BAY-STACK-NOTIFICATIONS-MIB", "bsnTrialLicenseExpirationNumber"))
if mibBuilder.loadTexts: bsnTrialLicenseExpiration.setStatus('current')
bsnEnteredForcedStackMode = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 16)).setObjects(("BAY-STACK-NOTIFICATIONS-MIB", "bsnEnteredForcedStackModeMAC"), ("BAY-STACK-NOTIFICATIONS-MIB", "bsnEnteredForcedStackModeAddressType"), ("BAY-STACK-NOTIFICATIONS-MIB", "bsnEnteredForcedStackModeAddress"))
if mibBuilder.loadTexts: bsnEnteredForcedStackMode.setStatus('current')
bsnTemperatureExceeded = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 17)).setObjects(("S5-CHASSIS-MIB", "s5ChasComType"))
if mibBuilder.loadTexts: bsnTemperatureExceeded.setStatus('current')
bsnEapRAVError = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 18)).setObjects(("IEEE8021-PAE-MIB", "dot1xAuthPaeState"), ("IEEE8021-PAE-MIB", "dot1xAuthBackendAuthState"), ("BAY-STACK-NOTIFICATIONS-MIB", "bsnEapRAVErrorMacAddress"), ("BAY-STACK-NOTIFICATIONS-MIB", "bsnEapRAVErrorPort"))
if mibBuilder.loadTexts: bsnEapRAVError.setStatus('current')
bsnEapRateLimitExceeded = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 19)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: bsnEapRateLimitExceeded.setStatus('current')
bsnSystemUp365Days = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 20)).setObjects(("BAY-STACK-MIB", "bayStackUnitConfigIndex"))
if mibBuilder.loadTexts: bsnSystemUp365Days.setStatus('current')
bsnUSBInsertion = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 21)).setObjects(("S5-AGENT-MIB", "s5AgSysUsbTargetUnit"), ("BAY-STACK-NOTIFICATIONS-MIB", "bsnUSBInfo"))
if mibBuilder.loadTexts: bsnUSBInsertion.setStatus('current')
bsnUSBRemoval = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 22)).setObjects(("S5-AGENT-MIB", "s5AgSysUsbTargetUnit"))
if mibBuilder.loadTexts: bsnUSBRemoval.setStatus('current')
bsnSFPInsertion = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 23)).setObjects(("IF-MIB", "ifIndex"), ("BAY-STACK-NOTIFICATIONS-MIB", "bsnSFPInfo"))
if mibBuilder.loadTexts: bsnSFPInsertion.setStatus('current')
bsnSFPRemoval = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 24)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: bsnSFPRemoval.setStatus('current')
bsnROPasswordExpired = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 25))
if mibBuilder.loadTexts: bsnROPasswordExpired.setStatus('current')
bsnRWPasswordExpired = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 26))
if mibBuilder.loadTexts: bsnRWPasswordExpired.setStatus('current')
bsnStackProtection = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 27)).setObjects(("BAY-STACK-NOTIFICATIONS-MIB", "bsnStackProtectionEvent"))
if mibBuilder.loadTexts: bsnStackProtection.setStatus('current')
bsnRunScripts = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 28)).setObjects(("S5-AGENT-MIB", "s5AgentScriptStatus"))
if mibBuilder.loadTexts: bsnRunScripts.setStatus('current')
bsnAaaUserAccountNotUsed = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 29)).setObjects(("BAY-STACK-NOTIFICATIONS-MIB", "bsnAaaUserName"))
if mibBuilder.loadTexts: bsnAaaUserAccountNotUsed.setStatus('current')
bsnAaaAlreadyConnected = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 30)).setObjects(("BAY-STACK-NOTIFICATIONS-MIB", "bsnAaaUserName"))
if mibBuilder.loadTexts: bsnAaaAlreadyConnected.setStatus('current')
bsnAaaIncorrectLogOnThresholdExceeded = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 31)).setObjects(("BAY-STACK-NOTIFICATIONS-MIB", "bsnAaaUserName"))
if mibBuilder.loadTexts: bsnAaaIncorrectLogOnThresholdExceeded.setStatus('current')
bsnAaaMaxNoOfSessionsExceeded = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 32)).setObjects(("BAY-STACK-NOTIFICATIONS-MIB", "bsnAaaUserName"))
if mibBuilder.loadTexts: bsnAaaMaxNoOfSessionsExceeded.setStatus('current')
bsnAuditUnsentMessages = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 33))
if mibBuilder.loadTexts: bsnAuditUnsentMessages.setStatus('current')
bsnAuditRecordEventsFailure = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 34))
if mibBuilder.loadTexts: bsnAuditRecordEventsFailure.setStatus('current')
bsnAuditStartUpTrap = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 35))
if mibBuilder.loadTexts: bsnAuditStartUpTrap.setStatus('current')
bsnAuditShutDownTrap = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 36))
if mibBuilder.loadTexts: bsnAuditShutDownTrap.setStatus('current')
bsnAaaUserPasswordExpired = NotificationType((1, 3, 6, 1, 4, 1, 45, 5, 2, 2, 0, 37)).setObjects(("BAY-STACK-NOTIFICATIONS-MIB", "bsnAaaUserName"))
if mibBuilder.loadTexts: bsnAaaUserPasswordExpired.setStatus('current')
mibBuilder.exportSymbols("BAY-STACK-NOTIFICATIONS-MIB", bsnActualStackSize=bsnActualStackSize, bsnSFPInsertion=bsnSFPInsertion, bsnLacPortDisabledToPreventBroadcastStorm=bsnLacPortDisabledToPreventBroadcastStorm, bsnLoginFailureUsername=bsnLoginFailureUsername, bsnROPasswordExpired=bsnROPasswordExpired, bsnEapAccessViolationMacAddress=bsnEapAccessViolationMacAddress, bsnAaaMaxNoOfSessionsExceeded=bsnAaaMaxNoOfSessionsExceeded, bsnAuditStartUpTrap=bsnAuditStartUpTrap, bsnStackManagerReconfiguration=bsnStackManagerReconfiguration, bsnAuditShutDownTrap=bsnAuditShutDownTrap, bsnEnteredForcedStackModeMAC=bsnEnteredForcedStackModeMAC, bsnTrialLicenseExpirationNumber=bsnTrialLicenseExpirationNumber, bsnEapRAVErrorPort=bsnEapRAVErrorPort, bsnEnteredForcedStackModeAddress=bsnEnteredForcedStackModeAddress, bsnConfigurationSavedToNvram=bsnConfigurationSavedToNvram, bsnObjects=bsnObjects, bsnUSBRemoval=bsnUSBRemoval, bsnTrialLicenseExpirationTime=bsnTrialLicenseExpirationTime, bsnMLTHealthFailure=bsnMLTHealthFailure, bsnUSBInsertion=bsnUSBInsertion, bsnLacPortDisabledDueToLossOfVLACPDU=bsnLacPortDisabledDueToLossOfVLACPDU, bayStackNotificationsMib=bayStackNotificationsMib, bsnLacTrunkUnavailable=bsnLacTrunkUnavailable, bsnEapRateLimitExceeded=bsnEapRateLimitExceeded, bsnEnteredForcedStackModeAddressType=bsnEnteredForcedStackModeAddressType, bsnStackConfigurationError=bsnStackConfigurationError, bsnLoginFailureType=bsnLoginFailureType, bsnTemperatureExceeded=bsnTemperatureExceeded, bsnEapUbpFailureRoleString=bsnEapUbpFailureRoleString, bsnSystemUp365Days=bsnSystemUp365Days, bsnAaaAlreadyConnected=bsnAaaAlreadyConnected, bsnEapUbpFailureMacAddress=bsnEapUbpFailureMacAddress, bsnAaaIncorrectLogOnThresholdExceeded=bsnAaaIncorrectLogOnThresholdExceeded, bsnEapRAVError=bsnEapRAVError, bsnAuditRecordEventsFailure=bsnAuditRecordEventsFailure, bsnEapUbpFailure=bsnEapUbpFailure, bsnRunScripts=bsnRunScripts, bsnStackProtectionEvent=bsnStackProtectionEvent, 
bsnAaaUserPasswordExpired=bsnAaaUserPasswordExpired, bsnNotifications0=bsnNotifications0, bsnLoginFailureAddress=bsnLoginFailureAddress, bsnEnteredForcedStackMode=bsnEnteredForcedStackMode, bsnStackProtection=bsnStackProtection, bsnAuditUnsentMessages=bsnAuditUnsentMessages, bsnNotifications=bsnNotifications, bsnLacPortEnabledDueToReceiptOfVLACPDU=bsnLacPortEnabledDueToReceiptOfVLACPDU, bsnSFPInfo=bsnSFPInfo, bsnLoginFailure=bsnLoginFailure, bsnTrialLicenseExpiration=bsnTrialLicenseExpiration, bsnEapAccessViolation=bsnEapAccessViolation, bsnRWPasswordExpired=bsnRWPasswordExpired, bsnEapUbpFailureIfIndex=bsnEapUbpFailureIfIndex, bsnTrunkPortEnabledToPreventBroadcastStorm=bsnTrunkPortEnabledToPreventBroadcastStorm, bsnPortSpeedDuplexMismatch=bsnPortSpeedDuplexMismatch, bsnEapRAVErrorMacAddress=bsnEapRAVErrorMacAddress, PYSNMP_MODULE_ID=bayStackNotificationsMib, bsnTrunkPortDisabledToPreventBroadcastStorm=bsnTrunkPortDisabledToPreventBroadcastStorm, bsnAaaUserName=bsnAaaUserName, bsnLoginFailureAddressType=bsnLoginFailureAddressType, bsnAaaUserAccountNotUsed=bsnAaaUserAccountNotUsed, bsnUSBInfo=bsnUSBInfo, bsnSFPRemoval=bsnSFPRemoval)
| [
"dcwangmit01@gmail.com"
] | dcwangmit01@gmail.com |
d4b561e512a4989690288120a874578673b81f51 | 436dfdb4ea52eb5d5f6f18cfd6573b5fe44ba2cb | /myvenv/bin/isort | 576638f6c74c0b85b9732a3e2c8461e1ebca114c | [] | no_license | tnaswin/wisdom-pets | 8083cd1e239007af3a5bd2d32d11fcaa7ecfb76a | ee2acff6d9eb2e3bf427cdd18613ca6a9287ae1f | refs/heads/master | 2022-07-14T12:39:45.200932 | 2020-05-15T11:00:21 | 2020-05-15T11:00:21 | 262,817,261 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 264 | #!/home/aswin/Documents/Aswin/Development/learn-django/myvenv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from isort.main import main
if __name__ == '__main__':
    # pip-generated console-script launcher: strip a trailing
    # "-script.pyw" / ".exe" suffix (Windows launcher artifacts) from
    # argv[0] so isort reports a clean program name, then run its CLI and
    # propagate the exit code.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"tnaswin007@gmail.com"
] | tnaswin007@gmail.com | |
9f1337840235d5e135d72763e66000e0dd29022f | 8e088033bd7c41988cc05aa2487ec4ac207e8268 | /rust/src/python/libnmstate/nmpolicy.py | 8fe4c498bb957f04a217778aeff82e77c9287774 | [
"Apache-2.0",
"LGPL-2.1-only"
] | permissive | nmstate/nmstate | 43f513c289100c602370da7f73b4ab3a8be37c81 | 6baaa5ae5679f14bcec1791d43fb4080f73bb4e9 | refs/heads/base | 2023-08-31T01:26:06.985543 | 2023-08-24T11:18:17 | 2023-08-25T07:20:04 | 132,556,280 | 224 | 77 | Apache-2.0 | 2023-09-10T08:47:13 | 2018-05-08T05:02:22 | Rust | UTF-8 | Python | false | false | 217 | py | # SPDX-License-Identifier: Apache-2.0
import json
from .clib_wrapper import net_state_from_policy
def gen_net_state_from_policy(policy, cur_state):
    """Apply *policy* to *cur_state* and return the resulting network state.

    Delegates to the C-library wrapper ``net_state_from_policy``, which
    yields a JSON string, and deserializes that string into native Python
    objects.
    """
    raw_state = net_state_from_policy(policy, cur_state)
    return json.loads(raw_state)
| [
"fge@redhat.com"
] | fge@redhat.com |
278beccd4959f7b5d2b6bd3011a01f60c47f08e7 | 3f8de52ba41a7abb4a8b222908f98747d13e1afa | /rlpy/stats/_stats.py | 13b84d0d9773808baa5d5c273610176409964df4 | [
"ISC"
] | permissive | evenmarbles/rlpy | 9c6b570ca3117d2171a897e06ec6deef8fdd918a | 3c3c39a316285ca725268e81aef030e5c764f797 | refs/heads/master | 2016-08-11T06:50:19.679495 | 2016-03-12T22:04:05 | 2016-03-12T22:04:05 | 53,755,316 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,387 | py | from __future__ import division, print_function, absolute_import
# noinspection PyUnresolvedReferences
from six.moves import range
import numpy as np
# noinspection PyPackageRequirements
from sklearn.utils.extmath import logsumexp
from ..auxiliary.array import nunique
__all__ = ['is_posdef', 'randpd', 'stacked_randpd', 'normalize_logspace', 'sq_distance',
'partitioned_mean', 'partitioned_cov', 'partitioned_sum', 'shrink_cov',
'canonize_labels']
def is_posdef(a):
    """Test whether the matrix `a` is positive definite.

    Attempts a Cholesky factorization, which exists exactly for positive
    definite matrices; NumPy signals failure with a ``LinAlgError``.

    Parameters
    ----------
    a : ndarray
        A matrix.

    Returns
    -------
    bool :
        True when the matrix is positive definite, False otherwise.

    .. note::
        Adapted from the Matlab PMTK3 toolkit (Kevin Murphy and Matt
        Dunham, MIT license).
    """
    matrix = np.asarray(a)
    try:
        np.linalg.cholesky(matrix)
    except np.linalg.LinAlgError:
        return False
    return True
def randpd(dim):
    """Create a random positive definite matrix of size `dim`-by-`dim`.

    Parameters
    ----------
    dim : int
        The dimension of the matrix to create.

    Returns
    -------
    ndarray :
        A `dim`-by-`dim` positive definite matrix.

    .. note::
        Adapted from the Matlab PMTK3 toolkit (Kevin Murphy and Matt
        Dunham, MIT license).
    """
    x = np.random.randn(dim, dim)
    # Use the Gram matrix X X^T, which is symmetric positive
    # *semi*-definite by construction.  The previous element-wise product
    # ``x * x.T`` is symmetric but generally indefinite, so the
    # correction loop below could spin for a long time before the
    # diagonal nudges made the matrix positive definite.
    a = np.dot(x, x.T)
    # Guard against the (probability-zero) singular case by nudging the
    # diagonal until the Cholesky test succeeds.
    while not is_posdef(a):
        a = a + np.diag(0.001 * np.ones(dim))
    return a
def stacked_randpd(dim, k, p=0):
    """Create stacked positive definite matrices.

    Builds `k` random positive definite matrices of size `dim`-by-`dim`
    (each with `p` added along its diagonal) and stacks them along the
    first axis.

    Parameters
    ----------
    dim : int
        The dimension of each matrix.
    k : int
        The number of matrices.
    p : int
        The value added to the diagonal of each matrix.

    Returns
    -------
    ndarray :
        Array of shape (`k`, `dim`, `dim`) holding the stacked matrices.

    .. note::
        Adapted from the Matlab PMTK3 toolkit (Kevin Murphy and Matt
        Dunham, MIT license).
    """
    diag_offset = np.diag(p * np.ones(dim))
    stacked = np.zeros((k, dim, dim))
    for idx in range(k):
        stacked[idx] = randpd(dim) + diag_offset
    return stacked
def normalize_logspace(a):
    """Normalizes the array `a` in the log domain.

    Each row of `a` is a log discrete distribution.  Subtracting the
    row-wise log-sum-exp normalizes every row while minimizing the
    possibility of numerical underflow.

    Parameters
    ----------
    a : ndarray, shape (`n`, `k`)
        The array to normalize in the log domain, one distribution per
        row.

    Returns
    -------
    a : ndarray
        The array normalized in the log domain.
    lnorm : ndarray, shape (`n`,)
        Per-row log normalization constants.

    .. note::
        Adapted from the Matlab PMTK3 toolkit (Kevin Murphy and Matt
        Dunham, MIT license).
    """
    # ``sklearn.utils.extmath.logsumexp`` (imported at module level) was
    # removed from scikit-learn years ago; SciPy's implementation is the
    # maintained, numerically identical replacement.  Imported locally so
    # this fix is self-contained.
    from scipy.special import logsumexp

    l = logsumexp(a, axis=1)
    y = a.T - l
    return y.T, l
def sq_distance(p, q, p_sos=None, q_sos=None):
    """Efficiently compute squared Euclidean distances between point sets.

    Computes the squared Euclidean distance from every `d`-dimensional
    point in `p` to every `d`-dimensional point in `q` using the identity
    ``||p - q||^2 = ||p||^2 + ||q||^2 - 2 p.q``.

    Parameters
    ----------
    p : array_like, shape (`n_p`, `dim`)
    q : array_like, shape (`n_q`, `dim`)
        Point sets; rows are points, columns are dimensions.
    p_sos : array_like, shape (`n_p`,), optional
    q_sos : array_like, shape (`n_q`,), optional
        Precomputed per-row sums of squares; computed here when omitted.

    Returns
    -------
    ndarray, shape (`n_p`, `n_q`) :
        The pairwise squared Euclidean distances.

    .. note::
        Adapted from the Matlab PMTK3 toolkit (Kevin Murphy and Matt
        Dunham, MIT license).
    """
    if p_sos is None:
        p_sos = np.sum(np.power(p, 2), 1)
    if q_sos is None:
        # noinspection PyTypeChecker
        q_sos = np.sum(np.power(q, 2), 1)
    # noinspection PyUnresolvedReferences
    n_q = q_sos.shape[0]
    cross_terms = np.dot(p, q.T)
    # noinspection PyUnresolvedReferences
    return (q_sos.reshape((n_q, 1)) + p_sos).T - 2 * cross_terms
def partitioned_mean(x, y, c=None, return_counts=False):
    """Mean of groups.

    Groups the rows of `x` according to the class labels in `y` (assumed
    to be integers 0..c-1) and takes the mean of each group.

    Parameters
    ----------
    x : array_like, shape (`n`, `dim`)
        The data to group, where `n` is the number of data points and
        `dim` is the dimensionality of each data point.
    y : array_like, shape (`n`,)
        The class label for each data point.
    c : int, optional
        The number of classes; inferred from `y` when omitted.
    return_counts : bool
        Whether to also return the number of elements in each group.

    Returns
    -------
    mean : array_like
        The mean of each group.
    counts : ndarray, optional
        The number of elements in each group (only if `return_counts`).

    .. note::
        Adapted from the Matlab PMTK3 toolkit (Kevin Murphy and Matt
        Dunham, MIT license).
    """
    if c is None:
        c = nunique(y)
    n_features = x.shape[1]
    means = np.zeros((c, n_features))
    for label in range(c):
        means[label] = np.mean(x[y == label], 0)
    if not return_counts:
        return means
    # noinspection PyTupleAssignmentBalance
    _, counts = np.unique(y, return_counts=True)
    return (means, counts)
def partitioned_cov(x, y, c=None):
    """Covariance of groups.

    Partition the rows of `x` according to class labels in `y` (assumed
    to be integers 0..c-1) and take the covariance of each group.

    Parameters
    ----------
    x : array_like, shape (`n`, `dim`)
        The data to group, where `n` is the number of data points and
        `dim` is the dimensionality of each data point.
    y : array_like, shape (`n`,)
        The class label for each data point.
    c : int, optional
        The number of components in `y`; inferred when omitted.

    Returns
    -------
    cov : array_like, shape (`c`, `dim`, `dim`)
        The covariance matrix of each group.

    .. note::
        Adapted from the Matlab PMTK3 toolkit (Kevin Murphy and Matt
        Dunham, MIT license).
    """
    c = nunique(y) if c is None else c
    dim = x.shape[1]
    cov = np.zeros((c, dim, dim))
    for i in range(c):
        # Fixes from the unfinished original: select group ``i`` (it
        # indexed with the constant ``c``), treat rows as observations
        # (``rowvar=False``; the default produced an (n_i, n_i) matrix
        # that cannot be assigned into a (dim, dim) slot), and actually
        # return the result.
        cov[i] = np.cov(x[y == i], rowvar=False)
    return cov
def partitioned_sum(x, y, c=None):
    """Sums of groups.

    Groups the rows of `x` according to the class labels in `y` (assumed
    to be integers 0..c-1) and sums each group.

    Parameters
    ----------
    x : array_like, shape (`n`, `dim`)
        The data to group, where `n` is the number of data points and
        `dim` is the dimensionality of each data point.
    y : array_like, shape (`n`,)
        The class label for each data point.
    c : int, optional
        The number of components in `y`; inferred when omitted.

    Returns
    -------
    sums : array_like, shape (`c`, `dim`)
        The per-group sums.

    .. note::
        Adapted from the Matlab PMTK3 toolkit (Kevin Murphy and Matt
        Dunham, MIT license).
    """
    if c is None:
        c = nunique(y)
    # Indicator matrix: entry (i, j) is True when sample j belongs to
    # class i, so the dot product sums the rows of ``x`` class by class.
    # noinspection PyTypeChecker
    indicator = np.arange(c).reshape(c, 1) == y
    return np.dot(indicator, x)
def shrink_cov(x, return_lambda=False, return_estimate=False):
    """Covariance shrinkage estimation.

    Ledoit-Wolf optimal shrinkage estimator for cov(X)

    :math:`C = \\lambda*t + (1 - \\lambda) * s`

    using the diagonal variance 'target' t=np.diag(s) with the
    unbiased sample cov `s` as the unconstrained estimate.

    Parameters
    ----------
    x : array_like, shape (`n`, `dim`)
        The data, where `n` is the number of data points and
        `dim` is the dimensionality of each data point.
    return_lambda : bool
        Whether to return the shrinkage intensity lambda.
    return_estimate : bool
        Whether to return the unbiased sample covariance estimate.

    Returns
    -------
    C : array
        The shrunk final estimate.
    lambda_ : float, optional
        Shrinkage intensity in [0, 1] (only if `return_lambda`).
    estimate : array, optional
        Unbiased sample covariance (only if `return_estimate`).

    .. note::
        Adapted from the Matlab PMTK3 toolkit (Kevin Murphy and Matt
        Dunham, MIT license).
    """
    optional_returns = return_lambda or return_estimate
    n, p = x.shape
    # Center the data before forming the scatter matrices.
    x_mean = np.mean(x, 0)
    x = x - x_mean
    # noinspection PyTypeChecker
    s = np.asarray(np.dot(x.T, x) / (n - 1))  # unbiased sample covariance
    s_bar = (n - 1) * s / n  # biased (MLE) covariance
    # Element-wise variance of the sample covariance entries.
    s_var = np.zeros((p, p))
    for i in range(n):
        # noinspection PyTypeChecker
        s_var += np.power(x[i].reshape(p, 1) * x[i] - s_bar, 2)
    s_var = np.true_divide(n, (n - 1)**3) * s_var
    # Strict upper triangle: selects each off-diagonal pair exactly once.
    o_shrink = np.triu(np.ones((p, p))) - np.eye(p)
    # Ledoit-Wolf formula.  ``np.bool`` was removed in NumPy 1.24; the
    # builtin ``bool`` is the documented replacement (same semantics).
    off_diag = o_shrink.astype(bool)
    lambda_ = np.sum(s_var[off_diag]) / np.sum(np.power(s[off_diag], 2))
    # bound-constrain lambda to [0, 1]
    lambda_ = np.max([0, np.min([1, lambda_])])
    # shrunk final estimate C
    c = lambda_ * np.diag(np.diag(s)) + (1 - lambda_) * s
    if not optional_returns:
        ret = c
    else:
        ret = (c,)
        if return_lambda:
            ret += (lambda_,)
        if return_estimate:
            ret += (s,)
    return ret
def canonize_labels(labels, support=None):
    """Transform labels to the canonical range 1:k.

    The output has the same size as `labels`, with every label mapped to
    its corresponding value in 1:k.  When `labels` does not span the
    full support, pass the support explicitly as the second argument.

    Parameters
    ----------
    labels : array_like
    support : optional

    Returns
    -------
    Transformed labels.

    .. note::
        Adapted from the Matlab PMTK3 toolkit (Kevin Murphy and Matt
        Dunham, MIT license).

    .. warning::
        Stub only -- calling this always raises ``NotImplementedError``.
    """
    raise NotImplementedError
| [
"ajackson.us@gmail.com"
] | ajackson.us@gmail.com |
4455cb72e96eb201ce3bdde483934c9c4f9dfda3 | 54b10fbb396bcf991d349dda4e60755cb729abc5 | /4_ML_models/DecisionTree2.py | 0ab910cbbdb275add78804d59aedbcae0a35add4 | [] | no_license | Fan-Wang-nl/ML_Course | 6fe8bb8cca6fd586049d39dae6b1f0bd4b0b4beb | 8a2290015336f64c388befa467fd069966daf03b | refs/heads/master | 2020-09-02T06:31:27.993670 | 2020-06-28T09:27:49 | 2020-06-28T09:27:49 | 219,156,323 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 479 | py | from sklearn.tree import DecisionTreeClassifier
from sklearn import datasets
from sklearn.model_selection import train_test_split
iris = datasets.load_iris()
x = iris.data
y = iris.target
train_x, test_x, train_y, test_y = train_test_split(x, y, test_size=0.3, random_state=3)
dt_model = DecisionTreeClassifier()
dt_model.fit(train_x, train_y)
predict_y = dt_model.predict(test_x)
score = dt_model.score(test_x, test_y)
print(predict_y)
print(test_y)
print('score:', score) | [
"buaawangfan@gmail.com"
] | buaawangfan@gmail.com |
ec86d5a0c5bcbc3d96b339302624c3c3dc1ce497 | da7ac09704fdb734731aef4b9a43f083a9c41dd6 | /src/download_tweets.py | 1c87120e47210c79b23d181e81db5eaa85e8dd85 | [] | no_license | covix/laughing-computing-machine | 400a73c7fcee0c5bbd978c366a05bd95e8b6df1d | a6dd682ce51db75f31415619c14abb0df6d2d550 | refs/heads/master | 2021-03-22T03:07:19.109405 | 2017-03-21T20:13:45 | 2017-03-21T20:13:45 | 76,380,810 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,163 | py | import sys
import jsonpickle
import os
import tweepy
import time
from time import gmtime, strftime, strptime, mktime
from datetime import datetime, date
import numpy as np
import glob
from settings import *
def download(query, output_folder='.'):
    """Download English tweets and append them, one JSON object per line,
    to a timestamped file in ``output_folder``.

    NOTE(review): ``query`` only feeds the output file name; the actual
    search uses the module-level ``searchQuery`` (set under __main__),
    and ``maxTweets`` / ``consumerKey`` / ``consumerSecret`` come from
    ``settings`` via the star import -- confirm before reusing this
    function elsewhere.
    """
    lang = 'en'
    since = '2016-10-27'
    tweetsPerQry = 100 # this is the max the API permits
    # fName = 'tweets_mac_' + str(date.today().month) + \
    # '-' + str(date.today().day) + '.txt'
    # NOTE(review): the inner time.strftime renders the *current* time
    # first; the outer datetime.strftime then sees no '%' codes and
    # returns the string unchanged.  Presumably only one call was meant.
    fName = 'tweets_{query}_{d}.txt'.format(
        query='_'.join(query), d=datetime.now().strftime(strftime('%Y-%m-%d-%H-%M-%S')))
    auth = tweepy.OAuthHandler(consumerKey, consumerSecret)
    # wait_on_rate_limit makes tweepy sleep through Twitter rate limits
    # instead of raising.
    api = tweepy.API(auth, wait_on_rate_limit=True,
                     wait_on_rate_limit_notify=True)
    if (not api):
        print ("Can't Authenticate")
        sys.exit(-1)
    # min_id.txt (if present) holds IDs from previous runs; resuming
    # below their minimum avoids re-downloading the same tweets.
    ids = []
    idsfile = os.path.join(output_folder, 'min_id.txt')
    if os.path.isfile(idsfile):
        ids = np.loadtxt(idsfile, dtype=int)
    # If results only below a specific ID are, set max_id to that ID.
    # else default to no upper limit, start from the most recent tweet
    # matching the search query.
    # NOTE(review): ``-1L`` is a Python 2 long literal -- this module is
    # Python-2-only as written.
    max_id = min(ids) if len(ids) else -1L
    if max_id != -1:
        print("max_id: ", max_id)
    tweetCount = 0
    print("Downloading max {0} tweets".format(maxTweets))
    with open(os.path.join(output_folder, fName), 'w') as f:
        # Page backwards through the search results: each batch's oldest
        # ID becomes the upper bound (max_id) for the next request.
        while tweetCount < maxTweets:
            try:
                if (max_id <= 0):
                    if (not since):
                        new_tweets = api.search(
                            q=searchQuery, count=tweetsPerQry, lang=lang)
                    else:
                        new_tweets = api.search(q=searchQuery, count=tweetsPerQry,
                                                lang=lang, since=since)
                else:
                    if (not since):
                        new_tweets = api.search(q=searchQuery, count=tweetsPerQry,
                                                max_id=str(max_id - 1), lang=lang)
                    else:
                        new_tweets = api.search(q=searchQuery, count=tweetsPerQry,
                                                max_id=str(max_id - 1),
                                                lang=lang, since=since)
                if not new_tweets:
                    print("No more tweets found")
                    break
                # Persist only tweets not already seen in earlier runs.
                for tweet in new_tweets:
                    if tweet._json['id'] not in ids:
                        f.write(jsonpickle.encode(tweet._json, unpicklable=False) +
                                '\n')
                tweetCount += len(new_tweets)
                print("Downloaded {0} tweets".format(tweetCount))
                max_id = new_tweets[-1].id
            except tweepy.TweepError as e:
                # Just exit if any error
                print("some error : " + str(e))
                break
    print ("Downloaded {0} tweets, Saved to {1}".format(tweetCount, fName))
if __name__ == "__main__":
searchQuery = [sys.argv[1]] if len(sys.argv) > 1 else ['macbook']
download(searchQuery, output_folder='data/')
| [
"luca.coviello@gmail.com"
] | luca.coviello@gmail.com |
924f0a27d455b01da2672634bae9046e908394c2 | cead312342b4436fc02c45394c148581bbcef0d3 | /problem-010/primes-sum.py | 14b16fa402af3b43db4929c99830bddf0ae02143 | [] | no_license | johnboy5358/project-euler | 0ccd17256a163d9f5607b36b0829372968671f78 | f59c859d50ecfea128019e583805d972ff3e3870 | refs/heads/master | 2020-03-14T17:34:42.377077 | 2014-05-02T17:01:25 | 2014-05-02T17:01:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 496 | py | # Summation of primes
# Problem 10
# The sum of the primes below 10 is 2 + 3 + 5 + 7 = 17.
#
# Find the sum of all the primes below two million.
from math import sqrt
def is_prime(number):
    """Return True if `number` is prime.

    Trial division: any divisor in [2, sqrt(number)] proves the number
    composite.
    """
    if number < 2:
        # 0, 1 and negatives are not prime.  Without this guard the loop
        # below is empty for them and they were wrongly reported prime.
        return False
    for x in range(2, int(sqrt(number)) + 1):
        if number % x == 0:
            return False
    return True
def primes_sum(until):
    """Sum all primes strictly below `until`.

    Starts from 2 (the only even prime) and then walks the odd numbers,
    testing each by trial division up to its square root.
    """
    total = 2
    candidate = 3
    while candidate < until:
        for divisor in range(2, int(sqrt(candidate)) + 1):
            if candidate % divisor == 0:
                break
        else:
            # No divisor found: candidate is prime.
            total += candidate
        candidate += 2
    return total
# Report the sum of all primes below two million (Python 2 print statement).
print primes_sum(2000000)
| [
"brunopsanches@gmail.com"
] | brunopsanches@gmail.com |
5be9260be1d35c3ec39dee9d9998e130a94cb8c9 | b0928c989560268e4163258aa2ff0dc604450377 | /questions/views.py | f0e92c1fb5396d426d26086746b6db1de9653950 | [] | no_license | Amandeep18-tech/Quizapp | 0f65a805057542f5ea3158b660e227415a051c16 | d3b589f792131c327a4dbc1a5898e783794fde97 | refs/heads/master | 2023-06-27T18:34:26.896584 | 2021-07-28T11:15:49 | 2021-07-28T11:15:49 | 326,179,201 | 0 | 1 | null | 2021-01-13T11:36:45 | 2021-01-02T12:34:43 | Python | UTF-8 | Python | false | false | 6,115 | py | from django.utils import decorators
from django.views.generic import ListView
from django.contrib.auth.mixins import LoginRequiredMixin
from django.shortcuts import get_object_or_404
from django.utils.decorators import method_decorator
from django.shortcuts import HttpResponseRedirect, reverse
from datetime import datetime, timedelta
from django.contrib.auth.decorators import login_required
from .models import UserProgress, MCQ, Question, AnswerGiven, FillInTheBlank
from .decorators import (
does_user_has_permission,
does_user_has_permission_for_result_page
)
decorators_for_exam_view = [login_required, does_user_has_permission()]
decorators_for_result_view = [login_required,
does_user_has_permission_for_result_page()]
@method_decorator(decorators_for_exam_view, name='dispatch')
class ExamListView(ListView):
    """
    Paginated exam view: shows one question per page, records the answer
    submitted for each question, updates the user's score, and moves the
    user forward until the last question redirects to the result page.
    """
    template_name = 'index.html'
    context_object_name = 'questions'
    paginate_by = 1

    def post(self, request, *args, **kwargs):
        """
        Record the answer posted for the current question and redirect to
        the next question (or to the result page after the last one).
        """
        # Removed the unused ``post_data`` dict that duplicated these two
        # POST reads without ever being consumed.
        question_data = self.request.POST.get('user_answer', None)
        question_id = self.request.POST.get('question_id', None)
        user_progress = get_object_or_404(UserProgress,
                                          user=self.request.user)
        question = get_object_or_404(Question, id=question_id)
        if question_data is not None:
            mcqs = MCQ.objects.all()
            fill_in_the_blanks = FillInTheBlank.objects.all()
            if question.is_mcq is True:
                for mcq in mcqs:
                    if question.id == mcq.question.id:
                        answer_given = AnswerGiven(
                            user=self.request.user, question=question,
                            user_answer_mcq=question_data)
                        # MCQ answers arrive as option indexes; a wrong
                        # answer keeps the model's default correctness.
                        if int(answer_given.user_answer_mcq) == mcq.correct_answer_mcq:
                            answer_given.is_answer_correct = True
                            user_progress.user_score += 1
                        answer_given.save()
            elif question.is_fill_in_the_blanks is True:
                for fill_in_the_blank in fill_in_the_blanks:
                    if question.id == fill_in_the_blank.question.id:
                        answer_given = AnswerGiven(
                            user=self.request.user, question=question,
                            user_answer_fill_in_the_blanks=question_data)
                        # Free-text answers are compared case-insensitively.
                        if fill_in_the_blank.correct_answer_fill_in_the_blanks.upper() == question_data.upper():
                            answer_given.is_answer_correct = True
                            user_progress.user_score += 1
                        else:
                            answer_given.is_answer_correct = False
                        answer_given.save()
        total_count = Question.objects.all().count()
        if total_count == user_progress.current_page:
            # Last question answered: close the attempt and show results.
            user_progress.is_finished = True
            user_progress.save()
            return HttpResponseRedirect(reverse('result-page'))
        else:
            user_progress.is_finished = False
            user_progress.save()
            question_page = reverse('question-page')
            return_next_page = f'{question_page}?page={user_progress.current_page+1}'
            return HttpResponseRedirect(return_next_page)

    def get_queryset(self, **kwargs):
        """
        Return all questions; on the first page of a fresh attempt, reset
        the user's progress and start the exam timer (one minute per
        question).
        """
        questions = Question.objects.all()
        actual_page = self.request.GET.get('page', 1)
        user_progress = get_object_or_404(UserProgress,
                                          user=self.request.user)
        if int(actual_page) == 1 and user_progress.is_started is False:
            total_minutes = Question.objects.all().count()
            user_progress.user_time = datetime.now()
            user_progress.user_end_time = user_progress.user_time + \
                timedelta(minutes=total_minutes)
            user_progress.is_started = True
            user_progress.is_finished = False
            user_progress.user_score = 0
            # Drop answers from any previous attempt.
            AnswerGiven.objects.filter(user=self.request.user).delete()
        user_progress.current_page = actual_page
        user_progress.save()
        return questions

    def get_context_data(self, **kwargs):
        """Expose the MCQ options and the exam timer bounds to the template."""
        context = super(ExamListView, self).get_context_data(**kwargs)
        user_progress = get_object_or_404(UserProgress,
                                          user=self.request.user)
        context['mcqs'] = MCQ.objects.all()
        context['end_time'] = user_progress.user_end_time
        context['start_time'] = user_progress.user_time
        return context
@method_decorator(decorators_for_result_view, name='dispatch')
class ResultPageListView(ListView):
    """
    Show the user's answers alongside the correct ones and the final
    score, and reset the exam progress ready for the next attempt.
    """
    template_name = 'result.html'
    context_object_name = 'answer_given'

    def get_queryset(self, **kwargs):
        """Reset progress to page 1, mark the attempt finished, and
        return the user's recorded answers."""
        progress = get_object_or_404(UserProgress,
                                     user=self.request.user)
        progress.current_page = 1
        progress.is_finished = True
        progress.save()
        return AnswerGiven.objects.filter(user=self.request.user)

    def get_context_data(self, **kwargs):
        """Expose questions, answer keys and the score to the template."""
        context = super(ResultPageListView, self).get_context_data(**kwargs)
        progress = get_object_or_404(UserProgress,
                                     user=self.request.user)
        # NOTE(review): 'quesion' is misspelled but kept as-is -- the
        # template presumably looks the queryset up under this key.
        context['quesion'] = Question.objects.all()
        context['mcq'] = MCQ.objects.all()
        context['fill_in_the_blanks'] = FillInTheBlank.objects.all()
        context['user_score'] = progress.user_score
        context['total_questions'] = Question.objects.all().count()
        return context
| [
"amandeep@yodaplus.com"
] | amandeep@yodaplus.com |
e7289fa1f549284d7e98f8964c2d31047a9bc6da | 7c2c36ebf1a28a1b3990578bb59883d0a5fe74e6 | /turbustat/tests/test_pdf.py | 3ab83d5028113dcd19cf5de8be96265696ed77af | [
"MIT"
] | permissive | hopehhchen/TurbuStat | 1ebb6dbdd9e80fcacc0e4ed75359909a1bad8a4d | 3793c8b3a6deb4c14b1388b5290a21d93f1697cf | refs/heads/master | 2020-07-09T23:58:07.035643 | 2015-06-08T14:43:38 | 2015-06-08T14:43:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,371 | py | # Licensed under an MIT open source license - see LICENSE
from unittest import TestCase
import numpy as np
import numpy.testing as npt
from ..statistics.pdf import PDF, PDF_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
class testPDF(TestCase):
    """Regression tests for PDF_Distance against precomputed reference values."""

    def setUp(self):
        # Synthetic observation datasets shipped with the test suite.
        self.dataset1 = dataset1
        self.dataset2 = dataset2

    def test_PDF_distance(self):
        # Build the PDF distance between the two integrated-intensity
        # maps, clipping both below 0.05 and weighting each pixel by the
        # inverse square of its error map.
        self.test_dist = \
            PDF_Distance(self.dataset1["integrated_intensity"][0],
                         self.dataset2["integrated_intensity"][0],
                         min_val1=0.05,
                         min_val2=0.05,
                         weights1=self.dataset1["integrated_intensity_error"][0] ** -2.,
                         weights2=self.dataset2["integrated_intensity_error"][0] ** -2.)

        self.test_dist.distance_metric()

        # The computed PDF and all three distance statistics must match
        # the stored reference values.
        assert np.allclose(self.test_dist.PDF1.pdf, computed_data["pdf_val"])

        npt.assert_almost_equal(self.test_dist.hellinger_distance,
                                computed_distances['pdf_hellinger_distance'])
        npt.assert_almost_equal(self.test_dist.ks_distance,
                                computed_distances['pdf_ks_distance'])
        npt.assert_almost_equal(self.test_dist.ad_distance,
                                computed_distances['pdf_ad_distance'])
| [
"koch.eric.w@gmail.com"
] | koch.eric.w@gmail.com |
c5e3e27748215ba1bf4e3dce156218b5b8e3bc26 | de90103b3a3d021082efc358610a5d5164a91764 | /OTHER/song_threading.py | c7c4120e32051cdf2701e695843944608a75994a | [] | no_license | anish-krishnan/Air-DJ | d28a3664a750056c3ec84adfecb557da6f9a62a6 | 0866b88a684f1983dbe1b6311a708ead29e44157 | refs/heads/master | 2020-03-12T07:42:15.174491 | 2018-04-23T19:30:04 | 2018-04-23T19:30:04 | 130,166,873 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,285 | py | import threading, random
from Tkinter import *
#pyAudio
import _portaudio
import pyaudio
import wave
from array import array
from struct import pack
#pydub
import pydub
from pydub.playback import play
from pydub import AudioSegment
def init(data):
    # load data.xyz as appropriate
    # Animation-framework hook called once before the first draw;
    # populate model state on the ``data`` struct here.  No-op stub.
    pass
def mousePressed(event, data):
    # use event.x and event.y
    # Framework hook for left-button clicks.  No-op stub.
    pass
def keyPressed(event, data):
    # Framework hook for key presses (event.char / event.keysym).  No-op stub.
    pass
def timerFired(data):
    # Framework hook fired every ``data.timerDelay`` milliseconds.  No-op stub.
    pass
def redrawAll(canvas, data):
    # Repaint the frame: fill the whole window with a solid blue rectangle.
    canvas.create_rectangle(0, 0, data.width, data.height, fill="blue")
def run(width=300, height=300):
    """Standard Tkinter animation scaffold (CMU 15-112 style): wires the
    mouse/key/timer hooks to a canvas and blocks in the Tk main loop."""
    def redrawAllWrapper(canvas, data):
        # Clear, paint a white background, delegate to redrawAll, flush.
        canvas.delete(ALL)
        canvas.create_rectangle(0, 0, data.width, data.height,
                                fill='white', width=0)
        redrawAll(canvas, data)
        canvas.update()
    def mousePressedWrapper(event, canvas, data):
        mousePressed(event, data)
        redrawAllWrapper(canvas, data)
    def keyPressedWrapper(event, canvas, data):
        keyPressed(event, data)
        redrawAllWrapper(canvas, data)
    def timerFiredWrapper(canvas, data):
        timerFired(data)
        redrawAllWrapper(canvas, data)
        # pause, then call timerFired again
        canvas.after(data.timerDelay, timerFiredWrapper, canvas, data)
    # Set up data and call init
    class Struct(object): pass
    data = Struct()
    data.width = width
    data.height = height
    data.timerDelay = 50 # milliseconds
    init(data)
    # create the root and the canvas
    root = Tk()
    canvas = Canvas(root, width=data.width, height=data.height)
    canvas.pack()
    # set up events
    root.bind("<Button-1>", lambda event:
                            mousePressedWrapper(event, canvas, data))
    root.bind("<Key>", lambda event:
                            keyPressedWrapper(event, canvas, data))
    timerFiredWrapper(canvas, data)
    # and launch the app
    root.mainloop()  # blocks until window is closed
    print("bye!")
print("bye!")
########################
# Main Program
########################
def play(file):
    """Play the WAV file at *file* synchronously through PyAudio,
    streaming it to the default output device in 1024-frame chunks."""
    CHUNK = 1024
    wf = wave.open(file, 'rb')
    p = pyaudio.PyAudio()
    # Open an output stream matching the WAV file's sample format,
    # channel count and rate.
    stream = p.open(format=p.get_format_from_width(wf.getsampwidth()),
                    channels=wf.getnchannels(),
                    rate=wf.getframerate(),
                    output=True)
    data = wf.readframes(CHUNK)
    # stream.write blocks until the chunk has been queued, so this loop
    # paces itself to real-time playback.
    while len(data) > 0:
        stream.write(data)
        #print("Playing Music")
        data = wf.readframes(CHUNK)
    stream.stop_stream()
    stream.close()
    p.terminate()
def printStuff():
    # Infinite counter printed to stdout -- intended to run on its own
    # thread (see the commented-out t3 in __main__); never returns.
    i = 0
    while(True):
        print(i)
        i += 1
if __name__ == '__main__':
    # Demo: play a WAV file on a background thread while the Tkinter UI
    # runs on the main thread.  ``sentence``/``numThreads``/``threadList``
    # are leftovers from experimentation and are unused.
    sentence = "I am a handsome beast. Word."
    numThreads = 5
    threadList = []
    print("Starting..")
    t1 = threading.Thread(target=play, args=("fadeawayWAV.wav",))
    t1.start()
    # Alternative experiments (second song, counter thread, UI thread):
    # t2 = threading.Thread(target=play, args=("grenadeWAV.wav",))
    # t2.start()
    # t3 = threading.Thread(target=printStuff, args=())
    # t3.start()
    # t4 = threading.Thread(target=run, args=(400, 400,))
    # t4.start()
    run(400, 400)
    #threadList.append(t1)
    # run() blocks until the window closes; by then the song thread may
    # still be alive, hence the thread count report.
    print("Thread Count: " + str(threading.activeCount()))
    print("Exiting..")
| [
"anishkrishnan@Shanmughavs-MBP.wv.cc.cmu.edu"
] | anishkrishnan@Shanmughavs-MBP.wv.cc.cmu.edu |
83ac34c589d3f1a44e27f059c40cebcdad36f63d | b54d6a18bc5e86462c1f085386bc48065db5851c | /targetDF.py | 0c442099cfd980035cfa5306b1d087212fa72489 | [] | no_license | zoshs2/Percolation_Seoul | 5b5b8ebabe186fbc9e265fc190c3d0641e196517 | 69c0aa99d1f7a2fb9259681a1ed63794cbe5ea5c | refs/heads/main | 2023-07-28T20:50:13.393765 | 2021-09-28T13:25:31 | 2021-09-28T13:25:31 | 390,687,544 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,030 | py | import pandas as pd
def targetDF(dataset, YEAR, MONTH, DAY, HOUR=False, MINUTE=False) -> pd.DataFrame:
    '''
    Return a pd.DataFrame with only the rows for the requested date
    (and, optionally, the requested time), index reset to 0..n-1.

    Parameters
    ----------
    dataset : pd.DataFrame
        Must carry PRCS_YEAR / PRCS_MON / PRCS_DAY columns (and, for the
        time filter, PRCS_HH / PRCS_MIN).
    YEAR, MONTH, DAY : int
        Calendar date to select.
    HOUR, MINUTE : int or False
        When both are given the result is narrowed to that exact time.
        The sentinel is ``False`` and the check uses identity, so hour 0
        (midnight) and minute 0 remain valid filter values.

    Example
    -------
    In[0] date_dataset = targetDF(dataset, 2021, 2, 1)
    In[1] time_dataset = targetDF(dataset, 2021, 2, 1, 9, 0)  # 09:00 AM
    '''
    # Build the date mask once instead of duplicating the whole filter
    # expression in both branches.  ``&`` between the element-wise Series
    # comparisons; plain ``and`` for the two scalar sentinel checks (the
    # original used bitwise ``&`` there, which only worked by accident).
    mask = ((dataset['PRCS_YEAR'] == YEAR)
            & (dataset['PRCS_MON'] == MONTH)
            & (dataset['PRCS_DAY'] == DAY))
    if (HOUR is not False) and (MINUTE is not False):
        mask &= (dataset['PRCS_HH'] == HOUR) & (dataset['PRCS_MIN'] == MINUTE)
    return dataset[mask].reset_index(drop=True)
"zoshs27@gmail.com"
] | zoshs27@gmail.com |
3170c04749e484a7ed6bc52dc2aac6b927bdd8f1 | 29790e8faa702dc52ff2ebf905d15ff8c6cfcda9 | /pyvows/assertions/inclusion.py | fc1d51ea05f322686a78849c17c541a6ad3d37a1 | [] | no_license | scraping-xx/pyvows | 0227a2b3f16bcf562acb48902ed3c58d6e616791 | b03e9bed37b93f24eca1dd910c05e78e81969ca2 | refs/heads/master | 2020-12-01T01:15:09.487368 | 2011-08-16T03:36:57 | 2011-08-16T03:36:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 633 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# pyVows testing engine
# https://github.com/heynemann/pyvows
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 Bernardo Heynemann heynemann@gmail.com
from pyvows import Vows
@Vows.assertion
def to_include(topic, expected):
    # Passes when ``expected`` is a member of ``topic`` via the ``in``
    # operator, so topics may be strings, lists, dicts, etc.
    message = "Expected topic(%s) to include %s, but it didn't" % (topic, expected)
    assert expected in topic, message
@Vows.assertion
def not_to_include(topic, expected):
    # Negated counterpart of to_include: passes when ``expected`` is NOT
    # a member of ``topic``.
    message = "Expected topic(%s) not to include %s, but it did" % (topic, expected)
    assert expected not in topic, message
| [
"heynemann@gmail.com"
] | heynemann@gmail.com |
5225cec94bbd84fd01b937451ec2e442f10c6b36 | 64aadced1900d9791099228fa91995c2f8444633 | /python/prices.py | 1865f0e7dfe64d2745c9ef79321c2b43b4be11fc | [] | no_license | ctmakro/playground | 821a8c668b58ebd81cd48309e6f4c6cd16badea7 | 5d6e8e528f1913b6089322ef388213cec5264ae1 | refs/heads/master | 2020-12-25T01:51:12.041611 | 2020-07-14T19:17:24 | 2020-07-14T19:17:24 | 57,165,089 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,044 | py | wg = '''<!-- TradingView Widget BEGIN -->
<div class="tradingview-widget-container">
<div class="asdf">{}</div>
<div class="tradingview-widget-container__widget"></div>
<script type="text/javascript" src="https://s3.tradingview.com/external-embedding/embed-widget-mini-symbol-overview.js" async>
{{
"symbol": "{}",
"width": "280",
"height": "280",
"locale": "en",
"dateRange": "{}",
"colorTheme": "light",
"trendLineColor": "#37a6ef",
"underLineColor": "#e3f2fd",
"isTransparent": false,
"autosize": false,
"largeChartUrl": ""
}}
</script>
</div>
<!-- TradingView Widget END -->'''
print('''
<style>
.tradingview-widget-container{
display:inline-block;
margin:5px;
}
.asdf { text-align:center;}
</style>''')
items = '1/FX_IDC:CNYUSD,FOREXCOM:XAUUSD/31.1034807/FX_IDC:CNYUSD,INDEX:HSI,GOOGL,AAPL'.split(',')
names = 'USD/CNY,CNY/g,HSI,Google,Apple'.split(',')
dataranges = '12m,1m'.split(',')
for n,i in zip(names,items):
for d in dataranges:
print(wg.format(n+(' ({})'.format(d)), i, d))
| [
"ctmakro@gmail.com"
] | ctmakro@gmail.com |
cc978a373d559f39eb6f5ff62ad07a09944bbb05 | c06bbcdc6ca88988faa091056b7a8482454bed70 | /MinimizeNoOfUniqueCharacters.py | 949446120c7421b1ea085f9a66beea00b206c48e | [] | no_license | Pranjal1991-arch/Pranjal_WeatherAPI | 4a9629d1f357271b02650deec29daeebef9c3274 | 8a889cfbac325a096a175059fe98a0d545182923 | refs/heads/master | 2022-08-01T01:54:57.523588 | 2020-05-20T17:00:31 | 2020-05-20T17:00:31 | 265,625,607 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 304 | py | # Python code to print the string with minimum number of unique characters
list = ['abc', 'qr', 'with', 'boywp', 'cabled']
print("Original List is:" + str(list))
dict = {i: len(set(i)) for i in list}
pranjal = min(dict, key=dict.get)
print("The string with minimum unique characters is:" + str(pranjal))
| [
"gogoipranjal280@gmail.com"
] | gogoipranjal280@gmail.com |
2b85ef7d80a4c8b70fdd1fc40484b8bede78a380 | 8a5da2aed1d1dff99bb511eea6a1cd835bec31cb | /deprecated/032/python/032.py | 30451916adc99b30670108c5046166a58c2de9e3 | [] | no_license | EricRovell/project-euler | 653e7eefa517627f2b9a57e596ce93a838c045cf | fd7b47238cb3d9f581a792d525a416f941087002 | refs/heads/master | 2023-05-26T02:56:39.745587 | 2021-06-13T15:11:55 | 2021-06-13T15:11:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 941 | py | # checks if the given number is pandigital for the given range of numbers [left, right]
def is_pandigital(number, left, right):
digits = {str(digit) for digit in range(left, right + 1)}
for digit in str(number):
if digit in digits: digits.remove(digit)
else: return False
return True if len(digits) == 0 else False
# limititng the search space:
# you can get 9-digit number only by:
# (1-digit * 4-digit) OR (2-digit * 3-digit) => both need 4-digit product
# so: multiplicand * multiplier < 10 000
def pandigital_products():
    """Sum of all products whose multiplicand/multiplier/product
    concatenation is 1-through-9 pandigital (Project Euler 32)."""
    products = set()
    for a in range(2, 100):
        # Only 1-digit x 4-digit and 2-digit x 3-digit factor shapes can
        # produce the required nine digits in total.
        first_b = 1234 if a <= 9 else 123
        last_b = 10000 // a
        for b in range(first_b, last_b + 1):
            concatenated = int(f'{a}{b}{a * b}')
            if is_pandigital(concatenated, 1, 9):
                # A set removes products reachable in more than one way.
                products.add(a * b)
    return sum(products)
# tests
print(pandigital_products()) | [
"ericrovell@gmail.com"
] | ericrovell@gmail.com |
60cc8e4fbb8109e2193b0fda9b98ad46979787c9 | 419c5c969fb9662a7e859bbb106b060564e768c8 | /problems/0206.reverse-linked-list/solution.py | b2e93db2769dd9431aaee5c39c67bc229879a51e | [] | no_license | SmoothMaverick/LeetCode-in-Python | 5c3064717e25f22405704f1f0e290c2723d2c42c | e0488979331133a8a29e389e3ad632b9b3ba6e3d | refs/heads/master | 2021-06-23T11:39:40.869419 | 2021-01-22T23:50:14 | 2021-01-23T01:42:39 | 182,631,478 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 682 | py | class ListNode:
    def __init__(self, x):
        """Store the payload value; the node starts detached (next is None)."""
        self.val = x
        self.next = None
class Iterative:
    """Reverse a singly linked list with an explicit pointer walk."""

    def reverseList(self, head: ListNode) -> ListNode:
        """Return the head of the reversed list; O(n) time, O(1) extra space."""
        reversed_head = None
        node = head
        while node is not None:
            # Detach `node` from the remainder and push it onto the
            # already-reversed prefix; all three rebinds happen from the
            # pre-mutation values on the right-hand side.
            node.next, reversed_head, node = reversed_head, node, node.next
        return reversed_head
class Recursive:
    """Reverse a singly linked list by recursing to the tail first."""

    def reverseList(self, head: ListNode) -> ListNode:
        """Return the head of the reversed list; O(n) time, O(n) call stack."""
        # Guard: an empty list or a single node is already reversed.
        if head is None or head.next is None:
            return head
        new_head = self.reverseList(head.next)
        # After the recursive call, `head.next` is the tail of the reversed
        # suffix: hook `head` behind it and terminate the list.
        head.next.next = head
        head.next = None
        return new_head
| [
"5660582+SmoothMaverick@users.noreply.github.com"
] | 5660582+SmoothMaverick@users.noreply.github.com |
004aea04e057d0b901f90d80584e3a1b4f9ef2d9 | 5ba36aea5750d5b828ed5c79aa497c7e37ea5bbc | /muaban/middlewares.py | 9736adebfe2fea66e25768212a8583ce0df76287 | [
"MIT"
] | permissive | miamor/HouseScraper | ac19277297339bcc1fe445a640429de027a6982a | 35515ad89e3658c76e59cdd9f7512c7f3510e3f8 | refs/heads/master | 2021-07-23T01:31:22.825375 | 2017-10-31T17:14:23 | 2017-10-31T17:14:23 | 109,028,478 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,880 | py | # -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
class muabanSpiderMiddleware(object):
    """Scrapy spider middleware for the muaban crawler.

    Fix: every processing hook except ``from_crawler``/``spider_opened``
    was declared without ``self``, so Scrapy would have bound the instance
    to the ``response``/``start_requests`` parameter at call time.
    """
    # Not all methods need to be defined. If a method is not defined,
    # scrapy acts as if the spider middleware does not modify the
    # passed objects.

    @classmethod
    def from_crawler(cls, crawler):
        # This method is used by Scrapy to create your spiders.
        s = cls()
        crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
        return s

    def process_spider_input(self, response, spider):
        # Called for each response that goes through the spider
        # middleware and into the spider.
        # Should return None or raise an exception.
        return None

    def process_spider_output(self, response, result, spider):
        # Called with the results returned from the Spider, after
        # it has processed the response.
        # Must return an iterable of Request, dict or Item objects.
        for i in result:
            yield i

    def process_spider_exception(self, response, exception, spider):
        # Called when a spider or process_spider_input() method
        # (from other spider middleware) raises an exception.
        # Should return either None or an iterable of Response, dict
        # or Item objects.
        pass

    def process_start_requests(self, start_requests, spider):
        # Called with the start requests of the spider, and works
        # similarly to the process_spider_output() method, except
        # that it doesn't have a response associated.
        # Must return only requests (not items).
        for r in start_requests:
            yield r

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
| [
"miamorwest@gmail.com"
] | miamorwest@gmail.com |
7bebc72f0d2fad3b2561a09cf189327f7e627143 | 74d3674b94186c5a0462049797b15a210ae0871d | /backend/app.py | 61035ca0013096bb784f901b770a1bcb062140eb | [
"MIT"
] | permissive | muhammetssen/dipSetup | 6dd3a6cc0a8d6384f432470ed5442e4c7b047913 | 1b4e00cc023fae99e1a5988d3836a379f8c496af | refs/heads/main | 2023-06-24T17:39:43.672461 | 2021-07-21T09:05:45 | 2021-07-21T09:05:45 | 388,057,192 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 825 | py | import sys
import os
# run from root dir
sys.path.append(".")
def register_admin():
    """Create or refresh the default admin account in the "user" collection."""
    from backend_proxy.db.mongoDB import MongoConn, MongoDB
    import bcrypt

    # TODO: read from os.environ["DIP_DEMO_ADMIN_PASS"] instead of hardcoding.
    password = "DIP_DEMO_ADMIN_PASS"
    hashed = bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt())
    admin = {
        "username": "admin",
        "password": hashed,
        "roles": ["admin"],
        "tools": [],
        "email": "tabilab.dip@gmail.com",
    }
    users = MongoDB(MongoConn(), "user")
    selector = {"username": admin["username"]}
    # Upsert semantics: insert on first run, overwrite on later runs.
    if users.find(selector) is None:
        users.create(admin)
    else:
        users.update(selector, admin)
# NOTE(review): the main-guard is commented out, so importing this module
# registers the admin user AND starts the Flask server as a side effect —
# presumably intentional for the deployment entry point; confirm.
#if __name__ == '__main__':
from backend_proxy.api.endpoints import *
register_admin()
# `app` is expected to be provided by the wildcard endpoints import above.
app.run(host='0.0.0.0')
| [
"enmuhammet23@gmail.com"
] | enmuhammet23@gmail.com |
96a91761ad984b21cf44ecdfb8fd9d73222bffb5 | 30566c1ef1addabec6527fc7f3be7a1980bfba9a | /video_code/tests/unit/models/user_test.py | b83557dae4ad6957c1600e2f3316fa7c9d02452d | [] | no_license | PAKalucki/store-rest-api-test | 7eae71ece4b191aee955bc4f02e43f585f766ad3 | 274249e3f8605af8560ff21e19f4f26cee4c38fa | refs/heads/master | 2020-03-16T16:25:49.910872 | 2018-05-09T17:06:22 | 2018-05-09T17:06:22 | 132,783,702 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 500 | py | from video_code.models.user import UserModel
from video_code.tests.base import BaseTest
class UserTest(BaseTest):
    """Unit tests for the UserModel constructor."""

    def test_create_user(self):
        """A freshly constructed user keeps the credentials it was given."""
        created = UserModel('test', 'abcd')
        self.assertEqual(
            created.username,
            'test',
            "The name of the user after creation does not equal the constructor argument.")
        self.assertEqual(
            created.password,
            'abcd',
            "The password of the user after creation does not equal the constructor argument.")
| [
"jelitospudziszin@gmail.com"
] | jelitospudziszin@gmail.com |
4251ed99e6c0aed660ad192fe39644c5f8348a6c | 2f87f95bbaa7d50dd55b19af9db586f28d22642b | /career/apps.py | 2a2e0fa76f236605cba97aed670b6127b63eabde | [] | no_license | willyborja95/ApiPaginaWeb | 0bf136961c320a5edca4f857d95bac1d316db09b | dda607cda39fe3d4aaceb40d63bc20b91ebb6efd | refs/heads/master | 2022-12-11T05:32:12.160881 | 2022-12-08T15:04:09 | 2022-12-08T15:04:09 | 215,600,253 | 2 | 0 | null | 2022-12-08T15:10:16 | 2019-10-16T16:59:35 | Python | UTF-8 | Python | false | false | 104 | py | from django.apps import AppConfig
class CoordinatorAppConfig(AppConfig):
    """Django AppConfig registering the coordinator app."""
    # NOTE(review): the file lives under `career/` but registers the app
    # label 'coordinator_app' — verify this matches INSTALLED_APPS.
    name = 'coordinator_app'
| [
"renatojobal@gmail.com"
] | renatojobal@gmail.com |
e3af53fba43b0b71ce8efca13bf2a89e6455544d | cea45595be3e9ff0daa09b4443c7220368e5c512 | /catalog/forms.py | d17b59b3d29f1d2a4beed6697d06d27d5e996bb9 | [] | no_license | VladyslavHnatchenko/mdn | 7b65ecf2e73eff2533aae4ffe5ad6a5a928750d9 | f74736aeaf8c4b8ca51889c1a00571cb07f6dba2 | refs/heads/master | 2020-04-18T02:16:08.622726 | 2019-02-15T13:37:49 | 2019-02-15T13:37:49 | 167,149,898 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 919 | py | import datetime
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
class RenewBookForm(forms.Form):
    """Form for picking a new renewal (due-back) date for a book loan."""

    renewal_date = forms.DateField(
        help_text="Enter a date between now and 4 weeks (default 3).")

    def clean_renewal_date(self):
        """Validate that the date lies within [today, today + 4 weeks]."""
        data = self.cleaned_data['renewal_date']

        # Reject dates already in the past.
        if data < datetime.date.today():
            raise ValidationError(_('Invalid date - renewal in past'))

        # Reject dates beyond the allowed four-week window.
        if data > datetime.date.today() + datetime.timedelta(weeks=4):
            raise ValidationError(_('Invalid date - renewal more than 4 weeks ahead'))

        # Cleaned data must always be returned.
        return data
| [
"hnatchenko.vladyslav@gmail.com"
] | hnatchenko.vladyslav@gmail.com |
23b5f6e44bfa76c62daa7cde95d1d6423198972e | bac5acee40eb1ae4a67e2e4c5b6b7d2dee8a56b2 | /2020年/9月17号/page_object.py | cd3fe7024b64b2f85eb6b7ab935d262f07077ebf | [] | no_license | mzp604183841/LearningSelenium | 35399baa7491f253eaff1d9d6432ddc60d8a2dc9 | 44386de1e53ef4dbe717e1ba208fd638b65c2d74 | refs/heads/master | 2023-02-13T22:08:22.788387 | 2021-01-12T05:59:07 | 2021-01-12T05:59:07 | 267,318,156 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,777 | py | from selenium import webdriver
from selenium.webdriver.common.by import By
import time
class Page(object):
    """Base page object: wraps a WebDriver plus the site's base URL."""

    login_url = 'http://www.126.com'

    def __init__(self, webdriver, base_url=login_url):
        self.driver = webdriver
        self.base_url = base_url
        self.timeout = 30  # seconds, available to subclasses for waits

    def on_page(self):
        """True when the browser sits at this page's full URL
        (subclasses must define `url`)."""
        expected = self.base_url + self.url
        return self.driver.current_url == expected

    def _open(self, url):
        """Navigate to base_url + url and verify the browser arrived."""
        target = self.base_url + url
        self.driver.get(target)
        assert self.on_page(), 'Did not land on %s' % target

    def open(self):
        """Navigate to this page's own `url`."""
        self._open(self.url)

    def find_element(self, *loc):
        """Delegate a locator tuple lookup to the underlying driver."""
        return self.driver.find_element(*loc)
class LoginPage(Page):
    """Page object for the 126.com login form."""
    url = '/'
    # Locators
    username_loc = (By.ID, 'idInput')
    password_loc = (By.ID, 'pwdInput')
    submit_log = (By.ID, 'LoginBtn')
    # Actions
    def type_username(self, username):
        """Type the account name into the username field."""
        self.find_element(*self.username_loc).send_keys(username)
    def type_password(self, password):
        """Type the password into the password field."""
        self.find_element(*self.password_loc).send_keys(password)
    def submit(self):
        """Click the login button.

        Fix: the original clicked the *password field* locator
        (`password_loc`) instead of the login button (`submit_log`),
        so the form was never actually submitted.
        """
        self.find_element(*self.submit_log).click()
def test_uers_login(driver, username, password):
    """Drive the login flow end to end with the given credentials.

    (Function name kept as-is — including the "uers" typo — so existing
    callers keep working.)
    """
    page = LoginPage(driver)
    page.open()
    page.type_username(username)
    page.type_password(password)
    page.submit()
def main():
    """Log into 126.com with a fixed test account and verify the shown UID.

    Fix: the driver was created *inside* the try block, so a failure to
    start Chrome raised a second NameError from the ``finally`` clause
    (``driver`` was still unbound when ``driver.close()`` ran).
    """
    driver = webdriver.Chrome()
    try:
        username = '604183841'
        password = 'qwe@123qwe'
        test_uers_login(driver, username, password)
        time.sleep(3)  # crude wait for the post-login page to render
        # NOTE(review): find_element_by_xpath is removed in Selenium 4;
        # migrate to driver.find_element(By.XPATH, ...) when upgrading.
        text = driver.find_element_by_xpath("//span[@id='spnUid']").text
        assert (text == '604183841@126.com'), '用户名称不匹配,登录失败'
    finally:
        driver.close()
if __name__ == '__main__':
    # Run the login demo only when executed as a script.
    main()
| [
"604183841@qq.com"
] | 604183841@qq.com |
444b728370c2918aaedc996832134d855adea7c4 | 9aa9af3f83a955c188eb1b5e6b92328f653988cd | /Work89_人工神经网络/Work7_2 copy.py | 7a1f24b27133884366f693b50747d5562291ee23 | [] | no_license | EkZzz/NN | a366f03532c98c9a1c8f17cec64469074575b398 | cfec233ea1454d9690f6216f7744391f94e273bf | refs/heads/master | 2023-06-15T17:08:07.620729 | 2021-07-19T13:12:19 | 2021-07-19T13:12:19 | 387,462,864 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,541 | py | import tensorflow as tf
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# --- Data download: Iris train/test CSVs from the TensorFlow data bucket ---
TRAIN_URL = "http://download.tensorflow.org/data/iris_training.csv"
train_path = tf.keras.utils.get_file(TRAIN_URL.split('/')[-1], TRAIN_URL)
TEST_URL = "http://download.tensorflow.org/data/iris_test.csv"
test_path = tf.keras.utils.get_file(TEST_URL.split('/')[-1], TEST_URL)
# First CSV row is the header.
df_iris_train = pd.read_csv(train_path, header = 0)
df_iris_test = pd.read_csv(test_path, header = 0)
iris_train = np.array(df_iris_train)
iris_test = np.array(df_iris_test)
# Columns 0-3 hold the four features; column 4 is the class label (0..2).
x_train = iris_train[:,0:4]
x_test = iris_test[:,0:4]
y_train = iris_train[:,4]
y_test = iris_test[:,4]
# Center each feature (mean removal only — no variance scaling).
x_train = x_train - np.mean(x_train, axis = 0)
x_test = x_test - np.mean(x_test, axis = 0)
X_train = tf.cast(x_train, tf.float32)
X_test = tf.cast(x_test, tf.float32)
# One-hot encode the 3 classes for categorical cross-entropy.
Y_train = tf.one_hot(tf.constant(y_train, dtype = tf.int32), 3)
Y_test = tf.one_hot(tf.constant(y_test, dtype = tf.int32), 3)
# Hyperparameters. NOTE(review): `iter` shadows the built-in of the same name.
learn_rate = 0.5
iter = 50
display_step = 10
# Fixed seed so the initial weights are reproducible.
np.random.seed(612)
W = tf.Variable(np.random.randn(4,3),dtype=tf.float32)
B = tf.Variable(np.zeros([3]), dtype=tf.float32)
# Per-epoch history, used for the plots below.
acc_train = []
acc_test = []
cce_train = []
cce_test = []
# Softmax-regression training loop via plain gradient descent.
for i in range(0,iter+1):
    with tf.GradientTape() as tape:
        PRED_train = tf.nn.softmax(tf.matmul(X_train, W) + B)
        Loss_train = tf.reduce_mean(tf.keras.losses.categorical_crossentropy(y_true=Y_train, y_pred=PRED_train))
        PRED_test = tf.nn.softmax(tf.matmul(X_test,W) + B)
        Loss_test = tf.reduce_mean(tf.keras.losses.categorical_crossentropy(y_true=Y_test, y_pred=PRED_test))
    # Accuracy = fraction of argmax predictions matching the labels.
    accuracy_train = tf.reduce_mean(tf.cast(tf.equal(tf.argmax(PRED_train.numpy(), axis=1),y_train),tf.float32))
    accuracy_test = tf.reduce_mean(tf.cast(tf.equal(tf.argmax(PRED_test.numpy(), axis=1), y_test), tf.float32))
    acc_train.append(accuracy_train)
    acc_test.append(accuracy_test)
    cce_train.append(Loss_train)
    cce_test.append(Loss_test)
    # Gradients of the *training* loss only; test data never updates W/B.
    grads = tape.gradient(Loss_train, [W,B])
    W.assign_sub(learn_rate*grads[0])
    B.assign_sub(learn_rate*grads[1])
    if i % display_step == 0:
        print("i:%i, TrainAcc:%f, TrainLoss:%f, TestAcc:%f, TestLoss:%f" % (i, accuracy_train, Loss_train, accuracy_test, Loss_test))
# Side-by-side plots: train history (left) and test history (right).
plt.figure(figsize=(5,3))
plt.subplot(121)
plt.plot(cce_train, c='b', label='Losstrain')
plt.plot(acc_train, c='r', label='acctrain')
plt.ylim((0, 1))
plt.legend()
plt.subplot(122)
plt.plot(cce_test, c='g', label='Losstest')
plt.plot(acc_test, c='r', label='acctest')
plt.ylim((0, 1))
plt.legend()
plt.show() | [
"1746133694@qq.com"
] | 1746133694@qq.com |
c736c6c24d2e0d2f2e77cce6d33de185e00638b8 | 2e13f4af4f00a9de41883e6e50f220ba161278cc | /Basics II/IntroOOP.py | 468f9f0b99232455855d954918c76c08ea4a5a5f | [
"MIT"
] | permissive | marinaoliveira96/python-exercises | 53c7964ffbfd58fbc1477eba2dffcdf9f50182ce | 13fc0ec30dec9bb6531cdeb41c80726971975835 | refs/heads/master | 2023-02-16T18:52:29.042708 | 2021-01-06T19:44:38 | 2021-01-06T19:44:38 | 322,955,205 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 189 | py | class Car():
wheels = 4
doors = 4
windows = 4
seats = 4
porche = Car() #instanciate
porche.color = 'Red' #properties
ferrari = Car()
ferrari.color = 'Yellow'
print(ferrari.color) | [
"marina09oliveira@gmail.com"
] | marina09oliveira@gmail.com |
59f47f5e4ae23a1d04ce88c8b002b9a486c1a0f7 | b8d056355281d031915b7350c365321abaa2bdab | /app_python/aula11_personalizar_excecao.py | 5d5b0d0342d8814a5390266700d347db1dbbf7a8 | [] | no_license | ruancortez/aulas_python | 6807811b896671681f1881632a83fc7966971194 | a2462c05d175344248cb9ef31ccea34599cbf76d | refs/heads/master | 2022-11-30T19:21:16.065974 | 2020-08-09T22:27:28 | 2020-08-09T22:27:28 | 286,325,847 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 542 | py | class Error(Exception):
pass
class ImputError(Error):
    """Raised for an out-of-range grade (class-name typo kept: callers use it).

    Fix: the original never called ``Exception.__init__``, leaving
    ``args`` empty, so ``print(ex)``/``str(ex)`` produced an empty
    string instead of the message.
    """
    def __init__(self, message):
        super().__init__(message)
        self.message = message
# Keep prompting until the user enters a valid integer grade in [0, 10].
while True:
    try:
        x = int(input('Entre com uma nota de 0 a 10: '))
        print(x)
        # Out-of-range grades raise the custom exception and re-prompt.
        if x > 10:
            raise ImputError('Nota não pode ser maior que 10')
        elif x < 0:
            raise ImputError('Nota não pode ser menor que 0')
        break
    except ValueError:
        # int() failed: the input was not numeric.
        print('Valor inválido. Deve-se digitar apenas números.')
    except ImputError as ex:
        print(ex)
| [
"ruancortez@creci.org.br"
] | ruancortez@creci.org.br |
371e2253a9dfed238c59e6c0d05d3ff759ba2f77 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/coverage-big-1134.py | ddbf3edc9d3761abdb1aadc07c33a7eef98fd2b1 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,176 | py | count:int = 0
count2:int = 0
count3:int = 0
count4:int = 0
count5:int = 0
def foo(s: str) -> int:
return len(s)
def foo2(s: str, s2: str) -> int:
return len(s)
def foo3(s: str, s2: str, s3: str) -> int:
return len(s)
def foo4(s: str, s2: str, s3: str, s4: str) -> int:
return len(s)
def foo5(s: str, s2: str, s3: str, s4: str, s5: str) -> int:
return len(s)
class bar(object):
p: bool = True
def baz(self:"bar", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar2(object):
p: bool = True
p2: bool = True
def baz(self:"bar2", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar2", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar3(object):
p: bool = True
p2: bool = True
p3: bool = True
def baz(self:"bar3", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
$Block
return "Nope"
def baz2(self:"bar3", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar3", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar4(object):
p: bool = True
p2: bool = True
p3: bool = True
p4: bool = True
def baz(self:"bar4", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar4", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar4", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz4(self:"bar4", xx: [int], xx2: [int], xx3: [int], xx4: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar5(object):
p: bool = True
p2: bool = True
p3: bool = True
p4: bool = True
p5: bool = True
def baz(self:"bar5", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar5", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar5", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz4(self:"bar5", xx: [int], xx2: [int], xx3: [int], xx4: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz5(self:"bar5", xx: [int], xx2: [int], xx3: [int], xx4: [int], xx5: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
x5:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
y5:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
def qux5(y: int, y2: int, y3: int, y4: int, y5: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
nonlocal x5
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
print(bar().baz([1,2]))
| [
"647530+Virtlink@users.noreply.github.com"
] | 647530+Virtlink@users.noreply.github.com |
90146830bfe90f1fccd9b4b89f96401860d91053 | f445450ac693b466ca20b42f1ac82071d32dd991 | /generated_tempdir_2019_09_15_163300/generated_part009372.py | 79176e5034b71cfcfb2a2bf71973eb4b7665d2c3 | [] | no_license | Upabjojr/rubi_generated | 76e43cbafe70b4e1516fb761cabd9e5257691374 | cd35e9e51722b04fb159ada3d5811d62a423e429 | refs/heads/master | 2020-07-25T17:26:19.227918 | 2019-09-15T15:41:48 | 2019-09-15T15:41:48 | 208,357,412 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,292 | py | from sympy.abc import *
from matchpy.matching.many_to_one import CommutativeMatcher
from matchpy import *
from matchpy.utils import VariableWithCount
from collections import deque
from multiset import Multiset
from sympy.integrals.rubi.constraints import *
from sympy.integrals.rubi.utility_function import *
from sympy.integrals.rubi.rules.miscellaneous_integration import *
from sympy import *
class CommutativeMatcher77334(CommutativeMatcher):
    """Machine-generated matchpy commutative matcher (SymPy Rubi codegen).

    Presumably regenerated rather than edited by hand — confirm before
    modifying the tables below.
    """
    _instance = None
    # Pre-compiled pattern table: pattern id -> (state, constant multiset,
    # sequence-variable specs) as emitted by the generator.
    patterns = {
        0: (0, Multiset({}), [
            (VariableWithCount('i3.3.1.0', 1, 1, None), Mul),
            (VariableWithCount('i3.3.1.0_1', 1, 1, S(1)), Mul)
        ])
    }
    subjects = {}
    subjects_by_id = {}
    bipartite = BipartiteGraph()
    associative = Mul
    max_optional_count = 1
    anonymous_patterns = set()
    def __init__(self):
        self.add_subject(None)
    @staticmethod
    def get():
        # Lazily-created process-wide singleton accessor.
        if CommutativeMatcher77334._instance is None:
            CommutativeMatcher77334._instance = CommutativeMatcher77334()
        return CommutativeMatcher77334._instance
    @staticmethod
    def get_match_iter(subject):
        # Generated with no match states: the bare `return` before `yield`
        # makes this a generator that is exhausted immediately.
        subjects = deque([subject]) if subject is not None else deque()
        subst0 = Substitution()
        # State 77333
        return
        yield
from collections import deque | [
"franz.bonazzi@gmail.com"
] | franz.bonazzi@gmail.com |
b708556634bef720d9b495751dc8e9907a72d1e2 | aa68a22d6403ffb93e89b828c075828960dcd7e9 | /app/smalltool/modifier.py | dcddab194fd8cedb05d57a90caecb42e366033e5 | [] | no_license | daigouwei/Python3 | 96b2f78c3471b88658470a0533659ae1aba4aa68 | 29cfa2ba334597f301abad53f15f3ceb40f94245 | refs/heads/master | 2020-12-23T20:30:08.507298 | 2017-11-09T09:20:08 | 2017-11-09T09:20:08 | 92,568,427 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,293 | py | #!/usr/bin/env
#-*- coding:utf-8 -*-
import subprocess
import os
import re
def autoAlter():
    """Interactively batch-rename movie files in a folder to '<name><NN>.<ext>'.

    NOTE(review): relies on `ls` + `sudo mv` through subprocess (Unix only)
    and, for non-matching files, expects the user to answer 'y:<newname>' —
    `command[2:]` assumes exactly that two-character prefix; confirm.
    """
    folder = input('请输入文件夹路径: ')#'/Users/daigouwei/Downloads/两生花'
    name = input('请输入要修改成的电影名字: ')#'两生花'
    formatt = input('请输入电影格式: ') #'mp4'
    if not os.path.isdir(folder):
        print('没有该目录,请查实...')
        return
    # Match filenames containing the target name followed by a two-digit
    # episode number and ending with the requested extension.
    pattern = re.compile(r'.*?%s(\d{2}).*?%s$' % (name, formatt), re.S)
    movieName = subprocess.check_output(['ls', folder]).decode()
    movieList = movieName.split('\n')
    for movie in movieList:
        if movie != '':
            result = pattern.search(movie)
            if result:
                # Matched: rename to '<name><NN>.<ext>'.
                subprocess.check_output(['sudo', 'mv', folder+'/'+movie, folder+'/'+name+result.group(1)+r'.'+formatt])
            else:
                # No match: ask whether (and how) to rename this file.
                print('该目录下有文件%s不匹配,确认修改请输入y:要被修改成文件名,不修改请输入n:' % movie)
                command = input()
                if command == 'n':
                    continue
                else:
                    # Strip the leading 'y:' and use the rest as the new name.
                    subprocess.check_output(['sudo', 'mv', folder+'/'+movie, folder+'/'+command[2:]])
    print('大吉大利,今晚吃鸡...')
if __name__ == '__main__':
autoAlter()
| [
"daigouwei@gmail.com"
] | daigouwei@gmail.com |
6399568472f674133ea232ed648f413406c0c095 | fd15d1a9d0fdf6908bb7c8d1d4490bb6cf817d1f | /CareerFlash/migrations/0012_auto_20190918_0307.py | 4a906d6dd1216a9a77ebe27977af08c7ec4755fd | [] | no_license | stanleysh/Career-Flash | 8bca183ae2576c0aae7dbdb62c2abd60e8890e6d | 6e062afb5ef8959141475e1d73af431a0cf047b4 | refs/heads/master | 2020-08-05T06:23:26.427944 | 2019-09-19T17:34:23 | 2019-09-19T17:34:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 402 | py | # Generated by Django 2.2.5 on 2019-09-18 03:07
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration: make the organization name unique.

    (The model name 'orginization' is misspelled in the app itself and is
    reproduced here unchanged.)
    """
    dependencies = [
        ('CareerFlash', '0011_orginization'),
    ]
    operations = [
        # Enforce uniqueness on the name column (max_length unchanged).
        migrations.AlterField(
            model_name='orginization',
            name='name',
            field=models.CharField(max_length=255, unique=True),
        ),
    ]
| [
"adam.cote66@gmail.com"
] | adam.cote66@gmail.com |
66b447b3b55a5fce4e6553f39ecaa2294b17bcc3 | c27c7de35f28dfbb899b06f8493df67f4ff9c175 | /多线程测试HKR.py | 1523f787f8bc8f193181a69e6f851ff2c597d970 | [] | no_license | lanpangzi607/store1 | 04e000969bd6cf5eaaa405b04834fb86d1a983c1 | a5c5c58739741c7f196676bc70c026aaad258109 | refs/heads/master | 2023-08-24T21:08:45.473206 | 2021-10-29T03:48:14 | 2021-10-29T03:48:14 | 405,877,046 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 843 | py | import threading
from HTMLTestRunner import HTMLTestRunner
import unittest
import os
from threading import Thread
class test(threading.Thread):
    """Thread that discovers one unittest file pattern and writes an HTML report.

    Callers assign per-instance `pattern` (test-file glob) and `file`
    (report path) after construction; the class-level empty strings are
    only fallbacks.
    """
    pattern = ''
    file = ''
    def run(self) -> None:
        # Discover tests under the current working directory.
        tests = unittest.defaultTestLoader.discover(os.getcwd(),pattern=self.pattern)
        runner = HTMLTestRunner.HTMLTestRunner(
            title= "HKR测试报告",
            description='',
            verbosity=1,
            # NOTE(review): this file handle is never closed explicitly —
            # presumably the runner/GC flushes it; confirm.
            stream=open(file=self.file,mode="w+",encoding="utf-8")
        )
        runner.run(tests)
# Run the three login test suites concurrently, one HTML report each.
r1 = test()
r2 = test()
r3 = test()
# Per-instance attributes shadow the class-level defaults above.
r1.pattern = "Testlogin.py"
r2.pattern = "Testlogin1.py"
r3.pattern = "Testlogin2.py"
r1.file = "账号密码正确的期望结果.html"
r2.file = "账号或密码不正确的结果.html"
r3.file = "账号或密码不能为空的结果.html"
r1.start()
r2.start()
r3.start() | [
"noreply@github.com"
] | noreply@github.com |
7e8794eac421517bbadf351d3e6e64d9e0f0d469 | 17f68981e00001d9c678b99568dead343b5bb3ec | /particles/Particle.py | 722a90493fe528945fc29cd782d6aca2a40978b2 | [] | no_license | KodyVanRy/Particles | 89e3cd50af7eff6cd28e018acaee251081ee9286 | bb4e29e8b17eea3ad7e16868394bcb23f50895ff | refs/heads/master | 2021-05-29T10:04:46.675586 | 2015-04-05T05:32:29 | 2015-04-05T05:32:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,280 | py | __author__ = 'Kody'
from pygame.sprite import Sprite
from pygame.surface import Surface
from pygame.locals import SRCALPHA
import time
from pygame import transform
import random
# Constants used to make code more readable and easier to edit
# String keys for the Particle.attributes dict (see setup_attributes for
# the defaults of most of them).
IMAGE = "image"
MAX_SIZE = "maxsize"
MIN_SIZE = "minsize"
ROTATE = "rotate"
GRAVITY_X = "gravity_x"
GRAVITY_Y = "gravity_y"
GRAVITY_X_RANGE = "gravity_x_range"
GRAVITY_Y_RANGE = "gravity_y_range"
SPEED_X = "speed_x"
SPEED_Y = "speed_y"
SPEED_X_RANGE = "speed_x_range"
SPEED_Y_RANGE = "speed_y_range"
ZERO_OUT_X = "zero_x"
SLOWLY_DISAPPEAR = "slowly_disappear"
TOTAL_LIFE = "total_life"
REMAINING_LIFE = "remaining_life"
# NOTE(review): MAX_LIFE/MIN_LIFE have no default in setup_attributes —
# presumably used by code outside this excerpt; confirm.
MAX_LIFE = "max_life"
MIN_LIFE = "min_life"
class Particle(Sprite):
def __init__(self, pos, group, **kwargs):
# Use the key word args to add more depth to the particle (i.e. Color tinting, Min Size, Max Size, etc)
self.attributes = {}
self.setup_attributes()
self.use_kwargs(**kwargs)
self.setup_speed()
self.setup_gravity()
# Init sprite
Sprite.__init__(self, group)
if self.attributes[ROTATE]:
self.rotationAmount = random.randint(-90, 90)
self.rotationSpeed = random.randint(-10, 10)
else:
self.rotationAmount = 0
self.rotationSpeed0 = 0
# Create an image of the block, and fill it with a color.
# This could also be an image loaded from the disk.
rand_size = random.randint(self.attributes[MIN_SIZE], self.attributes[MAX_SIZE])
self.image = Surface((rand_size, rand_size), SRCALPHA)
self.originalImage = self.image
# Fetch the rectangle object that has the dimensions of the image
# Update the position of this object by setting the values of rect.x and rect.y
self.rect = self.image.get_rect()
self.rect.x = pos[0]
self.rect.y = pos[1]
# Create Speed for acceleration and physics
self.speedY = -2.0
# Draw on the sprite
self.draw_image()
self.adjust_as_necessary()
    def update(self, delta):
        """Advance the particle by `delta` (a time step): rotation,
        position, alpha fade, gravity, and remaining life."""
        if self.attributes[ROTATE]:
            # Rotate from the pristine image each frame to avoid
            # accumulating raster distortion.
            self.image = transform.rotate(self.originalImage, self.rotationAmount)
            # self.rect.width = self.image.get_width()
            #self.rect.height = self.image.get_height()
            self.rotationAmount += self.rotationSpeed * delta
        self.rect.y += int(self.attributes[SPEED_Y] * delta)
        self.rect.x += int(self.attributes[SPEED_X] * delta)
        # Helpers defined later in the class (outside this excerpt).
        self.update_alpha(delta)
        self.update_gravity(delta)
        self.update_life(delta)
def use_kwargs(self, **kwargs):
for key, value in kwargs.items():
if key not in self.attributes.keys():
continue
self.attributes[key] = value
self.image = self.attributes["image"]
def setup_attributes(self):
self.attributes[IMAGE] = None
self.attributes["color"] = None
self.attributes[MIN_SIZE] = 5
self.attributes[MAX_SIZE] = 20
self.attributes[SPEED_X] = 0
self.attributes[SPEED_Y] = 0
self.attributes[SPEED_X_RANGE] = []
self.attributes[SPEED_Y_RANGE] = []
self.attributes[GRAVITY_X] = 0
self.attributes[GRAVITY_Y] = 0
self.attributes[GRAVITY_X_RANGE] = []
self.attributes[GRAVITY_Y_RANGE] = []
self.attributes[ROTATE] = False
self.attributes[ZERO_OUT_X] = False
self.attributes[SLOWLY_DISAPPEAR] = False
self.attributes[REMAINING_LIFE] = 0
self.attributes[TOTAL_LIFE] = 0
def setup_speed(self):
if len(self.attributes[SPEED_Y_RANGE]) == 2:
while int(self.attributes[SPEED_Y] * 10) == 0:
self.attributes[SPEED_Y] = random.randint(self.attributes[SPEED_Y_RANGE][0] * 100,
self.attributes[SPEED_Y_RANGE][1] * 100) / 100.0
if len(self.attributes[SPEED_X_RANGE]) == 2:
while int(self.attributes[SPEED_X] * 10) == 0:
self.attributes[SPEED_X] = random.randint(self.attributes[SPEED_X_RANGE][0] * 100,
self.attributes[SPEED_X_RANGE][1] * 100) / 100.0
return
    def setup_gravity(self):
        """Used to spread the gravity over the range given by GRAVITY_X_RANGE
        (and GRAVITY_Y_RANGE). Rolls a random gravity per axis in hundredths,
        then -- when ZERO_OUT_X is set -- adjusts horizontal gravity so it
        opposes the horizontal speed instead of reinforcing it.
        """
        if len(self.attributes[GRAVITY_X_RANGE]) == 2:
            self.attributes[GRAVITY_X] = random.randint(self.attributes[GRAVITY_X_RANGE][0] * 100,
                                                        self.attributes[GRAVITY_X_RANGE][1] * 100) / 100.0
            if self.attributes[ZERO_OUT_X]:
                # Gravity and speed both negative would accelerate leftward
                # forever; flip gravity to the range's low bound instead.
                # NOTE(review): picking GRAVITY_X_RANGE[0] when range[1] > 0
                # looks inverted (one would expect a positive value here) --
                # confirm intended behavior.
                if self.attributes[GRAVITY_X] < 0 and self.attributes[SPEED_X] < 0:
                    self.attributes[GRAVITY_X] = self.attributes[GRAVITY_X_RANGE][0] if self.attributes[GRAVITY_X_RANGE][1] > 0 else 0.1
                elif self.attributes[GRAVITY_X] == 0:
                    # Zero gravity would never damp the speed; use a tenth of
                    # the opposing speed so the particle slows down.
                    self.attributes[GRAVITY_X] = - self.attributes[SPEED_X] / 10.0
        if len(self.attributes[GRAVITY_Y_RANGE]) == 2:
            self.attributes[GRAVITY_Y] = random.randint(self.attributes[GRAVITY_Y_RANGE][0] * 100,
                                                        self.attributes[GRAVITY_Y_RANGE][1] * 100) / 100.0
    def update_gravity(self, delta):
        """Integrate gravity into the particle's velocity for this frame.

        Vertical speed always accelerates by GRAVITY_Y. Horizontal speed
        accelerates by GRAVITY_X, except when ZERO_OUT_X is set: then the
        horizontal component is only damped toward zero and pinned at exactly
        0 once one more step would overshoot (prevents sign-flip jitter).
        With ZERO_OUT_X set and speed already 0, the speed stays 0.
        """
        # UPDATE VERTICAL SPEED BASED ON VERTICAL GRAVITY
        self.attributes[SPEED_Y] += self.attributes[GRAVITY_Y] * delta
        # UPDATE HORIZONTAL SPEED BASED ON HORIZONTAL GRAVITY
        # IF HORIZONTAL GRAVITY IS PULLING TOWARDS THE RIGHT
        if self.attributes[SPEED_X] > 0:
            if self.attributes[ZERO_OUT_X]:  # IF THE HORIZONTAL SPEED SHOULD SLOW DOWN
                if self.attributes[SPEED_X] + self.attributes[GRAVITY_X] * delta < 0:  # IF SPEED AFTER TAKING AWAY IS LESS THAN 0
                    self.attributes[SPEED_X] = 0  # SET SPEED TO AVOID JIGGLING MOTION OR OVERSHOT
                else:
                    self.attributes[SPEED_X] += self.attributes[GRAVITY_X] * delta  # GET CLOSER TO ZERO HORIZONTAL SPEED
            else:
                self.attributes[SPEED_X] += self.attributes[GRAVITY_X] * delta  # UPDATE HORIZONTAL SPEED BASED ON HORIZONTAL GRAVITY
        # IF HORIZONTAL GRAVITY IS PULLING TOWARDS THE LEFT
        elif self.attributes[SPEED_X] < 0:
            if self.attributes[ZERO_OUT_X]:  # IF THE HORIZONTAL SPEED SHOULD SLOW DOWN
                if self.attributes[SPEED_X] + self.attributes[GRAVITY_X] * delta > 0:  # IF SPEED AFTER TAKING AWAY IS GREATER THAN 0
                    self.attributes[SPEED_X] = 0  # SET SPEED TO AVOID JIGGLING MOTION OR OVERSHOT
                else:
                    self.attributes[SPEED_X] += self.attributes[GRAVITY_X] * delta  # GET CLOSER TO ZERO HORIZONTAL SPEED
            else:
                self.attributes[SPEED_X] += self.attributes[GRAVITY_X] * delta  # UPDATE HORIZONTAL SPEED BASED ON HORIZONTAL GRAVITY
        elif not self.attributes[ZERO_OUT_X]:  # IF NOT ZEROING OUT AT HORIZONTAL SPEED OF ZERO
            self.attributes[SPEED_X] += self.attributes[GRAVITY_X] * delta  # ACCELERATE HORIZONTALLY
    def update_alpha(self, delta):
        """Fade the sprite in proportion to its remaining life fraction."""
        if self.attributes[SLOWLY_DISAPPEAR]:
            # NOTE(review): the surface is created with SRCALPHA (per-pixel
            # alpha); Surface.set_alpha historically has no visible effect on
            # such surfaces -- confirm the fade actually renders.
            # Also assumes TOTAL_LIFE != 0 whenever SLOWLY_DISAPPEAR is set,
            # otherwise this divides by zero -- TODO confirm callers enforce it.
            self.image.set_alpha((self.attributes[REMAINING_LIFE]/self.attributes[TOTAL_LIFE]) * 255)
def update_life(self, delta):
if self.attributes[TOTAL_LIFE] > 0 and self.attributes[REMAINING_LIFE] > 0:
self.attributes[REMAINING_LIFE] -= 10 * delta
else:
self.attributes[REMAINING_LIFE] = 0
    def draw_image(self):
        """Scale the configured source image to the sprite's rect size."""
        # assumes self.attributes[IMAGE] is a pygame Surface; the default from
        # setup_attributes is None, which would make transform.scale raise --
        # presumably callers always supply an image. TODO confirm.
        self.image = transform.scale(self.attributes[IMAGE], (self.rect.width, self.rect.height))
def adjust_as_necessary(self):
if self.attributes[GRAVITY_X] == 0 and self.attributes[GRAVITY_Y] == 0:
if len(self.attributes[GRAVITY_Y_RANGE]) != 0:
if (self.attributes[GRAVITY_Y_RANGE][1] - self.attributes[GRAVITY_Y_RANGE][0]) != 0:
self.attributes[GRAVITY_Y] = 0.1
if len(self.attributes[GRAVITY_X_RANGE]) != 0:
if (self.attributes[GRAVITY_X_RANGE][1] - self.attributes[GRAVITY_X_RANGE][0]) != 0:
self.attributes[GRAVITY_X] = 0.1 | [
"duhvanry@gmail.com"
] | duhvanry@gmail.com |
937d8e2db74f664cc00fc23719080f6c4f09d024 | a2d2f985f0bf6b65ae61148f1d4f9be7d8371f4b | /Problema/scp/repair/reparaTest.py | cf92d86dd72c538ddfb8e951375a9637377ee59a | [] | no_license | mauriceaux/solverMH | 717b386ca9521163e7b1fee17d0ae5761e9958a7 | bf7b2eb63bae6c09890fcbdfe1b65cf77e0e71ab | refs/heads/master | 2022-12-24T08:44:38.953388 | 2020-09-29T17:52:11 | 2020-09-29T17:52:11 | 283,625,079 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 813 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 21 22:19:40 2019
@author: mauri
"""
import readOrProblems as rOP
import solution as sl
import heuristic as he
import numpy as np

# Absolute path to the OR-library set-covering instance used for this test.
file = '/home/mauri/proyectos/mg/semestre1/autSearch/project/gso/instancesFinal/scpnre1.txt'
# NOTE(review): `file` and (below) `dict` shadow Python builtins.
pesos, matrix = rOP.generaMatrix(file) # Generate the weights and the matrix from the instance to run
print(pesos.shape)
# NOTE(review): early exit() -- everything below this line is unreachable
# dead code, presumably a debugging leftover; remove exit() to run the full
# solution-generation test.
exit()
rHeuristic = he.getRowHeuristics(matrix)
dictCol = he.getColumnRow(matrix)
row, cols = matrix.shape # Extract the problem size
dictcHeuristics = {}
cHeuristic = []
lSolution = []
dict = he.getRowColumn(matrix)
lSolution = sl.generaSolucion(lSolution,matrix,pesos,rHeuristic,dictcHeuristics,dict,cHeuristic,dictCol)
print(lSolution)
# Densify the chosen-column index list into a 0/1 incidence vector.
sol = np.zeros(cols)
sol[lSolution] = 1
print(sol)
"mauricio.castillo.delpino@gmail.com"
] | mauricio.castillo.delpino@gmail.com |
5380054ff75f6b112875d44bda6bc4a922d3fd8f | a70a84c1312dee9a0b5139d7cbd69505373013a8 | /django_server/RobotKiller/tasks.py | ca440d1e580d8d1249e56409db7770ae96eec397 | [
"Apache-2.0"
] | permissive | strongbraver2017/deep_stack | df0c941b8c3f661605dc36a231ff361aee3aa0ba | d75087384b491276084c7dd2b68b35cba8434785 | refs/heads/master | 2021-05-07T18:17:54.552872 | 2017-05-18T08:47:37 | 2017-05-18T08:47:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,115 | py | # -*- coding: utf-8 -*-
"""
@file: tasks.py
@author: lyn
@contact: tonylu716@gmail.com
@python: 3.5
@editor: Vim
@create: 3/29/17 2:20 AM
@description:
用于反爬虫的一些异步任务,主要是刷新数据表中某些临时记录。
"""
from __future__ import absolute_import, unicode_literals
from celery import task as celery_task
from .models import Ban, RecentIpActivity
from django.utils import timezone
@celery_task(name="refresh_ban")
def refresh_ban():
    """Delete every Ban whose ban_to timestamp is already in the past.

    Returns the list of IPs that were unbanned (each is also printed).
    """
    expired_ips = []
    for stale_ban in Ban.objects.all():
        if not (stale_ban.ban_to < timezone.now()):
            continue
        stale_ban.delete()
        print("clear {} from Ban".format(stale_ban.ip))
        expired_ips.append(stale_ban.ip)
    return expired_ips
@celery_task(name="refresh_ip_activity")
def refresh_ip_activity():
    """Drop RecentIpActivity rows past their destroy_time.

    Returns the list of IPs whose activity records were purged (each is also
    printed).
    """
    purged_ips = []
    for activity in RecentIpActivity.objects.all():
        if not (activity.destroy_time < timezone.now()):
            continue
        activity.delete()
        print("clear {} acts from activities".format(activity.ip))
        purged_ips.append(activity.ip)
    return purged_ips
| [
"tonylu716@gmail.com"
] | tonylu716@gmail.com |
cd622ab7f0a0b5f79c0dad734ebca28f26e92146 | 3c24c5d4e7c97d4d1ffd65d8cb538d13eb50c629 | /__main__.py | afc7b56ee912b2a5309d18ef195f68e0b5c6ab97 | [] | no_license | lemonez/air-emissions | bb12304fe106e6dd5ffeb3e6899955f9f2f0dfe0 | 89d1900bdf2c662dbed63024dc1867d1693b819a | refs/heads/master | 2022-12-01T14:52:28.319654 | 2020-08-21T00:50:54 | 2020-08-21T00:50:54 | 243,612,811 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,650 | py | # global import external libraries
import pandas as pd
# module-level imports
import config as cf
def main():
    """main script controller function

    Parses CLI args into the `config` module, validates/creates output
    directories, then runs the AnnualParser over each enabled emissions
    calculation (criteria, fuel-gas toxics, calciner toxics, H2 plant #2
    toxics), timing the whole run.
    """
    import os, time
    # unpack arguments as dict and push back to config module
    args_dict = vars(get_args())
    #if args_dict['log_suffix'] != '':
    #    args_dict['log_suffix'] = '_' + args_dict['log_suffix']
    append_slash_to_dir(args_dict, ['out_dir', 'out_dir_child'])
    args_dict['out_dir_child'] = args_dict['out_dir'] + args_dict['out_dir_child']
    if args_dict['quiet']:
        args_dict['verbose_logging'] = False
    update_config(cf, args_dict)
    if cf.verbose_logging or cf.view_config:
        print('\nConfiguration options specified:\n')
        for k,v in args_dict.items():
            # Pretty-print the month range using cf.month_map (falls back to
            # a single month when start == end).
            if k == 'months_to_calculate':
                mos = []
                for mo in args_dict['months_to_calculate']:
                    mos.append(str(mo))
                mo_start = cf.month_map.get(mos[0])
                mo_end = cf.month_map.get(mos[-1])
                if mo_start != mo_end:
                    print(' {:<28}: {} to {}'.format(k, mo_start, mo_end))
                else:
                    print(' {:<28}: {}'.format(k, mo_start))
            elif k == 'equip_to_calculate':
                print(' {:<28}: {}'.format(k, args_dict[k][0]))
                for eq in args_dict[k][1:]:
                    print(' {:<28}: {}'.format('', eq))
            else:
                print(' {:<28}: {}'.format(k,v))
        print('\n')
    cf.verify_pollutants_to_calc(cf.pollutants_to_calculate)
    # ensure output directories exist
    # NOTE(review): loop variable `dir` shadows the builtin of the same name.
    for dir in [cf.out_dir, cf.out_dir_child, cf.log_dir]:
        if not os.path.exists(dir):
            os.makedirs(dir)
            print('Created directory \''+dir+'\' for output files.\n')
    if cf.view_config:
        import sys
        sys.exit(0)
    # print start timestamp for checking script runtime
    start_time_seconds = time.time()
    start_time = time.strftime("%H:%M:%S")
    print('\n'+start_time+'\tmodule \''+__name__+'\' began running.')
    # parse input, calculate emissions, write output
    from parserClass import AnnualParser
    from equipClass import AnnualEquipment
    ae = AnnualEquipment()
    if cf.calculate_criteria:
        print('\n')
        AnnualParser(
            ae, calculation='criteria').read_calculate_write_annual_emissions()
    if cf.calculate_FG_toxics:
        print('\n')
        AnnualParser(
            ae, calculation='FG_toxics').read_calculate_write_annual_emissions()
    # Calciner toxics only make sense when a calciner unit is selected.
    if (cf.calculate_calciner_toxics
        and (
            'calciner_1' in cf.equip_to_calculate
            or
            'calciner_2' in cf.equip_to_calculate
            )
        ):
        print('\n')
        AnnualParser(
            ae, calculation='calciner_toxics').read_calculate_write_annual_emissions()
    if (cf.calculate_h2plant2_toxics
        and 'h2_plant_2' in cf.equip_to_calculate
        ):
        print('\n')
        AnnualParser(
            ae, calculation='h2plant2_toxics').read_calculate_write_annual_emissions()
    # print total time for script runtime
    end_time_seconds = time.time()
    end_time = time.strftime("%H:%M:%S")
    total_time = round(end_time_seconds - start_time_seconds)
    print('\n'+end_time+'\tmodule \''+__name__+'\' completed.')
    print('\t(\''+__name__+'\' total script runtime: '
          +str(round((total_time / 60),1))+' minutes)')
def get_args():
    """parse arguments from command line

    Builds the argparse parser (File I/O paths, data/calculation options and
    console-output flags), with defaults pulled from the `config` module `cf`,
    and returns the parsed Namespace.
    """
    # NOTE(review): `textwrap` is imported but only used by the commented-out
    # description/epilog block below.
    import argparse, textwrap
    class BlankLinesHelpFormatter (argparse.HelpFormatter):
        # Insert a blank line after each help entry for readability.
        def _split_lines(self, text, width):
            return super()._split_lines(text, width) + ['']
    parser = argparse.ArgumentParser(
        formatter_class=BlankLinesHelpFormatter,
        prog='BP: cokerghg',
        add_help=True)#,
    # formatter_class = argparse.RawDescriptionHelpFormatter,
    # description = textwrap.dedent('''\
    #     Calculate CO2 emissions for BP E/W coker heaters.
    #     '''),
    # epilog=textwrap.dedent('''\
    #     can put text here for further explanation
    #     more text here possibly
    #     maybe some bullet point-like things here
    #     and here
    #     '''))
    group1 = parser.add_argument_group('File I/O')
    # NOTE(review): this help string is missing its closing parenthesis
    # ("...%(default)s'." should end with "').").
    group1.add_argument('-d', '--inpath', '--data',
                        dest='data_dir', metavar='InDir',
                        default=cf.data_dir,
                        help='Path to data (default: \'%(default)s\'.')
    group1.add_argument('-o', '--outpath',
                        dest='out_dir', metavar='OutDir',
                        default=cf.out_dir,
                        help='Path to save output (default: \'%(default)s\').')
    group1.add_argument('-c', '--outpath_child',
                        dest='out_dir_child', metavar='OutChild',
                        default=cf.out_dir_child,
                        help='Path to save output iteration (default: \'%(default)s\').')
    group1.add_argument('-L', '--logpath',
                        dest='log_dir', metavar='LogDir',
                        default=cf.log_dir,
                        help='Path to save logfiles (default: \'%(default)s\').')
    group1.add_argument('-x', '--logsuffix',
                        dest='log_suffix', metavar='LogSuf',
                        default=cf.log_suffix,
                        help='Suffix to append to logfile names (default: \'%(default)s\').')
    group2 = parser.add_argument_group('Data / Calc Options')
    group2.add_argument('-e', '--equip',
                        dest='equip_to_calculate', metavar='Equips',
                        default=cf.equip_to_calculate,
                        help='Equipment units to calculate (default: %(default)s).')
    group2.add_argument('-y', '--year',
                        dest='data_year', metavar='DataYear',
                        default=cf.data_year,
                        help='Year at end of met dataset (default: %(default)s).')
    group2.add_argument('-m', '--months',
                        dest='months_to_calculate', metavar='Months',
                        default=cf.months_to_calculate,
                        help='Months of data to parse (default: %(default)s).')
    group2.add_argument('--criteria',
                        dest='calculate_criteria', metavar='T/F',
                        default=cf.calculate_criteria,
                        help='Whether or not to calculate criteria pollutants (default: %(default)s).')
    group2.add_argument('--ftoxics',
                        dest='calculate_FG_toxics', metavar='T/F',
                        default=cf.calculate_FG_toxics,
                        help='Whether or not to calculate fuel gas toxics (default: %(default)s).')
    group2.add_argument('--ctoxics',
                        dest='calculate_calciner_toxics', metavar='T/F',
                        default=cf.calculate_calciner_toxics,
                        help='Whether or not to calculate calciner toxics (default: %(default)s).')
    group2.add_argument('--htoxics',
                        dest='calculate_h2plant2_toxics', metavar='T/F',
                        default=cf.calculate_h2plant2_toxics,
                        help='Whether or not to calculate toxics for WED Pt. #46 H2 Plant #2 (default: %(default)s).')
    group3 = parser.add_argument_group('Console Output / QA')
    # maybe change verbosity options; this may be confusing
    group3.add_argument('-q', '--quiet',
                        action='store_true',
                        help='Suppress verbose console logging.')
    group3.add_argument('-v', '--view_config',
                        action='store_true',
                        help='Only view configuration parameters; do not parse.')
    args = parser.parse_args()
    #parser.print_help()
    return args
def update_config(dst, src):
    """Copy every key/value pair from the dict *src* onto *dst* as attributes
    (used to push parsed CLI options back onto the config module)."""
    for name, value in src.items():
        setattr(dst, name, value)
def append_slash_to_dir(di, kys):
    """Append forward slash (when missing) to each path stored in dict *di*
    under the keys *kys*, so the values are usable as directory prefixes."""
    for ky in kys:
        path = di[ky]
        di[ky] = path if path.endswith('/') else path + '/'
if __name__ == '__main__':
main() | [
"ecitron@wclyn-linux1.environcorp.int"
] | ecitron@wclyn-linux1.environcorp.int |
9ad911452b74aa4e413cead0138c326cda4c69f4 | 92e5edcd4cf7621a62548a40462b6e813821c571 | /Redundantdegree.py | cffba2e9c0ba3c61b6ec3647cd1388acfb26034a | [] | no_license | wz1900/one-many | 62f027478a16d4506fd65c7b60745250be04e85f | 59320fd241863cd18dea47c281c136b2c237ca59 | refs/heads/master | 2021-01-10T01:17:44.917528 | 2015-10-23T15:24:53 | 2015-10-23T15:24:53 | 43,594,396 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,321 | py | from TargetProvenance import readProvenance ;
from BasicFunction import get_tuplelist_traces, get_edge_list, get_targets_karate ;
# raw method
# raw method
def get_redundant_degree(mytrace, otherTargets):
    """Count how many targets in otherTargets have at least one provenance
    trace whose edge set is fully contained in mytrace's edge set.

    Fixes the original `isuperset` typo (no such set method; it raised
    AttributeError) -- the intended call is `issuperset`. mytrace's edge set
    is also hoisted out of the loops since it never changes.
    """
    mytraceset = set(get_edge_list(mytrace))
    degree = 0
    for target in otherTargets:
        for temp in target.prolist:
            if mytraceset.issuperset(set(get_edge_list(temp))):
                # One covered trace is enough to count this target once.
                degree = degree + 1
                break
    return degree
def cal_outer_degree(myTarget, targetnodelist):
    """For each trace of myTarget, store in degreedict how many OTHER target
    nodes that trace passes through (myTarget's own node is excluded)."""
    other_targets = set(targetnodelist)
    other_targets.remove(myTarget.nodeId)
    for trace in myTarget.tracelist:
        # Traces are comma-separated node lists with a trailing comma, so the
        # last split element is empty and is dropped.
        nodes_on_trace = set(trace.split(",")[:-1])
        myTarget.degreedict[trace] = len(nodes_on_trace & other_targets)
def cal_inner_degree(myTarget):
    """Flag redundant traces of myTarget in upsetdict.

    A trace `key` is flagged (upsetdict[key] = 1) when some OTHER trace's
    edge set is a subset of `key`'s -- i.e. `key` is a superset trace that
    adds no new coverage. Changes from the original: the unused `flag`
    variable is removed, and each outer trace's edge set is computed once
    instead of once per inner-loop pair (same O(n^2) pairing, less rework).
    """
    allpaths = list(myTarget.tracelist)
    for path in allpaths:
        pathset = set(get_edge_list(path))
        for key in allpaths:
            if path != key:
                keyset = set(get_edge_list(key))
                if pathset.issubset(keyset):
                    myTarget.upsetdict[key] = 1
# my method for speed up
def speed_redundant_degree(myTarget, targetnodelist):
cal_outer_degree(myTarget, targetnodelist) ;
cal_inner_degree(myTarget) ;
def cal_k_redundant(k):
all_traces = [] ;
num = 0 ;
for target in targetlist:
speed_redundant_degree(target, targetnodelist) ;
for trace in target.tracelist:
if( target.degreedict[trace] <= k and target.upsetdict.has_key(trace) is False):
num = num + 1 ;
all_traces.append(trace) ;
[nouse, edgedict] = get_tuplelist_traces(all_traces) ;
print "trace num:", num ;
print "tuple num:", len(edgedict) ;
for key in edgedict.keys():
print key ;
# Driver: load the karate provenance instance and sweep k.
# NOTE(review): this file is Python 2 (print statements, has_key usage above).
filename = "karate_provenance.txt";
targetnodelist = get_targets_karate() ;
targetlist = readProvenance(filename, targetnodelist) ;
# Currently only k = 5 is evaluated; widen range(5,6) to sweep more degrees.
for k in range(5,6):
    print "k=", k, ;
    cal_k_redundant(k) ;
| [
"root@node13-100.(none)"
] | root@node13-100.(none) |
d1c15709092c258b430c6ded3da4b80b379da6d7 | bb1e0e89fcf1f1ffb61214ddf262ba327dd10757 | /plotly_study/validators/scattergl/marker/__init__.py | 5076833461f161bf0707c189a46671576aba5327 | [
"MIT"
] | permissive | lucasiscovici/plotly_py | ccb8c3ced89a0f7eccf1ae98551fa712460033fe | 42ab769febb45fbbe0a3c677dc4306a4f59cea36 | refs/heads/master | 2020-09-12T05:43:12.363609 | 2019-12-02T15:13:13 | 2019-12-02T15:13:13 | 222,328,180 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 35,078 | py | import _plotly_utils.basevalidators
class SymbolsrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Validator for the `symbolsrc` property of `scattergl.marker`
    (auto-generated plotly validator boilerplate)."""

    def __init__(
        self, plotly_name="symbolsrc", parent_name="scattergl.marker", **kwargs
    ):
        super(SymbolsrcValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "none"),
            role=kwargs.pop("role", "info"),
            **kwargs
        )
import _plotly_utils.basevalidators
class SymbolValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(self, plotly_name="symbol", parent_name="scattergl.marker", **kwargs):
super(SymbolValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "style"),
values=kwargs.pop(
"values",
[
0,
"circle",
100,
"circle-open",
200,
"circle-dot",
300,
"circle-open-dot",
1,
"square",
101,
"square-open",
201,
"square-dot",
301,
"square-open-dot",
2,
"diamond",
102,
"diamond-open",
202,
"diamond-dot",
302,
"diamond-open-dot",
3,
"cross",
103,
"cross-open",
203,
"cross-dot",
303,
"cross-open-dot",
4,
"x",
104,
"x-open",
204,
"x-dot",
304,
"x-open-dot",
5,
"triangle-up",
105,
"triangle-up-open",
205,
"triangle-up-dot",
305,
"triangle-up-open-dot",
6,
"triangle-down",
106,
"triangle-down-open",
206,
"triangle-down-dot",
306,
"triangle-down-open-dot",
7,
"triangle-left",
107,
"triangle-left-open",
207,
"triangle-left-dot",
307,
"triangle-left-open-dot",
8,
"triangle-right",
108,
"triangle-right-open",
208,
"triangle-right-dot",
308,
"triangle-right-open-dot",
9,
"triangle-ne",
109,
"triangle-ne-open",
209,
"triangle-ne-dot",
309,
"triangle-ne-open-dot",
10,
"triangle-se",
110,
"triangle-se-open",
210,
"triangle-se-dot",
310,
"triangle-se-open-dot",
11,
"triangle-sw",
111,
"triangle-sw-open",
211,
"triangle-sw-dot",
311,
"triangle-sw-open-dot",
12,
"triangle-nw",
112,
"triangle-nw-open",
212,
"triangle-nw-dot",
312,
"triangle-nw-open-dot",
13,
"pentagon",
113,
"pentagon-open",
213,
"pentagon-dot",
313,
"pentagon-open-dot",
14,
"hexagon",
114,
"hexagon-open",
214,
"hexagon-dot",
314,
"hexagon-open-dot",
15,
"hexagon2",
115,
"hexagon2-open",
215,
"hexagon2-dot",
315,
"hexagon2-open-dot",
16,
"octagon",
116,
"octagon-open",
216,
"octagon-dot",
316,
"octagon-open-dot",
17,
"star",
117,
"star-open",
217,
"star-dot",
317,
"star-open-dot",
18,
"hexagram",
118,
"hexagram-open",
218,
"hexagram-dot",
318,
"hexagram-open-dot",
19,
"star-triangle-up",
119,
"star-triangle-up-open",
219,
"star-triangle-up-dot",
319,
"star-triangle-up-open-dot",
20,
"star-triangle-down",
120,
"star-triangle-down-open",
220,
"star-triangle-down-dot",
320,
"star-triangle-down-open-dot",
21,
"star-square",
121,
"star-square-open",
221,
"star-square-dot",
321,
"star-square-open-dot",
22,
"star-diamond",
122,
"star-diamond-open",
222,
"star-diamond-dot",
322,
"star-diamond-open-dot",
23,
"diamond-tall",
123,
"diamond-tall-open",
223,
"diamond-tall-dot",
323,
"diamond-tall-open-dot",
24,
"diamond-wide",
124,
"diamond-wide-open",
224,
"diamond-wide-dot",
324,
"diamond-wide-open-dot",
25,
"hourglass",
125,
"hourglass-open",
26,
"bowtie",
126,
"bowtie-open",
27,
"circle-cross",
127,
"circle-cross-open",
28,
"circle-x",
128,
"circle-x-open",
29,
"square-cross",
129,
"square-cross-open",
30,
"square-x",
130,
"square-x-open",
31,
"diamond-cross",
131,
"diamond-cross-open",
32,
"diamond-x",
132,
"diamond-x-open",
33,
"cross-thin",
133,
"cross-thin-open",
34,
"x-thin",
134,
"x-thin-open",
35,
"asterisk",
135,
"asterisk-open",
36,
"hash",
136,
"hash-open",
236,
"hash-dot",
336,
"hash-open-dot",
37,
"y-up",
137,
"y-up-open",
38,
"y-down",
138,
"y-down-open",
39,
"y-left",
139,
"y-left-open",
40,
"y-right",
140,
"y-right-open",
41,
"line-ew",
141,
"line-ew-open",
42,
"line-ns",
142,
"line-ns-open",
43,
"line-ne",
143,
"line-ne-open",
44,
"line-nw",
144,
"line-nw-open",
],
),
**kwargs
)
import _plotly_utils.basevalidators
class SizesrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Validator for the `sizesrc` property of `scattergl.marker`
    (auto-generated plotly validator boilerplate)."""

    def __init__(self, plotly_name="sizesrc", parent_name="scattergl.marker", **kwargs):
        super(SizesrcValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "none"),
            role=kwargs.pop("role", "info"),
            **kwargs
        )
import _plotly_utils.basevalidators
class SizerefValidator(_plotly_utils.basevalidators.NumberValidator):
    """Validator for the numeric `sizeref` property of `scattergl.marker`
    (auto-generated plotly validator boilerplate)."""

    def __init__(self, plotly_name="sizeref", parent_name="scattergl.marker", **kwargs):
        super(SizerefValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "calc"),
            role=kwargs.pop("role", "style"),
            **kwargs
        )
import _plotly_utils.basevalidators
class SizemodeValidator(_plotly_utils.basevalidators.EnumeratedValidator):
    """Validator for `scattergl.marker.sizemode`; accepts "diameter" or
    "area" (auto-generated plotly validator boilerplate)."""

    def __init__(
        self, plotly_name="sizemode", parent_name="scattergl.marker", **kwargs
    ):
        super(SizemodeValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "calc"),
            role=kwargs.pop("role", "info"),
            values=kwargs.pop("values", ["diameter", "area"]),
            **kwargs
        )
import _plotly_utils.basevalidators
class SizeminValidator(_plotly_utils.basevalidators.NumberValidator):
    """Validator for `scattergl.marker.sizemin`; a number with minimum 0
    (auto-generated plotly validator boilerplate)."""

    def __init__(self, plotly_name="sizemin", parent_name="scattergl.marker", **kwargs):
        super(SizeminValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "calc"),
            min=kwargs.pop("min", 0),
            role=kwargs.pop("role", "style"),
            **kwargs
        )
import _plotly_utils.basevalidators
class SizeValidator(_plotly_utils.basevalidators.NumberValidator):
    """Validator for `scattergl.marker.size`; a non-negative number, scalar
    or array (array_ok) (auto-generated plotly validator boilerplate)."""

    def __init__(self, plotly_name="size", parent_name="scattergl.marker", **kwargs):
        super(SizeValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            array_ok=kwargs.pop("array_ok", True),
            edit_type=kwargs.pop("edit_type", "calc"),
            min=kwargs.pop("min", 0),
            role=kwargs.pop("role", "style"),
            **kwargs
        )
import _plotly_utils.basevalidators
class ShowscaleValidator(_plotly_utils.basevalidators.BooleanValidator):
    """Validator for the boolean `showscale` property of `scattergl.marker`
    (auto-generated plotly validator boilerplate)."""

    def __init__(
        self, plotly_name="showscale", parent_name="scattergl.marker", **kwargs
    ):
        super(ShowscaleValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "calc"),
            role=kwargs.pop("role", "info"),
            **kwargs
        )
import _plotly_utils.basevalidators
class ReversescaleValidator(_plotly_utils.basevalidators.BooleanValidator):
    """Validator for the boolean `reversescale` property of
    `scattergl.marker` (auto-generated plotly validator boilerplate)."""

    def __init__(
        self, plotly_name="reversescale", parent_name="scattergl.marker", **kwargs
    ):
        super(ReversescaleValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "calc"),
            role=kwargs.pop("role", "style"),
            **kwargs
        )
import _plotly_utils.basevalidators
class OpacitysrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Validator for the `opacitysrc` property of `scattergl.marker`
    (auto-generated plotly validator boilerplate)."""

    def __init__(
        self, plotly_name="opacitysrc", parent_name="scattergl.marker", **kwargs
    ):
        super(OpacitysrcValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "none"),
            role=kwargs.pop("role", "info"),
            **kwargs
        )
import _plotly_utils.basevalidators
class OpacityValidator(_plotly_utils.basevalidators.NumberValidator):
    """Validator for `scattergl.marker.opacity`; a number in [0, 1], scalar
    or array (array_ok) (auto-generated plotly validator boilerplate)."""

    def __init__(self, plotly_name="opacity", parent_name="scattergl.marker", **kwargs):
        super(OpacityValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            array_ok=kwargs.pop("array_ok", True),
            edit_type=kwargs.pop("edit_type", "calc"),
            max=kwargs.pop("max", 1),
            min=kwargs.pop("min", 0),
            role=kwargs.pop("role", "style"),
            **kwargs
        )
import _plotly_utils.basevalidators
class LineValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="line", parent_name="scattergl.marker", **kwargs):
super(LineValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Line"),
data_docs=kwargs.pop(
"data_docs",
"""
autocolorscale
Determines whether the colorscale is a default
palette (`autocolorscale: true`) or the palette
determined by `marker.line.colorscale`. Has an
effect only if in `marker.line.color`is set to
a numerical array. In case `colorscale` is
unspecified or `autocolorscale` is true, the
default palette will be chosen according to
whether numbers in the `color` array are all
positive, all negative or mixed.
cauto
Determines whether or not the color domain is
computed with respect to the input data (here
in `marker.line.color`) or the bounds set in
`marker.line.cmin` and `marker.line.cmax` Has
an effect only if in `marker.line.color`is set
to a numerical array. Defaults to `false` when
`marker.line.cmin` and `marker.line.cmax` are
set by the user.
cmax
Sets the upper bound of the color domain. Has
an effect only if in `marker.line.color`is set
to a numerical array. Value should have the
same units as in `marker.line.color` and if
set, `marker.line.cmin` must be set as well.
cmid
Sets the mid-point of the color domain by
scaling `marker.line.cmin` and/or
`marker.line.cmax` to be equidistant to this
point. Has an effect only if in
`marker.line.color`is set to a numerical array.
Value should have the same units as in
`marker.line.color`. Has no effect when
`marker.line.cauto` is `false`.
cmin
Sets the lower bound of the color domain. Has
an effect only if in `marker.line.color`is set
to a numerical array. Value should have the
same units as in `marker.line.color` and if
set, `marker.line.cmax` must be set as well.
color
Sets themarker.linecolor. It accepts either a
specific color or an array of numbers that are
mapped to the colorscale relative to the max
and min values of the array or relative to
`marker.line.cmin` and `marker.line.cmax` if
set.
coloraxis
Sets a reference to a shared color axis.
References to these shared color axes are
"coloraxis", "coloraxis2", "coloraxis3", etc.
Settings for these shared color axes are set in
the layout, under `layout.coloraxis`,
`layout.coloraxis2`, etc. Note that multiple
color scales can be linked to the same color
axis.
colorscale
Sets the colorscale. Has an effect only if in
`marker.line.color`is set to a numerical array.
The colorscale must be an array containing
arrays mapping a normalized value to an rgb,
rgba, hex, hsl, hsv, or named color string. At
minimum, a mapping for the lowest (0) and
highest (1) values are required. For example,
`[[0, 'rgb(0,0,255)'], [1, 'rgb(255,0,0)']]`.
To control the bounds of the colorscale in
color space, use`marker.line.cmin` and
`marker.line.cmax`. Alternatively, `colorscale`
may be a palette name string of the following
list: Greys,YlGnBu,Greens,YlOrRd,Bluered,RdBu,R
eds,Blues,Picnic,Rainbow,Portland,Jet,Hot,Black
body,Earth,Electric,Viridis,Cividis.
colorsrc
Sets the source reference on plot.ly for color
.
reversescale
Reverses the color mapping if true. Has an
effect only if in `marker.line.color`is set to
a numerical array. If true, `marker.line.cmin`
will correspond to the last color in the array
and `marker.line.cmax` will correspond to the
first color.
width
Sets the width (in px) of the lines bounding
the marker points.
widthsrc
Sets the source reference on plot.ly for width
.
""",
),
**kwargs
)
import _plotly_utils.basevalidators
class ColorsrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Validator for the `colorsrc` property of `scattergl.marker`
    (auto-generated plotly validator boilerplate)."""

    def __init__(
        self, plotly_name="colorsrc", parent_name="scattergl.marker", **kwargs
    ):
        super(ColorsrcValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "none"),
            role=kwargs.pop("role", "info"),
            **kwargs
        )
import _plotly_utils.basevalidators
class ColorscaleValidator(_plotly_utils.basevalidators.ColorscaleValidator):
    """Validator for the `colorscale` property of `scattergl.marker`; setting
    it implies `autocolorscale=False` (auto-generated plotly validator
    boilerplate)."""

    def __init__(
        self, plotly_name="colorscale", parent_name="scattergl.marker", **kwargs
    ):
        super(ColorscaleValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "calc"),
            implied_edits=kwargs.pop("implied_edits", {"autocolorscale": False}),
            role=kwargs.pop("role", "style"),
            **kwargs
        )
import _plotly_utils.basevalidators
class ColorBarValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(
self, plotly_name="colorbar", parent_name="scattergl.marker", **kwargs
):
super(ColorBarValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "ColorBar"),
data_docs=kwargs.pop(
"data_docs",
"""
bgcolor
Sets the color of padded area.
bordercolor
Sets the axis line color.
borderwidth
Sets the width (in px) or the border enclosing
this color bar.
dtick
Sets the step in-between ticks on this axis.
Use with `tick0`. Must be a positive number, or
special strings available to "log" and "date"
axes. If the axis `type` is "log", then ticks
are set every 10^(n*dtick) where n is the tick
number. For example, to set a tick mark at 1,
10, 100, 1000, ... set dtick to 1. To set tick
marks at 1, 100, 10000, ... set dtick to 2. To
set tick marks at 1, 5, 25, 125, 625, 3125, ...
set dtick to log_10(5), or 0.69897000433. "log"
has several special values; "L<f>", where `f`
is a positive number, gives ticks linearly
spaced in value (but not position). For example
`tick0` = 0.1, `dtick` = "L0.5" will put ticks
at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10
plus small digits between, use "D1" (all
digits) or "D2" (only 2 and 5). `tick0` is
ignored for "D1" and "D2". If the axis `type`
is "date", then you must convert the time to
milliseconds. For example, to set the interval
between ticks to one day, set `dtick` to
86400000.0. "date" also has special values
"M<n>" gives ticks spaced by a number of
months. `n` must be a positive integer. To set
ticks on the 15th of every third month, set
`tick0` to "2000-01-15" and `dtick` to "M3". To
set ticks every 4 years, set `dtick` to "M48"
exponentformat
Determines a formatting rule for the tick
exponents. For example, consider the number
1,000,000,000. If "none", it appears as
1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
"power", 1x10^9 (with 9 in a super script). If
"SI", 1G. If "B", 1B.
len
Sets the length of the color bar This measure
excludes the padding of both ends. That is, the
color bar length is this length minus the
padding on both ends.
lenmode
Determines whether this color bar's length
(i.e. the measure in the color variation
direction) is set in units of plot "fraction"
or in *pixels. Use `len` to set the value.
nticks
Specifies the maximum number of ticks for the
particular axis. The actual number of ticks
will be chosen automatically to be less than or
equal to `nticks`. Has an effect only if
`tickmode` is set to "auto".
outlinecolor
Sets the axis line color.
outlinewidth
Sets the width (in px) of the axis line.
separatethousands
If "true", even 4-digit integers are separated
showexponent
If "all", all exponents are shown besides their
significands. If "first", only the exponent of
the first tick is shown. If "last", only the
exponent of the last tick is shown. If "none",
no exponents appear.
showticklabels
Determines whether or not the tick labels are
drawn.
showtickprefix
If "all", all tick labels are displayed with a
prefix. If "first", only the first tick is
displayed with a prefix. If "last", only the
last tick is displayed with a suffix. If
"none", tick prefixes are hidden.
showticksuffix
Same as `showtickprefix` but for tick suffixes.
thickness
Sets the thickness of the color bar This
measure excludes the size of the padding, ticks
and labels.
thicknessmode
Determines whether this color bar's thickness
(i.e. the measure in the constant color
direction) is set in units of plot "fraction"
or in "pixels". Use `thickness` to set the
value.
tick0
Sets the placement of the first tick on this
axis. Use with `dtick`. If the axis `type` is
"log", then you must take the log of your
starting tick (e.g. to set the starting tick to
100, set the `tick0` to 2) except when
`dtick`=*L<f>* (see `dtick` for more info). If
the axis `type` is "date", it should be a date
string, like date data. If the axis `type` is
"category", it should be a number, using the
scale where each category is assigned a serial
number from zero in the order it appears.
tickangle
Sets the angle of the tick labels with respect
to the horizontal. For example, a `tickangle`
of -90 draws the tick labels vertically.
tickcolor
Sets the tick color.
tickfont
Sets the color bar's tick label font
tickformat
Sets the tick label formatting rule using d3
formatting mini-languages which are very
similar to those in Python. For numbers, see:
https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format
And for dates see:
https://github.com/d3/d3-3.x-api-
reference/blob/master/Time-Formatting.md#format
We add one item to d3's date formatter: "%{n}f"
for fractional seconds with n digits. For
example, *2016-10-13 09:15:23.456* with
tickformat "%H~%M~%S.%2f" would display
"09~15~23.46"
tickformatstops
A tuple of plotly_study.graph_objects.scattergl.marke
r.colorbar.Tickformatstop instances or dicts
with compatible properties
tickformatstopdefaults
When used in a template (as layout.template.dat
a.scattergl.marker.colorbar.tickformatstopdefau
lts), sets the default property values to use
for elements of
scattergl.marker.colorbar.tickformatstops
ticklen
Sets the tick length (in px).
tickmode
Sets the tick mode for this axis. If "auto",
the number of ticks is set via `nticks`. If
"linear", the placement of the ticks is
determined by a starting position `tick0` and a
tick step `dtick` ("linear" is the default
value if `tick0` and `dtick` are provided). If
"array", the placement of the ticks is set via
`tickvals` and the tick text is `ticktext`.
("array" is the default value if `tickvals` is
provided).
tickprefix
Sets a tick label prefix.
ticks
Determines whether ticks are drawn or not. If
"", this axis' ticks are not drawn. If
"outside" ("inside"), this axis' are drawn
outside (inside) the axis lines.
ticksuffix
Sets a tick label suffix.
ticktext
Sets the text displayed at the ticks position
via `tickvals`. Only has an effect if
`tickmode` is set to "array". Used with
`tickvals`.
ticktextsrc
Sets the source reference on plot.ly for
ticktext .
tickvals
Sets the values at which ticks on this axis
appear. Only has an effect if `tickmode` is set
to "array". Used with `ticktext`.
tickvalssrc
Sets the source reference on plot.ly for
tickvals .
tickwidth
Sets the tick width (in px).
title
plotly_study.graph_objects.scattergl.marker.colorbar.
Title instance or dict with compatible
properties
titlefont
Deprecated: Please use
scattergl.marker.colorbar.title.font instead.
Sets this color bar's title font. Note that the
title's font used to be set by the now
deprecated `titlefont` attribute.
titleside
Deprecated: Please use
scattergl.marker.colorbar.title.side instead.
Determines the location of color bar's title
with respect to the color bar. Note that the
title's location used to be set by the now
deprecated `titleside` attribute.
x
Sets the x position of the color bar (in plot
fraction).
xanchor
Sets this color bar's horizontal position
anchor. This anchor binds the `x` position to
the "left", "center" or "right" of the color
bar.
xpad
Sets the amount of padding (in px) along the x
direction.
y
Sets the y position of the color bar (in plot
fraction).
yanchor
Sets this color bar's vertical position anchor
This anchor binds the `y` position to the
"top", "middle" or "bottom" of the color bar.
ypad
Sets the amount of padding (in px) along the y
direction.
""",
),
**kwargs
)
import _plotly_utils.basevalidators
class ColoraxisValidator(_plotly_utils.basevalidators.SubplotidValidator):
def __init__(
self, plotly_name="coloraxis", parent_name="scattergl.marker", **kwargs
):
super(ColoraxisValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
dflt=kwargs.pop("dflt", None),
edit_type=kwargs.pop("edit_type", "calc"),
regex=kwargs.pop("regex", "/^coloraxis([2-9]|[1-9][0-9]+)?$/"),
role=kwargs.pop("role", "info"),
**kwargs
)
import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(self, plotly_name="color", parent_name="scattergl.marker", **kwargs):
super(ColorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "style"),
colorscale_path=kwargs.pop(
"colorscale_path", "scattergl.marker.colorscale"
),
**kwargs
)
import _plotly_utils.basevalidators
class CminValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(self, plotly_name="cmin", parent_name="scattergl.marker", **kwargs):
super(CminValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
implied_edits=kwargs.pop("implied_edits", {"cauto": False}),
role=kwargs.pop("role", "info"),
**kwargs
)
import _plotly_utils.basevalidators
class CmidValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(self, plotly_name="cmid", parent_name="scattergl.marker", **kwargs):
super(CmidValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
implied_edits=kwargs.pop("implied_edits", {}),
role=kwargs.pop("role", "info"),
**kwargs
)
import _plotly_utils.basevalidators
class CmaxValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(self, plotly_name="cmax", parent_name="scattergl.marker", **kwargs):
super(CmaxValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
implied_edits=kwargs.pop("implied_edits", {"cauto": False}),
role=kwargs.pop("role", "info"),
**kwargs
)
import _plotly_utils.basevalidators
class CautoValidator(_plotly_utils.basevalidators.BooleanValidator):
def __init__(self, plotly_name="cauto", parent_name="scattergl.marker", **kwargs):
super(CautoValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
implied_edits=kwargs.pop("implied_edits", {}),
role=kwargs.pop("role", "info"),
**kwargs
)
import _plotly_utils.basevalidators
class AutocolorscaleValidator(_plotly_utils.basevalidators.BooleanValidator):
def __init__(
self, plotly_name="autocolorscale", parent_name="scattergl.marker", **kwargs
):
super(AutocolorscaleValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
implied_edits=kwargs.pop("implied_edits", {}),
role=kwargs.pop("role", "style"),
**kwargs
)
| [
"you@example.com"
] | you@example.com |
841cd9e9d8193c58fdc4c4845d4a09b81a7bd904 | 2b8e7eadb920e96c75697880a9c5461aa8e0c5ed | /nabu/processing/processors/feature_computers/fbank.py | 77c4ebb1d59833e9ebe2c1032e1545f7cb99d2f4 | [
"MIT"
] | permissive | ishandutta2007/nabu | fb963ed3cd34ee340014e0c1e77927c838bba0ad | 313018a46f68cec1d4a7eb15b8b1cf68111a959c | refs/heads/master | 2020-04-03T04:57:57.911576 | 2018-12-14T11:02:52 | 2018-12-14T11:02:52 | 155,029,958 | 0 | 0 | MIT | 2018-12-06T18:20:12 | 2018-10-28T02:59:31 | Python | UTF-8 | Python | false | false | 1,446 | py | '''@file fbank.py
contains the fbank feature computer'''
import numpy as np
import base
import feature_computer
from sigproc import snip
class Fbank(feature_computer.FeatureComputer):
'''the feature computer class to compute fbank features'''
def comp_feat(self, sig, rate):
'''
compute the features
Args:
sig: the audio signal as a 1-D numpy array
rate: the sampling rate
Returns:
the features as a [seq_length x feature_dim] numpy array
'''
#snip the edges
sig = snip(sig, rate, float(self.conf['winlen']),
float(self.conf['winstep']))
feat, energy = base.logfbank(sig, rate, self.conf)
if self.conf['include_energy'] == 'True':
feat = np.append(feat, energy[:, np.newaxis], 1)
if self.conf['dynamic'] == 'delta':
feat = base.delta(feat)
elif self.conf['dynamic'] == 'ddelta':
feat = base.ddelta(feat)
elif self.conf['dynamic'] != 'nodelta':
raise Exception('unknown dynamic type')
return feat
def get_dim(self):
'''the feature dimemsion'''
dim = int(self.conf['nfilt'])
if self.conf['include_energy'] == 'True':
dim += 1
if self.conf['dynamic'] == 'delta':
dim *= 2
elif self.conf['dynamic'] == 'ddelta':
dim *= 3
return dim
| [
"vincent.renkens@esat.kuleuven.be"
] | vincent.renkens@esat.kuleuven.be |
3ebe5f1680c8def90fc1a2e88c430d6e45775dc0 | 3d32ffe2d942694ffbfd6042bf80556def4c40b7 | /word_search.py | 6473bbc2362e121760c466b6d6df2ddbd9d6523b | [] | no_license | aiiilii/py_code | 5f26951027120acc0a61c5a2a6701df70543d050 | 59ff08d75333176d649f0f89988fa27214612cb1 | refs/heads/master | 2022-11-25T21:25:57.238575 | 2020-08-03T17:38:39 | 2020-08-03T17:38:39 | 277,424,852 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,120 | py | from typing import List
class WordSearch:
def exist(self, board: List[List[str]], word: str) -> bool:
if not board:
return False
self.rows = len(board)
self.cols = len(board[0])
self.board = board
for row in range(self.rows):
for col in range(self.cols):
if self.backtrack(row, col, word):
return True
return False
def backtrack(self, row: int, col: int, word: str) -> bool:
# word is begin copied frm [1:], so it's length will decrease
# once it decreased to 0, means all letters of the word has been found
if len(word) == 0:
return True
if row < 0 or row >= self.rows or col < 0 or col >= self.cols or self.board[row][col] != word[0]:
return False
self.board[row][col] = '#'
for dx, dy in [(0, 1), (-1, 0), (0, -1), (1, 0)]:
if self.backtrack(row + dx, col + dy, word[1:]): # so word[0] always refers to the current index
return True
self.board[row][col] = word[0]
return False | [
"aili.hgong@gmail.com"
] | aili.hgong@gmail.com |
cdc243853b5430781b560f6d3f53ceeb14bb4b58 | a0447b03ad89a41a5c2e2073e32aeaf4d6279340 | /ironic/tests/unit/dhcp/test_dnsmasq.py | 64fe46f3393fd13874809d60d2532be93e42bae0 | [
"Apache-2.0"
] | permissive | openstack/ironic | 2ae87e36d7a62d44b7ed62cad4e2e294d48e061b | ab76ff12e1c3c2208455e917f1a40d4000b4e990 | refs/heads/master | 2023-08-31T11:08:34.486456 | 2023-08-31T04:45:05 | 2023-08-31T04:45:05 | 10,066,301 | 411 | 365 | Apache-2.0 | 2023-07-25T02:05:53 | 2013-05-14T22:28:24 | Python | UTF-8 | Python | false | false | 5,237 | py | #
# Copyright 2022 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import tempfile
from ironic.common import dhcp_factory
from ironic.common import utils as common_utils
from ironic.conductor import task_manager
from ironic.tests.unit.db import base as db_base
from ironic.tests.unit.objects import utils as object_utils
class TestDnsmasqDHCPApi(db_base.DbTestCase):
def setUp(self):
super(TestDnsmasqDHCPApi, self).setUp()
self.config(dhcp_provider='dnsmasq',
group='dhcp')
self.node = object_utils.create_test_node(self.context)
self.ports = [
object_utils.create_test_port(
self.context, node_id=self.node.id, id=2,
uuid='1be26c0b-03f2-4d2e-ae87-c02d7f33c782',
address='52:54:00:cf:2d:32',
pxe_enabled=True)]
self.optsdir = tempfile.mkdtemp()
self.addCleanup(lambda: common_utils.rmtree_without_raise(
self.optsdir))
self.config(dhcp_optsdir=self.optsdir, group='dnsmasq')
self.hostsdir = tempfile.mkdtemp()
self.addCleanup(lambda: common_utils.rmtree_without_raise(
self.hostsdir))
self.config(dhcp_hostsdir=self.hostsdir, group='dnsmasq')
dhcp_factory.DHCPFactory._dhcp_provider = None
self.api = dhcp_factory.DHCPFactory()
self.opts = [
{
'ip_version': 4,
'opt_name': '67',
'opt_value': 'bootx64.efi'
},
{
'ip_version': 4,
'opt_name': '210',
'opt_value': '/tftpboot/'
},
{
'ip_version': 4,
'opt_name': '66',
'opt_value': '192.0.2.135',
},
{
'ip_version': 4,
'opt_name': '150',
'opt_value': '192.0.2.135'
},
{
'ip_version': 4,
'opt_name': '255',
'opt_value': '192.0.2.135'
}
]
def test_update_dhcp(self):
with task_manager.acquire(self.context,
self.node.uuid) as task:
self.api.update_dhcp(task, self.opts)
dnsmasq_tag = task.node.driver_internal_info.get('dnsmasq_tag')
self.assertEqual(36, len(dnsmasq_tag))
hostfile = os.path.join(self.hostsdir,
'ironic-52:54:00:cf:2d:32.conf')
with open(hostfile, 'r') as f:
self.assertEqual(
'52:54:00:cf:2d:32,set:%s,set:ironic\n' % dnsmasq_tag,
f.readline())
optsfile = os.path.join(self.optsdir,
'ironic-%s.conf' % self.node.uuid)
with open(optsfile, 'r') as f:
self.assertEqual([
'tag:%s,67,bootx64.efi\n' % dnsmasq_tag,
'tag:%s,210,/tftpboot/\n' % dnsmasq_tag,
'tag:%s,66,192.0.2.135\n' % dnsmasq_tag,
'tag:%s,150,192.0.2.135\n' % dnsmasq_tag,
'tag:%s,255,192.0.2.135\n' % dnsmasq_tag],
f.readlines())
def test_get_ip_addresses(self):
with task_manager.acquire(self.context,
self.node.uuid) as task:
with tempfile.NamedTemporaryFile() as fp:
self.config(dhcp_leasefile=fp.name, group='dnsmasq')
fp.write(b"1659975057 52:54:00:cf:2d:32 192.0.2.198 * *\n")
fp.flush()
self.assertEqual(
['192.0.2.198'],
self.api.provider.get_ip_addresses(task))
def test_clean_dhcp_opts(self):
with task_manager.acquire(self.context,
self.node.uuid) as task:
self.api.update_dhcp(task, self.opts)
hostfile = os.path.join(self.hostsdir,
'ironic-52:54:00:cf:2d:32.conf')
optsfile = os.path.join(self.optsdir,
'ironic-%s.conf' % self.node.uuid)
self.assertTrue(os.path.isfile(hostfile))
self.assertTrue(os.path.isfile(optsfile))
with task_manager.acquire(self.context,
self.node.uuid) as task:
self.api.clean_dhcp(task)
# assert the host file remains with the ignore directive, and the opts
# file is deleted
with open(hostfile, 'r') as f:
self.assertEqual(
'52:54:00:cf:2d:32,ignore\n',
f.readline())
self.assertFalse(os.path.isfile(optsfile))
| [
"sbaker@redhat.com"
] | sbaker@redhat.com |
b25344284e08629b574c9ae158dba420ac087648 | 47a95e194d8df774c415d6d57ad2e4987367528d | /vehicles/vehicle.py | 4a49877e3b5d2465e2545e50884790faf96bedab | [] | no_license | kwohl/python-inheritance | e0185648b9441ef6c0672ef930239273373d8707 | b52f1890b7592514e1c5868952f425d544de7af7 | refs/heads/master | 2022-04-22T00:12:23.693760 | 2020-04-20T01:08:51 | 2020-04-20T01:08:51 | 257,136,805 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 304 | py | class Vehicle:
def __init__(self):
self.main_color = ""
self.maximum_occupancy = ""
def drive(self):
print("Vroooom!")
def turn(self, direction):
print(f"The vehicle has turned {direction}.")
def stop(self):
print("The vehicle has stopped.") | [
"ktwohl@gmail.com"
] | ktwohl@gmail.com |
9c65ddf63bf600bbb544a5128c4c08c04cfa4145 | 791d7e411ec442786a40ecf6e911ff7b2dd1c261 | /tests/garage/tf/algos/test_batch_polopt2.py | c5010d678834f50cc709f1d7f3b3fee0c63c1645 | [
"MIT"
] | permissive | adibellathur/garage | 4153e765134e70af89042fe76c9765d7f7dbccb4 | 8394f0cf2b77c0a5b3a7b1ea977fa6cb3f9df0ca | refs/heads/master | 2021-03-03T07:30:16.201031 | 2020-03-06T23:54:25 | 2020-03-06T23:54:25 | 245,942,862 | 0 | 0 | MIT | 2020-03-09T04:20:43 | 2020-03-09T04:20:42 | null | UTF-8 | Python | false | false | 8,858 | py | from unittest import mock
import numpy as np
from garage.np.baselines import LinearFeatureBaseline
from garage.tf.algos import BatchPolopt2
from garage.tf.baselines import GaussianMLPBaseline
from garage.tf.envs import TfEnv
from garage.tf.experiment import LocalTFRunner
from garage.tf.policies import CategoricalLSTMPolicy
from garage.tf.policies import CategoricalMLPPolicy
from garage.tf.policies import GaussianLSTMPolicy
from garage.tf.policies import GaussianMLPPolicy
from tests.fixtures import snapshot_config, TfGraphTestCase
from tests.fixtures.envs.dummy import DummyBoxEnv
from tests.fixtures.envs.dummy import DummyDiscreteEnv
class TestBatchPolopt2(TfGraphTestCase):
@mock.patch.multiple(BatchPolopt2, __abstractmethods__=set())
# pylint: disable=abstract-class-instantiated, no-member
def test_process_samples_continuous_non_recurrent(self):
env = TfEnv(DummyBoxEnv())
policy = GaussianMLPPolicy(env_spec=env.spec)
baseline = GaussianMLPBaseline(env_spec=env.spec)
max_path_length = 100
with LocalTFRunner(snapshot_config, sess=self.sess) as runner:
algo = BatchPolopt2(env_spec=env.spec,
policy=policy,
baseline=baseline,
max_path_length=max_path_length,
flatten_input=True)
runner.setup(algo, env, sampler_args=dict(n_envs=1))
runner.train(n_epochs=1, batch_size=max_path_length)
paths = runner.obtain_samples(0)
samples = algo.process_samples(0, paths)
# Since there is only 1 vec_env in the sampler and DummyBoxEnv
# never terminate until it reaches max_path_length, batch size
# must be max_path_length, i.e. 100
assert samples['observations'].shape == (
max_path_length, env.observation_space.flat_dim)
assert samples['actions'].shape == (max_path_length,
env.action_space.flat_dim)
assert samples['rewards'].shape == (max_path_length, )
assert samples['baselines'].shape == (max_path_length, )
assert samples['returns'].shape == (max_path_length, )
# there is only 1 path
assert samples['lengths'].shape == (1, )
# non-recurrent policy has empty agent info
assert samples['agent_infos'] == {}
# DummyBoxEnv has env_info dummy
assert samples['env_infos']['dummy'].shape == (max_path_length, )
assert isinstance(samples['average_return'], float)
# pylint: disable=abstract-class-instantiated, no-member
@mock.patch.multiple(BatchPolopt2, __abstractmethods__=set())
def test_process_samples_continuous_recurrent(self):
env = TfEnv(DummyBoxEnv())
policy = GaussianLSTMPolicy(env_spec=env.spec)
baseline = GaussianMLPBaseline(env_spec=env.spec)
max_path_length = 100
with LocalTFRunner(snapshot_config, sess=self.sess) as runner:
algo = BatchPolopt2(env_spec=env.spec,
policy=policy,
baseline=baseline,
max_path_length=max_path_length,
flatten_input=True)
runner.setup(algo, env, sampler_args=dict(n_envs=1))
runner.train(n_epochs=1, batch_size=max_path_length)
paths = runner.obtain_samples(0)
samples = algo.process_samples(0, paths)
# Since there is only 1 vec_env in the sampler and DummyBoxEnv
# never terminate until it reaches max_path_length, batch size
# must be max_path_length, i.e. 100
assert samples['observations'].shape == (
max_path_length, env.observation_space.flat_dim)
assert samples['actions'].shape == (max_path_length,
env.action_space.flat_dim)
assert samples['rewards'].shape == (max_path_length, )
assert samples['baselines'].shape == (max_path_length, )
assert samples['returns'].shape == (max_path_length, )
# there is only 1 path
assert samples['lengths'].shape == (1, )
for key, shape in policy.state_info_specs:
assert samples['agent_infos'][key].shape == (max_path_length,
np.prod(shape))
# DummyBoxEnv has env_info dummy
assert samples['env_infos']['dummy'].shape == (max_path_length, )
assert isinstance(samples['average_return'], float)
# pylint: disable=abstract-class-instantiated, no-member
@mock.patch.multiple(BatchPolopt2, __abstractmethods__=set())
def test_process_samples_discrete_non_recurrent(self):
env = TfEnv(DummyDiscreteEnv())
policy = CategoricalMLPPolicy(env_spec=env.spec)
baseline = LinearFeatureBaseline(env_spec=env.spec)
max_path_length = 100
with LocalTFRunner(snapshot_config, sess=self.sess) as runner:
algo = BatchPolopt2(env_spec=env.spec,
policy=policy,
baseline=baseline,
max_path_length=max_path_length,
flatten_input=True)
runner.setup(algo, env, sampler_args=dict(n_envs=1))
runner.train(n_epochs=1, batch_size=max_path_length)
paths = runner.obtain_samples(0)
samples = algo.process_samples(0, paths)
# Since there is only 1 vec_env in the sampler and DummyDiscreteEnv
# always terminate, number of paths must be max_path_length, and
# batch size must be max_path_length as well, i.e. 100
assert samples['observations'].shape == (
max_path_length, env.observation_space.flat_dim)
assert samples['actions'].shape == (max_path_length,
env.action_space.n)
assert samples['rewards'].shape == (max_path_length, )
assert samples['baselines'].shape == (max_path_length, )
assert samples['returns'].shape == (max_path_length, )
# there is 100 path
assert samples['lengths'].shape == (max_path_length, )
# non-recurrent policy has empty agent info
assert samples['agent_infos'] == {}
# non-recurrent policy has empty env info
assert samples['env_infos'] == {}
assert isinstance(samples['average_return'], float)
# pylint: disable=abstract-class-instantiated, no-member
@mock.patch.multiple(BatchPolopt2, __abstractmethods__=set())
def test_process_samples_discrete_recurrent(self):
env = TfEnv(DummyDiscreteEnv())
policy = CategoricalLSTMPolicy(env_spec=env.spec)
baseline = LinearFeatureBaseline(env_spec=env.spec)
max_path_length = 100
with LocalTFRunner(snapshot_config, sess=self.sess) as runner:
algo = BatchPolopt2(env_spec=env.spec,
policy=policy,
baseline=baseline,
max_path_length=max_path_length,
flatten_input=True)
runner.setup(algo, env, sampler_args=dict(n_envs=1))
runner.train(n_epochs=1, batch_size=max_path_length)
paths = runner.obtain_samples(0)
samples = algo.process_samples(0, paths)
# Since there is only 1 vec_env in the sampler and DummyDiscreteEnv
# always terminate, number of paths must be max_path_length, and
# batch size must be max_path_length as well, i.e. 100
assert samples['observations'].shape == (
max_path_length, env.observation_space.flat_dim)
assert samples['actions'].shape == (max_path_length,
env.action_space.n)
assert samples['rewards'].shape == (max_path_length, )
assert samples['baselines'].shape == (max_path_length, )
assert samples['returns'].shape == (max_path_length, )
# there is 100 path
assert samples['lengths'].shape == (max_path_length, )
# non-recurrent policy has empty agent info
for key, shape in policy.state_info_specs:
assert samples['agent_infos'][key].shape == (max_path_length,
np.prod(shape))
# non-recurrent policy has empty env info
assert samples['env_infos'] == {}
assert isinstance(samples['average_return'], float)
| [
"noreply@github.com"
] | noreply@github.com |
e5c71a0cecf59d7144920a04c3f3d69e2191b512 | 5c7f4f3b3c8f202646bbaac6213aa398aa6fcdb1 | /hw6/icp.py | f051a82ce04995aa6d43e49953fafcad94c5a91d | [] | no_license | zengweiyi1994/gvv-hw | dcd9717423649ec643d4a65beac8a4d388068097 | f56f2baad887050221332c2423c76cecd163f2af | refs/heads/master | 2021-01-15T11:20:46.830832 | 2016-03-07T02:32:44 | 2016-03-07T02:32:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,813 | py | import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import cv2
from sklearn.neighbors import NearestNeighbors as NN
def latlonelev_to_xyz(points):
EARTH_R = 6371004 #r of earth in meters
if len(points.shape) == 1:
points = points.reshape((-1, 3))
lat_rad = np.deg2rad(points[:, 0])
lon_rad = np.deg2rad(points[:, 1])
elev = points[:, 2]
x = EARTH_R * np.cos(lat_rad) * np.cos(lon_rad)
y = EARTH_R * np.cos(lat_rad) * np.sin(lon_rad)
z = EARTH_R * np.sin(lat_rad) + elev
return np.vstack((x, y, z)).T
def find_nearest(nn, points2_prime, M):
dist, idx = nn.kneighbors(np.dot(points2_prime, M))
return dist, idx[:, 0]
#M is a 4*3 Matrix, transposed since data points are in row vectors
def icp(points1, points2_prime, M_init, threshold=0.021):
M = M_init
nn = NN(n_neighbors=1).fit(points1)
for i in xrange(1000):
dist, idx = find_nearest(nn, points2_prime, M)
avg_dist = np.mean(dist)
print "Iteration %d, average distance %f" % (i, avg_dist)
if avg_dist < threshold:
break
M = np.dot(np.linalg.pinv(points2_prime), points1[idx, :])
dist, idx = find_nearest(nn, points2_prime, M)
print "Final result, average distance %f" % (np.mean(dist))
print "M ", M
print "M transpose", M.T
def main():
p1 = np.genfromtxt("data/pointcloud1.fuse")
p2 = np.genfromtxt("data/pointcloud2.fuse")
points1 = latlonelev_to_xyz(p1)
points2 = latlonelev_to_xyz(p2)
points2_prime = np.hstack( (points2, np.ones((points2.shape[0], 1))) )
translation = np.zeros((1, 3))
M_init = np.concatenate((np.eye(3), np.zeros((1, 3))), axis=0)
print M_init
icp(points1, points2_prime, M_init)
if __name__ == "__main__":
main()
| [
"lostleaf@me.com"
] | lostleaf@me.com |
da5ea17463b887c3acbf98ecc02eb5c23d360b40 | cfb8b2403d64ca6aa2a336c67159144c55bf58e8 | /module4/5_9_Excersises.py | 48d38879f313c3a8a993a341cc34f4a8d6a2cc1d | [] | no_license | DomfeLacre/zyBooksPython_CS200 | 77bd43f89fbeec7589e50842365100cc9ce284f8 | 959dcc12dec808b39a42a25adee69dbf152bac96 | refs/heads/master | 2020-08-06T22:51:05.160759 | 2018-05-15T01:38:38 | 2018-05-15T01:38:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 865 | py | empanada_cost = 3
taco_cost = 4
user_money = int(input('Enter money for meal: '))
num_diners = int(input('How many people are eating: '))
max_empanadas = user_money // empanada_cost
max_tacos = user_money // taco_cost
meal_cost = 0
num_options = 0
for num_tacos in range(max_tacos + 1):
for num_empanadas in range(max_empanadas + 1):
# Total items purchased must be equally divisible by number of diners
if (num_tacos + num_empanadas) % num_diners != 0:
continue
meal_cost = (num_empanadas * empanada_cost) + (num_tacos * taco_cost)
if meal_cost == user_money:
print('$%d buys %d empanadas and %d tacos without change.' %
(meal_cost, num_empanadas, num_tacos))
num_options += 1
if num_options == 0:
print('You cannot buy a meal without having change left over.')
| [
"thewickk@thewickk.com"
] | thewickk@thewickk.com |
9c3603b3147898a39bca1a4753664309945f4886 | 264c6f1a2202704bbd63b6aa7a658a80d8c6f1c3 | /sanctum/ClientMonitor/display/models.py | d8fee65d9a1235972777c62f3ae9aeea31f3ee77 | [] | no_license | samiulla123/sanctum | 9e3516ddfab7abce525196c8e376a96bbae24e6e | 6d4ce505b51bfeff4157acdc9e73d78fff1e28ea | refs/heads/master | 2021-08-22T10:30:50.281186 | 2018-12-22T12:39:55 | 2018-12-22T12:39:55 | 154,846,544 | 0 | 0 | null | 2021-06-10T20:56:42 | 2018-10-26T14:23:59 | Python | UTF-8 | Python | false | false | 2,914 | py | from django.db import models
import hashlib
from django.contrib.auth.models import User
from uuid import uuid4
class NetworkStatus(models.Model):
time_stamp=models.CharField(max_length=20,null=False)
Hostname=models.CharField(max_length=20,null=False)
HostIpAddress=models.CharField(max_length=20,null=False)
def __str__(self):
return "Time:{} \n Hostname: {} \n Ip address: {}".format(self.time_stamp, self.Hostname,self.HostIpAddress)
class Clients(models.Model):
c_name=models.CharField(max_length=20)
c_id = models.AutoField(primary_key=True)
c_inet=models.CharField(max_length=15,null=False,unique=True)
c_mac=models.CharField(max_length=17,unique=True)
@property
def c_token(self):
hash_object = hashlib.sha1((self.c_inet+self.c_mac).encode('utf-8'))
hex_dig = hash_object.hexdigest()
return str(hex_dig)
def __str__(self):
return "c_id:{} inet:{}\nmac:{}\n".format(self.c_id,self.c_inet,self.c_mac)
class ClientInfo(models.Model):
c_id=models.ForeignKey(Clients,on_delete=models.CASCADE)
time_stamp = models.DateTimeField(auto_now_add=True)
c_name=models.CharField(max_length=20,null=False)#
c_arch=models.CharField(max_length=5)#
c_platform=models.CharField(max_length=20)#
c_cpu=models.CharField(max_length=40)#
c_rom=models.IntegerField()#
c_ram_total=models.IntegerField()#
c_cpu_cores=models.IntegerField()#
c_cpu_freq=models.CharField(max_length=10)#
class MemoryTap(models.Model):
c_id=models.ForeignKey(Clients,on_delete=models.CASCADE)
time_stamp = models.DateTimeField(auto_now_add=True)
m_total=models.IntegerField()
m_used=models.IntegerField()
m_free=models.IntegerField()
m_active=models.IntegerField(blank=True,null=True)
m_inactive=models.IntegerField(blank=True,null=True)
m_shared=models.IntegerField(blank=True,null=True)
m_cached=models.IntegerField(blank=True,null=True)
m_buffers=models.IntegerField(blank=True,null=True)
class DiskTap(models.Model):
    """Point-in-time disk usage sample from a client."""

    c_id = models.ForeignKey(Clients, on_delete=models.CASCADE)
    time_stamp = models.DateTimeField(auto_now_add=True)
    d_total = models.IntegerField()
    d_used = models.IntegerField()
    d_free = models.IntegerField()
    d_used_perc = models.IntegerField()  # percentage of disk used
class NetworkTap(models.Model):
    """Point-in-time network throughput sample (upload/download) from a client."""

    c_id = models.ForeignKey(Clients, on_delete=models.CASCADE)
    time_stamp = models.DateTimeField(auto_now_add=True)
    n_up = models.IntegerField()    # upload figure — units not shown here; confirm
    n_down = models.IntegerField()  # download figure — units not shown here; confirm
class CpuTap(models.Model):
    """Point-in-time CPU usage, frequency and temperature sample from a client."""

    c_id = models.ForeignKey(Clients, on_delete=models.CASCADE)
    time_stamp = models.DateTimeField(auto_now_add=True)
    cpu_usage = models.DecimalField(decimal_places=2, max_digits=5)
    cpu_curr_freq = models.IntegerField()
    cpu_min_freq = models.IntegerField()
    cpu_max_freq = models.IntegerField()
    # Temperature readings — current value plus the high/critical thresholds.
    cpu_curr_temp = models.DecimalField(decimal_places=5, max_digits=10)
    cpu_high_temp = models.DecimalField(decimal_places=5, max_digits=10)
    cpu_crit_temp = models.DecimalField(decimal_places=5, max_digits=10)
| [
"samiulla@example.com"
] | samiulla@example.com |
9d57b09da4b4391784c31591838c0a1d22bae5ab | 567feca28e22b7c7bd94a9906f84c6adcff0bbf4 | /lists/tests.py | a6ab8c8725b351b459ccb4a7d1eabf3cc59e6bf5 | [] | no_license | subliminimalist/ObeyTheTestingGoat | 24244861c3c575cf856e43beda36706d5c5ec252 | 92326c10409e8c1d42bbc1aa8bc468b2bcb16c2d | refs/heads/master | 2020-12-20T15:20:50.716999 | 2020-02-04T17:14:17 | 2020-02-04T17:14:17 | 236,121,328 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,000 | py | from django.urls import resolve
from django.test import TestCase
from django.http import HttpRequest
from lists.views import home_page
from lists.models import Item
class HomePageTest(TestCase):
    """Functional behaviour of the home page view ('/')."""

    def test_uses_home_template(self):
        response = self.client.get('/')
        self.assertTemplateUsed(response, 'home.html')

    def test_can_save_a_POST_request(self):
        # Only persistence is asserted here; the redirect behaviour is covered
        # by test_redirects_after_POST, so the duplicated status/location
        # assertions were removed to keep each test single-purpose.
        self.client.post('/', data={'item_text': 'A new list item'})

        self.assertEqual(Item.objects.count(), 1)
        new_item = Item.objects.first()
        self.assertEqual(new_item.text, 'A new list item')

    def test_redirects_after_POST(self):
        response = self.client.post('/', data={'item_text': 'A new list item'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response['location'], '/')

    def test_only_saves_items_when_necessary(self):
        # A plain GET must not create any Item rows.
        self.client.get('/')
        self.assertEqual(Item.objects.count(), 0)

    def test_displays_all_list_items(self):
        Item.objects.create(text='itemey 1')
        Item.objects.create(text='itemey 2')

        response = self.client.get('/')

        self.assertIn('itemey 1', response.content.decode())
        self.assertIn('itemey 2', response.content.decode())
class ItemModelTest(TestCase):
    """Unit tests for the Item ORM model."""

    def test_saving_and_retrieving_items(self):
        # Persist two items in order, then verify they round-trip intact.
        for text in ('The first (ever) list item', 'Item the second'):
            item = Item()
            item.text = text
            item.save()

        stored = Item.objects.all()
        self.assertEqual(stored.count(), 2)

        self.assertEqual(stored[0].text, 'The first (ever) list item')
        self.assertEqual(stored[1].text, 'Item the second')
"benrhaas@gmail.com"
] | benrhaas@gmail.com |
a9b258e2c573143ebd36f300f43573409fb33c3b | 6b2efd7ad059a5e5590c51f97c11d550fbcd0e1b | /conftest.py | 6b32d91b7000c813bb35fd2b7cd411d2e1ddd12c | [] | no_license | lumarodrigues/NovaBase | e320fe2d0660fec31acea6e3c61179e26f5c027a | 951dd8e3f0a8eabc6520c7ccf5d32572df413dc2 | refs/heads/main | 2023-04-22T01:40:53.132868 | 2021-05-04T01:15:36 | 2021-05-04T01:31:49 | 341,574,369 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 243 | py | from projeto import settings
import pytest
@pytest.fixture(autouse=True)
def turn_ssl_rediret_off_for_tests(settings):
    """
    Disable Django's HTTPS redirect for every test automatically.

    With SECURE_SSL_REDIRECT left on, each test-client request would need
    secure=True; switching it off here keeps the tests on plain HTTP.
    NOTE(review): "rediret" in the fixture name is a typo for "redirect";
    renaming would be safe since autouse fixtures are not requested by name.
    """
    settings.SECURE_SSL_REDIRECT = False
| [
"rdsluma@gmail.com"
] | rdsluma@gmail.com |
ef034d35ea236f21d1a19087ab05e83dd80a2a16 | 77adfcad98a77d285deace435dbef5e8c85189a4 | /Python Fundamentals/Regular Expressions/03_find_occurrences_of_word_in_sentence.py | 878bcf33ace3fbfb122b5f12f4fad5cba5b6e307 | [] | no_license | BillyDevEnthusiast/Programming-with-Python | 8cd72b2a6eec144d1d5294c9e9abf522c33a7b1c | 8c8971d1202d486a1c4eec1e70c00321c9c6c933 | refs/heads/main | 2023-01-06T00:57:26.193171 | 2020-10-28T14:54:37 | 2020-10-28T14:54:37 | 308,001,697 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 132 | py | import re
def count_word_occurrences(text, word):
    """Count whole-word, case-insensitive occurrences of *word* in *text*.

    The word is user-supplied, so it is passed through re.escape to prevent
    regex metacharacters (e.g. '+', '.', '(') from breaking or corrupting
    the pattern — the original interpolated it into the regex unescaped.
    """
    pattern = rf"\b{re.escape(word)}\b"
    return len(re.findall(pattern, text, re.IGNORECASE))


if __name__ == "__main__":
    # Script behaviour is unchanged: read the sentence and the word, print the count.
    text = input()
    word = input()
    print(count_word_occurrences(text, word))
| [
"noreply@github.com"
] | noreply@github.com |
013144d0925057c41b4f162bf5719a8c68bfacf1 | 50da1faff8cf2a2cb3fc813cd55fe20ba62339e9 | /flipper/bucketing/percentage/linear_ramp_percentage.py | e5017c9aa7846afc9f8afe156cf32d7e7ca0fed3 | [
"Apache-2.0"
] | permissive | carta/flipper-client | bba1345269c66569c253680d4c2ed3db144c8d67 | 8c07bae36279b549625eaa1dd7a293d495332a1f | refs/heads/master | 2023-05-25T02:41:14.323629 | 2023-04-20T17:28:03 | 2023-04-20T17:42:15 | 138,798,243 | 92 | 19 | NOASSERTION | 2023-05-13T06:26:07 | 2018-06-26T21:59:59 | Python | UTF-8 | Python | false | false | 2,469 | py | # Copyright 2018 eShares, Inc. dba Carta, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from datetime import datetime
from typing import Any, Dict, Optional, cast
from .base import AbstractPercentage
class LinearRampPercentage(AbstractPercentage):
    """Percentage that grows linearly from ``initial_value`` to ``final_value``.

    The ramp begins at ``initial_time`` (now, if omitted) and reaches the
    final value after ``ramp_duration`` seconds, where it is clamped.
    """

    def __init__(
        self,
        initial_value: float = 0.0,
        final_value: float = 1.0,
        ramp_duration: int = 3600,
        initial_time: Optional[int] = None,
    ) -> None:
        self._initial_value = initial_value
        self._final_value = final_value
        self._ramp_duration = ramp_duration
        # The argument is a Unix timestamp; store the start as a datetime.
        self._initial_time = (
            datetime.now()
            if initial_time is None
            else datetime.fromtimestamp(initial_time)
        )

    @classmethod
    def get_type(cls) -> str:
        return "LinearRampPercentage"

    @property
    def value(self) -> float:
        """Current percentage, never exceeding the final value."""
        if self._ramp_duration == 0:
            # A zero-length ramp jumps straight to the target (and avoids
            # the division by zero in ``slope``).
            return self._final_value
        ramped = self.slope * self.dt + self._initial_value
        return min(self._final_value, ramped)

    @property
    def slope(self) -> float:
        """Rate of change per second over the configured ramp duration."""
        return (self._final_value - self._initial_value) / self._ramp_duration

    @property
    def dt(self) -> float:
        """Seconds elapsed since the ramp started."""
        elapsed = datetime.now() - self._initial_time
        return elapsed.total_seconds()

    def to_dict(self) -> Dict[str, Any]:
        """Serialize, extending the base-class fields with the ramp parameters."""
        serialized = dict(super().to_dict())
        serialized["initial_value"] = self._initial_value
        serialized["final_value"] = self._final_value
        serialized["ramp_duration"] = self._ramp_duration
        serialized["initial_time"] = int(self._initial_time.timestamp())
        return serialized

    @classmethod
    def from_dict(cls, fields: Dict[str, Any]) -> "LinearRampPercentage":
        """Rebuild an instance from ``to_dict`` output, applying defaults."""
        return cls(
            initial_value=cast(float, fields.get("initial_value", 0.0)),
            final_value=cast(float, fields.get("final_value", 1.0)),
            ramp_duration=cast(int, fields.get("ramp_duration", 3600)),
            initial_time=fields.get("initial_time"),
        )
| [
"adam.savitzky@gmail.com"
] | adam.savitzky@gmail.com |
7ff5bfc8722ee1952627a56625b5addaa4d9622d | 4dbff391e9474c83668515c995ac75680e8c78cd | /1106-404check.py | f5321ed46ea4611ce1f005ab9413483a9084ceab | [] | no_license | choser89/python-automation-practice | 93accf08e622ab0bbb99f15999dc00cc46d99e43 | c61a2fe44c696d6f948a9c6b75539f3810f84b1b | refs/heads/master | 2021-06-19T03:07:59.135290 | 2017-06-29T08:38:09 | 2017-06-29T08:38:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 668 | py | import sys, requests, bs4
# Fetch the page given as argv[1] and report the HTTP status of every link on it,
# colouring 200s green and everything else red.
W = '\033[0m'   # ANSI reset (default colour)
R = '\033[31m'  # red
G = '\033[32m'  # green

url = sys.argv[1]

res = requests.get(url)
res.raise_for_status()
soup = bs4.BeautifulSoup(res.text, 'html.parser')
anchors = soup.select('a')

for anchor in anchors:
    href = anchor.get('href')
    # Bug fix: anchors without an href returned None and crashed .startswith();
    # also skip schemes/fragments that cannot be fetched over HTTP.
    if not href or href.startswith(('mailto:', 'javascript:', '#')):
        continue
    if not href.startswith('http'):
        # Bug fix: the original tested href.endswith('/'), which glued absolute
        # paths like "/about" onto the base URL with a double slash and relative
        # paths like "img/" with no separator at all. The correct question is
        # whether the path already begins with a slash.
        if href.startswith('/'):
            href = url + href
        else:
            href = url + '/' + href
    res = requests.get(href)
    if res.status_code == 200:
        print('[' + G + str(res.status_code) + W + '] ', end='')
    else:
        print('[' + R + str(res.status_code) + W + '] ', end='')
    print(href)
| [
"samkami@icloud.com"
] | samkami@icloud.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.