blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2 values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313 values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107 values | src_encoding stringclasses 20 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 4 6.02M | extension stringclasses 78 values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b5df53f8f08012827a1681f36a0798d9aada7ba7 | 36c53e8c914069832097fefe4d363a73947e805a | /gen_bert_index_data.py | 75d0e62cfe04baa74264737ab38ebf8288c269ef | [] | no_license | lvbu12/Bert-Training-and-News-Classification | 4a8385481e33c26a973ba863e73b9e2c72bad6ac | e3e14350d153a9d9350f92fe126450ae7c54ab88 | refs/heads/master | 2020-04-27T17:25:53.215961 | 2019-03-09T13:18:14 | 2019-03-09T13:18:14 | 174,518,813 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,846 | py | # -*- coding: utf-8 -*-
from utils import get_chars, get_labels, flatten
import random
import collections
import os
import sys
class Dataset(object):
def __init__(self, filepath, char2idx):
self.data = open(filepath, 'r', encoding='utf-8').readlines()
self.char2idx = char2idx
self.chars = [key for key in self.char2idx]
def _gen_item(self, index):
one, two, label = self.data[index][:-1].split('\t')
one_idx = [self.char2idx[c] for c in one]
two_idx = [self.char2idx[c] for c in two]
words_idx = [self.char2idx['[CLS]']] + one_idx + [self.char2idx['[SEP]']] + two_idx + [self.char2idx['[SEP]']]
sent_idx = [1] + [1 for _ in one_idx] + [1] + [2 for _ in two_idx] + [2]
i = 0
mask_num = int(0.15 * len(words_idx))
shuf_idx = list(range(len(words_idx)))
random.shuffle(shuf_idx)
tmp_set = set()
mask_dict = collections.OrderedDict()
while len(mask_dict) < mask_num:
if words_idx[shuf_idx[i]] == self.char2idx['[CLS]'] or words_idx[shuf_idx[i]] == self.char2idx['[SEP]']:
i += 1
continue
if shuf_idx[i] in tmp_set:
i += 1
continue
tmp_set.add(shuf_idx[i])
mask_dict[shuf_idx[i]] = words_idx[shuf_idx[i]]
if random.random() < 0.8:
words_idx[shuf_idx[i]] = self.char2idx['[MASK]']
else:
if random.random() < 0.5:
pass
else:
words_idx[shuf_idx[i]] = self.char2idx[random.choice(self.chars)]
i += 1
mask_index = sorted(mask_dict)
mask_label_idx = [mask_dict[k] for k in mask_index]
assert len(words_idx) == len(sent_idx)
assert len(mask_index) == len(mask_label_idx)
#return words_idx, sent_idx, mask_index, mask_label_idx, label_idx
res = ' '.join([str(idx) for idx in words_idx]) + '\t'
res += ' '.join([str(idx) for idx in sent_idx]) + '\t'
res += ' '.join([str(idx) for idx in mask_index]) + '\t'
res += ' '.join([str(idx) for idx in mask_label_idx]) + '\t'
res += label + '\n'
return res
def __len__(self):
return len(self.data)
def gen_bert_data(self, output_file):
with open(output_file, 'w', encoding='utf-8') as f:
for i in range(len(self.data)):
f.write(self._gen_item(i))
if __name__ == "__main__":
    if len(sys.argv) != 4:
        # Fix: the original printed the literal "%s" — the program name was
        # never substituted into the usage string.
        print('Using: python %s chars_vocab_path raw_text_path output_idx_data_path' % sys.argv[0])
        sys.exit(1)
    # argv[1]: character-vocabulary file; argv[2]: raw tab-separated text;
    # argv[3]: destination path for the encoded index data.
    char2idx, idx2char = get_chars(sys.argv[1])
    data = Dataset(sys.argv[2], char2idx)
    data.gen_bert_data(sys.argv[3])
    #print(data._gen_item(0))
    pass
| [
"lvbu_12@163.com"
] | lvbu_12@163.com |
1dedcfec3e6ba9a7ffcf495e0b17fc800287de3a | 94487c5ca4018dbcc614213f9b8ad1e741c5f967 | /weather_api/project_api/migrations/0001_initial.py | 564c245a9fe5054359cd0e736e2fd2024380f5d1 | [] | no_license | akanuragkumar/weather-api | 3c1ebe9d8e6d33caf14bdbf0dff7de3193cb7926 | 6e83b892f9f0a95a676c376bdd9587b1a4e67085 | refs/heads/main | 2023-05-09T10:01:15.642353 | 2021-06-08T03:14:53 | 2021-06-08T03:14:53 | 374,013,913 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,108 | py | # Generated by Django 3.2.4 on 2021-06-06 03:29
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import jsonfield.fields
import uuid
class Migration(migrations.Migration):
    # Auto-generated initial migration for the weather_report app: creates
    # the city_weather_collection and mailing_task tables.  Avoid hand-editing
    # the operations once this migration has been applied anywhere.
    initial = True
    dependencies = [
        # MailingTask.user below references the (swappable) auth user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        # Per-city weather snapshot keyed by a random UUID primary key.
        migrations.CreateModel(
            name='CityWeatherCollection',
            fields=[
                ('modified_on', models.DateTimeField(auto_now=True, null=True)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('uuid', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('city', models.CharField(default='', max_length=100)),
                ('current_temp', models.CharField(default='', max_length=100)),
                ('feels_like_temp', models.CharField(default='', max_length=100)),
            ],
            options={
                'verbose_name': 'City Weather Collection',
                'verbose_name_plural': 'City Weather Collections',
                'db_table': 'city_weather_collection',
            },
        ),
        # Mailing job: a JSON list of recipient emails plus a status string,
        # owned by a user (cascade-deleted with the user).
        migrations.CreateModel(
            name='MailingTask',
            fields=[
                ('modified_on', models.DateTimeField(auto_now=True, null=True)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('uuid', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('emails', jsonfield.fields.JSONField(default=list)),
                ('status', models.CharField(default='', max_length=100)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_collections', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Mailing Task',
                'verbose_name_plural': 'Mailing Tasks',
                'db_table': 'mailing_task',
            },
        ),
    ]
| [
"akanuragkumar712@gmail.com"
] | akanuragkumar712@gmail.com |
2b976c4b62cef478a9116f1d5967dc385a9ff6f8 | c6a35b1c97b56e7364a87fe4d2da6acf2b2b315b | /accounts/urls.py | 464772dca54b980757b456a7a8a5fcde9a07bf11 | [] | no_license | Youssefmesalm/Back | 45727c28413b4b280d76a2c93b3ca27907267204 | 9d1af2c51218bd8ba3d8e732610776ab6efb95b4 | refs/heads/main | 2023-05-02T01:57:12.764221 | 2021-05-18T20:01:04 | 2021-05-18T20:01:04 | 348,503,504 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 176 | py | from rest_framework import routers
from .api import UserViewSet
# DRF router: auto-generates the CRUD url patterns for UserViewSet under the
# 'api/user' prefix; the third positional argument ('users') is the basename.
router = routers.DefaultRouter()
router.register('api/user', UserViewSet , 'users')
urlpatterns = router.urls | [
"youssefmesalm@yahoo.com"
] | youssefmesalm@yahoo.com |
31bf08beefc0a527e97eba8fee68c0b2897a73f2 | 2926780ac13b74882b906d920968c00c48d7b181 | /simple_social_clone/simplesocial/posts/templates/posts/templates_project/templates_app/templatetags/my_extras.py | dc364df1917ba3813eff6e98ee1d4eccb8226e33 | [] | no_license | SodimuDemilade/my_space | 58aaf42701d277cd75de8fef64a55ce5b8f032b3 | 11918964bb382667746a63158869b2dd75bacaac | refs/heads/master | 2023-06-23T04:35:03.784930 | 2021-07-13T21:46:44 | 2021-07-13T21:46:44 | 385,739,523 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 248 | py | from django import template
register = template.Library()
@register.filter(name='cut')
def cutt(value, arg):
    """Template filter ``cut``: remove every occurrence of ``arg`` from ``value``."""
    cleaned = value.replace(arg, '')
    return cleaned
# register.filter('cut', cutt)
| [
"demisodimu@gmail.com"
] | demisodimu@gmail.com |
ff3c86b1e31beb620025b6c5cc86da0482934f65 | 6e61b7ce4357f888cd0d46239a830548e5d10f89 | /Code/sam/python/lab10-simple_calculator.py | cfd11a50e626357a4769854092234083a505f6fb | [] | no_license | PdxCodeGuild/20170724-FullStack-Night | a42f612e686a906f312224dcd5d9c55cbcb4a81e | 24a6c9db8d65cf254e1b5a5d55dd00711065c048 | refs/heads/master | 2021-01-01T17:39:32.970027 | 2017-12-04T22:26:24 | 2017-12-04T22:26:24 | 98,124,598 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 799 | py |
i = True
while i:
user_operator = input('Choose an operator or type "done". ')
if user_operator == 'done':
i = False
print('Goodbye!')
break
else:
number_1 = float(input('What is the first number? '))
number_2 = float(input('What is the second number? '))
if user_operator == '+':
total = number_1 + number_2
print(total)
elif user_operator == '-':
total = number_1 - number_2
print(total)
elif user_operator == '*':
total = number_1 * number_2
print(total)
elif user_operator == '/':
total = number_1 / number_2
print(total)
elif user_operator == 'done':
i = False
print('Goodbye!')
| [
"samanthagmoad@gmail.com"
] | samanthagmoad@gmail.com |
f62f9da25416f82c08df684907a3194f8a6ad1bd | fb35b1b84936483f7be308942dfbce361673ce0c | /app.py | 5e4cc69ed13a417411ec49cd8813b4ee40bae225 | [] | no_license | kudddy/aiow2v | a6efca27fd10e0354639652cf67473d1ef48bdc2 | c25b2a71749f36fa42f9e6b71abf4d2071fc313f | refs/heads/master | 2023-06-06T13:25:32.123614 | 2021-06-24T14:54:47 | 2021-06-24T14:54:47 | 379,942,172 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,001 | py | import logging
from types import AsyncGeneratorType, MappingProxyType
from typing import AsyncIterable, Mapping
from aiohttp import PAYLOAD_REGISTRY
from aiohttp.web_app import Application
from aiohttp_apispec import setup_aiohttp_apispec, validation_middleware
import aiohttp_cors
from handlers import HANDLERS
from payloads import AsyncGenJSONListPayload, JsonPayload
from plugins.w2v import FastTextVectorizer
from plugins.config import cfg
api_address = "0.0.0.0"
api_port = 8081
MEGABYTE = 1024 ** 2
MAX_REQUEST_SIZE = 70 * MEGABYTE
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
def create_app() -> Application:
    """
    Create and wire up the aiohttp application, ready to run.
    """
    # TODO: add middlewares for validating message fields
    app = Application(
        client_max_size=MAX_REQUEST_SIZE,
        # middlewares=[check_token_middleware]
    )
    # Allow cross-origin requests from any origin, with credentials and
    # arbitrary headers.
    aiohttp_cors.setup(app, defaults={
        "*": aiohttp_cors.ResourceOptions(
            allow_credentials=True,
            expose_headers="*",
            allow_headers="*",
        )
    })
    # Handler registration: each handler class declares its own URL_PATH and
    # is mounted for every HTTP method ('*').
    for handler in HANDLERS:
        log.debug('Registering handler %r as %r', handler, handler.URL_PATH)
        route = app.router.add_route('*', handler.URL_PATH, handler)
        # NOTE(review): assumes aiohttp_cors.setup() stored its instance under
        # app['aiohttp_cors'] — confirm against the aiohttp_cors version used.
        app['aiohttp_cors'].add(route)
    # Register the fastText vectorizer as shared application state.
    log.debug('Registering w2v')
    app['w2v'] = FastTextVectorizer(cfg.app.directory.model_path)
    setup_aiohttp_apispec(app=app, title="I SEE YOU API", swagger_path='/')
    # Automatic JSON serialization of data in HTTP responses.
    PAYLOAD_REGISTRY.register(AsyncGenJSONListPayload,
                              (AsyncGeneratorType, AsyncIterable))
    PAYLOAD_REGISTRY.register(JsonPayload, (Mapping, MappingProxyType))
    return app
| [
"Zx0996750"
] | Zx0996750 |
7ad5e688b276555d2a1658f6873f0d4734303e43 | 886bde6c3515503f807ee535b36f84dc3aa9dc9d | /scripts/my_prepro_labels.py | dc9a2f88eda9b071bbbebeaae2b3e6deabd32a02 | [] | no_license | HELL-TO-HEAVEN/ChangeCaptioning | e29a6669f320afc0d071e4d5d38e4552bed9c3dd | 18acadcdb30fef34068b4a480b1bb88550ba4e81 | refs/heads/master | 2021-02-28T07:23:42.670785 | 2019-06-01T05:43:37 | 2019-06-01T05:43:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,441 | py | """
Preprocess a raw json dataset into hdf5/json files for use in data_loader.lua
Input: json file that has the form
[{ file_path: 'path/img.jpg', captions: ['a caption', ...] }, ...]
example element in this list would look like
{'captions': [u'A man with a red helmet on a small moped on a dirt road. ', u'Man riding a motor bike on a dirt road on the countryside.', u'A man riding on the back of a motorcycle.', u'A dirt path with a young person on a motor bike rests to the foreground of a verdant area with a bridge and a background of cloud-wreathed mountains. ', u'A man in a red shirt and a red hat is on a motorcycle on a hill side.'], 'file_path': u'val2014/COCO_val2014_000000391895.jpg', 'id': 391895}
This script reads this json, does some basic preprocessing on the captions
(e.g. lowercase, etc.), creates a special UNK token, and encodes everything to arrays
Output: a json file and an hdf5 file
The hdf5 file contains several fields:
/images is (N,3,256,256) uint8 array of raw image data in RGB format
/labels is (M,max_length) uint32 array of encoded labels, zero padded
/label_start_ix and /label_end_ix are (N,) uint32 arrays of pointers to the
first and last indices (in range 1..M) of labels for each image
/label_length stores the length of the sequence for each of the M sequences
The json file has a dict that contains:
- an 'ix_to_word' field storing the vocab in form {ix:'word'}, where ix is 1-indexed
- an 'images' field that is a list holding auxiliary information for each image,
such as in particular the 'split' it was assigned to.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import json
import argparse
from random import shuffle, seed
import string
# non-standard dependencies:
import h5py
import numpy as np
import torch
import torchvision.models as models
import skimage.io
from PIL import Image
from nltk.tokenize import word_tokenize
def build_vocab(imgs, params):
    """Build the caption vocabulary and attach 'final_captions' to each img.

    Words occurring <= params['word_count_threshold'] times are mapped to the
    special 'UNK' token.  Prints corpus statistics as a side effect and
    mutates ``imgs`` in place.  Returns the vocabulary as a list of words.
    """
    count_thr = params['word_count_threshold']
    # count up the number of words
    counts = {}
    for img in imgs:
        for sent in img['sentences']:
            tokens = word_tokenize(sent)
            for w in tokens:
                counts[w] = counts.get(w, 0) + 1
    cw = sorted([(count,w) for w,count in counts.items()], reverse=True)
    print('top 20 words and their counts:')
    print('\n'.join(map(str,cw[:20])))
    # print some stats
    total_words = sum(counts.values())
    print('total words:', total_words)
    # "bad" words fall at or below the count threshold and become UNK.
    bad_words = [w for w,n in counts.items() if n <= count_thr]
    vocab = [w for w,n in counts.items() if n > count_thr]
    bad_count = sum(counts[w] for w in bad_words)
    print('number of bad words: %d/%d = %.2f%%' % (len(bad_words), len(counts), len(bad_words)*100.0/len(counts)))
    print('number of words in vocab would be %d' % (len(vocab), ))
    print('number of UNKs: %d/%d = %.2f%%' % (bad_count, total_words, bad_count*100.0/total_words))
    # lets look at the distribution of lengths as well
    sent_lengths = {}
    for i, img in enumerate(imgs):
        for sent in img['sentences']:
            #print("sent", sent)
            tokens = word_tokenize(sent)
            nw = len(tokens)
            #print("nw", nw)
            # Flag images whose caption tokenizes to nothing.
            if nw == 0 : print("number of image with no caption :", i)
            sent_lengths[nw] = sent_lengths.get(nw, 0) + 1
    max_len = max(sent_lengths.keys())
    print('max length sentence in raw data: ', max_len)
    print('sentence length distribution (count, number of words):')
    freq_len = sum(sent_lengths.values())
    for i in range(max_len+1):
        print('%2d: %10d %f%%' % (i, sent_lengths.get(i,0), sent_lengths.get(i,0)*100.0/freq_len))
    # lets now produce the final annotations
    if bad_count > 0:
        # additional special UNK token we will use below to map infrequent words to
        print('inserting the special UNK token')
        vocab.append('UNK')
    for img in imgs:
        img['final_captions'] = []
        for sent in img['sentences']:
            tokens = word_tokenize(sent)
            caption = [w if counts.get(w,0) > count_thr else 'UNK' for w in tokens]
            img['final_captions'].append(caption)
    return vocab
def encode_captions(imgs, params, wtoi):
    """Flatten every caption into one zero-padded uint32 label matrix.

    Returns ``(L, label_start_ix, label_end_ix, label_length)`` where the
    start/end pointers are 1-indexed and inclusive (Lua-style), and
    ``label_length`` records the (clipped) token count of each caption.
    """
    max_length = params['max_length']
    num_images = len(imgs)
    total_caps = sum(len(img['final_captions']) for img in imgs)  # total number of captions
    print("final_caption_length_sum :", total_caps)

    per_image_arrays = []
    label_start_ix = np.zeros(num_images, dtype='uint32')  # note: these will be one-indexed
    label_end_ix = np.zeros(num_images, dtype='uint32')
    label_length = np.zeros(total_caps, dtype='uint32')

    cap_pos = 0      # running 0-based caption index across all images
    next_start = 1   # 1-based pointer to the first caption of the current image
    for img_pos, img in enumerate(imgs):
        captions = img['final_captions']
        n_caps = len(captions)
        assert n_caps > 0, 'error: some image has no captions'

        encoded = np.zeros((n_caps, max_length), dtype='uint32')
        for row, caption in enumerate(captions):
            # Record the (possibly clipped) length of this sequence.
            label_length[cap_pos] = min(max_length, len(caption))
            cap_pos += 1
            # Word indices are 1-indexed; captions are padded with zeros.
            for col, word in enumerate(caption[:max_length]):
                encoded[row, col] = wtoi[word]

        per_image_arrays.append(encoded)
        label_start_ix[img_pos] = next_start
        label_end_ix[img_pos] = next_start + n_caps - 1
        next_start += n_caps

    L = np.concatenate(per_image_arrays, axis=0)  # put all the labels together
    assert L.shape[0] == total_caps, 'lengths don\'t match? that\'s weird'
    print('encoded captions to array of size ', L.shape)
    return L, label_start_ix, label_end_ix, label_length
def main(params):
    """Drive preprocessing: build the vocab, encode captions, write h5 + json.

    ``params`` is the argparse options dict: input_json, output_json,
    output_h5, images_root, max_length, word_count_threshold.
    """
    # NOTE(review): both json file handles opened inline here are never
    # closed explicitly; consider `with open(...)` blocks.
    imgs = json.load(open(params['input_json'], 'r'))
    imgs = imgs['images']
    seed(123) # make reproducible
    # create the vocab
    vocab = build_vocab(imgs, params)
    itow = {i+1:w for i,w in enumerate(vocab)} # a 1-indexed vocab translation table
    wtoi = {w:i+1 for i,w in enumerate(vocab)} # inverse table
    # encode captions in large arrays, ready to ship to hdf5 file
    L, label_start_ix, label_end_ix, label_length = encode_captions(imgs, params, wtoi)
    # create output h5 file
    N = len(imgs)
    f_lb = h5py.File(params['output_h5']+'_label.h5', "w")
    f_lb.create_dataset("labels", dtype='uint32', data=L)
    f_lb.create_dataset("label_start_ix", dtype='uint32', data=label_start_ix)
    f_lb.create_dataset("label_end_ix", dtype='uint32', data=label_end_ix)
    f_lb.create_dataset("label_length", dtype='uint32', data=label_length)
    f_lb.close()
    # create output json file
    out = {}
    out['ix_to_word'] = itow # encode the (1-indexed) vocab
    out['images'] = []
    for i,img in enumerate(imgs):
        jimg = {}
        jimg['split'] = img['split'] # test, train, val
        jimg['file_path'] = os.path.join(img['filepath'], img['filename']) # copy it over, might need
        jimg['id'] = img['id'] # copy over & mantain an id, if present (e.g. coco ids, useful)
        #if params['images_root'] != '':
        #  with Image.open(os.path.join(params['images_root'], img['filepath'], img['filename'])) as _img:
        #    jimg['width'], jimg['height'] = _img.size
        out['images'].append(jimg)
    json.dump(out, open(params['output_json'], 'w'))
    print('wrote ', params['output_json'])
if __name__ == "__main__":
    # Parse CLI options and hand the resulting dict to main().
    parser = argparse.ArgumentParser()
    # input json
    parser.add_argument('--input_json', required=True, help='input json file to process into hdf5')
    parser.add_argument('--output_json', default='data.json', help='output json file')
    parser.add_argument('--output_h5', default='data', help='output h5 file')
    parser.add_argument('--images_root', default='', help='root location in which images are stored, to be prepended to file_path in input json')
    # options
    parser.add_argument('--max_length', default=60, type=int, help='max length of a caption, in number of words. captions longer than this get clipped.')
    parser.add_argument('--word_count_threshold', default=0, type=int, help='only words that occur more than this number of times will be put in vocab')
    args = parser.parse_args()
    params = vars(args) # convert to ordinary dict
    print('parsed input parameters:')
    print(json.dumps(params, indent = 2))
    main(params)
| [
"37833335+Gospokid@users.noreply.github.com"
] | 37833335+Gospokid@users.noreply.github.com |
df741301da60c8b876eab10d4f48c04cd6520f23 | 1d68b987357a9117dffdc8509a899d06a76ce10d | /topk-evaluate.py | 2f750b1c930550bc6690f3d7f20b3e7953395025 | [] | no_license | LibreChou/cs224u | b4658aeabbb9dba7fea87dbb8f4eb9d40e3a252d | 4cb6f44e28cca6c3320ad86fc43b9e5ec701dbaf | refs/heads/master | 2021-05-30T07:37:52.475530 | 2015-06-10T23:16:34 | 2015-06-10T23:16:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,642 | py | #!/usr/bin/env python
"""
Usage: %(program)s model_prefix data_dir
Run model.
model_prefix should be something like "wiki_en" corresponding to the
filename prefix of the ESA model files, which must be in the current directory.
data_dir should be the base folder for the newsgroups data.
Example:
%(program)s --sample_size 20 --model GloveModel --depth 6 --decay 0.9 glove.6B.300d.txt 20news-18828/ testrecords.txt
"""
from glove import GloveModel
from esa import ESAModel, ClusteredESAModel
from models import LDAModel, LSAModel
from util import sentenceSeg, PriorityQueue, cosine, DataSet, function_name, \
MaxTopicFeatureExtractor, HierarchicalTopicFeatureExtractor, \
FlatFeatureExtractor, TopKLayerHierarchicalFeatureExtractor, \
topKHierarchicalSegments
#from distributedwordreps import ShallowNeuralNetwork
import argparse
import inspect
import json
import logging
import os.path
import sys
import time
import numpy as np
import scipy.sparse
from sklearn.naive_bayes import GaussianNB, MultinomialNB
from sklearn.linear_model import LogisticRegression
from sklearn.svm import SVC
from sklearn.metrics import classification_report, f1_score, precision_recall_fscore_support
try:
import cPickle as pickle
except:
import pickle
import gensim
from gensim.corpora import Dictionary
from gensim.models import TfidfModel
from gensim.similarities import Similarity
from nltk.tokenize import wordpunct_tokenize
from os import listdir
program = os.path.basename(sys.argv[0])
logger = logging.getLogger(program)
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s')
logging.root.setLevel(level=logging.INFO)
DEFAULT_MODEL = 'LDAModel' #'GloveModel'
DEFAULT_FEATURIZER = 'MaxTopicFeatureExtractor'
DEFAULT_NUM_REGIONS = 15
DEFAULT_SAMPLE_SIZE = 20
def evaluation(model = None,
               clf = GaussianNB,
               model_prefix = None,
               data_dir = '20news-18828',
               result_record = None,
               record_fname = None,
               sample_size = None,
               depth = 0,
               decay = 1.0,
               fullLayer = True):
    """Train and evaluate topic classifiers on pre-segmented newsgroup docs.

    Loads per-document pickles named '<n>.<ModelClass>.segmented.pickle'
    from data_dir/<category>/, extracts top-k hierarchical-segment features,
    splits 90/10 (train/test) per category, fits each classifier in the
    hard-coded list below, prints sklearn classification reports, and stores
    the weighted precision/recall/F1 into ``result_record``, which is
    appended as one JSON line to ``record_fname`` after each classifier.

    NOTE(review): the ``clf`` and ``model_prefix`` parameters are accepted
    but never used inside this function.
    """
    if result_record is None:
        raise Exception("Must pass result_record")
    if record_fname is None:
        raise Exception("Must pass record_fname")
    train = []
    trainY = []
    test = []
    testY = []
    # load data
    # e.g. ".GloveModel.segmented.pickle" for a GloveModel feature extractor.
    pickle_suffix = ".%s.segmented.pickle" % (model.__class__.__name__,)
    baseFolder = data_dir
    cats = sorted(listdir(baseFolder))
    for catIdx, cat in enumerate(cats):
        logger.info('Processing category %s (%d/%d)', cat, catIdx, len(cats))
        dirpath = os.path.join(baseFolder, cat)
        #print dirpath
        try:
            filtered_docs = []
            for d in listdir(dirpath):
                #print d
                if d.endswith(pickle_suffix):
                    filtered_docs.append(d)
            # Documents are named "<number>....": sort numerically by prefix.
            docs = sorted(filtered_docs, key = lambda n: int(n.split(".")[0]))
            if sample_size is not None and sample_size != 0:
                docs = docs[:sample_size]
        # NOTE(review): bare except silently skips any unreadable category
        # (and would also swallow KeyboardInterrupt) — consider narrowing.
        except:
            continue
        numDocs = len(docs)
        #logger.info("Docs: %s", docs)
        for docIdx, doc_filename in enumerate(docs):
            doc_filename = os.path.join(baseFolder, cat, doc_filename)
            logger.info('processing document %s (%d/%d)', doc_filename, docIdx, numDocs)
            # Each pickle holds (segments, regions) produced by the
            # segmentation preprocessing step.
            doc = gensim.utils.unpickle(doc_filename)
            segments = doc[0]
            regions = doc[1]
            feature = topKHierarchicalSegments(segments,
                                               regions,
                                               feature_extractor = model,
                                               depth = depth,
                                               fullLayer = fullLayer,
                                               decay = decay)
            logger.debug('doc %d feature extracted', docIdx)
            # First 90% of each category goes to train, the rest to test.
            if docIdx < numDocs*0.9:
                train.append(feature)
                trainY.append(catIdx)
            else:
                test.append(feature)
                testY.append(catIdx)
            logger.debug('-----')
    # Convert to sparse format for compact storage and minimal memory usage.
    # NOTE(review): comment above is stale — np.vstack/hstack produce dense
    # arrays, not sparse matrices.
    train = np.vstack(train)
    trainY = np.hstack(trainY)
    test = np.vstack(test)
    testY = np.hstack(testY)
    logger.info("Shape of training set: %s", train.shape)
    logger.info("Shape of test set: %s", test.shape)
    num_labels = len(cats)
    logger.info("Number of labels: %d", num_labels)
    # Instantiate classifiers.
    classifiers = [
        LogisticRegression(),
        MultinomialNB()
        #,
        #ShallowNeuralNetwork(input_dim = feature_extractor.num_features(),
        #                     hidden_dim = 60,
        #                     output_dim = num_labels)
    ]
    for clf in classifiers:
        classifier_name = clf.__class__.__name__
        # NOTE(review): this guard can never fire (__class__.__name__ is
        # always a string), and ``clf_class`` in the raise is undefined —
        # it would itself raise NameError if reached.
        if classifier_name is None:
            raise Exception("Unable to get name of classifier class", clf_class)
        logger.info("Evaluating on classifier %s...", classifier_name)
        clf.fit(train, trainY)
        logger.info('training finished')
        # Record training error.
        trainPredY = clf.predict(train)
        print("Training error:")
        print(classification_report(trainY, trainPredY, target_names = cats, digits = 5))
        # Make prediction.
        testPredY = clf.predict(test)
        # Print detailed report.
        print("Test error:")
        print(classification_report(testY, testPredY, target_names = cats, digits = 5))
        # Save the important metrics.
        precision, recall, f1, support = \
            precision_recall_fscore_support(testY, testPredY, average='weighted')
        result_record[classifier_name + "_precision"] = precision
        result_record[classifier_name + "_recall"] = recall
        result_record[classifier_name + "_f1"] = f1
        #result_record[classifier_name + "_support"] = support
        precision, recall, f1, support = \
            precision_recall_fscore_support(trainY, trainPredY, average='weighted')
        result_record[classifier_name + "_f1_train"] = f1
        # Append one JSON line per classifier to the records file.
        with open(record_fname, "a") as records_out:
            json.dump(result_record, records_out, sort_keys = True)
            records_out.write("\n")
if __name__ == "__main__":
    # Define command-line args.
    parser = argparse.ArgumentParser(description='Evaluate topic classification approaches.',
                                     epilog=str(__doc__ % {'program': program}))
    parser.add_argument('--model', help=('Base feature model. Default: ' + DEFAULT_MODEL))
    parser.set_defaults(model=DEFAULT_MODEL)
    parser.add_argument('--featurizer',
                        help=('Higher level featurizer. Default: ' + DEFAULT_FEATURIZER))
    parser.set_defaults(featurizer=DEFAULT_FEATURIZER)
    parser.add_argument('--max_regions', type=int,
                        help=('Maximum regions to use. Default: ' + str(DEFAULT_NUM_REGIONS)))
    parser.set_defaults(max_regions=DEFAULT_NUM_REGIONS)
    parser.add_argument('--sample_size', type=int,
                        help=('How much to sample the dataset. Set to 0 to disable sampling. Default: ' + str(DEFAULT_SAMPLE_SIZE)))
    parser.set_defaults(sample_size=DEFAULT_SAMPLE_SIZE)
    parser.add_argument('--depth', type=int,
                        help=('Depth of the TopK layer hierarchical feature extractor'))
    parser.add_argument('--decay', type=float,
                        help=('Decay of the TopK layer hierarchical feature extractor'))
    parser.add_argument('--reverse', dest='reverse', action='store_true', help='reverse region iter')
    parser.add_argument('--no-reverse', dest='reverse', action='store_false', help='reverse region iter')
    parser.add_argument('model_prefix', help='Model prefix of passed to the model constructor')
    parser.add_argument('data_dir', help='Directory in which to find the 20-newsgroups data.')
    parser.add_argument('record_fname', help='Filename to append result records.')
    args = parser.parse_args()
    # load base feature model
    # The --model value must name one of the model classes imported above;
    # it is resolved dynamically through globals().
    model_clazz = globals()[args.model]
    model = model_clazz(args.model_prefix)
    #model = ESAModel(args.model_prefix) # ESA is not working very well.
    #model = GloveModel(args.model_prefix)
    # The triple-quoted string below is dead code, kept as documentation of
    # the disabled secondary featurizer path.
    """
    # load secondary feature extractor
    featurizer_clazz = globals()[args.featurizer]
    options = {'base_feature_extractor': model,
               'max_regions': args.max_regions,
               'reverse': args.reverse,
               'depth': args.depth,
               'decay': args.decay}
    featurizer = featurizer_clazz(options)
    #featurizer = MaxTopicFeatureExtractor(options)
    """
    # Bookkeeping for the experiment record appended by evaluation().
    result_record = {}
    result_record['timestamp'] = time.asctime()
    result_record['model_prefix'] = args.model_prefix
    result_record['model'] = args.model
    result_record['featurizer'] = "TopKLayerHierarchicalFeatureExtractor"
    result_record['max_regions'] = args.max_regions
    if args.depth is not None: result_record['depth'] = args.depth
    if args.decay is not None: result_record['decay'] = args.decay
    result_record['sample_size'] = args.sample_size
    evaluation(model = model,
               model_prefix = args.model_prefix,
               data_dir = args.data_dir,
               result_record = result_record,
               record_fname = args.record_fname,
               sample_size = args.sample_size,
               depth = args.depth,
               decay = args.decay)
| [
"mpercy@cloudera.com"
] | mpercy@cloudera.com |
edf007e0e120a8239499496e9b3be1ab66d056c3 | 6075856d8dd85af8c05fa6bb21bb2ed3070fb9e8 | /sql_load.py | ba47a7f3c88ba68c7fd187344bff2210856fbd9b | [] | no_license | jason82603/weather_python | a1a671f04f9f2544a0c15ea26a3b34e3041117ee | fd86481bce0f3a562b5294dc195a86f8764f9afa | refs/heads/main | 2023-07-16T02:37:07.847715 | 2021-08-15T15:30:45 | 2021-08-15T15:30:45 | 396,284,488 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,622 | py |
# -*- coding: utf-8 -*-
import pymysql
import datetime ,time
from sqlalchemy import create_engine
import sys
import pandas as pd
import csv
engine = create_engine('mysql+mysqldb://root:568912@localhost:3306/weather_report?charset=utf8', max_overflow=5)
#區碼導入
def sqloutput(religion_num):
infile_csv = './csv/comparison_table.csv' # from Lab1
df_station = pd.read_csv(infile_csv, sep=',')
#df_station['縣市+區鄉鎮名稱'] = df_station.apply(lambda x: '%s%s' % (x['縣市名稱'], x['區鄉鎮名稱']), axis=1)
#df_station = df_station[['區里代碼', '縣市+區鄉鎮名稱']]
#df_station.columns = ['sid', 'district']
from sqlalchemy.orm import sessionmaker
Session = sessionmaker(bind=engine)
session = Session()
try:
df_station.to_sql(name='station', con=engine, if_exists = 'append', index=False)
except:
print('WARNING: table station might already exist')
session.close()
d_code = religion_num
with open(infile_csv,'r',encoding="utf_8") as csvfile:
rows = csv.reader(csvfile, delimiter=',')
for row in rows:
if d_code in row:
station_name=row[2]
#天氣導入
weather_csv = './csv/daan_3hr.csv' # from Lab2
df_report = pd.read_csv(weather_csv, sep=',')
df_report.insert(0, 'station_name', str(station_name))
df_report.insert(1, 'station_sid', int(d_code))
df_report.to_sql(name='report', con=engine, if_exists='append', index=False)
# 這行會直接添加 17 筆新紀錄
session.close()
print("done") | [
"jason82603@gmail.com"
] | jason82603@gmail.com |
d26865da00497232313307d61128fc8a8a17324c | ab50920ebb8d9679230c13b8f91998e47e9f4f82 | /samples/server/petstore/python-aiohttp/setup.py | b6a25d8966e5416433cab58c51cbb254d1efcc14 | [
"Apache-2.0"
] | permissive | oxidecomputer/openapi-generator | f50ee17579b02a35d30894f16a4d98dc81f8b06b | f8770d7c3388d9f1a5069a7f37378aeadcb81e16 | refs/heads/master | 2023-08-25T09:24:27.666296 | 2021-02-25T15:36:35 | 2021-02-25T15:36:35 | 334,329,847 | 6 | 0 | Apache-2.0 | 2022-10-14T05:05:39 | 2021-01-30T04:46:13 | Java | UTF-8 | Python | false | false | 937 | py | # coding: utf-8
import sys
from setuptools import setup, find_packages
NAME = "openapi_server"
VERSION = "1.0.0"
# To install the library, run the following
#
# python setup.py install
#
# prerequisite: setuptools
# http://pypi.python.org/pypi/setuptools
REQUIRES = [
"connexion==2.6.0",
"swagger-ui-bundle==0.0.6",
"aiohttp_jinja2==1.2.0",
]
setup(
name=NAME,
version=VERSION,
description="OpenAPI Petstore",
author_email="",
url="",
keywords=["OpenAPI", "OpenAPI Petstore"],
install_requires=REQUIRES,
packages=find_packages(),
package_data={'': ['openapi/openapi.yaml']},
include_package_data=True,
entry_points={
'console_scripts': ['openapi_server=openapi_server.__main__:main']},
long_description="""\
This is a sample server Petstore server. For this sample, you can use the api key `special-key` to test the authorization filters.
"""
)
| [
"noreply@github.com"
] | noreply@github.com |
da16383d575f23739976c33d38d4718e16da28db | bf029b2e668e1e52541dfade8931d86846c67fc1 | /Basic/Quarantine.py | 3f147145aa682643aa857cf3a867462a4f4e72c3 | [] | no_license | trueazp/Python | 714d37e63c4a65de960fa24cfc24fab2be178610 | 686497d4b690d65b8c0491468393b2f761967e46 | refs/heads/master | 2022-12-13T21:33:47.201652 | 2020-09-16T23:14:27 | 2020-09-16T23:14:27 | 252,054,491 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 220 | py | # meoww
def stay_at_home():
    """Print the quarantine daily routine, one activity per line."""
    for activity in ("eat", "code", "sleep", "repeat"):
        print(activity)
# Flag is never cleared, so the loop below runs until the process is killed.
corona_virus = True

if __name__ == "__main__":
    # Intentionally endless: the routine repeats for as long as the
    # "quarantine" (this process) lasts.
    while corona_virus == True:
        stay_at_home()
# stay home | [
"54744527+trueazp@users.noreply.github.com"
] | 54744527+trueazp@users.noreply.github.com |
bfea27733e3baaa41e1cf44c97e610c37ea4f198 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03049/s868278541.py | b3f9160fdd19cecfb8e58fc70735fa3e21e93802 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 464 | py | n=int(input())
# Counters for joining strings end-to-end:
#   a_num   - strings that end with 'A' (can precede a B-starter)
#   b_num   - strings that start with 'B' (can follow an A-ender)
#   set_num - strings that BOTH start with 'B' and end with 'A'
a_num=0
b_num=0
set_num=0
# ans accumulates "AB" pairs that already occur inside individual strings.
ans=0
for i in range(n):
    s=list(input())
    # Count occurrences of "AB" fully inside this string.
    for j in range(len(s)-1):
        if s[j]+s[j+1]=="AB":
            ans+=1
    if s[0]=="B" and s[-1]=="A":
        set_num+=1
    elif s[0]=="B":
        b_num+=1
    elif s[-1]=="A":
        a_num+=1
# Concatenating strings can create new "AB" pairs at the junctions.
if set_num==0:
    print(ans+min(a_num,b_num))
else:
    # Chain every B...A string together: set_num-1 junctions become "AB".
    ans+=set_num-1
    # The resulting chain itself still starts with 'B' and ends with 'A',
    # so it can pair once more with a remaining A-ender and/or B-starter.
    if not(a_num==0 and b_num==0):
        a_num+=1
        b_num+=1
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
2198b57e1a53959d68f9caf78bf428b825300e47 | 9b0f49d825658e9cb6b4bc3e7980016809506be1 | /SqlMaster/models.py | 6bab5fbc2b1c54844895f441c45706b68590995c | [
"Apache-2.0"
] | permissive | srstack/JustCloud | 29a4fe16c43beae511e210e44c6b600ac673defc | 328a0e96e4b549846f1944b86315e7aa87070fa9 | refs/heads/master | 2020-04-16T03:23:45.165598 | 2019-05-07T05:10:54 | 2019-05-07T05:10:54 | 165,230,466 | 3 | 2 | Apache-2.0 | 2019-02-26T05:04:37 | 2019-01-11T11:06:52 | HTML | UTF-8 | Python | false | false | 4,950 | py | from django.db import models
# Create your models here.
class Domain(models.Model):
    # Top-level tenant grouping keyed by a unique domain name; users and
    # systems both hang off a Domain via foreign keys.
    name = models.CharField(max_length=30, verbose_name="域名", null=False, unique=True)
    city = models.CharField(max_length=10, verbose_name="城市", null=False)
    province = models.CharField(max_length=10, verbose_name="省份", null=True)
    country = models.CharField(max_length=10, verbose_name="国家", null=True, default="中国")
    date = models.DateTimeField(verbose_name="创建时间", auto_now_add=True)

    def __str__(self):
        return self.name
class Users(models.Model):
    # Account within a Domain. Users form a tree through the self-referencing
    # `rely` field (parent account) and may administer several Systems.
    username = models.CharField(max_length=15, verbose_name="用户名", null=False, db_index=True)
    # NOTE(review): max_length=40 suggests a hex digest (e.g. SHA-1) is stored
    # here rather than plaintext — confirm the hashing scheme used by callers.
    password = models.CharField(max_length=40, verbose_name="密码", null=False)
    name = models.CharField(max_length=12, verbose_name="昵称", null=False)
    domain = models.ForeignKey("Domain", verbose_name="所属域", on_delete=models.CASCADE, related_name="users")
    system = models.ManyToManyField("System", verbose_name="子系统", related_name="admin")
    phone = models.CharField(max_length=14, verbose_name="手机号", null=False, unique=True)
    email = models.EmailField(verbose_name="邮箱", null=False, unique=True)
    age = models.PositiveSmallIntegerField(verbose_name="年龄", null=True)
    sex = models.NullBooleanField(verbose_name="性别", null=True)
    date = models.DateTimeField(verbose_name="创建时间", auto_now_add=True)
    rely = models.ForeignKey("Users", verbose_name="上级用户", null=True, on_delete=models.CASCADE, related_name="sub_user")

    def __str__(self):
        return self.username
class System(models.Model):
    # A sub-system registered inside a Domain; the creator is kept (SET_NULL)
    # even if the creating user account is deleted.
    name = models.CharField(max_length=10, verbose_name="系统名称", null=False)
    platform = models.CharField(max_length=8, verbose_name="系统平台", null=False, default="Others")
    createuser = models.ForeignKey("Users", verbose_name="创建者", on_delete=models.SET_NULL, null=True,
                                   related_name="ownsystem", db_index=True)
    protocol = models.CharField(max_length=4, verbose_name="接入协议", default="CoAP")
    devicecode = models.CharField(max_length=20, verbose_name="设备注册码", null=True)
    domain = models.ForeignKey("Domain", verbose_name="所属域", on_delete=models.CASCADE, related_name="system")
    # JSON-format data template, stored in tuple format.
    # (translated from the original note: JSON格式的数据模板,使用元组格式)
    type = models.CharField(max_length=300, verbose_name="数据模板", null=False)
    date = models.DateTimeField(verbose_name="创建时间", auto_now_add=True)

    def __str__(self):
        return self.name
class Device(models.Model):
    # Physical device belonging to a System, identified by its IMEI serial.
    name = models.CharField(max_length=30, verbose_name="设备名", null=False)
    system = models.ForeignKey("System", verbose_name="所属系统", on_delete=models.CASCADE, related_name="device")
    date = models.DateTimeField(verbose_name="创建时间", auto_now_add=True)
    IMEI = models.CharField(max_length=15, verbose_name="序列号", null=False)

    def __str__(self):
        return self.name
class Data(models.Model):
    # A single datapoint reported by (pull) or pushed from a Device.
    device = models.ForeignKey("Device", verbose_name="所属设备", on_delete=models.CASCADE, related_name="data")
    # JSON-format payload stored as text, using dict (JS object) format.
    # (translated from the original note: JSON格式数据存储,采用字典(JS中的对象)格式)
    data = models.CharField(max_length=600, verbose_name="设备数据", null=False)
    date = models.DateTimeField(verbose_name="接收时间", auto_now_add=True)
    model = models.BooleanField(verbose_name="订阅/推送", default=0, db_index=True)
    # 0: subscribe (pull)  1: push
    waring = models.NullBooleanField(verbose_name="正常/异常", default=None, null=True, db_index=True)
    # None: normal  0: anomaly cleared  1: anomaly
    # NOTE(review): "waring" looks like a typo for "warning"; renaming would
    # require a schema migration, so it is only flagged here.

    def __str__(self):
        return self.data
class Login(models.Model):
    # Login/logout audit record; `operation` defaults to 'IN'.
    user = models.ForeignKey("Users", verbose_name="用户", on_delete=models.CASCADE, related_name="login")
    IP = models.GenericIPAddressField(verbose_name="IP")
    operation = models.CharField(max_length=3, verbose_name="操作", null=False, default='IN')
    date = models.DateTimeField(verbose_name="时间", auto_now_add=True)

    def __str__(self):
        return self.user.name
class Operation(models.Model):
    # Audit-log entry: which user performed which numeric operation code
    # (see the operation_code reference table below) and when.
    code = models.PositiveSmallIntegerField(verbose_name="操作码", null=False)
    date = models.DateTimeField(verbose_name="操作时间", auto_now_add=True)
    user = models.ForeignKey("Users", verbose_name="操作用户", on_delete=models.CASCADE, related_name="operation")

    def __str__(self):
        # Bug fix: __str__ must return a str. The original returned the raw
        # integer field, which makes Python raise TypeError whenever the
        # object is rendered (admin lists, shell repr, template output).
        return str(self.code)
# Reference table of audit operation codes (inert module-level string kept
# verbatim; values are Chinese descriptions of each action).
'''
operation_code = {
    900:"创建域",
    100:"修改域",
    101:"创建用户",
    102:"删除用户",
    103:"修改用户",
    202:"增加系统",
    203:"删除系统",
    204:"修改系统",
    205:"系统权限",
    302:"增加设备",
    303:"删除设备",
    304:"修改设备",
    305:"命令推送",
    401:"异常处理",
    500:"无效操作",
}
'''
| [
"39378935+srstack@users.noreply.github.com"
] | 39378935+srstack@users.noreply.github.com |
674aaa5d6a44b0485ab847af4392e2f00bde3c4c | e761cadf65705e1d6f8eaa03b980362b7e58d6be | /venv/bin/easy_install-3.6 | 2374b15e60e0558833aef31cd712aab3d42fa1cd | [] | no_license | alfonsho/word_count_project | 2397b4d87af493d6598551ff8fee1aedef9cd52f | e56420e08e7ec957c71345f46e6e72eba722fe30 | refs/heads/master | 2020-03-22T02:13:18.249410 | 2018-07-01T19:33:24 | 2018-07-01T19:33:24 | 139,357,253 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 458 | 6 | #!/Users/alfo/PycharmProjects/word_count_project/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==28.8.0','console_scripts','easy_install-3.6'
# Auto-generated console-script shim: locates setuptools' easy_install entry
# point and exits with its return value.
__requires__ = 'setuptools==28.8.0'
import re
import sys
from pkg_resources import load_entry_point

if __name__ == '__main__':
    # Strip a '-script.py(w)'/'.exe' suffix so argv[0] shows the bare command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==28.8.0', 'console_scripts', 'easy_install-3.6')()
    )
| [
"alfonsho@me.com"
] | alfonsho@me.com |
47bfe641e44add9a467cfbda95d5a088f6765ab0 | afadcf5a318ffbdd18966568fa6ba67b6ed2b5bf | /config/wsgi.py | e17c4b421924b29b70c3b91a03367e18408125a2 | [
"MIT"
] | permissive | we29758143/my_django_api | e9cb2f186218a9e909644502652829b7a7ad75ff | 0aee90dfc9b713bf5f5676065d17108639754063 | refs/heads/master | 2023-04-09T02:29:37.489492 | 2021-04-20T20:25:47 | 2021-04-20T20:25:47 | 359,941,465 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,678 | py | """
WSGI config for my_django_api project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import sys
from pathlib import Path

from django.core.wsgi import get_wsgi_application

# This allows easy placement of apps within the interior
# my_django_api directory.
ROOT_DIR = Path(__file__).resolve(strict=True).parent.parent
# Make the inner project package importable without installation.
sys.path.append(str(ROOT_DIR / "my_django_api"))
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| [
"williamtks2@hotmail.com"
] | williamtks2@hotmail.com |
911c352e7b58e0f4dfa3bfa17b13bfb5b8ec5dae | 070923bfce1097299edc5db76eeab306113c274a | /player/set1/New Text Document.txt | 075286c39bb7947e30c9337ddd645a63a4071cc2 | [] | no_license | kirtigarg11/guvi | 0c0f9f5bb782131ff4a0afb8d253d4437a6ecaaa | de12897d35306b02c8701b11bcf08e7d3ce1b4b1 | refs/heads/master | 2020-04-22T20:37:41.933179 | 2019-03-06T15:22:01 | 2019-03-06T15:22:01 | 170,647,087 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18 | txt | print(input()+'.') | [
"kirti.garg_cs16@gla.ac.in"
] | kirti.garg_cs16@gla.ac.in |
87ddc1477a9a7a89f4da9f65c898d02fe5fc8d9f | dbcd5044f5b076740a2291a09a5464f90387545a | /check_db_connection.py | aefeaa005b94cf2890f54d8a7b995c811b2021ed | [
"Apache-2.0"
] | permissive | Guban1990/Python_training | 4ea12ab6a506702d7509211ce68e13865678e6a7 | e1e30937f0f4f06bd8d87ee5f1675350ec8aa440 | refs/heads/master | 2023-07-13T03:52:01.680905 | 2021-09-02T11:08:49 | 2021-09-02T11:08:49 | 279,603,084 | 0 | 0 | null | 2020-08-28T20:02:57 | 2020-07-14T14:09:28 | null | UTF-8 | Python | false | false | 908 | py | from fixture.orm import ORMFixture
from model.group import Group

# Ad-hoc smoke check: connect to the local addressbook DB through the ORM
# fixture and list every contact that is NOT in group 408, then the total.
db = ORMFixture(host="127.0.0.1", name="addressbook", user="root", password="")

try:
    l = db.get_contacts_not_in_group(Group(id="408"))
    for item in l:
        print(item)
    print(len(l))
finally:
    pass #db.destroy()

# Earlier raw-driver experiments kept for reference as inert string literals.
"""import pymysql.cursors
connection = pymysql.connect(host="127.0.0.1", database="addressbook", user="root", password="")
try:
    cursor = connection.cursor()
    cursor.execute("select * from group_list")
    for row in cursor.fetchall():
        print(row)
finally:
    connection.close()"""
"""import mysql.connector
connection = mysql.connector.connect(host="127.0.0.1", database="addressbook", user="root", password="")
try:
    cursor = connection.cursor()
    cursor.execute("select * from group_list")
    for row in cursor.fetchall():
        print(row)
finally:
    connection.close()"""
| [
"gubanov@restream.rt.ru"
] | gubanov@restream.rt.ru |
c10b740c35e4aab98ed0d1cef799df752e9a5609 | a28b0bb2c8d3d894a64f72b00dea3a499c6e1dcf | /examples/wikivote/wikivote.py | a4d5da7e68c85db7aa12ee1a01945b7a22a63029 | [
"MIT"
] | permissive | vlukiyanov/pt-splitter | 4a24e0d644abd2b37a15d5d13bd78526f88874ae | 6d0c930f556625965834c2eab8243ee501c6f99c | refs/heads/master | 2022-07-18T21:56:17.440411 | 2020-05-24T15:36:04 | 2020-05-24T15:36:04 | 179,951,145 | 3 | 0 | MIT | 2020-05-24T15:36:05 | 2019-04-07T10:40:31 | Python | UTF-8 | Python | false | false | 2,957 | py | from cytoolz.itertoolz import take
import networkx as nx
import torch.cuda as cuda
from torch.optim import SGD
from sklearn.metrics import roc_auc_score
from ptsplitter.deepwalk import (
initial_deepwalk_embedding,
initial_persona_embedding,
iter_random_walks,
lookup_tables,
to_embedding_matrix,
PersonaDeepWalkDataset,
)
from ptsplitter.model import predict, train
from ptsplitter.persona import persona_graph
from ptsplitter.splitter import SplitterEmbedding
from ptsplitter.utils import (
embedding_groups,
positive_edges,
negative_edges,
iter_get_scores,
)
# TODO this dataset is directed
# Link-prediction experiment on wiki-Vote: hold out half of the edges as
# positives, sample an equal number of non-edges as negatives, train Splitter
# persona embeddings on the remaining graph, and report ROC-AUC of the
# maximum persona-pair similarity score.
print("Reading in dataset.")
G = nx.read_edgelist("data_input/wiki-Vote.txt")
sample_number = G.number_of_edges() // 2
G_original = nx.Graph(G)
positive_samples = list(take(sample_number, positive_edges(G)))
negative_samples = list(take(sample_number, negative_edges(G)))
# Held-out positives are removed so the model never sees them during training.
G.remove_edges_from(positive_samples)
print("Constructing persona graph.")
PG = persona_graph(G)
print("Constructing lookups.")
forward_persona, reverse_persona = lookup_tables(PG)
forward, reverse = lookup_tables(G)
print("Generating random walks and initial embeddings.")
# DeepWalk on the original graph seeds both the base and persona embeddings.
walks = take(10000, iter_random_walks(G, length=10))
base_embedding = initial_deepwalk_embedding(
    walks=walks, forward_lookup=forward, embedding_dimension=100, window=10
)
base_matrix = to_embedding_matrix(
    base_embedding, embedding_dimension=100, reverse_lookup=reverse
)
persona_matrix = to_embedding_matrix(
    initial_persona_embedding(PG, base_embedding),
    embedding_dimension=100,
    reverse_lookup=reverse_persona,
)
print("Running splitter.")
print(f'CUDA is{str() if cuda.is_available() else " not"} utilised.')
embedding = SplitterEmbedding(
    node_count=G.number_of_nodes(),
    persona_node_count=PG.number_of_nodes(),
    embedding_dimension=100,
    initial_embedding=base_matrix,
    initial_persona_embedding=persona_matrix,
)
dataset = PersonaDeepWalkDataset(
    graph=PG,
    window_size=5,
    walk_length=40,
    dataset_size=50000,
    forward_lookup_persona=forward_persona,
    forward_lookup=forward,
)
if cuda.is_available():
    embedding = embedding.cuda()
optimizer = SGD(embedding.parameters(), lr=0.025)
train(
    dataset=dataset,
    model=embedding,
    epochs=10,
    batch_size=10,
    optimizer=optimizer,
    cuda=cuda.is_available(),
)
# Score each held-out pair by the best similarity over all persona pairs.
_, node_list, index_list, persona_embedding_list = predict(reverse_persona, embedding)
groups = embedding_groups(node_list, persona_embedding_list)
positive_scores = [
    max(iter_get_scores(groups, node1, node2)) for (node1, node2) in positive_samples
]
negative_scores = [
    max(iter_get_scores(groups, node1, node2)) for (node1, node2) in negative_samples
]
print(sum(positive_scores))
print(sum(negative_scores))
print(
    roc_auc_score(
        [1] * len(positive_samples) + [0] * len(negative_samples),
        positive_scores + negative_scores,
    )
)
# Trailing debug marker left by the author.
print(1)
| [
"noreply@github.com"
] | noreply@github.com |
5b185f80b83a5280fb92b99ea44beca47841c15f | 4dd9a44463e15c9dab6eb979315109087ab53754 | /api/Controller/routes.py | 10703f0395f7f885af44b957030c7e56bcdced83 | [] | no_license | SCcagg5/Genevasign | f5db6d5d5daef12f9db1f28d92c6a54a285fcf28 | 02c7708ee1fcbffff2b4905c0abbc74006c78fd2 | refs/heads/master | 2022-03-28T13:47:12.342147 | 2019-12-10T13:51:28 | 2019-12-10T13:51:28 | 218,754,910 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 470 | py | from .routesfunc import *
def setuproute(app, call):
    # Register the application's routes. Each handler invokes `call` with a
    # pipeline of middleware-style functions: auth steps first, then the
    # action (e.g. /sign/ requires myauth before signdoc runs).
    @app.route('/test/', ['OPTIONS', 'POST', 'GET'], lambda x = None: call([]))
    @app.route('/login/', ['OPTIONS', 'POST'], lambda x = None: call([getauth]))
    @app.route('/sign/', ['OPTIONS', 'POST'], lambda x = None: call([myauth, signdoc]))
    def base():
        return
| [
"eliot@LAPTOP-C7G3T243.localdomain"
] | eliot@LAPTOP-C7G3T243.localdomain |
2e614246165947855457aa478ad7a314ca6bddce | 094df889c7798097d7f8b0cfc6010060adc92320 | /download.py | 4c3584deab9ac89031d29aee037451068a0f4414 | [] | no_license | Tsukiyomi-Yaori/shiyanlou-code | 87cc48042afd0aa8231efd08fef207a0117c8f5f | 3a6e2bd32c239728b7e6f87526c33313e7474c29 | refs/heads/master | 2021-01-08T01:08:30.162336 | 2020-02-28T16:09:32 | 2020-02-28T16:09:32 | 241,869,843 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 724 | py | #!/usr/bin/env python3
import requests
def download(url):
    '''
    Download the file at the given URL and save it to the current directory.
    url: address of the page whose content should be downloaded.
    (Docstring translated from the original Chinese.)
    '''
    # Check that the URL is well formed (has a scheme such as http://).
    try:
        req = requests.get(url)
    except requests.exceptions.MissingSchema:
        print('Invalid URL "{}"'.format(url))
        return
    # Check whether the page was accessed successfully.
    if req.status_code == 403:
        print('You do not have the authority to access this page.')
        return
    # Bug fix: previously any other error response (404, 500, ...) was
    # silently written to disk as if it were the requested file.
    if not req.ok:
        print('Request failed with status code {}.'.format(req.status_code))
        return
    # Robustness: a URL ending in '/' yields an empty last segment; fall back
    # to a conventional name instead of open('') raising an error.
    filename = url.split('/')[-1] or 'index.html'
    with open(filename, 'w') as fobj:
        fobj.write(req.content.decode('utf-8'))
    print("Download over.")
if __name__ == '__main__':
    # Prompt interactively for the URL to fetch.
    url = input('Enter a URL: ')
    download(url)
| [
"495380695@qq.com"
] | 495380695@qq.com |
388104b9683a9e7a868f3d95f1710fc0fe4db910 | cad6b588bd66675b31d88c4490a610a44c54084d | /openprocurement/auctions/lease/views/auction.py | e94280cba51093e7eb06dee9de06c57a50e0c839 | [
"Apache-2.0"
] | permissive | Scandie/openprocurement.auctions.lease | 389ffb4079a00e9d90cfa95ca9c1ca5e4b307e20 | 1b2b974ece7ca380b51221c2ddfa5b0ad9c1410a | refs/heads/master | 2020-03-25T11:46:34.119510 | 2018-07-05T12:48:54 | 2018-07-05T12:48:54 | 143,747,885 | 0 | 0 | Apache-2.0 | 2018-08-06T15:27:39 | 2018-08-06T15:27:39 | null | UTF-8 | Python | false | false | 7,681 | py | # -*- coding: utf-8 -*-
from openprocurement.auctions.core.utils import (
json_view,
context_unpack,
APIResource,
save_auction,
apply_patch,
opresource,
cleanup_bids_for_cancelled_lots
)
from openprocurement.auctions.core.validation import (
validate_auction_auction_data,
)
from openprocurement.auctions.lease.utils import (
invalidate_bids_under_threshold
)
@opresource(name='propertyLease:Auction Auction',
            collection_path='/auctions/{auction_id}/auction',
            path='/auctions/{auction_id}/auction/{auction_lot_id}',
            auctionsprocurementMethodType="propertyLease",
            description="auction auction data")
class AuctionAuctionResource(APIResource):
    """Resource exposing the live-auction data of a propertyLease auction.

    Collection endpoints operate on the whole auction; the item endpoints
    (``patch``/``post``) operate per lot via ``{auction_lot_id}``.
    """

    @json_view(permission='auction')
    def collection_get(self):
        """Return the 'auction_view' serialization of the auction.

        Only available while the auction status is 'active.auction';
        otherwise a 403 error is added to the request and nothing is
        returned.
        """
        if self.request.validated['auction_status'] != 'active.auction':
            self.request.errors.add('body', 'data', 'Can\'t get auction info in current ({}) auction status'.format(self.request.validated['auction_status']))
            self.request.errors.status = 403
            return
        return {'data': self.request.validated['auction'].serialize("auction_view")}

    # NOTE(review): `validators=(validate_auction_auction_data)` is NOT a
    # 1-tuple (no trailing comma) — the bare callable is passed. Confirm that
    # json_view accepts a single callable here, or add the missing comma.
    @json_view(content_type="application/json", permission='auction', validators=(validate_auction_auction_data))
    def collection_patch(self):
        """Set urls for access to auction.
        """
        if apply_patch(self.request, src=self.request.validated['auction_src']):
            self.LOGGER.info('Updated auction urls', extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_auction_patch'}))
        return {'data': self.request.validated['auction'].serialize("auction_view")}

    @json_view(content_type="application/json", permission='auction', validators=(validate_auction_auction_data))
    def collection_post(self):
        """Report auction results.

        Applies the posted bid values to the auction, drops bids that fall
        under the value threshold, then either starts the awarding process
        (when at least one active bid remains) or marks the auction
        'unsuccessful', and finally saves the auction.
        """
        apply_patch(self.request, save=False, src=self.request.validated['auction_src'])
        auction = self.request.validated['auction']
        invalidate_bids_under_threshold(auction)
        if any([i.status == 'active' for i in auction.bids]):
            self.request.content_configurator.start_awarding()
        else:
            auction.status = 'unsuccessful'
        if save_auction(self.request):
            self.LOGGER.info('Report auction results', extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_auction_post'}))
        return {'data': self.request.validated['auction'].serialize(self.request.validated['auction'].status)}

    @json_view(content_type="application/json", permission='auction', validators=(validate_auction_auction_data))
    def patch(self):
        """Set urls for access to auction for lot.
        """
        if apply_patch(self.request, src=self.request.validated['auction_src']):
            self.LOGGER.info('Updated auction urls', extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_lot_auction_patch'}))
        return {'data': self.request.validated['auction'].serialize("auction_view")}

    @json_view(content_type="application/json", permission='auction', validators=(validate_auction_auction_data))
    def post(self):
        """Report auction results for lot.

        Results are finalized only once every active lot with more than one
        bid has a finished auction period.
        """
        apply_patch(self.request, save=False, src=self.request.validated['auction_src'])
        auction = self.request.validated['auction']
        # NOTE(review): cleanup/invalidation and the award/unsuccessful
        # decision run only after all per-lot auctions have ended — confirm
        # this nesting against the upstream openprocurement sources.
        if all([i.auctionPeriod and i.auctionPeriod.endDate for i in auction.lots if i.numberOfBids > 1 and i.status == 'active']):
            cleanup_bids_for_cancelled_lots(auction)
            invalidate_bids_under_threshold(auction)
            if any([i.status == 'active' for i in auction.bids]):
                self.request.content_configurator.start_awarding()
            else:
                auction.status = 'unsuccessful'
        if save_auction(self.request):
            self.LOGGER.info('Report auction results', extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_lot_auction_post'}))
        return {'data': self.request.validated['auction'].serialize(self.request.validated['auction'].status)}
| [
"api_service@sandbox-api"
] | api_service@sandbox-api |
e4ac9c67a39ce094715d70f29f349c94518af075 | d6f9a6adfd78ae3f835cb93114b0e15ce3a861ee | /backend/settings.py | cb65283c6e3ac47d6604b6c25f148049e9649b43 | [] | no_license | sreedom/travelogger | 479c1921236f7b945b01921994ce2397517a2ce6 | 8187c1322bd8f04da618a1d65950e9abc0af177b | refs/heads/master | 2020-04-30T17:12:03.260548 | 2013-04-05T04:56:38 | 2013-04-05T04:56:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,373 | py | # Django settings for backend project.
# NOTE(review): DEBUG/TEMPLATE_DEBUG are enabled here; production deployments
# must override these.
DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)

MANAGERS = ADMINS

# NOTE(review): database credentials are committed to source control; prefer
# loading them from environment variables or an untracked local settings file.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'travelloger', # Or path to database file if using sqlite3.
        'USER': 'root', # Not used with sqlite3.
        'PASSWORD': 'asdfgh', # Not used with sqlite3.
        'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '', # Set to empty string for default. Not used with sqlite3.
    }
}

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True

# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''

# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''

# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'

# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)

# Make this unique, and don't share it with anybody.
# NOTE(review): the secret key is committed to source control — rotate it and
# load it from the environment instead.
SECRET_KEY = 'jzvpgp0%59sdoedytz4i1w)lro5+7)@c_^t=+8%6b4sbz)qfo&'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    # NOTE(review): CSRF protection middleware is disabled — confirm this is
    # intentional before exposing the site publicly.
    #'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    # Uncomment the next line for simple clickjacking protection:
    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

ROOT_URLCONF = 'backend.urls'

# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'backend.wsgi.application'

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    #'django.contrib.sites',
    #'django.contrib.messages',
    #'django.contrib.staticfiles',
    # Uncomment the next line to enable the admin:
    'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    'django.contrib.admindocs',
)

# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
| [
"sreeraj.a@inmobi.com"
] | sreeraj.a@inmobi.com |
a59e67e2f7499c2085d18c176ceba106f3d6e308 | ad0857eaba945c75e705594a53c40dbdd40467fe | /leetCode/number_of_papers_1780.py | dace9f5d455a28a9951f765e1c89a67e75850822 | [
"MIT"
] | permissive | yskang/AlgorithmPractice | c9964d463fbd0d61edce5ba8b45767785b0b5e17 | 3efa96710e97c8740d6fef69e4afe7a23bfca05f | refs/heads/master | 2023-05-25T13:51:11.165687 | 2023-05-19T07:42:56 | 2023-05-19T07:42:56 | 67,045,852 | 0 | 0 | null | 2021-06-20T02:42:27 | 2016-08-31T14:40:10 | Python | UTF-8 | Python | false | false | 1,602 | py | # Title: 종이의 개수
# Link: https://www.acmicpc.net/problem/1780
import sys

# The 3-way recursive split below can nest deeply on large boards, so raise
# CPython's default recursion limit.
sys.setrecursionlimit(10 ** 6)
def read_list_int():
    """Read one stdin line and return its whitespace-separated integers.

    Generalized: str.split() with no argument tolerates runs of spaces,
    tabs and the trailing newline, whereas the original split(' ') crashed
    on repeated spaces; well-formed single-space input parses identically.
    """
    return [int(token) for token in sys.stdin.readline().split()]
def read_single_int():
    """Read one line from stdin and parse it as a single integer."""
    raw = sys.stdin.readline()
    return int(raw.strip())
def check_all_number(matrix, start_x, start_y, length):
    """Return the common value of the length x length square whose top-left
    corner is (start_x, start_y), or 9 if the square is not uniform.

    (Standalone helper; `number_of_papers` performs the same scan inline.)
    """
    first = matrix[start_y][start_x]
    if length == 1:
        return first
    rows = range(start_y, start_y + length)
    cols = range(start_x, start_x + length)
    if all(matrix[y][x] == first for y in rows for x in cols):
        return first
    return 9
def number_of_papers(matrix, start_x, start_y, length):
    """Count the uniform papers produced by recursively 9-splitting a square.

    The length x length square at top-left (start_x, start_y) is kept whole
    when all its cells share one value (-1, 0 or 1); otherwise it is cut into
    nine (length // 3)-sized sub-squares and counted recursively.

    Returns a dict mapping each value -1/0/1 to its paper count.
    """
    counts = {-1: 0, 0: 0, 1: 0}
    first = matrix[start_y][start_x]
    # A 1x1 square is trivially uniform; larger squares need a full scan.
    uniform = length == 1 or all(
        matrix[y][x] == first
        for y in range(start_y, start_y + length)
        for x in range(start_x, start_x + length)
    )
    if uniform:
        counts[first] += 1
        return counts
    third = length // 3
    for dx in range(3):
        for dy in range(3):
            sub = number_of_papers(
                matrix, start_x + third * dx, start_y + third * dy, third
            )
            for value in (-1, 0, 1):
                counts[value] += sub[value]
    return counts
if __name__ == '__main__':
    # Read the board size N and the N x N grid of -1/0/1 values, then count
    # uniform papers. The result dict iterates in insertion order (-1, 0, 1)
    # in the printing loop that follows.
    N = read_single_int()
    matrix = []
    for _ in range(N):
        matrix.append(read_list_int())
    ret = number_of_papers(matrix, 0, 0, N)
for i in ret:
print(ret[i]) | [
"yongsung.kang@gmail.com"
] | yongsung.kang@gmail.com |
a1e1e234ea2709bbb7c6bcee51b75c86b9c958f6 | 475ec339d85366aee2b01df72c9643babc5ed91c | /Mundo 2/desafio_061.py | e5f67a7f8842a5cf729fdeda98f9e9a96947fce1 | [] | no_license | AlfredoAndrade14/AprendendoPython | 15e2298052a8916dacf8a531ff49925e47eb02b6 | c39422d2b76bd747c3c3f60bf26ff4819ee21a2f | refs/heads/main | 2023-08-25T22:57:05.032738 | 2021-10-28T08:16:04 | 2021-10-28T08:16:04 | 301,789,253 | 5 | 14 | null | 2020-10-25T17:56:57 | 2020-10-06T16:30:30 | Python | UTF-8 | Python | false | false | 211 | py | primeiro = int(input("primeiro termo: "))
# Arithmetic-progression printer: the first term was read into `primeiro`
# on the line above; read the common difference and print terms a1..a10.
razao = int(input("razao: "))
pos = 1
while True:
    # n-th term: a_n = a1 + (n - 1) * r
    conta = primeiro + (pos - 1) * razao
    print("a{} = {}".format(pos, conta))
    if(pos == 10):
        break
    pos += 1
| [
"noreply@github.com"
] | noreply@github.com |
f6e4b82f7b71e7c9c088f7d5198dcf164604aa4c | ce99427b6f79ca87d3869fe521b02fc59a48005b | /Django/BLOG/blog/urls.py | 4bd7b9cc6ee8b5688af113722440c65d62b972ab | [] | no_license | mateusoliveira43/estudos-Python | 99c3837559da922916301178e6722c3dcd20db9d | fe3d4a09264aaa50b38eb2c2a55b22ea2026bffd | refs/heads/master | 2023-02-13T04:45:14.078759 | 2021-01-16T14:16:06 | 2021-01-16T14:16:06 | 298,580,952 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,006 | py | """blog URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static

urlpatterns = [
    # The posts app handles the site root.
    path('', include('posts.urls')),
    path('admin/', admin.site.urls),
    # Rich-text editor endpoints used by the Summernote widget.
    path('summernote/', include('django_summernote.urls')),
]
# Serve user-uploaded media files (development convenience).
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"matews1943@gmail.com"
] | matews1943@gmail.com |
d28596a3b41c60b5e98f6cde6ebd4cf085d4d579 | 42cbf381d6d12b29a5212f3e8482ebde2067758b | /3 - Estrutura de Repetição/9nv.py | c87bfb1840eb522246d2b97d6b305cbc5d05d381 | [] | no_license | loristron/PythonExercisesLearnPython | d76d0f7d0b96b76ca463c0d431814a6ba74bbe74 | c98a903900b41733980a5a13017dc1901b1ecee7 | refs/heads/master | 2023-02-09T12:46:44.885145 | 2021-01-07T19:39:33 | 2021-01-07T19:39:33 | 327,707,915 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 284 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Jun 25 13:11:08 2020
@author: loris
Faça um programa que imprima na tela apenas os números ímpares entre 1 e 50.
"""
# Print every odd number between 1 and 50 and collect them in a list.
lista = []
for numero in range(1, 50, 2):
    # A step of 2 starting from 1 yields exactly the odd values,
    # so no parity test is needed.
    print(numero)
    lista.append(numero)
print(lista)
| [
"loremmiranda@gmail.com"
] | loremmiranda@gmail.com |
9dfe8bd40de7649a1a8dc257c27d213c77e9277e | e329bde6259872205349470571448e1ad9309585 | /model_architectures.py | 439aed43727a8443cbe5dec8e2d2b0f47407e451 | [] | no_license | brandonclintonjones/2D_Segmentation | e292a28fc1e0cd494c13c775b5e896a2188fa224 | 4f12c0c5c6830095dfb203bcb17b6851773c1fcd | refs/heads/master | 2022-11-28T10:08:24.607051 | 2020-08-07T19:28:46 | 2020-08-07T19:28:46 | 285,685,975 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,039 | py | ###############################
###############################
###############################
###############################
#########
#########
######### CREATED BY: BRANDON CLINTON JONES
######### AND CARLOS ADOLFO OSUNA
######### APRIL 23, 2020
#########
#########
#########
###############################
###############################
###############################
###############################
from tensorflow.layers import Conv1D, Conv2D, Conv3D, Conv2DTranspose, Conv3DTranspose
from tensorflow.layers import Dense, Dropout, Flatten, Layer
from tensorflow.layers import MaxPooling1D, MaxPooling2D, MaxPooling3D
from tensorflow.layers import BatchNormalization
# from tensorflow.keras.layers import BatchNormalization
# from tensorflow.keras.models import Sequential, Model
# from tensorflow.keras.layers import Input
# from tensorflow.keras.layers import Flatten, Dense, Dropout, Lambda, Reshape
# from tensorflow.keras.layers import Conv2D, UpSampling2D, SpatialDropout2D, MaxPooling2D, ZeroPadding2D, Conv2DTranspose
# from tensorflow.keras.layers import Conv3D, MaxPooling3D, UpSampling3D, Activation, BatchNormalization, PReLU, SpatialDropout3D
# from tensorflow.keras.layers import Conv3DTranspose
from tensorflow.keras.layers import concatenate
# from tensorflow.keras.layers import Permute
# from tensorflow.keras.optimizers import SGD, RMSprop, Adam
import os
import numpy as np
import tensorflow as tf
# def conv_block_simple_3d(prevlayer, num_filters, prefix, kernel_size=(2,3,3),initializer="he_normal", strides=(1, 1, 1)):
def conv_block_simple_3d(prevlayer, num_filters, prefix, kernel_size=(2,3,3),initializer="glorot_normal", strides=(1, 1, 1)):
conv = Conv3D(filters=num_filters, kernel_size=kernel_size, padding="same", kernel_initializer=initializer, strides=strides, name=prefix + "_conv",
data_format='channels_first')(prevlayer)
conv = BatchNormalization(name=prefix + "_bn",
axis=1)(conv)
# conv = Activation('relu', name=prefix + "_activation")(conv)
conv = tf.nn.relu(conv,name=prefix + "_activation")
return conv
# def conv_block_simple_3d(prevlayer, num_filters, prefix, kernel_size=(3,3,3),initializer="he_normal", strides=(1, 1, 1)):
def conv_block_simple_3d_no_bn(prevlayer, num_filters, prefix, kernel_size=(3,3,3),initializer="glorot_normal", strides=(1, 1, 1)):
    """Conv3D -> ReLU block without batch normalization (channels-first).

    Same contract as conv_block_simple_3d but skips the BatchNorm layer
    and defaults to a cubic (3, 3, 3) kernel.
    """
    conv = Conv3D(filters=num_filters, kernel_size=kernel_size, padding="same", kernel_initializer=initializer, strides=strides, name=prefix + "_conv",
        data_format='channels_first')(prevlayer)
    conv = tf.nn.relu(conv,name=prefix + "_activation")
    # conv = Activation('relu', name=prefix + "_activation")(conv)
    return conv
def unet_7_layers_3D(input_tensor):
    """Build a 7-level 3D U-Net graph (channels-first) and return the
    sigmoid prediction tensor.

    Structure: 3 encoder stages (two conv blocks + max pooling over the
    spatial axes only -- the depth axis is preserved), a 3-block
    bottleneck, then 3 decoder stages (Conv3DTranspose upsampling with
    skip connections concatenated on the channel axis), ending in a
    single-channel 1x1x1 sigmoid Conv3D.

    Args:
        input_tensor: 5-D tensor, channels-first
            -- assumed (batch, channels, depth, height, width); TODO confirm.

    Returns:
        The prediction tensor of the final sigmoid convolution.
    """
    # print('INPUT IMAGE SHAPE')
    # print(input_tensor.shape)
    mp_param = (1,2,2) # (1,2,2)
    stride_param=(1,2,2)
    d_format = "channels_first"
    pad = "same"
    us_param = (1,2,2)
    # kern=(1,3,3)
    kern=(2,3,3)
    # filt=(32,64,128,256,512)
    filt=(32,64,128,256)
    # filt=(64,128,256,512,1024)
    # --- Encoder: conv blocks + spatial-only max pooling ------------------
    conv1 = conv_block_simple_3d(prevlayer=input_tensor, num_filters=filt[0], prefix="conv1",kernel_size=kern)
    conv1 = conv_block_simple_3d(prevlayer=conv1, num_filters=filt[0], prefix="conv1_1",kernel_size=kern)
    pool1 = MaxPooling3D(pool_size=mp_param,strides=stride_param,
        padding="same",data_format="channels_first",name="pool1")(conv1)
    conv2 = conv_block_simple_3d(prevlayer=pool1, num_filters=filt[1], prefix="conv2",kernel_size=kern)
    conv2 = conv_block_simple_3d(prevlayer=conv2, num_filters=filt[1], prefix="conv2_1",kernel_size=kern)
    pool2 = MaxPooling3D(pool_size=mp_param,strides=stride_param,
        padding="same",data_format="channels_first",name="pool2")(conv2)
    conv3 = conv_block_simple_3d(prevlayer=pool2, num_filters=filt[2], prefix="conv3",kernel_size=kern)
    conv3 = conv_block_simple_3d(prevlayer=conv3, num_filters=filt[2], prefix="conv3_1",kernel_size=kern)
    pool3 = MaxPooling3D(pool_size=mp_param,strides=stride_param,
        padding="same",data_format="channels_first",name="pool3")(conv3)
    # --- Bottleneck: three stacked conv blocks ----------------------------
    conv4 = conv_block_simple_3d(prevlayer=pool3, num_filters=filt[3], prefix="conv_4",kernel_size=kern)
    conv4 = conv_block_simple_3d(prevlayer=conv4, num_filters=filt[3], prefix="conv_4_1",kernel_size=kern)
    conv4 = conv_block_simple_3d(prevlayer=conv4, num_filters=filt[3], prefix="conv_4_2",kernel_size=kern)
    # --- Decoder: transposed conv + skip connections (channel axis) -------
    up5 = Conv3DTranspose(filters=filt[2],kernel_size=kern,strides=(1,2,2),padding="same",data_format="channels_first")(conv4)
    up5 = concatenate([up5, conv3], axis=1)
    conv5 = conv_block_simple_3d(prevlayer=up5, num_filters=filt[2], prefix="conv5_1")
    conv5 = conv_block_simple_3d(prevlayer=conv5, num_filters=filt[2], prefix="conv5_2")
    up6 = Conv3DTranspose(filters=filt[1],kernel_size=kern,strides=(1,2,2),padding="same",data_format="channels_first")(conv5)
    up6 = concatenate([up6, conv2], axis=1)
    conv6 = conv_block_simple_3d(prevlayer=up6, num_filters=filt[1], prefix="conv6_1")
    conv6 = conv_block_simple_3d(prevlayer=conv6, num_filters=filt[1], prefix="conv6_2")
    up7 = Conv3DTranspose(filters=filt[0],kernel_size=kern,strides=(1,2,2),padding="same",data_format="channels_first")(conv6)
    up7 = concatenate([up7, conv1], axis=1)
    conv7 = conv_block_simple_3d(prevlayer=up7, num_filters=filt[0], prefix="conv7_1")
    conv7 = conv_block_simple_3d(prevlayer=conv7, num_filters=filt[0], prefix="conv7_2")
    # conv9 = SpatialDropout2D(0.2,data_format=d_format)(conv9)
    # Single-channel sigmoid output (per-voxel probability map).
    prediction = Conv3D(filters=1, kernel_size=(1, 1, 1), activation="sigmoid", name="prediction", data_format=d_format)(conv7)
    # print('PREDICTION SHAPE')
    # print(prediction.shape)
    return prediction
def unet_9_layers_3D(input_shape):
    """Build and return a 9-level 3D U-Net as a Keras Model (channels-first).

    4 encoder stages (two conv blocks + spatial-only max pooling each),
    a 3-block bottleneck, and 4 decoder stages (UpSampling3D + skip
    concatenation on the channel axis), ending in a single-channel
    1x1x1 sigmoid Conv3D.

    NOTE(review): ``Input``, ``Model`` and ``UpSampling3D`` come from the
    commented-out tensorflow.keras imports at the top of this file; as
    written this function would raise NameError -- confirm the intended
    import set before use.

    Args:
        input_shape: shape tuple passed to ``Input`` (channels-first,
            presumably (channels, depth, height, width) -- TODO confirm).

    Returns:
        A Keras Model mapping the input layer to the sigmoid prediction.
    """
    img_input = Input(input_shape)
    # print('INPUT IMAGE SHAPE')
    # print(img_input.shape)
    mp_param = (1,2,2) # (1,2,2)
    stride_param=(1,2,2)
    d_format = "channels_first"
    pad = "same"
    us_param = (1,2,2)
    filt=(32,64,128,256,512)
    # filt=(64,128,256,512,1024)
    # --- Encoder ----------------------------------------------------------
    conv1 = conv_block_simple_3d(prevlayer=img_input, num_filters=filt[0], prefix="conv1")
    conv1 = conv_block_simple_3d(prevlayer=conv1, num_filters=filt[0], prefix="conv1_1")
    pool1 = MaxPooling3D(pool_size=mp_param,strides=stride_param,
        padding="same",data_format="channels_first",name="pool1")(conv1)
    conv2 = conv_block_simple_3d(prevlayer=pool1, num_filters=filt[1], prefix="conv2")
    conv2 = conv_block_simple_3d(prevlayer=conv2, num_filters=filt[1], prefix="conv2_1")
    pool2 = MaxPooling3D(pool_size=mp_param,strides=stride_param,
        padding="same",data_format="channels_first",name="pool2")(conv2)
    conv3 = conv_block_simple_3d(prevlayer=pool2, num_filters=filt[2], prefix="conv3")
    conv3 = conv_block_simple_3d(prevlayer=conv3, num_filters=filt[2], prefix="conv3_1")
    pool3 = MaxPooling3D(pool_size=mp_param,strides=stride_param,
        padding="same",data_format="channels_first",name="pool3")(conv3)
    conv4 = conv_block_simple_3d(prevlayer=pool3, num_filters=filt[3], prefix="conv4")
    conv4 = conv_block_simple_3d(prevlayer=conv4, num_filters=filt[3], prefix="conv4_1")
    pool4 = MaxPooling3D(pool_size=mp_param,strides=stride_param,
        padding="same",data_format="channels_first",name="pool4")(conv4)
    # --- Bottleneck -------------------------------------------------------
    conv5 = conv_block_simple_3d(prevlayer=pool4, num_filters=filt[4], prefix="conv_5")
    conv5 = conv_block_simple_3d(prevlayer=conv5, num_filters=filt[4], prefix="conv_5_1")
    conv5 = conv_block_simple_3d(prevlayer=conv5, num_filters=filt[4], prefix="conv_5_2")
    # --- Decoder: upsample + skip connections -----------------------------
    up6 = UpSampling3D(size=us_param,data_format=d_format)(conv5)
    up6 = concatenate([up6, conv4], axis=1)
    conv6 = conv_block_simple_3d(prevlayer=up6, num_filters=filt[3], prefix="conv6_1")
    conv6 = conv_block_simple_3d(prevlayer=conv6, num_filters=filt[3], prefix="conv6_2")
    up7 = UpSampling3D(size=us_param,data_format=d_format)(conv6)
    up7 = concatenate([up7, conv3], axis=1)
    conv7 = conv_block_simple_3d(prevlayer=up7, num_filters=filt[2], prefix="conv7_1")
    conv7 = conv_block_simple_3d(prevlayer=conv7, num_filters=filt[2], prefix="conv7_2")
    up8 = UpSampling3D(size=us_param,data_format=d_format)(conv7)
    up8 = concatenate([up8, conv2], axis=1)
    conv8 = conv_block_simple_3d(prevlayer=up8, num_filters=filt[1], prefix="conv8_1")
    conv8 = conv_block_simple_3d(prevlayer=conv8, num_filters=filt[1], prefix="conv8_2")
    up9 = UpSampling3D(size=us_param,data_format=d_format)(conv8)
    up9 = concatenate([up9, conv1], axis=1)
    conv9 = conv_block_simple_3d(prevlayer=up9, num_filters=filt[0], prefix="conv9_1")
    conv9 = conv_block_simple_3d(prevlayer=conv9, num_filters=filt[0], prefix="conv9_2")
    # conv9 = SpatialDropout2D(0.2,data_format=d_format)(conv9)
    prediction = Conv3D(filters=1, kernel_size=(1, 1, 1), activation="sigmoid", name="prediction", data_format=d_format)(conv9)
    model = Model(img_input, prediction)
    # print('PREDICTION SHAPE')
    # print(prediction.shape)
    return model
def unet_9_layers(input_tensor,output_tensor_channels = 1):
    """Build a 9-level 2D U-Net graph (channels-first) and return the
    sigmoid prediction tensor.

    4 encoder stages (two conv blocks + 2x2 max pooling), a 2-block
    bottleneck, and 4 decoder stages (Conv2DTranspose upsampling with
    skip connections concatenated on the channel axis).

    Args:
        input_tensor: 4-D tensor, channels-first
            -- assumed (batch, channels, height, width); TODO confirm.
        output_tensor_channels: number of channels in the sigmoid
            output map (default 1).

    Returns:
        The prediction tensor of the final 1x1 sigmoid Conv2D.
    """
    mp_param = (2,2)
    stride_param=(2,2)
    d_format = "channels_first"
    pad = "same"
    kern=(3,3)
    filt=(32,64,128,256,512)
    # filt=(64,128,256,512,1024)
    # --- Encoder ----------------------------------------------------------
    conv1 = conv_block_simple_2d(prevlayer=input_tensor, num_filters=filt[0], prefix="conv1")
    conv1 = conv_block_simple_2d(prevlayer=conv1, num_filters=filt[0], prefix="conv1_1")
    pool1 = MaxPooling2D(pool_size=mp_param,strides=stride_param,
        padding="same",data_format="channels_first",name="pool1")(conv1)
    conv2 = conv_block_simple_2d(prevlayer=pool1, num_filters=filt[1], prefix="conv2")
    conv2 = conv_block_simple_2d(prevlayer=conv2, num_filters=filt[1], prefix="conv2_1")
    pool2 = MaxPooling2D(pool_size=mp_param,strides=stride_param,
        padding="same",data_format="channels_first",name="pool2")(conv2)
    conv3 = conv_block_simple_2d(prevlayer=pool2, num_filters=filt[2], prefix="conv3")
    conv3 = conv_block_simple_2d(prevlayer=conv3, num_filters=filt[2], prefix="conv3_1")
    pool3 = MaxPooling2D(pool_size=mp_param,strides=stride_param,
        padding="same",data_format="channels_first",name="pool3")(conv3)
    conv4 = conv_block_simple_2d(prevlayer=pool3, num_filters=filt[3], prefix="conv4")
    conv4 = conv_block_simple_2d(prevlayer=conv4, num_filters=filt[3], prefix="conv4_1")
    pool4 = MaxPooling2D(pool_size=mp_param,strides=stride_param,
        padding="same",data_format="channels_first",name="pool4")(conv4)
    # --- Bottleneck (note: second block steps back down to filt[3]) -------
    conv5 = conv_block_simple_2d(prevlayer=pool4, num_filters=filt[4], prefix="conv_5")
    conv5 = conv_block_simple_2d(prevlayer=conv5, num_filters=filt[3], prefix="conv_5_1")
    # conv5 = conv_block_simple_2d(prevlayer=conv5, num_filters=filt[4], prefix="conv_5_2")
    # 4 is 512,
    # 3 is 256
    # 2 is 128
    # --- Decoder: transposed conv + skip connections ----------------------
    up6 = Conv2DTranspose(filters=filt[3],kernel_size=kern,strides=(2,2),padding="same",data_format="channels_first")(conv5)
    up6 = concatenate([up6, conv4], axis=1)
    conv6 = conv_block_simple_2d(prevlayer=up6, num_filters=filt[3], prefix="conv6_1")
    conv6 = conv_block_simple_2d(prevlayer=conv6, num_filters=filt[2], prefix="conv6_2")
    up7 = Conv2DTranspose(filters=filt[2],kernel_size=kern,strides=(2,2),padding="same",data_format="channels_first")(conv6)
    up7 = concatenate([up7, conv3], axis=1)
    conv7 = conv_block_simple_2d(prevlayer=up7, num_filters=filt[2], prefix="conv7_1")
    conv7 = conv_block_simple_2d(prevlayer=conv7, num_filters=filt[1], prefix="conv7_2")
    up8 = Conv2DTranspose(filters=filt[1],kernel_size=kern,strides=(2,2),padding="same",data_format="channels_first")(conv7)
    up8 = concatenate([up8, conv2], axis=1)
    conv8 = conv_block_simple_2d(prevlayer=up8, num_filters=filt[1], prefix="conv8_1")
    conv8 = conv_block_simple_2d(prevlayer=conv8, num_filters=filt[0], prefix="conv8_2")
    up9 = Conv2DTranspose(filters=filt[0],kernel_size=kern,strides=(2,2),padding="same",data_format="channels_first")(conv8)
    up9 = concatenate([up9, conv1], axis=1)
    conv9 = conv_block_simple_2d(prevlayer=up9, num_filters=filt[0], prefix="conv9_1")
    conv9 = conv_block_simple_2d(prevlayer=conv9, num_filters=filt[0], prefix="conv9_2")
    # conv9 = SpatialDropout2D(0.2,data_format=d_format)(conv9)
    # prediction = Conv2D(filters=1, kernel_size=(1, 1), activation="sigmoid", name="prediction", data_format=d_format)(conv9)
    prediction = Conv2D(filters = output_tensor_channels, kernel_size = (1, 1), activation="sigmoid", use_bias = True,
        name="prediction",data_format=d_format)(conv9)
    # prediction = Conv2D(filters = output_tensor_channels, kernel_size = (1, 1), name="prediction",data_format=d_format)(conv9)
    return prediction
def unet_7_layers(input_tensor):
    """Build a 7-level 2D U-Net graph (channels-first) and return the
    single-channel sigmoid prediction tensor.

    3 encoder stages (two conv blocks + 2x2 max pooling), a 3-block
    bottleneck, then 3 decoder stages (Conv2DTranspose upsampling with
    skip connections concatenated on the channel axis).

    Args:
        input_tensor: 4-D tensor, channels-first
            -- assumed (batch, channels, height, width); TODO confirm.

    Returns:
        The prediction tensor of the final 1x1 sigmoid Conv2D.
    """
    # print('INPUT IMAGE SHAPE')
    # print(input_tensor.shape)
    mp_param = (2,2) # (1,2,2)
    stride_param=(2,2)
    d_format = "channels_first"
    pad = "same"
    us_param = (2,2)
    kern=(3,3)
    # filt=(32,64,128,256,512)
    filt=(32,64,128,256)
    # filt=(64,128,256,512,1024)
    # --- Encoder ----------------------------------------------------------
    conv1 = conv_block_simple_2d(prevlayer=input_tensor, num_filters=filt[0], prefix="conv1",kernel_size=kern)
    conv1 = conv_block_simple_2d(prevlayer=conv1, num_filters=filt[0], prefix="conv1_1",kernel_size=kern)
    pool1 = MaxPooling2D(pool_size=mp_param,strides=stride_param,
        padding="same",data_format="channels_first",name="pool1")(conv1)
    conv2 = conv_block_simple_2d(prevlayer=pool1, num_filters=filt[1], prefix="conv2",kernel_size=kern)
    conv2 = conv_block_simple_2d(prevlayer=conv2, num_filters=filt[1], prefix="conv2_1",kernel_size=kern)
    pool2 = MaxPooling2D(pool_size=mp_param,strides=stride_param,
        padding="same",data_format="channels_first",name="pool2")(conv2)
    conv3 = conv_block_simple_2d(prevlayer=pool2, num_filters=filt[2], prefix="conv3",kernel_size=kern)
    conv3 = conv_block_simple_2d(prevlayer=conv3, num_filters=filt[2], prefix="conv3_1",kernel_size=kern)
    pool3 = MaxPooling2D(pool_size=mp_param,strides=stride_param,
        padding="same",data_format="channels_first",name="pool3")(conv3)
    # --- Bottleneck: three stacked conv blocks ----------------------------
    conv4 = conv_block_simple_2d(prevlayer=pool3, num_filters=filt[3], prefix="conv_4",kernel_size=kern)
    conv4 = conv_block_simple_2d(prevlayer=conv4, num_filters=filt[3], prefix="conv_4_1",kernel_size=kern)
    conv4 = conv_block_simple_2d(prevlayer=conv4, num_filters=filt[3], prefix="conv_4_2",kernel_size=kern)
    # --- Decoder: transposed conv + skip connections ----------------------
    up5 = Conv2DTranspose(filters=filt[2],kernel_size=kern,strides=(2,2),padding="same",data_format="channels_first")(conv4)
    up5 = concatenate([up5, conv3], axis=1)
    conv5 = conv_block_simple_2d(prevlayer=up5, num_filters=filt[2], prefix="conv5_1")
    conv5 = conv_block_simple_2d(prevlayer=conv5, num_filters=filt[2], prefix="conv5_2")
    up6 = Conv2DTranspose(filters=filt[1],kernel_size=kern,strides=(2,2),padding="same",data_format="channels_first")(conv5)
    up6 = concatenate([up6, conv2], axis=1)
    conv6 = conv_block_simple_2d(prevlayer=up6, num_filters=filt[1], prefix="conv6_1")
    conv6 = conv_block_simple_2d(prevlayer=conv6, num_filters=filt[1], prefix="conv6_2")
    up7 = Conv2DTranspose(filters=filt[0],kernel_size=kern,strides=(2,2),padding="same",data_format="channels_first")(conv6)
    up7 = concatenate([up7, conv1], axis=1)
    conv7 = conv_block_simple_2d(prevlayer=up7, num_filters=filt[0], prefix="conv7_1")
    conv7 = conv_block_simple_2d(prevlayer=conv7, num_filters=filt[0], prefix="conv7_2")
    # conv9 = SpatialDropout2D(0.2,data_format=d_format)(conv9)
    prediction = Conv2D(filters=1, kernel_size=(1, 1), activation="sigmoid", name="prediction", data_format=d_format)(conv7)
    # print('PREDICTION SHAPE')
    # print(prediction.shape)
    return prediction
def simple_cnn(input_shape):
    """Build a tiny 3-block CNN and return it as a Keras Model.

    Three 16-filter conv blocks followed by a 2-channel 1x1 sigmoid
    convolution (channels-first).

    NOTE(review): ``Input`` and ``Model`` come from the commented-out
    tensorflow.keras imports at the top of this file; as written this
    function would raise NameError -- confirm the intended import set.

    Args:
        input_shape: shape tuple passed to ``Input`` (channels-first).

    Returns:
        A Keras Model mapping the input layer to the prediction tensor.
    """
    # NOTE: ThE INPUT_TENSOR IS THE SUBSAMPLED KSPACE
    # SO IT IS A COMPLEX128 TENSOR OF SHAPE
    # ( BATCH_DIM, CHANNELS_DIM, IMG_HEIGHT, IMG_WIDTH )
    img_input = Input(input_shape)
    conv1 = conv_block_simple_2d(prevlayer=img_input, num_filters=16, prefix="conv1")
    conv2 = conv_block_simple_2d(prevlayer=conv1, num_filters=16, prefix="conv1_2")
    conv3 = conv_block_simple_2d(prevlayer=conv2, num_filters=16, prefix="conv1_3")
    prediction = Conv2D(
        filters=2,
        kernel_size=(1, 1),
        activation="sigmoid",
        name="prediction",
        data_format="channels_first",
    )(conv3)
    # prediction = Conv2D(
    #     filters=1,
    #     kernel_size=(1, 1),
    #     activation="sigmoid",
    #     name="prediction",
    #     data_format="channels_first",
    # )(conv3)
    the_model=Model(inputs=img_input,outputs=prediction)
    return the_model
def conv_block_simple_2d(prevlayer, num_filters, prefix, kernel_size=(3,3),initializer="he_normal", strides=(1, 1)):
    """Conv2D -> BatchNorm -> ReLU building block (channels-first).

    NOTE(review): this function is defined twice in this file with an
    identical active body; the second definition below shadows this one
    at import time.
    """
    # conv = Conv2D(filters=num_filters, kernel_size=kernel_size, padding="same", kernel_initializer=initializer, strides=strides, name=prefix + "_conv",
    #     data_format='channels_first')(prevlayer)
    # conv = BatchNormalization(name=prefix + "_bn",
    #     axis=1)(conv)
    # conv = Activation('relu', name=prefix + "_activation")(conv)
    # return conv
    conv = Conv2D(filters=num_filters, kernel_size=kernel_size, padding="same", kernel_initializer=initializer, strides=strides, name=prefix + "_conv",
        data_format='channels_first')(prevlayer)
    # axis=1 because the channel axis comes first in this data format.
    conv = BatchNormalization(name=prefix + "_bn",
        axis=1)(conv)
    # conv = Activation('relu', name=prefix + "_activation")(conv)
    conv = tf.nn.relu(conv,name=prefix + "_activation")
    return conv
def conv_block_simple_2d(prevlayer, num_filters, prefix, kernel_size=(3,3),initializer="he_normal", strides=(1, 1)):
    """Conv2D -> BatchNorm -> ReLU building block (channels-first).

    NOTE(review): duplicate of the definition directly above -- the
    active bodies are identical, so this redefinition is harmless but
    should be removed.
    """
    # conv = Conv2D(filters=num_filters, kernel_size=kernel_size, padding="same", kernel_initializer=initializer, strides=strides, name=prefix + "_conv",
    #     data_format='channels_first')(prevlayer)
    # conv = Activation('relu', name=prefix + "_activation")(conv)
    # return conv
    conv = Conv2D(filters=num_filters, kernel_size=kernel_size, padding="same", kernel_initializer=initializer, strides=strides, name=prefix + "_conv",
        data_format='channels_first')(prevlayer)
    conv = BatchNormalization(name=prefix + "_bn",
        axis=1)(conv)
    # conv = Activation('relu', name=prefix + "_activation")(conv)
    conv = tf.nn.relu(conv,name=prefix + "_activation")
    return conv
| [
"brandonclintonjones@gmail.com"
] | brandonclintonjones@gmail.com |
08fc618ae201b8f3f9809dbf4cf9931b73ece1ac | 447a7f4135cd9aee20985de8507758d49b7720dd | /build_schedule.py | cfe62d3178c823c613f5d51e6516154cb620870d | [] | no_license | subotto/website | 1b36ed887852e43eff25a8e9e0522546d7003850 | 648701e42d52b8afdd61076ca84aadcf41286174 | refs/heads/master | 2020-07-21T20:20:11.307370 | 2018-03-28T19:36:46 | 2018-03-28T19:36:46 | 16,034,185 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 313 | py | #!/usr/bin/env python2
schedule = [l[:-1].split('\t') for l in open("schedule.txt").readlines()]
print "<?php"
print "$schedule = array();"
for line in schedule:
print """$schedule[] = array(
"time" => "%s",
"mathematicians" => "%s",
"physicists" => "%s"
);""" % (line[0], line[1], line[2])
print "?>"
| [
"mascellani@poisson.phc.unipi.it"
] | mascellani@poisson.phc.unipi.it |
dccdcdfd2e39d70e27eae488012112a1b51ee363 | 1f61891dd0e77da68e2388f08c995299827438f6 | /QR code.py | 285772321487ce11b1e432b86295b3b626c1f86f | [
"Unlicense"
] | permissive | SJISTIC-LTD/Create-and-Read-QR-code | 4ea2f0ed7c45e98e4ecdbc1b6422edd87d637a6c | 30c543e466b0db6d76a046837d2fc7f78d9aa53f | refs/heads/main | 2023-08-14T13:15:14.098066 | 2021-10-17T18:19:39 | 2021-10-17T18:19:39 | 418,218,179 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 531 | py | pip install qrcode
#Import Library
import qrcode
#Generate QR Code
img=qrcode.make('Hello World')
img.save('hello.png')
qr = qrcode.QRCode(
version=1,
error_correction=qrcode.constants.ERROR_CORRECT_L,
box_size=10,
border=4,
)
qr.add_data("https://abhijithchandradas.medium.com/")
qr.make(fit=True)
img = qr.make_image(fill_color="red", back_color="black")
img.save("medium.png")
pip install cv2
import cv2
img=cv2.imread("medium.png")
det=cv2.QRCodeDetector()
val, pts, st_code=det.detectAndDecode(img)
print(val)
| [
"noreply@github.com"
] | noreply@github.com |
d12a4020fdd95b228aa01947da6345a6333cb4c4 | 67a78c33ef688973d62fcc5e2fd8b35c191494b4 | /src/io/utils.py | 87a822cabcf51a1b72b411824a893eb788b92944 | [] | no_license | steveneale/topic_modeller | e334b413358ba7a05e6ef45fbc63fc9f3105e238 | 0c79f3dd142214bd8970d82a4470c2a3664e2765 | refs/heads/master | 2022-12-09T17:49:44.583857 | 2019-07-26T23:54:11 | 2019-07-26T23:54:11 | 195,701,401 | 1 | 0 | null | 2022-12-08T05:51:32 | 2019-07-07T22:07:03 | Python | UTF-8 | Python | false | false | 1,066 | py | #!usr/bin/env python3
#-*- coding: utf-8 -*-
"""
'utils.py' (topic_modeller/src/io)
Input/output utility functions
2019 Steve Neale <steveneale3000@gmail.com>
"""
import os
import pickle
import pandas as pd
def load_from_file(file_path, split_lines=True, strip_lines=False):
    """Read a UTF-8 text file.

    Returns a list of lines when ``split_lines`` is True (each line
    additionally stripped of surrounding whitespace when ``strip_lines``
    is True), otherwise the whole file as one string.
    """
    with open(file_path, "r", encoding="utf-8") as source:
        contents = source.read()
    if not split_lines:
        return contents
    lines = contents.splitlines()
    if strip_lines:
        return [line.strip() for line in lines]
    return lines
def load_data_frame_from_csv(file_path, seperator=",", header=0):
    """Load a delimited text file into a pandas DataFrame.

    Note: the ``seperator`` (sic) parameter name is kept misspelled for
    backward compatibility with existing keyword callers.
    """
    frame = pd.read_csv(file_path, sep=seperator, header=header)
    return frame
def save_pickle_object_to_file(object_to_pickle, file_path):
    """Pickle ``object_to_pickle`` to ``file_path`` using protocol 4.

    BUGFIX: the original passed ``open(...)`` directly to
    ``pickle.dump`` and never closed the handle; a ``with`` block now
    guarantees the file is flushed and closed even if pickling raises.
    """
    with open(file_path, "wb") as pickle_file:
        pickle.dump(object_to_pickle, pickle_file, protocol=4)
def load_pickled_object(file_path):
    """Unpickle and return the object stored at ``file_path``.

    BUGFIX: the original opened the file inline and relied on garbage
    collection to close it; a ``with`` block closes it deterministically.
    Note: only call this on trusted files -- unpickling untrusted data
    can execute arbitrary code.
    """
    with open(file_path, "rb") as pickle_file:
        return pickle.load(pickle_file)
def create_directory(directory_path):
    """Create ``directory_path`` (including parents) if it does not exist.

    BUGFIX: the original check-then-create pattern was race-prone --
    another process creating the directory between the ``exists`` check
    and ``makedirs`` would raise.  ``exist_ok=True`` makes the call
    idempotent and atomic from the caller's point of view.

    Returns:
        The directory path, unchanged.
    """
    os.makedirs(directory_path, exist_ok=True)
    return directory_path
"steveneale3000@gmail.com"
] | steveneale3000@gmail.com |
3630c0d39ca65c8e113772a5e5e701a400b4eb9c | 4b8979934c5040a3ef0d3d39456b4f6bbece689c | /atd.py | e0f4f806aa673caa924c736aedc6a681db104bf0 | [] | no_license | selrahc13/Apple-Trailer-Downloader | b701f70ea061010e010031e6a861c3ab31b20034 | 0445e903c047ddced71fe40563b5d6345745fa6d | refs/heads/master | 2021-01-25T05:34:16.517709 | 2010-08-21T16:38:34 | 2010-08-21T16:38:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 34,992 | py | import base64
import datetime
from optparse import OptionParser
import os
import rfc822
import re
import shlex
import shutil
import string
import struct
import sys
import time
import urllib2
from xml.etree.ElementTree import ElementTree
from pkg.BeautifulSoup import BeautifulSoup
import imdb
from pkg.optparse_fmt import IndentedHelpFormatterWithNL
import pkg.y_serial_v052 as y_serial
def date_filter(obj_list, dt, date_attrib, after = True, include_none=True):
    ''' Takes a list of objects and returns a list that contains each
        object with attribute specified in "date_attrib" after "dt" unless
        "after" is set to False, in which case it returns a list of
        objects before "dt".

        dt and date_attrib should be a datetime object
    '''
    kept = []
    for candidate in obj_list:
        stamp = getattr(candidate, date_attrib)
        if not stamp:
            # Object carries no date: keep it only when requested.
            if include_none:
                kept.append(candidate)
            continue
        # Boundary dates (==) pass the filter in either direction.
        passes = (stamp >= dt) if after else (stamp <= dt)
        if passes:
            kept.append(candidate)
    return kept
def get_movies_from_db(db):
    ''' Retrieve all Movie objects from database.

        db is a y_serial handle; selectdic() rows are tuples whose third
        element (x[2]) is the stored object, so anything in the "movies"
        table that is not a Movie instance is filtered out.
    '''
    movies = [x[2] for x in db.selectdic("*", 'movies').values() if isinstance(x[2], Movie)]
    return movies
def sanitized_filename(filename, file_location=None):
    ''' Used to sanitize text for use as a filename. If file_location isn't
        provided, we don't create a test file. Otherwise temporarily create a
        0-byte file with the sanitized filename to confirm it's validity.

        >>> sanitized_filename("Prince of Persia: the Sands of Time.mov", ".")
        'Prince of Persia the Sands of Time.mov'
    '''
    #For safety's sake we only include ascii letters and digits
    valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
    fn = ''.join(c for c in filename if c in valid_chars)
    if not file_location:
        return fn
    #test filename for validity on file system at file_location
    f = os.path.join(file_location, fn)
    try:
        #Try to create (then remove) a 0-byte probe file
        open(f, 'w').close()
        os.remove(f)
        return fn
    except (IOError, OSError):
        #BUGFIX: was a bare "except:" which also swallowed
        #KeyboardInterrupt and programming errors; only filesystem
        #failures should trigger the fallback.
        pass
    ''' if we fail, it's often because we have illegal character at begining
        so try prepending some valid characters.
    '''
    fn = "atd-%s" % fn
    f = os.path.join(file_location, fn)
    try:
        open(f, 'w').close()
        os.remove(f)
        return fn
    except (IOError, OSError):
        raise NameError("Cannot build valid filename!")
def move_file(s, d):
    ''' Try to safely move a file from s to d. If filename already exists, and
        file contents are different (as determined by hash_file()) we try
        appending an integer to filename.

        Argument d should include destination filename as well as path.
    '''
    if not os.path.isfile(d):
        #Filename doesn't exist at destination, so move it...
        shutil.move(s, d)
        return d
    #Filename exists, so lets see if it's the same file
    source_hash = hash_file(s)
    if source_hash == hash_file(d):
        #It is the same file, so just delete source
        os.remove(s)
        return d
    #Different file with same filename so try up to 10 filenames before failing
    #BUGFIX: split the original destination once.  The old loop re-split the
    #already-suffixed name on each pass, producing compounded candidates
    #like "name.0.1.2.ext" instead of "name.0.ext", "name.1.ext", ...
    #It also never checked earlier candidates independently.
    root, ext = os.path.splitext(d)
    for i in range(10):
        candidate = "%s.%s%s" % (root, str(i), ext)
        if not os.path.isfile(candidate):
            shutil.move(s, candidate)
            return candidate
    raise NameError("Can't find valid filename for %s" % os.path.basename(s))
def _options():
    ''' Process command-line options and return the parsed options object.

        Python 2 code (print statements).  Calls sys.exit() with a
        message when a date option is malformed or an unknown resolution
        is requested for --respref.
    '''
    #Resolutions we know how to request, best quality first.
    res_pref = ['1080p', '720p', '480p', '640w', '480', '320']
    usage = "usage: %prog [options]\n\nIf no options passed, it will download all not already downloaded trailers to a subdir called Trailers."
    parser = OptionParser(version="%prog 3.0dev1", usage=usage, formatter=IndentedHelpFormatterWithNL())
    parser.add_option("-d", "--dest",
                      dest="destination",
                      metavar="DIR",
                      help="Destination directory. (default: %default)",
                      type="string",
                      default="Trailers")
    parser.add_option("-r", "--rename",
                      dest="rename_mask",
                      help='String representing how each trailer should be named on disk. This string can include these variables:\n %TITLE%: The movies title.\n %FN%: The trailers original filename.\n %EXT%: The trailers original extension.\n %DT%: The date the trailer was posted to Apple.com\n %DTD%: The date the trailer was downloaded.\n %RES%: The resolution of the trailer.\n %MPAA%: The rating of the movie.\n------------------\nExamples:\n atd.py -r "%TITLE% - %RES%.hdmov"\nWill result in trailers named:\n Iron Man 2 - 720p.hdmov\nYou can also include path seperators to sort trailers into directories:\n atd.py --rename="%MPAA%\%TITLE% - (%DT%).hdmov"\nResults in:\n PG-13\Inception - (2010-05-12).hdmov',
                      type="string",
                      default="%FN%.%EXT%")
    parser.add_option("--mdate",
                      dest="mdatelimit",
                      metavar="DATE",
                      help="Only get trailers for movies with a release date after this. Includes movies with no release date.(format: YYYY-MM-DD)")
    parser.add_option("--tdate",
                      dest="tdatelimit",
                      metavar="DATE",
                      help="Only get trailers released after this date. (format: YYYY-MM-DD)")
    hmsg = "Get specified resolution or less. Options are %s" % res_pref
    hmsg = hmsg + " (Default: %default)"
    parser.add_option("--respref",
                      dest="respref",
                      help=hmsg,
                      default='320')
    parser.add_option("-f", "--fake",
                      dest="fake",
                      help="Don't download, just create dummy files zero bytes long.",
                      action="store_true")
    parser.add_option("--download",
                      dest="redownload",
                      help="(Re)download trailers for the movie specified. Does a substring match so using 'Iron Man' would (re)download trailers for 'Iron Man' and 'Iron Man 2'. We also accept the '?' and '*' wildcards. Using this option skips normal download processing so only the specified movie gets downloaded on this run. Case senstive.")
    parser.add_option("--flush",
                      help="Deletes all stored state information, which means that atd will no longer remember which trailers it has already downloaded.",
                      action="store_true")
    (options, args) = parser.parse_args()
    #Convert the date strings to datetime objects up front so downstream
    #code can compare them directly; exit with a message on bad input.
    if options.mdatelimit:
        try:
            options.mdatelimit = datetime.datetime.strptime(options.mdatelimit, '%Y-%m-%d')
        except:
            print "Invalid date format for --mdate. Please use YYYY-MM-DD."
            sys.exit()
    if options.tdatelimit:
        try:
            options.tdatelimit = datetime.datetime.strptime(options.tdatelimit, '%Y-%m-%d')
        except:
            print "Invalid date format for --tdate. Please use YYYY-MM-DD."
            sys.exit()
    if options.respref not in res_pref:
        print "Invalid respoution specified for --respref"
        sys.exit()
    return options
def sync_movie(old_movie, new_movie):
''' Take two Movie objects with both representing the same movie and sync
the info between them.
'''
synced_movie = old_movie
if old_movie.apple_id != new_movie.apple_id:
raise ValueError("Can only sync state info for the same movie")
#These attributes we'll always want the newest version of...
replace_attribs = ['title', 'runtime', 'mpaa', 'release_date', 'description',
'apple_genre', 'studio', 'director', 'cast']
#...so we just use the info from new_movie without regard to it's value in old_movie
for attrib in replace_attribs:
setattr(synced_movie, attrib, getattr(new_movie, attrib))
if getattr(old_movie, attrib) != getattr(new_movie, attrib):
print "Updated: %s ==> %s" % (getattr(old_movie, attrib), getattr(new_movie, attrib))
#If we have any new urls in the following lists we add them
for url in new_movie.poster_url:
if url not in old_movie.poster_url:
synced_movie.poster_url.append(url)
print "Added new poster url"
for url in new_movie.large_poster_url:
if url not in old_movie.large_poster_url:
synced_movie.large_poster_url.append(url)
print "Added new large_poster url"
for trailer in new_movie.trailers:
if trailer not in old_movie.trailers:
#We don't know about this particular trailer
synced_movie.trailers[trailer] = new_movie.trailers[trailer]
print "Found new trailer"
else:
#We do know about this trailer so we keep our old info
synced_movie.trailers[trailer] = old_movie.trailers[trailer]
#...However, we need to check if any of the resolutions for this
#trailer have been downloaded since old_movie
for res in new_movie.trailers[trailer].urls:
if res in synced_movie.trailers[trailer].urls:
if synced_movie.trailers[trailer].urls[res].downloaded:
#We have this one marked as downloaded, so don't do anything else
continue
else:
#We don't have this downloaded, so just copy our new state
synced_movie.trailers[trailer].urls[res] = new_movie.trailers[trailer].urls[res]
else:
#We don't have this res, so just copy our new state
synced_movie.trailers[trailer].urls[res] = new_movie.trailers[trailer].urls[res]
#...This also means new_movie found new resolutions so copy it's _res_fetched attribute
synced_movie.trailers[trailer]._rez_fetched = new_movie.trailers[trailer]._rez_fetched
for trailer in synced_movie.trailers:
synced_movie.trailers[trailer].movie_title == synced_movie.title
return synced_movie
def download_trailers(db, res):
    ''' Build a list of movies and then call the appropriate download method
        on each.

        Relies on the module-level "options" object produced by _options():
        options.redownload / options.mdatelimit / options.tdatelimit select
        and filter the movie list.  Each processed movie is re-persisted so
        its download state is remembered.
    '''
    if options.redownload:
        #User specified a title string to download, so build our list of movies
        #using that
        movies = fetch_by_movie_title(options.redownload, db)
    else:
        #Get all movies
        movies = get_movies_from_db(db)
    if options.mdatelimit:
        #User has a movie date filter set, so filter our list of movies
        movies = date_filter(movies, options.mdatelimit, 'release_date')
    if options.tdatelimit:
        #User has a trailer date filter set, so filter our list of movies
        #(keep a movie as soon as ONE of its trailers is new enough)
        trailer_date_filtered = []
        for movie in movies:
            for trailer in movie.trailers:
                if movie.trailers[trailer].date > options.tdatelimit:
                    trailer_date_filtered.append(movie)
                    break
        movies = trailer_date_filtered
    for movie in movies:
        if isinstance(movie, Movie):
            print '*'*50
            print "Checking/downloading for %s" % movie.title
            if options.redownload:
                ''' Since --download option was used, call Movie.download_trailers()
                    with force=True because we don't care if we've already downloaded the trailer before
                '''
                movie.download_trailers(res, force=True)
            else:
                #Call Movie.download_trailers()
                movie.download_trailers(res)
            #Persist updated download state for this movie.
            persist_movie(movie, db)
def persist_movie(movie, db):
    ''' Surprisingly this function is used for saving a Movie object to our
        database.

        If the movie already exists (matched by apple_id), the stored and
        fresh copies are merged via sync_movie() and the old row is deleted
        before the merged object is inserted.
    '''
    tags = movie.get_tags()
    #check if movie is in our database
    persisted_movie = fetch_by_apple_id(movie.apple_id, db)
    if persisted_movie:
        #Movie is already stored, so we need to update our stored info
        print "Updating %s in database" % movie.title
        movie = sync_movie(persisted_movie, movie)
        delete_by_apple_id(movie.apple_id, db)
    else:
        print "Saving %s to database" % movie.title
    try:
        db.insert(movie, tags, 'movies')
    except:
        #NOTE(review): this swallows the original exception type/details;
        #consider chaining or logging it instead of raising a bare ValueError.
        raise ValueError("DB ERROR: %s, %s" % (movie.title, tags))
def update_movies(db):
    """Refresh the database with the current trailer info from Apple.

    Builds a Movie for every entry in Apple's current.xml feed via
    build_movies() (which may return None when the feed is unchanged or
    unreachable) and persists each one to the y_serial database ``db``.
    """
    fresh = build_movies(db)
    if fresh:
        for entry in fresh:
            persist_movie(entry, db)
def fetch_by_apple_id(apple_id, db):
    ''' Fetches the movie object for the specified apple_id from the database.

        Returns None when the record does not exist or the lookup fails.
    '''
    try:
        return db.select('apple_id:%s' % apple_id, 'movies')
    except Exception:
        #y_serial raises on a missing record; treat any lookup failure as
        #"not found".  Narrowed from a bare except so SystemExit and
        #KeyboardInterrupt are no longer swallowed.
        return None
def fetch_by_movie_title(title, db):
    """Return every stored movie whose title tag matches ``title``.

    y_serial's selectdic() yields ``{key: (timestamp, notes, object)}``;
    the movie object itself sits at index 2 of each value tuple.
    """
    rows = db.selectdic("title:%s" % title, 'movies')
    found = []
    for record in rows.values():
        found.append(record[2])
    return found
def delete_by_apple_id(apple_id, db):
    """Delete the movie tagged ``apple_id`` from the 'movies' table.

    Returns True when the record can no longer be selected afterwards,
    False when it is somehow still present.
    """
    tag = 'apple_id:%s' % apple_id
    db.delete(tag, 'movies')
    remaining = db.select(tag, 'movies')
    return not remaining
def build_movies(db=None):
    '''Build a Movie object for every entry in Apple's current trailer feed.

    Returns a list of Movie instances, or None when _fetchxml() reports
    the feed has not changed since the last run (``db`` is passed through
    to it for that date check).
    '''
    movies_xml = _fetchxml(db)
    if not movies_xml:
        return
    movies = []
    count = 0
    for movie_xml in movies_xml:
        #trailing comma plus "\r" keeps the progress counter on one line (Python 2)
        print "Fetching movie info: %s/%s" % (count, len(movies_xml)) + "\r",
        movies.append(Movie(movie_xml))
        count += 1
    print
    return movies
def db_conx(filename):
    '''Open (creating if necessary) the y_serial database at ``filename``
    and return the connection object.
    '''
    if not os.path.exists(filename):
        #y_serial wants an existing file, so touch it first
        open(filename, 'w').close()
    db_path = os.path.abspath(filename)
    print "Database location: %s" % db_path
    return y_serial.Main(db_path)
def mkdir(d):
    """Create directory ``d`` (including parents), tolerating the race
    where another process creates it first.

    Any OSError other than "it already exists as a directory" is
    re-raised so real failures (permissions, a file in the way) surface.
    """
    try:
        os.makedirs(d)
    except OSError:
        # Swallow the error only when the directory is now present.
        if not os.path.isdir(d):
            raise
def hash_file(path):
    ''' Generates a hopefully unique hash of a trailer.

        OpenSubtitles-style checksum: the file size plus the 64-bit sums of
        the first and last 64 KiB of the file, rendered as a 16-digit hex
        string.  Returns "SizeError" for files smaller than 128 KiB and
        "IOError" when the file cannot be read.
    '''
    try:
        longlongformat = 'q' # long long
        bytesize = struct.calcsize(longlongformat)
        filesize = os.path.getsize(path)
        #check size before opening so the early return can't leak the
        #file descriptor (the original opened first and never closed it)
        if filesize < 65536 * 2:
            return "SizeError"
        hash = filesize
        f = open(path, "rb")
        try:
            #integer division (//): the old "/" breaks range() on Python 3
            for x in range(65536 // bytesize):
                buffer = f.read(bytesize)
                (l_value,) = struct.unpack(longlongformat, buffer)
                hash += l_value
                hash = hash & 0xFFFFFFFFFFFFFFFF #to remain as 64bit number
            f.seek(max(0, filesize - 65536), 0)
            for x in range(65536 // bytesize):
                buffer = f.read(bytesize)
                (l_value,) = struct.unpack(longlongformat, buffer)
                hash += l_value
                hash = hash & 0xFFFFFFFFFFFFFFFF
        finally:
            f.close()
        returnedhash = "%016x" % hash
        return returnedhash
    except(IOError):
        return "IOError"
def _get_trailer_opener(url):
    ''' Returns an urllib2 opener with the user agent set to the current version
        of QuickTime.

        Apple's servers refuse trailer downloads from ordinary clients,
        hence the spoofed QuickTime User-Agent header.
    '''
    user_agent = r"QuickTime/%s" % _get_QT_version('English', 'Windows')
    request = urllib2.Request(url)
    request.add_header('User-Agent', user_agent)
    opener = urllib2.urlopen(request)
    return opener
def _get_QT_version(lang, os):
return '7.0.0'
def _fetchxml(db=None):
    ''' Get the xml file from apple describing all their current trailers.
        We then parse out the ElementTree elements for each Movie and return
        a them in a list.
        If we receive a reference to our db, we check to see if the date in
        current.xml has changed...if not we return None.
    '''
    current_trailers = r"http://www.apple.com/trailers/home/xml/current.xml"
    response = urllib2.urlopen(current_trailers)
    tree = ElementTree(file=response)
    if db:
        #date checking
        date = tree.getroot().attrib['date']
        #the feed date is RFC 822 formatted; convert it to a naive datetime
        d = rfc822.parsedate(date)
        date = datetime.datetime(d[0], d[1], d[2], d[3], d[4])
        try:
            stored_date = db.select('current_xml_date', 'movies')
            if not stored_date:
                raise
        except:
            #no usable stored date -- fall back to a date far in the past
            #so the freshly fetched feed always counts as newer
            stored_date = datetime.datetime(year=2000, month = 1, day = 1)
        if date <= stored_date:
            print "Already have current Apple trailers information"
            return
        else:
            try:
                db.delete('current_xml_date', 'movies')
            except:
                pass
            db.insert(date, 'current_xml_date', 'movies')
    #information for each trailer is stored in it's own 'movieinfo' node
    #here we create list of Elements with each Element containing the tree for
    #one movie/trailer
    movies = tree.findall('movieinfo')
    return movies
class Movie():
    '''One movie from Apple's trailer feed: its metadata, every known
    Trailer, and an MPAA rating optionally backfilled from IMDb.
    '''
    def __init__(self, xml):
        ''' Takes a movieinfo node from Apple's trailer xml file.
        '''
        self.apple_id = None
        self.title = None
        self.runtime = None
        self.mpaa = None
        self.release_date = None
        self.description = None
        self.apple_genre = None
        self.poster_url = None
        self.large_poster_url = None
        self.studio = None
        self.director = None
        self.cast = None
        self.trailers = {}
        self.inst_on = datetime.datetime.today()
        self.updated_on = datetime.datetime.today()
        self._parsexml(xml)
        self._getimdb()
    def download_trailers(self, res, force = False):
        '''Download every trailer at resolution ``res``, then rename/move
        each file according to options.rename_mask (tag substitution) and
        options.destination.
        '''
        for t in self.trailers:
            download = self.trailers[t].download(res, force=force)
            if not download:
                return
            fn = os.path.splitext(os.path.basename(self.trailers[t].urls[res].local_path))[0]
            ext = os.path.splitext(os.path.basename(self.trailers[t].urls[res].local_path))[1][1:]
            if self.mpaa:
                rating = self.mpaa
            else:
                rating = 'NR'
            #substitution values for the user's rename mask
            tags = {'%TITLE%': self.title,
                    '%FN%': fn,
                    '%EXT%': ext,
                    '%DT%': datetime.datetime.strftime(self.trailers[t].date, '%Y-%m%d'),
                    '%DTD%': datetime.datetime.strftime(self.trailers[t].urls[res].downloaded, '%Y-%m%d'),
                    '%RES%': res,
                    '%MPAA%': rating
                    }
            new_fn = options.rename_mask
            for tag in tags:
                while 1:
                    _ = new_fn
                    new_fn = re.sub(tag, tags[tag], new_fn)
                    if _ == new_fn:
                        #nothing left to change for this tag
                        break
            self.move_trailer(t, new_fn, res)
            print "Saved to %s" % self.trailers[t].urls[res].local_path
    def move_trailer(self, trailer_key, dest_fn, res):
        '''Move a downloaded trailer into options.destination under a
        sanitized version of ``dest_fn`` and record the new local path.
        '''
        mkdir(options.destination)
        dest = sanitized_filename(os.path.splitext(dest_fn)[0], file_location=options.destination) + os.path.splitext(dest_fn)[1]
        dest = os.path.join(os.path.join(options.destination, dest))
        source = self.trailers[trailer_key].urls[res].local_path
        if os.path.abspath(source).lower() != os.path.abspath(dest).lower():
            self.trailers[trailer_key].urls[res].local_path = move_file(source, dest)
        else:
            self.trailers[trailer_key].urls[res].local_path = source
    def _make_tag(self, text):
        '''Wrap ``text`` in y_serial's #'...' tag syntax.'''
        return "#'%s'" % text
    def get_tags(self, string=True):
        ''' This generates a space seperated string of "tags" for a movie. This
            contains (if available):
            movie title
            release date
            genres
            director
            cast members
            mpaa rating
        '''
        #NOTE(review): despite the docstring, genres and director are not
        #actually tagged below -- confirm whether that is intentional.
        tags = []
        tags.append("title:%s" % self.title)
        if self.release_date:
            tags.append("release:%s" % datetime.datetime.strftime(self.release_date, "%Y-%m-%d"))
        for c in self.cast:
            tags.append("cast:%s" % c)
        tags.append("mpaa:%s" % self.mpaa)
        tags.append("apple_id:%s" % self.apple_id)
        tags2 = []
        for tag in tags:
            tags2.append(self._make_tag(tag))
        if string:
            return ' '.join(tags2)
        else:
            return tags2
    def _parsexml(self, xml):
        ''' Get all the trailer attributes from the xml.
        '''
        self.apple_id = xml.attrib['id']
        self.title = xml.find('info/title').text
        self.runtime = xml.find('info/runtime').text
        self.mpaa = xml.find('info/rating').text
        #Some trailers don't have a release date yet
        try:
            self.release_date = datetime.datetime.strptime(xml.find('info/releasedate').text, "%Y-%m-%d")
        except:
            pass
        self.description = xml.find('info/description').text
        #Make a list of all the associated genre's
        self.apple_genre = [x.text for x in xml.findall('genre/name')]
        self.poster_url = [xml.find('poster/location').text]
        self.large_poster_url = [xml.find('poster/xlarge').text]
        #self.trailer_url = [xml.find('preview/large').text]
        self.studio = xml.find('info/studio').text
        self.director = xml.find('info/director').text
        #Make a list of all the listed cast members
        self.cast = [x.text for x in xml.findall('cast/name')]
        #Build a Trailer() for this trailer
        trailer_url = xml.find('preview/large').text
        trailer_date = datetime.datetime.strptime(xml.find('info/postdate').text, "%Y-%m-%d")
        self.trailers[trailer_url] = Trailer(trailer_date, trailer_url, self.title)
        #Find any other trailers for the movie.
        self.find_trailers(trailer_url)
    def find_trailers(self, url):
        '''Probe for sibling trailer URLs (tlr1, tlr2, ...) and register a
        Trailer for each one that turns out to be a QuickTime video.
        '''
        urls = []
        other_urls = self._build_other_trailer_urls(url)
        for purl in other_urls:
            #just checking for file existance, don't need to download
            try:
                opener = _get_trailer_opener(purl)
            except urllib2.HTTPError:
                continue
            except:
                print "Unknown error with additional trailer finder"
            headers = opener.info().headers
            for header in headers:
                #make sure file is a quicktime video
                if header.lower().count('content-type:'):
                    if header.lower().count('video/quicktime'):
                        urls.append(purl)
        for u in urls:
            if u not in self.trailers:
                self.trailers[u] = Trailer(datetime.datetime.today(), u, self.title)
    def _build_other_trailer_urls(self, url):
        '''Derive candidate URLs for lower-numbered trailers from a tlrN
        URL; [] when the URL carries no trailer number or is tlr1.
        '''
        potential_urls = []
        try:
            trailer_number = int(re.search(r"tlr(?P<num>\d)", url).group('num'))
        except:
            return []
        if trailer_number == 1:
            return []
        #NOTE(review): range(1, trailer_number-1) stops at tlr(N-2); this
        #looks like an off-by-one that skips tlr(N-1) -- confirm upstream.
        for i in range(1, trailer_number-1):
            potential_urls.append(re.sub(r"tlr\d", "tlr%s" % i, url))
        return potential_urls
    def have_trailer(self, trailer_url):
        ''' Checks our list of trailers to see if we already know about
            trailer_url.
        '''
        for trailer in self.trailers:
            if trailer_url == self.trailers[trailer].url:
                return trailer
        return False
    def _getimdb(self):
        ''' A lot of movies don't have an MPAA rating when they're posted to Apple.
            Here we try to get their current rating from IMDb.
        '''
        if self.mpaa.lower() == 'not yet rated':
            i = imdb.IMDb()
            #try to access imdb up to 3 times
            for x in range(3):
                try:
                    i_results = i.search_movie(self.title.lower())
                    fail = False
                    break
                except:
                    fail = True
                    time.sleep(1)
            if fail:
                print "Failed to connect to imdb"
                self.mpaa = None
                return
            if self.release_date:
                year = self.release_date.year
            else:
                #guess at the year by adding 12 weeks to today
                year = (datetime.datetime.today() + datetime.timedelta(weeks=12)).year
            i_result = None
            #Use an exact title and year match to make sure we've found the
            #movie listing for this trailer.
            for result in i_results:
                try:
                    if result['title'].lower() == self.title.lower() and result['year'] == year:
                        i_result = result
                        break
                except:
                    continue
            if not i_result:
                #We didn't get a matching movie from imdb...most likely the result
                #of a bad guess at the release year, or improper title naming on
                #Apple or IMDb's site.
                self.mpaa = None
            else:
                #This is a list of MPAA ratings in descending order of restrictiveness
                cert_list = ["NC-17", "R", "PG-13", "PG", "G", "UNRATED"]
                #Have to update the movie object IMDbPy gave us so it contains rating info
                i.update(i_result)
                if i_result.has_key('certificates'):
                    usa_certs = []
                    for cert in i_result['certificates']:
                        #Parse out all the USA certs because USA certs seems to be what most
                        #software I'm familiar with care about
                        try:
                            rating = re.match(r"usa:(?P<rating>[a-zA-Z0-9- ]+)(\Z|:)", cert.lower()).group('rating').upper()
                            if rating in cert_list:
                                usa_certs.append(rating)
                        except:
                            pass
                    #Sort via cert_list and take least-restrictive rating
                    if len(usa_certs) > 0:
                        self.mpaa = sorted(usa_certs, key=cert_list.index)[-1]
                    else:
                        self.mpaa = None
                #NOTE(review): the else below appears to clear a rating
                #already derived from 'certificates' -- confirm nesting
                #against upstream source.
                if not self.mpaa and i_result.has_key('mpaa'):
                    #Some movies have the mpaa field such as "Rated R for sexuality."
                    #We'll parse the rating out of it if available.
                    try:
                        self.mpaa = re.search(r"(?P<rating>[a-zA-Z0-9-]+) for", i_result['mpaa']).group('rating').upper()
                    except:
                        self.mpaa = None
                else:
                    self.mpaa = None
    def __str__(self):
        '''Human-readable summary including trailer count and rating.'''
        if self.release_date:
            return "<Title: %s, Trailers: %s, Movie date: %s, MPAA: %s>" % (self.title,
                                                                            len(self.trailers),
                                                                            datetime.datetime.strftime(self.release_date, "%Y-%m-%d"),
                                                                            self.mpaa)
        else:
            return "<Title: %s, Trailers: %s, Movie date: %s, MPAA: %s>" % (self.title,
                                                                            len(self.trailers),
                                                                            self.release_date,
                                                                            self.mpaa)
    def __repr__(self):
        return "<Movie: %s>" % self.title
class Trailer():
    '''A single trailer for a movie: its Apple URL, post date, and the
    per-resolution TrailerResUrl objects discovered for it.
    '''
    def __init__(self, date, url, movie_title, potential_res=None):
        self.movie_title = movie_title
        self.date = date
        self.url = url
        #NOTE(review): when a potential_res list IS supplied it is never
        #assigned, leaving self.potential_res unset -- confirm intent.
        if not potential_res:
            self.potential_res = ['1080p', '720p', '480p', '640w', '480', '320']
        self._rez_fetched = datetime.datetime.today()
        self.urls = {}
    def download(self, res, force):
        '''Download this trailer at resolution ``res``.  Returns True on
        success, None when the resolution cannot be resolved.
        '''
        res_choice = self.choose_res(res)
        if not res_choice:
            print "Can't choose res for %s" % self.movie_title
            return None
        if res_choice:
            self.urls[res].download(force)
            return True
        else:
            #NOTE(review): unreachable -- the early return above already
            #handled the falsy case.
            print "%s is not an available resolution" % res
    def build_urls(self, rezs):
        '''Create a TrailerResUrl for every resolution in ``rezs``.'''
        for res in rezs:
            self.urls[res] = TrailerResUrl(res, self.url)
    def choose_res(self, target_res, go_higher=False, exact=True):
        '''Pick an available resolution.

        With exact=True (the default) only ``target_res`` itself is
        acceptable.  Otherwise walk self.potential_res up (go_higher=True)
        or down from the target until an available one is found; returns
        None when nothing matches.  Raises ValueError for an unknown
        resolution string.
        '''
        if exact:
            if target_res not in self.available_res:
                return None
        if target_res not in self.potential_res:
            raise ValueError("Invalid resolution.")
        if target_res in self.available_res:
            #easy choice...what we want is available
            return target_res
        else:
            tres_index = self.potential_res.index(target_res)
            highest_index = len(self.potential_res)-1
            while 1:
                if go_higher:
                    tres_index = tres_index + 1
                else:
                    tres_index = tres_index - 1
                if tres_index > highest_index or tres_index < 0:
                    #out of bounds
                    return
                if self.potential_res[tres_index] in self.available_res:
                    return self.potential_res[tres_index]
    def res_url(self, res):
        '''Build the URL for resolution ``res`` from this trailer's URL;
        returns '' when the URL lacks the expected _h<res>.mov shape.
        '''
        try:
            url = re.sub(re.search(r"_h(?P<res>.*)\.mov", self.url).group('res'), res, self.url)
        except:
            url = ''
        return url
    #treat method as attribute to save on calls to apple.com
    @property
    def available_res(self):
        #go fetch available resolutions only if it's been more than 6 days
        if (datetime.datetime.today() - self._rez_fetched).days > 6 or len(self.urls) == 0:
            rezs = []
            for res in self.potential_res:
                #build the url for the resolution
                url = self.res_url(res)
                if not url:
                    continue
                #just checking for file existance, don't need to download
                try:
                    opener = _get_trailer_opener(url)
                except urllib2.HTTPError:
                    continue
                except:
                    print "Unknown error with trailer resolution finder (http)"
                headers = opener.info().headers
                for header in headers:
                    #make sure file is a quicktime video
                    if header.lower().count('content-type:'):
                        if header.lower().count('video/quicktime'):
                            rezs.append(res)
                #store datetime for cache purposes
                self._rez_fetched = datetime.datetime.today()
            #populate our list of urls for this trailer
            self.build_urls(rezs)
            return rezs
        else:
            return self.urls.keys()
    def __str__(self):
        return "<Trailer: %s>" % self.movie_title
    def __repr__(self):
        return self.__str__()
class TrailerResUrl():
    '''One concrete downloadable URL for a Trailer at a specific
    resolution, plus its local download state (path, hash, size).
    '''
    def __init__(self, res, master_url):
        self.master_url = master_url
        self.res = res
        self.url = self.build_url()
        self.downloaded = False
        self.size = 0
        self.local_path = None
        self.hash = None
    def build_url(self):
        '''Derive this resolution's URL by swapping the resolution token
        in the master URL (..._h<res>.mov); '' when the master URL does
        not match that pattern.
        '''
        try:
            url = re.sub(re.search(r"_h(?P<res>.*)\.mov", self.master_url).group('res'), self.res, self.master_url)
        except:
            url = ''
        return url
    def download(self, force):
        '''Download the trailer file unless already downloaded (force=True
        re-downloads).  Honors the module-level ``fake`` flag by creating
        an empty placeholder file instead.  Records the download date,
        file hash and size on success.
        '''
        if self.downloaded and not force:
            print "already downloaded"
            return
        self.local_path = os.path.abspath(self.filename(self.url))
        if not fake:
            opener = _get_trailer_opener(self.url)
            f = open(self.local_path, 'wb')
            f.write(opener.read())
            f.close()
        else:
            open(self.local_path, 'w').close()
        self.downloaded = datetime.datetime.today()
        self.hash = hash_file(self.local_path)
        self.size = os.path.getsize(self.local_path)
    def filename(self, url):
        '''Return the basename of ``url``.'''
        orig = os.path.basename(url)
        #NOTE(review): ext and fn are computed but unused; only the raw
        #basename is returned.
        ext = os.path.splitext(orig)[1][1:]
        fn = os.path.splitext(orig)[0]
        return orig
    def __str__(self):
        return "<Trailer url: %s>" % self.url
    def __repr__(self):
        return self.__str__()
#Parse the command line once at import time; shared by the functions above.
options = _options()
if __name__ == "__main__":
    if options.fake:
        #--fake: create empty placeholder files instead of real downloads
        fake = True
    else:
        fake = False
    db = db_conx('atd.db')
    if options.flush:
        #--flush: wipe every stored movie before refreshing
        db.delete("*", "movies")
    update_movies(db)
    download_trailers(db, options.respref)
"dustin.wyatt@gmail.com"
] | dustin.wyatt@gmail.com |
9e2012281769750a83766197f67867136e065d83 | ccf94dcb6b1500fcbbd56964ae8c4832a496b8b3 | /python/baiduads-sdk-auto/baiduads/shield/model/update_b_shield_black_ip_request_wrapper.py | 5dddbab9eff0ff0dd58e237b6dbb103c6cfa1cf1 | [
"Apache-2.0"
] | permissive | baidu/baiduads-sdk | 24c36b5cf3da9362ec5c8ecd417ff280421198ff | 176363de5e8a4e98aaca039e4300703c3964c1c7 | refs/heads/main | 2023-06-08T15:40:24.787863 | 2023-05-20T03:40:51 | 2023-05-20T03:40:51 | 446,718,177 | 16 | 11 | Apache-2.0 | 2023-06-02T05:19:40 | 2022-01-11T07:23:17 | Python | UTF-8 | Python | false | false | 11,564 | py | """
dev2 api schema
'dev2.baidu.com' api schema # noqa: E501
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from baiduads.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from baiduads.exceptions import ApiAttributeError
def lazy_import():
    """Import the model classes this module depends on and publish them in
    module globals.

    Deferred to call time (instead of top-of-file imports) to avoid the
    circular-import problems common in generated OpenAPI model packages.
    """
    from baiduads.common.model.api_request_header import ApiRequestHeader
    from baiduads.shield.model.shield_ip_mod_request import ShieldIPModRequest
    globals()['ApiRequestHeader'] = ApiRequestHeader
    globals()['ShieldIPModRequest'] = ShieldIPModRequest
class UpdateBShieldBlackIPRequestWrapper(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    #no enum-restricted attributes on this wrapper
    allowed_values = {
    }

    #no length/range/regex validations on this wrapper
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'header': (ApiRequestHeader,),  # noqa: E501
            'body': (ShieldIPModRequest,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        return None

    #python attribute name -> JSON key (identical here)
    attribute_map = {
        'header': 'header',  # noqa: E501
        'body': 'body',  # noqa: E501
    }

    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """UpdateBShieldBlackIPRequestWrapper - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            header (ApiRequestHeader): [optional]  # noqa: E501
            body (ShieldIPModRequest): [optional]  # noqa: E501
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """UpdateBShieldBlackIPRequestWrapper - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            header (ApiRequestHeader): [optional]  # noqa: E501
            body (ShieldIPModRequest): [optional]  # noqa: E501
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
| [
"tokimekiyxp@foxmail.com"
] | tokimekiyxp@foxmail.com |
f40c2d3d543d26cf3f9ce1489b9adb13b3062903 | 17b3248f33faa158e20243662181692e37bf3c17 | /Qt5Player.py | f6500d12d5a58f3ef4d4e990f1b36adbd992efa0 | [] | no_license | Axel-Erfurt/QFilemanager | 0e347605d07642e9879c600b76fe9702b181978e | 65e9e3ea14a9594bb34cb3fb08d41319598c8db5 | refs/heads/master | 2023-05-28T08:58:49.934052 | 2023-05-10T18:43:24 | 2023-05-10T18:43:24 | 203,257,923 | 18 | 14 | null | null | null | null | UTF-8 | Python | false | false | 16,982 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
from PyQt5.QtGui import QPalette, QKeySequence, QIcon
from PyQt5.QtCore import QDir, Qt, QUrl, QSize, QPoint, QTime, QMimeData, QProcess
from PyQt5.QtMultimedia import QMediaContent, QMediaPlayer, QMediaMetaData
from PyQt5.QtMultimediaWidgets import QVideoWidget
from PyQt5.QtWidgets import (QApplication, QFileDialog, QHBoxLayout, QLineEdit,
QPushButton, QSizePolicy, QSlider, QMessageBox, QStyle, QVBoxLayout,
QWidget, QShortcut, QMenu)
import sys
import os
import subprocess
#QT_DEBUG_PLUGINS
class VideoPlayer(QWidget):
    def __init__(self, aPath, parent=None):
        """Build the frameless always-on-top player window: media player,
        video surface, time labels, seek slider, keyboard shortcuts and
        context menu.

        aPath is currently unused.  # NOTE(review): confirm before removing
        """
        super(VideoPlayer, self).__init__(parent)
        self.setAttribute( Qt.WA_NoSystemBackground, True )
        self.setAcceptDrops(True)
        self.mediaPlayer = QMediaPlayer(None, QMediaPlayer.StreamPlayback)
        self.mediaPlayer.mediaStatusChanged.connect(self.printMediaData)
        self.mediaPlayer.setVolume(80)
        self.videoWidget = QVideoWidget(self)
        # elapsed-time label (left of slider)
        self.lbl = QLineEdit('00:00:00')
        self.lbl.setReadOnly(True)
        self.lbl.setFixedWidth(70)
        self.lbl.setUpdatesEnabled(True)
        self.lbl.setStyleSheet(stylesheet(self))
        # total-duration label (right of slider)
        self.elbl = QLineEdit('00:00:00')
        self.elbl.setReadOnly(True)
        self.elbl.setFixedWidth(70)
        self.elbl.setUpdatesEnabled(True)
        self.elbl.setStyleSheet(stylesheet(self))
        self.playButton = QPushButton()
        self.playButton.setEnabled(False)
        self.playButton.setFixedWidth(32)
        self.playButton.setStyleSheet("background-color: black")
        self.playButton.setIcon(self.style().standardIcon(QStyle.SP_MediaPlay))
        self.playButton.clicked.connect(self.play)
        self.positionSlider = QSlider(Qt.Horizontal, self)
        self.positionSlider.setStyleSheet (stylesheet(self))
        self.positionSlider.setRange(0, 100)
        self.positionSlider.sliderMoved.connect(self.setPosition)
        self.positionSlider.sliderMoved.connect(self.handleLabel)
        self.positionSlider.setSingleStep(2)
        self.positionSlider.setPageStep(20)
        self.positionSlider.setAttribute(Qt.WA_TranslucentBackground, True)
        # clipboard + external youtube-dl process for URL playback
        self.clip = QApplication.clipboard()
        self.process = QProcess(self)
        self.process.readyRead.connect(self.dataReady)
        # self.process.started.connect(lambda: print("grabbing YouTube URL"))
        self.process.finished.connect(self.playFromURL)
        self.myurl = ""
        controlLayout = QHBoxLayout()
        controlLayout.setContentsMargins(5, 0, 5, 0)
        controlLayout.addWidget(self.playButton)
        controlLayout.addWidget(self.lbl)
        controlLayout.addWidget(self.positionSlider)
        controlLayout.addWidget(self.elbl)
        layout = QVBoxLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        layout.addWidget(self.videoWidget)
        layout.addLayout(controlLayout)
        self.setLayout(layout)
        self.myinfo = "©2016\nAxel Schneider\n\nMouse Wheel = Zoom\nUP = Volume Up\nDOWN = Volume Down\n" + \
                "LEFT = < 1 Minute\nRIGHT = > 1 Minute\n" + \
                "SHIFT+LEFT = < 10 Minutes\nSHIFT+RIGHT = > 10 Minutes"
        self.widescreen = True
        #### shortcuts ####
        self.shortcut = QShortcut(QKeySequence("q"), self)
        self.shortcut.activated.connect(self.handleQuit)
        self.shortcut = QShortcut(QKeySequence("u"), self)
        self.shortcut.activated.connect(self.playFromURL)
        self.shortcut = QShortcut(QKeySequence("y"), self)
        self.shortcut.activated.connect(self.getYTUrl)
        self.shortcut = QShortcut(QKeySequence("o"), self)
        self.shortcut.activated.connect(self.openFile)
        self.shortcut = QShortcut(QKeySequence(" "), self)
        self.shortcut.activated.connect(self.play)
        self.shortcut = QShortcut(QKeySequence("f"), self)
        self.shortcut.activated.connect(self.handleFullscreen)
        self.shortcut = QShortcut(QKeySequence("i"), self)
        self.shortcut.activated.connect(self.handleInfo)
        self.shortcut = QShortcut(QKeySequence("s"), self)
        self.shortcut.activated.connect(self.toggleSlider)
        self.shortcut = QShortcut(QKeySequence(Qt.Key_Right), self)
        self.shortcut.activated.connect(self.forwardSlider)
        self.shortcut = QShortcut(QKeySequence(Qt.Key_Left), self)
        self.shortcut.activated.connect(self.backSlider)
        self.shortcut = QShortcut(QKeySequence(Qt.Key_Up), self)
        self.shortcut.activated.connect(self.volumeUp)
        self.shortcut = QShortcut(QKeySequence(Qt.Key_Down), self)
        self.shortcut.activated.connect(self.volumeDown)
        self.shortcut = QShortcut(QKeySequence(Qt.ShiftModifier + Qt.Key_Right) , self)
        self.shortcut.activated.connect(self.forwardSlider10)
        self.shortcut = QShortcut(QKeySequence(Qt.ShiftModifier + Qt.Key_Left) , self)
        self.shortcut.activated.connect(self.backSlider10)
        # wire player signals to the UI
        self.mediaPlayer.setVideoOutput(self.videoWidget)
        self.mediaPlayer.stateChanged.connect(self.mediaStateChanged)
        self.mediaPlayer.positionChanged.connect(self.positionChanged)
        self.mediaPlayer.positionChanged.connect(self.handleLabel)
        self.mediaPlayer.durationChanged.connect(self.durationChanged)
        self.mediaPlayer.error.connect(self.handleError)
        self.setAcceptDrops(True)
        self.setWindowTitle("QT5 Player")
        self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint)
        self.setGeometry(100, 300, 600, 380)
        self.setContextMenuPolicy(Qt.CustomContextMenu);
        self.customContextMenuRequested[QPoint].connect(self.contextMenuRequested)
        self.hideSlider()
        self.widescreen = True
        print("QT5 Player started")
        self.suspend_screensaver()
    def playFromURL(self):
        """Load and play the media URL currently held in the clipboard.

        Pauses any running playback first, hands the clipboard text to the
        media player, then hides the slider overlay.
        """
        self.mediaPlayer.pause()
        self.myurl = self.clip.text()
        self.mediaPlayer.setMedia(QMediaContent(QUrl(self.myurl)))
        self.playButton.setEnabled(True)
        self.mediaPlayer.play()
        self.hideSlider()
        print(self.myurl)
    def getYTUrl(self):
        """Resolve the YouTube page URL in the clipboard to a direct stream
        URL by launching youtube-dl asynchronously; dataReady() consumes
        the output and triggers playback.
        """
        cmd = "youtube-dl -g -f best " + self.clip.text()
        print("grabbing YouTube URL")
        self.process.start(cmd)
    def dataReady(self):
        """Collect youtube-dl's output, keep only the first line (the
        direct stream URL), copy it to the clipboard and start playback.
        """
        self.myurl = str(self.process.readAll(), encoding = 'utf8').rstrip() ###
        self.myurl = self.myurl.partition("\n")[0]
        print(self.myurl)
        self.clip.setText(self.myurl)
        self.playFromURL()
def suspend_screensaver(self):
'suspend linux screensaver'
proc = subprocess.Popen('gsettings set org.gnome.desktop.screensaver idle-activation-enabled false', shell=True)
proc.wait()
def resume_screensaver(self):
'resume linux screensaver'
proc = subprocess.Popen('gsettings set org.gnome.desktop.screensaver idle-activation-enabled true', shell=True)
proc.wait()
    def openFile(self):
        """Show a file dialog (starting in ~/Videos) and load the chosen
        media file via loadFilm()."""
        fileName, _ = QFileDialog.getOpenFileName(self, "Open Movie",
                QDir.homePath() + "/Videos", "Media (*.webm *.mp4 *.ts *.avi *.mpeg *.mpg *.mkv *.VOB *.m4v *.3gp *.mp3 *.m4a *.wav *.ogg *.flac *.m3u *.m3u8)")
        if fileName != '':
            self.loadFilm(fileName)
            print("File loaded")
def play(self):
if self.mediaPlayer.state() == QMediaPlayer.PlayingState:
self.mediaPlayer.pause()
else:
self.mediaPlayer.play()
    def mediaStateChanged(self, state):
        """Swap the play/pause icon on the control button to match the
        player's current state (the ``state`` argument itself is unused).
        """
        if self.mediaPlayer.state() == QMediaPlayer.PlayingState:
            self.playButton.setIcon(
                    self.style().standardIcon(QStyle.SP_MediaPause))
        else:
            self.playButton.setIcon(
                    self.style().standardIcon(QStyle.SP_MediaPlay))
    def positionChanged(self, position):
        """Keep the seek slider in sync with the playback position (ms)."""
        self.positionSlider.setValue(position)
    def durationChanged(self, duration):
        """Resize the slider range to the new media duration (ms) and show
        the total running time in the right-hand label."""
        self.positionSlider.setRange(0, duration)
        mtime = QTime(0,0,0,0)
        mtime = mtime.addMSecs(self.mediaPlayer.duration())
        self.elbl.setText(mtime.toString())
def setPosition(self, position):
self.mediaPlayer.setPosition(position)
def handleError(self):
self.playButton.setEnabled(False)
print("Error: ", self.mediaPlayer.errorString())
def handleQuit(self):
self.mediaPlayer.stop()
self.resume_screensaver()
print("Goodbye ...")
self.close()
    def contextMenuRequested(self,point):
        """Build and show the right-click context menu at *point*.

        Menu entries mirror the keyboard shortcuts shown in parentheses.
        """
        menu = QMenu()
        actionFile = menu.addAction(QIcon.fromTheme("video-x-generic"),"open File (o)")
        # NOTE(review): 'actionclipboard' is reused for every separator;
        # the returned separator actions are never needed afterwards.
        actionclipboard = menu.addSeparator()
        actionURL = menu.addAction(QIcon.fromTheme("browser"),"URL from Clipboard (u)")
        actionclipboard = menu.addSeparator()
        actionYTurl = menu.addAction(QIcon.fromTheme("youtube"), "URL from YouTube (y)")
        actionclipboard = menu.addSeparator()
        actionToggle = menu.addAction(QIcon.fromTheme("next"),"show / hide Slider (s)")
        actionFull = menu.addAction(QIcon.fromTheme("view-fullscreen"),"Fullscreen (f)")
        action169 = menu.addAction(QIcon.fromTheme("tv-symbolic"),"16 : 9")
        action43 = menu.addAction(QIcon.fromTheme("tv-symbolic"),"4 : 3")
        actionSep = menu.addSeparator()
        actionInfo = menu.addAction(QIcon.fromTheme("help-about"),"Info (i)")
        action5 = menu.addSeparator()
        actionQuit = menu.addAction(QIcon.fromTheme("application-exit"),"Exit (q)")
        # Wire each entry to its handler, then show the menu modally
        # at the global position corresponding to the widget-local point.
        actionFile.triggered.connect(self.openFile)
        actionQuit.triggered.connect(self.handleQuit)
        actionFull.triggered.connect(self.handleFullscreen)
        actionInfo.triggered.connect(self.handleInfo)
        actionToggle.triggered.connect(self.toggleSlider)
        actionURL.triggered.connect(self.playFromURL)
        actionYTurl.triggered.connect(self.getYTUrl)
        action169.triggered.connect(self.screen169)
        action43.triggered.connect(self.screen43)
        menu.exec_(self.mapToGlobal(point))
def wheelEvent(self,event):
mwidth = self.frameGeometry().width()
mheight = self.frameGeometry().height()
mleft = self.frameGeometry().left()
mtop = self.frameGeometry().top()
mscale = event.angleDelta().y() / 5
if self.widescreen == True:
self.setGeometry(mleft, mtop, mwidth + mscale, (mwidth + mscale) / 1.778)
else:
self.setGeometry(mleft, mtop, mwidth + mscale, (mwidth + mscale) / 1.33)
def screen169(self):
self.widescreen = True
mwidth = self.frameGeometry().width()
mheight = self.frameGeometry().height()
mleft = self.frameGeometry().left()
mtop = self.frameGeometry().top()
mratio = 1.778
self.setGeometry(mleft, mtop, mwidth, mwidth / mratio)
def screen43(self):
self.widescreen = False
mwidth = self.frameGeometry().width()
mheight = self.frameGeometry().height()
mleft = self.frameGeometry().left()
mtop = self.frameGeometry().top()
mratio = 1.33
self.setGeometry(mleft, mtop, mwidth, mwidth / mratio)
def handleFullscreen(self):
if self.windowState() & Qt.WindowFullScreen:
QApplication.setOverrideCursor(Qt.ArrowCursor)
self.showNormal()
print("no Fullscreen")
else:
self.showFullScreen()
QApplication.setOverrideCursor(Qt.BlankCursor)
print("Fullscreen entered")
def handleInfo(self):
msg = QMessageBox.about(self, "QT5 Player", self.myinfo)
def toggleSlider(self):
if self.positionSlider.isVisible():
self.hideSlider()
else:
self.showSlider()
def hideSlider(self):
self.playButton.hide()
self.lbl.hide()
self.positionSlider.hide()
self.elbl.hide()
mwidth = self.frameGeometry().width()
mheight = self.frameGeometry().height()
mleft = self.frameGeometry().left()
mtop = self.frameGeometry().top()
if self.widescreen == True:
self.setGeometry(mleft, mtop, mwidth, mwidth / 1.778)
else:
self.setGeometry(mleft, mtop, mwidth, mwidth / 1.33)
def showSlider(self):
self.playButton.show()
self.lbl.show()
self.positionSlider.show()
self.elbl.show()
mwidth = self.frameGeometry().width()
mheight = self.frameGeometry().height()
mleft = self.frameGeometry().left()
mtop = self.frameGeometry().top()
if self.widescreen == True:
self.setGeometry(mleft, mtop, mwidth, mwidth / 1.55)
else:
self.setGeometry(mleft, mtop, mwidth, mwidth / 1.33)
def forwardSlider(self):
self.mediaPlayer.setPosition(self.mediaPlayer.position() + 1000*60)
def forwardSlider10(self):
self.mediaPlayer.setPosition(self.mediaPlayer.position() + 10000*60)
def backSlider(self):
self.mediaPlayer.setPosition(self.mediaPlayer.position() - 1000*60)
def backSlider10(self):
self.mediaPlayer.setPosition(self.mediaPlayer.position() - 10000*60)
def volumeUp(self):
self.mediaPlayer.setVolume(self.mediaPlayer.volume() + 10)
print("Volume: " + str(self.mediaPlayer.volume()))
def volumeDown(self):
self.mediaPlayer.setVolume(self.mediaPlayer.volume() - 10)
print("Volume: " + str(self.mediaPlayer.volume()))
def mouseMoveEvent(self, event):
if event.buttons() == Qt.LeftButton:
self.move(event.globalPos() \
- QPoint(self.frameGeometry().width() / 2, \
self.frameGeometry().height() / 2))
event.accept()
def dragEnterEvent(self, event):
if event.mimeData().hasUrls():
event.accept()
elif event.mimeData().hasText():
event.accept()
else:
event.ignore()
    def dropEvent(self, event):
        """Play dropped content: a local file, a YouTube page URL
        (resolved via youtube-dl), or any other direct media URL."""
        if event.mimeData().hasUrls():
            # A file dropped from a file manager arrives as a URL list.
            f = str(event.mimeData().urls()[0].toLocalFile())
            self.loadFilm(f)
        elif event.mimeData().hasText():
            mydrop = str(event.mimeData().text())
            print(mydrop)
            ### YouTube url
            if "https://www.youtube" in mydrop:
                print("is YouTube")
                # mydrop = mydrop.partition("&")[0].replace("watch?v=", "v/")
                self.clip.setText(mydrop)
                self.getYTUrl()
            else:
                ### normal url
                self.mediaPlayer.setMedia(QMediaContent(QUrl(mydrop)))
                self.playButton.setEnabled(True)
                self.mediaPlayer.play()
                self.hideSlider()
                print(mydrop)
def loadFilm(self, f):
self.mediaPlayer.setMedia(QMediaContent(QUrl.fromLocalFile(f)))
self.playButton.setEnabled(True)
self.mediaPlayer.play()
    def printMediaData(self):
        """Print the video resolution once media is fully loaded.

        mediaStatus() == 6 — presumably QMediaPlayer.BufferedMedia;
        TODO confirm against the QMediaPlayer enum.
        """
        if self.mediaPlayer.mediaStatus() == 6:
            if self.mediaPlayer.isMetaDataAvailable():
                # The metadata value prints as a QSize repr; strip it
                # down to a plain "W x H" string.
                res = str(self.mediaPlayer.metaData("Resolution")).partition("PyQt5.QtCore.QSize(")[2].replace(", ", " x ").replace(")", "")
                print("%s%s" % ("Video Resolution = ",res))
            else:
                print("no metaData available")
def openFileAtStart(self, filelist):
matching = [s for s in filelist if ".myformat" in s]
if len(matching) > 0:
self.loadFilm(matching)
##################### update Label ##################################
def handleLabel(self):
self.lbl.clear()
mtime = QTime(0,0,0,0)
self.time = mtime.addMSecs(self.mediaPlayer.position())
self.lbl.setText(self.time.toString())
###################################################################
    def stylesheet(self):
        """Return the Qt stylesheet for the slider and the URL field."""
        return """
    QSlider::handle:horizontal
    {
    background: qlineargradient(x1:0, y1:0, x2:0, y2:1, stop:0 #333, stop:1 #555555);
    width: 14px;
    border-radius: 0px;
    }
    QSlider::groove:horizontal {
    border: 1px solid #444;
    height: 10px;
        background: qlineargradient(x1:0, y1:0, x2:0, y2:1, stop:0 #000, stop:1 #222222);
    }
    QLineEdit
    {
    background: black;
    color: #585858;
    border: 0px solid #076100;
    font-size: 8pt;
    font-weight: bold;
    }
    """
#if __name__ == '__main__':
#
# QApplication.setDesktopSettingsAware(False)
# app = QApplication(sys.argv)
#
# player = VideoPlayer('')
# player.show()
# if len(sys.argv) > 1:
# print(sys.argv[1])
# player.loadFilm(sys.argv[1])
#sys.exit(app.exec_())
| [
"noreply@github.com"
] | noreply@github.com |
2d07e3e7c688400917292198da3ca2366537e1a7 | cb919300d685bef47e73c08f87ef864ec43c626b | /gbe/migrations/0004_auto_20201224_1145.py | af779268696e0f18696fcc085d560b723c33ac3c | [
"Apache-2.0"
] | permissive | bethlakshmi/gbe-divio-djangocms-python2.7 | d5ca26897b388e632f0b7aba0165239d55adb0c3 | d43dd81bdac2ca068a1f14e1b4b0ae33e8d25c07 | refs/heads/master | 2023-08-31T14:36:43.577308 | 2023-08-28T13:02:36 | 2023-08-28T13:02:36 | 206,438,505 | 7 | 1 | Apache-2.0 | 2023-09-13T21:03:41 | 2019-09-05T00:18:03 | Python | UTF-8 | Python | false | false | 55,532 | py | # Generated by Django 3.0.11 on 2020-12-24 11:45
from django.db import migrations
init_values = [
{
'selector': '.gbe-header-band',
'pseudo_class': '',
'description': '''Boldly colored header bands with contrasting
text, such as above performer grids''',
'target_element': 'div',
'usage': 'General',
'prop_val': [('background-color', 'rgba(150,3,32,1)'),
('color', 'rgba(255,255,255,1)')]},
{
'selector': 'body.gbe-body',
'pseudo_class': '',
'description': 'Body of the page, except printable pages',
'target_element': 'div',
'usage': 'General',
'prop_val': [('background-color', 'rgba(255,255,255,1)'),
('color', 'rgba(51,51,51,1)'),
('background-image', 'image', '')]},
{
'selector': 'body.gbe-printable',
'pseudo_class': '',
'description': 'Body of the page, when it is printable',
'target_element': 'div',
'usage': 'Printable',
'prop_val': [('background-color', 'rgba(255,255,255,1)'),
('color', 'rgba(0,0,0,1)')]},
{
'selector': '.printable-table',
'pseudo_class': '',
'description': 'Table on a printable page. Purposefully simple.',
'target_element': 'div',
'usage': 'Printable',
'prop_val': [('border-color', 'rgba(0,0,0,1)')]},
{
'selector': '.printable-header',
'pseudo_class': '',
'description': 'Header of table on a printable page.',
'target_element': 'div',
'usage': 'Printable',
'prop_val': [('border-color', 'rgba(0,0,0,1)'),
('color', 'rgba(255,255,255,1)'),
('background-color', 'rgba(128,0,0,1)')]},
{
'selector': '.interested-sched',
'pseudo_class': '',
'description': '''Row for a committment the user marked as
'interested'.''',
'target_element': 'div',
'usage': 'Printable',
'prop_val': [('color', 'rgba(128,128,128,1)')]},
{
'selector': '.gbe-alert-danger',
'pseudo_class': '',
'description': 'Alerts that show up dynamically on Error',
'target_element': 'div',
'usage': 'Alerts',
'prop_val': [('background-color', 'rgba(248,215,218,1)'),
('border-color', 'rgba(245,198,203,1)'),
('color', 'rgba(114,28,36,1)')]},
{
'selector': '.gbe-alert-info',
'pseudo_class': '',
'description': 'Alerts that show up dynamically as Information',
'target_element': 'div',
'usage': 'Alerts',
'prop_val': [('background-color', 'rgba(209,236,241,1)'),
('border-color', 'rgba(190,229,235,1)'),
('color', 'rgba(12,84,96,1)')]},
{
'selector': '.gbe-alert-success',
'pseudo_class': '',
'description': 'Alerts that show up dynamically on Success',
'target_element': 'div',
'usage': 'Alerts',
'prop_val': [('background-color', 'rgba(212,237,218,1)'),
('border-color', 'rgba(195,230,203,1)'),
('color', 'rgba(21,87,36,1)')]},
{
'selector': '.gbe-alert-warning',
'pseudo_class': '',
'description': 'Alerts that show up dynamically on Warning',
'target_element': 'div',
'usage': 'Alerts',
'prop_val': [('background-color', 'rgba(255,243,205,1)'),
('border-color', 'rgba(255,238,186,1)'),
('color', 'rgba(133,100,4,1)')]},
{
'selector': '.gbe-btn-primary',
'pseudo_class': 'hover',
'description': 'Buttons do the main work flow.',
'target_element': 'input',
'usage': 'Forms',
'prop_val': [('background-color', 'rgba(71,31,31,1)'),
('border-color', 'rgba(71,31,31,1)'),
('color', 'rgba(255,255,255,1)')]},
{
'selector': '.gbe-btn-primary',
'pseudo_class': '',
'description': '''Buttons to do the main work flow but not the
paypal button''',
'target_element': 'input',
'usage': 'Forms',
'prop_val': [('background-color', 'rgba(107,46,46,1)'),
('border-color', 'rgba(71,31,31,1)'),
('color', 'rgba(255,255,255,1)')]},
{
'selector': '.gbe-btn-primary',
'pseudo_class': 'focus',
'description': 'Buttons do the main work flow.',
'target_element': 'input',
'usage': 'Forms',
'prop_val': [('outline-color', 'rgba(71,31,31,1)'),
('color', 'rgba(255,255,255,1)'),
('background-color', 'rgba(71,31,31,1)'),
('border-color', 'rgba(71,31,31,1)')]},
{
'selector': '.gbe-btn-table',
'pseudo_class': '',
'description': '''Small buttons to do actions on table rows''',
'target_element': 'a',
'usage': 'Table',
'prop_val': [('background-color', 'rgba(0,0,0,.05)'),
('border-color', 'rgba(0,0,0,.15)'),
('color', 'rgba(51,51,51,1)'),
('font-size', 'px', '12px')]},
{
'selector': '.gbe-btn-table',
'pseudo_class': 'hover',
'description': '''Small buttons to do actions on table rows''',
'target_element': 'a',
'usage': 'Table',
'prop_val': [('background-color', 'rgba(0,0,0,.20)'),
('border-color', 'rgba(0,0,0,.30)')]},
{
'selector': '.gbe-table-link',
'pseudo_class': '',
'description': '''Links in tables''',
'target_element': 'a',
'usage': 'Table',
'prop_val': [('color', 'rgba(0,123,255,1)'),
('text-decoration-color', 'rgba(0,123,255,1)')]},
{
'selector': '.gbe-table-link',
'pseudo_class': 'hover',
'description': '''Links in tables - on hover''',
'target_element': 'a',
'usage': 'Table',
'prop_val': [('color', 'rgba(0,86,179,1)'),
('text-decoration-color', 'rgba(0,86,179,1)')]},
{
'selector': '.gbe-table-row td.approval_needed',
'pseudo_class': '',
'description': '''Cells where special handling is needed.''',
'target_element': 'a',
'usage': 'Table',
'prop_val': [('background-color', 'rgba(254,255,185,1)')]},
{
'selector': '#sub-table table tbody tr td',
'pseudo_class': '',
'description': '''Sub table within a table''',
'target_element': 'td',
'usage': 'Table',
'prop_val': [('border-color', 'rgba(50,50,50,1)')]},
{
'selector': '.paypal-button form input',
'pseudo_class': 'hover',
'description': 'Buttons do the main work flow.',
'target_element': 'input',
'usage': 'Forms',
'prop_val': [('background-color', 'rgba(71,31,31,1)'),
('border-color', 'rgba(71,31,31,1)'),
('color', 'rgba(255,255,255,1)')]},
{
'selector': '.paypal-button form input',
'pseudo_class': '',
'description': '''The paypal button on act/vendor payment is
unusual - it's mostly an image, but what settings we can control
are grouped with the other buttons.''',
'target_element': 'input',
'usage': 'Forms',
'prop_val': [('background-color', 'rgba(107,46,46,1)'),
('border-color', 'rgba(71,31,31,1)'),
('color', 'rgba(255,255,255,1)')]},
{
'selector': '.paypal-button form input',
'pseudo_class': 'focus',
'description': 'Buttons do the main work flow.',
'target_element': 'input',
'usage': 'Forms',
'prop_val': [('outline-color', 'rgba(71,31,31,1)'),
('color', 'rgba(255,255,255,1)'),
('background-color', 'rgba(71,31,31,1)'),
('border-color', 'rgba(71,31,31,1)')]},
{
'selector': '.input-group-text:hover, .gbe-btn-secondary',
'pseudo_class': 'hover',
'description': 'Buttons that do not do the main work flow.',
'target_element': 'input',
'usage': 'Forms',
'prop_val': [('background-color', 'rgba(149,120,123,1)'),
('border-color', 'rgba(88,71,73,1)'),
('color', 'rgba(255,255,255,1)')]},
{
'selector': '.gbe-btn-secondary.active',
'pseudo_class': '',
'description': 'Table columns when they are selected',
'target_element': 'input',
'usage': 'Table',
'prop_val': [('background-color', 'rgba(149,120,123,1)'),
('border-color', 'rgba(88,71,73,1)'),
('color', 'rgba(255,255,255,1)')]},
{
'selector': '.input-group-text, .gbe-btn-secondary',
'pseudo_class': '',
'description': 'Buttons that do not do the main work flow.',
'target_element': 'input',
'usage': 'Forms',
'prop_val': [('background-color', 'rgba(223,180,185,1)'),
('border-color', 'rgba(149,120,123,1)'),
('color', 'rgba(65,65,65,1)')]},
{
'selector': '.input-group-text:focus, .gbe-btn-secondary',
'pseudo_class': 'focus',
'description': 'Buttons that do not do the main work flow.',
'target_element': 'input',
'usage': 'Forms',
'prop_val': [('outline-color', 'rgba(88,71,73,1)'),
('color', 'rgba(255,255,255,1)'),
('background-color', 'rgba(149,120,123,1)'),
('border-color', 'rgba(88,71,73,1)')]},
{
'selector': '.form-control:focus, .btn.focus, .btn:focus',
'pseudo_class': '',
'description': 'Right now - the buttons above the table.',
'target_element': 'button',
'usage': 'General',
'prop_val': [('box-shadow',
'px px px px rgba',
'0px 0px 0px 3px rgba(0,0,0,0.14)')]},
{
'selector': 'input[type=search]',
'pseudo_class': '',
'description': 'Search Box on tables',
'target_element': 'input',
'usage': 'Table',
'prop_val': [('outline-color', 'rgba(223,180,185,1)'),
('color', 'rgba(0,0,0,1)'),
('background-color', 'rgba(255,255,255,1)'),
('border-color', 'rgba(200,200,200,1)')]},
{
'selector': '.gbe-btn-light',
'pseudo_class': 'hover',
'description': '''Hover for buttons that terminate the work, or
on transactions, the slider button that is active.''',
'target_element': 'input',
'usage': 'Forms',
'prop_val': [('background-color', 'rgba(226,230,234,1)'),
('border-color', 'rgba(226,230,234,1)'),
('color', 'rgba(33,37,41,1)')]},
{
'selector': '.gbe-btn-light',
'pseudo_class': '',
'description': '''Buttons like cancel that interrupt work, or the
slider on the transactions page.''',
'target_element': 'input',
'usage': 'Forms',
'prop_val': [('background-color', 'rgba(248,249,250,1)'),
('border-color', 'rgba(175,176,177,1)'),
('color', 'rgba(33,37,41,1)')]},
{
'selector': '.gbe-btn-light',
'pseudo_class': 'focus',
'description': '''Focus for buttons that terminate the work, or
on transactions''',
'target_element': 'input',
'usage': 'Forms',
'prop_val': [('background-color', 'rgba(248,249,250,1)'),
('border-color', 'rgba(175,176,177,1)'),
('color', 'rgba(33,37,41,1)')]},
{
'selector': '.gbe-btn-secondary-disabled, .gbe-btn-secondary-disabled',
'pseudo_class': 'hover',
'description': '''Dark and disabled button''',
'target_element': 'input',
'usage': 'Forms',
'prop_val': [('background-color', 'rgba(108,117,125,1)'),
('border-color', 'rgba(108,117,125,1)'),
('color', 'rgba(255,255,255,1)')]},
{
'selector': '.gbe-form-error',
'pseudo_class': '',
'description': '''Text that informs user of a form error or a
table with problem data.''',
'target_element': 'font',
'usage': 'General',
'prop_val': [('color', 'rgba(255,0,0,1)')]},
{
'selector': '.gbe-form-required',
'pseudo_class': '',
'description': 'The * on required form fields',
'target_element': 'font',
'usage': 'Forms',
'prop_val': [('color', 'rgba(255,0,0,1)')]},
{
'selector': '.gbe-table-success td',
'pseudo_class': '',
'description': 'Table row when it was just successfully updated',
'target_element': 'div',
'usage': 'Table',
'prop_val': [('background-color', 'rgba(195,230,203,1)')]},
{
'selector': '.gbe-table-row.gbe-table-info td',
'pseudo_class': '',
'description': '''Table row when it's highlighted for important
information''',
'target_element': 'div',
'usage': 'Table',
'prop_val': [('background-color', 'rgba(217,237,247,1)')]},
{
'selector': '.gbe-table-row.gbe-table-danger td',
'pseudo_class': '',
'description': 'Table row with a concern, like an inactive user.',
'target_element': 'div',
'usage': 'Table',
'prop_val': [('background-color', 'rgba(242,222,222,1)')]},
{
'selector': '.gbe-striped-table-danger td',
'pseudo_class': '',
'description': '''Table row with a concern on a striped table,
it's darker to make it stick out better, should match
"gbe-form-error"''',
'target_element': 'div',
'usage': 'Table',
'prop_val': [('background-color', 'rgba(255,0,0,1)')]},
{
'selector': 'table thead tr.gbe-table-header th',
'pseudo_class': '',
'description': 'Header of tables',
'target_element': 'tr',
'usage': 'Table',
'prop_val': [('background-color', 'rgba(200,200,200,1)'),
('border-color', 'rgba(50,50,50,1)'),
('color', 'rgba(0,0,0,1)')]},
{
'selector': 'table thead tr.gbe-table-header th',
'pseudo_class': 'hover',
'description': 'Header of tables, when moused over',
'target_element': 'tr',
'usage': 'Table',
'prop_val': [('background-color', 'rgba(200,200,200,1)')]},
{
'selector': 'table tfoot tr.gbe-table-header th',
'pseudo_class': '',
'description': 'Footer of tables',
'target_element': 'tr',
'usage': 'Table',
'prop_val': [('background-color', 'rgba(200,200,200,1)'),
('border-color', 'rgba(50,50,50,1)'),
('color', 'rgba(0,0,0,1)')]},
{
'selector': 'table tfoot tr.gbe-table-header th',
'pseudo_class': 'hover',
'description': 'Footer of tables, when moused over',
'target_element': 'tr',
'usage': 'Table',
'prop_val': [('background-color', 'rgba(200,200,200,1)')]},
{
'selector': '.gbe-table-row td',
'pseudo_class': '',
'description': 'Non-header/footer rows',
'target_element': 'tr',
'usage': 'Table',
'prop_val': [('background-color', 'rgba(255,255,255,1)'),
('border-color', 'rgba(50,50,50,1)'),
('color', 'rgba(0,0,0,1)')]},
{
'selector':
'table.striped_table tr.striped_table_row:nth-child(even)',
'pseudo_class': '',
'description': 'Alternately striped table rows',
'target_element': 'tr',
'usage': 'Table',
'prop_val': [('background-color', 'rgba(204,204,204,1)')]},
{
'selector': 'table.striped_table tr.striped_table_row:nth-child(odd)',
'pseudo_class': '',
'description': 'Alternately striped table rows (the other half_',
'target_element': 'tr',
'usage': 'Table',
'prop_val': [('background-color', 'rgba(238,238,238,1)')]},
{
'selector':
'.gbe-list-even',
'pseudo_class': '',
'description': '''Alternately striped rows, done via divs for
moble friendliness''',
'target_element': 'tr',
'usage': 'Table',
'prop_val': [('background-color', 'rgba(204,204,204,1)')]},
{
'selector': '.gbe-list-odd',
'pseudo_class': '',
'description': '''Alternately striped rows, done via divs for
moble friendliness''',
'target_element': 'tr',
'usage': 'Table',
'prop_val': [('background-color', 'rgba(238,238,238,1)')]},
{
'selector': '.border-table tbody tr td,.border-table tbody tr th',
'pseudo_class': '',
'description': 'border around striped table(s)',
'target_element': 'table',
'usage': 'Table',
'prop_val': [('border-color', 'rgba(0,0,0,1)')]},
{
'selector': '.gbe-text-success',
'pseudo_class': '',
'description': '''Text that means to show success, like icons for
something that is live.''',
'target_element': 'i',
'usage': 'General',
'prop_val': [('color', 'rgba(35,145,60,1)')]},
{
'selector': '.gbe-text-muted',
'pseudo_class': '',
'description': '''Text that is possibly active, but muted to
defer tp something else.''',
'target_element': 'i',
'usage': 'General',
'prop_val': [('color', 'rgba(108,117,125,1)')]},
{
'selector': '.gbe-text-secondary',
'pseudo_class': '',
'description': '''Text that should recede a bit, because it's
secondary.''',
'target_element': 'i',
'usage': 'General',
'prop_val': [('color', 'rgba(108,117,125,1)')]},
{
'selector': '.gbe-text-warning',
'pseudo_class': '',
'description': '''Text that indicates warning, but not complete
failure.''',
'target_element': 'i',
'usage': 'General',
'prop_val': [('color', 'rgba(255,193,7,1)')]},
{
'selector': '.gbe-text-danger',
'pseudo_class': '',
'description': '''Text that means danger - not exactly an error,
but something permanent,like reject/delete.''',
'target_element': 'i',
'usage': 'General',
'prop_val': [('color', 'rgba(220,53,69,1)')]},
{
'selector': '.gbe-text-danger',
'pseudo_class': 'hover',
'description': '''Text that means danger - not exactly an error,
but something permanent,like reject/delete.''',
'target_element': 'i',
'usage': 'General',
'prop_val': [('color', 'rgba(167,29,42,1)')]},
{
'selector': '.gbe-text-info',
'pseudo_class': '',
'description': '''Text that is highlighted because it gives useful
information.''',
'target_element': 'i',
'usage': 'General',
'prop_val': [('color', 'rgba(23,162,184,1)')]},
{
'selector': '.link-events-plus',
'pseudo_class': '',
'description': '''Used for a plus sign that is an active link
information. Right now only on ticket manage page.''',
'target_element': 'i',
'usage': 'General',
'prop_val': [('color', 'rgba(220,220,220,1)')]},
{
'selector': '.link-events-plus',
'pseudo_class': 'hover',
'description': '''Plus sign as active link when hovered on.
Right now only on ticket manage page.''',
'target_element': 'i',
'usage': 'General',
'prop_val': [('color', 'rgba(0,0,0,1)')]},
{
'selector': '.gbe-draft',
'pseudo_class': '',
'description': 'The * on required form fields',
'target_element': 'font',
'usage': 'Forms',
'prop_val': [('color', 'rgba(0,0,0,1)')]},
{
'selector': '.sched_label',
'pseudo_class': '',
'description': 'labels for event details',
'target_element': 'font',
'usage': 'Event Display',
'prop_val': [('color', 'rgba(0,0,0,1)')]},
{
'selector': '.gallery-item .icons i',
'pseudo_class': '',
'description': '''icons that show when one hovers over a
performer image''',
'target_element': 'a',
'usage': 'Event Display',
'prop_val': [('color', 'rgba(255,255,255,1)')]},
{
'selector': '.gallery-item .icons i',
'pseudo_class': 'hover',
'description': '''icons that show when one hovers over a
performer image and also hovers over the icon''',
'target_element': 'i',
'usage': 'Event Display',
'prop_val': [('color', 'rgba(233,30,99,1)'),
('border-color', 'rgba(233,30,99,1)')]},
{
'selector': '#team .single-member',
'pseudo_class': 'hover',
'description': '''Block to put focus to featured items on a page,
see it on fashion faire and shows with special guests. On hover,
the shadow gets a bit deeper,throwing more focus.''',
'target_element': 'div',
'usage': 'Event Display',
'prop_val': [('box-shadow',
'px px px px rgba',
'0px 6px 15px 0px rgba(0,0,0,0.14)')]},
{
'selector': '#team .single-member',
'pseudo_class': '',
'description': '''Block to put focus to featured items on a page,
see it on fashion faire and shows with special guests.''',
'target_element': 'div',
'usage': 'Event Display',
'prop_val': [
('box-shadow',
'px px px px rgba',
'0px 1px 3px 0px rgba(0,0,0,0.2)'),
('background', 'rgba(255,255,255,1)')]},
{
'selector': '#team .team-img',
'pseudo_class': 'before',
'description': '''Shaded color that comes over the image of a
featured block when the buttons show up. Used in both vendors,
and shows.''',
'target_element': 'div',
'usage': 'Event Display',
'prop_val': [('background', 'rgba(233,30,99,0.7)')]},
{
'selector': '.social-icon .social i',
'pseudo_class': '',
'description': '''Buttons on top of featured item images. Appear
on hover.''',
'target_element': 'div',
'usage': 'Event Display',
'prop_val': [
('box-shadow',
'px px px px rgba',
'0px 2px 4px 0px rgba(0,0,0,0.2)'),
('color', 'rgba(119,119,119,1)'),
('background', 'rgba(255,255,255,1)')]},
{
'selector': 'code',
'pseudo_class': '',
'description': '''Any text displaying code, right now that is
theme editor''',
'target_element': 'code',
'usage': 'Forms',
'prop_val': [('color', 'rgba(220,53,69,1)')]},
{
'selector': 'span.dropt:hover span',
'pseudo_class': 'hover',
'description': 'The help text when it is triggerd by hover',
'target_element': 'span',
'usage': 'Forms',
'prop_val': [('background', 'rgba(255,255,255,1)')]},
{
'selector': 'span.dropt span',
'pseudo_class': '',
'description': 'The help text border',
'target_element': 'span',
'usage': 'Forms',
'prop_val': [('border-color', 'rgba(0,0,0,1)')]},
{
'selector': '.gbe-box-shadow',
'pseudo_class': '',
'description': '''Shadow beneath panels to add focus - includig
update email''',
'target_element': 'div',
'usage': 'Forms',
'prop_val': [('box-shadow',
'px px px px rgba',
'0px 8px 16px 0px rgba(0,0,0,.15)')]},
{
'selector': '.gbe-bg-light',
'pseudo_class': '',
'description': '''lighter colored panels - sub panels within
site, including update profile email options, review bids,
view bids, event lists, and others.''',
'target_element': 'div',
'usage': 'Forms',
'prop_val': [('background-color', 'rgba(248,249,250,1)'),
('border', 'px rgba', '1px rgba(50,50,50,1)')]},
{
'selector': '.gbe-bg-dark',
'pseudo_class': '',
'description': '''darker colored panels - sub panels within
site, including act tech info, and event time/date details.''',
'target_element': 'div',
'usage': 'Forms',
'prop_val': [('background-color', 'rgba(195,189,191,1)'),
('border-color', 'rgba(50,50,50,1)')]},
{
'selector': '.gbe-panel-list',
'pseudo_class': '',
'description': '''when the dark panels is used on a heading
within a long list, the border gets very interruptive, so it's
currently set to blend with the gbe-bg-dark background. If
changed, look at class description list and bio list.''',
'target_element': 'div',
'usage': 'Forms',
'prop_val': [('border-color', 'rgba(195,189,191,1)'),
('background-color', 'rgba(0,0,0,0)')]},
{
'selector': '.gbe-panel-list div.card-header',
'pseudo_class': '',
'description': '''blend the bottom of the panel header''',
'target_element': 'div',
'usage': 'Forms',
'prop_val': [('border-color', 'rgba(195,189,191,1)')]},
{
'selector': '.gbe-border-danger',
'pseudo_class': '',
'description': 'important outline to give focus on active panels',
'target_element': 'div',
'usage': 'Forms',
'prop_val': [('border-color', 'rgba(220,53,69,1)')]},
{
'selector': '.login-button',
'pseudo_class': '',
'description': 'Login drop down button on nav bar.',
'target_element': 'button',
'usage': 'General',
'prop_val': [('color', 'rgba(255,255,255,1)'),
('background-color', 'rgba(107,46,46,1)'),
('border-color', 'rgba(71,31,31,1)')]},
{
'selector': '.login-button',
'pseudo_class': 'hover',
'description': 'Login drop down button on nav bar, hover.',
'target_element': 'button',
'usage': 'General',
'prop_val': [('color', 'rgba(211,211,211,1)')]},
{
'selector': '#login-dp,.gbe-form-dropdown',
'pseudo_class': '',
'description': '''Dropdowns with forms in them like login and
theme cloning''',
'target_element': 'div',
'usage': 'General',
'prop_val': [('background-color', 'rgba(180,80,80,1)'),
('color', 'rgba(33,37,41,1)')]},
{
'selector': '#login-dp a',
'pseudo_class': '',
'description': 'Links in the login dropdown',
'target_element': 'a',
'usage': 'General',
'prop_val': [('color', 'rgba(255,255,255,1)')]},
{
'selector': '#login-dp .bottom',
'pseudo_class': '',
'description': 'Bottom of the login box - box for new users',
'target_element': 'div',
'usage': 'General',
'prop_val': [('background-color', 'rgba(180,80,80,1)'),
('color', 'rgba(255,255,255,1)')]},
{
'selector': '.gbe-navbar-default',
'pseudo_class': '',
'description': 'Navbar in default state - the not-active options',
'target_element': 'div',
'usage': 'Navbar',
'prop_val': [('background-color', 'rgba(229,229,229,.49)'),
('border-color', 'rgba(0,0,0,0)')]},
{
'selector': '#gbe_header_menu',
'pseudo_class': 'hover',
'description': 'Non-active text in navbar, on hover.',
'target_element': 'font',
'usage': 'Navbar',
'prop_val': [('color', 'rgba(175,21,21,1)'),
('background-color', 'rgba(0,0,0,0)')]},
{
'selector': '#gbe_header_menu',
'pseudo_class': '',
'description': 'Non-active text in navbar, on hover.',
'target_element': 'font',
'usage': 'Navbar',
'prop_val': [('color', 'rgba(0,0,0,1)')]},
{
'selector': 'button.navbar-toggler',
'pseudo_class': 'focus',
'description': 'Navbar button while it is clicked.',
'target_element': 'button',
'usage': 'Navbar',
'prop_val': [('outline-color', 'rgba(0,0,0,1)')]},
{
'selector': '.navbar-light .navbar-toggler',
'pseudo_class': '',
'description': 'Navbar button when screen is mobile/tablet sized.',
'target_element': 'button',
'usage': 'Navbar',
'prop_val': [('color', 'rgba(0,0,0,.5)'),
('border-color', 'rgba(0,0,0,.1)')]},
{
'selector': '.active #gbe_header_menu',
'pseudo_class': '',
'description': '''Currenty active navbar menu item, matches panel
of content.''',
'target_element': 'a',
'usage': 'Navbar',
'prop_val': [('background-color', 'rgba(235,235,235,1)'),
('text-shadow',
'px px px rgba',
'0px 0px 8px rgba(255,0,51,1)')]},
{
'selector': '.shadow-highlight',
'pseudo_class': '',
'description': 'Used to highlight text that was/is being updated.',
'target_element': 'span',
'usage': 'Navbar',
'prop_val': [('text-shadow',
'px px px rgba',
'0px 0px 8px rgba(255,0,51,1)')]},
{
'selector': '.gbe-dropdown-menu',
'pseudo_class': '',
'description': 'Dropdown navigational menu (any level)',
'target_element': 'ul',
'usage': 'Navbar',
'prop_val': [('background-color', 'rgba(173,3,37,1)')]},
{
'selector': '#gbe_dropdown',
'pseudo_class': '',
'description': 'Dropdown menu text',
'target_element': 'a',
'usage': 'Navbar',
'prop_val': [('color', 'rgba(255,255,255,1)')]},
{
'selector': '#gbe_dropdown',
'pseudo_class': 'hover',
'description': 'Dropdown menu text, on hover',
'target_element': 'a',
'usage': 'Navbar',
'prop_val': [('color', 'rgba(233,250,163,1)'),
('background-color', "rgba(0,0,0,1)")]},
{
'selector': '#gbe_dropdown',
'pseudo_class': 'focus',
'description': '''Dropdown menu text, on focus (selected but not
currently moused over)''',
'target_element': 'a',
'usage': 'Navbar',
'prop_val': [('color', 'rgba(233,250,163,1)'),
('background-color', "rgba(0,0,0,1)")]},
{
'selector': '.gbe-panel',
'pseudo_class': '',
'description': '''top level panel on every page, all content is
inside''',
'target_element': 'div',
'usage': 'General',
'prop_val': [('background-color', 'rgba(235,235,235,1)'),
('border-color', 'rgba(221,221,221,1)'),
('border-top-color', 'rgba(235,235,235,0)')]},
{
'selector': '.gbe-tab-active,.gbe-tab-active:hover,.gbe-tab-area',
'pseudo_class': '',
'description': '''Background of the active tab and everything
"on" it. Conference navigation, also landing page on small screens''',
'target_element': 'div',
'usage': 'General',
'prop_val': [('background-color', 'rgba(221,221,221,1)')]},
{
'selector': '.gbe-tab-active,.gbe-tab-active:hover',
'pseudo_class': '',
'description': 'Text of the active tab',
'target_element': 'div',
'usage': 'General',
'prop_val': [('color', 'rgba(180,80,80,1)')]},
{
'selector': '.gbe-tab',
'pseudo_class': '',
'description': 'Text of the inactive tabs',
'target_element': 'div',
'usage': 'General',
'prop_val': [('color', 'rgba(150,150,150,1)')]},
{
'selector': '.gbe-tab:hover',
'pseudo_class': '',
'description': 'Inactive tabs on hover',
'target_element': 'div',
'usage': 'General',
'prop_val': [('color', 'rgba(150,150,150,1)')]},
{
'selector': '.gbe-title',
'pseudo_class': '',
'description': 'Main Title of every page',
'target_element': 'h2',
'usage': 'General',
'prop_val': [('color', 'rgba(0,0,0,1)')]},
{
'selector': '.gbe-subtitle',
'pseudo_class': '',
'description': 'Secondary titles in any page',
'target_element': 'h2',
'usage': 'General',
'prop_val': [('color', 'rgba(0,0,0,1)')]},
{
'selector': '.gbe-footer',
'pseudo_class': '',
'description': 'footer at bottom of every page',
'target_element': 'div',
'usage': 'General',
'prop_val': [('color', 'rgba(255,255,255,1)'),
('background-color', 'rgba(0,0,0,0)'),
('border-color', 'rgba(0,0,0,0)')]},
{
'selector': '.gbe-modal-content',
'pseudo_class': '',
'description': 'Background of modal panes.',
'target_element': 'div',
'usage': 'Modal',
'prop_val': [('color', 'rgba(0,0,0,1)'),
('background-color', 'rgba(235,235,235,1)'),
('border-color', 'rgba(0,0,0,.2)')]},
{
'selector': '.gbe-modal-header',
'pseudo_class': '',
'description': 'Header and footer of modal.',
'target_element': 'div',
'usage': 'Modal',
'prop_val': [('color', 'rgba(0,0,0,1)'),
('background-color', 'rgba(216,216,216,1)'),
('border-color', 'rgba(229,229,229,1)')]},
{
'selector': '.gbe-link',
'pseudo_class': '',
'description': 'Links within modal panes.',
'target_element': 'a',
'usage': 'General',
'prop_val': [('color', 'rgba(51,122,183,1)'),
('text-decoration-color', 'rgba(51,122,183,1)')]},
{
'selector': '.bio_block',
'pseudo_class': '',
'description': 'box around bios on classes',
'target_element': 'div',
'usage': 'Event Display',
'prop_val': [('border-color', 'rgba(0,0,0,1)')]},
{
'selector': '.gbe-panel-link',
'pseudo_class': '',
'description': 'Links as headers of panels',
'target_element': 'a',
'usage': 'Event Display',
'prop_val': [('color', 'rgba(51,51,51,1)'),
('text-decoration-color', 'rgba(0,0,0,1)')]},
{
'selector': '.gbe-panel-link',
'pseudo_class': 'hover',
'description': 'Links as headers of panels',
'target_element': 'a',
'usage': 'Event Display',
'prop_val': [('color', 'rgba(51,51,51,1)'),
('text-decoration-color', 'rgba(51,51,51,1)')]},
{
'selector': '.gbe-panel-link',
'pseudo_class': 'focus',
'description': 'Links as headers of panels',
'target_element': 'a',
'usage': 'Event Display',
'prop_val': [('box-shadow',
'rgba px px px px',
'rgba(0,0,0,0) 0px 0px 0px 0px')]},
{'selector': '.gbe-panel-table, .gbe-panel-table td, .gbe-panel-table th',
'pseudo_class': '',
'description': '''Tables embedded within panels - used in
reporting. Covers header, rows and border and is fairly subtle''',
'target_element': 'table',
'usage': 'Reporting',
'prop_val': [('color', 'rgba(51,51,51,1)'),
('border-top-color', 'rgba(221,221,221,1)')]},
{
'selector': '.detail_link',
'pseudo_class': '',
'description': 'Icon for more information on events page',
'target_element': 'a',
'usage': 'Event Display',
'prop_val': [('color', 'rgba(0,0,0,1)')]},
{
'selector': '.checkbox-box-success',
'pseudo_class': '',
'description': 'Shows a newly made staff area',
'target_element': 'div',
'usage': 'Forms',
'prop_val': [('background-color', 'rgba(212,237,218,1)'),
('color', 'rgba(21,87,36,1)')]},
{
'selector': '.checkbox-box',
'pseudo_class': '',
'description': 'Rounded box around staff area',
'target_element': 'div',
'usage': 'Forms',
'prop_val': [('border-color', 'rgba(128,128,128,1)')]},
{
'selector': '.detail_link',
'pseudo_class': 'hover',
'description': 'Icon for more information on events page',
'target_element': 'a',
'usage': 'Event Display',
'prop_val': [('color', 'rgba(110,110,110,1)')]},
{
'selector': '.detail_link',
'pseudo_class': 'active',
'description': 'Icon for more information on events page',
'target_element': 'a',
'usage': 'Event Display',
'prop_val': [('color', 'rgba(110,110,110,1)')]},
{
'selector': (
'.detail_link-disabled,.detail_link-disabled:hover,' +
' .detail_link-disabled:active,.detail_link-disabled:visited'),
'pseudo_class': '',
'description': '''Detail link on events list that is disabled
because this user is not eligible''',
'target_element': 'a',
'usage': 'Event Display',
'prop_val': [('color', 'rgba(195,189,191,1)')]},
{
'selector': (
'.detail_link-detail_disable,.detail_link-detail_' +
'disable:hover,.detail_link-detail_disable:active, ' +
'.detail_link-detail_disable:visited'),
'pseudo_class': '',
'description': '''Detail link on event detail pages that is
disabled because user is not eligible. Darker for dark panel.''',
'target_element': 'a',
'usage': 'Event Display',
'prop_val': [('color', 'rgba(140,140,140,1)')]},
{
'selector': '.gbe-link',
'pseudo_class': 'hover',
'description': 'Links within modal panes - on hover',
'target_element': 'a',
'usage': 'General',
'prop_val': [('color', 'rgba(35,82,124,1)'),
('text-decoration-color', 'rgba(35,82,124,1)')]},
{
'selector': '.calendar-container',
'pseudo_class': '',
'description': 'Border around the whole calendar',
'target_element': 'div',
'usage': 'Calendar',
'prop_val': [('border', 'rgba px', 'rgba(110,110,110,1) 1px')]},
{
'selector': '.header1',
'pseudo_class': '',
'description': 'The top header with conf and calendar name',
'target_element': 'div',
'usage': 'Calendar',
'prop_val': [('color', 'rgba(51,51,51,1)'),
('background-color', 'rgba(208,208,208,1)'),
('border', 'rgba px', 'rgba(110,110,110,1) 1px')]},
{
'selector': '.header2',
'pseudo_class': '',
'description': 'The second header with date/day, larger size',
'target_element': 'div',
'usage': 'Calendar',
'prop_val': [('color', 'rgba(51,51,51,1)'),
('background-color', 'rgba(181,181,181,1)'),
('border', 'rgba px', 'rgba(110,110,110,1) 1px')]},
{
'selector': '.time-row',
'pseudo_class': '',
'description': 'Time display header at the top of every block',
'target_element': 'div',
'usage': 'Calendar',
'prop_val': [('color', 'rgba(255,255,255,1)'),
('background-color', 'rgba(150,3,32,1)'),
('border', 'rgba px', 'rgba(110,110,110,1) 1px')]},
{
'selector': '.event-row',
'pseudo_class': '',
'description': '''The background behind event boxes. Shows
through at times based on event sizing and logic.''',
'target_element': 'div',
'usage': 'Calendar',
'prop_val': [('background-color', 'rgba(196,196,196,1)'),
('border', 'rgba px', 'rgba(110,110,110,1) 1px')]},
{
'selector': '.event-row > div',
'pseudo_class': '',
'description': '''Actual event box.''',
'target_element': 'div',
'usage': 'Calendar',
'prop_val': [('color', 'rgba(51,51,51,1)'),
('background-color', 'rgba(228,189,197,1)'),
('border', 'rgba px', 'rgba(110,110,110,1) 1px')]},
{
'selector': ('div.teacher, div.performer, div.volunteer, ' +
'div.panelist, div.moderator, #volunteer'),
'pseudo_class': '',
'description': '''Event box when user has a special role in this
event.''',
'target_element': 'div',
'usage': 'Calendar',
'prop_val': [('color', 'rgba(51,51,51,1)'),
('background-color', 'rgba(243,225,229,1)'),
('border', 'rgba px', 'rgba(110,110,110,1) 1px')]},
{
'selector': 'div.interested',
'pseudo_class': '',
'description': '''Event box when user is interested.''',
'target_element': 'div',
'usage': 'Calendar',
'prop_val': [('color', 'rgba(51,51,51,1)'),
('background-color', 'rgba(243,225,229,1)'),
('border', 'rgba px', 'rgba(110,110,110,1) 1px')]},
{
'selector': 'div.pending-volunteer',
'pseudo_class': '',
'description': '''Event box when user is a pending volunteer,
awaiting approval.''',
'target_element': 'div',
'usage': 'Calendar',
'prop_val': [('color', 'rgba(51,51,51,1)'),
('background-color', 'rgba(217,237,247,1)'),
('border', 'rgba px', 'rgba(110,110,110,1) 1px')]},
{
'selector': '.backward > a > button, .forward > a > button',
'pseudo_class': '',
'description': '''Backward/forward buttons on the calendar page.''',
'target_element': 'a',
'usage': 'Calendar',
'prop_val': [('color', 'rgba(51,51,51,1)'),
('background-color', 'rgba(181,181,181,1)'),
('border', 'rgba px', 'rgba(110,110,110,1) 0px')]},
{
'selector': ('.backward > a > button:hover, ' +
'.forward > a > button:hover'),
'pseudo_class': '',
'description': '''Backward/forward buttons on the calendar page, on
hover.''',
'target_element': 'a',
'usage': 'Calendar',
'prop_val': [('background-color', 'rgba(230,230,230,1)')]},
{
'selector': '.vol_shift_table',
'pseudo_class': '',
'description': '''Volunteer Shift table background''',
'target_element': 'a',
'usage': 'Calendar',
'prop_val': [('background-color', 'rgba(228,224,224,1)'),
('border', 'rgba px', 'rgba(51,51,51,1) 0px')]},
{
'selector': '.vol_shift_header',
'pseudo_class': '',
'description': '''Time header/footer on volunteer calendar''',
'target_element': 'a',
'usage': 'Calendar',
'prop_val': [('color', 'rgba(51,51,51,1)'),
('background-color', 'rgba(228,189,197,1)'),
('border', 'rgba px', 'rgba(110,110,110,1) 2px')]},
{
'selector': '.vol_shift_hour, .vol_shift_mid',
'pseudo_class': '',
'description': '''Grid of events - the 15 min marks''',
'target_element': 'a',
'usage': 'Calendar',
'prop_val': [('border-top', 'px', '0px'),
('border-bottom', 'px', '0px'),
('border-right', 'rgba px', 'rgba(173,173,173,1) 1px')]},
{
'selector': '.vol_shift_mid',
'pseudo_class': '',
'description': '''Between the hours, the 15 min ticks are light.''',
'target_element': 'a',
'usage': 'Calendar',
'prop_val': [('border-left', 'rgba px', 'rgba(173,173,173,1) 1px')]},
{
'selector': '.vol_shift_hour',
'pseudo_class': '',
'description': '''Between the hours, the 15 min ticks are light.''',
'target_element': 'a',
'usage': 'Calendar',
'prop_val': [('border-left', 'rgba px', 'rgba(110,110,110,1) 2px')]},
{
'selector': '.vol_shift_event',
'pseudo_class': '',
'description': '''Time header/footer on volunteer calendar''',
'target_element': 'a',
'usage': 'Calendar',
'prop_val': [('color', 'rgba(51,51,51,1)'),
('background-color', 'rgba(228,189,197,1)'),
('border', 'rgba px', 'rgba(0,0,0,1) 2px')]},
{
'selector': '.gbe-badge',
'pseudo_class': '',
'description': '''Interest badge for teachers on landing page.''',
'target_element': 'span',
'usage': 'Landing Page',
'prop_val': [('color', 'rgba(255,255,255,1)'),
('background-color', 'rgba(119,119,119,1)')]},
{
'selector': '.performer_section',
'pseudo_class': '',
'description': '''Block of performer items, currently coded to match
background, but can be made to pop more.''',
'target_element': 'div',
'usage': 'Landing Page',
'prop_val': [('color', 'rgba(51,51,51,1)'),
('background-color', 'rgba(235,235,235,1) ')]},
{
'selector': '.landing_box',
'pseudo_class': '',
'description': '''Border around boxes on the landing page''',
'target_element': 'div',
'usage': 'Landing Page',
'prop_val': [('border-color', 'rgba', 'rgba(0,0,0,1)')]},
{
'selector': '.device-small li a.active',
'pseudo_class': '',
'description': '''Bottom of tabs when landing page is on mobile''',
'target_element': 'div',
'usage': 'Landing Page',
'prop_val': [('border-color', 'rgba', 'rgba(142,14,10,1)')]},
{
'selector': '.device-small li a',
'pseudo_class': '',
'description': '''Tabs when landing page is on mobile''',
'target_element': 'div',
'usage': 'Landing Page',
'prop_val': [('color', 'rgba', 'rgba(0,0,0,1)')]},
{
'selector': '.gbe_thumb_button',
'pseudo_class': '',
'description': '''Border around thumbnails''',
'target_element': 'button',
'usage': 'Landing Page',
'prop_val': [('border-color', 'rgba', 'rgba(51,51,51,1)')]},
{
'selector': '.gbe-alert',
'pseudo_class': '',
'description': '''Alerts on the landing page.''',
'target_element': 'div',
'usage': 'Landing Page',
'prop_val': [('background-color', 'rgba(241,221,91,1)')]},
{
'selector': '.sidebar-text',
'pseudo_class': '',
'description': '''Text on the right of the sidebar''',
'target_element': 'div',
'usage': 'Landing Page',
'prop_val': [('border-color', 'rgba', 'rgba(195,189,191,1)'),
('border-width', 'px', '3px')]},
{
'selector': '.gbe-medium-light',
'pseudo_class': '',
'description': '''Alternating bid sections''',
'target_element': 'div',
'usage': 'Landing Page',
'prop_val': [('background-color', 'rgba(227,221,223,1)')]},
{
'selector': ('.gbe_accordion .card, ' +
'.gbe_accordion .card:last-child .card-header'),
'pseudo_class': '',
'description': '''Border around accordion on ticket management.''',
'target_element': 'div',
'usage': 'General',
'prop_val': [('border-color', 'rgba(88,88,88,1)')]},
{
'selector': '.pricing-table',
'pseudo_class': '',
'description': '''Boxes with prices of tickets on ticket display.''',
'target_element': 'div',
'usage': 'Ticket List',
'prop_val': [('color', 'rgba(51,51,51,1)'),
('background-color', 'rgba(255,255,255,1)'),
('box-shadow',
'rgba px px px px',
'rgba(0,0,0,0.08) 0px 1px 9px 0px')]},
{
'selector': ('.pricing-table .edit-icon i, ' +
'.pricing-table .table-icon i'),
'pseudo_class': '',
'description': '''Icons on ticket list''',
'target_element': 'span',
'usage': 'Ticket List',
'prop_val': [('color', 'rgba(190,14,10,1)')]},
{
'selector': '.gbe-btn-common',
'pseudo_class': '',
'description': '''Buttons to buy tickets - they pop intentionally.''',
'target_element': 'a',
'usage': 'Ticket List',
'prop_val': [('color', 'rgba(255,255,255,1)'),
('background-color', 'rgba(238,14,10,1)')]},
{
'selector': '.gbe-panel-default',
'pseudo_class': '',
'description': '''Panel showing how payment will look.''',
'target_element': 'div',
'usage': 'Ticket List',
'prop_val': [('border-color', 'rgba(221,221,221,1)')]},
{
'selector': '.gbe-btn-common',
'pseudo_class': 'hover',
'description': '''Buttons to buy tickets - they pop intentionally.''',
'target_element': 'a',
'usage': 'Ticket List',
'prop_val': [('color', 'rgba(255,255,255,1)'),
('box-shadow',
'px px px px rgba',
'0px 4px 23px 0px rgba(233,30,99,0.5)')]},
]
def initialize_style(apps, schema_editor):
    """Seed the baseline style data (version 1.0) for the gbe app.

    Models are fetched from the versioned app registry rather than
    imported directly, so the migration always operates on the
    historical model definitions.
    """
    StyleVersion = apps.get_model("gbe", "StyleVersion")
    StyleSelector = apps.get_model("gbe", "StyleSelector")
    StyleProperty = apps.get_model("gbe", "StyleProperty")
    StyleValue = apps.get_model("gbe", "StyleValue")

    baseline = StyleVersion(
        name="Baseline",
        number=1.0,
        currently_live=True,
        currently_test=True)
    baseline.save()

    for entry in init_values:
        sel = StyleSelector(
            selector=entry['selector'],
            description=entry['description'],
            pseudo_class=entry['pseudo_class'],
            used_for=entry['usage'])
        sel.save()
        for prop in entry['prop_val']:
            # 2-tuple: (property, rgba value); 3-tuple: (property, type, value)
            if len(prop) == 2:
                prop_name, value = prop
                value_type = 'rgba'
            elif len(prop) == 3:
                prop_name, value_type, value = prop
            else:
                raise Exception(
                    "there should be 2 or 3 values here" + str(entry))
            prop_row = StyleProperty(
                selector=sel,
                style_property=prop_name,
                value_type=value_type)
            prop_row.save()
            StyleValue(style_property=prop_row,
                       style_version=baseline,
                       value=value).save()
def destroy_style(apps, schema_editor):
    """Reverse of initialize_style: remove the baseline version and all
    of the selectors seeded from init_values."""
    StyleVersion = apps.get_model("gbe", "StyleVersion")
    StyleSelector = apps.get_model("gbe", "StyleSelector")
    StyleVersion.objects.filter(name="Baseline", number=1.0).delete()
    for entry in init_values:
        StyleSelector.objects.filter(
            selector=entry['selector'],
            pseudo_class=entry['pseudo_class']).delete()
class Migration(migrations.Migration):
    # Data migration: seeds the baseline style records defined in
    # init_values; reversible via destroy_style.
    dependencies = [
        ('gbe', '0003_auto_20201224_0729'),
    ]
    operations = [
        migrations.RunPython(initialize_style, reverse_code=destroy_style),
    ]
| [
"noreply@github.com"
] | noreply@github.com |
17538b0801d060811a2100feec9d1813ce772fcd | 0ec5cf9dca760f0c7574691a8c3b2f922b027241 | /generate.py | 95927604382df3efc75532ff634421ad2379321b | [] | no_license | AleksanderObuchowski/Markov-Chains-Messenger | 72dde047b1fd9b0a365e4de5228c9132d03cc419 | ada2b08f9868b001deea72ab64682d0e46692dea | refs/heads/master | 2020-03-15T18:28:47.626136 | 2018-05-05T21:14:30 | 2018-05-05T21:14:30 | 132,284,540 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,561 | py | import sys
import random
import os
import json
def main():
length,filename = readArguments()
dictionary = loadDictionary(filename)
lastWord = "-------"
result = ""
for i in range(0,length):
newWord = getNextWord(lastWord,dictionary)
result = result + " " + newWord
lastWord = newWord
print(result)
def readArguments():
    """Read optional [length] [dictionary file] from the command line.

    Defaults to 50 words and "dictionary.json" when arguments are absent.
    """
    args = sys.argv[1:]
    length = int(args[0]) if len(args) >= 1 else 50
    filename = args[1] if len(args) >= 2 else "dictionary.json"
    return length, filename
def loadDictionary(filename):
    """Load the word-frequency dictionary from a JSON file.

    Exits the program with an error message if the file does not exist.
    """
    if not os.path.exists(filename):
        sys.exit("Error : Dictionary file not found")
    # Context manager guarantees the handle is closed even if json.load
    # raises (the original leaked the handle on a parse error).
    with open(filename, "r") as file:
        return json.load(file)
def getNextWord(lastWord, dict):
    """Pick a successor of *lastWord*, weighted by recorded frequency.

    Falls back to a uniformly random dictionary key when *lastWord* has
    no recorded successors.  (The parameter name ``dict`` shadows the
    builtin; kept for backward compatibility with existing callers.)
    """
    if lastWord not in dict:
        return pickRandom(dict)
    candidates = dict[lastWord]
    words = list(candidates.keys())
    weights = list(candidates.values())
    # random.choices draws one word with probability proportional to its
    # frequency -- same distribution as the original expand-shuffle-index
    # approach, but O(k) instead of O(sum of all frequencies), and without
    # the redundant shuffle performed after the index was already chosen.
    return random.choices(words, weights=weights, k=1)[0]
def pickRandom(dict):
    """Return a uniformly random key from the dictionary.

    (Parameter name ``dict`` shadows the builtin; kept for backward
    compatibility with existing callers.)
    """
    # random.choice replaces the manual randint/list-index dance; the
    # original also contained a discarded debug expression
    # ("randNum" + str(randNum)) which is removed here.
    return random.choice(list(dict.keys()))
main()
| [
"noreply@github.com"
] | noreply@github.com |
a7269b20a5c610f30c91b6222a8bdbb865b37e57 | 01ee737722510ba5baa7bc1d898de22777ffbd6a | /PhotoTaker.py | ca36464ed14bed06c9e5e2abaf6860d30b9d77d8 | [] | no_license | tckb/time-lapse-photography | 61622bc99cc3507d26b493a175c40307708ab73a | e8e6e806d366bd31c5caaf2ed59286d95357fa7f | refs/heads/master | 2021-01-23T21:39:07.098408 | 2013-07-31T21:11:51 | 2013-07-31T21:11:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,170 | py | import getpass
import os
import cv2
import time
import subprocess
class PhotoTaker:
    """Takes webcam snapshots on various schedules (interval, daily, weekly).

    Uses the Raspberry Pi camera via ``raspistill`` when ``cm`` is True,
    otherwise the first OpenCV video device found under /dev/video0..10.
    Photos are written under a local ``Photos/<Mode>/`` directory tree.
    """

    daysofweek = ("Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday")
    choices = ("Seconds", "Minutes", "Hours", "Days", "Weeks", "Daily", "Weekly")

    def __init__(self, cm=False):
        self.USER = getpass.getuser()
        self.cm = cm
        if cm:
            # Raspberry Pi camera: shell out to raspistill per shot.
            self.WEBCAM = ['raspistill', '-o']
        else:
            # Probe /dev/video0..10 for the first attached camera.
            # NOTE(review): if no device exists, self.WEBCAM stays unset and
            # the take-picture methods raise AttributeError -- confirm this
            # is the intended failure mode.
            for i in range(11):
                if os.path.lexists("/dev/video" + str(i)):
                    self.WEBCAM = cv2.VideoCapture(i)
                    break
        if not os.path.lexists("Photos"):
            os.mkdir("Photos")

    @classmethod
    def getChoices(klass):
        """Return the supported scheduling modes."""
        return klass.choices

    @classmethod
    def getDaysofWeek(klass):
        """Return the day names used by the Weekly mode (1-based externally)."""
        return klass.daysofweek

    def start(self, action, interval, day=None):
        """Dispatch to the handler method named after *action* (e.g. 'Daily')."""
        if day:
            getattr(self, action.lower())(interval, day)
        elif action in self.getChoices():
            getattr(self, action.lower())(interval)

    def readConfig(self):
        """Parse ~/.pysnap.conf.

        Returns (key, int value) for the first non-comment ``key=value``
        line, or None when the file is missing or no such line exists.
        """
        path = "/home/" + self.USER + "/.pysnap.conf"
        if os.path.lexists(path):
            # Context manager fixes the leaked file handle in the original.
            with open(path, "r") as config:
                lines = config.readlines()
            for line in lines:
                if len(line) > 0 and line[0] != "#":
                    parts = line.split("=")
                    if len(parts) == 2:
                        return parts[0], int(parts[1])

    def takePictureCV(self, directory, currtime=None):
        """Grab one frame with OpenCV and save it as <directory><time>.jpeg."""
        if not currtime:
            currtime = str(time.strftime("%X"))
        rval, img = self.WEBCAM.read()
        cv2.waitKey(20)  # give the capture pipeline time to deliver a frame
        if rval:
            cv2.imwrite(directory + currtime + '.jpeg', img)

    def takePicture(self, *args):
        """Take a picture with whichever backend was selected at init."""
        if self.cm:
            self.takePictureCmd(*args)
        else:
            self.takePictureCV(*args)

    def takePictureCmd(self, directory, currtime=None):
        """Capture via raspistill (non-blocking subprocess)."""
        if not currtime:
            currtime = str(time.strftime("%X"))
        fname = directory + currtime + '.jpeg'
        subprocess.Popen(self.WEBCAM + [fname])

    def removeConfig(self):
        """Delete ~/.pysnap.conf if present (best effort, errors ignored)."""
        try:
            # Bug fix: the original built "/home<user>/.pysnap.conf" (missing
            # slash), so the config file was never actually removed.
            os.remove("/home/" + self.USER + "/.pysnap.conf")
        except Exception:
            pass

    def _ensure_dir(self, name):
        """Create Photos/<name> if needed and return './Photos/<name>/'."""
        rel = "Photos/" + name
        if not os.path.lexists(rel):
            os.mkdir(rel)
        return "./" + rel + "/"

    def _interval_loop(self, name, seconds):
        """Take a photo into Photos/<name> every *seconds* until Ctrl-C."""
        directory = self._ensure_dir(name)
        try:
            while True:
                self.takePicture(directory)
                time.sleep(float(seconds))
        except KeyboardInterrupt:
            print("\nGoodbye!")
            quit()

    def seconds(self, num):
        """Photo every *num* seconds."""
        self._interval_loop("Seconds", num)

    def minutes(self, num):
        """Photo every *num* minutes."""
        self._interval_loop("Minutes", num * 60)

    def hours(self, num=None):
        """Photo every *num* hours."""
        self._interval_loop("Hours", num * 60 ** 2)

    def days(self, num=None):
        """Photo every *num* days.

        Bug fix: the original created Photos/Weeks but saved into
        Photos/Days, crashing whenever that directory did not exist.
        """
        self._interval_loop("Days", num * 60 ** 2 * 24)

    def weeks(self, num=None):
        """Photo every *num* weeks."""
        self._interval_loop("Weeks", num * 60 ** 2 * 24 * 7)

    def daily(self, picturetime):
        """Take one photo per day at *picturetime* ('%I:%M:%S %p' format).

        Polls every 5 seconds, so the given second may be missed -- this
        matches the original behavior.
        """
        directory = self._ensure_dir("Daily")
        while True:
            try:
                currtime = time.strftime('%I:%M:%S %p')
                if currtime == picturetime:
                    self.takePicture(directory, currtime=currtime)
                time.sleep(5)
            except KeyboardInterrupt:
                print("\nGoodbye!")
                quit()

    def weekly(self, phototime, day):
        """Take one photo per week on *day* (1-based index into daysofweek)
        at *phototime* ('%I:%M:%S %p' format)."""
        directory = self._ensure_dir("Weekly")
        # Bug fix: the original used a Python-2 print statement here.
        print(self.daysofweek[day - 1])
        while True:
            try:
                currtime = time.strftime('%I:%M:%S %p')
                currday = time.strftime('%A')
                if currtime == phototime and currday == self.daysofweek[day - 1]:
                    self.takePicture(directory, currtime=currtime)
                time.sleep(5)
            except KeyboardInterrupt:
                print("\nGoodbye!")
                quit()
| [
"aa1ronham@gmail.com"
] | aa1ronham@gmail.com |
05a4721b459df9940953ad4a1017d56d9fd40f02 | a80920c853b51f34acfd37b260a872f4057a52ed | /daz1.1.4.py | 7ea7d3af0ed7fb0b9baa05a2df4c5f99dbefae5f | [] | no_license | yzhzbys/python-dazi | 366e32e9015e2cc97ddf19cf4f6f01d891fe1bef | 5df5800c76a13026634419c791d507d75f8ee440 | refs/heads/master | 2023-02-13T04:27:21.818283 | 2021-01-12T07:54:18 | 2021-01-12T07:54:18 | 330,923,609 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,253 | py | from tkinter import *
import string
import random
import time
import tkinter
import re
from datetime import datetime
# --- Module-level Tk root window, shared widgets and StringVars ---
hh =Tk()
hh.rowconfigure(1, weight=1)
hh.columnconfigure(0, weight=1)
# tit1: current sample-text line (read-only); tit2: preview of the next line.
tit1=Text(hh,fg='#78a2a1',wrap='none',bg='#68312e',state='disabled',width=77,height=1,font=('楷体',11,'bold'))
tit2=Text(hh,fg='#ccc',state='disabled',width=77,height=1,font=('楷体',11,'bold'))
hh.title("鱼王的打字练习课")
# Row labels: sample text / input / next line / score.
Label(hh,text='例文:',justify='right',font=('黑体',12),width=5).grid(row=0,column=0)
Label(hh,text='输入',justify='right',font=('黑体',12),width=5).grid(row=1,column=0)
Label(hh,text='下一行',justify='right',font=('黑体',12),width=5).grid(row=2,column=0)
Label(hh,text='成绩').grid(row=3,column=3)
# v2: input text; v3: accuracy line; v4/v5: speed lines; v6: hint line.
v2=StringVar()
v3=StringVar()
v4=StringVar()
v5=StringVar()
v6=StringVar()
# NOTE(review): tit1 is created with state='disabled'; insert on a disabled
# Text widget is a no-op -- confirm this startup hint ever appears.
tit1.insert(END,"点击开始按钮开始练习")
class typing:
    """Typing-practice session state and Tk event handlers.

    Reads the article from '文章.txt', splits it into fixed-width sample
    lines of ``titnum`` characters, and tracks input, timing, accuracy
    and speed across the module-level Tk widgets.
    """
    def __init__(self):
        self.i=0  # index of the current sample line
        self.a=''#current sample line (titnum chars)
        self.b=''  # next sample line (preview)
        self.c=''#sample text from the start up to the current line
        self.d=''#part of the input that belongs to the current sample line
        self.e=''#overflow typed beyond the current sample line
        self.clock=[]  # timestamps recorded by time_clock()
        self.string_all=open('文章.txt','r')#open the article (closed at module level after init)
        self.titnum=50#characters per sample line (configure here)
        self.b_all=self.strQ2B(self.string_all.read())#normalize full-width chars to half-width
        self.all=''.join(self.b_all.splitlines())#read() returns a str; splitlines()+join strips newlines
        self.all_num=len(self.all)
        self.a=self.all[:self.titnum]
        self.b=self.all[self.titnum:self.titnum*2]
        print(self.b)
        self.string_tit=self.a
        self.check_a=''#segment most recently checked (set in check_al)
        self.check_b=''#accumulated checked input so far
        self.check_tt=''#speed message text
        self.tips=''#hint message text
        self.all_spd=0#current speed (chars/min)
        self.wrong_index=[]#indices of mistyped characters
        v2.trace('w',self.text_trace)
    def strQ2B(self,ustring):
        """Convert full-width characters in *ustring* to half-width."""
        rstring = ""
        for uchar in ustring:
            inside_code=ord(uchar)
            if inside_code == 12288: #full-width space maps directly to ASCII space
                inside_code = 32
            elif (inside_code >= 65281 and inside_code <= 65374): #other full-width chars shift by a fixed offset
                inside_code -= 65248
            rstring += chr(inside_code)
        return rstring
    def strB2Q(self,ustring):
        """Convert half-width characters in *ustring* to full-width."""
        rstring = ""
        for uchar in ustring:
            print(uchar)
            inside_code=ord(uchar)
            if inside_code == 32: #half-width space maps directly
                inside_code = 12288
            elif inside_code >= 32 and inside_code <= 126: #printable ASCII shifts by a fixed offset
                inside_code += 65248
            rstring += chr(inside_code)
        return rstring
    def tit_change(self):#advance to the next sample line and refresh both Text widgets
        self.i+=1
        print(self.i)
        tit1.config(state='normal')
        tit2.config(state='normal')
        tit1.delete(1.0,END)
        tit2.delete(1.0,END)
        self.a=self.all[self.titnum*self.i:self.titnum*(self.i+1)]
        self.b=self.all[self.titnum*(self.i+1):self.titnum*(self.i+2)]
        tit1.insert(0.0,self.a)
        tit2.insert(0.0,self.b)
        tit1.config(state='disable')
        tit2.config(state='disable')
        if len(self.b)==0:#last page: enable the finish button
            over.config(state='active')
        if len(self.a)==0:#past the end: the whole article is done
            v2.set("本篇文章已完成")
            e2.config(state='disable')
            self.score()
            v6.set('恭喜你完成本篇章')
            over.config(state='disable')
    def text_trace(self, var, mode, event):#trace callback for v2 (the input field)
        a=v2.get()
        c=self.strQ2B(a)
        print(c)#debug: half-width value that should appear in the entry
        v2.set(c)
        tag1_index=[]
        tit1.tag_delete('tag1')
        tag1_index.append(len(a))
        print(tag1_index)
        tag1_info= list(map(lambda x:'1.'+str(x),tag1_index))
        b=tit1.get(0.0,END)[:-1]#a Text widget always ends with a trailing newline
        self.tips=''.join("还需要输入%d个字呀\n"%(len(b)-len(a)))
        v6.set(self.tips)
        if len(a) <len(b):#guard keeps the position tag from running past the line
            tit1.tag_add('tag1',tag1_info)
            tit1.tag_config('tag1',background='#ccc')
    def enter(self,event):#<Return> handler; event param is required for a bound callback
        #self.score()#(disabled) alternative: run scoring on the same Enter binding
        a=v2.get()
        print(len(a))
        b=tit1.get(0.0,END)[:-1]
        print(len(b))
        if len(a)>=len(b):#enough characters typed for this line?
            self.d = a[:len(b)]
            self.e = a[len(b):]
            self.c=self.all[:self.titnum*(self.i+1)]
            self.check_al()
            self.tit_change()#advance to the next sample line
            e2.delete(0,END)
            e2.insert(0,self.e)
        else:
            self.tips=''.join("还需要输入%d个字呀\n"%(len(b)-len(a)))
            v6.set(self.tips)
    def create(self):#start a practice session
        print(self.all)
        cutout.config(state='active')
        tit1.config(state='normal')
        tit2.config(state='normal')
        tit1.delete(0.0,END)
        grades.delete(0.0,END)
        tit1.insert(END,self.a)
        tit2.insert(END,self.b)
        self.time_clock().__next__()#advance the timing generator (records a timestamp)
        tit1.config(state='disable')
        tit2.config(state='disable')
        grades.config(state='normal')
        grades.insert(END,"开始:%s \n" % str(self.clock[-1]))
        start.config(state='disable')
        a=datetime.now()
        e2.config(state='normal')
        grades.config(state='disable')
    def restart(self):#reset everything for another run
        restart.config(state='disable')
        cutout.config(state='active')
        e2.config(state='normal')
        grades.config(state='normal')
        self.check_tt=''.join("前次速度:%d字/分"%(self.all_spd))
        v5.set(self.check_tt)
        v4.set('本次速度:--字/分')
        grades.delete(0.0,END)
        self.i=-1
        self.check_b=''#clear accumulated input on reset
        self.tit_change()
        e2.delete(0,END)
        self.clock.clear()
        self.time_clock().__next__()#records a fresh start timestamp
        grades.insert(END,"开始:%s \n" % str(self.clock[-1]))
        grades.config(state='disable')
    def check_al(self): #compare accumulated input against the sample; show accuracy
        right_num = 0 #count of correct characters
        self.time_clock().__next__()
        use_time=(self.clock[-1] - self.clock[0]).seconds
        self.check_a=self.d
        self.check_b=self.check_b+self.check_a
        self.check_num=len(self.check_b)
        print("当前输入:",self.check_a)
        print("已输入:",self.check_b)
        print("当前例文:",self.a)
        print('已完成例文:',self.c)
        e3.config(state='normal')
        for i in range(len(self.check_b)):
            if self.check_b[i] == self.c[i]:
                right_num += 1
            else:
                self.wrong_index.append(i)
        if right_num == len(self.all):
            v3.set("完全正确,正确率%.2f%%用时:%s秒"%((right_num*1.0)/self.check_num*100,use_time))
        else:
            v3.set("正确率%.2f%%用时:%s秒"%((right_num*1.0)/self.check_num*100,use_time))
        e3.config(state='disable')
    def time_clock(self):#timer: appends a timestamp when the generator is advanced
        self.clock.append(datetime.now())
        yield#generator: body runs on the caller's __next__()
    def score(self):#finish the session and render the report
        grades.config(state='normal')
        a=v2.get()
        print(len(a))
        b=tit1.get(0.0,END)[:-1]
        if len(a) >= len(b):#input complete (TODO: refine this check)
            over.config(state='active')
            self.d = a[:len(b)]
            self.e = a[len(b):]
            self.c=self.all[:self.titnum*(self.i+1)]
            self.check_al()
            grades.insert(END,"结束:%s\n"%str(self.clock[-1]))
            all_time=(self.clock[-1] - self.clock[0]).seconds#elapsed: last timestamp minus first
            self.all_spd=len(self.check_b)*60/(all_time)
            self.check_tt=''.join("本次速度:%d字/分"%(self.all_spd))
            v4.set(self.check_tt)
            grades.insert(END,"题目:%s\n"% self.c)
            tag_info= list(map(lambda x:'4.'+str(x+3),self.wrong_index))
            print(tag_info)
            grades.insert(END,"结果:%s\n"% self.check_b)
            for i in range(len(tag_info)):
                grades.tag_add("tag2",tag_info[i])
                grades.tag_config("tag2",background='red')
            restart.config(state='active')
            grades.config(state='disable')
            e2.config(state='disable')
            v6.set('恭喜你完成本篇章')
            over.config(state='disable')
            cutout.config(state='disable')
        elif len(a)<len(b):
            self.tips=''.join("还需要输入%d个字呀\n"%(len(b)-len(a)))
            v6.set(self.tips)
    def cut(self):#abort the test early and score what was typed so far
        grades.config(state='normal')
        a=v2.get()
        print(len(a))
        b=tit1.get(0.0,END)[:-1]
        self.d = a[:len(b)]
        self.e = a[len(b):]
        self.c=self.all[:self.titnum*(self.i+1)]
        self.check_al()
        grades.insert(END,"结束:%s\n"%str(self.clock[-1]))
        all_time=(self.clock[-1] - self.clock[0]).seconds#elapsed: last timestamp minus first
        self.all_spd=len(self.check_b)*60/(all_time)
        self.check_tt=''.join("本次速度:%d字/分"%(self.all_spd))
        v4.set(self.check_tt)
        grades.insert(END,"题目:%s\n"% self.c)
        tag_info= list(map(lambda x:'4.'+str(x+3),self.wrong_index))
        print(tag_info)
        grades.insert(END,"结果:%s\n"% self.check_b)
        for i in range(len(tag_info)):
            grades.tag_add("tag2",tag_info[i])
            grades.tag_config("tag2",background='red')
        over.config(state='disabled')
        restart.config(state='active')
        e2.config(state='disabled')
        grades.config(state='disable')
# Build the session object, then lay out the remaining widgets.
# NOTE(review): only the two lines below are guarded by __main__; the
# widget setup that follows runs at import time too and references `tp`,
# so importing this module would raise NameError -- confirm intended.
if __name__=='__main__':
    tp = typing()
    tp.string_all.close()
# Input entry (bound to <Return>) and read-only accuracy/speed displays.
e2=Entry(hh,textvariable=v2,state='disabled',width=77,font=('楷体',11,'bold'))
e2.bind('<Return>',tp.enter)
e3=Entry(hh,textvariable=v3,state='disabled',width=30,font=('宋体',8),foreground='red')
l1=Label(hh,textvariable=v4,width=20,foreground='red')
l2=Label(hh,textvariable=v5,width=20,foreground='red')
tit1.grid(row=0,column=1,columnspan=3,ipadx=80,ipady=1,padx=10,pady=1)
e2.grid(row=1,column=1,columnspan=3,ipadx=80,ipady=1,padx=10,pady=1)
tit2.grid(row=2,column=1,columnspan=3,ipadx=80,ipady=1,padx=10,pady=1)
e3.grid(row=4,column=3,padx=10,pady=1)
l1.grid(row=5,column=3,padx=10,pady=1,)
l2.grid(row=6,column=3,padx=10,pady=1)
# Results panel with vertical scrollbar.
grades=Text(hh,width=90,height=7,font=('宋体',10),state='disable')
grades.grid(row=3,column=1,columnspan=1,rowspan=3,pady=1,padx=10)
scroll=tkinter.Scrollbar(width=20,command=grades.yview)
grades.config(yscrollcommand = scroll.set)
scroll.grid(row=3,column=2,rowspan=3,sticky=S + N+E,padx=1)
tips=Label(hh,textvariable=v6,width=40,height=1,pady=5)
tips.grid(row=6,column=1,columnspan=1,rowspan=1)
# Control buttons: start / finish / restart / abort.
start=Button(hh,text="开始",width=20,command=tp.create )
start.grid(row=3,column=0)
over=Button(hh,text="结束",width=20,command=tp.score ,state='disable')
over.grid(row=5,column=0)
restart=Button(hh,text="再来一次",width=20,command=tp.restart,state='disable')
restart.grid(row=4,column=0)
cutout=Button(hh,text="终止",width=20,command=tp.cut,state='disable')
cutout.grid(row=6,column=0)
mainloop()
| [
"noreply@gitee.com"
] | noreply@gitee.com |
3c1a0b539c6f6d96a5f568b0e93dea70e904ad93 | db6fd2ecbc8b725c48f8f983505f1643294a628b | /cmd/CommandShell.py | a54c098513e9b1ea95ae1650733912fa7c6cec93 | [] | no_license | weleek/python_examples | e1ed3b23115dd8ef0163194484c8ab0f118a2deb | 8ecdc3d662e106d89a3ca25b547f9f2fd2078162 | refs/heads/master | 2023-07-25T07:42:08.388414 | 2023-07-11T06:57:58 | 2023-07-11T06:57:58 | 203,959,963 | 0 | 0 | null | 2023-07-11T06:57:59 | 2019-08-23T08:44:12 | Python | UTF-8 | Python | false | false | 4,824 | py | # -*- coding: utf-8 -*-
"""Command Line Interface
Usage:
run -h | --help
run -i | --interactive
run start [process name]
run stop [process name]
Commands:
start Program Start.
stop Program Stop.
Options:
-v -V --version show version info.
-i --interactive use interactive console.
"""
import os
import sys
import platform
import cmd
from docopt import docopt
from colorama import Fore, Style
def parse_command(argv=None):
    """Parse arguments against the module docstring via docopt.

    argv=None makes docopt fall back to sys.argv[1:].
    """
    return docopt(__doc__, argv, version='0.0.1')
def run_subcommand(args):
    """Invoke every subcommand whose flag is set in the docopt args dict.

    args -- mapping of docopt keys to values; '--option' keys are ignored,
    and each remaining truthy key triggers a placeholder shell echo.
    """
    for key, enabled in args.items():
        # command-line options are handled by docopt itself
        if key.startswith('--'):
            continue
        if enabled:
            os.system(f'echo "call {key}"')
def colorize(s, color):
    """Wrap *s* in the ANSI escape for the colorama color named *color*.

    color -- case-insensitive colorama Fore attribute name (e.g. 'red',
    'lightgreen_ex'); an unknown name raises AttributeError, exactly as
    the previous eval-based lookup did.

    Uses getattr instead of eval so an attacker-influenced color string
    can never execute arbitrary code.
    """
    prefix = getattr(Fore, color.upper())
    return f'{prefix}{s}{Style.RESET_ALL}'
class QuitException(Exception):
    """Raised by shell commands to request a clean exit from cmdloop."""
    pass
class ArgsException(Exception):
    """Raised on argument-parsing errors; cmdloop reports it and continues."""
    pass
class CommandShell(cmd.Cmd):
    """Common command shell interface.

    Menu-style wrapper around cmd.Cmd: handler methods use a cmd_ prefix
    (resolved by get_cmd) instead of the framework's do_ prefix, and
    cmdloop is hardened against QuitException / ArgsException / Ctrl-C.
    """
    def _clear(self):
        # Clear the terminal screen ("cls" on Windows, "clear" elsewhere).
        os.system("cls" if platform.system().lower() == "windows" else "clear")
    def __init__(self, name):
        """
        Create common shell class.

        name -- label shown (colorized) in the interactive prompt
        """
        cmd.Cmd.__init__(self)
        self.intro = """================================ Test Cli Program ==============================================
[L] Process List.
[S] Process Status.
[Q] Quit."""
        self.help_text = "press <Tab> to expand command or type ? to get any helps."
        self.prompt = f'{colorize(name, "lightgreen_ex")} > '
        # flag read by default(); set by the (commented-out) precmd hook
        self.choice = False
    def emptyline(self):
        """By default cmd.Cmd re-runs the last command on empty input;
        overridden here to redraw the menu instead of repeating it."""
        self._clear()
        print(self.intro)
        print("Please select a Menu")
        pass
    def default(self, line):
        """Called when the input matches no handler method."""
        if not self.choice:
            self._clear()
            print(f"Please select a Menu.\nDoes not exists. [{line}]")
        pass
    """Since onecmd is overridden below, the hooks that follow are unused
    and are kept commented out."""
    # def preloop(self):
    #     """Runs once before the main loop starts."""
    #     print(f'{__name__}')
    # def precmd(self, line):
    #     """Runs first, before each command is processed."""
    #     self._clear()
    #     switch = line.upper()
    #     if switch == 'S':
    #         self.choice = True
    #         print(f'choice menu : [{line}]')
    #     elif switch == 'L':
    #         self.choice = True
    #         print(f'choice menu : [{line}]')
    #     elif switch == 'Q':
    #         self.choice = True
    #         self.do_exit(line)
    #     return line
    def get_cmd(self, cmd):
        """Map a typed command to its handler name; customized so handlers
        use a cmd_ prefix rather than the framework's default do_ prefix."""
        func_list = self.get_names()
        cmd = f'cmd_{cmd}'
        for func_name in func_list:
            if func_name.startswith(cmd):
                cmd = func_name
                break
        return cmd
    def onecmd(self, line):
        """Main command dispatch (overrides cmd.Cmd.onecmd)."""
        # NOTE: the local 'cmd' shadows the cmd module inside this method
        cmd, arg, line = self.parseline(line)
        if not line:
            return self.emptyline()
        if cmd is None:
            return self.default(line)
        self.lastcmd = line
        if line == 'EOF':
            self.lastcmd = ''
        if cmd == '':
            return self.default(line)
        else:
            try:
                func = getattr(self, self.get_cmd(cmd))
            except AttributeError:
                return self.default(line)
            return func(arg)
    def cmd_list_of_process(self, arg):
        """[L] Show Process List."""
        self._clear()
        print(f'List')
    def cmd_status_of_process(self, arg):
        """[S] Show Process Status."""
        self._clear()
        print(f'Status')
    def cmd_quit(self, arg):
        # [Q] Quit: signal cmdloop to terminate via QuitException.
        return self._quit()
    def _quit(self):
        raise QuitException("quit")
    def cmdloop(self, intro=None):
        # Run the interactive loop until quit/Ctrl-C; argument errors
        # re-enter the loop, any other exception terminates it.
        while True:
            try:
                self._clear()
                cmd.Cmd.cmdloop(self, intro)
            except QuitException as qe:
                print(f'{qe}')
                break
            except KeyboardInterrupt:
                print('Program Exit...')
                break
            except ArgsException as e:
                print(f"Error parsing arguments!\n {e}")
                continue
            except Exception as e:
                print(f'Unknown Exception : {e}')
                break
| [
"weleek@gmail.com"
] | weleek@gmail.com |
27d0fcb1b8ffe87fa60bc957c1617a8b801e5f29 | c2777c1e132642ae033fd0eaeabb617823fc7e7b | /initial/number.py | f114b94ad6f66e7e630c11f9e6225b1a40f45119 | [] | no_license | Ludug323/python | 278e7c30f082e6e6e823e17b04e376cc4f2c98c2 | b822098059dd0be69c11b6dc0406134d1c321da3 | refs/heads/master | 2022-11-14T07:46:24.155364 | 2020-06-27T09:59:35 | 2020-06-27T09:59:35 | 272,355,022 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 140 | py | #數字運算
x = 7/6  # / is true (float) division: e.g. 3/6 = 0.5; // is floor division: 3//6 = 0
print(x)
x = 7//6  # floor (integer) division
print(x)
x = 2**0.5  # exponentiation (square root here); overwritten on the next line
x = 3.5
print(x)
x = x + 1
print(x)
| [
"60188596+Ludug323@users.noreply.github.com"
] | 60188596+Ludug323@users.noreply.github.com |
59b2660370d66e34056e4768c0ea0fabd477cd5b | 7d77d49372973637f9ca9a03bfa47ff0f6aadd57 | /mathematics/root-finding/root_of_1d-function_using_secant-method/ro1dfusm.py | fb1280e50d2d60116c08dd5b942f38339e1b6c46 | [] | no_license | udy11/basic | 18309a65455deaae5c9d6af6a795df0a027fed15 | 8dd3d45b099a06d9ecb7a03ab296fd152f0256e9 | refs/heads/main | 2023-08-17T20:50:58.847732 | 2023-08-06T10:20:15 | 2023-08-06T10:20:15 | 492,775,065 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,121 | py | # Last updated: 16-Apr-2013
# Udaya Maurya (udaya_cbscients@yahoo.com, telegram: https://t.me/udy11)
# Source: https://github.com/udy11, https://gitlab.com/udy11
# Function to find a root of a one
# dimensional function using Secant Method
# ALL YOU NEED TO DO:
# Specify two initial guesses x0 and x1,
# they must be near the desired root
# Specify the function y(x), whose root is needed
# Specify positive small er; when |y(x2)|<er, further
# computation stops and x2 is returned as root
def scnt(x0, x1, er):
    ''' (num,num,num) -> num
    Find a root of y(x)=0 using the Secant Method.

    x0, x1 -- two initial guesses near the desired root
    er     -- small positive tolerance; when |y(x2)| < er, iteration
              stops and x2 is returned as the root

    Evaluates the module-level function y(x).  Function values are
    cached so y is called once per iteration instead of three times,
    producing exactly the same sequence of iterates as before.
    Note: raises ZeroDivisionError if two consecutive iterates yield
    identical function values (secant denominator becomes zero).
    '''
    f0 = y(x0)
    f1 = y(x1)
    while True:
        x2 = x1 - f1 * (x1 - x0) / (f1 - f0)
        f2 = y(x2)
        if abs(f2) < er:
            return x2
        x0, f0 = x1, f1
        x1, f1 = x2, f2
import math
def y(x):
    ''' num -> num
    Target function whose root is to be found; edit this function to
    change the equation being solved.  Currently y(x) = sin(x).
    '''
    return math.sin(x)
# initial guesses bracketing the root of sin(x) near x = pi
x0=2.1
x1=4.8
# stop once |y(x)| falls below this tolerance (near double-precision epsilon)
er=2.0e-16
x2=scnt(x0,x1,er)
# print the computed root and the residual y(root)
print(x2,y(x2))
| [
"udaya_cbscients@yahoo.com"
] | udaya_cbscients@yahoo.com |
303b5b1e30a0347d0b5f6e3e7fefdc0086d6d2a9 | b142291a856847f29cb73281f71fd6d162fc8061 | /mwall/ws28xx-0.3.py | fc46625686649fc67b42fcd4148cdf8acaab1ae1 | [] | no_license | matthewwall/weewx-ws28xx | 6498353c563df60df9ef8126247693a92d92946b | bc86e6b016f9999ba8a8b6c860352244b0eb92b3 | refs/heads/master | 2021-01-17T11:36:12.631830 | 2016-04-17T05:37:22 | 2016-04-17T05:37:22 | 34,466,609 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 169,172 | py | #!/usr/bin/python
# $Id: ws28xx-0.3.py 2357 2014-09-28 12:17:35Z mwall $
#
# Copyright 2013 Matthew Wall
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE.
#
# See http://www.gnu.org/licenses/
#
# Thanks to Eddie De Pieri for the first Python implementation for WS-28xx.
# Eddie did the difficult work of decompiling HeavyWeather then converting
# and reverse engineering into a functional Python implementation. Eddie's
# work was based on reverse engineering of HeavyWeather 2800 v 1.54
#
# Thanks to Luc for enumerating the console message types and for debugging
# the transceiver/console communication timing issues.
"""Classes and functions for interfacing with WS-28xx weather stations.
LaCrosse makes a number of stations in the 28xx series, including:
WS-2810, WS-2810U-IT
WS-2811, WS-2811SAL-IT, WS-2811BRN-IT, WS-2811OAK-IT
WS-2812, WS-2812U-IT
WS-2813
WS-2814, WS-2814U-IT
WS-2815, WS-2815U-IT
C86234
The station is also sold as the TFA Primus and TechnoLine.
HeavyWeather is the software provided by LaCrosse.
There are two versions of HeavyWeather for the WS-28xx series: 1.5.4 and 1.5.4b
Apparently there is a difference between TX59UN-1-IT and TX59U-IT models (this
identifier is printed on the thermo-hygro sensor).
HeavyWeather Version Firmware Version Thermo-Hygro Model
1.54 333 or 332 TX59UN-1-IT
1.54b 288, 262, 222 TX59U-IT
HeavyWeather provides the following weather station settings:
time display: 12|24 hour
temperature display: C|F
air pressure display: inhg|hpa
wind speed display: m/s|knos|bft|km/h|mph
rain display: mm|inch
recording interval: 1m
keep weather station in hi-speed communication mode: true/false
According to the HeavyWeatherPro User Manual (1.54, rev2), "Hi speed mode wears
down batteries on your display much faster, and similarly consumes more power
on the PC. We do not believe most users need to enable this setting. It was
provided at the request of users who prefer ultra-frequent uploads."
The HeavyWeatherPro 'CurrentWeather' view is updated as data arrive from the
console. The consonle sends current weather data approximately every 13
seconds.
Historical data are updated less frequently - every 2 hours in the default
HeavyWeatherPro configuration.
According to the User Manual, "The 2800 series weather station uses the
'original' wind chill calculation rather than the 2001 'North American'
formula because the original formula is international."
Apparently the station console determines when data will be sent, and, once
paired, the transceiver is always listening. The station console sends a
broadcast on the hour. If the transceiver responds, the station console may
continue to broadcast data, depending on the transceiver response and the
timing of the transceiver response.
According to the C86234 Operations Manual (Revision 7):
- Temperature and humidity data are sent to the console every 13 seconds.
- Wind data are sent to the temperature/humidity sensor every 17 seconds.
- Rain data are sent to the temperature/humidity sensor every 19 seconds.
- Air pressure is measured every 15 seconds.
Each tip of the rain bucket is 0.26 mm of rain.
The following information was obtained by logging messages from the ws28xx.py
driver in weewx and by capturing USB messages between Heavy Weather Pro for
ws2800 and the TFA Primus Weather Station via windows program USB sniffer
busdog64_v0.2.1.
Pairing
The transceiver must be paired with a console before it can receive data. Each
frame sent by the console includes the device identifier of the transceiver
with which it is paired.
Synchronizing
When the console and transceiver stop communicating, they can be synchronized
by one of the following methods:
- Push the SET button on the console
- Wait till the next full hour when the console sends a clock message
In each case a Request Time message is received by the transceiver from the
console. The 'Send Time to WS' message should be sent within ms (10 ms
typical). The transceiver should handle the 'Time SET' message about 125 ms
after the 'Send Time to WS' message. When complete, the console and transceiver
will have been synchronized.
Timing
Outstanding history messages follow each other after 300 - 2600 ms (typical
500 ms). The best polling period appears to be 50 ms, with an average duration
of the polling loop of 3 - 4 ms. This will catch both Clock SET and History
messages. A longer polling period will catch some messages, but often misses
History messages and results in console and transceiver becoming out of synch.
Message Types
The first byte of a message determines the message type.
ID Type Length
01 ? 0x0f (15)
d0 SetRX 0x15 (21)
d1 SetTX 0x15 (21)
d5 SetFrame 0x111 (273)
d6 GetFrame 0x111 (273)
d7 SetState 0x15 (21)
d8 SetPreamblePattern 0x15 (21)
d9 Execute 0x0f (15)
dc ReadConfigFlash< 0x15 (21)
dd ReadConfigFlash> 0x15 (21)
de GetState 0x0a (10)
f0 WriteReg 0x05 (5)
Some messages are decomposed using the following structure:
start position in message buffer
hi-lo data starts on first (hi) or second (lo) nibble
chars data length in characters (nibbles)
rem remark (see below)
name variable
-------------------------------------------------------------------------------
1. 01 message (15 bytes)
000: 01 15 00 0b 08 58 3f 53 00 00 00 00 ff 15 0b (detected via USB sniffer)
000: 01 15 00 57 01 92 3f 53 00 00 00 00 ff 15 0a (detected via USB sniffer)
00: messageID
02-15: ??
-------------------------------------------------------------------------------
2. SetRX message (21 bytes)
000: d0 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00
020: 00
00: messageID
01-20: 00
-------------------------------------------------------------------------------
3. SetTX message (21 bytes)
000: d1 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00
020: 00
00: messageID
01-20: 00
-------------------------------------------------------------------------------
4. SetFrame message (273 bytes)
Action:
00: rtGetHistory ? Ask for History message
01: rtSetTime - Ask for Request Time message
02: rtSetConfig ? Send Config to WS
03: rtGetConfig - Ask for Request Config message
05: rtGetCurrent - Ask for Current Weather message
c0: Send Time - Send Time to WS
000: d5 00 09 DevID 00 CfgCS cIntThisIdx xx xx xx rtGetHistory
000: d5 00 09 DevID 01 CfgCS cIntThisIdx xx xx xx rtSetTime
000: d5 00 30 DevID 02 CfgCS 00 [ConfigData .. .. rtSetConfig
000: d5 00 09 DevID 03 CfgCS cIntThisIdx xx xx xx rtGetConfig
000: d5 00 09 DevID 05 CfgCS cIntThisIdx xx xx xx rtGetCurrent
000: d5 00 0c DevID c0 CfgCS [TimeData . .. .. .. Send Time
All SetFrame messages:
00: messageID
01: 00
02: Message Length (starting with next byte)
03-04: DeviceID [DevID]
05: Action
06-07: Config checksum [CfgCS]
Additional bytes rtGetCurrent, rtGetHistory, rtSetTime messages:
08-09hi: ComInt [cINT] 1.5 bytes (high byte first)
09lo-12: ThisHistoryIndex [ThisIdx] 2.5 bytes (high byte first)
Additional bytes Send Time message:
08: seconds
09: minutes
10: hours
11hi: DayOfWeek
11lo: day_lo (low byte)
12hi: month_lo (low byte)
12lo: day_hi (high byte)
13hi: (year-2000)_lo (low byte)
13lo: month_hi (high byte)
14lo: (year-2000)_hi (high byte)
-------------------------------------------------------------------------------
5. GetFrame message
Response type:
20: WS SetTime / SetConfig - Data written
40: GetConfig
60: Current Weather
80: Actual / Outstanding History
a2: Request SetConfig
a3: Request SetTime
000: 00 00 06 DevID 20 64 CfgCS xx xx xx xx xx xx xx xx xx Time/Config written
000: 00 00 30 DevID 40 64 [ConfigData .. .. .. .. .. .. .. GetConfig
000: 00 00 d7 DevID 60 64 CfgCS [CurData .. .. .. .. .. .. Current Weather
000: 00 00 1e DevID 80 64 CfgCS 0LateIdx 0ThisIdx [HisData Outstanding History
000: 00 00 1e DevID 80 64 CfgCS 0ThisIdx 0ThisIdx [HisData Actual History
000: 00 00 06 DevID a2 64 CfgCS xx xx xx xx xx xx xx xx xx Request SetConfig
000: 00 00 06 DevID a3 64 CfgCS xx xx xx xx xx xx xx xx xx Request SetTime
ReadConfig example:
000: 01 2e 40 5f 36 53 02 00 00 00 00 81 00 04 10 00 82 00 04 20
020: 00 71 41 72 42 00 05 00 00 00 27 10 00 02 83 60 96 01 03 07
040: 21 04 01 00 00 00 CfgCS
WriteConfig example:
000: 01 2e 40 64 36 53 02 00 00 00 00 00 10 04 00 81 00 20 04 00
020: 82 41 71 42 72 00 00 05 00 00 00 10 27 01 96 60 83 02 01 04
040: 21 07 03 10 00 00 CfgCS
00: messageID
01: 00
02: Message Length (starting with next byte)
03-04: DeviceID [devID]
05hi: responseType
06: Quality (in steps of 5)
Additional byte all GetFrame messages except Request SetConfig and Request SetTime:
05lo: BatteryStat
Additional byte Request SetConfig and Request SetTime:
05lo: RequestID
Additional bytes all GetFrame messages except ReadConfig and WriteConfig
07-08: Config checksum [CfgCS]
Additional bytes Outstanding History:
09lo-11: LatestHistoryIndex [LateIdx] 2.5 bytes (Latest to sent)
12lo-14: ThisHistoryIndex [ThisIdx] 2.5 bytes (Outstanding)
Additional bytes Actual History:
09lo-11: LatestHistoryIndex [ThisIdx] 2.5 bytes (LatestHistoryIndex is the same
12lo-14: ThisHistoryIndex [ThisIdx] 2.5 bytes as ThisHistoryIndex)
Additional bytes ReadConfig and WriteConfig
43-45: ResetMinMaxFlags (Output only; not included in checksum calculation)
46-47: Config checksum [CfgCS] (CheckSum = sum of bytes (00-42) + 7)
-------------------------------------------------------------------------------
6. SetState message
000: d7 00 00 00 00 00 00 00 00 00 00 00 00 00 00
00: messageID
01-14: 00
-------------------------------------------------------------------------------
7. SetPreamblePattern message
000: d8 aa 00 00 00 00 00 00 00 00 00 00 00 00 00
00: messageID
01: ??
02-14: 00
-------------------------------------------------------------------------------
8. Execute message
000: d9 05 00 00 00 00 00 00 00 00 00 00 00 00 00
00: messageID
01: ??
02-14: 00
-------------------------------------------------------------------------------
9. ReadConfigFlash in ? receive data
000: dc 0a 01 f5 00 01 78 a0 01 02 0a 0c 0c 01 2e ff ff ff ff ff ? freq. correction
000: dc 0a 01 f9 01 02 0a 0c 0c 01 2e ff ff ff ff ff ff ff ff ff - transceiver data
00: messageID
01: length
02-03: address
Additional bytes frequency correction
05lo-07hi: frequency correction
Additional bytes transceiver data
05-10: serial number
09-10: DeviceID [devID]
-------------------------------------------------------------------------------
10. ReadConfigFlash out ? ask for data
000: dd 0a 01 f5 cc cc cc cc cc cc cc cc cc cc cc ? Ask for frequention correction
000: dd 0a 01 f9 cc cc cc cc cc cc cc cc cc cc cc ? Ask for transceiver data
00: messageID
01: length
02-03: address
04-14: cc
-------------------------------------------------------------------------------
11. GetState message
000: de 14 00 00 00 00 (between SetPreamblePattern and first de16 message)
000: de 15 00 00 00 00 Idle message
000: de 16 00 00 00 00 Normal message
000: de 0b 00 00 00 00 (detected via USB sniffer)
00: messageID
01: stateID
02-05: 00
-------------------------------------------------------------------------------
12. Writereg message
000: f0 08 01 00 00 - AX5051RegisterNames.IFMODE
000: f0 10 01 41 00 - AX5051RegisterNames.MODULATION
000: f0 11 01 07 00 - AX5051RegisterNames.ENCODING
...
000: f0 7b 01 88 00 - AX5051RegisterNames.TXRATEMID
000: f0 7c 01 23 00 - AX5051RegisterNames.TXRATELO
000: f0 7d 01 35 00 - AX5051RegisterNames.TXDRIVER
00: messageID
01: register address
02: 01
03: AX5051RegisterName
04: 00
-------------------------------------------------------------------------------
13. Current Weather message
start hi-lo chars rem name
0 hi 4 DevID
2 hi 2 Action
3 hi 2 Quality
4 hi 4 DeviceCS
6 hi 4 6 _AlarmRingingFlags
8 hi 1 _WeatherTendency
8 lo 1 _WeatherState
9 hi 1 not used
9 lo 10 _TempIndoorMinMax._Max._Time
14 lo 10 _TempIndoorMinMax._Min._Time
19 lo 5 _TempIndoorMinMax._Max._Value
22 hi 5 _TempIndoorMinMax._Min._Value
24 lo 5 _TempIndoor
27 lo 10 _TempOutdoorMinMax._Max._Time
32 lo 10 _TempOutdoorMinMax._Min._Time
37 lo 5 _TempOutdoorMinMax._Max._Value
40 hi 5 _TempOutdoorMinMax._Min._Value
42 lo 5 _TempOutdoor
45 hi 1 not used
45 lo 10 1 _WindchillMinMax._Max._Time
50 lo 10 2 _WindchillMinMax._Min._Time
55 lo 5 1 _WindchillMinMax._Max._Value
57 hi 5 1 _WindchillMinMax._Min._Value
60 lo 6 _Windchill
63 hi 1 not used
63 lo 10 _DewpointMinMax._Max._Time
68 lo 10 _DewpointMinMax._Min._Time
73 lo 5 _DewpointMinMax._Max._Value
76 hi 5 _DewpointMinMax._Min._Value
78 lo 5 _Dewpoint
81 hi 10 _HumidityIndoorMinMax._Max._Time
86 hi 10 _HumidityIndoorMinMax._Min._Time
91 hi 2 _HumidityIndoorMinMax._Max._Value
92 hi 2 _HumidityIndoorMinMax._Min._Value
93 hi 2 _HumidityIndoor
94 hi 10 _HumidityOutdoorMinMax._Max._Time
99 hi 10 _HumidityOutdoorMinMax._Min._Time
104 hi 2 _HumidityOutdoorMinMax._Max._Value
105 hi 2 _HumidityOutdoorMinMax._Min._Value
106 hi 2 _HumidityOutdoor
107 hi 10 3 _RainLastMonthMax._Time
112 hi 6 3 _RainLastMonthMax._Max._Value
115 hi 6 _RainLastMonth
118 hi 10 3 _RainLastWeekMax._Time
123 hi 6 3 _RainLastWeekMax._Max._Value
126 hi 6 _RainLastWeek
129 hi 10 _Rain24HMax._Time
134 hi 6 _Rain24HMax._Max._Value
137 hi 6 _Rain24H
140 hi 10 _Rain24HMax._Time
145 hi 6 _Rain24HMax._Max._Value
148 hi 6 _Rain24H
151 hi 1 not used
152 lo 10 _LastRainReset
158 lo 7 _RainTotal
160 hi 1 _WindDirection5
160 lo 1 _WindDirection4
161 hi 1 _WindDirection3
161 lo 1 _WindDirection2
162 hi 1 _WindDirection1
162 lo 1 _WindDirection
163 hi 18 unknown data
172 hi 6 _WindSpeed
175 hi 1 _GustDirection5
175 lo 1 _GustDirection4
176 hi 1 _GustDirection3
176 lo 1 _GustDirection2
177 hi 1 _GustDirection1
177 lo 1 _GustDirection
178 hi 2 not used
179 hi 10 _GustMax._Max._Time
184 hi 6 _GustMax._Max._Value
187 hi 6 _Gust
190 hi 10 4 _PressureRelative_MinMax._Max/Min._Time
195 hi 5 5 _PressureRelative_inHgMinMax._Max._Value
197 lo 5 5 _PressureRelative_hPaMinMax._Max._Value
200 hi 5 _PressureRelative_inHgMinMax._Max._Value
202 lo 5 _PressureRelative_hPaMinMax._Max._Value
205 hi 5 _PressureRelative_inHgMinMax._Min._Value
207 lo 5 _PressureRelative_hPaMinMax._Min._Value
210 hi 5 _PressureRelative_inHg
212 lo 5 _PressureRelative_hPa
214 lo 430 end
Remarks
1 since factory reset
2 since software reset
3 not used?
4 should be: _PressureRelative_MinMax._Max._Time
5 should be: _PressureRelative_MinMax._Min._Time
8 _AlarmRingingFlags (values in hex)
80 00 = Hi Al Gust
40 00 = Al WindDir
20 00 = One or more WindDirs set
10 00 = Hi Al Rain24H
08 00 = Hi Al Outdoor Humidity
04 00 = Lo Al Outdoor Humidity
02 00 = Hi Al Indoor Humidity
01 00 = Lo Al Indoor Humidity
00 80 = Hi Al Outdoor Temp
00 40 = Lo Al Outdoor Temp
00 20 = Hi Al Indoor Temp
00 10 = Lo Al Indoor Temp
00 08 = Hi Al Pressure
00 04 = Lo Al Pressure
00 02 = not used
00 01 = not used
-------------------------------------------------------------------------------
14. History Message
start hi-lo chars rem name
0 hi 4 DevID
2 hi 2 Action
3 hi 2 Quality
4 hi 4 DeviceCS
6 hi 6 LatestIndex
9 hi 6 ThisIndex
12 hi 1 not used
12 lo 3 m_Gust
14 hi 1 m_WindDirection
14 lo 3 m_WindSpeed
16 hi 3 m_RainCounterRaw
17 lo 2 m_HumidityOutdoor
18 lo 2 m_HumidityIndoor
19 lo 5 m_PressureRelative
22 hi 3 m_TempOutdoor
23 lo 3 m_TempIndoor
25 hi 10 m_Time
29 lo 60 end
-------------------------------------------------------------------------------
15. Set Config Message
start hi-lo chars rem name
0 hi 4 DevID
2 hi 2 Action
3 hi 2 Quality
4 hi 1 1 _WindspeedFormat
4 lo 0,25 2 _RainFormat
4 lo 0,25 3 _PressureFormat
4 lo 0,25 4 _TemperatureFormat
4 lo 0,25 5 _ClockMode
5 hi 1 _WeatherThreshold
5 lo 1 _StormThreshold
6 hi 1 _LowBatFlags
6 lo 1 6 _LCDContrast
7 hi 8 7 _WindDirAlarmFlags (reverse group 1)
8 _OtherAlarmFlags (reverse group 1)
11 hi 10 _TempIndoorMinMax._Min._Value (reverse group 2)
_TempIndoorMinMax._Max._Value (reverse group 2)
16 hi 10 _TempOutdoorMinMax._Min._Value (reverse group 3)
_TempOutdoorMinMax._Max._Value (reverse group 3)
21 hi 2 _HumidityIndoorMinMax._Min._Value
22 hi 2 _HumidityIndoorMinMax._Max._Value
23 hi 2 _HumidityOutdoorMinMax._Min._Value
24 hi 2 _HumidityOutdoorMinMax._Max._Value
25 hi 1 not used
25 lo 7 _Rain24HMax._Max._Value (reverse bytes)
29 hi 2 _HistoryInterval
30 hi 1 not used
30 lo 5 _GustMax._Max._Value (reverse bytes)
33 hi 10 _PressureRelative_hPaMinMax._Min._Value (rev grp4)
_PressureRelative_inHgMinMax._Min._Value(rev grp4)
38 hi 10 _PressureRelative_hPaMinMax._Max._Value (rev grp5)
_PressureRelative_inHgMinMax._Max._Value(rev grp5)
43 hi 6 9 _ResetMinMaxFlags
46 hi 4 10 _InBufCS
47 lo 96 end
Remarks
1 0=m/s 1=knots 2=bft 3=km/h 4=mph
2 0=mm 1=inch
3 0=inHg 2=hPa
4 0=F 1=C
5 0=24h 1=12h
6 values 0-7 => LCD contrast 1-8
7 WindDir Alarms (not-reversed values in hex)
80 00 = NNW
40 00 = NW
20 00 = WNW
10 00 = W
08 00 = WSW
04 00 = SW
02 00 = SSW
01 00 = S
00 80 = SSE
00 40 = SE
00 20 = ESE
00 10 = E
00 08 = ENE
00 04 = NE
00 02 = NNE
00 01 = N
8 Other Alarms (not-reversed values in hex)
80 00 = Hi Al Gust
40 00 = Al WindDir
20 00 = One or more WindDirs set
10 00 = Hi Al Rain24H
08 00 = Hi Al Outdoor Humidity
04 00 = Lo Al Outdoor Humidity
02 00 = Hi Al Indoor Humidity
01 00 = Lo Al Indoor Humidity
00 80 = Hi Al Outdoor Temp
00 40 = Lo Al Outdoor Temp
00 20 = Hi Al Indoor Temp
00 10 = Lo Al Indoor Temp
00 08 = Hi Al Pressure
00 04 = Lo Al Pressure
00 02 = not used
00 01 = not used
9 ResetMinMaxFlags (not-reversed values in hex)
"Output only; not included in checksum calc"
80 00 00 = Reset DewpointMax
40 00 00 = Reset DewpointMin
20 00 00 = not used
10 00 00 = Reset WindchillMin*
"*Reset dateTime only; Min._Value is preserved"
08 00 00 = Reset TempOutMax
04 00 00 = Reset TempOutMin
02 00 00 = Reset TempInMax
01 00 00 = Reset TempInMin
00 80 00 = Reset Gust
00 40 00 = not used
00 20 00 = not used
00 10 00 = not used
00 08 00 = Reset HumOutMax
00 04 00 = Reset HumOutMin
00 02 00 = Reset HumInMax
00 01 00 = Reset HumInMin
00 00 80 = not used
00 00 40 = Reset Rain Total
00 00 20 = Reset last month?
00 00 10 = Reset lastweek?
00 00 08 = Reset Rain24H
00 00 04 = Reset Rain1H
00 00 02 = Reset PresRelMax
00 00 01 = Reset PresRelMin
10 Checksum = sum bytes (0-42) + 7
-------------------------------------------------------------------------------
16. Get Config Message
start hi-lo chars rem name
0 hi 4 DevID
2 hi 2 Action
3 hi 2 Quality
4 hi 1 1 _WindspeedFormat
4 lo 0,25 2 _RainFormat
4 lo 0,25 3 _PressureFormat
4 lo 0,25 4 _TemperatureFormat
4 lo 0,25 5 _ClockMode
5 hi 1 _WeatherThreshold
5 lo 1 _StormThreshold
6 hi 1 _LowBatFlags
6 lo 1 6 _LCDContrast
7 hi 4 7 _WindDirAlarmFlags
9 hi 4 8 _OtherAlarmFlags
11 hi 5 _TempIndoorMinMax._Min._Value
13 lo 5 _TempIndoorMinMax._Max._Value
16 hi 5 _TempOutdoorMinMax._Min._Value
18 lo 5 _TempOutdoorMinMax._Max._Value
21 hi 2 _HumidityIndoorMinMax._Max._Value
22 hi 2 _HumidityIndoorMinMax._Min._Value
23 hi 2 _HumidityOutdoorMinMax._Max._Value
24 hi 2 _HumidityOutdoorMinMax._Min._Value
25 hi 1 not used
25 lo 7 _Rain24HMax._Max._Value
29 hi 2 _HistoryInterval
30 hi 5 _GustMax._Max._Value
32 lo 1 not used
33 hi 5 _PressureRelative_hPaMinMax._Min._Value
35 lo 5 _PressureRelative_inHgMinMax._Min._Value
38 hi 5 _PressureRelative_hPaMinMax._Max._Value
40 lo 5 _PressureRelative_inHgMinMax._Max._Value
43 hi 6 9 _ResetMinMaxFlags
46 hi 4 10 _InBufCS
47 lo 96 end
Remarks
1 0=m/s 1=knots 2=bft 3=km/h 4=mph
2 0=mm 1=inch
3 0=inHg 2=hPa
4 0=F 1=C
5 0=24h 1=12h
6 values 0-7 => LCD contrast 1-8
7 WindDir Alarms (values in hex)
80 00 = NNW
40 00 = NW
20 00 = WNW
10 00 = W
08 00 = WSW
04 00 = SW
02 00 = SSW
01 00 = S
00 80 = SSE
00 40 = SE
00 20 = ESE
00 10 = E
00 08 = ENE
00 04 = NE
00 02 = NNE
00 01 = N
8 Other Alarms (values in hex)
80 00 = Hi Al Gust
40 00 = Al WindDir
20 00 = One or more WindDirs set
10 00 = Hi Al Rain24H
08 00 = Hi Al Outdoor Humidity
04 00 = Lo Al Outdoor Humidity
02 00 = Hi Al Indoor Humidity
01 00 = Lo Al Indoor Humidity
00 80 = Hi Al Outdoor Temp
00 40 = Lo Al Outdoor Temp
00 20 = Hi Al Indoor Temp
00 10 = Lo Al Indoor Temp
00 08 = Hi Al Pressure
00 04 = Lo Al Pressure
00 02 = not used
00 01 = not used
9 ResetMinMaxFlags (values in hex)
"Output only; input = 00 00 00"
10 Checksum = sum bytes (0-42) + 7
-------------------------------------------------------------------------------
Examples of messages
readCurrentWeather
Cur 000: 01 2e 60 5f 05 1b 00 00 12 01 30 62 21 54 41 30 62 40 75 36
Cur 020: 59 00 60 70 06 35 00 01 30 62 31 61 21 30 62 30 55 95 92 00
Cur 040: 53 10 05 37 00 01 30 62 01 90 81 30 62 40 90 66 38 00 49 00
Cur 060: 05 37 00 01 30 62 21 53 01 30 62 22 31 75 51 11 50 40 05 13
Cur 080: 80 13 06 22 21 40 13 06 23 19 37 67 52 59 13 06 23 06 09 13
Cur 100: 06 23 16 19 91 65 86 00 00 00 00 00 00 00 00 00 00 00 00 00
Cur 120: 00 00 00 00 00 00 00 00 00 13 06 23 09 59 00 06 19 00 00 51
Cur 140: 13 06 22 20 43 00 01 54 00 00 00 01 30 62 21 51 00 00 38 70
Cur 160: a7 cc 7b 50 09 01 01 00 00 00 00 00 00 fc 00 a7 cc 7b 14 13
Cur 180: 06 23 14 06 0e a0 00 01 b0 00 13 06 23 06 34 03 00 91 01 92
Cur 200: 03 00 91 01 92 02 97 41 00 74 03 00 91 01 92
WeatherState: Sunny(Good) WeatherTendency: Rising(Up) AlarmRingingFlags: 0000
TempIndoor 23.500 Min:20.700 2013-06-24 07:53 Max:25.900 2013-06-22 15:44
HumidityIndoor 59.000 Min:52.000 2013-06-23 19:37 Max:67.000 2013-06-22 21:40
TempOutdoor 13.700 Min:13.100 2013-06-23 05:59 Max:19.200 2013-06-23 16:12
HumidityOutdoor 86.000 Min:65.000 2013-06-23 16:19 Max:91.000 2013-06-23 06:09
Windchill 13.700 Min: 9.000 2013-06-24 09:06 Max:23.800 2013-06-20 19:08
Dewpoint 11.380 Min:10.400 2013-06-22 23:17 Max:15.111 2013-06-22 15:30
WindSpeed 2.520
Gust 4.320 Max:37.440 2013-06-23 14:06
WindDirection WSW GustDirection WSW
WindDirection1 SSE GustDirection1 SSE
WindDirection2 W GustDirection2 W
WindDirection3 W GustDirection3 W
WindDirection4 SSE GustDirection4 SSE
WindDirection5 SW GustDirection5 SW
RainLastMonth 0.000 Max: 0.000 1900-01-01 00:00
RainLastWeek 0.000 Max: 0.000 1900-01-01 00:00
Rain24H 0.510 Max: 6.190 2013-06-23 09:59
Rain1H 0.000 Max: 1.540 2013-06-22 20:43
RainTotal 3.870 LastRainReset 2013-06-22 15:10
PresRelhPa 1019.200 Min:1007.400 2013-06-23 06:34 Max:1019.200 2013-06-23 06:34
PresRel_inHg 30.090 Min: 29.740 2013-06-23 06:34 Max: 30.090 2013-06-23 06:34
Bytes with unknown meaning at 157-165: 50 09 01 01 00 00 00 00 00
-------------------------------------------------------------------------------
readHistory
His 000: 01 2e 80 5f 05 1b 00 7b 32 00 7b 32 00 0c 70 0a 00 08 65 91
His 020: 01 92 53 76 35 13 06 24 09 10
m_Time 2013-06-24 09:10:00
m_TempIndoor= 23.5
m_HumidityIndoor= 59
m_TempOutdoor= 13.7
m_HumidityOutdoor= 86
m_PressureRelative= 1019.2
m_RainCounterRaw= 0.0
m_WindDirection= SSE
m_WindSpeed= 1.0
m_Gust= 1.2
-------------------------------------------------------------------------------
readConfig
In 000: 01 2e 40 5f 36 53 02 00 00 00 00 81 00 04 10 00 82 00 04 20
In 020: 00 71 41 72 42 00 05 00 00 00 27 10 00 02 83 60 96 01 03 07
In 040: 21 04 01 00 00 00 05 1b
writeConfig
Out 000: 01 2e 40 64 36 53 02 00 00 00 00 00 10 04 00 81 00 20 04 00
Out 020: 82 41 71 42 72 00 00 05 00 00 00 10 27 01 96 60 83 02 01 04
Out 040: 21 07 03 10 00 00 05 1b
OutBufCS= 051b
ClockMode= 0
TemperatureFormat= 1
PressureFormat= 1
RainFormat= 0
WindspeedFormat= 3
WeatherThreshold= 3
StormThreshold= 5
LCDContrast= 2
LowBatFlags= 0
WindDirAlarmFlags= 0000
OtherAlarmFlags= 0000
HistoryInterval= 0
TempIndoor_Min= 1.0
TempIndoor_Max= 41.0
TempOutdoor_Min= 2.0
TempOutdoor_Max= 42.0
HumidityIndoor_Min= 41
HumidityIndoor_Max= 71
HumidityOutdoor_Min= 42
HumidityOutdoor_Max= 72
Rain24HMax= 50.0
GustMax= 100.0
PressureRel_hPa_Min= 960.1
PressureRel_inHg_Min= 28.36
PressureRel_hPa_Max= 1040.1
PressureRel_inHg_Max= 30.72
ResetMinMaxFlags= 100000 (Output only; Input always 00 00 00)
-------------------------------------------------------------------------------
WS SetTime - Send time to WS
Time 000: 01 2e c0 05 1b 19 14 12 40 62 30 01
time sent: 2013-06-24 12:14:19
-------------------------------------------------------------------------------
ReadConfigFlash data
Ask for frequention correction
rcfo 000: dd 0a 01 f5 cc cc cc cc cc cc cc cc cc cc cc
readConfigFlash frequention correction
rcfi 000: dc 0a 01 f5 00 01 78 a0 01 02 0a 0c 0c 01 2e ff ff ff ff ff
frequency correction: 96416 (178a0)
adjusted frequency: 910574957 (3646456d)
Ask for transceiver data
rcfo 000: dd 0a 01 f9 cc cc cc cc cc cc cc cc cc cc cc
readConfigFlash serial number and DevID
rcfi 000: dc 0a 01 f9 01 02 0a 0c 0c 01 2e ff ff ff ff ff ff ff ff ff
transceiver ID: 302 (12e)
transceiver serial: 01021012120146
"""
# TODO: how often is currdat.lst modified with/without hi-speed mode?
# TODO: add conditionals around DataStore and LastStat
# TODO: display time of rain reset
# FIXME: there are inconsistencies in use of 'device checksum' vs 'device id'
from datetime import datetime
from datetime import timedelta
from configobj import ConfigObj
import StringIO
import copy
import math
import platform
import syslog
import threading
import time
import traceback
import usb
import weeutil.weeutil
import weewx.abstractstation
import weewx.units
# version string reported by this driver
DRIVER_VERSION = '0.3'
# name of the pseudo configuration filename
# FIXME: consolidate with stats cache, since config comes from weewx
CFG_CACHE = '/tmp/ws28xx.cfg'
# location of the 'last status' cache file
STATS_CACHE = '/tmp/ws28xx-status.tmp'
# flags for enabling/disabling debug verbosity (0 disables, 1 enables)
DEBUG_WRITES = 0
DEBUG_COMM = 1
def logmsg(dst, msg):
    # Send msg to syslog at priority dst, prefixed with the driver name and
    # the current thread's name (helps distinguish driver threads in logs).
    # NOTE(review): threading.currentThread()/getName() are legacy aliases,
    # consistent with the Python 2 era this driver targets.
    syslog.syslog(dst, 'ws28xx: %s: %s' %
                  (threading.currentThread().getName(), msg))
def logdbg(msg):
    """Log msg at syslog DEBUG priority."""
    logmsg(syslog.LOG_DEBUG, msg)
def loginf(msg):
    """Log msg at syslog INFO priority."""
    logmsg(syslog.LOG_INFO, msg)
def logcrt(msg):
    """Log msg at syslog CRITICAL priority."""
    logmsg(syslog.LOG_CRIT, msg)
def logerr(msg):
    """Log msg at syslog ERROR priority."""
    logmsg(syslog.LOG_ERR, msg)
def log_traceback(dst=syslog.LOG_INFO, prefix='****'):
    """Write the current exception traceback to syslog, one line at a time,
    each prefixed with *prefix*.  Intended to be called from an except block.
    """
    sfd = StringIO.StringIO()  # Python 2 StringIO module
    traceback.print_exc(file=sfd)
    sfd.seek(0)  # rewind so the buffer can be read back line by line
    for line in sfd:
        logmsg(dst, prefix+line)
    del sfd
# noaa definitions for station pressure, altimeter setting, and sea level
# http://www.crh.noaa.gov/bou/awebphp/definitions_pressure.php
# FIXME: this goes in wxformulas
# implementation copied from wview
def sp2ap(sp_mbar, elev_meter):
    """Convert station pressure to sea level pressure.
    http://www.wrh.noaa.gov/slc/projects/wxcalc/formulas/altimeterSetting.pdf

    sp_mbar - station pressure in millibars
    elev_meter - station elevation in meters

    Returns the sea level pressure (altimeter) in millibars, or None if
    either input is None.
    """
    if sp_mbar is None or elev_meter is None:
        return None
    exponent = 0.190284          # standard-atmosphere exponent
    slp = 1013.25                # standard sea-level pressure, mbar
    corrected = sp_mbar - 0.3    # sensor correction used by the NOAA formula
    factor = (slp ** exponent) * 0.0065 / 288
    ratio = elev_meter / (corrected ** exponent)
    return corrected * ((factor * ratio + 1) ** (1 / exponent))
# FIXME: this goes in wxformulas
# implementation copied from wview
def sp2bp(sp_mbar, elev_meter, t_C):
    """Convert station pressure to sea level pressure.

    sp_mbar - station pressure in millibars
    elev_meter - station elevation in meters
    t_C - temperature in degrees Celsius

    Returns the sea level pressure (barometer) in millibars, or None if any
    input is None.
    """
    if sp_mbar is None or elev_meter is None or t_C is None:
        return None
    t_K = t_C + 273.15
    pt = math.exp(-elev_meter / (t_K * 29.263))
    # guard against a zero scale factor rather than dividing by it
    return sp_mbar / pt if pt != 0 else 0
# FIXME: this goes in weeutil.weeutil or weewx.units
def getaltitudeM(config_dict):
    """Return the station altitude in meters.

    The driver needs the altitude in meters in order to calculate relative
    pressure.  Get it from the [Station] section of the weewx configuration
    and convert units as necessary.
    """
    raw = config_dict['Station'].get('altitude', (None, None))
    altitude_t = weeutil.weeutil.option_as_list(raw)
    altitude_vt = (float(altitude_t[0]), altitude_t[1], "group_altitude")
    return weewx.units.convert(altitude_vt, 'meter')[0]
# FIXME: this goes in weeutil.weeutil
# let QC handle rainfall that is too big
def calculate_rain(newtotal, oldtotal):
    """Calculate the rain differential given two cumulative measurements.

    Returns newtotal - oldtotal, or None when either value is missing or
    the counter has wrapped around (in which case the wrap is logged).
    """
    if newtotal is None or oldtotal is None:
        return None
    if newtotal < oldtotal:  # wraparound
        logerr('rain counter wraparound detected: new: %s old: %s' % (newtotal, oldtotal))
        return None
    return newtotal - oldtotal
def loader(config_dict, engine):
    # weewx driver entry point: build a WS28xx station, passing the altitude
    # (in meters, derived from the [Station] section) plus everything in the
    # [WS28xx] section as keyword arguments.
    altitude_m = getaltitudeM(config_dict)
    station = WS28xx(altitude=altitude_m, **config_dict['WS28xx'])
    return station
class WS28xx(weewx.abstractstation.AbstractStation):
    """Driver for LaCrosse WS28xx stations."""

    def __init__(self, **stn_dict) :
        """Initialize the station object.

        altitude: Altitude of the station
        [Required. No default]

        pressure_offset: Calibration offset in millibars for the station
        pressure sensor. This offset is added to the station sensor output
        before barometer and altimeter pressures are calculated.
        [Optional. No Default]

        model: Which station model is this?
        [Optional. Default is 'LaCrosse WS28xx']

        transceiver_frequency: Frequency for transceiver-to-console. Specify
        either US or EU.
        [Required. Default is US]

        polling_interval: How often to sample the USB interface for data.
        [Optional. Default is 30 seconds]

        vendor_id: The USB vendor ID for the transceiver.
        [Optional. Default is 6666]

        product_id: The USB product ID for the transceiver.
        [Optional. Default is 5555]
        """
        self.altitude = stn_dict['altitude']
        self.model = stn_dict.get('model', 'LaCrosse WS28xx')
        self.cfgfile = CFG_CACHE
        self.polling_interval = int(stn_dict.get('polling_interval', 30))
        self.frequency = stn_dict.get('transceiver_frequency', 'US')
        # ids are given as hex strings; base 0 lets int() honor the 0x prefix
        self.vendor_id = int(stn_dict.get('vendor_id', '0x6666'), 0)
        self.product_id = int(stn_dict.get('product_id', '0x5555'), 0)
        self.pressure_offset = stn_dict.get('pressure_offset', None)
        if self.pressure_offset is not None:
            self.pressure_offset = float(self.pressure_offset)
        # communication service thread; created lazily by startup()
        self._service = None
        # last cumulative rain total, for computing per-interval deltas
        self._last_rain = None
        # timestamp of the last observation that was turned into a packet
        self._last_obs_ts = None
        loginf('driver version is %s' % DRIVER_VERSION)
        loginf('frequency is %s' % self.frequency)
        loginf('altitude is %s meters' % str(self.altitude))
        loginf('pressure offset is %s' % str(self.pressure_offset))

    @property
    def hardware_name(self):
        return self.model

    def openPort(self):
        # FIXME: init the usb here
        pass

    def closePort(self):
        # FIXME: shutdown the usb port here
        pass

    def genLoopPackets(self):
        """Generator function that continuously returns decoded packets"""
        self.startup()
        # warn if no new data arrives for maxnodata polling intervals
        maxnodata = 20
        nodata = 0
        while True:
            try:
                packet = self.get_observation()
                if packet is not None:
                    yield packet
                    nodata = 0
                else:
                    nodata += 1
                if nodata >= maxnodata:
                    dur = nodata * self.polling_interval
                    logerr('no new data after %d seconds' % dur)
                    nodata = 0
                time.sleep(self.polling_interval)
            except KeyboardInterrupt:
                self.shutdown()
                raise
            except Exception, e:
                logerr('exception in genLoopPackets: %s' % e)
                if weewx.debug:
                    log_traceback(dst=syslog.LOG_DEBUG)
                raise

    def startup(self):
        # Create and start the RF communication service (idempotent).
        if self._service is not None:
            return
        self._service = CCommunicationService(self.cfgfile)
        self._service.setup(self.frequency)
        self._service.startRFThread()

    def shutdown(self):
        self._service.stopRFThread()
        self._service.teardown()
        self._service = None

    def pair(self, msg_to_console=False, maxtries=0):
        # Pair the transceiver with the console.  maxtries == 0 means retry
        # forever; otherwise raise after maxtries attempts (while/else).
        ntries = 0
        while ntries < maxtries or maxtries == 0:
            if self._service.DataStore.getDeviceRegistered():
                return
            ntries += 1
            msg = 'press [v] key on station console'
            if maxtries > 0:
                msg += ' (attempt %d of %d)' % (ntries, maxtries)
            else:
                msg += ' (attempt %d)' % ntries
            if msg_to_console:
                print msg
            logerr(msg)
            timeout = 30000 # milliseconds
            self._service.DataStore.firstTimeConfig(timeout)
        else:
            raise Exception('Transceiver not paired to console.')

    def check_transceiver(self, msg_to_console=False, maxtries=3):
        # Verify the USB transceiver is present, retrying every 5 seconds up
        # to maxtries times; raises when it never shows up (while/else).
        ntries = 0
        while ntries < maxtries:
            ntries += 1
            t = self._service.DataStore.getFlag_FLAG_TRANSCEIVER_PRESENT()
            if t:
                msg = 'transceiver is present'
            else:
                msg = 'transceiver not found (attempt %d of %d)' % (
                    ntries, maxtries)
            if msg_to_console:
                print msg
            loginf(msg)
            if t:
                return
            time.sleep(5)
        else:
            raise Exception('Transceiver not responding.')

    def get_datum_diff(self, v, np):
        # Return v unless it is (approximately) the 'no data' sentinel np.
        if abs(np - v) > 0.001:
            return v
        return None

    def get_datum_match(self, v, np):
        # Return v unless it exactly equals the 'no data' sentinel np.
        if np != v:
            return v
        return None

    def get_observation(self):
        # Build a weewx LOOP packet from the current weather data, or return
        # None when there is no new observation since the last call.
        ts = self._service.DataStore.CurrentWeather._timestamp
        if ts is None:
            return None
        if self._last_obs_ts is not None and self._last_obs_ts == ts:
            return None
        self._last_obs_ts = ts

        # add elements required for weewx LOOP packets
        packet = {}
        packet['usUnits'] = weewx.METRIC
        packet['dateTime'] = int(ts + 0.5)

        # data from the station sensors
        packet['inTemp'] = self.get_datum_diff(
            self._service.DataStore.CurrentWeather._TempIndoor,
            CWeatherTraits.TemperatureNP())
        packet['inHumidity'] = self.get_datum_diff(
            self._service.DataStore.CurrentWeather._HumidityIndoor,
            CWeatherTraits.HumidityNP())
        packet['outTemp'] = self.get_datum_diff(
            self._service.DataStore.CurrentWeather._TempOutdoor,
            CWeatherTraits.TemperatureNP())
        packet['outHumidity'] = self.get_datum_diff(
            self._service.DataStore.CurrentWeather._HumidityOutdoor,
            CWeatherTraits.HumidityNP())
        packet['pressure'] = self.get_datum_diff(
            self._service.DataStore.CurrentWeather._PressureRelative_hPa,
            CWeatherTraits.PressureNP())
        packet['windSpeed'] = self.get_datum_diff(
            self._service.DataStore.CurrentWeather._WindSpeed,
            CWeatherTraits.WindNP())
        packet['windGust'] = self.get_datum_diff(
            self._service.DataStore.CurrentWeather._Gust,
            CWeatherTraits.WindNP())
        if packet['windSpeed'] is not None and packet['windSpeed'] > 0:
            packet['windSpeed'] *= 3.6 # weewx wants km/h
            # direction code 0..15 scaled to degrees (python 2 int division)
            packet['windDir'] = self._service.DataStore.CurrentWeather._WindDirection * 360 / 16
        else:
            packet['windDir'] = None
        if packet['windGust'] is not None and packet['windGust'] > 0:
            packet['windGust'] *= 3.6 # weewx wants km/h
            packet['windGustDir'] = self._service.DataStore.CurrentWeather._GustDirection * 360 / 16
        else:
            packet['windGustDir'] = None

        # calculated elements not directly reported by station
        packet['rainRate'] = self.get_datum_match(
            self._service.DataStore.CurrentWeather._Rain1H,
            CWeatherTraits.RainNP())
        if packet['rainRate'] is not None:
            packet['rainRate'] /= 10 # weewx wants cm/hr
        rain_total = self.get_datum_match(
            self._service.DataStore.CurrentWeather._RainTotal,
            CWeatherTraits.RainNP())
        delta = calculate_rain(rain_total, self._last_rain)
        self._last_rain = rain_total
        packet['rain'] = delta
        if packet['rain'] is not None:
            packet['rain'] /= 10 # weewx wants cm/hr
        # NOTE(review): weewx.wxformulas is not explicitly imported at the top
        # of this file; presumably it is pulled in via the weewx package.
        packet['heatindex'] = weewx.wxformulas.heatindexC(
            packet['outTemp'], packet['outHumidity'])
        packet['dewpoint'] = weewx.wxformulas.dewpointC(
            packet['outTemp'], packet['outHumidity'])
        packet['windchill'] = weewx.wxformulas.windchillC(
            packet['outTemp'], packet['windSpeed'])

        # station reports gauge pressure, must calculate other pressures
        adjp = packet['pressure']
        if self.pressure_offset is not None and adjp is not None:
            adjp += self.pressure_offset
        packet['barometer'] = sp2bp(adjp, self.altitude, packet['outTemp'])
        packet['altimeter'] = sp2ap(adjp, self.altitude)

        # track the signal strength and battery levels
        packet['signal'] = self._service.DataStore.LastStat.LastLinkQuality
        packet['battery'] = self._service.DataStore.LastStat.LastBatteryStatus
        return packet

    def get_config(self):
        logdbg('get station configuration')
        self._service.DataStore.getConfig()
# Format frames for diagnostics and debugging.
def log_frame(n, buf):
    """Log the first n bytes of buf via logdbg, 16 hex bytes per line.

    Uses range (rather than python-2-only xrange) and drops the redundant
    str() wrappers; behavior is otherwise unchanged.
    """
    logdbg('frame length is %d' % n)
    strbuf = ''
    for i in range(n):
        strbuf += '%02x ' % buf[i]
        if (i+1) % 16 == 0:      # flush a full row of 16 bytes
            logdbg(strbuf)
            strbuf = ''
    if len(strbuf) > 0:          # flush any partial final row
        logdbg(strbuf)
def frame2str(n, buf):
    """Return the first n bytes of buf as a contiguous lowercase hex string.

    ''.join avoids the quadratic string concatenation of the original loop,
    and range (rather than python-2-only xrange) works on python 2 and 3.
    """
    return ''.join('%.2x' % buf[i] for i in range(n))
# The following classes and methods are adapted from the implementation by
# eddie de pieri, which is in turn based on the HeavyWeather implementation.
class BitHandling:
    """Static helpers for single-bit manipulation of integers."""

    @staticmethod
    def testBit(int_type, offset):
        # nonzero (2**offset) iff the bit at 'offset' is set
        return int_type & (1 << offset)

    @staticmethod
    def setBit(int_type, offset):
        # copy of int_type with the bit at 'offset' set to 1
        return int_type | (1 << offset)

    @staticmethod
    def setBitVal(int_type, offset, val):
        # copy of int_type with 'val' OR-ed in at bit position 'offset'
        return int_type | (val << offset)

    @staticmethod
    def clearBit(int_type, offset):
        # copy of int_type with the bit at 'offset' cleared
        return int_type & ~(1 << offset)

    @staticmethod
    def toggleBit(int_type, offset):
        # copy of int_type with the bit at 'offset' inverted
        return int_type ^ (1 << offset)
class EHistoryInterval:
    # Console history-recording interval codes (minutes/hours).
    hi01Min = 0
    hi05Min = 1
    hi10Min = 2
    hi15Min = 3
    hi20Min = 4
    hi30Min = 5
    hi60Min = 6
    hi02Std = 7
    hi04Std = 8
    hi06Std = 9
    hi08Std = 0xA
    hi12Std = 0xB
    hi24Std = 0xC
class EWindspeedFormat:
    # Console wind-speed display unit codes.
    wfMs = 0
    wfKnots = 1
    wfBFT = 2
    wfKmh = 3
    wfMph = 4
class ERainFormat:
    # Console rain display unit codes.
    rfMm = 0
    rfInch = 1
class EPressureFormat:
    # Console pressure display unit codes.
    pfinHg = 0
    pfHPa = 1
class ETemperatureFormat:
    # Console temperature display unit codes.
    tfFahrenheit = 0
    tfCelsius = 1
class EClockMode:
    # Console clock display mode codes.
    ct24H = 0
    ctAmPm = 1
class EWeatherTendency:
    # Weather tendency codes reported by the station.
    TREND_NEUTRAL = 0
    TREND_UP = 1
    TREND_DOWN = 2
    TREND_ERR = 3
class EWeatherState:
    # Weather state (forecast) codes reported by the station.
    WEATHER_BAD = 0
    WEATHER_NEUTRAL = 1
    WEATHER_GOOD = 2
    WEATHER_ERR = 3
class EWindDirection:
    # 16-point compass direction codes, plus error/invalid markers.
    wdN = 0
    wdNNE = 1
    wdNE = 2
    wdENE = 3
    wdE = 4
    wdESE = 5
    wdSE = 6
    wdSSE = 7
    wdS = 8
    wdSSW = 9
    wdSW = 0x0A
    wdWSW = 0x0B
    wdW = 0x0C
    wdWNW = 0x0D
    wdNW = 0x0E
    wdNNW = 0x0F
    wdERR = 0x10
    wdInvalid = 0x11
class EResetMinMaxFlags:
    # Codes identifying which min/max value to reset on the console.
    rmTempIndoorHi = 0
    rmTempIndoorLo = 1
    rmTempOutdoorHi = 2
    rmTempOutdoorLo = 3
    rmWindchillHi = 4
    rmWindchillLo = 5
    rmDewpointHi = 6
    rmDewpointLo = 7
    rmHumidityIndoorLo = 8
    rmHumidityIndoorHi = 9
    rmHumidityOutdoorLo = 0x0A
    rmHumidityOutdoorHi = 0x0B
    rmWindspeedHi = 0x0C
    rmWindspeedLo = 0x0D
    rmGustHi = 0x0E
    rmGustLo = 0x0F
    rmPressureLo = 0x10
    rmPressureHi = 0x11
    rmRain1hHi = 0x12
    rmRain24hHi = 0x13
    rmRainLastWeekHi = 0x14
    rmRainLastMonthHi = 0x15
    rmRainTotal = 0x16
    rmInvalid = 0x17
class ERequestType:
    # Codes for requests sent to the communication service.
    rtGetCurrent = 0
    rtGetHistory = 1
    rtGetConfig = 2
    rtSetConfig = 3
    rtSetTime = 4
    rtFirstConfig = 5
    rtINVALID = 6
class ERequestState:
    # Life-cycle state codes for a request to the communication service.
    rsQueued = 0
    rsRunning = 1
    rsFinished = 2
    rsPreamble = 3
    rsWaitDevice = 4
    rsWaitConfig = 5
    rsError = 6
    rsChanged = 7
    rsINVALID = 8
# frequency standards and their associated transmission frequencies
class EFrequency:
    # Frequency-standard names and their transmission frequencies in Hz.
    fsUS = 'US'
    tfUS = 905000000
    fsEU = 'EU'
    tfEU = 868300000
def getFrequency(standard):
    """Map a frequency-standard name ('US' or 'EU') to its frequency in Hz.

    Unknown names are logged and fall back to the US frequency.
    """
    if standard == EFrequency.fsEU:
        return EFrequency.tfEU
    if standard == EFrequency.fsUS:
        return EFrequency.tfUS
    logerr("unknown frequency standard '%s', using US" % standard)
    return EFrequency.tfUS
def getFrequencyStandard(frequency):
    """Map a transmission frequency in Hz back to its standard name.

    Unknown frequencies are logged and fall back to 'US'.
    """
    if frequency == EFrequency.tfEU:
        return EFrequency.fsEU
    if frequency == EFrequency.tfUS:
        return EFrequency.fsUS
    logerr("unknown frequency '%s', using US" % frequency)
    return EFrequency.fsUS
class CWeatherTraits(object):
    """Lookup tables and sentinel values shared by the weather decoders.

    The *NP() values mark 'no data' readings; the *OFL() values mark sensor
    overflow readings.  windDirMap translates EWindDirection codes (0..15
    compass points, 16 error, 17 invalid) to labels.
    """
    # fix: entry 15 was "NWN", a typo for "NNW" (north-northwest, matching
    # EWindDirection.wdNNW = 0x0F on the 16-point compass rose)
    windDirMap = {
        0:"N", 1:"NNE", 2:"NE", 3:"ENE", 4:"E", 5:"ESE", 6:"SE", 7:"SSE",
        8:"S", 9:"SSW", 10:"SW", 11:"WSW", 12:"W", 13:"WNW", 14:"NW",
        15:"NNW", 16:"err", 17:"inv" }
    forecastMap = {
        0:"Rainy(Bad)", 1:"Cloudy(Neutral)", 2:"Sunny(Good)", 3:"Error" }
    trends = {
        0:"Stable(Neutral)", 1:"Rising(Up)", 2:"Falling(Down)", 3:"Error" }

    @staticmethod
    def TemperatureNP():
        return 81.099998

    @staticmethod
    def TemperatureOFL():
        return 136.0

    @staticmethod
    def PressureNP():
        return 10101010.0

    @staticmethod
    def PressureOFL():
        return 16666.5

    @staticmethod
    def HumidityNP():
        return 110.0

    @staticmethod
    def HumidityOFL():
        return 121.0

    @staticmethod
    def RainNP():
        return -0.2

    @staticmethod
    def RainOFL():
        return 16666.664

    @staticmethod
    def WindNP():
        return 51.0

    @staticmethod
    def WindOFL():
        return 51.099998

    @staticmethod
    def TemperatureOffset():
        # raw temperatures are transmitted with a +40 degree bias
        return 40.0
class CMeasurement:
    # A single measurement: value, reset flag, error/overflow markers, time.
    # NOTE(review): these are class attributes, shared by all instances until
    # first assignment; _Time in particular is evaluated once at import time,
    # not per instance -- confirm this is intended.
    _Value = 0.0
    _ResetFlag = 23
    _IsError = 1
    _IsOverflow = 1
    _Time = time.time()

    def Reset(self):
        # Restore the defaults; note that _Time is not reset here.
        self._Value = 0.0
        self._ResetFlag = 23
        self._IsError = 1
        self._IsOverflow = 1
class CMinMaxMeasurement(object):
    """A pair of measurements tracking a minimum and a maximum."""

    def __init__(self):
        self._Min, self._Max = CMeasurement(), CMeasurement()
# firmware XXX has bogus date values for these fields
# (toDateTime suppresses the conversion-error log for these labels)
_bad_labels = ['RainLastMonthMax','RainLastWeekMax','PressureRelativeMin']
class USBHardware(object):
    # Static decoders for the raw nibble-packed buffers read from the USB
    # transceiver.  All readers take 'buf' as a one-element list wrapping a
    # byte sequence (buf[0]), a 'start' byte offset, and 'StartOnHiNibble'
    # selecting whether the value begins in the high or the low nibble of
    # the first byte.  A nibble of 15 marks overflow; nibbles 10..14 mark
    # an error reading.

    @staticmethod
    def isOFL2(buf, start, StartOnHiNibble):
        # true if either of 2 consecutive nibbles is the overflow marker (15)
        if StartOnHiNibble :
            result = (buf[0][start+0] >> 4) == 15 \
                or (buf[0][start+0] & 0xF) == 15
        else:
            result = (buf[0][start+0] & 0xF) == 15 \
                or (buf[0][start+1] >> 4) == 15
        return result

    @staticmethod
    def isOFL3(buf, start, StartOnHiNibble):
        # true if any of 3 consecutive nibbles is the overflow marker (15)
        if StartOnHiNibble :
            result = (buf[0][start+0] >> 4) == 15 \
                or (buf[0][start+0] & 0xF) == 15 \
                or (buf[0][start+1] >> 4) == 15
        else:
            result = (buf[0][start+0] & 0xF) == 15 \
                or (buf[0][start+1] >> 4) == 15 \
                or (buf[0][start+1] & 0xF) == 15
        return result

    @staticmethod
    def isOFL5(buf, start, StartOnHiNibble):
        # true if any of 5 consecutive nibbles is the overflow marker (15)
        if StartOnHiNibble :
            result = (buf[0][start+0] >> 4) == 15 \
                or (buf[0][start+0] & 0xF) == 15 \
                or (buf[0][start+1] >> 4) == 15 \
                or (buf[0][start+1] & 0xF) == 15 \
                or (buf[0][start+2] >> 4) == 15
        else:
            result = (buf[0][start+0] & 0xF) == 15 \
                or (buf[0][start+1] >> 4) == 15 \
                or (buf[0][start+1] & 0xF) == 15 \
                or (buf[0][start+2] >> 4) == 15 \
                or (buf[0][start+2] & 0xF) == 15
        return result

    @staticmethod
    def isErr2(buf, start, StartOnHiNibble):
        # true if either of 2 nibbles is an error marker (10..14).
        # 'and' binds tighter than 'or': each nibble test is (>= 10 and != 15).
        if StartOnHiNibble :
            result = (buf[0][start+0] >> 4) >= 10 \
                and (buf[0][start+0] >> 4) != 15 \
                or (buf[0][start+0] & 0xF) >= 10 \
                and (buf[0][start+0] & 0xF) != 15
        else:
            result = (buf[0][start+0] & 0xF) >= 10 \
                and (buf[0][start+0] & 0xF) != 15 \
                or (buf[0][start+1] >> 4) >= 10 \
                and (buf[0][start+1] >> 4) != 15
        return result

    @staticmethod
    def isErr3(buf, start, StartOnHiNibble):
        # true if any of 3 nibbles is an error marker (10..14)
        if StartOnHiNibble :
            result = (buf[0][start+0] >> 4) >= 10 \
                and (buf[0][start+0] >> 4) != 15 \
                or (buf[0][start+0] & 0xF) >= 10 \
                and (buf[0][start+0] & 0xF) != 15 \
                or (buf[0][start+1] >> 4) >= 10 \
                and (buf[0][start+1] >> 4) != 15
        else:
            result = (buf[0][start+0] & 0xF) >= 10 \
                and (buf[0][start+0] & 0xF) != 15 \
                or (buf[0][start+1] >> 4) >= 10 \
                and (buf[0][start+1] >> 4) != 15 \
                or (buf[0][start+1] & 0xF) >= 10 \
                and (buf[0][start+1] & 0xF) != 15
        return result

    @staticmethod
    def isErr5(buf, start, StartOnHiNibble):
        # true if any of 5 nibbles is an error marker (10..14)
        if StartOnHiNibble :
            result = (buf[0][start+0] >> 4) >= 10 \
                and (buf[0][start+0] >> 4) != 15 \
                or (buf[0][start+0] & 0xF) >= 10 \
                and (buf[0][start+0] & 0xF) != 15 \
                or (buf[0][start+1] >> 4) >= 10 \
                and (buf[0][start+1] >> 4) != 15 \
                or (buf[0][start+1] & 0xF) >= 10 \
                and (buf[0][start+1] & 0xF) != 15 \
                or (buf[0][start+2] >> 4) >= 10 \
                and (buf[0][start+2] >> 4) != 15
        else:
            result = (buf[0][start+0] & 0xF) >= 10 \
                and (buf[0][start+0] & 0xF) != 15 \
                or (buf[0][start+1] >> 4) >= 10 \
                and (buf[0][start+1] >> 4) != 15 \
                or (buf[0][start+1] & 0xF) >= 10 \
                and (buf[0][start+1] & 0xF) != 15 \
                or (buf[0][start+2] >> 4) >= 10 \
                and (buf[0][start+2] >> 4) != 15 \
                or (buf[0][start+2] & 0xF) >= 10 \
                and (buf[0][start+2] & 0xF) != 15
        return result

    @staticmethod
    def reverseByteOrder(buf, start, Count):
        # In-place reversal of Count bytes of buf[0] beginning at 'start'.
        nbuf=buf[0]
        for i in xrange(0, Count >> 1):
            tmp = nbuf[start + i]
            nbuf[start + i] = nbuf[start + Count - i - 1]
            nbuf[start + Count - i - 1 ] = tmp
        buf[0]=nbuf

    @staticmethod
    def readWindDirectionShared(buf, start):
        # Return the (low nibble, high nibble) direction codes of one byte.
        return (buf[0][0+start] & 0xF, buf[0][start] >> 4)

    @staticmethod
    def toInt_2(buf, start, StartOnHiNibble):
        '''read 2 nibbles'''
        # two BCD digits -> integer 0..99
        if StartOnHiNibble:
            rawpre = (buf[0][start+0] >> 4)* 10 \
                + (buf[0][start+0] & 0xF)* 1
        else:
            rawpre = (buf[0][start+0] & 0xF)* 10 \
                + (buf[0][start+1] >> 4)* 1
        return rawpre

    @staticmethod
    def toRain_7_3(buf, start, StartOnHiNibble):
        '''read 7 nibbles, presentation with 3 decimals'''
        # sentinel checks first: error -> RainNP, overflow -> RainOFL
        if ( USBHardware.isErr2(buf, start+0, StartOnHiNibble) or
                USBHardware.isErr5(buf, start+1, StartOnHiNibble)):
            result = CWeatherTraits.RainNP()
        elif ( USBHardware.isOFL2(buf, start+0, StartOnHiNibble) or
                USBHardware.isOFL5(buf, start+1, StartOnHiNibble) ):
            result = CWeatherTraits.RainOFL()
        elif StartOnHiNibble:
            result = (buf[0][start+0] >> 4)* 1000 \
                + (buf[0][start+0] & 0xF)* 100 \
                + (buf[0][start+1] >> 4)* 10 \
                + (buf[0][start+1] & 0xF)* 1 \
                + (buf[0][start+2] >> 4)* 0.1 \
                + (buf[0][start+2] & 0xF)* 0.01 \
                + (buf[0][start+3] >> 4)* 0.001
        else:
            result = (buf[0][start+0] & 0xF)* 1000 \
                + (buf[0][start+1] >> 4)* 100 \
                + (buf[0][start+1] & 0xF)* 10 \
                + (buf[0][start+2] >> 4)* 1 \
                + (buf[0][start+2] & 0xF)* 0.1 \
                + (buf[0][start+3] >> 4)* 0.01 \
                + (buf[0][start+3] & 0xF)* 0.001
        return result

    @staticmethod
    def toRain_6_2(buf, start, StartOnHiNibble):
        '''read 6 nibbles, presentation with 2 decimals'''
        if ( USBHardware.isErr2(buf, start+0, StartOnHiNibble) or
                USBHardware.isErr2(buf, start+1, StartOnHiNibble) or
                USBHardware.isErr2(buf, start+2, StartOnHiNibble) ):
            result = CWeatherTraits.RainNP()
        elif ( USBHardware.isOFL2(buf, start+0, StartOnHiNibble) or
                USBHardware.isOFL2(buf, start+1, StartOnHiNibble) or
                USBHardware.isOFL2(buf, start+2, StartOnHiNibble) ):
            result = CWeatherTraits.RainOFL()
        elif StartOnHiNibble:
            result = (buf[0][start+0] >> 4)* 1000 \
                + (buf[0][start+0] & 0xF)* 100 \
                + (buf[0][start+1] >> 4)* 10 \
                + (buf[0][start+1] & 0xF)* 1 \
                + (buf[0][start+2] >> 4)* 0.1 \
                + (buf[0][start+2] & 0xF)* 0.01
        else:
            # NOTE(review): this branch reads buf[0][start+3], i.e. a 7th
            # nibble, for the 0.01 digit -- verify against the hi-nibble case.
            result = (buf[0][start+0] & 0xF)* 1000 \
                + (buf[0][start+1] >> 4)* 100 \
                + (buf[0][start+1] & 0xF)* 10 \
                + (buf[0][start+2] >> 4)* 1 \
                + (buf[0][start+2] & 0xF)* 0.1 \
                + (buf[0][start+3] >> 4)* 0.01
        return result

    @staticmethod
    def toRain_3_1(buf, start, StartOnHiNibble):
        '''read 3 nibbles, presentation with 1 decimal'''
        # 0xFFE marks 'no data', 0xFFF marks overflow
        if StartOnHiNibble :
            hibyte = buf[0][start+0]
            lobyte = (buf[0][start+1] >> 4) & 0xF
        else:
            hibyte = 16*(buf[0][start+0] & 0xF) + ((buf[0][start+1] >> 4) & 0xF)
            lobyte = buf[0][start+1] & 0xF
        if hibyte == 0xFF and lobyte == 0xE :
            result = CWeatherTraits.RainNP()
        elif hibyte == 0xFF and lobyte == 0xF :
            result = CWeatherTraits.RainOFL()
        else:
            val = USBHardware.toFloat_3_1(buf, start, StartOnHiNibble)
            result = val
        return result

    @staticmethod
    def toFloat_3_1(buf, start, StartOnHiNibble):
        '''read 3 nibbles, presentation with 1 decimal'''
        # base-16 positional value of 3 nibbles, divided by 10
        if StartOnHiNibble:
            result = (buf[0][start+0] >> 4)*16**2 \
                + (buf[0][start+0] & 0xF)* 16**1 \
                + (buf[0][start+1] >> 4)* 16**0
        else:
            result = (buf[0][start+0] & 0xF)*16**2 \
                + (buf[0][start+1] >> 4)* 16**1 \
                + (buf[0][start+1] & 0xF)* 16**0
        result = result / 10.0
        return result

    @staticmethod
    def toDateTime(buf, start, StartOnHiNibble, label):
        '''read 10 nibbles, presentation as DateTime'''
        # five 2-digit BCD fields: yy mm dd hh mm (year is 2000-based)
        result = None
        if ( USBHardware.isErr2(buf, start+0, StartOnHiNibble)
                or USBHardware.isErr2(buf, start+1, StartOnHiNibble)
                or USBHardware.isErr2(buf, start+2, StartOnHiNibble)
                or USBHardware.isErr2(buf, start+3, StartOnHiNibble)
                or USBHardware.isErr2(buf, start+4, StartOnHiNibble) ):
            logerr('ToDateTime: bogus date for %s: error status in buffer' %
                   label)
        else:
            year = USBHardware.toInt_2(buf, start+0, StartOnHiNibble) + 2000
            month = USBHardware.toInt_2(buf, start+1, StartOnHiNibble)
            days = USBHardware.toInt_2(buf, start+2, StartOnHiNibble)
            hours = USBHardware.toInt_2(buf, start+3, StartOnHiNibble)
            minutes = USBHardware.toInt_2(buf, start+4, StartOnHiNibble)
            try:
                result = datetime(year, month, days, hours, minutes)
            except:
                # known-bogus firmware fields are silently skipped
                if label not in _bad_labels:
                    logerr(('ToDateTime: bogus date for %s:'
                            ' bad date conversion from'
                            ' %s %s %s %s %s') %
                           (label, minutes, hours, days, month, year))
        if result is None:
            # FIXME: use None instead of a really old date to indicate invalid
            result = datetime(1900, 01, 01, 00, 00)
        return result

    @staticmethod
    def toHumidity_2_0(buf, start, StartOnHiNibble):
        '''read 2 nibbles, presentation with 0 decimal'''
        if USBHardware.isErr2(buf, start+0, StartOnHiNibble) :
            result = CWeatherTraits.HumidityNP()
        elif USBHardware.isOFL2(buf, start+0, StartOnHiNibble) :
            result = CWeatherTraits.HumidityOFL()
        else:
            result = USBHardware.toInt_2(buf, start, StartOnHiNibble)
        return result

    @staticmethod
    def toTemperature_5_3(buf, start, StartOnHiNibble):
        '''read 5 nibbles, presentation with 3 decimals'''
        # raw value carries a +40 degree bias (TemperatureOffset)
        if USBHardware.isErr5(buf, start+0, StartOnHiNibble) :
            result = CWeatherTraits.TemperatureNP()
        elif USBHardware.isOFL5(buf, start+0, StartOnHiNibble) :
            result = CWeatherTraits.TemperatureOFL()
        else:
            if StartOnHiNibble:
                rawtemp = (buf[0][start+0] >> 4)* 10 \
                    + (buf[0][start+0] & 0xF)* 1 \
                    + (buf[0][start+1] >> 4)* 0.1 \
                    + (buf[0][start+1] & 0xF)* 0.01 \
                    + (buf[0][start+2] >> 4)* 0.001
            else:
                rawtemp = (buf[0][start+0] & 0xF)* 10 \
                    + (buf[0][start+1] >> 4)* 1 \
                    + (buf[0][start+1] & 0xF)* 0.1 \
                    + (buf[0][start+2] >> 4)* 0.01 \
                    + (buf[0][start+2] & 0xF)* 0.001
            result = rawtemp - CWeatherTraits.TemperatureOffset()
        return result

    @staticmethod
    def toTemperature_3_1(buf, start, StartOnHiNibble):
        '''read 3 nibbles, presentation with 1 decimal'''
        if USBHardware.isErr3(buf, start+0, StartOnHiNibble) :
            result = CWeatherTraits.TemperatureNP()
        elif USBHardware.isOFL3(buf, start+0, StartOnHiNibble) :
            result = CWeatherTraits.TemperatureOFL()
        else:
            if StartOnHiNibble :
                rawtemp = (buf[0][start+0] >> 4)* 10 \
                    + (buf[0][start+0] & 0xF)* 1 \
                    + (buf[0][start+1] >> 4)* 0.1
            else:
                rawtemp = (buf[0][start+0] & 0xF)* 10 \
                    + (buf[0][start+1] >> 4)* 1 \
                    + (buf[0][start+1] & 0xF)* 0.1
            result = rawtemp - CWeatherTraits.TemperatureOffset()
        return result

    @staticmethod
    def toWindspeed_5_2(buf, start, StartOnHiNibble):
        '''read 5 nibbles, presentation with 2 decimals'''
        # NOTE(review): the low-nibble branch uses weight 16**5 twice and
        # never 16**4, unlike the hi-nibble branch -- looks suspect; verify
        # against a reference decode before relying on it.
        if StartOnHiNibble:
            result = (buf[0][start+2] >> 4)* 16**6 \
                + (buf[0][start+0] >> 4)* 16**5 \
                + (buf[0][start+0] & 0xF)* 16**4 \
                + (buf[0][start+1] >> 4)* 16**3 \
                + (buf[0][start+1] & 0xF)* 16**2
        else:
            result = (buf[0][start+2] >> 4)* 16**6 \
                + (buf[0][start+2] & 0xF)* 16**5 \
                + (buf[0][start+0] >> 4)* 16**5 \
                + (buf[0][start+1] & 0xF)* 16**3 \
                + (buf[0][start+1] >> 4)* 16**2
        result = result / 256.0 / 100.0
        return result

    @staticmethod
    def toWindspeed_3_1(buf, start, StartOnHiNibble):
        '''read 3 nibbles, presentation with 1 decimal'''
        # 0xFFE marks 'no data', 0xFFF marks overflow
        if StartOnHiNibble :
            hibyte = buf[0][start+0]
            lobyte = (buf[0][start+1] >> 4) & 0xF
        else:
            hibyte = 16*(buf[0][start+0] & 0xF) + ((buf[0][start+1] >> 4) & 0xF)
            lobyte = buf[0][start+1] & 0xF
        if hibyte == 0xFF and lobyte == 0xE :
            result = CWeatherTraits.WindNP()
        elif hibyte == 0xFF and lobyte == 0xF :
            result = CWeatherTraits.WindOFL()
        else:
            val = USBHardware.toFloat_3_1(buf, start, StartOnHiNibble)
            result = val
        return result

    @staticmethod
    def readPressureShared(buf, start, StartOnHiNibble):
        # Return (hPa, inHg) pressures; the inHg value starts 2 bytes later
        # on the opposite nibble (hence 1-StartOnHiNibble).
        return ( USBHardware.toPressure_hPa_5_1(buf, start+2, 1-StartOnHiNibble) ,
                 USBHardware.toPressure_inHg_5_2(buf, start, StartOnHiNibble))

    @staticmethod
    def toPressure_hPa_5_1(buf, start, StartOnHiNibble):
        '''read 5 nibbles, presentation with 1 decimal'''
        if USBHardware.isErr5(buf, start+0, StartOnHiNibble) :
            result = CWeatherTraits.PressureNP()
        elif USBHardware.isOFL5(buf, start+0, StartOnHiNibble) :
            result = CWeatherTraits.PressureOFL()
        elif StartOnHiNibble :
            result = (buf[0][start+0] >> 4)* 1000 \
                + (buf[0][start+0] & 0xF)* 100 \
                + (buf[0][start+1] >> 4)* 10 \
                + (buf[0][start+1] & 0xF)* 1 \
                + (buf[0][start+2] >> 4)* 0.1
        else:
            result = (buf[0][start+0] & 0xF)* 1000 \
                + (buf[0][start+1] >> 4)* 100 \
                + (buf[0][start+1] & 0xF)* 10 \
                + (buf[0][start+2] >> 4)* 1 \
                + (buf[0][start+2] & 0xF)* 0.1
        return result

    @staticmethod
    def toPressure_inHg_5_2(buf, start, StartOnHiNibble):
        '''read 5 nibbles, presentation with 2 decimals'''
        if USBHardware.isErr5(buf, start+0, StartOnHiNibble) :
            result = CWeatherTraits.PressureNP()
        elif USBHardware.isOFL5(buf, start+0, StartOnHiNibble) :
            result = CWeatherTraits.PressureOFL()
        elif StartOnHiNibble :
            result = (buf[0][start+0] >> 4)* 100 \
                + (buf[0][start+0] & 0xF)* 10 \
                + (buf[0][start+1] >> 4)* 1 \
                + (buf[0][start+1] & 0xF)* 0.1 \
                + (buf[0][start+2] >> 4)* 0.01
        else:
            result = (buf[0][start+0] & 0xF)* 100 \
                + (buf[0][start+1] >> 4)* 10 \
                + (buf[0][start+1] & 0xF)* 1 \
                + (buf[0][start+2] >> 4)* 0.1 \
                + (buf[0][start+2] & 0xF)* 0.01
        return result

    @staticmethod
    def dumpBuf(cmd, buf, length):
        # Dump up to 'length' bytes of buf to the error log as hex, 30 bytes
        # per line, each line prefixed with its starting offset.
        buflen = len(buf)
        end = min(buflen,length)
        pos = 1
        startnr = pos-1
        strbuf = str(' %.3d: ' % startnr)
        while pos <= end:
            strbuf += str('%.2x ' % buf[pos-1])
            if pos%10 == 0:
                strbuf += str(' ')
            if pos%30 == 0:
                logerr('%s %s' % (cmd,strbuf))
                startnr = pos
                strbuf = str(' %.3d: ' % startnr)
            pos += 1
        if pos-1 != startnr:
            # flush the final partial line
            logerr('%s %s' % (cmd,strbuf))

    @staticmethod
    def dumpBufRev(cmd, buf, start, length):
        # Like dumpBuf, but walks the buffer from the end toward 'start'.
        buflen = len(buf)
        end = min(buflen,length)
        pos = 1
        startnr = pos-1
        strbuf = str(' %.3d: ' % startnr)
        while pos <= end:
            strbuf += str('%.2x ' % buf[end-pos+start])
            if pos%10 == 0:
                strbuf += str(' ')
            if pos%30 == 0:
                logerr('Rev %s %s' % (cmd,strbuf))
                startnr = pos
                strbuf = str(' %.3d: ' % startnr)
            pos += 1
        if pos-1 != startnr:
            logerr('Rev %s %s' % (cmd,strbuf))
class CCurrentWeatherData(object):
def __init__(self):
    # No reading yet: _timestamp is None and every value holds its 'no data'
    # sentinel (the *NP() values), with direction/state codes set to ERR.
    self._timestamp = None
    self._PressureRelative_hPa = CWeatherTraits.PressureNP()
    self._PressureRelative_hPaMinMax = CMinMaxMeasurement()
    self._PressureRelative_inHg = CWeatherTraits.PressureNP()
    self._PressureRelative_inHgMinMax = CMinMaxMeasurement()
    self._WindSpeed = CWeatherTraits.WindNP()
    self._WindSpeedMinMax = CMinMaxMeasurement()
    self._WindDirection = EWindDirection.wdERR
    self._WindDirection1 = EWindDirection.wdERR
    self._WindDirection2 = EWindDirection.wdERR
    self._WindDirection3 = EWindDirection.wdERR
    self._WindDirection4 = EWindDirection.wdERR
    self._WindDirection5 = EWindDirection.wdERR
    self._Gust = CWeatherTraits.WindNP()
    self._GustMax = CMinMaxMeasurement()
    self._GustDirection = EWindDirection.wdERR
    self._GustDirection1 = EWindDirection.wdERR
    self._GustDirection2 = EWindDirection.wdERR
    self._GustDirection3 = EWindDirection.wdERR
    self._GustDirection4 = EWindDirection.wdERR
    self._GustDirection5 = EWindDirection.wdERR
    self._Rain1H = CWeatherTraits.RainNP()
    self._Rain1HMax = CMinMaxMeasurement()
    self._Rain24H = CWeatherTraits.RainNP()
    self._Rain24HMax = CMinMaxMeasurement()
    self._RainLastWeek = CWeatherTraits.RainNP()
    self._RainLastWeekMax = CMinMaxMeasurement()
    self._RainLastMonth = CWeatherTraits.RainNP()
    self._RainLastMonthMax = CMinMaxMeasurement()
    self._RainTotal = CWeatherTraits.RainNP()
    self._LastRainReset = None
    self._TempIndoor = CWeatherTraits.TemperatureNP()
    self._TempIndoorMinMax = CMinMaxMeasurement()
    self._TempOutdoor = CWeatherTraits.TemperatureNP()
    self._TempOutdoorMinMax = CMinMaxMeasurement()
    self._HumidityIndoor = CWeatherTraits.HumidityNP()
    self._HumidityIndoorMinMax = CMinMaxMeasurement()
    self._HumidityOutdoor = CWeatherTraits.HumidityNP()
    self._HumidityOutdoorMinMax = CMinMaxMeasurement()
    self._Dewpoint = CWeatherTraits.TemperatureNP()
    self._DewpointMinMax = CMinMaxMeasurement()
    self._Windchill = CWeatherTraits.TemperatureNP()
    self._WindchillMinMax = CMinMaxMeasurement()
    self._WeatherState = EWeatherState.WEATHER_ERR
    self._WeatherTendency = EWeatherTendency.TREND_ERR
    self._AlarmRingingFlags = 0
    self._AlarmMarkedFlags = 0
    self._PresRel_hPa_Max = 0.0
    self._PresRel_inHg_Max = 0.0
    def read(self, buf, pos):
        """Decode one current-weather message into this object's fields.

        buf is a one-element list wrapping the raw byte buffer (the
        shared in/out list convention used throughout this driver);
        pos is accepted but not used by this decoder.  All offsets
        below are fixed byte/nibble positions in the station's
        current-weather message layout.
        """
        logdbg('CCurrentWeatherData::read')
        nbuf = [0]
        nbuf[0] = buf[0]
        ###USBHardware.dumpBuf('Cur ', nbuf[0], 0xd7)
        # NOTE(review): combining two nibbles with *0xF looks suspicious
        # (a hi-nibble combine is usually *0x10) -- confirm against the
        # message-layout documentation before relying on _StartBytes.
        self._StartBytes = nbuf[0][6]*0xF + nbuf[0][7]
        # Weather tendency/state live in the two nibbles of byte 8;
        # values above 3 are clamped to 3 (the highest defined code).
        self._WeatherTendency = (nbuf[0][8] >> 4) & 0xF
        if self._WeatherTendency > 3:
            self._WeatherTendency = 3
        self._WeatherState = nbuf[0][8] & 0xF
        if self._WeatherState > 3:
            self._WeatherState = 3
        # --- indoor temperature: current value, min/max, error/overflow
        # flags, and the min/max timestamps (timestamps are only decoded
        # when the corresponding value is valid).
        self._TempIndoorMinMax._Max._Value = USBHardware.toTemperature_5_3(nbuf, 19, 0)
        self._TempIndoorMinMax._Min._Value = USBHardware.toTemperature_5_3(nbuf, 22, 1)
        self._TempIndoor = USBHardware.toTemperature_5_3(nbuf, 24, 0)
        if self._TempIndoorMinMax._Min._Value == CWeatherTraits.TemperatureNP():
            self._TempIndoorMinMax._Min._IsError = 1
        else:
            self._TempIndoorMinMax._Min._IsError = 0
        if self._TempIndoorMinMax._Min._Value == CWeatherTraits.TemperatureOFL():
            self._TempIndoorMinMax._Min._IsOverflow = 1
        else:
            self._TempIndoorMinMax._Min._IsOverflow = 0
        if self._TempIndoorMinMax._Max._Value == CWeatherTraits.TemperatureNP():
            self._TempIndoorMinMax._Max._IsError = 1
        else:
            self._TempIndoorMinMax._Max._IsError = 0
        if self._TempIndoorMinMax._Max._Value == CWeatherTraits.TemperatureOFL():
            self._TempIndoorMinMax._Max._IsOverflow = 1
        else:
            self._TempIndoorMinMax._Max._IsOverflow = 0
        if self._TempIndoorMinMax._Max._IsError or self._TempIndoorMinMax._Max._IsOverflow:
            self._TempIndoorMinMax._Max._Time = None
        else:
            self._TempIndoorMinMax._Max._Time = USBHardware.toDateTime(nbuf, 9, 0, 'TempIndoorMax')
        if self._TempIndoorMinMax._Min._IsError or self._TempIndoorMinMax._Min._IsOverflow:
            self._TempIndoorMinMax._Min._Time = None
        else:
            self._TempIndoorMinMax._Min._Time = USBHardware.toDateTime(nbuf, 14, 0, 'TempIndoorMin')
        # --- outdoor temperature (same flag/timestamp pattern as indoor)
        self._TempOutdoorMinMax._Max._Value = USBHardware.toTemperature_5_3(nbuf, 37, 0)
        self._TempOutdoorMinMax._Min._Value = USBHardware.toTemperature_5_3(nbuf, 40, 1)
        self._TempOutdoor = USBHardware.toTemperature_5_3(nbuf, 42, 0)
        if self._TempOutdoorMinMax._Min._Value == CWeatherTraits.TemperatureNP():
            self._TempOutdoorMinMax._Min._IsError = 1
        else:
            self._TempOutdoorMinMax._Min._IsError = 0
        if self._TempOutdoorMinMax._Min._Value == CWeatherTraits.TemperatureOFL():
            self._TempOutdoorMinMax._Min._IsOverflow = 1
        else:
            self._TempOutdoorMinMax._Min._IsOverflow = 0
        if self._TempOutdoorMinMax._Max._Value == CWeatherTraits.TemperatureNP():
            self._TempOutdoorMinMax._Max._IsError = 1
        else:
            self._TempOutdoorMinMax._Max._IsError = 0
        if self._TempOutdoorMinMax._Max._Value == CWeatherTraits.TemperatureOFL():
            self._TempOutdoorMinMax._Max._IsOverflow = 1
        else:
            self._TempOutdoorMinMax._Max._IsOverflow = 0
        if self._TempOutdoorMinMax._Max._IsError or self._TempOutdoorMinMax._Max._IsOverflow:
            self._TempOutdoorMinMax._Max._Time = None
        else:
            self._TempOutdoorMinMax._Max._Time = USBHardware.toDateTime(nbuf, 27, 0, 'TempOutdoorMax')
        if self._TempOutdoorMinMax._Min._IsError or self._TempOutdoorMinMax._Min._IsOverflow:
            self._TempOutdoorMinMax._Min._Time = None
        else:
            self._TempOutdoorMinMax._Min._Time = USBHardware.toDateTime(nbuf, 32, 0, 'TempOutdoorMin')
        # --- windchill (same flag/timestamp pattern)
        self._WindchillMinMax._Max._Value = USBHardware.toTemperature_5_3(nbuf, 55, 0)
        self._WindchillMinMax._Min._Value = USBHardware.toTemperature_5_3(nbuf, 58, 1)
        self._Windchill = USBHardware.toTemperature_5_3(nbuf, 60, 0)
        if self._WindchillMinMax._Min._Value == CWeatherTraits.TemperatureNP():
            self._WindchillMinMax._Min._IsError = 1
        else:
            self._WindchillMinMax._Min._IsError = 0
        if self._WindchillMinMax._Min._Value == CWeatherTraits.TemperatureOFL():
            self._WindchillMinMax._Min._IsOverflow = 1
        else:
            self._WindchillMinMax._Min._IsOverflow = 0
        if self._WindchillMinMax._Max._Value == CWeatherTraits.TemperatureNP():
            self._WindchillMinMax._Max._IsError = 1
        else:
            self._WindchillMinMax._Max._IsError = 0
        if self._WindchillMinMax._Max._Value == CWeatherTraits.TemperatureOFL():
            self._WindchillMinMax._Max._IsOverflow = 1
        else:
            self._WindchillMinMax._Max._IsOverflow = 0
        if self._WindchillMinMax._Max._IsError or self._WindchillMinMax._Max._IsOverflow:
            self._WindchillMinMax._Max._Time = None
        else:
            self._WindchillMinMax._Max._Time = USBHardware.toDateTime(nbuf, 45, 0, 'WindchillMax')
        if self._WindchillMinMax._Min._IsError or self._WindchillMinMax._Min._IsOverflow:
            self._WindchillMinMax._Min._Time = None
        else:
            self._WindchillMinMax._Min._Time = USBHardware.toDateTime(nbuf, 50, 0, 'WindchillMin')
        # --- dewpoint (same flag/timestamp pattern)
        self._DewpointMinMax._Max._Value = USBHardware.toTemperature_5_3(nbuf, 73, 0)
        self._DewpointMinMax._Min._Value = USBHardware.toTemperature_5_3(nbuf, 76, 1)
        self._Dewpoint = USBHardware.toTemperature_5_3(nbuf, 78, 0)
        if self._DewpointMinMax._Min._Value == CWeatherTraits.TemperatureNP():
            self._DewpointMinMax._Min._IsError = 1
        else:
            self._DewpointMinMax._Min._IsError = 0
        if self._DewpointMinMax._Min._Value == CWeatherTraits.TemperatureOFL():
            self._DewpointMinMax._Min._IsOverflow = 1
        else:
            self._DewpointMinMax._Min._IsOverflow = 0
        if self._DewpointMinMax._Max._Value == CWeatherTraits.TemperatureNP():
            self._DewpointMinMax._Max._IsError = 1
        else:
            self._DewpointMinMax._Max._IsError = 0
        if self._DewpointMinMax._Max._Value == CWeatherTraits.TemperatureOFL():
            self._DewpointMinMax._Max._IsOverflow = 1
        else:
            self._DewpointMinMax._Max._IsOverflow = 0
        if self._DewpointMinMax._Min._IsError or self._DewpointMinMax._Min._IsOverflow:
            self._DewpointMinMax._Min._Time = None
        else:
            self._DewpointMinMax._Min._Time = USBHardware.toDateTime(nbuf, 68, 0, 'DewpointMin')
        if self._DewpointMinMax._Max._IsError or self._DewpointMinMax._Max._IsOverflow:
            self._DewpointMinMax._Max._Time = None
        else:
            self._DewpointMinMax._Max._Time = USBHardware.toDateTime(nbuf, 63, 0, 'DewpointMax')
        # --- indoor humidity (humidity NP/OFL sentinels instead of
        # temperature ones)
        self._HumidityIndoorMinMax._Max._Value = USBHardware.toHumidity_2_0(nbuf, 91, 1)
        self._HumidityIndoorMinMax._Min._Value = USBHardware.toHumidity_2_0(nbuf, 92, 1)
        self._HumidityIndoor = USBHardware.toHumidity_2_0(nbuf, 93, 1)
        if self._HumidityIndoorMinMax._Min._Value == CWeatherTraits.HumidityNP():
            self._HumidityIndoorMinMax._Min._IsError = 1
        else:
            self._HumidityIndoorMinMax._Min._IsError = 0
        if self._HumidityIndoorMinMax._Min._Value == CWeatherTraits.HumidityOFL():
            self._HumidityIndoorMinMax._Min._IsOverflow = 1
        else:
            self._HumidityIndoorMinMax._Min._IsOverflow = 0
        if self._HumidityIndoorMinMax._Max._Value == CWeatherTraits.HumidityNP():
            self._HumidityIndoorMinMax._Max._IsError = 1
        else:
            self._HumidityIndoorMinMax._Max._IsError = 0
        if self._HumidityIndoorMinMax._Max._Value == CWeatherTraits.HumidityOFL():
            self._HumidityIndoorMinMax._Max._IsOverflow = 1
        else:
            self._HumidityIndoorMinMax._Max._IsOverflow = 0
        if self._HumidityIndoorMinMax._Max._IsError or self._HumidityIndoorMinMax._Max._IsOverflow:
            self._HumidityIndoorMinMax._Max._Time = None
        else:
            self._HumidityIndoorMinMax._Max._Time = USBHardware.toDateTime(nbuf, 81, 1, 'HumidityIndoorMax')
        if self._HumidityIndoorMinMax._Min._IsError or self._HumidityIndoorMinMax._Min._IsOverflow:
            self._HumidityIndoorMinMax._Min._Time = None
        else:
            self._HumidityIndoorMinMax._Min._Time = USBHardware.toDateTime(nbuf, 86, 1, 'HumidityIndoorMin')
        # --- outdoor humidity
        self._HumidityOutdoorMinMax._Max._Value = USBHardware.toHumidity_2_0(nbuf, 104, 1)
        self._HumidityOutdoorMinMax._Min._Value = USBHardware.toHumidity_2_0(nbuf, 105, 1)
        self._HumidityOutdoor = USBHardware.toHumidity_2_0(nbuf, 106, 1)
        if self._HumidityOutdoorMinMax._Min._Value == CWeatherTraits.HumidityNP():
            self._HumidityOutdoorMinMax._Min._IsError = 1
        else:
            self._HumidityOutdoorMinMax._Min._IsError = 0
        if self._HumidityOutdoorMinMax._Min._Value == CWeatherTraits.HumidityOFL():
            self._HumidityOutdoorMinMax._Min._IsOverflow = 1
        else:
            self._HumidityOutdoorMinMax._Min._IsOverflow = 0
        if self._HumidityOutdoorMinMax._Max._Value == CWeatherTraits.HumidityNP():
            self._HumidityOutdoorMinMax._Max._IsError = 1
        else:
            self._HumidityOutdoorMinMax._Max._IsError = 0
        if self._HumidityOutdoorMinMax._Max._Value == CWeatherTraits.HumidityOFL():
            self._HumidityOutdoorMinMax._Max._IsOverflow = 1
        else:
            self._HumidityOutdoorMinMax._Max._IsOverflow = 0
        if self._HumidityOutdoorMinMax._Max._IsError or self._HumidityOutdoorMinMax._Max._IsOverflow:
            self._HumidityOutdoorMinMax._Max._Time = None
        else:
            self._HumidityOutdoorMinMax._Max._Time = USBHardware.toDateTime(nbuf, 94, 1, 'HumidityOutdoorMax')
        if self._HumidityOutdoorMinMax._Min._IsError or self._HumidityOutdoorMinMax._Min._IsOverflow:
            self._HumidityOutdoorMinMax._Min._Time = None
        else:
            self._HumidityOutdoorMinMax._Min._Time = USBHardware.toDateTime(nbuf, 99, 1, 'HumidityOutdoorMin')
        # --- rain counters: month, week, 24h, 1h, total, and the last
        # rain-counter reset time
        self._RainLastMonthMax._Max._Time = USBHardware.toDateTime(nbuf, 107, 1, 'RainLastMonthMax')
        self._RainLastMonthMax._Max._Value = USBHardware.toRain_6_2(nbuf, 112, 1)
        self._RainLastMonth = USBHardware.toRain_6_2(nbuf, 115, 1)
        self._RainLastWeekMax._Max._Time = USBHardware.toDateTime(nbuf, 118, 1, 'RainLastWeekMax')
        self._RainLastWeekMax._Max._Value = USBHardware.toRain_6_2(nbuf, 123, 1)
        self._RainLastWeek = USBHardware.toRain_6_2(nbuf, 126, 1)
        self._Rain24HMax._Max._Time = USBHardware.toDateTime(nbuf, 129, 1, 'Rain24HMax')
        self._Rain24HMax._Max._Value = USBHardware.toRain_6_2(nbuf, 134, 1)
        self._Rain24H = USBHardware.toRain_6_2(nbuf, 137, 1)
        self._Rain1HMax._Max._Time = USBHardware.toDateTime(nbuf, 140, 1, 'Rain1HMax')
        self._Rain1HMax._Max._Value = USBHardware.toRain_6_2(nbuf, 145, 1)
        self._Rain1H = USBHardware.toRain_6_2(nbuf, 148, 1)
        self._LastRainReset = USBHardware.toDateTime(nbuf, 151, 0, 'LastRainReset')
        self._RainTotal = USBHardware.toRain_7_3(nbuf, 156, 0)
        # --- wind direction history: current plus five previous values,
        # packed two per byte at offsets 162..160
        (w ,w1) = USBHardware.readWindDirectionShared(nbuf, 162)
        (w2,w3) = USBHardware.readWindDirectionShared(nbuf, 161)
        (w4,w5) = USBHardware.readWindDirectionShared(nbuf, 160)
        self._WindDirection = w
        self._WindDirection1 = w1
        self._WindDirection2 = w2
        self._WindDirection3 = w3
        self._WindDirection4 = w4
        self._WindDirection5 = w5
        # Bytes 163..171 have no known meaning; capture them as hex for
        # the diagnostic log line at the end of this method.
        unknownbuf = [0]
        unknownbuf[0] = [0]*9
        for i in xrange(0,9):
            unknownbuf[0][i] = nbuf[0][163+i]
        strbuf = ""
        for i in unknownbuf[0]:
            strbuf += str("%.2x " % i)
        self._WindSpeed = USBHardware.toWindspeed_5_2(nbuf, 172, 1)
        # --- gust direction history (same packing as wind direction)
        (g ,g1) = USBHardware.readWindDirectionShared(nbuf, 177)
        (g2,g3) = USBHardware.readWindDirectionShared(nbuf, 176)
        (g4,g5) = USBHardware.readWindDirectionShared(nbuf, 175)
        self._GustDirection = g
        self._GustDirection1 = g1
        self._GustDirection2 = g2
        self._GustDirection3 = g3
        self._GustDirection4 = g4
        self._GustDirection5 = g5
        self._GustMax._Max._Time = USBHardware.toDateTime(nbuf, 179, 1, 'GustMax')
        self._GustMax._Max._Value = USBHardware.toWindspeed_5_2(nbuf, 184, 1)
        self._Gust = USBHardware.toWindspeed_5_2(nbuf, 187, 1)
        #lh The data has only ONE date time for both hPa/inHg Min Time Reset and Max Time Reset
        self._PressureRelative_hPaMinMax._Max._Time = USBHardware.toDateTime(nbuf, 190, 1, 'PressureRelative_hPaMax')
        self._PressureRelative_inHgMinMax._Max._Time = self._PressureRelative_hPaMinMax._Max._Time
        self._PressureRelative_hPaMinMax._Min._Time = self._PressureRelative_hPaMinMax._Max._Time # WS bug, should be: USBHardware.toDateTime(nbuf, 195, 1)
        self._PressureRelative_inHgMinMax._Min._Time = self._PressureRelative_hPaMinMax._Min._Time
        (self._PresRel_hPa_Max, self._PresRel_inHg_Max) = USBHardware.readPressureShared(nbuf, 195, 1) #bug in WS; here should go self._PressureRelative_hPaMinMax._Min._Time
        (self._PressureRelative_hPaMinMax._Max._Value, self._PressureRelative_inHgMinMax._Max._Value) = USBHardware.readPressureShared(nbuf, 200, 1)
        (self._PressureRelative_hPaMinMax._Min._Value, self._PressureRelative_inHgMinMax._Min._Value) = USBHardware.readPressureShared(nbuf, 205, 1)
        (self._PressureRelative_hPa, self._PressureRelative_inHg) = USBHardware.readPressureShared(nbuf, 210, 1)
        # Stamp the record with the decode time, then dump everything
        # decoded above to the debug log.
        # NOTE(review): _AlarmRingingFlags is logged below but never set
        # in this method -- it keeps whatever value it had before.
        self._timestamp = time.time()
        logdbg("_WeatherState=%s _WeatherTendency=%s _AlarmRingingFlags %04x" % (CWeatherTraits.forecastMap[self._WeatherState], CWeatherTraits.trends[self._WeatherTendency], self._AlarmRingingFlags))
        logdbg("_TempIndoor=     %8.3f _Min=%8.3f (%s)  _Max=%8.3f (%s)" % (self._TempIndoor, self._TempIndoorMinMax._Min._Value, self._TempIndoorMinMax._Min._Time, self._TempIndoorMinMax._Max._Value, self._TempIndoorMinMax._Max._Time))
        logdbg("_HumidityIndoor= %8.3f _Min=%8.3f (%s)  _Max=%8.3f (%s)" % (self._HumidityIndoor, self._HumidityIndoorMinMax._Min._Value, self._HumidityIndoorMinMax._Min._Time, self._HumidityIndoorMinMax._Max._Value, self._HumidityIndoorMinMax._Max._Time))
        logdbg("_TempOutdoor=    %8.3f _Min=%8.3f (%s)  _Max=%8.3f (%s)" % (self._TempOutdoor, self._TempOutdoorMinMax._Min._Value, self._TempOutdoorMinMax._Min._Time, self._TempOutdoorMinMax._Max._Value, self._TempOutdoorMinMax._Max._Time))
        logdbg("_HumidityOutdoor=%8.3f _Min=%8.3f (%s)  _Max=%8.3f (%s)" % (self._HumidityOutdoor, self._HumidityOutdoorMinMax._Min._Value, self._HumidityOutdoorMinMax._Min._Time, self._HumidityOutdoorMinMax._Max._Value, self._HumidityOutdoorMinMax._Max._Time))
        logdbg("_Windchill=      %8.3f _Min=%8.3f (%s)  _Max=%8.3f (%s)" % (self._Windchill, self._WindchillMinMax._Min._Value, self._WindchillMinMax._Min._Time, self._WindchillMinMax._Max._Value, self._WindchillMinMax._Max._Time))
        logdbg("_Dewpoint=       %8.3f _Min=%8.3f (%s)  _Max=%8.3f (%s)" % (self._Dewpoint, self._DewpointMinMax._Min._Value, self._DewpointMinMax._Min._Time, self._DewpointMinMax._Max._Value, self._DewpointMinMax._Max._Time))
        logdbg("_WindSpeed=      %8.3f" % self._WindSpeed)
        logdbg("_Gust=           %8.3f                                      _Max=%8.3f (%s)" % (self._Gust, self._GustMax._Max._Value, self._GustMax._Max._Time))
        logdbg('_WindDirection=    %3s    _GustDirection=    %3s' % (CWeatherTraits.windDirMap[self._WindDirection],  CWeatherTraits.windDirMap[self._GustDirection]))
        logdbg('_WindDirection1=   %3s    _GustDirection1=   %3s' % (CWeatherTraits.windDirMap[self._WindDirection1], CWeatherTraits.windDirMap[self._GustDirection1]))
        logdbg('_WindDirection2=   %3s    _GustDirection2=   %3s' % (CWeatherTraits.windDirMap[self._WindDirection2], CWeatherTraits.windDirMap[self._GustDirection2]))
        logdbg('_WindDirection3=   %3s    _GustDirection3=   %3s' % (CWeatherTraits.windDirMap[self._WindDirection3], CWeatherTraits.windDirMap[self._GustDirection3]))
        logdbg('_WindDirection4=   %3s    _GustDirection4=   %3s' % (CWeatherTraits.windDirMap[self._WindDirection4], CWeatherTraits.windDirMap[self._GustDirection4]))
        logdbg('_WindDirection5=   %3s    _GustDirection5=   %3s' % (CWeatherTraits.windDirMap[self._WindDirection5], CWeatherTraits.windDirMap[self._GustDirection5]))
        if (self._RainLastMonth > 0) or (self._RainLastWeek > 0):
            logdbg("_RainLastMonth=  %8.3f                                      _Max=%8.3f (%s)" % (self._RainLastMonth, self._RainLastMonthMax._Max._Value, self._RainLastMonthMax._Max._Time))
            logdbg("_RainLastWeek=   %8.3f                                      _Max=%8.3f (%s)" % (self._RainLastWeek, self._RainLastWeekMax._Max._Value, self._RainLastWeekMax._Max._Time))
        logdbg("_Rain24H=        %8.3f                                      _Max=%8.3f (%s)" % (self._Rain24H, self._Rain24HMax._Max._Value, self._Rain24HMax._Max._Time))
        logdbg("_Rain1H=         %8.3f                                      _Max=%8.3f (%s)" % (self._Rain1H, self._Rain1HMax._Max._Value, self._Rain1HMax._Max._Time))
        logdbg("_RainTotal=      %8.3f                            _LastRainReset=         (%s)" % (self._RainTotal,  self._LastRainReset))
        logdbg("PressureRel_hPa= %8.3f _Min=%8.3f (%s)  _Max=%8.3f (%s) " % (self._PressureRelative_hPa, self._PressureRelative_hPaMinMax._Min._Value, self._PressureRelative_hPaMinMax._Min._Time, self._PressureRelative_hPaMinMax._Max._Value, self._PressureRelative_hPaMinMax._Max._Time))
        logdbg("PressureRel_inHg=%8.3f _Min=%8.3f (%s)  _Max=%8.3f (%s) " % (self._PressureRelative_inHg, self._PressureRelative_inHgMinMax._Min._Value, self._PressureRelative_inHgMinMax._Min._Time, self._PressureRelative_inHgMinMax._Max._Value, self._PressureRelative_inHgMinMax._Max._Time))
        ###logdbg('(* Bug in Weather Station: PressureRelative._Min._Time is written to location of _PressureRelative._Max._Time')
        ###logdbg('Instead of PressureRelative._Min._Time we get: _PresRel_hPa_Max= %8.3f, _PresRel_inHg_max =%8.3f;' % (self._PresRel_hPa_Max, self._PresRel_inHg_Max))
        logdbg('Bytes with unknown meaning at 157-165: %s' % strbuf)
class CWeatherStationConfig(object):
    """In-memory copy of the WS28xx station configuration block.

    Holds the display/alarm/min-max settings decoded from (or encoded
    into) the station's raw configuration message, persisted to an
    INI-style file via ConfigObj.  Three checksums are tracked:
    _InBufCS (from the last received config), _OutBufCS (computed over
    the outgoing config) and _DeviceCS (reported by the device in
    status messages).
    """
    def __init__(self, cfgfn):
        """Load the cached DeviceCS from cfgfn and zero all settings."""
        self.filename = cfgfn
        config = ConfigObj(cfgfn)
        config.filename = cfgfn
        self._InBufCS = 0   # checksum of received config
        self._OutBufCS = 0  # calculated config checksum from outbuf config
        self._DeviceCS = 0  # config checksum received via messages
        try:
            self._DeviceCS = int(config['ws28xx']['DeviceCS'])
        except (KeyError, TypeError, ValueError):
            # missing section/key or unparsable value: fall back to 0
            self._DeviceCS = 0
        self._ClockMode = 0
        self._TemperatureFormat = 0
        self._PressureFormat = 0
        self._RainFormat = 0
        self._WindspeedFormat = 0
        self._WeatherThreshold = 0
        self._StormThreshold = 0
        self._LCDContrast = 0
        self._LowBatFlags = 0
        self._WindDirAlarmFlags = 0
        self._OtherAlarmFlags = 0
        self._ResetMinMaxFlags = 0 # output only
        self._HistoryInterval = 0
        self._TempIndoorMinMax = CMinMaxMeasurement()
        self._TempOutdoorMinMax = CMinMaxMeasurement()
        self._HumidityIndoorMinMax = CMinMaxMeasurement()
        self._HumidityOutdoorMinMax = CMinMaxMeasurement()
        self._Rain24HMax = CMinMaxMeasurement()
        self._GustMax = CMinMaxMeasurement()
        self._PressureRelative_hPaMinMax = CMinMaxMeasurement()
        self._PressureRelative_inHgMinMax = CMinMaxMeasurement()
    def readAlertFlags(self,buf):
        # Placeholder: alert-flag decoding is not implemented.
        logdbg('readAlertFlags')
    def setTemps(self,TempFormat,InTempLo,InTempHi,OutTempLo,OutTempHi):
        """Set temperature format and in/outdoor alarm limits.

        All four limits must be within -40.0..59.9; returns 1 on
        success, 0 (with an error logged) on invalid format or range.
        """
        logdbg('setTemps')
        f1 = TempFormat
        t1 = InTempLo
        t2 = InTempHi
        t3 = OutTempLo
        t4 = OutTempHi
        if (f1 == ETemperatureFormat.tfFahrenheit) or (f1 == ETemperatureFormat.tfCelsius):
            if ((t1 >= -40.0) and (t1 <= 59.9) and (t2 >= -40.0) and (t2 <= 59.9) and \
                (t3 >= -40.0) and (t3 <= 59.9) and (t4 >= -40.0) and (t4 <= 59.9)):
                self._TemperatureFormat = f1
            else:
                logerr('Value outside range')
                return 0
        else:
            logerr('Unknown format')
            return 0
        self._TempIndoorMinMax._Min._Value = t1
        self._TempIndoorMinMax._Max._Value = t2
        self._TempOutdoorMinMax._Min._Value = t3
        self._TempOutdoorMinMax._Max._Value = t4
        return 1
    def setHums(self,InHumLo,InHumHi,OutHumLo,OutHumHi):
        """Set in/outdoor humidity alarm limits (each 1..99 percent).

        Returns 1 on success, 0 (with an error logged) on bad range.
        """
        h1 = InHumLo
        h2 = InHumHi
        h3 = OutHumLo
        h4 = OutHumHi
        if not ((h1 >= 1) and (h1 <= 99) and (h2 >= 1) and (h2 <= 99) and \
                (h3 >= 1) and (h3 <= 99) and (h4 >= 1) and (h4 <= 99)):
            logerr('Humidity value outside range')
            return 0
        self._HumidityIndoorMinMax._Min._Value = h1
        self._HumidityIndoorMinMax._Max._Value = h2
        self._HumidityOutdoorMinMax._Min._Value = h3
        self._HumidityOutdoorMinMax._Max._Value = h4
        return 1
    def setRain24H(self,RainFormat,Rain24hHi):
        """Set rain format and 24h rain alarm limit (0.0..9999.9).

        Returns 1 on success, 0 (with an error logged) otherwise.
        """
        f1 = RainFormat
        r1 = Rain24hHi
        if (f1 == ERainFormat.rfMm) or (f1 == ERainFormat.rfInch):
            if (r1>=0.0) and (r1 <= 9999.9):
                self._RainFormat = f1
            else:
                logerr('Rain24H value outside range')
                return 0
        else:
            logerr('Unknown RainFormat')
            return 0
        self._Rain24HMax._Max._Value = r1
        return 1
    def setGust(self,WindSpeedFormat,GustHi):
        """Set wind-speed format and gust alarm limit (0.0..180.0).

        Returns 1 on success, 0 (with an error logged) otherwise.
        """
        f1 = WindSpeedFormat
        g1 = GustHi
        if (f1 >= EWindspeedFormat.wfMs) and (f1 <= EWindspeedFormat.wfMph):
            if (g1>=0.0) and (g1 <= 180.0):
                # BUGFIX: was assigned to self._WindSpeedFormat (capital S),
                # a name used nowhere else, so the setting was silently lost.
                self._WindspeedFormat = f1
            else:
                logerr('Gust value outside range')
                return 0
        else:
            logerr('Unknown WindSpeedFormat')
            return 0
        self._GustMax._Max._Value = g1
        return 1
    def setPresRels(self,PressureFormat,PresRelhPaLo,PresRelhPaHi,PresRelinHgLo,PresRelinHgHi):
        """Set pressure format and relative-pressure alarm limits.

        hPa limits must be within 920.0..1080.0, inHg limits within
        27.10..31.90.  Returns 1 on success, 0 (with an error logged)
        otherwise.
        """
        f1 = PressureFormat
        p1 = PresRelhPaLo
        p2 = PresRelhPaHi
        p3 = PresRelinHgLo
        p4 = PresRelinHgHi
        if (f1 == EPressureFormat.pfinHg) or (f1 == EPressureFormat.pfHPa):
            if ((p1>=920.0) and (p1 <= 1080.0) and (p2>=920.0) and (p2 <= 1080.0) and \
                (p3>=27.10) and (p3 <= 31.90) and (p4>=27.10) and (p4 <= 31.90)):
                # BUGFIX: was assigned to self._RainFormat (copy-paste from
                # setRain24H), clobbering the rain setting instead of
                # recording the pressure format.
                self._PressureFormat = f1
            else:
                logerr('PresRel value outside range')
                return 0
        else:
            logerr('Unknown PressureFormat')
            return 0
        self._PressureRelative_hPaMinMax._Min._Value = p1
        self._PressureRelative_hPaMinMax._Max._Value = p2
        self._PressureRelative_inHgMinMax._Min._Value = p3
        self._PressureRelative_inHgMinMax._Max._Value = p4
        return 1
    def calcOutBufCS(self, buf, start):
        """Return the checksum over 39 config bytes beginning at start.

        For the calculation of the CheckSum the _ResetMinMaxFlags
        and the Checksum itself are excluded; the sum is seeded with 7.
        """
        nbuf=[0]
        nbuf[0]=buf[0]
        outbufCS = 7
        for i in range(0, 39):
            outbufCS += nbuf[0][i+start]
        logdbg('calcOutBufCS: outbufCS=%04x' % outbufCS)
        return outbufCS
    def getOutBufCS(self):
        """Return the checksum computed over the outgoing config."""
        return self._OutBufCS
    def getInBufCS(self):
        """Return the checksum of the last received config."""
        return self._InBufCS
    def setDeviceCS(self, deviceCS):
        """Record the config checksum reported by the device."""
        logdbg('setDeviceCS: %s' % deviceCS)
        self._DeviceCS = deviceCS
    def getDeviceCS(self):
        """Return the config checksum reported by the device."""
        return self._DeviceCS
    def setResetMinMaxFlags(self, resetMinMaxFlags):
        """Set the output-only min/max reset flag bits (24 bits)."""
        logdbg('setResetMinMaxFlags: %s' % resetMinMaxFlags)
        self._ResetMinMaxFlags = resetMinMaxFlags
    def parseRain_3(self, number, buf, start, StartOnHiNibble, numbytes):
        '''Parse 7-digit number with 3 decimals'''
        # Encode number*1000 as 7 BCD digits packed into buf, preserving
        # the neighbouring nibble at the boundary byte.
        num = int(number*1000)
        parsebuf=[0]*7
        for i in range(7-numbytes,7):
            parsebuf[i] = num%10
            num = num//10
        if StartOnHiNibble:
            buf[0][0+start] = parsebuf[6]*16 + parsebuf[5]
            buf[0][1+start] = parsebuf[4]*16 + parsebuf[3]
            buf[0][2+start] = parsebuf[2]*16 + parsebuf[1]
            buf[0][3+start] = parsebuf[0]*16 + (buf[0][3+start] & 0xF)
        else:
            buf[0][0+start] = (buf[0][0+start] & 0xF0) + parsebuf[6]
            buf[0][1+start] = parsebuf[5]*16 + parsebuf[4]
            buf[0][2+start] = parsebuf[3]*16 + parsebuf[2]
            buf[0][3+start] = parsebuf[1]*16 + parsebuf[0]
    def parseWind_2(self, number, buf, start, StartOnHiNibble, numbytes):
        '''Parse 4-digit number with 1 decimal'''
        # Encodes number*100 as base-16 nibbles into two bytes;
        # StartOnHiNibble is accepted for signature symmetry but unused.
        num = int(number*100)
        parsebuf=[0]*5
        for i in range(5-numbytes,5):
            parsebuf[i] = num%16
            num = num//16
        buf[0][0+start] = parsebuf[3]*16 + parsebuf[2]
        buf[0][1+start] = parsebuf[1]*16 + parsebuf[0]
    def parse_0(self, number, buf, start, StartOnHiNibble, numbytes):
        '''Parse 5-digit number with 0 decimals'''
        # Encode the integer as up to 5 BCD digits packed into buf,
        # preserving the neighbouring nibble at the boundary byte.
        num = int(number)
        nbuf=[0]*5
        for i in range(5-numbytes,5):
            nbuf[i] = num%10
            num = num//10
        if StartOnHiNibble:
            buf[0][0+start] = nbuf[4]*16 + nbuf[3]
            buf[0][1+start] = nbuf[2]*16 + nbuf[1]
            buf[0][2+start] = nbuf[0]*16 + (buf[0][2+start] & 0x0F)
        else:
            buf[0][0+start] = (buf[0][0+start] & 0xF0) + nbuf[4]
            buf[0][1+start] = nbuf[3]*16 + nbuf[2]
            buf[0][2+start] = nbuf[1]*16 + nbuf[0]
    def parse_1(self, number, buf, start, StartOnHiNibble, numbytes):
        '''Parse 5 digit number with 1 decimal'''
        self.parse_0(number*10.0, buf, start, StartOnHiNibble, numbytes)
    def parse_2(self, number, buf, start, StartOnHiNibble, numbytes):
        '''Parse 5 digit number with 2 decimals'''
        self.parse_0(number*100.0, buf, start, StartOnHiNibble, numbytes)
    def parse_3(self, number, buf, start, StartOnHiNibble, numbytes):
        '''Parse 5 digit number with 3 decimals'''
        self.parse_0(number*1000.0, buf, start, StartOnHiNibble, numbytes)
    def write(self):
        """Persist the full configuration to self.filename via ConfigObj."""
        config = ConfigObj(self.filename)
        config.filename = self.filename
        config['ws28xx'] = {}
        config['ws28xx']['DeviceCS'] = str(self._DeviceCS)
        config['ws28xx']['ClockMode'] = str(self._ClockMode)
        config['ws28xx']['TemperatureFormat'] = str(self._TemperatureFormat)
        config['ws28xx']['PressureFormat'] = str(self._PressureFormat)
        config['ws28xx']['RainFormat'] = str(self._RainFormat)
        config['ws28xx']['WindspeedFormat'] = str(self._WindspeedFormat)
        config['ws28xx']['WeatherThreshold'] = str(self._WeatherThreshold)
        config['ws28xx']['StormThreshold'] = str(self._StormThreshold)
        config['ws28xx']['LCDContrast'] = str(self._LCDContrast)
        config['ws28xx']['LowBatFlags'] = str(self._LowBatFlags)
        config['ws28xx']['WindDirAlarmFlags'] = str(self._WindDirAlarmFlags)
        config['ws28xx']['OtherAlarmFlags'] = str(self._OtherAlarmFlags)
        config['ws28xx']['HistoryInterval'] = str(self._HistoryInterval)
        config['ws28xx']['ResetMinMaxFlags'] = str(self._ResetMinMaxFlags)
        config['ws28xx']['TempIndoor_Min'] = str(self._TempIndoorMinMax._Min._Value)
        config['ws28xx']['TempIndoor_Max'] = str(self._TempIndoorMinMax._Max._Value)
        # BUGFIX: keys were 'Outdoor_Min' and 'TempOutdoorMax', breaking
        # the 'TempXxx_Min'/'TempXxx_Max' naming used by every other key.
        config['ws28xx']['TempOutdoor_Min'] = str(self._TempOutdoorMinMax._Min._Value)
        config['ws28xx']['TempOutdoor_Max'] = str(self._TempOutdoorMinMax._Max._Value)
        config['ws28xx']['HumidityIndoor_Min'] = str(self._HumidityIndoorMinMax._Min._Value)
        config['ws28xx']['HumidityIndoor_Max'] = str(self._HumidityIndoorMinMax._Max._Value)
        config['ws28xx']['HumidityOutdoor_Min'] = str(self._HumidityOutdoorMinMax._Min._Value)
        config['ws28xx']['HumidityOutdoor_Max'] = str(self._HumidityOutdoorMinMax._Max._Value)
        config['ws28xx']['Rain24HMax'] = str(self._Rain24HMax._Max._Value)
        config['ws28xx']['GustMax'] = str(self._GustMax._Max._Value)
        config['ws28xx']['PressureRel_hPa_Min'] = str(self._PressureRelative_hPaMinMax._Min._Value)
        config['ws28xx']['PressureRel_inHg_Min'] = str(self._PressureRelative_inHgMinMax._Min._Value)
        config['ws28xx']['PressureRel_hPa_Max'] = str(self._PressureRelative_hPaMinMax._Max._Value)
        config['ws28xx']['PressureRel_inHg_Max'] = str(self._PressureRelative_inHgMinMax._Max._Value)
        if DEBUG_WRITES > 0:
            logdbg('write: write to %s' % self.filename)
        config.write()
    def read(self,buf,pos):
        """Decode a received configuration message into this object.

        Fields are unpacked from fixed byte/nibble offsets, the
        outgoing checksum is recomputed, and the result is written to
        the config file.  Always returns 1.
        """
        logdbg('read')
        nbuf=[0]
        nbuf[0]=buf[0]
        ###USBHardware.dumpBuf('In  ', nbuf[0], 0x30)
        self._WindspeedFormat = (nbuf[0][4] >> 4) & 0xF
        self._RainFormat = (nbuf[0][4] >> 3) & 1
        self._PressureFormat = (nbuf[0][4] >> 2) & 1
        self._TemperatureFormat = (nbuf[0][4] >> 1) & 1
        self._ClockMode = nbuf[0][4] & 1
        self._StormThreshold = (nbuf[0][5] >> 4) & 0xF
        self._WeatherThreshold = nbuf[0][5] & 0xF
        self._LowBatFlags = (nbuf[0][6] >> 4) & 0xF
        self._LCDContrast = nbuf[0][6] & 0xF
        self._WindDirAlarmFlags = (nbuf[0][7] << 8) | nbuf[0][8]
        self._OtherAlarmFlags = (nbuf[0][9] << 8) | nbuf[0][10]
        self._TempIndoorMinMax._Max._Value = USBHardware.toTemperature_5_3(nbuf, 11, 1)
        self._TempIndoorMinMax._Min._Value = USBHardware.toTemperature_5_3(nbuf, 13, 0)
        self._TempOutdoorMinMax._Max._Value = USBHardware.toTemperature_5_3(nbuf, 16, 1)
        self._TempOutdoorMinMax._Min._Value = USBHardware.toTemperature_5_3(nbuf, 18, 0)
        self._HumidityIndoorMinMax._Max._Value = USBHardware.toHumidity_2_0(nbuf, 21, 1)
        self._HumidityIndoorMinMax._Min._Value = USBHardware.toHumidity_2_0(nbuf, 22, 1)
        self._HumidityOutdoorMinMax._Max._Value = USBHardware.toHumidity_2_0(nbuf, 23, 1)
        self._HumidityOutdoorMinMax._Min._Value = USBHardware.toHumidity_2_0(nbuf, 24, 1)
        self._Rain24HMax._Max._Value = USBHardware.toRain_7_3(nbuf, 25, 0)
        self._HistoryInterval = nbuf[0][29]
        self._GustMax._Max._Value = USBHardware.toWindspeed_5_2(nbuf, 30, 1)
        (self._PressureRelative_hPaMinMax._Min._Value, self._PressureRelative_inHgMinMax._Min._Value) = USBHardware.readPressureShared(nbuf, 33, 1)
        (self._PressureRelative_hPaMinMax._Max._Value, self._PressureRelative_inHgMinMax._Max._Value) = USBHardware.readPressureShared(nbuf, 38, 1)
        self._ResetMinMaxFlags = (nbuf[0][43]) <<16 | (nbuf[0][44] << 8) | (nbuf[0][45])
        self._InBufCS = (nbuf[0][46] << 8) | nbuf[0][47]
        self._OutBufCS = self.calcOutBufCS(buf,pos)
        self.logConfigData()
        if DEBUG_WRITES > 0:
            logdbg('read: write to %s' % self.filename)
        self.write()
        # Preset historyInterval to 1 minute (default: 2 hours)
        self._HistoryInterval = EHistoryInterval.hi01Min
        # Clear all alarm flags, because the datastream from the weather station will pause during an alarm
        ###self._WindDirAlarmFlags = 0x0000
        ###self._OtherAlarmFlags   = 0x0000
        return 1
    def testConfigChanged(self,buf):
        """Encode the current settings into buf and compare checksums.

        Returns 0 if the encoded config matches the last received one
        (and no reset flags are pending), 1 if a (re)send is required;
        in the latter case the config is logged and written to file.
        """
        logdbg('testConfigChanged')
        nbuf = [0]
        nbuf[0] = buf[0]
        nbuf[0][0] = 16*(self._WindspeedFormat & 0xF) + 8*(self._RainFormat & 1) + 4*(self._PressureFormat & 1) + 2*(self._TemperatureFormat & 1) + (self._ClockMode & 1)
        nbuf[0][1] = self._WeatherThreshold & 0xF | 16 * self._StormThreshold & 0xF0
        nbuf[0][2] = self._LCDContrast & 0xF | 16 * self._LowBatFlags & 0xF0
        nbuf[0][3] = (self._OtherAlarmFlags >> 0) & 0xFF
        nbuf[0][4] = (self._OtherAlarmFlags >> 8) & 0xFF
        nbuf[0][5] = (self._WindDirAlarmFlags >> 0) & 0xFF
        nbuf[0][6] = (self._WindDirAlarmFlags >> 8) & 0xFF
        # reverse buf from here
        self.parse_2(self._PressureRelative_inHgMinMax._Max._Value, nbuf, 7, 1, 5)
        self.parse_1(self._PressureRelative_hPaMinMax._Max._Value, nbuf, 9, 0, 5)
        self.parse_2(self._PressureRelative_inHgMinMax._Min._Value, nbuf, 12, 1, 5)
        self.parse_1(self._PressureRelative_hPaMinMax._Min._Value, nbuf, 14, 0, 5)
        self.parseWind_2(self._GustMax._Max._Value, nbuf, 17, 0, 5)
        nbuf[0][20] = self._HistoryInterval & 0xF
        self.parseRain_3(self._Rain24HMax._Max._Value, nbuf, 21, 0, 7)
        self.parse_0(self._HumidityOutdoorMinMax._Max._Value, nbuf, 25, 1, 2)
        self.parse_0(self._HumidityOutdoorMinMax._Min._Value, nbuf, 26, 1, 2)
        self.parse_0(self._HumidityIndoorMinMax._Max._Value, nbuf, 27, 1, 2)
        self.parse_0(self._HumidityIndoorMinMax._Min._Value, nbuf, 28, 1, 2)
        self.parse_3(self._TempOutdoorMinMax._Max._Value + CWeatherTraits.TemperatureOffset(), nbuf, 29, 1, 5)
        self.parse_3(self._TempOutdoorMinMax._Min._Value + CWeatherTraits.TemperatureOffset(), nbuf, 31, 0, 5)
        self.parse_3(self._TempIndoorMinMax._Max._Value + CWeatherTraits.TemperatureOffset(), nbuf, 34, 1, 5)
        self.parse_3(self._TempIndoorMinMax._Min._Value + CWeatherTraits.TemperatureOffset(), nbuf, 36, 0, 5)
        # reverse buf to here
        USBHardware.reverseByteOrder(nbuf, 7, 32)
        nbuf[0][39] = (self._ResetMinMaxFlags >> 16) & 0xFF  #lh Don't calculate CheckSum
        nbuf[0][40] = (self._ResetMinMaxFlags >>  8) & 0xFF  #   for the 3 (output only)
        nbuf[0][41] = (self._ResetMinMaxFlags >>  0) & 0xFF  #   _ResetMinMaxFlags bytes
        self._OutBufCS = self.calcOutBufCS(nbuf,0)
        nbuf[0][42] = (self._OutBufCS >> 8) & 0xFF
        nbuf[0][43] = (self._OutBufCS >> 0) & 0xFF
        buf[0] = nbuf[0]
        if (self._OutBufCS == self._InBufCS) and (self._ResetMinMaxFlags  == 0):
            logdbg('testConfigChanged: checksum not changed %04x' % self._OutBufCS)
            State = 0
        else:
            logerr('Checksum or resetMinMaxFlags changed, InBufCS=%04x, OutBufCS=%04x, _ResetMinMaxFlags=%06x' % (self._InBufCS, self._OutBufCS, self._ResetMinMaxFlags))
            self.logConfigData()
            self.write()
            State = 1
        return State
    def logConfigData(self):
        """Dump every configuration field to the log for diagnostics."""
        logerr('OutBufCS=             %04x' % self._OutBufCS)
        logerr('InBufCS=              %04x' % self._InBufCS)
        logerr('DeviceCS=             %04x' % self._DeviceCS)
        logdbg('ClockMode=            %s' % self._ClockMode)
        logdbg('TemperatureFormat=    %s' % self._TemperatureFormat)
        logdbg('PressureFormat=       %s' % self._PressureFormat)
        logdbg('RainFormat=           %s' % self._RainFormat)
        logdbg('WindspeedFormat=      %s' % self._WindspeedFormat)
        logdbg('WeatherThreshold=     %s' % self._WeatherThreshold)
        logdbg('StormThreshold=       %s' % self._StormThreshold)
        logdbg('LCDContrast=          %s' % self._LCDContrast)
        logdbg('LowBatFlags=          %01x' % self._LowBatFlags)
        logdbg('WindDirAlarmFlags=    %04x' % self._WindDirAlarmFlags)
        logerr('OtherAlarmFlags=      %04x' % self._OtherAlarmFlags)
        loginf('HistoryInterval=      %s' % self._HistoryInterval)
        logdbg('TempIndoor_Min=       %s' % self._TempIndoorMinMax._Min._Value)
        logdbg('TempIndoor_Max=       %s' % self._TempIndoorMinMax._Max._Value)
        logdbg('TempOutdoor_Min=      %s' % self._TempOutdoorMinMax._Min._Value)
        logdbg('TempOutdoor_Max=      %s' % self._TempOutdoorMinMax._Max._Value)
        logdbg('HumidityIndoor_Min=   %s' % self._HumidityIndoorMinMax._Min._Value)
        logdbg('HumidityIndoor_Max=   %s' % self._HumidityIndoorMinMax._Max._Value)
        logdbg('HumidityOutdoor_Min=  %s' % self._HumidityOutdoorMinMax._Min._Value)
        logdbg('HumidityOutdoor_Max=  %s' % self._HumidityOutdoorMinMax._Max._Value)
        logdbg('Rain24HMax=           %s' % self._Rain24HMax._Max._Value)
        logdbg('GustMax=              %s' % self._GustMax._Max._Value)
        logdbg('PressureRel_hPa_Min=  %s' % self._PressureRelative_hPaMinMax._Min._Value)
        logdbg('PressureRel_inHg_Min= %s' % self._PressureRelative_inHgMinMax._Min._Value)
        logdbg('PressureRel_hPa_Max=  %s' % self._PressureRelative_hPaMinMax._Max._Value)
        logdbg('PressureRel_inHg_Max= %s' % self._PressureRelative_inHgMinMax._Max._Value)
        logerr('ResetMinMaxFlags=     %06x (Output only)' % self._ResetMinMaxFlags)
class CHistoryDataSet(object):
    """One decoded history record from the station's history memory."""
    def __init__(self):
        """Initialize all fields to their not-present sentinel values."""
        self.m_Time = None
        self.m_TempIndoor = CWeatherTraits.TemperatureNP()
        self.m_HumidityIndoor = CWeatherTraits.HumidityNP()
        self.m_TempOutdoor = CWeatherTraits.TemperatureNP()
        self.m_HumidityOutdoor = CWeatherTraits.HumidityNP()
        self.m_PressureRelative = None
        self.m_WindDirection = 16    # 16 = invalid/unknown direction
        self.m_RainCounterRaw = 0
        self.m_WindSpeed = CWeatherTraits.WindNP()
        self.m_Gust = CWeatherTraits.WindNP()
    def read(self, buf, pos):
        """Decode one history record from the raw buffer.

        buf is a one-element list wrapping the byte buffer; pos is
        accepted but not used.  Field positions are fixed byte/nibble
        offsets of the history record layout; decoded values are
        logged at debug level.
        """
        logdbg('CHistoryDataSet::read')
        nbuf = [0]
        nbuf[0] = buf[0]
        self.m_Gust = USBHardware.toWindspeed_3_1(nbuf, 12, 0)
        self.m_WindDirection = (nbuf[0][14] >> 4) & 0xF
        self.m_WindSpeed = USBHardware.toWindspeed_3_1(nbuf, 14, 0)
        if ( self.m_WindSpeed == CWeatherTraits.WindNP() ):
            self.m_WindDirection = 16
        # BUGFIX: original condition used 'and' (< 0 and > 16), which can
        # never be true, so out-of-range directions were never clamped.
        if ( self.m_WindDirection < 0 or self.m_WindDirection > 16 ):
            self.m_WindDirection = 16
        self.m_RainCounterRaw = USBHardware.toRain_3_1(nbuf, 16, 1)
        self.m_HumidityOutdoor = USBHardware.toHumidity_2_0(nbuf, 17, 0)
        self.m_HumidityIndoor = USBHardware.toHumidity_2_0(nbuf, 18, 0)
        self.m_PressureRelative = USBHardware.toPressure_hPa_5_1(nbuf, 19, 0)
        self.m_TempIndoor = USBHardware.toTemperature_3_1(nbuf, 23, 0)
        self.m_TempOutdoor = USBHardware.toTemperature_3_1(nbuf, 22, 1)
        self.m_Time = USBHardware.toDateTime(nbuf, 25, 1, 'HistoryDataSet')
        logdbg("Time              %s"    % self.m_Time)
        logdbg("TempIndoor=       %7.1f" % self.m_TempIndoor)
        logdbg("HumidityIndoor=   %7.0f" % self.m_HumidityIndoor)
        logdbg("TempOutdoor=      %7.1f" % self.m_TempOutdoor)
        logdbg("HumidityOutdoor=  %7.0f" % self.m_HumidityOutdoor)
        logdbg("PressureRelative= %7.1f" % self.m_PressureRelative)
        logdbg("RainCounterRaw=   %7.1f" % self.m_RainCounterRaw)
        logdbg("WindDirection=    %7.0f" % self.m_WindDirection)
        logdbg("WindSpeed=        %7.1f" % self.m_WindSpeed)
        logdbg("Gust=             %7.1f" % self.m_Gust)
class CDataStore(object):
    """Shared state between the communication service and the station.

    Holds transceiver settings, communication settings, request state,
    a last-status cache, plus the most recent config/current/history
    data.  Settings are persisted with ConfigObj: transceiver settings
    to self.filename, last-status fields to the STATS_CACHE file.
    """

    class TTransceiverSettings(object):
        # Static USB/radio parameters of the transceiver dongle.
        def __init__(self):
            self.VendorId = 0x6666
            self.ProductId = 0x5555
            self.VersionNo = 1
            self.manufacturer = "LA CROSSE TECHNOLOGY"
            self.product = "Weather Direct Light Wireless Device"
            self.FrequencyStandard = EFrequency.fsUS
            self.Frequency = getFrequency(self.FrequencyStandard)
            self.SerialNumber = None
            self.DeviceID = None

    class TRequest(object):
        # State of the currently pending request to the station.
        def __init__(self):
            self.Type = 6
            self.State = ERequestState.rsError
            self.TTL = 90000
            self.Lock = threading.Lock()
            self.CondFinish = threading.Condition()

    class TLastStat(object):
        # Cache of the most recent link/battery/timestamp status.
        # The history index is restored from the stats cache file so a
        # restart does not re-fetch the whole archive.
        def __init__(self):
            self.LastBatteryStatus = [0]
            self.LastLinkQuality = 0
            self.OutstandingHistorySets = -1
            self.LastCurrentWeatherTime = datetime(1900, 01, 01, 00, 00)
            self.LastHistoryDataTime = datetime(1900, 01, 01, 00, 00)
            self.LastConfigTime = datetime(1900, 01, 01, 00, 00)
            self.LastSeen = None
            self.LastHistoryIndex = 0xffff
            filename = STATS_CACHE
            config = ConfigObj(filename)
            config.filename = filename
            # best-effort restore; keep the 0xffff sentinel on any failure
            try:
                self.LastHistoryIndex = int(config['LastStat']['HistoryIndex'])
            except:
                pass

    class TSettings(object):
        # Tunable communication parameters and the paired device id.
        def __init__(self):
            self.CommModeInterval = 3
            self.PreambleDuration = 5000
            self.RegisterWaitTime = 20000
            self.DeviceID = None

    def __init__(self, cfgfn):
        # cfgfn: path of the ConfigObj file used to persist
        # transceiver settings and the device configuration.
        self.filename = cfgfn
        self.Guards = 0
        self.Flags = 0
        self.FrontEndConfig = 0
        self.LastHistTimeStamp = 0
        self.BufferCheck = 0
        self.Request = CDataStore.TRequest()
        self.LastStat = CDataStore.TLastStat()
        self.Settings = CDataStore.TSettings()
        self.TransceiverSettings = CDataStore.TTransceiverSettings()
        self.DeviceConfig = CWeatherStationConfig(cfgfn)
        self.HistoryData = CHistoryDataSet()
        self.CurrentWeather = CCurrentWeatherData()

    def writeLastStat(self):
        # Persist the last-status cache to the STATS_CACHE file.
        filename = STATS_CACHE
        config = ConfigObj(filename)
        config.filename = filename
        config['LastStat'] = {}
        config['LastStat']['LastSeen'] = str(self.LastStat.LastSeen)
        config['LastStat']['LinkQuality'] = str(self.LastStat.LastLinkQuality)
        config['LastStat']['BatteryStatus'] = str(self.LastStat.LastBatteryStatus)
        config['LastStat']['HistoryIndex'] = str(self.LastStat.LastHistoryIndex)
        config['LastStat']['CurrentWeatherTime'] = str(self.LastStat.LastCurrentWeatherTime)
        config['LastStat']['HistoryDataTime'] = str(self.LastStat.LastHistoryDataTime)
        config['LastStat']['ConfigTime'] = str(self.LastStat.LastConfigTime)
        if DEBUG_WRITES > 0:
            logdbg('writeLastStat: write to %s' % filename)
        config.write()

    def writeTransceiverSettings(self):
        # Persist serial number, device id and frequency standard.
        config = ConfigObj(self.filename)
        config.filename = self.filename
        config['TransceiverSettings'] = {}
        config['TransceiverSettings']['SerialNumber'] = self.TransceiverSettings.SerialNumber
        config['TransceiverSettings']['DeviceID'] = self.TransceiverSettings.DeviceID
        config['TransceiverSettings']['FrequencyStandard'] = self.TransceiverSettings.FrequencyStandard
        if DEBUG_WRITES > 0:
            logdbg('writeTransceiverSettings: write to %s' % self.filename)
        config.write()

    def getFrequencyStandard(self):
        # Read from the config file, falling back to the in-memory value.
        config = ConfigObj(self.filename)
        config.filename = self.filename
        try:
            self.TransceiverSettings.FrequencyStandard = config['TransceiverSettings'].get('FrequencyStandard', EFrequency.fsUS)
        except:
            pass
        return self.TransceiverSettings.FrequencyStandard

    def setFrequencyStandard(self, val):
        logdbg('setFrequency: %s' % val)
        self.TransceiverSettings.FrequencyStandard = val
        self.TransceiverSettings.Frequency = getFrequency(val)
        self.writeTransceiverSettings()

    def getDeviceID(self):
        # Device id of the transceiver, read back from the config file.
        config = ConfigObj(self.filename)
        config.filename = self.filename
        try:
            self.TransceiverSettings.DeviceID = int(config['TransceiverSettings']['DeviceID'])
        except:
            pass
        return self.TransceiverSettings.DeviceID

    def setDeviceID(self,val):
        logdbg("setDeviceID: %x" % val)
        self.TransceiverSettings.DeviceID = val
        self.writeTransceiverSettings()

    def getRegisteredDeviceID(self):
        # Device id the console reports being paired with (not persisted).
        return self.Settings.DeviceID

    def setRegisteredDeviceID(self, val):
        if val != self.Settings.DeviceID:
            loginf("console is paired to device with ID %x" % val)
        self.Settings.DeviceID = val

    # The following flag accessors manipulate single bits of self.Flags
    # (a port of a C++ std::bitset<5>; bit numbers are in the <n> notes).

    def getFlag_FLAG_TRANSCEIVER_SETTING_CHANGE(self): # <4>
        flag = BitHandling.testBit(self.Flags, 4)
        #std::bitset<5>::at(thisa->Flags, &result, 4u);
        return flag

    def getFlag_FLAG_FAST_CURRENT_WEATHER(self): # <2>
        flag = BitHandling.testBit(self.Flags, 2)
        #return self.Flags_FLAG_SERVICE_RUNNING
        #std::bitset<5>::at(thisa->Flags, &result, 2u);
        return flag

    def getFlag_FLAG_TRANSCEIVER_PRESENT(self): # <0>
        flag = BitHandling.testBit(self.Flags, 0)
        #return self.Flags_FLAG_TRANSCEIVER_PRESENT
        return flag

    def getFlag_FLAG_SERVICE_RUNNING(self): # <3>
        flag = BitHandling.testBit(self.Flags, 3)
        #return self.Flags_FLAG_SERVICE_RUNNING
        return flag

    def setFlag_FLAG_TRANSCEIVER_SETTING_CHANGE(self,val): # <4>
        logdbg('set FLAG_TRANSCEIVER_SETTING_CHANGE to %s' % val)
        #std::bitset<5>::set(thisa->Flags, 4u, val);
        self.Flags = BitHandling.setBitVal(self.Flags,4,val)

    def setFlag_FLAG_FAST_CURRENT_WEATHER(self,val): # <2>
        logdbg('set FLAG_FAST_CURRENT_WEATHER to %s' % val)
        #std::bitset<5>::set(thisa->Flags, 2u, val);
        self.Flags = BitHandling.setBitVal(self.Flags,2,val)

    def setFlag_FLAG_TRANSCEIVER_PRESENT(self,val): # <0>
        logdbg('set FLAG_TRANSCEIVER_PRESENT to %s' % val)
        #std::bitset<5>::set(thisa->Flags, 0, val);
        self.Flags = BitHandling.setBitVal(self.Flags,0,val)

    def setFlag_FLAG_SERVICE_RUNNING(self,val): # <3>
        logdbg('set FLAG_SERVICE_RUNNING to %s' % val)
        #std::bitset<5>::set(thisa->Flags, 3u, val);
        self.Flags = BitHandling.setBitVal(self.Flags,3,val)

    def setLastStatCache(self, seen=None,
                         quality=None, battery=None,
                         currentWeatherTime=None):
        # Update any subset of the last-status cache, then persist it.
        logdbg('setLinkState')
        if seen is not None:
            self.LastStat.LastSeen = seen
        if quality is not None:
            self.LastStat.LastLinkQuality = quality
        if battery is not None:
            self.LastStat.LastBatteryStatus = battery
        if currentWeatherTime is not None:
            self.LastStat.LastCurrentWeatherTime = currentWeatherTime
        self.writeLastStat()

    def setLastLinkQuality(self, val):
        logdbg("setLastLinkQuality: quality=%d" % val)
        self.LastStat.LastLinkQuality = val
        self.writeLastStat()

    def setLastSeen(self, val):
        logdbg("setLastSeen: time=%s" % val)
        self.LastStat.LastSeen = val
        self.writeLastStat()

    def getLastSeen(self):
        return self.LastStat.LastSeen

    def setLastBatteryStatus(self, status):
        # console, thermo-hygro sensor, rain sensor, wind sensor (solar)
        # 0 - ?
        # 1 - ?
        # 2 - ?
        # 3 - display battery
        logdbg('setLastBatteryStatus: 3=%d 0=%d 1=%d 2=%d' %
               (BitHandling.testBit(status,3),
                BitHandling.testBit(status,0),
                BitHandling.testBit(status,1),
                BitHandling.testBit(status,2)))
        self.LastStat.LastBatteryStatus = status
        self.writeLastStat()

    def setCurrentWeather(self, data):
        logdbg('setCurrentWeather')
        self.CurrentWeather = data

    def setHistoryData(self, data):
        logdbg('setHistoryData')
        self.HistoryData = data

    def getHistoryData(self,clear):
        # Return a shallow copy of the history data under the request
        # lock.  NOTE(review): the 'clear' argument is ignored here.
        logdbg('getHistoryData')
        self.Request.Lock.acquire()
        History = copy.copy(self.HistoryData)
        self.Request.Lock.release()
        return History

    def requestNotify(self):
        # Port of the C++ RequestNotify (condition broadcast); the
        # Python implementation is intentionally a no-op for now.
        logdbg('requestNotify: not implemented')
        #ATL::CStringT<char_ATL::StrTraitATL<char_ATL::ChTraitsCRT<char>>>::CStringT<char_ATL::StrTraitATL<char_ATL::ChTraitsCRT<char>>>(
        #  &FuncName,
        #  "void __thiscall CDataStore::RequestNotify(void) const");
        #v6 = 0;
        #ATL::CStringT<char_ATL::StrTraitATL<char_ATL::ChTraitsCRT<char>>>::CStringT<char_ATL::StrTraitATL<char_ATL::ChTraitsCRT<char>>>(
        #  &Name,
        #  "Request->Lock");
        #LOBYTE(v6) = 1;
        #CScopedLock::CScopedLock(&lock, &thisa->Request->Lock, &Name, &FuncName);
        #LOBYTE(v6) = 3;
        #ATL::CStringT<char_ATL::StrTraitATL<char_ATL::ChTraitsCRT<char>>>::_CStringT<char_ATL::StrTraitATL<char_ATL::ChTraitsCRT<char>>>(&Name);
        #LOBYTE(v6) = 4;
        #ATL::CStringT<char_ATL::StrTraitATL<char_ATL::ChTraitsCRT<char>>>::_CStringT<char_ATL::StrTraitATL<char_ATL::ChTraitsCRT<char>>>(&FuncName);
        #boost::interprocess::interprocess_condition::notify_all(&thisa->Request->CondFinish);
        #v6 = -1;
        #self.Request.CondFinish.notifyAll()
        #CScopedLock::_CScopedLock(&lock);

    def setLastCurrentWeatherTime(self, val):
        logdbg("setLastCurrentWeatherTime to %s" % val)
        self.LastStat.LastCurrentWeatherTime = val
        self.writeLastStat()

    def setLastHistoryDataTime(self, val):
        logdbg("setLastHistoryDataTime to %s" % val)
        self.LastStat.LastHistoryDataTime = val
        self.writeLastStat()

    def setLastConfigTime(self, val):
        logdbg("setLastConfigTime to %s" % val)
        self.LastStat.LastConfigTime = val
        self.writeLastStat()

    def getBufferCheck(self):
        logdbg("BufferCheck=%x" % self.BufferCheck)
        return self.BufferCheck

    def setBufferCheck(self, val):
        logdbg("setBufferCheck to %x" % val)
        self.BufferCheck = val

    def operator(self):
        # Port of a C++ conversion operator: truthiness of all members.
        logdbg('operator')
        return (self.Guards
                and self.HistoryData
                and self.Flags
                and self.Settings
                and self.TransceiverSettings
                and self.LastSeen
                and self.CurrentWeather
                and self.DeviceConfig
                and self.FrontEndConfig
                and self.LastStat
                and self.Request
                and self.LastHistTimeStamp
                and self.BufferCheck)

    def getDeviceRegistered(self):
        # Paired only when the console's reported id matches ours.
        if ( self.Settings.DeviceID is None
             or self.TransceiverSettings.DeviceID is None
             or self.Settings.DeviceID != self.TransceiverSettings.DeviceID ):
            return False
        return True

    def getRequestType(self):
        return self.Request.Type

    def setRequestType(self, val):
        logdbg('setRequestType to %s' % val)
        self.Request.Type = val

    def getRequestState(self):
        return self.Request.State

    def setRequestState(self,state):
        logdbg("setRequestState to %x" % state)
        self.Request.State = state

    def getPreambleDuration(self):
        return self.Settings.PreambleDuration

    def getRegisterWaitTime(self):
        return self.Settings.RegisterWaitTime

    def getCommModeInterval(self):
        return self.Settings.CommModeInterval

    def setCommModeInterval(self,val):
        logdbg("setCommModeInterval to %x" % val)
        self.Settings.CommModeInterval = val

    def setOutstandingHistorySets(self,val):
        logdbg("setOutstandingHistorySets to %d" % val)
        self.LastStat.OutstandingHistorySets = val

    def setTransceiverSerNo(self,val):
        logdbg("setTransceiverSerialNumber to %s" % val)
        self.TransceiverSettings.SerialNumber = val
        self.writeTransceiverSettings()

    def getTransceiverSerNo(self):
        return self.TransceiverSettings.SerialNumber

    def setLastHistoryIndex(self,val):
        logdbg("setLastHistoryIndex to %x" % val)
        self.LastStat.LastHistoryIndex = val
        self.writeLastStat()

    def getLastHistoryIndex(self):
        logdbg("LastHistoryIndex=%x" % self.LastStat.LastHistoryIndex)
        return self.LastStat.LastHistoryIndex

    def firstTimeConfig(self, timeout):
        # Queue a first-time pairing request and wait (up to timeout ms)
        # for the communication thread to finish it.
        # NOTE(review): this method references self.DataStore, but
        # CDataStore has no such attribute -- it looks like this code was
        # moved from CCommunicationService; verify before relying on it.
        logdbg('firstTimeConfig: timeout=%s' % timeout)
        if not self.getFlag_FLAG_TRANSCEIVER_PRESENT():
            logerr('firstTimeConfig: no transceiver')
            return
        self.DataStore.DeviceID = None
        self.Request.Type = ERequestType.rtFirstConfig
        self.Request.State = ERequestState.rsQueued
        self.Request.TTL = 90000
        self.BufferCheck = 0
        try:
            self.Request.CondFinish.acquire()
        except:
            pass
        if self.Request.CondFinish.wait(timedelta(milliseconds=timeout).seconds):
            logdbg('firstTimeConfig: wait completed with state %s' %
                   self.Request.State)
            if self.Request.State == ERequestState.rsFinished: #2
                tid = self.DataStore.getDeviceID()
                rid = self.DataStore.getRegisteredDeviceID()
                if tid == rid:
                    loginf('firstTimeConfig: found device ID %s' % tid)
                else:
                    logerr('firstTimeConfig: pairing failed')
            else:
                logerr('firstTimeConfig: failed to obtain device ID')
                self.Request.Type = ERequestType.rtINVALID #6
                self.Request.State = ERequestState.rsINVALID #8
        else:
            logerr('firstTimeConfig: timeout before obtaining device ID')
        self.Request.CondFinish.release()

    def getCurrentWeather(self, data, timeout):
        # Queue a current-weather request and wait for completion.
        # NOTE(review): retrieval of the result is not implemented yet.
        logdbg('getCurrentWeather: timeout=%s' % timeout)
        if not self.getFlag_FLAG_TRANSCEIVER_PRESENT():
            logerr('getCurrentWeather: no transceiver')
            return
        if not self.getDeviceRegistered():
            logerr('getCurrentWeather: transceiver is not paired')
            return
        self.Request.Type = ERequestType.rtGetCurrent
        self.Request.State = ERequestState.rsQueued
        self.Request.TTL = 90000
        try:
            self.Request.CondFinish.acquire()
        except:
            pass
        if self.Request.CondFinish.wait(timedelta(milliseconds=timeout).seconds):
            # FIXME: implement getCurrentWeather
            #CDataStore::getCurrentWeather(thisa, Weather);
            pass
        else:
            pass
        self.Request.Type = ERequestType.rtINVALID
        self.Request.State = ERequestState.rsINVALID
        self.Request.CondFinish.release()

    def getHistory(self, data, timeout):
        # Queue a history request and wait for completion.
        # NOTE(review): retrieval of the result is not implemented yet.
        logdbg('getHistory: timeout=%s' % timeout)
        if not self.getFlag_FLAG_TRANSCEIVER_PRESENT():
            logerr('getHistory: no transceiver')
            return
        if not self.getDeviceRegistered():
            logerr('getHistory: transceiver is not paired')
            return
        self.Request.Type = ERequestType.rtGetHistory
        self.Request.State = ERequestState.rsQueued
        self.Request.TTL = 90000
        try:
            self.Request.CondFinish.acquire()
        except:
            pass
        if self.Request.CondFinish.wait(timedelta(milliseconds=timeout).seconds):
            # FIXME: implement getHistory
            #CDataStore::getHistoryData(thisa, History, 1);
            pass
        else:
            pass
        self.Request.Type = ERequestType.rtINVALID
        self.Request.State = ERequestState.rsINVALID
        self.Request.CondFinish.release()

    def getConfig(self):
        # Queue a get-config request (fire and forget).
        logdbg('getConfig')
        if not self.getFlag_FLAG_TRANSCEIVER_PRESENT():
            logerr('getConfig: no transceiver')
            return
        if not self.getDeviceRegistered():
            logerr('getConfig: transceiver is not paired')
            return
        # FIXME: implement getConfig
        self.Request.Type = ERequestType.rtGetConfig
        self.Request.State = ERequestState.rsQueued
        self.Request.TTL = 90000

    def setConfig(self):
        # Queue a set-config request (fire and forget).
        logdbg('setConfig')
        if not self.getFlag_FLAG_TRANSCEIVER_PRESENT():
            logerr('setConfig: no transceiver')
            return
        if not self.getDeviceRegistered():
            logerr('setConfig: transceiver is not paired')
            return
        self.Request.Type = ERequestType.rtSetConfig
        self.Request.State = ERequestState.rsQueued
        self.Request.TTL = 90000

    def setTime(self):
        # Queue a set-time request (fire and forget).
        logdbg('setTime')
        if not self.getFlag_FLAG_TRANSCEIVER_PRESENT():
            logerr('setTime: no transceiver')
            return
        if not self.getDeviceRegistered():
            logerr('setTime: transceiver is not paired')
            return
        # FIXME: implement setTime
        self.Request.Type = ERequestType.rtSetTime
        self.Request.State = ERequestState.rsQueued
        self.Request.TTL = 90000
class sHID(object):
"""USB driver abstraction"""
def __init__(self):
self.devh = None
self.timeout = 1000
def open(self, vid=0x6666, pid=0x5555):
device = self._find_device(vid, pid)
if device is None:
logcrt('Cannot find USB device with Vendor=0x%04x ProdID=0x%04x' %
(vid, pid))
raise weewx.WeeWxIOError('Unable to find USB device')
self._open_device(device)
def close(self):
self._close_device()
def _find_device(self, vid, pid):
for bus in usb.busses():
for device in bus.devices:
if device.idVendor == vid and device.idProduct == pid:
return device
return None
def _open_device(self, device, interface=0, configuration=1):
self._device = device
self._configuration = device.configurations[0]
self._interface = self._configuration.interfaces[0][0]
self._endpoint = self._interface.endpoints[0]
self.devh = device.open()
loginf('manufacturer: %s' % self.devh.getString(device.iManufacturer,30))
loginf('product: %s' % self.devh.getString(device.iProduct,30))
loginf('interface: %d' % self._interface.interfaceNumber)
# detach any old claimed interfaces
try:
self.devh.detachKernelDriver(self._interface.interfaceNumber)
except:
pass
# FIXME: this seems to be specific to ws28xx?
usbWait = 0.05
self.devh.getDescriptor(0x1, 0, 0x12)
time.sleep(usbWait)
self.devh.getDescriptor(0x2, 0, 0x9)
time.sleep(usbWait)
self.devh.getDescriptor(0x2, 0, 0x22)
time.sleep(usbWait)
# attempt to claim the interface
try:
if platform.system() is 'Windows':
loginf('set USB device configuration to %d' % configuration)
self.devh.setConfiguration(configuration)
logdbg('claiming USB interface %d' % interface)
self.devh.claimInterface(interface)
self.devh.setAltInterface(interface)
except usb.USBError, e:
self._close_device()
raise weewx.WeeWxIOError(e)
# FIXME: this seems to be specific to ws28xx?
# FIXME: check return value
self.devh.controlMsg(
usb.TYPE_CLASS + usb.RECIP_INTERFACE,
0x000000a, [], 0x0000000, 0x0000000, 1000)
time.sleep(0.05)
self.devh.getDescriptor(0x22, 0, 0x2a9)
time.sleep(usbWait)
def _close_device(self):
try:
logdbg('release USB interface')
self.devh.releaseInterface()
except:
pass
try:
logdbg('detach kernel driver')
self.devh.detachKernelDriver(self._interface.interfaceNumber)
except:
pass
def setTX(self):
buf = [0]*0x15
buf[0] = 0xD1
if DEBUG_COMM > 0:
self.dump('setTX', buf)
try:
self.devh.controlMsg(usb.TYPE_CLASS + usb.RECIP_INTERFACE,
request=0x0000009,
buffer=buf,
value=0x00003d1,
index=0x0000000,
timeout=self.timeout)
result = 1
except:
result = 0
return result
def setRX(self):
buf = [0]*0x15
buf[0] = 0xD0
if DEBUG_COMM > 0:
self.dump('setRX', buf)
try:
self.devh.controlMsg(usb.TYPE_CLASS + usb.RECIP_INTERFACE,
request=0x0000009,
buffer=buf,
value=0x00003d0,
index=0x0000000,
timeout=self.timeout)
result = 1
except:
result = 0
return result
def getState(self,StateBuffer):
try:
buf = self.devh.controlMsg(requestType=usb.TYPE_CLASS |
usb.RECIP_INTERFACE | usb.ENDPOINT_IN,
request=usb.REQ_CLEAR_FEATURE,
buffer=0x0a,
value=0x00003de,
index=0x0000000,
timeout=self.timeout)
if DEBUG_COMM > 0:
self.dump('getState', buf)
StateBuffer[0]=[0]*0x2
StateBuffer[0][0]=buf[1]
StateBuffer[0][1]=buf[2]
result = 1
except:
result = 0
return result
def readConfigFlash(self,addr,numBytes,data):
if numBytes <= 512:
while ( numBytes ):
buf=[0xcc]*0x0f #0x15
buf[0] = 0xdd
buf[1] = 0x0a
buf[2] = (addr >>8) & 0xFF
buf[3] = (addr >>0) & 0xFF
if DEBUG_COMM > 0:
self.dump('readConfigFlash>', buf)
try:
# FIXME: check return value
self.devh.controlMsg(usb.TYPE_CLASS + usb.RECIP_INTERFACE,
request=0x0000009,
buffer=buf,
value=0x00003dd,
index=0x0000000,
timeout=self.timeout)
result = 1
except:
result = 0
try:
buf = self.devh.controlMsg(requestType=usb.TYPE_CLASS |
usb.RECIP_INTERFACE |
usb.ENDPOINT_IN,
request=usb.REQ_CLEAR_FEATURE,
buffer=0x15,
value=0x00003dc,
index=0x0000000,
timeout=self.timeout)
result = 1
except:
result = 0
new_data=[0]*0x15
if ( numBytes < 16 ):
for i in xrange(0, numBytes):
new_data[i] = buf[i+4]
numBytes = 0
else:
for i in xrange(0, 16):
new_data[i] = buf[i+4]
numBytes -= 16
addr += 16
if DEBUG_COMM > 0:
self.dump('readConfigFlash<', buf)
result = 1
else:
result = 0
data[0] = new_data
return result
def setState(self,state):
buf = [0]*0x15
buf[0] = 0xd7
buf[1] = state
if DEBUG_COMM > 0:
self.dump('setState', buf)
try:
self.devh.controlMsg(usb.TYPE_CLASS + usb.RECIP_INTERFACE,
request=0x0000009,
buffer=buf,
value=0x00003d7,
index=0x0000000,
timeout=self.timeout)
result = 1
except:
result = 0
return result
def setFrame(self,data,numBytes):
# 00000000: d5 00 09 f0 f0 03 00 32 00 3f ff ff 00 00 00 00
# 00000000: d5 00 0c 00 32 c0 00 8f 45 25 15 91 31 20 01 00
# 00000000: d5 00 09 00 32 00 06 c1 00 3f ff ff 00 00 00 00
# 00000000: d5 00 09 00 32 01 06 c1 00 3f ff ff 00 00 00 00
# 00000000: d5 00 0c 00 32 c0 06 c1 47 25 15 91 31 20 01 00
# 00000000: d5 00 09 00 32 00 06 c1 00 30 01 a0 00 00 00 00
# 00000000: d5 00 09 00 32 02 06 c1 00 30 01 a0 00 00 00 00
# 00000000: d5 00 30 00 32 40 64 33 53 04 00 00 00 00 00 00
# 00000000: d5 00 09 00 32 00 06 ab 00 30 01 a0 00 00 00 00
# 00000000: d5 00 09 00 32 00 04 d0 00 30 01 a0 00 00 00 00
# 00000000: d5 00 09 00 32 02 04 d0 00 30 01 a0 00 00 00 00
# 00000000: d5 00 30 00 32 40 64 32 53 04 00 00 00 00 00 00
# 00000000: d5 00 09 00 32 00 04 cf 00 30 01 a0 00 00 00 00
buf = [0]*0x111
buf[0] = 0xd5
buf[1] = numBytes >> 8
buf[2] = numBytes
for i in xrange(0, numBytes):
buf[i+3] = data[i]
if DEBUG_COMM > 0:
self.dump('setFrame', buf)
try:
self.devh.controlMsg(usb.TYPE_CLASS + usb.RECIP_INTERFACE,
request=0x0000009,
buffer=buf,
value=0x00003d5,
index=0x0000000,
timeout=self.timeout)
result = 1
except:
result = 0
return result
def getFrame(self,data,numBytes):
try:
buf = self.devh.controlMsg(requestType=usb.TYPE_CLASS |
usb.RECIP_INTERFACE |
usb.ENDPOINT_IN,
request=usb.REQ_CLEAR_FEATURE,
buffer=0x111,
value=0x00003d6,
index=0x0000000,
timeout=self.timeout)
new_data=[0]*0x131
new_numBytes=(buf[1] << 8 | buf[2])& 0x1ff
for i in xrange(0, new_numBytes):
new_data[i] = buf[i+3]
if DEBUG_COMM > 0:
self.dump('getFrame', buf)
data[0] = new_data
numBytes[0] = new_numBytes
result = 1
except:
result = 0
return result
def writeReg(self,regAddr,data):
buf = [0]*0x05
buf[0] = 0xf0
buf[1] = regAddr & 0x7F
buf[2] = 0x01
buf[3] = data
buf[4] = 0x00
try:
self.devh.controlMsg(usb.TYPE_CLASS + usb.RECIP_INTERFACE,
request=0x0000009,
buffer=buf,
value=0x00003f0,
index=0x0000000,
timeout=self.timeout)
result = 1
except:
result = 0
return result
def execute(self,command):
buf = [0]*0x0f #*0x15
buf[0] = 0xd9
buf[1] = command
if DEBUG_COMM > 0:
self.dump('execute', buf)
try:
self.devh.controlMsg(usb.TYPE_CLASS + usb.RECIP_INTERFACE,
request=0x0000009,
buffer=buf,
value=0x00003d9,
index=0x0000000,
timeout=self.timeout)
result = 1
except:
result = 0
return result
def setPreamblePattern(self,pattern):
buf = [0]*0x15
buf[0] = 0xd8
buf[1] = pattern
if DEBUG_COMM > 0:
self.dump('setPreamblePattern', buf)
try:
self.devh.controlMsg(usb.TYPE_CLASS + usb.RECIP_INTERFACE,
request=0x0000009,
buffer=buf,
value=0x00003d8,
index=0x0000000,
timeout=self.timeout)
result = 1
except:
result = 0
return result
def dump(self, cmd, buf, fmt='matrix'):
if fmt == 'matrix':
strbuf = ''
for i,x in enumerate(buf):
strbuf += str('%02x ' % x)
if (i+1) % 16 == 0:
logdbg('%s: %s' % (cmd, strbuf))
strbuf = ''
if len(strbuf) > 0:
logdbg('%s: %s' % (cmd, strbuf))
else:
strbuf = ''
for i in buf:
strbuf += str("%.2x" % i)
if strbuf != 'de1500000000' or DEBUG_COMM > 1:
logdbg("%s: %s" % (cmd, strbuf))
class CCommunicationService(object):
AX5051RegisterNames_map = dict()
class AX5051RegisterNames:
REVISION = 0x0
SCRATCH = 0x1
POWERMODE = 0x2
XTALOSC = 0x3
FIFOCTRL = 0x4
FIFODATA = 0x5
IRQMASK = 0x6
IFMODE = 0x8
PINCFG1 = 0x0C
PINCFG2 = 0x0D
MODULATION = 0x10
ENCODING = 0x11
FRAMING = 0x12
CRCINIT3 = 0x14
CRCINIT2 = 0x15
CRCINIT1 = 0x16
CRCINIT0 = 0x17
FREQ3 = 0x20
FREQ2 = 0x21
FREQ1 = 0x22
FREQ0 = 0x23
FSKDEV2 = 0x25
FSKDEV1 = 0x26
FSKDEV0 = 0x27
IFFREQHI = 0x28
IFFREQLO = 0x29
PLLLOOP = 0x2C
PLLRANGING = 0x2D
PLLRNGCLK = 0x2E
TXPWR = 0x30
TXRATEHI = 0x31
TXRATEMID = 0x32
TXRATELO = 0x33
MODMISC = 0x34
FIFOCONTROL2 = 0x37
ADCMISC = 0x38
AGCTARGET = 0x39
AGCATTACK = 0x3A
AGCDECAY = 0x3B
AGCCOUNTER = 0x3C
CICDEC = 0x3F
DATARATEHI = 0x40
DATARATELO = 0x41
TMGGAINHI = 0x42
TMGGAINLO = 0x43
PHASEGAIN = 0x44
FREQGAIN = 0x45
FREQGAIN2 = 0x46
AMPLGAIN = 0x47
TRKFREQHI = 0x4C
TRKFREQLO = 0x4D
XTALCAP = 0x4F
SPAREOUT = 0x60
TESTOBS = 0x68
APEOVER = 0x70
TMMUX = 0x71
PLLVCOI = 0x72
PLLCPEN = 0x73
PLLRNGMISC = 0x74
AGCMANUAL = 0x78
ADCDCLEVEL = 0x79
RFMISC = 0x7A
TXDRIVER = 0x7B
REF = 0x7C
RXMISC = 0x7D
def __init__(self, cfgfn, interval=3):
logdbg('CCommunicationService.init')
now = datetime.now()
self.filename = cfgfn
self.RepeatSize = 0
self.RepeatInterval = None
self.RepeatTime = now #ptime
self.Regenerate = 0
self.GetConfig = 0
self.TimeSent = 0
self.TimeUpdate = 0
self.TimeUpdateComplete = 0
self.DataStore = CDataStore(cfgfn)
self.DataStore.setCommModeInterval(interval)
self.running = False
self.TimeDifSec = 0
self.DifHis = 0
self.shid = sHID()
def buildFirstConfigFrame(self,Buffer):
logdbg('buildFirstConfigFrame')
newBuffer = [0]
newBuffer[0] = [0]*9
cs = Buffer[0][5] | (Buffer[0][4] << 8)
self.DataStore.DeviceConfig.setDeviceCS(cs)
comInt = self.DataStore.getCommModeInterval()
historyAddress = 0xFFFFFF
newBuffer[0][0] = 0xf0
newBuffer[0][1] = 0xf0
newBuffer[0][2] = 3
newBuffer[0][3] = (cs >> 8) & 0xff
newBuffer[0][4] = (cs >> 0) & 0xff
newBuffer[0][5] = (comInt >> 4) & 0xff
newBuffer[0][6] = (historyAddress >> 16) & 0x0f | 16 * (comInt & 0xf)
newBuffer[0][7] = (historyAddress >> 8 ) & 0xff
newBuffer[0][8] = (historyAddress >> 0 ) & 0xff
Buffer[0] = newBuffer[0]
Length = 0x09
return Length
def buildConfigFrame(self,Buffer):
logdbg("buildConfigFrame")
newBuffer = [0]
newBuffer[0] = [0]*48
cfgBuffer = [0]
cfgBuffer[0] = [0]*44
changed = self.DataStore.DeviceConfig.testConfigChanged(cfgBuffer)
if changed:
newBuffer[0][0] = Buffer[0][0]
newBuffer[0][1] = Buffer[0][1]
newBuffer[0][2] = 0x40 # change this value if we won't store config
newBuffer[0][3] = Buffer[0][3]
for i in xrange(0,44):
newBuffer[0][i+4] = cfgBuffer[0][i]
Buffer[0] = newBuffer[0]
Length = 48 # 0x30
else: # current config not up to date; do not write yet
Length = 0
return Length
def buildTimeFrame(self,Buffer,checkMinuteOverflow):
logdbg("buildTimeFrame: checkMinuteOverflow=%x" % checkMinuteOverflow)
cs = self.DataStore.DeviceConfig.getDeviceCS()
now = time.time()
tm = time.localtime(now)
newBuffer=[0]
newBuffer[0]=Buffer[0]
Second = tm[5]
if Second > 59:
Second = 0 # I don't know if La Crosse support leap seconds...
if ( checkMinuteOverflow and (Second <= 5 or Second >= 55) ):
if ( Second < 55 ):
Second = 6 - Second
else:
Second = 60 - Second + 6
logdbg('buildTimeFrame: second=%s' % Second)
idx = self.DataStore.getLastHistoryIndex()
Length = self.buildACKFrame(newBuffer, 0, cs, idx, Second)
Buffer[0]=newBuffer[0]
else:
#00000000: d5 00 0c 00 32 c0 00 8f 45 25 15 91 31 20 01 00
#00000000: d5 00 0c 00 32 c0 06 c1 47 25 15 91 31 20 01 00
# 3 4 5 6 7 8 9 10 11
newBuffer[0][2] = 0xc0
newBuffer[0][3] = (cs >>8) & 0xFF
newBuffer[0][4] = (cs >>0) & 0xFF
newBuffer[0][5] = (tm[5] % 10) + 0x10 * (tm[5] // 10) #sec
newBuffer[0][6] = (tm[4] % 10) + 0x10 * (tm[4] // 10) #min
newBuffer[0][7] = (tm[3] % 10) + 0x10 * (tm[3] // 10) #hour
#DayOfWeek = tm[6] - 1; #ole from 1 - 7 - 1=Sun... 0-6 0=Sun
DayOfWeek = tm[6] #py from 0 - 6 - 0=Mon
newBuffer[0][8] = DayOfWeek % 10 + 0x10 * (tm[2] % 10) #DoW + Day
newBuffer[0][9] = (tm[2] // 10) + 0x10 * (tm[1] % 10) #day + month
newBuffer[0][10] = (tm[1] // 10) + 0x10 * ((tm[0] - 2000) % 10) #month + year
newBuffer[0][11] = (tm[0] - 2000) // 10 #year
self.Regenerate = 1
self.TimeSent = 1
Buffer[0]=newBuffer[0]
Length = 0x0c
return Length
def buildACKFrame(self,Buffer, action, deviceCS, historyIndex, comInt):
logdbg("action=%x deviceCS=%04x historyIndex=%x comInt=%x" % (action, deviceCS, historyIndex, comInt))
now = datetime.now()
newBuffer = [0]
newBuffer[0] = [0]*9
for i in xrange(0,2):
newBuffer[0][i] = Buffer[0][i]
if action != 3 and now - self.DataStore.LastStat.LastCurrentWeatherTime >= timedelta(seconds=8):
action = 5
newBuffer[0][2] = action & 0xF
if ( historyIndex >= 0x705 ):
historyAddress = 0xffffff
else:
if ( self.DataStore.getBufferCheck() != 1
and self.DataStore.getBufferCheck() != 2 ):
historyAddress = 18 * historyIndex + 0x1a0
else:
if ( historyIndex != 0xffff ):
historyAddress = 18 * (historyIndex - 1) + 0x1a0
else:
historyAddress = 0x7fe8
self.DataStore.setBufferCheck( 2)
newBuffer[0][3] = (deviceCS >> 8) &0xFF
newBuffer[0][4] = (deviceCS >> 0) &0xFF
if ( comInt == 0xFFFFFFFF ):
comInt = self.DataStore.getCommModeInterval()
newBuffer[0][5] = (comInt >> 4) & 0xFF
newBuffer[0][6] = (historyAddress >> 16) & 0x0F | 16 * (comInt & 0xF)
newBuffer[0][7] = (historyAddress >> 8 ) & 0xFF
newBuffer[0][8] = (historyAddress >> 0 ) & 0xFF
#d5 00 09 f0 f0 03 00 32 00 3f ff ff
Buffer[0]=newBuffer[0]
self.Regenerate = 0
self.TimeSent = 0
return 9
def handleWsAck(self,Buffer,Length):
logdbg('handleWsAck')
self.DataStore.setLastStatCache(seen=datetime.now(),
quality=(Buffer[0][3] & 0x7f),
battery=(Buffer[0][2] & 0xf))
Length[0] = 0
def handleConfig(self,Buffer,Length):
logdbg('handleConfig')
newBuffer=[0]
newBuffer[0] = Buffer[0]
newLength = [0]
now = datetime.now()
self.DataStore.setLastStatCache(seen=now,
quality=(Buffer[0][3] & 0x7f),
battery=(Buffer[0][2] & 0xf))
self.DataStore.DeviceConfig.read(newBuffer, 4)
idx = self.DataStore.getLastHistoryIndex()
start = 4
cs = newBuffer[0][43+start] | (newBuffer[0][42+start] << 8)
self.DataStore.DeviceConfig.setDeviceCS(cs)
self.DataStore.setLastConfigTime(now)
self.DataStore.setRequestType(ERequestType.rtGetCurrent)
rt = self.DataStore.getRequestType()
if rt == ERequestType.rtGetCurrent:
#self.DataStore.setRequestState(ERequestState.rsFinished)
#self.DataStore.requestNotify()
newLength[0] = self.buildACKFrame(newBuffer, 0, cs, idx, 0xFFFFFFFF)
elif rt == ERequestType.rtGetConfig:
newLength[0] = self.buildACKFrame(newBuffer, 3, cs, idx, 0xFFFFFFFF)
self.DataStore.setRequestState(ERequestState.rsRunning)
elif rt == ERequestType.rtSetConfig:
newLength[0] = self.buildACKFrame(newBuffer, 2, cs, idx, 0xFFFFFFFF)
self.DataStore.setRequestState(ERequestState.rsRunning)
elif rt == ERequestType.rtGetHistory:
newLength[0] = self.buildACKFrame(newBuffer, 5, cs, idx, 0xFFFFFFFF)
self.DataStore.setRequestState(ERequestState.rsRunning)
elif rt == ERequestType.rtSetTime:
newLength[0] = self.buildACKFrame(newBuffer, 1, cs, idx, 0xFFFFFFFF)
self.DataStore.setRequestState(ERequestState.rsRunning)
elif rt == ERequestType.rtINVALID:
newLength[0] = self.buildACKFrame(newBuffer, 0, cs, idx, 0xFFFFFFFF)
Buffer[0] = newBuffer[0]
Length[0] = newLength[0]
def handleCurrentData(self,Buffer,Length):
logdbg('handleCurrentData')
now = datetime.now()
self.DataStore.setLastStatCache(seen=now,
quality=(Buffer[0][3] & 0x7f),
battery=(Buffer[0][2] & 0xf),
currentWeatherTime=now)
newBuffer = [0]
newBuffer[0] = Buffer[0]
newLength = [0]
data = CCurrentWeatherData()
data.read(newBuffer, 6)
self.DataStore.setCurrentWeather(data)
cs = newBuffer[0][5] | (newBuffer[0][4] << 8)
self.DataStore.DeviceConfig.setDeviceCS(cs)
cfgBuffer = [0]
cfgBuffer[0] = [0]*44
changed = self.DataStore.DeviceConfig.testConfigChanged(cfgBuffer)
inBufCS = self.DataStore.DeviceConfig.getInBufCS()
if inBufCS == 0 or inBufCS != cs:
loginf('handleCurrentData: inBufCS of station not actual')
self.DataStore.setRequestType(ERequestType.rtGetConfig)
elif changed:
loginf('handleCurrentData: outBufCS of station changed')
self.DataStore.setRequestType(ERequestType.rtSetConfig)
else:
self.DataStore.setRequestType(ERequestType.rtGetHistory)
# cs = self.DataStore.GetDeviceConfigCS()
idx = self.DataStore.getLastHistoryIndex()
rt = self.DataStore.getRequestType()
if rt == ERequestType.rtGetCurrent: #0
self.DataStore.setRequestState(ERequestState.rsFinished) #2
self.DataStore.requestNotify()
newLength[0] = self.buildACKFrame(newBuffer, 0, cs, idx, 0xFFFFFFFF)
elif rt == ERequestType.rtGetConfig: #2
newLength[0] = self.buildACKFrame(newBuffer, 3, cs, idx, 0xFFFFFFFF)
self.DataStore.setRequestState(ERequestState.rsRunning) #1
elif rt == ERequestType.rtSetConfig: #3
newLength[0] = self.buildACKFrame(newBuffer, 2, cs, idx, 0xFFFFFFFF)
self.DataStore.setRequestState(ERequestState.rsRunning) #1
elif rt == ERequestType.rtGetHistory: #1
newLength[0] = self.buildACKFrame(newBuffer, 4, cs, idx, 0xFFFFFFFF)
self.DataStore.setRequestState(ERequestState.rsRunning) #1
elif rt == ERequestType.rtSetTime: #4
newLength[0] = self.buildACKFrame(newBuffer, 1, cs, idx, 0xFFFFFFFF)
self.DataStore.setRequestState(ERequestState.rsRunning) #1
elif rt == ERequestType.rtINVALID:
newLength[0] = self.buildACKFrame(newBuffer, 0, cs, idx, 0xFFFFFFFF)
Length[0] = newLength[0]
Buffer[0] = newBuffer[0]
def handleHistoryData(self,Buffer,Length):
    """Handle a history-data response (type 0x80) from the console.

    Decodes one CHistoryDataSet record from the frame, refreshes the
    cached station status, tracks how many history records are still
    outstanding, and builds the ACK frame for the next request.

    Buffer and Length are single-element lists used as in/out parameters
    (frame bytes and frame length, respectively).
    """
    logdbg('handleHistoryData')
    now = datetime.now()
    newBuffer = [0]
    newBuffer[0] = Buffer[0]
    newLength = [0]
    Data = CHistoryDataSet()
    Data.read(newBuffer, 12)  # history record payload starts at offset 12
    # device checksum is transmitted big-endian in bytes 4..5
    cs = newBuffer[0][5] | (newBuffer[0][4] << 8)
    self.DataStore.DeviceConfig.setDeviceCS(cs)
    self.DataStore.setLastStatCache(seen=now,
                                    quality=(Buffer[0][3] & 0x7f),
                                    battery=(Buffer[0][2] & 0xf))
    # 20-bit record addresses: latest record on the station and the one
    # transmitted in this frame (bytes 6..8 and 9..11)
    LatestHistoryAddres = ((((Buffer[0][6] & 0xF) << 8) | Buffer[0][7]) << 8) | Buffer[0][8]
    ThisHistoryAddres = ((((Buffer[0][9] & 0xF) << 8) | Buffer[0][10]) << 8) | Buffer[0][11]
    # records are 0x12 bytes each, starting at address 415
    # (Python 2 integer division is relied upon here)
    ThisHistoryIndex = (ThisHistoryAddres - 415) / 0x12
    LatestHistoryIndex = (LatestHistoryAddres - 415) / 0x12
    if ( ThisHistoryIndex == self.DataStore.getLastHistoryIndex()):
        # same record as last time: nothing new, just remember when we saw it
        self.DataStore.setLastHistoryDataTime(now)
        self.DataStore.setBufferCheck(0)
    else:
        self.DataStore.setHistoryData(Data)
        self.DataStore.setLastHistoryIndex(ThisHistoryIndex)
    # outstanding record count; the ring of indices wraps at 1797
    if ( LatestHistoryIndex >= ThisHistoryIndex ): #unused
        self.DifHis = LatestHistoryIndex - ThisHistoryIndex
    else:
        self.DifHis = LatestHistoryIndex + 1797 - ThisHistoryIndex
    if self.DifHis > 0:
        logdbg('handleHistoryData: m_Time=%s OutstandingHistorySets=%4i' %
               (Data.m_Time, self.DifHis))
    if ThisHistoryIndex == LatestHistoryIndex:
        # NOTE(review): timedelta.seconds ignores the days component of the
        # difference -- confirm this is intended for large clock offsets.
        self.TimeDifSec = (Data.m_Time - now).seconds
        if self.TimeDifSec > 43200:
            self.TimeDifSec = self.TimeDifSec - 86400 + 1
        logdbg('handleHistoryData: timeDifSec=%4s m_Time=%s max=%s' %
               (self.TimeDifSec, Data.m_Time, maxTimeDifference))
    else:
        logdbg('handleHistoryData: no recent history data: m_Time=%s' %
               Data.m_Time)
    # acknowledge the frame; the ACK action depends on the pending request
    rt = self.DataStore.getRequestType()
    # cs = self.DataStore.GetDeviceConfigCS()
    idx = ThisHistoryIndex
    if rt == ERequestType.rtGetCurrent:
        newLength[0] = self.buildACKFrame(newBuffer, 5, cs, idx, 0xFFFFFFFF)
        self.DataStore.setRequestState( ERequestState.rsRunning)
    elif rt == ERequestType.rtGetConfig:
        newLength[0] = self.buildACKFrame(newBuffer, 3, cs, idx, 0xFFFFFFFF)
        self.DataStore.setRequestState( ERequestState.rsRunning)
    elif rt == ERequestType.rtSetConfig:
        newLength[0] = self.buildACKFrame(newBuffer, 2, cs, idx, 0xFFFFFFFF)
        self.DataStore.setRequestState( ERequestState.rsRunning)
    elif rt == ERequestType.rtGetHistory:
        #self.DataStore.setRequestState( ERequestState.rsFinished)
        #self.DataStore.requestNotify()
        newLength[0] = self.buildACKFrame(newBuffer, 0, cs, idx, 0xFFFFFFFF)
    elif rt == ERequestType.rtSetTime:
        newLength[0] = self.buildACKFrame(newBuffer, 1, cs, idx, 0xFFFFFFFF)
        self.DataStore.setRequestState( ERequestState.rsRunning)
    elif rt == ERequestType.rtINVALID:
        newLength[0] = self.buildACKFrame(newBuffer, 0, cs, idx, 0xFFFFFFFF)
    # propagate the response frame through the in/out parameters
    Length[0] = newLength[0]
    Buffer[0] = newBuffer[0]
def handleNextAction(self,Buffer,Length):
    """Handle a 'next action' response (type 0xa0) from the console.

    The low nibble of byte 2 selects the action requested by the console
    (1 = first-time config, 2 = set config, 3 = set time); any other
    value falls through to acknowledging the frame according to the
    pending request type.

    Buffer and Length are single-element lists used as in/out parameters.
    """
    logdbg('handleNextAction')
    newBuffer = [0]
    newBuffer[0] = Buffer[0]
    newLength = [0]
    newLength[0] = Length[0]
    self.DataStore.setLastStatCache(seen=datetime.now(),
                                    quality=(Buffer[0][3] & 0x7f))
    # cs = self.DataStore.GetDeviceConfigCS()
    # device checksum is transmitted big-endian in bytes 4..5
    cs = newBuffer[0][5] | (newBuffer[0][4] << 8)
    self.DataStore.DeviceConfig.setDeviceCS(cs)
    # FIXME: is not [0][2] & 0xf the battery status?
    if (Buffer[0][2] & 0xF) == 1:
        logdbg('handleNextAction: 1 (first-time config)')
        newLength[0] = self.buildFirstConfigFrame(newBuffer)
    elif (Buffer[0][2] & 0xF) == 2:
        logdbg('handleNextAction: 2 (set config data)')
        # newLength[0] = self.buildConfigFrame(newBuffer, v16)
        pass
    elif (Buffer[0][2] & 0xF) == 3:
        logdbg('handleNextAction: 3 (set time data)')
        newLength[0] = self.buildTimeFrame(newBuffer, 1)
    else:
        logdbg('handleNextAction: %x' % (Buffer[0][2] & 0xF))
        # no explicit action requested: ACK according to the pending request
        rt = self.DataStore.getRequestType()
        idx = self.DataStore.getLastHistoryIndex()
        if rt == ERequestType.rtGetCurrent:
            newLength[0] = self.buildACKFrame(newBuffer, 5, cs, idx, 0xFFFFFFFF)
            self.DataStore.setRequestState( ERequestState.rsRunning)
        elif rt == ERequestType.rtGetHistory:
            newLength[0] = self.buildACKFrame(newBuffer, 4, cs, idx, 0xFFFFFFFF)
            self.DataStore.setRequestState( ERequestState.rsRunning)
        elif rt == ERequestType.rtGetConfig:
            newLength[0] = self.buildACKFrame(newBuffer, 3, cs, idx, 0xFFFFFFFF)
            self.DataStore.setRequestState( ERequestState.rsRunning)
        elif rt == ERequestType.rtSetConfig:
            newLength[0] = self.buildACKFrame(newBuffer, 2, cs, idx, 0xFFFFFFFF)
            self.DataStore.setRequestState( ERequestState.rsRunning)
        elif rt == ERequestType.rtSetTime:
            newLength[0] = self.buildACKFrame(newBuffer, 1, cs, idx, 0xFFFFFFFF)
            self.DataStore.setRequestState( ERequestState.rsRunning)
        else:
            # no pending request: either poll current weather quickly or idle
            if self.DataStore.getFlag_FLAG_FAST_CURRENT_WEATHER():
                newLength[0] = self.buildACKFrame(newBuffer, 5, cs, idx, 0xFFFFFFFF)
            else:
                newLength[0] = self.buildACKFrame(newBuffer, 0, cs, idx, 0xFFFFFFFF)
    # propagate the response frame through the in/out parameters
    Length[0] = newLength[0]
    Buffer[0] = newBuffer[0]
def configureRegisterNames(self):
    """Populate the AX5051 register map with its default values.

    The FREQ3..FREQ0 entries written here are placeholders; they are
    recalculated afterwards by calculateFrequency().
    """
    regs = self.AX5051RegisterNames
    defaults = [
        (regs.IFMODE,     0x00),
        (regs.MODULATION, 0x41),  # fsk
        (regs.ENCODING,   0x07),
        (regs.FRAMING,    0x84),  # 1000:0100 ##?hdlc? |1000 010 0
        (regs.CRCINIT3,   0xff),
        (regs.CRCINIT2,   0xff),
        (regs.CRCINIT1,   0xff),
        (regs.CRCINIT0,   0xff),
        (regs.FREQ3,      0x38),
        (regs.FREQ2,      0x90),
        (regs.FREQ1,      0x00),
        (regs.FREQ0,      0x01),
        (regs.PLLLOOP,    0x1d),
        (regs.PLLRANGING, 0x08),
        (regs.PLLRNGCLK,  0x03),
        (regs.MODMISC,    0x03),
        (regs.SPAREOUT,   0x00),
        (regs.TESTOBS,    0x00),
        (regs.APEOVER,    0x00),
        (regs.TMMUX,      0x00),
        (regs.PLLVCOI,    0x01),
        (regs.PLLCPEN,    0x01),
        (regs.RFMISC,     0xb0),
        (regs.REF,        0x23),
        (regs.IFFREQHI,   0x20),
        (regs.IFFREQLO,   0x00),
        (regs.ADCMISC,    0x01),
        (regs.AGCTARGET,  0x0e),
        (regs.AGCATTACK,  0x11),
        (regs.AGCDECAY,   0x0e),
        (regs.CICDEC,     0x3f),
        (regs.DATARATEHI, 0x19),
        (regs.DATARATELO, 0x66),
        (regs.TMGGAINHI,  0x01),
        (regs.TMGGAINLO,  0x96),
        (regs.PHASEGAIN,  0x03),
        (regs.FREQGAIN,   0x04),
        (regs.FREQGAIN2,  0x0a),
        (regs.AMPLGAIN,   0x06),
        (regs.AGCMANUAL,  0x00),
        (regs.ADCDCLEVEL, 0x10),
        (regs.RXMISC,     0x35),
        (regs.FSKDEV2,    0x00),
        (regs.FSKDEV1,    0x31),
        (regs.FSKDEV0,    0x27),
        (regs.TXPWR,      0x03),
        (regs.TXRATEHI,   0x00),
        (regs.TXRATEMID,  0x51),
        (regs.TXRATELO,   0xec),
        (regs.TXDRIVER,   0x88),
    ]
    for register, value in defaults:
        self.AX5051RegisterNames_map[register] = value
def calculateFrequency(self, freq):
    """Compute the AX5051 FREQ3..FREQ0 register values for ``freq``.

    Converts the base frequency into the synthesizer's fixed-point
    representation, applies the per-device correction stored in config
    flash (4 bytes at 0x1F5, big-endian), forces the value odd, and
    writes the four result bytes into the register map.
    """
    logdbg('calculateFrequency')
    loginf('base frequency: %d' % freq)
    freqVal = long(freq / 16000000.0 * 16777216.0)
    corVec = [None]
    if self.shid.readConfigFlash(0x1F5, 4, corVec):
        # assemble the big-endian 32-bit correction value
        corVal = 0
        for octet in corVec[0][:4]:
            corVal = (corVal << 8) | octet
        loginf('frequency correction: %d (%x)' % (corVal,corVal)) #0x184e8
        freqVal += corVal
    # the synthesizer requires an odd frequency value
    if freqVal % 2 == 0:
        freqVal += 1
    loginf('adjusted frequency: %d (%x)' % (freqVal,freqVal))
    for shift, register in ((24, self.AX5051RegisterNames.FREQ3),
                            (16, self.AX5051RegisterNames.FREQ2),
                            (8,  self.AX5051RegisterNames.FREQ1),
                            (0,  self.AX5051RegisterNames.FREQ0)):
        self.AX5051RegisterNames_map[register] = (freqVal >> shift) & 0xFF
    logdbg('frequency registers: %x %x %x %x' % (
           self.AX5051RegisterNames_map[self.AX5051RegisterNames.FREQ3],
           self.AX5051RegisterNames_map[self.AX5051RegisterNames.FREQ2],
           self.AX5051RegisterNames_map[self.AX5051RegisterNames.FREQ1],
           self.AX5051RegisterNames_map[self.AX5051RegisterNames.FREQ0]))
def generateResponse(self, Buffer, Length):
    """Dispatch an incoming frame to the matching handler and build the reply.

    Byte layout of the incoming frame (as read below): bytes 0..1 carry the
    device ID the console is addressing, the high 3 bits of byte 2 encode the
    response type.  0xF0F0 as ID means the console is not yet paired.
    Expected payload lengths are checked per response type; on any mismatch
    the reply length is set to 0.

    Returns 1 if a reply frame was produced, 0 otherwise.  Buffer and
    Length are single-element lists used as in/out parameters.
    """
    newBuffer = [0]
    newBuffer[0] = Buffer[0]
    newLength = [0]
    newLength[0] = Length[0]
    if Length[0] != 0:
        requestType = self.DataStore.getRequestType()
        bufferID = (Buffer[0][0] <<8) | Buffer[0][1]
        responseType = (Buffer[0][2] & 0xE0)
        logdbg("generateResponse: id=%04x resp=%x req=%x length=%x" %
               (bufferID, responseType, requestType, Length[0]))
        deviceID = self.DataStore.getDeviceID()
        self.DataStore.setRegisteredDeviceID(bufferID)
        if bufferID == 0xF0F0:
            loginf('generateResponse: console not paired, attempting to pair to 0x%04x' % deviceID)
            # 00000000: dd 0a 01 fe 18 f6 aa 01 2a a2 4d 00 00 87 16
            newLength[0] = self.buildACKFrame(newBuffer,3,deviceID,0xFFFF,0xFFFFFFFF)
        elif bufferID == deviceID:
            if responseType == 0x20:
                # WS SetTime / SetConfig ACK
                # 00000000: 00 00 06 00 32 20
                if Length[0] == 0x06:
                    self.DataStore.DeviceConfig.setResetMinMaxFlags(0)
                    self.DataStore.setRequestType(ERequestType.rtGetCurrent)
                    self.handleWsAck(newBuffer, newLength)
                else:
                    newLength[0] = 0
            elif responseType == 0x40:
                # configuration data
                # 00000000: 00 00 30 00 32 40
                if Length[0] == 0x30:
                    self.handleConfig(newBuffer, newLength)
                else:
                    newLength[0] = 0
            elif responseType == 0x60:
                # current weather data
                # 00000000: 00 00 d7 00 32 60
                if Length[0] == 0xd7: #215
                    self.handleCurrentData(newBuffer, newLength)
                else:
                    newLength[0] = 0
            elif responseType == 0x80:
                # history data
                # 00000000: 00 00 1e 00 32 80
                if Length[0] == 0x1e:
                    self.handleHistoryData(newBuffer, newLength)
                else:
                    newLength[0] = 0
            elif responseType == 0xa0:
                # 'next action' request from the console
                # 00000000: 00 00 06 f0 f0 a1
                # 00000000: 00 00 06 00 32 a3
                # 00000000: 00 00 06 00 32 a2
                if Length[0] == 0x06:
                    self.handleNextAction(newBuffer, newLength)
                else:
                    newLength[0] = 0
            else:
                logcrt('unrecognized response type %x' % responseType)
                newLength[0] = 0
        else:
            loginf('generateResponse: message from console contains unknown device ID (id=%04x resp=%x req=%x)' % (bufferID, responseType, requestType))
            log_frame(Length[0],Buffer[0])
            newLength[0] = 0
    # propagate the (possibly empty) reply through the in/out parameters
    Buffer[0] = newBuffer[0]
    Length[0] = newLength[0]
    if newLength[0] == 0:
        return 0
    return 1
def initTransceiver(self):
    """Initialize the USB RF transceiver.

    Reads the transceiver ID and serial number from config flash
    (7 bytes at 0x1F9), writes the full AX5051 register set, and brings
    the radio into receive mode.  Raises Exception if any step fails.
    """
    logdbg('initTransceiver')
    self.configureRegisterNames()
    self.calculateFrequency(self.DataStore.TransceiverSettings.Frequency)
    errmsg = ''
    buf = [None]
    if self.shid.readConfigFlash(0x1F9, 7, buf):
        # transceiver ID lives big-endian in bytes 5..6
        ID = buf[0][5] << 8
        ID += buf[0][6]
        loginf('transceiver ID: %d (%x)' % (ID,ID))
        self.DataStore.setDeviceID(ID)
        # serial number: all 7 bytes rendered as two decimal digits each
        SN = str("%02d"%(buf[0][0]))
        SN += str("%02d"%(buf[0][1]))
        SN += str("%02d"%(buf[0][2]))
        SN += str("%02d"%(buf[0][3]))
        SN += str("%02d"%(buf[0][4]))
        SN += str("%02d"%(buf[0][5]))
        SN += str("%02d"%(buf[0][6]))
        loginf('transceiver serial: %s' % SN)
        self.DataStore.setTransceiverSerNo(SN)
        # push the whole register map to the radio
        for r in self.AX5051RegisterNames_map:
            self.shid.writeReg(r, self.AX5051RegisterNames_map[r])
        if self.shid.execute(5):
            self.shid.setPreamblePattern(0xaa)
            if self.shid.setState(0x1e): # original was 0
                time.sleep(1)
                if self.shid.setRX():
                    pass
                else:
                    errmsg = 'setRX failed'
            else:
                errmsg = 'setState failed'
        else:
            errmsg = 'execute failed'
    else:
        errmsg = 'readConfigFlash failed'
    if errmsg != '':
        raise Exception('transceiver initialization failed: %s' % errmsg)
def setup(self, frequency):
    """Open and initialize the transceiver for the given frequency standard.

    Sets the relevant datastore flags before/after initialization and
    leaves the radio in receive mode.
    """
    self.DataStore.setFrequencyStandard(frequency)
    self.DataStore.setFlag_FLAG_TRANSCEIVER_SETTING_CHANGE(1)
    self.shid.open()
    self.initTransceiver()
    self.DataStore.setFlag_FLAG_TRANSCEIVER_PRESENT(1)
    self.shid.setRX()
def teardown(self):
    """Close the transceiver device."""
    self.shid.close()
def startRFThread(self):
    """Launch the RF communication loop in a daemonized background thread."""
    logdbg('startRFThread')
    self.running = True
    rf_thread = threading.Thread(target=self.doRF, name='RFComm')
    rf_thread.setDaemon(True)
    rf_thread.start()
def stopRFThread(self):
    """Signal the RF thread to exit after its current loop iteration."""
    logdbg('stopRFThread')
    self.running = False
def isRunning(self):
    """Return True while the RF thread is flagged as running."""
    return self.running
def doRF(self):
    """Body of the RF thread: loop doRFCommunication() until stopped.

    Any exception clears the running flag, is logged (with a traceback
    when weewx.debug is set), and is re-raised.  Note: Python 2 'except'
    syntax is used here.
    """
    try:
        logdbg('starting rf communication')
        while self.running:
            self.doRFCommunication()
        logdbg('stopping rf communication')
    except Exception, e:
        logerr('exception in doRF: %s' % e)
        self.running = False
        if weewx.debug:
            log_traceback(dst=syslog.LOG_DEBUG)
        raise
def doRFCommunication(self):
    """One iteration of the RF exchange with the console.

    Polls the transceiver state; when ready, reads a frame, lets
    generateResponse() build the reply, and sends it back.  Every failed
    step is logged.  Finally the radio is switched back to transmit.
    """
    StateBuffer = [None]
    ret = self.shid.getState(StateBuffer)
    if ret == 1:
        # single-element lists serve as in/out parameters for getFrame
        DataLength = [0]
        DataLength[0] = 0
        FrameBuffer=[0]
        FrameBuffer[0]=[0]*0x03
        ret = self.shid.getFrame(FrameBuffer, DataLength)
        if ret == 1:
            ret = self.generateResponse(FrameBuffer, DataLength)
            if ret == 1:
                self.shid.setState(0)
                ret = self.shid.setFrame(FrameBuffer[0], DataLength[0])
                if ret != 1:
                    logerr('setFrame failed')
            else:
                logerr('generateResponse failed')
        else:
            logerr('getFrame failed')
    else:
        logerr('getState failed')
    if self.shid.setTX() != 1:
        logerr('setTX failed')
| [
"mwall@picorino.local"
] | mwall@picorino.local |
69494011db6f576fb6ac7d0777a1dfb030eaad40 | 7847a3efcf248c260e161929a40469c3783abd0c | /tests/SampleSplitter_example1.py | 34095fcd61052935894a75495506047783ad8eb8 | [
"BSD-3-Clause"
] | permissive | paul-smiles/acoular | f8575f328c331c6334c46864178420482c0d5421 | 49647ff54150554422f44da90a80a0137ad82958 | refs/heads/master | 2023-02-16T09:50:03.823784 | 2021-01-03T23:40:21 | 2021-01-03T23:40:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,223 | py | # -*- coding: utf-8 -*-
#pylint: disable-msg=E0611, E1101, C0103, R0901, R0902, R0903, R0904, W0232
#------------------------------------------------------------------------------
# Copyright (c) 2007-2019, Acoular Development Team.
#------------------------------------------------------------------------------
"""
This Examples shows the use of SampleSplitter class in a multithreading scenario
"""
from acoular import TimePower,MaskedTimeSamples, SampleSplitter
import threading
from time import sleep
import numpy as np

# Number of samples to read from the example file.
samples = 25000
h5savefile = 'example_data.h5'
ts = MaskedTimeSamples(name=h5savefile,
                       start = 0,
                       stop = samples)

# set up Sample Splitter: distributes the sample stream of `ts` to several
# consumers via per-consumer block buffers
ss = SampleSplitter(source = ts)

# set up following objects (three consumers reading from the splitter)
tp1 = TimePower(source=ss)
tp2 = TimePower(source=ss)
tp3 = TimePower(source=ss)
ss.register_object(tp1,tp2,tp3) # register objects

# NOTE(review): these arrays are never used below -- kept for parity with the
# original example.
init_array1 = np.empty((samples, ts.numchannels),dtype=np.float32)
init_array2 = np.empty((samples, ts.numchannels),dtype=np.float32)
init_array3 = np.empty((samples, ts.numchannels),dtype=np.float32)

def print_number_of_blocks_in_block_buffers():
    """Print the current fill level of the splitter's per-consumer buffers."""
    buffers = list(ss.block_buffer.values())
    elements = [len(buf) for buf in buffers]
    print(dict(zip(['tp1','tp2','tp3'], elements)))

def do_stuff1(obj): # fast consumer (0.3 s per block)
    for _ in obj.result(2048):
        print("tp1 calls sample splitter")
        print_number_of_blocks_in_block_buffers()
        sleep(0.3)

def do_stuff2(obj): # medium consumer (0.5 s per block)
    for _ in obj.result(2048):
        print("tp2 calls sample splitter")
        print_number_of_blocks_in_block_buffers()
        sleep(0.5)

def do_stuff3(obj): # slow consumer (0.7 s per block)
    for _ in obj.result(2048):
        print("tp3 calls sample splitter")
        print_number_of_blocks_in_block_buffers()
        sleep(0.7)

worker1 = threading.Thread(target=do_stuff1, args=(tp1,))
worker2 = threading.Thread(target=do_stuff2, args=(tp2,))
worker3 = threading.Thread(target=do_stuff3, args=(tp3,))

print("start threads")
worker1.start()
worker2.start()
worker3.start()
# Plain loop instead of a throwaway list comprehension for the join side effect.
for thr in (worker1, worker2, worker3):
    thr.join()
print("threads finished")
| [
"kujawski.ad@gmail.com"
] | kujawski.ad@gmail.com |
0a6e42f7bbf25c493ed7c8c4a43b48bfa4bdac9c | 9553faf5286927bc52a64d43b5c01efa4b92f70e | /slicing.py | 2d305bbb5cb78177af0688773953c711f4e3e73b | [] | no_license | Mishrashivanand/Pythonmaster | 3f384c79bea9e3849a3ef36d6fe5af74e160ba14 | e0121827825504c5f78d19e6477038062da3607f | refs/heads/master | 2023-07-05T00:35:21.010715 | 2021-08-24T04:56:38 | 2021-08-24T04:56:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 446 | py | parrot = "Shivanand"
# Basic slicing: s[start:stop] is the half-open range [start, stop).
print(parrot[0:6])
print(parrot[3:6])
print(parrot[:9])   # omitted start defaults to 0
print(parrot[5:])   # omitted stop defaults to len(s)
print(parrot[:5] + parrot[5:])  # the two halves re-join to the whole string
print(parrot[:])    # full copy
# ===================
print("Slicing with negative no.")
# Negative indices count from the end of the string.
print(parrot[-4:4])
print(parrot[-4:-1])
# =================
print("Slicing with using step")
# Third slice component is the step: s[start:stop:step].
print(parrot[0:7:2])
print(parrot[1::3])
# ====================
new_str = "1,2,3,4,5:5,6;7,7:8:3:4;i'"
print(new_str[1::2])  # every second character starting at index 1
# ==================
"shivanandmishra14@gmail.com"
] | shivanandmishra14@gmail.com |
a6b3de13814493ba217c8efd7c3c78a34f6c9856 | dcd7eb9ee6cf2b78896a630fb2ebaa9470194971 | /blog/accounts/urls.py | 2621357d5562621a54ff38731e297135c9be5890 | [] | no_license | anuj-jaryal/django-blog | 0a10a275f421d9f8c23c0d2c4bad080d7d8ad8a7 | be90ed645bcaf02f8c12826da61869554e253c44 | refs/heads/master | 2023-07-17T14:54:44.018621 | 2021-09-03T06:44:02 | 2021-09-03T06:44:02 | 402,673,219 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,368 | py | from django.urls import include, path
from . import views
from django.contrib.auth import views as auth_views
urlpatterns=[
path('login/', auth_views.LoginView.as_view(template_name ='accounts/login.html'), name='login'),
path('logout/', auth_views.LogoutView.as_view(template_name ='accounts/log_out.html'), name='logout'),
path('password_change/', auth_views.PasswordChangeView.as_view(template_name ='accounts/password_change_form.html'), name='password_change'),
path('password_change/done/', auth_views.PasswordChangeDoneView.as_view(template_name ='accounts/password_change_done.html'), name='password_change_done'),
path('password_reset/', auth_views.PasswordResetView.as_view(template_name ='accounts/password_reset_form.html'), name='password_reset'),
path('password_reset/done/', auth_views.PasswordResetDoneView.as_view(template_name ='accounts/password_reset_done.html'), name='password_reset_done'),
path('reset/<uidb64>/<token>/', auth_views.PasswordResetConfirmView.as_view(template_name ='accounts/password_reset_confirm.html'), name='password_reset_confirm'),
path('reset/done/', auth_views.PasswordResetCompleteView.as_view(template_name ='accounts/password_reset_complete.html'), name='password_reset_complete'),
path('sign_up/',views.sign_up,name="sign-up"),
path('profile/',views.profile,name="profile")
] | [
"anuj@ourdesignz.in"
] | anuj@ourdesignz.in |
d5b44d84be7c7901c1635edaa14a5f4c9d9321c6 | fa04309288a0f8b2daae2fd73c8224a1c0ad4d95 | /eventkit_cloud/utils/tests/test_wcs.py | 14f1277c8a48f019d673a0273edd556c62f11626 | [] | no_license | jj0hns0n/eventkit-cloud | 7bb828c57f29887621e47fe7ce0baa14071ef39e | 2f749090baf796b507e79251a4c4b30cb0b4e126 | refs/heads/master | 2021-01-01T19:45:32.464729 | 2017-07-24T19:01:24 | 2017-07-24T19:01:24 | 98,675,805 | 0 | 0 | null | 2017-07-28T18:16:34 | 2017-07-28T18:16:34 | null | UTF-8 | Python | false | false | 2,133 | py | # -*- coding: utf-8 -*-
import logging
import os
from mock import Mock, patch
from django.conf import settings
from django.test import TransactionTestCase
from string import Template
from ..wcs import WCStoGPKG, WCStoGeotiff
from uuid import uuid4
logger = logging.getLogger(__name__)
class TestWCSToGPKG(TransactionTestCase):
    """Tests for the WCS-to-GeoPackage conversion utility."""

    def setUp(self):
        # Patch TaskProcess for the whole test so no external gdal process
        # is spawned; keep a handle to assert on the calls.
        self.path = settings.ABS_PATH()
        self.task_process_patcher = patch('eventkit_cloud.utils.wcs.TaskProcess')
        self.task_process = self.task_process_patcher.start()
        self.addCleanup(self.task_process_patcher.stop)
        self.task_uid = uuid4()

    @patch('eventkit_cloud.utils.wcs.os.path.exists')
    def test_create_convert(self, exists):
        """convert() should run the expected gdal_translate command and
        return the output path; a non-zero exit code should raise."""
        gpkg = '/path/to/sqlite.gpkg'
        bbox = [-45, -45, 45, 45]
        layer = 'awesomeLayer'
        name = 'Great export'
        service_url = 'http://my-service.org/some-server/wcs?'
        # template of the command WCStoGPKG is expected to build
        cmd = Template("gdal_translate -projwin $minX $maxY $maxX $minY -of GPKG -ot byte $wcs $out")
        exists.return_value = True
        self.task_process.return_value = Mock(exitcode=0)
        w2g = WCStoGPKG(out=gpkg,
                        bbox=bbox,
                        service_url=service_url,
                        layer=layer,
                        debug=False,
                        name=name,
                        service_type=None,
                        task_uid=self.task_uid)
        out = w2g.convert()
        self.task_process.assert_called_once_with(task_uid=self.task_uid)
        exists.assert_called_once_with(os.path.dirname(gpkg))
        # fill in the template exactly as the implementation should have
        cmd = cmd.safe_substitute({'out': gpkg, 'wcs': w2g.wcs_xml_path, 'minX': bbox[0], 'minY': bbox[1],
                                   'maxX': bbox[2], 'maxY': bbox[3]})
        self.task_process().start_process.assert_called_once_with(cmd, executable='/bin/sh', shell=True, stderr=-1,
                                                                  stdout=-1)
        self.assertEquals(out, gpkg)
        # a failing subprocess (exitcode 1) must surface as an exception
        self.task_process.return_value = Mock(exitcode=1)
        with self.assertRaises(Exception):
            w2g.convert()
| [
"joseph.svrcek@rgi-corp.com"
] | joseph.svrcek@rgi-corp.com |
db978cb55308c9705010fe8e0799d0f3dfcee515 | 62ccdb11daefaecc8e63f235c7519cc7594f705a | /images/google-cloud-sdk/lib/googlecloudsdk/command_lib/compute/instance_groups/managed/rolling_action.py | 62c3d07cac5b5ba9303df7b4585f2a7c5fcf9414 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | hiday1979/kalabasa-mas | eccc869bfe259bb474f9d2a4dc4b8561a481f308 | 53a9818eb2a6f35ee57c4df655e7abaaa3e7ef5b | refs/heads/master | 2021-07-05T16:34:44.962142 | 2018-07-10T10:22:24 | 2018-07-10T10:22:24 | 129,709,974 | 0 | 1 | null | 2020-07-24T22:15:29 | 2018-04-16T08:27:13 | Python | UTF-8 | Python | false | false | 4,470 | py | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create requests for rolling-action restart/recreate commands."""
from __future__ import absolute_import
from googlecloudsdk.api_lib.compute import managed_instance_groups_utils
from googlecloudsdk.command_lib.compute import flags
from googlecloudsdk.command_lib.compute import scope as compute_scope
from googlecloudsdk.command_lib.compute.instance_groups import flags as instance_groups_flags
from googlecloudsdk.command_lib.compute.managed_instance_groups import update_instances_utils
from googlecloudsdk.core.util import times
def CreateRequest(args,
cleared_fields,
client,
resources,
minimal_action,
max_surge=None):
"""Create request helper for compute instance-groups managed rolling-action.
Args:
args: argparse namespace
cleared_fields: Fields which are left cleared, but should be send in request
client: The compute client
resources: The compute resources
minimal_action: MinimalActionValueValuesEnum value
max_surge: InstanceGroupManagerUpdatePolicy.maxSurge value
Returns:
ComputeInstanceGroupManagersPatchRequest or
ComputeRegionInstanceGroupManagersPatchRequest instance
"""
resource_arg = instance_groups_flags.MULTISCOPE_INSTANCE_GROUP_MANAGER_ARG
default_scope = compute_scope.ScopeEnum.ZONE
scope_lister = flags.GetDefaultScopeLister(client)
igm_ref = resource_arg.ResolveAsResource(
args, resources, default_scope=default_scope, scope_lister=scope_lister)
update_policy_type = (client.messages.InstanceGroupManagerUpdatePolicy.
TypeValueValuesEnum.PROACTIVE)
max_unavailable = update_instances_utils.ParseFixedOrPercent(
'--max-unavailable', 'max-unavailable', args.max_unavailable,
client.messages)
igm_info = managed_instance_groups_utils.GetInstanceGroupManagerOrThrow(
igm_ref, client)
versions = (igm_info.versions or [
client.messages.InstanceGroupManagerVersion(
instanceTemplate=igm_info.instanceTemplate)
])
current_time_str = str(times.Now(times.UTC))
for i, version in enumerate(versions):
version.name = '%d/%s' % (i, current_time_str)
update_policy = client.messages.InstanceGroupManagerUpdatePolicy(
maxSurge=max_surge,
maxUnavailable=max_unavailable,
minReadySec=args.min_ready,
minimalAction=minimal_action,
type=update_policy_type)
igm_resource = client.messages.InstanceGroupManager(
instanceTemplate=None, updatePolicy=update_policy, versions=versions)
if igm_ref.Collection() == 'compute.instanceGroupManagers':
service = client.apitools_client.instanceGroupManagers
request = client.messages.ComputeInstanceGroupManagersPatchRequest(
instanceGroupManager=igm_ref.Name(),
instanceGroupManagerResource=igm_resource,
project=igm_ref.project,
zone=igm_ref.zone)
elif igm_ref.Collection() == 'compute.regionInstanceGroupManagers':
service = client.apitools_client.regionInstanceGroupManagers
request = client.messages.ComputeRegionInstanceGroupManagersPatchRequest(
instanceGroupManager=igm_ref.Name(),
instanceGroupManagerResource=igm_resource,
project=igm_ref.project,
region=igm_ref.region)
# Due to 'Patch' semantics, we have to clear either 'fixed' or 'percent'.
# Otherwise, we'll get an error that both 'fixed' and 'percent' are set.
if max_surge is not None:
cleared_fields.append('updatePolicy.maxSurge.fixed' if max_surge.fixed is
None else 'updatePolicy.maxSurge.percent')
if max_unavailable is not None:
cleared_fields.append('updatePolicy.maxUnavailable.fixed'
if max_unavailable.fixed is None else
'updatePolicy.maxUnavailable.percent')
return (service, 'Patch', request)
| [
"accounts@wigitech.com"
] | accounts@wigitech.com |
639b345f0dd64741c9480a1203ddfbbc9fef7881 | 75f3bd9718c7e8f36b3b93a53295955b0de67c10 | /cgi-bin/register.py | d4b4a21d8fb683bf2e57a48e40afae994947df46 | [] | no_license | suenchunfung/csci4140assignment_1 | 7a10e262dd604e9ed3421b873e1aff82c8ec7e85 | e27bfd642e12437eaed9b40dafa794b49485726e | refs/heads/master | 2021-09-07T20:51:20.698174 | 2018-02-28T23:05:23 | 2018-02-28T23:05:23 | 122,481,347 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,783 | py | #!D:/CSCI4140/python/python-3.5.4.amd64/python
import cgi, cgitb
import mysql.connector
import os

# Connect to the account database.
# FIXME: credentials are hard-coded and passwords are stored in plain text --
# both should be addressed (config file / password hashing).
userdb = mysql.connector.connect(user='alan', password='alansuen',host='localhost',database='accountdata')
cursor = userdb.cursor()

# Read the user's form entries.
form = cgi.FieldStorage()
uName = form.getvalue('userName')
password = form.getvalue('password')
pswConfirm = form.getvalue('pswConfirm')

# Emit the HTTP header and the start of the HTML page.
print ("Content-type:text/html\r\n\r\n")
print ("<html><head>")
print ("<title>Web Instagram</title>")
print ("</head>")
print ("<body>")

# Register the user.
if pswConfirm != password:
    # The two password fields must match.
    print ("<h2>Error</h2><h4>re-entered password is not the same</h4>")
    print ("<a href='/createAccount.py'>back</a>")
else:
    # Check whether the user name is already taken.  Use a parameterized
    # query -- never interpolate user-supplied input into SQL (injection).
    cursor.execute("SELECT username FROM account WHERE username=%s", (uName,))
    row = cursor.fetchone()
    if row is not None:
        print ("<h2>Error</h2><h4>The user name is already in used</h4>")
        print ("<a href='/createAccount.py'>back</a>")
    else:
        # Create the account (parameterized INSERT).
        newUser = ("INSERT INTO account (username, password) VALUES (%(username)s, %(password)s)")
        data = { 'username': uName, 'password': password}
        cursor.execute(newUser,data)
        userdb.commit()
        # create directory for the user (kept disabled, as in the original)
        # if not os.path.exists('/CSCI4140/xampp/htdocs/'+uName):
        #     os.makedirs('/CSCI4140/xampp/htdocs/'+uName)
        # if not os.path.exists('/CSCI4140/xampp/htdocs/'+uName+'/thumbnails'):
        #     os.makedirs('/CSCI4140/xampp/htdocs/'+uName+'/thumbnails')
        print ("<h2>Create user account success.</h2>")
        print ("<a href='/index.py'>back</a>")

# Clean up the database connection and close the page.
cursor.close()
userdb.close()
print ("</body></html>")
"noreply@github.com"
] | noreply@github.com |
34156a81e74b28c1ef78db596a80568585cedbfa | 835734b9bbe59130dd19cc0b921ae73ec65e9e28 | /tests/sequence_problems/test_open_reading_frames.py | 5e814dc9494fdd572bccacedef9cfa348f185cb0 | [
"MIT"
] | permissive | Vikdemen/RosalindPS | a588e76841692cffe2e3f45333b5875160e42e59 | 05cb3c2162e569bd92a99b9be127999cae1babf7 | refs/heads/master | 2022-04-09T18:49:28.885834 | 2020-03-06T22:11:17 | 2020-03-06T22:11:17 | 236,085,620 | 1 | 1 | MIT | 2020-03-02T02:03:13 | 2020-01-24T21:20:50 | Python | UTF-8 | Python | false | false | 547 | py | from rps.sequence_problems.open_reading_frames import get_possible_proteins
def test_get_possible_proteins():
    """All ORFs of the sample record must translate to the expected protein set."""
    record = [
        ">Rosalind_99",
        "AGCCATGTAGCTAACTCAGGTTACATGGGGATGACCCCGCGACTTGGATTAGAGTCTCTTTTGGAATAAGCCTGAATGATCCGAGTAGCATCTCAG",
    ]
    expected = {"MLLGSFRLIPKETLIQVAGSSPCNLS", "M", "MGMTPRLGLESLLE", "MTPRLGLESLLE"}
    produced = set(get_possible_proteins(record).split('\n'))
    assert produced == expected
| [
"viktor.demen@gmail.com"
] | viktor.demen@gmail.com |
cdd55f8af17b80727f2dda56691dd14105158f6d | 9d374965105f8de9eda124aa347b2caf79e948e2 | /venv/bin/pip3 | 331ced65569ac4a97275c5d53ab22930ae17cc25 | [] | no_license | askvart/todolist | 4d6879fe4028a70a3627345927aa3c0912ccfd2b | f4b17b57458929b74c9d09629354630c2c0f170c | refs/heads/master | 2020-07-30T09:41:30.954550 | 2019-09-22T16:17:33 | 2019-09-22T16:17:33 | 210,177,343 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 404 | #!/home/askvart/PycharmProjects/todoapp/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip3')()
)
| [
"askvart@gmail.com"
] | askvart@gmail.com | |
108a32ed3ade78f6a46e94da15d907ca749b279e | 71e3e2429385094f2d77d06c6de2a59f80c2ad69 | /file downloading/project40/asgi.py | 1cef47061134737b27dd6c6573c8c29ee306de1c | [] | no_license | hiitsRanjan/file-downloading | 153a797d1ef131635e000b22d12d8ab5783f3fbe | 82652fe394c2da3193e22d3288864e09904333a9 | refs/heads/main | 2023-02-17T20:29:08.023754 | 2021-01-19T11:06:35 | 2021-01-19T11:06:35 | 330,951,547 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | """
ASGI config for project40 project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'project40.settings')
application = get_asgi_application()
| [
"gmhohanty890@gmail.com"
] | gmhohanty890@gmail.com |
3e3a1a5603e76c1b63c542ea26dd7461d1ec909d | e6ba92a23cbf92d8727e8a7a057e664d4873db40 | /extdirect/django/store.py | 4adb8baea09ef75a92b67bfe27905cb4a6f90145 | [
"BSD-3-Clause"
] | permissive | bitkeeper/extdirect.cherrypy | 0e4cd4c9b5abf538bad407fded5cf265b020392f | e725ad8fe7b8f3fc144d0e78778a054fc6b293c6 | refs/heads/master | 2020-04-01T21:47:26.146205 | 2010-09-22T14:06:41 | 2010-09-22T14:06:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,837 | py | from django.core.serializers import serialize
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from metadata import meta_fields
class ExtDirectStore(object):
"""
Implement the server-side needed to load an Ext.data.DirectStore
"""
def __init__(self, model, extras=[], root='records', total='total', \
success='success', message='message', start='start', limit='limit', \
sort='sort', dir='dir', metadata=False, id_property='id', \
mappings={}, sort_info={}, custom_meta={}, exclude_fields=[], \
extra_fields=[], get_metadata=None):
self.model = model
self.root = root
self.total = total
self.success = success
self.extras = extras
self.id_property = id_property
self.message = message
self.exclude_fields = exclude_fields
# paramNames
self.start = start
self.limit = limit
self.sort = sort
self.dir = dir
self.metadata = {}
if metadata:
fields = meta_fields(model, mappings, exclude_fields, get_metadata) + extra_fields
self.metadata = {
'idProperty': id_property,
'root': root,
'totalProperty': total,
'successProperty': success,
'fields': fields,
'messageProperty': message
}
if sort_info:
self.metadata.update({'sortInfo': sort_info})
self.metadata.update(custom_meta)
def query(self, qs=None, metadata=True, **kw):
paginate = False
total = None
order = False
if kw.has_key(self.start) and kw.has_key(self.limit):
start = kw.pop(self.start)
limit = kw.pop(self.limit)
paginate = True
if kw.has_key(self.sort) and kw.has_key(self.dir):
sort = kw.pop(self.sort)
dir = kw.pop(self.dir)
order = True
if dir == 'DESC':
sort = '-' + sort
if not qs is None:
# Don't use queryset = qs or self.model.objects
# because qs could be empty list (evaluate to False)
# but it's actually an empty queryset that must have precedence
queryset = qs
else:
queryset = self.model.objects
queryset = queryset.filter(**kw)
if order:
queryset = queryset.order_by(sort)
if not paginate:
objects = queryset
total = queryset.count()
else:
paginator = Paginator(queryset, limit)
total = paginator.count
try:
page = paginator.page(start / limit + 1)
except (EmptyPage, InvalidPage):
#out of range, deliver last page of results.
page = paginator.page(paginator.num_pages)
objects = page.object_list
return self.serialize(objects, metadata, total)
def serialize(self, queryset, metadata=True, total=None):
meta = {
'root': self.root,
'total' : self.total,
'success': self.success,
'idProperty': self.id_property
}
res = serialize('extdirect', queryset, meta=meta, extras=self.extras,
total=total, exclude_fields=self.exclude_fields)
if metadata and self.metadata:
res['metaData'] = self.metadata
return res
| [
"santiago.videla@gmail.com"
] | santiago.videla@gmail.com |
5928094b2687fc804ce02fb2e9df0facf5960f5d | 76f05e9d10c6368d2866958db9ff5959c66a3177 | /polly/asgi.py | a1494a45269ed38148339cee0554a3302df7b5dd | [] | no_license | medvykes/polls | 3a093460a51bdbe806525c88b96026a4340e22af | 0fc06f1fe9e8393750b5e3f98c19abddb491fbde | refs/heads/main | 2023-08-01T07:23:37.052517 | 2021-09-03T17:16:40 | 2021-09-03T17:16:40 | 402,844,678 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 387 | py | """
ASGI config for funny project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'funny.settings')
application = get_asgi_application()
| [
"61887497+medvykes@users.noreply.github.com"
] | 61887497+medvykes@users.noreply.github.com |
c7d90f35e98f547498e4ac58bd24c52bf0e03f4f | 9404b743f04a87626f117e394ed0877445f88efe | /DK_Project/market/urls.py | 0e353a9701ea3776ba2bdda7ad4315f8865da274 | [
"Apache-2.0"
] | permissive | xedporject/DK | 3497ddfb03521d856e3e9a1874e310db30d64fee | af8f9521011ac1ee0256db4863220abbbf9699ac | refs/heads/master | 2020-03-24T22:37:29.871222 | 2018-08-11T07:42:47 | 2018-08-11T07:42:47 | 143,094,839 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 409 | py |
from django.conf.urls import url
from rest_framework.routers import SimpleRouter

from market import views

# The DRF SimpleRouter auto-generates the standard list/detail routes
# for each registered viewset.
router = SimpleRouter()
router.register(r'^goods', views.GoodsApi)
router.register(r'brand', views.BrandApi)
router.register(r'category', views.CategoryApi)

urlpatterns = [
    url(r'^index/', views.index, name='index'),
    # Captured group (\d+) is the goods primary key passed to the view.
    url(r'goods/details/(\d+)/', views.details)
]
# Router-generated routes are appended after the explicit patterns.
urlpatterns += router.urls
| [
"1367000465@qq.com"
] | 1367000465@qq.com |
19b14422c661048132ff074511e6e2f9dafaa607 | dd50c8e20eb21d7becd94ec04cf8084bb8998ce2 | /LiePinSpider/pipelines.py | d26a61abe248a6bd8f88a0f4e6d329e6a2d07ded | [] | no_license | pekeng/Tokenhouse | 4f9b4afce67ff998728cf87944a29fb8513316f3 | 1477ede1cefa7c43fbbde9fde18d81bf73ea3a57 | refs/heads/master | 2020-03-28T07:23:25.121993 | 2018-09-08T03:35:29 | 2018-09-08T03:35:29 | 147,894,534 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,932 | py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymysql
class LiepinspiderPipeline(object):
    """Default Scrapy pipeline stub: passes every item through unchanged."""
    def process_item(self, item, spider):
        # No-op: return the item so later pipelines still receive it.
        return item
class MyliepinPipelines(object):
    """Scrapy pipeline that persists scraped Liepin job postings into MySQL."""
    # Connect to the database.
    # SECURITY NOTE(review): host/user/password are hardcoded here — move them
    # to Scrapy settings or environment variables before sharing this code.
    def __init__(self):
        self.connect = pymysql.connect(host='192.168.0.126',
                                      port=3306,
                                      user='root',
                                      password='rootpasswd',
                                      database='jianjian',
                                      charset='utf8'
                                      )
        self.cursor = self.connect.cursor()

    def process_item(self, item, spider):
        # Parameterized INSERT (values bound via %s placeholders, not
        # string-formatted), so item text cannot inject SQL.
        insert_sql = 'insert into ss(company,release_time,\
        money,qualifications,job_descript,department,\
        major,reportor,under_nums,place,company_place,\
        company_scale,company_major,company_introduce)\
        value (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)'
        try:
            self.cursor.execute(insert_sql,(item['company'] ,item['release_time'] \
            ,item['money'] ,item['qualifications'] ,item['job_descript'] \
            ,item['department'] ,item['major'] ,item['reportor'] \
            ,item['under_nums'] ,item['place'] ,item['company_place'] \
            ,item['company_scale'] ,item['company_major'] \
            ,item['company_introduce']))
            self.connect.commit()
        except Exception as e:
            # Best-effort: log the failure and roll back so the connection
            # stays usable for the next item.
            print(e)
            self.connect.rollback()
        return item
| [
"1304677936@qq.com"
] | 1304677936@qq.com |
d51e0b54497d62f9511db1030a8af93fea2fdc67 | 931a3304ea280d0a160acb87e770d353368d7d7d | /vendor/swagger_client/models/get_fw_leaderboards_characters_active_total.py | 84445a28e19347e99ebfd6bf4119f7a6f4c946d2 | [] | no_license | LukeS5310/Broadsword | c44786054e1911a96b02bf46fe4bdd0f5ad02f19 | 3ba53d446b382c79253dd3f92c397cca17623155 | refs/heads/master | 2021-09-08T00:05:26.296092 | 2017-10-24T07:01:48 | 2017-10-24T07:01:48 | 105,143,152 | 0 | 1 | null | 2017-11-03T14:29:38 | 2017-09-28T12:03:19 | Python | UTF-8 | Python | false | false | 3,883 | py | # coding: utf-8
"""
EVE Swagger Interface
An OpenAPI for EVE Online
OpenAPI spec version: 0.6.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class GetFwLeaderboardsCharactersActiveTotal(object):
    """Swagger model for one faction-warfare character kill-count entry.

    Mirrors the swagger-codegen model contract: ``swagger_types`` /
    ``attribute_map`` instance attributes plus ``to_dict`` / ``to_str`` /
    ``__repr__`` / ``__eq__`` / ``__ne__``.
    """

    def __init__(self, amount=None, character_id=None):
        """Create the model.

        :param amount: amount of kills
        :param character_id: EVE character id
        """
        # Attribute name -> swagger type, and attribute name -> JSON key.
        self.swagger_types = {'amount': 'int', 'character_id': 'int'}
        self.attribute_map = {'amount': 'amount', 'character_id': 'character_id'}
        self._amount = amount
        self._character_id = character_id

    @property
    def amount(self):
        """Amount of kills."""
        return self._amount

    @amount.setter
    def amount(self, amount):
        """Set the amount of kills."""
        self._amount = amount

    @property
    def character_id(self):
        """character_id integer."""
        return self._character_id

    @character_id.setter
    def character_id(self, character_id):
        """Set the character_id integer."""
        self._character_id = character_id

    def _convert(self, value):
        # Recursively turn nested models / containers into plain values
        # for to_dict(), matching the generated template's behavior.
        if isinstance(value, list):
            return [v.to_dict() if hasattr(v, "to_dict") else v for v in value]
        if hasattr(value, "to_dict"):
            return value.to_dict()
        if isinstance(value, dict):
            return {k: (v.to_dict() if hasattr(v, "to_dict") else v)
                    for k, v in value.items()}
        return value

    def to_dict(self):
        """Return the model properties as a dict."""
        return {attr: self._convert(getattr(self, attr))
                for attr in self.swagger_types}

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Two models are equal iff types and attribute dicts match."""
        if not isinstance(other, GetFwLeaderboardsCharactersActiveTotal):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
| [
"cyberlibertyx@gmail.com"
] | cyberlibertyx@gmail.com |
f2e96690da834c6acb105e680115e2c9711f39c0 | 80acc6e7e09024bf9a0f30d58be1835035dd0305 | /bonfire/bonfire.py | c96131deb0813b1ad91afaf678b4530da8735895 | [] | no_license | btbytes/2007 | 6f0c77683aefa81e9b03ea2edec03866c88d6b42 | a275dbd673196a95815e9d1b39a05cf7b204f9f5 | refs/heads/master | 2021-01-19T16:58:53.297440 | 2008-09-09T04:09:03 | 2008-09-09T04:09:03 | 32,154,604 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,604 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Bonfire website builder
======================
Generate HTML documents out of .rst files
and Mako templates.
Derived from Georg Brandl's Pocoo projects www site builder - sphinx.py
:copyright: 2006 by Georg Brandl, 2007 Pradeep Gowda
:license: GNU GPL.
"""
import sys, os
import os.path as path
import time
import getopt
import shutil
import string
from email.Utils import formatdate
from docutils import nodes
from docutils.core import publish_parts
from docutils.writers import html4css1
from docutils.parsers import rst
from docutils.parsers.rst import directives
from mako.template import Template
from mako.runtime import Context
from mako.lookup import TemplateLookup
from StringIO import StringIO
from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import HtmlFormatter
pygments_formatter = HtmlFormatter()
def pygments_directive(name, arguments, options, content, lineno,
                       content_offset, block_text, state, state_machine):
    """docutils directive handler for ``.. sourcecode:: <lexer>``: highlight
    the directive content with Pygments and return it as a raw HTML node."""
    try:
        lexer = get_lexer_by_name(arguments[0])
    except ValueError:
        # no lexer found - use the text one instead of an exception
        lexer = get_lexer_by_name('text')
    parsed = highlight(u'\n'.join(content), lexer, pygments_formatter)
    return [nodes.raw('', parsed, format='html')]
# docutils directive metadata: (required args, optional args,
# final-argument-whitespace); one required argument = the lexer name.
pygments_directive.arguments = (1, 0, 1)
# The directive accepts body content (the code to highlight).
pygments_directive.content = 1
directives.register_directive('sourcecode', pygments_directive)
__version__ = '2'
slur_chars = string.letters + string.digits + "_-"
def mkdir(dir):
    """Create *dir* (including parents), tolerating an already-existing dir.

    Re-raises the error only when the path still is not a directory
    afterwards (permission problem, or the path exists as a plain file).
    """
    try:
        os.makedirs(dir)
    except OSError:
        # Narrowed from a bare `except:`: only filesystem errors are
        # expected here, and KeyboardInterrupt/SystemExit must propagate.
        if not path.isdir(dir):
            raise
def rmtree(dir):
    """Remove the directory tree *dir*; a missing path is not an error.

    Re-raises the error only when the path still exists as a directory,
    i.e. the removal genuinely failed.
    """
    try:
        shutil.rmtree(dir)
    except OSError:
        # Narrowed from a bare `except:` (which would also have hidden
        # KeyboardInterrupt/SystemExit).
        if path.isdir(dir):
            raise
def slurify(text):
    """Turn *text* into a slug: spaces become dashes, everything is
    lower-cased, and characters outside the module-level ``slur_chars``
    alphabet are dropped."""
    candidate = text.replace(' ', '-')
    kept = [ch.lower() for ch in candidate if ch in slur_chars]
    return ''.join(kept)
def ingroups(iterable, count):
    """Yield ``(group, more)`` pairs of at most *count* items each.

    *more* is True while further groups will follow; the final (possibly
    shorter, possibly empty) group is yielded with False.
    """
    pending = []
    for element in iterable:
        pending.append(element)
        if len(pending) > count:
            # A full group plus one lookahead item: emit the full group.
            yield pending[:count], True
            del pending[:count]
    yield pending, False
def writefile(text, *fnparts):
    """Write *text* to ``path.join(*fnparts)``, creating the directory first.

    The last element of *fnparts* is the file name; the leading elements
    form the directory (created via the module-level mkdir helper).
    """
    dirname = path.join(*fnparts[:-1])
    mkdir(dirname)
    # `with` guarantees the handle is closed even if write() raises.
    with open(path.join(dirname, fnparts[-1]), 'w') as fp:
        fp.write(text)
class PageTranslator(html4css1.HTMLTranslator):
    """HTML translator that harvests page metadata out of ReST comments
    (e.g. ``.. template: default``) and rewrites relative link targets."""
    # comment field name -> (attribute stored on the translator, default)
    commentfields = {'template': ('template', 'default'),
                     'navbar-order': ('navbar_order', ''),
                     'title': ('pagetitle', '')}

    def visit_comment(self, node):
        """Intercept ``name: value`` comments; store the known fields and
        always skip the comment node in the output."""
        text = node.astext()
        if ':' in text:
            name, val = text.split(':', 1)
            if name in self.commentfields:
                setattr(self, self.commentfields[name][0], val.strip())
        raise nodes.SkipNode

    def visit_reference(self, node):
        """Prefix non-http/mailto hrefs with ``##base##/`` so the builder can
        substitute the correct relative root afterwards."""
        uri = node.get('refuri', 'http://')
        if not (uri.startswith('http://') or uri.startswith('mailto:')):
            # stupid but it works.
            node['refuri'] = '##base##/' + node['refuri']
        return html4css1.HTMLTranslator.visit_reference(self, node)
class BlogPageTranslator(PageTranslator):
    """PageTranslator variant that harvests blog metadata comments
    (``posted`` and ``author``) instead of the page fields."""
    commentfields = {'posted': ('posted', ''),
                     'author': ('author', 'Anonymous')}
class PageWriter(html4css1.Writer):
    """docutils HTML writer that exposes the comment fields harvested by the
    translator as additional entries in ``self.parts``."""
    def __init__(self, translator):
        # translator: the PageTranslator subclass to use for this doc type.
        html4css1.Writer.__init__(self)
        self.translator_class = translator

    def assemble_parts(self):
        """After the regular HTML parts are assembled, copy every harvested
        comment field (or its default) from the visitor into the parts."""
        html4css1.Writer.assemble_parts(self)
        for field, default in self.translator_class.commentfields.values():
            self.parts[field] = getattr(self.visitor, field, default)
class SiteBuilder(object):
    """Render every ``*.rst`` file in *rootdir* through the Mako templates in
    ``<rootdir>/templates`` and write the resulting HTML into *outdir*.

    NOTE: Python 2 only (print statement, dict.iteritems, file()).
    """
    def __init__(self, rootdir, outdir):
        self.rootdir = rootdir
        self.outdir = outdir
        mkdir(self.outdir)
        self.tmpl = path.join(os.path.realpath(self.rootdir), 'templates')
        print "TEMPLATE PATH", self.tmpl
        self.lookup = TemplateLookup(directories=[self.tmpl])
        self.buf = StringIO()
        # docname (file stem) -> absolute .rst source path
        self.sources = {}
        srcdir = rootdir
        for fn in os.listdir(srcdir):
            if not fn.endswith('.rst'):
                continue
            basename = path.splitext(fn)[0]
            self.sources[basename] = path.join(srcdir, fn)

    def start(self):
        """Build the whole site: ReST -> HTML parts, then Mako -> files."""
        writer = PageWriter(PageTranslator)
        contexts = []
        navitems = []
        # two passes:
        # first, render all ReST content to HTML, gathering navitems
        for docname, sourcefile in self.sources.iteritems():
            parts = publish_parts(
                file(sourcefile).read(),
                source_path=sourcefile,
                writer=writer,
                settings_overrides={'initial_header_level': 2}
            )
            if not parts['pagetitle']:
                parts['pagetitle'] = parts['title']
            # index lives at the site root; every other page one level down.
            base = (docname == 'index' and '.' or '..')
            contexts.append({
                'docname': docname,
                'template': parts['template'],
                'title': parts['title'],
                'pagetitle': parts['pagetitle'],
                'content': parts['body'].replace('##base##', base),
                'base': base,
                'navitems': navitems,
                'sphinxver': __version__,
            })
            if parts['navbar_order'] != '0' and parts['navbar_order'].strip():
                navitems.append({
                    'docname': docname,
                    'name': parts['pagetitle'],
                    'link': (docname != 'index' and docname or ''),
                    'navbar_order': parts['navbar_order'],
                })
        # second, render all HTML in the Mako templates
        # NOTE(review): self.buf is never truncated between iterations, so
        # each written file appears to also contain all previously rendered
        # pages — confirm whether this is intended.
        for ctx in contexts:
            context = Context(self.buf, **ctx)
            tmpl = os.path.join(self.tmpl, ctx['template']+'.html')
            tmpl = Template(filename=tmpl).render_context(context)
            text = self.buf.getvalue()
            docname = ctx['docname'] or 'default'
            writefile(text, self.outdir, docname+'.html')
writefile(text, self.outdir, docname+'.html')
def main(argv):
    """CLI entry point: ``sphinx.py [-o outputdir] [srcdir]``.

    Returns a process exit code (0 on success, 2 on usage error).
    NOTE: Python 2 only (print statements).
    """
    usage = "Usage: %s [-o outputdir] [srcdir]" % argv[0]
    try:
        gopts, args = getopt.getopt(argv[1:], "o:")
    except getopt.GetoptError:
        print usage
        return 2
    opts = dict(gopts)
    if len(args) == 0:
        dir = '.'
    elif len(args) == 1:
        dir = args[0]
    else:
        print usage
        return 2
    # Output directory defaults to <srcdir>/build when -o is absent.
    builder = SiteBuilder(dir, opts.get('-o', path.join(dir, 'build')))
    builder.start()
    return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
| [
"btbytes@users.noreply.github.com"
] | btbytes@users.noreply.github.com |
ddc4ef2ae03062798fcdbc7a62554e3e6ef57781 | d9d76dc6f465d8b731e445469c9184ed8255aded | /Codechef/cheflr.py | 16fe5ed5bcdbd143616acac727e29b70cce9dd8a | [
"MIT"
] | permissive | hoodakaushal/Snippets | 40538b75a05a54ce650f192ad4f30bc74b15f4ba | 8944a11c3dee5d7839c209e7079af0f263cd10b1 | refs/heads/master | 2016-09-06T12:10:34.892279 | 2014-09-10T12:38:32 | 2014-09-10T12:38:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,669 | py | __author__ = 'hooda'
def node_num(path):
    """Return the node number (mod 1_000_000_007) reached from root node 1
    by following *path*, where each character picks the left ('l') or
    right (any other character) child.

    The child formula depends on the parity of the current node:

        even x: l -> 2x - 1,  r -> 2x + 1   (children are odd)
        odd  x: l -> 2x,      r -> 2x + 2   (children are even)

    Children of an even node are always odd and vice versa, so the parity
    alternates deterministically with depth.  We therefore track parity in
    a flag and keep the value reduced mod p at every step — each step is an
    affine map, which commutes with the modulus — instead of building a
    huge integer (as the original did) and reducing once at the end.
    """
    p = 1000000007
    num = 1
    is_even = False  # root node 1 is odd
    for fork in path:
        if is_even:
            num = (2 * num + (-1 if fork == "l" else 1)) % p
        else:
            num = (2 * num + (0 if fork == "l" else 2)) % p
        is_even = not is_even
    return num % p
def runner():
    """Read a case count then that many paths from stdin and print the
    node number for each.  NOTE: Python 2 only (raw_input)."""
    cases = int(raw_input())
    paths = []
    for i in range(0, cases):
        paths += [raw_input()]
    for path in paths:
        print(node_num(path))
    return
# runner()
import subprocess
def crun(s):
    """Run the external ``cheflr.exe`` binary with *s* as its argument and
    return its stdout with line endings stripped.

    Presumably cheflr.exe is a reference solution used for cross-checking
    node_num() — confirm before relying on it.
    """
    program = "cheflr.exe"
    piddi = subprocess.Popen([program, s], stdout=subprocess.PIPE)
    out, err = piddi.communicate()
    return out.replace("\n", "").replace("\r", "")
def comparator():
    """Grow a path of 'r' forks until match() reports a divergence between
    the Python and external answers, then print the offending query.
    Loops forever if the two implementations never disagree."""
    query = ""
    flag = True
    while flag:
        query += "r"
        flag = match(query)
    print(query)
def qgen(n):
    """Placeholder query generator.

    The original source left this ``def`` without a body (the next line
    starts a new top-level function), which is a SyntaxError and prevents
    the whole module from importing.  Until the intended generator is
    written, fail loudly instead of silently doing nothing.
    """
    raise NotImplementedError("qgen is not implemented")
def match(query):
    """Compare the external program's answer (crun) with node_num() for
    *query*; print both and return True when they agree."""
    piddi = crun(query)
    me = str(node_num(query))
    print(me, piddi)
    return piddi == me
comparator()
#print(node_num('rrlrlrlrrlrlrlrlrrlrlllrlrrrrrrllrrlrlrlrlrlrlrrlrlrlrlrlrlrlrlrllrlrllrllrlrllrllrllrlrlrlrllrlrlrlrllrlrlrlllllllllllrllrrlrllrlrlrlrlrlrllrlrlrlrllrlrlrlrllrlrlrlrlrllrlrlrlrllrlrlrlrllrlrllrlrrrrrrrllllllllllllllllllllllllllllrrrrrrrrrrrrrrlllrllllllllllllllrrrrrrrrrrrlllllllllllllllllrrrrrrrrrrrrrrrrrrrrrrrrrlllllllllllllllllllrrrrrrrrrrllllllrlrlrrlrlrlrlrrlrlllrlrrrrrrllrrlrlrlrlrlrlrrlrlrlrlrlrlrlrlrllrlrllrllrlrllrllrllrlrlrlrllrlrlrlrllrlrlrlllllllllllrllrrlrllrlrlrlrlrlrllrlrlrlrllrlrlrlrllrlrlrlrlrllrlrlrlrllrlrlrlrllrlrllrlrrrrrrrllllllllllllllllllllllllllllrrrrrrrrrrrrrrlllrllllllllllllllrrrrrrrrrrrlllllllllllllllllrrrrrrrrrrrrrrrrrrrrrrrrrlllllllllllllllllllrrrrrrrrrrllllllrlrlrrlrlrlrlrrlrlllrlrrrrrrllrrlrlrlrlrlrlrrlrlrlrlrlrlrlrlrllrlrllrllrlrllrllrllrlrlrlrllrlrlrlrllrlrlrlllllllllllrllrrlrllrlrlrlrlrlrllrlrlrlrllrlrlrlrllrlrlrlrlrllrlrlrlrllrlrlrlrllrlrllrlrrrrrrrllllllllllllllllllllllllllllrrrrrrrrrrrrrrlllrllllllllllllllrrrrrrrrrrrlllllllllllllllllrrrrrrrrrrrrrrrrrrrrrrrrrlllllllllllllllllllrrrrrrrrrrllllllrlrlrrlrlrlrlrrlrlllrlrrrrrrllrrlrlrlrlrlrlrrlrlrlrlrlrlrlrlrllrlrllrllrlrllrllrllrlrlrlrllrlrlrlrllrlrlrlllllllllllrllrrlrllrlrlrlrlrlrllrlrlrlrllrlrlrlrllrlrlrlrlrllrlrlrlrllrlrlrlrllrlrllrlrrrrrrrllllllllllllllllllllllllllllrrrrrrrrrrrrrrlllrllllllllllllllrrrrrrrrrrrlllllllllllllllllrrrrrrrrrrrrrrrrrrrrrrrrrlllllllllllllllllllrrrrrrrrrrlllll')) | [
"hoodakaushal@gmail.com"
] | hoodakaushal@gmail.com |
ddb8c2fd44e293fa8451287a455f483138028bc6 | 9bf7d46fbe759ecb6497e2ead1a61e08234cf838 | /src/Functions/utils.py | bbcea471f603bbc0975b0f73ded6689ad8465d63 | [
"MIT"
] | permissive | amitsou/Multimodal-User-Monitoring | e682a2a78a7cb70dfc8fbcff8e4fcd212080dc08 | 178049485b4afe5fbf9511e74e4c2f78bd89d426 | refs/heads/master | 2023-05-04T09:56:43.386202 | 2021-05-31T12:16:05 | 2021-05-31T12:16:05 | 275,675,347 | 2 | 1 | null | 2021-01-09T17:13:35 | 2020-06-28T21:59:48 | Python | UTF-8 | Python | false | false | 18,628 | py | # -*- coding: utf-8 -*-
from serial import Serial
from datetime import datetime, timedelta
import pandas as pd
import collections
import argparse
import logging
import shutil
import serial
import time
import sys
import os
click_held = False
button = None
def parse_CLI():
    """Parse the command-line arguments.

    Returns:
        The raw ``--segment_size`` value exactly as given on the command
        line (a string), or None when the flag is absent.
    """
    arg_parser = argparse.ArgumentParser(description='Collect arguments')
    arg_parser.add_argument("--segment_size",
                            metavar='segment_size(int)',
                            help="Please provide the segment size")
    return arg_parser.parse_args().segment_size
def on_move(x, y):
    """The callback to call when mouse move events occur.

    Args:
        x (float): The new pointer position
        y (float): The new pointer position
    """
    # The original branched on the module-level `click_held` flag, but both
    # branches logged the byte-identical message, so the dead conditional
    # is removed.
    logging.info("MV {0:>8} {1:>8} {2:>8}:".format(x, y, str(None)))
def on_click(x, y, button, pressed):
    """Mouse click/release callback: track the button-hold state and log.

    Args:
        x (float): Mouse coordinates on screen
        y (float): Mouse coordinates on screen
        button (str): one of the Button values
        pressed (bool): True on press, False on release
    """
    global click_held
    click_held = bool(pressed)
    template = "CLK {0:>7} {1:>6} {2:>13}" if pressed else "RLS {0:>7} {1:>6} {2:>13}"
    logging.info(template.format(x, y, button))
def on_scroll(x, y, dx, dy):
    """Mouse scroll callback: log scroll-down/scroll-up events.

    Args:
        x (float): The new pointer position on screen
        y (float): The new pointer position on screen
        dx (int): The horizontal scroll (unused)
        dy (int): The vertical scroll; only -1 (down) and 1 (up) are logged
    """
    templates = {-1: "SCRD {0:>6} {1:>6} {2:>6}", 1: "SCRU {0:>6} {1:>6} {2:>6}"}
    template = templates.get(dy)
    if template is not None:
        logging.info(template.format(x, y, str(None)))
def on_press_keys(key):
    """Keyboard callback: log only the special keys of interest.

    Args:
        key (str): a KeyCode, a Key, or None if the key is unknown
    """
    tracked = frozenset([
        'Key.alt', 'Key.alt_gr', 'Key.alt_r', 'Key.backspace',
        'Key.space', 'Key.ctrl', 'Key.ctrl_r', 'Key.down',
        'Key.up', 'Key.left', 'Key.right', 'Key.page_down',
        'Key.page_up', 'Key.enter', 'Key.shift', 'Key.shift_r',
    ])
    normalized = str(key).strip('\'')
    if normalized in tracked:
        logging.info(normalized)
def record_chair(output_file):
    """Read the data stream coming from the serial monitor
    in order to get the sensor readings

    Records for roughly 100 seconds, logging complete A0..A4 sensor frames.

    Args:
        output_file (str): The file name, where the data stream will be stored
    """
    serial_port = "/dev/ttyACM0"
    baud_rate = 9600
    ser = serial.Serial(serial_port,baud_rate)
    logging.basicConfig(filename=output_file,level=logging.DEBUG,format="%(asctime)s %(message)s")
    flag = False
    start = time.time()
    # Fixed recording window of 100 seconds.
    while time.time() - start < 100.0:
        try:
            serial_data = str(ser.readline().decode().strip('\r\n'))
            time.sleep(0.2)
            tmp = serial_data.split(' ')[0] #Getting Sensor Id
            # A frame starts at sensor A0 and ends at sensor A4; only lines
            # inside a frame are logged.
            if tmp == 'A0':
                flag = True
            if flag and tmp != 'A4':
                #print(serial_data)
                logging.info(serial_data)
            if flag and tmp == 'A4':
                flag = False
                #print(serial_data)
                logging.info(serial_data)
        except (UnicodeDecodeError, KeyboardInterrupt) as err:
            # Any decode problem (or Ctrl-C) terminates the process.
            print(err)
            print(err.args)
            sys.exit(0)
def concat_names(dir) -> list:
    """Build the absolute paths of the Raw/Edited_logs/CSV subfolders.

    Args:
        dir (str): The directory under which the subfolders will live

    Returns:
        list: The three paths, in [Raw, Edited_logs, CSV] order
    """
    # Annotation fixed: this function has always returned a list, not a str.
    raw_data = os.path.join(dir, 'Raw')
    edited_data = os.path.join(dir, 'Edited_logs')
    csv_data = os.path.join(dir, 'CSV')
    return [raw_data, edited_data, csv_data]
def create_subdirs(paths):
    """Create each directory in *paths* unless it already exists.

    Args:
        paths (list): paths of the directories to create
    """
    # The enumerate() index in the original was never used; dropped.
    for target in paths:
        if not os.path.isdir(target):
            os.mkdir(target)
def round_down(num, divisor) -> int:
    """Round *num* down to the nearest multiple of *divisor*.

    Args:
        num (int): the value to round (e.g. a log-file line count)
        divisor (int): the step to round to

    Returns:
        int: the largest multiple of *divisor* that is <= *num*
    """
    remainder = num % divisor
    return num - remainder
def get_date() -> str:
    """Current date formatted ``YYYY_MM_DD`` for naming recorded log files.

    Returns:
        str: today's date, underscore-separated
    """
    today = datetime.now()
    return today.strftime('%Y_%m_%d')
def get_time() -> str:
    """Current wall-clock time formatted ``HH_MM_SS`` for naming log files.

    Returns:
        str: the current time, underscore-separated
    """
    moment = datetime.now()
    return moment.strftime('%H_%M_%S')
def time_in_range(start, end, x):
    """Return True if *x* lies in [start, end]; when start > end the
    interval is treated as wrapping around (e.g. past midnight)."""
    wraps = start > end
    if wraps:
        return start <= x or x <= end
    return start <= x <= end
def crawl_dir(target, folder) -> list:
    """Enumerate the files in *folder* whose names end with *target*.

    *folder* is resolved against the current working directory; an
    absolute *folder* is used as-is (os.path.join semantics).

    Args:
        target (str): file-name suffix to match, e.g. '.txt'
        folder (str): the folder to search

    Returns:
        list: the matching file paths
    """
    # Annotation fixed: a list has always been returned, not a str.
    search_root = os.path.join(os.path.abspath(os.getcwd()), folder)
    return [os.path.join(search_root, entry)
            for entry in os.listdir(search_root)
            if entry.endswith(target)]
def check_divisor(input_file):
    """Line count of *input_file*, rounded down to a multiple of 5
    (the chair log is written in 5-line sensor frames).

    Args:
        input_file (str): path of the file to measure

    Returns:
        number: the largest multiple of 5 not exceeding the line count
    """
    total_lines = count_lines(input_file)
    return round_down(total_lines, 5)
def preprocess_chair_raw_data(input_file):
    """Transpose the .txt file containing the
    chair's raw data

    Args:
        input_file (str): The .txt file to process

    Returns:
        list: A list of lists to the .csv corresponding rows
    """
    d = collections.defaultdict(list)
    tmp0,tmp1,tmp2,tmp3,tmp4 = 0,1,2,3,4
    rounded_line = check_divisor(input_file)
    with open(input_file,'r') as f1:
        lines = f1.readlines()
        # Index arithmetic: `i` advances by 1 and each tmpK by 4 per
        # iteration, so i+tmpK walks in steps of 5 — one 5-line A0..A4
        # sensor frame per iteration.
        for i in range(rounded_line // 5):
            Sid0 = lines[i+tmp0]
            temp = Sid0.split()
            # Each split line is assumed to be: date, time, id, value —
            # TODO confirm against the recorder's log format.
            d['Sid0'].append([temp[0],temp[1],temp[2],temp[3]])
            Sid1 = lines[i+tmp1]
            temp = Sid1.split()
            d['Sid1'].append([temp[0],temp[1],temp[2],temp[3]])
            Sid2 = lines[i+tmp2]
            temp = Sid2.split()
            d['Sid2'].append([temp[0],temp[1],temp[2],temp[3]])
            Sid3 = lines[i+tmp3]
            temp = Sid3.split()
            d['Sid3'].append([temp[0],temp[1],temp[2],temp[3]])
            Sid4 = lines[i+tmp4]
            temp = Sid4.split()
            d['Sid4'].append([temp[0],temp[1],temp[2],temp[3]])
            tmp0 += 4
            tmp1 += 4
            tmp2 += 4
            tmp3 += 4
            tmp4 += 4
    l = []
    # Second pass: flatten each frame into one CSV row, taking date/time
    # from the A0 line of the frame.
    for i in range(rounded_line // 5):
        date = d['Sid0'][i][0]
        time = d['Sid0'][i][1]
        A0_val = d['Sid0'][i][3]
        A1_val = d['Sid1'][i][3]
        A2_val = d['Sid2'][i][3]
        A3_val = d['Sid3'][i][3]
        A4_val = d['Sid4'][i][3]
        l.append([date,time,A0_val,A1_val,A2_val,A3_val,A4_val])
    return l
def convert_keys2_csv(input_file, output_file):
    """Convert a keylogger recording from fixed-width .txt to .csv.

    An empty recording is skipped entirely: no output file is produced.

    Args:
        input_file (str): the raw .txt data-stream file
        output_file (str): destination path for the .csv file
    """
    if os.stat(input_file).st_size == 0:
        return
    frame = pd.read_fwf(input_file)
    frame.to_csv(output_file, header=['Date', 'Time', 'Key'],
                 encoding='utf-8', index=False)
def convert_mouse2_csv(input_file, output_file):
    """Convert a mouselogger recording from fixed-width .txt to .csv.

    An empty recording is skipped entirely: no output file is produced.

    Args:
        input_file (str): the raw .txt data-stream file
        output_file (str): destination path for the .csv file
    """
    if os.stat(input_file).st_size == 0:
        return
    frame = pd.read_fwf(input_file)
    frame.to_csv(output_file,
                 header=['Date', 'Time', 'Action', 'PosX', 'PosY', 'Button'],
                 encoding='utf-8', index=False)
def convert_chair_2_csv(input_file, output_file):
    """Convert a chair-sensor recording from .txt to .csv.

    An empty recording is skipped entirely: no output file is produced.

    Args:
        input_file (str): the raw .txt data-stream file
        output_file (str): destination path for the .csv file
    """
    if os.stat(input_file).st_size == 0:
        return
    rows = preprocess_chair_raw_data(input_file)
    readings = pd.DataFrame.from_records(rows)
    readings.columns = ['Date', 'Time', 'A0', 'A1', 'A2', 'A3', 'A4']
    readings.to_csv(output_file, encoding='utf-8', index=False)
    del rows
#REVIEW
def get_dirs(modality) -> list:
    """Resolve the Raw/CSV/Edited_logs/Features paths for *modality*.

    WARNING: this mutates the process CWD (two os.chdir calls, never
    restored) — callers depend on starting from the project's src folder.
    Returns a 4-tuple despite the ``-> list`` annotation.
    """
    current_path = os.path.abspath(os.getcwd())
    os.chdir('..')
    current_path = (os.path.abspath(os.curdir))
    #os.chdir('./Debug')
    os.chdir('./Data')
    current_path = (os.path.abspath(os.curdir))
    current_path = os.path.join(current_path,modality)
    raw_data_path = os.path.join(current_path,'Raw')
    csv_data_path = os.path.join(current_path,'CSV')
    edited_logs_path = os.path.join(current_path,'Edited_logs')
    features_path = os.path.join(current_path,'Features')
    return raw_data_path, csv_data_path, edited_logs_path, features_path
def initialize_dirs():
    """Create the appropriate directories in order to save
    and process the collected data

    Layout: Data/{Features, Mouse, Keyboard, Chair, Webcam}, each modality
    with Raw/Edited_logs/CSV subfolders (via concat_names/create_subdirs).
    WARNING: os.chdir('..') changes the process CWD and is not restored.
    """
    current_path = os.path.abspath(os.getcwd())
    os.chdir('..')
    current_path = (os.path.abspath(os.curdir)) #Parent folder
    current_path = os.path.join(current_path,'Data')
    create_subdirs([current_path])
    features = os.path.join(current_path,'Features')
    create_subdirs([features])
    #Create mouse log folder
    mouse = os.path.join(current_path,'Mouse')
    create_subdirs([mouse])
    #Create mouse subfolders
    names = concat_names(mouse)
    create_subdirs(names)
    #Create keyboard log folder
    keyboard = os.path.join(current_path,'Keyboard')
    create_subdirs([keyboard])
    #Create keyboard subfolders
    names = concat_names(keyboard)
    create_subdirs(names)
    #Create the chair log folder
    chair = os.path.join(current_path,'Chair')
    create_subdirs([chair])
    #Create chair subfolders
    names = concat_names(chair)
    create_subdirs(names)
    #Create webcam log folder
    webcam = os.path.join(current_path,'Webcam')
    create_subdirs([webcam])
def get_name(modality,dest) -> str:
    """Save the recorded log into /Data/<Modality_name>/Raw

    Args:
        modality (str): The log data source
        dest(str): The folder to save the data

    Returns:
        str: The absolute path where each recording is saved

    WARNING: mutates the process CWD via os.chdir('..') (not restored),
    and implicitly returns None for any modality other than
    'Chair'/'Mouse'/'Keyboard'.
    """
    current_path = os.path.abspath(os.getcwd())
    os.chdir('..')
    current_path = (os.path.abspath(os.curdir))
    current_path = os.path.join(current_path,'Data')
    if modality == 'Chair':
        chair_path = os.path.join(current_path,modality,dest)
        return chair_path
    elif modality == 'Mouse':
        mouse_path = os.path.join(current_path,modality,dest)
        return mouse_path
    elif modality == 'Keyboard':
        keyboard_path = os.path.join(current_path,modality,dest)
        return keyboard_path
def parse_raw_data(modality):
    """Convert each modality's raw data into csv format and move
    the edited raw data into the appropriate Edited_logs folder

    For each Raw/*.txt recording: convert to CSV/<name>.csv with the
    modality-specific converter, patch missing values, then archive the
    .txt into Edited_logs.

    Args:
        modality (str): The data source ('Mouse', 'Keyboard' or 'Chair')
    """
    raw_data_path, csv_data_path, edited_logs_path,_ = get_dirs(modality)
    txt_names = crawl_dir('.txt',raw_data_path)
    csv_names = []
    # Mirror each <name>.txt as CSV/<name>.csv.
    for elem in txt_names:
        name = elem.split('/')[-1].split('.')[0]
        csv_name = name+'.csv'
        tmp = os.path.join(csv_data_path,csv_name)
        csv_names.append(tmp)
    if modality == 'Mouse':
        if len(txt_names) == len(csv_names):
            for i, elem in enumerate(txt_names):
                convert_mouse2_csv(txt_names[i],csv_names[i])
                add_mouse_missing_values(csv_names[i])
                shutil.move(txt_names[i],edited_logs_path)
    elif modality == 'Keyboard':
        if len(txt_names) == len(csv_names):
            for i, elem in enumerate(txt_names):
                convert_keys2_csv(txt_names[i],csv_names[i])
                add_key_missing_values(csv_names[i])
                shutil.move(txt_names[i],edited_logs_path)
    elif modality == 'Chair':
        if len(txt_names) == len(csv_names):
            for i, elem in enumerate(txt_names):
                convert_chair_2_csv(txt_names[i],csv_names[i])
                add_chair_missing_values(csv_names[i])
                shutil.move(txt_names[i],edited_logs_path)
def splitall(path) -> list:
    """Split a path string into all of its components.

    Args:
        path (str): the path to split

    Returns:
        list: the individual components, root first
    """
    components = []
    remainder = path
    while True:
        head, tail = os.path.split(remainder)
        if head == remainder:
            # Absolute-root sentinel: os.path.split no longer shrinks it.
            components.insert(0, head)
            break
        if tail == remainder:
            # Relative-path sentinel: a single component is left.
            components.insert(0, tail)
            break
        remainder = head
        components.insert(0, tail)
    return components
def check_empty(path) -> tuple:
    """Check if a given file does not contain any data

    Args:
        path (str): The abs path to the directory

    Returns:
        tuple: A tuple containing a bool and a message

    NOTE(review): in the empty-file branches `msg` is itself a tuple
    (label, filename) while in the fall-through it is a plain string;
    also an empty file whose path contains none of the three modality
    names falls through to (False, 'No empty Dataframe') — confirm both
    are intended.
    """
    tmp = splitall(path)
    df = pd.read_csv(path)
    if df.empty:
        if 'Keyboard' in tmp:
            msg = 'Empty Keyboard File, name:',tmp[-1]
            return True, msg
        elif 'Mouse' in tmp:
            msg = 'Empty Mouse File, name:',tmp[-1]
            return True, msg
        elif 'Chair' in tmp:
            msg = 'Empty Chair File, name:',tmp[-1]
            return True, msg
    msg = 'No empty Dataframe'
    return False, msg
def count_lines(input_file) -> int:
    """Count the lines of a given file.

    Args:
        input_file (str): the file to open

    Returns:
        int: the number of lines
    """
    with open(input_file) as handle:
        total = sum(1 for _ in handle)
    return total
def list_dir(path, target):
    """Return the full paths of the entries in *path* whose names end
    with *target* (e.g. '.csv')."""
    return [os.path.join(path, entry)
            for entry in os.listdir(path)
            if entry.endswith(target)]
def preprocess_empty():
    """Find the empty .csv files for keyboard and mouse.
    This function fills the empty files with time
    and date based on the recorded chair .csv file.
    It also fills with zero or None values the empty cells
    This function is useful for the absent class
    """
    # Second element returned by get_dirs() is the directory holding the csvs.
    _, chair_dir, _, _ = get_dirs('Chair')
    _, keys_dir, _, _ = get_dirs('Keyboard')
    _, mouse_dir, _, _ = get_dirs('Mouse')
    chair_files = list_dir(chair_dir,'.csv')
    key_files = list_dir(keys_dir,'.csv')
    mouse_files = list_dir(mouse_dir,'.csv')
    # Match mouse/key/chair recordings sharing the same base filename.
    # NOTE(review): splitting on '/' assumes POSIX paths (not os.sep), and the
    # triple cartesian product is O(n^3) in the number of log files -- confirm
    # both are acceptable for the expected data volumes.
    pairs = [(i, j, k) for i in mouse_files for j in key_files for k in chair_files if i.split('/')[-1].split('.')[0] == j.split('/')[-1].split('.')[0] == k.split('/')[-1].split('.')[0]]
    for m,k,c in pairs:
        chair_df = pd.read_csv(c)
        key_empt, _ = check_empty(k)
        mouse_empt, _ = check_empty(m)
        if key_empt:
            # Borrow the chair recording's timeline and mark all keys 'None'.
            # NOTE(review): key_df is empty here, and pandas aligns column
            # assignment on the (empty) index -- verify rows are produced.
            key_df = pd.read_csv(k)
            key_df['Date'] = chair_df['Date']
            key_df['Time'] = chair_df['Time']
            key_df['Key'] = 'None'
            key_df.to_csv(k, mode='a', header=False, index=False)
            del key_df
        if mouse_empt:
            # Same treatment for the mouse log: chair timeline, zero
            # coordinates, 'None' actions/buttons.
            mouse_df = pd.read_csv(m)
            mouse_df['Date'] = chair_df['Date']
            mouse_df['Time'] = chair_df['Time']
            mouse_df['PosX'] = mouse_df['PosX'].fillna(0)
            mouse_df['PosY'] = mouse_df['PosY'].fillna(0)
            mouse_df['Action'] = 'None'
            mouse_df['Button'] = 'None'
            mouse_df.to_csv(m, mode='a', header=False, index=False)
            del mouse_df
def insert_last_timestamp(filename):
    """Append the current date and time to *filename*.

    Used to stamp the moment a recording terminated.  The appended text has
    the form 'YYYY-MM-DD HH:MM:SS,mmm' (comma-separated milliseconds), with
    no trailing newline.

    Args:
        filename (str): The file to append to.
    """
    now = datetime.now()
    date = str(now.date())
    # Integer division keeps the millisecond field in 000-999.  The previous
    # '{:03.0f}' float formatting could round 999.999 up and emit ',1000'.
    timestamp = '{:%H:%M:%S},{:03d}'.format(now.time(), now.microsecond // 1000)
    with open(filename, 'a') as f:
        f.write(date)
        f.write(' ')
        f.write(timestamp)
def add_mouse_missing_values(filename):
    """Add the missing values for the last recorded timestamp

    Args:
        filename (str): The csv file to process
    """
    df = pd.read_csv(filename)
    # Rows produced by insert_last_timestamp() carry only Date/Time, so the
    # remaining columns are NaN; replace them with neutral values.
    df[['Action','Button']] = df[['Action','Button']].fillna(value='None')
    df[['PosX','PosY']] = df[['PosX','PosY']].fillna(value=0)
    # NOTE(review): mode='a' re-appends every row after the existing content,
    # duplicating the file's rows -- confirm this is intended (vs. mode='w').
    df.to_csv(filename, mode='a', index = False, header=False)
def add_key_missing_values(filename):
    """Add the missing values for the last recorded timestamp

    Args:
        filename (str): The csv file to process
    """
    df = pd.read_csv(filename)
    # The terminating timestamp row has no key pressed; mark it 'None'.
    df[['Key']] = df[['Key']].fillna(value='None')
    # NOTE(review): mode='a' re-appends every row after the existing content,
    # duplicating the file's rows -- confirm this is intended (vs. mode='w').
    df.to_csv(filename, mode='a', index = False, header=False)
def add_chair_missing_values(filename):
    """Add the missing values for the last recorded timestamp

    Args:
        filename (str): The csv file to process
    """
    df = pd.read_csv(filename)
    # Zero out the five chair pressure-sensor channels on incomplete rows.
    df[['A0','A1','A2','A3','A4']] = df[['A0','A1','A2','A3','A4']].fillna(value=0)
    # NOTE(review): mode='a' re-appends every row after the existing content,
    # duplicating the file's rows -- confirm this is intended (vs. mode='w').
    df.to_csv(filename, mode='a', index = False, header=False)
"amitsou95@gmail.com"
] | amitsou95@gmail.com |
8e9d43ebc4e167e7f0bc8966d7f024720626f37e | fdb9bdc6c4ab2f14ba71e544493706d5e275899f | /fhir/resources/devicedispense.py | 964f6e14d09cf007d918ee4453586472cff99dad | [
"BSD-3-Clause"
] | permissive | nazrulworld/fhir.resources | 6ae8aea8180c611b0c5050759c6dcdf63e4cb061 | 1fd6ea476b27b3fcb8c4ef8f23bc51cf161e69e3 | refs/heads/main | 2023-08-30T18:27:27.277249 | 2023-07-03T19:57:06 | 2023-07-03T19:57:06 | 165,297,877 | 256 | 83 | NOASSERTION | 2023-08-24T15:34:05 | 2019-01-11T19:26:41 | Python | UTF-8 | Python | false | false | 16,368 | py | # -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/DeviceDispense
Release: R5
Version: 5.0.0
Build ID: 2aecd53
Last updated: 2023-03-26T15:21:02.749+11:00
"""
import typing
from pydantic import Field, root_validator
from pydantic.error_wrappers import ErrorWrapper, ValidationError
from pydantic.errors import MissingError, NoneIsNotAllowedError
from . import backboneelement, domainresource, fhirtypes
class DeviceDispense(domainresource.DomainResource):
    """Disclaimer: Any field name ends with ``__ext`` doesn't part of
    Resource StructureDefinition, instead used to enable Extensibility feature
    for FHIR Primitive Data Types.
    A record of dispensation of a device.
    A record of dispensation of a device - i.e., assigning a device to a
    patient, or to a professional for their use.
    """
    # NOTE: generated from the FHIR R5 StructureDefinition for DeviceDispense;
    # field titles, descriptions and reference targets mirror the spec and
    # must stay consistent with the other generated resource modules.
    resource_type = Field("DeviceDispense", const=True)
    basedOn: typing.List[fhirtypes.ReferenceType] = Field(
        None,
        alias="basedOn",
        title="The order or request that this dispense is fulfilling",
        description=None,
        # if property is element of this resource.
        element_property=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["CarePlan", "DeviceRequest"],
    )
    category: typing.List[fhirtypes.CodeableConceptType] = Field(
        None,
        alias="category",
        title="Type of device dispense",
        description="Indicates the type of device dispense.",
        # if property is element of this resource.
        element_property=True,
    )
    destination: fhirtypes.ReferenceType = Field(
        None,
        alias="destination",
        title="Where the device was sent or should be sent",
        description=(
            "Identification of the facility/location where the device was /should "
            "be shipped to, as part of the dispense process."
        ),
        # if property is element of this resource.
        element_property=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["Location"],
    )
    device: fhirtypes.CodeableReferenceType = Field(
        ...,
        alias="device",
        title="What device was supplied",
        description=(
            "Identifies the device being dispensed. This is either a link to a "
            "resource representing the details of the device or a simple attribute "
            "carrying a code that identifies the device from a known list of "
            "devices."
        ),
        # if property is element of this resource.
        element_property=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["Device", "DeviceDefinition"],
    )
    encounter: fhirtypes.ReferenceType = Field(
        None,
        alias="encounter",
        title="Encounter associated with event",
        description="The encounter that establishes the context for this event.",
        # if property is element of this resource.
        element_property=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["Encounter"],
    )
    eventHistory: typing.List[fhirtypes.ReferenceType] = Field(
        None,
        alias="eventHistory",
        title="A list of relevant lifecycle events",
        description=(
            "A summary of the events of interest that have occurred, such as when "
            "the dispense was verified."
        ),
        # if property is element of this resource.
        element_property=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["Provenance"],
    )
    identifier: typing.List[fhirtypes.IdentifierType] = Field(
        None,
        alias="identifier",
        title="Business identifier for this dispensation",
        description=None,
        # if property is element of this resource.
        element_property=True,
    )
    location: fhirtypes.ReferenceType = Field(
        None,
        alias="location",
        title="Where the dispense occurred",
        description="The principal physical location where the dispense was performed.",
        # if property is element of this resource.
        element_property=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["Location"],
    )
    note: typing.List[fhirtypes.AnnotationType] = Field(
        None,
        alias="note",
        title="Information about the dispense",
        description=(
            "Extra information about the dispense that could not be conveyed in the"
            " other attributes."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    partOf: typing.List[fhirtypes.ReferenceType] = Field(
        None,
        alias="partOf",
        title="The bigger event that this dispense is a part of",
        description=None,
        # if property is element of this resource.
        element_property=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["Procedure"],
    )
    performer: typing.List[fhirtypes.DeviceDispensePerformerType] = Field(
        None,
        alias="performer",
        title="Who performed event",
        description="Indicates who or what performed the event.",
        # if property is element of this resource.
        element_property=True,
    )
    preparedDate: fhirtypes.DateTime = Field(
        None,
        alias="preparedDate",
        title="When product was packaged and reviewed",
        description="The time when the dispensed product was packaged and reviewed.",
        # if property is element of this resource.
        element_property=True,
    )
    preparedDate__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_preparedDate", title="Extension field for ``preparedDate``."
    )
    quantity: fhirtypes.QuantityType = Field(
        None,
        alias="quantity",
        title="Amount dispensed",
        description="The number of devices that have been dispensed.",
        # if property is element of this resource.
        element_property=True,
    )
    receiver: fhirtypes.ReferenceType = Field(
        None,
        alias="receiver",
        title="Who collected the device or where the medication was delivered",
        description=(
            "Identifies the person who picked up the device or the person or "
            "location where the device was delivered.  This may be a patient or "
            "their caregiver, but some cases exist where it can be a healthcare "
            "professional or a location."
        ),
        # if property is element of this resource.
        element_property=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=[
            "Patient",
            "Practitioner",
            "RelatedPerson",
            "Location",
            "PractitionerRole",
        ],
    )
    status: fhirtypes.Code = Field(
        None,
        alias="status",
        title=(
            "preparation | in-progress | cancelled | on-hold | completed | entered-"
            "in-error | stopped | declined | unknown"
        ),
        description="A code specifying the state of the set of dispense events.",
        # if property is element of this resource.
        element_property=True,
        element_required=True,
        # note: Enum values can be used in validation,
        # but use in your own responsibilities, read official FHIR documentation.
        enum_values=[
            "preparation",
            "in-progress",
            "cancelled",
            "on-hold",
            "completed",
            "entered-in-error",
            "stopped",
            "declined",
            "unknown",
        ],
    )
    status__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_status", title="Extension field for ``status``."
    )
    statusReason: fhirtypes.CodeableReferenceType = Field(
        None,
        alias="statusReason",
        title="Why a dispense was or was not performed",
        description="Indicates the reason why a dispense was or was not performed.",
        # if property is element of this resource.
        element_property=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["DetectedIssue"],
    )
    subject: fhirtypes.ReferenceType = Field(
        ...,
        alias="subject",
        title="Who the dispense is for",
        description=(
            "A link to a resource representing the person to whom the device is "
            "intended."
        ),
        # if property is element of this resource.
        element_property=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["Patient", "Practitioner"],
    )
    supportingInformation: typing.List[fhirtypes.ReferenceType] = Field(
        None,
        alias="supportingInformation",
        title="Information that supports the dispensing of the device",
        description="Additional information that supports the device being dispensed.",
        # if property is element of this resource.
        element_property=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["Resource"],
    )
    type: fhirtypes.CodeableConceptType = Field(
        None,
        alias="type",
        title="Trial fill, partial fill, emergency fill, etc",
        description="Indicates the type of dispensing event that is performed.",
        # if property is element of this resource.
        element_property=True,
    )
    usageInstruction: fhirtypes.Markdown = Field(
        None,
        alias="usageInstruction",
        title="Full representation of the usage instructions",
        description="The full representation of the instructions.",
        # if property is element of this resource.
        element_property=True,
    )
    usageInstruction__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None,
        alias="_usageInstruction",
        title="Extension field for ``usageInstruction``.",
    )
    whenHandedOver: fhirtypes.DateTime = Field(
        None,
        alias="whenHandedOver",
        title="When product was given out",
        description=(
            "The time the dispensed product was made available to the patient or "
            "their representative."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    whenHandedOver__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_whenHandedOver", title="Extension field for ``whenHandedOver``."
    )
    @classmethod
    def elements_sequence(cls):
        """returning all elements names from
        ``DeviceDispense`` according specification,
        with preserving original sequence order.
        """
        return [
            "id",
            "meta",
            "implicitRules",
            "language",
            "text",
            "contained",
            "extension",
            "modifierExtension",
            "identifier",
            "basedOn",
            "partOf",
            "status",
            "statusReason",
            "category",
            "device",
            "subject",
            "receiver",
            "encounter",
            "supportingInformation",
            "performer",
            "location",
            "type",
            "quantity",
            "preparedDate",
            "whenHandedOver",
            "destination",
            "note",
            "usageInstruction",
            "eventHistory",
        ]
    @root_validator(pre=True, allow_reuse=True)
    def validate_required_primitive_elements_1588(
        cls, values: typing.Dict[str, typing.Any]
    ) -> typing.Dict[str, typing.Any]:
        """https://www.hl7.org/fhir/extensibility.html#Special-Case
        In some cases, implementers might find that they do not have appropriate data for
        an element with minimum cardinality = 1. In this case, the element must be present,
        but unless the resource or a profile on it has made the actual value of the primitive
        data type mandatory, it is possible to provide an extension that explains why
        the primitive value is not present.
        """
        required_fields = [("status", "status__ext")]
        # Sentinel: distinguishes "key absent" from an explicit None value.
        _missing = object()
        def _fallback():
            return ""
        errors: typing.List["ErrorWrapper"] = []
        for name, ext in required_fields:
            field = cls.__fields__[name]
            ext_field = cls.__fields__[ext]
            value = values.get(field.alias, _missing)
            if value not in (_missing, None):
                # A concrete value is present -- nothing to enforce here.
                continue
            # Otherwise the element may still be "present" via the companion
            # primitive extension (the ``_status`` / ``__ext`` field).
            ext_value = values.get(ext_field.alias, _missing)
            missing_ext = True
            if ext_value not in (_missing, None):
                if isinstance(ext_value, dict):
                    missing_ext = len(ext_value.get("extension", [])) == 0
                elif (
                    getattr(ext_value.__class__, "get_resource_type", _fallback)()
                    == "FHIRPrimitiveExtension"
                ):
                    if ext_value.extension and len(ext_value.extension) > 0:
                        missing_ext = False
                else:
                    # Unrecognized type: run the extension field's own pydantic
                    # validators, then re-check for a non-empty extension list.
                    validate_pass = True
                    for validator in ext_field.type_.__get_validators__():
                        try:
                            ext_value = validator(v=ext_value)
                        except ValidationError as exc:
                            errors.append(ErrorWrapper(exc, loc=ext_field.alias))
                            validate_pass = False
                    if not validate_pass:
                        continue
                    if ext_value.extension and len(ext_value.extension) > 0:
                        missing_ext = False
            if missing_ext:
                if value is _missing:
                    errors.append(ErrorWrapper(MissingError(), loc=field.alias))
                else:
                    errors.append(
                        ErrorWrapper(NoneIsNotAllowedError(), loc=field.alias)
                    )
        if len(errors) > 0:
            raise ValidationError(errors, cls)  # type: ignore
        return values
class DeviceDispensePerformer(backboneelement.BackboneElement):
    """Disclaimer: Any field name ends with ``__ext`` doesn't part of
    Resource StructureDefinition, instead used to enable Extensibility feature
    for FHIR Primitive Data Types.
    Who performed event.
    Indicates who or what performed the event.
    """
    # NOTE: generated from the FHIR R5 StructureDefinition (backbone element
    # of DeviceDispense.performer); keep in sync with sibling resources.
    resource_type = Field("DeviceDispensePerformer", const=True)
    actor: fhirtypes.ReferenceType = Field(
        ...,
        alias="actor",
        title="Individual who was performing",
        description=(
            "The device, practitioner, etc. who performed the action.  It should be"
            " assumed that the actor is the dispenser of the device."
        ),
        # if property is element of this resource.
        element_property=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=[
            "Practitioner",
            "PractitionerRole",
            "Organization",
            "Patient",
            "Device",
            "RelatedPerson",
            "CareTeam",
        ],
    )
    function: fhirtypes.CodeableConceptType = Field(
        None,
        alias="function",
        title="Who performed the dispense and what they did",
        description=(
            "Distinguishes the type of performer in the dispense.  For example, "
            "date enterer, packager, final checker."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    @classmethod
    def elements_sequence(cls):
        """returning all elements names from
        ``DeviceDispensePerformer`` according specification,
        with preserving original sequence order.
        """
        return ["id", "extension", "modifierExtension", "function", "actor"]
| [
"connect2nazrul@gmail.com"
] | connect2nazrul@gmail.com |
32184d18843876ee3bb3bcb5944c60c4b0656baf | df46cf831cba3e2b91e794be6e03f0cbd1f2f8af | /p68.py | f1c6987177250a90ad65977fe9c4f9ad95a9d4d3 | [] | no_license | manjupinky/Phyton | 7653022f8a9b64f7213902c34998e7900f529b5e | 711bb5f491a904c1e35b4204223a90d8f3c16a6e | refs/heads/master | 2020-03-27T10:10:07.963601 | 2019-01-22T07:58:54 | 2019-01-22T07:58:54 | 146,399,913 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75 | py | l=int(input("enter value"))
# Report whether l (the integer read from stdin above) is divisible by 7.
if(l%7==0):
    print("yes")
else:
    print("no")
| [
"noreply@github.com"
] | noreply@github.com |
1855ff41f0680927d0fb741b4f92b16f35a8f8c3 | 87a68bc58c6255976a1ca66a3a44f0e9a2b1188e | /backend/new_app/migrations/0003_auto_20210123_1146.py | d50335c66c781fc37b3255c562e78a2d0028d1f5 | [] | no_license | LukovVI/jhfjhf | e86389c483cc38bc87345973f1f5a4801da22387 | 4cd433279b325866aec0a0ae8cb5219913770eb6 | refs/heads/master | 2023-04-01T13:45:07.836562 | 2021-04-11T10:50:37 | 2021-04-11T10:50:37 | 356,840,529 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 601 | py | # Generated by Django 3.1.3 on 2021-01-22 23:46
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django schema migration: changes the creation-timestamp
    # defaults of Message.time_create and Topic.data_create.
    # NOTE(review): datetime.date.today (a date, not a datetime) as the default
    # of a DateTimeField drops the time-of-day component -- confirm that
    # datetime.datetime.now was not intended.
    dependencies = [
        ('new_app', '0002_auto_20210103_0242'),
    ]
    operations = [
        migrations.AlterField(
            model_name='message',
            name='time_create',
            field=models.DateTimeField(default=datetime.date.today),
        ),
        migrations.AlterField(
            model_name='topic',
            name='data_create',
            field=models.DateTimeField(default=datetime.date.today),
        ),
    ]
| [
"61895305+qwertyFOREVER@users.noreply.github.com"
] | 61895305+qwertyFOREVER@users.noreply.github.com |
b59ab99fb40ed3daf44d3ca895f964f99c2c40c2 | 9e18b15cd87ad91fb656f80ad2ce052e081ad3b3 | /tests/__init__.py | 6b176b029469b47e5e1ac9754069f148a8c20f9c | [] | no_license | sneawo/asyncio-rest-example | fe03191ad291166cb22a6993563a0af86224054f | a18a6b3a4ce20d856f31c6f7f51a06adc0b3b981 | refs/heads/master | 2021-06-21T15:02:55.988469 | 2019-08-15T11:18:49 | 2019-08-15T11:18:49 | 202,528,110 | 8 | 4 | null | 2021-02-26T02:35:22 | 2019-08-15T11:19:38 | Python | UTF-8 | Python | false | false | 327 | py | from aiohttp.test_utils import AioHTTPTestCase
from app.main import init
from app.config import TestConfig
class AppTestCase(AioHTTPTestCase):
    """Base class for application tests: boots the aiohttp app with the test
    configuration and drops the test database after each test."""
    async def get_application(self):
        # AioHTTPTestCase hook: build the application under test.
        return init(TestConfig())
    async def tearDownAsync(self) -> None:
        # Drop the whole per-test database so tests stay isolated
        # (presumably MongoDB via motor, given the .client.drop_database
        # API -- confirm against app.main).
        await self.app['db'].client.drop_database(self.app['db'])
| [
"sneawo@gmail.com"
] | sneawo@gmail.com |
3dfaec274a449ba446b15980b0eb9dfcb8f0e0b2 | 02c378a8f3afdb3299325d997e488e2ee3879ad7 | /failover_gw/test.py | 5f49e22ccbbe5737d6d92a8b2c4cbfc8d0cd8a44 | [
"MIT"
] | permissive | ddowling/failover_gw | 2ab734b5c58ac10a56fd6ffa5eb7f97cd2fe7f4d | 6cd7f64dda0ac30a3b057095571008d95577d411 | refs/heads/master | 2021-06-27T11:48:09.624193 | 2020-10-28T06:23:22 | 2020-10-28T06:23:22 | 176,870,216 | 1 | 0 | null | 2019-03-21T07:45:37 | 2019-03-21T04:31:44 | Python | UTF-8 | Python | false | false | 860 | py | from arprequest import ArpRequest
# Hard-coded test target: gateway IP and the network interface to probe.
ip = '192.168.90.1'
dev = 'ens33'
print("ip = ", ip, "dev = ", dev)
# Resolve the target's MAC address via an ARP request on that interface.
ar = ArpRequest(ip, dev)
print("Request returned", ar.request())
print("getHardwareAddress() = ", ar.getHardwareAddress())
print("getHardwareAddressStr() = ", ar.getHardwareAddressStr())
from pyping import Ping
# Exercise all four combinations of raw-socket and quiet-output modes.
for i in range(4):
    is_raw = ( i >= 2 )
    is_quiet = ( i % 2 == 0 )
    print("is_raw=", is_raw, "is_quiet=", is_quiet)
    p = Ping(destination=ip,
             timeout=1,
             raw=is_raw,
             source_interface=dev,
             dest_mac=ar.getHardwareAddress(),
             quiet_output=is_quiet)
    # Send 4 echo requests; res can be None (handled below) -- presumably
    # when no replies arrive; confirm against the pyping API.
    res = p.run(4)
    if res is not None:
        print("Ping %s via %s RTT=%.3f,%.3f,%.3fms" % (ip, dev,
                                                       res.min_rtt, res.avg_rtt,
                                                       res.max_rtt))
| [
"denis.dowling@raedyne.systems"
] | denis.dowling@raedyne.systems |
bd84e65dda55986bc282e7d3e83537a6731b2602 | a383c8d5f8f50d617af8b1e5cd6d211c6012d888 | /python/hjbbs/test.py | d5c69cc1049e7b24d907514c25cc6fae5d602729 | [] | no_license | qxj/jqian | ff15711ba4e5b17eb7d5a5b1e1d5d2d591b7d645 | 1e16fd46a6a0c633d9c7d9819238c98d479edfd3 | refs/heads/master | 2020-05-21T22:17:49.829583 | 2017-02-09T03:52:57 | 2017-02-09T03:52:57 | 32,244,698 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 425 | py | # -*- coding: utf-8 -*-
# test.py --- Time-stamp: <2010-07-08 12:51:31 Thursday by julian>
# Copyright 2010 Julian Qian
# Author: julian@PDDES.cdc.veritas.com
# Version: $Id: test.py,v 0.0 2010/07/08 04:50:26 julian Exp $
# Keywords:
from log import logger
logger.debug('Test Debug a = %s', 1)
logger.error('Test Error!')
logger.info('info here')
def main():
"test.py"
if __name__ == "__main__":
main()
| [
"jqian@jqian.net"
] | jqian@jqian.net |
1bdd3f8a9b215240b0be7c93bcc5f130d6797050 | 7a16034d1fad2d402bef6c4d947aaba4a4cf35c8 | /wikidocs/pratice02.py | 92cb31ac16131c85d86d4082099961260e6c89b4 | [] | no_license | mungmange2/pythonstudy | 5bf24af634a5cda0b5915c11bcb215935f605005 | 48a5ab703859f572c776770677ae67363e10b0a4 | refs/heads/master | 2023-06-09T17:58:10.395418 | 2021-06-24T22:56:30 | 2021-06-24T22:56:30 | 381,248,765 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,109 | py | # 02장 연습문제
# https://wikidocs.net/42526
# Q01
print("---Q.01---")
korean = 80
english = 75
math = 55
print((korean + english + math) / 3)
# # #
# list = [80, 75, 55]
# print(sum(list) / len(list))
# # #
print("--------\n")
print("---Q.02---")
# Q02
if float(13 % 2) == 0:
print("짝수")
else:
print("홀수")
print("--------\n")
pin = "881120-1068234" # 03, 04
print("---Q.03---")
print(pin[0:6])
print(pin[7:14])
#print(pin.split("-")) # 문자열 분리
# print(pin.find("-")) # 위치 알려주기
# print(pin.index("-")) # 위치 알려주기
#print(pin[7:]) # 이렇게 해도 됨
print("--------\n")
print("---Q.04---")
if pin[7] == "2":
print("여자")
else:
print("남자")
print("--------\n")
print("---Q.05---")
a = "a:b:c:d"
print(a.replace(":", "#"))
print("--------\n")
print("---Q.06---")
list = [1, 3, 5, 4, 2]
# list.sort()
# list.reverse()
# 한번에 하는 방법
list.sort(reverse=True)
print(list)
print("--------\n")
print("---Q.07---")
str_list = ['Life', 'is', 'too', 'short']
str_data = " ".join(str_list)
print(str_data)
print("--------\n")
print("?---Q.08---")
t1 = (1,2,3) + (4,) # (4,) 같이 한개의 요소를 가질때에 ,을 붙여줘야 함.
print(t1)
print("--------\n")
print("?---Q.09---")
a = dict()
# a['name'] = 'python'
#a[('a','b')] = 'python'
#a[[1]] = 'python' # -> {}안에 빈 {}을 넣을 수 없음. 정확히는 딕셔너리 키로 변하는 값을 사용할 수 없음 (문자열 튜프 숫자등은 가능)
# a[250] = 'python'
print(a)
print("--------\n")
print("---Q.10---")
aa = {'A':90, 'B':80, 'C':70}
print(aa.pop('B'))
print("--------\n")
print("?---Q.11---")
# aaaa = [1, 1, 1, 2, 2, 3, 3, 3, 4, 4, 5]
# aSet = set(aaaa) # a 리스트를 집합자료형으로 변환
# b = list(aSet) # 집합자료형을 리스트 자료형으로 다시 변환
# print(b) # [1,2,3,4,5] 출력
print("--------\n")
print("---Q.12---")
a = b = [1, 2, 3]
a[1] = 4
print(b) # b = [1,2,3] 이고 b[1] 에 4로 치환하여(a[1]=4할당한것이 참조됨) [1, 4, 3] 이 됨
print("--------\n")
| [
"sohyun@vendys.co.kr"
] | sohyun@vendys.co.kr |
28314992041d5943d947c65c7469c6d091d08f02 | 0f201ad033df76bce7e968616ea135dd453cf081 | /clu/me/gen1/g2.py | 15552c88feb97c757ddbc5841dc7f84059dce83b | [] | no_license | noDefinition/works | 8d1faa47aa98eb8de97a082333b3518e92d8dd5d | 2f7f5d0534909e6ae1b19672d4edcc5ee0182729 | refs/heads/master | 2020-04-24T09:00:19.200685 | 2019-05-07T12:21:42 | 2019-05-07T12:21:42 | 171,849,070 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,485 | py | from me.gen1.g1 import *
# noinspection PyAttributeOutsideInit,PyMethodMayBeStatic,PyPep8Naming
class G2(G1):
""" 判别器分别输入正例与重构(要求对二者使用同样的结构),输出概率值并比较 """
def define_denses(self):
super(G2, self).define_denses()
sc, ed, md = self.scale, self.e_dim, self.m_dim
self.DL_dis = MyDenses(ed, [md, int(md / 2), 1], sc, (relu, relu, sigmoid))
self.D_dis = [self.DL_dis]
def forward(self):
l1, _, _, l4 = self.l_reg
mgn, nis = self.margin, self.noise
c, p, n = self.get_original_cpn()
with tf.name_scope('get_c_probs'):
pc_score = tensor_dot(p, tf.transpose(c))
pc_probs = tf.nn.softmax(pc_score, axis=1)
with tf.name_scope('get_p_res'):
r = tensor_dot(pc_probs, c)
r = self.D_r(r, name='get_r')
# r = tf.nn.relu(r)
with tf.name_scope('l2norm_prn'):
p_norm, r_norm, n_norm = l2_norm_tensors(p, r, n)
with tf.name_scope('discriminate'):
p_score_D = self.DL_dis(p)
r_score_D = self.DL_dis(r)
with tf.name_scope('loss'):
p_score_D_v = tf.reduce_mean(p_score_D)
r_score_D_v = tf.reduce_mean(1. - r_score_D)
loss_D = -(p_score_D_v + r_score_D_v)
with tf.name_scope('generate'):
# batch_size * 1
pr_sim_G = inner_dot(p_norm, r_norm, keepdims=True)
# batch_size * (neg sample size)
pn_sim_G = tensor_dot(p_norm, tf.transpose(n_norm))
pn_sim_G_v = tf.reduce_mean(pn_sim_G, axis=1, keepdims=True)
# rn_sim_G = tensor_dot(r_norm, tf.transpose(n_norm))
# rn_sim_G_v = tf.reduce_mean(rn_sim_G, axis=1, keepdims=True)
with tf.name_scope('loss'):
reg_G = sum([d.get_norm(order=2) for d in self.W_doc]) * l4
loss_G_pre = tf.reduce_mean(
tf.maximum(0., mgn - pr_sim_G + pn_sim_G_v * l1)) + reg_G
loss_G = loss_G_pre - loss_D
self.pc_probs = pc_probs
self.loss_D = loss_D
self.loss_G_pre = loss_G_pre
self.loss = self.loss_G = loss_G
with tf.name_scope('register'):
tf.summary.scalar(name='loss_D', tensor=loss_D)
tf.summary.scalar(name='loss_G_pre', tensor=loss_G_pre)
tf.summary.scalar(name='loss_G', tensor=loss_G)
tf.summary.scalar(name='p_score_D_v', tensor=p_score_D_v)
tf.summary.scalar(name='r_score_D_v', tensor=r_score_D_v)
tf.summary.histogram(name='pr_sim_G', values=pr_sim_G, family='G_sim')
tf.summary.histogram(name='pn_sim_G', values=pn_sim_G, family='G_sim')
tf.summary.histogram(name='p_score_D', values=p_score_D, family='D_score')
tf.summary.histogram(name='r_score_D', values=r_score_D, family='D_score')
tf.summary.histogram(name='p', values=p, family='doc')
tf.summary.histogram(name='r', values=r, family='doc')
tf.summary.histogram(name='n', values=n, family='doc')
# tf.summary.histogram(name='p_norm', values=p_norm, family='doc')
# tf.summary.histogram(name='r_norm', values=r_norm, family='doc')
# tf.summary.histogram(name='n_norm', values=n_norm, family='doc')
| [
"devincdong@163.com"
] | devincdong@163.com |
fbb7584b6adb9e3442c396592bae7b6a93f49a97 | 7e3b9821ab5d75b7f013cb817bc6d655e06bed22 | /shell.py | b26876c3f689d557508429bd5add06ba3b7e59e9 | [] | no_license | zhangzhen796/day03 | 6120b57c55f862263d40c626ab998a80aee92e63 | 2372bfda04748a5cef7c854d996b8fb0e38cedaa | refs/heads/master | 2022-12-15T20:33:48.311337 | 2020-08-31T10:27:00 | 2020-08-31T10:27:00 | 291,682,514 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 504 | py | import sys
import string
import keyword
# Characters allowed as the first character of an identifier.
first_str = string.ascii_letters + '_'
# Characters allowed in the remaining positions.
# NOTE(review): this rebinds the builtin name `str` at module level;
# check_string() below depends on it, so a rename must touch both places.
str = first_str + string.digits
def check_string(inp):
    """Validate *inp* as a Python identifier and describe the first problem.

    Returns one of the original status strings: a keyword message, "1 no"
    for a bad first character, a positional message for a bad later
    character, or "all he ge" on success.
    """
    if keyword.iskeyword(inp):
        return "%s is guanjianzi" % inp
    if inp[0] not in first_str:
        return "1 no"
    # Positions are reported 1-based; the first character was checked above.
    for position, ch in enumerate(inp[1:], start=2):
        if ch not in str:
            return "di %s ge bu xing %s" % (position, ch)
    return "all he ge"
# CLI entry: validate the identifier given as the first argument.
if __name__ == '__main__':
    print(check_string(sys.argv[1]))
"zhangzhen@aliyun.com"
] | zhangzhen@aliyun.com |
b347a2aefd4be9913db6db41c8fabd5ac47b172a | eab1756b01717e81537133400f36aea4d7a0876f | /cifar/ray_sync.py | 46fcca4fe1782621341778e8127e5a3db497e151 | [] | no_license | bearpelican/cluster | d677fe392ac1196b77e3f8fb79e530ec8371080f | 2e316cf1def0b72b47f79a864ed3aa778c297b95 | refs/heads/master | 2020-03-21T06:52:57.514901 | 2018-08-10T10:20:26 | 2018-08-10T22:33:05 | 138,246,892 | 3 | 1 | null | 2018-06-22T02:51:07 | 2018-06-22T02:51:07 | null | UTF-8 | Python | false | false | 11,470 | py | # from https://gist.github.com/robertnishihara/87aa7a9a68ef8fa0f3184129346cffc3
# To run the example, use a command like the following.
#
# python sharded_parameter_server_benchmark.py \
# --num-workers=1 \
# --num-parameter-servers=1 \
# --dim=25000
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import numpy as np
import os
import sys
import time
from collections import OrderedDict
from collections import defaultdict
import ray
import cifar10
import cifar10_model
import cifar10_utils
# move some methods to util later, for now "u" points to this file
util = sys.modules[__name__]
u = util
# TODO: do not hardwire parameter sizes/splitting
# Command-line configuration for the synchronous parameter-server benchmark.
parser = argparse.ArgumentParser(description="Run the synchronous parameter "
                                 "server example.")
parser.add_argument("--num-workers", default=2, type=int,
                    help="The number of workers to use.")
parser.add_argument("--num-parameter-servers", default=2, type=int,
                    help="The number of parameter servers to use.")
parser.add_argument("--dim", default=75360, type=int,
                    help="The number of parameters, defaults to size of "
                    "TF default CIFAR10 model")
parser.add_argument("--redis-address", default=None, type=str,
                    help="The Redis address of the cluster.")
parser.add_argument("--add-pause", default=0, type=int,
                    help="Add pause to avoid melting my laptop.")
parser.add_argument('--logdir', type=str, default='asdfasdfasdf',
                    help="location of logs")
parser.add_argument('--real-model', action='store_true',
                    default=False,
                    help="use real CIFAR model for gradients?")
# Parsed at import time; `args` is read by CNN and the actors below.
args = parser.parse_args()
########################################
# Tensorboard logging, move to util.py
########################################
def chunks(l, n):
    """Yield successive n-sized chunks from l (the last may be shorter)."""
    for start in range(0, len(l), n):
        end = start + n
        yield l[start:end]
# Per-tag history of timing measurements recorded by the `timeit` context
# manager below; maps tag -> list of intervals in milliseconds.
global_timeit_dict = OrderedDict()
class timeit:
    """Context manager that measures the time spent in its block (in
    milliseconds), appends it to global_timeit_dict, and logs it to
    TensorBoard under 'time/<tag>' when a TensorboardLogger exists.

    Note: despite being described as a decorator in the original, it is
    used as ``with timeit('tag'): ...``.
    """
    def __init__(self, tag=""):
        self.tag = tag
    def __enter__(self):
        # Monotonic wall-clock start.
        self.start = time.perf_counter()
        return self
    def __exit__(self, *args):
        self.end = time.perf_counter()
        interval_ms = 1000*(self.end - self.start)
        # Accumulate per-tag history for later inspection.
        global_timeit_dict.setdefault(self.tag, []).append(interval_ms)
        # Log to TensorBoard only if a logger has been created.
        logger = u.get_last_logger(skip_existence_check=True)
        if logger:
            newtag = 'time/'+self.tag
            logger(newtag, interval_ms)
# TODO: have global experiment_base that I can use to move logging to
# non-current directory
# NOTE(review): GLOBAL_RUNS_DIRECTORY appears unused in this file -- confirm.
GLOBAL_RUNS_DIRECTORY='runs'
# Singleton TensorboardLogger instance, set by TensorboardLogger.__init__.
global_last_logger = None
def get_last_logger(skip_existence_check=False):
    """Returns last logger, if skip_existence_check is set, doesn't
    throw error if logger doesn't exist."""
    global global_last_logger
    if not skip_existence_check:
        # AssertionError when no TensorboardLogger was ever constructed.
        assert global_last_logger
    return global_last_logger
class TensorboardLogger:
    """Helper class to log to a single TensorBoard writer from multiple places.

    Usage:
        logger = u.TensorboardLogger("mnist7")
        logger = u.get_last_logger()   # gets last logger created
        logger('svd_time', 5)          # records "svd_time" stat at 5
        logger.next_step()             # flushes pending values, advances step

    Only one instance may exist per process (enforced by the assert in
    __init__); it registers itself as the module-global global_last_logger.
    """
    def __init__(self, logdir, step=0):
        # TODO: do nothing for default run
        global global_last_logger
        assert global_last_logger is None
        # Bug fix: a trailing comma previously made this a 1-tuple
        # ('logdir',) instead of the directory string itself.
        self.logdir = logdir
        self.summary_writer = tf.summary.FileWriter(logdir,
                                                    flush_secs=5,
                                                    graph=tf.get_default_graph())
        self.step = step
        # Pending summary; values accumulate here until next_step() flushes.
        self.summary = tf.Summary()
        global_last_logger = self
        self.last_timestamp = time.perf_counter()
    def __call__(self, *args):
        """Record alternating (tag, value) pairs into the pending summary."""
        assert len(args) % 2 == 0
        for tag, value in chunks(args, 2):
            self.summary.value.add(tag=tag, simple_value=float(value))
    def next_step(self):
        """Flush the pending summary at the current step and advance it.

        Also records the wall-clock time since the previous step (in
        milliseconds) under the 'time/step' tag.
        """
        new_timestamp = time.perf_counter()
        interval_ms = 1000*(new_timestamp - self.last_timestamp)
        self.summary.value.add(tag='time/step',
                               simple_value=interval_ms)
        self.last_timestamp = new_timestamp
        self.summary_writer.add_summary(self.summary, self.step)
        self.step += 1
        self.summary = tf.Summary()
################################################################################
## Main stuff
################################################################################
# TODO(rkn): This is a placeholder.
class CNN(object):
    """Builds the TF graph for a ResNet CIFAR-10 model and exposes a flat
    (concatenated) gradient vector for the parameter-server benchmark.

    When --real-model is not set, get_gradients() skips TF entirely and
    returns a dummy vector of ones of length `dim`.
    """
    def __init__(self, dim):
        """Construct the graph on a synthetic batch.

        Args:
            dim: total number of parameters, i.e. the length of the flat
                gradient vector returned in dummy mode.
        """
        self.dim = dim
        # param values from cifar10_main.py
        if not tf.test.is_gpu_available():
            data_format = 'channels_last'
        else:
            data_format = 'channels_first'
        is_training = True
        # Bug fix: a trailing comma previously made this the 1-tuple (2e-4,),
        # which broadcast the weight-decay term and turned self.loss into a
        # shape-(1,) tensor instead of a scalar.
        weight_decay = 2e-4
        num_layers = 8
        batch_size = 32
        batch_norm_decay = 0.997
        batch_norm_epsilon = 1e-5
        # Synthetic input: random images, all-ones labels (benchmark only).
        image_batch = tf.random_uniform((batch_size, 32, 32, 3))
        label_batch = tf.ones((batch_size,), dtype=tf.int32)
        self.model = cifar10_model.ResNetCifar10(
            num_layers,
            batch_norm_decay=batch_norm_decay,
            batch_norm_epsilon=batch_norm_epsilon,
            is_training=is_training,
            data_format=data_format)
        self.logits = self.model.forward_pass(image_batch,
                                              input_data_format='channels_last')
        # make size of parameters multiple of 8 (75360); the variable is
        # intentionally unused apart from padding the parameter count.
        dummy_var = tf.Variable(tf.ones((5,)))
        self.pred = {
            'classes': tf.argmax(input=self.logits, axis=1),
            'probabilities': tf.nn.softmax(self.logits)
        }
        self.loss = tf.losses.sparse_softmax_cross_entropy(logits=self.logits,
                                                           labels=label_batch)
        self.model_params = tf.trainable_variables()
        # L2 weight decay over all trainable parameters.
        self.loss += weight_decay * tf.add_n(
            [tf.nn.l2_loss(v) for v in self.model_params])
        grads = tf.gradients(self.loss, self.model_params)
        # Flatten the per-variable gradients into a single 1-D tensor.
        self.grad = tf.concat([tf.reshape(g, [-1]) for g in grads], axis=0)
        self.weights = np.zeros(self.grad.shape, dtype=np.float32)
        # TODO: make this into an op that accepts actual values
        self.set_weights_op = tf.global_variables_initializer()
        # todo(y): pad things so that it's divisible by num_ps?
        self.sess = tf.Session()
    def get_gradients(self):
        """Return the flat gradient vector (real TF run, or dummy ones)."""
        if args.real_model:
            return self.sess.run(self.grad)
        return np.ones(self.dim, dtype=np.float32)
    def set_weights(self, weights):
        """Store new flat weights (dummy mode keeps the array only)."""
        self.weights = weights
        # TODO, pass weights into set_weights_op
        if args.real_model:
            self.sess.run(self.set_weights_op)
# TODO(rkn): Once we have better custom resource support for actors, we should
# not use GPUs here.
@ray.remote(num_gpus=1)
class ParameterServer(object):
    """Ray actor holding one shard of the model parameters."""
    def __init__(self, dim):
        # Parameter shard, updated in place by accumulating gradients.
        self.params = np.zeros(dim)
    def update_and_get_new_weights(self, *gradients):
        """Accumulate all worker gradients into this shard (plain addition,
        no learning rate) and return the updated shard."""
        for grad in gradients:
            self.params += grad
        return self.params
    def ip(self):
        """Node IP this actor runs on (used for the placement sanity check)."""
        return ray.services.get_node_ip_address()
@ray.remote(num_gpus=1)
class Worker(object):
    """Ray actor that computes gradients against the current weights."""
    def __init__(self, num_ps, dim):
        self.net = CNN(dim)
        self.num_ps = num_ps
        self.fixed = np.zeros(dim)  # NOTE(review): appears unused in this class
    @ray.method(num_return_vals=args.num_parameter_servers)
    def compute_gradient(self, *weights):
        """Concatenate the weight shards, push them into the model, and return
        the gradient — whole if one PS, otherwise split into num_ps chunks."""
        all_weights = np.concatenate(weights)
        self.net.set_weights(all_weights)
        gradient = self.net.get_gradients()
        if self.num_ps == 1:
            return gradient
        else:
            return np.split(gradient, self.num_ps)
    def ip(self):
        """Node IP this actor runs on."""
        return ray.services.get_node_ip_address()
# Entry point: build a sharded parameter-server topology (num_parameter_servers
# PS actors, num_workers worker actors) and run a synchronous training loop.
if __name__ == "__main__":
    import tensorflow as tf
    tf.constant(1) # dummy default graph to appease tensorboard
    if args.redis_address is None:
        # Run everything locally.
        ray.init(num_gpus=args.num_parameter_servers + args.num_workers)
    else:
        # Connect to a cluster.
        ray.init(redis_address=args.redis_address)
    # One zero-initialized weight shard per parameter server.
    split_weights = np.split(np.zeros(args.dim, dtype=np.float32),
                             args.num_parameter_servers)
    # create tensorboard logger
    logger = u.TensorboardLogger(args.logdir)
    # Create the parameter servers.
    pss = [ParameterServer.remote(split_weights[i].size)
           for i in range(args.num_parameter_servers)]
    # Create the workers.
    workers = [Worker.remote(args.num_parameter_servers, args.dim)
               for _ in range(args.num_workers)]
    # As a sanity check, make sure all workers and parameter servers are on
    # different machines.
    if args.redis_address is not None:
        all_ips = ray.get([ps.ip.remote() for ps in pss] +
                          [w.ip.remote() for w in workers])
        print("ps ips:")
        for (i, ps) in enumerate(pss):
            print(i, ps.ip.remote(), ray.get([ps.ip.remote()]))
        print("worker ips:")
        for (i, worker) in enumerate(workers):
            print(i, worker.ip.remote(), ray.get([worker.ip.remote()]))
        if len(all_ips) != len(set(all_ips)):
            print("Warning, some IPs are reused")
    LOG_FREQUENCY = 10
    step = 0
    last_step = 0
    last_time = time.time()
    # Synchronous loop: every worker computes gradients against the current
    # shards, every PS folds them in, and the new shards feed the next step.
    while True:
        step+=1
        logger.next_step()
        t1 = time.time()
        # Compute and apply gradients.
        assert len(split_weights) == args.num_parameter_servers
        grad_id_lists = [[] for _ in range(len(pss))]
        for worker in workers:
            gradients = worker.compute_gradient.remote(*split_weights)
            if len(pss) == 1:
                gradients = [gradients]
            assert len(gradients) == len(pss)
            for i in range(len(gradients)):
                grad_id_lists[i].append(gradients[i])
        # TODO(rkn): This weight should not be removed. Does it affect
        # performance?
        all_grad_ids = [grad_id for grad_id_list in grad_id_lists
                        for grad_id in grad_id_list]
        with u.timeit('wait_compute_grads'):
            ray.wait(all_grad_ids, num_returns=len(all_grad_ids))
        t2 = time.time()
        split_weights = []
        for i in range(len(pss)):
            assert len(grad_id_lists[i]) == args.num_workers
            new_weights_id = pss[i].update_and_get_new_weights.remote(
                *(grad_id_lists[i]))
            split_weights.append(new_weights_id)
        # TODO(rkn): This weight should not be removed. Does it affect
        # performance?
        with u.timeit('wait_ps_add'):
            ray.wait(split_weights, num_returns=len(split_weights))
        t3 = time.time()
        print("elapsed times: ", t3 - t1, t2 - t1, t3 - t2)
        if step%LOG_FREQUENCY == 0:
            steps_per_sec = (step - last_step)/(time.time()-last_time)
            logger("steps_per_sec", steps_per_sec)
            last_step = step
            last_time = time.time()
        if args.add_pause:
            time.sleep(0.1)
| [
"yaroslavvb@gmail.com"
] | yaroslavvb@gmail.com |
11cb7e789ab84179e5ab4c03818628b3c19be50b | b3e9884c931479a9feb699a30a390f85bfa2d3cd | /src/models/items/views.py | 488b29a165ec273e5acb9ab1e1a202f40751cd08 | [] | no_license | dpw1/price-monitor | b05fd460dda6fe48f790a4a58e9de87d6b71db22 | b2f30ff98a949bcca3e2347b43b79e353264b5bd | refs/heads/master | 2020-04-16T08:38:05.072794 | 2019-01-12T20:18:17 | 2019-01-12T20:18:17 | 165,431,997 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,504 | py | from flask import Blueprint, render_template, request, url_for
from werkzeug.utils import redirect
from src.models.items.item import Item
from src.models.alerts.alert import Alert
import src.models.users.decorators as user_decorators
item_blueprint = Blueprint('items', __name__, )
@item_blueprint.route('/<string:name>')
@user_decorators.requires_login
def item_page(name):
    """Display a single item by name. Not implemented yet."""
    pass
@item_blueprint.route('/create', methods=['POST', 'GET'])
@user_decorators.requires_login
def create_item():
    """Render the item-creation form; POST handling is still a stub."""
    if request.method == 'POST':
        print('hey')  # placeholder: item-creation logic not implemented yet
    return render_template('items/create_item.html')
@item_blueprint.route('/load')
@user_decorators.requires_login
def load_item():
    """
    Loads an item's data using their store and return a JSON representation of it.
    Not implemented yet.
    :return:
    """
    pass
@item_blueprint.route('/delete/<string:item_id>', methods=['GET'])
def delete_item(item_id):
    """Delete the item with the given id and return to the monitors page.

    NOTE(review): unlike the other item views, this route is not guarded by
    @user_decorators.requires_login — confirm whether that is intentional.
    """
    if request.method == 'GET':
        Item.delete_by_item_id(item_id)
    return redirect(url_for('users.user_monitors'))
@item_blueprint.route('/update/<string:item_id>', methods=['POST', 'GET'])
def update_item_price(item_id):
    """
    Checks if price has changed. Connected to the monitors.html template.

    NOTE(review): not guarded by @user_decorators.requires_login, unlike the
    other item views — confirm whether that is intentional. When the price
    has changed the function currently does nothing and returns None.
    :return:
    """
    item = Item.get_by_id(item_id)
    current_price = item.load_price()
    old_price = item.price
    if old_price == current_price:
        return redirect(url_for('users.user_monitors', alert='No changes!'))
    else:
        # save new price
        pass
pass | [
"diego.boarutto.fortes@gmail.com"
] | diego.boarutto.fortes@gmail.com |
b09c6e1074d997a3f178d5ee43d59926a97c4a30 | 812045c3ec6587827aeb18bde666237dfffc21ae | /tf_quant_finance/experimental/pricing_platform/framework/core/interpolation_method.py | 57fddd9fa9cd9a759018d3941917eee158a4cbf6 | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | google/tf-quant-finance | 2062082c85e8679b71e69bbeb579fe338c1b0288 | 0d3a2193c0f2d320b65e602cf01d7a617da484df | refs/heads/master | 2023-08-31T01:58:15.415811 | 2023-08-15T07:37:46 | 2023-08-15T07:38:22 | 198,669,252 | 4,165 | 557 | Apache-2.0 | 2023-08-04T19:25:55 | 2019-07-24T16:09:50 | Python | UTF-8 | Python | false | false | 781 | py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Supported interpolation methods."""
import enum
class InterpolationMethod(enum.Enum):
  """Interpolation methods supported for curve construction."""
  LINEAR = "LINEAR"
  CUBIC = "CUBIC"
  CONSTANT_FORWARD = "CONSTANT_FORWARD"
__all__ = ["InterpolationMethod"]
| [
"tf-quant-finance-robot@google.com"
] | tf-quant-finance-robot@google.com |
81ae3cf4619b6801f526fe160ab1905e1015b058 | e405095912863eb5f4747bce811b085902d04f3f | /tests/api/test_register.py | 5febfbf16b8b4b1a2b539085485e6c2c0db3125a | [
"MIT"
] | permissive | mtmvu/django-rest-registration | 329690626e63a68a9d1ac1fe5f2fb004291fbf90 | 881fcb7144f2463469a9c1030de8b9d78ebff1b6 | refs/heads/master | 2021-05-08T20:12:33.677529 | 2018-01-30T22:42:32 | 2018-01-30T22:42:32 | 119,598,596 | 0 | 0 | null | 2018-01-30T22:42:33 | 2018-01-30T21:48:18 | Python | UTF-8 | Python | false | false | 11,167 | py | import math
import time
from unittest.mock import patch
from django.test.utils import override_settings
from rest_framework import status
from rest_registration.api.views import register, verify_registration
from rest_registration.api.views.register import RegisterSigner
from rest_registration.settings import registration_settings
from .base import APIViewTestCase
REGISTER_VERIFICATION_URL = '/verify-account/'
# Settings presets injected via @override_settings in the test cases below.
REST_REGISTRATION_WITH_VERIFICATION = {
    'REGISTER_VERIFICATION_ENABLED': True,
    'REGISTER_VERIFICATION_URL': REGISTER_VERIFICATION_URL,
    'VERIFICATION_FROM_EMAIL': 'no-reply@example.com',
}
# Same as above, but with the password confirmation field disabled.
REST_REGISTRATION_WITH_VERIFICATION_NO_PASSWORD = {
    'REGISTER_VERIFICATION_ENABLED': True,
    'REGISTER_VERIFICATION_URL': REGISTER_VERIFICATION_URL,
    'VERIFICATION_FROM_EMAIL': 'no-reply@example.com',
    'REGISTER_SERIALIZER_PASSWORD_CONFIRM': False,
}
# Registration without any e-mail verification step.
REST_REGISTRATION_WITHOUT_VERIFICATION = {
    'REGISTER_VERIFICATION_ENABLED': False,
}
@override_settings(REST_REGISTRATION=REST_REGISTRATION_WITH_VERIFICATION)
class RegisterViewTestCase(APIViewTestCase):
    """Tests for the ``register`` view and its verification-e-mail flow."""
    def test_register_serializer_ok(self):
        """Default serializer exposes the password_confirm field."""
        serializer_class = registration_settings.REGISTER_SERIALIZER_CLASS
        serializer = serializer_class(data={})
        field_names = {f for f in serializer.get_fields()}
        self.assertEqual(
            field_names,
            {'username', 'first_name', 'last_name', 'email',
             'password', 'password_confirm'},
        )
    @override_settings(
        REST_REGISTRATION=REST_REGISTRATION_WITH_VERIFICATION_NO_PASSWORD,
    )
    def test_register_serializer_no_password_ok(self):
        """With confirmation disabled the serializer drops password_confirm."""
        serializer_class = registration_settings.REGISTER_SERIALIZER_CLASS
        serializer = serializer_class(data={})
        field_names = {f for f in serializer.get_fields()}
        self.assertEqual(
            field_names,
            {'username', 'first_name', 'last_name', 'email', 'password'},
        )
    def test_register_ok(self):
        """Registration creates an inactive user and e-mails a signed link."""
        data = self._get_register_user_data(password='testpassword')
        request = self.factory.post('', data)
        time_before = math.floor(time.time())
        with self.assert_one_mail_sent() as sent_emails:
            response = register(request)
        time_after = math.ceil(time.time())
        self.assert_valid_response(response, status.HTTP_201_CREATED)
        user_id = response.data['id']
        # Check database state.
        user = self.user_class.objects.get(id=user_id)
        self.assertEqual(user.username, data['username'])
        self.assertTrue(user.check_password(data['password']))
        self.assertFalse(user.is_active)
        # Check verification e-mail.
        sent_email = sent_emails[0]
        self.assertEqual(
            sent_email.from_email,
            REST_REGISTRATION_WITH_VERIFICATION['VERIFICATION_FROM_EMAIL'],
        )
        self.assertListEqual(sent_email.to, [data['email']])
        url = self.assert_one_url_line_in_text(sent_email.body)
        verification_data = self.assert_valid_verification_url(
            url,
            expected_path=REGISTER_VERIFICATION_URL,
            expected_query_keys={'signature', 'user_id', 'timestamp'},
        )
        url_user_id = int(verification_data['user_id'])
        self.assertEqual(url_user_id, user_id)
        url_sig_timestamp = int(verification_data['timestamp'])
        # The signature timestamp must fall within the request window.
        self.assertGreaterEqual(url_sig_timestamp, time_before)
        self.assertLessEqual(url_sig_timestamp, time_after)
        signer = RegisterSigner(verification_data)
        signer.verify()
    @override_settings(
        REST_REGISTRATION=REST_REGISTRATION_WITH_VERIFICATION_NO_PASSWORD,
    )
    def test_register_no_password_confirm_ok(self):
        """Registration succeeds without password_confirm when disabled."""
        data = self._get_register_user_data(password='testpassword')
        data.pop('password_confirm')
        request = self.factory.post('', data)
        time_before = math.floor(time.time())
        with self.assert_one_mail_sent() as sent_emails:
            response = register(request)
            self.assert_valid_response(response, status.HTTP_201_CREATED)
        time_after = math.ceil(time.time())
        user_id = response.data['id']
        # Check database state.
        user = self.user_class.objects.get(id=user_id)
        self.assertEqual(user.username, data['username'])
        self.assertTrue(user.check_password(data['password']))
        self.assertFalse(user.is_active)
        # Check verification e-mail.
        sent_email = sent_emails[0]
        self.assertEqual(
            sent_email.from_email,
            REST_REGISTRATION_WITH_VERIFICATION['VERIFICATION_FROM_EMAIL'],
        )
        self.assertListEqual(sent_email.to, [data['email']])
        url = self.assert_one_url_line_in_text(sent_email.body)
        verification_data = self.assert_valid_verification_url(
            url,
            expected_path=REGISTER_VERIFICATION_URL,
            expected_query_keys={'signature', 'user_id', 'timestamp'},
        )
        url_user_id = int(verification_data['user_id'])
        self.assertEqual(url_user_id, user_id)
        url_sig_timestamp = int(verification_data['timestamp'])
        self.assertGreaterEqual(url_sig_timestamp, time_before)
        self.assertLessEqual(url_sig_timestamp, time_after)
        signer = RegisterSigner(verification_data)
        signer.verify()
    def test_register_same_username(self):
        """Duplicate usernames are rejected and no e-mail is sent."""
        self.create_test_user(username='testusername')
        data = self._get_register_user_data(
            username='testusername', password='testpassword')
        request = self.factory.post('', data)
        with self.assert_no_mail_sent():
            response = register(request)
        self.assert_invalid_response(response, status.HTTP_400_BAD_REQUEST)
    @override_settings(
        REST_REGISTRATION=REST_REGISTRATION_WITHOUT_VERIFICATION,
    )
    def test_register_without_verification_ok(self):
        """Without verification the user is active immediately; no e-mail."""
        data = self._get_register_user_data(password='testpassword')
        request = self.factory.post('', data)
        with self.assert_no_mail_sent():
            response = register(request)
            self.assert_valid_response(response, status.HTTP_201_CREATED)
        user_id = response.data['id']
        user = self.user_class.objects.get(id=user_id)
        self.assertEqual(user.username, data['username'])
        self.assertTrue(user.check_password(data['password']))
        self.assertTrue(user.is_active)
    def test_register_no_email(self):
        """Missing e-mail address is rejected."""
        data = self._get_register_user_data(password='testpassword', email='')
        request = self.factory.post('', data)
        with self.assert_no_mail_sent():
            response = register(request)
        self.assert_response_is_bad_request(response)
    def test_register_short_password(self):
        """Too-short password is rejected by the password validators."""
        data = self._get_register_user_data(password='a')
        request = self.factory.post('', data)
        with self.assert_no_mail_sent():
            response = register(request)
        self.assert_response_is_bad_request(response)
    def test_register_password_numeric(self):
        """All-numeric password is rejected by the password validators."""
        data = self._get_register_user_data(password='4321332211113322')
        request = self.factory.post('', data)
        with self.assert_no_mail_sent():
            response = register(request)
        self.assert_response_is_bad_request(response)
    def test_register_password_same_as_username(self):
        """Password equal to the username is rejected."""
        username = 'testusername'
        data = self._get_register_user_data(
            username=username, password=username)
        request = self.factory.post('', data)
        with self.assert_no_mail_sent():
            response = register(request)
        self.assert_response_is_bad_request(response)
    def test_register_not_matching_password(self):
        """Mismatched password / password_confirm pair is rejected."""
        data = self._get_register_user_data(
            password='testpassword1',
            password_confirm='testpassword2')
        request = self.factory.post('', data)
        with self.assert_no_mail_sent():
            response = register(request)
        self.assert_response_is_bad_request(response)
    def _get_register_user_data(
            self, password, password_confirm=None, **options):
        """Build a registration payload; password_confirm defaults to password.

        Extra keyword arguments override or extend the payload fields.
        """
        username = 'testusername'
        email = 'testusername@example.com'
        if password_confirm is None:
            password_confirm = password
        data = {
            'username': username,
            'password': password,
            'password_confirm': password_confirm,
            'email': email,
        }
        data.update(options)
        return data
class VerifyRegistrationViewTestCase(APIViewTestCase):
    """Tests for the ``verify_registration`` view (signed-link activation)."""
    @override_settings(REST_REGISTRATION=REST_REGISTRATION_WITH_VERIFICATION)
    def test_verify_ok(self):
        """A valid signed payload activates the user."""
        user = self.create_test_user(is_active=False)
        self.assertFalse(user.is_active)
        signer = RegisterSigner({'user_id': user.pk})
        data = signer.get_signed_data()
        request = self.factory.post('', data)
        response = verify_registration(request)
        self.assert_valid_response(response, status.HTTP_200_OK)
        user.refresh_from_db()
        self.assertTrue(user.is_active)
    @override_settings(REST_REGISTRATION=REST_REGISTRATION_WITH_VERIFICATION)
    def test_verify_tampered_timestamp(self):
        """A modified timestamp invalidates the signature; user stays inactive."""
        user = self.create_test_user(is_active=False)
        self.assertFalse(user.is_active)
        signer = RegisterSigner({'user_id': user.pk})
        data = signer.get_signed_data()
        data['timestamp'] += 1
        request = self.factory.post('', data)
        response = verify_registration(request)
        self.assert_invalid_response(response, status.HTTP_400_BAD_REQUEST)
        user.refresh_from_db()
        self.assertFalse(user.is_active)
    @override_settings(REST_REGISTRATION=REST_REGISTRATION_WITH_VERIFICATION)
    def test_verify_expired(self):
        """A signature older than the validity window (8 days here) is rejected."""
        timestamp = int(time.time())
        user = self.create_test_user(is_active=False)
        self.assertFalse(user.is_active)
        with patch('time.time',
                   side_effect=lambda: timestamp):
            signer = RegisterSigner({'user_id': user.pk})
            data = signer.get_signed_data()
        request = self.factory.post('', data)
        with patch('time.time',
                   side_effect=lambda: timestamp + 3600 * 24 * 8):
            response = verify_registration(request)
        self.assert_invalid_response(response, status.HTTP_400_BAD_REQUEST)
        user.refresh_from_db()
        self.assertFalse(user.is_active)
    @override_settings(
        REST_REGISTRATION={
            'REGISTER_VERIFICATION_ENABLED': False,
            'REGISTER_VERIFICATION_URL': REGISTER_VERIFICATION_URL,
        }
    )
    def test_verify_disabled(self):
        """With verification disabled the endpoint 404s and never activates."""
        user = self.create_test_user(is_active=False)
        self.assertFalse(user.is_active)
        signer = RegisterSigner({'user_id': user.pk})
        data = signer.get_signed_data()
        request = self.factory.post('', data)
        response = verify_registration(request)
        self.assert_invalid_response(response, status.HTTP_404_NOT_FOUND)
        user.refresh_from_db()
        self.assertFalse(user.is_active)
"apragacz@o2.pl"
] | apragacz@o2.pl |
bd16b319ed4dcba3137983cd669010343712e3de | 3cb91e30e13a776157291ed12cc3a1c5016cbd0b | /tests/scripts/trajectory/parameter.py | af2971c157756e1eaefc39fda9c11e53fba0f6d7 | [] | no_license | juanmed/alpha_ai | 7b545fd63b29a3e9bde4a7fb9d6aeeffc89dc2b7 | 0c7ba285c6cd63f95bb5af2fa26af51b330c9866 | refs/heads/master | 2020-04-25T03:56:52.080914 | 2019-05-09T04:52:19 | 2019-05-09T04:52:19 | 172,494,004 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 865 | py | #!/usr/bin/env python
import rospy
import numpy as np
# Trajectory parameter
order = 10        # polynomial order used for trajectory generation
flat_output = 4   # number of flat outputs (presumably x, y, z, yaw — TODO confirm)
# way point checking
# inflation means the scale of virtual cube including way point
inflation = 2
# tolerance is like threshold to decide whether drone pass or not
tolerance = 0.3
# Toggle between live ROS parameters and an offline fallback.
ros_isrunning = True
if ros_isrunning:
    # Waypoint
    init_pose = rospy.get_param("/uav/flightgoggles_uav_dynamics/init_pose")
    gate_name = rospy.get_param("/uav/gate_names")
    # Drone physical property
    mass = rospy.get_param("/uav/flightgoggles_uav_dynamics/vehicle_mass")
    Ixx = rospy.get_param("/uav/flightgoggles_uav_dynamics/vehicle_inertia_xx")
    Iyy = rospy.get_param("/uav/flightgoggles_uav_dynamics/vehicle_inertia_yy")
    Izz = rospy.get_param("/uav/flightgoggles_uav_dynamics/vehicle_inertia_zz")
else:
    # NOTE(review): this branch defines only init_pose; gate_name, mass,
    # Ixx/Iyy/Izz stay undefined and importers relying on them will fail.
    init_pose = np.array([0, 0, 1, 0])
| [
"pdydgml93@google.com"
] | pdydgml93@google.com |
7e0667037c44de942b3814486bf6c27aec8611e8 | 72f20ed2decad165c1c48338741cbe24ee318eda | /mini_ic3.py | 17abf27054ac26a0d31faf8e6859ba339ca68c41 | [] | no_license | NerdonblooR/pic3 | 457917cf38f402bac980ece736b4a67b32ca4167 | ce459218791fafb965a7a7a3797ab0a41fd8c1e7 | refs/heads/master | 2020-11-23T23:39:26.791256 | 2020-01-18T21:12:16 | 2020-01-18T21:12:16 | 227,867,578 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,789 | py | from z3 import *
import heapq
import re
import json
import boto3
import pickle
import datetime
# Simplistic (and fragile) converter from
# a class of Horn clauses corresponding to
# a transition system into a transition system
# representation as <init, trans, goal>
# It assumes it is given three Horn clauses
# of the form:
# init(x) => Invariant(x)
# Invariant(x) and trans(x,x') => Invariant(x')
# Invariant(x) and goal(x) => Goal(x)
# where Invariant and Goal are uninterpreted predicates
class Horn2Transitions:
    """Convert three Horn clauses (init, trans, goal — see module comment)
    into a transition system <init, trans, goal> over fresh Boolean variables
    x* (current state), xn* (next state) and i* (inputs)."""
    def __init__(self):
        self.trans = True     # transition relation over xs/xns/inputs
        self.init = True      # initial-state formula over xs
        self.inputs = []      # input variables (i*)
        self.goal = True      # goal (bad-state) formula
        self.index = 0        # counter used when minting fresh Boolean names
    def parse(self, file):
        """Parse an SMT2 Horn-clause file and classify each rule as
        goal, transition or init (tried in that order)."""
        fp = Fixedpoint()
        goals = fp.parse_file(file)
        for r in fp.get_rules():
            if not is_quantifier(r):
                continue
            b = r.body()
            if not is_implies(b):
                continue
            f = b.arg(0)
            g = b.arg(1)
            if self.is_goal(f, g):
                continue
            if self.is_transition(f, g):
                continue
            if self.is_init(f, g):
                continue
    def is_pred(self, p, name):
        """True if p is an application of a predicate with the given name."""
        return is_app(p) and p.decl().name() == name
    def is_goal(self, body, head):
        """Recognize `Invariant(x) and goal(x) => Goal(x)`; store self.goal."""
        if not self.is_pred(head, "Goal"):
            return False
        pred, inv = self.is_body(body)
        if pred is None:
            return False
        # Rename state vars to x*, then any remaining free vars to inputs i*.
        self.goal = self.subst_vars("x", inv, pred)
        self.goal = self.subst_vars("i", self.goal, self.goal)
        self.inputs += self.vars
        self.inputs = list(set(self.inputs))
        return True
    def is_body(self, body):
        """Split a conjunctive body into (non-Invariant part, Invariant atom)."""
        if not is_and(body):
            return None, None
        fmls = [f for f in body.children() if self.is_inv(f) is None]
        inv = None
        for f in body.children():
            if self.is_inv(f) is not None:
                inv = f;
                break
        return And(fmls), inv
    def is_inv(self, f):
        """Return f if it is an Invariant(...) application, else None."""
        if self.is_pred(f, "Invariant"):
            return f
        return None
    def is_transition(self, body, head):
        """Recognize `Invariant(x) and trans(x,x') => Invariant(x')`;
        store self.trans plus the xs/xns/inputs variable lists."""
        pred, inv0 = self.is_body(body)
        if pred is None:
            return False
        inv1 = self.is_inv(head)
        if inv1 is None:
            return False
        pred = self.subst_vars("x", inv0, pred)
        self.xs = self.vars
        pred = self.subst_vars("xn", inv1, pred)
        self.xns = self.vars
        # Whatever is still free after x/xn renaming is an input.
        pred = self.subst_vars("i", pred, pred)
        self.inputs += self.vars
        self.inputs = list(set(self.inputs))
        self.trans = pred
        return True
    def is_init(self, body, head):
        """Recognize `init(x) => Invariant(x)`; store self.init."""
        for f in body.children():
            if self.is_inv(f) is not None:
                return False
        inv = self.is_inv(head)
        if inv is None:
            return False
        self.init = self.subst_vars("x", inv, body)
        return True
    def subst_vars(self, prefix, inv, fml):
        """Substitute the variables of `inv` in `fml` with fresh Booleans
        named prefix1, prefix2, ...; remembers them in self.vars."""
        subst = self.mk_subst(prefix, inv)
        self.vars = [v for (k, v) in subst]
        return substitute(fml, subst)
    def mk_subst(self, prefix, inv):
        """Build the (old var, fresh Bool) substitution pairs for `inv`."""
        self.index = 0
        if self.is_inv(inv) is not None:
            return [(f, self.mk_bool(prefix)) for f in inv.children()]
        else:
            vars = self.get_vars(inv)
            return [(f, self.mk_bool(prefix)) for f in vars]
    def mk_bool(self, prefix):
        """Mint the next fresh Boolean named '<prefix><index>'."""
        self.index += 1
        return Bool("%s%d" % (prefix, self.index))
    def get_vars(self, f, rs=[]):
        """Collect the de-Bruijn variables occurring in f (deduplicated).

        NOTE(review): `rs=[]` is a mutable default, but it is never mutated
        (only `rs + [f]` / reassignment), so it is safe as written.
        """
        if is_var(f):
            return z3util.vset(rs + [f], str)
        else:
            for f_ in f.children():
                rs = self.get_vars(f_, rs)
            return z3util.vset(rs, str)
# Produce a finite domain solver.
# The theory QF_FD covers bit-vector formulas
# and pseudo-Boolean constraints.
# By default cardinality and pseudo-Boolean
# constraints are converted to clauses. To override
# this default for cardinality constraints
# we set sat.cardinality.solver to True
def fd_solver():
    """Create a finite-domain (QF_FD) solver with the cardinality solver on."""
    s = SolverFor("QF_FD")
    s.set("sat.cardinality.solver", True)
    return s
# negate, avoid double negation
def negate(f):
    """Negate f; strips an outer Not instead of stacking a second one."""
    return f.arg(0) if is_not(f) else Not(f)
def cube2clause(cube):
    """Turn a cube (list of literals) into the clause blocking it."""
    literals = []
    for lit in cube:
        literals.append(negate(lit))
    return Or(literals)
class State:
    """One IC3 frame: a set of learned clauses R mirrored into a solver."""
    def __init__(self, s):
        self.R = set()
        self.solver = s
    def add(self, clause):
        """Record `clause` in R and assert it in the solver; no-op if known."""
        if clause in self.R:
            return
        self.R.add(clause)
        self.solver.add(clause)
class Goal:
    """A proof obligation: block `cube` at frame `level`.

    `parent` chains obligations so a counterexample trace can be recovered.
    """
    def __init__(self, cube, parent, level):
        self.cube = cube
        self.parent = parent
        self.level = level
def is_seq(f):
    """True if f is a Python list/tuple or a z3 AstVector."""
    for kind in (list, tuple):
        if isinstance(f, kind):
            return True
    return isinstance(f, AstVector)
# Check if the initial state is bad
def check_disjoint(a, b):
    """Return True iff formulas a and b cannot hold together (unsat)."""
    s = fd_solver()
    s.add(a)
    s.add(b)
    return unsat == s.check()
# Remove clauses that are subsumed
def prune(R):
    """Return R minus clauses implied by the remaining surviving clauses."""
    removed = set([])
    s = fd_solver()
    for f1 in R:
        s.push()
        for f2 in R:
            if f2 not in removed:
                # Assert Not(f1) together with every other surviving clause;
                # unsat means f1 is implied by them and can be dropped.
                s.add(Not(f2) if f1.eq(f2) else f2)
        if s.check() == unsat:
            removed |= {f1}
        s.pop()
    return R - removed
class MiniIC3:
    """Minimal IC3/PDR model checker over <init, trans, bad> with state
    variables x0, next-state variables xn and input variables.

    run() returns a Goal (counterexample trace), an invariant formula,
    or `unknown`.
    """
    def __init__(self, init, trans, goal, x0, inputs, xn):
        self.x0 = x0
        self.inputs = inputs
        self.xn = xn
        self.init = init
        self.bad = goal
        self.trans = trans
        # Dual solver holding Not(trans); used to shrink predecessor cubes.
        self.min_cube_solver = fd_solver()
        self.min_cube_solver.add(Not(trans))
        self.goals = []
        s = State(fd_solver())
        s.add(init)
        s.solver.add(trans)
        self.states = [s]
        self.s_bad = fd_solver()
        self.s_good = fd_solver()
        self.s_bad.add(self.bad)
        self.s_good.add(Not(self.bad))
    def next(self, f):
        """Rename current-state variables x0 to next-state variables xn."""
        if is_seq(f):
            return [self.next(f1) for f1 in f]
        return substitute(f, zip(self.x0, self.xn))
    def prev(self, f):
        """Rename next-state variables xn back to current-state x0."""
        if is_seq(f):
            return [self.prev(f1) for f1 in f]
        return substitute(f, zip(self.xn, self.x0))
    # add a new frame to states, each state solver contains a new solver that
    # embed a transition
    def add_solver(self):
        s = fd_solver()
        s.add(self.trans)
        self.states += [State(s)]
    # retrive the lemmas of f_i
    def R(self, i):
        return And(self.states[i].R)
    # Check if there are two states next to each other that have the same clauses.
    def is_valid(self):
        """Return the inductive invariant if two adjacent frames coincide."""
        i = 1
        while i + 1 < len(self.states):
            if not (self.states[i].R - self.states[i + 1].R):
                return And(prune(self.states[i].R))
            i += 1
        return None
    def value2literal(self, m, x):
        """Literal for Boolean x under model m, or None if unassigned."""
        value = m.eval(x)
        if is_true(value):
            return x
        if is_false(value):
            return Not(x)
        return None
    def values2literals(self, m, xs):
        """Literals for all of xs assigned in model m."""
        p = [self.value2literal(m, x) for x in xs]
        return [x for x in p if x is not None]
    def project0(self, m):
        """Project model m onto the current-state variables."""
        return self.values2literals(m, self.x0)
    def projectI(self, m):
        """Project model m onto the input variables."""
        return self.values2literals(m, self.inputs)
    def projectN(self, m):
        """Project model m onto the next-state variables."""
        return self.values2literals(m, self.xn)
    # Determine if there is a cube for the current state
    # that is potentially reachable.
    def unfold(self):
        """Check whether the top frame intersects the bad states; if so,
        return (sat, core) where core is a shrunk bad cube."""
        core = []
        # add a checkpoint
        self.s_bad.push()
        R = self.R(len(self.states) - 1)
        self.s_bad.add(R)
        is_sat = self.s_bad.check()
        if is_sat == sat:
            m = self.s_bad.model()
            cube = self.project0(m)
            props = cube + self.projectI(m)
            # Shrink the cube using the unsat core against the good states.
            self.s_good.push()
            self.s_good.add(R)
            is_sat2 = self.s_good.check(props)
            assert is_sat2 == unsat
            core = self.s_good.unsat_core()
            core = [c for c in core if c in set(cube)]
            self.s_good.pop()
        self.s_bad.pop()
        return is_sat, core
    # Block a cube by asserting the clause corresponding to its negation
    def block_cube(self, i, cube):
        self.assert_clause(i, cube2clause(cube))
    # Add a clause to levels 0 until i
    def assert_clause(self, i, clause):
        for j in range(i + 1):
            self.states[j].add(clause)
    # minimize cube that is core of Dual solver.
    # this assumes that props & cube => Trans
    def minimize_cube(self, cube, inputs, lits):
        is_sat = self.min_cube_solver.check(lits + [c for c in cube] + [i for i in inputs])
        assert is_sat == unsat
        core = self.min_cube_solver.unsat_core()
        assert core
        return [c for c in core if c in set(cube)]
    # push a goal on a heap
    def push_heap(self, goal):
        # NOTE(review): ties on level compare Goal objects; fine under
        # Python 2's arbitrary-object ordering, would raise under Python 3.
        heapq.heappush(self.goals, (goal.level, goal))
    # A state s0 and level f0 such that
    # not(s0) is f0-1 inductive
    def ic3_blocked(self, s0, f0):
        """Try to block bad cube s0 at frame f0; return a Goal trace if a
        counterexample reaches frame 0, None if blocked, else `unknown`."""
        self.push_heap(Goal(self.next(s0), None, f0))
        while self.goals:
            f, g = heapq.heappop(self.goals)
            sys.stdout.write("%d." % f)
            sys.stdout.flush()
            # Not(g.cube) is f-1 invariant
            if f == 0:
                print("")
                return g
            cube, f, is_sat = self.is_inductive(f, g.cube)
            if is_sat == unsat:
                self.block_cube(f, self.prev(cube))
                if f < f0:
                    self.push_heap(Goal(g.cube, g.parent, f + 1))
            elif is_sat == sat:
                # Found a predecessor: queue it one frame lower, retry g later.
                self.push_heap(Goal(cube, g, f - 1))
                self.push_heap(g)
            else:
                return is_sat
        print("")
        return None
    # Rudimentary generalization:
    # If the cube is already unsat with respect to transition relation
    # extract a core (not necessarily minimal)
    # otherwise, just return the cube.
    def generalize(self, cube, f):
        s = self.states[f - 1].solver
        if unsat == s.check(cube):
            core = s.unsat_core()
            if not check_disjoint(self.init, self.prev(And(core))):
                return core, f
        return cube, f
    # Check if the negation of cube is inductive at level f
    def is_inductive(self, f, cube):
        s = self.states[f - 1].solver
        s.push()
        s.add(self.prev(Not(And(cube))))
        is_sat = s.check(cube)
        if is_sat == sat:
            m = s.model()
        s.pop()
        if is_sat == sat:
            # Extract and minimize a predecessor cube from the model.
            cube = self.next(self.minimize_cube(self.project0(m), self.projectI(m), self.projectN(m)))
        elif is_sat == unsat:
            cube, f = self.generalize(cube, f)
        return cube, f, is_sat
    def checkpoint(self):
        """Debug helper: print every learned lemma of every frame
        (Python 2 print statement)."""
        for i in range(len(self.states)):
            state = self.states[i]
            for lemma in state.R:
                print str(lemma)
    def run(self):
        """Main IC3 loop: alternate invariant check, unfolding and blocking."""
        if not check_disjoint(self.init, self.bad):
            return "goal is reached in initial state"
        level = 0
        while True:
            self.checkpoint()
            inv = self.is_valid()
            if inv is not None:
                return inv
            is_sat, cube = self.unfold()
            if is_sat == unsat:
                # Top frame is clean: open a new frame.
                level += 1
                print("Unfold %d" % level)
                sys.stdout.flush()
                self.add_solver()
            elif is_sat == sat:
                cex = self.ic3_blocked(cube, level)
                if cex is not None:
                    return cex
            else:
                return is_sat
def test(file):
    """Parse a Horn-clause SMT2 file, run MiniIC3 on it, print the outcome
    (counterexample trace, invariant, or raw result)."""
    h2t = Horn2Transitions()
    h2t.parse(file)
    mp = MiniIC3(h2t.init, h2t.trans, h2t.goal, h2t.xs, h2t.inputs, h2t.xns)
    result = mp.run()
    if isinstance(result, Goal):
        g = result
        print("Trace")
        while g:
            print(g.level, g.cube)
            g = g.parent
        return
    if isinstance(result, ExprRef):
        print("Invariant:\n%s " % result)
        return
    print(result)
# result = mp.run()
# if isinstance(result, Goal):
# g = result
# print("Trace")
# while g:
# print(g.level, g.cube)
# g = g.parent
# return
# if isinstance(result, ExprRef):
# print("Invariant:\n%s " % result)
# return
# print(result)
# test("data/horn1.smt2")
# test("data/horn2.smt2")
# test("data/horn3.smt2")
# test("data/horn4.smt2")
# test("data/horn5.smt2")
# test("data/horn6.smt2") # takes long time to finish
# test("data/horn3.smt2")
def value2literal(m, x):
    """Literal for Boolean variable x under model m, or None if unassigned."""
    v = m.eval(x)
    if is_true(v):
        return x
    return Not(x) if is_false(v) else None
def values2literals(m, xs):
    """Literals for all variables in xs that model m assigns."""
    lits = []
    for x in xs:
        lit = value2literal(m, x)
        if lit is not None:
            lits.append(lit)
    return lits
def project_var(m, vars):
    """Project model m onto the given variables (alias for values2literals)."""
    return values2literals(m, vars)
def partition_bad_state(h2t, partition_num):
    """Enumerate every concrete bad state of `h2t` and split them into at most
    `partition_num` subgoal formulas, serialized as whitespace-normalized
    strings.

    NOTE: enumeration blocks one model at a time, so this only terminates
    quickly for small state spaces.
    """
    partitioner = fd_solver()
    # good_solver is only used by the (removed, previously commented-out)
    # sanity check that each cube is disjoint from the good states.
    good_solver = fd_solver()
    partitioner.add(h2t.goal)
    good_solver.add(Not(h2t.goal))
    bad_states = []
    # partition the bad state: enumerate cubes, blocking each as we go.
    while sat == partitioner.check():
        m = partitioner.model()
        cube = project_var(m, h2t.xs) + project_var(m, h2t.inputs)
        partitioner.add(Not(And(cube)))
        bad_states.append(And(cube))

    def flush(batch):
        # Serialize one batch as a single whitespace-normalized formula.
        formula = Or(batch) if len(batch) > 1 else batch[0]
        subgoals.append(" ".join(str(formula).split()))

    batch_size = int(math.ceil(float(len(bad_states)) / float(partition_num)))
    subgoals = []
    batch = []
    print(len(bad_states))
    print(batch_size)
    for bad in bad_states:
        batch.append(bad)
        if len(batch) == batch_size:
            flush(batch)
            batch = []
    if batch:
        flush(batch)
    return subgoals
# test lambda expression
def invoke_lambda_function(h2t, goal):
    """Synchronously invoke the 'pic3lambda' AWS Lambda on one sub-goal.

    The transition system (init/trans) and the variable lists are serialized
    to strings; variable lists are sent as space-separated names.  The
    worker's reply is expected to be JSON with a 'message' key, which is
    returned to the caller.
    """
    # str(x) via a generator replaces the original map(lambda x: str(x), ...)
    # wrapper; it is equivalent under both Python 2 and Python 3.
    event = {
        'init': str(h2t.init),
        'trans': str(h2t.trans),
        'goal': str(goal),
        'inputs': " ".join(str(x) for x in h2t.inputs),
        'xs': " ".join(str(x) for x in h2t.xs),
        'xns': " ".join(str(x) for x in h2t.xns),
    }
    client = boto3.client('lambda')
    response = client.invoke(
        FunctionName='pic3lambda',
        InvocationType='RequestResponse',
        LogType='Tail',
        Payload=json.dumps(event)
    )
    resp = json.loads(response['Payload'].read())
    return resp['message']
# Load the Horn clauses and extract an (init, trans, goal) transition system.
h2t = Horn2Transitions()
h2t.parse("data/horn4.smt2")
print h2t.init
print h2t.trans
print h2t.goal
print h2t.inputs
print h2t.xs
print h2t.xns
# Round-trip the z3 expressions through strings so they can be rebuilt
# (via exec/eval) against freshly declared Bool variables below.
input_list = map(lambda x: str(x), h2t.inputs)
xs_list = map(lambda x: str(x), h2t.xs)
xns_list = map(lambda x: str(x), h2t.xns)
init_str = str(h2t.init)
trans_str = str(h2t.trans)
goal_str = str(h2t.goal)
inputs_str = " ".join(input_list)
xs_str = " ".join(xs_list)
xns_str = " ".join(xns_list)
variables = "{0} {1} {2}".format(xs_str, inputs_str, xns_str)
v_list = variables.split()
# builds e.g. "x0,x1,i0 = Bools('x0 x1 i0')" and executes it (Python 2 exec)
var_str = "{0} = Bools('{1}')".format(",".join(v_list), " ".join(v_list))
#
# Textually rewrite variable names x* -> xn* so init/goal refer to the primed
# copies; presumably done to run the search backwards (see MiniIC3 call below).
init_str = init_str.replace('x','xn')
goal_str = goal_str.replace('x','xn')
print init_str
print goal_str
exec var_str
xs = Bools(xs_str)
inputs = Bools(inputs_str)
xns = Bools(xns_str)
init = eval(init_str)
goal = eval(goal_str)
trans_str = trans_str.replace("\n", "")
# flatten AtMost((a, b), k) into AtMost(a, b, k) so eval() accepts it
trans_str = re.sub(r'(.*)AtMost\(\((.*)\), ([0-9])\)', r'\1AtMost(\2, \3)', trans_str)
#trans_str = trans_str.replace('x','xn')
print trans_str
trans = eval(trans_str)
# print goal_str
# init = eval(init_str)
#goal = eval(ps[3])
#
# NOTE(review): arguments are deliberately reversed relative to the commented
# call below (goal passed as init, xns as current-state vars) — this appears
# to run IC3 in the backward direction; confirm the intent.
mp = MiniIC3(goal, trans, init, xns, inputs, xs)
#mp = MiniIC3(init, trans, goal, xs, inputs, xns)
start = datetime.datetime.now()
result = mp.run()
print result
if isinstance(result, Goal):
    g = result
    print("Trace")
    while g:
        print(g.level, g.cube)
        g = g.parent
if isinstance(result, ExprRef):
    print("Invariant:\n%s " % result)
end = datetime.datetime.now()
diff = end - start
# NOTE(review): timedelta.microseconds is only the sub-second component, so
# the reported milliseconds are wrong for runs longer than one second; use
# diff.total_seconds() instead.
print float(diff.microseconds) / float(1000)
# print "========================="
# print h2t.init
# print "========================="
# print h2t.trans
# print "========================="
# print h2t.goal
# print "========================="
# print h2t.inputs
# print "========================="
# print h2t.xs
# print "========================="
# print h2t.xns
| [
"njbbtan@gmail.com"
] | njbbtan@gmail.com |
02d357be02aff4280810f88b1334e5be84618ad8 | dabd7e42b063590513aa97e2c37c42aabe5b3d66 | /bookings/migrations/0008_settings.py | 9e7769d60e600c5db6d5d028bc7b33fbff0ca47f | [] | no_license | JsnSwny/Booking-System | 432357b031414e861d38ae38b2e518961961522a | 7100cf1a47f355a4baaa723c68fbe0ef37d70ef4 | refs/heads/master | 2023-05-20T04:58:29.469055 | 2021-06-07T10:11:01 | 2021-06-07T10:11:01 | 209,545,336 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 951 | py | # Generated by Django 2.2 on 2019-11-28 19:21
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated (Django 2.2): re-creates the Settings table that
    # migration 0007 deleted, holding the restaurant name plus a free-text
    # opening-hours field for each day of the week.

    dependencies = [
        ('bookings', '0007_delete_settings'),
    ]

    operations = [
        migrations.CreateModel(
            name='Settings',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('restaurant_name', models.TextField()),
                ('monday', models.CharField(max_length=100)),
                ('tuesday', models.CharField(max_length=100)),
                ('wednesday', models.CharField(max_length=100)),
                ('thursday', models.CharField(max_length=100)),
                ('friday', models.CharField(max_length=100)),
                ('saturday', models.CharField(max_length=100)),
                ('sunday', models.CharField(max_length=100)),
            ],
        ),
    ]
| [
"jsnswny@gmail.com"
] | jsnswny@gmail.com |
de49832167eaa5ce137d611d48d18adc111306a6 | 5e61c2a6441a53412ab25de4ed227450d6a48038 | /lesson_003/01_days_in_month.py | c566ef901c3dbabd5e12899368ea4f0effd03727 | [] | no_license | AntonButyrin/SBhomeworks | 577584ba71110f709944270f5edd415247591fee | 100f1c1ca91bb69e55e819d9793b0f55d0ddb663 | refs/heads/main | 2023-03-01T12:00:23.124122 | 2021-02-01T13:09:08 | 2021-02-01T13:09:08 | 334,947,275 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,221 | py | # -*- coding: utf-8 -*-
# (if/elif/else)
# По номеру месяца вывести кол-во дней в нем (без указания названия месяца, в феврале 28 дней)
# Результат проверки вывести на консоль
# Если номер месяца некорректен - сообщить об этом
month_number = {
1: 31,
2: 28,
3: 31,
4: 30,
5: 31,
6: 30,
7: 31,
8: 31,
9: 30,
10: 31,
11: 30,
12: 31
}
# Read the month number from the user as follows
while True:
    try:
        user_input = int(input("Введите, пожалуйста, номер месяца: "))
        # out-of-range months are funneled into the generic handler below
        if user_input < 1 or user_input > 12:
            raise Exception
        month = int(user_input)  # user_input is already an int; conversion kept as-is
        print('Вы ввели', month)
        break
    except ValueError:
        # non-numeric input ("Неверный формат" = "invalid format")
        print('Неверный формат')
    except Exception:
        # month outside 1..12 ("no such month; enter a number from 1 to 12")
        print('Такого месяца не существует. Введите номер месяца от 1 до 12')
# look up and print the number of days for the chosen month
print(month_number[user_input])
# Зачёт!
| [
"noreply@github.com"
] | noreply@github.com |
607d0391a5ca1d9afb87fe0ca87b84523daaf788 | c2dd97c9d51ce3a44cade6e1426564fbc67df10e | /userprofile/migrations/0004_auto_20180523_1407.py | c0c623481f95ba6e765740476c7b2d6999eca002 | [
"MIT"
] | permissive | Gigibit/ubildin | d15e6e6e376d8ce84adbb530ca99779ce4694215 | 7d0834d1a5432f0c00122159f3099cfedf398f10 | refs/heads/master | 2021-07-15T06:32:57.513918 | 2020-02-19T17:44:46 | 2020-02-19T17:44:46 | 241,682,709 | 0 | 0 | MIT | 2021-03-19T23:16:32 | 2020-02-19T17:40:07 | Python | UTF-8 | Python | false | false | 363 | py | # Generated by Django 2.0.4 on 2018-05-23 14:07
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated: sets the admin display name ('verbose_name_plural')
    # of the Profile model to 'profiles'; no database schema change.

    dependencies = [
        ('userprofile', '0003_auto_20180516_1534'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='profile',
            options={'verbose_name_plural': 'profiles'},
        ),
    ]
| [
"gigibit92@gmail.com"
] | gigibit92@gmail.com |
8dafa079019e6389f13ac0f9fa585da38ee8b52a | 19634dfd431c400807cbea50973daad8a8283646 | /src/entangled.py | 9cfd9b5c22110071b3164798de9c3d8e77c28723 | [] | no_license | tianer2820/Entangled | 592f5e85464481edd117e2dc1a629159a878d25b | 8902a629db918b2f0a06436cee7a07badc93e07d | refs/heads/master | 2022-06-01T04:31:18.924471 | 2019-10-29T04:42:55 | 2019-10-29T04:42:55 | 188,439,178 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,280 | py | """
Main program
"""
from encript import encrypt
from key_gen import generate_key
from key_management import read_key_info, write_key_info
import os
import re
def realtime_mode(args):
    """Interactive encrypt/decrypt loop using a one-time-pad key file.

    args[0] is the key name: ``<name>.qkey`` holds the pad bytes and
    ``<name>.qkeyinfo`` remembers how many bytes were already consumed.
    Plain input lines are encrypted and echoed as ``\\key:shift:hex``;
    lines starting with a backslash are parsed as that ciphertext format
    and decrypted.  Type ``\\exit`` to quit.

    NOTE(review): the CLI declares two values (KEY_NAME, KEY_SHIFT) for
    --realtime, but only args[0] is used here; the cursor is read from the
    .qkeyinfo file instead.
    """
    keyname = args[0]
    if not os.path.isfile(keyname + '.qkey'):
        print('can\'t find key')
        return
    # resume from the saved cursor so pad bytes are never reused
    if os.path.isfile(keyname + '.qkeyinfo'):
        shift = read_key_info(keyname)['cursor']
    else:
        shift = 0
    with open(keyname + '.qkey', mode='rb') as f:
        f.seek(shift)
        print('entered realtime mode, use \'\\exit\' to quit')
        while True:
            t = input('> ')
            if t == '\\exit':
                break
            elif t[0] == '\\':  # ciphertext line: "\keyname:shift:hexbytes"
                m = re.match('\\\\(.+):(.+):(.+)', t)
                # NOTE(review): m is None for a malformed line, which would
                # crash on .groups() — no error handling here.
                g = m.groups()
                decryption_key = g[0]
                decryption_shift = int(g[1])
                ciphertext = g[2]
                plaintext = []  # hex payload decoded two chars at a time into bytes
                assert len(ciphertext) % 2 == 0
                i = 0
                while i < len(ciphertext):
                    c = ciphertext[i:i+2]
                    plaintext.append(int(c, 16))
                    i += 2
                plaintext = bytes(plaintext)
                if not os.path.isfile(decryption_key + '.qkey'):
                    print('can\'t find key')
                    continue
                # Re-apply encrypt() with the matching pad slice; presumably
                # encrypt() is XOR-based and thus its own inverse — TODO
                # confirm against encript.encrypt.
                with open(decryption_key + '.qkey', 'rb') as key:
                    key.seek(decryption_shift)
                    key_bytes = key.read(len(plaintext))
                    plaintext = encrypt(plaintext, key_bytes)
                print(plaintext.decode('utf8'))
            else:  # plain text: encrypt with the next unused pad bytes
                t = t.encode('utf8')
                key_bytes = f.read(len(t))
                if len(key_bytes) < len(t):
                    print("Runing out of keys!")
                    return
                miwen = encrypt(t, key_bytes)
                # emit ciphertext in the "\key:shift:hex" format parsed above
                print('\\' + keyname + ':' + str(f.tell() - len(t)) + ':' + miwen.hex())
                # persist the new cursor position so future runs skip used bytes
                write_key_info(keyname, f.tell())
def file_mode(args):
    """Placeholder for file encryption/decryption mode.

    Not implemented yet: prints a notice and returns None.  The intended
    argument layouts (kept below as TODO notes) are:
      * 2 args -> decryption: (source, outdir)
      * 3 args -> encryption: (source, keyname, outdir)
    """
    print('unimplemented yet, please use realtime mode')
    # TODO: implement per the layouts above; for any other arity raise
    # ValueError('you must provide 1 or 2 arguments for file mode').
    # (The original kept this sketch as a runtime-evaluated triple-quoted
    # string literal — dead code; converted to comments.)
    return None
def keygen_mode(args):
    """Generate a one-time-pad key file.

    args[0] -- output path for the key file
    args[1] -- size spec, e.g. '512', '512kb', '3mb', '1gb' (case-insensitive).
               A bare number gets the same multiplier as 'kb'; the base unit
               is presumably kilobytes — TODO confirm against generate_key().

    Prompts before overwriting an existing file; prints an error if the
    target directory does not exist.
    """
    outdir = args[0]
    size = args[1].lower()
    m = re.match('([0-9]+)(kb|mb|gb)?', size)  # look for unit
    if m is None:
        # e.g. 'abc' — the original crashed here with AttributeError on m.groups()
        print('Invalid size, must be a number optionally followed by kb, mb or gb')
        return
    g = m.groups()
    # no unit behaves like 'kb'; the regex guarantees g[1] is one of these keys
    multipliers = {None: 1, 'kb': 1, 'mb': 1024, 'gb': 1024 ** 2}
    size = int(g[0]) * multipliers[g[1]]
    if os.path.isfile(outdir):
        ans = input('file exists, overwrite? [Y/N]:\n')
        if ans.upper() == 'N':
            return
    try:
        generate_key(size, outdir)
    except FileNotFoundError:
        print('dir not exist!')
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--realtime', '-r', action='store', nargs=2, metavar=('KEY_NAME', 'KEY_SHIFT'), help='enter the realtime mode')
parser.add_argument('--encrypt', '-e', action='store', nargs=3, metavar=('FILE', 'KEY_NAME', 'OUT_DIR'), help='encrypt a file')
parser.add_argument('--decrypt', '-d', action='store', nargs=2, metavar=('FILE', 'OUT_DIR'), help='decrypt a file')
parser.add_argument('--keygen', '-k', action='store', nargs=2, metavar=('KEY_NAME', 'KEY_SIZE'), help='generate a key file')
args = parser.parse_args()
if args.realtime:
print('Entered RealTime mode')
realtime_mode(args.realtime)
exit(0)
elif args.encrypt:
file_mode(args.encrypt)
elif args.decrypt:
file_mode(args.decrypt)
elif args.keygen:
keygen_mode(args.keygen)
else:
print('At least one mode should be specified, use -h to get more help')
| [
"tianer2820@163.com"
] | tianer2820@163.com |
742ac4cb0de10696de786121651155b128a26bc5 | 1e4c7e1c949bd6c396454dccab5a17ed543c5546 | /snippets/settings.py | 76d838983684016722954ac2dda015c565ddf7b5 | [] | no_license | Kennedy-Njeri/Registration-Token-Based-Authentication | 05f621f4e2a3445c6685af1e73921aee009f234b | 464629b94a831bb8e41ddbbea913c6e37d9f8217 | refs/heads/master | 2020-05-09T19:10:45.680297 | 2019-04-14T21:04:09 | 2019-04-14T21:04:09 | 181,367,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,620 | py | """
Django settings for snippets project.
Generated by 'django-admin startproject' using Django 2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '2zp9f54aw1_l3ke!tr)687fpj)wk*p5vlujyc#bmh=!7ip=c+0'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'crispy_forms',
'account',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'snippets.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'snippets.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT= os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'account/static')
]
LOGIN_REDIRECT_URL = 'account'
LOGIN_URL = 'login'
CRISPY_TEMPLATE_PACK = 'bootstrap4'
# SMTP configuration for outgoing mail via Gmail.
EMAIL_USE_TLS = True
SERVER_EMAIL = 'securesally@gmail.com'
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 587
EMAIL_HOST_USER = 'mistakenz123@gmail.com'
# SECURITY: a real mailbox password is committed to source control here;
# move it to an environment variable and rotate the credential.
EMAIL_HOST_PASSWORD = 'qlwcpapjlisegoie'
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
| [
"mistakenz123@gmail.com"
] | mistakenz123@gmail.com |
b263c4ffc852c144f4043915f25493e9a48a2724 | df2ad917f861d791d6819bd9dac179f58dcbc583 | /questions/forms.py | 88892711fb511d6b4a845c144785e2fab35c0b4e | [] | no_license | ark85/TrackBackEnd2_project | ec419ae3d78596a8c780b47d7e4d9aa524b723b0 | ab99bd17ed50126e1e7b63fe59205c0cc3da4939 | refs/heads/master | 2020-03-31T06:22:29.663806 | 2018-12-17T23:47:02 | 2018-12-17T23:47:02 | 151,979,507 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 284 | py | from django import forms
class QuestionViewsForm(forms.Form):
    """Validates the question-list view's query parameters (both optional)."""
    # 'sort' values look like ORM order_by() arguments (leading '-' for
    # descending) — presumably passed straight to a queryset; verify caller.
    sort = forms.ChoiceField(choices=(
        ('name', 'Name asc'),
        ('-name', 'Name desc'),
        ('id', 'Id'),
        ('author', 'Author')
    ), required=False)
    # free-text search term; empty string means no filtering
    search = forms.CharField(required=False)
"ark100295@yandex.ru"
] | ark100295@yandex.ru |
603e90a1a261eb5641311b929c34e4dbcffdaeb9 | 47334dfe26797df79b7fcd880fae31dc315f525f | /ICPC/2019/Southern/K_Magic_Lamp_2/generate_test.py | 2320f329c236c9c3ffcd7f097c43704a788d55dc | [] | no_license | chinhhi102/GiaiThuat | 08d787701ca47c9053d56978e3f0d7c451328d6f | 4f9fd2810fe4565e35d676c28762836d94d4560d | refs/heads/master | 2020-09-22T07:59:13.193561 | 2019-12-27T17:09:00 | 2019-12-27T17:09:00 | 225,110,911 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 299 | py | from itertools import permutations
from random import randint
n = 100
use = [False for i in range(n + 1)]
print (n)
k = n // 2
for i in range(1, k):
x = randint(1, n)
if not use[x]:
print (x, end = " ")
use[x] = True
for i in range(1, n + 1):
if not use[i]:
print (i, end = " ")
| [
"chinhhi102@gmail.com"
] | chinhhi102@gmail.com |
990910a7c0604e5ad4b193a947481295582b0a82 | 1393abb7d0750205a6fe252dd500e951cce2f284 | /svm_HandWrittenLetters.py | d95e1418ed255aef0b2f95a057764bc8d70be274 | [] | no_license | nm4archana/data-mining | 71d12df6ce9acb3423f7869517ef12900edf85e2 | b236e6409117b0f0d1a131876ace295988584404 | refs/heads/master | 2021-07-24T05:56:13.517865 | 2017-11-02T21:51:25 | 2017-11-02T21:51:25 | 109,322,508 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,396 | py | import numpy as np
from sklearn import svm
from sklearn.model_selection import cross_val_score
import pandas as pd
"""
Date : Sep 25 2017
@author: Archana Neelipalayam Masilamani
Project Description:
Implemented Support Vector Machine using scikit-learn. Machine Learning in Python.
The dataset used is HandWritten Letters.
Text file:
1st row is cluster labels.
2nd-end rows: each column is a feature vectors (vector length=20x16).
Total 26 classes. each class has 39 images. Total 26*39=1014 images.
The task is to predict the HandWrittenLetter using Support Vector Machine classification
"""
"""
Import data from the textfile
"""
x = pd.read_csv('InputData/HandWrittenLetters.txt', sep=",", header=None)
"""
Transpose the data imported
"""
df = pd.DataFrame.transpose(x);
"""
Split the training data into X and y where X has training
samples and y has the corresponding class labels
"""
X = np.array(df.drop([0],1))
y = np.array(df[0])
"""
C-Support Vector Classification -
http://scikit-learn.org/stable/modules/generated/sklearn.svm.SVC.html#sklearn.svm.SVC
Used Soft Margin Classification and got a better accuracy by setting C hyperparameter to 10
"""
clf = svm.SVC(C=10);
"""
Evaluating score by cross-validation
"""
scores = cross_val_score(clf,X,y,cv = 5)
"""
Print Classification Accuracy
"""
for m in scores:
print("\nClassification accuracy: {:.2f}".format(m))
| [
"nm4archana@gmail.com"
] | nm4archana@gmail.com |
ae2efdc26f66073cc42df3fde7a1cc79b763a0b1 | e6ce462bb17492a092fde16e44508ae17c15ac53 | /load_jetmet_tree.py | 800af5e8a470b16645f14c88adca6e53e5ea5058 | [] | no_license | siqiyyyy/jet_depth_analyzer | 8abf01aeac6effd12719164a60cbcc08498e6ffe | ebbda38b11d31d9541b36554a56b3f392a82580f | refs/heads/master | 2020-04-25T07:17:33.633741 | 2019-07-09T17:54:33 | 2019-07-09T17:54:33 | 172,609,847 | 0 | 0 | null | 2019-05-21T22:33:53 | 2019-02-26T00:49:02 | Python | UTF-8 | Python | false | false | 22,659 | py | import numpy as n
import ROOT
rhoall = n.zeros(1,dtype=float)
maxjet = 10000
#event information
nrun = n.zeros(1,dtype=int)
nlumi = n.zeros(1,dtype=int)
nevent = n.zeros(1,dtype=float)
npv = n.zeros(1,dtype=int)
rhoall = n.zeros(1,dtype=float)
rhocentral = n.zeros(1,dtype=float)
rhoneutral = n.zeros(1,dtype=float)
rhochargedpileup = n.zeros(1,dtype=float)
dphipfmet = n.zeros(1,dtype=float)
#jet information
njet = n.zeros(1,dtype=int)
jet_pt = n.zeros(maxjet,dtype=float)
jet_energy = n.zeros(maxjet,dtype=float)
jet_eta = n.zeros(maxjet,dtype=float)
jet_phi = n.zeros(maxjet,dtype=float)
genjet_pt = n.zeros(maxjet,dtype=float)
genjet_energy = n.zeros(maxjet,dtype=float)
genjet_eta = n.zeros(maxjet,dtype=float)
genjet_phi = n.zeros(maxjet,dtype=float)
rawjet_pt = n.zeros(maxjet,dtype=float)
rawjet_energy = n.zeros(maxjet,dtype=float)
rawjet_eta = n.zeros(maxjet,dtype=float)
rawjet_phi = n.zeros(maxjet,dtype=float)
jet_loose = n.zeros(maxjet,dtype=int)
jet_depth = n.zeros([maxjet,7],dtype=float) #hcal energy distributed in each layer
jet_depth_uncorrected = n.zeros([maxjet,7],dtype=float) #similar as above but used total energy rather than hcal energy
#puppi jet information
npjet = n.zeros(1,dtype=int)
pjet_pt = n.zeros(maxjet,dtype=float)
pjet_energy = n.zeros(maxjet,dtype=float)
pjet_eta = n.zeros(maxjet,dtype=float)
pjet_phi = n.zeros(maxjet,dtype=float)
genpjet_pt = n.zeros(maxjet,dtype=float)
genpjet_energy = n.zeros(maxjet,dtype=float)
genpjet_eta = n.zeros(maxjet,dtype=float)
genpjet_phi = n.zeros(maxjet,dtype=float)
rawpjet_pt = n.zeros(maxjet,dtype=float)
rawpjet_energy = n.zeros(maxjet,dtype=float)
rawpjet_eta = n.zeros(maxjet,dtype=float)
rawpjet_phi = n.zeros(maxjet,dtype=float)
pjet_loose = n.zeros(maxjet,dtype=int)
pjet_depth = n.zeros([maxjet,7],dtype=float) #hcal energy distributed in each layer
pjet_depth_uncorrected = n.zeros([maxjet,7],dtype=float) #similar as above but used total energy rather than hcal energy
#jet energy fraction
NHF = n.zeros(maxjet,dtype=float)
NEMF = n.zeros(maxjet,dtype=float)
CHF = n.zeros(maxjet,dtype=float)
MUF = n.zeros(maxjet,dtype=float)
CEMF = n.zeros(maxjet,dtype=float)
NumConst = n.zeros(maxjet,dtype=int)
NumNeutralParticle = n.zeros(maxjet,dtype=int)
CHM = n.zeros(maxjet,dtype=int)
#jet fraction information for each jet
charged = n.zeros(maxjet,dtype=float)
neutral = n.zeros(maxjet,dtype=float)
photon = n.zeros(maxjet,dtype=float)
muon = n.zeros(maxjet,dtype=float)
electron = n.zeros(maxjet,dtype=float)
hhf = n.zeros(maxjet,dtype=float)
ehf = n.zeros(maxjet,dtype=float)
other = n.zeros(maxjet,dtype=float)
charged_e = n.zeros(maxjet,dtype=float)
neutral_e = n.zeros(maxjet,dtype=float)
photon_e = n.zeros(maxjet,dtype=float)
muon_e = n.zeros(maxjet,dtype=float)
electron_e = n.zeros(maxjet,dtype=float)
hhf_e = n.zeros(maxjet,dtype=float)
ehf_e = n.zeros(maxjet,dtype=float)
other_e = n.zeros(maxjet,dtype=float)
charged_n = n.zeros(maxjet,dtype=int)
neutral_n = n.zeros(maxjet,dtype=int)
photon_n = n.zeros(maxjet,dtype=int)
muon_n = n.zeros(maxjet,dtype=int)
electron_n = n.zeros(maxjet,dtype=int)
hhf_n = n.zeros(maxjet,dtype=int)
ehf_n = n.zeros(maxjet,dtype=int)
other_n = n.zeros(maxjet,dtype=int)
#puppi jet fraction information for each jet
charged_pjet = n.zeros(maxjet,dtype=float)
neutral_pjet = n.zeros(maxjet,dtype=float)
photon_pjet = n.zeros(maxjet,dtype=float)
muon_pjet = n.zeros(maxjet,dtype=float)
electron_pjet = n.zeros(maxjet,dtype=float)
hhf_pjet = n.zeros(maxjet,dtype=float)
ehf_pjet = n.zeros(maxjet,dtype=float)
other_pjet = n.zeros(maxjet,dtype=float)
charged_e_pjet = n.zeros(maxjet,dtype=float)
neutral_e_pjet = n.zeros(maxjet,dtype=float)
photon_e_pjet = n.zeros(maxjet,dtype=float)
muon_e_pjet = n.zeros(maxjet,dtype=float)
electron_e_pjet = n.zeros(maxjet,dtype=float)
hhf_e_pjet = n.zeros(maxjet,dtype=float)
ehf_e_pjet = n.zeros(maxjet,dtype=float)
other_e_pjet = n.zeros(maxjet,dtype=float)
charged_n_pjet = n.zeros(maxjet,dtype=int)
neutral_n_pjet = n.zeros(maxjet,dtype=int)
photon_n_pjet = n.zeros(maxjet,dtype=int)
muon_n_pjet = n.zeros(maxjet,dtype=int)
electron_n_pjet = n.zeros(maxjet,dtype=int)
hhf_n_pjet = n.zeros(maxjet,dtype=int)
ehf_n_pjet = n.zeros(maxjet,dtype=int)
other_n_pjet = n.zeros(maxjet,dtype=int)
#met information
met = n.zeros(1,dtype=float)
mex = n.zeros(1,dtype=float)
mey = n.zeros(1,dtype=float)
met_phi= n.zeros(1,dtype=float)
genmet = n.zeros(1,dtype=float)
rawmet = n.zeros(1,dtype=float)
charged_met = n.zeros(1,dtype=float)
neutral_met = n.zeros(1,dtype=float)
photon_met = n.zeros(1,dtype=float)
muon_met = n.zeros(1,dtype=float)
electron_met = n.zeros(1,dtype=float)
hhf_met = n.zeros(1,dtype=float)
ehf_met = n.zeros(1,dtype=float)
other_met = n.zeros(1,dtype=float)
chsmet = n.zeros(1,dtype=float)
trkmet = n.zeros(1,dtype=float)
phomet = n.zeros(1,dtype=float)
neumet = n.zeros(1,dtype=float)
#pmet information
pmet = n.zeros(1,dtype=float)
pmex = n.zeros(1,dtype=float)
pmey = n.zeros(1,dtype=float)
pmet_phi= n.zeros(1,dtype=float)
genpmet = n.zeros(1,dtype=float)
rawpmet = n.zeros(1,dtype=float)
#Init branches
b_rhoall = ROOT.TBranch()
b_maxjet = ROOT.TBranch()
b_nrun = ROOT.TBranch()
b_nlumi = ROOT.TBranch()
b_nevent = ROOT.TBranch()
b_npv = ROOT.TBranch()
b_rhoall = ROOT.TBranch()
b_rhocentral = ROOT.TBranch()
b_rhoneutral = ROOT.TBranch()
b_rhochargedpileup = ROOT.TBranch()
b_dphipfmet = ROOT.TBranch()
b_njet = ROOT.TBranch()
b_jet_pt = ROOT.TBranch()
b_jet_energy = ROOT.TBranch()
b_jet_eta = ROOT.TBranch()
b_jet_phi = ROOT.TBranch()
b_genjet_pt = ROOT.TBranch()
b_genjet_energy = ROOT.TBranch()
b_genjet_eta = ROOT.TBranch()
b_genjet_phi = ROOT.TBranch()
b_rawjet_pt = ROOT.TBranch()
b_rawjet_energy = ROOT.TBranch()
b_rawjet_eta = ROOT.TBranch()
b_rawjet_phi = ROOT.TBranch()
b_jet_loose = ROOT.TBranch()
b_jet_depth = ROOT.TBranch()
b_jet_depth_uncorrected = ROOT.TBranch()
b_npjet = ROOT.TBranch()
b_pjet_pt = ROOT.TBranch()
b_pjet_energy = ROOT.TBranch()
b_pjet_eta = ROOT.TBranch()
b_pjet_phi = ROOT.TBranch()
b_genpjet_pt = ROOT.TBranch()
b_genpjet_energy = ROOT.TBranch()
b_genpjet_eta = ROOT.TBranch()
b_genpjet_phi = ROOT.TBranch()
b_rawpjet_pt = ROOT.TBranch()
b_rawpjet_energy = ROOT.TBranch()
b_rawpjet_eta = ROOT.TBranch()
b_rawpjet_phi = ROOT.TBranch()
b_pjet_loose = ROOT.TBranch()
b_pjet_depth = ROOT.TBranch()
b_pjet_depth_uncorrected = ROOT.TBranch()
b_NHF = ROOT.TBranch()
b_NEMF = ROOT.TBranch()
b_CHF = ROOT.TBranch()
b_MUF = ROOT.TBranch()
b_CEMF = ROOT.TBranch()
b_NumConst = ROOT.TBranch()
b_NumNeutralParticle = ROOT.TBranch()
b_CHM = ROOT.TBranch()
b_charged = ROOT.TBranch()
b_neutral = ROOT.TBranch()
b_photon = ROOT.TBranch()
b_muon = ROOT.TBranch()
b_electron = ROOT.TBranch()
b_hhf = ROOT.TBranch()
b_ehf = ROOT.TBranch()
b_other = ROOT.TBranch()
b_charged_e = ROOT.TBranch()
b_neutral_e = ROOT.TBranch()
b_photon_e = ROOT.TBranch()
b_muon_e = ROOT.TBranch()
b_electron_e = ROOT.TBranch()
b_hhf_e = ROOT.TBranch()
b_ehf_e = ROOT.TBranch()
b_other_e = ROOT.TBranch()
b_charged_n = ROOT.TBranch()
b_neutral_n = ROOT.TBranch()
b_photon_n = ROOT.TBranch()
b_muon_n = ROOT.TBranch()
b_electron_n = ROOT.TBranch()
b_hhf_n = ROOT.TBranch()
b_ehf_n = ROOT.TBranch()
b_other_n = ROOT.TBranch()
b_charged_pjet = ROOT.TBranch()
b_neutral_pjet = ROOT.TBranch()
b_photon_pjet = ROOT.TBranch()
b_muon_pjet = ROOT.TBranch()
b_electron_pjet = ROOT.TBranch()
b_hhf_pjet = ROOT.TBranch()
b_ehf_pjet = ROOT.TBranch()
b_other_pjet = ROOT.TBranch()
b_charged_e_pjet = ROOT.TBranch()
b_neutral_e_pjet = ROOT.TBranch()
b_photon_e_pjet = ROOT.TBranch()
b_muon_e_pjet = ROOT.TBranch()
b_electron_e_pjet = ROOT.TBranch()
b_hhf_e_pjet = ROOT.TBranch()
b_ehf_e_pjet = ROOT.TBranch()
b_other_e_pjet = ROOT.TBranch()
b_charged_n_pjet = ROOT.TBranch()
b_neutral_n_pjet = ROOT.TBranch()
b_photon_n_pjet = ROOT.TBranch()
b_muon_n_pjet = ROOT.TBranch()
b_electron_n_pjet = ROOT.TBranch()
b_hhf_n_pjet = ROOT.TBranch()
b_ehf_n_pjet = ROOT.TBranch()
b_other_n_pjet = ROOT.TBranch()
b_met = ROOT.TBranch()
b_mex = ROOT.TBranch()
b_mey = ROOT.TBranch()
b_met_phi = ROOT.TBranch()
b_genmet = ROOT.TBranch()
b_rawmet = ROOT.TBranch()
b_charged_met = ROOT.TBranch()
b_neutral_met = ROOT.TBranch()
b_photon_met = ROOT.TBranch()
b_muon_met = ROOT.TBranch()
b_electron_met = ROOT.TBranch()
b_hhf_met = ROOT.TBranch()
b_ehf_met = ROOT.TBranch()
b_other_met = ROOT.TBranch()
b_chsmet = ROOT.TBranch()
b_trkmet = ROOT.TBranch()
b_phomet = ROOT.TBranch()
b_neumet = ROOT.TBranch()
b_pmet = ROOT.TBranch()
b_pmex = ROOT.TBranch()
b_pmey = ROOT.TBranch()
b_pmet_phi = ROOT.TBranch()
b_genpmet = ROOT.TBranch()
b_rawpmet = ROOT.TBranch()
def declare_branches(t):
t.SetBranchAddress("run" , nrun , b_nrun )
t.SetBranchAddress("lumi" , nlumi , b_nlumi )
t.SetBranchAddress("event" , nevent , b_nevent )
t.SetBranchAddress("npv" , npv , b_npv )
t.SetBranchAddress("dphipfmet" , dphipfmet , b_dphipfmet )
t.SetBranchAddress("rhoall" , rhoall , b_rhoall )
t.SetBranchAddress("rhocentral" , rhocentral , b_rhocentral )
t.SetBranchAddress("rhoneutral" , rhoneutral , b_rhoneutral )
t.SetBranchAddress("rhochargedpileup" , rhochargedpileup , b_rhochargedpileup )
t.SetBranchAddress("njet" , njet , b_njet )
t.SetBranchAddress("npjet" , npjet , b_npjet )
t.SetBranchAddress("jet_pt" , jet_pt , b_jet_pt )
t.SetBranchAddress("jet_energy" , jet_energy , b_jet_energy )
t.SetBranchAddress("jet_eta" , jet_eta , b_jet_eta )
t.SetBranchAddress("jet_phi" , jet_phi , b_jet_phi )
t.SetBranchAddress("genjet_pt" , genjet_pt , b_genjet_pt )
t.SetBranchAddress("genjet_energy" , genjet_energy , b_genjet_energy )
t.SetBranchAddress("genjet_eta" , genjet_eta , b_genjet_eta )
t.SetBranchAddress("genjet_phi" , genjet_phi , b_genjet_phi )
t.SetBranchAddress("rawjet_pt" , rawjet_pt , b_rawjet_pt )
t.SetBranchAddress("rawjet_energy" , rawjet_energy , b_rawjet_energy )
t.SetBranchAddress("rawjet_eta" , rawjet_eta , b_rawjet_eta )
t.SetBranchAddress("rawjet_phi" , rawjet_phi , b_rawjet_phi )
t.SetBranchAddress("pjet_pt" , pjet_pt , b_pjet_pt )
t.SetBranchAddress("pjet_energy" , pjet_energy , b_pjet_energy )
t.SetBranchAddress("pjet_eta" , pjet_eta , b_pjet_eta )
t.SetBranchAddress("pjet_phi" , pjet_phi , b_pjet_phi )
t.SetBranchAddress("genpjet_pt" , genpjet_pt , b_genpjet_pt )
t.SetBranchAddress("genpjet_energy" , genpjet_energy , b_genpjet_energy )
t.SetBranchAddress("genpjet_eta" , genpjet_eta , b_genpjet_eta )
t.SetBranchAddress("genpjet_phi" , genpjet_phi , b_genpjet_phi )
t.SetBranchAddress("rawpjet_pt" , rawpjet_pt , b_rawpjet_pt )
t.SetBranchAddress("rawpjet_energy" , rawpjet_energy , b_rawpjet_energy )
t.SetBranchAddress("rawpjet_eta" , rawpjet_eta , b_rawpjet_eta )
t.SetBranchAddress("rawpjet_phi" , rawpjet_phi , b_rawpjet_phi )
t.SetBranchAddress("NHF" , NHF , b_NHF )
t.SetBranchAddress("NEMF" , NEMF , b_NEMF )
t.SetBranchAddress("CHF" , CHF , b_CHF )
t.SetBranchAddress("MUF" , MUF , b_MUF )
t.SetBranchAddress("CEMF" , CEMF , b_CEMF )
t.SetBranchAddress("NumConst" , NumConst , b_NumConst )
t.SetBranchAddress("NumNeutralParticle" , NumNeutralParticle , b_NumNeutralParticle )
t.SetBranchAddress("CHM" , CHM , b_CHM )
t.SetBranchAddress("jet_loose" , jet_loose , b_jet_loose )
t.SetBranchAddress("pjet_loose" , pjet_loose , b_pjet_loose )
t.SetBranchAddress("jet_depth" , jet_depth , b_jet_depth )
t.SetBranchAddress("jet_depth_uncorrected" , jet_depth_uncorrected , b_jet_depth_uncorrected )
t.SetBranchAddress("pjet_depth" , pjet_depth , b_pjet_depth )
t.SetBranchAddress("pjet_depth_uncorrected" , pjet_depth_uncorrected , b_pjet_depth_uncorrected )
t.SetBranchAddress("charged" , charged , b_charged )
t.SetBranchAddress("neutral" , neutral , b_neutral )
t.SetBranchAddress("photon" , photon , b_photon )
t.SetBranchAddress("muon" , muon , b_muon )
t.SetBranchAddress("electron" , electron , b_electron )
t.SetBranchAddress("hhf" , hhf , b_hhf )
t.SetBranchAddress("ehf" , ehf , b_ehf )
t.SetBranchAddress("other" , other , b_other )
t.SetBranchAddress("charged_e" , charged_e , b_charged_e )
t.SetBranchAddress("neutral_e" , neutral_e , b_neutral_e )
t.SetBranchAddress("photon_e" , photon_e , b_photon_e )
t.SetBranchAddress("muon_e" , muon_e , b_muon_e )
t.SetBranchAddress("electron_e" , electron_e , b_electron_e )
t.SetBranchAddress("hhf_e" , hhf_e , b_hhf_e )
t.SetBranchAddress("ehf_e" , ehf_e , b_ehf_e )
t.SetBranchAddress("other_e" , other_e , b_other_e )
t.SetBranchAddress("charged_n" , charged_n , b_charged_n )
t.SetBranchAddress("neutral_n" , neutral_n , b_neutral_n )
t.SetBranchAddress("photon_n" , photon_n , b_photon_n )
t.SetBranchAddress("muon_n" , muon_n , b_muon_n )
t.SetBranchAddress("electron_n" , electron_n , b_electron_n )
t.SetBranchAddress("hhf_n" , hhf_n , b_hhf_n )
t.SetBranchAddress("ehf_n" , ehf_n , b_ehf_n )
t.SetBranchAddress("other_n" , other_n , b_other_n )
t.SetBranchAddress("charged_pjet" , charged_pjet , b_charged_pjet )
t.SetBranchAddress("neutral_pjet" , neutral_pjet , b_neutral_pjet )
t.SetBranchAddress("photon_pjet" , photon_pjet , b_photon_pjet )
t.SetBranchAddress("muon_pjet" , muon_pjet , b_muon_pjet )
t.SetBranchAddress("electron_pjet" , electron_pjet , b_electron_pjet )
t.SetBranchAddress("hhf_pjet" , hhf_pjet , b_hhf_pjet )
t.SetBranchAddress("ehf_pjet" , ehf_pjet , b_ehf_pjet )
t.SetBranchAddress("other_pjet" , other_pjet , b_other_pjet )
t.SetBranchAddress("charged_e_pjet" , charged_e_pjet , b_charged_e_pjet )
t.SetBranchAddress("neutral_e_pjet" , neutral_e_pjet , b_neutral_e_pjet )
t.SetBranchAddress("photon_e_pjet" , photon_e_pjet , b_photon_e_pjet )
t.SetBranchAddress("muon_e_pjet" , muon_e_pjet , b_muon_e_pjet )
t.SetBranchAddress("electron_e_pjet" , electron_e_pjet , b_electron_e_pjet )
t.SetBranchAddress("hhf_e_pjet" , hhf_e_pjet , b_hhf_e_pjet )
t.SetBranchAddress("ehf_e_pjet" , ehf_e_pjet , b_ehf_e_pjet )
t.SetBranchAddress("other_e_pjet" , other_e_pjet , b_other_e_pjet )
t.SetBranchAddress("charged_n_pjet" , charged_n_pjet , b_charged_n_pjet )
t.SetBranchAddress("neutral_n_pjet" , neutral_n_pjet , b_neutral_n_pjet )
t.SetBranchAddress("photon_n_pjet" , photon_n_pjet , b_photon_n_pjet )
t.SetBranchAddress("muon_n_pjet" , muon_n_pjet , b_muon_n_pjet )
t.SetBranchAddress("electron_n_pjet" , electron_n_pjet , b_electron_n_pjet )
t.SetBranchAddress("hhf_n_pjet" , hhf_n_pjet , b_hhf_n_pjet )
t.SetBranchAddress("ehf_n_pjet" , ehf_n_pjet , b_ehf_n_pjet )
t.SetBranchAddress("other_n_pjet" , other_n_pjet , b_other_n_pjet )
t.SetBranchAddress("met" , met , b_met )
t.SetBranchAddress("mex" , mex , b_mex )
t.SetBranchAddress("mey" , mey , b_mey )
t.SetBranchAddress("met_phi" , met_phi , b_met_phi )
t.SetBranchAddress("genmet" , genmet , b_genmet )
t.SetBranchAddress("rawmet" , rawmet , b_rawmet )
t.SetBranchAddress("charged_met" , charged_met , b_charged_met )
t.SetBranchAddress("neutral_met" , neutral_met , b_neutral_met )
t.SetBranchAddress("photon_met" , photon_met , b_photon_met )
t.SetBranchAddress("muon_met" , muon_met , b_muon_met )
t.SetBranchAddress("electron_met" , electron_met , b_electron_met )
t.SetBranchAddress("hhf_met" , hhf_met , b_hhf_met )
t.SetBranchAddress("ehf_met" , ehf_met , b_ehf_met )
t.SetBranchAddress("other_met" , other_met , b_other_met )
t.SetBranchAddress("pmet" , pmet , b_pmet )
t.SetBranchAddress("pmex" , pmex , b_pmex )
t.SetBranchAddress("pmey" , pmey , b_pmey )
t.SetBranchAddress("pmet_phi" , pmet_phi , b_pmet_phi )
t.SetBranchAddress("genpmet" , genpmet , b_genpmet )
t.SetBranchAddress("rawpmet" , rawpmet , b_rawpmet )
print "All branches configured"
| [
"yuansiqi0114@hotmail.com"
] | yuansiqi0114@hotmail.com |
b3ce66d5ab56ec78ae434f553712d4027996e0c8 | 7bf377472dea25a39933e34726dc581e8f7efb6f | /4_lr_analysis/get_lu_data.py | 393f8e9613fef98a48f8ad3e0c721ad64001b5b4 | [] | no_license | gordonje/deadly_work | 8fe655cca4fea522842609cfdc7fff1582cd4775 | cdd8586eaf71b643b2076ef4389333a64e04da8e | refs/heads/master | 2021-01-21T08:01:09.981286 | 2015-03-20T02:55:28 | 2015-03-20T02:55:28 | 21,902,373 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,707 | py | import getpass
import psycopg2
import requests
db = raw_input("Enter name of target database:")
user = raw_input("Enter your PostgreSQL username (this might just be 'postgres'):")
password = getpass.getpass("Enter your PostgreSQL user password:")
conn = psycopg2.connect("dbname=%(db)s user=%(user)s password=%(password)s" % {"db": db, "user": user, "password":password})
cur = conn.cursor()
session = requests.Session()
session.headers.update({"Connection": "keep-alive"})
# create the schema if it doesn't already exist
cur.execute('''CREATE SCHEMA IF NOT EXISTS lu;''')
conn.commit()
# if the current data table doesn't already exist, create and populate it
cur.execute('''SELECT * FROM information_schema.tables WHERE table_name = 'current_data' AND table_schema = 'lu';''')
has_current_data = cur.fetchone()
if has_current_data == None:
print "Getting current data..."
cur.execute('''CREATE TABLE lu.current_data (
series_id varchar(17)
, year int4
, period varchar(3)
, value numeric
, footnote_codes varchar(255)
, PRIMARY KEY (series_id, year)
);''')
conn.commit()
response = session.get("http://download.bls.gov/pub/time.series/lu/lu.data.0.Current")
rows = response.content.split('\n')
for row in rows[1:]:
values = row.split('\t')
if len(values) > 1:
cur.execute('''INSERT INTO lu.current_data (series_id, year, period, value, footnote_codes)
VALUES (%s, %s, %s, %s, %s);''',
[values[0].strip(), values[1].strip(), values[2].strip(), values[3].strip(), values[4].strip()])
conn.commit()
# if the all data table doesn't already exist, create and populate it
cur.execute('''SELECT * FROM information_schema.tables WHERE table_name = 'all_data' AND table_schema = 'lu';''')
has_all_data = cur.fetchone()
if has_all_data == None:
print "Getting all data..."
cur.execute('''CREATE TABLE lu.all_data (
series_id varchar(17)
, year int4
, period varchar(3)
, value numeric
, footnote_codes varchar(255)
, PRIMARY KEY (series_id, year)
);''')
conn.commit()
response = session.get("http://download.bls.gov/pub/time.series/lu/lu.data.1.AllData")
rows = response.content.split('\n')
for row in rows[1:]:
values = row.split('\t')
if len(values) > 1:
cur.execute('''INSERT INTO lu.all_data (series_id, year, period, value, footnote_codes)
VALUES (%s, %s, %s, %s, %s);''',
[values[0].strip(), values[1].strip(), values[2].strip(), values[3].strip(), values[4].strip()])
conn.commit()
# if the series table doesn't already exist, create and populate it
cur.execute('''SELECT * FROM information_schema.tables WHERE table_name = 'series' AND table_schema = 'lu';''')
has_series = cur.fetchone()
if has_series == None:
print "Getting series..."
cur.execute('''CREATE TABLE lu.series (
series_id varchar(17) PRIMARY KEY
, lfst_code varchar(2)
, fips_code varchar(2)
, series_description varchar(255)
, tdata_code varchar(2)
, pcts_code varchar(2)
, earn_code varchar(2)
, class_code varchar(2)
, unin_code varchar(1)
, indy_code varchar(4)
, occupation_code varchar(4)
, education_code varchar(2)
, ages_code varchar(2)
, race_code varchar(2)
, orig_code varchar(2)
, sexs_code varchar(2)
, seasonal varchar(2)
, footnote_codes varchar(255)
, begin_year int4
, begin_period varchar(3)
, end_year int4
, end_period varchar(3)
);''')
conn.commit()
response = session.get("http://download.bls.gov/pub/time.series/lu/lu.series")
rows = response.content.split('\n')
for row in rows[1:]:
values = row.split('\t')
if len(values) > 1:
cur.execute('''INSERT INTO lu.series (series_id, lfst_code, fips_code, series_description, tdata_code, pcts_code, earn_code,
class_code, unin_code, indy_code, occupation_code, education_code, ages_code, race_code,
orig_code, sexs_code, seasonal, footnote_codes, begin_year, begin_period, end_year, end_period)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);''',
[values[0].strip(), values[1].strip(), values[2].strip(), values[3].strip(), values[4].strip(),
values[5].strip(), values[6].strip(), values[7].strip(), values[8].strip(), values[9].strip(),
values[10].strip(), values[11].strip(), values[12].strip(), values[13].strip(), values[14].strip(),
values[15].strip(), values[16].strip(), values[17].strip(), values[18].strip(), values[19].strip(),
values[20].strip(), values[21].strip()])
conn.commit()
# check to see if the columns are on the areas table, then add them.
cur.close()
conn.close() | [
"gordon.je@gmail.com"
] | gordon.je@gmail.com |
7c0079ea785618c2599e7afb0bff571e34ec667c | 3d74cb066e0bf01d422e5fc71ba0c1678873f465 | /main.py | e7a34c2cf51f4f9d0ed1d2a794ae667ca1b4b9cc | [] | no_license | jsuvanto/15-puzzle | 77af8f85bbbae197276c6d243cd275e6af1163f6 | 12682bc5fa0fd35bfc19ba2ccd298db7f7b41a9e | refs/heads/master | 2021-06-24T10:04:00.347360 | 2017-09-10T16:25:18 | 2017-09-10T16:25:18 | 103,012,063 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,184 | py | import board
# TODO GUI:
# - move buttons
# - quit
# - reset
# - solve (requires a whole new function)
def main():
    """Entry point: build a sliding-puzzle board and run the command loop."""
    # Board dimensions come from the user; any bad input falls back to 4x4.
    try:
        rows = int(input("Enter number of board rows: "))
        cols = int(input("Enter number of board columns: "))
    except ValueError:
        print("User error. Forcing size to 4*4")
        rows, cols = 4, 4

    puzzle = board.Board(rows, cols)
    puzzle.draw()

    while True:
        cmd = input("Move empty tile [u]p, [d]own, [l]eft or [r]ight, "
                    "[s]huffle the board or [q]uit. ")
        if cmd == "q":
            break
        if cmd in ("u", "d", "l", "r"):
            puzzle.move(cmd)
            puzzle.draw()
        elif cmd == "s":
            # Shuffle by a user-chosen number of random moves (default 100).
            try:
                shuffle_count = int(input("Enter number of random moves: "))
            except ValueError:
                print("User error. Forcing random move count to 100.")
                shuffle_count = 100
            puzzle.shuffle(shuffle_count)
            puzzle.draw()
        else:
            print("Unknown command, please try again")


main()
| [
"jsuvanto@iki.fi"
] | jsuvanto@iki.fi |
4eb866f5fb8682ab8fadaf4c89ab62399ada0266 | 9b8476110a0cc9952af771be0b1f99a0ff6d5272 | /canvasapp/apps.py | 7c11523751d652cc22b6755d3486d3cf2fe44627 | [] | no_license | salonishah331/CanvasPage | 9830a5815b81bf9d2075063ce47e2305eed15e3d | 5b1e3acc5afd51fb58ef2f147ef119a94ed31505 | refs/heads/master | 2021-01-14T04:29:59.802141 | 2020-02-23T22:20:02 | 2020-02-23T22:20:02 | 242,599,949 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 93 | py | from django.apps import AppConfig
class CanvasappConfig(AppConfig):
    """Django application configuration for the `canvasapp` app."""
    # App label used by Django's app registry (must match the package name).
    name = 'canvasapp'
| [
"salonishah331@gatech.edu"
] | salonishah331@gatech.edu |
ecf60a86f09334ed25b05b794995f4bcc03ae7b5 | 8f73568f02e4ef744841322f4b3f1321efb41261 | /environments/my_env/bin/django-admin | b56cb1201042f5f6a6176ee7fef5a8607d4cf910 | [] | no_license | aonreport2/cpebooklast | 4154839dff0f284f83233bae0ae329f818a5d92e | 6029ef3b99145dacf0685443778dbeae4a37628e | refs/heads/main | 2023-06-20T14:50:03.198971 | 2021-07-18T18:00:08 | 2021-07-18T18:00:08 | 380,532,562 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 321 | #!/home/thanapat/Documents/newcpebook-master/environments/my_env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from django.core.management import execute_from_command_line
if __name__ == '__main__':
    # Strip a trailing "-script.pyw"/".exe" from argv[0] (Windows entry-point
    # wrappers add these) so usage/help output shows the clean command name.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    # Delegate to Django's management CLI; its return value becomes the exit code.
    sys.exit(execute_from_command_line())
| [
"aonreport2@gmail.com"
] | aonreport2@gmail.com | |
185170f4c8815d42a0eff3114ea6b35622bb1af3 | 62e1a1479ccdc2651f2e339cd657584c2f2fc62f | /move_mouse.py | 8788249862b5f01fdce09dbe885566eb2ac3d2b9 | [] | no_license | daniel-cretney/PythonProjects | d2075883c0ca5ab165749ace69e86e923215b168 | 17b6baefe8126b589cc18c05e0571f755dabd4e6 | refs/heads/main | 2023-03-13T19:53:04.624620 | 2021-03-02T20:21:02 | 2021-03-02T20:21:02 | 343,901,456 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 85 | py | import pyautogui
# Print the primary screen resolution (width, height) for reference.
print(pyautogui.size())
# Glide the cursor to absolute position (150, 100); `duration` is in seconds,
# so this move is deliberately slow (20 s).
pyautogui.moveTo(150,100, duration = 20)
| [
"noreply@github.com"
] | noreply@github.com |
6b0a7bd2d974d61447504b9f0b45adcff16dc291 | f4d710f68d715470905daa1245f3b9f4f4c4cef5 | /local_settings.py | da93c18c7fe1d80df9288fb1a2a37e53e9ee22de | [] | no_license | powellc/findhistory_me | c3044a894840e62f12bb2ee4dc0ad7dbe8a524fd | d72eb449eb0e15f0d62a46986ad8551ab1cb66ca | refs/heads/master | 2016-09-15T20:55:57.994241 | 2014-04-15T03:56:18 | 2014-04-15T03:56:18 | 10,428,262 | 0 | 0 | null | 2014-04-15T03:54:33 | 2013-06-01T20:46:48 | JavaScript | UTF-8 | Python | false | false | 658 | py |
# Local development settings; DEBUG must never be True in production.
DEBUG = True

# SECURITY(review): live-looking AWS credentials are hard-coded and committed
# here — they should be rotated and loaded from environment variables or an
# untracked secrets file instead.
AWS_ACCESS_KEY_ID = 'AKIAIC6KSWVHASDPKERQ'
AWS_SECRET_ACCESS_KEY = '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'

# Local SQLite database for development (file `dev.db` next to the project).
DATABASES = {
    "default": {
        # Ends with "postgresql_psycopg2", "mysql", "sqlite3" or "oracle".
        "ENGINE": "django.db.backends.sqlite3",
        # DB name or path to database file if using sqlite3.
        "NAME": "dev.db",
        # Not used with sqlite3.
        "USER": "",
        # Not used with sqlite3.
        "PASSWORD": "",
        # Set to empty string for localhost. Not used with sqlite3.
        "HOST": "",
        # Set to empty string for default. Not used with sqlite3.
        "PORT": "",
    }
}
| [
"colin.powell@gmail.com"
] | colin.powell@gmail.com |
20d016d8c835bb90dcdfc491f101ee23416a4a9d | 0ccb70cd22862f5c1617113cec62fb4438093ce7 | /src/gamer/application/tests/fixtures.py | 1667dcd3275df70908ece80cd9b9f344daf4c45a | [] | no_license | socek/gamer | f4590a557819047158c1a8c0e9605632dbaac58c | 040216b44d38f2ab5a111cb55981645d331c2ba3 | refs/heads/master | 2020-09-13T09:42:26.427433 | 2014-10-26T19:44:17 | 2014-10-26T22:23:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 185 | py | from haplugin.toster.fixtures import Fixtures as Base
class Fixtures(Base):
    """Placeholder fixture set for the test harness.

    `__call__` is the hook where database fixtures are created; it currently
    creates nothing (see the commented example below).
    """
    def __call__(self):
        # example:
        # self.create_nameless(Model, name=value)
        pass
| [
"msocek@gmail.com"
] | msocek@gmail.com |
5958f2d96723eb9a20b3880f5670aec2ce17f23e | d904ecb1cf65ffbd2bd0332b46ae3c90a9d96158 | /Practiceself/practice02.py | 8c4c829dd45d487ddfda11cd8a41208e19f2d793 | [] | no_license | EraSilv/day2 | 9e1b5573017a6c3e3a4afa3cc0d19de10fcc4d34 | a1528897b150cd856d72e7ab387a1dbb5e0189ee | refs/heads/master | 2023-06-04T00:09:18.992833 | 2021-06-28T11:45:36 | 2021-06-28T11:45:36 | 368,862,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,778 | py | my_list = [ 'Jan' , 'Feb' , 'March']
print(my_list[2])
# my_list.append('April')
# print(my_list[3])
#-----------------------------------SETS--------------------
# my_set = {'Jan', 'Feb' , 'March'}
# for element in my_set:
# print(element)
# my_set.add('April')
# print(my_set)
# my_set.remove('Jan')
# print(my_set)
#------------------------------------------------------------
# a = 10
# while a < 100:
# a = a + 1
# print(a)
#-------------------------------------------------------------
# info =''
# fname = ''
# while len(info) == 0 or len(fname) == 0 :
# info = input('ENter n:').title()
# fname = input('enter l:').title()
# if info == 'Erlan' and fname == 'Yrysbaev':
# print('Welcome! ' , info , fname )
# else:
# print('Error!')
#-------------------------------------------------------------
# guests = ['Axe', 'Tom', 'Jax', 'Rik', 'Sam', 'Zot']
# come = 0
# while len(guests) != come:
# a = input('Name:').title()
# if a in guests and guests.count(a) == 1:
# print('Welcome')
# come = come + 1
# else:
# print('Sorry!!')
# break
#----------------------------------------------------------------
# password = ''
# while True:
# password = input('enter password:')
# if len(password) >= 8:
# print('Good! PS')
# elif len(password) <6:
# print('Easy ps')
#--------------------------------------------------------------------------
# number = int(input(' count till:'))
# a = 1
#
# for i in range(number):
# print(i)
# while a < number:
# a = a + 1
# print(a)
# #---------------------------------------------------------------------------------
| [
"erlan2766@gmail.com"
] | erlan2766@gmail.com |
5d5433366a37f373a7f314a8b25e6aa3f725b5ff | edbb5e98d97ffb9a8e516b6d3538e7e801977e8e | /stockosaurus-api/stockosaurus/stocks/serializers.py | c73ce30f64b043fdeb02a620abfd830360cdc2e0 | [] | no_license | kristian-ellis/stockosaurus | d732e5969b1087dc9f1bdeaf5597e64f1dfad079 | cc314d7cf057d683f593560dc1108407e1567777 | refs/heads/main | 2023-06-05T11:23:52.615152 | 2021-06-21T06:13:02 | 2021-06-21T06:13:02 | 378,759,912 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 671 | py | from rest_framework import serializers
from .models import StockPrice
class StockPriceSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for StockPrice records; `owner` is read-only and taken
    from the owning user's username."""
    owner = serializers.ReadOnlyField(source='owner.username')

    class Meta:
        model = StockPrice
        fields = ['id', 'price', 'time', 'ticker_symbol', 'owner']

    def create(self, validated_data):
        """Create and persist a new StockPrice from validated input."""
        return StockPrice.objects.create(**validated_data)

    def update(self, instance, validated_data):
        """Apply a (possibly partial) update and save the instance.

        Bug fix: the fallbacks for `time` and `ticker_symbol` previously
        defaulted to `instance.price`, silently corrupting both fields on
        any partial update; each field now falls back to its own current
        value.
        """
        instance.price = validated_data.get('price', instance.price)
        instance.time = validated_data.get('time', instance.time)
        instance.ticker_symbol = validated_data.get('ticker_symbol', instance.ticker_symbol)
        instance.save()
        return instance
| [
"sillesirk@gmail.com"
] | sillesirk@gmail.com |
c94a1a475e1b0bb0560958dde86555cbfa3e67a1 | e4c5ee30ec34cd464d20cbac4087732d71efa690 | /branch/migrations/0001_initial.py | 5d04a56defcb35418969ccb9a91642a641730562 | [] | no_license | Bvegasf/e.api | 8b1067d7f60cb26176698ee824f729f3712443e2 | 0f46637a2684cf33d0eeed95aec08bad1739f606 | refs/heads/master | 2023-04-19T15:15:13.282950 | 2021-05-07T20:09:52 | 2021-05-07T20:09:52 | 353,373,119 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,177 | py | # Generated by Django 3.1.7 on 2021-04-07 21:00
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration creating the `branch` table.

    Do not edit field definitions by hand once applied — create a follow-up
    migration instead.
    """
    initial = True
    dependencies = [
        ('location', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='branch',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Restricted to three store brands; stored value is the short code.
                ('name', models.CharField(choices=[('ECHO', 'Echomert'), ('HIPER', 'Hipermerch'), ('MEGA', 'Mega')], default='ECHO', max_length=200, verbose_name='nombre')),
                # NOTE(review): 'adress' (sic) is misspelled but now baked into the schema.
                ('adress', models.TextField(blank=True, max_length=400)),
                ('city', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='location.city')),
                ('continental_region', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='location.continentalregion')),
                ('country', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='location.country')),
            ],
        ),
    ]
| [
"brayanjovegas@gmail.com"
] | brayanjovegas@gmail.com |
4d34fb5f2fcc168e6533e3ce8da19ea79db073f5 | 5a5903be0fe32048dfa90cd02334d28ab4a0b236 | /rating/migrations/Trigram_migration.py | f310dfc2ec742a6e47cc60c289456a1bc3f7ca35 | [] | no_license | faaizajaz/consultDB | 3719ad6a7d67ab70898e1dcd553d846be4865d60 | b0c6abe0877557b5118f8cd6a609f0c83fb5d66f | refs/heads/master | 2023-06-27T17:35:56.469423 | 2021-07-26T06:17:16 | 2021-07-26T06:17:16 | 315,247,592 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 267 | py | from django.contrib.postgres.operations import TrigramExtension
from django.db import migrations
class Migration(migrations.Migration):
    """Installs the PostgreSQL `pg_trgm` extension for trigram similarity
    search (requires a database role allowed to CREATE EXTENSION)."""
    dependencies = [
        ('rating', '0005_auto_20210602_0634'),
    ]
    operations = [
        TrigramExtension(),
    ]
"faaizajaz@gmail.com"
] | faaizajaz@gmail.com |
9b34c6eb8a68435e7d22837385604c8d8c0674b0 | dfa4bc19560164d81cc767cdc50136e06d33fb13 | /src/common/data.py | 8f2d7154d392cf50f2ff643dfe9506a56d7e7c3d | [
"MIT"
] | permissive | wdoppenberg/crater-detection | 2da7f7e9843171b63b073d013d7f193863165e07 | 471d1bc508dee873cc5d05329147dfc5314bc15d | refs/heads/main | 2023-06-06T11:46:50.748956 | 2021-06-22T07:42:15 | 2021-06-22T07:42:15 | 305,447,421 | 16 | 0 | MIT | 2021-06-07T22:02:33 | 2020-10-19T16:32:29 | Python | UTF-8 | Python | false | false | 6,705 | py | import datetime as dt
import os
import uuid
import h5py
import numpy as np
from matplotlib import pyplot as plt
from tqdm.auto import tqdm as tq
import src.common.constants as const
from src.common.conics import MaskGenerator
from src.common.surrender import SurRenderer
class DataGenerator(MaskGenerator, SurRenderer):
    """Combines mask generation and SurRender-based image rendering so one
    object can emit matched (image, mask) pairs for the same camera state."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): positional *args are accepted but silently dropped —
        # only **kwargs are forwarded to the parent initializers.
        super(DataGenerator, self).__init__(**kwargs)

    def image_mask_pair(self, **mask_kwargs):
        """Return (rendered image, crater mask) for the current pose/time."""
        return self.generate_image(), self.generate_mask(**mask_kwargs)
def generate(size, **kwargs):
    """Render `size` random (image, mask) samples plus per-frame metadata.

    kwargs carries the generation settings (see const.GENERATION_KWARGS):
    crater filtering limits, camera fov/resolution, mask options, solar
    incidence bounds, `randomized_orientation` and `save_craters`.

    Returns (images, masks, position, attitude, date, sol_incidence,
    A_craters); A_craters stays empty unless kwargs["save_craters"] is truthy.

    NOTE(review): kept byte-identical apart from comments — the exact order
    of np.random calls defines the dataset for a fixed seed, so any
    restructuring would silently change generated data.
    """
    generator = DataGenerator.from_robbins_dataset(
        diamlims=kwargs["diamlims"],
        ellipse_limit=kwargs["ellipse_limit"],
        arc_lims=kwargs["arc_lims"],
        axis_threshold=kwargs["axis_threshold"],
        fov=kwargs["fov"],
        resolution=kwargs["resolution"],
        filled=kwargs["filled"],
        mask_thickness=kwargs["mask_thickness"],
        instancing=kwargs["instancing"]
    )
    # Pre-allocate one slot per sample; instanced masks need int labels,
    # plain masks are boolean.
    date_dataset = np.empty((size, 3), int)
    images_dataset = np.empty((size, 1, *generator.resolution), np.float32)
    if kwargs["instancing"]:
        masks_dataset = np.empty((size, 1, *generator.resolution), np.int16)
    else:
        masks_dataset = np.empty((size, 1, *generator.resolution), np.bool_)
    position_dataset = np.empty((size, 3, 1), np.float64)
    attitude_dataset = np.empty((size, 3, 3), np.float64)
    sol_incidence_dataset = np.empty((size, 1), np.float16)
    A_craters = []
    for i in tq(range(size), desc="Creating dataset"):
        # Random month in 2021 (day fixed to the 1st) sets the sun geometry.
        date = dt.date(2021, np.random.randint(1, 12), 1)
        generator.set_random_position()
        generator.scene_time = date
        date_dataset[i] = np.array((date.year, date.month, date.day))
        # Rejection-sample positions until solar incidence is in bounds.
        while not (kwargs["min_sol_incidence"] <= generator.solar_incidence_angle <= kwargs["max_sol_incidence"]):
            generator.set_random_position()  # Generate random position
        position_dataset[i] = generator.position
        sol_incidence_dataset[i] = generator.solar_incidence_angle
        generator.point_nadir()
        if kwargs["randomized_orientation"]:
            # Rotations are incremental (order matters)
            generator.rotate('roll', np.random.randint(0, 360))
            generator.rotate('pitch', np.random.randint(-30, 30))
            generator.rotate('yaw', np.random.randint(-30, 30))
        attitude_dataset[i] = generator.attitude
        image, mask = generator.image_mask_pair()
        # Insert leading (sample, channel) axes to match the dataset layout.
        masks_dataset[i] = mask[None, None, ...]
        images_dataset[i] = image[None, None, ...]
        if kwargs["save_craters"]:
            A_craters.append(generator.craters_in_image())
    return images_dataset, masks_dataset, position_dataset, attitude_dataset, date_dataset, sol_incidence_dataset, A_craters
def demo_settings(n_demo=20,
                  generation_kwargs=None):
    """Render and display `n_demo` random image/mask pairs for a quick
    visual sanity-check of the generation settings.

    generation_kwargs: optional overrides merged on top of the defaults.
    """
    # Bug fix: copy the defaults — the previous code aliased
    # const.GENERATION_KWARGS and .update() mutated the shared module-level
    # constant for every later caller.
    generation_kwargs_ = dict(const.GENERATION_KWARGS)
    if generation_kwargs is not None:
        generation_kwargs_.update(generation_kwargs)
    images, mask, _, _, _, _, _ = generate(n_demo, **generation_kwargs_)
    fig, axes = plt.subplots(n_demo, 2, figsize=(10, 5 * n_demo))
    for i in range(n_demo):
        axes[i, 0].imshow(images[i, 0], cmap='Greys_r')
        axes[i, 1].imshow(mask[i, 0], cmap='gray')
    plt.tight_layout()
    plt.show()
def make_dataset(n_training,
                 n_validation,
                 n_testing,
                 output_path=None,
                 identifier=None,
                 generation_kwargs=None):
    """Generate a full HDF5 dataset with training/validation/test groups.

    Each group stores images, masks, per-frame pose/date/incidence arrays
    and a flattened crater list with a cumulative index. The generation
    settings are saved into a "header" group for reproducibility.

    Raises ValueError if the target file already exists.
    """
    if output_path is None:
        if identifier is not None:
            output_path = f"data/dataset_{identifier}.h5"
        else:
            output_path = "data/dataset_crater_detection.h5"
    # Bug fix: copy the defaults instead of aliasing const.GENERATION_KWARGS —
    # the previous .update() mutated the shared module-level constant.
    generation_kwargs_ = dict(const.GENERATION_KWARGS)
    if generation_kwargs is not None:
        generation_kwargs_.update(generation_kwargs)
    if os.path.exists(output_path):
        raise ValueError(f"Dataset named `{os.path.basename(output_path)}` already exists!")
    with h5py.File(output_path, 'w') as hf:
        # Persist the generation settings alongside the data.
        g_header = hf.create_group("header")
        for k, v in generation_kwargs_.items():
            g_header.create_dataset(k, data=v)
        for group_name, dset_size in zip(
                ("training", "validation", "test"),
                (n_training, n_validation, n_testing)
        ):
            print(f"Creating dataset '{group_name}' @ {dset_size} images")
            group = hf.create_group(group_name)
            (images, masks, position, attitude, date, sol_incidence, A_craters) = generate(dset_size,
                                                                                           **generation_kwargs_)
            for ds, name in zip(
                    (images, masks, position, attitude, date, sol_incidence),
                    ("images", "masks", "position", "attitude", "date", "sol_incidence")
            ):
                group.create_dataset(name, data=ds)
            # Ragged per-image crater lists are flattened; crater_list_idx[i]
            # gives the start offset of image i's craters in A_craters.
            lengths = np.array([len(cs) for cs in A_craters])
            crater_list_idx = np.insert(lengths.cumsum(), 0, 0)
            A_craters = np.concatenate(A_craters)
            cg = group.create_group("craters")
            cg.create_dataset("crater_list_idx", data=crater_list_idx)
            cg.create_dataset("A_craters", data=A_craters)
def inspect_dataset(dataset_path, plot=True, summary=True, n_inspect=25, pixel_range=(0, 1), return_fig=False):
    """Show a random sample of training images/masks and the dataset header.

    Returns the figure when plot and return_fig are set, the header dict when
    plot is False, otherwise None (after plt.show()).
    """
    with h5py.File(dataset_path, "r") as hf:
        n_available = len(hf['training/images'])
        # Don't request more samples than the dataset holds.
        n_inspect = min(n_inspect, n_available)
        # Bug fix: sample WITHOUT replacement — h5py fancy indexing requires a
        # strictly increasing (duplicate-free) index list, and the previous
        # replace=True draw could produce duplicates and fail.
        idx = np.sort(np.random.choice(np.arange(n_available), n_inspect, replace=False))
        images = hf['training/images'][idx]
        masks = hf['training/masks'][idx]
        header = hf["header"]
        header_dict = dict()
        for k, v in header.items():
            header_dict[k] = v[()]
    if summary:
        print("Dataset header:")
        for k, v in header_dict.items():
            print(f"\t{k}: {v}")
    if plot:
        fig, axes = plt.subplots(n_inspect, 3, figsize=(15, 5 * n_inspect))
        n_bins = 256
        for i in range(n_inspect):
            axes[i, 0].imshow(images[i, 0], cmap='gray')
            # Normalized histogram of pixel intensities for this image.
            weights = np.ones_like(images[i, 0].flatten()) / float(len(images[i, 0].flatten()))
            axes[i, 1].hist(images[i, 0].flatten(), n_bins, pixel_range, color='r', weights=weights)
            axes[i, 1].set_xlim(pixel_range)
            axes[i, 1].set_ylabel('Probability')
            axes[i, 1].set_xlabel('Pixel value')
            axes[i, 2].imshow(masks[i][0] * 10, cmap='Blues')
        plt.tight_layout()
        if return_fig:
            return fig
        else:
            plt.show()
    else:
        return header_dict
| [
"w_doppie@hotmail.com"
] | w_doppie@hotmail.com |
00686c1b3bbd8f8b1e0be4132b5f74e32b32863d | 8a2cbaece98bf32d315330246b8e19fa9c616789 | /workload/migrations/0007_teaching_subject_id.py | 9464633810edabb7f467411de34a89800d4430fd | [] | no_license | cn-class/workload | b6ab3f0fc80e83720c54f9de48184912a32f7529 | 535028874d1ecfab94aef2837e10f2aa44577de1 | refs/heads/master | 2021-06-19T11:42:34.584840 | 2017-07-23T15:14:06 | 2017-07-23T15:14:06 | 66,363,024 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 503 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-15 15:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration adding `subject_ID` to the `teaching` model.

    `default=1` exists only to backfill existing rows (preserve_default=False
    removes it from the model afterwards). Do not edit by hand once applied.
    """
    dependencies = [
        ('workload', '0006_auto_20170315_0916'),
    ]
    operations = [
        migrations.AddField(
            model_name='teaching',
            name='subject_ID',
            field=models.CharField(default=1, max_length=10),
            preserve_default=False,
        ),
    ]
| [
"5610520172@student.tu.ac.th"
] | 5610520172@student.tu.ac.th |
d7893781c8869541e806fcbcbc353555c39f40fe | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_280/ch39_2020_03_31_00_53_54_201792.py | 47ea9f05865c88712d8501fe79b7fc7736d39472 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 284 | py | def tamanho(x):
    # Collatz chain length: number of terms from x down to 1, inclusive.
    i = 1
    while x != 1:
        if x%2 == 0:
            x = x/2  # NOTE(review): true division makes x a float; the chain length is unaffected
        else:
            x = 3*x + 1
        i += 1
    return i


# Driver (Project-Euler-14 style): find the start value below 1000 with the
# longest Collatz chain. Ties resolve to the LARGER start because of ">=",
# and tamanho() is wastefully evaluated twice per winning candidate.
nms = 1
ms = 1
x = 2
while 1 < x < 1000:
    if tamanho(x) >= ms:
        ms = tamanho (x)
        nms = x
    x = x + 1
print(nms)
"you@example.com"
] | you@example.com |
2bc832b1fce648d9a602d9d3e10c9770e92bf65d | 4a49d51b97271d61c1a34a472fd8ad22314e8a8e | /week6/assignment1/assignment1_8.py | 8419c4fdd6c82217e1b43a17b7bfe54cfe3e2406 | [] | no_license | RTae/CPE463 | edd7fda443cd00136e1cb43f313cc27b99905d17 | e44308b0dd40acd811d9ad98fa4a9ae2e0bfa9f2 | refs/heads/main | 2023-06-24T09:39:46.956556 | 2021-07-27T02:37:17 | 2021-07-27T02:37:17 | 330,554,963 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,304 | py | import cv2
import numpy as np
import matplotlib.pyplot as plt
# Load the test image; OpenCV decodes as BGR, so convert to RGB for plotting.
img_table = cv2.imread("./images/circles.bmp")
img_table = cv2.cvtColor(img_table, cv2.COLOR_BGR2RGB)
# Define range for value to filter back and white out
# (hue/saturation unrestricted; only the V channel 40..215 is kept).
lower_hue = np.array([0,0,40])
upper_hue = np.array([255,255,215])
# Convert image to HSV color space
img_hsv = cv2.cvtColor(img_table, cv2.COLOR_RGB2HSV)
# Filter the image
mask_hsv = cv2.inRange(img_hsv, lower_hue, upper_hue)
# Masking the image: zero out pixels that fell outside the HSV range.
masked_image = np.copy(img_table)
masked_image[mask_hsv==0] = [0,0,0]
# Convert mask image to gray scale image
masked_image = cv2.cvtColor(masked_image,cv2.COLOR_RGB2GRAY)
# Use color tresholding to create binary image
_, binary = cv2.threshold(masked_image, 40, 215, cv2.THRESH_BINARY)
# Find contours of image
contours, _ = cv2.findContours(binary, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
# Remove not circle contours by size: a contour with fewer than 20 points is
# assumed to be noise. NOTE(review): this filter affects only the COUNT in the
# title — drawContours below still draws every contour, small ones included.
temp_array = []
for idx_contour in range(len(contours)):
    temp_array.append(not (contours[idx_contour].shape[0] < 20))
# Draw contours on image
contours_image = np.copy(img_table)
contours_image = cv2.drawContours(contours_image, contours, -1, (0,255,0), 3)
# Show draw image; sum(temp_array) counts the True entries (accepted circles).
plt.imshow(contours_image, cmap="gray")
plt.title("Number of circle: "+str(sum(temp_array)))
plt.axis('off')
plt.show()
"potae02@gmail.com"
] | potae02@gmail.com |
0082966d84900fd401c848fc452040242b8351db | 5fad4b8e82e2e9bc64910f9788335f442827c4dd | /evo/migrations/0001_initial.py | 3b8a7736eca16b3a304e76898d3702e2de8e8387 | [] | no_license | mmost12/pokemon | 2f373d37687f268e75374fd204f94f75fc599f9e | f4172147df2f2154b7e5e36cd49146b0145ece79 | refs/heads/master | 2021-01-10T22:52:31.082635 | 2016-10-09T18:28:37 | 2016-10-09T18:28:37 | 70,342,625 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,070 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-10-08 17:43
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration creating the `Pokemon` table
    (base stats plus two type columns). Do not edit by hand once applied."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Pokemon',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # National Pokédex number (not unique here — forms can repeat it).
                ('no', models.IntegerField()),
                ('name', models.CharField(max_length=200)),
                ('type1', models.CharField(max_length=200)),
                ('type2', models.CharField(max_length=200)),
                ('total', models.IntegerField()),
                ('hp', models.IntegerField()),
                ('attack', models.IntegerField()),
                ('defense', models.IntegerField()),
                ('spatk', models.IntegerField()),
                ('spdef', models.IntegerField()),
                ('speed', models.IntegerField()),
            ],
        ),
    ]
| [
"carlsonwes@gmail.com"
] | carlsonwes@gmail.com |
6f272becb6a9c5882e84a26058b7d26e94968ef4 | 3ad616e2adfa7aa7d4c8e16d2daeb318e6823b8a | /player.py | 442ffefc44600bc6366b81190c4be3e48e0aa868 | [] | no_license | DavKle132/cs364_project01_klein | e058d59baa49ed494f9681016cf1b082c07109bc | 7aec15aec4070cc015e6542e90a84fbff583df86 | refs/heads/master | 2020-03-14T07:53:13.606130 | 2018-05-03T23:18:27 | 2018-05-03T23:18:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,985 | py | import sqlite3
from playerGod import PlayerGod
from playerMatch import PlayerMatch
class Player:
    """One player record pulled from the game API, persisted into SQLite."""

    def __init__(self, json, jsonPG, jsonM):
        # Column values captured in the same order as the INSERT in toDB().
        # NOTE(review): jsonPG and jsonM are accepted here but unused — the
        # child rows are written from the arguments passed to toDB(); the
        # parameters are kept for caller compatibility.
        self.attributes = (
            json[0]['Avatar_URL'],
            json[0]['Created_Datetime'],
            json[0]['Id'],
            json[0]['Last_Login_Datetime'],
            json[0]['Leaves'],
            json[0]['Level'],
            json[0]['Losses'],
            json[0]['MasteryLevel'],
            json[0]['Name'],
            json[0]['Personal_Status_Message'],
            json[0]['Region'],
            json[0]['TeamId'],
            json[0]['Team_Name'],
            json[0]['Total_Achievements'],
            json[0]['Total_Worshippers'],
            json[0]['Wins'],
        )

    def toDB(self, dbPath, json, jsonPG, jsonM):
        """Insert the player row into the database at `dbPath`, then insert
        the per-match (jsonM) and per-god (jsonPG) child rows.

        `json` is unused (the row data was captured in __init__); the
        parameter is kept for backward compatibility with existing callers.
        """
        conn = sqlite3.connect(dbPath)
        c = conn.cursor()
        # Parameterized query: values are bound, never string-formatted.
        c.execute('''INSERT INTO Player (
                         Avatar_URL, Created_Datetime, PlayerID,
                         Last_Login_Datetime, Leaves, Level, Losses,
                         MasteryLevel, Name, Personal_Status_Message,
                         Region, TeamId, Team_Name, Total_Achievements,
                         Total_Worshippers, Wins
                     ) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)''',
                  self.attributes)
        conn.commit()
        conn.close()
        # Bug fix: child rows were previously written to a hard-coded
        # 'db.sqlite3' regardless of the dbPath this method was given.
        for x in jsonM:
            m = PlayerMatch(x)
            m.toDB(dbPath)
        for x in jsonPG:
            pg = PlayerGod(x)
            pg.toDB(dbPath)
| [
"klein.david@uwlax.edu"
] | klein.david@uwlax.edu |
064964e742e576e3039314cbce62c85970602318 | 2d87756794ab27a8a82ad6389f1678f0ed14f906 | /tests/unit/test_trainer_dict_params.py | b75ffe9f2d15f8385c2b39733f7865ad833640cc | [
"Apache-2.0"
] | permissive | blagojce95/ai-research-mamo-framework | a9e0f70896f686b1cb3a7526480cd0c26c8e2b29 | 7f3b5a5a9fb8b19c9eef453b81b03b6046a33bf2 | refs/heads/master | 2022-11-17T22:43:34.596561 | 2020-07-06T07:02:35 | 2020-07-06T07:02:35 | 277,468,836 | 0 | 0 | Apache-2.0 | 2020-07-06T07:10:47 | 2020-07-06T07:10:46 | null | UTF-8 | Python | false | false | 8,638 | py | import torch
import numpy as np
import os
import pytest
from dataloader.ae_data_handler import AEDataHandler
from models.multi_VAE import MultiVAE
from loss.vae_loss import VAELoss
from metric.recall_at_k import RecallAtK
from metric.revenue_at_k import RevenueAtK
from paretomanager.pareto_manager_class import ParetoManager
from validator import Validator
from trainer import Trainer
import torch.nn as nn
from torch.utils.data import DataLoader
# set cuda if available
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
# create temporary directories for the generated data and saved models
if not os.path.isdir('test_data_mo_dp'):
    os.mkdir('test_data_mo_dp')
if not os.path.isdir('test_data_mo_dp/models'):
    os.mkdir('test_data_mo_dp/models')
# generate random data (seeded for reproducibility); shapes mimic the
# MovieLens autoencoder splits: 8936 items per user row
np.random.seed(42)
dir_path = 'test_data_mo_dp/'
train_data_path = os.path.join(
    dir_path, 'movielens_small_training.npy')
validation_input_data_path = os.path.join(
    dir_path, 'movielens_small_validation_input.npy')
validation_output_data_path = os.path.join(
    dir_path, 'movielens_small_validation_test.npy')
test_input_data_path = os.path.join(
    dir_path, 'movielens_small_test_input.npy')
test_output_data_path = os.path.join(
    dir_path, 'movielens_small_test_test.npy')
products_data_path = os.path.join(
    dir_path, 'movielens_products_data.npy')
np.save(train_data_path, np.random.rand(10000, 8936).astype('float32'))
np.save(validation_input_data_path, np.random.rand(2000, 8936).astype('float32'))
np.save(validation_output_data_path, np.random.rand(2000, 8936).astype('float32'))
np.save(test_input_data_path, np.random.rand(2000, 8936).astype('float32'))
np.save(test_output_data_path, np.random.rand(2000, 8936).astype('float32'))
np.save(products_data_path, np.random.rand(8936))
dataHandler = AEDataHandler(
    'Testing trainer random dataset', train_data_path, validation_input_data_path,
    validation_output_data_path, test_input_data_path,
    test_output_data_path)
input_dim = dataHandler.get_input_dim()
output_dim = dataHandler.get_output_dim()
# per-product revenue vector, both as numpy (metrics) and torch (loss)
products_data_np = np.load(products_data_path)
products_data_torch = torch.tensor(
    products_data_np, dtype=torch.float32).to(device)
# create model and the two objectives (plain VAE loss + revenue-weighted)
model = MultiVAE(params='yaml_files/params_multi_VAE.yaml')
correctness_loss = VAELoss()
revenue_loss = VAELoss(weighted_vector=products_data_torch)
losses = [correctness_loss, revenue_loss]
recallAtK = RecallAtK(k=10)
revenueAtK = RevenueAtK(k=10, revenue=products_data_np)
validation_metrics = [recallAtK, revenueAtK]
# Trainer configuration shared by every test below
save_to_path = 'test_data_mo_dp/models'
params = {
    'seed': 42,
    'normalize_gradients': True,
    'learning_rate': 1e-3,
    'batch_size_training': 500,
    'shuffle_training': True,
    'drop_last_batch_training': True,
    'batch_size_validation': 500,
    'shuffle_validation': True,
    'drop_last_batch_validation': False,
    'batch_size_testing': 500,
    'shuffle_testing': True,
    'drop_last_batch_testing': True,
    'number_of_epochs': 50,
    'frank_wolfe_max_iter': 100,
    'anneal': True,
    'beta_start': 0,
    'beta_cap': 0.3,
    'beta_step': '0.3/10000'
}
# test the init arguments
def test_check_input1():
    """Trainer must reject a missing or mis-ordered data handler."""
    expected = ('Please check you are using the right data handler object,'
                ' or the right order of the attributes!')
    with pytest.raises(TypeError, match=expected):
        Trainer(None, model, losses, validation_metrics, save_to_path, params).train()
    with pytest.raises(TypeError, match=expected):
        Trainer(model, dataHandler, losses, validation_metrics, save_to_path, params).train()
def test_check_input2():
    """Trainer must reject a None model."""
    expected = ('Please check you are using the right model object,'
                ' or the right order of the attributes!')
    with pytest.raises(TypeError, match=expected):
        Trainer(dataHandler, None, losses, validation_metrics, save_to_path, params).train()
def test_check_input3():
    """A model lacking initialize_model() must raise TypeError."""

    class BareModel(nn.Module):
        # minimal module: forward only, deliberately no initialize_model()
        def forward(self):
            return 1

    expected = 'Please check if your models has initialize_model\\(\\) method defined!'
    with pytest.raises(TypeError, match=expected):
        Trainer(dataHandler, BareModel(), losses, validation_metrics,
                save_to_path, params).train()
def test_check_input4():
    """A non-loss object inside the losses list must raise TypeError."""
    # replace the first loss with a metric object to corrupt the list
    bad_losses = list(losses)
    bad_losses[0] = validation_metrics[0]
    expected = ('Please check you are using the right loss objects,'
                + ' or the right order of the attributes!')
    with pytest.raises(TypeError, match=expected):
        Trainer(dataHandler, model, bad_losses, validation_metrics,
                save_to_path, params).train()
def test_check_input5():
    """A non-metric object inside the metrics list must raise TypeError."""
    # replace the first metric with the model object to corrupt the list
    bad_metrics = list(validation_metrics)
    bad_metrics[0] = model
    expected = ('Please check you are using the right metric objects,'
                + ' or the right order of the attributes!')
    with pytest.raises(TypeError, match=expected):
        Trainer(dataHandler, model, losses, bad_metrics,
                save_to_path, params).train()
def test_check_input6():
    """A non-empty save directory ('.') must raise ValueError."""
    expected = ('Please make sure that the directory where'
                + ' you want to save the models is empty!')
    with pytest.raises(ValueError, match=expected):
        Trainer(dataHandler, model, losses, validation_metrics,
                '.', params).train()
def test_check_input7():
    """Losses must be non-None and contain at least two entries."""
    # None losses are refused outright
    with pytest.raises(ValueError, match='The losses are None, please make sure to give valid losses!'):
        Trainer(dataHandler, model, None, validation_metrics,
                save_to_path, params).train()
    # a single loss is refused: multi-objective training needs two or more
    single_loss = [losses[0]]
    expected = ('Please check you have defined at least two losses,'
                + ' for training with one loss use the Single Objective Loss class!')
    with pytest.raises(ValueError, match=expected):
        Trainer(dataHandler, model, single_loss, validation_metrics,
                save_to_path, params).train()
def test_check_input8():
    """Validation metrics must be non-None and contain at least one entry."""
    # None metrics are refused outright
    expected_none = ('The validation_metrics are None,'
                     + ' please make sure to give valid validation_metrics!')
    with pytest.raises(ValueError, match=expected_none):
        Trainer(dataHandler, model, losses, None,
                save_to_path, params).train()
    # an empty metrics list is refused as well
    with pytest.raises(ValueError, match='Please check you have defined at least one validation metric!'):
        Trainer(dataHandler, model, losses, [],
                save_to_path, params).train()
def test_check_input9():
    """A non-Optimizer passed in the optimizer slot must raise TypeError."""
    with pytest.raises(TypeError, match='Please make sure that the optimizer is a pytorch Optimizer object!'):
        # `model` stands in as an obviously invalid optimizer argument
        Trainer(dataHandler, model, losses, validation_metrics,
                save_to_path, params, model).train()
# test the reading from the yaml files
def test_read_yaml_params():
    """Every hyperparameter in `params` must land on the Trainer verbatim.

    Booleans are checked with identity (`is`), everything else with equality;
    `beta_step` is expected to have been parsed from its string form.
    """
    trainer = Trainer(dataHandler, model, losses, validation_metrics,
                      save_to_path, params)
    expected = {
        'seed': 42,
        'normalize_gradients': True,
        'learning_rate': 1e-3,
        'batch_size_training': 500,
        'shuffle_training': True,
        'drop_last_batch_training': True,
        'batch_size_validation': 500,
        'shuffle_validation': True,
        'drop_last_batch_validation': False,
        'number_of_epochs': 50,
        'frank_wolfe_max_iter': 100,
        'anneal': True,
        'beta_start': 0,
        'beta_cap': 0.3,
        'beta_step': 0.3 / 10000,
    }
    for attr, value in expected.items():
        actual = getattr(trainer, attr)
        if isinstance(value, bool):
            assert actual is value
        else:
            assert actual == value
# test the init of the objects
def test_init_objects():
    """Constructing a Trainer must wire up its collaborator objects."""
    trainer = Trainer(dataHandler, model, losses, validation_metrics,
                      save_to_path, params)
    # exact-type checks mirror how the collaborators are constructed
    for attr, expected_cls in (('_train_dataloader', DataLoader),
                               ('pareto_manager', ParetoManager),
                               ('validator', Validator)):
        assert type(getattr(trainer, attr)) == expected_cls
    assert trainer.pareto_manager.path == save_to_path
    # one maximum-empirical-loss entry per training objective
    assert len(trainer.max_empirical_losses) == 2
# removing generated data
def test_cleanup():
    """Delete every generated fixture file, then the (now empty) directories."""
    for fixture_path in (train_data_path,
                         validation_input_data_path,
                         validation_output_data_path,
                         test_input_data_path,
                         test_output_data_path,
                         products_data_path):
        os.remove(fixture_path)
    # directories must be removed innermost-first
    os.rmdir('test_data_mo_dp/models')
    os.rmdir('test_data_mo_dp')
| [
"taamucl2@um00698.home"
] | taamucl2@um00698.home |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.